
merge with main

pull/1256/head
Vallie Joseph 2023-02-13 16:27:03 +00:00
commit 9912202f8d
49 changed files with 3853 additions and 2168 deletions

View File

@@ -22,12 +22,12 @@ jobs:
steps:
- name: Checkout
uses: actions/checkout@v2
uses: actions/checkout@v3
- name: Set Node.js 12.x
uses: actions/setup-node@v1
- name: Set Node.js 16.x
uses: actions/setup-node@v3
with:
node-version: 12.x
node-version: 16.x
# In order to upload & download artifacts from a shell script, certain env variables need to be set that are only available in the
# node context. This runs a local action that gets and sets the necessary env variables that are needed
@@ -55,16 +55,16 @@ jobs:
- name: Create files that will be uploaded
run: |
mkdir artifact-path
echo ${{ env.non-gzip-artifact-content }} > artifact-path/world.txt
echo ${{ env.gzip-artifact-content }} > artifact-path/gzip.txt
echo '${{ env.non-gzip-artifact-content }}' > artifact-path/world.txt
echo '${{ env.gzip-artifact-content }}' > artifact-path/gzip.txt
touch artifact-path/empty.txt
# We're using node -e to call the functions directly available in the @actions/artifact package
- name: Upload artifacts using uploadArtifact()
run: |
node -e "Promise.resolve(require('./packages/artifact/lib/artifact-client').create().uploadArtifact('my-artifact-1',['artifact-path/world.txt'], '${{ github.workspace }}'))"
node -e "Promise.resolve(require('./packages/artifact/lib/artifact-client').create().uploadArtifact('my-artifact-2',['artifact-path/gzip.txt'], '${{ github.workspace }}'))"
node -e "Promise.resolve(require('./packages/artifact/lib/artifact-client').create().uploadArtifact('my-artifact-3',['artifact-path/empty.txt'], '${{ github.workspace }}'))"
node -e "Promise.resolve(require('./packages/artifact/lib/artifact-client').create().uploadArtifact('my-artifact-1',['artifact-path/world.txt'], process.argv[1]))" "${{ github.workspace }}"
node -e "Promise.resolve(require('./packages/artifact/lib/artifact-client').create().uploadArtifact('my-artifact-2',['artifact-path/gzip.txt'], process.argv[1]))" "${{ github.workspace }}"
node -e "Promise.resolve(require('./packages/artifact/lib/artifact-client').create().uploadArtifact('my-artifact-3',['artifact-path/empty.txt'], process.argv[1]))" "${{ github.workspace }}"
- name: Download artifacts using downloadArtifact()
run: |
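Aside: the rewritten commands pass `${{ github.workspace }}` as a trailing CLI argument instead of splicing it into the inline script, so the expression expansion can no longer alter the evaluated JavaScript. A minimal sketch of the pattern, with an illustrative log line (not the toolkit's code):

```ts
// With `node -e "<script>" "<arg>"`, trailing CLI arguments start at
// process.argv[1] (argv[0] is the node binary and there is no script file),
// which is why the workflow above reads the workspace from argv[1].
const rootDirectory: string = process.argv[1]
console.log(`uploading from ${rootDirectory}`)
```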

View File

@@ -18,12 +18,12 @@ jobs:
steps:
- name: Checkout
uses: actions/checkout@v2
uses: actions/checkout@v3
- name: Set Node.js 12.x
uses: actions/setup-node@v1
- name: Set Node.js 16.x
uses: actions/setup-node@v3
with:
node-version: 12.x
node-version: 16.x
- name: npm install
run: npm install
@@ -31,9 +31,8 @@ jobs:
- name: Bootstrap
run: npm run bootstrap
- name: audit tools
# `|| npm audit` to pretty-print the output if vulnerabilities are found after filtering.
run: npm audit --audit-level=moderate --json | scripts/audit-allow-list || npm audit --audit-level=moderate
- name: audit tools (without allow-list)
run: npm audit --audit-level=moderate
- name: audit packages
run: npm run audit-all

View File

@@ -22,12 +22,12 @@ jobs:
steps:
- name: Checkout
uses: actions/checkout@v2
uses: actions/checkout@v3
- name: Set Node.js 12.x
uses: actions/setup-node@v1
- name: Set Node.js 16.x
uses: actions/setup-node@v3
with:
node-version: 12.x
node-version: 16.x
# In order to save & restore cache from a shell script, certain env variables need to be set that are only available in the
# node context. This runs a local action that gets and sets the necessary env variables that are needed

View File

@@ -0,0 +1,90 @@
name: cache-windows-bsd-unit-tests
on:
push:
branches:
- main
paths-ignore:
- '**.md'
pull_request:
paths-ignore:
- '**.md'
jobs:
build:
name: Build
runs-on: windows-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- shell: bash
run: |
rm "C:\Program Files\Git\usr\bin\tar.exe"
- name: Set Node.js 12.x
uses: actions/setup-node@v1
with:
node-version: 12.x
# In order to save & restore cache from a shell script, certain env variables need to be set that are only available in the
# node context. This runs a local action that gets and sets the necessary env variables that are needed
- name: Set env variables
uses: ./packages/cache/__tests__/__fixtures__/
# Need the root node_modules because packages like jest are configured for the entire repository,
# and without them the cache package cannot be compiled on its own
- name: Install root npm packages
run: npm ci
- name: Compile cache package
run: |
npm ci
npm run tsc
working-directory: packages/cache
- name: Generate files in working directory
shell: bash
run: packages/cache/__tests__/create-cache-files.sh ${{ runner.os }} test-cache
- name: Generate files outside working directory
shell: bash
run: packages/cache/__tests__/create-cache-files.sh ${{ runner.os }} ~/test-cache
# We're using node -e to call the functions directly available in the @actions/cache package
- name: Save cache using saveCache()
run: |
node -e "Promise.resolve(require('./packages/cache/lib/cache').saveCache(['test-cache','~/test-cache'],'test-${{ runner.os }}-${{ github.run_id }}'))"
- name: Delete cache folders before restoring
shell: bash
run: |
rm -rf test-cache
rm -rf ~/test-cache
- name: Restore cache using restoreCache() with http-client
run: |
node -e "Promise.resolve(require('./packages/cache/lib/cache').restoreCache(['test-cache','~/test-cache'],'test-${{ runner.os }}-${{ github.run_id }}',[],{useAzureSdk: false}))"
- name: Verify cache restored with http-client
shell: bash
run: |
packages/cache/__tests__/verify-cache-files.sh ${{ runner.os }} test-cache
packages/cache/__tests__/verify-cache-files.sh ${{ runner.os }} ~/test-cache
- name: Delete cache folders before restoring
shell: bash
run: |
rm -rf test-cache
rm -rf ~/test-cache
- name: Restore cache using restoreCache() with Azure SDK
run: |
node -e "Promise.resolve(require('./packages/cache/lib/cache').restoreCache(['test-cache','~/test-cache'],'test-${{ runner.os }}-${{ github.run_id }}'))"
- name: Verify cache restored with Azure SDK
shell: bash
run: |
packages/cache/__tests__/verify-cache-files.sh ${{ runner.os }} test-cache
packages/cache/__tests__/verify-cache-files.sh ${{ runner.os }} ~/test-cache
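This new workflow deletes Git's GNU tar so the run exercises Windows BSD tar, then round-trips a cache through both download paths: once with `{useAzureSdk: false}` to force the plain http-client and once with the default Azure SDK. Roughly the same flow in plain TypeScript, with illustrative paths and key (a sketch; the workflow also deletes the folders between restores):

```ts
import * as cache from '@actions/cache'

async function roundTrip(): Promise<void> {
  const paths = ['test-cache', '~/test-cache']
  const key = `test-${process.platform}-${Date.now()}` // illustrative key

  await cache.saveCache(paths, key)
  // Force the plain http-client download path...
  await cache.restoreCache(paths, key, [], {useAzureSdk: false})
  // ...then take the default Azure SDK path.
  await cache.restoreCache(paths, key)
}
```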

View File

@@ -20,7 +20,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v2
uses: actions/checkout@v3
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL

View File

@@ -13,15 +13,15 @@ jobs:
steps:
- name: setup repo
uses: actions/checkout@v2
uses: actions/checkout@v3
- name: verify package exists
run: ls packages/${{ github.event.inputs.package }}
- name: Set Node.js 12.x
uses: actions/setup-node@v1
- name: Set Node.js 16.x
uses: actions/setup-node@v3
with:
node-version: 12.x
node-version: 16.x
- name: npm install
run: npm install
@@ -40,7 +40,7 @@ jobs:
working-directory: packages/${{ github.event.inputs.package }}
- name: upload artifact
uses: actions/upload-artifact@v2
uses: actions/upload-artifact@v3
with:
name: ${{ github.event.inputs.package }}
path: packages/${{ github.event.inputs.package }}/*.tgz
@@ -52,7 +52,7 @@ jobs:
steps:
- name: download artifact
uses: actions/download-artifact@v2
uses: actions/download-artifact@v3
with:
name: ${{ github.event.inputs.package }}

View File

@@ -23,12 +23,12 @@ jobs:
steps:
- name: Checkout
uses: actions/checkout@v2
uses: actions/checkout@v3
- name: Set Node.js 12.x
uses: actions/setup-node@v1
- name: Set Node.js 16.x
uses: actions/setup-node@v3
with:
node-version: 12.x
node-version: 16.x
- name: npm install
run: npm install
@@ -40,7 +40,7 @@ jobs:
run: npm run build
- name: npm test
run: npm test
run: npm test -- --runInBand
env:
GITHUB_TOKEN: ${{ github.token }}

View File

@@ -9,7 +9,7 @@ jobs:
if: ${{ github.repository_owner == 'actions' }}
steps:
- name: Checkout repository
uses: actions/checkout@v2
uses: actions/checkout@v3
- name: Update Octokit
working-directory: packages/github
run: |
@@ -30,7 +30,7 @@ jobs:
fi
- name: Create PR
if: ${{steps.status.outputs.createPR}}
uses: actions/github-script@v2
uses: actions/github-script@v6
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |

View File

@@ -32,7 +32,7 @@ jobs:
os: [ubuntu-16.04, windows-2019]
runs-on: ${{matrix.os}}
actions:
- uses: actions/setup-node@v1
- uses: actions/setup-node@v3
with:
version: ${{matrix.node}}
- run: |

View File

@@ -100,9 +100,12 @@ There are several commands to emit different levels of log output:
| log level | example usage |
|---|---|
| [debug](action-debugging.md) | `echo "::debug::My debug message"` |
| notice | `echo "::notice::My notice message"` |
| warning | `echo "::warning::My warning message"` |
| error | `echo "::error::My error message"` |
Additional syntax options are described at [the workflow command documentation](https://docs.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-a-debug-message).
### Command Echoing
By default, the echoing of commands to stdout only occurs if [Step Debugging is enabled](./action-debugging.md#How-to-Access-Step-Debug-Logs)
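Echoing can also be toggled explicitly from within a step via the `::echo::on` and `::echo::off` workflow commands.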

View File

@@ -18,7 +18,7 @@ e.g. To use https://github.com/actions/setup-node, users will author:
```yaml
steps:
using: actions/setup-node@v1
using: actions/setup-node@v3
```
# Define Metadata

package-lock.json (generated): 4456 changed lines

File diff suppressed because it is too large

View File

@@ -15,7 +15,7 @@
},
"devDependencies": {
"@types/jest": "^27.0.2",
"@types/node": "^12.20.13",
"@types/node": "^16.18.1",
"@types/signale": "^1.4.1",
"@typescript-eslint/parser": "^4.0.0",
"concurrently": "^6.1.0",

View File

@@ -90,3 +90,7 @@
### 1.1.0
- Add `x-actions-results-crc64` and `x-actions-results-md5` checksum headers on upload [#1063](https://github.com/actions/toolkit/pull/1063)
### 1.1.1
- Fixed a bug in Node16 where if an HTTP download finished too quickly (<1ms, e.g. when it's mocked) we attempt to delete a temp file that has not been created yet [#1278](https://github.com/actions/toolkit/pull/1278/commits/b9de68a590daf37c6747e38d3cb4f1dd2cfb791c)

View File

@@ -6,13 +6,28 @@ import {promises as fs} from 'fs'
import {createGZipFileOnDisk} from '../src/internal/upload-gzip'
const root = path.join(__dirname, '_temp', 'upload-gzip')
const tempGzipFilePath = path.join(root, 'file1.gzip')
const tempZipFilePath = path.join(root, 'file2.zip')
const tempTarlzFilePath = path.join(root, 'file3.tar.lz')
const tempGzFilePath = path.join(root, 'file4.tar.gz')
const tempBz2FilePath = path.join(root, 'file5.tar.bz2')
const temp7zFilePath = path.join(root, 'file6.7z')
const tempNormalFilePath = path.join(root, 'file6.txt')
const tempGzFilePath = path.join(root, 'file.gz')
const tempGzipFilePath = path.join(root, 'file.gzip')
const tempTgzFilePath = path.join(root, 'file.tgz')
const tempTazFilePath = path.join(root, 'file.taz')
const tempZFilePath = path.join(root, 'file.Z')
const tempTaZFilePath = path.join(root, 'file.taZ')
const tempBz2FilePath = path.join(root, 'file.bz2')
const tempTbzFilePath = path.join(root, 'file.tbz')
const tempTbz2FilePath = path.join(root, 'file.tbz2')
const tempTz2FilePath = path.join(root, 'file.tz2')
const tempLzFilePath = path.join(root, 'file.lz')
const tempLzmaFilePath = path.join(root, 'file.lzma')
const tempTlzFilePath = path.join(root, 'file.tlz')
const tempLzoFilePath = path.join(root, 'file.lzo')
const tempXzFilePath = path.join(root, 'file.xz')
const tempTxzFilePath = path.join(root, 'file.txz')
const tempZstFilePath = path.join(root, 'file.zst')
const tempZstdFilePath = path.join(root, 'file.zstd')
const tempTzstFilePath = path.join(root, 'file.tzst')
const tempZipFilePath = path.join(root, 'file.zip')
const temp7zFilePath = path.join(root, 'file.7z')
const tempNormalFilePath = path.join(root, 'file.txt')
jest.mock('../src/internal/config-variables')
@@ -27,11 +42,26 @@ beforeAll(async () => {
// clear temp directory and create files that will be "uploaded"
await io.rmRF(root)
await fs.mkdir(path.join(root))
await fs.writeFile(tempGzFilePath, 'a file with a .gz file extension')
await fs.writeFile(tempGzipFilePath, 'a file with a .gzip file extension')
await fs.writeFile(tempZipFilePath, 'a file with a .zip file extension')
await fs.writeFile(tempTarlzFilePath, 'a file with a tar.lz file extension')
await fs.writeFile(tempGzFilePath, 'a file with a .gz file extension')
await fs.writeFile(tempTgzFilePath, 'a file with a .tgz file extension')
await fs.writeFile(tempTazFilePath, 'a file with a .taz file extension')
await fs.writeFile(tempZFilePath, 'a file with a .Z file extension')
await fs.writeFile(tempTaZFilePath, 'a file with a .taZ file extension')
await fs.writeFile(tempBz2FilePath, 'a file with a .bz2 file extension')
await fs.writeFile(tempTbzFilePath, 'a file with a .tbz file extension')
await fs.writeFile(tempTbz2FilePath, 'a file with a .tbz2 file extension')
await fs.writeFile(tempTz2FilePath, 'a file with a .tz2 file extension')
await fs.writeFile(tempLzFilePath, 'a file with a .lz file extension')
await fs.writeFile(tempLzmaFilePath, 'a file with a .lzma file extension')
await fs.writeFile(tempTlzFilePath, 'a file with a .tlz file extension')
await fs.writeFile(tempLzoFilePath, 'a file with a .lzo file extension')
await fs.writeFile(tempXzFilePath, 'a file with a .xz file extension')
await fs.writeFile(tempTxzFilePath, 'a file with a .txz file extension')
await fs.writeFile(tempZstFilePath, 'a file with a .zst file extension')
await fs.writeFile(tempZstdFilePath, 'a file with a .zstd file extension')
await fs.writeFile(tempTzstFilePath, 'a file with a .tzst file extension')
await fs.writeFile(tempZipFilePath, 'a file with a .zip file extension')
await fs.writeFile(temp7zFilePath, 'a file with a .7z file extension')
await fs.writeFile(tempNormalFilePath, 'a file with a .txt file extension')
})
@@ -40,21 +70,66 @@ test('Number.MAX_SAFE_INTEGER is returned when an existing compressed file is us
// create temporary file
const tempFile = await tmp.file()
expect(await createGZipFileOnDisk(tempGzipFilePath, tempFile.path)).toEqual(
Number.MAX_SAFE_INTEGER
)
expect(await createGZipFileOnDisk(tempZipFilePath, tempFile.path)).toEqual(
Number.MAX_SAFE_INTEGER
)
expect(await createGZipFileOnDisk(tempTarlzFilePath, tempFile.path)).toEqual(
Number.MAX_SAFE_INTEGER
)
expect(await createGZipFileOnDisk(tempGzFilePath, tempFile.path)).toEqual(
Number.MAX_SAFE_INTEGER
)
expect(await createGZipFileOnDisk(tempGzipFilePath, tempFile.path)).toEqual(
Number.MAX_SAFE_INTEGER
)
expect(await createGZipFileOnDisk(tempTgzFilePath, tempFile.path)).toEqual(
Number.MAX_SAFE_INTEGER
)
expect(await createGZipFileOnDisk(tempTazFilePath, tempFile.path)).toEqual(
Number.MAX_SAFE_INTEGER
)
expect(await createGZipFileOnDisk(tempZFilePath, tempFile.path)).toEqual(
Number.MAX_SAFE_INTEGER
)
expect(await createGZipFileOnDisk(tempTaZFilePath, tempFile.path)).toEqual(
Number.MAX_SAFE_INTEGER
)
expect(await createGZipFileOnDisk(tempBz2FilePath, tempFile.path)).toEqual(
Number.MAX_SAFE_INTEGER
)
expect(await createGZipFileOnDisk(tempTbzFilePath, tempFile.path)).toEqual(
Number.MAX_SAFE_INTEGER
)
expect(await createGZipFileOnDisk(tempTbz2FilePath, tempFile.path)).toEqual(
Number.MAX_SAFE_INTEGER
)
expect(await createGZipFileOnDisk(tempTz2FilePath, tempFile.path)).toEqual(
Number.MAX_SAFE_INTEGER
)
expect(await createGZipFileOnDisk(tempLzFilePath, tempFile.path)).toEqual(
Number.MAX_SAFE_INTEGER
)
expect(await createGZipFileOnDisk(tempLzmaFilePath, tempFile.path)).toEqual(
Number.MAX_SAFE_INTEGER
)
expect(await createGZipFileOnDisk(tempTlzFilePath, tempFile.path)).toEqual(
Number.MAX_SAFE_INTEGER
)
expect(await createGZipFileOnDisk(tempLzoFilePath, tempFile.path)).toEqual(
Number.MAX_SAFE_INTEGER
)
expect(await createGZipFileOnDisk(tempXzFilePath, tempFile.path)).toEqual(
Number.MAX_SAFE_INTEGER
)
expect(await createGZipFileOnDisk(tempTxzFilePath, tempFile.path)).toEqual(
Number.MAX_SAFE_INTEGER
)
expect(await createGZipFileOnDisk(tempZstFilePath, tempFile.path)).toEqual(
Number.MAX_SAFE_INTEGER
)
expect(await createGZipFileOnDisk(tempZstdFilePath, tempFile.path)).toEqual(
Number.MAX_SAFE_INTEGER
)
expect(await createGZipFileOnDisk(tempTzstFilePath, tempFile.path)).toEqual(
Number.MAX_SAFE_INTEGER
)
expect(await createGZipFileOnDisk(tempZipFilePath, tempFile.path)).toEqual(
Number.MAX_SAFE_INTEGER
)
expect(await createGZipFileOnDisk(temp7zFilePath, tempFile.path)).toEqual(
Number.MAX_SAFE_INTEGER
)

View File

@@ -1,12 +1,12 @@
{
"name": "@actions/artifact",
"version": "1.1.0",
"version": "1.1.1",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@actions/artifact",
"version": "1.1.0",
"version": "1.1.1",
"license": "MIT",
"dependencies": {
"@actions/core": "^1.9.1",

View File

@@ -1,6 +1,6 @@
{
"name": "@actions/artifact",
"version": "1.1.0",
"version": "1.1.1",
"preview": true,
"description": "Actions artifact lib",
"keywords": [

View File

@@ -219,6 +219,13 @@ export class DownloadHttpClient {
fileDownloadPath: string
): Promise<void> => {
destinationStream.close()
// wait until the file is created at the download path; on node15 and up, fs.createWriteStream may not have created the file yet
await new Promise<void>(resolve => {
destinationStream.on('close', resolve)
if (destinationStream.writableFinished) {
resolve()
}
})
await rmFile(fileDownloadPath)
destinationStream = fs.createWriteStream(fileDownloadPath)
}
@@ -304,7 +311,7 @@ export class DownloadHttpClient {
const gunzip = zlib.createGunzip()
response.message
.on('error', error => {
core.error(
core.info(
`An error occurred while attempting to read the response stream`
)
gunzip.close()
@@ -313,7 +320,7 @@
})
.pipe(gunzip)
.on('error', error => {
core.error(
core.info(
`An error occurred while attempting to decompress the response stream`
)
destinationStream.close()
@@ -324,7 +331,7 @@
resolve()
})
.on('error', error => {
core.error(
core.info(
`An error occurred while writing a downloaded file to ${destinationStream.path}`
)
reject(error)
@@ -332,7 +339,7 @@
} else {
response.message
.on('error', error => {
core.error(
core.info(
`An error occurred while attempting to read the response stream`
)
destinationStream.close()
@@ -343,7 +350,7 @@
resolve()
})
.on('error', error => {
core.error(
core.info(
`An error occurred while writing a downloaded file to ${destinationStream.path}`
)
reject(error)

View File

@@ -9,12 +9,27 @@ const stat = promisify(fs.stat)
* If any of these types of files are encountered then on-disk gzip creation will be skipped and the original file will be uploaded as-is
*/
const gzipExemptFileExtensions = [
'.gzip',
'.zip',
'.tar.lz',
'.tar.gz',
'.tar.bz2',
'.7z'
'.gz', // GZIP
'.gzip', // GZIP
'.tgz', // GZIP
'.taz', // GZIP
'.Z', // COMPRESS
'.taZ', // COMPRESS
'.bz2', // BZIP2
'.tbz', // BZIP2
'.tbz2', // BZIP2
'.tz2', // BZIP2
'.lz', // LZIP
'.lzma', // LZMA
'.tlz', // LZMA
'.lzo', // LZOP
'.xz', // XZ
'.txz', // XZ
'.zst', // ZSTD
'.zstd', // ZSTD
'.tzst', // ZSTD
'.zip', // ZIP
'.7z' // 7ZIP
]
/**
@@ -47,7 +62,7 @@ export async function createGZipFileOnDisk(
outputStream.on('error', error => {
// eslint-disable-next-line no-console
console.log(error)
reject
reject(error)
})
})
}
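The exemption check itself is a suffix test against this list. A sketch of the idea, using a hypothetical `isGzipExempt` helper (the real logic lives inside `createGZipFileOnDisk`):

```ts
import * as path from 'path'

// Abridged copy of the list above; see upload-gzip.ts for the full set.
const gzipExemptFileExtensions = ['.gz', '.gzip', '.tgz', '.zst', '.zip', '.7z']

// True when the file already carries a compressed-format extension and
// should be uploaded as-is instead of being gzipped on disk first.
function isGzipExempt(filePath: string): boolean {
  const name = path.basename(filePath)
  return gzipExemptFileExtensions.some(ext => name.endsWith(ext))
}

console.log(isGzipExempt('artifact-path/report.tar.gz')) // true
console.log(isGzipExempt('artifact-path/world.txt')) // false
```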

View File

@@ -25,7 +25,7 @@ export function getUploadSpecification(
if (!fs.existsSync(rootDirectory)) {
throw new Error(`Provided rootDirectory ${rootDirectory} does not exist`)
}
if (!fs.lstatSync(rootDirectory).isDirectory()) {
if (!fs.statSync(rootDirectory).isDirectory()) {
throw new Error(
`Provided rootDirectory ${rootDirectory} is not a valid directory`
)
@@ -57,7 +57,7 @@
if (!fs.existsSync(file)) {
throw new Error(`File ${file} does not exist`)
}
if (!fs.lstatSync(file).isDirectory()) {
if (!fs.statSync(file).isDirectory()) {
// Normalize and resolve, this allows for either absolute or relative paths to be used
file = normalize(file)
file = resolve(file)
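The `lstatSync` to `statSync` switch matters when an upload path is a symbolic link: `lstat` describes the link itself (never a directory), while `stat` follows it to the target. A small illustration, assuming symlink creation is permitted on the machine:

```ts
import * as fs from 'fs'
import * as os from 'os'
import * as path from 'path'

// Create a real directory and a symlink pointing at it.
const target = fs.mkdtempSync(path.join(os.tmpdir(), 'real-dir-'))
const link = path.join(os.tmpdir(), `link-${Date.now()}`)
fs.symlinkSync(target, link, 'dir')

console.log(fs.lstatSync(link).isDirectory()) // false: describes the link itself
console.log(fs.statSync(link).isDirectory()) // true: follows the link to the target
```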

View File

@@ -91,3 +91,27 @@
### 3.0.6
- Added `@azure/abort-controller` to dependencies to fix compatibility issue with ESM [#1208](https://github.com/actions/toolkit/issues/1208)
### 3.1.0-beta.1
- Update actions/cache on windows to use gnu tar and zstd by default and fall back to bsdtar and zstd if gnu tar is not available. ([issue](https://github.com/actions/cache/issues/984))
### 3.1.0-beta.2
- Added support for fallback to gzip to restore old caches on windows.
### 3.1.0-beta.3
- Bug fixes for the gzip fallback to restore old caches on windows and for bsdtar when gnu tar is not available.
### 3.1.0
- Update actions/cache on windows to use gnu tar and zstd by default
- Update actions/cache on windows to fall back to bsdtar and zstd if gnu tar is not available.
- Added support for fallback to gzip to restore old caches on windows.
### 3.1.1
- Reverted changes in 3.1.0 to fix issue with symlink restoration on windows.
- Added support for verbose logging about cache version during cache miss.
### 3.1.2
- Fix issue with symlink restoration on windows.
### 3.1.3
- Fix to prevent setting the MSYS environment variable globally [#1329](https://github.com/actions/toolkit/pull/1329).

View File

@@ -7,7 +7,7 @@ jest.mock('../src/internal/downloadUtils')
test('getCacheVersion with one path returns version', async () => {
const paths = ['node_modules']
const result = getCacheVersion(paths)
const result = getCacheVersion(paths, undefined, true)
expect(result).toEqual(
'b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985'
)
@@ -15,7 +15,7 @@ test('getCacheVersion with one path returns version', async () => {
test('getCacheVersion with multiple paths returns version', async () => {
const paths = ['node_modules', 'dist']
const result = getCacheVersion(paths)
const result = getCacheVersion(paths, undefined, true)
expect(result).toEqual(
'165c3053bc646bf0d4fac17b1f5731caca6fe38e0e464715c0c3c6b6318bf436'
)
@@ -23,22 +23,33 @@ test('getCacheVersion with multiple paths returns version', async () => {
test('getCacheVersion with zstd compression returns version', async () => {
const paths = ['node_modules']
const result = getCacheVersion(paths, CompressionMethod.Zstd)
const result = getCacheVersion(paths, CompressionMethod.Zstd, true)
expect(result).toEqual(
'273877e14fd65d270b87a198edbfa2db5a43de567c9a548d2a2505b408befe24'
)
})
test('getCacheVersion with gzip compression does not change vesion', async () => {
test('getCacheVersion with gzip compression returns version', async () => {
const paths = ['node_modules']
const result = getCacheVersion(paths, CompressionMethod.Gzip)
const result = getCacheVersion(paths, CompressionMethod.Gzip, true)
expect(result).toEqual(
'b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985'
'470e252814dbffc9524891b17cf4e5749b26c1b5026e63dd3f00972db2393117'
)
})
test('getCacheVersion with enableCrossOsArchive as false returns version on windows', async () => {
if (process.platform === 'win32') {
const paths = ['node_modules']
const result = getCacheVersion(paths)
expect(result).toEqual(
'2db19d6596dc34f51f0043120148827a264863f5c6ac857569c2af7119bad14e'
)
}
})
test('downloadCache uses http-client for non-Azure URLs', async () => {
const downloadCacheHttpClientMock = jest.spyOn(
downloadUtils,

View File

@@ -142,7 +142,8 @@ test('restore with gzip compressed cache found', async () => {
expect(cacheKey).toBe(key)
expect(getCacheMock).toHaveBeenCalledWith([key], paths, {
compressionMethod: compression
compressionMethod: compression,
enableCrossOsArchive: false
})
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
expect(downloadCacheMock).toHaveBeenCalledWith(
@@ -201,7 +202,8 @@ test('restore with zstd compressed cache found', async () => {
expect(cacheKey).toBe(key)
expect(getCacheMock).toHaveBeenCalledWith([key], paths, {
compressionMethod: compression
compressionMethod: compression,
enableCrossOsArchive: false
})
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
expect(downloadCacheMock).toHaveBeenCalledWith(
@@ -258,7 +260,8 @@ test('restore with cache found for restore key', async () => {
expect(cacheKey).toBe(restoreKey)
expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey], paths, {
compressionMethod: compression
compressionMethod: compression,
enableCrossOsArchive: false
})
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
expect(downloadCacheMock).toHaveBeenCalledWith(

View File

@@ -209,7 +209,9 @@ test('save with reserve cache failure should fail', async () => {
expect(reserveCacheMock).toHaveBeenCalledTimes(1)
expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, paths, {
compressionMethod: compression
cacheSize: undefined,
compressionMethod: compression,
enableCrossOsArchive: false
})
expect(createTarMock).toHaveBeenCalledTimes(1)
expect(saveCacheMock).toHaveBeenCalledTimes(0)
@@ -253,7 +255,9 @@ test('save with server error should fail', async () => {
expect(reserveCacheMock).toHaveBeenCalledTimes(1)
expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, [filePath], {
compressionMethod: compression
cacheSize: undefined,
compressionMethod: compression,
enableCrossOsArchive: false
})
const archiveFolder = '/foo/bar'
const archiveFile = path.join(archiveFolder, CacheFilename.Zstd)
@@ -296,7 +300,9 @@ test('save with valid inputs uploads a cache', async () => {
expect(reserveCacheMock).toHaveBeenCalledTimes(1)
expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, [filePath], {
compressionMethod: compression
cacheSize: undefined,
compressionMethod: compression,
enableCrossOsArchive: false
})
const archiveFolder = '/foo/bar'
const archiveFile = path.join(archiveFolder, CacheFilename.Zstd)

View File

@@ -1,7 +1,14 @@
import * as exec from '@actions/exec'
import * as io from '@actions/io'
import * as path from 'path'
import {CacheFilename, CompressionMethod} from '../src/internal/constants'
import {
CacheFilename,
CompressionMethod,
GnuTarPathOnWindows,
ManifestFilename,
SystemTarPathOnWindows,
TarFilename
} from '../src/internal/constants'
import * as tar from '../src/internal/tar'
import * as utils from '../src/internal/cacheUtils'
// eslint-disable-next-line @typescript-eslint/no-require-imports
@@ -13,7 +20,9 @@ jest.mock('@actions/io')
const IS_WINDOWS = process.platform === 'win32'
const IS_MAC = process.platform === 'darwin'
const defaultTarPath = process.platform === 'darwin' ? 'gtar' : 'tar'
const defaultTarPath = IS_MAC ? 'gtar' : 'tar'
const defaultEnv = {MSYS: 'winsymlinks:nativestrict'}
function getTempDir(): string {
return path.join(__dirname, '_temp', 'tar')
@@ -28,6 +37,10 @@ beforeAll(async () => {
await jest.requireActual('@actions/io').rmRF(getTempDir())
})
beforeEach(async () => {
jest.restoreAllMocks()
})
afterAll(async () => {
delete process.env['GITHUB_WORKSPACE']
await jest.requireActual('@actions/io').rmRF(getTempDir())
@@ -41,16 +54,15 @@ test('zstd extract tar', async () => {
? `${process.env['windir']}\\fakepath\\cache.tar`
: 'cache.tar'
const workspace = process.env['GITHUB_WORKSPACE']
const tarPath = IS_WINDOWS ? GnuTarPathOnWindows : defaultTarPath
await tar.extractTar(archivePath, CompressionMethod.Zstd)
expect(mkdirMock).toHaveBeenCalledWith(workspace)
expect(execMock).toHaveBeenCalledTimes(1)
expect(execMock).toHaveBeenCalledWith(
`"${defaultTarPath}"`,
[
'--use-compress-program',
IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30',
`"${tarPath}"`,
'-xf',
IS_WINDOWS ? archivePath.replace(/\\/g, '/') : archivePath,
'-P',
@@ -58,11 +70,70 @@
IS_WINDOWS ? workspace?.replace(/\\/g, '/') : workspace
]
.concat(IS_WINDOWS ? ['--force-local'] : [])
.concat(IS_MAC ? ['--delay-directory-restore'] : []),
{cwd: undefined}
.concat(IS_MAC ? ['--delay-directory-restore'] : [])
.concat([
'--use-compress-program',
IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30'
])
.join(' '),
undefined,
{
cwd: undefined,
env: expect.objectContaining(defaultEnv)
}
)
})
test('zstd extract tar with windows BSDtar', async () => {
if (IS_WINDOWS) {
const mkdirMock = jest.spyOn(io, 'mkdirP')
const execMock = jest.spyOn(exec, 'exec')
jest
.spyOn(utils, 'getGnuTarPathOnWindows')
.mockReturnValue(Promise.resolve(''))
const archivePath = `${process.env['windir']}\\fakepath\\cache.tar`
const workspace = process.env['GITHUB_WORKSPACE']
const tarPath = SystemTarPathOnWindows
await tar.extractTar(archivePath, CompressionMethod.Zstd)
expect(mkdirMock).toHaveBeenCalledWith(workspace)
expect(execMock).toHaveBeenCalledTimes(2)
expect(execMock).toHaveBeenNthCalledWith(
1,
[
'zstd -d --long=30 --force -o',
TarFilename.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
].join(' '),
undefined,
{
cwd: undefined,
env: expect.objectContaining(defaultEnv)
}
)
expect(execMock).toHaveBeenNthCalledWith(
2,
[
`"${tarPath}"`,
'-xf',
TarFilename.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'-P',
'-C',
workspace?.replace(/\\/g, '/')
].join(' '),
undefined,
{
cwd: undefined,
env: expect.objectContaining(defaultEnv)
}
)
}
})
test('gzip extract tar', async () => {
const mkdirMock = jest.spyOn(io, 'mkdirP')
const execMock = jest.spyOn(exec, 'exec')
@@ -74,51 +145,58 @@ test('gzip extract tar', async () => {
await tar.extractTar(archivePath, CompressionMethod.Gzip)
expect(mkdirMock).toHaveBeenCalledWith(workspace)
const tarPath = IS_WINDOWS
? `${process.env['windir']}\\System32\\tar.exe`
: defaultTarPath
const tarPath = IS_WINDOWS ? GnuTarPathOnWindows : defaultTarPath
expect(execMock).toHaveBeenCalledTimes(1)
expect(execMock).toHaveBeenCalledWith(
`"${tarPath}"`,
[
'-z',
`"${tarPath}"`,
'-xf',
IS_WINDOWS ? archivePath.replace(/\\/g, '/') : archivePath,
'-P',
'-C',
IS_WINDOWS ? workspace?.replace(/\\/g, '/') : workspace
].concat(IS_MAC ? ['--delay-directory-restore'] : []),
{cwd: undefined}
]
.concat(IS_WINDOWS ? ['--force-local'] : [])
.concat(IS_MAC ? ['--delay-directory-restore'] : [])
.concat(['-z'])
.join(' '),
undefined,
{
cwd: undefined,
env: expect.objectContaining(defaultEnv)
}
)
})
test('gzip extract GNU tar on windows', async () => {
test('gzip extract GNU tar on windows with GNUtar in path', async () => {
if (IS_WINDOWS) {
jest.spyOn(fs, 'existsSync').mockReturnValueOnce(false)
const isGnuMock = jest
.spyOn(utils, 'isGnuTarInstalled')
.mockReturnValue(Promise.resolve(true))
// GNU tar present in path but not at default location
jest
.spyOn(utils, 'getGnuTarPathOnWindows')
.mockReturnValue(Promise.resolve('tar'))
const execMock = jest.spyOn(exec, 'exec')
const archivePath = `${process.env['windir']}\\fakepath\\cache.tar`
const workspace = process.env['GITHUB_WORKSPACE']
await tar.extractTar(archivePath, CompressionMethod.Gzip)
expect(isGnuMock).toHaveBeenCalledTimes(1)
expect(execMock).toHaveBeenCalledTimes(1)
expect(execMock).toHaveBeenCalledWith(
`"tar"`,
[
'-z',
`"tar"`,
'-xf',
archivePath.replace(/\\/g, '/'),
'-P',
'-C',
workspace?.replace(/\\/g, '/'),
'--force-local'
],
{cwd: undefined}
'--force-local',
'-z'
].join(' '),
undefined,
{
cwd: undefined,
env: expect.objectContaining(defaultEnv)
}
)
}
})
@@ -134,13 +212,13 @@ test('zstd create tar', async () => {
await tar.createTar(archiveFolder, sourceDirectories, CompressionMethod.Zstd)
const tarPath = IS_WINDOWS ? GnuTarPathOnWindows : defaultTarPath
expect(execMock).toHaveBeenCalledTimes(1)
expect(execMock).toHaveBeenCalledWith(
`"${defaultTarPath}"`,
[
`"${tarPath}"`,
'--posix',
'--use-compress-program',
IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30',
'-cf',
IS_WINDOWS ? CacheFilename.Zstd.replace(/\\/g, '/') : CacheFilename.Zstd,
'--exclude',
@@ -149,16 +227,84 @@
'-C',
IS_WINDOWS ? workspace?.replace(/\\/g, '/') : workspace,
'--files-from',
'manifest.txt'
ManifestFilename
]
.concat(IS_WINDOWS ? ['--force-local'] : [])
.concat(IS_MAC ? ['--delay-directory-restore'] : []),
.concat(IS_MAC ? ['--delay-directory-restore'] : [])
.concat([
'--use-compress-program',
IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30'
])
.join(' '),
undefined, // args
{
cwd: archiveFolder
cwd: archiveFolder,
env: expect.objectContaining(defaultEnv)
}
)
})
test('zstd create tar with windows BSDtar', async () => {
if (IS_WINDOWS) {
const execMock = jest.spyOn(exec, 'exec')
jest
.spyOn(utils, 'getGnuTarPathOnWindows')
.mockReturnValue(Promise.resolve(''))
const archiveFolder = getTempDir()
const workspace = process.env['GITHUB_WORKSPACE']
const sourceDirectories = ['~/.npm/cache', `${workspace}/dist`]
await fs.promises.mkdir(archiveFolder, {recursive: true})
await tar.createTar(
archiveFolder,
sourceDirectories,
CompressionMethod.Zstd
)
const tarPath = SystemTarPathOnWindows
expect(execMock).toHaveBeenCalledTimes(2)
expect(execMock).toHaveBeenNthCalledWith(
1,
[
`"${tarPath}"`,
'--posix',
'-cf',
TarFilename.replace(/\\/g, '/'),
'--exclude',
TarFilename.replace(/\\/g, '/'),
'-P',
'-C',
workspace?.replace(/\\/g, '/'),
'--files-from',
ManifestFilename
].join(' '),
undefined, // args
{
cwd: archiveFolder,
env: expect.objectContaining(defaultEnv)
}
)
expect(execMock).toHaveBeenNthCalledWith(
2,
[
'zstd -T0 --long=30 --force -o',
CacheFilename.Zstd.replace(/\\/g, '/'),
TarFilename.replace(/\\/g, '/')
].join(' '),
undefined, // args
{
cwd: archiveFolder,
env: expect.objectContaining(defaultEnv)
}
)
}
})
test('gzip create tar', async () => {
const execMock = jest.spyOn(exec, 'exec')
@@ -170,16 +316,13 @@ test('gzip create tar', async () => {
await tar.createTar(archiveFolder, sourceDirectories, CompressionMethod.Gzip)
const tarPath = IS_WINDOWS
? `${process.env['windir']}\\System32\\tar.exe`
: defaultTarPath
const tarPath = IS_WINDOWS ? GnuTarPathOnWindows : defaultTarPath
expect(execMock).toHaveBeenCalledTimes(1)
expect(execMock).toHaveBeenCalledWith(
`"${tarPath}"`,
[
`"${tarPath}"`,
'--posix',
'-z',
'-cf',
IS_WINDOWS ? CacheFilename.Gzip.replace(/\\/g, '/') : CacheFilename.Gzip,
'--exclude',
@@ -188,10 +331,16 @@
'-C',
IS_WINDOWS ? workspace?.replace(/\\/g, '/') : workspace,
'--files-from',
'manifest.txt'
].concat(IS_MAC ? ['--delay-directory-restore'] : []),
ManifestFilename
]
.concat(IS_WINDOWS ? ['--force-local'] : [])
.concat(IS_MAC ? ['--delay-directory-restore'] : [])
.concat(['-z'])
.join(' '),
undefined, // args
{
cwd: archiveFolder
cwd: archiveFolder,
env: expect.objectContaining(defaultEnv)
}
)
})
@@ -205,22 +354,74 @@ test('zstd list tar', async () => {
await tar.listTar(archivePath, CompressionMethod.Zstd)
const tarPath = IS_WINDOWS ? GnuTarPathOnWindows : defaultTarPath
expect(execMock).toHaveBeenCalledTimes(1)
expect(execMock).toHaveBeenCalledWith(
`"${defaultTarPath}"`,
[
'--use-compress-program',
IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30',
`"${tarPath}"`,
'-tf',
IS_WINDOWS ? archivePath.replace(/\\/g, '/') : archivePath,
'-P'
]
.concat(IS_WINDOWS ? ['--force-local'] : [])
.concat(IS_MAC ? ['--delay-directory-restore'] : []),
{cwd: undefined}
.concat(IS_MAC ? ['--delay-directory-restore'] : [])
.concat([
'--use-compress-program',
IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30'
])
.join(' '),
undefined,
{
cwd: undefined,
env: expect.objectContaining(defaultEnv)
}
)
})
test('zstd list tar with windows BSDtar', async () => {
if (IS_WINDOWS) {
const execMock = jest.spyOn(exec, 'exec')
jest
.spyOn(utils, 'getGnuTarPathOnWindows')
.mockReturnValue(Promise.resolve(''))
const archivePath = `${process.env['windir']}\\fakepath\\cache.tar`
await tar.listTar(archivePath, CompressionMethod.Zstd)
const tarPath = SystemTarPathOnWindows
expect(execMock).toHaveBeenCalledTimes(2)
expect(execMock).toHaveBeenNthCalledWith(
1,
[
'zstd -d --long=30 --force -o',
TarFilename.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
].join(' '),
undefined,
{
cwd: undefined,
env: expect.objectContaining(defaultEnv)
}
)
expect(execMock).toHaveBeenNthCalledWith(
2,
[
`"${tarPath}"`,
'-tf',
TarFilename.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'-P'
].join(' '),
undefined,
{
cwd: undefined,
env: expect.objectContaining(defaultEnv)
}
)
}
})
test('zstdWithoutLong list tar', async () => {
const execMock = jest.spyOn(exec, 'exec')
@@ -230,19 +431,24 @@ test('zstdWithoutLong list tar', async () => {
await tar.listTar(archivePath, CompressionMethod.ZstdWithoutLong)
const tarPath = IS_WINDOWS ? GnuTarPathOnWindows : defaultTarPath
expect(execMock).toHaveBeenCalledTimes(1)
expect(execMock).toHaveBeenCalledWith(
`"${defaultTarPath}"`,
[
'--use-compress-program',
IS_WINDOWS ? 'zstd -d' : 'unzstd',
`"${tarPath}"`,
'-tf',
IS_WINDOWS ? archivePath.replace(/\\/g, '/') : archivePath,
'-P'
]
.concat(IS_WINDOWS ? ['--force-local'] : [])
.concat(IS_MAC ? ['--delay-directory-restore'] : []),
{cwd: undefined}
.concat(IS_MAC ? ['--delay-directory-restore'] : [])
.concat(['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd'])
.join(' '),
undefined,
{
cwd: undefined,
env: expect.objectContaining(defaultEnv)
}
)
})
@@ -254,18 +460,23 @@ test('gzip list tar', async () => {
await tar.listTar(archivePath, CompressionMethod.Gzip)
const tarPath = IS_WINDOWS
? `${process.env['windir']}\\System32\\tar.exe`
: defaultTarPath
const tarPath = IS_WINDOWS ? GnuTarPathOnWindows : defaultTarPath
expect(execMock).toHaveBeenCalledTimes(1)
expect(execMock).toHaveBeenCalledWith(
`"${tarPath}"`,
[
'-z',
`"${tarPath}"`,
'-tf',
IS_WINDOWS ? archivePath.replace(/\\/g, '/') : archivePath,
'-P'
].concat(IS_MAC ? ['--delay-directory-restore'] : []),
{cwd: undefined}
]
.concat(IS_WINDOWS ? ['--force-local'] : [])
.concat(IS_MAC ? ['--delay-directory-restore'] : [])
.concat(['-z'])
.join(' '),
undefined,
{
cwd: undefined,
env: expect.objectContaining(defaultEnv)
}
)
})

packages/cache/package-lock.json (generated, vendored): 16 changed lines
View File

@@ -1,12 +1,12 @@
{
"name": "@actions/cache",
"version": "3.0.6",
"version": "3.1.3",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@actions/cache",
"version": "3.0.6",
"version": "3.1.3",
"license": "MIT",
"dependencies": {
"@actions/core": "^1.10.0",
@@ -457,9 +457,9 @@
}
},
"node_modules/minimatch": {
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"dependencies": {
"brace-expansion": "^1.1.7"
},
@@ -998,9 +998,9 @@
}
},
"minimatch": {
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"requires": {
"brace-expansion": "^1.1.7"
}

View File

@@ -1,6 +1,6 @@
{
"name": "@actions/cache",
"version": "3.0.6",
"version": "3.1.3",
"preview": true,
"description": "Actions cache lib",
"keywords": [

View File

@@ -60,13 +60,15 @@ export function isFeatureAvailable(): boolean {
* @param primaryKey an explicit key for restoring the cache
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
* @param downloadOptions cache download options
* @param enableCrossOsArchive an optional boolean that, when enabled, allows windows runners to restore caches created on other platforms
* @returns string returns the key for the cache hit, otherwise returns undefined
*/
export async function restoreCache(
paths: string[],
primaryKey: string,
restoreKeys?: string[],
options?: DownloadOptions
options?: DownloadOptions,
enableCrossOsArchive = false
): Promise<string | undefined> {
checkPaths(paths)
@@ -90,9 +92,9 @@ export async function restoreCache(
try {
// path are needed to compute version
const cacheEntry = await cacheHttpClient.getCacheEntry(keys, paths, {
compressionMethod
compressionMethod,
enableCrossOsArchive
})
if (!cacheEntry?.archiveLocation) {
// Cache not found
return undefined
@@ -151,13 +153,15 @@
*
* @param paths a list of file paths to be cached
* @param key an explicit key for restoring the cache
* @param enableCrossOsArchive an optional boolean that, when enabled, saves a cache on windows that can be restored on any platform
* @param options cache upload options
* @returns number returns cacheId if the cache was saved successfully and throws an error if save fails
*/
export async function saveCache(
paths: string[],
key: string,
options?: UploadOptions
options?: UploadOptions,
enableCrossOsArchive = false
): Promise<number> {
checkPaths(paths)
checkKey(key)
@@ -207,6 +211,7 @@ export async function saveCache(
paths,
{
compressionMethod,
enableCrossOsArchive,
cacheSize: archiveFileSize
}
)
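With the new trailing parameter, callers opt in to cross-OS archives per call. A hedged usage sketch with illustrative paths and key:

```ts
import * as cache from '@actions/cache'

async function crossOsRoundTrip(): Promise<void> {
  const paths = ['node_modules']
  const key = 'deps-v1' // illustrative key

  // Save with enableCrossOsArchive = true so the version is not salted
  // as windows-only and the archive can be restored elsewhere.
  await cache.saveCache(paths, key, undefined, true)

  // Restore, likewise accepting archives created on other platforms.
  const hitKey = await cache.restoreCache(paths, key, [], undefined, true)
  if (!hitKey) {
    // cache miss: fall back to a fresh install
  }
}
```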

View File

@@ -17,7 +17,8 @@ import {
CommitCacheRequest,
ReserveCacheRequest,
ReserveCacheResponse,
ITypedResponseWithError
ITypedResponseWithError,
ArtifactCacheList
} from './contracts'
import {downloadCacheHttpClient, downloadCacheStorageSDK} from './downloadUtils'
import {
@@ -72,13 +73,21 @@ function createHttpClient(): HttpClient {
export function getCacheVersion(
paths: string[],
compressionMethod?: CompressionMethod
compressionMethod?: CompressionMethod,
enableCrossOsArchive = false
): string {
const components = paths.concat(
!compressionMethod || compressionMethod === CompressionMethod.Gzip
? []
: [compressionMethod]
)
const components = paths
// Add compression method to cache version to restore
// compressed cache as per compression method
if (compressionMethod) {
components.push(compressionMethod)
}
// Only check for windows platforms if enableCrossOsArchive is false
if (process.platform === 'win32' && !enableCrossOsArchive) {
components.push('windows-only')
}
// Add salt to cache version to support breaking changes in cache entry
components.push(versionSalt)
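Net effect: every compression method, gzip included, now lands in the hashed components, and windows runners add a `windows-only` component unless `enableCrossOsArchive` is set; that is why the gzip version hash in the tests above changed while the bare-paths hash did not.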
@@ -95,7 +104,11 @@ export async function getCacheEntry(
options?: InternalCacheOptions
): Promise<ArtifactCacheEntry | null> {
const httpClient = createHttpClient()
const version = getCacheVersion(paths, options?.compressionMethod)
const version = getCacheVersion(
paths,
options?.compressionMethod,
options?.enableCrossOsArchive
)
const resource = `cache?keys=${encodeURIComponent(
keys.join(',')
)}&version=${version}`
@@ -103,7 +116,12 @@
const response = await retryTypedResponse('getCacheEntry', async () =>
httpClient.getJson<ArtifactCacheEntry>(getCacheApiUrl(resource))
)
// Cache not found
if (response.statusCode === 204) {
// List cache for primary key only if cache miss occurs
if (core.isDebug()) {
await printCachesListForDiagnostics(keys[0], httpClient, version)
}
return null
}
if (!isSuccessStatusCode(response.statusCode)) {
@@ -113,6 +131,7 @@
const cacheResult = response.result
const cacheDownloadUrl = cacheResult?.archiveLocation
if (!cacheDownloadUrl) {
// Cache archiveLocation not found. This should never happen, so bail out.
throw new Error('Cache not found.')
}
core.setSecret(cacheDownloadUrl)
@@ -122,6 +141,31 @@
return cacheResult
}
async function printCachesListForDiagnostics(
key: string,
httpClient: HttpClient,
version: string
): Promise<void> {
const resource = `caches?key=${encodeURIComponent(key)}`
const response = await retryTypedResponse('listCache', async () =>
httpClient.getJson<ArtifactCacheList>(getCacheApiUrl(resource))
)
if (response.statusCode === 200) {
const cacheListResult = response.result
const totalCount = cacheListResult?.totalCount
if (totalCount && totalCount > 0) {
core.debug(
`No matching cache found for cache key '${key}', version '${version}' and scope ${process.env['GITHUB_REF']}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key \nOther caches with similar key:`
)
for (const cacheEntry of cacheListResult?.artifactCaches || []) {
core.debug(
`Cache Key: ${cacheEntry?.cacheKey}, Cache Version: ${cacheEntry?.cacheVersion}, Cache Scope: ${cacheEntry?.scope}, Cache Created: ${cacheEntry?.creationTime}`
)
}
}
}
}
export async function downloadCache(
archiveLocation: string,
archivePath: string,
@@ -149,7 +193,11 @@ export async function reserveCache(
options?: InternalCacheOptions
): Promise<ITypedResponseWithError<ReserveCacheResponse>> {
const httpClient = createHttpClient()
const version = getCacheVersion(paths, options?.compressionMethod)
const version = getCacheVersion(
paths,
options?.compressionMethod,
options?.enableCrossOsArchive
)
const reserveCacheRequest: ReserveCacheRequest = {
key,

View File

@@ -7,7 +7,11 @@ import * as path from 'path'
import * as semver from 'semver'
import * as util from 'util'
import {v4 as uuidV4} from 'uuid'
import {CacheFilename, CompressionMethod} from './constants'
import {
CacheFilename,
CompressionMethod,
GnuTarPathOnWindows
} from './constants'
// From https://github.com/actions/toolkit/blob/main/packages/tool-cache/src/tool-cache.ts#L23
export async function createTempDirectory(): Promise<string> {
@@ -90,11 +94,6 @@ async function getVersion(app: string): Promise<string> {
// Use zstandard if possible to maximize cache performance
export async function getCompressionMethod(): Promise<CompressionMethod> {
if (process.platform === 'win32' && !(await isGnuTarInstalled())) {
// Disable zstd due to bug https://github.com/actions/cache/issues/301
return CompressionMethod.Gzip
}
const versionOutput = await getVersion('zstd')
const version = semver.clean(versionOutput)
@@ -116,9 +115,12 @@ export function getCacheFileName(compressionMethod: CompressionMethod): string {
: CacheFilename.Zstd
}
export async function isGnuTarInstalled(): Promise<boolean> {
export async function getGnuTarPathOnWindows(): Promise<string> {
if (fs.existsSync(GnuTarPathOnWindows)) {
return GnuTarPathOnWindows
}
const versionOutput = await getVersion('tar')
return versionOutput.toLowerCase().includes('gnu tar')
return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : ''
}
export function assertDefined<T>(name: string, value?: T): T {

View File

@@ -11,6 +11,11 @@ export enum CompressionMethod {
Zstd = 'zstd'
}
export enum ArchiveToolType {
GNU = 'gnu',
BSD = 'bsd'
}
// The default number of retry attempts.
export const DefaultRetryAttempts = 2
@@ -21,3 +26,13 @@ export const DefaultRetryDelay = 5000
// over the socket during this period, the socket is destroyed and the download
// is aborted.
export const SocketTimeout = 5000
// The default path of GNUtar on hosted Windows runners
export const GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`
// The default path of BSDtar on hosted Windows runners
export const SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`
export const TarFilename = 'cache.tar'
export const ManifestFilename = 'manifest.txt'

View File

@@ -9,10 +9,16 @@ export interface ITypedResponseWithError<T> extends TypedResponse<T> {
export interface ArtifactCacheEntry {
cacheKey?: string
scope?: string
cacheVersion?: string
creationTime?: string
archiveLocation?: string
}
export interface ArtifactCacheList {
totalCount: number
artifactCaches?: ArtifactCacheEntry[]
}
export interface CommitCacheRequest {
size: number
}
@@ -29,5 +35,11 @@ export interface ReserveCacheResponse {
export interface InternalCacheOptions {
compressionMethod?: CompressionMethod
enableCrossOsArchive?: boolean
cacheSize?: number
}
export interface ArchiveTool {
path: string
type: string
}

View File

@@ -3,25 +3,28 @@ import * as io from '@actions/io'
import {existsSync, writeFileSync} from 'fs'
import * as path from 'path'
import * as utils from './cacheUtils'
import {CompressionMethod} from './constants'
import {ArchiveTool} from './contracts'
import {
CompressionMethod,
SystemTarPathOnWindows,
ArchiveToolType,
TarFilename,
ManifestFilename
} from './constants'
const IS_WINDOWS = process.platform === 'win32'
async function getTarPath(
args: string[],
compressionMethod: CompressionMethod
): Promise<string> {
// Returns tar path and type: BSD or GNU
async function getTarPath(): Promise<ArchiveTool> {
switch (process.platform) {
case 'win32': {
const systemTar = `${process.env['windir']}\\System32\\tar.exe`
if (compressionMethod !== CompressionMethod.Gzip) {
// We only use zstandard compression on windows when gnu tar is installed due to
// a bug with compressing large files with bsdtar + zstd
args.push('--force-local')
const gnuTar = await utils.getGnuTarPathOnWindows()
const systemTar = SystemTarPathOnWindows
if (gnuTar) {
// Use GNUtar as default on windows
return <ArchiveTool>{path: gnuTar, type: ArchiveToolType.GNU}
} else if (existsSync(systemTar)) {
return systemTar
} else if (await utils.isGnuTarInstalled()) {
args.push('--force-local')
return <ArchiveTool>{path: systemTar, type: ArchiveToolType.BSD}
}
break
}
@@ -29,27 +32,133 @@
const gnuTar = await io.which('gtar', false)
if (gnuTar) {
// fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527
args.push('--delay-directory-restore')
return gnuTar
return <ArchiveTool>{path: gnuTar, type: ArchiveToolType.GNU}
} else {
return <ArchiveTool>{
path: await io.which('tar', true),
type: ArchiveToolType.BSD
}
}
break
}
default:
break
}
return await io.which('tar', true)
// Default assumption is GNU tar is present in path
return <ArchiveTool>{
path: await io.which('tar', true),
type: ArchiveToolType.GNU
}
}
async function execTar(
args: string[],
// Return arguments for tar as per tarPath, compressionMethod, method type and os
async function getTarArgs(
tarPath: ArchiveTool,
compressionMethod: CompressionMethod,
cwd?: string
): Promise<void> {
try {
await exec(`"${await getTarPath(args, compressionMethod)}"`, args, {cwd})
} catch (error) {
throw new Error(`Tar failed with error: ${error?.message}`)
type: string,
archivePath = ''
): Promise<string[]> {
const args = [`"${tarPath.path}"`]
const cacheFileName = utils.getCacheFileName(compressionMethod)
const tarFile = 'cache.tar'
const workingDirectory = getWorkingDirectory()
// Specific args for the BSD tar workaround on windows
const BSD_TAR_ZSTD =
tarPath.type === ArchiveToolType.BSD &&
compressionMethod !== CompressionMethod.Gzip &&
IS_WINDOWS
// Method specific args
switch (type) {
case 'create':
args.push(
'--posix',
'-cf',
BSD_TAR_ZSTD
? tarFile
: cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'--exclude',
BSD_TAR_ZSTD
? tarFile
: cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'-P',
'-C',
workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'--files-from',
ManifestFilename
)
break
case 'extract':
args.push(
'-xf',
BSD_TAR_ZSTD
? tarFile
: archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'-P',
'-C',
workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
)
break
case 'list':
args.push(
'-tf',
BSD_TAR_ZSTD
? tarFile
: archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'-P'
)
break
}
// Platform specific args
if (tarPath.type === ArchiveToolType.GNU) {
switch (process.platform) {
case 'win32':
args.push('--force-local')
break
case 'darwin':
args.push('--delay-directory-restore')
break
}
}
return args
}
// Returns commands to run tar and compression program
async function getCommands(
compressionMethod: CompressionMethod,
type: string,
archivePath = ''
): Promise<string[]> {
let args
const tarPath = await getTarPath()
const tarArgs = await getTarArgs(
tarPath,
compressionMethod,
type,
archivePath
)
const compressionArgs =
type !== 'create'
? await getDecompressionProgram(tarPath, compressionMethod, archivePath)
: await getCompressionProgram(tarPath, compressionMethod)
const BSD_TAR_ZSTD =
tarPath.type === ArchiveToolType.BSD &&
compressionMethod !== CompressionMethod.Gzip &&
IS_WINDOWS
if (BSD_TAR_ZSTD && type !== 'create') {
args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')]
} else {
args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')]
}
if (BSD_TAR_ZSTD) {
return args
}
return [args.join(' ')]
}
function getWorkingDirectory(): string {
@@ -57,37 +166,110 @@ function getWorkingDirectory(): string {
}
// Common function for extractTar and listTar to get the compression method
function getCompressionProgram(compressionMethod: CompressionMethod): string[] {
async function getDecompressionProgram(
tarPath: ArchiveTool,
compressionMethod: CompressionMethod,
archivePath: string
): Promise<string[]> {
// -d: Decompress.
// unzstd is equivalent to 'zstd -d'
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
// Using 30 here because we also support 32-bit self-hosted runners.
const BSD_TAR_ZSTD =
tarPath.type === ArchiveToolType.BSD &&
compressionMethod !== CompressionMethod.Gzip &&
IS_WINDOWS
switch (compressionMethod) {
case CompressionMethod.Zstd:
return [
return BSD_TAR_ZSTD
? [
'zstd -d --long=30 --force -o',
TarFilename,
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
]
: [
'--use-compress-program',
IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30'
IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30'
]
case CompressionMethod.ZstdWithoutLong:
return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd']
return BSD_TAR_ZSTD
? [
'zstd -d --force -o',
TarFilename,
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
]
: ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd']
default:
return ['-z']
}
}
// Used for creating the archive
// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
// zstdmt is equivalent to 'zstd -T0'
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
// Using 30 here because we also support 32-bit self-hosted runners.
// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
async function getCompressionProgram(
tarPath: ArchiveTool,
compressionMethod: CompressionMethod
): Promise<string[]> {
const cacheFileName = utils.getCacheFileName(compressionMethod)
const BSD_TAR_ZSTD =
tarPath.type === ArchiveToolType.BSD &&
compressionMethod !== CompressionMethod.Gzip &&
IS_WINDOWS
switch (compressionMethod) {
case CompressionMethod.Zstd:
return BSD_TAR_ZSTD
? [
'zstd -T0 --long=30 --force -o',
cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
TarFilename
]
: [
'--use-compress-program',
IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30'
]
case CompressionMethod.ZstdWithoutLong:
return BSD_TAR_ZSTD
? [
'zstd -T0 --force -o',
cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
TarFilename
]
: ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt']
default:
return ['-z']
}
}
// Executes all commands as separate processes
async function execCommands(commands: string[], cwd?: string): Promise<void> {
for (const command of commands) {
try {
await exec(command, undefined, {
cwd,
env: {...(process.env as object), MSYS: 'winsymlinks:nativestrict'}
})
} catch (error) {
throw new Error(
`${command.split(' ')[0]} failed with error: ${error?.message}`
)
}
}
}
// List the contents of a tar
export async function listTar(
archivePath: string,
compressionMethod: CompressionMethod
): Promise<void> {
const args = [
...getCompressionProgram(compressionMethod),
'-tf',
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'-P'
]
await execTar(args, compressionMethod)
const commands = await getCommands(compressionMethod, 'list', archivePath)
await execCommands(commands)
}
// Extract a tar
export async function extractTar(
archivePath: string,
compressionMethod: CompressionMethod
@@ -95,61 +277,21 @@ export async function extractTar(
// Create directory to extract tar into
const workingDirectory = getWorkingDirectory()
await io.mkdirP(workingDirectory)
const args = [
...getCompressionProgram(compressionMethod),
'-xf',
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'-P',
'-C',
workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
]
await execTar(args, compressionMethod)
const commands = await getCommands(compressionMethod, 'extract', archivePath)
await execCommands(commands)
}
// Create a tar
export async function createTar(
archiveFolder: string,
sourceDirectories: string[],
compressionMethod: CompressionMethod
): Promise<void> {
// Write source directories to manifest.txt to avoid command length limits
const manifestFilename = 'manifest.txt'
const cacheFileName = utils.getCacheFileName(compressionMethod)
writeFileSync(
path.join(archiveFolder, manifestFilename),
path.join(archiveFolder, ManifestFilename),
sourceDirectories.join('\n')
)
const workingDirectory = getWorkingDirectory()
// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
// zstdmt is equivalent to 'zstd -T0'
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
// Using 30 here because we also support 32-bit self-hosted runners.
// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
function getCompressionProgram(): string[] {
switch (compressionMethod) {
case CompressionMethod.Zstd:
return [
'--use-compress-program',
IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30'
]
case CompressionMethod.ZstdWithoutLong:
return ['--use-compress-program', IS_WINDOWS ? 'zstd -T0' : 'zstdmt']
default:
return ['-z']
}
}
const args = [
'--posix',
...getCompressionProgram(),
'-cf',
cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'--exclude',
cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'-P',
'-C',
workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'--files-from',
manifestFilename
]
await execTar(args, compressionMethod, archiveFolder)
const commands = await getCommands(compressionMethod, 'create')
await execCommands(commands, archiveFolder)
}
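// A hedged end-to-end sketch (illustrative; the folder paths are hypothetical,
// 'cache.tzst' is the zstd cache file name): create, inspect, then restore.
async function exampleRoundTrip(): Promise<void> {
  await createTar('/tmp/archive', ['node_modules', 'dist'], CompressionMethod.Zstd)
  await listTar('/tmp/archive/cache.tzst', CompressionMethod.Zstd)
  await extractTar('/tmp/archive/cache.tzst', CompressionMethod.Zstd)
}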

View File

@@ -121,7 +121,7 @@ const result = await core.group('Do something async', async () => {
This library has 3 methods that will produce [annotations](https://docs.github.com/en/rest/reference/checks#create-a-check-run).
```js
core.error('This is a bad error. This will also fail the build.')
core.error('This is a bad error, action may still succeed though.')
core.warning('Something went wrong, but it\'s not bad enough to fail the build.')
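// A hedged addition (assumes the current @actions/core annotation API):
// annotations can carry optional properties pointing at a source location.
core.notice('Consider extracting this helper', {
  title: 'Refactor suggestion',
  file: 'src/main.ts',
  startLine: 42
})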

View File

@@ -34,7 +34,7 @@ export enum ExitCode {
}
/**
* Optional properties that can be sent with annotatation commands (notice, error, and warning)
* Optional properties that can be sent with annotation commands (notice, error, and warning)
* See: https://docs.github.com/en/rest/reference/checks#create-a-check-run for more information about annotations.
*/
export interface AnnotationProperties {

View File

@@ -22,7 +22,7 @@ describe('@actions/github', () => {
proxyServer.listen(port, () => resolve())
})
proxyServer.on('connect', req => {
proxyConnects.push(req.url)
proxyConnects.push(req.url ?? '')
})
})

View File

@@ -18,7 +18,7 @@ describe('@actions/github', () => {
proxyServer.listen(port, () => resolve(null))
})
proxyServer.on('connect', req => {
proxyConnects.push(req.url)
proxyConnects.push(req.url ?? '')
})
})

View File

@@ -1,5 +1,8 @@
# @actions/glob Releases
### 0.4.0
- Pass in the current workspace as a parameter to HashFiles [#1318](https://github.com/actions/toolkit/pull/1318)
### 0.3.0
- Added a `verbose` option to HashFiles [#1052](https://github.com/actions/toolkit/pull/1052/files)

View File

@@ -51,6 +51,7 @@ describe('globber', () => {
)
})
const emptyDirectory = ''
it('followSymbolicLinks set to true', async () => {
const root = path.join(getTestTemp(), 'set-to-true')
await fs.mkdir(path.join(root, 'realdir'), {recursive: true})
@@ -60,7 +61,9 @@
path.join(root, 'symDir')
)
const testPath = path.join(root, `symDir`)
const hash = await hashFiles(testPath, {followSymbolicLinks: true})
const hash = await hashFiles(testPath, emptyDirectory, {
followSymbolicLinks: true
})
expect(hash).toEqual(
'd8a411e8f8643821bed189e627ff57151918aa554c00c10b31c693ab2dded273'
)
@@ -80,7 +83,9 @@ describe('globber', () => {
path.join(root, 'symDir')
)
const testPath = path.join(root, 'symdir')
const hash = await hashFiles(testPath, {followSymbolicLinks: false})
const hash = await hashFiles(testPath, emptyDirectory, {
followSymbolicLinks: false
})
expect(hash).toEqual('')
})

View File

@@ -1,6 +1,6 @@
{
"name": "@actions/glob",
"version": "0.3.0",
"version": "0.4.0",
"lockfileVersion": 2,
"requires": true,
"packages": {
@@ -50,9 +50,9 @@
"integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s="
},
"node_modules/minimatch": {
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"dependencies": {
"brace-expansion": "^1.1.7"
},
@@ -115,9 +115,9 @@
"integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s="
},
"minimatch": {
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"requires": {
"brace-expansion": "^1.1.7"
}

View File

@@ -1,6 +1,6 @@
{
"name": "@actions/glob",
"version": "0.3.0",
"version": "0.4.0",
"preview": true,
"description": "Actions glob lib",
"keywords": [

View File

@@ -22,10 +22,13 @@ export async function create(
* Computes the sha256 hash of a glob
*
* @param patterns Patterns separated by newlines
* @param currentWorkspace Workspace used when matching files
* @param options Glob options
* @param verbose Enables verbose logging
*/
export async function hashFiles(
patterns: string,
currentWorkspace = '',
options?: HashFileOptions,
verbose: Boolean = false
): Promise<string> {
@@ -34,5 +37,5 @@ export async function hashFiles(
followSymbolicLinks = options.followSymbolicLinks
}
const globber = await create(patterns, {followSymbolicLinks})
return _hashFiles(globber, verbose)
return _hashFiles(globber, currentWorkspace, verbose)
}
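// A hedged usage sketch (not part of the diff; the workspace path is
// hypothetical). If currentWorkspace is empty, resolution falls back to
// GITHUB_WORKSPACE and finally process.cwd(), per the internal helper.
const lockHash = await hashFiles(
  '**/package-lock.json',
  '/home/runner/work/my-repo/my-repo',
  {followSymbolicLinks: false},
  true // verbose logging
)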

View File

@@ -8,11 +8,14 @@ import {Globber} from './glob'
export async function hashFiles(
globber: Globber,
currentWorkspace: string,
verbose: Boolean = false
): Promise<string> {
const writeDelegate = verbose ? core.info : core.debug
let hasMatch = false
const githubWorkspace = process.env['GITHUB_WORKSPACE'] ?? process.cwd()
const githubWorkspace = currentWorkspace
? currentWorkspace
: process.env['GITHUB_WORKSPACE'] ?? process.cwd()
const result = crypto.createHash('sha256')
let count = 0
for await (const file of globber.globGenerator()) {

View File

@@ -19,7 +19,7 @@ describe('proxy', () => {
_proxyServer.listen(port, () => resolve())
})
_proxyServer.on('connect', req => {
_proxyConnects.push(req.url)
_proxyConnects.push(req.url ?? '')
})
})
@@ -145,6 +145,44 @@
expect(bypass).toBeFalsy()
})
it('checkBypass returns true if host with subdomain in no_proxy', () => {
process.env['no_proxy'] = 'myserver.com'
const bypass = pm.checkBypass(new URL('https://sub.myserver.com'))
expect(bypass).toBeTruthy()
})
it('checkBypass returns false if no_proxy is subdomain', () => {
process.env['no_proxy'] = 'myserver.com'
const bypass = pm.checkBypass(new URL('https://myserver.com.evil.org'))
expect(bypass).toBeFalsy()
})
it('checkBypass returns false if no_proxy is part of domain', () => {
process.env['no_proxy'] = 'myserver.com'
const bypass = pm.checkBypass(new URL('https://evilmyserver.com'))
expect(bypass).toBeFalsy()
})
// Do not strip leading dots as per https://github.com/actions/runner/blob/97195bad5870e2ad0915ebfef1616083aacf5818/docs/adrs/0263-proxy-support.md
it('checkBypass returns false if host with leading dot in no_proxy', () => {
process.env['no_proxy'] = '.myserver.com'
const bypass = pm.checkBypass(new URL('https://myserver.com'))
expect(bypass).toBeFalsy()
})
it('checkBypass returns true if host with subdomain in no_proxy defined with leading "."', () => {
process.env['no_proxy'] = '.myserver.com'
const bypass = pm.checkBypass(new URL('https://sub.myserver.com'))
expect(bypass).toBeTruthy()
})
// Do not match wildcard ("*") as per https://github.com/actions/runner/blob/97195bad5870e2ad0915ebfef1616083aacf5818/docs/adrs/0263-proxy-support.md
it('checkBypass returns false if no_proxy is "*"', () => {
process.env['no_proxy'] = '*'
const bypass = pm.checkBypass(new URL('https://anything.whatsoever.com'))
expect(bypass).toBeFalsy()
})
it('HttpClient does basic http get request through proxy', async () => {
process.env['http_proxy'] = _proxyUrl
const httpClient = new httpm.HttpClient()

View File

@@ -51,7 +51,15 @@ export function checkBypass(reqUrl: URL): boolean {
.split(',')
.map(x => x.trim().toUpperCase())
.filter(x => x)) {
if (upperReqHosts.some(x => x === upperNoProxyItem)) {
if (
upperReqHosts.some(
x =>
x === upperNoProxyItem ||
x.endsWith(`.${upperNoProxyItem}`) ||
(upperNoProxyItem.startsWith('.') &&
x.endsWith(`${upperNoProxyItem}`))
)
) {
return true
}
}
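// A hedged illustration mirroring the tests above, with no_proxy='myserver.com':
//   checkBypass(new URL('https://myserver.com'))      // true  (exact match)
//   checkBypass(new URL('https://sub.myserver.com'))  // true  ('.myserver.com' suffix)
//   checkBypass(new URL('https://evilmyserver.com'))  // false (no dot boundary)
// and with no_proxy='.myserver.com':
//   checkBypass(new URL('https://myserver.com'))      // false (leading dot is kept)
//   checkBypass(new URL('https://sub.myserver.com'))  // true  (suffix match)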

View File

@@ -17,10 +17,10 @@
"uuid": "^3.3.2"
},
"devDependencies": {
"@types/nock": "^10.0.3",
"@types/nock": "^11.1.0",
"@types/semver": "^6.0.0",
"@types/uuid": "^3.4.4",
"nock": "^10.0.6"
"nock": "^13.2.9"
}
},
"node_modules/@actions/core": {
@@ -62,12 +62,13 @@
"integrity": "sha512-d+RwPlMp+2qmBfeLYPLXuSRykDIFEwdTA0MMxzS9kh4kvP1ftrc/9fzy6pX6qAjthdXruHQ6/6kjT/DNo5ALuw=="
},
"node_modules/@types/nock": {
"version": "10.0.3",
"resolved": "https://registry.npmjs.org/@types/nock/-/nock-10.0.3.tgz",
"integrity": "sha512-OthuN+2FuzfZO3yONJ/QVjKmLEuRagS9TV9lEId+WHL9KhftYG+/2z+pxlr0UgVVXSpVD8woie/3fzQn8ft/Ow==",
"version": "11.1.0",
"resolved": "https://registry.npmjs.org/@types/nock/-/nock-11.1.0.tgz",
"integrity": "sha512-jI/ewavBQ7X5178262JQR0ewicPAcJhXS/iFaNJl0VHLfyosZ/kwSrsa6VNQNSO8i9d8SqdRgOtZSOKJ/+iNMw==",
"deprecated": "This is a stub types definition. nock provides its own type definitions, so you do not need this installed.",
"dev": true,
"dependencies": {
"@types/node": "*"
"nock": "*"
}
},
"node_modules/@types/node": {
@@ -91,41 +92,6 @@
"@types/node": "*"
}
},
"node_modules/assertion-error": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz",
"integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==",
"dev": true,
"engines": {
"node": "*"
}
},
"node_modules/chai": {
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/chai/-/chai-4.2.0.tgz",
"integrity": "sha512-XQU3bhBukrOsQCuwZndwGcCVQHyZi53fQ6Ys1Fym7E4olpIqqZZhhoFJoaKVvV17lWQoXYwgWN2nF5crA8J2jw==",
"dev": true,
"dependencies": {
"assertion-error": "^1.1.0",
"check-error": "^1.0.2",
"deep-eql": "^3.0.1",
"get-func-name": "^2.0.0",
"pathval": "^1.1.0",
"type-detect": "^4.0.5"
},
"engines": {
"node": ">=4"
}
},
"node_modules/check-error": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz",
"integrity": "sha1-V00xLt2Iu13YkS6Sht1sCu1KrII=",
"dev": true,
"engines": {
"node": "*"
}
},
"node_modules/debug": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz",
@@ -136,33 +102,6 @@
"ms": "^2.1.1"
}
},
"node_modules/deep-eql": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz",
"integrity": "sha512-+QeIQyN5ZuO+3Uk5DYh6/1eKO0m0YmJFGNmFHGACpf1ClL1nmlV/p4gNgbl2pJGxgXb4faqo6UE+M5ACEMyVcw==",
"dev": true,
"dependencies": {
"type-detect": "^4.0.0"
},
"engines": {
"node": ">=0.12"
}
},
"node_modules/deep-equal": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/deep-equal/-/deep-equal-1.0.1.tgz",
"integrity": "sha1-9dJgKStmDghO/0zbyfCK0yR0SLU=",
"dev": true
},
"node_modules/get-func-name": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz",
"integrity": "sha1-6td0q+5y4gQJQzoGY2YCPdaIekE=",
"dev": true,
"engines": {
"node": "*"
}
},
"node_modules/json-stringify-safe": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz",
@@ -175,24 +114,6 @@
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==",
"dev": true
},
"node_modules/minimist": {
"version": "1.2.6",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz",
"integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==",
"dev": true
},
"node_modules/mkdirp": {
"version": "0.5.5",
"resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz",
"integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==",
"dev": true,
"dependencies": {
"minimist": "^1.2.5"
},
"bin": {
"mkdirp": "bin/cmd.js"
}
},
"node_modules/ms": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
@@ -200,59 +121,27 @@
"dev": true
},
"node_modules/nock": {
"version": "10.0.6",
"resolved": "https://registry.npmjs.org/nock/-/nock-10.0.6.tgz",
"integrity": "sha512-b47OWj1qf/LqSQYnmokNWM8D88KvUl2y7jT0567NB3ZBAZFz2bWp2PC81Xn7u8F2/vJxzkzNZybnemeFa7AZ2w==",
"version": "13.2.9",
"resolved": "https://registry.npmjs.org/nock/-/nock-13.2.9.tgz",
"integrity": "sha512-1+XfJNYF1cjGB+TKMWi29eZ0b82QOvQs2YoLNzbpWGqFMtRQHTa57osqdGj4FrFPgkO4D4AZinzUJR9VvW3QUA==",
"dev": true,
"dependencies": {
"chai": "^4.1.2",
"debug": "^4.1.0",
"deep-equal": "^1.0.0",
"json-stringify-safe": "^5.0.1",
"lodash": "^4.17.5",
"mkdirp": "^0.5.0",
"propagate": "^1.0.0",
"qs": "^6.5.1",
"semver": "^5.5.0"
"lodash": "^4.17.21",
"propagate": "^2.0.0"
},
"engines": {
"node": ">= 6.0"
}
},
"node_modules/nock/node_modules/semver": {
"version": "5.7.0",
"resolved": "https://registry.npmjs.org/semver/-/semver-5.7.0.tgz",
"integrity": "sha512-Ya52jSX2u7QKghxeoFGpLwCtGlt7j0oY9DYb5apt9nPlJ42ID+ulTXESnt/qAQcoSERyZ5sl3LDIOw0nAn/5DA==",
"dev": true,
"bin": {
"semver": "bin/semver"
}
},
"node_modules/pathval": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz",
"integrity": "sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==",
"dev": true,
"engines": {
"node": "*"
"node": ">= 10.13"
}
},
"node_modules/propagate": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/propagate/-/propagate-1.0.0.tgz",
"integrity": "sha1-AMLa7t2iDofjeCs0Stuhzd1q1wk=",
"dev": true,
"engines": [
"node >= 0.8.1"
]
},
"node_modules/qs": {
"version": "6.7.0",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.7.0.tgz",
"integrity": "sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ==",
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/propagate/-/propagate-2.0.1.tgz",
"integrity": "sha512-vGrhOavPSTz4QVNuBNdcNXePNdNMaO1xj9yBeH1ScQPjk/rhg9sSlCXPhMkFuaNNW/syTvYqsnbIJxMBfRbbag==",
"dev": true,
"engines": {
"node": ">=0.6"
"node": ">= 8"
}
},
"node_modules/semver": {
@@ -271,15 +160,6 @@
"node": ">=0.6.11 <=0.7.0 || >=0.7.3"
}
},
"node_modules/type-detect": {
"version": "4.0.8",
"resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz",
"integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==",
"dev": true,
"engines": {
"node": ">=4"
}
},
"node_modules/uuid": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz",
@@ -329,12 +209,12 @@
"integrity": "sha512-d+RwPlMp+2qmBfeLYPLXuSRykDIFEwdTA0MMxzS9kh4kvP1ftrc/9fzy6pX6qAjthdXruHQ6/6kjT/DNo5ALuw=="
},
"@types/nock": {
"version": "10.0.3",
"resolved": "https://registry.npmjs.org/@types/nock/-/nock-10.0.3.tgz",
"integrity": "sha512-OthuN+2FuzfZO3yONJ/QVjKmLEuRagS9TV9lEId+WHL9KhftYG+/2z+pxlr0UgVVXSpVD8woie/3fzQn8ft/Ow==",
"version": "11.1.0",
"resolved": "https://registry.npmjs.org/@types/nock/-/nock-11.1.0.tgz",
"integrity": "sha512-jI/ewavBQ7X5178262JQR0ewicPAcJhXS/iFaNJl0VHLfyosZ/kwSrsa6VNQNSO8i9d8SqdRgOtZSOKJ/+iNMw==",
"dev": true,
"requires": {
"@types/node": "*"
"nock": "*"
}
},
"@types/node": {
@@ -358,32 +238,6 @@
"@types/node": "*"
}
},
"assertion-error": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz",
"integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==",
"dev": true
},
"chai": {
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/chai/-/chai-4.2.0.tgz",
"integrity": "sha512-XQU3bhBukrOsQCuwZndwGcCVQHyZi53fQ6Ys1Fym7E4olpIqqZZhhoFJoaKVvV17lWQoXYwgWN2nF5crA8J2jw==",
"dev": true,
"requires": {
"assertion-error": "^1.1.0",
"check-error": "^1.0.2",
"deep-eql": "^3.0.1",
"get-func-name": "^2.0.0",
"pathval": "^1.1.0",
"type-detect": "^4.0.5"
}
},
"check-error": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz",
"integrity": "sha1-V00xLt2Iu13YkS6Sht1sCu1KrII=",
"dev": true
},
"debug": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz",
@@ -393,27 +247,6 @@
"ms": "^2.1.1"
}
},
"deep-eql": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz",
"integrity": "sha512-+QeIQyN5ZuO+3Uk5DYh6/1eKO0m0YmJFGNmFHGACpf1ClL1nmlV/p4gNgbl2pJGxgXb4faqo6UE+M5ACEMyVcw==",
"dev": true,
"requires": {
"type-detect": "^4.0.0"
}
},
"deep-equal": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/deep-equal/-/deep-equal-1.0.1.tgz",
"integrity": "sha1-9dJgKStmDghO/0zbyfCK0yR0SLU=",
"dev": true
},
"get-func-name": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz",
"integrity": "sha1-6td0q+5y4gQJQzoGY2YCPdaIekE=",
"dev": true
},
"json-stringify-safe": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz",
@@ -426,21 +259,6 @@
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==",
"dev": true
},
"minimist": {
"version": "1.2.6",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz",
"integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==",
"dev": true
},
"mkdirp": {
"version": "0.5.5",
"resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz",
"integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==",
"dev": true,
"requires": {
"minimist": "^1.2.5"
}
},
"ms": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
@@ -448,46 +266,21 @@
"dev": true
},
"nock": {
"version": "10.0.6",
"resolved": "https://registry.npmjs.org/nock/-/nock-10.0.6.tgz",
"integrity": "sha512-b47OWj1qf/LqSQYnmokNWM8D88KvUl2y7jT0567NB3ZBAZFz2bWp2PC81Xn7u8F2/vJxzkzNZybnemeFa7AZ2w==",
"version": "13.2.9",
"resolved": "https://registry.npmjs.org/nock/-/nock-13.2.9.tgz",
"integrity": "sha512-1+XfJNYF1cjGB+TKMWi29eZ0b82QOvQs2YoLNzbpWGqFMtRQHTa57osqdGj4FrFPgkO4D4AZinzUJR9VvW3QUA==",
"dev": true,
"requires": {
"chai": "^4.1.2",
"debug": "^4.1.0",
"deep-equal": "^1.0.0",
"json-stringify-safe": "^5.0.1",
"lodash": "^4.17.5",
"mkdirp": "^0.5.0",
"propagate": "^1.0.0",
"qs": "^6.5.1",
"semver": "^5.5.0"
},
"dependencies": {
"semver": {
"version": "5.7.0",
"resolved": "https://registry.npmjs.org/semver/-/semver-5.7.0.tgz",
"integrity": "sha512-Ya52jSX2u7QKghxeoFGpLwCtGlt7j0oY9DYb5apt9nPlJ42ID+ulTXESnt/qAQcoSERyZ5sl3LDIOw0nAn/5DA==",
"dev": true
"lodash": "^4.17.21",
"propagate": "^2.0.0"
}
}
},
"pathval": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz",
"integrity": "sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==",
"dev": true
},
"propagate": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/propagate/-/propagate-1.0.0.tgz",
"integrity": "sha1-AMLa7t2iDofjeCs0Stuhzd1q1wk=",
"dev": true
},
"qs": {
"version": "6.7.0",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.7.0.tgz",
"integrity": "sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ==",
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/propagate/-/propagate-2.0.1.tgz",
"integrity": "sha512-vGrhOavPSTz4QVNuBNdcNXePNdNMaO1xj9yBeH1ScQPjk/rhg9sSlCXPhMkFuaNNW/syTvYqsnbIJxMBfRbbag==",
"dev": true
},
"semver": {
@@ -500,12 +293,6 @@
"resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz",
"integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg=="
},
"type-detect": {
"version": "4.0.8",
"resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz",
"integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==",
"dev": true
},
"uuid": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz",

View File

@@ -44,9 +44,9 @@
"uuid": "^3.3.2"
},
"devDependencies": {
"@types/nock": "^10.0.3",
"@types/nock": "^11.1.0",
"@types/semver": "^6.0.0",
"@types/uuid": "^3.4.4",
"nock": "^10.0.6"
"nock": "^13.2.9"
}
}

View File

@@ -1,5 +1,9 @@
#!/usr/bin/env node
// NEEDS TO BE UPDATED TO WORK ON NODE 16 BECAUSE NPM AUDIT --JSON OUTPUT CHANGED
// THE AUDIT WORKFLOW IS ONLY RUN ON PRS, BUT IT CAN BE IGNORED AND A RELEASE CAN BE CREATED NONETHELESS
// @fhammerl @rentziass
/*
This script takes the output of npm audit --json from stdin
and writes a filtered version to stdout.