mirror of https://github.com/actions/toolkit
Merge branch 'main' into kotewar/updating-blobstorage-client-to-12.13.0
commit fe92749762
@@ -153,6 +153,10 @@
 - Fix zstd not being used due to `zstd --version` output change in zstd 1.5.4 release. See [#1353](https://github.com/actions/toolkit/pull/1353).

-### 3.1.5
+### 3.2.0
+- Add `lookupOnly` to cache restore `DownloadOptions`.
+
+### 3.2.1

 - Updated @azure/storage-blob to `v12.13.0`
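The `lookupOnly` option added in 3.2.0 lets a caller check for a cache hit without downloading the archive. A minimal sketch of how a consumer might use it, assuming `@actions/cache` is installed; the paths and key below are illustrative:

```ts
import {restoreCache} from '@actions/cache'

async function cacheExists(): Promise<boolean> {
  // With lookupOnly set, restoreCache only queries the cache service and
  // returns the matched key (or undefined); nothing is downloaded to disk.
  const hit = await restoreCache(['node_modules'], 'node-test', undefined, {
    lookupOnly: true
  })
  return hit !== undefined
}
```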
@@ -9,6 +9,7 @@ const useAzureSdk = true
 const downloadConcurrency = 8
 const timeoutInMs = 30000
 const segmentTimeoutInMs = 600000
+const lookupOnly = false
 const uploadConcurrency = 4
 const uploadChunkSize = 32 * 1024 * 1024
@@ -19,7 +20,8 @@ test('getDownloadOptions sets defaults', async () => {
     useAzureSdk,
     downloadConcurrency,
     timeoutInMs,
-    segmentTimeoutInMs
+    segmentTimeoutInMs,
+    lookupOnly
   })
 })
@@ -28,7 +30,8 @@ test('getDownloadOptions overrides all settings', async () => {
     useAzureSdk: false,
     downloadConcurrency: 14,
     timeoutInMs: 20000,
-    segmentTimeoutInMs: 3600000
+    segmentTimeoutInMs: 3600000,
+    lookupOnly: true
   }

   const actualOptions = getDownloadOptions(expectedOptions)
@@ -61,7 +64,8 @@ test('getDownloadOptions overrides download timeout minutes', async () => {
     useAzureSdk: false,
     downloadConcurrency: 14,
     timeoutInMs: 20000,
-    segmentTimeoutInMs: 3600000
+    segmentTimeoutInMs: 3600000,
+    lookupOnly: true
   }
   process.env.SEGMENT_DOWNLOAD_TIMEOUT_MINS = '10'
   const actualOptions = getDownloadOptions(expectedOptions)
@@ -72,4 +76,5 @@ test('getDownloadOptions overrides download timeout minutes', async () => {
   )
   expect(actualOptions.timeoutInMs).toEqual(expectedOptions.timeoutInMs)
   expect(actualOptions.segmentTimeoutInMs).toEqual(600000)
+  expect(actualOptions.lookupOnly).toEqual(expectedOptions.lookupOnly)
 })
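The `600000` expected in the last assertion comes from the env override path: `SEGMENT_DOWNLOAD_TIMEOUT_MINS` is given in minutes and converted to milliseconds, taking precedence over the `segmentTimeoutInMs` that was passed in. For reference:

```ts
// 10 minutes from SEGMENT_DOWNLOAD_TIMEOUT_MINS, converted to milliseconds:
const expectedSegmentTimeoutInMs = 10 * 60 * 1000 // = 600000
```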
@@ -276,3 +276,39 @@ test('restore with cache found for restore key', async () => {
   expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression)
   expect(getCompressionMock).toHaveBeenCalledTimes(1)
 })
+
+test('restore with dry run', async () => {
+  const paths = ['node_modules']
+  const key = 'node-test'
+  const options = {lookupOnly: true}
+
+  const cacheEntry: ArtifactCacheEntry = {
+    cacheKey: key,
+    scope: 'refs/heads/main',
+    archiveLocation: 'www.actionscache.test/download'
+  }
+  const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry')
+  getCacheMock.mockImplementation(async () => {
+    return Promise.resolve(cacheEntry)
+  })
+
+  const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
+  const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
+
+  const compression = CompressionMethod.Gzip
+  const getCompressionMock = jest
+    .spyOn(cacheUtils, 'getCompressionMethod')
+    .mockReturnValue(Promise.resolve(compression))
+
+  const cacheKey = await restoreCache(paths, key, undefined, options)
+
+  expect(cacheKey).toBe(key)
+  expect(getCompressionMock).toHaveBeenCalledTimes(1)
+  expect(getCacheMock).toHaveBeenCalledWith([key], paths, {
+    compressionMethod: compression,
+    enableCrossOsArchive: false
+  })
+  // creating a tempDir and downloading the cache are skipped
+  expect(createTempDirectoryMock).toHaveBeenCalledTimes(0)
+  expect(downloadCacheMock).toHaveBeenCalledTimes(0)
+})
@@ -1,12 +1,12 @@
 {
   "name": "@actions/cache",
-  "version": "3.1.5",
+  "version": "3.2.1",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "@actions/cache",
-      "version": "3.1.5",
+      "version": "3.2.1",
       "license": "MIT",
       "dependencies": {
         "@actions/core": "^1.10.0",
@@ -1,6 +1,6 @@
 {
   "name": "@actions/cache",
-  "version": "3.1.5",
+  "version": "3.2.1",
   "preview": true,
   "description": "Actions cache lib",
   "keywords": [
@@ -100,6 +100,11 @@ export async function restoreCache(
       return undefined
     }

+    if (options?.lookupOnly) {
+      core.info('Lookup only - skipping download')
+      return cacheEntry.cacheKey
+    }
+
     archivePath = path.join(
       await utils.createTempDirectory(),
       utils.getCacheFileName(compressionMethod)
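Note the early return above: in lookup-only mode `restoreCache` resolves with the matched key but never creates a temp directory or downloads the archive, so callers must not expect restored files on disk. A sketch of a caller that relies only on the hit/miss signal; the helper name is hypothetical:

```ts
import * as core from '@actions/core'
import {restoreCache, saveCache} from '@actions/cache'

async function ensureCached(paths: string[], key: string): Promise<void> {
  const hit = await restoreCache(paths, key, undefined, {lookupOnly: true})
  if (hit) {
    // The entry exists on the server; nothing was downloaded or extracted.
    core.info(`Cache hit for ${hit} - skipping rebuild`)
    return
  }
  await rebuildDependencies() // hypothetical placeholder for the expensive work
  await saveCache(paths, key)
}

// Hypothetical stand-in for whatever produces the cached paths.
async function rebuildDependencies(): Promise<void> {}
```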
@@ -53,6 +53,15 @@ export interface DownloadOptions {
    * @default 3600000
    */
   segmentTimeoutInMs?: number
+
+  /**
+   * Whether to skip downloading the cache entry.
+   * If lookupOnly is set to true, the restore function will only check if
+   * a matching cache entry exists and return the cache key if it does.
+   *
+   * @default false
+   */
+  lookupOnly?: boolean
 }

 /**
@@ -93,6 +102,7 @@ export function getDownloadOptions(copy?: DownloadOptions): DownloadOptions {
     downloadConcurrency: 8,
     timeoutInMs: 30000,
-    segmentTimeoutInMs: 600000
+    segmentTimeoutInMs: 600000,
+    lookupOnly: false
   }

   if (copy) {
@@ -111,6 +121,10 @@ export function getDownloadOptions(copy?: DownloadOptions): DownloadOptions {
     if (typeof copy.segmentTimeoutInMs === 'number') {
       result.segmentTimeoutInMs = copy.segmentTimeoutInMs
     }
+
+    if (typeof copy.lookupOnly === 'boolean') {
+      result.lookupOnly = copy.lookupOnly
+    }
   }
   const segmentDownloadTimeoutMins =
     process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']
@@ -129,6 +143,7 @@ export function getDownloadOptions(copy?: DownloadOptions): DownloadOptions {
     `Cache segment download timeout mins env var: ${process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']}`
   )
   core.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`)
+  core.debug(`Lookup only: ${result.lookupOnly}`)

   return result
 }
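Putting the hunks above together, a sketch of how the merged options behave from a consumer's point of view. The deep import path `@actions/cache/lib/options` is an assumption about the published layout, and the values are illustrative:

```ts
import {getDownloadOptions} from '@actions/cache/lib/options' // assumed path to the compiled module

// No overrides: every field falls back to its default, including lookupOnly.
const defaults = getDownloadOptions()
// -> { useAzureSdk: true, downloadConcurrency: 8, timeoutInMs: 30000,
//      segmentTimeoutInMs: 600000, lookupOnly: false }

// Explicit fields on the passed-in copy override the defaults.
const custom = getDownloadOptions({downloadConcurrency: 14, lookupOnly: true})

// SEGMENT_DOWNLOAD_TIMEOUT_MINS (minutes) is applied last, so it wins over a
// passed-in segmentTimeoutInMs value.
process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS'] = '10'
const fromEnv = getDownloadOptions({segmentTimeoutInMs: 3600000})
// fromEnv.segmentTimeoutInMs === 600000
```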
@@ -1,5 +1,9 @@
 ## Releases
+
+## 2.1.0
+- Add support for `*` and subdomains in `no_proxy` [#1355](https://github.com/actions/toolkit/pull/1355) [#1223](https://github.com/actions/toolkit/pull/1223)
+- Bypass proxy for loopback IP addresses [#1360](https://github.com/actions/toolkit/pull/1360)

 ## 2.0.1
 - Fix an issue with missing `tunnel` dependency [#1085](https://github.com/actions/toolkit/pull/1085)
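A quick sketch of what the 2.1.0 `no_proxy` handling means for a consumer of `@actions/http-client`; the proxy URL and hosts below are illustrative:

```ts
import * as httpm from '@actions/http-client'

async function main(): Promise<void> {
  // With 2.1.0 a lone "*" in no_proxy bypasses the proxy for every host,
  // and a bare domain entry also matches its subdomains.
  process.env['https_proxy'] = 'http://127.0.0.1:8080' // illustrative proxy
  process.env['no_proxy'] = '*' // or e.g. 'example.com' to also cover api.example.com

  const client = new httpm.HttpClient()
  // This request goes straight to the origin rather than through the proxy above.
  const res = await client.get('https://anything.whatsoever.com')
  console.log(res.message.statusCode)
}

main()
```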
@@ -176,11 +176,16 @@ describe('proxy', () => {
     expect(bypass).toBeTruthy()
   })

-  // Do not match wildcard ("*") as per https://github.com/actions/runner/blob/97195bad5870e2ad0915ebfef1616083aacf5818/docs/adrs/0263-proxy-support.md
   it('checkBypass returns true if no_proxy is "*"', () => {
     process.env['no_proxy'] = '*'
     const bypass = pm.checkBypass(new URL('https://anything.whatsoever.com'))
-    expect(bypass).toBeFalsy()
+    expect(bypass).toBeTruthy()
   })

+  it('checkBypass returns true if no_proxy contains comma separated "*"', () => {
+    process.env['no_proxy'] = 'domain.com,* , example.com'
+    const bypass = pm.checkBypass(new URL('https://anything.whatsoever.com'))
+    expect(bypass).toBeTruthy()
+  })
+
   it('HttpClient does basic http get request through proxy', async () => {
@@ -237,6 +242,31 @@ describe('proxy', () => {
     expect(_proxyConnects).toHaveLength(0)
   })

+  it('HttpClient bypasses proxy for loopback addresses (localhost, ::1, 127.*)', async () => {
+    // setup a server listening on localhost:8091
+    const server = http.createServer((request, response) => {
+      response.writeHead(200)
+      request.pipe(response)
+    })
+    server.listen(8091)
+    try {
+      process.env['http_proxy'] = _proxyUrl
+      const httpClient = new httpm.HttpClient()
+      let res = await httpClient.get('http://localhost:8091')
+      expect(res.message.statusCode).toBe(200)
+      res = await httpClient.get('http://127.0.0.1:8091')
+      expect(res.message.statusCode).toBe(200)
+
+      // no support for ipv6 for now
+      expect(httpClient.get('http://[::1]:8091')).rejects.toThrow()
+
+      // proxy at _proxyUrl was ignored
+      expect(_proxyConnects).toEqual([])
+    } finally {
+      server.close()
+    }
+  })
+
   it('proxyAuth not set in tunnel agent when authentication is not provided', async () => {
     process.env['https_proxy'] = 'http://127.0.0.1:8080'
     const httpClient = new httpm.HttpClient()
@@ -1,6 +1,6 @@
 {
   "name": "@actions/http-client",
-  "version": "2.0.1",
+  "version": "2.1.0",
   "description": "Actions Http Client",
   "keywords": [
     "github",
@@ -25,6 +25,11 @@ export function checkBypass(reqUrl: URL): boolean {
     return false
   }

+  const reqHost = reqUrl.hostname
+  if (isLoopbackAddress(reqHost)) {
+    return true
+  }
+
   const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''
   if (!noProxy) {
     return false
@@ -52,6 +57,7 @@ export function checkBypass(reqUrl: URL): boolean {
     .map(x => x.trim().toUpperCase())
     .filter(x => x)) {
     if (
+      upperNoProxyItem === '*' ||
       upperReqHosts.some(
         x =>
           x === upperNoProxyItem ||
@@ -66,3 +72,13 @@ export function checkBypass(reqUrl: URL): boolean {

   return false
 }
+
+function isLoopbackAddress(host: string): boolean {
+  const hostLower = host.toLowerCase()
+  return (
+    hostLower === 'localhost' ||
+    hostLower.startsWith('127.') ||
+    hostLower.startsWith('[::1]') ||
+    hostLower.startsWith('[0:0:0:0:0:0:0:1]')
+  )
+}
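A short sketch of the resulting `checkBypass` behavior; the deep import path `@actions/http-client/lib/proxy` is an assumption about the published package layout, and the hosts mirror the ones used in the tests above:

```ts
import {checkBypass} from '@actions/http-client/lib/proxy' // assumed path

// Loopback hosts now bypass the proxy regardless of no_proxy:
checkBypass(new URL('http://localhost:8091')) // true
checkBypass(new URL('http://127.0.0.1:8091')) // true

// A lone "*" entry bypasses everything:
process.env['no_proxy'] = '*'
checkBypass(new URL('https://anything.whatsoever.com')) // true

// Subdomains of a listed domain are matched as well:
process.env['no_proxy'] = 'example.com'
checkBypass(new URL('https://api.example.com')) // true
```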