diff --git a/.github/workflows/cache-tests.yml b/.github/workflows/cache-tests.yml index 1ac811ea..a2daa7eb 100644 --- a/.github/workflows/cache-tests.yml +++ b/.github/workflows/cache-tests.yml @@ -58,17 +58,33 @@ jobs: run: | node -e "Promise.resolve(require('./packages/cache/lib/cache').saveCache(['test-cache','~/test-cache'],'test-${{ runner.os }}-${{ github.run_id }}'))" - - name: Delete cache folders prior to restore + - name: Delete cache folders before restoring shell: bash run: | rm -rf test-cache rm -rf ~/test-cache - - name: Restore cache using restoreCache() + - name: Restore cache using restoreCache() with http-client run: | - node -e "Promise.resolve(require('./packages/cache/lib/cache').restoreCache(['test-cache','~/test-cache'],'test-${{ runner.os }}-${{ github.run_id }}'))" + node -e "Promise.resolve(require('./packages/cache/lib/cache').restoreCache(['test-cache','~/test-cache'],'test-${{ runner.os }}-${{ github.run_id }}',[],{useAzureSdk: false}))" - - name: Verify cache + - name: Verify cache restored with http-client + shell: bash + run: | + packages/cache/__tests__/verify-cache-files.sh ${{ runner.os }} test-cache + packages/cache/__tests__/verify-cache-files.sh ${{ runner.os }} ~/test-cache + + - name: Delete cache folders before restoring + shell: bash + run: | + rm -rf test-cache + rm -rf ~/test-cache + + - name: Restore cache using restoreCache() with Azure SDK + run: | + node -e "Promise.resolve(require('./packages/cache/lib/cache').restoreCache(['test-cache','~/test-cache'],'test-${{ runner.os }}-${{ github.run_id }}'))" + + - name: Verify cache restored with Azure SDK shell: bash run: | packages/cache/__tests__/verify-cache-files.sh ${{ runner.os }} test-cache diff --git a/package-lock.json b/package-lock.json index f6a43d03..87069cb9 100644 --- a/package-lock.json +++ b/package-lock.json @@ -3962,9 +3962,9 @@ "dev": true }, "@types/node": { - "version": "11.13.5", - "resolved": 
"https://registry.npmjs.org/@types/node/-/node-11.13.5.tgz", - "integrity": "sha512-/OMMBnjVtDuwX1tg2pkYVSqRIDSmNTnvVvmvP/2xiMAAWf4a5+JozrApCrO4WCAILmXVxfNoQ3E+0HJbNpFVGg==", + "version": "12.12.47", + "resolved": "https://registry.npmjs.org/@types/node/-/node-12.12.47.tgz", + "integrity": "sha512-yzBInQFhdY8kaZmqoL2+3U5dSTMrKaYcb561VU+lDzAYvqt+2lojvBEy+hmpSNuXnPTx7m9+04CzWYOUqWME2A==", "dev": true }, "@types/signale": { diff --git a/package.json b/package.json index f8e3b17c..8efab789 100644 --- a/package.json +++ b/package.json @@ -14,7 +14,7 @@ }, "devDependencies": { "@types/jest": "^24.0.11", - "@types/node": "^11.13.5", + "@types/node": "^12.12.47", "@types/signale": "^1.2.1", "@typescript-eslint/parser": "^2.2.7", "concurrently": "^4.1.0", diff --git a/packages/cache/RELEASES.md b/packages/cache/RELEASES.md index 0e623d9e..bc18cea2 100644 --- a/packages/cache/RELEASES.md +++ b/packages/cache/RELEASES.md @@ -8,4 +8,9 @@ - Fixes issues with the zstd compression algorithm on Windows and Ubuntu 16.04 [#469](https://github.com/actions/toolkit/pull/469) ### 0.2.1 -- Fix to await async function getCompressionMethod \ No newline at end of file +- Fix to await async function getCompressionMethod + +### 1.0.0 +- Downloads Azure-hosted caches using the Azure SDK for speed and reliability +- Includes changes that break compatibility with earlier versions, including: + - `retry`, `retryTypedResponse`, and `retryHttpClientResponse` moved from `cacheHttpClient` to `requestUtils` \ No newline at end of file diff --git a/packages/cache/__tests__/cacheHttpClient.test.ts b/packages/cache/__tests__/cacheHttpClient.test.ts index a7f3fec1..90dd5cfc 100644 --- a/packages/cache/__tests__/cacheHttpClient.test.ts +++ b/packages/cache/__tests__/cacheHttpClient.test.ts @@ -1,5 +1,9 @@ -import {getCacheVersion, retry} from '../src/internal/cacheHttpClient' +import {downloadCache, getCacheVersion} from '../src/internal/cacheHttpClient' import {CompressionMethod} from 
'../src/internal/constants' +import * as downloadUtils from '../src/internal/downloadUtils' +import {DownloadOptions, getDownloadOptions} from '../src/options' + +jest.mock('../src/internal/downloadUtils') test('getCacheVersion with one path returns version', async () => { const paths = ['node_modules'] @@ -35,141 +39,103 @@ test('getCacheVersion with gzip compression does not change vesion', async () => ) }) -interface TestResponse { - statusCode: number - result: string | null -} - -async function handleResponse( - response: TestResponse | undefined -): Promise { - if (!response) { - // eslint-disable-next-line no-undef - fail('Retry method called too many times') - } - - if (response.statusCode === 999) { - throw Error('Test Error') - } else { - return Promise.resolve(response) - } -} - -async function testRetryExpectingResult( - responses: TestResponse[], - expectedResult: string | null -): Promise { - responses = responses.reverse() // Reverse responses since we pop from end - - const actualResult = await retry( - 'test', - async () => handleResponse(responses.pop()), - (response: TestResponse) => response.statusCode +test('downloadCache uses http-client for non-Azure URLs', async () => { + const downloadCacheHttpClientMock = jest.spyOn( + downloadUtils, + 'downloadCacheHttpClient' + ) + const downloadCacheStorageSDKMock = jest.spyOn( + downloadUtils, + 'downloadCacheStorageSDK' ) - expect(actualResult.result).toEqual(expectedResult) -} + const archiveLocation = 'http://www.actionscache.test/download' + const archivePath = '/foo/bar' -async function testRetryExpectingError( - responses: TestResponse[] -): Promise { - responses = responses.reverse() // Reverse responses since we pop from end + await downloadCache(archiveLocation, archivePath) - expect( - retry( - 'test', - async () => handleResponse(responses.pop()), - (response: TestResponse) => response.statusCode - ) - ).rejects.toBeInstanceOf(Error) -} - -test('retry works on successful response', async () 
=> { - await testRetryExpectingResult( - [ - { - statusCode: 200, - result: 'Ok' - } - ], - 'Ok' + expect(downloadCacheHttpClientMock).toHaveBeenCalledTimes(1) + expect(downloadCacheHttpClientMock).toHaveBeenCalledWith( + archiveLocation, + archivePath ) + + expect(downloadCacheStorageSDKMock).toHaveBeenCalledTimes(0) }) -test('retry works after retryable status code', async () => { - await testRetryExpectingResult( - [ - { - statusCode: 503, - result: null - }, - { - statusCode: 200, - result: 'Ok' - } - ], - 'Ok' +test('downloadCache uses storage SDK for Azure storage URLs', async () => { + const downloadCacheHttpClientMock = jest.spyOn( + downloadUtils, + 'downloadCacheHttpClient' ) -}) - -test('retry fails after exhausting retries', async () => { - await testRetryExpectingError([ - { - statusCode: 503, - result: null - }, - { - statusCode: 503, - result: null - }, - { - statusCode: 200, - result: 'Ok' - } - ]) -}) - -test('retry fails after non-retryable status code', async () => { - await testRetryExpectingError([ - { - statusCode: 500, - result: null - }, - { - statusCode: 200, - result: 'Ok' - } - ]) -}) - -test('retry works after error', async () => { - await testRetryExpectingResult( - [ - { - statusCode: 999, - result: null - }, - { - statusCode: 200, - result: 'Ok' - } - ], - 'Ok' + const downloadCacheStorageSDKMock = jest.spyOn( + downloadUtils, + 'downloadCacheStorageSDK' ) + + const archiveLocation = 'http://foo.blob.core.windows.net/bar/baz' + const archivePath = '/foo/bar' + + await downloadCache(archiveLocation, archivePath) + + expect(downloadCacheStorageSDKMock).toHaveBeenCalledTimes(1) + expect(downloadCacheStorageSDKMock).toHaveBeenCalledWith( + archiveLocation, + archivePath, + getDownloadOptions() + ) + + expect(downloadCacheHttpClientMock).toHaveBeenCalledTimes(0) }) -test('retry returns after client error', async () => { - await testRetryExpectingResult( - [ - { - statusCode: 400, - result: null - }, - { - statusCode: 200, - result: 'Ok' - 
} - ], - null +test('downloadCache passes options to download methods', async () => { + const downloadCacheHttpClientMock = jest.spyOn( + downloadUtils, + 'downloadCacheHttpClient' ) + const downloadCacheStorageSDKMock = jest.spyOn( + downloadUtils, + 'downloadCacheStorageSDK' + ) + + const archiveLocation = 'http://foo.blob.core.windows.net/bar/baz' + const archivePath = '/foo/bar' + const options: DownloadOptions = {downloadConcurrency: 4} + + await downloadCache(archiveLocation, archivePath, options) + + expect(downloadCacheStorageSDKMock).toHaveBeenCalledTimes(1) + expect(downloadCacheStorageSDKMock).toHaveBeenCalled() + expect(downloadCacheStorageSDKMock).toHaveBeenCalledWith( + archiveLocation, + archivePath, + getDownloadOptions(options) + ) + + expect(downloadCacheHttpClientMock).toHaveBeenCalledTimes(0) +}) + +test('downloadCache uses http-client when overridden', async () => { + const downloadCacheHttpClientMock = jest.spyOn( + downloadUtils, + 'downloadCacheHttpClient' + ) + const downloadCacheStorageSDKMock = jest.spyOn( + downloadUtils, + 'downloadCacheStorageSDK' + ) + + const archiveLocation = 'http://foo.blob.core.windows.net/bar/baz' + const archivePath = '/foo/bar' + const options: DownloadOptions = {useAzureSdk: false} + + await downloadCache(archiveLocation, archivePath, options) + + expect(downloadCacheHttpClientMock).toHaveBeenCalledTimes(1) + expect(downloadCacheHttpClientMock).toHaveBeenCalledWith( + archiveLocation, + archivePath + ) + + expect(downloadCacheStorageSDKMock).toHaveBeenCalledTimes(0) }) diff --git a/packages/cache/__tests__/cacheUtils.test.ts b/packages/cache/__tests__/cacheUtils.test.ts index 25d7ca82..32a95860 100644 --- a/packages/cache/__tests__/cacheUtils.test.ts +++ b/packages/cache/__tests__/cacheUtils.test.ts @@ -24,3 +24,11 @@ test('unlinkFile unlinks file', async () => { await fs.rmdir(testDirectory) }) + +test('assertDefined throws if undefined', () => { + expect(() => cacheUtils.assertDefined('test', 
undefined)).toThrowError() +}) + +test('assertDefined returns value', () => { + expect(cacheUtils.assertDefined('test', 5)).toBe(5) +}) diff --git a/packages/cache/__tests__/options.test.ts b/packages/cache/__tests__/options.test.ts new file mode 100644 index 00000000..3075b43b --- /dev/null +++ b/packages/cache/__tests__/options.test.ts @@ -0,0 +1,54 @@ +import { + DownloadOptions, + UploadOptions, + getDownloadOptions, + getUploadOptions +} from '../src/options' + +const useAzureSdk = true +const downloadConcurrency = 8 +const timeoutInMs = 30000 +const uploadConcurrency = 4 +const uploadChunkSize = 32 * 1024 * 1024 + +test('getDownloadOptions sets defaults', async () => { + const actualOptions = getDownloadOptions() + + expect(actualOptions).toEqual({ + useAzureSdk, + downloadConcurrency, + timeoutInMs + }) +}) + +test('getDownloadOptions overrides all settings', async () => { + const expectedOptions: DownloadOptions = { + useAzureSdk: false, + downloadConcurrency: 14, + timeoutInMs: 20000 + } + + const actualOptions = getDownloadOptions(expectedOptions) + + expect(actualOptions).toEqual(expectedOptions) +}) + +test('getUploadOptions sets defaults', async () => { + const actualOptions = getUploadOptions() + + expect(actualOptions).toEqual({ + uploadConcurrency, + uploadChunkSize + }) +}) + +test('getUploadOptions overrides all settings', async () => { + const expectedOptions: UploadOptions = { + uploadConcurrency: 2, + uploadChunkSize: 16 * 1024 * 1024 + } + + const actualOptions = getUploadOptions(expectedOptions) + + expect(actualOptions).toEqual(expectedOptions) +}) diff --git a/packages/cache/__tests__/requestUtils.test.ts b/packages/cache/__tests__/requestUtils.test.ts new file mode 100644 index 00000000..27fef955 --- /dev/null +++ b/packages/cache/__tests__/requestUtils.test.ts @@ -0,0 +1,140 @@ +import {retry} from '../src/internal/requestUtils' + +interface TestResponse { + statusCode: number + result: string | null +} + +async function handleResponse( + 
response: TestResponse | undefined +): Promise { + if (!response) { + // eslint-disable-next-line no-undef + fail('Retry method called too many times') + } + + if (response.statusCode === 999) { + throw Error('Test Error') + } else { + return Promise.resolve(response) + } +} + +async function testRetryExpectingResult( + responses: TestResponse[], + expectedResult: string | null +): Promise { + responses = responses.reverse() // Reverse responses since we pop from end + + const actualResult = await retry( + 'test', + async () => handleResponse(responses.pop()), + (response: TestResponse) => response.statusCode + ) + + expect(actualResult.result).toEqual(expectedResult) +} + +async function testRetryExpectingError( + responses: TestResponse[] +): Promise { + responses = responses.reverse() // Reverse responses since we pop from end + + expect( + retry( + 'test', + async () => handleResponse(responses.pop()), + (response: TestResponse) => response.statusCode + ) + ).rejects.toBeInstanceOf(Error) +} + +test('retry works on successful response', async () => { + await testRetryExpectingResult( + [ + { + statusCode: 200, + result: 'Ok' + } + ], + 'Ok' + ) +}) + +test('retry works after retryable status code', async () => { + await testRetryExpectingResult( + [ + { + statusCode: 503, + result: null + }, + { + statusCode: 200, + result: 'Ok' + } + ], + 'Ok' + ) +}) + +test('retry fails after exhausting retries', async () => { + await testRetryExpectingError([ + { + statusCode: 503, + result: null + }, + { + statusCode: 503, + result: null + }, + { + statusCode: 200, + result: 'Ok' + } + ]) +}) + +test('retry fails after non-retryable status code', async () => { + await testRetryExpectingError([ + { + statusCode: 500, + result: null + }, + { + statusCode: 200, + result: 'Ok' + } + ]) +}) + +test('retry works after error', async () => { + await testRetryExpectingResult( + [ + { + statusCode: 999, + result: null + }, + { + statusCode: 200, + result: 'Ok' + } + ], + 'Ok' + ) 
+}) + +test('retry returns after client error', async () => { + await testRetryExpectingResult( + [ + { + statusCode: 400, + result: null + }, + { + statusCode: 200, + result: 'Ok' + } + ], + null + ) +}) diff --git a/packages/cache/__tests__/restoreCache.test.ts b/packages/cache/__tests__/restoreCache.test.ts index 2e0fd068..c8a52ea4 100644 --- a/packages/cache/__tests__/restoreCache.test.ts +++ b/packages/cache/__tests__/restoreCache.test.ts @@ -144,7 +144,8 @@ test('restore with gzip compressed cache found', async () => { expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) expect(downloadCacheMock).toHaveBeenCalledWith( cacheEntry.archiveLocation, - archivePath + archivePath, + undefined ) expect(getArchiveFileSizeIsBytesMock).toHaveBeenCalledWith(archivePath) @@ -202,7 +203,8 @@ test('restore with zstd compressed cache found', async () => { expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) expect(downloadCacheMock).toHaveBeenCalledWith( cacheEntry.archiveLocation, - archivePath + archivePath, + undefined ) expect(getArchiveFileSizeIsBytesMock).toHaveBeenCalledWith(archivePath) expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`) @@ -258,7 +260,8 @@ test('restore with cache found for restore key', async () => { expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) expect(downloadCacheMock).toHaveBeenCalledWith( cacheEntry.archiveLocation, - archivePath + archivePath, + undefined ) expect(getArchiveFileSizeIsBytesMock).toHaveBeenCalledWith(archivePath) expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`) diff --git a/packages/cache/package-lock.json b/packages/cache/package-lock.json index a05581c1..774a5e5a 100644 --- a/packages/cache/package-lock.json +++ b/packages/cache/package-lock.json @@ -1,6 +1,6 @@ { "name": "@actions/cache", - "version": "0.2.1", + "version": "0.3.0", "lockfileVersion": 1, "requires": true, "dependencies": { @@ -39,18 +39,169 @@ "resolved": 
"https://registry.npmjs.org/@actions/io/-/io-1.0.2.tgz", "integrity": "sha512-J8KuFqVPr3p6U8W93DOXlXW6zFvrQAJANdS+vw0YhusLIq+bszW8zmK2Fh1C2kDPX8FMvwIl1OUcFgvJoXLbAg==" }, + "@azure/abort-controller": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-1.0.1.tgz", + "integrity": "sha512-wP2Jw6uPp8DEDy0n4KNidvwzDjyVV2xnycEIq7nPzj1rHyb/r+t3OPeNT1INZePP2wy5ZqlwyuyOMTi0ePyY1A==", + "requires": { + "tslib": "^1.9.3" + } + }, + "@azure/core-asynciterator-polyfill": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@azure/core-asynciterator-polyfill/-/core-asynciterator-polyfill-1.0.0.tgz", + "integrity": "sha512-kmv8CGrPfN9SwMwrkiBK9VTQYxdFQEGe0BmQk+M8io56P9KNzpAxcWE/1fxJj7uouwN4kXF0BHW8DNlgx+wtCg==" + }, + "@azure/core-auth": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@azure/core-auth/-/core-auth-1.1.2.tgz", + "integrity": "sha512-IUbP/f3v96dpHgXUwsAjUwDzjlUjawyUhWhGKKB6Qxy+iqppC/pVBPyc6kdpyTe7H30HN+4H3f0lar7Wp9Hx6A==", + "requires": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-tracing": "1.0.0-preview.8", + "@opentelemetry/api": "^0.6.1", + "tslib": "^1.10.0" + } + }, + "@azure/core-http": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@azure/core-http/-/core-http-1.1.3.tgz", + "integrity": "sha512-GysW3+BRVV4L9cs3GsuCbnlyibrQU6hh5mcJ7hlnk7tdUBzWybUvJ8/P/nHX49PgwRmi81pD5v1ht2jF0IzxAQ==", + "requires": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-auth": "^1.1.2", + "@azure/core-tracing": "1.0.0-preview.8", + "@azure/logger": "^1.0.0", + "@opentelemetry/api": "^0.6.1", + "@types/node-fetch": "^2.5.0", + "@types/tunnel": "^0.0.1", + "form-data": "^3.0.0", + "node-fetch": "^2.6.0", + "process": "^0.11.10", + "tough-cookie": "^4.0.0", + "tslib": "^1.10.0", + "tunnel": "^0.0.6", + "uuid": "^8.1.0", + "xml2js": "^0.4.19" + }, + "dependencies": { + "uuid": { + "version": "8.1.0", + "resolved": 
"https://registry.npmjs.org/uuid/-/uuid-8.1.0.tgz", + "integrity": "sha512-CI18flHDznR0lq54xBycOVmphdCYnQLKn8abKn7PXUiKUGdEd+/l9LWNJmugXel4hXq7S+RMNl34ecyC9TntWg==" + } + } + }, + "@azure/core-lro": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@azure/core-lro/-/core-lro-1.0.2.tgz", + "integrity": "sha512-Yr0JD7GKryOmbcb5wHCQoQ4KCcH5QJWRNorofid+UvudLaxnbCfvKh/cUfQsGUqRjO9L/Bw4X7FP824DcHdMxw==", + "requires": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-http": "^1.1.1", + "events": "^3.0.0", + "tslib": "^1.10.0" + } + }, + "@azure/core-paging": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@azure/core-paging/-/core-paging-1.1.1.tgz", + "integrity": "sha512-hqEJBEGKan4YdOaL9ZG/GRG6PXaFd/Wb3SSjQW4LWotZzgl6xqG00h6wmkrpd2NNkbBkD1erLHBO3lPHApv+iQ==", + "requires": { + "@azure/core-asynciterator-polyfill": "^1.0.0" + } + }, + "@azure/core-tracing": { + "version": "1.0.0-preview.8", + "resolved": "https://registry.npmjs.org/@azure/core-tracing/-/core-tracing-1.0.0-preview.8.tgz", + "integrity": "sha512-ZKUpCd7Dlyfn7bdc+/zC/sf0aRIaNQMDuSj2RhYRFe3p70hVAnYGp3TX4cnG2yoEALp/LTj/XnZGQ8Xzf6Ja/Q==", + "requires": { + "@opencensus/web-types": "0.0.7", + "@opentelemetry/api": "^0.6.1", + "tslib": "^1.10.0" + } + }, + "@azure/logger": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@azure/logger/-/logger-1.0.0.tgz", + "integrity": "sha512-g2qLDgvmhyIxR3JVS8N67CyIOeFRKQlX/llxYJQr1OSGQqM3HTpVP8MjmjcEKbL/OIt2N9C9UFaNQuKOw1laOA==", + "requires": { + "tslib": "^1.9.3" + } + }, + "@azure/storage-blob": { + "version": "12.1.2", + "resolved": "https://registry.npmjs.org/@azure/storage-blob/-/storage-blob-12.1.2.tgz", + "integrity": "sha512-PCHgG4r3xLt5FaFj+uiMqrRpuzD3TD17cvxCeA1JKK2bJEf8b07H3QRLQVf0DM1MmvYY8FgQagkWZTp+jr9yew==", + "requires": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-http": "^1.1.1", + "@azure/core-lro": "^1.0.2", + "@azure/core-paging": "^1.1.1", + "@azure/core-tracing": "1.0.0-preview.8", + 
"@azure/logger": "^1.0.0", + "@opentelemetry/api": "^0.6.1", + "events": "^3.0.0", + "tslib": "^1.10.0" + } + }, + "@opencensus/web-types": { + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/@opencensus/web-types/-/web-types-0.0.7.tgz", + "integrity": "sha512-xB+w7ZDAu3YBzqH44rCmG9/RlrOmFuDPt/bpf17eJr8eZSrLt7nc7LnWdxM9Mmoj/YKMHpxRg28txu3TcpiL+g==" + }, + "@opentelemetry/api": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-0.6.1.tgz", + "integrity": "sha512-wpufGZa7tTxw7eAsjXJtiyIQ42IWQdX9iUQp7ACJcKo1hCtuhLU+K2Nv1U6oRwT1oAlZTE6m4CgWKZBhOiau3Q==", + "requires": { + "@opentelemetry/context-base": "^0.6.1" + } + }, + "@opentelemetry/context-base": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/context-base/-/context-base-0.6.1.tgz", + "integrity": "sha512-5bHhlTBBq82ti3qPT15TRxkYTFPPQWbnkkQkmHPtqiS1XcTB69cEKd3Jm7Cfi/vkPoyxapmePE9tyA7EzLt8SQ==" + }, + "@types/node": { + "version": "14.0.11", + "resolved": "https://registry.npmjs.org/@types/node/-/node-14.0.11.tgz", + "integrity": "sha512-lCvvI24L21ZVeIiyIUHZ5Oflv1hhHQ5E1S25IRlKIXaRkVgmXpJMI3wUJkmym2bTbCe+WoIibQnMVAU3FguaOg==" + }, + "@types/node-fetch": { + "version": "2.5.7", + "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.5.7.tgz", + "integrity": "sha512-o2WVNf5UhWRkxlf6eq+jMZDu7kjgpgJfl4xVNlvryc95O/6F2ld8ztKX+qu+Rjyet93WAWm5LjeX9H5FGkODvw==", + "requires": { + "@types/node": "*", + "form-data": "^3.0.0" + } + }, "@types/semver": { "version": "6.2.1", "resolved": "https://registry.npmjs.org/@types/semver/-/semver-6.2.1.tgz", "integrity": "sha512-+beqKQOh9PYxuHvijhVl+tIHvT6tuwOrE9m14zd+MT2A38KoKZhh7pYJ0SNleLtwDsiIxHDsIk9bv01oOxvSvA==", "dev": true }, + "@types/tunnel": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/@types/tunnel/-/tunnel-0.0.1.tgz", + "integrity": "sha512-AOqu6bQu5MSWwYvehMXLukFHnupHrpZ8nvgae5Ggie9UwzDR1CCwoXgSSWNZJuyOlCdfdsWMA5F2LlmvyoTv8A==", + "requires": { + 
"@types/node": "*" + } + }, "@types/uuid": { "version": "3.4.9", "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-3.4.9.tgz", "integrity": "sha512-XDwyIlt/47l2kWLTzw/mtrpLdB+GPSskR2n/PIcPn+VYhVO77rGhRncIR5GPU0KRzXuqkDO+J5qqrG0Y8P6jzQ==", "dev": true }, + "asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=" + }, "balanced-match": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", @@ -65,11 +216,52 @@ "concat-map": "0.0.1" } }, + "combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "requires": { + "delayed-stream": "~1.0.0" + } + }, "concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" }, + "delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=" + }, + "events": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/events/-/events-3.1.0.tgz", + "integrity": "sha512-Rv+u8MLHNOdMjTAFeT3nCjHn2aGlx435FP/sDHNaRhDEMwyI/aB22Kj2qIN8R0cw3z28psEQLYwxVKLsKrMgWg==" + }, + "form-data": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.0.tgz", + "integrity": "sha512-CKMFDglpbMi6PyN+brwB9Q/GOw0eAnsrEZDgcsH5Krhz5Od/haKHAX0NmQfha2zPPz0JpWzA7GJHGSnvCRLWsg==", + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + } + }, + "mime-db": { + "version": "1.44.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.44.0.tgz", + "integrity": 
"sha512-/NOTfLrsPBVeH7YtFPgsVWveuL+4SjjYxaQ1xtM1KMFj7HdxlBlxeyNLzhyJVx7r4rZGJAZ/6lkKCitSc/Nmpg==" + }, + "mime-types": { + "version": "2.1.27", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.27.tgz", + "integrity": "sha512-JIhqnCasI9yD+SsmkquHBxTSEuZdQX5BuQnS2Vc7puQQQ+8yiP5AY5uWhpdv4YL4VM5c6iliiYWPgJ/nJQLp7w==", + "requires": { + "mime-db": "1.44.0" + } + }, "minimatch": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", @@ -78,11 +270,51 @@ "brace-expansion": "^1.1.7" } }, + "node-fetch": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.0.tgz", + "integrity": "sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA==" + }, + "process": { + "version": "0.11.10", + "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", + "integrity": "sha1-czIwDoQBYb2j5podHZGn1LwW8YI=" + }, + "psl": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz", + "integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==" + }, + "punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==" + }, + "sax": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz", + "integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==" + }, "semver": { "version": "6.3.0", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" }, + "tough-cookie": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz", + "integrity": 
"sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==", + "requires": { + "psl": "^1.1.33", + "punycode": "^2.1.1", + "universalify": "^0.1.2" + } + }, + "tslib": { + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.13.0.tgz", + "integrity": "sha512-i/6DQjL8Xf3be4K/E6Wgpekn5Qasl1usyw++dAA35Ue5orEn65VIxOA+YvNNl9HV3qv70T7CNwjODHZrLwvd1Q==" + }, "tunnel": { "version": "0.0.6", "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", @@ -94,10 +326,29 @@ "integrity": "sha512-MYlEfn5VrLNsgudQTVJeNaQFUAI7DkhnOjdpAp4T+ku1TfQClewlbSuTVHiA+8skNBgaf02TL/kLOvig4y3G8w==", "dev": true }, + "universalify": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz", + "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==" + }, "uuid": { "version": "3.4.0", "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" + }, + "xml2js": { + "version": "0.4.23", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.23.tgz", + "integrity": "sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug==", + "requires": { + "sax": ">=0.6.0", + "xmlbuilder": "~11.0.0" + } + }, + "xmlbuilder": { + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", + "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==" } } } diff --git a/packages/cache/package.json b/packages/cache/package.json index 6e0a3181..3299349b 100644 --- a/packages/cache/package.json +++ b/packages/cache/package.json @@ -1,6 +1,6 @@ { "name": "@actions/cache", - "version": "0.2.1", + "version": "1.0.0", "preview": true, "description": "Actions cache lib", "keywords": [ @@ -42,6 +42,7 @@ 
"@actions/glob": "^0.1.0", "@actions/http-client": "^1.0.8", "@actions/io": "^1.0.1", + "@azure/storage-blob": "^12.1.2", "semver": "^6.1.0", "uuid": "^3.3.3" }, diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index fc04a297..57ea1527 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -3,7 +3,7 @@ import * as path from 'path' import * as utils from './internal/cacheUtils' import * as cacheHttpClient from './internal/cacheHttpClient' import {createTar, extractTar} from './internal/tar' -import {UploadOptions} from './options' +import {DownloadOptions, UploadOptions} from './options' export class ValidationError extends Error { constructor(message: string) { @@ -49,12 +49,14 @@ function checkKey(key: string): void { * @param paths a list of file paths to restore from the cache * @param primaryKey an explicit key for restoring the cache * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key + * @param downloadOptions cache download options * @returns string returns the key for the cache hit, otherwise returns undefined */ export async function restoreCache( paths: string[], primaryKey: string, - restoreKeys?: string[] + restoreKeys?: string[], + options?: DownloadOptions ): Promise { checkPaths(paths) @@ -92,7 +94,11 @@ export async function restoreCache( try { // Download the cache from the cache entry - await cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath) + await cacheHttpClient.downloadCache( + cacheEntry.archiveLocation, + archivePath, + options + ) const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath) core.info( diff --git a/packages/cache/src/internal/cacheHttpClient.ts b/packages/cache/src/internal/cacheHttpClient.ts index 21af8031..77954e94 100644 --- a/packages/cache/src/internal/cacheHttpClient.ts +++ b/packages/cache/src/internal/cacheHttpClient.ts @@ -1,18 +1,13 @@ import * as core from '@actions/core' 
-import {HttpClient, HttpCodes} from '@actions/http-client' +import {HttpClient} from '@actions/http-client' import {BearerCredentialHandler} from '@actions/http-client/auth' -import { - IHttpClientResponse, - IRequestOptions, - ITypedResponse -} from '@actions/http-client/interfaces' +import {IRequestOptions, ITypedResponse} from '@actions/http-client/interfaces' import * as crypto from 'crypto' import * as fs from 'fs' -import * as stream from 'stream' -import * as util from 'util' +import {URL} from 'url' import * as utils from './cacheUtils' -import {CompressionMethod, SocketTimeout} from './constants' +import {CompressionMethod} from './constants' import { ArtifactCacheEntry, InternalCacheOptions, @@ -20,36 +15,21 @@ import { ReserveCacheRequest, ReserveCacheResponse } from './contracts' -import {UploadOptions} from '../options' +import {downloadCacheHttpClient, downloadCacheStorageSDK} from './downloadUtils' +import { + DownloadOptions, + UploadOptions, + getDownloadOptions, + getUploadOptions +} from '../options' +import { + isSuccessStatusCode, + retryHttpClientResponse, + retryTypedResponse +} from './requestUtils' const versionSalt = '1.0' -function isSuccessStatusCode(statusCode?: number): boolean { - if (!statusCode) { - return false - } - return statusCode >= 200 && statusCode < 300 -} - -function isServerErrorStatusCode(statusCode?: number): boolean { - if (!statusCode) { - return true - } - return statusCode >= 500 -} - -function isRetryableStatusCode(statusCode?: number): boolean { - if (!statusCode) { - return false - } - const retryableStatusCodes = [ - HttpCodes.BadGateway, - HttpCodes.ServiceUnavailable, - HttpCodes.GatewayTimeout - ] - return retryableStatusCodes.includes(statusCode) -} - function getCacheApiUrl(resource: string): string { // Ideally we just use ACTIONS_CACHE_URL const baseUrl: string = ( @@ -110,75 +90,6 @@ export function getCacheVersion( .digest('hex') } -export async function retry( - name: string, - method: () => Promise, 
- getStatusCode: (arg0: T) => number | undefined, - maxAttempts = 2 -): Promise { - let response: T | undefined = undefined - let statusCode: number | undefined = undefined - let isRetryable = false - let errorMessage = '' - let attempt = 1 - - while (attempt <= maxAttempts) { - try { - response = await method() - statusCode = getStatusCode(response) - - if (!isServerErrorStatusCode(statusCode)) { - return response - } - - isRetryable = isRetryableStatusCode(statusCode) - errorMessage = `Cache service responded with ${statusCode}` - } catch (error) { - isRetryable = true - errorMessage = error.message - } - - core.debug( - `${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}` - ) - - if (!isRetryable) { - core.debug(`${name} - Error is not retryable`) - break - } - - attempt++ - } - - throw Error(`${name} failed: ${errorMessage}`) -} - -export async function retryTypedResponse( - name: string, - method: () => Promise>, - maxAttempts = 2 -): Promise> { - return await retry( - name, - method, - (response: ITypedResponse) => response.statusCode, - maxAttempts - ) -} - -export async function retryHttpClientResponse( - name: string, - method: () => Promise, - maxAttempts = 2 -): Promise { - return await retry( - name, - method, - (response: IHttpClientResponse) => response.message.statusCode, - maxAttempts - ) -} - export async function getCacheEntry( keys: string[], paths: string[], @@ -212,47 +123,23 @@ export async function getCacheEntry( return cacheResult } -async function pipeResponseToStream( - response: IHttpClientResponse, - output: NodeJS.WritableStream -): Promise { - const pipeline = util.promisify(stream.pipeline) - await pipeline(response.message, output) -} - export async function downloadCache( archiveLocation: string, - archivePath: string + archivePath: string, + options?: DownloadOptions ): Promise { - const writeStream = fs.createWriteStream(archivePath) - const httpClient = new HttpClient('actions/cache') - const 
downloadResponse = await retryHttpClientResponse( - 'downloadCache', - async () => httpClient.get(archiveLocation) - ) + const archiveUrl = new URL(archiveLocation) + const downloadOptions = getDownloadOptions(options) - // Abort download if no traffic received over the socket. - downloadResponse.message.socket.setTimeout(SocketTimeout, () => { - downloadResponse.message.destroy() - core.debug(`Aborting download, socket timed out after ${SocketTimeout} ms`) - }) - - await pipeResponseToStream(downloadResponse, writeStream) - - // Validate download size. - const contentLengthHeader = downloadResponse.message.headers['content-length'] - - if (contentLengthHeader) { - const expectedLength = parseInt(contentLengthHeader) - const actualLength = utils.getArchiveFileSizeIsBytes(archivePath) - - if (actualLength !== expectedLength) { - throw new Error( - `Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}` - ) - } + if ( + downloadOptions.useAzureSdk && + archiveUrl.hostname.endsWith('.blob.core.windows.net') + ) { + // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability. + await downloadCacheStorageSDK(archiveLocation, archivePath, downloadOptions) } else { - core.debug('Unable to validate download, no Content-Length header') + // Otherwise, download using the Actions http-client. + await downloadCacheHttpClient(archiveLocation, archivePath) } } @@ -329,10 +216,16 @@ async function uploadFile( const fileSize = fs.statSync(archivePath).size const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`) const fd = fs.openSync(archivePath, 'r') + const uploadOptions = getUploadOptions(options) - const concurrency = options?.uploadConcurrency ?? 4 // # of HTTP requests in parallel - const MAX_CHUNK_SIZE = options?.uploadChunkSize ?? 
32 * 1024 * 1024 // 32 MB Chunks - core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`) + const concurrency = utils.assertDefined( + 'uploadConcurrency', + uploadOptions.uploadConcurrency + ) + const maxChunkSize = utils.assertDefined( + 'uploadChunkSize', + uploadOptions.uploadChunkSize + ) const parallelUploads = [...new Array(concurrency).keys()] core.debug('Awaiting all uploads') @@ -342,10 +235,10 @@ async function uploadFile( await Promise.all( parallelUploads.map(async () => { while (offset < fileSize) { - const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE) + const chunkSize = Math.min(fileSize - offset, maxChunkSize) const start = offset const end = offset + chunkSize - 1 - offset += MAX_CHUNK_SIZE + offset += maxChunkSize await uploadChunk( httpClient, @@ -360,7 +253,7 @@ async function uploadFile( }) .on('error', error => { throw new Error( - `Cache upload failed because file read failed with ${error.Message}` + `Cache upload failed because file read failed with ${error.message}` ) }), start, diff --git a/packages/cache/src/internal/cacheUtils.ts b/packages/cache/src/internal/cacheUtils.ts index b2793695..0628ffa4 100644 --- a/packages/cache/src/internal/cacheUtils.ts +++ b/packages/cache/src/internal/cacheUtils.ts @@ -113,3 +113,11 @@ export async function isGnuTarInstalled(): Promise<boolean> { const versionOutput = await getVersion('tar') return versionOutput.toLowerCase().includes('gnu tar') } + +export function assertDefined<T>(name: string, value?: T): T { + if (value === undefined) { + throw Error(`Expected ${name} but value was undefined`) + } + + return value +} diff --git a/packages/cache/src/internal/downloadUtils.ts b/packages/cache/src/internal/downloadUtils.ts new file mode 100644 index 00000000..839b1fa3 --- /dev/null +++ b/packages/cache/src/internal/downloadUtils.ts @@ -0,0 +1,134 @@ +import * as core from '@actions/core' +import {HttpClient} from '@actions/http-client' +import {IHttpClientResponse} from 
'@actions/http-client/interfaces' +import {BlockBlobClient} from '@azure/storage-blob' +import * as buffer from 'buffer' +import * as fs from 'fs' +import * as stream from 'stream' +import * as util from 'util' + +import * as utils from './cacheUtils' +import {SocketTimeout} from './constants' +import {DownloadOptions} from '../options' +import {retryHttpClientResponse} from './requestUtils' + +/** + * Pipes the body of a HTTP response to a stream + * + * @param response the HTTP response + * @param output the writable stream + */ +async function pipeResponseToStream( + response: IHttpClientResponse, + output: NodeJS.WritableStream +): Promise { + const pipeline = util.promisify(stream.pipeline) + await pipeline(response.message, output) +} + +/** + * Download the cache using the Actions toolkit http-client + * + * @param archiveLocation the URL for the cache + * @param archivePath the local path where the cache is saved + */ +export async function downloadCacheHttpClient( + archiveLocation: string, + archivePath: string +): Promise { + const writeStream = fs.createWriteStream(archivePath) + const httpClient = new HttpClient('actions/cache') + const downloadResponse = await retryHttpClientResponse( + 'downloadCache', + async () => httpClient.get(archiveLocation) + ) + + // Abort download if no traffic received over the socket. + downloadResponse.message.socket.setTimeout(SocketTimeout, () => { + downloadResponse.message.destroy() + core.debug(`Aborting download, socket timed out after ${SocketTimeout} ms`) + }) + + await pipeResponseToStream(downloadResponse, writeStream) + + // Validate download size. + const contentLengthHeader = downloadResponse.message.headers['content-length'] + + if (contentLengthHeader) { + const expectedLength = parseInt(contentLengthHeader) + const actualLength = utils.getArchiveFileSizeIsBytes(archivePath) + + if (actualLength !== expectedLength) { + throw new Error( + `Incomplete download. 
Expected file size: ${expectedLength}, actual file size: ${actualLength}` + ) + } + } else { + core.debug('Unable to validate download, no Content-Length header') + } +} + +/** + * Download the cache using the Azure Storage SDK. Only call this method if the + * URL points to an Azure Storage endpoint. + * + * @param archiveLocation the URL for the cache + * @param archivePath the local path where the cache is saved + * @param options the download options with the defaults set + */ +export async function downloadCacheStorageSDK( + archiveLocation: string, + archivePath: string, + options: DownloadOptions +): Promise { + const client = new BlockBlobClient(archiveLocation, undefined, { + retryOptions: { + // Override the timeout used when downloading each 4 MB chunk + // The default is 2 min / MB, which is way too slow + tryTimeoutInMs: options.timeoutInMs + } + }) + + const properties = await client.getProperties() + const contentLength = properties.contentLength ?? -1 + + if (contentLength < 0) { + // We should never hit this condition, but just in case fall back to downloading the + // file as one large stream + core.debug( + 'Unable to determine content length, downloading file with http-client...' 
+ ) + + await downloadCacheHttpClient(archiveLocation, archivePath) + } else { + // Use downloadToBuffer for faster downloads, since internally it splits the + // file into 4 MB chunks which can then be parallelized and retried independently + // + // If the file exceeds the buffer maximum length (~1 GB on 32-bit systems and ~2 GB + // on 64-bit systems), split the download into multiple segments + const maxSegmentSize = buffer.constants.MAX_LENGTH + let offset = 0 + + const fd = fs.openSync(archivePath, 'w') + + try { + while (offset < contentLength) { + const segmentSize = Math.min(maxSegmentSize, contentLength - offset) + core.debug( + `Downloading segment at offset ${offset} with length ${segmentSize}...` + ) + + const result = await client.downloadToBuffer(offset, segmentSize, { + concurrency: options.downloadConcurrency + }) + + fs.writeFileSync(fd, result) + + core.debug(`Finished segment at offset ${offset}`) + offset += segmentSize + } + } finally { + fs.closeSync(fd) + } + } +} diff --git a/packages/cache/src/internal/requestUtils.ts b/packages/cache/src/internal/requestUtils.ts new file mode 100644 index 00000000..c72728cb --- /dev/null +++ b/packages/cache/src/internal/requestUtils.ts @@ -0,0 +1,101 @@ +import * as core from '@actions/core' +import {HttpCodes} from '@actions/http-client' +import { + IHttpClientResponse, + ITypedResponse +} from '@actions/http-client/interfaces' + +export function isSuccessStatusCode(statusCode?: number): boolean { + if (!statusCode) { + return false + } + return statusCode >= 200 && statusCode < 300 +} + +export function isServerErrorStatusCode(statusCode?: number): boolean { + if (!statusCode) { + return true + } + return statusCode >= 500 +} + +export function isRetryableStatusCode(statusCode?: number): boolean { + if (!statusCode) { + return false + } + const retryableStatusCodes = [ + HttpCodes.BadGateway, + HttpCodes.ServiceUnavailable, + HttpCodes.GatewayTimeout + ] + return 
retryableStatusCodes.includes(statusCode) +} + +export async function retry( + name: string, + method: () => Promise, + getStatusCode: (arg0: T) => number | undefined, + maxAttempts = 2 +): Promise { + let response: T | undefined = undefined + let statusCode: number | undefined = undefined + let isRetryable = false + let errorMessage = '' + let attempt = 1 + + while (attempt <= maxAttempts) { + try { + response = await method() + statusCode = getStatusCode(response) + + if (!isServerErrorStatusCode(statusCode)) { + return response + } + + isRetryable = isRetryableStatusCode(statusCode) + errorMessage = `Cache service responded with ${statusCode}` + } catch (error) { + isRetryable = true + errorMessage = error.message + } + + core.debug( + `${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}` + ) + + if (!isRetryable) { + core.debug(`${name} - Error is not retryable`) + break + } + + attempt++ + } + + throw Error(`${name} failed: ${errorMessage}`) +} + +export async function retryTypedResponse( + name: string, + method: () => Promise>, + maxAttempts = 2 +): Promise> { + return await retry( + name, + method, + (response: ITypedResponse) => response.statusCode, + maxAttempts + ) +} + +export async function retryHttpClientResponse( + name: string, + method: () => Promise, + maxAttempts = 2 +): Promise { + return await retry( + name, + method, + (response: IHttpClientResponse) => response.message.statusCode, + maxAttempts + ) +} diff --git a/packages/cache/src/options.ts b/packages/cache/src/options.ts index 97441c1e..94642fd3 100644 --- a/packages/cache/src/options.ts +++ b/packages/cache/src/options.ts @@ -1,3 +1,5 @@ +import * as core from '@actions/core' + /** * Options to control cache upload */ @@ -15,3 +17,93 @@ export interface UploadOptions { */ uploadChunkSize?: number } + +/** + * Options to control cache download + */ +export interface DownloadOptions { + /** + * Indicates whether to use the Azure Blob SDK to download caches + * 
that are stored on Azure Blob Storage to improve reliability and + * performance + * + * @default true + */ + useAzureSdk?: boolean + + /** + * Number of parallel downloads (this option only applies when using + * the Azure SDK) + * + * @default 8 + */ + downloadConcurrency?: number + + /** + * Maximum time for each download request, in milliseconds (this + * option only applies when using the Azure SDK) + * + * @default 30000 + */ + timeoutInMs?: number +} + +/** + * Returns a copy of the upload options with defaults filled in. + * + * @param copy the original upload options + */ +export function getUploadOptions(copy?: UploadOptions): UploadOptions { + const result: UploadOptions = { + uploadConcurrency: 4, + uploadChunkSize: 32 * 1024 * 1024 + } + + if (copy) { + if (typeof copy.uploadConcurrency === 'number') { + result.uploadConcurrency = copy.uploadConcurrency + } + + if (typeof copy.uploadChunkSize === 'number') { + result.uploadChunkSize = copy.uploadChunkSize + } + } + + core.debug(`Upload concurrency: ${result.uploadConcurrency}`) + core.debug(`Upload chunk size: ${result.uploadChunkSize}`) + + return result +} + +/** + * Returns a copy of the download options with defaults filled in. + * + * @param copy the original download options + */ +export function getDownloadOptions(copy?: DownloadOptions): DownloadOptions { + const result: DownloadOptions = { + useAzureSdk: true, + downloadConcurrency: 8, + timeoutInMs: 30000 + } + + if (copy) { + if (typeof copy.useAzureSdk === 'boolean') { + result.useAzureSdk = copy.useAzureSdk + } + + if (typeof copy.downloadConcurrency === 'number') { + result.downloadConcurrency = copy.downloadConcurrency + } + + if (typeof copy.timeoutInMs === 'number') { + result.timeoutInMs = copy.timeoutInMs + } + } + + core.debug(`Use Azure SDK: ${result.useAzureSdk}`) + core.debug(`Download concurrency: ${result.downloadConcurrency}`) + core.debug(`Request timeout (ms): ${result.timeoutInMs}`) + + return result +}