From 32dbccb77b8a8deddcb93ca606c28a67335e2beb Mon Sep 17 00:00:00 2001
From: Bassem Dghaidi <568794+Link-@users.noreply.github.com>
Date: Thu, 23 May 2024 07:25:17 -0700
Subject: [PATCH 001/108] Add debug message

---
 packages/cache/src/internal/cacheHttpClient.ts | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/packages/cache/src/internal/cacheHttpClient.ts b/packages/cache/src/internal/cacheHttpClient.ts
index f96ca381..40add448 100644
--- a/packages/cache/src/internal/cacheHttpClient.ts
+++ b/packages/cache/src/internal/cacheHttpClient.ts
@@ -111,6 +111,9 @@ export async function getCacheEntry(
     options?.compressionMethod,
     options?.enableCrossOsArchive
   )
+
+  core.debug(`We're running from the abyss`)
+
   const resource = `cache?keys=${encodeURIComponent(
     keys.join(',')
   )}&version=${version}`

From 264230c2c54080a6c4237a2e1e972e5ef775f5d0 Mon Sep 17 00:00:00 2001
From: Bassem Dghaidi <568794+Link-@users.noreply.github.com>
Date: Thu, 23 May 2024 09:04:37 -0700
Subject: [PATCH 002/108] add debug

---
 package-lock.json                          | 2678 ++++++++++++++++-
 package.json                               |   16 +-
 packages/attest/package-lock.json          |    4 +-
 .../cache/src/internal/cacheHttpClient.ts  |    2 +-
 4 files changed, 2546 insertions(+), 154 deletions(-)

diff --git a/package-lock.json b/package-lock.json
index 7eeae1d9..88646d46 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -5,6 +5,20 @@
   "packages": {
     "": {
       "name": "root",
+      "dependencies": {
+        "@actions/artifact": "^2.1.7",
+        "@actions/attest": "^1.2.1",
+        "@actions/cache": "^3.2.4",
+        "@actions/core": "^1.10.1",
+        "@actions/exec": "^1.1.1",
+        "@actions/github": "^6.0.0",
+        "@actions/glob": "^0.4.0",
+        "@actions/http-client": "^2.2.1",
+        "@actions/io": "^1.1.3",
+        "@actions/tool-cache": "^2.0.1",
+        "tunnel": "^0.0.6",
+        "undici": "^6.18.1"
+      },
       "devDependencies": {
         "@types/jest": "^29.5.4",
         "@types/node": "^20.5.7",
@@ -33,6 +47,626 @@
         "node": ">=0.10.0"
       }
     },
+    "node_modules/@actions/artifact": {
+      "version": "2.1.7",
+      "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-2.1.7.tgz",
+      "integrity": "sha512-iIFsTPZnb182dBc+Is5v7ZqojC4ydO8Ru4/PD8Azg2diV//fdW3H6biEH/utUlNhwfOuHxZpC/QSQsU5KDEuuw==",
+      "dependencies": {
+        "@actions/core": "^1.10.0",
+        "@actions/github": "^5.1.1",
+        "@actions/http-client": "^2.1.0",
+        "@azure/storage-blob": "^12.15.0",
+        "@octokit/core": "^3.5.1",
+        "@octokit/plugin-request-log": "^1.0.4",
+        "@octokit/plugin-retry": "^3.0.9",
+        "@octokit/request-error": "^5.0.0",
+        "@protobuf-ts/plugin": "^2.2.3-alpha.1",
+        "archiver": "^7.0.1",
+        "crypto": "^1.0.1",
+        "jwt-decode": "^3.1.2",
+        "twirp-ts": "^2.5.0",
+        "unzip-stream": "^0.3.1"
+      }
+    },
+    "node_modules/@actions/artifact/node_modules/@actions/github": {
+      "version": "5.1.1",
+      "resolved": "https://registry.npmjs.org/@actions/github/-/github-5.1.1.tgz",
+      "integrity": "sha512-Nk59rMDoJaV+mHCOJPXuvB1zIbomlKS0dmSIqPGxd0enAXBnOfn4VWF+CGtRCwXZG9Epa54tZA7VIRlJDS8A6g==",
+      "dependencies": {
+        "@actions/http-client": "^2.0.1",
+        "@octokit/core": "^3.6.0",
+        "@octokit/plugin-paginate-rest": "^2.17.0",
+        "@octokit/plugin-rest-endpoint-methods": "^5.13.0"
+      }
+    },
+    "node_modules/@actions/artifact/node_modules/@octokit/auth-token": {
+      "version": "2.5.0",
+      "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-2.5.0.tgz",
+      "integrity": "sha512-r5FVUJCOLl19AxiuZD2VRZ/ORjp/4IN98Of6YJoJOkY75CIBuYfmiNHGrDwXr+aLGG55igl9QrxX3hbiXlLb+g==",
+      "dependencies": {
+        "@octokit/types": "^6.0.3"
+      }
+    },
+    "node_modules/@actions/artifact/node_modules/@octokit/core": {
+      "version": 
"3.6.0", + "resolved": "https://registry.npmjs.org/@octokit/core/-/core-3.6.0.tgz", + "integrity": "sha512-7RKRKuA4xTjMhY+eG3jthb3hlZCsOwg3rztWh75Xc+ShDWOfDDATWbeZpAHBNRpm4Tv9WgBMOy1zEJYXG6NJ7Q==", + "dependencies": { + "@octokit/auth-token": "^2.4.4", + "@octokit/graphql": "^4.5.8", + "@octokit/request": "^5.6.3", + "@octokit/request-error": "^2.0.5", + "@octokit/types": "^6.0.3", + "before-after-hook": "^2.2.0", + "universal-user-agent": "^6.0.0" + } + }, + "node_modules/@actions/artifact/node_modules/@octokit/core/node_modules/@octokit/request-error": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-2.1.0.tgz", + "integrity": "sha512-1VIvgXxs9WHSjicsRwq8PlR2LR2x6DwsJAaFgzdi0JfJoGSO8mYI/cHJQ+9FbN21aa+DrgNLnwObmyeSC8Rmpg==", + "dependencies": { + "@octokit/types": "^6.0.3", + "deprecation": "^2.0.0", + "once": "^1.4.0" + } + }, + "node_modules/@actions/artifact/node_modules/@octokit/endpoint": { + "version": "6.0.12", + "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-6.0.12.tgz", + "integrity": "sha512-lF3puPwkQWGfkMClXb4k/eUT/nZKQfxinRWJrdZaJO85Dqwo/G0yOC434Jr2ojwafWJMYqFGFa5ms4jJUgujdA==", + "dependencies": { + "@octokit/types": "^6.0.3", + "is-plain-object": "^5.0.0", + "universal-user-agent": "^6.0.0" + } + }, + "node_modules/@actions/artifact/node_modules/@octokit/graphql": { + "version": "4.8.0", + "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-4.8.0.tgz", + "integrity": "sha512-0gv+qLSBLKF0z8TKaSKTsS39scVKF9dbMxJpj3U0vC7wjNWFuIpL/z76Qe2fiuCbDRcJSavkXsVtMS6/dtQQsg==", + "dependencies": { + "@octokit/request": "^5.6.0", + "@octokit/types": "^6.0.3", + "universal-user-agent": "^6.0.0" + } + }, + "node_modules/@actions/artifact/node_modules/@octokit/openapi-types": { + "version": "12.11.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-12.11.0.tgz", + "integrity": "sha512-VsXyi8peyRq9PqIz/tpqiL2w3w80OgVMwBHltTml3LmVvXiphgeqmY9mvBw9Wu7e0QWk/fqD37ux8yP5uVekyQ==" + }, + "node_modules/@actions/artifact/node_modules/@octokit/plugin-paginate-rest": { + "version": "2.21.3", + "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-2.21.3.tgz", + "integrity": "sha512-aCZTEf0y2h3OLbrgKkrfFdjRL6eSOo8komneVQJnYecAxIej7Bafor2xhuDJOIFau4pk0i/P28/XgtbyPF0ZHw==", + "dependencies": { + "@octokit/types": "^6.40.0" + }, + "peerDependencies": { + "@octokit/core": ">=2" + } + }, + "node_modules/@actions/artifact/node_modules/@octokit/plugin-rest-endpoint-methods": { + "version": "5.16.2", + "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-5.16.2.tgz", + "integrity": "sha512-8QFz29Fg5jDuTPXVtey05BLm7OB+M8fnvE64RNegzX7U+5NUXcOcnpTIK0YfSHBg8gYd0oxIq3IZTe9SfPZiRw==", + "dependencies": { + "@octokit/types": "^6.39.0", + "deprecation": "^2.3.1" + }, + "peerDependencies": { + "@octokit/core": ">=3" + } + }, + "node_modules/@actions/artifact/node_modules/@octokit/request": { + "version": "5.6.3", + "resolved": "https://registry.npmjs.org/@octokit/request/-/request-5.6.3.tgz", + "integrity": "sha512-bFJl0I1KVc9jYTe9tdGGpAMPy32dLBXXo1dS/YwSCTL/2nd9XeHsY616RE3HPXDVk+a+dBuzyz5YdlXwcDTr2A==", + "dependencies": { + "@octokit/endpoint": "^6.0.1", + "@octokit/request-error": "^2.1.0", + "@octokit/types": "^6.16.1", + "is-plain-object": "^5.0.0", + "node-fetch": "^2.6.7", + "universal-user-agent": "^6.0.0" + } + }, + "node_modules/@actions/artifact/node_modules/@octokit/request-error": { + 
"version": "5.1.0", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.0.tgz", + "integrity": "sha512-GETXfE05J0+7H2STzekpKObFe765O5dlAKUTLNGeH+x47z7JjXHfsHKo5z21D/o/IOZTUEI6nyWyR+bZVP/n5Q==", + "dependencies": { + "@octokit/types": "^13.1.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@actions/artifact/node_modules/@octokit/request-error/node_modules/@octokit/openapi-types": { + "version": "22.2.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz", + "integrity": "sha512-QBhVjcUa9W7Wwhm6DBFu6ZZ+1/t/oYxqc2tp81Pi41YNuJinbFRx8B133qVOrAaBbF7D/m0Et6f9/pZt9Rc+tg==" + }, + "node_modules/@actions/artifact/node_modules/@octokit/request-error/node_modules/@octokit/types": { + "version": "13.5.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.5.0.tgz", + "integrity": "sha512-HdqWTf5Z3qwDVlzCrP8UJquMwunpDiMPt5er+QjGzL4hqr/vBVY/MauQgS1xWxCDT1oMx1EULyqxncdCY/NVSQ==", + "dependencies": { + "@octokit/openapi-types": "^22.2.0" + } + }, + "node_modules/@actions/artifact/node_modules/@octokit/request/node_modules/@octokit/request-error": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-2.1.0.tgz", + "integrity": "sha512-1VIvgXxs9WHSjicsRwq8PlR2LR2x6DwsJAaFgzdi0JfJoGSO8mYI/cHJQ+9FbN21aa+DrgNLnwObmyeSC8Rmpg==", + "dependencies": { + "@octokit/types": "^6.0.3", + "deprecation": "^2.0.0", + "once": "^1.4.0" + } + }, + "node_modules/@actions/artifact/node_modules/@octokit/types": { + "version": "6.41.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-6.41.0.tgz", + "integrity": "sha512-eJ2jbzjdijiL3B4PrSQaSjuF2sPEQPVCPzBvTHJD9Nz+9dw2SGH4K4xeQJ77YfTq5bRQ+bD8wT11JbeDPmxmGg==", + "dependencies": { + "@octokit/openapi-types": "^12.11.0" + } + }, + "node_modules/@actions/attest": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@actions/attest/-/attest-1.2.1.tgz", + "integrity": "sha512-ZLfmO6o2x3UL2BG++oIHMPx5kApWr8Uy1cgiiafXpHgamsqFUPjUtcp0/gpOaXkxUZftdVno7NwBTisw8qr9UA==", + "dependencies": { + "@actions/core": "^1.10.1", + "@actions/github": "^6.0.0", + "@actions/http-client": "^2.2.1", + "@octokit/plugin-retry": "^6.0.1", + "@sigstore/bundle": "^2.3.0", + "@sigstore/sign": "^2.3.0", + "jsonwebtoken": "^9.0.2", + "jwks-rsa": "^3.1.0" + } + }, + "node_modules/@actions/attest/node_modules/@octokit/auth-token": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-5.1.1.tgz", + "integrity": "sha512-rh3G3wDO8J9wSjfI436JUKzHIxq8NaiL0tVeB2aXmG6p/9859aUOAjA9pmSPNGGZxfwmaJ9ozOJImuNVJdpvbA==", + "peer": true, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@actions/attest/node_modules/@octokit/core": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/@octokit/core/-/core-6.1.2.tgz", + "integrity": "sha512-hEb7Ma4cGJGEUNOAVmyfdB/3WirWMg5hDuNFVejGEDFqupeOysLc2sG6HJxY2etBp5YQu5Wtxwi020jS9xlUwg==", + "peer": true, + "dependencies": { + "@octokit/auth-token": "^5.0.0", + "@octokit/graphql": "^8.0.0", + "@octokit/request": "^9.0.0", + "@octokit/request-error": "^6.0.1", + "@octokit/types": "^13.0.0", + "before-after-hook": "^3.0.2", + "universal-user-agent": "^7.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@actions/attest/node_modules/@octokit/endpoint": { + "version": "10.1.1", + "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-10.1.1.tgz", + "integrity": 
"sha512-JYjh5rMOwXMJyUpj028cu0Gbp7qe/ihxfJMLc8VZBMMqSwLgOxDI1911gV4Enl1QSavAQNJcwmwBF9M0VvLh6Q==", + "peer": true, + "dependencies": { + "@octokit/types": "^13.0.0", + "universal-user-agent": "^7.0.2" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@actions/attest/node_modules/@octokit/graphql": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-8.1.1.tgz", + "integrity": "sha512-ukiRmuHTi6ebQx/HFRCXKbDlOh/7xEV6QUXaE7MJEKGNAncGI/STSbOkl12qVXZrfZdpXctx5O9X1AIaebiDBg==", + "peer": true, + "dependencies": { + "@octokit/request": "^9.0.0", + "@octokit/types": "^13.0.0", + "universal-user-agent": "^7.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@actions/attest/node_modules/@octokit/openapi-types": { + "version": "22.2.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz", + "integrity": "sha512-QBhVjcUa9W7Wwhm6DBFu6ZZ+1/t/oYxqc2tp81Pi41YNuJinbFRx8B133qVOrAaBbF7D/m0Et6f9/pZt9Rc+tg==" + }, + "node_modules/@actions/attest/node_modules/@octokit/plugin-retry": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/@octokit/plugin-retry/-/plugin-retry-6.0.1.tgz", + "integrity": "sha512-SKs+Tz9oj0g4p28qkZwl/topGcb0k0qPNX/i7vBKmDsjoeqnVfFUquqrE/O9oJY7+oLzdCtkiWSXLpLjvl6uog==", + "dependencies": { + "@octokit/request-error": "^5.0.0", + "@octokit/types": "^12.0.0", + "bottleneck": "^2.15.3" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@octokit/core": ">=5" + } + }, + "node_modules/@actions/attest/node_modules/@octokit/plugin-retry/node_modules/@octokit/request-error": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.0.tgz", + "integrity": "sha512-GETXfE05J0+7H2STzekpKObFe765O5dlAKUTLNGeH+x47z7JjXHfsHKo5z21D/o/IOZTUEI6nyWyR+bZVP/n5Q==", + "dependencies": { + "@octokit/types": "^13.1.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@actions/attest/node_modules/@octokit/plugin-retry/node_modules/@octokit/request-error/node_modules/@octokit/types": { + "version": "13.5.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.5.0.tgz", + "integrity": "sha512-HdqWTf5Z3qwDVlzCrP8UJquMwunpDiMPt5er+QjGzL4hqr/vBVY/MauQgS1xWxCDT1oMx1EULyqxncdCY/NVSQ==", + "dependencies": { + "@octokit/openapi-types": "^22.2.0" + } + }, + "node_modules/@actions/attest/node_modules/@octokit/plugin-retry/node_modules/@octokit/types": { + "version": "12.6.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-12.6.0.tgz", + "integrity": "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw==", + "dependencies": { + "@octokit/openapi-types": "^20.0.0" + } + }, + "node_modules/@actions/attest/node_modules/@octokit/plugin-retry/node_modules/@octokit/types/node_modules/@octokit/openapi-types": { + "version": "20.0.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-20.0.0.tgz", + "integrity": "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA==" + }, + "node_modules/@actions/attest/node_modules/@octokit/request": { + "version": "9.1.1", + "resolved": "https://registry.npmjs.org/@octokit/request/-/request-9.1.1.tgz", + "integrity": "sha512-pyAguc0p+f+GbQho0uNetNQMmLG1e80WjkIaqqgUkihqUp0boRU6nKItXO4VWnr+nbZiLGEyy4TeKRwqaLvYgw==", + "peer": true, + "dependencies": { + "@octokit/endpoint": "^10.0.0", + 
"@octokit/request-error": "^6.0.1", + "@octokit/types": "^13.1.0", + "universal-user-agent": "^7.0.2" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@actions/attest/node_modules/@octokit/request-error": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-6.1.1.tgz", + "integrity": "sha512-1mw1gqT3fR/WFvnoVpY/zUM2o/XkMs/2AszUUG9I69xn0JFLv6PGkPhNk5lbfvROs79wiS0bqiJNxfCZcRJJdg==", + "peer": true, + "dependencies": { + "@octokit/types": "^13.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@actions/attest/node_modules/@octokit/types": { + "version": "13.5.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.5.0.tgz", + "integrity": "sha512-HdqWTf5Z3qwDVlzCrP8UJquMwunpDiMPt5er+QjGzL4hqr/vBVY/MauQgS1xWxCDT1oMx1EULyqxncdCY/NVSQ==", + "peer": true, + "dependencies": { + "@octokit/openapi-types": "^22.2.0" + } + }, + "node_modules/@actions/attest/node_modules/before-after-hook": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-3.0.2.tgz", + "integrity": "sha512-Nik3Sc0ncrMK4UUdXQmAnRtzmNQTAAXmXIopizwZ1W1t8QmfJj+zL4OA2I7XPTPW5z5TDqv4hRo/JzouDJnX3A==", + "peer": true + }, + "node_modules/@actions/attest/node_modules/universal-user-agent": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.2.tgz", + "integrity": "sha512-0JCqzSKnStlRRQfCdowvqy3cy0Dvtlb8xecj/H8JFZuCze4rwjPZQOgvFvn0Ws/usCHQFGpyr+pB9adaGwXn4Q==", + "peer": true + }, + "node_modules/@actions/cache": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.2.4.tgz", + "integrity": "sha512-RuHnwfcDagtX+37s0ZWy7clbOfnZ7AlDJQ7k/9rzt2W4Gnwde3fa/qjSjVuz4vLcLIpc7fUob27CMrqiWZytYA==", + "dependencies": { + "@actions/core": "^1.10.0", + "@actions/exec": "^1.0.1", + "@actions/glob": "^0.1.0", + "@actions/http-client": "^2.1.1", + "@actions/io": "^1.0.1", + "@azure/abort-controller": "^1.1.0", + "@azure/ms-rest-js": "^2.6.0", + "@azure/storage-blob": "^12.13.0", + "semver": "^6.3.1", + "uuid": "^3.3.3" + } + }, + "node_modules/@actions/cache/node_modules/@actions/glob": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/@actions/glob/-/glob-0.1.2.tgz", + "integrity": "sha512-SclLR7Ia5sEqjkJTPs7Sd86maMDw43p769YxBOxvPvEWuPEhpAnBsQfENOpXjFYMmhCqd127bmf+YdvJqVqR4A==", + "dependencies": { + "@actions/core": "^1.2.6", + "minimatch": "^3.0.4" + } + }, + "node_modules/@actions/cache/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/@actions/cache/node_modules/uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", + "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. 
See https://v8.dev/blog/math-random for details.", + "bin": { + "uuid": "bin/uuid" + } + }, + "node_modules/@actions/core": { + "version": "1.10.1", + "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.1.tgz", + "integrity": "sha512-3lBR9EDAY+iYIpTnTIXmWcNbX3T2kCkAEQGIQx4NVQ0575nk2k3GRZDTPQG+vVtS2izSLmINlxXf0uLtnrTP+g==", + "dependencies": { + "@actions/http-client": "^2.0.1", + "uuid": "^8.3.2" + } + }, + "node_modules/@actions/exec": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.1.1.tgz", + "integrity": "sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w==", + "dependencies": { + "@actions/io": "^1.0.1" + } + }, + "node_modules/@actions/github": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/@actions/github/-/github-6.0.0.tgz", + "integrity": "sha512-alScpSVnYmjNEXboZjarjukQEzgCRmjMv6Xj47fsdnqGS73bjJNDpiiXmp8jr0UZLdUB6d9jW63IcmddUP+l0g==", + "dependencies": { + "@actions/http-client": "^2.2.0", + "@octokit/core": "^5.0.1", + "@octokit/plugin-paginate-rest": "^9.0.0", + "@octokit/plugin-rest-endpoint-methods": "^10.0.0" + } + }, + "node_modules/@actions/github/node_modules/@octokit/auth-token": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-4.0.0.tgz", + "integrity": "sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA==", + "engines": { + "node": ">= 18" + } + }, + "node_modules/@actions/github/node_modules/@octokit/core": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@octokit/core/-/core-5.2.0.tgz", + "integrity": "sha512-1LFfa/qnMQvEOAdzlQymH0ulepxbxnCYAKJZfMci/5XJyIHWgEYnDmgnKakbTh7CH2tFQ5O60oYDvns4i9RAIg==", + "dependencies": { + "@octokit/auth-token": "^4.0.0", + "@octokit/graphql": "^7.1.0", + "@octokit/request": "^8.3.1", + "@octokit/request-error": "^5.1.0", + "@octokit/types": "^13.0.0", + "before-after-hook": "^2.2.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@actions/github/node_modules/@octokit/endpoint": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-9.0.5.tgz", + "integrity": "sha512-ekqR4/+PCLkEBF6qgj8WqJfvDq65RH85OAgrtnVp1mSxaXF03u2xW/hUdweGS5654IlC0wkNYC18Z50tSYTAFw==", + "dependencies": { + "@octokit/types": "^13.1.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@actions/github/node_modules/@octokit/graphql": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-7.1.0.tgz", + "integrity": "sha512-r+oZUH7aMFui1ypZnAvZmn0KSqAUgE1/tUXIWaqUCa1758ts/Jio84GZuzsvUkme98kv0WFY8//n0J1Z+vsIsQ==", + "dependencies": { + "@octokit/request": "^8.3.0", + "@octokit/types": "^13.0.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@actions/github/node_modules/@octokit/openapi-types": { + "version": "22.2.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz", + "integrity": "sha512-QBhVjcUa9W7Wwhm6DBFu6ZZ+1/t/oYxqc2tp81Pi41YNuJinbFRx8B133qVOrAaBbF7D/m0Et6f9/pZt9Rc+tg==" + }, + "node_modules/@actions/github/node_modules/@octokit/plugin-paginate-rest": { + "version": "9.2.1", + "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-9.2.1.tgz", + "integrity": 
"sha512-wfGhE/TAkXZRLjksFXuDZdmGnJQHvtU/joFQdweXUgzo1XwvBCD4o4+75NtFfjfLK5IwLf9vHTfSiU3sLRYpRw==", + "dependencies": { + "@octokit/types": "^12.6.0" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@octokit/core": "5" + } + }, + "node_modules/@actions/github/node_modules/@octokit/plugin-paginate-rest/node_modules/@octokit/openapi-types": { + "version": "20.0.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-20.0.0.tgz", + "integrity": "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA==" + }, + "node_modules/@actions/github/node_modules/@octokit/plugin-paginate-rest/node_modules/@octokit/types": { + "version": "12.6.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-12.6.0.tgz", + "integrity": "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw==", + "dependencies": { + "@octokit/openapi-types": "^20.0.0" + } + }, + "node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods": { + "version": "10.4.1", + "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-10.4.1.tgz", + "integrity": "sha512-xV1b+ceKV9KytQe3zCVqjg+8GTGfDYwaT1ATU5isiUyVtlVAO3HNdzpS4sr4GBx4hxQ46s7ITtZrAsxG22+rVg==", + "dependencies": { + "@octokit/types": "^12.6.0" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@octokit/core": "5" + } + }, + "node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/openapi-types": { + "version": "20.0.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-20.0.0.tgz", + "integrity": "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA==" + }, + "node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types": { + "version": "12.6.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-12.6.0.tgz", + "integrity": "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw==", + "dependencies": { + "@octokit/openapi-types": "^20.0.0" + } + }, + "node_modules/@actions/github/node_modules/@octokit/request": { + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/@octokit/request/-/request-8.4.0.tgz", + "integrity": "sha512-9Bb014e+m2TgBeEJGEbdplMVWwPmL1FPtggHQRkV+WVsMggPtEkLKPlcVYm/o8xKLkpJ7B+6N8WfQMtDLX2Dpw==", + "dependencies": { + "@octokit/endpoint": "^9.0.1", + "@octokit/request-error": "^5.1.0", + "@octokit/types": "^13.1.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@actions/github/node_modules/@octokit/request-error": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.0.tgz", + "integrity": "sha512-GETXfE05J0+7H2STzekpKObFe765O5dlAKUTLNGeH+x47z7JjXHfsHKo5z21D/o/IOZTUEI6nyWyR+bZVP/n5Q==", + "dependencies": { + "@octokit/types": "^13.1.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@actions/github/node_modules/@octokit/types": { + "version": "13.5.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.5.0.tgz", + "integrity": "sha512-HdqWTf5Z3qwDVlzCrP8UJquMwunpDiMPt5er+QjGzL4hqr/vBVY/MauQgS1xWxCDT1oMx1EULyqxncdCY/NVSQ==", + "dependencies": { + "@octokit/openapi-types": "^22.2.0" + } + }, + "node_modules/@actions/glob": { + 
"version": "0.4.0", + "resolved": "https://registry.npmjs.org/@actions/glob/-/glob-0.4.0.tgz", + "integrity": "sha512-+eKIGFhsFa4EBwaf/GMyzCdWrXWymGXfFmZU3FHQvYS8mPcHtTtZONbkcqqUMzw9mJ/pImEBFET1JNifhqGsAQ==", + "dependencies": { + "@actions/core": "^1.9.1", + "minimatch": "^3.0.4" + } + }, + "node_modules/@actions/http-client": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.1.tgz", + "integrity": "sha512-KhC/cZsq7f8I4LfZSJKgCvEwfkE8o1538VoBeoGzokVLLnbFDEAdFD3UhoMklxo2un9NJVBdANOresx7vTHlHw==", + "dependencies": { + "tunnel": "^0.0.6", + "undici": "^5.25.4" + } + }, + "node_modules/@actions/http-client/node_modules/undici": { + "version": "5.28.4", + "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.4.tgz", + "integrity": "sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==", + "dependencies": { + "@fastify/busboy": "^2.0.0" + }, + "engines": { + "node": ">=14.0" + } + }, + "node_modules/@actions/io": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.3.tgz", + "integrity": "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q==" + }, + "node_modules/@actions/tool-cache": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@actions/tool-cache/-/tool-cache-2.0.1.tgz", + "integrity": "sha512-iPU+mNwrbA8jodY8eyo/0S/QqCKDajiR8OxWTnSk/SnYg0sj8Hp4QcUEVC1YFpHWXtrfbQrE13Jz4k4HXJQKcA==", + "dependencies": { + "@actions/core": "^1.2.6", + "@actions/exec": "^1.0.0", + "@actions/http-client": "^2.0.1", + "@actions/io": "^1.1.1", + "semver": "^6.1.0", + "uuid": "^3.3.2" + } + }, + "node_modules/@actions/tool-cache/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/@actions/tool-cache/node_modules/uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", + "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. 
See https://v8.dev/blog/math-random for details.", + "bin": { + "uuid": "bin/uuid" + } + }, "node_modules/@ampproject/remapping": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.1.tgz", @@ -46,6 +680,238 @@ "node": ">=6.0.0" } }, + "node_modules/@azure/abort-controller": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-1.1.0.tgz", + "integrity": "sha512-TrRLIoSQVzfAJX9H1JeFjzAoDGcoK1IYX1UImfceTZpsyYfWr09Ss1aHW1y5TrrR3iq6RZLBwJ3E24uwPhwahw==", + "dependencies": { + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@azure/abort-controller/node_modules/tslib": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", + "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" + }, + "node_modules/@azure/core-auth": { + "version": "1.7.2", + "resolved": "https://registry.npmjs.org/@azure/core-auth/-/core-auth-1.7.2.tgz", + "integrity": "sha512-Igm/S3fDYmnMq1uKS38Ae1/m37B3zigdlZw+kocwEhh5GjyKjPrXKO2J6rzpC1wAxrNil/jX9BJRqBshyjnF3g==", + "dependencies": { + "@azure/abort-controller": "^2.0.0", + "@azure/core-util": "^1.1.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-auth/node_modules/@azure/abort-controller": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.2.tgz", + "integrity": "sha512-nBrLsEWm4J2u5LpAPjxADTlq3trDgVZZXHNKabeXZtpq3d3AbN/KGO82R87rdDz5/lYB024rtEf10/q0urNgsA==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-auth/node_modules/tslib": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", + "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" + }, + "node_modules/@azure/core-http": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@azure/core-http/-/core-http-3.0.4.tgz", + "integrity": "sha512-Fok9VVhMdxAFOtqiiAtg74fL0UJkt0z3D+ouUUxcRLzZNBioPRAMJFVxiWoJljYpXsRi4GDQHzQHDc9AiYaIUQ==", + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-auth": "^1.3.0", + "@azure/core-tracing": "1.0.0-preview.13", + "@azure/core-util": "^1.1.1", + "@azure/logger": "^1.0.0", + "@types/node-fetch": "^2.5.0", + "@types/tunnel": "^0.0.3", + "form-data": "^4.0.0", + "node-fetch": "^2.6.7", + "process": "^0.11.10", + "tslib": "^2.2.0", + "tunnel": "^0.0.6", + "uuid": "^8.3.0", + "xml2js": "^0.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@azure/core-http/node_modules/tslib": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", + "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" + }, + "node_modules/@azure/core-lro": { + "version": "2.7.2", + "resolved": "https://registry.npmjs.org/@azure/core-lro/-/core-lro-2.7.2.tgz", + "integrity": "sha512-0YIpccoX8m/k00O7mDDMdJpbr6mf1yWo2dfmxt5A8XVZVVMz2SSKaEbMCeJRvgQ0IaSlqhjT47p4hVIRRy90xw==", + "dependencies": { + "@azure/abort-controller": "^2.0.0", + "@azure/core-util": "^1.2.0", + "@azure/logger": "^1.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-lro/node_modules/@azure/abort-controller": { + "version": "2.1.2", + "resolved": 
"https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.2.tgz", + "integrity": "sha512-nBrLsEWm4J2u5LpAPjxADTlq3trDgVZZXHNKabeXZtpq3d3AbN/KGO82R87rdDz5/lYB024rtEf10/q0urNgsA==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-lro/node_modules/tslib": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", + "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" + }, + "node_modules/@azure/core-paging": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/@azure/core-paging/-/core-paging-1.6.2.tgz", + "integrity": "sha512-YKWi9YuCU04B55h25cnOYZHxXYtEvQEbKST5vqRga7hWY9ydd3FZHdeQF8pyh+acWZvppw13M/LMGx0LABUVMA==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-paging/node_modules/tslib": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", + "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" + }, + "node_modules/@azure/core-tracing": { + "version": "1.0.0-preview.13", + "resolved": "https://registry.npmjs.org/@azure/core-tracing/-/core-tracing-1.0.0-preview.13.tgz", + "integrity": "sha512-KxDlhXyMlh2Jhj2ykX6vNEU0Vou4nHr025KoSEiz7cS3BNiHNaZcdECk/DmLkEB0as5T7b/TpRcehJ5yV6NeXQ==", + "dependencies": { + "@opentelemetry/api": "^1.0.1", + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@azure/core-tracing/node_modules/tslib": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", + "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" + }, + "node_modules/@azure/core-util": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@azure/core-util/-/core-util-1.9.0.tgz", + "integrity": "sha512-AfalUQ1ZppaKuxPPMsFEUdX6GZPB3d9paR9d/TTL7Ow2De8cJaC7ibi7kWVlFAVPCYo31OcnGymc0R89DX8Oaw==", + "dependencies": { + "@azure/abort-controller": "^2.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-util/node_modules/@azure/abort-controller": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.2.tgz", + "integrity": "sha512-nBrLsEWm4J2u5LpAPjxADTlq3trDgVZZXHNKabeXZtpq3d3AbN/KGO82R87rdDz5/lYB024rtEf10/q0urNgsA==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-util/node_modules/tslib": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", + "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" + }, + "node_modules/@azure/logger": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@azure/logger/-/logger-1.1.2.tgz", + "integrity": "sha512-l170uE7bsKpIU6B/giRc9i4NI0Mj+tANMMMxf7Zi/5cKzEqPayP7+X1WPrG7e+91JgY8N+7K7nF2WOi7iVhXvg==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/logger/node_modules/tslib": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", + "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" + }, + "node_modules/@azure/ms-rest-js": { + "version": "2.7.0", + 
"resolved": "https://registry.npmjs.org/@azure/ms-rest-js/-/ms-rest-js-2.7.0.tgz", + "integrity": "sha512-ngbzWbqF+NmztDOpLBVDxYM+XLcUj7nKhxGbSU9WtIsXfRB//cf2ZbAG5HkOrhU9/wd/ORRB6lM/d69RKVjiyA==", + "dependencies": { + "@azure/core-auth": "^1.1.4", + "abort-controller": "^3.0.0", + "form-data": "^2.5.0", + "node-fetch": "^2.6.7", + "tslib": "^1.10.0", + "tunnel": "0.0.6", + "uuid": "^8.3.2", + "xml2js": "^0.5.0" + } + }, + "node_modules/@azure/ms-rest-js/node_modules/form-data": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", + "integrity": "sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 0.12" + } + }, + "node_modules/@azure/storage-blob": { + "version": "12.18.0", + "resolved": "https://registry.npmjs.org/@azure/storage-blob/-/storage-blob-12.18.0.tgz", + "integrity": "sha512-BzBZJobMoDyjJsPRMLNHvqHycTGrT8R/dtcTx9qUFcqwSRfGVK9A/cZ7Nx38UQydT9usZGbaDCN75QRNjezSAA==", + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-http": "^3.0.0", + "@azure/core-lro": "^2.2.0", + "@azure/core-paging": "^1.1.1", + "@azure/core-tracing": "1.0.0-preview.13", + "@azure/logger": "^1.0.0", + "events": "^3.0.0", + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@azure/storage-blob/node_modules/tslib": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", + "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" + }, "node_modules/@babel/code-frame": { "version": "7.22.13", "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.22.13.tgz", @@ -769,6 +1635,14 @@ "node": "^12.22.0 || ^14.17.0 || >=16.0.0" } }, + "node_modules/@fastify/busboy": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz", + "integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==", + "engines": { + "node": ">=14" + } + }, "node_modules/@gar/promisify": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/@gar/promisify/-/promisify-1.1.3.tgz", @@ -823,6 +1697,90 @@ "node": ">=6.9.0" } }, + "node_modules/@isaacs/cliui": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", + "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "dependencies": { + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@isaacs/cliui/node_modules/ansi-regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/ansi-styles": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": 
"sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/string-width": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@isaacs/cliui/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/wrap-ansi": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", + "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "dependencies": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, "node_modules/@isaacs/string-locale-compare": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@isaacs/string-locale-compare/-/string-locale-compare-1.1.0.tgz", @@ -2930,6 +3888,77 @@ "node": ">= 8" } }, + "node_modules/@npmcli/agent": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/@npmcli/agent/-/agent-2.2.2.tgz", + "integrity": "sha512-OrcNPXdpSl9UX7qPVRWbmWMCSXrcDa2M9DvrbOTj7ao1S4PlqVFYv9/yLKMkrJKZ/V5A/kDBC690or307i26Og==", + "dependencies": { + "agent-base": "^7.1.0", + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.1", + "lru-cache": "^10.0.1", + "socks-proxy-agent": "^8.0.3" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/agent/node_modules/agent-base": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.1.tgz", + "integrity": "sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA==", + "dependencies": { + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@npmcli/agent/node_modules/http-proxy-agent": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz", + "integrity": "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==", + "dependencies": { + "agent-base": "^7.1.0", + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@npmcli/agent/node_modules/https-proxy-agent": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.4.tgz", + "integrity": "sha512-wlwpilI7YdjSkWaQ/7omYBMTliDcmCN8OLihO6I9B86g06lMyAoqgoDpV0XqoaPOKj+0DIdAvnsWfyAAhmimcg==", + "dependencies": { + "agent-base": "^7.0.2", + "debug": "4" + }, + "engines": { + "node": ">= 14" + } + }, + 
"node_modules/@npmcli/agent/node_modules/lru-cache": { + "version": "10.2.2", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.2.2.tgz", + "integrity": "sha512-9hp3Vp2/hFQUiIwKo8XCeFVnrg8Pk3TYNPIR7tJADKi5YfcF7vEaK7avFHTlSy3kOKYaJQaalfEo6YuXdceBOQ==", + "engines": { + "node": "14 || >=16.14" + } + }, + "node_modules/@npmcli/agent/node_modules/socks-proxy-agent": { + "version": "8.0.3", + "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-8.0.3.tgz", + "integrity": "sha512-VNegTZKhuGq5vSD6XNKlbqWhyt/40CgoEw8XxD6dhnm8Jq9IEa3nIa4HwnM8XOqU0CdB0BwWVXusqiFXfHB3+A==", + "dependencies": { + "agent-base": "^7.1.1", + "debug": "^4.3.4", + "socks": "^2.7.1" + }, + "engines": { + "node": ">= 14" + } + }, "node_modules/@npmcli/arborist": { "version": "5.3.0", "resolved": "https://registry.npmjs.org/@npmcli/arborist/-/arborist-5.3.0.tgz", @@ -3840,7 +4869,6 @@ "version": "3.0.4", "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-3.0.4.tgz", "integrity": "sha512-TWFX7cZF2LXoCvdmJWY7XVPi74aSY0+FfBZNSXEXFkMpjcqsQwDSYVv5FhRFaI0V1ECnwbz4j59T/G+rXNWaIQ==", - "dev": true, "engines": { "node": ">= 14" } @@ -3849,7 +4877,6 @@ "version": "4.2.4", "resolved": "https://registry.npmjs.org/@octokit/core/-/core-4.2.4.tgz", "integrity": "sha512-rYKilwgzQ7/imScn3M9/pFfUf4I1AZEH3KhyJmtPdE2zfaXAn2mFfUy4FbKewzc2We5y/LlKLj36fWJLKC2SIQ==", - "dev": true, "dependencies": { "@octokit/auth-token": "^3.0.0", "@octokit/graphql": "^5.0.0", @@ -3867,7 +4894,6 @@ "version": "7.0.6", "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-7.0.6.tgz", "integrity": "sha512-5L4fseVRUsDFGR00tMWD/Trdeeihn999rTMGRMC1G/Ldi1uWlWJzI98H4Iak5DB/RVvQuyMYKqSK/R6mbSOQyg==", - "dev": true, "dependencies": { "@octokit/types": "^9.0.0", "is-plain-object": "^5.0.0", @@ -3881,7 +4907,6 @@ "version": "5.0.6", "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-5.0.6.tgz", "integrity": "sha512-Fxyxdy/JH0MnIB5h+UQ3yCoh1FG4kWXfFKkpWqjZHw/p+Kc8Y44Hu/kCgNBT6nU1shNumEchmW/sUO1JuQnPcw==", - "dev": true, "dependencies": { "@octokit/request": "^6.0.0", "@octokit/types": "^9.0.0", @@ -3894,8 +4919,7 @@ "node_modules/@octokit/openapi-types": { "version": "18.1.1", "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-18.1.1.tgz", - "integrity": "sha512-VRaeH8nCDtF5aXWnjPuEMIYf1itK/s3JYyJcWFJT8X9pSNnBtriDf7wlEWsGuhPLl4QIH4xM8fqTXDwJ3Mu6sw==", - "dev": true + "integrity": "sha512-VRaeH8nCDtF5aXWnjPuEMIYf1itK/s3JYyJcWFJT8X9pSNnBtriDf7wlEWsGuhPLl4QIH4xM8fqTXDwJ3Mu6sw==" }, "node_modules/@octokit/plugin-enterprise-rest": { "version": "6.0.1", @@ -3923,7 +4947,6 @@ "version": "1.0.4", "resolved": "https://registry.npmjs.org/@octokit/plugin-request-log/-/plugin-request-log-1.0.4.tgz", "integrity": "sha512-mLUsMkgP7K/cnFEw07kWqXGF5LKrOkD+lhCrKvPHXWDywAwuDUeDwWBpc69XK3pNX0uKiVt8g5z96PJ6z9xCFA==", - "dev": true, "peerDependencies": { "@octokit/core": ">=3" } @@ -3952,11 +4975,32 @@ "@octokit/openapi-types": "^18.0.0" } }, + "node_modules/@octokit/plugin-retry": { + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/@octokit/plugin-retry/-/plugin-retry-3.0.9.tgz", + "integrity": "sha512-r+fArdP5+TG6l1Rv/C9hVoty6tldw6cE2pRHNGmFPdyfrc696R6JjrQ3d7HdVqGwuzfyrcaLAKD7K8TX8aehUQ==", + "dependencies": { + "@octokit/types": "^6.0.3", + "bottleneck": "^2.15.3" + } + }, + "node_modules/@octokit/plugin-retry/node_modules/@octokit/openapi-types": { + "version": "12.11.0", + "resolved": 
"https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-12.11.0.tgz", + "integrity": "sha512-VsXyi8peyRq9PqIz/tpqiL2w3w80OgVMwBHltTml3LmVvXiphgeqmY9mvBw9Wu7e0QWk/fqD37ux8yP5uVekyQ==" + }, + "node_modules/@octokit/plugin-retry/node_modules/@octokit/types": { + "version": "6.41.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-6.41.0.tgz", + "integrity": "sha512-eJ2jbzjdijiL3B4PrSQaSjuF2sPEQPVCPzBvTHJD9Nz+9dw2SGH4K4xeQJ77YfTq5bRQ+bD8wT11JbeDPmxmGg==", + "dependencies": { + "@octokit/openapi-types": "^12.11.0" + } + }, "node_modules/@octokit/request": { "version": "6.2.8", "resolved": "https://registry.npmjs.org/@octokit/request/-/request-6.2.8.tgz", "integrity": "sha512-ow4+pkVQ+6XVVsekSYBzJC0VTVvh/FCTUUgTsboGq+DTeWdyIFV8WSCdo0RIxk6wSkBTHqIK1mYuY7nOBXOchw==", - "dev": true, "dependencies": { "@octokit/endpoint": "^7.0.0", "@octokit/request-error": "^3.0.0", @@ -3973,7 +5017,6 @@ "version": "3.0.3", "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-3.0.3.tgz", "integrity": "sha512-crqw3V5Iy2uOU5Np+8M/YexTlT8zxCfI+qu+LxUB7SZpje4Qmx3mub5DfEKSO8Ylyk0aogi6TYdf6kxzh2BguQ==", - "dev": true, "dependencies": { "@octokit/types": "^9.0.0", "deprecation": "^2.0.0", @@ -4008,11 +5051,18 @@ "version": "9.3.2", "resolved": "https://registry.npmjs.org/@octokit/types/-/types-9.3.2.tgz", "integrity": "sha512-D4iHGTdAnEEVsB8fl95m1hiz7D5YiRdQ9b/OEb3BYRVwbLsGHcRVPz+u+BgRLNk0Q0/4iZCBqDN96j2XNxfXrA==", - "dev": true, "dependencies": { "@octokit/openapi-types": "^18.0.0" } }, + "node_modules/@opentelemetry/api": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.8.0.tgz", + "integrity": "sha512-I/s6F7yKUDdtMsoBWXJe8Qz40Tui5vsuKCWJEWVL+5q9sSWRzzx6v2KeNsOBEwd94j0eWkpWCH4yB6rZg9Mf0w==", + "engines": { + "node": ">=8.0.0" + } + }, "node_modules/@parcel/watcher": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/@parcel/watcher/-/watcher-2.0.4.tgz", @@ -4031,6 +5081,15 @@ "url": "https://opencollective.com/parcel" } }, + "node_modules/@pkgjs/parseargs": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", + "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", + "optional": true, + "engines": { + "node": ">=14" + } + }, "node_modules/@pkgr/utils": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/@pkgr/utils/-/utils-2.4.2.tgz", @@ -4057,6 +5116,312 @@ "integrity": "sha512-t0hLfiEKfMUoqhG+U1oid7Pva4bbDPHYfJNiB7BiIjRkj1pyC++4N3huJfqY6aRH6VTB0rvtzQwjM4K6qpfOig==", "dev": true }, + "node_modules/@protobuf-ts/plugin": { + "version": "2.9.4", + "resolved": "https://registry.npmjs.org/@protobuf-ts/plugin/-/plugin-2.9.4.tgz", + "integrity": "sha512-Db5Laq5T3mc6ERZvhIhkj1rn57/p8gbWiCKxQWbZBBl20wMuqKoHbRw4tuD7FyXi+IkwTToaNVXymv5CY3E8Rw==", + "dependencies": { + "@protobuf-ts/plugin-framework": "^2.9.4", + "@protobuf-ts/protoc": "^2.9.4", + "@protobuf-ts/runtime": "^2.9.4", + "@protobuf-ts/runtime-rpc": "^2.9.4", + "typescript": "^3.9" + }, + "bin": { + "protoc-gen-dump": "bin/protoc-gen-dump", + "protoc-gen-ts": "bin/protoc-gen-ts" + } + }, + "node_modules/@protobuf-ts/plugin-framework": { + "version": "2.9.4", + "resolved": "https://registry.npmjs.org/@protobuf-ts/plugin-framework/-/plugin-framework-2.9.4.tgz", + "integrity": "sha512-9nuX1kjdMliv+Pes8dQCKyVhjKgNNfwxVHg+tx3fLXSfZZRcUHMc1PMwB9/vTvc6gBKt9QGz5ERqSqZc0++E9A==", + "dependencies": { + "@protobuf-ts/runtime": "^2.9.4", + "typescript": 
"^3.9" + } + }, + "node_modules/@protobuf-ts/plugin-framework/node_modules/typescript": { + "version": "3.9.10", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.10.tgz", + "integrity": "sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q==", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=4.2.0" + } + }, + "node_modules/@protobuf-ts/plugin/node_modules/typescript": { + "version": "3.9.10", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.10.tgz", + "integrity": "sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q==", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=4.2.0" + } + }, + "node_modules/@protobuf-ts/protoc": { + "version": "2.9.4", + "resolved": "https://registry.npmjs.org/@protobuf-ts/protoc/-/protoc-2.9.4.tgz", + "integrity": "sha512-hQX+nOhFtrA+YdAXsXEDrLoGJqXHpgv4+BueYF0S9hy/Jq0VRTVlJS1Etmf4qlMt/WdigEes5LOd/LDzui4GIQ==", + "bin": { + "protoc": "protoc.js" + } + }, + "node_modules/@protobuf-ts/runtime": { + "version": "2.9.4", + "resolved": "https://registry.npmjs.org/@protobuf-ts/runtime/-/runtime-2.9.4.tgz", + "integrity": "sha512-vHRFWtJJB/SiogWDF0ypoKfRIZ41Kq+G9cEFj6Qm1eQaAhJ1LDFvgZ7Ja4tb3iLOQhz0PaoPnnOijF1qmEqTxg==" + }, + "node_modules/@protobuf-ts/runtime-rpc": { + "version": "2.9.4", + "resolved": "https://registry.npmjs.org/@protobuf-ts/runtime-rpc/-/runtime-rpc-2.9.4.tgz", + "integrity": "sha512-y9L9JgnZxXFqH5vD4d7j9duWvIJ7AShyBRoNKJGhu9Q27qIbchfzli66H9RvrQNIFk5ER7z1Twe059WZGqERcA==", + "dependencies": { + "@protobuf-ts/runtime": "^2.9.4" + } + }, + "node_modules/@sigstore/bundle": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-2.3.2.tgz", + "integrity": "sha512-wueKWDk70QixNLB363yHc2D2ItTgYiMTdPwK8D9dKQMR3ZQ0c35IxP5xnwQ8cNLoCgCRcHf14kE+CLIvNX1zmA==", + "dependencies": { + "@sigstore/protobuf-specs": "^0.3.2" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@sigstore/core": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@sigstore/core/-/core-1.1.0.tgz", + "integrity": "sha512-JzBqdVIyqm2FRQCulY6nbQzMpJJpSiJ8XXWMhtOX9eKgaXXpfNOF53lzQEjIydlStnd/eFtuC1dW4VYdD93oRg==", + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@sigstore/protobuf-specs": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.3.2.tgz", + "integrity": "sha512-c6B0ehIWxMI8wiS/bj6rHMPqeFvngFV7cDU/MY+B16P9Z3Mp9k8L93eYZ7BYzSickzuqAQqAq0V956b3Ju6mLw==", + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@sigstore/sign": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/@sigstore/sign/-/sign-2.3.2.tgz", + "integrity": "sha512-5Vz5dPVuunIIvC5vBb0APwo7qKA4G9yM48kPWJT+OEERs40md5GoUR1yedwpekWZ4m0Hhw44m6zU+ObsON+iDA==", + "dependencies": { + "@sigstore/bundle": "^2.3.2", + "@sigstore/core": "^1.0.0", + "@sigstore/protobuf-specs": "^0.3.2", + "make-fetch-happen": "^13.0.1", + "proc-log": "^4.2.0", + "promise-retry": "^2.0.1" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@sigstore/sign/node_modules/@npmcli/fs": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-3.1.1.tgz", + "integrity": "sha512-q9CRWjpHCMIh5sVyefoD1cA7PkvILqCZsnSOEUUivORLjxCO/Irmue2DprETiNgEqktDBZaM1Bi+jrarx1XdCg==", + "dependencies": { + "semver": "^7.3.5" + }, + 
"engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/@sigstore/sign/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/@sigstore/sign/node_modules/cacache": { + "version": "18.0.3", + "resolved": "https://registry.npmjs.org/cacache/-/cacache-18.0.3.tgz", + "integrity": "sha512-qXCd4rh6I07cnDqh8V48/94Tc/WSfj+o3Gn6NZ0aZovS255bUx8O13uKxRFd2eWG0xgsco7+YItQNPaa5E85hg==", + "dependencies": { + "@npmcli/fs": "^3.1.0", + "fs-minipass": "^3.0.0", + "glob": "^10.2.2", + "lru-cache": "^10.0.1", + "minipass": "^7.0.3", + "minipass-collect": "^2.0.1", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "p-map": "^4.0.0", + "ssri": "^10.0.0", + "tar": "^6.1.11", + "unique-filename": "^3.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@sigstore/sign/node_modules/fs-minipass": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-3.0.3.tgz", + "integrity": "sha512-XUBA9XClHbnJWSfBzjkm6RvPsyg3sryZt06BEQoXcF7EK/xpGaQYJgQKDJSUH5SGZ76Y7pFx1QBnXz09rU5Fbw==", + "dependencies": { + "minipass": "^7.0.3" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/@sigstore/sign/node_modules/glob": { + "version": "10.3.16", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.3.16.tgz", + "integrity": "sha512-JDKXl1DiuuHJ6fVS2FXjownaavciiHNUU4mOvV/B793RLh05vZL1rcPnCSaOgv1hDT6RDlY7AB7ZUvFYAtPgAw==", + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^3.1.2", + "minimatch": "^9.0.1", + "minipass": "^7.0.4", + "path-scurry": "^1.11.0" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "engines": { + "node": ">=16 || 14 >=14.18" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@sigstore/sign/node_modules/lru-cache": { + "version": "10.2.2", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.2.2.tgz", + "integrity": "sha512-9hp3Vp2/hFQUiIwKo8XCeFVnrg8Pk3TYNPIR7tJADKi5YfcF7vEaK7avFHTlSy3kOKYaJQaalfEo6YuXdceBOQ==", + "engines": { + "node": "14 || >=16.14" + } + }, + "node_modules/@sigstore/sign/node_modules/make-fetch-happen": { + "version": "13.0.1", + "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-13.0.1.tgz", + "integrity": "sha512-cKTUFc/rbKUd/9meOvgrpJ2WrNzymt6jfRDdwg5UCnVzv9dTpEj9JS5m3wtziXVCjluIXyL8pcaukYqezIzZQA==", + "dependencies": { + "@npmcli/agent": "^2.0.0", + "cacache": "^18.0.0", + "http-cache-semantics": "^4.1.1", + "is-lambda": "^1.0.1", + "minipass": "^7.0.2", + "minipass-fetch": "^3.0.0", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.3", + "proc-log": "^4.2.0", + "promise-retry": "^2.0.1", + "ssri": "^10.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@sigstore/sign/node_modules/minimatch": { + "version": "9.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.4.tgz", + "integrity": "sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + 
"node_modules/@sigstore/sign/node_modules/minipass": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.1.tgz", + "integrity": "sha512-UZ7eQ+h8ywIRAW1hIEl2AqdwzJucU/Kp59+8kkZeSvafXhZjul247BvIJjEVFVeON6d7lM46XX1HXCduKAS8VA==", + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/@sigstore/sign/node_modules/minipass-collect": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/minipass-collect/-/minipass-collect-2.0.1.tgz", + "integrity": "sha512-D7V8PO9oaz7PWGLbCACuI1qEOsq7UKfLotx/C0Aet43fCUB/wfQ7DYeq2oR/svFJGYDHPr38SHATeaj/ZoKHKw==", + "dependencies": { + "minipass": "^7.0.3" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/@sigstore/sign/node_modules/minipass-fetch": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-3.0.5.tgz", + "integrity": "sha512-2N8elDQAtSnFV0Dk7gt15KHsS0Fyz6CbYZ360h0WTYV1Ty46li3rAXVOQj1THMNLdmrD9Vt5pBPtWtVkpwGBqg==", + "dependencies": { + "minipass": "^7.0.3", + "minipass-sized": "^1.0.3", + "minizlib": "^2.1.2" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "optionalDependencies": { + "encoding": "^0.1.13" + } + }, + "node_modules/@sigstore/sign/node_modules/proc-log": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-4.2.0.tgz", + "integrity": "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/@sigstore/sign/node_modules/ssri": { + "version": "10.0.6", + "resolved": "https://registry.npmjs.org/ssri/-/ssri-10.0.6.tgz", + "integrity": "sha512-MGrFH9Z4NP9Iyhqn16sDtBpRRNJ0Y2hNa6D65h736fVSaPCHr4DM4sWUNvVaSuC+0OBGhwsrydQwmgfg5LncqQ==", + "dependencies": { + "minipass": "^7.0.3" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/@sigstore/sign/node_modules/unique-filename": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-3.0.0.tgz", + "integrity": "sha512-afXhuC55wkAmZ0P18QsVE6kp8JaxrEokN2HGIoIVv2ijHQd419H0+6EigAFcIzXeMIkcIkNBpB3L/DXB3cTS/g==", + "dependencies": { + "unique-slug": "^4.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/@sigstore/sign/node_modules/unique-slug": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-4.0.0.tgz", + "integrity": "sha512-WrcA6AyEfqDX5bWige/4NQfPZMtASNVxdmWR76WESYQVAACSgWcR6e9i0mofqqBxYFtL4oAxPIptY73/0YE1DQ==", + "dependencies": { + "imurmurhash": "^0.1.4" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, "node_modules/@sinclair/typebox": { "version": "0.27.8", "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", @@ -4131,6 +5496,45 @@ "@babel/types": "^7.20.7" } }, + "node_modules/@types/body-parser": { + "version": "1.19.5", + "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.5.tgz", + "integrity": "sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg==", + "dependencies": { + "@types/connect": "*", + "@types/node": "*" + } + }, + "node_modules/@types/connect": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.38.tgz", + "integrity": "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==", + "dependencies": { + 
"@types/node": "*" + } + }, + "node_modules/@types/express": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.21.tgz", + "integrity": "sha512-ejlPM315qwLpaQlQDTjPdsUFSc6ZsP4AN6AlWnogPjQ7CVi7PYF3YVz+CY3jE2pwYf7E/7HlDAN0rV2GxTG0HQ==", + "dependencies": { + "@types/body-parser": "*", + "@types/express-serve-static-core": "^4.17.33", + "@types/qs": "*", + "@types/serve-static": "*" + } + }, + "node_modules/@types/express-serve-static-core": { + "version": "4.19.1", + "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.19.1.tgz", + "integrity": "sha512-ej0phymbFLoCB26dbbq5PGScsf2JAJ4IJHjG10LalgUV36XKTmA4GdA+PVllKvRk0sEKt64X8975qFnkSi0hqA==", + "dependencies": { + "@types/node": "*", + "@types/qs": "*", + "@types/range-parser": "*", + "@types/send": "*" + } + }, "node_modules/@types/graceful-fs": { "version": "4.1.6", "resolved": "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.6.tgz", @@ -4140,6 +5544,11 @@ "@types/node": "*" } }, + "node_modules/@types/http-errors": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.4.tgz", + "integrity": "sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA==" + }, "node_modules/@types/istanbul-lib-coverage": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz", @@ -4186,6 +5595,19 @@ "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==", "dev": true }, + "node_modules/@types/jsonwebtoken": { + "version": "9.0.6", + "resolved": "https://registry.npmjs.org/@types/jsonwebtoken/-/jsonwebtoken-9.0.6.tgz", + "integrity": "sha512-/5hndP5dCjloafCXns6SZyESp3Ldq7YjH3zwzwczYnjxIT0Fqzk5ROSYVGfFyczIue7IUEj8hkvLbPoLQ18vQw==", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/mime": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz", + "integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==" + }, "node_modules/@types/minimatch": { "version": "3.0.5", "resolved": "https://registry.npmjs.org/@types/minimatch/-/minimatch-3.0.5.tgz", @@ -4201,8 +5623,16 @@ "node_modules/@types/node": { "version": "20.5.7", "resolved": "https://registry.npmjs.org/@types/node/-/node-20.5.7.tgz", - "integrity": "sha512-dP7f3LdZIysZnmvP3ANJYTSwg+wLLl8p7RqniVlV7j+oXSXAbt9h0WIBFmJy5inWZoX9wZN6eXx+YXd9Rh3RBA==", - "dev": true + "integrity": "sha512-dP7f3LdZIysZnmvP3ANJYTSwg+wLLl8p7RqniVlV7j+oXSXAbt9h0WIBFmJy5inWZoX9wZN6eXx+YXd9Rh3RBA==" + }, + "node_modules/@types/node-fetch": { + "version": "2.6.11", + "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.11.tgz", + "integrity": "sha512-24xFj9R5+rfQJLRyM56qh+wnVSYhyXC2tkoBndtY0U+vubqNsYXGjufB2nn8Q6gt0LrARwL6UBtMCSVCwl4B1g==", + "dependencies": { + "@types/node": "*", + "form-data": "^4.0.0" + } }, "node_modules/@types/normalize-package-data": { "version": "2.4.4", @@ -4216,12 +5646,41 @@ "integrity": "sha512-dISoDXWWQwUquiKsyZ4Ng+HX2KsPL7LyHKHQwgGFEA3IaKac4Obd+h2a/a6waisAoepJlBcx9paWqjA8/HVjCw==", "dev": true }, + "node_modules/@types/qs": { + "version": "6.9.15", + "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.15.tgz", + "integrity": "sha512-uXHQKES6DQKKCLh441Xv/dwxOq1TVS3JPUMlEqoEglvlhR6Mxnlew/Xq/LRVHpLyk7iK3zODe1qYHIMltO7XGg==" + }, + 
"node_modules/@types/range-parser": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.7.tgz", + "integrity": "sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==" + }, "node_modules/@types/semver": { "version": "7.5.0", "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.0.tgz", "integrity": "sha512-G8hZ6XJiHnuhQKR7ZmysCeJWE08o8T0AXtk5darsCaTVsYZhhgUrq53jizaR2FvsoeCwJhlmwTjkXBY5Pn/ZHw==", "dev": true }, + "node_modules/@types/send": { + "version": "0.17.4", + "resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.4.tgz", + "integrity": "sha512-x2EM6TJOybec7c52BX0ZspPodMsQUd5L6PRwOunVyVUhXiBSKf3AezDL8Dgvgt5o0UfKNfuA0eMLr2wLT4AiBA==", + "dependencies": { + "@types/mime": "^1", + "@types/node": "*" + } + }, + "node_modules/@types/serve-static": { + "version": "1.15.7", + "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.7.tgz", + "integrity": "sha512-W8Ym+h8nhuRwaKPaDw34QUkwsGi6Rc4yYqvKFo5rm2FUEhCFbzVWrxXUxuKK8TASjWsysJY0nsmNCGhCOIsrOw==", + "dependencies": { + "@types/http-errors": "*", + "@types/node": "*", + "@types/send": "*" + } + }, "node_modules/@types/signale": { "version": "1.4.4", "resolved": "https://registry.npmjs.org/@types/signale/-/signale-1.4.4.tgz", @@ -4237,6 +5696,14 @@ "integrity": "sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw==", "dev": true }, + "node_modules/@types/tunnel": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/@types/tunnel/-/tunnel-0.0.3.tgz", + "integrity": "sha512-sOUTGn6h1SfQ+gbgqC364jLFBw2lnFqkgF3q0WovEHRLMrVD1sd5aufqi/aJObLekJO+Aq5z646U4Oxy6shXMA==", + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/yargs": { "version": "17.0.24", "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.24.tgz", @@ -4543,6 +6010,17 @@ "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==", "dev": true }, + "node_modules/abort-controller": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", + "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", + "dependencies": { + "event-target-shim": "^5.0.0" + }, + "engines": { + "node": ">=6.5" + } + }, "node_modules/acorn": { "version": "8.10.0", "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.10.0.tgz", @@ -4598,7 +6076,6 @@ "version": "3.1.0", "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", - "dev": true, "dependencies": { "clean-stack": "^2.0.0", "indent-string": "^4.0.0" @@ -4663,7 +6140,6 @@ "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, "engines": { "node": ">=8" } @@ -4672,7 +6148,6 @@ "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, "dependencies": { "color-convert": "^2.0.1" }, @@ -4702,6 +6177,177 @@ "integrity": 
"sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==", "dev": true }, + "node_modules/archiver": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/archiver/-/archiver-7.0.1.tgz", + "integrity": "sha512-ZcbTaIqJOfCc03QwD468Unz/5Ir8ATtvAHsK+FdXbDIbGfihqh9mrvdcYunQzqn4HrvWWaFyaxJhGZagaJJpPQ==", + "dependencies": { + "archiver-utils": "^5.0.2", + "async": "^3.2.4", + "buffer-crc32": "^1.0.0", + "readable-stream": "^4.0.0", + "readdir-glob": "^1.1.2", + "tar-stream": "^3.0.0", + "zip-stream": "^6.0.1" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/archiver-utils": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/archiver-utils/-/archiver-utils-5.0.2.tgz", + "integrity": "sha512-wuLJMmIBQYCsGZgYLTy5FIB2pF6Lfb6cXMSF8Qywwk3t20zWnAi7zLcQFdKQmIB8wyZpY5ER38x08GbwtR2cLA==", + "dependencies": { + "glob": "^10.0.0", + "graceful-fs": "^4.2.0", + "is-stream": "^2.0.1", + "lazystream": "^1.0.0", + "lodash": "^4.17.15", + "normalize-path": "^3.0.0", + "readable-stream": "^4.0.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/archiver-utils/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/archiver-utils/node_modules/buffer": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", + "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.2.1" + } + }, + "node_modules/archiver-utils/node_modules/glob": { + "version": "10.3.16", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.3.16.tgz", + "integrity": "sha512-JDKXl1DiuuHJ6fVS2FXjownaavciiHNUU4mOvV/B793RLh05vZL1rcPnCSaOgv1hDT6RDlY7AB7ZUvFYAtPgAw==", + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^3.1.2", + "minimatch": "^9.0.1", + "minipass": "^7.0.4", + "path-scurry": "^1.11.0" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "engines": { + "node": ">=16 || 14 >=14.18" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/archiver-utils/node_modules/minimatch": { + "version": "9.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.4.tgz", + "integrity": "sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/archiver-utils/node_modules/minipass": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.1.tgz", + "integrity": "sha512-UZ7eQ+h8ywIRAW1hIEl2AqdwzJucU/Kp59+8kkZeSvafXhZjul247BvIJjEVFVeON6d7lM46XX1HXCduKAS8VA==", + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/archiver-utils/node_modules/readable-stream": { + "version": "4.5.2", + "resolved": 
"https://registry.npmjs.org/readable-stream/-/readable-stream-4.5.2.tgz", + "integrity": "sha512-yjavECdqeZ3GLXNgRXgeQEdz9fvDDkNKyHnbHRFtOr7/LcfgBcmct7t/ET+HaCTqfh06OzoAxrkN/IfjJBVe+g==", + "dependencies": { + "abort-controller": "^3.0.0", + "buffer": "^6.0.3", + "events": "^3.3.0", + "process": "^0.11.10", + "string_decoder": "^1.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/archiver/node_modules/buffer": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", + "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.2.1" + } + }, + "node_modules/archiver/node_modules/readable-stream": { + "version": "4.5.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.5.2.tgz", + "integrity": "sha512-yjavECdqeZ3GLXNgRXgeQEdz9fvDDkNKyHnbHRFtOr7/LcfgBcmct7t/ET+HaCTqfh06OzoAxrkN/IfjJBVe+g==", + "dependencies": { + "abort-controller": "^3.0.0", + "buffer": "^6.0.3", + "events": "^3.3.0", + "process": "^0.11.10", + "string_decoder": "^1.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/archiver/node_modules/tar-stream": { + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-3.1.7.tgz", + "integrity": "sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ==", + "dependencies": { + "b4a": "^1.6.4", + "fast-fifo": "^1.2.0", + "streamx": "^2.15.0" + } + }, "node_modules/are-we-there-yet": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-3.0.1.tgz", @@ -4885,14 +6531,12 @@ "node_modules/async": { "version": "3.2.5", "resolved": "https://registry.npmjs.org/async/-/async-3.2.5.tgz", - "integrity": "sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg==", - "dev": true + "integrity": "sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg==" }, "node_modules/asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", - "dev": true + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" }, "node_modules/at-least-node": { "version": "1.0.0", @@ -4935,20 +6579,6 @@ "proxy-from-env": "^1.1.0" } }, - "node_modules/axios/node_modules/form-data": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", - "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", - "dev": true, - "dependencies": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.8", - "mime-types": "^2.1.12" - }, - "engines": { - "node": ">= 6" - } - }, "node_modules/axobject-query": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-3.2.1.tgz", @@ -4958,6 +6588,11 @@ "dequal": "^2.0.3" } }, + "node_modules/b4a": { + "version": "1.6.6", + "resolved": 
"https://registry.npmjs.org/b4a/-/b4a-1.6.6.tgz", + "integrity": "sha512-5Tk1HLk6b6ctmjIkAcU/Ujv/1WqiDl0F0JdRCR80VsOcUlHcu7pWeWRlOqQLHfDEsVx9YH/aif5AG4ehoCtTmg==" + }, "node_modules/babel-jest": { "version": "29.6.4", "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-29.6.4.tgz", @@ -5077,14 +6712,18 @@ "node_modules/balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", - "dev": true + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + }, + "node_modules/bare-events": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/bare-events/-/bare-events-2.2.2.tgz", + "integrity": "sha512-h7z00dWdG0PYOQEvChhOSWvOfkIKsdZGkWr083FgN/HyoQuebSew/cgirYqh9SCuy/hRvxc5Vy6Fw8xAmYHLkQ==", + "optional": true }, "node_modules/base64-js": { "version": "1.5.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", - "dev": true, "funding": [ { "type": "github", @@ -5103,8 +6742,7 @@ "node_modules/before-after-hook": { "version": "2.2.3", "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz", - "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==", - "dev": true + "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==" }, "node_modules/big-integer": { "version": "1.6.51", @@ -5141,6 +6779,18 @@ "node": "^12.13.0 || ^14.15.0 || >=16.0.0" } }, + "node_modules/binary": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/binary/-/binary-0.3.0.tgz", + "integrity": "sha512-D4H1y5KYwpJgK8wk1Cue5LLPgmwHKYSChkbspQg5JtVuR5ulGckxfR62H3AE9UDkdMC8yyXlqYihuz3Aqg2XZg==", + "dependencies": { + "buffers": "~0.1.1", + "chainsaw": "~0.1.0" + }, + "engines": { + "node": "*" + } + }, "node_modules/bl": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", @@ -5152,6 +6802,11 @@ "readable-stream": "^3.4.0" } }, + "node_modules/bottleneck": { + "version": "2.19.5", + "resolved": "https://registry.npmjs.org/bottleneck/-/bottleneck-2.19.5.tgz", + "integrity": "sha512-VHiNCbI1lKdl44tGrhNfU3lup0Tj/ZBMJB5/2ZbNXRCPuRCO7ed2mgcK4r17y+KB2EfuYuRaVlwNbAeaWGSpbw==" + }, "node_modules/bplist-parser": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/bplist-parser/-/bplist-parser-0.2.0.tgz", @@ -5168,7 +6823,6 @@ "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": true, "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" @@ -5263,12 +6917,33 @@ "ieee754": "^1.1.13" } }, + "node_modules/buffer-crc32": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-1.0.0.tgz", + "integrity": "sha512-Db1SbgBS/fg/392AblrMJk97KggmvYhr4pB5ZIMTWtaivCPMWLkmb7m21cJvpvgK+J3nsU2CmmixNBZx4vFj/w==", + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/buffer-equal-constant-time": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", + "integrity": 
"sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==" + }, "node_modules/buffer-from": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", "dev": true }, + "node_modules/buffers": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/buffers/-/buffers-0.1.1.tgz", + "integrity": "sha512-9q/rDEGSb/Qsvv2qvzIzdluL5k7AaJOTrw23z9reQthrbF7is4CtlT0DXyO1oei2DCp4uojjzQ7igaSHp1kAEQ==", + "engines": { + "node": ">=0.2.0" + } + }, "node_modules/builtins": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/builtins/-/builtins-5.1.0.tgz", @@ -5402,6 +7077,20 @@ "node": ">=6" } }, + "node_modules/camel-case": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/camel-case/-/camel-case-4.1.2.tgz", + "integrity": "sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw==", + "dependencies": { + "pascal-case": "^3.1.2", + "tslib": "^2.0.3" + } + }, + "node_modules/camel-case/node_modules/tslib": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", + "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" + }, "node_modules/camelcase": { "version": "5.3.1", "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", @@ -5448,6 +7137,17 @@ } ] }, + "node_modules/chainsaw": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/chainsaw/-/chainsaw-0.1.0.tgz", + "integrity": "sha512-75kWfWt6MEKNC8xYXIdRpDehRYY/tNSgwKaJq+dbbDcxORuVrrQ+SEHoWsniVn9XPYfP4gmdWIeDk/4YNp1rNQ==", + "dependencies": { + "traverse": ">=0.3.0 <0.4" + }, + "engines": { + "node": "*" + } + }, "node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -5495,7 +7195,6 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", - "dev": true, "engines": { "node": ">=10" } @@ -5525,7 +7224,6 @@ "version": "2.2.0", "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", - "dev": true, "engines": { "node": ">=6" } @@ -5641,7 +7339,6 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, "dependencies": { "color-name": "~1.1.4" }, @@ -5652,8 +7349,7 @@ "node_modules/color-name": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, "node_modules/color-support": { "version": "1.1.3", @@ -5681,7 +7377,6 @@ "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", - "dev": true, "dependencies": { "delayed-stream": "~1.0.0" }, 
@@ -5689,6 +7384,14 @@ "node": ">= 0.8" } }, + "node_modules/commander": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz", + "integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==", + "engines": { + "node": ">= 6" + } + }, "node_modules/common-ancestor-path": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/common-ancestor-path/-/common-ancestor-path-1.0.1.tgz", @@ -5717,11 +7420,63 @@ "node": ">=8" } }, + "node_modules/compress-commons": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/compress-commons/-/compress-commons-6.0.2.tgz", + "integrity": "sha512-6FqVXeETqWPoGcfzrXb37E50NP0LXT8kAMu5ooZayhWWdgEY4lBEEcbQNXtkuKQsGduxiIcI4gOTsxTmuq/bSg==", + "dependencies": { + "crc-32": "^1.2.0", + "crc32-stream": "^6.0.0", + "is-stream": "^2.0.1", + "normalize-path": "^3.0.0", + "readable-stream": "^4.0.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/compress-commons/node_modules/buffer": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", + "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.2.1" + } + }, + "node_modules/compress-commons/node_modules/readable-stream": { + "version": "4.5.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.5.2.tgz", + "integrity": "sha512-yjavECdqeZ3GLXNgRXgeQEdz9fvDDkNKyHnbHRFtOr7/LcfgBcmct7t/ET+HaCTqfh06OzoAxrkN/IfjJBVe+g==", + "dependencies": { + "abort-controller": "^3.0.0", + "buffer": "^6.0.3", + "events": "^3.3.0", + "process": "^0.11.10", + "string_decoder": "^1.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, "node_modules/concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", - "dev": true + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" }, "node_modules/concat-stream": { "version": "2.0.0", @@ -5931,8 +7686,7 @@ "node_modules/core-util-is": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", - "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", - "dev": true + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==" }, "node_modules/cosmiconfig": { "version": "7.1.0", @@ -5950,11 +7704,71 @@ "node": ">=10" } }, + "node_modules/crc-32": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/crc-32/-/crc-32-1.2.2.tgz", + "integrity": "sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==", + "bin": { + "crc32": "bin/crc32.njs" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/crc32-stream": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/crc32-stream/-/crc32-stream-6.0.0.tgz", + "integrity": 
"sha512-piICUB6ei4IlTv1+653yq5+KoqfBYmj9bw6LqXoOneTMDXk5nM1qt12mFW1caG3LlJXEKW1Bp0WggEmIfQB34g==", + "dependencies": { + "crc-32": "^1.2.0", + "readable-stream": "^4.0.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/crc32-stream/node_modules/buffer": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", + "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.2.1" + } + }, + "node_modules/crc32-stream/node_modules/readable-stream": { + "version": "4.5.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.5.2.tgz", + "integrity": "sha512-yjavECdqeZ3GLXNgRXgeQEdz9fvDDkNKyHnbHRFtOr7/LcfgBcmct7t/ET+HaCTqfh06OzoAxrkN/IfjJBVe+g==", + "dependencies": { + "abort-controller": "^3.0.0", + "buffer": "^6.0.3", + "events": "^3.3.0", + "process": "^0.11.10", + "string_decoder": "^1.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, "node_modules/cross-spawn": { "version": "7.0.3", "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dev": true, "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", @@ -5964,6 +7778,12 @@ "node": ">= 8" } }, + "node_modules/crypto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/crypto/-/crypto-1.0.1.tgz", + "integrity": "sha512-VxBKmeNcqQdiUQUW2Tzq0t377b54N2bMtXO/qiLa+6eRRmmC4qT3D4OnTGoT/U6O9aklQ/jTwbOtRMTTY8G0Ig==", + "deprecated": "This package is no longer supported. It's now a built-in Node module. If you've depended on crypto, you should switch to the one that's built-in." 
+ }, "node_modules/damerau-levenshtein": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz", @@ -6008,7 +7828,6 @@ "version": "4.3.4", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", - "dev": true, "dependencies": { "ms": "2.1.2" }, @@ -6282,7 +8101,6 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", - "dev": true, "engines": { "node": ">=0.4.0" } @@ -6296,8 +8114,7 @@ "node_modules/deprecation": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", - "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==", - "dev": true + "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" }, "node_modules/dequal": { "version": "2.0.3", @@ -6369,6 +8186,18 @@ "node": ">=6.0.0" } }, + "node_modules/dot-object": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/dot-object/-/dot-object-2.1.5.tgz", + "integrity": "sha512-xHF8EP4XH/Ba9fvAF2LDd5O3IITVolerVV6xvkxoM8zlGEiCUrggpAnHyOoKJKCrhvPcGATFAUwIujj7bRG5UA==", + "dependencies": { + "commander": "^6.1.0", + "glob": "^7.1.6" + }, + "bin": { + "dot-object": "bin/dot-object" + } + }, "node_modules/dot-prop": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-6.0.1.tgz", @@ -6399,6 +8228,19 @@ "integrity": "sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==", "dev": true }, + "node_modules/eastasianwidth": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==" + }, + "node_modules/ecdsa-sig-formatter": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", + "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==", + "dependencies": { + "safe-buffer": "^5.0.1" + } + }, "node_modules/ejs": { "version": "3.1.9", "resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.9.tgz", @@ -6435,14 +8277,12 @@ "node_modules/emoji-regex": { "version": "9.2.2", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", - "dev": true + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" }, "node_modules/encoding": { "version": "0.1.13", "resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.13.tgz", "integrity": "sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==", - "dev": true, "optional": true, "dependencies": { "iconv-lite": "^0.6.2" @@ -6452,7 +8292,6 @@ "version": "0.6.3", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", - "dev": true, "optional": true, "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" @@ 
-6506,8 +8345,7 @@ "node_modules/err-code": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/err-code/-/err-code-2.0.3.tgz", - "integrity": "sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==", - "dev": true + "integrity": "sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==" }, "node_modules/error-ex": { "version": "1.3.2", @@ -7234,12 +9072,28 @@ "node": ">=0.10.0" } }, + "node_modules/event-target-shim": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", + "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", + "engines": { + "node": ">=6" + } + }, "node_modules/eventemitter3": { "version": "4.0.7", "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz", "integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==", "dev": true }, + "node_modules/events": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", + "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", + "engines": { + "node": ">=0.8.x" + } + }, "node_modules/execa": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", @@ -7332,6 +9186,11 @@ "integrity": "sha512-VxPP4NqbUjj6MaAOafWeUn2cXWLcCtljklUtZf0Ind4XQ+QPtmA0b18zZy0jIQx+ExRVCR/ZQpBmik5lXshNsw==", "dev": true }, + "node_modules/fast-fifo": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/fast-fifo/-/fast-fifo-1.3.2.tgz", + "integrity": "sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ==" + }, "node_modules/fast-glob": { "version": "3.3.1", "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.1.tgz", @@ -7553,6 +9412,45 @@ "is-callable": "^1.1.3" } }, + "node_modules/foreground-child": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.1.1.tgz", + "integrity": "sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg==", + "dependencies": { + "cross-spawn": "^7.0.0", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/foreground-child/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/fs-constants": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", @@ -7577,7 +9475,6 @@ "version": "2.1.0", "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", "integrity": 
"sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", - "dev": true, "dependencies": { "minipass": "^3.0.0" }, @@ -7588,8 +9485,7 @@ "node_modules/fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", - "dev": true + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" }, "node_modules/fsevents": { "version": "2.3.3", @@ -7901,7 +9797,6 @@ "version": "7.2.3", "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", - "dev": true, "dependencies": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", @@ -7994,8 +9889,7 @@ "node_modules/graceful-fs": { "version": "4.2.11", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", - "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", - "dev": true + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==" }, "node_modules/graphemer": { "version": "1.4.0", @@ -8159,8 +10053,7 @@ "node_modules/http-cache-semantics": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz", - "integrity": "sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==", - "dev": true + "integrity": "sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==" }, "node_modules/http-proxy-agent": { "version": "5.0.0", @@ -8223,7 +10116,6 @@ "version": "1.2.1", "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", - "dev": true, "funding": [ { "type": "github", @@ -8320,7 +10212,6 @@ "version": "0.1.4", "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", - "dev": true, "engines": { "node": ">=0.8.19" } @@ -8329,7 +10220,6 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", - "dev": true, "engines": { "node": ">=8" } @@ -8344,7 +10234,6 @@ "version": "1.0.6", "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", - "dev": true, "dependencies": { "once": "^1.3.0", "wrappy": "1" @@ -8353,8 +10242,7 @@ "node_modules/inherits": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "dev": true + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" }, "node_modules/ini": { "version": "1.3.8", @@ -8474,7 +10362,6 @@ "version": "9.0.5", "resolved": "https://registry.npmjs.org/ip-address/-/ip-address-9.0.5.tgz", "integrity": 
"sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g==", - "dev": true, "dependencies": { "jsbn": "1.1.0", "sprintf-js": "^1.1.3" @@ -8486,8 +10373,7 @@ "node_modules/ip-address/node_modules/sprintf-js": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.1.3.tgz", - "integrity": "sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==", - "dev": true + "integrity": "sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==" }, "node_modules/is-array-buffer": { "version": "3.0.2", @@ -8622,7 +10508,6 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "dev": true, "engines": { "node": ">=8" } @@ -8678,8 +10563,7 @@ "node_modules/is-lambda": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/is-lambda/-/is-lambda-1.0.1.tgz", - "integrity": "sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ==", - "dev": true + "integrity": "sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ==" }, "node_modules/is-negative-zero": { "version": "2.0.2", @@ -8748,7 +10632,6 @@ "version": "5.0.0", "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz", "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==", - "dev": true, "engines": { "node": ">=0.10.0" } @@ -8794,7 +10677,6 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", - "dev": true, "engines": { "node": ">=8" }, @@ -8925,8 +10807,7 @@ "node_modules/isexe": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", - "dev": true + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" }, "node_modules/isobject": { "version": "3.0.1", @@ -9015,6 +10896,23 @@ "node": ">=8" } }, + "node_modules/jackspeak": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.1.2.tgz", + "integrity": "sha512-kWmLKn2tRtfYMF/BakihVVRzBKOxz4gJMiL2Rj91WnAB5TPZumSH99R/Yf1qE1u4uRimvCSJfm6hnxohXeEXjQ==", + "dependencies": { + "@isaacs/cliui": "^8.0.2" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "optionalDependencies": { + "@pkgjs/parseargs": "^0.11.0" + } + }, "node_modules/jake": { "version": "10.8.7", "resolved": "https://registry.npmjs.org/jake/-/jake-10.8.7.tgz", @@ -9615,6 +11513,14 @@ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, + "node_modules/jose": { + "version": "4.15.5", + "resolved": "https://registry.npmjs.org/jose/-/jose-4.15.5.tgz", + "integrity": "sha512-jc7BFxgKPKi94uOvEmzlSWFFe2+vASyXaKUpdQKatWAESU2MWjDfFf0fdfc83CDKcA5QecabZeNLyfhe3yKNkg==", + "funding": { + "url": "https://github.com/sponsors/panva" + } + }, "node_modules/js-tokens": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", @@ -9636,8 +11542,7 @@ "node_modules/jsbn": { "version": "1.1.0", 
"resolved": "https://registry.npmjs.org/jsbn/-/jsbn-1.1.0.tgz", - "integrity": "sha512-4bYVV3aAMtDTTu4+xsDYa6sy9GyJ69/amsu9sYF2zqjiEoZA5xJi3BrfX3uY+/IekIu7MwdObdbDWpoZdBv3/A==", - "dev": true + "integrity": "sha512-4bYVV3aAMtDTTu4+xsDYa6sy9GyJ69/amsu9sYF2zqjiEoZA5xJi3BrfX3uY+/IekIu7MwdObdbDWpoZdBv3/A==" }, "node_modules/jsesc": { "version": "2.5.2", @@ -9745,6 +11650,27 @@ "node": "*" } }, + "node_modules/jsonwebtoken": { + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-9.0.2.tgz", + "integrity": "sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ==", + "dependencies": { + "jws": "^3.2.2", + "lodash.includes": "^4.3.0", + "lodash.isboolean": "^3.0.3", + "lodash.isinteger": "^4.0.4", + "lodash.isnumber": "^3.0.3", + "lodash.isplainobject": "^4.0.6", + "lodash.isstring": "^4.0.1", + "lodash.once": "^4.0.0", + "ms": "^2.1.1", + "semver": "^7.5.4" + }, + "engines": { + "node": ">=12", + "npm": ">=6" + } + }, "node_modules/jsx-ast-utils": { "version": "3.3.5", "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-3.3.5.tgz", @@ -9772,6 +11698,46 @@ "integrity": "sha512-OYTthRfSh55WOItVqwpefPtNt2VdKsq5AnAK6apdtR6yCH8pr0CmSr710J0Mf+WdQy7K/OzMy7K2MgAfdQURDw==", "dev": true }, + "node_modules/jwa": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", + "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", + "dependencies": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/jwks-rsa": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/jwks-rsa/-/jwks-rsa-3.1.0.tgz", + "integrity": "sha512-v7nqlfezb9YfHHzYII3ef2a2j1XnGeSE/bK3WfumaYCqONAIstJbrEGapz4kadScZzEt7zYCN7bucj8C0Mv/Rg==", + "dependencies": { + "@types/express": "^4.17.17", + "@types/jsonwebtoken": "^9.0.2", + "debug": "^4.3.4", + "jose": "^4.14.6", + "limiter": "^1.1.5", + "lru-memoizer": "^2.2.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/jws": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", + "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", + "dependencies": { + "jwa": "^1.4.1", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/jwt-decode": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/jwt-decode/-/jwt-decode-3.1.2.tgz", + "integrity": "sha512-UfpWE/VZn0iP50d8cz9NrZLM9lSWhcJ+0Gt/nm4by88UL+J1SiKN8/5dkjMmbEzwL2CAe+67GsegCbIKtbp75A==" + }, "node_modules/kind-of": { "version": "6.0.3", "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", @@ -9805,6 +11771,49 @@ "language-subtag-registry": "~0.3.2" } }, + "node_modules/lazystream": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/lazystream/-/lazystream-1.0.1.tgz", + "integrity": "sha512-b94GiNHQNy6JNTrt5w6zNyffMrNkXZb3KTkCZJb2V1xaEGCk093vkZ2jk3tpaeP33/OiXC+WvK9AxUebnf5nbw==", + "dependencies": { + "readable-stream": "^2.0.5" + }, + "engines": { + "node": ">= 0.6.3" + } + }, + "node_modules/lazystream/node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" + }, + "node_modules/lazystream/node_modules/readable-stream": { + "version": "2.3.8", + "resolved": 
"https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/lazystream/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, + "node_modules/lazystream/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, "node_modules/lerna": { "version": "6.4.1", "resolved": "https://registry.npmjs.org/lerna/-/lerna-6.4.1.tgz", @@ -10268,6 +12277,11 @@ "node": "^12.13.0 || ^14.15.0 || >=16.0.0" } }, + "node_modules/limiter": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/limiter/-/limiter-1.1.5.tgz", + "integrity": "sha512-FWWMIEOxz3GwUI4Ts/IvgVy6LPvoMPgjMdQ185nN6psJyBJ4yOpzqm695/h5umdLJg2vW3GR5iG11MAkR2AzJA==" + }, "node_modules/lines-and-columns": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", @@ -10316,8 +12330,7 @@ "node_modules/lodash": { "version": "4.17.21", "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" }, "node_modules/lodash.camelcase": { "version": "4.3.0", @@ -10325,12 +12338,47 @@ "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==", "dev": true }, + "node_modules/lodash.clonedeep": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz", + "integrity": "sha512-H5ZhCF25riFd9uB5UCkVKo61m3S/xZk1x4wA6yp/L3RFP6Z/eHH1ymQcGLo7J3GMPfm0V/7m1tryHuGVxpqEBQ==" + }, + "node_modules/lodash.includes": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz", + "integrity": "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==" + }, + "node_modules/lodash.isboolean": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz", + "integrity": "sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==" + }, + "node_modules/lodash.isinteger": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz", + "integrity": "sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA==" + }, "node_modules/lodash.ismatch": { "version": "4.4.0", "resolved": "https://registry.npmjs.org/lodash.ismatch/-/lodash.ismatch-4.4.0.tgz", "integrity": "sha512-fPMfXjGQEV9Xsq/8MTSgUf255gawYRbjwMyDbcvDhXgV7enSZA0hynz6vMPnpAb5iONEzBHBPsT+0zes5Z301g==", "dev": true }, + "node_modules/lodash.isnumber": { + 
"version": "3.0.3", + "resolved": "https://registry.npmjs.org/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz", + "integrity": "sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw==" + }, + "node_modules/lodash.isplainobject": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz", + "integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==" + }, + "node_modules/lodash.isstring": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/lodash.isstring/-/lodash.isstring-4.0.1.tgz", + "integrity": "sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw==" + }, "node_modules/lodash.kebabcase": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/lodash.kebabcase/-/lodash.kebabcase-4.1.1.tgz", @@ -10349,6 +12397,11 @@ "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", "dev": true }, + "node_modules/lodash.once": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", + "integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==" + }, "node_modules/lodash.snakecase": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/lodash.snakecase/-/lodash.snakecase-4.1.1.tgz", @@ -10377,6 +12430,19 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/lower-case": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz", + "integrity": "sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==", + "dependencies": { + "tslib": "^2.0.3" + } + }, + "node_modules/lower-case/node_modules/tslib": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", + "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" + }, "node_modules/lru-cache": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", @@ -10386,6 +12452,31 @@ "yallist": "^3.0.2" } }, + "node_modules/lru-memoizer": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/lru-memoizer/-/lru-memoizer-2.3.0.tgz", + "integrity": "sha512-GXn7gyHAMhO13WSKrIiNfztwxodVsP8IoZ3XfrJV4yH2x0/OeTO/FIaAHTY5YekdGgW94njfuKmyyt1E0mR6Ug==", + "dependencies": { + "lodash.clonedeep": "^4.5.0", + "lru-cache": "6.0.0" + } + }, + "node_modules/lru-memoizer/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/lru-memoizer/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, "node_modules/make-dir": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz", @@ -10662,7 +12753,6 @@ "version": "1.52.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", "integrity": 
"sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", - "dev": true, "engines": { "node": ">= 0.6" } @@ -10671,7 +12761,6 @@ "version": "2.1.35", "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", - "dev": true, "dependencies": { "mime-db": "1.52.0" }, @@ -10701,7 +12790,6 @@ "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dev": true, "dependencies": { "brace-expansion": "^1.1.7" }, @@ -10713,7 +12801,6 @@ "version": "1.2.8", "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", - "dev": true, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -10736,7 +12823,6 @@ "version": "3.3.6", "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", - "dev": true, "dependencies": { "yallist": "^4.0.0" }, @@ -10777,7 +12863,6 @@ "version": "1.0.5", "resolved": "https://registry.npmjs.org/minipass-flush/-/minipass-flush-1.0.5.tgz", "integrity": "sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw==", - "dev": true, "dependencies": { "minipass": "^3.0.0" }, @@ -10799,7 +12884,6 @@ "version": "1.2.4", "resolved": "https://registry.npmjs.org/minipass-pipeline/-/minipass-pipeline-1.2.4.tgz", "integrity": "sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A==", - "dev": true, "dependencies": { "minipass": "^3.0.0" }, @@ -10811,7 +12895,6 @@ "version": "1.0.3", "resolved": "https://registry.npmjs.org/minipass-sized/-/minipass-sized-1.0.3.tgz", "integrity": "sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g==", - "dev": true, "dependencies": { "minipass": "^3.0.0" }, @@ -10822,14 +12905,12 @@ "node_modules/minipass/node_modules/yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, "node_modules/minizlib": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", - "dev": true, "dependencies": { "minipass": "^3.0.0", "yallist": "^4.0.0" @@ -10841,14 +12922,12 @@ "node_modules/minizlib/node_modules/yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, "node_modules/mkdirp": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", "integrity": 
"sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", - "dev": true, "bin": { "mkdirp": "bin/cmd.js" }, @@ -10882,8 +12961,7 @@ "node_modules/ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" }, "node_modules/multimatch": { "version": "5.0.0", @@ -10935,7 +13013,6 @@ "version": "0.6.3", "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", - "dev": true, "engines": { "node": ">= 0.6" } @@ -10946,6 +13023,20 @@ "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==", "dev": true }, + "node_modules/no-case": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz", + "integrity": "sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==", + "dependencies": { + "lower-case": "^2.0.2", + "tslib": "^2.0.3" + } + }, + "node_modules/no-case/node_modules/tslib": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", + "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" + }, "node_modules/node-addon-api": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-3.2.1.tgz", @@ -10956,7 +13047,6 @@ "version": "2.7.0", "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", - "dev": true, "dependencies": { "whatwg-url": "^5.0.0" }, @@ -11075,7 +13165,6 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", - "dev": true, "engines": { "node": ">=0.10.0" } @@ -11779,7 +13868,6 @@ "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "dev": true, "dependencies": { "wrappy": "1" } @@ -11909,7 +13997,6 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", - "dev": true, "dependencies": { "aggregate-error": "^3.0.0" }, @@ -12135,6 +14222,20 @@ "parse-path": "^7.0.0" } }, + "node_modules/pascal-case": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/pascal-case/-/pascal-case-3.1.2.tgz", + "integrity": "sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g==", + "dependencies": { + "no-case": "^3.0.4", + "tslib": "^2.0.3" + } + }, + "node_modules/pascal-case/node_modules/tslib": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", + "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" + }, "node_modules/path-exists": { "version": "4.0.0", "resolved": 
"https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", @@ -12148,7 +14249,6 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", - "dev": true, "engines": { "node": ">=0.10.0" } @@ -12157,7 +14257,6 @@ "version": "3.1.1", "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "dev": true, "engines": { "node": ">=8" } @@ -12168,6 +14267,42 @@ "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", "dev": true }, + "node_modules/path-scurry": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", + "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", + "dependencies": { + "lru-cache": "^10.2.0", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" + }, + "engines": { + "node": ">=16 || 14 >=14.18" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/path-scurry/node_modules/lru-cache": { + "version": "10.2.2", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.2.2.tgz", + "integrity": "sha512-9hp3Vp2/hFQUiIwKo8XCeFVnrg8Pk3TYNPIR7tJADKi5YfcF7vEaK7avFHTlSy3kOKYaJQaalfEo6YuXdceBOQ==", + "engines": { + "node": "14 || >=16.14" + } + }, + "node_modules/path-scurry/node_modules/minipass": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.1.tgz", + "integrity": "sha512-UZ7eQ+h8ywIRAW1hIEl2AqdwzJucU/Kp59+8kkZeSvafXhZjul247BvIJjEVFVeON6d7lM46XX1HXCduKAS8VA==", + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/path-to-regexp": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-6.2.2.tgz", + "integrity": "sha512-GQX3SSMokngb36+whdpRXE+3f9V8UzyAorlYvOGx87ufGHehNTn5lCxrKtLyZ4Yl/wEKnNnr98ZzOwwDZV5ogw==" + }, "node_modules/path-type": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", @@ -12351,11 +14486,18 @@ "node": "^12.13.0 || ^14.15.0 || >=16.0.0" } }, + "node_modules/process": { + "version": "0.11.10", + "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", + "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==", + "engines": { + "node": ">= 0.6.0" + } + }, "node_modules/process-nextick-args": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", - "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", - "dev": true + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" }, "node_modules/promise-all-reject-late": { "version": "1.0.1", @@ -12385,7 +14527,6 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/promise-retry/-/promise-retry-2.0.1.tgz", "integrity": "sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g==", - "dev": true, "dependencies": { "err-code": "^2.0.2", "retry": "^0.12.0" @@ -12489,6 +14630,11 @@ } ] }, + "node_modules/queue-tick": { + "version": "1.0.1", + "resolved": 
"https://registry.npmjs.org/queue-tick/-/queue-tick-1.0.1.tgz", + "integrity": "sha512-kJt5qhMxoszgU/62PLP1CJytzd2NKetjSRnyuj31fDd3Rlcz3fzlFdFLD1SItunPwyqEOkca6GbV612BWfaBag==" + }, "node_modules/quick-lru": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-4.0.1.tgz", @@ -12831,6 +14977,33 @@ "node": ">= 6" } }, + "node_modules/readdir-glob": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/readdir-glob/-/readdir-glob-1.1.3.tgz", + "integrity": "sha512-v05I2k7xN8zXvPD9N+z/uhXPaj0sUFCe2rcWZIpBsqxfP7xXFQ0tipAd/wjj1YxWyWtUS5IDJpOG82JKt2EAVA==", + "dependencies": { + "minimatch": "^5.1.0" + } + }, + "node_modules/readdir-glob/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/readdir-glob/node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/readdir-scoped-modules": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/readdir-scoped-modules/-/readdir-scoped-modules-1.1.0.tgz", @@ -12962,7 +15135,6 @@ "version": "0.12.0", "resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz", "integrity": "sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==", - "dev": true, "engines": { "node": ">= 4" } @@ -13076,7 +15248,6 @@ "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "dev": true, "funding": [ { "type": "github", @@ -13110,13 +15281,17 @@ "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", - "dev": true + "devOptional": true + }, + "node_modules/sax": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.3.0.tgz", + "integrity": "sha512-0s+oAmw9zLl1V1cS9BtZN7JAd0cW5e0QH4W3LWEK6a4LaLEA2OTpGYWDY+6XasBLtz6wkm3u1xRw95mRuJ59WA==" }, "node_modules/semver": { "version": "7.5.4", "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", - "dev": true, "dependencies": { "lru-cache": "^6.0.0" }, @@ -13131,7 +15306,6 @@ "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, "dependencies": { "yallist": "^4.0.0" }, @@ -13142,8 +15316,7 @@ "node_modules/semver/node_modules/yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, 
"node_modules/set-blocking": { "version": "2.0.0", @@ -13167,7 +15340,6 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dev": true, "dependencies": { "shebang-regex": "^3.0.0" }, @@ -13179,7 +15351,6 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "dev": true, "engines": { "node": ">=8" } @@ -13223,7 +15394,6 @@ "version": "4.2.0", "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz", "integrity": "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==", - "dev": true, "engines": { "node": ">= 6.0.0", "npm": ">= 3.0.0" @@ -13233,7 +15403,6 @@ "version": "2.8.3", "resolved": "https://registry.npmjs.org/socks/-/socks-2.8.3.tgz", "integrity": "sha512-l5x7VUUWbjVFbafGLxPWkYsHIhEvmF85tbIeFZWc8ZPtoMyybuEhL7Jye/ooC4/d48FgOjSJXgsF/AJPYCW8Zw==", - "dev": true, "dependencies": { "ip-address": "^9.0.5", "smart-buffer": "^4.2.0" @@ -13398,11 +15567,22 @@ "node": ">=8" } }, + "node_modules/streamx": { + "version": "2.16.1", + "resolved": "https://registry.npmjs.org/streamx/-/streamx-2.16.1.tgz", + "integrity": "sha512-m9QYj6WygWyWa3H1YY69amr4nVgy61xfjys7xO7kviL5rfIEc2naf+ewFiOA+aEJD7y0JO3h2GoiUv4TDwEGzQ==", + "dependencies": { + "fast-fifo": "^1.1.0", + "queue-tick": "^1.0.1" + }, + "optionalDependencies": { + "bare-events": "^2.2.0" + } + }, "node_modules/string_decoder": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - "dev": true, "dependencies": { "safe-buffer": "~5.2.0" } @@ -13424,7 +15604,6 @@ "version": "4.2.3", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dev": true, "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -13434,11 +15613,29 @@ "node": ">=8" } }, + "node_modules/string-width-cjs": { + "name": "string-width", + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/string-width-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + }, "node_modules/string-width/node_modules/emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" }, "node_modules/string.prototype.trim": { "version": "1.2.7", @@ -13489,7 +15686,18 @@ 
"version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi-cjs": { + "name": "strip-ansi", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", "dependencies": { "ansi-regex": "^5.0.1" }, @@ -13619,7 +15827,6 @@ "version": "6.2.1", "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==", - "dev": true, "dependencies": { "chownr": "^2.0.0", "fs-minipass": "^2.0.0", @@ -13652,7 +15859,6 @@ "version": "5.0.0", "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", - "dev": true, "engines": { "node": ">=8" } @@ -13660,8 +15866,7 @@ "node_modules/tar/node_modules/yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, "node_modules/temp-dir": { "version": "1.0.0", @@ -13767,8 +15972,15 @@ "node_modules/tr46": { "version": "0.0.3", "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", - "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", - "dev": true + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" + }, + "node_modules/traverse": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.3.9.tgz", + "integrity": "sha512-iawgk0hLP3SxGKDfnDJf8wTz4p2qImnyihM5Hh/sGvQ3K37dPi/w8sRhdNIxYA1TwFwc5mDhIJq+O0RsvXBKdQ==", + "engines": { + "node": "*" + } }, "node_modules/tree-kill": { "version": "1.2.2", @@ -13849,6 +16061,29 @@ "node": ">=12" } }, + "node_modules/ts-poet": { + "version": "4.15.0", + "resolved": "https://registry.npmjs.org/ts-poet/-/ts-poet-4.15.0.tgz", + "integrity": "sha512-sLLR8yQBvHzi9d4R1F4pd+AzQxBfzOSSjfxiJxQhkUoH5bL7RsAC6wgvtVUQdGqiCsyS9rT6/8X2FI7ipdir5g==", + "dependencies": { + "lodash": "^4.17.15", + "prettier": "^2.5.1" + } + }, + "node_modules/ts-poet/node_modules/prettier": { + "version": "2.8.8", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.8.8.tgz", + "integrity": "sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q==", + "bin": { + "prettier": "bin-prettier.js" + }, + "engines": { + "node": ">=10.13.0" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, "node_modules/tsconfig-paths": { "version": "3.14.2", "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.2.tgz", @@ -13885,8 +16120,7 @@ "node_modules/tslib": { "version": "1.14.1", "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", - "dev": true + 
"integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" }, "node_modules/tsutils": { "version": "3.21.0", @@ -13903,6 +16137,42 @@ "typescript": ">=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta" } }, + "node_modules/tunnel": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", + "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==", + "engines": { + "node": ">=0.6.11 <=0.7.0 || >=0.7.3" + } + }, + "node_modules/twirp-ts": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/twirp-ts/-/twirp-ts-2.5.0.tgz", + "integrity": "sha512-JTKIK5Pf/+3qCrmYDFlqcPPUx+ohEWKBaZy8GL8TmvV2VvC0SXVyNYILO39+GCRbqnuP6hBIF+BVr8ZxRz+6fw==", + "dependencies": { + "@protobuf-ts/plugin-framework": "^2.0.7", + "camel-case": "^4.1.2", + "dot-object": "^2.1.4", + "path-to-regexp": "^6.2.0", + "ts-poet": "^4.5.0", + "yaml": "^1.10.2" + }, + "bin": { + "protoc-gen-twirp_ts": "protoc-gen-twirp_ts" + }, + "peerDependencies": { + "@protobuf-ts/plugin": "^2.5.0", + "ts-proto": "^1.81.3" + }, + "peerDependenciesMeta": { + "@protobuf-ts/plugin": { + "optional": true + }, + "ts-proto": { + "optional": true + } + } + }, "node_modules/type-check": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", @@ -14057,6 +16327,14 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/undici": { + "version": "6.18.1", + "resolved": "https://registry.npmjs.org/undici/-/undici-6.18.1.tgz", + "integrity": "sha512-/0BWqR8rJNRysS5lqVmfc7eeOErcOP4tZpATVjJOojjHZ71gSYVAtFhEmadcIjwMIUehh5NFyKGsXCnXIajtbA==", + "engines": { + "node": ">=18.17" + } + }, "node_modules/unique-filename": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-2.0.1.tgz", @@ -14084,8 +16362,7 @@ "node_modules/universal-user-agent": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz", - "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==", - "dev": true + "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==" }, "node_modules/universalify": { "version": "2.0.0", @@ -14105,6 +16382,26 @@ "node": ">=8" } }, + "node_modules/unzip-stream": { + "version": "0.3.4", + "resolved": "https://registry.npmjs.org/unzip-stream/-/unzip-stream-0.3.4.tgz", + "integrity": "sha512-PyofABPVv+d7fL7GOpusx7eRT9YETY2X04PhwbSipdj6bMxVCFJrr+nm0Mxqbf9hUiTin/UsnuFWBXlDZFy0Cw==", + "dependencies": { + "binary": "^0.3.0", + "mkdirp": "^0.5.1" + } + }, + "node_modules/unzip-stream/node_modules/mkdirp": { + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", + "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", + "dependencies": { + "minimist": "^1.2.6" + }, + "bin": { + "mkdirp": "bin/cmd.js" + } + }, "node_modules/upath": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/upath/-/upath-2.0.1.tgz", @@ -14157,14 +16454,12 @@ "node_modules/util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", - 
"dev": true + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" }, "node_modules/uuid": { "version": "8.3.2", "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", - "dev": true, "bin": { "uuid": "dist/bin/uuid" } @@ -14244,14 +16539,12 @@ "node_modules/webidl-conversions": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", - "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", - "dev": true + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" }, "node_modules/whatwg-url": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", - "dev": true, "dependencies": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" @@ -14261,7 +16554,6 @@ "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, "dependencies": { "isexe": "^2.0.0" }, @@ -14339,11 +16631,27 @@ "url": "https://github.com/chalk/wrap-ansi?sponsor=1" } }, + "node_modules/wrap-ansi-cjs": { + "name": "wrap-ansi", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, "node_modules/wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", - "dev": true + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" }, "node_modules/write-file-atomic": { "version": "4.0.2", @@ -14526,6 +16834,26 @@ "node": ">=6" } }, + "node_modules/xml2js": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.5.0.tgz", + "integrity": "sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA==", + "dependencies": { + "sax": ">=0.6.0", + "xmlbuilder": "~11.0.0" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/xmlbuilder": { + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", + "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==", + "engines": { + "node": ">=4.0" + } + }, "node_modules/xtend": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", @@ -14554,7 +16882,6 @@ "version": "1.10.2", "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", - "dev": true, "engines": { "node": ">= 6" } @@ -14597,6 +16924,57 @@ "funding": { "url": "https://github.com/sponsors/sindresorhus" } + }, + 
"node_modules/zip-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/zip-stream/-/zip-stream-6.0.1.tgz", + "integrity": "sha512-zK7YHHz4ZXpW89AHXUPbQVGKI7uvkd3hzusTdotCg1UxyaVtg0zFJSTfW/Dq5f7OBBVnq6cZIaC8Ti4hb6dtCA==", + "dependencies": { + "archiver-utils": "^5.0.0", + "compress-commons": "^6.0.2", + "readable-stream": "^4.0.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/zip-stream/node_modules/buffer": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", + "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.2.1" + } + }, + "node_modules/zip-stream/node_modules/readable-stream": { + "version": "4.5.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.5.2.tgz", + "integrity": "sha512-yjavECdqeZ3GLXNgRXgeQEdz9fvDDkNKyHnbHRFtOr7/LcfgBcmct7t/ET+HaCTqfh06OzoAxrkN/IfjJBVe+g==", + "dependencies": { + "abort-controller": "^3.0.0", + "buffer": "^6.0.3", + "events": "^3.3.0", + "process": "^0.11.10", + "string_decoder": "^1.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } } } } diff --git a/package.json b/package.json index d394979b..ca30fbc0 100644 --- a/package.json +++ b/package.json @@ -32,5 +32,19 @@ "prettier": "^3.0.0", "ts-jest": "^29.1.1", "typescript": "^5.2.2" + }, + "dependencies": { + "@actions/artifact": "^2.1.7", + "@actions/attest": "^1.2.1", + "@actions/cache": "^3.2.4", + "@actions/core": "^1.10.1", + "@actions/exec": "^1.1.1", + "@actions/github": "^6.0.0", + "@actions/glob": "^0.4.0", + "@actions/http-client": "^2.2.1", + "@actions/io": "^1.1.3", + "@actions/tool-cache": "^2.0.1", + "tunnel": "^0.0.6", + "undici": "^6.18.1" } -} \ No newline at end of file +} diff --git a/packages/attest/package-lock.json b/packages/attest/package-lock.json index 98f20097..5e9f7dad 100644 --- a/packages/attest/package-lock.json +++ b/packages/attest/package-lock.json @@ -1,12 +1,12 @@ { "name": "@actions/attest", - "version": "1.2.0", + "version": "1.2.1", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@actions/attest", - "version": "1.2.0", + "version": "1.2.1", "license": "MIT", "dependencies": { "@actions/core": "^1.10.1", diff --git a/packages/cache/src/internal/cacheHttpClient.ts b/packages/cache/src/internal/cacheHttpClient.ts index 40add448..e05d8815 100644 --- a/packages/cache/src/internal/cacheHttpClient.ts +++ b/packages/cache/src/internal/cacheHttpClient.ts @@ -112,7 +112,7 @@ export async function getCacheEntry( options?.enableCrossOsArchive ) - core.console.log(`We're running from the abyss`); + core.debug(`We're running from the abyss`); const resource = `cache?keys=${encodeURIComponent( keys.join(',') From c8466d1fac68623e803f25c8f9ec46e537036c8f Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Wed, 29 May 2024 08:31:54 -0700 Subject: [PATCH 003/108] Add twirp client --- packages/cache/src/cache.ts | 15 +- .../generated/google/protobuf/timestamp.ts | 290 +++++++ .../src/generated/google/protobuf/wrappers.ts | 753 ++++++++++++++++++ .../src/generated/results/api/v1/blobcache.ts | 474 +++++++++++ 
.../results/api/v1/blobcache.twirp.ts | 433 ++++++++++ .../cache/src/internal/cacheHttpClient.ts | 5 +- .../cache/src/internal/cacheTwirpClient.ts | 197 +++++ packages/cache/src/internal/config.ts | 7 + packages/cache/src/internal/constants.ts | 3 + 9 files changed, 2173 insertions(+), 4 deletions(-) create mode 100644 packages/cache/src/generated/google/protobuf/timestamp.ts create mode 100644 packages/cache/src/generated/google/protobuf/wrappers.ts create mode 100644 packages/cache/src/generated/results/api/v1/blobcache.ts create mode 100644 packages/cache/src/generated/results/api/v1/blobcache.twirp.ts create mode 100644 packages/cache/src/internal/cacheTwirpClient.ts create mode 100644 packages/cache/src/internal/config.ts diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index f7fadb6f..5722c9eb 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -1,9 +1,12 @@ import * as core from '@actions/core' import * as path from 'path' import * as utils from './internal/cacheUtils' +import {CacheUrl} from './internal/constants' import * as cacheHttpClient from './internal/cacheHttpClient' +import * as cacheTwirpClient from './internal/cacheTwirpClient' import {createTar, extractTar, listTar} from './internal/tar' import {DownloadOptions, UploadOptions} from './options' +import {GetCachedBlobRequest} from './generated/results/api/v1/blobcache' export class ValidationError extends Error { constructor(message: string) { @@ -50,7 +53,7 @@ function checkKey(key: string): void { */ export function isFeatureAvailable(): boolean { - return !!process.env['ACTIONS_CACHE_URL'] + return !!CacheUrl } /** @@ -171,6 +174,16 @@ export async function saveCache( checkPaths(paths) checkKey(key) + // TODO: REMOVE ME + // Making a call to the service + const twirpClient = cacheTwirpClient.internalBlobCacheTwirpClient() + const getBlobRequest: GetCachedBlobRequest = { + owner: "link-/test", + keys: ['test-123412631236126'], + } + const getBlobResponse = await twirpClient.GetCachedBlob(getBlobRequest) + core.info(`GetCachedBlobResponse: ${JSON.stringify(getBlobResponse)}`) + const compressionMethod = await utils.getCompressionMethod() let cacheId = -1 diff --git a/packages/cache/src/generated/google/protobuf/timestamp.ts b/packages/cache/src/generated/google/protobuf/timestamp.ts new file mode 100644 index 00000000..3ef86d56 --- /dev/null +++ b/packages/cache/src/generated/google/protobuf/timestamp.ts @@ -0,0 +1,290 @@ +// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies +// @generated from protobuf file "google/protobuf/timestamp.proto" (package "google.protobuf", syntax proto3) +// tslint:disable +// +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +import type { BinaryWriteOptions } from "@protobuf-ts/runtime"; +import type { IBinaryWriter } from "@protobuf-ts/runtime"; +import { WireType } from "@protobuf-ts/runtime"; +import type { BinaryReadOptions } from "@protobuf-ts/runtime"; +import type { IBinaryReader } from "@protobuf-ts/runtime"; +import { UnknownFieldHandler } from "@protobuf-ts/runtime"; +import type { PartialMessage } from "@protobuf-ts/runtime"; +import { reflectionMergePartial } from "@protobuf-ts/runtime"; +import { MESSAGE_TYPE } from "@protobuf-ts/runtime"; +import { typeofJsonValue } from "@protobuf-ts/runtime"; +import type { JsonValue } from "@protobuf-ts/runtime"; +import type { JsonReadOptions } from "@protobuf-ts/runtime"; +import type { JsonWriteOptions } from "@protobuf-ts/runtime"; +import { PbLong } from "@protobuf-ts/runtime"; +import { MessageType } from "@protobuf-ts/runtime"; +/** + * A Timestamp represents a point in time independent of any time zone or local + * calendar, encoded as a count of seconds and fractions of seconds at + * nanosecond resolution. The count is relative to an epoch at UTC midnight on + * January 1, 1970, in the proleptic Gregorian calendar which extends the + * Gregorian calendar backwards to year one. + * + * All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap + * second table is needed for interpretation, using a [24-hour linear + * smear](https://developers.google.com/time/smear). + * + * The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By + * restricting to that range, we ensure that we can convert to and from [RFC + * 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. + * + * # Examples + * + * Example 1: Compute Timestamp from POSIX `time()`. + * + * Timestamp timestamp; + * timestamp.set_seconds(time(NULL)); + * timestamp.set_nanos(0); + * + * Example 2: Compute Timestamp from POSIX `gettimeofday()`. + * + * struct timeval tv; + * gettimeofday(&tv, NULL); + * + * Timestamp timestamp; + * timestamp.set_seconds(tv.tv_sec); + * timestamp.set_nanos(tv.tv_usec * 1000); + * + * Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. + * + * FILETIME ft; + * GetSystemTimeAsFileTime(&ft); + * UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; + * + * // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z + * // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. 
+ * Timestamp timestamp; + * timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); + * timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); + * + * Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. + * + * long millis = System.currentTimeMillis(); + * + * Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) + * .setNanos((int) ((millis % 1000) * 1000000)).build(); + * + * + * Example 5: Compute Timestamp from Java `Instant.now()`. + * + * Instant now = Instant.now(); + * + * Timestamp timestamp = + * Timestamp.newBuilder().setSeconds(now.getEpochSecond()) + * .setNanos(now.getNano()).build(); + * + * + * Example 6: Compute Timestamp from current time in Python. + * + * timestamp = Timestamp() + * timestamp.GetCurrentTime() + * + * # JSON Mapping + * + * In JSON format, the Timestamp type is encoded as a string in the + * [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the + * format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" + * where {year} is always expressed using four digits while {month}, {day}, + * {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional + * seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), + * are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone + * is required. A proto3 JSON serializer should always use UTC (as indicated by + * "Z") when printing the Timestamp type and a proto3 JSON parser should be + * able to accept both UTC and other timezones (as indicated by an offset). + * + * For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past + * 01:30 UTC on January 15, 2017. + * + * In JavaScript, one can convert a Date object to this format using the + * standard + * [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) + * method. In Python, a standard `datetime.datetime` object can be converted + * to this format using + * [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with + * the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use + * the Joda Time's [`ISODateTimeFormat.dateTime()`]( + * http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D + * ) to obtain a formatter capable of generating timestamps in this format. + * + * + * + * @generated from protobuf message google.protobuf.Timestamp + */ +export interface Timestamp { + /** + * Represents seconds of UTC time since Unix epoch + * 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to + * 9999-12-31T23:59:59Z inclusive. + * + * @generated from protobuf field: int64 seconds = 1; + */ + seconds: string; + /** + * Non-negative fractions of a second at nanosecond resolution. Negative + * second values with fractions must still have non-negative nanos values + * that count forward in time. Must be from 0 to 999,999,999 + * inclusive. + * + * @generated from protobuf field: int32 nanos = 2; + */ + nanos: number; +} +// @generated message type with reflection information, may provide speed optimized methods +class Timestamp$Type extends MessageType { + constructor() { + super("google.protobuf.Timestamp", [ + { no: 1, name: "seconds", kind: "scalar", T: 3 /*ScalarType.INT64*/ }, + { no: 2, name: "nanos", kind: "scalar", T: 5 /*ScalarType.INT32*/ } + ]); + } + /** + * Creates a new `Timestamp` for the current time. 
+ */ + now(): Timestamp { + const msg = this.create(); + const ms = Date.now(); + msg.seconds = PbLong.from(Math.floor(ms / 1000)).toString(); + msg.nanos = (ms % 1000) * 1000000; + return msg; + } + /** + * Converts a `Timestamp` to a JavaScript Date. + */ + toDate(message: Timestamp): Date { + return new Date(PbLong.from(message.seconds).toNumber() * 1000 + Math.ceil(message.nanos / 1000000)); + } + /** + * Converts a JavaScript Date to a `Timestamp`. + */ + fromDate(date: Date): Timestamp { + const msg = this.create(); + const ms = date.getTime(); + msg.seconds = PbLong.from(Math.floor(ms / 1000)).toString(); + msg.nanos = (ms % 1000) * 1000000; + return msg; + } + /** + * In JSON format, the `Timestamp` type is encoded as a string + * in the RFC 3339 format. + */ + internalJsonWrite(message: Timestamp, options: JsonWriteOptions): JsonValue { + let ms = PbLong.from(message.seconds).toNumber() * 1000; + if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z")) + throw new Error("Unable to encode Timestamp to JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive."); + if (message.nanos < 0) + throw new Error("Unable to encode invalid Timestamp to JSON. Nanos must not be negative."); + let z = "Z"; + if (message.nanos > 0) { + let nanosStr = (message.nanos + 1000000000).toString().substring(1); + if (nanosStr.substring(3) === "000000") + z = "." + nanosStr.substring(0, 3) + "Z"; + else if (nanosStr.substring(6) === "000") + z = "." + nanosStr.substring(0, 6) + "Z"; + else + z = "." + nanosStr + "Z"; + } + return new Date(ms).toISOString().replace(".000Z", z); + } + /** + * In JSON format, the `Timestamp` type is encoded as a string + * in the RFC 3339 format. + */ + internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Timestamp): Timestamp { + if (typeof json !== "string") + throw new Error("Unable to parse Timestamp from JSON " + typeofJsonValue(json) + "."); + let matches = json.match(/^([0-9]{4})-([0-9]{2})-([0-9]{2})T([0-9]{2}):([0-9]{2}):([0-9]{2})(?:Z|\.([0-9]{3,9})Z|([+-][0-9][0-9]:[0-9][0-9]))$/); + if (!matches) + throw new Error("Unable to parse Timestamp from JSON. Invalid format."); + let ms = Date.parse(matches[1] + "-" + matches[2] + "-" + matches[3] + "T" + matches[4] + ":" + matches[5] + ":" + matches[6] + (matches[8] ? matches[8] : "Z")); + if (Number.isNaN(ms)) + throw new Error("Unable to parse Timestamp from JSON. Invalid value."); + if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z")) + throw new globalThis.Error("Unable to parse Timestamp from JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive."); + if (!target) + target = this.create(); + target.seconds = PbLong.from(ms / 1000).toString(); + target.nanos = 0; + if (matches[7]) + target.nanos = (parseInt("1" + matches[7] + "0".repeat(9 - matches[7].length)) - 1000000000); + return target; + } + create(value?: PartialMessage): Timestamp { + const message = { seconds: "0", nanos: 0 }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Timestamp): Timestamp { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* int64 seconds */ 1: + message.seconds = reader.int64().toString(); + break; + case /* int32 nanos */ 2: + message.nanos = reader.int32(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: Timestamp, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* int64 seconds = 1; */ + if (message.seconds !== "0") + writer.tag(1, WireType.Varint).int64(message.seconds); + /* int32 nanos = 2; */ + if (message.nanos !== 0) + writer.tag(2, WireType.Varint).int32(message.nanos); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message google.protobuf.Timestamp + */ +export const Timestamp = new Timestamp$Type(); diff --git a/packages/cache/src/generated/google/protobuf/wrappers.ts b/packages/cache/src/generated/google/protobuf/wrappers.ts new file mode 100644 index 00000000..d60e2416 --- /dev/null +++ b/packages/cache/src/generated/google/protobuf/wrappers.ts @@ -0,0 +1,753 @@ +// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies +// @generated from protobuf file "google/protobuf/wrappers.proto" (package "google.protobuf", syntax proto3) +// tslint:disable +// +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// +// Wrappers for primitive (non-message) types. 
These types are useful +// for embedding primitives in the `google.protobuf.Any` type and for places +// where we need to distinguish between the absence of a primitive +// typed field and its default value. +// +// These wrappers have no meaningful use within repeated fields as they lack +// the ability to detect presence on individual elements. +// These wrappers have no meaningful use within a map or a oneof since +// individual entries of a map or fields of a oneof can already detect presence. +// +import { ScalarType } from "@protobuf-ts/runtime"; +import { LongType } from "@protobuf-ts/runtime"; +import type { BinaryWriteOptions } from "@protobuf-ts/runtime"; +import type { IBinaryWriter } from "@protobuf-ts/runtime"; +import { WireType } from "@protobuf-ts/runtime"; +import type { BinaryReadOptions } from "@protobuf-ts/runtime"; +import type { IBinaryReader } from "@protobuf-ts/runtime"; +import { UnknownFieldHandler } from "@protobuf-ts/runtime"; +import type { PartialMessage } from "@protobuf-ts/runtime"; +import { reflectionMergePartial } from "@protobuf-ts/runtime"; +import { MESSAGE_TYPE } from "@protobuf-ts/runtime"; +import type { JsonValue } from "@protobuf-ts/runtime"; +import type { JsonReadOptions } from "@protobuf-ts/runtime"; +import type { JsonWriteOptions } from "@protobuf-ts/runtime"; +import { MessageType } from "@protobuf-ts/runtime"; +/** + * Wrapper message for `double`. + * + * The JSON representation for `DoubleValue` is JSON number. + * + * @generated from protobuf message google.protobuf.DoubleValue + */ +export interface DoubleValue { + /** + * The double value. + * + * @generated from protobuf field: double value = 1; + */ + value: number; +} +/** + * Wrapper message for `float`. + * + * The JSON representation for `FloatValue` is JSON number. + * + * @generated from protobuf message google.protobuf.FloatValue + */ +export interface FloatValue { + /** + * The float value. + * + * @generated from protobuf field: float value = 1; + */ + value: number; +} +/** + * Wrapper message for `int64`. + * + * The JSON representation for `Int64Value` is JSON string. + * + * @generated from protobuf message google.protobuf.Int64Value + */ +export interface Int64Value { + /** + * The int64 value. + * + * @generated from protobuf field: int64 value = 1; + */ + value: string; +} +/** + * Wrapper message for `uint64`. + * + * The JSON representation for `UInt64Value` is JSON string. + * + * @generated from protobuf message google.protobuf.UInt64Value + */ +export interface UInt64Value { + /** + * The uint64 value. + * + * @generated from protobuf field: uint64 value = 1; + */ + value: string; +} +/** + * Wrapper message for `int32`. + * + * The JSON representation for `Int32Value` is JSON number. + * + * @generated from protobuf message google.protobuf.Int32Value + */ +export interface Int32Value { + /** + * The int32 value. + * + * @generated from protobuf field: int32 value = 1; + */ + value: number; +} +/** + * Wrapper message for `uint32`. + * + * The JSON representation for `UInt32Value` is JSON number. + * + * @generated from protobuf message google.protobuf.UInt32Value + */ +export interface UInt32Value { + /** + * The uint32 value. + * + * @generated from protobuf field: uint32 value = 1; + */ + value: number; +} +/** + * Wrapper message for `bool`. + * + * The JSON representation for `BoolValue` is JSON `true` and `false`. + * + * @generated from protobuf message google.protobuf.BoolValue + */ +export interface BoolValue { + /** + * The bool value. 
+ * + * @generated from protobuf field: bool value = 1; + */ + value: boolean; +} +/** + * Wrapper message for `string`. + * + * The JSON representation for `StringValue` is JSON string. + * + * @generated from protobuf message google.protobuf.StringValue + */ +export interface StringValue { + /** + * The string value. + * + * @generated from protobuf field: string value = 1; + */ + value: string; +} +/** + * Wrapper message for `bytes`. + * + * The JSON representation for `BytesValue` is JSON string. + * + * @generated from protobuf message google.protobuf.BytesValue + */ +export interface BytesValue { + /** + * The bytes value. + * + * @generated from protobuf field: bytes value = 1; + */ + value: Uint8Array; +} +// @generated message type with reflection information, may provide speed optimized methods +class DoubleValue$Type extends MessageType { + constructor() { + super("google.protobuf.DoubleValue", [ + { no: 1, name: "value", kind: "scalar", T: 1 /*ScalarType.DOUBLE*/ } + ]); + } + /** + * Encode `DoubleValue` to JSON number. + */ + internalJsonWrite(message: DoubleValue, options: JsonWriteOptions): JsonValue { + return this.refJsonWriter.scalar(2, message.value, "value", false, true); + } + /** + * Decode `DoubleValue` from JSON number. + */ + internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: DoubleValue): DoubleValue { + if (!target) + target = this.create(); + target.value = this.refJsonReader.scalar(json, 1, undefined, "value") as number; + return target; + } + create(value?: PartialMessage): DoubleValue { + const message = { value: 0 }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DoubleValue): DoubleValue { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* double value */ 1: + message.value = reader.double(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: DoubleValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* double value = 1; */ + if (message.value !== 0) + writer.tag(1, WireType.Bit64).double(message.value); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message google.protobuf.DoubleValue + */ +export const DoubleValue = new DoubleValue$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class FloatValue$Type extends MessageType { + constructor() { + super("google.protobuf.FloatValue", [ + { no: 1, name: "value", kind: "scalar", T: 2 /*ScalarType.FLOAT*/ } + ]); + } + /** + * Encode `FloatValue` to JSON number. + */ + internalJsonWrite(message: FloatValue, options: JsonWriteOptions): JsonValue { + return this.refJsonWriter.scalar(1, message.value, "value", false, true); + } + /** + * Decode `FloatValue` from JSON number. 
+ */ + internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: FloatValue): FloatValue { + if (!target) + target = this.create(); + target.value = this.refJsonReader.scalar(json, 1, undefined, "value") as number; + return target; + } + create(value?: PartialMessage): FloatValue { + const message = { value: 0 }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FloatValue): FloatValue { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* float value */ 1: + message.value = reader.float(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: FloatValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* float value = 1; */ + if (message.value !== 0) + writer.tag(1, WireType.Bit32).float(message.value); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message google.protobuf.FloatValue + */ +export const FloatValue = new FloatValue$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class Int64Value$Type extends MessageType { + constructor() { + super("google.protobuf.Int64Value", [ + { no: 1, name: "value", kind: "scalar", T: 3 /*ScalarType.INT64*/ } + ]); + } + /** + * Encode `Int64Value` to JSON string. + */ + internalJsonWrite(message: Int64Value, options: JsonWriteOptions): JsonValue { + return this.refJsonWriter.scalar(ScalarType.INT64, message.value, "value", false, true); + } + /** + * Decode `Int64Value` from JSON string. + */ + internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Int64Value): Int64Value { + if (!target) + target = this.create(); + target.value = this.refJsonReader.scalar(json, ScalarType.INT64, LongType.STRING, "value") as any; + return target; + } + create(value?: PartialMessage): Int64Value { + const message = { value: "0" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Int64Value): Int64Value { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* int64 value */ 1: + message.value = reader.int64().toString(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: Int64Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* int64 value = 1; */ + if (message.value !== "0") + writer.tag(1, WireType.Varint).int64(message.value); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message google.protobuf.Int64Value + */ +export const Int64Value = new Int64Value$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class UInt64Value$Type extends MessageType { + constructor() { + super("google.protobuf.UInt64Value", [ + { no: 1, name: "value", kind: "scalar", T: 4 /*ScalarType.UINT64*/ } + ]); + } + /** + * Encode `UInt64Value` to JSON string. + */ + internalJsonWrite(message: UInt64Value, options: JsonWriteOptions): JsonValue { + return this.refJsonWriter.scalar(ScalarType.UINT64, message.value, "value", false, true); + } + /** + * Decode `UInt64Value` from JSON string. + */ + internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: UInt64Value): UInt64Value { + if (!target) + target = this.create(); + target.value = this.refJsonReader.scalar(json, ScalarType.UINT64, LongType.STRING, "value") as any; + return target; + } + create(value?: PartialMessage): UInt64Value { + const message = { value: "0" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: UInt64Value): UInt64Value { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* uint64 value */ 1: + message.value = reader.uint64().toString(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: UInt64Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* uint64 value = 1; */ + if (message.value !== "0") + writer.tag(1, WireType.Varint).uint64(message.value); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message google.protobuf.UInt64Value + */ +export const UInt64Value = new UInt64Value$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class Int32Value$Type extends MessageType { + constructor() { + super("google.protobuf.Int32Value", [ + { no: 1, name: "value", kind: "scalar", T: 5 /*ScalarType.INT32*/ } + ]); + } + /** + * Encode `Int32Value` to JSON string. + */ + internalJsonWrite(message: Int32Value, options: JsonWriteOptions): JsonValue { + return this.refJsonWriter.scalar(5, message.value, "value", false, true); + } + /** + * Decode `Int32Value` from JSON string. 
+ */ + internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Int32Value): Int32Value { + if (!target) + target = this.create(); + target.value = this.refJsonReader.scalar(json, 5, undefined, "value") as number; + return target; + } + create(value?: PartialMessage): Int32Value { + const message = { value: 0 }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Int32Value): Int32Value { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* int32 value */ 1: + message.value = reader.int32(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: Int32Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* int32 value = 1; */ + if (message.value !== 0) + writer.tag(1, WireType.Varint).int32(message.value); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message google.protobuf.Int32Value + */ +export const Int32Value = new Int32Value$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class UInt32Value$Type extends MessageType { + constructor() { + super("google.protobuf.UInt32Value", [ + { no: 1, name: "value", kind: "scalar", T: 13 /*ScalarType.UINT32*/ } + ]); + } + /** + * Encode `UInt32Value` to JSON string. + */ + internalJsonWrite(message: UInt32Value, options: JsonWriteOptions): JsonValue { + return this.refJsonWriter.scalar(13, message.value, "value", false, true); + } + /** + * Decode `UInt32Value` from JSON string. + */ + internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: UInt32Value): UInt32Value { + if (!target) + target = this.create(); + target.value = this.refJsonReader.scalar(json, 13, undefined, "value") as number; + return target; + } + create(value?: PartialMessage): UInt32Value { + const message = { value: 0 }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: UInt32Value): UInt32Value { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* uint32 value */ 1: + message.value = reader.uint32(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: UInt32Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* uint32 value = 1; */ + if (message.value !== 0) + writer.tag(1, WireType.Varint).uint32(message.value); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message google.protobuf.UInt32Value + */ +export const UInt32Value = new UInt32Value$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class BoolValue$Type extends MessageType { + constructor() { + super("google.protobuf.BoolValue", [ + { no: 1, name: "value", kind: "scalar", T: 8 /*ScalarType.BOOL*/ } + ]); + } + /** + * Encode `BoolValue` to JSON bool. + */ + internalJsonWrite(message: BoolValue, options: JsonWriteOptions): JsonValue { + return message.value; + } + /** + * Decode `BoolValue` from JSON bool. + */ + internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: BoolValue): BoolValue { + if (!target) + target = this.create(); + target.value = this.refJsonReader.scalar(json, 8, undefined, "value") as boolean; + return target; + } + create(value?: PartialMessage): BoolValue { + const message = { value: false }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: BoolValue): BoolValue { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* bool value */ 1: + message.value = reader.bool(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: BoolValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* bool value = 1; */ + if (message.value !== false) + writer.tag(1, WireType.Varint).bool(message.value); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message google.protobuf.BoolValue + */ +export const BoolValue = new BoolValue$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class StringValue$Type extends MessageType { + constructor() { + super("google.protobuf.StringValue", [ + { no: 1, name: "value", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + /** + * Encode `StringValue` to JSON string. + */ + internalJsonWrite(message: StringValue, options: JsonWriteOptions): JsonValue { + return message.value; + } + /** + * Decode `StringValue` from JSON string. 
+ */ + internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: StringValue): StringValue { + if (!target) + target = this.create(); + target.value = this.refJsonReader.scalar(json, 9, undefined, "value") as string; + return target; + } + create(value?: PartialMessage): StringValue { + const message = { value: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StringValue): StringValue { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string value */ 1: + message.value = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: StringValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string value = 1; */ + if (message.value !== "") + writer.tag(1, WireType.LengthDelimited).string(message.value); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message google.protobuf.StringValue + */ +export const StringValue = new StringValue$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class BytesValue$Type extends MessageType { + constructor() { + super("google.protobuf.BytesValue", [ + { no: 1, name: "value", kind: "scalar", T: 12 /*ScalarType.BYTES*/ } + ]); + } + /** + * Encode `BytesValue` to JSON string. + */ + internalJsonWrite(message: BytesValue, options: JsonWriteOptions): JsonValue { + return this.refJsonWriter.scalar(12, message.value, "value", false, true); + } + /** + * Decode `BytesValue` from JSON string. + */ + internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: BytesValue): BytesValue { + if (!target) + target = this.create(); + target.value = this.refJsonReader.scalar(json, 12, undefined, "value") as Uint8Array; + return target; + } + create(value?: PartialMessage): BytesValue { + const message = { value: new Uint8Array(0) }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: BytesValue): BytesValue { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* bytes value */ 1: + message.value = reader.bytes(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message: BytesValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
+        /* bytes value = 1; */
+        if (message.value.length)
+            writer.tag(1, WireType.LengthDelimited).bytes(message.value);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message google.protobuf.BytesValue
+ */
+export const BytesValue = new BytesValue$Type();
diff --git a/packages/cache/src/generated/results/api/v1/blobcache.ts b/packages/cache/src/generated/results/api/v1/blobcache.ts
new file mode 100644
index 00000000..41af2886
--- /dev/null
+++ b/packages/cache/src/generated/results/api/v1/blobcache.ts
@@ -0,0 +1,474 @@
+// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
+// @generated from protobuf file "results/api/v1/blobcache.proto" (package "github.actions.results.api.v1", syntax proto3)
+// tslint:disable
+import { ServiceType } from "@protobuf-ts/runtime-rpc";
+import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
+import type { IBinaryWriter } from "@protobuf-ts/runtime";
+import { WireType } from "@protobuf-ts/runtime";
+import type { BinaryReadOptions } from "@protobuf-ts/runtime";
+import type { IBinaryReader } from "@protobuf-ts/runtime";
+import { UnknownFieldHandler } from "@protobuf-ts/runtime";
+import type { PartialMessage } from "@protobuf-ts/runtime";
+import { reflectionMergePartial } from "@protobuf-ts/runtime";
+import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
+import { MessageType } from "@protobuf-ts/runtime";
+import { Timestamp } from "../../../google/protobuf/timestamp";
+/**
+ * @generated from protobuf message github.actions.results.api.v1.GetCachedBlobRequest
+ */
+export interface GetCachedBlobRequest {
+    /**
+     * Owner of the blob(s) to be retrieved
+     *
+     * @generated from protobuf field: string owner = 1;
+     */
+    owner: string;
+    /**
+     * Key(s) of the blob(s) to be retrieved
+     *
+     * @generated from protobuf field: repeated string keys = 2;
+     */
+    keys: string[];
+}
+/**
+ * @generated from protobuf message github.actions.results.api.v1.GetCachedBlobResponse
+ */
+export interface GetCachedBlobResponse {
+    /**
+     * List of blobs that match the requested keys
+     *
+     * @generated from protobuf field: repeated github.actions.results.api.v1.GetCachedBlobResponse.Blob blobs = 1;
+     */
+    blobs: GetCachedBlobResponse_Blob[];
+}
+/**
+ * @generated from protobuf message github.actions.results.api.v1.GetCachedBlobResponse.Blob
+ */
+export interface GetCachedBlobResponse_Blob {
+    /**
+     * Key of the blob
+     *
+     * @generated from protobuf field: string key = 1;
+     */
+    key: string;
+    /**
+     * Download url for the cached blob
+     *
+     * @generated from protobuf field: string signed_url = 2;
+     */
+    signedUrl: string;
+    /**
+     * Version of the cached blob entry
+     *
+     * @generated from protobuf field: int32 version = 3;
+     */
+    version: number;
+    /**
+     * Checksum of the blob
+     *
+     * @generated from protobuf field: string checksum = 4;
+     */
+    checksum: string;
+    /**
+     * Timestamp for when the blob cache entry expires
+     *
+     * @generated from protobuf field: google.protobuf.Timestamp expires_at = 5;
+     */
+    expiresAt?: Timestamp;
+    /**
+     * Timestamp for when the blob cache entry was created
+     *
+     * @generated from protobuf field: google.protobuf.Timestamp created_at = 6;
+     */
+    createdAt?: Timestamp;
+}
+/**
+ * @generated from protobuf message github.actions.results.api.v1.GetCacheBlobUploadURLRequest
+ */
+export interface GetCacheBlobUploadURLRequest {
+    /**
+     * Owner of the blob(s) to be retrieved
+     *
+     * @generated from protobuf field: string organization = 1;
+     */
+    organization: string;
+    /**
+     * Key(s) of the blob(s) to be retrieved
+     *
+     * @generated from protobuf field: repeated string keys = 2;
+     */
+    keys: string[];
+}
+/**
+ * @generated from protobuf message github.actions.results.api.v1.GetCacheBlobUploadURLResponse
+ */
+export interface GetCacheBlobUploadURLResponse {
+    /**
+     * List of upload URLs that match the requested keys
+     *
+     * @generated from protobuf field: repeated github.actions.results.api.v1.GetCacheBlobUploadURLResponse.Url urls = 1;
+     */
+    urls: GetCacheBlobUploadURLResponse_Url[];
+}
+/**
+ * @generated from protobuf message github.actions.results.api.v1.GetCacheBlobUploadURLResponse.Url
+ */
+export interface GetCacheBlobUploadURLResponse_Url {
+    /**
+     * Key of the blob
+     *
+     * @generated from protobuf field: string key = 1;
+     */
+    key: string;
+    /**
+     * URL to the blob
+     *
+     * @generated from protobuf field: string url = 2;
+     */
+    url: string;
+}
+// @generated message type with reflection information, may provide speed optimized methods
+class GetCachedBlobRequest$Type extends MessageType<GetCachedBlobRequest> {
+    constructor() {
+        super("github.actions.results.api.v1.GetCachedBlobRequest", [
+            { no: 1, name: "owner", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 2, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }
+        ]);
+    }
+    create(value?: PartialMessage<GetCachedBlobRequest>): GetCachedBlobRequest {
+        const message = { owner: "", keys: [] };
+        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            reflectionMergePartial(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCachedBlobRequest): GetCachedBlobRequest {
+        let message = target ?? this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* string owner */ 1:
+                    message.owner = reader.string();
+                    break;
+                case /* repeated string keys */ 2:
+                    message.keys.push(reader.string());
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message: GetCachedBlobRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
+        /* string owner = 1; */
+        if (message.owner !== "")
+            writer.tag(1, WireType.LengthDelimited).string(message.owner);
+        /* repeated string keys = 2; */
+        for (let i = 0; i < message.keys.length; i++)
+            writer.tag(2, WireType.LengthDelimited).string(message.keys[i]);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message github.actions.results.api.v1.GetCachedBlobRequest + */ +export const GetCachedBlobRequest = new GetCachedBlobRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class GetCachedBlobResponse$Type extends MessageType { + constructor() { + super("github.actions.results.api.v1.GetCachedBlobResponse", [ + { no: 1, name: "blobs", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => GetCachedBlobResponse_Blob } + ]); + } + create(value?: PartialMessage): GetCachedBlobResponse { + const message = { blobs: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCachedBlobResponse): GetCachedBlobResponse { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* repeated github.actions.results.api.v1.GetCachedBlobResponse.Blob blobs */ 1: + message.blobs.push(GetCachedBlobResponse_Blob.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: GetCachedBlobResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* repeated github.actions.results.api.v1.GetCachedBlobResponse.Blob blobs = 1; */ + for (let i = 0; i < message.blobs.length; i++) + GetCachedBlobResponse_Blob.internalBinaryWrite(message.blobs[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message github.actions.results.api.v1.GetCachedBlobResponse + */ +export const GetCachedBlobResponse = new GetCachedBlobResponse$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class GetCachedBlobResponse_Blob$Type extends MessageType { + constructor() { + super("github.actions.results.api.v1.GetCachedBlobResponse.Blob", [ + { no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "signed_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "version", kind: "scalar", T: 5 /*ScalarType.INT32*/ }, + { no: 4, name: "checksum", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 5, name: "expires_at", kind: "message", T: () => Timestamp }, + { no: 6, name: "created_at", kind: "message", T: () => Timestamp } + ]); + } + create(value?: PartialMessage): GetCachedBlobResponse_Blob { + const message = { key: "", signedUrl: "", version: 0, checksum: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCachedBlobResponse_Blob): GetCachedBlobResponse_Blob { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string key */ 1: + message.key = reader.string(); + break; + case /* string signed_url */ 2: + message.signedUrl = reader.string(); + break; + case /* int32 version */ 3: + message.version = reader.int32(); + break; + case /* string checksum */ 4: + message.checksum = reader.string(); + break; + case /* google.protobuf.Timestamp expires_at */ 5: + message.expiresAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt); + break; + case /* google.protobuf.Timestamp created_at */ 6: + message.createdAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: GetCachedBlobResponse_Blob, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string key = 1; */ + if (message.key !== "") + writer.tag(1, WireType.LengthDelimited).string(message.key); + /* string signed_url = 2; */ + if (message.signedUrl !== "") + writer.tag(2, WireType.LengthDelimited).string(message.signedUrl); + /* int32 version = 3; */ + if (message.version !== 0) + writer.tag(3, WireType.Varint).int32(message.version); + /* string checksum = 4; */ + if (message.checksum !== "") + writer.tag(4, WireType.LengthDelimited).string(message.checksum); + /* google.protobuf.Timestamp expires_at = 5; */ + if (message.expiresAt) + Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(5, WireType.LengthDelimited).fork(), options).join(); + /* google.protobuf.Timestamp created_at = 6; */ + if (message.createdAt) + Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message github.actions.results.api.v1.GetCachedBlobResponse.Blob + */ +export const GetCachedBlobResponse_Blob = new GetCachedBlobResponse_Blob$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class GetCacheBlobUploadURLRequest$Type extends MessageType { + constructor() { + super("github.actions.results.api.v1.GetCacheBlobUploadURLRequest", [ + { no: 1, name: "organization", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): GetCacheBlobUploadURLRequest { + const message = { organization: "", keys: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCacheBlobUploadURLRequest): GetCacheBlobUploadURLRequest { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string organization */ 1: + message.organization = reader.string(); + break; + case /* repeated string keys */ 2: + message.keys.push(reader.string()); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: GetCacheBlobUploadURLRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string organization = 1; */ + if (message.organization !== "") + writer.tag(1, WireType.LengthDelimited).string(message.organization); + /* repeated string keys = 2; */ + for (let i = 0; i < message.keys.length; i++) + writer.tag(2, WireType.LengthDelimited).string(message.keys[i]); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheBlobUploadURLRequest + */ +export const GetCacheBlobUploadURLRequest = new GetCacheBlobUploadURLRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class GetCacheBlobUploadURLResponse$Type extends MessageType { + constructor() { + super("github.actions.results.api.v1.GetCacheBlobUploadURLResponse", [ + { no: 1, name: "urls", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => GetCacheBlobUploadURLResponse_Url } + ]); + } + create(value?: PartialMessage): GetCacheBlobUploadURLResponse { + const message = { urls: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCacheBlobUploadURLResponse): GetCacheBlobUploadURLResponse { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* repeated github.actions.results.api.v1.GetCacheBlobUploadURLResponse.Url urls */ 1: + message.urls.push(GetCacheBlobUploadURLResponse_Url.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: GetCacheBlobUploadURLResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* repeated github.actions.results.api.v1.GetCacheBlobUploadURLResponse.Url urls = 1; */ + for (let i = 0; i < message.urls.length; i++) + GetCacheBlobUploadURLResponse_Url.internalBinaryWrite(message.urls[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheBlobUploadURLResponse + */ +export const GetCacheBlobUploadURLResponse = new GetCacheBlobUploadURLResponse$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class GetCacheBlobUploadURLResponse_Url$Type extends MessageType { + constructor() { + super("github.actions.results.api.v1.GetCacheBlobUploadURLResponse.Url", [ + { no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "url", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): GetCacheBlobUploadURLResponse_Url { + const message = { key: "", url: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCacheBlobUploadURLResponse_Url): GetCacheBlobUploadURLResponse_Url { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string key */ 1: + message.key = reader.string(); + break; + case /* string url */ 2: + message.url = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: GetCacheBlobUploadURLResponse_Url, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string key = 1; */ + if (message.key !== "") + writer.tag(1, WireType.LengthDelimited).string(message.key); + /* string url = 2; */ + if (message.url !== "") + writer.tag(2, WireType.LengthDelimited).string(message.url); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheBlobUploadURLResponse.Url + */ +export const GetCacheBlobUploadURLResponse_Url = new GetCacheBlobUploadURLResponse_Url$Type(); +/** + * @generated ServiceType for protobuf service github.actions.results.api.v1.BlobCacheService + */ +export const BlobCacheService = new ServiceType("github.actions.results.api.v1.BlobCacheService", [ + { name: "GetCachedBlob", options: {}, I: GetCachedBlobRequest, O: GetCachedBlobResponse }, + { name: "GetCacheBlobUploadURL", options: {}, I: GetCacheBlobUploadURLRequest, O: GetCacheBlobUploadURLResponse } +]); diff --git a/packages/cache/src/generated/results/api/v1/blobcache.twirp.ts b/packages/cache/src/generated/results/api/v1/blobcache.twirp.ts new file mode 100644 index 00000000..c2f05e88 --- /dev/null +++ b/packages/cache/src/generated/results/api/v1/blobcache.twirp.ts @@ -0,0 +1,433 @@ +import { + TwirpContext, + TwirpServer, + RouterEvents, + TwirpError, + TwirpErrorCode, + Interceptor, + TwirpContentType, + chainInterceptors, +} from "twirp-ts"; +import { + GetCachedBlobRequest, + GetCachedBlobResponse, + GetCacheBlobUploadURLRequest, + GetCacheBlobUploadURLResponse, +} from "./blobcache"; + +//==================================// +// Client Code // +//==================================// + +interface Rpc { + request( + service: string, + method: string, + contentType: "application/json" | "application/protobuf", + data: object | Uint8Array + ): Promise; +} + +export interface BlobCacheServiceClient { + GetCachedBlob(request: GetCachedBlobRequest): Promise; + GetCacheBlobUploadURL( + request: GetCacheBlobUploadURLRequest + ): Promise; +} + +export class BlobCacheServiceClientJSON implements BlobCacheServiceClient { + private readonly rpc: Rpc; + constructor(rpc: Rpc) { + this.rpc = rpc; + this.GetCachedBlob.bind(this); + this.GetCacheBlobUploadURL.bind(this); + } + GetCachedBlob(request: GetCachedBlobRequest): Promise { + const data = GetCachedBlobRequest.toJson(request, { + useProtoFieldName: true, + emitDefaultValues: false, + }); + const promise = this.rpc.request( + "github.actions.results.api.v1.BlobCacheService", + "GetCachedBlob", + "application/json", + data as object + ); + return promise.then((data) => + GetCachedBlobResponse.fromJson(data as any, { ignoreUnknownFields: true }) + ); + } + + GetCacheBlobUploadURL( + request: 
GetCacheBlobUploadURLRequest + ): Promise { + const data = GetCacheBlobUploadURLRequest.toJson(request, { + useProtoFieldName: true, + emitDefaultValues: false, + }); + const promise = this.rpc.request( + "github.actions.results.api.v1.BlobCacheService", + "GetCacheBlobUploadURL", + "application/json", + data as object + ); + return promise.then((data) => + GetCacheBlobUploadURLResponse.fromJson(data as any, { + ignoreUnknownFields: true, + }) + ); + } +} + +export class BlobCacheServiceClientProtobuf implements BlobCacheServiceClient { + private readonly rpc: Rpc; + constructor(rpc: Rpc) { + this.rpc = rpc; + this.GetCachedBlob.bind(this); + this.GetCacheBlobUploadURL.bind(this); + } + GetCachedBlob(request: GetCachedBlobRequest): Promise { + const data = GetCachedBlobRequest.toBinary(request); + const promise = this.rpc.request( + "github.actions.results.api.v1.BlobCacheService", + "GetCachedBlob", + "application/protobuf", + data + ); + return promise.then((data) => + GetCachedBlobResponse.fromBinary(data as Uint8Array) + ); + } + + GetCacheBlobUploadURL( + request: GetCacheBlobUploadURLRequest + ): Promise { + const data = GetCacheBlobUploadURLRequest.toBinary(request); + const promise = this.rpc.request( + "github.actions.results.api.v1.BlobCacheService", + "GetCacheBlobUploadURL", + "application/protobuf", + data + ); + return promise.then((data) => + GetCacheBlobUploadURLResponse.fromBinary(data as Uint8Array) + ); + } +} + +//==================================// +// Server Code // +//==================================// + +export interface BlobCacheServiceTwirp { + GetCachedBlob( + ctx: T, + request: GetCachedBlobRequest + ): Promise; + GetCacheBlobUploadURL( + ctx: T, + request: GetCacheBlobUploadURLRequest + ): Promise; +} + +export enum BlobCacheServiceMethod { + GetCachedBlob = "GetCachedBlob", + GetCacheBlobUploadURL = "GetCacheBlobUploadURL", +} + +export const BlobCacheServiceMethodList = [ + BlobCacheServiceMethod.GetCachedBlob, + BlobCacheServiceMethod.GetCacheBlobUploadURL, +]; + +export function createBlobCacheServiceServer< + T extends TwirpContext = TwirpContext +>(service: BlobCacheServiceTwirp) { + return new TwirpServer({ + service, + packageName: "github.actions.results.api.v1", + serviceName: "BlobCacheService", + methodList: BlobCacheServiceMethodList, + matchRoute: matchBlobCacheServiceRoute, + }); +} + +function matchBlobCacheServiceRoute( + method: string, + events: RouterEvents +) { + switch (method) { + case "GetCachedBlob": + return async ( + ctx: T, + service: BlobCacheServiceTwirp, + data: Buffer, + interceptors?: Interceptor< + T, + GetCachedBlobRequest, + GetCachedBlobResponse + >[] + ) => { + ctx = { ...ctx, methodName: "GetCachedBlob" }; + await events.onMatch(ctx); + return handleBlobCacheServiceGetCachedBlobRequest( + ctx, + service, + data, + interceptors + ); + }; + case "GetCacheBlobUploadURL": + return async ( + ctx: T, + service: BlobCacheServiceTwirp, + data: Buffer, + interceptors?: Interceptor< + T, + GetCacheBlobUploadURLRequest, + GetCacheBlobUploadURLResponse + >[] + ) => { + ctx = { ...ctx, methodName: "GetCacheBlobUploadURL" }; + await events.onMatch(ctx); + return handleBlobCacheServiceGetCacheBlobUploadURLRequest( + ctx, + service, + data, + interceptors + ); + }; + default: + events.onNotFound(); + const msg = `no handler found`; + throw new TwirpError(TwirpErrorCode.BadRoute, msg); + } +} + +function handleBlobCacheServiceGetCachedBlobRequest< + T extends TwirpContext = TwirpContext +>( + ctx: T, + service: BlobCacheServiceTwirp, + 
data: Buffer, + interceptors?: Interceptor[] +): Promise { + switch (ctx.contentType) { + case TwirpContentType.JSON: + return handleBlobCacheServiceGetCachedBlobJSON( + ctx, + service, + data, + interceptors + ); + case TwirpContentType.Protobuf: + return handleBlobCacheServiceGetCachedBlobProtobuf( + ctx, + service, + data, + interceptors + ); + default: + const msg = "unexpected Content-Type"; + throw new TwirpError(TwirpErrorCode.BadRoute, msg); + } +} + +function handleBlobCacheServiceGetCacheBlobUploadURLRequest< + T extends TwirpContext = TwirpContext +>( + ctx: T, + service: BlobCacheServiceTwirp, + data: Buffer, + interceptors?: Interceptor< + T, + GetCacheBlobUploadURLRequest, + GetCacheBlobUploadURLResponse + >[] +): Promise { + switch (ctx.contentType) { + case TwirpContentType.JSON: + return handleBlobCacheServiceGetCacheBlobUploadURLJSON( + ctx, + service, + data, + interceptors + ); + case TwirpContentType.Protobuf: + return handleBlobCacheServiceGetCacheBlobUploadURLProtobuf( + ctx, + service, + data, + interceptors + ); + default: + const msg = "unexpected Content-Type"; + throw new TwirpError(TwirpErrorCode.BadRoute, msg); + } +} +async function handleBlobCacheServiceGetCachedBlobJSON< + T extends TwirpContext = TwirpContext +>( + ctx: T, + service: BlobCacheServiceTwirp, + data: Buffer, + interceptors?: Interceptor[] +) { + let request: GetCachedBlobRequest; + let response: GetCachedBlobResponse; + + try { + const body = JSON.parse(data.toString() || "{}"); + request = GetCachedBlobRequest.fromJson(body, { + ignoreUnknownFields: true, + }); + } catch (e) { + if (e instanceof Error) { + const msg = "the json request could not be decoded"; + throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + + if (interceptors && interceptors.length > 0) { + const interceptor = chainInterceptors(...interceptors) as Interceptor< + T, + GetCachedBlobRequest, + GetCachedBlobResponse + >; + response = await interceptor(ctx, request!, (ctx, inputReq) => { + return service.GetCachedBlob(ctx, inputReq); + }); + } else { + response = await service.GetCachedBlob(ctx, request!); + } + + return JSON.stringify( + GetCachedBlobResponse.toJson(response, { + useProtoFieldName: true, + emitDefaultValues: false, + }) as string + ); +} + +async function handleBlobCacheServiceGetCacheBlobUploadURLJSON< + T extends TwirpContext = TwirpContext +>( + ctx: T, + service: BlobCacheServiceTwirp, + data: Buffer, + interceptors?: Interceptor< + T, + GetCacheBlobUploadURLRequest, + GetCacheBlobUploadURLResponse + >[] +) { + let request: GetCacheBlobUploadURLRequest; + let response: GetCacheBlobUploadURLResponse; + + try { + const body = JSON.parse(data.toString() || "{}"); + request = GetCacheBlobUploadURLRequest.fromJson(body, { + ignoreUnknownFields: true, + }); + } catch (e) { + if (e instanceof Error) { + const msg = "the json request could not be decoded"; + throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + + if (interceptors && interceptors.length > 0) { + const interceptor = chainInterceptors(...interceptors) as Interceptor< + T, + GetCacheBlobUploadURLRequest, + GetCacheBlobUploadURLResponse + >; + response = await interceptor(ctx, request!, (ctx, inputReq) => { + return service.GetCacheBlobUploadURL(ctx, inputReq); + }); + } else { + response = await service.GetCacheBlobUploadURL(ctx, request!); + } + + return JSON.stringify( + GetCacheBlobUploadURLResponse.toJson(response, { + useProtoFieldName: true, + emitDefaultValues: false, + }) as string + 
); +} +async function handleBlobCacheServiceGetCachedBlobProtobuf< + T extends TwirpContext = TwirpContext +>( + ctx: T, + service: BlobCacheServiceTwirp, + data: Buffer, + interceptors?: Interceptor[] +) { + let request: GetCachedBlobRequest; + let response: GetCachedBlobResponse; + + try { + request = GetCachedBlobRequest.fromBinary(data); + } catch (e) { + if (e instanceof Error) { + const msg = "the protobuf request could not be decoded"; + throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + + if (interceptors && interceptors.length > 0) { + const interceptor = chainInterceptors(...interceptors) as Interceptor< + T, + GetCachedBlobRequest, + GetCachedBlobResponse + >; + response = await interceptor(ctx, request!, (ctx, inputReq) => { + return service.GetCachedBlob(ctx, inputReq); + }); + } else { + response = await service.GetCachedBlob(ctx, request!); + } + + return Buffer.from(GetCachedBlobResponse.toBinary(response)); +} + +async function handleBlobCacheServiceGetCacheBlobUploadURLProtobuf< + T extends TwirpContext = TwirpContext +>( + ctx: T, + service: BlobCacheServiceTwirp, + data: Buffer, + interceptors?: Interceptor< + T, + GetCacheBlobUploadURLRequest, + GetCacheBlobUploadURLResponse + >[] +) { + let request: GetCacheBlobUploadURLRequest; + let response: GetCacheBlobUploadURLResponse; + + try { + request = GetCacheBlobUploadURLRequest.fromBinary(data); + } catch (e) { + if (e instanceof Error) { + const msg = "the protobuf request could not be decoded"; + throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + + if (interceptors && interceptors.length > 0) { + const interceptor = chainInterceptors(...interceptors) as Interceptor< + T, + GetCacheBlobUploadURLRequest, + GetCacheBlobUploadURLResponse + >; + response = await interceptor(ctx, request!, (ctx, inputReq) => { + return service.GetCacheBlobUploadURL(ctx, inputReq); + }); + } else { + response = await service.GetCacheBlobUploadURL(ctx, request!); + } + + return Buffer.from(GetCacheBlobUploadURLResponse.toBinary(response)); +} diff --git a/packages/cache/src/internal/cacheHttpClient.ts b/packages/cache/src/internal/cacheHttpClient.ts index e05d8815..c50ccd4b 100644 --- a/packages/cache/src/internal/cacheHttpClient.ts +++ b/packages/cache/src/internal/cacheHttpClient.ts @@ -36,11 +36,12 @@ import { retryHttpClientResponse, retryTypedResponse } from './requestUtils' +import {CacheUrl} from './constants' const versionSalt = '1.0' function getCacheApiUrl(resource: string): string { - const baseUrl: string = process.env['ACTIONS_CACHE_URL'] || '' + const baseUrl: string = CacheUrl || '' if (!baseUrl) { throw new Error('Cache Service Url not found, unable to restore cache.') } @@ -111,8 +112,6 @@ export async function getCacheEntry( options?.compressionMethod, options?.enableCrossOsArchive ) - - core.debug(`We're running from the abyss`); const resource = `cache?keys=${encodeURIComponent( keys.join(',') diff --git a/packages/cache/src/internal/cacheTwirpClient.ts b/packages/cache/src/internal/cacheTwirpClient.ts new file mode 100644 index 00000000..62f98426 --- /dev/null +++ b/packages/cache/src/internal/cacheTwirpClient.ts @@ -0,0 +1,197 @@ +import {HttpClient, HttpClientResponse, HttpCodes} from '@actions/http-client' +import {BearerCredentialHandler} from '@actions/http-client/lib/auth' +import {info, debug} from '@actions/core' +import {BlobCacheServiceClientJSON} from '../generated/results/api/v1/blobcache.twirp' +import {CacheUrl} from './constants' +import 
{getRuntimeToken} from './config'
+// import {getUserAgentString} from './user-agent'
+// import {NetworkError, UsageError} from './errors'
+
+// The twirp http client must implement this interface
+interface Rpc {
+  request(
+    service: string,
+    method: string,
+    contentType: 'application/json' | 'application/protobuf',
+    data: object | Uint8Array
+  ): Promise<object>
+}
+
+class BlobCacheServiceClient implements Rpc {
+  private httpClient: HttpClient
+  private baseUrl: string
+  private maxAttempts = 5
+  private baseRetryIntervalMilliseconds = 3000
+  private retryMultiplier = 1.5
+
+  constructor(
+    userAgent: string,
+    maxAttempts?: number,
+    baseRetryIntervalMilliseconds?: number,
+    retryMultiplier?: number
+  ) {
+    const token = getRuntimeToken()
+    this.baseUrl = CacheUrl
+    if (maxAttempts) {
+      this.maxAttempts = maxAttempts
+    }
+    if (baseRetryIntervalMilliseconds) {
+      this.baseRetryIntervalMilliseconds = baseRetryIntervalMilliseconds
+    }
+    if (retryMultiplier) {
+      this.retryMultiplier = retryMultiplier
+    }
+
+    this.httpClient = new HttpClient(userAgent, [
+      new BearerCredentialHandler(token)
+    ])
+  }
+
+  // This function satisfies the Rpc interface. It is compatible with the
+  // JSON generated client.
+  async request(
+    service: string,
+    method: string,
+    contentType: 'application/json' | 'application/protobuf',
+    data: object | Uint8Array
+  ): Promise<object> {
+    const url = new URL(`/twirp/${service}/${method}`, this.baseUrl).href
+    debug(`[Request] ${method} ${url}`)
+    const headers = {
+      'Content-Type': contentType
+    }
+    try {
+      const {body} = await this.retryableRequest(async () =>
+        this.httpClient.post(url, JSON.stringify(data), headers)
+      )
+
+      return body
+    } catch (error) {
+      throw new Error(`Failed to ${method}: ${error.message}`)
+    }
+  }
+
+  async retryableRequest(
+    operation: () => Promise<HttpClientResponse>
+  ): Promise<{response: HttpClientResponse; body: object}> {
+    let attempt = 0
+    let errorMessage = ''
+    let rawBody = ''
+    while (attempt < this.maxAttempts) {
+      let isRetryable = false
+
+      try {
+        const response = await operation()
+        const statusCode = response.message.statusCode
+        rawBody = await response.readBody()
+        debug(`[Response] - ${response.message.statusCode}`)
+        debug(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`)
+        const body = JSON.parse(rawBody)
+        debug(`Body: ${JSON.stringify(body, null, 2)}`)
+        if (this.isSuccessStatusCode(statusCode)) {
+          return {response, body}
+        }
+        isRetryable = this.isRetryableHttpStatusCode(statusCode)
+        errorMessage = `Failed request: (${statusCode}) ${response.message.statusMessage}`
+        if (body.msg) {
+          // if (UsageError.isUsageErrorMessage(body.msg)) {
+          //   throw new UsageError()
+          // }
+
+          errorMessage = `${errorMessage}: ${body.msg}`
+        }
+      } catch (error) {
+        if (error instanceof SyntaxError) {
+          debug(`Raw Body: ${rawBody}`)
+        }
+
+        // if (error instanceof UsageError) {
+        //   throw error
+        // }
+
+        // if (NetworkError.isNetworkErrorCode(error?.code)) {
+        //   throw new NetworkError(error?.code)
+        // }
+
+        isRetryable = true
+        errorMessage = error.message
+      }
+
+      if (!isRetryable) {
+        throw new Error(`Received non-retryable error: ${errorMessage}`)
+      }
+
+      if (attempt + 1 === this.maxAttempts) {
+        throw new Error(
+          `Failed to make request after ${this.maxAttempts} attempts: ${errorMessage}`
+        )
+      }
+
+      const retryTimeMilliseconds =
+        this.getExponentialRetryTimeMilliseconds(attempt)
+      info(
+        `Attempt ${attempt + 1} of ${
+          this.maxAttempts
+        } failed with error: ${errorMessage}. 
Retrying request in ${retryTimeMilliseconds} ms...`
+      )
+      await this.sleep(retryTimeMilliseconds)
+      attempt++
+    }
+
+    throw new Error(`Request failed`)
+  }
+
+  isSuccessStatusCode(statusCode?: number): boolean {
+    if (!statusCode) return false
+    return statusCode >= 200 && statusCode < 300
+  }
+
+  isRetryableHttpStatusCode(statusCode?: number): boolean {
+    if (!statusCode) return false
+
+    const retryableStatusCodes = [
+      HttpCodes.BadGateway,
+      HttpCodes.GatewayTimeout,
+      HttpCodes.InternalServerError,
+      HttpCodes.ServiceUnavailable,
+      HttpCodes.TooManyRequests
+    ]
+
+    return retryableStatusCodes.includes(statusCode)
+  }
+
+  async sleep(milliseconds: number): Promise<void> {
+    return new Promise(resolve => setTimeout(resolve, milliseconds))
+  }
+
+  getExponentialRetryTimeMilliseconds(attempt: number): number {
+    if (attempt < 0) {
+      throw new Error('attempt should be a non-negative integer')
+    }
+
+    if (attempt === 0) {
+      return this.baseRetryIntervalMilliseconds
+    }
+
+    const minTime =
+      this.baseRetryIntervalMilliseconds * this.retryMultiplier ** attempt
+    const maxTime = minTime * this.retryMultiplier
+
+    // returns a random number between minTime and maxTime (exclusive)
+    return Math.trunc(Math.random() * (maxTime - minTime) + minTime)
+  }
+}
+
+export function internalBlobCacheTwirpClient(options?: {
+  maxAttempts?: number
+  retryIntervalMs?: number
+  retryMultiplier?: number
+}): BlobCacheServiceClientJSON {
+  const client = new BlobCacheServiceClient(
+    'actions/cache',
+    options?.maxAttempts,
+    options?.retryIntervalMs,
+    options?.retryMultiplier
+  )
+  return new BlobCacheServiceClientJSON(client)
+}
diff --git a/packages/cache/src/internal/config.ts b/packages/cache/src/internal/config.ts
new file mode 100644
index 00000000..959f3f46
--- /dev/null
+++ b/packages/cache/src/internal/config.ts
@@ -0,0 +1,7 @@
+export function getRuntimeToken(): string {
+  const token = process.env['ACTIONS_RUNTIME_TOKEN']
+  if (!token) {
+    throw new Error('Unable to get the ACTIONS_RUNTIME_TOKEN env variable')
+  }
+  return token
+}
diff --git a/packages/cache/src/internal/constants.ts b/packages/cache/src/internal/constants.ts
index 4dbff574..f6e093e0 100644
--- a/packages/cache/src/internal/constants.ts
+++ b/packages/cache/src/internal/constants.ts
@@ -36,3 +36,6 @@ export const SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\S
 export const TarFilename = 'cache.tar'
 
 export const ManifestFilename = 'manifest.txt'
+
+// Cache URL
+export const CacheUrl = process.env['ACTIONS_CACHE_URL_NEXT'] ?? ''

From 66d5434f23e442936b4e9ad151f6d02cd4ca7437 Mon Sep 17 00:00:00 2001
From: Bassem Dghaidi <568794+Link-@users.noreply.github.com>
Date: Mon, 10 Jun 2024 10:56:20 -0700
Subject: [PATCH 004/108] Add v2 cache upload

---
 packages/cache/src/cache.ts                   | 21 +++++++++++++-----
 .../src/internal/v2/upload/upload-cache.ts    | 22 +++++++++++++++++++
 2 files changed, 37 insertions(+), 6 deletions(-)
 create mode 100644 packages/cache/src/internal/v2/upload/upload-cache.ts

diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts
index 5722c9eb..ac704209 100644
--- a/packages/cache/src/cache.ts
+++ b/packages/cache/src/cache.ts
@@ -6,7 +6,8 @@ import * as cacheHttpClient from './internal/cacheHttpClient'
 import * as cacheTwirpClient from './internal/cacheTwirpClient'
 import {createTar, extractTar, listTar} from './internal/tar'
 import {DownloadOptions, UploadOptions} from './options'
-import {GetCachedBlobRequest} from './generated/results/api/v1/blobcache'
+import {GetCacheBlobUploadURLRequest, 
GetCacheBlobUploadURLResponse} from './generated/results/api/v1/blobcache' +import {UploadCache} from './internal/v2/upload/upload-cache' export class ValidationError extends Error { constructor(message: string) { @@ -177,12 +178,12 @@ export async function saveCache( // TODO: REMOVE ME // Making a call to the service const twirpClient = cacheTwirpClient.internalBlobCacheTwirpClient() - const getBlobRequest: GetCachedBlobRequest = { - owner: "link-/test", - keys: ['test-123412631236126'], + const getSignedUploadURL: GetCacheBlobUploadURLRequest = { + organization: "github", + keys: [key], } - const getBlobResponse = await twirpClient.GetCachedBlob(getBlobRequest) - core.info(`GetCachedBlobResponse: ${JSON.stringify(getBlobResponse)}`) + const signedUploadURL: GetCacheBlobUploadURLResponse = await twirpClient.GetCacheBlobUploadURL(getSignedUploadURL) + core.info(`GetCacheBlobUploadURLResponse: ${JSON.stringify(signedUploadURL)}`) const compressionMethod = await utils.getCompressionMethod() let cacheId = -1 @@ -251,6 +252,14 @@ export async function saveCache( core.debug(`Saving Cache (ID: ${cacheId})`) await cacheHttpClient.saveCache(cacheId, archivePath, options) + + // Cache v2 upload + // inputs: + // - getSignedUploadURL + // - archivePath + core.debug(`Saving Cache v2: ${archivePath}`) + await UploadCache(signedUploadURL, archivePath) + } catch (error) { const typedError = error as Error if (typedError.name === ValidationError.name) { diff --git a/packages/cache/src/internal/v2/upload/upload-cache.ts b/packages/cache/src/internal/v2/upload/upload-cache.ts new file mode 100644 index 00000000..d709671f --- /dev/null +++ b/packages/cache/src/internal/v2/upload/upload-cache.ts @@ -0,0 +1,22 @@ +import * as core from '@actions/core' +import {GetCacheBlobUploadURLResponse} from '../../../generated/results/api/v1/blobcache' +import {BlobClient, BlockBlobParallelUploadOptions} from '@azure/storage-blob' + +export async function UploadCache( + uploadURL: GetCacheBlobUploadURLResponse, + archivePath: string, +): Promise<{}> { + core.debug(`Uploading cache to: ${uploadURL}`) + + // Specify data transfer options + const uploadOptions: BlockBlobParallelUploadOptions = { + blockSize: 4 * 1024 * 1024, // 4 MiB max block size + concurrency: 2, // maximum number of parallel transfer workers + maxSingleShotSize: 8 * 1024 * 1024, // 8 MiB initial transfer size + }; + + // Create blob client from container client + const blobClient: BlobClient = new BlobClient(uploadURL.urls[0]) + + return blobClient.uploadFile(archivePath, uploadOptions); +} \ No newline at end of file From dccc3f7f1cd4f7b126b46b48efb4ee368bc80173 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Mon, 10 Jun 2024 11:01:01 -0700 Subject: [PATCH 005/108] Fix upload mechanics --- packages/cache/src/internal/v2/upload/upload-cache.ts | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/packages/cache/src/internal/v2/upload/upload-cache.ts b/packages/cache/src/internal/v2/upload/upload-cache.ts index d709671f..54616fb2 100644 --- a/packages/cache/src/internal/v2/upload/upload-cache.ts +++ b/packages/cache/src/internal/v2/upload/upload-cache.ts @@ -1,6 +1,6 @@ import * as core from '@actions/core' import {GetCacheBlobUploadURLResponse} from '../../../generated/results/api/v1/blobcache' -import {BlobClient, BlockBlobParallelUploadOptions} from '@azure/storage-blob' +import {BlobClient, BlockBlobClient, BlockBlobParallelUploadOptions} from '@azure/storage-blob' export async function 
UploadCache( uploadURL: GetCacheBlobUploadURLResponse, @@ -16,7 +16,9 @@ export async function UploadCache( }; // Create blob client from container client - const blobClient: BlobClient = new BlobClient(uploadURL.urls[0]) + // const blobClient: BlobClient = new BlobClient(uploadURL.urls[0]) + const blobClient: BlobClient = new BlobClient(uploadURL.urls[0].url) + const blockBlobClient: BlockBlobClient = blobClient.getBlockBlobClient() - return blobClient.uploadFile(archivePath, uploadOptions); + return blockBlobClient.uploadFile(archivePath, uploadOptions); } \ No newline at end of file From 6635d12ce0fb266a87a50e6f2925d6b09f39a3a8 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Mon, 10 Jun 2024 11:36:37 -0700 Subject: [PATCH 006/108] Implement cache v2 --- packages/cache/src/internal/v2/upload/upload-cache.ts | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/packages/cache/src/internal/v2/upload/upload-cache.ts b/packages/cache/src/internal/v2/upload/upload-cache.ts index 54616fb2..f735b3fe 100644 --- a/packages/cache/src/internal/v2/upload/upload-cache.ts +++ b/packages/cache/src/internal/v2/upload/upload-cache.ts @@ -6,7 +6,7 @@ export async function UploadCache( uploadURL: GetCacheBlobUploadURLResponse, archivePath: string, ): Promise<{}> { - core.debug(`Uploading cache to: ${uploadURL}`) + core.info(`Uploading ${archivePath} to: ${uploadURL}`) // Specify data transfer options const uploadOptions: BlockBlobParallelUploadOptions = { @@ -15,10 +15,12 @@ export async function UploadCache( maxSingleShotSize: 8 * 1024 * 1024, // 8 MiB initial transfer size }; - // Create blob client from container client // const blobClient: BlobClient = new BlobClient(uploadURL.urls[0]) const blobClient: BlobClient = new BlobClient(uploadURL.urls[0].url) const blockBlobClient: BlockBlobClient = blobClient.getBlockBlobClient() + core.info(`BlobClient: ${blobClient}`) + core.info(`BlobClient: ${blockBlobClient}`) + return blockBlobClient.uploadFile(archivePath, uploadOptions); } \ No newline at end of file From 146143a9b4964ee6407d8534b89f65fdb6098483 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Mon, 10 Jun 2024 11:55:28 -0700 Subject: [PATCH 007/108] Implement cache v2 --- packages/cache/src/cache.ts | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index ac704209..f4338541 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -224,6 +224,15 @@ export async function saveCache( ) } + + // Cache v2 upload + // inputs: + // - getSignedUploadURL + // - archivePath + core.debug(`Saving Cache v2: ${archivePath}`) + await UploadCache(signedUploadURL, archivePath) + + core.debug('Reserving Cache') const reserveCacheResponse = await cacheHttpClient.reserveCache( key, @@ -252,14 +261,6 @@ export async function saveCache( core.debug(`Saving Cache (ID: ${cacheId})`) await cacheHttpClient.saveCache(cacheId, archivePath, options) - - // Cache v2 upload - // inputs: - // - getSignedUploadURL - // - archivePath - core.debug(`Saving Cache v2: ${archivePath}`) - await UploadCache(signedUploadURL, archivePath) - } catch (error) { const typedError = error as Error if (typedError.name === ValidationError.name) { From 9e63a77e7a94856ea303cd458040a37922403375 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Mon, 10 Jun 2024 12:19:52 -0700 Subject: [PATCH 008/108] 
Implement cache v2 --- packages/cache/src/cache.ts | 2 +- packages/cache/src/internal/v2/upload/upload-cache.ts | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index f4338541..e150769f 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -229,7 +229,7 @@ export async function saveCache( // inputs: // - getSignedUploadURL // - archivePath - core.debug(`Saving Cache v2: ${archivePath}`) + core.info(`Saving Cache v2: ${archivePath}`) await UploadCache(signedUploadURL, archivePath) diff --git a/packages/cache/src/internal/v2/upload/upload-cache.ts b/packages/cache/src/internal/v2/upload/upload-cache.ts index f735b3fe..442b89b1 100644 --- a/packages/cache/src/internal/v2/upload/upload-cache.ts +++ b/packages/cache/src/internal/v2/upload/upload-cache.ts @@ -6,7 +6,7 @@ export async function UploadCache( uploadURL: GetCacheBlobUploadURLResponse, archivePath: string, ): Promise<{}> { - core.info(`Uploading ${archivePath} to: ${uploadURL}`) + core.info(`Uploading ${archivePath} to: ${JSON.stringify(uploadURL)}`) // Specify data transfer options const uploadOptions: BlockBlobParallelUploadOptions = { @@ -19,8 +19,8 @@ export async function UploadCache( const blobClient: BlobClient = new BlobClient(uploadURL.urls[0].url) const blockBlobClient: BlockBlobClient = blobClient.getBlockBlobClient() - core.info(`BlobClient: ${blobClient}`) - core.info(`BlobClient: ${blockBlobClient}`) + core.info(`BlobClient: ${JSON.stringify(blobClient)}`) + core.info(`blockBlobClient: ${JSON.stringify(blockBlobClient)}`) return blockBlobClient.uploadFile(archivePath, uploadOptions); } \ No newline at end of file From 5e5faf73fc7a21ed6d486018ce2fc613103f4185 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Thu, 13 Jun 2024 03:16:59 -0700 Subject: [PATCH 009/108] Use zlib for compression --- packages/cache/__tests__/saveCache.test.ts | 75 + packages/cache/package-lock.json | 2357 +++++++++++++++++ packages/cache/package.json | 1 + packages/cache/src/cache.ts | 86 +- packages/cache/src/internal/constants.ts | 5 +- .../cache/src/internal/v2/upload-cache.ts | 130 + .../src/internal/v2/upload/upload-cache.ts | 26 - packages/cache/src/internal/v2/zip.ts | 0 8 files changed, 2632 insertions(+), 48 deletions(-) create mode 100644 packages/cache/src/internal/v2/upload-cache.ts delete mode 100644 packages/cache/src/internal/v2/upload/upload-cache.ts create mode 100644 packages/cache/src/internal/v2/zip.ts diff --git a/packages/cache/__tests__/saveCache.test.ts b/packages/cache/__tests__/saveCache.test.ts index 4d0027be..7597ba8d 100644 --- a/packages/cache/__tests__/saveCache.test.ts +++ b/packages/cache/__tests__/saveCache.test.ts @@ -2,10 +2,14 @@ import * as core from '@actions/core' import * as path from 'path' import {saveCache} from '../src/cache' import * as cacheHttpClient from '../src/internal/cacheHttpClient' +import * as cacheTwirpClient from '../src/internal/cacheTwirpClient' +import {GetCacheBlobUploadURLResponse} from '../src/generated/results/api/v1/blobcache' +import {BlobCacheServiceClientJSON} from '../src/generated/results/api/v1/blobcache.twirp' import * as cacheUtils from '../src/internal/cacheUtils' import {CacheFilename, CompressionMethod} from '../src/internal/constants' import * as tar from '../src/internal/tar' import {TypedResponse} from '@actions/http-client/lib/interfaces' +import * as uploadCache from '../src/internal/v2/upload-cache' import { 
ReserveCacheResponse,
   ITypedResponseWithError
@@ -327,3 +331,74 @@ test('save with non existing path should not save cache', async () => {
     `Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`
   )
 })
+
+test('throwaway test', async () => {
+  const filePath = 'node_modules'
+  const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
+  const cachePaths = [path.resolve(filePath)]
+
+  const cacheSignedURL = `https://container.blob.core.windows.net/cache/${primaryKey}?sig=1234`
+  const getCacheBlobUploadURL: GetCacheBlobUploadURLResponse = {
+    urls: [
+      {
+        key: primaryKey,
+        url: cacheSignedURL,
+      },
+    ]
+  }
+
+  const cacheId = 4
+  const reserveCacheMock = jest
+    .spyOn(cacheHttpClient, 'reserveCache')
+    .mockImplementation(async () => {
+      const response: ITypedResponseWithError<ReserveCacheResponse> = {
+        statusCode: 500,
+        result: {cacheId},
+        headers: {}
+      }
+      return response
+    })
+
+  const getCacheBlobUploadURLMock = jest
+    .spyOn(BlobCacheServiceClientJSON.prototype, 'GetCacheBlobUploadURL')
+    .mockResolvedValue(getCacheBlobUploadURL)
+
+  const uploadCacheMock = jest
+    .spyOn(uploadCache, 'UploadCacheFile')
+    .mockImplementation(async () => {
+      return {
+        status: 200
+      }
+    })
+
+  const createTarMock = jest.spyOn(tar, 'createTar')
+
+  const saveCacheMock = jest.spyOn(cacheHttpClient, 'saveCache')
+  const compression = CompressionMethod.Zstd
+  const getCompressionMock = jest
+    .spyOn(cacheUtils, 'getCompressionMethod')
+    .mockReturnValue(Promise.resolve(compression))
+
+  await uploadCache.UploadCacheFile(getCacheBlobUploadURL, cachePaths[0])
+  await saveCache([filePath], primaryKey)
+
+  expect(reserveCacheMock).toHaveBeenCalledTimes(1)
+  expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, [filePath], {
+    cacheSize: undefined,
+    compressionMethod: compression,
+    enableCrossOsArchive: false
+  })
+  expect(getCacheBlobUploadURLMock).toHaveBeenCalledTimes(1)
+  const archiveFolder = '/foo/bar'
+  const archiveFile = path.join(archiveFolder, CacheFilename.Zstd)
+  expect(createTarMock).toHaveBeenCalledTimes(1)
+  expect(createTarMock).toHaveBeenCalledWith(
+    archiveFolder,
+    cachePaths,
+    compression
+  )
+  expect(uploadCacheMock).toHaveBeenCalledTimes(2)
+  expect(saveCacheMock).toHaveBeenCalledTimes(1)
+  expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile, undefined)
+  expect(getCompressionMock).toHaveBeenCalledTimes(1)
+})
\ No newline at end of file
diff --git a/packages/cache/package-lock.json b/packages/cache/package-lock.json
index 422f2264..f71c3e1b 100644
--- a/packages/cache/package-lock.json
+++ b/packages/cache/package-lock.json
@@ -9,6 +9,7 @@
       "version": "3.2.4",
       "license": "MIT",
       "dependencies": {
+        "@actions/artifact": "^2.1.7",
         "@actions/core": "^1.10.0",
         "@actions/exec": "^1.0.1",
         "@actions/glob": "^0.1.0",
@@ -26,6 +27,27 @@
         "typescript": "^5.2.2"
       }
     },
+    "node_modules/@actions/artifact": {
+      "version": "2.1.7",
+      "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-2.1.7.tgz",
+      "integrity": "sha512-iIFsTPZnb182dBc+Is5v7ZqojC4ydO8Ru4/PD8Azg2diV//fdW3H6biEH/utUlNhwfOuHxZpC/QSQsU5KDEuuw==",
+      "dependencies": {
+        "@actions/core": "^1.10.0",
+        "@actions/github": "^5.1.1",
+        "@actions/http-client": "^2.1.0",
+        "@azure/storage-blob": "^12.15.0",
+        "@octokit/core": "^3.5.1",
+        "@octokit/plugin-request-log": "^1.0.4",
+        "@octokit/plugin-retry": "^3.0.9",
+        "@octokit/request-error": "^5.0.0",
+        "@protobuf-ts/plugin": "^2.2.3-alpha.1",
+        "archiver": "^7.0.1",
+        "crypto": "^1.0.1",
+        "jwt-decode": "^3.1.2",
+
"twirp-ts": "^2.5.0", + "unzip-stream": "^0.3.1" + } + }, "node_modules/@actions/core": { "version": "1.10.0", "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz", @@ -51,6 +73,17 @@ "@actions/io": "^1.0.1" } }, + "node_modules/@actions/github": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/@actions/github/-/github-5.1.1.tgz", + "integrity": "sha512-Nk59rMDoJaV+mHCOJPXuvB1zIbomlKS0dmSIqPGxd0enAXBnOfn4VWF+CGtRCwXZG9Epa54tZA7VIRlJDS8A6g==", + "dependencies": { + "@actions/http-client": "^2.0.1", + "@octokit/core": "^3.6.0", + "@octokit/plugin-paginate-rest": "^2.17.0", + "@octokit/plugin-rest-endpoint-methods": "^5.13.0" + } + }, "node_modules/@actions/glob": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/@actions/glob/-/glob-0.1.2.tgz", @@ -247,6 +280,176 @@ "node": ">=14.0.0" } }, + "node_modules/@isaacs/cliui": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", + "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "dependencies": { + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@octokit/auth-token": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-2.5.0.tgz", + "integrity": "sha512-r5FVUJCOLl19AxiuZD2VRZ/ORjp/4IN98Of6YJoJOkY75CIBuYfmiNHGrDwXr+aLGG55igl9QrxX3hbiXlLb+g==", + "dependencies": { + "@octokit/types": "^6.0.3" + } + }, + "node_modules/@octokit/core": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/@octokit/core/-/core-3.6.0.tgz", + "integrity": "sha512-7RKRKuA4xTjMhY+eG3jthb3hlZCsOwg3rztWh75Xc+ShDWOfDDATWbeZpAHBNRpm4Tv9WgBMOy1zEJYXG6NJ7Q==", + "dependencies": { + "@octokit/auth-token": "^2.4.4", + "@octokit/graphql": "^4.5.8", + "@octokit/request": "^5.6.3", + "@octokit/request-error": "^2.0.5", + "@octokit/types": "^6.0.3", + "before-after-hook": "^2.2.0", + "universal-user-agent": "^6.0.0" + } + }, + "node_modules/@octokit/core/node_modules/@octokit/request-error": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-2.1.0.tgz", + "integrity": "sha512-1VIvgXxs9WHSjicsRwq8PlR2LR2x6DwsJAaFgzdi0JfJoGSO8mYI/cHJQ+9FbN21aa+DrgNLnwObmyeSC8Rmpg==", + "dependencies": { + "@octokit/types": "^6.0.3", + "deprecation": "^2.0.0", + "once": "^1.4.0" + } + }, + "node_modules/@octokit/endpoint": { + "version": "6.0.12", + "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-6.0.12.tgz", + "integrity": "sha512-lF3puPwkQWGfkMClXb4k/eUT/nZKQfxinRWJrdZaJO85Dqwo/G0yOC434Jr2ojwafWJMYqFGFa5ms4jJUgujdA==", + "dependencies": { + "@octokit/types": "^6.0.3", + "is-plain-object": "^5.0.0", + "universal-user-agent": "^6.0.0" + } + }, + "node_modules/@octokit/graphql": { + "version": "4.8.0", + "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-4.8.0.tgz", + "integrity": "sha512-0gv+qLSBLKF0z8TKaSKTsS39scVKF9dbMxJpj3U0vC7wjNWFuIpL/z76Qe2fiuCbDRcJSavkXsVtMS6/dtQQsg==", + "dependencies": { + "@octokit/request": "^5.6.0", + "@octokit/types": "^6.0.3", + "universal-user-agent": "^6.0.0" + } + }, + "node_modules/@octokit/openapi-types": { + "version": "12.11.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-12.11.0.tgz", + "integrity": 
"sha512-VsXyi8peyRq9PqIz/tpqiL2w3w80OgVMwBHltTml3LmVvXiphgeqmY9mvBw9Wu7e0QWk/fqD37ux8yP5uVekyQ==" + }, + "node_modules/@octokit/plugin-paginate-rest": { + "version": "2.21.3", + "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-2.21.3.tgz", + "integrity": "sha512-aCZTEf0y2h3OLbrgKkrfFdjRL6eSOo8komneVQJnYecAxIej7Bafor2xhuDJOIFau4pk0i/P28/XgtbyPF0ZHw==", + "dependencies": { + "@octokit/types": "^6.40.0" + }, + "peerDependencies": { + "@octokit/core": ">=2" + } + }, + "node_modules/@octokit/plugin-request-log": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@octokit/plugin-request-log/-/plugin-request-log-1.0.4.tgz", + "integrity": "sha512-mLUsMkgP7K/cnFEw07kWqXGF5LKrOkD+lhCrKvPHXWDywAwuDUeDwWBpc69XK3pNX0uKiVt8g5z96PJ6z9xCFA==", + "peerDependencies": { + "@octokit/core": ">=3" + } + }, + "node_modules/@octokit/plugin-rest-endpoint-methods": { + "version": "5.16.2", + "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-5.16.2.tgz", + "integrity": "sha512-8QFz29Fg5jDuTPXVtey05BLm7OB+M8fnvE64RNegzX7U+5NUXcOcnpTIK0YfSHBg8gYd0oxIq3IZTe9SfPZiRw==", + "dependencies": { + "@octokit/types": "^6.39.0", + "deprecation": "^2.3.1" + }, + "peerDependencies": { + "@octokit/core": ">=3" + } + }, + "node_modules/@octokit/plugin-retry": { + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/@octokit/plugin-retry/-/plugin-retry-3.0.9.tgz", + "integrity": "sha512-r+fArdP5+TG6l1Rv/C9hVoty6tldw6cE2pRHNGmFPdyfrc696R6JjrQ3d7HdVqGwuzfyrcaLAKD7K8TX8aehUQ==", + "dependencies": { + "@octokit/types": "^6.0.3", + "bottleneck": "^2.15.3" + } + }, + "node_modules/@octokit/request": { + "version": "5.6.3", + "resolved": "https://registry.npmjs.org/@octokit/request/-/request-5.6.3.tgz", + "integrity": "sha512-bFJl0I1KVc9jYTe9tdGGpAMPy32dLBXXo1dS/YwSCTL/2nd9XeHsY616RE3HPXDVk+a+dBuzyz5YdlXwcDTr2A==", + "dependencies": { + "@octokit/endpoint": "^6.0.1", + "@octokit/request-error": "^2.1.0", + "@octokit/types": "^6.16.1", + "is-plain-object": "^5.0.0", + "node-fetch": "^2.6.7", + "universal-user-agent": "^6.0.0" + } + }, + "node_modules/@octokit/request-error": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.0.tgz", + "integrity": "sha512-GETXfE05J0+7H2STzekpKObFe765O5dlAKUTLNGeH+x47z7JjXHfsHKo5z21D/o/IOZTUEI6nyWyR+bZVP/n5Q==", + "dependencies": { + "@octokit/types": "^13.1.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/request-error/node_modules/@octokit/openapi-types": { + "version": "22.2.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz", + "integrity": "sha512-QBhVjcUa9W7Wwhm6DBFu6ZZ+1/t/oYxqc2tp81Pi41YNuJinbFRx8B133qVOrAaBbF7D/m0Et6f9/pZt9Rc+tg==" + }, + "node_modules/@octokit/request-error/node_modules/@octokit/types": { + "version": "13.5.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.5.0.tgz", + "integrity": "sha512-HdqWTf5Z3qwDVlzCrP8UJquMwunpDiMPt5er+QjGzL4hqr/vBVY/MauQgS1xWxCDT1oMx1EULyqxncdCY/NVSQ==", + "dependencies": { + "@octokit/openapi-types": "^22.2.0" + } + }, + "node_modules/@octokit/request/node_modules/@octokit/request-error": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-2.1.0.tgz", + "integrity": "sha512-1VIvgXxs9WHSjicsRwq8PlR2LR2x6DwsJAaFgzdi0JfJoGSO8mYI/cHJQ+9FbN21aa+DrgNLnwObmyeSC8Rmpg==", + "dependencies": { + 
"@octokit/types": "^6.0.3", + "deprecation": "^2.0.0", + "once": "^1.4.0" + } + }, + "node_modules/@octokit/types": { + "version": "6.41.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-6.41.0.tgz", + "integrity": "sha512-eJ2jbzjdijiL3B4PrSQaSjuF2sPEQPVCPzBvTHJD9Nz+9dw2SGH4K4xeQJ77YfTq5bRQ+bD8wT11JbeDPmxmGg==", + "dependencies": { + "@octokit/openapi-types": "^12.11.0" + } + }, "node_modules/@opentelemetry/api": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.4.1.tgz", @@ -255,6 +458,85 @@ "node": ">=8.0.0" } }, + "node_modules/@pkgjs/parseargs": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", + "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", + "optional": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/@protobuf-ts/plugin": { + "version": "2.9.4", + "resolved": "https://registry.npmjs.org/@protobuf-ts/plugin/-/plugin-2.9.4.tgz", + "integrity": "sha512-Db5Laq5T3mc6ERZvhIhkj1rn57/p8gbWiCKxQWbZBBl20wMuqKoHbRw4tuD7FyXi+IkwTToaNVXymv5CY3E8Rw==", + "dependencies": { + "@protobuf-ts/plugin-framework": "^2.9.4", + "@protobuf-ts/protoc": "^2.9.4", + "@protobuf-ts/runtime": "^2.9.4", + "@protobuf-ts/runtime-rpc": "^2.9.4", + "typescript": "^3.9" + }, + "bin": { + "protoc-gen-dump": "bin/protoc-gen-dump", + "protoc-gen-ts": "bin/protoc-gen-ts" + } + }, + "node_modules/@protobuf-ts/plugin-framework": { + "version": "2.9.4", + "resolved": "https://registry.npmjs.org/@protobuf-ts/plugin-framework/-/plugin-framework-2.9.4.tgz", + "integrity": "sha512-9nuX1kjdMliv+Pes8dQCKyVhjKgNNfwxVHg+tx3fLXSfZZRcUHMc1PMwB9/vTvc6gBKt9QGz5ERqSqZc0++E9A==", + "dependencies": { + "@protobuf-ts/runtime": "^2.9.4", + "typescript": "^3.9" + } + }, + "node_modules/@protobuf-ts/plugin-framework/node_modules/typescript": { + "version": "3.9.10", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.10.tgz", + "integrity": "sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q==", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=4.2.0" + } + }, + "node_modules/@protobuf-ts/plugin/node_modules/typescript": { + "version": "3.9.10", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.10.tgz", + "integrity": "sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q==", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=4.2.0" + } + }, + "node_modules/@protobuf-ts/protoc": { + "version": "2.9.4", + "resolved": "https://registry.npmjs.org/@protobuf-ts/protoc/-/protoc-2.9.4.tgz", + "integrity": "sha512-hQX+nOhFtrA+YdAXsXEDrLoGJqXHpgv4+BueYF0S9hy/Jq0VRTVlJS1Etmf4qlMt/WdigEes5LOd/LDzui4GIQ==", + "bin": { + "protoc": "protoc.js" + } + }, + "node_modules/@protobuf-ts/runtime": { + "version": "2.9.4", + "resolved": "https://registry.npmjs.org/@protobuf-ts/runtime/-/runtime-2.9.4.tgz", + "integrity": "sha512-vHRFWtJJB/SiogWDF0ypoKfRIZ41Kq+G9cEFj6Qm1eQaAhJ1LDFvgZ7Ja4tb3iLOQhz0PaoPnnOijF1qmEqTxg==" + }, + "node_modules/@protobuf-ts/runtime-rpc": { + "version": "2.9.4", + "resolved": "https://registry.npmjs.org/@protobuf-ts/runtime-rpc/-/runtime-rpc-2.9.4.tgz", + "integrity": "sha512-y9L9JgnZxXFqH5vD4d7j9duWvIJ7AShyBRoNKJGhu9Q27qIbchfzli66H9RvrQNIFk5ER7z1Twe059WZGqERcA==", + "dependencies": { + "@protobuf-ts/runtime": "^2.9.4" + } + }, 
"node_modules/@types/node": { "version": "20.4.6", "resolved": "https://registry.npmjs.org/@types/node/-/node-20.4.6.tgz", @@ -313,16 +595,129 @@ "node": ">=6.5" } }, + "node_modules/ansi-regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/ansi-styles": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/archiver": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/archiver/-/archiver-7.0.1.tgz", + "integrity": "sha512-ZcbTaIqJOfCc03QwD468Unz/5Ir8ATtvAHsK+FdXbDIbGfihqh9mrvdcYunQzqn4HrvWWaFyaxJhGZagaJJpPQ==", + "dependencies": { + "archiver-utils": "^5.0.2", + "async": "^3.2.4", + "buffer-crc32": "^1.0.0", + "readable-stream": "^4.0.0", + "readdir-glob": "^1.1.2", + "tar-stream": "^3.0.0", + "zip-stream": "^6.0.1" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/archiver-utils": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/archiver-utils/-/archiver-utils-5.0.2.tgz", + "integrity": "sha512-wuLJMmIBQYCsGZgYLTy5FIB2pF6Lfb6cXMSF8Qywwk3t20zWnAi7zLcQFdKQmIB8wyZpY5ER38x08GbwtR2cLA==", + "dependencies": { + "glob": "^10.0.0", + "graceful-fs": "^4.2.0", + "is-stream": "^2.0.1", + "lazystream": "^1.0.0", + "lodash": "^4.17.15", + "normalize-path": "^3.0.0", + "readable-stream": "^4.0.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/async": { + "version": "3.2.5", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.5.tgz", + "integrity": "sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg==" + }, "node_modules/asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" }, + "node_modules/b4a": { + "version": "1.6.6", + "resolved": "https://registry.npmjs.org/b4a/-/b4a-1.6.6.tgz", + "integrity": "sha512-5Tk1HLk6b6ctmjIkAcU/Ujv/1WqiDl0F0JdRCR80VsOcUlHcu7pWeWRlOqQLHfDEsVx9YH/aif5AG4ehoCtTmg==" + }, "node_modules/balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" }, + "node_modules/bare-events": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/bare-events/-/bare-events-2.4.2.tgz", + "integrity": "sha512-qMKFd2qG/36aA4GwvKq8MxnPgCQAmBWmSyLWsJcbn8v03wvIPQ/hG1Ms8bPzndZxMDoHpxez5VOS+gC9Yi24/Q==", + "optional": true + }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + 
"url": "https://feross.org/support" + } + ] + }, + "node_modules/before-after-hook": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz", + "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==" + }, + "node_modules/binary": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/binary/-/binary-0.3.0.tgz", + "integrity": "sha512-D4H1y5KYwpJgK8wk1Cue5LLPgmwHKYSChkbspQg5JtVuR5ulGckxfR62H3AE9UDkdMC8yyXlqYihuz3Aqg2XZg==", + "dependencies": { + "buffers": "~0.1.1", + "chainsaw": "~0.1.0" + }, + "engines": { + "node": "*" + } + }, + "node_modules/bottleneck": { + "version": "2.19.5", + "resolved": "https://registry.npmjs.org/bottleneck/-/bottleneck-2.19.5.tgz", + "integrity": "sha512-VHiNCbI1lKdl44tGrhNfU3lup0Tj/ZBMJB5/2ZbNXRCPuRCO7ed2mgcK4r17y+KB2EfuYuRaVlwNbAeaWGSpbw==" + }, "node_modules/brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", @@ -332,6 +727,81 @@ "concat-map": "0.0.1" } }, + "node_modules/buffer": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", + "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.2.1" + } + }, + "node_modules/buffer-crc32": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-1.0.0.tgz", + "integrity": "sha512-Db1SbgBS/fg/392AblrMJk97KggmvYhr4pB5ZIMTWtaivCPMWLkmb7m21cJvpvgK+J3nsU2CmmixNBZx4vFj/w==", + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/buffers": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/buffers/-/buffers-0.1.1.tgz", + "integrity": "sha512-9q/rDEGSb/Qsvv2qvzIzdluL5k7AaJOTrw23z9reQthrbF7is4CtlT0DXyO1oei2DCp4uojjzQ7igaSHp1kAEQ==", + "engines": { + "node": ">=0.2.0" + } + }, + "node_modules/camel-case": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/camel-case/-/camel-case-4.1.2.tgz", + "integrity": "sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw==", + "dependencies": { + "pascal-case": "^3.1.2", + "tslib": "^2.0.3" + } + }, + "node_modules/chainsaw": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/chainsaw/-/chainsaw-0.1.0.tgz", + "integrity": "sha512-75kWfWt6MEKNC8xYXIdRpDehRYY/tNSgwKaJq+dbbDcxORuVrrQ+SEHoWsniVn9XPYfP4gmdWIeDk/4YNp1rNQ==", + "dependencies": { + "traverse": ">=0.3.0 <0.4" + }, + "engines": { + "node": "*" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, "node_modules/combined-stream": { "version": "1.0.8", "resolved": 
"https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", @@ -343,11 +813,81 @@ "node": ">= 0.8" } }, + "node_modules/commander": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz", + "integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==", + "engines": { + "node": ">= 6" + } + }, + "node_modules/compress-commons": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/compress-commons/-/compress-commons-6.0.2.tgz", + "integrity": "sha512-6FqVXeETqWPoGcfzrXb37E50NP0LXT8kAMu5ooZayhWWdgEY4lBEEcbQNXtkuKQsGduxiIcI4gOTsxTmuq/bSg==", + "dependencies": { + "crc-32": "^1.2.0", + "crc32-stream": "^6.0.0", + "is-stream": "^2.0.1", + "normalize-path": "^3.0.0", + "readable-stream": "^4.0.0" + }, + "engines": { + "node": ">= 14" + } + }, "node_modules/concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" }, + "node_modules/core-util-is": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==" + }, + "node_modules/crc-32": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/crc-32/-/crc-32-1.2.2.tgz", + "integrity": "sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==", + "bin": { + "crc32": "bin/crc32.njs" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/crc32-stream": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/crc32-stream/-/crc32-stream-6.0.0.tgz", + "integrity": "sha512-piICUB6ei4IlTv1+653yq5+KoqfBYmj9bw6LqXoOneTMDXk5nM1qt12mFW1caG3LlJXEKW1Bp0WggEmIfQB34g==", + "dependencies": { + "crc-32": "^1.2.0", + "readable-stream": "^4.0.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/crypto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/crypto/-/crypto-1.0.1.tgz", + "integrity": "sha512-VxBKmeNcqQdiUQUW2Tzq0t377b54N2bMtXO/qiLa+6eRRmmC4qT3D4OnTGoT/U6O9aklQ/jTwbOtRMTTY8G0Ig==", + "deprecated": "This package is no longer supported. It's now a built-in Node module. If you've depended on crypto, you should switch to the one that's built-in." 
+ }, "node_modules/delayed-stream": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", @@ -356,6 +896,53 @@ "node": ">=0.4.0" } }, + "node_modules/deprecation": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", + "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" + }, + "node_modules/dot-object": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/dot-object/-/dot-object-2.1.5.tgz", + "integrity": "sha512-xHF8EP4XH/Ba9fvAF2LDd5O3IITVolerVV6xvkxoM8zlGEiCUrggpAnHyOoKJKCrhvPcGATFAUwIujj7bRG5UA==", + "dependencies": { + "commander": "^6.1.0", + "glob": "^7.1.6" + }, + "bin": { + "dot-object": "bin/dot-object" + } + }, + "node_modules/dot-object/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/eastasianwidth": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==" + }, + "node_modules/emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" + }, "node_modules/event-target-shim": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", @@ -372,6 +959,26 @@ "node": ">=0.8.x" } }, + "node_modules/fast-fifo": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/fast-fifo/-/fast-fifo-1.3.2.tgz", + "integrity": "sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ==" + }, + "node_modules/foreground-child": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.2.0.tgz", + "integrity": "sha512-CrWQNaEl1/6WeZoarcM9LHupTo3RpZO2Pdk1vktwzPiQTsJnAKJmm3TACKeG5UZbWDfaH2AbvYxzP96y0MT7fA==", + "dependencies": { + "cross-spawn": "^7.0.0", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/form-data": { "version": "2.5.1", "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", @@ -385,6 +992,211 @@ "node": ">= 0.12" } }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" + }, + "node_modules/glob": { + "version": "10.4.1", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.1.tgz", + "integrity": "sha512-2jelhlq3E4ho74ZyVLN03oKdAZVUa6UDZzFLVH1H7dnoax+y9qyaq8zBkfDIggjniU19z0wU18y16jMB2eyVIw==", + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^3.1.2", + "minimatch": "^9.0.4", + 
"minipass": "^7.1.2", + "path-scurry": "^1.11.1" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "engines": { + "node": ">=16 || 14 >=14.18" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/glob/node_modules/minimatch": { + "version": "9.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.4.tgz", + "integrity": "sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==" + }, + "node_modules/ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-plain-object": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz", + "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" + }, + "node_modules/jackspeak": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.0.tgz", + "integrity": "sha512-JVYhQnN59LVPFCEcVa2C3CrEKYacvjRfqIQl+h8oi91aLYQVWRYbxjPcv1bUiUy/kLmQaANrYfNMCO3kuEDHfw==", + "dependencies": { + "@isaacs/cliui": "^8.0.2" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "optionalDependencies": { + "@pkgjs/parseargs": "^0.11.0" + } + }, + "node_modules/jwt-decode": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/jwt-decode/-/jwt-decode-3.1.2.tgz", + "integrity": "sha512-UfpWE/VZn0iP50d8cz9NrZLM9lSWhcJ+0Gt/nm4by88UL+J1SiKN8/5dkjMmbEzwL2CAe+67GsegCbIKtbp75A==" + }, + "node_modules/lazystream": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/lazystream/-/lazystream-1.0.1.tgz", + "integrity": "sha512-b94GiNHQNy6JNTrt5w6zNyffMrNkXZb3KTkCZJb2V1xaEGCk093vkZ2jk3tpaeP33/OiXC+WvK9AxUebnf5nbw==", + "dependencies": { + "readable-stream": "^2.0.5" + }, + "engines": { + "node": ">= 0.6.3" + } + }, + "node_modules/lazystream/node_modules/readable-stream": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/lazystream/node_modules/safe-buffer": { + "version": 
"5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, + "node_modules/lazystream/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" + }, + "node_modules/lower-case": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz", + "integrity": "sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==", + "dependencies": { + "tslib": "^2.0.3" + } + }, + "node_modules/lru-cache": { + "version": "10.2.2", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.2.2.tgz", + "integrity": "sha512-9hp3Vp2/hFQUiIwKo8XCeFVnrg8Pk3TYNPIR7tJADKi5YfcF7vEaK7avFHTlSy3kOKYaJQaalfEo6YuXdceBOQ==", + "engines": { + "node": "14 || >=16.14" + } + }, "node_modules/mime-db": { "version": "1.52.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", @@ -415,6 +1227,42 @@ "node": "*" } }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/minipass": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", + "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/mkdirp": { + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", + "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", + "dependencies": { + "minimist": "^1.2.6" + }, + "bin": { + "mkdirp": "bin/cmd.js" + } + }, + "node_modules/no-case": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz", + "integrity": "sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==", + "dependencies": { + "lower-case": "^2.0.2", + "tslib": "^2.0.3" + } + }, "node_modules/node-fetch": { "version": "2.6.12", "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.12.tgz", @@ -434,6 +1282,81 @@ } } }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/pascal-case": { + "version": "3.1.2", + 
"resolved": "https://registry.npmjs.org/pascal-case/-/pascal-case-3.1.2.tgz", + "integrity": "sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g==", + "dependencies": { + "no-case": "^3.0.4", + "tslib": "^2.0.3" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-scurry": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", + "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", + "dependencies": { + "lru-cache": "^10.2.0", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" + }, + "engines": { + "node": ">=16 || 14 >=14.18" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/path-to-regexp": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-6.2.2.tgz", + "integrity": "sha512-GQX3SSMokngb36+whdpRXE+3f9V8UzyAorlYvOGx87ufGHehNTn5lCxrKtLyZ4Yl/wEKnNnr98ZzOwwDZV5ogw==" + }, + "node_modules/prettier": { + "version": "2.8.8", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.8.8.tgz", + "integrity": "sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q==", + "bin": { + "prettier": "bin-prettier.js" + }, + "engines": { + "node": ">=10.13.0" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, "node_modules/process": { "version": "0.11.10", "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", @@ -442,6 +1365,77 @@ "node": ">= 0.6.0" } }, + "node_modules/process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" + }, + "node_modules/queue-tick": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/queue-tick/-/queue-tick-1.0.1.tgz", + "integrity": "sha512-kJt5qhMxoszgU/62PLP1CJytzd2NKetjSRnyuj31fDd3Rlcz3fzlFdFLD1SItunPwyqEOkca6GbV612BWfaBag==" + }, + "node_modules/readable-stream": { + "version": "4.5.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.5.2.tgz", + "integrity": "sha512-yjavECdqeZ3GLXNgRXgeQEdz9fvDDkNKyHnbHRFtOr7/LcfgBcmct7t/ET+HaCTqfh06OzoAxrkN/IfjJBVe+g==", + "dependencies": { + "abort-controller": "^3.0.0", + "buffer": "^6.0.3", + "events": "^3.3.0", + "process": "^0.11.10", + "string_decoder": "^1.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/readdir-glob": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/readdir-glob/-/readdir-glob-1.1.3.tgz", + "integrity": "sha512-v05I2k7xN8zXvPD9N+z/uhXPaj0sUFCe2rcWZIpBsqxfP7xXFQ0tipAd/wjj1YxWyWtUS5IDJpOG82JKt2EAVA==", + "dependencies": { + "minimatch": "^5.1.0" + } + }, + "node_modules/readdir-glob/node_modules/brace-expansion": { + "version": "2.0.1", 
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/readdir-glob/node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, "node_modules/sax": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz", @@ -455,11 +1449,185 @@ "semver": "bin/semver.js" } }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "engines": { + "node": ">=8" + } + }, + "node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/streamx": { + "version": "2.18.0", + "resolved": "https://registry.npmjs.org/streamx/-/streamx-2.18.0.tgz", + "integrity": "sha512-LLUC1TWdjVdn1weXGcSxyTR3T4+acB6tVGXT95y0nGbca4t4o/ng1wKAGTljm9VicuCVLvRlqFYXYy5GwgM7sQ==", + "dependencies": { + "fast-fifo": "^1.3.2", + "queue-tick": "^1.0.1", + "text-decoder": "^1.1.0" + }, + "optionalDependencies": { + "bare-events": "^2.2.0" + } + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/string-width": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/string-width-cjs": { + "name": "string-width", + "version": "4.2.3", + 
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/string-width-cjs/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/string-width-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + }, + "node_modules/string-width-cjs/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/strip-ansi-cjs": { + "name": "strip-ansi", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi-cjs/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/tar-stream": { + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-3.1.7.tgz", + "integrity": "sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ==", + "dependencies": { + "b4a": "^1.6.4", + "fast-fifo": "^1.2.0", + "streamx": "^2.15.0" + } + }, + "node_modules/text-decoder": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/text-decoder/-/text-decoder-1.1.0.tgz", + "integrity": "sha512-TmLJNj6UgX8xcUZo4UDStGQtDiTzF7BzWlzn9g7UWrjkpHr5uJTK1ld16wZ3LXb2vb6jH8qU89dW5whuMdXYdw==", + "dependencies": { + "b4a": "^1.6.4" + } + }, "node_modules/tr46": { "version": "0.0.3", "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" }, + "node_modules/traverse": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.3.9.tgz", + "integrity": "sha512-iawgk0hLP3SxGKDfnDJf8wTz4p2qImnyihM5Hh/sGvQ3K37dPi/w8sRhdNIxYA1TwFwc5mDhIJq+O0RsvXBKdQ==", + "engines": { + "node": "*" + } + }, + 
"node_modules/ts-poet": { + "version": "4.15.0", + "resolved": "https://registry.npmjs.org/ts-poet/-/ts-poet-4.15.0.tgz", + "integrity": "sha512-sLLR8yQBvHzi9d4R1F4pd+AzQxBfzOSSjfxiJxQhkUoH5bL7RsAC6wgvtVUQdGqiCsyS9rT6/8X2FI7ipdir5g==", + "dependencies": { + "lodash": "^4.17.15", + "prettier": "^2.5.1" + } + }, "node_modules/tslib": { "version": "2.6.1", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.1.tgz", @@ -473,6 +1641,34 @@ "node": ">=0.6.11 <=0.7.0 || >=0.7.3" } }, + "node_modules/twirp-ts": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/twirp-ts/-/twirp-ts-2.5.0.tgz", + "integrity": "sha512-JTKIK5Pf/+3qCrmYDFlqcPPUx+ohEWKBaZy8GL8TmvV2VvC0SXVyNYILO39+GCRbqnuP6hBIF+BVr8ZxRz+6fw==", + "dependencies": { + "@protobuf-ts/plugin-framework": "^2.0.7", + "camel-case": "^4.1.2", + "dot-object": "^2.1.4", + "path-to-regexp": "^6.2.0", + "ts-poet": "^4.5.0", + "yaml": "^1.10.2" + }, + "bin": { + "protoc-gen-twirp_ts": "protoc-gen-twirp_ts" + }, + "peerDependencies": { + "@protobuf-ts/plugin": "^2.5.0", + "ts-proto": "^1.81.3" + }, + "peerDependenciesMeta": { + "@protobuf-ts/plugin": { + "optional": true + }, + "ts-proto": { + "optional": true + } + } + }, "node_modules/typescript": { "version": "5.2.2", "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.2.2.tgz", @@ -486,6 +1682,25 @@ "node": ">=14.17" } }, + "node_modules/universal-user-agent": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz", + "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==" + }, + "node_modules/unzip-stream": { + "version": "0.3.4", + "resolved": "https://registry.npmjs.org/unzip-stream/-/unzip-stream-0.3.4.tgz", + "integrity": "sha512-PyofABPVv+d7fL7GOpusx7eRT9YETY2X04PhwbSipdj6bMxVCFJrr+nm0Mxqbf9hUiTin/UsnuFWBXlDZFy0Cw==", + "dependencies": { + "binary": "^0.3.0", + "mkdirp": "^0.5.1" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" + }, "node_modules/uuid": { "version": "3.4.0", "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", @@ -509,6 +1724,109 @@ "webidl-conversions": "^3.0.0" } }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/wrap-ansi": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", + "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "dependencies": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs": { + "name": "wrap-ansi", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dependencies": { + "ansi-styles": "^4.0.0", + 
"string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + }, + "node_modules/wrap-ansi-cjs/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" + }, "node_modules/xml2js": { "version": "0.5.0", "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.5.0.tgz", @@ -528,9 +1846,51 @@ "engines": { "node": ">=4.0" } + }, + "node_modules/yaml": { + "version": "1.10.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", + "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", + "engines": { + "node": ">= 6" + } + }, + "node_modules/zip-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/zip-stream/-/zip-stream-6.0.1.tgz", + "integrity": "sha512-zK7YHHz4ZXpW89AHXUPbQVGKI7uvkd3hzusTdotCg1UxyaVtg0zFJSTfW/Dq5f7OBBVnq6cZIaC8Ti4hb6dtCA==", + "dependencies": { + "archiver-utils": "^5.0.0", + "compress-commons": "^6.0.2", + "readable-stream": "^4.0.0" + }, + "engines": { + "node": ">= 14" + } } }, "dependencies": { + "@actions/artifact": { + "version": "2.1.7", + "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-2.1.7.tgz", + "integrity": "sha512-iIFsTPZnb182dBc+Is5v7ZqojC4ydO8Ru4/PD8Azg2diV//fdW3H6biEH/utUlNhwfOuHxZpC/QSQsU5KDEuuw==", + "requires": { + "@actions/core": "^1.10.0", + "@actions/github": "^5.1.1", + "@actions/http-client": "^2.1.0", + "@azure/storage-blob": "^12.15.0", + "@octokit/core": "^3.5.1", + 
"@octokit/plugin-request-log": "^1.0.4", + "@octokit/plugin-retry": "^3.0.9", + "@octokit/request-error": "^5.0.0", + "@protobuf-ts/plugin": "^2.2.3-alpha.1", + "archiver": "^7.0.1", + "crypto": "^1.0.1", + "jwt-decode": "^3.1.2", + "twirp-ts": "^2.5.0", + "unzip-stream": "^0.3.1" + } + }, "@actions/core": { "version": "1.10.0", "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz", @@ -555,6 +1915,17 @@ "@actions/io": "^1.0.1" } }, + "@actions/github": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/@actions/github/-/github-5.1.1.tgz", + "integrity": "sha512-Nk59rMDoJaV+mHCOJPXuvB1zIbomlKS0dmSIqPGxd0enAXBnOfn4VWF+CGtRCwXZG9Epa54tZA7VIRlJDS8A6g==", + "requires": { + "@actions/http-client": "^2.0.1", + "@octokit/core": "^3.6.0", + "@octokit/plugin-paginate-rest": "^2.17.0", + "@octokit/plugin-rest-endpoint-methods": "^5.13.0" + } + }, "@actions/glob": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/@actions/glob/-/glob-0.1.2.tgz", @@ -719,11 +2090,232 @@ "tslib": "^2.2.0" } }, + "@isaacs/cliui": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", + "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "requires": { + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + } + }, + "@octokit/auth-token": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-2.5.0.tgz", + "integrity": "sha512-r5FVUJCOLl19AxiuZD2VRZ/ORjp/4IN98Of6YJoJOkY75CIBuYfmiNHGrDwXr+aLGG55igl9QrxX3hbiXlLb+g==", + "requires": { + "@octokit/types": "^6.0.3" + } + }, + "@octokit/core": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/@octokit/core/-/core-3.6.0.tgz", + "integrity": "sha512-7RKRKuA4xTjMhY+eG3jthb3hlZCsOwg3rztWh75Xc+ShDWOfDDATWbeZpAHBNRpm4Tv9WgBMOy1zEJYXG6NJ7Q==", + "requires": { + "@octokit/auth-token": "^2.4.4", + "@octokit/graphql": "^4.5.8", + "@octokit/request": "^5.6.3", + "@octokit/request-error": "^2.0.5", + "@octokit/types": "^6.0.3", + "before-after-hook": "^2.2.0", + "universal-user-agent": "^6.0.0" + }, + "dependencies": { + "@octokit/request-error": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-2.1.0.tgz", + "integrity": "sha512-1VIvgXxs9WHSjicsRwq8PlR2LR2x6DwsJAaFgzdi0JfJoGSO8mYI/cHJQ+9FbN21aa+DrgNLnwObmyeSC8Rmpg==", + "requires": { + "@octokit/types": "^6.0.3", + "deprecation": "^2.0.0", + "once": "^1.4.0" + } + } + } + }, + "@octokit/endpoint": { + "version": "6.0.12", + "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-6.0.12.tgz", + "integrity": "sha512-lF3puPwkQWGfkMClXb4k/eUT/nZKQfxinRWJrdZaJO85Dqwo/G0yOC434Jr2ojwafWJMYqFGFa5ms4jJUgujdA==", + "requires": { + "@octokit/types": "^6.0.3", + "is-plain-object": "^5.0.0", + "universal-user-agent": "^6.0.0" + } + }, + "@octokit/graphql": { + "version": "4.8.0", + "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-4.8.0.tgz", + "integrity": "sha512-0gv+qLSBLKF0z8TKaSKTsS39scVKF9dbMxJpj3U0vC7wjNWFuIpL/z76Qe2fiuCbDRcJSavkXsVtMS6/dtQQsg==", + "requires": { + "@octokit/request": "^5.6.0", + "@octokit/types": "^6.0.3", + "universal-user-agent": "^6.0.0" + } + }, + "@octokit/openapi-types": { + "version": "12.11.0", + "resolved": 
"https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-12.11.0.tgz", + "integrity": "sha512-VsXyi8peyRq9PqIz/tpqiL2w3w80OgVMwBHltTml3LmVvXiphgeqmY9mvBw9Wu7e0QWk/fqD37ux8yP5uVekyQ==" + }, + "@octokit/plugin-paginate-rest": { + "version": "2.21.3", + "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-2.21.3.tgz", + "integrity": "sha512-aCZTEf0y2h3OLbrgKkrfFdjRL6eSOo8komneVQJnYecAxIej7Bafor2xhuDJOIFau4pk0i/P28/XgtbyPF0ZHw==", + "requires": { + "@octokit/types": "^6.40.0" + } + }, + "@octokit/plugin-request-log": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@octokit/plugin-request-log/-/plugin-request-log-1.0.4.tgz", + "integrity": "sha512-mLUsMkgP7K/cnFEw07kWqXGF5LKrOkD+lhCrKvPHXWDywAwuDUeDwWBpc69XK3pNX0uKiVt8g5z96PJ6z9xCFA==", + "requires": {} + }, + "@octokit/plugin-rest-endpoint-methods": { + "version": "5.16.2", + "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-5.16.2.tgz", + "integrity": "sha512-8QFz29Fg5jDuTPXVtey05BLm7OB+M8fnvE64RNegzX7U+5NUXcOcnpTIK0YfSHBg8gYd0oxIq3IZTe9SfPZiRw==", + "requires": { + "@octokit/types": "^6.39.0", + "deprecation": "^2.3.1" + } + }, + "@octokit/plugin-retry": { + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/@octokit/plugin-retry/-/plugin-retry-3.0.9.tgz", + "integrity": "sha512-r+fArdP5+TG6l1Rv/C9hVoty6tldw6cE2pRHNGmFPdyfrc696R6JjrQ3d7HdVqGwuzfyrcaLAKD7K8TX8aehUQ==", + "requires": { + "@octokit/types": "^6.0.3", + "bottleneck": "^2.15.3" + } + }, + "@octokit/request": { + "version": "5.6.3", + "resolved": "https://registry.npmjs.org/@octokit/request/-/request-5.6.3.tgz", + "integrity": "sha512-bFJl0I1KVc9jYTe9tdGGpAMPy32dLBXXo1dS/YwSCTL/2nd9XeHsY616RE3HPXDVk+a+dBuzyz5YdlXwcDTr2A==", + "requires": { + "@octokit/endpoint": "^6.0.1", + "@octokit/request-error": "^2.1.0", + "@octokit/types": "^6.16.1", + "is-plain-object": "^5.0.0", + "node-fetch": "^2.6.7", + "universal-user-agent": "^6.0.0" + }, + "dependencies": { + "@octokit/request-error": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-2.1.0.tgz", + "integrity": "sha512-1VIvgXxs9WHSjicsRwq8PlR2LR2x6DwsJAaFgzdi0JfJoGSO8mYI/cHJQ+9FbN21aa+DrgNLnwObmyeSC8Rmpg==", + "requires": { + "@octokit/types": "^6.0.3", + "deprecation": "^2.0.0", + "once": "^1.4.0" + } + } + } + }, + "@octokit/request-error": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.0.tgz", + "integrity": "sha512-GETXfE05J0+7H2STzekpKObFe765O5dlAKUTLNGeH+x47z7JjXHfsHKo5z21D/o/IOZTUEI6nyWyR+bZVP/n5Q==", + "requires": { + "@octokit/types": "^13.1.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + }, + "dependencies": { + "@octokit/openapi-types": { + "version": "22.2.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz", + "integrity": "sha512-QBhVjcUa9W7Wwhm6DBFu6ZZ+1/t/oYxqc2tp81Pi41YNuJinbFRx8B133qVOrAaBbF7D/m0Et6f9/pZt9Rc+tg==" + }, + "@octokit/types": { + "version": "13.5.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.5.0.tgz", + "integrity": "sha512-HdqWTf5Z3qwDVlzCrP8UJquMwunpDiMPt5er+QjGzL4hqr/vBVY/MauQgS1xWxCDT1oMx1EULyqxncdCY/NVSQ==", + "requires": { + "@octokit/openapi-types": "^22.2.0" + } + } + } + }, + "@octokit/types": { + "version": "6.41.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-6.41.0.tgz", + "integrity": 
"sha512-eJ2jbzjdijiL3B4PrSQaSjuF2sPEQPVCPzBvTHJD9Nz+9dw2SGH4K4xeQJ77YfTq5bRQ+bD8wT11JbeDPmxmGg==", + "requires": { + "@octokit/openapi-types": "^12.11.0" + } + }, "@opentelemetry/api": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.4.1.tgz", "integrity": "sha512-O2yRJce1GOc6PAy3QxFM4NzFiWzvScDC1/5ihYBL6BUEVdq0XMWN01sppE+H6bBXbaFYipjwFLEWLg5PaSOThA==" }, + "@pkgjs/parseargs": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", + "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", + "optional": true + }, + "@protobuf-ts/plugin": { + "version": "2.9.4", + "resolved": "https://registry.npmjs.org/@protobuf-ts/plugin/-/plugin-2.9.4.tgz", + "integrity": "sha512-Db5Laq5T3mc6ERZvhIhkj1rn57/p8gbWiCKxQWbZBBl20wMuqKoHbRw4tuD7FyXi+IkwTToaNVXymv5CY3E8Rw==", + "requires": { + "@protobuf-ts/plugin-framework": "^2.9.4", + "@protobuf-ts/protoc": "^2.9.4", + "@protobuf-ts/runtime": "^2.9.4", + "@protobuf-ts/runtime-rpc": "^2.9.4", + "typescript": "^3.9" + }, + "dependencies": { + "typescript": { + "version": "3.9.10", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.10.tgz", + "integrity": "sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q==" + } + } + }, + "@protobuf-ts/plugin-framework": { + "version": "2.9.4", + "resolved": "https://registry.npmjs.org/@protobuf-ts/plugin-framework/-/plugin-framework-2.9.4.tgz", + "integrity": "sha512-9nuX1kjdMliv+Pes8dQCKyVhjKgNNfwxVHg+tx3fLXSfZZRcUHMc1PMwB9/vTvc6gBKt9QGz5ERqSqZc0++E9A==", + "requires": { + "@protobuf-ts/runtime": "^2.9.4", + "typescript": "^3.9" + }, + "dependencies": { + "typescript": { + "version": "3.9.10", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.10.tgz", + "integrity": "sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q==" + } + } + }, + "@protobuf-ts/protoc": { + "version": "2.9.4", + "resolved": "https://registry.npmjs.org/@protobuf-ts/protoc/-/protoc-2.9.4.tgz", + "integrity": "sha512-hQX+nOhFtrA+YdAXsXEDrLoGJqXHpgv4+BueYF0S9hy/Jq0VRTVlJS1Etmf4qlMt/WdigEes5LOd/LDzui4GIQ==" + }, + "@protobuf-ts/runtime": { + "version": "2.9.4", + "resolved": "https://registry.npmjs.org/@protobuf-ts/runtime/-/runtime-2.9.4.tgz", + "integrity": "sha512-vHRFWtJJB/SiogWDF0ypoKfRIZ41Kq+G9cEFj6Qm1eQaAhJ1LDFvgZ7Ja4tb3iLOQhz0PaoPnnOijF1qmEqTxg==" + }, + "@protobuf-ts/runtime-rpc": { + "version": "2.9.4", + "resolved": "https://registry.npmjs.org/@protobuf-ts/runtime-rpc/-/runtime-rpc-2.9.4.tgz", + "integrity": "sha512-y9L9JgnZxXFqH5vD4d7j9duWvIJ7AShyBRoNKJGhu9Q27qIbchfzli66H9RvrQNIFk5ER7z1Twe059WZGqERcA==", + "requires": { + "@protobuf-ts/runtime": "^2.9.4" + } + }, "@types/node": { "version": "20.4.6", "resolved": "https://registry.npmjs.org/@types/node/-/node-20.4.6.tgz", @@ -778,16 +2370,94 @@ "event-target-shim": "^5.0.0" } }, + "ansi-regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==" + }, + "ansi-styles": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==" + }, + "archiver": { + "version": "7.0.1", + "resolved": 
"https://registry.npmjs.org/archiver/-/archiver-7.0.1.tgz", + "integrity": "sha512-ZcbTaIqJOfCc03QwD468Unz/5Ir8ATtvAHsK+FdXbDIbGfihqh9mrvdcYunQzqn4HrvWWaFyaxJhGZagaJJpPQ==", + "requires": { + "archiver-utils": "^5.0.2", + "async": "^3.2.4", + "buffer-crc32": "^1.0.0", + "readable-stream": "^4.0.0", + "readdir-glob": "^1.1.2", + "tar-stream": "^3.0.0", + "zip-stream": "^6.0.1" + } + }, + "archiver-utils": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/archiver-utils/-/archiver-utils-5.0.2.tgz", + "integrity": "sha512-wuLJMmIBQYCsGZgYLTy5FIB2pF6Lfb6cXMSF8Qywwk3t20zWnAi7zLcQFdKQmIB8wyZpY5ER38x08GbwtR2cLA==", + "requires": { + "glob": "^10.0.0", + "graceful-fs": "^4.2.0", + "is-stream": "^2.0.1", + "lazystream": "^1.0.0", + "lodash": "^4.17.15", + "normalize-path": "^3.0.0", + "readable-stream": "^4.0.0" + } + }, + "async": { + "version": "3.2.5", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.5.tgz", + "integrity": "sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg==" + }, "asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" }, + "b4a": { + "version": "1.6.6", + "resolved": "https://registry.npmjs.org/b4a/-/b4a-1.6.6.tgz", + "integrity": "sha512-5Tk1HLk6b6ctmjIkAcU/Ujv/1WqiDl0F0JdRCR80VsOcUlHcu7pWeWRlOqQLHfDEsVx9YH/aif5AG4ehoCtTmg==" + }, "balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" }, + "bare-events": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/bare-events/-/bare-events-2.4.2.tgz", + "integrity": "sha512-qMKFd2qG/36aA4GwvKq8MxnPgCQAmBWmSyLWsJcbn8v03wvIPQ/hG1Ms8bPzndZxMDoHpxez5VOS+gC9Yi24/Q==", + "optional": true + }, + "base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==" + }, + "before-after-hook": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz", + "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==" + }, + "binary": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/binary/-/binary-0.3.0.tgz", + "integrity": "sha512-D4H1y5KYwpJgK8wk1Cue5LLPgmwHKYSChkbspQg5JtVuR5ulGckxfR62H3AE9UDkdMC8yyXlqYihuz3Aqg2XZg==", + "requires": { + "buffers": "~0.1.1", + "chainsaw": "~0.1.0" + } + }, + "bottleneck": { + "version": "2.19.5", + "resolved": "https://registry.npmjs.org/bottleneck/-/bottleneck-2.19.5.tgz", + "integrity": "sha512-VHiNCbI1lKdl44tGrhNfU3lup0Tj/ZBMJB5/2ZbNXRCPuRCO7ed2mgcK4r17y+KB2EfuYuRaVlwNbAeaWGSpbw==" + }, "brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", @@ -797,6 +2467,55 @@ "concat-map": "0.0.1" } }, + "buffer": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", + "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", + "requires": { + "base64-js": "^1.3.1", + "ieee754": "^1.2.1" + } + }, + "buffer-crc32": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-1.0.0.tgz", + "integrity": "sha512-Db1SbgBS/fg/392AblrMJk97KggmvYhr4pB5ZIMTWtaivCPMWLkmb7m21cJvpvgK+J3nsU2CmmixNBZx4vFj/w==" + }, + "buffers": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/buffers/-/buffers-0.1.1.tgz", + "integrity": "sha512-9q/rDEGSb/Qsvv2qvzIzdluL5k7AaJOTrw23z9reQthrbF7is4CtlT0DXyO1oei2DCp4uojjzQ7igaSHp1kAEQ==" + }, + "camel-case": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/camel-case/-/camel-case-4.1.2.tgz", + "integrity": "sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw==", + "requires": { + "pascal-case": "^3.1.2", + "tslib": "^2.0.3" + } + }, + "chainsaw": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/chainsaw/-/chainsaw-0.1.0.tgz", + "integrity": "sha512-75kWfWt6MEKNC8xYXIdRpDehRYY/tNSgwKaJq+dbbDcxORuVrrQ+SEHoWsniVn9XPYfP4gmdWIeDk/4YNp1rNQ==", + "requires": { + "traverse": ">=0.3.0 <0.4" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, "combined-stream": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", @@ -805,16 +2524,106 @@ "delayed-stream": "~1.0.0" } }, + "commander": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz", + "integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==" + }, + "compress-commons": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/compress-commons/-/compress-commons-6.0.2.tgz", + "integrity": "sha512-6FqVXeETqWPoGcfzrXb37E50NP0LXT8kAMu5ooZayhWWdgEY4lBEEcbQNXtkuKQsGduxiIcI4gOTsxTmuq/bSg==", + "requires": { + "crc-32": "^1.2.0", + "crc32-stream": "^6.0.0", + "is-stream": "^2.0.1", + "normalize-path": "^3.0.0", + "readable-stream": "^4.0.0" + } + }, "concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" }, + "core-util-is": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==" + }, + "crc-32": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/crc-32/-/crc-32-1.2.2.tgz", + "integrity": "sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==" + }, + "crc32-stream": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/crc32-stream/-/crc32-stream-6.0.0.tgz", + "integrity": "sha512-piICUB6ei4IlTv1+653yq5+KoqfBYmj9bw6LqXoOneTMDXk5nM1qt12mFW1caG3LlJXEKW1Bp0WggEmIfQB34g==", + "requires": { + "crc-32": "^1.2.0", + "readable-stream": "^4.0.0" + } + }, + "cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": 
"sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "requires": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + } + }, + "crypto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/crypto/-/crypto-1.0.1.tgz", + "integrity": "sha512-VxBKmeNcqQdiUQUW2Tzq0t377b54N2bMtXO/qiLa+6eRRmmC4qT3D4OnTGoT/U6O9aklQ/jTwbOtRMTTY8G0Ig==" + }, "delayed-stream": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==" }, + "deprecation": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", + "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" + }, + "dot-object": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/dot-object/-/dot-object-2.1.5.tgz", + "integrity": "sha512-xHF8EP4XH/Ba9fvAF2LDd5O3IITVolerVV6xvkxoM8zlGEiCUrggpAnHyOoKJKCrhvPcGATFAUwIujj7bRG5UA==", + "requires": { + "commander": "^6.1.0", + "glob": "^7.1.6" + }, + "dependencies": { + "glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + } + } + }, + "eastasianwidth": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==" + }, + "emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" + }, "event-target-shim": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", @@ -825,6 +2634,20 @@ "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==" }, + "fast-fifo": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/fast-fifo/-/fast-fifo-1.3.2.tgz", + "integrity": "sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ==" + }, + "foreground-child": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.2.0.tgz", + "integrity": "sha512-CrWQNaEl1/6WeZoarcM9LHupTo3RpZO2Pdk1vktwzPiQTsJnAKJmm3TACKeG5UZbWDfaH2AbvYxzP96y0MT7fA==", + "requires": { + "cross-spawn": "^7.0.0", + "signal-exit": "^4.0.1" + } + }, "form-data": { "version": "2.5.1", "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", @@ -835,6 +2658,159 @@ "mime-types": "^2.1.12" } }, + "fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" + }, + "glob": { + "version": "10.4.1", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.1.tgz", + "integrity": 
"sha512-2jelhlq3E4ho74ZyVLN03oKdAZVUa6UDZzFLVH1H7dnoax+y9qyaq8zBkfDIggjniU19z0wU18y16jMB2eyVIw==", + "requires": { + "foreground-child": "^3.1.0", + "jackspeak": "^3.1.2", + "minimatch": "^9.0.4", + "minipass": "^7.1.2", + "path-scurry": "^1.11.1" + }, + "dependencies": { + "brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "requires": { + "balanced-match": "^1.0.0" + } + }, + "minimatch": { + "version": "9.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.4.tgz", + "integrity": "sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==", + "requires": { + "brace-expansion": "^2.0.1" + } + } + } + }, + "graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==" + }, + "ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==" + }, + "inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" + }, + "is-plain-object": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz", + "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==" + }, + "is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==" + }, + "isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" + }, + "isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" + }, + "jackspeak": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.0.tgz", + "integrity": "sha512-JVYhQnN59LVPFCEcVa2C3CrEKYacvjRfqIQl+h8oi91aLYQVWRYbxjPcv1bUiUy/kLmQaANrYfNMCO3kuEDHfw==", + "requires": { + "@isaacs/cliui": "^8.0.2", + "@pkgjs/parseargs": "^0.11.0" + } + }, + "jwt-decode": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/jwt-decode/-/jwt-decode-3.1.2.tgz", + "integrity": 
"sha512-UfpWE/VZn0iP50d8cz9NrZLM9lSWhcJ+0Gt/nm4by88UL+J1SiKN8/5dkjMmbEzwL2CAe+67GsegCbIKtbp75A==" + }, + "lazystream": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/lazystream/-/lazystream-1.0.1.tgz", + "integrity": "sha512-b94GiNHQNy6JNTrt5w6zNyffMrNkXZb3KTkCZJb2V1xaEGCk093vkZ2jk3tpaeP33/OiXC+WvK9AxUebnf5nbw==", + "requires": { + "readable-stream": "^2.0.5" + }, + "dependencies": { + "readable-stream": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "requires": { + "safe-buffer": "~5.1.0" + } + } + } + }, + "lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" + }, + "lower-case": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz", + "integrity": "sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==", + "requires": { + "tslib": "^2.0.3" + } + }, + "lru-cache": { + "version": "10.2.2", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.2.2.tgz", + "integrity": "sha512-9hp3Vp2/hFQUiIwKo8XCeFVnrg8Pk3TYNPIR7tJADKi5YfcF7vEaK7avFHTlSy3kOKYaJQaalfEo6YuXdceBOQ==" + }, "mime-db": { "version": "1.52.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", @@ -856,6 +2832,33 @@ "brace-expansion": "^1.1.7" } }, + "minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==" + }, + "minipass": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", + "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==" + }, + "mkdirp": { + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", + "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", + "requires": { + "minimist": "^1.2.6" + } + }, + "no-case": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz", + "integrity": "sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==", + "requires": { + "lower-case": "^2.0.2", + "tslib": "^2.0.3" + } + }, "node-fetch": { "version": "2.6.12", "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.12.tgz", @@ -864,11 +2867,115 @@ "whatwg-url": "^5.0.0" } }, + "normalize-path": { + "version": "3.0.0", + 
"resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==" + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "requires": { + "wrappy": "1" + } + }, + "pascal-case": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/pascal-case/-/pascal-case-3.1.2.tgz", + "integrity": "sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g==", + "requires": { + "no-case": "^3.0.4", + "tslib": "^2.0.3" + } + }, + "path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==" + }, + "path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==" + }, + "path-scurry": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", + "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", + "requires": { + "lru-cache": "^10.2.0", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" + } + }, + "path-to-regexp": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-6.2.2.tgz", + "integrity": "sha512-GQX3SSMokngb36+whdpRXE+3f9V8UzyAorlYvOGx87ufGHehNTn5lCxrKtLyZ4Yl/wEKnNnr98ZzOwwDZV5ogw==" + }, + "prettier": { + "version": "2.8.8", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.8.8.tgz", + "integrity": "sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q==" + }, "process": { "version": "0.11.10", "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==" }, + "process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" + }, + "queue-tick": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/queue-tick/-/queue-tick-1.0.1.tgz", + "integrity": "sha512-kJt5qhMxoszgU/62PLP1CJytzd2NKetjSRnyuj31fDd3Rlcz3fzlFdFLD1SItunPwyqEOkca6GbV612BWfaBag==" + }, + "readable-stream": { + "version": "4.5.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.5.2.tgz", + "integrity": "sha512-yjavECdqeZ3GLXNgRXgeQEdz9fvDDkNKyHnbHRFtOr7/LcfgBcmct7t/ET+HaCTqfh06OzoAxrkN/IfjJBVe+g==", + "requires": { + "abort-controller": "^3.0.0", + "buffer": "^6.0.3", + "events": "^3.3.0", + "process": "^0.11.10", + "string_decoder": "^1.3.0" + } + }, + "readdir-glob": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/readdir-glob/-/readdir-glob-1.1.3.tgz", + "integrity": "sha512-v05I2k7xN8zXvPD9N+z/uhXPaj0sUFCe2rcWZIpBsqxfP7xXFQ0tipAd/wjj1YxWyWtUS5IDJpOG82JKt2EAVA==", + "requires": { + "minimatch": "^5.1.0" + }, + "dependencies": { + "brace-expansion": { + "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "requires": { + "balanced-match": "^1.0.0" + } + }, + "minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "requires": { + "brace-expansion": "^2.0.1" + } + } + } + }, + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" + }, "sax": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz", @@ -879,11 +2986,143 @@ "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==" }, + "shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "requires": { + "shebang-regex": "^3.0.0" + } + }, + "shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==" + }, + "signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==" + }, + "streamx": { + "version": "2.18.0", + "resolved": "https://registry.npmjs.org/streamx/-/streamx-2.18.0.tgz", + "integrity": "sha512-LLUC1TWdjVdn1weXGcSxyTR3T4+acB6tVGXT95y0nGbca4t4o/ng1wKAGTljm9VicuCVLvRlqFYXYy5GwgM7sQ==", + "requires": { + "bare-events": "^2.2.0", + "fast-fifo": "^1.3.2", + "queue-tick": "^1.0.1", + "text-decoder": "^1.1.0" + } + }, + "string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "requires": { + "safe-buffer": "~5.2.0" + } + }, + "string-width": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "requires": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + } + }, + "string-width-cjs": { + "version": "npm:string-width@4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "dependencies": { + "ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" + }, + "emoji-regex": { + "version": "8.0.0", + "resolved": 
"https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + }, + "strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "requires": { + "ansi-regex": "^5.0.1" + } + } + } + }, + "strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "requires": { + "ansi-regex": "^6.0.1" + } + }, + "strip-ansi-cjs": { + "version": "npm:strip-ansi@6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "requires": { + "ansi-regex": "^5.0.1" + }, + "dependencies": { + "ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" + } + } + }, + "tar-stream": { + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-3.1.7.tgz", + "integrity": "sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ==", + "requires": { + "b4a": "^1.6.4", + "fast-fifo": "^1.2.0", + "streamx": "^2.15.0" + } + }, + "text-decoder": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/text-decoder/-/text-decoder-1.1.0.tgz", + "integrity": "sha512-TmLJNj6UgX8xcUZo4UDStGQtDiTzF7BzWlzn9g7UWrjkpHr5uJTK1ld16wZ3LXb2vb6jH8qU89dW5whuMdXYdw==", + "requires": { + "b4a": "^1.6.4" + } + }, "tr46": { "version": "0.0.3", "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" }, + "traverse": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.3.9.tgz", + "integrity": "sha512-iawgk0hLP3SxGKDfnDJf8wTz4p2qImnyihM5Hh/sGvQ3K37dPi/w8sRhdNIxYA1TwFwc5mDhIJq+O0RsvXBKdQ==" + }, + "ts-poet": { + "version": "4.15.0", + "resolved": "https://registry.npmjs.org/ts-poet/-/ts-poet-4.15.0.tgz", + "integrity": "sha512-sLLR8yQBvHzi9d4R1F4pd+AzQxBfzOSSjfxiJxQhkUoH5bL7RsAC6wgvtVUQdGqiCsyS9rT6/8X2FI7ipdir5g==", + "requires": { + "lodash": "^4.17.15", + "prettier": "^2.5.1" + } + }, "tslib": { "version": "2.6.1", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.1.tgz", @@ -894,12 +3133,44 @@ "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==" }, + "twirp-ts": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/twirp-ts/-/twirp-ts-2.5.0.tgz", + "integrity": "sha512-JTKIK5Pf/+3qCrmYDFlqcPPUx+ohEWKBaZy8GL8TmvV2VvC0SXVyNYILO39+GCRbqnuP6hBIF+BVr8ZxRz+6fw==", + "requires": { + "@protobuf-ts/plugin-framework": "^2.0.7", + "camel-case": "^4.1.2", + "dot-object": "^2.1.4", + "path-to-regexp": "^6.2.0", + "ts-poet": "^4.5.0", + "yaml": "^1.10.2" + } + }, "typescript": { "version": "5.2.2", "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.2.2.tgz", "integrity": 
"sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w==", "dev": true }, + "universal-user-agent": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz", + "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==" + }, + "unzip-stream": { + "version": "0.3.4", + "resolved": "https://registry.npmjs.org/unzip-stream/-/unzip-stream-0.3.4.tgz", + "integrity": "sha512-PyofABPVv+d7fL7GOpusx7eRT9YETY2X04PhwbSipdj6bMxVCFJrr+nm0Mxqbf9hUiTin/UsnuFWBXlDZFy0Cw==", + "requires": { + "binary": "^0.3.0", + "mkdirp": "^0.5.1" + } + }, + "util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" + }, "uuid": { "version": "3.4.0", "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", @@ -919,6 +3190,77 @@ "webidl-conversions": "^3.0.0" } }, + "which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "requires": { + "isexe": "^2.0.0" + } + }, + "wrap-ansi": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", + "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "requires": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" + } + }, + "wrap-ansi-cjs": { + "version": "npm:wrap-ansi@7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "requires": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "dependencies": { + "ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" + }, + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "requires": { + "color-convert": "^2.0.1" + } + }, + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + }, + "string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + } + }, + "strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "requires": { + "ansi-regex": "^5.0.1" + } + } + } + }, + "wrappy": { + "version": "1.0.2", + "resolved": 
"https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" + }, "xml2js": { "version": "0.5.0", "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.5.0.tgz", @@ -932,6 +3274,21 @@ "version": "11.0.1", "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==" + }, + "yaml": { + "version": "1.10.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", + "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==" + }, + "zip-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/zip-stream/-/zip-stream-6.0.1.tgz", + "integrity": "sha512-zK7YHHz4ZXpW89AHXUPbQVGKI7uvkd3hzusTdotCg1UxyaVtg0zFJSTfW/Dq5f7OBBVnq6cZIaC8Ti4hb6dtCA==", + "requires": { + "archiver-utils": "^5.0.0", + "compress-commons": "^6.0.2", + "readable-stream": "^4.0.0" + } } } } diff --git a/packages/cache/package.json b/packages/cache/package.json index d3251083..78f33c14 100644 --- a/packages/cache/package.json +++ b/packages/cache/package.json @@ -38,6 +38,7 @@ }, "dependencies": { "@actions/core": "^1.10.0", + "@actions/artifact": "^2.1.7", "@actions/exec": "^1.0.1", "@actions/glob": "^0.1.0", "@actions/http-client": "^2.1.1", diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index e150769f..5a582f8d 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -1,13 +1,18 @@ import * as core from '@actions/core' import * as path from 'path' import * as utils from './internal/cacheUtils' -import {CacheUrl} from './internal/constants' +import {CacheServiceVersion, CacheUrl} from './internal/constants' import * as cacheHttpClient from './internal/cacheHttpClient' import * as cacheTwirpClient from './internal/cacheTwirpClient' import {createTar, extractTar, listTar} from './internal/tar' import {DownloadOptions, UploadOptions} from './options' import {GetCacheBlobUploadURLRequest, GetCacheBlobUploadURLResponse} from './generated/results/api/v1/blobcache' -import {UploadCache} from './internal/v2/upload/upload-cache' +import {UploadCacheStream} from './internal/v2/upload-cache' +import { + UploadZipSpecification, + getUploadZipSpecification +} from '@actions/artifact/lib/internal/upload/upload-zip-specification' +import {createZipUploadStream} from '@actions/artifact/lib/internal/upload/zip' export class ValidationError extends Error { constructor(message: string) { @@ -174,17 +179,23 @@ export async function saveCache( ): Promise { checkPaths(paths) checkKey(key) - - // TODO: REMOVE ME - // Making a call to the service - const twirpClient = cacheTwirpClient.internalBlobCacheTwirpClient() - const getSignedUploadURL: GetCacheBlobUploadURLRequest = { - organization: "github", - keys: [key], + + console.debug(`Cache Service Version: ${CacheServiceVersion}`) + switch (CacheServiceVersion) { + case "v2": + return await saveCachev1(paths, key, options, enableCrossOsArchive) + case "v1": + default: + return await saveCachev2(paths, key, options, enableCrossOsArchive) } - const signedUploadURL: GetCacheBlobUploadURLResponse = await twirpClient.GetCacheBlobUploadURL(getSignedUploadURL) - core.info(`GetCacheBlobUploadURLResponse: ${JSON.stringify(signedUploadURL)}`) +} +async function saveCachev1( + paths: string[], + key: string, + options?: UploadOptions, + 
enableCrossOsArchive = false
+): Promise<number> {
   const compressionMethod = await utils.getCompressionMethod()
   let cacheId = -1
 
@@ -224,15 +235,6 @@ export async function saveCache(
     )
   }
 
-
-  // Cache v2 upload
-  // inputs:
-  //  - getSignedUploadURL
-  //  - archivePath
-  core.info(`Saving Cache v2: ${archivePath}`)
-  await UploadCache(signedUploadURL, archivePath)
-
-
   core.debug('Reserving Cache')
   const reserveCacheResponse = await cacheHttpClient.reserveCache(
     key,
@@ -281,3 +283,47 @@ export async function saveCache(
 
   return cacheId
 }
+
+async function saveCachev2(
+  paths: string[],
+  key: string,
+  options?: UploadOptions,
+  enableCrossOsArchive = false
+): Promise<number> {
+  const twirpClient = cacheTwirpClient.internalBlobCacheTwirpClient()
+  const getSignedUploadURL: GetCacheBlobUploadURLRequest = {
+    organization: "github",
+    keys: [key],
+  }
+  const signedUploadURL: GetCacheBlobUploadURLResponse = await twirpClient.GetCacheBlobUploadURL(getSignedUploadURL)
+  core.info(`GetCacheBlobUploadURLResponse: ${JSON.stringify(signedUploadURL)}`)
+
+  // Archive
+  // We're going to handle 1 path for now. This needs to be fixed to handle all
+  // paths passed in.
+  const rootDir = path.dirname(paths[0])
+  const zipSpecs: UploadZipSpecification[] = getUploadZipSpecification(paths, rootDir)
+  if (zipSpecs.length === 0) {
+    throw new Error(
+      `Error with zip specs: ${zipSpecs.flatMap(s => (s.sourcePath ? [s.sourcePath] : [])).join(', ')}`
+    )
+  }
+
+  // 0: No compression
+  // 1: Best speed
+  // 6: Default compression (same as GNU Gzip)
+  // 9: Best compression. Higher levels result in better compression but take longer to complete. For large files that are not easily compressed, a value of 0 is recommended for significantly faster uploads.
+  const zipUploadStream = await createZipUploadStream(
+    zipSpecs,
+    6
+  )
+
+  // Cache v2 upload
+  // inputs:
+  //  - getSignedUploadURL
+  //  - archivePath
+  core.info(`Saving Cache v2: ${paths[0]}`)
+  await UploadCacheStream(signedUploadURL.urls[0].url, zipUploadStream)
+
+  return 0
+}
\ No newline at end of file
diff --git a/packages/cache/src/internal/constants.ts b/packages/cache/src/internal/constants.ts
index f6e093e0..6fd5d7a0 100644
--- a/packages/cache/src/internal/constants.ts
+++ b/packages/cache/src/internal/constants.ts
@@ -37,5 +37,6 @@ export const TarFilename = 'cache.tar'
 
 export const ManifestFilename = 'manifest.txt'
 
-// Cache URL
-export const CacheUrl = `${process.env['ACTIONS_CACHE_URL_NEXT']}`
+// Cache Service Metadata
+export const CacheUrl = `${process.env['ACTIONS_CACHE_URL_NEXT']} || ${process.env['ACTIONS_CACHE_URL']}`
+export const CacheServiceVersion = `${process.env['ACTIONS_CACHE_URL_NEXT']} ? 'v2' : 'v1'`
\ No newline at end of file
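Note: as committed above, both constants wrap their entire expression in a template
literal, so CacheUrl always contains the literal text " || " between the two values
and CacheServiceVersion is the whole "... ? 'v2' : 'v1'" string rather than 'v1' or
'v2'; PATCH 011 below corrects the version check. A minimal sketch of the presumably
intended logic, using the same environment variables as the patch (not part of the
committed code):

// Presumably intended resolution (sketch, not part of the patch):
// prefer the v2 endpoint when ACTIONS_CACHE_URL_NEXT is set,
// otherwise fall back to the existing v1 cache URL.
export const CacheUrl =
  process.env['ACTIONS_CACHE_URL_NEXT'] || process.env['ACTIONS_CACHE_URL']
export const CacheServiceVersion = process.env['ACTIONS_CACHE_URL_NEXT']
  ? 'v2'
  : 'v1'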
diff --git a/packages/cache/src/internal/v2/upload-cache.ts b/packages/cache/src/internal/v2/upload-cache.ts
new file mode 100644
index 00000000..574cf788
--- /dev/null
+++ b/packages/cache/src/internal/v2/upload-cache.ts
@@ -0,0 +1,130 @@
+import * as core from '@actions/core'
+import {GetCacheBlobUploadURLResponse} from '../../generated/results/api/v1/blobcache'
+import {ZipUploadStream} from '@actions/artifact/lib/internal/upload/zip'
+import {NetworkError} from '@actions/artifact/'
+import {TransferProgressEvent} from '@azure/core-http'
+import * as stream from 'stream'
+import * as crypto from 'crypto'
+import {
+  BlobClient,
+  BlockBlobClient,
+  BlockBlobUploadStreamOptions,
+  BlockBlobParallelUploadOptions
+} from '@azure/storage-blob'
+
+export async function UploadCacheStream(
+  signedUploadURL: string,
+  zipUploadStream: ZipUploadStream
+): Promise<{}> {
+  let uploadByteCount = 0
+  let lastProgressTime = Date.now()
+  let timeoutId: NodeJS.Timeout | undefined
+
+  const chunkTimer = (timeout: number): NodeJS.Timeout => {
+    // clear the previous timeout
+    if (timeoutId) {
+      clearTimeout(timeoutId)
+    }
+
+    timeoutId = setTimeout(() => {
+      const now = Date.now()
+      // if the timeout has elapsed since the
+      // last progress event, then we'll consider the upload stalled
+      if (now - lastProgressTime > timeout) {
+        throw new Error('Upload progress stalled.')
+      }
+    }, timeout)
+    return timeoutId
+  }
+
+  const maxConcurrency = 32
+  const bufferSize = 8 * 1024 * 1024 // 8 MB Chunks
+  const blobClient = new BlobClient(signedUploadURL)
+  const blockBlobClient = blobClient.getBlockBlobClient()
+  const timeoutDuration = 300000 // 5 minutes
+
+  core.debug(
+    `Uploading cache zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`
+  )
+
+  const uploadCallback = (progress: TransferProgressEvent): void => {
+    core.info(`Uploaded bytes ${progress.loadedBytes}`)
+    uploadByteCount = progress.loadedBytes
+    chunkTimer(timeoutDuration)
+    lastProgressTime = Date.now()
+  }
+
+  const options: BlockBlobUploadStreamOptions = {
+    blobHTTPHeaders: {blobContentType: 'zip'},
+    onProgress: uploadCallback
+  }
+
+  let sha256Hash: string | undefined = undefined
+  const uploadStream = new stream.PassThrough()
+  const hashStream = crypto.createHash('sha256')
+
+  zipUploadStream.pipe(uploadStream) // This stream is used for the upload
+  zipUploadStream.pipe(hashStream).setEncoding('hex') // This stream is used to compute the SHA256 hash of the zip content for an integrity check
+
+  core.info('Beginning upload of cache to blob storage')
+  try {
+    // Start the chunk timer
+    timeoutId = chunkTimer(timeoutDuration)
+    await blockBlobClient.uploadStream(
+      uploadStream,
+      bufferSize,
+      maxConcurrency,
+      options
+    )
+  } catch (error) {
+    if (NetworkError.isNetworkErrorCode(error?.code)) {
+      throw new NetworkError(error?.code)
+    }
+    throw error
+  } finally {
+    // clear the timeout whether or not the upload completes
+    if (timeoutId) {
+      clearTimeout(timeoutId)
+    }
+  }
+
+  core.info('Finished uploading cache content to blob storage!')
+
+  hashStream.end()
+  sha256Hash = hashStream.read() as string
+  core.info(`SHA256 hash of uploaded artifact zip is ${sha256Hash}`)
+  core.info(`Uploaded: ${uploadByteCount} bytes`)
+
+  if (uploadByteCount === 0) {
+    core.error(
+      `No data was uploaded to blob storage. 
Reported upload byte count is 0.` + ) + } + return { + uploadSize: uploadByteCount, + sha256Hash + } +} + +export async function UploadCacheFile( + uploadURL: GetCacheBlobUploadURLResponse, + archivePath: string, +): Promise<{}> { + core.info(`Uploading ${archivePath} to: ${JSON.stringify(uploadURL)}`) + + // Specify data transfer options + const uploadOptions: BlockBlobParallelUploadOptions = { + blockSize: 4 * 1024 * 1024, // 4 MiB max block size + concurrency: 2, // maximum number of parallel transfer workers + maxSingleShotSize: 8 * 1024 * 1024, // 8 MiB initial transfer size + }; + + // const blobClient: BlobClient = new BlobClient(uploadURL.urls[0]) + const blobClient: BlobClient = new BlobClient(uploadURL.urls[0].url) + const blockBlobClient: BlockBlobClient = blobClient.getBlockBlobClient() + + core.info(`BlobClient: ${JSON.stringify(blobClient)}`) + core.info(`blockBlobClient: ${JSON.stringify(blockBlobClient)}`) + + return blockBlobClient.uploadFile(archivePath, uploadOptions); +} \ No newline at end of file diff --git a/packages/cache/src/internal/v2/upload/upload-cache.ts b/packages/cache/src/internal/v2/upload/upload-cache.ts deleted file mode 100644 index 442b89b1..00000000 --- a/packages/cache/src/internal/v2/upload/upload-cache.ts +++ /dev/null @@ -1,26 +0,0 @@ -import * as core from '@actions/core' -import {GetCacheBlobUploadURLResponse} from '../../../generated/results/api/v1/blobcache' -import {BlobClient, BlockBlobClient, BlockBlobParallelUploadOptions} from '@azure/storage-blob' - -export async function UploadCache( - uploadURL: GetCacheBlobUploadURLResponse, - archivePath: string, -): Promise<{}> { - core.info(`Uploading ${archivePath} to: ${JSON.stringify(uploadURL)}`) - - // Specify data transfer options - const uploadOptions: BlockBlobParallelUploadOptions = { - blockSize: 4 * 1024 * 1024, // 4 MiB max block size - concurrency: 2, // maximum number of parallel transfer workers - maxSingleShotSize: 8 * 1024 * 1024, // 8 MiB initial transfer size - }; - - // const blobClient: BlobClient = new BlobClient(uploadURL.urls[0]) - const blobClient: BlobClient = new BlobClient(uploadURL.urls[0].url) - const blockBlobClient: BlockBlobClient = blobClient.getBlockBlobClient() - - core.info(`BlobClient: ${JSON.stringify(blobClient)}`) - core.info(`blockBlobClient: ${JSON.stringify(blockBlobClient)}`) - - return blockBlobClient.uploadFile(archivePath, uploadOptions); -} \ No newline at end of file diff --git a/packages/cache/src/internal/v2/zip.ts b/packages/cache/src/internal/v2/zip.ts new file mode 100644 index 00000000..e69de29b From 5afc042a7457ece5073f00cb20c423145d045d1e Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Mon, 17 Jun 2024 01:17:10 -0700 Subject: [PATCH 010/108] Add download cache v2 --- packages/cache/src/cache.ts | 73 ++++++++++++++++++- .../cache/src/internal/v2/download-cache.ts | 67 +++++++++++++++++ 2 files changed, 139 insertions(+), 1 deletion(-) create mode 100644 packages/cache/src/internal/v2/download-cache.ts diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index 5a582f8d..e93ffd4b 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -6,8 +6,14 @@ import * as cacheHttpClient from './internal/cacheHttpClient' import * as cacheTwirpClient from './internal/cacheTwirpClient' import {createTar, extractTar, listTar} from './internal/tar' import {DownloadOptions, UploadOptions} from './options' -import {GetCacheBlobUploadURLRequest, GetCacheBlobUploadURLResponse} from 
'./generated/results/api/v1/blobcache'
+import {
+  GetCacheBlobUploadURLRequest,
+  GetCacheBlobUploadURLResponse,
+  GetCachedBlobRequest,
+  GetCachedBlobResponse
+} from './generated/results/api/v1/blobcache'
 import {UploadCacheStream} from './internal/v2/upload-cache'
+import {StreamExtract} from './internal/v2/download-cache'
 import {
   UploadZipSpecification,
   getUploadZipSpecification
@@ -81,6 +87,23 @@ export async function restoreCache(
 ): Promise<string | undefined> {
   checkPaths(paths)
 
+  console.debug(`Cache Service Version: ${CacheServiceVersion}`)
+  switch (CacheServiceVersion) {
+    case "v2":
+      return await restoreCachev2(paths, primaryKey, restoreKeys, options, enableCrossOsArchive)
+    case "v1":
+    default:
+      return await restoreCachev1(paths, primaryKey, restoreKeys, options, enableCrossOsArchive)
+  }
+}
+
+async function restoreCachev1(
+  paths: string[],
+  primaryKey: string,
+  restoreKeys?: string[],
+  options?: DownloadOptions,
+  enableCrossOsArchive = false
+) {
   restoreKeys = restoreKeys || []
   const keys = [primaryKey, ...restoreKeys]
 
@@ -162,6 +185,54 @@ export async function restoreCache(
   return undefined
 }
 
+async function restoreCachev2(
+  paths: string[],
+  primaryKey: string,
+  restoreKeys?: string[],
+  options?: DownloadOptions,
+  enableCrossOsArchive = false
+) {
+
+  restoreKeys = restoreKeys || []
+  const keys = [primaryKey, ...restoreKeys]
+
+  core.debug('Resolved Keys:')
+  core.debug(JSON.stringify(keys))
+
+  if (keys.length > 10) {
+    throw new ValidationError(
+      `Key Validation Error: Keys are limited to a maximum of 10.`
+    )
+  }
+  for (const key of keys) {
+    checkKey(key)
+  }
+
+  try {
+    const twirpClient = cacheTwirpClient.internalBlobCacheTwirpClient()
+    const getSignedDownloadURLRequest: GetCachedBlobRequest = {
+      owner: "github",
+      keys: keys,
+    }
+    const signedDownloadURL: GetCachedBlobResponse = await twirpClient.GetCachedBlob(getSignedDownloadURLRequest)
+    core.info(`GetCachedBlobResponse: ${JSON.stringify(signedDownloadURL)}`)
+
+    if (signedDownloadURL.blobs.length === 0) {
+      // Cache not found
+      core.warning(`Cache not found for keys: ${keys.join(', ')}`)
+      return undefined
+    }
+
+    core.info(`Starting download of artifact to: ${paths[0]}`)
+    await StreamExtract(signedDownloadURL.blobs[0].signedUrl, paths[0])
+    core.info(`Artifact download completed successfully.`)
+  } catch (error) {
+    throw new Error(`Unable to download and extract cache: ${error.message}`)
+  }
+
+  return undefined
+}
+
 /**
  * Saves a list of files with the specified key
  *
diff --git a/packages/cache/src/internal/v2/download-cache.ts b/packages/cache/src/internal/v2/download-cache.ts
new file mode 100644
index 00000000..bfba0d70
--- /dev/null
+++ b/packages/cache/src/internal/v2/download-cache.ts
@@ -0,0 +1,67 @@
+import * as core from '@actions/core'
+import * as httpClient from '@actions/http-client'
+import unzip from 'unzip-stream'
+const packageJson = require('../../../package.json')
+
+export async function StreamExtract(url: string, directory: string): Promise<void> {
+  let retryCount = 0
+  while (retryCount < 5) {
+    try {
+      await streamExtractExternal(url, directory)
+      return
+    } catch (error) {
+      retryCount++
+      core.debug(
+        `Failed to download cache after ${retryCount} retries due to ${error.message}. 
Retrying in 5 seconds...`
+      )
+      // wait 5 seconds before retrying
+      await new Promise(resolve => setTimeout(resolve, 5000))
+    }
+  }
+
+  throw new Error(`Cache download failed after ${retryCount} retries.`)
+}
+
+export async function streamExtractExternal(
+  url: string,
+  directory: string
+  ): Promise<void> {
+    const client = new httpClient.HttpClient(`@actions/cache-${packageJson.version}`)
+    const response = await client.get(url)
+    if (response.message.statusCode !== 200) {
+      throw new Error(
+        `Unexpected HTTP response from blob storage: ${response.message.statusCode} ${response.message.statusMessage}`
+      )
+    }
+
+    const timeout = 30 * 1000 // 30 seconds
+
+    return new Promise((resolve, reject) => {
+      const timerFn = (): void => {
+        response.message.destroy(
+          new Error(`Blob storage chunk did not respond in ${timeout}ms`)
+        )
+      }
+      const timer = setTimeout(timerFn, timeout)
+
+      response.message
+        .on('data', () => {
+          timer.refresh()
+        })
+        .on('error', (error: Error) => {
+          core.debug(
+            `response.message: Cache download failed: ${error.message}`
+          )
+          clearTimeout(timer)
+          reject(error)
+        })
+        .pipe(unzip.Extract({path: directory}))
+        .on('close', () => {
+          clearTimeout(timer)
+          resolve()
+        })
+        .on('error', (error: Error) => {
+          reject(error)
+        })
+    })
+  }
\ No newline at end of file
From 8d7ed4fb57c3c154384c0b6ad4c21c3dcfdb6795 Mon Sep 17 00:00:00 2001
From: Bassem Dghaidi <568794+Link-@users.noreply.github.com>
Date: Mon, 17 Jun 2024 01:32:41 -0700
Subject: [PATCH 011/108] Fix cache service url bug

---
 packages/cache/src/internal/constants.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/packages/cache/src/internal/constants.ts b/packages/cache/src/internal/constants.ts
index 6fd5d7a0..143ba06e 100644
--- a/packages/cache/src/internal/constants.ts
+++ b/packages/cache/src/internal/constants.ts
@@ -39,4 +39,4 @@ export const ManifestFilename = 'manifest.txt'
 
 // Cache Service Metadata
 export const CacheUrl = `${process.env['ACTIONS_CACHE_URL_NEXT']} || ${process.env['ACTIONS_CACHE_URL']}`
-export const CacheServiceVersion = `${process.env['ACTIONS_CACHE_URL_NEXT']} ? 'v2' : 'v1'`
\ No newline at end of file
+export const CacheServiceVersion = `${process.env['ACTIONS_CACHE_URL_NEXT'] ? 
From 7640cf17c1ea600e516c08605be2fbb365a6b318 Mon Sep 17 00:00:00 2001
From: Bassem Dghaidi <568794+Link-@users.noreply.github.com>
Date: Mon, 17 Jun 2024 02:35:25 -0700
Subject: [PATCH 012/108] Fix cache misses

---
 packages/cache/src/cache.ts | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts
index e93ffd4b..d463b38a 100644
--- a/packages/cache/src/cache.ts
+++ b/packages/cache/src/cache.ts
@@ -226,11 +226,11 @@ async function restoreCachev2(
     core.info(`Starting download of artifact to: ${paths[0]}`)
     await StreamExtract(signedDownloadURL.blobs[0].signedUrl, paths[0])
     core.info(`Artifact download completed successfully.`)
+
+    return keys[0]
   } catch (error) {
     throw new Error(`Unable to download and extract cache: ${error.message}`)
   }
-
-  return undefined
 }

From e1b7e78d600472c43003bad447ef3c0cf983db44 Mon Sep 17 00:00:00 2001
From: Bassem Dghaidi <568794+Link-@users.noreply.github.com>
Date: Mon, 17 Jun 2024 02:39:45 -0700
Subject: [PATCH 013/108] Fix cache misses

---
 packages/cache/src/cache.ts | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts
index d463b38a..43c0212a 100644
--- a/packages/cache/src/cache.ts
+++ b/packages/cache/src/cache.ts
@@ -254,10 +254,10 @@ export async function saveCache(
   console.debug(`Cache Service Version: ${CacheServiceVersion}`)
   switch (CacheServiceVersion) {
     case "v2":
-      return await saveCachev1(paths, key, options, enableCrossOsArchive)
+      return await saveCachev2(paths, key, options, enableCrossOsArchive)
     case "v1":
     default:
-      return await saveCachev2(paths, key, options, enableCrossOsArchive)
+      return await saveCachev1(paths, key, options, enableCrossOsArchive)
   }
 }

From 04d1a7ec3cdd6afeea962daef7c425b9a57f9f09 Mon Sep 17 00:00:00 2001
From: Bassem Dghaidi <568794+Link-@users.noreply.github.com>
Date: Mon, 17 Jun 2024 03:36:06 -0700
Subject: [PATCH 014/108] Add fix cache paths

---
 packages/cache/src/cache.ts                      | 3 ++-
 packages/cache/src/internal/v2/download-cache.ts | 5 +++--
 2 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts
index 43c0212a..d8a26b27 100644
--- a/packages/cache/src/cache.ts
+++ b/packages/cache/src/cache.ts
@@ -223,8 +223,9 @@ async function restoreCachev2(
       return undefined
     }
 
+    core.info(`Cache hit for: ${signedDownloadURL.blobs[0].key}`)
     core.info(`Starting download of artifact to: ${paths[0]}`)
-    await StreamExtract(signedDownloadURL.blobs[0].signedUrl, paths[0])
+    await StreamExtract(signedDownloadURL.blobs[0].signedUrl, path.dirname(paths[0]))
     core.info(`Artifact download completed successfully.`)
 
     return keys[0]
diff --git a/packages/cache/src/internal/v2/download-cache.ts b/packages/cache/src/internal/v2/download-cache.ts
index bfba0d70..19563181 100644
--- a/packages/cache/src/internal/v2/download-cache.ts
+++ b/packages/cache/src/internal/v2/download-cache.ts
@@ -11,7 +11,7 @@ export async function StreamExtract(url: string, directory: string): Promise<void> {
       .on('error', (error: Error) => {
-        core.debug(
+        core.info(
           `response.message: Cache download failed: ${error.message}`
         )
         clearTimeout(timer)

From 4902d3a118cbb2bcaa1a4f914ed144458e50971c Mon Sep 17 00:00:00 2001
From: Bassem Dghaidi <568794+Link-@users.noreply.github.com>
Date: Mon, 24 Jun 2024 01:16:11 -0700
Subject: [PATCH 015/108] Add backend ids

---
 packages/cache/src/cache.ts                   | 10 +-
 .../src/generated/results/api/v1/blobcache.ts | 93
+++++++++++++------ 2 files changed, 75 insertions(+), 28 deletions(-) diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index d8a26b27..fdba186e 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -19,6 +19,7 @@ import { getUploadZipSpecification } from '@actions/artifact/lib/internal/upload/upload-zip-specification' import {createZipUploadStream} from '@actions/artifact/lib/internal/upload/zip' +import {getBackendIdsFromToken, BackendIds} from '@actions/artifact/lib/internal/shared/util' export class ValidationError extends Error { constructor(message: string) { @@ -209,9 +210,12 @@ async function restoreCachev2( } try { + // BackendIds are retrieved form the signed JWT + const backendIds: BackendIds = getBackendIdsFromToken() const twirpClient = cacheTwirpClient.internalBlobCacheTwirpClient() const getSignedDownloadURLRequest: GetCachedBlobRequest = { - owner: "github", + workflowRunBackendId: backendIds.workflowRunBackendId, + workflowJobRunBackendId: backendIds.workflowJobRunBackendId, keys: keys, } const signedDownloadURL: GetCachedBlobResponse = await twirpClient.GetCachedBlob(getSignedDownloadURLRequest) @@ -362,8 +366,12 @@ async function saveCachev2( options?: UploadOptions, enableCrossOsArchive = false ): Promise { + // BackendIds are retrieved form the signed JWT + const backendIds: BackendIds = getBackendIdsFromToken() const twirpClient = cacheTwirpClient.internalBlobCacheTwirpClient() const getSignedUploadURL: GetCacheBlobUploadURLRequest = { + workflowRunBackendId: backendIds.workflowRunBackendId, + workflowJobRunBackendId: backendIds.workflowJobRunBackendId, organization: "github", keys: [key], } diff --git a/packages/cache/src/generated/results/api/v1/blobcache.ts b/packages/cache/src/generated/results/api/v1/blobcache.ts index 41af2886..8e63bc63 100644 --- a/packages/cache/src/generated/results/api/v1/blobcache.ts +++ b/packages/cache/src/generated/results/api/v1/blobcache.ts @@ -18,15 +18,21 @@ import { Timestamp } from "../../../google/protobuf/timestamp"; */ export interface GetCachedBlobRequest { /** - * Owner of the blob(s) to be retrieved + * Workflow run backend ID * - * @generated from protobuf field: string owner = 1; + * @generated from protobuf field: string workflow_run_backend_id = 1; */ - owner: string; + workflowRunBackendId: string; + /** + * Workflow job run backend ID + * + * @generated from protobuf field: string workflow_job_run_backend_id = 2; + */ + workflowJobRunBackendId: string; /** * Key(s) of te blob(s) to be retrieved * - * @generated from protobuf field: repeated string keys = 2; + * @generated from protobuf field: repeated string keys = 3; */ keys: string[]; } @@ -87,15 +93,27 @@ export interface GetCachedBlobResponse_Blob { */ export interface GetCacheBlobUploadURLRequest { /** - * Owner of the blob(s) to be retrieved + * Workflow run backend ID * - * @generated from protobuf field: string organization = 1; + * @generated from protobuf field: string workflow_run_backend_id = 1; + */ + workflowRunBackendId: string; + /** + * Workflow job run backend ID + * + * @generated from protobuf field: string workflow_job_run_backend_id = 2; + */ + workflowJobRunBackendId: string; + /** + * / Owner of the blob(s) to be retrieved + * + * @generated from protobuf field: string organization = 3; */ organization: string; /** * Key(s) of te blob(s) to be retrieved * - * @generated from protobuf field: repeated string keys = 2; + * @generated from protobuf field: repeated string keys = 4; */ keys: string[]; } @@ 
-131,12 +149,13 @@ export interface GetCacheBlobUploadURLResponse_Url { class GetCachedBlobRequest$Type extends MessageType { constructor() { super("github.actions.results.api.v1.GetCachedBlobRequest", [ - { no: 1, name: "owner", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 2, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ } + { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ } ]); } create(value?: PartialMessage): GetCachedBlobRequest { - const message = { owner: "", keys: [] }; + const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", keys: [] }; globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== undefined) reflectionMergePartial(this, message, value); @@ -147,10 +166,13 @@ class GetCachedBlobRequest$Type extends MessageType { while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { - case /* string owner */ 1: - message.owner = reader.string(); + case /* string workflow_run_backend_id */ 1: + message.workflowRunBackendId = reader.string(); break; - case /* repeated string keys */ 2: + case /* string workflow_job_run_backend_id */ 2: + message.workflowJobRunBackendId = reader.string(); + break; + case /* repeated string keys */ 3: message.keys.push(reader.string()); break; default: @@ -165,12 +187,15 @@ class GetCachedBlobRequest$Type extends MessageType { return message; } internalBinaryWrite(message: GetCachedBlobRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { - /* string owner = 1; */ - if (message.owner !== "") - writer.tag(1, WireType.LengthDelimited).string(message.owner); - /* repeated string keys = 2; */ + /* string workflow_run_backend_id = 1; */ + if (message.workflowRunBackendId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId); + /* string workflow_job_run_backend_id = 2; */ + if (message.workflowJobRunBackendId !== "") + writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId); + /* repeated string keys = 3; */ for (let i = 0; i < message.keys.length; i++) - writer.tag(2, WireType.LengthDelimited).string(message.keys[i]); + writer.tag(3, WireType.LengthDelimited).string(message.keys[i]); let u = options.writeUnknownFields; if (u !== false) (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); @@ -314,12 +339,14 @@ export const GetCachedBlobResponse_Blob = new GetCachedBlobResponse_Blob$Type(); class GetCacheBlobUploadURLRequest$Type extends MessageType { constructor() { super("github.actions.results.api.v1.GetCacheBlobUploadURLRequest", [ - { no: 1, name: "organization", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 2, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ } + { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "organization", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 4, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ } ]); } create(value?: PartialMessage): GetCacheBlobUploadURLRequest { - const message = { organization: "", keys: [] }; + const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", organization: "", keys: [] }; globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== undefined) reflectionMergePartial(this, message, value); @@ -330,10 +357,16 @@ class GetCacheBlobUploadURLRequest$Type extends MessageType Date: Tue, 24 Sep 2024 03:17:44 -0700 Subject: [PATCH 016/108] Add cache service v2 client --- packages/cache/src/cache.ts | 91 +- .../src/generated/results/api/v1/blobcache.ts | 513 ------- .../results/api/v1/blobcache.twirp.ts | 433 ------ .../src/generated/results/api/v1/cache.ts | 1324 +++++++++++++++++ .../generated/results/api/v1/cache.twirp.ts | 1209 +++++++++++++++ .../cache/src/internal/cacheHttpClient.ts | 49 +- .../cache/src/internal/cacheTwirpClient.ts | 373 +++-- packages/cache/src/internal/cacheUtils.ts | 30 +- .../cache/src/internal/v2/upload-cache.ts | 24 +- 9 files changed, 2828 insertions(+), 1218 deletions(-) delete mode 100644 packages/cache/src/generated/results/api/v1/blobcache.ts delete mode 100644 packages/cache/src/generated/results/api/v1/blobcache.twirp.ts create mode 100644 packages/cache/src/generated/results/api/v1/cache.ts create mode 100644 packages/cache/src/generated/results/api/v1/cache.twirp.ts diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index fdba186e..0530aaab 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -1,25 +1,27 @@ import * as core from '@actions/core' import * as path from 'path' import * as utils from './internal/cacheUtils' -import {CacheServiceVersion, CacheUrl} from './internal/constants' +import { CacheServiceVersion, CacheUrl } from './internal/constants' import * as cacheHttpClient from './internal/cacheHttpClient' import * as cacheTwirpClient from './internal/cacheTwirpClient' -import {createTar, extractTar, listTar} from './internal/tar' -import {DownloadOptions, UploadOptions} from './options' +import { createTar, extractTar, listTar } from './internal/tar' +import { DownloadOptions, UploadOptions } from './options' import { - GetCacheBlobUploadURLRequest, - GetCacheBlobUploadURLResponse, - GetCachedBlobRequest, - GetCachedBlobResponse -} from './generated/results/api/v1/blobcache' -import {UploadCacheStream} from './internal/v2/upload-cache' -import {StreamExtract} from './internal/v2/download-cache' + CreateCacheEntryRequest, + CreateCacheEntryResponse, + FinalizeCacheEntryUploadRequest, + FinalizeCacheEntryUploadResponse, + GetCacheEntryDownloadURLRequest, + 
GetCacheEntryDownloadURLResponse
+} from './generated/results/api/v1/cache'
+import { UploadCacheStream } from './internal/v2/upload-cache'
+import { StreamExtract } from './internal/v2/download-cache'
 import {
   UploadZipSpecification,
   getUploadZipSpecification
 } from '@actions/artifact/lib/internal/upload/upload-zip-specification'
-import {createZipUploadStream} from '@actions/artifact/lib/internal/upload/zip'
-import {getBackendIdsFromToken, BackendIds} from '@actions/artifact/lib/internal/shared/util'
+import { createZipUploadStream } from '@actions/artifact/lib/internal/upload/zip'
+import { getBackendIdsFromToken, BackendIds } from '@actions/artifact/lib/internal/shared/util'
 
 export class ValidationError extends Error {
   constructor(message: string) {
@@ -193,7 +195,7 @@ async function restoreCachev2(
   options?: DownloadOptions,
   enableCrossOsArchive = false
 ) {
-  
+
   restoreKeys = restoreKeys || []
   const keys = [primaryKey, ...restoreKeys]
 
@@ -212,24 +214,31 @@ async function restoreCachev2(
   try {
     // BackendIds are retrieved from the signed JWT
     const backendIds: BackendIds = getBackendIdsFromToken()
-    const twirpClient = cacheTwirpClient.internalBlobCacheTwirpClient()
-    const getSignedDownloadURLRequest: GetCachedBlobRequest = {
+    const compressionMethod = await utils.getCompressionMethod()
+    const twirpClient = cacheTwirpClient.internalCacheTwirpClient()
+    const request: GetCacheEntryDownloadURLRequest = {
       workflowRunBackendId: backendIds.workflowRunBackendId,
       workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
-      keys: keys,
+      key: primaryKey,
+      restoreKeys: restoreKeys,
+      version: utils.getCacheVersion(
+        paths,
+        compressionMethod,
+        enableCrossOsArchive,
+      ),
     }
-    const signedDownloadURL: GetCachedBlobResponse = await twirpClient.GetCachedBlob(getSignedDownloadURLRequest)
-    core.info(`GetCachedBlobResponse: ${JSON.stringify(signedDownloadURL)}`)
+    const response: GetCacheEntryDownloadURLResponse = await twirpClient.GetCacheEntryDownloadURL(request)
+    core.info(`GetCacheEntryDownloadURLResponse: ${JSON.stringify(response)}`)
 
-    if (signedDownloadURL.blobs.length === 0) {
+    if (!response.ok) {
       // Cache not found
       core.warning(`Cache not found for keys: ${keys.join(', ')}`)
       return undefined
     }
 
-    core.info(`Cache hit for: ${signedDownloadURL.blobs[0].key}`)
+    core.info(`Cache hit for: ${request.key}`)
     core.info(`Starting download of artifact to: ${paths[0]}`)
-    await StreamExtract(signedDownloadURL.blobs[0].signedUrl, path.dirname(paths[0]))
+    await StreamExtract(response.signedDownloadUrl, path.dirname(paths[0]))
     core.info(`Artifact download completed successfully.`)
 
     return keys[0]
@@ -255,7 +264,7 @@ export async function saveCache(
 ): Promise<number> {
   checkPaths(paths)
   checkKey(key)
-  
+
   console.debug(`Cache Service Version: ${CacheServiceVersion}`)
   switch (CacheServiceVersion) {
     case "v2":
@@ -327,9 +336,9 @@ async function saveCachev1(
   } else if (reserveCacheResponse?.statusCode === 400) {
     throw new Error(
       reserveCacheResponse?.error?.message ??
-      `Cache size of ~${Math.round(
-        archiveFileSize / (1024 * 1024)
-      )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`
+        `Cache size of ~${Math.round(
+          archiveFileSize / (1024 * 1024)
+        )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`
     )
   } else {
     throw new ReserveCacheError(
@@ -368,15 +377,21 @@ async function saveCachev2(
 ): Promise<number> {
   // BackendIds are retrieved from the signed JWT
   const backendIds: BackendIds = getBackendIdsFromToken()
-  const twirpClient = cacheTwirpClient.internalBlobCacheTwirpClient()
-  const getSignedUploadURL: GetCacheBlobUploadURLRequest = {
+  const compressionMethod = await utils.getCompressionMethod()
+  const version = utils.getCacheVersion(
+    paths,
+    compressionMethod,
+    enableCrossOsArchive
+  )
+  const twirpClient = cacheTwirpClient.internalCacheTwirpClient()
+  const request: CreateCacheEntryRequest = {
     workflowRunBackendId: backendIds.workflowRunBackendId,
     workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
-    organization: "github",
-    keys: [key],
+    key: key,
+    version: version
   }
-  const signedUploadURL: GetCacheBlobUploadURLResponse = await twirpClient.GetCacheBlobUploadURL(getSignedUploadURL)
-  core.info(`GetCacheBlobUploadURLResponse: ${JSON.stringify(signedUploadURL)}`)
+  const response: CreateCacheEntryResponse = await twirpClient.CreateCacheEntry(request)
+  core.info(`CreateCacheEntryResponse: ${JSON.stringify(response)}`)
 
   // Archive
   // We're going to handle 1 path for now. This needs to be fixed to handle all
@@ -403,7 +418,19 @@ async function saveCachev2(
   // - getSignedUploadURL
   // - archivePath
   core.info(`Saving Cache v2: ${paths[0]}`)
-  await UploadCacheStream(signedUploadURL.urls[0].url, zipUploadStream)
+  await UploadCacheStream(response.signedUploadUrl, zipUploadStream)
+
+  // Finalize the cache entry
+  const finalizeRequest: FinalizeCacheEntryUploadRequest = {
+    workflowRunBackendId: backendIds.workflowRunBackendId,
+    workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
+    key: key,
+    version: version,
+    sizeBytes: "1024",
+  }
+
+  const finalizeResponse: FinalizeCacheEntryUploadResponse = await twirpClient.FinalizeCacheEntryUpload(finalizeRequest)
+  core.info(`FinalizeCacheEntryUploadResponse: ${JSON.stringify(finalizeResponse)}`)
 
   return 0
 }
\ No newline at end of file
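Taken together, the cache.ts changes above replace the BlobCache prototype with the cache v2 Twirp service: restore becomes GetCacheEntryDownloadURL plus StreamExtract, and save becomes a three-step handshake of CreateCacheEntry (reserve a key/version pair and get a signed upload URL), streaming the archive to that URL, and FinalizeCacheEntryUpload (still with a hard-coded sizeBytes of "1024" at this point). Both directions hash the same inputs into version, so save and restore must compute it identically or every lookup misses. A condensed sketch of the save handshake, assuming the same twirpClient, backendIds, and helpers as the patch:

    const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive)
    // 1. Reserve the entry and obtain a signed upload URL
    const entry = await twirpClient.CreateCacheEntry({ ...backendIds, key, version })
    // 2. Stream the zipped archive to blob storage
    await UploadCacheStream(entry.signedUploadUrl, zipUploadStream)
    // 3. Seal the entry so it becomes visible to lookups
    await twirpClient.FinalizeCacheEntryUpload({ ...backendIds, key, version, sizeBytes: '1024' })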
diff --git a/packages/cache/src/generated/results/api/v1/blobcache.ts b/packages/cache/src/generated/results/api/v1/blobcache.ts
deleted file mode 100644
index 8e63bc63..00000000
--- a/packages/cache/src/generated/results/api/v1/blobcache.ts
+++ /dev/null
@@ -1,513 +0,0 @@
-// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
-// @generated from protobuf file "results/api/v1/blobcache.proto" (package "github.actions.results.api.v1", syntax proto3)
-// tslint:disable
-import { ServiceType } from "@protobuf-ts/runtime-rpc";
-import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
-import type { IBinaryWriter } from "@protobuf-ts/runtime";
-import { WireType } from "@protobuf-ts/runtime";
-import type { BinaryReadOptions } from "@protobuf-ts/runtime";
-import type { IBinaryReader } from "@protobuf-ts/runtime";
-import { UnknownFieldHandler } from "@protobuf-ts/runtime";
-import type { PartialMessage } from "@protobuf-ts/runtime";
-import { reflectionMergePartial } from "@protobuf-ts/runtime";
-import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
-import { MessageType } from "@protobuf-ts/runtime";
-import { Timestamp } from
"../../../google/protobuf/timestamp"; -/** - * @generated from protobuf message github.actions.results.api.v1.GetCachedBlobRequest - */ -export interface GetCachedBlobRequest { - /** - * Workflow run backend ID - * - * @generated from protobuf field: string workflow_run_backend_id = 1; - */ - workflowRunBackendId: string; - /** - * Workflow job run backend ID - * - * @generated from protobuf field: string workflow_job_run_backend_id = 2; - */ - workflowJobRunBackendId: string; - /** - * Key(s) of te blob(s) to be retrieved - * - * @generated from protobuf field: repeated string keys = 3; - */ - keys: string[]; -} -/** - * @generated from protobuf message github.actions.results.api.v1.GetCachedBlobResponse - */ -export interface GetCachedBlobResponse { - /** - * List of blobs that match the requested keys - * - * @generated from protobuf field: repeated github.actions.results.api.v1.GetCachedBlobResponse.Blob blobs = 1; - */ - blobs: GetCachedBlobResponse_Blob[]; -} -/** - * @generated from protobuf message github.actions.results.api.v1.GetCachedBlobResponse.Blob - */ -export interface GetCachedBlobResponse_Blob { - /** - * Key of the blob - * - * @generated from protobuf field: string key = 1; - */ - key: string; - /** - * Download url for the cached blob - * - * @generated from protobuf field: string signed_url = 2; - */ - signedUrl: string; - /** - * Version of the cached blob entry - * - * @generated from protobuf field: int32 version = 3; - */ - version: number; - /** - * Checksum of the blob - * - * @generated from protobuf field: string checksum = 4; - */ - checksum: string; - /** - * Timestamp for when the blob cache entry expires - * - * @generated from protobuf field: google.protobuf.Timestamp expires_at = 5; - */ - expiresAt?: Timestamp; - /** - * Timestamp for when the blob cache entry was created - * - * @generated from protobuf field: google.protobuf.Timestamp created_at = 6; - */ - createdAt?: Timestamp; -} -/** - * @generated from protobuf message github.actions.results.api.v1.GetCacheBlobUploadURLRequest - */ -export interface GetCacheBlobUploadURLRequest { - /** - * Workflow run backend ID - * - * @generated from protobuf field: string workflow_run_backend_id = 1; - */ - workflowRunBackendId: string; - /** - * Workflow job run backend ID - * - * @generated from protobuf field: string workflow_job_run_backend_id = 2; - */ - workflowJobRunBackendId: string; - /** - * / Owner of the blob(s) to be retrieved - * - * @generated from protobuf field: string organization = 3; - */ - organization: string; - /** - * Key(s) of te blob(s) to be retrieved - * - * @generated from protobuf field: repeated string keys = 4; - */ - keys: string[]; -} -/** - * @generated from protobuf message github.actions.results.api.v1.GetCacheBlobUploadURLResponse - */ -export interface GetCacheBlobUploadURLResponse { - /** - * List of upload URLs that match the requested keys - * - * @generated from protobuf field: repeated github.actions.results.api.v1.GetCacheBlobUploadURLResponse.Url urls = 1; - */ - urls: GetCacheBlobUploadURLResponse_Url[]; -} -/** - * @generated from protobuf message github.actions.results.api.v1.GetCacheBlobUploadURLResponse.Url - */ -export interface GetCacheBlobUploadURLResponse_Url { - /** - * Key of the blob - * - * @generated from protobuf field: string key = 1; - */ - key: string; - /** - * URL to the blob - * - * @generated from protobuf field: string url = 2; - */ - url: string; -} -// @generated message type with reflection information, may provide speed optimized methods 
-class GetCachedBlobRequest$Type extends MessageType { - constructor() { - super("github.actions.results.api.v1.GetCachedBlobRequest", [ - { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 3, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ } - ]); - } - create(value?: PartialMessage): GetCachedBlobRequest { - const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", keys: [] }; - globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== undefined) - reflectionMergePartial(this, message, value); - return message; - } - internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCachedBlobRequest): GetCachedBlobRequest { - let message = target ?? this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* string workflow_run_backend_id */ 1: - message.workflowRunBackendId = reader.string(); - break; - case /* string workflow_job_run_backend_id */ 2: - message.workflowJobRunBackendId = reader.string(); - break; - case /* repeated string keys */ 3: - message.keys.push(reader.string()); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; - } - internalBinaryWrite(message: GetCachedBlobRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { - /* string workflow_run_backend_id = 1; */ - if (message.workflowRunBackendId !== "") - writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId); - /* string workflow_job_run_backend_id = 2; */ - if (message.workflowJobRunBackendId !== "") - writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId); - /* repeated string keys = 3; */ - for (let i = 0; i < message.keys.length; i++) - writer.tag(3, WireType.LengthDelimited).string(message.keys[i]); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; - } -} -/** - * @generated MessageType for protobuf message github.actions.results.api.v1.GetCachedBlobRequest - */ -export const GetCachedBlobRequest = new GetCachedBlobRequest$Type(); -// @generated message type with reflection information, may provide speed optimized methods -class GetCachedBlobResponse$Type extends MessageType { - constructor() { - super("github.actions.results.api.v1.GetCachedBlobResponse", [ - { no: 1, name: "blobs", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => GetCachedBlobResponse_Blob } - ]); - } - create(value?: PartialMessage): GetCachedBlobResponse { - const message = { blobs: [] }; - globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== undefined) - reflectionMergePartial(this, message, value); - return message; - } - internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCachedBlobResponse): GetCachedBlobResponse { - let message = target ?? 
this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* repeated github.actions.results.api.v1.GetCachedBlobResponse.Blob blobs */ 1: - message.blobs.push(GetCachedBlobResponse_Blob.internalBinaryRead(reader, reader.uint32(), options)); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; - } - internalBinaryWrite(message: GetCachedBlobResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { - /* repeated github.actions.results.api.v1.GetCachedBlobResponse.Blob blobs = 1; */ - for (let i = 0; i < message.blobs.length; i++) - GetCachedBlobResponse_Blob.internalBinaryWrite(message.blobs[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join(); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; - } -} -/** - * @generated MessageType for protobuf message github.actions.results.api.v1.GetCachedBlobResponse - */ -export const GetCachedBlobResponse = new GetCachedBlobResponse$Type(); -// @generated message type with reflection information, may provide speed optimized methods -class GetCachedBlobResponse_Blob$Type extends MessageType { - constructor() { - super("github.actions.results.api.v1.GetCachedBlobResponse.Blob", [ - { no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 2, name: "signed_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 3, name: "version", kind: "scalar", T: 5 /*ScalarType.INT32*/ }, - { no: 4, name: "checksum", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 5, name: "expires_at", kind: "message", T: () => Timestamp }, - { no: 6, name: "created_at", kind: "message", T: () => Timestamp } - ]); - } - create(value?: PartialMessage): GetCachedBlobResponse_Blob { - const message = { key: "", signedUrl: "", version: 0, checksum: "" }; - globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== undefined) - reflectionMergePartial(this, message, value); - return message; - } - internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCachedBlobResponse_Blob): GetCachedBlobResponse_Blob { - let message = target ?? 
this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* string key */ 1: - message.key = reader.string(); - break; - case /* string signed_url */ 2: - message.signedUrl = reader.string(); - break; - case /* int32 version */ 3: - message.version = reader.int32(); - break; - case /* string checksum */ 4: - message.checksum = reader.string(); - break; - case /* google.protobuf.Timestamp expires_at */ 5: - message.expiresAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt); - break; - case /* google.protobuf.Timestamp created_at */ 6: - message.createdAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; - } - internalBinaryWrite(message: GetCachedBlobResponse_Blob, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { - /* string key = 1; */ - if (message.key !== "") - writer.tag(1, WireType.LengthDelimited).string(message.key); - /* string signed_url = 2; */ - if (message.signedUrl !== "") - writer.tag(2, WireType.LengthDelimited).string(message.signedUrl); - /* int32 version = 3; */ - if (message.version !== 0) - writer.tag(3, WireType.Varint).int32(message.version); - /* string checksum = 4; */ - if (message.checksum !== "") - writer.tag(4, WireType.LengthDelimited).string(message.checksum); - /* google.protobuf.Timestamp expires_at = 5; */ - if (message.expiresAt) - Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(5, WireType.LengthDelimited).fork(), options).join(); - /* google.protobuf.Timestamp created_at = 6; */ - if (message.createdAt) - Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, WireType.LengthDelimited).fork(), options).join(); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; - } -} -/** - * @generated MessageType for protobuf message github.actions.results.api.v1.GetCachedBlobResponse.Blob - */ -export const GetCachedBlobResponse_Blob = new GetCachedBlobResponse_Blob$Type(); -// @generated message type with reflection information, may provide speed optimized methods -class GetCacheBlobUploadURLRequest$Type extends MessageType { - constructor() { - super("github.actions.results.api.v1.GetCacheBlobUploadURLRequest", [ - { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 3, name: "organization", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 4, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ } - ]); - } - create(value?: PartialMessage): GetCacheBlobUploadURLRequest { - const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", organization: "", keys: [] }; - globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== undefined) - reflectionMergePartial(this, message, value); - return message; - } - internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCacheBlobUploadURLRequest): GetCacheBlobUploadURLRequest { - let message = target ?? this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* string workflow_run_backend_id */ 1: - message.workflowRunBackendId = reader.string(); - break; - case /* string workflow_job_run_backend_id */ 2: - message.workflowJobRunBackendId = reader.string(); - break; - case /* string organization */ 3: - message.organization = reader.string(); - break; - case /* repeated string keys */ 4: - message.keys.push(reader.string()); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; - } - internalBinaryWrite(message: GetCacheBlobUploadURLRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { - /* string workflow_run_backend_id = 1; */ - if (message.workflowRunBackendId !== "") - writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId); - /* string workflow_job_run_backend_id = 2; */ - if (message.workflowJobRunBackendId !== "") - writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId); - /* string organization = 3; */ - if (message.organization !== "") - writer.tag(3, WireType.LengthDelimited).string(message.organization); - /* repeated string keys = 4; */ - for (let i = 0; i < message.keys.length; i++) - writer.tag(4, WireType.LengthDelimited).string(message.keys[i]); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; - } -} -/** - * @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheBlobUploadURLRequest - */ -export const GetCacheBlobUploadURLRequest = new GetCacheBlobUploadURLRequest$Type(); -// @generated message type with reflection information, may provide speed optimized methods -class GetCacheBlobUploadURLResponse$Type extends MessageType { - constructor() { - super("github.actions.results.api.v1.GetCacheBlobUploadURLResponse", [ - { no: 1, name: "urls", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => GetCacheBlobUploadURLResponse_Url } - ]); - } - create(value?: PartialMessage): GetCacheBlobUploadURLResponse { - const message = { urls: [] }; - globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== undefined) - reflectionMergePartial(this, message, value); - return message; - } - internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCacheBlobUploadURLResponse): GetCacheBlobUploadURLResponse { - let message = target ?? this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* repeated github.actions.results.api.v1.GetCacheBlobUploadURLResponse.Url urls */ 1: - message.urls.push(GetCacheBlobUploadURLResponse_Url.internalBinaryRead(reader, reader.uint32(), options)); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; - } - internalBinaryWrite(message: GetCacheBlobUploadURLResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { - /* repeated github.actions.results.api.v1.GetCacheBlobUploadURLResponse.Url urls = 1; */ - for (let i = 0; i < message.urls.length; i++) - GetCacheBlobUploadURLResponse_Url.internalBinaryWrite(message.urls[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join(); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; - } -} -/** - * @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheBlobUploadURLResponse - */ -export const GetCacheBlobUploadURLResponse = new GetCacheBlobUploadURLResponse$Type(); -// @generated message type with reflection information, may provide speed optimized methods -class GetCacheBlobUploadURLResponse_Url$Type extends MessageType { - constructor() { - super("github.actions.results.api.v1.GetCacheBlobUploadURLResponse.Url", [ - { no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 2, name: "url", kind: "scalar", T: 9 /*ScalarType.STRING*/ } - ]); - } - create(value?: PartialMessage): GetCacheBlobUploadURLResponse_Url { - const message = { key: "", url: "" }; - globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== undefined) - reflectionMergePartial(this, message, value); - return message; - } - internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCacheBlobUploadURLResponse_Url): GetCacheBlobUploadURLResponse_Url { - let message = target ?? 
this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* string key */ 1: - message.key = reader.string(); - break; - case /* string url */ 2: - message.url = reader.string(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; - } - internalBinaryWrite(message: GetCacheBlobUploadURLResponse_Url, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { - /* string key = 1; */ - if (message.key !== "") - writer.tag(1, WireType.LengthDelimited).string(message.key); - /* string url = 2; */ - if (message.url !== "") - writer.tag(2, WireType.LengthDelimited).string(message.url); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; - } -} -/** - * @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheBlobUploadURLResponse.Url - */ -export const GetCacheBlobUploadURLResponse_Url = new GetCacheBlobUploadURLResponse_Url$Type(); -/** - * @generated ServiceType for protobuf service github.actions.results.api.v1.BlobCacheService - */ -export const BlobCacheService = new ServiceType("github.actions.results.api.v1.BlobCacheService", [ - { name: "GetCachedBlob", options: {}, I: GetCachedBlobRequest, O: GetCachedBlobResponse }, - { name: "GetCacheBlobUploadURL", options: {}, I: GetCacheBlobUploadURLRequest, O: GetCacheBlobUploadURLResponse } -]); diff --git a/packages/cache/src/generated/results/api/v1/blobcache.twirp.ts b/packages/cache/src/generated/results/api/v1/blobcache.twirp.ts deleted file mode 100644 index c2f05e88..00000000 --- a/packages/cache/src/generated/results/api/v1/blobcache.twirp.ts +++ /dev/null @@ -1,433 +0,0 @@ -import { - TwirpContext, - TwirpServer, - RouterEvents, - TwirpError, - TwirpErrorCode, - Interceptor, - TwirpContentType, - chainInterceptors, -} from "twirp-ts"; -import { - GetCachedBlobRequest, - GetCachedBlobResponse, - GetCacheBlobUploadURLRequest, - GetCacheBlobUploadURLResponse, -} from "./blobcache"; - -//==================================// -// Client Code // -//==================================// - -interface Rpc { - request( - service: string, - method: string, - contentType: "application/json" | "application/protobuf", - data: object | Uint8Array - ): Promise; -} - -export interface BlobCacheServiceClient { - GetCachedBlob(request: GetCachedBlobRequest): Promise; - GetCacheBlobUploadURL( - request: GetCacheBlobUploadURLRequest - ): Promise; -} - -export class BlobCacheServiceClientJSON implements BlobCacheServiceClient { - private readonly rpc: Rpc; - constructor(rpc: Rpc) { - this.rpc = rpc; - this.GetCachedBlob.bind(this); - this.GetCacheBlobUploadURL.bind(this); - } - GetCachedBlob(request: GetCachedBlobRequest): Promise { - const data = GetCachedBlobRequest.toJson(request, { - useProtoFieldName: true, - emitDefaultValues: false, - }); - const promise = this.rpc.request( - "github.actions.results.api.v1.BlobCacheService", - "GetCachedBlob", - "application/json", - data as object - ); - return promise.then((data) => - GetCachedBlobResponse.fromJson(data as any, { ignoreUnknownFields: true }) - ); - } - - GetCacheBlobUploadURL( - request: 
GetCacheBlobUploadURLRequest - ): Promise { - const data = GetCacheBlobUploadURLRequest.toJson(request, { - useProtoFieldName: true, - emitDefaultValues: false, - }); - const promise = this.rpc.request( - "github.actions.results.api.v1.BlobCacheService", - "GetCacheBlobUploadURL", - "application/json", - data as object - ); - return promise.then((data) => - GetCacheBlobUploadURLResponse.fromJson(data as any, { - ignoreUnknownFields: true, - }) - ); - } -} - -export class BlobCacheServiceClientProtobuf implements BlobCacheServiceClient { - private readonly rpc: Rpc; - constructor(rpc: Rpc) { - this.rpc = rpc; - this.GetCachedBlob.bind(this); - this.GetCacheBlobUploadURL.bind(this); - } - GetCachedBlob(request: GetCachedBlobRequest): Promise { - const data = GetCachedBlobRequest.toBinary(request); - const promise = this.rpc.request( - "github.actions.results.api.v1.BlobCacheService", - "GetCachedBlob", - "application/protobuf", - data - ); - return promise.then((data) => - GetCachedBlobResponse.fromBinary(data as Uint8Array) - ); - } - - GetCacheBlobUploadURL( - request: GetCacheBlobUploadURLRequest - ): Promise { - const data = GetCacheBlobUploadURLRequest.toBinary(request); - const promise = this.rpc.request( - "github.actions.results.api.v1.BlobCacheService", - "GetCacheBlobUploadURL", - "application/protobuf", - data - ); - return promise.then((data) => - GetCacheBlobUploadURLResponse.fromBinary(data as Uint8Array) - ); - } -} - -//==================================// -// Server Code // -//==================================// - -export interface BlobCacheServiceTwirp { - GetCachedBlob( - ctx: T, - request: GetCachedBlobRequest - ): Promise; - GetCacheBlobUploadURL( - ctx: T, - request: GetCacheBlobUploadURLRequest - ): Promise; -} - -export enum BlobCacheServiceMethod { - GetCachedBlob = "GetCachedBlob", - GetCacheBlobUploadURL = "GetCacheBlobUploadURL", -} - -export const BlobCacheServiceMethodList = [ - BlobCacheServiceMethod.GetCachedBlob, - BlobCacheServiceMethod.GetCacheBlobUploadURL, -]; - -export function createBlobCacheServiceServer< - T extends TwirpContext = TwirpContext ->(service: BlobCacheServiceTwirp) { - return new TwirpServer({ - service, - packageName: "github.actions.results.api.v1", - serviceName: "BlobCacheService", - methodList: BlobCacheServiceMethodList, - matchRoute: matchBlobCacheServiceRoute, - }); -} - -function matchBlobCacheServiceRoute( - method: string, - events: RouterEvents -) { - switch (method) { - case "GetCachedBlob": - return async ( - ctx: T, - service: BlobCacheServiceTwirp, - data: Buffer, - interceptors?: Interceptor< - T, - GetCachedBlobRequest, - GetCachedBlobResponse - >[] - ) => { - ctx = { ...ctx, methodName: "GetCachedBlob" }; - await events.onMatch(ctx); - return handleBlobCacheServiceGetCachedBlobRequest( - ctx, - service, - data, - interceptors - ); - }; - case "GetCacheBlobUploadURL": - return async ( - ctx: T, - service: BlobCacheServiceTwirp, - data: Buffer, - interceptors?: Interceptor< - T, - GetCacheBlobUploadURLRequest, - GetCacheBlobUploadURLResponse - >[] - ) => { - ctx = { ...ctx, methodName: "GetCacheBlobUploadURL" }; - await events.onMatch(ctx); - return handleBlobCacheServiceGetCacheBlobUploadURLRequest( - ctx, - service, - data, - interceptors - ); - }; - default: - events.onNotFound(); - const msg = `no handler found`; - throw new TwirpError(TwirpErrorCode.BadRoute, msg); - } -} - -function handleBlobCacheServiceGetCachedBlobRequest< - T extends TwirpContext = TwirpContext ->( - ctx: T, - service: BlobCacheServiceTwirp, - 
data: Buffer, - interceptors?: Interceptor[] -): Promise { - switch (ctx.contentType) { - case TwirpContentType.JSON: - return handleBlobCacheServiceGetCachedBlobJSON( - ctx, - service, - data, - interceptors - ); - case TwirpContentType.Protobuf: - return handleBlobCacheServiceGetCachedBlobProtobuf( - ctx, - service, - data, - interceptors - ); - default: - const msg = "unexpected Content-Type"; - throw new TwirpError(TwirpErrorCode.BadRoute, msg); - } -} - -function handleBlobCacheServiceGetCacheBlobUploadURLRequest< - T extends TwirpContext = TwirpContext ->( - ctx: T, - service: BlobCacheServiceTwirp, - data: Buffer, - interceptors?: Interceptor< - T, - GetCacheBlobUploadURLRequest, - GetCacheBlobUploadURLResponse - >[] -): Promise { - switch (ctx.contentType) { - case TwirpContentType.JSON: - return handleBlobCacheServiceGetCacheBlobUploadURLJSON( - ctx, - service, - data, - interceptors - ); - case TwirpContentType.Protobuf: - return handleBlobCacheServiceGetCacheBlobUploadURLProtobuf( - ctx, - service, - data, - interceptors - ); - default: - const msg = "unexpected Content-Type"; - throw new TwirpError(TwirpErrorCode.BadRoute, msg); - } -} -async function handleBlobCacheServiceGetCachedBlobJSON< - T extends TwirpContext = TwirpContext ->( - ctx: T, - service: BlobCacheServiceTwirp, - data: Buffer, - interceptors?: Interceptor[] -) { - let request: GetCachedBlobRequest; - let response: GetCachedBlobResponse; - - try { - const body = JSON.parse(data.toString() || "{}"); - request = GetCachedBlobRequest.fromJson(body, { - ignoreUnknownFields: true, - }); - } catch (e) { - if (e instanceof Error) { - const msg = "the json request could not be decoded"; - throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true); - } - } - - if (interceptors && interceptors.length > 0) { - const interceptor = chainInterceptors(...interceptors) as Interceptor< - T, - GetCachedBlobRequest, - GetCachedBlobResponse - >; - response = await interceptor(ctx, request!, (ctx, inputReq) => { - return service.GetCachedBlob(ctx, inputReq); - }); - } else { - response = await service.GetCachedBlob(ctx, request!); - } - - return JSON.stringify( - GetCachedBlobResponse.toJson(response, { - useProtoFieldName: true, - emitDefaultValues: false, - }) as string - ); -} - -async function handleBlobCacheServiceGetCacheBlobUploadURLJSON< - T extends TwirpContext = TwirpContext ->( - ctx: T, - service: BlobCacheServiceTwirp, - data: Buffer, - interceptors?: Interceptor< - T, - GetCacheBlobUploadURLRequest, - GetCacheBlobUploadURLResponse - >[] -) { - let request: GetCacheBlobUploadURLRequest; - let response: GetCacheBlobUploadURLResponse; - - try { - const body = JSON.parse(data.toString() || "{}"); - request = GetCacheBlobUploadURLRequest.fromJson(body, { - ignoreUnknownFields: true, - }); - } catch (e) { - if (e instanceof Error) { - const msg = "the json request could not be decoded"; - throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true); - } - } - - if (interceptors && interceptors.length > 0) { - const interceptor = chainInterceptors(...interceptors) as Interceptor< - T, - GetCacheBlobUploadURLRequest, - GetCacheBlobUploadURLResponse - >; - response = await interceptor(ctx, request!, (ctx, inputReq) => { - return service.GetCacheBlobUploadURL(ctx, inputReq); - }); - } else { - response = await service.GetCacheBlobUploadURL(ctx, request!); - } - - return JSON.stringify( - GetCacheBlobUploadURLResponse.toJson(response, { - useProtoFieldName: true, - emitDefaultValues: false, - }) as string - 
); -} -async function handleBlobCacheServiceGetCachedBlobProtobuf< - T extends TwirpContext = TwirpContext ->( - ctx: T, - service: BlobCacheServiceTwirp, - data: Buffer, - interceptors?: Interceptor[] -) { - let request: GetCachedBlobRequest; - let response: GetCachedBlobResponse; - - try { - request = GetCachedBlobRequest.fromBinary(data); - } catch (e) { - if (e instanceof Error) { - const msg = "the protobuf request could not be decoded"; - throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true); - } - } - - if (interceptors && interceptors.length > 0) { - const interceptor = chainInterceptors(...interceptors) as Interceptor< - T, - GetCachedBlobRequest, - GetCachedBlobResponse - >; - response = await interceptor(ctx, request!, (ctx, inputReq) => { - return service.GetCachedBlob(ctx, inputReq); - }); - } else { - response = await service.GetCachedBlob(ctx, request!); - } - - return Buffer.from(GetCachedBlobResponse.toBinary(response)); -} - -async function handleBlobCacheServiceGetCacheBlobUploadURLProtobuf< - T extends TwirpContext = TwirpContext ->( - ctx: T, - service: BlobCacheServiceTwirp, - data: Buffer, - interceptors?: Interceptor< - T, - GetCacheBlobUploadURLRequest, - GetCacheBlobUploadURLResponse - >[] -) { - let request: GetCacheBlobUploadURLRequest; - let response: GetCacheBlobUploadURLResponse; - - try { - request = GetCacheBlobUploadURLRequest.fromBinary(data); - } catch (e) { - if (e instanceof Error) { - const msg = "the protobuf request could not be decoded"; - throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true); - } - } - - if (interceptors && interceptors.length > 0) { - const interceptor = chainInterceptors(...interceptors) as Interceptor< - T, - GetCacheBlobUploadURLRequest, - GetCacheBlobUploadURLResponse - >; - response = await interceptor(ctx, request!, (ctx, inputReq) => { - return service.GetCacheBlobUploadURL(ctx, inputReq); - }); - } else { - response = await service.GetCacheBlobUploadURL(ctx, request!); - } - - return Buffer.from(GetCacheBlobUploadURLResponse.toBinary(response)); -} diff --git a/packages/cache/src/generated/results/api/v1/cache.ts b/packages/cache/src/generated/results/api/v1/cache.ts new file mode 100644 index 00000000..f7686fbd --- /dev/null +++ b/packages/cache/src/generated/results/api/v1/cache.ts @@ -0,0 +1,1324 @@ +// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies +// @generated from protobuf file "results/api/v1/cache.proto" (package "github.actions.results.api.v1", syntax proto3) +// tslint:disable +import { ServiceType } from "@protobuf-ts/runtime-rpc"; +import type { BinaryWriteOptions } from "@protobuf-ts/runtime"; +import type { IBinaryWriter } from "@protobuf-ts/runtime"; +import { WireType } from "@protobuf-ts/runtime"; +import type { BinaryReadOptions } from "@protobuf-ts/runtime"; +import type { IBinaryReader } from "@protobuf-ts/runtime"; +import { UnknownFieldHandler } from "@protobuf-ts/runtime"; +import type { PartialMessage } from "@protobuf-ts/runtime"; +import { reflectionMergePartial } from "@protobuf-ts/runtime"; +import { MESSAGE_TYPE } from "@protobuf-ts/runtime"; +import { MessageType } from "@protobuf-ts/runtime"; +import { Timestamp } from "../../../google/protobuf/timestamp"; +/** + * @generated from protobuf message github.actions.results.api.v1.CreateCacheEntryRequest + */ +export interface CreateCacheEntryRequest { + /** + * Workflow run backend ID + * + * @generated from protobuf field: string workflow_run_backend_id = 1; 
+ */ + workflowRunBackendId: string; + /** + * Workflow job run backend ID + * + * @generated from protobuf field: string workflow_job_run_backend_id = 2; + */ + workflowJobRunBackendId: string; + /** + * An explicit key for a cache entry + * + * @generated from protobuf field: string key = 3; + */ + key: string; + /** + * Hash of the compression tool, runner OS and paths cached + * + * @generated from protobuf field: string version = 4; + */ + version: string; +} +/** + * @generated from protobuf message github.actions.results.api.v1.CreateCacheEntryResponse + */ +export interface CreateCacheEntryResponse { + /** + * @generated from protobuf field: bool ok = 1; + */ + ok: boolean; + /** + * SAS URL to upload the cache archive + * + * @generated from protobuf field: string signed_upload_url = 2; + */ + signedUploadUrl: string; +} +/** + * @generated from protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadRequest + */ +export interface FinalizeCacheEntryUploadRequest { + /** + * Workflow run backend ID + * + * @generated from protobuf field: string workflow_run_backend_id = 1; + */ + workflowRunBackendId: string; + /** + * Workflow job run backend ID + * + * @generated from protobuf field: string workflow_job_run_backend_id = 2; + */ + workflowJobRunBackendId: string; + /** + * An explicit key for a cache entry + * + * @generated from protobuf field: string key = 3; + */ + key: string; + /** + * Size of the cache archive in Bytes + * + * @generated from protobuf field: int64 size_bytes = 4; + */ + sizeBytes: string; + /** + * Hash of the compression tool, runner OS and paths cached + * + * @generated from protobuf field: string version = 5; + */ + version: string; +} +/** + * @generated from protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadResponse + */ +export interface FinalizeCacheEntryUploadResponse { + /** + * @generated from protobuf field: bool ok = 1; + */ + ok: boolean; + /** + * Cache entry database ID + * + * @generated from protobuf field: int64 entry_id = 2; + */ + entryId: string; +} +/** + * @generated from protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLRequest + */ +export interface GetCacheEntryDownloadURLRequest { + /** + * Workflow run backend ID + * + * @generated from protobuf field: string workflow_run_backend_id = 1; + */ + workflowRunBackendId: string; + /** + * Workflow job run backend ID + * + * @generated from protobuf field: string workflow_job_run_backend_id = 2; + */ + workflowJobRunBackendId: string; + /** + * An explicit key for a cache entry + * + * @generated from protobuf field: string key = 3; + */ + key: string; + /** + * Restore keys used for prefix searching + * + * @generated from protobuf field: repeated string restore_keys = 4; + */ + restoreKeys: string[]; + /** + * Hash of the compression tool, runner OS and paths cached + * + * @generated from protobuf field: string version = 5; + */ + version: string; +} +/** + * @generated from protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLResponse + */ +export interface GetCacheEntryDownloadURLResponse { + /** + * @generated from protobuf field: bool ok = 1; + */ + ok: boolean; + /** + * SAS URL to download the cache archive + * + * @generated from protobuf field: string signed_download_url = 2; + */ + signedDownloadUrl: string; +} +/** + * @generated from protobuf message github.actions.results.api.v1.DeleteCacheEntryRequest + */ +export interface DeleteCacheEntryRequest { + /** + * Workflow run backend ID + * + * 
@generated from protobuf field: string workflow_run_backend_id = 1; + */ + workflowRunBackendId: string; + /** + * Workflow job run backend ID + * + * @generated from protobuf field: string workflow_job_run_backend_id = 2; + */ + workflowJobRunBackendId: string; + /** + * An explicit key for a cache entry + * + * @generated from protobuf field: string key = 3; + */ + key: string; +} +/** + * @generated from protobuf message github.actions.results.api.v1.DeleteCacheEntryResponse + */ +export interface DeleteCacheEntryResponse { + /** + * @generated from protobuf field: bool ok = 1; + */ + ok: boolean; + /** + * Cache entry database ID + * + * @generated from protobuf field: int64 entry_id = 2; + */ + entryId: string; +} +/** + * @generated from protobuf message github.actions.results.api.v1.ListCacheEntriesRequest + */ +export interface ListCacheEntriesRequest { + /** + * Workflow run backend ID + * + * @generated from protobuf field: string workflow_run_backend_id = 1; + */ + workflowRunBackendId: string; + /** + * Workflow job run backend ID + * + * @generated from protobuf field: string workflow_job_run_backend_id = 2; + */ + workflowJobRunBackendId: string; + /** + * An explicit key for a cache entry + * + * @generated from protobuf field: string key = 3; + */ + key: string; + /** + * Restore keys used for prefix searching + * + * @generated from protobuf field: repeated string restore_keys = 4; + */ + restoreKeys: string[]; +} +/** + * @generated from protobuf message github.actions.results.api.v1.ListCacheEntriesResponse + */ +export interface ListCacheEntriesResponse { + /** + * @generated from protobuf field: repeated github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry entries = 1; + */ + entries: ListCacheEntriesResponse_CacheEntry[]; +} +/** + * @generated from protobuf message github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry + */ +export interface ListCacheEntriesResponse_CacheEntry { + /** + * An explicit key for a cache entry + * + * @generated from protobuf field: string key = 1; + */ + key: string; + /** + * SHA256 hex digest of the cache archive + * + * @generated from protobuf field: string hash = 2; + */ + hash: string; + /** + * Cache entry size in bytes + * + * @generated from protobuf field: int64 size_bytes = 3; + */ + sizeBytes: string; + /** + * Access scope + * + * @generated from protobuf field: string scope = 4; + */ + scope: string; + /** + * Version SHA256 hex digest + * + * @generated from protobuf field: string version = 5; + */ + version: string; + /** + * When the cache entry was created + * + * @generated from protobuf field: google.protobuf.Timestamp created_at = 6; + */ + createdAt?: Timestamp; + /** + * When the cache entry was last accessed + * + * @generated from protobuf field: google.protobuf.Timestamp last_accessed_at = 7; + */ + lastAccessedAt?: Timestamp; + /** + * When the cache entry is set to expire + * + * @generated from protobuf field: google.protobuf.Timestamp expires_at = 8; + */ + expiresAt?: Timestamp; +} +/** + * @generated from protobuf message github.actions.results.api.v1.LookupCacheEntryRequest + */ +export interface LookupCacheEntryRequest { + /** + * Workflow run backend ID + * + * @generated from protobuf field: string workflow_run_backend_id = 1; + */ + workflowRunBackendId: string; + /** + * Workflow job run backend ID + * + * @generated from protobuf field: string workflow_job_run_backend_id = 2; + */ + workflowJobRunBackendId: string; + /** + * An explicit key for a cache entry + * + * 
@generated from protobuf field: string key = 3; + */ + key: string; + /** + * Restore keys used for prefix searching + * + * @generated from protobuf field: repeated string restore_keys = 4; + */ + restoreKeys: string[]; + /** + * Hash of the compression tool, runner OS and paths cached + * + * @generated from protobuf field: string version = 5; + */ + version: string; +} +/** + * @generated from protobuf message github.actions.results.api.v1.LookupCacheEntryResponse + */ +export interface LookupCacheEntryResponse { + /** + * Indicates whether the cache entry exists or not + * + * @generated from protobuf field: bool exists = 1; + */ + exists: boolean; +} +/** + * Matched cache entry metadata + * + * @generated from protobuf message github.actions.results.api.v1.LookupCacheEntryResponse.CacheEntry + */ +export interface LookupCacheEntryResponse_CacheEntry { + /** + * An explicit key for a cache entry + * + * @generated from protobuf field: string key = 1; + */ + key: string; + /** + * SHA256 hex digest of the cache archive + * + * @generated from protobuf field: string hash = 2; + */ + hash: string; + /** + * Cache entry size in bytes + * + * @generated from protobuf field: int64 size_bytes = 3; + */ + sizeBytes: string; + /** + * Access scope + * + * @generated from protobuf field: string scope = 4; + */ + scope: string; + /** + * Version SHA256 hex digest + * + * @generated from protobuf field: string version = 5; + */ + version: string; + /** + * When the cache entry was created + * + * @generated from protobuf field: google.protobuf.Timestamp created_at = 6; + */ + createdAt?: Timestamp; + /** + * When the cache entry was last accessed + * + * @generated from protobuf field: google.protobuf.Timestamp last_accessed_at = 7; + */ + lastAccessedAt?: Timestamp; + /** + * When the cache entry is set to expire + * + * @generated from protobuf field: google.protobuf.Timestamp expires_at = 8; + */ + expiresAt?: Timestamp; +} +// @generated message type with reflection information, may provide speed optimized methods +class CreateCacheEntryRequest$Type extends MessageType { + constructor() { + super("github.actions.results.api.v1.CreateCacheEntryRequest", [ + { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 4, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): CreateCacheEntryRequest { + const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", key: "", version: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CreateCacheEntryRequest): CreateCacheEntryRequest { + let message = target ?? 
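+        // internalBinaryRead either merges into a caller-supplied `target`
+        // or starts from a fresh default message, and it only consumes
+        // `length` bytes (`end = reader.pos + length`), which is what lets
+        // nested messages be decoded from the middle of a parent buffer.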
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string workflow_run_backend_id */ 1: + message.workflowRunBackendId = reader.string(); + break; + case /* string workflow_job_run_backend_id */ 2: + message.workflowJobRunBackendId = reader.string(); + break; + case /* string key */ 3: + message.key = reader.string(); + break; + case /* string version */ 4: + message.version = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: CreateCacheEntryRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string workflow_run_backend_id = 1; */ + if (message.workflowRunBackendId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId); + /* string workflow_job_run_backend_id = 2; */ + if (message.workflowJobRunBackendId !== "") + writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId); + /* string key = 3; */ + if (message.key !== "") + writer.tag(3, WireType.LengthDelimited).string(message.key); + /* string version = 4; */ + if (message.version !== "") + writer.tag(4, WireType.LengthDelimited).string(message.version); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message github.actions.results.api.v1.CreateCacheEntryRequest + */ +export const CreateCacheEntryRequest = new CreateCacheEntryRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class CreateCacheEntryResponse$Type extends MessageType { + constructor() { + super("github.actions.results.api.v1.CreateCacheEntryResponse", [ + { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }, + { no: 2, name: "signed_upload_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): CreateCacheEntryResponse { + const message = { ok: false, signedUploadUrl: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CreateCacheEntryResponse): CreateCacheEntryResponse { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* bool ok */ 1: + message.ok = reader.bool(); + break; + case /* string signed_upload_url */ 2: + message.signedUploadUrl = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
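+                        // readUnknownField policy: `true` (the default) keeps
+                        // unknown fields via UnknownFieldHandler.onRead,
+                        // `false` drops them, "throw" fails fast, and a custom
+                        // handler function receives the raw skipped bytes. For
+                        // example (illustrative call), fromBinary(bytes,
+                        // { readUnknownField: false }) would silently discard
+                        // fields added by newer schema revisions.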
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: CreateCacheEntryResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* bool ok = 1; */ + if (message.ok !== false) + writer.tag(1, WireType.Varint).bool(message.ok); + /* string signed_upload_url = 2; */ + if (message.signedUploadUrl !== "") + writer.tag(2, WireType.LengthDelimited).string(message.signedUploadUrl); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message github.actions.results.api.v1.CreateCacheEntryResponse + */ +export const CreateCacheEntryResponse = new CreateCacheEntryResponse$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class FinalizeCacheEntryUploadRequest$Type extends MessageType { + constructor() { + super("github.actions.results.api.v1.FinalizeCacheEntryUploadRequest", [ + { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 4, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ }, + { no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): FinalizeCacheEntryUploadRequest { + const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", key: "", sizeBytes: "0", version: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeCacheEntryUploadRequest): FinalizeCacheEntryUploadRequest { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string workflow_run_backend_id */ 1: + message.workflowRunBackendId = reader.string(); + break; + case /* string workflow_job_run_backend_id */ 2: + message.workflowJobRunBackendId = reader.string(); + break; + case /* string key */ 3: + message.key = reader.string(); + break; + case /* int64 size_bytes */ 4: + message.sizeBytes = reader.int64().toString(); + break; + case /* string version */ 5: + message.version = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: FinalizeCacheEntryUploadRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string workflow_run_backend_id = 1; */ + if (message.workflowRunBackendId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId); + /* string workflow_job_run_backend_id = 2; */ + if (message.workflowJobRunBackendId !== "") + writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId); + /* string key = 3; */ + if (message.key !== "") + writer.tag(3, WireType.LengthDelimited).string(message.key); + /* int64 size_bytes = 4; */ + if (message.sizeBytes !== "0") + writer.tag(4, WireType.Varint).int64(message.sizeBytes); + /* string version = 5; */ + if (message.version !== "") + writer.tag(5, WireType.LengthDelimited).string(message.version); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadRequest + */ +export const FinalizeCacheEntryUploadRequest = new FinalizeCacheEntryUploadRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class FinalizeCacheEntryUploadResponse$Type extends MessageType { + constructor() { + super("github.actions.results.api.v1.FinalizeCacheEntryUploadResponse", [ + { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }, + { no: 2, name: "entry_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ } + ]); + } + create(value?: PartialMessage): FinalizeCacheEntryUploadResponse { + const message = { ok: false, entryId: "0" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeCacheEntryUploadResponse): FinalizeCacheEntryUploadResponse { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* bool ok */ 1: + message.ok = reader.bool(); + break; + case /* int64 entry_id */ 2: + message.entryId = reader.int64().toString(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: FinalizeCacheEntryUploadResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* bool ok = 1; */ + if (message.ok !== false) + writer.tag(1, WireType.Varint).bool(message.ok); + /* int64 entry_id = 2; */ + if (message.entryId !== "0") + writer.tag(2, WireType.Varint).int64(message.entryId); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
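+            // Write-side mirror of the proto3 rule used above: fields at
+            // their default value ("" / false / "0") are skipped entirely,
+            // so an all-default message encodes to zero bytes. A round trip
+            // such as (illustrative)
+            //   FinalizeCacheEntryUploadResponse.fromBinary(
+            //     FinalizeCacheEntryUploadResponse.toBinary(msg))
+            // therefore reproduces msg field for field.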
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadResponse + */ +export const FinalizeCacheEntryUploadResponse = new FinalizeCacheEntryUploadResponse$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class GetCacheEntryDownloadURLRequest$Type extends MessageType { + constructor() { + super("github.actions.results.api.v1.GetCacheEntryDownloadURLRequest", [ + { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 4, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }, + { no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): GetCacheEntryDownloadURLRequest { + const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", key: "", restoreKeys: [], version: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCacheEntryDownloadURLRequest): GetCacheEntryDownloadURLRequest { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string workflow_run_backend_id */ 1: + message.workflowRunBackendId = reader.string(); + break; + case /* string workflow_job_run_backend_id */ 2: + message.workflowJobRunBackendId = reader.string(); + break; + case /* string key */ 3: + message.key = reader.string(); + break; + case /* repeated string restore_keys */ 4: + message.restoreKeys.push(reader.string()); + break; + case /* string version */ 5: + message.version = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: GetCacheEntryDownloadURLRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string workflow_run_backend_id = 1; */ + if (message.workflowRunBackendId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId); + /* string workflow_job_run_backend_id = 2; */ + if (message.workflowJobRunBackendId !== "") + writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId); + /* string key = 3; */ + if (message.key !== "") + writer.tag(3, WireType.LengthDelimited).string(message.key); + /* repeated string restore_keys = 4; */ + for (let i = 0; i < message.restoreKeys.length; i++) + writer.tag(4, WireType.LengthDelimited).string(message.restoreKeys[i]); + /* string version = 5; */ + if (message.version !== "") + writer.tag(5, WireType.LengthDelimited).string(message.version); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLRequest + */ +export const GetCacheEntryDownloadURLRequest = new GetCacheEntryDownloadURLRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class GetCacheEntryDownloadURLResponse$Type extends MessageType { + constructor() { + super("github.actions.results.api.v1.GetCacheEntryDownloadURLResponse", [ + { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }, + { no: 2, name: "signed_download_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): GetCacheEntryDownloadURLResponse { + const message = { ok: false, signedDownloadUrl: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCacheEntryDownloadURLResponse): GetCacheEntryDownloadURLResponse { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* bool ok */ 1: + message.ok = reader.bool(); + break; + case /* string signed_download_url */ 2: + message.signedDownloadUrl = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: GetCacheEntryDownloadURLResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* bool ok = 1; */ + if (message.ok !== false) + writer.tag(1, WireType.Varint).bool(message.ok); + /* string signed_download_url = 2; */ + if (message.signedDownloadUrl !== "") + writer.tag(2, WireType.LengthDelimited).string(message.signedDownloadUrl); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLResponse
+ */
+export const GetCacheEntryDownloadURLResponse = new GetCacheEntryDownloadURLResponse$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class DeleteCacheEntryRequest$Type extends MessageType<DeleteCacheEntryRequest> {
+    constructor() {
+        super("github.actions.results.api.v1.DeleteCacheEntryRequest", [
+            { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 3, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
+        ]);
+    }
+    create(value?: PartialMessage<DeleteCacheEntryRequest>): DeleteCacheEntryRequest {
+        const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", key: "" };
+        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            reflectionMergePartial<DeleteCacheEntryRequest>(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DeleteCacheEntryRequest): DeleteCacheEntryRequest {
+        let message = target ?? this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* string workflow_run_backend_id */ 1:
+                    message.workflowRunBackendId = reader.string();
+                    break;
+                case /* string workflow_job_run_backend_id */ 2:
+                    message.workflowJobRunBackendId = reader.string();
+                    break;
+                case /* string key */ 3:
+                    message.key = reader.string();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message: DeleteCacheEntryRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
+        /* string workflow_run_backend_id = 1; */
+        if (message.workflowRunBackendId !== "")
+            writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
+        /* string workflow_job_run_backend_id = 2; */
+        if (message.workflowJobRunBackendId !== "")
+            writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId);
+        /* string key = 3; */
+        if (message.key !== "")
+            writer.tag(3, WireType.LengthDelimited).string(message.key);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.DeleteCacheEntryRequest
+ */
+export const DeleteCacheEntryRequest = new DeleteCacheEntryRequest$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class DeleteCacheEntryResponse$Type extends MessageType<DeleteCacheEntryResponse> {
+    constructor() {
+        super("github.actions.results.api.v1.DeleteCacheEntryResponse", [
+            { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
+            { no: 2, name: "entry_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
+        ]);
+    }
+    create(value?: PartialMessage<DeleteCacheEntryResponse>): DeleteCacheEntryResponse {
+        const message = { ok: false, entryId: "0" };
+        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            reflectionMergePartial<DeleteCacheEntryResponse>(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DeleteCacheEntryResponse): DeleteCacheEntryResponse {
+        let message = target ?? this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* bool ok */ 1:
+                    message.ok = reader.bool();
+                    break;
+                case /* int64 entry_id */ 2:
+                    message.entryId = reader.int64().toString();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message: DeleteCacheEntryResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
+        /* bool ok = 1; */
+        if (message.ok !== false)
+            writer.tag(1, WireType.Varint).bool(message.ok);
+        /* int64 entry_id = 2; */
+        if (message.entryId !== "0")
+            writer.tag(2, WireType.Varint).int64(message.entryId);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ?
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message github.actions.results.api.v1.DeleteCacheEntryResponse + */ +export const DeleteCacheEntryResponse = new DeleteCacheEntryResponse$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ListCacheEntriesRequest$Type extends MessageType { + constructor() { + super("github.actions.results.api.v1.ListCacheEntriesRequest", [ + { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 4, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): ListCacheEntriesRequest { + const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", key: "", restoreKeys: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListCacheEntriesRequest): ListCacheEntriesRequest { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string workflow_run_backend_id */ 1: + message.workflowRunBackendId = reader.string(); + break; + case /* string workflow_job_run_backend_id */ 2: + message.workflowJobRunBackendId = reader.string(); + break; + case /* string key */ 3: + message.key = reader.string(); + break; + case /* repeated string restore_keys */ 4: + message.restoreKeys.push(reader.string()); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: ListCacheEntriesRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string workflow_run_backend_id = 1; */ + if (message.workflowRunBackendId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId); + /* string workflow_job_run_backend_id = 2; */ + if (message.workflowJobRunBackendId !== "") + writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId); + /* string key = 3; */ + if (message.key !== "") + writer.tag(3, WireType.LengthDelimited).string(message.key); + /* repeated string restore_keys = 4; */ + for (let i = 0; i < message.restoreKeys.length; i++) + writer.tag(4, WireType.LengthDelimited).string(message.restoreKeys[i]); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
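+            // restore_keys is an unpacked repeated string: the loop above
+            // emits one length-delimited tag-4 record per key, preserving
+            // order. Per the field docs these are prefix-search fallbacks,
+            // so callers typically list them most-specific first, e.g.
+            // (hypothetical values)
+            //   restoreKeys: ["npm-linux-x64-", "npm-linux-", "npm-"]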
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message github.actions.results.api.v1.ListCacheEntriesRequest + */ +export const ListCacheEntriesRequest = new ListCacheEntriesRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ListCacheEntriesResponse$Type extends MessageType { + constructor() { + super("github.actions.results.api.v1.ListCacheEntriesResponse", [ + { no: 1, name: "entries", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => ListCacheEntriesResponse_CacheEntry } + ]); + } + create(value?: PartialMessage): ListCacheEntriesResponse { + const message = { entries: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListCacheEntriesResponse): ListCacheEntriesResponse { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* repeated github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry entries */ 1: + message.entries.push(ListCacheEntriesResponse_CacheEntry.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: ListCacheEntriesResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* repeated github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry entries = 1; */ + for (let i = 0; i < message.entries.length; i++) + ListCacheEntriesResponse_CacheEntry.internalBinaryWrite(message.entries[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message github.actions.results.api.v1.ListCacheEntriesResponse + */ +export const ListCacheEntriesResponse = new ListCacheEntriesResponse$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ListCacheEntriesResponse_CacheEntry$Type extends MessageType { + constructor() { + super("github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry", [ + { no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "hash", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ }, + { no: 4, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 6, name: "created_at", kind: "message", T: () => Timestamp }, + { no: 7, name: "last_accessed_at", kind: "message", T: () => Timestamp }, + { no: 8, name: "expires_at", kind: "message", T: () => Timestamp } + ]); + } + create(value?: PartialMessage): ListCacheEntriesResponse_CacheEntry { + const message = { key: "", hash: "", sizeBytes: "0", scope: "", version: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListCacheEntriesResponse_CacheEntry): ListCacheEntriesResponse_CacheEntry { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string key */ 1: + message.key = reader.string(); + break; + case /* string hash */ 2: + message.hash = reader.string(); + break; + case /* int64 size_bytes */ 3: + message.sizeBytes = reader.int64().toString(); + break; + case /* string scope */ 4: + message.scope = reader.string(); + break; + case /* string version */ 5: + message.version = reader.string(); + break; + case /* google.protobuf.Timestamp created_at */ 6: + message.createdAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt); + break; + case /* google.protobuf.Timestamp last_accessed_at */ 7: + message.lastAccessedAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.lastAccessedAt); + break; + case /* google.protobuf.Timestamp expires_at */ 8: + message.expiresAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
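+                        // The three Timestamp fields above are decoded with a
+                        // merge-style internalBinaryRead into any value already
+                        // present. protobuf-ts ships conversion helpers on the
+                        // well-known type, so (assuming entry.createdAt is set)
+                        // something like
+                        //   Timestamp.toDate(entry.createdAt)
+                        // yields a plain JS Date (illustrative).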
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: ListCacheEntriesResponse_CacheEntry, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string key = 1; */ + if (message.key !== "") + writer.tag(1, WireType.LengthDelimited).string(message.key); + /* string hash = 2; */ + if (message.hash !== "") + writer.tag(2, WireType.LengthDelimited).string(message.hash); + /* int64 size_bytes = 3; */ + if (message.sizeBytes !== "0") + writer.tag(3, WireType.Varint).int64(message.sizeBytes); + /* string scope = 4; */ + if (message.scope !== "") + writer.tag(4, WireType.LengthDelimited).string(message.scope); + /* string version = 5; */ + if (message.version !== "") + writer.tag(5, WireType.LengthDelimited).string(message.version); + /* google.protobuf.Timestamp created_at = 6; */ + if (message.createdAt) + Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, WireType.LengthDelimited).fork(), options).join(); + /* google.protobuf.Timestamp last_accessed_at = 7; */ + if (message.lastAccessedAt) + Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, WireType.LengthDelimited).fork(), options).join(); + /* google.protobuf.Timestamp expires_at = 8; */ + if (message.expiresAt) + Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry + */ +export const ListCacheEntriesResponse_CacheEntry = new ListCacheEntriesResponse_CacheEntry$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class LookupCacheEntryRequest$Type extends MessageType { + constructor() { + super("github.actions.results.api.v1.LookupCacheEntryRequest", [ + { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 4, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }, + { no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): LookupCacheEntryRequest { + const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", key: "", restoreKeys: [], version: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: LookupCacheEntryRequest): LookupCacheEntryRequest { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string workflow_run_backend_id */ 1: + message.workflowRunBackendId = reader.string(); + break; + case /* string workflow_job_run_backend_id */ 2: + message.workflowJobRunBackendId = reader.string(); + break; + case /* string key */ 3: + message.key = reader.string(); + break; + case /* repeated string restore_keys */ 4: + message.restoreKeys.push(reader.string()); + break; + case /* string version */ 5: + message.version = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: LookupCacheEntryRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string workflow_run_backend_id = 1; */ + if (message.workflowRunBackendId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId); + /* string workflow_job_run_backend_id = 2; */ + if (message.workflowJobRunBackendId !== "") + writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId); + /* string key = 3; */ + if (message.key !== "") + writer.tag(3, WireType.LengthDelimited).string(message.key); + /* repeated string restore_keys = 4; */ + for (let i = 0; i < message.restoreKeys.length; i++) + writer.tag(4, WireType.LengthDelimited).string(message.restoreKeys[i]); + /* string version = 5; */ + if (message.version !== "") + writer.tag(5, WireType.LengthDelimited).string(message.version); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryRequest + */ +export const LookupCacheEntryRequest = new LookupCacheEntryRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class LookupCacheEntryResponse$Type extends MessageType { + constructor() { + super("github.actions.results.api.v1.LookupCacheEntryResponse", [ + { no: 1, name: "exists", kind: "scalar", T: 8 /*ScalarType.BOOL*/ } + ]); + } + create(value?: PartialMessage): LookupCacheEntryResponse { + const message = { exists: false }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: LookupCacheEntryResponse): LookupCacheEntryResponse { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* bool exists */ 1: + message.exists = reader.bool(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
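+                        // LookupCacheEntryResponse carries only the `exists`
+                        // flag read above: a lookup answers "is there a hit for
+                        // this key / restore-keys / version tuple?" without
+                        // minting a signed download URL, which is what
+                        // GetCacheEntryDownloadURL is for.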
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: LookupCacheEntryResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* bool exists = 1; */ + if (message.exists !== false) + writer.tag(1, WireType.Varint).bool(message.exists); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryResponse + */ +export const LookupCacheEntryResponse = new LookupCacheEntryResponse$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class LookupCacheEntryResponse_CacheEntry$Type extends MessageType { + constructor() { + super("github.actions.results.api.v1.LookupCacheEntryResponse.CacheEntry", [ + { no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "hash", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ }, + { no: 4, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 6, name: "created_at", kind: "message", T: () => Timestamp }, + { no: 7, name: "last_accessed_at", kind: "message", T: () => Timestamp }, + { no: 8, name: "expires_at", kind: "message", T: () => Timestamp } + ]); + } + create(value?: PartialMessage): LookupCacheEntryResponse_CacheEntry { + const message = { key: "", hash: "", sizeBytes: "0", scope: "", version: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: LookupCacheEntryResponse_CacheEntry): LookupCacheEntryResponse_CacheEntry { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string key */ 1: + message.key = reader.string(); + break; + case /* string hash */ 2: + message.hash = reader.string(); + break; + case /* int64 size_bytes */ 3: + message.sizeBytes = reader.int64().toString(); + break; + case /* string scope */ 4: + message.scope = reader.string(); + break; + case /* string version */ 5: + message.version = reader.string(); + break; + case /* google.protobuf.Timestamp created_at */ 6: + message.createdAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt); + break; + case /* google.protobuf.Timestamp last_accessed_at */ 7: + message.lastAccessedAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.lastAccessedAt); + break; + case /* google.protobuf.Timestamp expires_at */ 8: + message.expiresAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
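+                        // size_bytes above is surfaced as a decimal string
+                        // (reader.int64().toString()) rather than a JS number:
+                        // int64 values past Number.MAX_SAFE_INTEGER
+                        // (2^53 - 1 = 9007199254740991) would otherwise lose
+                        // precision.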
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: LookupCacheEntryResponse_CacheEntry, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string key = 1; */ + if (message.key !== "") + writer.tag(1, WireType.LengthDelimited).string(message.key); + /* string hash = 2; */ + if (message.hash !== "") + writer.tag(2, WireType.LengthDelimited).string(message.hash); + /* int64 size_bytes = 3; */ + if (message.sizeBytes !== "0") + writer.tag(3, WireType.Varint).int64(message.sizeBytes); + /* string scope = 4; */ + if (message.scope !== "") + writer.tag(4, WireType.LengthDelimited).string(message.scope); + /* string version = 5; */ + if (message.version !== "") + writer.tag(5, WireType.LengthDelimited).string(message.version); + /* google.protobuf.Timestamp created_at = 6; */ + if (message.createdAt) + Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, WireType.LengthDelimited).fork(), options).join(); + /* google.protobuf.Timestamp last_accessed_at = 7; */ + if (message.lastAccessedAt) + Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, WireType.LengthDelimited).fork(), options).join(); + /* google.protobuf.Timestamp expires_at = 8; */ + if (message.expiresAt) + Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryResponse.CacheEntry + */ +export const LookupCacheEntryResponse_CacheEntry = new LookupCacheEntryResponse_CacheEntry$Type(); +/** + * @generated ServiceType for protobuf service github.actions.results.api.v1.CacheService + */ +export const CacheService = new ServiceType("github.actions.results.api.v1.CacheService", [ + { name: "CreateCacheEntry", options: {}, I: CreateCacheEntryRequest, O: CreateCacheEntryResponse }, + { name: "FinalizeCacheEntryUpload", options: {}, I: FinalizeCacheEntryUploadRequest, O: FinalizeCacheEntryUploadResponse }, + { name: "GetCacheEntryDownloadURL", options: {}, I: GetCacheEntryDownloadURLRequest, O: GetCacheEntryDownloadURLResponse }, + { name: "DeleteCacheEntry", options: {}, I: DeleteCacheEntryRequest, O: DeleteCacheEntryResponse }, + { name: "ListCacheEntries", options: {}, I: ListCacheEntriesRequest, O: ListCacheEntriesResponse }, + { name: "LookupCacheEntry", options: {}, I: LookupCacheEntryRequest, O: LookupCacheEntryResponse } +]); diff --git a/packages/cache/src/generated/results/api/v1/cache.twirp.ts b/packages/cache/src/generated/results/api/v1/cache.twirp.ts new file mode 100644 index 00000000..c8f1f633 --- /dev/null +++ b/packages/cache/src/generated/results/api/v1/cache.twirp.ts @@ -0,0 +1,1209 @@ +import { + TwirpContext, + TwirpServer, + RouterEvents, + TwirpError, + TwirpErrorCode, + Interceptor, + TwirpContentType, + chainInterceptors, +} from "twirp-ts"; +import { + CreateCacheEntryRequest, + CreateCacheEntryResponse, + FinalizeCacheEntryUploadRequest, + FinalizeCacheEntryUploadResponse, + GetCacheEntryDownloadURLRequest, + GetCacheEntryDownloadURLResponse, + DeleteCacheEntryRequest, + DeleteCacheEntryResponse, + ListCacheEntriesRequest, + ListCacheEntriesResponse, + LookupCacheEntryRequest, + LookupCacheEntryResponse, +} from "./cache"; + +//==================================// +// 
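Client Code                       //
+//==================================//
+
+// A concrete Rpc transport is supplied by the caller of the generated
+// clients below. A minimal JSON-only sketch over @actions/http-client,
+// where baseUrl and any auth handling are hypothetical and not part of
+// this file:
+//
+//   import { HttpClient } from "@actions/http-client";
+//   const http = new HttpClient("actions/cache");
+//   const rpc: Rpc = {
+//     async request(service, method, contentType, data) {
+//       const res = await http.post(
+//         `${baseUrl}/twirp/${service}/${method}`,
+//         JSON.stringify(data),
+//         { "Content-Type": contentType }
+//       );
+//       return JSON.parse(await res.readBody());
+//     },
+//   };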
+
+interface Rpc {
+  request(
+    service: string,
+    method: string,
+    contentType: "application/json" | "application/protobuf",
+    data: object | Uint8Array
+  ): Promise<object | Uint8Array>;
+}
+
+export interface CacheServiceClient {
+  CreateCacheEntry(
+    request: CreateCacheEntryRequest
+  ): Promise<CreateCacheEntryResponse>;
+  FinalizeCacheEntryUpload(
+    request: FinalizeCacheEntryUploadRequest
+  ): Promise<FinalizeCacheEntryUploadResponse>;
+  GetCacheEntryDownloadURL(
+    request: GetCacheEntryDownloadURLRequest
+  ): Promise<GetCacheEntryDownloadURLResponse>;
+  DeleteCacheEntry(
+    request: DeleteCacheEntryRequest
+  ): Promise<DeleteCacheEntryResponse>;
+  ListCacheEntries(
+    request: ListCacheEntriesRequest
+  ): Promise<ListCacheEntriesResponse>;
+  LookupCacheEntry(
+    request: LookupCacheEntryRequest
+  ): Promise<LookupCacheEntryResponse>;
+}
+
+export class CacheServiceClientJSON implements CacheServiceClient {
+  private readonly rpc: Rpc;
+  constructor(rpc: Rpc) {
+    this.rpc = rpc;
+    this.CreateCacheEntry.bind(this);
+    this.FinalizeCacheEntryUpload.bind(this);
+    this.GetCacheEntryDownloadURL.bind(this);
+    this.DeleteCacheEntry.bind(this);
+    this.ListCacheEntries.bind(this);
+    this.LookupCacheEntry.bind(this);
+  }
+  CreateCacheEntry(
+    request: CreateCacheEntryRequest
+  ): Promise<CreateCacheEntryResponse> {
+    const data = CreateCacheEntryRequest.toJson(request, {
+      useProtoFieldName: true,
+      emitDefaultValues: false,
+    });
+    const promise = this.rpc.request(
+      "github.actions.results.api.v1.CacheService",
+      "CreateCacheEntry",
+      "application/json",
+      data as object
+    );
+    return promise.then((data) =>
+      CreateCacheEntryResponse.fromJson(data as any, {
+        ignoreUnknownFields: true,
+      })
+    );
+  }
+
+  FinalizeCacheEntryUpload(
+    request: FinalizeCacheEntryUploadRequest
+  ): Promise<FinalizeCacheEntryUploadResponse> {
+    const data = FinalizeCacheEntryUploadRequest.toJson(request, {
+      useProtoFieldName: true,
+      emitDefaultValues: false,
+    });
+    const promise = this.rpc.request(
+      "github.actions.results.api.v1.CacheService",
+      "FinalizeCacheEntryUpload",
+      "application/json",
+      data as object
+    );
+    return promise.then((data) =>
+      FinalizeCacheEntryUploadResponse.fromJson(data as any, {
+        ignoreUnknownFields: true,
+      })
+    );
+  }
+
+  GetCacheEntryDownloadURL(
+    request: GetCacheEntryDownloadURLRequest
+  ): Promise<GetCacheEntryDownloadURLResponse> {
+    const data = GetCacheEntryDownloadURLRequest.toJson(request, {
+      useProtoFieldName: true,
+      emitDefaultValues: false,
+    });
+    const promise = this.rpc.request(
+      "github.actions.results.api.v1.CacheService",
+      "GetCacheEntryDownloadURL",
+      "application/json",
+      data as object
+    );
+    return promise.then((data) =>
+      GetCacheEntryDownloadURLResponse.fromJson(data as any, {
+        ignoreUnknownFields: true,
+      })
+    );
+  }
+
+  DeleteCacheEntry(
+    request: DeleteCacheEntryRequest
+  ): Promise<DeleteCacheEntryResponse> {
+    const data = DeleteCacheEntryRequest.toJson(request, {
+      useProtoFieldName: true,
+      emitDefaultValues: false,
+    });
+    const promise = this.rpc.request(
+      "github.actions.results.api.v1.CacheService",
+      "DeleteCacheEntry",
+      "application/json",
+      data as object
+    );
+    return promise.then((data) =>
+      DeleteCacheEntryResponse.fromJson(data as any, {
+        ignoreUnknownFields: true,
+      })
+    );
+  }
+
+  ListCacheEntries(
+    request: ListCacheEntriesRequest
+  ): Promise<ListCacheEntriesResponse> {
+    const data = ListCacheEntriesRequest.toJson(request, {
+      useProtoFieldName: true,
+      emitDefaultValues: false,
+    });
+    const promise = this.rpc.request(
+      "github.actions.results.api.v1.CacheService",
+      "ListCacheEntries",
+      "application/json",
+      data as object
+    );
+    return promise.then((data) =>
+      ListCacheEntriesResponse.fromJson(data as any, {
+        ignoreUnknownFields: true,
+      })
+    );
+  }
+
+  LookupCacheEntry(
+    request: LookupCacheEntryRequest
+  ): Promise<LookupCacheEntryResponse> {
+    const data = LookupCacheEntryRequest.toJson(request, {
+      useProtoFieldName: true,
+      emitDefaultValues: false,
+    });
+    const promise = this.rpc.request(
+      "github.actions.results.api.v1.CacheService",
+      "LookupCacheEntry",
+      "application/json",
+      data as object
+    );
+    return promise.then((data) =>
+      LookupCacheEntryResponse.fromJson(data as any, {
+        ignoreUnknownFields: true,
+      })
+    );
+  }
+}
+
+export class CacheServiceClientProtobuf implements CacheServiceClient {
+  private readonly rpc: Rpc;
+  constructor(rpc: Rpc) {
+    this.rpc = rpc;
+    this.CreateCacheEntry.bind(this);
+    this.FinalizeCacheEntryUpload.bind(this);
+    this.GetCacheEntryDownloadURL.bind(this);
+    this.DeleteCacheEntry.bind(this);
+    this.ListCacheEntries.bind(this);
+    this.LookupCacheEntry.bind(this);
+  }
+  CreateCacheEntry(
+    request: CreateCacheEntryRequest
+  ): Promise<CreateCacheEntryResponse> {
+    const data = CreateCacheEntryRequest.toBinary(request);
+    const promise = this.rpc.request(
+      "github.actions.results.api.v1.CacheService",
+      "CreateCacheEntry",
+      "application/protobuf",
+      data
+    );
+    return promise.then((data) =>
+      CreateCacheEntryResponse.fromBinary(data as Uint8Array)
+    );
+  }
+
+  FinalizeCacheEntryUpload(
+    request: FinalizeCacheEntryUploadRequest
+  ): Promise<FinalizeCacheEntryUploadResponse> {
+    const data = FinalizeCacheEntryUploadRequest.toBinary(request);
+    const promise = this.rpc.request(
+      "github.actions.results.api.v1.CacheService",
+      "FinalizeCacheEntryUpload",
+      "application/protobuf",
+      data
+    );
+    return promise.then((data) =>
+      FinalizeCacheEntryUploadResponse.fromBinary(data as Uint8Array)
+    );
+  }
+
+  GetCacheEntryDownloadURL(
+    request: GetCacheEntryDownloadURLRequest
+  ): Promise<GetCacheEntryDownloadURLResponse> {
+    const data = GetCacheEntryDownloadURLRequest.toBinary(request);
+    const promise = this.rpc.request(
+      "github.actions.results.api.v1.CacheService",
+      "GetCacheEntryDownloadURL",
+      "application/protobuf",
+      data
+    );
+    return promise.then((data) =>
+      GetCacheEntryDownloadURLResponse.fromBinary(data as Uint8Array)
+    );
+  }
+
+  DeleteCacheEntry(
+    request: DeleteCacheEntryRequest
+  ): Promise<DeleteCacheEntryResponse> {
+    const data = DeleteCacheEntryRequest.toBinary(request);
+    const promise = this.rpc.request(
+      "github.actions.results.api.v1.CacheService",
+      "DeleteCacheEntry",
+      "application/protobuf",
+      data
+    );
+    return promise.then((data) =>
+      DeleteCacheEntryResponse.fromBinary(data as Uint8Array)
+    );
+  }
+
+  ListCacheEntries(
+    request: ListCacheEntriesRequest
+  ): Promise<ListCacheEntriesResponse> {
+    const data = ListCacheEntriesRequest.toBinary(request);
+    const promise = this.rpc.request(
+      "github.actions.results.api.v1.CacheService",
+      "ListCacheEntries",
+      "application/protobuf",
+      data
+    );
+    return promise.then((data) =>
+      ListCacheEntriesResponse.fromBinary(data as Uint8Array)
+    );
+  }
+
+  LookupCacheEntry(
+    request: LookupCacheEntryRequest
+  ): Promise<LookupCacheEntryResponse> {
+    const data = LookupCacheEntryRequest.toBinary(request);
+    const promise = this.rpc.request(
+      "github.actions.results.api.v1.CacheService",
+      "LookupCacheEntry",
+      "application/protobuf",
+      data
+    );
+    return promise.then((data) =>
+      LookupCacheEntryResponse.fromBinary(data as Uint8Array)
+    );
+  }
+}
+
+//==================================//
+//          Server Code             //
+//==================================//
+
+export interface CacheServiceTwirp<T extends TwirpContext = TwirpContext> {
+  CreateCacheEntry(
+    ctx: T,
+    request: CreateCacheEntryRequest
+  ): Promise<CreateCacheEntryResponse>;
+  FinalizeCacheEntryUpload(
+    ctx: T,
+    request: FinalizeCacheEntryUploadRequest
+  ): Promise<FinalizeCacheEntryUploadResponse>;
+  GetCacheEntryDownloadURL(
+    ctx: T,
+    request: GetCacheEntryDownloadURLRequest
+  ): Promise<GetCacheEntryDownloadURLResponse>;
+  DeleteCacheEntry(
+    ctx: T,
+    request: DeleteCacheEntryRequest
+  ): Promise<DeleteCacheEntryResponse>;
+  ListCacheEntries(
+    ctx: T,
+    request: ListCacheEntriesRequest
+  ): Promise<ListCacheEntriesResponse>;
+  LookupCacheEntry(
+    ctx: T,
+    request: LookupCacheEntryRequest
+  ): Promise<LookupCacheEntryResponse>;
+}
+
+export enum CacheServiceMethod {
+  CreateCacheEntry = "CreateCacheEntry",
+  FinalizeCacheEntryUpload = "FinalizeCacheEntryUpload",
+  GetCacheEntryDownloadURL = "GetCacheEntryDownloadURL",
+  DeleteCacheEntry = "DeleteCacheEntry",
+  ListCacheEntries = "ListCacheEntries",
+  LookupCacheEntry = "LookupCacheEntry",
+}
+
+export const CacheServiceMethodList = [
+  CacheServiceMethod.CreateCacheEntry,
+  CacheServiceMethod.FinalizeCacheEntryUpload,
+  CacheServiceMethod.GetCacheEntryDownloadURL,
+  CacheServiceMethod.DeleteCacheEntry,
+  CacheServiceMethod.ListCacheEntries,
+  CacheServiceMethod.LookupCacheEntry,
+];
+
+export function createCacheServiceServer<
+  T extends TwirpContext = TwirpContext
+>(service: CacheServiceTwirp<T>) {
+  return new TwirpServer<CacheServiceTwirp, T>({
+    service,
+    packageName: "github.actions.results.api.v1",
+    serviceName: "CacheService",
+    methodList: CacheServiceMethodList,
+    matchRoute: matchCacheServiceRoute,
+  });
+}
+
+function matchCacheServiceRoute<T extends TwirpContext = TwirpContext>(
+  method: string,
+  events: RouterEvents<T>
+) {
+  switch (method) {
+    case "CreateCacheEntry":
+      return async (
+        ctx: T,
+        service: CacheServiceTwirp,
+        data: Buffer,
+        interceptors?: Interceptor<
+          T,
+          CreateCacheEntryRequest,
+          CreateCacheEntryResponse
+        >[]
+      ) => {
+        ctx = { ...ctx, methodName: "CreateCacheEntry" };
+        await events.onMatch(ctx);
+        return handleCacheServiceCreateCacheEntryRequest<T>(
+          ctx,
+          service,
+          data,
+          interceptors
+        );
+      };
+    case "FinalizeCacheEntryUpload":
+      return async (
+        ctx: T,
+        service: CacheServiceTwirp,
+        data: Buffer,
+        interceptors?: Interceptor<
+          T,
+          FinalizeCacheEntryUploadRequest,
+          FinalizeCacheEntryUploadResponse
+        >[]
+      ) => {
+        ctx = { ...ctx, methodName: "FinalizeCacheEntryUpload" };
+        await events.onMatch(ctx);
+        return handleCacheServiceFinalizeCacheEntryUploadRequest<T>(
+          ctx,
+          service,
+          data,
+          interceptors
+        );
+      };
+    case "GetCacheEntryDownloadURL":
+      return async (
+        ctx: T,
+        service: CacheServiceTwirp,
+        data: Buffer,
+        interceptors?: Interceptor<
+          T,
+          GetCacheEntryDownloadURLRequest,
+          GetCacheEntryDownloadURLResponse
+        >[]
+      ) => {
+        ctx = { ...ctx, methodName: "GetCacheEntryDownloadURL" };
+        await events.onMatch(ctx);
+        return handleCacheServiceGetCacheEntryDownloadURLRequest<T>(
+          ctx,
+          service,
+          data,
+          interceptors
+        );
+      };
+    case "DeleteCacheEntry":
+      return async (
+        ctx: T,
+        service: CacheServiceTwirp,
+        data: Buffer,
+        interceptors?: Interceptor<
+          T,
+          DeleteCacheEntryRequest,
+          DeleteCacheEntryResponse
+        >[]
+      ) => {
+        ctx = { ...ctx, methodName: "DeleteCacheEntry" };
+        await events.onMatch(ctx);
+        return handleCacheServiceDeleteCacheEntryRequest<T>(
+          ctx,
+          service,
+          data,
+          interceptors
+        );
+      };
+    case "ListCacheEntries":
+      return async (
+        ctx: T,
+        service: CacheServiceTwirp,
+        data: Buffer,
+        interceptors?: Interceptor<
+          T,
+          ListCacheEntriesRequest,
+          ListCacheEntriesResponse
+        >[]
+      ) => {
+        ctx = { ...ctx, methodName: "ListCacheEntries" };
+        await events.onMatch(ctx);
+        return handleCacheServiceListCacheEntriesRequest<T>(
+          ctx,
+          service,
+          data,
+          interceptors
+        );
+      };
+    case "LookupCacheEntry":
+      return async (
+        ctx: T,
+        service: CacheServiceTwirp,
+        data: Buffer,
+        interceptors?: Interceptor<
+          T,
+          LookupCacheEntryRequest,
+          LookupCacheEntryResponse
+        >[]
+      ) => {
+        ctx = { ...ctx, methodName: "LookupCacheEntry" };
+        await events.onMatch(ctx);
+        return
handleCacheServiceLookupCacheEntryRequest( + ctx, + service, + data, + interceptors + ); + }; + default: + events.onNotFound(); + const msg = `no handler found`; + throw new TwirpError(TwirpErrorCode.BadRoute, msg); + } +} + +function handleCacheServiceCreateCacheEntryRequest< + T extends TwirpContext = TwirpContext +>( + ctx: T, + service: CacheServiceTwirp, + data: Buffer, + interceptors?: Interceptor< + T, + CreateCacheEntryRequest, + CreateCacheEntryResponse + >[] +): Promise { + switch (ctx.contentType) { + case TwirpContentType.JSON: + return handleCacheServiceCreateCacheEntryJSON( + ctx, + service, + data, + interceptors + ); + case TwirpContentType.Protobuf: + return handleCacheServiceCreateCacheEntryProtobuf( + ctx, + service, + data, + interceptors + ); + default: + const msg = "unexpected Content-Type"; + throw new TwirpError(TwirpErrorCode.BadRoute, msg); + } +} + +function handleCacheServiceFinalizeCacheEntryUploadRequest< + T extends TwirpContext = TwirpContext +>( + ctx: T, + service: CacheServiceTwirp, + data: Buffer, + interceptors?: Interceptor< + T, + FinalizeCacheEntryUploadRequest, + FinalizeCacheEntryUploadResponse + >[] +): Promise { + switch (ctx.contentType) { + case TwirpContentType.JSON: + return handleCacheServiceFinalizeCacheEntryUploadJSON( + ctx, + service, + data, + interceptors + ); + case TwirpContentType.Protobuf: + return handleCacheServiceFinalizeCacheEntryUploadProtobuf( + ctx, + service, + data, + interceptors + ); + default: + const msg = "unexpected Content-Type"; + throw new TwirpError(TwirpErrorCode.BadRoute, msg); + } +} + +function handleCacheServiceGetCacheEntryDownloadURLRequest< + T extends TwirpContext = TwirpContext +>( + ctx: T, + service: CacheServiceTwirp, + data: Buffer, + interceptors?: Interceptor< + T, + GetCacheEntryDownloadURLRequest, + GetCacheEntryDownloadURLResponse + >[] +): Promise { + switch (ctx.contentType) { + case TwirpContentType.JSON: + return handleCacheServiceGetCacheEntryDownloadURLJSON( + ctx, + service, + data, + interceptors + ); + case TwirpContentType.Protobuf: + return handleCacheServiceGetCacheEntryDownloadURLProtobuf( + ctx, + service, + data, + interceptors + ); + default: + const msg = "unexpected Content-Type"; + throw new TwirpError(TwirpErrorCode.BadRoute, msg); + } +} + +function handleCacheServiceDeleteCacheEntryRequest< + T extends TwirpContext = TwirpContext +>( + ctx: T, + service: CacheServiceTwirp, + data: Buffer, + interceptors?: Interceptor< + T, + DeleteCacheEntryRequest, + DeleteCacheEntryResponse + >[] +): Promise { + switch (ctx.contentType) { + case TwirpContentType.JSON: + return handleCacheServiceDeleteCacheEntryJSON( + ctx, + service, + data, + interceptors + ); + case TwirpContentType.Protobuf: + return handleCacheServiceDeleteCacheEntryProtobuf( + ctx, + service, + data, + interceptors + ); + default: + const msg = "unexpected Content-Type"; + throw new TwirpError(TwirpErrorCode.BadRoute, msg); + } +} + +function handleCacheServiceListCacheEntriesRequest< + T extends TwirpContext = TwirpContext +>( + ctx: T, + service: CacheServiceTwirp, + data: Buffer, + interceptors?: Interceptor< + T, + ListCacheEntriesRequest, + ListCacheEntriesResponse + >[] +): Promise { + switch (ctx.contentType) { + case TwirpContentType.JSON: + return handleCacheServiceListCacheEntriesJSON( + ctx, + service, + data, + interceptors + ); + case TwirpContentType.Protobuf: + return handleCacheServiceListCacheEntriesProtobuf( + ctx, + service, + data, + interceptors + ); + default: + const msg = "unexpected 
Content-Type"; + throw new TwirpError(TwirpErrorCode.BadRoute, msg); + } +} + +function handleCacheServiceLookupCacheEntryRequest< + T extends TwirpContext = TwirpContext +>( + ctx: T, + service: CacheServiceTwirp, + data: Buffer, + interceptors?: Interceptor< + T, + LookupCacheEntryRequest, + LookupCacheEntryResponse + >[] +): Promise { + switch (ctx.contentType) { + case TwirpContentType.JSON: + return handleCacheServiceLookupCacheEntryJSON( + ctx, + service, + data, + interceptors + ); + case TwirpContentType.Protobuf: + return handleCacheServiceLookupCacheEntryProtobuf( + ctx, + service, + data, + interceptors + ); + default: + const msg = "unexpected Content-Type"; + throw new TwirpError(TwirpErrorCode.BadRoute, msg); + } +} +async function handleCacheServiceCreateCacheEntryJSON< + T extends TwirpContext = TwirpContext +>( + ctx: T, + service: CacheServiceTwirp, + data: Buffer, + interceptors?: Interceptor< + T, + CreateCacheEntryRequest, + CreateCacheEntryResponse + >[] +) { + let request: CreateCacheEntryRequest; + let response: CreateCacheEntryResponse; + + try { + const body = JSON.parse(data.toString() || "{}"); + request = CreateCacheEntryRequest.fromJson(body, { + ignoreUnknownFields: true, + }); + } catch (e) { + if (e instanceof Error) { + const msg = "the json request could not be decoded"; + throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + + if (interceptors && interceptors.length > 0) { + const interceptor = chainInterceptors(...interceptors) as Interceptor< + T, + CreateCacheEntryRequest, + CreateCacheEntryResponse + >; + response = await interceptor(ctx, request!, (ctx, inputReq) => { + return service.CreateCacheEntry(ctx, inputReq); + }); + } else { + response = await service.CreateCacheEntry(ctx, request!); + } + + return JSON.stringify( + CreateCacheEntryResponse.toJson(response, { + useProtoFieldName: true, + emitDefaultValues: false, + }) as string + ); +} + +async function handleCacheServiceFinalizeCacheEntryUploadJSON< + T extends TwirpContext = TwirpContext +>( + ctx: T, + service: CacheServiceTwirp, + data: Buffer, + interceptors?: Interceptor< + T, + FinalizeCacheEntryUploadRequest, + FinalizeCacheEntryUploadResponse + >[] +) { + let request: FinalizeCacheEntryUploadRequest; + let response: FinalizeCacheEntryUploadResponse; + + try { + const body = JSON.parse(data.toString() || "{}"); + request = FinalizeCacheEntryUploadRequest.fromJson(body, { + ignoreUnknownFields: true, + }); + } catch (e) { + if (e instanceof Error) { + const msg = "the json request could not be decoded"; + throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + + if (interceptors && interceptors.length > 0) { + const interceptor = chainInterceptors(...interceptors) as Interceptor< + T, + FinalizeCacheEntryUploadRequest, + FinalizeCacheEntryUploadResponse + >; + response = await interceptor(ctx, request!, (ctx, inputReq) => { + return service.FinalizeCacheEntryUpload(ctx, inputReq); + }); + } else { + response = await service.FinalizeCacheEntryUpload(ctx, request!); + } + + return JSON.stringify( + FinalizeCacheEntryUploadResponse.toJson(response, { + useProtoFieldName: true, + emitDefaultValues: false, + }) as string + ); +} + +async function handleCacheServiceGetCacheEntryDownloadURLJSON< + T extends TwirpContext = TwirpContext +>( + ctx: T, + service: CacheServiceTwirp, + data: Buffer, + interceptors?: Interceptor< + T, + GetCacheEntryDownloadURLRequest, + GetCacheEntryDownloadURLResponse + >[] +) { + let request: 
GetCacheEntryDownloadURLRequest; + let response: GetCacheEntryDownloadURLResponse; + + try { + const body = JSON.parse(data.toString() || "{}"); + request = GetCacheEntryDownloadURLRequest.fromJson(body, { + ignoreUnknownFields: true, + }); + } catch (e) { + if (e instanceof Error) { + const msg = "the json request could not be decoded"; + throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + + if (interceptors && interceptors.length > 0) { + const interceptor = chainInterceptors(...interceptors) as Interceptor< + T, + GetCacheEntryDownloadURLRequest, + GetCacheEntryDownloadURLResponse + >; + response = await interceptor(ctx, request!, (ctx, inputReq) => { + return service.GetCacheEntryDownloadURL(ctx, inputReq); + }); + } else { + response = await service.GetCacheEntryDownloadURL(ctx, request!); + } + + return JSON.stringify( + GetCacheEntryDownloadURLResponse.toJson(response, { + useProtoFieldName: true, + emitDefaultValues: false, + }) as string + ); +} + +async function handleCacheServiceDeleteCacheEntryJSON< + T extends TwirpContext = TwirpContext +>( + ctx: T, + service: CacheServiceTwirp, + data: Buffer, + interceptors?: Interceptor< + T, + DeleteCacheEntryRequest, + DeleteCacheEntryResponse + >[] +) { + let request: DeleteCacheEntryRequest; + let response: DeleteCacheEntryResponse; + + try { + const body = JSON.parse(data.toString() || "{}"); + request = DeleteCacheEntryRequest.fromJson(body, { + ignoreUnknownFields: true, + }); + } catch (e) { + if (e instanceof Error) { + const msg = "the json request could not be decoded"; + throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + + if (interceptors && interceptors.length > 0) { + const interceptor = chainInterceptors(...interceptors) as Interceptor< + T, + DeleteCacheEntryRequest, + DeleteCacheEntryResponse + >; + response = await interceptor(ctx, request!, (ctx, inputReq) => { + return service.DeleteCacheEntry(ctx, inputReq); + }); + } else { + response = await service.DeleteCacheEntry(ctx, request!); + } + + return JSON.stringify( + DeleteCacheEntryResponse.toJson(response, { + useProtoFieldName: true, + emitDefaultValues: false, + }) as string + ); +} + +async function handleCacheServiceListCacheEntriesJSON< + T extends TwirpContext = TwirpContext +>( + ctx: T, + service: CacheServiceTwirp, + data: Buffer, + interceptors?: Interceptor< + T, + ListCacheEntriesRequest, + ListCacheEntriesResponse + >[] +) { + let request: ListCacheEntriesRequest; + let response: ListCacheEntriesResponse; + + try { + const body = JSON.parse(data.toString() || "{}"); + request = ListCacheEntriesRequest.fromJson(body, { + ignoreUnknownFields: true, + }); + } catch (e) { + if (e instanceof Error) { + const msg = "the json request could not be decoded"; + throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + + if (interceptors && interceptors.length > 0) { + const interceptor = chainInterceptors(...interceptors) as Interceptor< + T, + ListCacheEntriesRequest, + ListCacheEntriesResponse + >; + response = await interceptor(ctx, request!, (ctx, inputReq) => { + return service.ListCacheEntries(ctx, inputReq); + }); + } else { + response = await service.ListCacheEntries(ctx, request!); + } + + return JSON.stringify( + ListCacheEntriesResponse.toJson(response, { + useProtoFieldName: true, + emitDefaultValues: false, + }) as string + ); +} + +async function handleCacheServiceLookupCacheEntryJSON< + T extends TwirpContext = TwirpContext +>( + ctx: T, + service: 
CacheServiceTwirp, + data: Buffer, + interceptors?: Interceptor< + T, + LookupCacheEntryRequest, + LookupCacheEntryResponse + >[] +) { + let request: LookupCacheEntryRequest; + let response: LookupCacheEntryResponse; + + try { + const body = JSON.parse(data.toString() || "{}"); + request = LookupCacheEntryRequest.fromJson(body, { + ignoreUnknownFields: true, + }); + } catch (e) { + if (e instanceof Error) { + const msg = "the json request could not be decoded"; + throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + + if (interceptors && interceptors.length > 0) { + const interceptor = chainInterceptors(...interceptors) as Interceptor< + T, + LookupCacheEntryRequest, + LookupCacheEntryResponse + >; + response = await interceptor(ctx, request!, (ctx, inputReq) => { + return service.LookupCacheEntry(ctx, inputReq); + }); + } else { + response = await service.LookupCacheEntry(ctx, request!); + } + + return JSON.stringify( + LookupCacheEntryResponse.toJson(response, { + useProtoFieldName: true, + emitDefaultValues: false, + }) as string + ); +} +async function handleCacheServiceCreateCacheEntryProtobuf< + T extends TwirpContext = TwirpContext +>( + ctx: T, + service: CacheServiceTwirp, + data: Buffer, + interceptors?: Interceptor< + T, + CreateCacheEntryRequest, + CreateCacheEntryResponse + >[] +) { + let request: CreateCacheEntryRequest; + let response: CreateCacheEntryResponse; + + try { + request = CreateCacheEntryRequest.fromBinary(data); + } catch (e) { + if (e instanceof Error) { + const msg = "the protobuf request could not be decoded"; + throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + + if (interceptors && interceptors.length > 0) { + const interceptor = chainInterceptors(...interceptors) as Interceptor< + T, + CreateCacheEntryRequest, + CreateCacheEntryResponse + >; + response = await interceptor(ctx, request!, (ctx, inputReq) => { + return service.CreateCacheEntry(ctx, inputReq); + }); + } else { + response = await service.CreateCacheEntry(ctx, request!); + } + + return Buffer.from(CreateCacheEntryResponse.toBinary(response)); +} + +async function handleCacheServiceFinalizeCacheEntryUploadProtobuf< + T extends TwirpContext = TwirpContext +>( + ctx: T, + service: CacheServiceTwirp, + data: Buffer, + interceptors?: Interceptor< + T, + FinalizeCacheEntryUploadRequest, + FinalizeCacheEntryUploadResponse + >[] +) { + let request: FinalizeCacheEntryUploadRequest; + let response: FinalizeCacheEntryUploadResponse; + + try { + request = FinalizeCacheEntryUploadRequest.fromBinary(data); + } catch (e) { + if (e instanceof Error) { + const msg = "the protobuf request could not be decoded"; + throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + + if (interceptors && interceptors.length > 0) { + const interceptor = chainInterceptors(...interceptors) as Interceptor< + T, + FinalizeCacheEntryUploadRequest, + FinalizeCacheEntryUploadResponse + >; + response = await interceptor(ctx, request!, (ctx, inputReq) => { + return service.FinalizeCacheEntryUpload(ctx, inputReq); + }); + } else { + response = await service.FinalizeCacheEntryUpload(ctx, request!); + } + + return Buffer.from(FinalizeCacheEntryUploadResponse.toBinary(response)); +} + +async function handleCacheServiceGetCacheEntryDownloadURLProtobuf< + T extends TwirpContext = TwirpContext +>( + ctx: T, + service: CacheServiceTwirp, + data: Buffer, + interceptors?: Interceptor< + T, + GetCacheEntryDownloadURLRequest, + GetCacheEntryDownloadURLResponse + >[] 
+) { + let request: GetCacheEntryDownloadURLRequest; + let response: GetCacheEntryDownloadURLResponse; + + try { + request = GetCacheEntryDownloadURLRequest.fromBinary(data); + } catch (e) { + if (e instanceof Error) { + const msg = "the protobuf request could not be decoded"; + throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + + if (interceptors && interceptors.length > 0) { + const interceptor = chainInterceptors(...interceptors) as Interceptor< + T, + GetCacheEntryDownloadURLRequest, + GetCacheEntryDownloadURLResponse + >; + response = await interceptor(ctx, request!, (ctx, inputReq) => { + return service.GetCacheEntryDownloadURL(ctx, inputReq); + }); + } else { + response = await service.GetCacheEntryDownloadURL(ctx, request!); + } + + return Buffer.from(GetCacheEntryDownloadURLResponse.toBinary(response)); +} + +async function handleCacheServiceDeleteCacheEntryProtobuf< + T extends TwirpContext = TwirpContext +>( + ctx: T, + service: CacheServiceTwirp, + data: Buffer, + interceptors?: Interceptor< + T, + DeleteCacheEntryRequest, + DeleteCacheEntryResponse + >[] +) { + let request: DeleteCacheEntryRequest; + let response: DeleteCacheEntryResponse; + + try { + request = DeleteCacheEntryRequest.fromBinary(data); + } catch (e) { + if (e instanceof Error) { + const msg = "the protobuf request could not be decoded"; + throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + + if (interceptors && interceptors.length > 0) { + const interceptor = chainInterceptors(...interceptors) as Interceptor< + T, + DeleteCacheEntryRequest, + DeleteCacheEntryResponse + >; + response = await interceptor(ctx, request!, (ctx, inputReq) => { + return service.DeleteCacheEntry(ctx, inputReq); + }); + } else { + response = await service.DeleteCacheEntry(ctx, request!); + } + + return Buffer.from(DeleteCacheEntryResponse.toBinary(response)); +} + +async function handleCacheServiceListCacheEntriesProtobuf< + T extends TwirpContext = TwirpContext +>( + ctx: T, + service: CacheServiceTwirp, + data: Buffer, + interceptors?: Interceptor< + T, + ListCacheEntriesRequest, + ListCacheEntriesResponse + >[] +) { + let request: ListCacheEntriesRequest; + let response: ListCacheEntriesResponse; + + try { + request = ListCacheEntriesRequest.fromBinary(data); + } catch (e) { + if (e instanceof Error) { + const msg = "the protobuf request could not be decoded"; + throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + + if (interceptors && interceptors.length > 0) { + const interceptor = chainInterceptors(...interceptors) as Interceptor< + T, + ListCacheEntriesRequest, + ListCacheEntriesResponse + >; + response = await interceptor(ctx, request!, (ctx, inputReq) => { + return service.ListCacheEntries(ctx, inputReq); + }); + } else { + response = await service.ListCacheEntries(ctx, request!); + } + + return Buffer.from(ListCacheEntriesResponse.toBinary(response)); +} + +async function handleCacheServiceLookupCacheEntryProtobuf< + T extends TwirpContext = TwirpContext +>( + ctx: T, + service: CacheServiceTwirp, + data: Buffer, + interceptors?: Interceptor< + T, + LookupCacheEntryRequest, + LookupCacheEntryResponse + >[] +) { + let request: LookupCacheEntryRequest; + let response: LookupCacheEntryResponse; + + try { + request = LookupCacheEntryRequest.fromBinary(data); + } catch (e) { + if (e instanceof Error) { + const msg = "the protobuf request could not be decoded"; + throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + 
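+  // If interceptors are configured, chain them around the LookupCacheEntry call; otherwise invoke the service implementation directly.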
+ if (interceptors && interceptors.length > 0) { + const interceptor = chainInterceptors(...interceptors) as Interceptor< + T, + LookupCacheEntryRequest, + LookupCacheEntryResponse + >; + response = await interceptor(ctx, request!, (ctx, inputReq) => { + return service.LookupCacheEntry(ctx, inputReq); + }); + } else { + response = await service.LookupCacheEntry(ctx, request!); + } + + return Buffer.from(LookupCacheEntryResponse.toBinary(response)); +} diff --git a/packages/cache/src/internal/cacheHttpClient.ts b/packages/cache/src/internal/cacheHttpClient.ts index c50ccd4b..8fe76376 100644 --- a/packages/cache/src/internal/cacheHttpClient.ts +++ b/packages/cache/src/internal/cacheHttpClient.ts @@ -1,16 +1,13 @@ import * as core from '@actions/core' -import {HttpClient} from '@actions/http-client' -import {BearerCredentialHandler} from '@actions/http-client/lib/auth' +import { HttpClient } from '@actions/http-client' +import { BearerCredentialHandler } from '@actions/http-client/lib/auth' import { RequestOptions, TypedResponse } from '@actions/http-client/lib/interfaces' -import * as crypto from 'crypto' import * as fs from 'fs' -import {URL} from 'url' - +import { URL } from 'url' import * as utils from './cacheUtils' -import {CompressionMethod} from './constants' import { ArtifactCacheEntry, InternalCacheOptions, @@ -36,9 +33,7 @@ import { retryHttpClientResponse, retryTypedResponse } from './requestUtils' -import {CacheUrl} from './constants' - -const versionSalt = '1.0' +import { CacheUrl } from './constants' function getCacheApiUrl(resource: string): string { const baseUrl: string = CacheUrl || '' @@ -76,43 +71,18 @@ function createHttpClient(): HttpClient { ) } -export function getCacheVersion( - paths: string[], - compressionMethod?: CompressionMethod, - enableCrossOsArchive = false -): string { - // don't pass changes upstream - const components = paths.slice() - - // Add compression method to cache version to restore - // compressed cache as per compression method - if (compressionMethod) { - components.push(compressionMethod) - } - - // Only check for windows platforms if enableCrossOsArchive is false - if (process.platform === 'win32' && !enableCrossOsArchive) { - components.push('windows-only') - } - - // Add salt to cache version to support breaking changes in cache entry - components.push(versionSalt) - - return crypto.createHash('sha256').update(components.join('|')).digest('hex') -} - export async function getCacheEntry( keys: string[], paths: string[], options?: InternalCacheOptions ): Promise { const httpClient = createHttpClient() - const version = getCacheVersion( + const version = utils.getCacheVersion( paths, options?.compressionMethod, options?.enableCrossOsArchive ) - + const resource = `cache?keys=${encodeURIComponent( keys.join(',') )}&version=${version}` @@ -209,7 +179,7 @@ export async function reserveCache( options?: InternalCacheOptions ): Promise> { const httpClient = createHttpClient() - const version = getCacheVersion( + const version = utils.getCacheVersion( paths, options?.compressionMethod, options?.enableCrossOsArchive @@ -246,8 +216,7 @@ async function uploadChunk( end: number ): Promise { core.debug( - `Uploading chunk of size ${ - end - start + 1 + `Uploading chunk of size ${end - start + 1 } bytes at offset ${start} with content range: ${getContentRange( start, end @@ -343,7 +312,7 @@ async function commitCache( cacheId: number, filesize: number ): Promise> { - const commitCacheRequest: CommitCacheRequest = {size: filesize} + const 
commitCacheRequest: CommitCacheRequest = { size: filesize } return await retryTypedResponse('commitCache', async () => httpClient.postJson( getCacheApiUrl(`caches/${cacheId.toString()}`), diff --git a/packages/cache/src/internal/cacheTwirpClient.ts b/packages/cache/src/internal/cacheTwirpClient.ts index 62f98426..6d9826ac 100644 --- a/packages/cache/src/internal/cacheTwirpClient.ts +++ b/packages/cache/src/internal/cacheTwirpClient.ts @@ -1,197 +1,196 @@ -import {HttpClient, HttpClientResponse, HttpCodes} from '@actions/http-client' -import {BearerCredentialHandler} from '@actions/http-client/lib/auth' -import {info, debug} from '@actions/core' -import {BlobCacheServiceClientJSON} from '../generated/results/api/v1/blobcache.twirp' -import {CacheUrl} from './constants' -import {getRuntimeToken} from './config' +import { HttpClient, HttpClientResponse, HttpCodes } from '@actions/http-client' +import { BearerCredentialHandler } from '@actions/http-client/lib/auth' +import { info, debug } from '@actions/core' +import { CacheServiceClientJSON } from '../generated/results/api/v1/cache.twirp' +import { CacheUrl } from './constants' +import { getRuntimeToken } from './config' // import {getUserAgentString} from './user-agent' // import {NetworkError, UsageError} from './errors' // The twirp http client must implement this interface interface Rpc { - request( - service: string, - method: string, - contentType: 'application/json' | 'application/protobuf', - data: object | Uint8Array - ): Promise + request( + service: string, + method: string, + contentType: 'application/json' | 'application/protobuf', + data: object | Uint8Array + ): Promise } -class BlobCacheServiceClient implements Rpc { - private httpClient: HttpClient - private baseUrl: string - private maxAttempts = 5 - private baseRetryIntervalMilliseconds = 3000 - private retryMultiplier = 1.5 +class CacheServiceClient implements Rpc { + private httpClient: HttpClient + private baseUrl: string + private maxAttempts = 5 + private baseRetryIntervalMilliseconds = 3000 + private retryMultiplier = 1.5 - constructor( - userAgent: string, - maxAttempts?: number, - baseRetryIntervalMilliseconds?: number, + constructor( + userAgent: string, + maxAttempts?: number, + baseRetryIntervalMilliseconds?: number, + retryMultiplier?: number + ) { + const token = getRuntimeToken() + this.baseUrl = CacheUrl + if (maxAttempts) { + this.maxAttempts = maxAttempts + } + if (baseRetryIntervalMilliseconds) { + this.baseRetryIntervalMilliseconds = baseRetryIntervalMilliseconds + } + if (retryMultiplier) { + this.retryMultiplier = retryMultiplier + } + + this.httpClient = new HttpClient(userAgent, [ + new BearerCredentialHandler(token) + ]) + } + + // This function satisfies the Rpc interface. It is compatible with the + // JSON generated client. 
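+ // request() serializes the payload as JSON, POSTs it to `/twirp/${service}/${method}` on the configured base URL, and routes the call through retryableRequest() below for retry handling.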
+ async request( + service: string, + method: string, + contentType: 'application/json' | 'application/protobuf', + data: object | Uint8Array + ): Promise { + const url = new URL(`/twirp/${service}/${method}`, this.baseUrl).href + debug(`[Request] ${method} ${url}`) + const headers = { + 'Content-Type': contentType + } + try { + const { body } = await this.retryableRequest(async () => + this.httpClient.post(url, JSON.stringify(data), headers) + ) + + return body + } catch (error) { + throw new Error(`Failed to ${method}: ${error.message}`) + } + } + + async retryableRequest( + operation: () => Promise + ): Promise<{ response: HttpClientResponse; body: object }> { + let attempt = 0 + let errorMessage = '' + let rawBody = '' + while (attempt < this.maxAttempts) { + let isRetryable = false + + try { + const response = await operation() + const statusCode = response.message.statusCode + rawBody = await response.readBody() + debug(`[Response] - ${response.message.statusCode}`) + debug(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`) + const body = JSON.parse(rawBody) + debug(`Body: ${JSON.stringify(body, null, 2)}`) + if (this.isSuccessStatusCode(statusCode)) { + return { response, body } + } + isRetryable = this.isRetryableHttpStatusCode(statusCode) + errorMessage = `Failed request: (${statusCode}) ${response.message.statusMessage}` + if (body.msg) { + // if (UsageError.isUsageErrorMessage(body.msg)) { + // throw new UsageError() + // } + + errorMessage = `${errorMessage}: ${body.msg}` + } + } catch (error) { + if (error instanceof SyntaxError) { + debug(`Raw Body: ${rawBody}`) + } + + // if (error instanceof UsageError) { + // throw error + // } + + // if (NetworkError.isNetworkErrorCode(error?.code)) { + // throw new NetworkError(error?.code) + // } + + isRetryable = true + errorMessage = error.message + } + + if (!isRetryable) { + throw new Error(`Received non-retryable error: ${errorMessage}`) + } + + if (attempt + 1 === this.maxAttempts) { + throw new Error( + `Failed to make request after ${this.maxAttempts} attempts: ${errorMessage}` + ) + } + + const retryTimeMilliseconds = + this.getExponentialRetryTimeMilliseconds(attempt) + info( + `Attempt ${attempt + 1} of ${this.maxAttempts + } failed with error: ${errorMessage}. 
Retrying request in ${retryTimeMilliseconds} ms...` + ) + await this.sleep(retryTimeMilliseconds) + attempt++ + } + + throw new Error(`Request failed`) + } + + isSuccessStatusCode(statusCode?: number): boolean { + if (!statusCode) return false + return statusCode >= 200 && statusCode < 300 + } + + isRetryableHttpStatusCode(statusCode?: number): boolean { + if (!statusCode) return false + + const retryableStatusCodes = [ + HttpCodes.BadGateway, + HttpCodes.GatewayTimeout, + HttpCodes.InternalServerError, + HttpCodes.ServiceUnavailable, + HttpCodes.TooManyRequests + ] + + return retryableStatusCodes.includes(statusCode) + } + + async sleep(milliseconds: number): Promise { + return new Promise(resolve => setTimeout(resolve, milliseconds)) + } + + getExponentialRetryTimeMilliseconds(attempt: number): number { + if (attempt < 0) { + throw new Error('attempt should be a positive integer') + } + + if (attempt === 0) { + return this.baseRetryIntervalMilliseconds + } + + const minTime = + this.baseRetryIntervalMilliseconds * this.retryMultiplier ** attempt + const maxTime = minTime * this.retryMultiplier + + // returns a random number between minTime and maxTime (exclusive) + return Math.trunc(Math.random() * (maxTime - minTime) + minTime) + } +} + +export function internalCacheTwirpClient(options?: { + maxAttempts?: number + retryIntervalMs?: number retryMultiplier?: number - ) { - const token = getRuntimeToken() - this.baseUrl = CacheUrl - if (maxAttempts) { - this.maxAttempts = maxAttempts - } - if (baseRetryIntervalMilliseconds) { - this.baseRetryIntervalMilliseconds = baseRetryIntervalMilliseconds - } - if (retryMultiplier) { - this.retryMultiplier = retryMultiplier - } - - this.httpClient = new HttpClient(userAgent, [ - new BearerCredentialHandler(token) - ]) - } - - // This function satisfies the Rpc interface. It is compatible with the JSON - // JSON generated client. 
- async request( - service: string, - method: string, - contentType: 'application/json' | 'application/protobuf', - data: object | Uint8Array - ): Promise { - const url = new URL(`/twirp/${service}/${method}`, this.baseUrl).href - debug(`[Request] ${method} ${url}`) - const headers = { - 'Content-Type': contentType - } - try { - const {body} = await this.retryableRequest(async () => - this.httpClient.post(url, JSON.stringify(data), headers) - ) - - return body - } catch (error) { - throw new Error(`Failed to ${method}: ${error.message}`) - } - } - - async retryableRequest( - operation: () => Promise - ): Promise<{response: HttpClientResponse; body: object}> { - let attempt = 0 - let errorMessage = '' - let rawBody = '' - while (attempt < this.maxAttempts) { - let isRetryable = false - - try { - const response = await operation() - const statusCode = response.message.statusCode - rawBody = await response.readBody() - debug(`[Response] - ${response.message.statusCode}`) - debug(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`) - const body = JSON.parse(rawBody) - debug(`Body: ${JSON.stringify(body, null, 2)}`) - if (this.isSuccessStatusCode(statusCode)) { - return {response, body} - } - isRetryable = this.isRetryableHttpStatusCode(statusCode) - errorMessage = `Failed request: (${statusCode}) ${response.message.statusMessage}` - if (body.msg) { - // if (UsageError.isUsageErrorMessage(body.msg)) { - // throw new UsageError() - // } - - errorMessage = `${errorMessage}: ${body.msg}` - } - } catch (error) { - if (error instanceof SyntaxError) { - debug(`Raw Body: ${rawBody}`) - } - - // if (error instanceof UsageError) { - // throw error - // } - - // if (NetworkError.isNetworkErrorCode(error?.code)) { - // throw new NetworkError(error?.code) - // } - - isRetryable = true - errorMessage = error.message - } - - if (!isRetryable) { - throw new Error(`Received non-retryable error: ${errorMessage}`) - } - - if (attempt + 1 === this.maxAttempts) { - throw new Error( - `Failed to make request after ${this.maxAttempts} attempts: ${errorMessage}` - ) - } - - const retryTimeMilliseconds = - this.getExponentialRetryTimeMilliseconds(attempt) - info( - `Attempt ${attempt + 1} of ${ - this.maxAttempts - } failed with error: ${errorMessage}. 
Retrying request in ${retryTimeMilliseconds} ms...` - ) - await this.sleep(retryTimeMilliseconds) - attempt++ - } - - throw new Error(`Request failed`) - } - - isSuccessStatusCode(statusCode?: number): boolean { - if (!statusCode) return false - return statusCode >= 200 && statusCode < 300 - } - - isRetryableHttpStatusCode(statusCode?: number): boolean { - if (!statusCode) return false - - const retryableStatusCodes = [ - HttpCodes.BadGateway, - HttpCodes.GatewayTimeout, - HttpCodes.InternalServerError, - HttpCodes.ServiceUnavailable, - HttpCodes.TooManyRequests - ] - - return retryableStatusCodes.includes(statusCode) - } - - async sleep(milliseconds: number): Promise { - return new Promise(resolve => setTimeout(resolve, milliseconds)) - } - - getExponentialRetryTimeMilliseconds(attempt: number): number { - if (attempt < 0) { - throw new Error('attempt should be a positive integer') - } - - if (attempt === 0) { - return this.baseRetryIntervalMilliseconds - } - - const minTime = - this.baseRetryIntervalMilliseconds * this.retryMultiplier ** attempt - const maxTime = minTime * this.retryMultiplier - - // returns a random number between minTime and maxTime (exclusive) - return Math.trunc(Math.random() * (maxTime - minTime) + minTime) - } -} - -export function internalBlobCacheTwirpClient(options?: { - maxAttempts?: number - retryIntervalMs?: number - retryMultiplier?: number -}): BlobCacheServiceClientJSON { - const client = new BlobCacheServiceClient( - 'actions/cache', - options?.maxAttempts, - options?.retryIntervalMs, - options?.retryMultiplier - ) - return new BlobCacheServiceClientJSON(client) +}): CacheServiceClientJSON { + const client = new CacheServiceClient( + 'actions/cache', + options?.maxAttempts, + options?.retryIntervalMs, + options?.retryMultiplier + ) + return new CacheServiceClientJSON(client) } diff --git a/packages/cache/src/internal/cacheUtils.ts b/packages/cache/src/internal/cacheUtils.ts index 91bae9a8..48a0b354 100644 --- a/packages/cache/src/internal/cacheUtils.ts +++ b/packages/cache/src/internal/cacheUtils.ts @@ -6,13 +6,16 @@ import * as fs from 'fs' import * as path from 'path' import * as semver from 'semver' import * as util from 'util' -import {v4 as uuidV4} from 'uuid' +import { v4 as uuidV4 } from 'uuid' +import * as crypto from 'crypto' import { CacheFilename, CompressionMethod, GnuTarPathOnWindows } from './constants' +const versionSalt = '1.0' + // From https://github.com/actions/toolkit/blob/main/packages/tool-cache/src/tool-cache.ts#L23 export async function createTempDirectory(): Promise { const IS_WINDOWS = process.platform === 'win32' @@ -143,3 +146,28 @@ export function isGhes(): boolean { return !isGitHubHost && !isGheHost } + +export function getCacheVersion( + paths: string[], + compressionMethod?: CompressionMethod, + enableCrossOsArchive = false +): string { + // don't pass changes upstream + const components = paths.slice() + + // Add compression method to cache version to restore + // compressed cache as per compression method + if (compressionMethod) { + components.push(compressionMethod) + } + + // Only check for windows platforms if enableCrossOsArchive is false + if (process.platform === 'win32' && !enableCrossOsArchive) { + components.push('windows-only') + } + + // Add salt to cache version to support breaking changes in cache entry + components.push(versionSalt) + + return crypto.createHash('sha256').update(components.join('|')).digest('hex') +} \ No newline at end of file diff --git a/packages/cache/src/internal/v2/upload-cache.ts 
b/packages/cache/src/internal/v2/upload-cache.ts index 574cf788..b3ed530d 100644 --- a/packages/cache/src/internal/v2/upload-cache.ts +++ b/packages/cache/src/internal/v2/upload-cache.ts @@ -1,13 +1,14 @@ import * as core from '@actions/core' -import {GetCacheBlobUploadURLResponse} from '../../generated/results/api/v1/blobcache' -import {ZipUploadStream} from '@actions/artifact/lib/internal/upload/zip' -import {NetworkError} from '@actions/artifact/' -import {TransferProgressEvent} from '@azure/core-http' +import { CreateCacheEntryResponse } from '../../generated/results/api/v1/cache' +import { ZipUploadStream } from '@actions/artifact/lib/internal/upload/zip' +import { NetworkError } from '@actions/artifact/' +import { TransferProgressEvent } from '@azure/core-http' import * as stream from 'stream' import * as crypto from 'crypto' + import { - BlobClient, - BlockBlobClient, + BlobClient, + BlockBlobClient, BlockBlobUploadStreamOptions, BlockBlobParallelUploadOptions } from '@azure/storage-blob' @@ -55,7 +56,7 @@ export async function UploadCacheStream( } const options: BlockBlobUploadStreamOptions = { - blobHTTPHeaders: {blobContentType: 'zip'}, + blobHTTPHeaders: { blobContentType: 'zip' }, onProgress: uploadCallback } @@ -89,7 +90,7 @@ export async function UploadCacheStream( } core.info('Finished uploading cache content to blob storage!') - + hashStream.end() sha256Hash = hashStream.read() as string core.info(`SHA256 hash of uploaded artifact zip is ${sha256Hash}`) @@ -107,11 +108,11 @@ export async function UploadCacheStream( } export async function UploadCacheFile( - uploadURL: GetCacheBlobUploadURLResponse, + uploadURL: CreateCacheEntryResponse, archivePath: string, ): Promise<{}> { core.info(`Uploading ${archivePath} to: ${JSON.stringify(uploadURL)}`) - + // Specify data transfer options const uploadOptions: BlockBlobParallelUploadOptions = { blockSize: 4 * 1024 * 1024, // 4 MiB max block size @@ -119,8 +120,7 @@ export async function UploadCacheFile( maxSingleShotSize: 8 * 1024 * 1024, // 8 MiB initial transfer size }; - // const blobClient: BlobClient = new BlobClient(uploadURL.urls[0]) - const blobClient: BlobClient = new BlobClient(uploadURL.urls[0].url) + const blobClient: BlobClient = new BlobClient(uploadURL.signedUploadUrl) const blockBlobClient: BlockBlobClient = blobClient.getBlockBlobClient() core.info(`BlobClient: ${JSON.stringify(blobClient)}`) From e62c6428e7fceb13121cbf165c1996765bd8393b Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Tue, 24 Sep 2024 03:29:14 -0700 Subject: [PATCH 017/108] Fix service urls --- packages/cache/src/cache.ts | 18 ++++++++++-------- packages/cache/src/internal/cacheHttpClient.ts | 4 ++-- .../cache/src/internal/cacheTwirpClient.ts | 5 ++--- packages/cache/src/internal/config.ts | 16 ++++++++++++++++ packages/cache/src/internal/constants.ts | 6 +----- 5 files changed, 31 insertions(+), 18 deletions(-) diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index 0530aaab..321610cd 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -1,7 +1,7 @@ import * as core from '@actions/core' import * as path from 'path' import * as utils from './internal/cacheUtils' -import { CacheServiceVersion, CacheUrl } from './internal/constants' +import * as config from './internal/config' import * as cacheHttpClient from './internal/cacheHttpClient' import * as cacheTwirpClient from './internal/cacheTwirpClient' import { createTar, extractTar, listTar } from './internal/tar' 
@@ -67,9 +67,9 @@ function checkKey(key: string): void { * @returns boolean return true if Actions cache service feature is available, otherwise false */ -export function isFeatureAvailable(): boolean { - return !!CacheUrl -} +// export function isFeatureAvailable(): boolean { +// return !!CacheUrl +// } /** * Restores cache from keys @@ -90,8 +90,9 @@ export async function restoreCache( ): Promise { checkPaths(paths) - console.debug(`Cache Service Version: ${CacheServiceVersion}`) - switch (CacheServiceVersion) { + const cacheServiceVersion: string = config.getCacheServiceVersion() + console.debug(`Cache Service Version: ${cacheServiceVersion}`) + switch (cacheServiceVersion) { case "v2": return await restoreCachev2(paths, primaryKey, restoreKeys, options, enableCrossOsArchive) case "v1": @@ -265,8 +266,9 @@ export async function saveCache( checkPaths(paths) checkKey(key) - console.debug(`Cache Service Version: ${CacheServiceVersion}`) - switch (CacheServiceVersion) { + const cacheServiceVersion: string = config.getCacheServiceVersion() + console.debug(`Cache Service Version: ${cacheServiceVersion}`) + switch (cacheServiceVersion) { case "v2": return await saveCachev2(paths, key, options, enableCrossOsArchive) case "v1": diff --git a/packages/cache/src/internal/cacheHttpClient.ts b/packages/cache/src/internal/cacheHttpClient.ts index 8fe76376..98d6a3bb 100644 --- a/packages/cache/src/internal/cacheHttpClient.ts +++ b/packages/cache/src/internal/cacheHttpClient.ts @@ -33,10 +33,10 @@ import { retryHttpClientResponse, retryTypedResponse } from './requestUtils' -import { CacheUrl } from './constants' +import { getCacheServiceURL } from './config' function getCacheApiUrl(resource: string): string { - const baseUrl: string = CacheUrl || '' + const baseUrl: string = getCacheServiceURL() if (!baseUrl) { throw new Error('Cache Service Url not found, unable to restore cache.') } diff --git a/packages/cache/src/internal/cacheTwirpClient.ts b/packages/cache/src/internal/cacheTwirpClient.ts index 6d9826ac..cc365ec6 100644 --- a/packages/cache/src/internal/cacheTwirpClient.ts +++ b/packages/cache/src/internal/cacheTwirpClient.ts @@ -2,8 +2,7 @@ import { HttpClient, HttpClientResponse, HttpCodes } from '@actions/http-client' import { BearerCredentialHandler } from '@actions/http-client/lib/auth' import { info, debug } from '@actions/core' import { CacheServiceClientJSON } from '../generated/results/api/v1/cache.twirp' -import { CacheUrl } from './constants' -import { getRuntimeToken } from './config' +import { getRuntimeToken, getCacheServiceURL } from './config' // import {getUserAgentString} from './user-agent' // import {NetworkError, UsageError} from './errors' @@ -31,7 +30,7 @@ class CacheServiceClient implements Rpc { retryMultiplier?: number ) { const token = getRuntimeToken() - this.baseUrl = CacheUrl + this.baseUrl = getCacheServiceURL() if (maxAttempts) { this.maxAttempts = maxAttempts } diff --git a/packages/cache/src/internal/config.ts b/packages/cache/src/internal/config.ts index 959f3f46..54844396 100644 --- a/packages/cache/src/internal/config.ts +++ b/packages/cache/src/internal/config.ts @@ -5,3 +5,19 @@ export function getRuntimeToken(): string { } return token } + +export function getCacheServiceVersion(): string { + return process.env['ACTIONS_CACHE_SERVICE_VERSION'] || 'v1' +} + +export function getCacheServiceURL(): string { + const version = getCacheServiceVersion() + switch (version) { + case 'v1': + return process.env['ACTIONS_CACHE_URL'] || process.env['ACTIONS_RESULTS_URL'] 
|| "" + case 'v2': + return process.env['ACTIONS_RESULTS_URL'] || "" + default: + throw new Error(`Unsupported cache service version: ${version}`) + } +} \ No newline at end of file diff --git a/packages/cache/src/internal/constants.ts b/packages/cache/src/internal/constants.ts index 143ba06e..b2cddf96 100644 --- a/packages/cache/src/internal/constants.ts +++ b/packages/cache/src/internal/constants.ts @@ -35,8 +35,4 @@ export const SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\S export const TarFilename = 'cache.tar' -export const ManifestFilename = 'manifest.txt' - -// Cache Service Metadata -export const CacheUrl = `${process.env['ACTIONS_CACHE_URL_NEXT']} || ${process.env['ACTIONS_CACHE_URL']}` -export const CacheServiceVersion = `${process.env['ACTIONS_CACHE_URL_NEXT'] ? 'v2' : 'v1'}` \ No newline at end of file +export const ManifestFilename = 'manifest.txt' \ No newline at end of file From 13abc951656812c75eaa1cbc71bcf5d4328192ba Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Wed, 9 Oct 2024 04:32:57 -0700 Subject: [PATCH 018/108] Port restoreCache to new service --- packages/cache/src/cache.ts | 70 ++++++++++++++++++++++----- packages/cache/src/internal/config.ts | 1 + 2 files changed, 60 insertions(+), 11 deletions(-) diff --git a/packages/cache/src/cache.ts index 321610cd..37250c47 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -67,9 +67,9 @@ function checkKey(key: string): void { * @returns boolean return true if Actions cache service feature is available, otherwise false */ -// export function isFeatureAvailable(): boolean { -// return !!CacheUrl -// } +export function isFeatureAvailable(): boolean { + return !!config.getCacheServiceVersion() +} /** * Restores cache from keys @@ -91,7 +91,7 @@ export async function restoreCache( checkPaths(paths) const cacheServiceVersion: string = config.getCacheServiceVersion() - console.debug(`Cache Service Version: ${cacheServiceVersion}`) + console.debug(`Cache service version: ${cacheServiceVersion}`) switch (cacheServiceVersion) { case "v2": return await restoreCachev2(paths, primaryKey, restoreKeys, options, enableCrossOsArchive) case "v1": @@ -189,6 +189,16 @@ async function restoreCachev1( return undefined } +/** + * Restores cache using the new Cache Service + * + * @param paths a list of file paths to restore from the cache + * @param primaryKey an explicit key for restoring the cache + * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key + * @param downloadOptions cache download options + * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform + * @returns string returns the key for the cache hit, otherwise returns undefined + */ async function restoreCachev2( paths: string[], primaryKey: string, @@ -196,7 +206,6 @@ async function restoreCachev2( options?: DownloadOptions, enableCrossOsArchive = false ) { - restoreKeys = restoreKeys || [] const keys = [primaryKey, ...restoreKeys] @@ -212,11 +221,13 @@ checkKey(key) } + let archivePath = '' try { + const twirpClient = cacheTwirpClient.internalCacheTwirpClient() // BackendIds are retrieved from the signed JWT const backendIds: BackendIds = getBackendIdsFromToken() const compressionMethod = await utils.getCompressionMethod() - const twirpClient = cacheTwirpClient.internalCacheTwirpClient() + const request: 
GetCacheEntryDownloadURLRequest = { workflowRunBackendId: backendIds.workflowRunBackendId, workflowJobRunBackendId: backendIds.workflowJobRunBackendId, @@ -228,8 +239,9 @@ async function restoreCachev2( enableCrossOsArchive, ), } + const response: GetCacheEntryDownloadURLResponse = await twirpClient.GetCacheEntryDownloadURL(request) - core.info(`GetCacheEntryDownloadURLResponse: ${JSON.stringify(response)}`) + core.debug(`GetCacheEntryDownloadURLResponse: ${JSON.stringify(response)}`) if (!response.ok) { // Cache not found @@ -238,13 +250,49 @@ async function restoreCachev2( } core.info(`Cache hit for: ${request.key}`) - core.info(`Starting download of artifact to: ${paths[0]}`) - await StreamExtract(response.signedDownloadUrl, path.dirname(paths[0])) - core.info(`Artifact download completed successfully.`) - return keys[0] + if (options?.lookupOnly) { + core.info('Lookup only - skipping download') + return request.key + } + + archivePath = path.join( + await utils.createTempDirectory(), + utils.getCacheFileName(compressionMethod) + ) + core.debug(`Archive path: ${archivePath}`) + + if (core.isDebug()) { + await listTar(archivePath, compressionMethod) + } + + core.debug(`Starting download of artifact to: ${archivePath}`) + const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath) + core.info( + `Cache Size: ~${Math.round( + archiveFileSize / (1024 * 1024) + )} MB (${archiveFileSize} B)` + ) + + // Download the cache from the cache entry + await cacheHttpClient.downloadCache( + response.signedDownloadUrl, + archivePath, + options + ) + + await extractTar(archivePath, compressionMethod) + core.info('Cache restored successfully') + + return request.key } catch (error) { throw new Error(`Unable to download and extract cache: ${error.message}`) + } finally { + try { + await utils.unlinkFile(archivePath) + } catch (error) { + core.debug(`Failed to delete archive: ${error}`) + } } } diff --git a/packages/cache/src/internal/config.ts b/packages/cache/src/internal/config.ts index 54844396..117156a7 100644 --- a/packages/cache/src/internal/config.ts +++ b/packages/cache/src/internal/config.ts @@ -6,6 +6,7 @@ export function getRuntimeToken(): string { return token } +// TODO: Use the feature flag to determine the cache service version export function getCacheServiceVersion(): string { return process.env['ACTIONS_CACHE_SERVICE_VERSION'] || 'v1' } From 89354f65407afc6d6c4d740ba32d64ab02e1fab3 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Mon, 21 Oct 2024 05:21:32 -0700 Subject: [PATCH 019/108] Cleanup implementation and use tarballs instead of streaming zip --- packages/cache/src/cache.ts | 198 ++++++++++++------ .../cache/src/internal/cacheTwirpClient.ts | 13 +- packages/cache/src/internal/constants.ts | 4 +- .../cache/src/internal/v2/download-cache.ts | 85 ++------ .../cache/src/internal/v2/upload-cache.ts | 115 +--------- packages/cache/src/internal/v2/zip.ts | 0 6 files changed, 173 insertions(+), 242 deletions(-) delete mode 100644 packages/cache/src/internal/v2/zip.ts diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index 37250c47..7354f649 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -14,14 +14,10 @@ import { GetCacheEntryDownloadURLRequest, GetCacheEntryDownloadURLResponse } from './generated/results/api/v1/cache' -import { UploadCacheStream } from './internal/v2/upload-cache' -import { StreamExtract } from './internal/v2/download-cache' -import { - UploadZipSpecification, - 
getUploadZipSpecification } from '@actions/artifact/lib/internal/upload/upload-zip-specification' -import { createZipUploadStream } from '@actions/artifact/lib/internal/upload/zip' +import { UploadCacheFile } from './internal/v2/upload-cache' +import { DownloadCacheFile } from './internal/v2/download-cache' import { getBackendIdsFromToken, BackendIds } from '@actions/artifact/lib/internal/shared/util' +import { CacheFileSizeLimit } from './internal/constants' export class ValidationError extends Error { constructor(message: string) { @@ -101,6 +97,16 @@ export async function restoreCache( } } +/** + * Restores cache using the legacy Cache Service + * + * @param paths a list of file paths to restore from the cache + * @param primaryKey an explicit key for restoring the cache + * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key + * @param options cache download options + * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform + * @returns string returns the key for the cache hit, otherwise returns undefined + */ async function restoreCachev1( paths: string[], primaryKey: string, @@ -209,8 +215,7 @@ async function restoreCachev2( restoreKeys = restoreKeys || [] const keys = [primaryKey, ...restoreKeys] - core.debug('Resolved Keys:') - core.debug(JSON.stringify(keys)) + core.debug(`Resolved Keys: ${JSON.stringify(keys)}`) if (keys.length > 10) { throw new ValidationError( @@ -224,7 +229,6 @@ async function restoreCachev2( let archivePath = '' try { const twirpClient = cacheTwirpClient.internalCacheTwirpClient() - // BackendIds are retrieved from the signed JWT const backendIds: BackendIds = getBackendIdsFromToken() const compressionMethod = await utils.getCompressionMethod() @@ -240,11 +244,11 @@ ), } + core.debug(`GetCacheEntryDownloadURLRequest: ${JSON.stringify(request)}`) const response: GetCacheEntryDownloadURLResponse = await twirpClient.GetCacheEntryDownloadURL(request) core.debug(`GetCacheEntryDownloadURLResponse: ${JSON.stringify(response)}`) if (!response.ok) { - core.warning(`Cache not found for keys: ${keys.join(', ')}`) return undefined } core.info(`Cache hit for: ${request.key}`) if (options?.lookupOnly) { core.info('Lookup only - skipping download') return request.key } archivePath = path.join( await utils.createTempDirectory(), utils.getCacheFileName(compressionMethod) ) core.debug(`Archive path: ${archivePath}`) - if (core.isDebug()) { - await listTar(archivePath, compressionMethod) - } core.debug(`Starting download of artifact to: ${archivePath}`) + + await DownloadCacheFile( + response.signedDownloadUrl, + archivePath + ) + const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath) core.info( `Cache Size: ~${Math.round( archiveFileSize / (1024 * 1024) )} MB (${archiveFileSize} B)` ) - // Download the cache from the cache entry - await cacheHttpClient.downloadCache( - response.signedDownloadUrl, - archivePath, - options - ) + if (core.isDebug()) { + await listTar(archivePath, compressionMethod) + } await extractTar(archivePath, compressionMethod) core.info('Cache restored successfully') return request.key } catch (error) { + // TODO: handle all the possible error scenarios throw new Error(`Unable to download and extract cache: ${error.message}`) } finally { try { await utils.unlinkFile(archivePath) } catch (error) { core.debug(`Failed to delete archive: ${error}`) } } + + return undefined } /** @@ -325,6 +331,15 @@ export async function saveCache( } } +/** + * Saves cache using the legacy Cache Service + * + * @param paths a list of file paths to be cached + * @param key an explicit key for the cache entry + * @param options cache upload options + * @param enableCrossOsArchive an optional boolean enabled to save a windows-created cache so it can be restored on any platform + * @returns number returns the cacheId when the cache is saved successfully + */ async function saveCachev1( paths: string[], key: string, @@ -419,6 +434,15 @@ async function saveCachev1( return cacheId } +/** + * Saves cache using the new Cache Service + * + * @param paths a list of file paths to be cached + * @param key an explicit key for the cache entry + * @param options cache upload options + * @param enableCrossOsArchive an optional boolean enabled to save a windows-created cache so it can be restored on any platform + * @returns number returns the cacheId when the cache is saved successfully + */ async function saveCachev2( 
paths: string[], + key: string, @@ -428,59 +452,103 @@ // BackendIds are retrieved from the signed JWT const backendIds: BackendIds = getBackendIdsFromToken() const compressionMethod = await utils.getCompressionMethod() - const version = utils.getCacheVersion( - paths, - compressionMethod, - enableCrossOsArchive - ) const twirpClient = cacheTwirpClient.internalCacheTwirpClient() - const request: CreateCacheEntryRequest = { - workflowRunBackendId: backendIds.workflowRunBackendId, - workflowJobRunBackendId: backendIds.workflowJobRunBackendId, - key: key, - version: version - } - const response: CreateCacheEntryResponse = await twirpClient.CreateCacheEntry(request) - core.info(`CreateCacheEntryResponse: ${JSON.stringify(response)}`) + let cacheId = -1 - // Archive - // We're going to handle 1 path fow now. This needs to be fixed to handle all - // paths passed in. - const rootDir = path.dirname(paths[0]) - const zipSpecs: UploadZipSpecification[] = getUploadZipSpecification(paths, rootDir) - if (zipSpecs.length === 0) { + const cachePaths = await utils.resolvePaths(paths) + core.debug('Cache Paths:') + core.debug(`${JSON.stringify(cachePaths)}`) + + if (cachePaths.length === 0) { throw new Error( - `Error with zip specs: ${zipSpecs.flatMap(s => (s.sourcePath ? [s.sourcePath] : [])).join(', ')}` + `Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.` ) } - // 0: No compression - // 1: Best speed - // 6: Default compression (same as GNU Gzip) - // 9: Best compression Higher levels will result in better compression, but will take longer to complete. For large files that are not easily compressed, a value of 0 is recommended for significantly faster uploads. - const zipUploadStream = await createZipUploadStream( - zipSpecs, - 6 + const archiveFolder = await utils.createTempDirectory() + const archivePath = path.join( + archiveFolder, + utils.getCacheFileName(compressionMethod) ) - // Cache v2 upload - // inputs: - // - getSignedUploadURL - // - archivePath - core.info(`Saving Cache v2: ${paths[0]}`) - await UploadCacheStream(response.signedUploadUrl, zipUploadStream) + core.debug(`Archive Path: ${archivePath}`) - // Finalize the cache entry - const finalizeRequest: FinalizeCacheEntryUploadRequest = { - workflowRunBackendId: backendIds.workflowRunBackendId, - workflowJobRunBackendId: backendIds.workflowJobRunBackendId, - key: key, - version: version, - sizeBytes: "1024", + try { + await createTar(archiveFolder, cachePaths, compressionMethod) + if (core.isDebug()) { + await listTar(archivePath, compressionMethod) + } + + const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath) + core.debug(`File Size: ${archiveFileSize}`) + + // For GHES, this check will take place in ReserveCache API with enterprise file size limit + if (archiveFileSize > CacheFileSizeLimit && !utils.isGhes()) { + throw new Error( + `Cache size of ~${Math.round( + archiveFileSize / (1024 * 1024) + )} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.` + ) + } + + core.debug('Reserving Cache') + const version = utils.getCacheVersion( + paths, + compressionMethod, + enableCrossOsArchive + ) + const request: CreateCacheEntryRequest = { + workflowRunBackendId: backendIds.workflowRunBackendId, + workflowJobRunBackendId: backendIds.workflowJobRunBackendId, + key: key, + version: version + } + const response: CreateCacheEntryResponse = await twirpClient.CreateCacheEntry(request) + core.info(`CreateCacheEntryResponse: 
${JSON.stringify(response)}`) + // TODO: handle the error cases here + if (!response.ok) { + throw new ReserveCacheError( + `Unable to reserve cache with key ${key}, another job may be creating this cache.` + ) + } + + // TODO: mask the signed upload URL + core.debug(`Saving Cache to: ${response.signedUploadUrl}`) + await UploadCacheFile( + response.signedUploadUrl, + archivePath, + ) + + const finalizeRequest: FinalizeCacheEntryUploadRequest = { + workflowRunBackendId: backendIds.workflowRunBackendId, + workflowJobRunBackendId: backendIds.workflowJobRunBackendId, + key: key, + version: version, + sizeBytes: `${archiveFileSize}`, + } + + const finalizeResponse: FinalizeCacheEntryUploadResponse = await twirpClient.FinalizeCacheEntryUpload(finalizeRequest) + core.debug(`FinalizeCacheEntryUploadResponse: ${JSON.stringify(finalizeResponse)}`) + + if (!finalizeResponse.ok) { + throw new Error( + `Unable to finalize cache with key ${key}, another job may be finalizing this cache.` + ) + } + + // TODO: this is not great, we should handle the types without parsing + cacheId = parseInt(finalizeResponse.entryId) + } catch (error) { + const typedError = error as Error + core.debug(typedError.message) + } finally { + // Try to delete the archive to save space + try { + await utils.unlinkFile(archivePath) + } catch (error) { + core.debug(`Failed to delete archive: ${error}`) + } } - const finalizeResponse: FinalizeCacheEntryUploadResponse = await twirpClient.FinalizeCacheEntryUpload(finalizeRequest) - core.info(`FinalizeCacheEntryUploadResponse: ${JSON.stringify(finalizeResponse)}`) - - return 0 + return cacheId } \ No newline at end of file diff --git a/packages/cache/src/internal/cacheTwirpClient.ts b/packages/cache/src/internal/cacheTwirpClient.ts index cc365ec6..3cb3422e 100644 --- a/packages/cache/src/internal/cacheTwirpClient.ts +++ b/packages/cache/src/internal/cacheTwirpClient.ts @@ -1,8 +1,8 @@ -import { HttpClient, HttpClientResponse, HttpCodes } from '@actions/http-client' -import { BearerCredentialHandler } from '@actions/http-client/lib/auth' import { info, debug } from '@actions/core' -import { CacheServiceClientJSON } from '../generated/results/api/v1/cache.twirp' import { getRuntimeToken, getCacheServiceURL } from './config' +import { BearerCredentialHandler } from '@actions/http-client/lib/auth' +import { HttpClient, HttpClientResponse, HttpCodes } from '@actions/http-client' +import { CacheServiceClientJSON } from '../generated/results/api/v1/cache.twirp' // import {getUserAgentString} from './user-agent' // import {NetworkError, UsageError} from './errors' @@ -16,6 +16,13 @@ interface Rpc { ): Promise } +/** + * This class is a wrapper around the CacheServiceClientJSON class generated by Twirp. + * + * It adds retry logic to the request method, which is not present in the generated client. + * + * This class is used to interact with cache service v2. 
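+ * + * Failed requests are retried up to maxAttempts times with jittered exponential backoff (base interval 3s, multiplier 1.5 by default).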
+ */ class CacheServiceClient implements Rpc { private httpClient: HttpClient private baseUrl: string diff --git a/packages/cache/src/internal/constants.ts b/packages/cache/src/internal/constants.ts index b2cddf96..bc4e1d7a 100644 --- a/packages/cache/src/internal/constants.ts +++ b/packages/cache/src/internal/constants.ts @@ -35,4 +35,6 @@ export const SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\S export const TarFilename = 'cache.tar' -export const ManifestFilename = 'manifest.txt' \ No newline at end of file +export const ManifestFilename = 'manifest.txt' + +export const CacheFileSizeLimit = 10 * Math.pow(1024, 3) // 10GiB per repository \ No newline at end of file diff --git a/packages/cache/src/internal/v2/download-cache.ts b/packages/cache/src/internal/v2/download-cache.ts index 19563181..1820cb70 100644 --- a/packages/cache/src/internal/v2/download-cache.ts +++ b/packages/cache/src/internal/v2/download-cache.ts @@ -1,68 +1,25 @@ import * as core from '@actions/core' -import * as httpClient from '@actions/http-client' -import unzip from 'unzip-stream' -const packageJson = require('../../../package.json') -export async function StreamExtract(url: string, directory: string): Promise { - let retryCount = 0 - while (retryCount < 5) { - try { - await streamExtractExternal(url, directory) - return - } catch (error) { - retryCount++ - core.info( - `Failed to download cache after ${retryCount} retries due to ${error.message}. Retrying in 5 seconds...` - ) - // wait 5 seconds before retrying - await new Promise(resolve => setTimeout(resolve, 5000)) - } - } +import { + BlobClient, + BlockBlobClient, + BlobDownloadOptions, +} from '@azure/storage-blob' - throw new Error(`Cache download failed after ${retryCount} retries.`) -} +export async function DownloadCacheFile( + signedUploadURL: string, + archivePath: string, +): Promise<{}> { + const downloadOptions: BlobDownloadOptions = { + maxRetryRequests: 5, + } -export async function streamExtractExternal( - url: string, - directory: string - ): Promise { - const client = new httpClient.HttpClient(`@actions/cache-${packageJson.version}`) - const response = await client.get(url) - if (response.message.statusCode !== 200) { - core.info(`Failed to download cache. 
HTTP status code: ${response.message.statusCode}`) - throw new Error( - `Unexpected HTTP response from blob storage: ${response.message.statusCode} ${response.message.statusMessage}` - ) - } - - const timeout = 30 * 1000 // 30 seconds - - return new Promise((resolve, reject) => { - const timerFn = (): void => { - response.message.destroy( - new Error(`Blob storage chunk did not respond in ${timeout}ms`) - ) - } - const timer = setTimeout(timerFn, timeout) - - response.message - .on('data', () => { - timer.refresh() - }) - .on('error', (error: Error) => { - core.info( - `response.message: Cache download failed: ${error.message}` - ) - clearTimeout(timer) - reject(error) - }) - .pipe(unzip.Extract({path: directory})) - .on('close', () => { - clearTimeout(timer) - resolve() - }) - .on('error', (error: Error) => { - reject(error) - }) - }) - } \ No newline at end of file + // TODO: tighten the configuration and pass the appropriate user-agent + const blobClient: BlobClient = new BlobClient(signedUploadURL) + const blockBlobClient: BlockBlobClient = blobClient.getBlockBlobClient() + + core.debug(`BlobClient: ${JSON.stringify(blobClient)}`) + core.debug(`blockBlobClient: ${JSON.stringify(blockBlobClient)}`) + + return blockBlobClient.downloadToFile(archivePath, 0, undefined, downloadOptions) +} \ No newline at end of file diff --git a/packages/cache/src/internal/v2/upload-cache.ts b/packages/cache/src/internal/v2/upload-cache.ts index b3ed530d..e4572d20 100644 --- a/packages/cache/src/internal/v2/upload-cache.ts +++ b/packages/cache/src/internal/v2/upload-cache.ts @@ -1,130 +1,27 @@ import * as core from '@actions/core' -import { CreateCacheEntryResponse } from '../../generated/results/api/v1/cache' -import { ZipUploadStream } from '@actions/artifact/lib/internal/upload/zip' -import { NetworkError } from '@actions/artifact/' -import { TransferProgressEvent } from '@azure/core-http' -import * as stream from 'stream' -import * as crypto from 'crypto' - import { BlobClient, BlockBlobClient, - BlockBlobUploadStreamOptions, BlockBlobParallelUploadOptions } from '@azure/storage-blob' -export async function UploadCacheStream( - signedUploadURL: string, - zipUploadStream: ZipUploadStream -): Promise<{}> { - let uploadByteCount = 0 - let lastProgressTime = Date.now() - let timeoutId: NodeJS.Timeout | undefined - - const chunkTimer = (timeout: number): NodeJS.Timeout => { - // clear the previous timeout - if (timeoutId) { - clearTimeout(timeoutId) - } - - timeoutId = setTimeout(() => { - const now = Date.now() - // if there's been more than 30 seconds since the - // last progress event, then we'll consider the upload stalled - if (now - lastProgressTime > timeout) { - throw new Error('Upload progress stalled.') - } - }, timeout) - return timeoutId - } - - const maxConcurrency = 32 - const bufferSize = 8 * 1024 * 1024 // 8 MB Chunks - const blobClient = new BlobClient(signedUploadURL) - const blockBlobClient = blobClient.getBlockBlobClient() - const timeoutDuration = 300000 // 30 seconds - - core.debug( - `Uploading cache zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}` - ) - - const uploadCallback = (progress: TransferProgressEvent): void => { - core.info(`Uploaded bytes ${progress.loadedBytes}`) - uploadByteCount = progress.loadedBytes - chunkTimer(timeoutDuration) - lastProgressTime = Date.now() - } - - const options: BlockBlobUploadStreamOptions = { - blobHTTPHeaders: { blobContentType: 'zip' }, - onProgress: uploadCallback - } - - let sha256Hash: string | 
undefined = undefined - const uploadStream = new stream.PassThrough() - const hashStream = crypto.createHash('sha256') - - zipUploadStream.pipe(uploadStream) // This stream is used for the upload - zipUploadStream.pipe(hashStream).setEncoding('hex') // This stream is used to compute a hash of the zip content that gets used. Integrity check - - core.info('Beginning upload of cache to blob storage') - try { - // Start the chunk timer - timeoutId = chunkTimer(timeoutDuration) - await blockBlobClient.uploadStream( - uploadStream, - bufferSize, - maxConcurrency, - options - ) - } catch (error) { - if (NetworkError.isNetworkErrorCode(error?.code)) { - throw new NetworkError(error?.code) - } - throw error - } finally { - // clear the timeout whether or not the upload completes - if (timeoutId) { - clearTimeout(timeoutId) - } - } - - core.info('Finished uploading cache content to blob storage!') - - hashStream.end() - sha256Hash = hashStream.read() as string - core.info(`SHA256 hash of uploaded artifact zip is ${sha256Hash}`) - core.info(`Uploaded: ${uploadByteCount} bytes`) - - if (uploadByteCount === 0) { - core.error( - `No data was uploaded to blob storage. Reported upload byte count is 0.` - ) - } - return { - uploadSize: uploadByteCount, - sha256Hash - } -} - export async function UploadCacheFile( - uploadURL: CreateCacheEntryResponse, + signedUploadURL: string, archivePath: string, ): Promise<{}> { - core.info(`Uploading ${archivePath} to: ${JSON.stringify(uploadURL)}`) - + // TODO: tighten the configuration and pass the appropriate user-agent // Specify data transfer options const uploadOptions: BlockBlobParallelUploadOptions = { blockSize: 4 * 1024 * 1024, // 4 MiB max block size - concurrency: 2, // maximum number of parallel transfer workers + concurrency: 4, // maximum number of parallel transfer workers maxSingleShotSize: 8 * 1024 * 1024, // 8 MiB initial transfer size }; - const blobClient: BlobClient = new BlobClient(uploadURL.signedUploadUrl) + const blobClient: BlobClient = new BlobClient(signedUploadURL) const blockBlobClient: BlockBlobClient = blobClient.getBlockBlobClient() - core.info(`BlobClient: ${JSON.stringify(blobClient)}`) - core.info(`blockBlobClient: ${JSON.stringify(blockBlobClient)}`) + core.debug(`BlobClient: ${JSON.stringify(blobClient)}`) + core.debug(`blockBlobClient: ${JSON.stringify(blockBlobClient)}`) return blockBlobClient.uploadFile(archivePath, uploadOptions); } \ No newline at end of file diff --git a/packages/cache/src/internal/v2/zip.ts b/packages/cache/src/internal/v2/zip.ts deleted file mode 100644 index e69de29b..00000000 From 7f5921cdddc31081d4754a42711d71e7890b0d06 Mon Sep 17 00:00:00 2001 From: Josh Gross Date: Tue, 22 Oct 2024 12:01:31 -0400 Subject: [PATCH 020/108] Document unreleased changes in `cache` and `tool-cache` (#1856) --- packages/cache/RELEASES.md | 5 ++++- packages/tool-cache/RELEASES.md | 3 +++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/packages/cache/RELEASES.md b/packages/cache/RELEASES.md index 43566ef1..8f00327c 100644 --- a/packages/cache/RELEASES.md +++ b/packages/cache/RELEASES.md @@ -1,9 +1,12 @@ # @actions/cache Releases +### Unreleased +- Remove dependency on `uuid` package [#1824](https://github.com/actions/toolkit/pull/1824), [#1842](https://github.com/actions/toolkit/pull/1842) + ### 3.2.4 - Updated `isGhes` check to include `.ghe.com` and `.ghe.localhost` as accepted hosts - + ### 3.2.3 - Fixed a bug that mutated path arguments to `getCacheVersion` [#1378](https://github.com/actions/toolkit/pull/1378) 
diff --git a/packages/tool-cache/RELEASES.md b/packages/tool-cache/RELEASES.md index 9fdd4898..e2372238 100644 --- a/packages/tool-cache/RELEASES.md +++ b/packages/tool-cache/RELEASES.md @@ -1,5 +1,8 @@ # @actions/tool-cache Releases +### Unreleased +- Remove dependency on `uuid` package [#1824](https://github.com/actions/toolkit/pull/1824), [#1842](https://github.com/actions/toolkit/pull/1842) + ### 2.0.1 - Update to v2.0.1 of `@actions/http-client` [#1087](https://github.com/actions/toolkit/pull/1087) From 28dbd8ff93db072afd45025983326af5f8603465 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Thu, 24 Oct 2024 05:19:48 -0700 Subject: [PATCH 021/108] Cleanups and package refactoring --- .../cache/__tests__/restoreCachev2.test.ts | 346 ++++++++++++++++++ packages/cache/src/cache.ts | 12 +- .../internal/{v2 => blob}/download-cache.ts | 0 .../src/internal/{v2 => blob}/upload-cache.ts | 0 .../internal/{ => shared}/cacheTwirpClient.ts | 28 +- packages/cache/src/internal/shared/errors.ts | 72 ++++ .../cache/src/internal/shared/user-agent.ts | 9 + 7 files changed, 447 insertions(+), 20 deletions(-) create mode 100644 packages/cache/__tests__/restoreCachev2.test.ts rename packages/cache/src/internal/{v2 => blob}/download-cache.ts (100%) rename packages/cache/src/internal/{v2 => blob}/upload-cache.ts (100%) rename packages/cache/src/internal/{ => shared}/cacheTwirpClient.ts (90%) create mode 100644 packages/cache/src/internal/shared/errors.ts create mode 100644 packages/cache/src/internal/shared/user-agent.ts diff --git a/packages/cache/__tests__/restoreCachev2.test.ts b/packages/cache/__tests__/restoreCachev2.test.ts new file mode 100644 index 00000000..73f42bfa --- /dev/null +++ b/packages/cache/__tests__/restoreCachev2.test.ts @@ -0,0 +1,346 @@ +import * as core from '@actions/core' +import * as path from 'path' +import { restoreCache } from '../src/cache' +import { CacheServiceClientJSON } from '../src/generated/results/api/v1/cache.twirp' +import * as cacheUtils from '../src/internal/cacheUtils' +import * as config from '../src/internal/config' +import { CacheFilename, CompressionMethod } from '../src/internal/constants' +import * as util from '@actions/artifact/lib/internal/shared/util' +import { ArtifactCacheEntry } from '../src/internal/contracts' +import * as tar from '../src/internal/tar' + +jest.mock('../src/internal/cacheTwirpClient') +jest.mock('../src/internal/cacheUtils') +jest.mock('../src/internal/tar') + +const fixtures = { + testRuntimeToken: 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwic2NwIjoiQWN0aW9ucy5FeGFtcGxlIEFjdGlvbnMuQW5vdGhlckV4YW1wbGU6dGVzdCBBY3Rpb25zLlJlc3VsdHM6Y2U3ZjU0YzctNjFjNy00YWFlLTg4N2YtMzBkYTQ3NWY1ZjFhOmNhMzk1MDg1LTA0MGEtNTI2Yi0yY2U4LWJkYzg1ZjY5Mjc3NCIsImlhdCI6MTUxNjIzOTAyMn0.XYnI_wHPBlUi1mqYveJnnkJhp4dlFjqxzRmISPsqfw8', + backendIds: { + workflowRunBackendId: 'c4d7c21f-ba3f-4ddc-a8c8-6f2f626f8422', + workflowJobRunBackendId: '760803a1-f890-4d25-9a6e-a3fc01a0c7cf' + }, + cacheServiceURL: 'http://results.local', +} + +beforeAll(() => { + jest.spyOn(console, 'log').mockImplementation(() => { }) + jest.spyOn(core, 'debug').mockImplementation(() => { }) + jest.spyOn(core, 'info').mockImplementation(() => { }) + jest.spyOn(core, 'warning').mockImplementation(() => { }) + jest.spyOn(core, 'error').mockImplementation(() => { }) + + jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => { + const actualUtils = jest.requireActual('../src/internal/cacheUtils') + return 
actualUtils.getCacheFileName(cm) + }) + + jest.spyOn(config, 'getCacheServiceVersion').mockImplementation(() => { + return "v2" + }) + + jest.spyOn(config, 'getRuntimeToken').mockImplementation(() => { + return fixtures.testRuntimeToken + }) + + jest.spyOn(util, 'getBackendIdsFromToken').mockImplementation(() => { + return fixtures.backendIds + }) + + jest.spyOn(config, 'getCacheServiceURL').mockReturnValue( + fixtures.cacheServiceURL + ) +}) + +test('restore with no path should fail', async () => { + const paths: string[] = [] + const key = 'node-test' + await expect(restoreCache(paths, key)).rejects.toThrowError( + `Path Validation Error: At least one directory or file path is required` + ) +}) + +test('restore with too many keys should fail', async () => { + const paths = ['node_modules'] + const key = 'node-test' + const restoreKeys = [...Array(20).keys()].map(x => x.toString()) + await expect(restoreCache(paths, key, restoreKeys)).rejects.toThrowError( + `Key Validation Error: Keys are limited to a maximum of 10.` + ) +}) + +test('restore with large key should fail', async () => { + const paths = ['node_modules'] + const key = 'foo'.repeat(512) // Over the 512 character limit + await expect(restoreCache(paths, key)).rejects.toThrowError( + `Key Validation Error: ${key} cannot be larger than 512 characters.` + ) +}) + +test('restore with invalid key should fail', async () => { + const paths = ['node_modules'] + const key = 'comma,comma' + await expect(restoreCache(paths, key)).rejects.toThrowError( + `Key Validation Error: ${key} cannot contain commas.` + ) +}) + +test('restore with no cache found', async () => { + const paths = ['node_modules'] + const key = 'node-test' + + jest.spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL') + .mockReturnValue( + Promise.resolve({ + ok: false, + signedDownloadUrl: '' + }) + ) + + const cacheKey = await restoreCache(paths, key) + expect(cacheKey).toBe(undefined) +}) + +/** +test('restore with server error should fail', async () => { + const paths = ['node_modules'] + const key = 'node-test' + const logWarningMock = jest.spyOn(core, 'warning') + + jest.spyOn(cacheHttpClient, 'getCacheEntry').mockImplementation(() => { + throw new Error('HTTP Error Occurred') + }) + + const cacheKey = await restoreCache(paths, key) + expect(cacheKey).toBe(undefined) + expect(logWarningMock).toHaveBeenCalledTimes(1) + expect(logWarningMock).toHaveBeenCalledWith( + 'Failed to restore: HTTP Error Occurred' + ) +}) + +test('restore with restore keys and no cache found', async () => { + const paths = ['node_modules'] + const key = 'node-test' + const restoreKey = 'node-' + + jest.spyOn(cacheHttpClient, 'getCacheEntry').mockImplementation(async () => { + return Promise.resolve(null) + }) + + const cacheKey = await restoreCache(paths, key, [restoreKey]) + + expect(cacheKey).toBe(undefined) +}) + +test('restore with gzip compressed cache found', async () => { + const paths = ['node_modules'] + const key = 'node-test' + + const cacheEntry: ArtifactCacheEntry = { + cacheKey: key, + scope: 'refs/heads/main', + archiveLocation: 'www.actionscache.test/download' + } + const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry') + getCacheMock.mockImplementation(async () => { + return Promise.resolve(cacheEntry) + }) + + const tempPath = '/foo/bar' + + const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') + createTempDirectoryMock.mockImplementation(async () => { + return Promise.resolve(tempPath) + }) + + const archivePath = 
path.join(tempPath, CacheFilename.Gzip) + const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache') + + const fileSize = 142 + const getArchiveFileSizeInBytesMock = jest + .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') + .mockReturnValue(fileSize) + + const extractTarMock = jest.spyOn(tar, 'extractTar') + const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile') + + const compression = CompressionMethod.Gzip + const getCompressionMock = jest + .spyOn(cacheUtils, 'getCompressionMethod') + .mockReturnValue(Promise.resolve(compression)) + + const cacheKey = await restoreCache(paths, key) + + expect(cacheKey).toBe(key) + expect(getCacheMock).toHaveBeenCalledWith([key], paths, { + compressionMethod: compression, + enableCrossOsArchive: false + }) + expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) + expect(downloadCacheMock).toHaveBeenCalledWith( + cacheEntry.archiveLocation, + archivePath, + undefined + ) + expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) + + expect(extractTarMock).toHaveBeenCalledTimes(1) + expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression) + + expect(unlinkFileMock).toHaveBeenCalledTimes(1) + expect(unlinkFileMock).toHaveBeenCalledWith(archivePath) + + expect(getCompressionMock).toHaveBeenCalledTimes(1) +}) + +test('restore with zstd compressed cache found', async () => { + const paths = ['node_modules'] + const key = 'node-test' + + const infoMock = jest.spyOn(core, 'info') + + const cacheEntry: ArtifactCacheEntry = { + cacheKey: key, + scope: 'refs/heads/main', + archiveLocation: 'www.actionscache.test/download' + } + const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry') + getCacheMock.mockImplementation(async () => { + return Promise.resolve(cacheEntry) + }) + const tempPath = '/foo/bar' + + const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') + createTempDirectoryMock.mockImplementation(async () => { + return Promise.resolve(tempPath) + }) + + const archivePath = path.join(tempPath, CacheFilename.Zstd) + const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache') + + const fileSize = 62915000 + const getArchiveFileSizeInBytesMock = jest + .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') + .mockReturnValue(fileSize) + + const extractTarMock = jest.spyOn(tar, 'extractTar') + const compression = CompressionMethod.Zstd + const getCompressionMock = jest + .spyOn(cacheUtils, 'getCompressionMethod') + .mockReturnValue(Promise.resolve(compression)) + + const cacheKey = await restoreCache(paths, key) + + expect(cacheKey).toBe(key) + expect(getCacheMock).toHaveBeenCalledWith([key], paths, { + compressionMethod: compression, + enableCrossOsArchive: false + }) + expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) + expect(downloadCacheMock).toHaveBeenCalledWith( + cacheEntry.archiveLocation, + archivePath, + undefined + ) + expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) + expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`) + + expect(extractTarMock).toHaveBeenCalledTimes(1) + expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression) + expect(getCompressionMock).toHaveBeenCalledTimes(1) +}) + +test('restore with cache found for restore key', async () => { + const paths = ['node_modules'] + const key = 'node-test' + const restoreKey = 'node-' + + const infoMock = jest.spyOn(core, 'info') + + const cacheEntry: ArtifactCacheEntry = { + cacheKey: restoreKey, + scope: 'refs/heads/main', + archiveLocation: 
'www.actionscache.test/download' + } + const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry') + getCacheMock.mockImplementation(async () => { + return Promise.resolve(cacheEntry) + }) + const tempPath = '/foo/bar' + + const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') + createTempDirectoryMock.mockImplementation(async () => { + return Promise.resolve(tempPath) + }) + + const archivePath = path.join(tempPath, CacheFilename.Zstd) + const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache') + + const fileSize = 142 + const getArchiveFileSizeInBytesMock = jest + .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') + .mockReturnValue(fileSize) + + const extractTarMock = jest.spyOn(tar, 'extractTar') + const compression = CompressionMethod.Zstd + const getCompressionMock = jest + .spyOn(cacheUtils, 'getCompressionMethod') + .mockReturnValue(Promise.resolve(compression)) + + const cacheKey = await restoreCache(paths, key, [restoreKey]) + + expect(cacheKey).toBe(restoreKey) + expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey], paths, { + compressionMethod: compression, + enableCrossOsArchive: false + }) + expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) + expect(downloadCacheMock).toHaveBeenCalledWith( + cacheEntry.archiveLocation, + archivePath, + undefined + ) + expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) + expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`) + + expect(extractTarMock).toHaveBeenCalledTimes(1) + expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression) + expect(getCompressionMock).toHaveBeenCalledTimes(1) +}) + +test('restore with dry run', async () => { + const paths = ['node_modules'] + const key = 'node-test' + const options = { lookupOnly: true } + + const cacheEntry: ArtifactCacheEntry = { + cacheKey: key, + scope: 'refs/heads/main', + archiveLocation: 'www.actionscache.test/download' + } + const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry') + getCacheMock.mockImplementation(async () => { + return Promise.resolve(cacheEntry) + }) + + const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') + const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache') + + const compression = CompressionMethod.Gzip + const getCompressionMock = jest + .spyOn(cacheUtils, 'getCompressionMethod') + .mockReturnValue(Promise.resolve(compression)) + + const cacheKey = await restoreCache(paths, key, undefined, options) + + expect(cacheKey).toBe(key) + expect(getCompressionMock).toHaveBeenCalledTimes(1) + expect(getCacheMock).toHaveBeenCalledWith([key], paths, { + compressionMethod: compression, + enableCrossOsArchive: false + }) + // creating a tempDir and downloading the cache are skipped + expect(createTempDirectoryMock).toHaveBeenCalledTimes(0) + expect(downloadCacheMock).toHaveBeenCalledTimes(0) +}) + **/ \ No newline at end of file diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index 7354f649..7e4200f8 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -1,11 +1,11 @@ import * as core from '@actions/core' import * as path from 'path' -import * as utils from './internal/cacheUtils' import * as config from './internal/config' +import * as utils from './internal/cacheUtils' import * as cacheHttpClient from './internal/cacheHttpClient' -import * as cacheTwirpClient from './internal/cacheTwirpClient' -import { createTar, extractTar, listTar } from './internal/tar' +import * as 
cacheTwirpClient from './internal/shared/cacheTwirpClient' import { DownloadOptions, UploadOptions } from './options' +import { createTar, extractTar, listTar } from './internal/tar' import { CreateCacheEntryRequest, CreateCacheEntryResponse, @@ -14,10 +14,10 @@ import { GetCacheEntryDownloadURLRequest, GetCacheEntryDownloadURLResponse } from './generated/results/api/v1/cache' -import { UploadCacheFile } from './internal/v2/upload-cache' -import { DownloadCacheFile } from './internal/v2/download-cache' -import { getBackendIdsFromToken, BackendIds } from '@actions/artifact/lib/internal/shared/util' import { CacheFileSizeLimit } from './internal/constants' +import { UploadCacheFile } from './internal/blob/upload-cache' +import { DownloadCacheFile } from './internal/blob/download-cache' +import { getBackendIdsFromToken, BackendIds } from '@actions/artifact/lib/internal/shared/util' export class ValidationError extends Error { constructor(message: string) { diff --git a/packages/cache/src/internal/v2/download-cache.ts b/packages/cache/src/internal/blob/download-cache.ts similarity index 100% rename from packages/cache/src/internal/v2/download-cache.ts rename to packages/cache/src/internal/blob/download-cache.ts diff --git a/packages/cache/src/internal/v2/upload-cache.ts b/packages/cache/src/internal/blob/upload-cache.ts similarity index 100% rename from packages/cache/src/internal/v2/upload-cache.ts rename to packages/cache/src/internal/blob/upload-cache.ts diff --git a/packages/cache/src/internal/cacheTwirpClient.ts b/packages/cache/src/internal/shared/cacheTwirpClient.ts similarity index 90% rename from packages/cache/src/internal/cacheTwirpClient.ts rename to packages/cache/src/internal/shared/cacheTwirpClient.ts index 3cb3422e..29bb845a 100644 --- a/packages/cache/src/internal/cacheTwirpClient.ts +++ b/packages/cache/src/internal/shared/cacheTwirpClient.ts @@ -1,10 +1,10 @@ import { info, debug } from '@actions/core' -import { getRuntimeToken, getCacheServiceURL } from './config' +import { getUserAgentString } from './user-agent' +import { NetworkError, UsageError } from './errors' +import { getRuntimeToken, getCacheServiceURL } from '../config' import { BearerCredentialHandler } from '@actions/http-client/lib/auth' import { HttpClient, HttpClientResponse, HttpCodes } from '@actions/http-client' -import { CacheServiceClientJSON } from '../generated/results/api/v1/cache.twirp' -// import {getUserAgentString} from './user-agent' -// import {NetworkError, UsageError} from './errors' +import { CacheServiceClientJSON } from '../../generated/results/api/v1/cache.twirp' // The twirp http client must implement this interface interface Rpc { @@ -100,9 +100,9 @@ class CacheServiceClient implements Rpc { isRetryable = this.isRetryableHttpStatusCode(statusCode) errorMessage = `Failed request: (${statusCode}) ${response.message.statusMessage}` if (body.msg) { - // if (UsageError.isUsageErrorMessage(body.msg)) { - // throw new UsageError() - // } + if (UsageError.isUsageErrorMessage(body.msg)) { + throw new UsageError() + } errorMessage = `${errorMessage}: ${body.msg}` } @@ -111,13 +111,13 @@ class CacheServiceClient implements Rpc { debug(`Raw Body: ${rawBody}`) } - // if (error instanceof UsageError) { - // throw error - // } + if (error instanceof UsageError) { + throw error + } - // if (NetworkError.isNetworkErrorCode(error?.code)) { - // throw new NetworkError(error?.code) - // } + if (NetworkError.isNetworkErrorCode(error?.code)) { + throw new NetworkError(error?.code) + } isRetryable = true 
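// Any error that reaches this point is treated as retryable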
errorMessage = error.message @@ -193,7 +193,7 @@ export function internalCacheTwirpClient(options?: { retryMultiplier?: number }): CacheServiceClientJSON { const client = new CacheServiceClient( - 'actions/cache', + getUserAgentString(), options?.maxAttempts, options?.retryIntervalMs, options?.retryMultiplier diff --git a/packages/cache/src/internal/shared/errors.ts b/packages/cache/src/internal/shared/errors.ts new file mode 100644 index 00000000..24c38e0d --- /dev/null +++ b/packages/cache/src/internal/shared/errors.ts @@ -0,0 +1,72 @@ +export class FilesNotFoundError extends Error { + files: string[] + + constructor(files: string[] = []) { + let message = 'No files were found to upload' + if (files.length > 0) { + message += `: ${files.join(', ')}` + } + + super(message) + this.files = files + this.name = 'FilesNotFoundError' + } +} + +export class InvalidResponseError extends Error { + constructor(message: string) { + super(message) + this.name = 'InvalidResponseError' + } +} + +export class CacheNotFoundError extends Error { + constructor(message = 'Cache not found') { + super(message) + this.name = 'CacheNotFoundError' + } +} + +export class GHESNotSupportedError extends Error { + constructor( + message = '@actions/cache v4.1.4+, actions/cache/save@v4+ and actions/cache/restore@v4+ are not currently supported on GHES.' + ) { + super(message) + this.name = 'GHESNotSupportedError' + } +} + +export class NetworkError extends Error { + code: string + + constructor(code: string) { + const message = `Unable to make request: ${code}\nIf you are using self-hosted runners, please make sure your runner has access to all GitHub endpoints: https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github` + super(message) + this.code = code + this.name = 'NetworkError' + } + + static isNetworkErrorCode = (code?: string): boolean => { + if (!code) return false + return [ + 'ECONNRESET', + 'ENOTFOUND', + 'ETIMEDOUT', + 'ECONNREFUSED', + 'EHOSTUNREACH' + ].includes(code) + } +} + +export class UsageError extends Error { + constructor() { + const message = `Cache storage quota has been hit. Unable to upload any new cache entries. 
Usage is recalculated every 6-12 hours.\nMore info on storage limits: https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions#calculating-minute-and-storage-spending` + super(message) + this.name = 'UsageError' + } + + static isUsageErrorMessage = (msg?: string): boolean => { + if (!msg) return false + return msg.includes('insufficient usage') + } +} diff --git a/packages/cache/src/internal/shared/user-agent.ts b/packages/cache/src/internal/shared/user-agent.ts new file mode 100644 index 00000000..1fcb15bd --- /dev/null +++ b/packages/cache/src/internal/shared/user-agent.ts @@ -0,0 +1,9 @@ +// eslint-disable-next-line @typescript-eslint/no-var-requires, @typescript-eslint/no-require-imports +const packageJson = require('../../../package.json') + +/** + * Ensure that this User Agent String is used in all HTTP calls so that we can monitor telemetry between different versions of this package + */ +export function getUserAgentString(): string { + return `@actions/cache-${packageJson.version}` +} From 01bf918aa54471eb872114e2283b70517994706e Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Thu, 24 Oct 2024 06:09:23 -0700 Subject: [PATCH 022/108] Refactoring & cleanup --- .../cache/__tests__/cacheHttpClient.test.ts | 9 +- .../cache/__tests__/restoreCachev2.test.ts | 346 ------------------ packages/cache/__tests__/saveCache.test.ts | 75 ---- packages/cache/src/cache.ts | 2 +- 4 files changed, 6 insertions(+), 426 deletions(-) delete mode 100644 packages/cache/__tests__/restoreCachev2.test.ts diff --git a/packages/cache/__tests__/cacheHttpClient.test.ts b/packages/cache/__tests__/cacheHttpClient.test.ts index 21c5ae86..b8176ba6 100644 --- a/packages/cache/__tests__/cacheHttpClient.test.ts +++ b/packages/cache/__tests__/cacheHttpClient.test.ts @@ -1,7 +1,8 @@ -import {downloadCache, getCacheVersion} from '../src/internal/cacheHttpClient' -import {CompressionMethod} from '../src/internal/constants' +import { getCacheVersion } from '../src/internal/cacheUtils' +import { downloadCache } from '../src/internal/cacheHttpClient' +import { CompressionMethod } from '../src/internal/constants' import * as downloadUtils from '../src/internal/downloadUtils' -import {DownloadOptions, getDownloadOptions} from '../src/options' +import { DownloadOptions, getDownloadOptions } from '../src/options' jest.mock('../src/internal/downloadUtils') @@ -128,7 +129,7 @@ test('downloadCache passes options to download methods', async () => { const archiveLocation = 'http://foo.blob.core.windows.net/bar/baz' const archivePath = '/foo/bar' - const options: DownloadOptions = {downloadConcurrency: 4} + const options: DownloadOptions = { downloadConcurrency: 4 } await downloadCache(archiveLocation, archivePath, options) diff --git a/packages/cache/__tests__/restoreCachev2.test.ts b/packages/cache/__tests__/restoreCachev2.test.ts deleted file mode 100644 index 73f42bfa..00000000 --- a/packages/cache/__tests__/restoreCachev2.test.ts +++ /dev/null @@ -1,346 +0,0 @@ -import * as core from '@actions/core' -import * as path from 'path' -import { restoreCache } from '../src/cache' -import { CacheServiceClientJSON } from '../src/generated/results/api/v1/cache.twirp' -import * as cacheUtils from '../src/internal/cacheUtils' -import * as config from '../src/internal/config' -import { CacheFilename, CompressionMethod } from '../src/internal/constants' -import * as util from '@actions/artifact/lib/internal/shared/util' -import { ArtifactCacheEntry } from 
'../src/internal/contracts' -import * as tar from '../src/internal/tar' - -jest.mock('../src/internal/cacheTwirpClient') -jest.mock('../src/internal/cacheUtils') -jest.mock('../src/internal/tar') - -const fixtures = { - testRuntimeToken: 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwic2NwIjoiQWN0aW9ucy5FeGFtcGxlIEFjdGlvbnMuQW5vdGhlckV4YW1wbGU6dGVzdCBBY3Rpb25zLlJlc3VsdHM6Y2U3ZjU0YzctNjFjNy00YWFlLTg4N2YtMzBkYTQ3NWY1ZjFhOmNhMzk1MDg1LTA0MGEtNTI2Yi0yY2U4LWJkYzg1ZjY5Mjc3NCIsImlhdCI6MTUxNjIzOTAyMn0.XYnI_wHPBlUi1mqYveJnnkJhp4dlFjqxzRmISPsqfw8', - backendIds: { - workflowRunBackendId: 'c4d7c21f-ba3f-4ddc-a8c8-6f2f626f8422', - workflowJobRunBackendId: '760803a1-f890-4d25-9a6e-a3fc01a0c7cf' - }, - cacheServiceURL: 'http://results.local', -} - -beforeAll(() => { - jest.spyOn(console, 'log').mockImplementation(() => { }) - jest.spyOn(core, 'debug').mockImplementation(() => { }) - jest.spyOn(core, 'info').mockImplementation(() => { }) - jest.spyOn(core, 'warning').mockImplementation(() => { }) - jest.spyOn(core, 'error').mockImplementation(() => { }) - - jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => { - const actualUtils = jest.requireActual('../src/internal/cacheUtils') - return actualUtils.getCacheFileName(cm) - }) - - jest.spyOn(config, 'getCacheServiceVersion').mockImplementation(() => { - return "v2" - }) - - jest.spyOn(config, 'getRuntimeToken').mockImplementation(() => { - return fixtures.testRuntimeToken - }) - - jest.spyOn(util, 'getBackendIdsFromToken').mockImplementation(() => { - return fixtures.backendIds - }) - - jest.spyOn(config, 'getCacheServiceURL').mockReturnValue( - fixtures.cacheServiceURL - ) -}) - -test('restore with no path should fail', async () => { - const paths: string[] = [] - const key = 'node-test' - await expect(restoreCache(paths, key)).rejects.toThrowError( - `Path Validation Error: At least one directory or file path is required` - ) -}) - -test('restore with too many keys should fail', async () => { - const paths = ['node_modules'] - const key = 'node-test' - const restoreKeys = [...Array(20).keys()].map(x => x.toString()) - await expect(restoreCache(paths, key, restoreKeys)).rejects.toThrowError( - `Key Validation Error: Keys are limited to a maximum of 10.` - ) -}) - -test('restore with large key should fail', async () => { - const paths = ['node_modules'] - const key = 'foo'.repeat(512) // Over the 512 character limit - await expect(restoreCache(paths, key)).rejects.toThrowError( - `Key Validation Error: ${key} cannot be larger than 512 characters.` - ) -}) - -test('restore with invalid key should fail', async () => { - const paths = ['node_modules'] - const key = 'comma,comma' - await expect(restoreCache(paths, key)).rejects.toThrowError( - `Key Validation Error: ${key} cannot contain commas.` - ) -}) - -test('restore with no cache found', async () => { - const paths = ['node_modules'] - const key = 'node-test' - - jest.spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL') - .mockReturnValue( - Promise.resolve({ - ok: false, - signedDownloadUrl: '' - }) - ) - - const cacheKey = await restoreCache(paths, key) - expect(cacheKey).toBe(undefined) -}) - -/** -test('restore with server error should fail', async () => { - const paths = ['node_modules'] - const key = 'node-test' - const logWarningMock = jest.spyOn(core, 'warning') - - jest.spyOn(cacheHttpClient, 'getCacheEntry').mockImplementation(() => { - throw new Error('HTTP Error Occurred') - }) - - const cacheKey = await restoreCache(paths, key) - 
expect(cacheKey).toBe(undefined) - expect(logWarningMock).toHaveBeenCalledTimes(1) - expect(logWarningMock).toHaveBeenCalledWith( - 'Failed to restore: HTTP Error Occurred' - ) -}) - -test('restore with restore keys and no cache found', async () => { - const paths = ['node_modules'] - const key = 'node-test' - const restoreKey = 'node-' - - jest.spyOn(cacheHttpClient, 'getCacheEntry').mockImplementation(async () => { - return Promise.resolve(null) - }) - - const cacheKey = await restoreCache(paths, key, [restoreKey]) - - expect(cacheKey).toBe(undefined) -}) - -test('restore with gzip compressed cache found', async () => { - const paths = ['node_modules'] - const key = 'node-test' - - const cacheEntry: ArtifactCacheEntry = { - cacheKey: key, - scope: 'refs/heads/main', - archiveLocation: 'www.actionscache.test/download' - } - const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry') - getCacheMock.mockImplementation(async () => { - return Promise.resolve(cacheEntry) - }) - - const tempPath = '/foo/bar' - - const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') - createTempDirectoryMock.mockImplementation(async () => { - return Promise.resolve(tempPath) - }) - - const archivePath = path.join(tempPath, CacheFilename.Gzip) - const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache') - - const fileSize = 142 - const getArchiveFileSizeInBytesMock = jest - .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') - .mockReturnValue(fileSize) - - const extractTarMock = jest.spyOn(tar, 'extractTar') - const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile') - - const compression = CompressionMethod.Gzip - const getCompressionMock = jest - .spyOn(cacheUtils, 'getCompressionMethod') - .mockReturnValue(Promise.resolve(compression)) - - const cacheKey = await restoreCache(paths, key) - - expect(cacheKey).toBe(key) - expect(getCacheMock).toHaveBeenCalledWith([key], paths, { - compressionMethod: compression, - enableCrossOsArchive: false - }) - expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) - expect(downloadCacheMock).toHaveBeenCalledWith( - cacheEntry.archiveLocation, - archivePath, - undefined - ) - expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) - - expect(extractTarMock).toHaveBeenCalledTimes(1) - expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression) - - expect(unlinkFileMock).toHaveBeenCalledTimes(1) - expect(unlinkFileMock).toHaveBeenCalledWith(archivePath) - - expect(getCompressionMock).toHaveBeenCalledTimes(1) -}) - -test('restore with zstd compressed cache found', async () => { - const paths = ['node_modules'] - const key = 'node-test' - - const infoMock = jest.spyOn(core, 'info') - - const cacheEntry: ArtifactCacheEntry = { - cacheKey: key, - scope: 'refs/heads/main', - archiveLocation: 'www.actionscache.test/download' - } - const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry') - getCacheMock.mockImplementation(async () => { - return Promise.resolve(cacheEntry) - }) - const tempPath = '/foo/bar' - - const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') - createTempDirectoryMock.mockImplementation(async () => { - return Promise.resolve(tempPath) - }) - - const archivePath = path.join(tempPath, CacheFilename.Zstd) - const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache') - - const fileSize = 62915000 - const getArchiveFileSizeInBytesMock = jest - .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') - .mockReturnValue(fileSize) - - const extractTarMock = 
jest.spyOn(tar, 'extractTar') - const compression = CompressionMethod.Zstd - const getCompressionMock = jest - .spyOn(cacheUtils, 'getCompressionMethod') - .mockReturnValue(Promise.resolve(compression)) - - const cacheKey = await restoreCache(paths, key) - - expect(cacheKey).toBe(key) - expect(getCacheMock).toHaveBeenCalledWith([key], paths, { - compressionMethod: compression, - enableCrossOsArchive: false - }) - expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) - expect(downloadCacheMock).toHaveBeenCalledWith( - cacheEntry.archiveLocation, - archivePath, - undefined - ) - expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) - expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`) - - expect(extractTarMock).toHaveBeenCalledTimes(1) - expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression) - expect(getCompressionMock).toHaveBeenCalledTimes(1) -}) - -test('restore with cache found for restore key', async () => { - const paths = ['node_modules'] - const key = 'node-test' - const restoreKey = 'node-' - - const infoMock = jest.spyOn(core, 'info') - - const cacheEntry: ArtifactCacheEntry = { - cacheKey: restoreKey, - scope: 'refs/heads/main', - archiveLocation: 'www.actionscache.test/download' - } - const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry') - getCacheMock.mockImplementation(async () => { - return Promise.resolve(cacheEntry) - }) - const tempPath = '/foo/bar' - - const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') - createTempDirectoryMock.mockImplementation(async () => { - return Promise.resolve(tempPath) - }) - - const archivePath = path.join(tempPath, CacheFilename.Zstd) - const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache') - - const fileSize = 142 - const getArchiveFileSizeInBytesMock = jest - .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') - .mockReturnValue(fileSize) - - const extractTarMock = jest.spyOn(tar, 'extractTar') - const compression = CompressionMethod.Zstd - const getCompressionMock = jest - .spyOn(cacheUtils, 'getCompressionMethod') - .mockReturnValue(Promise.resolve(compression)) - - const cacheKey = await restoreCache(paths, key, [restoreKey]) - - expect(cacheKey).toBe(restoreKey) - expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey], paths, { - compressionMethod: compression, - enableCrossOsArchive: false - }) - expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) - expect(downloadCacheMock).toHaveBeenCalledWith( - cacheEntry.archiveLocation, - archivePath, - undefined - ) - expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) - expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`) - - expect(extractTarMock).toHaveBeenCalledTimes(1) - expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression) - expect(getCompressionMock).toHaveBeenCalledTimes(1) -}) - -test('restore with dry run', async () => { - const paths = ['node_modules'] - const key = 'node-test' - const options = { lookupOnly: true } - - const cacheEntry: ArtifactCacheEntry = { - cacheKey: key, - scope: 'refs/heads/main', - archiveLocation: 'www.actionscache.test/download' - } - const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry') - getCacheMock.mockImplementation(async () => { - return Promise.resolve(cacheEntry) - }) - - const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') - const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache') - - const compression = CompressionMethod.Gzip - const 
getCompressionMock = jest - .spyOn(cacheUtils, 'getCompressionMethod') - .mockReturnValue(Promise.resolve(compression)) - - const cacheKey = await restoreCache(paths, key, undefined, options) - - expect(cacheKey).toBe(key) - expect(getCompressionMock).toHaveBeenCalledTimes(1) - expect(getCacheMock).toHaveBeenCalledWith([key], paths, { - compressionMethod: compression, - enableCrossOsArchive: false - }) - // creating a tempDir and downloading the cache are skipped - expect(createTempDirectoryMock).toHaveBeenCalledTimes(0) - expect(downloadCacheMock).toHaveBeenCalledTimes(0) -}) - **/ \ No newline at end of file diff --git a/packages/cache/__tests__/saveCache.test.ts b/packages/cache/__tests__/saveCache.test.ts index 7597ba8d..4d0027be 100644 --- a/packages/cache/__tests__/saveCache.test.ts +++ b/packages/cache/__tests__/saveCache.test.ts @@ -2,14 +2,10 @@ import * as core from '@actions/core' import * as path from 'path' import {saveCache} from '../src/cache' import * as cacheHttpClient from '../src/internal/cacheHttpClient' -import * as cacheTwirpClient from '../src/internal/cacheTwirpClient' -import {GetCacheBlobUploadURLResponse} from '../src/generated/results/api/v1/blobcache' -import {BlobCacheServiceClientJSON} from '../src/generated/results/api/v1/blobcache.twirp' import * as cacheUtils from '../src/internal/cacheUtils' import {CacheFilename, CompressionMethod} from '../src/internal/constants' import * as tar from '../src/internal/tar' import {TypedResponse} from '@actions/http-client/lib/interfaces' -import * as uploadCache from '../src/internal/v2/upload-cache' import { ReserveCacheResponse, ITypedResponseWithError @@ -331,74 +327,3 @@ test('save with non existing path should not save cache', async () => { `Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.` ) }) - -test('throwaway test', async () => { - const filePath = 'node_modules' - const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' - const cachePaths = [path.resolve(filePath)] - - const cacheSignedURL = 'https://container.blob.core.windows.net/cache/${primaryKey}?sig=1234' - const getCacheBlobUploadURL: GetCacheBlobUploadURLResponse = { - urls: [ - { - key: primaryKey, - url: cacheSignedURL, - }, - ] - } - - const cacheId = 4 - const reserveCacheMock = jest - .spyOn(cacheHttpClient, 'reserveCache') - .mockImplementation(async () => { - const response: TypedResponse = { - statusCode: 500, - result: {cacheId}, - headers: {} - } - return response - }) - - const getCacheBlobUploadURLMock = jest - .spyOn(BlobCacheServiceClientJSON.prototype, 'GetCacheBlobUploadURL') - .mockResolvedValue(getCacheBlobUploadURL) - - const uploadCacheMock = jest - .spyOn(uploadCache, 'UploadCacheFile') - .mockImplementation(async () => { - return { - status: 200 - } - }) - - const createTarMock = jest.spyOn(tar, 'createTar') - - const saveCacheMock = jest.spyOn(cacheHttpClient, 'saveCache') - const compression = CompressionMethod.Zstd - const getCompressionMock = jest - .spyOn(cacheUtils, 'getCompressionMethod') - .mockReturnValue(Promise.resolve(compression)) - - await uploadCache.UploadCacheFile(getCacheBlobUploadURL, cachePaths[0]) - await saveCache([filePath], primaryKey) - - expect(reserveCacheMock).toHaveBeenCalledTimes(1) - expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, [filePath], { - cacheSize: undefined, - compressionMethod: compression, - enableCrossOsArchive: false - }) - expect (getCacheBlobUploadURLMock).toHaveBeenCalledTimes(1) - const 
archiveFolder = '/foo/bar' - const archiveFile = path.join(archiveFolder, CacheFilename.Zstd) - expect(createTarMock).toHaveBeenCalledTimes(1) - expect(createTarMock).toHaveBeenCalledWith( - archiveFolder, - cachePaths, - compression - ) - expect(uploadCacheMock).toHaveBeenCalledTimes(2) - expect(saveCacheMock).toHaveBeenCalledTimes(1) - expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile, undefined) - expect(getCompressionMock).toHaveBeenCalledTimes(1) -}) \ No newline at end of file diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index 7e4200f8..2659b848 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -64,7 +64,7 @@ function checkKey(key: string): void { */ export function isFeatureAvailable(): boolean { - return !!config.getCacheServiceVersion + return !!process.env['ACTIONS_CACHE_URL'] } /** From 717ba9d9a42743b749b30020bad5b9350d58368e Mon Sep 17 00:00:00 2001 From: Meriadec Pillet Date: Wed, 30 Oct 2024 14:02:29 +0100 Subject: [PATCH 023/108] Handle tags containing "@" character in `buildSLSAProvenancePredicate` When using some monorepo-related tools (like [changesets](https://github.com/changesets/changesets)), the produced tags have a special format that includes the `@` character. For example, a `foo` package in a monorepo will produce Git tags that look like `foo@1.0.0` when using changesets. When used in combination with `actions/attest-build-provenance`, the action was not properly re-crafting the tag in `buildSLSAProvenancePredicate` because it always split the workflow ref by `@` and took the second element. This results in the following error on CI: ``` Error: Error: Failed to persist attestation: Invalid Argument - values do not match: refs/tags/foo != refs/tags/foo@1.0.0 - https://docs.github.com/rest/repos/repos#create-an-attestation ``` This PR slightly updates the logic there to instead take everything located after the first `@`. This shouldn't introduce any breaking change, while adding support for such custom tags. I've added the corresponding test case, and it passes; however, I couldn't successfully run the full test suite (not even on `main`). Looking forward to the CI outcome. Thanks in advance for the review :pray:.
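Concretely, the fix keeps the path split as before but re-joins the remaining `@`-separated chunks, as the diff below shows:

```typescript
// owner/repo/.github/workflows/main.yml@foo@1.0.0 =>
//   workflowPath: .github/workflows/main.yml, workflowRef: foo@1.0.0
const [workflowPath, ...workflowRefChunks] = claims.workflow_ref
  .replace(`${claims.repository}/`, '')
  .split('@')
// Re-join the chunks so a ref that itself contains '@' is preserved intact
const workflowRef = workflowRefChunks.join('@')
```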
--- .../__snapshots__/provenance.test.ts.snap | 42 +++++++++++++++++++ packages/attest/__tests__/provenance.test.ts | 32 ++++++++++---- packages/attest/src/provenance.ts | 4 +- 3 files changed, 68 insertions(+), 10 deletions(-) diff --git a/packages/attest/__tests__/__snapshots__/provenance.test.ts.snap b/packages/attest/__tests__/__snapshots__/provenance.test.ts.snap index 4c199dae..82daca94 100644 --- a/packages/attest/__tests__/__snapshots__/provenance.test.ts.snap +++ b/packages/attest/__tests__/__snapshots__/provenance.test.ts.snap @@ -1,5 +1,47 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP +exports[`provenance functions buildSLSAProvenancePredicate handle tags including "@" character 1`] = ` +{ + "params": { + "buildDefinition": { + "buildType": "https://actions.github.io/buildtypes/workflow/v1", + "externalParameters": { + "workflow": { + "path": ".github/workflows/main.yml", + "ref": "foo@1.0.0", + "repository": "https://foo.ghe.com/owner/repo", + }, + }, + "internalParameters": { + "github": { + "event_name": "push", + "repository_id": "repo-id", + "repository_owner_id": "owner-id", + "runner_environment": "github-hosted", + }, + }, + "resolvedDependencies": [ + { + "digest": { + "gitCommit": "babca52ab0c93ae16539e5923cb0d7403b9a093b", + }, + "uri": "git+https://foo.ghe.com/owner/repo@refs/heads/main", + }, + ], + }, + "runDetails": { + "builder": { + "id": "https://foo.ghe.com/owner/workflows/.github/workflows/publish.yml@main", + }, + "metadata": { + "invocationId": "https://foo.ghe.com/owner/repo/actions/runs/run-id/attempts/run-attempt", + }, + }, + }, + "type": "https://slsa.dev/provenance/v1", +} +`; + exports[`provenance functions buildSLSAProvenancePredicate returns a provenance hydrated from an OIDC token 1`] = ` { "params": { diff --git a/packages/attest/__tests__/provenance.test.ts b/packages/attest/__tests__/provenance.test.ts index 4dbfef58..6803d75d 100644 --- a/packages/attest/__tests__/provenance.test.ts +++ b/packages/attest/__tests__/provenance.test.ts @@ -33,15 +33,7 @@ describe('provenance functions', () => { runner_environment: 'github-hosted' } - beforeEach(async () => { - process.env = { - ...originalEnv, - ACTIONS_ID_TOKEN_REQUEST_URL: `${issuer}${tokenPath}?`, - ACTIONS_ID_TOKEN_REQUEST_TOKEN: 'token', - GITHUB_SERVER_URL: 'https://foo.ghe.com', - GITHUB_REPOSITORY: claims.repository - } - + const mockIssuer = async (claims: jose.JWTPayload): Promise => { // Generate JWT signing key const key = await jose.generateKeyPair('PS256') @@ -60,6 +52,18 @@ describe('provenance functions', () => { // Mock OIDC token endpoint for populating the provenance nock(issuer).get(tokenPath).query({audience}).reply(200, {value: jwt}) + } + + beforeEach(async () => { + process.env = { + ...originalEnv, + ACTIONS_ID_TOKEN_REQUEST_URL: `${issuer}${tokenPath}?`, + ACTIONS_ID_TOKEN_REQUEST_TOKEN: 'token', + GITHUB_SERVER_URL: 'https://foo.ghe.com', + GITHUB_REPOSITORY: claims.repository + } + + await mockIssuer(claims) }) afterEach(() => { @@ -71,6 +75,16 @@ describe('provenance functions', () => { const predicate = await buildSLSAProvenancePredicate() expect(predicate).toMatchSnapshot() }) + + it('handle tags including "@" character', async () => { + nock.cleanAll() + await mockIssuer({ + ...claims, + workflow_ref: 'owner/repo/.github/workflows/main.yml@foo@1.0.0' + }) + const predicate = await buildSLSAProvenancePredicate() + expect(predicate).toMatchSnapshot() + }) }) describe('attestProvenance', () => { diff --git a/packages/attest/src/provenance.ts 
b/packages/attest/src/provenance.ts index 09aa64f7..faba08fd 100644 --- a/packages/attest/src/provenance.ts +++ b/packages/attest/src/provenance.ts @@ -30,9 +30,11 @@ export const buildSLSAProvenancePredicate = async ( // Split just the path and ref from the workflow string. // owner/repo/.github/workflows/main.yml@main => // .github/workflows/main.yml, main - const [workflowPath, workflowRef] = claims.workflow_ref + const [workflowPath, ...workflowRefChunks] = claims.workflow_ref .replace(`${claims.repository}/`, '') .split('@') + // Handle case where tag contains `@` (e.g: when using changesets in a monorepo context), + const workflowRef = workflowRefChunks.join('@') return { type: SLSA_PREDICATE_V1_TYPE, From 65ee4d33afc6a3c188b33b58976e2e98c5d0281e Mon Sep 17 00:00:00 2001 From: Brian DeHamer Date: Fri, 1 Nov 2024 08:59:55 -0700 Subject: [PATCH 024/108] use macos-latest-large in test/release workflows (#1869) Signed-off-by: Brian DeHamer --- .github/workflows/releases.yml | 4 ++-- .github/workflows/unit-tests.yml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index 592f7707..a29858c4 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -11,7 +11,7 @@ on: jobs: test: - runs-on: macos-latest + runs-on: macos-latest-large steps: - name: setup repo @@ -48,7 +48,7 @@ jobs: path: packages/${{ github.event.inputs.package }}/*.tgz publish: - runs-on: macos-latest + runs-on: macos-latest-large needs: test environment: npm-publish permissions: diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index 952fa6b2..633a0168 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -16,7 +16,7 @@ jobs: strategy: matrix: - runs-on: [ubuntu-latest, macos-latest, windows-latest] + runs-on: [ubuntu-latest, macos-latest-large, windows-latest] fail-fast: false runs-on: ${{ matrix.runs-on }} From 265a5be8bc69fbea621091c2f8f5b08586fa383c Mon Sep 17 00:00:00 2001 From: Brian DeHamer Date: Wed, 30 Oct 2024 10:55:36 -0700 Subject: [PATCH 025/108] support multi-subject attestations Signed-off-by: Brian DeHamer --- packages/attest/README.md | 34 ++++++++++++-------- packages/attest/__tests__/attest.test.ts | 16 +++++++++ packages/attest/__tests__/intoto.test.ts | 2 +- packages/attest/__tests__/provenance.test.ts | 12 +++---- packages/attest/src/attest.ts | 32 ++++++++++++------ packages/attest/src/intoto.ts | 4 +-- 6 files changed, 67 insertions(+), 33 deletions(-) create mode 100644 packages/attest/__tests__/attest.test.ts diff --git a/packages/attest/README.md b/packages/attest/README.md index 8f004399..e6761ea6 100644 --- a/packages/attest/README.md +++ b/packages/attest/README.md @@ -32,8 +32,7 @@ async function run() { const ghToken = core.getInput('gh-token'); const attestation = await attest({ - subjectName: 'my-artifact-name', - subjectDigest: { 'sha256': '36ab4667...'}, + subjects: [{name: 'my-artifact-name', digest: { 'sha256': '36ab4667...'}}], predicateType: 'https://in-toto.io/attestation/release', predicate: { . . . }, token: ghToken @@ -49,11 +48,12 @@ The `attest` function supports the following options: ```typescript export type AttestOptions = { - // The name of the subject to be attested. - subjectName: string - // The digest of the subject to be attested. Should be a map of digest - // algorithms to their hex-encoded values. - subjectDigest: Record + // Deprecated. Use 'subjects' instead. 
+ subjectName?: string + // Deprecated. Use 'subjects' instead. + subjectDigest?: Record<string, string> + // Collection of subjects to be attested + subjects?: Subject[] + // GitHub token for writing attestations. token: string // Sigstore instance to use for signing. Must be one of "public-good" or // "github". diff --git a/packages/attest/__tests__/attest.test.ts b/packages/attest/__tests__/attest.test.ts new file mode 100644 index 00000000..d8b07163 --- /dev/null +++ b/packages/attest/__tests__/attest.test.ts @@ -0,0 +1,16 @@ +import {attest} from '../src/attest' + +describe('attest', () => { + describe('when no subject information is provided', () => { + it('throws an error', async () => { + const options = { + predicateType: 'foo', + predicate: {bar: 'baz'}, + token: 'token' + } + await expect(attest(options)).rejects.toThrowError( + 'Must provide either subjectName and subjectDigest or subjects' + ) + }) + }) +}) diff --git a/packages/attest/__tests__/intoto.test.ts b/packages/attest/__tests__/intoto.test.ts index dd6a1a95..c69f7d84 100644 --- a/packages/attest/__tests__/intoto.test.ts +++ b/packages/attest/__tests__/intoto.test.ts @@ -17,7 +17,7 @@ describe('buildIntotoStatement', () => { } it('returns an intoto statement', () => { - const statement = buildIntotoStatement(subject, predicate) + const statement = buildIntotoStatement([subject], predicate) expect(statement).toMatchSnapshot() }) }) diff --git a/packages/attest/__tests__/provenance.test.ts b/packages/attest/__tests__/provenance.test.ts index 4dbfef58..cca7a020 100644 --- a/packages/attest/__tests__/provenance.test.ts +++ b/packages/attest/__tests__/provenance.test.ts @@ -115,8 +115,7 @@ describe('provenance functions', () => { describe('when the sigstore instance is explicitly set', () => { it('attests provenance', async () => { const attestation = await attestProvenance({ - subjectName, - subjectDigest, + subjects: [{name: subjectName, digest: subjectDigest}], token: 'token', sigstore: 'github' }) @@ -143,8 +142,7 @@ it('attests provenance', async () => { const attestation = await attestProvenance({ - subjectName, - subjectDigest, + subjects: [{name: subjectName, digest: subjectDigest}], token: 'token' }) @@ -178,8 +176,7 @@ describe('when the sigstore instance is explicitly set', () => { it('attests provenance', async () => { const attestation = await attestProvenance({ - subjectName, -
diff --git a/packages/attest/__tests__/attest.test.ts b/packages/attest/__tests__/attest.test.ts
new file mode 100644
index 00000000..d8b07163
--- /dev/null
+++ b/packages/attest/__tests__/attest.test.ts
@@ -0,0 +1,16 @@
+import {attest} from '../src/attest'
+
+describe('attest', () => {
+  describe('when no subject information is provided', () => {
+    it('throws an error', async () => {
+      const options = {
+        predicateType: 'foo',
+        predicate: {bar: 'baz'},
+        token: 'token'
+      }
+      await expect(attest(options)).rejects.toThrowError(
+        'Must provide either subjectName and subjectDigest or subjects'
+      )
+    })
+  })
+})
diff --git a/packages/attest/__tests__/intoto.test.ts b/packages/attest/__tests__/intoto.test.ts
index dd6a1a95..c69f7d84 100644
--- a/packages/attest/__tests__/intoto.test.ts
+++ b/packages/attest/__tests__/intoto.test.ts
@@ -17,7 +17,7 @@ describe('buildIntotoStatement', () => {
   }

   it('returns an intoto statement', () => {
-    const statement = buildIntotoStatement(subject, predicate)
+    const statement = buildIntotoStatement([subject], predicate)
     expect(statement).toMatchSnapshot()
   })
 })
diff --git a/packages/attest/__tests__/provenance.test.ts b/packages/attest/__tests__/provenance.test.ts
index 4dbfef58..cca7a020 100644
--- a/packages/attest/__tests__/provenance.test.ts
+++ b/packages/attest/__tests__/provenance.test.ts
@@ -115,8 +115,7 @@ describe('provenance functions', () => {
     describe('when the sigstore instance is explicitly set', () => {
       it('attests provenance', async () => {
         const attestation = await attestProvenance({
-          subjectName,
-          subjectDigest,
+          subjects: [{name: subjectName, digest: subjectDigest}],
           token: 'token',
           sigstore: 'github'
         })
@@ -143,8 +142,7 @@

       it('attests provenance', async () => {
         const attestation = await attestProvenance({
-          subjectName,
-          subjectDigest,
+          subjects: [{name: subjectName, digest: subjectDigest}],
           token: 'token'
         })
@@ -178,8 +176,7 @@
     describe('when the sigstore instance is explicitly set', () => {
       it('attests provenance', async () => {
         const attestation = await attestProvenance({
-          subjectName,
-          subjectDigest,
+          subjects: [{name: subjectName, digest: subjectDigest}],
           token: 'token',
           sigstore: 'public-good'
         })
@@ -206,8 +203,7 @@

       it('attests provenance', async () => {
         const attestation = await attestProvenance({
-          subjectName,
-          subjectDigest,
+          subjects: [{name: subjectName, digest: subjectDigest}],
           token: 'token'
         })
diff --git a/packages/attest/src/attest.ts b/packages/attest/src/attest.ts
index 85c63013..807a8e5d 100644
--- a/packages/attest/src/attest.ts
+++ b/packages/attest/src/attest.ts
@@ -14,11 +14,16 @@ const INTOTO_PAYLOAD_TYPE = 'application/vnd.in-toto+json'
  * Options for attesting a subject / predicate.
  */
 export type AttestOptions = {
-  // The name of the subject to be attested.
-  subjectName: string
-  // The digest of the subject to be attested. Should be a map of digest
-  // algorithms to their hex-encoded values.
-  subjectDigest: Record<string, string>
+  /**
+   * @deprecated Use `subjects` instead.
+   **/
+  subjectName?: string
+  /**
+   * @deprecated Use `subjects` instead.
+   **/
+  subjectDigest?: Record<string, string>
+  // Subjects to be attested.
+  subjects?: Subject[]
   // Content type of the predicate being attested.
   predicateType: string
   // Predicate to be attested.
@@ -42,15 +47,24 @@ export type AttestOptions = {
  * @returns A promise that resolves to the attestation.
  */
 export async function attest(options: AttestOptions): Promise<Attestation> {
-  const subject: Subject = {
-    name: options.subjectName,
-    digest: options.subjectDigest
+  let subjects: Subject[]
+
+  if (options.subjects) {
+    subjects = options.subjects
+  } else if (options.subjectName && options.subjectDigest) {
+    subjects = [{name: options.subjectName, digest: options.subjectDigest}]
+  } else {
+    throw new Error(
+      'Must provide either subjectName and subjectDigest or subjects'
+    )
   }
+
   const predicate: Predicate = {
     type: options.predicateType,
     params: options.predicate
   }
-  const statement = buildIntotoStatement(subject, predicate)
+
+  const statement = buildIntotoStatement(subjects, predicate)

   // Sign the provenance statement
   const payload: Payload = {
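The option handling added to `attest` above boils down to a small resolution step. A self-contained sketch of that logic, with the types inlined for illustration:

```typescript
type Subject = {name: string; digest: Record<string, string>}

// Mirrors the fallback introduced above: prefer the new `subjects` array,
// fall back to the deprecated single-subject fields, otherwise throw.
function resolveSubjects(options: {
  subjects?: Subject[]
  subjectName?: string
  subjectDigest?: Record<string, string>
}): Subject[] {
  if (options.subjects) {
    return options.subjects
  } else if (options.subjectName && options.subjectDigest) {
    return [{name: options.subjectName, digest: options.subjectDigest}]
  } else {
    throw new Error(
      'Must provide either subjectName and subjectDigest or subjects'
    )
  }
}

// Both call styles resolve to the same subject list:
console.log(resolveSubjects({subjectName: 'a', subjectDigest: {sha256: '00'}}))
console.log(resolveSubjects({subjects: [{name: 'a', digest: {sha256: '00'}}]}))
```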
diff --git a/packages/attest/src/intoto.ts b/packages/attest/src/intoto.ts
index 9d6a2d0e..5a2dcc9f 100644
--- a/packages/attest/src/intoto.ts
+++ b/packages/attest/src/intoto.ts
@@ -20,12 +20,12 @@ export type InTotoStatement = {
  * @returns The constructed in-toto statement.
  */
 export const buildIntotoStatement = (
-  subject: Subject,
+  subjects: Subject[],
   predicate: Predicate
 ): InTotoStatement => {
   return {
     _type: INTOTO_STATEMENT_V1_TYPE,
-    subject: [subject],
+    subject: subjects,
     predicateType: predicate.type,
     predicate: predicate.params
   }

From 7e54468896aa89d3a3f4a2af408e1ea6c192bcae Mon Sep 17 00:00:00 2001
From: Brian DeHamer
Date: Fri, 1 Nov 2024 09:45:11 -0700
Subject: [PATCH 026/108] update release notes for @actions/attest v1.5.0

Signed-off-by: Brian DeHamer
---
 packages/attest/RELEASES.md | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/packages/attest/RELEASES.md b/packages/attest/RELEASES.md
index f6d25193..da623b95 100644
--- a/packages/attest/RELEASES.md
+++ b/packages/attest/RELEASES.md
@@ -5,6 +5,8 @@
 - Bump @actions/core from 1.10.1 to 1.11.1 [#1847](https://github.com/actions/toolkit/pull/1847)
 - Bump @sigstore/bundle from 2.3.2 to 3.0.0 [#1846](https://github.com/actions/toolkit/pull/1846)
 - Bump @sigstore/sign from 2.3.2 to 3.0.0 [#1846](https://github.com/actions/toolkit/pull/1846)
+- Support for generating multi-subject attestations [#1864](https://github.com/actions/toolkit/pull/1865)
+- Fix bug in `buildSLSAProvenancePredicate` related to `workflow_ref` OIDC token claims containing the "@" symbol in the tag name [#1863](https://github.com/actions/toolkit/pull/1863)

 ### 1.4.2

From 77f247b2f3e5d82ecd0e27573ef30c75d5d9a2cb Mon Sep 17 00:00:00 2001
From: Josh Gross
Date: Fri, 1 Nov 2024 13:32:42 -0400
Subject: [PATCH 027/108] Prepare `@actions/cache` 3.3.0 release (#1871)

---
 packages/cache/RELEASES.md       |  3 ++-
 packages/cache/package-lock.json | 41 ++++++++++----------------------
 packages/cache/package.json      |  4 ++--
 3 files changed, 17 insertions(+), 31 deletions(-)

diff --git a/packages/cache/RELEASES.md b/packages/cache/RELEASES.md
index 8f00327c..85415952 100644
--- a/packages/cache/RELEASES.md
+++ b/packages/cache/RELEASES.md
@@ -1,6 +1,7 @@
 # @actions/cache Releases

-### Unreleased
+### 3.3.0

+- Update `@actions/core` to `1.11.1`
 - Remove dependency on `uuid` package [#1824](https://github.com/actions/toolkit/pull/1824), [#1842](https://github.com/actions/toolkit/pull/1842)

 ### 3.2.4

diff --git a/packages/cache/package-lock.json b/packages/cache/package-lock.json
index 346c2c2a..724f674a 100644
--- a/packages/cache/package-lock.json
+++ b/packages/cache/package-lock.json
@@ -1,15 +1,15 @@
 {
   "name": "@actions/cache",
-  "version": "3.2.4",
+  "version": "3.3.0",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "@actions/cache",
-      "version": "3.2.4",
+      "version": "3.3.0",
       "license": "MIT",
       "dependencies": {
-        "@actions/core": "^1.10.0",
+        "@actions/core": "^1.11.1",
         "@actions/exec": "^1.0.1",
         "@actions/glob": "^0.1.0",
         "@actions/http-client": "^2.1.1",
@@ -25,20 +25,12 @@
       }
     },
     "node_modules/@actions/core": {
-      "version": "1.10.0",
-      "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz",
-      "integrity": "sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==",
+      "version": "1.11.1",
+      "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.11.1.tgz",
+      "integrity": "sha512-hXJCSrkwfA46Vd9Z3q4cpEpHB1rL5NG04+/rbqW9d3+CSvtB1tYe8UTpAlixa1vj0m/ULglfEK2UKxMGxCxv5A==",
       "dependencies": {
-        "@actions/http-client": "^2.0.1",
-        "uuid": "^8.3.2"
-      }
-    },
-    "node_modules/@actions/core/node_modules/uuid": {
-      "version": "8.3.2",
-      "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
"sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", - "bin": { - "uuid": "dist/bin/uuid" + "@actions/exec": "^1.1.1", + "@actions/http-client": "^2.0.1" } }, "node_modules/@actions/exec": { @@ -515,19 +507,12 @@ }, "dependencies": { "@actions/core": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz", - "integrity": "sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==", + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.11.1.tgz", + "integrity": "sha512-hXJCSrkwfA46Vd9Z3q4cpEpHB1rL5NG04+/rbqW9d3+CSvtB1tYe8UTpAlixa1vj0m/ULglfEK2UKxMGxCxv5A==", "requires": { - "@actions/http-client": "^2.0.1", - "uuid": "^8.3.2" - }, - "dependencies": { - "uuid": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" - } + "@actions/exec": "^1.1.1", + "@actions/http-client": "^2.0.1" } }, "@actions/exec": { diff --git a/packages/cache/package.json b/packages/cache/package.json index 6af620f2..a98c0bb6 100644 --- a/packages/cache/package.json +++ b/packages/cache/package.json @@ -1,6 +1,6 @@ { "name": "@actions/cache", - "version": "3.2.4", + "version": "3.3.0", "preview": true, "description": "Actions cache lib", "keywords": [ @@ -37,7 +37,7 @@ "url": "https://github.com/actions/toolkit/issues" }, "dependencies": { - "@actions/core": "^1.10.0", + "@actions/core": "^1.11.1", "@actions/exec": "^1.0.1", "@actions/glob": "^0.1.0", "@actions/http-client": "^2.1.1", From bb2278e5cfbb40afc20890c415e9ffa836631cd5 Mon Sep 17 00:00:00 2001 From: Josh Gross Date: Fri, 8 Nov 2024 10:30:18 -0500 Subject: [PATCH 028/108] Extend Node version test coverage (#1843) * Extend Node version test coverage * Remove Node 16 --- .github/workflows/unit-tests.yml | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index 633a0168..6956df01 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -17,6 +17,10 @@ jobs: strategy: matrix: runs-on: [ubuntu-latest, macos-latest-large, windows-latest] + + # Node 18 is the current default Node version in hosted runners, so users may still use the toolkit with it when running tests (see https://github.com/actions/toolkit/issues/1841) + # Node 20 is the currently support Node version for actions - https://docs.github.com/actions/sharing-automations/creating-actions/metadata-syntax-for-github-actions#runsusing-for-javascript-actions + node-version: [18.x, 20.x] fail-fast: false runs-on: ${{ matrix.runs-on }} @@ -25,10 +29,10 @@ jobs: - name: Checkout uses: actions/checkout@v4 - - name: Set Node.js 20.x + - name: Set up Node ${{ matrix.node-version }} uses: actions/setup-node@v4 with: - node-version: 20.x + node-version: ${{ matrix.node-version }} - name: npm install run: npm install From 9da70ffbd7e5115e75d296aebeaa7e449d7a20a5 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Thu, 14 Nov 2024 02:04:20 -0800 Subject: [PATCH 029/108] Post merge cleanup --- packages/attest/package-lock.json | 2 +- packages/cache/package-lock.json | 2320 +----------------------- packages/glob/package-lock.json | 2 +- packages/http-client/package-lock.json | 2 +- 4 files changed, 4 insertions(+), 2322 deletions(-) diff --git 
a/packages/attest/package-lock.json b/packages/attest/package-lock.json index 9a3160ba..11ad6b8e 100644 --- a/packages/attest/package-lock.json +++ b/packages/attest/package-lock.json @@ -3171,4 +3171,4 @@ "integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==" } } -} \ No newline at end of file +} diff --git a/packages/cache/package-lock.json b/packages/cache/package-lock.json index 3963e19d..724f674a 100644 --- a/packages/cache/package-lock.json +++ b/packages/cache/package-lock.json @@ -24,27 +24,6 @@ "typescript": "^5.2.2" } }, - "node_modules/@actions/artifact": { - "version": "2.1.7", - "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-2.1.7.tgz", - "integrity": "sha512-iIFsTPZnb182dBc+Is5v7ZqojC4ydO8Ru4/PD8Azg2diV//fdW3H6biEH/utUlNhwfOuHxZpC/QSQsU5KDEuuw==", - "dependencies": { - "@actions/core": "^1.10.0", - "@actions/github": "^5.1.1", - "@actions/http-client": "^2.1.0", - "@azure/storage-blob": "^12.15.0", - "@octokit/core": "^3.5.1", - "@octokit/plugin-request-log": "^1.0.4", - "@octokit/plugin-retry": "^3.0.9", - "@octokit/request-error": "^5.0.0", - "@protobuf-ts/plugin": "^2.2.3-alpha.1", - "archiver": "^7.0.1", - "crypto": "^1.0.1", - "jwt-decode": "^3.1.2", - "twirp-ts": "^2.5.0", - "unzip-stream": "^0.3.1" - } - }, "node_modules/@actions/core": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.11.1.tgz", @@ -62,17 +41,6 @@ "@actions/io": "^1.0.1" } }, - "node_modules/@actions/github": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/@actions/github/-/github-5.1.1.tgz", - "integrity": "sha512-Nk59rMDoJaV+mHCOJPXuvB1zIbomlKS0dmSIqPGxd0enAXBnOfn4VWF+CGtRCwXZG9Epa54tZA7VIRlJDS8A6g==", - "dependencies": { - "@actions/http-client": "^2.0.1", - "@octokit/core": "^3.6.0", - "@octokit/plugin-paginate-rest": "^2.17.0", - "@octokit/plugin-rest-endpoint-methods": "^5.13.0" - } - }, "node_modules/@actions/glob": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/@actions/glob/-/glob-0.1.2.tgz", @@ -269,176 +237,6 @@ "node": ">=14.0.0" } }, - "node_modules/@isaacs/cliui": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", - "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", - "dependencies": { - "string-width": "^5.1.2", - "string-width-cjs": "npm:string-width@^4.2.0", - "strip-ansi": "^7.0.1", - "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", - "wrap-ansi": "^8.1.0", - "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/@octokit/auth-token": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-2.5.0.tgz", - "integrity": "sha512-r5FVUJCOLl19AxiuZD2VRZ/ORjp/4IN98Of6YJoJOkY75CIBuYfmiNHGrDwXr+aLGG55igl9QrxX3hbiXlLb+g==", - "dependencies": { - "@octokit/types": "^6.0.3" - } - }, - "node_modules/@octokit/core": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/@octokit/core/-/core-3.6.0.tgz", - "integrity": "sha512-7RKRKuA4xTjMhY+eG3jthb3hlZCsOwg3rztWh75Xc+ShDWOfDDATWbeZpAHBNRpm4Tv9WgBMOy1zEJYXG6NJ7Q==", - "dependencies": { - "@octokit/auth-token": "^2.4.4", - "@octokit/graphql": "^4.5.8", - "@octokit/request": "^5.6.3", - "@octokit/request-error": "^2.0.5", - "@octokit/types": "^6.0.3", - "before-after-hook": "^2.2.0", - "universal-user-agent": "^6.0.0" - } - }, - "node_modules/@octokit/core/node_modules/@octokit/request-error": { - 
"version": "2.1.0", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-2.1.0.tgz", - "integrity": "sha512-1VIvgXxs9WHSjicsRwq8PlR2LR2x6DwsJAaFgzdi0JfJoGSO8mYI/cHJQ+9FbN21aa+DrgNLnwObmyeSC8Rmpg==", - "dependencies": { - "@octokit/types": "^6.0.3", - "deprecation": "^2.0.0", - "once": "^1.4.0" - } - }, - "node_modules/@octokit/endpoint": { - "version": "6.0.12", - "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-6.0.12.tgz", - "integrity": "sha512-lF3puPwkQWGfkMClXb4k/eUT/nZKQfxinRWJrdZaJO85Dqwo/G0yOC434Jr2ojwafWJMYqFGFa5ms4jJUgujdA==", - "dependencies": { - "@octokit/types": "^6.0.3", - "is-plain-object": "^5.0.0", - "universal-user-agent": "^6.0.0" - } - }, - "node_modules/@octokit/graphql": { - "version": "4.8.0", - "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-4.8.0.tgz", - "integrity": "sha512-0gv+qLSBLKF0z8TKaSKTsS39scVKF9dbMxJpj3U0vC7wjNWFuIpL/z76Qe2fiuCbDRcJSavkXsVtMS6/dtQQsg==", - "dependencies": { - "@octokit/request": "^5.6.0", - "@octokit/types": "^6.0.3", - "universal-user-agent": "^6.0.0" - } - }, - "node_modules/@octokit/openapi-types": { - "version": "12.11.0", - "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-12.11.0.tgz", - "integrity": "sha512-VsXyi8peyRq9PqIz/tpqiL2w3w80OgVMwBHltTml3LmVvXiphgeqmY9mvBw9Wu7e0QWk/fqD37ux8yP5uVekyQ==" - }, - "node_modules/@octokit/plugin-paginate-rest": { - "version": "2.21.3", - "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-2.21.3.tgz", - "integrity": "sha512-aCZTEf0y2h3OLbrgKkrfFdjRL6eSOo8komneVQJnYecAxIej7Bafor2xhuDJOIFau4pk0i/P28/XgtbyPF0ZHw==", - "dependencies": { - "@octokit/types": "^6.40.0" - }, - "peerDependencies": { - "@octokit/core": ">=2" - } - }, - "node_modules/@octokit/plugin-request-log": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@octokit/plugin-request-log/-/plugin-request-log-1.0.4.tgz", - "integrity": "sha512-mLUsMkgP7K/cnFEw07kWqXGF5LKrOkD+lhCrKvPHXWDywAwuDUeDwWBpc69XK3pNX0uKiVt8g5z96PJ6z9xCFA==", - "peerDependencies": { - "@octokit/core": ">=3" - } - }, - "node_modules/@octokit/plugin-rest-endpoint-methods": { - "version": "5.16.2", - "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-5.16.2.tgz", - "integrity": "sha512-8QFz29Fg5jDuTPXVtey05BLm7OB+M8fnvE64RNegzX7U+5NUXcOcnpTIK0YfSHBg8gYd0oxIq3IZTe9SfPZiRw==", - "dependencies": { - "@octokit/types": "^6.39.0", - "deprecation": "^2.3.1" - }, - "peerDependencies": { - "@octokit/core": ">=3" - } - }, - "node_modules/@octokit/plugin-retry": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/@octokit/plugin-retry/-/plugin-retry-3.0.9.tgz", - "integrity": "sha512-r+fArdP5+TG6l1Rv/C9hVoty6tldw6cE2pRHNGmFPdyfrc696R6JjrQ3d7HdVqGwuzfyrcaLAKD7K8TX8aehUQ==", - "dependencies": { - "@octokit/types": "^6.0.3", - "bottleneck": "^2.15.3" - } - }, - "node_modules/@octokit/request": { - "version": "5.6.3", - "resolved": "https://registry.npmjs.org/@octokit/request/-/request-5.6.3.tgz", - "integrity": "sha512-bFJl0I1KVc9jYTe9tdGGpAMPy32dLBXXo1dS/YwSCTL/2nd9XeHsY616RE3HPXDVk+a+dBuzyz5YdlXwcDTr2A==", - "dependencies": { - "@octokit/endpoint": "^6.0.1", - "@octokit/request-error": "^2.1.0", - "@octokit/types": "^6.16.1", - "is-plain-object": "^5.0.0", - "node-fetch": "^2.6.7", - "universal-user-agent": "^6.0.0" - } - }, - "node_modules/@octokit/request-error": { - "version": "5.1.0", - "resolved": 
"https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.0.tgz", - "integrity": "sha512-GETXfE05J0+7H2STzekpKObFe765O5dlAKUTLNGeH+x47z7JjXHfsHKo5z21D/o/IOZTUEI6nyWyR+bZVP/n5Q==", - "dependencies": { - "@octokit/types": "^13.1.0", - "deprecation": "^2.0.0", - "once": "^1.4.0" - }, - "engines": { - "node": ">= 18" - } - }, - "node_modules/@octokit/request-error/node_modules/@octokit/openapi-types": { - "version": "22.2.0", - "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz", - "integrity": "sha512-QBhVjcUa9W7Wwhm6DBFu6ZZ+1/t/oYxqc2tp81Pi41YNuJinbFRx8B133qVOrAaBbF7D/m0Et6f9/pZt9Rc+tg==" - }, - "node_modules/@octokit/request-error/node_modules/@octokit/types": { - "version": "13.5.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.5.0.tgz", - "integrity": "sha512-HdqWTf5Z3qwDVlzCrP8UJquMwunpDiMPt5er+QjGzL4hqr/vBVY/MauQgS1xWxCDT1oMx1EULyqxncdCY/NVSQ==", - "dependencies": { - "@octokit/openapi-types": "^22.2.0" - } - }, - "node_modules/@octokit/request/node_modules/@octokit/request-error": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-2.1.0.tgz", - "integrity": "sha512-1VIvgXxs9WHSjicsRwq8PlR2LR2x6DwsJAaFgzdi0JfJoGSO8mYI/cHJQ+9FbN21aa+DrgNLnwObmyeSC8Rmpg==", - "dependencies": { - "@octokit/types": "^6.0.3", - "deprecation": "^2.0.0", - "once": "^1.4.0" - } - }, - "node_modules/@octokit/types": { - "version": "6.41.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-6.41.0.tgz", - "integrity": "sha512-eJ2jbzjdijiL3B4PrSQaSjuF2sPEQPVCPzBvTHJD9Nz+9dw2SGH4K4xeQJ77YfTq5bRQ+bD8wT11JbeDPmxmGg==", - "dependencies": { - "@octokit/openapi-types": "^12.11.0" - } - }, "node_modules/@opentelemetry/api": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.4.1.tgz", @@ -447,85 +245,6 @@ "node": ">=8.0.0" } }, - "node_modules/@pkgjs/parseargs": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", - "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", - "optional": true, - "engines": { - "node": ">=14" - } - }, - "node_modules/@protobuf-ts/plugin": { - "version": "2.9.4", - "resolved": "https://registry.npmjs.org/@protobuf-ts/plugin/-/plugin-2.9.4.tgz", - "integrity": "sha512-Db5Laq5T3mc6ERZvhIhkj1rn57/p8gbWiCKxQWbZBBl20wMuqKoHbRw4tuD7FyXi+IkwTToaNVXymv5CY3E8Rw==", - "dependencies": { - "@protobuf-ts/plugin-framework": "^2.9.4", - "@protobuf-ts/protoc": "^2.9.4", - "@protobuf-ts/runtime": "^2.9.4", - "@protobuf-ts/runtime-rpc": "^2.9.4", - "typescript": "^3.9" - }, - "bin": { - "protoc-gen-dump": "bin/protoc-gen-dump", - "protoc-gen-ts": "bin/protoc-gen-ts" - } - }, - "node_modules/@protobuf-ts/plugin-framework": { - "version": "2.9.4", - "resolved": "https://registry.npmjs.org/@protobuf-ts/plugin-framework/-/plugin-framework-2.9.4.tgz", - "integrity": "sha512-9nuX1kjdMliv+Pes8dQCKyVhjKgNNfwxVHg+tx3fLXSfZZRcUHMc1PMwB9/vTvc6gBKt9QGz5ERqSqZc0++E9A==", - "dependencies": { - "@protobuf-ts/runtime": "^2.9.4", - "typescript": "^3.9" - } - }, - "node_modules/@protobuf-ts/plugin-framework/node_modules/typescript": { - "version": "3.9.10", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.10.tgz", - "integrity": "sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q==", - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" - }, - "engines": { - "node": 
">=4.2.0" - } - }, - "node_modules/@protobuf-ts/plugin/node_modules/typescript": { - "version": "3.9.10", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.10.tgz", - "integrity": "sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q==", - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" - }, - "engines": { - "node": ">=4.2.0" - } - }, - "node_modules/@protobuf-ts/protoc": { - "version": "2.9.4", - "resolved": "https://registry.npmjs.org/@protobuf-ts/protoc/-/protoc-2.9.4.tgz", - "integrity": "sha512-hQX+nOhFtrA+YdAXsXEDrLoGJqXHpgv4+BueYF0S9hy/Jq0VRTVlJS1Etmf4qlMt/WdigEes5LOd/LDzui4GIQ==", - "bin": { - "protoc": "protoc.js" - } - }, - "node_modules/@protobuf-ts/runtime": { - "version": "2.9.4", - "resolved": "https://registry.npmjs.org/@protobuf-ts/runtime/-/runtime-2.9.4.tgz", - "integrity": "sha512-vHRFWtJJB/SiogWDF0ypoKfRIZ41Kq+G9cEFj6Qm1eQaAhJ1LDFvgZ7Ja4tb3iLOQhz0PaoPnnOijF1qmEqTxg==" - }, - "node_modules/@protobuf-ts/runtime-rpc": { - "version": "2.9.4", - "resolved": "https://registry.npmjs.org/@protobuf-ts/runtime-rpc/-/runtime-rpc-2.9.4.tgz", - "integrity": "sha512-y9L9JgnZxXFqH5vD4d7j9duWvIJ7AShyBRoNKJGhu9Q27qIbchfzli66H9RvrQNIFk5ER7z1Twe059WZGqERcA==", - "dependencies": { - "@protobuf-ts/runtime": "^2.9.4" - } - }, "node_modules/@types/node": { "version": "20.4.6", "resolved": "https://registry.npmjs.org/@types/node/-/node-20.4.6.tgz", @@ -578,129 +297,16 @@ "node": ">=6.5" } }, - "node_modules/ansi-regex": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-regex?sponsor=1" - } - }, - "node_modules/ansi-styles": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", - "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/archiver": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/archiver/-/archiver-7.0.1.tgz", - "integrity": "sha512-ZcbTaIqJOfCc03QwD468Unz/5Ir8ATtvAHsK+FdXbDIbGfihqh9mrvdcYunQzqn4HrvWWaFyaxJhGZagaJJpPQ==", - "dependencies": { - "archiver-utils": "^5.0.2", - "async": "^3.2.4", - "buffer-crc32": "^1.0.0", - "readable-stream": "^4.0.0", - "readdir-glob": "^1.1.2", - "tar-stream": "^3.0.0", - "zip-stream": "^6.0.1" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/archiver-utils": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/archiver-utils/-/archiver-utils-5.0.2.tgz", - "integrity": "sha512-wuLJMmIBQYCsGZgYLTy5FIB2pF6Lfb6cXMSF8Qywwk3t20zWnAi7zLcQFdKQmIB8wyZpY5ER38x08GbwtR2cLA==", - "dependencies": { - "glob": "^10.0.0", - "graceful-fs": "^4.2.0", - "is-stream": "^2.0.1", - "lazystream": "^1.0.0", - "lodash": "^4.17.15", - "normalize-path": "^3.0.0", - "readable-stream": "^4.0.0" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/async": { - "version": "3.2.5", - "resolved": "https://registry.npmjs.org/async/-/async-3.2.5.tgz", - "integrity": "sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg==" - }, "node_modules/asynckit": { "version": "0.4.0", "resolved": 
"https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" }, - "node_modules/b4a": { - "version": "1.6.6", - "resolved": "https://registry.npmjs.org/b4a/-/b4a-1.6.6.tgz", - "integrity": "sha512-5Tk1HLk6b6ctmjIkAcU/Ujv/1WqiDl0F0JdRCR80VsOcUlHcu7pWeWRlOqQLHfDEsVx9YH/aif5AG4ehoCtTmg==" - }, "node_modules/balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" }, - "node_modules/bare-events": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/bare-events/-/bare-events-2.4.2.tgz", - "integrity": "sha512-qMKFd2qG/36aA4GwvKq8MxnPgCQAmBWmSyLWsJcbn8v03wvIPQ/hG1Ms8bPzndZxMDoHpxez5VOS+gC9Yi24/Q==", - "optional": true - }, - "node_modules/base64-js": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", - "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, - "node_modules/before-after-hook": { - "version": "2.2.3", - "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz", - "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==" - }, - "node_modules/binary": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/binary/-/binary-0.3.0.tgz", - "integrity": "sha512-D4H1y5KYwpJgK8wk1Cue5LLPgmwHKYSChkbspQg5JtVuR5ulGckxfR62H3AE9UDkdMC8yyXlqYihuz3Aqg2XZg==", - "dependencies": { - "buffers": "~0.1.1", - "chainsaw": "~0.1.0" - }, - "engines": { - "node": "*" - } - }, - "node_modules/bottleneck": { - "version": "2.19.5", - "resolved": "https://registry.npmjs.org/bottleneck/-/bottleneck-2.19.5.tgz", - "integrity": "sha512-VHiNCbI1lKdl44tGrhNfU3lup0Tj/ZBMJB5/2ZbNXRCPuRCO7ed2mgcK4r17y+KB2EfuYuRaVlwNbAeaWGSpbw==" - }, "node_modules/brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", @@ -710,81 +316,6 @@ "concat-map": "0.0.1" } }, - "node_modules/buffer": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", - "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.2.1" - } - }, - "node_modules/buffer-crc32": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-1.0.0.tgz", - "integrity": "sha512-Db1SbgBS/fg/392AblrMJk97KggmvYhr4pB5ZIMTWtaivCPMWLkmb7m21cJvpvgK+J3nsU2CmmixNBZx4vFj/w==", - "engines": { - "node": ">=8.0.0" - } - }, - "node_modules/buffers": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/buffers/-/buffers-0.1.1.tgz", - "integrity": "sha512-9q/rDEGSb/Qsvv2qvzIzdluL5k7AaJOTrw23z9reQthrbF7is4CtlT0DXyO1oei2DCp4uojjzQ7igaSHp1kAEQ==", - "engines": { - 
"node": ">=0.2.0" - } - }, - "node_modules/camel-case": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/camel-case/-/camel-case-4.1.2.tgz", - "integrity": "sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw==", - "dependencies": { - "pascal-case": "^3.1.2", - "tslib": "^2.0.3" - } - }, - "node_modules/chainsaw": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/chainsaw/-/chainsaw-0.1.0.tgz", - "integrity": "sha512-75kWfWt6MEKNC8xYXIdRpDehRYY/tNSgwKaJq+dbbDcxORuVrrQ+SEHoWsniVn9XPYfP4gmdWIeDk/4YNp1rNQ==", - "dependencies": { - "traverse": ">=0.3.0 <0.4" - }, - "engines": { - "node": "*" - } - }, - "node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" - }, "node_modules/combined-stream": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", @@ -796,81 +327,11 @@ "node": ">= 0.8" } }, - "node_modules/commander": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz", - "integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==", - "engines": { - "node": ">= 6" - } - }, - "node_modules/compress-commons": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/compress-commons/-/compress-commons-6.0.2.tgz", - "integrity": "sha512-6FqVXeETqWPoGcfzrXb37E50NP0LXT8kAMu5ooZayhWWdgEY4lBEEcbQNXtkuKQsGduxiIcI4gOTsxTmuq/bSg==", - "dependencies": { - "crc-32": "^1.2.0", - "crc32-stream": "^6.0.0", - "is-stream": "^2.0.1", - "normalize-path": "^3.0.0", - "readable-stream": "^4.0.0" - }, - "engines": { - "node": ">= 14" - } - }, "node_modules/concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" }, - "node_modules/core-util-is": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", - "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==" - }, - "node_modules/crc-32": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/crc-32/-/crc-32-1.2.2.tgz", - "integrity": "sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==", - "bin": { - "crc32": "bin/crc32.njs" - }, - "engines": { - "node": ">=0.8" - } - }, - "node_modules/crc32-stream": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/crc32-stream/-/crc32-stream-6.0.0.tgz", - "integrity": "sha512-piICUB6ei4IlTv1+653yq5+KoqfBYmj9bw6LqXoOneTMDXk5nM1qt12mFW1caG3LlJXEKW1Bp0WggEmIfQB34g==", - "dependencies": { - "crc-32": "^1.2.0", - "readable-stream": "^4.0.0" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": 
"sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/crypto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/crypto/-/crypto-1.0.1.tgz", - "integrity": "sha512-VxBKmeNcqQdiUQUW2Tzq0t377b54N2bMtXO/qiLa+6eRRmmC4qT3D4OnTGoT/U6O9aklQ/jTwbOtRMTTY8G0Ig==", - "deprecated": "This package is no longer supported. It's now a built-in Node module. If you've depended on crypto, you should switch to the one that's built-in." - }, "node_modules/delayed-stream": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", @@ -879,53 +340,6 @@ "node": ">=0.4.0" } }, - "node_modules/deprecation": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", - "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" - }, - "node_modules/dot-object": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/dot-object/-/dot-object-2.1.5.tgz", - "integrity": "sha512-xHF8EP4XH/Ba9fvAF2LDd5O3IITVolerVV6xvkxoM8zlGEiCUrggpAnHyOoKJKCrhvPcGATFAUwIujj7bRG5UA==", - "dependencies": { - "commander": "^6.1.0", - "glob": "^7.1.6" - }, - "bin": { - "dot-object": "bin/dot-object" - } - }, - "node_modules/dot-object/node_modules/glob": { - "version": "7.2.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", - "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", - "deprecated": "Glob versions prior to v9 are no longer supported", - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.1.1", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - }, - "engines": { - "node": "*" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/eastasianwidth": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", - "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==" - }, - "node_modules/emoji-regex": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" - }, "node_modules/event-target-shim": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", @@ -942,26 +356,6 @@ "node": ">=0.8.x" } }, - "node_modules/fast-fifo": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/fast-fifo/-/fast-fifo-1.3.2.tgz", - "integrity": "sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ==" - }, - "node_modules/foreground-child": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.2.0.tgz", - "integrity": "sha512-CrWQNaEl1/6WeZoarcM9LHupTo3RpZO2Pdk1vktwzPiQTsJnAKJmm3TACKeG5UZbWDfaH2AbvYxzP96y0MT7fA==", - "dependencies": { - "cross-spawn": "^7.0.0", - "signal-exit": "^4.0.1" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/form-data": { "version": "2.5.1", "resolved": 
"https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", @@ -975,211 +369,6 @@ "node": ">= 0.12" } }, - "node_modules/fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" - }, - "node_modules/glob": { - "version": "10.4.1", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.1.tgz", - "integrity": "sha512-2jelhlq3E4ho74ZyVLN03oKdAZVUa6UDZzFLVH1H7dnoax+y9qyaq8zBkfDIggjniU19z0wU18y16jMB2eyVIw==", - "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^3.1.2", - "minimatch": "^9.0.4", - "minipass": "^7.1.2", - "path-scurry": "^1.11.1" - }, - "bin": { - "glob": "dist/esm/bin.mjs" - }, - "engines": { - "node": ">=16 || 14 >=14.18" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/glob/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/glob/node_modules/minimatch": { - "version": "9.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.4.tgz", - "integrity": "sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==", - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/graceful-fs": { - "version": "4.2.11", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", - "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==" - }, - "node_modules/ieee754": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", - "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, - "node_modules/inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", - "deprecated": "This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", - "dependencies": { - "once": "^1.3.0", - "wrappy": "1" - } - }, - "node_modules/inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" - }, - "node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "engines": { - "node": ">=8" - } - }, - "node_modules/is-plain-object": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz", - "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-stream": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", - "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" - }, - "node_modules/isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" - }, - "node_modules/jackspeak": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.0.tgz", - "integrity": "sha512-JVYhQnN59LVPFCEcVa2C3CrEKYacvjRfqIQl+h8oi91aLYQVWRYbxjPcv1bUiUy/kLmQaANrYfNMCO3kuEDHfw==", - "dependencies": { - "@isaacs/cliui": "^8.0.2" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - }, - "optionalDependencies": { - "@pkgjs/parseargs": "^0.11.0" - } - }, - "node_modules/jwt-decode": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/jwt-decode/-/jwt-decode-3.1.2.tgz", - "integrity": "sha512-UfpWE/VZn0iP50d8cz9NrZLM9lSWhcJ+0Gt/nm4by88UL+J1SiKN8/5dkjMmbEzwL2CAe+67GsegCbIKtbp75A==" - }, - "node_modules/lazystream": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/lazystream/-/lazystream-1.0.1.tgz", - "integrity": "sha512-b94GiNHQNy6JNTrt5w6zNyffMrNkXZb3KTkCZJb2V1xaEGCk093vkZ2jk3tpaeP33/OiXC+WvK9AxUebnf5nbw==", - "dependencies": { - "readable-stream": "^2.0.5" - }, - "engines": { - "node": ">= 0.6.3" - } - }, - "node_modules/lazystream/node_modules/readable-stream": { - "version": "2.3.8", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", - "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", - "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "node_modules/lazystream/node_modules/safe-buffer": { - "version": 
"5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" - }, - "node_modules/lazystream/node_modules/string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dependencies": { - "safe-buffer": "~5.1.0" - } - }, - "node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" - }, - "node_modules/lower-case": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz", - "integrity": "sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==", - "dependencies": { - "tslib": "^2.0.3" - } - }, - "node_modules/lru-cache": { - "version": "10.2.2", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.2.2.tgz", - "integrity": "sha512-9hp3Vp2/hFQUiIwKo8XCeFVnrg8Pk3TYNPIR7tJADKi5YfcF7vEaK7avFHTlSy3kOKYaJQaalfEo6YuXdceBOQ==", - "engines": { - "node": "14 || >=16.14" - } - }, "node_modules/mime-db": { "version": "1.52.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", @@ -1210,42 +399,6 @@ "node": "*" } }, - "node_modules/minimist": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", - "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/minipass": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", - "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, - "node_modules/mkdirp": { - "version": "0.5.6", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", - "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", - "dependencies": { - "minimist": "^1.2.6" - }, - "bin": { - "mkdirp": "bin/cmd.js" - } - }, - "node_modules/no-case": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz", - "integrity": "sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==", - "dependencies": { - "lower-case": "^2.0.2", - "tslib": "^2.0.3" - } - }, "node_modules/node-fetch": { "version": "2.6.12", "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.12.tgz", @@ -1265,81 +418,6 @@ } } }, - "node_modules/normalize-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", - "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "dependencies": { - "wrappy": "1" - } - }, - "node_modules/pascal-case": { - "version": "3.1.2", - 
"resolved": "https://registry.npmjs.org/pascal-case/-/pascal-case-3.1.2.tgz", - "integrity": "sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g==", - "dependencies": { - "no-case": "^3.0.4", - "tslib": "^2.0.3" - } - }, - "node_modules/path-is-absolute": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "engines": { - "node": ">=8" - } - }, - "node_modules/path-scurry": { - "version": "1.11.1", - "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", - "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", - "dependencies": { - "lru-cache": "^10.2.0", - "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" - }, - "engines": { - "node": ">=16 || 14 >=14.18" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/path-to-regexp": { - "version": "6.2.2", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-6.2.2.tgz", - "integrity": "sha512-GQX3SSMokngb36+whdpRXE+3f9V8UzyAorlYvOGx87ufGHehNTn5lCxrKtLyZ4Yl/wEKnNnr98ZzOwwDZV5ogw==" - }, - "node_modules/prettier": { - "version": "2.8.8", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.8.8.tgz", - "integrity": "sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q==", - "bin": { - "prettier": "bin-prettier.js" - }, - "engines": { - "node": ">=10.13.0" - }, - "funding": { - "url": "https://github.com/prettier/prettier?sponsor=1" - } - }, "node_modules/process": { "version": "0.11.10", "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", @@ -1348,77 +426,6 @@ "node": ">= 0.6.0" } }, - "node_modules/process-nextick-args": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", - "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" - }, - "node_modules/queue-tick": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/queue-tick/-/queue-tick-1.0.1.tgz", - "integrity": "sha512-kJt5qhMxoszgU/62PLP1CJytzd2NKetjSRnyuj31fDd3Rlcz3fzlFdFLD1SItunPwyqEOkca6GbV612BWfaBag==" - }, - "node_modules/readable-stream": { - "version": "4.5.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.5.2.tgz", - "integrity": "sha512-yjavECdqeZ3GLXNgRXgeQEdz9fvDDkNKyHnbHRFtOr7/LcfgBcmct7t/ET+HaCTqfh06OzoAxrkN/IfjJBVe+g==", - "dependencies": { - "abort-controller": "^3.0.0", - "buffer": "^6.0.3", - "events": "^3.3.0", - "process": "^0.11.10", - "string_decoder": "^1.3.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - } - }, - "node_modules/readdir-glob": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/readdir-glob/-/readdir-glob-1.1.3.tgz", - "integrity": "sha512-v05I2k7xN8zXvPD9N+z/uhXPaj0sUFCe2rcWZIpBsqxfP7xXFQ0tipAd/wjj1YxWyWtUS5IDJpOG82JKt2EAVA==", - "dependencies": { - "minimatch": "^5.1.0" - } - }, - "node_modules/readdir-glob/node_modules/brace-expansion": { - "version": "2.0.1", 
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/readdir-glob/node_modules/minimatch": { - "version": "5.1.6", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", - "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, "node_modules/sax": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz", @@ -1432,185 +439,11 @@ "semver": "bin/semver.js" } }, - "node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "engines": { - "node": ">=8" - } - }, - "node_modules/signal-exit": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", - "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/streamx": { - "version": "2.18.0", - "resolved": "https://registry.npmjs.org/streamx/-/streamx-2.18.0.tgz", - "integrity": "sha512-LLUC1TWdjVdn1weXGcSxyTR3T4+acB6tVGXT95y0nGbca4t4o/ng1wKAGTljm9VicuCVLvRlqFYXYy5GwgM7sQ==", - "dependencies": { - "fast-fifo": "^1.3.2", - "queue-tick": "^1.0.1", - "text-decoder": "^1.1.0" - }, - "optionalDependencies": { - "bare-events": "^2.2.0" - } - }, - "node_modules/string_decoder": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", - "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - "dependencies": { - "safe-buffer": "~5.2.0" - } - }, - "node_modules/string-width": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", - "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", - "dependencies": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/string-width-cjs": { - "name": "string-width", - "version": "4.2.3", - 
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/string-width-cjs/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "engines": { - "node": ">=8" - } - }, - "node_modules/string-width-cjs/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" - }, - "node_modules/string-width-cjs/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-ansi": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", - "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", - "dependencies": { - "ansi-regex": "^6.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/strip-ansi?sponsor=1" - } - }, - "node_modules/strip-ansi-cjs": { - "name": "strip-ansi", - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-ansi-cjs/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "engines": { - "node": ">=8" - } - }, - "node_modules/tar-stream": { - "version": "3.1.7", - "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-3.1.7.tgz", - "integrity": "sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ==", - "dependencies": { - "b4a": "^1.6.4", - "fast-fifo": "^1.2.0", - "streamx": "^2.15.0" - } - }, - "node_modules/text-decoder": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/text-decoder/-/text-decoder-1.1.0.tgz", - "integrity": "sha512-TmLJNj6UgX8xcUZo4UDStGQtDiTzF7BzWlzn9g7UWrjkpHr5uJTK1ld16wZ3LXb2vb6jH8qU89dW5whuMdXYdw==", - "dependencies": { - "b4a": "^1.6.4" - } - }, "node_modules/tr46": { "version": "0.0.3", "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" }, - "node_modules/traverse": { - "version": "0.3.9", - "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.3.9.tgz", - "integrity": "sha512-iawgk0hLP3SxGKDfnDJf8wTz4p2qImnyihM5Hh/sGvQ3K37dPi/w8sRhdNIxYA1TwFwc5mDhIJq+O0RsvXBKdQ==", - "engines": { - "node": "*" - } - }, - 
"node_modules/ts-poet": { - "version": "4.15.0", - "resolved": "https://registry.npmjs.org/ts-poet/-/ts-poet-4.15.0.tgz", - "integrity": "sha512-sLLR8yQBvHzi9d4R1F4pd+AzQxBfzOSSjfxiJxQhkUoH5bL7RsAC6wgvtVUQdGqiCsyS9rT6/8X2FI7ipdir5g==", - "dependencies": { - "lodash": "^4.17.15", - "prettier": "^2.5.1" - } - }, "node_modules/tslib": { "version": "2.6.1", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.1.tgz", @@ -1624,34 +457,6 @@ "node": ">=0.6.11 <=0.7.0 || >=0.7.3" } }, - "node_modules/twirp-ts": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/twirp-ts/-/twirp-ts-2.5.0.tgz", - "integrity": "sha512-JTKIK5Pf/+3qCrmYDFlqcPPUx+ohEWKBaZy8GL8TmvV2VvC0SXVyNYILO39+GCRbqnuP6hBIF+BVr8ZxRz+6fw==", - "dependencies": { - "@protobuf-ts/plugin-framework": "^2.0.7", - "camel-case": "^4.1.2", - "dot-object": "^2.1.4", - "path-to-regexp": "^6.2.0", - "ts-poet": "^4.5.0", - "yaml": "^1.10.2" - }, - "bin": { - "protoc-gen-twirp_ts": "protoc-gen-twirp_ts" - }, - "peerDependencies": { - "@protobuf-ts/plugin": "^2.5.0", - "ts-proto": "^1.81.3" - }, - "peerDependenciesMeta": { - "@protobuf-ts/plugin": { - "optional": true - }, - "ts-proto": { - "optional": true - } - } - }, "node_modules/typescript": { "version": "5.2.2", "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.2.2.tgz", @@ -1679,109 +484,6 @@ "webidl-conversions": "^3.0.0" } }, - "node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/wrap-ansi": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", - "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", - "dependencies": { - "ansi-styles": "^6.1.0", - "string-width": "^5.0.1", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/wrap-ansi-cjs": { - "name": "wrap-ansi", - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "engines": { - "node": ">=8" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": { - "version": "8.0.0", - 
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" - }, - "node_modules/wrap-ansi-cjs/node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" - }, "node_modules/xml2js": { "version": "0.5.0", "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.5.0.tgz", @@ -1801,51 +503,9 @@ "engines": { "node": ">=4.0" } - }, - "node_modules/yaml": { - "version": "1.10.2", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", - "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", - "engines": { - "node": ">= 6" - } - }, - "node_modules/zip-stream": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/zip-stream/-/zip-stream-6.0.1.tgz", - "integrity": "sha512-zK7YHHz4ZXpW89AHXUPbQVGKI7uvkd3hzusTdotCg1UxyaVtg0zFJSTfW/Dq5f7OBBVnq6cZIaC8Ti4hb6dtCA==", - "dependencies": { - "archiver-utils": "^5.0.0", - "compress-commons": "^6.0.2", - "readable-stream": "^4.0.0" - }, - "engines": { - "node": ">= 14" - } } }, "dependencies": { - "@actions/artifact": { - "version": "2.1.7", - "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-2.1.7.tgz", - "integrity": "sha512-iIFsTPZnb182dBc+Is5v7ZqojC4ydO8Ru4/PD8Azg2diV//fdW3H6biEH/utUlNhwfOuHxZpC/QSQsU5KDEuuw==", - "requires": { - "@actions/core": "^1.10.0", - "@actions/github": "^5.1.1", - "@actions/http-client": "^2.1.0", - "@azure/storage-blob": "^12.15.0", - "@octokit/core": "^3.5.1", - "@octokit/plugin-request-log": "^1.0.4", - "@octokit/plugin-retry": "^3.0.9", - "@octokit/request-error": "^5.0.0", - "@protobuf-ts/plugin": "^2.2.3-alpha.1", - "archiver": "^7.0.1", - "crypto": "^1.0.1", - "jwt-decode": "^3.1.2", - "twirp-ts": "^2.5.0", - "unzip-stream": "^0.3.1" - } - }, "@actions/core": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.11.1.tgz", @@ -1863,17 +523,6 @@ "@actions/io": "^1.0.1" } }, - "@actions/github": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/@actions/github/-/github-5.1.1.tgz", - "integrity": "sha512-Nk59rMDoJaV+mHCOJPXuvB1zIbomlKS0dmSIqPGxd0enAXBnOfn4VWF+CGtRCwXZG9Epa54tZA7VIRlJDS8A6g==", - "requires": { - "@actions/http-client": "^2.0.1", - "@octokit/core": "^3.6.0", - "@octokit/plugin-paginate-rest": "^2.17.0", - "@octokit/plugin-rest-endpoint-methods": "^5.13.0" - } - }, "@actions/glob": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/@actions/glob/-/glob-0.1.2.tgz", @@ -2038,232 +687,11 @@ "tslib": "^2.2.0" 
} }, - "@isaacs/cliui": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", - "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", - "requires": { - "string-width": "^5.1.2", - "string-width-cjs": "npm:string-width@^4.2.0", - "strip-ansi": "^7.0.1", - "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", - "wrap-ansi": "^8.1.0", - "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" - } - }, - "@octokit/auth-token": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-2.5.0.tgz", - "integrity": "sha512-r5FVUJCOLl19AxiuZD2VRZ/ORjp/4IN98Of6YJoJOkY75CIBuYfmiNHGrDwXr+aLGG55igl9QrxX3hbiXlLb+g==", - "requires": { - "@octokit/types": "^6.0.3" - } - }, - "@octokit/core": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/@octokit/core/-/core-3.6.0.tgz", - "integrity": "sha512-7RKRKuA4xTjMhY+eG3jthb3hlZCsOwg3rztWh75Xc+ShDWOfDDATWbeZpAHBNRpm4Tv9WgBMOy1zEJYXG6NJ7Q==", - "requires": { - "@octokit/auth-token": "^2.4.4", - "@octokit/graphql": "^4.5.8", - "@octokit/request": "^5.6.3", - "@octokit/request-error": "^2.0.5", - "@octokit/types": "^6.0.3", - "before-after-hook": "^2.2.0", - "universal-user-agent": "^6.0.0" - }, - "dependencies": { - "@octokit/request-error": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-2.1.0.tgz", - "integrity": "sha512-1VIvgXxs9WHSjicsRwq8PlR2LR2x6DwsJAaFgzdi0JfJoGSO8mYI/cHJQ+9FbN21aa+DrgNLnwObmyeSC8Rmpg==", - "requires": { - "@octokit/types": "^6.0.3", - "deprecation": "^2.0.0", - "once": "^1.4.0" - } - } - } - }, - "@octokit/endpoint": { - "version": "6.0.12", - "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-6.0.12.tgz", - "integrity": "sha512-lF3puPwkQWGfkMClXb4k/eUT/nZKQfxinRWJrdZaJO85Dqwo/G0yOC434Jr2ojwafWJMYqFGFa5ms4jJUgujdA==", - "requires": { - "@octokit/types": "^6.0.3", - "is-plain-object": "^5.0.0", - "universal-user-agent": "^6.0.0" - } - }, - "@octokit/graphql": { - "version": "4.8.0", - "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-4.8.0.tgz", - "integrity": "sha512-0gv+qLSBLKF0z8TKaSKTsS39scVKF9dbMxJpj3U0vC7wjNWFuIpL/z76Qe2fiuCbDRcJSavkXsVtMS6/dtQQsg==", - "requires": { - "@octokit/request": "^5.6.0", - "@octokit/types": "^6.0.3", - "universal-user-agent": "^6.0.0" - } - }, - "@octokit/openapi-types": { - "version": "12.11.0", - "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-12.11.0.tgz", - "integrity": "sha512-VsXyi8peyRq9PqIz/tpqiL2w3w80OgVMwBHltTml3LmVvXiphgeqmY9mvBw9Wu7e0QWk/fqD37ux8yP5uVekyQ==" - }, - "@octokit/plugin-paginate-rest": { - "version": "2.21.3", - "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-2.21.3.tgz", - "integrity": "sha512-aCZTEf0y2h3OLbrgKkrfFdjRL6eSOo8komneVQJnYecAxIej7Bafor2xhuDJOIFau4pk0i/P28/XgtbyPF0ZHw==", - "requires": { - "@octokit/types": "^6.40.0" - } - }, - "@octokit/plugin-request-log": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@octokit/plugin-request-log/-/plugin-request-log-1.0.4.tgz", - "integrity": "sha512-mLUsMkgP7K/cnFEw07kWqXGF5LKrOkD+lhCrKvPHXWDywAwuDUeDwWBpc69XK3pNX0uKiVt8g5z96PJ6z9xCFA==", - "requires": {} - }, - "@octokit/plugin-rest-endpoint-methods": { - "version": "5.16.2", - "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-5.16.2.tgz", - "integrity": 
"sha512-8QFz29Fg5jDuTPXVtey05BLm7OB+M8fnvE64RNegzX7U+5NUXcOcnpTIK0YfSHBg8gYd0oxIq3IZTe9SfPZiRw==", - "requires": { - "@octokit/types": "^6.39.0", - "deprecation": "^2.3.1" - } - }, - "@octokit/plugin-retry": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/@octokit/plugin-retry/-/plugin-retry-3.0.9.tgz", - "integrity": "sha512-r+fArdP5+TG6l1Rv/C9hVoty6tldw6cE2pRHNGmFPdyfrc696R6JjrQ3d7HdVqGwuzfyrcaLAKD7K8TX8aehUQ==", - "requires": { - "@octokit/types": "^6.0.3", - "bottleneck": "^2.15.3" - } - }, - "@octokit/request": { - "version": "5.6.3", - "resolved": "https://registry.npmjs.org/@octokit/request/-/request-5.6.3.tgz", - "integrity": "sha512-bFJl0I1KVc9jYTe9tdGGpAMPy32dLBXXo1dS/YwSCTL/2nd9XeHsY616RE3HPXDVk+a+dBuzyz5YdlXwcDTr2A==", - "requires": { - "@octokit/endpoint": "^6.0.1", - "@octokit/request-error": "^2.1.0", - "@octokit/types": "^6.16.1", - "is-plain-object": "^5.0.0", - "node-fetch": "^2.6.7", - "universal-user-agent": "^6.0.0" - }, - "dependencies": { - "@octokit/request-error": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-2.1.0.tgz", - "integrity": "sha512-1VIvgXxs9WHSjicsRwq8PlR2LR2x6DwsJAaFgzdi0JfJoGSO8mYI/cHJQ+9FbN21aa+DrgNLnwObmyeSC8Rmpg==", - "requires": { - "@octokit/types": "^6.0.3", - "deprecation": "^2.0.0", - "once": "^1.4.0" - } - } - } - }, - "@octokit/request-error": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.0.tgz", - "integrity": "sha512-GETXfE05J0+7H2STzekpKObFe765O5dlAKUTLNGeH+x47z7JjXHfsHKo5z21D/o/IOZTUEI6nyWyR+bZVP/n5Q==", - "requires": { - "@octokit/types": "^13.1.0", - "deprecation": "^2.0.0", - "once": "^1.4.0" - }, - "dependencies": { - "@octokit/openapi-types": { - "version": "22.2.0", - "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz", - "integrity": "sha512-QBhVjcUa9W7Wwhm6DBFu6ZZ+1/t/oYxqc2tp81Pi41YNuJinbFRx8B133qVOrAaBbF7D/m0Et6f9/pZt9Rc+tg==" - }, - "@octokit/types": { - "version": "13.5.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.5.0.tgz", - "integrity": "sha512-HdqWTf5Z3qwDVlzCrP8UJquMwunpDiMPt5er+QjGzL4hqr/vBVY/MauQgS1xWxCDT1oMx1EULyqxncdCY/NVSQ==", - "requires": { - "@octokit/openapi-types": "^22.2.0" - } - } - } - }, - "@octokit/types": { - "version": "6.41.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-6.41.0.tgz", - "integrity": "sha512-eJ2jbzjdijiL3B4PrSQaSjuF2sPEQPVCPzBvTHJD9Nz+9dw2SGH4K4xeQJ77YfTq5bRQ+bD8wT11JbeDPmxmGg==", - "requires": { - "@octokit/openapi-types": "^12.11.0" - } - }, "@opentelemetry/api": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.4.1.tgz", "integrity": "sha512-O2yRJce1GOc6PAy3QxFM4NzFiWzvScDC1/5ihYBL6BUEVdq0XMWN01sppE+H6bBXbaFYipjwFLEWLg5PaSOThA==" }, - "@pkgjs/parseargs": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", - "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", - "optional": true - }, - "@protobuf-ts/plugin": { - "version": "2.9.4", - "resolved": "https://registry.npmjs.org/@protobuf-ts/plugin/-/plugin-2.9.4.tgz", - "integrity": "sha512-Db5Laq5T3mc6ERZvhIhkj1rn57/p8gbWiCKxQWbZBBl20wMuqKoHbRw4tuD7FyXi+IkwTToaNVXymv5CY3E8Rw==", - "requires": { - "@protobuf-ts/plugin-framework": "^2.9.4", - "@protobuf-ts/protoc": "^2.9.4", - "@protobuf-ts/runtime": "^2.9.4", - "@protobuf-ts/runtime-rpc": "^2.9.4", - "typescript": 
"^3.9" - }, - "dependencies": { - "typescript": { - "version": "3.9.10", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.10.tgz", - "integrity": "sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q==" - } - } - }, - "@protobuf-ts/plugin-framework": { - "version": "2.9.4", - "resolved": "https://registry.npmjs.org/@protobuf-ts/plugin-framework/-/plugin-framework-2.9.4.tgz", - "integrity": "sha512-9nuX1kjdMliv+Pes8dQCKyVhjKgNNfwxVHg+tx3fLXSfZZRcUHMc1PMwB9/vTvc6gBKt9QGz5ERqSqZc0++E9A==", - "requires": { - "@protobuf-ts/runtime": "^2.9.4", - "typescript": "^3.9" - }, - "dependencies": { - "typescript": { - "version": "3.9.10", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.10.tgz", - "integrity": "sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q==" - } - } - }, - "@protobuf-ts/protoc": { - "version": "2.9.4", - "resolved": "https://registry.npmjs.org/@protobuf-ts/protoc/-/protoc-2.9.4.tgz", - "integrity": "sha512-hQX+nOhFtrA+YdAXsXEDrLoGJqXHpgv4+BueYF0S9hy/Jq0VRTVlJS1Etmf4qlMt/WdigEes5LOd/LDzui4GIQ==" - }, - "@protobuf-ts/runtime": { - "version": "2.9.4", - "resolved": "https://registry.npmjs.org/@protobuf-ts/runtime/-/runtime-2.9.4.tgz", - "integrity": "sha512-vHRFWtJJB/SiogWDF0ypoKfRIZ41Kq+G9cEFj6Qm1eQaAhJ1LDFvgZ7Ja4tb3iLOQhz0PaoPnnOijF1qmEqTxg==" - }, - "@protobuf-ts/runtime-rpc": { - "version": "2.9.4", - "resolved": "https://registry.npmjs.org/@protobuf-ts/runtime-rpc/-/runtime-rpc-2.9.4.tgz", - "integrity": "sha512-y9L9JgnZxXFqH5vD4d7j9duWvIJ7AShyBRoNKJGhu9Q27qIbchfzli66H9RvrQNIFk5ER7z1Twe059WZGqERcA==", - "requires": { - "@protobuf-ts/runtime": "^2.9.4" - } - }, "@types/node": { "version": "20.4.6", "resolved": "https://registry.npmjs.org/@types/node/-/node-20.4.6.tgz", @@ -2312,94 +740,16 @@ "event-target-shim": "^5.0.0" } }, - "ansi-regex": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==" - }, - "ansi-styles": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", - "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==" - }, - "archiver": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/archiver/-/archiver-7.0.1.tgz", - "integrity": "sha512-ZcbTaIqJOfCc03QwD468Unz/5Ir8ATtvAHsK+FdXbDIbGfihqh9mrvdcYunQzqn4HrvWWaFyaxJhGZagaJJpPQ==", - "requires": { - "archiver-utils": "^5.0.2", - "async": "^3.2.4", - "buffer-crc32": "^1.0.0", - "readable-stream": "^4.0.0", - "readdir-glob": "^1.1.2", - "tar-stream": "^3.0.0", - "zip-stream": "^6.0.1" - } - }, - "archiver-utils": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/archiver-utils/-/archiver-utils-5.0.2.tgz", - "integrity": "sha512-wuLJMmIBQYCsGZgYLTy5FIB2pF6Lfb6cXMSF8Qywwk3t20zWnAi7zLcQFdKQmIB8wyZpY5ER38x08GbwtR2cLA==", - "requires": { - "glob": "^10.0.0", - "graceful-fs": "^4.2.0", - "is-stream": "^2.0.1", - "lazystream": "^1.0.0", - "lodash": "^4.17.15", - "normalize-path": "^3.0.0", - "readable-stream": "^4.0.0" - } - }, - "async": { - "version": "3.2.5", - "resolved": "https://registry.npmjs.org/async/-/async-3.2.5.tgz", - "integrity": "sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg==" - }, "asynckit": { "version": "0.4.0", "resolved": 
"https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" }, - "b4a": { - "version": "1.6.6", - "resolved": "https://registry.npmjs.org/b4a/-/b4a-1.6.6.tgz", - "integrity": "sha512-5Tk1HLk6b6ctmjIkAcU/Ujv/1WqiDl0F0JdRCR80VsOcUlHcu7pWeWRlOqQLHfDEsVx9YH/aif5AG4ehoCtTmg==" - }, "balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" }, - "bare-events": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/bare-events/-/bare-events-2.4.2.tgz", - "integrity": "sha512-qMKFd2qG/36aA4GwvKq8MxnPgCQAmBWmSyLWsJcbn8v03wvIPQ/hG1Ms8bPzndZxMDoHpxez5VOS+gC9Yi24/Q==", - "optional": true - }, - "base64-js": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", - "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==" - }, - "before-after-hook": { - "version": "2.2.3", - "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz", - "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==" - }, - "binary": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/binary/-/binary-0.3.0.tgz", - "integrity": "sha512-D4H1y5KYwpJgK8wk1Cue5LLPgmwHKYSChkbspQg5JtVuR5ulGckxfR62H3AE9UDkdMC8yyXlqYihuz3Aqg2XZg==", - "requires": { - "buffers": "~0.1.1", - "chainsaw": "~0.1.0" - } - }, - "bottleneck": { - "version": "2.19.5", - "resolved": "https://registry.npmjs.org/bottleneck/-/bottleneck-2.19.5.tgz", - "integrity": "sha512-VHiNCbI1lKdl44tGrhNfU3lup0Tj/ZBMJB5/2ZbNXRCPuRCO7ed2mgcK4r17y+KB2EfuYuRaVlwNbAeaWGSpbw==" - }, "brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", @@ -2409,55 +759,6 @@ "concat-map": "0.0.1" } }, - "buffer": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", - "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", - "requires": { - "base64-js": "^1.3.1", - "ieee754": "^1.2.1" - } - }, - "buffer-crc32": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-1.0.0.tgz", - "integrity": "sha512-Db1SbgBS/fg/392AblrMJk97KggmvYhr4pB5ZIMTWtaivCPMWLkmb7m21cJvpvgK+J3nsU2CmmixNBZx4vFj/w==" - }, - "buffers": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/buffers/-/buffers-0.1.1.tgz", - "integrity": "sha512-9q/rDEGSb/Qsvv2qvzIzdluL5k7AaJOTrw23z9reQthrbF7is4CtlT0DXyO1oei2DCp4uojjzQ7igaSHp1kAEQ==" - }, - "camel-case": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/camel-case/-/camel-case-4.1.2.tgz", - "integrity": "sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw==", - "requires": { - "pascal-case": "^3.1.2", - "tslib": "^2.0.3" - } - }, - "chainsaw": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/chainsaw/-/chainsaw-0.1.0.tgz", - "integrity": "sha512-75kWfWt6MEKNC8xYXIdRpDehRYY/tNSgwKaJq+dbbDcxORuVrrQ+SEHoWsniVn9XPYfP4gmdWIeDk/4YNp1rNQ==", - "requires": { - "traverse": ">=0.3.0 <0.4" - } - }, - "color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", 
- "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "requires": { - "color-name": "~1.1.4" - } - }, - "color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" - }, "combined-stream": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", @@ -2466,106 +767,16 @@ "delayed-stream": "~1.0.0" } }, - "commander": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz", - "integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==" - }, - "compress-commons": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/compress-commons/-/compress-commons-6.0.2.tgz", - "integrity": "sha512-6FqVXeETqWPoGcfzrXb37E50NP0LXT8kAMu5ooZayhWWdgEY4lBEEcbQNXtkuKQsGduxiIcI4gOTsxTmuq/bSg==", - "requires": { - "crc-32": "^1.2.0", - "crc32-stream": "^6.0.0", - "is-stream": "^2.0.1", - "normalize-path": "^3.0.0", - "readable-stream": "^4.0.0" - } - }, "concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" }, - "core-util-is": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", - "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==" - }, - "crc-32": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/crc-32/-/crc-32-1.2.2.tgz", - "integrity": "sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==" - }, - "crc32-stream": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/crc32-stream/-/crc32-stream-6.0.0.tgz", - "integrity": "sha512-piICUB6ei4IlTv1+653yq5+KoqfBYmj9bw6LqXoOneTMDXk5nM1qt12mFW1caG3LlJXEKW1Bp0WggEmIfQB34g==", - "requires": { - "crc-32": "^1.2.0", - "readable-stream": "^4.0.0" - } - }, - "cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "requires": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - } - }, - "crypto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/crypto/-/crypto-1.0.1.tgz", - "integrity": "sha512-VxBKmeNcqQdiUQUW2Tzq0t377b54N2bMtXO/qiLa+6eRRmmC4qT3D4OnTGoT/U6O9aklQ/jTwbOtRMTTY8G0Ig==" - }, "delayed-stream": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==" }, - "deprecation": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", - "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" - }, - "dot-object": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/dot-object/-/dot-object-2.1.5.tgz", - "integrity": "sha512-xHF8EP4XH/Ba9fvAF2LDd5O3IITVolerVV6xvkxoM8zlGEiCUrggpAnHyOoKJKCrhvPcGATFAUwIujj7bRG5UA==", - "requires": { - "commander": "^6.1.0", - "glob": 
"^7.1.6" - }, - "dependencies": { - "glob": { - "version": "7.2.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", - "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", - "requires": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.1.1", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - } - } - } - }, - "eastasianwidth": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", - "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==" - }, - "emoji-regex": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" - }, "event-target-shim": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", @@ -2576,20 +787,6 @@ "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==" }, - "fast-fifo": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/fast-fifo/-/fast-fifo-1.3.2.tgz", - "integrity": "sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ==" - }, - "foreground-child": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.2.0.tgz", - "integrity": "sha512-CrWQNaEl1/6WeZoarcM9LHupTo3RpZO2Pdk1vktwzPiQTsJnAKJmm3TACKeG5UZbWDfaH2AbvYxzP96y0MT7fA==", - "requires": { - "cross-spawn": "^7.0.0", - "signal-exit": "^4.0.1" - } - }, "form-data": { "version": "2.5.1", "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", @@ -2600,159 +797,6 @@ "mime-types": "^2.1.12" } }, - "fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" - }, - "glob": { - "version": "10.4.1", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.1.tgz", - "integrity": "sha512-2jelhlq3E4ho74ZyVLN03oKdAZVUa6UDZzFLVH1H7dnoax+y9qyaq8zBkfDIggjniU19z0wU18y16jMB2eyVIw==", - "requires": { - "foreground-child": "^3.1.0", - "jackspeak": "^3.1.2", - "minimatch": "^9.0.4", - "minipass": "^7.1.2", - "path-scurry": "^1.11.1" - }, - "dependencies": { - "brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "requires": { - "balanced-match": "^1.0.0" - } - }, - "minimatch": { - "version": "9.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.4.tgz", - "integrity": "sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==", - "requires": { - "brace-expansion": "^2.0.1" - } - } - } - }, - "graceful-fs": { - "version": "4.2.11", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", - "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==" - }, - "ieee754": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", - 
"integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==" - }, - "inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", - "requires": { - "once": "^1.3.0", - "wrappy": "1" - } - }, - "inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" - }, - "is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" - }, - "is-plain-object": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz", - "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==" - }, - "is-stream": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", - "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==" - }, - "isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" - }, - "isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" - }, - "jackspeak": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.0.tgz", - "integrity": "sha512-JVYhQnN59LVPFCEcVa2C3CrEKYacvjRfqIQl+h8oi91aLYQVWRYbxjPcv1bUiUy/kLmQaANrYfNMCO3kuEDHfw==", - "requires": { - "@isaacs/cliui": "^8.0.2", - "@pkgjs/parseargs": "^0.11.0" - } - }, - "jwt-decode": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/jwt-decode/-/jwt-decode-3.1.2.tgz", - "integrity": "sha512-UfpWE/VZn0iP50d8cz9NrZLM9lSWhcJ+0Gt/nm4by88UL+J1SiKN8/5dkjMmbEzwL2CAe+67GsegCbIKtbp75A==" - }, - "lazystream": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/lazystream/-/lazystream-1.0.1.tgz", - "integrity": "sha512-b94GiNHQNy6JNTrt5w6zNyffMrNkXZb3KTkCZJb2V1xaEGCk093vkZ2jk3tpaeP33/OiXC+WvK9AxUebnf5nbw==", - "requires": { - "readable-stream": "^2.0.5" - }, - "dependencies": { - "readable-stream": { - "version": "2.3.8", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", - "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" - }, - "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": 
"sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "requires": { - "safe-buffer": "~5.1.0" - } - } - } - }, - "lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" - }, - "lower-case": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz", - "integrity": "sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==", - "requires": { - "tslib": "^2.0.3" - } - }, - "lru-cache": { - "version": "10.2.2", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.2.2.tgz", - "integrity": "sha512-9hp3Vp2/hFQUiIwKo8XCeFVnrg8Pk3TYNPIR7tJADKi5YfcF7vEaK7avFHTlSy3kOKYaJQaalfEo6YuXdceBOQ==" - }, "mime-db": { "version": "1.52.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", @@ -2774,33 +818,6 @@ "brace-expansion": "^1.1.7" } }, - "minimist": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", - "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==" - }, - "minipass": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", - "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==" - }, - "mkdirp": { - "version": "0.5.6", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", - "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", - "requires": { - "minimist": "^1.2.6" - } - }, - "no-case": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz", - "integrity": "sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==", - "requires": { - "lower-case": "^2.0.2", - "tslib": "^2.0.3" - } - }, "node-fetch": { "version": "2.6.12", "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.12.tgz", @@ -2809,115 +826,11 @@ "whatwg-url": "^5.0.0" } }, - "normalize-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", - "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==" - }, - "once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "requires": { - "wrappy": "1" - } - }, - "pascal-case": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/pascal-case/-/pascal-case-3.1.2.tgz", - "integrity": "sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g==", - "requires": { - "no-case": "^3.0.4", - "tslib": "^2.0.3" - } - }, - "path-is-absolute": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==" - }, - "path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==" - }, - "path-scurry": { 
- "version": "1.11.1", - "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", - "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", - "requires": { - "lru-cache": "^10.2.0", - "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" - } - }, - "path-to-regexp": { - "version": "6.2.2", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-6.2.2.tgz", - "integrity": "sha512-GQX3SSMokngb36+whdpRXE+3f9V8UzyAorlYvOGx87ufGHehNTn5lCxrKtLyZ4Yl/wEKnNnr98ZzOwwDZV5ogw==" - }, - "prettier": { - "version": "2.8.8", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.8.8.tgz", - "integrity": "sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q==" - }, "process": { "version": "0.11.10", "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==" }, - "process-nextick-args": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", - "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" - }, - "queue-tick": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/queue-tick/-/queue-tick-1.0.1.tgz", - "integrity": "sha512-kJt5qhMxoszgU/62PLP1CJytzd2NKetjSRnyuj31fDd3Rlcz3fzlFdFLD1SItunPwyqEOkca6GbV612BWfaBag==" - }, - "readable-stream": { - "version": "4.5.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.5.2.tgz", - "integrity": "sha512-yjavECdqeZ3GLXNgRXgeQEdz9fvDDkNKyHnbHRFtOr7/LcfgBcmct7t/ET+HaCTqfh06OzoAxrkN/IfjJBVe+g==", - "requires": { - "abort-controller": "^3.0.0", - "buffer": "^6.0.3", - "events": "^3.3.0", - "process": "^0.11.10", - "string_decoder": "^1.3.0" - } - }, - "readdir-glob": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/readdir-glob/-/readdir-glob-1.1.3.tgz", - "integrity": "sha512-v05I2k7xN8zXvPD9N+z/uhXPaj0sUFCe2rcWZIpBsqxfP7xXFQ0tipAd/wjj1YxWyWtUS5IDJpOG82JKt2EAVA==", - "requires": { - "minimatch": "^5.1.0" - }, - "dependencies": { - "brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "requires": { - "balanced-match": "^1.0.0" - } - }, - "minimatch": { - "version": "5.1.6", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", - "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", - "requires": { - "brace-expansion": "^2.0.1" - } - } - } - }, - "safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" - }, "sax": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz", @@ -2928,143 +841,11 @@ "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==" }, - "shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": 
"sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "requires": { - "shebang-regex": "^3.0.0" - } - }, - "shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==" - }, - "signal-exit": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", - "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==" - }, - "streamx": { - "version": "2.18.0", - "resolved": "https://registry.npmjs.org/streamx/-/streamx-2.18.0.tgz", - "integrity": "sha512-LLUC1TWdjVdn1weXGcSxyTR3T4+acB6tVGXT95y0nGbca4t4o/ng1wKAGTljm9VicuCVLvRlqFYXYy5GwgM7sQ==", - "requires": { - "bare-events": "^2.2.0", - "fast-fifo": "^1.3.2", - "queue-tick": "^1.0.1", - "text-decoder": "^1.1.0" - } - }, - "string_decoder": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", - "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - "requires": { - "safe-buffer": "~5.2.0" - } - }, - "string-width": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", - "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", - "requires": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" - } - }, - "string-width-cjs": { - "version": "npm:string-width@4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "requires": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - }, - "emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" - }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "requires": { - "ansi-regex": "^5.0.1" - } - } - } - }, - "strip-ansi": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", - "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", - "requires": { - "ansi-regex": "^6.0.1" - } - }, - "strip-ansi-cjs": { - "version": "npm:strip-ansi@6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "requires": { - "ansi-regex": "^5.0.1" - }, - "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": 
"sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - } - } - }, - "tar-stream": { - "version": "3.1.7", - "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-3.1.7.tgz", - "integrity": "sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ==", - "requires": { - "b4a": "^1.6.4", - "fast-fifo": "^1.2.0", - "streamx": "^2.15.0" - } - }, - "text-decoder": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/text-decoder/-/text-decoder-1.1.0.tgz", - "integrity": "sha512-TmLJNj6UgX8xcUZo4UDStGQtDiTzF7BzWlzn9g7UWrjkpHr5uJTK1ld16wZ3LXb2vb6jH8qU89dW5whuMdXYdw==", - "requires": { - "b4a": "^1.6.4" - } - }, "tr46": { "version": "0.0.3", "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" }, - "traverse": { - "version": "0.3.9", - "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.3.9.tgz", - "integrity": "sha512-iawgk0hLP3SxGKDfnDJf8wTz4p2qImnyihM5Hh/sGvQ3K37dPi/w8sRhdNIxYA1TwFwc5mDhIJq+O0RsvXBKdQ==" - }, - "ts-poet": { - "version": "4.15.0", - "resolved": "https://registry.npmjs.org/ts-poet/-/ts-poet-4.15.0.tgz", - "integrity": "sha512-sLLR8yQBvHzi9d4R1F4pd+AzQxBfzOSSjfxiJxQhkUoH5bL7RsAC6wgvtVUQdGqiCsyS9rT6/8X2FI7ipdir5g==", - "requires": { - "lodash": "^4.17.15", - "prettier": "^2.5.1" - } - }, "tslib": { "version": "2.6.1", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.1.tgz", @@ -3075,19 +856,6 @@ "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==" }, - "twirp-ts": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/twirp-ts/-/twirp-ts-2.5.0.tgz", - "integrity": "sha512-JTKIK5Pf/+3qCrmYDFlqcPPUx+ohEWKBaZy8GL8TmvV2VvC0SXVyNYILO39+GCRbqnuP6hBIF+BVr8ZxRz+6fw==", - "requires": { - "@protobuf-ts/plugin-framework": "^2.0.7", - "camel-case": "^4.1.2", - "dot-object": "^2.1.4", - "path-to-regexp": "^6.2.0", - "ts-poet": "^4.5.0", - "yaml": "^1.10.2" - } - }, "typescript": { "version": "5.2.2", "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.2.2.tgz", @@ -3108,77 +876,6 @@ "webidl-conversions": "^3.0.0" } }, - "which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "requires": { - "isexe": "^2.0.0" - } - }, - "wrap-ansi": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", - "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", - "requires": { - "ansi-styles": "^6.1.0", - "string-width": "^5.0.1", - "strip-ansi": "^7.0.1" - } - }, - "wrap-ansi-cjs": { - "version": "npm:wrap-ansi@7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "requires": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - }, - 
"ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "requires": { - "color-convert": "^2.0.1" - } - }, - "emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" - }, - "string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "requires": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - } - }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "requires": { - "ansi-regex": "^5.0.1" - } - } - } - }, - "wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" - }, "xml2js": { "version": "0.5.0", "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.5.0.tgz", @@ -3192,21 +889,6 @@ "version": "11.0.1", "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==" - }, - "yaml": { - "version": "1.10.2", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", - "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==" - }, - "zip-stream": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/zip-stream/-/zip-stream-6.0.1.tgz", - "integrity": "sha512-zK7YHHz4ZXpW89AHXUPbQVGKI7uvkd3hzusTdotCg1UxyaVtg0zFJSTfW/Dq5f7OBBVnq6cZIaC8Ti4hb6dtCA==", - "requires": { - "archiver-utils": "^5.0.0", - "compress-commons": "^6.0.2", - "readable-stream": "^4.0.0" - } } } -} \ No newline at end of file +} diff --git a/packages/glob/package-lock.json b/packages/glob/package-lock.json index 17817543..665b11d5 100644 --- a/packages/glob/package-lock.json +++ b/packages/glob/package-lock.json @@ -21,7 +21,7 @@ "packages": { "": { "name": "@actions/glob", - "version": "0.4.0", + "version": "0.5.0", "license": "MIT", "dependencies": { "@actions/core": "^1.9.1", diff --git a/packages/http-client/package-lock.json b/packages/http-client/package-lock.json index 823b38b7..c049b7c1 100644 --- a/packages/http-client/package-lock.json +++ b/packages/http-client/package-lock.json @@ -6,7 +6,7 @@ "packages": { "": { "name": "@actions/http-client", - "version": "2.2.1", + "version": "2.2.3", "license": "MIT", "dependencies": { "tunnel": "^0.0.6", From 4e1912a3c34ad5a3b788b909402b83206c1cb3ab Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Thu, 14 Nov 2024 02:08:24 -0800 Subject: [PATCH 030/108] Restore __tests__ --- packages/cache/__tests__/cacheHttpClient.test.ts | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/packages/cache/__tests__/cacheHttpClient.test.ts b/packages/cache/__tests__/cacheHttpClient.test.ts index b8176ba6..21c5ae86 100644 --- 
a/packages/cache/__tests__/cacheHttpClient.test.ts
+++ b/packages/cache/__tests__/cacheHttpClient.test.ts
@@ -1,8 +1,7 @@
-import { getCacheVersion } from '../src/internal/cacheUtils'
-import { downloadCache } from '../src/internal/cacheHttpClient'
-import { CompressionMethod } from '../src/internal/constants'
+import {downloadCache, getCacheVersion} from '../src/internal/cacheHttpClient'
+import {CompressionMethod} from '../src/internal/constants'
 import * as downloadUtils from '../src/internal/downloadUtils'
-import { DownloadOptions, getDownloadOptions } from '../src/options'
+import {DownloadOptions, getDownloadOptions} from '../src/options'
 
 jest.mock('../src/internal/downloadUtils')
 
@@ -129,7 +128,7 @@ test('downloadCache passes options to download methods', async () => {
   const archiveLocation = 'http://foo.blob.core.windows.net/bar/baz'
   const archivePath = '/foo/bar'
 
-  const options: DownloadOptions = { downloadConcurrency: 4 }
+  const options: DownloadOptions = {downloadConcurrency: 4}
 
   await downloadCache(archiveLocation, archivePath, options)
 
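A note on the restored test file above: it relies on Jest's module mocking, where jest.mock() replaces every export of a module with auto-mocked functions before the tests run. A generic sketch of that pattern, assuming a standard Jest setup (the module and its 'add' export here are hypothetical, not part of this repository):

    import * as mathUtils from './mathUtils' // hypothetical module
    jest.mock('./mathUtils') // hoisted above the import at runtime

    test('delegates to the mocked implementation', () => {
      const mocked = mathUtils as jest.Mocked<typeof mathUtils>
      mocked.add.mockReturnValue(42) // stub the hypothetical 'add' export
      expect(mathUtils.add(1, 2)).toBe(42)
      expect(mocked.add).toHaveBeenCalledWith(1, 2)
    })

The cacheHttpClient tests follow the same shape: downloadUtils is auto-mocked, and downloadCache is then asserted to have forwarded its options to the mocked download functions.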
From d109d9c03e01cf55df99c16f5f648bfb3ffe8ccf Mon Sep 17 00:00:00 2001
From: Bassem Dghaidi <568794+Link-@users.noreply.github.com>
Date: Thu, 14 Nov 2024 03:00:43 -0800
Subject: [PATCH 031/108] Handle ACTIONS_CACHE_SERVICE_V2 feature flag

---
 packages/cache/src/internal/config.ts | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/packages/cache/src/internal/config.ts b/packages/cache/src/internal/config.ts
index 117156a7..d980de14 100644
--- a/packages/cache/src/internal/config.ts
+++ b/packages/cache/src/internal/config.ts
@@ -6,9 +6,8 @@ export function getRuntimeToken(): string {
   return token
 }
 
-// TODO: Use the feature flag to determine the cache service version
 export function getCacheServiceVersion(): string {
-  return process.env['ACTIONS_CACHE_SERVICE_VERSION'] || 'v1'
+  return process.env['ACTIONS_CACHE_SERVICE_V2'] ? 'v2' : 'v1';
 }
 
 export function getCacheServiceURL(): string {
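The cut-over above hinges on the mere presence of the ACTIONS_CACHE_SERVICE_V2 environment variable: the ternary tests truthiness, so any non-empty string selects 'v2', and an unset (or empty) variable falls back to 'v1'. A small standalone sketch of that behaviour, mirroring the committed one-liner:

    function getCacheServiceVersion(): string {
      return process.env['ACTIONS_CACHE_SERVICE_V2'] ? 'v2' : 'v1'
    }

    process.env['ACTIONS_CACHE_SERVICE_V2'] = 'true'
    console.log(getCacheServiceVersion()) // 'v2'

    process.env['ACTIONS_CACHE_SERVICE_V2'] = 'false'
    console.log(getCacheServiceVersion()) // still 'v2': 'false' is a non-empty string

    delete process.env['ACTIONS_CACHE_SERVICE_V2']
    console.log(getCacheServiceVersion()) // 'v1'

Worth keeping in mind when wiring the flag: setting it to the literal string 'false' does not disable v2.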
From 9dff82c727b2c306c2f3fb0daf40919be780a426 Mon Sep 17 00:00:00 2001
From: Bassem Dghaidi <568794+Link-@users.noreply.github.com>
Date: Thu, 14 Nov 2024 03:01:04 -0800
Subject: [PATCH 032/108] Port dependencies & remove dependency on toolkit/artifacts

---
 packages/cache/src/cache.ts               | 21 +++----
 packages/cache/src/internal/cacheUtils.ts | 77 +++++++++++++++++++++++
 2 files changed, 84 insertions(+), 14 deletions(-)

diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts
index 2659b848..ab5ccb9e 100644
--- a/packages/cache/src/cache.ts
+++ b/packages/cache/src/cache.ts
@@ -17,8 +17,6 @@ import {
 import { CacheFileSizeLimit } from './internal/constants'
 import { UploadCacheFile } from './internal/blob/upload-cache'
 import { DownloadCacheFile } from './internal/blob/download-cache'
-import { getBackendIdsFromToken, BackendIds } from '@actions/artifact/lib/internal/shared/util'
-
 export class ValidationError extends Error {
   constructor(message: string) {
     super(message)
@@ -62,7 +60,6 @@ function checkKey(key: string): void {
  *
  * @returns boolean return true if Actions cache service feature is available, otherwise false
  */
-
 export function isFeatureAvailable(): boolean {
   return !!process.env['ACTIONS_CACHE_URL']
 }
@@ -215,7 +212,8 @@ async function restoreCachev2(
   restoreKeys = restoreKeys || []
   const keys = [primaryKey, ...restoreKeys]
 
-  core.debug(`Resolved Keys: JSON.stringify(keys)`)
+  core.debug('Resolved Keys:')
+  core.debug(JSON.stringify(keys))
 
   if (keys.length > 10) {
     throw new ValidationError(
@@ -229,7 +227,7 @@ async function restoreCachev2(
   let archivePath = ''
   try {
     const twirpClient = cacheTwirpClient.internalCacheTwirpClient()
-    const backendIds: BackendIds = getBackendIdsFromToken()
+    const backendIds: utils.BackendIds = utils.getBackendIdsFromToken()
     const compressionMethod = await utils.getCompressionMethod()
 
     const request: GetCacheEntryDownloadURLRequest = {
@@ -289,8 +287,7 @@ async function restoreCachev2(
 
     return request.key
   } catch (error) {
-    // TODO: handle all the possible error scenarios
-    throw new Error(`Unable to download and extract cache: ${error.message}`)
+    throw new Error(`Failed to restore: ${error.message}`)
   } finally {
     try {
       await utils.unlinkFile(archivePath)
@@ -450,7 +447,7 @@ async function saveCachev2(
   enableCrossOsArchive = false
 ): Promise<number> {
   // BackendIds are retrieved form the signed JWT
-  const backendIds: BackendIds = getBackendIdsFromToken()
+  const backendIds: utils.BackendIds = utils.getBackendIdsFromToken()
   const compressionMethod = await utils.getCompressionMethod()
   const twirpClient = cacheTwirpClient.internalCacheTwirpClient()
   let cacheId = -1
@@ -504,16 +501,13 @@ async function saveCachev2(
       version: version
     }
     const response: CreateCacheEntryResponse = await twirpClient.CreateCacheEntry(request)
-    core.info(`CreateCacheEntryResponse: ${JSON.stringify(response)}`)
-
-    // TODO: handle the error cases here
     if (!response.ok) {
       throw new ReserveCacheError(
         `Unable to reserve cache with key ${key}, another job may be creating this cache.`
       )
     }
 
-    // TODO: mask the signed upload URL
-    core.debug(`Saving Cache to: ${response.signedUploadUrl}`)
+    core.debug(`Saving Cache to: ${core.setSecret(response.signedUploadUrl)}`)
     await UploadCacheFile(
       response.signedUploadUrl,
       archivePath,
@@ -536,11 +530,10 @@ async function saveCachev2(
       )
     }
 
-    // TODO: this is not great, we should handle the types without parsing
     cacheId = parseInt(finalizeResponse.entryId)
   } catch (error) {
     const typedError = error as Error
-    core.debug(typedError.message)
+    core.warning(`Failed to save: ${typedError.message}`)
   } finally {
     // Try to delete the archive to save space
     try {
diff --git a/packages/cache/src/internal/cacheUtils.ts b/packages/cache/src/internal/cacheUtils.ts
index bd493172..ef09969b 100644
--- a/packages/cache/src/internal/cacheUtils.ts
+++ b/packages/cache/src/internal/cacheUtils.ts
@@ -7,6 +7,7 @@ import * as fs from 'fs'
 import * as path from 'path'
 import * as semver from 'semver'
 import * as util from 'util'
+import jwt_decode from 'jwt-decode'
 import {
   CacheFilename,
   CompressionMethod,
@@ -169,4 +170,80 @@ export function getCacheVersion(
   components.push(versionSalt)
 
   return crypto.createHash('sha256').update(components.join('|')).digest('hex')
-}
\ No newline at end of file
+}
+
+export function getRuntimeToken(): string {
+  const token = process.env['ACTIONS_RUNTIME_TOKEN']
+  if (!token) {
+    throw new Error('Unable to get the ACTIONS_RUNTIME_TOKEN env variable')
+  }
+  return token
+}
+
+export interface BackendIds {
+  workflowRunBackendId: string
+  workflowJobRunBackendId: string
+}
+
+interface ActionsToken {
+  scp: string
+}
+
+const InvalidJwtError = new Error(
+  'Failed to get backend IDs: The provided JWT token is invalid and/or missing claims'
+)
+
+// uses the JWT token claims to get the
+// workflow run and workflow job run backend ids
+export function getBackendIdsFromToken(): BackendIds {
+  const token = getRuntimeToken()
+  const decoded = jwt_decode<ActionsToken>(token)
+  if (!decoded.scp) {
+    throw InvalidJwtError
+  }
+
+  /*
+   * example decoded:
+   * {
+   *   scp: "Actions.ExampleScope Actions.Results:ce7f54c7-61c7-4aae-887f-30da475f5f1a:ca395085-040a-526b-2ce8-bdc85f692774"
+   * }
+   */
+
+  const scpParts = decoded.scp.split(' ')
+  if (scpParts.length === 0) {
+    throw InvalidJwtError
+  }
+  /*
+   * example scpParts:
+   * ["Actions.ExampleScope", "Actions.Results:ce7f54c7-61c7-4aae-887f-30da475f5f1a:ca395085-040a-526b-2ce8-bdc85f692774"]
+   */
+
+  for (const scopes of scpParts) {
+    const scopeParts = scopes.split(':')
+    if (scopeParts?.[0] !== 'Actions.Results') {
+      // not the Actions.Results scope
+      continue
+    }
+
+    /*
+     * example scopeParts:
+     * ["Actions.Results", "ce7f54c7-61c7-4aae-887f-30da475f5f1a", "ca395085-040a-526b-2ce8-bdc85f692774"]
+     */
+    if (scopeParts.length !== 3) {
+      // missing expected number of claims
+      throw InvalidJwtError
+    }
+
+    const ids = {
+      workflowRunBackendId: scopeParts[1],
+      workflowJobRunBackendId: scopeParts[2]
+    }
+
+    core.debug(`Workflow Run Backend ID: ${ids.workflowRunBackendId}`)
+    core.debug(`Workflow Job Run Backend ID: ${ids.workflowJobRunBackendId}`)
+
+    return ids
+  }
+
+  throw InvalidJwtError
+}
\ No newline at end of file
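The getBackendIdsFromToken port above is, at its core, string parsing over the token's scp claim: split on spaces to get the scopes, find the Actions.Results scope, then split on colons to get the two backend IDs. A self-contained sketch of just that parsing step, fed with the example claim from the code comments (plain string operations only; no real token involved):

    const scp =
      'Actions.ExampleScope Actions.Results:ce7f54c7-61c7-4aae-887f-30da475f5f1a:ca395085-040a-526b-2ce8-bdc85f692774'

    // find the scope that carries the backend IDs
    const resultsScope = scp.split(' ').find(s => s.startsWith('Actions.Results:'))
    if (!resultsScope) throw new Error('JWT is missing the Actions.Results scope')

    // 'Actions.Results:<workflowRunBackendId>:<workflowJobRunBackendId>'
    const [, workflowRunBackendId, workflowJobRunBackendId] = resultsScope.split(':')
    console.log(workflowRunBackendId)    // ce7f54c7-61c7-4aae-887f-30da475f5f1a
    console.log(workflowJobRunBackendId) // ca395085-040a-526b-2ce8-bdc85f692774

One detail in the cache.ts hunk is worth flagging for a follow-up: core.setSecret() returns void, so interpolating it directly into the debug template logs the literal string 'undefined' rather than masking and printing the URL. The usual pattern is to call core.setSecret(url) on its own line and then log url.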
'../src/internal/cacheUtils'
+import { CompressionMethod } from '../src/internal/constants'
 import * as downloadUtils from '../src/internal/downloadUtils'
-import {DownloadOptions, getDownloadOptions} from '../src/options'
+import { DownloadOptions, getDownloadOptions } from '../src/options'
 
 jest.mock('../src/internal/downloadUtils')
 
@@ -128,7 +129,7 @@ test('downloadCache passes options to download methods', async () => {
   const archiveLocation = 'http://foo.blob.core.windows.net/bar/baz'
   const archivePath = '/foo/bar'
 
-  const options: DownloadOptions = {downloadConcurrency: 4}
+  const options: DownloadOptions = { downloadConcurrency: 4 }
 
   await downloadCache(archiveLocation, archivePath, options)
 
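The import fix above points at where getCacheVersion now lives after PATCH 032: cacheUtils, where (as visible in that diff's context lines) it hashes its components with SHA-256 over a '|'-joined string. A reduced sketch of that versioning scheme using Node's crypto module (the component values are illustrative; the real function builds them from the cache paths, the compression method, and a version salt):

    import * as crypto from 'crypto'

    function cacheVersion(components: string[]): string {
      // identical inputs always produce the same 64-char hex version
      return crypto.createHash('sha256').update(components.join('|')).digest('hex')
    }

    console.log(cacheVersion(['node_modules', 'zstd', '1.0']))
    console.log(cacheVersion(['node_modules', 'gzip', '1.0'])) // differs: compression changed

A change in any component, even the compression method alone, therefore yields a distinct cache version, which is what keeps zstd and gzip archives from colliding under the same key.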
@@ import { GetCacheEntryDownloadURLRequest, GetCacheEntryDownloadURLResponse } from './generated/results/api/v1/cache' -import { CacheFileSizeLimit } from './internal/constants' -import { UploadCacheFile } from './internal/blob/upload-cache' -import { DownloadCacheFile } from './internal/blob/download-cache' +import {CacheFileSizeLimit} from './internal/constants' +import {UploadCacheFile} from './internal/blob/upload-cache' +import {DownloadCacheFile} from './internal/blob/download-cache' export class ValidationError extends Error { constructor(message: string) { super(message) @@ -86,23 +86,35 @@ export async function restoreCache( const cacheServiceVersion: string = config.getCacheServiceVersion() console.debug(`Cache service version: ${cacheServiceVersion}`) switch (cacheServiceVersion) { - case "v2": - return await restoreCachev2(paths, primaryKey, restoreKeys, options, enableCrossOsArchive) - case "v1": + case 'v2': + return await restoreCachev2( + paths, + primaryKey, + restoreKeys, + options, + enableCrossOsArchive + ) + case 'v1': default: - return await restoreCachev1(paths, primaryKey, restoreKeys, options, enableCrossOsArchive) + return await restoreCachev1( + paths, + primaryKey, + restoreKeys, + options, + enableCrossOsArchive + ) } } /** * Restores cache using the legacy Cache Service - * - * @param paths - * @param primaryKey - * @param restoreKeys - * @param options - * @param enableCrossOsArchive - * @returns + * + * @param paths + * @param primaryKey + * @param restoreKeys + * @param options + * @param enableCrossOsArchive + * @returns */ async function restoreCachev1( paths: string[], @@ -238,12 +250,15 @@ async function restoreCachev2( version: utils.getCacheVersion( paths, compressionMethod, - enableCrossOsArchive, - ), + enableCrossOsArchive + ) } - core.debug(`GetCacheEntryDownloadURLRequest: ${JSON.stringify(twirpClient)}`) - const response: GetCacheEntryDownloadURLResponse = await twirpClient.GetCacheEntryDownloadURL(request) + core.debug( + `GetCacheEntryDownloadURLRequest: ${JSON.stringify(twirpClient)}` + ) + const response: GetCacheEntryDownloadURLResponse = + await twirpClient.GetCacheEntryDownloadURL(request) core.debug(`GetCacheEntryDownloadURLResponse: ${JSON.stringify(response)}`) if (!response.ok) { @@ -266,10 +281,7 @@ async function restoreCachev2( core.debug(`Starting download of artifact to: ${archivePath}`) - await DownloadCacheFile( - response.signedDownloadUrl, - archivePath - ) + await DownloadCacheFile(response.signedDownloadUrl, archivePath) const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath) core.info( @@ -320,9 +332,9 @@ export async function saveCache( const cacheServiceVersion: string = config.getCacheServiceVersion() console.debug(`Cache Service Version: ${cacheServiceVersion}`) switch (cacheServiceVersion) { - case "v2": + case 'v2': return await saveCachev2(paths, key, options, enableCrossOsArchive) - case "v1": + case 'v1': default: return await saveCachev1(paths, key, options, enableCrossOsArchive) } @@ -330,12 +342,12 @@ export async function saveCache( /** * Save cache using the legacy Cache Service - * - * @param paths - * @param key - * @param options - * @param enableCrossOsArchive - * @returns + * + * @param paths + * @param key + * @param options + * @param enableCrossOsArchive + * @returns */ async function saveCachev1( paths: string[], @@ -398,9 +410,9 @@ async function saveCachev1( } else if (reserveCacheResponse?.statusCode === 400) { throw new Error( reserveCacheResponse?.error?.message ?? 
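// A side note on the size arithmetic in the message carried through this hunk:
// the archive size arrives in bytes and is reported as whole mebibytes
// (1024 * 1024 bytes). A minimal sketch of that conversion; formatArchiveSize
// is a hypothetical helper for illustration, not part of this patch.
function formatArchiveSize(archiveFileSize: number): string {
  // matches the Math.round(archiveFileSize / (1024 * 1024)) expression in the message
  const megabytes = Math.round(archiveFileSize / (1024 * 1024))
  return `~${megabytes} MB (${archiveFileSize} B)`
}
// Example: formatArchiveSize(10737418240) === '~10240 MB (10737418240 B)',
// i.e. the 10 GiB CacheFileSizeLimit defined in constants.ts.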
- `Cache size of ~${Math.round( - archiveFileSize / (1024 * 1024) - )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.` + `Cache size of ~${Math.round( + archiveFileSize / (1024 * 1024) + )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.` ) } else { throw new ReserveCacheError( @@ -433,12 +445,12 @@ async function saveCachev1( /** * Save cache using the new Cache Service - * - * @param paths - * @param key - * @param options - * @param enableCrossOsArchive - * @returns + * + * @param paths + * @param key + * @param options + * @param enableCrossOsArchive + * @returns */ async function saveCachev2( paths: string[], @@ -500,7 +512,8 @@ async function saveCachev2( key: key, version: version } - const response: CreateCacheEntryResponse = await twirpClient.CreateCacheEntry(request) + const response: CreateCacheEntryResponse = + await twirpClient.CreateCacheEntry(request) if (!response.ok) { throw new ReserveCacheError( `Unable to reserve cache with key ${key}, another job may be creating this cache.` @@ -508,21 +521,21 @@ async function saveCachev2( } core.debug(`Saving Cache to: ${core.setSecret(response.signedUploadUrl)}`) - await UploadCacheFile( - response.signedUploadUrl, - archivePath, - ) + await UploadCacheFile(response.signedUploadUrl, archivePath) const finalizeRequest: FinalizeCacheEntryUploadRequest = { workflowRunBackendId: backendIds.workflowRunBackendId, workflowJobRunBackendId: backendIds.workflowJobRunBackendId, key: key, version: version, - sizeBytes: `${archiveFileSize}`, + sizeBytes: `${archiveFileSize}` } - const finalizeResponse: FinalizeCacheEntryUploadResponse = await twirpClient.FinalizeCacheEntryUpload(finalizeRequest) - core.debug(`FinalizeCacheEntryUploadResponse: ${JSON.stringify(finalizeResponse)}`) + const finalizeResponse: FinalizeCacheEntryUploadResponse = + await twirpClient.FinalizeCacheEntryUpload(finalizeRequest) + core.debug( + `FinalizeCacheEntryUploadResponse: ${JSON.stringify(finalizeResponse)}` + ) if (!finalizeResponse.ok) { throw new Error( @@ -544,4 +557,4 @@ async function saveCachev2( } return cacheId -} \ No newline at end of file +} diff --git a/packages/cache/src/internal/blob/download-cache.ts b/packages/cache/src/internal/blob/download-cache.ts index 1820cb70..966d4974 100644 --- a/packages/cache/src/internal/blob/download-cache.ts +++ b/packages/cache/src/internal/blob/download-cache.ts @@ -3,15 +3,15 @@ import * as core from '@actions/core' import { BlobClient, BlockBlobClient, - BlobDownloadOptions, + BlobDownloadOptions } from '@azure/storage-blob' export async function DownloadCacheFile( signedUploadURL: string, - archivePath: string, + archivePath: string ): Promise<{}> { const downloadOptions: BlobDownloadOptions = { - maxRetryRequests: 5, + maxRetryRequests: 5 } // TODO: tighten the configuration and pass the appropriate user-agent @@ -21,5 +21,10 @@ export async function DownloadCacheFile( core.debug(`BlobClient: ${JSON.stringify(blobClient)}`) core.debug(`blockBlobClient: ${JSON.stringify(blockBlobClient)}`) - return blockBlobClient.downloadToFile(archivePath, 0, undefined, downloadOptions) -} \ No newline at end of file + return blockBlobClient.downloadToFile( + archivePath, + 0, + undefined, + downloadOptions + ) +} diff --git a/packages/cache/src/internal/blob/upload-cache.ts b/packages/cache/src/internal/blob/upload-cache.ts index e4572d20..5cd5cd6a 100644 --- a/packages/cache/src/internal/blob/upload-cache.ts +++ b/packages/cache/src/internal/blob/upload-cache.ts @@ -7,15 +7,15 @@ 
import { export async function UploadCacheFile( signedUploadURL: string, - archivePath: string, + archivePath: string ): Promise<{}> { // TODO: tighten the configuration and pass the appropriate user-agent // Specify data transfer options const uploadOptions: BlockBlobParallelUploadOptions = { blockSize: 4 * 1024 * 1024, // 4 MiB max block size concurrency: 4, // maximum number of parallel transfer workers - maxSingleShotSize: 8 * 1024 * 1024, // 8 MiB initial transfer size - }; + maxSingleShotSize: 8 * 1024 * 1024 // 8 MiB initial transfer size + } const blobClient: BlobClient = new BlobClient(signedUploadURL) const blockBlobClient: BlockBlobClient = blobClient.getBlockBlobClient() @@ -23,5 +23,5 @@ export async function UploadCacheFile( core.debug(`BlobClient: ${JSON.stringify(blobClient)}`) core.debug(`blockBlobClient: ${JSON.stringify(blockBlobClient)}`) - return blockBlobClient.uploadFile(archivePath, uploadOptions); -} \ No newline at end of file + return blockBlobClient.uploadFile(archivePath, uploadOptions) +} diff --git a/packages/cache/src/internal/cacheHttpClient.ts b/packages/cache/src/internal/cacheHttpClient.ts index 98d6a3bb..051348ec 100644 --- a/packages/cache/src/internal/cacheHttpClient.ts +++ b/packages/cache/src/internal/cacheHttpClient.ts @@ -1,12 +1,12 @@ import * as core from '@actions/core' -import { HttpClient } from '@actions/http-client' -import { BearerCredentialHandler } from '@actions/http-client/lib/auth' +import {HttpClient} from '@actions/http-client' +import {BearerCredentialHandler} from '@actions/http-client/lib/auth' import { RequestOptions, TypedResponse } from '@actions/http-client/lib/interfaces' import * as fs from 'fs' -import { URL } from 'url' +import {URL} from 'url' import * as utils from './cacheUtils' import { ArtifactCacheEntry, @@ -33,7 +33,7 @@ import { retryHttpClientResponse, retryTypedResponse } from './requestUtils' -import { getCacheServiceURL } from './config' +import {getCacheServiceURL} from './config' function getCacheApiUrl(resource: string): string { const baseUrl: string = getCacheServiceURL() @@ -216,7 +216,8 @@ async function uploadChunk( end: number ): Promise { core.debug( - `Uploading chunk of size ${end - start + 1 + `Uploading chunk of size ${ + end - start + 1 } bytes at offset ${start} with content range: ${getContentRange( start, end @@ -312,7 +313,7 @@ async function commitCache( cacheId: number, filesize: number ): Promise> { - const commitCacheRequest: CommitCacheRequest = { size: filesize } + const commitCacheRequest: CommitCacheRequest = {size: filesize} return await retryTypedResponse('commitCache', async () => httpClient.postJson( getCacheApiUrl(`caches/${cacheId.toString()}`), diff --git a/packages/cache/src/internal/cacheUtils.ts b/packages/cache/src/internal/cacheUtils.ts index ef09969b..a7548171 100644 --- a/packages/cache/src/internal/cacheUtils.ts +++ b/packages/cache/src/internal/cacheUtils.ts @@ -246,4 +246,4 @@ export function getBackendIdsFromToken(): BackendIds { } throw InvalidJwtError -} \ No newline at end of file +} diff --git a/packages/cache/src/internal/config.ts b/packages/cache/src/internal/config.ts index d980de14..61d84677 100644 --- a/packages/cache/src/internal/config.ts +++ b/packages/cache/src/internal/config.ts @@ -7,17 +7,21 @@ export function getRuntimeToken(): string { } export function getCacheServiceVersion(): string { - return process.env['ACTIONS_CACHE_SERVICE_V2'] ? 'v2' : 'v1'; + return process.env['ACTIONS_CACHE_SERVICE_V2'] ? 
'v2' : 'v1' } export function getCacheServiceURL(): string { const version = getCacheServiceVersion() switch (version) { case 'v1': - return process.env['ACTIONS_CACHE_URL'] || process.env['ACTIONS_RESULTS_URL'] || "" + return ( + process.env['ACTIONS_CACHE_URL'] || + process.env['ACTIONS_RESULTS_URL'] || + '' + ) case 'v2': - return process.env['ACTIONS_RESULTS_URL'] || "" + return process.env['ACTIONS_RESULTS_URL'] || '' default: throw new Error(`Unsupported cache service version: ${version}`) } -} \ No newline at end of file +} diff --git a/packages/cache/src/internal/constants.ts b/packages/cache/src/internal/constants.ts index bc4e1d7a..8c5d1ee4 100644 --- a/packages/cache/src/internal/constants.ts +++ b/packages/cache/src/internal/constants.ts @@ -37,4 +37,4 @@ export const TarFilename = 'cache.tar' export const ManifestFilename = 'manifest.txt' -export const CacheFileSizeLimit = 10 * Math.pow(1024, 3) // 10GiB per repository \ No newline at end of file +export const CacheFileSizeLimit = 10 * Math.pow(1024, 3) // 10GiB per repository diff --git a/packages/cache/src/internal/shared/cacheTwirpClient.ts b/packages/cache/src/internal/shared/cacheTwirpClient.ts index 29bb845a..9a0f0679 100644 --- a/packages/cache/src/internal/shared/cacheTwirpClient.ts +++ b/packages/cache/src/internal/shared/cacheTwirpClient.ts @@ -1,202 +1,203 @@ -import { info, debug } from '@actions/core' -import { getUserAgentString } from './user-agent' -import { NetworkError, UsageError } from './errors' -import { getRuntimeToken, getCacheServiceURL } from '../config' -import { BearerCredentialHandler } from '@actions/http-client/lib/auth' -import { HttpClient, HttpClientResponse, HttpCodes } from '@actions/http-client' -import { CacheServiceClientJSON } from '../../generated/results/api/v1/cache.twirp' +import {info, debug} from '@actions/core' +import {getUserAgentString} from './user-agent' +import {NetworkError, UsageError} from './errors' +import {getRuntimeToken, getCacheServiceURL} from '../config' +import {BearerCredentialHandler} from '@actions/http-client/lib/auth' +import {HttpClient, HttpClientResponse, HttpCodes} from '@actions/http-client' +import {CacheServiceClientJSON} from '../../generated/results/api/v1/cache.twirp' // The twirp http client must implement this interface interface Rpc { - request( - service: string, - method: string, - contentType: 'application/json' | 'application/protobuf', - data: object | Uint8Array - ): Promise + request( + service: string, + method: string, + contentType: 'application/json' | 'application/protobuf', + data: object | Uint8Array + ): Promise } /** * This class is a wrapper around the CacheServiceClientJSON class generated by Twirp. - * + * * It adds retry logic to the request method, which is not present in the generated client. - * + * * This class is used to interact with cache service v2. 
*/ class CacheServiceClient implements Rpc { - private httpClient: HttpClient - private baseUrl: string - private maxAttempts = 5 - private baseRetryIntervalMilliseconds = 3000 - private retryMultiplier = 1.5 + private httpClient: HttpClient + private baseUrl: string + private maxAttempts = 5 + private baseRetryIntervalMilliseconds = 3000 + private retryMultiplier = 1.5 - constructor( - userAgent: string, - maxAttempts?: number, - baseRetryIntervalMilliseconds?: number, - retryMultiplier?: number - ) { - const token = getRuntimeToken() - this.baseUrl = getCacheServiceURL() - if (maxAttempts) { - this.maxAttempts = maxAttempts - } - if (baseRetryIntervalMilliseconds) { - this.baseRetryIntervalMilliseconds = baseRetryIntervalMilliseconds - } - if (retryMultiplier) { - this.retryMultiplier = retryMultiplier - } - - this.httpClient = new HttpClient(userAgent, [ - new BearerCredentialHandler(token) - ]) + constructor( + userAgent: string, + maxAttempts?: number, + baseRetryIntervalMilliseconds?: number, + retryMultiplier?: number + ) { + const token = getRuntimeToken() + this.baseUrl = getCacheServiceURL() + if (maxAttempts) { + this.maxAttempts = maxAttempts + } + if (baseRetryIntervalMilliseconds) { + this.baseRetryIntervalMilliseconds = baseRetryIntervalMilliseconds + } + if (retryMultiplier) { + this.retryMultiplier = retryMultiplier } - // This function satisfies the Rpc interface. It is compatible with the JSON - // JSON generated client. - async request( - service: string, - method: string, - contentType: 'application/json' | 'application/protobuf', - data: object | Uint8Array - ): Promise { - const url = new URL(`/twirp/${service}/${method}`, this.baseUrl).href - debug(`[Request] ${method} ${url}`) - const headers = { - 'Content-Type': contentType - } - try { - const { body } = await this.retryableRequest(async () => - this.httpClient.post(url, JSON.stringify(data), headers) - ) + this.httpClient = new HttpClient(userAgent, [ + new BearerCredentialHandler(token) + ]) + } - return body - } catch (error) { - throw new Error(`Failed to ${method}: ${error.message}`) - } + // This function satisfies the Rpc interface. It is compatible with the JSON + // JSON generated client. 
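// For orientation before the reformatted request() below: Twirp maps every RPC
// onto an HTTP POST to /twirp/<service>/<method>, exactly as the URL built in
// request() shows. A minimal sketch of the same wire call using Node 18's
// built-in fetch instead of @actions/http-client; the example service and
// method names are illustrative, not taken from this patch.
async function twirpJsonCall(
  baseUrl: string,
  service: string,
  method: string,
  payload: object
): Promise<object> {
  const url = new URL(`/twirp/${service}/${method}`, baseUrl).href
  const response = await fetch(url, {
    method: 'POST',
    headers: {'Content-Type': 'application/json'},
    body: JSON.stringify(payload)
  })
  if (!response.ok) {
    throw new Error(`Twirp ${method} failed with status ${response.status}`)
  }
  return (await response.json()) as object
}
// e.g. twirpJsonCall(baseUrl, 'CacheService', 'CreateCacheEntry', {key, version})
// mirrors what the wrapper below does, minus authentication and retries.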
+ async request( + service: string, + method: string, + contentType: 'application/json' | 'application/protobuf', + data: object | Uint8Array + ): Promise { + const url = new URL(`/twirp/${service}/${method}`, this.baseUrl).href + debug(`[Request] ${method} ${url}`) + const headers = { + 'Content-Type': contentType } + try { + const {body} = await this.retryableRequest(async () => + this.httpClient.post(url, JSON.stringify(data), headers) + ) - async retryableRequest( - operation: () => Promise - ): Promise<{ response: HttpClientResponse; body: object }> { - let attempt = 0 - let errorMessage = '' - let rawBody = '' - while (attempt < this.maxAttempts) { - let isRetryable = false + return body + } catch (error) { + throw new Error(`Failed to ${method}: ${error.message}`) + } + } - try { - const response = await operation() - const statusCode = response.message.statusCode - rawBody = await response.readBody() - debug(`[Response] - ${response.message.statusCode}`) - debug(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`) - const body = JSON.parse(rawBody) - debug(`Body: ${JSON.stringify(body, null, 2)}`) - if (this.isSuccessStatusCode(statusCode)) { - return { response, body } - } - isRetryable = this.isRetryableHttpStatusCode(statusCode) - errorMessage = `Failed request: (${statusCode}) ${response.message.statusMessage}` - if (body.msg) { - if (UsageError.isUsageErrorMessage(body.msg)) { - throw new UsageError() - } + async retryableRequest( + operation: () => Promise + ): Promise<{response: HttpClientResponse; body: object}> { + let attempt = 0 + let errorMessage = '' + let rawBody = '' + while (attempt < this.maxAttempts) { + let isRetryable = false - errorMessage = `${errorMessage}: ${body.msg}` - } - } catch (error) { - if (error instanceof SyntaxError) { - debug(`Raw Body: ${rawBody}`) - } + try { + const response = await operation() + const statusCode = response.message.statusCode + rawBody = await response.readBody() + debug(`[Response] - ${response.message.statusCode}`) + debug(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`) + const body = JSON.parse(rawBody) + debug(`Body: ${JSON.stringify(body, null, 2)}`) + if (this.isSuccessStatusCode(statusCode)) { + return {response, body} + } + isRetryable = this.isRetryableHttpStatusCode(statusCode) + errorMessage = `Failed request: (${statusCode}) ${response.message.statusMessage}` + if (body.msg) { + if (UsageError.isUsageErrorMessage(body.msg)) { + throw new UsageError() + } - if (error instanceof UsageError) { - throw error - } - - if (NetworkError.isNetworkErrorCode(error?.code)) { - throw new NetworkError(error?.code) - } - - isRetryable = true - errorMessage = error.message - } - - if (!isRetryable) { - throw new Error(`Received non-retryable error: ${errorMessage}`) - } - - if (attempt + 1 === this.maxAttempts) { - throw new Error( - `Failed to make request after ${this.maxAttempts} attempts: ${errorMessage}` - ) - } - - const retryTimeMilliseconds = - this.getExponentialRetryTimeMilliseconds(attempt) - info( - `Attempt ${attempt + 1} of ${this.maxAttempts - } failed with error: ${errorMessage}. 
Retrying request in ${retryTimeMilliseconds} ms...` - ) - await this.sleep(retryTimeMilliseconds) - attempt++ + errorMessage = `${errorMessage}: ${body.msg}` + } + } catch (error) { + if (error instanceof SyntaxError) { + debug(`Raw Body: ${rawBody}`) } - throw new Error(`Request failed`) - } - - isSuccessStatusCode(statusCode?: number): boolean { - if (!statusCode) return false - return statusCode >= 200 && statusCode < 300 - } - - isRetryableHttpStatusCode(statusCode?: number): boolean { - if (!statusCode) return false - - const retryableStatusCodes = [ - HttpCodes.BadGateway, - HttpCodes.GatewayTimeout, - HttpCodes.InternalServerError, - HttpCodes.ServiceUnavailable, - HttpCodes.TooManyRequests - ] - - return retryableStatusCodes.includes(statusCode) - } - - async sleep(milliseconds: number): Promise { - return new Promise(resolve => setTimeout(resolve, milliseconds)) - } - - getExponentialRetryTimeMilliseconds(attempt: number): number { - if (attempt < 0) { - throw new Error('attempt should be a positive integer') + if (error instanceof UsageError) { + throw error } - if (attempt === 0) { - return this.baseRetryIntervalMilliseconds + if (NetworkError.isNetworkErrorCode(error?.code)) { + throw new NetworkError(error?.code) } - const minTime = - this.baseRetryIntervalMilliseconds * this.retryMultiplier ** attempt - const maxTime = minTime * this.retryMultiplier + isRetryable = true + errorMessage = error.message + } - // returns a random number between minTime and maxTime (exclusive) - return Math.trunc(Math.random() * (maxTime - minTime) + minTime) + if (!isRetryable) { + throw new Error(`Received non-retryable error: ${errorMessage}`) + } + + if (attempt + 1 === this.maxAttempts) { + throw new Error( + `Failed to make request after ${this.maxAttempts} attempts: ${errorMessage}` + ) + } + + const retryTimeMilliseconds = + this.getExponentialRetryTimeMilliseconds(attempt) + info( + `Attempt ${attempt + 1} of ${ + this.maxAttempts + } failed with error: ${errorMessage}. 
Retrying request in ${retryTimeMilliseconds} ms...` + ) + await this.sleep(retryTimeMilliseconds) + attempt++ } + + throw new Error(`Request failed`) + } + + isSuccessStatusCode(statusCode?: number): boolean { + if (!statusCode) return false + return statusCode >= 200 && statusCode < 300 + } + + isRetryableHttpStatusCode(statusCode?: number): boolean { + if (!statusCode) return false + + const retryableStatusCodes = [ + HttpCodes.BadGateway, + HttpCodes.GatewayTimeout, + HttpCodes.InternalServerError, + HttpCodes.ServiceUnavailable, + HttpCodes.TooManyRequests + ] + + return retryableStatusCodes.includes(statusCode) + } + + async sleep(milliseconds: number): Promise { + return new Promise(resolve => setTimeout(resolve, milliseconds)) + } + + getExponentialRetryTimeMilliseconds(attempt: number): number { + if (attempt < 0) { + throw new Error('attempt should be a positive integer') + } + + if (attempt === 0) { + return this.baseRetryIntervalMilliseconds + } + + const minTime = + this.baseRetryIntervalMilliseconds * this.retryMultiplier ** attempt + const maxTime = minTime * this.retryMultiplier + + // returns a random number between minTime and maxTime (exclusive) + return Math.trunc(Math.random() * (maxTime - minTime) + minTime) + } } export function internalCacheTwirpClient(options?: { - maxAttempts?: number - retryIntervalMs?: number - retryMultiplier?: number + maxAttempts?: number + retryIntervalMs?: number + retryMultiplier?: number }): CacheServiceClientJSON { - const client = new CacheServiceClient( - getUserAgentString(), - options?.maxAttempts, - options?.retryIntervalMs, - options?.retryMultiplier - ) - return new CacheServiceClientJSON(client) + const client = new CacheServiceClient( + getUserAgentString(), + options?.maxAttempts, + options?.retryIntervalMs, + options?.retryMultiplier + ) + return new CacheServiceClientJSON(client) } diff --git a/packages/cache/src/internal/shared/errors.ts b/packages/cache/src/internal/shared/errors.ts index 24c38e0d..9ec29f6b 100644 --- a/packages/cache/src/internal/shared/errors.ts +++ b/packages/cache/src/internal/shared/errors.ts @@ -1,72 +1,72 @@ export class FilesNotFoundError extends Error { - files: string[] + files: string[] - constructor(files: string[] = []) { - let message = 'No files were found to upload' - if (files.length > 0) { - message += `: ${files.join(', ')}` - } - - super(message) - this.files = files - this.name = 'FilesNotFoundError' + constructor(files: string[] = []) { + let message = 'No files were found to upload' + if (files.length > 0) { + message += `: ${files.join(', ')}` } + + super(message) + this.files = files + this.name = 'FilesNotFoundError' + } } export class InvalidResponseError extends Error { - constructor(message: string) { - super(message) - this.name = 'InvalidResponseError' - } + constructor(message: string) { + super(message) + this.name = 'InvalidResponseError' + } } export class CacheNotFoundError extends Error { - constructor(message = 'Cache not found') { - super(message) - this.name = 'CacheNotFoundError' - } + constructor(message = 'Cache not found') { + super(message) + this.name = 'CacheNotFoundError' + } } export class GHESNotSupportedError extends Error { - constructor( - message = '@actions/cache v4.1.4+, actions/cache/save@v4+ and actions/cache/restore@v4+ are not currently supported on GHES.' 
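// The classes in this file set a stable `name` on each error precisely so that
// callers can branch on the failure mode. A small consumer sketch;
// classifyCacheError is a hypothetical helper, not part of this patch.
function classifyCacheError(err: unknown): 'network' | 'quota' | 'other' {
  if (err instanceof Error && err.name === 'NetworkError') {
    // transient connectivity failure: the caller may retry or skip caching
    return 'network'
  }
  if (err instanceof Error && err.name === 'UsageError') {
    // storage quota exhausted: retrying will not help until usage is recalculated
    return 'quota'
  }
  return 'other'
}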
- ) { - super(message) - this.name = 'GHESNotSupportedError' - } + constructor( + message = '@actions/cache v4.1.4+, actions/cache/save@v4+ and actions/cache/restore@v4+ are not currently supported on GHES.' + ) { + super(message) + this.name = 'GHESNotSupportedError' + } } export class NetworkError extends Error { - code: string + code: string - constructor(code: string) { - const message = `Unable to make request: ${code}\nIf you are using self-hosted runners, please make sure your runner has access to all GitHub endpoints: https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github` - super(message) - this.code = code - this.name = 'NetworkError' - } + constructor(code: string) { + const message = `Unable to make request: ${code}\nIf you are using self-hosted runners, please make sure your runner has access to all GitHub endpoints: https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github` + super(message) + this.code = code + this.name = 'NetworkError' + } - static isNetworkErrorCode = (code?: string): boolean => { - if (!code) return false - return [ - 'ECONNRESET', - 'ENOTFOUND', - 'ETIMEDOUT', - 'ECONNREFUSED', - 'EHOSTUNREACH' - ].includes(code) - } + static isNetworkErrorCode = (code?: string): boolean => { + if (!code) return false + return [ + 'ECONNRESET', + 'ENOTFOUND', + 'ETIMEDOUT', + 'ECONNREFUSED', + 'EHOSTUNREACH' + ].includes(code) + } } export class UsageError extends Error { - constructor() { - const message = `Cache storage quota has been hit. Unable to upload any new cache entries. Usage is recalculated every 6-12 hours.\nMore info on storage limits: https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions#calculating-minute-and-storage-spending` - super(message) - this.name = 'UsageError' - } + constructor() { + const message = `Cache storage quota has been hit. Unable to upload any new cache entries. 
Usage is recalculated every 6-12 hours.\nMore info on storage limits: https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions#calculating-minute-and-storage-spending` + super(message) + this.name = 'UsageError' + } - static isUsageErrorMessage = (msg?: string): boolean => { - if (!msg) return false - return msg.includes('insufficient usage') - } + static isUsageErrorMessage = (msg?: string): boolean => { + if (!msg) return false + return msg.includes('insufficient usage') + } } diff --git a/packages/cache/src/internal/shared/user-agent.ts b/packages/cache/src/internal/shared/user-agent.ts index 1fcb15bd..9d88a659 100644 --- a/packages/cache/src/internal/shared/user-agent.ts +++ b/packages/cache/src/internal/shared/user-agent.ts @@ -5,5 +5,5 @@ const packageJson = require('../../../package.json') * Ensure that this User Agent String is used in all HTTP calls so that we can monitor telemetry between different versions of this package */ export function getUserAgentString(): string { - return `@actions/cache-${packageJson.version}` + return `@actions/cache-${packageJson.version}` } From 19cdd5f210c6a71c9b8d163ce8e08a04c62918ee Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Thu, 14 Nov 2024 03:34:13 -0800 Subject: [PATCH 035/108] Linter cleanups --- packages/cache/src/cache.ts | 30 +++++++++++++----------------- 1 file changed, 13 insertions(+), 17 deletions(-) diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index 5fba1e82..f5c00053 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -4,8 +4,8 @@ import * as config from './internal/config' import * as utils from './internal/cacheUtils' import * as cacheHttpClient from './internal/cacheHttpClient' import * as cacheTwirpClient from './internal/shared/cacheTwirpClient' -import {DownloadOptions, UploadOptions} from './options' -import {createTar, extractTar, listTar} from './internal/tar' +import { DownloadOptions, UploadOptions } from './options' +import { createTar, extractTar, listTar } from './internal/tar' import { CreateCacheEntryRequest, CreateCacheEntryResponse, @@ -14,9 +14,9 @@ import { GetCacheEntryDownloadURLRequest, GetCacheEntryDownloadURLResponse } from './generated/results/api/v1/cache' -import {CacheFileSizeLimit} from './internal/constants' -import {UploadCacheFile} from './internal/blob/upload-cache' -import {DownloadCacheFile} from './internal/blob/download-cache' +import { CacheFileSizeLimit } from './internal/constants' +import { UploadCacheFile } from './internal/blob/upload-cache' +import { DownloadCacheFile } from './internal/blob/download-cache' export class ValidationError extends Error { constructor(message: string) { super(message) @@ -84,7 +84,6 @@ export async function restoreCache( checkPaths(paths) const cacheServiceVersion: string = config.getCacheServiceVersion() - console.debug(`Cache service version: ${cacheServiceVersion}`) switch (cacheServiceVersion) { case 'v2': return await restoreCachev2( @@ -246,7 +245,7 @@ async function restoreCachev2( workflowRunBackendId: backendIds.workflowRunBackendId, workflowJobRunBackendId: backendIds.workflowJobRunBackendId, key: primaryKey, - restoreKeys: restoreKeys, + restoreKeys, version: utils.getCacheVersion( paths, compressionMethod, @@ -307,8 +306,6 @@ async function restoreCachev2( core.debug(`Failed to delete archive: ${error}`) } } - - return undefined } /** @@ -330,7 +327,6 @@ export async function saveCache( checkKey(key) 
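// The line above is the single switch point between the two backends: as the
// config.ts change earlier in this series shows, getCacheServiceVersion()
// returns 'v2' purely on the presence of ACTIONS_CACHE_SERVICE_V2 and 'v1'
// otherwise. A minimal sketch of exercising both branches; the import path
// assumes the packages/cache/src layout used throughout these patches.
import {getCacheServiceVersion} from './internal/config'

process.env['ACTIONS_CACHE_SERVICE_V2'] = 'true'
console.log(getCacheServiceVersion()) // 'v2' -> Twirp client + signed blob URLs
delete process.env['ACTIONS_CACHE_SERVICE_V2']
console.log(getCacheServiceVersion()) // 'v1' -> legacy cacheHttpClient path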
const cacheServiceVersion: string = config.getCacheServiceVersion() - console.debug(`Cache Service Version: ${cacheServiceVersion}`) switch (cacheServiceVersion) { case 'v2': return await saveCachev2(paths, key, options, enableCrossOsArchive) @@ -410,9 +406,9 @@ async function saveCachev1( } else if (reserveCacheResponse?.statusCode === 400) { throw new Error( reserveCacheResponse?.error?.message ?? - `Cache size of ~${Math.round( - archiveFileSize / (1024 * 1024) - )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.` + `Cache size of ~${Math.round( + archiveFileSize / (1024 * 1024) + )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.` ) } else { throw new ReserveCacheError( @@ -509,8 +505,8 @@ async function saveCachev2( const request: CreateCacheEntryRequest = { workflowRunBackendId: backendIds.workflowRunBackendId, workflowJobRunBackendId: backendIds.workflowJobRunBackendId, - key: key, - version: version + key, + version } const response: CreateCacheEntryResponse = await twirpClient.CreateCacheEntry(request) @@ -526,8 +522,8 @@ async function saveCachev2( const finalizeRequest: FinalizeCacheEntryUploadRequest = { workflowRunBackendId: backendIds.workflowRunBackendId, workflowJobRunBackendId: backendIds.workflowJobRunBackendId, - key: key, - version: version, + key, + version, sizeBytes: `${archiveFileSize}` } From 83baffc3f6aa547cec65984fa0f7d81d2b7f11b7 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Thu, 14 Nov 2024 03:34:32 -0800 Subject: [PATCH 036/108] Package upgrades with security fixes --- package-lock.json | 23 +++++++++++++---------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/package-lock.json b/package-lock.json index 6ebe4c24..396698e4 100644 --- a/package-lock.json +++ b/package-lock.json @@ -12738,12 +12738,13 @@ } }, "node_modules/micromatch": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", - "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", "dev": true, + "license": "MIT", "dependencies": { - "braces": "^3.0.2", + "braces": "^3.0.3", "picomatch": "^2.3.1" }, "engines": { @@ -14300,9 +14301,10 @@ } }, "node_modules/path-to-regexp": { - "version": "6.2.2", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-6.2.2.tgz", - "integrity": "sha512-GQX3SSMokngb36+whdpRXE+3f9V8UzyAorlYvOGx87ufGHehNTn5lCxrKtLyZ4Yl/wEKnNnr98ZzOwwDZV5ogw==" + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-6.3.0.tgz", + "integrity": "sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ==", + "license": "MIT" }, "node_modules/path-type": { "version": "4.0.0", @@ -16329,9 +16331,10 @@ } }, "node_modules/undici": { - "version": "6.18.1", - "resolved": "https://registry.npmjs.org/undici/-/undici-6.18.1.tgz", - "integrity": "sha512-/0BWqR8rJNRysS5lqVmfc7eeOErcOP4tZpATVjJOojjHZ71gSYVAtFhEmadcIjwMIUehh5NFyKGsXCnXIajtbA==", + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici/-/undici-6.21.0.tgz", + "integrity": "sha512-BUgJXc752Kou3oOIuU1i+yZZypyZRqNPW0vqoMPl8VaoalSfeR0D8/t4iAS3yirs79SSMTxTag+ZC86uswv+Cw==", + "license": "MIT", "engines": { "node": 
">=18.17" } From 2ee77e654fbe20a6b111095deff90ac25ba6ee23 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Thu, 14 Nov 2024 03:42:14 -0800 Subject: [PATCH 037/108] Add missing function return types --- packages/cache/src/cache.ts | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index f5c00053..5c415cec 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -4,8 +4,8 @@ import * as config from './internal/config' import * as utils from './internal/cacheUtils' import * as cacheHttpClient from './internal/cacheHttpClient' import * as cacheTwirpClient from './internal/shared/cacheTwirpClient' -import { DownloadOptions, UploadOptions } from './options' -import { createTar, extractTar, listTar } from './internal/tar' +import {DownloadOptions, UploadOptions} from './options' +import {createTar, extractTar, listTar} from './internal/tar' import { CreateCacheEntryRequest, CreateCacheEntryResponse, @@ -14,9 +14,9 @@ import { GetCacheEntryDownloadURLRequest, GetCacheEntryDownloadURLResponse } from './generated/results/api/v1/cache' -import { CacheFileSizeLimit } from './internal/constants' -import { UploadCacheFile } from './internal/blob/upload-cache' -import { DownloadCacheFile } from './internal/blob/download-cache' +import {CacheFileSizeLimit} from './internal/constants' +import {UploadCacheFile} from './internal/blob/upload-cache' +import {DownloadCacheFile} from './internal/blob/download-cache' export class ValidationError extends Error { constructor(message: string) { super(message) @@ -121,7 +121,7 @@ async function restoreCachev1( restoreKeys?: string[], options?: DownloadOptions, enableCrossOsArchive = false -) { +): Promise { restoreKeys = restoreKeys || [] const keys = [primaryKey, ...restoreKeys] @@ -219,7 +219,7 @@ async function restoreCachev2( restoreKeys?: string[], options?: DownloadOptions, enableCrossOsArchive = false -) { +): Promise { restoreKeys = restoreKeys || [] const keys = [primaryKey, ...restoreKeys] @@ -406,9 +406,9 @@ async function saveCachev1( } else if (reserveCacheResponse?.statusCode === 400) { throw new Error( reserveCacheResponse?.error?.message ?? 
- `Cache size of ~${Math.round( - archiveFileSize / (1024 * 1024) - )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.` + `Cache size of ~${Math.round( + archiveFileSize / (1024 * 1024) + )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.` ) } else { throw new ReserveCacheError( From c3e354da23676a8a5d65ecfc3f5720f36012c338 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Thu, 14 Nov 2024 04:33:31 -0800 Subject: [PATCH 038/108] Remove unnecessary debug information --- packages/cache/src/cache.ts | 4 ++-- packages/cache/src/internal/blob/download-cache.ts | 4 +--- packages/cache/src/internal/blob/upload-cache.ts | 4 +--- 3 files changed, 4 insertions(+), 8 deletions(-) diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index 5c415cec..cbeb3d8c 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -516,7 +516,7 @@ async function saveCachev2( ) } - core.debug(`Saving Cache to: ${core.setSecret(response.signedUploadUrl)}`) + core.debug(`Attempting to upload cache located at: ${archivePath}`) await UploadCacheFile(response.signedUploadUrl, archivePath) const finalizeRequest: FinalizeCacheEntryUploadRequest = { @@ -530,7 +530,7 @@ async function saveCachev2( const finalizeResponse: FinalizeCacheEntryUploadResponse = await twirpClient.FinalizeCacheEntryUpload(finalizeRequest) core.debug( - `FinalizeCacheEntryUploadResponse: ${JSON.stringify(finalizeResponse)}` + `FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}` ) if (!finalizeResponse.ok) { diff --git a/packages/cache/src/internal/blob/download-cache.ts b/packages/cache/src/internal/blob/download-cache.ts index 966d4974..73829a83 100644 --- a/packages/cache/src/internal/blob/download-cache.ts +++ b/packages/cache/src/internal/blob/download-cache.ts @@ -14,12 +14,10 @@ export async function DownloadCacheFile( maxRetryRequests: 5 } - // TODO: tighten the configuration and pass the appropriate user-agent const blobClient: BlobClient = new BlobClient(signedUploadURL) const blockBlobClient: BlockBlobClient = blobClient.getBlockBlobClient() - core.debug(`BlobClient: ${JSON.stringify(blobClient)}`) - core.debug(`blockBlobClient: ${JSON.stringify(blockBlobClient)}`) + core.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`) return blockBlobClient.downloadToFile( archivePath, diff --git a/packages/cache/src/internal/blob/upload-cache.ts b/packages/cache/src/internal/blob/upload-cache.ts index 5cd5cd6a..9e79e966 100644 --- a/packages/cache/src/internal/blob/upload-cache.ts +++ b/packages/cache/src/internal/blob/upload-cache.ts @@ -9,7 +9,6 @@ export async function UploadCacheFile( signedUploadURL: string, archivePath: string ): Promise<{}> { - // TODO: tighten the configuration and pass the appropriate user-agent // Specify data transfer options const uploadOptions: BlockBlobParallelUploadOptions = { blockSize: 4 * 1024 * 1024, // 4 MiB max block size @@ -20,8 +19,7 @@ export async function UploadCacheFile( const blobClient: BlobClient = new BlobClient(signedUploadURL) const blockBlobClient: BlockBlobClient = blobClient.getBlockBlobClient() - core.debug(`BlobClient: ${JSON.stringify(blobClient)}`) - core.debug(`blockBlobClient: ${JSON.stringify(blockBlobClient)}`) + core.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`) return blockBlobClient.uploadFile(archivePath, uploadOptions) } From ea4bf4810a8fe3eeb4aeff1f10d0d4488e5abbc1 
Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Thu, 14 Nov 2024 04:39:30 -0800 Subject: [PATCH 039/108] Remove unnecessary debug information --- packages/cache/src/cache.ts | 5 ----- 1 file changed, 5 deletions(-) diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index cbeb3d8c..68345739 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -253,12 +253,8 @@ async function restoreCachev2( ) } - core.debug( - `GetCacheEntryDownloadURLRequest: ${JSON.stringify(twirpClient)}` - ) const response: GetCacheEntryDownloadURLResponse = await twirpClient.GetCacheEntryDownloadURL(request) - core.debug(`GetCacheEntryDownloadURLResponse: ${JSON.stringify(response)}`) if (!response.ok) { core.warning(`Cache not found for keys: ${keys.join(', ')}`) @@ -277,7 +273,6 @@ async function restoreCachev2( utils.getCacheFileName(compressionMethod) ) core.debug(`Archive path: ${archivePath}`) - core.debug(`Starting download of artifact to: ${archivePath}`) await DownloadCacheFile(response.signedDownloadUrl, archivePath) From 5e9ef8532f587df0f8fde05f8d4eabad87da9762 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Thu, 14 Nov 2024 04:47:27 -0800 Subject: [PATCH 040/108] Lint fixes --- packages/cache/src/cache.ts | 4 +--- packages/cache/src/internal/blob/download-cache.ts | 4 +++- packages/cache/src/internal/blob/upload-cache.ts | 4 +++- 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index 68345739..7d0cd000 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -524,9 +524,7 @@ async function saveCachev2( const finalizeResponse: FinalizeCacheEntryUploadResponse = await twirpClient.FinalizeCacheEntryUpload(finalizeRequest) - core.debug( - `FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}` - ) + core.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`) if (!finalizeResponse.ok) { throw new Error( diff --git a/packages/cache/src/internal/blob/download-cache.ts b/packages/cache/src/internal/blob/download-cache.ts index 73829a83..38443de3 100644 --- a/packages/cache/src/internal/blob/download-cache.ts +++ b/packages/cache/src/internal/blob/download-cache.ts @@ -17,7 +17,9 @@ export async function DownloadCacheFile( const blobClient: BlobClient = new BlobClient(signedUploadURL) const blockBlobClient: BlockBlobClient = blobClient.getBlockBlobClient() - core.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`) + core.debug( + `BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}` + ) return blockBlobClient.downloadToFile( archivePath, diff --git a/packages/cache/src/internal/blob/upload-cache.ts b/packages/cache/src/internal/blob/upload-cache.ts index 9e79e966..a29672dc 100644 --- a/packages/cache/src/internal/blob/upload-cache.ts +++ b/packages/cache/src/internal/blob/upload-cache.ts @@ -19,7 +19,9 @@ export async function UploadCacheFile( const blobClient: BlobClient = new BlobClient(signedUploadURL) const blockBlobClient: BlockBlobClient = blobClient.getBlockBlobClient() - core.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`) + core.debug( + `BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}` + ) return blockBlobClient.uploadFile(archivePath, uploadOptions) } From ab8110fa2f9e860e01ae01f4ff6ede24f06e725f Mon Sep 17 00:00:00 
2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Thu, 14 Nov 2024 06:36:42 -0800 Subject: [PATCH 041/108] Remove unecessary packages from top level package.json --- package-lock.json | 2654 ++---------------------------- package.json | 14 - packages/cache/package-lock.json | 491 +++++- packages/cache/package.json | 5 +- 4 files changed, 631 insertions(+), 2533 deletions(-) diff --git a/package-lock.json b/package-lock.json index 396698e4..b97deae9 100644 --- a/package-lock.json +++ b/package-lock.json @@ -5,20 +5,6 @@ "packages": { "": { "name": "root", - "dependencies": { - "@actions/artifact": "^2.1.7", - "@actions/attest": "^1.2.1", - "@actions/cache": "^3.2.4", - "@actions/core": "^1.10.1", - "@actions/exec": "^1.1.1", - "@actions/github": "^6.0.0", - "@actions/glob": "^0.4.0", - "@actions/http-client": "^2.2.1", - "@actions/io": "^1.1.3", - "@actions/tool-cache": "^2.0.1", - "tunnel": "^0.0.6", - "undici": "^6.18.1" - }, "devDependencies": { "@types/jest": "^29.5.4", "@types/node": "^20.5.7", @@ -47,626 +33,6 @@ "node": ">=0.10.0" } }, - "node_modules/@actions/artifact": { - "version": "2.1.7", - "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-2.1.7.tgz", - "integrity": "sha512-iIFsTPZnb182dBc+Is5v7ZqojC4ydO8Ru4/PD8Azg2diV//fdW3H6biEH/utUlNhwfOuHxZpC/QSQsU5KDEuuw==", - "dependencies": { - "@actions/core": "^1.10.0", - "@actions/github": "^5.1.1", - "@actions/http-client": "^2.1.0", - "@azure/storage-blob": "^12.15.0", - "@octokit/core": "^3.5.1", - "@octokit/plugin-request-log": "^1.0.4", - "@octokit/plugin-retry": "^3.0.9", - "@octokit/request-error": "^5.0.0", - "@protobuf-ts/plugin": "^2.2.3-alpha.1", - "archiver": "^7.0.1", - "crypto": "^1.0.1", - "jwt-decode": "^3.1.2", - "twirp-ts": "^2.5.0", - "unzip-stream": "^0.3.1" - } - }, - "node_modules/@actions/artifact/node_modules/@actions/github": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/@actions/github/-/github-5.1.1.tgz", - "integrity": "sha512-Nk59rMDoJaV+mHCOJPXuvB1zIbomlKS0dmSIqPGxd0enAXBnOfn4VWF+CGtRCwXZG9Epa54tZA7VIRlJDS8A6g==", - "dependencies": { - "@actions/http-client": "^2.0.1", - "@octokit/core": "^3.6.0", - "@octokit/plugin-paginate-rest": "^2.17.0", - "@octokit/plugin-rest-endpoint-methods": "^5.13.0" - } - }, - "node_modules/@actions/artifact/node_modules/@octokit/auth-token": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-2.5.0.tgz", - "integrity": "sha512-r5FVUJCOLl19AxiuZD2VRZ/ORjp/4IN98Of6YJoJOkY75CIBuYfmiNHGrDwXr+aLGG55igl9QrxX3hbiXlLb+g==", - "dependencies": { - "@octokit/types": "^6.0.3" - } - }, - "node_modules/@actions/artifact/node_modules/@octokit/core": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/@octokit/core/-/core-3.6.0.tgz", - "integrity": "sha512-7RKRKuA4xTjMhY+eG3jthb3hlZCsOwg3rztWh75Xc+ShDWOfDDATWbeZpAHBNRpm4Tv9WgBMOy1zEJYXG6NJ7Q==", - "dependencies": { - "@octokit/auth-token": "^2.4.4", - "@octokit/graphql": "^4.5.8", - "@octokit/request": "^5.6.3", - "@octokit/request-error": "^2.0.5", - "@octokit/types": "^6.0.3", - "before-after-hook": "^2.2.0", - "universal-user-agent": "^6.0.0" - } - }, - "node_modules/@actions/artifact/node_modules/@octokit/core/node_modules/@octokit/request-error": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-2.1.0.tgz", - "integrity": "sha512-1VIvgXxs9WHSjicsRwq8PlR2LR2x6DwsJAaFgzdi0JfJoGSO8mYI/cHJQ+9FbN21aa+DrgNLnwObmyeSC8Rmpg==", - "dependencies": { - "@octokit/types": 
"^6.0.3", - "deprecation": "^2.0.0", - "once": "^1.4.0" - } - }, - "node_modules/@actions/artifact/node_modules/@octokit/endpoint": { - "version": "6.0.12", - "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-6.0.12.tgz", - "integrity": "sha512-lF3puPwkQWGfkMClXb4k/eUT/nZKQfxinRWJrdZaJO85Dqwo/G0yOC434Jr2ojwafWJMYqFGFa5ms4jJUgujdA==", - "dependencies": { - "@octokit/types": "^6.0.3", - "is-plain-object": "^5.0.0", - "universal-user-agent": "^6.0.0" - } - }, - "node_modules/@actions/artifact/node_modules/@octokit/graphql": { - "version": "4.8.0", - "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-4.8.0.tgz", - "integrity": "sha512-0gv+qLSBLKF0z8TKaSKTsS39scVKF9dbMxJpj3U0vC7wjNWFuIpL/z76Qe2fiuCbDRcJSavkXsVtMS6/dtQQsg==", - "dependencies": { - "@octokit/request": "^5.6.0", - "@octokit/types": "^6.0.3", - "universal-user-agent": "^6.0.0" - } - }, - "node_modules/@actions/artifact/node_modules/@octokit/openapi-types": { - "version": "12.11.0", - "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-12.11.0.tgz", - "integrity": "sha512-VsXyi8peyRq9PqIz/tpqiL2w3w80OgVMwBHltTml3LmVvXiphgeqmY9mvBw9Wu7e0QWk/fqD37ux8yP5uVekyQ==" - }, - "node_modules/@actions/artifact/node_modules/@octokit/plugin-paginate-rest": { - "version": "2.21.3", - "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-2.21.3.tgz", - "integrity": "sha512-aCZTEf0y2h3OLbrgKkrfFdjRL6eSOo8komneVQJnYecAxIej7Bafor2xhuDJOIFau4pk0i/P28/XgtbyPF0ZHw==", - "dependencies": { - "@octokit/types": "^6.40.0" - }, - "peerDependencies": { - "@octokit/core": ">=2" - } - }, - "node_modules/@actions/artifact/node_modules/@octokit/plugin-rest-endpoint-methods": { - "version": "5.16.2", - "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-5.16.2.tgz", - "integrity": "sha512-8QFz29Fg5jDuTPXVtey05BLm7OB+M8fnvE64RNegzX7U+5NUXcOcnpTIK0YfSHBg8gYd0oxIq3IZTe9SfPZiRw==", - "dependencies": { - "@octokit/types": "^6.39.0", - "deprecation": "^2.3.1" - }, - "peerDependencies": { - "@octokit/core": ">=3" - } - }, - "node_modules/@actions/artifact/node_modules/@octokit/request": { - "version": "5.6.3", - "resolved": "https://registry.npmjs.org/@octokit/request/-/request-5.6.3.tgz", - "integrity": "sha512-bFJl0I1KVc9jYTe9tdGGpAMPy32dLBXXo1dS/YwSCTL/2nd9XeHsY616RE3HPXDVk+a+dBuzyz5YdlXwcDTr2A==", - "dependencies": { - "@octokit/endpoint": "^6.0.1", - "@octokit/request-error": "^2.1.0", - "@octokit/types": "^6.16.1", - "is-plain-object": "^5.0.0", - "node-fetch": "^2.6.7", - "universal-user-agent": "^6.0.0" - } - }, - "node_modules/@actions/artifact/node_modules/@octokit/request-error": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.0.tgz", - "integrity": "sha512-GETXfE05J0+7H2STzekpKObFe765O5dlAKUTLNGeH+x47z7JjXHfsHKo5z21D/o/IOZTUEI6nyWyR+bZVP/n5Q==", - "dependencies": { - "@octokit/types": "^13.1.0", - "deprecation": "^2.0.0", - "once": "^1.4.0" - }, - "engines": { - "node": ">= 18" - } - }, - "node_modules/@actions/artifact/node_modules/@octokit/request-error/node_modules/@octokit/openapi-types": { - "version": "22.2.0", - "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz", - "integrity": "sha512-QBhVjcUa9W7Wwhm6DBFu6ZZ+1/t/oYxqc2tp81Pi41YNuJinbFRx8B133qVOrAaBbF7D/m0Et6f9/pZt9Rc+tg==" - }, - "node_modules/@actions/artifact/node_modules/@octokit/request-error/node_modules/@octokit/types": { - "version": 
"13.5.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.5.0.tgz", - "integrity": "sha512-HdqWTf5Z3qwDVlzCrP8UJquMwunpDiMPt5er+QjGzL4hqr/vBVY/MauQgS1xWxCDT1oMx1EULyqxncdCY/NVSQ==", - "dependencies": { - "@octokit/openapi-types": "^22.2.0" - } - }, - "node_modules/@actions/artifact/node_modules/@octokit/request/node_modules/@octokit/request-error": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-2.1.0.tgz", - "integrity": "sha512-1VIvgXxs9WHSjicsRwq8PlR2LR2x6DwsJAaFgzdi0JfJoGSO8mYI/cHJQ+9FbN21aa+DrgNLnwObmyeSC8Rmpg==", - "dependencies": { - "@octokit/types": "^6.0.3", - "deprecation": "^2.0.0", - "once": "^1.4.0" - } - }, - "node_modules/@actions/artifact/node_modules/@octokit/types": { - "version": "6.41.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-6.41.0.tgz", - "integrity": "sha512-eJ2jbzjdijiL3B4PrSQaSjuF2sPEQPVCPzBvTHJD9Nz+9dw2SGH4K4xeQJ77YfTq5bRQ+bD8wT11JbeDPmxmGg==", - "dependencies": { - "@octokit/openapi-types": "^12.11.0" - } - }, - "node_modules/@actions/attest": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@actions/attest/-/attest-1.2.1.tgz", - "integrity": "sha512-ZLfmO6o2x3UL2BG++oIHMPx5kApWr8Uy1cgiiafXpHgamsqFUPjUtcp0/gpOaXkxUZftdVno7NwBTisw8qr9UA==", - "dependencies": { - "@actions/core": "^1.10.1", - "@actions/github": "^6.0.0", - "@actions/http-client": "^2.2.1", - "@octokit/plugin-retry": "^6.0.1", - "@sigstore/bundle": "^2.3.0", - "@sigstore/sign": "^2.3.0", - "jsonwebtoken": "^9.0.2", - "jwks-rsa": "^3.1.0" - } - }, - "node_modules/@actions/attest/node_modules/@octokit/auth-token": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-5.1.1.tgz", - "integrity": "sha512-rh3G3wDO8J9wSjfI436JUKzHIxq8NaiL0tVeB2aXmG6p/9859aUOAjA9pmSPNGGZxfwmaJ9ozOJImuNVJdpvbA==", - "peer": true, - "engines": { - "node": ">= 18" - } - }, - "node_modules/@actions/attest/node_modules/@octokit/core": { - "version": "6.1.2", - "resolved": "https://registry.npmjs.org/@octokit/core/-/core-6.1.2.tgz", - "integrity": "sha512-hEb7Ma4cGJGEUNOAVmyfdB/3WirWMg5hDuNFVejGEDFqupeOysLc2sG6HJxY2etBp5YQu5Wtxwi020jS9xlUwg==", - "peer": true, - "dependencies": { - "@octokit/auth-token": "^5.0.0", - "@octokit/graphql": "^8.0.0", - "@octokit/request": "^9.0.0", - "@octokit/request-error": "^6.0.1", - "@octokit/types": "^13.0.0", - "before-after-hook": "^3.0.2", - "universal-user-agent": "^7.0.0" - }, - "engines": { - "node": ">= 18" - } - }, - "node_modules/@actions/attest/node_modules/@octokit/endpoint": { - "version": "10.1.1", - "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-10.1.1.tgz", - "integrity": "sha512-JYjh5rMOwXMJyUpj028cu0Gbp7qe/ihxfJMLc8VZBMMqSwLgOxDI1911gV4Enl1QSavAQNJcwmwBF9M0VvLh6Q==", - "peer": true, - "dependencies": { - "@octokit/types": "^13.0.0", - "universal-user-agent": "^7.0.2" - }, - "engines": { - "node": ">= 18" - } - }, - "node_modules/@actions/attest/node_modules/@octokit/graphql": { - "version": "8.1.1", - "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-8.1.1.tgz", - "integrity": "sha512-ukiRmuHTi6ebQx/HFRCXKbDlOh/7xEV6QUXaE7MJEKGNAncGI/STSbOkl12qVXZrfZdpXctx5O9X1AIaebiDBg==", - "peer": true, - "dependencies": { - "@octokit/request": "^9.0.0", - "@octokit/types": "^13.0.0", - "universal-user-agent": "^7.0.0" - }, - "engines": { - "node": ">= 18" - } - }, - "node_modules/@actions/attest/node_modules/@octokit/openapi-types": { - "version": "22.2.0", - "resolved": 
"https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz", - "integrity": "sha512-QBhVjcUa9W7Wwhm6DBFu6ZZ+1/t/oYxqc2tp81Pi41YNuJinbFRx8B133qVOrAaBbF7D/m0Et6f9/pZt9Rc+tg==" - }, - "node_modules/@actions/attest/node_modules/@octokit/plugin-retry": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/@octokit/plugin-retry/-/plugin-retry-6.0.1.tgz", - "integrity": "sha512-SKs+Tz9oj0g4p28qkZwl/topGcb0k0qPNX/i7vBKmDsjoeqnVfFUquqrE/O9oJY7+oLzdCtkiWSXLpLjvl6uog==", - "dependencies": { - "@octokit/request-error": "^5.0.0", - "@octokit/types": "^12.0.0", - "bottleneck": "^2.15.3" - }, - "engines": { - "node": ">= 18" - }, - "peerDependencies": { - "@octokit/core": ">=5" - } - }, - "node_modules/@actions/attest/node_modules/@octokit/plugin-retry/node_modules/@octokit/request-error": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.0.tgz", - "integrity": "sha512-GETXfE05J0+7H2STzekpKObFe765O5dlAKUTLNGeH+x47z7JjXHfsHKo5z21D/o/IOZTUEI6nyWyR+bZVP/n5Q==", - "dependencies": { - "@octokit/types": "^13.1.0", - "deprecation": "^2.0.0", - "once": "^1.4.0" - }, - "engines": { - "node": ">= 18" - } - }, - "node_modules/@actions/attest/node_modules/@octokit/plugin-retry/node_modules/@octokit/request-error/node_modules/@octokit/types": { - "version": "13.5.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.5.0.tgz", - "integrity": "sha512-HdqWTf5Z3qwDVlzCrP8UJquMwunpDiMPt5er+QjGzL4hqr/vBVY/MauQgS1xWxCDT1oMx1EULyqxncdCY/NVSQ==", - "dependencies": { - "@octokit/openapi-types": "^22.2.0" - } - }, - "node_modules/@actions/attest/node_modules/@octokit/plugin-retry/node_modules/@octokit/types": { - "version": "12.6.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-12.6.0.tgz", - "integrity": "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw==", - "dependencies": { - "@octokit/openapi-types": "^20.0.0" - } - }, - "node_modules/@actions/attest/node_modules/@octokit/plugin-retry/node_modules/@octokit/types/node_modules/@octokit/openapi-types": { - "version": "20.0.0", - "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-20.0.0.tgz", - "integrity": "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA==" - }, - "node_modules/@actions/attest/node_modules/@octokit/request": { - "version": "9.1.1", - "resolved": "https://registry.npmjs.org/@octokit/request/-/request-9.1.1.tgz", - "integrity": "sha512-pyAguc0p+f+GbQho0uNetNQMmLG1e80WjkIaqqgUkihqUp0boRU6nKItXO4VWnr+nbZiLGEyy4TeKRwqaLvYgw==", - "peer": true, - "dependencies": { - "@octokit/endpoint": "^10.0.0", - "@octokit/request-error": "^6.0.1", - "@octokit/types": "^13.1.0", - "universal-user-agent": "^7.0.2" - }, - "engines": { - "node": ">= 18" - } - }, - "node_modules/@actions/attest/node_modules/@octokit/request-error": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-6.1.1.tgz", - "integrity": "sha512-1mw1gqT3fR/WFvnoVpY/zUM2o/XkMs/2AszUUG9I69xn0JFLv6PGkPhNk5lbfvROs79wiS0bqiJNxfCZcRJJdg==", - "peer": true, - "dependencies": { - "@octokit/types": "^13.0.0" - }, - "engines": { - "node": ">= 18" - } - }, - "node_modules/@actions/attest/node_modules/@octokit/types": { - "version": "13.5.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.5.0.tgz", - "integrity": 
"sha512-HdqWTf5Z3qwDVlzCrP8UJquMwunpDiMPt5er+QjGzL4hqr/vBVY/MauQgS1xWxCDT1oMx1EULyqxncdCY/NVSQ==", - "peer": true, - "dependencies": { - "@octokit/openapi-types": "^22.2.0" - } - }, - "node_modules/@actions/attest/node_modules/before-after-hook": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-3.0.2.tgz", - "integrity": "sha512-Nik3Sc0ncrMK4UUdXQmAnRtzmNQTAAXmXIopizwZ1W1t8QmfJj+zL4OA2I7XPTPW5z5TDqv4hRo/JzouDJnX3A==", - "peer": true - }, - "node_modules/@actions/attest/node_modules/universal-user-agent": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.2.tgz", - "integrity": "sha512-0JCqzSKnStlRRQfCdowvqy3cy0Dvtlb8xecj/H8JFZuCze4rwjPZQOgvFvn0Ws/usCHQFGpyr+pB9adaGwXn4Q==", - "peer": true - }, - "node_modules/@actions/cache": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.2.4.tgz", - "integrity": "sha512-RuHnwfcDagtX+37s0ZWy7clbOfnZ7AlDJQ7k/9rzt2W4Gnwde3fa/qjSjVuz4vLcLIpc7fUob27CMrqiWZytYA==", - "dependencies": { - "@actions/core": "^1.10.0", - "@actions/exec": "^1.0.1", - "@actions/glob": "^0.1.0", - "@actions/http-client": "^2.1.1", - "@actions/io": "^1.0.1", - "@azure/abort-controller": "^1.1.0", - "@azure/ms-rest-js": "^2.6.0", - "@azure/storage-blob": "^12.13.0", - "semver": "^6.3.1", - "uuid": "^3.3.3" - } - }, - "node_modules/@actions/cache/node_modules/@actions/glob": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/@actions/glob/-/glob-0.1.2.tgz", - "integrity": "sha512-SclLR7Ia5sEqjkJTPs7Sd86maMDw43p769YxBOxvPvEWuPEhpAnBsQfENOpXjFYMmhCqd127bmf+YdvJqVqR4A==", - "dependencies": { - "@actions/core": "^1.2.6", - "minimatch": "^3.0.4" - } - }, - "node_modules/@actions/cache/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/@actions/cache/node_modules/uuid": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", - "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", - "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. 
See https://v8.dev/blog/math-random for details.", - "bin": { - "uuid": "bin/uuid" - } - }, - "node_modules/@actions/core": { - "version": "1.10.1", - "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.1.tgz", - "integrity": "sha512-3lBR9EDAY+iYIpTnTIXmWcNbX3T2kCkAEQGIQx4NVQ0575nk2k3GRZDTPQG+vVtS2izSLmINlxXf0uLtnrTP+g==", - "dependencies": { - "@actions/http-client": "^2.0.1", - "uuid": "^8.3.2" - } - }, - "node_modules/@actions/exec": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.1.1.tgz", - "integrity": "sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w==", - "dependencies": { - "@actions/io": "^1.0.1" - } - }, - "node_modules/@actions/github": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/@actions/github/-/github-6.0.0.tgz", - "integrity": "sha512-alScpSVnYmjNEXboZjarjukQEzgCRmjMv6Xj47fsdnqGS73bjJNDpiiXmp8jr0UZLdUB6d9jW63IcmddUP+l0g==", - "dependencies": { - "@actions/http-client": "^2.2.0", - "@octokit/core": "^5.0.1", - "@octokit/plugin-paginate-rest": "^9.0.0", - "@octokit/plugin-rest-endpoint-methods": "^10.0.0" - } - }, - "node_modules/@actions/github/node_modules/@octokit/auth-token": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-4.0.0.tgz", - "integrity": "sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA==", - "engines": { - "node": ">= 18" - } - }, - "node_modules/@actions/github/node_modules/@octokit/core": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/@octokit/core/-/core-5.2.0.tgz", - "integrity": "sha512-1LFfa/qnMQvEOAdzlQymH0ulepxbxnCYAKJZfMci/5XJyIHWgEYnDmgnKakbTh7CH2tFQ5O60oYDvns4i9RAIg==", - "dependencies": { - "@octokit/auth-token": "^4.0.0", - "@octokit/graphql": "^7.1.0", - "@octokit/request": "^8.3.1", - "@octokit/request-error": "^5.1.0", - "@octokit/types": "^13.0.0", - "before-after-hook": "^2.2.0", - "universal-user-agent": "^6.0.0" - }, - "engines": { - "node": ">= 18" - } - }, - "node_modules/@actions/github/node_modules/@octokit/endpoint": { - "version": "9.0.5", - "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-9.0.5.tgz", - "integrity": "sha512-ekqR4/+PCLkEBF6qgj8WqJfvDq65RH85OAgrtnVp1mSxaXF03u2xW/hUdweGS5654IlC0wkNYC18Z50tSYTAFw==", - "dependencies": { - "@octokit/types": "^13.1.0", - "universal-user-agent": "^6.0.0" - }, - "engines": { - "node": ">= 18" - } - }, - "node_modules/@actions/github/node_modules/@octokit/graphql": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-7.1.0.tgz", - "integrity": "sha512-r+oZUH7aMFui1ypZnAvZmn0KSqAUgE1/tUXIWaqUCa1758ts/Jio84GZuzsvUkme98kv0WFY8//n0J1Z+vsIsQ==", - "dependencies": { - "@octokit/request": "^8.3.0", - "@octokit/types": "^13.0.0", - "universal-user-agent": "^6.0.0" - }, - "engines": { - "node": ">= 18" - } - }, - "node_modules/@actions/github/node_modules/@octokit/openapi-types": { - "version": "22.2.0", - "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz", - "integrity": "sha512-QBhVjcUa9W7Wwhm6DBFu6ZZ+1/t/oYxqc2tp81Pi41YNuJinbFRx8B133qVOrAaBbF7D/m0Et6f9/pZt9Rc+tg==" - }, - "node_modules/@actions/github/node_modules/@octokit/plugin-paginate-rest": { - "version": "9.2.1", - "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-9.2.1.tgz", - "integrity": 
"sha512-wfGhE/TAkXZRLjksFXuDZdmGnJQHvtU/joFQdweXUgzo1XwvBCD4o4+75NtFfjfLK5IwLf9vHTfSiU3sLRYpRw==", - "dependencies": { - "@octokit/types": "^12.6.0" - }, - "engines": { - "node": ">= 18" - }, - "peerDependencies": { - "@octokit/core": "5" - } - }, - "node_modules/@actions/github/node_modules/@octokit/plugin-paginate-rest/node_modules/@octokit/openapi-types": { - "version": "20.0.0", - "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-20.0.0.tgz", - "integrity": "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA==" - }, - "node_modules/@actions/github/node_modules/@octokit/plugin-paginate-rest/node_modules/@octokit/types": { - "version": "12.6.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-12.6.0.tgz", - "integrity": "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw==", - "dependencies": { - "@octokit/openapi-types": "^20.0.0" - } - }, - "node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods": { - "version": "10.4.1", - "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-10.4.1.tgz", - "integrity": "sha512-xV1b+ceKV9KytQe3zCVqjg+8GTGfDYwaT1ATU5isiUyVtlVAO3HNdzpS4sr4GBx4hxQ46s7ITtZrAsxG22+rVg==", - "dependencies": { - "@octokit/types": "^12.6.0" - }, - "engines": { - "node": ">= 18" - }, - "peerDependencies": { - "@octokit/core": "5" - } - }, - "node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/openapi-types": { - "version": "20.0.0", - "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-20.0.0.tgz", - "integrity": "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA==" - }, - "node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types": { - "version": "12.6.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-12.6.0.tgz", - "integrity": "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw==", - "dependencies": { - "@octokit/openapi-types": "^20.0.0" - } - }, - "node_modules/@actions/github/node_modules/@octokit/request": { - "version": "8.4.0", - "resolved": "https://registry.npmjs.org/@octokit/request/-/request-8.4.0.tgz", - "integrity": "sha512-9Bb014e+m2TgBeEJGEbdplMVWwPmL1FPtggHQRkV+WVsMggPtEkLKPlcVYm/o8xKLkpJ7B+6N8WfQMtDLX2Dpw==", - "dependencies": { - "@octokit/endpoint": "^9.0.1", - "@octokit/request-error": "^5.1.0", - "@octokit/types": "^13.1.0", - "universal-user-agent": "^6.0.0" - }, - "engines": { - "node": ">= 18" - } - }, - "node_modules/@actions/github/node_modules/@octokit/request-error": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.0.tgz", - "integrity": "sha512-GETXfE05J0+7H2STzekpKObFe765O5dlAKUTLNGeH+x47z7JjXHfsHKo5z21D/o/IOZTUEI6nyWyR+bZVP/n5Q==", - "dependencies": { - "@octokit/types": "^13.1.0", - "deprecation": "^2.0.0", - "once": "^1.4.0" - }, - "engines": { - "node": ">= 18" - } - }, - "node_modules/@actions/github/node_modules/@octokit/types": { - "version": "13.5.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.5.0.tgz", - "integrity": "sha512-HdqWTf5Z3qwDVlzCrP8UJquMwunpDiMPt5er+QjGzL4hqr/vBVY/MauQgS1xWxCDT1oMx1EULyqxncdCY/NVSQ==", - "dependencies": { - "@octokit/openapi-types": "^22.2.0" - } - }, - "node_modules/@actions/glob": { - 
"version": "0.4.0", - "resolved": "https://registry.npmjs.org/@actions/glob/-/glob-0.4.0.tgz", - "integrity": "sha512-+eKIGFhsFa4EBwaf/GMyzCdWrXWymGXfFmZU3FHQvYS8mPcHtTtZONbkcqqUMzw9mJ/pImEBFET1JNifhqGsAQ==", - "dependencies": { - "@actions/core": "^1.9.1", - "minimatch": "^3.0.4" - } - }, - "node_modules/@actions/http-client": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.1.tgz", - "integrity": "sha512-KhC/cZsq7f8I4LfZSJKgCvEwfkE8o1538VoBeoGzokVLLnbFDEAdFD3UhoMklxo2un9NJVBdANOresx7vTHlHw==", - "dependencies": { - "tunnel": "^0.0.6", - "undici": "^5.25.4" - } - }, - "node_modules/@actions/http-client/node_modules/undici": { - "version": "5.28.4", - "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.4.tgz", - "integrity": "sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==", - "dependencies": { - "@fastify/busboy": "^2.0.0" - }, - "engines": { - "node": ">=14.0" - } - }, - "node_modules/@actions/io": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.3.tgz", - "integrity": "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q==" - }, - "node_modules/@actions/tool-cache": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@actions/tool-cache/-/tool-cache-2.0.1.tgz", - "integrity": "sha512-iPU+mNwrbA8jodY8eyo/0S/QqCKDajiR8OxWTnSk/SnYg0sj8Hp4QcUEVC1YFpHWXtrfbQrE13Jz4k4HXJQKcA==", - "dependencies": { - "@actions/core": "^1.2.6", - "@actions/exec": "^1.0.0", - "@actions/http-client": "^2.0.1", - "@actions/io": "^1.1.1", - "semver": "^6.1.0", - "uuid": "^3.3.2" - } - }, - "node_modules/@actions/tool-cache/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/@actions/tool-cache/node_modules/uuid": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", - "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", - "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. 
See https://v8.dev/blog/math-random for details.", - "bin": { - "uuid": "bin/uuid" - } - }, "node_modules/@ampproject/remapping": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.1.tgz", @@ -680,238 +46,6 @@ "node": ">=6.0.0" } }, - "node_modules/@azure/abort-controller": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-1.1.0.tgz", - "integrity": "sha512-TrRLIoSQVzfAJX9H1JeFjzAoDGcoK1IYX1UImfceTZpsyYfWr09Ss1aHW1y5TrrR3iq6RZLBwJ3E24uwPhwahw==", - "dependencies": { - "tslib": "^2.2.0" - }, - "engines": { - "node": ">=12.0.0" - } - }, - "node_modules/@azure/abort-controller/node_modules/tslib": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", - "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" - }, - "node_modules/@azure/core-auth": { - "version": "1.7.2", - "resolved": "https://registry.npmjs.org/@azure/core-auth/-/core-auth-1.7.2.tgz", - "integrity": "sha512-Igm/S3fDYmnMq1uKS38Ae1/m37B3zigdlZw+kocwEhh5GjyKjPrXKO2J6rzpC1wAxrNil/jX9BJRqBshyjnF3g==", - "dependencies": { - "@azure/abort-controller": "^2.0.0", - "@azure/core-util": "^1.1.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@azure/core-auth/node_modules/@azure/abort-controller": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.2.tgz", - "integrity": "sha512-nBrLsEWm4J2u5LpAPjxADTlq3trDgVZZXHNKabeXZtpq3d3AbN/KGO82R87rdDz5/lYB024rtEf10/q0urNgsA==", - "dependencies": { - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@azure/core-auth/node_modules/tslib": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", - "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" - }, - "node_modules/@azure/core-http": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@azure/core-http/-/core-http-3.0.4.tgz", - "integrity": "sha512-Fok9VVhMdxAFOtqiiAtg74fL0UJkt0z3D+ouUUxcRLzZNBioPRAMJFVxiWoJljYpXsRi4GDQHzQHDc9AiYaIUQ==", - "dependencies": { - "@azure/abort-controller": "^1.0.0", - "@azure/core-auth": "^1.3.0", - "@azure/core-tracing": "1.0.0-preview.13", - "@azure/core-util": "^1.1.1", - "@azure/logger": "^1.0.0", - "@types/node-fetch": "^2.5.0", - "@types/tunnel": "^0.0.3", - "form-data": "^4.0.0", - "node-fetch": "^2.6.7", - "process": "^0.11.10", - "tslib": "^2.2.0", - "tunnel": "^0.0.6", - "uuid": "^8.3.0", - "xml2js": "^0.5.0" - }, - "engines": { - "node": ">=14.0.0" - } - }, - "node_modules/@azure/core-http/node_modules/tslib": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", - "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" - }, - "node_modules/@azure/core-lro": { - "version": "2.7.2", - "resolved": "https://registry.npmjs.org/@azure/core-lro/-/core-lro-2.7.2.tgz", - "integrity": "sha512-0YIpccoX8m/k00O7mDDMdJpbr6mf1yWo2dfmxt5A8XVZVVMz2SSKaEbMCeJRvgQ0IaSlqhjT47p4hVIRRy90xw==", - "dependencies": { - "@azure/abort-controller": "^2.0.0", - "@azure/core-util": "^1.2.0", - "@azure/logger": "^1.0.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@azure/core-lro/node_modules/@azure/abort-controller": { - "version": "2.1.2", - "resolved": 
"https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.2.tgz", - "integrity": "sha512-nBrLsEWm4J2u5LpAPjxADTlq3trDgVZZXHNKabeXZtpq3d3AbN/KGO82R87rdDz5/lYB024rtEf10/q0urNgsA==", - "dependencies": { - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@azure/core-lro/node_modules/tslib": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", - "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" - }, - "node_modules/@azure/core-paging": { - "version": "1.6.2", - "resolved": "https://registry.npmjs.org/@azure/core-paging/-/core-paging-1.6.2.tgz", - "integrity": "sha512-YKWi9YuCU04B55h25cnOYZHxXYtEvQEbKST5vqRga7hWY9ydd3FZHdeQF8pyh+acWZvppw13M/LMGx0LABUVMA==", - "dependencies": { - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@azure/core-paging/node_modules/tslib": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", - "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" - }, - "node_modules/@azure/core-tracing": { - "version": "1.0.0-preview.13", - "resolved": "https://registry.npmjs.org/@azure/core-tracing/-/core-tracing-1.0.0-preview.13.tgz", - "integrity": "sha512-KxDlhXyMlh2Jhj2ykX6vNEU0Vou4nHr025KoSEiz7cS3BNiHNaZcdECk/DmLkEB0as5T7b/TpRcehJ5yV6NeXQ==", - "dependencies": { - "@opentelemetry/api": "^1.0.1", - "tslib": "^2.2.0" - }, - "engines": { - "node": ">=12.0.0" - } - }, - "node_modules/@azure/core-tracing/node_modules/tslib": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", - "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" - }, - "node_modules/@azure/core-util": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/@azure/core-util/-/core-util-1.9.0.tgz", - "integrity": "sha512-AfalUQ1ZppaKuxPPMsFEUdX6GZPB3d9paR9d/TTL7Ow2De8cJaC7ibi7kWVlFAVPCYo31OcnGymc0R89DX8Oaw==", - "dependencies": { - "@azure/abort-controller": "^2.0.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@azure/core-util/node_modules/@azure/abort-controller": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.2.tgz", - "integrity": "sha512-nBrLsEWm4J2u5LpAPjxADTlq3trDgVZZXHNKabeXZtpq3d3AbN/KGO82R87rdDz5/lYB024rtEf10/q0urNgsA==", - "dependencies": { - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@azure/core-util/node_modules/tslib": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", - "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" - }, - "node_modules/@azure/logger": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@azure/logger/-/logger-1.1.2.tgz", - "integrity": "sha512-l170uE7bsKpIU6B/giRc9i4NI0Mj+tANMMMxf7Zi/5cKzEqPayP7+X1WPrG7e+91JgY8N+7K7nF2WOi7iVhXvg==", - "dependencies": { - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@azure/logger/node_modules/tslib": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", - "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" - }, - "node_modules/@azure/ms-rest-js": { - "version": "2.7.0", - 
"resolved": "https://registry.npmjs.org/@azure/ms-rest-js/-/ms-rest-js-2.7.0.tgz", - "integrity": "sha512-ngbzWbqF+NmztDOpLBVDxYM+XLcUj7nKhxGbSU9WtIsXfRB//cf2ZbAG5HkOrhU9/wd/ORRB6lM/d69RKVjiyA==", - "dependencies": { - "@azure/core-auth": "^1.1.4", - "abort-controller": "^3.0.0", - "form-data": "^2.5.0", - "node-fetch": "^2.6.7", - "tslib": "^1.10.0", - "tunnel": "0.0.6", - "uuid": "^8.3.2", - "xml2js": "^0.5.0" - } - }, - "node_modules/@azure/ms-rest-js/node_modules/form-data": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", - "integrity": "sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==", - "dependencies": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.6", - "mime-types": "^2.1.12" - }, - "engines": { - "node": ">= 0.12" - } - }, - "node_modules/@azure/storage-blob": { - "version": "12.18.0", - "resolved": "https://registry.npmjs.org/@azure/storage-blob/-/storage-blob-12.18.0.tgz", - "integrity": "sha512-BzBZJobMoDyjJsPRMLNHvqHycTGrT8R/dtcTx9qUFcqwSRfGVK9A/cZ7Nx38UQydT9usZGbaDCN75QRNjezSAA==", - "dependencies": { - "@azure/abort-controller": "^1.0.0", - "@azure/core-http": "^3.0.0", - "@azure/core-lro": "^2.2.0", - "@azure/core-paging": "^1.1.1", - "@azure/core-tracing": "1.0.0-preview.13", - "@azure/logger": "^1.0.0", - "events": "^3.0.0", - "tslib": "^2.2.0" - }, - "engines": { - "node": ">=14.0.0" - } - }, - "node_modules/@azure/storage-blob/node_modules/tslib": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", - "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" - }, "node_modules/@babel/code-frame": { "version": "7.22.13", "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.22.13.tgz", @@ -1635,14 +769,6 @@ "node": "^12.22.0 || ^14.17.0 || >=16.0.0" } }, - "node_modules/@fastify/busboy": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz", - "integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==", - "engines": { - "node": ">=14" - } - }, "node_modules/@gar/promisify": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/@gar/promisify/-/promisify-1.1.3.tgz", @@ -1697,90 +823,6 @@ "node": ">=6.9.0" } }, - "node_modules/@isaacs/cliui": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", - "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", - "dependencies": { - "string-width": "^5.1.2", - "string-width-cjs": "npm:string-width@^4.2.0", - "strip-ansi": "^7.0.1", - "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", - "wrap-ansi": "^8.1.0", - "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/@isaacs/cliui/node_modules/ansi-regex": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-regex?sponsor=1" - } - }, - "node_modules/@isaacs/cliui/node_modules/ansi-styles": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", - "integrity": 
"sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/@isaacs/cliui/node_modules/string-width": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", - "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", - "dependencies": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/@isaacs/cliui/node_modules/strip-ansi": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", - "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", - "dependencies": { - "ansi-regex": "^6.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/strip-ansi?sponsor=1" - } - }, - "node_modules/@isaacs/cliui/node_modules/wrap-ansi": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", - "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", - "dependencies": { - "ansi-styles": "^6.1.0", - "string-width": "^5.0.1", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, "node_modules/@isaacs/string-locale-compare": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@isaacs/string-locale-compare/-/string-locale-compare-1.1.0.tgz", @@ -3888,77 +2930,6 @@ "node": ">= 8" } }, - "node_modules/@npmcli/agent": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/@npmcli/agent/-/agent-2.2.2.tgz", - "integrity": "sha512-OrcNPXdpSl9UX7qPVRWbmWMCSXrcDa2M9DvrbOTj7ao1S4PlqVFYv9/yLKMkrJKZ/V5A/kDBC690or307i26Og==", - "dependencies": { - "agent-base": "^7.1.0", - "http-proxy-agent": "^7.0.0", - "https-proxy-agent": "^7.0.1", - "lru-cache": "^10.0.1", - "socks-proxy-agent": "^8.0.3" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/@npmcli/agent/node_modules/agent-base": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.1.tgz", - "integrity": "sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA==", - "dependencies": { - "debug": "^4.3.4" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/@npmcli/agent/node_modules/http-proxy-agent": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz", - "integrity": "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==", - "dependencies": { - "agent-base": "^7.1.0", - "debug": "^4.3.4" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/@npmcli/agent/node_modules/https-proxy-agent": { - "version": "7.0.4", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.4.tgz", - "integrity": "sha512-wlwpilI7YdjSkWaQ/7omYBMTliDcmCN8OLihO6I9B86g06lMyAoqgoDpV0XqoaPOKj+0DIdAvnsWfyAAhmimcg==", - "dependencies": { - "agent-base": "^7.0.2", - "debug": "4" - }, - "engines": { - "node": ">= 14" - } - }, - 
"node_modules/@npmcli/agent/node_modules/lru-cache": { - "version": "10.2.2", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.2.2.tgz", - "integrity": "sha512-9hp3Vp2/hFQUiIwKo8XCeFVnrg8Pk3TYNPIR7tJADKi5YfcF7vEaK7avFHTlSy3kOKYaJQaalfEo6YuXdceBOQ==", - "engines": { - "node": "14 || >=16.14" - } - }, - "node_modules/@npmcli/agent/node_modules/socks-proxy-agent": { - "version": "8.0.3", - "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-8.0.3.tgz", - "integrity": "sha512-VNegTZKhuGq5vSD6XNKlbqWhyt/40CgoEw8XxD6dhnm8Jq9IEa3nIa4HwnM8XOqU0CdB0BwWVXusqiFXfHB3+A==", - "dependencies": { - "agent-base": "^7.1.1", - "debug": "^4.3.4", - "socks": "^2.7.1" - }, - "engines": { - "node": ">= 14" - } - }, "node_modules/@npmcli/arborist": { "version": "5.3.0", "resolved": "https://registry.npmjs.org/@npmcli/arborist/-/arborist-5.3.0.tgz", @@ -4869,6 +3840,7 @@ "version": "3.0.4", "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-3.0.4.tgz", "integrity": "sha512-TWFX7cZF2LXoCvdmJWY7XVPi74aSY0+FfBZNSXEXFkMpjcqsQwDSYVv5FhRFaI0V1ECnwbz4j59T/G+rXNWaIQ==", + "dev": true, "engines": { "node": ">= 14" } @@ -4877,6 +3849,7 @@ "version": "4.2.4", "resolved": "https://registry.npmjs.org/@octokit/core/-/core-4.2.4.tgz", "integrity": "sha512-rYKilwgzQ7/imScn3M9/pFfUf4I1AZEH3KhyJmtPdE2zfaXAn2mFfUy4FbKewzc2We5y/LlKLj36fWJLKC2SIQ==", + "dev": true, "dependencies": { "@octokit/auth-token": "^3.0.0", "@octokit/graphql": "^5.0.0", @@ -4894,6 +3867,7 @@ "version": "7.0.6", "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-7.0.6.tgz", "integrity": "sha512-5L4fseVRUsDFGR00tMWD/Trdeeihn999rTMGRMC1G/Ldi1uWlWJzI98H4Iak5DB/RVvQuyMYKqSK/R6mbSOQyg==", + "dev": true, "dependencies": { "@octokit/types": "^9.0.0", "is-plain-object": "^5.0.0", @@ -4907,6 +3881,7 @@ "version": "5.0.6", "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-5.0.6.tgz", "integrity": "sha512-Fxyxdy/JH0MnIB5h+UQ3yCoh1FG4kWXfFKkpWqjZHw/p+Kc8Y44Hu/kCgNBT6nU1shNumEchmW/sUO1JuQnPcw==", + "dev": true, "dependencies": { "@octokit/request": "^6.0.0", "@octokit/types": "^9.0.0", @@ -4919,7 +3894,8 @@ "node_modules/@octokit/openapi-types": { "version": "18.1.1", "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-18.1.1.tgz", - "integrity": "sha512-VRaeH8nCDtF5aXWnjPuEMIYf1itK/s3JYyJcWFJT8X9pSNnBtriDf7wlEWsGuhPLl4QIH4xM8fqTXDwJ3Mu6sw==" + "integrity": "sha512-VRaeH8nCDtF5aXWnjPuEMIYf1itK/s3JYyJcWFJT8X9pSNnBtriDf7wlEWsGuhPLl4QIH4xM8fqTXDwJ3Mu6sw==", + "dev": true }, "node_modules/@octokit/plugin-enterprise-rest": { "version": "6.0.1", @@ -4947,6 +3923,7 @@ "version": "1.0.4", "resolved": "https://registry.npmjs.org/@octokit/plugin-request-log/-/plugin-request-log-1.0.4.tgz", "integrity": "sha512-mLUsMkgP7K/cnFEw07kWqXGF5LKrOkD+lhCrKvPHXWDywAwuDUeDwWBpc69XK3pNX0uKiVt8g5z96PJ6z9xCFA==", + "dev": true, "peerDependencies": { "@octokit/core": ">=3" } @@ -4975,32 +3952,11 @@ "@octokit/openapi-types": "^18.0.0" } }, - "node_modules/@octokit/plugin-retry": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/@octokit/plugin-retry/-/plugin-retry-3.0.9.tgz", - "integrity": "sha512-r+fArdP5+TG6l1Rv/C9hVoty6tldw6cE2pRHNGmFPdyfrc696R6JjrQ3d7HdVqGwuzfyrcaLAKD7K8TX8aehUQ==", - "dependencies": { - "@octokit/types": "^6.0.3", - "bottleneck": "^2.15.3" - } - }, - "node_modules/@octokit/plugin-retry/node_modules/@octokit/openapi-types": { - "version": "12.11.0", - "resolved": 
"https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-12.11.0.tgz", - "integrity": "sha512-VsXyi8peyRq9PqIz/tpqiL2w3w80OgVMwBHltTml3LmVvXiphgeqmY9mvBw9Wu7e0QWk/fqD37ux8yP5uVekyQ==" - }, - "node_modules/@octokit/plugin-retry/node_modules/@octokit/types": { - "version": "6.41.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-6.41.0.tgz", - "integrity": "sha512-eJ2jbzjdijiL3B4PrSQaSjuF2sPEQPVCPzBvTHJD9Nz+9dw2SGH4K4xeQJ77YfTq5bRQ+bD8wT11JbeDPmxmGg==", - "dependencies": { - "@octokit/openapi-types": "^12.11.0" - } - }, "node_modules/@octokit/request": { "version": "6.2.8", "resolved": "https://registry.npmjs.org/@octokit/request/-/request-6.2.8.tgz", "integrity": "sha512-ow4+pkVQ+6XVVsekSYBzJC0VTVvh/FCTUUgTsboGq+DTeWdyIFV8WSCdo0RIxk6wSkBTHqIK1mYuY7nOBXOchw==", + "dev": true, "dependencies": { "@octokit/endpoint": "^7.0.0", "@octokit/request-error": "^3.0.0", @@ -5017,6 +3973,7 @@ "version": "3.0.3", "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-3.0.3.tgz", "integrity": "sha512-crqw3V5Iy2uOU5Np+8M/YexTlT8zxCfI+qu+LxUB7SZpje4Qmx3mub5DfEKSO8Ylyk0aogi6TYdf6kxzh2BguQ==", + "dev": true, "dependencies": { "@octokit/types": "^9.0.0", "deprecation": "^2.0.0", @@ -5051,18 +4008,11 @@ "version": "9.3.2", "resolved": "https://registry.npmjs.org/@octokit/types/-/types-9.3.2.tgz", "integrity": "sha512-D4iHGTdAnEEVsB8fl95m1hiz7D5YiRdQ9b/OEb3BYRVwbLsGHcRVPz+u+BgRLNk0Q0/4iZCBqDN96j2XNxfXrA==", + "dev": true, "dependencies": { "@octokit/openapi-types": "^18.0.0" } }, - "node_modules/@opentelemetry/api": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.8.0.tgz", - "integrity": "sha512-I/s6F7yKUDdtMsoBWXJe8Qz40Tui5vsuKCWJEWVL+5q9sSWRzzx6v2KeNsOBEwd94j0eWkpWCH4yB6rZg9Mf0w==", - "engines": { - "node": ">=8.0.0" - } - }, "node_modules/@parcel/watcher": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/@parcel/watcher/-/watcher-2.0.4.tgz", @@ -5081,15 +4031,6 @@ "url": "https://opencollective.com/parcel" } }, - "node_modules/@pkgjs/parseargs": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", - "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", - "optional": true, - "engines": { - "node": ">=14" - } - }, "node_modules/@pkgr/utils": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/@pkgr/utils/-/utils-2.4.2.tgz", @@ -5116,312 +4057,6 @@ "integrity": "sha512-t0hLfiEKfMUoqhG+U1oid7Pva4bbDPHYfJNiB7BiIjRkj1pyC++4N3huJfqY6aRH6VTB0rvtzQwjM4K6qpfOig==", "dev": true }, - "node_modules/@protobuf-ts/plugin": { - "version": "2.9.4", - "resolved": "https://registry.npmjs.org/@protobuf-ts/plugin/-/plugin-2.9.4.tgz", - "integrity": "sha512-Db5Laq5T3mc6ERZvhIhkj1rn57/p8gbWiCKxQWbZBBl20wMuqKoHbRw4tuD7FyXi+IkwTToaNVXymv5CY3E8Rw==", - "dependencies": { - "@protobuf-ts/plugin-framework": "^2.9.4", - "@protobuf-ts/protoc": "^2.9.4", - "@protobuf-ts/runtime": "^2.9.4", - "@protobuf-ts/runtime-rpc": "^2.9.4", - "typescript": "^3.9" - }, - "bin": { - "protoc-gen-dump": "bin/protoc-gen-dump", - "protoc-gen-ts": "bin/protoc-gen-ts" - } - }, - "node_modules/@protobuf-ts/plugin-framework": { - "version": "2.9.4", - "resolved": "https://registry.npmjs.org/@protobuf-ts/plugin-framework/-/plugin-framework-2.9.4.tgz", - "integrity": "sha512-9nuX1kjdMliv+Pes8dQCKyVhjKgNNfwxVHg+tx3fLXSfZZRcUHMc1PMwB9/vTvc6gBKt9QGz5ERqSqZc0++E9A==", - "dependencies": { - "@protobuf-ts/runtime": "^2.9.4", - "typescript": 
"^3.9" - } - }, - "node_modules/@protobuf-ts/plugin-framework/node_modules/typescript": { - "version": "3.9.10", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.10.tgz", - "integrity": "sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q==", - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" - }, - "engines": { - "node": ">=4.2.0" - } - }, - "node_modules/@protobuf-ts/plugin/node_modules/typescript": { - "version": "3.9.10", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.10.tgz", - "integrity": "sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q==", - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" - }, - "engines": { - "node": ">=4.2.0" - } - }, - "node_modules/@protobuf-ts/protoc": { - "version": "2.9.4", - "resolved": "https://registry.npmjs.org/@protobuf-ts/protoc/-/protoc-2.9.4.tgz", - "integrity": "sha512-hQX+nOhFtrA+YdAXsXEDrLoGJqXHpgv4+BueYF0S9hy/Jq0VRTVlJS1Etmf4qlMt/WdigEes5LOd/LDzui4GIQ==", - "bin": { - "protoc": "protoc.js" - } - }, - "node_modules/@protobuf-ts/runtime": { - "version": "2.9.4", - "resolved": "https://registry.npmjs.org/@protobuf-ts/runtime/-/runtime-2.9.4.tgz", - "integrity": "sha512-vHRFWtJJB/SiogWDF0ypoKfRIZ41Kq+G9cEFj6Qm1eQaAhJ1LDFvgZ7Ja4tb3iLOQhz0PaoPnnOijF1qmEqTxg==" - }, - "node_modules/@protobuf-ts/runtime-rpc": { - "version": "2.9.4", - "resolved": "https://registry.npmjs.org/@protobuf-ts/runtime-rpc/-/runtime-rpc-2.9.4.tgz", - "integrity": "sha512-y9L9JgnZxXFqH5vD4d7j9duWvIJ7AShyBRoNKJGhu9Q27qIbchfzli66H9RvrQNIFk5ER7z1Twe059WZGqERcA==", - "dependencies": { - "@protobuf-ts/runtime": "^2.9.4" - } - }, - "node_modules/@sigstore/bundle": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-2.3.2.tgz", - "integrity": "sha512-wueKWDk70QixNLB363yHc2D2ItTgYiMTdPwK8D9dKQMR3ZQ0c35IxP5xnwQ8cNLoCgCRcHf14kE+CLIvNX1zmA==", - "dependencies": { - "@sigstore/protobuf-specs": "^0.3.2" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/@sigstore/core": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@sigstore/core/-/core-1.1.0.tgz", - "integrity": "sha512-JzBqdVIyqm2FRQCulY6nbQzMpJJpSiJ8XXWMhtOX9eKgaXXpfNOF53lzQEjIydlStnd/eFtuC1dW4VYdD93oRg==", - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/@sigstore/protobuf-specs": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.3.2.tgz", - "integrity": "sha512-c6B0ehIWxMI8wiS/bj6rHMPqeFvngFV7cDU/MY+B16P9Z3Mp9k8L93eYZ7BYzSickzuqAQqAq0V956b3Ju6mLw==", - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/@sigstore/sign": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/@sigstore/sign/-/sign-2.3.2.tgz", - "integrity": "sha512-5Vz5dPVuunIIvC5vBb0APwo7qKA4G9yM48kPWJT+OEERs40md5GoUR1yedwpekWZ4m0Hhw44m6zU+ObsON+iDA==", - "dependencies": { - "@sigstore/bundle": "^2.3.2", - "@sigstore/core": "^1.0.0", - "@sigstore/protobuf-specs": "^0.3.2", - "make-fetch-happen": "^13.0.1", - "proc-log": "^4.2.0", - "promise-retry": "^2.0.1" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/@sigstore/sign/node_modules/@npmcli/fs": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-3.1.1.tgz", - "integrity": "sha512-q9CRWjpHCMIh5sVyefoD1cA7PkvILqCZsnSOEUUivORLjxCO/Irmue2DprETiNgEqktDBZaM1Bi+jrarx1XdCg==", - "dependencies": { - "semver": "^7.3.5" - }, - 
"engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/@sigstore/sign/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/@sigstore/sign/node_modules/cacache": { - "version": "18.0.3", - "resolved": "https://registry.npmjs.org/cacache/-/cacache-18.0.3.tgz", - "integrity": "sha512-qXCd4rh6I07cnDqh8V48/94Tc/WSfj+o3Gn6NZ0aZovS255bUx8O13uKxRFd2eWG0xgsco7+YItQNPaa5E85hg==", - "dependencies": { - "@npmcli/fs": "^3.1.0", - "fs-minipass": "^3.0.0", - "glob": "^10.2.2", - "lru-cache": "^10.0.1", - "minipass": "^7.0.3", - "minipass-collect": "^2.0.1", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "p-map": "^4.0.0", - "ssri": "^10.0.0", - "tar": "^6.1.11", - "unique-filename": "^3.0.0" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/@sigstore/sign/node_modules/fs-minipass": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-3.0.3.tgz", - "integrity": "sha512-XUBA9XClHbnJWSfBzjkm6RvPsyg3sryZt06BEQoXcF7EK/xpGaQYJgQKDJSUH5SGZ76Y7pFx1QBnXz09rU5Fbw==", - "dependencies": { - "minipass": "^7.0.3" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/@sigstore/sign/node_modules/glob": { - "version": "10.3.16", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.3.16.tgz", - "integrity": "sha512-JDKXl1DiuuHJ6fVS2FXjownaavciiHNUU4mOvV/B793RLh05vZL1rcPnCSaOgv1hDT6RDlY7AB7ZUvFYAtPgAw==", - "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^3.1.2", - "minimatch": "^9.0.1", - "minipass": "^7.0.4", - "path-scurry": "^1.11.0" - }, - "bin": { - "glob": "dist/esm/bin.mjs" - }, - "engines": { - "node": ">=16 || 14 >=14.18" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/@sigstore/sign/node_modules/lru-cache": { - "version": "10.2.2", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.2.2.tgz", - "integrity": "sha512-9hp3Vp2/hFQUiIwKo8XCeFVnrg8Pk3TYNPIR7tJADKi5YfcF7vEaK7avFHTlSy3kOKYaJQaalfEo6YuXdceBOQ==", - "engines": { - "node": "14 || >=16.14" - } - }, - "node_modules/@sigstore/sign/node_modules/make-fetch-happen": { - "version": "13.0.1", - "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-13.0.1.tgz", - "integrity": "sha512-cKTUFc/rbKUd/9meOvgrpJ2WrNzymt6jfRDdwg5UCnVzv9dTpEj9JS5m3wtziXVCjluIXyL8pcaukYqezIzZQA==", - "dependencies": { - "@npmcli/agent": "^2.0.0", - "cacache": "^18.0.0", - "http-cache-semantics": "^4.1.1", - "is-lambda": "^1.0.1", - "minipass": "^7.0.2", - "minipass-fetch": "^3.0.0", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "negotiator": "^0.6.3", - "proc-log": "^4.2.0", - "promise-retry": "^2.0.1", - "ssri": "^10.0.0" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/@sigstore/sign/node_modules/minimatch": { - "version": "9.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.4.tgz", - "integrity": "sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==", - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - 
"node_modules/@sigstore/sign/node_modules/minipass": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.1.tgz", - "integrity": "sha512-UZ7eQ+h8ywIRAW1hIEl2AqdwzJucU/Kp59+8kkZeSvafXhZjul247BvIJjEVFVeON6d7lM46XX1HXCduKAS8VA==", - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, - "node_modules/@sigstore/sign/node_modules/minipass-collect": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/minipass-collect/-/minipass-collect-2.0.1.tgz", - "integrity": "sha512-D7V8PO9oaz7PWGLbCACuI1qEOsq7UKfLotx/C0Aet43fCUB/wfQ7DYeq2oR/svFJGYDHPr38SHATeaj/ZoKHKw==", - "dependencies": { - "minipass": "^7.0.3" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, - "node_modules/@sigstore/sign/node_modules/minipass-fetch": { - "version": "3.0.5", - "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-3.0.5.tgz", - "integrity": "sha512-2N8elDQAtSnFV0Dk7gt15KHsS0Fyz6CbYZ360h0WTYV1Ty46li3rAXVOQj1THMNLdmrD9Vt5pBPtWtVkpwGBqg==", - "dependencies": { - "minipass": "^7.0.3", - "minipass-sized": "^1.0.3", - "minizlib": "^2.1.2" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - }, - "optionalDependencies": { - "encoding": "^0.1.13" - } - }, - "node_modules/@sigstore/sign/node_modules/proc-log": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-4.2.0.tgz", - "integrity": "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==", - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/@sigstore/sign/node_modules/ssri": { - "version": "10.0.6", - "resolved": "https://registry.npmjs.org/ssri/-/ssri-10.0.6.tgz", - "integrity": "sha512-MGrFH9Z4NP9Iyhqn16sDtBpRRNJ0Y2hNa6D65h736fVSaPCHr4DM4sWUNvVaSuC+0OBGhwsrydQwmgfg5LncqQ==", - "dependencies": { - "minipass": "^7.0.3" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/@sigstore/sign/node_modules/unique-filename": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-3.0.0.tgz", - "integrity": "sha512-afXhuC55wkAmZ0P18QsVE6kp8JaxrEokN2HGIoIVv2ijHQd419H0+6EigAFcIzXeMIkcIkNBpB3L/DXB3cTS/g==", - "dependencies": { - "unique-slug": "^4.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/@sigstore/sign/node_modules/unique-slug": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-4.0.0.tgz", - "integrity": "sha512-WrcA6AyEfqDX5bWige/4NQfPZMtASNVxdmWR76WESYQVAACSgWcR6e9i0mofqqBxYFtL4oAxPIptY73/0YE1DQ==", - "dependencies": { - "imurmurhash": "^0.1.4" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, "node_modules/@sinclair/typebox": { "version": "0.27.8", "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", @@ -5496,45 +4131,6 @@ "@babel/types": "^7.20.7" } }, - "node_modules/@types/body-parser": { - "version": "1.19.5", - "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.5.tgz", - "integrity": "sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg==", - "dependencies": { - "@types/connect": "*", - "@types/node": "*" - } - }, - "node_modules/@types/connect": { - "version": "3.4.38", - "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.38.tgz", - "integrity": "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==", - "dependencies": { - 
"@types/node": "*" - } - }, - "node_modules/@types/express": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.21.tgz", - "integrity": "sha512-ejlPM315qwLpaQlQDTjPdsUFSc6ZsP4AN6AlWnogPjQ7CVi7PYF3YVz+CY3jE2pwYf7E/7HlDAN0rV2GxTG0HQ==", - "dependencies": { - "@types/body-parser": "*", - "@types/express-serve-static-core": "^4.17.33", - "@types/qs": "*", - "@types/serve-static": "*" - } - }, - "node_modules/@types/express-serve-static-core": { - "version": "4.19.1", - "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.19.1.tgz", - "integrity": "sha512-ej0phymbFLoCB26dbbq5PGScsf2JAJ4IJHjG10LalgUV36XKTmA4GdA+PVllKvRk0sEKt64X8975qFnkSi0hqA==", - "dependencies": { - "@types/node": "*", - "@types/qs": "*", - "@types/range-parser": "*", - "@types/send": "*" - } - }, "node_modules/@types/graceful-fs": { "version": "4.1.6", "resolved": "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.6.tgz", @@ -5544,11 +4140,6 @@ "@types/node": "*" } }, - "node_modules/@types/http-errors": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.4.tgz", - "integrity": "sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA==" - }, "node_modules/@types/istanbul-lib-coverage": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz", @@ -5595,19 +4186,6 @@ "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==", "dev": true }, - "node_modules/@types/jsonwebtoken": { - "version": "9.0.6", - "resolved": "https://registry.npmjs.org/@types/jsonwebtoken/-/jsonwebtoken-9.0.6.tgz", - "integrity": "sha512-/5hndP5dCjloafCXns6SZyESp3Ldq7YjH3zwzwczYnjxIT0Fqzk5ROSYVGfFyczIue7IUEj8hkvLbPoLQ18vQw==", - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@types/mime": { - "version": "1.3.5", - "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz", - "integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==" - }, "node_modules/@types/minimatch": { "version": "3.0.5", "resolved": "https://registry.npmjs.org/@types/minimatch/-/minimatch-3.0.5.tgz", @@ -5623,16 +4201,8 @@ "node_modules/@types/node": { "version": "20.5.7", "resolved": "https://registry.npmjs.org/@types/node/-/node-20.5.7.tgz", - "integrity": "sha512-dP7f3LdZIysZnmvP3ANJYTSwg+wLLl8p7RqniVlV7j+oXSXAbt9h0WIBFmJy5inWZoX9wZN6eXx+YXd9Rh3RBA==" - }, - "node_modules/@types/node-fetch": { - "version": "2.6.11", - "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.11.tgz", - "integrity": "sha512-24xFj9R5+rfQJLRyM56qh+wnVSYhyXC2tkoBndtY0U+vubqNsYXGjufB2nn8Q6gt0LrARwL6UBtMCSVCwl4B1g==", - "dependencies": { - "@types/node": "*", - "form-data": "^4.0.0" - } + "integrity": "sha512-dP7f3LdZIysZnmvP3ANJYTSwg+wLLl8p7RqniVlV7j+oXSXAbt9h0WIBFmJy5inWZoX9wZN6eXx+YXd9Rh3RBA==", + "dev": true }, "node_modules/@types/normalize-package-data": { "version": "2.4.4", @@ -5646,41 +4216,12 @@ "integrity": "sha512-dISoDXWWQwUquiKsyZ4Ng+HX2KsPL7LyHKHQwgGFEA3IaKac4Obd+h2a/a6waisAoepJlBcx9paWqjA8/HVjCw==", "dev": true }, - "node_modules/@types/qs": { - "version": "6.9.15", - "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.15.tgz", - "integrity": "sha512-uXHQKES6DQKKCLh441Xv/dwxOq1TVS3JPUMlEqoEglvlhR6Mxnlew/Xq/LRVHpLyk7iK3zODe1qYHIMltO7XGg==" - }, - 
"node_modules/@types/range-parser": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.7.tgz", - "integrity": "sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==" - }, "node_modules/@types/semver": { "version": "7.5.0", "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.0.tgz", "integrity": "sha512-G8hZ6XJiHnuhQKR7ZmysCeJWE08o8T0AXtk5darsCaTVsYZhhgUrq53jizaR2FvsoeCwJhlmwTjkXBY5Pn/ZHw==", "dev": true }, - "node_modules/@types/send": { - "version": "0.17.4", - "resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.4.tgz", - "integrity": "sha512-x2EM6TJOybec7c52BX0ZspPodMsQUd5L6PRwOunVyVUhXiBSKf3AezDL8Dgvgt5o0UfKNfuA0eMLr2wLT4AiBA==", - "dependencies": { - "@types/mime": "^1", - "@types/node": "*" - } - }, - "node_modules/@types/serve-static": { - "version": "1.15.7", - "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.7.tgz", - "integrity": "sha512-W8Ym+h8nhuRwaKPaDw34QUkwsGi6Rc4yYqvKFo5rm2FUEhCFbzVWrxXUxuKK8TASjWsysJY0nsmNCGhCOIsrOw==", - "dependencies": { - "@types/http-errors": "*", - "@types/node": "*", - "@types/send": "*" - } - }, "node_modules/@types/signale": { "version": "1.4.4", "resolved": "https://registry.npmjs.org/@types/signale/-/signale-1.4.4.tgz", @@ -5696,14 +4237,6 @@ "integrity": "sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw==", "dev": true }, - "node_modules/@types/tunnel": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/@types/tunnel/-/tunnel-0.0.3.tgz", - "integrity": "sha512-sOUTGn6h1SfQ+gbgqC364jLFBw2lnFqkgF3q0WovEHRLMrVD1sd5aufqi/aJObLekJO+Aq5z646U4Oxy6shXMA==", - "dependencies": { - "@types/node": "*" - } - }, "node_modules/@types/yargs": { "version": "17.0.24", "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.24.tgz", @@ -6010,17 +4543,6 @@ "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==", "dev": true }, - "node_modules/abort-controller": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", - "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", - "dependencies": { - "event-target-shim": "^5.0.0" - }, - "engines": { - "node": ">=6.5" - } - }, "node_modules/acorn": { "version": "8.10.0", "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.10.0.tgz", @@ -6076,6 +4598,7 @@ "version": "3.1.0", "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", + "dev": true, "dependencies": { "clean-stack": "^2.0.0", "indent-string": "^4.0.0" @@ -6140,6 +4663,7 @@ "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, "engines": { "node": ">=8" } @@ -6148,6 +4672,7 @@ "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, "dependencies": { "color-convert": "^2.0.1" }, @@ -6177,177 +4702,6 @@ "integrity": 
"sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==", "dev": true }, - "node_modules/archiver": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/archiver/-/archiver-7.0.1.tgz", - "integrity": "sha512-ZcbTaIqJOfCc03QwD468Unz/5Ir8ATtvAHsK+FdXbDIbGfihqh9mrvdcYunQzqn4HrvWWaFyaxJhGZagaJJpPQ==", - "dependencies": { - "archiver-utils": "^5.0.2", - "async": "^3.2.4", - "buffer-crc32": "^1.0.0", - "readable-stream": "^4.0.0", - "readdir-glob": "^1.1.2", - "tar-stream": "^3.0.0", - "zip-stream": "^6.0.1" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/archiver-utils": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/archiver-utils/-/archiver-utils-5.0.2.tgz", - "integrity": "sha512-wuLJMmIBQYCsGZgYLTy5FIB2pF6Lfb6cXMSF8Qywwk3t20zWnAi7zLcQFdKQmIB8wyZpY5ER38x08GbwtR2cLA==", - "dependencies": { - "glob": "^10.0.0", - "graceful-fs": "^4.2.0", - "is-stream": "^2.0.1", - "lazystream": "^1.0.0", - "lodash": "^4.17.15", - "normalize-path": "^3.0.0", - "readable-stream": "^4.0.0" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/archiver-utils/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/archiver-utils/node_modules/buffer": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", - "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.2.1" - } - }, - "node_modules/archiver-utils/node_modules/glob": { - "version": "10.3.16", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.3.16.tgz", - "integrity": "sha512-JDKXl1DiuuHJ6fVS2FXjownaavciiHNUU4mOvV/B793RLh05vZL1rcPnCSaOgv1hDT6RDlY7AB7ZUvFYAtPgAw==", - "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^3.1.2", - "minimatch": "^9.0.1", - "minipass": "^7.0.4", - "path-scurry": "^1.11.0" - }, - "bin": { - "glob": "dist/esm/bin.mjs" - }, - "engines": { - "node": ">=16 || 14 >=14.18" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/archiver-utils/node_modules/minimatch": { - "version": "9.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.4.tgz", - "integrity": "sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==", - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/archiver-utils/node_modules/minipass": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.1.tgz", - "integrity": "sha512-UZ7eQ+h8ywIRAW1hIEl2AqdwzJucU/Kp59+8kkZeSvafXhZjul247BvIJjEVFVeON6d7lM46XX1HXCduKAS8VA==", - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, - "node_modules/archiver-utils/node_modules/readable-stream": { - "version": "4.5.2", - "resolved": 
"https://registry.npmjs.org/readable-stream/-/readable-stream-4.5.2.tgz", - "integrity": "sha512-yjavECdqeZ3GLXNgRXgeQEdz9fvDDkNKyHnbHRFtOr7/LcfgBcmct7t/ET+HaCTqfh06OzoAxrkN/IfjJBVe+g==", - "dependencies": { - "abort-controller": "^3.0.0", - "buffer": "^6.0.3", - "events": "^3.3.0", - "process": "^0.11.10", - "string_decoder": "^1.3.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - } - }, - "node_modules/archiver/node_modules/buffer": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", - "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.2.1" - } - }, - "node_modules/archiver/node_modules/readable-stream": { - "version": "4.5.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.5.2.tgz", - "integrity": "sha512-yjavECdqeZ3GLXNgRXgeQEdz9fvDDkNKyHnbHRFtOr7/LcfgBcmct7t/ET+HaCTqfh06OzoAxrkN/IfjJBVe+g==", - "dependencies": { - "abort-controller": "^3.0.0", - "buffer": "^6.0.3", - "events": "^3.3.0", - "process": "^0.11.10", - "string_decoder": "^1.3.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - } - }, - "node_modules/archiver/node_modules/tar-stream": { - "version": "3.1.7", - "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-3.1.7.tgz", - "integrity": "sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ==", - "dependencies": { - "b4a": "^1.6.4", - "fast-fifo": "^1.2.0", - "streamx": "^2.15.0" - } - }, "node_modules/are-we-there-yet": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-3.0.1.tgz", @@ -6531,12 +4885,14 @@ "node_modules/async": { "version": "3.2.5", "resolved": "https://registry.npmjs.org/async/-/async-3.2.5.tgz", - "integrity": "sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg==" + "integrity": "sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg==", + "dev": true }, "node_modules/asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", + "dev": true }, "node_modules/at-least-node": { "version": "1.0.0", @@ -6589,11 +4945,6 @@ "dequal": "^2.0.3" } }, - "node_modules/b4a": { - "version": "1.6.6", - "resolved": "https://registry.npmjs.org/b4a/-/b4a-1.6.6.tgz", - "integrity": "sha512-5Tk1HLk6b6ctmjIkAcU/Ujv/1WqiDl0F0JdRCR80VsOcUlHcu7pWeWRlOqQLHfDEsVx9YH/aif5AG4ehoCtTmg==" - }, "node_modules/babel-jest": { "version": "29.6.4", "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-29.6.4.tgz", @@ -6713,18 +5064,14 @@ "node_modules/balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" - }, - "node_modules/bare-events": { - "version": "2.2.2", - "resolved": 
"https://registry.npmjs.org/bare-events/-/bare-events-2.2.2.tgz", - "integrity": "sha512-h7z00dWdG0PYOQEvChhOSWvOfkIKsdZGkWr083FgN/HyoQuebSew/cgirYqh9SCuy/hRvxc5Vy6Fw8xAmYHLkQ==", - "optional": true + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true }, "node_modules/base64-js": { "version": "1.5.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "dev": true, "funding": [ { "type": "github", @@ -6743,7 +5090,8 @@ "node_modules/before-after-hook": { "version": "2.2.3", "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz", - "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==" + "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==", + "dev": true }, "node_modules/big-integer": { "version": "1.6.51", @@ -6780,18 +5128,6 @@ "node": "^12.13.0 || ^14.15.0 || >=16.0.0" } }, - "node_modules/binary": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/binary/-/binary-0.3.0.tgz", - "integrity": "sha512-D4H1y5KYwpJgK8wk1Cue5LLPgmwHKYSChkbspQg5JtVuR5ulGckxfR62H3AE9UDkdMC8yyXlqYihuz3Aqg2XZg==", - "dependencies": { - "buffers": "~0.1.1", - "chainsaw": "~0.1.0" - }, - "engines": { - "node": "*" - } - }, "node_modules/bl": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", @@ -6803,11 +5139,6 @@ "readable-stream": "^3.4.0" } }, - "node_modules/bottleneck": { - "version": "2.19.5", - "resolved": "https://registry.npmjs.org/bottleneck/-/bottleneck-2.19.5.tgz", - "integrity": "sha512-VHiNCbI1lKdl44tGrhNfU3lup0Tj/ZBMJB5/2ZbNXRCPuRCO7ed2mgcK4r17y+KB2EfuYuRaVlwNbAeaWGSpbw==" - }, "node_modules/bplist-parser": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/bplist-parser/-/bplist-parser-0.2.0.tgz", @@ -6824,6 +5155,7 @@ "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" @@ -6918,33 +5250,12 @@ "ieee754": "^1.1.13" } }, - "node_modules/buffer-crc32": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-1.0.0.tgz", - "integrity": "sha512-Db1SbgBS/fg/392AblrMJk97KggmvYhr4pB5ZIMTWtaivCPMWLkmb7m21cJvpvgK+J3nsU2CmmixNBZx4vFj/w==", - "engines": { - "node": ">=8.0.0" - } - }, - "node_modules/buffer-equal-constant-time": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", - "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==" - }, "node_modules/buffer-from": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", "dev": true }, - "node_modules/buffers": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/buffers/-/buffers-0.1.1.tgz", - "integrity": "sha512-9q/rDEGSb/Qsvv2qvzIzdluL5k7AaJOTrw23z9reQthrbF7is4CtlT0DXyO1oei2DCp4uojjzQ7igaSHp1kAEQ==", - "engines": { - "node": ">=0.2.0" - } - }, 
"node_modules/builtins": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/builtins/-/builtins-5.1.0.tgz", @@ -7078,20 +5389,6 @@ "node": ">=6" } }, - "node_modules/camel-case": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/camel-case/-/camel-case-4.1.2.tgz", - "integrity": "sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw==", - "dependencies": { - "pascal-case": "^3.1.2", - "tslib": "^2.0.3" - } - }, - "node_modules/camel-case/node_modules/tslib": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", - "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" - }, "node_modules/camelcase": { "version": "5.3.1", "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", @@ -7138,17 +5435,6 @@ } ] }, - "node_modules/chainsaw": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/chainsaw/-/chainsaw-0.1.0.tgz", - "integrity": "sha512-75kWfWt6MEKNC8xYXIdRpDehRYY/tNSgwKaJq+dbbDcxORuVrrQ+SEHoWsniVn9XPYfP4gmdWIeDk/4YNp1rNQ==", - "dependencies": { - "traverse": ">=0.3.0 <0.4" - }, - "engines": { - "node": "*" - } - }, "node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -7196,6 +5482,7 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", + "dev": true, "engines": { "node": ">=10" } @@ -7225,6 +5512,7 @@ "version": "2.2.0", "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", + "dev": true, "engines": { "node": ">=6" } @@ -7340,6 +5628,7 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, "dependencies": { "color-name": "~1.1.4" }, @@ -7350,7 +5639,8 @@ "node_modules/color-name": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true }, "node_modules/color-support": { "version": "1.1.3", @@ -7378,6 +5668,7 @@ "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dev": true, "dependencies": { "delayed-stream": "~1.0.0" }, @@ -7385,14 +5676,6 @@ "node": ">= 0.8" } }, - "node_modules/commander": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz", - "integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==", - "engines": { - "node": ">= 6" - } - }, "node_modules/common-ancestor-path": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/common-ancestor-path/-/common-ancestor-path-1.0.1.tgz", @@ -7421,63 +5704,11 @@ "node": ">=8" } }, - "node_modules/compress-commons": { - "version": "6.0.2", - "resolved": 
"https://registry.npmjs.org/compress-commons/-/compress-commons-6.0.2.tgz", - "integrity": "sha512-6FqVXeETqWPoGcfzrXb37E50NP0LXT8kAMu5ooZayhWWdgEY4lBEEcbQNXtkuKQsGduxiIcI4gOTsxTmuq/bSg==", - "dependencies": { - "crc-32": "^1.2.0", - "crc32-stream": "^6.0.0", - "is-stream": "^2.0.1", - "normalize-path": "^3.0.0", - "readable-stream": "^4.0.0" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/compress-commons/node_modules/buffer": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", - "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.2.1" - } - }, - "node_modules/compress-commons/node_modules/readable-stream": { - "version": "4.5.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.5.2.tgz", - "integrity": "sha512-yjavECdqeZ3GLXNgRXgeQEdz9fvDDkNKyHnbHRFtOr7/LcfgBcmct7t/ET+HaCTqfh06OzoAxrkN/IfjJBVe+g==", - "dependencies": { - "abort-controller": "^3.0.0", - "buffer": "^6.0.3", - "events": "^3.3.0", - "process": "^0.11.10", - "string_decoder": "^1.3.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - } - }, "node_modules/concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true }, "node_modules/concat-stream": { "version": "2.0.0", @@ -7687,7 +5918,8 @@ "node_modules/core-util-is": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", - "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==" + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", + "dev": true }, "node_modules/cosmiconfig": { "version": "7.1.0", @@ -7705,71 +5937,11 @@ "node": ">=10" } }, - "node_modules/crc-32": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/crc-32/-/crc-32-1.2.2.tgz", - "integrity": "sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==", - "bin": { - "crc32": "bin/crc32.njs" - }, - "engines": { - "node": ">=0.8" - } - }, - "node_modules/crc32-stream": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/crc32-stream/-/crc32-stream-6.0.0.tgz", - "integrity": "sha512-piICUB6ei4IlTv1+653yq5+KoqfBYmj9bw6LqXoOneTMDXk5nM1qt12mFW1caG3LlJXEKW1Bp0WggEmIfQB34g==", - "dependencies": { - "crc-32": "^1.2.0", - "readable-stream": "^4.0.0" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/crc32-stream/node_modules/buffer": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", - "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": 
"https://feross.org/support" - } - ], - "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.2.1" - } - }, - "node_modules/crc32-stream/node_modules/readable-stream": { - "version": "4.5.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.5.2.tgz", - "integrity": "sha512-yjavECdqeZ3GLXNgRXgeQEdz9fvDDkNKyHnbHRFtOr7/LcfgBcmct7t/ET+HaCTqfh06OzoAxrkN/IfjJBVe+g==", - "dependencies": { - "abort-controller": "^3.0.0", - "buffer": "^6.0.3", - "events": "^3.3.0", - "process": "^0.11.10", - "string_decoder": "^1.3.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - } - }, "node_modules/cross-spawn": { "version": "7.0.3", "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", @@ -7779,12 +5951,6 @@ "node": ">= 8" } }, - "node_modules/crypto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/crypto/-/crypto-1.0.1.tgz", - "integrity": "sha512-VxBKmeNcqQdiUQUW2Tzq0t377b54N2bMtXO/qiLa+6eRRmmC4qT3D4OnTGoT/U6O9aklQ/jTwbOtRMTTY8G0Ig==", - "deprecated": "This package is no longer supported. It's now a built-in Node module. If you've depended on crypto, you should switch to the one that's built-in." - }, "node_modules/damerau-levenshtein": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz", @@ -7829,6 +5995,7 @@ "version": "4.3.4", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dev": true, "dependencies": { "ms": "2.1.2" }, @@ -8102,6 +6269,7 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "dev": true, "engines": { "node": ">=0.4.0" } @@ -8115,7 +6283,8 @@ "node_modules/deprecation": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", - "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" + "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==", + "dev": true }, "node_modules/dequal": { "version": "2.0.3", @@ -8187,18 +6356,6 @@ "node": ">=6.0.0" } }, - "node_modules/dot-object": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/dot-object/-/dot-object-2.1.5.tgz", - "integrity": "sha512-xHF8EP4XH/Ba9fvAF2LDd5O3IITVolerVV6xvkxoM8zlGEiCUrggpAnHyOoKJKCrhvPcGATFAUwIujj7bRG5UA==", - "dependencies": { - "commander": "^6.1.0", - "glob": "^7.1.6" - }, - "bin": { - "dot-object": "bin/dot-object" - } - }, "node_modules/dot-prop": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-6.0.1.tgz", @@ -8229,19 +6386,6 @@ "integrity": "sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==", "dev": true }, - "node_modules/eastasianwidth": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", - "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==" - }, - "node_modules/ecdsa-sig-formatter": { - "version": "1.0.11", - 
"resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", - "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==", - "dependencies": { - "safe-buffer": "^5.0.1" - } - }, "node_modules/ejs": { "version": "3.1.10", "resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.10.tgz", @@ -8278,12 +6422,14 @@ "node_modules/emoji-regex": { "version": "9.2.2", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", + "dev": true }, "node_modules/encoding": { "version": "0.1.13", "resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.13.tgz", "integrity": "sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==", + "dev": true, "optional": true, "dependencies": { "iconv-lite": "^0.6.2" @@ -8293,6 +6439,7 @@ "version": "0.6.3", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "dev": true, "optional": true, "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" @@ -8346,7 +6493,8 @@ "node_modules/err-code": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/err-code/-/err-code-2.0.3.tgz", - "integrity": "sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==" + "integrity": "sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==", + "dev": true }, "node_modules/error-ex": { "version": "1.3.2", @@ -9073,28 +7221,12 @@ "node": ">=0.10.0" } }, - "node_modules/event-target-shim": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", - "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", - "engines": { - "node": ">=6" - } - }, "node_modules/eventemitter3": { "version": "4.0.7", "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz", "integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==", "dev": true }, - "node_modules/events": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", - "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", - "engines": { - "node": ">=0.8.x" - } - }, "node_modules/execa": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", @@ -9187,11 +7319,6 @@ "integrity": "sha512-VxPP4NqbUjj6MaAOafWeUn2cXWLcCtljklUtZf0Ind4XQ+QPtmA0b18zZy0jIQx+ExRVCR/ZQpBmik5lXshNsw==", "dev": true }, - "node_modules/fast-fifo": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/fast-fifo/-/fast-fifo-1.3.2.tgz", - "integrity": "sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ==" - }, "node_modules/fast-glob": { "version": "3.3.1", "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.1.tgz", @@ -9413,36 +7540,11 @@ "is-callable": "^1.1.3" } }, - "node_modules/foreground-child": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.1.1.tgz", - 
"integrity": "sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg==", - "dependencies": { - "cross-spawn": "^7.0.0", - "signal-exit": "^4.0.1" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/foreground-child/node_modules/signal-exit": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", - "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/form-data": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "dev": true, "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", @@ -9476,6 +7578,7 @@ "version": "2.1.0", "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", + "dev": true, "dependencies": { "minipass": "^3.0.0" }, @@ -9486,7 +7589,8 @@ "node_modules/fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true }, "node_modules/fsevents": { "version": "2.3.3", @@ -9798,6 +7902,7 @@ "version": "7.2.3", "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, "dependencies": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", @@ -9890,7 +7995,8 @@ "node_modules/graceful-fs": { "version": "4.2.11", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", - "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==" + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true }, "node_modules/graphemer": { "version": "1.4.0", @@ -10054,7 +8160,8 @@ "node_modules/http-cache-semantics": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz", - "integrity": "sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==" + "integrity": "sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==", + "dev": true }, "node_modules/http-proxy-agent": { "version": "5.0.0", @@ -10117,6 +8224,7 @@ "version": "1.2.1", "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "dev": true, "funding": [ { "type": "github", @@ -10213,6 +8321,7 @@ "version": "0.1.4", "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, "engines": { "node": ">=0.8.19" } @@ -10221,6 +8330,7 @@ "version": "4.0.0", 
"resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "dev": true, "engines": { "node": ">=8" } @@ -10235,6 +8345,7 @@ "version": "1.0.6", "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "dev": true, "dependencies": { "once": "^1.3.0", "wrappy": "1" @@ -10243,7 +8354,8 @@ "node_modules/inherits": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true }, "node_modules/ini": { "version": "1.3.8", @@ -10363,6 +8475,7 @@ "version": "9.0.5", "resolved": "https://registry.npmjs.org/ip-address/-/ip-address-9.0.5.tgz", "integrity": "sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g==", + "dev": true, "dependencies": { "jsbn": "1.1.0", "sprintf-js": "^1.1.3" @@ -10374,7 +8487,8 @@ "node_modules/ip-address/node_modules/sprintf-js": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.1.3.tgz", - "integrity": "sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==" + "integrity": "sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==", + "dev": true }, "node_modules/is-array-buffer": { "version": "3.0.2", @@ -10509,6 +8623,7 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, "engines": { "node": ">=8" } @@ -10564,7 +8679,8 @@ "node_modules/is-lambda": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/is-lambda/-/is-lambda-1.0.1.tgz", - "integrity": "sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ==" + "integrity": "sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ==", + "dev": true }, "node_modules/is-negative-zero": { "version": "2.0.2", @@ -10633,6 +8749,7 @@ "version": "5.0.0", "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz", "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==", + "dev": true, "engines": { "node": ">=0.10.0" } @@ -10678,6 +8795,7 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true, "engines": { "node": ">=8" }, @@ -10808,7 +8926,8 @@ "node_modules/isexe": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true }, "node_modules/isobject": { "version": "3.0.1", @@ -10897,23 +9016,6 @@ "node": ">=8" } }, - "node_modules/jackspeak": { - "version": 
"3.1.2", - "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.1.2.tgz", - "integrity": "sha512-kWmLKn2tRtfYMF/BakihVVRzBKOxz4gJMiL2Rj91WnAB5TPZumSH99R/Yf1qE1u4uRimvCSJfm6hnxohXeEXjQ==", - "dependencies": { - "@isaacs/cliui": "^8.0.2" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - }, - "optionalDependencies": { - "@pkgjs/parseargs": "^0.11.0" - } - }, "node_modules/jake": { "version": "10.8.7", "resolved": "https://registry.npmjs.org/jake/-/jake-10.8.7.tgz", @@ -11514,14 +9616,6 @@ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, - "node_modules/jose": { - "version": "4.15.5", - "resolved": "https://registry.npmjs.org/jose/-/jose-4.15.5.tgz", - "integrity": "sha512-jc7BFxgKPKi94uOvEmzlSWFFe2+vASyXaKUpdQKatWAESU2MWjDfFf0fdfc83CDKcA5QecabZeNLyfhe3yKNkg==", - "funding": { - "url": "https://github.com/sponsors/panva" - } - }, "node_modules/js-tokens": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", @@ -11543,7 +9637,8 @@ "node_modules/jsbn": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-1.1.0.tgz", - "integrity": "sha512-4bYVV3aAMtDTTu4+xsDYa6sy9GyJ69/amsu9sYF2zqjiEoZA5xJi3BrfX3uY+/IekIu7MwdObdbDWpoZdBv3/A==" + "integrity": "sha512-4bYVV3aAMtDTTu4+xsDYa6sy9GyJ69/amsu9sYF2zqjiEoZA5xJi3BrfX3uY+/IekIu7MwdObdbDWpoZdBv3/A==", + "dev": true }, "node_modules/jsesc": { "version": "2.5.2", @@ -11651,27 +9746,6 @@ "node": "*" } }, - "node_modules/jsonwebtoken": { - "version": "9.0.2", - "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-9.0.2.tgz", - "integrity": "sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ==", - "dependencies": { - "jws": "^3.2.2", - "lodash.includes": "^4.3.0", - "lodash.isboolean": "^3.0.3", - "lodash.isinteger": "^4.0.4", - "lodash.isnumber": "^3.0.3", - "lodash.isplainobject": "^4.0.6", - "lodash.isstring": "^4.0.1", - "lodash.once": "^4.0.0", - "ms": "^2.1.1", - "semver": "^7.5.4" - }, - "engines": { - "node": ">=12", - "npm": ">=6" - } - }, "node_modules/jsx-ast-utils": { "version": "3.3.5", "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-3.3.5.tgz", @@ -11699,46 +9773,6 @@ "integrity": "sha512-OYTthRfSh55WOItVqwpefPtNt2VdKsq5AnAK6apdtR6yCH8pr0CmSr710J0Mf+WdQy7K/OzMy7K2MgAfdQURDw==", "dev": true }, - "node_modules/jwa": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", - "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", - "dependencies": { - "buffer-equal-constant-time": "1.0.1", - "ecdsa-sig-formatter": "1.0.11", - "safe-buffer": "^5.0.1" - } - }, - "node_modules/jwks-rsa": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/jwks-rsa/-/jwks-rsa-3.1.0.tgz", - "integrity": "sha512-v7nqlfezb9YfHHzYII3ef2a2j1XnGeSE/bK3WfumaYCqONAIstJbrEGapz4kadScZzEt7zYCN7bucj8C0Mv/Rg==", - "dependencies": { - "@types/express": "^4.17.17", - "@types/jsonwebtoken": "^9.0.2", - "debug": "^4.3.4", - "jose": "^4.14.6", - "limiter": "^1.1.5", - "lru-memoizer": "^2.2.0" - }, - "engines": { - "node": ">=14" - } - }, - "node_modules/jws": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", - "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", - "dependencies": { - "jwa": "^1.4.1", - "safe-buffer": "^5.0.1" - } - }, - "node_modules/jwt-decode": { - "version": "3.1.2", - 
"resolved": "https://registry.npmjs.org/jwt-decode/-/jwt-decode-3.1.2.tgz", - "integrity": "sha512-UfpWE/VZn0iP50d8cz9NrZLM9lSWhcJ+0Gt/nm4by88UL+J1SiKN8/5dkjMmbEzwL2CAe+67GsegCbIKtbp75A==" - }, "node_modules/kind-of": { "version": "6.0.3", "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", @@ -11772,49 +9806,6 @@ "language-subtag-registry": "~0.3.2" } }, - "node_modules/lazystream": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/lazystream/-/lazystream-1.0.1.tgz", - "integrity": "sha512-b94GiNHQNy6JNTrt5w6zNyffMrNkXZb3KTkCZJb2V1xaEGCk093vkZ2jk3tpaeP33/OiXC+WvK9AxUebnf5nbw==", - "dependencies": { - "readable-stream": "^2.0.5" - }, - "engines": { - "node": ">= 0.6.3" - } - }, - "node_modules/lazystream/node_modules/isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" - }, - "node_modules/lazystream/node_modules/readable-stream": { - "version": "2.3.8", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", - "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", - "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "node_modules/lazystream/node_modules/safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" - }, - "node_modules/lazystream/node_modules/string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dependencies": { - "safe-buffer": "~5.1.0" - } - }, "node_modules/lerna": { "version": "6.4.1", "resolved": "https://registry.npmjs.org/lerna/-/lerna-6.4.1.tgz", @@ -12278,11 +10269,6 @@ "node": "^12.13.0 || ^14.15.0 || >=16.0.0" } }, - "node_modules/limiter": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/limiter/-/limiter-1.1.5.tgz", - "integrity": "sha512-FWWMIEOxz3GwUI4Ts/IvgVy6LPvoMPgjMdQ185nN6psJyBJ4yOpzqm695/h5umdLJg2vW3GR5iG11MAkR2AzJA==" - }, "node_modules/lines-and-columns": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", @@ -12331,7 +10317,8 @@ "node_modules/lodash": { "version": "4.17.21", "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "dev": true }, "node_modules/lodash.camelcase": { "version": "4.3.0", @@ -12339,47 +10326,12 @@ "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==", "dev": true }, - "node_modules/lodash.clonedeep": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz", - "integrity": "sha512-H5ZhCF25riFd9uB5UCkVKo61m3S/xZk1x4wA6yp/L3RFP6Z/eHH1ymQcGLo7J3GMPfm0V/7m1tryHuGVxpqEBQ==" - }, - 
"node_modules/lodash.includes": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz", - "integrity": "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==" - }, - "node_modules/lodash.isboolean": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz", - "integrity": "sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==" - }, - "node_modules/lodash.isinteger": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz", - "integrity": "sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA==" - }, "node_modules/lodash.ismatch": { "version": "4.4.0", "resolved": "https://registry.npmjs.org/lodash.ismatch/-/lodash.ismatch-4.4.0.tgz", "integrity": "sha512-fPMfXjGQEV9Xsq/8MTSgUf255gawYRbjwMyDbcvDhXgV7enSZA0hynz6vMPnpAb5iONEzBHBPsT+0zes5Z301g==", "dev": true }, - "node_modules/lodash.isnumber": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz", - "integrity": "sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw==" - }, - "node_modules/lodash.isplainobject": { - "version": "4.0.6", - "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz", - "integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==" - }, - "node_modules/lodash.isstring": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/lodash.isstring/-/lodash.isstring-4.0.1.tgz", - "integrity": "sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw==" - }, "node_modules/lodash.kebabcase": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/lodash.kebabcase/-/lodash.kebabcase-4.1.1.tgz", @@ -12398,11 +10350,6 @@ "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", "dev": true }, - "node_modules/lodash.once": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", - "integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==" - }, "node_modules/lodash.snakecase": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/lodash.snakecase/-/lodash.snakecase-4.1.1.tgz", @@ -12431,19 +10378,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/lower-case": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz", - "integrity": "sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==", - "dependencies": { - "tslib": "^2.0.3" - } - }, - "node_modules/lower-case/node_modules/tslib": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", - "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" - }, "node_modules/lru-cache": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", @@ -12453,31 +10387,6 @@ "yallist": "^3.0.2" } }, - "node_modules/lru-memoizer": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/lru-memoizer/-/lru-memoizer-2.3.0.tgz", - "integrity": 
"sha512-GXn7gyHAMhO13WSKrIiNfztwxodVsP8IoZ3XfrJV4yH2x0/OeTO/FIaAHTY5YekdGgW94njfuKmyyt1E0mR6Ug==", - "dependencies": { - "lodash.clonedeep": "^4.5.0", - "lru-cache": "6.0.0" - } - }, - "node_modules/lru-memoizer/node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/lru-memoizer/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - }, "node_modules/make-dir": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz", @@ -12755,6 +10664,7 @@ "version": "1.52.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "dev": true, "engines": { "node": ">= 0.6" } @@ -12763,6 +10673,7 @@ "version": "2.1.35", "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dev": true, "dependencies": { "mime-db": "1.52.0" }, @@ -12792,6 +10703,7 @@ "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, "dependencies": { "brace-expansion": "^1.1.7" }, @@ -12803,6 +10715,7 @@ "version": "1.2.8", "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "dev": true, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -12825,6 +10738,7 @@ "version": "3.3.6", "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "dev": true, "dependencies": { "yallist": "^4.0.0" }, @@ -12865,6 +10779,7 @@ "version": "1.0.5", "resolved": "https://registry.npmjs.org/minipass-flush/-/minipass-flush-1.0.5.tgz", "integrity": "sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw==", + "dev": true, "dependencies": { "minipass": "^3.0.0" }, @@ -12886,6 +10801,7 @@ "version": "1.2.4", "resolved": "https://registry.npmjs.org/minipass-pipeline/-/minipass-pipeline-1.2.4.tgz", "integrity": "sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A==", + "dev": true, "dependencies": { "minipass": "^3.0.0" }, @@ -12897,6 +10813,7 @@ "version": "1.0.3", "resolved": "https://registry.npmjs.org/minipass-sized/-/minipass-sized-1.0.3.tgz", "integrity": "sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g==", + "dev": true, "dependencies": { "minipass": "^3.0.0" }, @@ -12907,12 +10824,14 @@ "node_modules/minipass/node_modules/yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + 
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true }, "node_modules/minizlib": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", + "dev": true, "dependencies": { "minipass": "^3.0.0", "yallist": "^4.0.0" @@ -12924,12 +10843,14 @@ "node_modules/minizlib/node_modules/yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true }, "node_modules/mkdirp": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", + "dev": true, "bin": { "mkdirp": "bin/cmd.js" }, @@ -12963,7 +10884,8 @@ "node_modules/ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true }, "node_modules/multimatch": { "version": "5.0.0", @@ -13015,6 +10937,7 @@ "version": "0.6.3", "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", + "dev": true, "engines": { "node": ">= 0.6" } @@ -13025,20 +10948,6 @@ "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==", "dev": true }, - "node_modules/no-case": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz", - "integrity": "sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==", - "dependencies": { - "lower-case": "^2.0.2", - "tslib": "^2.0.3" - } - }, - "node_modules/no-case/node_modules/tslib": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", - "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" - }, "node_modules/node-addon-api": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-3.2.1.tgz", @@ -13049,6 +10958,7 @@ "version": "2.7.0", "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "dev": true, "dependencies": { "whatwg-url": "^5.0.0" }, @@ -13167,6 +11077,7 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, "engines": { "node": ">=0.10.0" } @@ -13870,6 +11781,7 @@ "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, "dependencies": { "wrappy": "1" } @@ -13999,6 +11911,7 @@ "version": 
"4.0.0", "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", + "dev": true, "dependencies": { "aggregate-error": "^3.0.0" }, @@ -14224,20 +12137,6 @@ "parse-path": "^7.0.0" } }, - "node_modules/pascal-case": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/pascal-case/-/pascal-case-3.1.2.tgz", - "integrity": "sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g==", - "dependencies": { - "no-case": "^3.0.4", - "tslib": "^2.0.3" - } - }, - "node_modules/pascal-case/node_modules/tslib": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", - "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" - }, "node_modules/path-exists": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", @@ -14251,6 +12150,7 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, "engines": { "node": ">=0.10.0" } @@ -14259,6 +12159,7 @@ "version": "3.1.1", "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, "engines": { "node": ">=8" } @@ -14269,43 +12170,6 @@ "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", "dev": true }, - "node_modules/path-scurry": { - "version": "1.11.1", - "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", - "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", - "dependencies": { - "lru-cache": "^10.2.0", - "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" - }, - "engines": { - "node": ">=16 || 14 >=14.18" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/path-scurry/node_modules/lru-cache": { - "version": "10.2.2", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.2.2.tgz", - "integrity": "sha512-9hp3Vp2/hFQUiIwKo8XCeFVnrg8Pk3TYNPIR7tJADKi5YfcF7vEaK7avFHTlSy3kOKYaJQaalfEo6YuXdceBOQ==", - "engines": { - "node": "14 || >=16.14" - } - }, - "node_modules/path-scurry/node_modules/minipass": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.1.tgz", - "integrity": "sha512-UZ7eQ+h8ywIRAW1hIEl2AqdwzJucU/Kp59+8kkZeSvafXhZjul247BvIJjEVFVeON6d7lM46XX1HXCduKAS8VA==", - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, - "node_modules/path-to-regexp": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-6.3.0.tgz", - "integrity": "sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ==", - "license": "MIT" - }, "node_modules/path-type": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", @@ -14489,18 +12353,11 @@ "node": "^12.13.0 || ^14.15.0 || >=16.0.0" } }, - "node_modules/process": { - "version": "0.11.10", - "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", - "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==", - 
"engines": { - "node": ">= 0.6.0" - } - }, "node_modules/process-nextick-args": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", - "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", + "dev": true }, "node_modules/promise-all-reject-late": { "version": "1.0.1", @@ -14530,6 +12387,7 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/promise-retry/-/promise-retry-2.0.1.tgz", "integrity": "sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g==", + "dev": true, "dependencies": { "err-code": "^2.0.2", "retry": "^0.12.0" @@ -14633,11 +12491,6 @@ } ] }, - "node_modules/queue-tick": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/queue-tick/-/queue-tick-1.0.1.tgz", - "integrity": "sha512-kJt5qhMxoszgU/62PLP1CJytzd2NKetjSRnyuj31fDd3Rlcz3fzlFdFLD1SItunPwyqEOkca6GbV612BWfaBag==" - }, "node_modules/quick-lru": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-4.0.1.tgz", @@ -14980,33 +12833,6 @@ "node": ">= 6" } }, - "node_modules/readdir-glob": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/readdir-glob/-/readdir-glob-1.1.3.tgz", - "integrity": "sha512-v05I2k7xN8zXvPD9N+z/uhXPaj0sUFCe2rcWZIpBsqxfP7xXFQ0tipAd/wjj1YxWyWtUS5IDJpOG82JKt2EAVA==", - "dependencies": { - "minimatch": "^5.1.0" - } - }, - "node_modules/readdir-glob/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/readdir-glob/node_modules/minimatch": { - "version": "5.1.6", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", - "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/readdir-scoped-modules": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/readdir-scoped-modules/-/readdir-scoped-modules-1.1.0.tgz", @@ -15138,6 +12964,7 @@ "version": "0.12.0", "resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz", "integrity": "sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==", + "dev": true, "engines": { "node": ">= 4" } @@ -15251,6 +13078,7 @@ "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true, "funding": [ { "type": "github", @@ -15284,17 +13112,13 @@ "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", - "devOptional": true - }, - "node_modules/sax": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/sax/-/sax-1.3.0.tgz", - "integrity": "sha512-0s+oAmw9zLl1V1cS9BtZN7JAd0cW5e0QH4W3LWEK6a4LaLEA2OTpGYWDY+6XasBLtz6wkm3u1xRw95mRuJ59WA==" + "dev": true }, "node_modules/semver": { "version": "7.5.4", "resolved": 
"https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dev": true, "dependencies": { "lru-cache": "^6.0.0" }, @@ -15309,6 +13133,7 @@ "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, "dependencies": { "yallist": "^4.0.0" }, @@ -15319,7 +13144,8 @@ "node_modules/semver/node_modules/yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true }, "node_modules/set-blocking": { "version": "2.0.0", @@ -15343,6 +13169,7 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, "dependencies": { "shebang-regex": "^3.0.0" }, @@ -15354,6 +13181,7 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, "engines": { "node": ">=8" } @@ -15397,6 +13225,7 @@ "version": "4.2.0", "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz", "integrity": "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==", + "dev": true, "engines": { "node": ">= 6.0.0", "npm": ">= 3.0.0" @@ -15406,6 +13235,7 @@ "version": "2.8.3", "resolved": "https://registry.npmjs.org/socks/-/socks-2.8.3.tgz", "integrity": "sha512-l5x7VUUWbjVFbafGLxPWkYsHIhEvmF85tbIeFZWc8ZPtoMyybuEhL7Jye/ooC4/d48FgOjSJXgsF/AJPYCW8Zw==", + "dev": true, "dependencies": { "ip-address": "^9.0.5", "smart-buffer": "^4.2.0" @@ -15570,22 +13400,11 @@ "node": ">=8" } }, - "node_modules/streamx": { - "version": "2.16.1", - "resolved": "https://registry.npmjs.org/streamx/-/streamx-2.16.1.tgz", - "integrity": "sha512-m9QYj6WygWyWa3H1YY69amr4nVgy61xfjys7xO7kviL5rfIEc2naf+ewFiOA+aEJD7y0JO3h2GoiUv4TDwEGzQ==", - "dependencies": { - "fast-fifo": "^1.1.0", - "queue-tick": "^1.0.1" - }, - "optionalDependencies": { - "bare-events": "^2.2.0" - } - }, "node_modules/string_decoder": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "dev": true, "dependencies": { "safe-buffer": "~5.2.0" } @@ -15607,6 +13426,7 @@ "version": "4.2.3", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -15616,29 +13436,11 @@ "node": ">=8" } }, - "node_modules/string-width-cjs": { - "name": "string-width", - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - 
"dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/string-width-cjs/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" - }, "node_modules/string-width/node_modules/emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true }, "node_modules/string.prototype.trim": { "version": "1.2.7", @@ -15689,18 +13491,7 @@ "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-ansi-cjs": { - "name": "strip-ansi", - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, "dependencies": { "ansi-regex": "^5.0.1" }, @@ -15830,6 +13621,7 @@ "version": "6.2.1", "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==", + "dev": true, "dependencies": { "chownr": "^2.0.0", "fs-minipass": "^2.0.0", @@ -15862,6 +13654,7 @@ "version": "5.0.0", "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", + "dev": true, "engines": { "node": ">=8" } @@ -15869,7 +13662,8 @@ "node_modules/tar/node_modules/yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true }, "node_modules/temp-dir": { "version": "1.0.0", @@ -15975,15 +13769,8 @@ "node_modules/tr46": { "version": "0.0.3", "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", - "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" - }, - "node_modules/traverse": { - "version": "0.3.9", - "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.3.9.tgz", - "integrity": "sha512-iawgk0hLP3SxGKDfnDJf8wTz4p2qImnyihM5Hh/sGvQ3K37dPi/w8sRhdNIxYA1TwFwc5mDhIJq+O0RsvXBKdQ==", - "engines": { - "node": "*" - } + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", + "dev": true }, "node_modules/tree-kill": { "version": "1.2.2", @@ -16064,29 +13851,6 @@ "node": ">=12" } }, - "node_modules/ts-poet": { - "version": "4.15.0", - "resolved": "https://registry.npmjs.org/ts-poet/-/ts-poet-4.15.0.tgz", - "integrity": "sha512-sLLR8yQBvHzi9d4R1F4pd+AzQxBfzOSSjfxiJxQhkUoH5bL7RsAC6wgvtVUQdGqiCsyS9rT6/8X2FI7ipdir5g==", - 
"dependencies": { - "lodash": "^4.17.15", - "prettier": "^2.5.1" - } - }, - "node_modules/ts-poet/node_modules/prettier": { - "version": "2.8.8", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.8.8.tgz", - "integrity": "sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q==", - "bin": { - "prettier": "bin-prettier.js" - }, - "engines": { - "node": ">=10.13.0" - }, - "funding": { - "url": "https://github.com/prettier/prettier?sponsor=1" - } - }, "node_modules/tsconfig-paths": { "version": "3.14.2", "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.2.tgz", @@ -16123,7 +13887,8 @@ "node_modules/tslib": { "version": "1.14.1", "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "dev": true }, "node_modules/tsutils": { "version": "3.21.0", @@ -16140,42 +13905,6 @@ "typescript": ">=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta" } }, - "node_modules/tunnel": { - "version": "0.0.6", - "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", - "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==", - "engines": { - "node": ">=0.6.11 <=0.7.0 || >=0.7.3" - } - }, - "node_modules/twirp-ts": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/twirp-ts/-/twirp-ts-2.5.0.tgz", - "integrity": "sha512-JTKIK5Pf/+3qCrmYDFlqcPPUx+ohEWKBaZy8GL8TmvV2VvC0SXVyNYILO39+GCRbqnuP6hBIF+BVr8ZxRz+6fw==", - "dependencies": { - "@protobuf-ts/plugin-framework": "^2.0.7", - "camel-case": "^4.1.2", - "dot-object": "^2.1.4", - "path-to-regexp": "^6.2.0", - "ts-poet": "^4.5.0", - "yaml": "^1.10.2" - }, - "bin": { - "protoc-gen-twirp_ts": "protoc-gen-twirp_ts" - }, - "peerDependencies": { - "@protobuf-ts/plugin": "^2.5.0", - "ts-proto": "^1.81.3" - }, - "peerDependenciesMeta": { - "@protobuf-ts/plugin": { - "optional": true - }, - "ts-proto": { - "optional": true - } - } - }, "node_modules/type-check": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", @@ -16330,15 +14059,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/undici": { - "version": "6.21.0", - "resolved": "https://registry.npmjs.org/undici/-/undici-6.21.0.tgz", - "integrity": "sha512-BUgJXc752Kou3oOIuU1i+yZZypyZRqNPW0vqoMPl8VaoalSfeR0D8/t4iAS3yirs79SSMTxTag+ZC86uswv+Cw==", - "license": "MIT", - "engines": { - "node": ">=18.17" - } - }, "node_modules/unique-filename": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-2.0.1.tgz", @@ -16366,7 +14086,8 @@ "node_modules/universal-user-agent": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz", - "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==" + "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==", + "dev": true }, "node_modules/universalify": { "version": "2.0.0", @@ -16386,26 +14107,6 @@ "node": ">=8" } }, - "node_modules/unzip-stream": { - "version": "0.3.4", - "resolved": 
"https://registry.npmjs.org/unzip-stream/-/unzip-stream-0.3.4.tgz", - "integrity": "sha512-PyofABPVv+d7fL7GOpusx7eRT9YETY2X04PhwbSipdj6bMxVCFJrr+nm0Mxqbf9hUiTin/UsnuFWBXlDZFy0Cw==", - "dependencies": { - "binary": "^0.3.0", - "mkdirp": "^0.5.1" - } - }, - "node_modules/unzip-stream/node_modules/mkdirp": { - "version": "0.5.6", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", - "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", - "dependencies": { - "minimist": "^1.2.6" - }, - "bin": { - "mkdirp": "bin/cmd.js" - } - }, "node_modules/upath": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/upath/-/upath-2.0.1.tgz", @@ -16458,12 +14159,14 @@ "node_modules/util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true }, "node_modules/uuid": { "version": "8.3.2", "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "dev": true, "bin": { "uuid": "dist/bin/uuid" } @@ -16543,12 +14246,14 @@ "node_modules/webidl-conversions": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", - "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", + "dev": true }, "node_modules/whatwg-url": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "dev": true, "dependencies": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" @@ -16558,6 +14263,7 @@ "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, "dependencies": { "isexe": "^2.0.0" }, @@ -16635,27 +14341,11 @@ "url": "https://github.com/chalk/wrap-ansi?sponsor=1" } }, - "node_modules/wrap-ansi-cjs": { - "name": "wrap-ansi", - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, "node_modules/wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true }, "node_modules/write-file-atomic": { "version": "4.0.2", @@ -16838,26 +14528,6 @@ "node": ">=6" } }, - "node_modules/xml2js": { - "version": "0.5.0", - "resolved": 
"https://registry.npmjs.org/xml2js/-/xml2js-0.5.0.tgz", - "integrity": "sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA==", - "dependencies": { - "sax": ">=0.6.0", - "xmlbuilder": "~11.0.0" - }, - "engines": { - "node": ">=4.0.0" - } - }, - "node_modules/xmlbuilder": { - "version": "11.0.1", - "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", - "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==", - "engines": { - "node": ">=4.0" - } - }, "node_modules/xtend": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", @@ -16886,6 +14556,7 @@ "version": "1.10.2", "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", + "dev": true, "engines": { "node": ">= 6" } @@ -16928,57 +14599,6 @@ "funding": { "url": "https://github.com/sponsors/sindresorhus" } - }, - "node_modules/zip-stream": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/zip-stream/-/zip-stream-6.0.1.tgz", - "integrity": "sha512-zK7YHHz4ZXpW89AHXUPbQVGKI7uvkd3hzusTdotCg1UxyaVtg0zFJSTfW/Dq5f7OBBVnq6cZIaC8Ti4hb6dtCA==", - "dependencies": { - "archiver-utils": "^5.0.0", - "compress-commons": "^6.0.2", - "readable-stream": "^4.0.0" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/zip-stream/node_modules/buffer": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", - "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.2.1" - } - }, - "node_modules/zip-stream/node_modules/readable-stream": { - "version": "4.5.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.5.2.tgz", - "integrity": "sha512-yjavECdqeZ3GLXNgRXgeQEdz9fvDDkNKyHnbHRFtOr7/LcfgBcmct7t/ET+HaCTqfh06OzoAxrkN/IfjJBVe+g==", - "dependencies": { - "abort-controller": "^3.0.0", - "buffer": "^6.0.3", - "events": "^3.3.0", - "process": "^0.11.10", - "string_decoder": "^1.3.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - } } } } diff --git a/package.json b/package.json index ca30fbc0..3115ed6b 100644 --- a/package.json +++ b/package.json @@ -32,19 +32,5 @@ "prettier": "^3.0.0", "ts-jest": "^29.1.1", "typescript": "^5.2.2" - }, - "dependencies": { - "@actions/artifact": "^2.1.7", - "@actions/attest": "^1.2.1", - "@actions/cache": "^3.2.4", - "@actions/core": "^1.10.1", - "@actions/exec": "^1.1.1", - "@actions/github": "^6.0.0", - "@actions/glob": "^0.4.0", - "@actions/http-client": "^2.2.1", - "@actions/io": "^1.1.3", - "@actions/tool-cache": "^2.0.1", - "tunnel": "^0.0.6", - "undici": "^6.18.1" } } diff --git a/packages/cache/package-lock.json b/packages/cache/package-lock.json index 724f674a..8e682de4 100644 --- a/packages/cache/package-lock.json +++ b/packages/cache/package-lock.json @@ -17,7 +17,10 @@ "@azure/abort-controller": "^1.1.0", "@azure/ms-rest-js": "^2.6.0", "@azure/storage-blob": "^12.13.0", - "semver": "^6.3.1" + "@protobuf-ts/plugin": "^2.9.4", + "jwt-decode": "^3.1.2", + "semver": "^6.3.1", + "twirp-ts": "^2.5.0" }, 
"devDependencies": { "@types/semver": "^6.0.0", @@ -245,6 +248,83 @@ "node": ">=8.0.0" } }, + "node_modules/@protobuf-ts/plugin": { + "version": "2.9.4", + "resolved": "https://registry.npmjs.org/@protobuf-ts/plugin/-/plugin-2.9.4.tgz", + "integrity": "sha512-Db5Laq5T3mc6ERZvhIhkj1rn57/p8gbWiCKxQWbZBBl20wMuqKoHbRw4tuD7FyXi+IkwTToaNVXymv5CY3E8Rw==", + "license": "Apache-2.0", + "dependencies": { + "@protobuf-ts/plugin-framework": "^2.9.4", + "@protobuf-ts/protoc": "^2.9.4", + "@protobuf-ts/runtime": "^2.9.4", + "@protobuf-ts/runtime-rpc": "^2.9.4", + "typescript": "^3.9" + }, + "bin": { + "protoc-gen-dump": "bin/protoc-gen-dump", + "protoc-gen-ts": "bin/protoc-gen-ts" + } + }, + "node_modules/@protobuf-ts/plugin-framework": { + "version": "2.9.4", + "resolved": "https://registry.npmjs.org/@protobuf-ts/plugin-framework/-/plugin-framework-2.9.4.tgz", + "integrity": "sha512-9nuX1kjdMliv+Pes8dQCKyVhjKgNNfwxVHg+tx3fLXSfZZRcUHMc1PMwB9/vTvc6gBKt9QGz5ERqSqZc0++E9A==", + "license": "(Apache-2.0 AND BSD-3-Clause)", + "dependencies": { + "@protobuf-ts/runtime": "^2.9.4", + "typescript": "^3.9" + } + }, + "node_modules/@protobuf-ts/plugin-framework/node_modules/typescript": { + "version": "3.9.10", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.10.tgz", + "integrity": "sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q==", + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=4.2.0" + } + }, + "node_modules/@protobuf-ts/plugin/node_modules/typescript": { + "version": "3.9.10", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.10.tgz", + "integrity": "sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q==", + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=4.2.0" + } + }, + "node_modules/@protobuf-ts/protoc": { + "version": "2.9.4", + "resolved": "https://registry.npmjs.org/@protobuf-ts/protoc/-/protoc-2.9.4.tgz", + "integrity": "sha512-hQX+nOhFtrA+YdAXsXEDrLoGJqXHpgv4+BueYF0S9hy/Jq0VRTVlJS1Etmf4qlMt/WdigEes5LOd/LDzui4GIQ==", + "license": "Apache-2.0", + "bin": { + "protoc": "protoc.js" + } + }, + "node_modules/@protobuf-ts/runtime": { + "version": "2.9.4", + "resolved": "https://registry.npmjs.org/@protobuf-ts/runtime/-/runtime-2.9.4.tgz", + "integrity": "sha512-vHRFWtJJB/SiogWDF0ypoKfRIZ41Kq+G9cEFj6Qm1eQaAhJ1LDFvgZ7Ja4tb3iLOQhz0PaoPnnOijF1qmEqTxg==", + "license": "(Apache-2.0 AND BSD-3-Clause)" + }, + "node_modules/@protobuf-ts/runtime-rpc": { + "version": "2.9.4", + "resolved": "https://registry.npmjs.org/@protobuf-ts/runtime-rpc/-/runtime-rpc-2.9.4.tgz", + "integrity": "sha512-y9L9JgnZxXFqH5vD4d7j9duWvIJ7AShyBRoNKJGhu9Q27qIbchfzli66H9RvrQNIFk5ER7z1Twe059WZGqERcA==", + "license": "Apache-2.0", + "dependencies": { + "@protobuf-ts/runtime": "^2.9.4" + } + }, "node_modules/@types/node": { "version": "20.4.6", "resolved": "https://registry.npmjs.org/@types/node/-/node-20.4.6.tgz", @@ -316,6 +396,16 @@ "concat-map": "0.0.1" } }, + "node_modules/camel-case": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/camel-case/-/camel-case-4.1.2.tgz", + "integrity": "sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw==", + "license": "MIT", + "dependencies": { + "pascal-case": "^3.1.2", + "tslib": "^2.0.3" + } + }, "node_modules/combined-stream": { "version": "1.0.8", "resolved": 
"https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", @@ -327,6 +417,15 @@ "node": ">= 0.8" } }, + "node_modules/commander": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz", + "integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==", + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, "node_modules/concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", @@ -340,6 +439,19 @@ "node": ">=0.4.0" } }, + "node_modules/dot-object": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/dot-object/-/dot-object-2.1.5.tgz", + "integrity": "sha512-xHF8EP4XH/Ba9fvAF2LDd5O3IITVolerVV6xvkxoM8zlGEiCUrggpAnHyOoKJKCrhvPcGATFAUwIujj7bRG5UA==", + "license": "MIT", + "dependencies": { + "commander": "^6.1.0", + "glob": "^7.1.6" + }, + "bin": { + "dot-object": "bin/dot-object" + } + }, "node_modules/event-target-shim": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", @@ -369,6 +481,71 @@ "node": ">= 0.12" } }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "license": "ISC" + }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "license": "ISC", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "license": "ISC" + }, + "node_modules/jwt-decode": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/jwt-decode/-/jwt-decode-3.1.2.tgz", + "integrity": "sha512-UfpWE/VZn0iP50d8cz9NrZLM9lSWhcJ+0Gt/nm4by88UL+J1SiKN8/5dkjMmbEzwL2CAe+67GsegCbIKtbp75A==", + "license": "MIT" + }, + "node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "license": "MIT" + }, + "node_modules/lower-case": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz", + "integrity": "sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==", + "license": "MIT", + "dependencies": { + "tslib": "^2.0.3" + } + }, "node_modules/mime-db": { "version": "1.52.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", @@ -399,6 +576,16 @@ "node": "*" } }, + "node_modules/no-case": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz", + "integrity": "sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==", + "license": "MIT", + "dependencies": { + "lower-case": "^2.0.2", + "tslib": "^2.0.3" + } + }, "node_modules/node-fetch": { "version": "2.6.12", "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.12.tgz", @@ -418,6 +605,55 @@ } } }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/pascal-case": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/pascal-case/-/pascal-case-3.1.2.tgz", + "integrity": "sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g==", + "license": "MIT", + "dependencies": { + "no-case": "^3.0.4", + "tslib": "^2.0.3" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-to-regexp": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-6.3.0.tgz", + "integrity": "sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ==", + "license": "MIT" + }, + "node_modules/prettier": { + "version": "2.8.8", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.8.8.tgz", + "integrity": "sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q==", + "license": "MIT", + "bin": { + "prettier": "bin-prettier.js" + }, + "engines": { + "node": 
">=10.13.0" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, "node_modules/process": { "version": "0.11.10", "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", @@ -444,6 +680,16 @@ "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" }, + "node_modules/ts-poet": { + "version": "4.15.0", + "resolved": "https://registry.npmjs.org/ts-poet/-/ts-poet-4.15.0.tgz", + "integrity": "sha512-sLLR8yQBvHzi9d4R1F4pd+AzQxBfzOSSjfxiJxQhkUoH5bL7RsAC6wgvtVUQdGqiCsyS9rT6/8X2FI7ipdir5g==", + "license": "Apache-2.0", + "dependencies": { + "lodash": "^4.17.15", + "prettier": "^2.5.1" + } + }, "node_modules/tslib": { "version": "2.6.1", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.1.tgz", @@ -457,6 +703,35 @@ "node": ">=0.6.11 <=0.7.0 || >=0.7.3" } }, + "node_modules/twirp-ts": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/twirp-ts/-/twirp-ts-2.5.0.tgz", + "integrity": "sha512-JTKIK5Pf/+3qCrmYDFlqcPPUx+ohEWKBaZy8GL8TmvV2VvC0SXVyNYILO39+GCRbqnuP6hBIF+BVr8ZxRz+6fw==", + "license": "MIT", + "dependencies": { + "@protobuf-ts/plugin-framework": "^2.0.7", + "camel-case": "^4.1.2", + "dot-object": "^2.1.4", + "path-to-regexp": "^6.2.0", + "ts-poet": "^4.5.0", + "yaml": "^1.10.2" + }, + "bin": { + "protoc-gen-twirp_ts": "protoc-gen-twirp_ts" + }, + "peerDependencies": { + "@protobuf-ts/plugin": "^2.5.0", + "ts-proto": "^1.81.3" + }, + "peerDependenciesMeta": { + "@protobuf-ts/plugin": { + "optional": true + }, + "ts-proto": { + "optional": true + } + } + }, "node_modules/typescript": { "version": "5.2.2", "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.2.2.tgz", @@ -484,6 +759,12 @@ "webidl-conversions": "^3.0.0" } }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "license": "ISC" + }, "node_modules/xml2js": { "version": "0.5.0", "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.5.0.tgz", @@ -503,6 +784,15 @@ "engines": { "node": ">=4.0" } + }, + "node_modules/yaml": { + "version": "1.10.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", + "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", + "license": "ISC", + "engines": { + "node": ">= 6" + } } }, "dependencies": { @@ -692,6 +982,59 @@ "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.4.1.tgz", "integrity": "sha512-O2yRJce1GOc6PAy3QxFM4NzFiWzvScDC1/5ihYBL6BUEVdq0XMWN01sppE+H6bBXbaFYipjwFLEWLg5PaSOThA==" }, + "@protobuf-ts/plugin": { + "version": "2.9.4", + "resolved": "https://registry.npmjs.org/@protobuf-ts/plugin/-/plugin-2.9.4.tgz", + "integrity": "sha512-Db5Laq5T3mc6ERZvhIhkj1rn57/p8gbWiCKxQWbZBBl20wMuqKoHbRw4tuD7FyXi+IkwTToaNVXymv5CY3E8Rw==", + "requires": { + "@protobuf-ts/plugin-framework": "^2.9.4", + "@protobuf-ts/protoc": "^2.9.4", + "@protobuf-ts/runtime": "^2.9.4", + "@protobuf-ts/runtime-rpc": "^2.9.4", + "typescript": "^3.9" + }, + "dependencies": { + "typescript": { + "version": "3.9.10", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.10.tgz", + "integrity": "sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q==" + } + } + }, + "@protobuf-ts/plugin-framework": { + 
"version": "2.9.4", + "resolved": "https://registry.npmjs.org/@protobuf-ts/plugin-framework/-/plugin-framework-2.9.4.tgz", + "integrity": "sha512-9nuX1kjdMliv+Pes8dQCKyVhjKgNNfwxVHg+tx3fLXSfZZRcUHMc1PMwB9/vTvc6gBKt9QGz5ERqSqZc0++E9A==", + "requires": { + "@protobuf-ts/runtime": "^2.9.4", + "typescript": "^3.9" + }, + "dependencies": { + "typescript": { + "version": "3.9.10", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.10.tgz", + "integrity": "sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q==" + } + } + }, + "@protobuf-ts/protoc": { + "version": "2.9.4", + "resolved": "https://registry.npmjs.org/@protobuf-ts/protoc/-/protoc-2.9.4.tgz", + "integrity": "sha512-hQX+nOhFtrA+YdAXsXEDrLoGJqXHpgv4+BueYF0S9hy/Jq0VRTVlJS1Etmf4qlMt/WdigEes5LOd/LDzui4GIQ==" + }, + "@protobuf-ts/runtime": { + "version": "2.9.4", + "resolved": "https://registry.npmjs.org/@protobuf-ts/runtime/-/runtime-2.9.4.tgz", + "integrity": "sha512-vHRFWtJJB/SiogWDF0ypoKfRIZ41Kq+G9cEFj6Qm1eQaAhJ1LDFvgZ7Ja4tb3iLOQhz0PaoPnnOijF1qmEqTxg==" + }, + "@protobuf-ts/runtime-rpc": { + "version": "2.9.4", + "resolved": "https://registry.npmjs.org/@protobuf-ts/runtime-rpc/-/runtime-rpc-2.9.4.tgz", + "integrity": "sha512-y9L9JgnZxXFqH5vD4d7j9duWvIJ7AShyBRoNKJGhu9Q27qIbchfzli66H9RvrQNIFk5ER7z1Twe059WZGqERcA==", + "requires": { + "@protobuf-ts/runtime": "^2.9.4" + } + }, "@types/node": { "version": "20.4.6", "resolved": "https://registry.npmjs.org/@types/node/-/node-20.4.6.tgz", @@ -759,6 +1102,15 @@ "concat-map": "0.0.1" } }, + "camel-case": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/camel-case/-/camel-case-4.1.2.tgz", + "integrity": "sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw==", + "requires": { + "pascal-case": "^3.1.2", + "tslib": "^2.0.3" + } + }, "combined-stream": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", @@ -767,6 +1119,11 @@ "delayed-stream": "~1.0.0" } }, + "commander": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz", + "integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==" + }, "concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", @@ -777,6 +1134,15 @@ "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==" }, + "dot-object": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/dot-object/-/dot-object-2.1.5.tgz", + "integrity": "sha512-xHF8EP4XH/Ba9fvAF2LDd5O3IITVolerVV6xvkxoM8zlGEiCUrggpAnHyOoKJKCrhvPcGATFAUwIujj7bRG5UA==", + "requires": { + "commander": "^6.1.0", + "glob": "^7.1.6" + } + }, "event-target-shim": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", @@ -797,6 +1163,56 @@ "mime-types": "^2.1.12" } }, + "fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" + }, + "glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", 
+ "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "jwt-decode": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/jwt-decode/-/jwt-decode-3.1.2.tgz", + "integrity": "sha512-UfpWE/VZn0iP50d8cz9NrZLM9lSWhcJ+0Gt/nm4by88UL+J1SiKN8/5dkjMmbEzwL2CAe+67GsegCbIKtbp75A==" + }, + "lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" + }, + "lower-case": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz", + "integrity": "sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==", + "requires": { + "tslib": "^2.0.3" + } + }, "mime-db": { "version": "1.52.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", @@ -818,6 +1234,15 @@ "brace-expansion": "^1.1.7" } }, + "no-case": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz", + "integrity": "sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==", + "requires": { + "lower-case": "^2.0.2", + "tslib": "^2.0.3" + } + }, "node-fetch": { "version": "2.6.12", "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.12.tgz", @@ -826,6 +1251,38 @@ "whatwg-url": "^5.0.0" } }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "requires": { + "wrappy": "1" + } + }, + "pascal-case": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/pascal-case/-/pascal-case-3.1.2.tgz", + "integrity": "sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g==", + "requires": { + "no-case": "^3.0.4", + "tslib": "^2.0.3" + } + }, + "path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==" + }, + "path-to-regexp": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-6.3.0.tgz", + "integrity": "sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ==" + }, + "prettier": { + "version": "2.8.8", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.8.8.tgz", + "integrity": "sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q==" + }, "process": { "version": "0.11.10", "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", @@ -846,6 +1303,15 @@ "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", "integrity": 
"sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" }, + "ts-poet": { + "version": "4.15.0", + "resolved": "https://registry.npmjs.org/ts-poet/-/ts-poet-4.15.0.tgz", + "integrity": "sha512-sLLR8yQBvHzi9d4R1F4pd+AzQxBfzOSSjfxiJxQhkUoH5bL7RsAC6wgvtVUQdGqiCsyS9rT6/8X2FI7ipdir5g==", + "requires": { + "lodash": "^4.17.15", + "prettier": "^2.5.1" + } + }, "tslib": { "version": "2.6.1", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.1.tgz", @@ -856,6 +1322,19 @@ "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==" }, + "twirp-ts": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/twirp-ts/-/twirp-ts-2.5.0.tgz", + "integrity": "sha512-JTKIK5Pf/+3qCrmYDFlqcPPUx+ohEWKBaZy8GL8TmvV2VvC0SXVyNYILO39+GCRbqnuP6hBIF+BVr8ZxRz+6fw==", + "requires": { + "@protobuf-ts/plugin-framework": "^2.0.7", + "camel-case": "^4.1.2", + "dot-object": "^2.1.4", + "path-to-regexp": "^6.2.0", + "ts-poet": "^4.5.0", + "yaml": "^1.10.2" + } + }, "typescript": { "version": "5.2.2", "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.2.2.tgz", @@ -876,6 +1355,11 @@ "webidl-conversions": "^3.0.0" } }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" + }, "xml2js": { "version": "0.5.0", "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.5.0.tgz", @@ -889,6 +1373,11 @@ "version": "11.0.1", "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==" + }, + "yaml": { + "version": "1.10.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", + "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==" } } } diff --git a/packages/cache/package.json b/packages/cache/package.json index 1d1ee0e2..49cd075b 100644 --- a/packages/cache/package.json +++ b/packages/cache/package.json @@ -45,7 +45,10 @@ "@azure/abort-controller": "^1.1.0", "@azure/ms-rest-js": "^2.6.0", "@azure/storage-blob": "^12.13.0", - "semver": "^6.3.1" + "@protobuf-ts/plugin": "^2.9.4", + "semver": "^6.3.1", + "jwt-decode": "^3.1.2", + "twirp-ts": "^2.5.0" }, "devDependencies": { "@types/semver": "^6.0.0", From 555b03f6fd51ffb6eba5f256fb84581a90df8fa1 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Thu, 14 Nov 2024 06:40:10 -0800 Subject: [PATCH 042/108] Revert package.json --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 3115ed6b..d394979b 100644 --- a/package.json +++ b/package.json @@ -33,4 +33,4 @@ "ts-jest": "^29.1.1", "typescript": "^5.2.2" } -} +} \ No newline at end of file From 68ab87caa2c73b0abb2011bfc6bb5243836d72c3 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Thu, 14 Nov 2024 15:49:02 +0100 Subject: [PATCH 043/108] Add check to make sure archive has been created already Co-authored-by: Josh Gross --- packages/cache/src/cache.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index 7d0cd000..6567cabf 100644 --- a/packages/cache/src/cache.ts +++ 
b/packages/cache/src/cache.ts @@ -296,7 +296,9 @@ async function restoreCachev2( throw new Error(`Failed to restore: ${error.message}`) } finally { try { - await utils.unlinkFile(archivePath) + if (archivePath) { + await utils.unlinkFile(archivePath) + } } catch (error) { core.debug(`Failed to delete archive: ${error}`) } From 6c11d441a57bf710714904779185a19b5fdd317b Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Thu, 14 Nov 2024 06:49:55 -0800 Subject: [PATCH 044/108] Remove unnecessary type hints --- packages/cache/src/cache.ts | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index 7d0cd000..0623f058 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -253,8 +253,7 @@ async function restoreCachev2( ) } - const response: GetCacheEntryDownloadURLResponse = - await twirpClient.GetCacheEntryDownloadURL(request) + const response = await twirpClient.GetCacheEntryDownloadURL(request) if (!response.ok) { core.warning(`Cache not found for keys: ${keys.join(', ')}`) @@ -273,7 +272,7 @@ async function restoreCachev2( utils.getCacheFileName(compressionMethod) ) core.debug(`Archive path: ${archivePath}`) - core.debug(`Starting download of artifact to: ${archivePath}`) + core.debug(`Starting download of archive to: ${archivePath}`) await DownloadCacheFile(response.signedDownloadUrl, archivePath) @@ -503,8 +502,8 @@ async function saveCachev2( key, version } - const response: CreateCacheEntryResponse = - await twirpClient.CreateCacheEntry(request) + + const response = await twirpClient.CreateCacheEntry(request) if (!response.ok) { throw new ReserveCacheError( `Unable to reserve cache with key ${key}, another job may be creating this cache.` From 8616c313a26622e237804459fff2308978539e20 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Thu, 14 Nov 2024 07:11:12 -0800 Subject: [PATCH 045/108] Remove unused definitions --- packages/cache/src/cache.ts | 20 +++++++++----------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index 7f1f4fd6..69e1c6a2 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -8,11 +8,9 @@ import {DownloadOptions, UploadOptions} from './options' import {createTar, extractTar, listTar} from './internal/tar' import { CreateCacheEntryRequest, - CreateCacheEntryResponse, FinalizeCacheEntryUploadRequest, FinalizeCacheEntryUploadResponse, - GetCacheEntryDownloadURLRequest, - GetCacheEntryDownloadURLResponse + GetCacheEntryDownloadURLRequest } from './generated/results/api/v1/cache' import {CacheFileSizeLimit} from './internal/constants' import {UploadCacheFile} from './internal/blob/upload-cache' @@ -86,7 +84,7 @@ export async function restoreCache( const cacheServiceVersion: string = config.getCacheServiceVersion() switch (cacheServiceVersion) { case 'v2': - return await restoreCachev2( + return await restoreCacheV2( paths, primaryKey, restoreKeys, @@ -95,7 +93,7 @@ export async function restoreCache( ) case 'v1': default: - return await restoreCachev1( + return await restoreCacheV1( paths, primaryKey, restoreKeys, @@ -115,7 +113,7 @@ export async function restoreCache( * @param enableCrossOsArchive * @returns */ -async function restoreCachev1( +async function restoreCacheV1( paths: string[], primaryKey: string, restoreKeys?: string[], @@ -213,7 +211,7 @@ async function restoreCachev1( * @param 
enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform * @returns string returns the key for the cache hit, otherwise returns undefined */ -async function restoreCachev2( +async function restoreCacheV2( paths: string[], primaryKey: string, restoreKeys?: string[], @@ -325,10 +323,10 @@ export async function saveCache( const cacheServiceVersion: string = config.getCacheServiceVersion() switch (cacheServiceVersion) { case 'v2': - return await saveCachev2(paths, key, options, enableCrossOsArchive) + return await saveCacheV2(paths, key, options, enableCrossOsArchive) case 'v1': default: - return await saveCachev1(paths, key, options, enableCrossOsArchive) + return await saveCacheV1(paths, key, options, enableCrossOsArchive) } } @@ -341,7 +339,7 @@ export async function saveCache( * @param enableCrossOsArchive * @returns */ -async function saveCachev1( +async function saveCacheV1( paths: string[], key: string, options?: UploadOptions, @@ -444,7 +442,7 @@ async function saveCachev1( * @param enableCrossOsArchive * @returns */ -async function saveCachev2( +async function saveCacheV2( paths: string[], key: string, options?: UploadOptions, From a1e6ef3759e307b31680f0892888f8cdf1b592fa Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Wed, 20 Nov 2024 13:53:47 -0800 Subject: [PATCH 046/108] Update cache service APIs & cleanup --- packages/cache/package-lock.json | 12 - packages/cache/package.json | 1 - packages/cache/src/cache.ts | 9 - .../src/generated/results/api/v1/cache.ts | 339 +++++++----------- .../results/entities/v1/cachemetadata.ts | 85 +++++ .../results/entities/v1/cachescope.ts | 84 +++++ packages/cache/src/internal/cacheUtils.ts | 69 ---- packages/cache/src/internal/config.ts | 8 - .../src/internal/shared/cacheTwirpClient.ts | 3 +- 9 files changed, 302 insertions(+), 308 deletions(-) create mode 100644 packages/cache/src/generated/results/entities/v1/cachemetadata.ts create mode 100644 packages/cache/src/generated/results/entities/v1/cachescope.ts diff --git a/packages/cache/package-lock.json b/packages/cache/package-lock.json index 8e682de4..beb23a68 100644 --- a/packages/cache/package-lock.json +++ b/packages/cache/package-lock.json @@ -18,7 +18,6 @@ "@azure/ms-rest-js": "^2.6.0", "@azure/storage-blob": "^12.13.0", "@protobuf-ts/plugin": "^2.9.4", - "jwt-decode": "^3.1.2", "semver": "^6.3.1", "twirp-ts": "^2.5.0" }, @@ -525,12 +524,6 @@ "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", "license": "ISC" }, - "node_modules/jwt-decode": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/jwt-decode/-/jwt-decode-3.1.2.tgz", - "integrity": "sha512-UfpWE/VZn0iP50d8cz9NrZLM9lSWhcJ+0Gt/nm4by88UL+J1SiKN8/5dkjMmbEzwL2CAe+67GsegCbIKtbp75A==", - "license": "MIT" - }, "node_modules/lodash": { "version": "4.17.21", "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", @@ -1195,11 +1188,6 @@ "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" }, - "jwt-decode": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/jwt-decode/-/jwt-decode-3.1.2.tgz", - "integrity": "sha512-UfpWE/VZn0iP50d8cz9NrZLM9lSWhcJ+0Gt/nm4by88UL+J1SiKN8/5dkjMmbEzwL2CAe+67GsegCbIKtbp75A==" - }, "lodash": { "version": "4.17.21", "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", diff --git 
a/packages/cache/package.json b/packages/cache/package.json index 49cd075b..e5332a92 100644 --- a/packages/cache/package.json +++ b/packages/cache/package.json @@ -47,7 +47,6 @@ "@azure/storage-blob": "^12.13.0", "@protobuf-ts/plugin": "^2.9.4", "semver": "^6.3.1", - "jwt-decode": "^3.1.2", "twirp-ts": "^2.5.0" }, "devDependencies": { diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index 69e1c6a2..fe379b9a 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -236,12 +236,9 @@ async function restoreCacheV2( let archivePath = '' try { const twirpClient = cacheTwirpClient.internalCacheTwirpClient() - const backendIds: utils.BackendIds = utils.getBackendIdsFromToken() const compressionMethod = await utils.getCompressionMethod() const request: GetCacheEntryDownloadURLRequest = { - workflowRunBackendId: backendIds.workflowRunBackendId, - workflowJobRunBackendId: backendIds.workflowJobRunBackendId, key: primaryKey, restoreKeys, version: utils.getCacheVersion( @@ -448,8 +445,6 @@ async function saveCacheV2( options?: UploadOptions, enableCrossOsArchive = false ): Promise { - // BackendIds are retrieved form the signed JWT - const backendIds: utils.BackendIds = utils.getBackendIdsFromToken() const compressionMethod = await utils.getCompressionMethod() const twirpClient = cacheTwirpClient.internalCacheTwirpClient() let cacheId = -1 @@ -497,8 +492,6 @@ async function saveCacheV2( enableCrossOsArchive ) const request: CreateCacheEntryRequest = { - workflowRunBackendId: backendIds.workflowRunBackendId, - workflowJobRunBackendId: backendIds.workflowJobRunBackendId, key, version } @@ -514,8 +507,6 @@ async function saveCacheV2( await UploadCacheFile(response.signedUploadUrl, archivePath) const finalizeRequest: FinalizeCacheEntryUploadRequest = { - workflowRunBackendId: backendIds.workflowRunBackendId, - workflowJobRunBackendId: backendIds.workflowJobRunBackendId, key, version, sizeBytes: `${archiveFileSize}` diff --git a/packages/cache/src/generated/results/api/v1/cache.ts b/packages/cache/src/generated/results/api/v1/cache.ts index f7686fbd..0736c7ad 100644 --- a/packages/cache/src/generated/results/api/v1/cache.ts +++ b/packages/cache/src/generated/results/api/v1/cache.ts @@ -13,32 +13,27 @@ import { reflectionMergePartial } from "@protobuf-ts/runtime"; import { MESSAGE_TYPE } from "@protobuf-ts/runtime"; import { MessageType } from "@protobuf-ts/runtime"; import { Timestamp } from "../../../google/protobuf/timestamp"; +import { CacheMetadata } from "../../entities/v1/cachemetadata"; /** * @generated from protobuf message github.actions.results.api.v1.CreateCacheEntryRequest */ export interface CreateCacheEntryRequest { /** - * Workflow run backend ID + * Scope and other metadata for the cache entry * - * @generated from protobuf field: string workflow_run_backend_id = 1; + * @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1; */ - workflowRunBackendId: string; - /** - * Workflow job run backend ID - * - * @generated from protobuf field: string workflow_job_run_backend_id = 2; - */ - workflowJobRunBackendId: string; + metadata?: CacheMetadata; /** * An explicit key for a cache entry * - * @generated from protobuf field: string key = 3; + * @generated from protobuf field: string key = 2; */ key: string; /** * Hash of the compression tool, runner OS and paths cached * - * @generated from protobuf field: string version = 4; + * @generated from protobuf field: string version = 3; */ version: string; } @@ -62,33 
+57,27 @@ export interface CreateCacheEntryResponse { */ export interface FinalizeCacheEntryUploadRequest { /** - * Workflow run backend ID + * Scope and other metadata for the cache entry * - * @generated from protobuf field: string workflow_run_backend_id = 1; + * @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1; */ - workflowRunBackendId: string; - /** - * Workflow job run backend ID - * - * @generated from protobuf field: string workflow_job_run_backend_id = 2; - */ - workflowJobRunBackendId: string; + metadata?: CacheMetadata; /** * An explicit key for a cache entry * - * @generated from protobuf field: string key = 3; + * @generated from protobuf field: string key = 2; */ key: string; /** * Size of the cache archive in Bytes * - * @generated from protobuf field: int64 size_bytes = 4; + * @generated from protobuf field: int64 size_bytes = 3; */ sizeBytes: string; /** * Hash of the compression tool, runner OS and paths cached * - * @generated from protobuf field: string version = 5; + * @generated from protobuf field: string version = 4; */ version: string; } @@ -112,33 +101,27 @@ export interface FinalizeCacheEntryUploadResponse { */ export interface GetCacheEntryDownloadURLRequest { /** - * Workflow run backend ID + * Scope and other metadata for the cache entry * - * @generated from protobuf field: string workflow_run_backend_id = 1; + * @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1; */ - workflowRunBackendId: string; - /** - * Workflow job run backend ID - * - * @generated from protobuf field: string workflow_job_run_backend_id = 2; - */ - workflowJobRunBackendId: string; + metadata?: CacheMetadata; /** * An explicit key for a cache entry * - * @generated from protobuf field: string key = 3; + * @generated from protobuf field: string key = 2; */ key: string; /** * Restore keys used for prefix searching * - * @generated from protobuf field: repeated string restore_keys = 4; + * @generated from protobuf field: repeated string restore_keys = 3; */ restoreKeys: string[]; /** * Hash of the compression tool, runner OS and paths cached * - * @generated from protobuf field: string version = 5; + * @generated from protobuf field: string version = 4; */ version: string; } @@ -162,21 +145,15 @@ export interface GetCacheEntryDownloadURLResponse { */ export interface DeleteCacheEntryRequest { /** - * Workflow run backend ID + * Scope and other metadata for the cache entry * - * @generated from protobuf field: string workflow_run_backend_id = 1; + * @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1; */ - workflowRunBackendId: string; - /** - * Workflow job run backend ID - * - * @generated from protobuf field: string workflow_job_run_backend_id = 2; - */ - workflowJobRunBackendId: string; + metadata?: CacheMetadata; /** * An explicit key for a cache entry * - * @generated from protobuf field: string key = 3; + * @generated from protobuf field: string key = 2; */ key: string; } @@ -200,27 +177,21 @@ export interface DeleteCacheEntryResponse { */ export interface ListCacheEntriesRequest { /** - * Workflow run backend ID + * Scope and other metadata for the cache entry * - * @generated from protobuf field: string workflow_run_backend_id = 1; + * @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1; */ - workflowRunBackendId: string; - /** - * Workflow job run backend ID - * - * @generated from protobuf field: string 
workflow_job_run_backend_id = 2; - */ - workflowJobRunBackendId: string; + metadata?: CacheMetadata; /** * An explicit key for a cache entry * - * @generated from protobuf field: string key = 3; + * @generated from protobuf field: string key = 2; */ key: string; /** * Restore keys used for prefix searching * - * @generated from protobuf field: repeated string restore_keys = 4; + * @generated from protobuf field: repeated string restore_keys = 3; */ restoreKeys: string[]; } @@ -291,33 +262,27 @@ export interface ListCacheEntriesResponse_CacheEntry { */ export interface LookupCacheEntryRequest { /** - * Workflow run backend ID + * Scope and other metadata for the cache entry * - * @generated from protobuf field: string workflow_run_backend_id = 1; + * @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1; */ - workflowRunBackendId: string; - /** - * Workflow job run backend ID - * - * @generated from protobuf field: string workflow_job_run_backend_id = 2; - */ - workflowJobRunBackendId: string; + metadata?: CacheMetadata; /** * An explicit key for a cache entry * - * @generated from protobuf field: string key = 3; + * @generated from protobuf field: string key = 2; */ key: string; /** * Restore keys used for prefix searching * - * @generated from protobuf field: repeated string restore_keys = 4; + * @generated from protobuf field: repeated string restore_keys = 3; */ restoreKeys: string[]; /** * Hash of the compression tool, runner OS and paths cached * - * @generated from protobuf field: string version = 5; + * @generated from protobuf field: string version = 4; */ version: string; } @@ -391,14 +356,13 @@ export interface LookupCacheEntryResponse_CacheEntry { class CreateCacheEntryRequest$Type extends MessageType { constructor() { super("github.actions.results.api.v1.CreateCacheEntryRequest", [ - { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 3, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 4, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + { no: 1, name: "metadata", kind: "message", T: () => CacheMetadata }, + { no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ } ]); } create(value?: PartialMessage): CreateCacheEntryRequest { - const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", key: "", version: "" }; + const message = { key: "", version: "" }; globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== undefined) reflectionMergePartial(this, message, value); @@ -409,16 +373,13 @@ class CreateCacheEntryRequest$Type extends MessageType while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { - case /* string workflow_run_backend_id */ 1: - message.workflowRunBackendId = reader.string(); + case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1: + message.metadata = CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); break; - case /* string workflow_job_run_backend_id */ 2: - message.workflowJobRunBackendId = reader.string(); - break; - case /* string key */ 3: + case /* string key */ 2: message.key = reader.string(); break; - case /* string version */ 4: + case /* string version */ 3: message.version = reader.string(); break; default: @@ -433,18 
+394,15 @@ class CreateCacheEntryRequest$Type extends MessageType return message; } internalBinaryWrite(message: CreateCacheEntryRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { - /* string workflow_run_backend_id = 1; */ - if (message.workflowRunBackendId !== "") - writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId); - /* string workflow_job_run_backend_id = 2; */ - if (message.workflowJobRunBackendId !== "") - writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId); - /* string key = 3; */ + /* github.actions.results.entities.v1.CacheMetadata metadata = 1; */ + if (message.metadata) + CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* string key = 2; */ if (message.key !== "") - writer.tag(3, WireType.LengthDelimited).string(message.key); - /* string version = 4; */ + writer.tag(2, WireType.LengthDelimited).string(message.key); + /* string version = 3; */ if (message.version !== "") - writer.tag(4, WireType.LengthDelimited).string(message.version); + writer.tag(3, WireType.LengthDelimited).string(message.version); let u = options.writeUnknownFields; if (u !== false) (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); @@ -513,15 +471,14 @@ export const CreateCacheEntryResponse = new CreateCacheEntryResponse$Type(); class FinalizeCacheEntryUploadRequest$Type extends MessageType { constructor() { super("github.actions.results.api.v1.FinalizeCacheEntryUploadRequest", [ - { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 3, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 4, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ }, - { no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + { no: 1, name: "metadata", kind: "message", T: () => CacheMetadata }, + { no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ }, + { no: 4, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ } ]); } create(value?: PartialMessage): FinalizeCacheEntryUploadRequest { - const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", key: "", sizeBytes: "0", version: "" }; + const message = { key: "", sizeBytes: "0", version: "" }; globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== undefined) reflectionMergePartial(this, message, value); @@ -532,19 +489,16 @@ class FinalizeCacheEntryUploadRequest$Type extends MessageType { constructor() { super("github.actions.results.api.v1.GetCacheEntryDownloadURLRequest", [ - { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 3, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 4, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }, - { no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + { no: 1, name: "metadata", kind: "message", T: () => CacheMetadata }, + { no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }, + { no: 4, 
name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ } ]); } create(value?: PartialMessage): GetCacheEntryDownloadURLRequest { - const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", key: "", restoreKeys: [], version: "" }; + const message = { key: "", restoreKeys: [], version: "" }; globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== undefined) reflectionMergePartial(this, message, value); @@ -661,19 +611,16 @@ class GetCacheEntryDownloadURLRequest$Type extends MessageType { constructor() { super("github.actions.results.api.v1.DeleteCacheEntryRequest", [ - { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 3, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + { no: 1, name: "metadata", kind: "message", T: () => CacheMetadata }, + { no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ } ]); } create(value?: PartialMessage): DeleteCacheEntryRequest { - const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", key: "" }; + const message = { key: "" }; globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== undefined) reflectionMergePartial(this, message, value); @@ -788,13 +731,10 @@ class DeleteCacheEntryRequest$Type extends MessageType while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { - case /* string workflow_run_backend_id */ 1: - message.workflowRunBackendId = reader.string(); + case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1: + message.metadata = CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); break; - case /* string workflow_job_run_backend_id */ 2: - message.workflowJobRunBackendId = reader.string(); - break; - case /* string key */ 3: + case /* string key */ 2: message.key = reader.string(); break; default: @@ -809,15 +749,12 @@ class DeleteCacheEntryRequest$Type extends MessageType return message; } internalBinaryWrite(message: DeleteCacheEntryRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { - /* string workflow_run_backend_id = 1; */ - if (message.workflowRunBackendId !== "") - writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId); - /* string workflow_job_run_backend_id = 2; */ - if (message.workflowJobRunBackendId !== "") - writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId); - /* string key = 3; */ + /* github.actions.results.entities.v1.CacheMetadata metadata = 1; */ + if (message.metadata) + CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* string key = 2; */ if (message.key !== "") - writer.tag(3, WireType.LengthDelimited).string(message.key); + writer.tag(2, WireType.LengthDelimited).string(message.key); let u = options.writeUnknownFields; if (u !== false) (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); @@ -886,14 +823,13 @@ export const DeleteCacheEntryResponse = new DeleteCacheEntryResponse$Type(); class ListCacheEntriesRequest$Type extends MessageType { constructor() { super("github.actions.results.api.v1.ListCacheEntriesRequest", [ - { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 3, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 4, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ } + { no: 1, name: "metadata", kind: "message", T: () => CacheMetadata }, + { no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ } ]); } create(value?: PartialMessage): ListCacheEntriesRequest { - const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", key: "", restoreKeys: [] }; + const message = { key: "", restoreKeys: [] }; globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== undefined) reflectionMergePartial(this, message, value); @@ -904,16 +840,13 @@ class ListCacheEntriesRequest$Type extends MessageType while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { - case /* string workflow_run_backend_id */ 1: - message.workflowRunBackendId = reader.string(); + case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1: + message.metadata = CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); break; - case /* string workflow_job_run_backend_id */ 2: - message.workflowJobRunBackendId = reader.string(); - break; - case /* string key */ 3: + case /* string key */ 2: message.key = reader.string(); break; - case /* repeated string restore_keys */ 4: + case /* repeated string restore_keys */ 3: message.restoreKeys.push(reader.string()); break; default: @@ -928,18 +861,15 @@ class ListCacheEntriesRequest$Type extends MessageType return message; } internalBinaryWrite(message: ListCacheEntriesRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { - /* string workflow_run_backend_id = 1; */ - if (message.workflowRunBackendId !== "") - writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId); - /* string workflow_job_run_backend_id = 2; */ - if (message.workflowJobRunBackendId !== "") - writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId); - /* string key = 3; */ + /* github.actions.results.entities.v1.CacheMetadata metadata = 1; */ + if (message.metadata) + CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* string key = 2; */ if (message.key !== "") - writer.tag(3, WireType.LengthDelimited).string(message.key); - /* repeated string restore_keys = 4; */ + writer.tag(2, WireType.LengthDelimited).string(message.key); + /* repeated string restore_keys = 3; */ for (let i = 0; i < message.restoreKeys.length; i++) - writer.tag(4, WireType.LengthDelimited).string(message.restoreKeys[i]); + writer.tag(3, WireType.LengthDelimited).string(message.restoreKeys[i]); let u = options.writeUnknownFields; if (u !== false) (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); @@ -1097,15 +1027,14 @@ export const ListCacheEntriesResponse_CacheEntry = new ListCacheEntriesResponse_ class LookupCacheEntryRequest$Type extends MessageType { constructor() { super("github.actions.results.api.v1.LookupCacheEntryRequest", [ - { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 3, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 4, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }, - { no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + { no: 1, name: "metadata", kind: "message", T: () => CacheMetadata }, + { no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }, + { no: 4, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ } ]); } create(value?: PartialMessage): LookupCacheEntryRequest { - const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", key: "", restoreKeys: [], version: "" }; + const message = { key: "", restoreKeys: [], version: "" }; globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== undefined) reflectionMergePartial(this, message, value); @@ -1116,19 +1045,16 @@ class LookupCacheEntryRequest$Type extends MessageType while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { - case /* string workflow_run_backend_id */ 1: - message.workflowRunBackendId = reader.string(); + case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1: + message.metadata = CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); break; - case /* string workflow_job_run_backend_id */ 2: - message.workflowJobRunBackendId = reader.string(); - break; - case /* string key */ 3: + case /* string key */ 2: message.key = reader.string(); break; - case /* repeated string restore_keys */ 4: + case /* repeated string restore_keys */ 3: message.restoreKeys.push(reader.string()); break; - case /* string version */ 5: + case /* string version */ 4: message.version = reader.string(); break; default: @@ -1143,21 +1069,18 @@ class LookupCacheEntryRequest$Type extends MessageType return message; } internalBinaryWrite(message: LookupCacheEntryRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { - /* string workflow_run_backend_id = 1; */ - if (message.workflowRunBackendId !== "") - writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId); - /* string workflow_job_run_backend_id = 2; */ - if (message.workflowJobRunBackendId !== "") - writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId); - /* string key = 3; */ + /* github.actions.results.entities.v1.CacheMetadata metadata = 1; */ + if (message.metadata) + CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* string key = 2; */ if (message.key !== "") - writer.tag(3, WireType.LengthDelimited).string(message.key); - /* repeated string restore_keys = 4; */ + writer.tag(2, WireType.LengthDelimited).string(message.key); + /* repeated string restore_keys = 3; */ for (let i = 0; i < message.restoreKeys.length; i++) - writer.tag(4, 
WireType.LengthDelimited).string(message.restoreKeys[i]); - /* string version = 5; */ + writer.tag(3, WireType.LengthDelimited).string(message.restoreKeys[i]); + /* string version = 4; */ if (message.version !== "") - writer.tag(5, WireType.LengthDelimited).string(message.version); + writer.tag(4, WireType.LengthDelimited).string(message.version); let u = options.writeUnknownFields; if (u !== false) (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); diff --git a/packages/cache/src/generated/results/entities/v1/cachemetadata.ts b/packages/cache/src/generated/results/entities/v1/cachemetadata.ts new file mode 100644 index 00000000..d7af1fe2 --- /dev/null +++ b/packages/cache/src/generated/results/entities/v1/cachemetadata.ts @@ -0,0 +1,85 @@ +// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies +// @generated from protobuf file "results/entities/v1/cachemetadata.proto" (package "github.actions.results.entities.v1", syntax proto3) +// tslint:disable +import type { BinaryWriteOptions } from "@protobuf-ts/runtime"; +import type { IBinaryWriter } from "@protobuf-ts/runtime"; +import { WireType } from "@protobuf-ts/runtime"; +import type { BinaryReadOptions } from "@protobuf-ts/runtime"; +import type { IBinaryReader } from "@protobuf-ts/runtime"; +import { UnknownFieldHandler } from "@protobuf-ts/runtime"; +import type { PartialMessage } from "@protobuf-ts/runtime"; +import { reflectionMergePartial } from "@protobuf-ts/runtime"; +import { MESSAGE_TYPE } from "@protobuf-ts/runtime"; +import { MessageType } from "@protobuf-ts/runtime"; +import { CacheScope } from "./cachescope"; +/** + * @generated from protobuf message github.actions.results.entities.v1.CacheMetadata + */ +export interface CacheMetadata { + /** + * Backend repository id + * + * @generated from protobuf field: int64 repository_id = 1; + */ + repositoryId: string; + /** + * Scopes for the cache entry + * + * @generated from protobuf field: repeated github.actions.results.entities.v1.CacheScope scope = 2; + */ + scope: CacheScope[]; +} +// @generated message type with reflection information, may provide speed optimized methods +class CacheMetadata$Type extends MessageType { + constructor() { + super("github.actions.results.entities.v1.CacheMetadata", [ + { no: 1, name: "repository_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }, + { no: 2, name: "scope", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => CacheScope } + ]); + } + create(value?: PartialMessage): CacheMetadata { + const message = { repositoryId: "0", scope: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CacheMetadata): CacheMetadata { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* int64 repository_id */ 1: + message.repositoryId = reader.int64().toString(); + break; + case /* repeated github.actions.results.entities.v1.CacheScope scope */ 2: + message.scope.push(CacheScope.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: CacheMetadata, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* int64 repository_id = 1; */ + if (message.repositoryId !== "0") + writer.tag(1, WireType.Varint).int64(message.repositoryId); + /* repeated github.actions.results.entities.v1.CacheScope scope = 2; */ + for (let i = 0; i < message.scope.length; i++) + CacheScope.internalBinaryWrite(message.scope[i], writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message github.actions.results.entities.v1.CacheMetadata + */ +export const CacheMetadata = new CacheMetadata$Type(); diff --git a/packages/cache/src/generated/results/entities/v1/cachescope.ts b/packages/cache/src/generated/results/entities/v1/cachescope.ts new file mode 100644 index 00000000..248d9f36 --- /dev/null +++ b/packages/cache/src/generated/results/entities/v1/cachescope.ts @@ -0,0 +1,84 @@ +// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies +// @generated from protobuf file "results/entities/v1/cachescope.proto" (package "github.actions.results.entities.v1", syntax proto3) +// tslint:disable +import type { BinaryWriteOptions } from "@protobuf-ts/runtime"; +import type { IBinaryWriter } from "@protobuf-ts/runtime"; +import { WireType } from "@protobuf-ts/runtime"; +import type { BinaryReadOptions } from "@protobuf-ts/runtime"; +import type { IBinaryReader } from "@protobuf-ts/runtime"; +import { UnknownFieldHandler } from "@protobuf-ts/runtime"; +import type { PartialMessage } from "@protobuf-ts/runtime"; +import { reflectionMergePartial } from "@protobuf-ts/runtime"; +import { MESSAGE_TYPE } from "@protobuf-ts/runtime"; +import { MessageType } from "@protobuf-ts/runtime"; +/** + * @generated from protobuf message github.actions.results.entities.v1.CacheScope + */ +export interface CacheScope { + /** + * Determines the scope of the cache entry + * + * @generated from protobuf field: string scope = 1; + */ + scope: string; + /** + * None: 0 | Read: 1 | Write: 2 | All: (1|2) + * + * @generated from protobuf field: int64 permission = 2; + */ + permission: string; +} +// @generated message type with reflection information, may provide speed optimized methods +class CacheScope$Type extends MessageType { + constructor() { + super("github.actions.results.entities.v1.CacheScope", [ + { no: 1, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "permission", kind: "scalar", T: 3 /*ScalarType.INT64*/ } + ]); + } + create(value?: PartialMessage): CacheScope { + const message = { scope: "", 
permission: "0" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CacheScope): CacheScope { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string scope */ 1: + message.scope = reader.string(); + break; + case /* int64 permission */ 2: + message.permission = reader.int64().toString(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: CacheScope, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string scope = 1; */ + if (message.scope !== "") + writer.tag(1, WireType.LengthDelimited).string(message.scope); + /* int64 permission = 2; */ + if (message.permission !== "0") + writer.tag(2, WireType.Varint).int64(message.permission); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message github.actions.results.entities.v1.CacheScope + */ +export const CacheScope = new CacheScope$Type(); diff --git a/packages/cache/src/internal/cacheUtils.ts b/packages/cache/src/internal/cacheUtils.ts index a7548171..250843a5 100644 --- a/packages/cache/src/internal/cacheUtils.ts +++ b/packages/cache/src/internal/cacheUtils.ts @@ -7,7 +7,6 @@ import * as fs from 'fs' import * as path from 'path' import * as semver from 'semver' import * as util from 'util' -import jwt_decode from 'jwt-decode' import { CacheFilename, CompressionMethod, @@ -179,71 +178,3 @@ export function getRuntimeToken(): string { } return token } - -export interface BackendIds { - workflowRunBackendId: string - workflowJobRunBackendId: string -} - -interface ActionsToken { - scp: string -} - -const InvalidJwtError = new Error( - 'Failed to get backend IDs: The provided JWT token is invalid and/or missing claims' -) - -// uses the JWT token claims to get the -// workflow run and workflow job run backend ids -export function getBackendIdsFromToken(): BackendIds { - const token = getRuntimeToken() - const decoded = jwt_decode(token) - if (!decoded.scp) { - throw InvalidJwtError - } - - /* - * example decoded: - * { - * scp: "Actions.ExampleScope Actions.Results:ce7f54c7-61c7-4aae-887f-30da475f5f1a:ca395085-040a-526b-2ce8-bdc85f692774" - * } - */ - - const scpParts = decoded.scp.split(' ') - if (scpParts.length === 0) { - throw InvalidJwtError - } - /* - * example scpParts: - * ["Actions.ExampleScope", "Actions.Results:ce7f54c7-61c7-4aae-887f-30da475f5f1a:ca395085-040a-526b-2ce8-bdc85f692774"] - */ - - for (const scopes of scpParts) { - const scopeParts = scopes.split(':') - if (scopeParts?.[0] !== 'Actions.Results') { - // not the Actions.Results scope - continue - } - - /* - * example scopeParts: - * ["Actions.Results", "ce7f54c7-61c7-4aae-887f-30da475f5f1a", "ca395085-040a-526b-2ce8-bdc85f692774"] - */ - if (scopeParts.length !== 3) { - // missing expected number of claims - 
throw InvalidJwtError - } - - const ids = { - workflowRunBackendId: scopeParts[1], - workflowJobRunBackendId: scopeParts[2] - } - - core.debug(`Workflow Run Backend ID: ${ids.workflowRunBackendId}`) - core.debug(`Workflow Job Run Backend ID: ${ids.workflowJobRunBackendId}`) - - return ids - } - - throw InvalidJwtError -} diff --git a/packages/cache/src/internal/config.ts b/packages/cache/src/internal/config.ts index 61d84677..28524e72 100644 --- a/packages/cache/src/internal/config.ts +++ b/packages/cache/src/internal/config.ts @@ -1,11 +1,3 @@ -export function getRuntimeToken(): string { - const token = process.env['ACTIONS_RUNTIME_TOKEN'] - if (!token) { - throw new Error('Unable to get the ACTIONS_RUNTIME_TOKEN env variable') - } - return token -} - export function getCacheServiceVersion(): string { return process.env['ACTIONS_CACHE_SERVICE_V2'] ? 'v2' : 'v1' } diff --git a/packages/cache/src/internal/shared/cacheTwirpClient.ts b/packages/cache/src/internal/shared/cacheTwirpClient.ts index 9a0f0679..9394a08c 100644 --- a/packages/cache/src/internal/shared/cacheTwirpClient.ts +++ b/packages/cache/src/internal/shared/cacheTwirpClient.ts @@ -1,7 +1,8 @@ import {info, debug} from '@actions/core' import {getUserAgentString} from './user-agent' import {NetworkError, UsageError} from './errors' -import {getRuntimeToken, getCacheServiceURL} from '../config' +import {getCacheServiceURL} from '../config' +import {getRuntimeToken} from '../cacheUtils' import {BearerCredentialHandler} from '@actions/http-client/lib/auth' import {HttpClient, HttpClientResponse, HttpCodes} from '@actions/http-client' import {CacheServiceClientJSON} from '../../generated/results/api/v1/cache.twirp' From ab58a59f33146930d16eca4df69c2b455fc977dd Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Wed, 20 Nov 2024 14:02:54 -0800 Subject: [PATCH 047/108] Bump cross-spawn to 7.0.6 --- packages/artifact/package-lock.json | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/packages/artifact/package-lock.json b/packages/artifact/package-lock.json index 8608ac3d..8ad6369c 100644 --- a/packages/artifact/package-lock.json +++ b/packages/artifact/package-lock.json @@ -839,9 +839,10 @@ } }, "node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "license": "MIT", "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", From 267841d7bd659368f7382ca22d5ca0b2af68f0b3 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Thu, 21 Nov 2024 04:01:44 -0800 Subject: [PATCH 048/108] Add isGhes gate and refactor to clean up circular dependencies --- packages/cache/__tests__/cacheUtils.test.ts | 22 +------------- packages/cache/__tests__/config.test.ts | 26 ++++++++++++++++ packages/cache/__tests__/saveCache.test.ts | 28 +++++++++-------- packages/cache/src/cache.ts | 30 +++++++++---------- .../cache/src/internal/blob/download-cache.ts | 2 +- .../cache/src/internal/blob/upload-cache.ts | 2 +- packages/cache/src/internal/cacheUtils.ts | 13 -------- packages/cache/src/internal/config.ts | 20 +++++++++++++ 8 files changed, 79 
insertions(+), 64 deletions(-) create mode 100644 packages/cache/__tests__/config.test.ts diff --git a/packages/cache/__tests__/cacheUtils.test.ts b/packages/cache/__tests__/cacheUtils.test.ts index 4388026a..de6a01fe 100644 --- a/packages/cache/__tests__/cacheUtils.test.ts +++ b/packages/cache/__tests__/cacheUtils.test.ts @@ -1,4 +1,4 @@ -import {promises as fs} from 'fs' +import { promises as fs } from 'fs' import * as path from 'path' import * as cacheUtils from '../src/internal/cacheUtils' @@ -42,23 +42,3 @@ test('resolvePaths works on github workspace directory', async () => { const paths = await cacheUtils.resolvePaths([workspace]) expect(paths.length).toBeGreaterThan(0) }) - -test('isGhes returns false for github.com', async () => { - process.env.GITHUB_SERVER_URL = 'https://github.com' - expect(cacheUtils.isGhes()).toBe(false) -}) - -test('isGhes returns false for ghe.com', async () => { - process.env.GITHUB_SERVER_URL = 'https://somedomain.ghe.com' - expect(cacheUtils.isGhes()).toBe(false) -}) - -test('isGhes returns true for enterprise URL', async () => { - process.env.GITHUB_SERVER_URL = 'https://my-enterprise.github.com' - expect(cacheUtils.isGhes()).toBe(true) -}) - -test('isGhes returns false for ghe.localhost', () => { - process.env.GITHUB_SERVER_URL = 'https://my.domain.ghe.localhost' - expect(cacheUtils.isGhes()).toBe(false) -}) diff --git a/packages/cache/__tests__/config.test.ts b/packages/cache/__tests__/config.test.ts new file mode 100644 index 00000000..66cc34a3 --- /dev/null +++ b/packages/cache/__tests__/config.test.ts @@ -0,0 +1,26 @@ +import { promises as fs } from 'fs' +import * as config from '../src/internal/config' + +beforeEach(() => { + jest.resetModules() +}) + +test('isGhes returns false for github.com', async () => { + process.env.GITHUB_SERVER_URL = 'https://github.com' + expect(config.isGhes()).toBe(false) +}) + +test('isGhes returns false for ghe.com', async () => { + process.env.GITHUB_SERVER_URL = 'https://somedomain.ghe.com' + expect(config.isGhes()).toBe(false) +}) + +test('isGhes returns true for enterprise URL', async () => { + process.env.GITHUB_SERVER_URL = 'https://my-enterprise.github.com' + expect(config.isGhes()).toBe(true) +}) + +test('isGhes returns false for ghe.localhost', () => { + process.env.GITHUB_SERVER_URL = 'https://my.domain.ghe.localhost' + expect(config.isGhes()).toBe(false) +}) diff --git a/packages/cache/__tests__/saveCache.test.ts b/packages/cache/__tests__/saveCache.test.ts index 4d0027be..e0b6cffd 100644 --- a/packages/cache/__tests__/saveCache.test.ts +++ b/packages/cache/__tests__/saveCache.test.ts @@ -1,27 +1,29 @@ import * as core from '@actions/core' import * as path from 'path' -import {saveCache} from '../src/cache' +import { saveCache } from '../src/cache' import * as cacheHttpClient from '../src/internal/cacheHttpClient' import * as cacheUtils from '../src/internal/cacheUtils' -import {CacheFilename, CompressionMethod} from '../src/internal/constants' +import * as config from '../src/internal/config' +import { CacheFilename, CompressionMethod } from '../src/internal/constants' import * as tar from '../src/internal/tar' -import {TypedResponse} from '@actions/http-client/lib/interfaces' +import { TypedResponse } from '@actions/http-client/lib/interfaces' import { ReserveCacheResponse, ITypedResponseWithError } from '../src/internal/contracts' -import {HttpClientError} from '@actions/http-client' +import { HttpClientError } from '@actions/http-client' jest.mock('../src/internal/cacheHttpClient') 
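// Editorial sketch (comments only, not lines from this patch): with these
// module mocks in place, a test can redirect a single export instead of
// editing the environment. Assuming `import * as config from
// '../src/internal/config'` as in the hunks below:
//
//   jest.spyOn(config, 'isGhes').mockReturnValueOnce(true)
//
// forces saveCache down the GHES branch for one call, with no changes to
// process.env.GITHUB_SERVER_URL.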
jest.mock('../src/internal/cacheUtils') +jest.mock('../src/internal/config') jest.mock('../src/internal/tar') beforeAll(() => { - jest.spyOn(console, 'log').mockImplementation(() => {}) - jest.spyOn(core, 'debug').mockImplementation(() => {}) - jest.spyOn(core, 'info').mockImplementation(() => {}) - jest.spyOn(core, 'warning').mockImplementation(() => {}) - jest.spyOn(core, 'error').mockImplementation(() => {}) + jest.spyOn(console, 'log').mockImplementation(() => { }) + jest.spyOn(core, 'debug').mockImplementation(() => { }) + jest.spyOn(core, 'info').mockImplementation(() => { }) + jest.spyOn(core, 'warning').mockImplementation(() => { }) + jest.spyOn(core, 'error').mockImplementation(() => { }) jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => { const actualUtils = jest.requireActual('../src/internal/cacheUtils') return actualUtils.getCacheFileName(cm) @@ -94,7 +96,7 @@ test('save with large cache outputs should fail in GHES with error message', asy .spyOn(cacheUtils, 'getCompressionMethod') .mockReturnValueOnce(Promise.resolve(compression)) - jest.spyOn(cacheUtils, 'isGhes').mockReturnValueOnce(true) + jest.spyOn(config, 'isGhes').mockReturnValueOnce(true) const reserveCacheMock = jest .spyOn(cacheHttpClient, 'reserveCache') @@ -146,7 +148,7 @@ test('save with large cache outputs should fail in GHES without error message', .spyOn(cacheUtils, 'getCompressionMethod') .mockReturnValueOnce(Promise.resolve(compression)) - jest.spyOn(cacheUtils, 'isGhes').mockReturnValueOnce(true) + jest.spyOn(config, 'isGhes').mockReturnValueOnce(true) const reserveCacheMock = jest .spyOn(cacheHttpClient, 'reserveCache') @@ -229,7 +231,7 @@ test('save with server error should fail', async () => { .mockImplementation(async () => { const response: TypedResponse = { statusCode: 500, - result: {cacheId}, + result: { cacheId }, headers: {} } return response @@ -283,7 +285,7 @@ test('save with valid inputs uploads a cache', async () => { .mockImplementation(async () => { const response: TypedResponse = { statusCode: 500, - result: {cacheId}, + result: { cacheId }, headers: {} } return response diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index fe379b9a..2383a40c 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -1,20 +1,20 @@ import * as core from '@actions/core' import * as path from 'path' -import * as config from './internal/config' import * as utils from './internal/cacheUtils' import * as cacheHttpClient from './internal/cacheHttpClient' import * as cacheTwirpClient from './internal/shared/cacheTwirpClient' -import {DownloadOptions, UploadOptions} from './options' -import {createTar, extractTar, listTar} from './internal/tar' +import { getCacheServiceVersion, isGhes } from './internal/config' +import { DownloadOptions, UploadOptions } from './options' +import { createTar, extractTar, listTar } from './internal/tar' import { CreateCacheEntryRequest, FinalizeCacheEntryUploadRequest, FinalizeCacheEntryUploadResponse, GetCacheEntryDownloadURLRequest } from './generated/results/api/v1/cache' -import {CacheFileSizeLimit} from './internal/constants' -import {UploadCacheFile} from './internal/blob/upload-cache' -import {DownloadCacheFile} from './internal/blob/download-cache' +import { CacheFileSizeLimit } from './internal/constants' +import { uploadCacheFile } from './internal/blob/upload-cache' +import { downloadCacheFile } from './internal/blob/download-cache' export class ValidationError extends Error { constructor(message: string) { 
super(message) @@ -81,7 +81,7 @@ export async function restoreCache( ): Promise { checkPaths(paths) - const cacheServiceVersion: string = config.getCacheServiceVersion() + const cacheServiceVersion: string = getCacheServiceVersion() switch (cacheServiceVersion) { case 'v2': return await restoreCacheV2( @@ -269,7 +269,7 @@ async function restoreCacheV2( core.debug(`Archive path: ${archivePath}`) core.debug(`Starting download of archive to: ${archivePath}`) - await DownloadCacheFile(response.signedDownloadUrl, archivePath) + await downloadCacheFile(response.signedDownloadUrl, archivePath) const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath) core.info( @@ -317,7 +317,7 @@ export async function saveCache( checkPaths(paths) checkKey(key) - const cacheServiceVersion: string = config.getCacheServiceVersion() + const cacheServiceVersion: string = getCacheServiceVersion() switch (cacheServiceVersion) { case 'v2': return await saveCacheV2(paths, key, options, enableCrossOsArchive) @@ -373,7 +373,7 @@ async function saveCacheV1( core.debug(`File Size: ${archiveFileSize}`) // For GHES, this check will take place in ReserveCache API with enterprise file size limit - if (archiveFileSize > fileSizeLimit && !utils.isGhes()) { + if (archiveFileSize > fileSizeLimit && !isGhes()) { throw new Error( `Cache size of ~${Math.round( archiveFileSize / (1024 * 1024) @@ -397,9 +397,9 @@ async function saveCacheV1( } else if (reserveCacheResponse?.statusCode === 400) { throw new Error( reserveCacheResponse?.error?.message ?? - `Cache size of ~${Math.round( - archiveFileSize / (1024 * 1024) - )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.` + `Cache size of ~${Math.round( + archiveFileSize / (1024 * 1024) + )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.` ) } else { throw new ReserveCacheError( @@ -477,7 +477,7 @@ async function saveCacheV2( core.debug(`File Size: ${archiveFileSize}`) // For GHES, this check will take place in ReserveCache API with enterprise file size limit - if (archiveFileSize > CacheFileSizeLimit && !utils.isGhes()) { + if (archiveFileSize > CacheFileSizeLimit && !isGhes()) { throw new Error( `Cache size of ~${Math.round( archiveFileSize / (1024 * 1024) @@ -504,7 +504,7 @@ async function saveCacheV2( } core.debug(`Attempting to upload cache located at: ${archivePath}`) - await UploadCacheFile(response.signedUploadUrl, archivePath) + await uploadCacheFile(response.signedUploadUrl, archivePath) const finalizeRequest: FinalizeCacheEntryUploadRequest = { key, diff --git a/packages/cache/src/internal/blob/download-cache.ts b/packages/cache/src/internal/blob/download-cache.ts index 38443de3..807c73a4 100644 --- a/packages/cache/src/internal/blob/download-cache.ts +++ b/packages/cache/src/internal/blob/download-cache.ts @@ -6,7 +6,7 @@ import { BlobDownloadOptions } from '@azure/storage-blob' -export async function DownloadCacheFile( +export async function downloadCacheFile( signedUploadURL: string, archivePath: string ): Promise<{}> { diff --git a/packages/cache/src/internal/blob/upload-cache.ts b/packages/cache/src/internal/blob/upload-cache.ts index a29672dc..15c913ed 100644 --- a/packages/cache/src/internal/blob/upload-cache.ts +++ b/packages/cache/src/internal/blob/upload-cache.ts @@ -5,7 +5,7 @@ import { BlockBlobParallelUploadOptions } from '@azure/storage-blob' -export async function UploadCacheFile( +export async function uploadCacheFile( signedUploadURL: string, archivePath: string ): Promise<{}> { diff --git 
a/packages/cache/src/internal/cacheUtils.ts b/packages/cache/src/internal/cacheUtils.ts index 250843a5..de9053ea 100644 --- a/packages/cache/src/internal/cacheUtils.ts +++ b/packages/cache/src/internal/cacheUtils.ts @@ -133,19 +133,6 @@ export function assertDefined(name: string, value?: T): T { return value } -export function isGhes(): boolean { - const ghUrl = new URL( - process.env['GITHUB_SERVER_URL'] || 'https://github.com' - ) - - const hostname = ghUrl.hostname.trimEnd().toUpperCase() - const isGitHubHost = hostname === 'GITHUB.COM' - const isGheHost = - hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST') - - return !isGitHubHost && !isGheHost -} - export function getCacheVersion( paths: string[], compressionMethod?: CompressionMethod, diff --git a/packages/cache/src/internal/config.ts b/packages/cache/src/internal/config.ts index 28524e72..24b9fa1a 100644 --- a/packages/cache/src/internal/config.ts +++ b/packages/cache/src/internal/config.ts @@ -1,9 +1,29 @@ +export function isGhes(): boolean { + const ghUrl = new URL( + process.env['GITHUB_SERVER_URL'] || 'https://github.com' + ) + + const hostname = ghUrl.hostname.trimEnd().toUpperCase() + const isGitHubHost = hostname === 'GITHUB.COM' + const isGheHost = hostname.endsWith('.GHE.COM') + const isLocalHost = hostname.endsWith('.LOCALHOST') + + return !isGitHubHost && !isGheHost && !isLocalHost +} + export function getCacheServiceVersion(): string { + // Cache service v2 is not supported on GHES. We will default to + // cache service v1 even if the feature flag was enabled by the user. + if (isGhes()) return 'v1' + return process.env['ACTIONS_CACHE_SERVICE_V2'] ? 'v2' : 'v1' } export function getCacheServiceURL(): string { const version = getCacheServiceVersion() + + // Based on the version of the cache service, we will determine which + // URL to use. 
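// Editorial sketch (comments only, not lines from this patch): applying
// the isGhes() rules above to the URLs exercised by config.test.ts gives:
//
//   GITHUB_SERVER_URL=https://github.com               -> isGhes() === false
//   GITHUB_SERVER_URL=https://somedomain.ghe.com       -> isGhes() === false
//   GITHUB_SERVER_URL=https://my.domain.ghe.localhost  -> isGhes() === false
//   GITHUB_SERVER_URL=https://my-enterprise.github.com -> isGhes() === true
//
// so on GHES, getCacheServiceVersion() reports 'v1' even when
// ACTIONS_CACHE_SERVICE_V2 is set.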
switch (version) { case 'v1': return ( From e2028d43a26abaf59ed4b4715d5c01f1bd61d722 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Thu, 21 Nov 2024 04:05:04 -0800 Subject: [PATCH 049/108] Linter fixes and remove unnecessary dependency --- packages/cache/__tests__/cacheUtils.test.ts | 2 +- packages/cache/__tests__/config.test.ts | 19 +++++++++--------- packages/cache/__tests__/saveCache.test.ts | 22 ++++++++++----------- packages/cache/src/cache.ts | 18 ++++++++--------- 4 files changed, 30 insertions(+), 31 deletions(-) diff --git a/packages/cache/__tests__/cacheUtils.test.ts b/packages/cache/__tests__/cacheUtils.test.ts index de6a01fe..fad045b4 100644 --- a/packages/cache/__tests__/cacheUtils.test.ts +++ b/packages/cache/__tests__/cacheUtils.test.ts @@ -1,4 +1,4 @@ -import { promises as fs } from 'fs' +import {promises as fs} from 'fs' import * as path from 'path' import * as cacheUtils from '../src/internal/cacheUtils' diff --git a/packages/cache/__tests__/config.test.ts b/packages/cache/__tests__/config.test.ts index 66cc34a3..52d86d36 100644 --- a/packages/cache/__tests__/config.test.ts +++ b/packages/cache/__tests__/config.test.ts @@ -1,26 +1,25 @@ -import { promises as fs } from 'fs' import * as config from '../src/internal/config' beforeEach(() => { - jest.resetModules() + jest.resetModules() }) test('isGhes returns false for github.com', async () => { - process.env.GITHUB_SERVER_URL = 'https://github.com' - expect(config.isGhes()).toBe(false) + process.env.GITHUB_SERVER_URL = 'https://github.com' + expect(config.isGhes()).toBe(false) }) test('isGhes returns false for ghe.com', async () => { - process.env.GITHUB_SERVER_URL = 'https://somedomain.ghe.com' - expect(config.isGhes()).toBe(false) + process.env.GITHUB_SERVER_URL = 'https://somedomain.ghe.com' + expect(config.isGhes()).toBe(false) }) test('isGhes returns true for enterprise URL', async () => { - process.env.GITHUB_SERVER_URL = 'https://my-enterprise.github.com' - expect(config.isGhes()).toBe(true) + process.env.GITHUB_SERVER_URL = 'https://my-enterprise.github.com' + expect(config.isGhes()).toBe(true) }) test('isGhes returns false for ghe.localhost', () => { - process.env.GITHUB_SERVER_URL = 'https://my.domain.ghe.localhost' - expect(config.isGhes()).toBe(false) + process.env.GITHUB_SERVER_URL = 'https://my.domain.ghe.localhost' + expect(config.isGhes()).toBe(false) }) diff --git a/packages/cache/__tests__/saveCache.test.ts b/packages/cache/__tests__/saveCache.test.ts index e0b6cffd..81049e0a 100644 --- a/packages/cache/__tests__/saveCache.test.ts +++ b/packages/cache/__tests__/saveCache.test.ts @@ -1,17 +1,17 @@ import * as core from '@actions/core' import * as path from 'path' -import { saveCache } from '../src/cache' +import {saveCache} from '../src/cache' import * as cacheHttpClient from '../src/internal/cacheHttpClient' import * as cacheUtils from '../src/internal/cacheUtils' import * as config from '../src/internal/config' -import { CacheFilename, CompressionMethod } from '../src/internal/constants' +import {CacheFilename, CompressionMethod} from '../src/internal/constants' import * as tar from '../src/internal/tar' -import { TypedResponse } from '@actions/http-client/lib/interfaces' +import {TypedResponse} from '@actions/http-client/lib/interfaces' import { ReserveCacheResponse, ITypedResponseWithError } from '../src/internal/contracts' -import { HttpClientError } from '@actions/http-client' +import {HttpClientError} from '@actions/http-client' 
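// Editorial sketch (comments only, not lines from this patch): the tests
// below pin the service version through the mocked config module, mirroring
// the dispatch in cache.ts. Roughly, per the saveCache hunk earlier in this
// series (the 'v1'/default arm is assumed for illustration; only the 'v2'
// arm is visible there):
//
//   const cacheServiceVersion: string = getCacheServiceVersion()
//   switch (cacheServiceVersion) {
//     case 'v2':
//       return await saveCacheV2(paths, key, options, enableCrossOsArchive)
//     case 'v1':
//     default:
//       return await saveCacheV1(paths, key, options, enableCrossOsArchive)
//   }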
jest.mock('../src/internal/cacheHttpClient') jest.mock('../src/internal/cacheUtils') @@ -19,11 +19,11 @@ jest.mock('../src/internal/config') jest.mock('../src/internal/tar') beforeAll(() => { - jest.spyOn(console, 'log').mockImplementation(() => { }) - jest.spyOn(core, 'debug').mockImplementation(() => { }) - jest.spyOn(core, 'info').mockImplementation(() => { }) - jest.spyOn(core, 'warning').mockImplementation(() => { }) - jest.spyOn(core, 'error').mockImplementation(() => { }) + jest.spyOn(console, 'log').mockImplementation(() => {}) + jest.spyOn(core, 'debug').mockImplementation(() => {}) + jest.spyOn(core, 'info').mockImplementation(() => {}) + jest.spyOn(core, 'warning').mockImplementation(() => {}) + jest.spyOn(core, 'error').mockImplementation(() => {}) jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => { const actualUtils = jest.requireActual('../src/internal/cacheUtils') return actualUtils.getCacheFileName(cm) @@ -231,7 +231,7 @@ test('save with server error should fail', async () => { .mockImplementation(async () => { const response: TypedResponse = { statusCode: 500, - result: { cacheId }, + result: {cacheId}, headers: {} } return response @@ -285,7 +285,7 @@ test('save with valid inputs uploads a cache', async () => { .mockImplementation(async () => { const response: TypedResponse = { statusCode: 500, - result: { cacheId }, + result: {cacheId}, headers: {} } return response diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index 2383a40c..1450c8ac 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -3,18 +3,18 @@ import * as path from 'path' import * as utils from './internal/cacheUtils' import * as cacheHttpClient from './internal/cacheHttpClient' import * as cacheTwirpClient from './internal/shared/cacheTwirpClient' -import { getCacheServiceVersion, isGhes } from './internal/config' -import { DownloadOptions, UploadOptions } from './options' -import { createTar, extractTar, listTar } from './internal/tar' +import {getCacheServiceVersion, isGhes} from './internal/config' +import {DownloadOptions, UploadOptions} from './options' +import {createTar, extractTar, listTar} from './internal/tar' import { CreateCacheEntryRequest, FinalizeCacheEntryUploadRequest, FinalizeCacheEntryUploadResponse, GetCacheEntryDownloadURLRequest } from './generated/results/api/v1/cache' -import { CacheFileSizeLimit } from './internal/constants' -import { uploadCacheFile } from './internal/blob/upload-cache' -import { downloadCacheFile } from './internal/blob/download-cache' +import {CacheFileSizeLimit} from './internal/constants' +import {uploadCacheFile} from './internal/blob/upload-cache' +import {downloadCacheFile} from './internal/blob/download-cache' export class ValidationError extends Error { constructor(message: string) { super(message) @@ -397,9 +397,9 @@ async function saveCacheV1( } else if (reserveCacheResponse?.statusCode === 400) { throw new Error( reserveCacheResponse?.error?.message ?? 
- `Cache size of ~${Math.round( - archiveFileSize / (1024 * 1024) - )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.` + `Cache size of ~${Math.round( + archiveFileSize / (1024 * 1024) + )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.` ) } else { throw new ReserveCacheError( From 39d19810a88675c2360d4949b352d94cc453827b Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Fri, 22 Nov 2024 09:01:59 -0800 Subject: [PATCH 050/108] Add restore tests --- .../cache/__tests__/restoreCacheV2.test.ts | 327 ++++++++++++++++++ packages/cache/src/cache.ts | 36 +- 2 files changed, 352 insertions(+), 11 deletions(-) create mode 100644 packages/cache/__tests__/restoreCacheV2.test.ts diff --git a/packages/cache/__tests__/restoreCacheV2.test.ts b/packages/cache/__tests__/restoreCacheV2.test.ts new file mode 100644 index 00000000..87b2d1d0 --- /dev/null +++ b/packages/cache/__tests__/restoreCacheV2.test.ts @@ -0,0 +1,327 @@ +import * as core from '@actions/core' +import * as path from 'path' +import * as tar from '../src/internal/tar' +import * as config from '../src/internal/config' +import * as cacheUtils from '../src/internal/cacheUtils' +import * as cacheHttpClient from '../src/internal/cacheHttpClient' +import { restoreCache } from '../src/cache' +import { CacheFilename, CompressionMethod } from '../src/internal/constants' +import { ArtifactCacheEntry } from '../src/internal/contracts' +import { CacheServiceClientJSON } from '../src/generated/results/api/v1/cache.twirp' + +jest.mock('../src/internal/cacheHttpClient') +jest.mock('../src/internal/cacheUtils') +jest.mock('../src/internal/config') +jest.mock('../src/internal/tar') + +beforeAll(() => { + jest.spyOn(console, 'log').mockImplementation(() => { }) + jest.spyOn(core, 'debug').mockImplementation(() => { }) + jest.spyOn(core, 'info').mockImplementation(() => { }) + jest.spyOn(core, 'warning').mockImplementation(() => { }) + jest.spyOn(core, 'error').mockImplementation(() => { }) + + jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => { + const actualUtils = jest.requireActual('../src/internal/cacheUtils') + return actualUtils.getCacheFileName(cm) + }) + + // Ensure that we're using v2 for these tests + jest.spyOn(config, 'getCacheServiceVersion').mockReturnValue('v2') +}) + +test('restore with no path should fail', async () => { + const paths: string[] = [] + const key = 'node-test' + await expect(restoreCache(paths, key)).rejects.toThrowError( + `Path Validation Error: At least one directory or file path is required` + ) +}) + +test('restore with too many keys should fail', async () => { + const paths = ['node_modules'] + const key = 'node-test' + const restoreKeys = [...Array(20).keys()].map(x => x.toString()) + await expect(restoreCache(paths, key, restoreKeys)).rejects.toThrowError( + `Key Validation Error: Keys are limited to a maximum of 10.` + ) +}) + +test('restore with large key should fail', async () => { + const paths = ['node_modules'] + const key = 'foo'.repeat(512) // Over the 512 character limit + await expect(restoreCache(paths, key)).rejects.toThrowError( + `Key Validation Error: ${key} cannot be larger than 512 characters.` + ) +}) + +test('restore with invalid key should fail', async () => { + const paths = ['node_modules'] + const key = 'comma,comma' + await expect(restoreCache(paths, key)).rejects.toThrowError( + `Key Validation Error: ${key} cannot contain commas.` + ) +}) + +test('restore with no cache found', async () 
=> { + const paths = ['node_modules'] + const key = 'node-test' + + jest + .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL') + .mockReturnValue(Promise.resolve({ ok: false, signedDownloadUrl: '' })) + + const cacheKey = await restoreCache(paths, key) + + expect(cacheKey).toBe(undefined) +}) + +test('restore with server error should fail', async () => { + const paths = ['node_modules'] + const key = 'node-test' + const logWarningMock = jest.spyOn(core, 'warning') + + jest + .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL') + .mockImplementation(() => { + throw new Error('HTTP Error Occurred') + }) + + const cacheKey = await restoreCache(paths, key) + expect(cacheKey).toBe(undefined) + expect(logWarningMock).toHaveBeenCalledTimes(1) + expect(logWarningMock).toHaveBeenCalledWith( + 'Failed to restore: HTTP Error Occurred' + ) +}) + +// test('restore with restore keys and no cache found', async () => { +// const paths = ['node_modules'] +// const key = 'node-test' +// const restoreKey = 'node-' + +// jest +// .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL') +// .mockImplementation(() => { +// return Promise.resolve(null) +// }) +// jest.spyOn(cacheHttpClient, 'getCacheEntry').mockImplementation(async () => { +// return Promise.resolve(null) +// }) + +// const cacheKey = await restoreCache(paths, key, [restoreKey]) + +// expect(cacheKey).toBe(undefined) +// }) + +// test('restore with gzip compressed cache found', async () => { +// const paths = ['node_modules'] +// const key = 'node-test' + +// const cacheEntry: ArtifactCacheEntry = { +// cacheKey: key, +// scope: 'refs/heads/main', +// archiveLocation: 'www.actionscache.test/download' +// } +// const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry') +// getCacheMock.mockImplementation(async () => { +// return Promise.resolve(cacheEntry) +// }) + +// const tempPath = '/foo/bar' + +// const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') +// createTempDirectoryMock.mockImplementation(async () => { +// return Promise.resolve(tempPath) +// }) + +// const archivePath = path.join(tempPath, CacheFilename.Gzip) +// const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache') + +// const fileSize = 142 +// const getArchiveFileSizeInBytesMock = jest +// .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') +// .mockReturnValue(fileSize) + +// const extractTarMock = jest.spyOn(tar, 'extractTar') +// const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile') + +// const compression = CompressionMethod.Gzip +// const getCompressionMock = jest +// .spyOn(cacheUtils, 'getCompressionMethod') +// .mockReturnValue(Promise.resolve(compression)) + +// const cacheKey = await restoreCache(paths, key) + +// expect(cacheKey).toBe(key) +// expect(getCacheMock).toHaveBeenCalledWith([key], paths, { +// compressionMethod: compression, +// enableCrossOsArchive: false +// }) +// expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) +// expect(downloadCacheMock).toHaveBeenCalledWith( +// cacheEntry.archiveLocation, +// archivePath, +// undefined +// ) +// expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) + +// expect(extractTarMock).toHaveBeenCalledTimes(1) +// expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression) + +// expect(unlinkFileMock).toHaveBeenCalledTimes(1) +// expect(unlinkFileMock).toHaveBeenCalledWith(archivePath) + +// expect(getCompressionMock).toHaveBeenCalledTimes(1) +// }) + +// test('restore with zstd compressed cache 
found', async () => { +// const paths = ['node_modules'] +// const key = 'node-test' + +// const infoMock = jest.spyOn(core, 'info') + +// const cacheEntry: ArtifactCacheEntry = { +// cacheKey: key, +// scope: 'refs/heads/main', +// archiveLocation: 'www.actionscache.test/download' +// } +// const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry') +// getCacheMock.mockImplementation(async () => { +// return Promise.resolve(cacheEntry) +// }) +// const tempPath = '/foo/bar' + +// const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') +// createTempDirectoryMock.mockImplementation(async () => { +// return Promise.resolve(tempPath) +// }) + +// const archivePath = path.join(tempPath, CacheFilename.Zstd) +// const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache') + +// const fileSize = 62915000 +// const getArchiveFileSizeInBytesMock = jest +// .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') +// .mockReturnValue(fileSize) + +// const extractTarMock = jest.spyOn(tar, 'extractTar') +// const compression = CompressionMethod.Zstd +// const getCompressionMock = jest +// .spyOn(cacheUtils, 'getCompressionMethod') +// .mockReturnValue(Promise.resolve(compression)) + +// const cacheKey = await restoreCache(paths, key) + +// expect(cacheKey).toBe(key) +// expect(getCacheMock).toHaveBeenCalledWith([key], paths, { +// compressionMethod: compression, +// enableCrossOsArchive: false +// }) +// expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) +// expect(downloadCacheMock).toHaveBeenCalledWith( +// cacheEntry.archiveLocation, +// archivePath, +// undefined +// ) +// expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) +// expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`) + +// expect(extractTarMock).toHaveBeenCalledTimes(1) +// expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression) +// expect(getCompressionMock).toHaveBeenCalledTimes(1) +// }) + +// test('restore with cache found for restore key', async () => { +// const paths = ['node_modules'] +// const key = 'node-test' +// const restoreKey = 'node-' + +// const infoMock = jest.spyOn(core, 'info') + +// const cacheEntry: ArtifactCacheEntry = { +// cacheKey: restoreKey, +// scope: 'refs/heads/main', +// archiveLocation: 'www.actionscache.test/download' +// } +// const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry') +// getCacheMock.mockImplementation(async () => { +// return Promise.resolve(cacheEntry) +// }) +// const tempPath = '/foo/bar' + +// const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') +// createTempDirectoryMock.mockImplementation(async () => { +// return Promise.resolve(tempPath) +// }) + +// const archivePath = path.join(tempPath, CacheFilename.Zstd) +// const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache') + +// const fileSize = 142 +// const getArchiveFileSizeInBytesMock = jest +// .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') +// .mockReturnValue(fileSize) + +// const extractTarMock = jest.spyOn(tar, 'extractTar') +// const compression = CompressionMethod.Zstd +// const getCompressionMock = jest +// .spyOn(cacheUtils, 'getCompressionMethod') +// .mockReturnValue(Promise.resolve(compression)) + +// const cacheKey = await restoreCache(paths, key, [restoreKey]) + +// expect(cacheKey).toBe(restoreKey) +// expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey], paths, { +// compressionMethod: compression, +// enableCrossOsArchive: false +// }) +// 
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) +// expect(downloadCacheMock).toHaveBeenCalledWith( +// cacheEntry.archiveLocation, +// archivePath, +// undefined +// ) +// expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) +// expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`) + +// expect(extractTarMock).toHaveBeenCalledTimes(1) +// expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression) +// expect(getCompressionMock).toHaveBeenCalledTimes(1) +// }) + +// test('restore with dry run', async () => { +// const paths = ['node_modules'] +// const key = 'node-test' +// const options = { lookupOnly: true } + +// const cacheEntry: ArtifactCacheEntry = { +// cacheKey: key, +// scope: 'refs/heads/main', +// archiveLocation: 'www.actionscache.test/download' +// } +// const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry') +// getCacheMock.mockImplementation(async () => { +// return Promise.resolve(cacheEntry) +// }) + +// const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') +// const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache') + +// const compression = CompressionMethod.Gzip +// const getCompressionMock = jest +// .spyOn(cacheUtils, 'getCompressionMethod') +// .mockReturnValue(Promise.resolve(compression)) + +// const cacheKey = await restoreCache(paths, key, undefined, options) + +// expect(cacheKey).toBe(key) +// expect(getCompressionMock).toHaveBeenCalledTimes(1) +// expect(getCacheMock).toHaveBeenCalledWith([key], paths, { +// compressionMethod: compression, +// enableCrossOsArchive: false +// }) +// // creating a tempDir and downloading the cache are skipped +// expect(createTempDirectoryMock).toHaveBeenCalledTimes(0) +// expect(downloadCacheMock).toHaveBeenCalledTimes(0) +// }) diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index 1450c8ac..f9863b14 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -3,18 +3,18 @@ import * as path from 'path' import * as utils from './internal/cacheUtils' import * as cacheHttpClient from './internal/cacheHttpClient' import * as cacheTwirpClient from './internal/shared/cacheTwirpClient' -import {getCacheServiceVersion, isGhes} from './internal/config' -import {DownloadOptions, UploadOptions} from './options' -import {createTar, extractTar, listTar} from './internal/tar' +import { getCacheServiceVersion, isGhes } from './internal/config' +import { DownloadOptions, UploadOptions } from './options' +import { createTar, extractTar, listTar } from './internal/tar' import { CreateCacheEntryRequest, FinalizeCacheEntryUploadRequest, FinalizeCacheEntryUploadResponse, GetCacheEntryDownloadURLRequest } from './generated/results/api/v1/cache' -import {CacheFileSizeLimit} from './internal/constants' -import {uploadCacheFile} from './internal/blob/upload-cache' -import {downloadCacheFile} from './internal/blob/download-cache' +import { CacheFileSizeLimit } from './internal/constants' +import { uploadCacheFile } from './internal/blob/upload-cache' +import { downloadCacheFile } from './internal/blob/download-cache' export class ValidationError extends Error { constructor(message: string) { super(message) @@ -287,7 +287,13 @@ async function restoreCacheV2( return request.key } catch (error) { - throw new Error(`Failed to restore: ${error.message}`) + const typedError = error as Error + if (typedError.name === ValidationError.name) { + throw error + } else { + // Suppress all non-validation cache-related errors 
because caching should be optional + core.warning(`Failed to restore: ${(error as Error).message}`) + } } finally { try { if (archivePath) { @@ -297,6 +303,8 @@ async function restoreCacheV2( core.debug(`Failed to delete archive: ${error}`) } } + + return undefined } /** @@ -397,9 +405,9 @@ async function saveCacheV1( } else if (reserveCacheResponse?.statusCode === 400) { throw new Error( reserveCacheResponse?.error?.message ?? - `Cache size of ~${Math.round( - archiveFileSize / (1024 * 1024) - )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.` + `Cache size of ~${Math.round( + archiveFileSize / (1024 * 1024) + )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.` ) } else { throw new ReserveCacheError( @@ -525,7 +533,13 @@ async function saveCacheV2( cacheId = parseInt(finalizeResponse.entryId) } catch (error) { const typedError = error as Error - core.warning(`Failed to save: ${typedError.message}`) + if (typedError.name === ValidationError.name) { + throw error + } else if (typedError.name === ReserveCacheError.name) { + core.info(`Failed to save: ${typedError.message}`) + } else { + core.warning(`Failed to save: ${typedError.message}`) + } } finally { // Try to delete the archive to save space try { From 4de30f744eb65b2f721d1a7993516d8c01c475d8 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Mon, 25 Nov 2024 03:53:03 -0800 Subject: [PATCH 051/108] Add more tests for restoreCacheV2 --- .../cache/__tests__/restoreCacheV2.test.ts | 372 ++++++++++-------- packages/cache/src/cache.ts | 18 +- .../cache/src/internal/blob/download-cache.ts | 5 +- 3 files changed, 216 insertions(+), 179 deletions(-) diff --git a/packages/cache/__tests__/restoreCacheV2.test.ts b/packages/cache/__tests__/restoreCacheV2.test.ts index 87b2d1d0..707432ca 100644 --- a/packages/cache/__tests__/restoreCacheV2.test.ts +++ b/packages/cache/__tests__/restoreCacheV2.test.ts @@ -3,11 +3,12 @@ import * as path from 'path' import * as tar from '../src/internal/tar' import * as config from '../src/internal/config' import * as cacheUtils from '../src/internal/cacheUtils' -import * as cacheHttpClient from '../src/internal/cacheHttpClient' -import { restoreCache } from '../src/cache' -import { CacheFilename, CompressionMethod } from '../src/internal/constants' -import { ArtifactCacheEntry } from '../src/internal/contracts' -import { CacheServiceClientJSON } from '../src/generated/results/api/v1/cache.twirp' +import * as downloadCacheModule from '../src/internal/blob/download-cache' +import {restoreCache} from '../src/cache' +import {CacheFilename, CompressionMethod} from '../src/internal/constants' +import {CacheServiceClientJSON} from '../src/generated/results/api/v1/cache.twirp' +import {BlobDownloadResponseParsed} from '@azure/storage-blob' +// import {executePromisesSequentially} from '@azure/ms-rest-js' jest.mock('../src/internal/cacheHttpClient') jest.mock('../src/internal/cacheUtils') @@ -15,222 +16,257 @@ jest.mock('../src/internal/config') jest.mock('../src/internal/tar') beforeAll(() => { - jest.spyOn(console, 'log').mockImplementation(() => { }) - jest.spyOn(core, 'debug').mockImplementation(() => { }) - jest.spyOn(core, 'info').mockImplementation(() => { }) - jest.spyOn(core, 'warning').mockImplementation(() => { }) - jest.spyOn(core, 'error').mockImplementation(() => { }) + jest.spyOn(console, 'log').mockImplementation(() => {}) + jest.spyOn(core, 'debug').mockImplementation(() => {}) + jest.spyOn(core, 
'info').mockImplementation(() => {})
+  jest.spyOn(core, 'warning').mockImplementation(() => {})
+  jest.spyOn(core, 'error').mockImplementation(() => {})
 
-    jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => {
-        const actualUtils = jest.requireActual('../src/internal/cacheUtils')
-        return actualUtils.getCacheFileName(cm)
-    })
+  jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => {
+    const actualUtils = jest.requireActual('../src/internal/cacheUtils')
+    return actualUtils.getCacheFileName(cm)
+  })
 
-    // Ensure that we're using v2 for these tests
-    jest.spyOn(config, 'getCacheServiceVersion').mockReturnValue('v2')
+  // Ensure that we're using v2 for these tests
+  jest.spyOn(config, 'getCacheServiceVersion').mockReturnValue('v2')
 })
 
 test('restore with no path should fail', async () => {
-    const paths: string[] = []
-    const key = 'node-test'
-    await expect(restoreCache(paths, key)).rejects.toThrowError(
-        `Path Validation Error: At least one directory or file path is required`
-    )
+  const paths: string[] = []
+  const key = 'node-test'
+  await expect(restoreCache(paths, key)).rejects.toThrowError(
+    `Path Validation Error: At least one directory or file path is required`
+  )
 })
 
 test('restore with too many keys should fail', async () => {
-    const paths = ['node_modules']
-    const key = 'node-test'
-    const restoreKeys = [...Array(20).keys()].map(x => x.toString())
-    await expect(restoreCache(paths, key, restoreKeys)).rejects.toThrowError(
-        `Key Validation Error: Keys are limited to a maximum of 10.`
-    )
+  const paths = ['node_modules']
+  const key = 'node-test'
+  const restoreKeys = [...Array(20).keys()].map(x => x.toString())
+  await expect(restoreCache(paths, key, restoreKeys)).rejects.toThrowError(
+    `Key Validation Error: Keys are limited to a maximum of 10.`
+  )
 })
 
 test('restore with large key should fail', async () => {
-    const paths = ['node_modules']
-    const key = 'foo'.repeat(512) // Over the 512 character limit
-    await expect(restoreCache(paths, key)).rejects.toThrowError(
-        `Key Validation Error: ${key} cannot be larger than 512 characters.`
-    )
+  const paths = ['node_modules']
+  const key = 'foo'.repeat(512) // Over the 512 character limit
+  await expect(restoreCache(paths, key)).rejects.toThrowError(
+    `Key Validation Error: ${key} cannot be larger than 512 characters.`
+  )
 })
 
 test('restore with invalid key should fail', async () => {
-    const paths = ['node_modules']
-    const key = 'comma,comma'
-    await expect(restoreCache(paths, key)).rejects.toThrowError(
-        `Key Validation Error: ${key} cannot contain commas.`
-    )
+  const paths = ['node_modules']
+  const key = 'comma,comma'
+  await expect(restoreCache(paths, key)).rejects.toThrowError(
+    `Key Validation Error: ${key} cannot contain commas.`
+  )
 })
 
 test('restore with no cache found', async () => {
-    const paths = ['node_modules']
-    const key = 'node-test'
+  const paths = ['node_modules']
+  const key = 'node-test'
 
-    jest
-        .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL')
-        .mockReturnValue(Promise.resolve({ ok: false, signedDownloadUrl: '' }))
+  jest
+    .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL')
+    .mockReturnValue(Promise.resolve({ok: false, signedDownloadUrl: ''}))
 
-    const cacheKey = await restoreCache(paths, key)
+  const cacheKey = await restoreCache(paths, key)
 
-    expect(cacheKey).toBe(undefined)
+  expect(cacheKey).toBe(undefined)
 })
 
 test('restore with server error should fail', async () => {
-    const paths = ['node_modules']
-    const key = 'node-test'
-    const logWarningMock = jest.spyOn(core, 'warning')
+  const paths = ['node_modules']
+  const key = 'node-test'
+  const logWarningMock = jest.spyOn(core, 'warning')
 
-    jest
-        .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL')
-        .mockImplementation(() => {
-            throw new Error('HTTP Error Occurred')
-        })
+  jest
+    .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL')
+    .mockImplementation(() => {
+      throw new Error('HTTP Error Occurred')
+    })
 
-    const cacheKey = await restoreCache(paths, key)
-    expect(cacheKey).toBe(undefined)
-    expect(logWarningMock).toHaveBeenCalledTimes(1)
-    expect(logWarningMock).toHaveBeenCalledWith(
-        'Failed to restore: HTTP Error Occurred'
-    )
+  const cacheKey = await restoreCache(paths, key)
+  expect(cacheKey).toBe(undefined)
+  expect(logWarningMock).toHaveBeenCalledTimes(1)
+  expect(logWarningMock).toHaveBeenCalledWith(
+    'Failed to restore: HTTP Error Occurred'
+  )
 })
 
-// test('restore with restore keys and no cache found', async () => {
-//   const paths = ['node_modules']
-//   const key = 'node-test'
-//   const restoreKey = 'node-'
+test('restore with restore keys and no cache found', async () => {
+  const paths = ['node_modules']
+  const key = 'node-test'
+  const restoreKey = 'node-'
+  const logWarningMock = jest.spyOn(core, 'warning')
 
-//   jest
-//     .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL')
-//     .mockImplementation(() => {
-//       return Promise.resolve(null)
-//     })
-//   jest.spyOn(cacheHttpClient, 'getCacheEntry').mockImplementation(async () => {
-//     return Promise.resolve(null)
-//   })
+  jest
+    .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL')
+    .mockReturnValue(Promise.resolve({ok: false, signedDownloadUrl: ''}))
 
-//   const cacheKey = await restoreCache(paths, key, [restoreKey])
+  const cacheKey = await restoreCache(paths, key, [restoreKey])
 
-//   expect(cacheKey).toBe(undefined)
-// })
+  expect(cacheKey).toBe(undefined)
+  expect(logWarningMock).toHaveBeenCalledWith(
+    `Cache not found for keys: ${[key, restoreKey].join(', ')}`
+  )
+})
 
-// test('restore with gzip compressed cache found', async () => {
-//   const paths = ['node_modules']
-//   const key = 'node-test'
+test('restore with gzip compressed cache found', async () => {
+  const paths = ['node_modules']
+  const key = 'node-test'
+  const logInfoMock = jest.spyOn(core, 'info')
+  const compressionMethod = CompressionMethod.Gzip
+  const signedDownloadUrl = 'https://blob-storage.local?signed=true'
+  const cacheVersion =
+    'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b'
 
-//   const cacheEntry: ArtifactCacheEntry = {
-//     cacheKey: key,
-//     scope: 'refs/heads/main',
-//     archiveLocation: 'www.actionscache.test/download'
-//   }
-//   const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry')
-//   getCacheMock.mockImplementation(async () => {
-//     return Promise.resolve(cacheEntry)
-//   })
+  const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
+  getCacheVersionMock.mockReturnValue(cacheVersion)
 
-//   const tempPath = '/foo/bar'
+  const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod')
+  compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod))
 
-//   const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
-//   createTempDirectoryMock.mockImplementation(async () => {
-//     return Promise.resolve(tempPath)
-//   })
+  const getCacheDownloadURLMock = jest.spyOn(
+    CacheServiceClientJSON.prototype,
+    'GetCacheEntryDownloadURL'
+  )
+  getCacheDownloadURLMock.mockReturnValue(
+    Promise.resolve({ok: true, signedDownloadUrl})
+  )
 
-//   const archivePath = path.join(tempPath, CacheFilename.Gzip)
-//   const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
+  const tempPath = '/foo/bar'
 
-//   const fileSize = 142
-//   const getArchiveFileSizeInBytesMock = jest
-//     .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
-//     .mockReturnValue(fileSize)
+  const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
+  createTempDirectoryMock.mockImplementation(async () => {
+    return Promise.resolve(tempPath)
+  })
 
-//   const extractTarMock = jest.spyOn(tar, 'extractTar')
-//   const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile')
+  const archivePath = path.join(tempPath, CacheFilename.Gzip)
+  const downloadCacheFileMock = jest.spyOn(
+    downloadCacheModule,
+    'downloadCacheFile'
+  )
+  downloadCacheFileMock.mockReturnValue(
+    Promise.resolve({} as BlobDownloadResponseParsed)
+  )
 
-//   const compression = CompressionMethod.Gzip
-//   const getCompressionMock = jest
-//     .spyOn(cacheUtils, 'getCompressionMethod')
-//     .mockReturnValue(Promise.resolve(compression))
+  const fileSize = 142
+  const getArchiveFileSizeInBytesMock = jest
+    .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
+    .mockReturnValue(fileSize)
 
-//   const cacheKey = await restoreCache(paths, key)
+  const extractTarMock = jest.spyOn(tar, 'extractTar')
+  const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile')
 
-//   expect(cacheKey).toBe(key)
-//   expect(getCacheMock).toHaveBeenCalledWith([key], paths, {
-//     compressionMethod: compression,
-//     enableCrossOsArchive: false
-//   })
-//   expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
-//   expect(downloadCacheMock).toHaveBeenCalledWith(
-//     cacheEntry.archiveLocation,
-//     archivePath,
-//     undefined
-//   )
-//   expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
+  const cacheKey = await restoreCache(paths, key)
 
-//   expect(extractTarMock).toHaveBeenCalledTimes(1)
-//   expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression)
+  expect(cacheKey).toBe(key)
+  expect(getCacheVersionMock).toHaveBeenCalledWith(
+    paths,
+    compressionMethod,
+    false
+  )
+  expect(getCacheDownloadURLMock).toHaveBeenCalledWith({
+    key,
+    restoreKeys: [],
+    version: cacheVersion
+  })
+  expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
+  expect(downloadCacheFileMock).toHaveBeenCalledWith(
+    signedDownloadUrl,
+    archivePath
+  )
+  expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
+  expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`)
 
-//   expect(unlinkFileMock).toHaveBeenCalledTimes(1)
-//   expect(unlinkFileMock).toHaveBeenCalledWith(archivePath)
+  expect(extractTarMock).toHaveBeenCalledTimes(1)
+  expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod)
 
-//   expect(getCompressionMock).toHaveBeenCalledTimes(1)
-// })
+  expect(unlinkFileMock).toHaveBeenCalledTimes(1)
+  expect(unlinkFileMock).toHaveBeenCalledWith(archivePath)
 
-// test('restore with zstd compressed cache found', async () => {
-//   const paths = ['node_modules']
-//   const key = 'node-test'
+  expect(compressionMethodMock).toHaveBeenCalledTimes(1)
+})
 
-//   const infoMock = jest.spyOn(core, 'info')
+test('restore with zstd compressed cache found', async () => {
+  const paths = ['node_modules']
+  const key = 'node-test'
+  const logInfoMock = jest.spyOn(core, 'info')
+  const compressionMethod = CompressionMethod.Zstd
+  const signedDownloadUrl = 'https://blob-storage.local?signed=true'
+  const cacheVersion =
+    '8e2e96a184cb0cd6b48285b176c06a418f3d7fce14c29d9886fd1bb4f05c513d'
 
-//   const cacheEntry: ArtifactCacheEntry = {
-//     cacheKey: key,
-//     scope: 'refs/heads/main',
-//     archiveLocation: 'www.actionscache.test/download'
-//   }
-//   const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry')
-//   getCacheMock.mockImplementation(async () => {
-//     return Promise.resolve(cacheEntry)
-//   })
-//   const tempPath = '/foo/bar'
+  const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
+  getCacheVersionMock.mockReturnValue(cacheVersion)
 
-//   const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
-//   createTempDirectoryMock.mockImplementation(async () => {
-//     return Promise.resolve(tempPath)
-//   })
+  const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod')
+  compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod))
 
-//   const archivePath = path.join(tempPath, CacheFilename.Zstd)
-//   const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
+  const getCacheDownloadURLMock = jest.spyOn(
+    CacheServiceClientJSON.prototype,
+    'GetCacheEntryDownloadURL'
+  )
+  getCacheDownloadURLMock.mockReturnValue(
+    Promise.resolve({ok: true, signedDownloadUrl})
+  )
 
-//   const fileSize = 62915000
-//   const getArchiveFileSizeInBytesMock = jest
-//     .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
-//     .mockReturnValue(fileSize)
+  const tempPath = '/foo/bar'
 
-//   const extractTarMock = jest.spyOn(tar, 'extractTar')
-//   const compression = CompressionMethod.Zstd
-//   const getCompressionMock = jest
-//     .spyOn(cacheUtils, 'getCompressionMethod')
-//     .mockReturnValue(Promise.resolve(compression))
+  const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
+  createTempDirectoryMock.mockImplementation(async () => {
+    return Promise.resolve(tempPath)
+  })
 
-//   const cacheKey = await restoreCache(paths, key)
+  const archivePath = path.join(tempPath, CacheFilename.Zstd)
+  const downloadCacheFileMock = jest.spyOn(
+    downloadCacheModule,
+    'downloadCacheFile'
+  )
+  downloadCacheFileMock.mockReturnValue(
+    Promise.resolve({} as BlobDownloadResponseParsed)
+  )
 
-//   expect(cacheKey).toBe(key)
-//   expect(getCacheMock).toHaveBeenCalledWith([key], paths, {
-//     compressionMethod: compression,
-//     enableCrossOsArchive: false
-//   })
-//   expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
-//   expect(downloadCacheMock).toHaveBeenCalledWith(
-//     cacheEntry.archiveLocation,
-//     archivePath,
-//     undefined
-//   )
-//   expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
-//   expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`)
+  const fileSize = 62915000
+  const getArchiveFileSizeInBytesMock = jest
+    .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
+    .mockReturnValue(fileSize)
 
-//   expect(extractTarMock).toHaveBeenCalledTimes(1)
-//   expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression)
-//   expect(getCompressionMock).toHaveBeenCalledTimes(1)
-// })
+  const extractTarMock = jest.spyOn(tar, 'extractTar')
+  const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile')
+
+  const cacheKey = await restoreCache(paths, key)
+
+  expect(cacheKey).toBe(key)
+  expect(getCacheVersionMock).toHaveBeenCalledWith(
+    paths,
+    compressionMethod,
+    false
+  )
+  expect(getCacheDownloadURLMock).toHaveBeenCalledWith({
+    key,
+    restoreKeys: [],
+    version: cacheVersion
+  })
+  expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
+  expect(downloadCacheFileMock).toHaveBeenCalledWith(
+    signedDownloadUrl,
+    archivePath
+  )
+  expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
+  expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`)
+
+  expect(extractTarMock).toHaveBeenCalledTimes(1)
+  expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod)
+
+  expect(unlinkFileMock).toHaveBeenCalledTimes(1)
+  expect(unlinkFileMock).toHaveBeenCalledWith(archivePath)
+
+  expect(compressionMethodMock).toHaveBeenCalledTimes(1)
+})
 
 // test('restore with cache found for restore key', async () => {
 //   const paths = ['node_modules']
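The rewritten tests above exercise the two seams of the v2 restore path: a Twirp lookup that returns a signed download URL, followed by a blob download and tar extraction. A minimal TypeScript sketch of that flow, using hypothetical stand-ins (lookup, downloadBlob, extract) for the collaborators the tests mock; the real wiring lives in restoreCacheV2 in cache.ts:

    import * as path from 'path'

    // Hypothetical stand-ins for the collaborators mocked in the tests above.
    type Lookup = (req: {
      key: string
      restoreKeys: string[]
      version: string
    }) => Promise<{ok: boolean; signedDownloadUrl: string}>

    async function restoreSketch(
      lookup: Lookup,
      downloadBlob: (url: string, dest: string) => Promise<unknown>,
      extract: (archivePath: string) => Promise<void>,
      key: string,
      restoreKeys: string[],
      version: string,
      tempDir: string,
      cacheFileName: string
    ): Promise<string | undefined> {
      // Ask the cache service for a signed URL for the best-matching entry
      const res = await lookup({key, restoreKeys, version})
      if (!res.ok) {
        return undefined // surfaces as the 'Cache not found' warning
      }
      // Download the archive into the temp dir, then unpack it
      const archivePath = path.join(tempDir, cacheFileName)
      await downloadBlob(res.signedDownloadUrl, archivePath)
      await extract(archivePath)
      return key
    }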
diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts
index f9863b14..07d6c7ce 100644
--- a/packages/cache/src/cache.ts
+++ b/packages/cache/src/cache.ts
@@ -3,18 +3,18 @@ import * as path from 'path'
 import * as utils from './internal/cacheUtils'
 import * as cacheHttpClient from './internal/cacheHttpClient'
 import * as cacheTwirpClient from './internal/shared/cacheTwirpClient'
-import { getCacheServiceVersion, isGhes } from './internal/config'
-import { DownloadOptions, UploadOptions } from './options'
-import { createTar, extractTar, listTar } from './internal/tar'
+import {getCacheServiceVersion, isGhes} from './internal/config'
+import {DownloadOptions, UploadOptions} from './options'
+import {createTar, extractTar, listTar} from './internal/tar'
 import {
   CreateCacheEntryRequest,
   FinalizeCacheEntryUploadRequest,
   FinalizeCacheEntryUploadResponse,
   GetCacheEntryDownloadURLRequest
 } from './generated/results/api/v1/cache'
-import { CacheFileSizeLimit } from './internal/constants'
-import { uploadCacheFile } from './internal/blob/upload-cache'
-import { downloadCacheFile } from './internal/blob/download-cache'
+import {CacheFileSizeLimit} from './internal/constants'
+import {uploadCacheFile} from './internal/blob/upload-cache'
+import {downloadCacheFile} from './internal/blob/download-cache'
 export class ValidationError extends Error {
   constructor(message: string) {
     super(message)
@@ -405,9 +405,9 @@ async function saveCacheV1(
   } else if (reserveCacheResponse?.statusCode === 400) {
     throw new Error(
       reserveCacheResponse?.error?.message ??
-      `Cache size of ~${Math.round(
-        archiveFileSize / (1024 * 1024)
-      )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`
+        `Cache size of ~${Math.round(
+          archiveFileSize / (1024 * 1024)
+        )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`
     )
   } else {
     throw new ReserveCacheError(
diff --git a/packages/cache/src/internal/blob/download-cache.ts b/packages/cache/src/internal/blob/download-cache.ts
index 807c73a4..e974cb2f 100644
--- a/packages/cache/src/internal/blob/download-cache.ts
+++ b/packages/cache/src/internal/blob/download-cache.ts
@@ -3,13 +3,14 @@ import * as core from '@actions/core'
 import {
   BlobClient,
   BlockBlobClient,
-  BlobDownloadOptions
+  BlobDownloadOptions,
+  BlobDownloadResponseParsed
 } from '@azure/storage-blob'
 
 export async function downloadCacheFile(
   signedUploadURL: string,
   archivePath: string
-): Promise<{}> {
+): Promise<BlobDownloadResponseParsed> {
   const downloadOptions: BlobDownloadOptions = {
     maxRetryRequests: 5
   }
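The return-type change above is the load-bearing fix in this patch: @azure/storage-blob resolves downloadToFile with a BlobDownloadResponseParsed, so callers get typed response metadata instead of an opaque {}. A hedged usage sketch (the SAS URL and function name are placeholders):

    import {BlobClient, BlobDownloadResponseParsed} from '@azure/storage-blob'

    async function fetchArchive(
      signedDownloadUrl: string, // placeholder SAS URL
      archivePath: string
    ): Promise<BlobDownloadResponseParsed> {
      const client = new BlobClient(signedDownloadUrl)
      // Resolves once the blob has been fully written to archivePath;
      // maxRetryRequests mirrors the option set in downloadCacheFile.
      return client.downloadToFile(archivePath, 0, undefined, {
        maxRetryRequests: 5
      })
    }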
From 54ac2dd012c3e940fb0f8a5a425df857f02a73a4 Mon Sep 17 00:00:00 2001
From: Bassem Dghaidi <568794+Link-@users.noreply.github.com>
Date: Mon, 25 Nov 2024 04:08:47 -0800
Subject: [PATCH 052/108] Add cache service version debug message

---
 .../cache/__tests__/restoreCacheV2.test.ts    | 578 ++++++++++--------
 packages/cache/src/cache.ts                   |  22 +-
 2 files changed, 321 insertions(+), 279 deletions(-)

diff --git a/packages/cache/__tests__/restoreCacheV2.test.ts b/packages/cache/__tests__/restoreCacheV2.test.ts
index 707432ca..78b78aaa 100644
--- a/packages/cache/__tests__/restoreCacheV2.test.ts
+++ b/packages/cache/__tests__/restoreCacheV2.test.ts
@@ -4,10 +4,10 @@ import * as tar from '../src/internal/tar'
 import * as config from '../src/internal/config'
 import * as cacheUtils from '../src/internal/cacheUtils'
 import * as downloadCacheModule from '../src/internal/blob/download-cache'
-import {restoreCache} from '../src/cache'
-import {CacheFilename, CompressionMethod} from '../src/internal/constants'
-import {CacheServiceClientJSON} from '../src/generated/results/api/v1/cache.twirp'
-import {BlobDownloadResponseParsed} from '@azure/storage-blob'
+import { restoreCache } from '../src/cache'
+import { CacheFilename, CompressionMethod } from '../src/internal/constants'
+import { CacheServiceClientJSON } from '../src/generated/results/api/v1/cache.twirp'
+import { BlobDownloadResponseParsed } from '@azure/storage-blob'
 // import {executePromisesSequentially} from '@azure/ms-rest-js'
 
 jest.mock('../src/internal/cacheHttpClient')
@@ -15,349 +15,389 @@ jest.mock('../src/internal/cacheUtils')
 jest.mock('../src/internal/config')
 jest.mock('../src/internal/tar')
 
+let logDebugMock: jest.SpyInstance
+let logInfoMock: jest.SpyInstance
+
 beforeAll(() => {
-  jest.spyOn(console, 'log').mockImplementation(() => {})
-  jest.spyOn(core, 'debug').mockImplementation(() => {})
-  jest.spyOn(core, 'info').mockImplementation(() => {})
-  jest.spyOn(core, 'warning').mockImplementation(() => {})
-  jest.spyOn(core, 'error').mockImplementation(() => {})
+    jest.spyOn(console, 'log').mockImplementation(() => { })
+    jest.spyOn(core, 'debug').mockImplementation(() => { })
+    jest.spyOn(core, 'info').mockImplementation(() => { })
+    jest.spyOn(core, 'warning').mockImplementation(() => { })
+    jest.spyOn(core, 'error').mockImplementation(() => { })
 
-  jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => {
-    const actualUtils = jest.requireActual('../src/internal/cacheUtils')
-    return actualUtils.getCacheFileName(cm)
-  })
+    jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => {
+        const actualUtils = jest.requireActual('../src/internal/cacheUtils')
+        return actualUtils.getCacheFileName(cm)
+    })
 
-  // Ensure that we're using v2 for these tests
-  jest.spyOn(config, 'getCacheServiceVersion').mockReturnValue('v2')
+    // Ensure that we're using v2 for these tests
+    jest.spyOn(config, 'getCacheServiceVersion').mockReturnValue('v2')
+
+    logDebugMock = jest.spyOn(core, 'debug')
+    logInfoMock = jest.spyOn(core, 'info')
+})
+
+afterEach(() => {
+    expect(logDebugMock).toHaveBeenCalledWith('Cache service version: v2')
 })
 
 test('restore with no path should fail', async () => {
-  const paths: string[] = []
-  const key = 'node-test'
-  await expect(restoreCache(paths, key)).rejects.toThrowError(
-    `Path Validation Error: At least one directory or file path is required`
-  )
+    const paths: string[] = []
+    const key = 'node-test'
+    await expect(restoreCache(paths, key)).rejects.toThrowError(
+        `Path Validation Error: At least one directory or file path is required`
+    )
 })
 
 test('restore with too many keys should fail', async () => {
-  const paths = ['node_modules']
-  const key = 'node-test'
-  const restoreKeys = [...Array(20).keys()].map(x => x.toString())
-  await expect(restoreCache(paths, key, restoreKeys)).rejects.toThrowError(
-    `Key Validation Error: Keys are limited to a maximum of 10.`
-  )
+    const paths = ['node_modules']
+    const key = 'node-test'
+    const restoreKeys = [...Array(20).keys()].map(x => x.toString())
+    await expect(restoreCache(paths, key, restoreKeys)).rejects.toThrowError(
+        `Key Validation Error: Keys are limited to a maximum of 10.`
+    )
 })
 
 test('restore with large key should fail', async () => {
-  const paths = ['node_modules']
-  const key = 'foo'.repeat(512) // Over the 512 character limit
-  await expect(restoreCache(paths, key)).rejects.toThrowError(
-    `Key Validation Error: ${key} cannot be larger than 512 characters.`
-  )
+    const paths = ['node_modules']
+    const key = 'foo'.repeat(512) // Over the 512 character limit
+    await expect(restoreCache(paths, key)).rejects.toThrowError(
+        `Key Validation Error: ${key} cannot be larger than 512 characters.`
+    )
 })
 
 test('restore with invalid key should fail', async () => {
-  const paths = ['node_modules']
-  const key = 'comma,comma'
-  await expect(restoreCache(paths, key)).rejects.toThrowError(
-    `Key Validation Error: ${key} cannot contain commas.`
-  )
+    const paths = ['node_modules']
+    const key = 'comma,comma'
+    await expect(restoreCache(paths, key)).rejects.toThrowError(
+        `Key Validation Error: ${key} cannot contain commas.`
+    )
 })
 
 test('restore with no cache found', async () => {
-  const paths = ['node_modules']
-  const key = 'node-test'
+    const paths = ['node_modules']
+    const key = 'node-test'
 
-  jest
-    .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL')
-    .mockReturnValue(Promise.resolve({ok: false, signedDownloadUrl: ''}))
+    jest
+        .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL')
+        .mockReturnValue(Promise.resolve({ ok: false, signedDownloadUrl: '' }))
 
-  const cacheKey = await restoreCache(paths, key)
+    const cacheKey = await restoreCache(paths, key)
 
-  expect(cacheKey).toBe(undefined)
+    expect(cacheKey).toBe(undefined)
 })
 
 test('restore with server error should fail', async () => {
-  const paths = ['node_modules']
-  const key = 'node-test'
-  const logWarningMock = jest.spyOn(core, 'warning')
+    const paths = ['node_modules']
+    const key = 'node-test'
+    const logWarningMock = jest.spyOn(core, 'warning')
 
-  jest
-    .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL')
-    .mockImplementation(() => {
-      throw new Error('HTTP Error Occurred')
-    })
+    jest
+        .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL')
+        .mockImplementation(() => {
+            throw new Error('HTTP Error Occurred')
+        })
 
-  const cacheKey = await restoreCache(paths, key)
-  expect(cacheKey).toBe(undefined)
-  expect(logWarningMock).toHaveBeenCalledTimes(1)
-  expect(logWarningMock).toHaveBeenCalledWith(
-    'Failed to restore: HTTP Error Occurred'
-  )
+    const cacheKey = await restoreCache(paths, key)
+    expect(cacheKey).toBe(undefined)
+    expect(logWarningMock).toHaveBeenCalledTimes(1)
+    expect(logWarningMock).toHaveBeenCalledWith(
+        'Failed to restore: HTTP Error Occurred'
+    )
 })
 
 test('restore with restore keys and no cache found', async () => {
-  const paths = ['node_modules']
-  const key = 'node-test'
-  const restoreKey = 'node-'
-  const logWarningMock = jest.spyOn(core, 'warning')
+    const paths = ['node_modules']
+    const key = 'node-test'
+    const restoreKey = 'node-'
+    const logWarningMock = jest.spyOn(core, 'warning')
 
-  jest
-    .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL')
-    .mockReturnValue(Promise.resolve({ok: false, signedDownloadUrl: ''}))
+    jest
+        .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL')
+        .mockReturnValue(Promise.resolve({ ok: false, signedDownloadUrl: '' }))
 
-  const cacheKey = await restoreCache(paths, key, [restoreKey])
+    const cacheKey = await restoreCache(paths, key, [restoreKey])
 
-  expect(cacheKey).toBe(undefined)
-  expect(logWarningMock).toHaveBeenCalledWith(
-    `Cache not found for keys: ${[key, restoreKey].join(', ')}`
-  )
+    expect(cacheKey).toBe(undefined)
+    expect(logWarningMock).toHaveBeenCalledWith(
+        `Cache not found for keys: ${[key, restoreKey].join(', ')}`
+    )
 })
 
 test('restore with gzip compressed cache found', async () => {
-  const paths = ['node_modules']
-  const key = 'node-test'
-  const logInfoMock = jest.spyOn(core, 'info')
-  const compressionMethod = CompressionMethod.Gzip
-  const signedDownloadUrl = 'https://blob-storage.local?signed=true'
-  const cacheVersion =
-    'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b'
+    const paths = ['node_modules']
+    const key = 'node-test'
+    const compressionMethod = CompressionMethod.Gzip
+    const signedDownloadUrl = 'https://blob-storage.local?signed=true'
+    const cacheVersion =
+        'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b'
 
-  const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
-  getCacheVersionMock.mockReturnValue(cacheVersion)
+    const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
+    getCacheVersionMock.mockReturnValue(cacheVersion)
 
-  const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod')
-  compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod))
+    const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod')
+    compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod))
 
-  const getCacheDownloadURLMock = jest.spyOn(
-    CacheServiceClientJSON.prototype,
-    'GetCacheEntryDownloadURL'
-  )
-  getCacheDownloadURLMock.mockReturnValue(
-    Promise.resolve({ok: true, signedDownloadUrl})
-  )
+    const getCacheDownloadURLMock = jest.spyOn(
+        CacheServiceClientJSON.prototype,
+        'GetCacheEntryDownloadURL'
+    )
+    getCacheDownloadURLMock.mockReturnValue(
+        Promise.resolve({ ok: true, signedDownloadUrl })
+    )
 
-  const tempPath = '/foo/bar'
+    const tempPath = '/foo/bar'
 
-  const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
-  createTempDirectoryMock.mockImplementation(async () => {
-    return Promise.resolve(tempPath)
-  })
+    const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
+    createTempDirectoryMock.mockImplementation(async () => {
+        return Promise.resolve(tempPath)
+    })
 
-  const archivePath = path.join(tempPath, CacheFilename.Gzip)
-  const downloadCacheFileMock = jest.spyOn(
-    downloadCacheModule,
-    'downloadCacheFile'
-  )
-  downloadCacheFileMock.mockReturnValue(
-    Promise.resolve({} as BlobDownloadResponseParsed)
-  )
+    const archivePath = path.join(tempPath, CacheFilename.Gzip)
+    const downloadCacheFileMock = jest.spyOn(
+        downloadCacheModule,
+        'downloadCacheFile'
+    )
+    downloadCacheFileMock.mockReturnValue(
+        Promise.resolve({} as BlobDownloadResponseParsed)
+    )
 
-  const fileSize = 142
-  const getArchiveFileSizeInBytesMock = jest
-    .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
-    .mockReturnValue(fileSize)
+    const fileSize = 142
+    const getArchiveFileSizeInBytesMock = jest
+        .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
+        .mockReturnValue(fileSize)
 
-  const extractTarMock = jest.spyOn(tar, 'extractTar')
-  const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile')
+    const extractTarMock = jest.spyOn(tar, 'extractTar')
+    const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile')
 
-  const cacheKey = await restoreCache(paths, key)
+    const cacheKey = await restoreCache(paths, key)
 
-  expect(cacheKey).toBe(key)
-  expect(getCacheVersionMock).toHaveBeenCalledWith(
-    paths,
-    compressionMethod,
-    false
-  )
-  expect(getCacheDownloadURLMock).toHaveBeenCalledWith({
-    key,
-    restoreKeys: [],
-    version: cacheVersion
-  })
-  expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
-  expect(downloadCacheFileMock).toHaveBeenCalledWith(
-    signedDownloadUrl,
-    archivePath
-  )
-  expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
-  expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`)
+    expect(cacheKey).toBe(key)
+    expect(getCacheVersionMock).toHaveBeenCalledWith(
+        paths,
+        compressionMethod,
+        false
+    )
+    expect(getCacheDownloadURLMock).toHaveBeenCalledWith({
+        key,
+        restoreKeys: [],
+        version: cacheVersion
+    })
+    expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
+    expect(downloadCacheFileMock).toHaveBeenCalledWith(
+        signedDownloadUrl,
+        archivePath
+    )
+    expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
+    expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`)
 
-  expect(extractTarMock).toHaveBeenCalledTimes(1)
-  expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod)
+    expect(extractTarMock).toHaveBeenCalledTimes(1)
+    expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod)
 
-  expect(unlinkFileMock).toHaveBeenCalledTimes(1)
-  expect(unlinkFileMock).toHaveBeenCalledWith(archivePath)
+    expect(unlinkFileMock).toHaveBeenCalledTimes(1)
+    expect(unlinkFileMock).toHaveBeenCalledWith(archivePath)
 
-  expect(compressionMethodMock).toHaveBeenCalledTimes(1)
+    expect(compressionMethodMock).toHaveBeenCalledTimes(1)
 })
 
 test('restore with zstd compressed cache found', async () => {
-  const paths = ['node_modules']
-  const key = 'node-test'
-  const logInfoMock = jest.spyOn(core, 'info')
-  const compressionMethod = CompressionMethod.Zstd
-  const signedDownloadUrl = 'https://blob-storage.local?signed=true'
-  const cacheVersion =
-    '8e2e96a184cb0cd6b48285b176c06a418f3d7fce14c29d9886fd1bb4f05c513d'
+    const paths = ['node_modules']
+    const key = 'node-test'
+    const compressionMethod = CompressionMethod.Zstd
+    const signedDownloadUrl = 'https://blob-storage.local?signed=true'
+    const cacheVersion =
+        '8e2e96a184cb0cd6b48285b176c06a418f3d7fce14c29d9886fd1bb4f05c513d'
 
-  const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
-  getCacheVersionMock.mockReturnValue(cacheVersion)
+    const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
+    getCacheVersionMock.mockReturnValue(cacheVersion)
 
-  const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod')
-  compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod))
+    const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod')
+    compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod))
 
-  const getCacheDownloadURLMock = jest.spyOn(
-    CacheServiceClientJSON.prototype,
-    'GetCacheEntryDownloadURL'
-  )
-  getCacheDownloadURLMock.mockReturnValue(
-    Promise.resolve({ok: true, signedDownloadUrl})
-  )
+    const getCacheDownloadURLMock = jest.spyOn(
+        CacheServiceClientJSON.prototype,
+        'GetCacheEntryDownloadURL'
+    )
+    getCacheDownloadURLMock.mockReturnValue(
+        Promise.resolve({ ok: true, signedDownloadUrl })
+    )
 
-  const tempPath = '/foo/bar'
+    const tempPath = '/foo/bar'
 
-  const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
-  createTempDirectoryMock.mockImplementation(async () => {
-    return Promise.resolve(tempPath)
-  })
+    const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
+    createTempDirectoryMock.mockImplementation(async () => {
+        return Promise.resolve(tempPath)
+    })
 
-  const archivePath = path.join(tempPath, CacheFilename.Zstd)
-  const downloadCacheFileMock = jest.spyOn(
-    downloadCacheModule,
-    'downloadCacheFile'
-  )
-  downloadCacheFileMock.mockReturnValue(
-    Promise.resolve({} as BlobDownloadResponseParsed)
-  )
+    const archivePath = path.join(tempPath, CacheFilename.Zstd)
+    const downloadCacheFileMock = jest.spyOn(
+        downloadCacheModule,
+        'downloadCacheFile'
+    )
+    downloadCacheFileMock.mockReturnValue(
+        Promise.resolve({} as BlobDownloadResponseParsed)
+    )
 
-  const fileSize = 62915000
-  const getArchiveFileSizeInBytesMock = jest
-    .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
-    .mockReturnValue(fileSize)
+    const fileSize = 62915000
+    const getArchiveFileSizeInBytesMock = jest
+        .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
+        .mockReturnValue(fileSize)
 
-  const extractTarMock = jest.spyOn(tar, 'extractTar')
-  const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile')
+    const extractTarMock = jest.spyOn(tar, 'extractTar')
+    const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile')
 
-  const cacheKey = await restoreCache(paths, key)
+    const cacheKey = await restoreCache(paths, key)
 
-  expect(cacheKey).toBe(key)
-  expect(getCacheVersionMock).toHaveBeenCalledWith(
-    paths,
-    compressionMethod,
-    false
-  )
-  expect(getCacheDownloadURLMock).toHaveBeenCalledWith({
-    key,
-    restoreKeys: [],
-    version: cacheVersion
-  })
-  expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
-  expect(downloadCacheFileMock).toHaveBeenCalledWith(
-    signedDownloadUrl,
-    archivePath
-  )
-  expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
-  expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`)
+    expect(cacheKey).toBe(key)
+    expect(getCacheVersionMock).toHaveBeenCalledWith(
+        paths,
+        compressionMethod,
+        false
+    )
+    expect(getCacheDownloadURLMock).toHaveBeenCalledWith({
+        key,
+        restoreKeys: [],
+        version: cacheVersion
+    })
+    expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
+    expect(downloadCacheFileMock).toHaveBeenCalledWith(
+        signedDownloadUrl,
+        archivePath
+    )
+    expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
+    expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`)
 
-  expect(extractTarMock).toHaveBeenCalledTimes(1)
-  expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod)
+    expect(extractTarMock).toHaveBeenCalledTimes(1)
+    expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod)
 
-  expect(unlinkFileMock).toHaveBeenCalledTimes(1)
-  expect(unlinkFileMock).toHaveBeenCalledWith(archivePath)
+    expect(unlinkFileMock).toHaveBeenCalledTimes(1)
+    expect(unlinkFileMock).toHaveBeenCalledWith(archivePath)
 
-  expect(compressionMethodMock).toHaveBeenCalledTimes(1)
+    expect(compressionMethodMock).toHaveBeenCalledTimes(1)
 })
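The `Cache Size` assertions in these tests encode the exact rounding the library logs: bytes divided by 1024 * 1024, then Math.round, so 62915000 B reports as ~60 MB and a 142 B archive as ~0 MB. The helper name below is hypothetical, but the arithmetic matches the expected strings:

    const formatCacheSize = (archiveFileSize: number): string =>
      `Cache Size: ~${Math.round(
        archiveFileSize / (1024 * 1024)
      )} MB (${archiveFileSize} B)`

    formatCacheSize(62915000) // 'Cache Size: ~60 MB (62915000 B)'
    formatCacheSize(142) // 'Cache Size: ~0 MB (142 B)'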
-// test('restore with cache found for restore key', async () => {
-//   const paths = ['node_modules']
-//   const key = 'node-test'
-//   const restoreKey = 'node-'
+test('restore with cache found for restore key', async () => {
+    const paths = ['node_modules']
+    const key = 'node-test'
+    const restoreKey = 'node-'
+    const compressionMethod = CompressionMethod.Gzip
+    const signedDownloadUrl = 'https://blob-storage.local?signed=true'
+    const cacheVersion =
+        'b8b58e9bd7b1e8f83d9f05c7e06ea865ba44a0330e07a14db74ac74386677bed'
 
-//   const infoMock = jest.spyOn(core, 'info')
+    const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
+    getCacheVersionMock.mockReturnValue(cacheVersion)
 
-//   const cacheEntry: ArtifactCacheEntry = {
-//     cacheKey: restoreKey,
-//     scope: 'refs/heads/main',
-//     archiveLocation: 'www.actionscache.test/download'
-//   }
-//   const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry')
-//   getCacheMock.mockImplementation(async () => {
-//     return Promise.resolve(cacheEntry)
-//   })
-//   const tempPath = '/foo/bar'
+    const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod')
+    compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod))
 
-//   const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
-//   createTempDirectoryMock.mockImplementation(async () => {
-//     return Promise.resolve(tempPath)
-//   })
+    const getCacheDownloadURLMock = jest.spyOn(
+        CacheServiceClientJSON.prototype,
+        'GetCacheEntryDownloadURL'
+    )
+    getCacheDownloadURLMock.mockReturnValue(
+        Promise.resolve({ ok: true, signedDownloadUrl })
+    )
 
-//   const archivePath = path.join(tempPath, CacheFilename.Zstd)
-//   const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
+    const tempPath = '/foo/bar'
 
-//   const fileSize = 142
-//   const getArchiveFileSizeInBytesMock = jest
-//     .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
-//     .mockReturnValue(fileSize)
+    const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
+    createTempDirectoryMock.mockImplementation(async () => {
+        return Promise.resolve(tempPath)
+    })
 
-//   const extractTarMock = jest.spyOn(tar, 'extractTar')
-//   const compression = CompressionMethod.Zstd
-//   const getCompressionMock = jest
-//     .spyOn(cacheUtils, 'getCompressionMethod')
-//     .mockReturnValue(Promise.resolve(compression))
+    const archivePath = path.join(tempPath, CacheFilename.Gzip)
+    const downloadCacheFileMock = jest.spyOn(
+        downloadCacheModule,
+        'downloadCacheFile'
+    )
+    downloadCacheFileMock.mockReturnValue(
+        Promise.resolve({} as BlobDownloadResponseParsed)
+    )
 
-//   const cacheKey = await restoreCache(paths, key, [restoreKey])
+    const fileSize = 142
+    const getArchiveFileSizeInBytesMock = jest
+        .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
+        .mockReturnValue(fileSize)
 
-//   expect(cacheKey).toBe(restoreKey)
-//   expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey], paths, {
-//     compressionMethod: compression,
-//     enableCrossOsArchive: false
-//   })
-//   expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
-//   expect(downloadCacheMock).toHaveBeenCalledWith(
-//     cacheEntry.archiveLocation,
-//     archivePath,
-//     undefined
-//   )
-//   expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
-//   expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`)
+    const extractTarMock = jest.spyOn(tar, 'extractTar')
+    const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile')
 
-//   expect(extractTarMock).toHaveBeenCalledTimes(1)
-//   expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression)
-//   expect(getCompressionMock).toHaveBeenCalledTimes(1)
-// })
+    const cacheKey = await restoreCache(paths, key, [restoreKey])
 
-// test('restore with dry run', async () => {
-//   const paths = ['node_modules']
-//   const key = 'node-test'
-//   const options = { lookupOnly: true }
+    expect(cacheKey).toBe(restoreKey)
+    expect(getCacheVersionMock).toHaveBeenCalledWith(
+        paths,
+        compressionMethod,
+        false
+    )
+    expect(getCacheDownloadURLMock).toHaveBeenCalledWith({
+        key,
+        restoreKeys: restoreKey,
+        version: cacheVersion
+    })
+    expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
+    expect(downloadCacheFileMock).toHaveBeenCalledWith(
+        signedDownloadUrl,
+        archivePath
+    )
+    expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
+    expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`)
 
-//   const cacheEntry: ArtifactCacheEntry = {
-//     cacheKey: key,
-//     scope: 'refs/heads/main',
-//     archiveLocation: 'www.actionscache.test/download'
-//   }
-//   const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry')
-//   getCacheMock.mockImplementation(async () => {
-//     return Promise.resolve(cacheEntry)
-//   })
+    expect(extractTarMock).toHaveBeenCalledTimes(1)
+    expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod)
 
-//   const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
-//   const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
+    expect(unlinkFileMock).toHaveBeenCalledTimes(1)
+    expect(unlinkFileMock).toHaveBeenCalledWith(archivePath)
 
-//   const compression = CompressionMethod.Gzip
-//   const getCompressionMock = jest
-//     .spyOn(cacheUtils, 'getCompressionMethod')
-//     .mockReturnValue(Promise.resolve(compression))
+    expect(compressionMethodMock).toHaveBeenCalledTimes(1)
+})
 
-//   const cacheKey = await restoreCache(paths, key, undefined, options)
+test('restore with dry run', async () => {
+    const paths = ['node_modules']
+    const key = 'node-test'
+    const options = { lookupOnly: true }
+    const compressionMethod = CompressionMethod.Gzip
+    const signedDownloadUrl = 'https://blob-storage.local?signed=true'
+    const cacheVersion =
+        'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b'
 
-//   expect(cacheKey).toBe(key)
-//   expect(getCompressionMock).toHaveBeenCalledTimes(1)
-//   expect(getCacheMock).toHaveBeenCalledWith([key], paths, {
-//     compressionMethod: compression,
-//     enableCrossOsArchive: false
-//   })
-//   // creating a tempDir and downloading the cache are skipped
-//   expect(createTempDirectoryMock).toHaveBeenCalledTimes(0)
-//   expect(downloadCacheMock).toHaveBeenCalledTimes(0)
-// })
+    const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
+    getCacheVersionMock.mockReturnValue(cacheVersion)
+
+    const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod')
+    compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod))
+
+    const getCacheDownloadURLMock = jest.spyOn(
+        CacheServiceClientJSON.prototype,
+        'GetCacheEntryDownloadURL'
+    )
+    getCacheDownloadURLMock.mockReturnValue(
+        Promise.resolve({ ok: true, signedDownloadUrl })
+    )
+
+    const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
+    const downloadCacheFileMock = jest.spyOn(
+        downloadCacheModule,
+        'downloadCacheFile'
+    )
+
+    const cacheKey = await restoreCache(paths, key, undefined, options)
+
+    expect(cacheKey).toBe(key)
+    expect(getCacheVersionMock).toHaveBeenCalledWith(
+        paths,
+        compressionMethod,
+        false
+    )
+    expect(getCacheDownloadURLMock).toHaveBeenCalledWith({
+        key,
+        restoreKeys: [],
+        version: cacheVersion
+    })
+    expect(logInfoMock).toHaveBeenCalledWith('Lookup only - skipping download')
+
+    // creating a tempDir and downloading the cache are skipped
+    expect(createTempDirectoryMock).toHaveBeenCalledTimes(0)
+    expect(downloadCacheFileMock).toHaveBeenCalledTimes(0)
+})
diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts
index 07d6c7ce..a2ce38f8 100644
--- a/packages/cache/src/cache.ts
+++ b/packages/cache/src/cache.ts
@@ -3,18 +3,18 @@ import * as path from 'path'
 import * as utils from './internal/cacheUtils'
 import * as cacheHttpClient from './internal/cacheHttpClient'
 import * as cacheTwirpClient from './internal/shared/cacheTwirpClient'
-import {getCacheServiceVersion, isGhes} from './internal/config'
-import {DownloadOptions, UploadOptions} from './options'
-import {createTar, extractTar, listTar} from './internal/tar'
+import { getCacheServiceVersion, isGhes } from './internal/config'
+import { DownloadOptions, UploadOptions } from './options'
+import { createTar, extractTar, listTar } from './internal/tar'
 import {
   CreateCacheEntryRequest,
   FinalizeCacheEntryUploadRequest,
   FinalizeCacheEntryUploadResponse,
   GetCacheEntryDownloadURLRequest
 } from './generated/results/api/v1/cache'
-import {CacheFileSizeLimit} from './internal/constants'
-import {uploadCacheFile} from './internal/blob/upload-cache'
-import {downloadCacheFile} from './internal/blob/download-cache'
+import { CacheFileSizeLimit } from './internal/constants'
+import { uploadCacheFile } from './internal/blob/upload-cache'
+import { downloadCacheFile } from './internal/blob/download-cache'
 export class ValidationError extends Error {
   constructor(message: string) {
     super(message)
@@ -79,9 +79,11 @@ export async function restoreCache(
   options?: DownloadOptions,
   enableCrossOsArchive = false
 ): Promise<string | undefined> {
+  const cacheServiceVersion: string = getCacheServiceVersion()
+  core.debug(`Cache service version: ${cacheServiceVersion}`)
+
   checkPaths(paths)
 
-  const cacheServiceVersion: string = getCacheServiceVersion()
   switch (cacheServiceVersion) {
     case 'v2':
       return await restoreCacheV2(
@@ -405,9 +407,9 @@ async function saveCacheV1(
   } else if (reserveCacheResponse?.statusCode === 400) {
     throw new Error(
       reserveCacheResponse?.error?.message ??
-        `Cache size of ~${Math.round(
-          archiveFileSize / (1024 * 1024)
-        )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`
+      `Cache size of ~${Math.round(
+        archiveFileSize / (1024 * 1024)
+      )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`
     )
   } else {
     throw new ReserveCacheError(
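With the hunk above, restoreCache resolves the service version once, logs it, and only then dispatches, which is what lets the new afterEach hook assert `Cache service version: v2` after every test. Condensed to its shape, the dispatch is roughly the following sketch (signatures trimmed; the real functions take paths, keys, and options, and the 'v1' default branch is assumed here since it is not shown in the hunk):

    declare function getCacheServiceVersion(): string
    declare function restoreCacheV1(): Promise<string | undefined>
    declare function restoreCacheV2(): Promise<string | undefined>
    declare const core: {debug(message: string): void}

    async function restoreCacheSketch(): Promise<string | undefined> {
      // Resolve and log the version before any validation or dispatch
      const cacheServiceVersion: string = getCacheServiceVersion()
      core.debug(`Cache service version: ${cacheServiceVersion}`)
      switch (cacheServiceVersion) {
        case 'v2':
          return await restoreCacheV2()
        case 'v1':
        default:
          return await restoreCacheV1()
      }
    }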
From 4dadd612d6e122c49c46883d9c83d6f88cd3c975 Mon Sep 17 00:00:00 2001
From: Bassem Dghaidi <568794+Link-@users.noreply.github.com>
Date: Mon, 25 Nov 2024 05:42:50 -0800
Subject: [PATCH 053/108] Add support for matching on restore key values

---
 .../cache/__tests__/restoreCacheV2.test.ts    |  54 ++-
 packages/cache/src/cache.ts                   |  20 +-
 .../src/generated/results/api/v1/cache.ts     | 342 ++----------------
 .../results/entities/v1/cacheentry.ts         | 163 +++++++++
 4 files changed, 249 insertions(+), 330 deletions(-)
 create mode 100644 packages/cache/src/generated/results/entities/v1/cacheentry.ts

diff --git a/packages/cache/__tests__/restoreCacheV2.test.ts b/packages/cache/__tests__/restoreCacheV2.test.ts
index 78b78aaa..f9fe0e9e 100644
--- a/packages/cache/__tests__/restoreCacheV2.test.ts
+++ b/packages/cache/__tests__/restoreCacheV2.test.ts
@@ -80,7 +80,13 @@ test('restore with no cache found', async () => {
 
     jest
         .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL')
-        .mockReturnValue(Promise.resolve({ ok: false, signedDownloadUrl: '' }))
+        .mockReturnValue(
+            Promise.resolve({
+                ok: false,
+                signedDownloadUrl: '',
+                matchedKey: ''
+            })
+        )
 
     const cacheKey = await restoreCache(paths, key)
 
@@ -109,18 +115,24 @@ test('restore with server error should fail', async () => {
 test('restore with restore keys and no cache found', async () => {
     const paths = ['node_modules']
     const key = 'node-test'
-    const restoreKey = 'node-'
+    const restoreKeys = ['node-']
     const logWarningMock = jest.spyOn(core, 'warning')
 
     jest
         .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL')
-        .mockReturnValue(Promise.resolve({ ok: false, signedDownloadUrl: '' }))
+        .mockReturnValue(
+            Promise.resolve({
+                ok: false,
+                signedDownloadUrl: '',
+                matchedKey: ''
+            })
+        )
 
-    const cacheKey = await restoreCache(paths, key, [restoreKey])
+    const cacheKey = await restoreCache(paths, key, restoreKeys)
 
     expect(cacheKey).toBe(undefined)
     expect(logWarningMock).toHaveBeenCalledWith(
-        `Cache not found for keys: ${[key, restoreKey].join(', ')}`
+        `Cache not found for keys: ${[key, ...restoreKeys].join(', ')}`
     )
 })
 
@@ -143,7 +155,11 @@ test('restore with gzip compressed cache found', async () => {
         'GetCacheEntryDownloadURL'
     )
     getCacheDownloadURLMock.mockReturnValue(
-        Promise.resolve({ ok: true, signedDownloadUrl })
+        Promise.resolve({
+            ok: true,
+            signedDownloadUrl,
+            matchedKey: key
+        })
     )
 
     const tempPath = '/foo/bar'
@@ -219,7 +235,11 @@ test('restore with zstd compressed cache found', async () => {
         'GetCacheEntryDownloadURL'
    )
     getCacheDownloadURLMock.mockReturnValue(
-        Promise.resolve({ ok: true, signedDownloadUrl })
+        Promise.resolve({
+            ok: true,
+            signedDownloadUrl,
+            matchedKey: key
+        })
     )
 
     const tempPath = '/foo/bar'
@@ -279,7 +299,7 @@ test('restore with zstd compressed cache found', async () => {
 test('restore with cache found for restore key', async () => {
     const paths = ['node_modules']
     const key = 'node-test'
-    const restoreKey = 'node-'
+    const restoreKeys = ['node-']
     const compressionMethod = CompressionMethod.Gzip
     const signedDownloadUrl = 'https://blob-storage.local?signed=true'
     const cacheVersion =
@@ -296,7 +316,11 @@ test('restore with cache found for restore key', async () => {
         'GetCacheEntryDownloadURL'
     )
     getCacheDownloadURLMock.mockReturnValue(
-        Promise.resolve({ ok: true, signedDownloadUrl })
+        Promise.resolve({
+            ok: true,
+            signedDownloadUrl,
+            matchedKey: restoreKeys[0]
+        })
     )
 
     const tempPath = '/foo/bar'
@@ -323,9 +347,9 @@ test('restore with cache found for restore key', async () => {
     const extractTarMock = jest.spyOn(tar, 'extractTar')
     const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile')
 
-    const cacheKey = await restoreCache(paths, key, [restoreKey])
+    const cacheKey = await restoreCache(paths, key, restoreKeys)
 
-    expect(cacheKey).toBe(restoreKey)
+    expect(cacheKey).toBe(restoreKeys[0])
     expect(getCacheVersionMock).toHaveBeenCalledWith(
         paths,
         compressionMethod,
@@ -333,7 +357,7 @@ test('restore with cache found for restore key', async () => {
     )
     expect(getCacheDownloadURLMock).toHaveBeenCalledWith({
         key,
-        restoreKeys: restoreKey,
+        restoreKeys: restoreKeys,
         version: cacheVersion
     })
     expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
@@ -373,7 +397,11 @@ test('restore with dry run', async () => {
         'GetCacheEntryDownloadURL'
     )
     getCacheDownloadURLMock.mockReturnValue(
-        Promise.resolve({ ok: true, signedDownloadUrl })
+        Promise.resolve({
+            ok: true,
+            signedDownloadUrl,
+            matchedKey: key
+        })
    )
 
     const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts
index a2ce38f8..1f26e5ce 100644
--- a/packages/cache/src/cache.ts
+++ b/packages/cache/src/cache.ts
@@ -3,18 +3,18 @@ import * as path from 'path'
 import * as utils from './internal/cacheUtils'
 import * as cacheHttpClient from './internal/cacheHttpClient'
 import * as cacheTwirpClient from './internal/shared/cacheTwirpClient'
-import { getCacheServiceVersion, isGhes } from './internal/config'
-import { DownloadOptions, UploadOptions } from './options'
-import { createTar, extractTar, listTar } from './internal/tar'
+import {getCacheServiceVersion, isGhes} from './internal/config'
+import {DownloadOptions, UploadOptions} from './options'
+import {createTar, extractTar, listTar} from './internal/tar'
 import {
   CreateCacheEntryRequest,
   FinalizeCacheEntryUploadRequest,
   FinalizeCacheEntryUploadResponse,
   GetCacheEntryDownloadURLRequest
 } from './generated/results/api/v1/cache'
-import { CacheFileSizeLimit } from './internal/constants'
-import { uploadCacheFile } from './internal/blob/upload-cache'
-import { downloadCacheFile } from './internal/blob/download-cache'
+import {CacheFileSizeLimit} from './internal/constants'
+import {uploadCacheFile} from './internal/blob/upload-cache'
+import {downloadCacheFile} from './internal/blob/download-cache'
 export class ValidationError extends Error {
   constructor(message: string) {
     super(message)
@@ -287,7 +287,7 @@ async function restoreCacheV2(
     await extractTar(archivePath, compressionMethod)
     core.info('Cache restored successfully')
 
-    return request.key
+    return response.matchedKey
   } catch (error) {
     const typedError = error as Error
     if (typedError.name === ValidationError.name) {
@@ -407,9 +407,9 @@ async function saveCacheV1(
   } else if (reserveCacheResponse?.statusCode === 400) {
     throw new Error(
       reserveCacheResponse?.error?.message ??
-      `Cache size of ~${Math.round(
-        archiveFileSize / (1024 * 1024)
-      )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`
+        `Cache size of ~${Math.round(
+          archiveFileSize / (1024 * 1024)
+        )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`
     )
   } else {
     throw new ReserveCacheError(
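Returning response.matchedKey instead of request.key is what makes restore-key hits report truthfully: when the service matched a restore key rather than the primary key, callers now see the key that actually hit, which the updated tests assert via matchedKey: restoreKeys[0]. A small sketch of the consuming contract, using the response shape from the generated client below:

    interface GetCacheEntryDownloadURLResponse {
      ok: boolean
      signedDownloadUrl: string
      matchedKey: string
    }

    function resolveRestoredKey(
      response: GetCacheEntryDownloadURLResponse
    ): string | undefined {
      if (!response.ok) return undefined
      // e.g. a lookup for 'node-test' may come back with 'node-'
      // when only the restore key prefix matched
      return response.matchedKey
    }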
- `Cache size of ~${Math.round( - archiveFileSize / (1024 * 1024) - )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.` + `Cache size of ~${Math.round( + archiveFileSize / (1024 * 1024) + )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.` ) } else { throw new ReserveCacheError( diff --git a/packages/cache/src/generated/results/api/v1/cache.ts b/packages/cache/src/generated/results/api/v1/cache.ts index 0736c7ad..387bbd15 100644 --- a/packages/cache/src/generated/results/api/v1/cache.ts +++ b/packages/cache/src/generated/results/api/v1/cache.ts @@ -12,7 +12,7 @@ import type { PartialMessage } from "@protobuf-ts/runtime"; import { reflectionMergePartial } from "@protobuf-ts/runtime"; import { MESSAGE_TYPE } from "@protobuf-ts/runtime"; import { MessageType } from "@protobuf-ts/runtime"; -import { Timestamp } from "../../../google/protobuf/timestamp"; +import { CacheEntry } from "../../entities/v1/cacheentry"; import { CacheMetadata } from "../../entities/v1/cachemetadata"; /** * @generated from protobuf message github.actions.results.api.v1.CreateCacheEntryRequest @@ -139,6 +139,12 @@ export interface GetCacheEntryDownloadURLResponse { * @generated from protobuf field: string signed_download_url = 2; */ signedDownloadUrl: string; + /** + * Key or restore key that matches the lookup + * + * @generated from protobuf field: string matched_key = 3; + */ + matchedKey: string; } /** * @generated from protobuf message github.actions.results.api.v1.DeleteCacheEntryRequest @@ -200,62 +206,11 @@ export interface ListCacheEntriesRequest { */ export interface ListCacheEntriesResponse { /** - * @generated from protobuf field: repeated github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry entries = 1; - */ - entries: ListCacheEntriesResponse_CacheEntry[]; -} -/** - * @generated from protobuf message github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry - */ -export interface ListCacheEntriesResponse_CacheEntry { - /** - * An explicit key for a cache entry + * Cache entries in the defined scope * - * @generated from protobuf field: string key = 1; + * @generated from protobuf field: repeated github.actions.results.entities.v1.CacheEntry entries = 1; */ - key: string; - /** - * SHA256 hex digest of the cache archive - * - * @generated from protobuf field: string hash = 2; - */ - hash: string; - /** - * Cache entry size in bytes - * - * @generated from protobuf field: int64 size_bytes = 3; - */ - sizeBytes: string; - /** - * Access scope - * - * @generated from protobuf field: string scope = 4; - */ - scope: string; - /** - * Version SHA256 hex digest - * - * @generated from protobuf field: string version = 5; - */ - version: string; - /** - * When the cache entry was created - * - * @generated from protobuf field: google.protobuf.Timestamp created_at = 6; - */ - createdAt?: Timestamp; - /** - * When the cache entry was last accessed - * - * @generated from protobuf field: google.protobuf.Timestamp last_accessed_at = 7; - */ - lastAccessedAt?: Timestamp; - /** - * When the cache entry is set to expire - * - * @generated from protobuf field: google.protobuf.Timestamp expires_at = 8; - */ - expiresAt?: Timestamp; + entries: CacheEntry[]; } /** * @generated from protobuf message github.actions.results.api.v1.LookupCacheEntryRequest @@ -296,61 +251,12 @@ export interface LookupCacheEntryResponse { * @generated from protobuf field: bool exists = 1; */ exists: boolean; -} -/** - * Matched cache entry metadata - * - * @generated from protobuf 
message github.actions.results.api.v1.LookupCacheEntryResponse.CacheEntry - */ -export interface LookupCacheEntryResponse_CacheEntry { /** - * An explicit key for a cache entry + * Matched cache entry metadata * - * @generated from protobuf field: string key = 1; + * @generated from protobuf field: github.actions.results.entities.v1.CacheEntry entry = 2; */ - key: string; - /** - * SHA256 hex digest of the cache archive - * - * @generated from protobuf field: string hash = 2; - */ - hash: string; - /** - * Cache entry size in bytes - * - * @generated from protobuf field: int64 size_bytes = 3; - */ - sizeBytes: string; - /** - * Access scope - * - * @generated from protobuf field: string scope = 4; - */ - scope: string; - /** - * Version SHA256 hex digest - * - * @generated from protobuf field: string version = 5; - */ - version: string; - /** - * When the cache entry was created - * - * @generated from protobuf field: google.protobuf.Timestamp created_at = 6; - */ - createdAt?: Timestamp; - /** - * When the cache entry was last accessed - * - * @generated from protobuf field: google.protobuf.Timestamp last_accessed_at = 7; - */ - lastAccessedAt?: Timestamp; - /** - * When the cache entry is set to expire - * - * @generated from protobuf field: google.protobuf.Timestamp expires_at = 8; - */ - expiresAt?: Timestamp; + entry?: CacheEntry; } // @generated message type with reflection information, may provide speed optimized methods class CreateCacheEntryRequest$Type extends MessageType { @@ -662,11 +568,12 @@ class GetCacheEntryDownloadURLResponse$Type extends MessageType): GetCacheEntryDownloadURLResponse { - const message = { ok: false, signedDownloadUrl: "" }; + const message = { ok: false, signedDownloadUrl: "", matchedKey: "" }; globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== undefined) reflectionMergePartial(this, message, value); @@ -683,6 +590,9 @@ class GetCacheEntryDownloadURLResponse$Type extends MessageType { constructor() { super("github.actions.results.api.v1.ListCacheEntriesResponse", [ - { no: 1, name: "entries", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => ListCacheEntriesResponse_CacheEntry } + { no: 1, name: "entries", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => CacheEntry } ]); } create(value?: PartialMessage): ListCacheEntriesResponse { @@ -899,8 +812,8 @@ class ListCacheEntriesResponse$Type extends MessageType { - constructor() { - super("github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry", [ - { no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 2, name: "hash", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ }, - { no: 4, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 6, name: "created_at", kind: "message", T: () => Timestamp }, - { no: 7, name: "last_accessed_at", kind: "message", T: () => Timestamp }, - { no: 8, name: "expires_at", kind: "message", T: () => Timestamp } - ]); - } - create(value?: PartialMessage): ListCacheEntriesResponse_CacheEntry { - const message = { key: "", hash: "", sizeBytes: "0", scope: "", version: "" }; - globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== undefined) - reflectionMergePartial(this, message, value); - return message; - } - internalBinaryRead(reader: IBinaryReader, length: number, 
options: BinaryReadOptions, target?: ListCacheEntriesResponse_CacheEntry): ListCacheEntriesResponse_CacheEntry { - let message = target ?? this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* string key */ 1: - message.key = reader.string(); - break; - case /* string hash */ 2: - message.hash = reader.string(); - break; - case /* int64 size_bytes */ 3: - message.sizeBytes = reader.int64().toString(); - break; - case /* string scope */ 4: - message.scope = reader.string(); - break; - case /* string version */ 5: - message.version = reader.string(); - break; - case /* google.protobuf.Timestamp created_at */ 6: - message.createdAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt); - break; - case /* google.protobuf.Timestamp last_accessed_at */ 7: - message.lastAccessedAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.lastAccessedAt); - break; - case /* google.protobuf.Timestamp expires_at */ 8: - message.expiresAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; - } - internalBinaryWrite(message: ListCacheEntriesResponse_CacheEntry, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { - /* string key = 1; */ - if (message.key !== "") - writer.tag(1, WireType.LengthDelimited).string(message.key); - /* string hash = 2; */ - if (message.hash !== "") - writer.tag(2, WireType.LengthDelimited).string(message.hash); - /* int64 size_bytes = 3; */ - if (message.sizeBytes !== "0") - writer.tag(3, WireType.Varint).int64(message.sizeBytes); - /* string scope = 4; */ - if (message.scope !== "") - writer.tag(4, WireType.LengthDelimited).string(message.scope); - /* string version = 5; */ - if (message.version !== "") - writer.tag(5, WireType.LengthDelimited).string(message.version); - /* google.protobuf.Timestamp created_at = 6; */ - if (message.createdAt) - Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, WireType.LengthDelimited).fork(), options).join(); - /* google.protobuf.Timestamp last_accessed_at = 7; */ - if (message.lastAccessedAt) - Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, WireType.LengthDelimited).fork(), options).join(); - /* google.protobuf.Timestamp expires_at = 8; */ - if (message.expiresAt) - Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, WireType.LengthDelimited).fork(), options).join(); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
-        return writer;
-    }
-}
-/**
- * @generated MessageType for protobuf message github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry
- */
-export const ListCacheEntriesResponse_CacheEntry = new ListCacheEntriesResponse_CacheEntry$Type();
-// @generated message type with reflection information, may provide speed optimized methods
 class LookupCacheEntryRequest$Type extends MessageType<LookupCacheEntryRequest> {
     constructor() {
         super("github.actions.results.api.v1.LookupCacheEntryRequest", [
@@ -1095,7 +912,8 @@ export const LookupCacheEntryRequest = new LookupCacheEntryRequest$Type();
 class LookupCacheEntryResponse$Type extends MessageType<LookupCacheEntryResponse> {
     constructor() {
         super("github.actions.results.api.v1.LookupCacheEntryResponse", [
-            { no: 1, name: "exists", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }
+            { no: 1, name: "exists", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
+            { no: 2, name: "entry", kind: "message", T: () => CacheEntry }
         ]);
     }
     create(value?: PartialMessage<LookupCacheEntryResponse>): LookupCacheEntryResponse {
@@ -1113,6 +931,9 @@ class LookupCacheEntryResponse$Type extends MessageType<LookupCacheEntryResponse> {
-
-class LookupCacheEntryResponse_CacheEntry$Type extends MessageType<LookupCacheEntryResponse_CacheEntry> {
-    constructor() {
-        super("github.actions.results.api.v1.LookupCacheEntryResponse.CacheEntry", [
-            { no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 2, name: "hash", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
-            { no: 4, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 6, name: "created_at", kind: "message", T: () => Timestamp },
-            { no: 7, name: "last_accessed_at", kind: "message", T: () => Timestamp },
-            { no: 8, name: "expires_at", kind: "message", T: () => Timestamp }
-        ]);
-    }
-    create(value?: PartialMessage<LookupCacheEntryResponse_CacheEntry>): LookupCacheEntryResponse_CacheEntry {
-        const message = { key: "", hash: "", sizeBytes: "0", scope: "", version: "" };
-        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
-        if (value !== undefined)
-            reflectionMergePartial<LookupCacheEntryResponse_CacheEntry>(this, message, value);
-        return message;
-    }
-    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: LookupCacheEntryResponse_CacheEntry): LookupCacheEntryResponse_CacheEntry {
-        let message = target ?? 
this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* string key */ 1: - message.key = reader.string(); - break; - case /* string hash */ 2: - message.hash = reader.string(); - break; - case /* int64 size_bytes */ 3: - message.sizeBytes = reader.int64().toString(); - break; - case /* string scope */ 4: - message.scope = reader.string(); - break; - case /* string version */ 5: - message.version = reader.string(); - break; - case /* google.protobuf.Timestamp created_at */ 6: - message.createdAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt); - break; - case /* google.protobuf.Timestamp last_accessed_at */ 7: - message.lastAccessedAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.lastAccessedAt); - break; - case /* google.protobuf.Timestamp expires_at */ 8: - message.expiresAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; - } - internalBinaryWrite(message: LookupCacheEntryResponse_CacheEntry, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { - /* string key = 1; */ - if (message.key !== "") - writer.tag(1, WireType.LengthDelimited).string(message.key); - /* string hash = 2; */ - if (message.hash !== "") - writer.tag(2, WireType.LengthDelimited).string(message.hash); - /* int64 size_bytes = 3; */ - if (message.sizeBytes !== "0") - writer.tag(3, WireType.Varint).int64(message.sizeBytes); - /* string scope = 4; */ - if (message.scope !== "") - writer.tag(4, WireType.LengthDelimited).string(message.scope); - /* string version = 5; */ - if (message.version !== "") - writer.tag(5, WireType.LengthDelimited).string(message.version); - /* google.protobuf.Timestamp created_at = 6; */ - if (message.createdAt) - Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, WireType.LengthDelimited).fork(), options).join(); - /* google.protobuf.Timestamp last_accessed_at = 7; */ - if (message.lastAccessedAt) - Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, WireType.LengthDelimited).fork(), options).join(); - /* google.protobuf.Timestamp expires_at = 8; */ - if (message.expiresAt) - Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, WireType.LengthDelimited).fork(), options).join(); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
-        return writer;
-    }
-}
-/**
- * @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryResponse.CacheEntry
- */
-export const LookupCacheEntryResponse_CacheEntry = new LookupCacheEntryResponse_CacheEntry$Type();
 /**
  * @generated ServiceType for protobuf service github.actions.results.api.v1.CacheService
  */
diff --git a/packages/cache/src/generated/results/entities/v1/cacheentry.ts b/packages/cache/src/generated/results/entities/v1/cacheentry.ts
new file mode 100644
index 00000000..b55b4afa
--- /dev/null
+++ b/packages/cache/src/generated/results/entities/v1/cacheentry.ts
@@ -0,0 +1,163 @@
+// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
+// @generated from protobuf file "results/entities/v1/cacheentry.proto" (package "github.actions.results.entities.v1", syntax proto3)
+// tslint:disable
+import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
+import type { IBinaryWriter } from "@protobuf-ts/runtime";
+import { WireType } from "@protobuf-ts/runtime";
+import type { BinaryReadOptions } from "@protobuf-ts/runtime";
+import type { IBinaryReader } from "@protobuf-ts/runtime";
+import { UnknownFieldHandler } from "@protobuf-ts/runtime";
+import type { PartialMessage } from "@protobuf-ts/runtime";
+import { reflectionMergePartial } from "@protobuf-ts/runtime";
+import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
+import { MessageType } from "@protobuf-ts/runtime";
+import { Timestamp } from "../../../google/protobuf/timestamp";
+/**
+ * @generated from protobuf message github.actions.results.entities.v1.CacheEntry
+ */
+export interface CacheEntry {
+    /**
+     * An explicit key for a cache entry
+     *
+     * @generated from protobuf field: string key = 1;
+     */
+    key: string;
+    /**
+     * SHA256 hex digest of the cache archive
+     *
+     * @generated from protobuf field: string hash = 2;
+     */
+    hash: string;
+    /**
+     * Cache entry size in bytes
+     *
+     * @generated from protobuf field: int64 size_bytes = 3;
+     */
+    sizeBytes: string;
+    /**
+     * Access scope
+     *
+     * @generated from protobuf field: string scope = 4;
+     */
+    scope: string;
+    /**
+     * Version SHA256 hex digest
+     *
+     * @generated from protobuf field: string version = 5;
+     */
+    version: string;
+    /**
+     * When the cache entry was created
+     *
+     * @generated from protobuf field: google.protobuf.Timestamp created_at = 6;
+     */
+    createdAt?: Timestamp;
+    /**
+     * When the cache entry was last accessed
+     *
+     * @generated from protobuf field: google.protobuf.Timestamp last_accessed_at = 7;
+     */
+    lastAccessedAt?: Timestamp;
+    /**
+     * When the cache entry is set to expire
+     *
+     * @generated from protobuf field: google.protobuf.Timestamp expires_at = 8;
+     */
+    expiresAt?: Timestamp;
+}
+// @generated message type with reflection information, may provide speed optimized methods
+class CacheEntry$Type extends MessageType<CacheEntry> {
+    constructor() {
+        super("github.actions.results.entities.v1.CacheEntry", [
+            { no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 2, name: "hash", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
+            { no: 4, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 6, name: "created_at", kind: "message", T: () => Timestamp },
+            { no: 7, name: "last_accessed_at", kind: "message", T: () => Timestamp },
+            { no: 8, 
name: "expires_at", kind: "message", T: () => Timestamp } + ]); + } + create(value?: PartialMessage): CacheEntry { + const message = { key: "", hash: "", sizeBytes: "0", scope: "", version: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CacheEntry): CacheEntry { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string key */ 1: + message.key = reader.string(); + break; + case /* string hash */ 2: + message.hash = reader.string(); + break; + case /* int64 size_bytes */ 3: + message.sizeBytes = reader.int64().toString(); + break; + case /* string scope */ 4: + message.scope = reader.string(); + break; + case /* string version */ 5: + message.version = reader.string(); + break; + case /* google.protobuf.Timestamp created_at */ 6: + message.createdAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt); + break; + case /* google.protobuf.Timestamp last_accessed_at */ 7: + message.lastAccessedAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.lastAccessedAt); + break; + case /* google.protobuf.Timestamp expires_at */ 8: + message.expiresAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: CacheEntry, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string key = 1; */ + if (message.key !== "") + writer.tag(1, WireType.LengthDelimited).string(message.key); + /* string hash = 2; */ + if (message.hash !== "") + writer.tag(2, WireType.LengthDelimited).string(message.hash); + /* int64 size_bytes = 3; */ + if (message.sizeBytes !== "0") + writer.tag(3, WireType.Varint).int64(message.sizeBytes); + /* string scope = 4; */ + if (message.scope !== "") + writer.tag(4, WireType.LengthDelimited).string(message.scope); + /* string version = 5; */ + if (message.version !== "") + writer.tag(5, WireType.LengthDelimited).string(message.version); + /* google.protobuf.Timestamp created_at = 6; */ + if (message.createdAt) + Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, WireType.LengthDelimited).fork(), options).join(); + /* google.protobuf.Timestamp last_accessed_at = 7; */ + if (message.lastAccessedAt) + Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, WireType.LengthDelimited).fork(), options).join(); + /* google.protobuf.Timestamp expires_at = 8; */ + if (message.expiresAt) + Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message github.actions.results.entities.v1.CacheEntry + */ +export const CacheEntry = new CacheEntry$Type(); From de236da416f84474a98d0ac6e9ad35dd13314552 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Mon, 25 Nov 2024 05:47:51 -0800 Subject: [PATCH 054/108] Fix cache lookup scenario --- .../cache/__tests__/restoreCacheV2.test.ts | 652 +++++++++--------- packages/cache/src/cache.ts | 2 +- 2 files changed, 327 insertions(+), 327 deletions(-) diff --git a/packages/cache/__tests__/restoreCacheV2.test.ts b/packages/cache/__tests__/restoreCacheV2.test.ts index f9fe0e9e..c74d7fab 100644 --- a/packages/cache/__tests__/restoreCacheV2.test.ts +++ b/packages/cache/__tests__/restoreCacheV2.test.ts @@ -4,10 +4,10 @@ import * as tar from '../src/internal/tar' import * as config from '../src/internal/config' import * as cacheUtils from '../src/internal/cacheUtils' import * as downloadCacheModule from '../src/internal/blob/download-cache' -import { restoreCache } from '../src/cache' -import { CacheFilename, CompressionMethod } from '../src/internal/constants' -import { CacheServiceClientJSON } from '../src/generated/results/api/v1/cache.twirp' -import { BlobDownloadResponseParsed } from '@azure/storage-blob' +import {restoreCache} from '../src/cache' +import {CacheFilename, CompressionMethod} from '../src/internal/constants' +import {CacheServiceClientJSON} from '../src/generated/results/api/v1/cache.twirp' +import {BlobDownloadResponseParsed} from '@azure/storage-blob' // import {executePromisesSequentially} from '@azure/ms-rest-js' jest.mock('../src/internal/cacheHttpClient') @@ -19,413 +19,413 @@ let logDebugMock: jest.SpyInstance let logInfoMock: jest.SpyInstance beforeAll(() => { - jest.spyOn(console, 'log').mockImplementation(() => { }) - jest.spyOn(core, 'debug').mockImplementation(() => { }) - jest.spyOn(core, 'info').mockImplementation(() => { }) - jest.spyOn(core, 'warning').mockImplementation(() => { }) - jest.spyOn(core, 'error').mockImplementation(() => { }) + jest.spyOn(console, 'log').mockImplementation(() => {}) + jest.spyOn(core, 'debug').mockImplementation(() => {}) + jest.spyOn(core, 'info').mockImplementation(() => {}) + jest.spyOn(core, 'warning').mockImplementation(() => {}) + jest.spyOn(core, 'error').mockImplementation(() => {}) - jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => { - const actualUtils = jest.requireActual('../src/internal/cacheUtils') - return actualUtils.getCacheFileName(cm) - }) + jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => { + const actualUtils = jest.requireActual('../src/internal/cacheUtils') + return actualUtils.getCacheFileName(cm) + }) - // Ensure that we're using v2 for these tests - jest.spyOn(config, 'getCacheServiceVersion').mockReturnValue('v2') + // Ensure that we're using v2 for these tests + jest.spyOn(config, 'getCacheServiceVersion').mockReturnValue('v2') - logDebugMock = jest.spyOn(core, 'debug') - logInfoMock = jest.spyOn(core, 'info') + logDebugMock = jest.spyOn(core, 'debug') + logInfoMock = jest.spyOn(core, 'info') }) afterEach(() => { - expect(logDebugMock).toHaveBeenCalledWith('Cache service version: v2') + expect(logDebugMock).toHaveBeenCalledWith('Cache service version: v2') }) test('restore with no path should fail', async () => { - const paths: string[] = [] - const key = 'node-test' - await expect(restoreCache(paths, 
key)).rejects.toThrowError( - `Path Validation Error: At least one directory or file path is required` - ) + const paths: string[] = [] + const key = 'node-test' + await expect(restoreCache(paths, key)).rejects.toThrowError( + `Path Validation Error: At least one directory or file path is required` + ) }) test('restore with too many keys should fail', async () => { - const paths = ['node_modules'] - const key = 'node-test' - const restoreKeys = [...Array(20).keys()].map(x => x.toString()) - await expect(restoreCache(paths, key, restoreKeys)).rejects.toThrowError( - `Key Validation Error: Keys are limited to a maximum of 10.` - ) + const paths = ['node_modules'] + const key = 'node-test' + const restoreKeys = [...Array(20).keys()].map(x => x.toString()) + await expect(restoreCache(paths, key, restoreKeys)).rejects.toThrowError( + `Key Validation Error: Keys are limited to a maximum of 10.` + ) }) test('restore with large key should fail', async () => { - const paths = ['node_modules'] - const key = 'foo'.repeat(512) // Over the 512 character limit - await expect(restoreCache(paths, key)).rejects.toThrowError( - `Key Validation Error: ${key} cannot be larger than 512 characters.` - ) + const paths = ['node_modules'] + const key = 'foo'.repeat(512) // Over the 512 character limit + await expect(restoreCache(paths, key)).rejects.toThrowError( + `Key Validation Error: ${key} cannot be larger than 512 characters.` + ) }) test('restore with invalid key should fail', async () => { - const paths = ['node_modules'] - const key = 'comma,comma' - await expect(restoreCache(paths, key)).rejects.toThrowError( - `Key Validation Error: ${key} cannot contain commas.` - ) + const paths = ['node_modules'] + const key = 'comma,comma' + await expect(restoreCache(paths, key)).rejects.toThrowError( + `Key Validation Error: ${key} cannot contain commas.` + ) }) test('restore with no cache found', async () => { - const paths = ['node_modules'] - const key = 'node-test' + const paths = ['node_modules'] + const key = 'node-test' - jest - .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL') - .mockReturnValue( - Promise.resolve({ - ok: false, - signedDownloadUrl: '', - matchedKey: '' - }) - ) + jest + .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL') + .mockReturnValue( + Promise.resolve({ + ok: false, + signedDownloadUrl: '', + matchedKey: '' + }) + ) - const cacheKey = await restoreCache(paths, key) + const cacheKey = await restoreCache(paths, key) - expect(cacheKey).toBe(undefined) + expect(cacheKey).toBe(undefined) }) test('restore with server error should fail', async () => { - const paths = ['node_modules'] - const key = 'node-test' - const logWarningMock = jest.spyOn(core, 'warning') + const paths = ['node_modules'] + const key = 'node-test' + const logWarningMock = jest.spyOn(core, 'warning') - jest - .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL') - .mockImplementation(() => { - throw new Error('HTTP Error Occurred') - }) + jest + .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL') + .mockImplementation(() => { + throw new Error('HTTP Error Occurred') + }) - const cacheKey = await restoreCache(paths, key) - expect(cacheKey).toBe(undefined) - expect(logWarningMock).toHaveBeenCalledTimes(1) - expect(logWarningMock).toHaveBeenCalledWith( - 'Failed to restore: HTTP Error Occurred' - ) + const cacheKey = await restoreCache(paths, key) + expect(cacheKey).toBe(undefined) + expect(logWarningMock).toHaveBeenCalledTimes(1) + 
expect(logWarningMock).toHaveBeenCalledWith( + 'Failed to restore: HTTP Error Occurred' + ) }) test('restore with restore keys and no cache found', async () => { - const paths = ['node_modules'] - const key = 'node-test' - const restoreKeys = ['node-'] - const logWarningMock = jest.spyOn(core, 'warning') + const paths = ['node_modules'] + const key = 'node-test' + const restoreKeys = ['node-'] + const logWarningMock = jest.spyOn(core, 'warning') - jest - .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL') - .mockReturnValue( - Promise.resolve({ - ok: false, - signedDownloadUrl: '', - matchedKey: '' - }) - ) - - const cacheKey = await restoreCache(paths, key, restoreKeys) - - expect(cacheKey).toBe(undefined) - expect(logWarningMock).toHaveBeenCalledWith( - `Cache not found for keys: ${[key, ...restoreKeys].join(', ')}` + jest + .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL') + .mockReturnValue( + Promise.resolve({ + ok: false, + signedDownloadUrl: '', + matchedKey: '' + }) ) + + const cacheKey = await restoreCache(paths, key, restoreKeys) + + expect(cacheKey).toBe(undefined) + expect(logWarningMock).toHaveBeenCalledWith( + `Cache not found for keys: ${[key, ...restoreKeys].join(', ')}` + ) }) test('restore with gzip compressed cache found', async () => { - const paths = ['node_modules'] - const key = 'node-test' - const compressionMethod = CompressionMethod.Gzip - const signedDownloadUrl = 'https://blob-storage.local?signed=true' - const cacheVersion = - 'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b' + const paths = ['node_modules'] + const key = 'node-test' + const compressionMethod = CompressionMethod.Gzip + const signedDownloadUrl = 'https://blob-storage.local?signed=true' + const cacheVersion = + 'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b' - const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion') - getCacheVersionMock.mockReturnValue(cacheVersion) + const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion') + getCacheVersionMock.mockReturnValue(cacheVersion) - const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod') - compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod)) + const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod') + compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod)) - const getCacheDownloadURLMock = jest.spyOn( - CacheServiceClientJSON.prototype, - 'GetCacheEntryDownloadURL' - ) - getCacheDownloadURLMock.mockReturnValue( - Promise.resolve({ - ok: true, - signedDownloadUrl, - matchedKey: key - }) - ) - - const tempPath = '/foo/bar' - - const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') - createTempDirectoryMock.mockImplementation(async () => { - return Promise.resolve(tempPath) + const getCacheDownloadURLMock = jest.spyOn( + CacheServiceClientJSON.prototype, + 'GetCacheEntryDownloadURL' + ) + getCacheDownloadURLMock.mockReturnValue( + Promise.resolve({ + ok: true, + signedDownloadUrl, + matchedKey: key }) + ) - const archivePath = path.join(tempPath, CacheFilename.Gzip) - const downloadCacheFileMock = jest.spyOn( - downloadCacheModule, - 'downloadCacheFile' - ) - downloadCacheFileMock.mockReturnValue( - Promise.resolve({} as BlobDownloadResponseParsed) - ) + const tempPath = '/foo/bar' - const fileSize = 142 - const getArchiveFileSizeInBytesMock = jest - .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') - .mockReturnValue(fileSize) + const createTempDirectoryMock 
= jest.spyOn(cacheUtils, 'createTempDirectory') + createTempDirectoryMock.mockImplementation(async () => { + return Promise.resolve(tempPath) + }) - const extractTarMock = jest.spyOn(tar, 'extractTar') - const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile') + const archivePath = path.join(tempPath, CacheFilename.Gzip) + const downloadCacheFileMock = jest.spyOn( + downloadCacheModule, + 'downloadCacheFile' + ) + downloadCacheFileMock.mockReturnValue( + Promise.resolve({} as BlobDownloadResponseParsed) + ) - const cacheKey = await restoreCache(paths, key) + const fileSize = 142 + const getArchiveFileSizeInBytesMock = jest + .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') + .mockReturnValue(fileSize) - expect(cacheKey).toBe(key) - expect(getCacheVersionMock).toHaveBeenCalledWith( - paths, - compressionMethod, - false - ) - expect(getCacheDownloadURLMock).toHaveBeenCalledWith({ - key, - restoreKeys: [], - version: cacheVersion - }) - expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) - expect(downloadCacheFileMock).toHaveBeenCalledWith( - signedDownloadUrl, - archivePath - ) - expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) - expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`) + const extractTarMock = jest.spyOn(tar, 'extractTar') + const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile') - expect(extractTarMock).toHaveBeenCalledTimes(1) - expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod) + const cacheKey = await restoreCache(paths, key) - expect(unlinkFileMock).toHaveBeenCalledTimes(1) - expect(unlinkFileMock).toHaveBeenCalledWith(archivePath) + expect(cacheKey).toBe(key) + expect(getCacheVersionMock).toHaveBeenCalledWith( + paths, + compressionMethod, + false + ) + expect(getCacheDownloadURLMock).toHaveBeenCalledWith({ + key, + restoreKeys: [], + version: cacheVersion + }) + expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) + expect(downloadCacheFileMock).toHaveBeenCalledWith( + signedDownloadUrl, + archivePath + ) + expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) + expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`) - expect(compressionMethodMock).toHaveBeenCalledTimes(1) + expect(extractTarMock).toHaveBeenCalledTimes(1) + expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod) + + expect(unlinkFileMock).toHaveBeenCalledTimes(1) + expect(unlinkFileMock).toHaveBeenCalledWith(archivePath) + + expect(compressionMethodMock).toHaveBeenCalledTimes(1) }) test('restore with zstd compressed cache found', async () => { - const paths = ['node_modules'] - const key = 'node-test' - const compressionMethod = CompressionMethod.Zstd - const signedDownloadUrl = 'https://blob-storage.local?signed=true' - const cacheVersion = - '8e2e96a184cb0cd6b48285b176c06a418f3d7fce14c29d9886fd1bb4f05c513d' + const paths = ['node_modules'] + const key = 'node-test' + const compressionMethod = CompressionMethod.Zstd + const signedDownloadUrl = 'https://blob-storage.local?signed=true' + const cacheVersion = + '8e2e96a184cb0cd6b48285b176c06a418f3d7fce14c29d9886fd1bb4f05c513d' - const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion') - getCacheVersionMock.mockReturnValue(cacheVersion) + const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion') + getCacheVersionMock.mockReturnValue(cacheVersion) - const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod') - compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod)) + 
const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod') + compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod)) - const getCacheDownloadURLMock = jest.spyOn( - CacheServiceClientJSON.prototype, - 'GetCacheEntryDownloadURL' - ) - getCacheDownloadURLMock.mockReturnValue( - Promise.resolve({ - ok: true, - signedDownloadUrl, - matchedKey: key - }) - ) - - const tempPath = '/foo/bar' - - const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') - createTempDirectoryMock.mockImplementation(async () => { - return Promise.resolve(tempPath) + const getCacheDownloadURLMock = jest.spyOn( + CacheServiceClientJSON.prototype, + 'GetCacheEntryDownloadURL' + ) + getCacheDownloadURLMock.mockReturnValue( + Promise.resolve({ + ok: true, + signedDownloadUrl, + matchedKey: key }) + ) - const archivePath = path.join(tempPath, CacheFilename.Zstd) - const downloadCacheFileMock = jest.spyOn( - downloadCacheModule, - 'downloadCacheFile' - ) - downloadCacheFileMock.mockReturnValue( - Promise.resolve({} as BlobDownloadResponseParsed) - ) + const tempPath = '/foo/bar' - const fileSize = 62915000 - const getArchiveFileSizeInBytesMock = jest - .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') - .mockReturnValue(fileSize) + const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') + createTempDirectoryMock.mockImplementation(async () => { + return Promise.resolve(tempPath) + }) - const extractTarMock = jest.spyOn(tar, 'extractTar') - const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile') + const archivePath = path.join(tempPath, CacheFilename.Zstd) + const downloadCacheFileMock = jest.spyOn( + downloadCacheModule, + 'downloadCacheFile' + ) + downloadCacheFileMock.mockReturnValue( + Promise.resolve({} as BlobDownloadResponseParsed) + ) - const cacheKey = await restoreCache(paths, key) + const fileSize = 62915000 + const getArchiveFileSizeInBytesMock = jest + .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') + .mockReturnValue(fileSize) - expect(cacheKey).toBe(key) - expect(getCacheVersionMock).toHaveBeenCalledWith( - paths, - compressionMethod, - false - ) - expect(getCacheDownloadURLMock).toHaveBeenCalledWith({ - key, - restoreKeys: [], - version: cacheVersion - }) - expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) - expect(downloadCacheFileMock).toHaveBeenCalledWith( - signedDownloadUrl, - archivePath - ) - expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) - expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`) + const extractTarMock = jest.spyOn(tar, 'extractTar') + const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile') - expect(extractTarMock).toHaveBeenCalledTimes(1) - expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod) + const cacheKey = await restoreCache(paths, key) - expect(unlinkFileMock).toHaveBeenCalledTimes(1) - expect(unlinkFileMock).toHaveBeenCalledWith(archivePath) + expect(cacheKey).toBe(key) + expect(getCacheVersionMock).toHaveBeenCalledWith( + paths, + compressionMethod, + false + ) + expect(getCacheDownloadURLMock).toHaveBeenCalledWith({ + key, + restoreKeys: [], + version: cacheVersion + }) + expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) + expect(downloadCacheFileMock).toHaveBeenCalledWith( + signedDownloadUrl, + archivePath + ) + expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) + expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`) - 
expect(compressionMethodMock).toHaveBeenCalledTimes(1) + expect(extractTarMock).toHaveBeenCalledTimes(1) + expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod) + + expect(unlinkFileMock).toHaveBeenCalledTimes(1) + expect(unlinkFileMock).toHaveBeenCalledWith(archivePath) + + expect(compressionMethodMock).toHaveBeenCalledTimes(1) }) test('restore with cache found for restore key', async () => { - const paths = ['node_modules'] - const key = 'node-test' - const restoreKeys = ['node-'] - const compressionMethod = CompressionMethod.Gzip - const signedDownloadUrl = 'https://blob-storage.local?signed=true' - const cacheVersion = - 'b8b58e9bd7b1e8f83d9f05c7e06ea865ba44a0330e07a14db74ac74386677bed' + const paths = ['node_modules'] + const key = 'node-test' + const restoreKeys = ['node-'] + const compressionMethod = CompressionMethod.Gzip + const signedDownloadUrl = 'https://blob-storage.local?signed=true' + const cacheVersion = + 'b8b58e9bd7b1e8f83d9f05c7e06ea865ba44a0330e07a14db74ac74386677bed' - const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion') - getCacheVersionMock.mockReturnValue(cacheVersion) + const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion') + getCacheVersionMock.mockReturnValue(cacheVersion) - const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod') - compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod)) + const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod') + compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod)) - const getCacheDownloadURLMock = jest.spyOn( - CacheServiceClientJSON.prototype, - 'GetCacheEntryDownloadURL' - ) - getCacheDownloadURLMock.mockReturnValue( - Promise.resolve({ - ok: true, - signedDownloadUrl, - matchedKey: restoreKeys[0] - }) - ) - - const tempPath = '/foo/bar' - - const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') - createTempDirectoryMock.mockImplementation(async () => { - return Promise.resolve(tempPath) + const getCacheDownloadURLMock = jest.spyOn( + CacheServiceClientJSON.prototype, + 'GetCacheEntryDownloadURL' + ) + getCacheDownloadURLMock.mockReturnValue( + Promise.resolve({ + ok: true, + signedDownloadUrl, + matchedKey: restoreKeys[0] }) + ) - const archivePath = path.join(tempPath, CacheFilename.Gzip) - const downloadCacheFileMock = jest.spyOn( - downloadCacheModule, - 'downloadCacheFile' - ) - downloadCacheFileMock.mockReturnValue( - Promise.resolve({} as BlobDownloadResponseParsed) - ) + const tempPath = '/foo/bar' - const fileSize = 142 - const getArchiveFileSizeInBytesMock = jest - .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') - .mockReturnValue(fileSize) + const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') + createTempDirectoryMock.mockImplementation(async () => { + return Promise.resolve(tempPath) + }) - const extractTarMock = jest.spyOn(tar, 'extractTar') - const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile') + const archivePath = path.join(tempPath, CacheFilename.Gzip) + const downloadCacheFileMock = jest.spyOn( + downloadCacheModule, + 'downloadCacheFile' + ) + downloadCacheFileMock.mockReturnValue( + Promise.resolve({} as BlobDownloadResponseParsed) + ) - const cacheKey = await restoreCache(paths, key, restoreKeys) + const fileSize = 142 + const getArchiveFileSizeInBytesMock = jest + .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') + .mockReturnValue(fileSize) - expect(cacheKey).toBe(restoreKeys[0]) - 
expect(getCacheVersionMock).toHaveBeenCalledWith( - paths, - compressionMethod, - false - ) - expect(getCacheDownloadURLMock).toHaveBeenCalledWith({ - key, - restoreKeys: restoreKeys, - version: cacheVersion - }) - expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) - expect(downloadCacheFileMock).toHaveBeenCalledWith( - signedDownloadUrl, - archivePath - ) - expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) - expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`) + const extractTarMock = jest.spyOn(tar, 'extractTar') + const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile') - expect(extractTarMock).toHaveBeenCalledTimes(1) - expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod) + const cacheKey = await restoreCache(paths, key, restoreKeys) - expect(unlinkFileMock).toHaveBeenCalledTimes(1) - expect(unlinkFileMock).toHaveBeenCalledWith(archivePath) + expect(cacheKey).toBe(restoreKeys[0]) + expect(getCacheVersionMock).toHaveBeenCalledWith( + paths, + compressionMethod, + false + ) + expect(getCacheDownloadURLMock).toHaveBeenCalledWith({ + key, + restoreKeys, + version: cacheVersion + }) + expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) + expect(downloadCacheFileMock).toHaveBeenCalledWith( + signedDownloadUrl, + archivePath + ) + expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) + expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`) - expect(compressionMethodMock).toHaveBeenCalledTimes(1) + expect(extractTarMock).toHaveBeenCalledTimes(1) + expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod) + + expect(unlinkFileMock).toHaveBeenCalledTimes(1) + expect(unlinkFileMock).toHaveBeenCalledWith(archivePath) + + expect(compressionMethodMock).toHaveBeenCalledTimes(1) }) test('restore with dry run', async () => { - const paths = ['node_modules'] - const key = 'node-test' - const options = { lookupOnly: true } - const compressionMethod = CompressionMethod.Gzip - const signedDownloadUrl = 'https://blob-storage.local?signed=true' - const cacheVersion = - 'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b' + const paths = ['node_modules'] + const key = 'node-test' + const options = {lookupOnly: true} + const compressionMethod = CompressionMethod.Gzip + const signedDownloadUrl = 'https://blob-storage.local?signed=true' + const cacheVersion = + 'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b' - const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion') - getCacheVersionMock.mockReturnValue(cacheVersion) + const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion') + getCacheVersionMock.mockReturnValue(cacheVersion) - const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod') - compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod)) + const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod') + compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod)) - const getCacheDownloadURLMock = jest.spyOn( - CacheServiceClientJSON.prototype, - 'GetCacheEntryDownloadURL' - ) - getCacheDownloadURLMock.mockReturnValue( - Promise.resolve({ - ok: true, - signedDownloadUrl, - matchedKey: key - }) - ) - - const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') - const downloadCacheFileMock = jest.spyOn( - downloadCacheModule, - 'downloadCacheFile' - ) - - const cacheKey = await restoreCache(paths, key, undefined, options) - - 
expect(cacheKey).toBe(key) - expect(getCacheVersionMock).toHaveBeenCalledWith( - paths, - compressionMethod, - false - ) - expect(getCacheDownloadURLMock).toHaveBeenCalledWith({ - key, - restoreKeys: [], - version: cacheVersion + const getCacheDownloadURLMock = jest.spyOn( + CacheServiceClientJSON.prototype, + 'GetCacheEntryDownloadURL' + ) + getCacheDownloadURLMock.mockReturnValue( + Promise.resolve({ + ok: true, + signedDownloadUrl, + matchedKey: key }) - expect(logInfoMock).toHaveBeenCalledWith('Lookup only - skipping download') + ) - // creating a tempDir and downloading the cache are skipped - expect(createTempDirectoryMock).toHaveBeenCalledTimes(0) - expect(downloadCacheFileMock).toHaveBeenCalledTimes(0) + const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') + const downloadCacheFileMock = jest.spyOn( + downloadCacheModule, + 'downloadCacheFile' + ) + + const cacheKey = await restoreCache(paths, key, undefined, options) + + expect(cacheKey).toBe(key) + expect(getCacheVersionMock).toHaveBeenCalledWith( + paths, + compressionMethod, + false + ) + expect(getCacheDownloadURLMock).toHaveBeenCalledWith({ + key, + restoreKeys: [], + version: cacheVersion + }) + expect(logInfoMock).toHaveBeenCalledWith('Lookup only - skipping download') + + // creating a tempDir and downloading the cache are skipped + expect(createTempDirectoryMock).toHaveBeenCalledTimes(0) + expect(downloadCacheFileMock).toHaveBeenCalledTimes(0) }) diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index 1f26e5ce..0f8f370d 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -261,7 +261,7 @@ async function restoreCacheV2( if (options?.lookupOnly) { core.info('Lookup only - skipping download') - return request.key + return response.matchedKey } archivePath = path.join( From 2d2513915c0f108e65ece5b165edf195bccfa73b Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Mon, 25 Nov 2024 16:13:20 +0100 Subject: [PATCH 055/108] Remove unused package Co-authored-by: Rob Herley --- packages/cache/__tests__/restoreCacheV2.test.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/cache/__tests__/restoreCacheV2.test.ts b/packages/cache/__tests__/restoreCacheV2.test.ts index c74d7fab..46a1ee0f 100644 --- a/packages/cache/__tests__/restoreCacheV2.test.ts +++ b/packages/cache/__tests__/restoreCacheV2.test.ts @@ -8,7 +8,6 @@ import {restoreCache} from '../src/cache' import {CacheFilename, CompressionMethod} from '../src/internal/constants' import {CacheServiceClientJSON} from '../src/generated/results/api/v1/cache.twirp' import {BlobDownloadResponseParsed} from '@azure/storage-blob' -// import {executePromisesSequentially} from '@azure/ms-rest-js' jest.mock('../src/internal/cacheHttpClient') jest.mock('../src/internal/cacheUtils') From 0e321b26f42796370493a2863297be202d41d673 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Mon, 25 Nov 2024 07:34:07 -0800 Subject: [PATCH 056/108] Add the download cache file status code to debug log --- packages/cache/src/cache.ts | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index 0f8f370d..ca3b844f 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -3,18 +3,18 @@ import * as path from 'path' import * as utils from './internal/cacheUtils' import * as cacheHttpClient from './internal/cacheHttpClient' import * as 
cacheTwirpClient from './internal/shared/cacheTwirpClient' -import {getCacheServiceVersion, isGhes} from './internal/config' -import {DownloadOptions, UploadOptions} from './options' -import {createTar, extractTar, listTar} from './internal/tar' +import { getCacheServiceVersion, isGhes } from './internal/config' +import { DownloadOptions, UploadOptions } from './options' +import { createTar, extractTar, listTar } from './internal/tar' import { CreateCacheEntryRequest, FinalizeCacheEntryUploadRequest, FinalizeCacheEntryUploadResponse, GetCacheEntryDownloadURLRequest } from './generated/results/api/v1/cache' -import {CacheFileSizeLimit} from './internal/constants' -import {uploadCacheFile} from './internal/blob/upload-cache' -import {downloadCacheFile} from './internal/blob/download-cache' +import { CacheFileSizeLimit } from './internal/constants' +import { uploadCacheFile } from './internal/blob/upload-cache' +import { downloadCacheFile } from './internal/blob/download-cache' export class ValidationError extends Error { constructor(message: string) { super(message) @@ -271,7 +271,8 @@ async function restoreCacheV2( core.debug(`Archive path: ${archivePath}`) core.debug(`Starting download of archive to: ${archivePath}`) - await downloadCacheFile(response.signedDownloadUrl, archivePath) + const downloadResponse = await downloadCacheFile(response.signedDownloadUrl, archivePath) + core.debug(`Download response status: ${downloadResponse._response.status}`) const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath) core.info( @@ -407,9 +408,9 @@ async function saveCacheV1( } else if (reserveCacheResponse?.statusCode === 400) { throw new Error( reserveCacheResponse?.error?.message ?? - `Cache size of ~${Math.round( - archiveFileSize / (1024 * 1024) - )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.` + `Cache size of ~${Math.round( + archiveFileSize / (1024 * 1024) + )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.` ) } else { throw new ReserveCacheError( From 4d31e1048ae67c6b145618fa34b92aad57ab340a Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Mon, 25 Nov 2024 07:34:52 -0800 Subject: [PATCH 057/108] Add the download cache file status code to debug log --- packages/cache/src/cache.ts | 23 +++++++++++++---------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index ca3b844f..8b7a8d02 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -3,18 +3,18 @@ import * as path from 'path' import * as utils from './internal/cacheUtils' import * as cacheHttpClient from './internal/cacheHttpClient' import * as cacheTwirpClient from './internal/shared/cacheTwirpClient' -import { getCacheServiceVersion, isGhes } from './internal/config' -import { DownloadOptions, UploadOptions } from './options' -import { createTar, extractTar, listTar } from './internal/tar' +import {getCacheServiceVersion, isGhes} from './internal/config' +import {DownloadOptions, UploadOptions} from './options' +import {createTar, extractTar, listTar} from './internal/tar' import { CreateCacheEntryRequest, FinalizeCacheEntryUploadRequest, FinalizeCacheEntryUploadResponse, GetCacheEntryDownloadURLRequest } from './generated/results/api/v1/cache' -import { CacheFileSizeLimit } from './internal/constants' -import { uploadCacheFile } from './internal/blob/upload-cache' -import { downloadCacheFile } from './internal/blob/download-cache' +import 
{CacheFileSizeLimit} from './internal/constants' +import {uploadCacheFile} from './internal/blob/upload-cache' +import {downloadCacheFile} from './internal/blob/download-cache' export class ValidationError extends Error { constructor(message: string) { super(message) @@ -271,7 +271,10 @@ async function restoreCacheV2( core.debug(`Archive path: ${archivePath}`) core.debug(`Starting download of archive to: ${archivePath}`) - const downloadResponse = await downloadCacheFile(response.signedDownloadUrl, archivePath) + const downloadResponse = await downloadCacheFile( + response.signedDownloadUrl, + archivePath + ) core.debug(`Download response status: ${downloadResponse._response.status}`) const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath) @@ -408,9 +411,9 @@ async function saveCacheV1( } else if (reserveCacheResponse?.statusCode === 400) { throw new Error( reserveCacheResponse?.error?.message ?? - `Cache size of ~${Math.round( - archiveFileSize / (1024 * 1024) - )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.` + `Cache size of ~${Math.round( + archiveFileSize / (1024 * 1024) + )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.` ) } else { throw new ReserveCacheError( From 35ede8fcf0bc19ecfa7d038ccb54132ed132b301 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Mon, 25 Nov 2024 12:08:07 -0800 Subject: [PATCH 058/108] Add a new debug message for downloads --- .../cache/__tests__/restoreCacheV2.test.ts | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/packages/cache/__tests__/restoreCacheV2.test.ts b/packages/cache/__tests__/restoreCacheV2.test.ts index 46a1ee0f..cc4f9e3c 100644 --- a/packages/cache/__tests__/restoreCacheV2.test.ts +++ b/packages/cache/__tests__/restoreCacheV2.test.ts @@ -174,7 +174,11 @@ test('restore with gzip compressed cache found', async () => { 'downloadCacheFile' ) downloadCacheFileMock.mockReturnValue( - Promise.resolve({} as BlobDownloadResponseParsed) + Promise.resolve({ + _response: { + status: 200 + } + } as BlobDownloadResponseParsed) ) const fileSize = 142 @@ -254,7 +258,11 @@ test('restore with zstd compressed cache found', async () => { 'downloadCacheFile' ) downloadCacheFileMock.mockReturnValue( - Promise.resolve({} as BlobDownloadResponseParsed) + Promise.resolve({ + _response: { + status: 200 + } + } as BlobDownloadResponseParsed) ) const fileSize = 62915000 @@ -335,7 +343,11 @@ test('restore with cache found for restore key', async () => { 'downloadCacheFile' ) downloadCacheFileMock.mockReturnValue( - Promise.resolve({} as BlobDownloadResponseParsed) + Promise.resolve({ + _response: { + status: 200 + } + } as BlobDownloadResponseParsed) ) const fileSize = 142 From 8f606682c2651cedb342d9a4a406b37a8dfe0eb7 Mon Sep 17 00:00:00 2001 From: John Sudol <24583161+johnsudol@users.noreply.github.com> Date: Sun, 24 Nov 2024 18:44:39 +0000 Subject: [PATCH 059/108] Add saveCacheV2 tests --- packages/cache/__tests__/saveCacheV2.test.ts | 311 ++++++++++++++++++ packages/cache/src/cache.ts | 4 +- .../cache/src/internal/blob/upload-cache.ts | 3 +- 3 files changed, 315 insertions(+), 3 deletions(-) create mode 100644 packages/cache/__tests__/saveCacheV2.test.ts diff --git a/packages/cache/__tests__/saveCacheV2.test.ts b/packages/cache/__tests__/saveCacheV2.test.ts new file mode 100644 index 00000000..fdbf596f --- /dev/null +++ b/packages/cache/__tests__/saveCacheV2.test.ts @@ -0,0 +1,311 @@ +import * as core from '@actions/core' 
+import * as path from 'path' +import { saveCache } from '../src/cache' +import * as cacheUtils from '../src/internal/cacheUtils' +import { CacheFilename, CompressionMethod } from '../src/internal/constants' +import * as config from '../src/internal/config' +import * as tar from '../src/internal/tar' +import { CacheServiceClientJSON } from '../src/generated/results/api/v1/cache.twirp' +import * as uploadCacheModule from '../src/internal/blob/upload-cache' +import { BlobUploadCommonResponse } from '@azure/storage-blob' + +let logDebugMock: jest.SpyInstance + +jest.mock('../src/internal/cacheUtils') +jest.mock('../src/internal/tar') + +beforeAll(() => { + process.env['ACTIONS_RUNTIME_TOKEN'] = 'token' + jest.spyOn(console, 'log').mockImplementation(() => { }) + jest.spyOn(core, 'debug').mockImplementation(() => { }) + jest.spyOn(core, 'info').mockImplementation(() => { }) + jest.spyOn(core, 'warning').mockImplementation(() => { }) + jest.spyOn(core, 'error').mockImplementation(() => { }) + jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => { + const actualUtils = jest.requireActual('../src/internal/cacheUtils') + return actualUtils.getCacheFileName(cm) + }) + jest.spyOn(cacheUtils, 'getCacheVersion').mockImplementation((paths, cm) => { + const actualUtils = jest.requireActual('../src/internal/cacheUtils') + return actualUtils.getCacheVersion(paths, cm) + }) + jest.spyOn(cacheUtils, 'resolvePaths').mockImplementation(async filePaths => { + return filePaths.map(x => path.resolve(x)) + }) + jest.spyOn(cacheUtils, 'createTempDirectory').mockImplementation(async () => { + return Promise.resolve('/foo/bar') + }) + + // Ensure that we're using v2 for these tests + jest.spyOn(config, 'getCacheServiceVersion').mockReturnValue('v2') + + logDebugMock = jest.spyOn(core, 'debug') +}) + +afterEach(() => { + expect(logDebugMock).toHaveBeenCalledWith('Cache service version: v2') + jest.clearAllMocks() +}) + +test('save with missing input should fail', async () => { + const paths: string[] = [] + const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' + + await expect(saveCache(paths, primaryKey)).rejects.toThrowError( + `Path Validation Error: At least one directory or file path is required` + ) +}) + +test('save with large cache outputs should fail using v2 saveCache', async () => { + const filePath = 'node_modules' + const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' + const cachePaths = [path.resolve(filePath)] + + const createTarMock = jest.spyOn(tar, 'createTar') + const logWarningMock = jest.spyOn(core, 'warning') + + const cacheSize = 11 * 1024 * 1024 * 1024 //~11GB, over the 10GB limit + jest + .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') + .mockReturnValueOnce(cacheSize) + const compression = CompressionMethod.Gzip + const getCompressionMock = jest + .spyOn(cacheUtils, 'getCompressionMethod') + .mockReturnValueOnce(Promise.resolve(compression)) + + const cacheId = await saveCache([filePath], primaryKey) + expect(cacheId).toBe(-1) + expect(logWarningMock).toHaveBeenCalledTimes(1) + expect(logWarningMock).toHaveBeenCalledWith( + 'Failed to save: Cache size of ~11264 MB (11811160064 B) is over the 10GB limit, not saving cache.' 
+ ) + + const archiveFolder = '/foo/bar' + + expect(createTarMock).toHaveBeenCalledTimes(1) + expect(createTarMock).toHaveBeenCalledWith( + archiveFolder, + cachePaths, + compression + ) + expect(getCompressionMock).toHaveBeenCalledTimes(1) +}) + +test('create cache entry failure', async () => { + const paths = ['node_modules'] + const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' + const infoLogMock = jest.spyOn(core, 'info') + + const createCacheEntryMock = jest + .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') + .mockReturnValue(Promise.resolve({ ok: false, signedUploadUrl: '' })) + + const createTarMock = jest.spyOn(tar, 'createTar') + const finalizeCacheEntryMock = jest.spyOn( + CacheServiceClientJSON.prototype, + 'FinalizeCacheEntryUpload' + ) + const compression = CompressionMethod.Zstd + const getCompressionMock = jest + .spyOn(cacheUtils, 'getCompressionMethod') + .mockReturnValueOnce(Promise.resolve(compression)) + const cacheVersion = cacheUtils.getCacheVersion(paths, compression) + const uploadCacheFileMock = jest + .spyOn(uploadCacheModule, 'uploadCacheFile') + .mockReturnValue( + Promise.resolve({ + _response: { + status: 200 + } + } as BlobUploadCommonResponse) + ) + + const cacheId = await saveCache(paths, primaryKey) + expect(cacheId).toBe(-1) + expect(infoLogMock).toHaveBeenCalledTimes(1) + expect(infoLogMock).toHaveBeenCalledWith( + `Failed to save: Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.` + ) + + expect(createCacheEntryMock).toHaveBeenCalledTimes(1) + expect(createCacheEntryMock).toHaveBeenCalledWith({ + key: primaryKey, + version: cacheVersion + }) + expect(createTarMock).toHaveBeenCalledTimes(1) + expect(getCompressionMock).toHaveBeenCalledTimes(1) + expect(finalizeCacheEntryMock).toHaveBeenCalledTimes(0) + expect(uploadCacheFileMock).toHaveBeenCalledTimes(0) +}) + +test('finalize save cache failure', async () => { + const filePath = 'node_modules' + const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' + const cachePaths = [path.resolve(filePath)] + const logWarningMock = jest.spyOn(core, 'warning') + const signedUploadURL = 'https://blob-storage.local?signed=true' + + const createCacheEntryMock = jest + .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') + .mockReturnValue( + Promise.resolve({ ok: true, signedUploadUrl: signedUploadURL }) + ) + + const createTarMock = jest.spyOn(tar, 'createTar') + + const uploadCacheMock = jest.spyOn(uploadCacheModule, 'uploadCacheFile') + uploadCacheMock.mockReturnValue( + Promise.resolve({ + _response: { + status: 200 + } + } as BlobUploadCommonResponse) + ) + + const compression = CompressionMethod.Zstd + const getCompressionMock = jest + .spyOn(cacheUtils, 'getCompressionMethod') + .mockReturnValueOnce(Promise.resolve(compression)) + + const cacheVersion = cacheUtils.getCacheVersion([filePath], compression) + const archiveFileSize = 1024 + jest + .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') + .mockReturnValueOnce(archiveFileSize) + + const finalizeCacheEntryMock = jest + .spyOn(CacheServiceClientJSON.prototype, 'FinalizeCacheEntryUpload') + .mockReturnValue(Promise.resolve({ ok: false, entryId: '' })) + + const cacheId = await saveCache([filePath], primaryKey) + + expect(createCacheEntryMock).toHaveBeenCalledTimes(1) + expect(createCacheEntryMock).toHaveBeenCalledWith({ + key: primaryKey, + version: cacheVersion + }) + + const archiveFolder = '/foo/bar' + const archiveFile = path.join(archiveFolder, 
CacheFilename.Zstd) + expect(createTarMock).toHaveBeenCalledTimes(1) + expect(createTarMock).toHaveBeenCalledWith( + archiveFolder, + cachePaths, + compression + ) + + expect(uploadCacheMock).toHaveBeenCalledTimes(1) + expect(uploadCacheMock).toHaveBeenCalledWith(signedUploadURL, archiveFile) + expect(getCompressionMock).toHaveBeenCalledTimes(1) + + expect(finalizeCacheEntryMock).toHaveBeenCalledTimes(1) + expect(finalizeCacheEntryMock).toHaveBeenCalledWith({ + key: primaryKey, + version: cacheVersion, + sizeBytes: archiveFileSize.toString() + }) + + expect(cacheId).toBe(-1) + expect(logWarningMock).toHaveBeenCalledTimes(1) + expect(logWarningMock).toHaveBeenCalledWith( + `Failed to save: Unable to finalize cache with key ${primaryKey}, another job may be finalizing this cache.` + ) +}) + +test('save with uploadCache Server error will fail', async () => { + const filePath = 'node_modules' + const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' + const logWarningMock = jest.spyOn(core, 'warning') + const signedUploadURL = 'https://signed-upload-url.com' + jest + .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') + .mockReturnValue( + Promise.resolve({ ok: true, signedUploadUrl: signedUploadURL }) + ) + + jest + .spyOn(uploadCacheModule, 'uploadCacheFile') + .mockReturnValueOnce(Promise.reject(new Error('HTTP Error Occurred'))) + + const cacheId = await saveCache([filePath], primaryKey) + + expect(logWarningMock).toHaveBeenCalledTimes(1) + expect(logWarningMock).toHaveBeenCalledWith( + `Failed to save: HTTP Error Occurred` + ) + expect(cacheId).toBe(-1) +}) + +test('save with valid inputs uploads a cache', async () => { + const filePath = 'node_modules' + const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' + const cachePaths = [path.resolve(filePath)] + const signedUploadURL = 'https://blob-storage.local?signed=true' + const createTarMock = jest.spyOn(tar, 'createTar') + + const archiveFileSize = 1024 + jest + .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') + .mockReturnValueOnce(archiveFileSize) + + const cacheId = 4 + jest + .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') + .mockReturnValue( + Promise.resolve({ ok: true, signedUploadUrl: signedUploadURL }) + ) + + const uploadCacheMock = jest + .spyOn(uploadCacheModule, 'uploadCacheFile') + .mockReturnValue( + Promise.resolve({ + _response: { + status: 200 + } + } as BlobUploadCommonResponse) + ) + + const compression = CompressionMethod.Zstd + const getCompressionMock = jest + .spyOn(cacheUtils, 'getCompressionMethod') + .mockReturnValue(Promise.resolve(compression)) + const cacheVersion = cacheUtils.getCacheVersion([filePath], compression) + + const finalizeCacheEntryMock = jest + .spyOn(CacheServiceClientJSON.prototype, 'FinalizeCacheEntryUpload') + .mockReturnValue(Promise.resolve({ ok: true, entryId: cacheId.toString() })) + + const expectedCacheId = await saveCache([filePath], primaryKey) + + const archiveFolder = '/foo/bar' + const archiveFile = path.join(archiveFolder, CacheFilename.Zstd) + expect(uploadCacheMock).toHaveBeenCalledTimes(1) + expect(uploadCacheMock).toHaveBeenCalledWith(signedUploadURL, archiveFile) + expect(createTarMock).toHaveBeenCalledTimes(1) + expect(createTarMock).toHaveBeenCalledWith( + archiveFolder, + cachePaths, + compression + ) + + expect(finalizeCacheEntryMock).toHaveBeenCalledTimes(1) + expect(finalizeCacheEntryMock).toHaveBeenCalledWith({ + key: primaryKey, + version: cacheVersion, + sizeBytes: archiveFileSize.toString() + }) + + 
expect(getCompressionMock).toHaveBeenCalledTimes(1) + expect(expectedCacheId).toBe(cacheId) +}) + +test('save with non existing path should not save cache using v2 saveCache', async () => { + const path = 'node_modules' + const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' + jest.spyOn(cacheUtils, 'resolvePaths').mockImplementation(async () => { + return [] + }) + await expect(saveCache([path], primaryKey)).rejects.toThrowError( + `Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.` + ) +}) diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index 8b7a8d02..53813f85 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -328,10 +328,10 @@ export async function saveCache( options?: UploadOptions, enableCrossOsArchive = false ): Promise { + const cacheServiceVersion: string = getCacheServiceVersion() + core.debug(`Cache service version: ${cacheServiceVersion}`) checkPaths(paths) checkKey(key) - - const cacheServiceVersion: string = getCacheServiceVersion() switch (cacheServiceVersion) { case 'v2': return await saveCacheV2(paths, key, options, enableCrossOsArchive) diff --git a/packages/cache/src/internal/blob/upload-cache.ts b/packages/cache/src/internal/blob/upload-cache.ts index 15c913ed..a171c9da 100644 --- a/packages/cache/src/internal/blob/upload-cache.ts +++ b/packages/cache/src/internal/blob/upload-cache.ts @@ -1,6 +1,7 @@ import * as core from '@actions/core' import { BlobClient, + BlobUploadCommonResponse, BlockBlobClient, BlockBlobParallelUploadOptions } from '@azure/storage-blob' @@ -8,7 +9,7 @@ import { export async function uploadCacheFile( signedUploadURL: string, archivePath: string -): Promise<{}> { +): Promise { // Specify data transfer options const uploadOptions: BlockBlobParallelUploadOptions = { blockSize: 4 * 1024 * 1024, // 4 MiB max block size From 1f087496cab0a5ec5e38471f1f1b6c00f280f70c Mon Sep 17 00:00:00 2001 From: John Sudol <24583161+johnsudol@users.noreply.github.com> Date: Tue, 26 Nov 2024 00:39:01 +0000 Subject: [PATCH 060/108] Add debug message for uploadResponse --- packages/cache/__tests__/saveCacheV2.test.ts | 2 +- packages/cache/src/cache.ts | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/cache/__tests__/saveCacheV2.test.ts b/packages/cache/__tests__/saveCacheV2.test.ts index fdbf596f..509f97ab 100644 --- a/packages/cache/__tests__/saveCacheV2.test.ts +++ b/packages/cache/__tests__/saveCacheV2.test.ts @@ -56,7 +56,7 @@ test('save with missing input should fail', async () => { ) }) -test('save with large cache outputs should fail using v2 saveCache', async () => { +test('save with large cache outputs should fail using', async () => { const filePath = 'node_modules' const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' const cachePaths = [path.resolve(filePath)] diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index 53813f85..a8e741cd 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -518,7 +518,8 @@ async function saveCacheV2( } core.debug(`Attempting to upload cache located at: ${archivePath}`) - await uploadCacheFile(response.signedUploadUrl, archivePath) + const uploadResponse = await uploadCacheFile(response.signedUploadUrl, archivePath) + core.debug(`Download response status: ${uploadResponse._response.status}`) const finalizeRequest: FinalizeCacheEntryUploadRequest = { key, From 46174ed57357a1af42417af67762fe66445da64c Mon 
Sep 17 00:00:00 2001 From: John Sudol <24583161+johnsudol@users.noreply.github.com> Date: Tue, 26 Nov 2024 00:56:07 +0000 Subject: [PATCH 061/108] run prettier --- packages/cache/__tests__/saveCacheV2.test.ts | 30 ++++++++++---------- packages/cache/src/cache.ts | 5 +++- 2 files changed, 19 insertions(+), 16 deletions(-) diff --git a/packages/cache/__tests__/saveCacheV2.test.ts b/packages/cache/__tests__/saveCacheV2.test.ts index 509f97ab..7263ea89 100644 --- a/packages/cache/__tests__/saveCacheV2.test.ts +++ b/packages/cache/__tests__/saveCacheV2.test.ts @@ -1,13 +1,13 @@ import * as core from '@actions/core' import * as path from 'path' -import { saveCache } from '../src/cache' +import {saveCache} from '../src/cache' import * as cacheUtils from '../src/internal/cacheUtils' -import { CacheFilename, CompressionMethod } from '../src/internal/constants' +import {CacheFilename, CompressionMethod} from '../src/internal/constants' import * as config from '../src/internal/config' import * as tar from '../src/internal/tar' -import { CacheServiceClientJSON } from '../src/generated/results/api/v1/cache.twirp' +import {CacheServiceClientJSON} from '../src/generated/results/api/v1/cache.twirp' import * as uploadCacheModule from '../src/internal/blob/upload-cache' -import { BlobUploadCommonResponse } from '@azure/storage-blob' +import {BlobUploadCommonResponse} from '@azure/storage-blob' let logDebugMock: jest.SpyInstance @@ -16,11 +16,11 @@ jest.mock('../src/internal/tar') beforeAll(() => { process.env['ACTIONS_RUNTIME_TOKEN'] = 'token' - jest.spyOn(console, 'log').mockImplementation(() => { }) - jest.spyOn(core, 'debug').mockImplementation(() => { }) - jest.spyOn(core, 'info').mockImplementation(() => { }) - jest.spyOn(core, 'warning').mockImplementation(() => { }) - jest.spyOn(core, 'error').mockImplementation(() => { }) + jest.spyOn(console, 'log').mockImplementation(() => {}) + jest.spyOn(core, 'debug').mockImplementation(() => {}) + jest.spyOn(core, 'info').mockImplementation(() => {}) + jest.spyOn(core, 'warning').mockImplementation(() => {}) + jest.spyOn(core, 'error').mockImplementation(() => {}) jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => { const actualUtils = jest.requireActual('../src/internal/cacheUtils') return actualUtils.getCacheFileName(cm) @@ -98,7 +98,7 @@ test('create cache entry failure', async () => { const createCacheEntryMock = jest .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') - .mockReturnValue(Promise.resolve({ ok: false, signedUploadUrl: '' })) + .mockReturnValue(Promise.resolve({ok: false, signedUploadUrl: ''})) const createTarMock = jest.spyOn(tar, 'createTar') const finalizeCacheEntryMock = jest.spyOn( @@ -148,7 +148,7 @@ test('finalize save cache failure', async () => { const createCacheEntryMock = jest .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') .mockReturnValue( - Promise.resolve({ ok: true, signedUploadUrl: signedUploadURL }) + Promise.resolve({ok: true, signedUploadUrl: signedUploadURL}) ) const createTarMock = jest.spyOn(tar, 'createTar') @@ -175,7 +175,7 @@ test('finalize save cache failure', async () => { const finalizeCacheEntryMock = jest .spyOn(CacheServiceClientJSON.prototype, 'FinalizeCacheEntryUpload') - .mockReturnValue(Promise.resolve({ ok: false, entryId: '' })) + .mockReturnValue(Promise.resolve({ok: false, entryId: ''})) const cacheId = await saveCache([filePath], primaryKey) @@ -220,7 +220,7 @@ test('save with uploadCache Server error will fail', async () => { jest 
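// Note (a hedged aside, not part of the committed patch): the debug call added
// in PATCH 060 logs the upload response under a "Download" label, and that
// label survives both this prettier pass and the squashed PATCH 069. A
// corrected call would presumably read:
//
//   const uploadResponse = await uploadCacheFile(
//     response.signedUploadUrl,
//     archivePath
//   )
//   core.debug(`Upload response status: ${uploadResponse._response.status}`)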
.spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') .mockReturnValue( - Promise.resolve({ ok: true, signedUploadUrl: signedUploadURL }) + Promise.resolve({ok: true, signedUploadUrl: signedUploadURL}) ) jest @@ -252,7 +252,7 @@ test('save with valid inputs uploads a cache', async () => { jest .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') .mockReturnValue( - Promise.resolve({ ok: true, signedUploadUrl: signedUploadURL }) + Promise.resolve({ok: true, signedUploadUrl: signedUploadURL}) ) const uploadCacheMock = jest @@ -273,7 +273,7 @@ test('save with valid inputs uploads a cache', async () => { const finalizeCacheEntryMock = jest .spyOn(CacheServiceClientJSON.prototype, 'FinalizeCacheEntryUpload') - .mockReturnValue(Promise.resolve({ ok: true, entryId: cacheId.toString() })) + .mockReturnValue(Promise.resolve({ok: true, entryId: cacheId.toString()})) const expectedCacheId = await saveCache([filePath], primaryKey) diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index a8e741cd..0a73059a 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -518,7 +518,10 @@ async function saveCacheV2( } core.debug(`Attempting to upload cache located at: ${archivePath}`) - const uploadResponse = await uploadCacheFile(response.signedUploadUrl, archivePath) + const uploadResponse = await uploadCacheFile( + response.signedUploadUrl, + archivePath + ) core.debug(`Download response status: ${uploadResponse._response.status}`) const finalizeRequest: FinalizeCacheEntryUploadRequest = { From 208dbe21316f19ce222330e23e3329fb716cd5f5 Mon Sep 17 00:00:00 2001 From: John Sudol <24583161+johnsudol@users.noreply.github.com> Date: Tue, 26 Nov 2024 16:36:12 +0000 Subject: [PATCH 062/108] PR feedback --- packages/cache/__tests__/saveCacheV2.test.ts | 73 ++++++++------------ 1 file changed, 30 insertions(+), 43 deletions(-) diff --git a/packages/cache/__tests__/saveCacheV2.test.ts b/packages/cache/__tests__/saveCacheV2.test.ts index 7263ea89..28e82ae0 100644 --- a/packages/cache/__tests__/saveCacheV2.test.ts +++ b/packages/cache/__tests__/saveCacheV2.test.ts @@ -49,17 +49,17 @@ afterEach(() => { test('save with missing input should fail', async () => { const paths: string[] = [] - const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' + const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' - await expect(saveCache(paths, primaryKey)).rejects.toThrowError( + await expect(saveCache(paths, key)).rejects.toThrowError( `Path Validation Error: At least one directory or file path is required` ) }) test('save with large cache outputs should fail using', async () => { - const filePath = 'node_modules' - const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' - const cachePaths = [path.resolve(filePath)] + const paths = 'node_modules' + const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' + const cachePaths = [path.resolve(paths)] const createTarMock = jest.spyOn(tar, 'createTar') const logWarningMock = jest.spyOn(core, 'warning') @@ -73,16 +73,14 @@ test('save with large cache outputs should fail using', async () => { .spyOn(cacheUtils, 'getCompressionMethod') .mockReturnValueOnce(Promise.resolve(compression)) - const cacheId = await saveCache([filePath], primaryKey) + const cacheId = await saveCache([paths], key) expect(cacheId).toBe(-1) - expect(logWarningMock).toHaveBeenCalledTimes(1) expect(logWarningMock).toHaveBeenCalledWith( 'Failed to save: Cache size of ~11264 MB (11811160064 B) is over the 10GB 
limit, not saving cache.' ) const archiveFolder = '/foo/bar' - expect(createTarMock).toHaveBeenCalledTimes(1) expect(createTarMock).toHaveBeenCalledWith( archiveFolder, cachePaths, @@ -93,7 +91,7 @@ test('save with large cache outputs should fail using', async () => { test('create cache entry failure', async () => { const paths = ['node_modules'] - const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' + const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' const infoLogMock = jest.spyOn(core, 'info') const createCacheEntryMock = jest @@ -120,16 +118,14 @@ test('create cache entry failure', async () => { } as BlobUploadCommonResponse) ) - const cacheId = await saveCache(paths, primaryKey) + const cacheId = await saveCache(paths, key) expect(cacheId).toBe(-1) - expect(infoLogMock).toHaveBeenCalledTimes(1) expect(infoLogMock).toHaveBeenCalledWith( - `Failed to save: Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.` + `Failed to save: Unable to reserve cache with key ${key}, another job may be creating this cache.` ) - expect(createCacheEntryMock).toHaveBeenCalledTimes(1) expect(createCacheEntryMock).toHaveBeenCalledWith({ - key: primaryKey, + key, version: cacheVersion }) expect(createTarMock).toHaveBeenCalledTimes(1) @@ -139,9 +135,9 @@ test('create cache entry failure', async () => { }) test('finalize save cache failure', async () => { - const filePath = 'node_modules' - const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' - const cachePaths = [path.resolve(filePath)] + const paths = 'node_modules' + const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' + const cachePaths = [path.resolve(paths)] const logWarningMock = jest.spyOn(core, 'warning') const signedUploadURL = 'https://blob-storage.local?signed=true' @@ -167,7 +163,7 @@ test('finalize save cache failure', async () => { .spyOn(cacheUtils, 'getCompressionMethod') .mockReturnValueOnce(Promise.resolve(compression)) - const cacheVersion = cacheUtils.getCacheVersion([filePath], compression) + const cacheVersion = cacheUtils.getCacheVersion([paths], compression) const archiveFileSize = 1024 jest .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') @@ -177,46 +173,41 @@ test('finalize save cache failure', async () => { .spyOn(CacheServiceClientJSON.prototype, 'FinalizeCacheEntryUpload') .mockReturnValue(Promise.resolve({ok: false, entryId: ''})) - const cacheId = await saveCache([filePath], primaryKey) + const cacheId = await saveCache([paths], key) - expect(createCacheEntryMock).toHaveBeenCalledTimes(1) expect(createCacheEntryMock).toHaveBeenCalledWith({ - key: primaryKey, + key, version: cacheVersion }) const archiveFolder = '/foo/bar' const archiveFile = path.join(archiveFolder, CacheFilename.Zstd) - expect(createTarMock).toHaveBeenCalledTimes(1) expect(createTarMock).toHaveBeenCalledWith( archiveFolder, cachePaths, compression ) - expect(uploadCacheMock).toHaveBeenCalledTimes(1) expect(uploadCacheMock).toHaveBeenCalledWith(signedUploadURL, archiveFile) expect(getCompressionMock).toHaveBeenCalledTimes(1) - expect(finalizeCacheEntryMock).toHaveBeenCalledTimes(1) expect(finalizeCacheEntryMock).toHaveBeenCalledWith({ - key: primaryKey, + key, version: cacheVersion, sizeBytes: archiveFileSize.toString() }) expect(cacheId).toBe(-1) - expect(logWarningMock).toHaveBeenCalledTimes(1) expect(logWarningMock).toHaveBeenCalledWith( - `Failed to save: Unable to finalize cache with key ${primaryKey}, another job may be finalizing this cache.` + `Failed to 
save: Unable to finalize cache with key ${key}, another job may be finalizing this cache.` ) }) test('save with uploadCache Server error will fail', async () => { - const filePath = 'node_modules' - const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' + const paths = 'node_modules' + const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' const logWarningMock = jest.spyOn(core, 'warning') - const signedUploadURL = 'https://signed-upload-url.com' + const signedUploadURL = 'https://blob-storage.local?signed=true' jest .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') .mockReturnValue( @@ -227,9 +218,8 @@ test('save with uploadCache Server error will fail', async () => { .spyOn(uploadCacheModule, 'uploadCacheFile') .mockReturnValueOnce(Promise.reject(new Error('HTTP Error Occurred'))) - const cacheId = await saveCache([filePath], primaryKey) + const cacheId = await saveCache([paths], key) - expect(logWarningMock).toHaveBeenCalledTimes(1) expect(logWarningMock).toHaveBeenCalledWith( `Failed to save: HTTP Error Occurred` ) @@ -237,9 +227,9 @@ test('save with uploadCache Server error will fail', async () => { }) test('save with valid inputs uploads a cache', async () => { - const filePath = 'node_modules' - const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' - const cachePaths = [path.resolve(filePath)] + const paths = 'node_modules' + const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' + const cachePaths = [path.resolve(paths)] const signedUploadURL = 'https://blob-storage.local?signed=true' const createTarMock = jest.spyOn(tar, 'createTar') @@ -269,28 +259,25 @@ test('save with valid inputs uploads a cache', async () => { const getCompressionMock = jest .spyOn(cacheUtils, 'getCompressionMethod') .mockReturnValue(Promise.resolve(compression)) - const cacheVersion = cacheUtils.getCacheVersion([filePath], compression) + const cacheVersion = cacheUtils.getCacheVersion([paths], compression) const finalizeCacheEntryMock = jest .spyOn(CacheServiceClientJSON.prototype, 'FinalizeCacheEntryUpload') .mockReturnValue(Promise.resolve({ok: true, entryId: cacheId.toString()})) - const expectedCacheId = await saveCache([filePath], primaryKey) + const expectedCacheId = await saveCache([paths], key) const archiveFolder = '/foo/bar' const archiveFile = path.join(archiveFolder, CacheFilename.Zstd) - expect(uploadCacheMock).toHaveBeenCalledTimes(1) expect(uploadCacheMock).toHaveBeenCalledWith(signedUploadURL, archiveFile) - expect(createTarMock).toHaveBeenCalledTimes(1) expect(createTarMock).toHaveBeenCalledWith( archiveFolder, cachePaths, compression ) - expect(finalizeCacheEntryMock).toHaveBeenCalledTimes(1) expect(finalizeCacheEntryMock).toHaveBeenCalledWith({ - key: primaryKey, + key, version: cacheVersion, sizeBytes: archiveFileSize.toString() }) @@ -301,11 +288,11 @@ test('save with valid inputs uploads a cache', async () => { test('save with non existing path should not save cache using v2 saveCache', async () => { const path = 'node_modules' - const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' + const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' jest.spyOn(cacheUtils, 'resolvePaths').mockImplementation(async () => { return [] }) - await expect(saveCache([path], primaryKey)).rejects.toThrowError( + await expect(saveCache([path], key)).rejects.toThrowError( `Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.` ) }) From 
94f18eb26eb16f9d1d79470a9146d4a3cd0cff08 Mon Sep 17 00:00:00 2001 From: John Sudol <24583161+johnsudol@users.noreply.github.com> Date: Tue, 26 Nov 2024 23:05:11 +0000 Subject: [PATCH 063/108] Only mock the cacheUtil methods we need --- packages/cache/__tests__/saveCacheV2.test.ts | 24 ++++++++----------- .../cache/src/internal/blob/upload-cache.ts | 11 ++++++++- 2 files changed, 20 insertions(+), 15 deletions(-) diff --git a/packages/cache/__tests__/saveCacheV2.test.ts b/packages/cache/__tests__/saveCacheV2.test.ts index 28e82ae0..23869a1f 100644 --- a/packages/cache/__tests__/saveCacheV2.test.ts +++ b/packages/cache/__tests__/saveCacheV2.test.ts @@ -8,10 +8,10 @@ import * as tar from '../src/internal/tar' import {CacheServiceClientJSON} from '../src/generated/results/api/v1/cache.twirp' import * as uploadCacheModule from '../src/internal/blob/upload-cache' import {BlobUploadCommonResponse} from '@azure/storage-blob' +import {InvalidResponseError} from '../src/internal/shared/errors' let logDebugMock: jest.SpyInstance -jest.mock('../src/internal/cacheUtils') jest.mock('../src/internal/tar') beforeAll(() => { @@ -21,14 +21,6 @@ beforeAll(() => { jest.spyOn(core, 'info').mockImplementation(() => {}) jest.spyOn(core, 'warning').mockImplementation(() => {}) jest.spyOn(core, 'error').mockImplementation(() => {}) - jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => { - const actualUtils = jest.requireActual('../src/internal/cacheUtils') - return actualUtils.getCacheFileName(cm) - }) - jest.spyOn(cacheUtils, 'getCacheVersion').mockImplementation((paths, cm) => { - const actualUtils = jest.requireActual('../src/internal/cacheUtils') - return actualUtils.getCacheVersion(paths, cm) - }) jest.spyOn(cacheUtils, 'resolvePaths').mockImplementation(async filePaths => { return filePaths.map(x => path.resolve(x)) }) @@ -107,6 +99,10 @@ test('create cache entry failure', async () => { const getCompressionMock = jest .spyOn(cacheUtils, 'getCompressionMethod') .mockReturnValueOnce(Promise.resolve(compression)) + const archiveFileSize = 1024 + jest + .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') + .mockReturnValueOnce(archiveFileSize) const cacheVersion = cacheUtils.getCacheVersion(paths, compression) const uploadCacheFileMock = jest .spyOn(uploadCacheModule, 'uploadCacheFile') @@ -214,15 +210,15 @@ test('save with uploadCache Server error will fail', async () => { Promise.resolve({ok: true, signedUploadUrl: signedUploadURL}) ) + const archiveFileSize = 1024 + jest + .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') + .mockReturnValueOnce(archiveFileSize) jest .spyOn(uploadCacheModule, 'uploadCacheFile') - .mockReturnValueOnce(Promise.reject(new Error('HTTP Error Occurred'))) + .mockRejectedValueOnce(new InvalidResponseError('boom')) const cacheId = await saveCache([paths], key) - - expect(logWarningMock).toHaveBeenCalledWith( - `Failed to save: HTTP Error Occurred` - ) expect(cacheId).toBe(-1) }) diff --git a/packages/cache/src/internal/blob/upload-cache.ts b/packages/cache/src/internal/blob/upload-cache.ts index a171c9da..934ecb6f 100644 --- a/packages/cache/src/internal/blob/upload-cache.ts +++ b/packages/cache/src/internal/blob/upload-cache.ts @@ -5,6 +5,7 @@ import { BlockBlobClient, BlockBlobParallelUploadOptions } from '@azure/storage-blob' +import {InvalidResponseError} from '../shared/errors' export async function uploadCacheFile( signedUploadURL: string, @@ -24,5 +25,13 @@ export async function uploadCacheFile( `BlobClient: 
${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}` ) - return blockBlobClient.uploadFile(archivePath, uploadOptions) + const resp = await blockBlobClient.uploadFile(archivePath, uploadOptions) + + if (resp._response.status >= 400) { + throw new InvalidResponseError( + `Upload failed with status code: ${resp._response.status}` + ) + } + + return resp } From 5d0a4af70a2d75b0de58cc726474410a7dc92112 Mon Sep 17 00:00:00 2001 From: John Sudol <24583161+johnsudol@users.noreply.github.com> Date: Tue, 26 Nov 2024 23:33:19 +0000 Subject: [PATCH 064/108] Remove unused mock --- packages/cache/__tests__/saveCacheV2.test.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/cache/__tests__/saveCacheV2.test.ts b/packages/cache/__tests__/saveCacheV2.test.ts index 23869a1f..5ae79d99 100644 --- a/packages/cache/__tests__/saveCacheV2.test.ts +++ b/packages/cache/__tests__/saveCacheV2.test.ts @@ -202,7 +202,6 @@ test('finalize save cache failure', async () => { test('save with uploadCache Server error will fail', async () => { const paths = 'node_modules' const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' - const logWarningMock = jest.spyOn(core, 'warning') const signedUploadURL = 'https://blob-storage.local?signed=true' jest .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') From b050504b2d9a98762b983a04418dc1c7b3c57ecc Mon Sep 17 00:00:00 2001 From: John Sudol <24583161+johnsudol@users.noreply.github.com> Date: Wed, 27 Nov 2024 01:45:46 +0000 Subject: [PATCH 065/108] Add test case for when the uploadFile fails on the blobclient --- packages/cache/__tests__/saveCacheV2.test.ts | 48 +++++++++++++++++-- .../cache/src/internal/blob/upload-cache.ts | 2 +- 2 files changed, 46 insertions(+), 4 deletions(-) diff --git a/packages/cache/__tests__/saveCacheV2.test.ts b/packages/cache/__tests__/saveCacheV2.test.ts index 5ae79d99..67c7f1de 100644 --- a/packages/cache/__tests__/saveCacheV2.test.ts +++ b/packages/cache/__tests__/saveCacheV2.test.ts @@ -14,6 +14,18 @@ let logDebugMock: jest.SpyInstance jest.mock('../src/internal/tar') +let uploadFileMock = jest.fn() +const blockBlobClientMock = jest.fn().mockImplementation(() => ({ + uploadFile: uploadFileMock +})) +jest.mock('@azure/storage-blob', () => ({ + BlobClient: jest.fn().mockImplementation(() => { + return { + getBlockBlobClient: blockBlobClientMock + } + }) +})) + beforeAll(() => { process.env['ACTIONS_RUNTIME_TOKEN'] = 'token' jest.spyOn(console, 'log').mockImplementation(() => {}) @@ -106,7 +118,7 @@ test('create cache entry failure', async () => { const cacheVersion = cacheUtils.getCacheVersion(paths, compression) const uploadCacheFileMock = jest .spyOn(uploadCacheModule, 'uploadCacheFile') - .mockReturnValue( + .mockReturnValueOnce( Promise.resolve({ _response: { status: 200 @@ -146,7 +158,7 @@ test('finalize save cache failure', async () => { const createTarMock = jest.spyOn(tar, 'createTar') const uploadCacheMock = jest.spyOn(uploadCacheModule, 'uploadCacheFile') - uploadCacheMock.mockReturnValue( + uploadCacheMock.mockReturnValueOnce( Promise.resolve({ _response: { status: 200 @@ -221,6 +233,36 @@ test('save with uploadCache Server error will fail', async () => { expect(cacheId).toBe(-1) }) +test('uploadFile returns 500', async () => { + const paths = 'node_modules' + const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' + const signedUploadURL = 'https://blob-storage.local?signed=true' + const logWarningMock = jest.spyOn(core, 'warning') + jest + .spyOn(CacheServiceClientJSON.prototype, 
'CreateCacheEntry') + .mockReturnValue( + Promise.resolve({ok: true, signedUploadUrl: signedUploadURL}) + ) + + const archiveFileSize = 1024 + jest + .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') + .mockReturnValueOnce(archiveFileSize) + jest.spyOn(uploadCacheModule, 'uploadCacheFile').mockRestore() + + uploadFileMock = jest.fn().mockResolvedValueOnce({ + _response: { + status: 500 + } + }) + const cacheId = await saveCache([paths], key) + + expect(logWarningMock).toHaveBeenCalledWith( + 'Failed to save: Upload failed with status code 500' + ) + expect(cacheId).toBe(-1) +}) + test('save with valid inputs uploads a cache', async () => { const paths = 'node_modules' const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' @@ -242,7 +284,7 @@ test('save with valid inputs uploads a cache', async () => { const uploadCacheMock = jest .spyOn(uploadCacheModule, 'uploadCacheFile') - .mockReturnValue( + .mockReturnValueOnce( Promise.resolve({ _response: { status: 200 diff --git a/packages/cache/src/internal/blob/upload-cache.ts b/packages/cache/src/internal/blob/upload-cache.ts index 934ecb6f..b9970c46 100644 --- a/packages/cache/src/internal/blob/upload-cache.ts +++ b/packages/cache/src/internal/blob/upload-cache.ts @@ -29,7 +29,7 @@ export async function uploadCacheFile( if (resp._response.status >= 400) { throw new InvalidResponseError( - `Upload failed with status code: ${resp._response.status}` + `Upload failed with status code ${resp._response.status}` ) } From 27e5cf25146e49cc5006adc7b2d289faeea7392f Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Wed, 27 Nov 2024 04:51:21 -0800 Subject: [PATCH 066/108] Replace downloadCacheFile with downloadCacheStorageSDK --- .../cache/__tests__/restoreCacheV2.test.ts | 75 ++++++++----------- packages/cache/src/cache.ts | 18 +++-- .../cache/src/internal/blob/download-cache.ts | 16 +++- 3 files changed, 59 insertions(+), 50 deletions(-) diff --git a/packages/cache/__tests__/restoreCacheV2.test.ts b/packages/cache/__tests__/restoreCacheV2.test.ts index cc4f9e3c..365afdf0 100644 --- a/packages/cache/__tests__/restoreCacheV2.test.ts +++ b/packages/cache/__tests__/restoreCacheV2.test.ts @@ -3,11 +3,11 @@ import * as path from 'path' import * as tar from '../src/internal/tar' import * as config from '../src/internal/config' import * as cacheUtils from '../src/internal/cacheUtils' -import * as downloadCacheModule from '../src/internal/blob/download-cache' +import * as downloadUtils from '../src/internal/downloadUtils' import {restoreCache} from '../src/cache' import {CacheFilename, CompressionMethod} from '../src/internal/constants' import {CacheServiceClientJSON} from '../src/generated/results/api/v1/cache.twirp' -import {BlobDownloadResponseParsed} from '@azure/storage-blob' +import {DownloadOptions} from '../src/options' jest.mock('../src/internal/cacheHttpClient') jest.mock('../src/internal/cacheUtils') @@ -142,6 +142,7 @@ test('restore with gzip compressed cache found', async () => { const signedDownloadUrl = 'https://blob-storage.local?signed=true' const cacheVersion = 'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b' + const options: DownloadOptions = {timeoutInMs: 30000} const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion') getCacheVersionMock.mockReturnValue(cacheVersion) @@ -169,17 +170,11 @@ test('restore with gzip compressed cache found', async () => { }) const archivePath = path.join(tempPath, CacheFilename.Gzip) - const downloadCacheFileMock = jest.spyOn( - 
downloadCacheModule, - 'downloadCacheFile' - ) - downloadCacheFileMock.mockReturnValue( - Promise.resolve({ - _response: { - status: 200 - } - } as BlobDownloadResponseParsed) + const downloadCacheStorageSDKMock = jest.spyOn( + downloadUtils, + 'downloadCacheStorageSDK' ) + downloadCacheStorageSDKMock.mockReturnValue(Promise.resolve()) const fileSize = 142 const getArchiveFileSizeInBytesMock = jest @@ -203,9 +198,10 @@ test('restore with gzip compressed cache found', async () => { version: cacheVersion }) expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) - expect(downloadCacheFileMock).toHaveBeenCalledWith( + expect(downloadCacheStorageSDKMock).toHaveBeenCalledWith( signedDownloadUrl, - archivePath + archivePath, + options ) expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`) @@ -226,6 +222,7 @@ test('restore with zstd compressed cache found', async () => { const signedDownloadUrl = 'https://blob-storage.local?signed=true' const cacheVersion = '8e2e96a184cb0cd6b48285b176c06a418f3d7fce14c29d9886fd1bb4f05c513d' + const options: DownloadOptions = {timeoutInMs: 30000} const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion') getCacheVersionMock.mockReturnValue(cacheVersion) @@ -253,17 +250,11 @@ test('restore with zstd compressed cache found', async () => { }) const archivePath = path.join(tempPath, CacheFilename.Zstd) - const downloadCacheFileMock = jest.spyOn( - downloadCacheModule, - 'downloadCacheFile' - ) - downloadCacheFileMock.mockReturnValue( - Promise.resolve({ - _response: { - status: 200 - } - } as BlobDownloadResponseParsed) + const downloadCacheStorageSDKMock = jest.spyOn( + downloadUtils, + 'downloadCacheStorageSDK' ) + downloadCacheStorageSDKMock.mockReturnValue(Promise.resolve()) const fileSize = 62915000 const getArchiveFileSizeInBytesMock = jest @@ -287,9 +278,10 @@ test('restore with zstd compressed cache found', async () => { version: cacheVersion }) expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) - expect(downloadCacheFileMock).toHaveBeenCalledWith( + expect(downloadCacheStorageSDKMock).toHaveBeenCalledWith( signedDownloadUrl, - archivePath + archivePath, + options ) expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`) @@ -311,6 +303,7 @@ test('restore with cache found for restore key', async () => { const signedDownloadUrl = 'https://blob-storage.local?signed=true' const cacheVersion = 'b8b58e9bd7b1e8f83d9f05c7e06ea865ba44a0330e07a14db74ac74386677bed' + const options: DownloadOptions = {timeoutInMs: 30000} const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion') getCacheVersionMock.mockReturnValue(cacheVersion) @@ -338,17 +331,11 @@ test('restore with cache found for restore key', async () => { }) const archivePath = path.join(tempPath, CacheFilename.Gzip) - const downloadCacheFileMock = jest.spyOn( - downloadCacheModule, - 'downloadCacheFile' - ) - downloadCacheFileMock.mockReturnValue( - Promise.resolve({ - _response: { - status: 200 - } - } as BlobDownloadResponseParsed) + const downloadCacheStorageSDKMock = jest.spyOn( + downloadUtils, + 'downloadCacheStorageSDK' ) + downloadCacheStorageSDKMock.mockReturnValue(Promise.resolve()) const fileSize = 142 const getArchiveFileSizeInBytesMock = jest @@ -372,9 +359,10 @@ test('restore with cache found for restore key', async () => { version: cacheVersion }) 
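// For context: the assertions below pin down the swap this patch makes from
// downloadCacheFile to the storage-SDK helper. Mirroring the cache.ts hunk
// later in this patch, the v2 restore path now downloads roughly as follows,
// falling back to a 30s timeout when the caller passes no options:
//
//   await downloadCacheStorageSDK(
//     response.signedDownloadUrl,
//     archivePath,
//     options ||
//       ({
//         timeoutInMs: 30000
//       } as DownloadOptions)
//   )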
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) - expect(downloadCacheFileMock).toHaveBeenCalledWith( + expect(downloadCacheStorageSDKMock).toHaveBeenCalledWith( signedDownloadUrl, - archivePath + archivePath, + options ) expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`) @@ -391,11 +379,11 @@ test('restore with cache found for restore key', async () => { test('restore with dry run', async () => { const paths = ['node_modules'] const key = 'node-test' - const options = {lookupOnly: true} const compressionMethod = CompressionMethod.Gzip const signedDownloadUrl = 'https://blob-storage.local?signed=true' const cacheVersion = 'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b' + const options: DownloadOptions = {lookupOnly: true, timeoutInMs: 30000} const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion') getCacheVersionMock.mockReturnValue(cacheVersion) @@ -416,10 +404,11 @@ test('restore with dry run', async () => { ) const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') - const downloadCacheFileMock = jest.spyOn( - downloadCacheModule, - 'downloadCacheFile' + const downloadCacheStorageSDKMock = jest.spyOn( + downloadUtils, + 'downloadCacheStorageSDK' ) + downloadCacheStorageSDKMock.mockReturnValue(Promise.resolve()) const cacheKey = await restoreCache(paths, key, undefined, options) @@ -438,5 +427,5 @@ test('restore with dry run', async () => { // creating a tempDir and downloading the cache are skipped expect(createTempDirectoryMock).toHaveBeenCalledTimes(0) - expect(downloadCacheFileMock).toHaveBeenCalledTimes(0) + expect(downloadCacheStorageSDKMock).toHaveBeenCalledTimes(0) }) diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index 0a73059a..0e17e3af 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -3,6 +3,7 @@ import * as path from 'path' import * as utils from './internal/cacheUtils' import * as cacheHttpClient from './internal/cacheHttpClient' import * as cacheTwirpClient from './internal/shared/cacheTwirpClient' +import {downloadCacheStorageSDK} from './internal/downloadUtils' import {getCacheServiceVersion, isGhes} from './internal/config' import {DownloadOptions, UploadOptions} from './options' import {createTar, extractTar, listTar} from './internal/tar' @@ -14,7 +15,6 @@ import { } from './generated/results/api/v1/cache' import {CacheFileSizeLimit} from './internal/constants' import {uploadCacheFile} from './internal/blob/upload-cache' -import {downloadCacheFile} from './internal/blob/download-cache' export class ValidationError extends Error { constructor(message: string) { super(message) @@ -161,11 +161,14 @@ async function restoreCacheV1( ) core.debug(`Archive Path: ${archivePath}`) - // Download the cache from the cache entry + // Download the cache archive from from blob storage await cacheHttpClient.downloadCache( cacheEntry.archiveLocation, archivePath, - options + options || + ({ + timeoutInMs: 30000 + } as DownloadOptions) ) if (core.isDebug()) { @@ -271,11 +274,14 @@ async function restoreCacheV2( core.debug(`Archive path: ${archivePath}`) core.debug(`Starting download of archive to: ${archivePath}`) - const downloadResponse = await downloadCacheFile( + await downloadCacheStorageSDK( response.signedDownloadUrl, - archivePath + archivePath, + options || + ({ + timeoutInMs: 30000 + } as DownloadOptions) ) - core.debug(`Download response status: 
${downloadResponse._response.status}`) const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath) core.info( diff --git a/packages/cache/src/internal/blob/download-cache.ts b/packages/cache/src/internal/blob/download-cache.ts index e974cb2f..38384b10 100644 --- a/packages/cache/src/internal/blob/download-cache.ts +++ b/packages/cache/src/internal/blob/download-cache.ts @@ -22,10 +22,24 @@ export async function downloadCacheFile( `BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}` ) - return blockBlobClient.downloadToFile( + const response = await blockBlobClient.downloadToFile( archivePath, 0, undefined, downloadOptions ) + + switch (response._response.status) { + case 200: + core.info(`Cache downloaded from "${signedUploadURL}"`) + break + case 304: + core.info(`Cache not found at "${signedUploadURL}"`) + break + default: + core.info(`Unexpected HTTP response: ${response._response.status}`) + break + } + + return response } From af3981c955a097619c92e0db536b44cbaea0187f Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Wed, 27 Nov 2024 05:50:01 -0800 Subject: [PATCH 067/108] Update the useragent of the old http client to pass cache version --- packages/cache/src/cache.ts | 31 +++++++++---------- .../cache/src/internal/cacheHttpClient.ts | 16 +++++----- 2 files changed, 22 insertions(+), 25 deletions(-) diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index 0e17e3af..05dd4a0d 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -3,18 +3,18 @@ import * as path from 'path' import * as utils from './internal/cacheUtils' import * as cacheHttpClient from './internal/cacheHttpClient' import * as cacheTwirpClient from './internal/shared/cacheTwirpClient' -import {downloadCacheStorageSDK} from './internal/downloadUtils' -import {getCacheServiceVersion, isGhes} from './internal/config' -import {DownloadOptions, UploadOptions} from './options' -import {createTar, extractTar, listTar} from './internal/tar' +import { downloadCacheStorageSDK } from './internal/downloadUtils' +import { getCacheServiceVersion, isGhes } from './internal/config' +import { DownloadOptions, UploadOptions } from './options' +import { createTar, extractTar, listTar } from './internal/tar' import { CreateCacheEntryRequest, FinalizeCacheEntryUploadRequest, FinalizeCacheEntryUploadResponse, GetCacheEntryDownloadURLRequest } from './generated/results/api/v1/cache' -import {CacheFileSizeLimit} from './internal/constants' -import {uploadCacheFile} from './internal/blob/upload-cache' +import { CacheFileSizeLimit } from './internal/constants' +import { uploadCacheFile } from './internal/blob/upload-cache' export class ValidationError extends Error { constructor(message: string) { super(message) @@ -161,14 +161,11 @@ async function restoreCacheV1( ) core.debug(`Archive Path: ${archivePath}`) - // Download the cache archive from from blob storage + // Download the cache from the cache entry await cacheHttpClient.downloadCache( cacheEntry.archiveLocation, archivePath, - options || - ({ - timeoutInMs: 30000 - } as DownloadOptions) + options ) if (core.isDebug()) { @@ -278,9 +275,9 @@ async function restoreCacheV2( response.signedDownloadUrl, archivePath, options || - ({ - timeoutInMs: 30000 - } as DownloadOptions) + ({ + timeoutInMs: 30000 + } as DownloadOptions) ) const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath) @@ -417,9 +414,9 @@ async function saveCacheV1( } else if 
(reserveCacheResponse?.statusCode === 400) { throw new Error( reserveCacheResponse?.error?.message ?? - `Cache size of ~${Math.round( - archiveFileSize / (1024 * 1024) - )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.` + `Cache size of ~${Math.round( + archiveFileSize / (1024 * 1024) + )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.` ) } else { throw new ReserveCacheError( diff --git a/packages/cache/src/internal/cacheHttpClient.ts b/packages/cache/src/internal/cacheHttpClient.ts index 051348ec..6cb8ae7e 100644 --- a/packages/cache/src/internal/cacheHttpClient.ts +++ b/packages/cache/src/internal/cacheHttpClient.ts @@ -1,12 +1,12 @@ import * as core from '@actions/core' -import {HttpClient} from '@actions/http-client' -import {BearerCredentialHandler} from '@actions/http-client/lib/auth' +import { HttpClient } from '@actions/http-client' +import { BearerCredentialHandler } from '@actions/http-client/lib/auth' import { RequestOptions, TypedResponse } from '@actions/http-client/lib/interfaces' import * as fs from 'fs' -import {URL} from 'url' +import { URL } from 'url' import * as utils from './cacheUtils' import { ArtifactCacheEntry, @@ -33,7 +33,8 @@ import { retryHttpClientResponse, retryTypedResponse } from './requestUtils' -import {getCacheServiceURL} from './config' +import { getCacheServiceURL } from './config' +import { getUserAgentString } from './shared/user-agent' function getCacheApiUrl(resource: string): string { const baseUrl: string = getCacheServiceURL() @@ -65,7 +66,7 @@ function createHttpClient(): HttpClient { const bearerCredentialHandler = new BearerCredentialHandler(token) return new HttpClient( - 'actions/cache', + getUserAgentString(), [bearerCredentialHandler], getRequestOptions() ) @@ -216,8 +217,7 @@ async function uploadChunk( end: number ): Promise { core.debug( - `Uploading chunk of size ${ - end - start + 1 + `Uploading chunk of size ${end - start + 1 } bytes at offset ${start} with content range: ${getContentRange( start, end @@ -313,7 +313,7 @@ async function commitCache( cacheId: number, filesize: number ): Promise> { - const commitCacheRequest: CommitCacheRequest = {size: filesize} + const commitCacheRequest: CommitCacheRequest = { size: filesize } return await retryTypedResponse('commitCache', async () => httpClient.postJson( getCacheApiUrl(`caches/${cacheId.toString()}`), From 35d87ab1299a1c5d1adb11612007c560ff5aed8a Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Wed, 27 Nov 2024 05:58:22 -0800 Subject: [PATCH 068/108] Refactor code formatting for consistency and readability --- packages/cache/src/cache.ts | 24 +++++++++---------- .../cache/src/internal/cacheHttpClient.ts | 15 ++++++------ 2 files changed, 20 insertions(+), 19 deletions(-) diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index 05dd4a0d..fada75f2 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -3,18 +3,18 @@ import * as path from 'path' import * as utils from './internal/cacheUtils' import * as cacheHttpClient from './internal/cacheHttpClient' import * as cacheTwirpClient from './internal/shared/cacheTwirpClient' -import { downloadCacheStorageSDK } from './internal/downloadUtils' -import { getCacheServiceVersion, isGhes } from './internal/config' -import { DownloadOptions, UploadOptions } from './options' -import { createTar, extractTar, listTar } from './internal/tar' +import {downloadCacheStorageSDK} from './internal/downloadUtils' +import 
{getCacheServiceVersion, isGhes} from './internal/config' +import {DownloadOptions, UploadOptions} from './options' +import {createTar, extractTar, listTar} from './internal/tar' import { CreateCacheEntryRequest, FinalizeCacheEntryUploadRequest, FinalizeCacheEntryUploadResponse, GetCacheEntryDownloadURLRequest } from './generated/results/api/v1/cache' -import { CacheFileSizeLimit } from './internal/constants' -import { uploadCacheFile } from './internal/blob/upload-cache' +import {CacheFileSizeLimit} from './internal/constants' +import {uploadCacheFile} from './internal/blob/upload-cache' export class ValidationError extends Error { constructor(message: string) { super(message) @@ -275,9 +275,9 @@ async function restoreCacheV2( response.signedDownloadUrl, archivePath, options || - ({ - timeoutInMs: 30000 - } as DownloadOptions) + ({ + timeoutInMs: 30000 + } as DownloadOptions) ) const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath) @@ -414,9 +414,9 @@ async function saveCacheV1( } else if (reserveCacheResponse?.statusCode === 400) { throw new Error( reserveCacheResponse?.error?.message ?? - `Cache size of ~${Math.round( - archiveFileSize / (1024 * 1024) - )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.` + `Cache size of ~${Math.round( + archiveFileSize / (1024 * 1024) + )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.` ) } else { throw new ReserveCacheError( diff --git a/packages/cache/src/internal/cacheHttpClient.ts b/packages/cache/src/internal/cacheHttpClient.ts index 6cb8ae7e..c219000b 100644 --- a/packages/cache/src/internal/cacheHttpClient.ts +++ b/packages/cache/src/internal/cacheHttpClient.ts @@ -1,12 +1,12 @@ import * as core from '@actions/core' -import { HttpClient } from '@actions/http-client' -import { BearerCredentialHandler } from '@actions/http-client/lib/auth' +import {HttpClient} from '@actions/http-client' +import {BearerCredentialHandler} from '@actions/http-client/lib/auth' import { RequestOptions, TypedResponse } from '@actions/http-client/lib/interfaces' import * as fs from 'fs' -import { URL } from 'url' +import {URL} from 'url' import * as utils from './cacheUtils' import { ArtifactCacheEntry, @@ -33,8 +33,8 @@ import { retryHttpClientResponse, retryTypedResponse } from './requestUtils' -import { getCacheServiceURL } from './config' -import { getUserAgentString } from './shared/user-agent' +import {getCacheServiceURL} from './config' +import {getUserAgentString} from './shared/user-agent' function getCacheApiUrl(resource: string): string { const baseUrl: string = getCacheServiceURL() @@ -217,7 +217,8 @@ async function uploadChunk( end: number ): Promise { core.debug( - `Uploading chunk of size ${end - start + 1 + `Uploading chunk of size ${ + end - start + 1 } bytes at offset ${start} with content range: ${getContentRange( start, end @@ -313,7 +314,7 @@ async function commitCache( cacheId: number, filesize: number ): Promise> { - const commitCacheRequest: CommitCacheRequest = { size: filesize } + const commitCacheRequest: CommitCacheRequest = {size: filesize} return await retryTypedResponse('commitCache', async () => httpClient.postJson( getCacheApiUrl(`caches/${cacheId.toString()}`), From 9cc30cb0d3d4bb7b4e5a59eaf4bfe029889bdbfb Mon Sep 17 00:00:00 2001 From: John Sudol <24583161+johnsudol@users.noreply.github.com> Date: Wed, 27 Nov 2024 09:30:36 -0500 Subject: [PATCH 069/108] Add `saveCacheV2` tests (#1879) --- packages/cache/__tests__/saveCacheV2.test.ts | 335 ++++++++++++++++++ 
packages/cache/src/cache.ts | 10 +- .../cache/src/internal/blob/upload-cache.ts | 14 +- 3 files changed, 354 insertions(+), 5 deletions(-) create mode 100644 packages/cache/__tests__/saveCacheV2.test.ts diff --git a/packages/cache/__tests__/saveCacheV2.test.ts b/packages/cache/__tests__/saveCacheV2.test.ts new file mode 100644 index 00000000..67c7f1de --- /dev/null +++ b/packages/cache/__tests__/saveCacheV2.test.ts @@ -0,0 +1,335 @@ +import * as core from '@actions/core' +import * as path from 'path' +import {saveCache} from '../src/cache' +import * as cacheUtils from '../src/internal/cacheUtils' +import {CacheFilename, CompressionMethod} from '../src/internal/constants' +import * as config from '../src/internal/config' +import * as tar from '../src/internal/tar' +import {CacheServiceClientJSON} from '../src/generated/results/api/v1/cache.twirp' +import * as uploadCacheModule from '../src/internal/blob/upload-cache' +import {BlobUploadCommonResponse} from '@azure/storage-blob' +import {InvalidResponseError} from '../src/internal/shared/errors' + +let logDebugMock: jest.SpyInstance + +jest.mock('../src/internal/tar') + +let uploadFileMock = jest.fn() +const blockBlobClientMock = jest.fn().mockImplementation(() => ({ + uploadFile: uploadFileMock +})) +jest.mock('@azure/storage-blob', () => ({ + BlobClient: jest.fn().mockImplementation(() => { + return { + getBlockBlobClient: blockBlobClientMock + } + }) +})) + +beforeAll(() => { + process.env['ACTIONS_RUNTIME_TOKEN'] = 'token' + jest.spyOn(console, 'log').mockImplementation(() => {}) + jest.spyOn(core, 'debug').mockImplementation(() => {}) + jest.spyOn(core, 'info').mockImplementation(() => {}) + jest.spyOn(core, 'warning').mockImplementation(() => {}) + jest.spyOn(core, 'error').mockImplementation(() => {}) + jest.spyOn(cacheUtils, 'resolvePaths').mockImplementation(async filePaths => { + return filePaths.map(x => path.resolve(x)) + }) + jest.spyOn(cacheUtils, 'createTempDirectory').mockImplementation(async () => { + return Promise.resolve('/foo/bar') + }) + + // Ensure that we're using v2 for these tests + jest.spyOn(config, 'getCacheServiceVersion').mockReturnValue('v2') + + logDebugMock = jest.spyOn(core, 'debug') +}) + +afterEach(() => { + expect(logDebugMock).toHaveBeenCalledWith('Cache service version: v2') + jest.clearAllMocks() +}) + +test('save with missing input should fail', async () => { + const paths: string[] = [] + const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' + + await expect(saveCache(paths, key)).rejects.toThrowError( + `Path Validation Error: At least one directory or file path is required` + ) +}) + +test('save with large cache outputs should fail using', async () => { + const paths = 'node_modules' + const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' + const cachePaths = [path.resolve(paths)] + + const createTarMock = jest.spyOn(tar, 'createTar') + const logWarningMock = jest.spyOn(core, 'warning') + + const cacheSize = 11 * 1024 * 1024 * 1024 //~11GB, over the 10GB limit + jest + .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') + .mockReturnValueOnce(cacheSize) + const compression = CompressionMethod.Gzip + const getCompressionMock = jest + .spyOn(cacheUtils, 'getCompressionMethod') + .mockReturnValueOnce(Promise.resolve(compression)) + + const cacheId = await saveCache([paths], key) + expect(cacheId).toBe(-1) + expect(logWarningMock).toHaveBeenCalledWith( + 'Failed to save: Cache size of ~11264 MB (11811160064 B) is over the 10GB limit, not saving cache.' 
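// Sanity check on the expected warning: 11 * 1024^3 = 11,811,160,064 B, which
// is ~11,264 MB, over the 10 GB cap. The guard exercised here sits in
// saveCacheV2; a sketch (assuming the CacheFileSizeLimit constant imported in
// cache.ts, not the verbatim implementation):
//
//   if (archiveFileSize > CacheFileSizeLimit && !isGhes()) {
//     throw new Error(
//       `Cache size of ~${Math.round(
//         archiveFileSize / (1024 * 1024)
//       )} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`
//     )
//   }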
+ ) + + const archiveFolder = '/foo/bar' + + expect(createTarMock).toHaveBeenCalledWith( + archiveFolder, + cachePaths, + compression + ) + expect(getCompressionMock).toHaveBeenCalledTimes(1) +}) + +test('create cache entry failure', async () => { + const paths = ['node_modules'] + const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' + const infoLogMock = jest.spyOn(core, 'info') + + const createCacheEntryMock = jest + .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') + .mockReturnValue(Promise.resolve({ok: false, signedUploadUrl: ''})) + + const createTarMock = jest.spyOn(tar, 'createTar') + const finalizeCacheEntryMock = jest.spyOn( + CacheServiceClientJSON.prototype, + 'FinalizeCacheEntryUpload' + ) + const compression = CompressionMethod.Zstd + const getCompressionMock = jest + .spyOn(cacheUtils, 'getCompressionMethod') + .mockReturnValueOnce(Promise.resolve(compression)) + const archiveFileSize = 1024 + jest + .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') + .mockReturnValueOnce(archiveFileSize) + const cacheVersion = cacheUtils.getCacheVersion(paths, compression) + const uploadCacheFileMock = jest + .spyOn(uploadCacheModule, 'uploadCacheFile') + .mockReturnValueOnce( + Promise.resolve({ + _response: { + status: 200 + } + } as BlobUploadCommonResponse) + ) + + const cacheId = await saveCache(paths, key) + expect(cacheId).toBe(-1) + expect(infoLogMock).toHaveBeenCalledWith( + `Failed to save: Unable to reserve cache with key ${key}, another job may be creating this cache.` + ) + + expect(createCacheEntryMock).toHaveBeenCalledWith({ + key, + version: cacheVersion + }) + expect(createTarMock).toHaveBeenCalledTimes(1) + expect(getCompressionMock).toHaveBeenCalledTimes(1) + expect(finalizeCacheEntryMock).toHaveBeenCalledTimes(0) + expect(uploadCacheFileMock).toHaveBeenCalledTimes(0) +}) + +test('finalize save cache failure', async () => { + const paths = 'node_modules' + const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' + const cachePaths = [path.resolve(paths)] + const logWarningMock = jest.spyOn(core, 'warning') + const signedUploadURL = 'https://blob-storage.local?signed=true' + + const createCacheEntryMock = jest + .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') + .mockReturnValue( + Promise.resolve({ok: true, signedUploadUrl: signedUploadURL}) + ) + + const createTarMock = jest.spyOn(tar, 'createTar') + + const uploadCacheMock = jest.spyOn(uploadCacheModule, 'uploadCacheFile') + uploadCacheMock.mockReturnValueOnce( + Promise.resolve({ + _response: { + status: 200 + } + } as BlobUploadCommonResponse) + ) + + const compression = CompressionMethod.Zstd + const getCompressionMock = jest + .spyOn(cacheUtils, 'getCompressionMethod') + .mockReturnValueOnce(Promise.resolve(compression)) + + const cacheVersion = cacheUtils.getCacheVersion([paths], compression) + const archiveFileSize = 1024 + jest + .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') + .mockReturnValueOnce(archiveFileSize) + + const finalizeCacheEntryMock = jest + .spyOn(CacheServiceClientJSON.prototype, 'FinalizeCacheEntryUpload') + .mockReturnValue(Promise.resolve({ok: false, entryId: ''})) + + const cacheId = await saveCache([paths], key) + + expect(createCacheEntryMock).toHaveBeenCalledWith({ + key, + version: cacheVersion + }) + + const archiveFolder = '/foo/bar' + const archiveFile = path.join(archiveFolder, CacheFilename.Zstd) + expect(createTarMock).toHaveBeenCalledWith( + archiveFolder, + cachePaths, + compression + ) + + 
expect(uploadCacheMock).toHaveBeenCalledWith(signedUploadURL, archiveFile) + expect(getCompressionMock).toHaveBeenCalledTimes(1) + + expect(finalizeCacheEntryMock).toHaveBeenCalledWith({ + key, + version: cacheVersion, + sizeBytes: archiveFileSize.toString() + }) + + expect(cacheId).toBe(-1) + expect(logWarningMock).toHaveBeenCalledWith( + `Failed to save: Unable to finalize cache with key ${key}, another job may be finalizing this cache.` + ) +}) + +test('save with uploadCache Server error will fail', async () => { + const paths = 'node_modules' + const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' + const signedUploadURL = 'https://blob-storage.local?signed=true' + jest + .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') + .mockReturnValue( + Promise.resolve({ok: true, signedUploadUrl: signedUploadURL}) + ) + + const archiveFileSize = 1024 + jest + .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') + .mockReturnValueOnce(archiveFileSize) + jest + .spyOn(uploadCacheModule, 'uploadCacheFile') + .mockRejectedValueOnce(new InvalidResponseError('boom')) + + const cacheId = await saveCache([paths], key) + expect(cacheId).toBe(-1) +}) + +test('uploadFile returns 500', async () => { + const paths = 'node_modules' + const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' + const signedUploadURL = 'https://blob-storage.local?signed=true' + const logWarningMock = jest.spyOn(core, 'warning') + jest + .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') + .mockReturnValue( + Promise.resolve({ok: true, signedUploadUrl: signedUploadURL}) + ) + + const archiveFileSize = 1024 + jest + .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') + .mockReturnValueOnce(archiveFileSize) + jest.spyOn(uploadCacheModule, 'uploadCacheFile').mockRestore() + + uploadFileMock = jest.fn().mockResolvedValueOnce({ + _response: { + status: 500 + } + }) + const cacheId = await saveCache([paths], key) + + expect(logWarningMock).toHaveBeenCalledWith( + 'Failed to save: Upload failed with status code 500' + ) + expect(cacheId).toBe(-1) +}) + +test('save with valid inputs uploads a cache', async () => { + const paths = 'node_modules' + const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' + const cachePaths = [path.resolve(paths)] + const signedUploadURL = 'https://blob-storage.local?signed=true' + const createTarMock = jest.spyOn(tar, 'createTar') + + const archiveFileSize = 1024 + jest + .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') + .mockReturnValueOnce(archiveFileSize) + + const cacheId = 4 + jest + .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') + .mockReturnValue( + Promise.resolve({ok: true, signedUploadUrl: signedUploadURL}) + ) + + const uploadCacheMock = jest + .spyOn(uploadCacheModule, 'uploadCacheFile') + .mockReturnValueOnce( + Promise.resolve({ + _response: { + status: 200 + } + } as BlobUploadCommonResponse) + ) + + const compression = CompressionMethod.Zstd + const getCompressionMock = jest + .spyOn(cacheUtils, 'getCompressionMethod') + .mockReturnValue(Promise.resolve(compression)) + const cacheVersion = cacheUtils.getCacheVersion([paths], compression) + + const finalizeCacheEntryMock = jest + .spyOn(CacheServiceClientJSON.prototype, 'FinalizeCacheEntryUpload') + .mockReturnValue(Promise.resolve({ok: true, entryId: cacheId.toString()})) + + const expectedCacheId = await saveCache([paths], key) + + const archiveFolder = '/foo/bar' + const archiveFile = path.join(archiveFolder, CacheFilename.Zstd) + 
expect(uploadCacheMock).toHaveBeenCalledWith(signedUploadURL, archiveFile) + expect(createTarMock).toHaveBeenCalledWith( + archiveFolder, + cachePaths, + compression + ) + + expect(finalizeCacheEntryMock).toHaveBeenCalledWith({ + key, + version: cacheVersion, + sizeBytes: archiveFileSize.toString() + }) + + expect(getCompressionMock).toHaveBeenCalledTimes(1) + expect(expectedCacheId).toBe(cacheId) +}) + +test('save with non existing path should not save cache using v2 saveCache', async () => { + const path = 'node_modules' + const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' + jest.spyOn(cacheUtils, 'resolvePaths').mockImplementation(async () => { + return [] + }) + await expect(saveCache([path], key)).rejects.toThrowError( + `Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.` + ) +}) diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index 8b7a8d02..0a73059a 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -328,10 +328,10 @@ export async function saveCache( options?: UploadOptions, enableCrossOsArchive = false ): Promise { + const cacheServiceVersion: string = getCacheServiceVersion() + core.debug(`Cache service version: ${cacheServiceVersion}`) checkPaths(paths) checkKey(key) - - const cacheServiceVersion: string = getCacheServiceVersion() switch (cacheServiceVersion) { case 'v2': return await saveCacheV2(paths, key, options, enableCrossOsArchive) @@ -518,7 +518,11 @@ async function saveCacheV2( } core.debug(`Attempting to upload cache located at: ${archivePath}`) - await uploadCacheFile(response.signedUploadUrl, archivePath) + const uploadResponse = await uploadCacheFile( + response.signedUploadUrl, + archivePath + ) + core.debug(`Download response status: ${uploadResponse._response.status}`) const finalizeRequest: FinalizeCacheEntryUploadRequest = { key, diff --git a/packages/cache/src/internal/blob/upload-cache.ts b/packages/cache/src/internal/blob/upload-cache.ts index 15c913ed..b9970c46 100644 --- a/packages/cache/src/internal/blob/upload-cache.ts +++ b/packages/cache/src/internal/blob/upload-cache.ts @@ -1,14 +1,16 @@ import * as core from '@actions/core' import { BlobClient, + BlobUploadCommonResponse, BlockBlobClient, BlockBlobParallelUploadOptions } from '@azure/storage-blob' +import {InvalidResponseError} from '../shared/errors' export async function uploadCacheFile( signedUploadURL: string, archivePath: string -): Promise<{}> { +): Promise { // Specify data transfer options const uploadOptions: BlockBlobParallelUploadOptions = { blockSize: 4 * 1024 * 1024, // 4 MiB max block size @@ -23,5 +25,13 @@ export async function uploadCacheFile( `BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}` ) - return blockBlobClient.uploadFile(archivePath, uploadOptions) + const resp = await blockBlobClient.uploadFile(archivePath, uploadOptions) + + if (resp._response.status >= 400) { + throw new InvalidResponseError( + `Upload failed with status code ${resp._response.status}` + ) + } + + return resp } From c5a5de05f6ebb26bc5bd41837b2a5536d36b8132 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Thu, 28 Nov 2024 03:36:32 -0800 Subject: [PATCH 070/108] Delete download-cache --- .../cache/src/internal/blob/download-cache.ts | 45 ------------------- 1 file changed, 45 deletions(-) delete mode 100644 packages/cache/src/internal/blob/download-cache.ts diff --git 
a/packages/cache/src/internal/blob/download-cache.ts b/packages/cache/src/internal/blob/download-cache.ts deleted file mode 100644 index 38384b10..00000000 --- a/packages/cache/src/internal/blob/download-cache.ts +++ /dev/null @@ -1,45 +0,0 @@ -import * as core from '@actions/core' - -import { - BlobClient, - BlockBlobClient, - BlobDownloadOptions, - BlobDownloadResponseParsed -} from '@azure/storage-blob' - -export async function downloadCacheFile( - signedUploadURL: string, - archivePath: string -): Promise { - const downloadOptions: BlobDownloadOptions = { - maxRetryRequests: 5 - } - - const blobClient: BlobClient = new BlobClient(signedUploadURL) - const blockBlobClient: BlockBlobClient = blobClient.getBlockBlobClient() - - core.debug( - `BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}` - ) - - const response = await blockBlobClient.downloadToFile( - archivePath, - 0, - undefined, - downloadOptions - ) - - switch (response._response.status) { - case 200: - core.info(`Cache downloaded from "${signedUploadURL}"`) - break - case 304: - core.info(`Cache not found at "${signedUploadURL}"`) - break - default: - core.info(`Unexpected HTTP response: ${response._response.status}`) - break - } - - return response -} From df166709a33b5c2af2322833ce0e038e71a2f71e Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Thu, 28 Nov 2024 03:52:09 -0800 Subject: [PATCH 071/108] Refactor cache upload functionality and improve test cases --- packages/cache/__tests__/saveCacheV2.test.ts | 50 +++++++++---------- packages/cache/src/cache.ts | 26 +++++----- .../{blob/upload-cache.ts => uploadUtils.ts} | 11 ++-- 3 files changed, 44 insertions(+), 43 deletions(-) rename packages/cache/src/internal/{blob/upload-cache.ts => uploadUtils.ts} (82%) diff --git a/packages/cache/__tests__/saveCacheV2.test.ts b/packages/cache/__tests__/saveCacheV2.test.ts index 67c7f1de..2c69c5ee 100644 --- a/packages/cache/__tests__/saveCacheV2.test.ts +++ b/packages/cache/__tests__/saveCacheV2.test.ts @@ -1,14 +1,14 @@ import * as core from '@actions/core' import * as path from 'path' -import {saveCache} from '../src/cache' +import { saveCache } from '../src/cache' import * as cacheUtils from '../src/internal/cacheUtils' -import {CacheFilename, CompressionMethod} from '../src/internal/constants' +import { CacheFilename, CompressionMethod } from '../src/internal/constants' import * as config from '../src/internal/config' import * as tar from '../src/internal/tar' -import {CacheServiceClientJSON} from '../src/generated/results/api/v1/cache.twirp' -import * as uploadCacheModule from '../src/internal/blob/upload-cache' -import {BlobUploadCommonResponse} from '@azure/storage-blob' -import {InvalidResponseError} from '../src/internal/shared/errors' +import { CacheServiceClientJSON } from '../src/generated/results/api/v1/cache.twirp' +import * as uploadCacheModule from '../src/internal/uploadUtils' +import { BlobUploadCommonResponse } from '@azure/storage-blob' +import { InvalidResponseError } from '../src/internal/shared/errors' let logDebugMock: jest.SpyInstance @@ -28,11 +28,11 @@ jest.mock('@azure/storage-blob', () => ({ beforeAll(() => { process.env['ACTIONS_RUNTIME_TOKEN'] = 'token' - jest.spyOn(console, 'log').mockImplementation(() => {}) - jest.spyOn(core, 'debug').mockImplementation(() => {}) - jest.spyOn(core, 'info').mockImplementation(() => {}) - jest.spyOn(core, 'warning').mockImplementation(() => {}) - jest.spyOn(core, 'error').mockImplementation(() => 
{}) + jest.spyOn(console, 'log').mockImplementation(() => { }) + jest.spyOn(core, 'debug').mockImplementation(() => { }) + jest.spyOn(core, 'info').mockImplementation(() => { }) + jest.spyOn(core, 'warning').mockImplementation(() => { }) + jest.spyOn(core, 'error').mockImplementation(() => { }) jest.spyOn(cacheUtils, 'resolvePaths').mockImplementation(async filePaths => { return filePaths.map(x => path.resolve(x)) }) @@ -100,7 +100,7 @@ test('create cache entry failure', async () => { const createCacheEntryMock = jest .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') - .mockReturnValue(Promise.resolve({ok: false, signedUploadUrl: ''})) + .mockReturnValue(Promise.resolve({ ok: false, signedUploadUrl: '' })) const createTarMock = jest.spyOn(tar, 'createTar') const finalizeCacheEntryMock = jest.spyOn( @@ -116,8 +116,8 @@ test('create cache entry failure', async () => { .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') .mockReturnValueOnce(archiveFileSize) const cacheVersion = cacheUtils.getCacheVersion(paths, compression) - const uploadCacheFileMock = jest - .spyOn(uploadCacheModule, 'uploadCacheFile') + const uploadCacheArchiveSDKMock = jest + .spyOn(uploadCacheModule, 'uploadCacheArchiveSDK') .mockReturnValueOnce( Promise.resolve({ _response: { @@ -139,7 +139,7 @@ test('create cache entry failure', async () => { expect(createTarMock).toHaveBeenCalledTimes(1) expect(getCompressionMock).toHaveBeenCalledTimes(1) expect(finalizeCacheEntryMock).toHaveBeenCalledTimes(0) - expect(uploadCacheFileMock).toHaveBeenCalledTimes(0) + expect(uploadCacheArchiveSDKMock).toHaveBeenCalledTimes(0) }) test('finalize save cache failure', async () => { @@ -152,12 +152,12 @@ test('finalize save cache failure', async () => { const createCacheEntryMock = jest .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') .mockReturnValue( - Promise.resolve({ok: true, signedUploadUrl: signedUploadURL}) + Promise.resolve({ ok: true, signedUploadUrl: signedUploadURL }) ) const createTarMock = jest.spyOn(tar, 'createTar') - const uploadCacheMock = jest.spyOn(uploadCacheModule, 'uploadCacheFile') + const uploadCacheMock = jest.spyOn(uploadCacheModule, 'uploadCacheArchiveSDK') uploadCacheMock.mockReturnValueOnce( Promise.resolve({ _response: { @@ -179,7 +179,7 @@ test('finalize save cache failure', async () => { const finalizeCacheEntryMock = jest .spyOn(CacheServiceClientJSON.prototype, 'FinalizeCacheEntryUpload') - .mockReturnValue(Promise.resolve({ok: false, entryId: ''})) + .mockReturnValue(Promise.resolve({ ok: false, entryId: '' })) const cacheId = await saveCache([paths], key) @@ -218,7 +218,7 @@ test('save with uploadCache Server error will fail', async () => { jest .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') .mockReturnValue( - Promise.resolve({ok: true, signedUploadUrl: signedUploadURL}) + Promise.resolve({ ok: true, signedUploadUrl: signedUploadURL }) ) const archiveFileSize = 1024 @@ -226,7 +226,7 @@ test('save with uploadCache Server error will fail', async () => { .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') .mockReturnValueOnce(archiveFileSize) jest - .spyOn(uploadCacheModule, 'uploadCacheFile') + .spyOn(uploadCacheModule, 'uploadCacheArchiveSDK') .mockRejectedValueOnce(new InvalidResponseError('boom')) const cacheId = await saveCache([paths], key) @@ -241,14 +241,14 @@ test('uploadFile returns 500', async () => { jest .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') .mockReturnValue( - Promise.resolve({ok: true, signedUploadUrl: signedUploadURL}) + Promise.resolve({ 
ok: true, signedUploadUrl: signedUploadURL }) ) const archiveFileSize = 1024 jest .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') .mockReturnValueOnce(archiveFileSize) - jest.spyOn(uploadCacheModule, 'uploadCacheFile').mockRestore() + jest.spyOn(uploadCacheModule, 'uploadCacheArchiveSDK').mockRestore() uploadFileMock = jest.fn().mockResolvedValueOnce({ _response: { @@ -279,11 +279,11 @@ test('save with valid inputs uploads a cache', async () => { jest .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') .mockReturnValue( - Promise.resolve({ok: true, signedUploadUrl: signedUploadURL}) + Promise.resolve({ ok: true, signedUploadUrl: signedUploadURL }) ) const uploadCacheMock = jest - .spyOn(uploadCacheModule, 'uploadCacheFile') + .spyOn(uploadCacheModule, 'uploadCacheArchiveSDK') .mockReturnValueOnce( Promise.resolve({ _response: { @@ -300,7 +300,7 @@ test('save with valid inputs uploads a cache', async () => { const finalizeCacheEntryMock = jest .spyOn(CacheServiceClientJSON.prototype, 'FinalizeCacheEntryUpload') - .mockReturnValue(Promise.resolve({ok: true, entryId: cacheId.toString()})) + .mockReturnValue(Promise.resolve({ ok: true, entryId: cacheId.toString() })) const expectedCacheId = await saveCache([paths], key) diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index fada75f2..adc8b915 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -3,18 +3,18 @@ import * as path from 'path' import * as utils from './internal/cacheUtils' import * as cacheHttpClient from './internal/cacheHttpClient' import * as cacheTwirpClient from './internal/shared/cacheTwirpClient' -import {downloadCacheStorageSDK} from './internal/downloadUtils' -import {getCacheServiceVersion, isGhes} from './internal/config' -import {DownloadOptions, UploadOptions} from './options' -import {createTar, extractTar, listTar} from './internal/tar' +import { downloadCacheStorageSDK } from './internal/downloadUtils' +import { getCacheServiceVersion, isGhes } from './internal/config' +import { DownloadOptions, UploadOptions } from './options' +import { createTar, extractTar, listTar } from './internal/tar' import { CreateCacheEntryRequest, FinalizeCacheEntryUploadRequest, FinalizeCacheEntryUploadResponse, GetCacheEntryDownloadURLRequest } from './generated/results/api/v1/cache' -import {CacheFileSizeLimit} from './internal/constants' -import {uploadCacheFile} from './internal/blob/upload-cache' +import { CacheFileSizeLimit } from './internal/constants' +import { uploadCacheArchiveSDK } from './internal/uploadUtils' export class ValidationError extends Error { constructor(message: string) { super(message) @@ -275,9 +275,9 @@ async function restoreCacheV2( response.signedDownloadUrl, archivePath, options || - ({ - timeoutInMs: 30000 - } as DownloadOptions) + ({ + timeoutInMs: 30000 + } as DownloadOptions) ) const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath) @@ -414,9 +414,9 @@ async function saveCacheV1( } else if (reserveCacheResponse?.statusCode === 400) { throw new Error( reserveCacheResponse?.error?.message ?? 
- `Cache size of ~${Math.round( - archiveFileSize / (1024 * 1024) - )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.` + `Cache size of ~${Math.round( + archiveFileSize / (1024 * 1024) + )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.` ) } else { throw new ReserveCacheError( @@ -521,7 +521,7 @@ async function saveCacheV2( } core.debug(`Attempting to upload cache located at: ${archivePath}`) - const uploadResponse = await uploadCacheFile( + const uploadResponse = await uploadCacheArchiveSDK( response.signedUploadUrl, archivePath ) diff --git a/packages/cache/src/internal/blob/upload-cache.ts b/packages/cache/src/internal/uploadUtils.ts similarity index 82% rename from packages/cache/src/internal/blob/upload-cache.ts rename to packages/cache/src/internal/uploadUtils.ts index b9970c46..ffcb37f8 100644 --- a/packages/cache/src/internal/blob/upload-cache.ts +++ b/packages/cache/src/internal/uploadUtils.ts @@ -5,12 +5,13 @@ import { BlockBlobClient, BlockBlobParallelUploadOptions } from '@azure/storage-blob' -import {InvalidResponseError} from '../shared/errors' +import { InvalidResponseError } from './shared/errors' -export async function uploadCacheFile( - signedUploadURL: string, - archivePath: string -): Promise { +export async function uploadCacheArchiveSDK + ( + signedUploadURL: string, + archivePath: string + ): Promise { // Specify data transfer options const uploadOptions: BlockBlobParallelUploadOptions = { blockSize: 4 * 1024 * 1024, // 4 MiB max block size From c1fb081674639f34502220b5c10235ad92584e75 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Thu, 28 Nov 2024 03:53:34 -0800 Subject: [PATCH 072/108] Linter fixes --- packages/cache/__tests__/saveCacheV2.test.ts | 34 ++++++++++---------- packages/cache/src/cache.ts | 24 +++++++------- packages/cache/src/internal/uploadUtils.ts | 11 +++---- 3 files changed, 34 insertions(+), 35 deletions(-) diff --git a/packages/cache/__tests__/saveCacheV2.test.ts b/packages/cache/__tests__/saveCacheV2.test.ts index 2c69c5ee..ab98bc81 100644 --- a/packages/cache/__tests__/saveCacheV2.test.ts +++ b/packages/cache/__tests__/saveCacheV2.test.ts @@ -1,14 +1,14 @@ import * as core from '@actions/core' import * as path from 'path' -import { saveCache } from '../src/cache' +import {saveCache} from '../src/cache' import * as cacheUtils from '../src/internal/cacheUtils' -import { CacheFilename, CompressionMethod } from '../src/internal/constants' +import {CacheFilename, CompressionMethod} from '../src/internal/constants' import * as config from '../src/internal/config' import * as tar from '../src/internal/tar' -import { CacheServiceClientJSON } from '../src/generated/results/api/v1/cache.twirp' +import {CacheServiceClientJSON} from '../src/generated/results/api/v1/cache.twirp' import * as uploadCacheModule from '../src/internal/uploadUtils' -import { BlobUploadCommonResponse } from '@azure/storage-blob' -import { InvalidResponseError } from '../src/internal/shared/errors' +import {BlobUploadCommonResponse} from '@azure/storage-blob' +import {InvalidResponseError} from '../src/internal/shared/errors' let logDebugMock: jest.SpyInstance @@ -28,11 +28,11 @@ jest.mock('@azure/storage-blob', () => ({ beforeAll(() => { process.env['ACTIONS_RUNTIME_TOKEN'] = 'token' - jest.spyOn(console, 'log').mockImplementation(() => { }) - jest.spyOn(core, 'debug').mockImplementation(() => { }) - jest.spyOn(core, 'info').mockImplementation(() => { }) - jest.spyOn(core, 
'warning').mockImplementation(() => { }) - jest.spyOn(core, 'error').mockImplementation(() => { }) + jest.spyOn(console, 'log').mockImplementation(() => {}) + jest.spyOn(core, 'debug').mockImplementation(() => {}) + jest.spyOn(core, 'info').mockImplementation(() => {}) + jest.spyOn(core, 'warning').mockImplementation(() => {}) + jest.spyOn(core, 'error').mockImplementation(() => {}) jest.spyOn(cacheUtils, 'resolvePaths').mockImplementation(async filePaths => { return filePaths.map(x => path.resolve(x)) }) @@ -100,7 +100,7 @@ test('create cache entry failure', async () => { const createCacheEntryMock = jest .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') - .mockReturnValue(Promise.resolve({ ok: false, signedUploadUrl: '' })) + .mockReturnValue(Promise.resolve({ok: false, signedUploadUrl: ''})) const createTarMock = jest.spyOn(tar, 'createTar') const finalizeCacheEntryMock = jest.spyOn( @@ -152,7 +152,7 @@ test('finalize save cache failure', async () => { const createCacheEntryMock = jest .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') .mockReturnValue( - Promise.resolve({ ok: true, signedUploadUrl: signedUploadURL }) + Promise.resolve({ok: true, signedUploadUrl: signedUploadURL}) ) const createTarMock = jest.spyOn(tar, 'createTar') @@ -179,7 +179,7 @@ test('finalize save cache failure', async () => { const finalizeCacheEntryMock = jest .spyOn(CacheServiceClientJSON.prototype, 'FinalizeCacheEntryUpload') - .mockReturnValue(Promise.resolve({ ok: false, entryId: '' })) + .mockReturnValue(Promise.resolve({ok: false, entryId: ''})) const cacheId = await saveCache([paths], key) @@ -218,7 +218,7 @@ test('save with uploadCache Server error will fail', async () => { jest .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') .mockReturnValue( - Promise.resolve({ ok: true, signedUploadUrl: signedUploadURL }) + Promise.resolve({ok: true, signedUploadUrl: signedUploadURL}) ) const archiveFileSize = 1024 @@ -241,7 +241,7 @@ test('uploadFile returns 500', async () => { jest .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') .mockReturnValue( - Promise.resolve({ ok: true, signedUploadUrl: signedUploadURL }) + Promise.resolve({ok: true, signedUploadUrl: signedUploadURL}) ) const archiveFileSize = 1024 @@ -279,7 +279,7 @@ test('save with valid inputs uploads a cache', async () => { jest .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') .mockReturnValue( - Promise.resolve({ ok: true, signedUploadUrl: signedUploadURL }) + Promise.resolve({ok: true, signedUploadUrl: signedUploadURL}) ) const uploadCacheMock = jest @@ -300,7 +300,7 @@ test('save with valid inputs uploads a cache', async () => { const finalizeCacheEntryMock = jest .spyOn(CacheServiceClientJSON.prototype, 'FinalizeCacheEntryUpload') - .mockReturnValue(Promise.resolve({ ok: true, entryId: cacheId.toString() })) + .mockReturnValue(Promise.resolve({ok: true, entryId: cacheId.toString()})) const expectedCacheId = await saveCache([paths], key) diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index adc8b915..1617b793 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -3,18 +3,18 @@ import * as path from 'path' import * as utils from './internal/cacheUtils' import * as cacheHttpClient from './internal/cacheHttpClient' import * as cacheTwirpClient from './internal/shared/cacheTwirpClient' -import { downloadCacheStorageSDK } from './internal/downloadUtils' -import { getCacheServiceVersion, isGhes } from './internal/config' -import { DownloadOptions, 
UploadOptions } from './options' -import { createTar, extractTar, listTar } from './internal/tar' +import {downloadCacheStorageSDK} from './internal/downloadUtils' +import {getCacheServiceVersion, isGhes} from './internal/config' +import {DownloadOptions, UploadOptions} from './options' +import {createTar, extractTar, listTar} from './internal/tar' import { CreateCacheEntryRequest, FinalizeCacheEntryUploadRequest, FinalizeCacheEntryUploadResponse, GetCacheEntryDownloadURLRequest } from './generated/results/api/v1/cache' -import { CacheFileSizeLimit } from './internal/constants' -import { uploadCacheArchiveSDK } from './internal/uploadUtils' +import {CacheFileSizeLimit} from './internal/constants' +import {uploadCacheArchiveSDK} from './internal/uploadUtils' export class ValidationError extends Error { constructor(message: string) { super(message) @@ -275,9 +275,9 @@ async function restoreCacheV2( response.signedDownloadUrl, archivePath, options || - ({ - timeoutInMs: 30000 - } as DownloadOptions) + ({ + timeoutInMs: 30000 + } as DownloadOptions) ) const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath) @@ -414,9 +414,9 @@ async function saveCacheV1( } else if (reserveCacheResponse?.statusCode === 400) { throw new Error( reserveCacheResponse?.error?.message ?? - `Cache size of ~${Math.round( - archiveFileSize / (1024 * 1024) - )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.` + `Cache size of ~${Math.round( + archiveFileSize / (1024 * 1024) + )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.` ) } else { throw new ReserveCacheError( diff --git a/packages/cache/src/internal/uploadUtils.ts b/packages/cache/src/internal/uploadUtils.ts index ffcb37f8..60b4a315 100644 --- a/packages/cache/src/internal/uploadUtils.ts +++ b/packages/cache/src/internal/uploadUtils.ts @@ -5,13 +5,12 @@ import { BlockBlobClient, BlockBlobParallelUploadOptions } from '@azure/storage-blob' -import { InvalidResponseError } from './shared/errors' +import {InvalidResponseError} from './shared/errors' -export async function uploadCacheArchiveSDK - ( - signedUploadURL: string, - archivePath: string - ): Promise { +export async function uploadCacheArchiveSDK( + signedUploadURL: string, + archivePath: string +): Promise { // Specify data transfer options const uploadOptions: BlockBlobParallelUploadOptions = { blockSize: 4 * 1024 * 1024, // 4 MiB max block size From eaf0083ee213751b2e128f7efa0b79bfb2630350 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Thu, 28 Nov 2024 04:56:37 -0800 Subject: [PATCH 073/108] Respect download options for restore --- .../cache/__tests__/restoreCacheV2.test.ts | 47 ++++++------------- packages/cache/src/cache.ts | 8 +--- 2 files changed, 16 insertions(+), 39 deletions(-) diff --git a/packages/cache/__tests__/restoreCacheV2.test.ts b/packages/cache/__tests__/restoreCacheV2.test.ts index 365afdf0..ae366412 100644 --- a/packages/cache/__tests__/restoreCacheV2.test.ts +++ b/packages/cache/__tests__/restoreCacheV2.test.ts @@ -3,7 +3,7 @@ import * as path from 'path' import * as tar from '../src/internal/tar' import * as config from '../src/internal/config' import * as cacheUtils from '../src/internal/cacheUtils' -import * as downloadUtils from '../src/internal/downloadUtils' +import * as cacheHttpClient from '../src/internal/cacheHttpClient' import {restoreCache} from '../src/cache' import {CacheFilename, CompressionMethod} from '../src/internal/constants' import {CacheServiceClientJSON} from 
'../src/generated/results/api/v1/cache.twirp' @@ -142,7 +142,6 @@ test('restore with gzip compressed cache found', async () => { const signedDownloadUrl = 'https://blob-storage.local?signed=true' const cacheVersion = 'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b' - const options: DownloadOptions = {timeoutInMs: 30000} const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion') getCacheVersionMock.mockReturnValue(cacheVersion) @@ -170,11 +169,7 @@ test('restore with gzip compressed cache found', async () => { }) const archivePath = path.join(tempPath, CacheFilename.Gzip) - const downloadCacheStorageSDKMock = jest.spyOn( - downloadUtils, - 'downloadCacheStorageSDK' - ) - downloadCacheStorageSDKMock.mockReturnValue(Promise.resolve()) + const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache') const fileSize = 142 const getArchiveFileSizeInBytesMock = jest @@ -198,10 +193,10 @@ test('restore with gzip compressed cache found', async () => { version: cacheVersion }) expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) - expect(downloadCacheStorageSDKMock).toHaveBeenCalledWith( + expect(downloadCacheMock).toHaveBeenCalledWith( signedDownloadUrl, archivePath, - options + undefined ) expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`) @@ -222,7 +217,6 @@ test('restore with zstd compressed cache found', async () => { const signedDownloadUrl = 'https://blob-storage.local?signed=true' const cacheVersion = '8e2e96a184cb0cd6b48285b176c06a418f3d7fce14c29d9886fd1bb4f05c513d' - const options: DownloadOptions = {timeoutInMs: 30000} const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion') getCacheVersionMock.mockReturnValue(cacheVersion) @@ -250,11 +244,7 @@ test('restore with zstd compressed cache found', async () => { }) const archivePath = path.join(tempPath, CacheFilename.Zstd) - const downloadCacheStorageSDKMock = jest.spyOn( - downloadUtils, - 'downloadCacheStorageSDK' - ) - downloadCacheStorageSDKMock.mockReturnValue(Promise.resolve()) + const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache') const fileSize = 62915000 const getArchiveFileSizeInBytesMock = jest @@ -278,10 +268,10 @@ test('restore with zstd compressed cache found', async () => { version: cacheVersion }) expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) - expect(downloadCacheStorageSDKMock).toHaveBeenCalledWith( + expect(downloadCacheMock).toHaveBeenCalledWith( signedDownloadUrl, archivePath, - options + undefined ) expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`) @@ -303,7 +293,6 @@ test('restore with cache found for restore key', async () => { const signedDownloadUrl = 'https://blob-storage.local?signed=true' const cacheVersion = 'b8b58e9bd7b1e8f83d9f05c7e06ea865ba44a0330e07a14db74ac74386677bed' - const options: DownloadOptions = {timeoutInMs: 30000} const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion') getCacheVersionMock.mockReturnValue(cacheVersion) @@ -331,11 +320,7 @@ test('restore with cache found for restore key', async () => { }) const archivePath = path.join(tempPath, CacheFilename.Gzip) - const downloadCacheStorageSDKMock = jest.spyOn( - downloadUtils, - 'downloadCacheStorageSDK' - ) - downloadCacheStorageSDKMock.mockReturnValue(Promise.resolve()) + const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache') const fileSize = 142 
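  // What [PATCH 073] changes, sketched with the names used in this file:
  // restoreCacheV2 now hands the caller's DownloadOptions straight through to
  // cacheHttpClient.downloadCache instead of forcing {timeoutInMs: 30000},
  // which is why the expectations below see `options`/`undefined` verbatim:
  //
  //   await cacheHttpClient.downloadCache(
  //     response.signedDownloadUrl,
  //     archivePath,
  //     options // may be undefined; no default is injected any more
  //   )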
const getArchiveFileSizeInBytesMock = jest @@ -359,10 +344,10 @@ test('restore with cache found for restore key', async () => { version: cacheVersion }) expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) - expect(downloadCacheStorageSDKMock).toHaveBeenCalledWith( + expect(downloadCacheMock).toHaveBeenCalledWith( signedDownloadUrl, archivePath, - options + undefined ) expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`) @@ -376,14 +361,14 @@ test('restore with cache found for restore key', async () => { expect(compressionMethodMock).toHaveBeenCalledTimes(1) }) -test('restore with dry run', async () => { +test('restore with lookup only enabled', async () => { const paths = ['node_modules'] const key = 'node-test' const compressionMethod = CompressionMethod.Gzip const signedDownloadUrl = 'https://blob-storage.local?signed=true' const cacheVersion = 'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b' - const options: DownloadOptions = {lookupOnly: true, timeoutInMs: 30000} + const options = {lookupOnly: true} as DownloadOptions const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion') getCacheVersionMock.mockReturnValue(cacheVersion) @@ -404,11 +389,7 @@ test('restore with dry run', async () => { ) const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') - const downloadCacheStorageSDKMock = jest.spyOn( - downloadUtils, - 'downloadCacheStorageSDK' - ) - downloadCacheStorageSDKMock.mockReturnValue(Promise.resolve()) + const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache') const cacheKey = await restoreCache(paths, key, undefined, options) @@ -427,5 +408,5 @@ test('restore with dry run', async () => { // creating a tempDir and downloading the cache are skipped expect(createTempDirectoryMock).toHaveBeenCalledTimes(0) - expect(downloadCacheStorageSDKMock).toHaveBeenCalledTimes(0) + expect(downloadCacheMock).toHaveBeenCalledTimes(0) }) diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index 1617b793..ddf4e7fb 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -3,7 +3,6 @@ import * as path from 'path' import * as utils from './internal/cacheUtils' import * as cacheHttpClient from './internal/cacheHttpClient' import * as cacheTwirpClient from './internal/shared/cacheTwirpClient' -import {downloadCacheStorageSDK} from './internal/downloadUtils' import {getCacheServiceVersion, isGhes} from './internal/config' import {DownloadOptions, UploadOptions} from './options' import {createTar, extractTar, listTar} from './internal/tar' @@ -271,13 +270,10 @@ async function restoreCacheV2( core.debug(`Archive path: ${archivePath}`) core.debug(`Starting download of archive to: ${archivePath}`) - await downloadCacheStorageSDK( + await cacheHttpClient.downloadCache( response.signedDownloadUrl, archivePath, - options || - ({ - timeoutInMs: 30000 - } as DownloadOptions) + options ) const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath) From 62f5f1885b005bf104d61bfecc827d289016f5de Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Thu, 28 Nov 2024 07:22:01 -0800 Subject: [PATCH 074/108] Refactor saveCacheV2 to use saveCache from cacheHttpClient --- packages/cache/__tests__/options.test.ts | 6 +- packages/cache/__tests__/saveCache.test.ts | 14 +- packages/cache/__tests__/saveCacheV2.test.ts | 165 ++++++++---------- packages/cache/src/cache.ts | 15 +- 
.../cache/src/internal/cacheHttpClient.ts | 50 ++++-- packages/cache/src/internal/uploadUtils.ts | 10 +- packages/cache/src/options.ts | 14 ++ 7 files changed, 158 insertions(+), 116 deletions(-) diff --git a/packages/cache/__tests__/options.test.ts b/packages/cache/__tests__/options.test.ts index 7585b60f..fd742487 100644 --- a/packages/cache/__tests__/options.test.ts +++ b/packages/cache/__tests__/options.test.ts @@ -47,14 +47,16 @@ test('getUploadOptions sets defaults', async () => { expect(actualOptions).toEqual({ uploadConcurrency, - uploadChunkSize + uploadChunkSize, + useAzureSdk }) }) test('getUploadOptions overrides all settings', async () => { const expectedOptions: UploadOptions = { uploadConcurrency: 2, - uploadChunkSize: 16 * 1024 * 1024 + uploadChunkSize: 16 * 1024 * 1024, + useAzureSdk: true } const actualOptions = getUploadOptions(expectedOptions) diff --git a/packages/cache/__tests__/saveCache.test.ts b/packages/cache/__tests__/saveCache.test.ts index 81049e0a..e5ed695b 100644 --- a/packages/cache/__tests__/saveCache.test.ts +++ b/packages/cache/__tests__/saveCache.test.ts @@ -270,7 +270,12 @@ test('save with server error should fail', async () => { compression ) expect(saveCacheMock).toHaveBeenCalledTimes(1) - expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile, undefined) + expect(saveCacheMock).toHaveBeenCalledWith( + cacheId, + archiveFile, + '', + undefined + ) expect(getCompressionMock).toHaveBeenCalledTimes(1) }) @@ -315,7 +320,12 @@ test('save with valid inputs uploads a cache', async () => { compression ) expect(saveCacheMock).toHaveBeenCalledTimes(1) - expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile, undefined) + expect(saveCacheMock).toHaveBeenCalledWith( + cacheId, + archiveFile, + '', + undefined + ) expect(getCompressionMock).toHaveBeenCalledTimes(1) }) diff --git a/packages/cache/__tests__/saveCacheV2.test.ts b/packages/cache/__tests__/saveCacheV2.test.ts index ab98bc81..3a18272a 100644 --- a/packages/cache/__tests__/saveCacheV2.test.ts +++ b/packages/cache/__tests__/saveCacheV2.test.ts @@ -6,15 +6,14 @@ import {CacheFilename, CompressionMethod} from '../src/internal/constants' import * as config from '../src/internal/config' import * as tar from '../src/internal/tar' import {CacheServiceClientJSON} from '../src/generated/results/api/v1/cache.twirp' -import * as uploadCacheModule from '../src/internal/uploadUtils' -import {BlobUploadCommonResponse} from '@azure/storage-blob' -import {InvalidResponseError} from '../src/internal/shared/errors' +import * as cacheHttpClient from '../src/internal/cacheHttpClient' +import {UploadOptions} from '../src/options' let logDebugMock: jest.SpyInstance jest.mock('../src/internal/tar') -let uploadFileMock = jest.fn() +const uploadFileMock = jest.fn() const blockBlobClientMock = jest.fn().mockImplementation(() => ({ uploadFile: uploadFileMock })) @@ -116,15 +115,7 @@ test('create cache entry failure', async () => { .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') .mockReturnValueOnce(archiveFileSize) const cacheVersion = cacheUtils.getCacheVersion(paths, compression) - const uploadCacheArchiveSDKMock = jest - .spyOn(uploadCacheModule, 'uploadCacheArchiveSDK') - .mockReturnValueOnce( - Promise.resolve({ - _response: { - status: 200 - } - } as BlobUploadCommonResponse) - ) + const saveCacheMock = jest.spyOn(cacheHttpClient, 'saveCache') const cacheId = await saveCache(paths, key) expect(cacheId).toBe(-1) @@ -139,15 +130,15 @@ test('create cache entry failure', async () => { 
expect(createTarMock).toHaveBeenCalledTimes(1) expect(getCompressionMock).toHaveBeenCalledTimes(1) expect(finalizeCacheEntryMock).toHaveBeenCalledTimes(0) - expect(uploadCacheArchiveSDKMock).toHaveBeenCalledTimes(0) + expect(saveCacheMock).toHaveBeenCalledTimes(0) }) -test('finalize save cache failure', async () => { +test('save cache fails if a signedUploadURL was not passed', async () => { const paths = 'node_modules' const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' const cachePaths = [path.resolve(paths)] - const logWarningMock = jest.spyOn(core, 'warning') - const signedUploadURL = 'https://blob-storage.local?signed=true' + const signedUploadURL = '' + const options: UploadOptions = {useAzureSdk: true} const createCacheEntryMock = jest .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') @@ -156,16 +147,63 @@ test('finalize save cache failure', async () => { ) const createTarMock = jest.spyOn(tar, 'createTar') + const saveCacheMock = jest.spyOn(cacheHttpClient, 'saveCache') - const uploadCacheMock = jest.spyOn(uploadCacheModule, 'uploadCacheArchiveSDK') - uploadCacheMock.mockReturnValueOnce( - Promise.resolve({ - _response: { - status: 200 - } - } as BlobUploadCommonResponse) + const compression = CompressionMethod.Zstd + const getCompressionMock = jest + .spyOn(cacheUtils, 'getCompressionMethod') + .mockReturnValueOnce(Promise.resolve(compression)) + + const cacheVersion = cacheUtils.getCacheVersion([paths], compression) + const archiveFileSize = 1024 + jest + .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') + .mockReturnValueOnce(archiveFileSize) + + const cacheId = await saveCache([paths], key, options) + + expect(cacheId).toBe(-1) + expect(createCacheEntryMock).toHaveBeenCalledWith({ + key, + version: cacheVersion + }) + + const archiveFolder = '/foo/bar' + const archiveFile = path.join(archiveFolder, CacheFilename.Zstd) + expect(createTarMock).toHaveBeenCalledWith( + archiveFolder, + cachePaths, + compression ) + expect(saveCacheMock).toHaveBeenCalledWith( + -1, + archiveFile, + signedUploadURL, + options + ) + expect(getCompressionMock).toHaveBeenCalledTimes(1) +}) + +test('finalize save cache failure', async () => { + const paths = 'node_modules' + const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' + const cachePaths = [path.resolve(paths)] + const logWarningMock = jest.spyOn(core, 'warning') + const signedUploadURL = 'https://blob-storage.local?signed=true' + const options: UploadOptions = {useAzureSdk: true} + + const createCacheEntryMock = jest + .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') + .mockReturnValue( + Promise.resolve({ok: true, signedUploadUrl: signedUploadURL}) + ) + + const createTarMock = jest.spyOn(tar, 'createTar') + const saveCacheMock = jest + .spyOn(cacheHttpClient, 'saveCache') + .mockResolvedValue(Promise.resolve()) + const compression = CompressionMethod.Zstd const getCompressionMock = jest .spyOn(cacheUtils, 'getCompressionMethod') @@ -181,7 +219,7 @@ test('finalize save cache failure', async () => { .spyOn(CacheServiceClientJSON.prototype, 'FinalizeCacheEntryUpload') .mockReturnValue(Promise.resolve({ok: false, entryId: ''})) - const cacheId = await saveCache([paths], key) + const cacheId = await saveCache([paths], key, options) expect(createCacheEntryMock).toHaveBeenCalledWith({ key, @@ -196,7 +234,12 @@ test('finalize save cache failure', async () => { compression ) - expect(uploadCacheMock).toHaveBeenCalledWith(signedUploadURL, archiveFile) + expect(saveCacheMock).toHaveBeenCalledWith( + -1, + 
archiveFile, + signedUploadURL, + options + ) expect(getCompressionMock).toHaveBeenCalledTimes(1) expect(finalizeCacheEntryMock).toHaveBeenCalledWith({ @@ -211,64 +254,13 @@ test('finalize save cache failure', async () => { ) }) -test('save with uploadCache Server error will fail', async () => { - const paths = 'node_modules' - const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' - const signedUploadURL = 'https://blob-storage.local?signed=true' - jest - .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') - .mockReturnValue( - Promise.resolve({ok: true, signedUploadUrl: signedUploadURL}) - ) - - const archiveFileSize = 1024 - jest - .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') - .mockReturnValueOnce(archiveFileSize) - jest - .spyOn(uploadCacheModule, 'uploadCacheArchiveSDK') - .mockRejectedValueOnce(new InvalidResponseError('boom')) - - const cacheId = await saveCache([paths], key) - expect(cacheId).toBe(-1) -}) - -test('uploadFile returns 500', async () => { - const paths = 'node_modules' - const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' - const signedUploadURL = 'https://blob-storage.local?signed=true' - const logWarningMock = jest.spyOn(core, 'warning') - jest - .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') - .mockReturnValue( - Promise.resolve({ok: true, signedUploadUrl: signedUploadURL}) - ) - - const archiveFileSize = 1024 - jest - .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') - .mockReturnValueOnce(archiveFileSize) - jest.spyOn(uploadCacheModule, 'uploadCacheArchiveSDK').mockRestore() - - uploadFileMock = jest.fn().mockResolvedValueOnce({ - _response: { - status: 500 - } - }) - const cacheId = await saveCache([paths], key) - - expect(logWarningMock).toHaveBeenCalledWith( - 'Failed to save: Upload failed with status code 500' - ) - expect(cacheId).toBe(-1) -}) - test('save with valid inputs uploads a cache', async () => { const paths = 'node_modules' const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' const cachePaths = [path.resolve(paths)] const signedUploadURL = 'https://blob-storage.local?signed=true' const createTarMock = jest.spyOn(tar, 'createTar') + const options: UploadOptions = {useAzureSdk: true} const archiveFileSize = 1024 jest @@ -282,15 +274,7 @@ test('save with valid inputs uploads a cache', async () => { Promise.resolve({ok: true, signedUploadUrl: signedUploadURL}) ) - const uploadCacheMock = jest - .spyOn(uploadCacheModule, 'uploadCacheArchiveSDK') - .mockReturnValueOnce( - Promise.resolve({ - _response: { - status: 200 - } - } as BlobUploadCommonResponse) - ) + const saveCacheMock = jest.spyOn(cacheHttpClient, 'saveCache') const compression = CompressionMethod.Zstd const getCompressionMock = jest @@ -306,7 +290,12 @@ test('save with valid inputs uploads a cache', async () => { const archiveFolder = '/foo/bar' const archiveFile = path.join(archiveFolder, CacheFilename.Zstd) - expect(uploadCacheMock).toHaveBeenCalledWith(signedUploadURL, archiveFile) + expect(saveCacheMock).toHaveBeenCalledWith( + -1, + archiveFile, + signedUploadURL, + options + ) expect(createTarMock).toHaveBeenCalledWith( archiveFolder, cachePaths, diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index ddf4e7fb..173a1a87 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -13,7 +13,6 @@ import { GetCacheEntryDownloadURLRequest } from './generated/results/api/v1/cache' import {CacheFileSizeLimit} from './internal/constants' -import {uploadCacheArchiveSDK} from 
'./internal/uploadUtils' export class ValidationError extends Error { constructor(message: string) { super(message) @@ -421,7 +420,7 @@ async function saveCacheV1( } core.debug(`Saving Cache (ID: ${cacheId})`) - await cacheHttpClient.saveCache(cacheId, archivePath, options) + await cacheHttpClient.saveCache(cacheId, archivePath, '', options) } catch (error) { const typedError = error as Error if (typedError.name === ValidationError.name) { @@ -458,6 +457,11 @@ async function saveCacheV2( options?: UploadOptions, enableCrossOsArchive = false ): Promise { + // Override UploadOptions to force the use of Azure + options = { + ...options, + useAzureSdk: true + } const compressionMethod = await utils.getCompressionMethod() const twirpClient = cacheTwirpClient.internalCacheTwirpClient() let cacheId = -1 @@ -517,11 +521,12 @@ async function saveCacheV2( } core.debug(`Attempting to upload cache located at: ${archivePath}`) - const uploadResponse = await uploadCacheArchiveSDK( + await cacheHttpClient.saveCache( + cacheId, + archivePath, response.signedUploadUrl, - archivePath + options ) - core.debug(`Download response status: ${uploadResponse._response.status}`) const finalizeRequest: FinalizeCacheEntryUploadRequest = { key, diff --git a/packages/cache/src/internal/cacheHttpClient.ts b/packages/cache/src/internal/cacheHttpClient.ts index c219000b..2470555b 100644 --- a/packages/cache/src/internal/cacheHttpClient.ts +++ b/packages/cache/src/internal/cacheHttpClient.ts @@ -8,6 +8,7 @@ import { import * as fs from 'fs' import {URL} from 'url' import * as utils from './cacheUtils' +import {uploadCacheArchiveSDK} from './uploadUtils' import { ArtifactCacheEntry, InternalCacheOptions, @@ -326,26 +327,45 @@ async function commitCache( export async function saveCache( cacheId: number, archivePath: string, + signedUploadURL?: string, options?: UploadOptions ): Promise { - const httpClient = createHttpClient() + const uploadOptions = getUploadOptions(options) - core.debug('Upload cache') - await uploadFile(httpClient, cacheId, archivePath, options) + if (uploadOptions.useAzureSdk) { + // Use Azure storage SDK to upload caches directly to Azure + if (!signedUploadURL) { + throw new Error( + 'Azure Storage SDK can only be used when a signed URL is provided.' 
+ ) + } + await uploadCacheArchiveSDK(signedUploadURL, archivePath, options) + } else { + const httpClient = createHttpClient() - // Commit Cache - core.debug('Commiting cache') - const cacheSize = utils.getArchiveFileSizeInBytes(archivePath) - core.info( - `Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)` - ) + core.debug('Upload cache') + await uploadFile(httpClient, cacheId, archivePath, options) - const commitCacheResponse = await commitCache(httpClient, cacheId, cacheSize) - if (!isSuccessStatusCode(commitCacheResponse.statusCode)) { - throw new Error( - `Cache service responded with ${commitCacheResponse.statusCode} during commit cache.` + // Commit Cache + core.debug('Commiting cache') + const cacheSize = utils.getArchiveFileSizeInBytes(archivePath) + core.info( + `Cache Size: ~${Math.round( + cacheSize / (1024 * 1024) + )} MB (${cacheSize} B)` ) - } - core.info('Cache saved successfully') + const commitCacheResponse = await commitCache( + httpClient, + cacheId, + cacheSize + ) + if (!isSuccessStatusCode(commitCacheResponse.statusCode)) { + throw new Error( + `Cache service responded with ${commitCacheResponse.statusCode} during commit cache.` + ) + } + + core.info('Cache saved successfully') + } } diff --git a/packages/cache/src/internal/uploadUtils.ts b/packages/cache/src/internal/uploadUtils.ts index 60b4a315..a3376d9d 100644 --- a/packages/cache/src/internal/uploadUtils.ts +++ b/packages/cache/src/internal/uploadUtils.ts @@ -6,16 +6,18 @@ import { BlockBlobParallelUploadOptions } from '@azure/storage-blob' import {InvalidResponseError} from './shared/errors' +import {UploadOptions} from '../options' export async function uploadCacheArchiveSDK( signedUploadURL: string, - archivePath: string + archivePath: string, + options?: UploadOptions ): Promise { // Specify data transfer options const uploadOptions: BlockBlobParallelUploadOptions = { - blockSize: 4 * 1024 * 1024, // 4 MiB max block size - concurrency: 4, // maximum number of parallel transfer workers - maxSingleShotSize: 8 * 1024 * 1024 // 8 MiB initial transfer size + blockSize: options?.uploadChunkSize, + concurrency: options?.uploadConcurrency, // maximum number of parallel transfer workers + maxSingleShotSize: 128 * 1024 * 1024 // 128 MiB initial transfer size } const blobClient: BlobClient = new BlobClient(signedUploadURL) diff --git a/packages/cache/src/options.ts b/packages/cache/src/options.ts index d768ff54..778c6a0e 100644 --- a/packages/cache/src/options.ts +++ b/packages/cache/src/options.ts @@ -4,6 +4,14 @@ import * as core from '@actions/core' * Options to control cache upload */ export interface UploadOptions { + /** + * Indicates whether to use the Azure Blob SDK to download caches + * that are stored on Azure Blob Storage to improve reliability and + * performance + * + * @default false + */ + useAzureSdk?: boolean /** * Number of parallel cache upload * @@ -77,11 +85,16 @@ export interface DownloadOptions { */ export function getUploadOptions(copy?: UploadOptions): UploadOptions { const result: UploadOptions = { + useAzureSdk: false, uploadConcurrency: 4, uploadChunkSize: 32 * 1024 * 1024 } if (copy) { + if (typeof copy.useAzureSdk === 'boolean') { + result.useAzureSdk = copy.useAzureSdk + } + if (typeof copy.uploadConcurrency === 'number') { result.uploadConcurrency = copy.uploadConcurrency } @@ -91,6 +104,7 @@ export function getUploadOptions(copy?: UploadOptions): UploadOptions { } } + core.debug(`Use Azure SDK: ${result.useAzureSdk}`) core.debug(`Upload concurrency: 
${result.uploadConcurrency}`) core.debug(`Upload chunk size: ${result.uploadChunkSize}`) From 8c5f6f2dc5acc4574678e4e51df57f2f1779473a Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Thu, 28 Nov 2024 07:42:07 -0800 Subject: [PATCH 075/108] Force use of Azure for restoreCacheV2 --- packages/cache/__tests__/restoreCacheV2.test.ts | 17 ++++++++++------- packages/cache/src/cache.ts | 5 +++++ 2 files changed, 15 insertions(+), 7 deletions(-) diff --git a/packages/cache/__tests__/restoreCacheV2.test.ts b/packages/cache/__tests__/restoreCacheV2.test.ts index ae366412..edcb16d7 100644 --- a/packages/cache/__tests__/restoreCacheV2.test.ts +++ b/packages/cache/__tests__/restoreCacheV2.test.ts @@ -142,6 +142,7 @@ test('restore with gzip compressed cache found', async () => { const signedDownloadUrl = 'https://blob-storage.local?signed=true' const cacheVersion = 'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b' + const options = {useAzureSdk: true} as DownloadOptions const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion') getCacheVersionMock.mockReturnValue(cacheVersion) @@ -179,7 +180,7 @@ test('restore with gzip compressed cache found', async () => { const extractTarMock = jest.spyOn(tar, 'extractTar') const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile') - const cacheKey = await restoreCache(paths, key) + const cacheKey = await restoreCache(paths, key, [], options) expect(cacheKey).toBe(key) expect(getCacheVersionMock).toHaveBeenCalledWith( @@ -196,7 +197,7 @@ test('restore with gzip compressed cache found', async () => { expect(downloadCacheMock).toHaveBeenCalledWith( signedDownloadUrl, archivePath, - undefined + options ) expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`) @@ -217,6 +218,7 @@ test('restore with zstd compressed cache found', async () => { const signedDownloadUrl = 'https://blob-storage.local?signed=true' const cacheVersion = '8e2e96a184cb0cd6b48285b176c06a418f3d7fce14c29d9886fd1bb4f05c513d' + const options = {useAzureSdk: true} as DownloadOptions const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion') getCacheVersionMock.mockReturnValue(cacheVersion) @@ -254,7 +256,7 @@ test('restore with zstd compressed cache found', async () => { const extractTarMock = jest.spyOn(tar, 'extractTar') const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile') - const cacheKey = await restoreCache(paths, key) + const cacheKey = await restoreCache(paths, key, [], options) expect(cacheKey).toBe(key) expect(getCacheVersionMock).toHaveBeenCalledWith( @@ -271,7 +273,7 @@ test('restore with zstd compressed cache found', async () => { expect(downloadCacheMock).toHaveBeenCalledWith( signedDownloadUrl, archivePath, - undefined + options ) expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`) @@ -293,6 +295,7 @@ test('restore with cache found for restore key', async () => { const signedDownloadUrl = 'https://blob-storage.local?signed=true' const cacheVersion = 'b8b58e9bd7b1e8f83d9f05c7e06ea865ba44a0330e07a14db74ac74386677bed' + const options = {useAzureSdk: true} as DownloadOptions const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion') getCacheVersionMock.mockReturnValue(cacheVersion) @@ -330,7 +333,7 @@ test('restore with cache found for restore key', async () => { const extractTarMock = jest.spyOn(tar, 'extractTar') 
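  // The flip side in cache.ts (shown later in this patch): restoreCacheV2
  // spreads the caller's options and pins the SDK flag, so every download in
  // these tests is expected with useAzureSdk forced to true:
  //
  //   options = {
  //     ...options,        // caller-supplied DownloadOptions survive
  //     useAzureSdk: true  // forced for the v2 cache service
  //   }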
const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile') - const cacheKey = await restoreCache(paths, key, restoreKeys) + const cacheKey = await restoreCache(paths, key, restoreKeys, options) expect(cacheKey).toBe(restoreKeys[0]) expect(getCacheVersionMock).toHaveBeenCalledWith( @@ -347,7 +350,7 @@ test('restore with cache found for restore key', async () => { expect(downloadCacheMock).toHaveBeenCalledWith( signedDownloadUrl, archivePath, - undefined + options ) expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`) @@ -368,7 +371,7 @@ test('restore with lookup only enabled', async () => { const signedDownloadUrl = 'https://blob-storage.local?signed=true' const cacheVersion = 'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b' - const options = {lookupOnly: true} as DownloadOptions + const options = {lookupOnly: true, useAzureSdk: true} as DownloadOptions const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion') getCacheVersionMock.mockReturnValue(cacheVersion) diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index 173a1a87..0bfbf894 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -218,6 +218,11 @@ async function restoreCacheV2( options?: DownloadOptions, enableCrossOsArchive = false ): Promise { + // Override UploadOptions to force the use of Azure + options = { + ...options, + useAzureSdk: true + } restoreKeys = restoreKeys || [] const keys = [primaryKey, ...restoreKeys] From 65892d5ffe49d496db47651191918073a9b5c90a Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Fri, 29 Nov 2024 07:09:05 -0800 Subject: [PATCH 076/108] Fine tune blob uploads --- packages/cache/src/cache.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index 0bfbf894..139512f9 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -465,6 +465,8 @@ async function saveCacheV2( // Override UploadOptions to force the use of Azure options = { ...options, + uploadChunkSize: 64 * 1024 * 1024, // 128MiB + uploadConcurrency: 8, // 8 workers for parallel upload useAzureSdk: true } const compressionMethod = await utils.getCompressionMethod() From 1d403c2fd88438aeb755bc61ffd22630e0a3fea2 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Fri, 29 Nov 2024 07:36:51 -0800 Subject: [PATCH 077/108] Fix tests --- packages/cache/__tests__/saveCacheV2.test.ts | 18 +++++++++++++++--- packages/cache/src/cache.ts | 2 +- 2 files changed, 16 insertions(+), 4 deletions(-) diff --git a/packages/cache/__tests__/saveCacheV2.test.ts b/packages/cache/__tests__/saveCacheV2.test.ts index 3a18272a..94a2462e 100644 --- a/packages/cache/__tests__/saveCacheV2.test.ts +++ b/packages/cache/__tests__/saveCacheV2.test.ts @@ -138,7 +138,11 @@ test('save cache fails if a signedUploadURL was not passed', async () => { const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' const cachePaths = [path.resolve(paths)] const signedUploadURL = '' - const options: UploadOptions = {useAzureSdk: true} + const options: UploadOptions = { + useAzureSdk: true, + uploadChunkSize: 64 * 1024 * 1024, + uploadConcurrency: 8 + } const createCacheEntryMock = jest .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') @@ -191,7 +195,11 @@ test('finalize save cache failure', async () => { const cachePaths = [path.resolve(paths)] const 
logWarningMock = jest.spyOn(core, 'warning') const signedUploadURL = 'https://blob-storage.local?signed=true' - const options: UploadOptions = {useAzureSdk: true} + const options: UploadOptions = { + useAzureSdk: true, + uploadChunkSize: 64 * 1024 * 1024, + uploadConcurrency: 8 + } const createCacheEntryMock = jest .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') @@ -260,7 +268,11 @@ test('save with valid inputs uploads a cache', async () => { const cachePaths = [path.resolve(paths)] const signedUploadURL = 'https://blob-storage.local?signed=true' const createTarMock = jest.spyOn(tar, 'createTar') - const options: UploadOptions = {useAzureSdk: true} + const options: UploadOptions = { + useAzureSdk: true, + uploadChunkSize: 64 * 1024 * 1024, + uploadConcurrency: 8 + } const archiveFileSize = 1024 jest diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index 139512f9..2a89d50d 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -465,7 +465,7 @@ async function saveCacheV2( // Override UploadOptions to force the use of Azure options = { ...options, - uploadChunkSize: 64 * 1024 * 1024, // 128MiB + uploadChunkSize: 64 * 1024 * 1024, // 64 MiB uploadConcurrency: 8, // 8 workers for parallel upload useAzureSdk: true } From c6f1224d30a062385d872421e3a5c0efab0923e7 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Mon, 2 Dec 2024 02:33:27 -0800 Subject: [PATCH 078/108] Add progress tracking for blob uploads --- packages/cache/__tests__/uploadUtils.test.ts | 58 +++++++ packages/cache/src/internal/uploadUtils.ts | 155 +++++++++++++++++-- 2 files changed, 199 insertions(+), 14 deletions(-) create mode 100644 packages/cache/__tests__/uploadUtils.test.ts diff --git a/packages/cache/__tests__/uploadUtils.test.ts b/packages/cache/__tests__/uploadUtils.test.ts new file mode 100644 index 00000000..6a4876d1 --- /dev/null +++ b/packages/cache/__tests__/uploadUtils.test.ts @@ -0,0 +1,58 @@ +import {UploadProgress} from '../src/internal/uploadUtils' +import {TransferProgressEvent} from '@azure/ms-rest-js' + +test('upload progress tracked correctly', () => { + const progress = new UploadProgress(1000) + + expect(progress.contentLength).toBe(1000) + expect(progress.sentBytes).toBe(0) + expect(progress.displayedComplete).toBe(false) + expect(progress.timeoutHandle).toBeUndefined() + expect(progress.getTransferredBytes()).toBe(0) + expect(progress.isDone()).toBe(false) + + progress.onProgress()({loadedBytes: 0} as TransferProgressEvent) + + expect(progress.contentLength).toBe(1000) + expect(progress.sentBytes).toBe(0) + expect(progress.displayedComplete).toBe(false) + expect(progress.timeoutHandle).toBeUndefined() + expect(progress.getTransferredBytes()).toBe(0) + expect(progress.isDone()).toBe(false) + + progress.onProgress()({loadedBytes: 250} as TransferProgressEvent) + + expect(progress.contentLength).toBe(1000) + expect(progress.sentBytes).toBe(250) + expect(progress.displayedComplete).toBe(false) + expect(progress.timeoutHandle).toBeUndefined() + expect(progress.getTransferredBytes()).toBe(250) + expect(progress.isDone()).toBe(false) + + progress.onProgress()({loadedBytes: 500} as TransferProgressEvent) + + expect(progress.contentLength).toBe(1000) + expect(progress.sentBytes).toBe(500) + expect(progress.displayedComplete).toBe(false) + expect(progress.timeoutHandle).toBeUndefined() + expect(progress.getTransferredBytes()).toBe(500) + expect(progress.isDone()).toBe(false) + + progress.onProgress()({loadedBytes: 
750} as TransferProgressEvent) + + expect(progress.contentLength).toBe(1000) + expect(progress.sentBytes).toBe(750) + expect(progress.displayedComplete).toBe(false) + expect(progress.timeoutHandle).toBeUndefined() + expect(progress.getTransferredBytes()).toBe(750) + expect(progress.isDone()).toBe(false) + + progress.onProgress()({loadedBytes: 1000} as TransferProgressEvent) + + expect(progress.contentLength).toBe(1000) + expect(progress.sentBytes).toBe(1000) + expect(progress.displayedComplete).toBe(false) + expect(progress.timeoutHandle).toBeUndefined() + expect(progress.getTransferredBytes()).toBe(1000) + expect(progress.isDone()).toBe(true) +}) diff --git a/packages/cache/src/internal/uploadUtils.ts b/packages/cache/src/internal/uploadUtils.ts index a3376d9d..5ba98f91 100644 --- a/packages/cache/src/internal/uploadUtils.ts +++ b/packages/cache/src/internal/uploadUtils.ts @@ -5,35 +5,162 @@ import { BlockBlobClient, BlockBlobParallelUploadOptions } from '@azure/storage-blob' +import {TransferProgressEvent} from '@azure/ms-rest-js' import {InvalidResponseError} from './shared/errors' import {UploadOptions} from '../options' +/** + * Class for tracking the upload state and displaying stats. + */ +export class UploadProgress { + contentLength: number + sentBytes: number + startTime: number + displayedComplete: boolean + timeoutHandle?: ReturnType + + constructor(contentLength: number) { + this.contentLength = contentLength + this.sentBytes = 0 + this.displayedComplete = false + this.startTime = Date.now() + } + + /** + * Sets the number of bytes sent + * + * @param sentBytes the number of bytes sent + */ + setSentBytes(sentBytes: number): void { + this.sentBytes = sentBytes + } + + /** + * Returns the total number of bytes transferred. + */ + getTransferredBytes(): number { + return this.sentBytes + } + + /** + * Returns true if the upload is complete. + */ + isDone(): boolean { + return this.getTransferredBytes() === this.contentLength + } + + /** + * Prints the current upload stats. Once the upload completes, this will print one + * last line and then stop. + */ + display(): void { + if (this.displayedComplete) { + return + } + + const transferredBytes = this.sentBytes + const percentage = (100 * (transferredBytes / this.contentLength)).toFixed( + 1 + ) + const elapsedTime = Date.now() - this.startTime + const uploadSpeed = ( + transferredBytes / + (1024 * 1024) / + (elapsedTime / 1000) + ).toFixed(1) + + core.info( + `Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec` + ) + + if (this.isDone()) { + this.displayedComplete = true + } + } + + /** + * Returns a function used to handle TransferProgressEvents. + */ + onProgress(): (progress: TransferProgressEvent) => void { + return (progress: TransferProgressEvent) => { + this.setSentBytes(progress.loadedBytes) + } + } + + /** + * Starts the timer that displays the stats. + * + * @param delayInMs the delay between each write + */ + startDisplayTimer(delayInMs = 1000): void { + const displayCallback = (): void => { + this.display() + + if (!this.isDone()) { + this.timeoutHandle = setTimeout(displayCallback, delayInMs) + } + } + + this.timeoutHandle = setTimeout(displayCallback, delayInMs) + } + + /** + * Stops the timer that displays the stats. As this typically indicates the upload + * is complete, this will display one last line, unless the last line has already + * been written. 
From ee1c07d0aafbf266d3a58a4f7bc73efde3f47414 Mon Sep 17 00:00:00 2001
From: Bassem Dghaidi <568794+Link-@users.noreply.github.com>
Date: Mon, 2 Dec 2024 02:38:51 -0800
Subject: [PATCH 079/108] Add error handling for failed uploads

---
 packages/cache/src/internal/uploadUtils.ts | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/packages/cache/src/internal/uploadUtils.ts b/packages/cache/src/internal/uploadUtils.ts
index 5ba98f91..efb80de1 100644
--- a/packages/cache/src/internal/uploadUtils.ts
+++ b/packages/cache/src/internal/uploadUtils.ts
@@ -160,6 +160,9 @@ export async function uploadCacheArchiveSDK(
     }

     return response
+  } catch (error) {
+    core.debug(`Error uploading cache archive: ${error}`)
+    throw error
   } finally {
     uploadProgress.stopDisplayTimer()
   }

From 4a272e90530ba272dcc251d638f0aa4bffe91f61 Mon Sep 17 00:00:00 2001
From: Bassem Dghaidi <568794+Link-@users.noreply.github.com>
Date: Mon, 2 Dec 2024 03:08:05 -0800
Subject: [PATCH 080/108] Troubleshoot

---
 packages/cache/src/internal/uploadUtils.ts | 43 ++++++++++------------
 1 file changed, 20 insertions(+), 23 deletions(-)

diff --git a/packages/cache/src/internal/uploadUtils.ts b/packages/cache/src/internal/uploadUtils.ts
index efb80de1..372b2176 100644
--- a/packages/cache/src/internal/uploadUtils.ts
+++ b/packages/cache/src/internal/uploadUtils.ts
@@ -140,30 +140,27 @@ export async function uploadCacheArchiveSDK(
     onProgress: uploadProgress.onProgress()
   }

-  try {
-    uploadProgress.startDisplayTimer()
+  // try {
+  uploadProgress.startDisplayTimer()

-    core.debug(
-
`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}` + core.debug( + `BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}` + ) + + const response = await blockBlobClient.uploadFile(archivePath, uploadOptions) + + // TODO: better management of non-retryable errors + if (response._response.status >= 400) { + throw new InvalidResponseError( + `Upload failed with status code ${response._response.status}` ) - - const response = await blockBlobClient.uploadFile( - archivePath, - uploadOptions - ) - - // TODO: better management of non-retryable errors - if (response._response.status >= 400) { - throw new InvalidResponseError( - `Upload failed with status code ${response._response.status}` - ) - } - - return response - } catch (error) { - core.debug(`Error uploading cache archive: ${error}`) - throw error - } finally { - uploadProgress.stopDisplayTimer() } + + return response + // } catch (error) { + // core.debug(`Error uploading cache archive: ${error}`) + // throw error + // } finally { + // uploadProgress.stopDisplayTimer() + // } } From db1d01308c2999654e70c419d5a545c345ed5fa2 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Mon, 2 Dec 2024 03:35:20 -0800 Subject: [PATCH 081/108] Troubleshoot --- packages/cache/__tests__/uploadUtils.test.ts | 26 +++++++++++- packages/cache/src/cache.ts | 1 + packages/cache/src/internal/uploadUtils.ts | 43 +++++++++++--------- 3 files changed, 48 insertions(+), 22 deletions(-) diff --git a/packages/cache/__tests__/uploadUtils.test.ts b/packages/cache/__tests__/uploadUtils.test.ts index 6a4876d1..65fed6f7 100644 --- a/packages/cache/__tests__/uploadUtils.test.ts +++ b/packages/cache/__tests__/uploadUtils.test.ts @@ -1,8 +1,8 @@ -import {UploadProgress} from '../src/internal/uploadUtils' +import * as uploadUtils from '../src/internal/uploadUtils' import {TransferProgressEvent} from '@azure/ms-rest-js' test('upload progress tracked correctly', () => { - const progress = new UploadProgress(1000) + const progress = new uploadUtils.UploadProgress(1000) expect(progress.contentLength).toBe(1000) expect(progress.sentBytes).toBe(0) @@ -56,3 +56,25 @@ test('upload progress tracked correctly', () => { expect(progress.getTransferredBytes()).toBe(1000) expect(progress.isDone()).toBe(true) }) + +// test('upload to azure blob storage is successful', () => { +// const archivePath = 'path/to/archive.tzst' +// const signedUploadURL = 'https://storage10.blob.core.windows.net/cache-container/3fe-60?se=2024-12-002T11%3A08%3A58Z&sv=2024-11-04' +// const options: UploadOptions = { +// useAzureSdk: true, +// uploadChunkSize: 64 * 1024 * 1024, +// uploadConcurrency: 8 +// } + +// jest.spyOn(uploadUtils.UploadProgress.prototype, 'onProgress').mockImplementation(() => (progress: TransferProgressEvent) => { +// return progress.loadedBytes +// }) + +// jest.spyOn(uploadUtils.UploadProgress.prototype, 'onProgress').mockImplementation(() => (progress: TransferProgressEvent) => { +// return progress.loadedBytes +// }) + +// const response = uploadUtils.uploadCacheArchiveSDK(signedUploadURL, archivePath, options) + +// expect(response).toBeInstanceOf(Promise) +// }) diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index 2a89d50d..69931e29 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -561,6 +561,7 @@ async function saveCacheV2( } else { core.warning(`Failed to save: ${typedError.message}`) } + throw error } finally { // Try to 
delete the archive to save space
     try {
diff --git a/packages/cache/src/internal/uploadUtils.ts b/packages/cache/src/internal/uploadUtils.ts
index 372b2176..efb80de1 100644
--- a/packages/cache/src/internal/uploadUtils.ts
+++ b/packages/cache/src/internal/uploadUtils.ts
@@ -140,27 +140,30 @@ export async function uploadCacheArchiveSDK(
     onProgress: uploadProgress.onProgress()
   }

-  // try {
-  uploadProgress.startDisplayTimer()
+  try {
+    uploadProgress.startDisplayTimer()

-  core.debug(
-    `BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`
-  )
-
-  const response = await blockBlobClient.uploadFile(archivePath, uploadOptions)
-
-  // TODO: better management of non-retryable errors
-  if (response._response.status >= 400) {
-    throw new InvalidResponseError(
-      `Upload failed with status code ${response._response.status}`
+    core.debug(
+      `BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`
     )
-  }
-
-  return response
-  // } catch (error) {
-  //   core.debug(`Error uploading cache archive: ${error}`)
-  //   throw error
-  // } finally {
-  //   uploadProgress.stopDisplayTimer()
-  // }
+
+    const response = await blockBlobClient.uploadFile(
+      archivePath,
+      uploadOptions
+    )
+
+    // TODO: better management of non-retryable errors
+    if (response._response.status >= 400) {
+      throw new InvalidResponseError(
+        `Upload failed with status code ${response._response.status}`
+      )
+    }
+
+    return response
+  } catch (error) {
+    core.debug(`Error uploading cache archive: ${error}`)
+    throw error
+  } finally {
+    uploadProgress.stopDisplayTimer()
+  }
 }

From d89855bb90ff9a5c406c07a99f409d4bbb66e9a0 Mon Sep 17 00:00:00 2001
From: Bassem Dghaidi <568794+Link-@users.noreply.github.com>
Date: Mon, 2 Dec 2024 03:55:57 -0800
Subject: [PATCH 082/108] Fix upload progress bug

---
 packages/cache/src/cache.ts                |  5 ++++-
 packages/cache/src/internal/uploadUtils.ts | 10 ++++------
 packages/cache/src/options.ts              |  4 ++++
 3 files changed, 12 insertions(+), 7 deletions(-)

diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts
index 69931e29..78439141 100644
--- a/packages/cache/src/cache.ts
+++ b/packages/cache/src/cache.ts
@@ -509,6 +509,10 @@ async function saveCacheV2(
     )
   }

+  // Set the archive size in the options, will be used to display the upload
+  // progress
+  options.archiveSizeBytes = archiveFileSize
+
   core.debug('Reserving Cache')
   const version = utils.getCacheVersion(
     paths,
@@ -561,7 +565,6 @@ async function saveCacheV2(
     } else {
       core.warning(`Failed to save: ${typedError.message}`)
     }
-    throw error
   } finally {
     // Try to delete the archive to save space
     try {
diff --git a/packages/cache/src/internal/uploadUtils.ts b/packages/cache/src/internal/uploadUtils.ts
index efb80de1..7d471a0d 100644
--- a/packages/cache/src/internal/uploadUtils.ts
+++ b/packages/cache/src/internal/uploadUtils.ts
@@ -126,11 +126,7 @@ export async function uploadCacheArchiveSDK(
 ): Promise<BlobUploadCommonResponse> {
   const blobClient: BlobClient = new BlobClient(signedUploadURL)
   const blockBlobClient: BlockBlobClient = blobClient.getBlockBlobClient()
-
-  const properties = await blobClient.getProperties()
-  const contentLength = properties.contentLength ?? -1
-
-  const uploadProgress = new UploadProgress(contentLength)
+  const uploadProgress = new UploadProgress(options?.archiveSizeBytes ?? 0)

   // Specify data transfer options
   const uploadOptions: BlockBlobParallelUploadOptions = {
@@ -161,7 +157,9 @@ export async function uploadCacheArchiveSDK(

     return response
   } catch (error) {
-    core.debug(`Error uploading cache archive: ${error}`)
+    core.warning(
+      `uploadCacheArchiveSDK: internal error uploading cache archive: ${error.message}`
+    )
     throw error
   } finally {
diff --git a/packages/cache/src/options.ts b/packages/cache/src/options.ts
index 778c6a0e..08e71c10 100644
--- a/packages/cache/src/options.ts
+++ b/packages/cache/src/options.ts
@@ -24,6 +24,10 @@ export interface UploadOptions {
    * @default 32MB
    */
   uploadChunkSize?: number
+  /**
+   * Archive size in bytes
+   */
+  archiveSizeBytes?: number
 }

 /**
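With the getProperties() round trip removed above, the progress total must now come from the caller. A hedged sketch of supplying it (hypothetical path; the package itself measures the tarball with cacheUtils.getArchiveFileSizeInBytes). Measuring locally also sidesteps asking Azure for properties of a blob that does not exist yet:

```typescript
import * as fs from 'fs'
import {UploadOptions} from './options'

// Measure the tarball on disk before the upload starts.
const archivePath = '/tmp/cache.tzst' // hypothetical archive location
const options: UploadOptions = {useAzureSdk: true}
options.archiveSizeBytes = fs.statSync(archivePath).size

// uploadCacheArchiveSDK can now size its UploadProgress tracker up front:
const progressTotal = options.archiveSizeBytes ?? 0
console.log(`tracking ${progressTotal} bytes`)
```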
From a762876d6d79617f23705f7dd9b6a71ec69c11aa Mon Sep 17 00:00:00 2001
From: Bassem Dghaidi <568794+Link-@users.noreply.github.com>
Date: Mon, 2 Dec 2024 04:08:21 -0800
Subject: [PATCH 083/108] Minor refactoring

---
 packages/cache/src/cache.ts                | 27 +++++++++++-----------
 packages/cache/src/internal/uploadUtils.ts | 12 +++++++++-
 2 files changed, 24 insertions(+), 15 deletions(-)

diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts
index 78439141..f94ccc6b 100644
--- a/packages/cache/src/cache.ts
+++ b/packages/cache/src/cache.ts
@@ -106,12 +106,12 @@ export async function restoreCache(
 /**
  * Restores cache using the legacy Cache Service
  *
- * @param paths
- * @param primaryKey
- * @param restoreKeys
- * @param options
- * @param enableCrossOsArchive
- * @returns
+ * @param paths a list of file paths to restore from the cache
+ * @param primaryKey an explicit key for restoring the cache
+ * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
+ * @param options cache download options
+ * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
+ * @returns string returns the key for the cache hit, otherwise returns undefined
  */
 async function restoreCacheV1(
   paths: string[],
@@ -202,7 +202,7 @@
 }

 /**
- * Restores cache using the new Cache Service
+ * Restores cache using Cache Service v2
  *
  * @param paths a list of file paths to restore from the cache
  * @param primaryKey an explicit key for restoring the cache
@@ -448,12 +448,12 @@
 }

 /**
- * Save cache using the new Cache Service
+ * Save cache using Cache Service v2
  *
- * @param paths
- * @param key
- * @param options
- * @param enableCrossOsArchive
+ * @param paths a list of file paths to restore from the cache
+ * @param key an explicit key for restoring the cache
+ * @param options cache upload options
+ * @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform
  * @returns
  */
 async function saveCacheV2(
@@ -509,8 +509,7 @@
     )
   }

-  // Set the archive size in the options, will be used to display the upload
-  // progress
+  // Set the archive size in the options, will be used to display the upload progress
   options.archiveSizeBytes = archiveFileSize

   core.debug('Reserving Cache')
diff --git a/packages/cache/src/internal/uploadUtils.ts b/packages/cache/src/internal/uploadUtils.ts
index 7d471a0d..1b4f7af0 100644
--- a/packages/cache/src/internal/uploadUtils.ts
+++ b/packages/cache/src/internal/uploadUtils.ts
@@ -119,6 +119,16 @@
   }
 }

+/**
+ * Uploads a cache archive directly to Azure Blob Storage
using the Azure SDK. + * This function will display progress information to the console. Concurrency of the + * upload is determined by the calling functions. + * + * @param signedUploadURL + * @param archivePath + * @param options + * @returns + */ export async function uploadCacheArchiveSDK( signedUploadURL: string, archivePath: string, @@ -151,7 +161,7 @@ export async function uploadCacheArchiveSDK( // TODO: better management of non-retryable errors if (response._response.status >= 400) { throw new InvalidResponseError( - `Upload failed with status code ${response._response.status}` + `uploadCacheArchiveSDK: upload failed with status code ${response._response.status}` ) } From 87171e29ca36c26436ba9ad4ccd469538bf746b7 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Mon, 2 Dec 2024 04:18:46 -0800 Subject: [PATCH 084/108] Fix tests --- packages/cache/__tests__/saveCacheV2.test.ts | 39 +++++++++++--------- packages/cache/__tests__/uploadUtils.test.ts | 34 +++-------------- 2 files changed, 27 insertions(+), 46 deletions(-) diff --git a/packages/cache/__tests__/saveCacheV2.test.ts b/packages/cache/__tests__/saveCacheV2.test.ts index 94a2462e..285d973b 100644 --- a/packages/cache/__tests__/saveCacheV2.test.ts +++ b/packages/cache/__tests__/saveCacheV2.test.ts @@ -1,13 +1,13 @@ import * as core from '@actions/core' import * as path from 'path' -import {saveCache} from '../src/cache' +import { saveCache } from '../src/cache' import * as cacheUtils from '../src/internal/cacheUtils' -import {CacheFilename, CompressionMethod} from '../src/internal/constants' +import { CacheFilename, CompressionMethod } from '../src/internal/constants' import * as config from '../src/internal/config' import * as tar from '../src/internal/tar' -import {CacheServiceClientJSON} from '../src/generated/results/api/v1/cache.twirp' +import { CacheServiceClientJSON } from '../src/generated/results/api/v1/cache.twirp' import * as cacheHttpClient from '../src/internal/cacheHttpClient' -import {UploadOptions} from '../src/options' +import { UploadOptions } from '../src/options' let logDebugMock: jest.SpyInstance @@ -27,11 +27,11 @@ jest.mock('@azure/storage-blob', () => ({ beforeAll(() => { process.env['ACTIONS_RUNTIME_TOKEN'] = 'token' - jest.spyOn(console, 'log').mockImplementation(() => {}) - jest.spyOn(core, 'debug').mockImplementation(() => {}) - jest.spyOn(core, 'info').mockImplementation(() => {}) - jest.spyOn(core, 'warning').mockImplementation(() => {}) - jest.spyOn(core, 'error').mockImplementation(() => {}) + jest.spyOn(console, 'log').mockImplementation(() => { }) + jest.spyOn(core, 'debug').mockImplementation(() => { }) + jest.spyOn(core, 'info').mockImplementation(() => { }) + jest.spyOn(core, 'warning').mockImplementation(() => { }) + jest.spyOn(core, 'error').mockImplementation(() => { }) jest.spyOn(cacheUtils, 'resolvePaths').mockImplementation(async filePaths => { return filePaths.map(x => path.resolve(x)) }) @@ -99,7 +99,7 @@ test('create cache entry failure', async () => { const createCacheEntryMock = jest .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') - .mockReturnValue(Promise.resolve({ok: false, signedUploadUrl: ''})) + .mockReturnValue(Promise.resolve({ ok: false, signedUploadUrl: '' })) const createTarMock = jest.spyOn(tar, 'createTar') const finalizeCacheEntryMock = jest.spyOn( @@ -138,7 +138,9 @@ test('save cache fails if a signedUploadURL was not passed', async () => { const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' const 
cachePaths = [path.resolve(paths)] const signedUploadURL = '' + const archiveFileSize = 1024 const options: UploadOptions = { + archiveSizeBytes: archiveFileSize, // These should always match useAzureSdk: true, uploadChunkSize: 64 * 1024 * 1024, uploadConcurrency: 8 @@ -147,7 +149,7 @@ test('save cache fails if a signedUploadURL was not passed', async () => { const createCacheEntryMock = jest .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') .mockReturnValue( - Promise.resolve({ok: true, signedUploadUrl: signedUploadURL}) + Promise.resolve({ ok: true, signedUploadUrl: signedUploadURL }) ) const createTarMock = jest.spyOn(tar, 'createTar') @@ -159,7 +161,6 @@ test('save cache fails if a signedUploadURL was not passed', async () => { .mockReturnValueOnce(Promise.resolve(compression)) const cacheVersion = cacheUtils.getCacheVersion([paths], compression) - const archiveFileSize = 1024 jest .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') .mockReturnValueOnce(archiveFileSize) @@ -195,7 +196,9 @@ test('finalize save cache failure', async () => { const cachePaths = [path.resolve(paths)] const logWarningMock = jest.spyOn(core, 'warning') const signedUploadURL = 'https://blob-storage.local?signed=true' + const archiveFileSize = 1024 const options: UploadOptions = { + archiveSizeBytes: archiveFileSize, // These should always match useAzureSdk: true, uploadChunkSize: 64 * 1024 * 1024, uploadConcurrency: 8 @@ -204,7 +207,7 @@ test('finalize save cache failure', async () => { const createCacheEntryMock = jest .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') .mockReturnValue( - Promise.resolve({ok: true, signedUploadUrl: signedUploadURL}) + Promise.resolve({ ok: true, signedUploadUrl: signedUploadURL }) ) const createTarMock = jest.spyOn(tar, 'createTar') @@ -218,14 +221,13 @@ test('finalize save cache failure', async () => { .mockReturnValueOnce(Promise.resolve(compression)) const cacheVersion = cacheUtils.getCacheVersion([paths], compression) - const archiveFileSize = 1024 jest .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') .mockReturnValueOnce(archiveFileSize) const finalizeCacheEntryMock = jest .spyOn(CacheServiceClientJSON.prototype, 'FinalizeCacheEntryUpload') - .mockReturnValue(Promise.resolve({ok: false, entryId: ''})) + .mockReturnValue(Promise.resolve({ ok: false, entryId: '' })) const cacheId = await saveCache([paths], key, options) @@ -268,13 +270,14 @@ test('save with valid inputs uploads a cache', async () => { const cachePaths = [path.resolve(paths)] const signedUploadURL = 'https://blob-storage.local?signed=true' const createTarMock = jest.spyOn(tar, 'createTar') + const archiveFileSize = 1024 const options: UploadOptions = { + archiveSizeBytes: archiveFileSize, // These should always match useAzureSdk: true, uploadChunkSize: 64 * 1024 * 1024, uploadConcurrency: 8 } - const archiveFileSize = 1024 jest .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') .mockReturnValueOnce(archiveFileSize) @@ -283,7 +286,7 @@ test('save with valid inputs uploads a cache', async () => { jest .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') .mockReturnValue( - Promise.resolve({ok: true, signedUploadUrl: signedUploadURL}) + Promise.resolve({ ok: true, signedUploadUrl: signedUploadURL }) ) const saveCacheMock = jest.spyOn(cacheHttpClient, 'saveCache') @@ -296,7 +299,7 @@ test('save with valid inputs uploads a cache', async () => { const finalizeCacheEntryMock = jest .spyOn(CacheServiceClientJSON.prototype, 'FinalizeCacheEntryUpload') - .mockReturnValue(Promise.resolve({ok: 
true, entryId: cacheId.toString()})) + .mockReturnValue(Promise.resolve({ ok: true, entryId: cacheId.toString() })) const expectedCacheId = await saveCache([paths], key) diff --git a/packages/cache/__tests__/uploadUtils.test.ts b/packages/cache/__tests__/uploadUtils.test.ts index 65fed6f7..786b1d3f 100644 --- a/packages/cache/__tests__/uploadUtils.test.ts +++ b/packages/cache/__tests__/uploadUtils.test.ts @@ -1,5 +1,5 @@ import * as uploadUtils from '../src/internal/uploadUtils' -import {TransferProgressEvent} from '@azure/ms-rest-js' +import { TransferProgressEvent } from '@azure/ms-rest-js' test('upload progress tracked correctly', () => { const progress = new uploadUtils.UploadProgress(1000) @@ -11,7 +11,7 @@ test('upload progress tracked correctly', () => { expect(progress.getTransferredBytes()).toBe(0) expect(progress.isDone()).toBe(false) - progress.onProgress()({loadedBytes: 0} as TransferProgressEvent) + progress.onProgress()({ loadedBytes: 0 } as TransferProgressEvent) expect(progress.contentLength).toBe(1000) expect(progress.sentBytes).toBe(0) @@ -20,7 +20,7 @@ test('upload progress tracked correctly', () => { expect(progress.getTransferredBytes()).toBe(0) expect(progress.isDone()).toBe(false) - progress.onProgress()({loadedBytes: 250} as TransferProgressEvent) + progress.onProgress()({ loadedBytes: 250 } as TransferProgressEvent) expect(progress.contentLength).toBe(1000) expect(progress.sentBytes).toBe(250) @@ -29,7 +29,7 @@ test('upload progress tracked correctly', () => { expect(progress.getTransferredBytes()).toBe(250) expect(progress.isDone()).toBe(false) - progress.onProgress()({loadedBytes: 500} as TransferProgressEvent) + progress.onProgress()({ loadedBytes: 500 } as TransferProgressEvent) expect(progress.contentLength).toBe(1000) expect(progress.sentBytes).toBe(500) @@ -38,7 +38,7 @@ test('upload progress tracked correctly', () => { expect(progress.getTransferredBytes()).toBe(500) expect(progress.isDone()).toBe(false) - progress.onProgress()({loadedBytes: 750} as TransferProgressEvent) + progress.onProgress()({ loadedBytes: 750 } as TransferProgressEvent) expect(progress.contentLength).toBe(1000) expect(progress.sentBytes).toBe(750) @@ -47,7 +47,7 @@ test('upload progress tracked correctly', () => { expect(progress.getTransferredBytes()).toBe(750) expect(progress.isDone()).toBe(false) - progress.onProgress()({loadedBytes: 1000} as TransferProgressEvent) + progress.onProgress()({ loadedBytes: 1000 } as TransferProgressEvent) expect(progress.contentLength).toBe(1000) expect(progress.sentBytes).toBe(1000) @@ -56,25 +56,3 @@ test('upload progress tracked correctly', () => { expect(progress.getTransferredBytes()).toBe(1000) expect(progress.isDone()).toBe(true) }) - -// test('upload to azure blob storage is successful', () => { -// const archivePath = 'path/to/archive.tzst' -// const signedUploadURL = 'https://storage10.blob.core.windows.net/cache-container/3fe-60?se=2024-12-002T11%3A08%3A58Z&sv=2024-11-04' -// const options: UploadOptions = { -// useAzureSdk: true, -// uploadChunkSize: 64 * 1024 * 1024, -// uploadConcurrency: 8 -// } - -// jest.spyOn(uploadUtils.UploadProgress.prototype, 'onProgress').mockImplementation(() => (progress: TransferProgressEvent) => { -// return progress.loadedBytes -// }) - -// jest.spyOn(uploadUtils.UploadProgress.prototype, 'onProgress').mockImplementation(() => (progress: TransferProgressEvent) => { -// return progress.loadedBytes -// }) - -// const response = uploadUtils.uploadCacheArchiveSDK(signedUploadURL, archivePath, options) - -// 
expect(response).toBeInstanceOf(Promise) -// }) From 7ad18fd6bd04185a7f728e401936e1d2b5dbe281 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Mon, 2 Dec 2024 04:24:17 -0800 Subject: [PATCH 085/108] Fix linter complaints --- packages/cache/__tests__/saveCacheV2.test.ts | 30 ++++++++++---------- packages/cache/__tests__/uploadUtils.test.ts | 12 ++++---- 2 files changed, 21 insertions(+), 21 deletions(-) diff --git a/packages/cache/__tests__/saveCacheV2.test.ts b/packages/cache/__tests__/saveCacheV2.test.ts index 285d973b..6744425d 100644 --- a/packages/cache/__tests__/saveCacheV2.test.ts +++ b/packages/cache/__tests__/saveCacheV2.test.ts @@ -1,13 +1,13 @@ import * as core from '@actions/core' import * as path from 'path' -import { saveCache } from '../src/cache' +import {saveCache} from '../src/cache' import * as cacheUtils from '../src/internal/cacheUtils' -import { CacheFilename, CompressionMethod } from '../src/internal/constants' +import {CacheFilename, CompressionMethod} from '../src/internal/constants' import * as config from '../src/internal/config' import * as tar from '../src/internal/tar' -import { CacheServiceClientJSON } from '../src/generated/results/api/v1/cache.twirp' +import {CacheServiceClientJSON} from '../src/generated/results/api/v1/cache.twirp' import * as cacheHttpClient from '../src/internal/cacheHttpClient' -import { UploadOptions } from '../src/options' +import {UploadOptions} from '../src/options' let logDebugMock: jest.SpyInstance @@ -27,11 +27,11 @@ jest.mock('@azure/storage-blob', () => ({ beforeAll(() => { process.env['ACTIONS_RUNTIME_TOKEN'] = 'token' - jest.spyOn(console, 'log').mockImplementation(() => { }) - jest.spyOn(core, 'debug').mockImplementation(() => { }) - jest.spyOn(core, 'info').mockImplementation(() => { }) - jest.spyOn(core, 'warning').mockImplementation(() => { }) - jest.spyOn(core, 'error').mockImplementation(() => { }) + jest.spyOn(console, 'log').mockImplementation(() => {}) + jest.spyOn(core, 'debug').mockImplementation(() => {}) + jest.spyOn(core, 'info').mockImplementation(() => {}) + jest.spyOn(core, 'warning').mockImplementation(() => {}) + jest.spyOn(core, 'error').mockImplementation(() => {}) jest.spyOn(cacheUtils, 'resolvePaths').mockImplementation(async filePaths => { return filePaths.map(x => path.resolve(x)) }) @@ -99,7 +99,7 @@ test('create cache entry failure', async () => { const createCacheEntryMock = jest .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') - .mockReturnValue(Promise.resolve({ ok: false, signedUploadUrl: '' })) + .mockReturnValue(Promise.resolve({ok: false, signedUploadUrl: ''})) const createTarMock = jest.spyOn(tar, 'createTar') const finalizeCacheEntryMock = jest.spyOn( @@ -149,7 +149,7 @@ test('save cache fails if a signedUploadURL was not passed', async () => { const createCacheEntryMock = jest .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') .mockReturnValue( - Promise.resolve({ ok: true, signedUploadUrl: signedUploadURL }) + Promise.resolve({ok: true, signedUploadUrl: signedUploadURL}) ) const createTarMock = jest.spyOn(tar, 'createTar') @@ -207,7 +207,7 @@ test('finalize save cache failure', async () => { const createCacheEntryMock = jest .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') .mockReturnValue( - Promise.resolve({ ok: true, signedUploadUrl: signedUploadURL }) + Promise.resolve({ok: true, signedUploadUrl: signedUploadURL}) ) const createTarMock = jest.spyOn(tar, 'createTar') @@ -227,7 +227,7 @@ test('finalize save 
cache failure', async () => { const finalizeCacheEntryMock = jest .spyOn(CacheServiceClientJSON.prototype, 'FinalizeCacheEntryUpload') - .mockReturnValue(Promise.resolve({ ok: false, entryId: '' })) + .mockReturnValue(Promise.resolve({ok: false, entryId: ''})) const cacheId = await saveCache([paths], key, options) @@ -286,7 +286,7 @@ test('save with valid inputs uploads a cache', async () => { jest .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry') .mockReturnValue( - Promise.resolve({ ok: true, signedUploadUrl: signedUploadURL }) + Promise.resolve({ok: true, signedUploadUrl: signedUploadURL}) ) const saveCacheMock = jest.spyOn(cacheHttpClient, 'saveCache') @@ -299,7 +299,7 @@ test('save with valid inputs uploads a cache', async () => { const finalizeCacheEntryMock = jest .spyOn(CacheServiceClientJSON.prototype, 'FinalizeCacheEntryUpload') - .mockReturnValue(Promise.resolve({ ok: true, entryId: cacheId.toString() })) + .mockReturnValue(Promise.resolve({ok: true, entryId: cacheId.toString()})) const expectedCacheId = await saveCache([paths], key) diff --git a/packages/cache/__tests__/uploadUtils.test.ts b/packages/cache/__tests__/uploadUtils.test.ts index 786b1d3f..2f0b8b55 100644 --- a/packages/cache/__tests__/uploadUtils.test.ts +++ b/packages/cache/__tests__/uploadUtils.test.ts @@ -1,5 +1,5 @@ import * as uploadUtils from '../src/internal/uploadUtils' -import { TransferProgressEvent } from '@azure/ms-rest-js' +import {TransferProgressEvent} from '@azure/ms-rest-js' test('upload progress tracked correctly', () => { const progress = new uploadUtils.UploadProgress(1000) @@ -11,7 +11,7 @@ test('upload progress tracked correctly', () => { expect(progress.getTransferredBytes()).toBe(0) expect(progress.isDone()).toBe(false) - progress.onProgress()({ loadedBytes: 0 } as TransferProgressEvent) + progress.onProgress()({loadedBytes: 0} as TransferProgressEvent) expect(progress.contentLength).toBe(1000) expect(progress.sentBytes).toBe(0) @@ -20,7 +20,7 @@ test('upload progress tracked correctly', () => { expect(progress.getTransferredBytes()).toBe(0) expect(progress.isDone()).toBe(false) - progress.onProgress()({ loadedBytes: 250 } as TransferProgressEvent) + progress.onProgress()({loadedBytes: 250} as TransferProgressEvent) expect(progress.contentLength).toBe(1000) expect(progress.sentBytes).toBe(250) @@ -29,7 +29,7 @@ test('upload progress tracked correctly', () => { expect(progress.getTransferredBytes()).toBe(250) expect(progress.isDone()).toBe(false) - progress.onProgress()({ loadedBytes: 500 } as TransferProgressEvent) + progress.onProgress()({loadedBytes: 500} as TransferProgressEvent) expect(progress.contentLength).toBe(1000) expect(progress.sentBytes).toBe(500) @@ -38,7 +38,7 @@ test('upload progress tracked correctly', () => { expect(progress.getTransferredBytes()).toBe(500) expect(progress.isDone()).toBe(false) - progress.onProgress()({ loadedBytes: 750 } as TransferProgressEvent) + progress.onProgress()({loadedBytes: 750} as TransferProgressEvent) expect(progress.contentLength).toBe(1000) expect(progress.sentBytes).toBe(750) @@ -47,7 +47,7 @@ test('upload progress tracked correctly', () => { expect(progress.getTransferredBytes()).toBe(750) expect(progress.isDone()).toBe(false) - progress.onProgress()({ loadedBytes: 1000 } as TransferProgressEvent) + progress.onProgress()({loadedBytes: 1000} as TransferProgressEvent) expect(progress.contentLength).toBe(1000) expect(progress.sentBytes).toBe(1000) From 792ec716de29f031413f3e4f810142cd223f1773 Mon Sep 17 00:00:00 2001 From: Bassem 
Dghaidi <568794+Link-@users.noreply.github.com> Date: Mon, 2 Dec 2024 07:32:33 -0800 Subject: [PATCH 086/108] Tune upload options --- packages/cache/__tests__/options.test.ts | 41 ++++++++++++++++++++---- packages/cache/src/options.ts | 20 ++++++++++++ 2 files changed, 54 insertions(+), 7 deletions(-) diff --git a/packages/cache/__tests__/options.test.ts b/packages/cache/__tests__/options.test.ts index fd742487..b4c5a1f1 100644 --- a/packages/cache/__tests__/options.test.ts +++ b/packages/cache/__tests__/options.test.ts @@ -11,8 +11,6 @@ const downloadConcurrency = 8 const timeoutInMs = 30000 const segmentTimeoutInMs = 600000 const lookupOnly = false -const uploadConcurrency = 4 -const uploadChunkSize = 32 * 1024 * 1024 test('getDownloadOptions sets defaults', async () => { const actualOptions = getDownloadOptions() @@ -43,13 +41,14 @@ test('getDownloadOptions overrides all settings', async () => { }) test('getUploadOptions sets defaults', async () => { + const expectedOptions: UploadOptions = { + uploadConcurrency: 4, + uploadChunkSize: 32 * 1024 * 1024, + useAzureSdk: false + } const actualOptions = getUploadOptions() - expect(actualOptions).toEqual({ - uploadConcurrency, - uploadChunkSize, - useAzureSdk - }) + expect(actualOptions).toEqual(expectedOptions) }) test('getUploadOptions overrides all settings', async () => { @@ -64,6 +63,34 @@ test('getUploadOptions overrides all settings', async () => { expect(actualOptions).toEqual(expectedOptions) }) +test('env variables override all getUploadOptions settings', async () => { + const expectedOptions: UploadOptions = { + uploadConcurrency: 16, + uploadChunkSize: 64 * 1024 * 1024, + useAzureSdk: true + } + + process.env.CACHE_UPLOAD_CONCURRENCY = '16' + process.env.CACHE_UPLOAD_CHUNK_SIZE = '64' + + const actualOptions = getUploadOptions(expectedOptions) + expect(actualOptions).toEqual(expectedOptions) +}) + +test('env variables override all getUploadOptions settings but do not exceed caps', async () => { + const expectedOptions: UploadOptions = { + uploadConcurrency: 32, + uploadChunkSize: 128 * 1024 * 1024, + useAzureSdk: true + } + + process.env.CACHE_UPLOAD_CONCURRENCY = '64' + process.env.CACHE_UPLOAD_CHUNK_SIZE = '256' + + const actualOptions = getUploadOptions(expectedOptions) + expect(actualOptions).toEqual(expectedOptions) +}) + test('getDownloadOptions overrides download timeout minutes', async () => { const expectedOptions: DownloadOptions = { useAzureSdk: false, diff --git a/packages/cache/src/options.ts b/packages/cache/src/options.ts index 08e71c10..3e4063f2 100644 --- a/packages/cache/src/options.ts +++ b/packages/cache/src/options.ts @@ -88,6 +88,7 @@ export interface DownloadOptions { * @param copy the original upload options */ export function getUploadOptions(copy?: UploadOptions): UploadOptions { + // Defaults if not overriden const result: UploadOptions = { useAzureSdk: false, uploadConcurrency: 4, @@ -108,6 +109,25 @@ export function getUploadOptions(copy?: UploadOptions): UploadOptions { } } + /** + * Add env var overrides + */ + // Cap the uploadConcurrency at 32 + result.uploadConcurrency = !isNaN( + Number(process.env['CACHE_UPLOAD_CONCURRENCY']) + ) + ? Math.min(32, Number(process.env['CACHE_UPLOAD_CONCURRENCY'])) + : result.uploadConcurrency + // Cap the uploadChunkSize at 128MiB + result.uploadChunkSize = !isNaN( + Number(process.env['CACHE_UPLOAD_CHUNK_SIZE']) + ) + ? 
Math.min(
+        128 * 1024 * 1024,
+        Number(process.env['CACHE_UPLOAD_CHUNK_SIZE']) * 1024 * 1024
+      )
+    : result.uploadChunkSize
+
   core.debug(`Use Azure SDK: ${result.useAzureSdk}`)
   core.debug(`Upload concurrency: ${result.uploadConcurrency}`)
   core.debug(`Upload chunk size: ${result.uploadChunkSize}`)
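A hedged sketch of the capping behavior introduced above (assuming getUploadOptions is imported from the compiled options module; per the code above, CACHE_UPLOAD_CHUNK_SIZE is read as MiB while CACHE_UPLOAD_CONCURRENCY is a worker count):

```typescript
import {getUploadOptions} from './options'

// Ask for more than the caps allow...
process.env['CACHE_UPLOAD_CONCURRENCY'] = '64' // capped at 32 workers
process.env['CACHE_UPLOAD_CHUNK_SIZE'] = '256' // MiB, capped at 128 MiB

const options = getUploadOptions({useAzureSdk: true})

// ...and the clamps win:
console.log(options.uploadConcurrency) // 32
console.log(options.uploadChunkSize) // 134217728 (128 * 1024 * 1024)
```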
From b24632bd8043752827cc8295ef756969acf9ae21 Mon Sep 17 00:00:00 2001
From: Bassem Dghaidi <568794+Link-@users.noreply.github.com>
Date: Mon, 2 Dec 2024 19:46:11 +0100
Subject: [PATCH 087/108] Fix comments

Co-authored-by: Josh Gross
---
 packages/cache/src/cache.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts
index f94ccc6b..2959cc62 100644
--- a/packages/cache/src/cache.ts
+++ b/packages/cache/src/cache.ts
@@ -110,7 +110,7 @@ export async function restoreCache(
  * @param primaryKey an explicit key for restoring the cache
  * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
  * @param options cache download options
- * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
+ * @param enableCrossOsArchive an optional boolean enabled to restore on Windows any cache created on any platform
  * @returns string returns the key for the cache hit, otherwise returns undefined
  */
 async function restoreCacheV1(

From 3f7df8ec5a47cccf436f2f782b98daa012db818b Mon Sep 17 00:00:00 2001
From: Bassem Dghaidi <568794+Link-@users.noreply.github.com>
Date: Mon, 2 Dec 2024 19:46:18 +0100
Subject: [PATCH 088/108] Fix comments

Co-authored-by: Josh Gross
---
 packages/cache/src/cache.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts
index 2959cc62..6b79be52 100644
--- a/packages/cache/src/cache.ts
+++ b/packages/cache/src/cache.ts
@@ -108,7 +108,7 @@ export async function restoreCache(
  *
  * @param paths a list of file paths to restore from the cache
- * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
+ * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
  * @param options cache download options
  * @param enableCrossOsArchive an optional boolean enabled to restore on Windows any cache created on any platform
  * @returns string returns the key for the cache hit, otherwise returns undefined

From 502e8ce6515cdb6d68920039301ee221781bb97a Mon Sep 17 00:00:00 2001
From: Bassem Dghaidi <568794+Link-@users.noreply.github.com>
Date: Mon, 2 Dec 2024 10:53:29 -0800
Subject: [PATCH 089/108] Minor comment adjustments

---
 packages/cache/src/cache.ts | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts
index f94ccc6b..f3724c20 100644
--- a/packages/cache/src/cache.ts
+++ b/packages/cache/src/cache.ts
@@ -64,7 +64,7 @@ export function isFeatureAvailable(): boolean {
  * Restores cache from keys
  *
  * @param paths a list of file paths to restore from the cache
- * @param primaryKey an explicit key for restoring the cache
+ * @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching.
  * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
  * @param downloadOptions cache download options
  * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
  * @returns string returns the key for the cache hit, otherwise returns undefined
@@ -107,7 +107,7 @@
  * Restores cache using the legacy Cache Service
  *
  * @param paths a list of file paths to restore from the cache
- * @param primaryKey an explicit key for restoring the cache
+ * @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching.
  * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
  * @param options cache download options
  * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
@@ -205,7 +205,7 @@
  * Restores cache using Cache Service v2
  *
  * @param paths a list of file paths to restore from the cache
- * @param primaryKey an explicit key for restoring the cache
+ * @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching
  * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
  * @param downloadOptions cache download options
  * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
@@ -463,6 +463,8 @@ async function saveCacheV2(
   enableCrossOsArchive = false
 ): Promise<number> {
   // Override UploadOptions to force the use of Azure
+  // ...options goes first because we want to override the default values
+  // set in UploadOptions with these specific figures
   options = {
     ...options,
     uploadChunkSize: 64 * 1024 * 1024, // 64 MiB
     uploadConcurrency: 8, // 8 workers for parallel upload
     useAzureSdk: true
   }

From c649df4b940f300d98c8a43f0d37aa626be1f282 Mon Sep 17 00:00:00 2001
From: Bassem Dghaidi <568794+Link-@users.noreply.github.com>
Date: Mon, 2 Dec 2024 10:55:33 -0800
Subject: [PATCH 090/108] Minor comment adjustments

---
 packages/cache/src/cache.ts | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts
index e4b53faf..8eb69044 100644
--- a/packages/cache/src/cache.ts
+++ b/packages/cache/src/cache.ts
@@ -65,7 +65,7 @@ export function isFeatureAvailable(): boolean {
  *
  * @param paths a list of file paths to restore from the cache
  * @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching.
- * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
+ * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
  * @param downloadOptions cache download options
  * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
  * @returns string returns the key for the cache hit, otherwise returns undefined
@@ -206,7 +206,7 @@ async function restoreCacheV1(
  *
  * @param paths a list of file paths to restore from the cache
  * @param primaryKey an explicit key for restoring the cache.
Lookup is done with prefix matching - * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key + * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey * @param downloadOptions cache download options * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform * @returns string returns the key for the cache hit, otherwise returns undefined From c02c929c562af2419e21be2bdff6fe8c257c7e02 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Mon, 2 Dec 2024 11:10:25 -0800 Subject: [PATCH 091/108] Minor comment adjustments --- packages/cache/src/cache.ts | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index 8eb69044..9b02489f 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -3,16 +3,16 @@ import * as path from 'path' import * as utils from './internal/cacheUtils' import * as cacheHttpClient from './internal/cacheHttpClient' import * as cacheTwirpClient from './internal/shared/cacheTwirpClient' -import { getCacheServiceVersion, isGhes } from './internal/config' -import { DownloadOptions, UploadOptions } from './options' -import { createTar, extractTar, listTar } from './internal/tar' +import {getCacheServiceVersion, isGhes} from './internal/config' +import {DownloadOptions, UploadOptions} from './options' +import {createTar, extractTar, listTar} from './internal/tar' import { CreateCacheEntryRequest, FinalizeCacheEntryUploadRequest, FinalizeCacheEntryUploadResponse, GetCacheEntryDownloadURLRequest } from './generated/results/api/v1/cache' -import { CacheFileSizeLimit } from './internal/constants' +import {CacheFileSizeLimit} from './internal/constants' export class ValidationError extends Error { constructor(message: string) { super(message) @@ -414,9 +414,9 @@ async function saveCacheV1( } else if (reserveCacheResponse?.statusCode === 400) { throw new Error( reserveCacheResponse?.error?.message ?? - `Cache size of ~${Math.round( - archiveFileSize / (1024 * 1024) - )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.` + `Cache size of ~${Math.round( + archiveFileSize / (1024 * 1024) + )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.` ) } else { throw new ReserveCacheError( From 4498687c5e855a7e61c31c75acb54b1e30e3f3b3 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Tue, 3 Dec 2024 02:40:00 -0800 Subject: [PATCH 092/108] Prepare @actions/cache 4.0.0 release --- packages/cache/RELEASES.md | 15 +++++++++++++++ packages/cache/package-lock.json | 4 ++-- packages/cache/package.json | 2 +- 3 files changed, 18 insertions(+), 3 deletions(-) diff --git a/packages/cache/RELEASES.md b/packages/cache/RELEASES.md index 85415952..ebe6f031 100644 --- a/packages/cache/RELEASES.md +++ b/packages/cache/RELEASES.md @@ -1,6 +1,21 @@ # @actions/cache Releases +### 4.0.0 + +#### Important changes + +- The cache backend service has been rewritten from the ground up for improved performance and reliability. This release integrates with the new cache service (v2) APIs. The new service will gradually rollout following the deprecation period. The legacy service will be sunset on **February 1st, 2025**. +- Changes in this release are **fully backward compatible**. 
Upgrading to this version should not break or require any changes to your workflows beyond updating your `package.json` to this version. +- **All previous versions of this package will be deprecated**. We recommend upgrading to this version as soon as possible before **February 1st, 2025.** + +#### Minor changes + +- Update `@actions/core` to `1.11.0` +- Update `semver` `6.3.1` +- Add `twirp-ts` `2.5.0` to dependencies + ### 3.3.0 + - Update `@actions/core` to `1.11.1` - Remove dependency on `uuid` package [#1824](https://github.com/actions/toolkit/pull/1824), [#1842](https://github.com/actions/toolkit/pull/1842) diff --git a/packages/cache/package-lock.json b/packages/cache/package-lock.json index beb23a68..132391fb 100644 --- a/packages/cache/package-lock.json +++ b/packages/cache/package-lock.json @@ -1,12 +1,12 @@ { "name": "@actions/cache", - "version": "3.3.0", + "version": "4.0.0", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@actions/cache", - "version": "3.3.0", + "version": "4.0.0", "license": "MIT", "dependencies": { "@actions/core": "^1.11.1", diff --git a/packages/cache/package.json b/packages/cache/package.json index e5332a92..b03f4221 100644 --- a/packages/cache/package.json +++ b/packages/cache/package.json @@ -1,6 +1,6 @@ { "name": "@actions/cache", - "version": "3.3.0", + "version": "4.0.0", "preview": true, "description": "Actions cache lib", "keywords": [ From cb001af8a39a02d063d4cba5f1151b292c4d022c Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Tue, 3 Dec 2024 02:52:39 -0800 Subject: [PATCH 093/108] Update README to include deprecation notice --- packages/cache/README.md | 8 ++++++-- packages/cache/RELEASES.md | 4 ++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/packages/cache/README.md b/packages/cache/README.md index 55185032..2f7be8df 100644 --- a/packages/cache/README.md +++ b/packages/cache/README.md @@ -6,6 +6,12 @@ See ["Caching dependencies to speed up workflows"](https://docs.github.com/en/ac Note that GitHub will remove any cache entries that have not been accessed in over 7 days. There is no limit on the number of caches you can store, but the total size of all caches in a repository is limited to 10 GB. If you exceed this limit, GitHub will save your cache but will begin evicting caches until the total size is less than 10 GB. +## Important changes + +- The cache backend service has been rewritten from the ground up for improved performance and reliability. This release integrates with the new cache service (v2) APIs. The new service will gradually rollout following the deprecation period. The legacy service will be sunset on **February 1st, 2025**. +- Changes in the `4.0.0` release are **fully backward compatible**. Upgrading to version `4.0.0` should not break or require any changes to your workflows beyond updating your `package.json` to this version. +- **All previous versions of this package will be deprecated**. We recommend upgrading to version `4.0.0` as soon as possible before **February 1st, 2025.** + ## Usage This package is used by the v2+ versions of our first party cache action. You can find an example implementation in the cache repo [here](https://github.com/actions/cache). @@ -47,5 +53,3 @@ const cacheKey = await cache.restoreCache(paths, key, restoreKeys) A cache gets downloaded in multiple segments of fixed sizes (now `128MB` to fail-fast, previously `1GB` for a `32-bit` runner and `2GB` for a `64-bit` runner were used). 
Sometimes, a segment download gets stuck which causes the workflow job to be stuck forever and fail. Version `v3.0.4` of cache package introduces a segment download timeout. The segment download timeout will allow the segment download to get aborted and hence allow the job to proceed with a cache miss. Default value of this timeout is 10 minutes (starting `v3.2.1` and higher, previously 60 minutes in versions between `v.3.0.4` and `v3.2.0`, both included) and can be customized by specifying an [environment variable](https://docs.github.com/en/actions/learn-github-actions/environment-variables) named `SEGMENT_DOWNLOAD_TIMEOUT_MINS` with timeout value in minutes. - - diff --git a/packages/cache/RELEASES.md b/packages/cache/RELEASES.md index ebe6f031..02be2ac0 100644 --- a/packages/cache/RELEASES.md +++ b/packages/cache/RELEASES.md @@ -5,8 +5,8 @@ #### Important changes - The cache backend service has been rewritten from the ground up for improved performance and reliability. This release integrates with the new cache service (v2) APIs. The new service will gradually rollout following the deprecation period. The legacy service will be sunset on **February 1st, 2025**. -- Changes in this release are **fully backward compatible**. Upgrading to this version should not break or require any changes to your workflows beyond updating your `package.json` to this version. -- **All previous versions of this package will be deprecated**. We recommend upgrading to this version as soon as possible before **February 1st, 2025.** +- Changes in this release are **fully backward compatible**. Upgrading to version `4.0.0` should not break or require any changes to your workflows beyond updating your `package.json` to this version. +- **All previous versions of this package will be deprecated**. We recommend upgrading to version `4.0.0` as soon as possible before **February 1st, 2025.** #### Minor changes From 59845ec3725df5c79d93c4ffd8e3d26d2ac83319 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Wed, 4 Dec 2024 05:30:50 -0800 Subject: [PATCH 094/108] Update deprecation notice --- packages/cache/README.md | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/packages/cache/README.md b/packages/cache/README.md index 2f7be8df..558999ac 100644 --- a/packages/cache/README.md +++ b/packages/cache/README.md @@ -6,11 +6,17 @@ See ["Caching dependencies to speed up workflows"](https://docs.github.com/en/ac Note that GitHub will remove any cache entries that have not been accessed in over 7 days. There is no limit on the number of caches you can store, but the total size of all caches in a repository is limited to 10 GB. If you exceed this limit, GitHub will save your cache but will begin evicting caches until the total size is less than 10 GB. -## Important changes +## ⚠️ Important changes -- The cache backend service has been rewritten from the ground up for improved performance and reliability. This release integrates with the new cache service (v2) APIs. The new service will gradually rollout following the deprecation period. The legacy service will be sunset on **February 1st, 2025**. -- Changes in the `4.0.0` release are **fully backward compatible**. Upgrading to version `4.0.0` should not break or require any changes to your workflows beyond updating your `package.json` to this version. -- **All previous versions of this package will be deprecated**. 
We recommend upgrading to version `4.0.0` as soon as possible before **February 1st, 2025.** +The cache backend service has been rewritten from the ground up for improved performance and reliability. The [@actions/cache](https://github.com/actions/toolkit/tree/main/packages/cache) package now integrates with the new cache service (v2) APIs. + +The new service will gradually roll out as of **February 1st, 2025**. The legacy service will also be sunset on the same date. Changes in this release are **fully backward compatible**. + +**All previous versions of this package will be deprecated**. We recommend upgrading to version `4.0.0` as soon as possible before **February 1st, 2025.** + +If you do not upgrade, all workflow runs using any of the deprecated [@actions/cache](https://github.com/actions/toolkit/tree/main/packages/cache) packages will fail. + +Upgrading to the recommended version should not break or require any changes to your workflows beyond updating your `package.json` to version `4.0.0`. ## Usage From 72447df44c8dd2e0969cde019dc956addeb1598a Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Wed, 4 Dec 2024 05:33:47 -0800 Subject: [PATCH 095/108] Update deprecation notice --- packages/cache/README.md | 2 ++ packages/cache/RELEASES.md | 14 +++++++++++--- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/packages/cache/README.md b/packages/cache/README.md index 558999ac..92820f7c 100644 --- a/packages/cache/README.md +++ b/packages/cache/README.md @@ -18,6 +18,8 @@ If you do not upgrade, all workflow runs using any of the deprecated [@actions/c Upgrading to the recommended version should not break or require any changes to your workflows beyond updating your `package.json` to version `4.0.0`. +Read more about change & access the migration guide: [reference to the announcement](TBD). + ## Usage This package is used by the v2+ versions of our first party cache action. You can find an example implementation in the cache repo [here](https://github.com/actions/cache). diff --git a/packages/cache/RELEASES.md b/packages/cache/RELEASES.md index 02be2ac0..552abc97 100644 --- a/packages/cache/RELEASES.md +++ b/packages/cache/RELEASES.md @@ -4,9 +4,17 @@ #### Important changes -- The cache backend service has been rewritten from the ground up for improved performance and reliability. This release integrates with the new cache service (v2) APIs. The new service will gradually rollout following the deprecation period. The legacy service will be sunset on **February 1st, 2025**. -- Changes in this release are **fully backward compatible**. Upgrading to version `4.0.0` should not break or require any changes to your workflows beyond updating your `package.json` to this version. -- **All previous versions of this package will be deprecated**. We recommend upgrading to version `4.0.0` as soon as possible before **February 1st, 2025.** +The cache backend service has been rewritten from the ground up for improved performance and reliability. The [@actions/cache](https://github.com/actions/toolkit/tree/main/packages/cache) package now integrates with the new cache service (v2) APIs. + +The new service will gradually roll out as of **February 1st, 2025**. The legacy service will also be sunset on the same date. Changes in this release are **fully backward compatible**. + +**All previous versions of this package will be deprecated**. 
We recommend upgrading to version `4.0.0` as soon as possible before **February 1st, 2025.** + +If you do not upgrade, all workflow runs using any of the deprecated [@actions/cache](https://github.com/actions/toolkit/tree/main/packages/cache) packages will fail. + +Upgrading to the recommended version should not break or require any changes to your workflows beyond updating your `package.json` to version `4.0.0`. + +Read more about change & access the migration guide: [reference to the announcement](TBD). #### Minor changes From cd9197e9bdaef000bdc0a4e3269169a90c6d4554 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Wed, 4 Dec 2024 08:23:10 -0800 Subject: [PATCH 096/108] Add announcement link --- packages/cache/README.md | 2 +- packages/cache/RELEASES.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/cache/README.md b/packages/cache/README.md index 92820f7c..0f743848 100644 --- a/packages/cache/README.md +++ b/packages/cache/README.md @@ -18,7 +18,7 @@ If you do not upgrade, all workflow runs using any of the deprecated [@actions/c Upgrading to the recommended version should not break or require any changes to your workflows beyond updating your `package.json` to version `4.0.0`. -Read more about change & access the migration guide: [reference to the announcement](TBD). +Read more about the change & access the migration guide: [reference to the announcement](https://github.com/actions/toolkit/discussions/1890). ## Usage diff --git a/packages/cache/RELEASES.md b/packages/cache/RELEASES.md index 552abc97..8355e977 100644 --- a/packages/cache/RELEASES.md +++ b/packages/cache/RELEASES.md @@ -14,7 +14,7 @@ If you do not upgrade, all workflow runs using any of the deprecated [@actions/c Upgrading to the recommended version should not break or require any changes to your workflows beyond updating your `package.json` to version `4.0.0`. -Read more about change & access the migration guide: [reference to the announcement](TBD). +Read more about the change & access the migration guide: [reference to the announcement](https://github.com/actions/toolkit/discussions/1890). 
#### Minor changes From 1e0c16f0dc67246ccb8005965c289aba4114bd56 Mon Sep 17 00:00:00 2001 From: Brian DeHamer Date: Fri, 6 Dec 2024 14:27:02 -0800 Subject: [PATCH 098/108] return artifact digest on upload Signed-off-by: Brian DeHamer --- packages/artifact/__tests__/upload-artifact.test.ts | 4 +++- packages/artifact/src/internal/shared/interfaces.ts | 5 +++++ packages/artifact/src/internal/upload/upload-artifact.ts | 1 + 3 files changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/artifact/__tests__/upload-artifact.test.ts b/packages/artifact/__tests__/upload-artifact.test.ts index 7c7d8e2e..64cc4fb1 100644 --- a/packages/artifact/__tests__/upload-artifact.test.ts +++ b/packages/artifact/__tests__/upload-artifact.test.ts @@ -281,7 +281,7 @@ describe('upload-artifact', () => { } ) - const {id, size} = await uploadArtifact( + const {id, size, digest} = await uploadArtifact( fixtures.inputs.artifactName, fixtures.files.map(file => path.join(fixtures.uploadDirectory, file.name) @@ -291,6 +291,8 @@ describe('upload-artifact', () => { expect(id).toBe(1) expect(size).toBe(loadedBytes) + expect(digest).toBeDefined() + expect(digest).toHaveLength(64) const extractedDirectory = path.join( fixtures.uploadDirectory, diff --git a/packages/artifact/src/internal/shared/interfaces.ts b/packages/artifact/src/internal/shared/interfaces.ts index eb55ae8b..4255d020 100644 --- a/packages/artifact/src/internal/shared/interfaces.ts +++ b/packages/artifact/src/internal/shared/interfaces.ts @@ -12,6 +12,11 @@ export interface UploadArtifactResponse { * This ID can be used as input to other APIs to download, delete or get more information about an artifact: https://docs.github.com/en/rest/actions/artifacts */ id?: number + + /** + * The SHA256 digest of the artifact that was created. 
Not provided if no artifact was uploaded + */ + digest?: string } /** diff --git a/packages/artifact/src/internal/upload/upload-artifact.ts b/packages/artifact/src/internal/upload/upload-artifact.ts index e880102f..81be322c 100644 --- a/packages/artifact/src/internal/upload/upload-artifact.ts +++ b/packages/artifact/src/internal/upload/upload-artifact.ts @@ -110,6 +110,7 @@ export async function uploadArtifact( return { size: uploadResult.uploadSize, + digest: uploadResult.sha256Hash, id: Number(artifactId) } } From 4426b4ea91fb6d00c25ca5411008def45de89baa Mon Sep 17 00:00:00 2001 From: Brian DeHamer Date: Tue, 17 Dec 2024 10:05:45 -0800 Subject: [PATCH 099/108] Prepare artifact release 2.2.0 Signed-off-by: Brian DeHamer --- packages/artifact/RELEASES.md | 4 ++++ packages/artifact/docs/generated/README.md | 2 +- .../generated/classes/ArtifactNotFoundError.md | 2 +- .../generated/classes/DefaultArtifactClient.md | 10 +++++----- .../generated/classes/FilesNotFoundError.md | 4 ++-- .../generated/classes/GHESNotSupportedError.md | 2 +- .../generated/classes/InvalidResponseError.md | 2 +- .../docs/generated/classes/NetworkError.md | 6 +++--- .../docs/generated/classes/UsageError.md | 4 ++-- .../docs/generated/interfaces/Artifact.md | 8 ++++---- .../docs/generated/interfaces/ArtifactClient.md | 10 +++++----- .../interfaces/DeleteArtifactResponse.md | 2 +- .../interfaces/DownloadArtifactOptions.md | 2 +- .../interfaces/DownloadArtifactResponse.md | 2 +- .../docs/generated/interfaces/FindOptions.md | 2 +- .../generated/interfaces/GetArtifactResponse.md | 2 +- .../interfaces/ListArtifactsOptions.md | 2 +- .../interfaces/ListArtifactsResponse.md | 2 +- .../interfaces/UploadArtifactOptions.md | 4 ++-- .../interfaces/UploadArtifactResponse.md | 17 +++++++++++++++-- packages/artifact/package-lock.json | 4 ++-- packages/artifact/package.json | 2 +- 22 files changed, 56 insertions(+), 39 deletions(-) diff --git a/packages/artifact/RELEASES.md b/packages/artifact/RELEASES.md index d24cdfb5..9ba5c7e9 100644 --- a/packages/artifact/RELEASES.md +++ b/packages/artifact/RELEASES.md @@ -1,5 +1,9 @@ # @actions/artifact Releases +### 2.2.0 + +- Return artifact digest on upload [#1896](https://github.com/actions/toolkit/pull/1896) + ### 2.1.11 - Fixed a bug with relative symlinks resolution [#1844](https://github.com/actions/toolkit/pull/1844) diff --git a/packages/artifact/docs/generated/README.md b/packages/artifact/docs/generated/README.md index aeeaade2..462216fc 100644 --- a/packages/artifact/docs/generated/README.md +++ b/packages/artifact/docs/generated/README.md @@ -40,4 +40,4 @@ #### Defined in -[src/artifact.ts:7](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/artifact.ts#L7) +[src/artifact.ts:7](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/artifact.ts#L7) diff --git a/packages/artifact/docs/generated/classes/ArtifactNotFoundError.md b/packages/artifact/docs/generated/classes/ArtifactNotFoundError.md index 8b39bd0b..b194e22c 100644 --- a/packages/artifact/docs/generated/classes/ArtifactNotFoundError.md +++ b/packages/artifact/docs/generated/classes/ArtifactNotFoundError.md @@ -48,7 +48,7 @@ Error.constructor #### Defined in -[src/internal/shared/errors.ts:24](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L24) +[src/internal/shared/errors.ts:24](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L24) ## Properties diff --git 
a/packages/artifact/docs/generated/classes/DefaultArtifactClient.md b/packages/artifact/docs/generated/classes/DefaultArtifactClient.md index 6ac11c31..959f83e1 100644 --- a/packages/artifact/docs/generated/classes/DefaultArtifactClient.md +++ b/packages/artifact/docs/generated/classes/DefaultArtifactClient.md @@ -61,7 +61,7 @@ single DeleteArtifactResponse object #### Defined in -[src/internal/client.ts:248](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L248) +[src/internal/client.ts:248](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L248) ___ @@ -92,7 +92,7 @@ single DownloadArtifactResponse object #### Defined in -[src/internal/client.ts:138](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L138) +[src/internal/client.ts:138](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L138) ___ @@ -127,7 +127,7 @@ If there are multiple artifacts with the same name in the same workflow run this #### Defined in -[src/internal/client.ts:212](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L212) +[src/internal/client.ts:212](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L212) ___ @@ -159,7 +159,7 @@ ListArtifactResponse object #### Defined in -[src/internal/client.ts:176](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L176) +[src/internal/client.ts:176](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L176) ___ @@ -190,4 +190,4 @@ single UploadArtifactResponse object #### Defined in -[src/internal/client.ts:113](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L113) +[src/internal/client.ts:113](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L113) diff --git a/packages/artifact/docs/generated/classes/FilesNotFoundError.md b/packages/artifact/docs/generated/classes/FilesNotFoundError.md index f20b608b..4aeaca7b 100644 --- a/packages/artifact/docs/generated/classes/FilesNotFoundError.md +++ b/packages/artifact/docs/generated/classes/FilesNotFoundError.md @@ -49,7 +49,7 @@ Error.constructor #### Defined in -[src/internal/shared/errors.ts:4](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L4) +[src/internal/shared/errors.ts:4](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L4) ## Properties @@ -59,7 +59,7 @@ Error.constructor #### Defined in -[src/internal/shared/errors.ts:2](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L2) +[src/internal/shared/errors.ts:2](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L2) ___ diff --git a/packages/artifact/docs/generated/classes/GHESNotSupportedError.md b/packages/artifact/docs/generated/classes/GHESNotSupportedError.md index 63e89906..dac5a79f 100644 --- a/packages/artifact/docs/generated/classes/GHESNotSupportedError.md +++ b/packages/artifact/docs/generated/classes/GHESNotSupportedError.md @@ -48,7 +48,7 @@ Error.constructor #### Defined in -[src/internal/shared/errors.ts:31](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L31) 
+[src/internal/shared/errors.ts:31](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L31) ## Properties diff --git a/packages/artifact/docs/generated/classes/InvalidResponseError.md b/packages/artifact/docs/generated/classes/InvalidResponseError.md index bd1d80cb..234e3517 100644 --- a/packages/artifact/docs/generated/classes/InvalidResponseError.md +++ b/packages/artifact/docs/generated/classes/InvalidResponseError.md @@ -48,7 +48,7 @@ Error.constructor #### Defined in -[src/internal/shared/errors.ts:17](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L17) +[src/internal/shared/errors.ts:17](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L17) ## Properties diff --git a/packages/artifact/docs/generated/classes/NetworkError.md b/packages/artifact/docs/generated/classes/NetworkError.md index 1383b639..4ffcaf6e 100644 --- a/packages/artifact/docs/generated/classes/NetworkError.md +++ b/packages/artifact/docs/generated/classes/NetworkError.md @@ -50,7 +50,7 @@ Error.constructor #### Defined in -[src/internal/shared/errors.ts:42](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L42) +[src/internal/shared/errors.ts:42](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L42) ## Properties @@ -60,7 +60,7 @@ Error.constructor #### Defined in -[src/internal/shared/errors.ts:40](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L40) +[src/internal/shared/errors.ts:40](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L40) ___ @@ -198,4 +198,4 @@ ___ #### Defined in -[src/internal/shared/errors.ts:49](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L49) +[src/internal/shared/errors.ts:49](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L49) diff --git a/packages/artifact/docs/generated/classes/UsageError.md b/packages/artifact/docs/generated/classes/UsageError.md index 9d7900fd..e4133401 100644 --- a/packages/artifact/docs/generated/classes/UsageError.md +++ b/packages/artifact/docs/generated/classes/UsageError.md @@ -43,7 +43,7 @@ Error.constructor #### Defined in -[src/internal/shared/errors.ts:62](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L62) +[src/internal/shared/errors.ts:62](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L62) ## Properties @@ -181,4 +181,4 @@ ___ #### Defined in -[src/internal/shared/errors.ts:68](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L68) +[src/internal/shared/errors.ts:68](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L68) diff --git a/packages/artifact/docs/generated/interfaces/Artifact.md b/packages/artifact/docs/generated/interfaces/Artifact.md index a4cb4e17..02fd77a0 100644 --- a/packages/artifact/docs/generated/interfaces/Artifact.md +++ b/packages/artifact/docs/generated/interfaces/Artifact.md @@ -23,7 +23,7 @@ The time when the artifact was created #### Defined in -[src/internal/shared/interfaces.ts:123](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L123) 
+[src/internal/shared/interfaces.ts:128](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L128) ___ @@ -35,7 +35,7 @@ The ID of the artifact #### Defined in -[src/internal/shared/interfaces.ts:113](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L113) +[src/internal/shared/interfaces.ts:118](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L118) ___ @@ -47,7 +47,7 @@ The name of the artifact #### Defined in -[src/internal/shared/interfaces.ts:108](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L108) +[src/internal/shared/interfaces.ts:113](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L113) ___ @@ -59,4 +59,4 @@ The size of the artifact in bytes #### Defined in -[src/internal/shared/interfaces.ts:118](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L118) +[src/internal/shared/interfaces.ts:123](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L123) diff --git a/packages/artifact/docs/generated/interfaces/ArtifactClient.md b/packages/artifact/docs/generated/interfaces/ArtifactClient.md index cf97ff33..ecab9606 100644 --- a/packages/artifact/docs/generated/interfaces/ArtifactClient.md +++ b/packages/artifact/docs/generated/interfaces/ArtifactClient.md @@ -43,7 +43,7 @@ single DeleteArtifactResponse object #### Defined in -[src/internal/client.ts:103](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L103) +[src/internal/client.ts:103](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L103) ___ @@ -70,7 +70,7 @@ single DownloadArtifactResponse object #### Defined in -[src/internal/client.ts:89](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L89) +[src/internal/client.ts:89](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L89) ___ @@ -101,7 +101,7 @@ If there are multiple artifacts with the same name in the same workflow run this #### Defined in -[src/internal/client.ts:75](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L75) +[src/internal/client.ts:75](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L75) ___ @@ -129,7 +129,7 @@ ListArtifactResponse object #### Defined in -[src/internal/client.ts:57](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L57) +[src/internal/client.ts:57](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L57) ___ @@ -156,4 +156,4 @@ single UploadArtifactResponse object #### Defined in -[src/internal/client.ts:40](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L40) +[src/internal/client.ts:40](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L40) diff --git a/packages/artifact/docs/generated/interfaces/DeleteArtifactResponse.md b/packages/artifact/docs/generated/interfaces/DeleteArtifactResponse.md index 209b2095..3c386396 100644 --- a/packages/artifact/docs/generated/interfaces/DeleteArtifactResponse.md +++ b/packages/artifact/docs/generated/interfaces/DeleteArtifactResponse.md @@ -20,4 +20,4 @@ The id of the 
artifact that was deleted #### Defined in -[src/internal/shared/interfaces.ts:158](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L158) +[src/internal/shared/interfaces.ts:163](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L163) diff --git a/packages/artifact/docs/generated/interfaces/DownloadArtifactOptions.md b/packages/artifact/docs/generated/interfaces/DownloadArtifactOptions.md index 1b34e637..bfcd1299 100644 --- a/packages/artifact/docs/generated/interfaces/DownloadArtifactOptions.md +++ b/packages/artifact/docs/generated/interfaces/DownloadArtifactOptions.md @@ -20,4 +20,4 @@ Denotes where the artifact will be downloaded to. If not specified then the arti #### Defined in -[src/internal/shared/interfaces.ts:98](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L98) +[src/internal/shared/interfaces.ts:103](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L103) diff --git a/packages/artifact/docs/generated/interfaces/DownloadArtifactResponse.md b/packages/artifact/docs/generated/interfaces/DownloadArtifactResponse.md index dd3e80bf..587dcc7a 100644 --- a/packages/artifact/docs/generated/interfaces/DownloadArtifactResponse.md +++ b/packages/artifact/docs/generated/interfaces/DownloadArtifactResponse.md @@ -20,4 +20,4 @@ The path where the artifact was downloaded to #### Defined in -[src/internal/shared/interfaces.ts:88](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L88) +[src/internal/shared/interfaces.ts:93](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L93) diff --git a/packages/artifact/docs/generated/interfaces/FindOptions.md b/packages/artifact/docs/generated/interfaces/FindOptions.md index 32e0f5b7..8769bea6 100644 --- a/packages/artifact/docs/generated/interfaces/FindOptions.md +++ b/packages/artifact/docs/generated/interfaces/FindOptions.md @@ -27,4 +27,4 @@ The criteria for finding Artifact(s) out of the scope of the current run. 
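The `FindOptions` page above documents the `findBy` criteria for reaching artifacts outside the current workflow run. A hedged sketch of how those criteria plug into the client methods documented earlier (the token, run id, and repository values are placeholders):

```typescript
import {DefaultArtifactClient} from '@actions/artifact'

// Sketch: look up an artifact from a different workflow run by name,
// then download it. All identifying values below are placeholders.
async function fetchFromOtherRun(): Promise<void> {
  const client = new DefaultArtifactClient()
  const findBy = {
    token: process.env.GITHUB_TOKEN as string, // token able to read that repo's artifacts
    workflowRunId: 123,                        // hypothetical run id
    repositoryOwner: 'octocat',
    repositoryName: 'example-repo'
  }

  // Resolve the artifact id by name, then download its contents.
  const {artifact} = await client.getArtifact('my-artifact', {findBy})
  const {downloadPath} = await client.downloadArtifact(artifact.id, {
    path: './downloaded',
    findBy
  })
  console.log(`downloaded to ${downloadPath}`)
}
```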
#### Defined in -[src/internal/shared/interfaces.ts:131](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L131) +[src/internal/shared/interfaces.ts:136](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L136) diff --git a/packages/artifact/docs/generated/interfaces/GetArtifactResponse.md b/packages/artifact/docs/generated/interfaces/GetArtifactResponse.md index 8f4405ae..17e94580 100644 --- a/packages/artifact/docs/generated/interfaces/GetArtifactResponse.md +++ b/packages/artifact/docs/generated/interfaces/GetArtifactResponse.md @@ -20,4 +20,4 @@ Metadata about the artifact that was found #### Defined in -[src/internal/shared/interfaces.ts:57](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L57) +[src/internal/shared/interfaces.ts:62](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L62) diff --git a/packages/artifact/docs/generated/interfaces/ListArtifactsOptions.md b/packages/artifact/docs/generated/interfaces/ListArtifactsOptions.md index c6a65432..271d524b 100644 --- a/packages/artifact/docs/generated/interfaces/ListArtifactsOptions.md +++ b/packages/artifact/docs/generated/interfaces/ListArtifactsOptions.md @@ -21,4 +21,4 @@ In the case of reruns, this can be useful to avoid duplicates #### Defined in -[src/internal/shared/interfaces.ts:68](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L68) +[src/internal/shared/interfaces.ts:73](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L73) diff --git a/packages/artifact/docs/generated/interfaces/ListArtifactsResponse.md b/packages/artifact/docs/generated/interfaces/ListArtifactsResponse.md index 3b1ebe8e..cd753fc5 100644 --- a/packages/artifact/docs/generated/interfaces/ListArtifactsResponse.md +++ b/packages/artifact/docs/generated/interfaces/ListArtifactsResponse.md @@ -20,4 +20,4 @@ A list of artifacts that were found #### Defined in -[src/internal/shared/interfaces.ts:78](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L78) +[src/internal/shared/interfaces.ts:83](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L83) diff --git a/packages/artifact/docs/generated/interfaces/UploadArtifactOptions.md b/packages/artifact/docs/generated/interfaces/UploadArtifactOptions.md index d2c07a0c..b58c9a10 100644 --- a/packages/artifact/docs/generated/interfaces/UploadArtifactOptions.md +++ b/packages/artifact/docs/generated/interfaces/UploadArtifactOptions.md @@ -28,7 +28,7 @@ For large files that are not easily compressed, a value of 0 is recommended for #### Defined in -[src/internal/shared/interfaces.ts:47](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L47) +[src/internal/shared/interfaces.ts:52](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L52) ___ @@ -52,4 +52,4 @@ input of 0 assumes default retention setting. 
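Tying the option pages above to the new `digest` field introduced in 2.2.0, here is a small sketch of an upload that sets both documented options and reads back the response (the artifact name and file paths are invented for the example):

```typescript
import {DefaultArtifactClient} from '@actions/artifact'

// Sketch: upload files with explicit options and inspect the response,
// including the SHA256 digest added in release 2.2.0.
async function upload(): Promise<void> {
  const client = new DefaultArtifactClient()
  const {id, size, digest} = await client.uploadArtifact(
    'build-output',                     // example artifact name
    ['dist/app.js', 'dist/app.js.map'], // example files to upload
    'dist',                             // root directory used to derive paths inside the artifact
    {
      retentionDays: 7,   // 0 would fall back to the repository default
      compressionLevel: 0 // recommended for large files that compress poorly
    }
  )
  // digest is a 64-character hex SHA256 string when an artifact was uploaded
  console.log(`artifact ${id}: ${size} bytes, sha256=${digest}`)
}
```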
#### Defined in -[src/internal/shared/interfaces.ts:36](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L36) +[src/internal/shared/interfaces.ts:41](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L41) diff --git a/packages/artifact/docs/generated/interfaces/UploadArtifactResponse.md b/packages/artifact/docs/generated/interfaces/UploadArtifactResponse.md index ea98efb0..6cd925cb 100644 --- a/packages/artifact/docs/generated/interfaces/UploadArtifactResponse.md +++ b/packages/artifact/docs/generated/interfaces/UploadArtifactResponse.md @@ -8,11 +8,24 @@ Response from the server when an artifact is uploaded ### Properties +- [digest](UploadArtifactResponse.md#digest) - [id](UploadArtifactResponse.md#id) - [size](UploadArtifactResponse.md#size) ## Properties +### digest + +• `Optional` **digest**: `string` + +The SHA256 digest of the artifact that was created. Not provided if no artifact was uploaded + +#### Defined in + +[src/internal/shared/interfaces.ts:19](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L19) + +___ + ### id • `Optional` **id**: `number` @@ -22,7 +35,7 @@ This ID can be used as input to other APIs to download, delete or get more infor #### Defined in -[src/internal/shared/interfaces.ts:14](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L14) +[src/internal/shared/interfaces.ts:14](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L14) ___ @@ -34,4 +47,4 @@ Total size of the artifact in bytes. Not provided if no artifact was uploaded #### Defined in -[src/internal/shared/interfaces.ts:8](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L8) +[src/internal/shared/interfaces.ts:8](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L8) diff --git a/packages/artifact/package-lock.json b/packages/artifact/package-lock.json index 8ad6369c..44cddddd 100644 --- a/packages/artifact/package-lock.json +++ b/packages/artifact/package-lock.json @@ -1,12 +1,12 @@ { "name": "@actions/artifact", - "version": "2.1.11", + "version": "2.2.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@actions/artifact", - "version": "2.1.11", + "version": "2.2.0", "license": "MIT", "dependencies": { "@actions/core": "^1.10.0", diff --git a/packages/artifact/package.json b/packages/artifact/package.json index 3b3233a1..69f33a02 100644 --- a/packages/artifact/package.json +++ b/packages/artifact/package.json @@ -1,6 +1,6 @@ { "name": "@actions/artifact", - "version": "2.1.11", + "version": "2.2.0", "preview": true, "description": "Actions artifact lib", "keywords": [ From 26f8f84a967192cdadfb666c510297cf21206250 Mon Sep 17 00:00:00 2001 From: Josh Gross Date: Tue, 17 Dec 2024 14:04:05 -0500 Subject: [PATCH 100/108] Remove unused cache API (#1907) --- .../src/generated/results/api/v1/cache.ts | 171 +--------------- .../generated/results/api/v1/cache.twirp.ts | 189 ------------------ 2 files changed, 1 insertion(+), 359 deletions(-) diff --git a/packages/cache/src/generated/results/api/v1/cache.ts b/packages/cache/src/generated/results/api/v1/cache.ts index 387bbd15..5345a2b1 100644 --- a/packages/cache/src/generated/results/api/v1/cache.ts +++ b/packages/cache/src/generated/results/api/v1/cache.ts @@ -212,52 +212,6 @@ export interface 
ListCacheEntriesResponse { */ entries: CacheEntry[]; } -/** - * @generated from protobuf message github.actions.results.api.v1.LookupCacheEntryRequest - */ -export interface LookupCacheEntryRequest { - /** - * Scope and other metadata for the cache entry - * - * @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1; - */ - metadata?: CacheMetadata; - /** - * An explicit key for a cache entry - * - * @generated from protobuf field: string key = 2; - */ - key: string; - /** - * Restore keys used for prefix searching - * - * @generated from protobuf field: repeated string restore_keys = 3; - */ - restoreKeys: string[]; - /** - * Hash of the compression tool, runner OS and paths cached - * - * @generated from protobuf field: string version = 4; - */ - version: string; -} -/** - * @generated from protobuf message github.actions.results.api.v1.LookupCacheEntryResponse - */ -export interface LookupCacheEntryResponse { - /** - * Indicates whether the cache entry exists or not - * - * @generated from protobuf field: bool exists = 1; - */ - exists: boolean; - /** - * Matched cache entry metadata - * - * @generated from protobuf field: github.actions.results.entities.v1.CacheEntry entry = 2; - */ - entry?: CacheEntry; -} // @generated message type with reflection information, may provide speed optimized methods class CreateCacheEntryRequest$Type extends MessageType { constructor() { @@ -840,128 +794,6 @@ class ListCacheEntriesResponse$Type extends MessageType { - constructor() { - super("github.actions.results.api.v1.LookupCacheEntryRequest", [ - { no: 1, name: "metadata", kind: "message", T: () => CacheMetadata }, - { no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 3, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }, - { no: 4, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ } - ]); - } - create(value?: PartialMessage): LookupCacheEntryRequest { - const message = { key: "", restoreKeys: [], version: "" }; - globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== undefined) - reflectionMergePartial(this, message, value); - return message; - } - internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: LookupCacheEntryRequest): LookupCacheEntryRequest { - let message = target ?? this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1: - message.metadata = CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); - break; - case /* string key */ 2: - message.key = reader.string(); - break; - case /* repeated string restore_keys */ 3: - message.restoreKeys.push(reader.string()); - break; - case /* string version */ 4: - message.version = reader.string(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; - } - internalBinaryWrite(message: LookupCacheEntryRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { - /* github.actions.results.entities.v1.CacheMetadata metadata = 1; */ - if (message.metadata) - CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); - /* string key = 2; */ - if (message.key !== "") - writer.tag(2, WireType.LengthDelimited).string(message.key); - /* repeated string restore_keys = 3; */ - for (let i = 0; i < message.restoreKeys.length; i++) - writer.tag(3, WireType.LengthDelimited).string(message.restoreKeys[i]); - /* string version = 4; */ - if (message.version !== "") - writer.tag(4, WireType.LengthDelimited).string(message.version); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; - } -} -/** - * @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryRequest - */ -export const LookupCacheEntryRequest = new LookupCacheEntryRequest$Type(); -// @generated message type with reflection information, may provide speed optimized methods -class LookupCacheEntryResponse$Type extends MessageType { - constructor() { - super("github.actions.results.api.v1.LookupCacheEntryResponse", [ - { no: 1, name: "exists", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }, - { no: 2, name: "entry", kind: "message", T: () => CacheEntry } - ]); - } - create(value?: PartialMessage): LookupCacheEntryResponse { - const message = { exists: false }; - globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== undefined) - reflectionMergePartial(this, message, value); - return message; - } - internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: LookupCacheEntryResponse): LookupCacheEntryResponse { - let message = target ?? this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* bool exists */ 1: - message.exists = reader.bool(); - break; - case /* github.actions.results.entities.v1.CacheEntry entry */ 2: - message.entry = CacheEntry.internalBinaryRead(reader, reader.uint32(), options, message.entry); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; - } - internalBinaryWrite(message: LookupCacheEntryResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { - /* bool exists = 1; */ - if (message.exists !== false) - writer.tag(1, WireType.Varint).bool(message.exists); - /* github.actions.results.entities.v1.CacheEntry entry = 2; */ - if (message.entry) - CacheEntry.internalBinaryWrite(message.entry, writer.tag(2, WireType.LengthDelimited).fork(), options).join(); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; - } -} -/** - * @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryResponse - */ -export const LookupCacheEntryResponse = new LookupCacheEntryResponse$Type(); /** * @generated ServiceType for protobuf service github.actions.results.api.v1.CacheService */ @@ -970,6 +802,5 @@ export const CacheService = new ServiceType("github.actions.results.api.v1.Cache { name: "FinalizeCacheEntryUpload", options: {}, I: FinalizeCacheEntryUploadRequest, O: FinalizeCacheEntryUploadResponse }, { name: "GetCacheEntryDownloadURL", options: {}, I: GetCacheEntryDownloadURLRequest, O: GetCacheEntryDownloadURLResponse }, { name: "DeleteCacheEntry", options: {}, I: DeleteCacheEntryRequest, O: DeleteCacheEntryResponse }, - { name: "ListCacheEntries", options: {}, I: ListCacheEntriesRequest, O: ListCacheEntriesResponse }, - { name: "LookupCacheEntry", options: {}, I: LookupCacheEntryRequest, O: LookupCacheEntryResponse } + { name: "ListCacheEntries", options: {}, I: ListCacheEntriesRequest, O: ListCacheEntriesResponse } ]); diff --git a/packages/cache/src/generated/results/api/v1/cache.twirp.ts b/packages/cache/src/generated/results/api/v1/cache.twirp.ts index c8f1f633..3dd73707 100644 --- a/packages/cache/src/generated/results/api/v1/cache.twirp.ts +++ b/packages/cache/src/generated/results/api/v1/cache.twirp.ts @@ -19,8 +19,6 @@ import { DeleteCacheEntryResponse, ListCacheEntriesRequest, ListCacheEntriesResponse, - LookupCacheEntryRequest, - LookupCacheEntryResponse, } from "./cache"; //==================================// @@ -52,9 +50,6 @@ export interface CacheServiceClient { ListCacheEntries( request: ListCacheEntriesRequest ): Promise; - LookupCacheEntry( - request: LookupCacheEntryRequest - ): Promise; } export class CacheServiceClientJSON implements CacheServiceClient { @@ -66,7 +61,6 @@ export class CacheServiceClientJSON implements CacheServiceClient { this.GetCacheEntryDownloadURL.bind(this); this.DeleteCacheEntry.bind(this); this.ListCacheEntries.bind(this); - this.LookupCacheEntry.bind(this); } CreateCacheEntry( request: CreateCacheEntryRequest @@ -167,26 +161,6 @@ export class CacheServiceClientJSON implements CacheServiceClient { }) ); } - - LookupCacheEntry( - request: LookupCacheEntryRequest - ): Promise { - const data = LookupCacheEntryRequest.toJson(request, { - useProtoFieldName: true, - emitDefaultValues: false, - }); - const promise = this.rpc.request( - "github.actions.results.api.v1.CacheService", - "LookupCacheEntry", - "application/json", - data as object - ); - return promise.then((data) => - LookupCacheEntryResponse.fromJson(data as any, { - ignoreUnknownFields: true, - }) - ); - } } export class CacheServiceClientProtobuf implements CacheServiceClient { @@ -198,7 +172,6 @@ export class CacheServiceClientProtobuf implements CacheServiceClient { this.GetCacheEntryDownloadURL.bind(this); this.DeleteCacheEntry.bind(this); this.ListCacheEntries.bind(this); - this.LookupCacheEntry.bind(this); } CreateCacheEntry( request: CreateCacheEntryRequest @@ -274,21 +247,6 @@ export class CacheServiceClientProtobuf implements CacheServiceClient { ListCacheEntriesResponse.fromBinary(data as Uint8Array) ); } - - LookupCacheEntry( - request: LookupCacheEntryRequest - ): Promise { - const data = LookupCacheEntryRequest.toBinary(request); - const promise = this.rpc.request( - "github.actions.results.api.v1.CacheService", - "LookupCacheEntry", - "application/protobuf", - data - ); - return 
promise.then((data) => - LookupCacheEntryResponse.fromBinary(data as Uint8Array) - ); - } } //==================================// @@ -316,10 +274,6 @@ export interface CacheServiceTwirp { ctx: T, request: ListCacheEntriesRequest ): Promise; - LookupCacheEntry( - ctx: T, - request: LookupCacheEntryRequest - ): Promise; } export enum CacheServiceMethod { @@ -328,7 +282,6 @@ export enum CacheServiceMethod { GetCacheEntryDownloadURL = "GetCacheEntryDownloadURL", DeleteCacheEntry = "DeleteCacheEntry", ListCacheEntries = "ListCacheEntries", - LookupCacheEntry = "LookupCacheEntry", } export const CacheServiceMethodList = [ @@ -337,7 +290,6 @@ export const CacheServiceMethodList = [ CacheServiceMethod.GetCacheEntryDownloadURL, CacheServiceMethod.DeleteCacheEntry, CacheServiceMethod.ListCacheEntries, - CacheServiceMethod.LookupCacheEntry, ]; export function createCacheServiceServer( @@ -457,26 +409,6 @@ function matchCacheServiceRoute( interceptors ); }; - case "LookupCacheEntry": - return async ( - ctx: T, - service: CacheServiceTwirp, - data: Buffer, - interceptors?: Interceptor< - T, - LookupCacheEntryRequest, - LookupCacheEntryResponse - >[] - ) => { - ctx = { ...ctx, methodName: "LookupCacheEntry" }; - await events.onMatch(ctx); - return handleCacheServiceLookupCacheEntryRequest( - ctx, - service, - data, - interceptors - ); - }; default: events.onNotFound(); const msg = `no handler found`; @@ -648,39 +580,6 @@ function handleCacheServiceListCacheEntriesRequest< throw new TwirpError(TwirpErrorCode.BadRoute, msg); } } - -function handleCacheServiceLookupCacheEntryRequest< - T extends TwirpContext = TwirpContext ->( - ctx: T, - service: CacheServiceTwirp, - data: Buffer, - interceptors?: Interceptor< - T, - LookupCacheEntryRequest, - LookupCacheEntryResponse - >[] -): Promise { - switch (ctx.contentType) { - case TwirpContentType.JSON: - return handleCacheServiceLookupCacheEntryJSON( - ctx, - service, - data, - interceptors - ); - case TwirpContentType.Protobuf: - return handleCacheServiceLookupCacheEntryProtobuf( - ctx, - service, - data, - interceptors - ); - default: - const msg = "unexpected Content-Type"; - throw new TwirpError(TwirpErrorCode.BadRoute, msg); - } -} async function handleCacheServiceCreateCacheEntryJSON< T extends TwirpContext = TwirpContext >( @@ -920,54 +819,6 @@ async function handleCacheServiceListCacheEntriesJSON< }) as string ); } - -async function handleCacheServiceLookupCacheEntryJSON< - T extends TwirpContext = TwirpContext ->( - ctx: T, - service: CacheServiceTwirp, - data: Buffer, - interceptors?: Interceptor< - T, - LookupCacheEntryRequest, - LookupCacheEntryResponse - >[] -) { - let request: LookupCacheEntryRequest; - let response: LookupCacheEntryResponse; - - try { - const body = JSON.parse(data.toString() || "{}"); - request = LookupCacheEntryRequest.fromJson(body, { - ignoreUnknownFields: true, - }); - } catch (e) { - if (e instanceof Error) { - const msg = "the json request could not be decoded"; - throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true); - } - } - - if (interceptors && interceptors.length > 0) { - const interceptor = chainInterceptors(...interceptors) as Interceptor< - T, - LookupCacheEntryRequest, - LookupCacheEntryResponse - >; - response = await interceptor(ctx, request!, (ctx, inputReq) => { - return service.LookupCacheEntry(ctx, inputReq); - }); - } else { - response = await service.LookupCacheEntry(ctx, request!); - } - - return JSON.stringify( - LookupCacheEntryResponse.toJson(response, { - useProtoFieldName: true, - 
emitDefaultValues: false, - }) as string - ); -} async function handleCacheServiceCreateCacheEntryProtobuf< T extends TwirpContext = TwirpContext >( @@ -1167,43 +1018,3 @@ async function handleCacheServiceListCacheEntriesProtobuf< return Buffer.from(ListCacheEntriesResponse.toBinary(response)); } - -async function handleCacheServiceLookupCacheEntryProtobuf< - T extends TwirpContext = TwirpContext ->( - ctx: T, - service: CacheServiceTwirp, - data: Buffer, - interceptors?: Interceptor< - T, - LookupCacheEntryRequest, - LookupCacheEntryResponse - >[] -) { - let request: LookupCacheEntryRequest; - let response: LookupCacheEntryResponse; - - try { - request = LookupCacheEntryRequest.fromBinary(data); - } catch (e) { - if (e instanceof Error) { - const msg = "the protobuf request could not be decoded"; - throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true); - } - } - - if (interceptors && interceptors.length > 0) { - const interceptor = chainInterceptors(...interceptors) as Interceptor< - T, - LookupCacheEntryRequest, - LookupCacheEntryResponse - >; - response = await interceptor(ctx, request!, (ctx, inputReq) => { - return service.LookupCacheEntry(ctx, inputReq); - }); - } else { - response = await service.LookupCacheEntry(ctx, request!); - } - - return Buffer.from(LookupCacheEntryResponse.toBinary(response)); -} From 01f21badd5a7522507f84558503b56c4deec5326 Mon Sep 17 00:00:00 2001 From: Josh Gross Date: Tue, 17 Dec 2024 14:51:57 -0500 Subject: [PATCH 101/108] Remove more unused cache APIs --- .../src/generated/results/api/v1/cache.ts | 287 +------------ .../generated/results/api/v1/cache.twirp.ts | 378 ------------------ .../results/entities/v1/cacheentry.ts | 163 -------- 3 files changed, 1 insertion(+), 827 deletions(-) delete mode 100644 packages/cache/src/generated/results/entities/v1/cacheentry.ts diff --git a/packages/cache/src/generated/results/api/v1/cache.ts b/packages/cache/src/generated/results/api/v1/cache.ts index 5345a2b1..5e998c37 100644 --- a/packages/cache/src/generated/results/api/v1/cache.ts +++ b/packages/cache/src/generated/results/api/v1/cache.ts @@ -12,7 +12,6 @@ import type { PartialMessage } from "@protobuf-ts/runtime"; import { reflectionMergePartial } from "@protobuf-ts/runtime"; import { MESSAGE_TYPE } from "@protobuf-ts/runtime"; import { MessageType } from "@protobuf-ts/runtime"; -import { CacheEntry } from "../../entities/v1/cacheentry"; import { CacheMetadata } from "../../entities/v1/cachemetadata"; /** * @generated from protobuf message github.actions.results.api.v1.CreateCacheEntryRequest @@ -146,72 +145,6 @@ export interface GetCacheEntryDownloadURLResponse { */ matchedKey: string; } -/** - * @generated from protobuf message github.actions.results.api.v1.DeleteCacheEntryRequest - */ -export interface DeleteCacheEntryRequest { - /** - * Scope and other metadata for the cache entry - * - * @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1; - */ - metadata?: CacheMetadata; - /** - * An explicit key for a cache entry - * - * @generated from protobuf field: string key = 2; - */ - key: string; -} -/** - * @generated from protobuf message github.actions.results.api.v1.DeleteCacheEntryResponse - */ -export interface DeleteCacheEntryResponse { - /** - * @generated from protobuf field: bool ok = 1; - */ - ok: boolean; - /** - * Cache entry database ID - * - * @generated from protobuf field: int64 entry_id = 2; - */ - entryId: string; -} -/** - * @generated from protobuf message 
github.actions.results.api.v1.ListCacheEntriesRequest - */ -export interface ListCacheEntriesRequest { - /** - * Scope and other metadata for the cache entry - * - * @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1; - */ - metadata?: CacheMetadata; - /** - * An explicit key for a cache entry - * - * @generated from protobuf field: string key = 2; - */ - key: string; - /** - * Restore keys used for prefix searching - * - * @generated from protobuf field: repeated string restore_keys = 3; - */ - restoreKeys: string[]; -} -/** - * @generated from protobuf message github.actions.results.api.v1.ListCacheEntriesResponse - */ -export interface ListCacheEntriesResponse { - /** - * Cache entries in the defined scope - * - * @generated from protobuf field: repeated github.actions.results.entities.v1.CacheEntry entries = 1; - */ - entries: CacheEntry[]; -} // @generated message type with reflection information, may provide speed optimized methods class CreateCacheEntryRequest$Type extends MessageType { constructor() { @@ -578,229 +511,11 @@ class GetCacheEntryDownloadURLResponse$Type extends MessageType { - constructor() { - super("github.actions.results.api.v1.DeleteCacheEntryRequest", [ - { no: 1, name: "metadata", kind: "message", T: () => CacheMetadata }, - { no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ } - ]); - } - create(value?: PartialMessage): DeleteCacheEntryRequest { - const message = { key: "" }; - globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== undefined) - reflectionMergePartial(this, message, value); - return message; - } - internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DeleteCacheEntryRequest): DeleteCacheEntryRequest { - let message = target ?? this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1: - message.metadata = CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); - break; - case /* string key */ 2: - message.key = reader.string(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; - } - internalBinaryWrite(message: DeleteCacheEntryRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { - /* github.actions.results.entities.v1.CacheMetadata metadata = 1; */ - if (message.metadata) - CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); - /* string key = 2; */ - if (message.key !== "") - writer.tag(2, WireType.LengthDelimited).string(message.key); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; - } -} -/** - * @generated MessageType for protobuf message github.actions.results.api.v1.DeleteCacheEntryRequest - */ -export const DeleteCacheEntryRequest = new DeleteCacheEntryRequest$Type(); -// @generated message type with reflection information, may provide speed optimized methods -class DeleteCacheEntryResponse$Type extends MessageType { - constructor() { - super("github.actions.results.api.v1.DeleteCacheEntryResponse", [ - { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }, - { no: 2, name: "entry_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ } - ]); - } - create(value?: PartialMessage): DeleteCacheEntryResponse { - const message = { ok: false, entryId: "0" }; - globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== undefined) - reflectionMergePartial(this, message, value); - return message; - } - internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DeleteCacheEntryResponse): DeleteCacheEntryResponse { - let message = target ?? this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* bool ok */ 1: - message.ok = reader.bool(); - break; - case /* int64 entry_id */ 2: - message.entryId = reader.int64().toString(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; - } - internalBinaryWrite(message: DeleteCacheEntryResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { - /* bool ok = 1; */ - if (message.ok !== false) - writer.tag(1, WireType.Varint).bool(message.ok); - /* int64 entry_id = 2; */ - if (message.entryId !== "0") - writer.tag(2, WireType.Varint).int64(message.entryId); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; - } -} -/** - * @generated MessageType for protobuf message github.actions.results.api.v1.DeleteCacheEntryResponse - */ -export const DeleteCacheEntryResponse = new DeleteCacheEntryResponse$Type(); -// @generated message type with reflection information, may provide speed optimized methods -class ListCacheEntriesRequest$Type extends MessageType { - constructor() { - super("github.actions.results.api.v1.ListCacheEntriesRequest", [ - { no: 1, name: "metadata", kind: "message", T: () => CacheMetadata }, - { no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 3, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ } - ]); - } - create(value?: PartialMessage): ListCacheEntriesRequest { - const message = { key: "", restoreKeys: [] }; - globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== undefined) - reflectionMergePartial(this, message, value); - return message; - } - internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListCacheEntriesRequest): ListCacheEntriesRequest { - let message = target ?? 
this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1: - message.metadata = CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); - break; - case /* string key */ 2: - message.key = reader.string(); - break; - case /* repeated string restore_keys */ 3: - message.restoreKeys.push(reader.string()); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; - } - internalBinaryWrite(message: ListCacheEntriesRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { - /* github.actions.results.entities.v1.CacheMetadata metadata = 1; */ - if (message.metadata) - CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); - /* string key = 2; */ - if (message.key !== "") - writer.tag(2, WireType.LengthDelimited).string(message.key); - /* repeated string restore_keys = 3; */ - for (let i = 0; i < message.restoreKeys.length; i++) - writer.tag(3, WireType.LengthDelimited).string(message.restoreKeys[i]); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; - } -} -/** - * @generated MessageType for protobuf message github.actions.results.api.v1.ListCacheEntriesRequest - */ -export const ListCacheEntriesRequest = new ListCacheEntriesRequest$Type(); -// @generated message type with reflection information, may provide speed optimized methods -class ListCacheEntriesResponse$Type extends MessageType { - constructor() { - super("github.actions.results.api.v1.ListCacheEntriesResponse", [ - { no: 1, name: "entries", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => CacheEntry } - ]); - } - create(value?: PartialMessage): ListCacheEntriesResponse { - const message = { entries: [] }; - globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== undefined) - reflectionMergePartial(this, message, value); - return message; - } - internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListCacheEntriesResponse): ListCacheEntriesResponse { - let message = target ?? this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* repeated github.actions.results.entities.v1.CacheEntry entries */ 1: - message.entries.push(CacheEntry.internalBinaryRead(reader, reader.uint32(), options)); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; - } - internalBinaryWrite(message: ListCacheEntriesResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { - /* repeated github.actions.results.entities.v1.CacheEntry entries = 1; */ - for (let i = 0; i < message.entries.length; i++) - CacheEntry.internalBinaryWrite(message.entries[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join(); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; - } -} -/** - * @generated MessageType for protobuf message github.actions.results.api.v1.ListCacheEntriesResponse - */ -export const ListCacheEntriesResponse = new ListCacheEntriesResponse$Type(); /** * @generated ServiceType for protobuf service github.actions.results.api.v1.CacheService */ export const CacheService = new ServiceType("github.actions.results.api.v1.CacheService", [ { name: "CreateCacheEntry", options: {}, I: CreateCacheEntryRequest, O: CreateCacheEntryResponse }, { name: "FinalizeCacheEntryUpload", options: {}, I: FinalizeCacheEntryUploadRequest, O: FinalizeCacheEntryUploadResponse }, - { name: "GetCacheEntryDownloadURL", options: {}, I: GetCacheEntryDownloadURLRequest, O: GetCacheEntryDownloadURLResponse }, - { name: "DeleteCacheEntry", options: {}, I: DeleteCacheEntryRequest, O: DeleteCacheEntryResponse }, - { name: "ListCacheEntries", options: {}, I: ListCacheEntriesRequest, O: ListCacheEntriesResponse } + { name: "GetCacheEntryDownloadURL", options: {}, I: GetCacheEntryDownloadURLRequest, O: GetCacheEntryDownloadURLResponse } ]); diff --git a/packages/cache/src/generated/results/api/v1/cache.twirp.ts b/packages/cache/src/generated/results/api/v1/cache.twirp.ts index 3dd73707..8c14c31d 100644 --- a/packages/cache/src/generated/results/api/v1/cache.twirp.ts +++ b/packages/cache/src/generated/results/api/v1/cache.twirp.ts @@ -15,10 +15,6 @@ import { FinalizeCacheEntryUploadResponse, GetCacheEntryDownloadURLRequest, GetCacheEntryDownloadURLResponse, - DeleteCacheEntryRequest, - DeleteCacheEntryResponse, - ListCacheEntriesRequest, - ListCacheEntriesResponse, } from "./cache"; //==================================// @@ -44,12 +40,6 @@ export interface CacheServiceClient { GetCacheEntryDownloadURL( request: GetCacheEntryDownloadURLRequest ): Promise; - DeleteCacheEntry( - request: DeleteCacheEntryRequest - ): Promise; - ListCacheEntries( - request: ListCacheEntriesRequest - ): Promise; } export class CacheServiceClientJSON implements CacheServiceClient { @@ -59,8 +49,6 @@ export class CacheServiceClientJSON implements CacheServiceClient { this.CreateCacheEntry.bind(this); this.FinalizeCacheEntryUpload.bind(this); this.GetCacheEntryDownloadURL.bind(this); - this.DeleteCacheEntry.bind(this); - this.ListCacheEntries.bind(this); } CreateCacheEntry( request: CreateCacheEntryRequest @@ -121,46 +109,6 @@ export class CacheServiceClientJSON implements CacheServiceClient { }) ); } - - DeleteCacheEntry( - request: DeleteCacheEntryRequest - ): Promise { - const data = DeleteCacheEntryRequest.toJson(request, { - useProtoFieldName: true, - emitDefaultValues: false, - }); - const promise = this.rpc.request( - "github.actions.results.api.v1.CacheService", - "DeleteCacheEntry", - "application/json", - data as object - ); - return promise.then((data) => - DeleteCacheEntryResponse.fromJson(data as any, { - ignoreUnknownFields: true, - }) - ); - } - - ListCacheEntries( - 
request: ListCacheEntriesRequest - ): Promise { - const data = ListCacheEntriesRequest.toJson(request, { - useProtoFieldName: true, - emitDefaultValues: false, - }); - const promise = this.rpc.request( - "github.actions.results.api.v1.CacheService", - "ListCacheEntries", - "application/json", - data as object - ); - return promise.then((data) => - ListCacheEntriesResponse.fromJson(data as any, { - ignoreUnknownFields: true, - }) - ); - } } export class CacheServiceClientProtobuf implements CacheServiceClient { @@ -170,8 +118,6 @@ export class CacheServiceClientProtobuf implements CacheServiceClient { this.CreateCacheEntry.bind(this); this.FinalizeCacheEntryUpload.bind(this); this.GetCacheEntryDownloadURL.bind(this); - this.DeleteCacheEntry.bind(this); - this.ListCacheEntries.bind(this); } CreateCacheEntry( request: CreateCacheEntryRequest @@ -217,36 +163,6 @@ export class CacheServiceClientProtobuf implements CacheServiceClient { GetCacheEntryDownloadURLResponse.fromBinary(data as Uint8Array) ); } - - DeleteCacheEntry( - request: DeleteCacheEntryRequest - ): Promise { - const data = DeleteCacheEntryRequest.toBinary(request); - const promise = this.rpc.request( - "github.actions.results.api.v1.CacheService", - "DeleteCacheEntry", - "application/protobuf", - data - ); - return promise.then((data) => - DeleteCacheEntryResponse.fromBinary(data as Uint8Array) - ); - } - - ListCacheEntries( - request: ListCacheEntriesRequest - ): Promise { - const data = ListCacheEntriesRequest.toBinary(request); - const promise = this.rpc.request( - "github.actions.results.api.v1.CacheService", - "ListCacheEntries", - "application/protobuf", - data - ); - return promise.then((data) => - ListCacheEntriesResponse.fromBinary(data as Uint8Array) - ); - } } //==================================// @@ -266,30 +182,18 @@ export interface CacheServiceTwirp { ctx: T, request: GetCacheEntryDownloadURLRequest ): Promise; - DeleteCacheEntry( - ctx: T, - request: DeleteCacheEntryRequest - ): Promise; - ListCacheEntries( - ctx: T, - request: ListCacheEntriesRequest - ): Promise; } export enum CacheServiceMethod { CreateCacheEntry = "CreateCacheEntry", FinalizeCacheEntryUpload = "FinalizeCacheEntryUpload", GetCacheEntryDownloadURL = "GetCacheEntryDownloadURL", - DeleteCacheEntry = "DeleteCacheEntry", - ListCacheEntries = "ListCacheEntries", } export const CacheServiceMethodList = [ CacheServiceMethod.CreateCacheEntry, CacheServiceMethod.FinalizeCacheEntryUpload, CacheServiceMethod.GetCacheEntryDownloadURL, - CacheServiceMethod.DeleteCacheEntry, - CacheServiceMethod.ListCacheEntries, ]; export function createCacheServiceServer( @@ -369,46 +273,6 @@ function matchCacheServiceRoute( interceptors ); }; - case "DeleteCacheEntry": - return async ( - ctx: T, - service: CacheServiceTwirp, - data: Buffer, - interceptors?: Interceptor< - T, - DeleteCacheEntryRequest, - DeleteCacheEntryResponse - >[] - ) => { - ctx = { ...ctx, methodName: "DeleteCacheEntry" }; - await events.onMatch(ctx); - return handleCacheServiceDeleteCacheEntryRequest( - ctx, - service, - data, - interceptors - ); - }; - case "ListCacheEntries": - return async ( - ctx: T, - service: CacheServiceTwirp, - data: Buffer, - interceptors?: Interceptor< - T, - ListCacheEntriesRequest, - ListCacheEntriesResponse - >[] - ) => { - ctx = { ...ctx, methodName: "ListCacheEntries" }; - await events.onMatch(ctx); - return handleCacheServiceListCacheEntriesRequest( - ctx, - service, - data, - interceptors - ); - }; default: events.onNotFound(); const msg = `no handler found`; @@ 
-514,72 +378,6 @@ function handleCacheServiceGetCacheEntryDownloadURLRequest< throw new TwirpError(TwirpErrorCode.BadRoute, msg); } } - -function handleCacheServiceDeleteCacheEntryRequest< - T extends TwirpContext = TwirpContext ->( - ctx: T, - service: CacheServiceTwirp, - data: Buffer, - interceptors?: Interceptor< - T, - DeleteCacheEntryRequest, - DeleteCacheEntryResponse - >[] -): Promise { - switch (ctx.contentType) { - case TwirpContentType.JSON: - return handleCacheServiceDeleteCacheEntryJSON( - ctx, - service, - data, - interceptors - ); - case TwirpContentType.Protobuf: - return handleCacheServiceDeleteCacheEntryProtobuf( - ctx, - service, - data, - interceptors - ); - default: - const msg = "unexpected Content-Type"; - throw new TwirpError(TwirpErrorCode.BadRoute, msg); - } -} - -function handleCacheServiceListCacheEntriesRequest< - T extends TwirpContext = TwirpContext ->( - ctx: T, - service: CacheServiceTwirp, - data: Buffer, - interceptors?: Interceptor< - T, - ListCacheEntriesRequest, - ListCacheEntriesResponse - >[] -): Promise { - switch (ctx.contentType) { - case TwirpContentType.JSON: - return handleCacheServiceListCacheEntriesJSON( - ctx, - service, - data, - interceptors - ); - case TwirpContentType.Protobuf: - return handleCacheServiceListCacheEntriesProtobuf( - ctx, - service, - data, - interceptors - ); - default: - const msg = "unexpected Content-Type"; - throw new TwirpError(TwirpErrorCode.BadRoute, msg); - } -} async function handleCacheServiceCreateCacheEntryJSON< T extends TwirpContext = TwirpContext >( @@ -723,102 +521,6 @@ async function handleCacheServiceGetCacheEntryDownloadURLJSON< }) as string ); } - -async function handleCacheServiceDeleteCacheEntryJSON< - T extends TwirpContext = TwirpContext ->( - ctx: T, - service: CacheServiceTwirp, - data: Buffer, - interceptors?: Interceptor< - T, - DeleteCacheEntryRequest, - DeleteCacheEntryResponse - >[] -) { - let request: DeleteCacheEntryRequest; - let response: DeleteCacheEntryResponse; - - try { - const body = JSON.parse(data.toString() || "{}"); - request = DeleteCacheEntryRequest.fromJson(body, { - ignoreUnknownFields: true, - }); - } catch (e) { - if (e instanceof Error) { - const msg = "the json request could not be decoded"; - throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true); - } - } - - if (interceptors && interceptors.length > 0) { - const interceptor = chainInterceptors(...interceptors) as Interceptor< - T, - DeleteCacheEntryRequest, - DeleteCacheEntryResponse - >; - response = await interceptor(ctx, request!, (ctx, inputReq) => { - return service.DeleteCacheEntry(ctx, inputReq); - }); - } else { - response = await service.DeleteCacheEntry(ctx, request!); - } - - return JSON.stringify( - DeleteCacheEntryResponse.toJson(response, { - useProtoFieldName: true, - emitDefaultValues: false, - }) as string - ); -} - -async function handleCacheServiceListCacheEntriesJSON< - T extends TwirpContext = TwirpContext ->( - ctx: T, - service: CacheServiceTwirp, - data: Buffer, - interceptors?: Interceptor< - T, - ListCacheEntriesRequest, - ListCacheEntriesResponse - >[] -) { - let request: ListCacheEntriesRequest; - let response: ListCacheEntriesResponse; - - try { - const body = JSON.parse(data.toString() || "{}"); - request = ListCacheEntriesRequest.fromJson(body, { - ignoreUnknownFields: true, - }); - } catch (e) { - if (e instanceof Error) { - const msg = "the json request could not be decoded"; - throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true); - } - } - - if 
(interceptors && interceptors.length > 0) { - const interceptor = chainInterceptors(...interceptors) as Interceptor< - T, - ListCacheEntriesRequest, - ListCacheEntriesResponse - >; - response = await interceptor(ctx, request!, (ctx, inputReq) => { - return service.ListCacheEntries(ctx, inputReq); - }); - } else { - response = await service.ListCacheEntries(ctx, request!); - } - - return JSON.stringify( - ListCacheEntriesResponse.toJson(response, { - useProtoFieldName: true, - emitDefaultValues: false, - }) as string - ); -} async function handleCacheServiceCreateCacheEntryProtobuf< T extends TwirpContext = TwirpContext >( @@ -938,83 +640,3 @@ async function handleCacheServiceGetCacheEntryDownloadURLProtobuf< return Buffer.from(GetCacheEntryDownloadURLResponse.toBinary(response)); } - -async function handleCacheServiceDeleteCacheEntryProtobuf< - T extends TwirpContext = TwirpContext ->( - ctx: T, - service: CacheServiceTwirp, - data: Buffer, - interceptors?: Interceptor< - T, - DeleteCacheEntryRequest, - DeleteCacheEntryResponse - >[] -) { - let request: DeleteCacheEntryRequest; - let response: DeleteCacheEntryResponse; - - try { - request = DeleteCacheEntryRequest.fromBinary(data); - } catch (e) { - if (e instanceof Error) { - const msg = "the protobuf request could not be decoded"; - throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true); - } - } - - if (interceptors && interceptors.length > 0) { - const interceptor = chainInterceptors(...interceptors) as Interceptor< - T, - DeleteCacheEntryRequest, - DeleteCacheEntryResponse - >; - response = await interceptor(ctx, request!, (ctx, inputReq) => { - return service.DeleteCacheEntry(ctx, inputReq); - }); - } else { - response = await service.DeleteCacheEntry(ctx, request!); - } - - return Buffer.from(DeleteCacheEntryResponse.toBinary(response)); -} - -async function handleCacheServiceListCacheEntriesProtobuf< - T extends TwirpContext = TwirpContext ->( - ctx: T, - service: CacheServiceTwirp, - data: Buffer, - interceptors?: Interceptor< - T, - ListCacheEntriesRequest, - ListCacheEntriesResponse - >[] -) { - let request: ListCacheEntriesRequest; - let response: ListCacheEntriesResponse; - - try { - request = ListCacheEntriesRequest.fromBinary(data); - } catch (e) { - if (e instanceof Error) { - const msg = "the protobuf request could not be decoded"; - throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true); - } - } - - if (interceptors && interceptors.length > 0) { - const interceptor = chainInterceptors(...interceptors) as Interceptor< - T, - ListCacheEntriesRequest, - ListCacheEntriesResponse - >; - response = await interceptor(ctx, request!, (ctx, inputReq) => { - return service.ListCacheEntries(ctx, inputReq); - }); - } else { - response = await service.ListCacheEntries(ctx, request!); - } - - return Buffer.from(ListCacheEntriesResponse.toBinary(response)); -} diff --git a/packages/cache/src/generated/results/entities/v1/cacheentry.ts b/packages/cache/src/generated/results/entities/v1/cacheentry.ts deleted file mode 100644 index b55b4afa..00000000 --- a/packages/cache/src/generated/results/entities/v1/cacheentry.ts +++ /dev/null @@ -1,163 +0,0 @@ -// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies -// @generated from protobuf file "results/entities/v1/cacheentry.proto" (package "github.actions.results.entities.v1", syntax proto3) -// tslint:disable -import type { BinaryWriteOptions } from "@protobuf-ts/runtime"; -import type { IBinaryWriter } from 
"@protobuf-ts/runtime"; -import { WireType } from "@protobuf-ts/runtime"; -import type { BinaryReadOptions } from "@protobuf-ts/runtime"; -import type { IBinaryReader } from "@protobuf-ts/runtime"; -import { UnknownFieldHandler } from "@protobuf-ts/runtime"; -import type { PartialMessage } from "@protobuf-ts/runtime"; -import { reflectionMergePartial } from "@protobuf-ts/runtime"; -import { MESSAGE_TYPE } from "@protobuf-ts/runtime"; -import { MessageType } from "@protobuf-ts/runtime"; -import { Timestamp } from "../../../google/protobuf/timestamp"; -/** - * @generated from protobuf message github.actions.results.entities.v1.CacheEntry - */ -export interface CacheEntry { - /** - * An explicit key for a cache entry - * - * @generated from protobuf field: string key = 1; - */ - key: string; - /** - * SHA256 hex digest of the cache archive - * - * @generated from protobuf field: string hash = 2; - */ - hash: string; - /** - * Cache entry size in bytes - * - * @generated from protobuf field: int64 size_bytes = 3; - */ - sizeBytes: string; - /** - * Access scope - * - * @generated from protobuf field: string scope = 4; - */ - scope: string; - /** - * Version SHA256 hex digest - * - * @generated from protobuf field: string version = 5; - */ - version: string; - /** - * When the cache entry was created - * - * @generated from protobuf field: google.protobuf.Timestamp created_at = 6; - */ - createdAt?: Timestamp; - /** - * When the cache entry was last accessed - * - * @generated from protobuf field: google.protobuf.Timestamp last_accessed_at = 7; - */ - lastAccessedAt?: Timestamp; - /** - * When the cache entry is set to expire - * - * @generated from protobuf field: google.protobuf.Timestamp expires_at = 8; - */ - expiresAt?: Timestamp; -} -// @generated message type with reflection information, may provide speed optimized methods -class CacheEntry$Type extends MessageType { - constructor() { - super("github.actions.results.entities.v1.CacheEntry", [ - { no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 2, name: "hash", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ }, - { no: 4, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 6, name: "created_at", kind: "message", T: () => Timestamp }, - { no: 7, name: "last_accessed_at", kind: "message", T: () => Timestamp }, - { no: 8, name: "expires_at", kind: "message", T: () => Timestamp } - ]); - } - create(value?: PartialMessage): CacheEntry { - const message = { key: "", hash: "", sizeBytes: "0", scope: "", version: "" }; - globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== undefined) - reflectionMergePartial(this, message, value); - return message; - } - internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CacheEntry): CacheEntry { - let message = target ?? 
this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* string key */ 1: - message.key = reader.string(); - break; - case /* string hash */ 2: - message.hash = reader.string(); - break; - case /* int64 size_bytes */ 3: - message.sizeBytes = reader.int64().toString(); - break; - case /* string scope */ 4: - message.scope = reader.string(); - break; - case /* string version */ 5: - message.version = reader.string(); - break; - case /* google.protobuf.Timestamp created_at */ 6: - message.createdAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt); - break; - case /* google.protobuf.Timestamp last_accessed_at */ 7: - message.lastAccessedAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.lastAccessedAt); - break; - case /* google.protobuf.Timestamp expires_at */ 8: - message.expiresAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; - } - internalBinaryWrite(message: CacheEntry, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { - /* string key = 1; */ - if (message.key !== "") - writer.tag(1, WireType.LengthDelimited).string(message.key); - /* string hash = 2; */ - if (message.hash !== "") - writer.tag(2, WireType.LengthDelimited).string(message.hash); - /* int64 size_bytes = 3; */ - if (message.sizeBytes !== "0") - writer.tag(3, WireType.Varint).int64(message.sizeBytes); - /* string scope = 4; */ - if (message.scope !== "") - writer.tag(4, WireType.LengthDelimited).string(message.scope); - /* string version = 5; */ - if (message.version !== "") - writer.tag(5, WireType.LengthDelimited).string(message.version); - /* google.protobuf.Timestamp created_at = 6; */ - if (message.createdAt) - Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, WireType.LengthDelimited).fork(), options).join(); - /* google.protobuf.Timestamp last_accessed_at = 7; */ - if (message.lastAccessedAt) - Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, WireType.LengthDelimited).fork(), options).join(); - /* google.protobuf.Timestamp expires_at = 8; */ - if (message.expiresAt) - Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, WireType.LengthDelimited).fork(), options).join(); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
-        return writer;
-    }
-}
-/**
- * @generated MessageType for protobuf message github.actions.results.entities.v1.CacheEntry
- */
-export const CacheEntry = new CacheEntry$Type();

From f3c12d55618ee99f7557a717ab9c899ca616873d Mon Sep 17 00:00:00 2001
From: Yang Cao
Date: Wed, 8 Jan 2025 16:19:09 +0000
Subject: [PATCH 103/108] Set default concurrency to 10 and make timeout
 configurable

---
 packages/artifact/__tests__/config.test.ts | 16 ++++++++++++
 .../artifact/src/internal/shared/config.ts | 25 ++++++++++---------
 2 files changed, 29 insertions(+), 12 deletions(-)

diff --git a/packages/artifact/__tests__/config.test.ts b/packages/artifact/__tests__/config.test.ts
index b9ef643c..11bbe396 100644
--- a/packages/artifact/__tests__/config.test.ts
+++ b/packages/artifact/__tests__/config.test.ts
@@ -30,3 +30,19 @@ describe('isGhes', () => {
     expect(config.isGhes()).toBe(true)
   })
 })
+
+describe('uploadChunkTimeoutEnv', () => {
+  it('should return default 300000 when no env set', () => {
+    expect(config.getUploadChunkTimeout()).toBe(300000)
+  })
+  it('should return value set in ACTIONS_UPLOAD_TIMEOUT_MS', () => {
+    process.env.ACTIONS_UPLOAD_TIMEOUT_MS = '150000'
+    expect(config.getUploadChunkTimeout()).toBe(150000)
+  })
+  it('should throw if value set in ACTIONS_UPLOAD_TIMEOUT_MS is invalid', () => {
+    process.env.ACTIONS_UPLOAD_TIMEOUT_MS = 'abc'
+    expect(() => {
+      config.getUploadChunkTimeout()
+    }).toThrow()
+  })
+})
diff --git a/packages/artifact/src/internal/shared/config.ts b/packages/artifact/src/internal/shared/config.ts
index 047d3b98..75bbf8b5 100644
--- a/packages/artifact/src/internal/shared/config.ts
+++ b/packages/artifact/src/internal/shared/config.ts
@@ -44,20 +44,21 @@ export function getGitHubWorkspaceDir(): string {
   return ghWorkspaceDir
 }
 
-// Mimics behavior of azcopy: https://learn.microsoft.com/en-us/azure/storage/common/storage-use-azcopy-optimize
-// If your machine has fewer than 5 CPUs, then the value of this variable is set to 32.
-// Otherwise, the default value is equal to 16 multiplied by the number of CPUs. The maximum value of this variable is 300.
+// From testing, setting this value to 10 yielded the best results in terms of reliability, with no impact on performance either
 export function getConcurrency(): number {
-  const numCPUs = os.cpus().length
-
-  if (numCPUs <= 4) {
-    return 32
-  }
-
-  const concurrency = 16 * numCPUs
-  return concurrency > 300 ? 300 : concurrency
+  return 10
 }
 
 export function getUploadChunkTimeout(): number {
-  return 300_000 // 5 minutes
+  const timeoutVar =  process.env['ACTIONS_UPLOAD_TIMEOUT_MS']
+  if (!timeoutVar) {
+    return 300000 // 5 minutes
+  }
+
+  const timeout = parseInt(timeoutVar)
+  if (isNaN(timeout)) {
+    throw new Error('Invalid value set for ACTIONS_UPLOAD_TIMEOUT_MS env variable')
+  }
+
+  return timeout
 }
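The validation above leans on standard JavaScript parseInt semantics, which is worth knowing when setting ACTIONS_UPLOAD_TIMEOUT_MS: parseInt ignores trailing non-numeric characters, so only values that do not begin with a digit are rejected. A quick illustration of the behavior the isNaN check relies on (plain Node.js semantics, not part of the patch itself):

// parseInt semantics behind getUploadChunkTimeout's isNaN check
parseInt('150000')   // 150000: accepted
parseInt('150000ms') // 150000: trailing characters are silently ignored
parseInt('abc')      // NaN: triggers the 'Invalid value' error above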
From ede05b95d7f7bb1e0e09edd39a3419e8ac5286ab Mon Sep 17 00:00:00 2001
From: Yang Cao
Date: Wed, 8 Jan 2025 17:53:44 +0000
Subject: [PATCH 104/108] Make concurrency change opt-in, but can only go
 lower

---
 packages/artifact/__tests__/config.test.ts | 45 +++++++++++++++++++
 .../artifact/src/internal/shared/config.ts | 40 +++++++++++++++--
 2 files changed, 81 insertions(+), 4 deletions(-)

diff --git a/packages/artifact/__tests__/config.test.ts b/packages/artifact/__tests__/config.test.ts
index 11bbe396..4057ec3e 100644
--- a/packages/artifact/__tests__/config.test.ts
+++ b/packages/artifact/__tests__/config.test.ts
@@ -1,4 +1,10 @@
 import * as config from '../src/internal/shared/config'
+import os from 'os'
+
+// Mock the 'os' module
+jest.mock('os', () => ({
+  cpus: jest.fn()
+}))
 
 beforeEach(() => {
   jest.resetModules()
@@ -35,10 +41,12 @@ describe('uploadChunkTimeoutEnv', () => {
   it('should return default 300000 when no env set', () => {
     expect(config.getUploadChunkTimeout()).toBe(300000)
   })
+
   it('should return value set in ACTIONS_UPLOAD_TIMEOUT_MS', () => {
     process.env.ACTIONS_UPLOAD_TIMEOUT_MS = '150000'
     expect(config.getUploadChunkTimeout()).toBe(150000)
   })
+
   it('should throw if value set in ACTIONS_UPLOAD_TIMEOUT_MS is invalid', () => {
     process.env.ACTIONS_UPLOAD_TIMEOUT_MS = 'abc'
     expect(() => {
@@ -46,3 +54,40 @@ describe('uploadChunkTimeoutEnv', () => {
     }).toThrow()
   })
 })
+
+describe('uploadConcurrencyEnv', () => {
+  it('should return default 32 when cpu num is <= 4', () => {
+    ;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
+    expect(config.getConcurrency()).toBe(32)
+  })
+
+  it('should return 16 * num of cpu when cpu num is > 4', () => {
+    ;(os.cpus as jest.Mock).mockReturnValue(new Array(6))
+    expect(config.getConcurrency()).toBe(96)
+  })
+
+  it('should return up to 300 max value', () => {
+    ;(os.cpus as jest.Mock).mockReturnValue(new Array(32))
+    expect(config.getConcurrency()).toBe(300)
+  })
+
+  it('should return override value when ACTIONS_UPLOAD_CONCURRENCY is set', () => {
+    ;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
+    process.env.ACTIONS_UPLOAD_CONCURRENCY = '10'
+    expect(config.getConcurrency()).toBe(10)
+  })
+
+  it('should throw with invalid value of ACTIONS_UPLOAD_CONCURRENCY', () => {
+    ;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
+    process.env.ACTIONS_UPLOAD_CONCURRENCY = 'abc'
+    expect(() => {
+      config.getConcurrency()
+    }).toThrow()
+  })
+
+  it('cannot go over concurrency cap when override value is greater', () => {
+    ;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
+    process.env.ACTIONS_UPLOAD_CONCURRENCY = '40'
+    expect(config.getConcurrency()).toBe(32)
+  })
+})
diff --git a/packages/artifact/src/internal/shared/config.ts b/packages/artifact/src/internal/shared/config.ts
index 75bbf8b5..d9d9ae35 100644
--- a/packages/artifact/src/internal/shared/config.ts
+++ b/packages/artifact/src/internal/shared/config.ts
@@ -1,4 +1,5 @@
 import os from 'os'
+import {info} from '@actions/core'
 
 // Used for controlling the highWaterMark value of the zip that is being streamed
 // The same value is used as the chunk size that is used during upload to blob storage
@@ -44,20 +45,51 @@ export function getGitHubWorkspaceDir(): string {
   return ghWorkspaceDir
 }
 
-// From testing, setting this value to 10 yielded the best results in terms of reliability, with no impact on performance either
+// Mimics behavior of azcopy: https://learn.microsoft.com/en-us/azure/storage/common/storage-use-azcopy-optimize
+// If your machine has fewer than 5 CPUs, then the value of this variable is set to 32.
+// Otherwise, the default value is equal to 16 multiplied by the number of CPUs. The maximum value of this variable is 300.
+// This value can be lowered with ACTIONS_UPLOAD_CONCURRENCY variable.
 export function getConcurrency(): number {
-  return 10
+  const numCPUs = os.cpus().length
+  let concurrencyCap = 32
+
+  if (numCPUs > 4) {
+    const concurrency = 16 * numCPUs
+    concurrencyCap = concurrency > 300 ? 300 : concurrency
+  }
+
+  const concurrencyOverride = process.env['ACTIONS_UPLOAD_CONCURRENCY']
+  if (concurrencyOverride) {
+    const concurrency = parseInt(concurrencyOverride)
+    if (isNaN(concurrency)) {
+      throw new Error(
+        'Invalid value set for ACTIONS_UPLOAD_CONCURRENCY env variable'
+      )
+    }
+
+    if (concurrency < concurrencyCap) {
+      return concurrency
+    }
+
+    info(
+      `ACTIONS_UPLOAD_CONCURRENCY is higher than the cap of ${concurrencyCap} based on the number of cpus. Lowering it to the cap.`
+    )
+  }
+
+  return concurrencyCap
 }
 
 export function getUploadChunkTimeout(): number {
-  const timeoutVar =  process.env['ACTIONS_UPLOAD_TIMEOUT_MS']
+  const timeoutVar = process.env['ACTIONS_UPLOAD_TIMEOUT_MS']
   if (!timeoutVar) {
     return 300000 // 5 minutes
   }
 
   const timeout = parseInt(timeoutVar)
   if (isNaN(timeout)) {
-    throw new Error('Invalid value set for ACTIONS_UPLOAD_TIMEOUT_MS env variable')
+    throw new Error(
+      'Invalid value set for ACTIONS_UPLOAD_TIMEOUT_MS env variable'
+    )
   }
 
   return timeout
 }

From d4385a64a79e01e0b5eff991f721f234bdcd7620 Mon Sep 17 00:00:00 2001
From: Yang Cao
Date: Wed, 8 Jan 2025 18:14:04 +0000
Subject: [PATCH 105/108] Concurrency has a min of 1

---
 packages/artifact/__tests__/config.test.ts      | 8 ++++++++
 packages/artifact/src/internal/shared/config.ts | 2 +-
 2 files changed, 9 insertions(+), 1 deletion(-)

diff --git a/packages/artifact/__tests__/config.test.ts b/packages/artifact/__tests__/config.test.ts
index 4057ec3e..579fed6e 100644
--- a/packages/artifact/__tests__/config.test.ts
+++ b/packages/artifact/__tests__/config.test.ts
@@ -85,6 +85,14 @@ describe('uploadConcurrencyEnv', () => {
     }).toThrow()
   })
 
+  it('should throw if ACTIONS_UPLOAD_CONCURRENCY is < 1', () => {
+    ;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
+    process.env.ACTIONS_UPLOAD_CONCURRENCY = '0'
+    expect(() => {
+      config.getConcurrency()
+    }).toThrow()
+  })
+
   it('cannot go over concurrency cap when override value is greater', () => {
     ;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
     process.env.ACTIONS_UPLOAD_CONCURRENCY = '40'
diff --git a/packages/artifact/src/internal/shared/config.ts b/packages/artifact/src/internal/shared/config.ts
index d9d9ae35..44547451 100644
--- a/packages/artifact/src/internal/shared/config.ts
+++ b/packages/artifact/src/internal/shared/config.ts
@@ -61,7 +61,7 @@ export function getConcurrency(): number {
   const concurrencyOverride = process.env['ACTIONS_UPLOAD_CONCURRENCY']
   if (concurrencyOverride) {
     const concurrency = parseInt(concurrencyOverride)
-    if (isNaN(concurrency)) {
+    if (isNaN(concurrency) || concurrency < 1) {
       throw new Error(
         'Invalid value set for ACTIONS_UPLOAD_CONCURRENCY env variable'
       )
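Taken together, the two patches above make the effective upload concurrency min(override, cap): the cap is 32 for 4 or fewer CPUs and min(16 * numCPUs, 300) otherwise, and the override must be a number of at least 1. A condensed restatement of that selection logic, as a sketch for illustration only (the helper name is invented; the shipped logic lives inside getConcurrency):

function effectiveConcurrency(cap: number, override?: string): number {
  if (!override) {
    return cap // no env override: use the azcopy-style CPU-based cap
  }
  const requested = parseInt(override)
  if (isNaN(requested) || requested < 1) {
    throw new Error('Invalid value set for ACTIONS_UPLOAD_CONCURRENCY env variable')
  }
  // the override can only lower concurrency; larger values are clamped to the cap
  return requested < cap ? requested : cap
}

// effectiveConcurrency(32)       -> 32
// effectiveConcurrency(32, '10') -> 10
// effectiveConcurrency(32, '40') -> 32 (clamped; the shipped code also logs a notice)
// effectiveConcurrency(32, '0')  -> throws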
From e55409315fb4946675f6859c12243eff19570ed4 Mon Sep 17 00:00:00 2001
From: Yang Cao
Date: Wed, 8 Jan 2025 20:32:45 +0000
Subject: [PATCH 106/108] Rename the prefix to be more specific

---
 packages/artifact/__tests__/config.test.ts | 22 +++++++++----------
 .../artifact/src/internal/shared/config.ts | 12 +++++-----
 2 files changed, 17 insertions(+), 17 deletions(-)

diff --git a/packages/artifact/__tests__/config.test.ts b/packages/artifact/__tests__/config.test.ts
index 579fed6e..9fc4543d 100644
--- a/packages/artifact/__tests__/config.test.ts
+++ b/packages/artifact/__tests__/config.test.ts
@@ -42,13 +42,13 @@ describe('uploadChunkTimeoutEnv', () => {
     expect(config.getUploadChunkTimeout()).toBe(300000)
   })
 
-  it('should return value set in ACTIONS_UPLOAD_TIMEOUT_MS', () => {
-    process.env.ACTIONS_UPLOAD_TIMEOUT_MS = '150000'
+  it('should return value set in ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS', () => {
+    process.env.ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS = '150000'
     expect(config.getUploadChunkTimeout()).toBe(150000)
   })
 
-  it('should throw if value set in ACTIONS_UPLOAD_TIMEOUT_MS is invalid', () => {
-    process.env.ACTIONS_UPLOAD_TIMEOUT_MS = 'abc'
+  it('should throw if value set in ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS is invalid', () => {
+    process.env.ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS = 'abc'
     expect(() => {
       config.getUploadChunkTimeout()
     }).toThrow()
@@ -71,23 +71,23 @@ describe('uploadConcurrencyEnv', () => {
     expect(config.getConcurrency()).toBe(300)
   })
 
-  it('should return override value when ACTIONS_UPLOAD_CONCURRENCY is set', () => {
+  it('should return override value when ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY is set', () => {
     ;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
-    process.env.ACTIONS_UPLOAD_CONCURRENCY = '10'
+    process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = '10'
     expect(config.getConcurrency()).toBe(10)
   })
 
-  it('should throw with invalid value of ACTIONS_UPLOAD_CONCURRENCY', () => {
+  it('should throw with invalid value of ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY', () => {
     ;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
-    process.env.ACTIONS_UPLOAD_CONCURRENCY = 'abc'
+    process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = 'abc'
     expect(() => {
       config.getConcurrency()
     }).toThrow()
   })
 
-  it('should throw if ACTIONS_UPLOAD_CONCURRENCY is < 1', () => {
+  it('should throw if ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY is < 1', () => {
     ;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
-    process.env.ACTIONS_UPLOAD_CONCURRENCY = '0'
+    process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = '0'
     expect(() => {
       config.getConcurrency()
     }).toThrow()
@@ -95,7 +95,7 @@ describe('uploadConcurrencyEnv', () => {
 
   it('cannot go over concurrency cap when override value is greater', () => {
     ;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
-    process.env.ACTIONS_UPLOAD_CONCURRENCY = '40'
+    process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = '40'
     expect(config.getConcurrency()).toBe(32)
   })
 })
diff --git a/packages/artifact/src/internal/shared/config.ts b/packages/artifact/src/internal/shared/config.ts
index 44547451..7aeb2378 100644
--- a/packages/artifact/src/internal/shared/config.ts
+++ b/packages/artifact/src/internal/shared/config.ts
@@ -48,7 +48,7 @@ export function getGitHubWorkspaceDir(): string {
 // Mimics behavior of azcopy: https://learn.microsoft.com/en-us/azure/storage/common/storage-use-azcopy-optimize
 // If your machine has fewer than 5 CPUs, then the value of this variable is set to 32.
 // Otherwise, the default value is equal to 16 multiplied by the number of CPUs. The maximum value of this variable is 300.
-// This value can be lowered with ACTIONS_UPLOAD_CONCURRENCY variable.
+// This value can be lowered with ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY variable.
 export function getConcurrency(): number {
   const numCPUs = os.cpus().length
   let concurrencyCap = 32
@@ -58,12 +58,12 @@ export function getConcurrency(): number {
     concurrencyCap = concurrency > 300 ? 300 : concurrency
   }
 
-  const concurrencyOverride = process.env['ACTIONS_UPLOAD_CONCURRENCY']
+  const concurrencyOverride = process.env['ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY']
   if (concurrencyOverride) {
     const concurrency = parseInt(concurrencyOverride)
     if (isNaN(concurrency) || concurrency < 1) {
       throw new Error(
-        'Invalid value set for ACTIONS_UPLOAD_CONCURRENCY env variable'
+        'Invalid value set for ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY env variable'
       )
     }
 
@@ -72,7 +72,7 @@ export function getConcurrency(): number {
     }
 
     info(
-      `ACTIONS_UPLOAD_CONCURRENCY is higher than the cap of ${concurrencyCap} based on the number of cpus. Lowering it to the cap.`
+      `ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY is higher than the cap of ${concurrencyCap} based on the number of cpus. Lowering it to the cap.`
     )
   }
 
@@ -80,7 +80,7 @@ export function getConcurrency(): number {
 }
 
 export function getUploadChunkTimeout(): number {
-  const timeoutVar = process.env['ACTIONS_UPLOAD_TIMEOUT_MS']
+  const timeoutVar = process.env['ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS']
   if (!timeoutVar) {
     return 300000 // 5 minutes
   }
@@ -88,7 +88,7 @@ export function getUploadChunkTimeout(): number {
   const timeout = parseInt(timeoutVar)
   if (isNaN(timeout)) {
     throw new Error(
-      'Invalid value set for ACTIONS_UPLOAD_TIMEOUT_MS env variable'
+      'Invalid value set for ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS env variable'
     )
   }
 
   return timeout
 }

From 3095d112efe9a0e61b1af14a1f4b9f936b20cc21 Mon Sep 17 00:00:00 2001
From: Yang Cao
Date: Wed, 8 Jan 2025 21:11:59 +0000
Subject: [PATCH 107/108] Prep release packages/artifact v2.2.1

---
 packages/artifact/RELEASES.md       | 4 ++++
 packages/artifact/package-lock.json | 4 ++--
 packages/artifact/package.json      | 2 +-
 3 files changed, 7 insertions(+), 3 deletions(-)

diff --git a/packages/artifact/RELEASES.md b/packages/artifact/RELEASES.md
index 9ba5c7e9..6bbe6d2b 100644
--- a/packages/artifact/RELEASES.md
+++ b/packages/artifact/RELEASES.md
@@ -1,5 +1,9 @@
 # @actions/artifact Releases
 
+### 2.2.1
+
+- Add `ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY` and `ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS` environment variables [#1928](https://github.com/actions/toolkit/pull/1928)
+
 ### 2.2.0
 
 - Return artifact digest on upload [#1896](https://github.com/actions/toolkit/pull/1896)
diff --git a/packages/artifact/package-lock.json b/packages/artifact/package-lock.json
index 44cddddd..768767fe 100644
--- a/packages/artifact/package-lock.json
+++ b/packages/artifact/package-lock.json
@@ -1,12 +1,12 @@
 {
   "name": "@actions/artifact",
-  "version": "2.2.0",
+  "version": "2.2.1",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "@actions/artifact",
-      "version": "2.2.0",
+      "version": "2.2.1",
       "license": "MIT",
       "dependencies": {
         "@actions/core": "^1.10.0",
diff --git a/packages/artifact/package.json b/packages/artifact/package.json
index 69f33a02..2200a758 100644
--- a/packages/artifact/package.json
+++ b/packages/artifact/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@actions/artifact",
-  "version": "2.2.0",
+  "version": "2.2.1",
   "preview": true,
   "description": "Actions artifact lib",
   "keywords": [
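With the rename shipped in 2.2.1, both knobs are read from ACTIONS_ARTIFACT_-prefixed variables. A minimal usage sketch (DefaultArtifactClient and uploadArtifact are the package's public API; the artifact name, file list, and chosen values are placeholders):

import {DefaultArtifactClient} from '@actions/artifact'

async function uploadWithTunedLimits(): Promise<void> {
  // Both variables are normally set in the workflow's env block; setting them
  // here before the client reads them works for a self-contained script.
  process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = '16' // values above the CPU-based cap are clamped
  process.env.ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS = '600000' // 10 minutes per chunk

  const client = new DefaultArtifactClient()
  await client.uploadArtifact('build-output', ['dist/output.bin'], 'dist')
}

uploadWithTunedLimits().catch(err => {
  console.error(err)
  process.exitCode = 1
})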
From 1f7c2c79e034fe8a0d28006f52fc5b70f6dbb750 Mon Sep 17 00:00:00 2001
From: Josh Gross
Date: Wed, 15 Jan 2025 15:57:09 -0500
Subject: [PATCH 108/108] [tool-cache] Update `@actions/core` and prepare
 2.0.2 release (#1872)

* Update `@actions/core` and prepare 2.0.2 release

* Include these changes in the release notes
---
 packages/tool-cache/RELEASES.md       |  4 ++-
 packages/tool-cache/package-lock.json | 41 +++++++++------------------
 packages/tool-cache/package.json      |  4 +--
 3 files changed, 18 insertions(+), 31 deletions(-)

diff --git a/packages/tool-cache/RELEASES.md b/packages/tool-cache/RELEASES.md
index e2372238..d2d2c269 100644
--- a/packages/tool-cache/RELEASES.md
+++ b/packages/tool-cache/RELEASES.md
@@ -1,6 +1,8 @@
 # @actions/tool-cache Releases
 
-### Unreleased
+### 2.0.2
+
+- Update `@actions/core` to v1.11.1 [#1872](https://github.com/actions/toolkit/pull/1872)
 - Remove dependency on `uuid` package [#1824](https://github.com/actions/toolkit/pull/1824), [#1842](https://github.com/actions/toolkit/pull/1842)
 
 ### 2.0.1
diff --git a/packages/tool-cache/package-lock.json b/packages/tool-cache/package-lock.json
index 028842a0..39ec75d8 100644
--- a/packages/tool-cache/package-lock.json
+++ b/packages/tool-cache/package-lock.json
@@ -1,15 +1,15 @@
 {
   "name": "@actions/tool-cache",
-  "version": "2.0.1",
+  "version": "2.0.2",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "@actions/tool-cache",
-      "version": "2.0.1",
+      "version": "2.0.2",
       "license": "MIT",
       "dependencies": {
-        "@actions/core": "^1.2.6",
+        "@actions/core": "^1.11.1",
         "@actions/exec": "^1.0.0",
         "@actions/http-client": "^2.0.1",
         "@actions/io": "^1.1.1",
@@ -22,20 +22,12 @@
       }
     },
     "node_modules/@actions/core": {
-      "version": "1.10.0",
-      "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz",
-      "integrity": "sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==",
+      "version": "1.11.1",
+      "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.11.1.tgz",
+      "integrity": "sha512-hXJCSrkwfA46Vd9Z3q4cpEpHB1rL5NG04+/rbqW9d3+CSvtB1tYe8UTpAlixa1vj0m/ULglfEK2UKxMGxCxv5A==",
       "dependencies": {
-        "@actions/http-client": "^2.0.1",
-        "uuid": "^8.3.2"
-      }
-    },
-    "node_modules/@actions/core/node_modules/uuid": {
-      "version": "8.3.2",
-      "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
-      "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==",
-      "bin": {
-        "uuid": "dist/bin/uuid"
+        "@actions/exec": "^1.1.1",
+        "@actions/http-client": "^2.0.1"
       }
     },
     "node_modules/@actions/exec": {
@@ -153,19 +145,12 @@
     },
     "dependencies": {
       "@actions/core": {
-        "version": "1.10.0",
-        "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz",
-        "integrity": "sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==",
+        "version": "1.11.1",
+        "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.11.1.tgz",
+        "integrity": "sha512-hXJCSrkwfA46Vd9Z3q4cpEpHB1rL5NG04+/rbqW9d3+CSvtB1tYe8UTpAlixa1vj0m/ULglfEK2UKxMGxCxv5A==",
         "requires": {
-          "@actions/http-client": "^2.0.1",
-          "uuid": "^8.3.2"
-        },
-        "dependencies": {
-          "uuid": {
-            "version": "8.3.2",
-            "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
-            "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg=="
-          }
+          "@actions/exec": "^1.1.1",
+          "@actions/http-client": "^2.0.1"
         }
       },
       "@actions/exec": {
diff --git a/packages/tool-cache/package.json b/packages/tool-cache/package.json
index a1ff04b3..b3a64a5b 100644
--- a/packages/tool-cache/package.json
+++ b/packages/tool-cache/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@actions/tool-cache",
-  "version": "2.0.1",
+  "version": "2.0.2",
   "description": "Actions tool-cache lib",
   "keywords": [
     "github",
@@ -36,7 +36,7 @@
     "url": "https://github.com/actions/toolkit/issues"
   },
   "dependencies": {
-    "@actions/core": "^1.2.6",
+    "@actions/core": "^1.11.1",
     "@actions/exec": "^1.0.0",
     "@actions/http-client": "^2.0.1",
     "@actions/io": "^1.1.1",
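For completeness, a short smoke-test sketch against the public API of the freshly released tool-cache 2.0.2 (downloadTool, extractTar, cacheDir, and core.addPath are real exports; the tool name and URL are placeholders):

import * as core from '@actions/core'
import * as tc from '@actions/tool-cache'

async function installTool(): Promise<void> {
  // download, unpack, and cache a hypothetical tarball, then expose it on PATH
  const archive = await tc.downloadTool('https://example.com/tool-1.2.3.tar.gz')
  const extracted = await tc.extractTar(archive)
  const cached = await tc.cacheDir(extracted, 'tool', '1.2.3')
  core.addPath(cached)
}

installTool().catch(err =>
  core.setFailed(err instanceof Error ? err.message : String(err))
)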