
Merge pull request #2 from WarpBuilds/prajjwal-warp-245

feat: adds gcs provider for warpbuilds cache
pull/1935/head
Prajjwal 2024-04-15 19:14:50 +05:30 committed by GitHub
commit 596cfa8725
70 changed files with 4797 additions and 322 deletions

View File

@ -2,10 +2,10 @@ import * as cache from '../src/cache'
test('isFeatureAvailable returns true if server url is set', () => {
try {
process.env['WARP_CACHE_URL'] = 'http://cache.com'
process.env['WARPBUILD_CACHE_URL'] = 'http://cache.com'
expect(cache.isFeatureAvailable()).toBe(true)
} finally {
delete process.env['WARP_CACHE_URL']
delete process.env['WARPBUILD_CACHE_URL']
}
})

View File

@ -12,7 +12,7 @@ import {
import * as tar from '../src/internal/tar'
import * as utils from '../src/internal/cacheUtils'
// eslint-disable-next-line @typescript-eslint/no-require-imports
import fs = require('fs')
import fs from 'fs'
jest.mock('@actions/exec')
jest.mock('@actions/io')

File diff suppressed because it is too large

View File

@ -1,6 +1,6 @@
{
"name": "github-actions.warp-cache",
"version": "0.2.0",
"version": "1.1.2",
"preview": true,
"description": "Github action to use WarpBuild's in-house cache offering",
"keywords": [
@ -9,7 +9,7 @@
"cache",
"warpbuild"
],
"homepage": "https://github.com/actions/toolkit/tree/main/packages/cache",
"homepage": "https://github.com/WarpBuilds/toolkit/tree/main/packages/warp-cache",
"license": "MIT",
"main": "lib/cache.js",
"types": "lib/cache.d.ts",
@ -26,7 +26,7 @@
},
"repository": {
"type": "git",
"url": "git+https://github.com/actions/toolkit.git",
"url": "git+https://github.com/WarpBuilds/toolkit.git",
"directory": "packages/cache"
},
"scripts": {
@ -35,7 +35,7 @@
"tsc": "tsc"
},
"bugs": {
"url": "https://github.com/actions/toolkit/issues"
"url": "https://github.com/WarpBuilds/toolkit/issues"
},
"dependencies": {
"@actions/core": "^1.10.0",
@ -46,7 +46,9 @@
"@azure/abort-controller": "^1.1.0",
"@azure/ms-rest-js": "^2.6.0",
"@azure/storage-blob": "^12.13.0",
"@google-cloud/storage": "^7.9.0",
"axios": "^1.6.2",
"google-auth-library": "^9.7.0",
"semver": "^6.3.1",
"uuid": "^3.3.3"
},

View File

@ -2,8 +2,16 @@ import * as core from '@actions/core'
import * as path from 'path'
import * as utils from './internal/cacheUtils'
import * as cacheHttpClient from './internal/cacheHttpClient'
import {createTar, extractTar, listTar} from './internal/tar'
import {
createTar,
extractStreamingTar,
extractTar,
listTar
} from './internal/tar'
import {DownloadOptions, getUploadOptions} from './options'
import {isSuccessStatusCode} from './internal/requestUtils'
import {getDownloadCommandPipeForWget} from './internal/downloadUtils'
import {ChildProcessWithoutNullStreams} from 'child_process'
export class ValidationError extends Error {
constructor(message: string) {
@ -50,7 +58,7 @@ function checkKey(key: string): void {
*/
export function isFeatureAvailable(): boolean {
return !!process.env['WARP_CACHE_URL']
return !!process.env['WARPBUILD_CACHE_URL']
}
/**
@ -61,6 +69,7 @@ export function isFeatureAvailable(): boolean {
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
* @param downloadOptions cache download options
* @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
* @param enableCrossArchArchive an optional boolean enabled to restore cache created on any arch
* @returns string returns the key for the cache hit, otherwise returns undefined
*/
export async function restoreCache(
@ -68,22 +77,23 @@ export async function restoreCache(
primaryKey: string,
restoreKeys?: string[],
options?: DownloadOptions,
enableCrossOsArchive = false
enableCrossOsArchive = false,
enableCrossArchArchive = false
): Promise<string | undefined> {
checkPaths(paths)
checkKey(primaryKey)
restoreKeys = restoreKeys ?? []
const keys = [primaryKey, ...restoreKeys]
core.debug('Resolved Keys:')
core.debug(JSON.stringify(keys))
core.debug('Resolved Restore Keys:')
core.debug(JSON.stringify(restoreKeys))
if (keys.length > 10) {
if (restoreKeys.length > 9) {
throw new ValidationError(
`Key Validation Error: Keys are limited to a maximum of 10.`
)
}
for (const key of keys) {
for (const key of restoreKeys) {
checkKey(key)
}
@ -91,18 +101,20 @@ export async function restoreCache(
let archivePath = ''
try {
// path are needed to compute version
const cacheEntry = await cacheHttpClient.getCacheEntry(keys, paths, {
compressionMethod,
enableCrossOsArchive
})
if (!cacheEntry?.pre_signed_url) {
// Cache not found
return undefined
}
const cacheEntry = await cacheHttpClient.getCacheEntry(
primaryKey,
restoreKeys,
paths,
{
compressionMethod,
enableCrossOsArchive,
enableCrossArchArchive
}
)
if (options?.lookupOnly) {
core.info('Lookup only - skipping download')
return cacheEntry.cache_key
if (!cacheEntry) {
// Internal Error
return undefined
}
archivePath = path.join(
@ -111,30 +123,118 @@ export async function restoreCache(
)
core.debug(`Archive Path: ${archivePath}`)
// Download the cache from the cache entry
await cacheHttpClient.downloadCache(cacheEntry.pre_signed_url, archivePath)
let cacheKey: string = ''
if (core.isDebug()) {
await listTar(archivePath, compressionMethod)
switch (cacheEntry.provider) {
case 's3': {
if (!cacheEntry.s3?.pre_signed_url) {
return undefined
}
cacheKey = cacheEntry.s3.pre_signed_url
if (options?.lookupOnly) {
core.info('Lookup only - skipping download')
return cacheKey
}
await cacheHttpClient.downloadCache(
cacheEntry.provider,
cacheEntry.s3?.pre_signed_url,
archivePath
)
if (core.isDebug()) {
await listTar(archivePath, compressionMethod)
}
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath)
core.info(
`Cache Size: ~${Math.round(
archiveFileSize / (1024 * 1024)
)} MB (${archiveFileSize} B)`
)
await extractTar(archivePath, compressionMethod)
core.info('Cache restored successfully')
break
}
case 'gcs': {
if (!cacheEntry.gcs?.cache_key) {
return undefined
}
cacheKey = cacheEntry.gcs?.cache_key
if (options?.lookupOnly) {
core.info('Lookup only - skipping download')
return cacheKey
}
const archiveLocation = `gs://${cacheEntry.gcs?.bucket_name}/${cacheEntry.gcs?.cache_key}`
/*
* Alternate, Multipart download method for GCS
await cacheHttpClient.downloadCache(
cacheEntry.provider,
archiveLocation,
archivePath,
cacheEntry.gcs?.short_lived_token?.access_token ?? ''
)
if (core.isDebug()) {
await listTar(archivePath, compressionMethod)
}
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath)
core.info(
`Cache Size: ~${Math.round(
archiveFileSize / (1024 * 1024)
)} MB (${archiveFileSize} B)`
)
await extractTar(archivePath, compressionMethod)
*/
// For GCS, we do a streaming download which means that we extract the archive while we are downloading it.
let readStream: NodeJS.ReadableStream | undefined
let downloadCommandPipe: ChildProcessWithoutNullStreams | undefined
if (cacheEntry?.gcs?.pre_signed_url) {
downloadCommandPipe = getDownloadCommandPipeForWget(
cacheEntry?.gcs?.pre_signed_url
)
} else {
readStream = cacheHttpClient.downloadCacheStreaming(
'gcs',
archiveLocation,
cacheEntry?.gcs?.short_lived_token?.access_token ?? ''
)
if (!readStream) {
return undefined
}
}
await extractStreamingTar(
readStream,
archivePath,
compressionMethod,
downloadCommandPipe
)
core.info('Cache restored successfully')
break
}
}
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath)
core.info(
`Cache Size: ~${Math.round(
archiveFileSize / (1024 * 1024)
)} MB (${archiveFileSize} B)`
)
await extractTar(archivePath, compressionMethod)
core.info('Cache restored successfully')
return cacheEntry.cache_key
return cacheKey
} catch (error) {
const typedError = error as Error
if (typedError.name === ValidationError.name) {
throw error
} else {
// Supress all non-validation cache related errors because caching should be optional
// Suppress all non-validation cache related errors because caching should be optional
core.warning(`Failed to restore: ${(error as Error).message}`)
}
} finally {
@ -155,13 +255,14 @@ export async function restoreCache(
* @param paths a list of file paths to be cached
* @param key an explicit key for restoring the cache
* @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform
* @param options cache upload options
* @returns number returns cacheId if the cache was saved successfully and throws an error if save fails
* @param enableCrossArchArchive an optional boolean enabled to save cache on any arch which could be restored on any arch
* @returns string returns cacheId if the cache was saved successfully and throws an error if save fails
*/
export async function saveCache(
paths: string[],
key: string,
enableCrossOsArchive = false
enableCrossOsArchive = false,
enableCrossArchArchive = false
): Promise<string> {
checkPaths(paths)
checkKey(key)
@ -192,35 +293,45 @@ export async function saveCache(
if (core.isDebug()) {
await listTar(archivePath, compressionMethod)
}
const fileSizeLimit = 20 * 1024 * 1024 * 1024 // 20GB per repo limit
const fileSizeLimit = 1000 * 1024 * 1024 * 1024 // 1000GB per repo limit
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath)
core.debug(`File Size: ${archiveFileSize}`)
// For GHES, this check will take place in ReserveCache API with enterprise file size limit
if (archiveFileSize > fileSizeLimit && !utils.isGhes()) {
if (archiveFileSize > fileSizeLimit) {
throw new Error(
`Cache size of ~${Math.round(
archiveFileSize / (1024 * 1024)
)} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`
)} MB (${archiveFileSize} B) is over the 1000GB limit, not saving cache.`
)
}
const cacheVersion = cacheHttpClient.getCacheVersion(
paths,
compressionMethod,
enableCrossOsArchive,
enableCrossArchArchive
)
core.debug('Reserving Cache')
// Calculate number of chunks required
// Calculate number of chunks required. This is only required if backend is S3 as Google Cloud SDK will do it for us
const uploadOptions = getUploadOptions()
const maxChunkSize = uploadOptions?.uploadChunkSize ?? 32 * 1024 * 1024 // Default 32MB
const numberOfChunks = Math.floor(archiveFileSize / maxChunkSize)
const reserveCacheResponse = await cacheHttpClient.reserveCache(
key,
numberOfChunks,
{
compressionMethod,
enableCrossOsArchive,
cacheSize: archiveFileSize
}
cacheVersion
)
if (reserveCacheResponse?.statusCode === 400) {
if (!isSuccessStatusCode(reserveCacheResponse?.statusCode)) {
core.debug(`Failed to reserve cache: ${reserveCacheResponse?.statusCode}`)
core.debug(
`Reserve Cache Request: ${JSON.stringify({
key,
numberOfChunks,
cacheVersion
})}`
)
throw new Error(
reserveCacheResponse?.error?.message ??
`Cache size of ~${Math.round(
@ -229,20 +340,40 @@ export async function saveCache(
)
}
core.debug(`Saving Cache`)
cacheKey = await cacheHttpClient.saveCache(
key,
cacheHttpClient.getCacheVersion(
paths,
compressionMethod,
enableCrossOsArchive
),
reserveCacheResponse?.result?.upload_id ?? '',
reserveCacheResponse?.result?.upload_key ?? '',
numberOfChunks,
reserveCacheResponse?.result?.pre_signed_urls ?? [],
archivePath
)
switch (reserveCacheResponse.result?.provider) {
case 's3':
core.debug(`Saving Cache to S3`)
cacheKey = await cacheHttpClient.saveCache(
's3',
key,
cacheVersion,
archivePath,
reserveCacheResponse?.result?.s3?.upload_id ?? '',
reserveCacheResponse?.result?.s3?.upload_key ?? '',
numberOfChunks,
reserveCacheResponse?.result?.s3?.pre_signed_urls ?? []
)
break
case 'gcs':
core.debug(`Saving Cache to GCS`)
cacheKey = await cacheHttpClient.saveCache(
'gcs',
key,
cacheVersion,
archivePath,
// S3 Params are undefined for GCS
undefined,
undefined,
undefined,
undefined,
reserveCacheResponse?.result?.gcs?.short_lived_token?.access_token ??
'',
reserveCacheResponse?.result?.gcs?.bucket_name ?? '',
reserveCacheResponse?.result?.gcs?.cache_key ?? ''
)
break
}
} catch (error) {
const typedError = error as Error
if (typedError.name === ValidationError.name) {
@ -266,18 +397,30 @@ export async function saveCache(
/**
* Deletes an entire cache by cache key.
* @param keys The cache keys
* @param key The cache keys
*/
export async function deleteCache(keys: string[]): Promise<void> {
for (const key of keys) {
checkKey(key)
}
export async function deleteCache(
paths: string[],
key: string,
enableCrossOsArchive = false,
enableCrossArchArchive = false
): Promise<void> {
checkKey(key)
core.debug('Deleting Cache')
core.debug(`Cache Keys: ${keys}`)
core.debug(`Cache Key: ${key}`)
const compressionMethod = await utils.getCompressionMethod()
const cacheVersion = cacheHttpClient.getCacheVersion(
paths,
compressionMethod,
enableCrossOsArchive,
enableCrossArchArchive
)
try {
await cacheHttpClient.deleteCache(keys)
await cacheHttpClient.deleteCache(key, cacheVersion)
} catch (error) {
core.warning(`Failed to delete cache: ${error}`)
}
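
For reference, a minimal sketch of how a consumer might call the updated `restoreCache`/`saveCache`/`deleteCache` signatures introduced here (the new `enableCrossArchArchive` flag, and `deleteCache` now taking `paths` plus a single `key`). The import path, cache paths and keys below are illustrative assumptions; the `WARPBUILD_CACHE_URL` and `WARPBUILD_RUNNER_VERIFICATION_TOKEN` environment variables are assumed to be set by the runner.

```ts
// Illustrative only; the package entry point and keys are assumptions.
import * as cache from 'github-actions.warp-cache'

async function cacheRoundTrip(): Promise<void> {
  const paths = ['node_modules']
  const primaryKey = `deps-${process.platform}-lockhash`

  // Save with cross-OS and cross-arch restore enabled (both default to false).
  const savedKey = await cache.saveCache(paths, primaryKey, true, true)
  console.log(`saved cache as ${savedKey}`)

  // Restore: exact key first, then the 'deps-' prefix as a restore key.
  const hitKey = await cache.restoreCache(
    paths,
    primaryKey,
    ['deps-'],
    undefined, // DownloadOptions
    true,      // enableCrossOsArchive
    true       // enableCrossArchArchive
  )
  console.log(hitKey ? `restored from ${hitKey}` : 'cache miss')

  // deleteCache now needs the paths as well, so the cache version can be recomputed.
  await cache.deleteCache(paths, primaryKey, true, true)
}
```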

View File

@ -6,30 +6,39 @@ import {
TypedResponse
} from '@actions/http-client/lib/interfaces'
import * as crypto from 'crypto'
import * as fs from 'fs'
import * as utils from './cacheUtils'
import {CompressionMethod} from './constants'
import {
ArtifactCacheEntry,
InternalCacheOptions,
CommitCacheRequest,
ReserveCacheRequest,
ReserveCacheResponse,
ITypedResponseWithError,
ArtifactCacheList,
InternalS3CompletedPart,
CommitCacheResponse
InternalS3CompletedPart
} from './contracts'
import {downloadCacheMultiConnection} from './downloadUtils'
import {
downloadCacheMultiConnection,
downloadCacheMultipartGCP,
downloadCacheStreamingGCP
} from './downloadUtils'
import {isSuccessStatusCode, retryTypedResponse} from './requestUtils'
import axios, {AxiosError} from 'axios'
import {Storage} from '@google-cloud/storage'
import {
CommonsCommitCacheRequest,
CommonsCommitCacheResponse,
CommonsDeleteCacheResponse,
CommonsGetCacheResponse,
CommonsReserveCacheRequest,
CommonsReserveCacheResponse
} from './warpcache-ts-sdk'
import {multiPartUploadToGCS, uploadFileToS3} from './uploadUtils'
import {CommonsGetCacheRequest} from './warpcache-ts-sdk/models/commons-get-cache-request'
import {CommonsDeleteCacheRequest} from './warpcache-ts-sdk/models/commons-delete-cache-request'
import {OAuth2Client} from 'google-auth-library'
const versionSalt = '1.0'
function getCacheApiUrl(resource: string): string {
const baseUrl: string =
process.env['WARP_CACHE_URL'] ?? 'https://cache.warpbuild.com'
process.env['WARPBUILD_CACHE_URL'] ?? 'https://cache.warpbuild.com'
if (!baseUrl) {
throw new Error('Cache Service Url not found, unable to restore cache.')
}
@ -43,6 +52,30 @@ function createAcceptHeader(type: string, apiVersion: string): string {
return `${type};api-version=${apiVersion}`
}
function getVCSRepository(): string {
const vcsRepository = process.env['GITHUB_REPOSITORY'] ?? ''
return vcsRepository
}
function getVCSRef(): string {
const vcsBranch = process.env['GITHUB_REF'] ?? ''
return vcsBranch
}
function getAnnotations(): {[key: string]: string} {
const annotations: {[key: string]: string} = {
GITHUB_WORKFLOW: process.env['GITHUB_WORKFLOW'] ?? '',
GITHUB_RUN_ID: process.env['GITHUB_RUN_ID'] ?? '',
GITHUB_RUN_ATTEMPT: process.env['GITHUB_RUN_ATTEMPT'] ?? '',
GITHUB_JOB: process.env['GITHUB_JOB'] ?? '',
GITHUB_REPOSITORY: process.env['GITHUB_REPOSITORY'] ?? '',
GITHUB_REF: process.env['GITHUB_REF'] ?? '',
GITHUB_ACTION: process.env['GITHUB_ACTION'] ?? '',
RUNNER_NAME: process.env['RUNNER_NAME'] ?? ''
}
return annotations
}
function getRequestOptions(): RequestOptions {
const requestOptions: RequestOptions = {
headers: {
@ -54,11 +87,11 @@ function getRequestOptions(): RequestOptions {
}
function createHttpClient(): HttpClient {
const token = process.env['WARP_RUNNER_VERIFICATION_TOKEN'] ?? ''
const token = process.env['WARPBUILD_RUNNER_VERIFICATION_TOKEN'] ?? ''
const bearerCredentialHandler = new BearerCredentialHandler(token)
return new HttpClient(
'actions/cache',
'warp/cache',
[bearerCredentialHandler],
getRequestOptions()
)
@ -67,7 +100,8 @@ function createHttpClient(): HttpClient {
export function getCacheVersion(
paths: string[],
compressionMethod?: CompressionMethod,
enableCrossOsArchive = false
enableCrossOsArchive = false,
enableCrossArchArchive = false
): string {
const components = paths
@ -82,6 +116,11 @@ export function getCacheVersion(
components.push('windows-only')
}
// Add architecture to cache version
if (!enableCrossArchArchive) {
components.push(process.arch)
}
// Add salt to cache version to support breaking changes in cache entry
components.push(versionSalt)
@ -89,29 +128,40 @@ export function getCacheVersion(
}
export async function getCacheEntry(
keys: string[],
key: string,
restoreKeys: string[],
paths: string[],
options?: InternalCacheOptions
): Promise<ArtifactCacheEntry | null> {
): Promise<CommonsGetCacheResponse | null> {
const httpClient = createHttpClient()
const version = getCacheVersion(
paths,
options?.compressionMethod,
options?.enableCrossOsArchive
options?.enableCrossOsArchive,
options?.enableCrossArchArchive
)
const resource = `cache?keys=${encodeURIComponent(
keys.join(',')
)}&version=${version}`
const getCacheRequest: CommonsGetCacheRequest = {
cache_key: key,
restore_keys: restoreKeys,
cache_version: version,
vcs_repository: getVCSRepository(),
vcs_ref: getVCSRef(),
annotations: getAnnotations()
}
const response = await retryTypedResponse('getCacheEntry', async () =>
httpClient.getJson<ArtifactCacheEntry>(getCacheApiUrl(resource))
httpClient.postJson<CommonsGetCacheResponse>(
getCacheApiUrl('cache/get'),
getCacheRequest
)
)
// Cache not found
if (response.statusCode === 204) {
// List cache for primary key only if cache miss occurs
if (core.isDebug()) {
await printCachesListForDiagnostics(keys[0], httpClient, version)
}
// TODO: List cache for primary key only if cache miss occurs
// if (core.isDebug()) {
// await printCachesListForDiagnostics(keys[0], httpClient, version)
// }
return null
}
if (!isSuccessStatusCode(response.statusCode)) {
@ -119,18 +169,13 @@ export async function getCacheEntry(
}
const cacheResult = response.result
const cacheDownloadUrl = cacheResult?.pre_signed_url
if (!cacheDownloadUrl) {
// Cache achiveLocation not found. This should never happen, and hence bail out.
throw new Error('Cache not found.')
}
core.setSecret(cacheDownloadUrl)
core.debug(`Cache Result:`)
core.debug(JSON.stringify(cacheResult))
return cacheResult
}
/*
async function printCachesListForDiagnostics(
key: string,
httpClient: HttpClient,
@ -155,29 +200,80 @@ async function printCachesListForDiagnostics(
}
}
}
*/
export async function downloadCache(
provider: string,
archiveLocation: string,
archivePath: string
archivePath: string,
gcsToken?: string
): Promise<void> {
await downloadCacheMultiConnection(archiveLocation, archivePath, 8)
switch (provider) {
case 's3':
await downloadCacheMultiConnection(archiveLocation, archivePath, 8)
break
case 'gcs': {
if (!gcsToken) {
throw new Error(
'Unable to download cache from GCS. GCP token is not provided.'
)
}
const oauth2Client = new OAuth2Client()
oauth2Client.setCredentials({access_token: gcsToken})
const storage = new Storage({
authClient: oauth2Client
})
await downloadCacheMultipartGCP(storage, archiveLocation, archivePath)
break
}
}
}
export function downloadCacheStreaming(
provider: string,
archiveLocation: string,
gcsToken?: string
): NodeJS.ReadableStream | undefined {
switch (provider) {
case 's3':
return undefined
case 'gcs': {
if (!gcsToken) {
throw new Error(
'Unable to download cache from GCS. GCP token is not provided.'
)
}
const oauth2Client = new OAuth2Client()
oauth2Client.setCredentials({access_token: gcsToken})
const storage = new Storage({
authClient: oauth2Client
})
return downloadCacheStreamingGCP(storage, archiveLocation)
}
default:
return undefined
}
}
// Reserve Cache
export async function reserveCache(
cacheKey: string,
numberOfChunks: number,
options?: InternalCacheOptions
): Promise<ITypedResponseWithError<ReserveCacheResponse>> {
cacheVersion: string
): Promise<ITypedResponseWithError<CommonsReserveCacheResponse>> {
const httpClient = createHttpClient()
const reserveCacheRequest: ReserveCacheRequest = {
const reserveCacheRequest: CommonsReserveCacheRequest = {
cache_key: cacheKey,
cache_version: cacheVersion,
number_of_chunks: numberOfChunks,
content_type: 'application/zstd'
content_type: 'application/zstd',
vcs_repository: getVCSRepository(),
vcs_ref: getVCSRef(),
annotations: getAnnotations()
}
const response = await retryTypedResponse('reserveCache', async () =>
httpClient.postJson<ReserveCacheResponse>(
httpClient.postJson<CommonsReserveCacheResponse>(
getCacheApiUrl('cache/reserve'),
reserveCacheRequest
)
@ -185,125 +281,32 @@ export async function reserveCache(
return response
}
function getContentRange(start: number, end: number): string {
// Format: `bytes start-end/filesize
// start and end are inclusive
// filesize can be *
// For a 200 byte chunk starting at byte 0:
// Content-Range: bytes 0-199/*
return `bytes ${start}-${end}/*`
}
async function uploadChunk(
resourceUrl: string,
openStream: () => NodeJS.ReadableStream,
partNumber: number,
start: number,
end: number
): Promise<InternalS3CompletedPart> {
core.debug(
`Uploading chunk of size ${
end - start + 1
} bytes at offset ${start} with content range: ${getContentRange(
start,
end
)}`
)
// Manually convert the readable stream to a buffer. S3 doesn't allow stream as input
const chunks = await utils.streamToBuffer(openStream())
try {
// HACK: Using axios here as S3 API doesn't allow readable stream as input and Github's HTTP client is not able to send buffer as body
const response = await axios.request({
method: 'PUT',
url: resourceUrl,
headers: {
'Content-Type': 'application/octet-stream'
},
data: chunks
})
return {
ETag: response.headers.etag ?? '',
PartNumber: partNumber
}
} catch (error) {
throw new Error(
`Cache service responded with ${
(error as AxiosError).status
} during upload chunk.`
)
}
}
async function uploadFileToS3(
preSignedURLs: string[],
archivePath: string
): Promise<InternalS3CompletedPart[]> {
// Upload Chunks
const fileSize = utils.getArchiveFileSizeInBytes(archivePath)
const numberOfChunks = preSignedURLs.length
const fd = fs.openSync(archivePath, 'r')
core.debug('Awaiting all uploads')
let offset = 0
try {
const completedParts = await Promise.all(
preSignedURLs.map(async (presignedURL, index) => {
const chunkSize = Math.ceil(fileSize / numberOfChunks)
const start = offset
const end = offset + chunkSize - 1
offset += chunkSize
return await uploadChunk(
presignedURL,
() =>
fs
.createReadStream(archivePath, {
fd,
start,
end,
autoClose: false
})
.on('error', error => {
throw new Error(
`Cache upload failed because file read failed with ${error.message}`
)
}),
index + 1,
start,
end
)
})
)
return completedParts
} finally {
fs.closeSync(fd)
}
}
async function commitCache(
httpClient: HttpClient,
cacheKey: string,
cacheVersion: string,
uploadKey: string,
uploadID: string,
parts: InternalS3CompletedPart[]
): Promise<TypedResponse<CommitCacheResponse>> {
const commitCacheRequest: CommitCacheRequest = {
uploadKey?: string,
uploadID?: string,
parts?: InternalS3CompletedPart[]
): Promise<TypedResponse<CommonsCommitCacheResponse>> {
const httpClient = createHttpClient()
if (!parts) {
parts = []
}
const commitCacheRequest: CommonsCommitCacheRequest = {
cache_key: cacheKey,
cache_version: cacheVersion,
upload_key: uploadKey,
upload_id: uploadID,
parts: parts,
os: process.env['RUNNER_OS'] ?? 'Linux',
vcs_type: 'github'
vcs_type: 'github',
vcs_repository: getVCSRepository(),
vcs_ref: getVCSRef(),
annotations: getAnnotations()
}
return await retryTypedResponse('commitCache', async () =>
httpClient.postJson<CommitCacheResponse>(
httpClient.postJson<CommonsCommitCacheResponse>(
getCacheApiUrl(`cache/commit`),
commitCacheRequest
)
@ -311,44 +314,99 @@ async function commitCache(
}
export async function saveCache(
provider: string,
cacheKey: string,
cacheVersion: string,
uploadId: string,
uploadKey: string,
numberOfChunks: number,
preSignedURLs: string[],
archivePath: string
archivePath: string,
S3UploadId?: string,
S3UploadKey?: string,
S3NumberOfChunks?: number,
S3PreSignedURLs?: string[],
GCSAuthToken?: string,
GCSBucketName?: string,
GCSObjectName?: string
): Promise<string> {
// Number of chunks should match the number of pre-signed URLs
if (numberOfChunks !== preSignedURLs.length) {
throw new Error(
`Number of chunks (${numberOfChunks}) should match the number of pre-signed URLs (${preSignedURLs.length}).`
)
}
const httpClient = createHttpClient()
core.debug('Upload cache')
const completedParts = await uploadFileToS3(preSignedURLs, archivePath)
// Sort parts in ascending order by partNumber
completedParts.sort((a, b) => a.PartNumber - b.PartNumber)
// Commit Cache
core.debug('Committing cache')
const cacheSize = utils.getArchiveFileSizeInBytes(archivePath)
core.info(
`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`
)
const commitCacheResponse = await commitCache(
httpClient,
cacheKey,
cacheVersion,
uploadKey,
uploadId,
completedParts
)
let commitCacheResponse: TypedResponse<CommonsCommitCacheResponse> = {
headers: {},
statusCode: 0,
result: null
}
let cacheKeyResponse = ''
switch (provider) {
case 's3': {
if (
!S3NumberOfChunks ||
!S3PreSignedURLs ||
!S3UploadId ||
!S3UploadKey
) {
throw new Error(
'Unable to upload cache to S3. One of the following required parameters is missing: numberOfChunks, preSignedURLs, uploadId, uploadKey.'
)
}
// Number of chunks should match the number of pre-signed URLs
if (S3NumberOfChunks !== S3PreSignedURLs.length) {
throw new Error(
`Number of chunks (${S3NumberOfChunks}) should match the number of pre-signed URLs (${S3PreSignedURLs.length}).`
)
}
core.debug('Uploading cache')
const completedParts = await uploadFileToS3(S3PreSignedURLs, archivePath)
// Sort parts in ascending order by partNumber
completedParts.sort((a, b) => a.PartNumber - b.PartNumber)
core.debug('Committing cache')
commitCacheResponse = await commitCache(
cacheKey,
cacheVersion,
S3UploadKey,
S3UploadId,
completedParts
)
cacheKeyResponse = commitCacheResponse.result?.s3?.cache_key ?? ''
break
}
case 'gcs': {
if (!GCSBucketName || !GCSObjectName || !GCSAuthToken) {
throw new Error(
'Unable to upload cache to GCS. One of the following required parameters is missing: GCSBucketName, GCSObjectName, GCSAuthToken.'
)
}
core.debug('Uploading cache')
const oauth2Client = new OAuth2Client()
oauth2Client.setCredentials({access_token: GCSAuthToken})
const storage = new Storage({
authClient: oauth2Client
})
await multiPartUploadToGCS(
storage,
archivePath,
GCSBucketName,
GCSObjectName
)
core.debug('Committing cache')
commitCacheResponse = await commitCache(cacheKey, cacheVersion)
cacheKeyResponse = commitCacheResponse.result?.gcs?.cache_key ?? ''
break
}
}
if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
throw new Error(
`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`
@ -356,16 +414,28 @@ export async function saveCache(
}
core.info('Cache saved successfully')
return commitCacheResponse.result?.cache_key ?? ''
return cacheKeyResponse
}
export async function deleteCache(keys: string[]) {
export async function deleteCache(cacheKey: string, cacheVersion: string) {
const httpClient = createHttpClient()
const resource = `cache?keys=${encodeURIComponent(keys.join(','))}`
const response = await httpClient.del(getCacheApiUrl(resource))
if (!isSuccessStatusCode(response.message.statusCode)) {
throw new Error(
`Cache service responded with ${response.message.statusCode}`
const deleteCacheRequest: CommonsDeleteCacheRequest = {
cache_key: cacheKey,
cache_version: cacheVersion,
vcs_repository: getVCSRepository(),
vcs_ref: getVCSRef(),
annotations: getAnnotations()
}
const response = await retryTypedResponse('deleteCacheEntry', async () =>
httpClient.postJson<CommonsDeleteCacheResponse>(
getCacheApiUrl('cache/delete'),
deleteCacheRequest
)
)
if (!isSuccessStatusCode(response.statusCode)) {
throw new Error(`Cache service responded with ${response.statusCode}`)
}
}
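
To make the cross-arch flag concrete, here is a simplified sketch of the version derivation above: when `enableCrossArchArchive` is false the runner's `process.arch` is folded into the hashed components, so an x64 and an arm64 runner produce different cache versions for otherwise identical inputs. This is a stand-alone approximation of `getCacheVersion`, not the exact implementation.

```ts
import * as crypto from 'crypto'

// Approximation of getCacheVersion for illustration; the real function lives above.
function sketchCacheVersion(
  paths: string[],
  compressionMethod?: string,
  enableCrossOsArchive = false,
  enableCrossArchArchive = false
): string {
  const components = [...paths]
  if (compressionMethod) {
    components.push(compressionMethod)
  }
  if (process.platform === 'win32' && !enableCrossOsArchive) {
    components.push('windows-only')
  }
  if (!enableCrossArchArchive) {
    components.push(process.arch) // 'x64', 'arm64', ... keeps archives arch-specific
  }
  components.push('1.0') // versionSalt
  return crypto.createHash('sha256').update(components.join('|')).digest('hex')
}

// sketchCacheVersion(['node_modules'], 'zstd') differs between x64 and arm64 runners,
// while sketchCacheVersion(['node_modules'], 'zstd', false, true) does not.
```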

View File

@ -146,3 +146,28 @@ export function streamToBuffer(stream: NodeJS.ReadableStream): Promise<Buffer> {
stream.on('end', () => resolve(Buffer.concat(buffer)))
})
}
/*
* Retrieve the bucket name and object name from the GCS URL
* @param gcsURL - The URL for the cache in the format gs://<bucket-name>/<object-name>
*/
export function retrieveGCSBucketAndObjectName(gcsURL: string): {
bucketName: string
objectName: string
} {
const bucketName = gcsURL.split('/')[2]
if (!bucketName || bucketName.length < 2) {
throw new Error(
`Invalid GCS URL: ${gcsURL}. Should be in the format gs://<bucket-name>/<object-name>`
)
}
const objectName = gcsURL.split('/').slice(3).join('/')
if (!objectName || objectName.length < 1) {
throw new Error(
`Invalid GCS URL: ${gcsURL}. Should be in the format gs://<bucket-name>/<object-name>`
)
}
return {bucketName, objectName}
}
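
A quick usage note for the new helper: it splits a `gs://` URL on `/`, taking the third segment as the bucket and the remainder as the object name, and throws on anything shorter. The bucket and object names below are made up.

```ts
import {retrieveGCSBucketAndObjectName} from './cacheUtils'

const {bucketName, objectName} = retrieveGCSBucketAndObjectName(
  'gs://example-cache-bucket/org/repo/cache-key.tzst'
)
// bucketName === 'example-cache-bucket'
// objectName === 'org/repo/cache-key.tzst'

// Incomplete URLs are rejected:
// retrieveGCSBucketAndObjectName('gs://example-cache-bucket') throws "Invalid GCS URL: ..."
```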

View File

@ -6,47 +6,10 @@ export interface ITypedResponseWithError<T> extends TypedResponse<T> {
error?: HttpClientError
}
export interface ArtifactCacheEntry {
cache_key?: string
pre_signed_url?: string
cache_version?: string
}
export interface ArtifactCacheList {
totalCount: number
artifactCaches?: ArtifactCacheEntry[]
}
export interface CommitCacheRequest {
cache_key: string
cache_version: string
upload_key: string
upload_id: string
parts: InternalS3CompletedPart[]
os: string
vcs_type: string
}
export interface CommitCacheResponse {
cache_key: string
cache_version: string
}
export interface ReserveCacheRequest {
cache_key: string
content_type: string
number_of_chunks: number
}
export interface ReserveCacheResponse {
pre_signed_urls: string[]
upload_key: string
upload_id: string
}
export interface InternalCacheOptions {
compressionMethod?: CompressionMethod
enableCrossOsArchive?: boolean
enableCrossArchArchive?: boolean
cacheSize?: number
}

View File

@ -13,6 +13,8 @@ import {DownloadOptions} from '../options'
import {retryHttpClientResponse} from './requestUtils'
import {AbortController} from '@azure/abort-controller'
import {Storage, TransferManager} from '@google-cloud/storage'
import {ChildProcessWithoutNullStreams, spawn} from 'child_process'
/**
* Pipes the body of a HTTP response to a stream
@ -292,3 +294,66 @@ export async function downloadCacheMultiConnection(
await fileHandle?.close()
}
}
/**
* Download cache in multipart using the Gcloud SDK
*
* @param archiveLocation the URL for the cache
*/
export async function downloadCacheMultipartGCP(
storage: Storage,
archiveLocation: string,
archivePath: string
) {
try {
const {bucketName, objectName} =
utils.retrieveGCSBucketAndObjectName(archiveLocation)
const transferManager = new TransferManager(storage.bucket(bucketName))
await transferManager.downloadFileInChunks(objectName, {
destination: archivePath,
noReturnData: true,
chunkSizeBytes: 1024 * 1024 * 8
})
} catch (error) {
core.debug(`Failed to download cache: ${error}`)
core.error(`Failed to download cache.`)
throw error
}
}
/**
* Download the cache as a readable stream using the GCloud SDK
*
* @param archiveLocation the URL for the cache
*/
export function downloadCacheStreamingGCP(
storage: Storage,
archiveLocation: string
): NodeJS.ReadableStream | undefined {
try {
// The archiveLocation for GCP will be in the format of gs://<bucket-name>/<object-name>
const {bucketName, objectName} =
utils.retrieveGCSBucketAndObjectName(archiveLocation)
storage
.bucket(bucketName)
.file(objectName)
.getMetadata()
.then(data => {
core.info(`File size: ${data[0]?.size} bytes`)
})
return storage.bucket(bucketName).file(objectName).createReadStream()
} catch (error) {
core.debug(`Failed to download cache: ${error}`)
core.error(`Failed to download cache.`)
throw error
}
}
export function getDownloadCommandPipeForWget(
url: string
): ChildProcessWithoutNullStreams {
return spawn('wget', ['-qO', '-', url])
}
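
A minimal sketch of wiring the streaming GCS download to a local file, assuming a short-lived access token handed back by the cache service (the token variable and bucket/object path are placeholders). The `Storage`/`OAuth2Client` setup mirrors what `downloadCache` does in the client above.

```ts
import fs from 'fs'
import {Storage} from '@google-cloud/storage'
import {OAuth2Client} from 'google-auth-library'
import {downloadCacheStreamingGCP} from './downloadUtils'

async function streamCacheToDisk(gcsToken: string): Promise<void> {
  const authClient = new OAuth2Client()
  authClient.setCredentials({access_token: gcsToken})
  const storage = new Storage({authClient})

  // Placeholder gs:// location; in practice this comes from the cache/get response.
  const readStream = downloadCacheStreamingGCP(
    storage,
    'gs://example-cache-bucket/org/repo/cache-key.tzst'
  )
  if (!readStream) {
    throw new Error('no stream returned')
  }
  await new Promise<void>((resolve, reject) => {
    readStream
      .pipe(fs.createWriteStream('/tmp/cache.tzst'))
      .on('finish', resolve)
      .on('error', reject)
  })
}
```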

View File

@ -111,7 +111,7 @@ export async function retryTypedResponse<T>(
if (error instanceof HttpClientError) {
return {
statusCode: error.statusCode,
result: null,
result: error.result ?? null,
headers: {},
error
}

View File

@ -11,9 +11,17 @@ import {
TarFilename,
ManifestFilename
} from './constants'
import {ChildProcessWithoutNullStreams, spawn} from 'child_process'
const IS_WINDOWS = process.platform === 'win32'
enum TAR_MODE {
CREATE = 'create',
EXTRACT = 'extract',
EXTRACT_STREAM = 'extractStream',
LIST = 'list'
}
// Returns tar path and type: BSD or GNU
async function getTarPath(): Promise<ArchiveTool> {
switch (process.platform) {
@ -54,7 +62,7 @@ async function getTarPath(): Promise<ArchiveTool> {
async function getTarArgs(
tarPath: ArchiveTool,
compressionMethod: CompressionMethod,
type: string,
type: TAR_MODE,
archivePath = ''
): Promise<string[]> {
const args = [`"${tarPath.path}"`]
@ -69,7 +77,7 @@ async function getTarArgs(
// Method specific args
switch (type) {
case 'create':
case TAR_MODE.CREATE:
args.push(
'--posix',
'-cf',
@ -87,7 +95,7 @@ async function getTarArgs(
ManifestFilename
)
break
case 'extract':
case TAR_MODE.EXTRACT:
args.push(
'-xf',
BSD_TAR_ZSTD
@ -98,7 +106,16 @@ async function getTarArgs(
workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
)
break
case 'list':
case TAR_MODE.EXTRACT_STREAM:
args.push(
'-xf',
'-',
'-P',
'-C',
workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
)
break
case TAR_MODE.LIST:
args.push(
'-tf',
BSD_TAR_ZSTD
@ -127,7 +144,7 @@ async function getTarArgs(
// Returns commands to run tar and compression program
async function getCommands(
compressionMethod: CompressionMethod,
type: string,
type: TAR_MODE,
archivePath = ''
): Promise<string[]> {
let args
@ -139,8 +156,9 @@ async function getCommands(
type,
archivePath
)
const compressionArgs =
type !== 'create'
type !== TAR_MODE.CREATE
? await getDecompressionProgram(tarPath, compressionMethod, archivePath)
: await getCompressionProgram(tarPath, compressionMethod)
const BSD_TAR_ZSTD =
@ -148,7 +166,7 @@ async function getCommands(
compressionMethod !== CompressionMethod.Gzip &&
IS_WINDOWS
if (BSD_TAR_ZSTD && type !== 'create') {
if (BSD_TAR_ZSTD && type !== TAR_MODE.CREATE) {
args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')]
} else {
args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')]
@ -161,6 +179,42 @@ async function getCommands(
return [args.join(' ')]
}
/*
* Returns command pipes to stream data to tar and compression program.
* Only supports tar and zstd at the moment
* @returns Array of ChildProcessWithoutNullStreams. Pipe to the processes in the order they are returned
*/
async function getCommandPipes(
compressionMethod: CompressionMethod,
type: TAR_MODE,
archivePath = ''
): Promise<ChildProcessWithoutNullStreams[]> {
const spawnedProcesses: ChildProcessWithoutNullStreams[] = []
const tarPath = await getTarPath()
const tarArgs = await getTarArgs(
tarPath,
compressionMethod,
type,
archivePath
)
// Remove tar executable from tarArgs
tarArgs.shift()
let zstdInfo =
type !== TAR_MODE.CREATE
? await getDecompressionProgramStream(tarPath, compressionMethod)
: await getCompressionProgramStream(tarPath, compressionMethod)
const zstdProcess = spawn(zstdInfo.command, zstdInfo.args)
spawnedProcesses.push(zstdProcess)
const tarProcess = spawn(tarPath.path, tarArgs)
spawnedProcesses.push(tarProcess)
return spawnedProcesses
}
function getWorkingDirectory(): string {
return process.env['GITHUB_WORKSPACE'] ?? process.cwd()
}
@ -204,6 +258,39 @@ async function getDecompressionProgram(
}
}
// Alternative to getDecompressionProgram that returns a zstd command that can be piped into
async function getDecompressionProgramStream(
tarPath: ArchiveTool,
compressionMethod: CompressionMethod
): Promise<{command: string; args: string[]}> {
const BSD_TAR_ZSTD =
tarPath.type === ArchiveToolType.BSD &&
compressionMethod !== CompressionMethod.Gzip &&
IS_WINDOWS
switch (compressionMethod) {
case CompressionMethod.Zstd:
return BSD_TAR_ZSTD
? {command: 'zstd', args: ['-d', '--long=30', '--force', '--stdout']}
: {
command: IS_WINDOWS ? 'zstd' : 'unzstd',
args: IS_WINDOWS
? ['-d', '--long=30', '--stdout', '-T0']
: ['--long=30', '--stdout', '-T0']
}
case CompressionMethod.ZstdWithoutLong:
return BSD_TAR_ZSTD
? {command: 'zstd', args: ['-d', '--force', '--stdout']}
: {
command: IS_WINDOWS ? 'zstd' : 'unzstd',
args: ['-d', '--stdout', '-T0']
}
default:
// Assuming gzip is the default method if none specified
return {command: 'gzip', args: ['-d']}
}
}
// Used for creating the archive
// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
// zstdmt is equivalent to 'zstd -T0'
@ -244,6 +331,44 @@ async function getCompressionProgram(
}
}
async function getCompressionProgramStream(
tarPath: ArchiveTool,
compressionMethod: CompressionMethod
): Promise<{command: string; args: string[]}> {
const BSD_TAR_ZSTD =
tarPath.type === ArchiveToolType.BSD &&
compressionMethod !== CompressionMethod.Gzip &&
IS_WINDOWS
switch (compressionMethod) {
case CompressionMethod.Zstd:
return BSD_TAR_ZSTD
? {
command: 'zstd',
args: ['-T0', '--long=30', '--force', '--stdout']
}
: {
command: IS_WINDOWS ? 'zstd' : 'zstdmt',
args: IS_WINDOWS
? ['-T0', '--long=30', '--stdout', '-T0']
: ['--long=30', '--stdout', '-T0']
}
case CompressionMethod.ZstdWithoutLong:
return BSD_TAR_ZSTD
? {
command: 'zstd',
args: ['-T0', '--force', '--stdout']
}
: {
command: IS_WINDOWS ? 'zstd' : 'zstdmt',
args: ['-T0', '--stdout']
}
default:
// Assuming gzip is the default method if none specified
return {command: 'gzip', args: []}
}
}
// Executes all commands as separate processes
async function execCommands(commands: string[], cwd?: string): Promise<void> {
for (const command of commands) {
@ -265,11 +390,14 @@ export async function listTar(
archivePath: string,
compressionMethod: CompressionMethod
): Promise<void> {
const commands = await getCommands(compressionMethod, 'list', archivePath)
const commands = await getCommands(
compressionMethod,
TAR_MODE.LIST,
archivePath
)
await execCommands(commands)
}
// Extract a tar
export async function extractTar(
archivePath: string,
compressionMethod: CompressionMethod
@ -277,11 +405,76 @@ export async function extractTar(
// Create directory to extract tar into
const workingDirectory = getWorkingDirectory()
await io.mkdirP(workingDirectory)
const commands = await getCommands(compressionMethod, 'extract', archivePath)
const commands = await getCommands(
compressionMethod,
TAR_MODE.EXTRACT,
archivePath
)
await execCommands(commands)
}
// Create a tar
/*
* NOTE: Currently tested only on archives created using tar and zstd
*/
export async function extractStreamingTar(
stream: NodeJS.ReadableStream | undefined,
archivePath: string,
compressionMethod: CompressionMethod,
downloadCommandPipe?: ChildProcessWithoutNullStreams
): Promise<void> {
const workingDirectory = getWorkingDirectory()
await io.mkdirP(workingDirectory)
const commandPipes = await getCommandPipes(
compressionMethod,
TAR_MODE.EXTRACT_STREAM,
archivePath
)
if (downloadCommandPipe) {
commandPipes.unshift(downloadCommandPipe)
}
if (commandPipes.length < 2) {
throw new Error(
'At least two processes should be present as the archive is compressed at least twice.'
)
}
return new Promise((resolve, reject) => {
if (stream) {
stream.pipe(commandPipes[0].stdin)
}
for (let i = 0; i < commandPipes.length - 1; i++) {
commandPipes[i].stdout.pipe(commandPipes[i + 1].stdin)
commandPipes[i].stderr.on('data', data => {
reject(
new Error(`Error in ${commandPipes[i].spawnfile}: ${data.toString()}`)
)
})
commandPipes[i].on('error', error => {
reject(
new Error(`Error in ${commandPipes[i].spawnfile}: ${error.message}`)
)
})
}
const lastCommand = commandPipes[commandPipes.length - 1]
lastCommand.stderr.on('data', data => {
console.error(`Error in ${lastCommand.spawnfile}:`, data.toString())
reject(new Error(`Error in ${lastCommand.spawnfile}: ${data.toString()}`))
})
lastCommand.on('close', code => {
if (code === 0) {
resolve()
} else {
reject(new Error(`Last command exited with code ${code}`))
}
})
})
}
export async function createTar(
archiveFolder: string,
sourceDirectories: string[],
@ -292,6 +485,6 @@ export async function createTar(
path.join(archiveFolder, ManifestFilename),
sourceDirectories.join('\n')
)
const commands = await getCommands(compressionMethod, 'create')
const commands = await getCommands(compressionMethod, TAR_MODE.CREATE)
await execCommands(commands, archiveFolder)
}
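
To illustrate how the streaming pieces fit together, a hedged sketch of the wget-fed path used for GCS pre-signed URLs: `getDownloadCommandPipeForWget` spawns `wget -qO - <url>`, and `extractStreamingTar` pipes its stdout through the zstd and tar processes built by `getCommandPipes`. The URL and archive path here are placeholders.

```ts
import {extractStreamingTar} from './tar'
import {getDownloadCommandPipeForWget} from './downloadUtils'
import {CompressionMethod} from './constants'

async function restoreFromSignedUrl(preSignedUrl: string): Promise<void> {
  // wget downloads the archive and writes it to stdout...
  const downloadPipe = getDownloadCommandPipeForWget(preSignedUrl)

  // ...which is piped through zstd and tar, so the archive is extracted
  // while it downloads instead of being written to disk first.
  await extractStreamingTar(
    undefined,          // no Node.js readable stream; the wget process feeds the pipe
    '/tmp/cache.tzst',  // placeholder archive path
    CompressionMethod.Zstd,
    downloadPipe
  )
}
```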

View File

@ -0,0 +1,131 @@
import * as core from '@actions/core'
import * as utils from './cacheUtils'
import fs from 'fs'
import axios, {AxiosError} from 'axios'
import {InternalS3CompletedPart} from './contracts'
import {Storage, TransferManager} from '@google-cloud/storage'
function getContentRange(start: number, end: number): string {
// Format: `bytes start-end/filesize
// start and end are inclusive
// filesize can be *
// For a 200 byte chunk starting at byte 0:
// Content-Range: bytes 0-199/*
return `bytes ${start}-${end}/*`
}
async function uploadChunk(
resourceUrl: string,
openStream: () => NodeJS.ReadableStream,
partNumber: number,
start: number,
end: number
): Promise<InternalS3CompletedPart> {
core.debug(
`Uploading chunk of size ${
end - start + 1
} bytes at offset ${start} with content range: ${getContentRange(
start,
end
)}`
)
// Manually convert the readable stream to a buffer. S3 doesn't allow stream as input
const chunks = await utils.streamToBuffer(openStream())
try {
// HACK: Using axios here as S3 API doesn't allow readable stream as input and Github's HTTP client is not able to send buffer as body
const response = await axios.request({
method: 'PUT',
url: resourceUrl,
headers: {
'Content-Type': 'application/octet-stream'
},
data: chunks
})
return {
ETag: response.headers.etag ?? '',
PartNumber: partNumber
}
} catch (error) {
throw new Error(
`Cache service responded with ${
(error as AxiosError).status
} during upload chunk.`
)
}
}
export async function uploadFileToS3(
preSignedURLs: string[],
archivePath: string
): Promise<InternalS3CompletedPart[]> {
const fileSize = utils.getArchiveFileSizeInBytes(archivePath)
const numberOfChunks = preSignedURLs.length
const fd = fs.openSync(archivePath, 'r')
core.debug('Awaiting all uploads')
let offset = 0
try {
const completedParts = await Promise.all(
preSignedURLs.map(async (presignedURL, index) => {
const chunkSize = Math.ceil(fileSize / numberOfChunks)
const start = offset
const end = offset + chunkSize - 1
offset += chunkSize
return await uploadChunk(
presignedURL,
() =>
fs
.createReadStream(archivePath, {
fd,
start,
end,
autoClose: false
})
.on('error', error => {
throw new Error(
`Cache upload failed because file read failed with ${error.message}`
)
}),
index + 1,
start,
end
)
})
)
return completedParts
} finally {
fs.closeSync(fd)
}
}
/*
* Uploads the cache to GCS
* @param localArchivePath - The path to the cache archive
* @param bucketName - The name of the bucket in GCS
* @param objectName - The name of the object in GCS
*/
export async function multiPartUploadToGCS(
storage: Storage,
localArchivePath: string,
bucketName: string,
objectName: string
) {
try {
const transferManager = new TransferManager(storage.bucket(bucketName))
await transferManager.uploadFileInChunks(localArchivePath, {
uploadName: objectName
})
} catch (error) {
throw new Error(`Failed to upload to GCS: ${error}`)
}
}
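
A short sketch of the GCS upload path as `saveCache` drives it: credentials come from the reserve response's short-lived token, then the archive is uploaded in chunks via `TransferManager`. The token, bucket and object names are placeholders for the values in `reserveCacheResponse.result.gcs`.

```ts
import {Storage} from '@google-cloud/storage'
import {OAuth2Client} from 'google-auth-library'
import {multiPartUploadToGCS} from './uploadUtils'

async function uploadArchiveToGCS(gcsToken: string): Promise<void> {
  const authClient = new OAuth2Client()
  authClient.setCredentials({access_token: gcsToken})
  const storage = new Storage({authClient})

  await multiPartUploadToGCS(
    storage,
    '/tmp/cache.tzst',         // local archive produced by createTar
    'example-cache-bucket',    // reserveCacheResponse.result.gcs.bucket_name
    'org/repo/cache-key.tzst'  // reserveCacheResponse.result.gcs.cache_key
  )
}
```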

View File

@ -0,0 +1,23 @@
# OpenAPI Generator Ignore
# Generated by openapi-generator https://github.com/openapitools/openapi-generator
# Use this file to prevent files from being overwritten by the generator.
# The patterns follow closely to .gitignore or .dockerignore.
# As an example, the C# client generator defines ApiClient.cs.
# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line:
#ApiClient.cs
# You can match any string of characters against a directory, file or extension with a single asterisk (*):
#foo/*/qux
# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux
# You can recursively match patterns against a directory, file or extension with a double asterisk (**):
#foo/**/qux
# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux
# You can also negate patterns with an exclamation (!).
# For example, you can ignore all files in a docs folder with the file extension .md:
#docs/*.md
# Then explicitly reverse the ignore rule for a single file:
#!docs/README.md

View File

@ -0,0 +1,37 @@
.gitignore
.npmignore
.openapi-generator-ignore
README.md
api.ts
api/default-api.ts
base.ts
common.ts
configuration.ts
git_push.sh
index.ts
models/commons-append-operation-input.ts
models/commons-cache-entry.ts
models/commons-commit-cache-request.ts
models/commons-commit-cache-response.ts
models/commons-delete-cache-request.ts
models/commons-delete-cache-response.ts
models/commons-gcscommit-cache-response.ts
models/commons-gcsdelete-cache-response.ts
models/commons-gcsget-cache-reponse.ts
models/commons-gcsreserve-cache-response.ts
models/commons-get-cache-request.ts
models/commons-get-cache-response.ts
models/commons-operation.ts
models/commons-reserve-cache-request.ts
models/commons-reserve-cache-response.ts
models/commons-s3-commit-cache-response.ts
models/commons-s3-delete-cache-response.ts
models/commons-s3-get-cache-response.ts
models/commons-s3-reserve-cache-response.ts
models/commons-short-lived-token.ts
models/index.ts
models/types-completed-part.ts
models/warp-build-apierror.ts
package.json
tsconfig.esm.json
tsconfig.json

View File

@ -0,0 +1,18 @@
/* tslint:disable */
/* eslint-disable */
/**
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.0.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
export * from './api/default-api';

View File

@ -0,0 +1,569 @@
/* tslint:disable */
/* eslint-disable */
/**
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.0.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
import type { Configuration } from '../configuration';
import type { AxiosPromise, AxiosInstance, RawAxiosRequestConfig } from 'axios';
import globalAxios from 'axios';
// Some imports not used depending on template conditions
// @ts-ignore
import { DUMMY_BASE_URL, assertParamExists, setApiKeyToObject, setBasicAuthToObject, setBearerAuthToObject, setOAuthToObject, setSearchParams, serializeDataIfNeeded, toPathString, createRequestFunction } from '../common';
// @ts-ignore
import { BASE_PATH, COLLECTION_FORMATS, RequestArgs, BaseAPI, RequiredError, operationServerMap } from '../base';
// @ts-ignore
import { CommonsAppendOperationInput } from '../models';
// @ts-ignore
import { CommonsCommitCacheRequest } from '../models';
// @ts-ignore
import { CommonsCommitCacheResponse } from '../models';
// @ts-ignore
import { CommonsDeleteCacheRequest } from '../models';
// @ts-ignore
import { CommonsDeleteCacheResponse } from '../models';
// @ts-ignore
import { CommonsGetCacheRequest } from '../models';
// @ts-ignore
import { CommonsGetCacheResponse } from '../models';
// @ts-ignore
import { CommonsOperation } from '../models';
// @ts-ignore
import { CommonsReserveCacheRequest } from '../models';
// @ts-ignore
import { CommonsReserveCacheResponse } from '../models';
// @ts-ignore
import { WarpBuildAPIError } from '../models';
/**
* DefaultApi - axios parameter creator
* @export
*/
export const DefaultApiAxiosParamCreator = function (configuration?: Configuration) {
return {
/**
* do ping
* @summary pings the api
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
pingGet: async (options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
const localVarPath = `/ping`;
// use dummy base URL string because the URL constructor only accepts absolute URLs.
const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
let baseOptions;
if (configuration) {
baseOptions = configuration.baseOptions;
}
const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
const localVarHeaderParameter = {} as any;
const localVarQueryParameter = {} as any;
setSearchParams(localVarUrlObj, localVarQueryParameter);
let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
return {
url: toPathString(localVarUrlObj),
options: localVarRequestOptions,
};
},
/**
* commit cache
* @summary commit cache
* @param {CommonsCommitCacheRequest} body Commit Cache Request Body
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
v1CacheCommitPost: async (body: CommonsCommitCacheRequest, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
// verify required parameter 'body' is not null or undefined
assertParamExists('v1CacheCommitPost', 'body', body)
const localVarPath = `/v1/cache/commit`;
// use dummy base URL string because the URL constructor only accepts absolute URLs.
const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
let baseOptions;
if (configuration) {
baseOptions = configuration.baseOptions;
}
const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options};
const localVarHeaderParameter = {} as any;
const localVarQueryParameter = {} as any;
localVarHeaderParameter['Content-Type'] = 'application/json';
setSearchParams(localVarUrlObj, localVarQueryParameter);
let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration)
return {
url: toPathString(localVarUrlObj),
options: localVarRequestOptions,
};
},
/**
* delete cache
* @summary delete cache
* @param {CommonsDeleteCacheRequest} body Delete Cache Request Body
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
v1CacheDeletePost: async (body: CommonsDeleteCacheRequest, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
// verify required parameter 'body' is not null or undefined
assertParamExists('v1CacheDeletePost', 'body', body)
const localVarPath = `/v1/cache/delete`;
// use dummy base URL string because the URL constructor only accepts absolute URLs.
const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
let baseOptions;
if (configuration) {
baseOptions = configuration.baseOptions;
}
const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options};
const localVarHeaderParameter = {} as any;
const localVarQueryParameter = {} as any;
localVarHeaderParameter['Content-Type'] = 'application/json';
setSearchParams(localVarUrlObj, localVarQueryParameter);
let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration)
return {
url: toPathString(localVarUrlObj),
options: localVarRequestOptions,
};
},
/**
* get cache
* @summary get cache
* @param {CommonsGetCacheRequest} body Get Cache Request Body
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
v1CacheGetPost: async (body: CommonsGetCacheRequest, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
// verify required parameter 'body' is not null or undefined
assertParamExists('v1CacheGetPost', 'body', body)
const localVarPath = `/v1/cache/get`;
// use dummy base URL string because the URL constructor only accepts absolute URLs.
const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
let baseOptions;
if (configuration) {
baseOptions = configuration.baseOptions;
}
const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options};
const localVarHeaderParameter = {} as any;
const localVarQueryParameter = {} as any;
localVarHeaderParameter['Content-Type'] = 'application/json';
setSearchParams(localVarUrlObj, localVarQueryParameter);
let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration)
return {
url: toPathString(localVarUrlObj),
options: localVarRequestOptions,
};
},
/**
* reserve cache
* @summary reserve cache
* @param {CommonsReserveCacheRequest} body Reserve Cache Request Body
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
v1CacheReservePost: async (body: CommonsReserveCacheRequest, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
// verify required parameter 'body' is not null or undefined
assertParamExists('v1CacheReservePost', 'body', body)
const localVarPath = `/v1/cache/reserve`;
// use dummy base URL string because the URL constructor only accepts absolute URLs.
const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
let baseOptions;
if (configuration) {
baseOptions = configuration.baseOptions;
}
const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options};
const localVarHeaderParameter = {} as any;
const localVarQueryParameter = {} as any;
localVarHeaderParameter['Content-Type'] = 'application/json';
setSearchParams(localVarUrlObj, localVarQueryParameter);
let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration)
return {
url: toPathString(localVarUrlObj),
options: localVarRequestOptions,
};
},
/**
* record operation
* @summary record operation
* @param {CommonsAppendOperationInput} body Record Operation details Request Body
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
v1InstrumentationOperationPost: async (body: CommonsAppendOperationInput, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
// verify required parameter 'body' is not null or undefined
assertParamExists('v1InstrumentationOperationPost', 'body', body)
const localVarPath = `/v1/instrumentation/operation`;
// use dummy base URL string because the URL constructor only accepts absolute URLs.
const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
let baseOptions;
if (configuration) {
baseOptions = configuration.baseOptions;
}
const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options};
const localVarHeaderParameter = {} as any;
const localVarQueryParameter = {} as any;
localVarHeaderParameter['Content-Type'] = 'application/json';
setSearchParams(localVarUrlObj, localVarQueryParameter);
let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration)
return {
url: toPathString(localVarUrlObj),
options: localVarRequestOptions,
};
},
}
};
/**
* DefaultApi - functional programming interface
* @export
*/
export const DefaultApiFp = function(configuration?: Configuration) {
const localVarAxiosParamCreator = DefaultApiAxiosParamCreator(configuration)
return {
/**
* do ping
* @summary pings the api
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
async pingGet(options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<string>> {
const localVarAxiosArgs = await localVarAxiosParamCreator.pingGet(options);
const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
const localVarOperationServerBasePath = operationServerMap['DefaultApi.pingGet']?.[localVarOperationServerIndex]?.url;
return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
},
/**
* commit cache
* @summary commit cache
* @param {CommonsCommitCacheRequest} body Commit Cache Request Body
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
async v1CacheCommitPost(body: CommonsCommitCacheRequest, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<CommonsCommitCacheResponse>> {
const localVarAxiosArgs = await localVarAxiosParamCreator.v1CacheCommitPost(body, options);
const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
const localVarOperationServerBasePath = operationServerMap['DefaultApi.v1CacheCommitPost']?.[localVarOperationServerIndex]?.url;
return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
},
/**
* delete cache
* @summary delete cache
* @param {CommonsDeleteCacheRequest} body Delete Cache Request Body
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
async v1CacheDeletePost(body: CommonsDeleteCacheRequest, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<CommonsDeleteCacheResponse>> {
const localVarAxiosArgs = await localVarAxiosParamCreator.v1CacheDeletePost(body, options);
const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
const localVarOperationServerBasePath = operationServerMap['DefaultApi.v1CacheDeletePost']?.[localVarOperationServerIndex]?.url;
return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
},
/**
* get cache
* @summary get cache
* @param {CommonsGetCacheRequest} body Get Cache Request Body
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
async v1CacheGetPost(body: CommonsGetCacheRequest, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<CommonsGetCacheResponse>> {
const localVarAxiosArgs = await localVarAxiosParamCreator.v1CacheGetPost(body, options);
const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
const localVarOperationServerBasePath = operationServerMap['DefaultApi.v1CacheGetPost']?.[localVarOperationServerIndex]?.url;
return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
},
/**
* reserve cache
* @summary reserve cache
* @param {CommonsReserveCacheRequest} body Reserve Cache Request Body
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
async v1CacheReservePost(body: CommonsReserveCacheRequest, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<CommonsReserveCacheResponse>> {
const localVarAxiosArgs = await localVarAxiosParamCreator.v1CacheReservePost(body, options);
const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
const localVarOperationServerBasePath = operationServerMap['DefaultApi.v1CacheReservePost']?.[localVarOperationServerIndex]?.url;
return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
},
/**
* record operation
* @summary record operation
* @param {CommonsAppendOperationInput} body Record Operation details Request Body
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
async v1InstrumentationOperationPost(body: CommonsAppendOperationInput, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<CommonsOperation>> {
const localVarAxiosArgs = await localVarAxiosParamCreator.v1InstrumentationOperationPost(body, options);
const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
const localVarOperationServerBasePath = operationServerMap['DefaultApi.v1InstrumentationOperationPost']?.[localVarOperationServerIndex]?.url;
return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
},
}
};
/**
* DefaultApi - factory interface
* @export
*/
export const DefaultApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) {
const localVarFp = DefaultApiFp(configuration)
return {
/**
* do ping
* @summary pings the api
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
pingGet(options?: RawAxiosRequestConfig): AxiosPromise<string> {
return localVarFp.pingGet(options).then((request) => request(axios, basePath));
},
/**
* commit cache
* @summary commit cache
* @param {DefaultApiV1CacheCommitPostRequest} requestParameters Request parameters.
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
v1CacheCommitPost(requestParameters: DefaultApiV1CacheCommitPostRequest, options?: RawAxiosRequestConfig): AxiosPromise<CommonsCommitCacheResponse> {
return localVarFp.v1CacheCommitPost(requestParameters.body, options).then((request) => request(axios, basePath));
},
/**
* delete cache
* @summary delete cache
* @param {DefaultApiV1CacheDeletePostRequest} requestParameters Request parameters.
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
v1CacheDeletePost(requestParameters: DefaultApiV1CacheDeletePostRequest, options?: RawAxiosRequestConfig): AxiosPromise<CommonsDeleteCacheResponse> {
return localVarFp.v1CacheDeletePost(requestParameters.body, options).then((request) => request(axios, basePath));
},
/**
* get cache
* @summary get cache
* @param {DefaultApiV1CacheGetPostRequest} requestParameters Request parameters.
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
v1CacheGetPost(requestParameters: DefaultApiV1CacheGetPostRequest, options?: RawAxiosRequestConfig): AxiosPromise<CommonsGetCacheResponse> {
return localVarFp.v1CacheGetPost(requestParameters.body, options).then((request) => request(axios, basePath));
},
/**
* reserve cache
* @summary reserve cache
* @param {DefaultApiV1CacheReservePostRequest} requestParameters Request parameters.
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
v1CacheReservePost(requestParameters: DefaultApiV1CacheReservePostRequest, options?: RawAxiosRequestConfig): AxiosPromise<CommonsReserveCacheResponse> {
return localVarFp.v1CacheReservePost(requestParameters.body, options).then((request) => request(axios, basePath));
},
/**
* record operation
* @summary record operation
* @param {DefaultApiV1InstrumentationOperationPostRequest} requestParameters Request parameters.
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
v1InstrumentationOperationPost(requestParameters: DefaultApiV1InstrumentationOperationPostRequest, options?: RawAxiosRequestConfig): AxiosPromise<CommonsOperation> {
return localVarFp.v1InstrumentationOperationPost(requestParameters.body, options).then((request) => request(axios, basePath));
},
};
};
/**
* Request parameters for v1CacheCommitPost operation in DefaultApi.
* @export
* @interface DefaultApiV1CacheCommitPostRequest
*/
export interface DefaultApiV1CacheCommitPostRequest {
/**
* Commit Cache Request Body
* @type {CommonsCommitCacheRequest}
* @memberof DefaultApiV1CacheCommitPost
*/
readonly body: CommonsCommitCacheRequest
}
/**
* Request parameters for v1CacheDeletePost operation in DefaultApi.
* @export
* @interface DefaultApiV1CacheDeletePostRequest
*/
export interface DefaultApiV1CacheDeletePostRequest {
/**
* Delete Cache Request Body
* @type {CommonsDeleteCacheRequest}
* @memberof DefaultApiV1CacheDeletePost
*/
readonly body: CommonsDeleteCacheRequest
}
/**
* Request parameters for v1CacheGetPost operation in DefaultApi.
* @export
* @interface DefaultApiV1CacheGetPostRequest
*/
export interface DefaultApiV1CacheGetPostRequest {
/**
* Get Cache Request Body
* @type {CommonsGetCacheRequest}
* @memberof DefaultApiV1CacheGetPost
*/
readonly body: CommonsGetCacheRequest
}
/**
* Request parameters for v1CacheReservePost operation in DefaultApi.
* @export
* @interface DefaultApiV1CacheReservePostRequest
*/
export interface DefaultApiV1CacheReservePostRequest {
/**
* Reserve Cache Request Body
* @type {CommonsReserveCacheRequest}
* @memberof DefaultApiV1CacheReservePost
*/
readonly body: CommonsReserveCacheRequest
}
/**
* Request parameters for v1InstrumentationOperationPost operation in DefaultApi.
* @export
* @interface DefaultApiV1InstrumentationOperationPostRequest
*/
export interface DefaultApiV1InstrumentationOperationPostRequest {
/**
* Record Operation details Request Body
* @type {CommonsAppendOperationInput}
* @memberof DefaultApiV1InstrumentationOperationPost
*/
readonly body: CommonsAppendOperationInput
}
/**
* DefaultApi - object-oriented interface
* @export
* @class DefaultApi
* @extends {BaseAPI}
*/
export class DefaultApi extends BaseAPI {
/**
* do ping
* @summary pings the api
* @param {*} [options] Override http request option.
* @throws {RequiredError}
* @memberof DefaultApi
*/
public pingGet(options?: RawAxiosRequestConfig) {
return DefaultApiFp(this.configuration).pingGet(options).then((request) => request(this.axios, this.basePath));
}
/**
* commit cache
* @summary commit cache
* @param {DefaultApiV1CacheCommitPostRequest} requestParameters Request parameters.
* @param {*} [options] Override http request option.
* @throws {RequiredError}
* @memberof DefaultApi
*/
public v1CacheCommitPost(requestParameters: DefaultApiV1CacheCommitPostRequest, options?: RawAxiosRequestConfig) {
return DefaultApiFp(this.configuration).v1CacheCommitPost(requestParameters.body, options).then((request) => request(this.axios, this.basePath));
}
/**
* delete cache
* @summary delete cache
* @param {DefaultApiV1CacheDeletePostRequest} requestParameters Request parameters.
* @param {*} [options] Override http request option.
* @throws {RequiredError}
* @memberof DefaultApi
*/
public v1CacheDeletePost(requestParameters: DefaultApiV1CacheDeletePostRequest, options?: RawAxiosRequestConfig) {
return DefaultApiFp(this.configuration).v1CacheDeletePost(requestParameters.body, options).then((request) => request(this.axios, this.basePath));
}
/**
* get cache
* @summary get cache
* @param {DefaultApiV1CacheGetPostRequest} requestParameters Request parameters.
* @param {*} [options] Override http request option.
* @throws {RequiredError}
* @memberof DefaultApi
*/
public v1CacheGetPost(requestParameters: DefaultApiV1CacheGetPostRequest, options?: RawAxiosRequestConfig) {
return DefaultApiFp(this.configuration).v1CacheGetPost(requestParameters.body, options).then((request) => request(this.axios, this.basePath));
}
/**
* reserve cache
* @summary reserve cache
* @param {DefaultApiV1CacheReservePostRequest} requestParameters Request parameters.
* @param {*} [options] Override http request option.
* @throws {RequiredError}
* @memberof DefaultApi
*/
public v1CacheReservePost(requestParameters: DefaultApiV1CacheReservePostRequest, options?: RawAxiosRequestConfig) {
return DefaultApiFp(this.configuration).v1CacheReservePost(requestParameters.body, options).then((request) => request(this.axios, this.basePath));
}
/**
* record operation
* @summary record operation
* @param {DefaultApiV1InstrumentationOperationPostRequest} requestParameters Request parameters.
* @param {*} [options] Override http request option.
* @throws {RequiredError}
* @memberof DefaultApi
*/
public v1InstrumentationOperationPost(requestParameters: DefaultApiV1InstrumentationOperationPostRequest, options?: RawAxiosRequestConfig) {
return DefaultApiFp(this.configuration).v1InstrumentationOperationPost(requestParameters.body, options).then((request) => request(this.axios, this.basePath));
}
}
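
For orientation, a minimal usage sketch of this generated client follows; it is not part of the generated sources. It assumes the service base URL comes from the WARPBUILD_CACHE_URL environment variable, and the auth header, token variable name, cache key, and vcs_type value are placeholders rather than anything this client defines.

import {Configuration, DefaultApi} from './index'

async function reserveAndCommitExample(): Promise<void> {
  const api = new DefaultApi(
    new Configuration({
      basePath: process.env['WARPBUILD_CACHE_URL'],
      // Hypothetical bearer token; baseOptions.headers are merged into every request.
      baseOptions: {
        headers: {Authorization: `Bearer ${process.env['WARPBUILD_AUTH_TOKEN'] ?? ''}`}
      }
    })
  )

  // Reserve an entry; the response exposes `gcs` and `s3` sub-responses plus a `provider` string.
  const reserve = await api.v1CacheReservePost({
    body: {cache_key: 'node-deps-abc123', cache_version: 'v1'}
  })
  console.log('provider:', reserve.data.provider)

  // Commit once the archive has been uploaded; `parts` matters only for S3 multipart uploads.
  await api.v1CacheCommitPost({
    body: {
      cache_key: 'node-deps-abc123',
      cache_version: 'v1',
      parts: [],
      vcs_type: 'github' // placeholder value
    }
  })
}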

View File

@ -0,0 +1,86 @@
/* tslint:disable */
/* eslint-disable */
/**
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.0.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
import type { Configuration } from './configuration';
// Some imports not used depending on template conditions
// @ts-ignore
import type { AxiosPromise, AxiosInstance, RawAxiosRequestConfig } from 'axios';
import globalAxios from 'axios';
export const BASE_PATH = "http://localhost".replace(/\/+$/, "");
/**
*
* @export
*/
export const COLLECTION_FORMATS = {
csv: ",",
ssv: " ",
tsv: "\t",
pipes: "|",
};
/**
*
* @export
* @interface RequestArgs
*/
export interface RequestArgs {
url: string;
options: RawAxiosRequestConfig;
}
/**
*
* @export
* @class BaseAPI
*/
export class BaseAPI {
protected configuration: Configuration | undefined;
constructor(configuration?: Configuration, protected basePath: string = BASE_PATH, protected axios: AxiosInstance = globalAxios) {
if (configuration) {
this.configuration = configuration;
this.basePath = configuration.basePath ?? basePath;
}
}
};
/**
*
* @export
* @class RequiredError
* @extends {Error}
*/
export class RequiredError extends Error {
constructor(public field: string, msg?: string) {
super(msg);
this.name = "RequiredError"
}
}
interface ServerMap {
[key: string]: {
url: string,
description: string,
}[];
}
/**
*
* @export
*/
export const operationServerMap: ServerMap = {
}

View File

@ -0,0 +1,150 @@
/* tslint:disable */
/* eslint-disable */
/**
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.0.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
import type { Configuration } from "./configuration";
import type { RequestArgs } from "./base";
import type { AxiosInstance, AxiosResponse } from 'axios';
import { RequiredError } from "./base";
/**
*
* @export
*/
export const DUMMY_BASE_URL = 'https://example.com'
/**
*
* @throws {RequiredError}
* @export
*/
export const assertParamExists = function (functionName: string, paramName: string, paramValue: unknown) {
if (paramValue === null || paramValue === undefined) {
throw new RequiredError(paramName, `Required parameter ${paramName} was null or undefined when calling ${functionName}.`);
}
}
/**
*
* @export
*/
export const setApiKeyToObject = async function (object: any, keyParamName: string, configuration?: Configuration) {
if (configuration && configuration.apiKey) {
const localVarApiKeyValue = typeof configuration.apiKey === 'function'
? await configuration.apiKey(keyParamName)
: await configuration.apiKey;
object[keyParamName] = localVarApiKeyValue;
}
}
/**
*
* @export
*/
export const setBasicAuthToObject = function (object: any, configuration?: Configuration) {
if (configuration && (configuration.username || configuration.password)) {
object["auth"] = { username: configuration.username, password: configuration.password };
}
}
/**
*
* @export
*/
export const setBearerAuthToObject = async function (object: any, configuration?: Configuration) {
if (configuration && configuration.accessToken) {
const accessToken = typeof configuration.accessToken === 'function'
? await configuration.accessToken()
: await configuration.accessToken;
object["Authorization"] = "Bearer " + accessToken;
}
}
/**
*
* @export
*/
export const setOAuthToObject = async function (object: any, name: string, scopes: string[], configuration?: Configuration) {
if (configuration && configuration.accessToken) {
const localVarAccessTokenValue = typeof configuration.accessToken === 'function'
? await configuration.accessToken(name, scopes)
: await configuration.accessToken;
object["Authorization"] = "Bearer " + localVarAccessTokenValue;
}
}
function setFlattenedQueryParams(urlSearchParams: URLSearchParams, parameter: any, key: string = ""): void {
if (parameter == null) return;
if (typeof parameter === "object") {
if (Array.isArray(parameter)) {
(parameter as any[]).forEach(item => setFlattenedQueryParams(urlSearchParams, item, key));
}
else {
Object.keys(parameter).forEach(currentKey =>
setFlattenedQueryParams(urlSearchParams, parameter[currentKey], `${key}${key !== '' ? '.' : ''}${currentKey}`)
);
}
}
else {
if (urlSearchParams.has(key)) {
urlSearchParams.append(key, parameter);
}
else {
urlSearchParams.set(key, parameter);
}
}
}
/**
*
* @export
*/
export const setSearchParams = function (url: URL, ...objects: any[]) {
const searchParams = new URLSearchParams(url.search);
setFlattenedQueryParams(searchParams, objects);
url.search = searchParams.toString();
}
/**
*
* @export
*/
export const serializeDataIfNeeded = function (value: any, requestOptions: any, configuration?: Configuration) {
const nonString = typeof value !== 'string';
const needsSerialization = nonString && configuration && configuration.isJsonMime
? configuration.isJsonMime(requestOptions.headers['Content-Type'])
: nonString;
return needsSerialization
? JSON.stringify(value !== undefined ? value : {})
: (value || "");
}
/**
*
* @export
*/
export const toPathString = function (url: URL) {
return url.pathname + url.search + url.hash
}
/**
*
* @export
*/
export const createRequestFunction = function (axiosArgs: RequestArgs, globalAxios: AxiosInstance, BASE_PATH: string, configuration?: Configuration) {
return <T = unknown, R = AxiosResponse<T>>(axios: AxiosInstance = globalAxios, basePath: string = BASE_PATH) => {
const axiosRequestArgs = {...axiosArgs.options, url: (axios.defaults.baseURL ? '' : configuration?.basePath ?? basePath) + axiosArgs.url};
return axios.request<T, R>(axiosRequestArgs);
};
}

View File

@ -0,0 +1,110 @@
/* tslint:disable */
/* eslint-disable */
/**
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.0.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
export interface ConfigurationParameters {
apiKey?: string | Promise<string> | ((name: string) => string) | ((name: string) => Promise<string>);
username?: string;
password?: string;
accessToken?: string | Promise<string> | ((name?: string, scopes?: string[]) => string) | ((name?: string, scopes?: string[]) => Promise<string>);
basePath?: string;
serverIndex?: number;
baseOptions?: any;
formDataCtor?: new () => any;
}
export class Configuration {
/**
* parameter for apiKey security
* @param name security name
* @memberof Configuration
*/
apiKey?: string | Promise<string> | ((name: string) => string) | ((name: string) => Promise<string>);
/**
* parameter for basic security
*
* @type {string}
* @memberof Configuration
*/
username?: string;
/**
* parameter for basic security
*
* @type {string}
* @memberof Configuration
*/
password?: string;
/**
* parameter for oauth2 security
* @param name security name
* @param scopes oauth2 scope
* @memberof Configuration
*/
accessToken?: string | Promise<string> | ((name?: string, scopes?: string[]) => string) | ((name?: string, scopes?: string[]) => Promise<string>);
/**
* override base path
*
* @type {string}
* @memberof Configuration
*/
basePath?: string;
/**
* override server index
*
* @type {number}
* @memberof Configuration
*/
serverIndex?: number;
/**
* base options for axios calls
*
* @type {any}
* @memberof Configuration
*/
baseOptions?: any;
/**
* The FormData constructor that will be used to create multipart form data
* requests. You can inject this here so that execution environments that
* do not support the FormData class can still run the generated client.
*
* @type {new () => FormData}
*/
formDataCtor?: new () => any;
constructor(param: ConfigurationParameters = {}) {
this.apiKey = param.apiKey;
this.username = param.username;
this.password = param.password;
this.accessToken = param.accessToken;
this.basePath = param.basePath;
this.serverIndex = param.serverIndex;
this.baseOptions = param.baseOptions;
this.formDataCtor = param.formDataCtor;
}
/**
* Check if the given MIME is a JSON MIME.
* JSON MIME examples:
* application/json
* application/json; charset=UTF8
* APPLICATION/JSON
* application/vnd.company+json
* @param mime - MIME (Multipurpose Internet Mail Extensions)
* @return True if the given MIME is JSON, false otherwise.
*/
public isJsonMime(mime: string): boolean {
const jsonMime: RegExp = new RegExp('^(application\/json|[^;/ \t]+\/[^;/ \t]+[+]json)[ \t]*(;.*)?$', 'i');
return mime !== null && (jsonMime.test(mime) || mime.toLowerCase() === 'application/json-patch+json');
}
}
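
As a quick note on how this Configuration interacts with the rest of the generated client: BaseAPI falls back to BASE_PATH ("http://localhost") unless Configuration.basePath is set, and isJsonMime is what serializeDataIfNeeded consults before JSON-stringifying a request body. A small sketch with placeholder values:

import {Configuration} from './configuration'

const configuration = new Configuration({
  // Placeholder fallback; WARPBUILD_CACHE_URL is assumed to hold the real service URL.
  basePath: process.env['WARPBUILD_CACHE_URL'] ?? 'https://cache.example.com'
})

// isJsonMime decides whether serializeDataIfNeeded JSON-stringifies the request body.
console.log(configuration.isJsonMime('application/json; charset=UTF8')) // true
console.log(configuration.isJsonMime('application/octet-stream')) // false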

View File

@ -0,0 +1,21 @@
/* tslint:disable */
/* eslint-disable */
/**
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.0.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
export * from "./api";
export * from "./configuration";
export * from "./models";
import * as Schema from "./schema";
export {Schema};

View File

@ -0,0 +1,60 @@
/* tslint:disable */
/* eslint-disable */
/**
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.0.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
/**
*
* @export
* @interface CommonsAppendOperationInput
*/
export interface CommonsAppendOperationInput {
/**
*
* @type {string}
* @memberof CommonsAppendOperationInput
*/
'cache_id'?: string;
/**
*
* @type {string}
* @memberof CommonsAppendOperationInput
*/
'external_id'?: string;
/**
*
* @type {{ [key: string]: any; }}
* @memberof CommonsAppendOperationInput
*/
'meta'?: { [key: string]: any; };
/**
*
* @type {string}
* @memberof CommonsAppendOperationInput
*/
'operation_type'?: string;
/**
*
* @type {number}
* @memberof CommonsAppendOperationInput
*/
'size'?: number;
/**
*
* @type {string}
* @memberof CommonsAppendOperationInput
*/
'time'?: string;
}

View File

@ -0,0 +1,84 @@
/* tslint:disable */
/* eslint-disable */
/**
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.0.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
/**
*
* @export
* @interface CommonsCacheEntry
*/
export interface CommonsCacheEntry {
/**
*
* @type {string}
* @memberof CommonsCacheEntry
*/
'cache_key'?: string;
/**
*
* @type {string}
* @memberof CommonsCacheEntry
*/
'cache_version'?: string;
/**
*
* @type {string}
* @memberof CommonsCacheEntry
*/
'created_at'?: string;
/**
*
* @type {string}
* @memberof CommonsCacheEntry
*/
'id'?: string;
/**
*
* @type {string}
* @memberof CommonsCacheEntry
*/
'organization_id'?: string;
/**
*
* @type {string}
* @memberof CommonsCacheEntry
*/
'storage_backend_id'?: string;
/**
*
* @type {string}
* @memberof CommonsCacheEntry
*/
'updated_at'?: string;
/**
*
* @type {string}
* @memberof CommonsCacheEntry
*/
'vcs_organization_name'?: string;
/**
*
* @type {string}
* @memberof CommonsCacheEntry
*/
'vcs_ref'?: string;
/**
*
* @type {string}
* @memberof CommonsCacheEntry
*/
'vcs_repository_name'?: string;
}

View File

@ -0,0 +1,81 @@
/* tslint:disable */
/* eslint-disable */
/**
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.0.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
// May contain unused imports in some cases
// @ts-ignore
import { TypesCompletedPart } from './types-completed-part';
/**
*
* @export
* @interface CommonsCommitCacheRequest
*/
export interface CommonsCommitCacheRequest {
/**
*
* @type {{ [key: string]: string; }}
* @memberof CommonsCommitCacheRequest
*/
'annotations'?: { [key: string]: string; };
/**
*
* @type {string}
* @memberof CommonsCommitCacheRequest
*/
'cache_key': string;
/**
*
* @type {string}
* @memberof CommonsCommitCacheRequest
*/
'cache_version': string;
/**
*
* @type {Array<TypesCompletedPart>}
* @memberof CommonsCommitCacheRequest
*/
'parts': Array<TypesCompletedPart>;
/**
     * UploadID. This is not supported for the GCS cache; when passed, it will be ignored.
* @type {string}
* @memberof CommonsCommitCacheRequest
*/
'upload_id'?: string;
/**
     * UploadKey. This is not supported for the GCS cache; when passed, it will be ignored.
* @type {string}
* @memberof CommonsCommitCacheRequest
*/
'upload_key'?: string;
/**
* VCSRef is the ref of the repository in vcs for which cache is being used. This can be a branch, git tag, or pull request ref.
* @type {string}
* @memberof CommonsCommitCacheRequest
*/
'vcs_ref'?: string;
/**
* VCSRepository is the repository name in vcs. It can be of the format <organization>/<repository> or <repository>. While saving the entry, <organization>/ will be trimmed if passed.
* @type {string}
* @memberof CommonsCommitCacheRequest
*/
'vcs_repository'?: string;
/**
*
* @type {string}
* @memberof CommonsCommitCacheRequest
*/
'vcs_type': string;
}
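
An illustrative commit request for the S3 provider follows; it is a sketch with placeholder values, not the shipped client code. The upload_id and upload_key would echo what the reserve call returned and, per the field docs above, are ignored for the GCS provider; the import path and vcs_type value are assumptions.

import type {CommonsCommitCacheRequest} from './index'

const commitRequest: CommonsCommitCacheRequest = {
  cache_key: 'node-deps-abc123',
  cache_version: 'v1',
  upload_id: 'example-upload-id',
  upload_key: 'example-upload-key',
  // ETags as returned for each uploaded part of an S3 multipart upload.
  parts: [
    {PartNumber: 1, ETag: '"9b2cf535f27731c974343645a3985328"'},
    {PartNumber: 2, ETag: '"d41d8cd98f00b204e9800998ecf8427e"'}
  ],
  vcs_type: 'github' // placeholder value
}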

View File

@ -0,0 +1,69 @@
/* tslint:disable */
/* eslint-disable */
/**
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.0.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
// May contain unused imports in some cases
// @ts-ignore
import { CommonsCacheEntry } from './commons-cache-entry';
// May contain unused imports in some cases
// @ts-ignore
import { CommonsGCSCommitCacheResponse } from './commons-gcscommit-cache-response';
// May contain unused imports in some cases
// @ts-ignore
import { CommonsS3CommitCacheResponse } from './commons-s3-commit-cache-response';
/**
*
* @export
* @interface CommonsCommitCacheResponse
*/
export interface CommonsCommitCacheResponse {
/**
*
* @type {{ [key: string]: string; }}
* @memberof CommonsCommitCacheResponse
*/
'annotations'?: { [key: string]: string; };
/**
*
* @type {CommonsCacheEntry}
* @memberof CommonsCommitCacheResponse
*/
'cache_entry'?: CommonsCacheEntry;
/**
*
* @type {CommonsGCSCommitCacheResponse}
* @memberof CommonsCommitCacheResponse
*/
'gcs'?: CommonsGCSCommitCacheResponse;
/**
*
* @type {string}
* @memberof CommonsCommitCacheResponse
*/
'provider'?: string;
/**
*
* @type {CommonsS3CommitCacheResponse}
* @memberof CommonsCommitCacheResponse
*/
's3'?: CommonsS3CommitCacheResponse;
/**
* VCSRepository is the repository name in vcs. It can be of the format <organization>/<repository> or <repository>. While saving the entry, <organization>/ will be trimmed if passed.
* @type {string}
* @memberof CommonsCommitCacheResponse
*/
'vcs_repository'?: string;
}

View File

@ -0,0 +1,54 @@
/* tslint:disable */
/* eslint-disable */
/**
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.0.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
/**
*
* @export
* @interface CommonsDeleteCacheRequest
*/
export interface CommonsDeleteCacheRequest {
/**
*
* @type {{ [key: string]: string; }}
* @memberof CommonsDeleteCacheRequest
*/
'annotations'?: { [key: string]: string; };
/**
*
* @type {string}
* @memberof CommonsDeleteCacheRequest
*/
'cache_key': string;
/**
*
* @type {string}
* @memberof CommonsDeleteCacheRequest
*/
'cache_version': string;
/**
* VCSRef is the ref of the repository in vcs for which cache is being used. This can be a branch, git tag, or pull request ref.
* @type {string}
* @memberof CommonsDeleteCacheRequest
*/
'vcs_ref'?: string;
/**
* VCSRepository is the repository name in vcs. It can be of the format <organization>/<repository> or <repository>. While saving the entry, <organization>/ will be trimmed if passed.
* @type {string}
* @memberof CommonsDeleteCacheRequest
*/
'vcs_repository'?: string;
}

View File

@ -0,0 +1,63 @@
/* tslint:disable */
/* eslint-disable */
/**
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.0.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
// May contain unused imports in some cases
// @ts-ignore
import { CommonsCacheEntry } from './commons-cache-entry';
// May contain unused imports in some cases
// @ts-ignore
import { CommonsGCSDeleteCacheResponse } from './commons-gcsdelete-cache-response';
// May contain unused imports in some cases
// @ts-ignore
import { CommonsS3DeleteCacheResponse } from './commons-s3-delete-cache-response';
/**
*
* @export
* @interface CommonsDeleteCacheResponse
*/
export interface CommonsDeleteCacheResponse {
/**
*
* @type {{ [key: string]: string; }}
* @memberof CommonsDeleteCacheResponse
*/
'annotations'?: { [key: string]: string; };
/**
*
* @type {CommonsCacheEntry}
* @memberof CommonsDeleteCacheResponse
*/
'cache_entry'?: CommonsCacheEntry;
/**
*
* @type {CommonsGCSDeleteCacheResponse}
* @memberof CommonsDeleteCacheResponse
*/
'gcs'?: CommonsGCSDeleteCacheResponse;
/**
*
* @type {string}
* @memberof CommonsDeleteCacheResponse
*/
'provider'?: string;
/**
*
* @type {CommonsS3DeleteCacheResponse}
* @memberof CommonsDeleteCacheResponse
*/
's3'?: CommonsS3DeleteCacheResponse;
}

View File

@ -0,0 +1,57 @@
/* tslint:disable */
/* eslint-disable */
/**
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.0.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
// May contain unused imports in some cases
// @ts-ignore
import { CommonsShortLivedToken } from './commons-short-lived-token';
/**
*
* @export
* @interface CommonsGCSCommitCacheResponse
*/
export interface CommonsGCSCommitCacheResponse {
/**
*
* @type {string}
* @memberof CommonsGCSCommitCacheResponse
*/
'bucket_name'?: string;
/**
     * CacheKey is the resolved cache key, which might contain a prefix or suffix in addition to the cache key provided by the user. This is the actual storage location in GCS.
* @type {string}
* @memberof CommonsGCSCommitCacheResponse
*/
'cache_key': string;
/**
* Method contains the auth method to be used to connect to the GCP storage backend
* @type {string}
* @memberof CommonsGCSCommitCacheResponse
*/
'method'?: string;
/**
*
* @type {string}
* @memberof CommonsGCSCommitCacheResponse
*/
'project_id'?: string;
/**
*
* @type {CommonsShortLivedToken}
* @memberof CommonsGCSCommitCacheResponse
*/
'short_lived_token'?: CommonsShortLivedToken;
}

View File

@ -0,0 +1,36 @@
/* tslint:disable */
/* eslint-disable */
/**
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.0.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
/**
*
* @export
* @interface CommonsGCSDeleteCacheResponse
*/
export interface CommonsGCSDeleteCacheResponse {
/**
*
* @type {string}
* @memberof CommonsGCSDeleteCacheResponse
*/
'cache_key': string;
/**
*
* @type {string}
* @memberof CommonsGCSDeleteCacheResponse
*/
'cache_version': string;
}

View File

@ -0,0 +1,69 @@
/* tslint:disable */
/* eslint-disable */
/**
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.0.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
// May contain unused imports in some cases
// @ts-ignore
import { CommonsShortLivedToken } from './commons-short-lived-token';
/**
*
* @export
* @interface CommonsGCSGetCacheReponse
*/
export interface CommonsGCSGetCacheReponse {
/**
*
* @type {string}
* @memberof CommonsGCSGetCacheReponse
*/
'bucket_name'?: string;
/**
*
* @type {string}
* @memberof CommonsGCSGetCacheReponse
*/
'cache_key'?: string;
/**
*
* @type {string}
* @memberof CommonsGCSGetCacheReponse
*/
'cache_version'?: string;
/**
* Method contains the auth method to be used to connect to the GCP storage backend
* @type {string}
* @memberof CommonsGCSGetCacheReponse
*/
'method'?: string;
/**
*
* @type {string}
* @memberof CommonsGCSGetCacheReponse
*/
'pre_signed_url'?: string;
/**
*
* @type {string}
* @memberof CommonsGCSGetCacheReponse
*/
'project_id'?: string;
/**
*
* @type {CommonsShortLivedToken}
* @memberof CommonsGCSGetCacheReponse
*/
'short_lived_token'?: CommonsShortLivedToken;
}
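
When the get-cache response carries a pre_signed_url, a plain HTTP download is enough and no GCS credentials are needed. A hedged sketch using axios, which this package already depends on; the function name and file paths are illustrative.

import axios from 'axios'
import {createWriteStream} from 'fs'
import type {CommonsGCSGetCacheReponse} from './index'

async function downloadViaPreSignedUrl(gcs: CommonsGCSGetCacheReponse, destination: string): Promise<void> {
  if (!gcs.pre_signed_url) {
    throw new Error('response did not include a pre-signed URL')
  }
  // Stream the object straight to disk instead of buffering it in memory.
  const response = await axios.get(gcs.pre_signed_url, {responseType: 'stream'})
  await new Promise<void>((resolve, reject) => {
    const out = createWriteStream(destination)
    response.data.pipe(out)
    out.on('finish', resolve)
    out.on('error', reject)
  })
}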

View File

@ -0,0 +1,57 @@
/* tslint:disable */
/* eslint-disable */
/**
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.0.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
// May contain unused imports in some cases
// @ts-ignore
import { CommonsShortLivedToken } from './commons-short-lived-token';
/**
*
* @export
* @interface CommonsGCSReserveCacheResponse
*/
export interface CommonsGCSReserveCacheResponse {
/**
*
* @type {string}
* @memberof CommonsGCSReserveCacheResponse
*/
'bucket_name'?: string;
/**
     * CacheKey is the resolved cache key, which might contain a prefix or suffix in addition to the cache key provided by the user. This is the actual storage location in GCS.
* @type {string}
* @memberof CommonsGCSReserveCacheResponse
*/
'cache_key': string;
/**
* Method contains the auth method to be used to connect to the GCP storage backend
* @type {string}
* @memberof CommonsGCSReserveCacheResponse
*/
'method'?: string;
/**
*
* @type {string}
* @memberof CommonsGCSReserveCacheResponse
*/
'project_id'?: string;
/**
*
* @type {CommonsShortLivedToken}
* @memberof CommonsGCSReserveCacheResponse
*/
'short_lived_token'?: CommonsShortLivedToken;
}

View File

@ -0,0 +1,60 @@
/* tslint:disable */
/* eslint-disable */
/**
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.0.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
/**
*
* @export
* @interface CommonsGetCacheRequest
*/
export interface CommonsGetCacheRequest {
/**
*
* @type {{ [key: string]: string; }}
* @memberof CommonsGetCacheRequest
*/
'annotations'?: { [key: string]: string; };
/**
*
* @type {string}
* @memberof CommonsGetCacheRequest
*/
'cache_key': string;
/**
*
* @type {string}
* @memberof CommonsGetCacheRequest
*/
'cache_version': string;
/**
*
* @type {Array<string>}
* @memberof CommonsGetCacheRequest
*/
'restore_keys'?: Array<string>;
/**
* VCSRef is the ref of the repository in vcs for which cache is being used. This can be a branch, git tag, or pull request ref.
* @type {string}
* @memberof CommonsGetCacheRequest
*/
'vcs_ref'?: string;
/**
* VCSRepository is the repository name in vcs. It can be of the format <organization>/<repository> or <repository>. While saving the entry, <organization>/ will be trimmed if passed.
* @type {string}
* @memberof CommonsGetCacheRequest
*/
'vcs_repository'?: string;
}
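
An illustrative get request with placeholder values. restore_keys are an ordered list of fallback keys used when nothing matches cache_key exactly, mirroring restoreKeys in the action-level restore API; the import path is an assumption.

import type {CommonsGetCacheRequest} from './index'

const getRequest: CommonsGetCacheRequest = {
  cache_key: 'node-deps-abc123',
  cache_version: 'v1',
  // Tried in order if the exact key misses.
  restore_keys: ['node-deps-', 'node-'],
  vcs_ref: 'refs/heads/main',
  vcs_repository: 'WarpBuilds/toolkit'
}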

View File

@ -0,0 +1,63 @@
/* tslint:disable */
/* eslint-disable */
/**
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.0.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
// May contain unused imports in some cases
// @ts-ignore
import { CommonsCacheEntry } from './commons-cache-entry';
// May contain unused imports in some cases
// @ts-ignore
import { CommonsGCSGetCacheReponse } from './commons-gcsget-cache-reponse';
// May contain unused imports in some cases
// @ts-ignore
import { CommonsS3GetCacheResponse } from './commons-s3-get-cache-response';
/**
*
* @export
* @interface CommonsGetCacheResponse
*/
export interface CommonsGetCacheResponse {
/**
*
* @type {{ [key: string]: string; }}
* @memberof CommonsGetCacheResponse
*/
'annotations'?: { [key: string]: string; };
/**
*
* @type {CommonsCacheEntry}
* @memberof CommonsGetCacheResponse
*/
'cache_entry'?: CommonsCacheEntry;
/**
*
* @type {CommonsGCSGetCacheReponse}
* @memberof CommonsGetCacheResponse
*/
'gcs'?: CommonsGCSGetCacheReponse;
/**
*
* @type {string}
* @memberof CommonsGetCacheResponse
*/
'provider'?: string;
/**
*
* @type {CommonsS3GetCacheResponse}
* @memberof CommonsGetCacheResponse
*/
's3'?: CommonsS3GetCacheResponse;
}

View File

@ -0,0 +1,66 @@
/* tslint:disable */
/* eslint-disable */
/**
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.0.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
/**
*
* @export
* @interface CommonsOperation
*/
export interface CommonsOperation {
/**
*
* @type {string}
* @memberof CommonsOperation
*/
'cache_id'?: string;
/**
*
* @type {string}
* @memberof CommonsOperation
*/
'created-at'?: string;
/**
*
* @type {string}
* @memberof CommonsOperation
*/
'id'?: string;
/**
*
* @type {{ [key: string]: any; }}
* @memberof CommonsOperation
*/
'meta'?: { [key: string]: any; };
/**
*
* @type {string}
* @memberof CommonsOperation
*/
'operation_type'?: string;
/**
*
* @type {string}
* @memberof CommonsOperation
*/
'organization_id'?: string;
/**
*
* @type {string}
* @memberof CommonsOperation
*/
'updated_at'?: string;
}

View File

@ -0,0 +1,66 @@
/* tslint:disable */
/* eslint-disable */
/**
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.0.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
/**
*
* @export
* @interface CommonsReserveCacheRequest
*/
export interface CommonsReserveCacheRequest {
/**
*
* @type {{ [key: string]: string; }}
* @memberof CommonsReserveCacheRequest
*/
'annotations'?: { [key: string]: string; };
/**
*
* @type {string}
* @memberof CommonsReserveCacheRequest
*/
'cache_key': string;
/**
*
* @type {string}
* @memberof CommonsReserveCacheRequest
*/
'cache_version': string;
/**
     * ContentType contains the content type of the cache. This is not supported for the GCS cache; when passed, it will be ignored.
* @type {string}
* @memberof CommonsReserveCacheRequest
*/
'content_type'?: string;
/**
     * NumberOfChunks contains the number of chunks the cache will be split into. Minimum value: 1. Maximum value: 10000. This is not supported for the GCS cache; when passed, it will be ignored.
* @type {number}
* @memberof CommonsReserveCacheRequest
*/
'number_of_chunks'?: number;
/**
* VCSRef is the ref of the repository in vcs for which cache is being used. This can be a branch, git tag, or pull request ref.
* @type {string}
* @memberof CommonsReserveCacheRequest
*/
'vcs_ref'?: string;
/**
* VCSRepository is the repository name in vcs. It can be of the format <organization>/<repository> or <repository>. While saving the entry, <organization>/ will be trimmed if passed.
* @type {string}
* @memberof CommonsReserveCacheRequest
*/
'vcs_repository'?: string;
}
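
An illustrative reserve request with placeholder values. content_type and number_of_chunks drive the S3 multipart flow and, per the field docs above, are ignored for the GCS provider; the import path and concrete values are assumptions.

import type {CommonsReserveCacheRequest} from './index'

const reserveRequest: CommonsReserveCacheRequest = {
  cache_key: 'node-deps-abc123',
  cache_version: 'v1',
  content_type: 'application/zstd', // placeholder archive content type
  number_of_chunks: 4, // for S3 multipart uploads; presumably one pre-signed URL per chunk in the response
  vcs_ref: 'refs/heads/main',
  vcs_repository: 'WarpBuilds/toolkit'
}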

View File

@ -0,0 +1,54 @@
/* tslint:disable */
/* eslint-disable */
/**
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.0.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
// May contain unused imports in some cases
// @ts-ignore
import { CommonsGCSReserveCacheResponse } from './commons-gcsreserve-cache-response';
// May contain unused imports in some cases
// @ts-ignore
import { CommonsS3ReserveCacheResponse } from './commons-s3-reserve-cache-response';
/**
*
* @export
* @interface CommonsReserveCacheResponse
*/
export interface CommonsReserveCacheResponse {
/**
*
* @type {{ [key: string]: string; }}
* @memberof CommonsReserveCacheResponse
*/
'annotations'?: { [key: string]: string; };
/**
*
* @type {CommonsGCSReserveCacheResponse}
* @memberof CommonsReserveCacheResponse
*/
'gcs'?: CommonsGCSReserveCacheResponse;
/**
*
* @type {string}
* @memberof CommonsReserveCacheResponse
*/
'provider'?: string;
/**
*
* @type {CommonsS3ReserveCacheResponse}
* @memberof CommonsReserveCacheResponse
*/
's3'?: CommonsS3ReserveCacheResponse;
}

View File

@ -0,0 +1,36 @@
/* tslint:disable */
/* eslint-disable */
/**
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.0.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
/**
*
* @export
* @interface CommonsS3CommitCacheResponse
*/
export interface CommonsS3CommitCacheResponse {
/**
*
* @type {string}
* @memberof CommonsS3CommitCacheResponse
*/
'cache_key': string;
/**
*
* @type {string}
* @memberof CommonsS3CommitCacheResponse
*/
'cache_version': string;
}

View File

@ -0,0 +1,36 @@
/* tslint:disable */
/* eslint-disable */
/**
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.0.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
/**
*
* @export
* @interface CommonsS3DeleteCacheResponse
*/
export interface CommonsS3DeleteCacheResponse {
/**
*
* @type {string}
* @memberof CommonsS3DeleteCacheResponse
*/
'cache_key': string;
/**
*
* @type {string}
* @memberof CommonsS3DeleteCacheResponse
*/
'cache_version': string;
}

View File

@ -0,0 +1,48 @@
/* tslint:disable */
/* eslint-disable */
/**
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.0.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
/**
*
* @export
* @interface CommonsS3GetCacheResponse
*/
export interface CommonsS3GetCacheResponse {
/**
*
* @type {{ [key: string]: string; }}
* @memberof CommonsS3GetCacheResponse
*/
'annotations'?: { [key: string]: string; };
/**
*
* @type {string}
* @memberof CommonsS3GetCacheResponse
*/
'cache_key'?: string;
/**
*
* @type {string}
* @memberof CommonsS3GetCacheResponse
*/
'cache_version'?: string;
/**
*
* @type {string}
* @memberof CommonsS3GetCacheResponse
*/
'pre_signed_url'?: string;
}

View File

@ -0,0 +1,42 @@
/* tslint:disable */
/* eslint-disable */
/**
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.0.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
/**
*
* @export
* @interface CommonsS3ReserveCacheResponse
*/
export interface CommonsS3ReserveCacheResponse {
/**
*
* @type {Array<string>}
* @memberof CommonsS3ReserveCacheResponse
*/
'pre_signed_urls'?: Array<string>;
/**
*
* @type {string}
* @memberof CommonsS3ReserveCacheResponse
*/
'upload_id'?: string;
/**
*
* @type {string}
* @memberof CommonsS3ReserveCacheResponse
*/
'upload_key'?: string;
}

View File

@ -0,0 +1,36 @@
/* tslint:disable */
/* eslint-disable */
/**
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.0.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
/**
*
* @export
* @interface CommonsShortLivedToken
*/
export interface CommonsShortLivedToken {
/**
* AccessToken contains the short lived access token to be used to connect to the GCP storage backend
* @type {string}
* @memberof CommonsShortLivedToken
*/
'access_token'?: string;
/**
     * ExpiresAt contains the expiry time of the short lived access token (format: date-time)
* @type {string}
* @memberof CommonsShortLivedToken
*/
'expires_at'?: string;
}
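
A hedged sketch of one way a caller could consume this token together with the GCS fields of a get-cache response: seed an OAuth2 client with the short-lived access token and hand it to @google-cloud/storage, assumed here to be available as a dependency and to accept an injected auth client. This is not necessarily how the shipped cacheHttpClient does it; the function name and paths are illustrative.

import {Storage} from '@google-cloud/storage'
import {OAuth2Client} from 'google-auth-library'
import type {CommonsGCSGetCacheReponse} from './index'

async function downloadWithShortLivedToken(gcs: CommonsGCSGetCacheReponse, destination: string): Promise<void> {
  if (!gcs.bucket_name || !gcs.cache_key || !gcs.short_lived_token?.access_token) {
    throw new Error('incomplete GCS cache location in response')
  }

  // Wrap the short-lived access token in an OAuth2 client; no refresh is attempted here.
  const authClient = new OAuth2Client()
  authClient.setCredentials({access_token: gcs.short_lived_token.access_token})

  const storage = new Storage({projectId: gcs.project_id, authClient})
  await storage.bucket(gcs.bucket_name).file(gcs.cache_key).download({destination})
}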

View File

@ -0,0 +1,22 @@
export * from './commons-append-operation-input';
export * from './commons-cache-entry';
export * from './commons-commit-cache-request';
export * from './commons-commit-cache-response';
export * from './commons-delete-cache-request';
export * from './commons-delete-cache-response';
export * from './commons-gcscommit-cache-response';
export * from './commons-gcsdelete-cache-response';
export * from './commons-gcsget-cache-reponse';
export * from './commons-gcsreserve-cache-response';
export * from './commons-get-cache-request';
export * from './commons-get-cache-response';
export * from './commons-operation';
export * from './commons-reserve-cache-request';
export * from './commons-reserve-cache-response';
export * from './commons-s3-commit-cache-response';
export * from './commons-s3-delete-cache-response';
export * from './commons-s3-get-cache-response';
export * from './commons-s3-reserve-cache-response';
export * from './commons-short-lived-token';
export * from './types-completed-part';
export * from './warp-build-apierror';

View File

@ -0,0 +1,60 @@
/* tslint:disable */
/* eslint-disable */
/**
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.0.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
/**
*
* @export
* @interface TypesCompletedPart
*/
export interface TypesCompletedPart {
/**
* The base64-encoded, 32-bit CRC32 checksum of the object. This will only be present if it was uploaded with the object. With multipart uploads, this may not be a checksum value of the object. For more information about how checksums are calculated with multipart uploads, see Checking object integrity (https://docs.aws.amazon.com/AmazonS3/latest/userguide/checking-object-integrity.html#large-object-checksums) in the Amazon S3 User Guide.
* @type {string}
* @memberof TypesCompletedPart
*/
'ChecksumCRC32'?: string;
/**
* The base64-encoded, 32-bit CRC32C checksum of the object. This will only be present if it was uploaded with the object. With multipart uploads, this may not be a checksum value of the object. For more information about how checksums are calculated with multipart uploads, see Checking object integrity (https://docs.aws.amazon.com/AmazonS3/latest/userguide/checking-object-integrity.html#large-object-checksums) in the Amazon S3 User Guide.
* @type {string}
* @memberof TypesCompletedPart
*/
'ChecksumCRC32C'?: string;
/**
* The base64-encoded, 160-bit SHA-1 digest of the object. This will only be present if it was uploaded with the object. With multipart uploads, this may not be a checksum value of the object. For more information about how checksums are calculated with multipart uploads, see Checking object integrity (https://docs.aws.amazon.com/AmazonS3/latest/userguide/checking-object-integrity.html#large-object-checksums) in the Amazon S3 User Guide.
* @type {string}
* @memberof TypesCompletedPart
*/
'ChecksumSHA1'?: string;
/**
* The base64-encoded, 256-bit SHA-256 digest of the object. This will only be present if it was uploaded with the object. With multipart uploads, this may not be a checksum value of the object. For more information about how checksums are calculated with multipart uploads, see Checking object integrity (https://docs.aws.amazon.com/AmazonS3/latest/userguide/checking-object-integrity.html#large-object-checksums) in the Amazon S3 User Guide.
* @type {string}
* @memberof TypesCompletedPart
*/
'ChecksumSHA256'?: string;
/**
* Entity tag returned when the part was uploaded.
* @type {string}
* @memberof TypesCompletedPart
*/
'ETag'?: string;
/**
* Part number that identifies the part. This is a positive integer between 1 and 10,000.
* @type {number}
* @memberof TypesCompletedPart
*/
'PartNumber'?: number;
}

View File

@ -0,0 +1,54 @@
/* tslint:disable */
/* eslint-disable */
/**
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.0.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
/**
*
* @export
* @interface WarpBuildAPIError
*/
export interface WarpBuildAPIError {
/**
*
* @type {string}
* @memberof WarpBuildAPIError
*/
'code'?: string;
/**
*
* @type {string}
* @memberof WarpBuildAPIError
*/
'description'?: string;
/**
*
* @type {string}
* @memberof WarpBuildAPIError
*/
'message'?: string;
/**
*
* @type {string}
* @memberof WarpBuildAPIError
*/
'sub_code'?: string;
/**
*
* @type {string}
* @memberof WarpBuildAPIError
*/
'sub_message'?: string;
}

View File

@ -0,0 +1,26 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
export { $commons_AppendOperationInput } from './schemas/$commons_AppendOperationInput';
export { $commons_CacheAnnotationsMap } from './schemas/$commons_CacheAnnotationsMap';
export { $commons_CacheEntry } from './schemas/$commons_CacheEntry';
export { $commons_CommitCacheRequest } from './schemas/$commons_CommitCacheRequest';
export { $commons_CommitCacheResponse } from './schemas/$commons_CommitCacheResponse';
export { $commons_DeleteCacheRequest } from './schemas/$commons_DeleteCacheRequest';
export { $commons_DeleteCacheResponse } from './schemas/$commons_DeleteCacheResponse';
export { $commons_GCSCommitCacheResponse } from './schemas/$commons_GCSCommitCacheResponse';
export { $commons_GCSDeleteCacheResponse } from './schemas/$commons_GCSDeleteCacheResponse';
export { $commons_GCSGetCacheReponse } from './schemas/$commons_GCSGetCacheReponse';
export { $commons_GCSReserveCacheResponse } from './schemas/$commons_GCSReserveCacheResponse';
export { $commons_GetCacheRequest } from './schemas/$commons_GetCacheRequest';
export { $commons_GetCacheResponse } from './schemas/$commons_GetCacheResponse';
export { $commons_Operation } from './schemas/$commons_Operation';
export { $commons_ReserveCacheRequest } from './schemas/$commons_ReserveCacheRequest';
export { $commons_ReserveCacheResponse } from './schemas/$commons_ReserveCacheResponse';
export { $commons_S3CommitCacheResponse } from './schemas/$commons_S3CommitCacheResponse';
export { $commons_S3DeleteCacheResponse } from './schemas/$commons_S3DeleteCacheResponse';
export { $commons_S3GetCacheResponse } from './schemas/$commons_S3GetCacheResponse';
export { $commons_S3ReserveCacheResponse } from './schemas/$commons_S3ReserveCacheResponse';
export { $commons_ShortLivedToken } from './schemas/$commons_ShortLivedToken';
export { $types_CompletedPart } from './schemas/$types_CompletedPart';
export { $WarpBuildAPIError } from './schemas/$WarpBuildAPIError';

View File

@ -0,0 +1,22 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
export const $WarpBuildAPIError = {
properties: {
code: {
type: 'string',
},
description: {
type: 'string',
},
message: {
type: 'string',
},
sub_code: {
type: 'string',
},
sub_message: {
type: 'string',
},
},
} as const;

View File

@ -0,0 +1,26 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
export const $commons_AppendOperationInput = {
properties: {
cache_id: {
type: 'string',
},
external_id: {
type: 'string',
},
meta: {
properties: {
},
},
operation_type: {
type: 'string',
},
size: {
type: 'number',
},
time: {
type: 'string',
},
},
} as const;

View File

@ -0,0 +1,9 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
export const $commons_CacheAnnotationsMap = {
type: 'dictionary',
contains: {
type: 'string',
},
} as const;

View File

@ -0,0 +1,37 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
export const $commons_CacheEntry = {
properties: {
cache_key: {
type: 'string',
},
cache_version: {
type: 'string',
},
created_at: {
type: 'string',
},
id: {
type: 'string',
},
organization_id: {
type: 'string',
},
storage_backend_id: {
type: 'string',
},
updated_at: {
type: 'string',
},
vcs_organization_name: {
type: 'string',
},
vcs_ref: {
type: 'string',
},
vcs_repository_name: {
type: 'string',
},
},
} as const;

View File

@ -0,0 +1,41 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
export const $commons_CommitCacheRequest = {
properties: {
annotations: {
type: 'commons_CacheAnnotationsMap',
},
cache_key: {
type: 'string',
isRequired: true,
},
cache_version: {
type: 'string',
isRequired: true,
},
parts: {
type: 'array',
contains: {
type: 'types_CompletedPart',
},
isRequired: true,
},
upload_id: {
type: 'string',
},
upload_key: {
type: 'string',
},
vcs_ref: {
type: 'string',
},
vcs_repository: {
type: 'string',
},
vcs_type: {
type: 'string',
isRequired: true,
},
},
} as const;

View File

@ -0,0 +1,25 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
export const $commons_CommitCacheResponse = {
properties: {
annotations: {
type: 'commons_CacheAnnotationsMap',
},
cache_entry: {
type: 'commons_CacheEntry',
},
gcs: {
type: 'commons_GCSCommitCacheResponse',
},
provider: {
type: 'string',
},
s3: {
type: 'commons_S3CommitCacheResponse',
},
vcs_repository: {
type: 'string',
},
},
} as const;

View File

@ -0,0 +1,24 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
export const $commons_DeleteCacheRequest = {
properties: {
annotations: {
type: 'commons_CacheAnnotationsMap',
},
cache_key: {
type: 'string',
isRequired: true,
},
cache_version: {
type: 'string',
isRequired: true,
},
vcs_ref: {
type: 'string',
},
vcs_repository: {
type: 'string',
},
},
} as const;

View File

@ -0,0 +1,22 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
export const $commons_DeleteCacheResponse = {
properties: {
annotations: {
type: 'commons_CacheAnnotationsMap',
},
cache_entry: {
type: 'commons_CacheEntry',
},
gcs: {
type: 'commons_GCSDeleteCacheResponse',
},
provider: {
type: 'string',
},
s3: {
type: 'commons_S3DeleteCacheResponse',
},
},
} as const;

View File

@ -0,0 +1,23 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
export const $commons_GCSCommitCacheResponse = {
properties: {
bucket_name: {
type: 'string',
},
cache_key: {
type: 'string',
isRequired: true,
},
method: {
type: 'string',
},
project_id: {
type: 'string',
},
short_lived_token: {
type: 'commons_ShortLivedToken',
},
},
} as const;

View File

@ -0,0 +1,15 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
export const $commons_GCSDeleteCacheResponse = {
properties: {
cache_key: {
type: 'string',
isRequired: true,
},
cache_version: {
type: 'string',
isRequired: true,
},
},
} as const;

View File

@ -0,0 +1,28 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
export const $commons_GCSGetCacheReponse = {
properties: {
bucket_name: {
type: 'string',
},
cache_key: {
type: 'string',
},
cache_version: {
type: 'string',
},
method: {
type: 'string',
},
pre_signed_url: {
type: 'string',
},
project_id: {
type: 'string',
},
short_lived_token: {
type: 'commons_ShortLivedToken',
},
},
} as const;

View File

@ -0,0 +1,23 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
export const $commons_GCSReserveCacheResponse = {
properties: {
bucket_name: {
type: 'string',
},
cache_key: {
type: 'string',
isRequired: true,
},
method: {
type: 'string',
},
project_id: {
type: 'string',
},
short_lived_token: {
type: 'commons_ShortLivedToken',
},
},
} as const;

View File

@@ -0,0 +1,30 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
export const $commons_GetCacheRequest = {
properties: {
annotations: {
type: 'commons_CacheAnnotationsMap',
},
cache_key: {
type: 'string',
isRequired: true,
},
cache_version: {
type: 'string',
isRequired: true,
},
restore_keys: {
type: 'array',
contains: {
type: 'string',
},
},
vcs_ref: {
type: 'string',
},
vcs_repository: {
type: 'string',
},
},
} as const;
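
For orientation, a lookup payload built from a primary key and ordered restore keys might look like the sketch below; the values are placeholders, and cache_version is typically derived from the cached paths and compression method (see getCacheVersion in cacheHttpClient).

// Illustrative payload mirroring commons_GetCacheRequest; all values are made up.
const getCacheRequest = {
  cache_key: 'node-modules-linux-x64-5a1c', // required primary key
  cache_version: 'f86d3e4c', // required, derived from paths + compression
  restore_keys: ['node-modules-linux-x64-', 'node-modules-'], // ordered prefix fallbacks
  vcs_repository: 'WarpBuilds/toolkit',
  vcs_ref: 'refs/heads/main'
}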

View File

@@ -0,0 +1,22 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
export const $commons_GetCacheResponse = {
properties: {
annotations: {
type: 'commons_CacheAnnotationsMap',
},
cache_entry: {
type: 'commons_CacheEntry',
},
gcs: {
type: 'commons_GCSGetCacheReponse',
},
provider: {
type: 'string',
},
s3: {
type: 'commons_S3GetCacheResponse',
},
},
} as const;
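
Because the response carries a provider discriminator with backend-specific payloads, a caller presumably branches on it before downloading; a sketch with made-up types and names:

// Hypothetical dispatch on a commons_GetCacheResponse-shaped object: pick the
// download source advertised by whichever backend served this entry.
type GetCacheResponseLike = {
  provider?: string
  s3?: {pre_signed_url?: string}
  gcs?: {pre_signed_url?: string}
}

function resolveDownloadUrl(res: GetCacheResponseLike): string {
  switch (res.provider) {
    case 's3':
      if (!res.s3?.pre_signed_url) throw new Error('missing S3 pre-signed URL')
      return res.s3.pre_signed_url
    case 'gcs':
      if (!res.gcs?.pre_signed_url) throw new Error('missing GCS pre-signed URL')
      return res.gcs.pre_signed_url
    default:
      throw new Error(`unsupported cache provider: ${res.provider}`)
  }
}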

View File

@@ -0,0 +1,29 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
export const $commons_Operation = {
properties: {
cache_id: {
type: 'string',
},
'created-at': {
type: 'string',
},
id: {
type: 'string',
},
meta: {
properties: {
},
},
operation_type: {
type: 'string',
},
organization_id: {
type: 'string',
},
updated_at: {
type: 'string',
},
},
} as const;

View File

@@ -0,0 +1,30 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
export const $commons_ReserveCacheRequest = {
properties: {
annotations: {
type: 'commons_CacheAnnotationsMap',
},
cache_key: {
type: 'string',
isRequired: true,
},
cache_version: {
type: 'string',
isRequired: true,
},
content_type: {
type: 'string',
},
number_of_chunks: {
type: 'number',
},
vcs_ref: {
type: 'string',
},
vcs_repository: {
type: 'string',
},
},
} as const;
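
A sketch of assembling the reserve payload ahead of a multipart upload; the 32 MiB chunk size and the content type are arbitrary choices for the illustration, not values taken from this change.

import * as fs from 'fs'

// Illustrative chunk size only; the real client may choose differently.
const CHUNK_SIZE_BYTES = 32 * 1024 * 1024

function buildReserveRequest(archivePath: string): Record<string, unknown> {
  const archiveSize = fs.statSync(archivePath).size
  return {
    cache_key: 'node-modules-linux-x64-5a1c', // required
    cache_version: 'f86d3e4c', // required
    content_type: 'application/zstd',
    // Presumably tells the backend how many part URLs (S3) or how much space
    // (GCS) to prepare for the upload.
    number_of_chunks: Math.ceil(archiveSize / CHUNK_SIZE_BYTES),
    vcs_repository: 'WarpBuilds/toolkit',
    vcs_ref: 'refs/heads/main'
  }
}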

View File

@@ -0,0 +1,19 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
export const $commons_ReserveCacheResponse = {
properties: {
annotations: {
type: 'commons_CacheAnnotationsMap',
},
gcs: {
type: 'commons_GCSReserveCacheResponse',
},
provider: {
type: 'string',
},
s3: {
type: 'commons_S3ReserveCacheResponse',
},
},
} as const;

View File

@@ -0,0 +1,15 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
export const $commons_S3CommitCacheResponse = {
properties: {
cache_key: {
type: 'string',
isRequired: true,
},
cache_version: {
type: 'string',
isRequired: true,
},
},
} as const;

View File

@@ -0,0 +1,15 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
export const $commons_S3DeleteCacheResponse = {
properties: {
cache_key: {
type: 'string',
isRequired: true,
},
cache_version: {
type: 'string',
isRequired: true,
},
},
} as const;

View File

@@ -0,0 +1,19 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
export const $commons_S3GetCacheResponse = {
properties: {
annotations: {
type: 'commons_CacheAnnotationsMap',
},
cache_key: {
type: 'string',
},
cache_version: {
type: 'string',
},
pre_signed_url: {
type: 'string',
},
},
} as const;

View File

@@ -0,0 +1,19 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
export const $commons_S3ReserveCacheResponse = {
properties: {
pre_signed_urls: {
type: 'array',
contains: {
type: 'string',
},
},
upload_id: {
type: 'string',
},
upload_key: {
type: 'string',
},
},
} as const;
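
Tying this to types_CompletedPart further down: a rough sketch of uploading each chunk to its pre-signed URL and recording the ETag that S3 returns, which is what the commit request's parts array carries. Chunking, retries, and error handling are omitted.

import axios from 'axios'

// Hypothetical multipart upload against the pre_signed_urls from a
// commons_S3ReserveCacheResponse. Each PUT yields an ETag that must be echoed
// back as a types_CompletedPart when committing the cache.
async function uploadChunks(
  preSignedUrls: string[],
  chunks: Buffer[]
): Promise<{PartNumber: number; ETag: string}[]> {
  const parts: {PartNumber: number; ETag: string}[] = []
  for (let i = 0; i < preSignedUrls.length; i++) {
    const response = await axios.put(preSignedUrls[i], chunks[i], {
      headers: {'Content-Type': 'application/octet-stream'},
      maxBodyLength: Infinity
    })
    parts.push({PartNumber: i + 1, ETag: response.headers['etag'] as string})
  }
  return parts
}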

View File

@@ -0,0 +1,13 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
export const $commons_ShortLivedToken = {
properties: {
access_token: {
type: 'string',
},
expires_at: {
type: 'string',
},
},
} as const;
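
This short-lived token is what pairs with the new google-auth-library and @google-cloud/storage dependencies. Below is a sketch, under the assumption that access_token is a plain OAuth2 bearer token scoped to the cache bucket, of turning it into an authenticated Storage client; it is illustrative, not the code added in this change.

import {Storage} from '@google-cloud/storage'
import {OAuth2Client} from 'google-auth-library'

// Build a GCS client from a commons_ShortLivedToken-style access token.
function storageFromShortLivedToken(accessToken: string, projectId?: string): Storage {
  const authClient = new OAuth2Client()
  authClient.setCredentials({access_token: accessToken})
  return new Storage({projectId, authClient})
}

// Hypothetical usage with made-up bucket and object names:
// await storageFromShortLivedToken(token, projectId)
//   .bucket('warpbuild-cache')
//   .file('org-id/cache-key/archive.tzst')
//   .download({destination: '/tmp/archive.tzst'})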

View File

@@ -0,0 +1,25 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
export const $types_CompletedPart = {
properties: {
ChecksumCRC32: {
type: 'string',
},
ChecksumCRC32C: {
type: 'string',
},
ChecksumSHA1: {
type: 'string',
},
ChecksumSHA256: {
type: 'string',
},
ETag: {
type: 'string',
},
PartNumber: {
type: 'number',
},
},
} as const;

View File

@@ -0,0 +1,7 @@
{
"extends": "./tsconfig.json",
"compilerOptions": {
"module": "esnext",
"outDir": "dist/esm"
}
}

View File

@@ -1,8 +1,18 @@
import {deleteCache, restoreCache, saveCache} from './cache'
import {getCacheVersion} from './internal/cacheHttpClient'
import {getCompressionMethod} from './internal/cacheUtils'
process.env['WARP_CACHE_URL'] = 'http://localhost:8002'
process.env['WARPBUILD_CACHE_URL'] = 'https://cache.dev.warpbuild.dev'
// process.env['WARPBUILD_CACHE_URL'] = 'http://localhost:8000'
// process.env['WARPBUILD_CACHE_URL'] =
// 'https://6134-36-255-234-176.ngrok-free.app'
process.env['RUNNER_TEMP'] = '/Users/prajjwal/Repos/warpbuild/playground/tmp_fs'
process.env['NODE_DEBUG'] = 'http'
process.env['RUNNER_DEBUG'] = '1'
process.env['WARPBUILD_RUNNER_VERIFICATION_TOKEN'] =
'eyJhbGciOiJFUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE3MTM0MTg3MzMsInJlcG8iOiJiZW5jaG1hcmtzIiwicmVwb093bmVyIjoiV2FycEJ1aWxkcyIsIngtd2FycGJ1aWxkLW9yZ2FuaXphdGlvbi1pZCI6IndmbW4wODBlaWY4cm5pd3EifQ.a435J9ccjs9V_FzQMdbwTvXOYU8hvRieYkXM7yumlWAJyxDTsq4mi3CP1Ob9y6nLEKr35TYqGwxKFSTOW1oxYQ'
process.env['GITHUB_REPOSITORY'] = 'Warpbuilds/backend-cache'
process.env['GITHUB_REF'] = 'refs/heads/main'
// saveCache(
// ['/Users/prajjwal/Repos/warpbuild/playground/test_fs'],
@@ -10,12 +20,29 @@ process.env['NODE_DEBUG'] = 'http'
// true
// )
// restoreCache(
// saveCache(
// ['/Users/prajjwal/Repos/warpbuild/playground/test_fs'],
// 'test-fs-local-key',
// [],
// {},
// 'test-fs-local-key-2',
// true
// )
// deleteCache(['test-fs-local-key'])
// saveCache(
// ['/Users/prajjwal/Repos/warpbuild/playground/test_fs'],
// 'test-fs-local-key',
// true,
// true
// )
restoreCache(
['/Users/prajjwal/Repos/warpbuild/playground/test_fs'],
'test-fs-local-key',
['test-fs'],
{},
true,
false
)
// deleteCache(
// ['/Users/prajjwal/Repos/warpbuild/playground/test_fs'],
// 'test-fs-local-key'
// )
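
For readers skimming this playground file: the positional arguments of the restoreCache call above presumably map as annotated below (a commented restatement for orientation, not new behavior).

// restoreCache(
//   paths,                  // ['/Users/prajjwal/Repos/warpbuild/playground/test_fs']
//   primaryKey,             // 'test-fs-local-key'
//   restoreKeys,            // ['test-fs'], ordered fallback prefixes
//   downloadOptions,        // {}
//   enableCrossOsArchive,   // true: accept archives created on another OS
//   enableCrossArchArchive  // false: reject archives created on another arch
// )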