mirror of https://github.com/actions/toolkit
Fix typo in function name (#590)
parent fbdf27470c
commit d972090333
@@ -2,10 +2,10 @@ import {promises as fs} from 'fs'
 import * as path from 'path'
 import * as cacheUtils from '../src/internal/cacheUtils'

-test('getArchiveFileSizeIsBytes returns file size', () => {
+test('getArchiveFileSizeInBytes returns file size', () => {
   const filePath = path.join(__dirname, '__fixtures__', 'helloWorld.txt')

-  const size = cacheUtils.getArchiveFileSizeIsBytes(filePath)
+  const size = cacheUtils.getArchiveFileSizeInBytes(filePath)

   expect(size).toBe(11)
 })
@@ -123,8 +123,8 @@ test('restore with gzip compressed cache found', async () => {
   const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')

   const fileSize = 142
-  const getArchiveFileSizeIsBytesMock = jest
-    .spyOn(cacheUtils, 'getArchiveFileSizeIsBytes')
+  const getArchiveFileSizeInBytesMock = jest
+    .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
     .mockReturnValue(fileSize)

   const extractTarMock = jest.spyOn(tar, 'extractTar')

@@ -147,7 +147,7 @@ test('restore with gzip compressed cache found', async () => {
     archivePath,
     undefined
   )
-  expect(getArchiveFileSizeIsBytesMock).toHaveBeenCalledWith(archivePath)
+  expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)

   expect(extractTarMock).toHaveBeenCalledTimes(1)
   expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression)

@@ -184,8 +184,8 @@ test('restore with zstd compressed cache found', async () => {
   const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')

   const fileSize = 62915000
-  const getArchiveFileSizeIsBytesMock = jest
-    .spyOn(cacheUtils, 'getArchiveFileSizeIsBytes')
+  const getArchiveFileSizeInBytesMock = jest
+    .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
     .mockReturnValue(fileSize)

   const extractTarMock = jest.spyOn(tar, 'extractTar')

@@ -206,7 +206,7 @@ test('restore with zstd compressed cache found', async () => {
     archivePath,
     undefined
   )
-  expect(getArchiveFileSizeIsBytesMock).toHaveBeenCalledWith(archivePath)
+  expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
   expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`)

   expect(extractTarMock).toHaveBeenCalledTimes(1)

@@ -241,8 +241,8 @@ test('restore with cache found for restore key', async () => {
   const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')

   const fileSize = 142
-  const getArchiveFileSizeIsBytesMock = jest
-    .spyOn(cacheUtils, 'getArchiveFileSizeIsBytes')
+  const getArchiveFileSizeInBytesMock = jest
+    .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
     .mockReturnValue(fileSize)

   const extractTarMock = jest.spyOn(tar, 'extractTar')

@@ -263,7 +263,7 @@ test('restore with cache found for restore key', async () => {
     archivePath,
     undefined
   )
-  expect(getArchiveFileSizeIsBytesMock).toHaveBeenCalledWith(archivePath)
+  expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
   expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`)

   expect(extractTarMock).toHaveBeenCalledTimes(1)
@@ -49,7 +49,7 @@ test('save with large cache outputs should fail', async () => {

   const cacheSize = 6 * 1024 * 1024 * 1024 //~6GB, over the 5GB limit
   jest
-    .spyOn(cacheUtils, 'getArchiveFileSizeIsBytes')
+    .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
     .mockReturnValueOnce(cacheSize)
   const compression = CompressionMethod.Gzip
   const getCompressionMock = jest
@@ -104,7 +104,7 @@ export async function restoreCache(
       await listTar(archivePath, compressionMethod)
     }

-    const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath)
+    const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath)
     core.info(
       `Cache Size: ~${Math.round(
         archiveFileSize / (1024 * 1024)

@@ -172,7 +172,7 @@ export async function saveCache(
   }

   const fileSizeLimit = 5 * 1024 * 1024 * 1024 // 5GB per repo limit
-  const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath)
+  const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath)
   core.debug(`File Size: ${archiveFileSize}`)
   if (archiveFileSize > fileSizeLimit) {
     throw new Error(
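As an aside, a minimal TypeScript sketch of the size arithmetic these cache.ts hunks rely on (the 5 GB limit, the MB rounding, and the example sizes are taken from the hunks and tests above; the helper names formatCacheSize and exceedsRepoLimit are illustrative, not part of the package):

    // Illustrative sketch only; the constants and formulas come from the diff above.
    const fileSizeLimit = 5 * 1024 * 1024 * 1024 // 5 GB per repo limit = 5368709120 bytes

    // Mirrors the `Cache Size: ~X MB (Y B)` pattern logged by restoreCache/saveCache.
    function formatCacheSize(sizeInBytes: number): string {
      return `~${Math.round(sizeInBytes / (1024 * 1024))} MB (${sizeInBytes} B)`
    }

    // Hypothetical helper name; the comparison itself matches the saveCache hunk.
    function exceedsRepoLimit(archiveFileSize: number): boolean {
      return archiveFileSize > fileSizeLimit
    }

    console.log(formatCacheSize(62915000)) // "~60 MB (62915000 B)", as the zstd restore test expects
    console.log(formatCacheSize(142)) // "~0 MB (142 B)", as the restore-key test expects
    console.log(exceedsRepoLimit(6 * 1024 * 1024 * 1024)) // true: the ~6GB save test is expected to fail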
@@ -219,7 +219,7 @@ async function uploadFile(
   options?: UploadOptions
 ): Promise<void> {
   // Upload Chunks
-  const fileSize = fs.statSync(archivePath).size
+  const fileSize = utils.getArchiveFileSizeInBytes(archivePath)
   const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`)
   const fd = fs.openSync(archivePath, 'r')
   const uploadOptions = getUploadOptions(options)

@@ -300,7 +300,7 @@ export async function saveCache(

   // Commit Cache
   core.debug('Commiting cache')
-  const cacheSize = utils.getArchiveFileSizeIsBytes(archivePath)
+  const cacheSize = utils.getArchiveFileSizeInBytes(archivePath)
   core.info(
     `Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`
   )
@@ -35,7 +35,7 @@ export async function createTempDirectory(): Promise<string> {
   return dest
 }

-export function getArchiveFileSizeIsBytes(filePath: string): number {
+export function getArchiveFileSizeInBytes(filePath: string): number {
   return fs.statSync(filePath).size
 }

@@ -190,7 +190,7 @@ export async function downloadCacheHttpClient(

   if (contentLengthHeader) {
     const expectedLength = parseInt(contentLengthHeader)
-    const actualLength = utils.getArchiveFileSizeIsBytes(archivePath)
+    const actualLength = utils.getArchiveFileSizeInBytes(archivePath)

     if (actualLength !== expectedLength) {
       throw new Error(
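For quick reference, a minimal sketch of the renamed helper and the download length check it now backs (the fs.statSync body and the length comparison match the hunks above; the verifyDownloadedSize wrapper and its error message are illustrative assumptions, since the actual error text is truncated in the diff):

    // Illustrative sketch; the helper body matches the cacheUtils hunk above.
    import * as fs from 'fs'

    export function getArchiveFileSizeInBytes(filePath: string): number {
      return fs.statSync(filePath).size
    }

    // Hypothetical wrapper: the expectedLength/actualLength comparison mirrors the
    // downloadCacheHttpClient hunk, but the thrown message is not shown there.
    function verifyDownloadedSize(archivePath: string, contentLengthHeader: string): void {
      const expectedLength = parseInt(contentLengthHeader)
      const actualLength = getArchiveFileSizeInBytes(archivePath)
      if (actualLength !== expectedLength) {
        throw new Error(
          `Downloaded archive is ${actualLength} bytes but ${expectedLength} bytes were expected`
        )
      }
    }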