
Fix typo in function name (#590)

pull/784/head
Minh Nguyen 2021-05-03 18:09:44 +03:00 committed by GitHub
parent fbdf27470c
commit d972090333
7 changed files with 18 additions and 18 deletions

View File

@@ -2,10 +2,10 @@ import {promises as fs} from 'fs'
 import * as path from 'path'
 import * as cacheUtils from '../src/internal/cacheUtils'
-test('getArchiveFileSizeIsBytes returns file size', () => {
+test('getArchiveFileSizeInBytes returns file size', () => {
   const filePath = path.join(__dirname, '__fixtures__', 'helloWorld.txt')
-  const size = cacheUtils.getArchiveFileSizeIsBytes(filePath)
+  const size = cacheUtils.getArchiveFileSizeInBytes(filePath)
   expect(size).toBe(11)
 })

View File

@@ -123,8 +123,8 @@ test('restore with gzip compressed cache found', async () => {
   const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
   const fileSize = 142
-  const getArchiveFileSizeIsBytesMock = jest
-    .spyOn(cacheUtils, 'getArchiveFileSizeIsBytes')
+  const getArchiveFileSizeInBytesMock = jest
+    .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
     .mockReturnValue(fileSize)
   const extractTarMock = jest.spyOn(tar, 'extractTar')
@@ -147,7 +147,7 @@ test('restore with gzip compressed cache found', async () => {
     archivePath,
     undefined
   )
-  expect(getArchiveFileSizeIsBytesMock).toHaveBeenCalledWith(archivePath)
+  expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
   expect(extractTarMock).toHaveBeenCalledTimes(1)
   expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression)
@@ -184,8 +184,8 @@ test('restore with zstd compressed cache found', async () => {
   const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
   const fileSize = 62915000
-  const getArchiveFileSizeIsBytesMock = jest
-    .spyOn(cacheUtils, 'getArchiveFileSizeIsBytes')
+  const getArchiveFileSizeInBytesMock = jest
+    .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
     .mockReturnValue(fileSize)
   const extractTarMock = jest.spyOn(tar, 'extractTar')
@@ -206,7 +206,7 @@ test('restore with zstd compressed cache found', async () => {
     archivePath,
     undefined
   )
-  expect(getArchiveFileSizeIsBytesMock).toHaveBeenCalledWith(archivePath)
+  expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
   expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`)
   expect(extractTarMock).toHaveBeenCalledTimes(1)
@@ -241,8 +241,8 @@ test('restore with cache found for restore key', async () => {
   const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
   const fileSize = 142
-  const getArchiveFileSizeIsBytesMock = jest
-    .spyOn(cacheUtils, 'getArchiveFileSizeIsBytes')
+  const getArchiveFileSizeInBytesMock = jest
+    .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
     .mockReturnValue(fileSize)
   const extractTarMock = jest.spyOn(tar, 'extractTar')
@@ -263,7 +263,7 @@ test('restore with cache found for restore key', async () => {
     archivePath,
     undefined
   )
-  expect(getArchiveFileSizeIsBytesMock).toHaveBeenCalledWith(archivePath)
+  expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
   expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`)
   expect(extractTarMock).toHaveBeenCalledTimes(1)

View File

@@ -49,7 +49,7 @@ test('save with large cache outputs should fail', async () => {
   const cacheSize = 6 * 1024 * 1024 * 1024 //~6GB, over the 5GB limit
   jest
-    .spyOn(cacheUtils, 'getArchiveFileSizeIsBytes')
+    .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
     .mockReturnValueOnce(cacheSize)
   const compression = CompressionMethod.Gzip
   const getCompressionMock = jest

View File

@@ -104,7 +104,7 @@ export async function restoreCache(
     await listTar(archivePath, compressionMethod)
   }
-  const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath)
+  const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath)
   core.info(
     `Cache Size: ~${Math.round(
       archiveFileSize / (1024 * 1024)
@@ -172,7 +172,7 @@ export async function saveCache(
   }
   const fileSizeLimit = 5 * 1024 * 1024 * 1024 // 5GB per repo limit
-  const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath)
+  const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath)
   core.debug(`File Size: ${archiveFileSize}`)
   if (archiveFileSize > fileSizeLimit) {
     throw new Error(

View File

@@ -219,7 +219,7 @@ async function uploadFile(
   options?: UploadOptions
 ): Promise<void> {
   // Upload Chunks
-  const fileSize = fs.statSync(archivePath).size
+  const fileSize = utils.getArchiveFileSizeInBytes(archivePath)
   const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`)
   const fd = fs.openSync(archivePath, 'r')
   const uploadOptions = getUploadOptions(options)
@@ -300,7 +300,7 @@ export async function saveCache(
   // Commit Cache
   core.debug('Commiting cache')
-  const cacheSize = utils.getArchiveFileSizeIsBytes(archivePath)
+  const cacheSize = utils.getArchiveFileSizeInBytes(archivePath)
   core.info(
     `Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`
   )

View File

@@ -35,7 +35,7 @@ export async function createTempDirectory(): Promise<string> {
   return dest
 }
-export function getArchiveFileSizeIsBytes(filePath: string): number {
+export function getArchiveFileSizeInBytes(filePath: string): number {
   return fs.statSync(filePath).size
 }
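
The hunk above is the rename itself; every other file in this commit only updates call sites and mocks to match. As a minimal sketch of what a caller looks like after the change (the import path follows the other hunks, but the surrounding names and path here are illustrative, not part of the diff):

import * as utils from './internal/cacheUtils'

// Hypothetical caller: measure an archive on disk with the renamed helper,
// which simply wraps fs.statSync(filePath).size as defined above.
const archivePath = '/tmp/example-cache.tgz' // illustrative path
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath)
console.log(`~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`)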

View File

@@ -190,7 +190,7 @@ export async function downloadCacheHttpClient(
   if (contentLengthHeader) {
     const expectedLength = parseInt(contentLengthHeader)
-    const actualLength = utils.getArchiveFileSizeIsBytes(archivePath)
+    const actualLength = utils.getArchiveFileSizeInBytes(archivePath)
     if (actualLength !== expectedLength) {
       throw new Error(
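
For context, the check this last hunk touches compares the HTTP Content-Length header against the size of the file actually written to disk and fails the download when they differ. A self-contained sketch of that pattern, assuming the helper definition from the cacheUtils hunk above; the wrapper function name and error text are illustrative:

import * as fs from 'fs'

function getArchiveFileSizeInBytes(filePath: string): number {
  return fs.statSync(filePath).size
}

// Illustrative wrapper around the length check shown in the hunk above.
function verifyDownloadedLength(archivePath: string, contentLengthHeader?: string): void {
  if (contentLengthHeader) {
    const expectedLength = parseInt(contentLengthHeader)
    const actualLength = getArchiveFileSizeInBytes(archivePath)
    if (actualLength !== expectedLength) {
      // Illustrative message: the real error text is cut off in the hunk above.
      throw new Error(
        `Incomplete download: expected ${expectedLength} bytes, got ${actualLength}`
      )
    }
  }
}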