1
0
Fork 0

Support upload from named pipes (#748)

Named pipes report their file size as 0, which leads to reading the whole
content into memory (0 is less than 64K). This adds an additional check to
make sure that the passed-in path is not a named pipe, and in that case
opts for the create-temp-file-to-gzip code path.

When running on GitHub Actions infrastructure on a `windows` node, named
pipes can be created using `mkfifo` from MSYS2. In that case, `fs.Stats`'
`isFIFO()` returns `false` rather than `true` as expected. This case is
detected by `process.platform` being `win32` and the passed file having a
length of 0.

As a side note, when MSYS2's `mkfifo` is run, a pipe file is created:

```
prw-rw-rw- 1 User None  0 Mar 31 12:58 pipe
```

If `fs.stat` is invoked at this point, an `ENOENT` error will be thrown. As
soon as the pipe is written to, this pipe file is replaced by two files
with the same name:

```
-rw-r--r-- 1 User None  0 Mar 31 13:00 pipe
-rw-r--r-- 1 User None  0 Mar 31 13:00 pipe
```

And at this point `fs.stat`'s `isFIFO()` returns `false`, even though the
file still acts as a named pipe.
pull/949/head
Zoran Regvart 2021-11-29 23:19:02 +01:00 committed by GitHub
parent 45d2019161
commit 7932c147a0
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 65 additions and 4 deletions

View File

@ -2,6 +2,10 @@ import * as http from 'http'
import * as io from '../../io/src/io' import * as io from '../../io/src/io'
import * as net from 'net' import * as net from 'net'
import * as path from 'path' import * as path from 'path'
import {mocked} from 'ts-jest/utils'
import {exec, execSync} from 'child_process'
import {createGunzip} from 'zlib'
import {promisify} from 'util'
import {UploadHttpClient} from '../src/internal/upload-http-client' import {UploadHttpClient} from '../src/internal/upload-http-client'
import * as core from '@actions/core' import * as core from '@actions/core'
import {promises as fs} from 'fs' import {promises as fs} from 'fs'
@ -174,6 +178,56 @@ describe('Upload Tests', () => {
expect(uploadResult.uploadSize).toEqual(expectedTotalSize) expect(uploadResult.uploadSize).toEqual(expectedTotalSize)
}) })
/**
 * Checks whether the `mkfifo` utility is available on the current machine,
 * so the named-pipe test can be skipped where it is missing.
 *
 * @returns `true` when `which mkfifo` resolves to a non-empty path,
 *          `false` when `which` exits non-zero (utility absent).
 */
function hasMkfifo(): boolean {
  try {
    // reading stdout also makes sure the child's output pipe is drained
    const whichOutput = execSync('which mkfifo').toString()
    return whichOutput.length > 0
  } catch (e) {
    // `which` exits non-zero (and execSync throws) when mkfifo is not found
    return false
  }
}
// mkfifo is unavailable on stock Windows shells, so only run the named-pipe
// test where the utility exists; elsewhere it is registered as skipped
const withMkfifoIt = hasMkfifo() ? it : it.skip
withMkfifoIt(
'Upload Artifact with content from named pipe - Success',
async () => {
// create a named pipe 'pipe' with content 'hello pipe'
const content = Buffer.from('hello pipe')
const pipeFilePath = path.join(root, 'pipe')
await promisify(exec)('mkfifo pipe', {cwd: root})
// don't want to await here as that would block until read — the reader is
// the upload call below, so awaiting the write first would deadlock the test
fs.writeFile(pipeFilePath, content)
const artifactName = 'successful-artifact'
// single-entry specification pointing the uploader at the pipe just created
const uploadSpecification: UploadSpecification[] = [
{
absoluteFilePath: pipeFilePath,
uploadFilePath: `${artifactName}/pipe`
}
]
const uploadUrl = `${getRuntimeUrl()}_apis/resources/Containers/13`
const uploadHttpClient = new UploadHttpClient()
const uploadResult = await uploadHttpClient.uploadArtifactToFileContainer(
uploadUrl,
uploadSpecification
)
// accesses the ReadableStream that was passed into sendStream
// (first mocked call, third argument)
// eslint-disable-next-line @typescript-eslint/unbound-method
const stream = mocked(HttpClient.prototype.sendStream).mock.calls[0][2]
expect(stream).not.toBeNull()
// decompresses the passed stream — per the commit description, zero-size
// pipes take the gzip-to-temp-file path, so the payload arrives gzipped
const data: Buffer[] = []
for await (const chunk of stream.pipe(createGunzip())) {
data.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk as string))
}
const uploaded = Buffer.concat(data)
expect(uploadResult.failedItems.length).toEqual(0)
// the gunzipped upload must round-trip back to the original pipe content
expect(uploaded).toEqual(content)
}
)
it('Upload Artifact - Failed Single File Upload', async () => { it('Upload Artifact - Failed Single File Upload', async () => {
const uploadSpecification: UploadSpecification[] = [ const uploadSpecification: UploadSpecification[] = [
{ {

View File

@ -219,16 +219,22 @@ export class UploadHttpClient {
httpClientIndex: number, httpClientIndex: number,
parameters: UploadFileParameters parameters: UploadFileParameters
): Promise<UploadFileResult> { ): Promise<UploadFileResult> {
const totalFileSize: number = (await stat(parameters.file)).size const fileStat: fs.Stats = await stat(parameters.file)
const totalFileSize = fileStat.size
// on Windows with mkfifo from MSYS2 stats.isFIFO returns false, so we check if running on Windows node and
// if the file has size of 0 to compensate
const isFIFO =
fileStat.isFIFO() || (process.platform === 'win32' && totalFileSize === 0)
let offset = 0 let offset = 0
let isUploadSuccessful = true let isUploadSuccessful = true
let failedChunkSizes = 0 let failedChunkSizes = 0
let uploadFileSize = 0 let uploadFileSize = 0
let isGzip = true let isGzip = true
// the file that is being uploaded is less than 64k in size, to increase throughput and to minimize disk I/O // the file that is being uploaded is less than 64k in size to increase throughput and to minimize disk I/O
// for creating a new GZip file, an in-memory buffer is used for compression // for creating a new GZip file, an in-memory buffer is used for compression
if (totalFileSize < 65536) { // with named pipes the file size is reported as zero in that case don't read the file in memory
if (!isFIFO && totalFileSize < 65536) {
const buffer = await createGZipFileInBuffer(parameters.file) const buffer = await createGZipFileInBuffer(parameters.file)
//An open stream is needed in the event of a failure and we need to retry. If a NodeJS.ReadableStream is directly passed in, //An open stream is needed in the event of a failure and we need to retry. If a NodeJS.ReadableStream is directly passed in,
@ -287,7 +293,8 @@ export class UploadHttpClient {
let uploadFilePath = tempFile.path let uploadFilePath = tempFile.path
// compression did not help with size reduction, use the original file for upload and delete the temp GZip file // compression did not help with size reduction, use the original file for upload and delete the temp GZip file
if (totalFileSize < uploadFileSize) { // for named pipes totalFileSize is zero, this assumes compression did help
if (!isFIFO && totalFileSize < uploadFileSize) {
uploadFileSize = totalFileSize uploadFileSize = totalFileSize
uploadFilePath = parameters.file uploadFilePath = parameters.file
isGzip = false isGzip = false