
Merge pull request #1724 from actions/bethanyj28/update-unzip-stream

Use latest `unzip-stream` and `unzip.Extract`
pull/1725/head
Bethany 2024-04-24 09:09:09 -04:00 committed by GitHub
commit 29885a805e
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
5 changed files with 28 additions and 64 deletions
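
The change itself is small: the manual `unzip.Parse()` pipeline in the artifact download path is replaced with `unzip-stream`'s `Extract` writable stream, which unpacks a zip straight into a target directory. As a rough, hedged sketch of that API (the `artifact.zip` file and `out` directory below are placeholders, not anything from this repository):

import {createReadStream} from 'fs'
import * as unzip from 'unzip-stream'

// Illustrative only: stream a local zip into a directory using unzip.Extract.
// 'artifact.zip' and 'out' are hypothetical paths.
createReadStream('artifact.zip')
  .pipe(unzip.Extract({path: 'out'})) // writes each entry under out/
  .on('close', () => console.log('extraction complete')) // Extract signals completion with 'close'
  .on('error', (err: Error) => console.error('extraction failed', err))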

View File

@@ -1,5 +1,9 @@
 # @actions/artifact Releases
 
+### 2.1.7
+
+- Update unzip-stream dependency and reverted to using `unzip.Extract()`
+
 ### 2.1.6
 
 - Will retry on invalid request responses.

View File

@@ -200,14 +200,12 @@ describe('download-artifact', () => {
         }
       )
 
-      await expect(
-        downloadArtifactPublic(
-          fixtures.artifactID,
-          fixtures.repositoryOwner,
-          fixtures.repositoryName,
-          fixtures.token
-        )
-      ).rejects.toBeInstanceOf(Error)
+      const response = await downloadArtifactPublic(
+        fixtures.artifactID,
+        fixtures.repositoryOwner,
+        fixtures.repositoryName,
+        fixtures.token
+      )
 
       expect(downloadArtifactMock).toHaveBeenCalledWith({
         owner: fixtures.repositoryOwner,
@@ -223,6 +221,16 @@ describe('download-artifact', () => {
       expect(mockGetArtifactMalicious).toHaveBeenCalledWith(
         fixtures.blobStorageUrl
       )
+
+      // ensure path traversal was not possible
+      expect(
+        fs.existsSync(path.join(fixtures.workspaceDir, 'x/etc/hosts'))
+      ).toBe(true)
+      expect(
+        fs.existsSync(path.join(fixtures.workspaceDir, 'y/etc/hosts'))
+      ).toBe(true)
+
+      expect(response.downloadPath).toBe(fixtures.workspaceDir)
     })
 
     it('should successfully download an artifact to user defined path', async () => {
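
The new assertions above pin down the behavior the removed `Malformed extraction path` guard used to enforce by hand: entries from a malicious archive must land inside the workspace (here as `x/etc/hosts` and `y/etc/hosts`) rather than escape it. A minimal sketch of that containment property, with a hypothetical `staysInside` helper that is not part of the library or the test fixtures:

import * as path from 'path'

// Hypothetical helper: true when entryPath resolves to a location inside targetDir.
function staysInside(targetDir: string, entryPath: string): boolean {
  const root = path.resolve(targetDir)
  const resolved = path.resolve(root, entryPath)
  const relative = path.relative(root, resolved)
  // An escaping path either climbs upward ('..') or resolves to an absolute path elsewhere
  return relative === '' || (!relative.startsWith('..') && !path.isAbsolute(relative))
}

// staysInside('/workspace', 'x/etc/hosts')     -> true  (stays in the workspace)
// staysInside('/workspace', '../../etc/hosts') -> false (path traversal attempt)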

View File

@@ -1,12 +1,12 @@
 {
   "name": "@actions/artifact",
-  "version": "2.1.5",
+  "version": "2.1.7",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "@actions/artifact",
-      "version": "2.1.5",
+      "version": "2.1.7",
       "license": "MIT",
       "dependencies": {
         "@actions/core": "^1.10.0",
@@ -1738,9 +1738,9 @@
       "integrity": "sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w=="
     },
     "node_modules/unzip-stream": {
-      "version": "0.3.1",
-      "resolved": "https://registry.npmjs.org/unzip-stream/-/unzip-stream-0.3.1.tgz",
-      "integrity": "sha512-RzaGXLNt+CW+T41h1zl6pGz3EaeVhYlK+rdAap+7DxW5kqsqePO8kRtWPaCiVqdhZc86EctSPVYNix30YOMzmw==",
+      "version": "0.3.4",
+      "resolved": "https://registry.npmjs.org/unzip-stream/-/unzip-stream-0.3.4.tgz",
+      "integrity": "sha512-PyofABPVv+d7fL7GOpusx7eRT9YETY2X04PhwbSipdj6bMxVCFJrr+nm0Mxqbf9hUiTin/UsnuFWBXlDZFy0Cw==",
       "dependencies": {
         "binary": "^0.3.0",
         "mkdirp": "^0.5.1"

View File

@@ -1,6 +1,6 @@
 {
   "name": "@actions/artifact",
-  "version": "2.1.6",
+  "version": "2.1.7",
   "preview": true,
   "description": "Actions artifact lib",
   "keywords": [

View File

@@ -1,7 +1,4 @@
 import fs from 'fs/promises'
-import * as stream from 'stream'
-import {createWriteStream} from 'fs'
-import * as path from 'path'
 import * as github from '@actions/github'
 import * as core from '@actions/core'
 import * as httpClient from '@actions/http-client'
@@ -47,11 +44,6 @@ async function streamExtract(url: string, directory: string): Promise<void> {
       await streamExtractExternal(url, directory)
       return
     } catch (error) {
-      if (error.message.includes('Malformed extraction path')) {
-        throw new Error(
-          `Artifact download failed with unretryable error: ${error.message}`
-        )
-      }
       retryCount++
       core.debug(
         `Failed to download artifact after ${retryCount} retries due to ${error.message}. Retrying in 5 seconds...`
@@ -86,8 +78,6 @@ export async function streamExtractExternal(
     }
     const timer = setTimeout(timerFn, timeout)
 
-    const createdDirectories = new Set<string>()
-    createdDirectories.add(directory)
     response.message
       .on('data', () => {
         timer.refresh()
@@ -99,46 +89,8 @@ export async function streamExtractExternal(
         clearTimeout(timer)
         reject(error)
       })
-      .pipe(unzip.Parse())
-      .pipe(
-        new stream.Transform({
-          objectMode: true,
-          transform: async (entry, _, callback) => {
-            const fullPath = path.normalize(path.join(directory, entry.path))
-            if (!directory.endsWith(path.sep)) {
-              directory += path.sep
-            }
-            if (!fullPath.startsWith(directory)) {
-              reject(new Error(`Malformed extraction path: ${fullPath}`))
-            }
-
-            if (entry.type === 'Directory') {
-              if (!createdDirectories.has(fullPath)) {
-                createdDirectories.add(fullPath)
-                await resolveOrCreateDirectory(fullPath).then(() => {
-                  entry.autodrain()
-                  callback()
-                })
-              } else {
-                entry.autodrain()
-                callback()
-              }
-            } else {
-              core.info(`Extracting artifact entry: ${fullPath}`)
-              if (!createdDirectories.has(path.dirname(fullPath))) {
-                createdDirectories.add(path.dirname(fullPath))
-                await resolveOrCreateDirectory(path.dirname(fullPath))
-              }
-
-              const writeStream = createWriteStream(fullPath)
-              writeStream.on('finish', callback)
-              writeStream.on('error', reject)
-              entry.pipe(writeStream)
-            }
-          }
-        })
-      )
-      .on('finish', async () => {
+      .pipe(unzip.Extract({path: directory}))
+      .on('close', () => {
         clearTimeout(timer)
         resolve()
       })
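
Taken together, the rewritten download path now amounts to: stream the blob over HTTP, pipe it into `unzip.Extract({path})`, and settle on `'close'` or `'error'`. A self-contained sketch of that pattern is below; the helper name, URL, and directory are placeholders, and the Promise wrapper is an illustration rather than the exact implementation (which also adds a read timeout and retries):

import * as httpClient from '@actions/http-client'
import * as unzip from 'unzip-stream'

// Hypothetical standalone helper mirroring the simplified extraction flow.
async function extractZipFromUrl(url: string, directory: string): Promise<void> {
  const client = new httpClient.HttpClient('artifact-example') // user agent string
  const response = await client.get(url)

  return new Promise<void>((resolve, reject) => {
    response.message // the underlying IncomingMessage stream
      .on('error', reject) // network/read failure
      .pipe(unzip.Extract({path: directory})) // unpack entries under `directory`
      .on('close', () => resolve()) // unzip-stream emits 'close' when done
      .on('error', reject) // extraction failure
  })
}

// Usage (placeholder values):
// await extractZipFromUrl('https://example.com/artifact.zip', '/tmp/artifact-out')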