1
0
Fork 0

Merge pull request #1666 from actions/bethanyj28/download-path

Use `unzip.Parse` over `unzip.Extract`
pull/1668/head
Bethany 2024-02-23 16:22:24 -05:00 committed by GitHub
commit 88f7a7bc65
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
7 changed files with 121 additions and 9 deletions

6
package-lock.json generated
View File

@@ -6664,9 +6664,9 @@
} }
}, },
"node_modules/ip": { "node_modules/ip": {
"version": "2.0.0", "version": "2.0.1",
"resolved": "https://registry.npmjs.org/ip/-/ip-2.0.0.tgz", "resolved": "https://registry.npmjs.org/ip/-/ip-2.0.1.tgz",
"integrity": "sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ==", "integrity": "sha512-lJUL9imLTNi1ZfXT+DU6rBBdbiKGBuay9B6xGSPVjUeQwaH1RIGqef8RZkUtHioLmSNpPR5M4HVKJGm1j8FWVQ==",
"dev": true "dev": true
}, },
"node_modules/is-array-buffer": { "node_modules/is-array-buffer": {

View File

@@ -113,4 +113,8 @@
### 2.1.1 ### 2.1.1
- Updated `isGhes` check to include `.ghe.com` and `.ghe.localhost` as accepted hosts - Updated `isGhes` check to include `.ghe.com` and `.ghe.localhost` as accepted hosts
### 2.1.2
- Updated the stream extract functionality to use `unzip.Parse()` instead of `unzip.Extract()` for greater control of unzipping artifacts

View File

@@ -121,6 +121,16 @@ const mockGetArtifactFailure = jest.fn(() => {
} }
}) })
const mockGetArtifactMalicious = jest.fn(() => {
const message = new http.IncomingMessage(new net.Socket())
message.statusCode = 200
message.push(fs.readFileSync(path.join(__dirname, 'fixtures', 'evil.zip'))) // evil.zip contains files that are formatted x/../../etc/hosts
message.push(null)
return {
message
}
})
describe('download-artifact', () => { describe('download-artifact', () => {
describe('public', () => { describe('public', () => {
beforeEach(setup) beforeEach(setup)
@@ -170,6 +180,51 @@ describe('download-artifact', () => {
expect(response.downloadPath).toBe(fixtures.workspaceDir) expect(response.downloadPath).toBe(fixtures.workspaceDir)
}) })
it('should not allow path traversal from malicious artifacts', async () => {
const downloadArtifactMock = github.getOctokit(fixtures.token).rest
.actions.downloadArtifact as MockedDownloadArtifact
downloadArtifactMock.mockResolvedValueOnce({
headers: {
location: fixtures.blobStorageUrl
},
status: 302,
url: '',
data: Buffer.from('')
})
const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
() => {
return {
get: mockGetArtifactMalicious
}
}
)
await expect(
downloadArtifactPublic(
fixtures.artifactID,
fixtures.repositoryOwner,
fixtures.repositoryName,
fixtures.token
)
).rejects.toBeInstanceOf(Error)
expect(downloadArtifactMock).toHaveBeenCalledWith({
owner: fixtures.repositoryOwner,
repo: fixtures.repositoryName,
artifact_id: fixtures.artifactID,
archive_format: 'zip',
request: {
redirect: 'manual'
}
})
expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
expect(mockGetArtifactMalicious).toHaveBeenCalledWith(
fixtures.blobStorageUrl
)
})
it('should successfully download an artifact to user defined path', async () => { it('should successfully download an artifact to user defined path', async () => {
const customPath = path.join(testDir, 'custom') const customPath = path.join(testDir, 'custom')

Binary file not shown.

View File

@@ -1,12 +1,12 @@
{ {
"name": "@actions/artifact", "name": "@actions/artifact",
"version": "2.1.1", "version": "2.1.2",
"lockfileVersion": 3, "lockfileVersion": 3,
"requires": true, "requires": true,
"packages": { "packages": {
"": { "": {
"name": "@actions/artifact", "name": "@actions/artifact",
"version": "2.1.1", "version": "2.1.2",
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"@actions/core": "^1.10.0", "@actions/core": "^1.10.0",

View File

@@ -1,6 +1,6 @@
{ {
"name": "@actions/artifact", "name": "@actions/artifact",
"version": "2.1.1", "version": "2.1.2",
"preview": true, "preview": true,
"description": "Actions artifact lib", "description": "Actions artifact lib",
"keywords": [ "keywords": [

View File

@@ -1,4 +1,7 @@
import fs from 'fs/promises' import fs from 'fs/promises'
import * as stream from 'stream'
import {createWriteStream} from 'fs'
import * as path from 'path'
import * as github from '@actions/github' import * as github from '@actions/github'
import * as core from '@actions/core' import * as core from '@actions/core'
import * as httpClient from '@actions/http-client' import * as httpClient from '@actions/http-client'
@@ -44,6 +47,11 @@ async function streamExtract(url: string, directory: string): Promise<void> {
await streamExtractExternal(url, directory) await streamExtractExternal(url, directory)
return return
} catch (error) { } catch (error) {
if (error.message.includes('Malformed extraction path')) {
throw new Error(
`Artifact download failed with unretryable error: ${error.message}`
)
}
retryCount++ retryCount++
core.debug( core.debug(
`Failed to download artifact after ${retryCount} retries due to ${error.message}. Retrying in 5 seconds...` `Failed to download artifact after ${retryCount} retries due to ${error.message}. Retrying in 5 seconds...`
@@ -78,6 +86,8 @@ export async function streamExtractExternal(
} }
const timer = setTimeout(timerFn, timeout) const timer = setTimeout(timerFn, timeout)
const createdDirectories = new Set<string>()
createdDirectories.add(directory)
response.message response.message
.on('data', () => { .on('data', () => {
timer.refresh() timer.refresh()
@@ -89,8 +99,51 @@
clearTimeout(timer) clearTimeout(timer)
reject(error) reject(error)
}) })
.pipe(unzip.Extract({path: directory})) .pipe(unzip.Parse())
.on('close', () => { .pipe(
new stream.Transform({
objectMode: true,
transform: async (entry, _, callback) => {
const fullPath = path.normalize(path.join(directory, entry.path))
if (!directory.endsWith(path.sep)) {
directory += path.sep
}
if (!fullPath.startsWith(directory)) {
reject(new Error(`Malformed extraction path: ${fullPath}`))
}
core.debug(`Extracting artifact entry: ${fullPath}`)
if (entry.type === 'Directory') {
if (!createdDirectories.has(fullPath)) {
createdDirectories.add(fullPath)
await resolveOrCreateDirectory(fullPath).then(() => {
entry.autodrain()
callback()
})
} else {
entry.autodrain()
callback()
}
} else {
if (!createdDirectories.has(path.dirname(fullPath))) {
createdDirectories.add(path.dirname(fullPath))
await resolveOrCreateDirectory(path.dirname(fullPath)).then(
() => {
entry.autodrain()
callback()
}
)
}
const writeStream = createWriteStream(fullPath)
writeStream.on('finish', callback)
writeStream.on('error', reject)
entry.pipe(writeStream)
}
}
})
)
.on('finish', async () => {
clearTimeout(timer) clearTimeout(timer)
resolve() resolve()
}) })