diff --git a/packages/artifact/README.md b/packages/artifact/README.md new file mode 100644 index 00000000..e69de29b diff --git a/packages/artifact/__tests__/upload-specification.test.ts b/packages/artifact/__tests__/upload-specification.test.ts new file mode 100644 index 00000000..ba1cd9e4 --- /dev/null +++ b/packages/artifact/__tests__/upload-specification.test.ts @@ -0,0 +1,353 @@ +import * as io from '../../io/src/io' +import * as path from 'path' +import {promises as fs} from 'fs' +import * as core from '@actions/core' +import {getUploadSpecification} from '../src/internal-upload-specification' + +const artifactName = 'my-artifact' +const root = path.join(__dirname, '_temp', 'upload-specification') +const goodItem1Path = path.join( + root, + 'folder-a', + 'folder-b', + 'folder-c', + 'good-item1.txt' +) +const goodItem2Path = path.join(root, 'folder-d', 'good-item2.txt') +const goodItem3Path = path.join(root, 'folder-d', 'good-item3.txt') +const goodItem4Path = path.join(root, 'folder-d', 'good-item4.txt') +const goodItem5Path = path.join(root, 'good-item5.txt') +const badItem1Path = path.join( + root, + 'folder-a', + 'folder-b', + 'folder-c', + 'bad-item1.txt' +) +const badItem2Path = path.join(root, 'folder-d', 'bad-item2.txt') +const badItem3Path = path.join(root, 'folder-f', 'bad-item3.txt') +const badItem4Path = path.join(root, 'folder-h', 'folder-i', 'bad-item4.txt') +const badItem5Path = path.join(root, 'folder-h', 'folder-i', 'bad-item5.txt') +const extraFileInFolderCPath = path.join( + root, + 'folder-a', + 'folder-b', + 'folder-c', + 'extra-file-in-folder-c.txt' +) +const amazingFileInFolderHPath = path.join(root, 'folder-h', 'amazing-item.txt') + +const artifactFilesToUpload = [ + goodItem1Path, + goodItem2Path, + goodItem3Path, + goodItem4Path, + goodItem5Path, + extraFileInFolderCPath, + amazingFileInFolderHPath +] + +describe('Search', () => { + beforeAll(async () => { + // mock all output so that there is less noise when running tests + 
jest.spyOn(console, 'log').mockImplementation(() => {}) + jest.spyOn(core, 'debug').mockImplementation(() => {}) + jest.spyOn(core, 'info').mockImplementation(() => {}) + jest.spyOn(core, 'warning').mockImplementation(() => {}) + + // clear temp directory + await io.rmRF(root) + await fs.mkdir(path.join(root, 'folder-a', 'folder-b', 'folder-c'), { + recursive: true + }) + await fs.mkdir(path.join(root, 'folder-a', 'folder-b', 'folder-e'), { + recursive: true + }) + await fs.mkdir(path.join(root, 'folder-d'), { + recursive: true + }) + await fs.mkdir(path.join(root, 'folder-f'), { + recursive: true + }) + await fs.mkdir(path.join(root, 'folder-g'), { + recursive: true + }) + await fs.mkdir(path.join(root, 'folder-h', 'folder-i'), { + recursive: true + }) + + await fs.writeFile(goodItem1Path, 'good item1 file') + await fs.writeFile(goodItem2Path, 'good item2 file') + await fs.writeFile(goodItem3Path, 'good item3 file') + await fs.writeFile(goodItem4Path, 'good item4 file') + await fs.writeFile(goodItem5Path, 'good item5 file') + + await fs.writeFile(badItem1Path, 'bad item1 file') + await fs.writeFile(badItem2Path, 'bad item2 file') + await fs.writeFile(badItem3Path, 'bad item3 file') + await fs.writeFile(badItem4Path, 'bad item4 file') + await fs.writeFile(badItem5Path, 'bad item5 file') + + await fs.writeFile(extraFileInFolderCPath, 'extra file') + + await fs.writeFile(amazingFileInFolderHPath, 'amazing file') + /* + Directory structure of files that get created: + root/ + folder-a/ + folder-b/ + folder-c/ + good-item1.txt + bad-item1.txt + extra-file-in-folder-c.txt + folder-e/ + folder-d/ + good-item2.txt + good-item3.txt + good-item4.txt + bad-item2.txt + folder-f/ + bad-item3.txt + folder-g/ + folder-h/ + amazing-item.txt + folder-i/ + bad-item4.txt + bad-item5.txt + good-item5.txt + */ + }) + + it('Upload Specification - Fail non-existent rootDirectory', async () => { + const invalidRootDirectory = path.join( + __dirname, + '_temp', + 
'upload-specification-invalid' + ) + expect(() => { + getUploadSpecification( + artifactName, + invalidRootDirectory, + artifactFilesToUpload + ) + }).toThrow(`Provided rootDirectory ${invalidRootDirectory} does not exist`) + }) + + it('Upload Specification - Fail invalid rootDirectory', async () => { + expect(() => { + getUploadSpecification(artifactName, goodItem1Path, artifactFilesToUpload) + }).toThrow( + `Provided rootDirectory ${goodItem1Path} is not a valid directory` + ) + }) + + it('Upload Specification - File does not exist', async () => { + const fakeFilePath = path.join( + artifactName, + 'folder-a', + 'folder-b', + 'non-existent-file.txt' + ) + expect(() => { + getUploadSpecification(artifactName, root, [fakeFilePath]) + }).toThrow(`File ${fakeFilePath} does not exist`) + }) + + it('Upload Specification - Non parent directory', async () => { + const folderADirectory = path.join(root, 'folder-a') + const artifactFiles = [ + goodItem1Path, + badItem1Path, + extraFileInFolderCPath, + goodItem5Path + ] + expect(() => { + getUploadSpecification(artifactName, folderADirectory, artifactFiles) + }).toThrow( + `The rootDirectory: ${folderADirectory} is not a parent directory of the file: ${goodItem5Path}` + ) + }) + + it('Upload Specification - Success', async () => { + const specifications = getUploadSpecification( + artifactName, + root, + artifactFilesToUpload + ) + expect(specifications.length).toEqual(7) + + const absolutePaths = specifications.map(item => item.absoluteFilePath) + expect(absolutePaths).toContain(goodItem1Path) + expect(absolutePaths).toContain(goodItem2Path) + expect(absolutePaths).toContain(goodItem3Path) + expect(absolutePaths).toContain(goodItem4Path) + expect(absolutePaths).toContain(goodItem5Path) + expect(absolutePaths).toContain(extraFileInFolderCPath) + expect(absolutePaths).toContain(amazingFileInFolderHPath) + + for (const specification of specifications) { + if (specification.absoluteFilePath === goodItem1Path) { + 
expect(specification.uploadFilePath).toEqual( + path.join( + artifactName, + 'folder-a', + 'folder-b', + 'folder-c', + 'good-item1.txt' + ) + ) + } else if (specification.absoluteFilePath === goodItem2Path) { + expect(specification.uploadFilePath).toEqual( + path.join(artifactName, 'folder-d', 'good-item2.txt') + ) + } else if (specification.absoluteFilePath === goodItem3Path) { + expect(specification.uploadFilePath).toEqual( + path.join(artifactName, 'folder-d', 'good-item3.txt') + ) + } else if (specification.absoluteFilePath === goodItem4Path) { + expect(specification.uploadFilePath).toEqual( + path.join(artifactName, 'folder-d', 'good-item4.txt') + ) + } else if (specification.absoluteFilePath === goodItem5Path) { + expect(specification.uploadFilePath).toEqual( + path.join(artifactName, 'good-item5.txt') + ) + } else if (specification.absoluteFilePath === extraFileInFolderCPath) { + expect(specification.uploadFilePath).toEqual( + path.join( + artifactName, + 'folder-a', + 'folder-b', + 'folder-c', + 'extra-file-in-folder-c.txt' + ) + ) + } else if (specification.absoluteFilePath === amazingFileInFolderHPath) { + expect(specification.uploadFilePath).toEqual( + path.join(artifactName, 'folder-h', 'amazing-item.txt') + ) + } else { + throw new Error( + 'Invalid specification found. 
This should never be reached' + ) + } + } + }) + + it('Upload Specification - Success with extra slash', async () => { + const rootWithSlash = `${root}/` + const specifications = getUploadSpecification( + artifactName, + rootWithSlash, + artifactFilesToUpload + ) + expect(specifications.length).toEqual(7) + + const absolutePaths = specifications.map(item => item.absoluteFilePath) + expect(absolutePaths).toContain(goodItem1Path) + expect(absolutePaths).toContain(goodItem2Path) + expect(absolutePaths).toContain(goodItem3Path) + expect(absolutePaths).toContain(goodItem4Path) + expect(absolutePaths).toContain(goodItem5Path) + expect(absolutePaths).toContain(extraFileInFolderCPath) + expect(absolutePaths).toContain(amazingFileInFolderHPath) + + for (const specification of specifications) { + if (specification.absoluteFilePath === goodItem1Path) { + expect(specification.uploadFilePath).toEqual( + path.join( + artifactName, + 'folder-a', + 'folder-b', + 'folder-c', + 'good-item1.txt' + ) + ) + } else if (specification.absoluteFilePath === goodItem2Path) { + expect(specification.uploadFilePath).toEqual( + path.join(artifactName, 'folder-d', 'good-item2.txt') + ) + } else if (specification.absoluteFilePath === goodItem3Path) { + expect(specification.uploadFilePath).toEqual( + path.join(artifactName, 'folder-d', 'good-item3.txt') + ) + } else if (specification.absoluteFilePath === goodItem4Path) { + expect(specification.uploadFilePath).toEqual( + path.join(artifactName, 'folder-d', 'good-item4.txt') + ) + } else if (specification.absoluteFilePath === goodItem5Path) { + expect(specification.uploadFilePath).toEqual( + path.join(artifactName, 'good-item5.txt') + ) + } else if (specification.absoluteFilePath === extraFileInFolderCPath) { + expect(specification.uploadFilePath).toEqual( + path.join( + artifactName, + 'folder-a', + 'folder-b', + 'folder-c', + 'extra-file-in-folder-c.txt' + ) + ) + } else if (specification.absoluteFilePath === amazingFileInFolderHPath) { + 
expect(specification.uploadFilePath).toEqual( + path.join(artifactName, 'folder-h', 'amazing-item.txt') + ) + } else { + throw new Error( + 'Invalid specification found. This should never be reached' + ) + } + } + }) + + it('Upload Specification - Directories should not be included', async () => { + const folderEPath = path.join(root, 'folder-a', 'folder-b', 'folder-e') + const filesWithDirectory = [ + goodItem1Path, + goodItem4Path, + folderEPath, + badItem3Path + ] + const specifications = getUploadSpecification( + artifactName, + root, + filesWithDirectory + ) + expect(specifications.length).toEqual(3) + const absolutePaths = specifications.map(item => item.absoluteFilePath) + expect(absolutePaths).toContain(goodItem1Path) + expect(absolutePaths).toContain(goodItem4Path) + expect(absolutePaths).toContain(badItem3Path) + + for (const specification of specifications) { + if (specification.absoluteFilePath === goodItem1Path) { + expect(specification.uploadFilePath).toEqual( + path.join( + artifactName, + 'folder-a', + 'folder-b', + 'folder-c', + 'good-item1.txt' + ) + ) + } else if (specification.absoluteFilePath === goodItem2Path) { + expect(specification.uploadFilePath).toEqual( + path.join(artifactName, 'folder-d', 'good-item2.txt') + ) + } else if (specification.absoluteFilePath === goodItem4Path) { + expect(specification.uploadFilePath).toEqual( + path.join(artifactName, 'folder-d', 'good-item4.txt') + ) + } else if (specification.absoluteFilePath === badItem3Path) { + expect(specification.uploadFilePath).toEqual( + path.join(artifactName, 'folder-f', 'bad-item3.txt') + ) + } else { + throw new Error( + 'Invalid specification found. 
This should never be reached' + ) + } + } + }) +}) diff --git a/packages/artifact/__tests__/upload.test.ts b/packages/artifact/__tests__/upload.test.ts new file mode 100644 index 00000000..08a758ca --- /dev/null +++ b/packages/artifact/__tests__/upload.test.ts @@ -0,0 +1,453 @@ +import * as http from 'http' +import * as io from '../../io/src/io' +import * as net from 'net' +import * as path from 'path' +import * as uploadHttpClient from '../src/internal-upload-http-client' +import * as core from '@actions/core' +import {promises as fs} from 'fs' +import {getRuntimeUrl} from '../src/internal-config-variables' +import {HttpClient, HttpClientResponse} from '@actions/http-client' +import { + ArtifactResponse, + PatchArtifactSizeSuccessResponse +} from '../src/internal-contracts' +import {UploadSpecification} from '../src/internal-upload-specification' + +const root = path.join(__dirname, '_temp', 'artifact-upload') +const file1Path = path.join(root, 'file1.txt') +const file2Path = path.join(root, 'file2.txt') +const file3Path = path.join(root, 'folder1', 'file3.txt') +const file4Path = path.join(root, 'folder1', 'file4.txt') +const file5Path = path.join(root, 'folder1', 'folder2', 'folder3', 'file5.txt') + +let file1Size = 0 +let file2Size = 0 +let file3Size = 0 +let file4Size = 0 +let file5Size = 0 + +jest.mock('../src/internal-config-variables') +jest.mock('@actions/http-client') + +describe('Upload Tests', () => { + beforeAll(async () => { + // mock all output so that there is less noise when running tests + jest.spyOn(console, 'log').mockImplementation(() => {}) + jest.spyOn(core, 'debug').mockImplementation(() => {}) + jest.spyOn(core, 'info').mockImplementation(() => {}) + jest.spyOn(core, 'warning').mockImplementation(() => {}) + + // setup mocking for calls that got through the HttpClient + setupHttpClientMock() + + // clear temp directory and create files that will be "uploaded" + await io.rmRF(root) + await fs.mkdir(path.join(root, 'folder1', 'folder2', 
'folder3'), { + recursive: true + }) + await fs.writeFile(file1Path, 'this is file 1') + await fs.writeFile(file2Path, 'this is file 2') + await fs.writeFile(file3Path, 'this is file 3') + await fs.writeFile(file4Path, 'this is file 4') + await fs.writeFile(file5Path, 'this is file 5') + /* + Directory structure for files that get created: + root/ + file1.txt + file2.txt + folder1/ + file3.txt + file4.txt + folder2/ + folder3/ + file5.txt + */ + + file1Size = (await fs.stat(file1Path)).size + file2Size = (await fs.stat(file2Path)).size + file3Size = (await fs.stat(file3Path)).size + file4Size = (await fs.stat(file4Path)).size + file5Size = (await fs.stat(file5Path)).size + }) + + /** + * Artifact Creation Tests + */ + it('Create Artifact - Success', async () => { + const artifactName = 'valid-artifact-name' + const response = await uploadHttpClient.createArtifactInFileContainer( + artifactName + ) + expect(response.containerId).toEqual('13') + expect(response.size).toEqual(-1) + expect(response.signedContent).toEqual('false') + expect(response.fileContainerResourceUrl).toEqual( + `${getRuntimeUrl()}_apis/resources/Containers/13` + ) + expect(response.type).toEqual('actions_storage') + expect(response.name).toEqual(artifactName) + expect(response.url).toEqual( + `${getRuntimeUrl()}_apis/pipelines/1/runs/1/artifacts?artifactName=${artifactName}` + ) + }) + + it('Create Artifact - Failure', async () => { + const artifactName = 'invalid-artifact-name' + expect( + uploadHttpClient.createArtifactInFileContainer(artifactName) + ).rejects.toEqual( + new Error( + 'Unable to create a container for the artifact invalid-artifact-name' + ) + ) + }) + + /** + * Artifact Upload Tests + */ + it('Upload Artifact - Success', async () => { + /** + * Normally search.findFilesToUpload() would be used for providing information about what to upload. 
These tests however
   * focus solely
{ + absoluteFilePath: file4Path, + uploadFilePath: `this-file-upload-will-fail` + }, + { + absoluteFilePath: file5Path, + uploadFilePath: `${artifactName}/folder1/folder2/folder3/file5.txt` + } + ] + + const expectedPartialSize = file1Size + file2Size + file4Size + file5Size + const uploadUrl = `${getRuntimeUrl()}_apis/resources/Containers/13` + const uploadResult = await uploadHttpClient.uploadArtifactToFileContainer( + uploadUrl, + uploadSpecification, + {continueOnError: true} + ) + expect(uploadResult.failedItems.length).toEqual(1) + expect(uploadResult.size).toEqual(expectedPartialSize) + }) + + it('Upload Artifact - Partial Upload Fail Fast', async () => { + const artifactName = 'partial-artifact' + const uploadSpecification: UploadSpecification[] = [ + { + absoluteFilePath: file1Path, + uploadFilePath: `${artifactName}/file1.txt` + }, + { + absoluteFilePath: file2Path, + uploadFilePath: `${artifactName}/file2.txt` + }, + { + absoluteFilePath: file3Path, + uploadFilePath: `${artifactName}/folder1/file3.txt` + }, + { + absoluteFilePath: file4Path, + uploadFilePath: `this-file-upload-will-fail` + }, + { + absoluteFilePath: file5Path, + uploadFilePath: `${artifactName}/folder1/folder2/folder3/file5.txt` + } + ] + + const expectedPartialSize = file1Size + file2Size + file3Size + const uploadUrl = `${getRuntimeUrl()}_apis/resources/Containers/13` + const uploadResult = await uploadHttpClient.uploadArtifactToFileContainer( + uploadUrl, + uploadSpecification, + {continueOnError: false} + ) + expect(uploadResult.failedItems.length).toEqual(2) + expect(uploadResult.size).toEqual(expectedPartialSize) + }) + + it('Upload Artifact - Failed upload with no options', async () => { + const artifactName = 'partial-artifact' + const uploadSpecification: UploadSpecification[] = [ + { + absoluteFilePath: file1Path, + uploadFilePath: `${artifactName}/file1.txt` + }, + { + absoluteFilePath: file2Path, + uploadFilePath: `${artifactName}/file2.txt` + }, + { + absoluteFilePath: 
file3Path, + uploadFilePath: `${artifactName}/folder1/file3.txt` + }, + { + absoluteFilePath: file4Path, + uploadFilePath: `this-file-upload-will-fail` + }, + { + absoluteFilePath: file5Path, + uploadFilePath: `${artifactName}/folder1/folder2/folder3/file5.txt` + } + ] + + const expectedPartialSize = file1Size + file2Size + file3Size + file5Size + const uploadUrl = `${getRuntimeUrl()}_apis/resources/Containers/13` + const uploadResult = await uploadHttpClient.uploadArtifactToFileContainer( + uploadUrl, + uploadSpecification + ) + expect(uploadResult.failedItems.length).toEqual(1) + expect(uploadResult.size).toEqual(expectedPartialSize) + }) + + it('Upload Artifact - Failed upload with empty options', async () => { + const artifactName = 'partial-artifact' + const uploadSpecification: UploadSpecification[] = [ + { + absoluteFilePath: file1Path, + uploadFilePath: `${artifactName}/file1.txt` + }, + { + absoluteFilePath: file2Path, + uploadFilePath: `${artifactName}/file2.txt` + }, + { + absoluteFilePath: file3Path, + uploadFilePath: `${artifactName}/folder1/file3.txt` + }, + { + absoluteFilePath: file4Path, + uploadFilePath: `this-file-upload-will-fail` + }, + { + absoluteFilePath: file5Path, + uploadFilePath: `${artifactName}/folder1/folder2/folder3/file5.txt` + } + ] + + const expectedPartialSize = file1Size + file2Size + file3Size + file5Size + const uploadUrl = `${getRuntimeUrl()}_apis/resources/Containers/13` + const uploadResult = await uploadHttpClient.uploadArtifactToFileContainer( + uploadUrl, + uploadSpecification, + {} + ) + expect(uploadResult.failedItems.length).toEqual(1) + expect(uploadResult.size).toEqual(expectedPartialSize) + }) + + /** + * Artifact Association Tests + */ + it('Associate Artifact - Success', async () => { + expect(async () => { + uploadHttpClient.patchArtifactSize(130, 'my-artifact') + }).not.toThrow() + }) + + it('Associate Artifact - Not Found', async () => { + expect( + uploadHttpClient.patchArtifactSize(100, 
'non-existent-artifact') + ).rejects.toThrow( + 'An Artifact with the name non-existent-artifact was not found' + ) + }) + + it('Associate Artifact - Error', async () => { + expect( + uploadHttpClient.patchArtifactSize(-2, 'my-artifact') + ).rejects.toThrow('Unable to finish uploading artifact my-artifact') + }) + + /** + * Helpers used to setup mocking for the HttpClient + */ + async function emptyMockReadBody(): Promise { + return new Promise(resolve => { + resolve() + }) + } + + function setupHttpClientMock(): void { + /** + * Mocks Post calls that are used during Artifact Creation tests + * + * Simulates success and non-success status codes depending on the artifact name along with an appropriate + * payload that represents an expected response + */ + jest + .spyOn(HttpClient.prototype, 'post') + .mockImplementation(async (requestdata, data) => { + // parse the input data and use the provided artifact name as part of the response + const inputData = JSON.parse(data) + const mockMessage = new http.IncomingMessage(new net.Socket()) + let mockReadBody = emptyMockReadBody + + if (inputData.Name === 'invalid-artifact-name') { + mockMessage.statusCode = 400 + } else { + mockMessage.statusCode = 201 + const response: ArtifactResponse = { + containerId: '13', + size: -1, + signedContent: 'false', + fileContainerResourceUrl: `${getRuntimeUrl()}_apis/resources/Containers/13`, + type: 'actions_storage', + name: inputData.Name, + url: `${getRuntimeUrl()}_apis/pipelines/1/runs/1/artifacts?artifactName=${ + inputData.Name + }` + } + const returnData: string = JSON.stringify(response, null, 2) + mockReadBody = async function(): Promise { + return new Promise(resolve => { + resolve(returnData) + }) + } + } + return new Promise(resolve => { + resolve({ + message: mockMessage, + readBody: mockReadBody + }) + }) + }) + + /** + * Mocks SendStream calls that are made during Artifact Upload tests + * + * A 500 response is used to simulate a failed upload stream. 
The uploadUrl can be set to + * include 'fail' to specify that the upload should fail + */ + jest + .spyOn(HttpClient.prototype, 'sendStream') + .mockImplementation(async (verb, requestUrl) => { + const mockMessage = new http.IncomingMessage(new net.Socket()) + mockMessage.statusCode = 200 + if (requestUrl.includes('fail')) { + mockMessage.statusCode = 500 + } + + return new Promise(resolve => { + resolve({ + message: mockMessage, + readBody: emptyMockReadBody + }) + }) + }) + + /** + * Mocks Patch calls that are made during Artifact Association tests + * + * Simulates success and non-success status codes depending on the input size along with an appropriate + * payload that represents an expected response + */ + jest + .spyOn(HttpClient.prototype, 'patch') + .mockImplementation(async (requestdata, data) => { + const inputData = JSON.parse(data) + const mockMessage = new http.IncomingMessage(new net.Socket()) + + // Get the name from the end of requestdata. Will be something like https://www.example.com/_apis/pipelines/workflows/15/artifacts?api-version=6.0-preview&artifactName=my-artifact + const artifactName = requestdata.split('=')[2] + let mockReadBody = emptyMockReadBody + if (inputData.Size < 1) { + mockMessage.statusCode = 400 + } else if (artifactName === 'non-existent-artifact') { + mockMessage.statusCode = 404 + } else { + mockMessage.statusCode = 200 + const response: PatchArtifactSizeSuccessResponse = { + containerId: 13, + size: inputData.Size, + signedContent: 'false', + type: 'actions_storage', + name: artifactName, + url: `${getRuntimeUrl()}_apis/pipelines/1/runs/1/artifacts?artifactName=${artifactName}`, + uploadUrl: `${getRuntimeUrl()}_apis/resources/Containers/13` + } + const returnData: string = JSON.stringify(response, null, 2) + mockReadBody = async function(): Promise { + return new Promise(resolve => { + resolve(returnData) + }) + } + } + return new Promise(resolve => { + resolve({ + message: mockMessage, + readBody: mockReadBody + }) + }) + 
}) + } +}) diff --git a/packages/artifact/__tests__/util.test.ts b/packages/artifact/__tests__/util.test.ts new file mode 100644 index 00000000..e71652b3 --- /dev/null +++ b/packages/artifact/__tests__/util.test.ts @@ -0,0 +1,99 @@ +import * as utils from '../src/internal-utils' +import * as core from '@actions/core' +import {HttpCodes} from '@actions/http-client' +import {getRuntimeUrl, getWorkFlowRunId} from '../src/internal-config-variables' + +jest.mock('../src/internal-config-variables') + +describe('Utils', () => { + beforeAll(() => { + // mock all output so that there is less noise when running tests + jest.spyOn(console, 'log').mockImplementation(() => {}) + jest.spyOn(core, 'debug').mockImplementation(() => {}) + jest.spyOn(core, 'info').mockImplementation(() => {}) + jest.spyOn(core, 'warning').mockImplementation(() => {}) + }) + + it('Check Artifact Name for any invalid characters', () => { + const invalidNames = [ + 'my\\artifact', + 'my/artifact', + 'my"artifact', + 'my:artifact', + 'myartifact', + 'my|artifact', + 'my*artifact', + 'my?artifact', + 'my artifact', + '' + ] + for (const invalidName of invalidNames) { + expect(() => { + utils.checkArtifactName(invalidName) + }).toThrow() + } + + const validNames = [ + 'my-normal-artifact', + 'myNormalArtifact', + 'm¥ñðrmålÄr†ï£å¢†' + ] + for (const validName of validNames) { + expect(() => { + utils.checkArtifactName(validName) + }).not.toThrow() + } + }) + + it('Test constructing artifact URL', () => { + const runtimeUrl = getRuntimeUrl() + const runId = getWorkFlowRunId() + const artifactUrl = utils.getArtifactUrl() + expect(artifactUrl).toEqual( + `${runtimeUrl}_apis/pipelines/workflows/${runId}/artifacts?api-version=${utils.getApiVersion()}` + ) + }) + + it('Test constructing headers with all optional parameters', () => { + const type = 'application/json' + const size = 24 + const range = 'bytes 0-199/200' + const options = utils.getRequestOptions(type, size, range) + 
expect(Object.keys(options).length).toEqual(4) + expect(options['Accept']).toEqual( + `${type};api-version=${utils.getApiVersion()}` + ) + expect(options['Content-Type']).toEqual(type) + expect(options['Content-Length']).toEqual(size) + expect(options['Content-Range']).toEqual(range) + }) + + it('Test constructing headers with only required parameter', () => { + const options = utils.getRequestOptions() + expect(Object.keys(options).length).toEqual(1) + expect(options['Accept']).toEqual( + `application/json;api-version=${utils.getApiVersion()}` + ) + }) + + it('Test Success Status Code', () => { + expect(utils.isSuccessStatusCode(HttpCodes.OK)).toEqual(true) + expect(utils.isSuccessStatusCode(201)).toEqual(true) + expect(utils.isSuccessStatusCode(299)).toEqual(true) + expect(utils.isSuccessStatusCode(HttpCodes.NotFound)).toEqual(false) + expect(utils.isSuccessStatusCode(HttpCodes.BadGateway)).toEqual(false) + expect(utils.isSuccessStatusCode(HttpCodes.Forbidden)).toEqual(false) + }) + + it('Test Retry Status Code', () => { + expect(utils.isRetryableStatusCode(HttpCodes.BadGateway)).toEqual(true) + expect(utils.isRetryableStatusCode(HttpCodes.ServiceUnavailable)).toEqual( + true + ) + expect(utils.isRetryableStatusCode(HttpCodes.GatewayTimeout)).toEqual(true) + expect(utils.isRetryableStatusCode(HttpCodes.OK)).toEqual(false) + expect(utils.isRetryableStatusCode(HttpCodes.NotFound)).toEqual(false) + expect(utils.isRetryableStatusCode(HttpCodes.Forbidden)).toEqual(false) + }) +}) diff --git a/packages/artifact/package-lock.json b/packages/artifact/package-lock.json new file mode 100644 index 00000000..d8cd2feb --- /dev/null +++ b/packages/artifact/package-lock.json @@ -0,0 +1,26 @@ +{ + "name": "@actions/artifact", + "version": "0.1.0", + "lockfileVersion": 1, + "requires": true, + "dependencies": { + "@actions/core": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.2.1.tgz", + "integrity": 
"sha512-xD+CQd9p4lU7ZfRqmUcbJpqR+Ss51rJRVeXMyOLrZQImN9/8Sy/BEUBnHO/UKD3z03R686PVTLfEPmkropGuLw==" + }, + "@actions/http-client": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-1.0.6.tgz", + "integrity": "sha512-LGmio4w98UyGX33b/W6V6Nx/sQHRXZ859YlMkn36wPsXPB82u8xTVlA/Dq2DXrm6lEq9RVmisRJa1c+HETAIJA==", + "requires": { + "tunnel": "0.0.6" + } + }, + "tunnel": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", + "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==" + } + } +} diff --git a/packages/artifact/package.json b/packages/artifact/package.json new file mode 100644 index 00000000..0f38fa9b --- /dev/null +++ b/packages/artifact/package.json @@ -0,0 +1,42 @@ +{ + "name": "@actions/artifact", + "version": "0.1.0", + "preview": true, + "description": "Actions artifact lib", + "keywords": [ + "github", + "actions", + "artifact" + ], + "homepage": "https://github.com/actions/toolkit/tree/master/packages/artifact", + "license": "MIT", + "main": "lib/artifact.js", + "types": "lib/artifact.d.ts", + "directories": { + "lib": "lib", + "test": "__tests__" + }, + "files": [ + "lib" + ], + "publishConfig": { + "access": "public" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/actions/toolkit.git", + "directory": "packages/artifact" + }, + "scripts": { + "audit-moderate": "npm install && npm audit --audit-level=moderate", + "test": "echo \"Error: run tests from root\" && exit 1", + "tsc": "tsc" + }, + "bugs": { + "url": "https://github.com/actions/toolkit/issues" + }, + "dependencies": { + "@actions/core": "^1.2.1", + "@actions/http-client": "^1.0.6" + } +} diff --git a/packages/artifact/src/__mocks__/internal-config-variables.ts b/packages/artifact/src/__mocks__/internal-config-variables.ts new file mode 100644 index 00000000..5b553617 --- /dev/null +++ 
b/packages/artifact/src/__mocks__/internal-config-variables.ts @@ -0,0 +1,30 @@ +/** + * Mocks default limits for easier testing + */ +export function getUploadFileConcurrency(): number { + return 1 +} + +export function getUploadChunkConcurrency(): number { + return 1 +} + +export function getUploadChunkSize(): number { + return 4 * 1024 * 1024 // 4 MB Chunks +} +/** + * Mocks the 'ACTIONS_RUNTIME_TOKEN', 'ACTIONS_RUNTIME_URL' and 'GITHUB_RUN_ID' env variables + * that are only available from a node context on the runner. This allows for tests to run + * locally without the env variables actually being set + */ +export function getRuntimeToken(): string { + return 'totally-valid-token' +} + +export function getRuntimeUrl(): string { + return 'https://www.example.com/' +} + +export function getWorkFlowRunId(): string { + return '15' +} diff --git a/packages/artifact/src/artifact-client.ts b/packages/artifact/src/artifact-client.ts new file mode 100644 index 00000000..a7ddc69d --- /dev/null +++ b/packages/artifact/src/artifact-client.ts @@ -0,0 +1,9 @@ +import {ArtifactClient, DefaultArtifactClient} from './internal-artifact-client' +export {ArtifactClient} + +/** + * Constructs an ArtifactClient + */ +export function create(): ArtifactClient { + return DefaultArtifactClient.create() +} diff --git a/packages/artifact/src/internal-artifact-client.ts b/packages/artifact/src/internal-artifact-client.ts new file mode 100644 index 00000000..bf431861 --- /dev/null +++ b/packages/artifact/src/internal-artifact-client.ts @@ -0,0 +1,124 @@ +import * as core from '@actions/core' +import { + UploadSpecification, + getUploadSpecification +} from './internal-upload-specification' +import { + createArtifactInFileContainer, + uploadArtifactToFileContainer, + patchArtifactSize +} from './internal-upload-http-client' +import {UploadResponse} from './internal-upload-response' +import {UploadOptions} from './internal-upload-options' +import {checkArtifactName} from './internal-utils' 

export {UploadResponse, UploadOptions}

export interface ArtifactClient {
  /**
   * Uploads an artifact
   *
   * @param name the name of the artifact, required
   * @param files a list of absolute or relative paths that denote what files should be uploaded
   * @param rootDirectory an absolute or relative file path that denotes the root parent directory of the files being uploaded
   * @param options extra options for customizing the upload behavior
   * @returns single UploadInfo object
   */
  uploadArtifact(
    name: string,
    files: string[],
    rootDirectory: string,
    options?: UploadOptions
  ): Promise<UploadResponse>
}

export class DefaultArtifactClient implements ArtifactClient {
  /**
   * Constructs a DefaultArtifactClient
   */
  static create(): DefaultArtifactClient {
    return new DefaultArtifactClient()
  }

  /**
   * Uploads an artifact
   *
   * Orchestration: validate the name, compute the per-file upload specification,
   * create the server-side file container, upload every file concurrently, then
   * patch the artifact size to signal the upload is complete.
   */
  async uploadArtifact(
    name: string,
    files: string[],
    rootDirectory: string,
    options?: UploadOptions | undefined
  ): Promise<UploadResponse> {
    // Throws if the artifact name contains characters invalid on any supported filesystem
    checkArtifactName(name)

    // Get specification for the files being uploaded
    const uploadSpecification: UploadSpecification[] = getUploadSpecification(
      name,
      rootDirectory,
      files
    )
    // Accumulator returned to the caller; stays in this zeroed state when nothing is uploaded
    const uploadResponse: UploadResponse = {
      artifactName: name,
      artifactItems: [],
      size: 0,
      failedItems: []
    }

    if (uploadSpecification.length === 0) {
      // Not an error: an empty artifact is reported via warning and an empty response
      core.warning(`No files found that can be uploaded`)
    } else {
      // Create an entry for the artifact in the file container
      const response = await createArtifactInFileContainer(name)
      if (!response.fileContainerResourceUrl) {
        // Without a container URL there is nowhere to upload to; surface the raw response for diagnostics
        core.debug(response.toString())
        throw new Error(
          'No URL provided by the Artifact Service to upload an artifact to'
        )
      }
      core.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`)

      // Upload each of the files that were found concurrently
      const uploadResult = await uploadArtifactToFileContainer(
        response.fileContainerResourceUrl,
        uploadSpecification,
        options
      )

      // Update the size of the artifact to indicate we are done uploading
      // (the container is created with size -1; patching finalizes it for billing)
      await patchArtifactSize(uploadResult.size, name)

      core.info(
        `Finished uploading artifact ${name}. Reported size is ${uploadResult.size} bytes. There were ${uploadResult.failedItems.length} items that failed to upload`
      )

      // artifactItems lists everything we intended to upload; failedItems is the subset that did not make it
      uploadResponse.artifactItems = uploadSpecification.map(
        item => item.absoluteFilePath
      )
      uploadResponse.size = uploadResult.size
      uploadResponse.failedItems = uploadResult.failedItems
    }
    return uploadResponse
  }

  /*
  Downloads a single artifact associated with a run

  export async function downloadArtifact(
    name: string,
    path?: string,
    options?: DownloadOptions
  ): Promise<DownloadResponse> {

    TODO
  }

  Downloads all artifacts associated with a run. Because there are multiple artifacts being downloaded, a folder will be created for each one in the specified or default directory

  export async function downloadAllArtifacts(
    path?: string
  ): Promise<DownloadResponse[]>{

    TODO
  }
  */
}
(!workFlowRunId) { + throw new Error('Unable to get GITHUB_RUN_ID env variable') + } + return workFlowRunId +} diff --git a/packages/artifact/src/internal-contracts.ts b/packages/artifact/src/internal-contracts.ts new file mode 100644 index 00000000..8124add4 --- /dev/null +++ b/packages/artifact/src/internal-contracts.ts @@ -0,0 +1,33 @@ +export interface ArtifactResponse { + containerId: string + size: number + signedContent: string + fileContainerResourceUrl: string + type: string + name: string + url: string +} + +export interface CreateArtifactParameters { + Type: string + Name: string +} + +export interface PatchArtifactSize { + Size: number +} + +export interface PatchArtifactSizeSuccessResponse { + containerId: number + size: number + signedContent: string + type: string + name: string + url: string + uploadUrl: string +} + +export interface UploadResults { + size: number + failedItems: string[] +} diff --git a/packages/artifact/src/internal-download-options.ts b/packages/artifact/src/internal-download-options.ts new file mode 100644 index 00000000..e767557a --- /dev/null +++ b/packages/artifact/src/internal-download-options.ts @@ -0,0 +1,7 @@ +export interface DownloadOptions { + /** + * Specifies if a folder is created for the artifact that is downloaded (contents downloaded into this folder), + * defaults to false if not specified + * */ + createArtifactFolder?: boolean +} diff --git a/packages/artifact/src/internal-download-response.ts b/packages/artifact/src/internal-download-response.ts new file mode 100644 index 00000000..e5069672 --- /dev/null +++ b/packages/artifact/src/internal-download-response.ts @@ -0,0 +1,11 @@ +export interface DownloadResponse { + /** + * The name of the artifact that was downloaded + */ + artifactName: string + + /** + * The full Path to where the artifact was downloaded + */ + downloadPath: string +} diff --git a/packages/artifact/src/internal-upload-http-client.ts b/packages/artifact/src/internal-upload-http-client.ts new 
file mode 100644 index 00000000..79646c16 --- /dev/null +++ b/packages/artifact/src/internal-upload-http-client.ts @@ -0,0 +1,322 @@ +import {debug, warning, info} from '@actions/core' +import {HttpClientResponse, HttpClient} from '@actions/http-client/index' +import {IHttpClientResponse} from '@actions/http-client/interfaces' +import { + ArtifactResponse, + CreateArtifactParameters, + PatchArtifactSize, + UploadResults +} from './internal-contracts' +import * as fs from 'fs' +import {UploadSpecification} from './internal-upload-specification' +import {UploadOptions} from './internal-upload-options' +import {URL} from 'url' +import { + createHttpClient, + getArtifactUrl, + getContentRange, + getRequestOptions, + isRetryableStatusCode, + isSuccessStatusCode +} from './internal-utils' +import { + getUploadChunkConcurrency, + getUploadChunkSize, + getUploadFileConcurrency +} from './internal-config-variables' + +/** + * Creates a file container for the new artifact in the remote blob storage/file service + * @param {string} artifactName Name of the artifact being created + * @returns The response from the Artifact Service if the file container was successfully created + */ +export async function createArtifactInFileContainer( + artifactName: string +): Promise { + const parameters: CreateArtifactParameters = { + Type: 'actions_storage', + Name: artifactName + } + const data: string = JSON.stringify(parameters, null, 2) + const artifactUrl = getArtifactUrl() + const client = createHttpClient() + const requestOptions = getRequestOptions('application/json') + + const rawResponse = await client.post(artifactUrl, data, requestOptions) + const body: string = await rawResponse.readBody() + + if (isSuccessStatusCode(rawResponse.message.statusCode) && body) { + return JSON.parse(body) + } else { + // eslint-disable-next-line no-console + console.log(rawResponse) + throw new Error( + `Unable to create a container for the artifact ${artifactName}` + ) + } +} + +/** + * 
Concurrently upload all of the files in chunks + * @param {string} uploadUrl Base Url for the artifact that was created + * @param {SearchResult[]} filesToUpload A list of information about the files being uploaded + * @returns The size of all the files uploaded in bytes + */ +export async function uploadArtifactToFileContainer( + uploadUrl: string, + filesToUpload: UploadSpecification[], + options?: UploadOptions +): Promise { + const client = createHttpClient() + const FILE_CONCURRENCY = getUploadFileConcurrency() + const CHUNK_CONCURRENCY = getUploadChunkConcurrency() + const MAX_CHUNK_SIZE = getUploadChunkSize() + debug( + `File Concurrency: ${FILE_CONCURRENCY}, Chunk Concurrency: ${CHUNK_CONCURRENCY} and Chunk Size: ${MAX_CHUNK_SIZE}` + ) + + const parameters: UploadFileParameters[] = [] + + // by default, file uploads will continue if there is an error unless specified differently in the options + let continueOnError = true + if (options) { + if (options.continueOnError === false) { + continueOnError = false + } + } + + // Prepare the necessary parameters to upload all the files + for (const file of filesToUpload) { + const resourceUrl = new URL(uploadUrl) + resourceUrl.searchParams.append('itemPath', file.uploadFilePath) + parameters.push({ + file: file.absoluteFilePath, + resourceUrl: resourceUrl.toString(), + restClient: client, + concurrency: CHUNK_CONCURRENCY, + maxChunkSize: MAX_CHUNK_SIZE, + continueOnError + }) + } + + const parallelUploads = [...new Array(FILE_CONCURRENCY).keys()] + const failedItemsToReport: string[] = [] + let uploadedFiles = 0 + let fileSizes = 0 + let abortPendingFileUploads = false + + // Only allow a certain amount of files to be uploaded at once, this is done to reduce potential errors + await Promise.all( + parallelUploads.map(async () => { + while (uploadedFiles < filesToUpload.length) { + const currentFileParameters = parameters[uploadedFiles] + uploadedFiles += 1 + if (abortPendingFileUploads) { + 
failedItemsToReport.push(currentFileParameters.file) + continue + } + + const uploadFileResult = await uploadFileAsync(currentFileParameters) + fileSizes += uploadFileResult.successfulUploadSize + if (uploadFileResult.isSuccess === false) { + failedItemsToReport.push(currentFileParameters.file) + if (!continueOnError) { + // Existing uploads will be able to finish however all pending uploads will fail fast + abortPendingFileUploads = true + } + } + } + }) + ) + + info(`Total size of all the files uploaded is ${fileSizes} bytes`) + return { + size: fileSizes, + failedItems: failedItemsToReport + } +} + +/** + * Asynchronously uploads a file. If the file is bigger than the max chunk size it will be uploaded via multiple calls + * @param {UploadFileParameters} parameters Information about the file that needs to be uploaded + * @returns The size of the file that was uploaded in bytes along with any failed uploads + */ +async function uploadFileAsync( + parameters: UploadFileParameters +): Promise { + const fileSize: number = fs.statSync(parameters.file).size + const parallelUploads = [...new Array(parameters.concurrency).keys()] + let offset = 0 + let isUploadSuccessful = true + let failedChunkSizes = 0 + let abortFileUpload = false + + await Promise.all( + parallelUploads.map(async () => { + while (offset < fileSize) { + const chunkSize = Math.min(fileSize - offset, parameters.maxChunkSize) + if (abortFileUpload) { + // if we don't want to continue on error, any pending upload chunk will be marked as failed + failedChunkSizes += chunkSize + continue + } + + const start = offset + const end = offset + chunkSize - 1 + offset += parameters.maxChunkSize + const chunk: NodeJS.ReadableStream = fs.createReadStream( + parameters.file, + { + start, + end, + autoClose: false + } + ) + + const result = await uploadChunk( + parameters.restClient, + parameters.resourceUrl, + chunk, + start, + end, + fileSize + ) + + if (!result) { + /** + * Chunk failed to upload, report as failed 
and do not continue uploading any more chunks for the file. It is possible that part of a chunk was + * successfully uploaded so the server may report a different size for what was uploaded + **/ + isUploadSuccessful = false + failedChunkSizes += chunkSize + warning(`Aborting upload for ${parameters.file} due to failure`) + abortFileUpload = true + } + } + }) + ) + return { + isSuccess: isUploadSuccessful, + successfulUploadSize: fileSize - failedChunkSizes + } +} + +/** + * Uploads a chunk of an individual file to the specified resourceUrl. If the upload fails and the status code + * indicates a retryable status, we try to upload the chunk as well + * @param {HttpClient} restClient RestClient that will be making the appropriate HTTP call + * @param {string} resourceUrl Url of the resource that the chunk will be uploaded to + * @param {NodeJS.ReadableStream} data Stream of the file that will be uploaded + * @param {number} start Starting byte index of file that the chunk belongs to + * @param {number} end Ending byte index of file that the chunk belongs to + * @param {number} totalSize Total size of the file in bytes that is being uploaded + * @returns if the chunk was successfully uploaded + */ +async function uploadChunk( + restClient: HttpClient, + resourceUrl: string, + data: NodeJS.ReadableStream, + start: number, + end: number, + totalSize: number +): Promise { + info( + `Uploading chunk of size ${end - + start + + 1} bytes at offset ${start} with content range: ${getContentRange( + start, + end, + totalSize + )}` + ) + + const requestOptions = getRequestOptions( + 'application/octet-stream', + totalSize, + getContentRange(start, end, totalSize) + ) + + const uploadChunkRequest = async (): Promise => { + return await restClient.sendStream('PUT', resourceUrl, data, requestOptions) + } + + const response = await uploadChunkRequest() + if (isSuccessStatusCode(response.message.statusCode)) { + debug( + `Chunk for ${start}:${end} was successfully uploaded to 
${resourceUrl}` + ) + return true + } else if (isRetryableStatusCode(response.message.statusCode)) { + info( + `Received http ${response.message.statusCode} during chunk upload, will retry at offset ${start} after 10 seconds.` + ) + await new Promise(resolve => setTimeout(resolve, 10000)) + const retryResponse = await uploadChunkRequest() + if (isSuccessStatusCode(retryResponse.message.statusCode)) { + return true + } else { + info(`Unable to upload chunk even after retrying`) + // eslint-disable-next-line no-console + console.log(response) + return false + } + } + + // Upload must have failed spectacularly somehow, log full result for diagnostic purposes + // eslint-disable-next-line no-console + console.log(response) + return false +} + +/** + * Updates the size of the artifact from -1 which was initially set when the container was first created for the artifact. + * Updating the size indicates that we are done uploading all the contents of the artifact. A server side check will be run + * to check that the artifact size is correct for billing purposes + */ +export async function patchArtifactSize( + size: number, + artifactName: string +): Promise { + const client = createHttpClient() + const requestOptions = getRequestOptions('application/json') + const resourceUrl = new URL(getArtifactUrl()) + resourceUrl.searchParams.append('artifactName', artifactName) + + const parameters: PatchArtifactSize = {Size: size} + const data: string = JSON.stringify(parameters, null, 2) + debug(`URL is ${resourceUrl.toString()}`) + + const rawResponse: HttpClientResponse = await client.patch( + resourceUrl.toString(), + data, + requestOptions + ) + const body: string = await rawResponse.readBody() + + if (isSuccessStatusCode(rawResponse.message.statusCode)) { + debug( + `Artifact ${artifactName} has been successfully uploaded, total size ${size}` + ) + debug(body) + } else if (rawResponse.message.statusCode === 404) { + throw new Error(`An Artifact with the name ${artifactName} 
was not found`) + } else { + // eslint-disable-next-line no-console + console.log(body) + throw new Error(`Unable to finish uploading artifact ${artifactName}`) + } +} + +interface UploadFileParameters { + file: string + resourceUrl: string + restClient: HttpClient + concurrency: number + maxChunkSize: number + continueOnError: boolean +} + +interface UploadFileResult { + isSuccess: boolean + successfulUploadSize: number +} diff --git a/packages/artifact/src/internal-upload-options.ts b/packages/artifact/src/internal-upload-options.ts new file mode 100644 index 00000000..63d4febe --- /dev/null +++ b/packages/artifact/src/internal-upload-options.ts @@ -0,0 +1,18 @@ +export interface UploadOptions { + /** + * Indicates if the artifact upload should continue if file or chunk fails to upload from any error. + * If there is a error during upload, a partial artifact will always be associated and available for + * download at the end. The size reported will be the amount of storage that the user or org will be + * charged for the partial artifact. Defaults to true if not specified + * + * If set to false, and an error is encountered, all other uploads will stop and any files or chunks + * that were queued will not be attempted to be uploaded. The partial artifact available will only + * include files and chunks up until the failure + * + * If set to true and an error is encountered, the failed file will be skipped and ignored and all + * other queued files will be attempted to be uploaded. 
The partial artifact at the end will have all + * files with the exception of the problematic files(s)/chunks(s) that failed to upload + * + */ + continueOnError?: boolean +} diff --git a/packages/artifact/src/internal-upload-response.ts b/packages/artifact/src/internal-upload-response.ts new file mode 100644 index 00000000..f40379ed --- /dev/null +++ b/packages/artifact/src/internal-upload-response.ts @@ -0,0 +1,22 @@ +export interface UploadResponse { + /** + * The name of the artifact that was uploaded + */ + artifactName: string + + /** + * A list of all items that are meant to be uploaded as part of the artifact + */ + artifactItems: string[] + + /** + * Total size of the artifact in bytes that was uploaded + */ + size: number + + /** + * A list of items that were not uploaded as part of the artifact (includes queued items that were not uploaded if + * continueOnError is set to false). This is a subset of artifactItems. + */ + failedItems: string[] +} diff --git a/packages/artifact/src/internal-upload-specification.ts b/packages/artifact/src/internal-upload-specification.ts new file mode 100644 index 00000000..0df86666 --- /dev/null +++ b/packages/artifact/src/internal-upload-specification.ts @@ -0,0 +1,92 @@ +import * as fs from 'fs' +import {debug} from '@actions/core' +import {join, normalize, resolve} from 'path' +import {checkArtifactName} from './internal-utils' + +export interface UploadSpecification { + absoluteFilePath: string + uploadFilePath: string +} + +/** + * Creates a specification that describes how each file that is part of the artifact will be uploaded + * @param artifactName the name of the artifact being uploaded. 
Used during upload to denote where the artifact is stored on the server + * @param rootDirectory an absolute file path that denotes the path that should be removed from the beginning of each artifact file + * @param artifactFiles a list of absolute file paths that denote what should be uploaded as part of the artifact + */ +export function getUploadSpecification( + artifactName: string, + rootDirectory: string, + artifactFiles: string[] +): UploadSpecification[] { + checkArtifactName(artifactName) + + const specifications: UploadSpecification[] = [] + + if (!fs.existsSync(rootDirectory)) { + throw new Error(`Provided rootDirectory ${rootDirectory} does not exist`) + } + if (!fs.lstatSync(rootDirectory).isDirectory()) { + throw new Error( + `Provided rootDirectory ${rootDirectory} is not a valid directory` + ) + } + // Normalize and resolve, this allows for either absolute or relative paths to be used + rootDirectory = normalize(rootDirectory) + rootDirectory = resolve(rootDirectory) + + /* + Example to demonstrate behavior + + Input: + artifactName: my-artifact + rootDirectory: '/home/user/files/plz-upload' + artifactFiles: [ + '/home/user/files/plz-upload/file1.txt', + '/home/user/files/plz-upload/file2.txt', + '/home/user/files/plz-upload/dir/file3.txt' + ] + + Output: + specifications: [ + ['/home/user/files/plz-upload/file1.txt', 'my-artifact/file1.txt'], + ['/home/user/files/plz-upload/file1.txt', 'my-artifact/file2.txt'], + ['/home/user/files/plz-upload/file1.txt', 'my-artifact/dir/file3.txt'] + ] + */ + for (let file of artifactFiles) { + if (!fs.existsSync(file)) { + throw new Error(`File ${file} does not exist`) + } + + if (!fs.lstatSync(file).isDirectory()) { + // Normalize and resolve, this allows for either absolute or relative paths to be used + file = normalize(file) + file = resolve(file) + if (!file.startsWith(rootDirectory)) { + throw new Error( + `The rootDirectory: ${rootDirectory} is not a parent directory of the file: ${file}` + ) + } + + /* + 
uploadFilePath denotes where the file will be uploaded in the file container on the server. During a run, if multiple artifacts are uploaded, they will all + be saved in the same container. The artifact name is used as the root directory in the container to separate and distinguish uploaded artifacts + + path.join handles all the following cases and would return 'artifact-name/file-to-upload.txt + join('artifact-name/', 'file-to-upload.txt') + join('artifact-name/', '/file-to-upload.txt') + join('artifact-name', 'file-to-upload.txt') + join('artifact-name', '/file-to-upload.txt') + */ + specifications.push({ + absoluteFilePath: file, + uploadFilePath: join(artifactName, file.replace(rootDirectory, '')) + }) + } else { + // Directories are rejected by the server during upload + debug(`Removing ${file} from rawSearchResults because it is a directory`) + } + } + return specifications +} diff --git a/packages/artifact/src/internal-utils.ts b/packages/artifact/src/internal-utils.ts new file mode 100644 index 00000000..afc24342 --- /dev/null +++ b/packages/artifact/src/internal-utils.ts @@ -0,0 +1,115 @@ +import {debug} from '@actions/core' +import {HttpCodes, HttpClient} from '@actions/http-client' +import {BearerCredentialHandler} from '@actions/http-client/auth' +import {IHeaders} from '@actions/http-client/interfaces' +import { + getRuntimeToken, + getRuntimeUrl, + getWorkFlowRunId +} from './internal-config-variables' + +/** + * Parses a env variable that is a number + */ +export function parseEnvNumber(key: string): number | undefined { + const value = Number(process.env[key]) + if (Number.isNaN(value) || value < 0) { + return undefined + } + return value +} + +/** + * Various utility functions to help with the necessary API calls + */ +export function getApiVersion(): string { + return '6.0-preview' +} + +export function isSuccessStatusCode(statusCode?: number): boolean { + if (!statusCode) { + return false + } + return statusCode >= 200 && statusCode < 300 +} + 
+export function isRetryableStatusCode(statusCode?: number): boolean { + if (!statusCode) { + return false + } + + const retryableStatusCodes = [ + HttpCodes.BadGateway, + HttpCodes.ServiceUnavailable, + HttpCodes.GatewayTimeout + ] + return retryableStatusCodes.includes(statusCode) +} + +export function getContentRange( + start: number, + end: number, + total: number +): string { + // Format: `bytes start-end/fileSize + // start and end are inclusive + // For a 200 byte chunk starting at byte 0: + // Content-Range: bytes 0-199/200 + return `bytes ${start}-${end}/${total}` +} + +export function getRequestOptions( + contentType?: string, + contentLength?: number, + contentRange?: string +): IHeaders { + const requestOptions: IHeaders = { + Accept: `application/json;api-version=${getApiVersion()}` + } + if (contentType) { + requestOptions['Content-Type'] = contentType + } + if (contentLength) { + requestOptions['Content-Length'] = contentLength + } + if (contentRange) { + requestOptions['Content-Range'] = contentRange + } + return requestOptions +} + +export function createHttpClient(): HttpClient { + return new HttpClient('action/artifact', [ + new BearerCredentialHandler(getRuntimeToken()) + ]) +} + +export function getArtifactUrl(): string { + const artifactUrl = `${getRuntimeUrl()}_apis/pipelines/workflows/${getWorkFlowRunId()}/artifacts?api-version=${getApiVersion()}` + debug(`Artifact Url: ${artifactUrl}`) + return artifactUrl +} + +/** + * Invalid characters that cannot be in the artifact name or an uploaded file. Will be rejected + * from the server if attempted to be sent over. These characters are not allowed due to limitations with certain + * file systems such as NTFS. 
To maintain platform-agnostic behavior, all characters that are not supported by an + * individual filesystem/platform will not be supported on all fileSystems/platforms + */ +const invalidCharacters = ['\\', '/', '"', ':', '<', '>', '|', '*', '?', ' '] + +/** + * Scans the name of the item being uploaded to make sure there are no illegal characters + */ +export function checkArtifactName(name: string): void { + if (!name) { + throw new Error(`Artifact name: ${name}, is incorrectly provided`) + } + for (const invalidChar of invalidCharacters) { + if (name.includes(invalidChar)) { + throw new Error( + `Artifact name is not valid: ${name}. Contains character: "${invalidChar}". Invalid characters include: ${invalidCharacters.toString()}.` + ) + } + } +} diff --git a/packages/artifact/tsconfig.json b/packages/artifact/tsconfig.json new file mode 100644 index 00000000..a8b812a6 --- /dev/null +++ b/packages/artifact/tsconfig.json @@ -0,0 +1,11 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "baseUrl": "./", + "outDir": "./lib", + "rootDir": "./src" + }, + "include": [ + "./src" + ] +} \ No newline at end of file