diff --git a/README.md b/README.md
index f5f8e6d..36bb0a9 100644
--- a/README.md
+++ b/README.md
@@ -15,7 +15,6 @@ You can do most things with the `Image` class. It takes care of tedious tasks li
 Create a new image based off of the official node image with your files in it.
 
 ```js
-import {Image} from 'container-image-builder'
 ;(async()=>{
 const image = new Image('node:lts-slim','gcr.io/my-project/my-image')
 
@@ -94,7 +93,7 @@ Defined in the order that they compose into an "Image":
 ## API
 
 - `const {Image} = require('container-image-builder')`
-- or `import {Image} from 'container-image-builder'` in typescript etc. 
+- or `import {Image} from 'container-image-builder'` in typescript etc.
 
 ### Image builder API
 
@@ -104,7 +103,8 @@ Defined in the order that they compose into an "Image":
   - targetImage the name of the image you're going to be saving to. calls to image.save() will replace this image.
 
-- `image.addFiles({[localDirectory]:targetDirectory},options): Promise<..>`
+- `image.addFiles({[targetDirectory]:localDirectory},options): Promise<..>`
+  - tars local directories and places each one at its targetDirectory in a single layer.
   - symlinks are replaced with their files/directories as they are written to the layer tarball by default.
   - options
@@ -120,6 +120,11 @@ Defined in the order that they compose into an "Image":
 - `image.addFiles(localDirectory,targetDirectory,options) :Promise<..>`
 - `image.addFiles(localDirectory,options) :Promise<..>`
 
+  - _BREAKING CHANGE_ between 1.x and 2.x
+    - the positions of `targetDirectory` and `localDirectory` are flipped in the object form.
+    - when paths are specified as an object, the keys are now `targetDirectory`.
+    - this makes it possible to copy the same local files to several paths in the container, and enables CustomFiles (see the sketch after this list).
+
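+For example, a minimal sketch of the 2.x object form (the target image name and the local `./app` path are placeholders):
+
+```js
+const {Image} = require('container-image-builder')
+
+;(async()=>{
+  const image = new Image('node:lts-slim', 'gcr.io/my-project/my-image')
+  // keys are target paths in the image, values are local paths, so the
+  // same local directory can land at two different paths in the image.
+  await image.addFiles({'/srv/app': './app', '/srv/app-rollback': './app'})
+  await image.save()
+})()
+```
+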
 - `image.save(tags?: string[], options)`
   - save changes to the image. by default this saves the updated image as the `latest` tag
   - `tags`, `string[]`
@@ -192,6 +197,40 @@ Defined in the order that they compose into an "Image":
   - remove the layer tagged in the manifest by digest. save it's offset in the array.
   - remove the uncompressedDigest from the image config that matches the offset above
 
+- `const {CustomFile} = require('container-image-builder')`
+  - you can pass a CustomFile to image.addFiles as a localPath to write in-memory data or a stream to the layer tarball.
+  - `image.addFiles({'/help.md':new CustomFile({data:Buffer.from('hello')})})`
+  - `image.addFiles({'/google.html':new CustomFile({data:request('https://google.com'),size:**you must have size beforehand for streams**})})`
+  - useful for creating whiteout files etc.
+
+- `customFile = new CustomFile(options)`
+  - options
+    - mode
+      - defaults to `0o644` owner read write, everyone read.
+      - see [fs.chmod](https://nodejs.org/dist/latest-v10.x/docs/api/fs.html#fs_file_modes)
+      - permission bits are extracted from the mode provided via `& 0o7777` and type bits are set based on type.
+    - size
+      - required for streams. optional with a buffer.
+    - data
+      - a stream or buffer of data which will be the contents of the file.
+      - required if type is File
+    - type
+      - defaults to File
+      - supported types are File, Directory, and SymbolicLink
+    - linkPath
+      - only used when type is SymbolicLink
+
+- `customFile.uid`
+  - defaults to 0. set to a number to change the uid
+- `customFile.gid`
+  - defaults to 0. set to a number to change the gid
+- `customFile.ctime`
+  - defaults to new Date(). set to a Date to change it
+- `customFile.atime`
+  - defaults to new Date(). set to a Date to change it
+- `customFile.mtime`
+  - defaults to new Date(). set to a Date to change it
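+
+For example, a sketch combining the pieces above (all paths are placeholders; the `.wh.` name follows the layer-tarball whiteout convention mentioned above):
+
+```js
+const {Image, CustomFile} = require('container-image-builder')
+
+;(async()=>{
+  const image = new Image('node:lts-slim', 'gcr.io/my-project/my-image')
+  await image.addFiles({
+    // in-memory file. the size is taken from the buffer.
+    '/etc/motd': new CustomFile({data: Buffer.from('hello\n')}),
+    // whiteout entry: an empty file that hides /var/cache from lower layers.
+    '/var/.wh.cache': new CustomFile({data: Buffer.alloc(0)}),
+    // symlink. no data needed; linkPath is the target of the link.
+    '/usr/bin/app': new CustomFile({type: 'SymbolicLink', linkPath: '/srv/app/index.js'})
+  })
+  await image.save()
+})()
+```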
 
 ### docker registry auth
 
 `const {auth} = require('container-image-builder')`
 
@@ -267,7 +306,7 @@ like adding a new blob directly to the target registry before you call addLayer.
   - the sha256 sum of the blob you want to download
   - stream, boolean
     - default false
-    - if you'ed like to download to a buffer or resolve to a readable stream.
+    - if you'd like to download to a buffer or resolve to a readable stream.
 
 - `client.upload(blob, contentLength, digest) Promise<{contentLength: number, digest: string}> `
   - note: upload a blob to the registry. you do not need to know the content length and digest before hand. if they're not provided they'll be calculated on the fly and a 2 step upload will be performed. It's more efficient if you know the contentLength and digest beforehand, but if you're streaming it can be more efficient to calculate on the fly.
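+
+For example, a sketch of both upload styles (assumes an already-constructed registry `client`, that a buffer is an acceptable blob here, and that this runs inside an async function):
+
+```js
+const crypto = require('crypto')
+const blob = Buffer.from('some layer bytes')
+
+// 1 step upload: contentLength and digest are known beforehand.
+const digest = 'sha256:' + crypto.createHash('sha256').update(blob).digest('hex')
+await client.upload(blob, blob.length, digest)
+
+// 2 step upload: the client calculates length and digest on the fly.
+await client.upload(blob)
+```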
diff --git a/package-lock.json b/package-lock.json
index de2a835..9f64f93 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -2404,9 +2404,9 @@
       "dev": true
     },
     "js-yaml": {
-      "version": "3.12.1",
-      "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.12.1.tgz",
-      "integrity": "sha512-um46hB9wNOKlwkHgiuyEVAybXBjwFUV0Z/RaHJblRd9DXltue9FTYvzCr9ErQrK9Adz5MU4gHWVaNUfdmrC8qA==",
+      "version": "3.13.0",
+      "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.0.tgz",
+      "integrity": "sha512-pZZoSxcCYco+DIKBTimr67J6Hy+EYGZDY/HCWC+iAEA9h1ByhMXAIVUXMcMFpOCxQ/xjXmPI2MkDL5HRm5eFrQ==",
       "dev": true,
       "requires": {
         "argparse": "^1.0.7",
diff --git a/package.json b/package.json
index 5e43d6a..55621df 100644
--- a/package.json
+++ b/package.json
@@ -13,6 +13,7 @@
     "fix": "gts fix",
     "prepare": "npm run compile",
     "pretest": "npm run compile",
+    "pretest-one": "npm run compile",
     "posttest": "npm run check",
     "version": "conventional-changelog -p angular -i CHANGELOG.md -s -r 0 && git add CHANGELOG.md"
   },
diff --git a/src/index.ts b/src/index.ts
index f276f3b..2776b36 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -13,6 +13,7 @@
 // limitations under the License.
 //
 import * as crypto from 'crypto';
+import {GoogleAuthOptions} from 'google-auth-library';
 import * as retry from 'p-retry';
 import * as path from 'path';
 import * as zlib from 'zlib';
@@ -24,7 +25,6 @@ import {ImageLocation, parse as parseSpecifier} from './image-specifier';
 import * as packer from './packer';
 import {pending, PendingTracker} from './pending';
 import {ImageConfig, ManifestV2, RegistryClient} from './registry';
-import { GoogleAuthOptions } from 'google-auth-library';
 
 const tar = require('tar');
 
@@ -68,11 +68,12 @@ export class Image {
   // manifest
 
   constructor(
-      imageSpecifier: string, targetImage?: string|ImageOptions, options?: ImageOptions) {
+      imageSpecifier: string, targetImage?: string|ImageOptions,
+      options?: ImageOptions) {
     this.options = options || {};
 
-    if(typeof targetImage !== 'string'){
-      this.options = this.options || targetImage
+    if (typeof targetImage !== 'string') {
+      // the second arg is an options object here, not a target image name
+      this.options = targetImage || this.options;
       targetImage = undefined;
     }
 
@@ -151,14 +152,14 @@ export class Image {
-      throw new Error(
-          'specifying a target directory name when the dir is an object of name:target doesn\'t make sense. try addFiles({dir:target})');
+      throw new Error(
+          'specifying a target directory name when the dir is an object of target:local doesn\'t make sense. try addFiles({target:dir})');
     }
-    dir = {[dir]: targetDir};
+    dir = {[targetDir]: dir};
   } else if (targetDir) {
     // options!
     options = targetDir;
   }
 
   // have to wrap in promise because the tar stream can emit error out of band
-  const p = new Promise(async (resolve, reject) => {
+  let p = new Promise(async (resolve, reject) => {
     const tarStream = packer.pack(dir, options);
 
     tarStream.on('error', (e: Error) => reject(e));
@@ -182,7 +183,7 @@
             result.digest, uncompressedDigest, result.contentLength));
   });
 
-    this.pending.track(p);
+    p = this.pending.track(p);
 
   return p as Promise<{
     mediaType: string; digest: string; size: number;
@@ -253,14 +254,13 @@ export class Image {
     Cmd?: string[],
     WorkingDir?: string
   }) {
-    tags = tags || ['latest'];
-
-    options = options || {};
-
     const targetImage = this.targetImage;
     const client = await this.client(targetImage, true);
     const imageData = await this.getImageData();
 
+    tags = tags || [targetImage.tag || 'latest'];
+    options = options || {};
+
     await this.syncBaseImage(options);
 
     await Promise.all(this.pending.active());
@@ -406,7 +406,7 @@ export const auth = async (
   try {
     if (image.registry.indexOf('gcr.io') > -1) {
       return await gcrAuth(
-          image, scope, options ? options['gcr.io']||{} : {});
+          image, scope, options ? options['gcr.io'] || {} : {});
     } else if (image.registry.indexOf('docker.io') > -1) {
       return await dockerAuth(
           image, scope, options ? options['docker.io'] : undefined);
@@ -423,11 +423,15 @@ export const auth = async (
   return res;
 };
 
+export const pack = packer.pack;
+
+// expose CustomFile to pass to image.addFiles
+export const CustomFile = packer.CustomFile;
 
 export interface AuthConfig {
-  'gcr.io'?:GoogleAuthOptions;
+  'gcr.io'?: GoogleAuthOptions;
   // tslint:disable-next-line:no-any
-  'docker.io'?:any;
+  'docker.io'?: any;
   // tslint:disable-next-line:no-any
   [k: string]: any;
 }
diff --git a/src/packer.ts b/src/packer.ts
index 36a05b7..cc5be82 100644
--- a/src/packer.ts
+++ b/src/packer.ts
@@ -14,7 +14,7 @@
 
 import * as fs from 'fs';
 import * as _path from 'path';
-import {Readable} from 'stream';
+import {PassThrough, Readable, Writable} from 'stream';
 
 import {logger} from './emitter';
 import * as walker from './walker';
@@ -40,9 +40,10 @@ export type PackOptions = {
 }&walker.Options;
 
 export const pack =
-    (paths: {[fromPath: string]: string}|string, options?: PackOptions) => {
+    (paths: {[toPath: string]: string|CustomFile}|string,
+     options?: PackOptions) => {
      // thanks typescript.
-      let pathObj: {[fromPath: string]: string} = {};
+      let pathObj: {[toPath: string]: string|CustomFile} = {};
       if (typeof paths === 'string') {
         pathObj[paths] = paths;
       } else {
@@ -51,6 +52,8 @@ export const pack =
 
       options = options || {};
 
+      const outer = new PassThrough();
+
       // flatten every link into the tree its in
       options.find_links = false;
       options.no_return = true;
@@ -58,11 +61,14 @@ export const pack =
       let ends = 0;
       let starts = 0;
 
-      const queue: Array<{path: string, toPath: string, stat: fs.Stats}> = [];
+      const queue:
+          Array<{path: string, toPath: string, stat: fs.Stats | CustomFile}> =
+              [];
 
       // tar gzip:false etc.
       const pack = new Pack(
           Object.assign({}, options.tar || {}, {gzip: false, jobs: Infinity}));
+
       let working = false;
       const work = () => {
         if (working) return;
@@ -78,7 +84,9 @@ export const pack =
         starts++;
         let entry;
         const {path, stat, toPath} = obj;
+
         entry = pathToReadEntry({path, stat, toPath, portable: false});
+
         entry.on('end', () => {
           ends++;
           working = false;
@@ -87,6 +95,7 @@ export const pack =
         entry.on('error', (err: Error) => {
           pack.emit('error', err);
         });
+
         pack.write(entry);
       };
 
@@ -94,21 +103,31 @@ export const pack =
       let walkEnded = false;
       const walks: Array<Promise<void>> = [];
 
-      Object.keys(pathObj).forEach((path) => {
-        const toPath = pathObj[path];
+      Object.keys(pathObj).forEach((toPath) => {
+        let path = pathObj[toPath];
+
+        if (path instanceof CustomFile) {
+          queue.push({
+            path: toPath,
+            toPath,
+            stat: path,
+          });
+          return work();
+        }
 
         path = _path.resolve(path);
 
-        // ill need to use this to pause and resume. TODO
+        // tslint:disable-next-line:only-arrow-functions
         walks.push(walker.walk(path, options, function(file, stat) {
           queue.push({
             path: file,
-            toPath: _path.join(toPath, file.replace(path, '')),
+            toPath: _path.join(toPath, file.replace(path as string, '')),
             stat,
           });
           work();
         }));
       });
+
       Promise.all(walks)
           .then(() => {
             walkEnded = true;
@@ -117,18 +136,18 @@ export const pack =
             }
           })
          .catch((e) => {
-            pack.emit('error', e);
+            outer.emit('error', e);
           });
 
-      return pack as Readable;
+      return pack.pipe(outer);
     };
 
 function pathToReadEntry(opts: {
   path: string,
   toPath?: string,
-  linkpath?: string, stat: fs.Stats,
+  linkpath?: string, stat: fs.Stats|CustomFile,
   mtime?: number,
-  noMtime?: boolean, portable: boolean
+  noMtime?: boolean, portable: boolean,
 }) {
   const {path, linkpath, stat} = opts;
   let {toPath} = opts;
@@ -145,7 +164,7 @@ function pathToReadEntry(opts: {
   // dont write an mtime
   const noMtime = opts.noMtime;
   // dont write anything other than size, linkpath, path and mode
-  const portable = opts.portable;
+  const portable = opts.portable !== false;
 
   // add trailing / to directory paths
   toPath = toPath || path;
@@ -154,23 +173,42 @@ function pathToReadEntry(opts: {
   }
 
   const header = new Header({
-        path: toPath,
-        // if this is a link. the path the link points to.
-        linkpath,
-        mode: modeFix(stat.mode, stat.isDirectory()),
-        uid: portable ? null : stat.uid,
-        gid: portable ? null : stat.gid,
-        size: stat.isDirectory() ? 0 : stat.size,
-        mtime: noMtime ? null : mtime || stat.mtime,
-        type: statToType(stat),
-        uname: portable ? null : stat.uid === myuid ? myuser : '',
-        atime: portable ? null : stat.atime,
-        ctime: portable ? null : stat.ctime
-      });
-
-  const entry = new ReadEntry(header);
-
-  if (stat.isFile()) {
+    path: toPath,
+    // if this is a link. the path the link points to. CustomFiles carry
+    // their own linkPath.
+    linkpath: linkpath ||
+        (stat instanceof CustomFile ? stat.linkPath : undefined),
+    mode: modeFix(stat.mode, stat.isDirectory()),
+    uid: portable ? null : stat.uid || 0,
+    gid: portable ? null : stat.gid || 0,
+    size: stat.isDirectory() ? 0 : stat.size,
+    mtime: noMtime ? null : mtime || stat.mtime,
+    uname: portable ? null : stat.uid === myuid ? myuser : '',
+    atime: portable ? null : stat.atime,
+    ctime: portable ? null : stat.ctime
+  }) as Header;
+
+
+  header.type = statToType(stat) || 'File';
+
+  const entry = new ReadEntry(header) as ReadEntry;
+
+  if (stat instanceof CustomFile) {
+    if (stat.data) {
+      if ((stat.data as Readable).pipe) {
+        (stat.data as Readable).pipe(entry);
+      } else {
+        // if we write the entry data directly via entry.write it causes the
+        // entry stream to never complete.
+        const ps = new PassThrough();
+        ps.pause();
+        ps.on('resume', () => {
+          ps.end(stat.data);
+        });
+        ps.pipe(entry);
+      }
+    } else {
+      entry.end();
+    }
+  } else if (stat.isFile()) {
     fs.createReadStream(path).pipe(entry);
   } else {
     entry.end();
@@ -179,11 +217,77 @@
   return entry;
 }
 
+export class CustomFile {
+  mode: number;
+  linkPath?: string;
+  data?: Buffer|Readable;
+
+  uid = 0;
+  gid = 0;
+  ctime = new Date();
+  atime = new Date();
+  mtime = new Date();
+  size = 0;
 
-function statToType(stat: fs.Stats) {
+  constructor(opts: {
+    mode?: number,  // permission bits. defaults to 0o644
+    type?: string,
+    linkPath?: string,
+    data?: Buffer|Readable,
+    size?: number
+  }) {
+    const type = opts.type || 'File';
+    // Take permission bits from the mode provided, then set type bits.
+    this.mode = ((opts.mode === undefined ? 0o644 : opts.mode) & 0o7777) |
+        entryTypeToMode(type);
+    this.linkPath = opts.linkPath;
+    this.data = opts.data;
+    this.size = opts.size || 0;
+    if (Buffer.isBuffer(opts.data)) {
+      this.size = opts.data.length;
+    } else if (!this.size && type === 'File') {
+      throw new Error(
+          'if data is not a buffer and this CustomFile is a "File" `opts.size` is required');
+    }
+  }
+
+  isDirectory() {
+    return (this.mode & fs.constants.S_IFMT) === fs.constants.S_IFDIR;
+  }
+
+  isSymbolicLink() {
+    return (this.mode & fs.constants.S_IFMT) === fs.constants.S_IFLNK;
+  }
+
+  isFile() {
+    return (this.mode & fs.constants.S_IFMT) === fs.constants.S_IFREG;
+  }
+}
+
+function statToType(stat: fs.Stats|CustomFile) {
   if (stat.isDirectory()) return 'Directory';
   if (stat.isSymbolicLink()) return 'SymbolicLink';
   if (stat.isFile()) return 'File';
   // return nothing if unsupported.
   return;
 }
+
+function entryTypeToMode(type: string) {
+  if (type === 'Directory') return fs.constants.S_IFDIR;
+  if (type === 'SymbolicLink') return fs.constants.S_IFLNK;
+  if (type === 'File') return fs.constants.S_IFREG;
+  // throw if the type is unsupported.
+  throw new Error(
+      'unsupported entry type ' + type +
+      '. supported types are "Directory", "SymbolicLink", "File"');
+}
+
+
+interface Header {
+  // tslint:disable-next-line:no-any
+  constructor(stat: {[k: string]: any}): Header;
+  type: string;
+}
+
+interface ReadEntry extends Writable {
+  constructor(): ReadEntry;
+}
diff --git a/src/pending.ts b/src/pending.ts
index 47b7b5f..3883a67 100644
--- a/src/pending.ts
+++ b/src/pending.ts
@@ -12,7 +12,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-export const pending = () => {
+export const pending = (): PendingTracker => {
   // tslint:disable-next-line:no-any
   const active: {[k: string]: Promise<any>} = {};
   let inc = 0;
@@ -21,19 +21,39 @@
     active: () => {
       return Object.values(active);
     },
-    track: (p: Promise<{}>) => {
+    track: (p: Promise<{}>): Promise<{}> => {
       if (inc + 1 === inc) inc = -1;
-      active[inc++] = p;
-      return p.finally(() => {
-        delete active[inc];
+      // capture the key before it is incremented so cleanup deletes the
+      // right entry
+      const _inc = inc;
+      active[inc++] = p;
+      if (p.finally) {
+        p.finally(() => {
+          delete active[_inc];
+        }).catch(() => {
+          // swallow; callers handle rejections on the original promise
+        });
+        return p;
+      }
+
+      // tslint:disable-next-line:no-any
+      let resolve: any, reject: any;
+      const proxy = new Promise((res, rej) => {
+        resolve = res;
+        reject = rej;
+      });
+
+      p.then((value) => {
+        delete active[_inc];
+        resolve(value);
+      }).catch((e) => {
+        delete active[_inc];
+        reject(e);
       });
+      return proxy as Promise<{}>;
     }
-  } as PendingTracker;
+  };
 };
 
 export type PendingTracker = {
   // tslint:disable-next-line:no-any
   active: () => Array<Promise<any>>,
-  track: (p: Promise<{}>) => void
+  track: (p: Promise<{}>) => Promise<{}>
 };
\ No newline at end of file
diff --git a/src/walker.ts b/src/walker.ts
index c87ad25..68dbda4 100644
--- a/src/walker.ts
+++ b/src/walker.ts
@@ -45,8 +45,6 @@ export const walk = async (
     ignoreTree[entryDir] = ignores;
   }
 
-  console.log(ignoreTree);
-
   const applyRules = (dir: string, files: string[]) => {
     const currentDir = dir;
 
@@ -76,12 +74,12 @@ export const walk = async (
     return files;
   };
 
-  //TODO: call user provided filter function first if set.
-  //let origFilter:(dir:string,files:string[])=>string[]|Promise<string[]>;
-  //if(options.filter) origFilter = options.filter;
+  // TODO: call user provided filter function first if set.
+  // let origFilter:(dir:string,files:string[])=>string[]|Promise<string[]>;
+  // if(options.filter) origFilter = options.filter;
 
   options.filter = (dir, files) => {
-    //if(origFilter) files = await origFilter(dir,files);
+    // if(origFilter) files = await origFilter(dir,files);
     if (!files.length) return [];
 
     const unread: Array<Promise<void>> = [];
@@ -138,4 +136,4 @@ export const readIgnore = (file: string): Promise<string[]> => {
     resolve(rules);
   });
 });
-};
\ No newline at end of file
+};
diff --git a/test/packer-custom-file.ts b/test/packer-custom-file.ts
new file mode 100644
index 0000000..ef4770c
--- /dev/null
+++ b/test/packer-custom-file.ts
@@ -0,0 +1,104 @@
+import * as assert from 'assert';
+import * as path from 'path';
+import {PassThrough, Readable} from 'stream';
+
+import {CustomFile, pack} from '../src/packer';
+
+describe('packer customFiles', () => {
+  it('packs a custom file', (done) => {
+    const nodeTar = require('tar');
+
+    const fixtureDir = path.join(__dirname, '..', '..', 'fixtures', 'project');
+
+    /*
+    packer: entry header Header {
+      cksumValid: false,
+      needPax: false,
+      nullBlock: false,
+      block: null,
+      path: '/a-file',
+      mode: 420,
+      uid: null,
+      gid: null,
+      size: 8,
+      mtime: 2019-03-27T23:20:06.287Z,
+      cksum: null,
+      linkpath: null,
+      uname: null,
+      gname: null,
+      devmaj: 0,
+      devmin: 0,
+      atime: null,
+      ctime: null,
+      [Symbol(type)]: '0' }
+    */
+
+    const tar =
+        pack({'/a-file': new CustomFile({data: Buffer.from('content')})});
+
+    let paths: string[] = [];
+    const data: {[k: string]: Buffer[]} = {};
+    const bufs: Buffer[] = [];
+
+    const extract = tar.pipe(new nodeTar.Parse());
+
+    extract.on('entry', (e: Readable&{path: string}) => {
+      paths.push(e.path);
+      data[e.path] = [];
+      e.on('data', (buf) => {
+        data[e.path].push(buf);
+      });
+    });
+
+    extract.on('end', () => {
+      paths = paths.sort();
+
+
+      assert.deepStrictEqual(['a-file'], paths, 'found custom file');
+
+      assert.strictEqual(
+          Buffer.concat(data['a-file']) + '', 'content',
+          'should be able to extract content from custom files.');
+
+      done();
+    });
+  });
+
+
+
+  it('packs a custom file from stream', (done) => {
+    const nodeTar = require('tar');
+
+    const fixtureDir = path.join(__dirname, '..', '..', 'fixtures', 'project');
+
+    const s = new PassThrough();
+
+    const tar = pack({'/b-file': new CustomFile({data: s, size: 4})});
+    s.end(Buffer.from('bork'));
+
+    let paths: string[] = [];
+
+    const data: {[k: string]: Buffer[]} = {};
+
+    const extract = tar.pipe(nodeTar.t());
+    extract.on('entry', (e: Readable&{path: string}) => {
+      paths.push(e.path);
+      data[e.path] = [];
+      e.on('data', (buf) => {
+        data[e.path].push(buf);
+      });
+      e.resume();
+    });
+
+    extract.on('end', () => {
+      paths = paths.sort();
+      assert.deepStrictEqual(['b-file'], paths, 'found custom file');
+
+      assert.strictEqual(
+          Buffer.concat(data['b-file']) + '', 'bork',
+          'should be able to extract content from custom files.');
+
+      done();
+    });
+  });
+});
\ No newline at end of file
diff --git a/test/packer.ts b/test/packer.ts
index e731750..00dc032 100644
--- a/test/packer.ts
+++ b/test/packer.ts
@@ -1,8 +1,8 @@
 import * as assert from 'assert';
 import * as path from 'path';
-import {Readable} from 'stream';
+import {PassThrough, Readable, Transform} from 'stream';
 
-import {pack} from '../src/packer';
+import {CustomFile, pack} from '../src/packer';
 
 describe('can pack', () => {
   it('packs a directory', (done) => {
@@ -10,7 +10,7 @@ describe('can pack', () => {
 
     const fixtureDir = path.join(__dirname, '..', '..', 'fixtures', 'project');
 
-    const tar = pack({[fixtureDir]: '/apples'});
+    const tar = pack({'/apples': fixtureDir});
 
     let paths: string[] = [];
 
@@ -40,7 +40,7 @@ describe('can pack', () => {
 
     const fixtureDir = path.join(__dirname, '..', '..', 'fixtures', 'project');
 
-    const tar = pack({[fixtureDir]: '/apples'}, {ignoreFiles: ['.ignore']});
+    const tar = pack({'/apples': fixtureDir}, {ignoreFiles: ['.ignore']});
 
     let paths: string[] = [];
 
@@ -70,7 +70,7 @@ describe('can pack', () => {
 
     const fixtureDir = path.join(__dirname, '..', '..', 'fixtures', 'project');
 
-    const tar = pack({[fixtureDir]: '/apples'}, {ignores: ['**/test']});
+    const tar = pack({'/apples': fixtureDir}, {ignores: ['**/test']});
 
     let paths: string[] = [];
 
@@ -93,4 +93,114 @@ describe('can pack', () => {
       done();
     });
   });
+
+  it('packs multiple directories', (done) => {
+    const nodeTar = require('tar');
+
+    const fixtureDir = path.join(__dirname, '..', '..', 'fixtures', 'project');
+    const otherFixtureDir =
+        path.join(__dirname, '..', '..', 'fixtures', 'outside-of-project');
+
+    const tar = pack({'/apples': fixtureDir, '/oranges': otherFixtureDir});
+
+    let paths: string[] = [];
+
+    const extract = tar.pipe(nodeTar.t());
+    extract.on('entry', (e: Readable&{path: string}) => {
+      paths.push(e.path);
+      e.resume();
+    });
+
+    extract.on('end', () => {
+      paths = paths.sort();
+      assert.deepStrictEqual(
+          [
+            'apples/.ignore', 'apples/index.js', 'apples/lib/',
+            'apples/lib/a-file.js', 'apples/test/', 'apples/test/taco.yaml',
+            'apples/test/test.js', 'oranges/a-file.js'
+          ],
+          paths, 'should have tarred exactly the specified entries');
+
+      done();
+    });
+  });
+
+
+  it('packs a single file', (done) => {
+    const nodeTar = require('tar');
+
+    const fixtureDir = path.join(__dirname, '..', '..', 'fixtures', 'project');
+
+    const tar = pack({'/apples/index.js': path.join(fixtureDir, 'index.js')});
+
+    let paths: string[] = [];
+
+    const extract = tar.pipe(nodeTar.t());
+    extract.on('entry', (e: Readable&{path: string}) => {
+      paths.push(e.path);
+      e.resume();
+    });
+
+    extract.on('end', () => {
+      paths = paths.sort();
+      assert.deepStrictEqual(
+          ['apples/index.js'], paths,
+          'packed exactly the single specified file');
+
+      done();
+    });
+  });
+
+
+  it('packs a single file and directory', (done) => {
+    const nodeTar = require('tar');
+
+    const otherFixtureDir =
+        path.join(__dirname, '..', '..', 'fixtures', 'outside-of-project');
+    const fixtureDir = path.join(__dirname, '..', '..', 'fixtures', 'project');
+
+    const tar = pack({
+      '/apples': otherFixtureDir,
+      '/apples/index.js': path.join(fixtureDir, 'index.js')
+    });
+
+    let paths: string[] = [];
+
+    const extract = tar.pipe(nodeTar.t());
+    extract.on('entry', (e: Readable&{path: string}) => {
+      paths.push(e.path);
+      e.resume();
+    });
+
+    extract.on('end', () => {
+      paths = paths.sort();
+      assert.deepStrictEqual(
+          ['apples/a-file.js', 'apples/index.js'], paths,
+          'packed directory and single file');
+
+      done();
+    });
+  });
+
+  it('errors when asked to pack a file that doesnt exist', (done) => {
+    const nodeTar = require('tar');
+    const tar = pack({'/apples': 'doesntexist'});
+
+    tar.on('error', (err: Error) => {
+      assert.ok(
+          err,
+          'should have gotten error packing source file that doesn\'t exist.');
+      done();
+    });
+
+    tar.on('end', () => {
+      assert.fail('SHOULD HAVE ERRORED');
+      done();
+    });
+  });
+
+
+  // TODO packs directory that doesnt exist
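+  // a sketch for the TODO above, kept skipped until the intended behavior
+  // for missing source directories is pinned down; it assumes the error
+  // surfaces through the returned stream just like the missing-file case.
+  it.skip('errors when asked to pack a directory that doesnt exist', (done) => {
+    const tar = pack({'/apples': 'doesntexist-dir'});
+
+    tar.on('error', (err: Error) => {
+      assert.ok(err, 'should error for a missing source directory');
+      done();
+    });
+  });
 });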