pax_global_header
comment=2ceedf4cf807e89a071ebd585291aa785c980829

tar-fs-3.0.9/.github/workflows/test-node.yml
name: Build Status
on:
  push:
    branches:
      - master
  pull_request:
    branches:
      - master
jobs:
  build:
    strategy:
      matrix:
        node-version: [lts/*]
        os: [ubuntu-latest, macos-latest, windows-latest]
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v3
      - name: Use Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@v3
        with:
          node-version: ${{ matrix.node-version }}
      - run: npm install
      - run: npm test

tar-fs-3.0.9/.gitignore
node_modules
test/fixtures/copy
test/fixtures/invalid
test/fixtures/outside

tar-fs-3.0.9/LICENSE
The MIT License (MIT)

Copyright (c) 2014 Mathias Buus

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

tar-fs-3.0.9/README.md
# tar-fs

Filesystem bindings for [tar-stream](https://github.com/mafintosh/tar-stream).

```
npm install tar-fs
```

## Usage

tar-fs allows you to pack directories into tarballs and extract tarballs into directories.

It doesn't gunzip for you, so if you want to extract a `.tar.gz` you'll need to use something like [gunzip-maybe](https://github.com/mafintosh/gunzip-maybe) in addition.

``` js
const tar = require('tar-fs')
const fs = require('fs')

// packing a directory
tar.pack('./my-directory').pipe(fs.createWriteStream('my-tarball.tar'))

// extracting a directory
fs.createReadStream('my-other-tarball.tar').pipe(tar.extract('./my-other-directory'))
```
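For example, extracting a gzipped tarball could look like the sketch below (it assumes [gunzip-maybe](https://github.com/mafintosh/gunzip-maybe) is installed; the file and directory names are placeholders):

``` js
const tar = require('tar-fs')
const fs = require('fs')
const gunzip = require('gunzip-maybe')

// decompress first, then hand the plain tar stream to tar-fs
fs.createReadStream('my-tarball.tar.gz')
  .pipe(gunzip())
  .pipe(tar.extract('./my-directory'))
```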
To ignore various files when packing or extracting, add an `ignore` function to the options. `ignore` is also an alias for `filter`. Additionally, the `ignore` function receives the entry `header` when extracting, so you can also filter by metadata.

``` js
const pack = tar.pack('./my-directory', {
  ignore (name) {
    return path.extname(name) === '.bin' // ignore .bin files when packing
  }
})

const extract = tar.extract('./my-other-directory', {
  ignore (name) {
    return path.extname(name) === '.bin' // ignore .bin files inside the tarball when extracting
  }
})

const extractFilesDirs = tar.extract('./my-other-other-directory', {
  ignore (_, header) {
    // pass files & directories, ignore e.g. symlinks
    return header.type !== 'file' && header.type !== 'directory'
  }
})
```

You can also specify which entries to pack using the `entries` option

```js
const pack = tar.pack('./my-directory', {
  entries: ['file1', 'subdir/file2'] // only the specified entries will be packed
})
```

If you want to modify the headers when packing/extracting, add a `map` function to the options

``` js
const pack = tar.pack('./my-directory', {
  map (header) {
    header.name = 'prefixed/' + header.name
    return header
  }
})

const extract = tar.extract('./my-directory', {
  map (header) {
    header.name = 'another-prefix/' + header.name
    return header
  }
})
```

Similarly, you can use `mapStream` in case you want to modify the input/output file streams

``` js
const pack = tar.pack('./my-directory', {
  mapStream (fileStream, header) {
    // NOTE: the returned stream HAS to have the same length as the input stream.
    // If not, make sure to update the size in the header passed in here.
    if (path.extname(header.name) === '.js') {
      return fileStream.pipe(someTransform)
    }
    return fileStream
  }
})

const extract = tar.extract('./my-directory', {
  mapStream (fileStream, header) {
    if (path.extname(header.name) === '.js') {
      return fileStream.pipe(someTransform)
    }
    return fileStream
  }
})
```

Set `options.fmode` and `options.dmode` to ensure that extracted files/directories have the corresponding modes

``` js
const extract = tar.extract('./my-directory', {
  dmode: parseInt(555, 8), // all dirs should be readable
  fmode: parseInt(444, 8) // all files should be readable
})
```

It can be useful to use `dmode` and `fmode` if you are packing/unpacking tarballs between *nix/Windows to ensure that all unpacked files/directories are readable.

Alternatively you can set `options.readable` and/or `options.writable` to set the dmode and fmode to readable/writable.

``` js
const extract = tar.extract('./my-directory', {
  readable: true, // all dirs and files should be readable
  writable: true // all dirs and files should be writable
})
```

Set `options.strict` to `false` if you want to ignore errors due to unsupported entry types (like device files).

To dereference symlinks (pack the contents of the symlink instead of the link itself) set `options.dereference` to `true`.

## Copy a directory

Copying a directory with permissions and mtime intact is as simple as

``` js
tar.pack('source-directory').pipe(tar.extract('dest-directory'))
```

## Interaction with [`tar-stream`](https://github.com/mafintosh/tar-stream)

Use `finalize: false` and the `finish` hook to leave the pack stream open for further entries (see [`tar-stream#pack`](https://github.com/mafintosh/tar-stream#packing)), and use `pack` to pass an existing pack stream.
``` js
const mypack = tar.pack('./my-directory', {
  finalize: false,
  finish (sameAsMypack) {
    mypack.entry({ name: 'generated-file.txt' }, 'hello')
    tar.pack('./other-directory', {
      pack: sameAsMypack
    })
  }
})
```

## License

MIT

tar-fs-3.0.9/SECURITY.md
## Security contact information

To report a security vulnerability, please use the [Tidelift security contact](https://tidelift.com/security). Tidelift will coordinate the fix and disclosure.

tar-fs-3.0.9/index.js
const tar = require('tar-stream')
const pump = require('pump')
const fs = require('fs')
const path = require('path')

const win32 = (global.Bare?.platform || process.platform) === 'win32'

exports.pack = function pack (cwd, opts) {
  if (!cwd) cwd = '.'
  if (!opts) opts = {}

  const xfs = opts.fs || fs
  const ignore = opts.ignore || opts.filter || noop
  const mapStream = opts.mapStream || echo
  const statNext = statAll(xfs, opts.dereference ? xfs.stat : xfs.lstat, cwd, ignore, opts.entries, opts.sort)
  const strict = opts.strict !== false
  const umask = typeof opts.umask === 'number' ? ~opts.umask : ~processUmask()
  const pack = opts.pack || tar.pack()
  const finish = opts.finish || noop

  let map = opts.map || noop
  let dmode = typeof opts.dmode === 'number' ? opts.dmode : 0
  let fmode = typeof opts.fmode === 'number' ? opts.fmode : 0

  if (opts.strip) map = strip(map, opts.strip)

  if (opts.readable) {
    dmode |= parseInt(555, 8)
    fmode |= parseInt(444, 8)
  }
  if (opts.writable) {
    dmode |= parseInt(333, 8)
    fmode |= parseInt(222, 8)
  }

  onnextentry()

  function onsymlink (filename, header) {
    xfs.readlink(path.join(cwd, filename), function (err, linkname) {
      if (err) return pack.destroy(err)
      header.linkname = normalize(linkname)
      pack.entry(header, onnextentry)
    })
  }

  function onstat (err, filename, stat) {
    if (pack.destroyed) return
    if (err) return pack.destroy(err)
    if (!filename) {
      if (opts.finalize !== false) pack.finalize()
      return finish(pack)
    }

    if (stat.isSocket()) return onnextentry() // tar does not support sockets...

    let header = {
      name: normalize(filename),
      mode: (stat.mode | (stat.isDirectory() ? dmode : fmode)) & umask,
      mtime: stat.mtime,
      size: stat.size,
      type: 'file',
      uid: stat.uid,
      gid: stat.gid
    }

    if (stat.isDirectory()) {
      header.size = 0
      header.type = 'directory'
      header = map(header) || header
      return pack.entry(header, onnextentry)
    }

    if (stat.isSymbolicLink()) {
      header.size = 0
      header.type = 'symlink'
      header = map(header) || header
      return onsymlink(filename, header)
    }

    // TODO: add fifo etc...

    header = map(header) || header

    if (!stat.isFile()) {
      if (strict) return pack.destroy(new Error('unsupported type for ' + filename))
      return onnextentry()
    }

    const entry = pack.entry(header, onnextentry)
    const rs = mapStream(xfs.createReadStream(path.join(cwd, filename), { start: 0, end: header.size > 0 ? header.size - 1 : header.size }), header)

    rs.on('error', function (err) { // always forward errors on destroy
      entry.destroy(err)
    })

    pump(rs, entry)
  }

  function onnextentry (err) {
    if (err) return pack.destroy(err)
    statNext(onstat)
  }

  return pack
}

function head (list) {
  return list.length ? list[list.length - 1] : null
}

function processGetuid () {
  return process.getuid ? process.getuid() : -1
}

function processUmask () {
  return process.umask ? process.umask() : 0
}
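// exports.extract returns a tar-stream extract stream that replays each tar
// entry under cwd: it validates that every resolved path stays inside cwd,
// creates the needed directories, then writes files, hardlinks and symlinks
// with the requested mode, ownership and mtime.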
exports.extract = function extract (cwd, opts) {
  if (!cwd) cwd = '.'
  if (!opts) opts = {}

  cwd = path.resolve(cwd)

  const xfs = opts.fs || fs
  const ignore = opts.ignore || opts.filter || noop
  const mapStream = opts.mapStream || echo
  const own = opts.chown !== false && !win32 && processGetuid() === 0
  const extract = opts.extract || tar.extract()
  const stack = []
  const now = new Date()
  const umask = typeof opts.umask === 'number' ? ~opts.umask : ~processUmask()
  const strict = opts.strict !== false

  let map = opts.map || noop
  let dmode = typeof opts.dmode === 'number' ? opts.dmode : 0
  let fmode = typeof opts.fmode === 'number' ? opts.fmode : 0

  if (opts.strip) map = strip(map, opts.strip)

  if (opts.readable) {
    dmode |= parseInt(555, 8)
    fmode |= parseInt(444, 8)
  }
  if (opts.writable) {
    dmode |= parseInt(333, 8)
    fmode |= parseInt(222, 8)
  }

  extract.on('entry', onentry)

  if (opts.finish) extract.on('finish', opts.finish)

  return extract

  function onentry (header, stream, next) {
    header = map(header) || header
    header.name = normalize(header.name)

    const name = path.join(cwd, path.join('/', header.name))

    if (ignore(name, header)) {
      stream.resume()
      return next()
    }

    const dir = path.join(name, '.') === path.join(cwd, '.') ? cwd : path.dirname(name)

    validate(xfs, dir, path.join(cwd, '.'), function (err, valid) {
      if (err) return next(err)
      if (!valid) return next(new Error(dir + ' is not a valid path'))

      if (header.type === 'directory') {
        stack.push([name, header.mtime])
        return mkdirfix(name, {
          fs: xfs, own, uid: header.uid, gid: header.gid, mode: header.mode
        }, stat)
      }

      mkdirfix(dir, {
        fs: xfs,
        own,
        uid: header.uid,
        gid: header.gid,
        // normally, the folders with rights and owner should be part of the TAR file
        // if this is not the case, create the folder for the same user as the file and with
        // standard permissions of 0o755 (rwxr-xr-x)
        mode: 0o755
      }, function (err) {
        if (err) return next(err)

        switch (header.type) {
          case 'file': return onfile()
          case 'link': return onlink()
          case 'symlink': return onsymlink()
        }

        if (strict) return next(new Error('unsupported type for ' + name + ' (' + header.type + ')'))

        stream.resume()
        next()
      })
    })

    function stat (err) {
      if (err) return next(err)
      utimes(name, header, function (err) {
        if (err) return next(err)
        if (win32) return next()
        chperm(name, header, next)
      })
    }

    function onsymlink () {
      if (win32) return next() // skip symlinks on win for now before it can be tested
      xfs.unlink(name, function () {
        const dst = path.resolve(path.dirname(name), header.linkname)
        if (!inCwd(dst)) return next(new Error(name + ' is not a valid symlink'))
        xfs.symlink(header.linkname, name, stat)
      })
    }

    function onlink () {
      if (win32) return next() // skip links on win for now before it can be tested
      xfs.unlink(name, function () {
        const link = path.join(cwd, path.join('/', header.linkname))

        fs.realpath(link, function (err, dst) {
          if (err || !inCwd(dst)) return next(new Error(name + ' is not a valid hardlink'))

          xfs.link(dst, name, function (err) {
            if (err && err.code === 'EPERM' && opts.hardlinkAsFilesFallback) {
              stream = xfs.createReadStream(dst)
              return onfile()
            }

            stat(err)
          })
        })
      })
    }

    function inCwd (dst) {
      return dst.startsWith(cwd)
    }

    function onfile () {
      const ws = xfs.createWriteStream(name)
      const rs = mapStream(stream, header)

      ws.on('error', function (err) { // always forward errors on destroy
        rs.destroy(err)
      })

      pump(rs, ws, function (err) {
        if (err) return next(err)
        ws.on('close', stat)
      })
    }
  }

  function utimesParent (name, cb) { // we just set the mtime on the parent dir again every time we write an entry
    let top
    while ((top = head(stack)) && name.slice(0, top[0].length) !== top[0]) stack.pop()
    if (!top) return cb()
    xfs.utimes(top[0], now, top[1], cb)
  }
  function utimes (name, header, cb) {
    if (opts.utimes === false) return cb()

    if (header.type === 'directory') return xfs.utimes(name, now, header.mtime, cb)
    if (header.type === 'symlink') return utimesParent(name, cb) // TODO: how to set mtime on link?

    xfs.utimes(name, now, header.mtime, function (err) {
      if (err) return cb(err)
      utimesParent(name, cb)
    })
  }

  function chperm (name, header, cb) {
    const link = header.type === 'symlink'

    /* eslint-disable n/no-deprecated-api */
    const chmod = link ? xfs.lchmod : xfs.chmod
    const chown = link ? xfs.lchown : xfs.chown
    /* eslint-enable n/no-deprecated-api */

    if (!chmod) return cb()

    const mode = (header.mode | (header.type === 'directory' ? dmode : fmode)) & umask

    if (chown && own) chown.call(xfs, name, header.uid, header.gid, onchown)
    else onchown(null)

    function onchown (err) {
      if (err) return cb(err)
      if (!chmod) return cb()
      chmod.call(xfs, name, mode, cb)
    }
  }

  function mkdirfix (name, opts, cb) {
    // when mkdir is called on an existing directory the permissions
    // will be overwritten (?), so to avoid this we check for its existence first
    xfs.stat(name, function (err) {
      if (!err) return cb(null)
      if (err.code !== 'ENOENT') return cb(err)

      xfs.mkdir(name, { mode: opts.mode, recursive: true }, function (err, made) {
        if (err) return cb(err)
        chperm(name, opts, cb)
      })
    })
  }
}

function validate (fs, name, root, cb) {
  if (name === root) return cb(null, true)
  fs.lstat(name, function (err, st) {
    if (err && err.code !== 'ENOENT' && err.code !== 'EPERM') return cb(err)
    if (err || st.isDirectory()) return validate(fs, path.join(name, '..'), root, cb)
    cb(null, false)
  })
}

function noop () {}

function echo (name) {
  return name
}

function normalize (name) {
  return win32 ? name.replace(/\\/g, '/').replace(/[:?<>|]/g, '_') : name
}
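// statAll drives the packer: it keeps a queue seeded with the requested
// entries (default '.'), stats the next path on every call, pushes the
// non-ignored children of directories back onto the queue and hands one
// (filename, stat) pair to the callback until the queue is empty.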
function statAll (fs, stat, cwd, ignore, entries, sort) {
  if (!entries) entries = ['.']

  const queue = entries.slice(0)

  return function loop (callback) {
    if (!queue.length) return callback(null)

    const next = queue.shift()
    const nextAbs = path.join(cwd, next)

    stat.call(fs, nextAbs, function (err, stat) {
      // ignore errors if the files were deleted while buffering
      if (err) return callback(entries.indexOf(next) === -1 && err.code === 'ENOENT' ? null : err)

      if (!stat.isDirectory()) return callback(null, next, stat)

      fs.readdir(nextAbs, function (err, files) {
        if (err) return callback(err)

        if (sort) files.sort()

        for (let i = 0; i < files.length; i++) {
          if (!ignore(path.join(cwd, next, files[i]))) queue.push(path.join(next, files[i]))
        }

        callback(null, next, stat)
      })
    })
  }
}

function strip (map, level) {
  return function (header) {
    header.name = header.name.split('/').slice(level).join('/')

    const linkname = header.linkname
    if (linkname && (header.type === 'link' || path.isAbsolute(linkname))) {
      header.linkname = linkname.split('/').slice(level).join('/')
    }

    return map(header)
  }
}

tar-fs-3.0.9/package.json
{
  "name": "tar-fs",
  "version": "3.0.9",
  "description": "filesystem bindings for tar-stream",
  "dependencies": {
    "pump": "^3.0.0",
    "tar-stream": "^3.1.5"
  },
  "optionalDependencies": {
    "bare-fs": "^4.0.1",
    "bare-path": "^3.0.0"
  },
  "imports": {
    "fs": {
      "bare": "bare-fs",
      "default": "fs"
    },
    "path": {
      "bare": "bare-path",
      "default": "path"
    }
  },
  "files": [
    "index.js"
  ],
  "standard": {
    "ignore": [
      "test/fixtures/**"
    ]
  },
  "keywords": [
    "tar",
    "fs",
    "file",
    "tarball",
    "directory",
    "stream"
  ],
  "devDependencies": {
    "brittle": "^3.1.3",
    "rimraf": "^2.6.3",
    "standard": "^17.0.1"
  },
  "scripts": {
    "test": "standard && brittle test/index.js"
  },
  "bugs": {
    "url": "https://github.com/mafintosh/tar-fs/issues"
  },
  "homepage": "https://github.com/mafintosh/tar-fs",
  "main": "index.js",
  "directories": {
    "test": "test"
  },
  "author": "Mathias Buus",
  "license": "MIT",
  "repository": {
    "type": "git",
    "url": "https://github.com/mafintosh/tar-fs.git"
  }
}

tar-fs-3.0.9/test/fixtures/a/hello.txt
hello world

tar-fs-3.0.9/test/fixtures/b/a/test.txt
test

tar-fs-3.0.9/test/fixtures/c/.gitignore
link
tar-fs-3.0.9/test/fixtures/d/file1 (empty)
tar-fs-3.0.9/test/fixtures/d/file2 (empty)
tar-fs-3.0.9/test/fixtures/d/sub-dir/file5 (empty)
tar-fs-3.0.9/test/fixtures/d/sub-files/file3 (empty)
tar-fs-3.0.9/test/fixtures/d/sub-files/file4 (empty)
tar-fs-3.0.9/test/fixtures/e/directory/.ignore (empty)
tar-fs-3.0.9/test/fixtures/e/file (empty)
tar-fs-3.0.9/test/fixtures/e/symlink (symlink)

tar-fs-3.0.9/test/fixtures/invalid.tar
(binary fixture: a small tar whose first entry is a symlink named "foo" pointing to "../", followed by a file "foo/bar" containing "hello")

tar-fs-3.0.9/test/index.js
const test = require('brittle')
const rimraf = require('rimraf')
const tar = require('../index')
const tarStream = require('tar-stream')
const path = require('path')
const fs = require('fs')
const os = require('os')

const win32 = os.platform() === 'win32'

const mtime = function (st) {
  return Math.floor(st.mtime.getTime() / 1000)
}

test('copy a -> copy/a', function (t) {
  t.plan(5)

  const a = path.join(__dirname, 'fixtures', 'a')
  const b = path.join(__dirname, 'fixtures', 'copy', 'a')

  rimraf.sync(b)
  tar.pack(a)
    .pipe(tar.extract(b))
    .on('finish', function () {
      const files = fs.readdirSync(b)
      t.is(files.length, 1)
      t.is(files[0], 'hello.txt')
      const fileB = path.join(b, files[0])
      const fileA = path.join(a, files[0])
      t.alike(fs.readFileSync(fileB, 'utf-8'), fs.readFileSync(fileA, 'utf-8'))
      t.alike(fs.statSync(fileB).mode, fs.statSync(fileA).mode)
      t.alike(mtime(fs.statSync(fileB)), mtime(fs.statSync(fileA)))
    })
})
test('copy b -> copy/b', function (t) {
  t.plan(8)

  const a = path.join(__dirname, 'fixtures', 'b')
  const b = path.join(__dirname, 'fixtures', 'copy', 'b')

  rimraf.sync(b)
  tar.pack(a)
    .pipe(tar.extract(b))
    .on('finish', function () {
      const files = fs.readdirSync(b)
      t.is(files.length, 1)
      t.is(files[0], 'a')
      const dirB = path.join(b, files[0])
      const dirA = path.join(a, files[0])
      t.alike(fs.statSync(dirB).mode, fs.statSync(dirA).mode)
      t.alike(mtime(fs.statSync(dirB)), mtime(fs.statSync(dirA)))
      t.ok(fs.statSync(dirB).isDirectory())
      const fileB = path.join(dirB, 'test.txt')
      const fileA = path.join(dirA, 'test.txt')
      t.alike(fs.readFileSync(fileB, 'utf-8'), fs.readFileSync(fileA, 'utf-8'))
      t.alike(fs.statSync(fileB).mode, fs.statSync(fileA).mode)
      t.alike(mtime(fs.statSync(fileB)), mtime(fs.statSync(fileA)))
    })
})

test('symlink', function (t) {
  if (win32) { // no symlink support on win32 currently. TODO: test if this can be enabled somehow
    t.plan(1)
    t.ok(true)
    return
  }

  t.plan(5)

  const a = path.join(__dirname, 'fixtures', 'c')

  rimraf.sync(path.join(a, 'link'))
  fs.symlinkSync('.gitignore', path.join(a, 'link'))

  const b = path.join(__dirname, 'fixtures', 'copy', 'c')

  rimraf.sync(b)
  tar.pack(a)
    .pipe(tar.extract(b))
    .on('finish', function () {
      const files = fs.readdirSync(b).sort()
      t.is(files.length, 2)
      t.is(files[0], '.gitignore')
      t.is(files[1], 'link')

      const linkA = path.join(a, 'link')
      const linkB = path.join(b, 'link')

      t.alike(mtime(fs.lstatSync(linkB)), mtime(fs.lstatSync(linkA)))
      t.alike(fs.readlinkSync(linkB), fs.readlinkSync(linkA))
    })
})

test('follow symlinks', function (t) {
  if (win32) { // no symlink support on win32 currently. TODO: test if this can be enabled somehow
    t.plan(1)
    t.ok(true)
    return
  }

  t.plan(5)

  const a = path.join(__dirname, 'fixtures', 'c')

  rimraf.sync(path.join(a, 'link'))
  fs.symlinkSync('.gitignore', path.join(a, 'link'))

  const b = path.join(__dirname, 'fixtures', 'copy', 'c-dereference')

  rimraf.sync(b)
  tar.pack(a, { dereference: true })
    .pipe(tar.extract(b))
    .on('finish', function () {
      const files = fs.readdirSync(b).sort()
      t.is(files.length, 2)
      t.is(files[0], '.gitignore')
      t.is(files[1], 'link')

      const file1 = path.join(b, '.gitignore')
      const file2 = path.join(b, 'link')

      t.alike(mtime(fs.lstatSync(file1)), mtime(fs.lstatSync(file2)))
      t.alike(fs.readFileSync(file1), fs.readFileSync(file2))
    })
})

test('strip', function (t) {
  t.plan(2)

  const a = path.join(__dirname, 'fixtures', 'b')
  const b = path.join(__dirname, 'fixtures', 'copy', 'b-strip')

  rimraf.sync(b)

  tar.pack(a)
    .pipe(tar.extract(b, { strip: 1 }))
    .on('finish', function () {
      const files = fs.readdirSync(b).sort()
      t.is(files.length, 1)
      t.is(files[0], 'test.txt')
    })
})

test('strip + map', function (t) {
  t.plan(2)

  const a = path.join(__dirname, 'fixtures', 'b')
  const b = path.join(__dirname, 'fixtures', 'copy', 'b-strip')

  rimraf.sync(b)

  const uppercase = function (header) {
    header.name = header.name.toUpperCase()
    return header
  }

  tar.pack(a)
    .pipe(tar.extract(b, { strip: 1, map: uppercase }))
    .on('finish', function () {
      const files = fs.readdirSync(b).sort()
      t.is(files.length, 1)
      t.is(files[0], 'TEST.TXT')
    })
})
test('map + dir + permissions', function (t) {
  t.plan(win32 ? 1 : 2) // skip chmod test, it's not working like unix

  const a = path.join(__dirname, 'fixtures', 'b')
  const b = path.join(__dirname, 'fixtures', 'copy', 'a-perms')

  rimraf.sync(b)

  const aWithMode = function (header) {
    if (header.name === 'a') {
      header.mode = parseInt(700, 8)
    }
    return header
  }

  tar.pack(a)
    .pipe(tar.extract(b, { map: aWithMode }))
    .on('finish', function () {
      const files = fs.readdirSync(b).sort()
      const stat = fs.statSync(path.join(b, 'a'))
      t.is(files.length, 1)
      if (!win32) {
        t.is(stat.mode & parseInt(777, 8), parseInt(700, 8))
      }
    })
})

test('specific entries', function (t) {
  t.plan(6)

  const a = path.join(__dirname, 'fixtures', 'd')
  const b = path.join(__dirname, 'fixtures', 'copy', 'd-entries')

  const entries = ['file1', 'sub-files/file3', 'sub-dir']

  rimraf.sync(b)
  tar.pack(a, { entries })
    .pipe(tar.extract(b))
    .on('finish', function () {
      const files = fs.readdirSync(b)
      t.is(files.length, 3)
      t.not(files.indexOf('file1'), -1)
      t.not(files.indexOf('sub-files'), -1)
      t.not(files.indexOf('sub-dir'), -1)

      const subFiles = fs.readdirSync(path.join(b, 'sub-files'))
      t.alike(subFiles, ['file3'])

      const subDir = fs.readdirSync(path.join(b, 'sub-dir'))
      t.alike(subDir, ['file5'])
    })
})

test('check type while mapping header on packing', function (t) {
  t.plan(3)

  const e = path.join(__dirname, 'fixtures', 'e')

  const checkHeaderType = function (header) {
    if (header.name.indexOf('.') === -1) t.is(header.type, header.name)
  }

  tar.pack(e, { map: checkHeaderType })
})

test('finish callbacks', function (t) {
  t.plan(3)

  const a = path.join(__dirname, 'fixtures', 'a')
  const b = path.join(__dirname, 'fixtures', 'copy', 'a')

  rimraf.sync(b)

  let packEntries = 0
  let extractEntries = 0

  const countPackEntry = function (header) { packEntries++ }
  const countExtractEntry = function (header) { extractEntries++ }

  const onPackFinish = function (passedPack) {
    t.is(packEntries, 2, 'All entries have been packed') // 2 entries - the file and base directory
    t.is(passedPack, pack, 'The finish hook passes the pack')
  }

  const onExtractFinish = function () {
    t.is(extractEntries, 2)
  }

  const pack = tar.pack(a, { map: countPackEntry, finish: onPackFinish })

  pack.pipe(tar.extract(b, { map: countExtractEntry, finish: onExtractFinish }))
    .on('finish', function () {
      t.end()
    })
})

test('not finalizing the pack', function (t) {
  t.plan(2)

  const a = path.join(__dirname, 'fixtures', 'a')
  const b = path.join(__dirname, 'fixtures', 'b')

  const out = path.join(__dirname, 'fixtures', 'copy', 'merged-packs')

  rimraf.sync(out)

  const prefixer = function (prefix) {
    return function (header) {
      header.name = path.join(prefix, header.name)
      return header
    }
  }

  tar.pack(a, {
    map: prefixer('a-files'),
    finalize: false,
    finish: packB
  })

  function packB (pack) {
    tar.pack(b, {
      pack,
      map: prefixer('b-files')
    })
      .pipe(tar.extract(out))
      .on('finish', assertResults)
  }

  function assertResults () {
    const containers = fs.readdirSync(out)
    t.alike(containers, ['a-files', 'b-files'])

    const aFiles = fs.readdirSync(path.join(out, 'a-files'))
    t.alike(aFiles, ['hello.txt'])
  }
})
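// The remaining tests cover path traversal protection: extraction must fail
// on symlink and hardlink entries whose resolved targets land outside the
// destination directory, without touching anything outside it.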
test('do not extract invalid tar', function (t) {
  if (win32) { // no symlink support on win32 currently. TODO: test if this can be enabled somehow
    t.plan(1)
    t.ok(true)
    return
  }

  t.plan(2)

  const a = path.join(__dirname, 'fixtures', 'invalid.tar')

  const out = path.join(__dirname, 'fixtures', 'invalid')

  rimraf.sync(out)

  fs.createReadStream(a)
    .pipe(tar.extract(out))
    .on('error', function (err) {
      t.ok(/is not a valid symlink/i.test(err.message))
      fs.stat(path.join(out, '../bar'), function (err) {
        t.ok(err)
      })
    })
    .on('finish', function () {
      t.fail('should not finish')
    })
})

test('no abs hardlink targets', function (t) {
  if (win32) { // no symlink support on win32 currently. TODO: test if this can be enabled somehow
    t.plan(1)
    t.ok(true)
    return
  }

  t.plan(3)

  const out = path.join(__dirname, 'fixtures', 'invalid')
  const outside = path.join(__dirname, 'fixtures', 'outside')

  rimraf.sync(out)

  const s = tarStream.pack()

  fs.writeFileSync(outside, 'something')

  s.entry({
    type: 'link',
    name: 'link',
    linkname: outside
  })

  s.entry({
    name: 'link'
  }, 'overwrite')

  s.finalize()

  s.pipe(tar.extract(out))
    .on('error', function (err) {
      t.ok(err, 'had error')
      fs.readFile(outside, 'utf-8', function (err, str) {
        t.absent(err, 'no error')
        t.is(str, 'something')
      })
    })
})