'use strict'

// XXX: This shares a lot in common with extract.js
// maybe some DRY opportunity here?

// tar -t
const hlo = require('./high-level-opt.js')
const Parser = require('./parse.js')
const fs = require('fs')
const fsm = require('fs-minipass')
const path = require('path')
const stripSlash = require('./strip-trailing-slashes.js')

module.exports = (opt_, files, cb) => {
  if (typeof opt_ === 'function') {
    cb = opt_, files = null, opt_ = {}
  } else if (Array.isArray(opt_)) {
    files = opt_, opt_ = {}
  }

  if (typeof files === 'function') {
    cb = files, files = null
  }

  if (!files) {
    files = []
  } else {
    files = Array.from(files)
  }

  const opt = hlo(opt_)

  if (opt.sync && typeof cb === 'function') {
    throw new TypeError('callback not supported for sync tar functions')
  }

  if (!opt.file && typeof cb === 'function') {
    throw new TypeError('callback only supported with file option')
  }

  if (files.length) {
    filesFilter(opt, files)
  }

  if (!opt.noResume) {
    onentryFunction(opt)
  }

  return opt.file && opt.sync ? listFileSync(opt)
    : opt.file ? listFile(opt, cb)
    : list(opt)
}

const onentryFunction = opt => {
  const onentry = opt.onentry
  opt.onentry = onentry ? e => {
    onentry(e)
    e.resume()
  } : e => e.resume()
}

// construct a filter that limits the file entries listed
// include child entries if a dir is included
const filesFilter = (opt, files) => {
  const map = new Map(files.map(f => [stripSlash(f), true]))
  const filter = opt.filter

  const mapHas = (file, r) => {
    const root = r || path.parse(file).root || '.'
    const ret = file === root ? false
      : map.has(file) ? map.get(file)
      : mapHas(path.dirname(file), root)

    map.set(file, ret)
    return ret
  }

  opt.filter = filter
    ? (file, entry) => filter(file, entry) && mapHas(stripSlash(file))
    : file => mapHas(stripSlash(file))
}

const listFileSync = opt => {
  const p = list(opt)
  const file = opt.file
  let threw = true
  let fd
  try {
    const stat = fs.statSync(file)
    const readSize = opt.maxReadSize || 16 * 1024 * 1024
    if (stat.size < readSize) {
      p.end(fs.readFileSync(file))
    } else {
      let pos = 0
      const buf = Buffer.allocUnsafe(readSize)
      fd = fs.openSync(file, 'r')
      while (pos < stat.size) {
        const bytesRead = fs.readSync(fd, buf, 0, readSize, pos)
        pos += bytesRead
        p.write(buf.slice(0, bytesRead))
      }
      p.end()
    }
    threw = false
  } finally {
    if (threw && fd) {
      try {
        fs.closeSync(fd)
      } catch (er) {}
    }
  }
}

const listFile = (opt, cb) => {
  const parse = new Parser(opt)
  const readSize = opt.maxReadSize || 16 * 1024 * 1024

  const file = opt.file
  const p = new Promise((resolve, reject) => {
    parse.on('error', reject)
    parse.on('end', resolve)

    fs.stat(file, (er, stat) => {
      if (er) {
        reject(er)
      } else {
        const stream = new fsm.ReadStream(file, {
          readSize: readSize,
          size: stat.size,
        })
        stream.on('error', reject)
        stream.pipe(parse)
      }
    })
  })
  return cb ? p.then(cb, cb) : p
}

const list = opt => new Parser(opt)
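For context, a minimal usage sketch of how this module is typically reached through node-tar's public API, where it backs tar.t / tar.list. The archive name 'archive.tar' and the 'lib/' path filter below are hypothetical, chosen only to illustrate the options handled above.

// Usage sketch: listing entries in a tarball via the public tar.t alias.
// 'archive.tar' is a placeholder file name, not taken from this module.
const tar = require('tar')

// Promise form: requires the `file` option; each entry is handed to
// `onentry` and then resumed, as set up by onentryFunction above.
tar.t({
  file: 'archive.tar',
  onentry: entry => console.log(entry.path),
}).then(() => console.log('listing complete'))

// Synchronous form with a path list: entries under 'lib/' are kept,
// including children of that directory, via filesFilter above.
tar.t({
  file: 'archive.tar',
  sync: true,
  onentry: entry => console.log(entry.path),
}, ['lib/'])

With a file option and no callback, the exported function returns a promise that settles when parsing ends; passing an array of paths installs the filesFilter wrapper so that children of a listed directory are included in the output.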