How to use the tar.Parse function in tar

To help you get started, we’ve selected a few tar.Parse examples, based on popular ways it is used in public projects.

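All of these snippets follow the same basic pattern: stream a (usually gzip-compressed) tarball into a tar.Parse instance, listen for its 'entry' events, and either consume or drain each entry. Below is a minimal, self-contained sketch of that pattern. The example.tgz path is a placeholder, and the sketch assumes a recent node-tar (v4 or later), where the parser is constructed with new tar.Parse(); older releases expose the same class as tar.Parse().

const fs = require('fs');
const zlib = require('zlib');
const tar = require('tar');

fs.createReadStream('example.tgz')      // placeholder path: any gzipped tarball
  .pipe(zlib.createGunzip())            // decompress before parsing
  .pipe(new tar.Parse())                // emits one 'entry' event per file/directory
  .on('entry', (entry) => {
    console.log(entry.type, entry.path);
    entry.resume();                     // drain entries you don't otherwise consume
  })
  .on('end', () => {
    console.log('done');
  });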

github alibaba / ice / tools / iceworks / app / main / scaffolder / lib / utils.js
})
    );
    req
      .on('progress', (state) => {
        progressFunc(state);
      })
      .on('error', (error) => {
        error.name = 'download-tarball-error';
        error.data = {
          url: tarballURL,
        };
        logger.error(error);
        reject(error);
      })
      .pipe(zlib.Unzip()) // eslint-disable-line
      .pipe(tar.Parse()) // eslint-disable-line
      .on('entry', (entry) => {
        if (!/src|mock\//.test(entry.path)) {
          return;
        }

        const isMockFiles = entry.path.indexOf('mock/') !== -1;

        let destPath = ''; // path of the generated file
        if (isMockFiles) {
          destPath = path.join(
            clientPath,
            entry.path.replace(/^package\//, '')
          );
        } else {
          const realPath = entry.path
            .replace(/^package\//, '')

github thlorenz / irish-pub / index.js
exec('npm pack ' + root, function (err, stdout, stderr) {
    if (err) return out.emit('error', 'Failed to pack archive: ' + err);

    // npm logs created filename on stdout
    var tarFile = path.join(process.cwd(), stdout.trim().split(/\n+/).pop());

    fs.createReadStream(tarFile)
      .on('error', out.emit.bind(out, 'error'))
      .pipe(zlib.createGunzip())
      .on('error', out.emit.bind(out, 'error'))
      .pipe(tar.Parse())
      .on('error', out.emit.bind(out, 'error'))
      .on('entry', function (e) {
        out.write(e.path.replace(/^package\//, '') + '\n');
      })
      .on('end', function () {
        fs.unlink(tarFile, function (err) {
          if (err) return out.emit('error', err);
          out.emit('end')
        })
      })
  })
}

github alanhoff / node-tar.gz / index.js
TarGz.prototype.createParseStream = function() {
  var stream1 = zlib.createGunzip(this._options.zlib);
  var stream2 = tar.Parse();

  this._bubble(stream1, stream2);

  // Capture the entry event
  stream2.on('entry', function(entry) {
    stream1.emit('entry', entry);
  });

  stream1.pipe(stream2);
  return stream1;
};

github alibaba / ice / packages / ice-npm-utils / lib / index.js
return new Promise((resolve, reject) => {
    const allFiles = [];
    const allWriteStream = [];
    const dirCollector = [];

    progress(
      request({
        url: tarball,
        timeout: 10000,
      })
    )
      .on('progress', progressFunc)
      .on('error', reject)
      .pipe(zlib.Unzip())
      .pipe(new tar.Parse())
      .on('entry', (entry) => {
        if (entry.type === 'Directory') {
          entry.resume();
          return;
        }
        const realPath = entry.path.replace(/^package\//, '');

        let filename = path.basename(realPath);

        // _gitignore -> .gitignore
        // Special logic: _package.json -> package.json
        if (filename === '_package.json') {
          filename = filename.replace(/^_/, '');
        } else {
          filename = filename.replace(/^_/, '.');
        }

github npm / pacote / lib / finalize-manifest.js
const needsBin = !!(!pkg || (
    !pkg.bin &&
    pkg.directories &&
    pkg.directories.bin
  ))
  const needsIntegrity = !pkg || (!pkg._integrity && pkg._integrity !== false)
  const needsShasum = !pkg || (!pkg._shasum && pkg._shasum !== false)
  const needsHash = needsIntegrity || needsShasum
  const needsManifest = !pkg || !pkg.name
  const needsExtract = needsShrinkwrap || needsBin || needsManifest
  if (!needsShrinkwrap && !needsBin && !needsHash && !needsManifest) {
    return BB.resolve({})
  } else {
    opts = optCheck(opts)
    const tarStream = fetchFromManifest(pkg, spec, opts)
    const extracted = needsExtract && new tar.Parse()
    return BB.join(
      needsShrinkwrap && jsonFromStream('npm-shrinkwrap.json', extracted),
      needsManifest && jsonFromStream('package.json', extracted),
      needsBin && getPaths(extracted),
      needsHash && ssri.fromStream(tarStream, { algorithms: ['sha1', 'sha512'] }),
      needsExtract && pipe(tarStream, extracted),
      (sr, mani, paths, hash) => {
        if (needsManifest && !mani) {
          const err = new Error(`Non-registry package missing package.json: ${spec}.`)
          err.code = 'ENOPACKAGEJSON'
          throw err
        }
        const extraProps = mani || {}
        delete extraProps._resolved
        // drain out the rest of the tarball
        tarStream.resume()

github microsoft / join-dev-design / time-travel / build.js
const parseTarball = async options => {
  const { url, id } = options;
  const tarball = await fetch(url);
  const tarballStream = tarball.body;
  const unzipPath = path.join(historyFolderPath, id);
  const parse = new tar.Parse();

  tarballStream
    .on("error", console.error)
    .pipe(parse)
    .on("entry", async function(entry) {
      const type = entry.type;
      const tpath = entry.path;
      const [root, subDir1, subDir2, ...rest] = tpath.split(path.sep);
      console.log(type, tpath);

      if (subDir1 === "docs" && subDir2 !== "time-travel" && type === "File") {
        const docsPath = path.join(unzipPath, subDir1, subDir2, rest.join(""));

        try {
          await fs.ensureFile(docsPath);
        } catch (err) {

github transloadit / uppy / bin / upload-to-cdn.js
async function getRemoteDistFiles (packageName, version) {
  const files = new Map()
  const tarball = pacote.tarball.stream(`${packageName}@${version}`)
    .pipe(new tar.Parse())

  tarball.on('entry', (readEntry) => {
    if (readEntry.path.startsWith('package/dist/')) {
      readEntry
        .pipe(concat((buf) => {
          files.set(readEntry.path.replace(/^package\/dist\//, ''), buf)
        }))
        .on('error', (err) => {
          tarball.emit('error', err)
        })
    } else {
      readEntry.resume()
    }
  })

  await finished(tarball)

github npm / node-tar / benchmarks / parse / old-sync.js
const Parse = require('tar').Parse
const path = require('path')
const file = process.argv[2] || path.resolve(__dirname, '../npm.tar')
const fs = require('fs')
const data = fs.readFileSync(file)

const timer = require('../timer.js')()
const p = new Parse()
p.on('entry', entry => entry.resume())
p.on('end', timer)
p.end(data)

github entropic-dev / entropic / services / storage / lib / clone-legacy-package.js
async function syncVersion(storage, pkg, version, data) {
  const tarball = pacote.tarball.stream(`${pkg}@${version}`);
  const untar = new tar.Parse();
  const files = {};
  const pending = [];

  untar.on('entry', entry => {
    if (entry.type === 'File') {
      const filename = './' + String(entry.path).replace(/^\/+/g, '');
      const passthrough = new PassThrough();
      passthrough.pause();

      const stream = entry.pipe(passthrough);
      const addFile = storage.add(stream).then(r => {
        files[filename] = r;
      });
      addFile.catch(() => {});
      pending.push(addFile);
    } else {

github dcodeIO / ClosureCompiler.js / scripts / configure.js
            var alldone = true;
            var names = Object.keys(files);
            for (var i=0; i<names.length; i++) {