How to use tar-stream - 10 common examples

To help you get started, we’ve selected a few tar-stream examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

GitHub: Derpthemeus / ChampionMasteryLookup / ts / staticDataUpdater.ts — View on GitHub (external)
https.get(url, (response: http.IncomingMessage) => {
			if (response.statusCode === 200) {
				/** A promise for each file that needs to be saved. Each promise will be resolved when the file has been saved, or rejected if an error occurs */
				const promises: Promise[] = [];

				const tarStream = tar.extract();
				tarStream.on("error", (err: Error) => {
					reject(new VError(err, "Error reading tarball stream"));
				});

				const championJsonRegex = XRegExp(`^(.\\/)?${XRegExp.escape(ddragonVersion)}\\/data\\/en_US\\/champion\\.json$`);
				const profileIconRegex = XRegExp(`^(.\\/)?${XRegExp.escape(ddragonVersion)}\\/img\\/profileicon\\/.+[^\\/]$`);
				const championIconRegex = XRegExp(`^(.\\/)?${XRegExp.escape(ddragonVersion)}\\/img\\/champion\\/.+[^\\/]$`);

				let entriesChecked: number = 0;
				tarStream.on("entry", (header: { name: string }, entryStream: stream.Readable, next: Function) => {
					if (++entriesChecked % 1000 === 0) {
						console.log(`Checked ${entriesChecked} entries in the tarball...`);
					}
					if (profileIconRegex.test(header.name)) {
						const promise: Promise = saveEntry(entryStream, profileIconsPath, header.name);
						// This is needed to suppress an UnhandledPromiseRejectionWarning (the rejection will actually be handled later by Promise.all())
GitHub: thlorenz / spinup / lib / inject-dockerfile.js — View on GitHub (external)
pack.finalize();
    });

  tarballStream.on('error', console.error).pipe(extract);
  return pack;
};

// Dumps `obj` to stderr via util.inspect: prototype props hidden, colorized
// output, descending `depth` levels (defaults to 5 when falsy).
function inspect(obj, depth) {
  var rendered = require('util').inspect(obj, false, depth || 5, true);
  console.error(rendered);
}

// Self-test harness: runs only when this file is executed directly under
// Node (skipped when require()d as a module or when a browser `window` exists).
// Reads tmp/in.tar.gz, gunzips it, runs it through go(), and writes the
// resulting tarball to result/out.tar.
if (!module.parent && typeof window === 'undefined') {
  var dir = path.join(__dirname, '..', 'tmp');  
  var resdir = path.join(__dirname, '..', 'result');  
  // NOTE(review): `pack` and `extract` are not referenced inside this branch —
  // presumably go() reaches them through module scope; confirm before removing.
  var pack = tar.pack();
  var extract = tar.extract();

  var gunzip = require('zlib').createGunzip();

  // NOTE(review): the second argument 'utf8' sets a text encoding on a binary
  // gzip stream, which would corrupt the bytes fed to gunzip — verify this is
  // intentional (it is likely ignored only because of the immediate pipe).
  var ins = fs.createReadStream(dir + '/in.tar.gz', 'utf8').pipe(gunzip);

  go(ins, true)
    .on('error', console.error)
    .pipe(fs.createWriteStream(resdir + '/out.tar', 'utf8'));
}
GitHub: heroku / cli / commands / update.js — View on GitHub (external)
return new Promise(resolve => {
      this.fs.removeSync(dir)
      let extract = tar.extract()
      extract.on('entry', (header, stream, next) => {
        let p = path.join(dir, header.name)
        let opts = {mode: header.mode}
        switch (header.type) {
          case 'directory':
            this.fs.mkdirpSync(p, opts)
            next()
            break
          case 'file':
            stream.pipe(this.fs.createWriteStream(p, opts))
            break
          case 'symlink':
            // ignore symlinks since they will not work on windows
            next()
            break
          default: throw new Error(header.type)
GitHub: tessel / t2-cli / lib / update-fetch.js — View on GitHub (external)
// System Objects
var path = require('path');
var fs = require('fs');

// Third Party Dependencies
// NOTE(review): createGunzip() and extract() return single-use streams, yet
// they are created once at module load. A second update attempt in the same
// process would reuse already-ended streams — confirm callers only run once.
var gunzip = require('zlib').createGunzip();
var extract = require('tar-stream').extract();
var Progress = require('t2-progress');
var request = require('request');
var streamToBuffer = require('stream-to-buffer');
var urljoin = require('url-join');
var semver = require('semver');

// Internal
var log = require('./log');
var remote = require('./remote');

// Build-server endpoints and the firmware artifact names served from them.
const BUILD_SERVER_ROOT = `https://${remote.BUILDS_HOSTNAME}/t2`;
const FIRMWARE_PATH = urljoin(BUILD_SERVER_ROOT, 'firmware');
const BUILDS_JSON_FILE = urljoin(FIRMWARE_PATH, 'builds.json');
const OPENWRT_BINARY_FILE = 'openwrt.bin';
const FIRMWARE_BINARY_FILE = 'firmware.bin';
GitHub: spark3dp / print-manager / spark-print-mgr / printableTranslation / translators / Autodesk-Ember.js — View on GitHub (external)
/**
 * Kicks off translation of `inputPath`, streaming the gzipped tar output to
 * `outputPath`. Builds the pipeline tar pack -> gzip -> output file, emits the
 * settings entry, then delegates to the base DLPTranslator implementation.
 *
 * @param inputPath  Path of the printable being translated.
 * @param outputPath Destination path for the gzipped tarball.
 * @returns whatever DLPTranslator.prototype.startTranslation returns.
 */
EmberTranslator.prototype.startTranslation = function (inputPath, outputPath) {
    // Create each stage of the tar -> gzip -> file stream.
    this.fstream = fs.createWriteStream(outputPath);
    this.gzip = zlib.createGzip();
    this.pack = tar.pack();

    // Wire the pipeline back-to-front.
    this.gzip.pipe(this.fstream);
    this.pack.pipe(this.gzip);

    // Write the print settings entry before any slice data.
    this.writeSettings();

    return DLPTranslator.prototype.startTranslation.call(this, inputPath, outputPath);
};
GitHub: digidem / osm-p2p-syncfile / index.js — View on GitHub (external)
mfeed.ready(function () {
    // re-pack syncfile multifeed dir into tarball
    var tarPath = path.join(self._syncdir, 'osm-p2p-db.tar')
    var tarSize = 0
    var tcount = through(function (chunk, _, next) { tarSize += chunk.length; next(null, chunk) })

    // 1. create new tar.pack() stream, to be piped to fs.createWriteStream inside self._syncdir
    var pack = tar.pack()

    // 2. recursively walk files in self._syncdir (skip new tar file)
    var rd = readdirp({root: path.join(self._syncdir, 'multifeed')})

    // 3. write all to the tar file
    var twrite = through.obj(function (file, _, next) {
      if (file.path === 'osm-p2p-db.tar') return next()
      debug('file', file.fullPath, file.stat.size)
      var entry = pack.entry({ name: file.path, size: file.stat.size }, function (err) {
        debug('wrote', file.path)
        if (err) return next(err)
        else next()
      })
      pump(fs.createReadStream(file.fullPath), entry)
    })
GitHub: teambit / bit / src / api / consumer / lib / doctor.ts — View on GitHub (external)
async function _generateExamineResultsTarFile(
  examineResults: ExamineResult[],
  envMeta: DoctorMetaData
): Promise {
  const pack = tar.pack(); // pack is a streams2 stream
  const debugLog = await _getDebugLogAsStream();
  const consumerInfo = await _getConsumerInfo();
  let bitmap;
  if (consumerInfo && consumerInfo.path) {
    bitmap = _getBitMap(consumerInfo.path);
  }
  pack.entry({ name: 'env-meta.json' }, JSON.stringify(envMeta, null, 2));
  pack.entry({ name: 'doc-results.json' }, JSON.stringify(examineResults, null, 2));
  if (debugLog) {
    pack.entry({ name: 'debug.log' }, debugLog);
  }
  if (bitmap) {
    pack.entry({ name: '.bitmap' }, bitmap);
  }
  if (consumerInfo && consumerInfo.consumerConfig) {
    pack.entry({ name: 'config.json' }, JSON.stringify(consumerInfo.consumerConfig.toPlainObject(), null, 4));
GitHub: digidem / indexed-tarball / single.js — View on GitHub (external)
/**
 * Appends a `___index.json` metadata entry to the tar archive open on `fd`,
 * starting at byte offset `pos`.
 *
 * Layout written: 512-byte tar header, the JSON payload, zero padding up to
 * the next 512-byte block boundary, then the two zeroed 512-byte blocks
 * (1024 bytes) that mark end-of-archive.
 *
 * @param {number} fd    Open file descriptor (writable).
 * @param {number} pos   Byte offset at which the entry begins.
 * @param {Object} meta  JSON-serializable metadata object.
 * @param {Function} cb  Node-style callback forwarded to fs.write.
 */
function appendMeta (fd, pos, meta, cb) {
  var data = Buffer.from(JSON.stringify(meta), 'utf8')

  var header = tarHeader.encode({
    name: '___index.json',
    type: 'file',
    mode: parseInt('644', 8),
    uid: 0,
    gid: 0,
    mtime: new Date(),
    size: data.length
  })

  // Bytes needed to reach the next 512-byte block boundary. When the payload
  // already ends on a boundary no padding is required — the previous
  // `512 - (data.length % 512)` inserted a spurious extra 512-byte zero block
  // in that case (the sibling append path guards this with
  // `if (leftover === 512) leftover = 0`).
  var leftover = (512 - (data.length % 512)) % 512

  // Padding plus the 1024-byte end-of-archive marker; Buffer.alloc zero-fills.
  var padding = Buffer.alloc(leftover + 1024)

  var buf = Buffer.concat([header, data, padding])

  fs.write(fd, buf, 0, buf.length, pos, cb)
}
GitHub: digidem / indexed-tarball / single.js — View on GitHub (external)
function write (archive, start) {
      // 3. Prepare the tar archive for appending.
      var fsOpts = {
        flags: 'r+',
        start: start !== undefined ? start : 0
      }
      if (fsOpts.start < 0) fsOpts.start = 0
      var appendStream = fs.createWriteStream(self.filepath, fsOpts)

      // 4. Write tar header, without size info (yet).
      var header = tarHeader.encode({
        name: filepath,
        type: 'file',
        mode: parseInt('644', 8),
        uid: 0,
        gid: 0,
        mtime: new Date(),
        size: 0
      })
      appendStream.write(header)

      // 5. Write data.
      t.pipe(appendStream)
      t.on('end', function () {
        // 6. Pad the remaining bytes to fit a 512-byte block.
        var leftover = 512 - (size % 512)
        if (leftover === 512) leftover = 0
GitHub: digidem / osm-p2p-syncfile / test / create.js — View on GitHub (external)
setupAndClose(function (err) {
        t.error(err)
        t.ok(fs.existsSync(filepath))
        t.equal(fs.readdirSync(dir).length, 1)
        t.notEqual(fs.readdirSync(dir).indexOf('sync.tar'), -1)

        var seen = []
        var ex = tar.extract()
        fs.createReadStream(filepath).pipe(ex)
        ex.on('entry', function (header, stream, next) {
          seen.push(header.name)
          stream.resume()
          stream.on('end', next)
        })
        ex.on('finish', function () {
          t.deepEquals(seen.sort(), ['___index.json', 'osm-p2p-db.tar'])
          t.end()
        })
      })
    })

tar-stream

tar-stream is a streaming tar parser and generator and nothing else. It operates purely using streams which means you can easily extract/parse tarballs without ever hitting the file system.

MIT
Latest version published 3 months ago

Package Health Score

83 / 100
Full package analysis

Similar packages