How to use the `mississippi.through` function from the mississippi package

To help you get started, we’ve selected a few mississippi examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github graalvm / graaljs / graal-nodejs / deps / npm / node_modules / pacote / lib / fetchers / file.js View on Github external
// pacote file fetcher: produces a tarball stream for a local file spec.
// Files up to MAX_BULK_SIZE are read in one bulk read and optionally
// written through to the cacache cache keyed by the source path.
// NOTE(review): this snippet is truncated — the tail of the promise
// chain and the method's closing braces are not visible here.
tarball (spec, opts) {
    const src = spec._resolved || spec.fetchSpec
    // Pass-through stream handed back to the caller immediately; data
    // is fed into it asynchronously once the file has been stat'd.
    const stream = through()
    statAsync(src).then(stat => {
      // A resolved spec means the manifest is already known — surface it.
      if (spec._resolved) { stream.emit('manifest', spec) }
      if (stat.size <= MAX_BULK_SIZE) {
        // YAY LET'S DO THING IN BULK
        return readFileAsync(src).then(data => {
          if (opts.cache) {
            // Store the tarball bytes in the cache; cacache resolves
            // with the content integrity it computed.
            return cacache.put(
              opts.cache, `pacote:tarball:file:${src}`, data, {
                integrity: opts.integrity
              }
            ).then(integrity => ({ data, integrity }))
          } else {
            return { data }
          }
        }).then(info => {
          if (info.integrity) { stream.emit('integrity', info.integrity) }
github graalvm / graaljs / deps / npm / lib / search / all-package-metadata.js View on Github external
// Builds the input side of the all-package-metadata cache pipeline:
// object-mode transform -> JSONStream stringifier -> tee to outStream
// -> cache-file writer.
// NOTE(review): this snippet starts mid-function — the enclosing
// function header and the `outStream`/`writer`/`latest` definitions
// are not visible here.
let updatedWritten = false
  const inStream = ms.pipeline.obj(
    ms.through.obj((pkg, enc, cb) => {
      if (!updatedWritten && typeof pkg === 'number') {
        // This is the `_updated` value getting sent through.
        updatedWritten = true
        return cb(null, ['_updated', pkg])
      } else if (typeof pkg !== 'object') {
        // NOTE(review): this branch emits 'error' but never calls cb, so
        // the transform may stall; also `this` inside an arrow function
        // is lexical, not the stream — confirm intended behavior.
        this.emit('error', new Error('invalid value written to input stream'))
      } else {
        // The [key, val] format is expected by `jsonstream` for object writing
        cb(null, [pkg.name, pkg])
      }
    }),
    JSONStream.stringifyObject('{', ',', '}'),
    ms.through((chunk, enc, cb) => {
      // This tees off the buffer data to `outStream`, and then continues
      // the pipeline as usual
      outStream.write(chunk, enc, () => cb(null, chunk))
    }),
    // And finally, we write to the cache file.
    writer
  )
  inStream.write(latest)
  return inStream
}
github npm / npme-installer / lib / download-tar.js View on Github external
// Handles the HTTP response for a tarball download: rejects non-200
// statuses, pipes the body through a hashing transform into a temp
// file, verifies the shasum, then renames the temp file into place.
// NOTE(review): truncated snippet — the rename callback and the
// enclosing function are cut off below.
req.on('response', function (res) {
      responded = true
      if (res.statusCode !== 200) {
        // Surface non-200 responses as an Error carrying the code.
        var e = new Error()
        e.statusCode = res.statusCode
        return cb(e)
      }

      // Tee every chunk into the hash while streaming to the temp file.
      // (The transform's `cb` shadows the outer callback; `false` is a
      // falsy "no error" — `null` would be more idiomatic.)
      mis.pipe(res, mis.through(function (chunk, enc, cb) {
        hash.update(chunk)
        cb(false, chunk)
      }), fs.createWriteStream(tmpname), function (err) {
        if (err) {
          console.error('failed downloading ' + tarUrl + ' err = ' + err.message)
          return cb(err)
        }

        var resultShasum = hash.digest().toString('hex')

        if (resultShasum !== shasum) {
          return cb(new Error('shasum mismatch got: ' + resultShasum + ' need ' + shasum))
        }

        fs.rename(tmpname, targetName, function (err) {
          // yay! all done.
github npm / pacote / lib / registry / request.js View on Github external
// Streams a registry GET: returns a pass-through stream immediately
// and wires the HTTP client response into it once the request
// completes. Sends conditional-request validators (etag /
// last-modified) when cached metadata is available.
// NOTE(review): truncated snippet — the 304/cache-write tail and the
// closing braces are not visible here.
function registryStream (key, uri, registry, meta, opts) {
  var stream = through()
  client(opts).get(uri, {
    etag: meta && meta.etag,
    lastModified: meta && meta.lastModified,
    follow: opts.follow,
    auth: opts.auth && opts.auth[registryKey(registry)],
    timeout: opts.timeout,
    streaming: true
  }, function (err, res) {
    if (err) { return stream.emit('error', err) }
    // Transparently gunzip compressed bodies; otherwise pass through.
    var decoder = res.headers['content-encoding'] === 'gzip'
    ? gunzip()
    : through()
    if (res.statusCode === 304) {
      // Not Modified: the cached copy is still valid.
      opts.log.silly('registry.get', 'cached data valid')
      res.on('data', function () {}) // Just drain it
      stream.emit('cached')
github npm / pacote / lib / registry / request.js View on Github external
// Response handler for a registry GET (fragment — the surrounding
// function header is not visible here). Handles 304 revalidation and,
// when a cache is configured, streams the response body into the
// cache along with response metadata.
}, function (err, res) {
    if (err) { return stream.emit('error', err) }
    // Transparently gunzip compressed bodies; otherwise pass through.
    var decoder = res.headers['content-encoding'] === 'gzip'
    ? gunzip()
    : through()
    if (res.statusCode === 304) {
      // Not Modified: the cached copy is still valid.
      opts.log.silly('registry.get', 'cached data valid')
      res.on('data', function () {}) // Just drain it
      stream.emit('cached')
      stream.unpipe()
      stream.end()
    } else if (opts.cache) {
      opts.log.silly('registry.get', 'request successful. streaming data to cache')
      // Child options object so metadata doesn't leak into the caller's opts.
      var localopt = Object.create(opts)
      localopt.metadata = {
        etag: res.headers['etag'],
        lastModified: res.headers['last-modified'],
        // FIXME(review): 'cache-conrol' is misspelled ('cache-control'),
        // so this lookup is always undefined — confirm against upstream.
        cacheControl: res.headers['cache-conrol'],
        time: +(new Date())
      }
      var cacheStream = cache.put.stream(opts.cache, key, localopt)
github orchoban / react.cordova / node_modules / npm / lib / search / all-package-metadata.js View on Github external
function _createCacheOutStream () {
  // Buffer pass-through -> JSON parse -> object pass-through. The
  // flanking through-streams are what it takes to get JSONStream to
  // cooperate with the rest of the pipeline machinery.
  const keepObjectsOnly = (obj, key) => {
    // `_updated` (a number) travels through this stream for
    // implementation reasons; returning undefined drops it so only
    // real package objects continue downstream.
    if (typeof obj === 'object') {
      return obj
    }
  }
  return ms.pipeline.obj(
    ms.through(),
    JSONStream.parse('*', keepObjectsOnly),
    ms.through.obj()
  )
}
github anandthakker / doiuse.com / lib / limit.js View on Github external
// Returns a transform stream that errors once more than `size` bytes
// have flowed through it, unless `onLimit` supplies a new limit.
// NOTE(review): truncated snippet — the close of the outer function
// (and presumably `return lim`) is not visible.
module.exports = function limit (size, onLimit) {
  var soFar = 0

  // FIXME(review): the numeric `size` parameter is immediately
  // overwritten with an object stream here, so `soFar > size` below
  // compares a number against a stream until onLimit replaces it.
  // This looks like a bug — confirm against the upstream source.
  size = through.obj()
  var lim = through(function (chunk, enc, next) {
    soFar += chunk.length
    if (soFar > size && !(size = onLimit(soFar, chunk))) {
      next(new Error('Limit reached.'))
    } else {
      next(null, chunk)
    }
  },
    // Flush handler: report the total byte count and mark completion.
    function end (next) {
      this.push(null)
      size.end('' + soFar)
      lim.ended = true
      next()
    })

  lim.size = size
github npm / pacote / lib / handlers / directory / tarball.js View on Github external
function fromManifest (manifest, spec, opts) {
  // Pack the manifest's resolved directory into a tarball stream.
  // The stream is returned synchronously; any failure from the async
  // pack operation is re-emitted on it as an 'error' event so callers
  // can handle it through the usual stream interface.
  const tarballStream = through()
  const dir = manifest._resolved
  packDir(manifest, dir, dir, tarballStream, opts)
    .catch(err => tarballStream.emit('error', err))
  return tarballStream
}
github sanity-io / sanity / packages / @sanity / export / src / AssetHandler.js View on Github external
// Streams `stream` to `filePath` while computing its byte size plus
// MD5 and SHA-1 digests, resolving with those once the write finishes.
// NOTE(review): truncated snippet — the resolved object literal and
// the closing braces are cut off below.
function writeHashedStream(filePath, stream) {
  let size = 0
  const md5 = crypto.createHash('md5')
  const sha1 = crypto.createHash('sha1')

  // Pass-through transform that observes each chunk for size/hash
  // accounting without modifying the data.
  const hasher = miss.through((chunk, enc, cb) => {
    size += chunk.length
    md5.update(chunk)
    sha1.update(chunk)
    cb(null, chunk)
  })

  return new Promise((resolve, reject) =>
    miss.pipe(stream, hasher, fse.createWriteStream(filePath), err => {
      if (err) {
        reject(err)
        return
      }

      resolve({
        size,
        sha1: sha1.digest('hex'),