How to use the stream-json/Parser.parser function in stream-json

To help you get started, we’ve selected a few stream-json examples based on popular ways it is used in public projects.

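Before the project examples, here is a minimal, self-contained sketch (not taken from any project below; the input document is invented) showing what parser() emits: an object-mode stream of tokens such as startObject, keyValue, and stringValue.

const { parser } = require('stream-json/Parser');
const { Readable } = require('stream');

// Pipe a small JSON document through the parser and log each token.
Readable.from(['{"id": 42, "name": "demo"}'])
  .pipe(parser())
  .on('data', (token) => {
    // Tokens look like { name: 'startObject' } or { name: 'keyValue', value: 'id' };
    // packed number values arrive as strings, e.g. { name: 'numberValue', value: '42' }.
    console.log(token.name, token.value !== undefined ? token.value : '');
  })
  .on('end', () => console.log('done'));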

github ekalinin / sitemap.js / examples / express.example.js (View on GitHub)
const express = require('express');
const fs = require('fs');
const { resolve } = require('path');
const { createGzip } = require('zlib');
const { parser } = require('stream-json/Parser');
const { streamArray } = require('stream-json/streamers/StreamArray');
const map = require('through2-map'); // inferred from the map.obj(...) usage below
const { SitemapStream, streamToPromise } = require('sitemap');

const app = express();
let sitemap; // cached gzipped sitemap buffer

app.get('/sitemap.xml', function(req, res) {
  res.header('Content-Type', 'application/xml');
  res.header('Content-Encoding', 'gzip');
  // if we have a cached entry send it
  if (sitemap) {
    res.send(sitemap);
    return;
  }
  try {
    // this could just as easily be a db response
    const gzippedStream = fs.createReadStream(resolve(__dirname, '..', 'tests', 'mocks', 'perf-data.json'))
      .pipe(parser())                      // raw JSON -> token stream
      .pipe(streamArray())                 // top-level array items as { key, value }; replace with streamValues for JSON-streaming input
      .pipe(map.obj(chunk => chunk.value)) // unwrap each item
      .pipe(new SitemapStream({ hostname: 'https://example.com/' }))
      .pipe(createGzip());

    // cache the response
    streamToPromise(gzippedStream).then(sm => sitemap = sm);
    // stream the response; note a throw here fires asynchronously and is NOT
    // caught by the surrounding try/catch
    gzippedStream.pipe(res).on('error', (e) => { throw e });
  } catch (e) {
    console.error(e);
    res.status(500).end();
  }
});
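The key handoff above is between streamArray() and map.obj(...): the streamer emits one object per top-level array element, shaped as { key, value }, and the map stage unwraps value before it reaches SitemapStream. A minimal sketch (input data invented for illustration):

const { parser } = require('stream-json/Parser');
const { streamArray } = require('stream-json/streamers/StreamArray');
const { Readable } = require('stream');

// For the array ["a", "b"], streamArray() emits:
//   { key: 0, value: 'a' }
//   { key: 1, value: 'b' }
Readable.from(['["a", "b"]'])
  .pipe(parser())
  .pipe(streamArray())
  .on('data', ({ key, value }) => console.log(key, value));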
github piiojs / redis-utils / migrate / upload.js (View on GitHub)
const fs = require('fs');
const Redis = require('ioredis'); // inferred from the new Redis({ host, port }) usage below
const { parser } = require('stream-json/Parser');

// CLI arguments; the original's argument parser is not shown, so minimist is
// assumed here for illustration
const argv = require('minimist')(process.argv.slice(2));

const host = argv.h;
const port = argv.p;
const { filename } = argv;

const startTime = new Date();
let key;
let value;
let keysCount = 0;
const promises = []; // pending redis.set() calls, so a caller can await Promise.all(promises) once the stream ends

const redis = new Redis({
  host,
  port
});

const pipeline = fs.createReadStream(filename).pipe(parser());
pipeline.on('data', (data) => {
  switch (data.name) {
    case 'keyValue':
      key = data.value;
      break;
    case 'stringValue':
      ({ value } = data);
      process.stdout.clearLine(0); // clear the current progress line
      process.stdout.cursorTo(0);
      keysCount += 1;
      process.stdout.write(`Adding ${keysCount} keys`);
      promises.push(redis.set(key, value));
      break;
      // no default
  }
});
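The switch above works because, with the parser's default packing options, every object key arrives as a keyValue token and every string value as a stringValue token. A minimal sketch (input invented for illustration) that disables the chunked variants so only the packed tokens remain:

const { parser } = require('stream-json/Parser');
const { Readable } = require('stream');

// For {"user:1":"alice"} this logs:
//   startObject
//   keyValue user:1
//   stringValue alice
//   endObject
Readable.from(['{"user:1":"alice"}'])
  .pipe(parser({ streamKeys: false, streamValues: false })) // packed tokens only
  .on('data', (t) => console.log(t.name, t.value !== undefined ? t.value : ''));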
github esri-es / arcgis_websocket_server / streamserver / pipelines / default.js (View on GitHub)
const { parser } = require('stream-json/Parser');
const { streamValues } = require('stream-json/streamers/StreamValues');

// sanityCheck, _injectCtx, and CUSTOM_PIPELINE are defined elsewhere in this module
function compose(ctx) {
  // jsonStreaming: true makes the parser accept a stream of concatenated
  // JSON values (e.g. NDJSON) rather than a single document
  let pipeline = [
    parser({ jsonStreaming: true }),
    streamValues()
  ];
  if (sanityCheck(CUSTOM_PIPELINE)) {
    pipeline.push(..._injectCtx(CUSTOM_PIPELINE, ctx));
  } else {
    console.log(`Default Pipeline setup...[Skipping custom pipeline]`);
    if (CUSTOM_PIPELINE.length > 0) {
      console.warn(`Something is wrong: please review your custom pipeline`);
      process.exit(12);
    }
  }

  return pipeline;
}
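compose() returns an array of stream stages rather than a connected pipeline, leaving the wiring to the caller. A hypothetical sketch of how such an array might be connected (sourceStream, destinationStream, and ctx are inventions for illustration):

const { pipeline } = require('stream');

// Connect the stages returned by compose() between a source and a sink.
const stages = compose(ctx);
pipeline(
  sourceStream,      // hypothetical Readable, e.g. a websocket-backed feed
  ...stages,         // parser({ jsonStreaming: true }) -> streamValues() -> custom stages
  destinationStream, // hypothetical Writable that forwards results to clients
  (err) => {
    if (err) console.error('Pipeline failed:', err);
  }
);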

stream-json

stream-json is the micro-library of Node.js stream components for creating custom JSON processing pipelines with a minimal memory footprint. It can parse JSON files far exceeding available memory, streaming individual primitives using a SAX-inspired API.

License: BSD-3-Clause
Latest version published: 13 days ago

Package Health Score: 80 / 100