How to use fast-csv - 10 common examples

To help you get started, we’ve selected ten fast-csv examples based on popular ways the library is used in public projects.
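Most of the snippets below exercise one of two halves of the library: parsing (reading CSV into rows) and formatting (turning rows back into CSV text). As a quick orientation, here is a minimal round-trip sketch against the current (v3+) API; several of the snippets below date from before v3 and use the older names fromPath, fromStream and createWriteStream for the same operations.

const csv = require('fast-csv');

// Parsing: stream rows out of a CSV file.
csv.parseFile('input.csv', { headers: true })
  .on('error', (err) => console.error(err))
  .on('data', (row) => console.log(row))
  .on('end', (rowCount) => console.log(`Parsed ${rowCount} rows`));

// Formatting: write an array of rows to a CSV file.
csv.writeToPath('output.csv', [{ id: 1 }, { id: 2 }], { headers: true })
  .on('error', (err) => console.error(err))
  .on('finish', () => console.log('Write complete'));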

Example 1: DFEAGILEDEVOPS/MTC (load-test/bin/create-teacher-logins-csv.js)
async function batched () {
  winston.info(`creating ${totalBatches} csv files`)
  let currentBatch = 0
  let teacherIndex = 1
  while (currentBatch < totalBatches) {
    winston.info(`creating batch ${currentBatch}`)
    // const csvHeaders = ['username', 'password']
    const csvStream = csv.format()
    const writableStream = fs.createWriteStream(path.join(__dirname, `${currentBatch}-teacherLogins.csv`))
    csvStream.pipe(writableStream)
    // csvStream.write(csvHeaders)
    let batchIndex = 0
    while (batchIndex < batchSize) {
      batchIndex++
      csvStream.write([`teacher${teacherIndex}`, 'password'])
      teacherIndex++
    }
    csvStream.end()
    currentBatch++
  }
}
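One caveat with this snippet: the outer while loop opens a write stream per batch without ever waiting for one to finish, so every file handle stays open until the event loop drains. A minimal variation (a sketch, assuming the same csv, fs and path requires as the file above) awaits each file's 'finish' event before starting the next batch:

async function writeBatch (batchNumber, startIndex, batchSize) {
  const csvStream = csv.format()
  const writableStream = fs.createWriteStream(path.join(__dirname, `${batchNumber}-teacherLogins.csv`))
  csvStream.pipe(writableStream)
  for (let i = 0; i < batchSize; i++) {
    csvStream.write([`teacher${startIndex + i}`, 'password'])
  }
  csvStream.end()
  // resolve only once the file stream has flushed everything to disk
  await new Promise((resolve, reject) => {
    writableStream.on('finish', resolve).on('error', reject)
  })
}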
Example 2: chriskinsman/DynamoDbExportCsv (lib/DynamoDbExportCsv.js)
function (done) {
                // Form the filename with the table name as the subdirectory and the base of the filename
                // then the segment and the file number within the segment
                var fileName = table + "-" + segment + "-" + fileCount + ".csv";
                if (compressed) {
                    fileName += ".gz";
                }

                csvStream = csv.createWriteStream({ headers: true, maxBufferSize: 10000 });

                var writableStream;
                if (s3Bucket) {
                    var filePath = '';
                    if (s3Path) {
                        filePath += s3Path + "/";
                    }
                    filePath += table + "/" + fileName;
                    writableStream = s3StreamUpload(s3, { Bucket: s3Bucket, Key: filePath }, { concurrent: totalSegments });
                    self.emit(infoEvent, "Starting new file: s3://" + s3Bucket + "/" + filePath);
                }
                else {
                    writableStream = fs.createWriteStream(table + '/' + fileName);
                    self.emit(infoEvent, "Starting new file: " + fileName);
                }
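csv.createWriteStream is the pre-v3 fast-csv API; on v3 and later the same formatting stream is created with csv.format. A sketch of the ported call (check the remaining options against your version; maxBufferSize has no documented v3+ counterpart as far as I can tell):

var csv = require('fast-csv');

// v3+ replacement for csv.createWriteStream({ headers: true, ... })
var csvStream = csv.format({ headers: true });
csvStream.pipe(process.stdout);
csvStream.write({ table: 'example', segment: 0, file: 0 });
csvStream.end();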
Example 3: DFEAGILEDEVOPS/MTC (design/reference/old-pupil-app/bin/extract-logins-and-answers.js)
#!/usr/bin/env node
'use strict'

const mongoose = require('mongoose')
mongoose.Promise = global.Promise
const LogonEvent = require('../models/logon-event')
const Answers = require('../models/answer')
const csv = require('fast-csv')
const fs = require('fs')
const csvStream = csv.createWriteStream({headers: true})
const writableStream = fs.createWriteStream('out.csv')
const config = require('../config')

writableStream.on('finish', function () {
  console.error('DONE!')
})

csvStream.pipe(writableStream)

mongoose.connect(config.MONGO_CONNECTION_STRING, async function (error) {
  if (error) { console.error(error) }

  let answers

  // extract all complete answers
  try {
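The snippet is cut off inside the try block, but the shape of the remainder follows from the setup: query the collections, write one row per document to csvStream, then call csvStream.end() so the 'finish' handler on writableStream fires. A hypothetical continuation (the query and field names are illustrative, not from the original file):

  try {
    // hypothetical: pull the documents and write one CSV row per record
    answers = await Answers.find().lean().exec()
    answers.forEach((answer) => csvStream.write(answer))
  } catch (error) {
    console.error(error)
  }
  csvStream.end() // flushes the formatter and ends writableStream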
Example 4: IBM/predictive-market-using-arria (arriaHelpers/arriaRequestBuilder.js)
function loadVcvValues(vcvReadStream, factors){
    var deferred = Q.defer();
    let correlIds = factors.getCorrelFactorIds();
    console.log('Load vcv data for Arria');
    csv
    .fromStream(vcvReadStream)
    .on("data", function(row){
        // check factors are in key factors
        if (correlIds.includes(row[0]) && correlIds.includes(row[1])){
            // see if variance
            if (row[0] == row[1]){
                let ids = FactorCollection.convertFromCorrelId(row[0])
                factors.setVariance(parseFloat(row[2]), ids[0], ids[1]);
                if (row[0] == factors.shockedFactorId){
                    factors.setCovariance(parseFloat(row[2]), factors.shockedFactorId);
                }
            } // else correl 
            else {
                if (row.slice(0,2).includes(factors.shockedFactorId)){
                    let otherFactor = (row[0] == factors.shockedFactorId) ? row[1] : row[0];
                    let ids = FactorCollection.convertFromCorrelId(otherFactor)
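csv.fromStream is pre-v3 naming as well; on fast-csv v3+ the equivalent entry point is csv.parseStream. A sketch of the same read with the newer API, reusing the vcvReadStream argument from the function above:

csv.parseStream(vcvReadStream)
    .on('error', (err) => console.error(err))
    .on('data', (row) => {
        // same row handling as above: filter on correlIds, then route
        // variances and correlations into the factors collection
    })
    .on('end', (rowCount) => console.log(`Parsed ${rowCount} rows`));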
Example 5: craigmw/CellarWarden (compressLog.js)
var field = []; 
var timeStamp = 0;
var timeNow = new Date();
var msDays = 1000 * 60 * 60 * 24;   //Number of milliseconds in a day.
var msMinutes = 1000 * 60;          //Number of milliseconds in a minute. 


var alarmRecords = 0;
var alarmRecord = [""];
var lastKept = 0;

//Load the alarms logfile to make sure that these records are not deleted.
// If the alarms logfile does not exist, ignore it.
utils.log('Reading alarms logfile: ' + alarmsLogFile );
if ( utils.fileExists( alarmsLogFile ) ) {
    csv
        .fromPath( alarmsLogFile )
        .on("data", function(data) {
            var record = data.toString();
            field = record.split(',');
            timeStamp = Date.parse( field[0] );
            alarmRecord[ alarmRecords ] = field[0];
            //process.stdout.write( alarmRecords + ": " + timeStamp + "-" + record + '\r');
            alarmRecords++;
        })
        .on("end", function(){
            utils.log("\n Done");
            //Display alarm logfile records.
            /* if( verbose ) {
                utils.log( 'Displaying loaded alarm records...');
                for (var i = 0; i < alarmRecords; i++) {
                    utils.log( 'alarmRecord[' + i + ']: ' + alarmRecord[i] );
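Worth noting: fast-csv already hands the 'data' callback a parsed array of fields, so converting it back to a string and re-splitting on ',' re-does the parser's work and would break on quoted values that contain commas. A sketch of the simpler form (shown with the v3+ parseFile, but the same applies to fromPath):

const csv = require('fast-csv');

csv.parseFile('alarms.csv')
    .on('error', (err) => console.error(err))
    .on('data', (fields) => {
        // fields is already an array; no toString()/split(',') needed
        const timeStamp = Date.parse(fields[0]);
        console.log(timeStamp, fields);
    })
    .on('end', () => console.log('Done'));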
Example 6: blockstack/app.co (scripts/rankings.js)
const fs = require('fs');
const csv = require('fast-csv');
require('sepia'); /* eslint import/no-extraneous-dependencies: [0] */
require('dotenv').config();

const twitter = require('../common/lib/twitter');

// const { getRank } = require('../common/lib/similarweb.js');

const filename = './common/data/dapps.csv';
const apps = [];

csv
  .fromPath(filename, {
    headers: true,
  })
  .on('data', async (data) => {
    apps.push(data);
  })
  .on('end', async () => {
    await twitter.fetchMentions(apps);

    const writeStream = fs.createWriteStream('./common/data/dapps-ranked.csv');
    csv.write(apps, { headers: true }).pipe(writeStream);

    console.log('done');
  });
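csv.write(rows, options) returns a readable stream that you pipe to a destination yourself. The shorthand csv.writeToPath collapses the last two lines into one call and exposes completion events, which the snippet above never checks. A sketch:

csv.writeToPath('./common/data/dapps-ranked.csv', apps, { headers: true })
  .on('error', (err) => console.error(err))
  .on('finish', () => console.log('done'));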
Example 7: chriswhong/amazon-orders-collage (get-photos.js)
.trim();

    // strip out the metadata at the beginning of the img source
    return image.replace(/^data:image\/jpeg;base64,/, '');
  });
};

const processAsin = (ASIN, destFile) => scrapePhoto(ASIN)
  .then(base64Data => fs.outputFile(destFile, Buffer.from(base64Data, 'base64')));

let count = 0; // use count integer for image filenames

const csvPath = process.argv[2];

// parse the csv of orders
csv
  .fromPath(csvPath, { headers: true })
  .on('data', (row) => {
    // get the product's ASIN (unique ID)
    const ASIN = row['ASIN/ISBN'];
    processAsin(ASIN, `product_images/${count}.jpg`).catch((err) => {
      // if there's an error scraping an image for this row, ignore it
      console.error("Oops, couldn't get an image...", err);
    });
    count += 1; // increment count
  })
  .on('end', () => {
    console.log('Done!');
  })
  .on('error', (error) => {
    console.error(error);
  });
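Because the 'data' handler kicks off processAsin without awaiting it, a large CSV starts every scrape at once. The parser is a standard Node stream, so one way to throttle (a sketch, assuming one image at a time is acceptable) is to pause it until each row's work settles:

const stream = csv
  .fromPath(csvPath, { headers: true })
  .on('data', (row) => {
    stream.pause(); // hold further rows until this one finishes
    processAsin(row['ASIN/ISBN'], `product_images/${count}.jpg`)
      .catch((err) => console.error("Oops, couldn't get an image...", err))
      .finally(() => stream.resume());
    count += 1;
  })
  .on('end', () => console.log('Done!'));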
Example 8: bhushankumarl/amazon-mws (examples/javaScript/sample/getReportAsReadableStram.js)
}
            if (rows.length >= 5000 || (end && rows.length)) {
                sendToDB(rows.splice(0, rows.length), callback);
                rows = [];
            }
        }

        function sendToDB(data, callback) {
            // Send your data to the db
            console.log(data.length);
            callback();
        }

        var decodeStream = iconv.decodeStream('ISO-8859-1');
        response.pipe(decodeStream);
        var csvStream = csv.parse({
            delimiter: '\t',
            headers: true,
            discardUnmappedColumns: true,
            ignoreEmpty: true,
            trim: true
        });
        decodeStream.pipe(csvStream);
        csvStream.transform(function (data, cb) {
            processRowsInBatches(data, false, cb);
        });
        csvStream
            .on('error', function (error) { console.error(error); })
            .on('finish', function () {
                console.log('Finished processing stream');
                // Call processRowsInBatches to process the remaining rows
                processRowsInBatches(undefined, true, function () {
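The interesting piece here is csvStream.transform with a two-argument callback: each parsed row is held until cb is invoked, which is what lets processRowsInBatches apply backpressure while a batch is flushed. A self-contained sketch of the same parse-plus-transform wiring:

var fs = require('fs');
var csv = require('fast-csv');

var csvStream = csv.parse({ delimiter: '\t', headers: true, trim: true });
csvStream.transform(function (row, cb) {
    // do async work per row, then release the row downstream
    setImmediate(function () { cb(null, row); });
});
csvStream
    .on('error', function (error) { console.error(error); })
    .on('data', function (row) { console.log(row); })
    .on('end', function () { console.log('Finished processing stream'); });
fs.createReadStream('report.tsv').pipe(csvStream);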
Example 9: hochschule-darmstadt/openartbrowser (scripts/data_manipulation/addlanguage.js)
function getLanguageConfig() {
	var read = fs.createReadStream(csvFilePath)
		.pipe(csv.parse(options))
		.on('data', function (data) {  // fires once for each parsed row
			console.log(data);  // data is already an array
		})
		.on('data-invalid', () => console.log("Error! data invalid"))
		.on('end', function (rowCount) {  // note: returning a value here goes nowhere
			console.log('Read finished');
		})
	
}
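As written, getLanguageConfig cannot hand the parsed rows back: returning from the 'end' callback returns to the stream machinery, not to the caller, and the function itself returns undefined. Wrapping the stream in a Promise gives callers something to await (a sketch reusing csvFilePath and options from the file above; on v3+ the 'end' event also receives the row count):

function getLanguageConfig() {
	return new Promise((resolve, reject) => {
		const rows = [];
		fs.createReadStream(csvFilePath)
			.pipe(csv.parse(options))
			.on('error', reject)
			.on('data-invalid', (row, rowNumber) => console.log(`Invalid row ${rowNumber}`))
			.on('data', (row) => rows.push(row))
			.on('end', (rowCount) => {
				console.log(`Read finished (${rowCount} rows)`);
				resolve(rows);
			});
	});
}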
Example 10: sat-utils/sat-api (packages/api-lib/libs/ingest-csv.js)
function processFiles({
  bucket,
  key,
  transform,
  currentFileNum = 0,
  lastFileNum = 0,
  arn = null,
  retries = 0
}) {
  const maxRetries = 5
  const nextFileNum = (currentFileNum < lastFileNum) ? currentFileNum + 1 : null

  // CSV stream from file
  const csvStream = csv.parse({ headers: true, objectMode: true })
  const _key = `${key}${currentFileNum}.csv`
  s3.getObject({ Bucket: bucket, Key: _key }).createReadStream().pipe(csvStream)

  console.log(`Processing s3://${bucket}/${_key}`)

  return es.stream(csvStream, transform)
    .then(() => {
      invokeLambda(bucket, _key, nextFileNum, lastFileNum, arn, 0)
    }).catch(() => {
      // if CSV failed, try it again
      if (retries < maxRetries) {
        invokeLambda(bucket, _key, currentFileNum, lastFileNum, arn, retries + 1)
      } else {
        // log and move onto the next one
        console.log(`error: maxRetries hit in file ${currentFileNum}`)
        invokeLambda(bucket, _key, nextFileNum, lastFileNum, arn, 0)
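One failure mode the retry logic doesn't cover: errors on the S3 read stream itself (a missing key, expired credentials) are emitted on that stream, not on csvStream, so they would bypass the catch above. A sketch of forwarding them into the same path, assuming es.stream rejects when the parser errors:

  const csvStream = csv.parse({ headers: true, objectMode: true })
  s3.getObject({ Bucket: bucket, Key: _key })
    .createReadStream()
    .on('error', (err) => csvStream.destroy(err)) // surface S3 errors on the parser
    .pipe(csvStream)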