How to use the fast-csv.fromPath function in fast-csv

To help you get started, we’ve selected a few fast-csv examples based on popular ways the library is used in public projects.
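
At its simplest, fromPath opens a CSV file and returns a readable stream: it emits a 'data' event for every parsed row and an 'end' event once the file has been consumed, and an options object such as { headers: true } controls how rows are parsed. A minimal sketch (the file name is a placeholder; note that fast-csv 3.x and later expose this functionality as parseFile instead):

const csv = require('fast-csv');

csv
  .fromPath('data.csv', { headers: true }) // 'data.csv' is a placeholder path
  .on('error', (error) => console.error(error))
  .on('data', (row) => {
    // with { headers: true }, each row is an object keyed by the header row;
    // without it, rows arrive as plain arrays of strings
    console.log(row);
  })
  .on('end', () => console.log('done'));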


Example: craigmw/CellarWarden (compressLog.js)
var field = [];
var record = '';    //Declared here; assigned for each row in the "data" handler below.
var timeStamp = 0;
var timeNow = new Date();
var msDays = 1000 * 60 * 60 * 24;   //Number of milliseconds in a day.
var msMinutes = 1000 * 60;          //Number of milliseconds in a minute. 


var alarmRecords = 0;
var alarmRecord = [""];
var lastKept = 0;

//Load alarms logfile to make sure that these records are not deleted.
// If alarms logfile does not exist, ignore.
utils.log('Reading alarms logfile: ' + alarmsLogFile );
if ( utils.fileExists( alarmsLogFile ) ) {
    csv
        .fromPath( alarmsLogFile )
        .on("data", function(data) {
            record = data.toString();
            field = record.split(',');
            timeStamp = Date.parse( field[0] );
            alarmRecord[ alarmRecords ] = field[0];
            //process.stdout.write( alarmRecords + ": " + timeStamp + "-" + record + '\r');
            alarmRecords++;
        })
        .on("end", function(){
            utils.log("\n Done");
            //Display alarm logfile records.
            /* if( verbose ) {
                utils.log( 'Displaying loaded alarm records...');
                for (var i = 0; i < alarmRecords; i++) {
                    utils.log( 'alarmRecord[' + i + ']: ' + alarmRecord[i] );
Example: blockstack/app.co (scripts/rankings.js)
const fs = require('fs');
const csv = require('fast-csv');
require('sepia'); /* eslint import/no-extraneous-dependencies: [0] */
require('dotenv').config();

const twitter = require('../common/lib/twitter');

// const { getRank } = require('../common/lib/similarweb.js');

const filename = './common/data/dapps.csv';
const apps = [];

csv
  .fromPath(filename, {
    headers: true,
  })
  .on('data', (data) => {
    apps.push(data);
  })
  .on('end', async () => {
    await twitter.fetchMentions(apps);

    const writeStream = fs.createWriteStream('./common/data/dapps-ranked.csv');
    csv.write(apps, { headers: true }).pipe(writeStream);

    // log only once the output file has finished flushing
    writeStream.on('finish', () => console.log('done'));
  });
Example: chriswhong/amazon-orders-collage (get-photos.js)
.trim();

    // strip out the metadata at the beginning of the img source
    return image.replace(/^data:image\/jpeg;base64,/, '');
  });
};

const processAsin = (ASIN, destFile) => scrapePhoto(ASIN)
  .then(base64Data => fs.outputFile(destFile, Buffer.from(base64Data, 'base64')));

let count = 0; // use count integer for image filenames

const csvPath = process.argv[2];

// parse the csv of orders
csv
  .fromPath(csvPath, { headers: true })
  .on('data', (row) => {
    // get the product's ASIN (unique ID)
    const ASIN = row['ASIN/ISBN'];
    processAsin(ASIN, `product_images/${count}.jpg`).catch((err) => {
      // if there's an error scraping an image for this row, ignore it
      console.error("Oops, couldn't get an image...", err);
    });
    count += 1; // increment count
  })
  .on('end', () => {
    console.log('Done!');
  })
  .on('error', (error) => {
    console.error(error);
  });
Example: Code-dot-mil/crossfeed-web (server/h1_api.js)
router.post('/importCsv', upload.single('csv'), function(req, res) {
	var fileRows = [], fileHeader;

	// open uploaded file
	csv.fromPath(req.file.path)
	.on("data", function (data) {
	  fileRows.push(data); // push each row
	})
	.on("end", async () => {
	  fs.unlinkSync(req.file.path);   // remove temp file

	  fileRows.shift(); //skip header row
	  var items = fileRows.map((row) => {
	  	return {
			hackerone_id: row[1],
			title: row[2],
			severity: row[3],
			state: row[5],
			substate: row[6],
			weakness: row[7],
			reported_at: row[8],
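
Because this route calls fromPath without { headers: true }, each row arrives as a positional array, so the header row has to be shifted off and fields referenced by index. A hedged alternative sketch for the parsing portion above (with headers enabled each row is an object keyed by column name; the exact key names depend on the uploaded file's header row):

	csv.fromPath(req.file.path, { headers: true })
	.on("data", function (data) {
	  // each row is now an object keyed by the CSV header, so the manual
	  // shift() and numeric indexing above would go away
	  fileRows.push(data);
	})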
Example: DinoChiesa/EdgeTools (provisionDevPortalUsers/provisionDevPortalUsers.js)
function readDataFile(cb) {
  var result = [];
  if (gOptions.options.verbose) {
    console.log('data file: ' + gOptions.options.datafile);
  }
  csvparser
    .fromPath(gOptions.options.datafile,  {headers: true})
    .on("data", function(data){
      if (data.email !== '' && data.username !== '') {
        result.push(data);
      }
    })
    .on("end", function(){
      if (gOptions.options.verbose) {
        console.log('read %d users.', result.length);
      }
      cb(null, result);
    });
}
Example: craigmw/CellarWarden (compressLog.js)
function parseLogFile() {
    utils.log( 'Parsing logfile ' + logFile + ' records...' );
    csv
        .fromPath( logFile, {headers: false} )
        .validate( function(data) {
            record = data.toString();
            field = record.split(',');
            count++;
            return keepRecord( field[0], record, timeNow );
        })
        .on("data", function(data){
            record = data.toString();
            //field = record.split(',');
            //timeStamp = Date.parse( field[0] );
            if (verbose) {
                process.stdout.write( count + '-' + keepCount + " - " + record + '\r');
            }
            keepCount++;
        })
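
Note that .validate() doubles as a filter here: rows for which keepRecord() returns false never reach the "data" handler. In the legacy fast-csv API those rejected rows are emitted on a separate "data-invalid" event, which could be observed with a sketch like this (the logging body is an assumption, not part of CellarWarden):

    csv
        .fromPath( logFile, {headers: false} )
        .validate( function(data) {
            return keepRecord( data.toString().split(',')[0], data.toString(), timeNow );
        })
        .on("data-invalid", function(data) {
            // rows rejected by validate() land here instead of on "data"
            utils.log( 'Dropped record: ' + data.toString() );
        });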
Example: BarnesFoundation/barnes-tms-extract (csv_es/esImport.js)
function writeCSVToES(csvFilePath, client) {
	csv
		.fromPath(csvFilePath, { headers: true })
		.on('data', (data) => {
			pushCSVDataToES(data, client);
		})
		.on('end', () => {
			console.log('Finished export');
		});
}
Example: BarnesFoundation/barnes-tms-extract (src/csv_es/src/script/esCollection.js)
return new Promise((resolve, reject) => {
			const allCSVIds = new Set();
			const csvFilePath = path.join(this._csvRootDir, csvExport, 'objects.csv');
			try {
				csv
					.fromPath(csvFilePath, { headers: true })
					.on('data', (data) => {
						allCSVIds.add(parseInt(data.id, 10));
					})
					.on('end', () => {
						resolve(allCSVIds);
					});
			} catch (e) {
				reject(e);
			}
		});
	}
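
A caveat with this pattern: the try/catch only guards the synchronous stream setup, because parse failures are reported asynchronously on the stream. Attaching an 'error' handler, as the amazon-orders-collage example above does, lets the promise reject on bad input. A hedged variant:

				csv
					.fromPath(csvFilePath, { headers: true })
					.on('error', reject) // surface async parse errors to the caller
					.on('data', (data) => {
						allCSVIds.add(parseInt(data.id, 10));
					})
					.on('end', () => {
						resolve(allCSVIds);
					});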
Example: EasyERP/EasyERP_open_source (routes/importFile.js)
function getData(callback) {
                    callback(null, data);
                }

                tasksWaterflow = [getData, parse, findAndReplaceObjectId, saveToDbOrUpdate];

                async.waterfall(tasksWaterflow, function (err) {
                    if (err) {
                        error = err;
                    }
                    callback();
                });
            }, 1000);

            csv
                .fromPath(filePath)
                .validate(function (data) {

                    if (!headers) {
                        headers = data;

                        if (headers.length != expertedKey.length) {
                            error = new Error('Different lengths headers');
                            error.status = 400;
                            return next(error);
                        }

                        for (var i = expertedKey.length - 1; i >= 0; i--) {

                            if (headers[i] !== expertedKey[i]) {
                                error = new Error('Field \"' + headers[i] + '\" not valid. Need ' + expertedKey[i]);
Example: antonycourtney/tad (src/csvimport.js)
.then(() => (isFinal ? null : db.run('begin')))
      return retp
    }

    /*
     * TODO: multiple sources indicate wrapping inserts in a transaction is key to getting
     * decent bulk load performance.
     * We're currently wrapping all inserts in one huge transaction. Should probably break
     * this into more reasonable (50K rows?) chunks.
     */
    await db.run(dropStmt)
    await db.run(createStmt)
    log.log('table created')
    await db.run('begin')
    const insertStmt = await db.prepare(insertStmtStr)
    const rowCount = await consumeStream(csv.fromPath(pathname, md.csvOptions),
                                         insertRow(insertStmt), commitBatch, md.rowCount,
                                         hasHeaderRow)
    log.log('consumeStream completed, rowCount: ', rowCount)
    insertStmt.finalize()
    return md
  } catch (err) {
    log.error(err, err.stack)
    throw err
  }
}
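
Following up on the TODO above, here is a minimal sketch of committing in chunks rather than in one huge transaction (BATCH_SIZE is an assumed constant and db/insertStmt are the handles already in scope in this excerpt; this is not tad's actual implementation):

// wraps an insert callback so that every BATCH_SIZE rows the current
// transaction is committed and a fresh one is opened
const BATCH_SIZE = 50000  // rows per transaction, per the TODO's suggestion

const insertRowChunked = (db, insertStmt) => {
  let rowsInBatch = 0
  return async (row) => {
    await insertStmt.run(row)
    rowsInBatch += 1
    if (rowsInBatch >= BATCH_SIZE) {
      await db.run('commit')
      await db.run('begin')
      rowsInBatch = 0
    }
  }
}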