How to use the `csv.parse` function in the csv package

To help you get started, we’ve selected a few csv examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github oaeproject / Hilary / packages / oae-principals / lib / api.user.js View on Github external
return _cleanUpCSVFile(userCSV, () => {
      callback(validator.getFirstError());
    });
  }

  // Create a new context object on the request tenant
  const adminCtx = new Context(tenant, ctx.user());

  // Will contain an entry for each user in the CSV file
  const data = [];

  // The CSV module works with streams, so get a readable stream to the uploaded CSV file
  const input = fs.createReadStream(userCSV.path);

  // Pipe the stream to a CSV parser and keep track of the user records
  const parser = csv.parse({ trim: true });
  input.pipe(parser);
  parser.on('readable', () => {
    let user = parser.read();
    while (user) {
      data.push(user);
      user = parser.read();
    }
  });

  parser
    .on('finish', () => {
      // If the CSV parse was successful, we call the callback to prevent the request from timing out
      // whilst the users are being loaded
      PrincipalsEmitter.emit('preCSVUserImport');
      callback();
github OpenBookPrices / country-data / data / currency_csv_to_json.js View on Github external
// Take the csv and convert to json and tidy it up so that it is consistent.

var path = require('path');
var _ = require('underscore');
var csv = require('csv');
var canonicalJSON = require('canonical-json');
var fs = require('fs');

// Accumulates one object per CSV row; sorted and printed once parsing finishes
var output = [];

// Open a read stream over the bundled currencies CSV
var csvFile = path.join(__dirname, 'currencies.csv');
var input = fs.createReadStream(csvFile);

// columns: true makes the parser emit objects keyed by the header row
var parser = csv.parse({ columns: true });

// Collect parsed records as they become readable. With {columns: true}
// each record is an object keyed by the CSV header row.
parser.on('readable', function () {
  var record = null;
  // parser.read() returns null once the internal buffer is drained
  while ((record = parser.read()) !== null) {
    // The decimals column arrives as a string; coerce it to a number.
    // Always pass the radix to parseInt to avoid legacy octal parsing.
    record.decimals = parseInt(record.decimals, 10);
    output.push(record);
  }
});

parser.on('finish', function(){

  // sort by code
  output = _.sortBy(output, function (i) { return i.code;} );

  // print out results to stdout
github frictionlessdata / tableschema-js / src / table.js View on Github external
async function createRowStream(source, encoding, parserOptions) {
  const parser = csv.parse({ltrim: true, relax_column_count: true, ...parserOptions})
  let stream

  // Stream factory
  if (isFunction(source)) {
    stream = source()

  // Node stream
  } else if (source.readable) {
    stream = source

  // Inline source
  } else if (isArray(source)) {
    stream = new Readable({objectMode: true})
    for (const row of source) stream.push(row)
    stream.push(null)
github OpenEnergyDashboard / OED / src / server / services / loadFromCsvStream.js View on Github external
return conn.tx(t => new Promise(resolve => {
		let rejected = false;
		const error = null;
		const MIN_INSERT_BUFFER_SIZE = 1000;
		let modelsToInsert = [];
		const pendingInserts = [];

		const parser = csv.parse();

		function insertQueuedModels() {
			const insert = bulkInsertModels(modelsToInsert, t);
			pendingInserts.push(insert);
			modelsToInsert = [];
		}

		// Defines how the parser behaves when it has new data (models to be inserted)
		parser.on('readable', () => {
			let row;
			// We can only get the next row once so we check that it isn't null at the same time that we assign it
			while ((row = parser.read()) !== null) { // tslint:disable-line no-conditional-assignment
				if (!rejected) {
					modelsToInsert.push(mapRowToModel(row));
				}
			}
github OpenEnergyDashboard / OED / app / controllers / parseCSV.js View on Github external
res.on('end', () => {
            // All response chunks received — parse the accumulated CSV text.
            // NOTE(review): `csv` here is presumably the accumulated body string
            // (it shadows/differs from the csv parser module) — confirm at caller.
            CSV.parse(csv, (err, result) => {
                // NOTE(review): `err` is ignored; on a parse failure `result`
                // (and therefore `val`) will be undefined — consider handling.
                val = result;
                callback(val, meter_id);
            });
        });
    });
github stanford-oval / almond-cloud / scripts / import_turking.js View on Github external
db.withTransaction((dbClient) => {
        let promises = [];

        const parser = csv.parse({ columns: true, delimiter: '\t' });
        process.stdin.pipe(parser);

        return Q.Promise((callback, errback) => {
            parser.on('data', (row) => {
                //console.log(row);
                let {id,thingtalk,paraphrase} = row;
                //let [,utterance,tt] = row;
                let testTrain = '';
                /*
                if (coin(testProbability))
                    testTrain = '-test';
                else if (coin(devProbability))
                    testTrain = '-dev';
                else
                    testTrain = '-train';
                */
github ideal-postcodes / postcodes.io / data / scripts / index.js View on Github external
async.each(configs, (config, next) => {
		const delimiter = config.delimiter || "	";
		const file = toFilePath(config.file);
		const transform = config.transform;
		const parseOptions = Object.assign({
			delimiter: delimiter
		}, config.parseOptions || {});
		const encoding = config.encoding || "binary";

		fs.createReadStream(file, { encoding: encoding })
		.pipe(csv.parse(parseOptions))
		.on("end", next)
		.on("error", next)
		.on("data", row => {
			if (row.join("").trim().length === 0) return;
			const parsedRow = transform(row);
			if (parsedRow.length) {
				output.set(parsedRow[0], parsedRow[1]);
			}
		});
	}, error => {
		if (error) return callback(error);
github lastlegion / anyToJSON / anyToJSON.js View on Github external
// Read the file as UTF-8 text, then parse it into an array of row objects
// (columns: true keys each row by the header; ltrim strips leading
// whitespace inside fields), and hand the result to the caller's callback.
fs.readFile(path, 'utf8', function(err,data){
        // NOTE(review): the read error `err` is silently ignored, and the
        // inner callback's `err`/`data` shadow the outer ones — on any
        // failure `callback` receives undefined. Consider propagating errors.
        csvLib.parse(data, {ltrim: true, columns: true}, function(err, data){
          callback(data);
        })

      });
}
github stanford-oval / almond-cloud / scripts / turk_to_sempre.js View on Github external
function main() {
    var inp_format = process.argv[2];
    var fin = path.join(process.argv[3], 'data.csv');
    var fout = path.join(process.argv[3], 'data-sempre.csv');

    var output = csv.stringify();
    var parser = csv.parse();
    var file = fs.createWriteStream(fout);
    output.pipe(file);

    fs.createReadStream(fin)
        .pipe(parser)
        .on('data', (row) => {
            var tt = ThingTalk.Grammar.parse(row[1]);
            var json = SEMPRESyntax.toSEMPRE(tt, false);

            var ex = {
                id: row[0],
                target_json: json,
                target_tt: tt,
                sythetic: row[2],
                utterance: row[3]
            };
github stanford-oval / almond-cloud / util / upload_dataset.js View on Github external
}

                await entityModel.create(dbClient, {
                    name: req.body.entity_name,
                    id: req.body.entity_id,
                    is_well_known: false,
                    has_ner_support: !req.body.no_ner_support
                });

                if (req.body.no_ner_support)
                    return;

                if (!req.files.upload || !req.files.upload.length)
                    throw new BadRequestError(req._("You must upload a CSV file with the entity values."));

                const parser = csv.parse({delimiter: ','});
                fs.createReadStream(req.files.upload[0].path).pipe(parser);

                const transformer = Stream.Transform({
                    objectMode: true,

                    transform(row, encoding, callback) {
                        if (row.length !== 2) {
                            callback();
                            return;
                        }

                        const value = row[0].trim();
                        const name = row[1];

                        const tokens = tokenizer.tokenize(name);
                        const canonical = tokens.join(' ');

csv

A mature CSV toolset with a simple API, full of options, and tested against large datasets.

MIT
Latest version published 2 months ago

Package Health Score

93 / 100
Full package analysis