How to use the fast-csv.parse function in fast-csv

To help you get started, we’ve selected a few fast-csv examples based on popular ways it is used in public projects.


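Before the community examples, here is a minimal, self-contained sketch of csv.parse in action (the file name data.csv and its columns are assumptions for illustration):

const fs = require('fs');
const csv = require('fast-csv');

// With headers: true, each 'data' event delivers one row as an object
// keyed by the header names; 'end' reports how many rows were parsed.
fs.createReadStream('data.csv')
    .pipe(csv.parse({ headers: true }))
    .on('error', (error) => console.error(error))
    .on('data', (row) => console.log(row))
    .on('end', (rowCount) => console.log(`Parsed ${rowCount} rows`));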
github bhushankumarl / amazon-mws / examples / javaScript / sample / getReportAsReadableStram.js
        var rows = [];

        // Reconstructed opening (truncated in this excerpt); the signature
        // is inferred from the call sites below.
        function processRowsInBatches(row, end, callback) {
            if (row) {
                rows.push(row);
            }
            if (rows.length >= 5000 || (end && rows.length)) {
                // Drain the whole buffer; the original splice(0, 0) sent an empty array
                sendToDB(rows.splice(0, rows.length), callback);
                rows = [];
            } else {
                callback(); // nothing to flush yet; let the stream continue
            }
        }

        function sendToDB(data, callback) {
            // Send your data to the db
            console.log(data.length);
            callback();
        }

        var decodeStream = iconv.decodeStream('ISO-8859-1');
        response.pipe(decodeStream);
        var csvStream = csv.parse({
            delimiter: '\t',
            headers: true,
            discardUnmappedColumns: true,
            ignoreEmpty: true,
            trim: true
        });
        decodeStream.pipe(csvStream);
        csvStream.transform(function (data, cb) {
            processRowsInBatches(data, false, cb);
        });
        csvStream
            .on('error', function (error) { console.error(error); })
            .on('finish', function () {
                console.log('Finished processing stream');
                // Call processRowsInBatches to process remaining rows
                processRowsInBatches(undefined, true, function () {
                    // Reconstructed: the original callback body is truncated
                    console.log('All rows flushed');
                });
            });
github hochschule-darmstadt / openartbrowser / scripts / data_manipulation / addlanguage.js
function getLanguageConfig() {
    // csv.parse is asynchronous, so the rows cannot be returned directly:
    // the original returned from the 'end' handler, which discards the
    // value. Collect the rows and resolve them through a Promise instead.
    return new Promise(function (resolve, reject) {
        var rows = [];
        fs.createReadStream(csvFilePath)
            .pipe(csv.parse(options))
            .on('data', function (data) { // fires once per parsed row
                console.log(data); // each row arrives as an array
                rows.push(data);
            })
            .on('data-invalid', (row) => console.log("Error! data invalid"))
            .on('error', (err) => reject(err))
            .on('end', function () {
                console.log('Read finished');
                resolve(rows);
            });
    });
}
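With the Promise wrapper above, the parsed rows can be consumed once reading completes (a usage sketch, assuming the rewrite above):

getLanguageConfig()
    .then((rows) => console.log('loaded ' + rows.length + ' rows'))
    .catch((err) => console.error(err));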
github sat-utils / sat-api / packages / api-lib / libs / ingest-csv.js
function processFiles({
  bucket,
  key,
  transform,
  currentFileNum = 0,
  lastFileNum = 0,
  arn = null,
  retries = 0
}) {
  const maxRetries = 5
  const nextFileNum = (currentFileNum < lastFileNum) ? currentFileNum + 1 : null

  // CSV stream from file
  const csvStream = csv.parse({ headers: true, objectMode: true })
  const _key = `${key}${currentFileNum}.csv`
  s3.getObject({ Bucket: bucket, Key: _key }).createReadStream().pipe(csvStream)

  console.log(`Processing s3://${bucket}/${_key}`)

  return es.stream(csvStream, transform)
    .then(() => {
      invokeLambda(bucket, _key, nextFileNum, lastFileNum, arn, 0)
    }).catch(() => {
      // if CSV failed, try it again
      if (retries < maxRetries) {
        invokeLambda(bucket, _key, currentFileNum, lastFileNum, arn, retries + 1)
      } else {
        // log and move onto the next one
        console.log(`error: maxRetries hit in file ${currentFileNum}`)
        invokeLambda(bucket, _key, nextFileNum, lastFileNum, arn, 0)
      }
    })
}
github exceljs / exceljs / lib / csv / csv.js
    // Reconstructed opening (truncated in this excerpt): a mapping helper
    // that converts each raw CSV string into a number, date, special
    // value, or plain string.
    const map = (datum) => {
      const datumNumber = Number(datum);
      if (!Number.isNaN(datumNumber) && datumNumber !== Infinity) {
        return datumNumber;
      }
      const dt = dayjs(datum, dateFormats, true);
      if (dt.isValid()) {
        return new Date(dt.valueOf());
      }
      const special = SpecialValues[datum];
      if (special !== undefined) {
        return special;
      }
      return datum;
    };

    const csvStream = fastCsv.parse(options)
      .on('data', data => {
        worksheet.addRow(data.map(map));
      })
      .on('end', () => {
        csvStream.emit('worksheet', worksheet);
      });
    return csvStream;
  }
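SpecialValues, dateFormats, and dayjs are defined elsewhere in the exceljs source and are not shown in this excerpt. A plausible sketch of what such lookup tables contain (assumed for illustration, not copied from the library):

// Illustrative only: literal CSV strings mapped to typed cell values,
// plus the formats dayjs tries in strict mode (dayjs needs the
// customParseFormat plugin for that multi-format call).
const SpecialValues = {
  true: true,
  false: false,
  '#N/A': { error: '#N/A' },
  '#VALUE!': { error: '#VALUE!' },
};
const dateFormats = ['YYYY-MM-DD[T]HH:mm:ss', 'YYYY-MM-DD'];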
github NetsBlox / NetsBlox / utils / rpc / eclipse-2017 / checkStations.js
function loadStations(fileName){
    let stations = [];
    let deferred = Q.defer();
    let stream = fs.createReadStream(fileName);
    var csvStream = csv
        .parse({headers:true, objectMode:true})
        .on('data', function(data){
            data.latitude = parseFloat(data.latitude);
            data.longitude = parseFloat(data.longitude);
            data.distance = parseFloat(data.distance);
            data.views = parseInt(data.views, 10);
            data.elevation = parseInt(data.elevation, 10);
            delete data.updatedAt;
            stations.push(data);
        })
        .on('end', function(){
            console.log('done loading stations');
            deferred.resolve(stations);
        });

    stream.pipe(csvStream);
    return deferred.promise; // reconstructed: callers need the promise to await the stations
}
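Q's deferred pattern predates native Promises; the same loader works without the extra dependency (a sketch, assuming fs and fast-csv are already required):

function loadStations(fileName) {
    return new Promise((resolve, reject) => {
        const stations = [];
        fs.createReadStream(fileName)
            .pipe(csv.parse({ headers: true }))
            .on('data', (data) => stations.push(data))
            .on('error', reject)
            .on('end', () => resolve(stations));
    });
}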
github linkedconnections / linked-connections-server / lib / routes / stops.js
"name": "http://xmlns.com/foaf/0.1/name",
                        "longitude": "http://www.w3.org/2003/01/geo/wgs84_pos#long",
                        "latitude": "http://www.w3.org/2003/01/geo/wgs84_pos#lat",
                        "dct:spatial": {
                            "@type": "@id"
                        },
                    },
                    "@id": (this.server_config.protocol || "http") + "://" + this.server_config.hostname + "/" + company + '/stops',
                    "@graph": []
                };
                let feed = feeds[feeds.length - 1];
                let uncompressed = await utils.readAndUnzip(this.storage + '/datasets/' + company + '/' + feed);
                let stops_uri_template = uri_templates(dataset['baseURIs']['stop']);

                fs.createReadStream(uncompressed + '/stops.txt', { encoding: 'utf8', objectMode: true })
                    .pipe(csv.parse({ objectMode: true, headers: true }))
                    .on('data', data => {
                        skeleton['@graph'].push({
                            "@id": stops_uri_template.fill({ [stops_uri_template.varNames[0]]: data[stops_uri_template.varNames[0].split('.')[1]].trim() }),
                            "dct:spatial": dataset['geographicArea'] || "",
                            "latitude": data['stop_lat'].trim(),
                            "longitude": data['stop_lon'].trim(),
                            "name": data['stop_name'].trim(),
                        })
                    }).on('end', async () => {
                        await del([uncompressed], { force: true });
                        resolve(skeleton);
                    });
            } else {
                resolve(null);
            }
        });
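The dense "@id" line above fills a URI template with a trimmed column from the parsed row; in isolation the pattern looks like this (the template string and row are made up for illustration):

const uri_templates = require('uri-templates');

// Hypothetical template with one dotted variable, mirroring the code above
const template = uri_templates('http://example.org/stops/{stops.stop_id}');
const row = { stop_id: ' 8812005 ' }; // a parsed CSV row (made up)
const varName = template.varNames[0]; // 'stops.stop_id'
const column = varName.split('.')[1]; // 'stop_id'
console.log(template.fill({ [varName]: row[column].trim() }));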
github C2FO / fast-csv / examples / benchmark / index.js
const benchmarkFastCsv = type => num => {
    const file = path.resolve(__dirname, `./assets/${num}.${type}.csv`);
    const stream = fs
        .createReadStream(file)
        .pipe(fastCsv.parse({ headers: true }))
        .transform(data => {
            const ret = {};
            ['first_name', 'last_name', 'email_address'].forEach(prop => {
                ret[camelize(prop)] = data[prop];
            });
            ret.address = data.address;
            return ret;
        });
    return promisfyStream(stream, num);
};
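The promisfyStream helper is defined elsewhere in the benchmark and is not shown here; a minimal stand-in that resolves when the stream finishes might look like this (assumed, not the repo's code):

const promisfyStream = (stream, num) =>
    new Promise((resolve, reject) => {
        let count = 0;
        stream
            .on('data', () => { count += 1; }) // transformed rows pass through here
            .on('error', reject)
            .on('end', () => {
                console.log(`parsed ${count} of ${num} rows`);
                resolve();
            });
    });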
github awslabs / aws-connected-vehicle-solution / source / resources / helper / lib / dynamodb-helper.js
dynamoDBHelper.prototype.loadPois = function(ddbTable, cb) {

        let parser = csv.parse();
        let fileStream = fs.createReadStream('./marketing-pois.csv');
        fileStream
            .on('readable', function() {
                var data;
                while ((data = fileStream.read()) !== null) {
                    parser.write(data);
                }
            })
            .on('end', function() {
                parser.end();
            });

        parser
            .on('readable', function() {
                var data;
                while ((data = parser.read()) !== null) {
                    // Reconstructed: the original listing is truncated here.
                    // Each parsed row would be written to the provided
                    // DynamoDB table (ddbTable) at this point.
                }
            })
            .on('end', function() {
                cb(); // reconstructed completion callback
            });
};
github samtecspg / conveyor / api / plugins / sources / text-file / index.js
const parse = (err) => {

                if (err) {
                    return reply(JSON.stringify(err));
                }
                const stream = fs.createReadStream(path);
                const csvStream = csv
                    .parse({
                        headers: true,
                        ignoreEmpty: true
                    })
                    .transform((data, next) => {

                        format({ data, next });
                    })
                    .on('end', () => {

                        fs.unlinkSync(path);
                        const ret = {
                            filename: payload.file.hapi.filename,
                            headers: payload.file.hapi.headers
                        };