Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
function (done) {
// Form the filename with the table name as the subdirectory and the base of the filename
// then the segment and the file within the segment
var fileName = table + "-" + segment + "-" + fileCount + ".csv";
if (compressed) {
fileName += ".gz";
}
csvStream = csv.createWriteStream({ headers: true, maxBufferSize: 10000 });
var writableStream;
if (s3Bucket) {
var filePath = '';
if (s3Path) {
filePath += s3Path + "/";
}
filePath += table + "/" + fileName;
writableStream = s3StreamUpload(s3, { Bucket: s3Bucket, Key: filePath }, { concurrent: totalSegments });
self.emit(infoEvent, "Starting new file: s3://" + s3Bucket + "/" + filePath);
}
else {
writableStream = fs.createWriteStream(table + '/' + fileName);
self.emit(infoEvent, "Starting new file: " + fileName);
}
#!/usr/bin/env node
'use strict'
// One-off export script: connects to MongoDB and streams rows into out.csv.
const mongoose = require('mongoose')
// BUG FIX: the mongoose property is `Promise` (capital P). The original
// lowercase `mongoose.promise = ...` assignment was silently ignored, leaving
// the deprecated mpromise library in place instead of native promises.
mongoose.Promise = global.Promise
const LogonEvent = require('../models/logon-event')
const Answers = require('../models/answer')
const csv = require('fast-csv')
const fs = require('fs')
// CSV formatter (emits a header row), piped into the output file below.
const csvStream = csv.createWriteStream({headers: true})
const writableStream = fs.createWriteStream('out.csv')
const config = require('../config')
// Fires once the output file handle has been flushed and closed.
// NOTE(review): the success message goes to stderr via console.error —
// presumably deliberate (keeps stdout clean); confirm before changing.
writableStream.on('finish', function () {
  console.error('DONE!')
})
// Everything written to csvStream is formatted and flushed to out.csv.
csvStream.pipe(writableStream)
mongoose.connect(config.MONGO_CONNECTION_STRING, async function (error) {
if (error) { console.error(error) }
let answers
// extract all complete answers
try {
(columns
? (headers ? [columns] : []).concat(
rows.map(row => columns.map(key => row[key]))
)
: rows
).map(row =>
row.map(val =>
val && val.length > maxLength
? `${val.substr(0, maxLength).trim()}…`
: val || ''
)
)
);*/
case 'csv':
case 'tsv':
let formatStream = csv.createWriteStream({
headers,
delimiter: (format == 'tsv' ? '\t' : ',')
})
.transform(row => columns ? Object.assign({}, ...columns.map(prop => ({[prop]: row[prop]}))) : row);
return stream.pipe(formatStream);
default:
throw new Error(
'Invalid format specified, options are: ndjson, tsv csv' // table,
);
}
};
controller.downloadResults = async (req, res, next) => {
// TODO: refactor to make it smaller
const csvStream = csv.createWriteStream()
const pupils = await pupilDataService.sqlFindPupilsBySchoolId(req.user.schoolId)
const schoolData = await schoolDataService.sqlFindOneById(pupils[0].school_id)
// Format the pupils
let pupilsFormatted = await Promise.all(pupils.map(async (p) => {
const fullName = `${p.foreName} ${p.lastName}`
const dob = dateService.formatUKDate(p.dateOfBirth)
const answersSet = null // await pupilService.fetchAnswers(p.id) // method has been removed!
if (!answersSet) return
let answers = answersSet.answers && answersSet.answers.sort((a1, a2) => {
const f1 = a1.factor1 - a2.factor1
if (f1 !== 0) return f1
return a1.factor2 - a2.factor2
})
answers = answers.map(a => {
const question = `${a.factor1}x${a.factor2}`
const pupilAnswer = a.input
return new Promise(async resolve => { // eslint-disable-line no-async-promise-executor
const stream = fs.createWriteStream(fileNameWithPath, { mode: 0o600 })
const csvStream = csv.createWriteStream({ headers: true })
csvStream.pipe(stream)
const request = await sqlService.getRequest()
const recordSetFunc = () => {}
const rowFunc = (row) => {
try {
const data = JSON.parse(row.jsonData)
if (!csvStream.write(data)) {
// Will pause until the `drain` event is emitted
request.pause()
csvStream.once('drain', function () { request.resume() })
}
} catch (error) {
this.logger.error(`streamReport(): [onRow]: Failed to write data for ${row.checkId}: ${error.message}`)
}
readFile("../extra/british_boys.csv", "M", function() {
  // Sort primarily by gender (ascending), then by the 2015 count, descending.
  names.sort(function(a, b) {
    if (a.gender == b.gender) {
      // Radix 10 made explicit — parseInt without a radix is error-prone.
      return parseInt(b["2015"], 10) - parseInt(a["2015"], 10);
    } else {
      // BUG FIX: the original `return b.gender < a.gender;` returned a
      // boolean, which coerces to 0/1 and can never be negative, so the
      // comparator could not order genders correctly. Return a proper
      // signed value instead (genders are known unequal on this branch).
      return a.gender < b.gender ? -1 : 1;
    }
  });
  // Write the sorted records out as a single flat CSV.
  var csvStream = csv.createWriteStream({ headers: headers_output }),
    writableStream = fs.createWriteStream("../flat/british.csv");
  writableStream.on("finish", function(){
    console.log("DONE!");
  });
  csvStream.pipe(writableStream);
  names.forEach(function(d) {
    csvStream.write(d);
  });
  csvStream.end();
});
});
constructor(filename) {
this.filename = filename;
this.csvStream = csv.createWriteStream({delimiter: "\t", headers: true});
}
process.stdout.write( count + '-' + keepCount + " - " + record + '\r');
};
keepCount++;
})
.on("end", function(){
fs.appendFile( outLogFile, '\n', function( err1 ) {
if (err1 ) {
utils.log( 'Could not complete writing of ' + outLogFile + '. Error: ' + err1 );
} else {
utils.log("\n Done");
utils.log( 'Total records parsed: ' + count );
utils.log( 'Total records kept: ' + keepCount );
};
});
})
.pipe(csv.createWriteStream({headers: false}))
.pipe(fs.createWriteStream( outLogFile, {encoding: "utf8"}))
};
if (!req.params.year) {
res.locals.error = {
status: 404,
msg: 'Missing parameter: year'
};
return next();
}
if (!req.params.month) {
res.locals.error = {
status: 404,
msg: 'Missing parameter: month'
};
}
const csvStream = csv.createWriteStream({
headers: true
});
const months = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December'];
const year = req.params.year;
const month = req.params.month;
res.setHeader('Content-disposition', 'attachment; filename=' + months[month] + '-' + year + '-report.csv');
res.setHeader('Content-Type', 'text/csv');
csvStream.pipe(res);
const currentYear = new Date().getFullYear();
const minimumDate = new Date();
minimumDate.setFullYear(year, month, 1);
let maximumDate;
if (year === currentYear) {
const currentMonth = new Date().getMonth();