const through2 = require("through2");
const byline = require("byline");
const turf = require("@turf/turf");
const OSRM = require("osrm");
const random = require("random");
const moment = require("moment");

const osrm = new OSRM("./data/nyc.osrm");
const DRIFT = 0.01;
const PERIOD = 20;

var normal = random.normal();
var k = 0;

process.stdin.pipe(byline.createStream()).pipe(
  through2((chunk, enc, next) => {
    k++;
    if (k % 1000 === 0) console.error(k);
    var line = chunk.toString();
    if (line.length && k > 1) { // skip header and trailing endline
      var cell = line.split(",");
      // each CSV row describes a trip: pickup/dropoff times and locations
      var record = {
        timeA: moment(cell[1]),
        timeB: moment(cell[2]),
        ptA: [+cell[5], +cell[6]],
        ptB: [+cell[9], +cell[10]]
      };
      // the source is truncated here; pass the parsed record downstream
      next(null, JSON.stringify(record) + "\n");
    } else {
      next();
    }
  })
);
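
For reference, the byline + through2 pattern above works standalone; a minimal sketch that uppercases each line of stdin (no CSV or routing logic):

const byline = require("byline");
const through2 = require("through2");

process.stdin
  .pipe(byline.createStream())
  .pipe(through2((chunk, enc, next) => {
    // byline strips the newline, so add it back when re-emitting
    next(null, chunk.toString().toUpperCase() + "\n");
  }))
  .pipe(process.stdout);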

// Truncated in the source: a processCallback option handed to a spawned
// git process, parsing clone progress from stderr line by line.
processCallback: (process) => {
  byline(process.stderr).on('data', (chunk) => {
    const line = chunk.toString(); // byline emits Buffers by default
    // if (line.startsWith('Counting objects: ')) {
    //   const percent = this.tryParse(line)
    //   if (percent) {
    //     console.log('total object', percent)
    //     this.setReceivingProgress(repo, percent)
    //   }
    //   return
    // }
    if (line.startsWith('Receiving objects: ')) {
      const percent = this.tryParse(line);
      if (percent) {
        // console.log('receiving progress', percent)
        this.setReceivingProgress(repo, percent);
      }
      return;
    }
  });
}
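
The same stderr-parsing idea in isolation: tryParse and setReceivingProgress above are app code, so this sketch (with a placeholder repository URL) just logs each progress line git emits:

const { spawn } = require("child_process");
const byline = require("byline");

// git writes clone progress to stderr; byline splits it into lines
const child = spawn("git", ["clone", "--progress", "https://example.com/repo.git"]);
byline(child.stderr).on("data", (line) => {
  console.log("git:", line.toString());
});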

// AWS Lambda handler: stream an S3 log object into Elasticsearch.
// parse() and s3LogsToES() are app-specific helpers defined elsewhere.
var stream = require('stream');
var LineStream = require('byline').LineStream;
var totLogLines = 0;

exports.handler = function(event, context) {
  console.log('Received event: ', JSON.stringify(event, null, 2));
  /* == Streams ==
   * To avoid loading an entire (typically large) log file into memory,
   * this is implemented as a pipeline of filters, streaming log data
   * from S3 to ES.
   * Flow: S3 file stream -> Log Line stream -> Log Record stream -> ES
   */
  var lineStream = new LineStream();
  // A stream of log records, from parsing each log line
  var recordStream = new stream.Transform({objectMode: true});
  recordStream._transform = function(line, encoding, done) {
    var logRecord = parse(line.toString());
    var serializedRecord = JSON.stringify(logRecord);
    this.push(serializedRecord);
    totLogLines++;
    done();
  };
  event.Records.forEach(function(record) {
    var bucket = record.s3.bucket.name;
    var objKey = decodeURIComponent(record.s3.object.key.replace(/\+/g, ' '));
    s3LogsToES(bucket, objKey, context, lineStream, recordStream);
  });
};
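
A self-contained version of the line-to-record stage, with a stand-in parse() (the Lambda's real parser is app-specific) and a hypothetical local log file instead of S3:

const fs = require("fs");
const stream = require("stream");
const { LineStream } = require("byline");

// stand-in for the Lambda's parse(): wrap each line in a record object
function parse(line) {
  return { message: line, receivedAt: new Date().toISOString() };
}

const lineStream = new LineStream();
const recordStream = new stream.Transform({ objectMode: true });
recordStream._transform = function (line, encoding, done) {
  this.push(JSON.stringify(parse(line.toString())) + "\n");
  done();
};

fs.createReadStream("access.log") // hypothetical input file
  .pipe(lineStream)
  .pipe(recordStream)
  .pipe(process.stdout);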

// Reads a guesses file line by line, prompting the user for each one.
// checkGuess() is defined elsewhere; `aw` is the awaitify-stream package.
const fs = require('fs');
const readline = require('readline');
const byline = require('byline');
const aw = require('awaitify-stream');

async function run() {
  let stream = fs.createReadStream('txt/millionsOfGuesses.txt');
  stream.setEncoding('utf8');
  // read the file stream a line at a time.
  let lineStream = byline.createStream(stream, { keepEmptyLines: false });
  // awaitable interface for reading the stream.
  let reader = aw.createReader(lineStream);
  // readline for prompting the user.
  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout
  });
  try {
    let line;
    while (null !== (line = await reader.readAsync())) {
      let guessedCard = await checkGuess(rl, line);
      if (guessedCard) {
        break; // truncated in the source; stop once a guess succeeds
      }
    }
  } finally {
    rl.close();
  }
}
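
Assuming `aw` is the awaitify-stream package (its createReader/readAsync API matches the usage above), the core await-a-line loop reduces to:

const fs = require("fs");
const byline = require("byline");
const aw = require("awaitify-stream"); // assumption: the `aw` used above

async function printLines(path) {
  const stream = fs.createReadStream(path, "utf8");
  const reader = aw.createReader(byline.createStream(stream, { keepEmptyLines: false }));
  let line;
  // readAsync() resolves with the next line, or null at end of stream
  while (null !== (line = await reader.readAsync())) {
    console.log(line);
  }
}

printLines("txt/millionsOfGuesses.txt").catch(console.error);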

// Spawn wrapper that captures timestamped output lines and maps known
// marker strings (thresholds) to progress values. log and
// DEBUG_THRESHOLDS are module-level elsewhere.
import chip from 'child_process'; // assumed: chip.spawn matches child_process.spawn
import byline from 'byline';

export function spawn (cmd, args, opts) {
  const child = chip.spawn(cmd, args, opts);
  const stdout = byline(child.stdout);
  const stderr = byline(child.stderr);
  const lines = [];
  let onData = function (data) {
    const time = (new Date()).getTime();
    lines.push([ time, data.toString() ]); // TODO chalk stdout/stderr?
    const { thresholds } = this; // eslint-disable-line no-invalid-this
    if (thresholds) {
      for (const key in thresholds) {
        if (data.indexOf(key) >= 0) {
          const p = thresholds[key];
          log.showProgress(p);
          if (DEBUG_THRESHOLDS) {
            lines.push([ time, '************' ]);
            lines.push([ time, p + ': ' + key ]);
            lines.push([ time, '************' ]);
          }
        }
      }
    }
  };
  stdout.on('data', onData);
  stderr.on('data', onData);
  // truncated in the source; the original presumably resolves or returns
  // the collected `lines` once the child exits
}
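
The threshold lookup itself is independent of streams; a minimal sketch with invented marker strings:

// map known substrings of child output to progress values (names illustrative)
const thresholds = {
  "Compiling": 0.25,
  "Linking": 0.75,
  "Done": 1.0
};

function progressFor(line) {
  for (const key in thresholds) {
    if (line.indexOf(key) >= 0) return thresholds[key];
  }
  return null;
}

console.log(progressFor("Linking objects...")); // 0.75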

// Extracts the lines around lineNumber from a source file without
// reading the whole file. CodeRange is a custom Transform defined
// elsewhere (a minimal stand-in is sketched after this snippet);
// SURROUNDING_LINES is a module-level constant.
const { createReadStream } = require('fs')
const byline = require('byline')
const pump = require('pump')

const getSurroundingCode = (file, lineNumber, cb) => {
  const start = lineNumber - SURROUNDING_LINES
  const end = lineNumber + SURROUNDING_LINES
  const reader = createReadStream(file, { encoding: 'utf8' })
  const splitter = new byline.LineStream({ keepEmptyLines: true })
  const slicer = new CodeRange({ start, end })
  // if the slicer has enough lines already, no need to keep reading from the file
  slicer.on('done', () => reader.destroy())
  pump(reader, splitter, slicer, (err) => {
    // reader.destroy() causes a "premature close" error which we can tolerate
    if (err && err.message !== 'premature close') return cb(err)
    cb(null, slicer.getCode())
  })
}
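
A minimal stand-in for the CodeRange transform, assuming it only needs to buffer lines start..end and signal when it has them all:

const { Transform } = require("stream");

class CodeRange extends Transform {
  constructor({ start, end }) {
    super();
    this.start = start;
    this.end = end;
    this.lineNumber = 0;
    this.lines = [];
  }
  _transform(line, enc, cb) {
    this.lineNumber++;
    if (this.lineNumber >= this.start && this.lineNumber <= this.end) {
      this.lines.push(line.toString());
    }
    // once the last wanted line is buffered, tell the caller to stop reading
    if (this.lineNumber >= this.end) this.emit("done");
    cb();
  }
  getCode() {
    return this.lines.join("\n");
  }
}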

// Parses URLs passed as an array, or falls back to reading them from
// stdin one per line. parseUrl() is defined elsewhere in the module.
var byline = require('byline');

exports.parserExecute = function (urls) {
  if (urls && Array.isArray(urls)) {
    var results = [];
    for (var i = 0, l = urls.length; i < l; i++) {
      results.push(parseUrl(urls[i]));
    }
    return results;
  } else {
    var stdin = process.stdin;
    var stdout = process.stdout;
    var stream = byline.createStream(stdin);
    stream.on('end', function () {
      process.exit(0);
    });
    stream.on('close', function () {
      process.exit(0);
    });
    stream.on('data', function (line) {
      stdout.write(JSON.stringify(parseUrl(line.toString())) + '\n');
    });
  }
};
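
The stdin branch above is a simple line-oriented filter; a trimmed sketch with a stand-in parseUrl built on Node's URL class (the original's parseUrl is app code):

const byline = require("byline");
const { URL } = require("url");

// stand-in parser: extract a few fields from each URL
function parseUrl(raw) {
  const u = new URL(raw.toString().trim());
  return { host: u.hostname, path: u.pathname, query: u.search };
}

byline.createStream(process.stdin).on("data", function (line) {
  try {
    process.stdout.write(JSON.stringify(parseUrl(line)) + "\n");
  } catch (e) {
    process.stderr.write("bad url: " + line + "\n");
  }
});

Run as, e.g.: echo "https://example.com/a?b=1" | node parser.js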

// Tail of a JSON parser chain (truncated in the source): once the
// registry root is seen, stream npmfile line by line, trimming the
// trailing comma from each row before parsing it. cutcomma() is
// defined elsewhere.
.on('root', function () {
  var rs = fs.createReadStream(npmfile)
  rs.setEncoding('ascii')
  var linestream = byline.createStream()
  linestream.on('data', function (line) {
    var data
    try {
      data = JSON.parse(cutcomma(line))
    } catch (e) {
      console.log(e)
      return
    }
    var v = data.value
    var doc = {
      name: v.name
      // truncated in the source
    }
  })
  rs.pipe(linestream)
})
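
For plain newline-delimited JSON (no trailing commas to trim, unlike the registry dump above), the same loop shrinks to:

const fs = require("fs");
const byline = require("byline");

// docs.ndjson is a hypothetical file with one JSON object per line
const linestream = byline.createStream(fs.createReadStream("docs.ndjson", "utf8"));
linestream.on("data", function (line) {
  var data;
  try {
    data = JSON.parse(line);
  } catch (e) {
    console.log(e);
    return;
  }
  console.log(data.name);
});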