// Requires used by this snippet; parse(), s3LogsToES() and the totLogLines
// counter are defined elsewhere in the same source file.
var stream = require('stream');
var LineStream = require('byline').LineStream;

exports.handler = function(event, context) {
    console.log('Received event: ', JSON.stringify(event, null, 2));

    /* == Streams ==
     * To avoid loading an entire (typically large) log file into memory,
     * this is implemented as a pipeline of filters, streaming log data
     * from S3 to ES.
     * Flow: S3 file stream -> Log Line stream -> Log Record stream -> ES
     */
    var lineStream = new LineStream();
    // A stream of log records, from parsing each log line
    var recordStream = new stream.Transform({objectMode: true});
    recordStream._transform = function(line, encoding, done) {
        var logRecord = parse(line.toString());
        var serializedRecord = JSON.stringify(logRecord);
        this.push(serializedRecord);
        totLogLines++;
        done();
    };

    event.Records.forEach(function(record) {
        var bucket = record.s3.bucket.name;
        var objKey = decodeURIComponent(record.s3.object.key.replace(/\+/g, ' '));
        s3LogsToES(bucket, objKey, context, lineStream, recordStream);
    });
};
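s3LogsToES itself is not shown above. A minimal sketch of how it could wire the pipeline together, assuming the AWS SDK v2 S3 client and a hypothetical postToES(record, context) sender (both assumptions, not part of the original snippet):

var AWS = require('aws-sdk');
var s3 = new AWS.S3();

function s3LogsToES(bucket, key, context, lineStream, recordStream) {
    // Stream the S3 object through the line splitter and record parser,
    // then hand each serialized record to the (hypothetical) ES poster.
    var s3Stream = s3.getObject({Bucket: bucket, Key: key}).createReadStream();
    s3Stream
        .pipe(lineStream)
        .pipe(recordStream)
        .on('data', function(record) {
            postToES(record, context); // hypothetical sender, not shown here
        })
        .on('error', function(err) {
            context.fail(err);
        });
}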
const getSurroundingCode = (file, lineNumber, cb) => {
  const start = lineNumber - SURROUNDING_LINES
  const end = lineNumber + SURROUNDING_LINES
  const reader = createReadStream(file, { encoding: 'utf8' })
  const splitter = new byline.LineStream({ keepEmptyLines: true })
  const slicer = new CodeRange({ start, end })

  // if the slicer has enough lines already, no need to keep reading from the file
  slicer.on('done', () => reader.destroy())

  pump(reader, splitter, slicer, (err) => {
    // reader.destroy() causes a "premature close" error which we can tolerate
    if (err && err.message !== 'premature close') return cb(err)
    cb(null, slicer.getCode())
  })
}
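CodeRange and SURROUNDING_LINES come from the surrounding module and are not shown; a usage sketch under that assumption:

// Hypothetical call: fetch the lines around line 42 of a source file.
getSurroundingCode('./lib/app.js', 42, (err, code) => {
  if (err) throw err
  console.log(code) // whatever slicer.getCode() returns for lines 42±SURROUNDING_LINES
})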
this.textAnalysisStream = function(filters) {
  var rejectText = textFilters.createRejectFn(filters);
  var counter = new WordsCounter();
  return multipipe(
    new LineStream(),
    filter(function(line) {
      return !rejectText(decoder.write(line).toLowerCase());
    }),
    through2.obj(function(line, enc, callback) {
      var words = decoder.write(line).toLowerCase().split(/\s+/);
      counter.addWords(_.reject(words, rejectText));
      callback();
    }, function(callback) {
      this.push(counter.report());
      callback();
    })
  );
};
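The enclosing object and its helpers (textFilters, WordsCounter, decoder, the through2-filter style filter) are not shown; a usage sketch assuming the instance is exposed as analyser:

// Hypothetical usage: pipe a text file through the pipeline and read the
// single word-count report pushed when the stream flushes.
var fs = require('fs');
fs.createReadStream('./commit-messages.txt')
  .pipe(analyser.textAnalysisStream(['merge', 'typo']))
  .on('data', function(report) {
    console.log(report);
  });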
this.normaliseLogStream = function(inputStream, pathEvalCallback) {
  var firstLine = true;
  return inputStream
    .pipe(new LineStream({ keepEmptyLines: true }))
    .pipe(map(function(chunk) {
      var line = normaliseCommitAuthorData(decoder.write(chunk));
      if (firstLine) {
        firstLine = false;
        return line;
      }
      return '\n' + line;
    }))
    .pipe(filter(function(chunk) {
      return validCommitData(decoder.write(chunk), pathEvalCallback);
    }));
};
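As with the previous method, a usage sketch; the adapter receiver, the git invocation, and the predicate semantics (inferred from the parameter name pathEvalCallback) are all assumptions for illustration:

// Hypothetical usage: normalise a raw `git log` stream, keeping only
// commit data whose paths pass the supplied predicate.
var spawn = require('child_process').spawn;
var gitLog = spawn('git', ['log', '--numstat']).stdout;
adapter.normaliseLogStream(gitLog, function(path) {
  return path.indexOf('vendor/') !== 0; // example rule: skip vendored files
}).pipe(process.stdout);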
const url = `${kc.getCurrentCluster().server}/api/v1/namespaces/${
  this.project.kubernetes.namespace
}/pods`;
const requestOptions = {
  qs: {
    watch: true,
    timeoutSeconds: 200,
    labelSelector: `build=${this.event.buildID},jobname=${this.job.name}`
  },
  method: "GET",
  uri: url,
  useQuerystring: true,
  json: true
};
kc.applyToRequest(requestOptions);

const stream = new byline_1.LineStream();
stream.on("data", data => {
  let obj = null;
  try {
    if (data instanceof Buffer) {
      obj = JSON.parse(data.toString());
    } else {
      obj = JSON.parse(data);
    }
  } catch (e) {
    // ignore unparseable lines so the watch stays connected
  }
  if (obj && obj.object) {
    this.pod = obj.object as kubernetes.V1Pod;
  }
});

const req = request(requestOptions, (error, response, body) => {
  if (error) {
    this.logger.error(error.body.message);
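  }
});

// Editorial sketch, not in the original excerpt: the LineStream above only
// receives data once the chunked watch response is piped into it.
req.pipe(stream);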
routeFunctions.classify = function (req, res, next) {
  var maxNGramLength = req.query.maxNGramLength || 1
  var field = req.query.field || '*'
  req
    .pipe(new LineStream())
    .pipe(options.si.classify({
      maxNGramLength: maxNGramLength,
      field: field
    }))
    .pipe(JSONStream.stringify('', '\n', ''))
    .pipe(res)
    .on('finish', function () {
      return next()
    })
}
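A sketch of driving this route from a client, assuming the server listens on port 3030 and the route is mounted at POST /classify (both assumptions); each line of the request body is classified as a separate document:

// Hypothetical client for the route above.
var http = require('http')
var req = http.request(
  { method: 'POST', port: 3030, path: '/classify?field=body' },
  function (res) { res.pipe(process.stdout) }
)
req.write('first document to classify\n')
req.write('second document to classify\n')
req.end()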
const { createGunzip } = require('zlib');
const { createReadStream } = require('fs');
const { Transform } = require('stream');
const LineStream = require('byline').LineStream;

const input = createReadStream('./production-logs/production-out__2016-10-07_00-00-00.log.gz');
const gunzip = createGunzip();
const byLine = new LineStream();

// Matches ANSI escape sequences so they can be stripped from each line.
const stripAnsi = new RegExp([
  '[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[a-zA-Z\\d]*)*)?\\u0007)',
  '(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PRZcf-ntqry=><~]))'
].join('|'), 'g');

class Forwarder extends Transform {
  _transform(line, encoding, callback) {
    // Parse "<timestamp> - <level>: <message>" after stripping ANSI codes.
    const match = String(line).replace(stripAnsi, '').match(/^([^\s]+)\s+-\s+([^:]+):\s+(.+)/);
    if (match) {
      const date = new Date(match[1]);
      const level = match[2];
      const msg = match[3];
      this.push(level + ' - ' + msg + '\n');
    }
    callback();
  }
}
const filter = new Forwarder();
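The snippet declares the pieces but never connects them; the intended pipeline is implied by the variable names, so as a hedged completion:

// Assumed wiring: gunzip the archived log, split it into lines, reformat
// each line in Forwarder, and print the result.
input
  .pipe(gunzip)
  .pipe(byLine)
  .pipe(filter)
  .pipe(process.stdout);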
module.exports.watch = function watch(config, path, queryParams, callback, done) {
  const url = config.getCurrentCluster().server + path;
  queryParams.watch = true;
  const requestOptions = {
    headers: {},
  };
  config.applyToRequest(requestOptions);

  // Sanity check: applyToRequest should only have populated `headers`.
  const keys = Object.keys(requestOptions);
  if (keys.length !== 1 || keys[0] !== 'headers') {
    throw new Error('Unexpected request options passed');
  }

  const stream = new byline.LineStream();
  stream.on('data', function (data) {
    let obj;
    if (data instanceof Buffer) {
      obj = JSON.parse(data.toString());
    } else {
      obj = JSON.parse(data);
    }
    if (typeof obj === 'object' && obj.object) {
      callback(obj.type, obj.object);
    } else {
      throw new Error('unexpected ' + typeof obj + ': ' + JSON.stringify(obj));
    }
  });
  stream.on('end', function () {
    done(null);
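  });

  // Editorial sketch, not in the original excerpt: issue the GET with the
  // `request` library (an assumption) and pipe the chunked watch response
  // into the line stream so each event arrives as a single JSON line.
  const opts = Object.assign(
    { method: 'GET', uri: url, qs: queryParams, useQuerystring: true, json: true },
    requestOptions
  );
  const req = request(opts, (error) => {
    if (error) {
      done(error);
    }
  });
  req.pipe(stream);
  return req;
};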