function search (query) {
if (!query) return
metadatSet.clear()
self.set('loading', true)
query = '*' + query + '*'
self.set('query', query)
self.set('all', false)
var opts = {
query: query,
limit: self.get('limit'),
offset: self.get('offset')
}
var stream = from.obj(SEARCH_FIELDS).pipe(through.obj(function (field, enc, next) {
dathubClient.metadats.searchByField(field, opts, function (err, resp, json) {
if (err) return next(err)
metadatSet.addItems(json.rows)
next()
})
}))
stream.on('error', function (err) {
alert("There was a epic failure. Please open a github issue with your browser's console output")
console.error(err)
self.set('loading', false)
})
stream.on('finish', function () {
// all items have been loaded
self.set('loading', false)
archive.list((err, entries) => {
if (err)
return onerror(err)
// remove duplicates
var entriesMap = {}
entries.forEach(e => entriesMap[e.name] = e)
var entriesKeys = Object.keys(entriesMap)
// create listing stream
var listingStream = from2.obj((size, next) => {
if (entriesKeys.length === 0)
return next(null, null)
next(null, entriesMap[entriesKeys.shift()])
})
// create the writestream
var zipWriteStream = listingStream
.pipe(through2Concurrent.obj({ maxConcurrency: 3 }, (entry, enc, cb) => {
// files only
if (entry.type != 'file')
return cb()
// pipe each entry into the zip
log('[DAT] Zipfile writing', JSON.stringify(entry))
var fileReadStream = archive.createFileReadStream(entry)
zipfile.addReadStream(fileReadStream, entry.name)
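
// A separate, minimal sketch (not part of the snippet above) of the
// bounded-concurrency transform pattern it uses, assuming the `from2` and
// `through2-concurrent` packages: at most three asynchronous tasks run at once.
var from2 = require('from2')
var through2Concurrent = require('through2-concurrent')

from2.obj([1, 2, 3, 4, 5])
  .pipe(through2Concurrent.obj({ maxConcurrency: 3 }, function (n, enc, cb) {
    // simulate an asynchronous job per item, e.g. reading a file
    setTimeout(function () {
      cb(null, { input: n, doubled: n * 2 })
    }, 100)
  }))
  .on('data', console.log)
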
function memoryUsage (opts) {
if (!opts) return memoryUsage({})
else if (Number.isFinite(opts)) return memoryUsage({freq: opts})
else if (!opts.freq) opts.freq = 5000
var gcSample = null
var lastScheduledSample = 0
var stream = from.obj(function (size, next) {
if (gcSample) {
var result = gcSample
gcSample = null // set to null before calling next
next(null, result)
} else {
var ms = opts.freq - (Date.now() - lastScheduledSample)
setTimeout(measure, ms, next).unref()
}
})
if (opts.gc) {
var profiler = require('gc-profiler')
stream.once('resume', function () {
profiler.on('gc', function (info) {
gcSample = process.memoryUsage()
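
// A separate minimal sketch, not taken from the snippet above: a polling
// object-mode source built with `from2` that emits one process.memoryUsage()
// sample roughly every `freq` milliseconds and never ends on its own.
// The name `sampleStream` is illustrative.
var from2 = require('from2')

function sampleStream (freq) {
  return from2.obj(function (size, next) {
    setTimeout(function () {
      next(null, { time: Date.now(), memory: process.memoryUsage() })
    }, freq)
  })
}

// Usage: sampleStream(1000).on('data', console.log)
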
server.listen(4200, function () {
var original = net.connect(4200)
var instance = tentacoli()
pump(original, instance, original)
instance.request({
cmd: 'a request',
streams: {
inStream: from.obj(['hello', 'world'])
}
}, function (err, result) {
if (err) {
throw err
}
console.log('--> result is', result.data)
console.log('--> stream data:')
result.streams.echo.pipe(through.obj(function (chunk, enc, cb) {
cb(null, chunk + '\n')
})).pipe(process.stdout)
result.streams.echo.on('end', function () {
console.log('--> ended')
instance.destroy()
server.close()
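
// A separate minimal sketch of the piping pattern used above, assuming the
// `from2`, `through2` and `pump` packages: pump wires source, transform and
// sink together and reports the first error from any of them.
var from2 = require('from2')
var through2 = require('through2')
var pump = require('pump')

pump(
  from2.obj(['hello', 'world']),            // source: emits two strings
  through2.obj(function (chunk, enc, cb) {  // transform: append a newline
    cb(null, chunk + '\n')
  }),
  process.stdout,                           // sink
  function (err) {
    if (err) console.error('pipeline failed', err)
  }
)
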
function matchingStream (current, pattern) {
var matcher = new Qlobber(QlobberOpts)
matcher.add(pattern, true)
return from2.obj(function match (size, next) {
var entry
while ((entry = current.shift()) != null) {
if (matcher.match(entry.topic).length > 0) {
setImmediate(next, null, entry)
return
}
}
if (!entry) this.push(null)
})
}
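
// A minimal standalone sketch of the pull-based from2.obj contract used in
// the examples on this page: the read function runs whenever the consumer
// wants data, next(null, chunk) supplies one chunk, and next(null, null)
// (or this.push(null)) ends the stream. `arrayStream` is an illustrative name.
var from2 = require('from2')

function arrayStream (items) {
  return from2.obj(function (size, next) {
    if (items.length === 0) return next(null, null) // signal end-of-stream
    next(null, items.shift())                       // emit the next item
  })
}

// Usage: arrayStream([{n: 1}, {n: 2}]).on('data', console.log)
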
Subgraph.prototype.createReadStream = function (link) {
if (!link) throw new Error('key is required')
link = toBuffer(link)
var self = this
var stream = from.obj(read)
stream.length = -1
return stream
function read (size, cb) {
if (!link) return cb(null, null)
self.db.get(self.prefix + link.toString('hex'), {valueEncoding: messages.Node}, function (err, node) {
if (err && err.notFound) {
if (first) ready(0)
return cb(null, null)
}
if (err) return cb(err)
var first = stream.length === -1
if (first) ready(node.index + 1)
link = node.link
cb(null, node)
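
// A separate, purely in-memory sketch of the "follow a link chain" pattern
// above: each read fetches one node, remembers its link for the next read,
// and ends the stream when the chain runs out. `chainStream` and the node
// shape are illustrative, not the original database-backed implementation.
var from2 = require('from2')

function chainStream (nodes, key) {
  return from2.obj(function (size, next) {
    if (!key) return next(null, null) // end of the chain
    var node = nodes[key]
    key = node.link                   // advance to the next node
    next(null, node)
  })
}

// Usage:
// chainStream({a: {value: 1, link: 'b'}, b: {value: 2, link: null}}, 'a')
//   .on('data', console.log)
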
upring.add('ns:monitoring,cmd:trace', function (req, reply) {
streams++
const stream = from.obj(function (n, cb) {
setTimeout(function () {
const keys = lru.values()
lru.reset()
cb(null, {
id: upring.whoami(),
keys
})
}, maxAge)
})
upring.on('prerequest', trace)
eos(stream, function () {
streams--
if (streams === 0) {
upring.removeListener('prerequest', trace)
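
// A separate minimal sketch of the cleanup pattern above, assuming `eos` is
// the `end-of-stream` package: the callback fires once the stream has ended
// or errored, which is a convenient place to release resources.
var from2 = require('from2')
var eos = require('end-of-stream')

var source = from2.obj(['a', 'b', 'c'])
eos(source, function (err) {
  if (err) return console.error('stream failed', err)
  console.log('stream ended cleanly')
})
source.resume() // drain the stream so it can end
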
Feed.prototype.createStream = function (opts) {
if (!opts) opts = {}
var self = this
var bytes = this._decode ? opts.bytes : opts.bytes !== false
var start = opts.start || 0
var cursor = bytes ? this.cursor() : null
return bytes ? from(readCursor) : from.obj(readBlocks)
function readBlocks (size, cb) {
self.get(start++, cb)
}
function readCursor (size, cb) {
if (start) {
cursor.read(start, cb)
start = 0
} else {
cursor.next(cb)
}
}
}
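
// A separate minimal sketch of the two modes returned above (from(readCursor)
// vs from.obj(readBlocks)), using the `from2` package directly: binary streams
// carry Buffer chunks and `size` is a byte-length hint, while object streams
// carry arbitrary JavaScript values.
var from2 = require('from2')

var chunks = [Buffer.from('hello '), Buffer.from('world\n')]
var byteStream = from2(function (size, next) {
  next(null, chunks.shift() || null) // a null chunk ends the stream
})

var objectStream = from2.obj(function (size, next) {
  next(null, null) // ends immediately; a real source would pass any value
})

byteStream.pipe(process.stdout)
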
Feed.prototype.createReadStream = function (opts) {
if (!opts) opts = {}
var self = this
var start = opts.start || 0
var end = typeof opts.end === 'number' ? opts.end : -1
var live = !!opts.live
var snapshot = opts.snapshot !== false
var first = true
var range = this.download({start: start, end: end, linear: true})
return from.obj(read).on('end', cleanup).on('close', cleanup)
function read (size, cb) {
if (!self.opened) return open(size, cb)
if (!self.readable) return cb(new Error('Feed is closed'))
if (first) {
if (end === -1) {
if (live) end = Infinity
else if (snapshot) end = self.length
if (start > end) return cb(null, null)
}
if (opts.tail) start = self.length
first = false
}
if (start === end || (end === -1 && start === self.length)) return cb(null, null)
feed.createReadStream = function(opts, cb) {
if (typeof opts === 'function') return feed.createReadStream(null, opts)
if (!opts) opts = {}
var since = opts.since || 0
if (opts.live) {
return from.obj(function read(size, cb) {
db.get(lexint.pack(since+1, 'hex'), {valueEncoding:'binary'}, function(err, value) {
if (err && err.notFound) return feed.notify.push([read, cb])
if (err) return cb(err)
cb(null, {change:++since, value:value})
})
})
}
var rs = db.createReadStream({
gt: lexint.pack(since, 'hex'),
limit: opts.limit,
reverse: opts.reverse,
valueEncoding: 'binary'
})
var format = function(data, enc, cb) {