uploadWithProgress: function(fileBuffer, fileSize, emitter) {
  // Report upload progress as the buffer streams through progress-stream.
  const progressor = progressStream({ length: fileSize, speed: 1 }, function(progress) {
    console.log('Zip upload: Status = ' + parseInt(progress.percentage, 10) + '%');
    emitter.emit('upload.progress', progress);
  });
  const fileBufferStream = new streamBuffer.ReadableStreamBuffer({
    // frequency: 100, // in milliseconds.
    chunkSize: 4096 // in bytes.
  });
  fileBufferStream.put(fileBuffer);
  fileBufferStream.stop(); // signal end-of-data so the write stream can finish
  fileBufferStream
    .pipe(progressor)
    .pipe(fs.createWriteStream(path.join(uploadsPath, 'upload.zip')));
},
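A minimal usage sketch for the method above, assuming it lives on an object named uploader and that archive.zip exists; both names are illustrative:

const EventEmitter = require('events');
const fs = require('fs');

const emitter = new EventEmitter();
emitter.on('upload.progress', (progress) => {
  // progress-stream reports percentage, transferred and total length
  console.log('transferred ' + progress.transferred + ' of ' + progress.length + ' bytes');
});
const fileBuffer = fs.readFileSync('archive.zip'); // hypothetical input file
uploader.uploadWithProgress(fileBuffer, fileBuffer.length, emitter);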
cleanUploads: function (path, cb) {
  // pkgcloud offers no readFile or writeFile API methods, but some adapters provide their own
  if (typeof self.client.readFile === "function") {
    return self.client.readFile(path, function (err, file) {
      if (err) {
        return cb(err);
      }
      var vFile = new File({
        cwd: "/",
        base: "/",
        path: path,
        contents: file
      });
      cb(null, vFile.contents, vFile);
    });
  }
  // create a new buffer and output stream for capturing the hook.res.write and hook.res.end calls from inside the hook
  // this is used as an intermediary to pipe hook output to other streams ( such as another hook )
  var buffer = new streamBuffers.WritableStreamBuffer({
    initialSize: (100 * 1024),   // start at 100 kilobytes.
    incrementAmount: (10 * 1024) // grow by 10 kilobytes each time buffer overflows.
  });
  var _remote;
  if (self.adapter === "google") {
    _remote = encodeURIComponent(self.root + "/" + path);
  } else {
    _remote = self.root + "/" + path;
  }
  // console.log('vfs.readFile'.green, _remote);
  var readStream = self.client.download({
    container: self.bucket,
    remote: _remote
  });
  // capture the remote file into the buffer, then hand its contents to the callback
  readStream.pipe(buffer);
  readStream.on('error', cb);
  readStream.on('end', function () {
    var contents = buffer.getContents();
    var vFile = new File({
      cwd: "/",
      base: "/",
      path: path,
      contents: contents
    });
    cb(null, contents, vFile);
  });
},
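The same capture pattern in isolation: pipe any readable stream into a WritableStreamBuffer and pull the bytes out once it finishes. The local file path below is a stand-in for the remote download:

const fs = require('fs');
const streamBuffers = require('stream-buffers');

const sink = new streamBuffers.WritableStreamBuffer();
fs.createReadStream('upload.zip') // stand-in for self.client.download(...)
  .pipe(sink)
  .on('finish', () => {
    const contents = sink.getContents(); // a Buffer, or false if nothing was written
    console.log('captured ' + (contents ? contents.length : 0) + ' bytes');
  });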
const fs = require("fs")
const path = require("path")
const semanticRelease = require("semantic-release")
const { WritableStreamBuffer } = require("stream-buffers")

// Capture semantic-release's log output in memory instead of writing it to the console
const stdoutBuffer = new WritableStreamBuffer()
const stderrBuffer = new WritableStreamBuffer()

function getBuildVersion() {
  return semanticRelease(
    {
      // Core options
      dryRun: true,
      branch: "master",
      repositoryUrl: "https://github.com/justindujardin/mathy.git",
    },
    {
      cwd: "./",
      stdout: stdoutBuffer,
      stderr: stderrBuffer,
    }
  ).then((result) => {
    if (result) {
      // semantic-release resolves with the release a real (non-dry) run would publish
      return result.nextRelease.version
    }
    return null
  })
}
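A sketch of calling the helper: nextRelease.version is what semantic-release resolves with when a release is due, and the buffered log can be read back with getContentsAsString():

getBuildVersion().then((version) => {
  if (version) {
    console.log("next version: " + version)
  } else {
    console.log("no release necessary")
  }
  // the captured semantic-release log, should you want to inspect it
  console.log(stdoutBuffer.getContentsAsString("utf8"))
})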
package (chaincodePath, metadataPath) {
  logger.debug('packaging GOLANG from %s', chaincodePath);

  // Determine the user's $GOPATH
  const goPath = process.env.GOPATH;

  // Compose the path to the chaincode project directory
  const projDir = path.join(goPath, 'src', chaincodePath);

  // We generate the tar in two phases: First grab a list of descriptors,
  // and then pack them into an archive. While the two phases aren't
  // strictly necessary yet, they pave the way for the future where we
  // will need to assemble sources from multiple packages
  const buffer = new sbuf.WritableStreamBuffer();

  return this.findSource(goPath, projDir).then((srcDescriptors) => {
    if (metadataPath) {
      return super.findMetadataDescriptors(metadataPath)
        .then((metaDescriptors) => {
          return srcDescriptors.concat(metaDescriptors);
        });
    } else {
      return srcDescriptors;
    }
  }).then((descriptors) => {
    return super.generateTarGz(descriptors, buffer);
  }).then(() => {
    return buffer.getContents();
  });
}
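A hedged usage sketch, assuming packager is an instance of the class above, GOPATH is set, and the example chaincode path exists:

const fs = require('fs');

packager.package('github.com/example/chaincode', null).then((tarGzBuffer) => {
  // getContents() above returned the whole tar.gz archive as one Buffer
  fs.writeFileSync('chaincode.tar.gz', tarGzBuffer);
});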
const isFile = results.isFile;
const result = results.result;
if (isFile) {
  const filename = Path.basename(result).replace(/"/g, '\\"');
  const contentDisposition = 'attachment; filename="' + filename + '"';
  const stream = Fs.createReadStream(result);
  return reply(stream)
    .header('Content-Disposition', contentDisposition)
    .header('Content-Length', results.size);
}
const stream = new StreamBuffers.ReadableStreamBuffer({
  frequency: 10, // in milliseconds.
  chunkSize: 204800 // 200 KB per chunk.
});
const pathName = path === '/' ? '' : '_' + require('path').basename(path);
const filename = (course + pathName + '.zip').replace(/"/g, '\\"');
const contentDisposition = 'attachment; filename="' + filename + '"';
stream.put(result);
stream.stop();
return reply(stream)
  .type('application/zip')
  .header('Content-Disposition', contentDisposition);
const validParams = {
  type: 'object',
  maxProperties: 1,
  required: ['meter_id'],
  properties: {
    meter_id: {
      type: 'number'
    }
  }
};
if (!validate(req.params, validParams).valid || !req.file.buffer) {
  res.sendStatus(400);
} else {
  try {
    const id = parseInt(req.params.meter_id);
    const myReadableStreamBuffer = new streamBuffers.ReadableStreamBuffer({
      frequency: 10,
      chunkSize: 2048
    });
    myReadableStreamBuffer.put(req.file.buffer);
    // stop() indicates we are done putting the data in our readable stream.
    myReadableStreamBuffer.stop();
    try {
      await streamToDB(myReadableStreamBuffer, row => {
        const readRate = Number(row[0]);
        const endTimestamp = moment(row[1], 'MM/DD/YYYY HH:mm');
        const startTimestamp = moment(row[1], 'MM/DD/YYYY HH:mm').subtract(60, 'minutes');
        return new Reading(id, readRate, startTimestamp, endTimestamp);
      }, (readings, tx) => Reading.insertOrUpdateAll(readings, tx));
      res.status(200).json({ success: true });
    } catch (e) {
      res.status(403).json({ success: false, message: 'Failed to upload data.' });
    }
  } catch (err) {
    // Completion assumed: reject anything that failed before the database step.
    res.sendStatus(400);
  }
}
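The put()/stop() pattern the route relies on, shown in isolation (the CSV rows are made up):

const streamBuffers = require('stream-buffers');

const rsb = new streamBuffers.ReadableStreamBuffer({ frequency: 10, chunkSize: 2048 });
rsb.put(Buffer.from('100,01/01/2020 00:00\n200,01/01/2020 01:00\n'));
rsb.stop(); // no more data: 'end' fires once the internal buffer drains
rsb.on('data', (chunk) => process.stdout.write(chunk));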
const createAudioStream = function(file) {
  const options = {
    frequency: 200,
    chunkSize: 32000
  };
  const audioStream = new streamBuffers.ReadableStreamBuffer(options);
  audioStream.put(file);
  // append zero-filled silence at the end to tell the service that the sentence is over
  audioStream.put(Buffer.alloc(160000));
  audioStream.stop();
  return audioStream;
};
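A sketch of consuming the stream above; the input file name is hypothetical:

const fs = require('fs');

const audio = createAudioStream(fs.readFileSync('utterance.raw'));
let sent = 0;
audio.on('data', (chunk) => { sent += chunk.length; });
audio.on('end', () => console.log('streamed ' + sent + ' bytes, including the trailing silence'));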
fs.access(filepath, (error) => {
  if (error) {
    return callback ? callback(new Error(`could not find file ${filepath}`)) : null;
  }
  absoluteFilepath = path.resolve(filepath);
  const options = {
    frequency: 100,
    chunkSize: 32000
  };
  const audioStream = new streamBuffers.ReadableStreamBuffer(options);
  fs.readFile(absoluteFilepath, (error, file) => {
    if (error) {
      return callback ? callback(error) : null;
    }
    audioStream.put(file);
    // append zero-filled silence at the end to tell the service that the sentence is over
    audioStream.put(Buffer.alloc(160000));
    audioStream.stop();
    audioStream.on('data', (data) => this.sendBytes(data));
    audioStream.on('end', () => { if (callback) return callback(); });
  });
});
};
module.exports.convertFrameToBuffer = function(frame) {
  var buffer = new sb.WritableStreamBuffer();
  frames.writeFrame(frame, buffer);
  return buffer.getContents();
};
exports.createReadStream = function (buffer, options) {
  buffer = Buffer.isBuffer(buffer) ? buffer : Buffer.from(buffer);
  var stream = new streamBuffers.ReadableStreamBuffer(options);
  stream.put(buffer);
  stream.stop(); // signal end-of-data (stop() supersedes the older destroySoon())
  return stream;
};
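A round-trip sketch for the helper: feed it a string and reassemble the chunks it emits (the chunkSize is deliberately tiny to force multiple chunks):

const chunks = [];
const rs = exports.createReadStream('hello world', { chunkSize: 4 });
rs.on('data', (chunk) => chunks.push(chunk));
rs.on('end', () => console.log(Buffer.concat(chunks).toString())); // "hello world"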