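// Import loop: stream each file in `files` into the dat archive with pump,
// then re-render the view once every file has been written.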
  function loop () {
    if (i === files.length) {
      // re-render
      console.log('added files', files)
      fetchArchiveInfo(render)
      return
    }
    var file = files[i++]
    var stream = fileReader(file)
    var entry = {
      name: (file.fullPath || file.name).replace(/^\//, ''),
      mtime: Date.now(),
      ctime: Date.now()
    }
    pump(
      stream,
      beaker.dat.createFileWriteStream(archiveInfo.key, entry),
      err => {
        if (err) {
          console.error('Error writing file', entry, err)
          // TODO inform user!
        }
        loop()
      }
    )
  }
}
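// Reverse-proxy plumbing: once the upstream response arrives, copy its status and
// headers to the client and pump the two bodies across. pump is used instead of
// .pipe() so that if either side errors or closes early, both streams are destroyed
// and the error is reported in one place.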
    // write the response status code and headers
    res.writeHead(clientRes.statusCode, clientRes.headers);
    // using pump is deliberate - see the pump docs for why
    pump(clientRes, res);
  });

  // This fires when the underlying agent produces an error.
  // Should we respond with a 504 gateway error here?
  // And what if headers have already been sent?
  clientReq.once('error', (err) => {
    // TODO(roman): if headers not sent - respond with gateway unavailable
  });

  // using pump is deliberate - see the pump docs for why
  pump(req, clientReq);
}
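// One possible way to resolve the TODO above (a sketch, not from the original code):
// answer 504 Gateway Timeout if the response has not started, otherwise abort it.
//
//   clientReq.once('error', (err) => {
//     if (!res.headersSent) {
//       res.writeHead(504);
//       res.end('Gateway Timeout');
//     } else {
//       res.destroy(err);
//     }
//   });

// Gulp task: compile the editor styles, keeping sourcemaps outside production builds.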
export default function editorStyles(done) {
  return pump([
    src( paths.styles.editorSrc, {sourcemaps: !isProd} ),
    editorStylesBeforeReplacementStream(),
    // Only do string replacements when building for production
    gulpPlugins.if(
      isProd,
      getStringReplacementTasks()
    ),
    editorStylesAfterReplacementStream(),
    dest(paths.styles.editorDest, {sourcemaps: !isProd}),
  ], done);
}
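// Build a pump pipeline over the index read stream: drop deletion records and pass
// stored values on to getEntry(); pump returns the last stream in the chain, so the
// caller gets the stream produced by getEntry().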
  } else {
    updateOpts()
    stream = source.createReadStream(opts)
  }

  const pipeline = [
    // any index
    stream,
    through.obj(function (data, enc, cb) {
      if (data.type === 'del') cb()
      else cb(null, data.value) // figure out how to make it store utf8
    }),
    getEntry()
  ]

  return pump.apply(null, pipeline)

  function updateOpts () {
    utils.extend(opts, getRangeOpts(index, prop, value))
  }
}
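// Download handler: bail out on a missing or non-200 response, hash the payload as
// it arrives, then pump it through optional gunzip/untar stages (or straight to a
// file on disk), reporting the outcome through next().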
        if (!res) return next(new Error('no res'))
        if (res.statusCode !== 200) return next(new Error('downloading returned ' + res.statusCode))
        // Consider using res.headers for verifying content-length and content-disposition for saveTo
        res.on('error', next)
        res.on('data', function (d) { len += d.length; hash.update(d) })

        var pipes = [stream]
        if (update.ungzip) pipes.push(gunzip())
        if (update.untar) pipes.push(tar.extract(saveTo, { fs: fs }))
        else pipes.push(fs.createWriteStream(saveTo))

        pump.apply(null, pipes.concat([ function (err) { next(err) } ]))
      })
    },
  }
}))
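// Stream query rows through an optional transform. pump() returns the last stream
// before the callback, so `out` is the transform stream; the callback closes the
// query, releases the connection, and re-emits any error on `out`.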
  const modifier = transform
    ? through2.obj((obj, _, cb) => cb(null, transform(obj)))
    : through2.obj()

  const end = (err) => {
    query.close(() => {
      model.sequelize.connectionManager.releaseConnection(conn)
        .then(() => null)
        .catch(() => null)
    })
    if (err) out.emit('error', err)
  }

  const out = pump(query, modifier, end)
  return out
}
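// Logger setup: pino writes raw JSON lines into `stream`, which is then fanned out
// through two line-split pretty-print pipelines, one to stdout (colorized) and one
// to the log file.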
  this.logger = pino(
    {
      base: null,
      safe: true,
      level: "trace",
    },
    stream,
  );

  this.fileStream = this.getFileStream();

  const consoleTransport = this.createPrettyTransport(this.options.levels.console, { colorize: true });
  const fileTransport = this.createPrettyTransport(this.options.levels.file, { colorize: false });

  pump(stream, split(), consoleTransport, process.stdout);
  pump(stream, split(), fileTransport, this.fileStream);

  return this;
}
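// Gulp task: let gulp-newer filter out scripts that are already up to date in the
// destination (the theme config counts as an extra trigger), then write the rest to
// the verbose and final destinations.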
export default function jsMin(done) {
  pump([
    src(paths.scripts.min),
    gulpPlugins.newer({
      dest: paths.scripts.dest,
      extra: [paths.config.themeConfig]
    }),
    dest(paths.verbose),
    dest(paths.scripts.dest),
  ], done);
}
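// Upload helper: read the file, base64-encode it, and pump it into the upload
// stream; on failure the error is annotated with the file name and size before the
// promise rejects, otherwise it resolves with the temporary file id.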
  return new Promise((resolve, reject) => {
    pump(
      getFileReaderStream(temporary_file.file),
      getBase64Transform(),
      file_upload_stream,
      (error) => {
        if (error) {
          error.file_name = temporary_file.file.name;
          error.file_size = temporary_file.file.size;
          reject(error);
          return;
        }
        resolve(temporary_file_id);
      }
    );
  });
}
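// Gulp/PostCSS task: run the source stylesheets through postcss-import and
// postcss-cssnext (autoprefixing for the last two browser versions), then write the
// result to ./dist.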
    './assets/css/push.css'
  ];

  const fileDest = './dist';

  const cssNextOpts = {
    features: {
      autoprefixer: {
        browsers: ['last 2 versions']
      }
    }
  };

  const taskOpts = [
    require( 'postcss-import' ),
    require( 'postcss-cssnext' )( cssNextOpts )
  ];

  pump( [
    gulp.src( fileSrc ),
    postcss( taskOpts ),
    gulp.dest( fileDest )
  ], cb );
} );