// Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
/**
 * Open the database connection and pre-compile frequently used statements.
 * Production opens the file at DB_PATH; any other environment uses an
 * in-memory database and runs migrations so tests start from a known schema.
 *
 * Fix: the driver option was misspelled `versobe` (twice), so the verbose
 * flag was silently ignored by the sqlite wrapper's open(); corrected to
 * `verbose`.
 *
 * NOTE(review): env vars are strings, so VERBOSE='false' is still truthy
 * here — confirm whether that is intended.
 */
const init = async () => {
  // Build the options object once instead of duplicating it per branch.
  const openOptions = { Promise, verbose: process.env.VERBOSE || false };
  if (process.env.NODE_ENV === 'production') {
    await db.open(process.env.DB_PATH, openOptions);
  } else {
    await db.open(':memory:', openOptions);
    await db.migrate();
  }
  // Prepare hot-path statements once at startup; stored in module-level
  // bindings declared elsewhere in this file.
  userInsertStmt = await db.prepare(
    `INSERT INTO Users (uuid, email, kdfSalt, srpSalt, srpVerifier, totpSecret)
VALUES ($uuid, $email, $kdfSalt, $srpSalt, $srpVerifier, $totpSecret)`);
  getUserByEmailStmt = await db.prepare('SELECT * FROM Users WHERE email = $email');
};
// NOTE(review): fragment — the enclosing function's signature (and the
// `isFinal` binding it reads) lies above this view; do not restructure
// without seeing the full definition.
// Commits the in-flight transaction, then — unless this is the final
// batch — immediately opens the next one; the chained promise is returned
// so the caller can await completion.
const retp = db.run('commit')
.then(() => (isFinal ? null : db.run('begin')))
return retp
}
/*
 * TODO: multiple sources indicate wrapping inserts in a transaction is key to getting
 * decent bulk load performance.
 * We're currently wrapping all inserts in one huge transaction. Should probably break
 * this into more reasonable (50K rows?) chunks.
 */
// NOTE(review): tail of a larger async function — its signature, opening
// `try {`, and the bindings `dropStmt`, `createStmt`, `insertStmtStr`,
// `md`, `pathname`, `hasHeaderRow`, `insertRow`, `commitBatch`,
// `consumeStream`, `csv`, and `log` are defined above this view.
// Recreate the target table from scratch before loading.
await db.run(dropStmt)
await db.run(createStmt)
log.log('table created')
// Open one transaction around the whole bulk load (see TODO above);
// presumably `commitBatch` (passed below) commits and reopens between batches.
await db.run('begin')
const insertStmt = await db.prepare(insertStmtStr)
// Stream the CSV file, inserting each row through the prepared statement.
const rowCount = await consumeStream(csv.fromPath(pathname, md.csvOptions),
insertRow(insertStmt), commitBatch, md.rowCount,
hasHeaderRow)
log.log('consumeStream completed, rowCount: ', rowCount)
// NOTE(review): finalize() likely returns a promise that is not awaited
// here — confirm that is intentional before the function returns.
insertStmt.finalize()
return md
} catch (err) {
// Log with stack for diagnostics, then rethrow so the caller sees the failure.
log.error(err, err.stack)
throw err
}
}