How to use pg-copy-streams - 10 common examples

To help you get started, we’ve selected a few pg-copy-streams examples, based on popular ways it is used in public projects.
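
Before the project excerpts, here is a minimal, self-contained sketch of the copy-from pattern they all share. It is not taken from any of the projects below; the connection settings, the users table and the users.csv file are illustrative placeholders.

const fs = require('fs');
const { pipeline } = require('stream');
const { Client } = require('pg');
const copyFrom = require('pg-copy-streams').from;

const client = new Client(); // connection settings are read from the PG* environment variables
client.connect(function (err) {
  if (err) throw err;
  // COPY ... FROM STDIN returns a writable stream; whatever is piped into it is loaded into the table
  const stream = client.query(copyFrom('COPY users FROM STDIN WITH (FORMAT csv)'));
  const fileStream = fs.createReadStream('users.csv');
  pipeline(fileStream, stream, function (err) {
    client.end();
    if (err) throw err;
    console.log('copy finished');
  });
});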

github NLeSC / spot / server / spot-import.js
if (err) throw err;

    // setup copy from
    var command = 'COPY ' + options.table + ' FROM STDIN ';
    command = command + '( ';
    command = command + 'FORMAT CSV, ';
    command = command + "DELIMITER '\t', ";
    command = command + "QUOTE '\b', "; // defaults to '"' which can give problems
    command = command + 'NULL ' + misval + ' ';
    command = command + ') ';
    console.log(command.toString());

    // create table & sink
    client.query('DROP TABLE IF EXISTS ' + options.table);
    client.query(q.toString());
    var sink = client.query(pgStream.from(command));

    // create transform
    var transform = csvStringify({
      columns: columns,
      quote: false,
      quotedEmpty: false,
      delimiter: '\t',
      rowDelimiter: 'unix'
    });

    streamify(parsed).pipe(transform).pipe(sink);
    // var testSink = fs.createWriteStream('file_to_import.csv');
    // source.pipe(testSink);
  });
github evansiroky / db-streamer / lib / inserters / pgInserter.js
pg.connect(this.dbConnString, function (err, client, done) {
      function doneFn (err) {
        done()
        self.endHandler(err)
      }

      if (err) {
        doneFn(err)
        return
      }

      const stream = client.query(copyFrom(self.getCopyQueryText()))
      const fileStream = fs.createReadStream(self.deferred.tempDeferredFilename)

      fileStream.on('error', doneFn)
      fileStream.pipe(stream).on('finish', function () {
        // delete temp file
        fs.unlink(self.deferred.tempDeferredFilename, doneFn)
      })
    })
  } else {
github gajus / slonik / src / connectionMethods / copyFromBinary.js
(finalConnection, finalSql) => {
      const copyFromBinaryStream = finalConnection.query(from(finalSql));

      bufferToStream(payloadBuffer).pipe(copyFromBinaryStream);

      return new Promise((resolve, reject) => {
        copyFromBinaryStream.on('error', (error) => {
          reject(error);
        });

        copyFromBinaryStream.on('end', () => {
          // $FlowFixMe
          resolve({});
        });
      });
    },
  );
github brianc / node-postgres / test / integration / gh-issues / 699-tests.js
client.query(c, function (err) {
    if (err) throw err;

    var stream = client.query(copyFrom("COPY employee FROM STDIN"));
    stream.on('end', function () {
      done();
      helper.pg.end();
    });

    for (var i = 1; i <= 5; i++) {
      var line = ['1\ttest', i, '\tuser', i, '\n'];
      stream.write(line.join(''));
    }
    stream.end();
  });
});
github BenLubar / nodebb-postgres-converter / writer / postgres / session.js
async function copySessions(db, input, connection) {
	console.time('Copy sessions');

	try {
		var stream = db.query(copyFrom(`COPY "session" FROM STDIN`));

		var promise = new Promise(function(resolve, reject) {
			stream.on('error', reject);
			stream.on('end', resolve);
		});

		function write(values) {
			return new Promise(function(resolve, reject) {
				var ok = stream.write(values, 'utf8');
				if (ok) {
					return resolve();
				}
				stream.once('drain', resolve);
			});
		}
github MagicStack / pgbench / _nodejs / index.js
var _start_copy = function(_cb) {
                req_start = _now();
                var csvstream = csvwriter({
                    sendHeaders: false,
                    separator: '\t',
                    headers: copy.columns
                });
                var copystream = conn.query(pgcopy(stmt.text));
                csvstream.pipe(copystream);
                copystream.on('end', _cb);
                copystream.on('error', _cb);
                for (var row of copy.rows) {
                    csvstream.write(row);
                }
                csvstream.end();
            }
github AnatolyUss / nmig / src / DataLoader.js
self._pg.connect((error, client, done) => {
                                    if (error) {
                                        generateError(self, '\t--[populateTableWorker] Cannot connect to PostgreSQL server...\n' + error, sql);
                                        resolvePopulateTableWorker();
                                    } else {
                                        const sqlCopy      = 'COPY "' + self._schema + '"."' + tableName + '" FROM STDIN DELIMITER \'' + self._delimiter + '\' CSV;';
                                        const copyStream   = client.query(from(sqlCopy));
                                        const bufferStream = new BufferStream(buffer);

                                        copyStream.on('end', () => {
                                            /*
                                             * COPY FROM STDIN does not return the number of rows inserted.
                                             * But the transactional behavior still applies (no records inserted if at least one failed).
                                             * That is why in case of 'on end' the rowsInChunk value is actually the number of records inserted.
                                             */
                                            process.send(new MessageToMaster(tableName, rowsInChunk, rowsCnt));
                                            deleteChunk(self, dataPoolId, client, done).then(() => resolvePopulateTableWorker());
                                        });

                                        copyStream.on('error', copyStreamError => {
                                            processDataError(
                                                self,
                                                copyStreamError,
github ideal-postcodes / postcodes.io / app / models / base.js
pool.connect((error, client, done) => {
        const pgStream = client
          .query(copyFrom(query))
          .on("end", () => {
            done();
            return cb();
          })
          .on("error", error => {
            done();
            return cb(error);
          });
        fs.createReadStream(filepath, { encoding: "utf8" })
          .pipe(csv.parse())
          .pipe(csv.transform(transform))
          .pipe(csv.stringify())
          .pipe(pgStream);
      });
    },

pg-copy-streams

Low-Level COPY TO and COPY FROM streams for PostgreSQL in JavaScript, built to work with the pg client. MIT licensed.

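The excerpts above all stream data into PostgreSQL. The package also exposes a copy-to stream for the opposite direction; here is a minimal sketch, again using a placeholder users table and output file rather than code from any of the projects above:

const fs = require('fs');
const { pipeline } = require('stream');
const { Client } = require('pg');
const copyTo = require('pg-copy-streams').to;

const client = new Client();
client.connect(function (err) {
  if (err) throw err;
  // COPY ... TO STDOUT returns a readable stream of the table contents
  const stream = client.query(copyTo('COPY users TO STDOUT WITH (FORMAT csv)'));
  pipeline(stream, fs.createWriteStream('users.csv'), function (err) {
    client.end();
    if (err) throw err;
  });
});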