How to use @google-cloud/bigquery - 10 common examples

To help you get started, we've selected ten @google-cloud/bigquery examples, based on the ways the library is most often used in public projects.
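
All of the samples below follow the same basic pattern: construct a BigQuery client, then await the promise-returning method you need. As a warm-up, here is a minimal sketch that runs a query against a public dataset (the SQL and dataset are illustrative, not part of any sample below):

const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();

async function runQuery() {
  // Query a public dataset; replace with your own SQL as needed
  const query = `
    SELECT name, SUM(number) AS total
    FROM \`bigquery-public-data.usa_names.usa_1910_2013\`
    GROUP BY name
    ORDER BY total DESC
    LIMIT 10`;

  // query() starts the job and resolves with the result rows
  const [rows] = await bigquery.query({query, location: 'US'});
  rows.forEach(row => console.log(`${row.name}: ${row.total}`));
}

runQuery();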

github googleapis / nodejs-bigquery / samples / getTableLabels.js
function main(datasetId = 'my_dataset', tableId = 'my_table') {
  // [START bigquery_get_table_labels]
  // Import the Google Cloud client library
  const {BigQuery} = require('@google-cloud/bigquery');
  const bigquery = new BigQuery();

  async function getTableLabels() {
// Gets labels on a table.

    /**
     * TODO(developer): Uncomment the following lines before running the sample.
     */
    // const datasetId = "my_dataset";
    // const tableId = "my_table";

    // Retrieve current table metadata.
    const table = bigquery.dataset(datasetId).table(tableId);
    const [metadata] = await table.getMetadata();
    const labels = metadata.labels;

    console.log(`${tableId} Labels:`);
    for (const [key, value] of Object.entries(labels)) {
      console.log(`${key}: ${value}`);
    }
  }
  // [END bigquery_get_table_labels]
  getTableLabels();
}
main(...process.argv.slice(2));
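
For context, labels like the ones read above are attached by updating the table's metadata. A minimal sketch of adding one, assuming the same bigquery client, datasetId, and tableId as in the sample:

// Hypothetical helper: attach a {color: 'green'} label to the table.
async function addTableLabel() {
  const table = bigquery.dataset(datasetId).table(tableId);
  const [metadata] = await table.getMetadata();
  metadata.labels = {...(metadata.labels || {}), color: 'green'};
  const [apiResponse] = await table.setMetadata(metadata);
  console.log(apiResponse.labels);
}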

github googleapis / nodejs-bigquery / samples / deleteLabelTable.js
function main(datasetId = 'my_dataset', tableId = 'my_table') {
  // [START bigquery_delete_label_table]
  // Import the Google Cloud client library
  const {BigQuery} = require('@google-cloud/bigquery');
  const bigquery = new BigQuery();

  async function deleteLabelTable() {
    // Deletes a label from an existing table.
    // This example dataset starts with existing label { color: 'green' }

    /**
     * TODO(developer): Uncomment the following lines before running the sample.
     */
    // const datasetId = "my_dataset";
    // const tableId = "my_table";

    const dataset = bigquery.dataset(datasetId);
    const [table] = await dataset.table(tableId).get();

    // Retrieve current table metadata
    const [metadata] = await table.getMetadata();

    // Setting a label's value to null removes it from the table
    metadata.labels = {color: null};
    const [apiResponse] = await table.setMetadata(metadata);

    console.log(`${tableId} labels:`);
    console.log(apiResponse.labels);
  }
  // [END bigquery_delete_label_table]
  deleteLabelTable();
}
main(...process.argv.slice(2));

github googleapis / nodejs-bigquery / samples / extractTableCompressed.js
function main(
  datasetId = 'my_dataset',
  tableId = 'my_table',
  bucketName = 'my-bucket',
  filename = 'file.csv'
) {
  // [START bigquery_extract_table_compressed]
  // Import the Google Cloud client libraries
  const {BigQuery} = require('@google-cloud/bigquery');
  const {Storage} = require('@google-cloud/storage');

  const bigquery = new BigQuery();
  const storage = new Storage();

  async function extractTableCompressed() {
    // Exports my_dataset:my_table to gcs://my-bucket/my-file as a compressed file.

    /**
     * TODO(developer): Uncomment the following lines before running the sample.
     */
    // const datasetId = "my_dataset";
    // const tableId = "my_table";
    // const bucketName = "my-bucket";
    // const filename = "file.csv";

    // Location must match that of the source table.
    const options = {
      location: 'US',
      // gzip-compress the exported file
      gzip: true,
    };

    // Export data from the table into a Google Cloud Storage file
    const [job] = await bigquery
      .dataset(datasetId)
      .table(tableId)
      .extract(storage.bucket(bucketName).file(filename), options);

    console.log(`Job ${job.id} created.`);

    // Check the job's status for errors
    const errors = job.status.errors;
    if (errors && errors.length > 0) {
      throw errors;
    }
  }
  // [END bigquery_extract_table_compressed]
  extractTableCompressed();
}
main(...process.argv.slice(2));
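
The client infers the export format from the destination file's extension (CSV for the .csv file above); setting gzip: true in the options, as shown, compresses the exported file.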

github googleapis / nodejs-bigquery / samples / deleteTable.js
function main(datasetId = 'my_dataset', tableId = 'my_table') {
  // [START bigquery_delete_table]
  // Import the Google Cloud client library
  const {BigQuery} = require('@google-cloud/bigquery');
  const bigquery = new BigQuery();

  async function deleteTable() {
    // Deletes "my_table" from "my_dataset".

    /**
     * TODO(developer): Uncomment the following lines before running the sample.
     */
    // const datasetId = "my_dataset";
    // const tableId = "my_table";

    // Delete the table
    await bigquery
      .dataset(datasetId)
      .table(tableId)
      .delete();

    console.log(`Table ${tableId} deleted.`);
  }
  // [END bigquery_delete_table]
  deleteTable();
}
main(...process.argv.slice(2));

github googleapis / nodejs-bigquery / samples / browseRows.js
function main(datasetId = 'my_dataset', tableId = 'my_table') {
  // [START bigquery_browse_table]

  // Import the Google Cloud client library and create a client
  const {BigQuery} = require('@google-cloud/bigquery');
  const bigquery = new BigQuery();

  async function browseRows() {
    // Displays rows from "my_table" in "my_dataset".

    /**
     * TODO(developer): Uncomment the following lines before running the sample.
     */
    // const datasetId = "my_dataset";
    // const tableId = "my_table";

    // List rows in the table
    const [rows] = await bigquery
      .dataset(datasetId)
      .table(tableId)
      .getRows();

    console.log('Rows:');
    rows.forEach(row => console.log(row));
  }
  // [END bigquery_browse_table]
  browseRows();
}
main(...process.argv.slice(2));
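
getRows() resolves with all of the table's rows by default, paginating under the hood; for a quick look at a large table, the request options can cap the row count. A small sketch, assuming the same dataset and table, inside an async function:

// Sketch: fetch only the first 10 rows instead of the whole table.
const [firstRows] = await bigquery
  .dataset(datasetId)
  .table(tableId)
  .getRows({maxResults: 10});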

github googleapis / nodejs-bigquery / samples / loadOrcFromGCSTruncate.js
function main(datasetId = 'my_dataset', tableId = 'my_table') {
  // [START bigquery_load_table_gcs_orc_truncate]
  // Import the Google Cloud client libraries
  const {BigQuery} = require('@google-cloud/bigquery');
  const {Storage} = require('@google-cloud/storage');

  // Instantiate the clients
  const bigquery = new BigQuery();
  const storage = new Storage();

  /**
   * This sample loads the ORC file at
   * https://storage.googleapis.com/cloud-samples-data/bigquery/us-states/us-states.orc
   *
   * TODO(developer): Replace the following lines with the path to your file.
   */
  const bucketName = 'cloud-samples-data';
  const filename = 'bigquery/us-states/us-states.orc';

  async function loadORCFromGCSTruncate() {
    /**
     * Imports a GCS file into a table and overwrites
     * table data if the table already exists.
     */

    // Configure the load job. ORC files are self-describing, so no schema is needed.
    const metadata = {
      sourceFormat: 'ORC',
      // WRITE_TRUNCATE replaces any existing data in the table.
      writeDisposition: 'WRITE_TRUNCATE',
    };

    // Load data from a Google Cloud Storage file into the table
    const [job] = await bigquery
      .dataset(datasetId)
      .table(tableId)
      .load(storage.bucket(bucketName).file(filename), metadata);

    console.log(`Job ${job.id} completed.`);
  }
  // [END bigquery_load_table_gcs_orc_truncate]
  loadORCFromGCSTruncate();
}
main(...process.argv.slice(2));
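
The writeDisposition set above, WRITE_TRUNCATE, replaces any existing table data. The other accepted values are WRITE_APPEND (add the loaded rows to the existing table) and WRITE_EMPTY (fail the job if the table already contains data).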

github googleapis / nodejs-bigquery / samples / addColumnLoadAppend.js
function main(
  datasetId = 'my_dataset',
  tableId = 'my_table',
  fileName = '/path/to/file.csv'
) {
  // [START bigquery_add_column_load_append]
  // Import the Google Cloud client libraries
  const {BigQuery} = require('@google-cloud/bigquery');

  // Instantiate client
  const bigquery = new BigQuery();

  async function addColumnLoadAppend() {
    // Adds a new column to a BigQuery table while appending rows via a load job.

    /**
     * TODO(developer): Uncomment the following lines before running the sample.
     */
    // const fileName = '/path/to/file.csv';
    // const datasetId = 'my_dataset';
    // const tableId = 'my_table';

    // In this example, the existing table contains only the 'Name', 'Age',
    // & 'Weight' columns. 'REQUIRED' fields cannot be added to an existing
    // schema, so the additional column must be 'NULLABLE'.
    const schema = 'Name:STRING, Age:INTEGER, Weight:FLOAT, IsMagic:BOOLEAN';

    // Set load job options: allow the new NULLABLE field and append the rows
    const options = {
      schema: schema,
      schemaUpdateOptions: ['ALLOW_FIELD_ADDITION'],
      writeDisposition: 'WRITE_APPEND',
    };

    // Load data from a local CSV file into the table
    const [job] = await bigquery
      .dataset(datasetId)
      .table(tableId)
      .load(fileName, options);

    console.log(`Job ${job.id} completed.`);
  }
  // [END bigquery_add_column_load_append]
  addColumnLoadAppend();
}
main(...process.argv.slice(2));
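
Besides ALLOW_FIELD_ADDITION, schemaUpdateOptions accepts ALLOW_FIELD_RELAXATION, which lets a load job relax a column from REQUIRED to NULLABLE.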

github firebase / functions-samples / bigquery-import / functions / index.js
// Clients are imported and initialized at the top of the source file, e.g.:
// const functions = require('firebase-functions');
// const {BigQuery} = require('@google-cloud/bigquery');
// const bigquery = new BigQuery();

// Note: this sample uses the pre-1.0 firebase-functions API, where the
// handler receives a single `event` and path wildcards are written with braces.
exports.addtobigquery = functions.database.ref('/logs/{logid}').onWrite(event => {
  // TODO: Make sure you set the `bigquery.datasetname` Firebase environment config value.
  const dataset = bigquery.dataset(functions.config().bigquery.datasetname);
  // TODO: Make sure you set the `bigquery.tablename` Firebase environment config value.
  const table = dataset.table(functions.config().bigquery.tablename);

  return table.insert({
    ID: event.data.key,
    MESSAGE: event.data.val().message,
    NUMBER: event.data.val().number
  });
});
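
One caveat with streaming inserts like the one above: table.insert() rejects with a PartialFailureError when only some rows fail, so it is worth inspecting the per-row errors. A sketch, assuming the same table handle and a hypothetical rows payload:

// Sketch: surface per-row streaming-insert failures.
table.insert(rows).catch(err => {
  if (err.name === 'PartialFailureError') {
    // err.errors lists each rejected row and the reasons it failed
    err.errors.forEach(e => console.error(e.row, e.errors));
  } else {
    throw err;
  }
});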

github JustinBeckwith / cloudcats / worker / analyzer.js
'use strict';

const util = require('util');
const async = require('async');
const {BigQuery} = require('@google-cloud/bigquery');
const reddit = require('./reddit');
const vision = require('./vision');
const logger = require('./logger');

const bigquery = new BigQuery();
const dataset = bigquery.dataset('cloudcats');
const table = dataset.table('images');

const PostType = {
	CAT: 0,
	DOG: 1,
	NEITHER: 2,
	BOTH: 3
};

async function publishToBigQuery(data) {
	try {
		await table.insert(data);
	} catch (error) {
		logger.error(`error publishing to bigquery: ${util.inspect(error)}\n\t${error.stack}`);
	}
}

github mprove-io / mprove / backend / src / models / proc / create-dataset-bigquery.ts
// Note: BigQuery, helper, and enums are imported at the top of the source file (not shown here).
export async function createDatasetBigquery(item: {
  bigquery_project: string;
  project_id: string;
  credentials_file_path: string;
}) {
  const bigquery = new BigQuery({
    projectId: item.bigquery_project,
    keyFilename: item.credentials_file_path
  });

  let datasetName = `mprove_${item.project_id}`;

  let bigqueryDataset = bigquery.dataset(datasetName);

  let datasetExistsItem = await bigqueryDataset
    .exists()
    .catch((e: any) =>
      helper.reThrow(e, enums.bigqueryErrorsEnum.BIGQUERY_DATASET_EXISTS_CHECK)
    );

  if (datasetExistsItem[0] === false) {
    await bigqueryDataset
      .create()
      // The enum member below is inferred from the file's error-wrapping
      // pattern; the exact name is not shown in this excerpt.
      .catch((e: any) =>
        helper.reThrow(e, enums.bigqueryErrorsEnum.BIGQUERY_DATASET_CREATE)
      );
  }

  return bigqueryDataset;
}
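
As an aside, the exists-then-create sequence can be collapsed into one call with the autoCreate option on Dataset#get; a minimal sketch, minus this project's error-wrapping helpers:

// Sketch (inside an async function): create the dataset on first access
// instead of checking for existence manually.
const [createdDataset] = await bigquery
  .dataset(datasetName)
  .get({autoCreate: true});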

@google-cloud/bigquery

Google BigQuery Client Library for Node.js

Apache-2.0