function main(datasetId = 'my_dataset', tableId = 'my_table') {
// [START bigquery_get_table_labels]
// Import the Google Cloud client library
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
async function getTableLabels() {
// Gets labels on a table.
/**
* TODO(developer): Uncomment the following lines before running the sample.
*/
// const datasetId = "my_dataset";
// const tableId = "my_table";
// Retrieve current table metadata.
const table = bigquery.dataset(datasetId).table(tableId);
const [metadata] = await table.getMetadata();
const labels = metadata.labels;
console.log(`${tableId} Labels:`);
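// The sample is cut off here; a plausible completion, following the pattern
// of the other snippets on this page (print each label, close the region
// tag, and invoke the sample):
for (const [key, value] of Object.entries(labels || {})) {
console.log(`${key}: ${value}`);
}
}
// [END bigquery_get_table_labels]
getTableLabels();
}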
function main(datasetId = 'my_dataset', tableId = 'my_table') {
// [START bigquery_delete_label_table]
// Import the Google Cloud client library
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
async function deleteLabelTable() {
// Deletes a label from an existing table.
// This example table starts with the existing label { color: 'green' }
/**
* TODO(developer): Uncomment the following lines before running the sample.
*/
// const datasetId = "my_dataset";
// const tableId = "my_table";
const dataset = bigquery.dataset(datasetId);
const [table] = await dataset.table(tableId).get();
// Retrieve current table metadata
const [metadata] = await table.getMetadata();
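// The sample is cut off here; a plausible completion. With the Node.js
// client, a label is removed by setting its value to null in the table
// metadata and calling setMetadata():
metadata.labels = {color: null};
const [apiResponse] = await table.setMetadata(metadata);
console.log(`${tableId} labels:`);
console.log(apiResponse.labels);
}
// [END bigquery_delete_label_table]
deleteLabelTable();
}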
function main(
datasetId = 'my_dataset',
tableId = 'my_table',
bucketName = 'my-bucket',
filename = 'file.csv'
) {
// [START bigquery_extract_table_compressed]
// Import the Google Cloud client libraries
const {BigQuery} = require('@google-cloud/bigquery');
const {Storage} = require('@google-cloud/storage');
const bigquery = new BigQuery();
const storage = new Storage();
async function extractTableCompressed() {
// Exports my_dataset:my_table to gs://my-bucket/file.csv as a gzip-compressed file.
/**
* TODO(developer): Uncomment the following lines before running the sample.
*/
// const datasetId = "my_dataset";
// const tableId = "my_table";
// const bucketName = "my-bucket";
// const filename = "file.csv";
// Location must match that of the source table.
const options = {
location: 'US',
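// The sample is cut off here; a plausible completion. gzip: true requests a
// compressed export, and Table#extract() accepts a Cloud Storage File object
// as the destination:
gzip: true,
};
// Export data from the table into a Cloud Storage file
const [job] = await bigquery
.dataset(datasetId)
.table(tableId)
.extract(storage.bucket(bucketName).file(filename), options);
console.log(`Job ${job.id} created.`);
// Check the job's status for errors
const errors = job.status.errors;
if (errors && errors.length > 0) {
throw errors;
}
}
// [END bigquery_extract_table_compressed]
extractTableCompressed();
}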
function main(datasetId = 'my_dataset', tableId = 'my_table') {
// [START bigquery_delete_table]
// Import the Google Cloud client library
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
async function deleteTable() {
// Deletes "my_table" from "my_dataset".
/**
* TODO(developer): Uncomment the following lines before running the sample.
*/
// const datasetId = "my_dataset";
// const tableId = "my_table";
// Delete the table
await bigquery
.dataset(datasetId)
.table(tableId)
.delete();
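// The sample is cut off here; a plausible ending (confirmation log, matching
// region tag, and the call that runs the sample):
console.log(`Table ${tableId} deleted.`);
}
// [END bigquery_delete_table]
deleteTable();
}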
function main(datasetId = 'my_dataset', tableId = 'my_table') {
// [START bigquery_browse_table]
// Import the Google Cloud client library and create a client
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
async function browseRows() {
// Displays rows from "my_table" in "my_dataset".
/**
* TODO(developer): Uncomment the following lines before running the sample.
*/
// const datasetId = "my_dataset";
// const tableId = "my_table";
// List rows in the table
const [rows] = await bigquery
.dataset(datasetId)
.table(tableId)
.getRows();
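// The sample is cut off here; a plausible completion that prints each
// returned row:
console.log('Rows:');
rows.forEach(row => console.log(row));
}
// [END bigquery_browse_table]
browseRows();
}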
function main(datasetId = 'my_dataset', tableId = 'my_table') {
// [START bigquery_load_table_gcs_orc_truncate]
// Import the Google Cloud client libraries
const {BigQuery} = require('@google-cloud/bigquery');
const {Storage} = require('@google-cloud/storage');
// Instantiate the clients
const bigquery = new BigQuery();
const storage = new Storage();
/**
* This sample loads the ORC file at
* https://storage.googleapis.com/cloud-samples-data/bigquery/us-states/us-states.orc
*
* TODO(developer): Replace the following lines with the path to your file.
*/
const bucketName = 'cloud-samples-data';
const filename = 'bigquery/us-states/us-states.orc';
async function loadORCFromGCSTruncate() {
/**
* Imports a GCS file into a table and overwrites
* table data if table already exists.
*/
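// The sample is cut off here; a plausible completion. sourceFormat: 'ORC'
// and writeDisposition: 'WRITE_TRUNCATE' are the load-job settings that
// overwrite an existing table; the 'US' location is an assumption and must
// match the destination dataset.
const metadata = {
sourceFormat: 'ORC',
// Overwrite existing table data.
writeDisposition: 'WRITE_TRUNCATE',
location: 'US',
};
// Load data from the Cloud Storage ORC file into the table
const [job] = await bigquery
.dataset(datasetId)
.table(tableId)
.load(storage.bucket(bucketName).file(filename), metadata);
// load() waits for the job to finish
console.log(`Job ${job.id} completed.`);
}
// [END bigquery_load_table_gcs_orc_truncate]
loadORCFromGCSTruncate();
}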
function main(
datasetId = 'my_dataset',
tableId = 'my_table',
fileName = '/path/to/file.csv'
) {
// [START bigquery_add_column_load_append]
// Import the Google Cloud client libraries
const {BigQuery} = require('@google-cloud/bigquery');
// Instantiate client
const bigquery = new BigQuery();
async function addColumnLoadAppend() {
// Adds a new column to a BigQuery table while appending rows via a load job.
/**
* TODO(developer): Uncomment the following lines before running the sample.
*/
// const fileName = '/path/to/file.csv';
// const datasetId = 'my_dataset';
// const tableId = 'my_table';
// In this example, the existing table contains only the 'Name', 'Age',
// & 'Weight' columns. 'REQUIRED' fields cannot be added to an existing
// schema, so the additional column must be 'NULLABLE'.
const schema = 'Name:STRING, Age:INTEGER, Weight:FLOAT, IsMagic:BOOLEAN';
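// The sample is cut off here; a plausible completion. schemaUpdateOptions:
// ['ALLOW_FIELD_ADDITION'] together with writeDisposition: 'WRITE_APPEND'
// is the documented way to add a NULLABLE column during a load job:
// Retrieve destination table reference
const [table] = await bigquery
.dataset(datasetId)
.table(tableId)
.get();
const destinationTableRef = table.metadata.tableReference;
// Set load job options
const options = {
schema: schema,
schemaUpdateOptions: ['ALLOW_FIELD_ADDITION'],
writeDisposition: 'WRITE_APPEND',
destinationTable: destinationTableRef,
};
// Load data from the local CSV file into the table
const [job] = await bigquery
.dataset(datasetId)
.table(tableId)
.load(fileName, options);
console.log(`Job ${job.id} completed.`);
}
// [END bigquery_add_column_load_append]
addColumnLoadAppend();
}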
'use strict';
const util = require('util');
const async = require('async');
const {BigQuery} = require('@google-cloud/bigquery');
const reddit = require('./reddit');
const vision = require('./vision');
const logger = require('./logger');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('cloudcats');
const table = dataset.table('images');
const PostType = {
CAT: 0,
DOG: 1,
NEITHER: 2,
BOTH: 3
};
async function publishToBigQuery(data) {
try {
await table.insert(data);
} catch (error) {
logger.error(`error publishing to bigquery: ${util.inspect(error)}\n\t${error.stack}`);
}
}
export async function createDatasetBigquery(item: {
bigquery_project: string;
project_id: string;
credentials_file_path: string;
}) {
const bigquery = new BigQuery({
projectId: item.bigquery_project,
keyFilename: item.credentials_file_path
});
let datasetName = `mprove_${item.project_id}`;
let bigqueryDataset = bigquery.dataset(datasetName);
let datasetExistsItem = await bigqueryDataset
.exists()
.catch((e: any) =>
helper.reThrow(e, enums.bigqueryErrorsEnum.BIGQUERY_DATASET_EXISTS_CHECK)
);
if (datasetExistsItem[0] === false) {
await bigqueryDataset
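// The snippet is cut off here; a plausible completion mirroring the
// error-handling pattern used for the exists() check above (the specific
// error enum name is an assumption):
.create()
.catch((e: any) =>
helper.reThrow(e, enums.bigqueryErrorsEnum.BIGQUERY_DATASET_CREATE)
);
}
}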
constructor() {
this.bq = new bigquery.BigQuery();
}