How to use @google-cloud/storage - 10 common examples

To help you get started, we’ve selected a few @google-cloud/storage examples based on popular ways the library is used in public projects.
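
Before the project excerpts, here is a minimal, self-contained sketch of typical usage; the bucket name and file path are placeholders rather than values taken from the projects below.

// Minimal sketch: upload a local file and list the bucket's contents.
// 'my-bucket' and './local-file.txt' are placeholder names.
const {Storage} = require('@google-cloud/storage');

async function quickstart() {
  // Uses Application Default Credentials; pass {keyFilename} or
  // {credentials} to authenticate explicitly.
  const storage = new Storage();
  const bucket = storage.bucket('my-bucket');

  // Upload a local file to the bucket.
  await bucket.upload('./local-file.txt');

  // List the objects currently in the bucket.
  const [files] = await bucket.getFiles();
  files.forEach(file => console.log(file.name));
}

quickstart().catch(console.error);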


github material-components/material-components-web-react: test/screenshot/screenshot.tsx
const serviceAccountKey: string = process.env.MDC_GCLOUD_SERVICE_ACCOUNT_KEY || '';
const branchName = process.env.MDC_BRANCH_NAME;
const commitHash = process.env.MDC_COMMIT_HASH;
const goldenFilePath = './test/screenshot/golden.json';
const bucketName = 'screenshot-uploads';
const defaultMetadata = {
  commit: commitHash,
  branch: branchName,
};

const NO_MATCH_DIRECTORY = 'no_match';

let storage: Storage|null = null;
let bucket: Storage.Bucket|null = null;
if (serviceAccountKey) {
  storage = new Storage({
    credentials: JSON.parse(serviceAccountKey),
  });

  bucket = storage.bucket(bucketName);
}

export default class Screenshot {
  urlPath_: string;
  /**
   * @param {string} urlPath The URL path to test
   */
  constructor(urlPath: string) {
    /** @private {string} */
    this.urlPath_ = urlPath;
    // TODO allow clients to specify capture-chrome options, like viewport size
  }
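
The excerpt ends inside the Screenshot class; a hedged sketch (plain JS, not from the original file) of how the bucket configured above could be used to upload a captured screenshot buffer, tagging it with the default branch/commit metadata. The buffer and destination path are illustrative.

// Hedged sketch: upload an image buffer to the screenshot bucket.
async function uploadScreenshot(imageBuffer, destinationPath) {
  if (!bucket) {
    return; // no service account key configured, skip uploads
  }
  await bucket.file(destinationPath).save(imageBuffer, {
    contentType: 'image/png',
    metadata: {metadata: defaultMetadata}, // custom key/value metadata
  });
  console.log(`Uploaded ${destinationPath} to gs://${bucketName}`);
}
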
github googleapis/nodejs-storage: samples/hmacKeyDeactivate.js
function main(
  hmacKeyAccessId = 'GOOG0234230X00',
  projectId = 'serviceAccountProjectId'
) {
  // [START storage_deactivate_hmac_key]
  // Imports the Google Cloud client library
  const {Storage} = require('@google-cloud/storage');

  // Creates a client
  const storage = new Storage();

  // Deactivate the service account's HMAC key
  async function deactivateHmacKey() {
    /**
     * TODO(developer): Uncomment the following line before running the sample.
     */
    // const hmacKeyAccessId = 'HMAC Access Key Id to update, e.g. GOOG0234230X00';
    // const projectId = 'The project Id this service account belongs to, e.g. serviceAccountProjectId';

    const hmacKey = storage.hmacKey(hmacKeyAccessId, {projectId});
    const [hmacKeyMetadata] = await hmacKey.setMetadata({state: 'INACTIVE'});

    console.log(`The HMAC key is now inactive.`);
    console.log(`The HMAC key metadata is:`);
    for (const [key, value] of Object.entries(hmacKeyMetadata)) {
      console.log(`${key}: ${value}`);
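
The sample is truncated here; as a follow-up, a hedged sketch of related HMAC key calls the client also supports, reusing the hmacKey handle from above.

// Hedged sketch: a deactivated key can be re-activated, or deleted for good.
async function reactivateOrDeleteHmacKey(hmacKey) {
  // Re-activate the key.
  await hmacKey.setMetadata({state: 'ACTIVE'});

  // Or delete it permanently; deletion is only allowed while the key
  // is in the INACTIVE state.
  // await hmacKey.delete();
}
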
github googleapis/nodejs-storage: samples/iam.js
// [START storage_add_bucket_iam_member]
  // Imports the Google Cloud client library
  const {Storage} = require('@google-cloud/storage');

  /**
   * TODO(developer): Uncomment the following lines before running the sample.
   */
  // const bucketName = 'Name of a bucket, e.g. my-bucket';
  // const roleName = 'Role to grant, e.g. roles/storage.objectViewer';
  // const members = [
  //   'user:jdoe@example.com',    // Example members to grant
  //   'group:admins@example.com', // the new role to
  // ];

  // Creates a client
  const storage = new Storage();

  // Get a reference to a Google Cloud Storage bucket
  const bucket = storage.bucket(bucketName);

  // Gets and updates the bucket's IAM policy
  const [policy] = await bucket.iam.getPolicy();

  // Adds the new roles to the bucket's IAM policy
  policy.bindings.push({
    role: roleName,
    members: members,
  });

  // Updates the bucket's IAM policy
  await bucket.iam.setPolicy(policy);
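
A hedged companion sketch for the reverse operation, removing members from a role on the same bucket (not quoted from the sample file):

// Hedged sketch: remove previously granted members from a role by
// filtering them out of the matching binding and re-saving the policy.
async function removeBucketIamMember(bucket, roleName, members) {
  const [policy] = await bucket.iam.getPolicy();
  for (const binding of policy.bindings) {
    if (binding.role === roleName) {
      binding.members = binding.members.filter(m => !members.includes(m));
    }
  }
  await bucket.iam.setPolicy(policy);
}
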
github googleapis/nodejs-storage: samples/fileSetMetadata.js
function main(bucketName = 'my-bucket', filename = 'file.txt') {
  // [START storage_set_metadata]
  // Imports the Google Cloud client library
  const {Storage} = require('@google-cloud/storage');

  // Creates a client
  const storage = new Storage();

  /**
   * TODO(developer): Uncomment the following lines before running the sample.
   */
  // const bucketName = 'Name of a bucket, e.g. my-bucket';
  // const filename = 'File to access, e.g. file.txt';
  async function setFileMetadata() {
    // Set file metadata.
    const [metadata] = await storage
      .bucket(bucketName)
      .file(filename)
      .setMetadata({
        // Predefined metadata fields recognized by the server, e.g. 'cacheControl',
        // 'contentDisposition', 'contentEncoding', 'contentLanguage', 'contentType'
        contentDisposition: 'attachment; filename*=utf-8\'\'"anotherImage.jpg"',
        contentType: 'image/jpeg',
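
The call above is cut off; a hedged sketch of reading the metadata back afterwards, reusing the same bucketName and filename:

// Hedged sketch: read the metadata back to confirm the update.
async function getFileMetadata() {
  const [metadata] = await storage
    .bucket(bucketName)
    .file(filename)
    .getMetadata();
  console.log(`contentType: ${metadata.contentType}`);
  console.log(`contentDisposition: ${metadata.contentDisposition}`);
}
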
github googleapis/nodejs-bigquery: samples/loadTableGCSORC.js
function main(datasetId = 'my_dataset', tableId = 'my_table') {
  // [START bigquery_load_table_gcs_orc]
  // Import the Google Cloud client libraries
  const {BigQuery} = require('@google-cloud/bigquery');
  const {Storage} = require('@google-cloud/storage');

  // Instantiate clients
  const bigquery = new BigQuery();
  const storage = new Storage();

  /**
   * This sample loads the ORC file at
   * https://storage.googleapis.com/cloud-samples-data/bigquery/us-states/us-states.orc
   *
   * TODO(developer): Replace the following lines with the path to your file.
   */
  const bucketName = 'cloud-samples-data';
  const filename = 'bigquery/us-states/us-states.orc';

  async function loadTableGCSORC() {
    // Imports a GCS file into a table with ORC source format.

    /**
     * TODO(developer): Uncomment the following line before running the sample.
     */
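
The sample stops inside the TODO comment; a hedged sketch of how the load step itself is typically written with these two clients, reusing the constants defined above:

// Hedged sketch: load the ORC file from Cloud Storage into the table.
async function loadOrcFromGcs() {
  const [job] = await bigquery
    .dataset(datasetId)
    .table(tableId)
    .load(storage.bucket(bucketName).file(filename), {
      sourceFormat: 'ORC',
      location: 'US',
    });

  console.log(`Job ${job.id} completed.`);

  // Surface any errors reported by the load job.
  const errors = job.status.errors;
  if (errors && errors.length > 0) {
    throw errors;
  }
}
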
github GoogleCloudPlatform/training-data-analyst: courses/developingapps/nodejs/containerengine/start/frontend/gcp/cloudstorage.js
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

'use strict';

const config = require('../config');
const {Storage} = require('@google-cloud/storage');

const GCLOUD_PROJECT = config.get('GCLOUD_PROJECT');
const GCLOUD_BUCKET = config.get('GCLOUD_BUCKET');

const storage = new Storage({projectId: GCLOUD_PROJECT});
const bucket = storage.bucket(GCLOUD_BUCKET);


// Express middleware that will automatically pass uploads to Cloud Storage.
// req.file is processed and will have a new property:
// * ``cloudStoragePublicUrl`` the public url to the object.
// [START process]
function sendUploadToGCS (req, res, next) {
  if (!req.file) {
    return next();
  }

  const oname = Date.now() + req.file.originalname;
  const file = bucket.file(oname);

  const stream = file.createWriteStream({
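    // Hedged continuation (not quoted from the repo): typical write options
    // and stream wiring for this kind of middleware. Assumes an in-memory
    // upload parser (e.g. multer) has populated req.file.buffer and
    // req.file.mimetype.
    metadata: {contentType: req.file.mimetype},
    resumable: false,
  });

  stream.on('error', (err) => {
    req.file.cloudStorageError = err;
    next(err);
  });

  stream.on('finish', () => {
    req.file.cloudStorageObject = oname;
    // Public URL format for objects readable by allUsers.
    req.file.cloudStoragePublicUrl =
      `https://storage.googleapis.com/${GCLOUD_BUCKET}/${oname}`;
    next();
  });

  stream.end(req.file.buffer);
}
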
github googleapis/repo-automation-bots: packages/gcf-utils/src/gcf-utils.ts
async getProbotConfig(): Promise {
    const storage = new Storage();
    const kmsclient = new KMS.KeyManagementServiceClient();

    const destFileName = '/tmp/creds.json';
    const bucketName = process.env.DRIFT_PRO_BUCKET || '';
    const srcFilename = process.env.GCF_SHORT_FUNCTION_NAME || '';

    const options = {
      destination: destFileName,
    };

    // Downloads the file
    await storage
      .bucket(bucketName)
      .file(srcFilename)
      .download(options);
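
As an aside, the same object can be read straight into memory instead of being written to /tmp first; a hedged sketch (not part of gcf-utils):

// Hedged sketch: download the object contents directly into memory.
async function readConfigIntoMemory(storage, bucketName, srcFilename) {
  const [contents] = await storage
    .bucket(bucketName)
    .file(srcFilename)
    .download();
  return contents; // a Buffer with the object's bytes
}
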
github googleapis/nodejs-bigquery: samples/extractTableJSON.js
function main(
  datasetId = 'my_dataset',
  tableId = 'my_table',
  bucketName = 'my-bucket',
  filename = 'file.json'
) {
  // [START bigquery_extract_table_json]
  // Import the Google Cloud client libraries
  const {BigQuery} = require('@google-cloud/bigquery');
  const {Storage} = require('@google-cloud/storage');

  const bigquery = new BigQuery();
  const storage = new Storage();

  async function extractTableJSON() {
    // Exports my_dataset:my_table to gs://my-bucket/file.json as JSON.

    /**
     * TODO(developer): Uncomment the following lines before running the sample.
     */
    // const datasetId = "my_dataset";
    // const tableId = "my_table";
    // const bucketName = "my-bucket";
    // const filename = "file.json";

    // Location must match that of the source table.
    const options = {
      format: 'json',
      location: 'US',
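
The options object is cut off here; a hedged sketch of how the export call is typically completed with these clients:

// Hedged sketch: export the table to the Cloud Storage object as JSON.
async function runExtract() {
  const [job] = await bigquery
    .dataset(datasetId)
    .table(tableId)
    .extract(storage.bucket(bucketName).file(filename), {
      format: 'json',
      location: 'US',
    });

  console.log(`Job ${job.id} completed.`);

  // Surface any errors reported by the extract job.
  const errors = job.status.errors;
  if (errors && errors.length > 0) {
    throw errors;
  }
}
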
github RocketChat/Rocket.Chat: packages/rocketchat-file-upload/ufs/GoogleStorage/server.js
constructor(options) {
		super(options);

		const gcs = gcStorage(options.connection);
		this.bucket = gcs.bucket(options.bucket);

		options.getPath = options.getPath || function(file) {
			return file._id;
		};

		this.getPath = function(file) {
			if (file.GoogleStorage) {
				return file.GoogleStorage.path;
			}
			// Compatibility
			// TODO: Migration
			if (file.googleCloudStorage) {
				return file.googleCloudStorage.path + file._id;
			}
		};
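
A hedged sketch of how the resolved path can then be used to stream a stored file back out of the bucket (the function name is illustrative, not taken from the Rocket.Chat store):

// Hedged sketch: stream a stored file out of the bucket using the
// store's resolved object path.
function getReadStreamFor(store, file) {
  return store.bucket.file(store.getPath(file)).createReadStream();
}
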
github project-slippi/slippi-desktop-app: app/main.dev.js
case "slippi:":
    const tmpDir = os.tmpdir();
    const destination = path.join(tmpDir, 'replay.slp');
    const replayPath = myUrl.searchParams.get('path');

    // The following path generation will not work on dev
    // __static didn't exist and __dirname didn't work. /shrug
    const appPath = app.getAppPath();
    const keyPath = path.join(appPath, "../app.asar.unpacked/static/storage-reader.json");
    log.info(`Keypath: ${keyPath}`);
    const storage = new Storage({
      projectId: 'slippi',
      keyFilename: keyPath,
    });
    const bucket = storage.bucket('slippi.appspot.com');
    const file = new File(bucket, replayPath);

    log.info(`Downloading file ${replayPath} to ${destination}`);

    // Download file
    await file.download({ destination: destination });

    log.info(`Finished download`);

    // Wait until mainWindow exists so that we can send an IPC to play.
    // We are willing to wait for a few seconds before timing out
    await waitForMainWindow();
    mainWindow.webContents.send("play-replay", path.join(tmpDir, 'replay.slp'));


    break;
  case "file:":

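For reference, the File handle used in the download step above can also be obtained from the bucket itself, which is the more common idiom; a hedged equivalent:

// Hedged sketch: equivalent download using bucket.file() rather than
// constructing a File directly.
async function downloadReplay(bucket, replayPath, destination) {
  await bucket.file(replayPath).download({destination});
}
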
@google-cloud/storage

Cloud Storage Client Library for Node.js

License: Apache-2.0
Package Health Score: 91 / 100