function main(
  hmacKeyAccessId = 'GOOG0234230X00',
  projectId = 'serviceAccountProjectId'
) {
  // [START storage_deactivate_hmac_key]
  // Imports the Google Cloud client library
  const {Storage} = require('@google-cloud/storage');

  // Creates a client
  const storage = new Storage();

  // Deactivates an HMAC service account key
  async function deactivateHmacKey() {
    /**
     * TODO(developer): Uncomment the following lines before running the sample.
     */
    // const hmacKeyAccessId = 'HMAC Access Key Id to update, e.g. GOOG0234230X00';
    // const projectId = 'The project Id this service account belongs to, e.g. serviceAccountProjectId';

    const hmacKey = storage.hmacKey(hmacKeyAccessId, {projectId});
    const [hmacKeyMetadata] = await hmacKey.setMetadata({state: 'INACTIVE'});

    console.log('The HMAC key is now inactive.');
    console.log('The HMAC key metadata is:');
    for (const [key, value] of Object.entries(hmacKeyMetadata)) {
      console.log(`${key}: ${value}`);
    }
  }

  deactivateHmacKey().catch(console.error);
  // [END storage_deactivate_hmac_key]
}
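// Usage sketch (an addition, not part of the original sample): samples of
// this shape are typically run from the command line, forwarding arguments
// to main(). The file name below is an assumption.
//
//   node deactivateHmacKey.js GOOG0234230X00 my-project-id
main(...process.argv.slice(2));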
function main(
  bucketName = 'my-bucket',
  roleName = 'roles/storage.objectViewer',
  members = ['user:jdoe@example.com']
) {
  // [START storage_add_bucket_iam_member]
  // Imports the Google Cloud client library
  const {Storage} = require('@google-cloud/storage');

  /**
   * TODO(developer): Uncomment the following lines before running the sample.
   */
  // const bucketName = 'Name of a bucket, e.g. my-bucket';
  // const roleName = 'Role to grant, e.g. roles/storage.objectViewer';
  // const members = [
  //   'user:jdoe@example.com', // Example members to grant
  //   'group:admins@example.com', // the new role to
  // ];

  // Creates a client
  const storage = new Storage();

  async function addBucketIamMember() {
    // Get a reference to a Google Cloud Storage bucket
    const bucket = storage.bucket(bucketName);

    // Gets and updates the bucket's IAM policy
    const [policy] = await bucket.iam.getPolicy();

    // Adds the new roles to the bucket's IAM policy
    policy.bindings.push({
      role: roleName,
      members: members,
    });

    // Updates the bucket's IAM policy
    await bucket.iam.setPolicy(policy);

    console.log(`Added the following member(s) with role ${roleName} to ${bucketName}:`);
    members.forEach(member => console.log(`  ${member}`));
  }

  addBucketIamMember().catch(console.error);
  // [END storage_add_bucket_iam_member]
}
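// Verification sketch (an addition, not part of the original sample): once
// setPolicy resolves, the bindings can be read back with iam.getPolicy() to
// confirm the grant. Assumes the same bucket name as above.
async function printBucketPolicy(bucketName) {
  const {Storage} = require('@google-cloud/storage');
  const storage = new Storage();
  const [policy] = await storage.bucket(bucketName).iam.getPolicy();
  for (const binding of policy.bindings) {
    console.log(`${binding.role}: ${binding.members.join(', ')}`);
  }
}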
function main(bucketName = 'my-bucket', filename = 'file.txt') {
  // [START storage_set_metadata]
  // Imports the Google Cloud client library
  const {Storage} = require('@google-cloud/storage');

  // Creates a client
  const storage = new Storage();

  /**
   * TODO(developer): Uncomment the following lines before running the sample.
   */
  // const bucketName = 'Name of a bucket, e.g. my-bucket';
  // const filename = 'File to access, e.g. file.txt';

  async function setFileMetadata() {
    // Set file metadata.
    const [metadata] = await storage
      .bucket(bucketName)
      .file(filename)
      .setMetadata({
        // Predefined metadata for the server, e.g. 'cacheControl',
        // 'contentDisposition', 'contentEncoding', 'contentLanguage',
        // 'contentType'
        contentDisposition: 'attachment; filename*=utf-8\'\'"anotherImage.jpg"',
        contentType: 'image/jpeg',
      });

    console.log(`Updated metadata for object ${filename} in bucket ${bucketName}.`);
    console.log(metadata);
  }

  setFileMetadata().catch(console.error);
  // [END storage_set_metadata]
}
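// Read-back sketch (an addition, not part of the original sample):
// file.getMetadata() returns the stored metadata, which is a quick way to
// confirm the update above took effect. Assumes the same object as above.
async function printFileMetadata(bucketName, filename) {
  const {Storage} = require('@google-cloud/storage');
  const storage = new Storage();
  const [metadata] = await storage
    .bucket(bucketName)
    .file(filename)
    .getMetadata();
  console.log(`contentType: ${metadata.contentType}`);
  console.log(`contentDisposition: ${metadata.contentDisposition}`);
}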
function main(datasetId = 'my_dataset', tableId = 'my_table') {
  // [START bigquery_load_table_gcs_orc]
  // Import the Google Cloud client libraries
  const {BigQuery} = require('@google-cloud/bigquery');
  const {Storage} = require('@google-cloud/storage');

  // Instantiate clients
  const bigquery = new BigQuery();
  const storage = new Storage();

  /**
   * This sample loads the ORC file at
   * https://storage.googleapis.com/cloud-samples-data/bigquery/us-states/us-states.orc
   *
   * TODO(developer): Replace the following lines with the path to your file.
   */
  const bucketName = 'cloud-samples-data';
  const filename = 'bigquery/us-states/us-states.orc';

  async function loadTableGCSORC() {
    // Imports a GCS file into a table with ORC source format.
    /**
     * TODO(developer): Uncomment the following lines before running the sample.
     */
    // const datasetId = 'my_dataset';
    // const tableId = 'my_table';

    // Location must match that of the destination table.
    const metadata = {
      sourceFormat: 'ORC',
      location: 'US',
    };

    // Load data from the Cloud Storage file into the table;
    // load() waits for the job to finish.
    const [job] = await bigquery
      .dataset(datasetId)
      .table(tableId)
      .load(storage.bucket(bucketName).file(filename), metadata);
    console.log(`Job ${job.id} completed.`);
  }

  loadTableGCSORC().catch(console.error);
  // [END bigquery_load_table_gcs_orc]
}
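// Follow-up sketch (an addition, not part of the original sample): query the
// destination table to spot-check the loaded rows. Assumes the dataset and
// table used by the sample above.
async function previewLoadedRows(datasetId, tableId) {
  const {BigQuery} = require('@google-cloud/bigquery');
  const bigquery = new BigQuery();
  const [rows] = await bigquery.query({
    query: `SELECT * FROM \`${datasetId}.${tableId}\` LIMIT 5`,
    location: 'US', // must match the table's location
  });
  rows.forEach(row => console.log(row));
}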
'use strict';
const config = require('../config');
const {Storage} = require('@google-cloud/storage');

const GCLOUD_PROJECT = config.get('GCLOUD_PROJECT');
const GCLOUD_BUCKET = config.get('GCLOUD_BUCKET');

// The project id must be passed under the `projectId` key.
const storage = new Storage({projectId: GCLOUD_PROJECT});
const bucket = storage.bucket(GCLOUD_BUCKET);

// Express middleware that will automatically pass uploads to Cloud Storage.
// req.file is processed and will have a new property:
// * ``cloudStoragePublicUrl`` the public url to the object.
// [START process]
function sendUploadToGCS(req, res, next) {
  if (!req.file) {
    return next();
  }

  const oname = Date.now() + req.file.originalname;
  const file = bucket.file(oname);
  const stream = file.createWriteStream({
    metadata: {contentType: req.file.mimetype},
  });

  stream.on('error', err => next(err));
  stream.on('finish', () => {
    req.file.cloudStoragePublicUrl = `https://storage.googleapis.com/${bucket.name}/${oname}`;
    next();
  });

  // req.file.buffer is populated by multer's in-memory storage (see below).
  stream.end(req.file.buffer);
}
// [END process]
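// Usage sketch (an addition, not part of the original file): the middleware
// above expects req.file from multer's in-memory storage, so a typical route
// wires the two together. The route path and field name are assumptions.
const express = require('express');
const multer = require('multer');

const app = express();
const upload = multer({storage: multer.memoryStorage()});

app.post('/upload', upload.single('file'), sendUploadToGCS, (req, res) => {
  res.send(`Uploaded to ${req.file.cloudStoragePublicUrl}`);
});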
// Note: the Promise's type argument and everything after the download are
// assumptions; the original fragment was truncated at both points. Assumes
// `readFileSync` from 'fs' and the Storage/KMS imports from the original file.
async getProbotConfig(): Promise<object> {
  const storage = new Storage();
  const kmsclient = new KMS.KeyManagementServiceClient();
  const destFileName = '/tmp/creds.json';
  const bucketName = process.env.DRIFT_PRO_BUCKET || '';
  const srcFilename = process.env.GCF_SHORT_FUNCTION_NAME || '';
  const options = {
    destination: destFileName,
  };

  // Downloads the encrypted credentials file to local disk
  await storage
    .bucket(bucketName)
    .file(srcFilename)
    .download(options);

  // Plausible continuation (assumed): decrypt with Cloud KMS and parse as
  // the Probot config; the key path values below are placeholders.
  const name = kmsclient.cryptoKeyPath('my-project', 'global', 'my-keyring', 'my-key');
  const ciphertext = readFileSync(destFileName);
  const [result] = await kmsclient.decrypt({name, ciphertext});
  return JSON.parse(result.plaintext!.toString());
}
function main(
  datasetId = 'my_dataset',
  tableId = 'my_table',
  bucketName = 'my-bucket',
  filename = 'file.json'
) {
  // [START bigquery_extract_table_json]
  // Import the Google Cloud client libraries
  const {BigQuery} = require('@google-cloud/bigquery');
  const {Storage} = require('@google-cloud/storage');

  const bigquery = new BigQuery();
  const storage = new Storage();

  async function extractTableJSON() {
    // Exports my_dataset:my_table to gcs://my-bucket/my-file as JSON.

    /**
     * TODO(developer): Uncomment the following lines before running the sample.
     */
    // const datasetId = "my_dataset";
    // const tableId = "my_table";
    // const bucketName = "my-bucket";
    // const filename = "file.json";

    // Location must match that of the source table.
    const options = {
      format: 'json',
      location: 'US',
    };

    // Export the table into a Google Cloud Storage file
    const [job] = await bigquery
      .dataset(datasetId)
      .table(tableId)
      .extract(storage.bucket(bucketName).file(filename), options);
    console.log(`Job ${job.id} created.`);

    // Check the job's status for errors
    const errors = job.status.errors;
    if (errors && errors.length > 0) {
      throw errors;
    }
  }

  extractTableJSON().catch(console.error);
  // [END bigquery_extract_table_json]
}
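// Variant sketch (an addition, not part of the original sample): the extract
// API also supports compressed output; passing gzip: true writes a gzipped
// file. Shown under the same dataset/table/bucket assumptions as above.
async function extractTableCompressed(datasetId, tableId, bucketName, filename) {
  const {BigQuery} = require('@google-cloud/bigquery');
  const {Storage} = require('@google-cloud/storage');
  const bigquery = new BigQuery();
  const storage = new Storage();
  const options = {
    format: 'json',
    gzip: true,
    location: 'US', // must match the source table
  };
  const [job] = await bigquery
    .dataset(datasetId)
    .table(tableId)
    .extract(storage.bucket(bucketName).file(filename), options);
  console.log(`Job ${job.id} created.`);
}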
const path = require('path');

const GCSDataStore = require('../lib/stores/GCSDataStore');
const {Storage} = require('@google-cloud/storage');
const File = require('../lib/models/File');
const ERRORS = require('../lib/constants').ERRORS;
const EVENTS = require('../lib/constants').EVENTS;

const STORE_PATH = '/files';
const PROJECT_ID = 'vimeo-open-source';
const KEYFILE = path.resolve(__dirname, '../keyfile.json');
const BUCKET = 'tus-node-server';
const TEST_FILE_SIZE = 960244;
const TEST_FILE_PATH = path.resolve(__dirname, 'test.mp4');
const FILE_ALREADY_IN_BUCKET = 'dont_delete_this_file.mp4';

const gcs = new Storage({
  projectId: PROJECT_ID,
  keyFilename: KEYFILE,
});

const bucket = gcs.bucket(BUCKET);
const deleteFile = (file_name) => {
  return new Promise((resolve, reject) => {
    console.log(`[GCLOUD] Deleting ${file_name} from ${bucket.name} bucket`);
    // Resolves whether or not the delete succeeds; a missing file is not
    // treated as a test failure here.
    bucket.file(file_name).delete((err, res) => {
      resolve(res);
    });
  });
};

describe('GCSDataStore', () => {
  if (process.env.TRAVIS_SECURE_ENV_VARS !== 'true') {
    // Skip the suite when CI credentials are unavailable (the guard body is
    // assumed; the original fragment was truncated here).
    return;
  }

  // ... the individual test cases follow in the original file.
});
// Fragment from a pkgcloud-style client; `google`, `_` (lodash), and
// `Storage` are assumed to be required earlier in the original file.
var Client = exports.Client = function (options) {
  google.Client.call(this, options);

  _.extend(this, require('./containers'));
  _.extend(this, require('./files'));

  this.storage = new Storage(options);
};

// Class-method fragment: builds a Storage client from explicit credentials
// rather than application-default credentials.
getStorage_(options) {
  return new Storage({
    projectId: options.projectId,
    keyFilename: options.keyFilename,
  });
}