How to use aws-sdk - 10 common examples

To help you get started, we've selected a few aws-sdk examples based on common ways the library is used in public projects.


github badunk / multer-s3 / test / integration / express.spec.js
// Dependencies used by these test fixtures
var AWS = require('aws-sdk')
var multer = require('multer')
var multerS3 = require('multer-s3')

// Basic upload fixture pointed at a local S3-compatible endpoint on port 4568
var upload = multer({storage: multerS3({
  bucket: 'some-bucket',
  secretAccessKey: 'some secret',
  accessKeyId: 'some key',
  region: 'us-east-1',
  s3ForcePathStyle: true,
  endpoint: new AWS.Endpoint('http://localhost:4568')
})})

// Same fixture, but let multer-s3 detect the Content-Type automatically
var uploadAuto = multer({storage: multerS3({
  bucket: 'some-bucket',
  secretAccessKey: 'some secret',
  accessKeyId: 'some key',
  region: 'us-east-1',
  s3ForcePathStyle: true,
  endpoint: new AWS.Endpoint('http://localhost:4568'),
  contentType: multerS3.AUTO_CONTENT_TYPE
})})

// Same fixture with a custom key-naming function
var uploadCustomKey = multer({storage: multerS3({
  bucket: 'some-bucket',
  secretAccessKey: 'some secret',
  accessKeyId: 'some key',
  region: 'us-east-1',
  s3ForcePathStyle: true,
  endpoint: new AWS.Endpoint('http://localhost:4568'),
  key: function (req, file, cb) {
    cb(null, 'key-name')
  }
})})

// express setup
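The excerpt cuts off at the express setup, but wiring one of these storage engines into a route is short. Here is a minimal sketch assuming the `upload` instance above; the route path and field name are illustrative.

var express = require('express')
var app = express()

// 'file' is the multipart form field name; both the path and the field are illustrative
app.post('/upload', upload.single('file'), function (req, res) {
  // multer-s3 records the stored object's details on req.file
  res.json({ key: req.file.key, location: req.file.location })
})

app.listen(3000)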
github OpenUserJS / OpenUserJS.org / controllers / scriptStorage.js
          // Check hash here against old and don't increment Script model date if same.
          // Allows sync reset for GH and resave/reset to S3 if needed
          // Covers issue with GitHub cache serving old raw
          if (script.hash !== aScript.hash) {
            aScript.updated = new Date();
          }

          if (findMeta(script.meta, 'UserScript.version.0.value') !==
            findMeta(aMeta, 'UserScript.version.0.value')) {

            aScript.installsSinceUpdate = 0;
          }
        }

        // Attempt to write out data to externals...
        s3 = new AWS.S3();
        if (s3) { // NOTE: Should be a noop
          s3.putObject({
            Bucket: bucketName,
            Key: installName,
            Body: aBuf

          }, function (aErr, aData) {
            if (aErr) {
              // Forward the error
              aScript.invalidate('_id', aErr);

              // Localize the error
              console.error(
                'S3 putObject critical error\n' +
                  installName + '\n' +
                    JSON.stringify(aErr, null, ' '));
            }
          });
        }
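Stripped of the surrounding model bookkeeping, the S3 call above is a plain putObject. Here is a minimal standalone sketch; the bucket and key are placeholders.

var AWS = require('aws-sdk');
var s3 = new AWS.S3();

// Write a buffer to S3; Bucket and Key are illustrative placeholders.
s3.putObject({
  Bucket: 'my-bucket',
  Key: 'scripts/example.user.js',
  Body: Buffer.from('// script source')
}, function (aErr, aData) {
  if (aErr) {
    return console.error('putObject failed:', aErr);
  }
  console.log('putObject succeeded, ETag:', aData.ETag);
});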
github taskcluster / taskcluster / services / queue / src / bucket.js
  assert(options.credentials, 'credentials must be specified');
  assert(!options.bucketCDN || typeof options.bucketCDN === 'string',
    'Expected bucketCDN to be a hostname or empty string for none');
  assert(options.monitor, 'options.monitor is required');
  if (options.bucketCDN) {
    assert(/^https?:\/\//.test(options.bucketCDN), 'bucketCDN must be http(s)');
    assert(/[^\/]$/.test(options.bucketCDN),
      'bucketCDN shouldn\'t end with slash');
  }
  // Store the monitor
  this.monitor = options.monitor;
  // Ensure access to the bucket property
  this.bucket = options.bucket;
  // Create S3 client
  if (!options.credentials.mock) {
    this.s3 = new aws.S3(_.defaults({
      params: {
        Bucket: options.bucket,
      },
    }, options.credentials));
  } else {
    this.s3 = options.credentials.mock;
  }
  // Store bucket CDN
  this.bucketCDN = options.bucketCDN;
};
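The detail worth copying from this constructor is the params option: aws-sdk v2 lets you bind default request parameters to a client, so later calls can omit them. A minimal sketch:

const AWS = require('aws-sdk');

// Every request made by this client defaults to Bucket: 'my-bucket'.
const s3 = new AWS.S3({ params: { Bucket: 'my-bucket' } });

// No Bucket needed here; the bound default is used.
s3.getObject({ Key: 'some/key.txt' }, (err, data) => {
  if (err) return console.error(err);
  console.log(data.Body.toString('utf8'));
});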
github PRX / Infrastructure / cd / lambdas / infrastructure-s3-sync / index.js
// object prefix of the Git commit hash.
// TODO currently this is using `latest` as the prefix
//
// This function also creates an output artifact, which is a zipped JSON file
// that contains the Git commit hash of the input repo artifact. The destination
// bucket for that is the native CodePipeline artifact store bucket.
//
// This should always callback to the CodePipeline API to indicate success or
// failure.

const AWS = require('aws-sdk');
const fs = require('fs');
const unzip = require('unzipper');
const JSZip = require("jszip");

const s3 = new AWS.S3({apiVersion: '2006-03-01'});
const codepipeline = new AWS.CodePipeline();

exports.handler = (event, context, callback) => {
    const job = event['CodePipeline.job'];

    try {
        console.log('Starting sync...');

        const sync = syncCode(job, context, callback);
        const tag = publishRevision(job, context, callback);

        Promise.all([tag, sync])
            .then(() => {
                console.log('...Notifying CodePipeline job of success!');
                codepipeline.putJobSuccessResult({ jobId: job.id }, (e, d) => {
                    callback(null, '...Done!');
                });
            })
            .catch(e => callback(e));
    } catch (e) {
        callback(e);
    }
};
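As the header comment says, a pipeline Lambda should always report back to CodePipeline. Here is a hedged sketch of the success/failure pair; the failure branch is not visible in the excerpt, so its details are assumptions.

// Report a job result to CodePipeline; jobId comes from
// event['CodePipeline.job'].id inside the handler.
function reportResult(jobId, err) {
    if (!err) {
        return codepipeline.putJobSuccessResult({ jobId }).promise();
    }
    return codepipeline.putJobFailureResult({
        jobId,
        failureDetails: { type: 'JobFailed', message: String(err) },
    }).promise();
}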
github GoogleContainerTools / kpt / docsy / node_modules / fsevents / node_modules / node-pre-gyp / lib / publish.js
        s3.headObject(s3_opts, function(err, meta){
            if (meta) log.info('publish', JSON.stringify(meta));
            if (err && err.code == 'NotFound') {
                // we are safe to publish because
                // the object does not already exist
                log.info('publish', 'Preparing to put object');
                var s3_put = new AWS.S3();
                var s3_put_opts = {  ACL: config.acl,
                                     Body: fs.createReadStream(tarball),
                                     Bucket: config.bucket,
                                     Key: key_name
                                  };
                log.info('publish', 'Putting object');
                try {
                    s3_put.putObject(s3_put_opts, function(err, resp){
                        log.info('publish', 'returned from putting object');
                        if(err) {
                           log.info('publish', 's3 putObject error: "' + err + '"');
                           return callback(err);
                        }
                        if (resp) log.info('publish', 's3 putObject response: "' + JSON.stringify(resp) + '"');
                        log.info('publish', 'successfully put object');
                        console.log('['+package_json.name+'] published to ' + remote_package);
                        return callback();
                    });
                } catch (err) {
                    return callback(err);
                }
            }
        });
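The idiom here is an existence check: headObject fails with code 'NotFound' when the key is absent, and that is the signal that publishing is safe. The same check as a compact standalone sketch:

var AWS = require('aws-sdk');
var s3 = new AWS.S3();

// Calls back with true if the object exists, false if S3 reports NotFound.
function objectExists(bucket, key, callback) {
    s3.headObject({ Bucket: bucket, Key: key }, function(err) {
        if (!err) return callback(null, true);
        if (err.code == 'NotFound') return callback(null, false);
        callback(err); // some other failure (permissions, network, ...)
    });
}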
github lupyuen / AWSIOT / nodejs / ProcessSIGFOXMessage.js
//     ],
//     "Resource": [
//       "*"
//     ]
//   }
// ]
// }

'use strict';

console.log('Loading function');

//  Init the AWS connection.
const AWS = require('aws-sdk');
AWS.config.region = 'us-west-2';
AWS.config.logger = process.stdout;  //  Debug

if (!process.env.LAMBDA_TASK_ROOT) {
  //  For unit test, set the credentials.
  const config = require('os').platform() === 'win32' ?
    require('../../../unabiz-emulator/config.json') :
    require('../../../../SIGFOX/unabiz-emulator/config.json');
  AWS.config.credentials = {
    accessKeyId: config.accessKeyId,
    secretAccessKey: config.secretAccessKey,
  };
}
//  Use AWS command line "aws iot describe-endpoint" to get the endpoint address.
const endpoint = 'A1P01IYM2DOZA0.iot.us-west-2.amazonaws.com';
//  Open the AWS IoT connection with the endpoint.
const iotdata = new AWS.IotData({ endpoint });
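With the client above pointed at the account's endpoint, publishing to an MQTT topic is a single call. This sketch reuses iotdata from the excerpt; the topic name and payload are illustrative.

//  Publish a JSON payload to an MQTT topic via the AWS IoT data plane.
iotdata.publish({
  topic: 'sigfox/received',  //  Illustrative topic name.
  qos: 0,
  payload: JSON.stringify({ device: '2C30EB', temperature: 23.5 }),
}, (err) => {
  if (err) return console.error('IoT publish failed:', err);
  console.log('IoT publish OK');
});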
github serverless / serverless-graphql / app-backend / appsync / dynamo / deploy-dynamo.js
// Load the SDK for JavaScript
const AWS = require('aws-sdk');
const fs = require('fs');

// Set the region
AWS.config.update({ region: 'us-east-1' });
AWS.config.setPromisesDependency(require('bluebird'));

const appsync = new AWS.AppSync({ apiVersion: '2017-07-25' });

// For creating User Pool: Reference https://serverless-stack.com/chapters/create-a-cognito-user-pool.html
// API key is not recommended for security.

const graphQLAPIName = '...'; // your graphQL API Name
const awsRegion = 'us-east-1'; // AWS Region ex - us-east-1
const userPoolId = '...'; // Your Cognito User Pool Id
const roleName = 'Dynamo-AppSyncServiceRole';
const accountId = '...';
const serviceRole = `arn:aws:iam::${accountId}:role/${roleName}`; // Service IAM Role for appsync to access data sources
const MAX_RETRIES = 20;
let appId;

function wait(timeout) {
  // Body truncated in the excerpt; a minimal completion: resolve after `timeout` ms.
  return new Promise(resolve => setTimeout(resolve, timeout));
}
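The constants above feed the API-creation call a deploy script like this builds up to. A hedged sketch, assuming Cognito user-pool auth as the comments suggest:

// Create the GraphQL API secured by the Cognito User Pool.
appsync
  .createGraphqlApi({
    name: graphQLAPIName,
    authenticationType: 'AMAZON_COGNITO_USER_POOLS',
    userPoolConfig: {
      userPoolId,
      awsRegion,
      defaultAction: 'ALLOW',
    },
  })
  .promise()
  .then(data => {
    appId = data.graphqlApi.apiId; // kept for follow-up schema and data-source calls
    console.log('Created API:', appId);
  })
  .catch(console.error);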
github architect / architect / src / create / aws / create-ws-router / create / route.js
module.exports = function route({api, env, name, region, account, RouteKey}, callback) {

  let gateway = new aws.ApiGatewayV2({region})
  let lambda = new aws.Lambda({region})

  let arn = `arn:aws:lambda:${region}:${account}:function:${name}-${env}-ws-${RouteKey.replace('$', '')}`

  // used later
  let integrationId
  waterfall([

    /**
     * setup the integration
     */
    function createIntegration(callback) {
      setTimeout(function throttle() {
        let uri = `arn:aws:apigateway:${region}:lambda:path/2015-03-31/functions/${arn}/invocations`
        // console.log(api)
        gateway.createIntegration({
          ApiId: api.ApiId,
          IntegrationType: 'AWS_PROXY', // assumption: Lambda proxy integration
          IntegrationUri: uri
        }, function done(err, result) {
          if (err) return callback(err)
          integrationId = result.IntegrationId // saved for the later route step
          callback()
        })
      })
    },
    // ...further waterfall steps elided
  ], callback)
}
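For context, here is a hedged sketch of the follow-up step that typically consumes the saved integrationId; the real step is not shown in the excerpt, and the names follow ApiGatewayV2's createRoute API.

function createRoute(callback) {
  // Attach the integration to the WebSocket route key
  gateway.createRoute({
    ApiId: api.ApiId,
    RouteKey, // e.g. '$connect' or '$default'
    Target: `integrations/${integrationId}`,
  }, callback)
}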
github devonfw / my-thai-star / serverless (deprecated) / database / deleteTables.ts
        creds = new AWS.Credentials('akid', 'secret', 'session');
        conf = {
            credentials: creds,
            endpoint: databaseURL,
            region: 'us-west-2',
        };
    } else {
        creds = new AWS.Credentials('akid2', 'secret2', 'session2');
        conf = {
            credentials: creds,
            endpoint: databaseURL,
            region: 'us-west-2',
        };
    }

    const dynamodb = new AWS.DynamoDB(conf);

    dynamodb.listTables().eachPage((err, data) => {
        if (err) {
            console.error(err); // an error occurred
            return false;
        } else if (data && data.TableNames) {
            data.TableNames.map((elem) => {
                return {
                    TableName: elem,
                };
            }).forEach((params) => {
                dynamodb.deleteTable(params, (err2: Error, data2: any) => {
                    if (err2) {
                        console.error('Unable to delete table. Error JSON:', JSON.stringify(err2, null, 2));
                    }
                });
            });
        }
        return true; // continue to the next page of table names
    });
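The same cleanup reads more directly with the SDK's promise adapters and explicit pagination. A hedged sketch reusing the dynamodb client from the excerpt; it is destructive, so it is meant for local test databases only.

// Delete every table visible to these credentials, page by page.
async function deleteAllTables() {
    let ExclusiveStartTableName;
    do {
        const page = await dynamodb.listTables({ ExclusiveStartTableName }).promise();
        for (const TableName of page.TableNames || []) {
            await dynamodb.deleteTable({ TableName }).promise();
            console.log('Deleted table', TableName);
        }
        ExclusiveStartTableName = page.LastEvaluatedTableName;
    } while (ExclusiveStartTableName);
}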
github repo-analytics / repo-analytics.github.io / backend / db / dynamodb.js
const AWS = require('aws-sdk');

// Local development reads credentials from a git-ignored secret.json;
// on GitHub Actions the same values come from environment secrets.
let config;
try {
  config = require('../secret.json');
} catch (error) {
  console.log('no secret json, on github action')
}

const awsConfig = {
  region: 'us-east-1',
  // process.env.awsAccessKeyId in action secret
  accessKeyId: process.env.awsAccessKeyId ? process.env.awsAccessKeyId : config.aws.accessKeyId,
  secretAccessKey: process.env.awsSecretAccessKey ? process.env.awsSecretAccessKey : config.aws.secretAccessKey,
  signatureVersion: 'v4',
};

AWS.config.update(awsConfig);

const dynamodb = new AWS.DynamoDB();

module.exports = dynamodb;
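One closing note: new AWS.DynamoDB() is the low-level client, which expects typed attribute values such as { S: 'text' }. For plain JavaScript objects, most code layers AWS.DynamoDB.DocumentClient on top. A sketch with an illustrative table and item:

const AWS = require('aws-sdk');
const doc = new AWS.DynamoDB.DocumentClient({ region: 'us-east-1' });

// DocumentClient marshals plain JS values, so no attribute-type wrappers.
doc.put({
  TableName: 'repo-stats', // illustrative table name
  Item: { repo: 'owner/name', views: 42 },
}).promise()
  .then(() => console.log('item written'))
  .catch(console.error);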