How to use the aws-sdk.S3 function in aws-sdk

To help you get started, we’ve selected a few aws-sdk examples based on popular ways aws-sdk.S3 is used in public projects.
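
Most of the snippets below share one pattern: construct an S3 client with new AWS.S3() and hand each operation a params object plus a Node-style callback. A minimal sketch of that pattern (the bucket name, key, and body are placeholders, not taken from any of the projects below):

const AWS = require('aws-sdk');

// Region and credentials are resolved the usual SDK way
// (environment variables, shared config, or AWS.config.update).
const s3 = new AWS.S3({ apiVersion: '2006-03-01' });

s3.putObject({
  Bucket: 'example-bucket', // placeholder bucket
  Key: 'example/key.txt',   // placeholder key
  Body: 'hello world'
}, function (err, data) {
  if (err) {
    console.error('putObject failed:', err);
    return;
  }
  console.log('putObject succeeded, ETag:', data.ETag);
});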


github OpenUserJS / OpenUserJS.org / controllers / scriptStorage.js
          // Check hash here against old and don't increment Script model date if same.
          // Allows sync reset for GH and resave/reset to S3 if needed
          // Covers issue with GitHub cache serving old raw
          if (script.hash !== aScript.hash) {
            aScript.updated = new Date();
          }

          if (findMeta(script.meta, 'UserScript.version.0.value') !==
            findMeta(aMeta, 'UserScript.version.0.value')) {

            aScript.installsSinceUpdate = 0;
          }
        }

        // Attempt to write out data to externals...
        s3 = new AWS.S3();
        if (s3) { // NOTE: Should be a noop
          s3.putObject({
            Bucket: bucketName,
            Key: installName,
            Body: aBuf

          }, function (aErr, aData) {
            if (aErr) {
              // Forward the error
              aScript.invalidate('_id', aErr);

              // Localize the error
              console.error(
                'S3 putObject critical error\n' +
                  installName + '\n' +
                    JSON.stringify(aErr, null, ' ') + '\n' +
github taskcluster / taskcluster / services / queue / src / bucket.js
  assert(options.credentials, 'credentials must be specified');
  assert(!options.bucketCDN || typeof options.bucketCDN === 'string',
    'Expected bucketCDN to be a hostname or empty string for none');
  assert(options.monitor, 'options.monitor is required');
  if (options.bucketCDN) {
    assert(/^https?:\/\//.test(options.bucketCDN), 'bucketCDN must be http(s)');
    assert(/[^\/]$/.test(options.bucketCDN),
      'bucketCDN shouldn\'t end with slash');
  }
  // Store the monitor
  this.monitor = options.monitor;
  // Ensure access to the bucket property
  this.bucket = options.bucket;
  // Create S3 client
  if (!options.credentials.mock) {
    this.s3 = new aws.S3(_.defaults({
      params: {
        Bucket: options.bucket,
      },
    }, options.credentials));
  } else {
    this.s3 = options.credentials.mock;
  }
  // Store bucket CDN
  this.bucketCDN = options.bucketCDN;
};
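
The constructor above passes params: { Bucket: options.bucket } when building the client, which makes the bucket a bound default for every operation on that client. A rough sketch of the same idea outside the taskcluster code (the bucket name and key are placeholders):

const AWS = require('aws-sdk');

// Bucket becomes a bound parameter: operations on this client use it
// automatically unless an individual call overrides it.
const s3 = new AWS.S3({ params: { Bucket: 'example-bucket' } });

// No Bucket in the request params; the bound default is applied.
s3.getObject({ Key: 'artifacts/output.json' }, function (err, data) {
  if (err) return console.error('getObject failed:', err);
  console.log('fetched', data.ContentLength, 'bytes');
});
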
github PRX / Infrastructure / cd / lambdas / infrastructure-s3-sync / index.js
// object prefix of the Git commit hash.
// TODO currently this is using `latest` as the prefix
//
// This function also creates an output artifact, which is a zipped JSON file
// that contains the Git commit hash of the input repo artifact. The destination
// bucket for that is the native CodePipeline artifact store bucket.
//
// This should always callback to the CodePipeline API to indicate success or
// failure.

const AWS = require('aws-sdk');
const fs = require('fs');
const unzip = require('unzipper');
const JSZip = require("jszip");

const s3 = new AWS.S3({apiVersion: '2006-03-01'});
const codepipeline = new AWS.CodePipeline();

exports.handler = (event, context, callback) => {
    const job = event['CodePipeline.job'];

    try {
        console.log('Starting sync...');

        const sync = syncCode(job, context, callback);
        const tag = publishRevision(job, context, callback);

        Promise.all([tag, sync])
            .then(() => {
                console.log('...Notifying CodePipeline job of success!');
                codepipeline.putJobSuccessResult({ jobId: job.id }, (e, d) => {
                    callback(null, '...Done!');
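
The excerpt is truncated before the failure branch, but as the comment above says, the handler should always report back to CodePipeline. A sketch of what the failure side of that contract can look like (the helper name and message text are illustrative, not copied from the project):

const AWS = require('aws-sdk');
const codepipeline = new AWS.CodePipeline();

// Report a failed job back to CodePipeline so the pipeline doesn't hang.
function failPipelineJob(job, err, callback) {
  codepipeline.putJobFailureResult({
    jobId: job.id,
    failureDetails: {
      message: 'Sync failed: ' + err.message,
      type: 'JobFailed'
    }
  }, function (e) {
    if (e) return callback(e);
    callback(err);
  });
}
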
github GoogleContainerTools / kpt / docsy / node_modules / fsevents / node_modules / node-pre-gyp / lib / publish.js
s3.headObject(s3_opts, function(err, meta){
            if (meta) log.info('publish', JSON.stringify(meta));
            if (err && err.code == 'NotFound') {
                // we are safe to publish because
                // the object does not already exist
                log.info('publish', 'Preparing to put object');
                var s3_put = new AWS.S3();
                var s3_put_opts = {  ACL: config.acl,
                                     Body: fs.createReadStream(tarball),
                                     Bucket: config.bucket,
                                     Key: key_name
                                  };
                log.info('publish', 'Putting object');
                try {
                    s3_put.putObject(s3_put_opts, function(err, resp){
                        log.info('publish', 'returned from putting object');
                        if(err) {
                           log.info('publish', 's3 putObject error: "' + err + '"');
                           return callback(err);
                        }
                        if (resp) log.info('publish', 's3 putObject response: "' + JSON.stringify(resp) + '"');
                        log.info('publish', 'successfully put object');
                        console.log('['+package_json.name+'] published to ' + remote_package);
github evanchiu / serverless-galleria / uploader / src / index.js
const aws = require('aws-sdk');
const fs = require('fs');
const mime = require('mime-types');
const path = require('path');
const s3 = new aws.S3();

const destBucket = process.env.DEST_BUCKET;

exports.handler = main;

function main(event, context, lambdaCallback) {
  // Fail on missing data
  if (!destBucket) {
    context.fail('Error: Environment variable DEST_BUCKET missing');
    return;
  }

  if (event.path.startsWith('/api/file/')) {
    return fileRoute(event, context, lambdaCallback);
  } else {
    return servePublic(event, context, lambdaCallback);
github shishirsharma / MyS3Browser / src / app / navbar-dropdown-menu-link / navbar-dropdown-menu-link.component.ts
.subscribe(credential => {
        console.log('navbar-dropdown-menu-link.component#ngOnInit: Observable', credential);

        AWS.config.update({
          credentials: new AWS.Credentials(credential.access_key_id, credential.secret_access_key)
        });
        AWS.config.region = credential.s3_region;
        let s3 = new AWS.S3();

        this.awsS3Service.listBuckets(s3, (error, buckets) => {
          this.buckets  = buckets;
        });
      });
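
Here awsS3Service.listBuckets is the project's own wrapper; the underlying SDK call it presumably delegates to looks roughly like this (credentials and region are placeholders):

const AWS = require('aws-sdk');

AWS.config.update({
  credentials: new AWS.Credentials('ACCESS_KEY_ID', 'SECRET_ACCESS_KEY'), // placeholders
  region: 'us-east-1' // placeholder region
});

const s3 = new AWS.S3();

// data.Buckets is an array of { Name, CreationDate } objects.
s3.listBuckets(function (err, data) {
  if (err) return console.error('listBuckets failed:', err);
  console.log(data.Buckets.map(function (b) { return b.Name; }));
});
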
github edutec / Snap4Arduino-old-huge / modules / gnu / node_modules / serialport / node_modules / node-pre-gyp / lib / unpublish.js
function unpublish(gyp, argv, callback) {
    var AWS = require("aws-sdk");
    var package_json = JSON.parse(fs.readFileSync('./package.json'));
    var opts = versioning.evaluate(package_json, gyp.opts);
    s3_setup.detect(opts.hosted_path,config);
    AWS.config.update(config);
    var key_name = url.resolve(config.prefix,opts.package_name);
    var s3 =  new AWS.S3();
    var s3_opts = {  Bucket: config.bucket,
                     Key: key_name
                  };
    s3.headObject(s3_opts, function(err, meta) {
        if (err && err.code == 'NotFound') {
            console.log('['+package_json.name+'] Not found: https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + s3_opts.Key);
            return callback();
        } else if(err) {
            return callback(err);
        } else {
            log.info('unpublish', JSON.stringify(meta));
            s3.deleteObject(s3_opts, function(err, resp) {
                if (err) return callback(err);
                log.info(JSON.stringify(resp));
                console.log('['+package_json.name+'] Success: removed https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + s3_opts.Key);
                return callback();
github andrewrk / node-s3-client / lib / index.js
function Client(options) {
  options = options || {};
  this.s3 = options.s3Client || new AWS.S3(options.s3Options);
  this.s3Pend = new Pend();
  this.s3Pend.max = options.maxAsyncS3 || 20;
  this.s3RetryCount = options.s3RetryCount || 3;
  this.s3RetryDelay = options.s3RetryDelay || 1000;
  this.multipartUploadThreshold = options.multipartUploadThreshold || (20 * 1024 * 1024);
  this.multipartUploadSize = options.multipartUploadSize || (15 * 1024 * 1024);
  this.multipartDownloadThreshold = options.multipartDownloadThreshold || (20 * 1024 * 1024);
  this.multipartDownloadSize = options.multipartDownloadSize || (15 * 1024 * 1024);

  if (this.multipartUploadThreshold < MIN_MULTIPART_SIZE) {
    throw new Error("Minimum multipartUploadThreshold is 5MB.");
  }
  if (this.multipartUploadThreshold > MAX_PUTOBJECT_SIZE) {
    throw new Error("Maximum multipartUploadThreshold is 5GB.");
  }
  if (this.multipartUploadSize < MIN_MULTIPART_SIZE) {
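
The excerpt above (cut off mid-validation) comes from the node-s3-client package, which is typically consumed through a createClient factory rather than by constructing Client directly; treat that entry point and the npm package name as assumptions if you are vendoring the file. A usage sketch based on the options the constructor accepts (credential values are placeholders):

// Assumes the package is installed from npm as "s3".
const s3lib = require('s3');

const client = s3lib.createClient({
  maxAsyncS3: 20,                              // matches the default shown above
  multipartUploadThreshold: 20 * 1024 * 1024,
  multipartUploadSize: 15 * 1024 * 1024,
  s3Options: {
    accessKeyId: 'ACCESS_KEY_ID',              // placeholder
    secretAccessKey: 'SECRET_ACCESS_KEY'       // placeholder
  }
});

// Per the constructor's first line, client.s3 is the underlying AWS.S3
// instance built from s3Options (or an injected options.s3Client).
const underlying = client.s3;
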
github strues / boldr / src / api / routes / s3 / s3.routes.js
import aws from 'aws-sdk';
import express from 'express';

const debug = require('debug')('boldr:s3');
const config = require('../../config/config');

const awsConfig = config.get('aws');

function checkTrailingSlash(path) {
  if (path && path[path.length - 1] !== '/') {
    path += '/';
  }
  return path;
}

const s3 = new aws.S3({
  accessKeyId: awsConfig.keyId,
  secretAccessKey: awsConfig.keySecret,
  region: awsConfig.region,
});

export default function S3Router(options) {
  const S3_BUCKET = awsConfig.bucket;
  const getFileKeyDir = options.getFileKeyDir || function() {
    return '';
  };

  if (!S3_BUCKET) {
    throw new Error('S3_BUCKET is required.');
  }

  const s3Options = {};
github awslabs / ai-powered-speech-analytics-for-amazon-connect / source / helper / lib / website-helper.js
*                                                                                                                    *
 *      http://aws.amazon.com/asl/                                                                                    *
 *                                                                                                                    *
 *  or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES *
 *  OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions    *
 *  and limitations under the License.                                                                                *
 *********************************************************************************************************************/

/**
 * @author Solution Builders
 */

'use strict';

let AWS = require('aws-sdk');
let s3 = new AWS.S3();
const fs = require('fs');
var _downloadKey = 'AI-powered-speech-analytics-for-amazon-connect/${version}/web-site-manifest.json';
const _downloadLocation = '/tmp/web-site-manifest.json';

/**
 * Helper function to interact with s3 hosted website for cfn custom resource.
 *
 * @class websiteHelper
 */
let websiteHelper = (function() {

    /**
     * @class websiteHelper
     * @constructor
     */
    let websiteHelper = function() {};
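
The snippet stops before the code that actually fetches the manifest, but the _downloadKey/_downloadLocation pair points at a getObject-then-write-to-/tmp step. A rough sketch of that step, reusing the s3, fs, _downloadKey, and _downloadLocation declared above (the bucket name is a placeholder; the real code may differ):

    // Sketch: fetch the manifest object and write it to /tmp.
    function downloadManifest(callback) {
        s3.getObject({
            Bucket: 'solutions-bucket-placeholder', // placeholder; the real bucket isn't shown above
            Key: _downloadKey
        }, function (err, data) {
            if (err) return callback(err);
            fs.writeFileSync(_downloadLocation, data.Body); // data.Body is a Buffer
            callback(null, _downloadLocation);
        });
    }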