How to use the boto.s3 module in boto

To help you get started, we’ve selected a few boto.s3 examples, based on popular ways the module is used in public projects.

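Most of the examples below follow the same basic pattern: open a connection with boto.connect_s3, fetch a bucket, and read or write objects through boto.s3.key.Key. Here is a minimal sketch of that pattern; the bucket name and file paths are placeholders, and credentials are assumed to come from the environment or a boto config file.

import boto
import boto.s3.key

conn = boto.connect_s3()                        # credentials from env vars or ~/.boto
bucket = conn.get_bucket('my-example-bucket')   # placeholder: an existing bucket you own

# Upload a local file under a chosen key name, then read it back.
key = boto.s3.key.Key(bucket)
key.key = 'examples/hello.txt'
key.set_contents_from_filename('hello.txt')
print(key.get_contents_as_string())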

github nimbusproject / nimbus / tests / ec2_submit_tests.py
def test_ec2_submit_url(self):
        bucket_name = "Repo"
        bucket = self.s3conn.get_bucket(bucket_name)
        k = boto.s3.key.Key(bucket)
        image_name = self.cb_random_bucketname(10)
        k.key = "WHATEVER/" + image_name
        k.set_contents_from_filename(os.environ['NIMBUS_SOURCE_TEST_IMAGE'])
        url = "cumulus://HOST/" + bucket_name + "/" + k.key
        print(url)
        res = self.ec2conn.run_instances(url)
github nimbusproject / nimbus / tests / ec2_repo_tests.py
def test_ec2_list_upload(self):
        # obviously this will not work if the default name changes
        bucket = self.s3conn.get_bucket("Repo")
        k = boto.s3.key.Key(bucket)
        image_id = self.cb_random_bucketname(25)
        k.key = "VMS/" + self.can_user.get_id() + "/" + image_id
        k.set_contents_from_filename(os.environ['NIMBUS_SOURCE_TEST_IMAGE'])

        images = self.ec2conn.get_all_images()
        self.assertTrue(len(images) >= 1, "should be at least 1 image, got %d" % len(images))
        found = False
        for i in images:
            if i.id == image_id:
                found = True

        for i in images:
            print "+++++++++++++++++++++++++"
            print i
        self.assertTrue(found, "The image should have been found %s" % (image_id))
github wal-e / wal-e / tests / test_s3_calling_format.py
def test_cert_validation_sensitivity(monkeypatch):
    """Test degradation of dotted bucket names to OrdinaryCallingFormat

    Although these are legal bucket names for SubdomainCallingFormat,
    bucket names containing dots run afoul of SSL certificate
    validation, and so they are forced to fall back to
    OrdinaryCallingFormat.
    """
    monkeypatch.setenv('AWS_REGION', 'us-east-1')
    for bn in SUBDOMAIN_OK:
        if '.' not in bn:
            cinfo = calling_format.from_store_name(bn)
            assert (cinfo.calling_format ==
                    boto.s3.connection.SubdomainCallingFormat)
        else:
            assert '.' in bn

            cinfo = calling_format.from_store_name(bn)
            assert (cinfo.calling_format == connection.OrdinaryCallingFormat)
            assert cinfo.region == 'us-east-1'
            assert cinfo.ordinary_endpoint == 's3.amazonaws.com'
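
The point of the fallback above: bucket names containing dots break the wildcard SSL certificate used for subdomain-style (virtual-hosted) requests, so those buckets must be addressed with path-style URLs instead. A small sketch, independent of wal-e, of forcing path-style addressing in boto (the bucket name is a placeholder):

import boto
from boto.s3.connection import OrdinaryCallingFormat

# OrdinaryCallingFormat builds path-style URLs (s3.amazonaws.com/bucket/key),
# avoiding the certificate mismatch that dotted names cause with
# subdomain-style URLs (bucket.name.with.dots.s3.amazonaws.com).
conn = boto.connect_s3(calling_format=OrdinaryCallingFormat())
bucket = conn.get_bucket('bucket.name.with.dots')   # placeholder bucket name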
github skoczen / mindful-browsing / fabfile.py
def open_s3(self):
        """
        Opens connection to S3 returning bucket and key
        """
        conn = boto.connect_s3(
            self.AWS_ACCESS_KEY_ID,
            self.AWS_SECRET_ACCESS_KEY,
            is_secure=True,
            calling_format=OrdinaryCallingFormat() 
        )
        try:
            bucket = conn.get_bucket(self.AWS_STORAGE_BUCKET_NAME)
        except boto.exception.S3ResponseError:
            bucket = conn.create_bucket(self.AWS_STORAGE_BUCKET_NAME)
        return bucket, boto.s3.key.Key(bucket)
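
The helper returns both the bucket and a reusable Key object, so a caller can upload several files by repointing the same key. A standalone sketch of that reuse pattern (bucket name, key names, and local paths are placeholders):

import boto
import boto.s3.key

conn = boto.connect_s3()                        # credentials from env vars or ~/.boto
bucket = conn.get_bucket('my-example-bucket')   # placeholder bucket name
key = boto.s3.key.Key(bucket)

# Point the same Key object at a new name before each upload.
for local_path, key_name in [('build/site.css', 'static/css/site.css'),
                             ('build/app.js', 'static/js/app.js')]:
    key.key = key_name
    key.set_contents_from_filename(local_path)
    key.set_acl('public-read')                  # make each object publicly readable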
github elifesciences / elife-bot / activity / activity_PublishPOA.py
        s3_conn = S3Connection(self.settings.aws_access_key_id, self.settings.aws_secret_access_key)
        bucket = s3_conn.lookup(bucket_name)
        
        outbox_s3_key_names = []
        articles_not_uploaded_to_outbox = []

        for xml_file in xml_files:
            # Check for v2 or naming format
            # Very simple, checks for the letter v
            if self.get_filename_from_path(xml_file, '.xml').find('v') > -1:
                if v2_xml_upload is not True:
                    # Do not upload
                    articles_not_uploaded_to_outbox.append(xml_file)
                    continue

            s3key = boto.s3.key.Key(bucket)
            s3key.key = s3_folder_name + self.get_filename_from_path(xml_file, '.xml') + '.xml'
            s3key.set_contents_from_filename(xml_file, replace=True)
            outbox_s3_key_names.append(s3key.key)
        
        # Final check for empty lists of files
        if len(outbox_s3_key_names) <= 0:
            outbox_s3_key_names = None
        if len(articles_not_uploaded_to_outbox) <= 0:
            articles_not_uploaded_to_outbox = None
            
        return (outbox_s3_key_names, articles_not_uploaded_to_outbox)
github openstack / horizon / django-openstack / src / django_openstack / nova / manager.py
def get_instance_graph(self, region, instance_id, graph_name):
        # TODO(devcamcar): Need better support for multiple regions.
        #                  Need a way to get object store by region.
        s3 = boto.s3.connection.S3Connection(
            aws_access_key_id=settings.NOVA_ACCESS_KEY,
            aws_secret_access_key=settings.NOVA_SECRET_KEY,
            is_secure=False,
            calling_format=boto.s3.connection.OrdinaryCallingFormat(),
            port=3333,
            host=settings.NOVA_CLC_IP)
        key = '_%s.monitor' % instance_id

        try:
            bucket = s3.get_bucket(key, validate=False)
        except boto.exception.S3ResponseError as e:
            if e.code == "NoSuchBucket":
                return None
            else:
                raise e
github ansible / awx / awx / lib / site-packages / boto / __init__.py
def storage_uri_for_key(key):
    """Returns a StorageUri for the given key.

    :type key: :class:`boto.s3.key.Key` or subclass
    :param key: URI naming bucket + optional object.
    """
    if not isinstance(key, boto.s3.key.Key):
        raise InvalidUriError('Requested key (%s) is not a subclass of '
                              'boto.s3.key.Key' % str(type(key)))
    prov_name = key.bucket.connection.provider.get_provider_name()
    uri_str = '%s://%s/%s' % (prov_name, key.bucket.name, key.name)
    return storage_uri(uri_str)
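
storage_uri_for_key turns an existing Key into a provider-prefixed StorageUri such as s3://bucket/object, which is useful when the same code path must also handle Google Storage keys. A short usage sketch (bucket and object names are placeholders):

import boto

conn = boto.connect_s3()
bucket = conn.get_bucket('my-example-bucket')   # placeholder bucket name
key = bucket.get_key('reports/2016-01.csv')     # placeholder object name

uri = boto.storage_uri_for_key(key)
print(uri)   # e.g. s3://my-example-bucket/reports/2016-01.csv
# uri.bucket_name and uri.object_name expose the parsed components.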
github Khan / internal-webserver / gae_dashboard / fetch_amazon_billing.py
"""Get Amazon billing data from S3 as an array of strings.

    By default, we retrieve the billing data of the current month and year.
    """
    now = datetime.datetime.now()

    year = str(year or now.year)
    # AWS naming convention: leading zero for single-digit months
    month = str(month or now.month).zfill(2)

    # 759597320137 is KA's internal AWS "PayerAccountId". This field appears in
    # every row of each billing CSV, as well as in the names of S3 objects.
    csv_key = "759597320137-aws-billing-csv-%s-%s.csv" % (year, month)

    # TODO(nabil[2016-12-23]): generate s3 key, use it below and in secrets.py
    conn = boto.s3.connection.S3Connection(
        s3_secrets.youtube_export_s3_access_key,
        s3_secrets.youtube_export_s3_secret_key)
    billing_bucket = conn.get_bucket('ka-aws-billing-reports')
    value = billing_bucket.get_key(csv_key)

    if value is None:
        raise S3FileNotFound(csv_key)

    return value.get_contents_as_string().splitlines()
github cloudera / hue / desktop / core / ext-py / django-extensions-1.8.0 / django_extensions / management / commands / sync_s3.py
def open_s3(self):
        """
        Opens connection to S3 returning bucket and key
        """
        conn = boto.connect_s3(
            self.AWS_ACCESS_KEY_ID,
            self.AWS_SECRET_ACCESS_KEY,
            **self.get_s3connection_kwargs())
        try:
            bucket = conn.get_bucket(self.AWS_BUCKET_NAME)
        except boto.exception.S3ResponseError:
            bucket = conn.create_bucket(self.AWS_BUCKET_NAME)
        return bucket, boto.s3.key.Key(bucket)
github elifesciences / elife-bot / poll.py
def get_folders(bucket, prefix = None, delimiter = '/', headers = None):
    # Get "folders" from the bucket, with optional
    # prefix for the "folder" of interest
    # default delimiter is '/'
    
    if(bucket is None):
        return None

    folders = []
    keys = []

    bucketList = bucket.list(prefix = prefix, delimiter = delimiter, headers = headers)

    for item in bucketList:
        if(isinstance(item, boto.s3.prefix.Prefix)):
            # Can loop through each prefix and search for objects
            folders.append(item)
            #print 'Prefix: ' + item.name
        elif (isinstance(item, boto.s3.key.Key)):
            keys.append(item)
            #print 'Key: ' + item.name

    return keys, folders
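
A short usage sketch for the helper above; the bucket name and prefix are placeholders, and the connection setup mirrors the earlier examples:

import boto

conn = boto.connect_s3()
bucket = conn.get_bucket('my-example-bucket')   # placeholder bucket name

# List the top-level "folders" (common prefixes) and keys under 'articles/'.
keys, folders = get_folders(bucket, prefix='articles/')
for folder in folders:
    print(folder.name)
for key in keys:
    print(key.name)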