How to use the boto.connect_s3 function in boto

To help you get started, we’ve selected a few boto.connect_s3 examples, based on popular ways it is used in public projects.

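For orientation, here is a minimal sketch of the simplest pattern: calling boto.connect_s3() with no arguments so boto resolves credentials from the environment or its config files, then listing buckets. The bucket name below is a placeholder.

import boto

# With no arguments, boto resolves credentials from the AWS_ACCESS_KEY_ID /
# AWS_SECRET_ACCESS_KEY environment variables or ~/.boto.
conn = boto.connect_s3()

# List every bucket owned by the authenticated account.
for bucket in conn.get_all_buckets():
    print(bucket.name)

# Fetch a handle to one bucket; validate=False skips the existence check.
bucket = conn.get_bucket('my-example-bucket', validate=False)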

github jenshadlich / s3srv / integration-tests / python / delete_bucket.py
import boto.s3.connection

access_key = 'GUSCQ627K6CC4OAL4RGC'
secret_key = 'eU8cXNzdXGEFpMMa4SSINjlIeD1fea4pMO3dIw9T'
conn = boto.connect_s3(
    aws_access_key_id=access_key,
    aws_secret_access_key=secret_key,
    host='localhost',
    port=8080,
    is_secure=False,
    calling_format=boto.s3.connection.OrdinaryCallingFormat(),
)

conn.delete_bucket('test-bucket')
github spulec / moto / tests / test_s3 / test_s3.py
def test_key_save_to_missing_bucket():
    conn = boto.connect_s3("the_key", "the_secret")
    bucket = conn.get_bucket("mybucket", validate=False)

    key = Key(bucket)
    key.key = "the-key"
    key.set_contents_from_string.when.called_with("foobar").should.throw(
        S3ResponseError
    )
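
The test above depends on moto's S3 mock being activated elsewhere in the test module. A self-contained sketch of the same idea, assuming moto is installed and using its mock_s3 decorator (the test name and values are illustrative):

import boto
from boto.s3.key import Key
from moto import mock_s3

@mock_s3
def test_key_round_trip():
    conn = boto.connect_s3("the_key", "the_secret")
    bucket = conn.create_bucket("mybucket")

    key = Key(bucket)
    key.key = "the-key"
    key.set_contents_from_string("foobar")

    # Read the value back from moto's in-memory store.
    assert bucket.get_key("the-key").get_contents_as_string() == b"foobar"
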
github pygeomet / PyGEOMET / PyGEOMET / datasets / RadarDataset.py
                maxday = 29
            elif self.month == '02':
                maxday = 28
            elif self.month == '04' or self.month == '06' or \
                 self.month == '09' or self.month == '11':
                maxday = 30
            else:
                maxday = 31
            tmp = np.arange(1,maxday+1,1)
            self.dayList = ["%02d" % x for x in tmp]

            if self.day is None:
                self.day = self.dayList[self.currentDayIndex]

        self.setURL()
        s3conn = boto.connect_s3()
        bucket = s3conn.get_bucket('noaa-nexrad-level2')
        flist = bucket.get_all_keys(prefix=self.path)
        self.ntimes = len(flist)
        hharr = []
        mmssarr = []
        self.farr = []
        j = -1
        for i in range(0,len(flist)):
            time = flist[i].name.split("_")[1]
            hh = time[0:2]
            mmss = time[2:6]
            self.farr.append(time[0:6])
            if hh not in hharr:
                mmssarr.append([])
                hharr.append(hh)
                j += 1
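
Because noaa-nexrad-level2 is a public bucket, the same listing also works without AWS credentials by passing anon=True to connect_s3. A minimal sketch, with an example prefix (keys in this bucket are laid out as YYYY/MM/DD/SITE/):

import boto

# Anonymous (unsigned) requests are enough for the public NEXRAD archive.
conn = boto.connect_s3(anon=True)
bucket = conn.get_bucket('noaa-nexrad-level2')

# List one day of scans for a single radar site.
for key in bucket.get_all_keys(prefix='2016/05/01/KTLX/'):
    print(key.name)
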
github codeforamerica / srtracker / app.py
def save_uploaded_media(media):
    extension = '.' in media.filename and media.filename.rsplit('.', 1)[1]

    if extension in app.config['ACCEPTABLE_MEDIA']:
        unique_name = str(uuid.uuid4())
        if extension:
            unique_name = '%s.%s' % (unique_name, extension)

        storage_type = app.config.get('MEDIA_STORAGE_TYPE')
        # TODO: upload methodologies should be their own module
        if storage_type == 'file':
            media.save(os.path.join(app.config['MEDIA_STORAGE_PATH'], unique_name))
        elif storage_type == 's3':
            import boto
            s3_connection = boto.connect_s3(app.config['AWS_ACCESS_KEY_ID'], app.config['AWS_SECRET_KEY'])
            bucket_name = app.config['MEDIA_S3_BUCKET']
            bucket = s3_connection.get_bucket(bucket_name)
            key = boto.s3.key.Key(bucket)
            key.key = unique_name
            key.set_contents_from_file(media)
            key.make_public()
            url = 'http://%s.s3.amazonaws.com/%s' % (bucket_name, unique_name)
            app.logger.debug('S3 URL for %s: %s', media.filename, url)

            # for S3, prefer the actual AWS URL, since it's public and there's no reason to funnel it through SR Tracker
            return (unique_name, url)
        
        # return this more generically, since we should theoretically be able to use it for any storage type
        return (unique_name, url_for('get_request_media', filename=unique_name, _external=True))

    return None
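
A related option, if you would rather not call make_public(): boto's Key.generate_url() produces a time-limited signed URL, so the object itself can stay private. A minimal sketch with placeholder credentials, bucket, and file names:

import boto
from boto.s3.key import Key

conn = boto.connect_s3('MY_ACCESS_KEY', 'MY_SECRET_KEY')  # placeholder credentials
bucket = conn.get_bucket('my-media-bucket')                # placeholder bucket

key = Key(bucket)
key.key = 'uploads/example.jpg'
key.set_contents_from_filename('example.jpg')

# Signed URL valid for one hour.
url = key.generate_url(expires_in=3600)
print(url)
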
github turi-code / SFrame / oss_src / unity / python / sframe / util / file_util.py
def _get_s3_key(s3_path, aws_credentials = {}):
    '''Given S3 path, get the key object that represents the path'''
    conn = boto.connect_s3(**aws_credentials)
    (bucket_name, path) = parse_s3_path(s3_path)
    bucket = conn.get_bucket(bucket_name, validate=False)
    k = boto.s3.key.Key(bucket)
    k.key = path
    return k
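
A hypothetical usage sketch for a helper like the one above, assuming an s3:// style path and credentials passed through as connect_s3 keyword arguments (all names below are placeholders):

# Download the object behind an S3 path to a local file.
key = _get_s3_key('s3://my-bucket/models/model.bin',
                  aws_credentials={'aws_access_key_id': 'MY_KEY',
                                   'aws_secret_access_key': 'MY_SECRET'})
key.get_contents_to_filename('/tmp/model.bin')

# The same key object can also be used to upload.
key.set_contents_from_filename('/tmp/model.bin')
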
github probcomp / Cloudless / examples / DPMB / MrJob / generate_scoring.py
def create_file_queue(bucket_dir_suffix, bucket_str=None):
    if bucket_str is None:
        bucket_str = settings.s3.bucket_str
    #
    bucket = boto.connect_s3().get_bucket(bucket_str)
    summary_filenames = filter_bucket_filenames(bucket, bucket_dir_suffix,
                                                is_summary)
    score_filenames = filter_bucket_filenames(bucket, bucket_dir_suffix,
                                              is_score)
    target_score_filenames = map(get_score_name, summary_filenames)
    missing_score_filenames = setdiff(target_score_filenames, score_filenames)
    corresponding_summary_filenames = map(
        get_summary_name, missing_score_filenames)
    is_not_init = lambda filename: filename.find('-1')==-1
    corresponding_summary_filenames = filter(is_not_init, corresponding_summary_filenames)
    queue = create_queue_from_list(
        corresponding_summary_filenames, bucket_dir_suffix)
    return queue
github scrapy / scrapy / scrapy / extensions / feedexport.py
        u = urlparse(uri)
        self.bucketname = u.hostname
        self.access_key = u.username or access_key
        self.secret_key = u.password or secret_key
        self.is_botocore = is_botocore()
        self.keyname = u.path[1:]  # remove first "/"
        self.acl = acl
        if self.is_botocore:
            import botocore.session
            session = botocore.session.get_session()
            self.s3_client = session.create_client(
                's3', aws_access_key_id=self.access_key,
                aws_secret_access_key=self.secret_key)
        else:
            import boto
            self.connect_s3 = boto.connect_s3
github nagwww / 101-AWS-S3-Hacks / versionstatus.py
"""
- Author : Nag m
- Hack   : Find out the status of the Bucket Versioning 
- Info   : Find out the status of the Bucket Versioning
            * 101-s3-aws
"""

import boto

def version(name):
   bucket = conn.get_bucket(name)
   print bucket.get_versioning_status()

if __name__ == "__main__":
   conn = boto.connect_s3()
   bucketname = "101-s3-aws"
   version(bucketname)
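
To turn versioning on rather than just report it, boto also exposes Bucket.configure_versioning(). A minimal sketch along the same lines:

import boto

conn = boto.connect_s3()
bucket = conn.get_bucket("101-s3-aws")

# Enable versioning, then confirm the new status.
bucket.configure_versioning(True)
print(bucket.get_versioning_status())
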
github tsileo / bakthat / bakthat / backends.py
    def __init__(self, conf={}, profile="default"):
        BakthatBackend.__init__(self, conf, profile)

        con = boto.connect_s3(self.conf["access_key"], self.conf["secret_key"])

        region_name = self.conf["region_name"]
        if region_name == DEFAULT_LOCATION:
            region_name = ""

        try:
            self.bucket = con.get_bucket(self.conf["s3_bucket"])
        except S3ResponseError, e:
            if e.code == "NoSuchBucket":
                self.bucket = con.create_bucket(self.conf["s3_bucket"], location=region_name)
            else:
                raise e

        self.container = self.conf["s3_bucket"]
        self.container_key = "s3_bucket"
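
An alternative to catching S3ResponseError is S3Connection.lookup(), which returns None instead of raising when a bucket does not exist. A sketch of the same get-or-create logic with placeholder names:

import boto

conn = boto.connect_s3('MY_ACCESS_KEY', 'MY_SECRET_KEY')  # placeholder credentials

bucket = conn.lookup('my-backup-bucket')  # None if the bucket is missing
if bucket is None:
    # '' is boto's default location (US Standard).
    bucket = conn.create_bucket('my-backup-bucket', location='')
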
github jbm9 / indri / scanner_uploader / uploader.py
    def bootstrap(self):
        if not self.no_upload:
            self.c = boto.connect_s3()
            self.b = self.c.get_bucket(self.bucketname)
            bucket_location = self.b.get_location()
            if bucket_location:
                self.c = boto.s3.connect_to_region(bucket_location)
                self.b = self.c.get_bucket(self.bucketname)
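
When the region is known up front, boto.s3.connect_to_region can be called directly instead of reconnecting after a lookup. A minimal sketch with placeholder names:

import boto.s3

# Connect straight to the bucket's region to avoid cross-region redirects.
conn = boto.s3.connect_to_region('us-west-2')      # placeholder region
bucket = conn.get_bucket('my-regional-bucket')     # placeholder bucket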