How to use the boto.s3.connection.OrdinaryCallingFormat class in boto

To help you get started, we’ve selected a few boto examples based on popular ways OrdinaryCallingFormat is used in public projects.

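OrdinaryCallingFormat switches boto from the default virtual-hosted addressing (bucket.s3.amazonaws.com) to path-style addressing, where the bucket name goes into the URL path instead of the hostname. That is what makes it useful for local or non-AWS S3-compatible endpoints and for bucket names containing dots, as the examples below show. As a minimal sketch of the shared pattern (the host, credentials, and bucket name here are placeholders, not taken from any of the projects):

import boto
from boto.s3.connection import OrdinaryCallingFormat

# Placeholder endpoint and credentials; substitute your own S3-compatible service.
conn = boto.connect_s3(
    aws_access_key_id='YOUR_ACCESS_KEY',
    aws_secret_access_key='YOUR_SECRET_KEY',
    host='s3.example.com',                   # custom endpoint instead of s3.amazonaws.com
    is_secure=True,
    calling_format=OrdinaryCallingFormat(),  # requests look like https://s3.example.com/<bucket>/<key>
)

bucket = conn.get_bucket('my-bucket')
for key in bucket.list(prefix='logs/'):
    print(key.name)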

github jenshadlich/s3srv: integration-tests/python/create_object.py
import boto
import boto.s3.connection
from boto.s3.key import Key

access_key = 'GUSCQ627K6CC4OAL4RGC'
secret_key = 'eU8cXNzdXGEFpMMa4SSINjlIeD1fea4pMO3dIw9T'

# Connect to a local s3srv instance over plain HTTP, using path-style
# (ordinary) addressing rather than virtual-hosted bucket subdomains.
conn = boto.connect_s3(
    aws_access_key_id=access_key,
    aws_secret_access_key=secret_key,
    host='localhost',
    port=8080,
    is_secure=False,
    calling_format=boto.s3.connection.OrdinaryCallingFormat(),
)

bucket = conn.get_bucket('test-bucket')

key1 = Key(bucket)
key1.key = 'test/object-1'
key1.content_type = 'text/plain'
key1.set_contents_from_string("content 1")

key2 = Key(bucket)
key2.key = 'test-object-2'
key2.content_type = 'text/plain'
key2.set_contents_from_string("content 2")
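
To check the result against the same local endpoint, the two objects can be read back over the same connection; a small sketch reusing the bucket and key names above:

for name in ('test/object-1', 'test-object-2'):
    key = bucket.get_key(name)
    print("%s: %s" % (name, key.get_contents_as_string()))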

github cisco-ie/pipeline-gnmi: vendor/github.com/influxdata/influxdb/build.py
"""Upload provided package output to AWS S3.
    """
    logging.debug("Uploading files to bucket '{}': {}".format(bucket_name, packages))
    try:
        import boto
        from boto.s3.key import Key
        from boto.s3.connection import OrdinaryCallingFormat
        logging.getLogger("boto").setLevel(logging.WARNING)
    except ImportError:
        logging.warn("Cannot upload packages without 'boto' Python library!")
        return False
    logging.info("Connecting to AWS S3...")
    # Up the number of attempts to 10 from default of 1
    boto.config.add_section("Boto")
    boto.config.set("Boto", "metadata_service_num_attempts", "10")
    c = boto.connect_s3(calling_format=OrdinaryCallingFormat())
    if bucket_name is None:
        bucket_name = DEFAULT_BUCKET
    bucket = c.get_bucket(bucket_name.split('/')[0])
    for p in packages:
        if '/' in bucket_name:
            # Allow for nested paths within the bucket name (ex:
            # bucket/folder). Assuming forward-slashes as path
            # delimiter.
            name = os.path.join('/'.join(bucket_name.split('/')[1:]),
                                os.path.basename(p))
        else:
            name = os.path.basename(p)
        logging.debug("Using key: {}".format(name))
        if bucket.get_key(name) is None or overwrite:
            logging.info("Uploading file {}".format(name))
            k = Key(bucket)

github neo4j-drivers/boltkit: boltkit/controller.py
    def download_s3(self, package):
        """ Download from private s3 distributions.
        """
        package_path = path_join(self.path, package)
        aws_access_key_id = get_env_variable_or_raise_error("AWS_ACCESS_KEY_ID")
        aws_secret_access_key = get_env_variable_or_raise_error("AWS_SECRET_ACCESS_KEY")

        bucket_name = getenv("BUCKET", DIST_HOST)
        # connect to the bucket
        conn = boto.s3.connect_to_region(
            "eu-west-1",
            aws_access_key_id=aws_access_key_id,
            aws_secret_access_key=aws_secret_access_key,
            is_secure=True,
            calling_format=OrdinaryCallingFormat()
        )
        bucket = conn.get_bucket(bucket_name)
        # Get the Key object of the given key, in the bucket
        k = Key(bucket, package)

        # Ensure the destination directory exists
        try:
            makedirs(self.path)
        except OSError:
            # path may already exist
            pass
        self.write("Downloading from aws bucket %s... " % bucket_name)

        # Get the contents of the key into a file
        k.get_contents_to_filename(package_path)
        return package_path

github NCI-GDC/gdcdatamodel: zug/downloaders.py
    def setup_s3(self):
        self.logger.info("Connecting to s3 at %s.", self.s3_info["host"])
        self.boto_conn = boto.connect_s3(
            aws_access_key_id=self.s3_info["access_key"],
            aws_secret_access_key=self.s3_info["secret_key"],
            host=self.s3_info["host"],
            port=self.s3_info["port"],
            is_secure=False,
            calling_format=boto.s3.connection.OrdinaryCallingFormat(),
        )
        self.logger.info("Getting s3 bucket %s", self.s3_info["bucket"])
        self.s3_bucket = self.boto_conn.get_bucket(self.s3_info["bucket"])

github openstack/nova: nova/api/ec2/images.py
def conn(context):
    access = manager.AuthManager().get_access_key(context.user,
                                                  context.project)
    secret = str(context.user.secret)
    calling = boto.s3.connection.OrdinaryCallingFormat()
    return boto.s3.connection.S3Connection(aws_access_key_id=access,
                                           aws_secret_access_key=secret,
                                           is_secure=False,
                                           calling_format=calling,
                                           port=FLAGS.s3_port,
                                           host=FLAGS.s3_host)

github samuelclay/NewsBlur: utils/backups/s3.py
def save_file_in_s3(filename, name=None):
    conn   = S3Connection(ACCESS_KEY, SECRET, calling_format=OrdinaryCallingFormat())
    bucket = conn.get_bucket(BUCKET_NAME)
    k      = Key(bucket)
    k.key  = name or filename

    k.set_contents_from_filename(filename)

github nprapps/lookatthis: fabfile/utils.py
def get_bucket(bucket_name):
    """
    Establishes a connection and gets the s3 bucket.
    """

    if '.' in bucket_name:
        s3 = boto.connect_s3(calling_format=OrdinaryCallingFormat())
    else:
        s3 = boto.connect_s3()

    return s3.get_bucket(bucket_name)
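
The dot check above exists because the default SubdomainCallingFormat puts the bucket name into the TLS hostname, so a dotted bucket such as cdn.example.org no longer matches Amazon's *.s3.amazonaws.com wildcard certificate; path-style addressing sidesteps that (see the boto issue referenced in the next example). A quick sketch of the difference using boto's calling-format helper methods (the bucket and key names are made up):

from boto.s3.connection import OrdinaryCallingFormat, SubdomainCallingFormat

# Virtual-hosted style: the bucket becomes part of the hostname.
print(SubdomainCallingFormat().build_host('s3.amazonaws.com', 'cdn.example.org'))
# -> cdn.example.org.s3.amazonaws.com

# Path-style: the hostname stays fixed and the bucket moves into the path.
ordinary = OrdinaryCallingFormat()
print(ordinary.build_host('s3.amazonaws.com', 'cdn.example.org'))
# -> s3.amazonaws.com
print(ordinary.build_path_base('cdn.example.org', 'some/key'))
# -> /cdn.example.org/some/key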

github ownport/portable-ansible: ansible/lib/ansible/modules/core/cloud/amazon/s3.py
        # Boto uses symbolic names for locations but region strings will
        # actually work fine for everything except us-east-1 (US Standard)
        location = region

    if module.params.get('object'):
        obj = os.path.expanduser(module.params['object'])

    # allow eucarc environment variables to be used if ansible vars aren't set
    if not s3_url and 'S3_URL' in os.environ:
        s3_url = os.environ['S3_URL']

    # bucket names with .'s in them need to use the calling_format option,
    # otherwise the connection will fail. See https://github.com/boto/boto/issues/2836
    # for more details.
    if '.' in bucket:
        aws_connect_kwargs['calling_format'] = OrdinaryCallingFormat()

    # Look at s3_url and tweak connection settings
    # if connecting to Walrus or fakes3
    try:
        if is_fakes3(s3_url):
            fakes3 = urlparse.urlparse(s3_url)
            s3 = S3Connection(
                is_secure=fakes3.scheme == 'fakes3s',
                host=fakes3.hostname,
                port=fakes3.port,
                calling_format=OrdinaryCallingFormat(),
                **aws_connect_kwargs
            )
        elif is_walrus(s3_url):
            walrus = urlparse.urlparse(s3_url).hostname
            s3 = boto.connect_walrus(walrus, **aws_connect_kwargs)

github bokeh/bokeh: scripts/build_upload.py
return "junk"

    try:
        AWS_ACCESS_KEY_ID = os.environ['AWS_ACCESS_KEY_ID']
        AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET_ACCESS_KEY']
    except Exception:
        failed("Could NOT retrieve CDN credentials")
        abort_checks()

    buckets = []
    for bucket_name, bucket_region in [('cdn.bokeh.org', 'us-east-1'), ('cdn-backup.bokeh.org', 'us-west-2')]:
        try:
            conn = boto.s3.connect_to_region(bucket_region,
                                             aws_access_key_id=AWS_ACCESS_KEY_ID,
                                             aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
                                             calling_format=boto.s3.connection.OrdinaryCallingFormat())
            buckets.append(conn.get_bucket(bucket_name))
        except Exception:
            failed("Could NOT connect to CDN bucket %r" % bucket_name)
            abort_checks()
    return buckets