How to use the boto3.session.Config function in boto3

To help you get started, we’ve selected a few boto3.session.Config examples based on popular ways it is used in public projects.

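boto3.session.Config is the same Config class that botocore exposes for client configuration: you build one with the options you need and pass it as the config argument when creating a client or resource. A minimal sketch of the pattern the snippets below follow (the region and retry settings here are placeholders):

import boto3

# Build a Config carrying SigV4 request signing (as in the examples
# below) plus a simple retry policy.
config = boto3.session.Config(
    signature_version='s3v4',
    retries={'max_attempts': 5},
)

# Pass it to the client (or resource) at creation time.
s3_client = boto3.client('s3', region_name='us-east-1', config=config)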

github geoadmin / mf-geoadmin3 / scripts / s3manage.py
import sys

import boto3
import botocore.exceptions


def __init_connection__(bucket_name):
    try:
        session = boto3.session.Session()
    except botocore.exceptions.BotoCoreError as e:
        print('Cannot establish connection to bucket "%s". Check your credentials.' % bucket_name)
        print(e)
        sys.exit(1)

    s3client = session.client('s3', config=boto3.session.Config(signature_version='s3v4'))
    s3 = session.resource('s3', config=boto3.session.Config(signature_version='s3v4'))

    bucket = s3.Bucket(bucket_name)
    return (s3, s3client, bucket)
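The snippet above pins signature_version='s3v4'; one place this matters is presigned URLs, which newer AWS regions only accept when signed with Signature Version 4. A hedged usage sketch built on the function above (the bucket and key names are illustrative):

# Reuse the SigV4-signed client from __init_connection__ to create a
# time-limited download link; bucket and key values are illustrative.
s3, s3client, bucket = __init_connection__('my-example-bucket')

url = s3client.generate_presigned_url(
    'get_object',
    Params={'Bucket': 'my-example-bucket', 'Key': 'path/to/object.json'},
    ExpiresIn=3600,  # link stays valid for one hour
)
print(url)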

github AlgoTraders / stock-analysis-engine / analysis_engine / publish.py
if s3_enabled and s3_address and s3_bucket and s3_key:
        endpoint_url = f'http{"s" if s3_secure else ""}://{s3_address}'

        if verbose:
            log.debug(
                f's3 start - {label} endpoint_url={endpoint_url} '
                f'region={s3_region_name}')

        s3 = boto3.resource(
            's3',
            endpoint_url=endpoint_url,
            aws_access_key_id=s3_access_key,
            aws_secret_access_key=s3_secret_key,
            region_name=s3_region_name,
            config=boto3.session.Config(
                signature_version='s3v4')
        )

        if s3.Bucket(s3_bucket) not in s3.buckets.all():
            if verbose:
                log.debug(f's3 creating bucket={s3_bucket} {label}')
            s3.create_bucket(
                Bucket=s3_bucket)

        if verbose:
            log.debug(
                f's3 upload start - bytes={num_mb} to '
                f'{s3_bucket}:{s3_key} {label}')

        s3.Bucket(
            s3_bucket).put_object(
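For illustration, an upload like the one started above typically completes with a Key and a Body; the variable names below mirror the snippet, but the data payload is an assumption:

# Illustrative completion of the upload above; `data` is an assumed
# bytes payload (for example the serialized dataset being published).
s3.Bucket(s3_bucket).put_object(
    Key=s3_key,
    Body=data,
)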

github SAP / InfraBox / src / pyinfraboxutils / storage.py
def _get_client(self):
        client = boto3.client('s3',
                              endpoint_url=self.url,
                              config=boto3.session.Config(signature_version='s3v4'),
                              aws_access_key_id=get_env('INFRABOX_STORAGE_S3_ACCESS_KEY'),
                              aws_secret_access_key=get_env('INFRABOX_STORAGE_S3_SECRET_KEY'))

        return client
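Beyond the signature version, the same Config object can carry connection tuning such as timeouts and pool size. A sketch of a similar storage client with assumed endpoint, credentials, and tuning values:

import boto3

# Assumed endpoint and credentials; the Config adds connection tuning
# on top of the SigV4 signing used above.
client = boto3.client(
    's3',
    endpoint_url='https://storage.example.com',
    aws_access_key_id='ACCESS_KEY',
    aws_secret_access_key='SECRET_KEY',
    config=boto3.session.Config(
        signature_version='s3v4',
        connect_timeout=5,        # seconds to wait for a connection
        read_timeout=60,          # seconds to wait for a response
        max_pool_connections=25,  # size of the urllib3 connection pool
    ),
)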

github localstack / localstack / localstack / utils / aws / aws_stack.py
def get_s3_client():
    return boto3.resource('s3',
        endpoint_url=config.TEST_S3_URL,
        config=boto3.session.Config(
            s3={'addressing_style': 'path'}),
        verify=False)
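Here the s3 dictionary sets addressing_style to 'path' (the other accepted values are 'virtual' and 'auto'), which keeps the bucket name in the URL path; local S3 stand-ins such as LocalStack generally expect that style. The same Config can combine it with SigV4 signing; a sketch with an assumed local endpoint:

import boto3

# Assumed local test endpoint; combines path-style addressing with SigV4.
s3 = boto3.resource(
    's3',
    endpoint_url='http://localhost:4566',
    config=boto3.session.Config(
        signature_version='s3v4',
        s3={'addressing_style': 'path'},
    ),
    verify=False,  # skip certificate verification against the local endpoint
)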

github AlgoTraders / stock-analysis-engine / analysis_engine / work_tasks / publish_from_s3_to_redis.py
's3_secure',
                ae_consts.S3_SECURE) == '1'

            endpoint_url = f'http{"s" if secure else ""}://{service_address}'

            log.info(
                f'{label} building s3 endpoint_url={endpoint_url} '
                f'region={region_name}')

            s3 = boto3.resource(
                's3',
                endpoint_url=endpoint_url,
                aws_access_key_id=access_key,
                aws_secret_access_key=secret_key,
                region_name=region_name,
                config=boto3.session.Config(
                    signature_version='s3v4')
            )

            try:
                log.info(
                    f'{label} checking bucket={s3_bucket_name} exists')
                if s3.Bucket(s3_bucket_name) not in s3.buckets.all():
                    log.info(
                        f'{label} creating bucket={s3_bucket_name}')
                    s3.create_bucket(
                        Bucket=s3_bucket_name)
            except Exception as e:
                log.info(
                    f'{label} failed creating bucket={s3_bucket_name} '
                    f'with ex={e}')
            # end of try/ex for creating bucket
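Iterating s3.buckets.all() to test for a single bucket needs list-bucket permission and an extra round trip; a common alternative is a head_bucket call whose error code is inspected. A hedged sketch using the resource's underlying client and the names from the snippet above:

import botocore.exceptions

# Assumed alternative existence check: HEAD the bucket and create it only
# when the service reports a 404, instead of listing every bucket.
try:
    s3.meta.client.head_bucket(Bucket=s3_bucket_name)
except botocore.exceptions.ClientError as err:
    if err.response['Error']['Code'] == '404':
        s3.create_bucket(Bucket=s3_bucket_name)
    else:
        raise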

github AlgoTraders / stock-analysis-engine / analysis_engine / load_history_dataset_from_s3.py
:param s3_secure: Transmit using tls encryption
        (default is ``False``)
    """
    log.info(f'start s3={s3_address}:{s3_bucket}/{s3_key}')

    data_from_file = None

    endpoint_url = f'http{"s" if s3_secure else ""}://{s3_address}'

    s3 = boto3.resource(
        's3',
        endpoint_url=endpoint_url,
        aws_access_key_id=s3_access_key,
        aws_secret_access_key=s3_secret_key,
        region_name=s3_region_name,
        config=boto3.session.Config(signature_version='s3v4'))

    try:
        data_from_file = s3_utils.s3_read_contents_from_key(
            s3=s3,
            s3_bucket_name=s3_bucket,
            s3_key=s3_key,
            encoding=encoding,
            convert_as_json=convert_as_json,
            compress=compress)
    except Exception as e:
        if (
                'An error occurred (NoSuchBucket) '
                'when calling the GetObject operation') in str(e):
            msg = (
                f'missing s3_bucket={s3_bucket} in s3_address={s3_address}')
            log.error(msg)
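Matching on the exception's text works, but botocore raises a structured ClientError whose error code can be checked directly. A hedged sketch of the same NoSuchBucket handling, reusing the names from the snippet above:

import botocore.exceptions

# Assumed variant of the handler above: catch botocore's ClientError and
# inspect the structured error code instead of matching the message text.
try:
    data_from_file = s3_utils.s3_read_contents_from_key(
        s3=s3,
        s3_bucket_name=s3_bucket,
        s3_key=s3_key,
        encoding=encoding,
        convert_as_json=convert_as_json,
        compress=compress)
except botocore.exceptions.ClientError as e:
    if e.response['Error']['Code'] == 'NoSuchBucket':
        log.error(f'missing s3_bucket={s3_bucket} in s3_address={s3_address}')
    else:
        raise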