How to use the boto3.resource function in boto3

To help you get started, we’ve selected a few boto3.resource examples, based on popular ways the function is used in public projects.

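boto3.resource(service_name, ...) returns a high-level, object-oriented handle for an AWS service, in contrast to the low-level boto3.client, which exposes plain request/response dictionaries. A minimal sketch of the difference (the bucket name is hypothetical):

import boto3

# High-level resource: service objects with attributes and collection helpers.
s3 = boto3.resource("s3", region_name="us-east-1")
bucket = s3.Bucket("my-example-bucket")  # hypothetical bucket name
for obj in bucket.objects.limit(10):
    print(obj.key, obj.size)

# Low-level client: the same service as flat API calls returning dicts.
s3_client = boto3.client("s3", region_name="us-east-1")
response = s3_client.list_objects_v2(Bucket="my-example-bucket", MaxKeys=10)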

github spulec / moto / tests / test_dynamodb2 / test_dynamodb_table_with_range_key.py (View on GitHub)
def test_update_table_gsi_throughput():
    dynamodb = boto3.resource("dynamodb", region_name="us-east-1")

    # Create the DynamoDB table.
    table = dynamodb.create_table(
        TableName="users",
        KeySchema=[
            {"AttributeName": "forum_name", "KeyType": "HASH"},
            {"AttributeName": "subject", "KeyType": "RANGE"},
        ],
        GlobalSecondaryIndexes=[
            {
                "IndexName": "TestGSI",
                "KeySchema": [
                    {"AttributeName": "username", "KeyType": "HASH"},
                    {"AttributeName": "created", "KeyType": "RANGE"},
                ],
                "Projection": {"ProjectionType": "ALL"},
                "ProvisionedThroughput": {"ReadCapacityUnits": 3, "WriteCapacityUnits": 3},
            }
        ],
        AttributeDefinitions=[
            {"AttributeName": "forum_name", "AttributeType": "S"},
            {"AttributeName": "subject", "AttributeType": "S"},
            {"AttributeName": "username", "AttributeType": "S"},
            {"AttributeName": "created", "AttributeType": "S"},
        ],
        ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
    )
github m3dev / gokart / test / test_s3_zip_client.py (View on GitHub)
def test_unpack_archive(self):
        conn = boto3.resource('s3', region_name='us-east-1')
        conn.create_bucket(Bucket='test')

        file_path = os.path.join('s3://test/', 'test.zip')
        in_temporary_directory = os.path.join(_get_temporary_directory(), 'in', 'dummy')
        out_temporary_directory = os.path.join(_get_temporary_directory(), 'out', 'dummy')

        # make dummy zip file.
        os.makedirs(in_temporary_directory, exist_ok=True)
        in_zip_client = S3ZipClient(file_path=file_path, temporary_directory=in_temporary_directory)
        in_zip_client.make_archive()

        # load dummy zip file.
        out_zip_client = S3ZipClient(file_path=file_path, temporary_directory=out_temporary_directory)
        self.assertFalse(os.path.exists(out_temporary_directory))
        out_zip_client.unpack_archive()
        self.assertTrue(os.path.exists(out_temporary_directory))
github keithrozario / potassium40 / lambda / compress_bucket.py (View on GitHub)
def compress_bucket(event, context):
    """
    compresses all files in an s3_bucket to a zip file
    """

    logger = logging.getLogger()
    level = logging.INFO
    logger.setLevel(level)

    bucket_name = os.environ['bucket_name']
    file_name = 'robots.json.gz'
    file_dir = '/tmp/'  # /tmp is the only writable directory in the Lambda environment

    s3 = boto3.resource('s3')
    bucket = s3.Bucket(bucket_name)
    downloaded_keys = []
    full_list = []

    logger.info("Reading all files in bucket: {}".format(bucket_name))

    for k, obj in enumerate(bucket.objects.all()):
        if obj.key not in downloaded_keys:
            # Append to downloaded keys, and write contents to io_string
            downloaded_keys.append(obj.key)
            file_list = json.loads((obj.get()['Body'].read()).decode('utf-8'))
            full_list.extend(file_list)

    logger.info("Read in and compressed {:,} files to {}".format(len(downloaded_keys),
                                                                 file_name))
    # write everything to a single file
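
The excerpt ends before the write-out step. A hedged sketch of what "write everything to a single file" could look like, reusing the names from the excerpt above; the gzip/json handling is an assumption, not the project's exact code:

import gzip
import json

# Assumed continuation: gzip the combined list and upload it back to the bucket.
with gzip.open(file_dir + file_name, "wt") as f:
    json.dump(full_list, f)
bucket.upload_file(file_dir + file_name, file_name)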
github aws / sagemaker-containers / src / sagemaker_containers / _files.py (View on GitHub)
def s3_download(url, dst):  # type: (str, str) -> None
    """Download a file from S3.

    Args:
        url (str): the s3 url of the file.
        dst (str): the destination where the file will be saved.
    """
    url = parse.urlparse(url)

    if url.scheme != "s3":
        raise ValueError("Expecting 's3' scheme, got: %s in %s" % (url.scheme, url))

    bucket, key = url.netloc, url.path.lstrip("/")

    region = os.environ.get("AWS_REGION", os.environ.get(_params.REGION_NAME_ENV))
    s3 = boto3.resource("s3", region_name=region)

    s3.Bucket(bucket).download_file(key, dst)
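
Usage is a single call; the URL and destination below are hypothetical, just to show the calling convention:

# Downloads s3://my-bucket/training/model.tar.gz to a local path.
s3_download("s3://my-bucket/training/model.tar.gz", "/tmp/model.tar.gz")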
github twilio-labs / socless / functions / set_investigation_status / lambda_function.py (View on GitHub)
def handle_state(investigation_id, status):
    """
    Set the Investigation ID to status.
    This is applied to the original incident
    """
    VALID_STATUSES = ['open','closed','active','whitelisted']
    if not investigation_id:
        return {"result": "failure", "message": "No investigation_id provided"}
    if status not in VALID_STATUSES:
        return {"result": "failure", "message": f"Status {status} is not a valid status"}

    event_table = boto3.resource('dynamodb').Table(EVENTS_TABLE)
    # The investigation_id in Socless matches the ID of the original event
    try:
        update_query = event_table.update_item(
            Key={'id':investigation_id},
            UpdateExpression='SET status_ = :status_',
            ExpressionAttributeValues={':status_': status},
            ConditionExpression='attribute_exists(id)'
        )
    except ClientError as e:
        if e.response['Error']['Code'] == 'ConditionalCheckFailedException':
            return { "result": "failure", "message": f"Investigation with id {investigation_id} does not exist"}
        else:
            raise
    else:
        return {"result": "success"}
github ChineseGLUE / ChineseGLUE / baselines / models_pytorch / mrc_pytorch / tools / file_utils.py (View on GitHub)
def s3_get(url: str, temp_file: IO) -> None:
    """Pull a file directly from S3."""
    s3_resource = boto3.resource("s3")
    bucket_name, s3_path = split_s3_path(url)
    s3_resource.Bucket(bucket_name).download_fileobj(s3_path, temp_file)
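
split_s3_path is a helper defined elsewhere in the same file; a minimal sketch of the behavior it needs to provide, assuming a plain urlparse-based implementation:

from urllib.parse import urlparse

def split_s3_path(url: str):
    # "s3://bucket/some/key" -> ("bucket", "some/key")
    parsed = urlparse(url)
    return parsed.netloc, parsed.path.lstrip("/")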
github aws / lumberyard / dev / Gems / CloudGemFramework / v1 / AWS / lambda-code / ProjectResourceHandler / LambdaConfigurationResourceHandler.py (View on GitHub)
def _get_input_key(props):
    input_key = '{}/{}-lambda-code.zip'.format(props.ConfigurationKey, props.FunctionName)
    config_bucket = boto3.resource('s3').Bucket(props.ConfigurationBucket)
    objs = list(config_bucket.objects.filter(Prefix=input_key))
    if len(objs) == 0:
        print("There is no unique code folder for {}, using default".format(props.FunctionName))
        input_key = '{}/lambda-function-code.zip'.format(props.ConfigurationKey)
    return input_key
github apache / incubator-dlab / infrastructure-provisioning / aws / src / ssn / scripts / finalize.py (View on GitHub)
def cleanup(key_id):
    try:
        iam = boto3.resource('iam')
        current_user = iam.CurrentUser()
        for user_key in current_user.access_keys.all():
            if user_key.id == key_id:
                print "Deleted key " + user_key.id
                user_key.delete()
        return True
    except Exception:
        return False
github spenczar / lektor-s3 / lektor_s3.py (View on GitHub)
def connect(self, credentials):
        self.s3 = boto3.resource(service_name='s3')
        self.cloudfront = boto3.client(service_name='cloudfront')