import logging

import boto3.dynamodb.types

from localstack.utils.aws import aws_stack
from localstack.utils.common import to_str, to_bytes

TEST_BUCKET_NAME = 'test-bucket'
KINESIS_STREAM_NAME = 'test_stream_1'
MSG_BODY_RAISE_ERROR_FLAG = 'raise_error'
MSG_BODY_MESSAGE_TARGET = 'message_target'

logging.basicConfig(level=logging.INFO)
LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)


# Subclass of boto's TypeDeserializer for DynamoDB
# to adjust for DynamoDB Stream format.
class TypeDeserializer(boto3.dynamodb.types.TypeDeserializer):
    def _deserialize_n(self, value):
        # DynamoDB Streams deliver numbers as strings; return plain floats
        return float(value)

    def _deserialize_b(self, value):
        return value  # already in Base64


def handler(event, context):
    """Generic event forwarder Lambda."""
    # print test messages (to test CloudWatch Logs integration)
    LOGGER.info('Lambda log message - logging module')
    print('Lambda log message - print function')
    if MSG_BODY_RAISE_ERROR_FLAG in event:
        raise Exception('Test exception (this is intentional)')


def setUp(self):
    self.deserializer = TypeDeserializer()
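
# A minimal usage sketch (not part of the original snippet; the sample record
# is hypothetical): the subclass above turns a DynamoDB Stream image into
# plain Python values, with numbers as floats instead of Decimal.
deserializer = TypeDeserializer()
stream_image = {'id': {'S': 'rec-1'}, 'count': {'N': '42'}}
plain = {k: deserializer.deserialize(v) for k, v in stream_image.items()}
# plain == {'id': 'rec-1', 'count': 42.0}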
import logging

from flask import Flask
from six import iteritems
from boto3.dynamodb.types import TypeDeserializer

from localstack.utils.aws.aws_stack import (
    get_s3_client, firehose_stream_arn, connect_elasticsearch, extract_region_from_auth_header)
from localstack.utils.kinesis import kinesis_connector
from localstack.utils.analytics import event_publisher

APP_NAME = 'firehose_api'
app = Flask(APP_NAME)
ACTION_HEADER_PREFIX = 'Firehose_20150804'

# logger
LOG = logging.getLogger(__name__)

# maps stream names to details
DELIVERY_STREAMS = {}

# dynamodb deserializer
deser = TypeDeserializer()


def get_delivery_stream_names():
    names = []
    for name, stream in iteritems(DELIVERY_STREAMS):
        names.append(stream['DeliveryStreamName'])
    return names


def get_delivery_stream_tags(stream_name, exclusive_start_tag_key=None, limit=50):
    stream = DELIVERY_STREAMS[stream_name]
    response = {}
    start_i = -1
    if exclusive_start_tag_key is not None:
        start_i = next(i for i, tag in enumerate(stream['Tags']) if tag['Key'] == exclusive_start_tag_key)
    # minimal completion sketch: return up to `limit` tags after the
    # exclusive start key, plus a flag indicating whether more remain
    tags = stream.get('Tags', [])
    response['Tags'] = tags[start_i + 1:start_i + 1 + limit]
    response['HasMoreTags'] = len(tags) > start_i + 1 + limit
    return response
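
# Hypothetical usage sketch: register a fake delivery stream, then list names
# and fetch a page of tags. All names and values below are illustrative.
DELIVERY_STREAMS['s1'] = {
    'DeliveryStreamName': 's1',
    'Tags': [{'Key': 'env', 'Value': 'dev'}, {'Key': 'team', 'Value': 'data'}],
}
print(get_delivery_stream_names())               # ['s1']
print(get_delivery_stream_tags('s1', limit=1))   # first page: the 'env' tag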
from boto3.dynamodb.transform import TransformationInjector
from boto3.dynamodb.types import TypeDeserializer

# placeholder values for constants defined elsewhere in the source module
DYNAMODB_TABLE_NAME = 'cloudformation-stack-emissions'
DB_CATEGORY = 'GuardDuty Multi Account Member Role'


def get_account_role_map(boto_session, region_name):
    """Fetch the ARNs of all IAM Roles that users have created in other
    AWS accounts and inserted into DynamoDB with
    http://github.com/gene1wood/cloudformation-cross-account-outputs

    :return: dict with account ID keys and IAM Role ARN values
    """
    client = boto_session.client('dynamodb', region_name=region_name)
    paginator = client.get_paginator('scan')
    service_model = client._service_model.operation_model('Scan')
    trans = TransformationInjector(deserializer=TypeDeserializer())
    items = []
    for page in paginator.paginate(TableName=DYNAMODB_TABLE_NAME):
        # rewrite the raw AttributeValue maps in the page into plain Python values
        trans.inject_attribute_value_output(page, service_model)
        items.extend(page['Items'])
    return {x['aws-account-id']: x['GuardDutyMemberAccountIAMRoleArn']
            for x in items
            if x.get('category') == DB_CATEGORY
            and {'aws-account-id',
                 'GuardDutyMemberAccountIAMRoleArn'} <= set(x)}
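
# Illustrative sketch (hypothetical item): TransformationInjector applies the
# same per-attribute conversion TypeDeserializer performs, so a raw scanned
# item becomes plain Python values.
td = TypeDeserializer()
raw_item = {'aws-account-id': {'S': '123456789012'}}
print({k: td.deserialize(v) for k, v in raw_item.items()})
# {'aws-account-id': '123456789012'}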
from boto3.dynamodb.conditions import ConditionExpressionBuilder
from boto3.dynamodb.transform import ParameterTransformer
from boto3.dynamodb.types import TypeDeserializer, TypeSerializer


def __init__(self, transformer=None, condition_builder=None,
             serializer=None, deserializer=None):
    self._transformer = transformer
    if transformer is None:
        self._transformer = ParameterTransformer()
    self._condition_builder = condition_builder
    if condition_builder is None:
        self._condition_builder = ConditionExpressionBuilder()
    self._serializer = serializer
    if serializer is None:
        self._serializer = TypeSerializer()
    self._deserializer = deserializer
    if deserializer is None:
        self._deserializer = TypeDeserializer()
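
# Quick roundtrip sketch (hypothetical value): TypeSerializer and
# TypeDeserializer are inverses over the DynamoDB attribute-value format.
from boto3.dynamodb.types import TypeDeserializer, TypeSerializer

wire = TypeSerializer().serialize({'n': 1, 'tags': ['a', 'b']})
# {'M': {'n': {'N': '1'}, 'tags': {'L': [{'S': 'a'}, {'S': 'b'}]}}}
value = TypeDeserializer().deserialize(wire)
# {'n': Decimal('1'), 'tags': ['a', 'b']}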
def simplified_new_image(self):
    image = self.new_image
    if image is None:
        return None
    td = TypeDeserializer()
    return {k: td.deserialize(v) for k, v in image.items()}
def __init__(self, dynamodb_table_resource=None, dynamodb_client=None, transactions=True):
    """Take a dynamodb table resource to use for operations."""
    self.table = dynamodb_table_resource
    self.client = dynamodb_client
    self.transactions = transactions
    self.deserializer = TypeDeserializer()
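
# Hypothetical sketch: the __init__ above belongs to some table-wrapper class
# whose name the snippet does not show; `TableStore` below is an assumed
# stand-in used only to show how it would be constructed.
class TableStore(object):
    def __init__(self, dynamodb_table_resource=None, dynamodb_client=None, transactions=True):
        self.table = dynamodb_table_resource
        self.client = dynamodb_client
        self.transactions = transactions
        self.deserializer = TypeDeserializer()

store = TableStore(transactions=False)  # table/client omitted in this sketch
store.deserializer.deserialize({'N': '7'})  # Decimal('7')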
import logging
import os

from boto3.dynamodb.types import TypeDeserializer

# The following parameters are required to configure the ES cluster
ES_ENDPOINT = os.environ['ES_ENDPOINT']
ES_REGION = os.environ['ES_REGION']
# Environment variables are strings, so compare against '1'
# (comparing against the integer 1 would always be False)
DEBUG = os.environ.get('DEBUG') == '1'

# Elasticsearch 6 deprecated having multiple mapping types in an index. Default to 'doc'.
DOC_TYPE = 'doc'
ES_MAX_RETRIES = 3  # Max number of retries for exponential backoff

logger = logging.getLogger()
logger.setLevel(logging.DEBUG if DEBUG else logging.INFO)
logger.info('Streaming to ElasticSearch')


# Subclass of boto's TypeDeserializer for DynamoDB to adjust for DynamoDB Stream format.
class StreamTypeDeserializer(TypeDeserializer):
    def _deserialize_n(self, value):
        return float(value)

    def _deserialize_b(self, value):
        return value  # Already in Base64


class ES_Exception(Exception):
    '''Capture status_code from request'''
    status_code = 0
    payload = ''

    def __init__(self, status_code, payload):
        self.status_code = status_code
        self.payload = payload
        Exception.__init__(self, 'ES_Exception: status_code={}, payload={}'.format(status_code, payload))
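
# Usage sketch (illustrative values): surface a failed request as ES_Exception
# so callers can branch on the HTTP status code.
try:
    raise ES_Exception(429, 'Too Many Requests')
except ES_Exception as exc:
    retryable = exc.status_code == 429 or exc.status_code >= 500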
import logging

from boto3.dynamodb.types import TypeDeserializer

ES_ENDPOINT = 'ES_ENDPOINT HERE'

# The following parameters can be optionally customized
DOC_TABLE_FORMAT = '{}'  # Python formatter to generate the index name from the DynamoDB table name
DOC_TYPE_FORMAT = '{}_type'  # Python formatter to generate the type name from the DynamoDB table name; the default adds a '_type' suffix
ES_REGION = None  # If not set, use the Lambda runtime region
ES_MAX_RETRIES = 3  # Max number of retries for exponential backoff
DEBUG = True  # Set verbose debugging information

print('Streaming to ElasticSearch')

logger = logging.getLogger()
logger.setLevel(logging.DEBUG if DEBUG else logging.INFO)


# Subclass of boto's TypeDeserializer for DynamoDB to adjust for DynamoDB Stream format.
class StreamTypeDeserializer(TypeDeserializer):
    def _deserialize_n(self, value):
        return float(value)

    def _deserialize_b(self, value):
        return value  # Already in Base64


class ES_Exception(Exception):
    '''Exception capturing status_code from Client Request'''
    status_code = 0
    payload = ''

    def __init__(self, status_code, payload):
        self.status_code = status_code
        self.payload = payload
        Exception.__init__(self, 'ES_Exception: status_code={}, payload={}'.format(status_code, payload))
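
# Usage sketch with a hypothetical DynamoDB Streams image: numbers come back
# as floats (per _deserialize_n above) rather than Decimal.
deser = StreamTypeDeserializer()
new_image = {'pk': {'S': 'user#1'}, 'score': {'N': '9.5'}}
doc = {k: deser.deserialize(v) for k, v in new_image.items()}
# doc == {'pk': 'user#1', 'score': 9.5}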