def send_event_to_target(arn, event, target_attributes=None, asynchronous=True):
    """Deliver an Events rule event to the target identified by the given ARN.

    :param arn: ARN of the target (Lambda, SNS, SQS, Step Functions, Firehose,
        EventBridge event bus, or Kinesis stream)
    :param event: event payload (dict) to forward to the target
    :param target_attributes: optional per-target parameters, e.g.
        ``{"SqsParameters": {...}}`` or ``{"KinesisParameters": {...}}``
    :param asynchronous: whether Lambda targets are invoked asynchronously
    """
    region = arn.split(':')[3]
    if ':lambda:' in arn:
        # local import to avoid a circular dependency at module load time
        from localstack.services.awslambda import lambda_api
        lambda_api.run_lambda(func_arn=arn, event=event, context={}, asynchronous=asynchronous)
    elif ':sns:' in arn:
        sns_client = connect_to_service('sns', region_name=region)
        sns_client.publish(TopicArn=arn, Message=json.dumps(event))
    elif ':sqs:' in arn:
        sqs_client = connect_to_service('sqs', region_name=region)
        queue_url = get_sqs_queue_url(arn)
        msg_group_id = dict_utils.get_safe(target_attributes, '$.SqsParameters.MessageGroupId')
        # MessageGroupId is only valid for FIFO queues - pass it only when configured
        kwargs = {'MessageGroupId': msg_group_id} if msg_group_id else {}
        sqs_client.send_message(QueueUrl=queue_url, MessageBody=json.dumps(event), **kwargs)
    elif ':states:' in arn:
        stepfunctions_client = connect_to_service('stepfunctions', region_name=region)
        stepfunctions_client.start_execution(stateMachineArn=arn, input=json.dumps(event))
    elif ':firehose:' in arn:
        delivery_stream_name = firehose_name(arn)
        firehose_client = connect_to_service('firehose', region_name=region)
        firehose_client.put_record(
            DeliveryStreamName=delivery_stream_name,
            Record={'Data': to_bytes(json.dumps(event))})
    elif ':events:' in arn:
        # forward to another event bus; the bus name is the last path segment of the ARN
        bus_name = arn.split(':')[-1].split('/')[-1]
        events_client = connect_to_service('events', region_name=region)
        events_client.put_events(
            Entries=[{
                'EventBusName': bus_name,
                'Source': event.get('source'),
                'DetailType': event.get('detail-type'),
                'Detail': event.get('detail')
            }]
        )
    elif ':kinesis:' in arn:
        partition_key_path = dict_utils.get_safe(
            target_attributes,
            '$.KinesisParameters.PartitionKeyPath',
            default_value='$.id'
        )
        stream_name = arn.split('/')[-1]
        # Fix: resolve the fallback lazily. Passing event['id'] as the get_safe()
        # default evaluated it eagerly, raising KeyError for events without an
        # 'id' even when the configured PartitionKeyPath resolved successfully.
        partition_key = dict_utils.get_safe(event, partition_key_path)
        if partition_key is None:
            partition_key = event['id']
        kinesis_client = connect_to_service('kinesis', region_name=region)
        kinesis_client.put_record(
            StreamName=stream_name,
            Data=to_bytes(json.dumps(event)),
            PartitionKey=partition_key
        )
    else:
        # lazy %-style args: the message is only rendered if the record is emitted
        LOG.warning('Unsupported Events rule target ARN: "%s"', arn)
def s3_response_is_delete_keys(self, request, path, bucket_name):
    """Determine whether the incoming S3 request is a DeleteObjects ("delete keys") call."""
    if not self.subdomain_based_buckets(request):
        return utils_is_delete_keys(request, path, bucket_name)
    # Temporary fix until moto supports x-id and DeleteObjects (#3931)
    params = self._get_querystring(request.url)
    matches_v3 = bool(params and 'delete' in params and get_safe(params, '$.x-id.0') == 'DeleteObjects')
    return matches_v3 or is_delete_keys(request, path, bucket_name)
def test_get_safe(self):
    """Verify get_safe() lookups via string paths, list paths, defaults, and list indices."""
    data = {
        "level_one_1": {
            "level_two_1": {
                "level_three_1": "level_three_1_value",
                "level_three_2": ["one", "two"],
            },
            "level_two_2": "level_two_2_value",
        },
        "level_one_2": "level_one_2_value",
    }
    inner = data["level_one_1"]["level_two_1"]
    # string-path and list-path syntax should resolve to the same subtree
    self.assertEqual(inner, get_safe(data, "$.level_one_1.level_two_1"))
    self.assertEqual(inner, get_safe(data, ["$", "level_one_1", "level_two_1"]))
    # deep lookups into nested dicts
    self.assertEqual(
        "level_three_1_value",
        get_safe(data, "$.level_one_1.level_two_1.level_three_1"),
    )
    self.assertEqual(
        "level_three_1_value",
        get_safe(data, ["$", "level_one_1", "level_two_1", "level_three_1"]),
    )
    # unresolvable paths yield None, or the supplied default value
    self.assertIsNone(get_safe(data, ["$", "level_one_1", "level_two_1", "random", "value"]))
    self.assertEqual(
        "default_value",
        get_safe(
            data,
            ["$", "level_one_1", "level_two_1", "random", "value"],
            "default_value",
        ),
    )
    # list elements are addressable by stringified index
    self.assertEqual(
        "one",
        get_safe(data, ["$", "level_one_1", "level_two_1", "level_three_2", "0"]),
    )
    self.assertEqual("two", get_safe(data, "$.level_one_1.level_two_1.level_three_2.1"))
def test_get_safe(self):
    """Verify get_safe() lookups via string paths, list paths, defaults, and list indices."""
    data = {
        'level_one_1': {
            'level_two_1': {
                'level_three_1': 'level_three_1_value',
                'level_three_2': ['one', 'two']
            },
            'level_two_2': 'level_two_2_value'
        },
        'level_one_2': 'level_one_2_value'
    }
    inner = data['level_one_1']['level_two_1']
    # table of (path, expected result) pairs covering string and list path syntax
    cases = [
        ('$.level_one_1.level_two_1', inner),
        (['$', 'level_one_1', 'level_two_1'], inner),
        ('$.level_one_1.level_two_1.level_three_1', 'level_three_1_value'),
        (['$', 'level_one_1', 'level_two_1', 'level_three_1'], 'level_three_1_value'),
        (['$', 'level_one_1', 'level_two_1', 'random', 'value'], None),
        (['$', 'level_one_1', 'level_two_1', 'level_three_2', '0'], 'one'),
        ('$.level_one_1.level_two_1.level_three_2.1', 'two'),
    ]
    for path, expected in cases:
        self.assertEqual(get_safe(data, path), expected)
    # the default value is returned for paths that cannot be resolved
    self.assertEqual(
        get_safe(data, ['$', 'level_one_1', 'level_two_1', 'random', 'value'], 'default_value'),
        'default_value'
    )
def send_event_to_target(arn, event, target_attributes=None, asynchronous=True):
    """Deliver an Events rule event to the target identified by the given ARN.

    :param arn: ARN of the target (Lambda, SNS, SQS, Step Functions, Firehose,
        EventBridge event bus / API destination, or Kinesis stream)
    :param event: event payload (dict) to forward to the target
    :param target_attributes: optional per-target parameters, e.g.
        ``{"SqsParameters": {...}}`` or ``{"KinesisParameters": {...}}``
    :param asynchronous: whether Lambda targets are invoked asynchronously
    """
    region = arn.split(':')[3]
    if ':lambda:' in arn:
        # local import to avoid a circular dependency at module load time
        from localstack.services.awslambda import lambda_api
        lambda_api.run_lambda(func_arn=arn, event=event, context={}, asynchronous=asynchronous)
    elif ':sns:' in arn:
        sns_client = connect_to_service('sns', region_name=region)
        sns_client.publish(TopicArn=arn, Message=json.dumps(event))
    elif ':sqs:' in arn:
        sqs_client = connect_to_service('sqs', region_name=region)
        queue_url = get_sqs_queue_url(arn)
        msg_group_id = dict_utils.get_safe(target_attributes, '$.SqsParameters.MessageGroupId')
        # MessageGroupId is only valid for FIFO queues - pass it only when configured
        kwargs = {'MessageGroupId': msg_group_id} if msg_group_id else {}
        sqs_client.send_message(QueueUrl=queue_url, MessageBody=json.dumps(event), **kwargs)
    elif ':states:' in arn:
        stepfunctions_client = connect_to_service('stepfunctions', region_name=region)
        stepfunctions_client.start_execution(stateMachineArn=arn, input=json.dumps(event))
    elif ':firehose:' in arn:
        delivery_stream_name = firehose_name(arn)
        firehose_client = connect_to_service('firehose', region_name=region)
        firehose_client.put_record(
            DeliveryStreamName=delivery_stream_name,
            Record={'Data': to_bytes(json.dumps(event))})
    elif ':events:' in arn:
        events_client = connect_to_service('events', region_name=region)
        if ':destination/' in arn:
            # API destination ARNs look like ...:api-destination/{name}/{uuid};
            # Fix: take the first path segment (the name) - the previous
            # split('/')[-1] returned the trailing UUID instead of the name.
            destination_name = arn.split(':')[-1].split('/')[1]
            destination = events_client.describe_api_destination(Name=destination_name)
            method = destination.get('HttpMethod', 'GET')
            endpoint = destination.get('InvocationEndpoint')
            state = destination.get('ApiDestinationState') or 'ACTIVE'
            LOG.debug('Calling EventBridge API destination (state "%s"): %s %s', state, method, endpoint)
            result = requests.request(method=method, url=endpoint, data=json.dumps(event or {}))
            if result.status_code >= 400:
                LOG.debug('Received code %s forwarding events: %s %s', result.status_code, method, endpoint)
        else:
            # forward to another event bus; the bus name is the last path segment
            bus_name = arn.split(':')[-1].split('/')[-1]
            events_client.put_events(
                Entries=[{
                    'EventBusName': bus_name,
                    'Source': event.get('source'),
                    'DetailType': event.get('detail-type'),
                    'Detail': event.get('detail')
                }]
            )
    elif ':kinesis:' in arn:
        partition_key_path = dict_utils.get_safe(
            target_attributes,
            '$.KinesisParameters.PartitionKeyPath',
            default_value='$.id'
        )
        stream_name = arn.split('/')[-1]
        # Fix: resolve the fallback lazily. Passing event['id'] as the get_safe()
        # default evaluated it eagerly, raising KeyError for events without an
        # 'id' even when the configured PartitionKeyPath resolved successfully.
        partition_key = dict_utils.get_safe(event, partition_key_path)
        if partition_key is None:
            partition_key = event['id']
        kinesis_client = connect_to_service('kinesis', region_name=region)
        kinesis_client.put_record(
            StreamName=stream_name,
            Data=to_bytes(json.dumps(event)),
            PartitionKey=partition_key
        )
    else:
        # lazy %-style args: the message is only rendered if the record is emitted
        LOG.warning('Unsupported Events rule target ARN: "%s"', arn)
def send_event_to_target(
    target_arn: str,
    event: Dict,
    target_attributes: Dict = None,
    asynchronous: bool = True,
    target: Dict = None,
):
    """Deliver an Events rule event to the target identified by the given ARN.

    :param target_arn: ARN of the target (Lambda, SNS, SQS, Step Functions,
        Firehose, EventBridge bus / API destination, Kinesis, or CloudWatch Logs)
    :param event: event payload (dict) to forward to the target
    :param target_attributes: optional per-target parameters, e.g.
        ``{"SqsParameters": {...}}`` or ``{"KinesisParameters": {...}}``
    :param asynchronous: whether Lambda targets are invoked asynchronously
    :param target: optional target definition (e.g. containing "HttpParameters"
        for API destinations)
    """
    # Fix: avoid a mutable default argument ({} is shared across calls);
    # normalize a missing target spec to an empty dict instead.
    target = target or {}
    region = target_arn.split(":")[3]
    if ":lambda:" in target_arn:
        # local import to avoid a circular dependency at module load time
        from localstack.services.awslambda import lambda_api

        lambda_api.run_lambda(
            func_arn=target_arn, event=event, context={}, asynchronous=asynchronous
        )
    elif ":sns:" in target_arn:
        sns_client = connect_to_service("sns", region_name=region)
        sns_client.publish(TopicArn=target_arn, Message=json.dumps(event))
    elif ":sqs:" in target_arn:
        sqs_client = connect_to_service("sqs", region_name=region)
        queue_url = get_sqs_queue_url(target_arn)
        msg_group_id = dict_utils.get_safe(target_attributes, "$.SqsParameters.MessageGroupId")
        # MessageGroupId is only valid for FIFO queues - pass it only when configured
        kwargs = {"MessageGroupId": msg_group_id} if msg_group_id else {}
        sqs_client.send_message(QueueUrl=queue_url, MessageBody=json.dumps(event), **kwargs)
    elif ":states:" in target_arn:
        stepfunctions_client = connect_to_service("stepfunctions", region_name=region)
        stepfunctions_client.start_execution(stateMachineArn=target_arn, input=json.dumps(event))
    elif ":firehose:" in target_arn:
        delivery_stream_name = firehose_name(target_arn)
        firehose_client = connect_to_service("firehose", region_name=region)
        firehose_client.put_record(
            DeliveryStreamName=delivery_stream_name,
            Record={"Data": to_bytes(json.dumps(event))},
        )
    elif ":events:" in target_arn:
        if ":api-destination/" in target_arn or ":destination/" in target_arn:
            send_event_to_api_destination(target_arn, event, target.get("HttpParameters"))
        else:
            events_client = connect_to_service("events", region_name=region)
            # the bus name is the last path segment of the ARN
            eventbus_name = target_arn.split(":")[-1].split("/")[-1]
            events_client.put_events(
                Entries=[{
                    "EventBusName": eventbus_name,
                    "Source": event.get("source"),
                    "DetailType": event.get("detail-type"),
                    "Detail": event.get("detail"),
                }])
    elif ":kinesis:" in target_arn:
        partition_key_path = dict_utils.get_safe(
            target_attributes,
            "$.KinesisParameters.PartitionKeyPath",
            default_value="$.id",
        )
        stream_name = target_arn.split("/")[-1]
        # Fix: resolve the fallback lazily. Passing event["id"] as the get_safe()
        # default evaluated it eagerly, raising KeyError for events without an
        # "id" even when the configured PartitionKeyPath resolved successfully.
        partition_key = dict_utils.get_safe(event, partition_key_path)
        if partition_key is None:
            partition_key = event["id"]
        kinesis_client = connect_to_service("kinesis", region_name=region)
        kinesis_client.put_record(
            StreamName=stream_name,
            Data=to_bytes(json.dumps(event)),
            PartitionKey=partition_key,
        )
    elif ":logs:" in target_arn:
        log_group_name = target_arn.split(":")[-1]
        logs_client = connect_to_service("logs", region_name=region)
        # each delivery writes to a fresh, uniquely-named log stream
        log_stream_name = str(uuid.uuid4())
        logs_client.create_log_stream(logGroupName=log_group_name, logStreamName=log_stream_name)
        logs_client.put_log_events(
            logGroupName=log_group_name,
            logStreamName=log_stream_name,
            logEvents=[{
                "timestamp": now_utc(millis=True),
                "message": json.dumps(event)
            }],
        )
    else:
        LOG.warning('Unsupported Events rule target ARN: "%s"', target_arn)
def send_event_to_target(target_arn, event, target_attributes=None, asynchronous=True):
    """Deliver an Events rule event to the target identified by the given ARN.

    :param target_arn: ARN of the target (Lambda, SNS, SQS, Step Functions,
        Firehose, EventBridge bus / API destination, or Kinesis stream)
    :param event: event payload (dict) to forward to the target
    :param target_attributes: optional per-target parameters, e.g.
        ``{"SqsParameters": {...}}`` or ``{"KinesisParameters": {...}}``
    :param asynchronous: whether Lambda targets are invoked asynchronously
    """
    region = target_arn.split(":")[3]
    if ":lambda:" in target_arn:
        # local import to avoid a circular dependency at module load time
        from localstack.services.awslambda import lambda_api

        lambda_api.run_lambda(
            func_arn=target_arn, event=event, context={}, asynchronous=asynchronous
        )
    elif ":sns:" in target_arn:
        sns_client = connect_to_service("sns", region_name=region)
        sns_client.publish(TopicArn=target_arn, Message=json.dumps(event))
    elif ":sqs:" in target_arn:
        sqs_client = connect_to_service("sqs", region_name=region)
        queue_url = get_sqs_queue_url(target_arn)
        msg_group_id = dict_utils.get_safe(target_attributes, "$.SqsParameters.MessageGroupId")
        # MessageGroupId is only valid for FIFO queues - pass it only when configured
        kwargs = {"MessageGroupId": msg_group_id} if msg_group_id else {}
        sqs_client.send_message(QueueUrl=queue_url, MessageBody=json.dumps(event), **kwargs)
    elif ":states:" in target_arn:
        stepfunctions_client = connect_to_service("stepfunctions", region_name=region)
        stepfunctions_client.start_execution(stateMachineArn=target_arn, input=json.dumps(event))
    elif ":firehose:" in target_arn:
        delivery_stream_name = firehose_name(target_arn)
        firehose_client = connect_to_service("firehose", region_name=region)
        firehose_client.put_record(
            DeliveryStreamName=delivery_stream_name,
            Record={"Data": to_bytes(json.dumps(event))},
        )
    elif ":events:" in target_arn:
        events_client = connect_to_service("events", region_name=region)
        if ":api-destination/" in target_arn:
            # API destination support
            # see https://docs.aws.amazon.com/eventbridge/latest/userguide/eb-api-destinations.html
            # ARN shape: ...:api-destination/{name}/{uuid} -> name is segment [1]
            api_destination_name = target_arn.split(":")[-1].split("/")[1]
            destination = events_client.describe_api_destination(Name=api_destination_name)
            method = destination.get("HttpMethod", "GET")
            endpoint = destination.get("InvocationEndpoint")
            state = destination.get("ApiDestinationState") or "ACTIVE"
            LOG.debug(
                'Calling EventBridge API destination (state "%s"): %s %s', state, method, endpoint
            )
            # TODO: support connection/auth (BASIC AUTH, API KEY, OAUTH)
            # connection_arn = destination.get("ConnectionArn")
            headers = {
                # default headers AWS sends with every api destination call
                "User-Agent": "Amazon/EventBridge/ApiDestinations",
                "Content-Type": "application/json; charset=utf-8",
                "Range": "bytes=0-1048575",
                "Accept-Encoding": "gzip,deflate",
                "Connection": "close",
            }
            # TODO: consider option to disable the actual network call to avoid unintended side effects
            # TODO: InvocationRateLimitPerSecond (needs some form of thread-safety, scoped to the api destination)
            result = requests.request(
                method=method, url=endpoint, data=json.dumps(event or {}), headers=headers
            )
            if result.status_code >= 400:
                LOG.debug(
                    "Received code %s forwarding events: %s %s",
                    result.status_code, method, endpoint
                )
                # TODO: retry logic (only retry on 429 and 5xx response status)
        else:
            # forward to another event bus; the bus name is the last path segment
            eventbus_name = target_arn.split(":")[-1].split("/")[-1]
            events_client.put_events(
                Entries=[
                    {
                        "EventBusName": eventbus_name,
                        "Source": event.get("source"),
                        "DetailType": event.get("detail-type"),
                        "Detail": event.get("detail"),
                    }
                ]
            )
    elif ":kinesis:" in target_arn:
        partition_key_path = dict_utils.get_safe(
            target_attributes,
            "$.KinesisParameters.PartitionKeyPath",
            default_value="$.id",
        )
        stream_name = target_arn.split("/")[-1]
        # Fix: resolve the fallback lazily. Passing event["id"] as the get_safe()
        # default evaluated it eagerly, raising KeyError for events without an
        # "id" even when the configured PartitionKeyPath resolved successfully.
        partition_key = dict_utils.get_safe(event, partition_key_path)
        if partition_key is None:
            partition_key = event["id"]
        kinesis_client = connect_to_service("kinesis", region_name=region)
        kinesis_client.put_record(
            StreamName=stream_name,
            Data=to_bytes(json.dumps(event)),
            PartitionKey=partition_key,
        )
    else:
        # lazy %-style args: the message is only rendered if the record is emitted
        LOG.warning('Unsupported Events rule target ARN: "%s"', target_arn)