def do_execute(*args):
    """Execute the Lambda handler once and persist its logs.

    Records the invocation time, delegates to ``self._execute`` and, when an
    asynchronous invocation fails, forwards the event to the appropriate dead
    letter queue (SQS source DLQ, or the Lambda's own DLQ otherwise) before
    re-raising the error.

    Returns:
        Tuple ``(result, log_output)`` from ``self._execute``.

    Raises:
        Exception: whatever ``self._execute`` raised, after DLQ forwarding.
    """
    # invocation time in milliseconds
    invocation_time = int(time.time() * 1000)
    # Pre-initialize so the finally block cannot hit an UnboundLocalError
    # (and thereby mask the real error) when _execute raises before the
    # tuple unpacking below ever assigns log_output.
    log_output = ''
    try:
        result, log_output = self._execute(func_arn, func_details, event, context, version)
    except Exception as e:
        if asynchronous:
            if get_from_event(event, 'eventSource') == EVENT_SOURCE_SQS:
                sqs_queue_arn = get_from_event(event, 'eventSourceARN')
                if sqs_queue_arn:
                    # event source is SQS, send event back to dead letter queue
                    sqs_error_to_dead_letter_queue(sqs_queue_arn, event, e)
            else:
                # event source is not SQS, send back to lambda dead letter
                lambda_error_to_dead_letter_queue(func_details, event, e)
        # bare raise preserves the original traceback (raise e would append
        # this frame to it)
        raise
    finally:
        self.function_invoke_times[func_arn] = invocation_time
        # forward log output to cloudwatch logs (empty string on failure)
        self._store_logs(func_details, log_output, invocation_time)
    # return final result
    return result, log_output
def _run(func_arn=None):
    """Execute the Lambda handler and report the outcome via ``callback``.

    Records the invocation time, delegates to ``self._execute`` and, when an
    asynchronous invocation fails, forwards the event to the appropriate dead
    letter queue before re-raising. The (optional) ``callback`` from the
    enclosing scope is always invoked from the ``finally`` block with the
    result, any raised error, and the SQS DLQ outcome.

    Returns:
        The result from ``self._execute`` (on success).

    Raises:
        Exception: whatever ``self._execute`` raised, after DLQ forwarding.
    """
    # invocation time in milliseconds
    invocation_time = int(time.time() * 1000)
    raised_error = None
    result = None
    dlq_sent = None
    try:
        result = self._execute(func_arn, func_details, event, context, version)
    except Exception as e:
        raised_error = e
        if asynchronous:
            if get_from_event(event, 'eventSource') == EVENT_SOURCE_SQS:
                sqs_queue_arn = get_from_event(event, 'eventSourceARN')
                if sqs_queue_arn:
                    # event source is SQS, send event back to dead letter queue
                    dlq_sent = sqs_error_to_dead_letter_queue(sqs_queue_arn, event, e)
            else:
                # event source is not SQS, send back to lambda dead letter queue.
                # NOTE(review): the return value is discarded here, so dlq_sent
                # stays None even when the Lambda DLQ was used — confirm intended.
                lambda_error_to_dead_letter_queue(func_details, event, e)
        # bare raise preserves the original traceback
        raise
    finally:
        self.function_invoke_times[func_arn] = invocation_time
        if callback:
            callback(result, func_arn, event, error=raised_error, dlq_sent=dlq_sent)
    # return final result
    return result
def process_sqs_message(message_body, message_attributes, queue_name, region_name=None):
    """Feed an SQS message into the first matching Lambda event source mapping.

    Only the first listening Lambda is invoked, so the message gets processed
    exactly once. Failures are logged and swallowed (best effort) so that SQS
    message handling never crashes the caller.

    Args:
        message_body: Raw SQS message body string.
        message_attributes: SQS message attributes dict, passed through as-is.
        queue_name: Name of the source SQS queue.
        region_name: AWS region of the queue; forwarded into the event record.

    Returns:
        True if a Lambda was invoked, False if no event source mapping exists
        for the queue, None if an unexpected error occurred.
    """
    try:
        queue_arn = aws_stack.sqs_queue_arn(queue_name, region_name=region_name)
        sources = get_event_sources(source_arn=queue_arn)
        arns = [s.get('FunctionArn') for s in sources]
        LOG.debug('Found %s source mappings for event from SQS queue %s: %s' % (len(arns), queue_arn, arns))
        source = next(iter(sources), None)
        # guard clause replaces the redundant nested `if source:` block
        if not source:
            return False
        arn = source['FunctionArn']
        # compute the timestamp once so both attributes always agree
        sent_timestamp = '{}000'.format(int(time.time()))
        event = {'Records': [{
            'body': message_body,
            'receiptHandle': 'MessageReceiptHandle',
            'md5OfBody': md5(message_body),
            'eventSourceARN': queue_arn,
            'eventSource': 'aws:sqs',
            'awsRegion': region_name,
            'messageId': str(uuid.uuid4()),
            'attributes': {
                'ApproximateFirstReceiveTimestamp': sent_timestamp,
                'SenderId': TEST_AWS_ACCOUNT_ID,
                'ApproximateReceiveCount': '1',
                'SentTimestamp': sent_timestamp
            },
            'messageAttributes': message_attributes,
            'sqs': True,
        }]}
        result = run_lambda(event=event, context={}, func_arn=arn)
        status_code = getattr(result, 'status_code', 200)
        if status_code >= 400:
            LOG.warning('Invoking Lambda %s from SQS message failed (%s): %s' % (arn, status_code, result.data))
            # check if we need to forward to a dead letter queue
            sqs_error_to_dead_letter_queue(queue_arn, event, result)
        return True
    except Exception as e:
        # deliberate best-effort swallow: log and return None rather than
        # letting SQS processing break the caller
        LOG.warning('Unable to run Lambda function on SQS messages: %s %s' % (e, traceback.format_exc()))