def inject_trigger_tags_for_firehose(span, original_event):
    """Tag the span and invocation with Firehose trigger info and register
    incoming trace links derived from each record's payload hash."""
    domain_name = constants.DomainNames['STREAM']
    class_name = constants.ClassNames['FIREHOSE']
    stream_arn = original_event['deliveryStreamArn']
    # Stream name is everything after the first '/' in the delivery stream ARN.
    stream_name = stream_arn[stream_arn.index('/') + 1:]
    operation_names = [stream_name]
    region = original_event['region']

    trace_links = []
    for record in original_event.get('records', []):
        if "approximateArrivalTimestamp" not in record or "data" not in record:
            continue
        arrival_sec = record["approximateArrivalTimestamp"] / 1000
        try:
            payload = base64.b64decode(record["data"])
            payload_md5 = hashlib.md5(payload).hexdigest()
        except Exception:
            # Best effort: skip records whose payload cannot be decoded.
            continue
        # Cover +/- 1 second around the arrival time to tolerate clock skew.
        for offset in (-1, 0, 1):
            trace_links.append(
                region + ':' + stream_name + ':' +
                str(int(arrival_sec + offset)) + ':' + payload_md5)

    invocation_trace_support.add_incoming_trace_links(trace_links)
    inject_trigger_tags_to_span(span, domain_name, class_name, operation_names)
    inject_trigger_tags_to_invocation(domain_name, class_name, operation_names)
def inject_trigger_tags_for_api_gateway(span, original_event):
    """Tag the span and invocation with API Gateway trigger info and record
    the '/stage/path' string as the application resource name."""
    domain_name = constants.DomainNames['API']
    class_name = constants.ClassNames['APIGATEWAY']
    stage = str(original_event['context']['stage'])
    resource_path = str(original_event['params']['path'])
    host = str(original_event['params']['header']['Host'])
    path = '/' + stage + resource_path
    operation_names = [host + path]
    invocation_support.set_application_resource_name(path)
    inject_trigger_tags_to_span(span, domain_name, class_name, operation_names)
    inject_trigger_tags_to_invocation(domain_name, class_name, operation_names)
def test_dynamodb_trigger(tracer_and_invocation_support, handler,
                          mock_dynamodb_event, mock_context):
    """A DynamoDB stream event must set DB trigger tags on both the span and
    the invocation, and produce SAVE trace links for key and both images
    over three consecutive seconds."""
    _, handler = handler
    tracer, invocation_support, invocation_trace_support = tracer_and_invocation_support

    handler(mock_dynamodb_event, mock_context)

    execution_context = ExecutionContextManager.get()
    span = execution_context.recorder.get_spans()[0]

    assert lambda_event_utils.get_lambda_event_type(
        mock_dynamodb_event,
        mock_context) == lambda_event_utils.LambdaEventType.DynamoDB

    expected_operations = ['ExampleTableWithStream']
    # The same trigger tags are expected on the span and on the invocation.
    for tag_key, expected in (
            (constants.SpanTags['TRIGGER_DOMAIN_NAME'],
             constants.DomainNames['DB']),
            (constants.SpanTags['TRIGGER_CLASS_NAME'],
             constants.ClassNames['DYNAMODB']),
            (constants.SpanTags['TRIGGER_OPERATION_NAMES'],
             expected_operations)):
        assert span.get_tag(tag_key) == expected
        assert invocation_support.get_agent_tag(tag_key) == expected

    md5_key = hashlib.md5("Id={N: 101}".encode()).hexdigest()
    md5_image_1 = hashlib.md5(
        "Id={N: 101}, Message={S: New item!}".encode()).hexdigest()
    md5_image_2 = hashlib.md5(
        "Id={N: 101}, Message={S: This item has changed}".encode()).hexdigest()

    region = 'eu-west-2'
    table_name = 'ExampleTableWithStream'
    timestamp = 1480642019

    # One SAVE link per digest for each of the three covered seconds.
    links = [
        region + ':' + table_name + ':' + str(timestamp + delta) +
        ':' + 'SAVE' + ':' + digest
        for digest in (md5_key, md5_image_1, md5_image_2)
        for delta in range(3)
    ]

    assert sorted(invocation_trace_support.get_incoming_trace_links().get(
        'incomingTraceLinks')) == sorted(links)
def test_cloudwatch_logs_trigger(tracer_and_invocation_support, handler,
                                 mock_cloudwatch_logs_event, mock_context):
    """A CloudWatch Logs event must set Log trigger tags, with the log group
    name (decoded from the gzipped 'awslogs' payload) as the operation name."""
    _, handler = handler
    tracer, invocation_support, _ = tracer_and_invocation_support

    handler(mock_cloudwatch_logs_event, mock_context)

    execution_context = ExecutionContextManager.get()
    span = execution_context.recorder.get_spans()[0]

    assert lambda_event_utils.get_lambda_event_type(
        mock_cloudwatch_logs_event,
        mock_context) == lambda_event_utils.LambdaEventType.CloudWatchLogs

    # Recover the log group name the same way the agent does.
    compressed_data = base64.b64decode(
        mock_cloudwatch_logs_event['awslogs']['data'])
    decompressed_data = json.loads(
        str(GzipFile(fileobj=BytesIO(compressed_data)).read(), 'utf-8'))
    expected_operations = [decompressed_data['logGroup']]

    # The same trigger tags are expected on the span and on the invocation.
    for tag_key, expected in (
            (constants.SpanTags['TRIGGER_DOMAIN_NAME'], 'Log'),
            (constants.SpanTags['TRIGGER_CLASS_NAME'], 'AWS-CloudWatch-Log'),
            (constants.SpanTags['TRIGGER_OPERATION_NAMES'],
             expected_operations)):
        assert span.get_tag(tag_key) == expected
        assert invocation_support.get_agent_tag(tag_key) == expected
def attributes_to_str(attributes):
    """Serialize a DynamoDB attribute map to a canonical, order-independent string.

    Each attribute is rendered as ``name={TYPE: value}`` (e.g. ``Id={N: 101}``),
    sorted by attribute name and joined with ``', '`` so the result is stable
    regardless of dict ordering — it is used as input to an MD5 trace-link hash.

    Attributes that do not have the expected single-entry ``{type: value}``
    dict shape are skipped (best effort, matching the original behavior),
    but unexpected errors are no longer swallowed by a blanket except.
    """
    parts = []
    for name in sorted(attributes.keys()):
        value = attributes[name]
        try:
            # First (and normally only) key is the DynamoDB type tag, e.g. 'N', 'S'.
            type_key = next(iter(value.keys()))
            parts.append(name + '=' + '{' + type_key + ': ' + str(value[type_key]) + '}')
        except (StopIteration, AttributeError, TypeError, KeyError):
            # Empty dict, non-dict value, or otherwise malformed attribute: skip it.
            continue
    return ', '.join(parts)
def add_dynamodb_trace_links(trace_links, region, table_name, creation_time,
                             operation_type, attributes):
    """Append DynamoDB incoming trace links to *trace_links* in place.

    One link is added for each of the three seconds starting one second
    before *creation_time* (tolerating clock skew), keyed by the MD5 of the
    serialized attributes. Does nothing when *attributes* is empty/None.
    """
    if not attributes:
        return
    attributes_hash = hashlib.md5(
        attributes_to_str(attributes).encode()).hexdigest()
    if not attributes_hash:
        return
    base_time = int(creation_time - 1)
    prefix = region + ':' + table_name + ':'
    suffix = ':' + operation_type + ':' + attributes_hash
    trace_links.extend(prefix + str(base_time + i) + suffix for i in range(3))
def inject_trigger_tags_for_cloudwatch_logs(span, original_event):
    """Tag the span and invocation with CloudWatch Logs trigger info.

    The operation name is the log group extracted from the base64 + gzip
    encoded 'awslogs' payload; if decoding fails, the error is reported and
    the operation name list stays empty.
    """
    domain_name = constants.DomainNames['LOG']
    class_name = constants.ClassNames['CLOUDWATCHLOG']
    operation_names = []
    try:
        raw = base64.b64decode(original_event['awslogs']['data'])
        log_data = json.loads(
            str(GzipFile(fileobj=BytesIO(raw)).read(), 'utf-8'))
        operation_names = [log_data['logGroup']]
    except Exception as e:
        print('Error handling base64 format!', e)
    inject_trigger_tags_to_span(span, domain_name, class_name, operation_names)
    inject_trigger_tags_to_invocation(domain_name, class_name, operation_names)