def update_event_response(host: Optional[str], status_code: int, headers: dict, body: bytes) -> None:
    """
    Update the last HTTP span with a chunk of the HTTP response.

    :param host: If None, use the host from the last span, otherwise this is the
        first chunk and we can empty the aggregated response body.
    This function assumes synchronous execution - we update the last http event.
    """
    # Requests to the Lumigo edge itself are never traced.
    if is_lumigo_edge(host):
        return
    # Pop the most recent span; it is re-added (merged with the update) below.
    last_event = SpansContainer.get_span().pop_last_span()
    if last_event:
        http_info = last_event.get("info", {}).get("httpInfo", {})
        if not host:
            # Continuation chunk: reuse the host recorded on the last span.
            host = http_info.get("host", "unknownown") if False else http_info.get("host", "unknown")
        else:
            # First chunk: reset the aggregated response-body buffer.
            HttpState.previous_response_body = b""
        has_error = is_error_code(status_code)
        # Error responses are allowed a larger payload capture.
        max_size = Configuration.get_max_entry_size(has_error)
        # Normalize header names to lower-case for case-insensitive lookups.
        headers = {k.lower(): v for k, v in headers.items()} if headers else {}
        parser = get_parser(host, headers)()  # type: ignore
        # Keep aggregating the body only while under the size limit.
        if len(HttpState.previous_response_body) < max_size:
            HttpState.previous_response_body += body
        if has_error:
            # On error, retroactively raise the request-side capture limit too.
            _update_request_data_increased_size_limit(http_info, max_size)
        update = parser.parse_response(  # type: ignore
            host, status_code, headers, HttpState.previous_response_body  # type: ignore
        )
        SpansContainer.get_span().add_span(recursive_json_join(update, last_event))
def parse_response(self, url: str, status_code: int, headers, body: bytes) -> dict:
    """Enrich the base response span with the `x-amz-request-id` header as messageId."""
    base_span = super().parse_response(url, status_code, headers, body)
    enrichment = {"info": {"messageId": headers.get("x-amz-request-id")}}
    return recursive_json_join(enrichment, base_span)
def parse_response(self, url: str, status_code: int, headers, body: bytes) -> dict:
    """Enrich the base response span with the SQS message id extracted from the body."""
    base_span = super().parse_response(url, status_code, headers, body)
    enrichment = {"info": {"messageId": SqsParser._extract_message_id(body)}}
    return recursive_json_join(enrichment, base_span)
def parse_request(self, parse_params: HttpRequest) -> dict:
    """Enrich the base request span with the target resource name."""
    # The first subdomain of the host normally names the resource.
    name = safe_split_get(parse_params.host, ".", 0)
    if name == "s3":
        # For S3 the interesting name is the first path segment of the URI
        # (presumably the bucket) rather than the service subdomain.
        name = safe_split_get(parse_params.uri, "/", 1)
    base_span = super().parse_request(parse_params)
    return recursive_json_join({"info": {"resourceName": name}}, base_span)
def parse_request(self, parse_params: HttpRequest) -> dict:
    """Enrich the base request span with the queue URL as the resource name."""
    queue_url = safe_key_from_query(parse_params.body, "QueueUrl")
    base_span = super().parse_request(parse_params)
    return recursive_json_join({"info": {"resourceName": queue_url}}, base_span)
def add_step_end_event(self, ret_val):
    """Append a step-function span and tag the user's return value with its message id."""
    message_id = str(uuid.uuid4())
    step_span = create_step_function_span(message_id)
    self.spans.append(recursive_json_join(step_span, self.base_msg))
    self.span_ids_to_send.add(step_span["id"])
    if isinstance(ret_val, dict):
        # Embed the message id in the returned dict so the next state in the
        # step function can be correlated with this invocation.
        ret_val[LUMIGO_EVENT_KEY] = {STEP_FUNCTION_UID_KEY: message_id}
        get_logger().debug(f"Added key {LUMIGO_EVENT_KEY} to the user's return value")
def parse_response(self, url: str, status_code: int, headers, body: bytes) -> dict:
    """Enrich the base response span with a message id taken from either the
    `x-amzn-requestid` or the `apigw-requestid` response header."""
    message_id = headers.get("x-amzn-requestid") or headers.get("apigw-requestid")
    return recursive_json_join(
        {"info": {"messageId": message_id}},
        super().parse_response(url, status_code, headers, body),
    )
def parse_response(self, url: str, status_code: int, headers, body: bytes) -> dict:
    """Enrich the base response span with the SNS publish message id from the XML body."""
    base_span = super().parse_response(url, status_code, headers, body)
    message_id = safe_key_from_xml(body, "PublishResponse/PublishResult/MessageId")
    return recursive_json_join({"info": {"messageId": message_id}}, base_span)
def parse_request(self, parse_params: HttpRequest) -> dict:
    """Enrich the base request span with the SNS topic ARN.

    Both ``resourceName`` and ``targetArn`` are populated from the same
    ``TopicArn`` query parameter.
    """
    topic_arn = safe_key_from_query(parse_params.body, "TopicArn")
    return recursive_json_join(
        {"info": {"resourceName": topic_arn, "targetArn": topic_arn}},
        super().parse_request(parse_params),
    )
def parse_response(self, url: str, status_code: int, headers, body: bytes) -> dict:
    """Enrich the base response span with an optional message id and span id
    taken from the AWS request-id response headers."""
    extra = {}
    request_id = headers.get("x-amzn-requestid")
    # messageId is only attached when this parser opts in via should_add_message_id.
    if request_id and self.should_add_message_id:
        extra["info"] = {"messageId": request_id}
    # The span id falls back to the alternate header spelling.
    span_id = request_id or headers.get("x-amz-requestid")
    if span_id:
        extra["id"] = span_id
    return recursive_json_join(extra, super().parse_response(url, status_code, headers, body))
def parse_request(self, parse_params: HttpRequest) -> dict:
    """Enrich the base request span with the invoked Lambda's name and invocation type."""
    # The function reference is the fourth path segment of the (URL-decoded) URI.
    decoded_uri = safe_split_get(unquote(parse_params.uri), "/", 3)
    if is_aws_arn(decoded_uri):
        resource_name = extract_function_name_from_arn(decoded_uri)
    else:
        resource_name = decoded_uri
    return recursive_json_join(
        {
            "info": {"resourceName": resource_name},
            "invocationType": parse_params.headers.get("x-amz-invocation-type"),
        },
        super().parse_request(parse_params),
    )
def parse_request(self, parse_params: HttpRequest) -> dict:
    """Enrich the base request span with the EventBridge bus names.

    Collects the distinct ``EventBusName`` values from the request body's
    ``Entries`` list; ``resourceNames`` is ``None`` when none are found.
    """
    try:
        parsed_body = json.loads(parse_params.body)
    except json.JSONDecodeError as e:
        # Log at debug level, consistent with the other parsers: an unparsable
        # body is a recoverable condition here, not an application error.
        get_logger().debug(
            "Error while trying to parse eventBridge request body", exc_info=e)
        parsed_body = {}
    resource_names = set()
    if isinstance(parsed_body.get("Entries"), list):
        resource_names = {
            e["EventBusName"]
            for e in parsed_body["Entries"] if e.get("EventBusName")
        }
    return recursive_json_join(
        {"info": {
            "resourceNames": list(resource_names) or None
        }},
        super().parse_request(parse_params),
    )
def parse_response(self, url: str, status_code: int, headers, body: bytes) -> dict:
    """Enrich the base response span with the EventBridge event ids from the body."""
    try:
        payload = json.loads(body)
    except json.JSONDecodeError as e:
        payload = {}
        get_logger().debug(
            "Error while trying to parse eventBridge request body", exc_info=e)
    message_ids = []
    if isinstance(payload.get("Entries"), list):
        message_ids = [
            entry["EventId"] for entry in payload["Entries"] if entry.get("EventId")
        ]
    return recursive_json_join(
        {"info": {"messageIds": message_ids}},
        super().parse_response(url, status_code, headers, body),
    )
def parse_request(self, parse_params: HttpRequest) -> dict:
    """Enrich the base request span with DynamoDB method, table name and message id."""
    # The DynamoDB operation name is the part after the dot in x-amz-target.
    target: str = parse_params.headers.get("x-amz-target", "")
    method = safe_split_get(target, ".", 1)
    try:
        parsed_body = json.loads(parse_params.body)
    except json.JSONDecodeError as e:
        parsed_body = {}
        get_logger().debug("Error while trying to parse ddb request body",
                           exc_info=e)
    ddb_info = {
        "info": {
            "resourceName": self._extract_table_name(parsed_body, method),
            "dynamodbMethod": method,
            "messageId": self._extract_message_id(parsed_body, method),
        }
    }
    return recursive_json_join(ddb_info, super().parse_request(parse_params))
def add_span(self, span: dict):
    """
    This function parses a request event and adds it to the span.
    """
    # Merge the span with the shared base message before storing it,
    # and remember its id so it is included in the next send.
    self.spans.append(recursive_json_join(span, self.base_msg))
    self.span_ids_to_send.add(span["id"])
def __init__(
    self,
    name: str = None,
    started: int = None,
    region: str = None,
    runtime: str = None,
    memory_allocated: str = None,
    log_stream_name: str = None,
    log_group_name: str = None,
    trace_root: str = None,
    transaction_id: str = None,
    request_id: str = None,
    account: str = None,
    trace_id_suffix: str = None,
    trigger_by: dict = None,
    max_finish_time: int = None,
    is_new_invocation: bool = False,
    event: str = None,
    envs: str = None,
):
    """
    Build the container's function span and the base message that is merged
    into every subsequent span.
    """
    # Read the tracer version from disk; fall back to "unknown" if missing.
    version = open(
        _VERSION_PATH, "r").read() if os.path.exists(_VERSION_PATH) else "unknown"
    version = version.strip()
    self.name = name
    self.region = region
    self.trace_root = trace_root
    self.trace_id_suffix = trace_id_suffix
    malformed_txid = False
    if transaction_id == MALFORMED_TXID:
        # Replace a malformed transaction id with a fresh random one and flag it.
        transaction_id = os.urandom(12).hex()
        malformed_txid = True
    self.transaction_id = transaction_id
    self.max_finish_time = max_finish_time
    # Fields shared by every span produced during this invocation.
    self.base_msg = {
        "started": started,
        "transactionId": transaction_id,
        "account": account,
        "region": region,
        "parentId": request_id,
        "info": {
            "tracer": {
                "version": version
            },
            "traceId": {
                "Root": trace_root
            }
        },
        "token": Configuration.token,
    }
    # Provisioned-concurrency initializations are not real cold starts.
    is_cold = SpansContainer.is_cold and not is_provision_concurrency_initialization(
    )
    self.function_span = recursive_json_join(
        {
            "id": request_id,
            "type": FUNCTION_TYPE,
            "name": name,
            "runtime": runtime,
            "event": event,
            "envs": envs,
            "memoryAllocated": memory_allocated,
            "readiness": "cold" if is_cold else "warm",
            "info": {
                "logStreamName": log_stream_name,
                "logGroupName": log_group_name,
                **(trigger_by or {}),
            },
            "isMalformedTransactionId": malformed_txid,
            EXECUTION_TAGS_KEY: [],
        },
        self.base_msg,
    )
    self.span_ids_to_send: Set[str] = set()
    self.spans: List[Dict] = []
    # The class-level cold flag is cleared once per real invocation.
    if is_new_invocation:
        SpansContainer.is_cold = False
def test_recursive_json_join(d1, d2, result):
    """Check that merging d1 over d2 yields the expected combined dict."""
    merged = recursive_json_join(d1, d2)
    assert merged == result