def get_parser(url: str, headers: Optional[dict] = None) -> Type[Parser]:
    """Resolve the parser class that matches the AWS service encoded in *url*.

    The host's first dot-separated label usually names the service; a few
    services need extra URL/header heuristics, checked in a fixed order.
    """
    if should_use_tracer_extension():
        return Parser
    service = safe_split_get(url, ".", 0)
    # Services identified purely by the first host label.
    by_service = {
        "dynamodb": DynamoParser,
        "sns": SnsParser,
        "lambda": LambdaParser,
        "kinesis": KinesisParser,
        "events": EventBridgeParser,
    }
    matched = by_service.get(service)
    if matched is not None:
        return matched
    if "s3" in (service, safe_split_get(url, ".", 1)):
        return S3Parser
    # SQS Legacy Endpoints: https://docs.aws.amazon.com/general/latest/gr/rande.html
    if service in ("sqs", "sqs-fips") or "queue.amazonaws.com" in url:
        return SqsParser
    if "execute-api" in url:
        return ApiGatewayV2Parser
    if url.endswith("amazonaws.com") or (headers and headers.get("x-amzn-requestid")):
        return ServerlessAWSParser
    return Parser
def parse_request(self, parse_params: HttpRequest) -> dict:
    """Enrich the base parse result with the S3 resource (bucket) name."""
    bucket = safe_split_get(parse_params.host, ".", 0)
    if bucket == "s3":
        # Host starts with a bare "s3" — presumably a path-style URL, so the
        # bucket name is the first path segment instead (TODO confirm).
        bucket = safe_split_get(parse_params.uri, "/", 1)
    base_result = super().parse_request(parse_params)
    return recursive_json_join({"info": {"resourceName": bucket}}, base_result)
def get_patched_root(self):
    """
    Rebuild the trace header's Root so our transaction id is passed/shared.
    More info: https://docs.aws.amazon.com/xray/latest/devguide/xray-api-sendingdata.html#xray-api-traceids
    """
    # X-Ray roots embed the epoch time in hex (without the "0x" prefix).
    epoch_hex = hex(int(time.time()))[2:]
    root_prefix = safe_split_get(self.trace_root, "-", 0)
    return f"Root={root_prefix}-{epoch_hex}-{self.transaction_id}{self.trace_id_suffix}"
def _parse_appsync(event: dict) -> dict:
    """Extract AppSync trigger metadata (host and trace message id) from an event.

    Looks for request headers first under ``context.request.headers`` and then
    under ``request.headers``.

    :param event: the Lambda event dict (shape depends on the AppSync resolver
        configuration — TODO confirm against real events).
    :return: dict with ``triggeredBy``, ``api`` (host header) and ``messageId``
        (last ``=``-separated token of the x-amzn-trace-id header).
    """
    headers = (
        safe_get(event, ["context", "request", "headers"])
        or safe_get(event, ["request", "headers"])
        or {}  # fix: both lookups may miss; without this .get() raises AttributeError
    )
    host = headers.get("host")
    trace_id = headers.get("x-amzn-trace-id")
    message_id = safe_split_get(trace_id, "=", -1)
    return {"triggeredBy": "appsync", "api": host, "messageId": message_id}
def parse_request(self, parse_params: HttpRequest) -> dict:
    """Add the invoked Lambda's resource name and invocation type to the base parse."""
    # Fourth URI segment presumably holds the function name or its full ARN
    # (Invoke-style path) — TODO confirm against the caller.
    target = safe_split_get(unquote(parse_params.uri), "/", 3)
    if is_aws_arn(target):
        resource = extract_function_name_from_arn(target)
    else:
        resource = target
    return recursive_json_join(
        {
            "info": {"resourceName": resource},
            "invocationType": parse_params.headers.get("x-amz-invocation-type"),
        },
        super().parse_request(parse_params),
    )
def create_span(cls, event=None, context=None, is_new_invocation=False) -> "SpansContainer":
    """
    This function creates a span out of a given AWS context.
    The force flag delete any existing span-container (to handle with warm execution of lambdas).
    Note that if lambda will be executed directly (regular pythonic function call and not invoked),
    it will override the container.
    """
    # Warm invocation without an explicit reset: reuse the existing container.
    if cls._span and not is_new_invocation:
        return cls._span
    # copy the event to ensure that we will not change it
    event = copy.deepcopy(event)
    additional_info = {}
    # Only capture the (potentially large) event payload and env vars in verbose mode.
    if Configuration.verbose:
        additional_info.update({"event": EventDumper.dump_event(event), "envs": _get_envs_for_span()})
    # Split the X-Ray trace header into its root / transaction-id / suffix parts.
    trace_root, transaction_id, suffix = parse_trace_id(os.environ.get("_X_AMZN_TRACE_ID", ""))
    # Fall back to MAX_LAMBDA_TIME when the context lacks the remaining-time API
    # (e.g. direct pythonic invocation with a plain object as context).
    remaining_time = getattr(context, "get_remaining_time_in_millis", lambda: MAX_LAMBDA_TIME)()
    cls._span = SpansContainer(
        started=get_current_ms_time(),
        name=os.environ.get("AWS_LAMBDA_FUNCTION_NAME"),
        runtime=os.environ.get("AWS_EXECUTION_ENV"),
        region=get_region(),
        memory_allocated=os.environ.get("AWS_LAMBDA_FUNCTION_MEMORY_SIZE"),
        log_stream_name=os.environ.get("AWS_LAMBDA_LOG_STREAM_NAME"),
        log_group_name=os.environ.get("AWS_LAMBDA_LOG_GROUP_NAME"),
        trace_root=trace_root,
        transaction_id=transaction_id,
        trace_id_suffix=suffix,
        request_id=getattr(context, "aws_request_id", ""),
        # ARN layout is arn:aws:lambda:<region>:<account>:... — index 4 is the account id.
        account=safe_split_get(getattr(context, "invoked_function_arn", ""), ":", 4, ""),
        trigger_by=parse_triggered_by(event),
        max_finish_time=get_current_ms_time() + remaining_time,
        is_new_invocation=is_new_invocation,
        **additional_info,
    )
    return cls._span
def parse_request(self, parse_params: HttpRequest) -> dict:
    """Augment the base parse with the DynamoDB table, method and message id."""
    # The x-amz-target header looks like "<Prefix>.<Method>"; take the method part.
    target: str = parse_params.headers.get("x-amz-target", "")
    method = safe_split_get(target, ".", 1)
    try:
        body = json.loads(parse_params.body)
    except json.JSONDecodeError as err:
        get_logger().debug("Error while trying to parse ddb request body", exc_info=err)
        body = {}
    info = {
        "resourceName": self._extract_table_name(body, method),
        "dynamodbMethod": method,
        "messageId": self._extract_message_id(body, method),
    }
    return recursive_json_join({"info": info}, super().parse_request(parse_params))
def test_safe_split_get(input_params, expected_output):
    """Parametrized check: safe_split_get(*input_params) equals expected_output."""
    actual = safe_split_get(*input_params)
    assert actual == expected_output