def str_to_tuple(val: str) -> Optional[Tuple]:
    """Split a comma-separated string into a tuple of its parts.

    Returns None for falsy input, and None (after a debug log) on any
    unexpected failure.
    """
    try:
        return tuple(val.split(",")) if val else None
    except Exception as e:
        get_logger().debug("Error while convert str to tuple", exc_info=e)
        return None
def dump_event(
    event: Dict,
    handlers: Optional[List[EventParseHandler]] = None,
    has_error: bool = False,
) -> str:
    """Serialize a lambda trigger event to a size-capped JSON string.

    Tries each parse handler in order; the first handler that reports support
    parses the event (applying its omit/skip paths) before dumping. Falls back
    to dumping the raw event when no handler matches or a handler raises.

    :param event: the raw lambda trigger event.
    :param handlers: optional override of the default parse-handler chain.
    :param has_error: affects the max entry size taken from Configuration.
    :return: the JSON string produced by lumigo_dumps (or aws_dump).
    """
    max_size = Configuration.get_max_entry_size(has_error)
    if should_use_tracer_extension():
        # The extension path serializes the event as-is, without truncation.
        return aws_dump(event)
    # NOTE: the default handler list is rebuilt on every call, so the None
    # default carries no shared-mutable-default risk.
    handlers = handlers or [
        ApiGWHandler(),
        SNSHandler(),
        SQSHandler(),
        S3Handler(),
        CloudfrontHandler(),
        DDBHandler(),
    ]
    event_obj = Event(event)
    for handler in handlers:
        try:
            if handler.is_supported(event_obj):
                return lumigo_dumps(
                    handler.parse(event),
                    max_size,
                    omit_skip_path=handler.get_omit_skip_path(),
                )
        except Exception as e:
            # Best-effort parsing: a failing handler must not break tracing;
            # fall through to the next handler / the raw dump below.
            get_logger().debug(
                f"Error while trying to parse with handler {handler.__class__.__name__} event {event}",
                exc_info=e,
            )
    return lumigo_dumps(event, max_size)
def handle_timeout(self, *args):
    """Flush all pending spans when the lambda is about to hit its timeout."""
    get_logger().info("The tracer reached the end of the timeout timer")
    pending = []
    for span in self.spans:
        if span["id"] in self.span_ids_to_send:
            pending.append(span)
    self.span_ids_to_send.clear()
    if Configuration.send_only_if_error:
        # In error-only mode the start span was never reported; include it now.
        pending.append(self._generate_start_span())
    lumigo_utils.report_json(region=self.region, msgs=pending)
def str_to_list(val: str) -> Optional[List[str]]:
    """Split a comma-separated string into a list of its parts.

    Returns None for falsy input, and None (after a debug log) on any
    unexpected failure.
    """
    try:
        return val.split(",") if val else None
    except Exception as e:
        get_logger().debug("Error while convert str to list", exc_info=e)
        return None
def wrap_sqlalchemy():
    """Instrument SQLAlchemy engine creation, if the library is installed."""
    with lumigo_safe_execute("wrap sqlalchemy"):
        sqlalchemy_spec = importlib.util.find_spec("sqlalchemy")
        if sqlalchemy_spec and listen:
            get_logger().debug("wrapping sqlalchemy")
            wrap_function_wrapper(
                "sqlalchemy.engine.strategies",
                "DefaultEngineStrategy.create",
                execute_wrapper,
            )
def _after_cursor_execute(conn, cursor, statement, parameters, context, executemany):
    """SQLAlchemy ``after_cursor_execute`` hook: close the span opened at start.

    Marks the last span as ended with an empty response; the query details were
    recorded by the matching before-execute hook.
    """
    with lumigo_safe_execute("handle sqlalchemy after execute"):
        span = SpansContainer.get_span().get_last_span()
        if not span:
            # Fixed copy-pasted "Redis" wording — this is the SQLAlchemy hook.
            get_logger().warning("SQLAlchemy span ended without a record on its start")
            return
        span.update({"ended": get_current_ms_time(), "response": ""})
def lambda_wrapper(*args, **kwargs):
    """Wrap the customer's lambda handler with Lumigo tracing.

    Creates the invocation span, runs the wrapped handler (the enclosing
    closure's ``func``), records any exception the handler raises, and always
    attempts to end and report the span. If the tracer itself fails before the
    handler ran, the handler is invoked untraced as a fallback.
    """
    if _is_context_already_wrapped(*args):
        # Double-wrapping guard (e.g. warm container already instrumented).
        return func(*args, **kwargs)
    _add_wrap_flag_to_context(*args)
    executed = False  # True once the customer's handler has actually started.
    ret_val = None
    try:
        SpansContainer.create_span(*args, is_new_invocation=True)
        with lumigo_safe_execute("auto tag"):
            AutoTagEvent.auto_tag_event(args[0])
        SpansContainer.get_span().start(*args)
        try:
            executed = True
            ret_val = func(*args, **kwargs)
        except Exception as e:
            # The customer's code failed: record the exception, then
            # re-raise it unchanged so the platform sees the failure.
            with lumigo_safe_execute("Customer's exception"):
                SpansContainer.get_span().add_exception_event(e, inspect.trace())
            raise
        finally:
            # End/report the span whether the handler succeeded or raised.
            with lumigo_safe_execute("end"):
                SpansContainer.get_span().end(ret_val, *args)
        return ret_val
    except Exception:
        # The case where our wrapping raised an exception
        if not executed:
            # Tracer broke before the handler ran — run the handler untraced.
            TimeoutMechanism.stop()
            get_logger().exception("exception in the wrapper", exc_info=True)
            return func(*args, **kwargs)
        else:
            # The handler already ran; propagate its exception.
            raise
def wrap_redis():
    """Instrument redis commands and pipeline executions, if redis is installed."""
    with lumigo_safe_execute("wrap redis"):
        if not importlib.util.find_spec("redis"):
            return
        get_logger().debug("wrapping redis")
        wrap_function_wrapper(
            "redis.client", "Redis.execute_command", execute_command_wrapper
        )
        wrap_function_wrapper("redis.client", "Pipeline.execute", execute_wrapper)
def add_step_end_event(self, ret_val):
    """Emit a step-function span and tag the return value with its message id."""
    message_id = str(uuid.uuid4())
    step_function_span = create_step_function_span(message_id)
    self.spans.append(recursive_json_join(step_function_span, self.base_msg))
    self.span_ids_to_send.add(step_function_span["id"])
    if not isinstance(ret_val, dict):
        return
    # Propagate the id so the next step can stitch the traces together.
    ret_val[LUMIGO_EVENT_KEY] = {STEP_FUNCTION_UID_KEY: message_id}
    get_logger().debug(f"Added key {LUMIGO_EVENT_KEY} to the user's return value")
def test_report_json_china_on_error_no_exception_and_notify_user(capsys, monkeypatch):
    """report_json must not raise in the China region when Kinesis fails; it prints a warning."""
    monkeypatch.setattr(Configuration, "should_report", True)
    monkeypatch.setattr(Configuration, "edge_kinesis_aws_access_key_id", "my_value")
    monkeypatch.setattr(Configuration, "edge_kinesis_aws_secret_access_key", "my_value")
    # Force every boto3 client creation to blow up.
    monkeypatch.setattr(boto3, "client", MagicMock(side_effect=Exception))
    lumigo_utils.get_logger().setLevel(logging.CRITICAL)

    report_json(CHINA_REGION, [{"a": "b"}])

    captured = capsys.readouterr()
    assert "Failed to send spans" in captured.out
def start(self, event=None, context=None):
    """Report the start span (unless in error-only mode) and arm the timeout timer."""
    to_send = self._generate_start_span()
    if Configuration.send_only_if_error:
        get_logger().debug(
            "Skip sending start because tracer in 'send only if error' mode ."
        )
    else:
        report_duration = lumigo_utils.report_json(region=self.region, msgs=[to_send])
        self.function_span["reporter_rtt"] = report_duration
    self.start_timeout_timer(context)
def command_failed(exception: Exception):
    """Close the last redis span, recording the failure's first argument as the error."""
    with lumigo_safe_execute("redis command failed"):
        span = SpansContainer.get_span().get_last_span()
        if not span:
            get_logger().warning("Redis span ended without a record on its start")
            return
        failure = exception.args[0] if exception.args else None
        span.update({"ended": get_current_ms_time(), "error": failure})
def command_finished(ret_val: Dict):
    """Close the last redis span, attaching the serialized command response."""
    with lumigo_safe_execute("redis command finished"):
        span = SpansContainer.get_span().get_last_span()
        if not span:
            get_logger().warning("Redis span ended without a record on its start")
            return
        # Deep-copy before dumping so serialization cannot mutate the result.
        response = lumigo_dumps(copy.deepcopy(ret_val))
        span.update({"ended": get_current_ms_time(), "response": response})
def auto_tag_event(
    event: Optional[Dict] = None,
    handlers: Optional[List[EventAutoTagHandler]] = None,
) -> None:
    """Run every auto-tag handler that supports the given trigger event."""
    if not event:
        return
    for handler in handlers or [ApiGWHandler(), ConfigurationHandler()]:
        try:
            if handler.is_supported(event):
                handler.auto_tag(event)
        except Exception as e:
            # A broken handler must never fail the customer's invocation.
            get_logger().debug(
                f"Error while trying to auto tag with handler {handler.__class__.__name__} event {event}",
                exc_info=e,
            )
def start_timeout_timer(self, context=None) -> None:
    """Arm the timeout-safety timer based on the lambda's remaining run time."""
    if not Configuration.timeout_timer:
        return
    if not hasattr(context, "get_remaining_time_in_millis"):
        get_logger().info(
            "Skip setting timeout timer - Could not get the remaining time."
        )
        return
    remaining_time = context.get_remaining_time_in_millis() / 1000
    buffer = get_timeout_buffer(remaining_time)
    # Don't arm the timer when the buffer eats the whole window or the
    # timeout is too short to be worth the extra thread.
    if buffer >= remaining_time or remaining_time < 2:
        get_logger().debug("Skip setting timeout timer - Too short timeout.")
        return
    TimeoutMechanism.start(remaining_time - buffer, self.handle_timeout)
def failed(self, event):
    """Close the span of a failed mongo command with its error details."""
    with lumigo_safe_execute("pymongo failed"):
        if event.request_id not in LumigoMongoMonitoring.request_to_span_id:
            get_logger().warning("Mongo span ended without a record on its start")
            return
        matching_id = LumigoMongoMonitoring.request_to_span_id.pop(event.request_id)
        span = SpansContainer.get_span().get_span_by_id(matching_id)
        # duration_micros is microseconds; spans keep milliseconds.
        ended_at = span["started"] + (event.duration_micros / 1000)
        span.update({"ended": ended_at, "error": lumigo_dumps(event.failure)})
def wrap_aiohttp():
    """Attach Lumigo trace hooks to every new aiohttp ClientSession."""
    with lumigo_safe_execute("wrap http calls"):
        get_logger().debug("wrapping http requests")
        if not aiohttp:
            return
        trace_config = aiohttp.TraceConfig()
        hooks = (
            (trace_config.on_request_start, on_request_start),
            (trace_config.on_request_chunk_sent, on_request_chunk_sent),
            (trace_config.on_request_end, on_request_end),
            (trace_config.on_response_chunk_received, on_response_chunk_received),
            (trace_config.on_request_exception, on_request_exception),
        )
        for signal, handler in hooks:
            signal.append(handler)
        wrap_function_wrapper(
            "aiohttp.client",
            "ClientSession.__init__",
            aiohttp_trace_configs_wrapper(trace_config),
        )
def _handle_error(context):
    """SQLAlchemy error hook: close the current span with the original exception.

    :param context: SQLAlchemy's exception context, carrying original_exception.
    """
    with lumigo_safe_execute("handle sqlalchemy error"):
        span = SpansContainer.get_span().get_span_by_id(_last_span_id)
        if not span:
            # Fixed copy-pasted "Redis" wording — this is the SQLAlchemy hook.
            get_logger().warning("SQLAlchemy span ended without a record on its start")
            return
        span.update(
            {
                "ended": get_current_ms_time(),
                "error": lumigo_dumps(
                    {
                        "type": context.original_exception.__class__.__name__,
                        "args": context.original_exception.args,
                    }
                ),
            }
        )
def end(self, ret_val=None, event: Optional[dict] = None, context=None) -> Optional[int]:
    """Close the invocation: finalize the function span and report the spans.

    :param ret_val: the handler's return value (serialized in verbose mode).
    :param event: the trigger event, used for error extra-data enrichment.
    :param context: unused here; kept for signature symmetry with start().
    :return: reporter round-trip time in ms, or None when nothing was sent.
    """
    TimeoutMechanism.stop()
    reported_rtt = None
    self.previous_request = None
    self.function_span.update({"ended": get_current_ms_time()})
    if Configuration.is_step_function:
        self.add_step_end_event(ret_val)
    parsed_ret_val = None
    if Configuration.verbose:
        try:
            if ret_val is not None:
                # enforce_jsonify: surface serialization problems here rather
                # than letting the lambda fail later with a vague error.
                parsed_ret_val = lumigo_dumps(ret_val, enforce_jsonify=True, decimal_safe=True)
        except Exception as err:
            suffix = ""
            if err.args:
                suffix = f'Original message: "{err.args[0]}"'
            self.function_span["error"] = self._create_exception_event(
                "ReturnValueError",
                "The lambda will probably fail due to bad return value. " + suffix,
            )
    self.function_span.update({"return_value": parsed_ret_val})
    if _is_span_has_error(self.function_span):
        self._set_error_extra_data(event)
    # In "send only if error" mode we still report when ANY span errored.
    spans_contain_errors: bool = any(
        _is_span_has_error(s) for s in self.spans + [self.function_span]
    )
    if (not Configuration.send_only_if_error) or spans_contain_errors:
        to_send = [self.function_span] + [
            s for s in self.spans if s["id"] in self.span_ids_to_send
        ]
        reported_rtt = lumigo_utils.report_json(region=self.region, msgs=to_send)
    else:
        get_logger().debug(
            "No Spans were sent, `Configuration.send_only_if_error` is on and no span has error"
        )
    return reported_rtt
def wrap_http_calls():
    """Instrument http.client plus the optional botocore/urllib3/requests layers."""
    with lumigo_safe_execute("wrap http calls"):
        get_logger().debug("wrapping http requests")
        # (required package or None, module, attribute path, wrapper) —
        # order matters: it mirrors the original wrapping sequence.
        targets = (
            (None, "http.client", "HTTPConnection.send", _http_send_wrapper),
            (None, "http.client", "HTTPConnection.request", _headers_reminder_wrapper),
            ("botocore", "botocore.awsrequest", "AWSRequest.__init__", _putheader_wrapper),
            (None, "http.client", "HTTPConnection.getresponse", _response_wrapper),
            (None, "http.client", "HTTPResponse.read", _read_wrapper),
            ("urllib3", "urllib3.response", "HTTPResponse.read_chunked", _read_stream_wrapper),
            ("requests", "requests.api", "request", _requests_wrapper),
        )
        for required_pkg, module, attribute, wrapper in targets:
            if required_pkg is None or importlib.util.find_spec(required_pkg):
                wrap_function_wrapper(module, attribute, wrapper)
def parse_request(self, parse_params: HttpRequest) -> dict:
    """Extract the EventBridge bus names from a PutEvents request body.

    :param parse_params: the outgoing HTTP request to EventBridge.
    :return: the base parse result enriched with info.resourceNames.
    """
    try:
        parsed_body = json.loads(parse_params.body)
    except json.JSONDecodeError as e:
        # Downgraded from logger.exception: a non-JSON body is an expected,
        # recoverable case; the sibling DDB/response parsers log it at debug.
        get_logger().debug(
            "Error while trying to parse eventBridge request body", exc_info=e
        )
        parsed_body = {}
    resource_names = set()
    entries = parsed_body.get("Entries")
    if isinstance(entries, list):
        resource_names = {e["EventBusName"] for e in entries if e.get("EventBusName")}
    return recursive_json_join(
        {"info": {"resourceNames": list(resource_names) or None}},
        super().parse_request(parse_params),
    )
def parse_request(self, parse_params: HttpRequest) -> dict:
    """Enrich a DynamoDB request with table name, method, and message id."""
    # The API method lives after the dot in the x-amz-target header.
    target: str = parse_params.headers.get("x-amz-target", "")
    method = safe_split_get(target, ".", 1)
    try:
        parsed_body = json.loads(parse_params.body)
    except json.JSONDecodeError as e:
        get_logger().debug("Error while trying to parse ddb request body", exc_info=e)
        parsed_body = {}
    info = {
        "resourceName": self._extract_table_name(parsed_body, method),
        "dynamodbMethod": method,
        "messageId": self._extract_message_id(parsed_body, method),
    }
    return recursive_json_join({"info": info}, super().parse_request(parse_params))
def parse_response(self, url: str, status_code: int, headers, body: bytes) -> dict:
    """Collect the EventIds returned by an EventBridge PutEvents call.

    :param url: request URL; forwarded to the base parser.
    :param status_code: HTTP status code; forwarded to the base parser.
    :param headers: response headers; forwarded to the base parser.
    :param body: raw response body (expected JSON with an "Entries" list).
    :return: the base parse result enriched with info.messageIds.
    """
    try:
        parsed_body = json.loads(body)
    except json.JSONDecodeError as e:
        # Fixed the log text: this parser handles the *response* body.
        get_logger().debug(
            "Error while trying to parse eventBridge response body", exc_info=e
        )
        parsed_body = {}
    message_ids = []
    if isinstance(parsed_body.get("Entries"), list):
        message_ids = [e["EventId"] for e in parsed_body["Entries"] if e.get("EventId")]
    return recursive_json_join(
        {"info": {"messageIds": message_ids}},
        super().parse_response(url, status_code, headers, body),
    )
def get_extension_logger():
    """Return the logger dedicated to the tracer's extension component."""
    extension_logger = get_logger("lumigo-extension")
    return extension_logger
def capture_all_logs(caplog):
    """Fixture: let caplog capture the lumigo logger's DEBUG output."""
    # The lumigo logger normally doesn't propagate; enable it so caplog sees it.
    get_logger().propagate = True
    caplog.set_level(logging.DEBUG, logger="lumigo")
def verbose_logger():
    """Fixture: surface every tracer log line and enable verbose, non-reporting mode."""
    tracer_logger = lumigo_utils.get_logger()
    tracer_logger.setLevel(logging.DEBUG)
    lumigo_utils.config(should_report=False, verbose=True)
def wrap_pymongo():
    """Register Lumigo's pymongo command monitor, if pymongo is installed."""
    # Fixed typo in the safe-execute label: was "wrap pymogno".
    with lumigo_safe_execute("wrap pymongo"):
        if monitoring:
            get_logger().debug("wrapping pymongo")
            monitoring.register(LumigoMongoMonitoring())