def __init__(self, config=None, **inline):
    """Build the APM client: loggers, config, transport, tracer, metrics.

    :param config: optional config object/dict merged into ``Config``
    :param inline: inline config overrides; ``server_version`` is popped
        out first (mainly used for testing, per the original comment)
    """
    # configure loggers first
    cls = self.__class__
    self.logger = get_logger("%s.%s" % (cls.__module__, cls.__name__))
    self.error_logger = get_logger("elasticapm.errors")
    # Internal state initialised up-front so attributes always exist.
    self._pid = None
    self._thread_starter_lock = threading.Lock()
    self._thread_managers = {}
    self.tracer = None
    self.processors = []
    self.filter_exception_types_dict = {}
    self._service_info = None
    # setting server_version here is mainly used for testing
    self.server_version = inline.pop("server_version", None)
    self.check_python_version()
    config = Config(config, inline_dict=inline)
    if config.errors:
        # Invalid configuration: log each error and stop sending data.
        for msg in config.errors.values():
            self.error_logger.error(msg)
        config.disable_send = True
    if config.service_name == "python_service":
        self.logger.warning("No custom SERVICE_NAME was set -- using non-descript default 'python_service'")
    self.config = VersionedConfig(config, version=None)

    # Insert the log_record_factory into the logging library
    # The LogRecordFactory functionality is only available on python 3.2+
    if compat.PY3 and not self.config.disable_log_record_factory:
        record_factory = logging.getLogRecordFactory()
        # Only way to know if it's wrapped is to create a log record
        throwaway_record = record_factory(__name__, logging.DEBUG, __file__, 252, "dummy_msg", [], None)
        if not hasattr(throwaway_record, "elasticapm_labels"):
            self.logger.debug("Inserting elasticapm log_record_factory into logging")
            # Late import due to circular imports
            import elasticapm.handlers.logging as elastic_logging

            new_factory = elastic_logging.log_record_factory(record_factory)
            logging.setLogRecordFactory(new_factory)

    # HTTP headers sent with every intake request.
    headers = {
        "Content-Type": "application/x-ndjson",
        "Content-Encoding": "gzip",
        "User-Agent": self.get_user_agent(),
    }
    transport_kwargs = {
        "headers": headers,
        "verify_server_cert": self.config.verify_server_cert,
        "server_cert": self.config.server_cert,
        "timeout": self.config.server_timeout,
        "processors": self.load_processors(),
    }
    # Ensure a trailing slash so urljoin appends instead of replacing the path.
    self._api_endpoint_url = compat.urlparse.urljoin(
        self.config.server_url if self.config.server_url.endswith("/") else self.config.server_url + "/",
        constants.EVENTS_API_PATH,
    )
    transport_class = import_string(self.config.transport_class)
    self._transport = transport_class(url=self._api_endpoint_url, client=self, **transport_kwargs)
    self.config.transport = self._transport
    # Transport runs its own background thread; managed with the others.
    self._thread_managers["transport"] = self._transport
    # Map exception class name -> module for dotted paths in
    # filter_exception_types (module is "" for bare class names).
    for exc_to_filter in self.config.filter_exception_types or []:
        exc_to_filter_type = exc_to_filter.split(".")[-1]
        exc_to_filter_module = ".".join(exc_to_filter.split(".")[:-1])
        self.filter_exception_types_dict[exc_to_filter_type] = exc_to_filter_module

    if platform.python_implementation() == "PyPy":
        # PyPy introduces a `_functools.partial.__call__` frame due to our use
        # of `partial` in AbstractInstrumentedModule
        skip_modules = ("elasticapm.", "_functools")
    else:
        skip_modules = ("elasticapm.",)

    self.tracer = Tracer(
        # Collect the current stack, skipping agent-internal frames.
        frames_collector_func=lambda: list(
            stacks.iter_stack_frames(
                start_frame=inspect.currentframe(), skip_top_modules=skip_modules, config=self.config
            )
        ),
        # Post-process raw frames into serializable stack info, shortening
        # captured local variables according to the configured limits.
        frames_processing_func=lambda frames: self._get_stack_info_for_trace(
            frames,
            library_frame_context_lines=self.config.source_lines_span_library_frames,
            in_app_frame_context_lines=self.config.source_lines_span_app_frames,
            with_locals=self.config.collect_local_variables in ("all", "transactions"),
            locals_processor_func=lambda local_var: varmap(
                lambda k, v: shorten(
                    v,
                    list_length=self.config.local_var_list_max_length,
                    string_length=self.config.local_var_max_length,
                    dict_length=self.config.local_var_dict_max_length,
                ),
                local_var,
            ),
        ),
        queue_func=self.queue,
        config=self.config,
        agent=self,
    )
    self.include_paths_re = stacks.get_path_regex(self.config.include_paths) if self.config.include_paths else None
    self.exclude_paths_re = stacks.get_path_regex(self.config.exclude_paths) if self.config.exclude_paths else None

    self._metrics = MetricsRegistry(self)
    for path in self.config.metrics_sets:
        self._metrics.register(path)
    if self.config.breakdown_metrics:
        self._metrics.register("elasticapm.metrics.sets.breakdown.BreakdownMetricSet")
    if self.config.prometheus_metrics:
        self._metrics.register("elasticapm.metrics.sets.prometheus.PrometheusMetrics")
    if self.config.metrics_interval:
        self._thread_managers["metrics"] = self._metrics
    compat.atexit_register(self.close)
    if self.config.central_config:
        self._thread_managers["config"] = self.config
    else:
        self._config_updater = None
    if self.config.use_elastic_excepthook:
        # Keep the original hook so it can be chained/restored later.
        self.original_excepthook = sys.excepthook
        sys.excepthook = self._excepthook
    if config.enabled:
        self.start_threads()
    # Save this Client object as the global CLIENT_SINGLETON
    set_client(self)
def __init__(self, config=None, **inline):
    """Build the APM client: loggers, config, transport, tracer, metrics.

    :param config: optional config object/dict merged into ``Config``
    :param inline: inline config overrides passed through to ``Config``
    """
    # configure loggers first
    cls = self.__class__
    self.logger = logging.getLogger("%s.%s" % (cls.__module__, cls.__name__))
    self.error_logger = logging.getLogger("elasticapm.errors")
    self.tracer = None
    self.processors = []
    self.filter_exception_types_dict = {}
    self._service_info = None

    self.config = Config(config, inline_dict=inline)
    if self.config.errors:
        # Invalid configuration: log each error and stop sending data.
        for msg in self.config.errors.values():
            self.error_logger.error(msg)
        self.config.disable_send = True

    # HTTP headers sent with every intake request.
    headers = {
        "Content-Type": "application/x-ndjson",
        "Content-Encoding": "gzip",
        "User-Agent": "elasticapm-python/%s" % elasticapm.VERSION,
    }
    if self.config.secret_token:
        headers["Authorization"] = "Bearer %s" % self.config.secret_token
    transport_kwargs = {
        "metadata": self._build_metadata(),
        "headers": headers,
        "verify_server_cert": self.config.verify_server_cert,
        "server_cert": self.config.server_cert,
        "timeout": self.config.server_timeout,
        # api_request_time is configured in milliseconds; transport wants seconds.
        "max_flush_time": self.config.api_request_time / 1000.0,
        "max_buffer_size": self.config.api_request_size,
    }
    # Ensure a trailing slash so urljoin appends instead of replacing the path.
    self._api_endpoint_url = compat.urlparse.urljoin(
        self.config.server_url if self.config.server_url.endswith("/") else self.config.server_url + "/",
        constants.EVENTS_API_PATH,
    )
    self._transport = import_string(self.config.transport_class)(self._api_endpoint_url, **transport_kwargs)

    # Map exception class name -> module for dotted paths in
    # filter_exception_types (module is "" for bare class names).
    for exc_to_filter in self.config.filter_exception_types or []:
        exc_to_filter_type = exc_to_filter.split(".")[-1]
        exc_to_filter_module = ".".join(exc_to_filter.split(".")[:-1])
        self.filter_exception_types_dict[exc_to_filter_type] = exc_to_filter_module

    self.processors = [import_string(p) for p in self.config.processors] if self.config.processors else []

    if platform.python_implementation() == "PyPy":
        # PyPy introduces a `_functools.partial.__call__` frame due to our use
        # of `partial` in AbstractInstrumentedModule
        skip_modules = ("elasticapm.", "_functools")
    else:
        skip_modules = ("elasticapm.",)

    self.tracer = Tracer(
        # Collect the current stack, skipping agent-internal frames.
        frames_collector_func=lambda: list(
            stacks.iter_stack_frames(start_frame=inspect.currentframe(), skip_top_modules=skip_modules)
        ),
        # Post-process raw frames into serializable stack info, shortening
        # captured local variables according to the configured limits.
        frames_processing_func=lambda frames: self._get_stack_info_for_trace(
            frames,
            library_frame_context_lines=self.config.source_lines_span_library_frames,
            in_app_frame_context_lines=self.config.source_lines_span_app_frames,
            with_locals=self.config.collect_local_variables in ("all", "transactions"),
            locals_processor_func=lambda local_var: varmap(
                lambda k, v: shorten(
                    v,
                    list_length=self.config.local_var_list_max_length,
                    string_length=self.config.local_var_max_length,
                ),
                local_var,
            ),
        ),
        queue_func=self.queue,
        sample_rate=self.config.transaction_sample_rate,
        max_spans=self.config.transaction_max_spans,
        span_frames_min_duration=self.config.span_frames_min_duration,
        ignore_patterns=self.config.transactions_ignore_patterns,
    )
    self.include_paths_re = stacks.get_path_regex(self.config.include_paths) if self.config.include_paths else None
    self.exclude_paths_re = stacks.get_path_regex(self.config.exclude_paths) if self.config.exclude_paths else None

    # metrics_interval is configured in milliseconds; registry wants seconds.
    self._metrics = MetricsRegistry(
        self.config.metrics_interval / 1000.0, self.queue, ignore_patterns=self.config.disable_metrics
    )
    for path in self.config.metrics_sets:
        self._metrics.register(path)
    compat.atexit_register(self.close)
def _build_msg_for_logging(self, event_type, date=None, context=None, custom=None, stack=None, handled=True, **kwargs):
    """
    Captures, processes and serializes an event into a dict object

    :param event_type: short event type name; prefixed to
        ``elasticapm.events.<event_type>`` before handler lookup
    :param date: deprecated; no longer evaluated (a warning is emitted)
    :param context: extra context merged over the transaction context
    :param custom: custom data merged into ``context["custom"]``
    :param stack: True to capture the current stack, a frame iterable to
        use as-is, or None to fall back to ``config.auto_log_stacks``
    :param handled: whether the exception (if any) was handled
    :return: the event dict, or None if the event was filtered out
    """
    transaction = execution_context.get_transaction()
    span = execution_context.get_span()
    if transaction:
        # Copy so later mutations here don't leak into the live transaction.
        transaction_context = deepcopy(transaction.context)
    else:
        transaction_context = {}
    event_data = {}
    if custom is None:
        custom = {}
    if date is not None:
        warnings.warn("The date argument is no longer evaluated and will be removed in a future release", DeprecationWarning)
    # Timestamp is always "now", regardless of the deprecated date argument.
    date = time.time()
    if stack is None:
        stack = self.config.auto_log_stacks
    if context:
        transaction_context.update(context)
        context = transaction_context
    else:
        context = transaction_context
    event_data["context"] = context
    if transaction and transaction.labels:
        context["tags"] = deepcopy(transaction.labels)

    # if '.' not in event_type:
    # Assume it's a builtin
    event_type = "elasticapm.events.%s" % event_type
    handler = self.get_handler(event_type)
    result = handler.capture(self, **kwargs)
    # Drop the event entirely if its exception type is filtered out.
    if self._filter_exception_type(result):
        return
    # data (explicit) culprit takes over auto event detection
    culprit = result.pop("culprit", None)
    if custom.get("culprit"):
        culprit = custom.pop("culprit")
    # Merge handler output without overwriting what we already set.
    for k, v in compat.iteritems(result):
        if k not in event_data:
            event_data[k] = v
    log = event_data.get("log", {})
    if stack and "stacktrace" not in log:
        if stack is True:
            # skip=3 drops the agent-internal frames above the caller.
            frames = stacks.iter_stack_frames(skip=3, config=self.config)
        else:
            frames = stack
        frames = stacks.get_stack_info(
            frames,
            with_locals=self.config.collect_local_variables in ("errors", "all"),
            library_frame_context_lines=self.config.source_lines_error_library_frames,
            in_app_frame_context_lines=self.config.source_lines_error_app_frames,
            include_paths_re=self.include_paths_re,
            exclude_paths_re=self.exclude_paths_re,
            # Shorten captured locals according to configured limits.
            locals_processor_func=lambda local_var: varmap(
                lambda k, v: shorten(
                    v,
                    list_length=self.config.local_var_list_max_length,
                    string_length=self.config.local_var_max_length,
                    dict_length=self.config.local_var_dict_max_length,
                ),
                local_var,
            ),
        )
        log["stacktrace"] = frames
    if "stacktrace" in log and not culprit:
        culprit = stacks.get_culprit(log["stacktrace"], self.config.include_paths, self.config.exclude_paths)
    if "level" in log and isinstance(log["level"], compat.integer_types):
        # Convert numeric level to its lowercase name, e.g. 40 -> "error".
        log["level"] = logging.getLevelName(log["level"]).lower()
    if log:
        event_data["log"] = log
    if culprit:
        event_data["culprit"] = culprit
    if "custom" in context:
        context["custom"].update(custom)
    else:
        context["custom"] = custom
    # Make sure all data is coerced
    event_data = transform(event_data)
    if "exception" in event_data:
        event_data["exception"]["handled"] = bool(handled)
    # Intake API expects microseconds since epoch.
    event_data["timestamp"] = int(date * 1000000)
    if transaction:
        if transaction.trace_parent:
            event_data["trace_id"] = transaction.trace_parent.trace_id
        # parent id might already be set in the handler
        event_data.setdefault("parent_id", span.id if span else transaction.id)
        event_data["transaction_id"] = transaction.id
        event_data["transaction"] = {"sampled": transaction.is_sampled, "type": transaction.transaction_type}
    return event_data
def __init__(self, config=None, **inline):
    """Build the APM client: loggers, config, transport, tracer, metrics.

    :param config: optional config object/dict merged into ``Config``
    :param inline: inline config overrides passed through to ``Config``
    """
    # configure loggers first
    cls = self.__class__
    self.logger = get_logger("%s.%s" % (cls.__module__, cls.__name__))
    self.error_logger = get_logger("elasticapm.errors")
    self.tracer = None
    self.processors = []
    self.filter_exception_types_dict = {}
    self._service_info = None

    config = Config(config, inline_dict=inline)
    if config.errors:
        # Invalid configuration: log each error and stop sending data.
        for msg in config.errors.values():
            self.error_logger.error(msg)
        config.disable_send = True
    self.config = VersionedConfig(config, version=None)

    # Insert the log_record_factory into the logging library
    # The LogRecordFactory functionality is only available on python 3.2+
    if compat.PY3 and not self.config.disable_log_record_factory:
        record_factory = logging.getLogRecordFactory()
        # Only way to know if it's wrapped is to create a log record
        throwaway_record = record_factory(__name__, logging.DEBUG, __file__, 252, "dummy_msg", [], None)
        if not hasattr(throwaway_record, "elasticapm_labels"):
            self.logger.debug("Inserting elasticapm log_record_factory into logging")
            # Late import due to circular imports
            import elasticapm.handlers.logging as elastic_logging

            new_factory = elastic_logging.log_record_factory(record_factory)
            logging.setLogRecordFactory(new_factory)

    # HTTP headers sent with every intake request.
    headers = {
        "Content-Type": "application/x-ndjson",
        "Content-Encoding": "gzip",
        "User-Agent": "elasticapm-python/%s" % elasticapm.VERSION,
    }
    if self.config.secret_token:
        headers["Authorization"] = "Bearer %s" % self.config.secret_token
    transport_kwargs = {
        "metadata": self._build_metadata(),
        "headers": headers,
        "verify_server_cert": self.config.verify_server_cert,
        "server_cert": self.config.server_cert,
        "timeout": self.config.server_timeout,
        # api_request_time is configured in milliseconds; transport wants seconds.
        "max_flush_time": self.config.api_request_time / 1000.0,
        "max_buffer_size": self.config.api_request_size,
        "processors": self.load_processors(),
    }
    # Ensure a trailing slash so urljoin appends instead of replacing the path.
    self._api_endpoint_url = compat.urlparse.urljoin(
        self.config.server_url if self.config.server_url.endswith("/") else self.config.server_url + "/",
        constants.EVENTS_API_PATH,
    )
    self._transport = import_string(self.config.transport_class)(self._api_endpoint_url, **transport_kwargs)

    # Map exception class name -> module for dotted paths in
    # filter_exception_types (module is "" for bare class names).
    for exc_to_filter in self.config.filter_exception_types or []:
        exc_to_filter_type = exc_to_filter.split(".")[-1]
        exc_to_filter_module = ".".join(exc_to_filter.split(".")[:-1])
        self.filter_exception_types_dict[exc_to_filter_type] = exc_to_filter_module

    if platform.python_implementation() == "PyPy":
        # PyPy introduces a `_functools.partial.__call__` frame due to our use
        # of `partial` in AbstractInstrumentedModule
        skip_modules = ("elasticapm.", "_functools")
    else:
        skip_modules = ("elasticapm.",)

    self.tracer = Tracer(
        # Collect the current stack, skipping agent-internal frames.
        frames_collector_func=lambda: list(
            stacks.iter_stack_frames(
                start_frame=inspect.currentframe(), skip_top_modules=skip_modules, config=self.config
            )
        ),
        # Post-process raw frames into serializable stack info, shortening
        # captured local variables according to the configured limits.
        frames_processing_func=lambda frames: self._get_stack_info_for_trace(
            frames,
            library_frame_context_lines=self.config.source_lines_span_library_frames,
            in_app_frame_context_lines=self.config.source_lines_span_app_frames,
            with_locals=self.config.collect_local_variables in ("all", "transactions"),
            locals_processor_func=lambda local_var: varmap(
                lambda k, v: shorten(
                    v,
                    list_length=self.config.local_var_list_max_length,
                    string_length=self.config.local_var_max_length,
                    dict_length=self.config.local_var_dict_max_length,
                ),
                local_var,
            ),
        ),
        queue_func=self.queue,
        config=self.config,
        agent=self,
    )
    self.include_paths_re = stacks.get_path_regex(self.config.include_paths) if self.config.include_paths else None
    self.exclude_paths_re = stacks.get_path_regex(self.config.exclude_paths) if self.config.exclude_paths else None

    # metrics_interval is configured in milliseconds; registry wants seconds.
    self._metrics = MetricsRegistry(
        self.config.metrics_interval / 1000.0, self.queue, ignore_patterns=self.config.disable_metrics
    )
    for path in self.config.metrics_sets:
        self._metrics.register(path)
    if self.config.breakdown_metrics:
        self._metrics.register("elasticapm.metrics.sets.breakdown.BreakdownMetricSet")
    compat.atexit_register(self.close)
    if self.config.central_config:
        # Poll the APM Server for central config changes in a background timer.
        self._config_updater = IntervalTimer(
            update_config, 1, "eapm conf updater", daemon=True, args=(self,), evaluate_function_interval=True
        )
        self._config_updater.start()
    else:
        self._config_updater = None
def capture(client, exc_info=None, **kwargs):
    """Build an error-event dict for the given (or currently handled) exception.

    :param client: elasticapm client; supplies config and include/exclude
        path regexes used while collecting stack frames
    :param exc_info: a ``(type, value, traceback)`` triple; if falsy or
        ``True``, the currently handled exception is read from
        ``sys.exc_info()``
    :param kwargs: may contain ``message`` to override the generated message
    :raises ValueError: if no exc_info was given and no exception is
        currently being handled
    :return: dict with ``id``, ``culprit`` and ``exception`` fields
    """
    culprit = exc_value = exc_type = exc_module = frames = exc_traceback = None
    new_exc_info = False
    if not exc_info or exc_info is True:
        new_exc_info = True
        exc_info = sys.exc_info()
        # FIX: sys.exc_info() returns the truthy tuple (None, None, None) when
        # no exception is being handled, so the previous `if not exc_info`
        # check could never fire. Compare against the empty triple explicitly.
        if exc_info == (None, None, None):
            raise ValueError("No exception found")
    try:
        exc_type, exc_value, exc_traceback = exc_info

        frames = get_stack_info(
            iter_traceback_frames(exc_traceback),
            with_locals=client.config.collect_local_variables in ("errors", "all"),
            library_frame_context_lines=client.config.source_lines_error_library_frames,
            in_app_frame_context_lines=client.config.source_lines_error_app_frames,
            include_paths_re=client.include_paths_re,
            exclude_paths_re=client.exclude_paths_re,
            # Shorten captured locals according to the configured limits.
            locals_processor_func=lambda local_var: varmap(
                lambda k, val: shorten(
                    val,
                    list_length=client.config.local_var_list_max_length,
                    string_length=client.config.local_var_max_length,
                ),
                local_var,
            ),
        )

        culprit = get_culprit(frames, client.config.include_paths, client.config.exclude_paths)

        # Both branches previously assigned exc_type.__name__; hoisted out.
        exc_module = exc_type.__module__ if hasattr(exc_type, "__module__") else None
        exc_type = exc_type.__name__
    finally:
        if new_exc_info:
            # Drop local references to the traceback to avoid a frame
            # reference cycle keeping locals alive.
            try:
                del exc_info
                del exc_traceback
            except Exception as e:
                logger.exception(e)
    if "message" in kwargs:
        message = kwargs["message"]
    else:
        message = "%s: %s" % (exc_type, to_unicode(exc_value)) if exc_value else str(exc_type)

    return {
        "id": str(uuid.uuid4()),
        "culprit": culprit,
        "exception": {
            "message": message,
            "type": keyword_field(str(exc_type)),
            "module": keyword_field(str(exc_module)),
            "stacktrace": frames,
        },
    }
def test_shorten_string():
    """A 12-char string shortened to 5 chars becomes two chars plus '...'."""
    truncated = shorten("hello world!", string_length=5)
    assert len(truncated) == 5
    assert truncated == "he..."
def test_shorten_tuple():
    """A 500-element tuple is clipped to 50 items plus two trailer markers."""
    clipped = shorten(tuple(range(500)), list_length=50)
    assert len(clipped) == 52
    assert clipped[-2] == "..."
    assert clipped[-1] == "(450 more elements)"
def _build_msg_for_logging(self, event_type, date=None, context=None, custom=None, stack=None, handled=True, **kwargs):
    """
    Captures, processes and serializes an event into a dict object

    :param event_type: short event type name; prefixed to
        ``elasticapm.events.<event_type>`` before handler lookup
    :param date: event datetime; defaults to ``datetime.utcnow()``
    :param context: extra context merged over the transaction context
    :param custom: custom data merged into ``context['custom']``
    :param stack: True to capture the current stack, a frame iterable to
        use as-is, or None to fall back to ``config.auto_log_stacks``
    :param handled: whether the exception (if any) was handled
    :return: the serialized message dict, or None if the event was filtered
    """
    transaction = get_transaction()
    if transaction:
        # Copy so later mutations here don't leak into the live transaction.
        transaction_context = deepcopy(transaction.context)
    else:
        transaction_context = {}
    event_data = {}
    if custom is None:
        custom = {}
    if not date:
        date = datetime.datetime.utcnow()
    if stack is None:
        stack = self.config.auto_log_stacks
    if context:
        transaction_context.update(context)
        context = transaction_context
    else:
        context = transaction_context
    event_data['context'] = context
    if transaction and transaction.tags:
        context['tags'] = deepcopy(transaction.tags)

    # if '.' not in event_type:
    # Assume it's a builtin
    event_type = 'elasticapm.events.%s' % event_type
    handler = self.get_handler(event_type)
    result = handler.capture(self, **kwargs)
    # Drop the event entirely if its exception type is filtered out.
    if self._filter_exception_type(result):
        return
    # data (explicit) culprit takes over auto event detection
    culprit = result.pop('culprit', None)
    if custom.get('culprit'):
        culprit = custom.pop('culprit')
    # Merge handler output without overwriting what we already set.
    for k, v in compat.iteritems(result):
        if k not in event_data:
            event_data[k] = v
    log = event_data.get('log', {})
    if stack and 'stacktrace' not in log:
        if stack is True:
            # skip=3 drops the agent-internal frames above the caller.
            frames = stacks.iter_stack_frames(skip=3)
        else:
            frames = stack
        frames = stacks.get_stack_info(
            frames,
            with_locals=self.config.collect_local_variables in ('errors', 'all'),
            library_frame_context_lines=self.config.source_lines_error_library_frames,
            in_app_frame_context_lines=self.config.source_lines_error_app_frames,
            include_paths_re=self.include_paths_re,
            exclude_paths_re=self.exclude_paths_re,
            # Shorten captured locals according to configured limits.
            locals_processor_func=lambda local_var: varmap(
                lambda k, v: shorten(
                    v,
                    list_length=self.config.local_var_list_max_length,
                    string_length=self.config.local_var_max_length,
                ),
                local_var))
        log['stacktrace'] = frames
    if 'stacktrace' in log and not culprit:
        culprit = stacks.get_culprit(log['stacktrace'], self.config.include_paths, self.config.exclude_paths)
    if 'level' in log and isinstance(log['level'], compat.integer_types):
        # Convert numeric level to its lowercase name, e.g. 40 -> "error".
        log['level'] = logging.getLevelName(log['level']).lower()
    if log:
        event_data['log'] = log
    if culprit:
        event_data['culprit'] = culprit
    if 'custom' in context:
        context['custom'].update(custom)
    else:
        context['custom'] = custom

    # Run the data through processors
    for processor in self.processors:
        event_data = processor(self, event_data)

    # Make sure all data is coerced
    event_data = transform(event_data)
    if 'exception' in event_data:
        event_data['exception']['handled'] = bool(handled)
    event_data.update({
        'timestamp': date.strftime(constants.TIMESTAMP_FORMAT),
    })
    # Re-read in case the transaction changed while building the event.
    transaction = get_transaction()
    if transaction:
        event_data['transaction'] = {'id': transaction.id}
    return self._build_msg({'errors': [event_data]})
def test_shorten_tuple(self):
    """shorten() clips a 500-element tuple to 50 items plus trailer markers."""
    result = shorten(tuple(range(500)), list_length=50)
    # assertEquals is a deprecated alias removed in Python 3.12; use assertEqual.
    self.assertEqual(len(result), 52)
    self.assertEqual(result[-2], '...')
    self.assertEqual(result[-1], '(450 more elements)')
def test_shorten_string(self):
    """shorten() caps a 12-char string at 5 characters, ending in '...'."""
    result = shorten('hello world!', string_length=5)
    # assertEquals is a deprecated alias removed in Python 3.12; use assertEqual.
    self.assertEqual(len(result), 5)
    self.assertEqual(result, 'he...')
def test_shorten_dict():
    """A 500-entry dict is clipped to 50 entries plus a '<truncated>' marker."""
    source = dict(zip(range(500), range(500)))
    clipped = shorten(source, dict_length=50)
    assert len(clipped) == 51
    assert clipped["<truncated>"] == "(450 more elements)"
def capture(client, exc_info=None, **kwargs):
    """Build an error-event dict for the given (or currently handled) exception.

    :param client: elasticapm client; supplies config and include/exclude
        path regexes used while collecting stack frames
    :param exc_info: a ``(type, value, traceback)`` triple; if falsy or
        ``True``, the currently handled exception is read from
        ``sys.exc_info()``
    :param kwargs: may contain ``message`` to override the generated message
    :raises ValueError: if no exc_info was given and no exception is
        currently being handled
    :return: dict with ``id``, ``culprit`` and ``exception`` fields
    """
    culprit = exc_value = exc_type = exc_module = frames = exc_traceback = None
    new_exc_info = False
    if not exc_info or exc_info is True:
        new_exc_info = True
        exc_info = sys.exc_info()
        # FIX: sys.exc_info() returns the truthy tuple (None, None, None) when
        # no exception is being handled, so the previous `if not exc_info`
        # check could never fire. Compare against the empty triple explicitly.
        if exc_info == (None, None, None):
            raise ValueError('No exception found')
    try:
        exc_type, exc_value, exc_traceback = exc_info
        frames = get_stack_info(
            iter_traceback_frames(exc_traceback),
            with_locals=client.config.collect_local_variables in ('errors', 'all'),
            library_frame_context_lines=client.config.source_lines_error_library_frames,
            in_app_frame_context_lines=client.config.source_lines_error_app_frames,
            include_paths_re=client.include_paths_re,
            exclude_paths_re=client.exclude_paths_re,
            # Shorten captured locals according to the configured limits.
            locals_processor_func=lambda local_var: varmap(
                lambda k, val: shorten(
                    val,
                    list_length=client.config.local_var_list_max_length,
                    string_length=client.config.local_var_max_length),
                local_var))
        culprit = get_culprit(frames, client.config.include_paths, client.config.exclude_paths)
        # Both branches previously assigned exc_type.__name__; hoisted out.
        exc_module = exc_type.__module__ if hasattr(exc_type, '__module__') else None
        exc_type = exc_type.__name__
    finally:
        if new_exc_info:
            # Drop local references to the traceback to avoid a frame
            # reference cycle keeping locals alive.
            try:
                del exc_info
                del exc_traceback
            except Exception as e:
                logger.exception(e)
    if 'message' in kwargs:
        message = kwargs['message']
    else:
        message = '%s: %s' % (exc_type, to_unicode(exc_value)) if exc_value else str(exc_type)

    return {
        'id': str(uuid.uuid4()),
        'culprit': culprit,
        'exception': {
            'message': message,
            'type': keyword_field(str(exc_type)),
            'module': keyword_field(str(exc_module)),
            'stacktrace': frames,
        }
    }