def __init__(self, collect_interval, queue_func, tags=None, ignore_patterns=None):
    """
    Creates a new metric registry

    :param collect_interval: the interval to collect metrics from registered metric sets
    :param queue_func: the function to call with the collected metrics
    :param tags: optional mapping of tags attached to the registry
    :param ignore_patterns: optional patterns of metrics to ignore
    """
    self._queue_func = queue_func
    self._collect_interval = collect_interval
    self._metricsets = {}
    self._collect_timer = None
    # fall back to fresh empty containers for falsy arguments
    self._tags = tags if tags else {}
    self._ignore_patterns = ignore_patterns if ignore_patterns else ()
    if self._collect_interval:
        if is_master_process():
            # in a uwsgi master process, defer starting the collector
            # thread until after the fork via the postfork hook
            compat.postfork(lambda: self._start_collect_timer())
        else:
            self._start_collect_timer()
def queue(self, event_type, data, flush=False):
    """Hand an event to the transport, unless sending is disabled.

    :param event_type: type of the event being queued
    :param data: the event payload
    :param flush: request an immediate flush (suppressed in a uWSGI
        master process, where a flush could end up in an unpredictable
        threading state)
    """
    if not self.config.disable_send:
        if flush and is_master_process():
            # never flush from the uWSGI master process
            flush = False
        self._transport.queue(event_type, data, flush)
def _get_transport(self, parsed_url):
    """Return a transport for *parsed_url*, caching one per URL.

    When running async in a uWSGI master process, a fresh synchronous
    transport is returned instead, to avoid forking workers into an
    inconsistent threading state.
    """
    if self.async_mode and is_master_process():
        # the master process must not send asynchronously: forked
        # children could inherit broken threading state
        self.logger.info(
            'Sending message synchronously while in master process. PID: %s',
            os.getpid())
        return import_string(defaults.SYNC_TRANSPORT_CLASS)(parsed_url)
    try:
        return self._transports[parsed_url]
    except KeyError:
        transport = self._transport_class(parsed_url)
        self._transports[parsed_url] = transport
        return transport
def queue(self, event_type, data, flush=False):
    """Run *data* through the registered processors, then queue it.

    :param event_type: type of the event being queued
    :param data: the event payload; may be rewritten by processors
    :param flush: request an immediate flush (suppressed in a uWSGI
        master process to avoid an unpredictable threading state)
    """
    if self.config.disable_send:
        return
    # processors may restrict themselves to certain event types through
    # an ``event_types`` attribute; without one they see every event
    for processor in self.processors:
        applies = not hasattr(processor, "event_types") or event_type in processor.event_types
        if applies:
            data = processor(self, data)
    if flush and is_master_process():
        # never flush from the uWSGI master process
        flush = False
    self._transport.queue(event_type, data, flush)
def _get_transport(self, parsed_url):
    """Return a transport for *parsed_url*, caching one per URL.

    If the transport class exposes a ``sync_transport`` and we are in a
    uWSGI master process, a synchronous transport is returned instead,
    so forked workers cannot inherit an inconsistent threading state.
    """
    # note: hasattr is checked first, so is_master_process() is only
    # consulted for transport classes that actually offer a sync mode
    if hasattr(self._transport_class, 'sync_transport') and is_master_process():
        self.logger.info('Sending message synchronously while in master '
                         'process. PID: %s', os.getpid())
        return self._transport_class.sync_transport(parsed_url)
    try:
        return self._transports[parsed_url]
    except KeyError:
        transport = self._transport_class(
            parsed_url, verify_server_cert=self.config.verify_server_cert
        )
        self._transports[parsed_url] = transport
        return transport
def __init__(self, metadata=None, compress_level=5, json_serializer=json_encoder.dumps, max_flush_time=None, max_buffer_size=None, queue_chill_count=500, queue_chill_time=1.0, **kwargs):
    """
    Create a new Transport instance

    :param metadata: Metadata object to prepend to every queue
    :param compress_level: GZip compress level. If zero, no GZip compression will be used
    :param json_serializer: serializer to use for JSON encoding
    :param max_flush_time: Maximum time between flushes in seconds
    :param max_buffer_size: Maximum size of buffer before flush
    :param queue_chill_count: passed to the event queue as ``chill_until``
    :param queue_chill_time: passed to the event queue as ``max_chill_time``
    :param kwargs: extra arguments, ignored here
    """
    self.state = TransportState()
    self._metadata = metadata if metadata is not None else {}
    # clamp the compression level into gzip's valid 0..9 range;
    # a None level is treated as "no compression"
    level = compress_level if compress_level is not None else 0
    self._compress_level = min(9, max(0, level))
    self._json_serializer = json_serializer
    self._max_flush_time = max_flush_time
    self._max_buffer_size = max_buffer_size
    self._queued_data = None
    self._event_queue = ChilledQueue(maxsize=10000, chill_until=queue_chill_count, max_chill_time=queue_chill_time)
    worker = threading.Thread(target=self._process_queue, name="eapm event processor thread")
    worker.daemon = True
    self._event_process_thread = worker
    self._last_flush = timeit.default_timer()
    self._counts = defaultdict(int)
    self._flushed = threading.Event()
    self._closed = False
    if is_master_process():
        # in a uwsgi master process, defer starting the processor thread
        # until after the fork via the postfork hook
        compat.postfork(lambda: self._start_event_processor())
    else:
        self._start_event_processor()
def queue(self, event_type, data, flush=False):
    """Run *data* through the registered processors, then queue it.

    A processor returning a falsy value vetoes the event: the payload is
    normalized to None (still handed to the transport) and a debug
    message is logged.

    :param event_type: type of the event being queued
    :param data: the event payload; may be rewritten or vetoed by processors
    :param flush: request an immediate flush (suppressed in a uWSGI
        master process to avoid an unpredictable threading state)
    """
    if self.config.disable_send:
        return
    for processor in self.processors:
        if hasattr(processor, "event_types") and event_type not in processor.event_types:
            # processor opted out of this event type
            continue
        data = processor(self, data)
        if not data:
            self.logger.debug(
                "Dropped event of type %s due to processor %s.%s",
                event_type,
                getattr(processor, "__module__"),
                getattr(processor, "__name__"),
            )
            data = None  # normalize all "falsy" values to None
            break
    if flush and is_master_process():
        # never flush from the uWSGI master process
        flush = False
    self._transport.queue(event_type, data, flush)