def collect_statsbeat_metrics(options):
    """Start statsbeat collection for the given exporter options.

    Creates (at most once per process) a dedicated statsbeat
    ``MetricsExporter`` and metrics producer, exports an initial batch of
    stats, and starts the background exporter thread.

    :param options: exporter options; ``options.endpoint`` selects the
        statsbeat connection string and the producer tracks the user's ikey.
    """
    # pylint: disable=global-statement
    global _STATSBEAT_METRICS
    global _STATSBEAT_EXPORTER
    # Fast path: skip the lock when statsbeat is already running.
    if _STATSBEAT_METRICS is None and _STATSBEAT_EXPORTER is None:
        with _STATSBEAT_LOCK:
            # Re-check under the lock (double-checked locking): another
            # thread may have initialized statsbeat between our unlocked
            # check and acquiring the lock. Without this re-check two
            # threads could each create an exporter and thread.
            if _STATSBEAT_METRICS is not None or \
                    _STATSBEAT_EXPORTER is not None:
                return
            exporter = MetricsExporter(
                is_stats=True,
                connection_string=_get_stats_connection_string(
                    options.endpoint),  # noqa: E501
                enable_standard_metrics=False,
                export_interval=_STATS_SHORT_EXPORT_INTERVAL,  # 15m by default
            )
            # The user's ikey is the one being tracked
            producer = _AzureStatsbeatMetricsProducer(options)
            _STATSBEAT_METRICS = producer
            # Export some initial stats on program start; flag this thread
            # as an exporter so its own HTTP calls are not tracked.
            execution_context.set_is_exporter(True)
            exporter.export_metrics(_STATSBEAT_METRICS.get_initial_metrics())
            execution_context.set_is_exporter(False)
            exporter.exporter_thread = \
                transport.get_exporter_thread([_STATSBEAT_METRICS],
                                              exporter,
                                              exporter.options.export_interval)
            _STATSBEAT_EXPORTER = exporter
def test_dependency_patch_exporter_thread(self):
    """dependency_patch must be a no-op when the current thread is
    flagged as an exporter thread: its outgoing requests are not
    counted as dependencies."""
    # Renamed from ``map`` to avoid shadowing the builtin.
    dependency_map = standard_metrics.http_dependency.dependency_map
    standard_metrics.http_dependency.ORIGINAL_REQUEST = lambda x: None
    session = mock.Mock()
    execution_context.set_is_exporter(True)
    result = standard_metrics.http_dependency.dependency_patch(session)
    # No dependency recorded and nothing returned on the exporter thread.
    self.assertIsNone(dependency_map.get('count'))
    self.assertIsNone(result)
def test_dependency_patch(self):
    """dependency_patch on a normal (non-exporter) thread records the
    request in the dependency map."""
    # Renamed from ``map`` to avoid shadowing the builtin.
    dependency_map = standard_metrics.http_dependency.dependency_map
    standard_metrics.http_dependency.ORIGINAL_REQUEST = lambda x: None
    session = requests.Session()
    execution_context.set_is_exporter(False)
    result = standard_metrics.http_dependency.dependency_patch(session)
    # One dependency counted; the patch itself returns nothing.
    self.assertEqual(dependency_map['count'], 1)
    self.assertIsNone(result)
def close(self):
    """Perform a final metric flush, then cancel the task.

    The flush runs with the exporter flag set so the exporter's own
    HTTP calls are not tracked as dependencies; any error during the
    flush is logged and suppressed so cancellation still happens.
    """
    try:
        # Suppress request tracking on flush
        execution_context.set_is_exporter(True)
        try:
            self.func(*self.args, **self.kwargs)
        finally:
            # Reset even if the flush raises — otherwise the calling
            # thread would stay flagged as an exporter forever.
            execution_context.set_is_exporter(False)
    except Exception as ex:
        logger.exception("Error handling metric flush: {}".format(ex))
    self.cancel()
def run(self):  # pragma: NO COVER
    """Exporter worker loop: pull item batches from ``self.src`` and
    emit them via ``self.dst`` until an exit event arrives."""
    # Indicate that this thread is an exporter thread.
    execution_context.set_is_exporter(True)
    src = self.src
    dst = self.dst
    while True:
        # Wait up to export_interval for up to max_batch_size items.
        batch = src.gets(dst.max_batch_size, dst.export_interval)
        if batch and isinstance(batch[-1], QueueEvent):
            # Trailing QueueEvent is a control signal, not data: emit the
            # real items and hand the event to the destination.
            dst.emit(batch[:-1], event=batch[-1])
            if batch[-1] is src.EXIT_EVENT:
                # Shutdown requested — leave the loop.
                break
            else:
                # Event already handled; skip the plain emit below.
                continue
        dst.emit(batch)
def start(self):
    """Starts the background thread.

    Additionally, this registers a handler for process exit to attempt
    to send any pending data before shutdown.
    """
    with self._lock:
        if self.is_alive:
            return
        self._thread = threading.Thread(target=self._thread_main,
                                        name=_WORKER_THREAD_NAME)
        self._thread.daemon = True
        # Indicate that this thread is an exporter thread. Used for
        # auto-collection.
        execution_context.set_is_exporter(True)
        self._thread.start()
        # NOTE: set_is_exporter(True) above flags the *calling* thread,
        # not the worker — the worker flags itself inside _thread_main.
        # Reset the caller's flag so its own requests keep being tracked
        # (same fix as the sibling start(), per
        # https://github.com/census-instrumentation/opencensus-python/pull/893).
        execution_context.set_is_exporter(False)
        atexit.register(self._export_pending_data)
def _thread_main(self):
    """The entry point for the worker thread.

    Pulls pending data off the queue and writes them in batches to the
    specified tracing backend using the exporter.
    """
    # Indicate that this thread is an exporter thread.
    # Used to suppress tracking of requests in this thread.
    execution_context.set_is_exporter(True)
    quit_ = False
    while True:
        items = self._get_items()
        data = []
        for item in items:
            if item is _WORKER_TERMINATOR:
                quit_ = True
                # Continue processing items, don't break, try to process
                # all items we got back before quitting.
            else:
                data.extend(item)
        if data:
            try:
                self.exporter.emit(data)
            except Exception:
                # Fixed missing space between the adjacent string
                # literals; removed the dead ``pass`` that followed.
                logger.exception(
                    '%s failed to emit data. '
                    'Dropping %s objects from queue.',
                    self.exporter.__class__.__name__, len(data))
        # Mark every fetched item done, even on emit failure, so
        # queue.join() callers are not blocked forever.
        for _ in range(len(items)):
            self._queue.task_done()
        # self._event is set at exit, at which point we start draining the
        # queue immediately. If self._event is unset, block for
        # self.wait_period between each batch of exports.
        self._event.wait(self._wait_period)
        if quit_:
            break
def run(self):
    """Worker loop: drain item batches from ``self._src`` and export
    them via ``self._dst`` until an exit event arrives."""
    # Indicate that this thread is an exporter thread.
    execution_context.set_is_exporter(True)
    src = self._src
    dst = self._dst
    while True:
        # Wait up to export_interval for up to max_batch_size items.
        batch = src.gets(dst.max_batch_size, dst.export_interval)
        if batch and isinstance(batch[-1], QueueEvent):
            # Trailing QueueEvent is a control signal, not data: export
            # the real items and pass the event to the destination.
            try:
                dst._export(batch[:-1], event=batch[-1])
            except Exception:
                # Keep the worker alive on exporter failures.
                logger.exception('Unhandled exception from exporter.')
            if batch[-1] is src.EXIT_EVENT:
                # Shutdown requested — leave the loop.
                break
            continue  # pragma: NO COVER
        try:
            dst._export(batch)
        except Exception:
            logger.exception('Unhandled exception from exporter.')
def start(self):
    """Starts the background thread.

    Additionally, this registers a handler for process exit to attempt
    to send any pending data before shutdown.
    """
    with self._lock:
        if self.is_alive:
            return
        self._thread = threading.Thread(
            target=self._thread_main, name=_WORKER_THREAD_NAME)
        self._thread.daemon = True
        # Indicate that this thread is an exporter thread. Used for
        # auto-collection.
        execution_context.set_is_exporter(True)
        self._thread.start()
        ### Perpetau Mod Start
        # add in https://github.com/census-instrumentation/opencensus-python/pull/893
        # NOTE(review): set_is_exporter(True) above flags the calling
        # thread (presumably thread-local state); this reset restores it
        # so the caller's own requests keep being tracked — confirm
        # against execution_context's implementation.
        execution_context.set_is_exporter(False)
        atexit.register(self._export_pending_data)
def run(self): logger.info("AzureLogHandler: Running worker...") # Indicate that this thread is an exporter thread. # Used to suppress tracking of requests in this thread execution_context.set_is_exporter(True) src = self._src dst = self._dst while True: batch = src.gets(dst.max_batch_size, dst.export_interval) if batch and isinstance(batch[-1], QueueEvent): try: dst._export(batch[:-1], event=batch[-1]) except Exception: logger.exception('Unhandled exception from exporter.') if batch[-1] is src.EXIT_EVENT: break continue # pragma: NO COVER try: dst._export(batch) except Exception: logger.exception('Unhandled exception from exporter.')
def export(self, datas):
    """Emit *datas* through the underlying exporter.

    The exporter flag is set for the duration of the emit so the
    exporter's own HTTP requests are not tracked, and is always reset
    afterwards — previously an exception from ``emit`` would leave the
    thread permanently flagged as an exporter.
    """
    # Used to suppress tracking of requests in export
    execution_context.set_is_exporter(True)
    try:
        self.exporter.emit(datas)
    finally:
        # Reset the context even if emit raises.
        execution_context.set_is_exporter(False)
def run(self):
    """Run the periodic task with this thread flagged as an exporter,
    so requests issued from it are not tracked; the actual loop is
    delegated to the parent class's ``run``."""
    # Indicate that this thread is an exporter thread.
    execution_context.set_is_exporter(True)
    super(PeriodicMetricTask, self).run()
def run(self):
    """Run the periodic task with this thread flagged as an exporter,
    so requests issued from it are not tracked; the actual loop is
    delegated to the parent class's ``run``."""
    # Indicate that this thread is an exporter thread.
    # Used to suppress tracking of requests in this thread
    execution_context.set_is_exporter(True)
    super(PeriodicMetricTask, self).run()