def new_metrics_exporter(**options):
    """Create a `MetricsExporter` and start exporting in the background.

    :param options: Keyword arguments passed through to :class:`Options`.
    :rtype: :class:`MetricsExporter`
    :return: The newly-created exporter.
    """
    # Bind the constructed Options to a distinct name instead of rebinding
    # the ``options`` kwargs dict, which obscured the variable's type.
    exporter_options = Options(**options)
    exporter = MetricsExporter(options=exporter_options)
    transport.get_exporter_thread(stats.stats, exporter,
                                  interval=exporter_options.export_interval)
    return exporter
def new_stats_exporter(options=None, interval=None):
    """Get a stats exporter and running transport thread.

    Create a new `StackdriverStatsExporter` with the given options and start
    periodically exporting stats to stackdriver in the background.

    Fall back to default auth if `options` is null. This will raise
    `google.auth.exceptions.DefaultCredentialsError` if default credentials
    aren't configured.

    See `opencensus.metrics.transport.get_exporter_thread` for details on the
    transport thread.

    :type options: :class:`Options`
    :param options: Options to pass to the exporter

    :type interval: int or float
    :param interval: Seconds between export calls.

    :rtype: :class:`StackdriverStatsExporter`
    :return: The newly-created exporter.
    """
    if options is None:
        # No explicit options: derive the project id from default credentials.
        _, project_id = google.auth.default()
        options = Options(project_id=project_id)
    if str(options.project_id).strip() == "":
        raise ValueError(ERROR_BLANK_PROJECT_ID)

    ci = client_info.ClientInfo(client_library_version=get_user_agent_slug())
    client = monitoring_v3.MetricServiceClient(client_info=ci)
    exporter = StackdriverStatsExporter(client=client, options=options)
    transport.get_exporter_thread([stats.stats], exporter, interval=interval)
    return exporter
def new_metrics_exporter(**options):
    """Build a `MetricsExporter` and launch its background export thread."""
    opts = Options(**options)
    exporter = MetricsExporter(options=opts)
    producers = [stats_module.stats]
    if opts.enable_standard_metrics:
        # Also export the built-in standard metrics alongside user stats.
        producers.append(standard_metrics.producer)
    transport.get_exporter_thread(
        producers, exporter, interval=opts.export_interval)
    return exporter
def new_metrics_exporter(**options):
    """Create a `MetricsExporter`, start its export thread, and register a
    final metrics flush at interpreter exit.

    :param options: Keyword arguments passed through to
        :class:`MetricsExporter`.
    :rtype: :class:`MetricsExporter`
    :return: The newly-created exporter.
    """
    exporter = MetricsExporter(**options)
    producers = [stats_module.stats]
    if exporter.options.enable_standard_metrics:
        producers.append(standard_metrics.producer)
    transport.get_exporter_thread(producers, exporter,
                                  interval=exporter.options.export_interval)
    # Defer the get_metrics() call until exit time. The previous code passed
    # stats_module.stats.get_metrics() as a pre-evaluated argument to
    # atexit.register, snapshotting metrics at registration time instead of
    # flushing the final values recorded during the program's lifetime.
    atexit.register(
        lambda: exporter.export_metrics(stats_module.stats.get_metrics()))
    return exporter
def new_metrics_exporter(**options):
    """Create a `MetricsExporter`, start its export thread, and enable
    heartbeat metrics for the configured Azure resource."""
    exporter = MetricsExporter(**options)
    producers = [stats_module.stats]
    if exporter.options.enable_standard_metrics:
        producers.append(standard_metrics.producer)
    transport.get_exporter_thread(
        producers, exporter, interval=exporter.options.export_interval)
    # NOTE(review): local import kept as in the original — presumably to
    # avoid a circular import at module load; confirm before hoisting.
    from opencensus.ext.azure.metrics_exporter import heartbeat_metrics
    heartbeat_metrics.enable_heartbeat_metrics(
        exporter.options.connection_string,
        exporter.options.instrumentation_key)
    return exporter
def enable_heartbeat_metrics(connection_string, ikey):
    """Start the background heartbeat exporter, at most once per process."""
    global _HEARTBEAT_METRICS  # pylint: disable=global-statement
    with _HEARTBEAT_LOCK:
        if _HEARTBEAT_METRICS is not None:
            # Heartbeat already started earlier; nothing to do.
            return
        exporter = MetricsExporter(
            connection_string=connection_string,
            instrumentation_key=ikey,
            export_interval=900.0,  # Send every 15 minutes
        )
        _HEARTBEAT_METRICS = AzureHeartbeatMetricsProducer()
        transport.get_exporter_thread(
            [_HEARTBEAT_METRICS], exporter, exporter.options.export_interval)
def collect_statsbeat_metrics(options):
    """Start the statsbeat exporter thread, at most once per process.

    :param options: Options of the exporter whose usage is being tracked.
    """
    # pylint: disable=global-statement
    global _STATSBEAT_METRICS
    global _STATSBEAT_EXPORTER
    # Cheap unlocked check to skip the lock on the common (already-started)
    # path.
    if _STATSBEAT_METRICS is None and _STATSBEAT_EXPORTER is None:
        with _STATSBEAT_LOCK:
            # Re-check under the lock: two threads may both have passed the
            # unlocked check, and only one must initialize statsbeat. The
            # previous code skipped this re-check, allowing duplicate
            # exporters/threads under concurrent first calls.
            if _STATSBEAT_METRICS is not None or \
                    _STATSBEAT_EXPORTER is not None:
                return
            exporter = MetricsExporter(
                is_stats=True,
                connection_string=_get_stats_connection_string(
                    options.endpoint),  # noqa: E501
                enable_standard_metrics=False,
                export_interval=_STATS_SHORT_EXPORT_INTERVAL,  # 15m by default
            )
            # The user's ikey is the one being tracked
            producer = _AzureStatsbeatMetricsProducer(options)
            _STATSBEAT_METRICS = producer
            # Export some initial stats on program start
            execution_context.set_is_exporter(True)
            exporter.export_metrics(_STATSBEAT_METRICS.get_initial_metrics())
            execution_context.set_is_exporter(False)
            exporter.exporter_thread = \
                transport.get_exporter_thread(
                    [_STATSBEAT_METRICS],
                    exporter,
                    exporter.options.export_interval)
            _STATSBEAT_EXPORTER = exporter
def new_metrics_exporter(**options):
    """Create a `MetricsExporter` and start its background export thread."""
    exporter = MetricsExporter(**options)
    producers = [stats_module.stats]
    if exporter.options.enable_standard_metrics:
        logger.info("Enabling standard metrics (CPU, etc.)")
        producers.append(standard_metrics.producer)
    transport.get_exporter_thread(
        producers, exporter, interval=exporter.options.export_interval)
    # TODO: Forcing the enablement of heartbeat metrics sounds like a very
    # bad idea, so the call below stays disabled:
    # from opencensus.ext.azure.metrics_exporter import heartbeat_metrics
    # heartbeat_metrics.enable_heartbeat_metrics(
    #     exporter.options.connection_string,
    #     exporter.options.instrumentation_key)
    return exporter
def test_exporter_deleted(self, mock_logger):
    """Dropping the last exporter reference must stop the export task."""
    metrics_producer = mock.Mock()
    metrics_exporter = mock.Mock()
    task = transport.get_exporter_thread(metrics_producer, metrics_exporter)
    del metrics_exporter
    gc.collect()
    # Wait past at least one export tick so the dead reference is noticed.
    time.sleep(INTERVAL + INTERVAL / 2.0)
    mock_logger.exception.assert_called()
    self.assertTrue(task.finished.is_set())
def test_producer_error(self, mock_logger):
    """A failing producer is logged but does not kill the export task."""
    failing_producer = mock.Mock()
    failing_producer.get_metrics.side_effect = ValueError()
    dummy_exporter = mock.Mock()
    task = transport.get_exporter_thread(failing_producer, dummy_exporter)
    # Wait past at least one export tick so the error path fires.
    time.sleep(INTERVAL + INTERVAL / 2.0)
    mock_logger.exception.assert_called()
    self.assertFalse(task.finished.is_set())
def new_metrics_exporter(**options):
    """Create a `MetricsExporter`, start its export thread, and — unless the
    APPLICATIONINSIGHTS_STATSBEAT_DISABLED_ALL environment variable is set —
    begin collecting statsbeat metrics."""
    exporter = MetricsExporter(**options)
    producers = [stats_module.stats]
    if exporter.options.enable_standard_metrics:
        producers.append(standard_metrics.producer)
    exporter.exporter_thread = transport.get_exporter_thread(
        producers, exporter, interval=exporter.options.export_interval)
    statsbeat_disabled = os.environ.get(
        "APPLICATIONINSIGHTS_STATSBEAT_DISABLED_ALL")
    if not statsbeat_disabled:
        # NOTE(review): local import kept as in the original — presumably to
        # avoid a circular import; confirm before hoisting.
        from opencensus.ext.azure.metrics_exporter import statsbeat_metrics
        # Stats will track the user's ikey
        statsbeat_metrics.collect_statsbeat_metrics(exporter.options)
    return exporter
def test_threaded_export(self, mock_logger):
    """The worker thread polls the producer and exports after one interval."""
    producer = mock.Mock()
    exporter = mock.Mock()
    metrics = mock.Mock()
    producer.get_metrics.return_value = metrics
    # Create the task before entering the try block: previously the call sat
    # inside the try, so a failure in get_exporter_thread made the finally
    # clause raise NameError on `task`, masking the real error.
    task = transport.get_exporter_thread(producer, exporter)
    try:
        # Nothing should happen until the first interval elapses.
        producer.get_metrics.assert_not_called()
        exporter.export_metrics.assert_not_called()
        time.sleep(INTERVAL + INTERVAL / 2.0)
        producer.get_metrics.assert_called_once_with()
        exporter.export_metrics.assert_called_once_with(metrics)
    finally:
        task.cancel()
        task.join()
def new_stats_exporter(service_name, hostname=None, endpoint=None,
                       interval=None):
    """Create a new worker thread and attach the exporter to it.

    :type service_name: str
    :param service_name: name of the service

    :type hostname: str
    :param hostname: name of the host (machine or host name)

    :type endpoint: str
    :param endpoint: address of the opencensus service; defaults to
        `utils.DEFAULT_ENDPOINT` when not given.

    :type interval: int or float
    :param interval: Seconds between export calls.

    :rtype: :class:`StatsExporter`
    :return: The newly-created exporter.
    """
    endpoint = utils.DEFAULT_ENDPOINT if endpoint is None else endpoint
    exporter = StatsExporter(
        ExportRpcHandler(_create_stub(endpoint), service_name, hostname))
    transport.get_exporter_thread([stats.stats], exporter, interval)
    return exporter
def __init__(self, insert_key, service_name, host=None, interval=5):
    """Set up the metric client and start the background export thread."""
    self.client = MetricClient(insert_key=insert_key, host=host)
    self.client.add_version_info("NewRelic-OpenCensus-Exporter", __version__)
    self.views = {}
    self.count_values = {}
    # Register an exporter thread for this exporter
    self._thread = transport.get_exporter_thread(
        [stats.stats], self, interval=interval)
    self.interval = self._thread.interval
    self._common = {
        "interval.ms": self.interval * 1000,
        "attributes": {"service.name": service_name},
    }
def collect_statsbeat_metrics(options):
    """Start the statsbeat exporter thread, at most once per process."""
    global _STATSBEAT_METRICS  # pylint: disable=global-statement
    with _STATSBEAT_LOCK:
        if _STATSBEAT_METRICS is not None:
            # Statsbeat already started earlier; nothing to do.
            return
        exporter = MetricsExporter(
            is_stats=True,
            connection_string=_STATS_CONNECTION_STRING,
            enable_standard_metrics=False,
            export_interval=_STATS_SHORT_EXPORT_INTERVAL,  # 15m by default
        )
        # The user's ikey is the one being tracked
        _STATSBEAT_METRICS = _AzureStatsbeatMetricsProducer(options)
        # Export some initial stats on program start
        exporter.export_metrics(_STATSBEAT_METRICS.get_initial_metrics())
        exporter.exporter_thread = transport.get_exporter_thread(
            [_STATSBEAT_METRICS], exporter, exporter.options.export_interval)
def test_multiple_producers(self, iter_mock, mock_logger):
    """All producers are polled and their metrics exported together."""
    producer1 = mock.Mock()
    producer2 = mock.Mock()
    producers = [producer1, producer2]
    exporter = mock.Mock()
    metrics = mock.Mock()
    producer1.get_metrics.return_value = metrics
    producer2.get_metrics.return_value = metrics
    iter_mock.return_value = metrics
    # Create the task before entering the try block: previously the call sat
    # inside the try, so a failure in get_exporter_thread made the finally
    # clause raise NameError on `task`, masking the real error.
    task = transport.get_exporter_thread(producers, exporter)
    try:
        producer1.get_metrics.assert_not_called()
        producer2.get_metrics.assert_not_called()
        exporter.export_metrics.assert_not_called()
        time.sleep(INTERVAL + INTERVAL / 2.0)
        producer1.get_metrics.assert_called_once_with()
        producer2.get_metrics.assert_called_once_with()
        exporter.export_metrics.assert_called_once_with(metrics)
    finally:
        task.cancel()
        task.join()
def new_stats_exporter(options=None, interval=DEFAULT_INTERVAL):
    """Return a stats exporter and running transport thread."""
    zenoss_exporter = ZenossStatsExporter(options=options)
    transport.get_exporter_thread(
        [stats.stats], zenoss_exporter, interval=interval)
    return zenoss_exporter