def test_null_options(self):
    """new_stats_exporter() without options: propagate auth errors, fall
    back to the default credentials' project ID, and reject empty projects."""
    auth_default_path = ('opencensus.ext.stackdriver.stats_exporter'
                         '.google.auth.default')

    # Auth failures must not be swallowed by the exporter factory.
    auth_error = google.auth.exceptions.DefaultCredentialsError
    failing_auth = mock.Mock(side_effect=auth_error)
    with mock.patch(auth_default_path, failing_auth):
        with self.assertRaises(auth_error):
            stackdriver.new_stats_exporter()

    # With working auth, the default credentials' project ID is used.
    working_auth = mock.Mock(return_value=(None, 123))
    with mock.patch(auth_default_path, working_auth):
        sdse = stackdriver.new_stats_exporter()
    self.assertEqual(sdse.options.project_id, 123)

    # Working auth with an empty project ID must raise.
    projectless_auth = mock.Mock(return_value=(None, ''))
    with mock.patch(auth_default_path, projectless_auth):
        with self.assertRaises(ValueError):
            stackdriver.new_stats_exporter()
Example #2
0
def main():
    """Register LATENCY_VIEW, attach the Stackdriver exporter, and record
    fake tagged latency samples forever (one every half second)."""
    # Measurements are only aggregated and exported when they belong to a
    # registered view.
    stats.stats.view_manager.register_view(LATENCY_VIEW)

    # The exporter pushes metrics in the background, every 60s by default.
    exporter = stats_exporter.new_stats_exporter()
    print('Exporting stats to project "{}"'.format(exporter.options.project_id))
    stats.stats.view_manager.register_exporter(exporter)

    def record_fake_latency(num):
        # One sample, uniform in [0, 5000) ms, tagged with a random label.
        ms = random() * 5 * 1000
        print("Latency {}: {}".format(num, ms))

        tagmap = TagMap()
        tagmap.insert("mylabel", str(randrange(1, 11)))

        mmap = stats.stats.stats_recorder.new_measurement_map()
        mmap.measure_float_put(LATENCY_MS, ms)
        mmap.record(tags=tagmap)
        time.sleep(0.5)

    # Endlessly record batches of 100 fake latency values.
    while True:
        for num in range(100):
            record_fake_latency(num)
def main():
    """Register LATENCY_VIEW, export via Stackdriver, record 100 fake
    latency samples, then wait long enough for one export cycle."""
    # Measurements are only aggregated/exported for registered views.
    stats.stats.view_manager.register_view(LATENCY_VIEW)

    # The exporter pushes metrics in the background, every 60s by default.
    exporter = stats_exporter.new_stats_exporter()
    print('Exporting stats to project "{}"'.format(
        exporter.options.project_id))
    stats.stats.view_manager.register_exporter(exporter)

    # 100 fake latency samples, uniform in [0, 5000) ms.
    for i in range(100):
        latency_ms = random() * 5 * 1000
        print("Latency {}: {}".format(i, latency_ms))

        measurement_map = stats.stats.stats_recorder.new_measurement_map()
        measurement_map.measure_float_put(LATENCY_MS, latency_ms)
        measurement_map.record()

    # Stay alive long enough for the background exporter to fire at
    # least once.
    time.sleep(65)
def setup_open_census():
    """Wire app-level OpenCensus plumbing: a response-time measure, tag
    keys, a last-value view, and a Stackdriver exporter."""
    census_stats = stats.Stats()

    # Request duration measure, in milliseconds.
    app.m_response_ms = measure_module.MeasureFloat("flask_response_time",
                                                    "The request duration",
                                                    "ms")

    # Tag keys used to slice the response-time view.
    app.key_method = tag_key_module.TagKey("method")
    app.key_status = tag_key_module.TagKey("status")
    app.key_error = tag_key_module.TagKey("error")

    app.view_manager = census_stats.view_manager
    app.stats_recorder = census_stats.stats_recorder

    response_time_view = view.View(
        "response_time", "The time it took to respond",
        [app.key_method, app.key_status, app.key_error], app.m_response_ms,
        aggregation.LastValueAggregation())

    # Export to the project named by the PROJECT_ID environment variable.
    app.exporter = stackdriver.new_stats_exporter(options=stackdriver.Options(
        project_id=os.getenv('PROJECT_ID')))
    app.view_manager.register_exporter(app.exporter)
    app.view_manager.register_view(response_time_view)
    def test_stats_record_async(self):
        """Record one async measurement and verify the exporter created the
        matching Stackdriver metric descriptor."""
        # A per-process suffix keeps names unique and avoids cached objects.
        suffix = str(os.getpid())

        tag_key = "SampleKeyAsyncTest%s" % suffix
        measure_name = "SampleMeasureNameAsyncTest%s" % suffix
        measure_description = "SampleDescriptionAsyncTest%s" % suffix
        view_name = "SampleViewNameAsyncTest%s" % suffix
        view_description = "SampleViewDescriptionAsyncTest%s" % suffix

        frontend_key = tag_key_module.TagKey(tag_key)
        size_measure = measure_module.MeasureInt(
            measure_name, measure_description, "By")
        size_distribution = aggregation_module.DistributionAggregation(
            [0.0, 16.0 * MiB, 256.0 * MiB])
        size_view = view_module.View(
            view_name, view_description, [frontend_key],
            size_measure, size_distribution)

        stats = stats_module.Stats()
        exporter = stackdriver.new_stats_exporter(
            stackdriver.Options(project_id=PROJECT))
        stats.view_manager.register_exporter(exporter)
        stats.view_manager.register_view(size_view)

        # Fake between 1 and 10 milliseconds of work.
        time.sleep(random.randint(1, 10) / 1000.0)

        # Record the size of one "processed video".
        tag_map = tag_map_module.TagMap()
        tag_map.insert(frontend_key, tag_value_module.TagValue("1200"))
        measure_map = stats.stats_recorder.new_measurement_map()
        measure_map.measure_int_put(size_measure, 25 * MiB)
        measure_map.record(tag_map)

        # Poll the API until the descriptor shows up (or retries run out).
        @retry(wait_fixed=RETRY_WAIT_PERIOD,
               stop_max_attempt_number=RETRY_MAX_ATTEMPT)
        def assert_metric_descriptor(self, exporter, view_description):
            name = exporter.client.project_path(PROJECT)
            descriptors = exporter.client.list_metric_descriptors(name)
            match = next((d for d in descriptors
                          if d.description == view_description), None)
            self.assertIsNotNone(match)
            self.assertEqual(match.description, view_description)
            self.assertEqual(match.unit, "By")

        assert_metric_descriptor(self, exporter, view_description)
def setupOpenCensusAndPrometheusExporter():
    """Register the latency and line-count views, then attach a newly
    created stats exporter to the global view manager."""
    manager = stats_module.stats.view_manager

    for registered_view in (latency_view, line_count_view):
        manager.register_view(registered_view)

    manager.register_exporter(stats_exporter.new_stats_exporter())
    def test_not_blank_project(self):
        """An explicit project_id yields a StackdriverStatsExporter."""
        with mock.patch(('opencensus.ext.stackdriver.stats_exporter'
                         '.monitoring_v3.MetricServiceClient'),
                        _Client):
            created = stackdriver.new_stats_exporter(
                stackdriver.Options(project_id=1))

        self.assertIsInstance(
            created, stackdriver.StackdriverStatsExporter)
    def test_singleton_with_params(self):
        """Options' default_monitoring_labels end up on the exporter."""
        labels = {'key1': 'value1'}

        with mock.patch(('opencensus.ext.stackdriver.stats_exporter'
                         '.monitoring_v3.MetricServiceClient'),
                        _Client):
            created = stackdriver.new_stats_exporter(
                stackdriver.Options(project_id=1,
                                    default_monitoring_labels=labels))

        self.assertEqual(created.default_labels, labels)
    def test_export_empty(self, mock_stats, mock_client):
        """An empty metric set must not trigger any Stackdriver API calls."""
        mock_stats.get_metrics.return_value = []

        with MockGetExporterThread() as mget:
            exporter = stackdriver.new_stats_exporter(
                stackdriver.Options(project_id=1))
            mget.transport.step()

        # Neither descriptors nor time series should have been created.
        for api_method in ('create_metric_descriptor', 'create_time_series'):
            getattr(exporter.client, api_method).assert_not_called()
Example #10
0
    def test_stats_record_async(self):
        """Record one async measurement against the global stats object and
        verify the Stackdriver metric descriptor after one export interval."""
        # A per-process suffix keeps names unique and avoids cached objects.
        suffix = str(os.getpid())

        tag_key = "SampleKeyAsyncTest%s" % suffix
        measure_name = "SampleMeasureNameAsyncTest%s" % suffix
        measure_description = "SampleDescriptionAsyncTest%s" % suffix
        view_name = "SampleViewNameAsyncTest%s" % suffix
        view_description = "SampleViewDescriptionAsyncTest%s" % suffix

        frontend_key = tag_key_module.TagKey(tag_key)
        size_measure = measure_module.MeasureInt(
            measure_name, measure_description, "By")
        size_distribution = aggregation_module.DistributionAggregation(
            [0.0, 16.0 * MiB, 256.0 * MiB])
        size_view = view_module.View(
            view_name, view_description, [frontend_key],
            size_measure, size_distribution)

        global_stats = stats_module.stats
        exporter = stackdriver.new_stats_exporter(
            stackdriver.Options(project_id=PROJECT))
        global_stats.view_manager.register_exporter(exporter)
        global_stats.view_manager.register_view(size_view)

        # Fake between 1 and 10 milliseconds of work.
        time.sleep(random.randint(1, 10) / 1000.0)

        # Record the size of one "processed video".
        tag_map = tag_map_module.TagMap()
        tag_map.insert(frontend_key, tag_value_module.TagValue("1200"))
        measure_map = global_stats.stats_recorder.new_measurement_map()
        measure_map.measure_int_put(size_measure, 25 * MiB)
        measure_map.record(tag_map)

        # Give the exporter thread enough time to export exactly once.
        time.sleep(transport.DEFAULT_INTERVAL * 1.5)

        self.check_sd_md(exporter, view_description)
    def test_client_info_user_agent(self):
        """The monitoring client's user agent must include our version slug.

        Assumes MetricServiceClient calls ClientInfo.to_user_agent to attach
        the user agent as metadata on metric service API calls.
        """
        client_patch = mock.patch(
            'opencensus.ext.stackdriver.stats_exporter.monitoring_v3'
            '.MetricServiceClient', _Client)

        with client_patch:
            exporter = stackdriver.new_stats_exporter(
                stackdriver.Options(project_id=1))

        user_agent = exporter.client.client_info.to_user_agent()
        self.assertIn(stackdriver.get_user_agent_slug(), user_agent)
    def test_export_single_metric(self, mock_stats, mock_client):
        """A one-metric set exports one descriptor and one time series."""
        point_time = datetime(2019, 3, 20, 21, 34, 0, 537954)
        single_point = point.Point(
            value=value.ValueLong(value=123), timestamp=point_time)
        series = time_series.TimeSeries(
            label_values=[label_value.LabelValue('val')],
            points=[single_point],
            start_timestamp=utils.to_iso_str(point_time))

        descriptor = metric_descriptor.MetricDescriptor(
            name='name2',
            description='description2',
            unit='unit2',
            type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64,
            label_keys=[label_key.LabelKey('key', 'description')])

        mock_stats.get_metrics.return_value = [
            metric.Metric(descriptor=descriptor, time_series=[series])]

        with MockGetExporterThread() as mget:
            exporter = stackdriver.new_stats_exporter(
                stackdriver.Options(project_id=1))
            mget.transport.step()

        # Exactly one descriptor, mapped to a GAUGE / INT64 SD descriptor.
        exporter.client.create_metric_descriptor.assert_called()
        self.assertEqual(
            exporter.client.create_metric_descriptor.call_count, 1)
        md_call_arg = exporter.client.create_metric_descriptor.call_args[0][1]
        self.assertEqual(
            md_call_arg.metric_kind,
            monitoring_v3.enums.MetricDescriptor.MetricKind.GAUGE)
        self.assertEqual(
            md_call_arg.value_type,
            monitoring_v3.enums.MetricDescriptor.ValueType.INT64)

        # Exactly one time series carrying the single recorded point.
        exporter.client.create_time_series.assert_called()
        self.assertEqual(exporter.client.create_time_series.call_count, 1)
        ts_call_arg = exporter.client.create_time_series.call_args[0][1]
        self.assertEqual(len(ts_call_arg), 1)
        self.assertEqual(len(ts_call_arg[0].points), 1)
        self.assertEqual(ts_call_arg[0].points[0].value.int64_value, 123)
def main():
    """Enable Stackdriver metrics and record 100 fake latency samples,
    one per second."""
    exporter = stackdriver.new_stats_exporter(
        stackdriver.Options(project_id=project_id))
    view_manager.register_exporter(exporter)
    view_manager.register_view(latency_view)

    measurement_map = stats_recorder.new_measurement_map()
    tag_map = tag_map_module.TagMap()

    # One sample per second, uniform in [0, 5000) ms.
    for i in range(100):
        ms = random.random() * 5 * 1000
        print("Latency {0}:{1}".format(i, ms))
        measurement_map.measure_float_put(m_latency_ms, ms)
        measurement_map.record(tag_map)
        time.sleep(1)

    print("Done recording metrics")
def main():
    """Register LATENCY_VIEW, create a Stackdriver exporter, record 100
    fake latency samples, then wait for one export interval."""
    # Measurements are only aggregated/exported for registered views.
    stats.stats.view_manager.register_view(LATENCY_VIEW)

    # NOTE(review): the exporter is created but never registered with the
    # view manager here -- confirm new_stats_exporter() self-registers its
    # transport, otherwise nothing is exported.
    exporter = stats_exporter.new_stats_exporter()
    print('Exporting stats to project "{}"'
          .format(exporter.options.project_id))

    # 100 fake latency samples, uniform in [0, 5000) ms.
    for i in range(100):
        latency_ms = random() * 5 * 1000
        print("Latency {}: {}".format(i, latency_ms))

        measurement_map = stats.stats.stats_recorder.new_measurement_map()
        measurement_map.measure_float_put(LATENCY_MS, latency_ms)
        measurement_map.record()

    # Stay alive long enough for the exporter to fire at least once.
    time.sleep(65)
Example #15
0
def register_stackdriver_exporter() -> None:
    # Create a Stackdriver stats exporter and attach it to the global stats
    # object's view manager.
    exporter = stackdriver.new_stats_exporter()
    stats().view_manager.register_exporter(exporter)
    # NOTE(review): everything below duplicates the tail of
    # setup_open_census() and references `stats_stats`, which is not defined
    # in this function -- unless it exists as a module-level name, this will
    # raise NameError at runtime. Looks like paste/merge damage; confirm
    # against the original source and clean up.
    app.view_manager = stats_stats.view_manager
    app.stats_recorder = stats_stats.stats_recorder
    response_time_view = view.View(
        "response_time", "The time it took to respond",
        [app.key_method, app.key_status, app.key_error], app.m_response_ms,
        aggregation.LastValueAggregation())

    app.exporter = stackdriver.new_stats_exporter(options=stackdriver.Options(
        project_id=os.getenv('PROJECT_ID')))

    app.view_manager.register_exporter(app.exporter)
    app.view_manager.register_view(response_time_view)


'''A more complex flask app making use of middleware and the open census lib to measure metrics in stackdriver'''
# Wrap the Flask app in the OpenCensus middleware, exporting stats to the
# project named by the PROJECT_ID environment variable.
middleware = OpenCensusFlask(
    app,
    exporter=stackdriver.new_stats_exporter(options=stackdriver.Options(
        project_id=os.getenv('PROJECT_ID'))))


@app.route('/', methods=["GET", "POST"])
@export_response_time
def hello():
    # Minimal handler; response time is measured by the decorator above.
    return 'Hello world!'


if __name__ == '__main__':
    # Serve locally on port 8080.
    app.run(host='localhost', port=8080)
Example #17
0
 def test_new_stats_exporter(self, mock_client):
     """With the metric client mocked, the default factory must return an
     exporter whose options (including project_id) are populated."""
     transport = stats_exporter.new_stats_exporter()
     self.assertIsNotNone(transport)
     self.assertIsNotNone(transport.options)
     self.assertIsNotNone(transport.options.project_id)
Example #18
0
                        response_list[index]['count']
                    ) + "</td><td>" + response_list[index]['price'] + "</tr>"
            else:
                vendor_response += "<tr><td>This vendor timed out.</td></tr>"

        vendor_response += "</table>"

    monitor.measure_int_put(RPC_MEASURE, app.config['NUM_RPCS'])
    monitor.record()

    if (app.config['ERRORS'] > 0):
        monitor.measure_int_put(RPC_MEASURE, app.config['ERRORS'])
        monitor.record()

    return vendor_response


if __name__ == '__main__':
    # Wire tracing and shared request-stats state onto the Flask app config
    # before serving.
    tracer = initialize_tracer()
    app.config['TRACER'] = tracer
    # A shared measurement map plus counters, stored in app config so request
    # handlers can record into them.
    app.config['STATS'] = stats.stats.stats_recorder.new_measurement_map()
    app.config['ERRORS'] = 0
    app.config['NUM_RPCS'] = 0

    register_views(stats.stats.view_manager)

    # Export registered views to Stackdriver in the background.
    exporter = stats_exporter.new_stats_exporter()
    stats.stats.view_manager.register_exporter(exporter)

    app.run(host='0.0.0.0', port=5000, debug=True)