def _get_monitored_resource(
    resource: Resource,
) -> Optional[MonitoredResource]:
    """Add Google resource specific information (e.g. instance id, region).

    See
    https://cloud.google.com/monitoring/custom-metrics/creating-metrics#custom-metric-resources
    for supported types.

    Args:
        resource: OpenTelemetry Resource whose attributes are mapped onto a
            GCP MonitoredResource.

    Returns:
        A MonitoredResource for supported GCP resource types, or None when
        the resource is not from GCP or its type has no known label mapping.

    Raises:
        KeyError: if a supported resource type is missing one of the
            attributes its label mapping requires (malformed GCP resource).
    """
    resource_attributes = resource.attributes
    # Only GCP resources can be mapped; anything else gets no monitored resource.
    if resource_attributes.get("cloud.provider") != "gcp":
        return None
    resource_type = resource_attributes["gcp.resource_type"]
    if resource_type not in OT_RESOURCE_LABEL_TO_GCP:
        return None
    return MonitoredResource(
        type=resource_type,
        labels={
            # Monitored resource labels are strings, so coerce every value.
            gcp_label: str(resource_attributes[ot_label])
            for ot_label, gcp_label in OT_RESOURCE_LABEL_TO_GCP[
                resource_type
            ].items()
        },
    )
def test_extract_resources(self):
    """_get_monitored_resource maps OT resource labels onto GCP labels."""
    exporter = CloudMonitoringMetricsExporter(project_id=self.project_id)

    # An empty resource carries no GCP information to extract.
    self.assertIsNone(
        exporter._get_monitored_resource(Resource.create_empty())
    )

    # A complete gce_instance resource maps to a MonitoredResource;
    # non-string values (the int account id) are stringified.
    complete_labels = {
        "cloud.account.id": 123,
        "host.id": "host",
        "cloud.zone": "US",
        "cloud.provider": "gcp",
        "extra_info": "extra",
        "gcp.resource_type": "gce_instance",
        "not_gcp_resource": "value",
    }
    self.assertEqual(
        exporter._get_monitored_resource(Resource(labels=complete_labels)),
        MonitoredResource(
            type="gce_instance",
            labels={
                "project_id": "123",
                "instance_id": "host",
                "zone": "US",
            },
        ),
    )

    # Should throw when passed a malformed GCP resource dict.
    malformed = Resource(
        labels={
            "cloud.account.id": "123",
            "host.id": "host",
            "extra_info": "extra",
            "not_gcp_resource": "value",
            "gcp.resource_type": "gce_instance",
            "cloud.provider": "gcp",
        }
    )
    self.assertRaises(
        KeyError, exporter._get_monitored_resource, malformed
    )

    # Unknown GCP resource types yield no monitored resource.
    unsupported = Resource(
        labels={
            "cloud.account.id": "123",
            "host.id": "host",
            "extra_info": "extra",
            "not_gcp_resource": "value",
            "gcp.resource_type": "unsupported_gcp_resource",
            "cloud.provider": "gcp",
        }
    )
    self.assertIsNone(exporter._get_monitored_resource(unsupported))

    # Non-GCP providers yield no monitored resource either.
    non_gcp = Resource(
        labels={
            "cloud.account.id": "123",
            "host.id": "host",
            "extra_info": "extra",
            "not_gcp_resource": "value",
            "cloud.provider": "aws",
        }
    )
    self.assertIsNone(exporter._get_monitored_resource(non_gcp))
def test_list_entries_no_paging(self):
    """With no page token, list_entries passes INITIAL_PAGE to the GAX
    stub and converts returned protobufs into TextEntry objects."""
    import datetime
    from google.api.monitored_resource_pb2 import MonitoredResource
    from google.gax import INITIAL_PAGE
    from google.cloud.grpc.logging.v2.log_entry_pb2 import LogEntry
    from google.cloud._helpers import _datetime_to_pb_timestamp
    from google.cloud._helpers import UTC
    from google.cloud._testing import _GAXPageIterator
    from google.cloud.logging import DESCENDING
    from google.cloud.logging.client import Client
    from google.cloud.logging.entries import TextEntry
    from google.cloud.logging.logger import Logger

    TOKEN = 'TOKEN'
    TEXT = 'TEXT'
    # Build a single text-payload log entry protobuf for the fake response.
    resource_pb = MonitoredResource(type='global')
    timestamp = datetime.datetime.utcnow().replace(tzinfo=UTC)
    timestamp_pb = _datetime_to_pb_timestamp(timestamp)
    entry_pb = LogEntry(log_name=self.LOG_PATH,
                        resource=resource_pb,
                        timestamp=timestamp_pb,
                        text_payload=TEXT)
    response = _GAXPageIterator([entry_pb], page_token=TOKEN)
    gax_api = _GAXLoggingAPI(_list_log_entries_response=response)
    client = Client(project=self.PROJECT, credentials=object(),
                    use_gax=True)
    api = self._make_one(gax_api, client)

    iterator = api.list_entries(
        [self.PROJECT], self.FILTER, DESCENDING)
    entries = list(iterator)
    next_token = iterator.next_page_token

    # First check the token.
    self.assertEqual(next_token, TOKEN)
    # Then check the entries returned.
    self.assertEqual(len(entries), 1)
    entry = entries[0]
    self.assertIsInstance(entry, TextEntry)
    self.assertEqual(entry.payload, TEXT)
    self.assertIsInstance(entry.logger, Logger)
    self.assertEqual(entry.logger.name, self.LOG_NAME)
    # Optional fields were not set on the protobuf, so they come back None.
    self.assertIsNone(entry.insert_id)
    self.assertEqual(entry.timestamp, timestamp)
    self.assertIsNone(entry.labels)
    self.assertIsNone(entry.severity)
    self.assertIsNone(entry.http_request)

    # Verify exactly what reached the GAX stub.
    resource_names, projects, filter_, order_by, page_size, options = (
        gax_api._list_log_entries_called_with)
    self.assertEqual(resource_names, [])
    self.assertEqual(projects, [self.PROJECT])
    self.assertEqual(filter_, self.FILTER)
    self.assertEqual(order_by, DESCENDING)
    self.assertEqual(page_size, 0)
    # No token supplied, so the call must start from INITIAL_PAGE.
    self.assertIs(options.page_token, INITIAL_PAGE)
def _list_entries_with_paging_helper(self, payload, struct_pb):
    """Shared helper: list_entries with page_size/page_token forwards both
    to the GAX stub and converts JSON-payload protobufs to StructEntry.

    :param payload: the expected (deserialized) payload on the entry.
    :param struct_pb: the protobuf Struct to set as ``json_payload``.
    """
    import datetime
    from google.api.monitored_resource_pb2 import MonitoredResource
    from google.cloud.grpc.logging.v2.log_entry_pb2 import LogEntry
    from google.cloud._helpers import _datetime_to_pb_timestamp
    from google.cloud._helpers import UTC
    from google.cloud._testing import _GAXPageIterator
    from google.cloud.logging.client import Client
    from google.cloud.logging.entries import StructEntry
    from google.cloud.logging.logger import Logger

    SIZE = 23
    TOKEN = 'TOKEN'
    NEW_TOKEN = 'NEW_TOKEN'
    # Build a single JSON-payload log entry protobuf for the fake response.
    resource_pb = MonitoredResource(type='global')
    timestamp = datetime.datetime.utcnow().replace(tzinfo=UTC)
    timestamp_pb = _datetime_to_pb_timestamp(timestamp)
    entry_pb = LogEntry(log_name=self.LOG_PATH,
                        resource=resource_pb,
                        timestamp=timestamp_pb,
                        json_payload=struct_pb)
    response = _GAXPageIterator([entry_pb], page_token=NEW_TOKEN)
    gax_api = _GAXLoggingAPI(_list_log_entries_response=response)
    client = Client(project=self.PROJECT, credentials=_make_credentials(),
                    use_gax=True)
    api = self._make_one(gax_api, client)

    iterator = api.list_entries([self.PROJECT], page_size=SIZE,
                                page_token=TOKEN)
    entries = list(iterator)
    next_token = iterator.next_page_token

    # First check the token.
    self.assertEqual(next_token, NEW_TOKEN)
    self.assertEqual(len(entries), 1)
    entry = entries[0]
    self.assertIsInstance(entry, StructEntry)
    self.assertEqual(entry.payload, payload)
    self.assertIsInstance(entry.logger, Logger)
    self.assertEqual(entry.logger.name, self.LOG_NAME)
    # Optional fields were not set on the protobuf, so they come back None.
    self.assertIsNone(entry.insert_id)
    self.assertEqual(entry.timestamp, timestamp)
    self.assertIsNone(entry.labels)
    self.assertIsNone(entry.severity)
    self.assertIsNone(entry.http_request)

    # Verify exactly what reached the GAX stub: defaults for filter/order,
    # and the caller-supplied size and token passed straight through.
    resource_names, projects, filter_, order_by, page_size, options = (
        gax_api._list_log_entries_called_with)
    self.assertEqual(resource_names, [])
    self.assertEqual(projects, [self.PROJECT])
    self.assertEqual(filter_, '')
    self.assertEqual(order_by, '')
    self.assertEqual(page_size, SIZE)
    self.assertEqual(options.page_token, TOKEN)
def test_setup_logging_resource(self, mock_setup_logging_zone):
    """setup_logging_resource builds a gae_app MonitoredResource using
    the (mocked) zone lookup."""
    fake_zone = 'test time zone'
    mock_setup_logging_zone.return_value = fake_zone

    expected_labels = {
        'project_id': self.project_id,
        'module_id': GAE_LOGGING_MODULE_ID,
        'version_id': GAE_LOGGING_VERSION_ID,
        'zone': fake_zone,
    }
    self.assertEqual(
        MonitoredResource(type='gae_app', labels=expected_labels),
        curation_gae_handler.setup_logging_resource(),
    )
def setup_logging_resource():
    """
    Set the values for the Google Logging Resource object. Thread safe.
    :return: MonitoredResource pb2 structure.
    """
    # https://cloud.google.com/logging/docs/reference/v2/rpc/google.api#google.api.MonitoredResource
    return MonitoredResource(
        type='gae_app',
        labels={
            "project_id": app_identity.get_application_id(),
            "module_id": GAE_LOGGING_MODULE_ID,
            "version_id": GAE_LOGGING_VERSION_ID,
            "zone": setup_logging_zone(),
        },
    )
def _make_log_entry_with_extras(self, labels, iid, type_url, now):
    """Build a LogEntry protobuf with every optional field populated.

    :param labels: dict used both as the resource labels and entry labels.
    :param iid: insert id for the entry.
    :param type_url: type URL for the Any proto payload.
    :param now: datetime to stamp on the entry.
    :return: a fully populated ``LogEntry`` pb with proto payload, HTTP
        request info, WARNING severity, and operation metadata.
    """
    from google.api.monitored_resource_pb2 import MonitoredResource
    from google.cloud.grpc.logging.v2.log_entry_pb2 import LogEntry
    from google.cloud.grpc.logging.v2.log_entry_pb2 import (
        LogEntryOperation)
    from google.logging.type.http_request_pb2 import HttpRequest
    from google.logging.type.log_severity_pb2 import WARNING
    from google.protobuf.any_pb2 import Any
    from google.cloud._helpers import _datetime_to_pb_timestamp

    resource_pb = MonitoredResource(
        type='global', labels=labels)
    # Any payload carries only the type URL; no serialized message needed
    # for these tests.
    proto_payload = Any(type_url=type_url)
    timestamp_pb = _datetime_to_pb_timestamp(now)
    request_pb = HttpRequest(
        request_url='http://example.com/requested',
        request_method='GET',
        status=200,
        referer='http://example.com/referer',
        user_agent='AGENT',
        cache_hit=True,
        request_size=256,
        response_size=1024,
        remote_ip='1.2.3.4',
    )
    operation_pb = LogEntryOperation(
        producer='PRODUCER',
        first=True,
        last=True,
        id='OPID',
    )
    entry_pb = LogEntry(log_name=self.LOG_PATH,
                        resource=resource_pb,
                        proto_payload=proto_payload,
                        timestamp=timestamp_pb,
                        severity=WARNING,
                        insert_id=iid,
                        http_request=request_pb,
                        labels=labels,
                        operation=operation_pb)
    return entry_pb
def test_list_entries_no_paging(self):
    """With no page token, list_entries passes INITIAL_PAGE to the GAX
    stub and returns plain-dict entries (older dict-returning API)."""
    import datetime
    from google.api.monitored_resource_pb2 import MonitoredResource
    from google.gax import INITIAL_PAGE
    from google.logging.v2.log_entry_pb2 import LogEntry
    from google.cloud._helpers import _datetime_to_pb_timestamp
    from google.cloud._testing import _GAXPageIterator
    from google.cloud.logging import DESCENDING

    TOKEN = 'TOKEN'
    TEXT = 'TEXT'
    # Build a single text-payload log entry protobuf for the fake response.
    resource_pb = MonitoredResource(type='global')
    timestamp_pb = _datetime_to_pb_timestamp(datetime.datetime.utcnow())
    entry_pb = LogEntry(log_name=self.LOG_NAME,
                        resource=resource_pb,
                        timestamp=timestamp_pb,
                        text_payload=TEXT)
    response = _GAXPageIterator([entry_pb], TOKEN)
    gax_api = _GAXLoggingAPI(_list_log_entries_response=response)
    api = self._makeOne(gax_api)

    entries, next_token = api.list_entries([self.PROJECT], self.FILTER,
                                           DESCENDING)

    # Entries come back as JSON-style dicts in this API version.
    self.assertEqual(len(entries), 1)
    entry = entries[0]
    self.assertIsInstance(entry, dict)
    self.assertEqual(entry['logName'], self.LOG_NAME)
    self.assertEqual(entry['resource'], {'type': 'global'})
    self.assertEqual(entry['textPayload'], TEXT)
    self.assertEqual(next_token, TOKEN)

    # Verify exactly what reached the GAX stub.
    projects, filter_, order_by, page_size, options = (
        gax_api._list_log_entries_called_with)
    self.assertEqual(projects, [self.PROJECT])
    self.assertEqual(filter_, self.FILTER)
    self.assertEqual(order_by, DESCENDING)
    self.assertEqual(page_size, 0)
    # No token supplied, so the call must start from INITIAL_PAGE.
    self.assertIs(options.page_token, INITIAL_PAGE)
def _list_entries_with_paging_helper(self, payload, struct_pb):
    """Shared helper: list_entries with page_size/page_token forwards both
    to the GAX stub and returns JSON-payload entries as plain dicts
    (older dict-returning API).

    :param payload: the expected (deserialized) ``jsonPayload`` value.
    :param struct_pb: the protobuf Struct to set as ``json_payload``.
    """
    import datetime
    from google.api.monitored_resource_pb2 import MonitoredResource
    from google.logging.v2.log_entry_pb2 import LogEntry
    from google.cloud._testing import _GAXPageIterator
    from google.cloud._helpers import _datetime_to_pb_timestamp

    SIZE = 23
    TOKEN = 'TOKEN'
    NEW_TOKEN = 'NEW_TOKEN'
    # Build a single JSON-payload log entry protobuf for the fake response.
    resource_pb = MonitoredResource(type='global')
    timestamp_pb = _datetime_to_pb_timestamp(datetime.datetime.utcnow())
    entry_pb = LogEntry(log_name=self.LOG_NAME,
                        resource=resource_pb,
                        timestamp=timestamp_pb,
                        json_payload=struct_pb)
    response = _GAXPageIterator([entry_pb], NEW_TOKEN)
    gax_api = _GAXLoggingAPI(_list_log_entries_response=response)
    api = self._makeOne(gax_api)

    entries, next_token = api.list_entries([self.PROJECT],
                                           page_size=SIZE,
                                           page_token=TOKEN)

    # Entries come back as JSON-style dicts in this API version.
    self.assertEqual(len(entries), 1)
    entry = entries[0]
    self.assertIsInstance(entry, dict)
    self.assertEqual(entry['logName'], self.LOG_NAME)
    self.assertEqual(entry['resource'], {'type': 'global'})
    self.assertEqual(entry['jsonPayload'], payload)
    self.assertEqual(next_token, NEW_TOKEN)

    # Verify exactly what reached the GAX stub: defaults for filter/order,
    # and the caller-supplied size and token passed straight through.
    projects, filter_, order_by, page_size, options = (
        gax_api._list_log_entries_called_with)
    self.assertEqual(projects, [self.PROJECT])
    self.assertEqual(filter_, '')
    self.assertEqual(order_by, '')
    self.assertEqual(page_size, SIZE)
    self.assertEqual(options.page_token, TOKEN)
def test_export(self):
    """End-to-end export: unsupported aggregators are skipped, supported
    records become TimeSeries, and duplicate label sets within the write
    interval are dropped."""
    client = mock.Mock()
    # Freeze time_ns during construction so the exporter's start time is
    # exactly 1 second (matches interval.start_time.seconds == 1 below).
    # NOTE(review): the patch is assumed to cover only construction, not
    # the export calls — confirm against the original (source formatting
    # was collapsed and does not show the with-block extent).
    with mock.patch(
        "opentelemetry.exporter.cloud_monitoring.time_ns",
        lambda: NANOS_PER_SECOND,
    ):
        exporter = CloudMonitoringMetricsExporter(
            project_id=self.project_id, client=client
        )
    exporter.project_name = self.project_name

    # An unsupported aggregator must not produce any time series.
    exporter.export(
        [
            MetricRecord(
                MockMetric(),
                (("label1", "value1"),),
                UnsupportedAggregator(),
            )
        ]
    )
    client.create_time_series.assert_not_called()

    client.create_metric_descriptor.return_value = MetricDescriptor(
        **{
            "name": None,
            "type": "custom.googleapis.com/OpenTelemetry/name",
            "display_name": "name",
            "description": "description",
            "labels": [
                LabelDescriptor(key="label1", value_type="STRING"),
                LabelDescriptor(key="label2", value_type="INT64"),
            ],
            "metric_kind": "CUMULATIVE",
            "value_type": "DOUBLE",
        }
    )
    # GCP resource attached to the metric's meter; should be mapped onto
    # the gce_instance monitored resource on each series.
    resource = Resource(
        labels={
            "cloud.account.id": 123,
            "host.id": "host",
            "cloud.zone": "US",
            "cloud.provider": "gcp",
            "extra_info": "extra",
            "gcp.resource_type": "gce_instance",
            "not_gcp_resource": "value",
        }
    )

    sum_agg_one = SumAggregator()
    sum_agg_one.checkpoint = 1
    sum_agg_one.last_update_timestamp = (
        WRITE_INTERVAL + 1
    ) * NANOS_PER_SECOND
    exporter.export(
        [
            MetricRecord(
                MockMetric(meter=MockMeter(resource=resource)),
                (("label1", "value1"), ("label2", 1),),
                sum_agg_one,
            ),
            MetricRecord(
                MockMetric(meter=MockMeter(resource=resource)),
                (("label1", "value2"), ("label2", 2),),
                sum_agg_one,
            ),
        ]
    )
    expected_resource = MonitoredResource(
        type="gce_instance",
        labels={"project_id": "123", "instance_id": "host", "zone": "US"},
    )

    # Expected series for the two records above (note int label values
    # are stringified on the series).
    series1 = TimeSeries(resource=expected_resource)
    series1.metric.type = "custom.googleapis.com/OpenTelemetry/name"
    series1.metric.labels["label1"] = "value1"
    series1.metric.labels["label2"] = "1"
    point = series1.points.add()
    point.value.int64_value = 1
    point.interval.end_time.seconds = WRITE_INTERVAL + 1
    point.interval.end_time.nanos = 0
    point.interval.start_time.seconds = 1
    point.interval.start_time.nanos = 0

    series2 = TimeSeries(resource=expected_resource)
    series2.metric.type = "custom.googleapis.com/OpenTelemetry/name"
    series2.metric.labels["label1"] = "value2"
    series2.metric.labels["label2"] = "2"
    point = series2.points.add()
    point.value.int64_value = 1
    point.interval.end_time.seconds = WRITE_INTERVAL + 1
    point.interval.end_time.nanos = 0
    point.interval.start_time.seconds = 1
    point.interval.start_time.nanos = 0
    client.create_time_series.assert_has_calls(
        [mock.call(self.project_name, [series1, series2])]
    )

    # Attempting to export too soon after another export with the exact
    # same labels leads to it being dropped
    sum_agg_two = SumAggregator()
    sum_agg_two.checkpoint = 1
    sum_agg_two.last_update_timestamp = (
        WRITE_INTERVAL + 2
    ) * NANOS_PER_SECOND
    exporter.export(
        [
            MetricRecord(
                MockMetric(),
                (("label1", "value1"), ("label2", 1),),
                sum_agg_two,
            ),
            MetricRecord(
                MockMetric(),
                (("label1", "value2"), ("label2", 2),),
                sum_agg_two,
            ),
        ]
    )
    # Still only the first successful call — the duplicate was dropped.
    self.assertEqual(client.create_time_series.call_count, 1)

    # But exporting with different labels is fine
    sum_agg_two.checkpoint = 2
    exporter.export(
        [
            MetricRecord(
                MockMetric(),
                (("label1", "changed_label"), ("label2", 2),),
                sum_agg_two,
            ),
        ]
    )
    # No meter resource on this record, so the series has no monitored
    # resource attached.
    series3 = TimeSeries()
    series3.metric.type = "custom.googleapis.com/OpenTelemetry/name"
    series3.metric.labels["label1"] = "changed_label"
    series3.metric.labels["label2"] = "2"
    point = series3.points.add()
    point.value.int64_value = 2
    point.interval.end_time.seconds = WRITE_INTERVAL + 2
    point.interval.end_time.nanos = 0
    point.interval.start_time.seconds = 1
    point.interval.start_time.nanos = 0
    client.create_time_series.assert_has_calls(
        [
            mock.call(self.project_name, [series1, series2]),
            mock.call(self.project_name, [series3]),
        ]
    )