def test_list_members_w_all_arguments(self):
    """Passing start/end times and a filter populates the query params."""
    import datetime
    from gcloud._helpers import _datetime_to_rfc3339

    self._setUpResources()
    interval_start = datetime.datetime(2016, 4, 6, 22, 5, 0)
    interval_end = datetime.datetime(2016, 4, 6, 22, 10, 0)
    zone_filter = 'resource.zone = "us-central1-a"'
    connection = _Connection({'members': self.MEMBERS})
    client = _Client(project=self.PROJECT, connection=connection)
    group = self._makeOneFromJSON(self.JSON_GROUP, client)

    found = group.list_members(
        start_time=interval_start,
        end_time=interval_end,
        filter_string=zone_filter)

    self.assertEqual(found, [self.RESOURCE1, self.RESOURCE2])
    # Exactly one request, carrying all three arguments as query params.
    request, = connection._requested
    expected_request = {
        'method': 'GET',
        'path': '/%s/members' % self.GROUP_NAME,
        'query_params': {
            'interval.startTime': _datetime_to_rfc3339(interval_start),
            'interval.endTime': _datetime_to_rfc3339(interval_end),
            'filter': zone_filter,
        },
    }
    self.assertEqual(request, expected_request)
def test_list_members_w_all_arguments(self):
    """All of start_time / end_time / filter_string reach the request."""
    import datetime
    from gcloud._helpers import _datetime_to_rfc3339

    self._setUpResources()
    t_start = datetime.datetime(2016, 4, 6, 22, 5, 0)
    t_end = datetime.datetime(2016, 4, 6, 22, 10, 0)
    member_filter = 'resource.zone = "us-central1-a"'
    response = {'members': self.MEMBERS}
    connection = _Connection(response)
    client = _Client(project=self.PROJECT, connection=connection)
    group = self._makeOneFromJSON(self.JSON_GROUP, client)

    members = group.list_members(
        start_time=t_start, end_time=t_end, filter_string=member_filter)

    self.assertEqual(members, [self.RESOURCE1, self.RESOURCE2])
    # The stub connection records each api_request call; expect one.
    request, = connection._requested
    self.assertEqual(request, {
        'method': 'GET',
        'path': '/%s/members' % self.GROUP_NAME,
        'query_params': {
            'interval.startTime': _datetime_to_rfc3339(t_start),
            'interval.endTime': _datetime_to_rfc3339(t_end),
            'filter': member_filter,
        },
    })
def _build_query_params(self, headers_only=False, page_size=None, page_token=None): """Yield key-value pairs for the URL query string. We use a series of key-value pairs (suitable for passing to ``urlencode``) instead of a ``dict`` to allow for repeated fields. :type headers_only: boolean :param headers_only: Whether to omit the point data from the :class:`~gcloud.monitoring.timeseries.TimeSeries` objects. :type page_size: integer or None :param page_size: A limit on the number of points to return per page. :type page_token: string or None :param page_token: A token to continue the retrieval. """ yield 'filter', self.filter yield 'interval.endTime', _datetime_to_rfc3339(self._end_time, ignore_zone=False) if self._start_time is not None: yield 'interval.startTime', _datetime_to_rfc3339(self._start_time, ignore_zone=False) if self._per_series_aligner is not None: yield 'aggregation.perSeriesAligner', self._per_series_aligner if self._alignment_period_seconds is not None: alignment_period = '{period}s'.format( period=self._alignment_period_seconds) yield 'aggregation.alignmentPeriod', alignment_period if self._cross_series_reducer is not None: yield ('aggregation.crossSeriesReducer', self._cross_series_reducer) for field in self._group_by_fields: yield 'aggregation.groupByFields', field if headers_only: yield 'view', 'HEADERS' if page_size is not None: yield 'pageSize', page_size if page_token is not None: yield 'pageToken', page_token
def _build_query_params(self, headers_only=False, page_size=None, page_token=None): """Yield key-value pairs for the URL query string. We use a series of key-value pairs (suitable for passing to ``urlencode``) instead of a ``dict`` to allow for repeated fields. :type headers_only: boolean :param headers_only: Whether to omit the point data from the :class:`~gcloud.monitoring.timeseries.TimeSeries` objects. :type page_size: integer or None :param page_size: A limit on the number of points to return per page. :type page_token: string or None :param page_token: A token to continue the retrieval. """ yield 'filter', self.filter yield 'interval.endTime', _datetime_to_rfc3339( self._end_time, ignore_zone=False) if self._start_time is not None: yield 'interval.startTime', _datetime_to_rfc3339( self._start_time, ignore_zone=False) if self._per_series_aligner is not None: yield 'aggregation.perSeriesAligner', self._per_series_aligner if self._alignment_period_seconds is not None: alignment_period = '{period}s'.format( period=self._alignment_period_seconds) yield 'aggregation.alignmentPeriod', alignment_period if self._cross_series_reducer is not None: yield ('aggregation.crossSeriesReducer', self._cross_series_reducer) for field in self._group_by_fields: yield 'aggregation.groupByFields', field if headers_only: yield 'view', 'HEADERS' if page_size is not None: yield 'pageSize', page_size if page_token is not None: yield 'pageToken', page_token
def _makeResource(self):
    """Build a dict mirroring a ``dns#change`` API resource for tests."""
    from gcloud._helpers import _datetime_to_rfc3339

    addition = {
        'name': 'test.example.com',
        'type': 'CNAME',
        'ttl': '3600',
        'rrdatas': ['www.example.com'],
    }
    deletion = {
        'name': 'test.example.com',
        'type': 'CNAME',
        'ttl': '86400',
        'rrdatas': ['other.example.com'],
    }
    return {
        'kind': 'dns#change',
        'id': self.CHANGES_NAME,
        'startTime': _datetime_to_rfc3339(self.WHEN),
        'status': 'done',
        'additions': [addition],
        'deletions': [deletion],
    }
def _timestamp_message(self, attrs): """Add a timestamp to ``attrs``, if the topic is so configured. If ``attrs`` already has the key, do nothing. Helper method for ``publish``/``Batch.publish``. """ if self.timestamp_messages and 'timestamp' not in attrs: attrs['timestamp'] = _datetime_to_rfc3339(_NOW())
def _timestamp_message(self, attrs): """Add a timestamp to ``attrs``, if the topic is so configured. If ``attrs`` already has the key, do nothing. Helper method for ``publish``/``Batch.publish``. """ if self.timestamp_messages and 'timestamp' not in attrs: attrs['timestamp'] = _datetime_to_rfc3339(_NOW())
def _build_value_resource(value): """Helper for _build_fields_resource""" result = {} if value.value_type == 'string': result['stringValue'] = value.string_value if value.string_format is not None: result['stringFormat'] = value.string_format if value.language is not None: result['lang'] = value.language elif value.value_type == 'number': result['numberValue'] = value.number_value elif value.value_type == 'timestamp': stamp = _datetime_to_rfc3339(value.timestamp_value) result['timestampValue'] = stamp elif value.value_type == 'geo': result['geoValue'] = '%s, %s' % value.geo_value else: raise ValueError('Unknown value_type: %s' % value.value_type) return result
def _makeResource(self):
    """Return a ``dns#change`` resource dict as the API would emit it."""
    from gcloud._helpers import _datetime_to_rfc3339

    resource = {
        'kind': 'dns#change',
        'id': self.CHANGES_NAME,
        'startTime': _datetime_to_rfc3339(self.WHEN),
        'status': 'done',
    }
    resource['additions'] = [
        {'name': 'test.example.com', 'type': 'CNAME',
         'ttl': '3600', 'rrdatas': ['www.example.com']},
    ]
    resource['deletions'] = [
        {'name': 'test.example.com', 'type': 'CNAME',
         'ttl': '86400', 'rrdatas': ['other.example.com']},
    ]
    return resource
def _callFUT(self, *args, **kwargs):
    """Invoke the function under test, forwarding all arguments."""
    from gcloud import _helpers
    return _helpers._datetime_to_rfc3339(*args, **kwargs)
def test_write_entries_multiple(self):
    """``write_entries`` should send one ``LogEntry`` pb per mapping.

    Exercises the three payload flavors (text, JSON, proto) plus the
    ``operation`` and ``httpRequest`` extras, then checks the arguments
    captured by the ``_GAXLoggingAPI`` stub.
    """
    # pylint: disable=too-many-statements
    import datetime
    from google.logging.type.log_severity_pb2 import WARNING
    from google.logging.v2.log_entry_pb2 import LogEntry
    from google.protobuf.any_pb2 import Any
    from google.protobuf.struct_pb2 import Struct
    from gcloud._helpers import _datetime_to_rfc3339, UTC
    # Fixture data: one entry of each payload type.
    TEXT = 'TEXT'
    NOW = datetime.datetime.utcnow().replace(tzinfo=UTC)
    TIMESTAMP_TYPE_URL = 'type.googleapis.com/google.protobuf.Timestamp'
    JSON = {'payload': 'PAYLOAD', 'type': 'json'}
    PROTO = {
        '@type': TIMESTAMP_TYPE_URL,
        'value': _datetime_to_rfc3339(NOW),
    }
    PRODUCER = 'PRODUCER'
    OPID = 'OPID'
    URL = 'http://example.com/'
    ENTRIES = [
        {'textPayload': TEXT, 'severity': WARNING},
        {'jsonPayload': JSON,
         'operation': {'producer': PRODUCER, 'id': OPID}},
        {'protoPayload': PROTO, 'httpRequest': {'requestUrl': URL}},
    ]
    LOG_PATH = 'projects/%s/logs/%s' % (self.PROJECT, self.LOG_NAME)
    RESOURCE = {
        'type': 'global',
    }
    LABELS = {
        'foo': 'bar',
    }
    gax_api = _GAXLoggingAPI()
    api = self._makeOne(gax_api)

    api.write_entries(ENTRIES, LOG_PATH, RESOURCE, LABELS)

    # The stub records positional call arguments for inspection.
    entries, log_name, resource, labels, partial_success, options = (
        gax_api._write_log_entries_called_with)
    self.assertEqual(len(entries), len(ENTRIES))

    # Entry 0: text payload; per-entry log_name/resource/labels stay unset.
    entry = entries[0]
    self.assertTrue(isinstance(entry, LogEntry))
    self.assertEqual(entry.log_name, '')
    self.assertEqual(entry.resource.type, '')
    self.assertEqual(entry.labels, {})
    self.assertEqual(entry.text_payload, TEXT)
    self.assertEqual(entry.severity, WARNING)

    # Entry 1: JSON payload becomes a protobuf Struct; operation metadata
    # is carried over.
    entry = entries[1]
    self.assertTrue(isinstance(entry, LogEntry))
    self.assertEqual(entry.log_name, '')
    self.assertEqual(entry.resource.type, '')
    self.assertEqual(entry.labels, {})
    json_struct = entry.json_payload
    self.assertTrue(isinstance(json_struct, Struct))
    self.assertEqual(json_struct.fields['payload'].string_value,
                     JSON['payload'])
    operation = entry.operation
    self.assertEqual(operation.producer, PRODUCER)
    self.assertEqual(operation.id, OPID)

    # Entry 2: proto payload packed into an Any; httpRequest carried over.
    entry = entries[2]
    self.assertTrue(isinstance(entry, LogEntry))
    self.assertEqual(entry.log_name, '')
    self.assertEqual(entry.resource.type, '')
    self.assertEqual(entry.labels, {})
    proto = entry.proto_payload
    self.assertTrue(isinstance(proto, Any))
    self.assertEqual(proto.type_url, TIMESTAMP_TYPE_URL)
    request = entry.http_request
    self.assertEqual(request.request_url, URL)

    # Batch-level arguments are forwarded unchanged.
    self.assertEqual(log_name, LOG_PATH)
    self.assertEqual(resource, RESOURCE)
    self.assertEqual(labels, LABELS)
    self.assertEqual(partial_success, False)
    self.assertEqual(options, None)
def _make_timestamp(value):
    """Render ``value`` via the RFC 3339 helper."""
    from gcloud._helpers import _datetime_to_rfc3339 as to_rfc3339
    return to_rfc3339(value)
def list_members(self, filter_string=None, end_time=None, start_time=None):
    """List the members of this group via a ``GET`` request.

    When ``end_time`` is omitted, the group membership over the last
    minute is returned.

    Example::

        >>> for member in group.list_members():
        ...     print member

    List members that are Compute Engine VM instances::

        >>> filter_string = 'resource.type = "gce_instance"'
        >>> for member in group.list_members(filter_string=filter_string):
        ...     print member

    List historical members that existed between 4 and 5 hours ago::

        >>> import datetime
        >>> t1 = datetime.datetime.utcnow() - datetime.timedelta(hours=4)
        >>> t0 = t1 - datetime.timedelta(hours=1)
        >>> for member in group.list_members(end_time=t1, start_time=t0):
        ...     print member

    :type filter_string: string or None
    :param filter_string: An optional list filter describing the members
        to be returned.  The filter may reference the type, labels, and
        metadata of monitored resources that comprise the group.  See the
        `filter documentation`_.

    :type end_time: :class:`datetime.datetime` or None
    :param end_time: The end time (inclusive) of the time interval for
        which results should be returned, as a datetime object.  Must be
        supplied whenever ``start_time`` is supplied.

    :type start_time: :class:`datetime.datetime` or None
    :param start_time: The start time (exclusive) of the time interval
        for which results should be returned, as a datetime object.

    :rtype: list of :class:`~gcloud.monitoring.resource.Resource`
    :returns: A list of resource instances.

    :raises: :exc:`ValueError` if ``start_time`` is specified without
        ``end_time``.

    .. _filter documentation:
        https://cloud.google.com/monitoring/api/v3/filters#group-filter
    """
    if start_time is not None and end_time is None:
        raise ValueError('If "start_time" is specified, "end_time" must '
                         'also be specified')

    path = '%s/members' % (self.path,)
    params = {}
    if filter_string is not None:
        params['filter'] = filter_string
    if end_time is not None:
        params['interval.endTime'] = _datetime_to_rfc3339(
            end_time, ignore_zone=False)
    if start_time is not None:
        params['interval.startTime'] = _datetime_to_rfc3339(
            start_time, ignore_zone=False)

    # Follow nextPageToken links until the listing is exhausted.
    members = []
    page_token = None
    while True:
        if page_token is not None:
            params['pageToken'] = page_token
        response = self.client.connection.api_request(
            method='GET', path=path, query_params=params.copy())
        members.extend(Resource._from_dict(info)
                       for info in response.get('members', ()))
        page_token = response.get('nextPageToken')
        if not page_token:
            break
    return members
def test_list_entries_with_extra_properties(self):
    """A fully-populated log entry pb maps to the expected REST dict.

    Builds a ``_LogEntryPB`` stub carrying severity, labels, insert ID,
    HTTP request, and operation extras, then checks every mapped key and
    the arguments recorded by the ``_GAXLoggingAPI`` stub.
    """
    from datetime import datetime
    from google.logging.type.log_severity_pb2 import WARNING
    from gcloud._testing import _GAXPageIterator
    from gcloud._helpers import UTC
    from gcloud._helpers import _datetime_to_rfc3339
    from gcloud._helpers import _datetime_to_pb_timestamp
    # Fixture data.
    NOW = datetime.utcnow().replace(tzinfo=UTC)
    SIZE = 23
    TOKEN = 'TOKEN'
    NEW_TOKEN = 'NEW_TOKEN'
    PAYLOAD = {'message': 'MESSAGE', 'weather': 'sunny'}
    SEVERITY = 'WARNING'
    LABELS = {
        'foo': 'bar',
    }
    IID = 'IID'
    request = _HTTPRequestPB()
    operation = _LogEntryOperationPB()
    EXTRAS = {
        'severity': WARNING,
        'labels': LABELS,
        'insert_id': IID,
        'http_request': request,
        'operation': operation,
    }
    ENTRY = _LogEntryPB(self.LOG_NAME, proto_payload=PAYLOAD, **EXTRAS)
    ENTRY.resource.labels['foo'] = 'bar'
    ENTRY.timestamp = _datetime_to_pb_timestamp(NOW)
    response = _GAXPageIterator([ENTRY], NEW_TOKEN)
    gax_api = _GAXLoggingAPI(_list_log_entries_response=response)
    api = self._makeOne(gax_api)

    entries, next_token = api.list_entries(
        [self.PROJECT], page_size=SIZE, page_token=TOKEN)

    self.assertEqual(len(entries), 1)
    entry = entries[0]
    self.assertIsInstance(entry, dict)
    self.assertEqual(entry['logName'], self.LOG_NAME)
    self.assertEqual(entry['resource'],
                     {'type': 'global', 'labels': {'foo': 'bar'}})
    self.assertEqual(entry['protoPayload'], PAYLOAD)
    # NOTE(review): the pb carried the WARNING enum; the mapping appears
    # to render it as the string name -- confirm against the mapper.
    self.assertEqual(entry['severity'], SEVERITY)
    self.assertEqual(entry['labels'], LABELS)
    self.assertEqual(entry['insertId'], IID)
    self.assertEqual(entry['timestamp'], _datetime_to_rfc3339(NOW))
    # Every httpRequest sub-field must be copied into camelCase keys.
    EXPECTED_REQUEST = {
        'requestMethod': request.request_method,
        'requestUrl': request.request_url,
        'status': request.status,
        'requestSize': request.request_size,
        'responseSize': request.response_size,
        'referer': request.referer,
        'userAgent': request.user_agent,
        'remoteIp': request.remote_ip,
        'cacheHit': request.cache_hit,
    }
    self.assertEqual(entry['httpRequest'], EXPECTED_REQUEST)
    EXPECTED_OPERATION = {
        'producer': operation.producer,
        'id': operation.id,
        'first': operation.first,
        'last': operation.last,
    }
    self.assertEqual(entry['operation'], EXPECTED_OPERATION)
    self.assertEqual(next_token, NEW_TOKEN)
    # Arguments forwarded to the GAX layer.
    projects, filter_, order_by, page_size, options = (
        gax_api._list_log_entries_called_with)
    self.assertEqual(projects, [self.PROJECT])
    self.assertEqual(filter_, '')
    self.assertEqual(order_by, '')
    self.assertEqual(page_size, SIZE)
    self.assertEqual(options.page_token, TOKEN)
def _callFUT(self, value):
    """Call the function under test with a single positional argument."""
    from gcloud._helpers import _datetime_to_rfc3339 as fut
    return fut(value)
def _callFUT(self, *args, **kwargs):
    """Forward all arguments to ``_datetime_to_rfc3339`` under test."""
    from gcloud._helpers import _datetime_to_rfc3339 as fut
    return fut(*args, **kwargs)
def _pb_timestamp_to_rfc3339(timestamp_pb):
    """Convert a protobuf Timestamp to an RFC 3339 string.

    Helper for :func:`_log_entry_pb_to_mapping`.
    """
    as_datetime = _pb_timestamp_to_datetime(timestamp_pb)
    return _datetime_to_rfc3339(as_datetime)
def _make_timestamp(value):
    """Format a datetime as RFC 3339 for use in fixtures."""
    from gcloud import _helpers
    return _helpers._datetime_to_rfc3339(value)
def test_query(self):
    """``client.query`` builds the filter/interval request and parses
    the returned time series.

    Two fake ``gce_instance`` CPU-utilization series are served through
    a stub connection; the parsed series, point values/timestamps, and
    the recorded request are all checked.
    """
    import datetime
    from gcloud._helpers import _datetime_to_rfc3339
    from gcloud.exceptions import NotFound
    # Interval fixture: END_TIME minus MINUTES yields START_TIME.
    START_TIME = datetime.datetime(2016, 4, 6, 22, 5, 0)
    END_TIME = datetime.datetime(2016, 4, 6, 22, 10, 0)
    MINUTES = 5
    METRIC_TYPE = 'compute.googleapis.com/instance/cpu/utilization'
    METRIC_LABELS = {'instance_name': 'instance-1'}
    METRIC_LABELS2 = {'instance_name': 'instance-2'}
    RESOURCE_TYPE = 'gce_instance'
    RESOURCE_LABELS = {
        'project_id': 'my-project',
        'zone': 'us-east1-a',
        'instance_id': '1234567890123456789',
    }
    RESOURCE_LABELS2 = {
        'project_id': 'my-project',
        'zone': 'us-east1-b',
        'instance_id': '9876543210987654321',
    }
    METRIC_KIND = 'GAUGE'
    VALUE_TYPE = 'DOUBLE'
    TS1 = '2016-04-06T22:05:00.042Z'
    TS2 = '2016-04-06T22:05:01.042Z'
    TS3 = '2016-04-06T22:05:02.042Z'
    VAL1 = 0.1
    VAL2 = 0.2

    def P(timestamp, value):
        # Build a single point dict as the API would return it.
        return {
            'interval': {'startTime': timestamp, 'endTime': timestamp},
            'value': {'doubleValue': value},
        }

    # Points arrive newest-first (TS3, TS2, TS1), as the API sends them.
    SERIES1 = {
        'metric': {'type': METRIC_TYPE, 'labels': METRIC_LABELS},
        'resource': {'type': RESOURCE_TYPE, 'labels': RESOURCE_LABELS},
        'metricKind': METRIC_KIND,
        'valueType': VALUE_TYPE,
        'points': [P(TS3, VAL1), P(TS2, VAL1), P(TS1, VAL1)],
    }
    SERIES2 = {
        'metric': {'type': METRIC_TYPE, 'labels': METRIC_LABELS2},
        'resource': {'type': RESOURCE_TYPE, 'labels': RESOURCE_LABELS2},
        'metricKind': METRIC_KIND,
        'valueType': VALUE_TYPE,
        'points': [P(TS3, VAL2), P(TS2, VAL2), P(TS1, VAL2)],
    }
    RESPONSE = {'timeSeries': [SERIES1, SERIES2]}
    client = self._makeOne(project=PROJECT, credentials=_Credentials())
    connection = client.connection = _Connection(RESPONSE)
    # A simple query. In practice, it can be very convenient to let the
    # end time default to the start of the current minute.
    query = client.query(METRIC_TYPE, end_time=END_TIME, minutes=MINUTES)
    response = list(query)
    self.assertEqual(len(response), 2)
    series1, series2 = response
    self.assertEqual(series1.metric.type, METRIC_TYPE)
    self.assertEqual(series2.metric.type, METRIC_TYPE)
    self.assertEqual(series1.metric.labels, METRIC_LABELS)
    self.assertEqual(series2.metric.labels, METRIC_LABELS2)
    self.assertEqual(series1.resource.type, RESOURCE_TYPE)
    self.assertEqual(series2.resource.type, RESOURCE_TYPE)
    self.assertEqual(series1.resource.labels, RESOURCE_LABELS)
    self.assertEqual(series2.resource.labels, RESOURCE_LABELS2)
    self.assertEqual(series1.metric_kind, METRIC_KIND)
    self.assertEqual(series2.metric_kind, METRIC_KIND)
    self.assertEqual(series1.value_type, VALUE_TYPE)
    self.assertEqual(series2.value_type, VALUE_TYPE)
    self.assertEqual([p.value for p in series1.points], [VAL1, VAL1, VAL1])
    self.assertEqual([p.value for p in series2.points], [VAL2, VAL2, VAL2])
    # Parsed points come back oldest-first, i.e. reversed from the wire.
    self.assertEqual([p.end_time for p in series1.points], [TS1, TS2, TS3])
    self.assertEqual([p.end_time for p in series2.points], [TS1, TS2, TS3])
    expected_request = {
        'method': 'GET',
        'path': '/projects/{project}/timeSeries/'.format(project=PROJECT),
        'query_params': [
            ('filter', 'metric.type = "{type}"'.format(type=METRIC_TYPE)),
            ('interval.endTime', _datetime_to_rfc3339(END_TIME)),
            ('interval.startTime', _datetime_to_rfc3339(START_TIME)),
        ],
    }
    request, = connection._requested
    self.assertEqual(request, expected_request)
    # NOTE(review): presumably the _Connection stub raises NotFound once
    # its single canned response is consumed -- confirm against the stub.
    with self.assertRaises(NotFound):
        list(query)
def test_list_changes_explicit(self):
    """``zone.list_changes`` with explicit paging args and client.

    Verifies that the alternate client is used (conn1 untouched), that
    the returned ``Changes``/``ResourceRecordSet`` objects mirror the
    fixture, and that paging arguments become query parameters.
    """
    from gcloud._helpers import _datetime_to_rfc3339
    from gcloud.dns.changes import Changes
    from gcloud.dns.resource_record_set import ResourceRecordSet
    self._setUpConstants()
    PATH = 'projects/%s/managedZones/%s/changes' % (
        self.PROJECT, self.ZONE_NAME)
    TOKEN = 'TOKEN'
    NAME_1 = 'www.example.com'
    TYPE_1 = 'A'
    TTL_1 = '86400'
    RRDATAS_1 = ['123.45.67.89']
    NAME_2 = 'alias.example.com'
    TYPE_2 = 'CNAME'
    TTL_2 = '3600'
    RRDATAS_2 = ['www.example.com']
    CHANGES_NAME = 'changeset_id'
    DATA = {
        'changes': [{
            'kind': 'dns#change',
            'id': CHANGES_NAME,
            'status': 'pending',
            'startTime': _datetime_to_rfc3339(self.WHEN),
            'additions': [
                {'kind': 'dns#resourceRecordSet',
                 'name': NAME_1,
                 'type': TYPE_1,
                 'ttl': TTL_1,
                 'rrdatas': RRDATAS_1}],
            # NOTE(review): 'kind' here looks like a fixture typo --
            # probably meant 'dns#resourceRecordSet'; the test never
            # asserts on it, so it is harmless. Confirm before changing.
            'deletions': [
                {'kind': 'dns#change',
                 'name': NAME_2,
                 'type': TYPE_2,
                 'ttl': TTL_2,
                 'rrdatas': RRDATAS_2}],
        }]
    }
    # conn1 backs the zone's own client; conn2 backs the explicit client.
    conn1 = _Connection()
    client1 = _Client(project=self.PROJECT, connection=conn1)
    conn2 = _Connection(DATA)
    client2 = _Client(project=self.PROJECT, connection=conn2)
    zone = self._makeOne(self.ZONE_NAME, self.DNS_NAME, client1)

    changes, token = zone.list_changes(
        max_results=3, page_token=TOKEN, client=client2)

    self.assertEqual(len(changes), len(DATA['changes']))
    for found, expected in zip(changes, DATA['changes']):
        self.assertTrue(isinstance(found, Changes))
        self.assertEqual(found.name, CHANGES_NAME)
        self.assertEqual(found.status, 'pending')
        self.assertEqual(found.started, self.WHEN)
        self.assertEqual(len(found.additions), len(expected['additions']))
        for found_rr, expected_rr in zip(found.additions,
                                         expected['additions']):
            self.assertTrue(isinstance(found_rr, ResourceRecordSet))
            self.assertEqual(found_rr.name, expected_rr['name'])
            self.assertEqual(found_rr.record_type, expected_rr['type'])
            self.assertEqual(found_rr.ttl, int(expected_rr['ttl']))
            self.assertEqual(found_rr.rrdatas, expected_rr['rrdatas'])
        self.assertEqual(len(found.deletions), len(expected['deletions']))
        for found_rr, expected_rr in zip(found.deletions,
                                         expected['deletions']):
            self.assertTrue(isinstance(found_rr, ResourceRecordSet))
            self.assertEqual(found_rr.name, expected_rr['name'])
            self.assertEqual(found_rr.record_type, expected_rr['type'])
            self.assertEqual(found_rr.ttl, int(expected_rr['ttl']))
            self.assertEqual(found_rr.rrdatas, expected_rr['rrdatas'])
    self.assertEqual(token, None)
    # The explicit client's connection handled the single request.
    self.assertEqual(len(conn1._requested), 0)
    self.assertEqual(len(conn2._requested), 1)
    req = conn2._requested[0]
    self.assertEqual(req['method'], 'GET')
    self.assertEqual(req['path'], '/%s' % PATH)
    self.assertEqual(req['query_params'],
                     {'maxResults': 3, 'pageToken': TOKEN})
def _pb_timestamp_to_rfc3339(timestamp_pb):
    """Render a protobuf Timestamp as an RFC 3339 string.

    Helper for :func:`_log_entry_pb_to_mapping`.
    """
    return _datetime_to_rfc3339(_pb_timestamp_to_datetime(timestamp_pb))
def list_members(self, filter_string=None, end_time=None, start_time=None):
    """Lists all members of this group via a ``GET`` request.

    If no ``end_time`` is provided then the group membership over the
    last minute is returned.

    Example::

        >>> for member in group.list_members():
        ...     print member

    List members that are Compute Engine VM instances::

        >>> filter_string = 'resource.type = "gce_instance"'
        >>> for member in group.list_members(filter_string=filter_string):
        ...     print member

    List historical members that existed between 4 and 5 hours ago::

        >>> import datetime
        >>> t1 = datetime.datetime.utcnow() - datetime.timedelta(hours=4)
        >>> t0 = t1 - datetime.timedelta(hours=1)
        >>> for member in group.list_members(end_time=t1, start_time=t0):
        ...     print member

    :type filter_string: string or None
    :param filter_string: An optional list filter describing the members
        to be returned.  The filter may reference the type, labels, and
        metadata of monitored resources that comprise the group.  See
        the `filter documentation`_.

    :type end_time: :class:`datetime.datetime` or None
    :param end_time: The end time (inclusive) of the time interval for
        which results should be returned, as a datetime object.  Required
        if ``start_time`` is specified.

    :type start_time: :class:`datetime.datetime` or None
    :param start_time: The start time (exclusive) of the time interval
        for which results should be returned, as a datetime object.

    :rtype: list of :class:`~gcloud.monitoring.resource.Resource`
    :returns: A list of resource instances.

    :raises: :exc:`ValueError` if ``start_time`` is given but
        ``end_time`` is not.

    .. _filter documentation:
        https://cloud.google.com/monitoring/api/v3/filters#group-filter
    """
    if start_time is not None and end_time is None:
        raise ValueError('If "start_time" is specified, "end_time" must '
                         'also be specified')

    path = '%s/members' % (self.path,)

    # Assemble the constant query parameters once, up front.
    params = {}
    if filter_string is not None:
        params['filter'] = filter_string
    if end_time is not None:
        params['interval.endTime'] = _datetime_to_rfc3339(
            end_time, ignore_zone=False)
    if start_time is not None:
        params['interval.startTime'] = _datetime_to_rfc3339(
            start_time, ignore_zone=False)

    resources = []
    token = None
    while True:
        if token is not None:
            params['pageToken'] = token
        response = self.client.connection.api_request(
            method='GET', path=path, query_params=dict(params))
        for info in response.get('members', ()):
            resources.append(Resource._from_dict(info))
        token = response.get('nextPageToken')
        if not token:
            break
    return resources
def test_list_changes_explicit(self):
    """Explicit ``max_results``/``page_token``/``client`` arguments.

    The alternate ``client2`` must serve the request (its connection
    records one call; the zone's own records none), and the fixture
    change set must round-trip into ``Changes`` objects.
    """
    from gcloud._helpers import _datetime_to_rfc3339
    from gcloud.dns.changes import Changes
    from gcloud.dns.resource_record_set import ResourceRecordSet
    self._setUpConstants()
    PATH = 'projects/%s/managedZones/%s/changes' % (self.PROJECT,
                                                    self.ZONE_NAME)
    TOKEN = 'TOKEN'
    NAME_1 = 'www.example.com'
    TYPE_1 = 'A'
    TTL_1 = '86400'
    RRDATAS_1 = ['123.45.67.89']
    NAME_2 = 'alias.example.com'
    TYPE_2 = 'CNAME'
    TTL_2 = '3600'
    RRDATAS_2 = ['www.example.com']
    CHANGES_NAME = 'changeset_id'
    DATA = {
        'changes': [{
            'kind': 'dns#change',
            'id': CHANGES_NAME,
            'status': 'pending',
            'startTime': _datetime_to_rfc3339(self.WHEN),
            'additions': [{
                'kind': 'dns#resourceRecordSet',
                'name': NAME_1,
                'type': TYPE_1,
                'ttl': TTL_1,
                'rrdatas': RRDATAS_1
            }],
            # NOTE(review): 'dns#change' here is likely a fixture typo
            # for 'dns#resourceRecordSet'; nothing asserts on it --
            # confirm before correcting.
            'deletions': [{
                'kind': 'dns#change',
                'name': NAME_2,
                'type': TYPE_2,
                'ttl': TTL_2,
                'rrdatas': RRDATAS_2
            }],
        }]
    }
    conn1 = _Connection()
    client1 = _Client(project=self.PROJECT, connection=conn1)
    conn2 = _Connection(DATA)
    client2 = _Client(project=self.PROJECT, connection=conn2)
    zone = self._makeOne(self.ZONE_NAME, self.DNS_NAME, client1)

    changes, token = zone.list_changes(max_results=3, page_token=TOKEN,
                                       client=client2)

    self.assertEqual(len(changes), len(DATA['changes']))
    for found, expected in zip(changes, DATA['changes']):
        self.assertTrue(isinstance(found, Changes))
        self.assertEqual(found.name, CHANGES_NAME)
        self.assertEqual(found.status, 'pending')
        self.assertEqual(found.started, self.WHEN)
        self.assertEqual(len(found.additions), len(expected['additions']))
        for found_rr, expected_rr in zip(found.additions,
                                         expected['additions']):
            self.assertTrue(isinstance(found_rr, ResourceRecordSet))
            self.assertEqual(found_rr.name, expected_rr['name'])
            self.assertEqual(found_rr.record_type, expected_rr['type'])
            self.assertEqual(found_rr.ttl, int(expected_rr['ttl']))
            self.assertEqual(found_rr.rrdatas, expected_rr['rrdatas'])
        self.assertEqual(len(found.deletions), len(expected['deletions']))
        for found_rr, expected_rr in zip(found.deletions,
                                         expected['deletions']):
            self.assertTrue(isinstance(found_rr, ResourceRecordSet))
            self.assertEqual(found_rr.name, expected_rr['name'])
            self.assertEqual(found_rr.record_type, expected_rr['type'])
            self.assertEqual(found_rr.ttl, int(expected_rr['ttl']))
            self.assertEqual(found_rr.rrdatas, expected_rr['rrdatas'])
    self.assertEqual(token, None)
    self.assertEqual(len(conn1._requested), 0)
    self.assertEqual(len(conn2._requested), 1)
    req = conn2._requested[0]
    self.assertEqual(req['method'], 'GET')
    self.assertEqual(req['path'], '/%s' % PATH)
    self.assertEqual(req['query_params'], {
        'maxResults': 3,
        'pageToken': TOKEN
    })
def test_query(self):
    """End-to-end check of ``client.query`` against a stub connection.

    Serves two fake CPU-utilization time series, then verifies the
    parsed series objects, point ordering, and the exact request the
    stub recorded.
    """
    import datetime
    from gcloud._helpers import _datetime_to_rfc3339
    from gcloud.exceptions import NotFound
    # END_TIME - MINUTES == START_TIME, which the query should compute.
    START_TIME = datetime.datetime(2016, 4, 6, 22, 5, 0)
    END_TIME = datetime.datetime(2016, 4, 6, 22, 10, 0)
    MINUTES = 5
    METRIC_TYPE = 'compute.googleapis.com/instance/cpu/utilization'
    METRIC_LABELS = {'instance_name': 'instance-1'}
    METRIC_LABELS2 = {'instance_name': 'instance-2'}
    RESOURCE_TYPE = 'gce_instance'
    RESOURCE_LABELS = {
        'project_id': 'my-project',
        'zone': 'us-east1-a',
        'instance_id': '1234567890123456789',
    }
    RESOURCE_LABELS2 = {
        'project_id': 'my-project',
        'zone': 'us-east1-b',
        'instance_id': '9876543210987654321',
    }
    METRIC_KIND = 'GAUGE'
    VALUE_TYPE = 'DOUBLE'
    TS1 = '2016-04-06T22:05:00.042Z'
    TS2 = '2016-04-06T22:05:01.042Z'
    TS3 = '2016-04-06T22:05:02.042Z'
    VAL1 = 0.1
    VAL2 = 0.2

    def P(timestamp, value):
        # One point dict in API wire format.
        return {
            'interval': {
                'startTime': timestamp,
                'endTime': timestamp
            },
            'value': {
                'doubleValue': value
            },
        }

    # Wire order is newest-first: TS3, TS2, TS1.
    SERIES1 = {
        'metric': {
            'type': METRIC_TYPE,
            'labels': METRIC_LABELS
        },
        'resource': {
            'type': RESOURCE_TYPE,
            'labels': RESOURCE_LABELS
        },
        'metricKind': METRIC_KIND,
        'valueType': VALUE_TYPE,
        'points': [P(TS3, VAL1), P(TS2, VAL1), P(TS1, VAL1)],
    }
    SERIES2 = {
        'metric': {
            'type': METRIC_TYPE,
            'labels': METRIC_LABELS2
        },
        'resource': {
            'type': RESOURCE_TYPE,
            'labels': RESOURCE_LABELS2
        },
        'metricKind': METRIC_KIND,
        'valueType': VALUE_TYPE,
        'points': [P(TS3, VAL2), P(TS2, VAL2), P(TS1, VAL2)],
    }
    RESPONSE = {'timeSeries': [SERIES1, SERIES2]}
    client = self._makeOne(project=PROJECT, credentials=_Credentials())
    connection = client.connection = _Connection(RESPONSE)
    # A simple query. In practice, it can be very convenient to let the
    # end time default to the start of the current minute.
    query = client.query(METRIC_TYPE, end_time=END_TIME, minutes=MINUTES)
    response = list(query)
    self.assertEqual(len(response), 2)
    series1, series2 = response
    self.assertEqual(series1.metric.type, METRIC_TYPE)
    self.assertEqual(series2.metric.type, METRIC_TYPE)
    self.assertEqual(series1.metric.labels, METRIC_LABELS)
    self.assertEqual(series2.metric.labels, METRIC_LABELS2)
    self.assertEqual(series1.resource.type, RESOURCE_TYPE)
    self.assertEqual(series2.resource.type, RESOURCE_TYPE)
    self.assertEqual(series1.resource.labels, RESOURCE_LABELS)
    self.assertEqual(series2.resource.labels, RESOURCE_LABELS2)
    self.assertEqual(series1.metric_kind, METRIC_KIND)
    self.assertEqual(series2.metric_kind, METRIC_KIND)
    self.assertEqual(series1.value_type, VALUE_TYPE)
    self.assertEqual(series2.value_type, VALUE_TYPE)
    self.assertEqual([p.value for p in series1.points], [VAL1, VAL1, VAL1])
    self.assertEqual([p.value for p in series2.points], [VAL2, VAL2, VAL2])
    # Parsed points come back oldest-first (reversed from the wire).
    self.assertEqual([p.end_time for p in series1.points], [TS1, TS2, TS3])
    self.assertEqual([p.end_time for p in series2.points], [TS1, TS2, TS3])
    expected_request = {
        'method': 'GET',
        'path': '/projects/{project}/timeSeries/'.format(project=PROJECT),
        'query_params': [
            ('filter', 'metric.type = "{type}"'.format(type=METRIC_TYPE)),
            ('interval.endTime', _datetime_to_rfc3339(END_TIME)),
            ('interval.startTime', _datetime_to_rfc3339(START_TIME)),
        ],
    }
    request, = connection._requested
    self.assertEqual(request, expected_request)
    # NOTE(review): presumably the _Connection stub raises NotFound once
    # its canned response is exhausted -- confirm against the stub.
    with self.assertRaises(NotFound):
        list(query)
def _callFUT(self, value):
    """Dispatch ``value`` to the RFC 3339 helper under test."""
    from gcloud import _helpers
    return _helpers._datetime_to_rfc3339(value)