Ejemplo n.º 1
0
    def test_list_members_w_all_arguments(self):
        """list_members forwards start/end times and the filter as query params."""
        import datetime
        from google.cloud._helpers import _datetime_to_rfc3339

        self._setUpResources()

        interval_start = datetime.datetime(2016, 4, 6, 22, 5, 0)
        interval_end = datetime.datetime(2016, 4, 6, 22, 10, 0)
        zone_filter = 'resource.zone = "us-central1-a"'

        connection = _Connection({'members': self.MEMBERS})
        client = _Client(project=self.PROJECT, connection=connection)
        group = self._makeOneFromJSON(self.JSON_GROUP, client)

        members = group.list_members(
            start_time=interval_start,
            end_time=interval_end,
            filter_string=zone_filter,
        )
        self.assertEqual(members, [self.RESOURCE1, self.RESOURCE2])

        # Exactly one request should have been issued.
        request, = connection._requested
        self.assertEqual(request, {
            'method': 'GET',
            'path': '/%s/members' % self.GROUP_NAME,
            'query_params': {
                'interval.startTime': _datetime_to_rfc3339(interval_start),
                'interval.endTime': _datetime_to_rfc3339(interval_end),
                'filter': zone_filter,
            },
        })
Ejemplo n.º 2
0
    def test_list_members_w_all_arguments(self):
        """All keyword arguments must be serialized into the query params."""
        import datetime
        from google.cloud._helpers import _datetime_to_rfc3339

        self._setUpResources()

        start = datetime.datetime(2016, 4, 6, 22, 5, 0)
        end = datetime.datetime(2016, 4, 6, 22, 10, 0)
        member_filter = 'resource.zone = "us-central1-a"'

        connection = _Connection({'members': self.MEMBERS})
        client = _Client(project=self.PROJECT, connection=connection)
        group = self._make_oneFromJSON(self.JSON_GROUP, client)

        members = group.list_members(
            start_time=start, end_time=end, filter_string=member_filter)

        self.assertEqual(members, [self.RESOURCE1, self.RESOURCE2])

        request, = connection._requested
        expected_query = {
            'interval.startTime': _datetime_to_rfc3339(start),
            'interval.endTime': _datetime_to_rfc3339(end),
            'filter': member_filter,
        }
        self.assertEqual(request, {
            'method': 'GET',
            'path': '/%s/members' % self.GROUP_NAME,
            'query_params': expected_query,
        })
    def test_to_dict_float_with_start_time(self):
        """A float-valued point with a start time serializes as doubleValue."""
        import datetime
        from google.cloud._helpers import _datetime_to_rfc3339

        GOLDEN_RATIO = 1.6180339
        start_str = _datetime_to_rfc3339(
            datetime.datetime.now(), ignore_zone=False)
        end_str = _datetime_to_rfc3339(
            datetime.datetime.now(), ignore_zone=False)

        point = self._make_one(
            end_time=end_str, start_time=start_str, value=GOLDEN_RATIO)

        expected = {
            'interval': {
                'startTime': start_str,
                'endTime': end_str,
            },
            'value': {'doubleValue': GOLDEN_RATIO},
        }
        self.assertEqual(expected, point._to_dict())
Ejemplo n.º 4
0
    def test_timeseries_factory_cumulative(self):
        """Two cumulative series share a reset time but carry distinct points."""
        import datetime
        from google.cloud._helpers import _datetime_to_rfc3339

        METRIC_TYPE = 'custom.googleapis.com/my_cumulative_metric'
        METRIC_LABELS = {'status': 'successful'}
        RESOURCE_TYPE = 'gce_instance'
        RESOURCE_LABELS = {
            'instance_id': '1234567890123456789',
            'zone': 'us-central1-f',
        }

        client = self._make_one(
            project=PROJECT, credentials=_make_credentials())
        client._connection = _Connection()   # For safety's sake.
        resource = client.resource(RESOURCE_TYPE, RESOURCE_LABELS)

        first_value, second_value = 42, 43
        reset_time = datetime.datetime.utcnow()
        first_end = datetime.datetime.utcnow()
        second_end = datetime.datetime.utcnow()

        # Build two series against the same cumulative metric.
        metric = client.metric(METRIC_TYPE, METRIC_LABELS)
        series_one = client.time_series(
            metric, resource, first_value,
            start_time=reset_time, end_time=first_end)
        series_two = client.time_series(
            metric, resource, second_value,
            start_time=reset_time, end_time=second_end)

        reset_str = _datetime_to_rfc3339(reset_time, ignore_zone=False)

        point_one = series_one.points[0]
        self.assertEqual(point_one.start_time, reset_str)
        self.assertEqual(
            point_one.end_time,
            _datetime_to_rfc3339(first_end, ignore_zone=False))
        self.assertEqual(point_one.value, first_value)

        point_two = series_two.points[0]
        self.assertEqual(point_two.start_time, reset_str)
        self.assertEqual(
            point_two.end_time,
            _datetime_to_rfc3339(second_end, ignore_zone=False))
        self.assertEqual(point_two.value, second_value)
Ejemplo n.º 5
0
    def _build_query_params(self,
                            headers_only=False,
                            page_size=None,
                            page_token=None):
        """Yield key-value pairs for the URL query string.

        A sequence of pairs (suitable for passing to ``urlencode``) is
        produced rather than a ``dict`` so that repeated fields can be
        expressed.

        :type headers_only: bool
        :param headers_only:
             Whether to omit the point data from the
             :class:`~google.cloud.monitoring.timeseries.TimeSeries` objects.

        :type page_size: int
        :param page_size:
            (Optional) A limit on the number of points to return per page.

        :type page_token: str
        :param page_token: (Optional) A token to continue the retrieval.
        """
        # The filter and the interval end are always emitted.
        yield 'filter', self.filter
        yield 'interval.endTime', _datetime_to_rfc3339(
            self._end_time, ignore_zone=False)

        # Every remaining field is emitted only when it has been set.
        if self._start_time is not None:
            yield 'interval.startTime', _datetime_to_rfc3339(
                self._start_time, ignore_zone=False)

        if self._per_series_aligner is not None:
            yield 'aggregation.perSeriesAligner', self._per_series_aligner

        if self._alignment_period_seconds is not None:
            yield 'aggregation.alignmentPeriod', '{period}s'.format(
                period=self._alignment_period_seconds)

        if self._cross_series_reducer is not None:
            yield ('aggregation.crossSeriesReducer',
                   self._cross_series_reducer)

        # groupByFields may legitimately repeat, hence the pair-based API.
        for field in self._group_by_fields:
            yield 'aggregation.groupByFields', field

        if headers_only:
            yield 'view', 'HEADERS'

        if page_size is not None:
            yield 'pageSize', page_size

        if page_token is not None:
            yield 'pageToken', page_token
    def test_timeseries_factory_cumulative(self):
        """Cumulative time series keep the shared reset start time."""
        import datetime
        from google.cloud._helpers import _datetime_to_rfc3339

        CUMULATIVE_METRIC = 'custom.googleapis.com/my_cumulative_metric'
        metric_labels = {'status': 'successful'}
        resource_labels = {
            'instance_id': '1234567890123456789',
            'zone': 'us-central1-f',
        }

        client = self._make_one(project=PROJECT, credentials=_Credentials())
        client._connection = _Connection()   # For safety's sake.
        resource = client.resource('gce_instance', resource_labels)

        value_a, value_b = 42, 43
        reset = datetime.datetime.utcnow()
        end_a = datetime.datetime.utcnow()
        end_b = datetime.datetime.utcnow()

        # Both series are built against the same cumulative metric.
        metric = client.metric(CUMULATIVE_METRIC, metric_labels)
        series_a = client.time_series(
            metric, resource, value_a, start_time=reset, end_time=end_a)
        series_b = client.time_series(
            metric, resource, value_b, start_time=reset, end_time=end_b)

        reset_str = _datetime_to_rfc3339(reset, ignore_zone=False)
        end_a_str = _datetime_to_rfc3339(end_a, ignore_zone=False)
        end_b_str = _datetime_to_rfc3339(end_b, ignore_zone=False)

        self.assertEqual(series_a.points[0].start_time, reset_str)
        self.assertEqual(series_a.points[0].end_time, end_a_str)
        self.assertEqual(series_a.points[0].value, value_a)
        self.assertEqual(series_b.points[0].start_time, reset_str)
        self.assertEqual(series_b.points[0].end_time, end_b_str)
        self.assertEqual(series_b.points[0].value, value_b)
Ejemplo n.º 7
0
    def _build_query_params(self, headers_only=False,
                            page_size=None, page_token=None):
        """Yield key-value pairs for the URL query string.

        Pairs (rather than a ``dict``) are yielded so that fields which may
        repeat -- such as ``aggregation.groupByFields`` -- can appear more
        than once when passed through ``urlencode``.

        :type headers_only: bool
        :param headers_only:
             Whether to omit the point data from the
             :class:`~google.cloud.monitoring.timeseries.TimeSeries` objects.

        :type page_size: int
        :param page_size:
            (Optional) A limit on the number of points to return per page.

        :type page_token: str
        :param page_token: (Optional) A token to continue the retrieval.
        """
        yield 'filter', self.filter

        end_time = _datetime_to_rfc3339(self._end_time, ignore_zone=False)
        yield 'interval.endTime', end_time

        if self._start_time is not None:
            start_time = _datetime_to_rfc3339(self._start_time,
                                              ignore_zone=False)
            yield 'interval.startTime', start_time

        if self._per_series_aligner is not None:
            yield 'aggregation.perSeriesAligner', self._per_series_aligner

        if self._alignment_period_seconds is not None:
            # The API expects a duration string, e.g. "300s".
            yield 'aggregation.alignmentPeriod', '{}s'.format(
                self._alignment_period_seconds)

        if self._cross_series_reducer is not None:
            yield ('aggregation.crossSeriesReducer',
                   self._cross_series_reducer)

        for field in self._group_by_fields:
            yield 'aggregation.groupByFields', field

        if headers_only:
            yield 'view', 'HEADERS'

        if page_size is not None:
            yield 'pageSize', page_size

        if page_token is not None:
            yield 'pageToken', page_token
    def test_commit_w_bound_client(self):
        """Committing a batch writes all queued entries and empties it."""
        import json
        import datetime
        from google.protobuf.json_format import MessageToJson
        from google.protobuf.struct_pb2 import Struct
        from google.protobuf.struct_pb2 import Value
        from google.cloud._helpers import _datetime_to_rfc3339
        from google.cloud.logging.logger import _GLOBAL_RESOURCE

        TEXT = 'This is the entry text'
        STRUCT = {'message': TEXT, 'weather': 'partly cloudy'}
        message = Struct(fields={'foo': Value(bool_value=True)})
        IID1, IID2, IID3 = 'IID1', 'IID2', 'IID3'
        TIMESTAMP1 = datetime.datetime(2016, 12, 31, 0, 0, 1, 999999)
        TIMESTAMP2 = datetime.datetime(2016, 12, 31, 0, 0, 2, 999999)
        TIMESTAMP3 = datetime.datetime(2016, 12, 31, 0, 0, 3, 999999)

        # (payload key, payload, insert id, timestamp) for each queued entry.
        specs = [
            ('textPayload', TEXT, IID1, TIMESTAMP1),
            ('jsonPayload', STRUCT, IID2, TIMESTAMP2),
            ('protoPayload', json.loads(MessageToJson(message)),
             IID3, TIMESTAMP3),
        ]
        ENTRIES = [
            {
                key: payload,
                'insertId': iid,
                'timestamp': _datetime_to_rfc3339(stamp),
                'resource': _GLOBAL_RESOURCE._to_dict(),
            }
            for key, payload, iid, stamp in specs
        ]

        client = _Client(project=self.PROJECT)
        api = client.logging_api = _DummyLoggingAPI()
        logger = _Logger()
        batch = self._make_one(logger, client=client)

        batch.log_text(TEXT, insert_id=IID1, timestamp=TIMESTAMP1)
        batch.log_struct(STRUCT, insert_id=IID2, timestamp=TIMESTAMP2)
        batch.log_proto(message, insert_id=IID3, timestamp=TIMESTAMP3)
        batch.commit()

        # The batch is drained and the API saw exactly the expected entries.
        self.assertEqual(list(batch.entries), [])
        self.assertEqual(api._write_entries_called_with,
                         (ENTRIES, logger.full_name, None, None))
Ejemplo n.º 9
0
    def _get_changes(self, token, changes_name):
        """Build a change-set listing payload, optionally with a page token."""
        from google.cloud._helpers import _datetime_to_rfc3339

        addition = {
            'kind': 'dns#resourceRecordSet',
            'name': 'www.example.com',
            'type': 'A',
            'ttl': '86400',
            'rrdatas': ['123.45.67.89'],
        }
        deletion = {
            'kind': 'dns#change',
            'name': 'alias.example.com',
            'type': 'CNAME',
            'ttl': '3600',
            'rrdatas': ['www.example.com'],
        }
        result = {
            'changes': [{
                'kind': 'dns#change',
                'id': changes_name,
                'status': 'pending',
                'startTime': _datetime_to_rfc3339(self.WHEN),
                'additions': [addition],
                'deletions': [deletion],
            }],
        }
        if token is not None:
            result['nextPageToken'] = token
        return result
Ejemplo n.º 10
0
 def to_api_repr(self):
     """Return the API (JSON) representation of this log entry.

     Only attributes that have been set (are not ``None``) are included,
     so the resulting dict carries exactly the populated fields.
     """
     info = {}
     if self.log_name is not None:
         info["logName"] = self.log_name
     if self.resource is not None:
         # The resource serializes itself into its dict form.
         info["resource"] = self.resource._to_dict()
     if self.labels is not None:
         info["labels"] = self.labels
     if self.insert_id is not None:
         info["insertId"] = self.insert_id
     if self.severity is not None:
         info["severity"] = self.severity
     if self.http_request is not None:
         info["httpRequest"] = self.http_request
     if self.timestamp is not None:
         # Timestamps are sent as RFC 3339 strings.
         info["timestamp"] = _datetime_to_rfc3339(self.timestamp)
     if self.trace is not None:
         info["trace"] = self.trace
     if self.span_id is not None:
         info["spanId"] = self.span_id
     if self.trace_sampled is not None:
         info["traceSampled"] = self.trace_sampled
     if self.source_location is not None:
         # Copy before mutating so the caller's dict is untouched; the
         # "line" value is stringified (defaulting to "0" when absent).
         source_location = self.source_location.copy()
         source_location["line"] = str(source_location.pop("line", 0))
         info["sourceLocation"] = source_location
     if self.operation is not None:
         info["operation"] = self.operation
     return info
Ejemplo n.º 11
0
    def test_subscription_pull_explicit(self):
        """Explicit pull arguments are forwarded and messages are unpacked."""
        import base64
        import datetime
        from google.cloud._helpers import UTC
        from google.cloud._helpers import _datetime_to_pb_timestamp
        from google.cloud._helpers import _datetime_to_rfc3339

        now = datetime.datetime.utcnow().replace(tzinfo=UTC)
        now_pb = _datetime_to_pb_timestamp(now)
        payload = b'This is the message text'
        b64 = base64.b64encode(payload).decode('ascii')
        ack_id = 'DEADBEEF'
        msg_id = 'BEADCAFE'
        expected_message = {
            'messageId': msg_id,
            'data': b64,
            'attributes': {'a': 'b'},
            'publishTime': _datetime_to_rfc3339(now),
        }
        expected = [{'ackId': ack_id, 'message': expected_message}]

        message_pb = _PubsubMessagePB(msg_id, b64, {'a': 'b'}, now_pb)
        response_pb = _PullResponsePB(
            [_ReceivedMessagePB(ack_id, message_pb)])
        gax_api = _GAXSubscriberAPI(_pull_response=response_pb)
        client = _Client(self.PROJECT)
        api = self._make_one(gax_api, client)
        max_messages = 10

        received = api.subscription_pull(
            self.SUB_PATH, return_immediately=True,
            max_messages=max_messages)

        self.assertEqual(received, expected)
        (sub_path, pulled_max,
         return_immediately, options) = gax_api._pull_called_with
        self.assertEqual(sub_path, self.SUB_PATH)
        self.assertEqual(pulled_max, max_messages)
        self.assertTrue(return_immediately)
        self.assertIsNone(options)
Ejemplo n.º 12
0
    def _get_changes(self, token, changes_name):
        """Return a fake 'changes list' API payload for the given change."""
        from google.cloud._helpers import _datetime_to_rfc3339

        change = {
            'kind': 'dns#change',
            'id': changes_name,
            'status': 'pending',
            'startTime': _datetime_to_rfc3339(self.WHEN),
            'additions': [{
                'kind': 'dns#resourceRecordSet',
                'name': 'www.example.com',
                'type': 'A',
                'ttl': '86400',
                'rrdatas': ['123.45.67.89'],
            }],
            'deletions': [{
                'kind': 'dns#change',
                'name': 'alias.example.com',
                'type': 'CNAME',
                'ttl': '3600',
                'rrdatas': ['www.example.com'],
            }],
        }
        result = {'changes': [change]}
        if token is not None:
            result['nextPageToken'] = token
        return result
Ejemplo n.º 13
0
    def _make_resource(self):
        """Return a completed dns#change resource anchored at ``self.WHEN``."""
        from google.cloud._helpers import _datetime_to_rfc3339

        addition = {
            "name": "test.example.com",
            "type": "CNAME",
            "ttl": "3600",
            "rrdatas": ["www.example.com"],
        }
        deletion = {
            "name": "test.example.com",
            "type": "CNAME",
            "ttl": "86400",
            "rrdatas": ["other.example.com"],
        }
        return {
            "kind": "dns#change",
            "id": self.CHANGES_NAME,
            "startTime": _datetime_to_rfc3339(self.WHEN),
            "status": "done",
            "additions": [addition],
            "deletions": [deletion],
        }
Ejemplo n.º 14
0
    def _makeResource(self):
        """Return a completed dns#change resource dictionary."""
        from google.cloud._helpers import _datetime_to_rfc3339

        when_str = _datetime_to_rfc3339(self.WHEN)
        return {
            'kind': 'dns#change',
            'id': self.CHANGES_NAME,
            'startTime': when_str,
            'status': 'done',
            'additions': [{
                'name': 'test.example.com',
                'type': 'CNAME',
                'ttl': '3600',
                'rrdatas': ['www.example.com'],
            }],
            'deletions': [{
                'name': 'test.example.com',
                'type': 'CNAME',
                'ttl': '86400',
                'rrdatas': ['other.example.com'],
            }],
        }
Ejemplo n.º 15
0
    def test_to_dict(self):
        """A series with one end-time-only point serializes completely."""
        import datetime
        from google.cloud._helpers import _datetime_to_rfc3339

        from google.cloud.monitoring.metric import Metric
        from google.cloud.monitoring.resource import Resource
        from google.cloud.monitoring.timeseries import Point

        VALUE = 42
        end_time_str = _datetime_to_rfc3339(
            datetime.datetime.now(), ignore_zone=False)

        metric = Metric(type=METRIC_TYPE, labels=METRIC_LABELS)
        resource = Resource(type=RESOURCE_TYPE, labels=RESOURCE_LABELS)
        point = Point(start_time=None, end_time=end_time_str, value=VALUE)

        series = self._make_one(metric=metric, resource=resource,
                                metric_kind=None, value_type=None,
                                points=[point])

        expected = {
            'metric': {'type': METRIC_TYPE, 'labels': METRIC_LABELS},
            'resource': {'type': RESOURCE_TYPE, 'labels': RESOURCE_LABELS},
            'points': [{
                'interval': {'endTime': end_time_str},
                # int64 values are serialized as decimal strings.
                'value': {'int64Value': str(VALUE)},
            }],
        }
        self.assertEqual(expected, series._to_dict())
Ejemplo n.º 16
0
    def test_to_dict(self):
        """_to_dict flattens metric, resource, and points into JSON form."""
        import datetime
        from google.cloud._helpers import _datetime_to_rfc3339

        from google.cloud.monitoring.metric import Metric
        from google.cloud.monitoring.resource import Resource
        from google.cloud.monitoring.timeseries import Point

        int_value = 42
        stamp = _datetime_to_rfc3339(datetime.datetime.now(),
                                     ignore_zone=False)

        series = self._makeOne(
            metric=Metric(type=METRIC_TYPE, labels=METRIC_LABELS),
            resource=Resource(type=RESOURCE_TYPE, labels=RESOURCE_LABELS),
            metric_kind=None, value_type=None,
            points=[Point(start_time=None, end_time=stamp, value=int_value)])

        expected = {
            'metric': {'type': METRIC_TYPE, 'labels': METRIC_LABELS},
            'resource': {'type': RESOURCE_TYPE, 'labels': RESOURCE_LABELS},
            'points': [{
                'interval': {'endTime': stamp},
                'value': {'int64Value': str(int_value)},
            }],
        }
        self.assertEqual(expected, series._to_dict())
Ejemplo n.º 17
0
 def to_api_repr(self):
     """Return the API (JSON) representation of this entry.

     Attributes that are ``None`` are omitted from the result.
     """
     info = {}
     if self.log_name is not None:
         info["logName"] = self.log_name
     if self.resource is not None:
         # The resource serializes itself into its dict form.
         info["resource"] = self.resource._to_dict()
     if self.labels is not None:
         info["labels"] = self.labels
     if self.insert_id is not None:
         info["insertId"] = self.insert_id
     if self.severity is not None:
         info["severity"] = self.severity
     if self.http_request is not None:
         info["httpRequest"] = self.http_request
     if self.timestamp is not None:
         # Timestamps are sent as RFC 3339 strings.
         info["timestamp"] = _datetime_to_rfc3339(self.timestamp)
     if self.trace is not None:
         info["trace"] = self.trace
     if self.span_id is not None:
         info["spanId"] = self.span_id
     if self.trace_sampled is not None:
         info["traceSampled"] = self.trace_sampled
     if self.source_location is not None:
         # Copy before mutating so the caller's dict is untouched; the
         # "line" value is stringified (defaulting to "0" when absent).
         source_location = self.source_location.copy()
         source_location["line"] = str(source_location.pop("line", 0))
         info["sourceLocation"] = source_location
     if self.operation is not None:
         info["operation"] = self.operation
     return info
Ejemplo n.º 18
0
 def to_api_repr(self):
     """Build the JSON-serializable API representation of this entry.

     Unset (``None``) attributes are left out of the result.
     """
     info = {}
     if self.log_name is not None:
         info["logName"] = self.log_name
     if self.resource is not None:
         info["resource"] = self.resource._to_dict()
     if self.labels is not None:
         info["labels"] = self.labels
     if self.insert_id is not None:
         info["insertId"] = self.insert_id
     if self.severity is not None:
         info["severity"] = self.severity
     if self.http_request is not None:
         info["httpRequest"] = self.http_request
     if self.timestamp is not None:
         info["timestamp"] = _datetime_to_rfc3339(self.timestamp)
     if self.trace is not None:
         info["trace"] = self.trace
     if self.span_id is not None:
         info["spanId"] = self.span_id
     if self.trace_sampled is not None:
         info["traceSampled"] = self.trace_sampled
     if self.source_location is not None:
         # Work on a copy so the caller's dict is not mutated; the line
         # number is stringified for the wire format.
         location = self.source_location.copy()
         location["line"] = str(location.pop("line", 0))
         info["sourceLocation"] = location
     if self.operation is not None:
         info["operation"] = self.operation
     return info
Ejemplo n.º 19
0
    def generateUploadPolicy(self, conditions):
        """Create a signed POST upload policy for the module-level ``bucket``.

        Re-implementation of ``Bucket.generate_upload_policy`` that signs
        with the default Compute Engine token credentials instead of a
        private key file.  The generated `policy document`_ lets visitors
        to a website upload files to Google Cloud Storage without direct
        write access; the policy expires one hour after creation.

        For example:

        .. literalinclude:: snippets.py
            :start-after: [START policy_document]
            :end-before: [END policy_document]

        .. _policy document:
            https://cloud.google.com/storage/docs/xml-api\
            /post-object#policydocument

        :type conditions: list
        :param conditions: A list of conditions as described in the
                           policy-document documentation.  A ``bucket``
                           condition for the module-level bucket is
                           appended automatically.

        :rtype: dict
        :returns: A dictionary of (form field name, form field value) of
                  form fields that should be added to your HTML upload
                  form in order to attach the signature.

        .. note::
           Relies on the module-level ``credentials`` and ``bucket``
           globals; both must be initialized before calling.
        """
        global credentials, bucket
        auth_request = requests.Request()
        # Sign with ID-token credentials derived from the default service
        # account -- no private key file is required.
        sign_cred = compute_engine.IDTokenCredentials(
            auth_request,
            "",
            service_account_email=credentials.service_account_email)
        expiration = _NOW() + timedelta(hours=1)
        conditions = conditions + [{"bucket": bucket.name}]
        policy_document = {
            "expiration": _datetime_to_rfc3339(expiration),
            "conditions": conditions,
        }
        # The signature is computed over the base64-encoded JSON policy.
        encoded_policy_document = base64.b64encode(
            json.dumps(policy_document).encode("utf-8"))
        signature = base64.b64encode(
            sign_cred.sign_bytes(encoded_policy_document))
        fields = {
            "bucket": bucket.name,
            "GoogleAccessId": sign_cred.signer_email,
            "policy": encoded_policy_document.decode("utf-8"),
            "signature": signature.decode("utf-8"),
        }
        return fields
    def test_to_api_repr_explicit(self):
        """Every populated attribute appears in the API representation."""
        import datetime
        from google.cloud.logging.resource import Resource
        from google.cloud._helpers import _datetime_to_rfc3339

        TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999)
        RESOURCE = Resource(
            type="gae_app",
            labels={"module_id": "default", "version_id": "test"},
        )
        FILE, LINE, FUNCTION = "my_file.py", 123, "my_function"
        REQUEST = {
            "requestMethod": "POST",
            "requestUrl": "https://api.example.com/endpoint",
            "status": "500",
        }
        OPERATION = {
            "id": "OP_ID",
            "producer": "PRODUCER",
            "first": True,
            "last": False,
        }
        kwargs = dict(
            log_name="test.log",
            payload={"key": "value"},
            labels={"foo": "bar", "baz": "qux"},
            insert_id="IID",
            severity="CRITICAL",
            http_request=REQUEST,
            timestamp=TIMESTAMP,
            resource=RESOURCE,
            trace="12345678-1234-5678-1234-567812345678",
            span_id="000000000000004a",
            trace_sampled=True,
            source_location={"file": FILE, "line": LINE, "function": FUNCTION},
            operation=OPERATION,
        )
        expected = {
            "logName": kwargs["log_name"],
            "jsonPayload": kwargs["payload"],
            "labels": kwargs["labels"],
            "insertId": kwargs["insert_id"],
            "severity": kwargs["severity"],
            "httpRequest": REQUEST,
            "timestamp": _datetime_to_rfc3339(TIMESTAMP),
            "resource": RESOURCE._to_dict(),
            "trace": kwargs["trace"],
            "spanId": kwargs["span_id"],
            "traceSampled": True,
            # The line number is stringified on the wire.
            "sourceLocation": {
                "file": FILE, "line": str(LINE), "function": FUNCTION,
            },
            "operation": OPERATION,
        }

        entry = self._make_one(**kwargs)

        self.assertEqual(entry.to_api_repr(), expected)
Ejemplo n.º 21
0
 def test_w_datetime(self):
     """A tz-aware datetime converts to a string Value in RFC 3339 form."""
     import datetime
     from google.protobuf.struct_pb2 import Value
     from google.cloud._helpers import UTC, _datetime_to_rfc3339

     moment = datetime.datetime.utcnow().replace(tzinfo=UTC)
     pb = self._callFUT(moment)
     self.assertIsInstance(pb, Value)
     self.assertEqual(pb.string_value, _datetime_to_rfc3339(moment))
Ejemplo n.º 22
0
    def test_generate_upload_policy(self, now):
        """The default policy expiration is one hour from 'now'."""
        from google.cloud._helpers import _datetime_to_rfc3339

        _, policy = self._test_generate_upload_policy_helper()

        one_hour_later = now() + datetime.timedelta(hours=1)
        self.assertEqual(policy['expiration'],
                         _datetime_to_rfc3339(one_hour_later))
Ejemplo n.º 23
0
    def test_timeseries_factory_gauge(self):
        """Gauge series carry no start time; end time defaults to 'now'."""
        import datetime
        from google.cloud._helpers import _datetime_to_rfc3339

        METRIC_TYPE = 'custom.googleapis.com/my_metric'
        METRIC_LABELS = {'status': 'successful'}
        RESOURCE_TYPE = 'gce_instance'
        RESOURCE_LABELS = {
            'instance_id': '1234567890123456789',
            'zone': 'us-central1-f',
        }
        VALUE = 42
        explicit_end = datetime.datetime.utcnow()

        client = self._make_one(
            project=PROJECT, credentials=_make_credentials())
        client._connection = _Connection()   # For safety's sake.
        metric = client.metric(METRIC_TYPE, METRIC_LABELS)
        resource = client.resource(RESOURCE_TYPE, RESOURCE_LABELS)

        # An explicit end time is honored and no start time is recorded.
        timeseries = client.time_series(metric, resource, VALUE,
                                        end_time=explicit_end)
        self.assertEqual(timeseries.metric, metric)
        self.assertEqual(timeseries.resource, resource)
        self.assertEqual(len(timeseries.points), 1)
        point = timeseries.points[0]
        self.assertEqual(point.value, VALUE)
        self.assertIsNone(point.start_time)
        self.assertEqual(
            point.end_time,
            _datetime_to_rfc3339(explicit_end, ignore_zone=False))

        # Without an end time, the factory falls back to _UTCNOW().
        frozen_now = datetime.datetime.utcnow()
        with mock.patch('google.cloud.monitoring.client._UTCNOW',
                        new=lambda: frozen_now):
            implicit = client.time_series(metric, resource, VALUE)

        self.assertEqual(
            implicit.points[0].end_time,
            _datetime_to_rfc3339(frozen_now, ignore_zone=False))
        self.assertIsNone(implicit.points[0].start_time)
Ejemplo n.º 24
0
    def test_timeseries_factory_gauge(self):
        """Gauge-style time series: single point, no start time.

        Covers both an explicit ``end_time`` and the fallback to the
        (patched) current time when ``end_time`` is omitted.
        """
        import datetime
        from google.cloud._helpers import _datetime_to_rfc3339

        METRIC_TYPE = 'custom.googleapis.com/my_metric'
        METRIC_LABELS = {'status': 'successful'}

        RESOURCE_TYPE = 'gce_instance'
        RESOURCE_LABELS = {
            'instance_id': '1234567890123456789',
            'zone': 'us-central1-f'
        }

        VALUE = 42
        TIME1 = datetime.datetime.utcnow()
        TIME1_STR = _datetime_to_rfc3339(TIME1, ignore_zone=False)

        client = self._make_one(project=PROJECT,
                                credentials=_make_credentials())
        client._connection = _Connection()  # For safety's sake.
        metric = client.metric(METRIC_TYPE, METRIC_LABELS)
        resource = client.resource(RESOURCE_TYPE, RESOURCE_LABELS)

        # Construct a time series assuming a gauge metric.
        timeseries = client.time_series(metric,
                                        resource,
                                        VALUE,
                                        end_time=TIME1)
        self.assertEqual(timeseries.metric, metric)
        self.assertEqual(timeseries.resource, resource)
        # A gauge reading is a single point with only an end time.
        self.assertEqual(len(timeseries.points), 1)
        self.assertEqual(timeseries.points[0].value, VALUE)
        self.assertIsNone(timeseries.points[0].start_time)
        self.assertEqual(timeseries.points[0].end_time, TIME1_STR)

        TIME2 = datetime.datetime.utcnow()
        TIME2_STR = _datetime_to_rfc3339(TIME2, ignore_zone=False)
        # Construct a time series assuming a gauge metric using the current
        # time
        with mock.patch('google.cloud.monitoring.client._UTCNOW',
                        new=lambda: TIME2):
            timeseries_no_end = client.time_series(metric, resource, VALUE)

        self.assertEqual(timeseries_no_end.points[0].end_time, TIME2_STR)
        self.assertIsNone(timeseries_no_end.points[0].start_time)
Ejemplo n.º 25
0
    def commit(self, client=None):
        """Send saved log entries as a single API call.

        :type client: :class:`~google.cloud.logging.client.Client` or
                      ``NoneType``
        :param client: the client to use.  If not passed, falls back to the
                       ``client`` stored on the current batch.
        """
        if client is None:
            client = self.client

        kwargs = {'logger_name': self.logger.full_name}
        if self.resource is not None:
            kwargs['resource'] = self.resource._to_dict()
        if self.logger.labels is not None:
            kwargs['labels'] = self.logger.labels

        entries = []
        for (entry_type, entry, labels, iid, severity, http_req, timestamp,
             resource, trace, span_id) in self.entries:
            # Map the entry type onto the matching API payload key.
            if entry_type == 'text':
                info = {'textPayload': entry}
            elif entry_type == 'struct':
                info = {'jsonPayload': entry}
            elif entry_type == 'proto':
                # NOTE: If ``entry`` contains an ``Any`` field with an
                #       unknown type, this will fail with a ``TypeError``.
                #       However, since ``entry`` was provided by a user in
                #       ``Batch.log_proto``, the assumption is that any types
                #       needed for the protobuf->JSON conversion will be known
                #       from already imported ``pb2`` modules.
                info = {'protoPayload': MessageToDict(entry)}
            else:
                raise ValueError('Unknown entry type: %s' % (entry_type, ))

            if resource is not None:
                info['resource'] = resource._to_dict()
            # Optional pass-through fields, copied only when present.
            for key, value in (('labels', labels),
                               ('insertId', iid),
                               ('severity', severity),
                               ('httpRequest', http_req)):
                if value is not None:
                    info[key] = value
            if timestamp is not None:
                # The API expects an RFC 3339 string, not a datetime.
                info['timestamp'] = _datetime_to_rfc3339(timestamp)
            for key, value in (('trace', trace), ('spanId', span_id)):
                if value is not None:
                    info[key] = value
            entries.append(info)

        client.logging_api.write_entries(entries, **kwargs)
        # The batch is cleared only after a successful write.
        del self.entries[:]
Ejemplo n.º 26
0
    def _timestamp_message(self, attrs):
        """Stamp ``attrs`` with the current time, if the topic is so configured.

        If ``attrs`` already has a ``'timestamp'`` key, do nothing.

        Helper method for ``publish``/``Batch.publish``.
        """
        if not self.timestamp_messages:
            return
        if 'timestamp' in attrs:
            return
        attrs['timestamp'] = _datetime_to_rfc3339(_NOW())
Ejemplo n.º 27
0
    def _timestamp_message(self, attrs):
        """Add a timestamp to ``attrs``, if the topic is so configured.

        If ``attrs`` already has the key, do nothing.

        Helper method for ``publish``/``Batch.publish``.
        """
        needs_stamp = self.timestamp_messages and 'timestamp' not in attrs
        if needs_stamp:
            attrs['timestamp'] = _datetime_to_rfc3339(_NOW())
Ejemplo n.º 28
0
    def test_commit_w_bound_client(self):
        """Committing a batch writes all queued entries in one API call.

        Queues one text, one struct and one proto entry, then checks the
        exact payloads handed to the logging API and that the batch is
        emptied afterwards.
        """
        import json
        import datetime
        from google.protobuf.json_format import MessageToJson
        from google.protobuf.struct_pb2 import Struct
        from google.protobuf.struct_pb2 import Value
        from google.cloud._helpers import _datetime_to_rfc3339
        from google.cloud.logging.logger import _GLOBAL_RESOURCE

        TEXT = 'This is the entry text'
        STRUCT = {'message': TEXT, 'weather': 'partly cloudy'}
        message = Struct(fields={'foo': Value(bool_value=True)})
        IID1 = 'IID1'
        IID2 = 'IID2'
        IID3 = 'IID3'
        TIMESTAMP1 = datetime.datetime(2016, 12, 31, 0, 0, 1, 999999)
        TIMESTAMP2 = datetime.datetime(2016, 12, 31, 0, 0, 2, 999999)
        TIMESTAMP3 = datetime.datetime(2016, 12, 31, 0, 0, 3, 999999)
        # Expected wire payloads: one dict per queued entry, in order.
        ENTRIES = [
            {'textPayload': TEXT, 'insertId': IID1,
             'timestamp': _datetime_to_rfc3339(TIMESTAMP1),
             'resource': _GLOBAL_RESOURCE._to_dict()},
            {'jsonPayload': STRUCT, 'insertId': IID2,
             'timestamp': _datetime_to_rfc3339(TIMESTAMP2),
             'resource': _GLOBAL_RESOURCE._to_dict()},
            {'protoPayload': json.loads(MessageToJson(message)),
             'insertId': IID3,
             'timestamp': _datetime_to_rfc3339(TIMESTAMP3),
             'resource': _GLOBAL_RESOURCE._to_dict()},
        ]
        client = _Client(project=self.PROJECT)
        api = client.logging_api = _DummyLoggingAPI()
        logger = _Logger()
        batch = self._make_one(logger, client=client)

        batch.log_text(TEXT, insert_id=IID1, timestamp=TIMESTAMP1)
        batch.log_struct(STRUCT, insert_id=IID2, timestamp=TIMESTAMP2)
        batch.log_proto(message, insert_id=IID3, timestamp=TIMESTAMP3)
        batch.commit()

        # Commit must drain the batch and forward everything verbatim.
        self.assertEqual(list(batch.entries), [])
        self.assertEqual(api._write_entries_called_with,
                         (ENTRIES, logger.full_name, None, None))
Ejemplo n.º 29
0
    def rfc3339(self):
        """RFC 3339-compliant timestamp.

        :rtype: str
        :returns: Timestamp string according to RFC 3339 spec.
        """
        if self._nanosecond == 0:
            return _datetime_to_rfc3339(self)
        # Zero-pad the nanosecond count to nine digits *before* stripping
        # trailing zeros; otherwise values shorter than nine digits are
        # mis-scaled (e.g. ``_nanosecond == 1`` would render as ``.1Z``,
        # i.e. 100 milliseconds, instead of ``.000000001Z``).
        nanos = str(self._nanosecond).rjust(9, '0').rstrip('0')
        return '%s.%sZ' % (self.strftime(_RFC3339_NO_FRACTION), nanos)
Ejemplo n.º 30
0
    def test_generate_upload_policy_args(self):
        """An explicit ``expiration`` is propagated into the policy."""
        from google.cloud._helpers import _datetime_to_rfc3339

        expiration = datetime.datetime(1990, 5, 29)

        _, policy = self._test_generate_upload_policy_helper(
            expiration=expiration)

        expected = _datetime_to_rfc3339(expiration)
        self.assertEqual(policy['expiration'], expected)
Ejemplo n.º 31
0
    def commit(self, client=None):
        """Send saved log entries as a single API call.

        :type client: :class:`~google.cloud.logging.client.Client` or
                      ``NoneType``
        :param client: the client to use.  If not passed, falls back to the
                       ``client`` stored on the current batch.
        """
        if client is None:
            client = self.client

        kwargs = {'logger_name': self.logger.full_name}
        if self.resource is not None:
            kwargs['resource'] = self.resource._to_dict()
        if self.logger.labels is not None:
            kwargs['labels'] = self.logger.labels

        entries = []
        for (entry_type, entry, labels, iid, severity, http_req,
             timestamp, resource, trace) in self.entries:
            # Map the entry type onto the matching API payload key.
            if entry_type == 'text':
                info = {'textPayload': entry}
            elif entry_type == 'struct':
                info = {'jsonPayload': entry}
            elif entry_type == 'proto':
                # NOTE: If ``entry`` contains an ``Any`` field with an
                #       unknown type, this will fail with a ``TypeError``.
                #       However, since ``entry`` was provided by a user in
                #       ``Batch.log_proto``, the assumption is that any types
                #       needed for the protobuf->JSON conversion will be known
                #       from already imported ``pb2`` modules.
                info = {'protoPayload': MessageToDict(entry)}
            else:
                raise ValueError('Unknown entry type: %s' % (entry_type,))

            if resource is not None:
                info['resource'] = resource._to_dict()
            # Optional pass-through fields, copied only when present.
            for key, value in (('labels', labels),
                               ('insertId', iid),
                               ('severity', severity),
                               ('httpRequest', http_req)):
                if value is not None:
                    info[key] = value
            if timestamp is not None:
                # The API expects an RFC 3339 string, not a datetime.
                info['timestamp'] = _datetime_to_rfc3339(timestamp)
            if trace is not None:
                info['trace'] = trace
            entries.append(info)

        client.logging_api.write_entries(entries, **kwargs)
        # The batch is cleared only after a successful write.
        del self.entries[:]
Ejemplo n.º 32
0
def _log_entry_mapping_to_pb(mapping):
    """Helper for :meth:`write_entries`, et aliae

    Performs "impedance matching" between the protobuf attrs and
    the keys expected in the JSON API.
    """
    # The JSON API wants an RFC 3339 string where the mapping may hold
    # a datetime; convert in place before parsing.
    if 'timestamp' in mapping:
        mapping['timestamp'] = _datetime_to_rfc3339(mapping['timestamp'])
    entry_pb = LogEntry()
    ParseDict(mapping, entry_pb)
    return entry_pb
Ejemplo n.º 33
0
    def test_to_dict_float_with_start_time(self):
        """A float point with both interval endpoints serializes fully."""
        import datetime
        from google.cloud._helpers import _datetime_to_rfc3339

        value = 1.6180339
        start_str = _datetime_to_rfc3339(
            datetime.datetime.now(), ignore_zone=False)
        end_str = _datetime_to_rfc3339(
            datetime.datetime.now(), ignore_zone=False)

        point = self._make_one(
            end_time=end_str, start_time=start_str, value=value)

        expected = {
            'interval': {
                'startTime': start_str,
                'endTime': end_str},
            'value': {'doubleValue': value},
        }
        self.assertEqual(point._to_dict(), expected)
Ejemplo n.º 34
0
def _log_entry_mapping_to_pb(mapping):
    """Helper for :meth:`write_entries`, et aliae

    Performs "impedance matching" between the protobuf attrs and
    the keys expected in the JSON API.
    """
    entry_pb = LogEntry()
    if 'timestamp' in mapping:
        # Replace the datetime with the RFC 3339 string the API expects.
        stamp = mapping['timestamp']
        mapping['timestamp'] = _datetime_to_rfc3339(stamp)
    ParseDict(mapping, entry_pb)
    return entry_pb
Ejemplo n.º 35
0
    def test_to_api_repr_w_datetime_string(self):
        """A pre-formatted RFC 3339 string value is passed through untouched."""
        from google.cloud._helpers import _datetime_to_rfc3339

        now_str = _datetime_to_rfc3339(datetime.datetime.utcnow())
        expected = {
            "parameterType": {"type": "DATETIME"},
            "parameterValue": {"value": now_str},
        }

        klass = self._get_target_class()
        param = klass.positional(type_="DATETIME", value=now_str)

        self.assertEqual(param.to_api_repr(), expected)
Ejemplo n.º 36
0
    def test_to_api_repr_w_datetime_string(self):
        """A pre-formatted RFC 3339 string value is passed through untouched."""
        from google.cloud._helpers import _datetime_to_rfc3339

        stamp = _datetime_to_rfc3339(datetime.datetime.utcnow())

        parameter_class = self._get_target_class()
        param = parameter_class.positional(type_="DATETIME", value=stamp)

        expected = {
            "parameterType": {"type": "DATETIME"},
            "parameterValue": {"value": stamp},
        }
        self.assertEqual(param.to_api_repr(), expected)
    def seek_timestamp(self, timestamp, client=None):
        """API call:  seek a subscription to a given point in time

        See:
        https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/seek

        :type timestamp: :class:`datetime.datetime`
        :param timestamp: The time to seek to.

        :type client: the client to use, or ``NoneType``
        :param client: optional client; resolved via ``_require_client``
                       when not passed.
        """
        client = self._require_client(client)
        # The REST API expects an RFC 3339 string, not a datetime.
        timestamp = _datetime_to_rfc3339(timestamp)
        api = client.subscriber_api
        api.subscription_seek(self.full_name, time=timestamp)
Ejemplo n.º 38
0
    def seek_timestamp(self, timestamp, client=None):
        """API call:  seek a subscription to a given point in time

        See:
        https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/seek

        :type timestamp: :class:`datetime.datetime`
        :param timestamp: The time to seek to.

        :type client: the client to use, or ``NoneType``
        :param client: optional client; resolved via ``_require_client``
                       when not passed.
        """
        client = self._require_client(client)
        # The REST API expects an RFC 3339 string, not a datetime.
        timestamp = _datetime_to_rfc3339(timestamp)
        api = client.subscriber_api
        api.subscription_seek(self.full_name, time=timestamp)
Ejemplo n.º 39
0
    def test_w_timestamp_w_nanos(self):
        """A TIMESTAMP string parses back into ``TimestampWithNanoseconds``."""
        from google.protobuf.struct_pb2 import Value
        from google.cloud.spanner_v1.proto.type_pb2 import Type, TIMESTAMP
        from google.cloud._helpers import UTC, _datetime_to_rfc3339
        from google.cloud.spanner_v1._helpers import TimestampWithNanoseconds

        expected = TimestampWithNanoseconds(
            2016, 12, 20, 21, 13, 47, nanosecond=123456789, tzinfo=UTC)
        value_pb = Value(string_value=_datetime_to_rfc3339(expected))
        field_type = Type(code=TIMESTAMP)

        parsed = self._callFUT(value_pb, field_type)

        self.assertIsInstance(parsed, TimestampWithNanoseconds)
        self.assertEqual(parsed, expected)
Ejemplo n.º 40
0
 def test_to_api_repr_w_datetime_datetime(self):
     """A datetime value is rendered as an RFC 3339 string."""
     import datetime
     from google.cloud._helpers import _datetime_to_rfc3339

     now = datetime.datetime.utcnow()

     klass = self._get_target_class()
     param = klass.positional(type_='DATETIME', value=now)

     expected = {
         'parameterType': {
             'type': 'DATETIME',
         },
         'parameterValue': {
             'value': _datetime_to_rfc3339(now),
         },
     }
     self.assertEqual(param.to_api_repr(), expected)
Ejemplo n.º 41
0
    def test_to_dict_int64(self):
        """An int point without a start time serializes ``int64Value`` as str."""
        import datetime
        from google.cloud._helpers import _datetime_to_rfc3339

        value = 42
        end_str = _datetime_to_rfc3339(
            datetime.datetime.now(), ignore_zone=False)

        point = self._make_one(
            end_time=end_str, start_time=None, value=value)

        expected = {
            'interval': {'endTime': end_str},
            'value': {'int64Value': str(value)},
        }
        self.assertEqual(point._to_dict(), expected)
Ejemplo n.º 42
0
    def test_to_dict_int64(self):
        """An int point without a start time serializes ``int64Value`` as str."""
        import datetime
        from google.cloud._helpers import _datetime_to_rfc3339

        value = 42
        end_str = _datetime_to_rfc3339(
            datetime.datetime.now(), ignore_zone=False)

        point = self._makeOne(
            end_time=end_str, start_time=None, value=value)

        expected = {
            'interval': {'endTime': end_str},
            'value': {'int64Value': str(value)},
        }
        self.assertEqual(point._to_dict(), expected)
Ejemplo n.º 43
0
    def test_to_api_repr_w_datetime_datetime(self):
        """A datetime renders as RFC 3339 with the trailing 'Z' stripped."""
        from google.cloud._helpers import _datetime_to_rfc3339

        now = datetime.datetime.utcnow()
        rendered = _datetime_to_rfc3339(now)[:-1]  # strip trailing 'Z'
        expected = {
            'parameterType': {
                'type': 'DATETIME',
            },
            'parameterValue': {
                'value': rendered,
            },
        }

        klass = self._get_target_class()
        param = klass.positional(type_='DATETIME', value=now)

        self.assertEqual(param.to_api_repr(), expected)
Ejemplo n.º 44
0
def _make_value_pb(value):
    """Helper for :func:`_make_list_value_pbs`.

    Converts a Python scalar into the ``Value`` protobuf sent to the API.

    .. note::
        The ``isinstance`` checks below are order-sensitive:
        ``bool`` must be tested before ``int`` (``bool`` subclasses
        ``int``), and ``DatetimeWithNanoseconds`` before
        ``datetime.datetime`` (it subclasses it).

    :type value: scalar value
    :param value: value to convert

    :rtype: :class:`~google.protobuf.struct_pb2.Value`
    :returns: value protobufs
    :raises ValueError: if value is not of a known scalar type.
    """
    if value is None:
        return Value(null_value="NULL_VALUE")
    if isinstance(value, (list, tuple)):
        return Value(list_value=_make_list_value_pb(value))
    if isinstance(value, bool):
        return Value(bool_value=value)
    if isinstance(value, int):
        # 64-bit integers are transmitted as decimal strings.
        return Value(string_value=str(value))
    if isinstance(value, float):
        # Non-finite floats cannot be carried in ``number_value``;
        # fall back to their string spellings.
        if math.isnan(value):
            return Value(string_value="NaN")
        if math.isinf(value):
            if value > 0:
                return Value(string_value="Infinity")
            else:
                return Value(string_value="-Infinity")
        return Value(number_value=value)
    if isinstance(value, datetime_helpers.DatetimeWithNanoseconds):
        # ``rfc3339()`` preserves the nanosecond component.
        return Value(string_value=value.rfc3339())
    if isinstance(value, datetime.datetime):
        return Value(
            string_value=_datetime_to_rfc3339(value, ignore_zone=False))
    if isinstance(value, datetime.date):
        return Value(string_value=value.isoformat())
    if isinstance(value, bytes):
        value = _try_to_coerce_bytes(value)
        return Value(string_value=value)
    if isinstance(value, str):
        return Value(string_value=value)
    if isinstance(value, ListValue):
        return Value(list_value=value)
    if isinstance(value, decimal.Decimal):
        # Validated before sending; raises if out of NUMERIC bounds.
        _assert_numeric_precision_and_scale(value)
        return Value(string_value=str(value))
    if isinstance(value, JsonObject):
        return Value(string_value=value.serialize())

    raise ValueError("Unknown type: %s" % (value, ))
Ejemplo n.º 45
0
    def commit(self, client=None):
        """Send saved log entries as a single API call.

        :type client: :class:`~google.cloud.logging.client.Client` or
                      ``NoneType``
        :param client: the client to use.  If not passed, falls back to the
                       ``client`` stored on the current batch.
        """
        if client is None:
            client = self.client

        kwargs = {
            'logger_name': self.logger.full_name,
            'resource': {'type': 'global'},
        }
        if self.logger.labels is not None:
            kwargs['labels'] = self.logger.labels

        entries = []
        for (entry_type, entry, labels, iid, severity, http_req,
             timestamp) in self.entries:
            # Map the entry type onto the matching API payload key.
            if entry_type == 'text':
                info = {'textPayload': entry}
            elif entry_type == 'struct':
                info = {'jsonPayload': entry}
            elif entry_type == 'proto':
                # Round-trip through JSON text to obtain a plain dict.
                info = {'protoPayload': json.loads(MessageToJson(entry))}
            else:
                raise ValueError('Unknown entry type: %s' % (entry_type, ))

            # Optional pass-through fields, copied only when present.
            for key, value in (('labels', labels),
                               ('insertId', iid),
                               ('severity', severity),
                               ('httpRequest', http_req)):
                if value is not None:
                    info[key] = value
            if timestamp is not None:
                # The API expects an RFC 3339 string, not a datetime.
                info['timestamp'] = _datetime_to_rfc3339(timestamp)
            entries.append(info)

        client.logging_api.write_entries(entries, **kwargs)
        # The batch is cleared only after a successful write.
        del self.entries[:]
    def test_seek_time_w_bound_client(self):
        """Seeking by timestamp forwards an RFC 3339 string to the API."""
        import datetime

        from google.cloud import _helpers

        when = datetime.time()
        client = _Client(project=self.PROJECT)
        api = client.subscriber_api = _FauxSubscribererAPI()
        api._subscription_seek_response = {}
        topic = _Topic(self.TOPIC_NAME, client=client)
        subscription = self._make_one(self.SUB_NAME, topic)

        subscription.seek_timestamp(when)

        expected = (
            self.SUB_PATH, _helpers._datetime_to_rfc3339(when), None)
        self.assertEqual(api._subscription_seeked, expected)
Ejemplo n.º 47
0
    def _get_changes(self, token, changes_name):
        """Build a fake ``changes`` API response with one change set.

        The change carries a single addition (an A record) and a single
        deletion (a CNAME); ``token``, when given, becomes the page token.
        """
        from google.cloud._helpers import _datetime_to_rfc3339

        addition = {
            "kind": "dns#resourceRecordSet",
            "name": "www.example.com",
            "type": "A",
            "ttl": "86400",
            "rrdatas": ["123.45.67.89"],
        }
        deletion = {
            "kind": "dns#change",
            "name": "alias.example.com",
            "type": "CNAME",
            "ttl": "3600",
            "rrdatas": ["www.example.com"],
        }
        change = {
            "kind": "dns#change",
            "id": changes_name,
            "status": "pending",
            "startTime": _datetime_to_rfc3339(self.WHEN),
            "additions": [addition],
            "deletions": [deletion],
        }
        result = {"changes": [change]}
        if token is not None:
            result["nextPageToken"] = token
        return result
Ejemplo n.º 48
0
    def _get_changes(self, token, changes_name):
        """Build a fake ``changes`` API response with one change set.

        The change carries a single addition (an A record) and a single
        deletion (a CNAME); ``token``, when given, becomes the page token.
        """
        from google.cloud._helpers import _datetime_to_rfc3339

        added_record = {
            "kind": "dns#resourceRecordSet",
            "name": "www.example.com",
            "type": "A",
            "ttl": "86400",
            "rrdatas": ["123.45.67.89"],
        }
        deleted_record = {
            "kind": "dns#change",
            "name": "alias.example.com",
            "type": "CNAME",
            "ttl": "3600",
            "rrdatas": ["www.example.com"],
        }
        change_set = {
            "kind": "dns#change",
            "id": changes_name,
            "status": "pending",
            "startTime": _datetime_to_rfc3339(self.WHEN),
            "additions": [added_record],
            "deletions": [deleted_record],
        }
        response = {"changes": [change_set]}
        if token is not None:
            response["nextPageToken"] = token
        return response
Ejemplo n.º 49
0
    def test_subscription_pull_explicit(self):
        """Pull with explicit options converts the GAX response to JSON form.

        The fake GAX API returns one received protobuf message; the wrapper
        must surface it as a JSON-style dict (base64 payload, RFC 3339
        publish time) and pass the pull arguments through.
        """
        import base64
        import datetime
        from google.cloud._helpers import UTC
        from google.cloud._helpers import _datetime_to_pb_timestamp
        from google.cloud._helpers import _datetime_to_rfc3339

        NOW = datetime.datetime.utcnow().replace(tzinfo=UTC)
        NOW_PB = _datetime_to_pb_timestamp(NOW)
        NOW_RFC3339 = _datetime_to_rfc3339(NOW)
        PAYLOAD = b'This is the message text'
        B64 = base64.b64encode(PAYLOAD).decode('ascii')
        ACK_ID = 'DEADBEEF'
        MSG_ID = 'BEADCAFE'
        # Expected JSON-side representation of the pulled message.
        MESSAGE = {
            'messageId': MSG_ID,
            'data': B64,
            'attributes': {
                'a': 'b'
            },
            'publishTime': NOW_RFC3339,
        }
        RECEIVED = [{'ackId': ACK_ID, 'message': MESSAGE}]
        message_pb = _PubsubMessagePB(MSG_ID, B64, {'a': 'b'}, NOW_PB)
        response_pb = _PullResponsePB([_ReceivedMessagePB(ACK_ID, message_pb)])
        gax_api = _GAXSubscriberAPI(_pull_response=response_pb)
        client = _Client(self.PROJECT)
        api = self._make_one(gax_api, client)
        MAX_MESSAGES = 10

        received = api.subscription_pull(self.SUB_PATH,
                                         return_immediately=True,
                                         max_messages=MAX_MESSAGES)

        self.assertEqual(received, RECEIVED)
        # Verify the arguments forwarded to the underlying GAX API.
        sub_path, max_messages, return_immediately, options = (
            gax_api._pull_called_with)
        self.assertEqual(sub_path, self.SUB_PATH)
        self.assertEqual(max_messages, MAX_MESSAGES)
        self.assertTrue(return_immediately)
        self.assertIsNone(options)
Ejemplo n.º 50
0
def _make_value_pb(value):
    """Helper for :func:`_make_list_value_pbs`.

    Convert a single Python value into the protobuf ``Value`` wrapper
    expected by the Cloud Spanner API.

    :type value: scalar value
    :param value: value to convert

    :rtype: :class:`~google.protobuf.struct_pb2.Value`
    :returns: value protobufs
    :raises ValueError: if value is not of a known scalar type.
    """
    # NOTE: these isinstance checks are order-sensitive: ``bool`` must be
    # tested before the integer types (bool subclasses int), and
    # ``datetime`` before ``date`` (datetime subclasses date).
    if value is None:
        return Value(null_value='NULL_VALUE')
    if isinstance(value, (list, tuple)):
        return Value(list_value=_make_list_value_pb(value))
    if isinstance(value, bool):
        return Value(bool_value=value)
    if isinstance(value, six.integer_types):
        # INT64 values travel over the wire as decimal strings.
        return Value(string_value=str(value))
    if isinstance(value, float):
        # Non-finite floats have no JSON number form; the API defines
        # sentinel strings for them instead.
        if math.isnan(value):
            return Value(string_value='NaN')
        if math.isinf(value):
            sentinel = 'Infinity' if value > 0 else '-Infinity'
            return Value(string_value=sentinel)
        return Value(number_value=value)
    if isinstance(value, datetime_helpers.DatetimeWithNanoseconds):
        return Value(string_value=value.rfc3339())
    if isinstance(value, datetime.datetime):
        return Value(string_value=_datetime_to_rfc3339(value))
    if isinstance(value, datetime.date):
        return Value(string_value=value.isoformat())
    if isinstance(value, six.binary_type):
        return Value(string_value=_try_to_coerce_bytes(value))
    if isinstance(value, six.text_type):
        return Value(string_value=value)
    if isinstance(value, ListValue):
        return Value(list_value=value)
    raise ValueError("Unknown type: %s" % (value,))
Ejemplo n.º 51
0
 def _makeResource(self):
     from google.cloud._helpers import _datetime_to_rfc3339

     # A completed DNS change set that swaps one CNAME record for another.
     addition = {
         'name': 'test.example.com',
         'type': 'CNAME',
         'ttl': '3600',
         'rrdatas': ['www.example.com'],
     }
     deletion = {
         'name': 'test.example.com',
         'type': 'CNAME',
         'ttl': '86400',
         'rrdatas': ['other.example.com'],
     }
     return {
         'kind': 'dns#change',
         'id': self.CHANGES_NAME,
         'startTime': _datetime_to_rfc3339(self.WHEN),
         'status': 'done',
         'additions': [addition],
         'deletions': [deletion],
     }
Ejemplo n.º 52
0
    def test_subscription_pull_explicit(self):
        import base64
        import datetime
        from google.cloud._helpers import UTC
        from google.cloud._helpers import _datetime_to_pb_timestamp
        from google.cloud._helpers import _datetime_to_rfc3339
        # Fixture: a single pulled message, payload base64-encoded as the
        # JSON API represents binary data.
        now = datetime.datetime.utcnow().replace(tzinfo=UTC)
        payload = b'This is the message text'
        encoded = base64.b64encode(payload).decode('ascii')
        ack_id = 'DEADBEEF'
        msg_id = 'BEADCAFE'
        max_messages = 10
        expected_message = {
            'messageId': msg_id,
            'data': encoded,
            'attributes': {'a': 'b'},
            'publishTime': _datetime_to_rfc3339(now),
        }
        expected = [{'ackId': ack_id, 'message': expected_message}]
        message_pb = _PubsubMessagePB(
            msg_id, encoded, {'a': 'b'}, _datetime_to_pb_timestamp(now))
        response_pb = _PullResponsePB(
            [_ReceivedMessagePB(ack_id, message_pb)])
        gax_api = _GAXSubscriberAPI(_pull_response=response_pb)
        api = self._makeOne(gax_api)

        received = api.subscription_pull(
            self.SUB_PATH, return_immediately=True, max_messages=max_messages)

        # The protobuf response must come back converted to plain dicts.
        self.assertEqual(received, expected)
        sub_path, pulled_max, return_immediately, options = (
            gax_api._pull_called_with)
        self.assertEqual(sub_path, self.SUB_PATH)
        self.assertEqual(pulled_max, max_messages)
        self.assertTrue(return_immediately)
        self.assertIsNone(options)
Ejemplo n.º 53
0
    def test_query(self):
        import datetime
        from google.cloud._helpers import _datetime_to_rfc3339
        from google.cloud.exceptions import NotFound

        # Interval fixtures: the query is built from END_TIME and MINUTES,
        # so START_TIME must equal END_TIME minus MINUTES for the expected
        # request below to match.
        START_TIME = datetime.datetime(2016, 4, 6, 22, 5, 0)
        END_TIME = datetime.datetime(2016, 4, 6, 22, 10, 0)
        MINUTES = 5

        # Two time series sharing a metric type but differing in labels
        # and monitored resource.
        METRIC_TYPE = 'compute.googleapis.com/instance/cpu/utilization'
        METRIC_LABELS = {'instance_name': 'instance-1'}
        METRIC_LABELS2 = {'instance_name': 'instance-2'}

        RESOURCE_TYPE = 'gce_instance'
        RESOURCE_LABELS = {
            'project_id': 'my-project',
            'zone': 'us-east1-a',
            'instance_id': '1234567890123456789',
        }
        RESOURCE_LABELS2 = {
            'project_id': 'my-project',
            'zone': 'us-east1-b',
            'instance_id': '9876543210987654321',
        }

        METRIC_KIND = 'GAUGE'
        VALUE_TYPE = 'DOUBLE'

        # Point timestamps, newest first in the API response.
        TS1 = '2016-04-06T22:05:00.042Z'
        TS2 = '2016-04-06T22:05:01.042Z'
        TS3 = '2016-04-06T22:05:02.042Z'

        VAL1 = 0.1
        VAL2 = 0.2

        def P(timestamp, value):
            # Build a GAUGE data point (start == end time) as the API
            # would return it.
            return {
                'interval': {'startTime': timestamp, 'endTime': timestamp},
                'value': {'doubleValue': value},
            }

        SERIES1 = {
            'metric': {'type': METRIC_TYPE, 'labels': METRIC_LABELS},
            'resource': {'type': RESOURCE_TYPE, 'labels': RESOURCE_LABELS},
            'metricKind': METRIC_KIND,
            'valueType': VALUE_TYPE,
            'points': [P(TS3, VAL1), P(TS2, VAL1), P(TS1, VAL1)],
        }
        SERIES2 = {
            'metric': {'type': METRIC_TYPE, 'labels': METRIC_LABELS2},
            'resource': {'type': RESOURCE_TYPE, 'labels': RESOURCE_LABELS2},
            'metricKind': METRIC_KIND,
            'valueType': VALUE_TYPE,
            'points': [P(TS3, VAL2), P(TS2, VAL2), P(TS1, VAL2)],
        }

        RESPONSE = {'timeSeries': [SERIES1, SERIES2]}

        client = self._makeOne(project=PROJECT, credentials=_Credentials())
        connection = client.connection = _Connection(RESPONSE)

        # A simple query. In practice, it can be very convenient to let the
        # end time default to the start of the current minute.
        query = client.query(METRIC_TYPE, end_time=END_TIME, minutes=MINUTES)
        response = list(query)

        self.assertEqual(len(response), 2)
        series1, series2 = response

        self.assertEqual(series1.metric.type, METRIC_TYPE)
        self.assertEqual(series2.metric.type, METRIC_TYPE)
        self.assertEqual(series1.metric.labels, METRIC_LABELS)
        self.assertEqual(series2.metric.labels, METRIC_LABELS2)

        self.assertEqual(series1.resource.type, RESOURCE_TYPE)
        self.assertEqual(series2.resource.type, RESOURCE_TYPE)
        self.assertEqual(series1.resource.labels, RESOURCE_LABELS)
        self.assertEqual(series2.resource.labels, RESOURCE_LABELS2)

        self.assertEqual(series1.metric_kind, METRIC_KIND)
        self.assertEqual(series2.metric_kind, METRIC_KIND)
        self.assertEqual(series1.value_type, VALUE_TYPE)
        self.assertEqual(series2.value_type, VALUE_TYPE)

        # Points are returned newest-first by the API but must be exposed
        # oldest-first by the client.
        self.assertEqual([p.value for p in series1.points], [VAL1, VAL1, VAL1])
        self.assertEqual([p.value for p in series2.points], [VAL2, VAL2, VAL2])
        self.assertEqual([p.end_time for p in series1.points], [TS1, TS2, TS3])
        self.assertEqual([p.end_time for p in series2.points], [TS1, TS2, TS3])

        expected_request = {
            'method': 'GET',
            'path': '/projects/{project}/timeSeries/'.format(project=PROJECT),
            'query_params': [
                ('filter', 'metric.type = "{type}"'.format(type=METRIC_TYPE)),
                ('interval.endTime', _datetime_to_rfc3339(END_TIME)),
                ('interval.startTime', _datetime_to_rfc3339(START_TIME)),
            ],
        }

        request, = connection._requested
        self.assertEqual(request, expected_request)

        # The fake connection only had one canned response; re-iterating
        # the query issues a second request, which must surface NotFound.
        with self.assertRaises(NotFound):
            list(query)
Ejemplo n.º 54
0
    def time_series(metric, resource, value,
                    end_time=None, start_time=None):
        """Construct a time series object for a single data point.

        .. note::

           :class:`~google.cloud.monitoring.timeseries.TimeSeries` objects
           returned by the API typically carry multiple data points, but a
           :class:`~google.cloud.monitoring.timeseries.TimeSeries` sent to
           the API must carry at most one.

        For example::

            >>> timeseries = client.time_series(metric, resource, 1.23,
            ...                                 end_time=end)

        For more information, see:

        https://cloud.google.com/monitoring/api/ref_v3/rest/v3/TimeSeries

        :type metric: :class:`~google.cloud.monitoring.metric.Metric`
        :param metric: A :class:`~google.cloud.monitoring.metric.Metric`.

        :type resource: :class:`~google.cloud.monitoring.resource.Resource`
        :param resource: A :class:`~google.cloud.monitoring.resource.Resource`
                         object.

        :type value: bool, int, string, or float
        :param value:
            The value of the data point to create for the
            :class:`~google.cloud.monitoring.timeseries.TimeSeries`.

            .. note::

               The Python type of the value determines the
               :class:`~ValueType` sent to the API, which must match the
               value type in the metric descriptor. For example, a Python
               float is sent as a :data:`ValueType.DOUBLE`.

        :type end_time: :class:`~datetime.datetime`
        :param end_time:
            The end time for the point. Assumed to be UTC if no time zone
            information is present. Defaults to the current time, as
            obtained by calling :meth:`datetime.datetime.utcnow`.

        :type start_time: :class:`~datetime.datetime`
        :param start_time:
            The start time for the point. Assumed to be UTC if no time
            zone information is present. Defaults to None, in which case
            the API treats the start time as equal to the end time.

        :rtype: :class:`~google.cloud.monitoring.timeseries.TimeSeries`
        :returns: A time series object.
        """
        # The API requires an end time; default it to "now".
        if end_time is None:
            end_time = _UTCNOW()
        end_rfc3339 = _datetime_to_rfc3339(end_time, ignore_zone=False)

        # Start time is optional; leave it untouched (None) when absent.
        start_rfc3339 = start_time
        if start_time:
            start_rfc3339 = _datetime_to_rfc3339(
                start_time, ignore_zone=False)

        point = Point(value=value, start_time=start_rfc3339,
                      end_time=end_rfc3339)
        return TimeSeries(metric=metric, resource=resource, metric_kind=None,
                          value_type=None, points=[point])
Ejemplo n.º 55
0
    def _make_entry_resource(self, text=None, info=None, message=None,
                             labels=None, insert_id=None, severity=None,
                             http_request=None, timestamp=None):
        """Return a log entry resource of the appropriate type.

        Helper for :meth:`log_text`, :meth:`log_struct`, and :meth:`log_proto`.

        Only one of ``text``, ``info``, or ``message`` should be passed.

        :type text: str
        :param text: (Optional) text payload

        :type info: dict
        :param info: (Optional) struct payload

        :type message: Protobuf message or :class:`NoneType`
        :param message: protobuf payload

        :type labels: dict
        :param labels: (Optional) labels passed in to calling method.

        :type insert_id: str
        :param insert_id: (optional) unique ID for log entry.

        :type severity: str
        :param severity: (optional) severity of event being logged.

        :type http_request: dict
        :param http_request: (optional) info about HTTP request associated with
                             the entry

        :type timestamp: :class:`datetime.datetime`
        :param timestamp: (optional) timestamp of event being logged.

        :rtype: dict
        :returns: The JSON resource created.
        """
        resource = {
            'logName': self.full_name,
            'resource': {'type': 'global'},
        }

        if text is not None:
            resource['textPayload'] = text

        if info is not None:
            resource['jsonPayload'] = info

        if message is not None:
            as_json_str = MessageToJson(message)
            as_json = json.loads(as_json_str)
            resource['protoPayload'] = as_json

        if labels is None:
            labels = self.labels

        if labels is not None:
            resource['labels'] = labels

        if insert_id is not None:
            resource['insertId'] = insert_id

        if severity is not None:
            resource['severity'] = severity

        if http_request is not None:
            resource['httpRequest'] = http_request

        if timestamp is not None:
            resource['timestamp'] = _datetime_to_rfc3339(timestamp)

        return resource
Ejemplo n.º 56
0
    def _make_entry_resource(self, text=None, info=None, message=None,
                             labels=None, insert_id=None, severity=None,
                             http_request=None, timestamp=None,
                             resource=_GLOBAL_RESOURCE, trace=None):
        """Build the JSON resource for a single log entry.

        Helper for :meth:`log_text`, :meth:`log_struct`, and :meth:`log_proto`.

        Only one of ``text``, ``info``, or ``message`` should be passed.

        :type text: str
        :param text: (Optional) text payload

        :type info: dict
        :param info: (Optional) struct payload

        :type message: :class:`~google.protobuf.message.Message`
        :param message: (Optional) The protobuf payload to log.

        :type labels: dict
        :param labels: (Optional) labels; falls back to the logger's own
                       labels when not given.

        :type insert_id: str
        :param insert_id: (Optional) unique ID for log entry.

        :type severity: str
        :param severity: (Optional) severity of event being logged.

        :type http_request: dict
        :param http_request: (Optional) info about HTTP request associated
                             with the entry

        :type timestamp: :class:`datetime.datetime`
        :param timestamp: (Optional) timestamp of event being logged.

        :type resource: :class:`~google.cloud.logging.resource.Resource`
        :param resource: (Optional) Monitored resource of the entry

        :type trace: str
        :param trace: (optional) traceid to apply to the entry.

        :rtype: dict
        :returns: The JSON resource created.
        """
        entry = {
            'logName': self.full_name,
            'resource': resource._to_dict(),
        }

        # Payload keys are set independently; callers are expected to pass
        # at most one of text / info / message.
        if text is not None:
            entry['textPayload'] = text

        if info is not None:
            entry['jsonPayload'] = info

        if message is not None:
            # NOTE: If ``message`` contains an ``Any`` field with an
            #       unknown type, this will fail with a ``TypeError``.
            #       However, since ``message`` will be provided by a user,
            #       the assumption is that any types needed for the
            #       protobuf->JSON conversion will be known from already
            #       imported ``pb2`` modules.
            entry['protoPayload'] = MessageToDict(message)

        # Fall back to the logger's default labels when none were given.
        if labels is None:
            labels = self.labels

        optional_fields = (
            ('labels', labels),
            ('insertId', insert_id),
            ('severity', severity),
            ('httpRequest', http_request),
        )
        for key, field_value in optional_fields:
            if field_value is not None:
                entry[key] = field_value

        if timestamp is not None:
            entry['timestamp'] = _datetime_to_rfc3339(timestamp)

        if trace is not None:
            entry['trace'] = trace

        return entry
Ejemplo n.º 57
0
    def list_members(self, filter_string=None, end_time=None, start_time=None):
        """Lists all members of this group via a ``GET`` request.

        If no ``end_time`` is provided, the group membership over the last
        minute is returned.

        Example::

            >>> for member in group.list_members():
            ...     print(member)

        List members that are Compute Engine VM instances::

            >>> filter_string = 'resource.type = "gce_instance"'
            >>> for member in group.list_members(filter_string=filter_string):
            ...     print(member)

        List historical members that existed between 4 and 5 hours ago::

            >>> import datetime
            >>> t1 = datetime.datetime.utcnow() - datetime.timedelta(hours=4)
            >>> t0 = t1 - datetime.timedelta(hours=1)
            >>> for member in group.list_members(end_time=t1, start_time=t0):
            ...     print(member)


        :type filter_string: str
        :param filter_string:
            (Optional) A list filter describing the members to be returned.
            The filter may reference the type, labels, and metadata of
            monitored resources that comprise the group. See the `filter
            documentation`_.

        :type end_time: :class:`datetime.datetime`
        :param end_time:
            (Optional) The end time (inclusive) of the time interval for
            which results should be returned, as a datetime object.
            Required whenever ``start_time`` is specified.

        :type start_time: :class:`datetime.datetime`
        :param start_time:
            (Optional) The start time (exclusive) of the time interval for
            which results should be returned, as a datetime object.

        :rtype: list of :class:`~google.cloud.monitoring.resource.Resource`
        :returns: A list of resource instances.

        :raises:
            :exc:`ValueError` if the ``start_time`` is specified, but the
            ``end_time`` is missing.

        .. _filter documentation:
            https://cloud.google.com/monitoring/api/v3/filters#group-filter
        """
        if start_time is not None and end_time is None:
            raise ValueError('If "start_time" is specified, "end_time" must '
                             'also be specified')

        path = '%s/members' % (self.path,)

        # Assemble the (page-independent) query parameters up front.
        params = {}
        if filter_string is not None:
            params['filter'] = filter_string
        if end_time is not None:
            params['interval.endTime'] = _datetime_to_rfc3339(
                end_time, ignore_zone=False)
        if start_time is not None:
            params['interval.startTime'] = _datetime_to_rfc3339(
                start_time, ignore_zone=False)

        # Page through the results, accumulating every member returned.
        resources = []
        page_token = None
        while True:
            if page_token is not None:
                params['pageToken'] = page_token

            response = self.client._connection.api_request(
                method='GET', path=path, query_params=params.copy())
            resources.extend(
                Resource._from_dict(info)
                for info in response.get('members', ()))

            page_token = response.get('nextPageToken')
            if not page_token:
                break

        return resources
    def _call_fut(self, *args, **kwargs):
        """Invoke the function under test with the given arguments."""
        from google.cloud import _helpers

        return _helpers._datetime_to_rfc3339(*args, **kwargs)
Ejemplo n.º 59
0
    def test_list_changes_explicit(self):
        from google.cloud._helpers import _datetime_to_rfc3339
        from google.cloud.dns.changes import Changes
        from google.cloud.dns.resource_record_set import ResourceRecordSet
        self._setUpConstants()
        PATH = 'projects/%s/managedZones/%s/changes' % (
            self.PROJECT, self.ZONE_NAME)
        TOKEN = 'TOKEN'
        # Fixture: one pending change set adding an A record and deleting
        # a CNAME record.
        NAME_1 = 'www.example.com'
        TYPE_1 = 'A'
        TTL_1 = '86400'
        RRDATAS_1 = ['123.45.67.89']
        NAME_2 = 'alias.example.com'
        TYPE_2 = 'CNAME'
        TTL_2 = '3600'
        RRDATAS_2 = ['www.example.com']
        CHANGES_NAME = 'changeset_id'
        DATA = {
            'changes': [{
                'kind': 'dns#change',
                'id': CHANGES_NAME,
                'status': 'pending',
                'startTime': _datetime_to_rfc3339(self.WHEN),
                'additions': [
                    {'kind': 'dns#resourceRecordSet',
                     'name': NAME_1,
                     'type': TYPE_1,
                     'ttl': TTL_1,
                     'rrdatas': RRDATAS_1}],
                'deletions': [
                    {'kind': 'dns#change',
                     'name': NAME_2,
                     'type': TYPE_2,
                     'ttl': TTL_2,
                     'rrdatas': RRDATAS_2}],
            }]
        }
        # Two clients: the request must go through the explicitly passed
        # ``client2`` (conn2), not the zone's bound ``client1`` (conn1).
        conn1 = _Connection()
        client1 = _Client(project=self.PROJECT, connection=conn1)
        conn2 = _Connection(DATA)
        client2 = _Client(project=self.PROJECT, connection=conn2)
        zone = self._makeOne(self.ZONE_NAME, self.DNS_NAME, client1)

        changes, token = zone.list_changes(
            max_results=3, page_token=TOKEN, client=client2)

        self.assertEqual(len(changes), len(DATA['changes']))
        for found, expected in zip(changes, DATA['changes']):
            self.assertTrue(isinstance(found, Changes))
            self.assertEqual(found.name, CHANGES_NAME)
            self.assertEqual(found.status, 'pending')
            self.assertEqual(found.started, self.WHEN)

            # Additions and deletions must round-trip into
            # ResourceRecordSet instances (note: TTLs become ints).
            self.assertEqual(len(found.additions), len(expected['additions']))
            for found_rr, expected_rr in zip(found.additions,
                                             expected['additions']):
                self.assertTrue(isinstance(found_rr, ResourceRecordSet))
                self.assertEqual(found_rr.name, expected_rr['name'])
                self.assertEqual(found_rr.record_type, expected_rr['type'])
                self.assertEqual(found_rr.ttl, int(expected_rr['ttl']))
                self.assertEqual(found_rr.rrdatas, expected_rr['rrdatas'])

            self.assertEqual(len(found.deletions), len(expected['deletions']))
            for found_rr, expected_rr in zip(found.deletions,
                                             expected['deletions']):
                self.assertTrue(isinstance(found_rr, ResourceRecordSet))
                self.assertEqual(found_rr.name, expected_rr['name'])
                self.assertEqual(found_rr.record_type, expected_rr['type'])
                self.assertEqual(found_rr.ttl, int(expected_rr['ttl']))
                self.assertEqual(found_rr.rrdatas, expected_rr['rrdatas'])

        # No nextPageToken in DATA, so the returned token is None.
        self.assertEqual(token, None)

        # Only the explicitly passed client should have made a request.
        self.assertEqual(len(conn1._requested), 0)
        self.assertEqual(len(conn2._requested), 1)
        req = conn2._requested[0]
        self.assertEqual(req['method'], 'GET')
        self.assertEqual(req['path'], '/%s' % PATH)
        self.assertEqual(req['query_params'],
                         {'maxResults': 3, 'pageToken': TOKEN})