def test_list_entries_no_paging(self):
    import datetime
    from google.api.monitored_resource_pb2 import MonitoredResource
    from google.gax import INITIAL_PAGE
    from google.cloud.grpc.logging.v2.log_entry_pb2 import LogEntry
    from google.cloud._helpers import _datetime_to_pb_timestamp
    from google.cloud._helpers import UTC
    from google.cloud._testing import _GAXPageIterator
    from google.cloud.logging import DESCENDING
    from google.cloud.logging.client import Client
    from google.cloud.logging.entries import TextEntry
    from google.cloud.logging.logger import Logger

    TOKEN = 'TOKEN'
    TEXT = 'TEXT'
    resource_pb = MonitoredResource(type='global')
    timestamp = datetime.datetime.utcnow().replace(tzinfo=UTC)
    timestamp_pb = _datetime_to_pb_timestamp(timestamp)
    entry_pb = LogEntry(log_name=self.LOG_PATH, resource=resource_pb,
                        timestamp=timestamp_pb, text_payload=TEXT)
    response = _GAXPageIterator([entry_pb], page_token=TOKEN)
    gax_api = _GAXLoggingAPI(_list_log_entries_response=response)
    client = Client(project=self.PROJECT, credentials=_make_credentials(),
                    use_gax=True)
    api = self._make_one(gax_api, client)

    iterator = api.list_entries(
        [self.PROJECT], self.FILTER, DESCENDING)
    entries = list(iterator)
    next_token = iterator.next_page_token

    # First check the token.
    self.assertEqual(next_token, TOKEN)
    # Then check the entries returned.
    self.assertEqual(len(entries), 1)
    entry = entries[0]
    self.assertIsInstance(entry, TextEntry)
    self.assertEqual(entry.payload, TEXT)
    self.assertIsInstance(entry.logger, Logger)
    self.assertEqual(entry.logger.name, self.LOG_NAME)
    self.assertIsNone(entry.insert_id)
    self.assertEqual(entry.timestamp, timestamp)
    self.assertIsNone(entry.labels)
    self.assertIsNone(entry.severity)
    self.assertIsNone(entry.http_request)

    resource_names, projects, filter_, order_by, page_size, options = (
        gax_api._list_log_entries_called_with)
    self.assertEqual(resource_names, [])
    self.assertEqual(projects, [self.PROJECT])
    self.assertEqual(filter_, self.FILTER)
    self.assertEqual(order_by, DESCENDING)
    self.assertEqual(page_size, 0)
    self.assertIs(options.page_token, INITIAL_PAGE)
def _list_entries_with_paging_helper(self, payload, struct_pb):
    import datetime
    from google.api.monitored_resource_pb2 import MonitoredResource
    from google.cloud.grpc.logging.v2.log_entry_pb2 import LogEntry
    from google.cloud._helpers import _datetime_to_pb_timestamp
    from google.cloud._helpers import UTC
    from google.cloud._testing import _GAXPageIterator
    from google.cloud.logging.client import Client
    from google.cloud.logging.entries import StructEntry
    from google.cloud.logging.logger import Logger

    SIZE = 23
    TOKEN = 'TOKEN'
    NEW_TOKEN = 'NEW_TOKEN'
    resource_pb = MonitoredResource(type='global')
    timestamp = datetime.datetime.utcnow().replace(tzinfo=UTC)
    timestamp_pb = _datetime_to_pb_timestamp(timestamp)
    entry_pb = LogEntry(log_name=self.LOG_PATH, resource=resource_pb,
                        timestamp=timestamp_pb, json_payload=struct_pb)
    response = _GAXPageIterator([entry_pb], page_token=NEW_TOKEN)
    gax_api = _GAXLoggingAPI(_list_log_entries_response=response)
    client = Client(project=self.PROJECT, credentials=_make_credentials(),
                    use_gax=True)
    api = self._make_one(gax_api, client)

    iterator = api.list_entries(
        [self.PROJECT], page_size=SIZE, page_token=TOKEN)
    entries = list(iterator)
    next_token = iterator.next_page_token

    # First check the token.
    self.assertEqual(next_token, NEW_TOKEN)
    self.assertEqual(len(entries), 1)
    entry = entries[0]
    self.assertIsInstance(entry, StructEntry)
    self.assertEqual(entry.payload, payload)
    self.assertIsInstance(entry.logger, Logger)
    self.assertEqual(entry.logger.name, self.LOG_NAME)
    self.assertIsNone(entry.insert_id)
    self.assertEqual(entry.timestamp, timestamp)
    self.assertIsNone(entry.labels)
    self.assertIsNone(entry.severity)
    self.assertIsNone(entry.http_request)

    resource_names, projects, filter_, order_by, page_size, options = (
        gax_api._list_log_entries_called_with)
    self.assertEqual(resource_names, [])
    self.assertEqual(projects, [self.PROJECT])
    self.assertEqual(filter_, '')
    self.assertEqual(order_by, '')
    self.assertEqual(page_size, SIZE)
    self.assertEqual(options.page_token, TOKEN)
def _log_entry_mapping_to_pb(mapping):
    """Helper for :meth:`write_entries`, et aliae

    Performs "impedance matching" between the protobuf attrs and
    the keys expected in the JSON API.
    """
    entry_pb = LogEntry()
    # ``ParseDict`` cannot coerce a datetime, so rewrite the timestamp as the
    # RFC 3339 string form that the JSON mapping expects before parsing.
    if 'timestamp' in mapping:
        mapping['timestamp'] = _datetime_to_rfc3339(mapping['timestamp'])
    ParseDict(mapping, entry_pb)
    return entry_pb
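# Illustrative usage sketch (added for clarity, not part of the original
# module): it shows how a JSON-API-style mapping might be turned into a
# ``LogEntry`` protobuf by the helper above.  The log name and payload values
# below are hypothetical, and the keys use the JSON (camelCase) spellings
# that ``ParseDict`` understands.  It assumes the same module-level
# ``LogEntry``, ``ParseDict``, and ``_datetime_to_rfc3339`` imports that
# ``_log_entry_mapping_to_pb`` itself relies on.
def _example_log_entry_mapping_to_pb():
    import datetime

    mapping = {
        'logName': 'projects/my-project/logs/my-log',
        'textPayload': 'hello world',
        'timestamp': datetime.datetime.utcnow(),
    }
    # The helper rewrites ``timestamp`` in place as an RFC 3339 string and
    # then parses the mapping into a ``LogEntry`` protobuf.
    entry_pb = _log_entry_mapping_to_pb(mapping)
    assert entry_pb.text_payload == 'hello world'
    return entry_pb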
def _make_log_entry_with_extras(self, labels, iid, type_url, now):
    from google.api.monitored_resource_pb2 import MonitoredResource
    from google.cloud.grpc.logging.v2.log_entry_pb2 import LogEntry
    from google.cloud.grpc.logging.v2.log_entry_pb2 import (
        LogEntryOperation)
    from google.logging.type.http_request_pb2 import HttpRequest
    from google.logging.type.log_severity_pb2 import WARNING
    from google.protobuf.any_pb2 import Any
    from google.cloud._helpers import _datetime_to_pb_timestamp

    resource_pb = MonitoredResource(type='global', labels=labels)
    proto_payload = Any(type_url=type_url)
    timestamp_pb = _datetime_to_pb_timestamp(now)
    request_pb = HttpRequest(
        request_url='http://example.com/requested',
        request_method='GET',
        status=200,
        referer='http://example.com/referer',
        user_agent='AGENT',
        cache_hit=True,
        request_size=256,
        response_size=1024,
        remote_ip='1.2.3.4',
    )
    operation_pb = LogEntryOperation(
        producer='PRODUCER',
        first=True,
        last=True,
        id='OPID',
    )
    entry_pb = LogEntry(log_name=self.LOG_PATH,
                        resource=resource_pb,
                        proto_payload=proto_payload,
                        timestamp=timestamp_pb,
                        severity=WARNING,
                        insert_id=iid,
                        http_request=request_pb,
                        labels=labels,
                        operation=operation_pb)
    return entry_pb