def test_list_entries_defaults(self):
    import six
    from google.cloud.logging.client import Client

    TOKEN = 'TOKEN'

    client = Client(project=self.PROJECT, credentials=_make_credentials(),
                    use_gax=False)
    returned = {
        'nextPageToken': TOKEN,
    }
    client._connection = _Connection(returned)

    logger = self._make_one(self.LOGGER_NAME, client=client)

    iterator = logger.list_entries()
    page = six.next(iterator.pages)
    entries = list(page)
    token = iterator.next_page_token

    self.assertEqual(len(entries), 0)
    self.assertEqual(token, TOKEN)
    called_with = client._connection._called_with
    FILTER = 'logName=projects/%s/logs/%s' % (
        self.PROJECT, self.LOGGER_NAME)
    self.assertEqual(called_with, {
        'method': 'POST',
        'path': '/entries:list',
        'data': {
            'filter': FILTER,
            'projectIds': [self.PROJECT],
        },
    })
def test_list_entries_defaults(self):
    import six
    from google.cloud.logging.client import Client

    TOKEN = 'TOKEN'

    client = Client(project=self.PROJECT, credentials=_make_credentials(),
                    use_gax=False)
    returned = {
        'nextPageToken': TOKEN,
    }
    client._connection = _Connection(returned)

    logger = self._make_one(self.LOGGER_NAME, client=client)

    iterator = logger.list_entries()
    page = six.next(iterator.pages)
    entries = list(page)
    token = iterator.next_page_token

    self.assertEqual(len(entries), 0)
    self.assertEqual(token, TOKEN)
    called_with = client._connection._called_with
    FILTER = 'logName=projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME)
    self.assertEqual(
        called_with, {
            'method': 'POST',
            'path': '/entries:list',
            'data': {
                'filter': FILTER,
                'projectIds': [self.PROJECT],
            },
        })
def test_list_entries_defaults(self):
    import six
    from google.cloud.logging.client import Client

    TOKEN = "TOKEN"

    client = Client(project=self.PROJECT, credentials=_make_credentials(),
                    _use_grpc=False)
    returned = {"nextPageToken": TOKEN}
    client._connection = _Connection(returned)

    logger = self._make_one(self.LOGGER_NAME, client=client)

    iterator = logger.list_entries()
    page = six.next(iterator.pages)
    entries = list(page)
    token = iterator.next_page_token

    self.assertEqual(len(entries), 0)
    self.assertEqual(token, TOKEN)
    called_with = client._connection._called_with
    FILTER = "logName=projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME)
    self.assertEqual(
        called_with,
        {
            "method": "POST",
            "path": "/entries:list",
            "data": {
                "filter": FILTER,
                "projectIds": [self.PROJECT],
            },
        },
    )
def __init__(self, client, name):
    http = copy.deepcopy(client._connection.http)
    http = client._connection.credentials.authorize(http)
    self.client = Client(client.project, client._connection.credentials,
                         http)
    logger = self.client.logger(name)
    self.worker = _Worker(logger)
def test_list_entries_no_paging(self):
    import six
    from google.cloud.logging.client import Client
    from google.cloud.logging.entries import TextEntry
    from google.cloud.logging.logger import Logger

    NOW, TIMESTAMP = self._make_timestamp()
    IID = 'IID'
    TEXT = 'TEXT'
    SENT = {
        'projectIds': [self.PROJECT],
    }
    TOKEN = 'TOKEN'
    RETURNED = {
        'entries': [{
            'textPayload': TEXT,
            'insertId': IID,
            'resource': {
                'type': 'global',
            },
            'timestamp': TIMESTAMP,
            'logName': 'projects/%s/logs/%s' % (
                self.PROJECT, self.LOGGER_NAME),
        }],
        'nextPageToken': TOKEN,
    }
    client = Client(project=self.PROJECT, credentials=_make_credentials(),
                    _use_grpc=False)
    client._connection = _Connection(RETURNED)
    api = self._make_one(client)

    iterator = api.list_entries([self.PROJECT])
    page = six.next(iterator.pages)
    entries = list(page)
    token = iterator.next_page_token

    # First check the token.
    self.assertEqual(token, TOKEN)
    # Then check the entries returned.
    self.assertEqual(len(entries), 1)
    entry = entries[0]
    self.assertIsInstance(entry, TextEntry)
    self.assertEqual(entry.payload, TEXT)
    self.assertIsInstance(entry.logger, Logger)
    self.assertEqual(entry.logger.name, self.LOGGER_NAME)
    self.assertEqual(entry.insert_id, IID)
    self.assertEqual(entry.timestamp, NOW)
    self.assertIsNone(entry.labels)
    self.assertIsNone(entry.severity)
    self.assertIsNone(entry.http_request)

    called_with = client._connection._called_with
    expected_path = '/%s' % (self.LIST_ENTRIES_PATH,)
    self.assertEqual(called_with, {
        'method': 'POST',
        'path': expected_path,
        'data': SENT,
    })
def test_list_entries_no_paging(self):
    import six
    from google.cloud.logging.client import Client
    from google.cloud.logging.entries import TextEntry
    from google.cloud.logging.logger import Logger

    NOW, TIMESTAMP = self._make_timestamp()
    IID = 'IID'
    TEXT = 'TEXT'
    SENT = {
        'projectIds': [self.PROJECT],
    }
    TOKEN = 'TOKEN'
    RETURNED = {
        'entries': [{
            'textPayload': TEXT,
            'insertId': IID,
            'resource': {
                'type': 'global',
            },
            'timestamp': TIMESTAMP,
            'logName': 'projects/%s/logs/%s' % (
                self.PROJECT, self.LOGGER_NAME),
        }],
        'nextPageToken': TOKEN,
    }
    client = Client(project=self.PROJECT, credentials=object(),
                    use_gax=False)
    client._connection = _Connection(RETURNED)
    api = self._make_one(client)

    iterator = api.list_entries([self.PROJECT])
    page = six.next(iterator.pages)
    entries = list(page)
    token = iterator.next_page_token

    # First check the token.
    self.assertEqual(token, TOKEN)
    # Then check the entries returned.
    self.assertEqual(len(entries), 1)
    entry = entries[0]
    self.assertIsInstance(entry, TextEntry)
    self.assertEqual(entry.payload, TEXT)
    self.assertIsInstance(entry.logger, Logger)
    self.assertEqual(entry.logger.name, self.LOGGER_NAME)
    self.assertEqual(entry.insert_id, IID)
    self.assertEqual(entry.timestamp, NOW)
    self.assertIsNone(entry.labels)
    self.assertIsNone(entry.severity)
    self.assertIsNone(entry.http_request)

    called_with = client._connection._called_with
    expected_path = '/%s' % (self.LIST_ENTRIES_PATH,)
    self.assertEqual(called_with, {
        'method': 'POST',
        'path': expected_path,
        'data': SENT,
    })
def test_list_entries_explicit(self):
    # In-function imports for helpers used below (deepcopy and datetime were
    # not imported inside the original snippet).
    from copy import deepcopy
    from datetime import datetime, timedelta, timezone
    from google.cloud.logging import DESCENDING
    from google.cloud.logging.client import Client

    PROJECT1 = "PROJECT1"
    PROJECT2 = "PROJECT2"
    INPUT_FILTER = "resource.type:global"
    TOKEN = "TOKEN"
    PAGE_SIZE = 42
    client = Client(project=self.PROJECT, credentials=_make_credentials(),
                    _use_grpc=False)
    client._connection = _Connection({})
    logger = self._make_one(self.LOGGER_NAME, client=client)
    iterator = logger.list_entries(
        projects=[PROJECT1, PROJECT2],
        filter_=INPUT_FILTER,
        order_by=DESCENDING,
        page_size=PAGE_SIZE,
        page_token=TOKEN,
    )
    entries = list(iterator)
    token = iterator.next_page_token

    self.assertEqual(len(entries), 0)
    self.assertIsNone(token)
    # self.assertEqual(client._listed, LISTED)
    # check call payload
    call_payload_no_filter = deepcopy(client._connection._called_with)
    call_payload_no_filter["data"]["filter"] = "removed"
    self.assertEqual(
        call_payload_no_filter,
        {
            "method": "POST",
            "path": "/entries:list",
            "data": {
                "filter": "removed",
                "orderBy": DESCENDING,
                "pageSize": PAGE_SIZE,
                "pageToken": TOKEN,
                "projectIds": [PROJECT1, PROJECT2],
            },
        },
    )
    # verify that the default timestamp filter is roughly 24 hours in the past
    LOG_FILTER = "logName=projects/%s/logs/%s" % (
        self.PROJECT,
        self.LOGGER_NAME,
    )
    combined_filter = (
        INPUT_FILTER
        + " AND " + LOG_FILTER
        + " AND " + "timestamp>=" + self.TIME_FORMAT
    )
    timestamp = datetime.strptime(
        client._connection._called_with["data"]["filter"], combined_filter)
    yesterday = datetime.now(timezone.utc) - timedelta(days=1)
    self.assertLess(yesterday - timestamp, timedelta(minutes=1))
def test_list_entries_no_paging(self):
    import six
    from google.cloud.logging.client import Client
    from google.cloud.logging.entries import TextEntry
    from google.cloud.logging.logger import Logger

    NOW, TIMESTAMP = self._make_timestamp()
    IID = "IID"
    TEXT = "TEXT"
    SENT = {"projectIds": [self.PROJECT]}
    TOKEN = "TOKEN"
    RETURNED = {
        "entries": [
            {
                "textPayload": TEXT,
                "insertId": IID,
                "resource": {"type": "global"},
                "timestamp": TIMESTAMP,
                "logName": "projects/%s/logs/%s"
                % (self.PROJECT, self.LOGGER_NAME),
            }
        ],
        "nextPageToken": TOKEN,
    }
    client = Client(
        project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False
    )
    client._connection = _Connection(RETURNED)
    api = self._make_one(client)

    iterator = api.list_entries([self.PROJECT])
    page = six.next(iterator.pages)
    entries = list(page)
    token = iterator.next_page_token

    # First check the token.
    self.assertEqual(token, TOKEN)
    # Then check the entries returned.
    self.assertEqual(len(entries), 1)
    entry = entries[0]
    self.assertIsInstance(entry, TextEntry)
    self.assertEqual(entry.payload, TEXT)
    self.assertIsInstance(entry.logger, Logger)
    self.assertEqual(entry.logger.name, self.LOGGER_NAME)
    self.assertEqual(entry.insert_id, IID)
    self.assertEqual(entry.timestamp, NOW)
    self.assertIsNone(entry.labels)
    self.assertIsNone(entry.severity)
    self.assertIsNone(entry.http_request)

    called_with = client._connection._called_with
    expected_path = "/%s" % (self.LIST_ENTRIES_PATH,)
    self.assertEqual(
        called_with, {"method": "POST", "path": expected_path, "data": SENT}
    )
class BackgroundThreadTransport(Transport):
    """Asynchronous transport that uses a background thread.

    Writes logging entries as a batch process.
    """

    def __init__(self, client, name):
        http = copy.deepcopy(client.connection.http)
        http = client.connection.credentials.authorize(http)
        self.client = Client(client.project, client.connection.credentials,
                             http)
        logger = self.client.logger(name)
        self.worker = _Worker(logger)

    def send(self, record, message):
        """Overrides Transport.send().

        :type record: :class:`logging.LogRecord`
        :param record: Python log record that the handler was called with.

        :type message: str
        :param message: The message from the ``LogRecord`` after being
                        formatted by the associated log formatters.
        """
        self.worker.enqueue(record, message)
def test_list_entries_explicit(self):
    from google.cloud.logging import DESCENDING
    from google.cloud.logging.client import Client

    PROJECT1 = "PROJECT1"
    PROJECT2 = "PROJECT2"
    FILTER = "resource.type:global"
    TOKEN = "TOKEN"
    PAGE_SIZE = 42
    client = Client(project=self.PROJECT, credentials=_make_credentials(),
                    _use_grpc=False)
    client._connection = _Connection({})
    logger = self._make_one(self.LOGGER_NAME, client=client)
    iterator = logger.list_entries(
        projects=[PROJECT1, PROJECT2],
        filter_=FILTER,
        order_by=DESCENDING,
        page_size=PAGE_SIZE,
        page_token=TOKEN,
    )
    entries = list(iterator)
    token = iterator.next_page_token

    self.assertEqual(len(entries), 0)
    self.assertIsNone(token)
    # self.assertEqual(client._listed, LISTED)
    called_with = client._connection._called_with
    combined_filter = "%s AND logName=projects/%s/logs/%s" % (
        FILTER,
        self.PROJECT,
        self.LOGGER_NAME,
    )
    self.assertEqual(
        called_with,
        {
            "method": "POST",
            "path": "/entries:list",
            "data": {
                "filter": combined_filter,
                "orderBy": DESCENDING,
                "pageSize": PAGE_SIZE,
                "pageToken": TOKEN,
                "projectIds": [PROJECT1, PROJECT2],
            },
        },
    )
def __init__(self, client, name):
    http = copy.deepcopy(client.connection.http)
    http = client.connection.credentials.authorize(http)
    self.client = Client(client.project, client.connection.credentials,
                         http)
    logger = self.client.logger(name)
    self.worker = _Worker(logger)
def test_list_entries_no_paging(self):
    import datetime
    from google.api.monitored_resource_pb2 import MonitoredResource
    from google.gax import INITIAL_PAGE
    from google.cloud.grpc.logging.v2.log_entry_pb2 import LogEntry
    from google.cloud._helpers import _datetime_to_pb_timestamp
    from google.cloud._helpers import UTC
    from google.cloud._testing import _GAXPageIterator
    from google.cloud.logging import DESCENDING
    from google.cloud.logging.client import Client
    from google.cloud.logging.entries import TextEntry
    from google.cloud.logging.logger import Logger

    TOKEN = 'TOKEN'
    TEXT = 'TEXT'
    resource_pb = MonitoredResource(type='global')
    timestamp = datetime.datetime.utcnow().replace(tzinfo=UTC)
    timestamp_pb = _datetime_to_pb_timestamp(timestamp)
    entry_pb = LogEntry(log_name=self.LOG_PATH,
                        resource=resource_pb,
                        timestamp=timestamp_pb,
                        text_payload=TEXT)
    response = _GAXPageIterator([entry_pb], page_token=TOKEN)
    gax_api = _GAXLoggingAPI(_list_log_entries_response=response)
    client = Client(project=self.PROJECT, credentials=object(),
                    use_gax=True)
    api = self._make_one(gax_api, client)

    iterator = api.list_entries(
        [self.PROJECT], self.FILTER, DESCENDING)
    entries = list(iterator)
    next_token = iterator.next_page_token

    # First check the token.
    self.assertEqual(next_token, TOKEN)
    # Then check the entries returned.
    self.assertEqual(len(entries), 1)
    entry = entries[0]
    self.assertIsInstance(entry, TextEntry)
    self.assertEqual(entry.payload, TEXT)
    self.assertIsInstance(entry.logger, Logger)
    self.assertEqual(entry.logger.name, self.LOG_NAME)
    self.assertIsNone(entry.insert_id)
    self.assertEqual(entry.timestamp, timestamp)
    self.assertIsNone(entry.labels)
    self.assertIsNone(entry.severity)
    self.assertIsNone(entry.http_request)

    resource_names, projects, filter_, order_by, page_size, options = (
        gax_api._list_log_entries_called_with)
    self.assertEqual(resource_names, [])
    self.assertEqual(projects, [self.PROJECT])
    self.assertEqual(filter_, self.FILTER)
    self.assertEqual(order_by, DESCENDING)
    self.assertEqual(page_size, 0)
    self.assertIs(options.page_token, INITIAL_PAGE)
def test_list_entries_defaults(self):
    import six
    # In-function imports for helpers used below (deepcopy and datetime were
    # not imported inside the original snippet).
    from copy import deepcopy
    from datetime import datetime, timedelta, timezone
    from google.cloud.logging.client import Client

    TOKEN = "TOKEN"

    client = Client(project=self.PROJECT, credentials=_make_credentials(),
                    _use_grpc=False)
    returned = {"nextPageToken": TOKEN}
    client._connection = _Connection(returned)

    logger = self._make_one(self.LOGGER_NAME, client=client)

    iterator = logger.list_entries()
    page = six.next(iterator.pages)
    entries = list(page)
    token = iterator.next_page_token

    self.assertEqual(len(entries), 0)
    self.assertEqual(token, TOKEN)
    LOG_FILTER = "logName=projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME)

    # check call payload
    call_payload_no_filter = deepcopy(client._connection._called_with)
    call_payload_no_filter["data"]["filter"] = "removed"
    self.assertEqual(
        call_payload_no_filter,
        {
            "path": "/entries:list",
            "method": "POST",
            "data": {
                "filter": "removed",
                "projectIds": [self.PROJECT],
            },
        },
    )
    # verify that the default timestamp filter is roughly 24 hours in the past
    timestamp = datetime.strptime(
        client._connection._called_with["data"]["filter"],
        LOG_FILTER + " AND timestamp>=" + self.TIME_FORMAT,
    )
    yesterday = datetime.now(timezone.utc) - timedelta(days=1)
    self.assertLess(yesterday - timestamp, timedelta(minutes=1))
def _list_entries_with_paging_helper(self, payload, struct_pb):
    import datetime
    from google.api.monitored_resource_pb2 import MonitoredResource
    from google.cloud.grpc.logging.v2.log_entry_pb2 import LogEntry
    from google.cloud._helpers import _datetime_to_pb_timestamp
    from google.cloud._helpers import UTC
    from google.cloud._testing import _GAXPageIterator
    from google.cloud.logging.client import Client
    from google.cloud.logging.entries import StructEntry
    from google.cloud.logging.logger import Logger

    SIZE = 23
    TOKEN = 'TOKEN'
    NEW_TOKEN = 'NEW_TOKEN'
    resource_pb = MonitoredResource(type='global')
    timestamp = datetime.datetime.utcnow().replace(tzinfo=UTC)
    timestamp_pb = _datetime_to_pb_timestamp(timestamp)
    entry_pb = LogEntry(log_name=self.LOG_PATH,
                        resource=resource_pb,
                        timestamp=timestamp_pb,
                        json_payload=struct_pb)
    response = _GAXPageIterator([entry_pb], page_token=NEW_TOKEN)
    gax_api = _GAXLoggingAPI(_list_log_entries_response=response)
    client = Client(project=self.PROJECT, credentials=_make_credentials(),
                    use_gax=True)
    api = self._make_one(gax_api, client)

    iterator = api.list_entries(
        [self.PROJECT], page_size=SIZE, page_token=TOKEN)
    entries = list(iterator)
    next_token = iterator.next_page_token

    # First check the token.
    self.assertEqual(next_token, NEW_TOKEN)
    self.assertEqual(len(entries), 1)
    entry = entries[0]
    self.assertIsInstance(entry, StructEntry)
    self.assertEqual(entry.payload, payload)
    self.assertIsInstance(entry.logger, Logger)
    self.assertEqual(entry.logger.name, self.LOG_NAME)
    self.assertIsNone(entry.insert_id)
    self.assertEqual(entry.timestamp, timestamp)
    self.assertIsNone(entry.labels)
    self.assertIsNone(entry.severity)
    self.assertIsNone(entry.http_request)

    resource_names, projects, filter_, order_by, page_size, options = (
        gax_api._list_log_entries_called_with)
    self.assertEqual(resource_names, [])
    self.assertEqual(projects, [self.PROJECT])
    self.assertEqual(filter_, '')
    self.assertEqual(order_by, '')
    self.assertEqual(page_size, SIZE)
    self.assertEqual(options.page_token, TOKEN)
def test_list_entries_explicit(self):
    from google.cloud.logging import DESCENDING
    from google.cloud.logging.client import Client

    PROJECT1 = 'PROJECT1'
    PROJECT2 = 'PROJECT2'
    FILTER = 'resource.type:global'
    TOKEN = 'TOKEN'
    PAGE_SIZE = 42
    client = Client(project=self.PROJECT, credentials=_make_credentials(),
                    use_gax=False)
    client._connection = _Connection({})
    logger = self._make_one(self.LOGGER_NAME, client=client)
    iterator = logger.list_entries(projects=[PROJECT1, PROJECT2],
                                   filter_=FILTER,
                                   order_by=DESCENDING,
                                   page_size=PAGE_SIZE,
                                   page_token=TOKEN)
    entries = list(iterator)
    token = iterator.next_page_token

    self.assertEqual(len(entries), 0)
    self.assertIsNone(token)
    # self.assertEqual(client._listed, LISTED)
    called_with = client._connection._called_with
    combined_filter = '%s AND logName=projects/%s/logs/%s' % (
        FILTER, self.PROJECT, self.LOGGER_NAME)
    self.assertEqual(
        called_with, {
            'method': 'POST',
            'path': '/entries:list',
            'data': {
                'filter': combined_filter,
                'orderBy': DESCENDING,
                'pageSize': PAGE_SIZE,
                'pageToken': TOKEN,
                'projectIds': [PROJECT1, PROJECT2],
            },
        })
def main():
    client = Client()
    for example in _find_examples():
        to_delete = []
        print('%-25s: %s' % _name_and_doc(example))
        try:
            example(client, to_delete)
        except AssertionError as failure:
            print(' FAIL: %s' % (failure,))
        except Exception as error:  # pylint: disable=broad-except
            print(' ERROR: %r' % (error,))
        for item in to_delete:
            _backoff_not_found(item.delete)
def test_list_entries_explicit(self):
    from google.cloud.logging import DESCENDING
    from google.cloud.logging.client import Client

    PROJECT1 = 'PROJECT1'
    PROJECT2 = 'PROJECT2'
    FILTER = 'resource.type:global'
    TOKEN = 'TOKEN'
    PAGE_SIZE = 42
    client = Client(project=self.PROJECT, credentials=_make_credentials(),
                    use_gax=False)
    client._connection = _Connection({})
    logger = self._make_one(self.LOGGER_NAME, client=client)
    iterator = logger.list_entries(
        projects=[PROJECT1, PROJECT2], filter_=FILTER, order_by=DESCENDING,
        page_size=PAGE_SIZE, page_token=TOKEN)
    entries = list(iterator)
    token = iterator.next_page_token

    self.assertEqual(len(entries), 0)
    self.assertIsNone(token)
    # self.assertEqual(client._listed, LISTED)
    called_with = client._connection._called_with
    combined_filter = '%s AND logName=projects/%s/logs/%s' % (
        FILTER, self.PROJECT, self.LOGGER_NAME)
    self.assertEqual(called_with, {
        'method': 'POST',
        'path': '/entries:list',
        'data': {
            'filter': combined_filter,
            'orderBy': DESCENDING,
            'pageSize': PAGE_SIZE,
            'pageToken': TOKEN,
            'projectIds': [PROJECT1, PROJECT2],
        },
    })
class LogHandler:
    def __init__(self):
        if RUBBISH_GEO_ENV == "local":
            logging.basicConfig(level=logging.INFO)
        else:  # [dev, prod]
            self.client = Client()
            self.logger = self.client.logger("functional_api")

    def log_struct(self, struct):
        level = struct.get("level", "info")
        if level == "error":
            struct['traceback'] = traceback.format_exc()  # SO#57712700
        struct["caller"] = inspect.currentframe().f_back.f_code.co_name
        if RUBBISH_GEO_ENV == "local":
            getattr(logging, level)(json.dumps(struct))
        else:  # [dev, prod]
            self.logger.log_struct(struct)
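A brief usage sketch of the handler above; the struct fields other than "level" are illustrative placeholders, not part of the original snippet.

# Assumed usage of LogHandler: in "local" mode this emits a JSON line via the
# stdlib logging module, otherwise it writes a structured entry to the
# "functional_api" cloud logger.
handler = LogHandler()
handler.log_struct({"level": "info", "message": "example structured message"})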
class BackgroundThreadTransport(Transport):
    """Asynchronous transport that uses a background thread.

    Writes logging entries as a batch process.
    """

    def __init__(self, client, name):
        http = copy.deepcopy(client._connection.http)
        http = client._connection.credentials.authorize(http)
        self.client = Client(client.project, client._connection.credentials,
                             http)
        logger = self.client.logger(name)
        self.worker = _Worker(logger)

    def send(self, record, message):
        """Overrides Transport.send().

        :type record: :class:`logging.LogRecord`
        :param record: Python log record that the handler was called with.

        :type message: str
        :param message: The message from the ``LogRecord`` after being
                        formatted by the associated log formatters.
        """
        self.worker.enqueue(record, message)
def __init__(self):
    if RUBBISH_GEO_ENV == "local":
        logging.basicConfig(level=logging.INFO)
    else:  # [dev, prod]
        self.client = Client()
        self.logger = self.client.logger("functional_api")
def test_list_entries_w_paging(self):
    from google.cloud.logging import DESCENDING
    from google.cloud.logging.client import Client
    from google.cloud.logging.logger import Logger
    from google.cloud.logging.entries import ProtobufEntry
    from google.cloud.logging.entries import StructEntry

    PROJECT1 = 'PROJECT1'
    PROJECT2 = 'PROJECT2'
    NOW, TIMESTAMP = self._make_timestamp()
    IID1 = 'IID1'
    IID2 = 'IID2'
    PAYLOAD = {'message': 'MESSAGE', 'weather': 'partly cloudy'}
    PROTO_PAYLOAD = PAYLOAD.copy()
    PROTO_PAYLOAD['@type'] = 'type.googleapis.com/testing.example'
    TOKEN = 'TOKEN'
    PAGE_SIZE = 42
    SENT = {
        'projectIds': [PROJECT1, PROJECT2],
        'filter': self.FILTER,
        'orderBy': DESCENDING,
        'pageSize': PAGE_SIZE,
        'pageToken': TOKEN,
    }
    RETURNED = {
        'entries': [{
            'jsonPayload': PAYLOAD,
            'insertId': IID1,
            'resource': {
                'type': 'global',
            },
            'timestamp': TIMESTAMP,
            'logName': 'projects/%s/logs/%s' % (self.PROJECT,
                                                self.LOGGER_NAME),
        }, {
            'protoPayload': PROTO_PAYLOAD,
            'insertId': IID2,
            'resource': {
                'type': 'global',
            },
            'timestamp': TIMESTAMP,
            'logName': 'projects/%s/logs/%s' % (self.PROJECT,
                                                self.LOGGER_NAME),
        }],
    }
    client = Client(project=self.PROJECT, credentials=_make_credentials(),
                    _use_grpc=False)
    client._connection = _Connection(RETURNED)
    api = self._make_one(client)

    iterator = api.list_entries(projects=[PROJECT1, PROJECT2],
                                filter_=self.FILTER,
                                order_by=DESCENDING,
                                page_size=PAGE_SIZE,
                                page_token=TOKEN)
    entries = list(iterator)
    token = iterator.next_page_token

    # First check the token.
    self.assertIsNone(token)
    # Then check the entries returned.
    self.assertEqual(len(entries), 2)
    entry1 = entries[0]
    self.assertIsInstance(entry1, StructEntry)
    self.assertEqual(entry1.payload, PAYLOAD)
    self.assertIsInstance(entry1.logger, Logger)
    self.assertEqual(entry1.logger.name, self.LOGGER_NAME)
    self.assertEqual(entry1.insert_id, IID1)
    self.assertEqual(entry1.timestamp, NOW)
    self.assertIsNone(entry1.labels)
    self.assertIsNone(entry1.severity)
    self.assertIsNone(entry1.http_request)

    entry2 = entries[1]
    self.assertIsInstance(entry2, ProtobufEntry)
    self.assertEqual(entry2.payload, PROTO_PAYLOAD)
    self.assertIsInstance(entry2.logger, Logger)
    self.assertEqual(entry2.logger.name, self.LOGGER_NAME)
    self.assertEqual(entry2.insert_id, IID2)
    self.assertEqual(entry2.timestamp, NOW)
    self.assertIsNone(entry2.labels)
    self.assertIsNone(entry2.severity)
    self.assertIsNone(entry2.http_request)

    called_with = client._connection._called_with
    expected_path = '/%s' % (self.LIST_ENTRIES_PATH,)
    self.assertEqual(called_with, {
        'method': 'POST',
        'path': expected_path,
        'data': SENT,
    })
def test_list_entries_w_paging(self):
    from google.cloud.logging import DESCENDING
    from google.cloud.logging.client import Client
    from google.cloud.logging.logger import Logger
    from google.cloud.logging.entries import ProtobufEntry
    from google.cloud.logging.entries import StructEntry

    PROJECT1 = "PROJECT1"
    PROJECT2 = "PROJECT2"
    NOW, TIMESTAMP = self._make_timestamp()
    IID1 = "IID1"
    IID2 = "IID2"
    PAYLOAD = {"message": "MESSAGE", "weather": "partly cloudy"}
    PROTO_PAYLOAD = PAYLOAD.copy()
    PROTO_PAYLOAD["@type"] = "type.googleapis.com/testing.example"
    TOKEN = "TOKEN"
    PAGE_SIZE = 42
    SENT = {
        "projectIds": [PROJECT1, PROJECT2],
        "filter": self.FILTER,
        "orderBy": DESCENDING,
        "pageSize": PAGE_SIZE,
        "pageToken": TOKEN,
    }
    RETURNED = {
        "entries": [
            {
                "jsonPayload": PAYLOAD,
                "insertId": IID1,
                "resource": {"type": "global"},
                "timestamp": TIMESTAMP,
                "logName": "projects/%s/logs/%s" % (self.PROJECT,
                                                    self.LOGGER_NAME),
            },
            {
                "protoPayload": PROTO_PAYLOAD,
                "insertId": IID2,
                "resource": {"type": "global"},
                "timestamp": TIMESTAMP,
                "logName": "projects/%s/logs/%s" % (self.PROJECT,
                                                    self.LOGGER_NAME),
            },
        ]
    }
    client = Client(project=self.PROJECT, credentials=_make_credentials(),
                    _use_grpc=False)
    client._connection = _Connection(RETURNED)
    api = self._make_one(client)

    iterator = api.list_entries(
        projects=[PROJECT1, PROJECT2],
        filter_=self.FILTER,
        order_by=DESCENDING,
        page_size=PAGE_SIZE,
        page_token=TOKEN,
    )
    entries = list(iterator)
    token = iterator.next_page_token

    # First check the token.
    self.assertIsNone(token)
    # Then check the entries returned.
    self.assertEqual(len(entries), 2)
    entry1 = entries[0]
    self.assertIsInstance(entry1, StructEntry)
    self.assertEqual(entry1.payload, PAYLOAD)
    self.assertIsInstance(entry1.logger, Logger)
    self.assertEqual(entry1.logger.name, self.LOGGER_NAME)
    self.assertEqual(entry1.insert_id, IID1)
    self.assertEqual(entry1.timestamp, NOW)
    self.assertIsNone(entry1.labels)
    self.assertIsNone(entry1.severity)
    self.assertIsNone(entry1.http_request)

    entry2 = entries[1]
    self.assertIsInstance(entry2, ProtobufEntry)
    self.assertEqual(entry2.payload, PROTO_PAYLOAD)
    self.assertIsInstance(entry2.logger, Logger)
    self.assertEqual(entry2.logger.name, self.LOGGER_NAME)
    self.assertEqual(entry2.insert_id, IID2)
    self.assertEqual(entry2.timestamp, NOW)
    self.assertIsNone(entry2.labels)
    self.assertIsNone(entry2.severity)
    self.assertIsNone(entry2.http_request)

    called_with = client._connection._called_with
    expected_path = "/%s" % (self.LIST_ENTRIES_PATH,)
    self.assertEqual(called_with, {
        "method": "POST",
        "path": expected_path,
        "data": SENT,
    })
def test_list_entries_w_paging(self):
    from google.cloud.logging import DESCENDING
    from google.cloud.logging.client import Client
    from google.cloud.logging.logger import Logger
    from google.cloud.logging.entries import ProtobufEntry
    from google.cloud.logging.entries import StructEntry

    PROJECT1 = 'PROJECT1'
    PROJECT2 = 'PROJECT2'
    NOW, TIMESTAMP = self._make_timestamp()
    IID1 = 'IID1'
    IID2 = 'IID2'
    PAYLOAD = {'message': 'MESSAGE', 'weather': 'partly cloudy'}
    PROTO_PAYLOAD = PAYLOAD.copy()
    PROTO_PAYLOAD['@type'] = 'type.googleapis.com/testing.example'
    TOKEN = 'TOKEN'
    PAGE_SIZE = 42
    SENT = {
        'projectIds': [PROJECT1, PROJECT2],
        'filter': self.FILTER,
        'orderBy': DESCENDING,
        'pageSize': PAGE_SIZE,
        'pageToken': TOKEN,
    }
    RETURNED = {
        'entries': [{
            'jsonPayload': PAYLOAD,
            'insertId': IID1,
            'resource': {
                'type': 'global',
            },
            'timestamp': TIMESTAMP,
            'logName': 'projects/%s/logs/%s' % (
                self.PROJECT, self.LOGGER_NAME),
        }, {
            'protoPayload': PROTO_PAYLOAD,
            'insertId': IID2,
            'resource': {
                'type': 'global',
            },
            'timestamp': TIMESTAMP,
            'logName': 'projects/%s/logs/%s' % (
                self.PROJECT, self.LOGGER_NAME),
        }],
    }
    client = Client(project=self.PROJECT, credentials=_make_credentials(),
                    _use_grpc=False)
    client._connection = _Connection(RETURNED)
    api = self._make_one(client)

    iterator = api.list_entries(
        projects=[PROJECT1, PROJECT2], filter_=self.FILTER,
        order_by=DESCENDING, page_size=PAGE_SIZE, page_token=TOKEN)
    entries = list(iterator)
    token = iterator.next_page_token

    # First check the token.
    self.assertIsNone(token)
    # Then check the entries returned.
    self.assertEqual(len(entries), 2)
    entry1 = entries[0]
    self.assertIsInstance(entry1, StructEntry)
    self.assertEqual(entry1.payload, PAYLOAD)
    self.assertIsInstance(entry1.logger, Logger)
    self.assertEqual(entry1.logger.name, self.LOGGER_NAME)
    self.assertEqual(entry1.insert_id, IID1)
    self.assertEqual(entry1.timestamp, NOW)
    self.assertIsNone(entry1.labels)
    self.assertIsNone(entry1.severity)
    self.assertIsNone(entry1.http_request)

    entry2 = entries[1]
    self.assertIsInstance(entry2, ProtobufEntry)
    self.assertEqual(entry2.payload, PROTO_PAYLOAD)
    self.assertIsInstance(entry2.logger, Logger)
    self.assertEqual(entry2.logger.name, self.LOGGER_NAME)
    self.assertEqual(entry2.insert_id, IID2)
    self.assertEqual(entry2.timestamp, NOW)
    self.assertIsNone(entry2.labels)
    self.assertIsNone(entry2.severity)
    self.assertIsNone(entry2.http_request)

    called_with = client._connection._called_with
    expected_path = '/%s' % (self.LIST_ENTRIES_PATH,)
    self.assertEqual(called_with, {
        'method': 'POST',
        'path': expected_path,
        'data': SENT,
    })
def test_list_entries_w_paging(self):
    from google.cloud.logging import DESCENDING
    from google.cloud.logging.client import Client
    from google.cloud.logging.logger import Logger
    from google.cloud.logging.entries import ProtobufEntry
    from google.cloud.logging.entries import StructEntry

    PROJECT1 = "PROJECT1"
    PROJECT2 = "PROJECT2"
    NOW, TIMESTAMP = self._make_timestamp()
    IID1 = "IID1"
    IID2 = "IID2"
    PAYLOAD = {"message": "MESSAGE", "weather": "partly cloudy"}
    PROTO_PAYLOAD = PAYLOAD.copy()
    PROTO_PAYLOAD["@type"] = "type.googleapis.com/testing.example"
    TOKEN = "TOKEN"
    PAGE_SIZE = 42
    SENT = {
        "projectIds": [PROJECT1, PROJECT2],
        "filter": self.FILTER,
        "orderBy": DESCENDING,
        "pageSize": PAGE_SIZE,
        "pageToken": TOKEN,
    }
    RETURNED = {
        "entries": [
            {
                "jsonPayload": PAYLOAD,
                "insertId": IID1,
                "resource": {"type": "global"},
                "timestamp": TIMESTAMP,
                "logName": "projects/%s/logs/%s"
                % (self.PROJECT, self.LOGGER_NAME),
            },
            {
                "protoPayload": PROTO_PAYLOAD,
                "insertId": IID2,
                "resource": {"type": "global"},
                "timestamp": TIMESTAMP,
                "logName": "projects/%s/logs/%s"
                % (self.PROJECT, self.LOGGER_NAME),
            },
        ]
    }
    client = Client(
        project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False
    )
    client._connection = _Connection(RETURNED)
    api = self._make_one(client)

    iterator = api.list_entries(
        projects=[PROJECT1, PROJECT2],
        filter_=self.FILTER,
        order_by=DESCENDING,
        page_size=PAGE_SIZE,
        page_token=TOKEN,
    )
    entries = list(iterator)
    token = iterator.next_page_token

    # First check the token.
    self.assertIsNone(token)
    # Then check the entries returned.
    self.assertEqual(len(entries), 2)
    entry1 = entries[0]
    self.assertIsInstance(entry1, StructEntry)
    self.assertEqual(entry1.payload, PAYLOAD)
    self.assertIsInstance(entry1.logger, Logger)
    self.assertEqual(entry1.logger.name, self.LOGGER_NAME)
    self.assertEqual(entry1.insert_id, IID1)
    self.assertEqual(entry1.timestamp, NOW)
    self.assertIsNone(entry1.labels)
    self.assertIsNone(entry1.severity)
    self.assertIsNone(entry1.http_request)

    entry2 = entries[1]
    self.assertIsInstance(entry2, ProtobufEntry)
    self.assertEqual(entry2.payload, PROTO_PAYLOAD)
    self.assertIsInstance(entry2.logger, Logger)
    self.assertEqual(entry2.logger.name, self.LOGGER_NAME)
    self.assertEqual(entry2.insert_id, IID2)
    self.assertEqual(entry2.timestamp, NOW)
    self.assertIsNone(entry2.labels)
    self.assertIsNone(entry2.severity)
    self.assertIsNone(entry2.http_request)

    called_with = client._connection._called_with
    expected_path = "/%s" % (self.LIST_ENTRIES_PATH,)
    self.assertEqual(
        called_with, {"method": "POST", "path": expected_path, "data": SENT}
    )
from __future__ import print_function

import os
os.environ['GOOGLE_CLOUD_DISABLE_GRPC'] = 'true'

import socket

from google.cloud.logging.client import Client
from google.cloud.logging.logger import Logger

from igneous.secrets import PROJECT_NAME, QUEUE_NAME, google_credentials_path

client = Client.from_service_account_json(google_credentials_path,
                                          project=PROJECT_NAME)
logger = Logger('pipeline_logger', client)


def log(severity, task, message):
    # Look at the logs produced when running this script at:
    # https://console.cloud.google.com/logs/viewer?project=neuromancer-seung-import&resource=global
    # Choosing the severity:
    #   DEBUG      Debug or trace information.
    #   INFO       Routine information, such as ongoing status or performance.
    #   NOTICE     Normal but significant events, such as start up, shut down, or a configuration change.
    #   WARNING    Warning events might cause problems.
    #   ERROR      Error events are likely to cause problems.
    #   CRITICAL   Critical events cause more severe problems or outages.
    #   ALERT      A person must take an action immediately.
    #   EMERGENCY  One or more systems are unusable.
    # TODO: change resource from global to GKE container or similar
    # NOTE: the original snippet is truncated here; the call below is an
    # assumed minimal body so the helper is runnable (Logger.log_text accepts
    # a severity keyword).
    logger.log_text('%s: %s' % (task, message), severity=severity)
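An illustrative call to the helper above, using one of the severity names listed in its comments; the task name and message are placeholders rather than values from the original script.

# Example invocation (placeholder task and message).
log('INFO', 'downsample', 'finished processing one chunk')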
def test_list_entries_with_extra_properties(self):
    import datetime

    # Import the wrappers to register the type URL for BoolValue
    # pylint: disable=unused-variable
    from google.protobuf import wrappers_pb2
    # pylint: enable=unused-variable

    from google.cloud._helpers import UTC
    from google.cloud._testing import _GAXPageIterator
    from google.cloud.logging.client import Client
    from google.cloud.logging.entries import ProtobufEntry
    from google.cloud.logging.logger import Logger

    NOW = datetime.datetime.utcnow().replace(tzinfo=UTC)
    SIZE = 23
    TOKEN = 'TOKEN'
    NEW_TOKEN = 'NEW_TOKEN'
    SEVERITY = 'WARNING'
    LABELS = {
        'foo': 'bar',
    }
    IID = 'IID'
    bool_type_url = 'type.googleapis.com/google.protobuf.BoolValue'
    entry_pb = self._make_log_entry_with_extras(
        LABELS, IID, bool_type_url, NOW)

    response = _GAXPageIterator([entry_pb], page_token=NEW_TOKEN)
    gax_api = _GAXLoggingAPI(_list_log_entries_response=response)
    client = Client(project=self.PROJECT, credentials=object(),
                    use_gax=True)
    api = self._make_one(gax_api, client)

    iterator = api.list_entries(
        [self.PROJECT], page_size=SIZE, page_token=TOKEN)
    entries = list(iterator)
    next_token = iterator.next_page_token

    # First check the token.
    self.assertEqual(next_token, NEW_TOKEN)
    # Then check the entries returned.
    self.assertEqual(len(entries), 1)
    entry = entries[0]
    self.assertIsInstance(entry, ProtobufEntry)
    self.assertEqual(entry.payload, {
        '@type': bool_type_url,
        'value': False,
    })
    self.assertIsInstance(entry.logger, Logger)
    self.assertEqual(entry.logger.name, self.LOG_NAME)
    self.assertEqual(entry.insert_id, IID)
    self.assertEqual(entry.timestamp, NOW)
    self.assertEqual(entry.labels, {'foo': 'bar'})
    self.assertEqual(entry.severity, SEVERITY)
    self.assertEqual(entry.http_request, {
        'requestMethod': entry_pb.http_request.request_method,
        'requestUrl': entry_pb.http_request.request_url,
        'status': entry_pb.http_request.status,
        'requestSize': str(entry_pb.http_request.request_size),
        'responseSize': str(entry_pb.http_request.response_size),
        'referer': entry_pb.http_request.referer,
        'userAgent': entry_pb.http_request.user_agent,
        'remoteIp': entry_pb.http_request.remote_ip,
        'cacheHit': entry_pb.http_request.cache_hit,
    })

    resource_names, projects, filter_, order_by, page_size, options = (
        gax_api._list_log_entries_called_with)
    self.assertEqual(resource_names, [])
    self.assertEqual(projects, [self.PROJECT])
    self.assertEqual(filter_, '')
    self.assertEqual(order_by, '')
    self.assertEqual(page_size, SIZE)
    self.assertEqual(options.page_token, TOKEN)
def google_logger():
    logging_client = Client()
    return AppEngineHandler(client=logging_client)
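A hedged sketch of how a handler like the one returned above is typically attached to the standard logging module; setup_logging comes from google.cloud.logging.handlers, and the log message is illustrative only.

# Minimal wiring sketch (assumes google-cloud-logging's setup_logging helper).
import logging
from google.cloud.logging.handlers import setup_logging

handler = google_logger()
setup_logging(handler)  # route stdlib logging records through the handler
logging.getLogger().setLevel(logging.INFO)
logging.info("example message")  # forwarded to Stackdriver Logging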