def test_commit_w_alternate_client(self):
    """Committing with an explicit client routes writes through that
    client's logging API, not the batch's bound client."""
    import json

    from google.protobuf.json_format import MessageToJson
    from google.protobuf.struct_pb2 import Struct
    from google.protobuf.struct_pb2 import Value
    from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE
    from google.cloud.logging_v2.logger import Logger

    TEXT = "This is the entry text"
    STRUCT = {"message": TEXT, "weather": "partly cloudy"}
    message = Struct(fields={"foo": Value(bool_value=True)})
    DEFAULT_LABELS = {"foo": "spam"}
    LABELS = {"foo": "bar", "baz": "qux"}
    SEVERITY = "CRITICAL"
    REQUEST = {
        "requestMethod": "POST",
        "requestUrl": "https://api.example.com/endpoint",
        "status": "500",
    }

    resource_dict = _GLOBAL_RESOURCE._to_dict()
    ENTRIES = [
        {"textPayload": TEXT, "labels": LABELS, "resource": resource_dict},
        {"jsonPayload": STRUCT, "severity": SEVERITY, "resource": resource_dict},
        {
            "protoPayload": json.loads(MessageToJson(message)),
            "httpRequest": REQUEST,
            "resource": resource_dict,
        },
    ]

    client1 = _Client(project=self.PROJECT)
    client2 = _Client(project=self.PROJECT)
    api = client2.logging_api = _DummyLoggingAPI()
    logger = Logger("logger_name", client1, labels=DEFAULT_LABELS)
    batch = self._make_one(logger, client=client1)

    batch.log_text(TEXT, labels=LABELS)
    batch.log_struct(STRUCT, severity=SEVERITY)
    batch.log_proto(message, http_request=REQUEST)
    batch.commit(client=client2)

    # The batch is drained, and the alternate client's API received the
    # serialized entries plus the logger's default labels.
    self.assertEqual(list(batch.entries), [])
    self.assertEqual(
        api._write_entries_called_with,
        (ENTRIES, logger.full_name, None, DEFAULT_LABELS),
    )
def test_commit_w_resource_specified(self):
    """A batch-level resource is passed through on commit; an entry
    logged with resource=None carries no resource of its own, while a
    default entry falls back to the global resource."""
    from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE
    from google.cloud.logging_v2.resource import Resource

    logger = _Logger()
    client = _Client(project=self.PROJECT, connection=_make_credentials())
    api = client.logging_api = _DummyLoggingAPI()
    RESOURCE = Resource(
        type="gae_app",
        labels={"module_id": "default", "version_id": "test"},
    )
    batch = self._make_one(logger, client, resource=RESOURCE)
    MESSAGE = "This is the entry text"

    batch.log_text(MESSAGE, resource=None)
    batch.log_text(MESSAGE)
    batch.commit()

    ENTRIES = [
        {"textPayload": MESSAGE},
        {"textPayload": MESSAGE, "resource": _GLOBAL_RESOURCE._to_dict()},
    ]
    self.assertEqual(
        api._write_entries_called_with,
        (ENTRIES, logger.full_name, RESOURCE._to_dict(), None),
    )
def test_to_api_repr_defaults(self):
    """With only a JSON payload set, the API repr carries the log name,
    the payload, and the global resource."""
    from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE

    LOG_NAME = "test.log"
    JSON_PAYLOAD = {"key": "value"}
    entry = self._make_one(log_name=LOG_NAME, payload=JSON_PAYLOAD)
    self.assertEqual(
        entry.to_api_repr(),
        {
            "logName": LOG_NAME,
            "jsonPayload": JSON_PAYLOAD,
            "resource": _GLOBAL_RESOURCE._to_dict(),
        },
    )
def test_to_api_repr_defaults(self):
    """With only a text payload set, the API repr carries the log name,
    the text, and the global resource."""
    from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE

    LOG_NAME = "test.log"
    TEXT = "TESTING"
    entry = self._make_one(log_name=LOG_NAME, payload=TEXT)
    self.assertEqual(
        entry.to_api_repr(),
        {
            "logName": LOG_NAME,
            "textPayload": TEXT,
            "resource": _GLOBAL_RESOURCE._to_dict(),
        },
    )
def test_to_api_repr_struct(self):
    """A protobuf Struct payload is surfaced under "jsonPayload" in the
    API repr, alongside the global resource."""
    from google.protobuf.struct_pb2 import Struct, Value
    from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE

    LOG_NAME = "struct.log"
    message = Struct(fields={"foo": Value(bool_value=True)})
    entry = self._make_one(log_name=LOG_NAME, payload=message)
    self.assertEqual(
        entry.to_api_repr(),
        {
            "logName": LOG_NAME,
            "jsonPayload": message,
            "resource": _GLOBAL_RESOURCE._to_dict(),
        },
    )
def test_to_api_repr_proto_defaults(self):
    """A proto payload is serialized to its dict form under
    "protoPayload", alongside the global resource."""
    from google.protobuf.json_format import MessageToDict
    from google.protobuf.struct_pb2 import Struct
    from google.protobuf.struct_pb2 import Value
    from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE

    LOG_NAME = "test.log"
    message = Struct(fields={"foo": Value(bool_value=True)})
    entry = self._make_one(log_name=LOG_NAME, payload=message)
    self.assertEqual(
        entry.to_api_repr(),
        {
            "logName": LOG_NAME,
            "protoPayload": MessageToDict(message),
            "resource": _GLOBAL_RESOURCE._to_dict(),
        },
    )
def test_commit_w_unknown_entry_type(self):
    """A base LogEntry appended directly to the batch is still written
    on commit, with the global resource filled in."""
    from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE
    from google.cloud.logging_v2.entries import LogEntry

    logger = _Logger()
    client = _Client(project=self.PROJECT, connection=_make_credentials())
    api = client.logging_api = _DummyLoggingAPI()
    batch = self._make_one(logger, client)
    batch.entries.append(LogEntry(severity="blah"))
    expected_entry = {
        "severity": "blah",
        "resource": _GLOBAL_RESOURCE._to_dict(),
    }

    batch.commit()

    self.assertEqual(list(batch.entries), [])
    self.assertEqual(
        api._write_entries_called_with,
        ([expected_entry], logger.full_name, None, None),
    )
def test_to_api_repr_w_source_location_no_line(self):
    """A source location without a line number serializes with the line
    defaulted to the string "0"."""
    from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE

    LOG_NAME = "test.log"
    FILE = "my_file.py"
    FUNCTION = "my_function"
    entry = self._make_one(
        log_name=LOG_NAME,
        source_location={"file": FILE, "function": FUNCTION},
    )
    self.assertEqual(
        entry.to_api_repr(),
        {
            "logName": LOG_NAME,
            "resource": _GLOBAL_RESOURCE._to_dict(),
            "sourceLocation": {
                "file": FILE,
                "line": "0",
                "function": FUNCTION,
            },
        },
    )
def test_commit_w_bound_client(self):
    """Committing via the bound client preserves per-entry metadata
    (insert id, timestamp, trace, span id, sampling flag) for text,
    struct, and proto payloads."""
    import datetime
    import json

    from google.protobuf.json_format import MessageToJson
    from google.protobuf.struct_pb2 import Struct
    from google.protobuf.struct_pb2 import Value
    from google.cloud._helpers import _datetime_to_rfc3339
    from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE

    TEXT = "This is the entry text"
    STRUCT = {"message": TEXT, "weather": "partly cloudy"}
    message = Struct(fields={"foo": Value(bool_value=True)})

    # One metadata tuple per logged entry:
    # (insert_id, timestamp, trace, span_id, trace_sampled)
    METADATA = [
        (
            "IID1",
            datetime.datetime(2016, 12, 31, 0, 0, 1, 999999),
            "12345678-1234-5678-1234-567812345678",
            "000000000000004a",
            True,
        ),
        (
            "IID2",
            datetime.datetime(2016, 12, 31, 0, 0, 2, 999999),
            "12345678-1234-5678-1234-567812345679",
            "000000000000004b",
            False,
        ),
        (
            "IID3",
            datetime.datetime(2016, 12, 31, 0, 0, 3, 999999),
            "12345678-1234-5678-1234-567812345670",
            "000000000000004c",
            True,
        ),
    ]
    PAYLOAD_REPRS = [
        {"textPayload": TEXT},
        {"jsonPayload": STRUCT},
        {"protoPayload": json.loads(MessageToJson(message))},
    ]

    # Expected serialized entries: payload plus the shared metadata keys.
    ENTRIES = []
    for payload_repr, (iid, stamp, trace, span, sampled) in zip(
        PAYLOAD_REPRS, METADATA
    ):
        entry = dict(payload_repr)
        entry.update(
            {
                "insertId": iid,
                "timestamp": _datetime_to_rfc3339(stamp),
                "resource": _GLOBAL_RESOURCE._to_dict(),
                "trace": trace,
                "spanId": span,
                "traceSampled": sampled,
            }
        )
        ENTRIES.append(entry)

    client = _Client(project=self.PROJECT)
    api = client.logging_api = _DummyLoggingAPI()
    logger = _Logger()
    batch = self._make_one(logger, client=client)

    # Log one entry of each kind, carrying its own metadata.
    log_methods = [batch.log_text, batch.log_struct, batch.log_proto]
    payloads = [TEXT, STRUCT, message]
    for log, payload, (iid, stamp, trace, span, sampled) in zip(
        log_methods, payloads, METADATA
    ):
        log(
            payload,
            insert_id=iid,
            timestamp=stamp,
            trace=trace,
            span_id=span,
            trace_sampled=sampled,
        )
    batch.commit()

    self.assertEqual(list(batch.entries), [])
    self.assertEqual(
        api._write_entries_called_with,
        (ENTRIES, logger.full_name, None, None),
    )