def test_log_proto_w_explicit(self):
    """Verify ``log_proto()`` forwards every explicit argument to the write API."""
    import datetime
    import json

    from google.protobuf.json_format import MessageToJson
    from google.protobuf.struct_pb2 import Struct
    from google.protobuf.struct_pb2 import Value

    from google.cloud.logging import Resource

    message = Struct(fields={"foo": Value(bool_value=True)})
    alt_log_name = "projects/foo/logs/alt.log.name"
    default_labels = {"foo": "spam"}
    labels = {"foo": "bar", "baz": "qux"}
    insert_id = "IID"
    severity = "CRITICAL"
    request = {
        "requestMethod": "POST",
        "requestUrl": "https://api.example.com/endpoint",
        "status": "500",
    }
    trace = "12345678-1234-5678-1234-567812345678"
    span_id = "000000000000004a"
    timestamp = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999)
    resource = Resource(
        type="gae_app", labels={"module_id": "default", "version_id": "test"}
    )
    expected_entries = [
        {
            "logName": alt_log_name,
            "protoPayload": json.loads(MessageToJson(message)),
            "labels": labels,
            "insertId": insert_id,
            "severity": severity,
            "httpRequest": request,
            "timestamp": "2016-12-31T00:01:02.999999Z",
            "resource": resource._to_dict(),
            "trace": trace,
            "spanId": span_id,
            "traceSampled": True,
        }
    ]
    client1 = _Client(self.PROJECT)
    client2 = _Client(self.PROJECT)
    api = client2.logging_api = _DummyLoggingAPI()
    logger = self._make_one(
        self.LOGGER_NAME, client=client1, labels=default_labels
    )

    logger.log_proto(
        message,
        log_name=alt_log_name,
        client=client2,
        labels=labels,
        insert_id=insert_id,
        severity=severity,
        http_request=request,
        timestamp=timestamp,
        resource=resource,
        trace=trace,
        span_id=span_id,
        trace_sampled=True,
    )

    self.assertEqual(
        api._write_entries_called_with, (expected_entries, None, None, None)
    )
def test_commit_w_resource_specified(self):
    """A batch-level resource is passed through to ``write_entries`` on commit."""
    from google.cloud.logging import Resource
    from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE

    logger = _Logger()
    client = _Client(project=self.PROJECT, connection=_make_credentials())
    api = client.logging_api = _DummyLoggingAPI()
    batch_resource = Resource(
        type="gae_app", labels={"module_id": "default", "version_id": "test"}
    )
    batch = self._make_one(logger, client, resource=batch_resource)
    text = "This is the entry text"

    batch.log_text(text, resource=None)
    batch.log_text(text)
    batch.commit()

    expected_entries = [
        {"textPayload": text},
        {"textPayload": text, "resource": _GLOBAL_RESOURCE._to_dict()},
    ]
    self.assertEqual(
        api._write_entries_called_with,
        (expected_entries, logger.full_name, batch_resource._to_dict(), None),
    )
def test_ctor_explicit(self):
    """All explicit constructor arguments are stored on the handler."""
    import io

    from google.cloud.logging import Resource

    resource = Resource("resource_type", {"resource_label": "value"})
    # Fixed fixture typo: "handler_lable" -> "handler_label", consistent
    # with the other handler tests in this suite.
    labels = {"handler_label": "value"}
    name = "test-logger"
    client = _Client(self.PROJECT)
    stream = io.BytesIO()

    handler = self._make_one(
        client,
        name=name,
        transport=_Transport,
        resource=resource,
        labels=labels,
        stream=stream,
    )

    self.assertEqual(handler.name, name)
    self.assertIs(handler.client, client)
    self.assertIsInstance(handler.transport, _Transport)
    self.assertIs(handler.transport.client, client)
    self.assertEqual(handler.transport.name, name)
    self.assertIs(handler.resource, resource)
    self.assertEqual(handler.labels, labels)
    self.assertIs(handler.stream, stream)
def test_get_default_handler_general(self):
    """``get_default_handler()`` builds a CloudLoggingHandler from its options."""
    import io

    from google.cloud.logging import Resource
    from google.cloud.logging.handlers import CloudLoggingHandler

    name = "test-logger"
    resource = Resource("resource_type", {"resource_label": "value"})
    labels = {"handler_label": "value"}
    stream = io.BytesIO()
    credentials = _make_credentials()
    client = self._make_one(
        project=self.PROJECT, credentials=credentials, _use_grpc=False
    )

    handler = client.get_default_handler(
        name=name, resource=resource, labels=labels, stream=stream
    )
    # Stop the background worker so the test does not leak a thread.
    handler.transport.worker.stop()

    self.assertIsInstance(handler, CloudLoggingHandler)
    self.assertEqual(handler.name, name)
    self.assertEqual(handler.resource, resource)
    self.assertEqual(handler.labels, labels)
def test_emit_manual_field_override(self):
    """Attributes set manually on the record override the handler defaults."""
    from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE
    from google.cloud.logging_v2.resource import Resource

    client = _Client(self.PROJECT)
    default_labels = {
        "default_key": "default-value",
        "overwritten_key": "bad_value",
    }
    handler = self._make_one(
        client,
        transport=_Transport,
        resource=_GLOBAL_RESOURCE,
        labels=default_labels,
    )
    logname = "loggername"
    message = "hello world"
    record = logging.LogRecord(
        logname, logging.INFO, None, None, message, None, None
    )

    # Set the override attributes manually, as user code would.
    expected_trace = "123"
    record.trace = expected_trace
    expected_span = "456"
    record.span_id = expected_span
    expected_sampled = True
    record.trace_sampled = expected_sampled
    # Fixed fixture typo: "reuqest_url" -> "request_url".  The same dict
    # object is used on both sides of the assertion, so the test semantics
    # are unchanged.
    expected_http = {"request_url": "manual"}
    record.http_request = expected_http
    expected_source = {"file": "test-file"}
    record.source_location = expected_source
    expected_resource = Resource(type="test", labels={})
    record.resource = expected_resource
    added_labels = {"added_key": "added_value", "overwritten_key": "new_value"}
    # Handler defaults merged with record labels; record values win, and
    # the python_logger label is added automatically.
    expected_labels = {
        "default_key": "default-value",
        "overwritten_key": "new_value",
        "added_key": "added_value",
        "python_logger": logname,
    }
    record.labels = added_labels

    handler.handle(record)

    self.assertEqual(
        handler.transport.send_called_with,
        (
            record,
            message,
            expected_resource,
            expected_labels,
            expected_trace,
            expected_span,
            expected_sampled,
            expected_http,
            expected_source,
        ),
    )
def get_client_and_log_resource():
    """Create a Cloud Logging client plus a service-account log Resource.

    Returns:
        tuple: ``(client, resource)`` where ``resource`` is a
        ``service_account`` Resource labeled with the client's project id.
    """
    client = gcp_logging.Client()  # .from_service_account_json(path_credentials)
    client.setup_logging()
    log_resource = Resource(
        type='service_account',
        labels={"project_id": client.project},
    )
    return client, log_resource
def test_log_proto_explicit(self):
    """``log_proto()`` with all options queues an equivalent ProtobufEntry."""
    import datetime

    from google.protobuf.struct_pb2 import Struct
    from google.protobuf.struct_pb2 import Value

    from google.cloud.logging import ProtobufEntry
    from google.cloud.logging import Resource

    message = Struct(fields={"foo": Value(bool_value=True)})
    labels = {"foo": "bar", "baz": "qux"}
    insert_id = "IID"
    severity = "CRITICAL"
    request = {
        "requestMethod": "POST",
        "requestUrl": "https://api.example.com/endpoint",
        "status": "500",
    }
    trace = "12345678-1234-5678-1234-567812345678"
    span_id = "000000000000004a"
    timestamp = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999)
    resource = Resource(
        type="gae_app", labels={"module_id": "default", "version_id": "test"}
    )
    expected_entry = ProtobufEntry(
        payload=message,
        labels=labels,
        insert_id=insert_id,
        severity=severity,
        http_request=request,
        timestamp=timestamp,
        resource=resource,
        trace=trace,
        span_id=span_id,
        trace_sampled=True,
    )
    client = _Client(project=self.PROJECT, connection=_make_credentials())
    logger = _Logger()
    batch = self._make_one(logger, client=client)

    batch.log_proto(
        message,
        labels=labels,
        insert_id=insert_id,
        severity=severity,
        http_request=request,
        timestamp=timestamp,
        resource=resource,
        trace=trace,
        span_id=span_id,
        trace_sampled=True,
    )

    self.assertEqual(batch.entries, [expected_entry])
def logger_usage(client, to_delete):
    """Logger usage.

    Documentation sample: exercises logger creation, text/struct logging,
    resource-tagged logging, entry listing, and deletion.  The
    ``[START ...]``/``[END ...]`` comments are region tags consumed by the
    docs pipeline — do not move or rename them.

    Args:
        client: a Cloud Logging client used to create the logger.
        to_delete: list collecting resources to clean up after the sample.
    """
    # Unique log name per run so repeated sample executions don't collide.
    log_name = "logger_usage_%d" % (_millis())

    # [START logger_create]
    logger = client.logger(log_name)
    # [END logger_create]
    to_delete.append(logger)

    # [START logger_log_text]
    logger.log_text("A simple entry")  # API call
    # [END logger_log_text]

    # [START logger_log_struct]
    logger.log_struct({
        "message": "My second entry",
        "weather": "partly cloudy"
    })  # API call
    # [END logger_log_struct]

    # [START logger_log_resource_text]
    from google.cloud.logging import Resource

    res = Resource(
        type="generic_node",
        labels={
            "location": "us-central1-a",
            "namespace": "default",
            "node_id": "10.10.10.1",
        },
    )
    logger.log_struct({
        "message": "My first entry",
        "weather": "partly cloudy"
    }, resource=res)
    # [END logger_log_resource_text]

    # [START logger_list_entries]
    from google.cloud.logging import DESCENDING

    for entry in logger.list_entries(order_by=DESCENDING):  # API call(s)
        do_something_with(entry)
    # [END logger_list_entries]

    def _logger_delete():
        # [START logger_delete]
        logger.delete()  # API call
        # [END logger_delete]

    # Deletion may race the service; retry until the logger is gone.
    _backoff_not_found(_logger_delete)
    to_delete.remove(logger)
def get_handler(self):
    """Build a CloudLoggingHandler targeting this policy's log group."""
    # TODO drop these grpc variants for the REST versions, and we can drop
    # protobuf/grpc deps, and also so we can record tests.
    log_group = self.get_log_group()
    project_id = local_session(self.ctx.session_factory).get_default_project()
    policy = self.ctx.policy
    return CloudLoggingHandler(
        LogClient(project_id),
        log_group,
        labels={
            'policy': policy.name,
            'resource': policy.resource_type,
        },
        resource=Resource(type='project', labels={'project_id': project_id}),
    )
def __init__(self, child_log_name='application', trace_header_name=None,
             labels=None, resource=None):
    """Initialize the handler.

    Args:
        child_log_name: name of the child log entries are sent to.
        trace_header_name: HTTP header carrying the trace id, if any.
        labels: extra labels attached to every entry, or ``None``.
        resource: mapping with ``'type'`` and ``'labels'`` keys describing
            the monitored resource; defaults to the global resource.
    """
    logging.Handler.__init__(self)
    self.labels = labels
    self.trace_header_name = trace_header_name
    if resource is None:
        self.resource = _GLOBAL_RESOURCE
    else:
        self.resource = Resource(
            type=resource['type'], labels=resource['labels'])
    # `client` is the module-level Cloud Logging client.
    self.transport_child = BackgroundThreadTransport(client, child_log_name)
    self.mLogLevels = []
def test_setup_logging_w_extra_kwargs(self):
    """``setup_logging()`` forwards handler options and default kwargs."""
    import io

    from google.cloud.logging import Resource
    from google.cloud.logging.handlers import CloudLoggingHandler

    name = "test-logger"
    resource = Resource("resource_type", {"resource_label": "value"})
    labels = {"handler_label": "value"}
    stream = io.BytesIO()
    credentials = _make_credentials()
    client = self._make_one(
        project=self.PROJECT, credentials=credentials, _use_grpc=False
    )

    with mock.patch("google.cloud.logging_v2.client.setup_logging") as mocked:
        client.setup_logging(
            name=name, resource=resource, labels=labels, stream=stream
        )

    self.assertEqual(len(mocked.mock_calls), 1)
    _, args, kwargs = mocked.mock_calls[0]
    handler, = args
    self.assertIsInstance(handler, CloudLoggingHandler)
    self.assertEqual(handler.name, name)
    self.assertEqual(handler.resource, resource)
    self.assertEqual(handler.labels, labels)
    # Stop the background worker so the test does not leak a thread.
    handler.transport.worker.stop()

    expected_kwargs = {
        "excluded_loggers": (
            "google.cloud",
            "google.auth",
            "google_auth_httplib2",
            "google.api_core.bidi",
            "werkzeug",
        ),
        "log_level": 20,
    }
    self.assertEqual(kwargs, expected_kwargs)
def test_should_pass_message_to_client(self, mock_client, mock_get_creds_and_project_id):
    """Messages routed through the handler reach the transport unchanged."""
    self.addCleanup(_remove_stackdriver_handlers)
    mock_get_creds_and_project_id.return_value = ('creds', 'project_id')

    transport_type = mock.MagicMock()
    handler = StackdriverTaskHandler(
        transport=transport_type, labels={"key": 'value'}
    )
    logger = logging.getLogger("logger")
    logger.addHandler(handler)

    logger.info("test-message")
    handler.flush()

    transport_type.assert_called_once_with(mock_client.return_value, 'airflow')
    transport_type.return_value.send.assert_called_once_with(
        mock.ANY,
        'test-message',
        labels={"key": 'value'},
        resource=Resource(type='global', labels={}),
    )
    mock_client.assert_called_once_with(
        credentials='creds', client_info=mock.ANY, project="project_id"
    )
def test_should_set_labels(self, mock_client, mock_get_creds_and_project_id):
    """Task-instance metadata is attached to each entry as labels."""
    mock_get_creds_and_project_id.return_value = ('creds', 'project_id')

    self.stackdriver_task_handler.set_context(self.ti)
    self.logger.addHandler(self.stackdriver_task_handler)

    self.logger.info("test-message")
    self.stackdriver_task_handler.flush()

    expected_labels = {
        'task_id': 'task_for_testing_file_log_handler',
        'dag_id': 'dag_for_testing_file_task_handler',
        'execution_date': '2016-01-01T00:00:00+00:00',
        'try_number': '1',
    }
    self.transport_mock.return_value.send.assert_called_once_with(
        mock.ANY,
        'test-message',
        labels=expected_labels,
        resource=Resource(type='global', labels={}),
    )
def test_should_read_logs_with_custom_resources(self, mock_client, mock_get_creds_and_project_id):
    """A custom resource's labels are reflected in the list_log_entries filter."""
    mock_get_creds_and_project_id.return_value = ('creds', 'project_id')
    # Fixed fixture typos ("test-instancce" -> "test-instance",
    # "europpe-west-3" -> "europe-west-3"); the values appear both in the
    # resource and in the expected filter below, so the test still passes.
    resource = Resource(
        type="cloud_composer_environment",
        labels={
            "environment.name": 'test-instance',
            "location": 'europe-west-3',
            "project_id": "project_id",
        },
    )
    self.stackdriver_task_handler = StackdriverTaskHandler(
        transport=self.transport_mock, resource=resource
    )

    entry = mock.MagicMock(json_payload={"message": "TEXT"})
    page = mock.MagicMock(entries=[entry, entry], next_page_token=None)
    mock_client.return_value.list_log_entries.return_value.pages = (n for n in [page])

    logs, metadata = self.stackdriver_task_handler.read(self.ti)

    mock_client.return_value.list_log_entries.assert_called_once_with(
        request=ListLogEntriesRequest(
            resource_names=["projects/project_id"],
            filter=(
                'resource.type="cloud_composer_environment"\n'
                'logName="projects/project_id/logs/airflow"\n'
                'resource.labels."environment.name"="test-instance"\n'
                'resource.labels.location="europe-west-3"\n'
                'resource.labels.project_id="project_id"\n'
                'labels.task_id="task_for_testing_file_log_handler"\n'
                'labels.dag_id="dag_for_testing_file_task_handler"\n'
                'labels.execution_date="2016-01-01T00:00:00+00:00"'
            ),
            order_by='timestamp asc',
            page_size=1000,
            page_token=None,
        )
    )
    assert [(('default-hostname', 'TEXT\nTEXT'),)] == logs
    assert [{'end_of_log': True}] == metadata
def test_emit_manual_field_override(self):
    """Record attributes set manually override the handler defaults."""
    from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE
    from google.cloud.logging_v2.resource import Resource

    client = _Client(self.PROJECT)
    handler = self._make_one(
        client, transport=_Transport, resource=_GLOBAL_RESOURCE
    )
    logname = "loggername"
    message = "hello world"
    # Fixed: LogRecord's second positional argument is the numeric level;
    # the original passed the `logging` module itself instead of
    # `logging.INFO` (compare the sibling test that gets this right).
    record = logging.LogRecord(
        logname, logging.INFO, None, None, message, None, None
    )

    # Set the override attributes manually, as user code would.
    expected_trace = "123"
    record.trace = expected_trace
    expected_span = "456"
    record.span_id = expected_span
    # Fixed fixture typo: "reuqest_url" -> "request_url".  The same dict
    # object is used on both sides of the assertion, so semantics are
    # unchanged.
    expected_http = {"request_url": "manual"}
    record.http_request = expected_http
    expected_resource = Resource(type="test", labels={})
    record.resource = expected_resource
    expected_labels = {"test-label": "manual"}
    record.labels = expected_labels

    handler.emit(record)

    self.assertEqual(
        handler.transport.send_called_with,
        (
            record,
            message,
            expected_resource,
            expected_labels,
            expected_trace,
            expected_span,
            expected_http,
        ),
    )
def test_to_api_repr_proto_explicit(self):
    """``to_api_repr()`` serializes every explicitly-set entry field."""
    import datetime

    from google.protobuf.json_format import MessageToDict
    from google.protobuf.struct_pb2 import Struct
    from google.protobuf.struct_pb2 import Value

    from google.cloud._helpers import _datetime_to_rfc3339
    from google.cloud.logging import Resource

    log_name = "test.log"
    message = Struct(fields={"foo": Value(bool_value=True)})
    labels = {"foo": "bar", "baz": "qux"}
    insert_id = "IID"
    severity = "CRITICAL"
    request = {
        "requestMethod": "POST",
        "requestUrl": "https://api.example.com/endpoint",
        "status": "500",
    }
    timestamp = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999)
    resource = Resource(
        type="gae_app", labels={"module_id": "default", "version_id": "test"}
    )
    trace = "12345678-1234-5678-1234-567812345678"
    span_id = "000000000000004a"
    source_location = {"file": "my_file.py", "line": 123, "function": "my_function"}
    operation = {"id": "OP_ID", "producer": "PRODUCER", "first": True, "last": False}

    entry = self._make_one(
        log_name=log_name,
        payload=message,
        labels=labels,
        insert_id=insert_id,
        severity=severity,
        http_request=request,
        timestamp=timestamp,
        resource=resource,
        trace=trace,
        span_id=span_id,
        trace_sampled=True,
        source_location=source_location,
        operation=operation,
    )

    expected = {
        "logName": log_name,
        "protoPayload": MessageToDict(message),
        "labels": labels,
        "insertId": insert_id,
        "severity": severity,
        "httpRequest": request,
        "timestamp": _datetime_to_rfc3339(timestamp),
        "resource": resource._to_dict(),
        "trace": trace,
        "spanId": span_id,
        "traceSampled": True,
        # The source-location line number is serialized as a string.
        "sourceLocation": {
            "file": "my_file.py",
            "line": "123",
            "function": "my_function",
        },
        "operation": operation,
    }
    self.assertEqual(entry.to_api_repr(), expected)
def test_from_api_repr_w_loggers_no_logger_match(self):
    """``from_api_repr()`` creates and caches a logger when none matches."""
    from datetime import datetime

    from google.cloud._helpers import UTC
    from google.cloud.logging import Resource

    klass = self._get_target_class()
    client = _Client(self.PROJECT)
    now = datetime.utcnow().replace(tzinfo=UTC)
    timestamp = _datetime_to_rfc3339_w_nanos(now)
    log_name = "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME)
    labels = {"foo": "bar", "baz": "qux"}
    method = "POST"
    uri = "https://api.example.com/endpoint"
    status = "500"
    # NOTE(review): this Resource nests "type"/"labels" keys inside its own
    # labels dict — unusual, but the round-trip assertion below is
    # self-consistent either way.
    resource = Resource(
        type="gae_app",
        labels={
            "type": "gae_app",
            "labels": {"module_id": "default", "version": "test"},
        },
    )
    trace = "12345678-1234-5678-1234-567812345678"
    span_id = "000000000000004a"
    source_location = {"file": "my_file.py", "line": "123", "function": "my_function"}
    operation = {"id": "OP_ID", "producer": "PRODUCER", "first": True, "last": False}
    api_repr = {
        "logName": log_name,
        "insertId": "IID",
        "timestamp": timestamp,
        "labels": labels,
        "severity": "CRITICAL",
        "httpRequest": {
            "requestMethod": method,
            "requestUrl": uri,
            "status": status,
        },
        "resource": resource._to_dict(),
        "trace": trace,
        "spanId": span_id,
        "traceSampled": True,
        "sourceLocation": source_location,
        "operation": operation,
    }
    loggers = {}

    entry = klass.from_api_repr(api_repr, client, loggers=loggers)

    self.assertEqual(entry.log_name, log_name)
    logger = entry.logger
    self.assertIsInstance(logger, _Logger)
    self.assertEqual(logger.name, self.LOGGER_NAME)
    self.assertEqual(entry.insert_id, "IID")
    self.assertEqual(entry.timestamp, now)
    self.assertIsNone(entry.received_timestamp)
    self.assertEqual(entry.labels, labels)
    self.assertEqual(entry.severity, "CRITICAL")
    self.assertEqual(entry.http_request["requestMethod"], method)
    self.assertEqual(entry.http_request["requestUrl"], uri)
    self.assertEqual(entry.http_request["status"], status)
    self.assertIs(logger.client, client)
    self.assertEqual(logger.name, self.LOGGER_NAME)
    # The freshly-created logger is cached under its full log name.
    self.assertEqual(loggers, {log_name: logger})
    self.assertEqual(entry.resource, resource)
    self.assertEqual(entry.trace, trace)
    self.assertEqual(entry.span_id, span_id)
    self.assertTrue(entry.trace_sampled)
    source = entry.source_location
    self.assertEqual(source["file"], "my_file.py")
    # The "line" value is deserialized back to an int.
    self.assertEqual(source["line"], 123)
    self.assertEqual(source["function"], "my_function")
    self.assertEqual(entry.operation, operation)
    self.assertIsNone(entry.payload)
def test_ctor_explicit(self):
    """All explicit constructor arguments end up on the entry."""
    import datetime

    from google.cloud.logging import Resource

    log_name = "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME)
    insert_id = "IID"
    timestamp = datetime.datetime.now()
    labels = {"foo": "bar", "baz": "qux"}
    severity = "CRITICAL"
    method = "POST"
    uri = "https://api.example.com/endpoint"
    status = "500"
    request = {"requestMethod": method, "requestUrl": uri, "status": status}
    resource = Resource(type="global", labels={})
    trace = "12345678-1234-5678-1234-567812345678"
    span_id = "000000000000004a"
    source_location = {"file": "my_file.py", "line": 123, "function": "my_function"}
    operation = {"id": "OP_ID", "producer": "PRODUCER", "first": True, "last": False}
    logger = _Logger(self.LOGGER_NAME, self.PROJECT)

    entry = self._make_one(
        log_name=log_name,
        logger=logger,
        insert_id=insert_id,
        timestamp=timestamp,
        labels=labels,
        severity=severity,
        http_request=request,
        resource=resource,
        trace=trace,
        span_id=span_id,
        trace_sampled=True,
        source_location=source_location,
        operation=operation,
    )

    self.assertEqual(entry.log_name, log_name)
    self.assertIs(entry.logger, logger)
    self.assertEqual(entry.insert_id, insert_id)
    self.assertEqual(entry.timestamp, timestamp)
    self.assertEqual(entry.labels, labels)
    self.assertEqual(entry.severity, severity)
    self.assertEqual(entry.http_request["requestMethod"], method)
    self.assertEqual(entry.http_request["requestUrl"], uri)
    self.assertEqual(entry.http_request["status"], status)
    self.assertEqual(entry.resource, resource)
    self.assertEqual(entry.trace, trace)
    self.assertEqual(entry.span_id, span_id)
    self.assertTrue(entry.trace_sampled)
    source = entry.source_location
    self.assertEqual(source["file"], "my_file.py")
    self.assertEqual(source["line"], 123)
    self.assertEqual(source["function"], "my_function")
    self.assertEqual(entry.operation, operation)
    self.assertIsNone(entry.payload)
import logging
import google.cloud.logging
from google.cloud.logging import Resource
from google.cloud.logging.handlers import CloudLoggingHandler
from google.cloud.logging_v2.handlers.handlers import EXCLUDED_LOGGER_DEFAULTS
from viur.core.utils import currentRequest, projectID, appVersion, isLocalDevelopmentServer

# Module-level Cloud Logging client shared by the handlers below.
client = google.cloud.logging.Client()

# Monitored-resource descriptor used for request logging; the version label
# is pinned to "dev_appserver" when running on the local development server.
requestLoggingRessource = Resource(
    type="gae_app",
    labels={
        "project_id": projectID,
        "module_id": "default",
        "version_id": appVersion if not isLocalDevelopmentServer else "dev_appserver",
    })

requestLogger = client.logger("ViUR")


class ViURDefaultLogger(CloudLoggingHandler):
    # NOTE(review): this chunk is truncated mid-method; the remainder of
    # emit() (and the rest of the module) is not visible here.
    def emit(self, record):
        message = super(ViURDefaultLogger, self).format(record)
        try:
            currentReq = currentRequest.get()
            # Trace id format expected by Cloud Logging:
            # "projects/<project>/traces/<trace-id>".
            TRACE = "projects/{}/traces/{}".format(client.project, currentReq._traceID)
# Copyright 2018 Google Inc. All rights reserved. # Use of this source code is governed by the Apache 2.0 # license that can be found in the LICENSE file. import datetime import logging import os from flask import has_request_context from flask import request from google.cloud import logging as gcplogging from google.cloud.logging import Resource from flask_gcp_log_groups.background_thread import BackgroundThreadTransport _GLOBAL_RESOURCE = Resource(type='global', labels={}) logger = logging.getLogger(__name__) client = gcplogging.Client() class GCPHandler(logging.Handler): def __init__(self, child_log_name='application', trace_header_name=None, labels=None, resource=None): logging.Handler.__init__(self) self.labels = labels self.trace_header_name = trace_header_name if resource is None:
from cached_property import cached_property
from google.api_core.gapic_v1.client_info import ClientInfo
from google.auth.credentials import Credentials
from google.cloud import logging as gcp_logging
from google.cloud.logging import Resource
from google.cloud.logging.handlers.transports import BackgroundThreadTransport, Transport
from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2Client
from google.cloud.logging_v2.types import ListLogEntriesRequest, ListLogEntriesResponse

from airflow import version
from airflow.models import TaskInstance
from airflow.providers.google.cloud.utils.credentials_provider import get_credentials_and_project_id

# Log name used when none is configured explicitly.
DEFAULT_LOGGER_NAME = "airflow"

# Monitored resource applied to entries when no custom resource is given.
_GLOBAL_RESOURCE = Resource(type="global", labels={})

# NOTE(review): name carries a double-S typo ("SCOPESS"); left unchanged
# because other modules may import it by this name.
_DEFAULT_SCOPESS = frozenset(
    ["https://www.googleapis.com/auth/logging.read", "https://www.googleapis.com/auth/logging.write"]
)


# NOTE(review): this chunk is truncated inside the class docstring; the
# rest of the class is not visible here.
class StackdriverTaskHandler(logging.Handler):
    """Handler that directly makes Stackdriver logging API calls.

    This is a Python standard ``logging`` handler using that can be used to
    route Python standard logging messages directly to the Stackdriver
    Logging API.

    It can also be used to save logs for executing tasks. To do this, you should set as a handler with
    the name "tasks". In this case, it will also be used to read the log for display in Web UI.
import logging
import google.cloud.logging
from google.cloud.logging import Resource
from google.cloud.logging.handlers import CloudLoggingHandler
from google.cloud.logging_v2.handlers.handlers import EXCLUDED_LOGGER_DEFAULTS
from viur.core.utils import currentRequest, projectID

# Module-level Cloud Logging client shared by the handlers below.
client = google.cloud.logging.Client()

# Monitored-resource descriptor used for request logging.
requestLoggingRessource = Resource(type="gae_app", labels={
    "project_id": projectID,
    "module_id": "default",
})

requestLogger = client.logger("ViUR")


class ViURDefaultLogger(CloudLoggingHandler):
    # NOTE(review): this chunk is truncated mid-call below; the remaining
    # arguments to transport.send are not visible here.
    def emit(self, record):
        message = super(ViURDefaultLogger, self).format(record)
        try:
            currentReq = currentRequest.get()
            # Trace id format expected by Cloud Logging:
            # "projects/<project>/traces/<trace-id>".
            TRACE = "projects/{}/traces/{}".format(client.project, currentReq._traceID)
            # Track the most severe level seen during this request.
            currentReq.maxLogLevel = max(currentReq.maxLogLevel, record.levelno)
        except:
            TRACE = None
        self.transport.send(
            record,