def test_commit_w_resource_specified(self):
    """commit() should pass the batch-level resource to write_entries while
    per-entry resource overrides are kept on the individual entries."""
    from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE
    from google.cloud.logging_v2.resource import Resource

    logger = _Logger()
    client = _Client(project=self.PROJECT, connection=_make_credentials())
    api = client.logging_api = _DummyLoggingAPI()
    RESOURCE = Resource(type="gae_app", labels={
        "module_id": "default",
        "version_id": "test"
    })
    batch = self._make_one(logger, client, resource=RESOURCE)
    MESSAGE = "This is the entry text"
    # first entry carries no resource field (inherits the batch resource);
    # second keeps its own explicit _GLOBAL_RESOURCE
    ENTRIES = [
        {
            "textPayload": MESSAGE
        },
        {
            "textPayload": MESSAGE,
            "resource": _GLOBAL_RESOURCE._to_dict()
        },
    ]
    # resource=None -> defer to the batch-level resource at commit time
    batch.log_text(MESSAGE, resource=None)
    # no resource kwarg -> presumably the default _GLOBAL_RESOURCE is attached
    batch.log_text(MESSAGE)
    batch.commit()
    self.assertEqual(
        api._write_entries_called_with,
        (ENTRIES, logger.full_name, RESOURCE._to_dict(), None),
    )
def test_log_empty_w_explicit(self):
    """log_empty() with every optional field set should emit a fully
    populated API entry and route the write through the explicit client."""
    import datetime
    from google.cloud.logging_v2.resource import Resource

    ALT_LOG_NAME = "projects/foo/logs/alt.log.name"
    DEFAULT_LABELS = {"foo": "spam"}
    LABELS = {"foo": "bar", "baz": "qux"}
    IID = "IID"
    SEVERITY = "CRITICAL"
    METHOD = "POST"
    URI = "https://api.example.com/endpoint"
    STATUS = "500"
    TRACE = "12345678-1234-5678-1234-567812345678"
    SPANID = "000000000000004a"
    REQUEST = {
        "requestMethod": METHOD,
        "requestUrl": URI,
        "status": STATUS
    }
    TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999)
    RESOURCE = Resource(type="gae_app", labels={
        "module_id": "default",
        "version_id": "test"
    })
    ENTRIES = [{
        "logName": ALT_LOG_NAME,
        "labels": LABELS,
        "insertId": IID,
        "severity": SEVERITY,
        "httpRequest": REQUEST,
        "timestamp": "2016-12-31T00:01:02.999999Z",
        "resource": RESOURCE._to_dict(),
        "trace": TRACE,
        "spanId": SPANID,
        "traceSampled": True,
    }]
    client1 = _Client(self.PROJECT)
    client2 = _Client(self.PROJECT)
    # the write must go through client2's API, not the logger's own client1
    api = client2.logging_api = _DummyLoggingAPI()
    logger = self._make_one(self.LOGGER_NAME, client=client1, labels=DEFAULT_LABELS)
    logger.log_empty(
        log_name=ALT_LOG_NAME,
        client=client2,
        labels=LABELS,
        insert_id=IID,
        severity=SEVERITY,
        http_request=REQUEST,
        timestamp=TIMESTAMP,
        resource=RESOURCE,
        trace=TRACE,
        span_id=SPANID,
        trace_sampled=True,
    )
    self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None))
def test_ctor_explicit(self):
    """Handler constructor should store every explicitly-passed collaborator."""
    import io
    from google.cloud.logging_v2.resource import Resource

    resource = Resource("resource_type", {"resource_label": "value"})
    labels = {"handler_lable": "value"}
    name = "test-logger"
    client = _Client(self.PROJECT)
    stream = io.BytesIO()
    handler = self._make_one(
        client,
        name=name,
        transport=_Transport,
        resource=resource,
        labels=labels,
        stream=stream,
    )
    self.assertEqual(handler.name, name)
    self.assertIs(handler.client, client)
    self.assertIsInstance(handler.transport, _Transport)
    self.assertIs(handler.transport.client, client)
    self.assertEqual(handler.transport.name, name)
    # resource/labels/stream must be kept by identity, not copied
    self.assertIs(handler.resource, resource)
    self.assertEqual(handler.labels, labels)
    self.assertIs(handler.stream, stream)
def test_get_default_handler_general(self):
    """get_default_handler() should build a CloudLoggingHandler wired with
    the given name, resource, labels and stream."""
    import io
    from google.cloud.logging_v2.handlers import CloudLoggingHandler
    from google.cloud.logging_v2.resource import Resource

    name = "test-logger"
    resource = Resource("resource_type", {"resource_label": "value"})
    labels = {"handler_label": "value"}
    stream = io.BytesIO()
    credentials = _make_credentials()
    client = self._make_one(project=self.PROJECT, credentials=credentials, _use_grpc=False)
    handler = client.get_default_handler(name=name, resource=resource, labels=labels, stream=stream)
    # stop the background transport worker so the test does not leak threads
    handler.transport.worker.stop()
    self.assertIsInstance(handler, CloudLoggingHandler)
    self.assertEqual(handler.name, name)
    self.assertEqual(handler.resource, resource)
    self.assertEqual(handler.labels, labels)
def test_handlers_w_extras(self):
    """System test: extras passed through the stdlib ``extra`` dict (trace,
    span_id, http_request, source_location, resource, labels) must survive
    the handler/transport round trip for both handler classes."""
    LOG_MESSAGE = "Testing with injected extras."
    for cls in [CloudLoggingHandler, AppEngineHandler]:
        LOGGER_NAME = f"{cls.__name__}-handler_extras"
        handler_name = self._logger_name(LOGGER_NAME)
        handler = cls(Config.CLIENT, name=handler_name, transport=SyncTransport)
        # only create the logger to delete, hidden otherwise
        logger = Config.CLIENT.logger(handler.name)
        self.to_delete.append(logger)
        cloud_logger = logging.getLogger(LOGGER_NAME)
        cloud_logger.addHandler(handler)
        expected_request = {"requestUrl": "localhost"}
        expected_source = {"file": "test.py"}
        extra = {
            "trace": "123",
            "span_id": "456",
            "http_request": expected_request,
            "source_location": expected_source,
            "resource": Resource(type="cloudiot_device", labels={}),
            "labels": {"test-label": "manual"},
        }
        # ``Logger.warn`` is a deprecated alias; use ``warning`` instead
        cloud_logger.warning(LOG_MESSAGE, extra=extra)
        entries = _list_entries(logger)
        self.assertEqual(len(entries), 1)
        self.assertEqual(entries[0].trace, extra["trace"])
        self.assertEqual(entries[0].span_id, extra["span_id"])
        self.assertEqual(entries[0].http_request, expected_request)
        self.assertEqual(entries[0].labels, extra["labels"])
        self.assertEqual(entries[0].resource.type, extra["resource"].type)
def _new_log_factory(*args, **kwargs):
    """Log-record factory wrapper that enriches each record with Cloud
    Functions context: a ``cloud_function`` resource, plus trace/span ids
    and execution-id labels when a request context is active."""
    # start with the default record
    record = _old_log_factory(*args, **kwargs)
    # add the static context
    project_id = _LOGGER_FUNCTION_CONTEXT.get('project_id', None)
    region = _LOGGER_FUNCTION_CONTEXT.get('function_region', None)
    function_name = _LOGGER_FUNCTION_CONTEXT.get('function_name', None)
    resource = Resource(type="cloud_function", labels={
        'project_id': project_id,
        'region': region,
        'function_name': function_name
    })
    setattr(record, 'resource', resource)
    # add the request context, if any
    trace = getattr(_LOGGER_REQUEST_CONTEXT, 'trace', None)
    if trace:
        setattr(record, 'trace', trace)
    span_id = getattr(_LOGGER_REQUEST_CONTEXT, 'span_id', None)
    if span_id:
        setattr(record, 'span_id', span_id)
    execution_id = getattr(_LOGGER_REQUEST_CONTEXT, 'execution_id', None)
    if execution_id:
        # NOTE(review): the execution_id dict is only the *default* here —
        # if the function context already has 'labels', execution_id is NOT
        # merged into it.  Confirm this is intentional.
        labels = _LOGGER_FUNCTION_CONTEXT.get('labels', {'execution_id': execution_id})
        setattr(record, 'labels', labels)
    return record
def __get_resource(self):
    """Build the ``cloud_run_revision`` monitored resource for this handler."""
    labels = {
        "project_id": self.project_id,
        "service_name": self.service_name,
        "location": self.region,
    }
    return Resource(type="cloud_run_revision", labels=labels)
def _create_global_resource(project):
    """Create a global resource.

    Args:
        project (str): The project ID to pass on to the resource

    Returns:
        google.cloud.logging.Resource
    """
    labels = {"project_id": project}
    return Resource(type="global", labels=labels)
def test_log_proto_explicit(self):
    """log_proto() with all optional fields should queue an equivalent
    ProtobufEntry on the batch (no API call yet)."""
    import datetime
    from google.cloud.logging_v2.resource import Resource
    from google.cloud.logging_v2.entries import ProtobufEntry
    from google.protobuf.struct_pb2 import Struct
    from google.protobuf.struct_pb2 import Value

    message = Struct(fields={"foo": Value(bool_value=True)})
    LABELS = {"foo": "bar", "baz": "qux"}
    IID = "IID"
    SEVERITY = "CRITICAL"
    METHOD = "POST"
    URI = "https://api.example.com/endpoint"
    STATUS = "500"
    TRACE = "12345678-1234-5678-1234-567812345678"
    SPANID = "000000000000004a"
    REQUEST = {
        "requestMethod": METHOD,
        "requestUrl": URI,
        "status": STATUS
    }
    TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999)
    RESOURCE = Resource(type="gae_app", labels={
        "module_id": "default",
        "version_id": "test"
    })
    # the entry the batch is expected to hold after log_proto()
    ENTRY = ProtobufEntry(
        payload=message,
        labels=LABELS,
        insert_id=IID,
        severity=SEVERITY,
        http_request=REQUEST,
        timestamp=TIMESTAMP,
        resource=RESOURCE,
        trace=TRACE,
        span_id=SPANID,
        trace_sampled=True,
    )
    client = _Client(project=self.PROJECT, connection=_make_credentials())
    logger = _Logger()
    batch = self._make_one(logger, client=client)
    batch.log_proto(
        message,
        labels=LABELS,
        insert_id=IID,
        severity=SEVERITY,
        http_request=REQUEST,
        timestamp=TIMESTAMP,
        resource=RESOURCE,
        trace=TRACE,
        span_id=SPANID,
        trace_sampled=True,
    )
    self.assertEqual(batch.entries, [ENTRY])
def get_gae_resource(self):
    """Return the GAE resource using the environment variables.

    Returns:
        google.cloud.logging_v2.resource.Resource: Monitored resource for GAE.
    """
    labels = {
        "project_id": self.project_id,
        "module_id": self.module_id,
        "version_id": self.version_id,
    }
    return Resource(type="gae_app", labels=labels)
def test_emit_manual_field_override(self):
    """Fields set manually on the LogRecord must override the values
    inferred from the request environment."""
    from google.cloud.logging_v2.resource import Resource

    inferred_http_request = {"request_url": "test"}
    inferred_trace_id = "trace-test"
    get_request_patch = mock.patch(
        "google.cloud.logging_v2.handlers.app_engine.get_request_data",
        return_value=(inferred_http_request, inferred_trace_id, None, None),
    )
    with get_request_patch:
        # library integrations mocked to return test data
        client = mock.Mock(project=self.PROJECT, spec=["project"])
        handler = self._make_one(client, transport=_Transport)
        gae_labels = handler.get_gae_labels()
        logname = "app"
        message = "hello world"
        record = logging.LogRecord(logname, logging, None, None, message, None, None)
        handler.project_id = self.PROJECT
        # set attributes manually
        expected_trace = "123"
        setattr(record, "trace", expected_trace)
        expected_span = "456"
        setattr(record, "span_id", expected_span)
        expected_http = {"reuqest_url": "manual"}
        setattr(record, "http_request", expected_http)
        expected_resource = Resource(type="test", labels={})
        setattr(record, "resource", expected_resource)
        additional_labels = {"test-label": "manual"}
        # manual labels are merged on top of the inferred GAE labels
        expected_labels = dict(gae_labels)
        expected_labels.update(additional_labels)
        setattr(record, "labels", additional_labels)
        handler.emit(record)
        self.assertIs(handler.transport.client, client)
        self.assertEqual(handler.transport.name, logname)
        # every manually-set field must win over the inferred one
        self.assertEqual(
            handler.transport.send_called_with,
            (
                record,
                message,
                expected_resource,
                expected_labels,
                expected_trace,
                expected_span,
                expected_http,
            ),
        )
def init(log_id="Unknown"):  # pragma: no cover
    """Replace the root logger's handlers with a Cloud Logging handler
    tagged with the current function's resource and *log_id* label."""
    labels = {
        "function_name": gcloud.get_function_name(),
        "project_id": gcloud.get_project(),
        "region": gcloud.get_function_region(),
    }
    resource = Resource(type="cloud_function", labels=labels)
    client = google.cloud.logging.Client()
    root_logger = logging.getLogger()
    root_logger.handlers = []
    handler = CloudLoggingHandler(
        client, resource=resource, labels={"log_id": str(log_id)}
    )
    root_logger.addHandler(handler)
def _create_compute_resource():
    """Create a standardized Compute Engine resource.

    Returns:
        google.cloud.logging.Resource
    """
    instance = retrieve_metadata_server(_GCE_INSTANCE_ID)
    zone = retrieve_metadata_server(_ZONE_ID)
    project = retrieve_metadata_server(_PROJECT_NAME)
    # metadata lookups may return None off-GCE; fall back to empty strings
    labels = {
        "project_id": project,
        "instance_id": instance or "",
        "zone": zone or "",
    }
    return Resource(type="gce_instance", labels=labels)
def _create_app_engine_resource():
    """Create a standardized App Engine resource.

    Returns:
        google.cloud.logging.Resource
    """
    zone = retrieve_metadata_server(_ZONE_ID)
    project = retrieve_metadata_server(_PROJECT_NAME)
    # service/version come from the GAE environment; zone from metadata
    labels = {
        "project_id": project,
        "module_id": os.environ.get(_GAE_SERVICE_ENV, ""),
        "version_id": os.environ.get(_GAE_VERSION_ENV, ""),
        "zone": zone or "",
    }
    return Resource(type="gae_app", labels=labels)
def __init__(self, app, parentLogName='request', childLogName='application',
             traceHeaderName=None, labels=None, resource=None):
    """Flask logging handler that groups child log lines under a parent
    request entry in Cloud Logging.

    Args:
        app: Flask application; may be None (call ``init_app`` later).
        parentLogName (str): log name for the per-request parent entry.
        childLogName (str): log name for application log lines.
        traceHeaderName (str): name of the header carrying the trace id.
        labels (dict): labels attached to emitted entries.
        resource: None to use the global resource, or a dict with
            ``type`` and ``labels`` keys describing a monitored resource.
    """
    logging.Handler.__init__(self)
    self.app = app
    self.labels = labels
    self.traceHeaderName = traceHeaderName
    if resource is None:
        resource = _GLOBAL_RESOURCE
    else:
        resource = Resource(type=resource['type'], labels=resource['labels'])
    # (removed leftover debug print of the resolved resource)
    self.resource = resource
    # separate background transports for the parent (request) and child
    # (application) log streams
    self.transport_parent = BackgroundThreadTransport(client, parentLogName)
    self.transport_child = BackgroundThreadTransport(client, childLogName)
    self.mLogLevels = {}
    if app is not None:
        self.init_app(app)
def _create_kubernetes_resource():
    """Create a standardized Kubernetes resource.

    Returns:
        google.cloud.logging.Resource
    """
    zone = retrieve_metadata_server(_ZONE_ID)
    cluster_name = retrieve_metadata_server(_GKE_CLUSTER_NAME)
    project = retrieve_metadata_server(_PROJECT_NAME)
    # metadata lookups may return None; fall back to empty strings
    labels = {
        "project_id": project,
        "location": zone or "",
        "cluster_name": cluster_name or "",
    }
    return Resource(type="k8s_container", labels=labels)
def _create_cloud_run_resource():
    """Create a standardized Cloud Run resource.

    Returns:
        google.cloud.logging.Resource
    """
    region = retrieve_metadata_server(_REGION_ID)
    project = retrieve_metadata_server(_PROJECT_NAME)
    # keep only the last path segment of the region value
    location = region.split("/")[-1] if region else ""
    labels = {
        "project_id": project,
        "service_name": os.environ.get(_CLOUD_RUN_SERVICE_ID, ""),
        "revision_name": os.environ.get(_CLOUD_RUN_REVISION_ID, ""),
        "location": location,
        "configuration_name": os.environ.get(_CLOUD_RUN_CONFIGURATION_ID, ""),
    }
    return Resource(type="cloud_run_revision", labels=labels)
def test_log_text_with_resource(self):
    """System test: a text entry logged with an explicit resource should
    round-trip that resource (minus output-only labels)."""
    text_payload = "System test: test_log_text_with_timestamp"
    logger = Config.CLIENT.logger(self._logger_name("log_text_res"))
    now = datetime.utcnow()
    resource = Resource(
        type="gae_app",
        labels={"module_id": "default", "version_id": "test", "zone": ""},
    )
    self.to_delete.append(logger)
    logger.log_text(text_payload, timestamp=now, resource=resource)
    entries = _list_entries(logger)
    self.assertEqual(len(entries), 1)
    self.assertEqual(entries[0].payload, text_payload)
    # project_id is output only so we don't want it in assertion
    del entries[0].resource.labels["project_id"]
    self.assertEqual(entries[0].resource, resource)
def get_log_resource_for_gce_instance():
    """Build a ``gce_instance`` Resource matching the default VM logs.

    GCE logs not produced by us show up under the "Cloud Logs" link from
    the instance; to appear alongside them we mirror the resource field of
    those entries, e.g.:

        resource: {
          type: "gce_instance"
          labels: { project_id, instance_id, zone }
        }

    which Stackdriver queries as::

        resource.type="gce_instance"
        resource.labels.instance_id="6201251793328237718"

    The values come from the GCE computeMetadata endpoint, which fails when
    not running on GCE.  For the list of queryable properties see:
    https://cloud.google.com/compute/docs/storing-retrieving-metadata
    """
    metadata_server = "http://metadata/computeMetadata/v1/"
    metadata_flavor = {'Metadata-Flavor': 'Google'}

    def fetch_metadata(prop_path):
        # every computeMetadata read requires the Metadata-Flavor header
        response = requests.get(metadata_server + prop_path, headers=metadata_flavor)
        return response.text

    labels = {
        'instance_id': fetch_metadata('instance/id'),
        'project_id': fetch_metadata('project/project-id'),
        # keep only the last path segment of the zone value
        'zone': fetch_metadata('instance/zone').split('/')[-1],
    }
    return Resource(type='gce_instance', labels=labels)
def test_setup_logging_w_extra_kwargs(self):
    """setup_logging() should build the handler from the extra kwargs and
    forward only the expected defaults to the module-level helper."""
    import io
    from google.cloud.logging_v2.handlers import CloudLoggingHandler
    from google.cloud.logging_v2.resource import Resource

    name = "test-logger"
    resource = Resource("resource_type", {"resource_label": "value"})
    labels = {"handler_label": "value"}
    stream = io.BytesIO()
    credentials = _make_credentials()
    client = self._make_one(project=self.PROJECT, credentials=credentials, _use_grpc=False)
    with mock.patch(
            "google.cloud.logging_v2.client.setup_logging") as mocked:
        client.setup_logging(name=name, resource=resource, labels=labels, stream=stream)
    self.assertEqual(len(mocked.mock_calls), 1)
    _, args, kwargs = mocked.mock_calls[0]
    (handler, ) = args
    self.assertIsInstance(handler, CloudLoggingHandler)
    self.assertEqual(handler.name, name)
    self.assertEqual(handler.resource, resource)
    self.assertEqual(handler.labels, labels)
    # stop the background transport worker so the test does not leak threads
    handler.transport.worker.stop()
    expected_kwargs = {
        "excluded_loggers": ("google.cloud", "google.auth", "google_auth_httplib2"),
        "log_level": 20,
    }
    self.assertEqual(kwargs, expected_kwargs)
def _create_functions_resource():
    """Create a standardized Cloud Functions resource.

    Returns:
        google.cloud.logging.Resource
    """
    project = retrieve_metadata_server(_PROJECT_NAME)
    region = retrieve_metadata_server(_REGION_ID)
    # prefer the Functions env var, then the Cloud Run service id, else empty
    if _FUNCTION_NAME in os.environ:
        function_name = os.environ.get(_FUNCTION_NAME)
    elif _CLOUD_RUN_SERVICE_ID in os.environ:
        function_name = os.environ.get(_CLOUD_RUN_SERVICE_ID)
    else:
        function_name = ""
    # keep only the last path segment of the region value
    region_name = region.split("/")[-1] if region else ""
    labels = {
        "project_id": project,
        "function_name": function_name,
        "region": region_name,
    }
    return Resource(type="cloud_function", labels=labels)
def _do_log(self, client, _entry_class, payload=None, **kw):
    """Helper for :meth:`log_empty`, :meth:`log_text`, etc.

    Builds one entry of type ``_entry_class`` from ``kw`` (falling back to
    the logger's defaults), optionally prepends the one-time instrumentation
    entry, and writes via the client's logging API.
    """
    client = self._require_client(client)
    # Apply defaults
    kw["log_name"] = kw.pop("log_name", self.full_name)
    kw["labels"] = kw.pop("labels", self.labels)
    kw["resource"] = kw.pop("resource", self.default_resource)
    partial_success = False
    severity = kw.get("severity", None)
    if isinstance(severity, str) and not severity.isupper():
        # convert severity to upper case, as expected by enum definition
        kw["severity"] = severity.upper()
    if isinstance(kw["resource"], collections.abc.Mapping):
        # if resource was passed as a dict, attempt to parse it into a
        # Resource object
        try:
            kw["resource"] = Resource(**kw["resource"])
        except TypeError as e:
            # dict couldn't be parsed as a Resource
            raise TypeError("invalid resource dict") from e
    if payload is not None:
        entry = _entry_class(payload=payload, **kw)
    else:
        entry = _entry_class(**kw)
    api_repr = entry.to_api_repr()
    entries = [api_repr]
    if google.cloud.logging_v2._instrumentation_emitted is False:
        # emit the instrumentation entry exactly once per process;
        # partial_success lets the real entry through even if the
        # instrumentation entry is rejected
        partial_success = True
        entries = _add_instrumentation(entries, **kw)
        google.cloud.logging_v2._instrumentation_emitted = True
    client.logging_api.write_entries(entries, partial_success=partial_success)
def test_ctor_explicit(self):
    """LogEntry constructor should store every explicitly-passed field and
    leave the payload unset."""
    import datetime
    from google.cloud.logging_v2.resource import Resource

    LOG_NAME = "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME)
    IID = "IID"
    TIMESTAMP = datetime.datetime.now()
    LABELS = {"foo": "bar", "baz": "qux"}
    SEVERITY = "CRITICAL"
    METHOD = "POST"
    URI = "https://api.example.com/endpoint"
    STATUS = "500"
    REQUEST = {
        "requestMethod": METHOD,
        "requestUrl": URI,
        "status": STATUS
    }
    resource = Resource(type="global", labels={})
    TRACE = "12345678-1234-5678-1234-567812345678"
    SPANID = "000000000000004a"
    FILE = "my_file.py"
    LINE_NO = 123
    FUNCTION = "my_function"
    SOURCE_LOCATION = {"file": FILE, "line": LINE_NO, "function": FUNCTION}
    OP_ID = "OP_ID"
    PRODUCER = "PRODUCER"
    OPERATION = {
        "id": OP_ID,
        "producer": PRODUCER,
        "first": True,
        "last": False
    }
    logger = _Logger(self.LOGGER_NAME, self.PROJECT)
    entry = self._make_one(
        log_name=LOG_NAME,
        logger=logger,
        insert_id=IID,
        timestamp=TIMESTAMP,
        labels=LABELS,
        severity=SEVERITY,
        http_request=REQUEST,
        resource=resource,
        trace=TRACE,
        span_id=SPANID,
        trace_sampled=True,
        source_location=SOURCE_LOCATION,
        operation=OPERATION,
    )
    self.assertEqual(entry.log_name, LOG_NAME)
    self.assertIs(entry.logger, logger)
    self.assertEqual(entry.insert_id, IID)
    self.assertEqual(entry.timestamp, TIMESTAMP)
    self.assertEqual(entry.labels, LABELS)
    self.assertEqual(entry.severity, SEVERITY)
    self.assertEqual(entry.http_request["requestMethod"], METHOD)
    self.assertEqual(entry.http_request["requestUrl"], URI)
    self.assertEqual(entry.http_request["status"], STATUS)
    self.assertEqual(entry.resource, resource)
    self.assertEqual(entry.trace, TRACE)
    self.assertEqual(entry.span_id, SPANID)
    self.assertTrue(entry.trace_sampled)
    source_location = entry.source_location
    self.assertEqual(source_location["file"], FILE)
    self.assertEqual(source_location["line"], LINE_NO)
    self.assertEqual(source_location["function"], FUNCTION)
    self.assertEqual(entry.operation, OPERATION)
    # no payload was passed, so none should be set
    self.assertIsNone(entry.payload)
def test_to_api_repr_proto_explicit(self):
    """to_api_repr() on a proto entry with all fields should serialize each
    one into the expected JSON key."""
    import datetime
    from google.protobuf.json_format import MessageToDict
    from google.cloud.logging_v2.resource import Resource
    from google.cloud._helpers import _datetime_to_rfc3339
    from google.protobuf.struct_pb2 import Struct
    from google.protobuf.struct_pb2 import Value

    LOG_NAME = "test.log"
    message = Struct(fields={"foo": Value(bool_value=True)})
    LABELS = {"foo": "bar", "baz": "qux"}
    IID = "IID"
    SEVERITY = "CRITICAL"
    METHOD = "POST"
    URI = "https://api.example.com/endpoint"
    STATUS = "500"
    REQUEST = {
        "requestMethod": METHOD,
        "requestUrl": URI,
        "status": STATUS
    }
    TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999)
    RESOURCE = Resource(type="gae_app", labels={
        "module_id": "default",
        "version_id": "test"
    })
    TRACE = "12345678-1234-5678-1234-567812345678"
    SPANID = "000000000000004a"
    FILE = "my_file.py"
    LINE = 123
    FUNCTION = "my_function"
    SOURCE_LOCATION = {"file": FILE, "line": LINE, "function": FUNCTION}
    OP_ID = "OP_ID"
    PRODUCER = "PRODUCER"
    OPERATION = {
        "id": OP_ID,
        "producer": PRODUCER,
        "first": True,
        "last": False
    }
    expected = {
        "logName": LOG_NAME,
        "protoPayload": MessageToDict(message),
        "labels": LABELS,
        "insertId": IID,
        "severity": SEVERITY,
        "httpRequest": REQUEST,
        "timestamp": _datetime_to_rfc3339(TIMESTAMP),
        "resource": RESOURCE._to_dict(),
        "trace": TRACE,
        "spanId": SPANID,
        "traceSampled": True,
        # the serialized form carries the line number as a string
        "sourceLocation": {
            "file": FILE,
            "line": str(LINE),
            "function": FUNCTION
        },
        "operation": OPERATION,
    }
    entry = self._make_one(
        log_name=LOG_NAME,
        payload=message,
        labels=LABELS,
        insert_id=IID,
        severity=SEVERITY,
        http_request=REQUEST,
        timestamp=TIMESTAMP,
        resource=RESOURCE,
        trace=TRACE,
        span_id=SPANID,
        trace_sampled=True,
        source_location=SOURCE_LOCATION,
        operation=OPERATION,
    )
    self.assertEqual(entry.to_api_repr(), expected)
def test_from_api_repr_w_loggers_no_logger_match(self):
    """from_api_repr() should create a new logger — and cache it in the
    supplied ``loggers`` mapping — when no existing logger matches."""
    from datetime import datetime
    from google.cloud._helpers import UTC
    from google.cloud.logging_v2.resource import Resource

    klass = self._get_target_class()
    client = _Client(self.PROJECT)
    SEVERITY = "CRITICAL"
    IID = "IID"
    NOW = datetime.utcnow().replace(tzinfo=UTC)
    TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW)
    LOG_NAME = "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME)
    LABELS = {"foo": "bar", "baz": "qux"}
    METHOD = "POST"
    URI = "https://api.example.com/endpoint"
    RESOURCE = Resource(
        type="gae_app",
        labels={
            "type": "gae_app",
            "labels": {
                "module_id": "default",
                "version": "test"
            },
        },
    )
    STATUS = "500"
    TRACE = "12345678-1234-5678-1234-567812345678"
    SPANID = "000000000000004a"
    FILE = "my_file.py"
    LINE_NO = 123
    FUNCTION = "my_function"
    # line number arrives from the API as a string
    SOURCE_LOCATION = {
        "file": FILE,
        "line": str(LINE_NO),
        "function": FUNCTION
    }
    OP_ID = "OP_ID"
    PRODUCER = "PRODUCER"
    OPERATION = {
        "id": OP_ID,
        "producer": PRODUCER,
        "first": True,
        "last": False
    }
    API_REPR = {
        "logName": LOG_NAME,
        "insertId": IID,
        "timestamp": TIMESTAMP,
        "labels": LABELS,
        "severity": SEVERITY,
        "httpRequest": {
            "requestMethod": METHOD,
            "requestUrl": URI,
            "status": STATUS,
        },
        "resource": RESOURCE._to_dict(),
        "trace": TRACE,
        "spanId": SPANID,
        "traceSampled": True,
        "sourceLocation": SOURCE_LOCATION,
        "operation": OPERATION,
    }
    loggers = {}
    entry = klass.from_api_repr(API_REPR, client, loggers=loggers)
    self.assertEqual(entry.log_name, LOG_NAME)
    logger = entry.logger
    self.assertIsInstance(logger, _Logger)
    self.assertEqual(logger.name, self.LOGGER_NAME)
    self.assertEqual(entry.insert_id, IID)
    self.assertEqual(entry.timestamp, NOW)
    # receiveTimestamp was absent from the payload, so it must stay None
    self.assertIsNone(entry.received_timestamp)
    self.assertEqual(entry.labels, LABELS)
    self.assertEqual(entry.severity, SEVERITY)
    self.assertEqual(entry.http_request["requestMethod"], METHOD)
    self.assertEqual(entry.http_request["requestUrl"], URI)
    self.assertEqual(entry.http_request["status"], STATUS)
    self.assertIs(logger.client, client)
    self.assertEqual(logger.name, self.LOGGER_NAME)
    # the freshly-created logger is cached under its full log name
    self.assertEqual(loggers, {LOG_NAME: logger})
    self.assertEqual(entry.resource, RESOURCE)
    self.assertEqual(entry.trace, TRACE)
    self.assertEqual(entry.span_id, SPANID)
    self.assertTrue(entry.trace_sampled)
    # the string line number must come back as an int
    source_location = entry.source_location
    self.assertEqual(source_location["file"], FILE)
    self.assertEqual(source_location["line"], LINE_NO)
    self.assertEqual(source_location["function"], FUNCTION)
    self.assertEqual(entry.operation, OPERATION)
    self.assertIsNone(entry.payload)
def from_api_repr(cls, resource, client, *, loggers=None):
    """Construct an entry given its API representation

    Args:
        resource (dict): text entry resource representation returned from
            the API
        client (~logging_v2.client.Client):
            Client which holds credentials and project configuration.
        loggers (Optional[dict]):
            A mapping of logger fullnames -> loggers.  If not
            passed, the entry will have a newly-created logger if possible,
            or an empty logger field if not.

    Returns:
        google.cloud.logging.entries.LogEntry: Log entry parsed from ``resource``.
    """
    if loggers is None:
        loggers = {}
    logger_fullname = resource["logName"]
    logger = loggers.get(logger_fullname)
    if logger is None:
        # attempt to create a logger if possible
        try:
            logger_name = logger_name_from_path(logger_fullname, client.project)
            logger = loggers[logger_fullname] = client.logger(logger_name)
        except ValueError:
            # log name is not scoped to a project. Leave logger as None
            pass
    payload = cls._extract_payload(resource)
    insert_id = resource.get("insertId")
    timestamp = resource.get("timestamp")
    if timestamp is not None:
        timestamp = _rfc3339_nanos_to_datetime(timestamp)
    labels = resource.get("labels")
    severity = resource.get("severity")
    http_request = resource.get("httpRequest")
    trace = resource.get("trace")
    span_id = resource.get("spanId")
    trace_sampled = resource.get("traceSampled")
    source_location = resource.get("sourceLocation")
    if source_location is not None:
        # normalize the line number from string to int (or None);
        # NOTE(review): pop() mutates the caller's dict in place
        line = source_location.pop("line", None)
        source_location["line"] = _int_or_none(line)
    operation = resource.get("operation")
    monitored_resource_dict = resource.get("resource")
    monitored_resource = None
    if monitored_resource_dict is not None:
        monitored_resource = Resource._from_dict(monitored_resource_dict)
    inst = cls(
        log_name=logger_fullname,
        insert_id=insert_id,
        timestamp=timestamp,
        labels=labels,
        severity=severity,
        http_request=http_request,
        resource=monitored_resource,
        trace=trace,
        span_id=span_id,
        trace_sampled=trace_sampled,
        source_location=source_location,
        operation=operation,
        logger=logger,
        payload=payload,
    )
    # receiveTimestamp is server-assigned; set it only when present
    received = resource.get("receiveTimestamp")
    if received is not None:
        inst.received_timestamp = _rfc3339_nanos_to_datetime(received)
    return inst
from google.protobuf.any_pb2 import Any from google.protobuf.json_format import MessageToDict from google.protobuf.json_format import Parse from google.cloud.logging_v2.resource import Resource from google.cloud._helpers import _name_from_project_path from google.cloud._helpers import _rfc3339_nanos_to_datetime from google.cloud._helpers import _datetime_to_rfc3339 # import officially supported proto definitions import google.cloud.audit.audit_log_pb2 # noqa: F401 import google.cloud.appengine_logging # noqa: F401 from google.iam.v1.logging import audit_data_pb2 # noqa: F401 _GLOBAL_RESOURCE = Resource(type="global", labels={}) _LOGGER_TEMPLATE = re.compile( r""" projects/ # static prefix (?P<project>[^/]+) # initial letter, wordchars + hyphen /logs/ # static midfix (?P<name>[^/]+) # initial letter, wordchars + allowed punc """, re.VERBOSE, ) def logger_name_from_path(path, project=None): """Validate a logger URI path and get the logger name.
def logger_usage(client_true, to_delete):
    """Logger usage.

    Documentation-snippet driver: the [START]/[END] markers delimit code
    regions extracted into the published docs — keep them intact.
    """
    import google.cloud.logging

    # [START logger_create]
    client = google.cloud.logging.Client(project="my-project")
    logger = client.logger(name="log_id")
    # logger will bind to logName "projects/my_project/logs/log_id"
    # [END logger_create]
    client = client_true
    log_id = "logger_usage_%d" % (_millis())

    # [START logger_custom_labels]
    custom_labels = {"my-key": "my-value"}
    label_logger = client.logger(log_id, labels=custom_labels)
    # [END logger_custom_labels]
    to_delete.append(label_logger)

    # [START logger_custom_resource]
    from google.cloud.logging_v2.resource import Resource

    resource = Resource(type="global", labels={})
    global_logger = client.logger(log_id, resource=resource)
    # [END logger_custom_resource]
    to_delete.append(global_logger)

    logger = client_true.logger(log_id)
    to_delete.append(logger)

    # [START logger_log_basic]
    logger.log("A simple entry")  # API call
    # [END logger_log_basic]

    # [START logger_log_fields]
    logger.log(
        "an entry with fields set",
        severity="ERROR",
        insert_id="0123",
        labels={"my-label": "my-value"},
    )  # API call
    # [END logger_log_fields]

    # [START logger_log_text]
    logger.log_text("A simple entry")  # API call
    # [END logger_log_text]

    # [START logger_log_struct]
    logger.log_struct(
        {"message": "My second entry", "weather": "partly cloudy"}
    )  # API call
    # [END logger_log_struct]

    # [START logger_log_resource_text]
    from google.cloud.logging import Resource

    res = Resource(
        type="generic_node",
        labels={
            "location": "us-central1-a",
            "namespace": "default",
            "node_id": "10.10.10.1",
        },
    )
    logger.log_struct(
        {"message": "My first entry", "weather": "partly cloudy"}, resource=res
    )
    # [END logger_log_resource_text]

    # [START logger_log_batch]
    batch = logger.batch()
    batch.log("first log")
    batch.log("second log")
    batch.commit()
    # [END logger_log_batch]

    # [START logger_log_batch_context]
    with logger.batch() as batch:
        batch.log("first log")
        # do work
        batch.log("last log")
    # [END logger_log_batch_context]

    # [START logger_list_entries]
    from google.cloud.logging import DESCENDING

    for entry in logger.list_entries(order_by=DESCENDING):  # API call(s)
        do_something_with(entry)
    # [END logger_list_entries]

    def _logger_delete():
        # [START logger_delete]
        logger.delete()  # API call
        # [END logger_delete]

    _backoff_not_found(_logger_delete)
    to_delete.remove(logger)
# license that can be found in the LICENSE file. import logging import json import datetime import time from flask import Flask from flask import request, Response, render_template, g, jsonify, current_app from google.cloud import logging as gcplogging from google.cloud.logging_v2.resource import Resource from flask_gcp_log_groups.background_thread import BackgroundThreadTransport _GLOBAL_RESOURCE = Resource(type='global', labels={}) logger = logging.getLogger(__name__) client = gcplogging.Client() class GCPHandler(logging.Handler): def __init__(self, app, parentLogName='request', childLogName='application', traceHeaderName=None,labels=None, resource=None): logging.Handler.__init__(self) self.app = app self.labels=labels self.traceHeaderName = traceHeaderName if (resource is None): resource = _GLOBAL_RESOURCE else: