def test_commit_w_alternate_client(self):
    import json
    from google.protobuf.json_format import MessageToJson
    from google.protobuf.struct_pb2 import Struct
    from google.protobuf.struct_pb2 import Value
    from google.cloud.logging.logger import Logger
    from google.cloud.logging.logger import _GLOBAL_RESOURCE

    TEXT = 'This is the entry text'
    STRUCT = {'message': TEXT, 'weather': 'partly cloudy'}
    message = Struct(fields={'foo': Value(bool_value=True)})
    DEFAULT_LABELS = {'foo': 'spam'}
    LABELS = {
        'foo': 'bar',
        'baz': 'qux',
    }
    SEVERITY = 'CRITICAL'
    METHOD = 'POST'
    URI = 'https://api.example.com/endpoint'
    STATUS = '500'
    REQUEST = {
        'requestMethod': METHOD,
        'requestUrl': URI,
        'status': STATUS,
    }
    client1 = _Client(project=self.PROJECT)
    client2 = _Client(project=self.PROJECT)
    api = client2.logging_api = _DummyLoggingAPI()
    logger = Logger('logger_name', client1, labels=DEFAULT_LABELS)
    ENTRIES = [
        {'textPayload': TEXT,
         'labels': LABELS,
         'resource': _GLOBAL_RESOURCE._to_dict()},
        {'jsonPayload': STRUCT,
         'severity': SEVERITY,
         'resource': _GLOBAL_RESOURCE._to_dict()},
        {'protoPayload': json.loads(MessageToJson(message)),
         'httpRequest': REQUEST,
         'resource': _GLOBAL_RESOURCE._to_dict()},
    ]
    batch = self._make_one(logger, client=client1)

    batch.log_text(TEXT, labels=LABELS)
    batch.log_struct(STRUCT, severity=SEVERITY)
    batch.log_proto(message, http_request=REQUEST)
    batch.commit(client=client2)

    self.assertEqual(list(batch.entries), [])
    self.assertEqual(
        api._write_entries_called_with,
        (ENTRIES, logger.full_name, None, DEFAULT_LABELS))
def logger(self, name):
    """Creates a logger bound to the current client.

    :type name: str
    :param name: the name of the logger to be constructed.

    :rtype: :class:`google.cloud.logging.logger.Logger`
    :returns: Logger created with the current client.
    """
    return Logger(name, client=self)
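# Minimal usage sketch for the Client.logger() factory above (illustrative
# code, not part of the library or its test suite; the project id and logger
# name are placeholders, and credentials are taken from the environment).
from google.cloud.logging import Client

client = Client(project='my-project')
logger = client.logger('my-app-log')    # Logger bound to `client`
logger.log_text('application started')  # writes a single text entry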
def test_commit_w_alternate_client(self):
    import json
    from google.protobuf.json_format import MessageToJson
    from google.protobuf.struct_pb2 import Struct
    from google.protobuf.struct_pb2 import Value
    from google.cloud.logging.logger import Logger
    from google.cloud.logging.entries import _GLOBAL_RESOURCE

    TEXT = "This is the entry text"
    STRUCT = {"message": TEXT, "weather": "partly cloudy"}
    message = Struct(fields={"foo": Value(bool_value=True)})
    DEFAULT_LABELS = {"foo": "spam"}
    LABELS = {"foo": "bar", "baz": "qux"}
    SEVERITY = "CRITICAL"
    METHOD = "POST"
    URI = "https://api.example.com/endpoint"
    STATUS = "500"
    REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS}
    client1 = _Client(project=self.PROJECT)
    client2 = _Client(project=self.PROJECT)
    api = client2.logging_api = _DummyLoggingAPI()
    logger = Logger("logger_name", client1, labels=DEFAULT_LABELS)
    ENTRIES = [
        {
            "textPayload": TEXT,
            "labels": LABELS,
            "resource": _GLOBAL_RESOURCE._to_dict(),
        },
        {
            "jsonPayload": STRUCT,
            "severity": SEVERITY,
            "resource": _GLOBAL_RESOURCE._to_dict(),
        },
        {
            "protoPayload": json.loads(MessageToJson(message)),
            "httpRequest": REQUEST,
            "resource": _GLOBAL_RESOURCE._to_dict(),
        },
    ]
    batch = self._make_one(logger, client=client1)

    batch.log_text(TEXT, labels=LABELS)
    batch.log_struct(STRUCT, severity=SEVERITY)
    batch.log_proto(message, http_request=REQUEST)
    batch.commit(client=client2)

    self.assertEqual(list(batch.entries), [])
    self.assertEqual(
        api._write_entries_called_with,
        (ENTRIES, logger.full_name, None, DEFAULT_LABELS),
    )
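# Sketch of the behavior the test above verifies, in application terms
# (assumed illustrative code, not part of the test suite): a Batch created
# against one client can be committed through a different client passed to
# commit(). Project ids, client names, and the logger name are placeholders.
from google.cloud.logging import Client

ingest_client = Client(project='my-project')
write_client = Client(project='my-project')

logger = ingest_client.logger('my-app-log')
batch = logger.batch(client=ingest_client)
batch.log_text('queued entry', labels={'stage': 'ingest'})
batch.commit(client=write_client)  # entries are sent via write_client's API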
def test_context_mgr_success(self):
    import json
    from google.protobuf.json_format import MessageToJson
    from google.protobuf.struct_pb2 import Struct
    from google.protobuf.struct_pb2 import Value
    from google.cloud.logging.logger import Logger

    TEXT = 'This is the entry text'
    STRUCT = {'message': TEXT, 'weather': 'partly cloudy'}
    message = Struct(fields={'foo': Value(bool_value=True)})
    DEFAULT_LABELS = {'foo': 'spam'}
    LABELS = {'foo': 'bar', 'baz': 'qux'}
    SEVERITY = 'CRITICAL'
    METHOD = 'POST'
    URI = 'https://api.example.com/endpoint'
    STATUS = '500'
    REQUEST = {
        'requestMethod': METHOD,
        'requestUrl': URI,
        'status': STATUS,
    }
    client = _Client(project=self.PROJECT)
    api = client.logging_api = _DummyLoggingAPI()
    logger = Logger('logger_name', client, labels=DEFAULT_LABELS)
    RESOURCE = {
        'type': 'global',
    }
    ENTRIES = [
        {'textPayload': TEXT,
         'httpRequest': REQUEST},
        {'jsonPayload': STRUCT,
         'labels': LABELS},
        {'protoPayload': json.loads(MessageToJson(message)),
         'severity': SEVERITY},
    ]
    batch = self._make_one(logger, client=client)

    with batch as other:
        other.log_text(TEXT, http_request=REQUEST)
        other.log_struct(STRUCT, labels=LABELS)
        other.log_proto(message, severity=SEVERITY)

    self.assertEqual(list(batch.entries), [])
    self.assertEqual(
        api._write_entries_called_with,
        (ENTRIES, logger.full_name, RESOURCE, DEFAULT_LABELS))
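# Sketch of the batch context-manager pattern exercised by the test above
# (assumed application code, not part of the test suite): entries queued
# inside the `with` block are written in a single API call when the block
# exits without an exception. Project id and logger name are placeholders.
from google.cloud.logging import Client

client = Client(project='my-project')
logger = client.logger('my-app-log')

with logger.batch() as batch:
    batch.log_text('request received', http_request={'requestMethod': 'GET'})
    batch.log_struct({'event': 'processed', 'items': 3}, severity='INFO')
# leaving the block commits the batch, sending both entries together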
from __future__ import print_function

import os
os.environ['GOOGLE_CLOUD_DISABLE_GRPC'] = 'true'

import socket

from google.cloud.logging.client import Client
from google.cloud.logging.logger import Logger

from igneous.secrets import PROJECT_NAME, QUEUE_NAME, google_credentials_path

client = Client.from_service_account_json(
    google_credentials_path, project=PROJECT_NAME)
logger = Logger('pipeline_logger', client)

def log(severity, task, message):
    # View the logs produced by running this script at:
    # https://console.cloud.google.com/logs/viewer?project=neuromancer-seung-import&resource=global
    #
    # Choosing the severity:
    #   DEBUG      Debug or trace information.
    #   INFO       Routine information, such as ongoing status or performance.
    #   NOTICE     Normal but significant events, such as start up, shut down,
    #              or a configuration change.
    #   WARNING    Warning events might cause problems.
    #   ERROR      Error events are likely to cause problems.
    #   CRITICAL   Critical events cause more severe problems or outages.
    #   ALERT      A person must take an action immediately.
    #   EMERGENCY  One or more systems are unusable.
    #
    # TODO: change resource from global to GKE container or similar
    #
    # Assumed completion (the original body was truncated): write the task
    # and message as one structured entry at the requested severity.
    logger.log_struct(
        {'task': str(task), 'message': message},
        severity=severity,
    )
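# Hypothetical call site for the log() helper above (illustrative only; the
# task name and message are placeholders, and a real pipeline worker would
# pull its task from the QUEUE_NAME queue before logging).
if __name__ == '__main__':
    log('NOTICE', task='DownsampleTask',
        message='worker started on %s' % socket.gethostname())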