def test_list_entries_explicit(self):
    from google.cloud.logging import DESCENDING
    from google.cloud.logging import Client

    PROJECT1 = "PROJECT1"
    PROJECT2 = "PROJECT2"
    INPUT_FILTER = "resource.type:global"
    TOKEN = "TOKEN"
    PAGE_SIZE = 42
    client = Client(
        project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False
    )
    client._connection = _Connection({})
    logger = self._make_one(self.LOGGER_NAME, client=client)
    iterator = logger.list_entries(
        resource_names=[f"projects/{PROJECT1}", f"projects/{PROJECT2}"],
        filter_=INPUT_FILTER,
        order_by=DESCENDING,
        page_size=PAGE_SIZE,
        page_token=TOKEN,
    )
    entries = list(iterator)
    token = iterator.next_page_token

    self.assertEqual(len(entries), 0)
    self.assertIsNone(token)
    # self.assertEqual(client._listed, LISTED)
    # check call payload
    call_payload_no_filter = deepcopy(client._connection._called_with)
    call_payload_no_filter["data"]["filter"] = "removed"
    self.assertEqual(
        call_payload_no_filter,
        {
            "method": "POST",
            "path": "/entries:list",
            "data": {
                "filter": "removed",
                "orderBy": DESCENDING,
                "pageSize": PAGE_SIZE,
                "pageToken": TOKEN,
                "resourceNames": [f"projects/{PROJECT1}", f"projects/{PROJECT2}"],
            },
        },
    )
    # verify that default filter is 24 hours
    LOG_FILTER = "logName=projects/%s/logs/%s" % (
        self.PROJECT,
        self.LOGGER_NAME,
    )
    combined_filter = (
        INPUT_FILTER
        + " AND "
        + LOG_FILTER
        + " AND "
        + "timestamp>="
        + self.TIME_FORMAT
    )
    timestamp = datetime.strptime(
        client._connection._called_with["data"]["filter"], combined_filter
    )
    yesterday = datetime.now(timezone.utc) - timedelta(days=1)
    self.assertLess(yesterday - timestamp, timedelta(minutes=1))

def stackdriver_log(sender, payload=None):
    if settings.GOOGLE_REQUEST_LOGGING is False:
        return
    # Use None instead of a mutable default argument for the payload.
    payload = payload or {}
    if not hasattr(local, '_gcp_logger'):
        from google.cloud.logging import Client as LoggingClient

        client = LoggingClient()
        logger_name = '%s-api' % settings.APP_NAME
        local._gcp_logger = client.logger(logger_name)
    local._gcp_logger.log_struct(payload)

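# The `local` object used above (and in generate_request_log below) to cache
# one logger per thread is not defined in these snippets; a minimal sketch of
# that assumption would be ordinary thread-local storage:
import threading

local = threading.local()
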
def test_list_entries_no_paging(self):
    from google.cloud.logging import Client
    from google.cloud.logging import TextEntry
    from google.cloud.logging import Logger

    NOW, TIMESTAMP = self._make_timestamp()
    IID = "IID"
    TEXT = "TEXT"
    SENT = {"resourceNames": [self.PROJECT_PATH]}
    TOKEN = "TOKEN"
    RETURNED = {
        "entries": [
            {
                "textPayload": TEXT,
                "insertId": IID,
                "resource": {"type": "global"},
                "timestamp": TIMESTAMP,
                "logName": f"projects/{self.PROJECT}/logs/{self.LOGGER_NAME}",
            }
        ],
        "nextPageToken": TOKEN,
    }
    client = Client(
        project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False
    )
    client._connection = _Connection(RETURNED)
    api = self._make_one(client)

    iterator = api.list_entries([self.PROJECT_PATH])
    page = next(iterator.pages)
    entries = list(page)
    token = iterator.next_page_token

    # First check the token.
    self.assertEqual(token, TOKEN)
    # Then check the entries returned.
    self.assertEqual(len(entries), 1)
    entry = entries[0]
    self.assertIsInstance(entry, TextEntry)
    self.assertEqual(entry.payload, TEXT)
    self.assertIsInstance(entry.logger, Logger)
    self.assertEqual(entry.logger.name, self.LOGGER_NAME)
    self.assertEqual(entry.insert_id, IID)
    self.assertEqual(entry.timestamp, NOW)
    self.assertIsNone(entry.labels)
    self.assertIsNone(entry.severity)
    self.assertIsNone(entry.http_request)

    called_with = client._connection._called_with
    expected_path = "/%s" % (self.LIST_ENTRIES_PATH,)
    self.assertEqual(
        called_with, {"method": "POST", "path": expected_path, "data": SENT}
    )

def test_list_entries_explicit_timestamp(self):
    from google.cloud.logging import DESCENDING
    from google.cloud.logging import Client

    PROJECT1 = "PROJECT1"
    PROJECT2 = "PROJECT2"
    INPUT_FILTER = 'resource.type:global AND timestamp="2020-10-13T21"'
    TOKEN = "TOKEN"
    PAGE_SIZE = 42
    client = Client(
        project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False
    )
    client._connection = _Connection({})
    logger = self._make_one(self.LOGGER_NAME, client=client)
    iterator = logger.list_entries(
        resource_names=[f"projects/{PROJECT1}", f"projects/{PROJECT2}"],
        filter_=INPUT_FILTER,
        order_by=DESCENDING,
        page_size=PAGE_SIZE,
        page_token=TOKEN,
    )
    entries = list(iterator)
    token = iterator.next_page_token

    self.assertEqual(len(entries), 0)
    self.assertIsNone(token)
    # self.assertEqual(client._listed, LISTED)
    # check call payload
    LOG_FILTER = "logName=projects/%s/logs/%s" % (
        self.PROJECT,
        self.LOGGER_NAME,
    )
    combined_filter = INPUT_FILTER + " AND " + LOG_FILTER
    self.assertEqual(
        client._connection._called_with,
        {
            "method": "POST",
            "path": "/entries:list",
            "data": {
                "filter": combined_filter,
                "orderBy": DESCENDING,
                "pageSize": PAGE_SIZE,
                "pageToken": TOKEN,
                "resourceNames": [f"projects/{PROJECT1}", f"projects/{PROJECT2}"],
            },
        },
    )

def setup():
    logger = logging.getLogger()

    # Set the region on log records.
    default_factory = logging.getLogRecordFactory()
    logging.setLogRecordFactory(partial(region_record_factory, default_factory))

    # Send logs directly via the logging client if possible. This ensures trace
    # ids are propagated and allows us to send structured messages.
    if environment.in_gae():
        client = Client()
        handler = StructuredAppEngineHandler(client)
        handlers.setup_logging(handler, log_level=logging.INFO)
        for handler in logger.handlers:
            if not isinstance(handler, StructuredAppEngineHandler):
                logger.removeHandler(handler)
    else:
        logging.basicConfig()

    for handler in logger.handlers:
        # If writing directly to Stackdriver, send a structured message.
        if isinstance(handler, StructuredAppEngineHandler):
            handler.setFormatter(StructuredLogFormatter())
        # Otherwise, the default stream handler requires a string.
        else:
            handler.setFormatter(
                logging.Formatter(
                    "(%(region)s) %(module)s/%(funcName)s : %(message)s"
                )
            )

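# `region_record_factory` is not shown in these snippets. A minimal sketch of
# what such a record factory might look like (hypothetical; the region value
# and its lookup are assumptions): it wraps the default factory and attaches
# the `region` attribute consumed by the "(%(region)s) ..." format strings.
import logging
from functools import partial

def region_record_factory(default_factory, *args, **kwargs):
    # Delegate record creation to the wrapped factory, then annotate it.
    record = default_factory(*args, **kwargs)
    record.region = "us_east"  # hypothetical; real code would look this up
    return record

# Wiring, as in setup() above:
default_factory = logging.getLogRecordFactory()
logging.setLogRecordFactory(partial(region_record_factory, default_factory))
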
def test_list_entries_defaults(self):
    from google.cloud.logging import Client

    TOKEN = "TOKEN"
    client = Client(
        project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False
    )
    returned = {"nextPageToken": TOKEN}
    client._connection = _Connection(returned)
    logger = self._make_one(self.LOGGER_NAME, client=client)
    iterator = logger.list_entries()
    page = next(iterator.pages)
    entries = list(page)
    token = iterator.next_page_token

    self.assertEqual(len(entries), 0)
    self.assertEqual(token, TOKEN)
    LOG_FILTER = "logName=projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME)
    # check call payload
    call_payload_no_filter = deepcopy(client._connection._called_with)
    call_payload_no_filter["data"]["filter"] = "removed"
    self.assertEqual(
        call_payload_no_filter,
        {
            "path": "/entries:list",
            "method": "POST",
            "data": {
                "filter": "removed",
                "resourceNames": [f"projects/{self.PROJECT}"],
            },
        },
    )
    # verify that default filter is 24 hours
    timestamp = datetime.strptime(
        client._connection._called_with["data"]["filter"],
        LOG_FILTER + " AND timestamp>=" + self.TIME_FORMAT,
    )
    yesterday = datetime.now(timezone.utc) - timedelta(days=1)
    self.assertLess(yesterday - timestamp, timedelta(minutes=1))

def log_handler(self, log_name):
    try:
        from google.cloud.logging import Client
        from google.cloud.logging.handlers.app_engine import AppEngineHandler
    except ImportError:
        return super(AppEngine, self).log_handler(log_name)

    client = Client(_use_grpc=False)
    return AppEngineHandler(client, name=log_name)

def main():
    client = Client()
    for example in _find_examples():
        to_delete = []
        print("%-25s: %s" % _name_and_doc(example))
        try:
            example(client, to_delete)
        except AssertionError as failure:
            print(" FAIL: %s" % (failure,))
        except Exception as error:  # pylint: disable=broad-except
            print(" ERROR: %r" % (error,))
        for item in to_delete:
            _backoff_not_found(item.delete)

def load_google_client(base64_data, scopes=None):
    # `scopes` is accepted for API compatibility but unused here; default to
    # None rather than a mutable list.
    if not base64_data:
        return ''
    decoded = base64.b64decode(base64_data).decode('utf-8')
    # From: https://github.com/googleapis/google-cloud-python/issues/7291#issuecomment-461135696
    with tempfile.NamedTemporaryFile() as temp:
        temp.write(decoded.encode('ascii'))
        temp.flush()
        # Load the client while the temp file still exists on disk.
        client = LoggingClient.from_service_account_json(temp.name)
    return client

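# Example usage (the environment variable name is hypothetical): build a
# logging client from base64-encoded service-account JSON kept in config.
import os

client = load_google_client(os.environ.get('GOOGLE_SERVICE_ACCOUNT_B64', ''))
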
def generate_request_log(resp, took):
    """Collect data about the request for analytical purposes."""
    # Move this down once we have multiple collectors.
    if settings.GOOGLE_REQUEST_LOGGING is False:
        return
    payload = {
        "v": __version__,
        "method": request.method,
        "endpoint": request.endpoint,
        "referrer": request.referrer,
        "ip": _get_remote_ip(),
        "ua": str(request.user_agent),
        "time": datetime.utcnow().isoformat(),
        "session_id": getattr(request, "_session_id", None),
        "locale": getattr(request, "_app_locale", None),
        "took": took,
        "url": request.url,
        "path": request.full_path,
        "status": resp.status_code,
    }
    if hasattr(request, "authz"):
        payload["role_id"] = request.authz.id
    tags = dict(request.view_args or ())
    if hasattr(request, "_log_tags"):
        tags.update(request._log_tags)
    for tag, value in tags.items():
        if value is not None and tag not in payload:
            payload[tag] = value
    # log.info("Log: %s", pformat(payload))
    if not hasattr(local, "_gcp_logger"):
        from google.cloud.logging import Client

        client = Client()
        logger_name = "%s-api" % settings.APP_NAME
        local._gcp_logger = client.logger(logger_name)
    local._gcp_logger.log_struct(payload)

def get_handler(logName):
    kwargs = {}
    try:
        kwargs['resource'] = get_log_resource_for_gce_instance()
    except Exception:
        # Probably not on GCE ;-)
        pass

    # TODO: When we launched celery workers using prefork (multiprocessing:
    # separate process per worker) we found that
    # google.cloud.logging.handlers.transports.background_thread.BackgroundThreadTransport
    # stopped transmitting logs to GCP. We're not sure why, but as a workaround
    # we switched to using a SyncTransport sub-class.
    handler = CloudLoggingHandler(
        Client(), logName, transport=StructlogTransport, **kwargs
    )
    handler.setFormatter(jsonlogger.JsonFormatter())
    return handler

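# The `StructlogTransport` class referenced above is not shown. A minimal
# sketch of what such a SyncTransport sub-class could look like (hypothetical;
# not the project's actual implementation): SyncTransport sends each record
# with a blocking API call, which keeps working under prefork workers, and
# since the handler's formatter renders JSON we can forward the decoded dict
# as a structured payload.
import json

from google.cloud.logging.handlers.transports import SyncTransport

class StructlogTransport(SyncTransport):
    def send(self, record, message, **kwargs):
        try:
            # `message` was rendered by jsonlogger.JsonFormatter above.
            payload = json.loads(message)
        except ValueError:
            payload = {"message": message}
        self.logger.log_struct(payload, severity=record.levelname)
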
def setup() -> None:
    """Setup logging"""
    # Set the region on log records.
    logging.setLogRecordFactory(ContextualLogRecord)
    logger = logging.getLogger()

    # Send logs directly via the logging client if possible. This ensures trace
    # ids are propagated and allows us to send structured messages.
    if environment.in_gcp():
        client = Client()
        structured_handler = StructuredAppEngineHandler(client)
        handlers.setup_logging(structured_handler, log_level=logging.INFO)

        before_request_handler = StructuredAppEngineHandler(
            client, name=BEFORE_REQUEST_LOG
        )
        logging.getLogger(BEFORE_REQUEST_LOG).addHandler(before_request_handler)

        # Streams unstructured logs to stdout - these logs will still show up
        # under the appengine.googleapis.com/stdout Stackdriver logs bucket,
        # even if other logs are stalled on the global interpreter lock or some
        # other issue.
        stdout_handler = logging.StreamHandler(sys.stdout)
        handlers.setup_logging(stdout_handler, log_level=logging.INFO)

        for handler in logger.handlers:
            if not isinstance(
                handler, (StructuredAppEngineHandler, logging.StreamHandler)
            ):
                logger.removeHandler(handler)
    else:
        logging.basicConfig()

    for handler in logger.handlers:
        # If we aren't writing directly to Stackdriver, prefix the log with
        # important context that would be in the labels.
        if not isinstance(handler, StructuredAppEngineHandler):
            handler.setFormatter(
                logging.Formatter(
                    "[pid: %(process)d] (%(region)s) %(module)s/%(funcName)s : %(message)s"
                )
            )

    # Export gunicorn errors using the same handlers as other logs, so that
    # they go to Stackdriver in production.
    gunicorn_logger = logging.getLogger("gunicorn.error")
    gunicorn_logger.handlers = logger.handlers

def setup():
    """Setup logging"""
    # Set the region on log records.
    default_factory = logging.getLogRecordFactory()
    logging.setLogRecordFactory(partial(region_record_factory, default_factory))

    logger = logging.getLogger()
    # Send logs directly via the logging client if possible. This ensures trace
    # ids are propagated and allows us to send structured messages.
    if environment.in_gcp():
        client = Client()
        handler = StructuredAppEngineHandler(client)
        handlers.setup_logging(handler, log_level=logging.INFO)

        # Streams unstructured logs to stdout - these logs will still show up
        # under the appengine.googleapis.com/stdout Stackdriver logs bucket,
        # even if other logs are stalled on the global interpreter lock or some
        # other issue.
        stdout_handler = logging.StreamHandler(sys.stdout)
        handlers.setup_logging(stdout_handler, log_level=logging.INFO)

        for handler in logger.handlers:
            if not isinstance(
                handler, (StructuredAppEngineHandler, logging.StreamHandler)
            ):
                logger.removeHandler(handler)
    else:
        logging.basicConfig()

    for handler in logger.handlers:
        # If writing directly to Stackdriver, send a structured message.
        if isinstance(handler, StructuredAppEngineHandler):
            handler.setFormatter(StructuredLogFormatter())
        # Otherwise, the default stream handler requires a string.
        else:
            handler.setFormatter(
                logging.Formatter(
                    "(%(region)s) %(module)s/%(funcName)s : %(message)s"
                )
            )

    # Export gunicorn errors using the same handlers as other logs, so that
    # they go to Stackdriver in production.
    gunicorn_logger = logging.getLogger("gunicorn.error")
    gunicorn_logger.handlers = logger.handlers

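# `StructuredLogFormatter` is likewise not shown. A minimal sketch of what it
# might look like (hypothetical; field names are assumptions): emit the record
# as a JSON object, carrying the region set by region_record_factory alongside
# the message instead of prefixing it into a plain string.
import json
import logging

class StructuredLogFormatter(logging.Formatter):
    def format(self, record):
        return json.dumps({
            "message": record.getMessage(),
            "region": getattr(record, "region", None),
            "module": record.module,
            "funcName": record.funcName,
        })
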
def setup_stackdriver(log_level, name, log_format, excluded_loggers=None):
    try:
        from google.cloud.logging import Client
        from google.cloud.logging import handlers as google_logging_handlers
        from google.cloud.logging.handlers.handlers import \
            EXCLUDED_LOGGER_DEFAULTS, \
            CloudLoggingHandler
    except ImportError:
        raise ValueError("google-cloud-logging is not properly installed")

    if not excluded_loggers:
        excluded_loggers = EXCLUDED_LOGGER_DEFAULTS
    client = Client()
    # the docstring of CloudLoggingHandler points to client instead of Client
    # noinspection PyTypeChecker
    handler = CloudLoggingHandler(client, name)
    handler.setFormatter(logging.Formatter(log_format, None, "%"))
    google_logging_handlers.setup_logging(
        handler, log_level=log_level, excluded_loggers=excluded_loggers
    )

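# Example usage (log name and format string are hypothetical): route
# INFO-and-above records for the whole process to Stackdriver.
import logging

setup_stackdriver(logging.INFO, "my-service", "%(name)s %(levelname)s %(message)s")
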
def log_handler(self, log_name):
    try:
        from google.cloud.logging.handlers.handlers import CloudLoggingHandler
        from google.cloud.logging.resource import Resource
        from google.cloud.logging import Client
    except ImportError:
        return super(ComputeEngine, self).log_handler(log_name)

    instance_id = self.get_from_metadata('instance/id')
    zone = self.get_from_metadata('instance/zone')
    if not instance_id or not zone:
        return super(ComputeEngine, self).log_handler(log_name)

    resource = Resource(
        type='gce_instance',
        labels={
            'project_id': self.project_id,
            'instance_id': instance_id,
            'zone': zone.split('/')[-1],
        },
    )
    client = Client(_use_grpc=False)
    return CloudLoggingHandler(client, name=log_name, resource=resource)

# Copyright 2018 Google Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from google.cloud.logging import Client

from core.app_data import SA_DATA, SA_FILE

if SA_DATA.get('private_key', ''):
    client = Client.from_service_account_json(SA_FILE)
else:
    client = Client()

logger_name = 'crmintapplogger'
logger = client.logger(logger_name)

def add_stack_driver_support(log):
    client = Client()
    handler = client.get_default_handler()
    handler.setFormatter(StackdriverJsonFormatter())
    log.addHandler(handler)

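# `StackdriverJsonFormatter` is referenced above but not defined. A minimal
# sketch of what it might look like (hypothetical; field names are
# assumptions): serialize each record to JSON with the `severity` field
# Stackdriver uses for log levels.
import json
import logging

class StackdriverJsonFormatter(logging.Formatter):
    def format(self, record):
        return json.dumps({
            "severity": record.levelname,
            "message": record.getMessage(),
            "logger": record.name,
        })
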
# Copyright 2018 Google Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from google.appengine.api import app_identity
from google.cloud.logging import Client

project_id = app_identity.get_application_id()
logger_name = 'crmintapplogger'
logger = Client(project=project_id).logger(logger_name)

import responder
from sklearn.datasets import load_iris
from google.cloud.logging import Client

client = Client.from_service_account_json("./service_account.json")
client.setup_logging()

app = responder.API()


@app.route("/")
async def index(request, response):
    import logging

    logging.info('================= Hello Logging =================')
    data = load_iris()
    response.text = str(data['DESCR'])

def get_handler(logName):
    handler = CloudLoggingHandler(Client(), logName)
    handler.setFormatter(jsonlogger.JsonFormatter())
    return handler

def test_list_entries(self):
    from google.cloud.logging import DESCENDING
    from google.cloud.logging import Client
    from google.cloud.logging import Logger
    from google.cloud.logging import ProtobufEntry
    from google.cloud.logging import StructEntry

    PROJECT1 = "PROJECT1"
    PROJECT1_PATH = f"projects/{PROJECT1}"
    PROJECT2 = "PROJECT2"
    PROJECT2_PATH = f"projects/{PROJECT2}"
    NOW, TIMESTAMP = self._make_timestamp()
    IID1 = "IID1"
    IID2 = "IID2"
    PAYLOAD = {"message": "MESSAGE", "weather": "partly cloudy"}
    PROTO_PAYLOAD = PAYLOAD.copy()
    PROTO_PAYLOAD["@type"] = "type.googleapis.com/testing.example"
    TOKEN = "TOKEN"
    PAGE_SIZE = 42
    SENT = {
        "resourceNames": [PROJECT1_PATH, PROJECT2_PATH],
        "filter": self.FILTER,
        "orderBy": DESCENDING,
        "pageSize": PAGE_SIZE,
        "pageToken": TOKEN,
    }
    RETURNED = {
        "entries": [
            {
                "jsonPayload": PAYLOAD,
                "insertId": IID1,
                "resource": {"type": "global"},
                "timestamp": TIMESTAMP,
                "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
            },
            {
                "protoPayload": PROTO_PAYLOAD,
                "insertId": IID2,
                "resource": {"type": "global"},
                "timestamp": TIMESTAMP,
                "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
            },
        ]
    }
    client = Client(
        project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False
    )
    client._connection = _Connection(RETURNED)
    api = self._make_one(client)

    iterator = api.list_entries(
        resource_names=[PROJECT1_PATH, PROJECT2_PATH],
        filter_=self.FILTER,
        order_by=DESCENDING,
        page_size=PAGE_SIZE,
        page_token=TOKEN,
    )
    entries = list(iterator)

    # Check the entries returned.
    self.assertEqual(len(entries), 2)
    entry1 = entries[0]
    self.assertIsInstance(entry1, StructEntry)
    self.assertEqual(entry1.payload, PAYLOAD)
    self.assertIsInstance(entry1.logger, Logger)
    self.assertEqual(entry1.logger.name, self.LOGGER_NAME)
    self.assertEqual(entry1.insert_id, IID1)
    self.assertEqual(entry1.timestamp, NOW)
    self.assertIsNone(entry1.labels)
    self.assertIsNone(entry1.severity)
    self.assertIsNone(entry1.http_request)

    entry2 = entries[1]
    self.assertIsInstance(entry2, ProtobufEntry)
    self.assertEqual(entry2.payload, PROTO_PAYLOAD)
    self.assertIsInstance(entry2.logger, Logger)
    self.assertEqual(entry2.logger.name, self.LOGGER_NAME)
    self.assertEqual(entry2.insert_id, IID2)
    self.assertEqual(entry2.timestamp, NOW)
    self.assertIsNone(entry2.labels)
    self.assertIsNone(entry2.severity)
    self.assertIsNone(entry2.http_request)

    called_with = client._connection._called_with
    expected_path = "/%s" % (self.LIST_ENTRIES_PATH,)
    self.assertEqual(
        called_with, {"method": "POST", "path": expected_path, "data": SENT}
    )

def test_list_entries_with_limits(self):
    from google.cloud.logging import Client
    from google.cloud.logging import TextEntry
    from google.cloud.logging import Logger

    NOW, TIMESTAMP = self._make_timestamp()
    IID = "IID"
    IID1 = "IID1"
    IID2 = "IID2"
    TEXT = "TEXT"
    SENT = {"resourceNames": [self.PROJECT_PATH]}
    PAYLOAD = {"message": "MESSAGE", "weather": "partly cloudy"}
    PROTO_PAYLOAD = PAYLOAD.copy()
    PROTO_PAYLOAD["@type"] = "type.googleapis.com/testing.example"
    RETURNED = {
        "entries": [
            {
                "textPayload": TEXT,
                "insertId": IID,
                "resource": {"type": "global"},
                "timestamp": TIMESTAMP,
                "logName": f"projects/{self.PROJECT}/logs/{self.LOGGER_NAME}",
            },
            {
                "jsonPayload": PAYLOAD,
                "insertId": IID1,
                "resource": {"type": "global"},
                "timestamp": TIMESTAMP,
                "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
            },
            {
                "protoPayload": PROTO_PAYLOAD,
                "insertId": IID2,
                "resource": {"type": "global"},
                "timestamp": TIMESTAMP,
                "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
            },
        ],
    }
    client = Client(
        project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False
    )

    # try with negative max_results
    with self.assertRaises(ValueError):
        client._connection = _Connection(RETURNED)
        api = self._make_one(client)
        empty = list(api.list_entries([self.PROJECT_PATH], max_results=-1))

    # try with max_results of 0
    client._connection = _Connection(RETURNED)
    api = self._make_one(client)
    empty = list(api.list_entries([self.PROJECT_PATH], max_results=0))
    self.assertEqual(empty, [])

    # try with single result
    client._connection = _Connection(RETURNED)
    api = self._make_one(client)
    iterator = api.list_entries([self.PROJECT_PATH], max_results=1)
    entries = list(iterator)

    # check the entries returned.
    self.assertEqual(len(entries), 1)
    entry = entries[0]
    self.assertIsInstance(entry, TextEntry)
    self.assertEqual(entry.payload, TEXT)
    self.assertIsInstance(entry.logger, Logger)
    self.assertEqual(entry.logger.name, self.LOGGER_NAME)
    self.assertEqual(entry.insert_id, IID)
    self.assertEqual(entry.timestamp, NOW)
    self.assertIsNone(entry.labels)
    self.assertIsNone(entry.severity)
    self.assertIsNone(entry.http_request)

    called_with = client._connection._called_with
    expected_path = "/%s" % (self.LIST_ENTRIES_PATH,)
    self.assertEqual(
        called_with, {"method": "POST", "path": expected_path, "data": SENT}
    )