def setup_logger() -> object:
    """Set up the main application logger.

    The backend is chosen from ``app_config['app_log_factory']``:

    * ``'gcp'`` — attach a Cloud Logging handler to the root logger
      (falls back to ``'file'`` when GCP credentials are missing);
    * ``'file'`` — load the dict-config and return the ``'app'`` logger;
    * anything else — load the dict-config and return the root logger.

    :returns: the configured :class:`logging.Logger`
    """
    # Setup uncaught exception handler
    default_exception_handler()

    if app_config['app_log_factory'] == 'gcp' and os.getenv(
            'GOOGLE_APPLICATION_CREDENTIALS') is None:
        # Prevent set up GCP logging without credentials ENV variable
        app_config['app_log_factory'] = 'file'
        logging.error(
            'GOOGLE_APPLICATION_CREDENTIALS ENV variable is missing, logging set to file output'
        )

    if app_config['app_log_factory'] == 'gcp':
        # Disable whole cherrypy console logging
        cherrypy.log.screen = False
        logging.getLogger("cherrypy").propagate = False

        # Connect GCP logging to default Python logger
        logger = logging.getLogger()

        # Remove original log handlers.  Iterate over a *copy* of the
        # handler list: removing items from the list being iterated
        # skips every other handler and leaves some attached.
        for handler in list(logger.handlers):
            logger.removeHandler(handler)

        # Setup Google Cloud Logging
        client = google.cloud.logging.Client()

        # Setup CloudLoggingHandler(logging.StreamHandler) handler
        # explicitly with Custom GCP Formatter
        handler = CloudLoggingHandler(client, labels={
            'app_name': app_config['app_name'],
            'app_version': app_config['app_version'],
            'app_environment': app_config['app_env']
        })
        handler.setFormatter(CustomGCPFormatter())

        # Setup Python logger explicitly with custom handler
        setup_logging(handler)
    elif app_config['app_log_factory'] == 'file':
        # Load log configuration
        logging.config.dictConfig(LOG_CONFIG)

        # Custom app logger
        logger = logging.getLogger('app')
    else:
        # Load log configuration
        logging.config.dictConfig(LOG_CONFIG)

        # Custom app logger
        logger = logging.getLogger()

    return logger
def add_cloud_logging_handler(logger: Logger):
    """Attach a JSON-formatted Cloud Logging handler to *logger*.

    Records are shipped to the ``clouddq`` log, labelled with the
    application name and release id.
    """
    gcp_client = google.cloud.logging.Client()
    cloud_handler = CloudLoggingHandler(
        client=gcp_client,
        name="clouddq",
        labels={"name": APP_NAME, "releaseId": APP_VERSION},
    )
    cloud_handler.setFormatter(JSONFormatter())
    logger.addHandler(cloud_handler)
def makeLogger(name):
    """Create a DEBUG-level logger that ships INFO+ records to Cloud
    Logging and echoes everything to stdout."""
    log = logging.getLogger(name)
    log.setLevel(logging.DEBUG)

    # Cloud handler only forwards INFO and above.
    cloud_handler = CloudLoggingHandler(google.cloud.logging.Client(), name=APP_NAME)
    cloud_handler.setLevel(logging.INFO)
    log.addHandler(cloud_handler)

    # Console handler sees everything.
    console_handler = logging.StreamHandler(sys.stdout)
    console_handler.setLevel(logging.DEBUG)
    log.addHandler(console_handler)
    return log
def get_handler(logName):
    """Build a :class:`CloudLoggingHandler` for *logName* that formats
    records as JSON and ships them synchronously.

    When running on a GCE instance the handler is bound to that
    instance's monitored resource; elsewhere the lookup is skipped as a
    best effort.
    """
    kwargs = {}
    try:
        kwargs['resource'] = get_log_resource_for_gce_instance()
    except Exception:
        # Probably not on GCE ;-)
        # Narrowed from a bare ``except`` so KeyboardInterrupt/SystemExit
        # are no longer swallowed here.
        pass

    # TODO: When we launched celery workers using prefork (multiprocessing: separate process per worker)
    # we found that from google.cloud.logging.handlers.transports.background_thread.BackgroundThreadTransport
    # stopped transmitting logs to GCP. We're not sure why, but as a workaround we switched to using
    # a SyncTransport sub-class.
    handler = CloudLoggingHandler(Client(), logName,
                                  transport=StructlogTransport,
                                  **kwargs)
    handler.setFormatter(jsonlogger.JsonFormatter())
    return handler
def init_gcloud_log(project_id, logger_name):
    """Initialise process-wide logging.

    Dev/test environments log to a local ``<logger_name>.log`` file at
    DEBUG level; every other environment wires the root logger to
    Google Cloud Logging at INFO level.
    """
    log_format = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
    if IS_DEV_ENV or IS_TEST_ENV:
        logging.basicConfig(filename=logger_name + u'.log',
                            level=logging.DEBUG,
                            format=log_format)
    else:
        # see: https://googlecloudplatform.github.io/google-cloud-python/latest/logging-usage.html#cloud-logging-handler
        # and https://github.com/GoogleCloudPlatform/getting-started-python/blob/master/6-pubsub/bookshelf/__init__.py#L40
        gcp_client = google.cloud.logging.Client(project_id)
        # NB: we should use AppEngineHandler for server at next google.cloud API update
        # https://googlecloudplatform.github.io/google-cloud-python/latest/logging-handlers-app-engine.html
        gcp_handler = CloudLoggingHandler(gcp_client, logger_name)
        gcp_handler.setFormatter(logging.Formatter(log_format))
        setup_logging(gcp_handler)
        logging.getLogger().setLevel(logging.INFO)
        # very verbose package
        logging.getLogger("readability.readability").setLevel(logging.WARNING)
def get_logger(name, fmt='%(asctime)-15s %(levelname)s %(message)s'):
    """
    Creates a Logger that logs to stdout, Cloud Logging and a monthly
    rotating file under the user's home directory.

    :param name: name used for the log file
    :param fmt: format string for log messages
    :return: Logger
    """
    home_dir = os.path.expanduser('~')

    stdout_handler = logging.StreamHandler(sys.stdout)
    stdout_handler.setLevel(logging.DEBUG)
    stdout_handler.setFormatter(logging.Formatter(fmt))

    rotating_handler = logging.handlers.TimedRotatingFileHandler(
        os.path.join(home_dir, 'bq-pipeline-{}.log'.format(name)),
        when='D',
        interval=30)
    rotating_handler.setLevel(logging.DEBUG)
    rotating_handler.setFormatter(logging.Formatter(fmt))

    gcp_handler = CloudLoggingHandler(LoggingClient(), name='bq-analyst-cron')

    # NOTE(review): the logger is keyed on __name__, not on *name* —
    # repeated calls re-configure the same module logger and stack up
    # duplicate handlers; confirm this is intentional.
    log = logging.getLogger(__name__)
    log.setLevel(logging.INFO)
    log.addHandler(stdout_handler)
    log.addHandler(gcp_handler)
    log.addHandler(rotating_handler)
    return log
def _ParseLoggingArguments(self, options):
    """Parses the --logging flag.

    Args:
      options (argparse.Namespace): the parsed command-line arguments.

    Raises:
      errors.BadConfigOption: if the options are invalid.
    """
    # Console logging at INFO is always configured, regardless of flags.
    logging.basicConfig(
        level=logging.INFO,
        format='%(asctime)s - %(levelname)s - %(name)s - %(message)s')
    self._logger = logging.getLogger(self.__class__.__name__)

    if 'stackdriver' in options.logging:
        # Stackdriver logging requires service-account credentials; the
        # project id comes from the parsed GCS settings when present.
        if not self._gcs_settings:
            raise errors.BadConfigOption(
                'Please provide a valid --gs_keyfile to enable StackDriver '
                'logging')
        gcp_credentials = service_account.Credentials.from_service_account_file(
            options.gs_keyfile)
        project_id = self._gcs_settings.get('project_id', None)
        gcp_logging_client = google_logging.Client(
            project=project_id, credentials=gcp_credentials)
        self._stackdriver_handler = CloudLoggingHandler(gcp_logging_client,
                                                        name='GiftStick')
        self._logger.addHandler(self._stackdriver_handler)

    if options.log_progress:
        # Progress logging reuses gcp_logging_client, which only exists
        # when the 'stackdriver' branch above ran — hence the guard.
        if 'stackdriver' not in options.logging:
            raise errors.BadConfigOption(
                'Progress logging requires Stackdriver logging to be enabled'
            )
        self._progress_logger = google_logging.logger.Logger(
            'GiftStick', gcp_logging_client)
def setup_logging(name, log_level=20):
    """Configure root logging and, when GCP credentials are available,
    ship records to Cloud Logging tagged as a generic task.

    :param name: Cloud Logging log name
    :param log_level: numeric level for the root logger (default 20 = INFO)
    :return: a fresh task id (UUID4 string) identifying this run
    """
    task_id = str(uuid4())
    logging.getLogger().setLevel(log_level)

    # Without credentials there is nothing to connect to — local only.
    if os.environ.get('GOOGLE_APPLICATION_CREDENTIALS') is None:
        return task_id

    task_resource = Resource(
        type="generic_task",
        labels={
            "location": "vast.ai/{}".format(os.environ.get('VAST_CONTAINERLABEL', '')),
            "task_id": task_id,
            "namespace": "samplernn-pytorch",
            "job": "gen.py",
        },
    )
    # Instantiates a client
    gcp_client = google.cloud.logging.Client()
    cloud_handler = CloudLoggingHandler(gcp_client, name, resource=task_resource)
    google.cloud.logging.handlers.setup_logging(cloud_handler, log_level=log_level)
    return task_id
def get_handler(self):
    """Build a CloudLoggingHandler for this policy's log group, bound to
    the default project's monitored resource."""
    # gcp has three independent implementation of api bindings for python.
    # The one used by logging is not yet supported by our test recording.
    # TODO drop these grpc variants for the REST versions, and we can drop
    # protobuf/grpc deps, and also so we can record tests..
    # gcp has three different python sdks all independently maintained .. hmmm...
    # and random monkey shims on top of those :-(
    from google.cloud.logging import Client as LogClient
    from google.cloud.logging.handlers import CloudLoggingHandler
    from google.cloud.logging.resource import Resource

    log_group = self.ctx.options.log_group
    # A trailing '*' means "append the policy name".
    if log_group.endswith('*'):
        log_group = "%s%s" % (log_group[:-1], self.ctx.policy.name)

    project_id = local_session(self.ctx.session_factory).get_default_project()
    return CloudLoggingHandler(
        LogClient(project_id),
        log_group,
        resource=Resource(type='project', labels={'project_id': project_id}))
def setup_logging_client(log_name: str):
    """
    This function will connect to gcloud and post all logging commands there
    as well as locally

    The "Log Name" can then be set to the name of the service as a filter
    https://console.cloud.google.com/logs?project=development-278003

    :param log_name: The custom name of the log file that will be used in gcloud
    :return:
    """
    key_path = "config/GCLOUD_LOGGING_SERVICE_KEY.json"
    if not os.path.exists(key_path):
        print("No GCLOUD_LOGGING_SERVICE_KEY detected, using native logging.")
        return

    # The GCLOUD_LOGGING_SERVICE_KEY exists in circle ci, and is passed
    # through to the service. There is one for each environment. eg. development:
    # console.cloud.google.com/iam-admin/serviceaccounts/details/104042617795891603364?project=development-278003
    os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = key_path

    # Instantiates a client and handler for logging with gcloud
    gcloud_client = google.cloud.logging.Client()
    gcloud_handler = CloudLoggingHandler(name=log_name, client=gcloud_client)
    logging.getLogger().setLevel(logging.INFO)  # defaults to WARN
    setup_logging(gcloud_handler)
    logging.debug("Logging connected to GCloud")
def __init__(self):
    """Initialise the bot: refresh the balance, configure logging
    (Cloud Logging when available, plus console and rotating file),
    and load the buy history from disk."""
    self._refresh_balance()

    # Cloud logging is optional — fall back silently if the google
    # client libraries are not installed.
    gcloud_logging = True
    try:
        import google.cloud.logging
        from google.cloud.logging.handlers import CloudLoggingHandler
    except ImportError:
        gcloud_logging = False

    handlers = []
    if gcloud_logging:
        # Ship INFO+ records to Google Cloud Logging.
        client = google.cloud.logging.Client()
        ghandler = CloudLoggingHandler(client)
        ghandler.setLevel(logging.INFO)
        handlers.append(ghandler)

    fmt = logging.Formatter("%(asctime)s [%(levelname)s] %(message)s")

    # Console: everything from DEBUG up.
    shandler = logging.StreamHandler()
    shandler.setFormatter(fmt)
    shandler.setLevel(logging.DEBUG)
    handlers.append(shandler)

    # Daily-rotated local file, two weeks of backups.
    fhandler = logging.handlers.TimedRotatingFileHandler("bot.log",
                                                         when="d",
                                                         interval=1,
                                                         backupCount=14)
    fhandler.setFormatter(fmt)
    fhandler.setLevel(logging.DEBUG)
    handlers.append(fhandler)

    logging.basicConfig(
        level=logging.DEBUG,
        format="%(levelname)s:%(message)s",
        handlers=handlers
    )

    # Create history.json on first run, then load it and keep only the
    # entries for pairs we currently trade.
    if not os.path.isfile("history.json"):
        self._save_history()
    with open('history.json', 'r') as fp:
        self.buy_history = json.load(fp)
    self.buy_history = {key: val for key, val in self.buy_history.items()
                        if key in self.coin_pairs}

    # Quieten chatty HTTP libraries.
    logging.getLogger("requests").setLevel(logging.WARNING)
    logging.getLogger("urllib3").setLevel(logging.WARNING)
    logging.info("Bot initialized")
def attach_log(self, spider):
    """Attach the StackDriver handler to the spider's logger."""
    gcp_client = google.cloud.logging.Client(project=self.project)
    stackdriver_handler = CloudLoggingHandler(gcp_client,
                                              name="scraper." + spider.name)
    spider.logger.logger.addHandler(stackdriver_handler)
    spider.logger.debug("StackDriver logging enabled.")
def setup_logger():
    """Return the 'cloudLogger' logger at DEBUG level with a Cloud
    Logging handler attached.

    # https://googleapis.dev/python/logging/latest/stdlib-usage.html
    """
    cloud_logger = logging.getLogger('cloudLogger')
    cloud_logger.setLevel(logging.DEBUG)
    cloud_logger.addHandler(CloudLoggingHandler(google.cloud.logging.Client()))
    return cloud_logger
def create_app(config, debug=False, testing=False, config_overrides=None):
    """Flask application factory for the bookshelf app.

    Configures GCP logging (skipped under testing), initialises the
    data model and OAuth2, registers the CRUD blueprint plus the
    logout/index routes, and installs a 500 error handler.

    :param config: object whose attributes seed ``app.config``
    :param debug: enables Flask debug mode
    :param testing: enables Flask testing mode (also disables GCP logging)
    :param config_overrides: optional mapping merged into ``app.config``
    :return: the configured :class:`flask.Flask` application
    """
    app = Flask(__name__)
    app.config.from_object(config)

    app.debug = debug
    app.testing = testing

    if config_overrides:
        app.config.update(config_overrides)

    # [START setup_logging]
    if not app.testing:
        client = google.cloud.logging.Client(app.config['PROJECT_ID'])
        handler = CloudLoggingHandler(client)
        # Attaches the handler to the root logger
        setup_logging(handler)
        logging.getLogger().setLevel(logging.INFO)
    # [END setup_logging]

    # Setup the data model.
    with app.app_context():
        model = get_model()
        model.init_app(app)

    # Initalize the OAuth2 helper.
    oauth2.init_app(
        app,
        scopes=['email', 'profile'],
        authorize_callback=_request_user_info)

    # Add a logout handler.
    @app.route('/logout')
    def logout():
        # Delete the user's profile and the credentials stored by oauth2.
        del session['profile']
        session.modified = True
        oauth2.storage.delete()
        return redirect(request.referrer or '/')

    # Register the Bookshelf CRUD blueprint.
    from .crud import crud
    app.register_blueprint(crud, url_prefix='/books')

    # Add a default root route.
    @app.route("/")
    def index():
        return redirect(url_for('crud.list'))

    # Add an error handler. This is useful for debugging the live application,
    # however, you should disable the output of the exception for production
    # applications.
    @app.errorhandler(500)
    def server_error(e):
        return """
        An internal error occurred: <pre>{}</pre>
        See logs for full stacktrace.
        """.format(e), 500

    return app
def configure(global_config, **settings):
    """Build the Pyramid application Configurator for idris.

    Optionally exports Google Cloud project/credential settings and
    wires Cloud Logging, then includes the add-on packages, sets up
    JWT-based authentication, and registers routes and static views.

    :param global_config: Paste global configuration (unused here)
    :param settings: deployment settings mapping ('idris.*' keys)
    :return: the populated :class:`pyramid.config.Configurator`
    """
    gcp_project = settings.get('idris.google_cloud_project')
    gcp_auth = settings.get('idris.google_application_credentials')
    if gcp_project and gcp_auth:
        # Export GCP settings so the client libraries pick them up.
        os.environ['GOOGLE_CLOUD_PROJECT'] = gcp_project
        os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = os.path.abspath(
            gcp_auth)
        if settings.get('idris.use_google_cloud_logging') == 'true':
            # On GAE the ambient credentials suffice; elsewhere load the
            # service-account file explicitly.
            if 'GAE_INSTANCE' in os.environ:
                client = google.cloud.logging.Client()
            else:
                client = google.cloud.logging.Client().from_service_account_json(
                    settings['idris.google_application_credentials'])
            handler = CloudLoggingHandler(client)
            logging.getLogger().setLevel(logging.INFO)
            setup_logging(handler)

    config = Configurator(settings=settings, root_factory=root_factory)
    config.add_tween('idris.token_tween_factory')

    # Third-party and internal packages.
    config.include('cornice')
    config.include('cornice_swagger')
    config.include('pyramid_chameleon')
    config.include('pyramid_jwt')
    config.include('idris.storage')
    config.include('idris.blob')
    config.include('idris.services.lookup')
    config.include('idris.services.cache')
    config.include('idris.services.download_counter')
    config.include('idris.services.auditlog')
    config.include('idris.apps.base')
    config.include('idris.apps.course')

    # JWT bearer-token authentication, 1 hour expiry.
    config.set_authorization_policy(ACLAuthorizationPolicy())
    config.set_jwt_authentication_policy(settings['idris.secret'],
                                         http_header='Authorization',
                                         auth_type='Bearer',
                                         expiration=3600,
                                         callback=add_role_principals)
    config.scan("idris.views")

    # Health checks, API redirect and static assets.
    config.add_route('liveness_check', '/_live')
    config.add_route('readiness_check', '/_ready')
    config.add_route('api_without_slash', '/api')
    config.add_view(lambda _, __: HTTPFound('/api/'),
                    route_name='api_without_slash')
    config.add_static_view('api', path='idris:static/dist/swagger')
    config.add_static_view('static', path='idris:static/dist/web')
    config.add_route('edit_without_slash', '/edit')
    config.add_view(lambda _, __: HTTPFound('/edit/'),
                    route_name='edit_without_slash')
    return config
def init_logger(name, is_debug=False):
    """Return an INFO-level logger; attach a Cloud Logging handler
    unless running in debug mode."""
    log = logging.getLogger(name)
    log.setLevel(logging.INFO)
    if not is_debug:
        # Production path: forward records to Google Cloud Logging.
        log.addHandler(CloudLoggingHandler(google.cloud.logging.Client()))
    return log
def set_stackdriver_client(client):
    """Sets stackdriver client"""
    stackdriver_client_name, stackdriver_client = client
    stackdriver_handler = CloudLoggingHandler(stackdriver_client,
                                              name=stackdriver_client_name)
    # Attach to every named logger; the root logger (empty name) is
    # deliberately skipped.
    for logger_name, logger in LOGGERS.items():
        if not logger_name:
            continue
        logger.addHandler(stackdriver_handler)
    # Stash the handler on the function itself for later access.
    set_stackdriver_client.stackdriver_handler = stackdriver_handler
def get_logger():
    """Return the application logger: Cloud-backed on App Engine,
    a plain stdlib logger otherwise."""
    if not os.getenv('GAE_APPLICATION', None):
        # Not running on GAE — native logging only.
        return l.getLogger("project-piss")

    gae_logger = l.getLogger("cloudLogger")
    gae_logger.setLevel(l.INFO)
    gae_logger.addHandler(CloudLoggingHandler(settings.client))
    return gae_logger
def configure_gcp_logger(self):
    """Create (once per logger name) a logger that also ships records to
    Google Cloud Logging; later calls return the cached logger."""
    if DiffgramLogger.logging_initialized.get(self.logger_name):
        # Already configured — hand back the existing logger.
        return logging.getLogger(self.logger_name)

    # Imports the Google Cloud client library
    from google.cloud import logging as gcp_logging
    from google.cloud.logging.handlers import CloudLoggingHandler

    fmt_str = '[%(asctime)s] %(levelname)s %(module)s.py @ line %(lineno)d: %(message)s'
    logging.basicConfig(level=logging.INFO, format=fmt_str)

    # Instantiates a client
    gcp_handler = CloudLoggingHandler(gcp_logging.Client(),
                                      name=self.logger_name)
    gcp_handler.setFormatter(logging.Formatter(fmt_str))

    logger = logging.getLogger(self.logger_name)
    logger.addHandler(gcp_handler)
    logger.info('Logger {} setup success.'.format(self.logger_name))
    DiffgramLogger.logging_initialized[self.logger_name] = True
    return logger
def logging_handler(client):
    """Sample: three ways to attach Cloud Logging handlers to a stdlib
    logger, delimited by region tags for documentation extraction.

    :param client: an initialised ``google.cloud.logging.Client``
    """
    # [START create_default_handler]
    import logging
    handler = client.get_default_handler()
    cloud_logger = logging.getLogger('cloudLogger')
    cloud_logger.setLevel(logging.INFO)
    cloud_logger.addHandler(handler)
    cloud_logger.error('bad news')
    # [END create_default_handler]

    # [START create_cloud_handler]
    from google.cloud.logging.handlers import CloudLoggingHandler
    handler = CloudLoggingHandler(client)
    cloud_logger = logging.getLogger('cloudLogger')
    cloud_logger.setLevel(logging.INFO)
    cloud_logger.addHandler(handler)
    cloud_logger.error('bad news')
    # [END create_cloud_handler]

    # [START create_named_handler]
    handler = CloudLoggingHandler(client, name='mycustomlog')
def logging_handler(client):
    """Sample: attaching default, cloud, and named Cloud Logging
    handlers, delimited by region tags for documentation extraction.

    :param client: an initialised ``google.cloud.logging.Client``
    """
    # [START create_default_handler]
    import logging

    handler = client.get_default_handler()
    cloud_logger = logging.getLogger("cloudLogger")
    cloud_logger.setLevel(logging.INFO)
    cloud_logger.addHandler(handler)
    cloud_logger.error("bad news")
    # [END create_default_handler]

    # [START create_cloud_handler]
    from google.cloud.logging.handlers import CloudLoggingHandler
    from google.cloud.logging_v2.handlers import setup_logging

    handler = CloudLoggingHandler(client)
    setup_logging(handler)
    # [END create_cloud_handler]

    # [START create_named_handler]
    handler = CloudLoggingHandler(client, name="mycustomlog")
def ProductionLogger():
    """Return the 'cloudLogger' logger wired to Google Cloud Logging
    at INFO level."""
    import google.cloud.logging
    from google.cloud.logging.handlers import CloudLoggingHandler

    log = logging.getLogger("cloudLogger")
    log.setLevel(logging.INFO)
    log.addHandler(CloudLoggingHandler(google.cloud.logging.Client()))
    return log
def get_default_handler(self):
    """Return the default logging handler based on the local environment.

    :rtype: :class:`logging.Handler`
    :returns: The default log handler based on the environment
    """
    env = os.environ
    # App Engine flexible environment (VM or flex runtime).
    if _APPENGINE_FLEXIBLE_ENV_VM in env or _APPENGINE_FLEXIBLE_ENV_FLEX in env:
        return AppEngineHandler()
    # Kubernetes / Container Engine.
    if _CONTAINER_ENGINE_ENV in env:
        return ContainerEngineHandler()
    # Everything else: plain Cloud Logging handler backed by this client.
    return CloudLoggingHandler(self)
def get_handler(self):
    """CloudLoggingHandler labelled with the current policy name and
    resource type, bound to the default project's monitored resource."""
    # TODO drop these grpc variants for the REST versions, and we can drop
    # protobuf/grpc deps, and also so we can record tests.
    log_group = self.get_log_group()
    project_id = local_session(self.ctx.session_factory).get_default_project()
    policy_labels = {
        'policy': self.ctx.policy.name,
        'resource': self.ctx.policy.resource_type}
    return CloudLoggingHandler(
        LogClient(project_id),
        log_group,
        labels=policy_labels,
        resource=Resource(type='project',
                          labels={'project_id': project_id}))
def get_default_handler(self):
    """Return the default logging handler based on the local environment.

    :rtype: :class:`logging.Handler`
    :returns: The default log handler based on the environment
    """
    gke_cluster_name = retrieve_metadata_server(_GKE_CLUSTER_NAME)

    # App Engine flexible environment (VM or flex runtime).
    if (_APPENGINE_FLEXIBLE_ENV_VM in os.environ
            or _APPENGINE_FLEXIBLE_ENV_FLEX in os.environ):
        return AppEngineHandler(self)
    # GKE is detected via the metadata server, not an env variable.
    if gke_cluster_name is not None:
        return ContainerEngineHandler()
    return CloudLoggingHandler(self)
def setup_gce_logging(gunicorn_access_logger, gunicorn_error_logger):  # pragma: no cover
    """Wire the gunicorn access/error loggers to Google Cloud Logging,
    unless the app is pointed at a local or test database."""
    db_uri_prefix = application.config['SQLALCHEMY_DATABASE_URI'][:22]
    if db_uri_prefix in ('postgresql://localhost', 'db://localhost/test_db'):
        # Local development / test run — keep native logging only.
        return

    import google.cloud.logging
    from google.cloud.logging.handlers import CloudLoggingHandler, setup_logging

    handler = CloudLoggingHandler(google.cloud.logging.Client(), name=get_env())
    setup_logging(handler)
    gunicorn_access_logger.addHandler(handler)
    gunicorn_error_logger.addHandler(handler)
def init_gcp_logger():
    """Return the cached GCP-backed root logger, creating and caching it
    on the first call."""
    import logging
    from google.cloud.logging.handlers import CloudLoggingHandler, setup_logging
    import google.cloud.logging as gcp_logging

    global GCP_LOGGER
    if GCP_LOGGER:
        return GCP_LOGGER

    cloud_handler = CloudLoggingHandler(gcp_logging.Client())
    root_logger = logging.getLogger()
    root_logger.setLevel(logging.INFO)
    root_logger.addHandler(cloud_handler)
    GCP_LOGGER = root_logger
    return GCP_LOGGER
def get_handler(self):
    """CloudLoggingHandler for this policy's log group, bound to the
    default project's monitored resource."""
    # gcp has three independent implementation of api bindings for python.
    # The one used by logging is not yet supported by our test recording.
    log_group = self.ctx.options.log_group
    # A trailing '*' means "append the policy name".
    if log_group.endswith('*'):
        log_group = "%s%s" % (log_group[:-1], self.ctx.policy.name)

    project_id = local_session(self.ctx.session_factory).get_default_project()
    return CloudLoggingHandler(
        LogClient(project_id),
        log_group,
        resource=Resource(type='project',
                          labels={'project_id': project_id}))
def configure_remote_logging(log_name):
    """Attach a Cloud Logging handler, tagged with this GCE instance's
    monitored resource, to the root logger."""
    # Avoid deadlock situation with subprocess. See:
    # https://github.com/GoogleCloudPlatform/google-cloud-python/issues/4992
    # and https://github.com/grpc/grpc/issues/14056#issuecomment-370962039
    os.environ["GRPC_ENABLE_FORK_SUPPORT"] = "0"

    # Set up cloud logging
    from google.cloud import logging as cloud_logging
    from google.cloud.logging.handlers import CloudLoggingHandler
    from google.cloud.logging.resource import Resource

    logging_client = cloud_logging.Client()
    log_resource = Resource("gce_instance",
                            {"instance_id": get_instance_id()})
    log.info("Configuring remote logging to %s with log name '%s'",
             str(log_resource), log_name)
    cloud_handler = CloudLoggingHandler(logging_client,
                                        name=log_name,
                                        resource=log_resource)
    log.getLogger().addHandler(cloud_handler)
def get_default_handler(self, **kw):
    """Return the default logging handler based on the local environment.

    :type kw: dict
    :param kw: keyword args passed to handler constructor

    :rtype: :class:`logging.Handler`
    :returns: The default log handler based on the environment
    """
    gke_cluster_name = retrieve_metadata_server(_GKE_CLUSTER_NAME)

    # App Engine flexible environment or standard instance.
    if (_APPENGINE_FLEXIBLE_ENV_VM in os.environ
            or _APPENGINE_INSTANCE_ID in os.environ):
        return AppEngineHandler(self, **kw)
    # GKE is detected via the metadata server, not an env variable.
    if gke_cluster_name is not None:
        return ContainerEngineHandler(**kw)
    return CloudLoggingHandler(self, **kw)
def getLogger(name, level=__loglevel):
    """Return a logger wired to Google Cloud Logging when GRAYLOG_SERVER
    is configured, otherwise to stdout; both paths attach the request
    context filter."""
    formatter = logging.Formatter(
        '%(nasip)s %(nasid)s %(siteid)s %(msgtype)s %(message)s',
        '%a, %d %b %Y %H:%M:%S', )
    logger = logging.getLogger(name)
    logger.setLevel(level)
    logger.addFilter(log_ctx)

    if GRAYLOG_SERVER:
        cloud_handler = CloudLoggingHandler(google.cloud.logging.Client(),
                                            name="radiusd")
        logger.addHandler(cloud_handler)
        setup_logging(cloud_handler)
        cloud_handler.addFilter(log_ctx)
        cloud_handler.setFormatter(formatter)
    else:
        console_handler = logging.StreamHandler(sys.stdout)
        console_handler.setFormatter(formatter)
        logger.addHandler(console_handler)
    return logger