from logging.config import DictConfigurator


def setup_logging():
    # raven_client, rotating_log_file_handler, and _aiohttp_logger_config are
    # defined elsewhere in the same module.
    configurator = DictConfigurator({
        'version': 1,
        'disable_existing_loggers': True,
        'formatters': {
            'sentry': {
                'format': '[%(asctime)s][%(levelname)s] %(name)s '
                          '%(filename)s:%(funcName)s:%(lineno)d | %(message)s',
            },
            'default': {
                'format': '[%(levelname)s] [%(asctime)s] [%(module)s] || %(message)s'
            },
        },
        'handlers': {
            'stdout': {
                'level': 'DEBUG',
                'class': 'logging.StreamHandler',
                'formatter': 'default'
            },
            'sentry': {
                'level': 'ERROR',
                'class': 'raven.handlers.logging.SentryHandler',
                'client': raven_client,
                'formatter': 'sentry'
            },
            'aiohttp_log_file': rotating_log_file_handler("aiohttp", "default"),
            'memority_log_file': rotating_log_file_handler("memority", "default"),
            'monitoring_log_file': rotating_log_file_handler("monitoring", "default"),
        },
        'loggers': {
            'aiohttp.access': _aiohttp_logger_config,
            'aiohttp.client': _aiohttp_logger_config,
            'aiohttp.internal': _aiohttp_logger_config,
            'aiohttp.server': _aiohttp_logger_config,
            'aiohttp.web': _aiohttp_logger_config,
            'aiohttp.websocket': _aiohttp_logger_config,
            'memority': {
                'handlers': ['stdout', 'memority_log_file', 'sentry'],
                'level': 'INFO',
            },
            'monitoring': {
                'handlers': ['stdout', 'monitoring_log_file', 'sentry'],
                'level': 'INFO',
            },
            'apscheduler': {
                'handlers': ['stdout', 'monitoring_log_file', 'sentry'],
                'level': 'INFO',
            },
        }
    })
    configurator.configure()
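The rotating_log_file_handler helper used above is not shown. A minimal sketch of what it could look like, assuming a log directory, rotation size, and backup count that are not in the original:

# Hypothetical sketch: LOG_DIR, maxBytes, and backupCount are assumptions.
import os

LOG_DIR = os.path.expanduser('~/.memority/logs')  # assumed location


def rotating_log_file_handler(name, formatter):
    # Build a dictConfig handler entry backed by RotatingFileHandler.
    return {
        'level': 'DEBUG',
        'class': 'logging.handlers.RotatingFileHandler',
        'filename': os.path.join(LOG_DIR, '{}.log'.format(name)),
        'maxBytes': 10 * 1024 * 1024,  # rotate at 10 MiB (assumed)
        'backupCount': 5,              # keep five rotated files (assumed)
        'formatter': formatter,
    }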
def get_task_log_file_handler(self, log_file):
    # Assumes `import logging` and `from logging.config import DictConfigurator`
    # at module level.
    if not self.task_log_file_formatter:
        config = self.get_dbnd_logging_config()
        configurator = DictConfigurator(config)
        file_formatter_config = configurator.config.get("formatters").get(
            self.file_formatter_name
        )
        self.task_log_file_formatter = configurator.configure_formatter(
            file_formatter_config
        )

    # "formatter": log_settings.file_formatter,
    log_file = str(log_file)
    setup_log_file(log_file)
    handler = logging.FileHandler(filename=log_file, encoding="utf-8")
    handler.setFormatter(self.task_log_file_formatter)
    handler.setLevel(self.level)
    return handler
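setup_log_file is defined elsewhere in dbnd; a plausible minimal version, assuming its only job is to make sure the target directory exists before FileHandler opens the file:

import os


def setup_log_file(log_file):
    # Assumed behavior only; the real dbnd helper may do more.
    os.makedirs(os.path.dirname(log_file) or '.', exist_ok=True)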
def init():
    from . import websockets  # noqa
    from .plugins.base import PluginManager

    DictConfigurator(config.LOGGING).configure()
    app.plugin_manager = PluginManager()
    app.plugin_manager.init_plugins()
    socketio.run(
        app,
        debug=config.DEBUG,
        use_reloader=config.DEBUG,
        host='0.0.0.0',
        port=config.SERVER_PORT,
    )
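DictConfigurator(config.LOGGING).configure() is equivalent to calling logging.config.dictConfig(config.LOGGING), so config.LOGGING must follow the standard dictConfig schema. An illustrative minimal value (contents assumed, not this project's actual config):

# Sketch of a dictConfig-style LOGGING value; all entries are assumptions.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'default': {'format': '[%(levelname)s] %(name)s: %(message)s'},
    },
    'handlers': {
        'console': {
            'class': 'logging.StreamHandler',
            'formatter': 'default',
        },
    },
    'root': {'handlers': ['console'], 'level': 'INFO'},
}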
from logging.config import DictConfigurator

from airflow.utils.email import send_email
from airflow.utils.log.logging_mixin import set_context
from airflow.utils.module_loading import import_string
from airflow.utils.net import get_hostname
from airflow.utils.session import provide_session
from airflow.utils.state import PokeState, State
from airflow.utils.timeout import timeout

config = import_string(LOGGING_CLASS_PATH)
handler_config = config['handlers']['task']
try:
    formatter_config = config['formatters'][handler_config['formatter']]
except Exception as err:  # pylint: disable=broad-except
    formatter_config = None
    print(err)
dictConfigurator = DictConfigurator(config)


class SensorWork:
    """
    This class stores a sensor work with decoded context value. It is only used
    inside the smart sensor. Create a sensor work based on a sensor instance record.

    A sensor work object has the following attributes:
    `dag_id`: sensor_instance dag_id.
    `task_id`: sensor_instance task_id.
    `execution_date`: sensor_instance execution_date.
    `try_number`: sensor_instance try_number.
    `poke_context`: Decoded poke_context for the sensor task.
    `execution_context`: Decoded execution_context.
    `hashcode`: This is the signature of the poking job.
    `operator`: The sensor operator class.
    """
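Returning to the module-level configuration above: as in the dbnd example earlier, the captured formatter_config can be turned into a concrete logging.Formatter through the configurator. A hedged sketch (the task_formatter name is illustrative, not Airflow's):

# Sketch: materialize the 'task' handler's formatter from its dict config.
if formatter_config is not None:
    task_formatter = dictConfigurator.configure_formatter(formatter_config)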