def test_no_task(self):
    """TaskFormatter outside any task fills task_name/task_id with '???'."""

    class Record(object):
        # Minimal stand-in for logging.LogRecord carrying only the
        # attributes logging.Formatter.format() reads.
        msg = "hello world"
        levelname = "info"
        exc_text = exc_info = None
        # Fix: Formatter.format() also reads record.stack_info on
        # Python 3; without this attribute the fake record raises
        # AttributeError before the assertions are reached.
        stack_info = None

        def getMessage(self):
            return self.msg

    record = Record()
    x = TaskFormatter()
    x.format(record)
    # The formatter must have stamped placeholder task metadata on the record.
    self.assertEqual(record.task_name, "???")
    self.assertEqual(record.task_id, "???")
def test_no_task(self):
    """Formatting a record with no active task yields '???' placeholders."""

    class FakeRecord(object):
        # Bare-bones LogRecord substitute: only what Formatter.format()
        # touches is defined.
        msg = 'hello world'
        levelname = 'info'
        exc_text = exc_info = None
        stack_info = None

        def getMessage(self):
            return self.msg

    fake = FakeRecord()
    formatter = TaskFormatter()
    formatter.format(fake)
    # TaskFormatter should have attached placeholder task metadata.
    self.assertEqual(fake.task_name, '???')
    self.assertEqual(fake.task_id, '???')
def test_no_task(self):
    """Records formatted outside a task context get '???' placeholders."""

    class StubRecord:
        # Just enough of a LogRecord for Formatter.format() to run.
        msg = 'hello world'
        levelname = 'info'
        exc_text = exc_info = None
        stack_info = None

        def getMessage(self):
            return self.msg

    stub = StubRecord()
    formatter = TaskFormatter()
    formatter.format(stub)
    assert stub.task_name == '???'
    assert stub.task_id == '???'
def setup_task_logger(logger, *_args, **_kwargs):
    """Customizes task loggers.

    Installs a TaskFormatter (with level, timestamp, process, task and
    source-location fields) on every handler already attached to *logger*.
    Extra signal args from the connecting framework are ignored.
    """
    for handler in logger.handlers:
        task_formatter = TaskFormatter(
            "%(levelname)s: [%(asctime)s/%(processName)s][%(task_name)s(%(task_id)s)] "
            "[%(filename)s:%(lineno)d] %(message)s"
        )
        handler.setFormatter(task_formatter)
def wrapper(*args, **kwargs):
    # Wraps a task function so that, when `log_dir` is configured, all log
    # output produced during the call is additionally captured into a
    # per-request file, and (optionally) uploaded to S3 afterwards.
    # NOTE(review): relies on closure variables (`log_dir`, `func`,
    # `log_format`, `log_level`, `worker_config`) not visible in this chunk.
    request_log_handler = None
    if log_dir:
        # The request id names the log file; without it the per-request log
        # cannot be created, so fail loudly.
        request_id = _get_function_arg_value('request_id', func, args, kwargs)
        if not request_id:
            raise IIBError(f'Unable to get "request_id" from {func.__name__}')
        # for better filtering of all logs for one build in SPLUNK
        log_formatter = TaskFormatter(
            log_format.format(request_id=f'request-{request_id}'), use_color=False
        )
        log_file_path = os.path.join(log_dir, f'{request_id}.log')
        request_log_handler = logging.FileHandler(log_file_path)
        request_log_handler.setLevel(log_level)
        request_log_handler.setFormatter(log_formatter)
        # Bandit complaining on too permissive logs
        os.chmod(log_file_path, 0o664)
        # Attach to the root logger so output from every module is captured.
        logger = logging.getLogger()
        logger.addHandler(request_log_handler)
        # Record which host/user executed the task, for debugging builds.
        worker_info = f'Host: {socket.getfqdn()}; User: {getpass.getuser()}'
        logger.info(worker_info)
    try:
        return func(*args, **kwargs)
    finally:
        # `logger` and `log_file_path` are only bound when log_dir was set;
        # the request_log_handler guard keeps this branch safe otherwise.
        if request_log_handler:
            logger.removeHandler(request_log_handler)
            request_log_handler.flush()
            if worker_config['iib_aws_s3_bucket_name']:
                upload_file_to_s3_bucket(log_file_path, 'request_logs', f'{request_id}.log')
def setup_task_logger(logger, *args, **kwargs):
    """Attach a rotating file handler with task-aware formatting to *logger*.

    File path, rotation limits and the log format all come from the Flask
    app config (`LOG_CELERY_FILE` / `CELERY_LOGGING`).
    """
    rotation_cfg = app.config['CELERY_LOGGING']
    handler = logging.handlers.RotatingFileHandler(
        app.config['LOG_CELERY_FILE'],
        maxBytes=rotation_cfg['maxBytes'],
        backupCount=rotation_cfg['backupCount'],
    )
    handler.setFormatter(TaskFormatter(rotation_cfg['format']))
    logger.setLevel(logging.INFO)
    logger.addHandler(handler)
def setup_task_logger(logger, *args, **kwargs):
    """Route task logs to the celery worker log file with task context."""
    # FileHandler writing to the fixed worker log location.
    file_handler = logging.FileHandler('/var/log/celery/worker.log')
    task_formatter = TaskFormatter(
        '%(asctime)s - %(task_id)s - %(task_name)s - %(name)s - %(levelname)s - %(message)s'
    )
    file_handler.setFormatter(task_formatter)
    logger.addHandler(file_handler)
def setup_task_logger(logger, *args, **kwargs):
    """Swap every existing handler's formatter for a task-aware one."""
    task_fmt = (
        '%(asctime)s - %(task_id)s - %(task_name)s - %(name)s - %(levelname)s - %(message)s'
    )
    for existing_handler in logger.handlers:
        existing_handler.setFormatter(TaskFormatter(task_fmt))
def setup_loggers(logger, *args, **kwargs):
    """Set a message-only TaskFormatter on every handler of *logger*.

    Deliberately minimal format: only the message body is emitted.
    (Removed a stale commented-out alternative format string that was
    left in the body — dead code.)
    """
    for handler in logger.handlers:
        handler.setFormatter(TaskFormatter('%(message)s'))
def setup_task_loggers(logger, *args, **kwargs):
    """Apply a bracketed `[time] [task:id] message` format to all handlers."""
    for existing in logger.handlers:
        existing.setFormatter(
            TaskFormatter('[%(asctime)s] [%(task_name)s:%(task_id)s] %(message)s')
        )
broker_heartbeat=None, result_backend=None, task_acks_late=True, task_default_rate_limit="1000/h", task_ignore_result=True, task_queues=[Queue(org.slug) for org in ORGS.values()] + [Queue("celery")], timezone="US/Central", worker_log_format=WORKER_LOG_FORMAT, worker_prefetch_multiplier=1, # how many tasks to fetch at once per worker worker_task_log_format=WORKER_TASK_LOG_FORMAT, ) for log in ["npsp", "eb_sf", "paypal_sf", "import_paypal"]: logger = logging.getLogger(log) logger.propagate = False formatter = TaskFormatter(WORKER_TASK_LOG_FORMAT) handler = logging.StreamHandler() handler.setFormatter(formatter) logger.addHandler(handler) class RateLimitException(Exception): pass @celery_app.task(autoretry_for=(Exception,), retry_backoff=CELERY_RETRY_BACKOFF, max_retries=CELERY_MAX_RETRIES) def handle_attendee_updated(org_slug: str, attendee: dict) -> bool: email = attendee["profile"]["email"] try: valid = validate_email(email)
def setup_task_logger(logger: logging.Logger, *args, **kwargs) -> None:  # type: ignore
    """Replace each handler's formatter on *logger* with a TaskFormatter."""
    plain_fmt = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
    for existing_handler in logger.handlers:
        existing_handler.setFormatter(TaskFormatter(plain_fmt))