def __init__(self):
    configParser = ConfigParser()
    self.log = logging.get_logger(__name__, config=configParser)
    self.config = configParser.app_cfg
    try:
        self.rabbitClient = rabbit.RabbitClient()
    except pika.exceptions.AMQPConnectionError as error:
        self.log.error("Connection to RabbitMQ failed.")
        raise error
def __init__(self):
    config_parser = ConfigParser()
    self.config = config_parser.app_cfg
    self.log = logging.get_logger(__name__, config=config_parser)
    self.threads = []
    try:
        self.rabbit_client = RabbitClient()
    except AMQPConnectionError as error:
        self.log.error("Connection to RabbitMQ failed.")
        raise error
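The threads list above suggests that messages are handled on worker threads. A minimal sketch of that pattern, assuming a hypothetical handle_message method, an import of threading, and pika's standard consumer callback signature (none of which are shown in the snippet):

def _on_message(self, channel, method, properties, body):
    # Hypothetical: run the actual work off pika's I/O loop thread.
    thread = threading.Thread(target=self.handle_message, args=(body,))
    thread.start()
    self.threads.append(thread)
    # Acknowledge on the I/O loop thread once the work has been dispatched.
    channel.basic_ack(delivery_tag=method.delivery_tag)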
def __init__(self):
    configParser = ConfigParser()
    self.log = logging.get_logger(__name__, config=configParser)
    self.config = configParser.app_cfg
    self.ftp_client = FTPClient(configParser)
    self.mh_client = MediahavenClient(configParser)
    self.event_parser = EventParser()
    try:
        self.rabbit_client = RabbitClient()
    except AMQPConnectionError as error:
        self.log.error("Connection to RabbitMQ failed.")
        raise error
def __init__(self):
    configParser = ConfigParser()
    self.log = logging.get_logger(__name__, config=configParser)
    self.rabbitConfig = configParser.app_cfg["rabbitmq"]
    self.credentials = pika.PlainCredentials(
        self.rabbitConfig["username"], self.rabbitConfig["password"]
    )
    self.connection = pika.BlockingConnection(
        pika.ConnectionParameters(
            host=self.rabbitConfig["host"],
            port=self.rabbitConfig["port"],
            credentials=self.credentials,
        )
    )
    self.channel = self.connection.channel()
    self.prefetch_count = int(self.rabbitConfig["prefetch_count"])
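A minimal sketch of how a client initialized as above could start consuming with the prefetch count applied, assuming pika 1.x; the listen method and the queue name are hypothetical and not part of the snippet:

def listen(self, on_message, queue="some-queue"):
    # Cap the number of unacknowledged deliveries per consumer.
    self.channel.basic_qos(prefetch_count=self.prefetch_count)
    self.channel.basic_consume(queue=queue, on_message_callback=on_message)
    try:
        self.channel.start_consuming()
    except KeyboardInterrupt:
        self.channel.stop_consuming()
        self.connection.close()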
def __init__(self):
    configParser = ConfigParser()
    self.config = configParser.app_cfg
    self.log = logging.get_logger(__name__, config=configParser)
    self.mh_client = MediahavenClient(self.config)
    try:
        self.rabbit_client = RabbitClient()
    except AMQPConnectionError as error:
        self.log.error("Connection to RabbitMQ failed.")
        raise error
    self.pid_service = PIDService(self.config["pid-service"]["URL"])
    self.essence_linked_rk = self.config["rabbitmq"]["essence_linked_routing_key"]
    self.essence_unlinked_rk = self.config["rabbitmq"]["essence_unlinked_routing_key"]
    self.object_deleted_rk = self.config["rabbitmq"]["object_deleted_routing_key"]
    self.get_metadata_rk = self.config["rabbitmq"]["get_metadata_routing_key"]
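For illustration, a hedged sketch of publishing an event on one of the routing keys read above, assuming the RabbitClient exposes its pika channel and that an exchange name lives under the rabbitmq config section (both are assumptions, not shown in the snippet):

def publish_essence_linked_event(self, body: bytes):
    # "exchange" is a hypothetical config key; the snippet does not show it.
    self.rabbit_client.channel.basic_publish(
        exchange=self.config["rabbitmq"]["exchange"],
        routing_key=self.essence_linked_rk,
        body=body,
    )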
# -*- coding: utf-8 -*-
"""CONFIG for celery worker"""
import os
import sys

sys.path.append(os.path.dirname(os.path.abspath(__file__)))

from viaa.configuration import ConfigParser

CONFIG = ConfigParser()
# CONFIG.read('/etc/viaa-workers/config.ini')

worker_hijack_root_logger = False

# Environment variables take precedence over the values in the config file.
broker_url = os.environ.get('BROKER_URL', CONFIG.app_cfg['Celery']['broker_url'])
result_backend = os.environ.get('RESULT_BACKEND', CONFIG.app_cfg['Celery']['s3_result_backend'])

task_serializer = 'json'
result_serializer = 'json'
accept_content = ['json']
enable_utc = True
result_persistent = True
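A hedged sketch of how a Celery app might pick up this settings module, assuming it is importable as "celeryconfig" (the real module name is not shown):

from celery import Celery

app = Celery("viaa-worker")
# Loads the lowercase settings defined above (broker_url, result_backend,
# task_serializer, ...) from the module.
app.config_from_object("celeryconfig")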
def companies(self, postgresql_wrapper_mock):
    config = ConfigParser()
    db_conf = config.app_cfg['postgresql_teamleader']
    table_names = config.app_cfg['table_names']
    self.companies = Companies(db_conf, table_names)
    return self.companies
def __init():
    import structlog

    timestamper = structlog.processors.TimeStamper(fmt="%Y-%m-%d %H:%M:%S")
    pre_chain = [
        # Add the log level and a timestamp to the event_dict if the log entry
        # is not from structlog.
        structlog.stdlib.add_log_level,
        __add_log_source_to_dict,
        timestamper,
    ]
    config = ConfigParser().config
    logging.config.dictConfig({
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "plain": {
                "()": structlog.stdlib.ProcessorFormatter,
                "processor": structlog.dev.ConsoleRenderer(colors=False),
                "foreign_pre_chain": pre_chain,
            },
            "colored": {
                "()": structlog.stdlib.ProcessorFormatter,
                "processor": structlog.dev.ConsoleRenderer(colors=True),
                "foreign_pre_chain": pre_chain,
            },
        },
        "handlers": {
            "default": {
                "level": config['logging']['level'],
                "class": "logging.StreamHandler",
                "formatter": "colored",
            },
            "file": {
                "level": config['logging']['level'],
                "class": "logging.handlers.WatchedFileHandler",
                "filename": __name__ + ".log",
                "formatter": "plain",
            },
            "rabbit": {
                "level": config['logging']['level'],
                "class": "python_logging_rabbitmq.RabbitMQHandlerOneWay",
                "host": config['logging']['RabPub']['host'],
                "username": config['logging']['RabPub']['user'],
                "password": config['logging']['RabPub']['passw'],
                "fields_under_root": True,
            },
        },
        "loggers": {
            "": {
                "handlers": ["default", "rabbit"],
                "level": config['logging']['level'],
                "propagate": True,
            }
        },
    })
    structlog.configure(
        processors=[
            # This performs the initial filtering, so we don't
            # evaluate e.g. DEBUG when unnecessary.
            structlog.stdlib.filter_by_level,
            # Adds logger=module_name (e.g. __main__).
            structlog.stdlib.add_logger_name,
            # Adds level=info, debug, etc.
            structlog.stdlib.add_log_level,
            # Performs the % string interpolation as expected.
            structlog.stdlib.PositionalArgumentsFormatter(),
            # Include the stack when stack_info=True.
            structlog.processors.StackInfoRenderer(),
            # Include the exception when exc_info=True,
            # e.g. log.exception() or log.warning(exc_info=True).
            structlog.processors.format_exc_info,
            # Decodes the unicode values in any kv pairs.
            structlog.processors.UnicodeDecoder(),
            # Adds a timestamp in ISO format to each log.
            structlog.processors.TimeStamper(fmt="iso"),
            # Adds line number and file to each log.
            __add_log_source_to_dict,
            structlog.stdlib.render_to_log_kwargs,
        ],
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    )
    handler = logging.StreamHandler(sys.stdout)
    handler.setFormatter(jsonlogger.JsonFormatter())
    root_logger = logging.getLogger()
    if len(root_logger.handlers) == 0:
        root_logger.addHandler(handler)
import time
from slackclient import SlackClient
from archstats import stats
import configparser

import elasticapm
from elasticapm import Client
from elasticapm.handlers.logging import LoggingHandler
from viaa.configuration import ConfigParser

elasticapm.instrument()
elasticapm.set_transaction_name('processor')
elasticapm.set_transaction_result('SUCCESS')

# config = ConfigParser()
config = ConfigParser(config_file="config.yml")
bot_id = config.app_cfg['slack_api']['bot_id']
client_token = config.app_cfg['slack_api']['client_token']


def clean_up_exit():
    handlers = LOGGER.handlers[:]
    for handler in handlers:
        handler.close()
        LOGGER.removeHandler(handler)


# constants
BOT_ID = bot_id
AT_BOT = "<@" + BOT_ID + ">"
def postgresql_wrapper(self):
    """Returns a PostgresqlWrapper initialized with the parameters in config.yml."""
    return PostgresqlWrapper(ConfigParser().app_cfg['postgresql_teamleader'])
def projects(self, postgresql_wrapper_mock):
    config = ConfigParser()
    db_conf = config.app_cfg['postgresql_teamleader']
    table_names = config.app_cfg['table_names']
    self.projects = Projects(db_conf, table_names)
    return self.projects
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import logging

from uvicorn import Config, Server
from viaa.configuration import ConfigParser

cfg_log_level = ConfigParser().config["logging"]["level"]
# Uvicorn expects a lowercase string or an integer as the logging level.
LOG_LEVEL = cfg_log_level.lower() if isinstance(cfg_log_level, str) else cfg_log_level

if __name__ == "__main__":
    server = Server(
        Config(
            "app.app:app",
            host="0.0.0.0",
            port=8080,
            access_log=False,
            log_level=LOG_LEVEL,
        ),
    )
    # Remove the Uvicorn logging handlers, as our own loggers log in a JSON format.
    # Just remove them all, although the access handler is already removed by "access_log=False".
    for name, logger in logging.root.manager.loggerDict.items():
        if name.startswith("uvicorn"):
            logger.handlers = []
    server.run()
import logging

from uvicorn import Config, Server
from viaa.configuration import ConfigParser

cfg_log_level = ConfigParser().chassis_cfg["logging"]["level"]
# Uvicorn expects a lowercase string or an integer as the logging level.
LOG_LEVEL = cfg_log_level.lower() if isinstance(cfg_log_level, str) else cfg_log_level

if __name__ == "__main__":
    server = Server(
        Config(
            "app.app:app",
            host="0.0.0.0",
            port=8080,
            access_log=False,
            log_level=LOG_LEVEL,
        ),
    )
    # Remove the Uvicorn logging handlers, as our own loggers log in a JSON format.
    # Just remove them all, although the access handler is already removed by "access_log=False".
    for name, logger in logging.root.manager.loggerDict.items():
        if name.startswith("uvicorn"):
            logger.handlers = []
    server.run()
def context():
    from viaa.configuration import ConfigParser
    from meemoo.context import Context

    config = ConfigParser()
    return Context(config)
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import shlex
import threading
import time
from typing import List

import requests
from paramiko import AutoAddPolicy, SSHClient, SSHException
from retry import retry
from viaa.configuration import ConfigParser
from viaa.observability import logging

config_parser = ConfigParser()
config = config_parser.app_cfg
log = logging.get_logger(__name__, config=config_parser)

dest_conf = config["destination"]

NUMBER_PARTS = 4


class TransferPartException(Exception):
    pass


class TransferException(Exception):
    pass


def calculate_ranges(size_bytes: int, number_parts: int) -> List[str]:
    """Split the file size up into multiple ranges."""
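The body of calculate_ranges is elided above; a minimal sketch of what such a split could look like, assuming inclusive "start-end" byte-range strings and that the last part absorbs any remainder (both assumptions):

def calculate_ranges_sketch(size_bytes: int, number_parts: int) -> List[str]:
    part_size = size_bytes // number_parts
    ranges = []
    for i in range(number_parts):
        start = i * part_size
        # The final range runs to the last byte to absorb the remainder.
        end = size_bytes - 1 if i == number_parts - 1 else (i + 1) * part_size - 1
        ranges.append(f"{start}-{end}")
    return ranges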
def upsert_entities_sql(self, mock_connect):
    config = ConfigParser()
    db_conf = config.app_cfg['postgresql_teamleader']
    table_names = config.app_cfg['table_names']
    self.contacts = Contacts(db_conf, table_names)
    return self.contacts.upsert_entities_sql()
from fastapi import APIRouter, BackgroundTasks, Depends
from viaa.configuration import ConfigParser
from viaa.observability import logging

from app.core.event_handler import handle_event
from app.core.events_parser import parse_premis_events
from app.models.premis_events import PremisEvents
from app.models.xml_body import XmlBody

router = APIRouter()
config = ConfigParser()
log = logging.get_logger(__name__, config=config)


@router.post("/", status_code=202)
async def handle_events(
    background_tasks: BackgroundTasks,
    premis_events: PremisEvents = Depends(XmlBody(PremisEvents, parse_premis_events)),
):
    """Returns OK if the XML parsing didn't crash."""
    events = premis_events.events
    archived_events = [
        event for event in events if event.is_valid and event.has_valid_outcome
    ]
    log.info(
def teamleader_auth(self, postgresql_wrapper_mock):
    config = ConfigParser()
    db_conf = config.app_cfg['postgresql_teamleader']
    table_names = config.app_cfg['table_names']
    self.teamleader_auth = TeamleaderAuth(db_conf, table_names)
    return self.teamleader_auth