def __configure(self, config, update=True):
    """Create the config database schema (optionally) and populate it with
    the configuration items found in ``config``.

    :param config: subsystem configuration dict; must contain ``api_system``,
        ``api_subsystem``, ``db_uri`` and a ``config`` list of items, each
        with ``group``, ``name`` and ``value`` keys
    :param update: if True keep the existing schema; if False drop and
        recreate the config tables before populating them
    :return: list of trace messages describing what was done
    :raise: any exception raised while configuring (logged and re-raised)
    """
    msgs = []
    manager = None
    try:
        # create api manager
        params = {u'api_id': u'server-01',
                  u'api_name': config[u'api_system'],
                  u'api_subsystem': config[u'api_subsystem'],
                  u'database_uri': config[u'db_uri'],
                  u'api_module': [u'beehive.module.process.mod.ConfigModule'],
                  u'api_plugin': []}
        manager = ApiManager(params)

        # remove and create schema
        if update is False:
            ConfigDbManager.remove_table(config[u'db_uri'])
        ConfigDbManager.create_table(config[u'db_uri'])
        self.logger.info(u'Create config DB %s' % (u''))
        msgs.append(u'Create config DB %s' % (u''))

        # create session
        operation.session = manager.get_session()

        # create config db manager
        db_manager = ConfigDbManager()

        # populate configs
        for item in config[u'config']:
            # dict values are stored as json strings
            value = item[u'value']
            if isinstance(value, dict):
                value = json.dumps(value)
            res = db_manager.add(config[u'api_system'], item[u'group'], item[u'name'], value)
            self.logger.info(u'Add configuration %s' % (res))
            msgs.append(u'Add configuration %s' % (res))
    except Exception as ex:
        self.logger.error(ex, exc_info=1)
        raise
    finally:
        # release session; fix: guard against ApiManager creation having
        # failed, in which case `manager` was unbound and the finally block
        # itself raised NameError, masking the original error
        if manager is not None:
            manager.release_session(operation.session)
            operation.session = None
    return msgs
def start_event_consumer(params):
    """Start the redis event consumer worker.

    Configures rotating-file logging, builds and configures an ApiManager,
    installs signal handlers that stop the worker, then runs an
    EventConsumerRedis against the manager's redis event endpoint
    (blocking until interrupted).

    :param params: configuration params (api_id, api_log, api_package,
        api_env, api_logging_level, optional event_handler list, ...)
    """
    # internal logger
    logger = logging.getLogger('beehive.module.event.manager')
    logger_level = int(params.get('api_logging_level', logging.DEBUG))

    # NOTE(review): api_id (and api_log when set) are expected to be bytes
    # here — confirm against the producer of params
    name = params['api_id'].decode('utf-8') + '.event.consumer'
    log_path = params.get('api_log', None)
    if log_path is None:
        log_path = '/var/log/%s/%s' % (params['api_package'], params['api_env'])
    else:
        log_path = log_path.decode('utf-8')
    file_name = log_path + name + '.log'
    loggers = [
        logger,
        logging.getLogger('beehive.common.event'),
        logging.getLogger('beehive.module.event.model')
    ]
    LoggerHelper.rotatingfile_handler(loggers, logger_level, file_name)

    # get event handlers
    event_handlers = params.pop('event_handler', [])

    # setup api manager
    api_manager = ApiManager(params)
    api_manager.configure()
    api_manager.register_modules()

    def terminate(*args):
        worker.should_stop = True

    for sig in (SIGHUP, SIGABRT, SIGILL, SIGINT, SIGSEGV, SIGTERM, SIGQUIT):
        signal(sig, terminate)

    with Connection(api_manager.redis_event_uri) as conn:
        try:
            worker = EventConsumerRedis(conn, api_manager, event_handlers=event_handlers)
            logger.info('Start event consumer')
            logger.debug('Event handlers: %s' % event_handlers)
            logger.debug('Active worker: %s' % worker)
            logger.debug('Use redis connection: %s' % conn)
            worker.run()
        except KeyboardInterrupt:
            # fix: the stop message was logged twice
            logger.info('Stop event consumer')
def start_scheduler(params):
    """Start the celery beat scheduler.

    Configures rotating-file logging, builds an ApiManager, configures the
    task scheduler from the broker params and runs a celery Beat service
    backed by RedisScheduler (blocking).

    :param params: configuration params (api_id, api_log, api_package,
        api_env, broker_url, result_backend, broker_queue, ...)
    """
    name = ensure_text(params['api_id']) + '.scheduler'
    log_path = run_path = params.get('api_log', None)
    if log_path is not None:
        log_path = run_path = ensure_text(log_path)
    if log_path is None:
        log_path = '/var/log/%s/%s' % (params['api_package'], params['api_env'])
        run_path = '/var/run/%s/%s' % (params['api_package'], params['api_env'])
    file_name = log_path + name + '.log'
    pid_name = run_path + name + '.pid'

    logger_level = logging.INFO
    loggers = [
        logging.getLogger('beehive'),
        logging.getLogger('beecell'),
        logging.getLogger('beedrones'),
        logging.getLogger('celery'),
    ]
    LoggerHelper.rotatingfile_handler(loggers, logger_level, file_name, formatter=ExtTaskFormatter)

    api_manager = ApiManager(params)
    api_manager.configure()
    api_manager.register_modules()
    task_scheduler.api_manager = api_manager
    configure_task_scheduler(params['broker_url'], params['result_backend'], task_queue=params['broker_queue'])

    from beehive.module.scheduler.redis_scheduler import RedisScheduler

    beat = task_scheduler.Beat(
        # fix: was logging.getLevelName(internal_logger_level) — an undefined
        # name that raised NameError; the local level is logger_level
        loglevel=logging.getLevelName(logger_level),
        logfile=file_name,
        pidfile=pid_name,
        scheduler_cls=RedisScheduler)

    def terminate(*args):
        # deliberately a no-op: the signals are swallowed so beat keeps running
        pass

    for sig in (SIGHUP, SIGABRT, SIGILL, SIGINT, SIGSEGV, SIGTERM, SIGQUIT):
        signal(sig, terminate)

    beat.run()
def start_event_consumer(params, log_path=None):
    """Start the redis event consumer worker (legacy variant).

    :param params: configuration params (api_id, api_package, api_env, ...)
    :param log_path: optional log directory; defaults to
        /var/log/<api_package>/<api_env>
    """
    # internal logger
    logger = logging.getLogger(u'beehive.module.event.manager')
    logger_level = logging.DEBUG

    if log_path is None:
        log_path = u'/var/log/%s/%s' % (params[u'api_package'], params[u'api_env'])

    logname = u'%s/%s.event.consumer' % (log_path, params[u'api_id'])
    logger_file = u'%s.log' % logname
    loggers = [logger]
    LoggerHelper.rotatingfile_handler(loggers, logger_level, logger_file)

    # performance logging
    loggers = [logging.getLogger(u'beecell.perf')]
    logger_file = u'%s/%s.watch' % (log_path, params[u'api_id'])
    LoggerHelper.rotatingfile_handler(loggers, logging.DEBUG, logger_file, frmt=u'%(asctime)s - %(message)s')

    # setup api manager
    api_manager = ApiManager(params)
    api_manager.configure()
    api_manager.register_modules()

    def terminate(*args):
        worker.should_stop = True

    for sig in (SIGHUP, SIGABRT, SIGILL, SIGINT, SIGSEGV, SIGTERM, SIGQUIT):
        signal(sig, terminate)

    with Connection(api_manager.redis_event_uri) as conn:
        try:
            worker = EventConsumerRedis(conn, api_manager)
            logger.info(u'Start event consumer')
            worker.run()
        except KeyboardInterrupt:
            # fix: the stop message was logged twice
            logger.info(u'Stop event consumer')
def start_scheduler(params):
    """Start the celery beat scheduler (legacy variant).

    Configures rotating-file logging, builds an ApiManager and runs a celery
    Beat service backed by RedisScheduler (blocking).

    :param params: configuration params (api_id, api_package, api_env,
        broker_url, result_backend, ...)
    """
    # log/run directories derived from package and environment
    log_path = u'/var/log/%s/%s' % (params[u'api_package'], params[u'api_env'])
    run_path = u'/var/run/%s/%s' % (params[u'api_package'], params[u'api_env'])
    logger_file = u'%s/%s.scheduler.log' % (log_path, params[u'api_id'])
    loggers = [
        logging.getLogger(u'beehive'),
        logging.getLogger(u'beecell'),
        logging.getLogger(u'beedrones'),
        logging.getLogger(u'celery'),
    ]
    # NOTE(review): logger_level is never assigned in this function — it must
    # exist at module level, otherwise this call raises NameError. Confirm.
    LoggerHelper.rotatingfile_handler(loggers, logger_level, logger_file, formatter=ExtTaskFormatter)

    api_manager = ApiManager(params)
    api_manager.configure()
    api_manager.register_modules()
    #worker = ProcessEventConsumerRedis(api_manager)
    #from beehive.module.tasks import task_manager
    task_scheduler.api_manager = api_manager

    configure_task_scheduler(params['broker_url'], params['result_backend'])

    #from beehive.module.scheduler.scheduler import RedisScheduler
    from beehive.module.scheduler.redis_scheduler import RedisScheduler

    beat = task_scheduler.Beat(
        loglevel=logging.getLevelName(logger_level),
        logfile='%s/%s.scheduler.log' % (log_path, params['api_id']),
        pidfile='%s/%s.scheduler.pid' % (run_path, params['api_id']),
        scheduler_cls=RedisScheduler)

    def terminate(*args):
        # deliberately a no-op: signals are swallowed so beat keeps running
        #run_command(['celery', 'multi', 'stopwait', 'worker1',
        #             '--pidfile="run/celery-%n.pid"'])
        #beat.Service.stop()
        pass

    for sig in (SIGHUP, SIGABRT, SIGILL, SIGINT, SIGSEGV, SIGTERM, SIGQUIT):
        signal(sig, terminate)

    beat.run()
def start_catalog_consumer(params):
    """Start the redis catalog consumer worker.

    Configures rotating-file logging, builds and configures an ApiManager,
    installs signal handlers that stop the worker, then runs a
    CatalogConsumerRedis against the manager's redis catalog endpoint
    (blocking until interrupted).

    :param params: configuration params (api_id, api_log, api_package,
        api_env, api_logging_level, ...)
    """
    # internal logger
    logger = getLogger('beehive')
    logger_level = int(params.get('api_logging_level', DEBUG))

    # NOTE(review): api_id (and api_log when set) are expected to be bytes
    # here — confirm against the producer of params
    name = params['api_id'].decode('utf-8') + '.catalog'
    log_path = params.get('api_log', None)
    if log_path is None:
        log_path = '/var/log/%s/%s' % (params['api_package'], params['api_env'])
    else:
        log_path = log_path.decode('utf-8')
    file_name = log_path + name + '.log'
    loggers = [getLogger(), logger]
    LoggerHelper.rotatingfile_handler(loggers, logger_level, file_name)

    # setup api manager
    api_manager = ApiManager(params)
    api_manager.configure()
    api_manager.register_modules()

    def terminate(*args):
        worker.should_stop = True

    for sig in (SIGHUP, SIGABRT, SIGILL, SIGINT, SIGSEGV, SIGTERM, SIGQUIT):
        signal(sig, terminate)

    with Connection(api_manager.redis_catalog_uri) as conn:
        try:
            worker = CatalogConsumerRedis(conn, api_manager)
            logger.info('Start catalog consumer')
            worker.run()
        except KeyboardInterrupt:
            # fix: the stop message was logged twice
            logger.info('Stop catalog consumer')
def start_task_manager(params):
    """Start the celery task manager worker.

    Configures rotating-file (and optionally elasticsearch) logging, builds
    an ApiManager, configures the task manager from broker params and runs
    the celery worker (blocking).

    :param params: configuration params (api_id, api_log, api_package,
        api_env, api_logging_level, broker_url, result_backend, task_module,
        expire, broker_queue, task_time_limit, ...)
    """
    name = ensure_text(params['api_id']) + '.worker'
    log_path = run_path = params.get('api_log', None)
    if log_path is not None:
        log_path = run_path = ensure_text(log_path)
    if log_path is None:
        log_path = '/var/log/%s/%s' % (params['api_package'], params['api_env'])
        run_path = '/var/run/%s/%s' % (params['api_package'], params['api_env'])
    file_name = log_path + name + '.log'
    pid_name = run_path + name + '.pid'

    frmt = u'[%(asctime)s: %(levelname)s/%(task_name)s:%(task_id)s] %(name)s:%(funcName)s:%(lineno)d - %(message)s'
    logger_level = int(params.get('api_logging_level', logging.DEBUG))

    main_loggers = [
        logging.getLogger('beehive'),
        logging.getLogger('beehive.common.model'),
        logging.getLogger('beehive_service'),
        logging.getLogger('beehive_resource'),
        logging.getLogger('beehive.db'),
        logging.getLogger('beecell'),
        logging.getLogger('beedrones'),
        logging.getLogger('celery'),
        logging.getLogger('proxmoxer'),
        logging.getLogger('requests')
    ]
    LoggerHelper.rotatingfile_handler(main_loggers, logger_level, file_name, frmt=frmt, formatter=ExtTaskFormatter)

    # setup api manager
    api_manager = ApiManager(params, hostname=gethostname())
    api_manager.configure()
    api_manager.register_modules(register_api=False)
    task_manager.api_manager = api_manager

    # elk logger
    if api_manager.elasticsearch is not None:
        frmt = '{"timestamp":"%(asctime)s", "levelname":"%(levelname)s", "task_name":"%(task_name)s", ' \
               '"task_id":"%(task_id)s", "module":"%(name)s", "func":"%(funcName)s", "lineno":"%(lineno)d",' \
               '"message":"%(message)s"}'
        tags = []
        LoggerHelper.elastic_handler(main_loggers, logger_level, api_manager.elasticsearch, index='cmp',
                                     frmt=frmt, tags=tags, server=api_manager.server_name,
                                     app=api_manager.app_id, component='task')

    configure_task_manager(params['broker_url'], params['result_backend'], tasks=params['task_module'],
                           expire=params['expire'], task_queue=params['broker_queue'],
                           time_limit=params['task_time_limit'])

    argv = [
        '',
        '--hostname=' + ensure_text(params['broker_queue']) + '@%h',
        # fix: was logging.getLevelName(internal_logger_level) — an undefined
        # name that raised NameError; the local level is logger_level
        '--loglevel=%s' % logging.getLevelName(logger_level),
        '--purge',
        '--logfile=%s' % file_name,
        '--pidfile=%s' % pid_name,
    ]

    def terminate(*args):
        task_manager.stop()

    # signal registration is intentionally disabled; celery installs its own
    # handlers when worker_main runs

    task_manager.worker_main(argv)
def start_task_manager(params):
    """Start celery task manager (legacy variant).

    Configures rotating-file logging for the base/db/perf channels, builds an
    ApiManager, configures the task manager and runs the celery worker
    (blocking).

    :param params: configuration params (api_id, api_package, api_env,
        broker_url, result_backend, task_module, expire, ...)
    """
    logname = "%s.task" % params['api_id']
    # the first format is immediately replaced by the task-aware one below
    frmt = u'[%(asctime)s: %(levelname)s/%(processName)s] ' \
           u'%(name)s:%(funcName)s:%(lineno)d - %(message)s'
    frmt = u'[%(asctime)s: %(levelname)s/%(task_name)s:%(task_id)s] '\
           u'%(name)s:%(funcName)s:%(lineno)d - %(message)s'
    log_path = u'/var/log/%s/%s' % (params[u'api_package'], params[u'api_env'])
    run_path = u'/var/run/%s/%s' % (params[u'api_package'], params[u'api_env'])

    #loggers = [logging.getLogger('beehive.common.event')]
    #LoggerHelper.rotatingfile_handler(loggers, logger_level,
    #                                  '%s/%s.event.log' % (log_path, logname),
    #                                  frmt=frmt)

    # base logging
    # NOTE(review): logger_level is never assigned in this function — it must
    # exist at module level, otherwise the next call raises NameError. Confirm.
    loggers = [
        logging.getLogger(u'beehive'),
        logging.getLogger(u'beehive.db'),
        logging.getLogger(u'beecell'),
        logging.getLogger(u'beedrones'),
        logging.getLogger(u'celery'),
        logging.getLogger(u'proxmoxer'),
        logging.getLogger(u'requests')
    ]
    LoggerHelper.rotatingfile_handler(loggers, logger_level, u'%s/%s.log' % (log_path, logname),
                                      frmt=frmt, formatter=ExtTaskFormatter)

    # transaction and db logging
    loggers = [
        logging.getLogger('beehive.util.data'),
        logging.getLogger('sqlalchemy.engine'),
        logging.getLogger('sqlalchemy.pool')
    ]
    LoggerHelper.rotatingfile_handler(loggers, logger_level, '%s/%s.db.log' % (log_path, logname))

    # performance logging
    loggers = [logging.getLogger('beecell.perf')]
    LoggerHelper.rotatingfile_handler(loggers, logger_level, '%s/%s.watch' % (log_path, params[u'api_id']),
                                      frmt='%(asctime)s - %(message)s')

    api_manager = ApiManager(params, hostname=gethostname())
    api_manager.configure()
    api_manager.register_modules()
    #worker = ProcessEventConsumerRedis(api_manager)
    #from beehive.module.tasks import task_manager
    task_manager.api_manager = api_manager

    logger_file = '%s/%s.log' % (log_path, logname)

    configure_task_manager(params['broker_url'], params['result_backend'],
                           tasks=params['task_module'], expire=params['expire'],
                           logger_file=logger_file)

    # celery worker command line: gevent pool, fixed concurrency, purge queue
    argv = [u'',
            u'--loglevel=%s' % logging.getLevelName(logger_level),
            #u'--pool=prefork',
            u'--pool=gevent',
            u'--purge',
            #'--time-limit=600',
            #'--soft-time-limit=300',
            u'--concurrency=100',
            u'--maxtasksperchild=100',
            #u'--autoscale=100,10',
            u'--logfile=%s' % logger_file,
            u'--pidfile=%s/%s.task.pid' % (run_path, logname)]

    def terminate(*args):
        # worker shutdown hook; signal registration below is disabled
        #run_command(['celery', 'multi', 'stopwait', 'worker1',
        #             '--pidfile="run/celery-%n.pid"'])
        task_manager.stop()

    #for sig in (SIGHUP, SIGABRT, SIGILL, SIGINT, SIGSEGV, SIGTERM, SIGQUIT):
    #    signal(sig, terminate)

    task_manager.worker_main(argv)
def __init__(self, *args, **kwargs):
    """Read uwsgi configuration, install json error handlers, set up logging
    and bootstrap the ApiManager (configure, modules, catalog registration).

    :param args: positional args forwarded to Flask
    :param kwargs: key value args forwarded to Flask
    """
    super(BeehiveApp, self).__init__(*args, **kwargs)

    # set debug mode
    self.debug = False

    # flask secret
    self.secret_key = urandom(48)

    self.http_socket = uwsgi_util.opt['http-socket']
    self.server_name = uwsgi_util.opt['api_host']
    self.server_fqdn = uwsgi_util.opt['api_fqdn']
    try:
        self.server_ip = gethostbyname(uwsgi_util.opt['api_fqdn'])
    except Exception:
        # fix: was a bare except, which also swallowed SystemExit and
        # KeyboardInterrupt; fall back to loopback when dns fails
        self.server_ip = '127.0.0.1'

    self.app_name = uwsgi_util.opt['api_name']
    self.app_id = uwsgi_util.opt['api_id']

    # api instance static config
    self.params = uwsgi_util.opt

    # set logging path
    log_path = '/var/log/%s/%s' % (self.params['api_package'], self.params['api_env'])
    self.log_path = self.params.get('api_log', log_path)

    def error405(e):
        error = {
            'code': 405,
            'message': 'Method Not Allowed',
            'description': 'Method Not Allowed'
        }
        logger.error('Api response: %s' % error)
        return Response(response=json.dumps(error), mimetype='application/json', status=405)

    self._register_error_handler(None, 405, error405)

    def error404(e):
        error = {
            'code': 404,
            'message': 'Uri %s not found' % request.path,
            'description': 'Uri %s not found' % request.path
        }
        logger.error('Api response: %s' % error)
        return Response(response=json.dumps(error), mimetype='application/json', status=404)

    self._register_error_handler(None, 404, error404)

    # setup loggers
    logging_level = int(self.params['api_logging_level'])
    self.setup_loggers(level=logging_level)

    logger.info('########## SERVER STARTING ##########')
    start = time()

    # api manager reference
    self.api_manager = ApiManager(self.params, app=self, hostname=self.server_ip)

    # server configuration
    self.api_manager.configure()

    # setup additional elasticsearch handler when configured
    if self.api_manager.elasticsearch is not None:
        tags = []
        self.setup_additional_loggers(self.api_manager.elasticsearch, level=logging_level, tags=tags,
                                      server=self.server_name, app=self.app_id, component='api')

    # load modules
    self.api_manager.register_modules()

    # register in catalog
    self.api_manager.register_catalog()

    logger.info('Setup server over: %s' % self.api_manager.app_uri)
    logger.info('Setup server over: %s' % self.api_manager.uwsgi_uri)
    logger.info('########## SERVER STARTED ########## - %s' % round(time() - start, 2))
class BeehiveApp(Flask):
    """Custom Flask app used to read configuration and initialize security.

    :param args: positional args
    :param kwargs: key value args
    """
    def __init__(self, *args, **kwargs):
        """Read uwsgi configuration, install json error handlers, set up
        logging and bootstrap the ApiManager.
        """
        super(BeehiveApp, self).__init__(*args, **kwargs)

        # set debug mode
        self.debug = False

        # flask secret
        self.secret_key = urandom(48)

        self.http_socket = uwsgi_util.opt['http-socket']
        self.server_name = uwsgi_util.opt['api_host']
        self.server_fqdn = uwsgi_util.opt['api_fqdn']
        try:
            self.server_ip = gethostbyname(uwsgi_util.opt['api_fqdn'])
        except Exception:
            # fix: was a bare except, which also swallowed SystemExit and
            # KeyboardInterrupt; fall back to loopback when dns fails
            self.server_ip = '127.0.0.1'

        self.app_name = uwsgi_util.opt['api_name']
        self.app_id = uwsgi_util.opt['api_id']

        # api instance static config
        self.params = uwsgi_util.opt

        # set logging path
        log_path = '/var/log/%s/%s' % (self.params['api_package'], self.params['api_env'])
        self.log_path = self.params.get('api_log', log_path)

        def error405(e):
            error = {
                'code': 405,
                'message': 'Method Not Allowed',
                'description': 'Method Not Allowed'
            }
            logger.error('Api response: %s' % error)
            return Response(response=json.dumps(error), mimetype='application/json', status=405)

        self._register_error_handler(None, 405, error405)

        def error404(e):
            error = {
                'code': 404,
                'message': 'Uri %s not found' % request.path,
                'description': 'Uri %s not found' % request.path
            }
            logger.error('Api response: %s' % error)
            return Response(response=json.dumps(error), mimetype='application/json', status=404)

        self._register_error_handler(None, 404, error404)

        # setup loggers
        logging_level = int(self.params['api_logging_level'])
        self.setup_loggers(level=logging_level)

        logger.info('########## SERVER STARTING ##########')
        start = time()

        # api manager reference
        self.api_manager = ApiManager(self.params, app=self, hostname=self.server_ip)

        # server configuration
        self.api_manager.configure()

        # setup additional elasticsearch handler when configured
        if self.api_manager.elasticsearch is not None:
            tags = []
            self.setup_additional_loggers(self.api_manager.elasticsearch, level=logging_level, tags=tags,
                                          server=self.server_name, app=self.app_id, component='api')

        # load modules
        self.api_manager.register_modules()

        # register in catalog
        self.api_manager.register_catalog()

        logger.info('Setup server over: %s' % self.api_manager.app_uri)
        logger.info('Setup server over: %s' % self.api_manager.uwsgi_uri)
        logger.info('########## SERVER STARTED ########## - %s' % round(time() - start, 2))

    def del_configurations(self):
        """Drop cached configuration attributes."""
        del self.db_uri
        del self.tcp_proxy

    def setup_loggers(self, level=LoggerHelper.DEBUG):
        """Setup rotating-file loggers.

        :param level: logging level
        """
        # NOTE(review): api_id and log_path are expected to be bytes here —
        # confirm against the uwsgi option producer
        logname = uwsgi_util.opt['api_id'].decode('utf-8')

        # base logging
        file_name = self.log_path.decode('utf-8') + logname + '.api.log'
        loggers = [
            logger,
            logging.getLogger('oauthlib'),
            logging.getLogger('beehive'),
            logging.getLogger('beecell'),
            logging.getLogger('beedrones'),
            logging.getLogger('beehive_oauth2'),
            logging.getLogger('beehive_service'),
            logging.getLogger('beehive_resource'),
            logging.getLogger('beehive_ssh'),
        ]
        LoggerHelper.rotatingfile_handler(loggers, level, file_name)

    def setup_additional_loggers(self, elasticsearch, level=LoggerHelper.DEBUG, tags=None, **custom_fields):
        """Setup elasticsearch loggers.

        :param elasticsearch: elasticsearch.Elasticsearch class instance
        :param level: logging level
        :param tags: optional list of tags (fix: was a shared mutable default)
        :param custom_fields: extra fields attached to every log record
        """
        if tags is None:
            tags = []
        loggers = [
            logger,
            logging.getLogger('oauthlib'),
            logging.getLogger('beehive'),
            logging.getLogger('beehive.db'),
            logging.getLogger('beecell'),
            logging.getLogger('beedrones'),
            logging.getLogger('beehive_oauth2'),
            logging.getLogger('beehive_service'),
            logging.getLogger('beehive_resource'),
            logging.getLogger('beehive_ssh'),
        ]
        LoggerHelper.elastic_handler(loggers, level, elasticsearch, index='cmp', tags=tags, **custom_fields)

    def open_db_session(self):
        """Open database session."""
        operation.session = self.api_manager.get_session()

    def release_db_session(self):
        """Release database session."""
        self.api_manager.release_session()
def __init_subsystem(self, config, update=True):
    """Init beehive subsystem

    :param dict config: subsystem configuration
    :param update: if update is True don't replace database schema
    :return: trace of execution
    :raise: any exception raised during initialization (logged and re-raised)
    """
    msgs = []
    try:
        # create api manager
        params = {u'api_id': u'server-01',
                  u'api_name': config[u'api_system'],
                  u'api_subsystem': config[u'api_subsystem'],
                  u'database_uri': config[u'db_uri'],
                  u'api_module': config[u'api_modules'],
                  u'api_plugin': config[u'api_plugins']}
        manager = ApiManager(params)
        manager.configure()
        manager.register_modules()

        # create config db manager
        config_db_manager = ConfigDbManager()

        for db_manager_class in config[u'db_managers']:
            db_manager = import_class(db_manager_class)

            # remove and create/update schema
            if update is False:
                db_manager.remove_table(config[u'db_uri'])
            db_manager.create_table(config[u'db_uri'])
            self.logger.info(u'Create DB %s' % (db_manager_class))
            msgs.append(u'Create DB %s' % (db_manager_class))
    except Exception as ex:
        self.logger.error(ex, exc_info=1)
        raise

    self.set_permissions(classes=self.classes)

    # create module
    for item in config[u'api_modules']:
        module = None
        try:
            self.logger.info(u'Load module %s' % (item))
            module = manager.modules[item.split(u'.')[-1]]
            controller = module.get_controller()

            # create session
            operation.session = manager.get_session()

            # init module
            module.init_object()
            self.logger.info(u'Init module %s' % (module))
            msgs.append(u'Init module %s' % (module))

            # create system users and roles
            if module.name == u'AuthModule':
                res = self.__create_main_users(controller, config, config_db_manager)
                controller.set_superadmin_permissions()
                msgs.extend(res)
            elif module.name == u'Oauth2Module':
                controller.set_superadmin_permissions()
            elif module.name == u'BasicModule':
                controller.set_superadmin_permissions()
            elif module.name == u'CatalogModule':
                res = self.__create_main_catalogs(controller, config, config_db_manager)
                controller.set_superadmin_permissions()
                msgs.extend(res)
        except Exception as ex:
            self.logger.error(ex, exc_info=1)
            raise
        finally:
            # release session; fix: `module` was unbound when the modules
            # lookup itself failed, so the finally block raised NameError and
            # masked the original error
            if module is not None:
                module.release_session(operation.session)
            operation.session = None

    self.logger.info(u'Init subsystem %s' % (config[u'api_subsystem']))
    msgs.append(u'Init subsystem %s' % (config[u'api_subsystem']))
    return msgs
def __configure(self, config, update=True):
    """Run the main configuration steps: optionally rebuild the config
    schema, then store every configuration item that is not already present.

    :param config: subsystem configuration
    :param update: if update is True don't replace database schema
    :return: list of trace messages
    :raise: any exception raised while configuring (logged and re-raised)
    """
    trace = []
    manager = None
    try:
        # build the api manager used to obtain a database session
        manager = ApiManager({'api_id': 'server-01',
                              'api_name': config['api_system'],
                              'api_subsystem': config['api_subsystem'],
                              'database_uri': config['db_uri'],
                              'api_module': 1,
                              'api_module.1': 'beehive.module.process.mod.ConfigModule',
                              'api_plugin': 0})

        # drop the schema first when a full rebuild was requested
        if update is False:
            ConfigDbManager.remove_table(config['db_uri'])
        ConfigDbManager.create_table(config['db_uri'])
        self.logger.info('Create config DB %s' % (''))
        trace.append('Create config DB %s' % (''))

        # open a session and a config db manager on top of it
        operation.session = manager.get_session()
        db_manager = ConfigDbManager()

        # store each item; a successful get means it is already configured
        for entry in config['config']:
            payload = entry['value']
            if isinstance(payload, dict):
                payload = json.dumps(payload)
            try:
                db_manager.get(app=config['api_system'], group=entry['group'], name=entry['name'])
            except QueryError:
                created = db_manager.add(config['api_system'], entry['group'], entry['name'], payload)
                self.logger.info('Add configuration %s' % created)
                trace.append('Add configuration %s' % created)
            else:
                self.logger.warning('Configuration %s %s %s already exist' %
                                    (config['api_system'], entry['group'], entry['name']))
                trace.append('Configuration %s %s %s already exist' %
                             (config['api_system'], entry['group'], entry['name']))
    except Exception as ex:
        self.logger.error(ex, exc_info=True)
        raise
    finally:
        # release session only if the manager was actually built
        if manager is not None:
            manager.release_session()
    return trace
def __init_subsystem(self, config, update=True):
    """Init beehive subsystem

    :param config: subsystem configuration
    :param update: if update is True don't replace database schema
    :return: trace of execution
    :raise: any exception raised during initialization (logged and re-raised)
    """
    msgs = []
    try:
        # create api manager
        params = {'api_id': 'server-01',
                  'api_name': config['api_system'],
                  'api_subsystem': config['api_subsystem'],
                  'database_uri': config['db_uri'],
                  'redis_identity_uri': config['redis_identity_uri'],
                  'api_module': config['api_module'],
                  'api_plugin': config['api_plugin'],
                  'api_endpoint': config['api_endpoint'],
                  'api_catalog': config['api_catalog']}
        # modules/plugins arrive as numbered keys: api_module.1 .. api_module.N
        for i in range(1, params['api_module'] + 1):
            params['api_module.%s' % i] = config['api_module.%s' % i]
        if config['api_plugin'] > 0:
            for i in range(1, params['api_plugin'] + 1):
                params['api_plugin.%s' % i] = config['api_plugin.%s' % i]
        manager = ApiManager(params)
        manager.configure()
        manager.register_modules()

        # create config db manager
        config_db_manager = ConfigDbManager()

        for db_manager_class in config['db_managers']:
            db_manager = import_class(db_manager_class)

            # remove and create/update schema
            if update is False:
                db_manager.remove_table(config['db_uri'])
            db_manager.create_table(config['db_uri'])
            self.logger.info('Create DB %s' % db_manager_class)
            msgs.append('Create DB %s' % db_manager_class)
    except Exception as ex:
        self.logger.error(ex, exc_info=True)
        raise

    self.set_permissions(classes=self.classes)

    # create module
    for i in range(1, config['api_module'] + 1):
        item = config['api_module.%s' % i]
        module = None
        try:
            self.logger.info('Load module %s' % item)
            module = manager.modules[item.split('.')[-1]]
            controller = module.get_controller()

            # create session
            operation.session = manager.get_session()

            # init module
            module.init_object()
            self.logger.info('Init module %s' % module)
            msgs.append('Init module %s' % module)

            # create system users and roles
            if module.name == 'AuthModule':
                res = self.__create_main_users(controller, config, config_db_manager, update)
                controller.set_superadmin_permissions()
                msgs.extend(res)
            elif module.name == 'Oauth2Module':
                controller.set_superadmin_permissions()
            elif module.name == 'BasicModule':
                controller.set_superadmin_permissions()
            elif module.name == 'CatalogModule':
                res = self.__create_main_catalogs(controller, config, config_db_manager)
                controller.set_superadmin_permissions()
                msgs.extend(res)
            elif module.name == 'ServiceModule':
                controller.populate(config['db_uri'])
                # fix: msgs.extend(str) appended one entry per character
                msgs.append('Populate service database')
        except Exception as ex:
            self.logger.error(ex, exc_info=True)
            raise
        finally:
            # release session; fix: `module` was unbound when the modules
            # lookup itself failed, so the finally block raised NameError and
            # masked the original error
            if module is not None:
                module.release_session()

    self.logger.info('Init subsystem %s' % (config['api_subsystem']))
    msgs.append('Init subsystem %s' % (config['api_subsystem']))
    return msgs
def __init__(self, *args, **kwargs):
    """Read the uwsgi configuration, install the 405 error handler, set up
    logging and bootstrap the ApiManager (configure, modules, catalog and
    monitor registration).
    """
    super(BeehiveApp, self).__init__(*args, **kwargs)

    # never run flask in debug mode
    self.debug = False

    # random per-process flask secret
    self.secret_key = urandom(48)

    self.http_socket = uwsgi_util.opt[u'http-socket']
    self.server_name = gethostname()
    self.app_name = uwsgi_util.opt[u'api_name']
    self.app_id = uwsgi_util.opt[u'api_id']

    # the whole uwsgi option set acts as the static api configuration
    self.params = uwsgi_util.opt

    # logging directory: explicit api_log option or the conventional path
    default_log_path = u'/var/log/%s/%s' % (self.params[u'api_package'], self.params[u'api_env'])
    self.log_path = self.params.get(u'api_log', default_log_path)

    def error(e):
        # uniform json payload for method-not-allowed responses
        body = json.dumps({u'status': u'error',
                           u'api': u'',
                           u'operation': u'',
                           u'data': u'',
                           u'exception': u'',
                           u'code': str(405),
                           u'msg': u'Method Not Allowed'})
        return Response(response=body, mimetype=u'application/json', status=405)

    self._register_error_handler(None, 405, error)

    # setup loggers
    self.setup_loggers()

    self.logger.info("##### SERVER STARTING #####")
    start = time()

    # build and configure the api manager
    self.api_manager = ApiManager(self.params, app=self, hostname=self.server_name)
    self.api_manager.configure()

    # load modules, then advertise this server in catalog and monitor
    self.api_manager.register_modules()
    self.api_manager.register_catalog()
    self.api_manager.register_monitor()

    self.logger.info(u'Setup uwsgi over %s:%s' % (self.server_name, self.http_socket))
    self.logger.info("##### SERVER STARTED ##### - %s" % round(time() - start, 2))
class BeehiveApp(Flask):
    """Custom Flask app used to read configuration and initialize security.

    TODO: poller that executes some periodic task, like verifying that
    orchestrators are active
    """
    def __init__(self, *args, **kwargs):
        """Read the uwsgi configuration, install the 405 error handler, set
        up logging and bootstrap the ApiManager.
        """
        super(BeehiveApp, self).__init__(*args, **kwargs)

        # set debug mode
        self.debug = False

        # flask secret
        self.secret_key = urandom(48)

        self.http_socket = uwsgi_util.opt[u'http-socket']
        self.server_name = gethostname()
        self.app_name = uwsgi_util.opt[u'api_name']
        self.app_id = uwsgi_util.opt[u'api_id']

        # api instance static config
        self.params = uwsgi_util.opt

        # set logging path
        log_path = u'/var/log/%s/%s' % (self.params[u'api_package'], self.params[u'api_env'])
        self.log_path = self.params.get(u'api_log', log_path)

        def error(e):
            error = {
                u'status': u'error',
                u'api': u'',
                u'operation': u'',
                u'data': u'',
                u'exception': u'',
                u'code': str(405),
                u'msg': u'Method Not Allowed'
            }
            return Response(response=json.dumps(error), mimetype=u'application/json', status=405)

        self._register_error_handler(None, 405, error)

        # setup loggers
        self.setup_loggers()

        self.logger.info("##### SERVER STARTING #####")
        start = time()

        # api manager reference
        self.api_manager = ApiManager(self.params, app=self, hostname=self.server_name)

        # server configuration
        self.api_manager.configure()

        # load modules
        self.api_manager.register_modules()

        # register in catalog
        self.api_manager.register_catalog()

        # register in monitor
        self.api_manager.register_monitor()

        self.logger.info(u'Setup uwsgi over %s:%s' % (self.server_name, self.http_socket))
        self.logger.info("##### SERVER STARTED ##### - %s" % round(time() - start, 2))

    def del_configurations(self):
        """Drop cached configuration attributes."""
        del self.db_uri
        del self.tcp_proxy

    def setup_loggers(self):
        """Setup rotating-file loggers for the api, db and watch channels."""
        logname = uwsgi_util.opt[u'api_id']

        # base logging
        file_name = u'%s/%s.log' % (self.log_path, logname)
        loggers = [
            self.logger,
            logging.getLogger(u'oauthlib'),
            logging.getLogger(u'beehive'),
            logging.getLogger(u'beehive.db'),
            logging.getLogger(u'beecell'),
            logging.getLogger(u'beedrones'),
            logging.getLogger(u'beehive_oauth2'),
            logging.getLogger(u'beehive_monitor'),
            logging.getLogger(u'beehive_service'),
            logging.getLogger(u'beehive_resource')
        ]
        LoggerHelper.rotatingfile_handler(loggers, logging.DEBUG, file_name, formatter=ColorFormatter)

        # transaction and db logging
        file_name = u'%s/%s.db.log' % (self.log_path, logname)
        loggers = [
            logging.getLogger(u'beehive.util.data'),
            logging.getLogger(u'sqlalchemy.engine'),
            logging.getLogger(u'sqlalchemy.pool')
        ]
        LoggerHelper.rotatingfile_handler(loggers, logging.DEBUG, file_name)

        # performance logging; fix: a per-api watch filename was computed and
        # immediately overwritten, so only the shared beehive.watch is used
        file_name = u'%s/beehive.watch' % (self.log_path)
        loggers = [logging.getLogger(u'beecell.perf')]
        LoggerHelper.rotatingfile_handler(loggers, logging.DEBUG, file_name, frmt=u'%(asctime)s - %(message)s')

    def open_db_session(self):
        """Open database session.

        :return: the active database session
        :raise BeehiveAppError: if the session cannot be opened
        """
        try:
            operation.session = self.api_manager.db_manager.get_session()
            return operation.session
        except MysqlManagerError as e:
            # fix: `except MysqlManagerError, e:` is python-2-only syntax;
            # `as` is valid on both python 2.6+ and 3
            self.logger.error(e)
            raise BeehiveAppError(e)