def _wrapper(global_config, **local_conf):
    # Bring up queuing as a client (not a server-side consumer).
    queue.init(CONF, is_server_side=False)

    # Configure oslo logging and configuration services.
    log.setup(CONF, 'barbican')
    LOG = log.getLogger(__name__)
    config.setup_remote_pydev_debug()

    # Create the database engine and session factory up front so no
    # request is lost to lazy initialization of db connections; the
    # secret_stores table is synced with configuration at the same time.
    try:
        repositories.setup_database_engine_and_factory(
            initialize_secret_stores=True
        )
        repositories.commit()
    except Exception:
        LOG.exception('Failed to sync secret_stores table.')
        repositories.rollback()
        raise

    app = func(global_config, **local_conf)
    if newrelic_loaded:
        app = newrelic.agent.WSGIApplicationWrapper(app)

    LOG.info('Barbican app created and initialized')
    return app
def create_main_app(global_config, **local_conf):
    """uWSGI factory method for the Barbican-API application."""
    # Client-side queuing initialization.
    queue.init(CONF, is_server_side=False)

    # Configure oslo logging and configuration services.
    log.setup(CONF, 'barbican')
    config.setup_remote_pydev_debug()

    # Eagerly create the database engine and session factory so requests
    # are not lost to lazy initialization of db connections.
    repositories.setup_database_engine_and_factory()

    # Build the app with the transactional hook enabled.
    wsgi_app = build_wsgi_app(transactional=True)

    if newrelic_loaded:
        wsgi_app = newrelic.agent.WSGIApplicationWrapper(wsgi_app)

    LOG = log.getLogger(__name__)
    LOG.info(u._LI('Barbican app created and initialized'))
    return wsgi_app
def __init__(self):
    """Wire this instance up as the endpoint of an oslo RPC server."""
    super(TaskServer, self).__init__()

    # Set up the db engine now to avoid lazy initialization later.
    repositories.setup_database_engine_and_factory()

    # The oslo_messaging RPC server asks for this target property when it
    # is constructed for the endpoints given below.
    self.target = queue.get_target()

    # The oslo RPC server calls back into this instance to invoke tasks,
    # such as 'process_order()' on the extended Tasks class above.
    self._server = queue.get_server(target=self.target,
                                    endpoints=[self])
def __init__(self, queue_resource=None):
    """Start the retry-task periodic scheduler.

    :param queue_resource: optional task client; a new
        async_client.TaskClient is created when not supplied.
    """
    super(PeriodicServer, self).__init__()

    # Set up the db engine now to avoid lazy initialization later.
    repositories.setup_database_engine_and_factory()

    # Client used later to send retry RPC tasks to the worker queue.
    self.queue = queue_resource or async_client.TaskClient()

    # Launch the task retry periodic scheduler process.
    max_interval = CONF.retry_scheduler.periodic_interval_max_seconds
    self.tg.add_dynamic_timer(
        self._check_retry_tasks,
        initial_delay=CONF.retry_scheduler.initial_delay_seconds,
        periodic_interval_max=max_interval)

    self.order_retry_repo = repositories.get_order_retry_tasks_repository()
def __init__(self, queue_resource=None):
    """Initialize the periodic server and schedule the retry checker.

    :param queue_resource: optional worker-queue client; defaults to a
        fresh async_client.TaskClient.
    """
    super(PeriodicServer, self).__init__()

    # Eager db engine setup avoids lazy initialization on first use.
    repositories.setup_database_engine_and_factory()

    # Connect to the worker queue, to send retry RPC tasks to it later.
    self.queue = queue_resource or async_client.TaskClient()

    # Register the dynamic timer that drives the retry scheduler.
    retry_conf = CONF.retry_scheduler
    self.tg.add_dynamic_timer(
        self._check_retry_tasks,
        initial_delay=retry_conf.initial_delay_seconds,
        periodic_interval_max=retry_conf.periodic_interval_max_seconds)

    self.order_retry_repo = repositories.get_order_retry_tasks_repository()
def _wrapper(global_config, **local_conf):
    # Client-side queuing initialization.
    queue.init(CONF, is_server_side=False)

    # Configure oslo logging and configuration services.
    log.setup(CONF, 'barbican')
    config.setup_remote_pydev_debug()

    # Create the database engine and session factory before the app starts
    # so requests are not lost to lazy initialization of db connections.
    repositories.setup_database_engine_and_factory()

    app = func(global_config, **local_conf)
    if newrelic_loaded:
        app = newrelic.agent.WSGIApplicationWrapper(app)

    LOG = log.getLogger(__name__)
    LOG.info(u._LI('Barbican app created and initialized'))
    return app
def _wrapper(global_config, **local_conf):
    # Initialize queuing (client side only).
    queue.init(CONF, is_server_side=False)

    # Configure oslo logging and configuration services.
    log.setup(CONF, 'barbican')
    config.setup_remote_pydev_debug()

    # Eagerly initialize the database engine and session factory so the
    # app does not lose requests to lazy initialization of db connections.
    repositories.setup_database_engine_and_factory()

    wsgi_app = func(global_config, **local_conf)

    if newrelic_loaded:
        wsgi_app = newrelic.agent.WSGIApplicationWrapper(wsgi_app)

    LOG = log.getLogger(__name__)
    LOG.info(u._LI('Barbican app created and initialized'))
    return wsgi_app
def sync_secret_stores(sql_url, verbose, log_file):
    """Command to sync the secret_stores table with the current config.

    :param sql_url: sql connection string to connect to a database
    :param verbose: If True, log and print more information
    :param log_file: If set, override the log_file configured
    """
    if verbose:
        # The verbose flag prints out log events to the screen, otherwise
        # the log events will only go to the log file
        CONF.set_override('debug', True)

    if log_file:
        CONF.set_override('log_file', log_file)
    LOG.info("Syncing the secret_stores table with barbican.conf")
    log.setup(CONF, 'barbican')

    try:
        if sql_url:
            CONF.set_override('sql_connection', sql_url)
        repo.setup_database_engine_and_factory(
            initialize_secret_stores=True)
        repo.commit()
    except Exception:
        LOG.exception('Failed to sync secret_stores table.')
        repo.rollback()
        # Bare `raise` preserves the original traceback; `raise ex` would
        # append an extra frame for this re-raise site.
        raise
    finally:
        if verbose:
            CONF.clear_override('debug')
        if log_file:
            CONF.clear_override('log_file')
        repo.clear()
        if sql_url:
            CONF.clear_override('sql_connection')
        log.setup(CONF, 'barbican')  # reset the overrides
def clean_command(sql_url, min_num_days, do_clean_unassociated_projects,
                  do_soft_delete_expired_secrets, verbose, log_file):
    """Clean command to clean up the database.

    :param sql_url: sql connection string to connect to a database
    :param min_num_days: clean up soft deletions older than this date
    :param do_clean_unassociated_projects: If True, clean up
        unassociated projects
    :param do_soft_delete_expired_secrets: If True, soft delete secrets
        that have expired
    :param verbose: If True, log and print more information
    :param log_file: If set, override the log_file configured
    """
    if verbose:
        # The verbose flag prints out log events to the screen, otherwise
        # the log events will only go to the log file
        CONF.set_override('debug', True)

    if log_file:
        CONF.set_override('log_file', log_file)

    LOG.info("Cleaning up soft deletions in the barbican database")
    log.setup(CONF, 'barbican')

    cleanup_total = 0
    current_time = timeutils.utcnow()
    stop_watch = timeutils.StopWatch()
    stop_watch.start()
    try:
        if sql_url:
            CONF.set_override('sql_connection', sql_url)
        repo.setup_database_engine_and_factory()

        if do_clean_unassociated_projects:
            cleanup_total += cleanup_unassociated_projects()

        if do_soft_delete_expired_secrets:
            cleanup_total += soft_delete_expired_secrets(
                threshold_date=current_time)

        # Entries older than the threshold are purged; a negative
        # min_num_days means "everything up to now".
        if min_num_days >= 0:
            threshold_date = current_time - datetime.timedelta(
                days=min_num_days)
        else:
            threshold_date = current_time
        cleanup_total += cleanup_all(threshold_date=threshold_date)

        repo.commit()
    except Exception:
        LOG.exception('Failed to clean up soft deletions in database.')
        repo.rollback()
        cleanup_total = 0  # rollback happened, no entries affected
        # Bare `raise` keeps the original traceback intact; `raise ex`
        # would add a redundant frame for this re-raise site.
        raise
    finally:
        stop_watch.stop()
        elapsed_time = stop_watch.elapsed()
        if verbose:
            CONF.clear_override('debug')

        if log_file:
            CONF.clear_override('log_file')

        repo.clear()

        if sql_url:
            CONF.clear_override('sql_connection')

        log.setup(CONF, 'barbican')  # reset the overrides

        LOG.info("Cleaning of database affected %s entries", cleanup_total)
        LOG.info('DB clean up finished in %s seconds', elapsed_time)
def __init__(self, conf):
    """Store the configuration and eagerly set up the database engine.

    :param conf: configuration object for this service — presumably an
        oslo.config ConfigOpts; confirm against callers.
    """
    self.conf = conf
    # Initialize the db engine/session factory now rather than lazily on
    # first use.
    repositories.setup_database_engine_and_factory()