def list(self, excludeType=None):
    """List VM exclusions, optionally filtered by exclusion type.

    :param excludeType: optional exclusion type name ('domain',
                        'project' or 'user'); any other value (or
                        None) lists exclusions of every type.
    """
    repositories.setup_database_engine_and_factory()
    repo = repositories.get_vmexclude_repository()
    exclude_type = None
    if excludeType is not None:
        print("Filter by exclude type %s" % (excludeType))
        # Reuse the class-level name->id mapping instead of a
        # hard-coded if/elif chain, keeping this consistent with
        # add()/delete(). Unknown names fall back to None (no
        # filter), matching the previous behavior.
        exclude_type = VmExcludeCommands.EXCLUSION_TYPES.get(excludeType)
    excludes = repo.get_type_entities(exclude_type=exclude_type)
    headers = ['id', 'type', 'exclude_id']
    pt = prettytable.PrettyTable(headers)
    for instance in excludes:
        pt.add_row([
            instance.id,
            VmExcludeCommands.EXCLUSION_MAP[instance.exclude_type],
            instance.exclude_id
        ])
    if six.PY3:
        print(encodeutils.safe_encode(pt.get_string()).decode())
    else:
        print(encodeutils.safe_encode(pt.get_string()))
def list(self, instanceid=None, days=None):
    """List VM expirations.

    :param instanceid: optional instance id to filter on.
    :param days: optional horizon in days; only expirations occurring
                 within that many days from now are shown.
    """
    repositories.setup_database_engine_and_factory()
    repo = repositories.get_vmexpire_repository()
    res = repo.get_all_by(instance_id=instanceid, project_id=None)
    headers = [
        'id', 'expire', 'instance.name', 'instance.id', 'project.id',
        'user.id', 'notif', 'notif.last'
    ]
    limit = None
    if days:
        try:
            dt = datetime.datetime.now() + datetime.timedelta(
                days=int(days))
            limit = time.mktime(dt.timetuple())
        except (ValueError, OverflowError) as e:
            # int(days) raises ValueError for non-numeric input;
            # timedelta()/mktime() raise OverflowError for values out
            # of range. Narrowed from a bare "except Exception".
            print(str(e))
            return
    pt = prettytable.PrettyTable(headers)
    for instance in res:
        # Skip entries expiring beyond the requested horizon.
        if limit and instance.expire > limit:
            continue
        pt.add_row([
            instance.id,
            datetime.datetime.fromtimestamp(instance.expire),
            instance.instance_name, instance.instance_id,
            instance.project_id, instance.user_id,
            instance.notified, instance.notified_last
        ])
    if six.PY3:
        print(encodeutils.safe_encode(pt.get_string()).decode())
    else:
        print(encodeutils.safe_encode(pt.get_string()))
def add(self, excludeType=None, excludeId=None):
    """Create a new VM exclusion of the given type for the given id."""
    if excludeId is None:
        print("id option is mandatory")
        return
    if excludeType not in VmExcludeCommands.EXCLUSION_TYPES:
        print("type is not valid")
        return
    repositories.setup_database_engine_and_factory()
    exclude_repo = repositories.get_vmexclude_repository()
    exclude = VmExclude()
    exclude.exclude_type = VmExcludeCommands.EXCLUSION_TYPES[excludeType]
    exclude.exclude_id = excludeId
    try:
        exclude = exclude_repo.create_exclude(exclude)
    except Exception as err:
        print(str(err))
        return
    repositories.commit()
    table = prettytable.PrettyTable(['id', 'type', 'exclude_id'])
    table.add_row([
        exclude.id,
        VmExcludeCommands.EXCLUSION_MAP[exclude.exclude_type],
        exclude.exclude_id,
    ])
    encoded = encodeutils.safe_encode(table.get_string())
    print(encoded.decode() if six.PY3 else encoded)
def clean_command(sql_url, min_num_days, verbose, log_file):
    """Clean command to clean up the database.

    :param sql_url: sql connection string to connect to a database
    :param min_num_days: clean up soft deletions older than this date
    :param verbose: If True, log and print more information
    :param log_file: If set, override the log_file configured
    """
    if verbose:
        # The verbose flag prints out log events to the screen, otherwise
        # the log events will only go to the log file
        CONF.set_override('debug', True)
    if log_file:
        CONF.set_override('log_file', log_file)
    # NOTE: message previously said "barbican database" — a copy-paste
    # leftover from the barbican project this code derives from.
    LOG.info("Cleaning up soft deletions in the os-vm-expire database")
    log.setup(CONF, 'osvmexpire')
    cleanup_total = 0
    current_time = timeutils.utcnow()
    stop_watch = timeutils.StopWatch()
    stop_watch.start()
    try:
        if sql_url:
            CONF.set_override('sql_connection', sql_url)
        repo.setup_database_engine_and_factory()
        if min_num_days >= 0:
            threshold_date = current_time - datetime.timedelta(
                days=min_num_days)
        else:
            threshold_date = current_time
        cleanup_total += cleanup_all(threshold_date=threshold_date)
        repo.commit()
    except Exception:
        LOG.exception('Failed to clean up soft deletions in database.')
        repo.rollback()
        cleanup_total = 0  # rollback happened, no entries affected
        # Bare raise preserves the original exception and traceback
        # (was "raise ex").
        raise
    finally:
        stop_watch.stop()
        elapsed_time = stop_watch.elapsed()
        if verbose:
            CONF.clear_override('debug')
        if log_file:
            CONF.clear_override('log_file')
        repo.clear()
        if sql_url:
            CONF.clear_override('sql_connection')
        log.setup(CONF, 'osvmexpire')  # reset the overrides
        LOG.info("Cleaning of database affected %s entries", cleanup_total)
        LOG.info('DB clean up finished in %s seconds', elapsed_time)
def remove(self, expirationid):
    """Delete the VM expiration identified by *expirationid*."""
    if not expirationid:
        # Fixed typo ("paramerer") and made the message consistent
        # with the extend()/add() commands.
        print("Missing id parameter")
        return
    repositories.setup_database_engine_and_factory()
    repo = repositories.get_vmexpire_repository()
    repo.delete_entity_by_id(entity_id=expirationid)
    repositories.commit()
    print("VM expiration successfully removed!")
def extend(self, expirationid):
    """Push back the expiration date of the given VM expiration."""
    if not expirationid:
        print("Missing id parameter")
        return
    repositories.setup_database_engine_and_factory()
    expire_repo = repositories.get_vmexpire_repository()
    expire_repo.extend_vm(entity_id=expirationid)
    repositories.commit()
    print("VM expiration successfully extended!")
def delete(self, excludeId=None):
    """Remove the VM exclusion identified by *excludeId*."""
    if excludeId is None:
        print("Missing mandatory id parameter")
        return
    repositories.setup_database_engine_and_factory()
    exclude_repo = repositories.get_vmexclude_repository()
    exclude_repo.delete_entity_by_id(excludeId)
    repositories.commit()
    print("Exclude deleted")
def add(self, instanceid):
    """Create a VM expiration entry for the given instance id."""
    if not instanceid:
        print("Missing id parameter")
        return
    repositories.setup_database_engine_and_factory()
    expire_repo = repositories.get_vmexpire_repository()
    new_expire = expire_repo.add_vm(instanceid)
    if not new_expire:
        print("Failure to add VM expiration, check logs")
        return
    repositories.commit()
    print("VM expiration successfully generated!")
def __init__(self):
    """Wire up a notification listener on the configured topic."""
    super(TaskServer, self).__init__()
    # Setting up db engine to avoid lazy initialization
    repositories.setup_database_engine_and_factory()
    transport = oslo_messaging.get_transport(CONF)
    notif_conf = getattr(CONF, config.KS_NOTIFICATIONS_GRP_NAME)
    target = oslo_messaging.Target(
        topic=notif_conf.topic,
        exchange=notif_conf.control_exchange)
    # This instance itself acts as the sole notification endpoint.
    self._server = oslo_messaging.get_notification_listener(
        transport,
        [target],
        [self],
        pool=notif_conf.pool_name)
def _wrapper(global_config, **local_conf):
    """Build the WSGI app with logging and the DB engine pre-initialized."""
    # Configure oslo logging and configuration services.
    log.setup(CONF, 'osvmexpire')
    config.setup_remote_pydev_debug()
    # Initializing the database engine and session factory before the app
    # starts ensures we don't lose requests due to lazy initialization of
    # db connections.
    repositories.setup_database_engine_and_factory()
    app = func(global_config, **local_conf)
    logger = log.getLogger(__name__)
    logger.info('os-vm-expire app created and initialized')
    return app
def __init__(self):
    """Set up the periodic worker that runs the expiry check task."""
    super(CleanerServer, self).__init__()
    # Eagerly initialize the DB engine before the worker starts.
    repositories.setup_database_engine_and_factory()
    start_time = time.time()
    periodic_callables = [(check, (start_time,), {})]
    self.w = periodics.PeriodicWorker(periodic_callables)