def main(filename):
    """Entry point: export the package tree as JSON into *filename*.

    Sets up logging and the database layer, then hands the live DB
    connection to the JsonPkgTree exporter which writes the dump.
    """
    init_logging()
    init_db()
    connection = DatabaseHandler.get_connection()
    exporter = JsonPkgTree(connection, filename)
    exporter.dump()
def db_conn():
    """Fixture for db connection.

    Spins up a throwaway PostgreSQL server seeded with the VMaaS schema,
    yields a live psycopg2 connection to it, and tears everything down
    after the test. Exposes the server's port via POSTGRESQL_PORT so that
    init_db() (which presumably reads it from the environment — confirm)
    targets the temporary instance.
    """
    def _handler(postgresql):
        """Init DB with data: load the user-creation and schema SQL scripts."""
        # NOTE(review): paths are relative to the test's working directory.
        conn = psycopg2.connect(**postgresql.dsn())
        cursor = conn.cursor()
        with open("../../database/vmaas_user_create_postgresql.sql", "r", encoding='utf8') as psql_user:
            cursor.execute(psql_user.read())
        with open("../../database/vmaas_db_postgresql.sql", "r", encoding='utf8') as vmaas_db:
            cursor.execute(vmaas_db.read())
        cursor.close()
        conn.commit()
        conn.close()
    # Create temporary postgresql server; the factory caches the initialized
    # DB so repeated fixture uses skip re-running the SQL scripts.
    # pylint: disable=invalid-name
    Postgresql = testing.postgresql.PostgresqlFactory(cache_initialized_db=True, on_initialized=_handler)
    postgresql = Postgresql()
    os.environ["POSTGRESQL_PORT"] = str(postgresql.dsn()["port"])
    init_db()
    conn = psycopg2.connect(**postgresql.dsn())
    yield conn
    # teardown - close connection, stop postgresql
    conn.close()
    postgresql.stop()
def main():
    """Block until required backing services answer, then exec the real command.

    Checks PostgreSQL, the websocket server and the reposcan API (each one
    skippable via configuration or when running inside that very service's
    pod), then replaces this process with ``sys.argv[1:]`` via execvp.
    """
    init_logging()
    init_db()
    config = Config()

    # PostgreSQL readiness
    if not config.db_available:
        LOGGER.info("Skipping PostgreSQL check")
    else:
        wait(DatabaseHandler.get_connection, service="PostgreSQL")

    # Websocket server readiness (skipped inside the websocket pod itself
    # and in init containers)
    websocket_needed = (
        config.websocket_host
        and "vmaas-websocket" not in config.pod_hostname
        and not config.is_init_container
    )
    if not websocket_needed:
        LOGGER.info("Skipping Websocket server check")
    else:
        wait(
            request,
            "GET",
            f"http://{config.websocket_host}:{config.websocket_port}/api/v1/monitoring/health",
            service="Websocket server",
            timeout=1,
        )

    # Reposcan API readiness (skipped inside the reposcan pod itself)
    if not (config.reposcan_host and "vmaas-reposcan" not in config.pod_hostname):
        LOGGER.info("Skipping Reposcan API check")
    else:
        wait(
            request,
            "GET",
            f"http://{config.reposcan_host}:{config.reposcan_port}/api/v1/monitoring/health",
            service="Reposcan API",
            timeout=1,
        )

    # Replace this process with the wrapped command.
    os.execvp(sys.argv[1], sys.argv[1:])
def run_task(*args, **kwargs):
    """Start importing from git.

    Fetches the git repolist and deletes every repository present in the DB
    but absent from git. Returns a status string: "OK", "SKIPPED" (no git
    token configured), "DB_ERROR" or "ERROR".
    """
    try:
        init_logging()
        init_db()
        if not REPOLIST_GIT_TOKEN:
            LOGGER.warning(
                "REPOLIST_GIT_TOKEN not set, skipping download of repositories from git."
            )
            return "SKIPPED"
        _, repos = GitRepoListHandler.fetch_git_repolists()
        if not repos:
            return "ERROR"
        controller = RepositoryController()
        stale = controller.repo_store.list_repositories()
        # Everything still listed in git is not stale — drop it from the map.
        for _, content_set, basearch, releasever, _, _, _, _ in repos:
            stale.pop((content_set, basearch, releasever), None)
        # Whatever remains exists only in the DB; delete it.
        controller.delete_repos(stale)
    except Exception as err:  # pylint: disable=broad-except
        msg = "Internal server error <%s>" % err.__hash__()
        LOGGER.exception(msg)
        DatabaseHandler.rollback()
        if isinstance(err, DatabaseError):
            return "DB_ERROR"
        return "ERROR"
    finally:
        DatabaseHandler.close_connection()
    return "OK"
def main():
    """Entry point: export the database into the sqlite dump file DUMP.

    Stamps the dump with the current time so consumers can tell how
    fresh the exported data is.
    """
    init_logging()
    init_db()
    connection = DatabaseHandler.get_connection()
    export_time = format_datetime(now())
    dumper = SqliteDump(connection, DUMP)
    dumper.dump(export_time)
def db_conn():
    """Fixture for db connection.

    Starts a temporary PostgreSQL instance via create_pg(), initializes the
    DB layer, yields a live psycopg2 connection, and cleans up afterwards.
    """
    pg_server = create_pg()
    init_db()
    connection = psycopg2.connect(**pg_server.dsn())
    yield connection
    # teardown - close connection, stop postgresql
    connection.close()
    pg_server.stop()
def process(self):
    """Processes the dbchange get request.

    Reads the pkgtree_change timestamp from the dbchange table and returns
    it as ``{"pkgtree_change": "<timestamp>"}``.
    """
    init_db()
    self.db_instance = DatabaseHandler.get_connection()
    with self.db_instance.cursor() as cursor:
        cursor.execute("select pkgtree_change from dbchange")
        # NOTE(review): assumes dbchange always has a row — fetchone()
        # returning None would raise TypeError here; confirm schema invariant.
        row = cursor.fetchone()
        result = {"pkgtree_change": str(row[0])}
    return result
def __init__(self):
    """Prepare the upgrade machinery: fresh DB connection, schema, script map."""
    LOGGER.info('DatabaseUpgrade initializing.')
    # Drop any stale connection before re-initializing the DB layer.
    DatabaseHandler.close_connection()
    init_db()
    self.init_schema()
    # Locate the upgrade SQL scripts; normalize the path to end with a slash.
    scripts_dir = os.getenv('DB_UPGRADE_SCRIPTS_DIR', str(DB_UPGRADES_PATH))
    if not scripts_dir.endswith('/'):
        scripts_dir += '/'
    self.scripts_dir = scripts_dir
    # Map of schema version -> upgrade file, plus the highest known version.
    self.version2file_map, self.version_max = self._load_upgrade_file_list(self.scripts_dir)
def _create_db_conn():
    """Create database connection.

    Connection parameters come from POSTGRESQL_* environment variables;
    the placeholder default "FILL" signals an unconfigured value.
    """
    dsn = {
        "user": os.getenv("POSTGRESQL_USER", "FILL"),
        "host": os.getenv("POSTGRESQL_HOST", "FILL"),
        "password": os.getenv("POSTGRESQL_PASSWORD", "FILL"),
        "database": os.getenv("POSTGRESQL_DATABASE", "FILL"),
        "port": os.getenv("POSTGRESQL_PORT", "FILL"),
    }
    conn = psycopg2.connect(**dsn)
    init_db()
    return conn
def run_task(*args, **kwargs):
    """Function to start deleting OVAL files.

    Deletes the OVAL file identified by the ``oval_id`` keyword argument.
    Returns "OK" on success, "ERROR" on any failure (after rollback).
    """
    try:
        oval_id = kwargs.get("oval_id", None)
        init_logging()
        init_db()
        controller = OvalController()
        controller.delete_oval_file(oval_id)
    except Exception as err:  # pylint: disable=broad-except
        msg = "Internal server error <%s>" % err.__hash__()
        LOGGER.exception(msg)
        DatabaseHandler.rollback()
        return "ERROR"
    finally:
        DatabaseHandler.close_connection()
    return "OK"
def run_task(*args, **kwargs):
    """Function to start deleting repos.

    Deletes the content set named by the ``repo`` keyword argument.
    Returns "OK" on success, "ERROR" on any failure (after rollback).
    """
    try:
        repo = kwargs.get("repo", None)
        init_logging()
        init_db()
        controller = RepositoryController()
        controller.delete_content_set(repo)
    except Exception as err:  # pylint: disable=broad-except
        msg = "Internal server error <%s>" % err.__hash__()
        LOGGER.exception(msg)
        DatabaseHandler.rollback()
        return "ERROR"
    finally:
        DatabaseHandler.close_connection()
    return "OK"
def run_task(*args, **kwargs):
    """Function to import all repositories from input list to the DB.

    Keyword args: ``products`` (product definitions to store), ``repos``
    (8-tuples of repo URL, content set, basearch, releasever and cert data)
    and ``git_sync`` (warn about repos present in DB but missing from the
    git repolist). Returns "OK", "DB_ERROR" or "ERROR".
    """
    try:
        products = kwargs.get("products", None)
        repos = kwargs.get("repos", None)
        git_sync = kwargs.get("git_sync", False)
        init_logging()
        init_db()
        if products:
            product_store = ProductStore()
            product_store.store(products)
        if repos:
            controller = RepositoryController()
            db_repos = controller.repo_store.list_repositories()
            # Sync repos from input; anything matched is removed from db_repos
            # so only DB-only leftovers remain afterwards.
            for repo_url, content_set, basearch, releasever, cert_name, ca_cert, cert, key in repos:
                controller.add_repository(repo_url, content_set, basearch, releasever,
                                          cert_name=cert_name, ca_cert=ca_cert,
                                          cert=cert, key=key)
                db_repos.pop((content_set, basearch, releasever), None)
            if git_sync:
                # Warn about extra repos in DB when syncing main repolist from git
                for content_set, basearch, releasever in db_repos:
                    LOGGER.warning(
                        "Repository in DB but not in git repolist: %s",
                        ", ".join(filter(None, (content_set, basearch, releasever))))
                REPOS_TO_CLEANUP.set(len(db_repos))
            controller.import_repositories()
    except Exception as err:  # pylint: disable=broad-except
        msg = "Internal server error <%s>" % err.__hash__()
        LOGGER.exception(msg)
        DatabaseHandler.rollback()
        if isinstance(err, DatabaseError):
            return "DB_ERROR"
        return "ERROR"
    finally:
        DatabaseHandler.close_connection()
    return "OK"
def run_task(*args, **kwargs):
    """Function to start syncing OVALs.

    Runs the OVAL import; on failure increments the FAILED_IMPORT_OVAL
    metric and rolls back. Returns "OK", "DB_ERROR" or "ERROR".
    """
    try:
        init_logging()
        init_db()
        oval_controller = OvalController()
        oval_controller.store()
    except Exception as err:  # pylint: disable=broad-except
        msg = "Internal server error <%s>" % err.__hash__()
        LOGGER.exception(msg)
        FAILED_IMPORT_OVAL.inc()
        DatabaseHandler.rollback()
        if isinstance(err, DatabaseError):
            return "DB_ERROR"
        return "ERROR"
    finally:
        DatabaseHandler.close_connection()
    return "OK"
def run_task(*args, **kwargs):
    """Function to start syncing all repositories available from database.

    Loads every repository already known to the DB and stores/refreshes it.
    Returns "OK", "DB_ERROR" or "ERROR".
    """
    try:
        init_logging()
        init_db()
        controller = RepositoryController()
        controller.add_db_repositories()
        controller.store()
    except Exception as err:  # pylint: disable=broad-except
        msg = "Internal server error <%s>" % err.__hash__()
        LOGGER.exception(msg)
        DatabaseHandler.rollback()
        if isinstance(err, DatabaseError):
            return "DB_ERROR"
        return "ERROR"
    finally:
        DatabaseHandler.close_connection()
    return "OK"