def run_task(*args, **kwargs):
    """Sync the repository list from git and delete DB repos no longer listed there."""
    try:
        init_logging()
        init_db()
        if not REPOLIST_GIT_TOKEN:
            LOGGER.warning("REPOLIST_GIT_TOKEN not set, skipping download of repositories from git.")
            return "SKIPPED"
        _, repos = GitRepoListHandler.fetch_git_repolists()
        if not repos:
            return "ERROR"
        controller = RepositoryController()
        # Start from everything currently in the DB and discard entries that git
        # still lists; whatever remains is stale and gets deleted.
        stale_repos = controller.repo_store.list_repositories()
        for _, content_set, basearch, releasever, _, _, _, _ in repos:
            stale_repos.pop((content_set, basearch, releasever), None)
        controller.delete_repos(stale_repos)
    except Exception as err:  # pylint: disable=broad-except
        msg = "Internal server error <%s>" % err.__hash__()
        LOGGER.exception(msg)
        DatabaseHandler.rollback()
        return "DB_ERROR" if isinstance(err, DatabaseError) else "ERROR"
    finally:
        DatabaseHandler.close_connection()
    return "OK"
def run_task(*args, **kwargs):
    """Export the package-tree disk dump to PKGTREE_FILE."""
    try:
        export_pkgtree(PKGTREE_FILE)
    except Exception as err:  # pylint: disable=broad-except
        LOGGER.exception("Internal server error <%s>" % err.__hash__())
        DatabaseHandler.rollback()
        return "DB_ERROR" if isinstance(err, DatabaseError) else "ERROR"
    finally:
        DatabaseHandler.close_connection()
    return "OK"
def __init__(self):
    """Prepare the DB connection/schema and index the available upgrade scripts."""
    LOGGER.info('DatabaseUpgrade initializing.')
    DatabaseHandler.close_connection()
    init_db()
    self.init_schema()
    # Directory with the upgrade SQL scripts; normalize to a trailing slash
    # because downstream code concatenates file names onto it.
    scripts_dir = os.getenv('DB_UPGRADE_SCRIPTS_DIR', str(DB_UPGRADES_PATH))
    if not scripts_dir.endswith('/'):
        scripts_dir += '/'
    self.scripts_dir = scripts_dir
    # Map of target version -> script file, plus the highest version available.
    self.version2file_map, self.version_max = self._load_upgrade_file_list(self.scripts_dir)
def run_task(*args, **kwargs):
    """Delete the OVAL file identified by kwarg ``oval_id`` from the DB.

    Returns "OK" on success, "DB_ERROR" on database failures, "ERROR" otherwise.
    """
    try:
        oval_id = kwargs.get("oval_id", None)
        init_logging()
        init_db()
        oval_controller = OvalController()
        oval_controller.delete_oval_file(oval_id)
    except Exception as err:  # pylint: disable=broad-except
        msg = "Internal server error <%s>" % err.__hash__()
        LOGGER.exception(msg)
        DatabaseHandler.rollback()
        # Report DB failures distinctly, consistent with the other run_task variants.
        if isinstance(err, DatabaseError):
            return "DB_ERROR"
        return "ERROR"
    finally:
        DatabaseHandler.close_connection()
    return "OK"
def main(filename):
    """ Main loop: dump the JSON package tree to *filename*."""
    init_logging()
    init_db()
    pkgtree = JsonPkgTree(DatabaseHandler.get_connection(), filename)
    pkgtree.dump()
def run_task(*args, **kwargs):
    """Delete the content set given by kwarg ``repo`` from the DB.

    Returns "OK" on success, "DB_ERROR" on database failures, "ERROR" otherwise.
    """
    try:
        repo = kwargs.get("repo", None)
        init_logging()
        init_db()
        repository_controller = RepositoryController()
        repository_controller.delete_content_set(repo)
    except Exception as err:  # pylint: disable=broad-except
        msg = "Internal server error <%s>" % err.__hash__()
        LOGGER.exception(msg)
        DatabaseHandler.rollback()
        # Report DB failures distinctly, consistent with the other run_task variants.
        if isinstance(err, DatabaseError):
            return "DB_ERROR"
        return "ERROR"
    finally:
        DatabaseHandler.close_connection()
    return "OK"
def __init__(self):
    """Set up the DB connection, the backing stores and the content-set id map."""
    self.conn = DatabaseHandler.get_connection()
    self.logger = get_logger(__name__)
    # Stores persisting the individual entity kinds.
    self.module_store = ModulesStore()
    self.package_store = PackageStore()
    self.update_store = UpdateStore()
    # Cached mapping of content-set label -> DB id.
    self.content_set_to_db_id = self._prepare_content_set_map()
def run_task(*args, **kwargs):
    """Import all products and repositories from the input lists into the DB."""
    try:
        products = kwargs.get("products", None)
        repos = kwargs.get("repos", None)
        git_sync = kwargs.get("git_sync", False)
        init_logging()
        init_db()
        if products:
            ProductStore().store(products)
        if repos:
            controller = RepositoryController()
            db_repos = controller.repo_store.list_repositories()
            # Sync repos from input
            for repo_url, content_set, basearch, releasever, cert_name, ca_cert, cert, key in repos:
                controller.add_repository(repo_url, content_set, basearch, releasever,
                                          cert_name=cert_name, ca_cert=ca_cert,
                                          cert=cert, key=key)
                db_repos.pop((content_set, basearch, releasever), None)
            if git_sync:
                # Warn about extra repos in DB when syncing main repolist from git
                for content_set, basearch, releasever in db_repos:
                    LOGGER.warning("Repository in DB but not in git repolist: %s",
                                   ", ".join(filter(None, (content_set, basearch, releasever))))
                REPOS_TO_CLEANUP.set(len(db_repos))
            controller.import_repositories()
    except Exception as err:  # pylint: disable=broad-except
        msg = "Internal server error <%s>" % err.__hash__()
        LOGGER.exception(msg)
        DatabaseHandler.rollback()
        return "DB_ERROR" if isinstance(err, DatabaseError) else "ERROR"
    finally:
        DatabaseHandler.close_connection()
    return "OK"
def main():
    """ Main loop: write the SQLite dump stamped with the current time."""
    init_logging()
    init_db()
    timestamp = format_datetime(now())
    dump = SqliteDump(DatabaseHandler.get_connection(), DUMP)
    dump.dump(timestamp)
def run_task(*args, **kwargs):
    """Download and store OVAL data."""
    try:
        init_logging()
        init_db()
        OvalController().store()
    except Exception as err:  # pylint: disable=broad-except
        LOGGER.exception("Internal server error <%s>" % err.__hash__())
        # Bump the failure metric before rolling back the transaction.
        FAILED_IMPORT_OVAL.inc()
        DatabaseHandler.rollback()
        return "DB_ERROR" if isinstance(err, DatabaseError) else "ERROR"
    finally:
        DatabaseHandler.close_connection()
    return "OK"
def run_task(*args, **kwargs):
    """Sync every repository already registered in the database."""
    try:
        init_logging()
        init_db()
        controller = RepositoryController()
        controller.add_db_repositories()
        controller.store()
    except Exception as err:  # pylint: disable=broad-except
        LOGGER.exception("Internal server error <%s>" % err.__hash__())
        DatabaseHandler.rollback()
        return "DB_ERROR" if isinstance(err, DatabaseError) else "ERROR"
    finally:
        DatabaseHandler.close_connection()
    return "OK"
def process(self):
    """Handle the dbchange GET request: return the pkgtree change timestamp."""
    init_db()
    self.db_instance = DatabaseHandler.get_connection()
    with self.db_instance.cursor() as crs:
        crs.execute("select pkgtree_change from dbchange")
        row = crs.fetchone()
    return {"pkgtree_change": str(row[0])}
def __init__(self):
    """Load the cpe table into label->id and label->name lookup maps."""
    self.logger = get_logger(__name__)
    self.conn = DatabaseHandler.get_connection()
    self.cpe_label_to_id = {}
    self.cpe_label_to_name = {}
    # Use the cursor as a context manager so it is closed even if the query
    # raises (the original closed it only on the success path); matches the
    # `with ... cursor()` style used elsewhere in this file.
    with self.conn.cursor() as cur:
        cur.execute("select id, label, name from cpe")
        for cpe_id, label, name in cur.fetchall():
            self.cpe_label_to_id[label] = cpe_id
            self.cpe_label_to_name[label] = name
def exporter_db_conn():
    """Fixture for db connection."""
    def _handler(postgresql):
        """Init DB with data."""
        conn = psycopg2.connect(**postgresql.dsn())
        cursor = conn.cursor()
        # Apply the schema, users and test data in order.
        for sql_path in ("../../database/vmaas_user_create_postgresql.sql",
                         "../../database/vmaas_db_postgresql.sql",
                         "test_data/exporter/exporter_test_data.sql"):
            with open(sql_path, "r", encoding='utf8') as sql_file:
                cursor.execute(sql_file.read())
        cursor.close()
        conn.commit()
        conn.close()

    # Create temporary postgresql server
    # pylint: disable=invalid-name
    Postgresql = testing.postgresql.PostgresqlFactory(cache_initialized_db=True,
                                                      on_initialized=_handler)
    postgresql = Postgresql()
    os.environ["POSTGRESQL_PORT"] = str(postgresql.dsn()["port"])
    DatabaseHandler.close_connection()
    init_db()
    conn = psycopg2.connect(**postgresql.dsn())
    yield conn
    # teardown - close connection, stop postgresql
    conn.close()
    postgresql.stop()
def upgrade(self):
    """Perform the database upgrade.

    Takes the DB lock, compares the current schema version with the highest
    available upgrade script, and applies the missing scripts in order.
    The lock is always released, even if a script fails.
    """
    conn = DatabaseHandler.get_connection()
    try:
        self._get_db_lock(conn)
        db_version = self._get_current_db_version(conn)
        if db_version == self.version_max:
            LOGGER.info('Database is up to date at version: %d', db_version)
            return
        if db_version > self.version_max:
            msg = 'Database version %d is greater than upgrade version %d' % (db_version, self.version_max)
            LOGGER.warning(msg)
            return
        LOGGER.info('Database requires upgrade from version %d to %d', db_version, self.version_max)
        upgrades_to_apply = self._get_upgrades_to_apply(db_version, self.version_max)
        # Loop variable renamed from "upgrade", which shadowed this method.
        for step in upgrades_to_apply:
            self._apply_upgrade(step['ver'], step['script'], conn)
    finally:
        self._release_db_lock(conn)
def init_schema(self):
    """Initialize database schema.

    Idempotent: returns immediately when the schema already exists. The
    initialized-check is repeated after acquiring the DB lock, because another
    process may have created the schema between the first (lock-free) check
    and lock acquisition.
    """
    cfg = Config()
    conn = DatabaseHandler.get_connection()
    # Cheap fast path without taking the lock.
    if self._is_initialized(conn):
        LOGGER.info("DB schema is already initialized.")
        return
    try:
        self._get_db_lock(conn)
        # Re-check under the lock to close the check-then-act race.
        if self._is_initialized(conn):
            LOGGER.info("DB schema is already initialized.")
            return
        LOGGER.info("Empty database, initializing...")
        with conn.cursor() as cur:
            with open(USER_CREATE_SQL_PATH, "r", encoding='utf8') as f_user, \
                    open(DB_CREATE_SQL_PATH, "r", encoding='utf8') as f_db:
                cur.execute(f_user.read())
                cur.execute(f_db.read())
            # Passwords come from local config, not untrusted input.
            cur.execute(f"ALTER USER vmaas_writer WITH PASSWORD '{cfg.postgresql_writer_password}'")
            cur.execute(f"ALTER USER vmaas_reader WITH PASSWORD '{cfg.postgresql_reader_password}'")
        conn.commit()
    finally:
        self._release_db_lock(conn)
def __init__(self):
    """Grab a module logger and the shared DB connection."""
    self.conn = DatabaseHandler.get_connection()
    self.logger = get_logger(__name__)