def main(): """Sets up and run whole application""" # Set up endpoint for prometheus monitoring init_logging() upgrader = DatabaseUpgrade() upgrader.upgrade()
def main():
    """Block until the required backing services respond, then exec the real command."""
    init_logging()
    init_db()
    cfg = Config()

    if cfg.db_available:
        wait(DatabaseHandler.get_connection, service="PostgreSQL")
    else:
        LOGGER.info("Skipping PostgreSQL check")

    websocket_check_needed = (cfg.websocket_host
                              and "vmaas-websocket" not in cfg.pod_hostname
                              and not cfg.is_init_container)
    if websocket_check_needed:
        health_url = f"http://{cfg.websocket_host}:{cfg.websocket_port}/api/v1/monitoring/health"
        wait(request, "GET", health_url, service="Websocket server", timeout=1)
    else:
        LOGGER.info("Skipping Websocket server check")

    reposcan_check_needed = cfg.reposcan_host and "vmaas-reposcan" not in cfg.pod_hostname
    if reposcan_check_needed:
        health_url = f"http://{cfg.reposcan_host}:{cfg.reposcan_port}/api/v1/monitoring/health"
        wait(request, "GET", health_url, service="Reposcan API", timeout=1)
    else:
        LOGGER.info("Skipping Reposcan API check")

    # Replace this process with the command supplied on the command line.
    os.execvp(sys.argv[1], sys.argv[1:])
def run_task(*args, **kwargs):
    """Delete repositories present in the DB but missing from the git repolist."""
    try:
        init_logging()
        init_db()
        if not REPOLIST_GIT_TOKEN:
            LOGGER.warning("REPOLIST_GIT_TOKEN not set, skipping download of repositories from git.")
            return "SKIPPED"
        _, repos = GitRepoListHandler.fetch_git_repolists()
        if not repos:
            return "ERROR"
        controller = RepositoryController()
        stale_repos = controller.repo_store.list_repositories()
        # Remove every repo listed in git from the DB view; whatever remains
        # exists only in the DB and should be deleted.
        for repo in repos:
            _, content_set, basearch, releasever = repo[:4]
            stale_repos.pop((content_set, basearch, releasever), None)
        controller.delete_repos(stale_repos)
    except Exception as err:  # pylint: disable=broad-except
        LOGGER.exception("Internal server error <%s>" % hash(err))
        DatabaseHandler.rollback()
        if isinstance(err, DatabaseError):
            return "DB_ERROR"
        return "ERROR"
    finally:
        DatabaseHandler.close_connection()
    return "OK"
def main(filename):
    """Dump the package tree JSON into *filename*."""
    init_logging()
    init_db()
    connection = DatabaseHandler.get_connection()
    exporter = JsonPkgTree(connection, filename)
    exporter.dump()
def main():
    """Export the database into the sqlite dump file, stamped with the current time."""
    init_logging()
    init_db()
    connection = DatabaseHandler.get_connection()
    dump_timestamp = format_datetime(now())
    exporter = SqliteDump(connection, DUMP)
    exporter.dump(dump_timestamp)
def run_task(*args, **kwargs):
    """Fetch the repolist from git and hand it over to the import handler."""
    init_logging()
    # Without a token the git repository cannot be cloned at all.
    if not REPOLIST_GIT_TOKEN:
        LOGGER.warning("REPOLIST_GIT_TOKEN not set, skipping download of repositories from git.")
        return "SKIPPED"
    products, repos = GitRepoListHandler.fetch_git_repolists()
    if products is None or repos is None:
        return "ERROR"
    return RepolistImportHandler.run_task(products=products, repos=repos, git_sync=True)
def run_task(*args, **kwargs):
    """Clone the repolist git repository and import the repositories it lists.

    Returns "SKIPPED" when no git token is configured, "ERROR" on any
    download/parsing failure, otherwise the result of
    RepolistImportHandler.run_task(products=..., repos=..., git_sync=True).
    """
    init_logging()
    if not REPOLIST_GIT_TOKEN:
        LOGGER.warning("REPOLIST_GIT_TOKEN not set, skipping download of repositories from git.")
        return "SKIPPED"
    LOGGER.info("Downloading repolist.json from git %s", REPOLIST_GIT)
    shutil.rmtree(REPOLIST_DIR, True)
    os.makedirs(REPOLIST_DIR, exist_ok=True)

    # Inject the OAuth token into the clone URL.
    # Should we just use replacement or add a url handling library, which
    # would be used replace the username in the provided URL ?
    git_url = REPOLIST_GIT.replace(
        'https://', f'https://{REPOLIST_GIT_TOKEN}:x-oauth-basic@')
    git_ref = REPOLIST_GIT_REF if REPOLIST_GIT_REF else 'master'
    git.Repo.clone_from(git_url, REPOLIST_DIR, branch=git_ref)

    paths = REPOLIST_PATH.split(',')
    products, repos = {}, []
    for path in paths:
        # Trim the spaces so we can have nicely formatted comma lists
        path = path.strip()
        if not os.path.isdir(REPOLIST_DIR) or not os.path.isfile(
                REPOLIST_DIR + '/' + path):
            LOGGER.error(
                "Downloading repolist failed: Directory was not created")
            # BUG FIX: previously this fell through and crashed with an
            # unhandled exception when opening the missing file.
            return "ERROR"
        with open(REPOLIST_DIR + '/' + path, 'r', encoding='utf8') as json_file:
            data = json.load(json_file)
        if not data:
            # BUG FIX: was `assert data`, which is stripped under `python -O`
            # and raised an unhandled AssertionError otherwise.
            LOGGER.error("Input json is empty")
            return "ERROR"
        item_products, item_repos = RepolistImportHandler.parse_repolist_json(
            data)
        if not item_products and not item_repos:
            LOGGER.warning("Input json is not valid")
            return "ERROR"
        products.update(item_products)
        repos += item_repos
    return RepolistImportHandler.run_task(products=products, repos=repos, git_sync=True)
def run_task(*args, **kwargs):
    """Delete a single content set (repository) from the database."""
    repo = kwargs.get("repo", None)
    try:
        init_logging()
        init_db()
        RepositoryController().delete_content_set(repo)
    except Exception as err:  # pylint: disable=broad-except
        LOGGER.exception("Internal server error <%s>" % hash(err))
        DatabaseHandler.rollback()
        return "ERROR"
    finally:
        DatabaseHandler.close_connection()
    return "OK"
def run_task(*args, **kwargs):
    """Delete a single OVAL file from the database."""
    oval_id = kwargs.get("oval_id", None)
    try:
        init_logging()
        init_db()
        OvalController().delete_oval_file(oval_id)
    except Exception as err:  # pylint: disable=broad-except
        LOGGER.exception("Internal server error <%s>" % hash(err))
        DatabaseHandler.rollback()
        return "ERROR"
    finally:
        DatabaseHandler.close_connection()
    return "OK"
def run_task(*args, **kwargs):
    """Import all repositories from the input list into the DB."""
    try:
        products = kwargs.get("products", None)
        repos = kwargs.get("repos", None)
        git_sync = kwargs.get("git_sync", False)
        init_logging()
        init_db()
        if products:
            ProductStore().store(products)
        if repos:
            controller = RepositoryController()
            known_repos = controller.repo_store.list_repositories()
            # Register every repo from the input and drop it from the known
            # set, so that afterwards only stale DB entries remain in it.
            for url, content_set, basearch, releasever, cert_name, ca_cert, cert, key in repos:
                controller.add_repository(url, content_set, basearch, releasever,
                                          cert_name=cert_name, ca_cert=ca_cert,
                                          cert=cert, key=key)
                known_repos.pop((content_set, basearch, releasever), None)
            if git_sync:
                # Warn about extra repos in DB when syncing main repolist from git
                for leftover in known_repos:
                    LOGGER.warning("Repository in DB but not in git repolist: %s",
                                   ", ".join(filter(None, leftover)))
                REPOS_TO_CLEANUP.set(len(known_repos))
            controller.import_repositories()
    except Exception as err:  # pylint: disable=broad-except
        LOGGER.exception("Internal server error <%s>" % hash(err))
        DatabaseHandler.rollback()
        if isinstance(err, DatabaseError):
            return "DB_ERROR"
        return "ERROR"
    finally:
        DatabaseHandler.close_connection()
    return "OK"
def run_task(*args, **kwargs):
    """Sync OVAL files into the database."""
    try:
        init_logging()
        init_db()
        OvalController().store()
    except Exception as err:  # pylint: disable=broad-except
        LOGGER.exception("Internal server error <%s>" % hash(err))
        # Track failed imports for monitoring.
        FAILED_IMPORT_OVAL.inc()
        DatabaseHandler.rollback()
        if isinstance(err, DatabaseError):
            return "DB_ERROR"
        return "ERROR"
    finally:
        DatabaseHandler.close_connection()
    return "OK"
def run_task(*args, **kwargs):
    """Sync all repositories already registered in the database."""
    try:
        init_logging()
        init_db()
        controller = RepositoryController()
        controller.add_db_repositories()
        controller.store()
    except Exception as err:  # pylint: disable=broad-except
        LOGGER.exception("Internal server error <%s>" % hash(err))
        DatabaseHandler.rollback()
        if isinstance(err, DatabaseError):
            return "DB_ERROR"
        return "ERROR"
    finally:
        DatabaseHandler.close_connection()
    return "OK"
def run_task(*args, **kwargs):
    """Remove everything under /tmp, best effort (failures are logged, not fatal)."""
    try:
        init_logging()
        for entry in os.listdir("/tmp"):
            target = os.path.join("/tmp", entry)
            try:
                if os.path.isdir(target):
                    shutil.rmtree(target)
                else:
                    os.unlink(target)
                LOGGER.info("Deleted file or directory: %s", target)
            except Exception as err:  # pylint: disable=broad-except
                # A single undeletable entry must not abort the cleanup.
                LOGGER.warning("Unable to delete file or directory: %s (%s)", target, err)
    except Exception as err:  # pylint: disable=broad-except
        LOGGER.exception("Internal server error <%s>" % hash(err))
        return "ERROR"
    return "OK"
def __init__(self):
    """Set up logging and create the shared database connection pool."""
    init_logging()
    pool_handler = DB.DatabasePoolHandler(POOL_SIZE)
    self.db_pool = pool_handler
def main():
    """Create the application and block on the Tornado IO loop."""
    init_logging()
    create_app()
    loop = IOLoop.instance()
    loop.start()
def create_app(specs):
    """Create reposcan app.

    Builds the Connexion application, registers one API per (route, spec)
    pair from *specs*, wires up periodic syncing, the websocket reconnect
    loop, signal-based shutdown, a /metrics endpoint, and CORS headers.
    """
    init_logging()
    LOGGER.info("Starting (version %s).", VMAAS_VERSION)
    # Interval is configured in minutes; PeriodicCallback expects milliseconds.
    sync_interval = int(os.getenv('REPOSCAN_SYNC_INTERVAL_MINUTES', "360")) * 60000
    if sync_interval > 0:
        PeriodicCallback(periodic_sync, sync_interval).start()
    else:
        LOGGER.info("Periodic syncing disabled.")

    ws_handler = ReposcanWebsocket()

    def terminate(*_):
        """Trigger shutdown."""
        LOGGER.info("Signal received, stopping application.")
        # Kill asyncio ioloop
        IOLoop.instance().add_callback_from_signal(ws_handler.stop)
        # Kill background pool
        SyncTask.cancel()

    for sig in KILL_SIGNALS:
        signal.signal(sig, terminate)

    # Connect immediately, then keep retrying on a fixed interval.
    ws_handler.websocket_reconnect()
    ws_handler.reconnect_callback = PeriodicCallback(ws_handler.websocket_reconnect,
                                                     WEBSOCKET_RECONNECT_INTERVAL * 1000)
    ws_handler.reconnect_callback.start()

    app = connexion.App(__name__, options={'swagger_ui': True,
                                           'openapi_spec_path': '/openapi.json'})

    # Response validation is disabled due to returing streamed response in GET /pkgtree
    # https://github.com/zalando/connexion/pull/467 should fix it
    for route, spec in specs.items():
        app.add_api(spec, resolver=connexion.RestyResolver('reposcan'),
                    validate_responses=False,
                    strict_validation=True,
                    base_path=route,
                    arguments={"vmaas_version": VMAAS_VERSION})

    @app.app.route('/metrics', methods=['GET'])
    def metrics():  # pylint: disable=unused-variable
        # /metrics API shouldn't be visible in the API documentation,
        # hence it's added here in the create_app step
        return generate_latest(), 200, {'Content-Type': 'text/plain; charset=utf-8'}

    @app.app.after_request
    def set_headers(response):  # pylint: disable=unused-variable
        # Allow cross-origin requests from any host.
        response.headers["Access-Control-Allow-Origin"] = "*"
        response.headers["Access-Control-Allow-Headers"] = "Content-Type"
        return response

    return app
def __init__(self, dsn=None):
    """Set up logging and create the database connection pool for *dsn*."""
    init_logging()
    pool_handler = DB.DatabasePoolHandler(POOL_SIZE, dsn)
    self.db_pool = pool_handler
"""Entry point for the application""" import asyncio from aiohttp import web from prometheus_client import start_http_server from vmaas.webapp.app import create_app, DEFAULT_PATH, DEFAULT_PATH_API from vmaas.webapp.app import init_websocket from vmaas.common.config import Config from vmaas.common.logging_utils import get_logger from vmaas.common.logging_utils import init_logging LOGGER = get_logger(__name__) if __name__ == '__main__': init_logging() # pylint: disable=invalid-name application = create_app({ DEFAULT_PATH + "/v1": "webapp.v1.spec.yaml", DEFAULT_PATH + "/v2": "webapp.v2.spec.yaml", DEFAULT_PATH + "/v3": "webapp.v3.spec.yaml", DEFAULT_PATH_API + "/v1": "webapp.v1.spec.yaml", DEFAULT_PATH_API + "/v2": "webapp.v2.spec.yaml", DEFAULT_PATH_API + "/v3": "webapp.v3.spec.yaml" }) init_websocket() cfg = Config() port = cfg.web_port or cfg.webapp_port start_http_server(int(cfg.metrics_port)) web.run_app(application.app,