Example No. 1
    def run_task(*args, **kwargs):
        """Start importing from git"""

        init_logging()

        if not REPOLIST_GIT_TOKEN:
            LOGGER.warning(
                "REPOLIST_GIT_TOKEN not set, skipping download of repositories from git."
            )
            return "SKIPPED"

        LOGGER.info("Downloading repolist.json from git %s", REPOLIST_GIT)
        shutil.rmtree(REPOLIST_DIR, ignore_errors=True)
        os.makedirs(REPOLIST_DIR, exist_ok=True)

        # Should we just use string replacement, or add a URL handling library
        # to replace the username in the provided URL?
        git_url = REPOLIST_GIT.replace(
            'https://', f'https://{REPOLIST_GIT_TOKEN}:x-oauth-basic@')

        git.Git('/').clone(git_url, REPOLIST_DIR)
        if not os.path.isdir(REPOLIST_DIR) or not os.path.isfile(
                REPOLIST_DIR + '/' + REPOLIST_PATH):
            LOGGER.error(
                "Downloading repolist failed: Directory was not created")
            return "ERROR"

        with open(REPOLIST_DIR + '/' + REPOLIST_PATH, 'r') as json_file:
            data = json.load(json_file)
        assert data

        products, repos = RepolistImportHandler.parse_repolist_json(data)
        if not products and not repos:
            LOGGER.warning("Input json is not valid")
            return "ERROR"
        return RepolistImportHandler.run_task(products=products, repos=repos)
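
The comment in this example asks whether a URL handling library would be cleaner than plain string replacement. A minimal sketch of that alternative using the standard-library urllib.parse (only REPOLIST_GIT and REPOLIST_GIT_TOKEN come from the example; add_token_to_url is a hypothetical helper name):

    # Hypothetical helper: inject the token as basic-auth userinfo instead of
    # relying on the URL starting with "https://".
    from urllib.parse import urlsplit, urlunsplit

    def add_token_to_url(url, token):
        """Return url with token inserted as userinfo (token:x-oauth-basic@host)."""
        parts = urlsplit(url)
        netloc = f"{token}:x-oauth-basic@{parts.netloc}"
        return urlunsplit((parts.scheme, netloc, parts.path, parts.query, parts.fragment))

    # git_url = add_token_to_url(REPOLIST_GIT, REPOLIST_GIT_TOKEN)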
Example No. 2
    def run_task(*args, **kwargs):
        """Function to import all repositories from input list to the DB."""
        try:
            products = kwargs.get("products", None)
            repos = kwargs.get("repos", None)
            init_logging()
            init_db()

            if products:
                product_store = ProductStore()
                product_store.store(products)

            if repos:
                repository_controller = RepositoryController()
                # Sync repos from input
                for repo_url, content_set, basearch, releasever, cert_name, ca_cert, cert, key in repos:
                    repository_controller.add_repository(repo_url,
                                                         content_set,
                                                         basearch,
                                                         releasever,
                                                         cert_name=cert_name,
                                                         ca_cert=ca_cert,
                                                         cert=cert,
                                                         key=key)
                repository_controller.import_repositories()
        except Exception as err:  # pylint: disable=broad-except
            msg = "Internal server error <%s>" % err.__hash__()
            LOGGER.exception(msg)
            DatabaseHandler.rollback()
            return "ERROR"
        return "OK"
Example No. 3
    def run_task(*args, **kwargs):
        """Function to import all repositories from input list to the DB."""
        try:
            products = kwargs.get("products", None)
            repos = kwargs.get("repos", None)
            git_sync = kwargs.get("git_sync", False)
            init_logging()
            init_db()

            if products:
                product_store = ProductStore()
                product_store.store(products)

            if repos:
                repository_controller = RepositoryController()
                repos_in_db = repository_controller.repo_store.list_repositories()
                # Sync repos from input
                for repo_url, content_set, basearch, releasever, cert_name, ca_cert, cert, key in repos:
                    repository_controller.add_repository(repo_url, content_set, basearch, releasever,
                                                         cert_name=cert_name, ca_cert=ca_cert,
                                                         cert=cert, key=key)
                    repos_in_db.pop((content_set, basearch, releasever), None)
                if git_sync:  # Warn about extra repos in DB when syncing main repolist from git
                    for content_set, basearch, releasever in repos_in_db:
                        LOGGER.warning("Repository in DB but not in git repolist: %s", ", ".join(
                                       filter(None, (content_set, basearch, releasever))))
                repository_controller.import_repositories()
        except Exception as err:  # pylint: disable=broad-except
            msg = "Internal server error <%s>" % err.__hash__()
            LOGGER.exception(msg)
            DatabaseHandler.rollback()
            return "ERROR"
        finally:
            DatabaseHandler.close_connection()
        return "OK"
Example No. 4
def main():
    """Wait for services."""
    init_logging()
    init_db()
    config = Config()
    if config.db_available:
        wait(DatabaseHandler.get_connection, service="PostgreSQL")
    else:
        LOGGER.info("Skipping PostgreSQL check")
    if config.websocket_host and "vmaas-websocket" not in config.pod_hostname:
        wait(
            request,
            "GET",
            f"http://{config.websocket_host}:{config.websocket_port}/api/v1/monitoring/health",
            service="Websocket server")
    else:
        LOGGER.info("Skipping Websocket server check")
    if config.reposcan_host and "vmaas-reposcan" not in config.pod_hostname:
        wait(
            request,
            "GET",
            f"http://{config.reposcan_host}:{config.reposcan_port}/api/v1/monitoring/health",
            service="Reposcan API")
    else:
        LOGGER.info("Skipping Reposcan API check")

    os.execvp(sys.argv[1], sys.argv[1:])
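
The wait() helper used above is not part of this example. A minimal sketch of what such a helper might look like, assuming it simply retries the given callable with its arguments until it stops raising (the name, delay, and attempt count are assumptions, not the project's actual implementation):

    import time

    def wait(func, *args, service="service", delay=2, attempts=60, **kwargs):
        """Hypothetical retry loop: call func(*args, **kwargs) until it succeeds."""
        for attempt in range(attempts):
            try:
                func(*args, **kwargs)
                LOGGER.info("%s is available.", service)
                return
            except Exception:  # pylint: disable=broad-except
                LOGGER.info("Waiting for %s (attempt %d)...", service, attempt + 1)
                time.sleep(delay)
        raise RuntimeError(f"{service} did not become available")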
Example No. 5
def main():
    """Sets up and run whole application"""
    # Set up endpoint for prometheus monitoring
    init_logging()

    upgrader = DatabaseUpgrade()
    upgrader.upgrade()
Example No. 6
def main(filename):
    """ Main loop."""
    init_logging()
    init_db()
    db_instance = DatabaseHandler.get_connection()
    data = JsonPkgTree(db_instance, filename)
    data.dump()
Example No. 7
def create_app(specs):
    """Create reposcan app."""

    init_logging()
    LOGGER.info("Starting (version %s).", VMAAS_VERSION)
    sync_interval = int(os.getenv('REPOSCAN_SYNC_INTERVAL_MINUTES', "360")) * 60000
    if sync_interval > 0:
        PeriodicCallback(periodic_sync, sync_interval).start()
    else:
        LOGGER.info("Periodic syncing disabled.")

    ws_handler = ReposcanWebsocket()

    def terminate(*_):
        """Trigger shutdown."""
        LOGGER.info("Signal received, stopping application.")
        # Kill asyncio ioloop
        IOLoop.instance().add_callback_from_signal(ws_handler.stop)
        # Kill background pool
        SyncTask.cancel()

    for sig in KILL_SIGNALS:
        signal.signal(sig, terminate)

    ws_handler.websocket_reconnect()
    ws_handler.reconnect_callback = PeriodicCallback(ws_handler.websocket_reconnect,
                                                     WEBSOCKET_RECONNECT_INTERVAL * 1000)
    ws_handler.reconnect_callback.start()


    app = connexion.App(__name__, options={
        'swagger_ui': True,
        'openapi_spec_path': '/openapi.json'
    })

    # Response validation is disabled due to returning a streamed response in GET /pkgtree
    # https://github.com/zalando/connexion/pull/467 should fix it
    for route, spec in specs.items():
        app.add_api(spec, resolver=connexion.RestyResolver('reposcan'),
                    validate_responses=False,
                    strict_validation=True,
                    base_path=route,
                    arguments={"vmaas_version": VMAAS_VERSION}
                    )


    @app.app.route('/metrics', methods=['GET'])
    def metrics():  # pylint: disable=unused-variable
        # /metrics API shouldn't be visible in the API documentation,
        # hence it's added here in the create_app step
        return generate_latest(), 200, {'Content-Type': 'text/plain; charset=utf-8'}

    @app.app.after_request
    def set_headers(response):  # pylint: disable=unused-variable
        response.headers["Access-Control-Allow-Origin"] = "*"
        response.headers["Access-Control-Allow-Headers"] = "Content-Type"
        return response

    return app
Example No. 8
 def run_task(*args, **kwargs):
     """Function to start syncing all CVEs."""
     try:
         init_logging()
         init_db()
         controller = CvemapController()
         controller.store()
     except Exception as err:  # pylint: disable=broad-except
         msg = "Internal server error <%s>" % err.__hash__()
         LOGGER.exception(msg)
         DatabaseHandler.rollback()
         return "ERROR"
     return "OK"
Example No. 9
 def run_task(*args, **kwargs):
     """Function to start syncing all repositories available from database."""
     try:
         init_logging()
         init_db()
         repository_controller = RepositoryController()
         repository_controller.add_db_repositories()
         repository_controller.store()
     except Exception as err:  # pylint: disable=broad-except
         msg = "Internal server error <%s>" % err.__hash__()
         LOGGER.exception(msg)
         DatabaseHandler.rollback()
         return "ERROR"
     return "OK"
Example No. 10
 def run_task(*args, **kwargs):
     """Function to start deleting repos."""
     try:
         repo = kwargs.get("repo", None)
         init_logging()
         init_db()
         repository_controller = RepositoryController()
         repository_controller.delete_content_set(repo)
     except Exception as err:  # pylint: disable=broad-except
         msg = "Internal server error <%s>" % err.__hash__()
         LOGGER.exception(msg)
         DatabaseHandler.rollback()
         return "ERROR"
     return "OK"
Example No. 11
 def run_task(*args, **kwargs):
     """Function to start syncing OVALs."""
     try:
         init_logging()
         init_db()
         controller = OvalController()
         controller.store()
     except Exception as err:  # pylint: disable=broad-except
         msg = "Internal server error <%s>" % err.__hash__()
         LOGGER.exception(msg)
         FAILED_IMPORT_OVAL.inc()
         DatabaseHandler.rollback()
         return "ERROR"
     finally:
         DatabaseHandler.close_connection()
     return "OK"
Example No. 12
 def run_task(*args, **kwargs):
     """Function to start cleaning temporary data."""
     try:
         init_logging()
         for item in os.listdir("/tmp"):
             full_path = os.path.join("/tmp", item)
             try:
                 if os.path.isdir(full_path):
                     shutil.rmtree(full_path)
                 else:
                     os.unlink(full_path)
                 LOGGER.info("Deleted file or directory: %s", full_path)
             except Exception as err:  # pylint: disable=broad-except
                 LOGGER.warning("Unable to delete file or directory: %s", full_path)
     except Exception as err:  # pylint: disable=broad-except
         msg = "Internal server error <%s>" % err.__hash__()
         LOGGER.exception(msg)
         return "ERROR"
     return "OK"
Example No. 13
def create_app():
    """Create VmaaS application and servers"""

    init_logging()

    LOGGER.info("Starting (version %s).", VMAAS_VERSION)
    LOGGER.info('Hotcache enabled: %s', os.getenv("HOTCACHE_ENABLED", "YES"))

    with open('webapp.spec.yaml', 'rb') as specfile:
        SPEC = yaml.safe_load(specfile)  # pylint: disable=invalid-name

    @web.middleware
    async def timing_middleware(request, handler, **kwargs):
        """ Middleware that handles timing of requests"""
        start_time = time.time()
        if asyncio.iscoroutinefunction(handler):
            res = await handler(request, **kwargs)
        else:
            res = handler(request, **kwargs)

        duration = (time.time() - start_time)
        # (0)  /(1) /(2) /(3)
        #     /api /v1  /updates
        const_path = '/'.join(request.path.split('/')[:4])
        REQUEST_TIME.labels(request.method, const_path).observe(duration)
        REQUEST_COUNTS.labels(request.method, const_path, res.status).inc()

        return res

    @web.middleware
    async def error_formater(request, handler, **kwargs):
        #pylint: disable=broad-except
        def build_error(detail, status):
            errors = {"detail": detail, "status": status}
            return {"errors": [errors]}

        res = await handler(request, **kwargs)

        try:
            if res.status >= 400:
                original_error = loads(res.body)
                better_error = build_error(original_error["detail"], original_error["status"])
                return web.json_response(better_error, status=res.status)
            return res
        except TypeError: # The error response is not made by connexion
            better_error = build_error(original_error, res.status)
            return web.json_response(better_error, status=res.status)
        except Exception as _:
            LOGGER.exception(_)
            return web.json_response(build_error("Internal server error", 500), status=500)


    app = connexion.AioHttpApp(__name__, options={
        'swagger_ui': True,
        'openapi_spec_path': '/openapi.json',
        'middlewares': [error_formater,
                        timing_middleware]
    })

    def metrics(request, **kwargs): #pylint: disable=unused-argument
        """Provide current prometheus metrics"""
        # /metrics API shouldn't be visible in the API documentation,
        # hence it's added here in the create_app step
        return web.Response(text=generate_latest().decode('utf-8'))

    async def on_prepare(request, response): #pylint: disable=unused-argument
        """Hook for preparing new responses"""
        response.headers["Access-Control-Allow-Origin"] = "*"
        response.headers["Access-Control-Allow-Headers"] = "Content-Type"

    app.app.on_response_prepare.append(on_prepare)
    app.app.router.add_get("/metrics", metrics)

    app.add_api(SPEC, resolver=connexion.RestyResolver('app'),
                validate_responses=False,
                strict_validation=False,
                base_path='/api',
                pass_context_arg_name='request'
                )

    BaseHandler.db_cache = Cache()
    load_cache_to_apis()

    return app
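
The timing middleware in this example truncates the request path to its first three segments so that Prometheus label cardinality stays bounded regardless of the item requested. A quick illustration of that expression:

    # '/'.join(path.split('/')[:4]) keeps only the "/api/v1/updates" style prefix.
    path = "/api/v1/updates/kernel-2.6.32-696.20.1.el6.x86_64"
    const_path = '/'.join(path.split('/')[:4])
    assert const_path == "/api/v1/updates"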
Example No. 14
 def __init__(self, dsn=None):
     init_logging()
     self.db_pool = DB.DatabasePoolHandler(POOL_SIZE, dsn)
Example No. 15
"""Entry point for the application"""
from aiohttp import web

from app import create_app, init_websocket, PUBLIC_API_PORT
from common.logging_utils import init_logging, get_logger

LOGGER = get_logger(__name__)

if __name__ == '__main__':
    init_logging()
    # pylint: disable=invalid-name
    application = create_app()
    init_websocket()

    web.run_app(application.app, port=PUBLIC_API_PORT)
Example No. 16
 def __init__(self):
     init_logging()
     self.db_pool = DB.DatabasePoolHandler(POOL_SIZE)
Example No. 17
def create_app():
    """Create VmaaS application and servers"""

    init_logging()

    LOGGER.info("Starting (version %s).", VMAAS_VERSION)

    with open('webapp.spec.yaml', 'rb') as specfile:
        SPEC = yaml.safe_load(specfile)  # pylint: disable=invalid-name
    SPEC['info']['version'] = VMAAS_VERSION

    @web.middleware
    async def timing_middleware(request, handler, **kwargs):
        """ Middleware that handles timing of requests"""
        start_time = time.time()
        if asyncio.iscoroutinefunction(handler):
            res = await handler(request, **kwargs)
        else:
            res = handler(request, **kwargs)

        duration = (time.time() - start_time)
        # (0)  /(1) /(2) /(3)
        #     /api /v1  /updates
        const_path = '/'.join(request.path.split('/')[:4])
        REQUEST_TIME.labels(request.method, const_path).observe(duration)
        REQUEST_COUNTS.labels(request.method, const_path, res.status).inc()

        return res

    @web.middleware
    async def gzip_middleware(request, handler, **kwargs):
        """ Middleware that compress response using gzip"""
        res = await handler(request, **kwargs)
        header = 'Accept-Encoding'
        if header in request.headers and "gzip" in request.headers[header]:
            gzipped_body = gzip.compress(res.body,
                                         compresslevel=GZIP_COMPRESS_LEVEL)
            res.body = gzipped_body
            res.headers["Content-Encoding"] = "gzip"
        return res

    @web.middleware
    async def error_handler(request, handler, **kwargs):
        def format_error(detail, status):
            res = {}
            res["type"] = "about:blank"
            res["detail"] = detail
            res["status"] = status

            return res

        res = await handler(request, **kwargs)

        if res.status >= 400:
            body = loads(res.body)
            better_error = format_error(body, res.status)
            return web.json_response(better_error, status=res.status)

        return res

    middlewares = []
    if GZIP_RESPONSE_ENABLE:
        middlewares.append(gzip_middleware)
    middlewares.extend([error_handler, timing_middleware])

    app = connexion.AioHttpApp(__name__,
                               options={
                                   'swagger_ui': True,
                                   'openapi_spec_path': '/openapi.json',
                                   'middlewares': middlewares
                               })

    def metrics(request, **kwargs):  # pylint: disable=unused-argument
        """Provide current prometheus metrics"""
        # /metrics API shouldn't be visible in the API documentation,
        # hence it's added here in the create_app step
        return web.Response(text=generate_latest().decode('utf-8'))

    async def on_prepare(request, response):  # pylint: disable=unused-argument
        """Hook for preparing new responses"""
        response.headers["Access-Control-Allow-Origin"] = "*"
        response.headers["Access-Control-Allow-Headers"] = "Content-Type"

    app.app.on_response_prepare.append(on_prepare)
    app.app.router.add_get("/metrics", metrics)

    app.add_api(SPEC,
                resolver=connexion.RestyResolver('app'),
                validate_responses=False,
                strict_validation=False,
                base_path='/api',
                pass_context_arg_name='request')

    BaseHandler.db_cache = Cache()
    load_cache_to_apis()

    return app
Example No. 18
def main():
    """Main entrypoint."""
    init_logging()
    create_app()
    IOLoop.instance().start()