Example #1
def test_configure_logger(capsys):
    "configure_logger should be idempotent"

    configure_logger()
    logger = logging.getLogger("quetz")
    logger.error("my test")

    captured = capsys.readouterr()
    assert "[quetz]" in captured.err
    assert "ERROR" in captured.err
    assert "my test" in captured.err
    assert len(captured.err.splitlines()) == 1

    captured = capsys.readouterr()
    assert not captured.err

    configure_logger()
    logger.info("second")
    captured = capsys.readouterr()
    assert "[quetz]" in captured.err
    assert "INFO" in captured.err
    assert "second" in captured.err
    assert captured.err.count("second") == 1
    assert "my test" not in captured.err
    assert len(captured.err.splitlines()) == 1
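The test above only checks observable behaviour: calling configure_logger twice must not add a second handler or re-emit earlier records, and each record must carry the bracketed logger name and the level name. Below is a minimal sketch of how that kind of idempotence is commonly achieved; it is not the actual quetz implementation, and the handler and format details are assumptions inferred from the assertions:

import logging
import sys


def configure_logger_sketch(loggers=("quetz",), level=logging.INFO):
    # Hypothetical stand-in for configure_logger, illustrating the behaviour
    # the test asserts: each named logger gets exactly one stderr handler,
    # no matter how many times this function is called.
    formatter = logging.Formatter("[%(name)s] %(levelname)s %(message)s")
    for name in loggers:
        log = logging.getLogger(name)
        log.setLevel(level)
        # Guard against duplicate handlers on repeated calls.
        if not any(isinstance(h, logging.StreamHandler) for h in log.handlers):
            handler = logging.StreamHandler(sys.stderr)
            handler.setFormatter(formatter)
            log.addHandler(handler)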
Example #2
def start_supervisor_daemon(path, num_procs=None):
    from quetz.jobs.runner import Supervisor
    from quetz.tasks.workers import get_worker

    configure_logger(loggers=("quetz", ))
    config = _get_config(path)
    manager = get_worker(config, num_procs=num_procs)
    with working_directory(path):
        db = get_session(config.sqlalchemy_database_url)
        supervisor = Supervisor(db, manager)
        try:
            supervisor.run()
        except KeyboardInterrupt:
            logger.info("stopping supervisor")
        finally:
            db.close()
Example #3
def job_wrapper(func, api_key, browser_session, config, **kwargs):

    # database connections etc. are not serializable
    # so we need to recreate them in the process.
    # This allows us to manage database connectivity prior
    # to running a job.

    import logging
    import os

    from quetz.authorization import Rules
    from quetz.config import configure_logger
    from quetz.dao import Dao
    from quetz.database import get_session
    from quetz.deps import get_remote_session

    pkgstore = config.get_package_store()
    db = get_session(config.sqlalchemy_database_url)
    dao = Dao(db)
    auth = Rules(api_key, browser_session, db)
    session = get_remote_session()

    configure_logger(config)

    logger = logging.getLogger("quetz")
    logger.debug(
        f"evaluating function {func} in a subprocess task with pid {os.getpid()}"
    )

    extra_kwargs = prepare_arguments(
        func,
        dao=dao,
        auth=auth,
        session=session,
        config=config,
        pkgstore=pkgstore,
    )

    kwargs.update(extra_kwargs)

    func(**kwargs)
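Because every resource job_wrapper needs (db session, Dao, Rules, remote session, package store) is rebuilt inside the function, only picklable values have to cross the process boundary. A rough sketch of how a worker could hand this wrapper to a subprocess pool follows; the pool setup and the reindex_task function are placeholders for illustration, not quetz's actual SubprocessWorker:

from concurrent.futures import ProcessPoolExecutor


def reindex_task(dao, pkgstore):
    # Hypothetical module-level job body; prepare_arguments injects only the
    # parameters the function actually declares.
    ...


# Usage sketch (api_key, browser_session and config as in job_wrapper's
# signature); func must be a module-level callable and config picklable:
# with ProcessPoolExecutor(max_workers=4) as pool:
#     future = pool.submit(job_wrapper, reindex_task, api_key, {}, config)
#     future.result()  # re-raises any exception raised in the subprocess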
Example #4
File: cli.py Project: beenje/quetz
def watch_job_queue(
    path: str = typer.Argument(None, help="Path to the plugin folder"),
    num_procs: Optional[int] = typer.Option(
        None, help="Number of processes to use. Default: number of CPU cores"),
) -> None:
    import time

    configure_logger(loggers=("quetz", ))

    from quetz.jobs.runner import check_status, run_jobs, run_tasks
    from quetz.tasks.workers import SubprocessWorker

    config = _get_config(path)
    manager = SubprocessWorker("", {}, config, {'max_workers': num_procs})
    with working_directory(path):
        db = get_session(config.sqlalchemy_database_url)
        try:
            while True:
                run_jobs(db)
                run_tasks(db, manager)
                check_status(db)
                time.sleep(5)
        except KeyboardInterrupt:
            db.close()
Example #5
from quetz.jobs import rest_models as jobs_rest
from quetz.metrics import api as metrics_api
from quetz.metrics.middleware import DOWNLOAD_COUNT, UPLOAD_COUNT
from quetz.rest_models import ChannelActionEnum, CPRole
from quetz.tasks import indexing
from quetz.tasks.common import Task
from quetz.tasks.mirror import LocalCache, RemoteRepository, get_from_cache_or_download
from quetz.utils import TicToc, generate_random_key, parse_query

from .condainfo import CondaInfo

app = FastAPI()

config = Config()

configure_logger(config)

logger = logging.getLogger("quetz")

app.add_middleware(
    SessionMiddleware,
    secret_key=config.session_secret,
    https_only=config.session_https_only,
)

metrics.init(app)

if config.configured_section("cors"):
    logger.info("Configuring CORS with ")
    logger.info(f"allow_origins     = {config.cors_allow_origins}")
    logger.info(f"allow_credentials = {config.cors_allow_credentials}")
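The log lines above only print the CORS settings; registering the middleware itself would typically follow. A hedged sketch using FastAPI's stock CORSMiddleware, where the allow_methods/allow_headers values are placeholders rather than quetz's actual configuration:

from fastapi.middleware.cors import CORSMiddleware

if config.configured_section("cors"):
    app.add_middleware(
        CORSMiddleware,
        allow_origins=config.cors_allow_origins,
        allow_credentials=config.cors_allow_credentials,
        allow_methods=["*"],  # placeholder
        allow_headers=["*"],  # placeholder
    )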
Example #6
File: cli.py Project: beenje/quetz
from quetz.database import get_session
from quetz.db_models import (
    ApiKey,
    Channel,
    ChannelMember,
    Identity,
    Package,
    PackageMember,
    Profile,
    User,
)

app = typer.Typer()

logger = logging.getLogger("quetz-cli")
configure_logger(loggers=("quetz-cli", "alembic"))


class LogLevel(str, Enum):
    critical = "critical"
    error = "error"
    warning = "warning"
    info = "info"
    debug = "debug"
    trace = "trace"


@contextlib.contextmanager
def working_directory(path):
    """Change working directory and return to previous on exit."""
    prev_cwd = Path.cwd()
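The example is cut off after the first line of the context manager, but the docstring makes the remainder clear. A plausible completion, offered as a sketch rather than the verbatim project code:

import contextlib
import os
from pathlib import Path


@contextlib.contextmanager
def working_directory(path):
    """Change working directory and return to previous on exit."""
    prev_cwd = Path.cwd()
    os.chdir(path)
    try:
        yield
    finally:
        # Restore the original working directory even if the body raised.
        os.chdir(prev_cwd)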
Example #7
def job_wrapper(
    func: Union[Callable, bytes],
    config,
    task_id=None,
    exc_passthrou=False,
    **kwargs,
):

    # database connections etc. are not serializable
    # so we need to recreate them in the process.
    # This allows us to manage database connectivity prior
    # to running a job.

    import logging
    import pickle

    from quetz.authorization import Rules
    from quetz.config import configure_logger
    from quetz.dao import Dao
    from quetz.database import get_session
    from quetz.deps import get_remote_session

    configure_logger(config)

    logger = logging.getLogger("quetz.worker")

    pkgstore = kwargs.pop("pkgstore", None)
    db = kwargs.pop("db", None)
    dao = kwargs.pop("dao", None)
    auth = kwargs.pop("auth", None)
    session = kwargs.pop("session", None)

    if db:
        close_session = False
    elif dao:
        db = dao.db
        close_session = False
    else:
        db = get_session(config.sqlalchemy_database_url)
        close_session = True

    user_id: Optional[str]
    if task_id:
        task = db.query(Task).filter(Task.id == task_id).one_or_none()
        # take extra arguments from job definition
        if task.job.extra_args:
            job_extra_args = json.loads(task.job.extra_args)
            kwargs.update(job_extra_args)
        if task.job.owner_id:
            user_id = str(uuid.UUID(bytes=task.job.owner_id))
        else:
            user_id = None
    else:
        task = None
        user_id = None

    if not pkgstore:
        pkgstore = config.get_package_store()

    dao = Dao(db)

    if not auth:
        browser_session: Dict[str, str] = {}
        api_key = None
        if user_id:
            browser_session['user_id'] = user_id
        auth = Rules(api_key, browser_session, db)
    if not session:
        session = get_remote_session()

    if task:
        task.status = TaskStatus.running
        task.job.status = JobStatus.running
        db.commit()

    callable_f: Callable = pickle.loads(func) if isinstance(func, bytes) else func

    extra_kwargs = prepare_arguments(
        callable_f,
        dao=dao,
        auth=auth,
        session=session,
        config=config,
        pkgstore=pkgstore,
        user_id=user_id,
    )

    kwargs.update(extra_kwargs)

    try:
        callable_f(**kwargs)
    except Exception as exc:
        if task:
            task.status = TaskStatus.failed
        logger.error(
            f"exception occurred when evaluating function {callable_f.__name__}: {exc}"
        )
        if exc_passthrou:
            raise exc
    else:
        if task:
            task.status = TaskStatus.success
    finally:
        db.commit()
        if close_session:
            db.close()
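This newer variant accepts either a callable or its pickled bytes, so a scheduler can persist the function alongside the job and ship it to another process. A small caller-side sketch; my_job, some_task_id and config are placeholders, and pickling a function only works when it is importable at module level (pickle serializes it by reference):

import pickle


def my_job(dao, pkgstore):
    # Hypothetical module-level job; prepare_arguments injects only the
    # parameters the function declares (here dao and pkgstore).
    ...


serialized = pickle.dumps(my_job)  # by reference: my_job must be importable
# job_wrapper(serialized, config, task_id=some_task_id, exc_passthrou=True)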
Example #8
    ApiKey,
    Channel,
    ChannelMember,
    Identity,
    Package,
    PackageMember,
    Profile,
    User,
)

app = typer.Typer()

_deployments_file = os.path.join(_user_dir, 'deployments.json')

logger = logging.getLogger("quetz-cli")
configure_logger(loggers=("quetz-cli",))


class LogLevel(str, Enum):
    critical = "critical"
    error = "error"
    warning = "warning"
    info = "info"
    debug = "debug"
    trace = "trace"


def _init_db(db: Session, config: Config):
    """Initialize the database and add users from config."""

    if config.configured_section("users"):