Code example #1
File: celery.py  Project: tarun911/camcops
def export_task_backend(self: "CeleryTask", recipient_name: str,
                        basetable: str, task_pk: int) -> None:
    """
    This function exports a single task but does so with only simple (string,
    integer) information, so it can be called via the Celery task queue.

    Args:
        self: the Celery task, :class:`celery.app.task.Task`
        recipient_name: export recipient name (as per the config file)
        basetable: name of the task's base table
        task_pk: server PK of the task
    """
    from camcops_server.cc_modules.cc_export import export_task  # delayed import  # noqa
    from camcops_server.cc_modules.cc_request import command_line_request_context  # delayed import  # noqa
    from camcops_server.cc_modules.cc_taskfactory import (
        task_factory_no_security_checks, )  # delayed import

    try:
        with command_line_request_context() as req:
            recipient = req.get_export_recipient(recipient_name)
            task = task_factory_no_security_checks(req.dbsession, basetable,
                                                   task_pk)
            if task is None:
                log.error(
                    "export_task_backend for recipient {!r}: No task found "
                    "for {} {}", recipient_name, basetable, task_pk)
                return
            export_task(req, recipient, task)
    except Exception as exc:
        self.retry(countdown=backoff(self.request.retries), exc=exc)
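
A rough, illustrative sketch of the calling side: the docstring stresses that the task takes only simple (string, integer) arguments so it can travel through the Celery queue. The app name, broker URL, decorator wiring and backoff helper below are assumptions for illustration, not the project's actual code.

from celery import Celery

celery_app = Celery("camcops_sketch", broker="redis://localhost:6379/0")


def backoff(retries: int, base: int = 60, cap: int = 3600) -> int:
    # Illustrative exponential backoff: 60 s, 120 s, 240 s, ... up to a cap.
    return min(cap, base * (2 ** retries))


@celery_app.task(bind=True, max_retries=5)
def demo_export_task(self, recipient_name: str, basetable: str,
                     task_pk: int) -> None:
    # bind=True makes Celery pass the task instance as the first argument,
    # so self.retry() and self.request.retries are available, exactly as
    # used in the example above.
    ...


# Enqueued with only JSON-serializable arguments:
# demo_export_task.delay("recipient_A", "phq9", 123)
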
Code example #2
def create_user_download(
    self: "CeleryTask",
    collection: "TaskCollection",
    options: "DownloadOptions",
) -> None:
    """
    Create a research dump file for the user to download later.
    Let them know by e-mail.

    Args:
        self:
            the Celery task, :class:`celery.app.task.Task`
        collection:
            a
            :class:`camcops_server.cc_modules.cc_taskcollection.TaskCollection`
        options:
            :class:`camcops_server.cc_modules.cc_export.DownloadOptions`
            governing the download
    """
    from camcops_server.cc_modules.cc_export import (
        make_exporter, )  # delayed import
    from camcops_server.cc_modules.cc_request import (
        command_line_request_context, )  # delayed import

    with retry_backoff_if_raises(self):
        # Create request for a specific user, so the auditing is correct.
        with command_line_request_context(user_id=options.user_id) as req:
            collection.set_request(req)
            exporter = make_exporter(req=req,
                                     collection=collection,
                                     options=options)
            exporter.create_user_download_and_email()
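
The retry_backoff_if_raises(self) context manager is not included in this listing. The following is a hypothetical sketch of such a helper, consistent with the explicit self.retry(countdown=backoff(...), exc=exc) pattern in code example #1; the parameter names and delays are assumptions.

from contextlib import contextmanager


@contextmanager
def retry_backoff_if_raises(celery_task, base: int = 60, cap: int = 3600):
    # On any exception, ask Celery to retry the bound task after an
    # exponentially increasing delay, rather than failing immediately.
    try:
        yield
    except Exception as exc:
        delay = min(cap, base * (2 ** celery_task.request.retries))
        celery_task.retry(countdown=delay, exc=exc)
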
Code example #3
def housekeeping() -> None:
    """
    Function that is run regularly to do cleanup tasks.

    (Remember that the ``bind`` parameter to ``@celery_app.task()`` means that
    the first argument to the function, typically called ``self``, is the
    Celery task. We don't need it here. See
    https://docs.celeryproject.org/en/latest/userguide/tasks.html#bound-tasks.)
    """
    from camcops_server.cc_modules.cc_request import (
        command_line_request_context, )  # delayed import
    from camcops_server.cc_modules.cc_session import (
        CamcopsSession, )  # delayed import
    from camcops_server.cc_modules.cc_user import (
        SecurityAccountLockout,
        SecurityLoginFailure,
    )  # delayed import

    log.debug("Housekeeping!")
    with command_line_request_context() as req:
        # ---------------------------------------------------------------------
        # Housekeeping tasks
        # ---------------------------------------------------------------------
        # We had a problem with MySQL locking here (two locks open for what
        # appeared to be a single delete, followed by a lock timeout). Seems to
        # be working now.
        CamcopsSession.delete_old_sessions(req)
        SecurityAccountLockout.delete_old_account_lockouts(req)
        SecurityLoginFailure.clear_dummy_login_failures_if_necessary(req)
        delete_old_user_downloads(req)
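
The docstring says this function is "run regularly", but the scheduling itself is not part of this listing. One common way to run such a job periodically is Celery beat; the task path and interval below are assumptions, shown only to illustrate the idea.

from celery import Celery
from celery.schedules import crontab

celery_app = Celery("camcops_sketch", broker="redis://localhost:6379/0")

celery_app.conf.beat_schedule = {
    "housekeeping-hourly": {
        # Assumed dotted path; the real one depends on where the task lives.
        "task": "camcops_server.cc_modules.celery.housekeeping",
        "schedule": crontab(minute=0),  # at the top of every hour
    },
}
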
Code example #4
def dev_cli() -> None:
    """
    Fire up a developer debug command-line.
    """
    config = get_default_config_from_os_env()
    # noinspection PyUnusedLocal
    engine = config.get_sqla_engine()  # noqa: F841
    with command_line_request_context() as req:
        # noinspection PyUnusedLocal
        dbsession = req.dbsession  # noqa: F841
        log.error(
            """Entering developer command-line.
    - Config is available in 'config'.
    - Database engine is available in 'engine'.
    - Dummy request is available in 'req'.
    - Database session is available in 'dbsession'.
        """
        )
        import pdb

        pdb.set_trace()
        # There must be a line below this, or the context is not available;
        # maybe a pdb bug; see
        # https://stackoverflow.com/questions/51743057/custom-context-manager-is-left-when-running-pdb-set-trace  # noqa
        pass  # this does the job
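
The closing comment refers to a pdb quirk: when pdb.set_trace() is the final statement inside a with block, the debugger prompt tends to appear only after execution has moved past the block, so the context manager has already exited. A minimal stand-alone reproduction (illustrative, not from the project):

import pdb

with open("example.txt", "w") as f:
    pdb.set_trace()
    # Without a trailing statement here, the next line pdb stops at would
    # lie outside the with block, i.e. after f has already been closed.
    pass
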
Code example #5
def make_superuser(username: str = None) -> bool:
    """
    Make a superuser from the command line.
    """
    with command_line_request_context() as req:
        username = get_username_from_cli(
            req=req,
            prompt="Username for new superuser (or to gain superuser status)",
            starting_username=username,
        )
        existing_user = User.get_user_by_name(req.dbsession, username)
        if existing_user:
            log.info("Giving superuser status to {!r}", username)
            existing_user.superuser = True
            success = True
        else:
            log.info("Creating superuser {!r}", username)
            password = get_new_password_from_cli(username=username)
            success = User.create_superuser(req, username, password)
        if success:
            log.info("Success")
            return True
        else:
            log.critical("Failed to create superuser")
            return False
Code example #6
def make_data_dictionary(
    filename: str, recipient_name: str, cris: bool = False
) -> None:
    """
    Writes a data dictionary for the CRATE anonymisation tool.
    See :func:`camcops_server.cc_modules.cc_export.write_crate_data_dictionary`.

    Args:
        filename: destination filename
        recipient_name: export recipient name
        cris: make DD for CRIS, not CRATE
    """
    target = "CRIS" if cris else "CRATE"
    with command_line_request_context() as req:
        recipients = req.get_export_recipients(
            recipient_names=[recipient_name],
            save=False,
            database_versions=False,
        )
        recipient = recipients[0]  # type: ExportRecipientInfo
        log.info(
            f"Generating {target} data dictionary for export recipient "
            f"{recipient_name!r}; writing to {filename!r}"
        )
        with open(filename, "wt") as file:
            if cris:
                write_cris_data_dictionary(
                    req=req, file=file, recipient=recipient
                )
            else:
                write_crate_data_dictionary(
                    req=req, file=file, recipient=recipient
                )
Code example #7
def cmd_export(
    recipient_names: List[str] = None,
    all_recipients: bool = False,
    via_index: bool = True,
    schedule_via_backend: bool = False,
) -> None:
    """
    Send all outbound incremental export messages (e.g. HL7).

    Args:
        recipient_names:
            List of export recipient names (as per the config file).
        all_recipients:
            Use all recipients?
        via_index:
            Use the task index (faster)?
        schedule_via_backend:
            Schedule the export via the backend, rather than performing it now.
    """
    with command_line_request_context() as req:
        export(
            req,
            recipient_names=recipient_names,
            all_recipients=all_recipients,
            via_index=via_index,
            schedule_via_backend=schedule_via_backend,
        )
Code example #8
def precache() -> None:
    """
    Populates the major caches.
    """
    log.info("Prepopulating caches")
    config_filename = get_config_filename_from_os_env()
    config = get_default_config_from_os_env()
    _ = all_extra_strings_as_dicts(config_filename)
    _ = config.get_task_snomed_concepts()
    _ = config.get_icd9cm_snomed_concepts()
    _ = config.get_icd10_snomed_concepts()
    with command_line_request_context() as req:
        _ = req.get_export_recipients(all_recipients=True)
Code example #9
def precache() -> None:
    """
    Populates the major caches. (These are process-wide caches, e.g. using
    dogpile's ``@cache_region_static.cache_on_arguments``, not config-specific
    caches.)
    """
    log.info("Prepopulating caches")
    config_filename = get_config_filename_from_os_env()
    config = get_default_config_from_os_env()
    _ = all_extra_strings_as_dicts(config_filename)
    _ = config.get_task_snomed_concepts()
    _ = config.get_icd9cm_snomed_concepts()
    _ = config.get_icd10_snomed_concepts()
    with command_line_request_context() as req:
        _ = req.get_export_recipients(all_recipients=True)
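
The docstring mentions dogpile's @cache_region_static.cache_on_arguments for process-wide caching. A minimal sketch of that pattern follows; the in-memory backend and the dummy function are illustrative, not the project's actual configuration.

from dogpile.cache import make_region

cache_region_static = make_region().configure("dogpile.cache.memory")


@cache_region_static.cache_on_arguments()
def expensive_lookup(config_filename: str) -> dict:
    # The expensive work runs once per process per distinct argument;
    # subsequent calls with the same argument return the cached value.
    return {"parsed_from": config_filename}
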
Code example #10
def enable_user_cli(username: str = None) -> bool:
    """
    Re-enable a locked user account from the command line.
    """
    with command_line_request_context() as req:
        if username is None:
            username = get_username_from_cli(
                req=req, prompt="Username to unlock", must_exist=True
            )
        else:
            if not User.user_exists(req, username):
                log.critical("No such user: {!r}", username)
                return False
        SecurityLoginFailure.enable_user(req, username)
        log.info("Enabled.")
        return True
Code example #11
def cmd_export(recipient_names: List[str] = None,
               all_recipients: bool = False,
               via_index: bool = True) -> None:
    """
    Send all outbound incremental export messages (e.g. HL7).

    Args:
        recipient_names: list of export recipient names (as per the config
            file)
        all_recipients: use all recipients?
        via_index: use the task index (faster)?
    """
    with command_line_request_context() as req:
        export(req,
               recipient_names=recipient_names,
               all_recipients=all_recipients,
               via_index=via_index)
Code example #12
def reset_password(username: str = None) -> bool:
    """
    Reset a password from the command line.
    """
    with command_line_request_context() as req:
        username = get_username_from_cli(
            req=req,
            prompt="Username to reset password for",
            starting_username=username,
            must_exist=True,
        )
        log.info("Resetting password for user {!r}", username)
        password = get_new_password_from_cli(username)
        success = set_password_directly(req, username, password)
        if success:
            log.info("Success")
        else:
            log.critical("Failure")
        return success
Code example #13
def export_to_recipient_backend(self: "CeleryTask",
                                recipient_name: str) -> None:
    """
    From the backend, exports all pending tasks for a given recipient.

    - Calls :func:`camcops_server.cc_modules.cc_export.export`.

    There are two ways of doing this when we call
    :func:`camcops_server.cc_modules.cc_export.export`. If we set
    ``schedule_via_backend=True``, this backend job fires up a whole bunch of
    other backend jobs, one per task to export. If we set
    ``schedule_via_backend=False``, our current backend job does all the work.

    Which is best?

    - Well, keeping it to one job is a bit simpler, perhaps.
    - But everything is locked independently so we can do the multi-job
      version, and we may as well use all the workers available. So my thought
      was to use ``schedule_via_backend=True``.
    - However, that led to database deadlocks (multiple processes trying to
      write a new ExportRecipient).
    - With some bugfixes to equality checking and a global lock (see
      :meth:`camcops_server.cc_modules.cc_config.CamcopsConfig.get_master_export_recipient_lockfilename`),
      we can try again with ``True``.
    - Yup, works nicely.

    Args:
        self: the Celery task, :class:`celery.app.task.Task`
        recipient_name: export recipient name (as per the config file)
    """
    from camcops_server.cc_modules.cc_export import export  # delayed import
    from camcops_server.cc_modules.cc_request import (
        command_line_request_context, )  # delayed import

    with retry_backoff_if_raises(self):
        with command_line_request_context() as req:
            export(
                req,
                recipient_names=[recipient_name],
                schedule_via_backend=True,
            )
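
The docstring's fix for the deadlocks was a global lock around ExportRecipient creation, obtained via CamcopsConfig.get_master_export_recipient_lockfilename(). The locking mechanism itself is not shown in this listing; the POSIX file-lock sketch below only illustrates the general idea, and the helper name and lock path are assumptions.

import fcntl
from contextlib import contextmanager


@contextmanager
def exclusive_file_lock(lockfilename: str):
    # Hold an exclusive advisory lock on the file for the duration of the
    # block; other processes trying to take the same lock will wait here.
    with open(lockfilename, "w") as lockfile:
        fcntl.flock(lockfile, fcntl.LOCK_EX)
        try:
            yield
        finally:
            fcntl.flock(lockfile, fcntl.LOCK_UN)


# with exclusive_file_lock("/tmp/camcops_master_export_recipient.lock"):
#     ...  # only one worker at a time creates/updates an ExportRecipient
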
Code example #14
def cmd_show_export_queue(recipient_names: List[str] = None,
                          all_recipients: bool = False,
                          via_index: bool = True,
                          pretty: bool = False) -> None:
    """
    Shows tasks that would be exported.

    Args:
        recipient_names: list of export recipient names (as per the config
            file)
        all_recipients: use all recipients?
        via_index: use the task index (faster)?
        pretty: use ``str(task)`` not ``repr(task)`` (prettier, slower because
            it has to query the patient)
    """
    with command_line_request_context() as req:
        print_export_queue(req,
                           recipient_names=recipient_names,
                           all_recipients=all_recipients,
                           via_index=via_index,
                           pretty=pretty)
Code example #15
def cmd_show_export_queue(
    recipient_names: List[str] = None,
    all_recipients: bool = False,
    via_index: bool = True,
    pretty: bool = False,
    debug_show_fhir: bool = False,
    debug_fhir_include_docs: bool = False,
) -> None:
    """
    Shows tasks that would be exported.

    Args:
        recipient_names:
            List of export recipient names (as per the config file).
        all_recipients:
            Use all recipients?
        via_index:
            Use the task index (faster)?
        pretty:
            Use ``str(task)`` not ``repr(task)``? (Prettier, but slower because
            it has to query the patient.)
        debug_show_fhir:
            Show FHIR output for each task, as JSON?
        debug_fhir_include_docs:
            (If debug_show_fhir.) Include document content? Large!
    """
    with command_line_request_context() as req:
        print_export_queue(
            req,
            recipient_names=recipient_names,
            all_recipients=all_recipients,
            via_index=via_index,
            pretty=pretty,
            debug_show_fhir=debug_show_fhir,
            debug_fhir_include_docs=debug_fhir_include_docs,
        )
Code example #16
def print_database_title() -> None:
    """
    Prints the database title (for the current config) to stdout.
    """
    with command_line_request_context() as req:
        print(req.database_title)