Ejemplo n.º 1
0
def ensure_database_is_ok() -> None:
    """
    Connect to the database and assert that it is at the expected schema
    version; raises an :exc:`AssertionError` otherwise.
    """
    cfg = get_default_config_from_os_env()
    cfg.assert_database_ok()
Ejemplo n.º 2
0
def dev_cli() -> None:
    """
    Fire up a developer debug command-line.

    Drops into an interactive ``pdb`` session with the config, database
    engine, a dummy request, and its database session bound to local names.
    """
    config = get_default_config_from_os_env()
    # 'engine' is deliberately assigned (though unused in this function) so
    # that it is visible as a local inside the pdb session below.
    # noinspection PyUnusedLocal
    engine = config.get_sqla_engine()  # noqa: F841
    with command_line_request_context() as req:
        # Likewise, 'dbsession' exists purely for the pdb session.
        # noinspection PyUnusedLocal
        dbsession = req.dbsession  # noqa: F841
        # log.error() is used (not info) so the banner shows at any log level.
        log.error(
            """Entering developer command-line.
    - Config is available in 'config'.
    - Database engine is available in 'engine'.
    - Dummy request is available in 'req'.
    - Database session is available in 'dbsession'.
        """
        )
        import pdb

        pdb.set_trace()
        # There must be a line below this, or the context is not available;
        # maybe a pdb bug; see
        # https://stackoverflow.com/questions/51743057/custom-context-manager-is-left-when-running-pdb-set-trace  # noqa
        pass  # this does the job
Ejemplo n.º 3
0
def _test_serve_pyramid() -> None:
    """
    Launch a test server via Pyramid's built-in serving facilities, using
    config read from the OS environment.
    """
    import camcops_server.camcops_server_core as core  # delayed import; import side effects  # noqa

    app = make_wsgi_app_from_config()
    cfg = get_default_config_from_os_env()
    core.test_serve_pyramid(
        application=app,
        host=cfg.host,
        port=cfg.port,
    )
Ejemplo n.º 4
0
def launch_celery_beat(
    verbose: bool = False, cleanup_timeout_s: float = DEFAULT_CLEANUP_TIMEOUT_S
) -> None:
    """
    Launch the Celery Beat scheduler.

    (This can be combined with ``celery worker``, but that's not recommended;
    https://docs.celeryproject.org/en/latest/userguide/periodic-tasks.html#starting-the-scheduler).

    Args:
        verbose: log at DEBUG (rather than INFO) level?
        cleanup_timeout_s: time allowed for the subprocess to clean up
    """  # noqa: E501
    ensure_directories_exist()
    config = get_default_config_from_os_env()
    loglevel = "DEBUG" if verbose else "INFO"
    # Assemble the Celery command line piecewise:
    cmdargs = [CELERY, "--app", CELERY_APP_NAME, "beat"]
    cmdargs += ["--schedule", config.celery_beat_schedule_database]
    cmdargs += ["--pidfile", config.get_celery_beat_pidfilename()]
    cmdargs += ["--loglevel", loglevel]
    cmdargs += config.celery_beat_extra_args
    log.info("Launching: {!r}", cmdargs)
    nice_call(cmdargs, cleanup_timeout=cleanup_timeout_s)
Ejemplo n.º 5
0
def launch_celery_workers(
    verbose: bool = False, cleanup_timeout_s: float = DEFAULT_CLEANUP_TIMEOUT_S
) -> None:
    """
    Launch Celery workers.

    See also advice in

    - https://medium.com/@taylorhughes/three-quick-tips-from-two-years-with-celery-c05ff9d7f9eb

    - Re ``-Ofair``:
      https://docs.celeryproject.org/en/latest/userguide/optimizing.html

    Args:
        verbose: log at DEBUG (rather than INFO) level?
        cleanup_timeout_s: time allowed for the subprocess to clean up
    """  # noqa: E501
    config = get_default_config_from_os_env()
    loglevel = "DEBUG" if verbose else "INFO"
    cmdargs = [CELERY, "--app", CELERY_APP_NAME, "worker"]
    cmdargs += ["-O", "fair"]  # the "fair" scheduling optimization
    cmdargs += ["--soft-time-limit", str(CELERY_SOFT_TIME_LIMIT_SEC)]
    cmdargs += ["--loglevel", loglevel]
    if WINDOWS:
        # See crate_anon/tools/launch_celery.py, and
        # camcops_server/cc_modules/cc_export.py
        os.environ["FORKED_BY_MULTIPROCESSING"] = "1"
        cmdargs += ["--concurrency", "1", "--pool", "solo"]
    cmdargs += config.celery_worker_extra_args
    log.info("Launching: {!r}", cmdargs)
    nice_call(cmdargs, cleanup_timeout=cleanup_timeout_s)
Ejemplo n.º 6
0
def make_wsgi_app_from_config() -> "Router":
    """
    Read the config (via the OS environment) and build the WSGI application.
    """
    import camcops_server.camcops_server_core as core  # delayed import; import side effects  # noqa

    cfg = get_default_config_from_os_env()
    # Prefer an explicitly configured script name; otherwise fall back to the
    # SCRIPT_NAME found in the process's WSGI/OS environment (if any).
    script_name = cfg.proxy_script_name or os.environ.get(
        WsgiEnvVar.SCRIPT_NAME, ""
    )
    rp_config = ReverseProxiedConfig(
        trusted_proxy_headers=cfg.trusted_proxy_headers,
        http_host=cfg.proxy_http_host,
        remote_addr=cfg.proxy_remote_addr,
        script_name=script_name,
        server_port=cfg.proxy_server_port,
        server_name=cfg.proxy_server_name,
        url_scheme=cfg.proxy_url_scheme,
        rewrite_path_info=cfg.proxy_rewrite_path_info,
    )
    return core.make_wsgi_app(
        debug_toolbar=cfg.debug_toolbar,
        reverse_proxied_config=rp_config,
        debug_reverse_proxy=cfg.debug_reverse_proxy,
        show_requests=cfg.show_requests,
        show_request_immediately=cfg.show_request_immediately,
        show_response=cfg.show_response,
        show_timing=cfg.show_timing,
        static_cache_duration_s=cfg.static_cache_duration_s,
    )
Ejemplo n.º 7
0
def precache() -> None:
    """
    Populates the major caches.
    """
    log.info("Prepopulating caches")
    filename = get_config_filename_from_os_env()
    cfg = get_default_config_from_os_env()
    # These calls are made purely for their cache-priming side effects;
    # their return values are discarded.
    all_extra_strings_as_dicts(filename)
    cfg.get_task_snomed_concepts()
    cfg.get_icd9cm_snomed_concepts()
    cfg.get_icd10_snomed_concepts()
    with command_line_request_context() as req:
        req.get_export_recipients(all_recipients=True)
Ejemplo n.º 8
0
def _upgrade_database_to_head(
    show_sql_only: bool, reindex: bool = False
) -> None:
    """
    Upgrade the database to the most recent Alembic revision, then
    (optionally) rebuild the task index.

    Args:
        show_sql_only: print the SQL instead of executing it
        reindex: rebuild the task index afterwards (skipped in SQL-only mode)
    """
    # noinspection PyUnresolvedReferences
    import camcops_server.camcops_server_core as core  # delayed import; import side effects  # noqa
    from camcops_server.cc_modules.cc_alembic import (
        upgrade_database_to_head,
    )  # delayed import

    upgrade_database_to_head(show_sql_only=show_sql_only)

    # Reindexing only makes sense when we actually touched the database.
    if show_sql_only or not reindex:
        return
    core.reindex(get_default_config_from_os_env())
Ejemplo n.º 9
0
def _serve_gunicorn() -> None:
    """
    Start the CamCOPS server via Gunicorn, using config read from the OS
    environment.
    """
    import camcops_server.camcops_server_core as core  # delayed import; import side effects  # noqa

    app = make_wsgi_app_from_config()
    cfg = get_default_config_from_os_env()
    core.serve_gunicorn(
        application=app,
        host=cfg.host,
        port=cfg.port,
        unix_domain_socket_filename=cfg.unix_domain_socket,
        num_workers=cfg.gunicorn_num_workers,
        ssl_certificate=cfg.ssl_certificate,
        ssl_private_key=cfg.ssl_private_key,
        reload=cfg.gunicorn_debug_reload,
        timeout_s=cfg.gunicorn_timeout_s,
        debug_show_gunicorn_options=cfg.debug_show_gunicorn_options,
    )
Ejemplo n.º 10
0
def _serve_cherrypy() -> None:
    """
    Start the CamCOPS server via CherryPy, using config read from the OS
    environment.
    """
    import camcops_server.camcops_server_core as core  # delayed import; import side effects  # noqa

    application = make_wsgi_app_from_config()
    cfg = get_default_config_from_os_env()
    core.serve_cherrypy(
        application=application,
        host=cfg.host,
        port=cfg.port,
        unix_domain_socket_filename=cfg.unix_domain_socket,
        threads_start=cfg.cherrypy_threads_start,
        threads_max=cfg.cherrypy_threads_max,
        server_name=cfg.cherrypy_server_name,
        # BUG FIX: log_screen was previously passed cfg.cherrypy_root_path
        # (a copy/paste error -- the root path is already passed as
        # root_path below). It should use the dedicated log-screen setting.
        # NOTE(review): confirm the config attribute is named
        # 'cherrypy_log_screen' in the project's config class.
        log_screen=cfg.cherrypy_log_screen,
        ssl_certificate=cfg.ssl_certificate,
        ssl_private_key=cfg.ssl_private_key,
        root_path=cfg.cherrypy_root_path,
    )
Ejemplo n.º 11
0
def precache() -> None:
    """
    Populates the major caches. (These are process-wide caches, e.g. using
    dogpile's ``@cache_region_static.cache_on_arguments``, not config-specific
    caches.)
    """
    log.info("Prepopulating caches")
    cfg_filename = get_config_filename_from_os_env()
    cfg = get_default_config_from_os_env()
    # Each of these is called only for its cache-warming side effect:
    all_extra_strings_as_dicts(cfg_filename)
    cfg.get_task_snomed_concepts()
    cfg.get_icd9cm_snomed_concepts()
    cfg.get_icd10_snomed_concepts()
    with command_line_request_context() as req:
        req.get_export_recipients(all_recipients=True)
Ejemplo n.º 12
0
def run_alembic() -> None:
    """
    Point Alembic at the CamCOPS database URL and run migrations, choosing
    offline ("SQL only") or online mode from the Alembic context.
    """
    acfg = context.config  # type: Config
    metadata = Base.metadata
    camcops_cfg = get_default_config_from_os_env()
    url = camcops_cfg.db_url
    acfg.set_main_option('sqlalchemy.url', url)
    log.warning("Applying migrations to database at URL: {}",
                get_safe_url_from_url(url))
    log.info("Current database revision is {!r}",
             get_current_revision(url, ALEMBIC_VERSION_TABLE))

    if context.is_offline_mode():
        run_migrations_offline(acfg, metadata)
    else:
        run_migrations_online(acfg, metadata)
Ejemplo n.º 13
0
def dev_cli() -> None:
    """
    Fire up a developer debug command-line.

    Drops into a ``pdb`` session with the config, SQLAlchemy engine, a dummy
    request, and its database session bound to local names for interactive
    use.
    """
    config = get_default_config_from_os_env()
    # The locals below are deliberately assigned (though unused in this
    # function) so they are visible inside the pdb session.
    # noinspection PyUnusedLocal
    engine = config.get_sqla_engine()
    # noinspection PyUnusedLocal
    req = get_command_line_request()
    # noinspection PyUnusedLocal
    dbsession = req.dbsession
    # log.error() is used (not info) so the banner shows at any log level.
    log.error("""Entering developer command-line.
    - Config is available in 'config'.
    - Database engine is available in 'engine'.
    - Dummy request is available in 'req'.
    - Database session is available in 'dbsession'.
    """)
    import pdb
    pdb.set_trace()
Ejemplo n.º 14
0
def launch_celery_beat(verbose: bool = False) -> None:
    """
    Launch the Celery Beat scheduler.

    (This can be combined with ``celery worker``, but that's not recommended;
    http://docs.celeryproject.org/en/latest/userguide/periodic-tasks.html#starting-the-scheduler).

    Args:
        verbose: log at DEBUG (rather than INFO) level?
    """  # noqa
    config = get_default_config_from_os_env()
    loglevel = "DEBUG" if verbose else "INFO"
    # Assemble the Celery command line piecewise:
    cmdargs = ["celery", "beat"]
    cmdargs += ["--app", CELERY_APP_NAME]
    cmdargs += ["--schedule", config.celery_beat_schedule_database]
    cmdargs += ["--pidfile", config.get_celery_beat_pidfilename()]
    cmdargs += ["--loglevel", loglevel]
    cmdargs += config.celery_beat_extra_args
    log.info("Launching: {!r}", cmdargs)
    subprocess.call(cmdargs)
Ejemplo n.º 15
0
def upgrade():
    """
    Alembic migration (upgrade direction): create the ``_idnum_index`` and
    ``_task_index`` tables (server-side denormalized index tables), then --
    when running against a live database -- rebuild the index contents.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Index table mapping ID-number rows to patients.
    op.create_table('_idnum_index',
                    sa.Column('idnum_pk', sa.Integer(), nullable=False),
                    sa.Column('indexed_at_utc', sa.DateTime(), nullable=False),
                    sa.Column('patient_pk', sa.Integer(), nullable=True),
                    sa.Column('which_idnum', sa.Integer(), nullable=False),
                    sa.Column('idnum_value', sa.BigInteger(), nullable=True),
                    sa.ForeignKeyConstraint(
                        ['which_idnum'], ['_idnum_definitions.which_idnum'],
                        name=op.f('fk__idnum_index_which_idnum')),
                    sa.PrimaryKeyConstraint('idnum_pk',
                                            name=op.f('pk__idnum_index')),
                    mysql_charset='utf8mb4 COLLATE utf8mb4_unicode_ci',
                    mysql_engine='InnoDB',
                    mysql_row_format='DYNAMIC')
    # batch_alter_table is used for cross-backend (e.g. SQLite) compatibility.
    with op.batch_alter_table('_idnum_index', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix__idnum_index_idnum_pk'),
                              ['idnum_pk'],
                              unique=False)
        batch_op.create_index(batch_op.f('ix__idnum_index_patient_pk'),
                              ['patient_pk'],
                              unique=False)
        batch_op.create_index(batch_op.f('ix__idnum_index_which_idnum'),
                              ['which_idnum'],
                              unique=False)

    # Index table summarizing tasks (one row per task instance).
    op.create_table(
        '_task_index',
        sa.Column('index_entry_pk',
                  sa.Integer(),
                  autoincrement=True,
                  nullable=False),
        sa.Column('indexed_at_utc', sa.DateTime(), nullable=False),
        sa.Column('task_table_name', sa.String(length=128), nullable=True),
        sa.Column('task_pk', sa.Integer(), nullable=True),
        sa.Column('patient_pk', sa.Integer(), nullable=True),
        sa.Column('device_id', sa.Integer(), nullable=False),
        sa.Column('era', sa.String(length=32), nullable=False),
        sa.Column('when_created_utc', sa.DateTime(), nullable=False),
        sa.Column('when_created_iso',
                  camcops_server.cc_modules.cc_sqla_coltypes.
                  PendulumDateTimeAsIsoTextColType(length=32),
                  nullable=False),
        sa.Column('when_added_batch_utc', sa.DateTime(), nullable=False),
        sa.Column('adding_user_id', sa.Integer(), nullable=True),
        sa.Column('group_id', sa.Integer(), nullable=False),
        sa.Column('task_is_complete', sa.Boolean(), nullable=False),
        sa.ForeignKeyConstraint(['adding_user_id'], ['_security_users.id'],
                                name=op.f('fk__task_index_adding_user_id')),
        sa.ForeignKeyConstraint(['device_id'], ['_security_devices.id'],
                                name=op.f('fk__task_index_device_id')),
        sa.ForeignKeyConstraint(['group_id'], ['_security_groups.id'],
                                name=op.f('fk__task_index_group_id')),
        sa.ForeignKeyConstraint(['patient_pk'], ['patient._pk'],
                                name=op.f('fk__task_index_patient_pk')),
        sa.PrimaryKeyConstraint('index_entry_pk', name=op.f('pk__task_index')),
        mysql_charset='utf8mb4 COLLATE utf8mb4_unicode_ci',
        mysql_engine='InnoDB',
        mysql_row_format='DYNAMIC')
    with op.batch_alter_table('_task_index', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix__task_index_device_id'),
                              ['device_id'],
                              unique=False)
        batch_op.create_index(batch_op.f('ix__task_index_era'), ['era'],
                              unique=False)
        batch_op.create_index(batch_op.f('ix__task_index_group_id'),
                              ['group_id'],
                              unique=False)
        batch_op.create_index(batch_op.f('ix__task_index_patient_pk'),
                              ['patient_pk'],
                              unique=False)
        batch_op.create_index(batch_op.f('ix__task_index_task_pk'),
                              ['task_pk'],
                              unique=False)
        batch_op.create_index(batch_op.f('ix__task_index_task_table_name'),
                              ['task_table_name'],
                              unique=False)
        batch_op.create_index(batch_op.f('ix__task_index_when_created_iso'),
                              ['when_created_iso'],
                              unique=False)
        batch_op.create_index(batch_op.f('ix__task_index_when_created_utc'),
                              ['when_created_utc'],
                              unique=False)

    # ### end Alembic commands ###

    if not context.is_offline_mode():
        # Offline mode means "print SQL only". So we only execute the following
        # in online ("talk to the database") mode:
        cfg = get_default_config_from_os_env()
        with cfg.get_dbsession_context() as dbsession:
            reindex_everything(dbsession)
Ejemplo n.º 16
0
def ensure_directories_exist() -> None:
    """
    Create the server's working directories if they don't already exist:
    the export lock directory and (when configured) the user download
    directory.
    """
    cfg = get_default_config_from_os_env()
    mkdir_p(cfg.export_lockdir)
    download_dir = cfg.user_download_dir
    if download_dir:
        mkdir_p(download_dir)
Ejemplo n.º 17
0
def camcops_main() -> int:
    """
    Primary command-line entry point. Parse command-line arguments and act.

    Note that we can't easily use delayed imports to speed up the help output,
    because the help system has function calls embedded into it.
    """
    # Fetch command-line options.

    # -------------------------------------------------------------------------
    # Base parser
    # -------------------------------------------------------------------------

    # noinspection PyTypeChecker
    parser = ArgumentParser(
        description=(
            f"CamCOPS server, created by Rudolf Cardinal; version "
            f"{CAMCOPS_SERVER_VERSION}.\n"
            f"Use 'camcops_server <COMMAND> --help' for more detail on each "
            f"command."
        ),
        formatter_class=RawDescriptionHelpFormatter,
        # add_help=False  # only do this if manually overriding the method
    )

    # -------------------------------------------------------------------------
    # Common arguments
    # -------------------------------------------------------------------------

    parser.add_argument(
        "--allhelp",
        action=ShowAllSubparserHelpAction,
        help="show help for all commands and exit",
    )
    parser.add_argument(
        "--version",
        action="version",
        version=f"CamCOPS {CAMCOPS_SERVER_VERSION}",
    )
    parser.add_argument(
        "-v", "--verbose", action="store_true", help="Be verbose"
    )
    parser.add_argument(
        "--no_log", action="store_true", help="Disable log (stderr) entirely."
    )

    # -------------------------------------------------------------------------
    # Subcommand subparser
    # -------------------------------------------------------------------------

    subparsers = parser.add_subparsers(
        title="commands",
        description="Valid CamCOPS commands are as follows.",
        help="Specify one command.",
        dest="command",  # sorts out the help for the command being mandatory
        # https://stackoverflow.com/questions/23349349/argparse-with-required-subparser  # noqa
    )  # type: _SubParsersAction
    subparsers.required = True  # requires a command
    # You can't use "add_subparsers" more than once.
    # Subparser groups seem not yet to be supported:
    #   https://bugs.python.org/issue9341
    #   https://bugs.python.org/issue14037

    # -------------------------------------------------------------------------
    # "Getting started" commands
    # -------------------------------------------------------------------------

    # Launch documentation
    docs_parser = add_sub(
        subparsers,
        "docs",
        config_mandatory=None,
        help="Launch the main documentation (CamCOPS manual)",
    )
    docs_parser.set_defaults(func=lambda args: launch_manual())

    # Print demo CamCOPS config
    democonfig_parser = add_sub(
        subparsers,
        "demo_camcops_config",
        config_mandatory=None,
        help="Print a demo CamCOPS config file",
    )
    democonfig_parser.add_argument(
        "--docker", action="store_true", help="Use settings for Docker"
    )
    democonfig_parser.set_defaults(
        func=lambda args: print_demo_camcops_config(docker=args.docker)
    )

    # Print demo supervisor config
    demosupervisorconf_parser = add_sub(
        subparsers,
        "demo_supervisor_config",
        config_mandatory=None,
        help="Print a demo 'supervisor' config file for CamCOPS",
    )
    demosupervisorconf_parser.set_defaults(
        func=lambda args: print_demo_supervisor_config()
    )

    # Print demo Apache config section
    demoapacheconf_parser = add_sub(
        subparsers,
        "demo_apache_config",
        config_mandatory=None,
        help="Print a demo Apache config file section for CamCOPS",
    )
    demoapacheconf_parser.set_defaults(
        func=lambda args: print_demo_apache_config()
    )

    # -------------------------------------------------------------------------
    # Database commands
    # -------------------------------------------------------------------------

    # Upgrade database
    upgradedb_parser = add_sub(
        subparsers,
        "upgrade_db",
        config_mandatory=True,
        help="Upgrade database to most recent version (via Alembic)",
    )
    upgradedb_parser.add_argument(
        "--show_sql_only",
        action="store_true",
        help="Show SQL only (to stdout); don't execute it",
    )
    upgradedb_parser.add_argument(
        "--no_reindex",
        action="store_true",
        help="Don't recreate the task index",
    )
    upgradedb_parser.set_defaults(
        func=lambda args: _upgrade_database_to_head(
            show_sql_only=args.show_sql_only, reindex=not args.no_reindex
        )
    )

    # Developer: upgrade database to a specific revision
    dev_upgrade_db_parser = add_sub(
        subparsers,
        "dev_upgrade_db",
        config_mandatory=True,
        help="(DEVELOPER OPTION ONLY.) Upgrade a database to "
        "a specific revision.",
    )
    dev_upgrade_db_parser.add_argument(
        "--destination_db_revision",
        type=str,
        required=True,
        help="The target database revision",
    )
    dev_upgrade_db_parser.add_argument(
        "--show_sql_only",
        action="store_true",
        help="Show SQL only (to stdout); don't execute it",
    )
    dev_upgrade_db_parser.set_defaults(
        func=lambda args: _upgrade_database_to_revision(
            revision=args.destination_db_revision,
            show_sql_only=args.show_sql_only,
        )
    )

    # Developer: downgrade database
    dev_downgrade_parser = add_sub(
        subparsers,
        "dev_downgrade_db",
        config_mandatory=True,
        help="(DEVELOPER OPTION ONLY.) Downgrades a database to "
        "a specific revision. May DESTROY DATA.",
    )
    dev_downgrade_parser.add_argument(
        "--destination_db_revision",
        type=str,
        required=True,
        help="The target database revision",
    )
    dev_downgrade_parser.add_argument(
        "--confirm_downgrade_db",
        action="store_true",
        help="Must specify this too, as a safety measure",
    )
    dev_downgrade_parser.add_argument(
        "--show_sql_only",
        action="store_true",
        help="Show SQL only (to stdout); don't execute it",
    )
    dev_downgrade_parser.set_defaults(
        func=lambda args: _downgrade_database_to_revision(
            revision=args.destination_db_revision,
            show_sql_only=args.show_sql_only,
            confirm_downgrade_db=args.confirm_downgrade_db,
        )
    )

    # Developer: create dummy database
    dummy_database_parser = add_sub(
        subparsers,
        "dev_add_dummy_data",
        config_mandatory=True,
        help="(DEVELOPER OPTION ONLY.) Populates the database with "
        "a set of dummy patients and tasks for testing.",
    )

    dummy_database_parser.add_argument(
        "--confirm_add_dummy_data",
        action="store_true",
        help="Must specify this too, as a safety measure",
    )

    dummy_database_parser.set_defaults(
        func=lambda args: _add_dummy_data(
            cfg=get_default_config_from_os_env(),
            confirm_add_dummy_data=args.confirm_add_dummy_data,
        )
    )

    # Show database title
    showdbtitle_parser = add_sub(
        subparsers, "show_db_title", help="Show database title"
    )
    showdbtitle_parser.set_defaults(func=lambda args: _print_database_title())

    # Show database schema
    showdbschema_parser = add_sub(
        subparsers,
        "show_db_schema",
        help="Show the database schema as PlantUML +/- PNG",
    )
    showdbschema_parser.add_argument(
        "--schemastem",
        default="schema",
        help="Stem for output filenames (for schema diagrams); "
        "'.plantuml' and '.png' are appended",
    )
    showdbschema_parser.add_argument(
        "--make_image",
        action="store_true",
        help="Create a PNG image (impractically large!)",
    )
    showdbschema_parser.add_argument(
        "--java", default="java", help="Java executable (for schema diagrams)"
    )
    showdbschema_parser.add_argument(
        "--plantuml",
        default="plantuml.jar",
        help="PlantUML Java .jar file (for schema diagrams)",
    )
    showdbschema_parser.add_argument(
        "--height_width_limit",
        type=int,
        default=20000,
        help="Maximum image height/width in pixels",
    )
    showdbschema_parser.add_argument(
        "--java_memory_limit_mb",
        type=int,
        default=2048,
        help="Java virtual machine memory limit in Mb",
    )
    showdbschema_parser.set_defaults(
        func=lambda args: _show_database_schema(
            schemastem=args.schemastem,
            make_image=args.make_image,
            java=args.java,
            plantuml=args.plantuml,
            height_width_limit=args.height_width_limit,
            java_memory_limit_mb=args.java_memory_limit_mb,
        )
    )

    # Merge in data fom another database
    # noinspection PyTypeChecker
    int_int_mapper = MapType(
        from_type=nonnegative_int, to_type=nonnegative_int
    )
    mergedb_parser = add_sub(
        subparsers,
        "merge_db",
        config_mandatory=True,
        help="Merge in data from an old or recent CamCOPS database",
    )
    mergedb_parser.add_argument(
        "--report_every",
        type=int,
        default=10000,
        help="Report progress every n rows",
    )
    mergedb_parser.add_argument(
        "--echo", action="store_true", help="Echo SQL to source database"
    )
    mergedb_parser.add_argument(
        "--dummy_run",
        action="store_true",
        help="Perform a dummy run only; do not alter destination database",
    )
    mergedb_parser.add_argument(
        "--info_only",
        action="store_true",
        help="Show table information only; don't do any work",
    )
    # mergedb_parser.add_argument(
    #     "--skip_export_logs", action="store_true",
    #     help="Skip the export log tables")
    # mergedb_parser.add_argument(
    #     "--skip_audit_logs", action="store_true",
    #     help="Skip the audit log table")
    mergedb_parser.add_argument(
        "--default_group_id",
        type=int,
        default=None,
        help="Default group ID (integer) to apply to old records without one. "
        "If none is specified, a new group will be created for such "
        "records.",
    )
    mergedb_parser.add_argument(
        "--default_group_name",
        type=str,
        default=None,
        help="If default_group_id is not specified, use this group name. The "
        "group will be looked up if it exists, and created if not.",
    )
    add_req_named(
        mergedb_parser,
        "--src",
        help="Source database (specified as an SQLAlchemy URL). The contents "
        "of this database will be merged into the database specified "
        "in the config file.",
    )
    # noinspection PyTypeChecker
    add_req_named(
        mergedb_parser,
        "--whichidnum_map",
        type=int_int_mapper,
        help="Map to convert ID number types, in the format "
        "'from_a:to_a,from_b:to_b,...', where all values are integers.",
    )
    # noinspection PyTypeChecker
    add_req_named(
        mergedb_parser,
        "--groupnum_map",
        type=int_int_mapper,
        help="Map to convert group numbers, in the format "
        "'from_a:to_a,from_b:to_b,...', where all values are integers.",
    )
    mergedb_parser.set_defaults(
        func=lambda args: _merge_camcops_db(
            src=args.src,
            echo=args.echo,
            report_every=args.report_every,
            dummy_run=args.dummy_run,
            info_only=args.info_only,
            # skip_export_logs=args.skip_export_logs,
            # skip_audit_logs=args.skip_audit_logs,
            default_group_id=args.default_group_id,
            default_group_name=args.default_group_name,
            whichidnum_map=args.whichidnum_map,
            groupnum_map=args.groupnum_map,
        )
    )
    # WATCH OUT. There appears to be a bug somewhere in the way that the
    # Pyramid debug toolbar registers itself with SQLAlchemy (see
    # pyramid_debugtoolbar/panels/sqla.py; look for "before_cursor_execute"
    # and "after_cursor_execute". Somehow, some connections (but not all) seem
    # to get this event registered twice. The upshot is that the sequence can
    # lead to an attempt to double-delete the debug toolbar's timer:
    #
    # _before_cursor_execute: <sqlalchemy.engine.base.Connection object at 0x7f5c1fa7c630>, 'SHOW CREATE TABLE `_hl7_run_log`', ()  # noqa
    # _before_cursor_execute: <sqlalchemy.engine.base.Connection object at 0x7f5c1fa7c630>, 'SHOW CREATE TABLE `_hl7_run_log`', ()  # noqa
    #       ^^^ this is the problem: event called twice
    # _after_cursor_execute: <sqlalchemy.engine.base.Connection object at 0x7f5c1fa7c630>, 'SHOW CREATE TABLE `_hl7_run_log`', ()  # noqa
    # _after_cursor_execute: <sqlalchemy.engine.base.Connection object at 0x7f5c1fa7c630>, 'SHOW CREATE TABLE `_hl7_run_log`', ()  # noqa
    #       ^^^ and this is where the problem becomes evident
    # Traceback (most recent call last):
    # ...
    #   File "/home/rudolf/dev/venvs/camcops/lib/python3.5/site-packages/pyramid_debugtoolbar/panels/sqla.py", line 51, in _after_cursor_execute  # noqa
    #     delattr(conn, 'pdtb_start_timer')
    # AttributeError: pdtb_start_timer
    #
    # So the simplest thing is only to register the debug toolbar for stuff
    # that might need it...

    # Create database
    createdb_parser = add_sub(
        subparsers,
        "create_db",
        config_mandatory=True,
        help="Create CamCOPS database from scratch (AVOID; use the upgrade "
        "facility instead)",
    )
    add_req_named(
        createdb_parser,
        "--confirm_create_db",
        action="store_true",
        help="Must specify this too, as a safety measure",
    )
    createdb_parser.set_defaults(
        func=lambda args: _create_database_from_scratch(
            cfg=get_default_config_from_os_env()
        )
    )

    # Print database schema
    ddl_parser = add_sub(
        subparsers,
        "ddl",
        help="Print database schema (data definition language; DDL)",
    )
    ddl_parser.add_argument(
        "--dialect",
        type=str,
        default=SqlaDialectName.MYSQL,
        help=f"SQL dialect (options: {', '.join(sorted(ALL_SQLA_DIALECTS))})",
    )
    ddl_parser.set_defaults(
        func=lambda args: print(_get_all_ddl(dialect_name=args.dialect))
    )

    # Rebuild server indexes
    reindex_parser = add_sub(subparsers, "reindex", help="Recreate task index")
    reindex_parser.set_defaults(
        func=lambda args: _reindex(cfg=get_default_config_from_os_env())
    )

    check_index_parser = add_sub(
        subparsers,
        "check_index",
        help="Check index validity (exit code 0 for OK, 1 for bad)",
    )
    check_index_parser.add_argument(
        "--show_all_bad",
        action="store_true",
        help="Show all bad index entries (rather than stopping at the first)",
    )
    check_index_parser.set_defaults(
        func=lambda args: _check_index(
            cfg=get_default_config_from_os_env(),
            show_all_bad=args.show_all_bad,
        )
    )

    # -------------------------------------------------------------------------
    # User commands
    # -------------------------------------------------------------------------

    # Make superuser
    superuser_parser = add_sub(
        subparsers,
        "make_superuser",
        help="Make superuser, or give superuser status to an existing user",
    )
    superuser_parser.add_argument(
        "--username",
        help="Username of superuser to create/promote (if omitted, you will "
        "be asked to type it in)",
    )
    superuser_parser.set_defaults(
        func=lambda args: _make_superuser(username=args.username)
    )

    # Reset a user's password
    password_parser = add_sub(
        subparsers, "reset_password", help="Reset a user's password"
    )
    password_parser.add_argument(
        "--username",
        help="Username to change password for (if omitted, you will be asked "
        "to type it in)",
    )
    password_parser.set_defaults(
        func=lambda args: _reset_password(username=args.username)
    )

    # Re-enable a locked account
    enableuser_parser = add_sub(
        subparsers, "enable_user", help="Re-enable a locked user account"
    )
    enableuser_parser.add_argument(
        "--username",
        help="Username to enable (if omitted, you will be asked "
        "to type it in)",
    )
    enableuser_parser.set_defaults(
        func=lambda args: _enable_user_cli(username=args.username)
    )

    # -------------------------------------------------------------------------
    # Export options
    # -------------------------------------------------------------------------

    def _add_export_options(sp: ArgumentParser) -> None:
        sp.add_argument(
            "--recipients",
            type=str,
            nargs="*",
            help="Export recipients (as named in config file)",
        )
        sp.add_argument(
            "--all_recipients", action="store_true", help="Use all recipients"
        )
        sp.add_argument(
            "--disable_task_index",
            action="store_true",
            help="Disable use of the task index (for debugging only)",
        )

    # Export data
    export_parser = add_sub(
        subparsers, "export", help="Trigger pending exports"
    )
    _add_export_options(export_parser)
    export_parser.add_argument(
        "--schedule_via_backend",
        action="store_true",
        help="Export tasks as a background job",
    )
    export_parser.set_defaults(
        func=lambda args: _cmd_export(
            recipient_names=args.recipients,
            all_recipients=args.all_recipients,
            via_index=not args.disable_task_index,
            schedule_via_backend=args.schedule_via_backend,
        )
    )

    # Show export queue
    show_export_queue_parser = add_sub(
        subparsers,
        "show_export_queue",
        help="View outbound export queue (without sending)",
    )
    _add_export_options(show_export_queue_parser)
    show_export_queue_parser.add_argument(
        "--pretty",
        action="store_true",
        help="Pretty (but slower) formatting for tasks",
    )
    show_export_queue_parser.add_argument(
        "--debug_show_fhir",
        action="store_true",
        help="Show FHIR output for tasks",
    )
    show_export_queue_parser.add_argument(
        "--debug_fhir_include_docs",
        action="store_true",
        help="(If --debug_show_fhir) Included FHIR documents? Large.",
    )
    show_export_queue_parser.set_defaults(
        func=lambda args: _cmd_show_export_queue(
            recipient_names=args.recipients,
            all_recipients=args.all_recipients,
            via_index=not args.disable_task_index,
            pretty=args.pretty,
            debug_show_fhir=args.debug_show_fhir,
            debug_fhir_include_docs=args.debug_fhir_include_docs,
        )
    )

    # Make CRATE data dictionary
    crate_dd_parser = add_sub(
        subparsers,
        "crate_dd",
        help="Make draft data dictionary for CRATE anonymisation tool",
    )
    crate_dd_parser.add_argument(
        "--filename",
        type=str,
        required=True,
        help="Output filename (data dictionary to write)",
    )
    crate_dd_parser.add_argument(
        "--recipient",
        type=str,
        required=True,
        help="Export recipient (as named in config file)",
    )
    crate_dd_parser.set_defaults(
        func=lambda args: _cmd_crate_dd(
            filename=args.filename, recipient_name=args.recipient
        )
    )

    # Make CRIS data dictionary
    cris_dd_parser = add_sub(
        subparsers,
        "cris_dd",
        help="Make draft data dictionary for CRIS anonymisation tool",
    )
    cris_dd_parser.add_argument(
        "--filename",
        type=str,
        required=True,
        help="Filename of data dictionary to write",
    )
    cris_dd_parser.add_argument(
        "--recipient",
        type=str,
        required=True,
        help="Export recipient (as named in config file)",
    )
    cris_dd_parser.set_defaults(
        func=lambda args: _cmd_cris_dd(
            filename=args.filename, recipient_name=args.recipient
        )
    )

    # -------------------------------------------------------------------------
    # Web server options
    # -------------------------------------------------------------------------

    # Serve via CherryPy
    serve_cp_parser = add_sub(
        subparsers, "serve_cherrypy", help="Start web server via CherryPy"
    )
    serve_cp_parser.set_defaults(func=lambda args: _serve_cherrypy())

    # Serve via Gunicorn
    serve_gu_parser = add_sub(
        subparsers,
        "serve_gunicorn",
        help="Start web server via Gunicorn (not available under Windows)",
    )
    serve_gu_parser.set_defaults(func=lambda args: _serve_gunicorn())

    # Serve via the Pyramid test server
    serve_pyr_parser = add_sub(
        subparsers,
        "serve_pyramid",
        help="Start test web server via Pyramid (single-thread, "
        "single-process, HTTP-only; for development use only)",
    )
    serve_pyr_parser.set_defaults(func=lambda args: _test_serve_pyramid())

    # -------------------------------------------------------------------------
    # Preprocessing options
    # -------------------------------------------------------------------------

    athena_icd_snomed_to_xml_parser = add_sub(
        subparsers,
        "convert_athena_icd_snomed_to_xml",
        help="Fetch SNOMED-CT codes for ICD-9-CM and ICD-10 from the Athena "
        "OHDSI data set (https://athena.ohdsi.org/) and write them to "
        "the CamCOPS XML format",
    )
    athena_icd_snomed_to_xml_parser.add_argument(
        "--athena_concept_tsv_filename",
        type=str,
        required=True,
        help="Path to CONCEPT.csv file from Athena download",
    )
    athena_icd_snomed_to_xml_parser.add_argument(
        "--athena_concept_relationship_tsv_filename",
        type=str,
        required=True,
        help="Path to CONCEPT_RELATIONSHIP.csv file from Athena download",
    )
    athena_icd_snomed_to_xml_parser.add_argument(
        "--icd9_xml_filename",
        type=str,
        required=True,
        help="Filename of ICD-9-CM/SNOMED-CT XML file to write",
    )
    athena_icd_snomed_to_xml_parser.add_argument(
        "--icd10_xml_filename",
        type=str,
        required=True,
        help="Filename of ICD-10/SNOMED-CT XML file to write",
    )
    athena_icd_snomed_to_xml_parser.set_defaults(
        func=lambda args: send_athena_icd_snomed_to_xml(
            athena_concept_tsv_filename=args.athena_concept_tsv_filename,
            athena_concept_relationship_tsv_filename=args.athena_concept_relationship_tsv_filename,  # noqa
            icd9_xml_filename=args.icd9_xml_filename,
            icd10_xml_filename=args.icd10_xml_filename,
        )
    )

    # -------------------------------------------------------------------------
    # Celery options
    # -------------------------------------------------------------------------

    # Launch Celery workers
    celery_worker_parser = add_sub(
        subparsers,
        "launch_workers",
        help="Launch Celery workers, for background processing",
    )
    celery_worker_parser.set_defaults(
        func=lambda args: _launch_celery_workers(verbose=args.verbose)
    )

    # Launch Celery Bear
    celery_beat_parser = add_sub(
        subparsers,
        "launch_scheduler",
        help="Launch Celery Beat scheduler, to schedule background jobs",
    )
    celery_beat_parser.set_defaults(
        func=lambda args: _launch_celery_beat(verbose=args.verbose)
    )

    # Launch Celery Flower monitor
    celery_flower_parser = add_sub(
        subparsers,
        "launch_monitor",
        help="Launch Celery Flower monitor, to monitor background jobs",
    )
    celery_flower_parser.add_argument(
        "--address",
        type=str,
        default=DEFAULT_FLOWER_ADDRESS,
        help="Address to use for Flower",
    )
    celery_flower_parser.add_argument(
        "--port",
        type=int,
        default=DEFAULT_FLOWER_PORT,
        help="Port to use for Flower",
    )
    celery_flower_parser.set_defaults(
        func=lambda args: _launch_celery_flower(
            address=args.address, port=args.port
        )
    )

    # Housekeeping task
    housekeeping_parser = add_sub(
        subparsers,
        "housekeeping",
        help="Run housekeeping tasks (remove stale sessions, etc.)",
    )
    housekeeping_parser.set_defaults(func=lambda args: _housekeeping())

    # Purge Celery tasks
    purge_jobs_parser = add_sub(
        subparsers,
        "purge_jobs",
        help="Purge any outstanding background (back-end, worker) jobs",
    )
    purge_jobs_parser.set_defaults(func=lambda args: _purge_jobs())

    # -------------------------------------------------------------------------
    # Test options
    # -------------------------------------------------------------------------

    # Launch a Python command line
    dev_cli_parser = add_sub(
        subparsers,
        "dev_cli",
        help="Developer command-line interface, with config loaded as "
        "'config'.",
    )
    dev_cli_parser.set_defaults(func=lambda args: _dev_cli())

    # -------------------------------------------------------------------------
    # OK; parser built; now parse the arguments
    # -------------------------------------------------------------------------
    progargs = parser.parse_args()

    # Initial log level (overridden later by config file but helpful for start)
    if progargs.no_log:
        loglevel = logging.CRITICAL + 1
    elif progargs.verbose:
        loglevel = logging.DEBUG
    else:
        loglevel = logging.INFO
    main_only_quicksetup_rootlogger(
        level=loglevel, with_process_id=True, with_thread_id=True
    )
    rootlogger = logging.getLogger()
    set_level_for_logger_and_its_handlers(rootlogger, loglevel)

    # Say hello
    log.info(
        f"""
# =============================================================================
# CamCOPS server version {CAMCOPS_SERVER_VERSION}
# Created by Rudolf Cardinal. See {CAMCOPS_URL}
# =============================================================================
"""
    )
    log.debug(
        """
# -----------------------------------------------------------------------------
# Python interpreter: {interpreter!r}
# This program: {thisprog!r}
# Command-line arguments:
{progargs}
# -----------------------------------------------------------------------------
""",
        interpreter=sys.executable,
        thisprog=__file__,
        progargs=pprint.pformat(vars(progargs)),
    )
    if DEBUG_LOG_CONFIG:
        print_report_on_all_logs()

    # Finalize the config filename; ensure it's in the environment variable
    if hasattr(progargs, "config") and progargs.config:
        # We want the the config filename in the environment from now on:
        os.environ[ENVVAR_CONFIG_FILE] = progargs.config
    cfg_name = os.environ.get(ENVVAR_CONFIG_FILE, None)
    log.debug("Using configuration file: {!r}", cfg_name)

    # Call the subparser function for the chosen command
    if progargs.func is None:
        raise NotImplementedError("Command-line function not implemented!")
    success = progargs.func(progargs)  # type: Optional[bool]
    if success is None or success is True:
        return EXIT_SUCCESS
    else:
        return EXIT_FAILURE
Ejemplo n.º 18
0
def camcops_main() -> None:
    """
    Primary command-line entry point. Parse command-line arguments and act.

    Note that we can't easily use delayed imports to speed up the help output,
    because the help system has function calls embedded into it.
    """
    # Fetch command-line options.

    # -------------------------------------------------------------------------
    # Base parser
    # -------------------------------------------------------------------------

    parser = ArgumentParser(
        description=(
            "CamCOPS server, created by Rudolf Cardinal; version {}.\n"
            "Use 'camcops_server <COMMAND> --help' for more detail on each "
            "command.".format(CAMCOPS_SERVER_VERSION)),
        formatter_class=RawDescriptionHelpFormatter,
        # add_help=False  # only do this if manually overriding the method
    )

    # -------------------------------------------------------------------------
    # Common arguments
    # -------------------------------------------------------------------------

    parser.add_argument('--allhelp',
                        action=ShowAllSubparserHelpAction,
                        help='show help for all commands and exit')
    parser.add_argument("--version",
                        action="version",
                        version="CamCOPS {}".format(CAMCOPS_SERVER_VERSION))
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help="Be verbose")

    # -------------------------------------------------------------------------
    # Subcommand subparser
    # -------------------------------------------------------------------------

    subparsers = parser.add_subparsers(
        title="commands",
        description="Valid CamCOPS commands are as follows.",
        help='Specify one command.',
        dest='command',  # sorts out the help for the command being mandatory
        # https://stackoverflow.com/questions/23349349/argparse-with-required-subparser  # noqa
    )  # type: _SubParsersAction  # noqa
    subparsers.required = True  # requires a command
    # You can't use "add_subparsers" more than once.
    # Subparser groups seem not yet to be supported:
    #   https://bugs.python.org/issue9341
    #   https://bugs.python.org/issue14037

    # -------------------------------------------------------------------------
    # "Getting started" commands
    # -------------------------------------------------------------------------

    # Launch documentation
    docs_parser = add_sub(
        subparsers,
        "docs",
        config_mandatory=None,
        help="Launch the main documentation (CamCOPS manual)")
    docs_parser.set_defaults(func=lambda args: launch_manual())

    # Print demo CamCOPS config
    democonfig_parser = add_sub(subparsers,
                                "demo_camcops_config",
                                config_mandatory=None,
                                help="Print a demo CamCOPS config file")
    democonfig_parser.set_defaults(
        func=lambda args: print_demo_camcops_config())

    # Print demo supervisor config
    demosupervisorconf_parser = add_sub(
        subparsers,
        "demo_supervisor_config",
        config_mandatory=None,
        help="Print a demo 'supervisor' config file for CamCOPS")
    demosupervisorconf_parser.set_defaults(
        func=lambda args: print_demo_supervisor_config())

    # Print demo Apache config section
    demoapacheconf_parser = add_sub(
        subparsers,
        "demo_apache_config",
        config_mandatory=None,
        help="Print a demo Apache config file section for CamCOPS")
    demoapacheconf_parser.set_defaults(
        func=lambda args: print_demo_apache_config())

    # Print demo MySQL database creation commands
    demo_mysql_create_db_parser = add_sub(
        subparsers,
        "demo_mysql_create_db",
        config_mandatory=None,
        help="Print demo instructions to create a MySQL database for CamCOPS")
    demo_mysql_create_db_parser.set_defaults(
        func=lambda args: print_demo_mysql_create_db())

    # Print demo Bash MySQL dump script
    demo_mysql_dump_script_parser = add_sub(
        subparsers,
        "demo_mysql_dump_script",
        config_mandatory=None,
        help="Print demo instructions to dump all current MySQL databases")
    demo_mysql_dump_script_parser.set_defaults(
        func=lambda args: print_demo_mysql_dump_script())

    # -------------------------------------------------------------------------
    # Database commands
    # -------------------------------------------------------------------------

    # Upgrade database
    upgradedb_parser = add_sub(
        subparsers,
        "upgrade_db",
        config_mandatory=True,
        help="Upgrade database to most recent version (via Alembic)")
    upgradedb_parser.add_argument(
        "--show_sql_only",
        action="store_true",
        help="Show SQL only (to stdout); don't execute it")
    upgradedb_parser.set_defaults(func=lambda args: _upgrade_database_to_head(
        show_sql_only=args.show_sql_only))

    # Developer: upgrade database to a specific revision
    dev_upgrade_to_parser = add_sub(
        subparsers,
        "dev_upgrade_to",
        config_mandatory=True,
        help="(DEVELOPER OPTION ONLY.) Upgrade a database to "
        "a specific revision.")
    dev_upgrade_to_parser.add_argument("--destination_db_revision",
                                       type=str,
                                       required=True,
                                       help="The target database revision")
    dev_upgrade_to_parser.add_argument(
        "--show_sql_only",
        action="store_true",
        help="Show SQL only (to stdout); don't execute it")
    dev_upgrade_to_parser.set_defaults(
        func=lambda args: _upgrade_database_to_revision(
            revision=args.destination_db_revision,
            show_sql_only=args.show_sql_only))

    # Developer: downgrade database
    dev_downgrade_parser = add_sub(
        subparsers,
        "dev_downgrade_db",
        config_mandatory=True,
        help="(DEVELOPER OPTION ONLY.) Downgrades a database to "
        "a specific revision. May DESTROY DATA.")
    dev_downgrade_parser.add_argument("--destination_db_revision",
                                      type=str,
                                      required=True,
                                      help="The target database revision")
    dev_downgrade_parser.add_argument(
        '--confirm_downgrade_db',
        action="store_true",
        help="Must specify this too, as a safety measure")
    dev_downgrade_parser.add_argument(
        "--show_sql_only",
        action="store_true",
        help="Show SQL only (to stdout); don't execute it")
    dev_downgrade_parser.set_defaults(
        func=lambda args: _downgrade_database_to_revision(
            revision=args.destination_db_revision,
            show_sql_only=args.show_sql_only,
            confirm_downgrade_db=args.confirm_downgrade_db,
        ))

    # Show database title
    showdbtitle_parser = add_sub(subparsers,
                                 "show_db_title",
                                 help="Show database title")
    showdbtitle_parser.set_defaults(func=lambda args: _print_database_title())

    # Merge in data fom another database
    mergedb_parser = add_sub(
        subparsers,
        "merge_db",
        config_mandatory=True,
        help="Merge in data from an old or recent CamCOPS database")
    mergedb_parser.add_argument('--report_every',
                                type=int,
                                default=10000,
                                help="Report progress every n rows")
    mergedb_parser.add_argument('--echo',
                                action="store_true",
                                help="Echo SQL to source database")
    mergedb_parser.add_argument(
        '--dummy_run',
        action="store_true",
        help="Perform a dummy run only; do not alter destination database")
    mergedb_parser.add_argument(
        '--info_only',
        action="store_true",
        help="Show table information only; don't do any work")
    mergedb_parser.add_argument('--skip_hl7_logs',
                                action="store_true",
                                help="Skip the HL7 message log table")
    mergedb_parser.add_argument('--skip_audit_logs',
                                action="store_true",
                                help="Skip the audit log table")
    mergedb_parser.add_argument(
        '--default_group_id',
        type=int,
        default=None,
        help="Default group ID (integer) to apply to old records without one. "
        "If none is specified, a new group will be created for such "
        "records.")
    mergedb_parser.add_argument(
        '--default_group_name',
        type=str,
        default=None,
        help="If default_group_id is not specified, use this group name. The "
        "group will be looked up if it exists, and created if not.")
    add_req_named(
        mergedb_parser,
        "--src",
        help="Source database (specified as an SQLAlchemy URL). The contents "
        "of this database will be merged into the database specified "
        "in the config file.")
    mergedb_parser.set_defaults(func=lambda args: _merge_camcops_db(
        src=args.src,
        echo=args.echo,
        report_every=args.report_every,
        dummy_run=args.dummy_run,
        info_only=args.info_only,
        skip_export_logs=args.skip_hl7_logs,
        skip_audit_logs=args.skip_audit_logs,
        default_group_id=args.default_group_id,
        default_group_name=args.default_group_name,
    ))
    # WATCH OUT. There appears to be a bug somewhere in the way that the
    # Pyramid debug toolbar registers itself with SQLAlchemy (see
    # pyramid_debugtoolbar/panels/sqla.py; look for "before_cursor_execute"
    # and "after_cursor_execute". Somehow, some connections (but not all) seem
    # to get this event registered twice. The upshot is that the sequence can
    # lead to an attempt to double-delete the debug toolbar's timer:
    #
    # _before_cursor_execute: <sqlalchemy.engine.base.Connection object at 0x7f5c1fa7c630>, 'SHOW CREATE TABLE `_hl7_run_log`', ()  # noqa
    # _before_cursor_execute: <sqlalchemy.engine.base.Connection object at 0x7f5c1fa7c630>, 'SHOW CREATE TABLE `_hl7_run_log`', ()  # noqa
    #       ^^^ this is the problem: event called twice
    # _after_cursor_execute: <sqlalchemy.engine.base.Connection object at 0x7f5c1fa7c630>, 'SHOW CREATE TABLE `_hl7_run_log`', ()  # noqa
    # _after_cursor_execute: <sqlalchemy.engine.base.Connection object at 0x7f5c1fa7c630>, 'SHOW CREATE TABLE `_hl7_run_log`', ()  # noqa
    #       ^^^ and this is where the problem becomes evident
    # Traceback (most recent call last):
    # ...
    #   File "/home/rudolf/dev/venvs/camcops/lib/python3.5/site-packages/pyramid_debugtoolbar/panels/sqla.py", line 51, in _after_cursor_execute  # noqa
    #     delattr(conn, 'pdtb_start_timer')
    # AttributeError: pdtb_start_timer
    #
    # So the simplest thing is only to register the debug toolbar for stuff
    # that might need it...

    # Create database
    createdb_parser = add_sub(
        subparsers,
        "create_db",
        config_mandatory=True,
        help="Create CamCOPS database from scratch (AVOID; use the upgrade "
        "facility instead)")
    add_req_named(createdb_parser,
                  '--confirm_create_db',
                  action="store_true",
                  help="Must specify this too, as a safety measure")
    createdb_parser.set_defaults(
        func=lambda args: _create_database_from_scratch(
            cfg=get_default_config_from_os_env()))

    # Print database schema
    ddl_parser = add_sub(
        subparsers,
        "ddl",
        help="Print database schema (data definition language; DDL)")
    ddl_parser.add_argument('--dialect',
                            type=str,
                            default=SqlaDialectName.MYSQL,
                            help="SQL dialect (options: {})".format(
                                ", ".join(ALL_SQLA_DIALECTS)))
    ddl_parser.set_defaults(
        func=lambda args: print(_get_all_ddl(dialect_name=args.dialect)))

    # Rebuild server indexes
    reindex_parser = add_sub(subparsers, "reindex", help="Recreate task index")
    reindex_parser.set_defaults(
        func=lambda args: _reindex(cfg=get_default_config_from_os_env()))

    # -------------------------------------------------------------------------
    # User commands
    # -------------------------------------------------------------------------

    # Make superuser
    superuser_parser = add_sub(
        subparsers,
        "make_superuser",
        help="Make superuser, or give superuser status to an existing user")
    superuser_parser.add_argument(
        '--username',
        help="Username of superuser to create/promote (if omitted, you will "
        "be asked to type it in)")
    superuser_parser.set_defaults(
        func=lambda args: _make_superuser(username=args.username))

    # Reset a user's password
    password_parser = add_sub(subparsers,
                              "reset_password",
                              help="Reset a user's password")
    password_parser.add_argument(
        '--username',
        help="Username to change password for (if omitted, you will be asked "
        "to type it in)")
    password_parser.set_defaults(
        func=lambda args: _reset_password(username=args.username))

    # Re-enable a locked account
    enableuser_parser = add_sub(subparsers,
                                "enable_user",
                                help="Re-enable a locked user account")
    enableuser_parser.add_argument(
        '--username',
        help="Username to enable (if omitted, you will be asked "
        "to type it in)")
    enableuser_parser.set_defaults(
        func=lambda args: _enable_user_cli(username=args.username))

    # -------------------------------------------------------------------------
    # Export options
    # -------------------------------------------------------------------------

    def _add_export_options(sp: ArgumentParser) -> None:
        sp.add_argument('--recipients',
                        type=str,
                        nargs="*",
                        help="Export recipients (as named in config file)")
        sp.add_argument('--all_recipients',
                        action="store_true",
                        help="Use all recipients")
        sp.add_argument(
            '--disable_task_index',
            action="store_true",
            help="Disable use of the task index (for debugging only)")

    # Send incremental export messages
    export_parser = add_sub(subparsers,
                            "export",
                            help="Trigger pending exports")
    _add_export_options(export_parser)
    export_parser.set_defaults(func=lambda args: _cmd_export(
        recipient_names=args.recipients,
        all_recipients=args.all_recipients,
        via_index=not args.disable_task_index,
    ))

    # Show incremental export queue
    show_export_queue_parser = add_sub(
        subparsers,
        "show_export_queue",
        help="View outbound export queue (without sending)")
    _add_export_options(show_export_queue_parser)
    show_export_queue_parser.add_argument(
        '--pretty',
        action="store_true",
        help="Pretty (but slower) formatting for tasks")
    show_export_queue_parser.set_defaults(
        func=lambda args: _cmd_show_export_queue(
            recipient_names=args.recipients,
            all_recipients=args.all_recipients,
            via_index=not args.disable_task_index,
            pretty=args.pretty,
        ))

    # -------------------------------------------------------------------------
    # Web server options
    # -------------------------------------------------------------------------

    # Serve via CherryPy
    serve_cp_parser = add_sub(subparsers,
                              "serve_cherrypy",
                              help="Start web server via CherryPy")
    serve_cp_parser.set_defaults(func=lambda args: _serve_cherrypy())

    # Serve via Gunicorn
    cpu_count = multiprocessing.cpu_count()
    serve_gu_parser = add_sub(
        subparsers,
        "serve_gunicorn",
        help="Start web server via Gunicorn (not available under Windows)")
    serve_gu_parser.set_defaults(func=lambda args: _serve_gunicorn())

    # Serve via the Pyramid test server
    serve_pyr_parser = add_sub(
        subparsers,
        "serve_pyramid",
        help="Start test web server via Pyramid (single-thread, "
        "single-process, HTTP-only; for development use only)")
    serve_pyr_parser.set_defaults(func=lambda args: _test_serve_pyramid())

    # -------------------------------------------------------------------------
    # Preprocessing options
    # -------------------------------------------------------------------------

    athena_icd_snomed_to_xml_parser = add_sub(
        subparsers,
        "convert_athena_icd_snomed_to_xml",
        help="Fetch SNOMED-CT codes for ICD-9-CM and ICD-10 from the Athena "
        "OHDSI data set (http://athena.ohdsi.org/) and write them to "
        "the CamCOPS XML format")
    athena_icd_snomed_to_xml_parser.add_argument(
        "--athena_concept_tsv_filename",
        type=str,
        required=True,
        help="Path to CONCEPT.csv file from Athena download")
    athena_icd_snomed_to_xml_parser.add_argument(
        "--athena_concept_relationship_tsv_filename",
        type=str,
        required=True,
        help="Path to CONCEPT_RELATIONSHIP.csv file from Athena download")
    athena_icd_snomed_to_xml_parser.add_argument(
        "--icd9_xml_filename",
        type=str,
        required=True,
        help="Filename of ICD-9-CM/SNOMED-CT XML file to write")
    athena_icd_snomed_to_xml_parser.add_argument(
        "--icd10_xml_filename",
        type=str,
        required=True,
        help="Filename of ICD-10/SNOMED-CT XML file to write")
    athena_icd_snomed_to_xml_parser.set_defaults(
        func=lambda args: send_athena_icd_snomed_to_xml(
            athena_concept_tsv_filename=args.athena_concept_tsv_filename,
            athena_concept_relationship_tsv_filename=args.
            athena_concept_relationship_tsv_filename,  # noqa
            icd9_xml_filename=args.icd9_xml_filename,
            icd10_xml_filename=args.icd10_xml_filename,
        ))

    # -------------------------------------------------------------------------
    # Celery options
    # -------------------------------------------------------------------------

    # Launch Celery workers
    celery_worker_parser = add_sub(
        subparsers,
        "launch_workers",
        help="Launch Celery workers, for background processing")
    celery_worker_parser.set_defaults(
        func=lambda args: _launch_celery_workers(verbose=args.verbose, ))

    # Launch Celery Bear
    celery_beat_parser = add_sub(
        subparsers,
        "launch_scheduler",
        help="Launch Celery Beat scheduler, to schedule background jobs")
    celery_beat_parser.set_defaults(
        func=lambda args: _launch_celery_beat(verbose=args.verbose, ))

    # Launch Celery Flower monitor
    celery_flower_parser = add_sub(
        subparsers,
        "launch_monitor",
        help="Launch Celery Flower monitor, to monitor background jobs")
    celery_flower_parser.add_argument("--address",
                                      type=str,
                                      default=DEFAULT_FLOWER_ADDRESS,
                                      help="Address to use for Flower")
    celery_flower_parser.add_argument("--port",
                                      type=int,
                                      default=DEFAULT_FLOWER_PORT,
                                      help="Port to use for Flower")
    celery_flower_parser.set_defaults(func=lambda args: _launch_celery_flower(
        address=args.address,
        port=args.port,
    ))

    # -------------------------------------------------------------------------
    # Test options
    # -------------------------------------------------------------------------

    # List the available self-tests without running them.
    # (config_mandatory=None: presumably this marks the subcommand as not
    # requiring a config file -- confirm against add_sub's definition.)
    showtests_parser = add_sub(subparsers,
                               "show_tests",
                               config_mandatory=None,
                               help="Show available self-tests")
    showtests_parser.set_defaults(func=lambda args: _self_test(show_only=True))

    # Run the internal self-tests.
    selftest_parser = add_sub(subparsers,
                              "self_test",
                              config_mandatory=None,
                              help="Test internal code")
    selftest_parser.set_defaults(func=lambda args: _self_test())

    # Launch an interactive developer command line (see _dev_cli / dev_cli,
    # which drops into pdb with config/engine/session preloaded).
    dev_cli_parser = add_sub(
        subparsers,
        "dev_cli",
        help="Developer command-line interface, with config loaded as "
        "'config'.")
    dev_cli_parser.set_defaults(func=lambda args: _dev_cli())

    # -------------------------------------------------------------------------
    # OK; parser built; now parse the arguments
    # -------------------------------------------------------------------------
    progargs = parser.parse_args()

    # Initial log level (overridden later by config file but helpful for start)
    loglevel = logging.DEBUG if progargs.verbose else logging.INFO
    main_only_quicksetup_rootlogger(level=loglevel,
                                    with_process_id=True,
                                    with_thread_id=True)
    rootlogger = logging.getLogger()
    # Apply the chosen level to the root logger and all its handlers, so early
    # debug output is not filtered out by handler-level thresholds.
    set_level_for_logger_and_its_handlers(rootlogger, loglevel)

    # Say hello.
    # NOTE(review): these log calls pass {}-style placeholders as lazy keyword
    # arguments, which suggests 'log' is a brace-style logging adapter rather
    # than a plain stdlib logger -- confirm against the module's log setup.
    log.info(
        """
# =============================================================================
# CamCOPS server version {version}
# Created by Rudolf Cardinal. See {url}
# =============================================================================
""",
        version=CAMCOPS_SERVER_VERSION,
        url=CAMCOPS_URL,
    )
    # At debug level, also report the interpreter, this script's path, and the
    # fully parsed command-line arguments (useful for support/diagnosis).
    log.debug(
        """
# -----------------------------------------------------------------------------
# Python interpreter: {interpreter!r}
# This program: {thisprog!r}
# Command-line arguments:
{progargs}
# -----------------------------------------------------------------------------
""",
        interpreter=sys.executable,
        thisprog=__file__,
        progargs=pprint.pformat(vars(progargs)),
    )
    # Warn loudly if compile-time debug flags are switched on.
    if DEBUG_LOG_CONFIG or DEBUG_RUN_WITH_PDB:
        log.warning("Debugging options enabled!")
    if DEBUG_LOG_CONFIG:
        print_report_on_all_logs()

    # Every command except "self_test" needs a config file, so finalize its
    # location now.
    if progargs.command not in ["self_test"]:
        # Finalize the config filename; ensure it's in the environment variable
        if hasattr(progargs, 'config') and progargs.config:
            # We want the config filename in the environment from now on, so
            # that code reading ENVVAR_CONFIG_FILE later sees the same file:
            os.environ[ENVVAR_CONFIG_FILE] = progargs.config
        cfg_name = os.environ.get(ENVVAR_CONFIG_FILE, None)
        log.info("Using configuration file: {!r}", cfg_name)

    # Call the subparser function for the chosen command.
    # Each subcommand registered a 'func' via set_defaults(); None here means
    # a subcommand was defined without an implementation.
    if progargs.func is None:
        raise NotImplementedError("Command-line function not implemented!")
    success = progargs.func(progargs)  # type: Optional[bool]
    # Convention: None or True means success (exit 0); False means failure
    # (exit 1).
    if success is None or success is True:
        sys.exit(0)
    else:
        sys.exit(1)