Code example #1
0
def run_cwltool(ctx, path, job_path, **kwds):
    """Translate planemo kwds to cwltool kwds and run cwltool main function.

    :param ctx: planemo context (supplies ``verbose`` flag and ``vlog``).
    :param path: path to the CWL tool/workflow document.
    :param job_path: path to the CWL job (inputs) document.
    :param kwds: planemo options mapped onto cwltool CLI arguments
        (``output_directory``, ``no_container``, ``mulled_containers``,
        ``non_strict_cwl``, ...).
    :returns: ``CwlToolRunResponse`` with the parsed JSON outputs on
        success, or ``ErrorRunResponse`` when cwltool exits non-zero.
    :raises Exception: when cwltool's stdout is not valid JSON.
    """
    ensure_cwltool_available()

    args = []
    if ctx.verbose:
        args.append("--verbose")
    output_directory = kwds.get("output_directory", None)
    if output_directory:
        args.append("--outdir")
        args.append(output_directory)
    if kwds.get("no_container", False):
        # Without containers, point cwltool at planemo's dependency
        # resolvers configuration instead.
        args.append("--no-container")
        ensure_dependency_resolvers_conf_configured(ctx, kwds)
        args.append("--beta-dependency-resolvers-configuration")
        args.append(kwds["dependency_resolvers_config_file"])
    if kwds.get("mulled_containers"):
        args.append("--beta-use-biocontainers")

    if kwds.get("non_strict_cwl", False):
        args.append("--non-strict")

    args.extend([path, job_path])
    ctx.vlog("Calling cwltool with arguments %s" % args)
    with tempfile.NamedTemporaryFile("w") as tmp_stdout, \
            tempfile.NamedTemporaryFile("w") as tmp_stderr:
        # cwltool passes sys.stderr to subprocess.Popen - ensure it has
        # an actual fileno.
        with real_io():
            ret_code = main.main(
                args,
                stdout=tmp_stdout,
                stderr=tmp_stderr,
            )
        tmp_stdout.flush()
        tmp_stderr.flush()
        with open(tmp_stderr.name, "r") as stderr_f:
            log = stderr_f.read()
            ctx.vlog("cwltool log output [%s]" % log)
        # Read stdout once; the original re-opened the file in the error
        # path without closing it (a file-handle leak).
        with open(tmp_stdout.name, "r") as stdout_f:
            stdout_text = stdout_f.read()
        try:
            result = json.loads(stdout_text)
        except ValueError:
            message = JSON_PARSE_ERROR_MESSAGE % (
                stdout_text,
                tmp_stdout.name,
                log
            )
            error(message)
            raise Exception(message)

        if ret_code != 0:
            return ErrorRunResponse("Error running cwltool", log=log)
        outputs = result
    return CwlToolRunResponse(
        log,
        outputs=outputs,
    )
Code example #2
0
File: run.py — Project: galaxyproject/planemo
def run_cwltool(ctx, path, job_path, **kwds):
    """Translate planemo kwds to cwltool kwds and run cwltool main function.

    :param ctx: planemo context (supplies ``verbose`` flag and ``vlog``).
    :param path: path to the CWL tool/workflow document.
    :param job_path: path to the CWL job (inputs) document.
    :param kwds: planemo options mapped onto cwltool CLI arguments
        (``output_directory``, ``no_container``, ``mulled_containers``,
        ``non_strict_cwl``, ...).
    :returns: ``CwlToolRunResponse`` with the parsed JSON outputs on
        success, or ``ErrorRunResponse`` when cwltool exits non-zero.
    :raises Exception: when cwltool's stdout is not valid JSON.
    """
    ensure_cwltool_available()

    args = []
    if ctx.verbose:
        args.append("--verbose")
    output_directory = kwds.get("output_directory", None)
    if output_directory:
        args.append("--outdir")
        args.append(output_directory)
    if kwds.get("no_container", False):
        # Without containers, point cwltool at planemo's dependency
        # resolvers configuration instead.
        args.append("--no-container")
        ensure_dependency_resolvers_conf_configured(ctx, kwds)
        args.append("--beta-dependency-resolvers-configuration")
        args.append(kwds["dependency_resolvers_config_file"])
    if kwds.get("mulled_containers"):
        args.append("--beta-use-biocontainers")

    if kwds.get("non_strict_cwl", False):
        args.append("--non-strict")

    args.extend([path, job_path])
    ctx.vlog("Calling cwltool with arguments %s" % args)
    with tempfile.NamedTemporaryFile("w") as tmp_stdout, \
            tempfile.NamedTemporaryFile("w") as tmp_stderr:
        # cwltool passes sys.stderr to subprocess.Popen - ensure it has
        # an actual fileno.
        with real_io():
            ret_code = main.main(
                args,
                stdout=tmp_stdout,
                stderr=tmp_stderr,
            )
        tmp_stdout.flush()
        tmp_stderr.flush()
        with open(tmp_stderr.name, "r") as stderr_f:
            log = stderr_f.read()
            ctx.vlog("cwltool log output [%s]" % log)
        # Read stdout once; the original re-opened the file in the error
        # path without closing it (a file-handle leak).
        with open(tmp_stdout.name, "r") as stdout_f:
            stdout_text = stdout_f.read()
        try:
            result = json.loads(stdout_text)
        except ValueError:
            message = JSON_PARSE_ERROR_MESSAGE % (
                stdout_text,
                tmp_stdout.name,
                log
            )
            error(message)
            raise Exception(message)

        if ret_code != 0:
            return ErrorRunResponse("Error running cwltool", log=log)
        outputs = result
    return CwlToolRunResponse(
        log,
        outputs=outputs,
    )
Code example #3
0
def local_galaxy_config(ctx, runnables, for_tests=False, **kwds):
    """Set up a ``GalaxyConfig`` in an auto-cleaned context.

    Resolves (or installs) a local Galaxy root, writes the configuration
    files Galaxy needs into a temporary config directory, builds the
    process environment, and yields a ``LocalGalaxyConfig``.  Intended to
    be used as a context-manager body (the config directory is cleaned up
    by ``_config_directory`` on exit).

    :param ctx: planemo context (verbosity, option sources, logging).
    :param runnables: runnable objects whose tools should be served.
    :param for_tests: when True, ``database_connection`` is left unset so
        the test harness controls the database.
    :param kwds: planemo CLI options steering every aspect of the setup.
    """
    tool_paths = [r.path for r in runnables if r.has_tools]
    test_data_dir = _find_test_data(tool_paths, **kwds)
    tool_data_table = _find_tool_data_table(tool_paths,
                                            test_data_dir=test_data_dir,
                                            **kwds)
    galaxy_root = _find_galaxy_root(ctx, **kwds)
    install_galaxy = kwds.get("install_galaxy", False)
    if galaxy_root is not None:
        # Treat an existing-but-empty galaxy_root as absent so a fresh
        # install into it is allowed.
        if os.path.isdir(galaxy_root) and not os.listdir(galaxy_root):
            os.rmdir(galaxy_root)
        # Refuse to install over a non-empty directory.
        if os.path.isdir(galaxy_root) and install_galaxy:
            raise Exception(
                "%s is an existing non-empty directory, cannot install Galaxy again"
                % galaxy_root)

    # Duplicate block in docker variant above.
    # mulled containers require Docker; turn it on implicitly unless the
    # user explicitly disabled Docker on the command line.
    if kwds.get("mulled_containers", False) and not kwds.get("docker", False):
        if ctx.get_option_source("docker") != OptionSource.cli:
            kwds["docker"] = True
        else:
            raise Exception(
                "Specified no docker and mulled containers together.")

    with _config_directory(ctx, **kwds) as config_directory:

        def config_join(*args):
            # Helper: paths relative to the temporary config directory.
            return os.path.join(config_directory, *args)

        install_env = {}
        if kwds.get('galaxy_skip_client_build', True):
            install_env['GALAXY_SKIP_CLIENT_BUILD'] = '1'
        if galaxy_root is None:
            galaxy_root = config_join("galaxy-dev")
        # Install Galaxy into galaxy_root if it does not exist yet.
        if not os.path.isdir(galaxy_root):
            _build_eggs_cache(ctx, install_env, kwds)
            _install_galaxy(ctx, galaxy_root, install_env, kwds)

        if parse_version(
                kwds.get('galaxy_python_version')
                or DEFAULT_PYTHON_VERSION) >= parse_version('3'):
            # on python 3 we use gunicorn,
            # which requires 'main' as server name
            server_name = 'main'
        else:
            server_name = "planemo%d" % random.randint(0, 100000)
        # Once we don't have to support earlier than 18.01 - try putting these files
        # somewhere better than with Galaxy.
        log_file = "%s.log" % server_name
        pid_file = "%s.pid" % server_name
        ensure_dependency_resolvers_conf_configured(
            ctx, kwds, os.path.join(config_directory, "resolvers_conf.xml"))
        _handle_job_config_file(config_directory, server_name, kwds)
        _handle_job_metrics(config_directory, kwds)
        file_path = kwds.get("file_path") or config_join("files")
        _ensure_directory(file_path)

        tool_dependency_dir = kwds.get("tool_dependency_dir") or config_join(
            "deps")
        _ensure_directory(tool_dependency_dir)

        shed_tool_conf = kwds.get("shed_tool_conf") or config_join(
            "shed_tools_conf.xml")
        all_tool_paths = _all_tool_paths(runnables, **kwds)
        empty_tool_conf = config_join("empty_tool_conf.xml")

        tool_conf = config_join("tool_conf.xml")

        shed_data_manager_config_file = config_join(
            "shed_data_manager_conf.xml")

        shed_tool_path = kwds.get("shed_tool_path") or config_join(
            "shed_tools")
        _ensure_directory(shed_tool_path)

        sheds_config_path = _configure_sheds_config_file(
            ctx, config_directory, **kwds)

        database_location = config_join("galaxy.sqlite")
        master_api_key = _get_master_api_key(kwds)
        # NOTE(review): dependency_dir duplicates the config_join("deps")
        # default of tool_dependency_dir above but ignores any
        # user-supplied "tool_dependency_dir" - confirm this is intended.
        dependency_dir = os.path.join(config_directory, "deps")
        preseeded_database = attempt_database_preseed(ctx, galaxy_root,
                                                      database_location,
                                                      **kwds)
        # NOTE(review): shed_tool_path was already ensured above; this
        # second call is redundant (harmless).
        _ensure_directory(shed_tool_path)
        port = _get_port(kwds)
        # Values substituted into the various file templates below.
        template_args = dict(
            port=port,
            host=kwds.get("host", "127.0.0.1"),
            server_name=server_name,
            temp_directory=config_directory,
            shed_tool_path=shed_tool_path,
            database_location=database_location,
            tool_conf=tool_conf,
            debug=kwds.get("debug", "true"),
            id_secret=kwds.get("id_secret", "test_secret"),
            log_level="DEBUG" if ctx.verbose else "INFO",
        )
        tool_config_file = "%s,%s" % (tool_conf, shed_tool_conf)
        # Setup both galaxy_email and older test user [email protected]
        # as admins for command_line, etc...
        properties = _shared_galaxy_properties(config_directory,
                                               kwds,
                                               for_tests=for_tests)
        # NOTE(review): server_name is forced to "main" here even when the
        # python-2 branch above picked a random planemoNNNN name - confirm
        # the property and the process server name may legitimately differ.
        properties.update(
            dict(
                server_name="main",
                ftp_upload_dir_template="${ftp_upload_dir}",
                ftp_upload_purge="False",
                ftp_upload_dir=test_data_dir or os.path.abspath('.'),
                ftp_upload_site="Test Data",
                check_upload_content="False",
                tool_dependency_dir=dependency_dir,
                file_path=file_path,
                new_file_path="${temp_directory}/tmp",
                tool_config_file=tool_config_file,
                tool_sheds_config_file=sheds_config_path,
                manage_dependency_relationships="False",
                job_working_directory="${temp_directory}/job_working_directory",
                template_cache_path="${temp_directory}/compiled_templates",
                citation_cache_type="file",
                citation_cache_data_dir="${temp_directory}/citations/data",
                citation_cache_lock_dir="${temp_directory}/citations/lock",
                database_auto_migrate="True",
                enable_beta_tool_formats="True",
                id_secret="${id_secret}",
                log_level="${log_level}",
                debug="${debug}",
                watch_tools="auto",
                default_job_shell="/bin/bash",  # For conda dependency resolution
                tool_data_table_config_path=tool_data_table,
                integrated_tool_panel_config=(
                    "${temp_directory}/"
                    "integrated_tool_panel_conf.xml"),
                migrated_tools_config=empty_tool_conf,
                test_data_dir=test_data_dir,  # TODO: make gx respect this
                shed_data_manager_config_file=shed_data_manager_config_file,
            ))
        _handle_container_resolution(ctx, kwds, properties)
        write_file(config_join("logging.ini"),
                   _sub(LOGGING_TEMPLATE, template_args))
        if not for_tests:
            properties["database_connection"] = _database_connection(
                database_location, **kwds)

        _handle_kwd_overrides(properties, kwds)

        # TODO: consider following property
        # watch_tool = False
        # datatypes_config_file = config/datatypes_conf.xml
        # welcome_url = /static/welcome.html
        # logo_url = /
        # sanitize_all_html = True
        # serve_xss_vulnerable_mimetypes = False
        # track_jobs_in_database = None
        # outputs_to_working_directory = False
        # retry_job_output_collection = 0

        # Turn the property/template maps into environment variables for
        # the Galaxy process.
        env = _build_env_for_galaxy(properties, template_args)
        env.update(install_env)
        _build_test_env(properties, env)
        env['GALAXY_TEST_SHED_TOOL_CONF'] = shed_tool_conf

        # No need to download twice - would GALAXY_TEST_DATABASE_CONNECTION
        # work?
        if preseeded_database:
            env["GALAXY_TEST_DB_TEMPLATE"] = os.path.abspath(database_location)
        env["GALAXY_TEST_UPLOAD_ASYNC"] = "false"
        env["GALAXY_TEST_LOGGING_CONFIG"] = config_join("logging.ini")
        env["GALAXY_DEVELOPMENT_ENVIRONMENT"] = "1"
        # Following are needed in 18.01 to prevent Galaxy from changing log and pid.
        # https://github.com/galaxyproject/planemo/issues/788
        env["GALAXY_LOG"] = log_file
        env["GALAXY_PID"] = pid_file
        # Materialize all configuration files in the config directory.
        web_config = _sub(WEB_SERVER_CONFIG_TEMPLATE, template_args)
        write_file(config_join("galaxy.ini"), web_config)
        _write_tool_conf(ctx, all_tool_paths, tool_conf)
        write_file(empty_tool_conf, EMPTY_TOOL_CONF_TEMPLATE)

        shed_tool_conf_contents = _sub(SHED_TOOL_CONF_TEMPLATE, template_args)
        # Write a new shed_tool_conf.xml if needed.
        write_file(shed_tool_conf, shed_tool_conf_contents, force=False)

        write_file(shed_data_manager_config_file,
                   SHED_DATA_MANAGER_CONF_TEMPLATE)

        yield LocalGalaxyConfig(
            ctx,
            config_directory,
            env,
            test_data_dir,
            port,
            server_name,
            master_api_key,
            runnables,
            galaxy_root,
            kwds,
        )
Code example #4
0
def docker_galaxy_config(ctx, runnables, for_tests=False, **kwds):
    """Set up a ``GalaxyConfig`` for Docker container.

    Writes Galaxy configuration files into a temporary config directory,
    collects the Docker volumes needed to expose tools and configuration
    to the container, and yields a ``DockerGalaxyConfig``.

    :param ctx: planemo context (verbosity, logging).
    :param runnables: runnable objects whose tools should be served.
    :param for_tests: forwarded to ``_shared_galaxy_properties``.
    :param kwds: planemo CLI options (ports, volumes, export directory...).
    """
    tool_paths = [r.path for r in runnables if r.has_tools]
    test_data_dir = _find_test_data(tool_paths, **kwds)

    with _config_directory(ctx, **kwds) as config_directory:

        def config_join(*args):
            # Helper: paths relative to the temporary config directory.
            return os.path.join(config_directory, *args)

        ensure_dependency_resolvers_conf_configured(
            ctx, kwds, os.path.join(config_directory, "resolvers_conf.xml"))
        _handle_job_metrics(config_directory, kwds)

        # Relative path resolved against the Galaxy root inside the
        # container.
        shed_tool_conf = "config/shed_tool_conf.xml"
        all_tool_paths = _all_tool_paths(runnables, **kwds)

        tool_directories = set([])  # Things to mount...
        for tool_path in all_tool_paths:
            directory = os.path.dirname(os.path.normpath(tool_path))
            if os.path.exists(directory):
                tool_directories.add(directory)

        # TODO: remap these.
        tool_volumes = []
        for tool_directory in tool_directories:
            volume = simple_docker_volume(tool_directory)
            tool_volumes.append(volume)

        empty_tool_conf = config_join("empty_tool_conf.xml")

        tool_conf = config_join("tool_conf.xml")

        shed_tool_path = kwds.get("shed_tool_path") or config_join(
            "shed_tools")
        _ensure_directory(shed_tool_path)

        sheds_config_path = _configure_sheds_config_file(
            ctx, config_directory, **kwds)
        port = _get_port(kwds)
        properties = _shared_galaxy_properties(config_directory,
                                               kwds,
                                               for_tests=for_tests)
        _handle_container_resolution(ctx, kwds, properties)
        master_api_key = _get_master_api_key(kwds)

        # (Removed a dead `template_args = dict(shed_tool_path=...,
        # tool_conf=...)` assignment here: it was unconditionally
        # overwritten below before any use.)
        tool_config_file = "%s,%s" % (tool_conf, shed_tool_conf)

        _write_tool_conf(ctx, all_tool_paths, tool_conf)
        write_file(empty_tool_conf, EMPTY_TOOL_CONF_TEMPLATE)

        properties.update(
            dict(
                tool_config_file=tool_config_file,
                tool_sheds_config_file=sheds_config_path,
                migrated_tools_config=empty_tool_conf,
            ))

        server_name = "planemo%d" % random.randint(0, 100000)

        # Value substitutions in Galaxy properties - for consistency with
        # non-Dockerized version.
        template_args = dict()
        env = _build_env_for_galaxy(properties, template_args)
        env["NONUSE"] = "nodejs,proftp,reports"
        if ctx.verbose:
            env["GALAXY_LOGGING"] = "full"

        # TODO: setup FTP upload dir and disable FTP server in container.
        _build_test_env(properties, env)

        docker_target_kwds = docker_host_args(**kwds)
        # Mount the tool directories plus the config directory itself.
        volumes = tool_volumes + [simple_docker_volume(config_directory)]
        export_directory = kwds.get("export_directory", None)
        if export_directory is not None:
            volumes.append(DockerVolume("%s:/export:rw" % export_directory))

        # TODO: Allow this to real Docker volumes and allow multiple.
        extra_volume = kwds.get("docker_extra_volume")
        if extra_volume:
            volumes.append(simple_docker_volume(extra_volume))
        yield DockerGalaxyConfig(
            ctx,
            config_directory,
            env,
            test_data_dir,
            port,
            server_name,
            master_api_key,
            runnables,
            docker_target_kwds=docker_target_kwds,
            volumes=volumes,
            export_directory=export_directory,
            kwds=kwds,
        )
Code example #5
0
File: config.py — Project: pvanheus/planemo
def local_galaxy_config(ctx, runnables, for_tests=False, **kwds):
    """Set up a ``GalaxyConfig`` in an auto-cleaned context.

    Resolves (or installs) a local Galaxy root, writes the configuration
    files Galaxy needs into a temporary config directory, builds the
    process environment, and yields a ``LocalGalaxyConfig``.  The config
    directory is cleaned up by ``_config_directory`` on exit.

    :param ctx: planemo context (verbosity, option sources, logging).
    :param runnables: runnable objects whose tools should be served.
    :param for_tests: when True, ``database_connection`` is left unset so
        the test harness controls the database.
    :param kwds: planemo CLI options steering every aspect of the setup.
    """
    tool_paths = [r.path for r in runnables if r.has_tools]
    test_data_dir = _find_test_data(tool_paths, **kwds)
    tool_data_table = _find_tool_data_table(
        tool_paths,
        test_data_dir=test_data_dir,
        **kwds
    )
    galaxy_root = _find_galaxy_root(ctx, **kwds)
    install_galaxy = kwds.get("install_galaxy", False)
    if galaxy_root is not None:
        # Treat an existing-but-empty galaxy_root as absent so a fresh
        # install into it is allowed.
        if os.path.isdir(galaxy_root) and not os.listdir(galaxy_root):
            os.rmdir(galaxy_root)
        # Refuse to install over a non-empty directory.
        if os.path.isdir(galaxy_root) and install_galaxy:
            raise Exception("%s is an existing non-empty directory, cannot install Galaxy again" % galaxy_root)

    # Duplicate block in docker variant above.
    # mulled containers require Docker; turn it on implicitly unless the
    # user explicitly disabled Docker on the command line.
    if kwds.get("mulled_containers", False) and not kwds.get("docker", False):
        if ctx.get_option_source("docker") != OptionSource.cli:
            kwds["docker"] = True
        else:
            raise Exception("Specified no docker and mulled containers together.")

    with _config_directory(ctx, **kwds) as config_directory:
        def config_join(*args):
            # Helper: paths relative to the temporary config directory.
            return os.path.join(config_directory, *args)

        install_env = {}
        if kwds.get('galaxy_skip_client_build', True):
            install_env['GALAXY_SKIP_CLIENT_BUILD'] = '1'
        if galaxy_root is None:
            galaxy_root = config_join("galaxy-dev")
        # Install Galaxy into galaxy_root if it does not exist yet.
        if not os.path.isdir(galaxy_root):
            _build_eggs_cache(ctx, install_env, kwds)
            _install_galaxy(ctx, galaxy_root, install_env, kwds)

        if parse_version(kwds.get('galaxy_python_version') or DEFAULT_PYTHON_VERSION) >= parse_version('3'):
            # on python 3 we use gunicorn,
            # which requires 'main' as server name
            server_name = 'main'
        else:
            server_name = "planemo%d" % random.randint(0, 100000)
        # Once we don't have to support earlier than 18.01 - try putting these files
        # somewhere better than with Galaxy.
        log_file = "%s.log" % server_name
        pid_file = "%s.pid" % server_name
        ensure_dependency_resolvers_conf_configured(ctx, kwds, os.path.join(config_directory, "resolvers_conf.xml"))
        _handle_job_config_file(config_directory, server_name, kwds)
        _handle_job_metrics(config_directory, kwds)
        file_path = kwds.get("file_path") or config_join("files")
        _ensure_directory(file_path)

        tool_dependency_dir = kwds.get("tool_dependency_dir") or config_join("deps")
        _ensure_directory(tool_dependency_dir)

        shed_tool_conf = kwds.get("shed_tool_conf") or config_join("shed_tools_conf.xml")
        all_tool_paths = _all_tool_paths(runnables, **kwds)
        empty_tool_conf = config_join("empty_tool_conf.xml")

        tool_conf = config_join("tool_conf.xml")

        shed_data_manager_config_file = config_join("shed_data_manager_conf.xml")

        shed_tool_path = kwds.get("shed_tool_path") or config_join("shed_tools")
        _ensure_directory(shed_tool_path)

        sheds_config_path = _configure_sheds_config_file(
            ctx, config_directory, **kwds
        )

        database_location = config_join("galaxy.sqlite")
        master_api_key = _get_master_api_key(kwds)
        # NOTE(review): dependency_dir duplicates the config_join("deps")
        # default of tool_dependency_dir above but ignores any
        # user-supplied "tool_dependency_dir" - confirm this is intended.
        dependency_dir = os.path.join(config_directory, "deps")
        preseeded_database = attempt_database_preseed(
            ctx,
            galaxy_root,
            database_location,
            **kwds
        )
        # NOTE(review): shed_tool_path was already ensured above; this
        # second call is redundant (harmless).
        _ensure_directory(shed_tool_path)
        port = _get_port(kwds)
        # Values substituted into the various file templates below.
        template_args = dict(
            port=port,
            host=kwds.get("host", "127.0.0.1"),
            server_name=server_name,
            temp_directory=config_directory,
            shed_tool_path=shed_tool_path,
            database_location=database_location,
            tool_conf=tool_conf,
            debug=kwds.get("debug", "true"),
            id_secret=kwds.get("id_secret", "test_secret"),
            log_level="DEBUG" if ctx.verbose else "INFO",
        )
        tool_config_file = "%s,%s" % (tool_conf, shed_tool_conf)
        # Setup both galaxy_email and older test user [email protected]
        # as admins for command_line, etc...
        properties = _shared_galaxy_properties(config_directory, kwds, for_tests=for_tests)
        # NOTE(review): server_name is forced to "main" here even when the
        # python-2 branch above picked a random planemoNNNN name - confirm
        # the property and the process server name may legitimately differ.
        properties.update(dict(
            server_name="main",
            ftp_upload_dir_template="${ftp_upload_dir}",
            ftp_upload_purge="False",
            ftp_upload_dir=test_data_dir or os.path.abspath('.'),
            ftp_upload_site="Test Data",
            check_upload_content="False",
            tool_dependency_dir=dependency_dir,
            file_path=file_path,
            new_file_path="${temp_directory}/tmp",
            tool_config_file=tool_config_file,
            tool_sheds_config_file=sheds_config_path,
            manage_dependency_relationships="False",
            job_working_directory="${temp_directory}/job_working_directory",
            template_cache_path="${temp_directory}/compiled_templates",
            citation_cache_type="file",
            citation_cache_data_dir="${temp_directory}/citations/data",
            citation_cache_lock_dir="${temp_directory}/citations/lock",
            database_auto_migrate="True",
            enable_beta_tool_formats="True",
            id_secret="${id_secret}",
            log_level="${log_level}",
            debug="${debug}",
            watch_tools="auto",
            default_job_shell="/bin/bash",  # For conda dependency resolution
            tool_data_table_config_path=tool_data_table,
            integrated_tool_panel_config=("${temp_directory}/"
                                          "integrated_tool_panel_conf.xml"),
            # Use in-memory database for kombu to avoid database contention
            # during tests.
            amqp_internal_connection="sqlalchemy+sqlite://",
            migrated_tools_config=empty_tool_conf,
            test_data_dir=test_data_dir,  # TODO: make gx respect this
            shed_data_manager_config_file=shed_data_manager_config_file,
        ))
        _handle_container_resolution(ctx, kwds, properties)
        write_file(config_join("logging.ini"), _sub(LOGGING_TEMPLATE, template_args))
        if not for_tests:
            properties["database_connection"] = _database_connection(database_location, **kwds)

        _handle_kwd_overrides(properties, kwds)

        # TODO: consider following property
        # watch_tool = False
        # datatypes_config_file = config/datatypes_conf.xml
        # welcome_url = /static/welcome.html
        # logo_url = /
        # sanitize_all_html = True
        # serve_xss_vulnerable_mimetypes = False
        # track_jobs_in_database = None
        # outputs_to_working_directory = False
        # retry_job_output_collection = 0

        # Turn the property/template maps into environment variables for
        # the Galaxy process.
        env = _build_env_for_galaxy(properties, template_args)
        env.update(install_env)
        _build_test_env(properties, env)
        env['GALAXY_TEST_SHED_TOOL_CONF'] = shed_tool_conf

        # No need to download twice - would GALAXY_TEST_DATABASE_CONNECTION
        # work?
        if preseeded_database:
            env["GALAXY_TEST_DB_TEMPLATE"] = os.path.abspath(database_location)
        env["GALAXY_TEST_UPLOAD_ASYNC"] = "false"
        env["GALAXY_TEST_LOGGING_CONFIG"] = config_join("logging.ini")
        env["GALAXY_DEVELOPMENT_ENVIRONMENT"] = "1"
        # Following are needed in 18.01 to prevent Galaxy from changing log and pid.
        # https://github.com/galaxyproject/planemo/issues/788
        env["GALAXY_LOG"] = log_file
        env["GALAXY_PID"] = pid_file
        # Materialize all configuration files in the config directory.
        web_config = _sub(WEB_SERVER_CONFIG_TEMPLATE, template_args)
        write_file(config_join("galaxy.ini"), web_config)
        _write_tool_conf(ctx, all_tool_paths, tool_conf)
        write_file(empty_tool_conf, EMPTY_TOOL_CONF_TEMPLATE)

        shed_tool_conf_contents = _sub(SHED_TOOL_CONF_TEMPLATE, template_args)
        # Write a new shed_tool_conf.xml if needed.
        write_file(shed_tool_conf, shed_tool_conf_contents, force=False)

        write_file(shed_data_manager_config_file, SHED_DATA_MANAGER_CONF_TEMPLATE)

        yield LocalGalaxyConfig(
            ctx,
            config_directory,
            env,
            test_data_dir,
            port,
            server_name,
            master_api_key,
            runnables,
            galaxy_root,
            kwds,
        )
Code example #6
0
File: config.py — Project: pvanheus/planemo
def docker_galaxy_config(ctx, runnables, for_tests=False, **kwds):
    """Set up a ``GalaxyConfig`` for Docker container.

    Writes Galaxy configuration files into a temporary config directory,
    collects the Docker volumes needed to expose tools and configuration
    to the container, and yields a ``DockerGalaxyConfig``.

    :param ctx: planemo context (verbosity, logging).
    :param runnables: runnable objects whose tools should be served.
    :param for_tests: forwarded to ``_shared_galaxy_properties``.
    :param kwds: planemo CLI options (ports, volumes, export directory...).
    """
    tool_paths = [r.path for r in runnables if r.has_tools]
    test_data_dir = _find_test_data(tool_paths, **kwds)

    with _config_directory(ctx, **kwds) as config_directory:
        def config_join(*args):
            # Helper: paths relative to the temporary config directory.
            return os.path.join(config_directory, *args)

        ensure_dependency_resolvers_conf_configured(ctx, kwds, os.path.join(config_directory, "resolvers_conf.xml"))
        _handle_job_metrics(config_directory, kwds)

        # Relative path resolved against the Galaxy root inside the
        # container.
        shed_tool_conf = "config/shed_tool_conf.xml"
        all_tool_paths = _all_tool_paths(runnables, **kwds)

        tool_directories = set([])  # Things to mount...
        for tool_path in all_tool_paths:
            directory = os.path.dirname(os.path.normpath(tool_path))
            if os.path.exists(directory):
                tool_directories.add(directory)

        # TODO: remap these.
        tool_volumes = []
        for tool_directory in tool_directories:
            volume = simple_docker_volume(tool_directory)
            tool_volumes.append(volume)

        empty_tool_conf = config_join("empty_tool_conf.xml")

        tool_conf = config_join("tool_conf.xml")

        shed_tool_path = kwds.get("shed_tool_path") or config_join("shed_tools")
        _ensure_directory(shed_tool_path)

        sheds_config_path = _configure_sheds_config_file(
            ctx, config_directory, **kwds
        )
        port = _get_port(kwds)
        properties = _shared_galaxy_properties(config_directory, kwds, for_tests=for_tests)
        _handle_container_resolution(ctx, kwds, properties)
        master_api_key = _get_master_api_key(kwds)

        # (Removed a dead `template_args = dict(shed_tool_path=...,
        # tool_conf=...)` assignment here: it was unconditionally
        # overwritten below before any use.)
        tool_config_file = "%s,%s" % (tool_conf, shed_tool_conf)

        _write_tool_conf(ctx, all_tool_paths, tool_conf)
        write_file(empty_tool_conf, EMPTY_TOOL_CONF_TEMPLATE)

        properties.update(dict(
            tool_config_file=tool_config_file,
            tool_sheds_config_file=sheds_config_path,
            migrated_tools_config=empty_tool_conf,
        ))

        server_name = "planemo%d" % random.randint(0, 100000)

        # Value substitutions in Galaxy properties - for consistency with
        # non-Dockerized version.
        template_args = dict(
        )
        env = _build_env_for_galaxy(properties, template_args)
        env["NONUSE"] = "nodejs,proftp,reports"
        if ctx.verbose:
            env["GALAXY_LOGGING"] = "full"

        # TODO: setup FTP upload dir and disable FTP server in container.
        _build_test_env(properties, env)

        docker_target_kwds = docker_host_args(**kwds)
        # Mount the tool directories plus the config directory itself.
        volumes = tool_volumes + [simple_docker_volume(config_directory)]
        export_directory = kwds.get("export_directory", None)
        if export_directory is not None:
            volumes.append(DockerVolume("%s:/export:rw" % export_directory))

        # TODO: Allow this to real Docker volumes and allow multiple.
        extra_volume = kwds.get("docker_extra_volume")
        if extra_volume:
            volumes.append(simple_docker_volume(extra_volume))
        yield DockerGalaxyConfig(
            ctx,
            config_directory,
            env,
            test_data_dir,
            port,
            server_name,
            master_api_key,
            runnables,
            docker_target_kwds=docker_target_kwds,
            volumes=volumes,
            export_directory=export_directory,
            kwds=kwds,
        )