def cli(ctx, workflow_path, output=None, force=False, **kwds):
    """Convert Format 2 workflows to native Galaxy workflows, and vice-versa.

    The direction is inferred from the input extension: ``.ga`` files are
    converted to Format 2 YAML locally; anything else is treated as Format 2
    and exported as native JSON via a served Galaxy instance.
    """
    assert is_galaxy_engine(**kwds)
    # Conversion never runs tools, so skip dependency resolution entirely.
    kwds["no_dependency_resolution"] = True

    if workflow_path.endswith(".ga"):
        # Native -> Format 2: pure local transformation, no Galaxy needed.
        if output is None:
            output = os.path.splitext(workflow_path)[0] + ".gxwf.yml"
        with open(workflow_path, "r") as f:
            workflow_dict = json.load(f)
        format2_wrapper = from_galaxy_native(workflow_dict, json_wrapper=True)
        with open(output, "w") as f:
            f.write(format2_wrapper["yaml_content"])
    else:
        # Format 2 -> native: import into a served Galaxy, then export.
        if output is None:
            output = os.path.splitext(workflow_path)[0] + ".ga"
        runnable = for_path(workflow_path)
        with engine_context(ctx, **kwds) as galaxy_engine:
            with galaxy_engine.ensure_runnables_served([runnable]) as config:
                workflow_id = config.workflow_id(workflow_path)
                output_dict = config.gi.workflows.export_workflow_dict(workflow_id)
                # indent/sort_keys produce deterministic, diff-able output
                # (matches the other variant of this command in this file).
                output_contents = json.dumps(output_dict, indent=4, sort_keys=True)
                write_file(output, output_contents, force=force)
def _handle_job_config_file(config_directory, server_name, kwds):
    """Ensure ``kwds['job_config_file']`` is set, writing a default job_conf.xml if absent."""
    job_config_file = kwds.get("job_config_file", None)
    if not job_config_file:
        job_config_file = os.path.join(config_directory, "job_conf.xml")
        docker_host = str(kwds.get("docker_host", docker_util.DEFAULT_HOST))
        # Only emit a docker_host <param> element when a host is configured.
        docker_host_param = (
            """<param id="docker_host">%s</param>""" % docker_host
            if docker_host
            else ""
        )
        substitutions = {
            "server_name": server_name,
            "docker_enable": str(kwds.get("docker", False)),
            "require_container": "false",
            "docker_sudo": str(kwds.get("docker_sudo", False)),
            "docker_sudo_cmd": str(kwds.get("docker_sudo_cmd", docker_util.DEFAULT_SUDO_COMMAND)),
            "docker_cmd": str(kwds.get("docker_cmd", docker_util.DEFAULT_DOCKER_COMMAND)),
            "docker_host": docker_host_param,
        }
        conf_contents = Template(JOB_CONFIG_LOCAL).safe_substitute(substitutions)
        write_file(job_config_file, conf_contents)
    kwds["job_config_file"] = job_config_file
def cli(ctx, workflow_identifier, output=None, force=False, **kwds):
    """Convert Format 2 workflows to native Galaxy workflows, and vice-versa."""
    assert is_galaxy_engine(**kwds)
    # Conversion never executes tools - skip dependency resolution.
    kwds["no_dependency_resolution"] = True

    if workflow_identifier.endswith(".ga"):
        # Native -> Format 2: a purely local transformation.
        if output is None:
            output = os.path.splitext(workflow_identifier)[0] + ".gxwf.yml"
        with open(workflow_identifier, "r") as fh:
            native_dict = json.load(fh)
        wrapper = from_galaxy_native(native_dict, json_wrapper=True)
        with open(output, "w") as fh:
            fh.write(wrapper["yaml_content"])
        return

    # Format 2 -> native: requires a served Galaxy to import then export.
    if output is None:
        output = os.path.splitext(workflow_identifier)[0] + ".ga"
    runnable = for_path(workflow_identifier)
    with engine_context(ctx, **kwds) as galaxy_engine:
        with galaxy_engine.ensure_runnables_served([runnable]) as config:
            workflow_id = config.workflow_id(workflow_identifier)
            exported = config.gi.workflows.export_workflow_dict(workflow_id)
            write_file(output, json.dumps(exported, indent=4, sort_keys=True), force=force)
def test_shed_diff(self):
    """Diff a modified single-tool repository against the shed, both modes."""
    with self._isolate_repo("single_tool") as repo_dir:
        cmd = ["shed_upload", "--force_repository_creation", *self._shed_args()]
        self._check_exit_code(cmd)
        io.write_file(
            join(repo_dir, "related_file"),
            "A related non-tool file (modified).\n",
        )
        self._check_diff(repo_dir, True)
        self._check_diff(repo_dir, False)
def _configure_sheds_config_file(ctx, config_directory, **kwds):
    """Write tool_sheds_conf.xml into ``config_directory`` and return its path."""
    if "shed_target" not in kwds:
        # Default to the main Tool Shed without mutating the caller's kwds.
        kwds = dict(kwds, shed_target="toolshed")
    target_url = tool_shed_url(ctx, **kwds)
    conf_path = os.path.join(config_directory, "tool_sheds_conf.xml")
    write_file(conf_path, _sub(TOOL_SHEDS_CONF, {"shed_target_url": target_url}))
    return conf_path
def _write_tool_conf(ctx, tool_paths, tool_conf_path):
    """Render a tool_conf.xml for ``tool_paths``, write it, and log the result."""
    entry = _tool_conf_entry_for(tool_paths)
    contents = _sub(TOOL_CONF_TEMPLATE, dict(tool_definition=entry))
    write_file(tool_conf_path, contents)
    ctx.vlog(
        "Writing tool_conf to path %s with contents [%s]",
        tool_conf_path,
        contents,
    )
def test_shed_diff(self):
    """shed_diff should report changes after a tracked file is modified."""
    with self._isolate_repo("single_tool") as f:
        upload_command = ["shed_upload", "--force_repository_creation"]
        upload_command += self._shed_args()
        self._check_exit_code(upload_command)
        io.write_file(
            join(f, "related_file"),
            "A related non-tool file (modified).\n",
        )
        # Exercise both diff modes against the modified repository.
        for mode in (True, False):
            self._check_diff(f, mode)
def test_diff_recursive(self):
    """Recursive shed_diff: clean after upload, dirty after a local edit."""
    with self._isolate_repo("multi_repos_nested") as repo:
        upload = ["shed_upload", "-r", "--force_repository_creation", *self._shed_args()]
        self._check_exit_code(upload)
        diff = ["shed_diff", "-r", *self._shed_args(read_only=True)]
        self._check_exit_code(diff, exit_code=0)
        io.write_file(
            join(repo, "cat1", "related_file"),
            "A related non-tool file (modified).\n",
        )
        self._check_exit_code(diff, exit_code=1)
def test_diff_recursive(self):
    """Recursive diff is clean right after creation and dirty after an edit."""
    with self._isolate_repo("multi_repos_nested") as f:
        self._shed_create(recursive=True)
        command = ["shed_diff", "-r"]
        command.extend(self._shed_args(read_only=True))
        self._check_exit_code(command, exit_code=0)
        target = join(f, "cat1", "related_file")
        io.write_file(target, "A related non-tool file (modified).\n")
        self._check_exit_code(command, exit_code=1)
def test_diff_xunit(self):
    """shed_diff --report_xunit output matches the known-good/known-bad fixtures."""
    with self._isolate_repo("multi_repos_nested") as f:
        upload_command = ["shed_upload", "-r", "--force_repository_creation"]
        upload_command.extend(self._shed_args())
        self._check_exit_code(upload_command)
        xunit_report = tempfile.NamedTemporaryFile(delete=False)
        xunit_report.flush()
        xunit_report.close()
        diff_command = ["shed_diff", "-r", "--report_xunit", xunit_report.name]
        diff_command.extend(self._shed_args(read_only=True))
        known_good_xunit_report = os.path.join(TEST_REPOS_DIR, 'multi_repos_nested.xunit.xml')
        known_bad_xunit_report = os.path.join(TEST_REPOS_DIR, 'multi_repos_nested.xunit-bad.xml')
        try:
            self._check_exit_code(diff_command, exit_code=0)
            # Context manager instead of bare open(...).read(): no leaked handle.
            with open(xunit_report.name, 'r') as fh:
                compare = fh.read()
            if diff(
                ElementTree.parse(known_good_xunit_report).getroot(),
                ElementTree.fromstring(compare),
                reporter=sys.stdout.write
            ):
                # Dump the offending report before failing for easier debugging.
                sys.stdout.write(compare)
                assert False, "XUnit report different from multi_repos_nested.xunit.xml."

            io.write_file(
                join(f, "cat1", "related_file"),
                "A related non-tool file (modified).\n",
            )
            self._check_exit_code(diff_command, exit_code=1)
            with open(xunit_report.name, 'r') as fh:
                compare = fh.read()
            if diff(
                ElementTree.parse(known_bad_xunit_report).getroot(),
                ElementTree.fromstring(compare),
                reporter=sys.stdout.write
            ):
                sys.stdout.write(compare)
                assert False, "XUnit report different from multi_repos_nested.xunit-bad.xml."
        finally:
            # Remove the temp report even when an assertion above fails.
            os.unlink(xunit_report.name)
def test_diff_xunit(self):
    """shed_diff --report_xunit output matches the known-good/known-bad fixtures."""
    with self._isolate_repo("multi_repos_nested") as f:
        upload_command = ["shed_upload", "-r", "--force_repository_creation"]
        upload_command.extend(self._shed_args())
        self._check_exit_code(upload_command)
        xunit_report = tempfile.NamedTemporaryFile(delete=False)
        xunit_report.flush()
        xunit_report.close()
        diff_command = ["shed_diff", "-r", "--report_xunit", xunit_report.name]
        diff_command.extend(self._shed_args(read_only=True))
        known_good_xunit_report = os.path.join(
            TEST_REPOS_DIR, 'multi_repos_nested.xunit.xml')
        known_bad_xunit_report = os.path.join(
            TEST_REPOS_DIR, 'multi_repos_nested.xunit-bad.xml')
        try:
            self._check_exit_code(diff_command, exit_code=0)
            # with-open instead of open(...).read(): no leaked file handle.
            with open(xunit_report.name, 'r') as fh:
                compare = fh.read()
            if diff(ElementTree.parse(known_good_xunit_report).getroot(),
                    ElementTree.fromstring(compare),
                    reporter=sys.stdout.write):
                # Dump the offending report before failing for easier debugging.
                sys.stdout.write(compare)
                assert False, "XUnit report different from multi_repos_nested.xunit.xml."
            io.write_file(
                join(f, "cat1", "related_file"),
                "A related non-tool file (modified).\n",
            )
            self._check_exit_code(diff_command, exit_code=1)
            with open(xunit_report.name, 'r') as fh:
                compare = fh.read()
            if diff(ElementTree.parse(known_bad_xunit_report).getroot(),
                    ElementTree.fromstring(compare),
                    reporter=sys.stdout.write):
                sys.stdout.write(compare)
                assert False, "XUnit report different from multi_repos_nested.xunit-bad.xml."
        finally:
            # Remove the temp report even when an assertion above fails.
            os.unlink(xunit_report.name)
def test_diff_recusrive(self):
    # NOTE(review): the method name has a typo ("recusrive") and the expected
    # exit code (-1) differs from the similar recursive test (1) elsewhere in
    # this file — confirm both are intentional before changing either.
    with self._isolate_repo("multi_repos_nested") as f:
        upload_command = ["shed_upload", "-r", "--force_repository_creation"]
        upload_command.extend(self._shed_args())
        self._check_exit_code(upload_command)
        diff_command = ["shed_diff", "-r"]
        diff_command.extend(self._shed_args(read_only=True))
        self._check_exit_code(diff_command, exit_code=0)
        modified_path = join(f, "cat1", "related_file")
        io.write_file(modified_path, "A related non-tool file (modified).\n")
        self._check_exit_code(diff_command, exit_code=-1)
def cli(ctx, workflow_path, output=None, force=False, **kwds):
    """Convert Format 2 workflow to a native Galaxy workflow."""
    assert is_galaxy_engine(**kwds)
    # Conversion never runs tools, so dependency resolution is unnecessary.
    kwds["no_dependency_resolution"] = True
    if output is None:
        output = os.path.splitext(workflow_path)[0] + ".ga"
    runnable = for_path(workflow_path)
    with engine_context(ctx, **kwds) as galaxy_engine:
        with galaxy_engine.serve_runnables([runnable]) as config:
            workflow_id = config.workflow_id(workflow_path)
            exported_dict = config.gi.workflows.export_workflow_dict(workflow_id)
            write_file(output, json.dumps(exported_dict), force=force)
def _handle_job_config_file(config_directory, server_name, kwds):
    """Ensure ``kwds['job_config_file']`` is set, generating a default one if needed."""
    job_config_file = kwds.get("job_config_file", None)
    if not job_config_file:
        job_config_file = os.path.join(config_directory, "job_conf.xml")
        # All substitution values are stringified for the XML template.
        template_values = {
            "server_name": server_name,
            "docker_enable": str(kwds.get("docker", False)),
            "docker_sudo": str(kwds.get("docker_sudo", False)),
            "docker_sudo_cmd": str(kwds.get("docker_sudo_cmd", docker_util.DEFAULT_SUDO_COMMAND)),
            "docker_cmd": str(kwds.get("docker_cmd", docker_util.DEFAULT_DOCKER_COMMAND)),
            "docker_host": str(kwds.get("docker_host", docker_util.DEFAULT_HOST)),
        }
        conf_contents = Template(JOB_CONFIG_LOCAL).safe_substitute(template_values)
        write_file(job_config_file, conf_contents)
    kwds["job_config_file"] = job_config_file
def test_diff_xunit(self):
    """Recursive shed_diff XUnit reports match the known-good and known-bad fixtures."""
    with self._isolate_repo("multi_repos_nested") as f:
        self._shed_create(recursive=True)
        xunit_report = tempfile.NamedTemporaryFile(delete=False)
        xunit_report.flush()
        xunit_report.close()
        diff_command = ["shed_diff", "-r", "--report_xunit", xunit_report.name]
        diff_command.extend(self._shed_args(read_only=True))
        known_good_xunit_report = os.path.join(TEST_REPOS_DIR, 'multi_repos_nested.xunit.xml')
        known_bad_xunit_report = os.path.join(TEST_REPOS_DIR, 'multi_repos_nested.xunit-bad.xml')
        try:
            self._check_exit_code(diff_command, exit_code=0)
            with open(xunit_report.name, 'r') as fh:
                compare = fh.read()
            if diff(
                self._make_deterministic(ElementTree.parse(known_good_xunit_report).getroot()),
                self._make_deterministic(ElementTree.fromstring(compare)),
                reporter=sys.stdout.write
            ):
                # Dump the offending report before failing for easier debugging.
                sys.stdout.write(compare)
                assert False, "XUnit report different from multi_repos_nested.xunit.xml."
            io.write_file(
                join(f, "cat1", "related_file"),
                "A related non-tool file (modified).\n",
            )
            self._check_exit_code(diff_command, exit_code=1)
            with open(xunit_report.name, 'r') as fh:
                compare = fh.read()
            if diff(
                self._make_deterministic(ElementTree.parse(known_bad_xunit_report).getroot()),
                self._make_deterministic(ElementTree.fromstring(compare)),
                reporter=sys.stdout.write
            ):
                sys.stdout.write(compare)
                assert False, "XUnit report different from multi_repos_nested.xunit-bad.xml."
        finally:
            # Previously unlink was skipped when an assertion failed, leaking
            # the temp file; try/finally guarantees cleanup.
            os.unlink(xunit_report.name)
def write_tool_description(ctx, tool_description, **kwds):
    """Write a tool description to the file system guided by supplied CLI kwds."""
    import shutil  # function-scope import: the module import block is outside this view

    tool_id = kwds.get("id")
    output = kwds.get("tool")
    if not output:
        extension = "cwl" if kwds.get("cwl") else "xml"
        output = "%s.%s" % (tool_id, extension)
    if not io.can_write_to_path(output, **kwds):
        ctx.exit(1)
    io.write_file(output, tool_description.contents)
    io.info("Tool written to %s" % output)

    test_contents = tool_description.test_contents
    if test_contents:
        # Match the separator style of the tool id for the tests file name.
        sep = "-" if "-" in tool_id else "_"
        tests_path = "%s%stests.yml" % (kwds.get("id"), sep)
        if not io.can_write_to_path(tests_path, **kwds):
            ctx.exit(1)
        io.write_file(tests_path, test_contents)
        io.info("Tool tests written to %s" % tests_path)

    macros = kwds["macros"]
    macros_file = "macros.xml"
    if macros and not os.path.exists(macros_file):
        io.write_file(macros_file, tool_description.macro_contents)
    elif macros:
        io.info(REUSING_MACROS_MESSAGE)

    if tool_description.test_files:
        if not os.path.exists("test-data"):
            io.info("No test-data directory, creating one.")
            # os.makedirs instead of shelling out to `mkdir -p`: no quoting pitfalls.
            os.makedirs("test-data")
        for test_file in tool_description.test_files:
            io.info("Copying test-file %s" % test_file)
            # shutil.copy instead of `cp` via the shell: safe for paths
            # containing spaces or quote characters.
            shutil.copy(test_file, "test-data")
def test_diff_xunit(self):
    """Recursive shed_diff XUnit reports match the known-good and known-bad fixtures."""
    with self._isolate_repo("multi_repos_nested") as f:
        self._shed_create(recursive=True)
        xunit_report = tempfile.NamedTemporaryFile(delete=False)
        xunit_report.flush()
        xunit_report.close()
        diff_command = ["shed_diff", "-r", "--report_xunit", xunit_report.name]
        diff_command.extend(self._shed_args(read_only=True))
        known_good_xunit_report = os.path.join(TEST_REPOS_DIR, 'multi_repos_nested.xunit.xml')
        known_bad_xunit_report = os.path.join(TEST_REPOS_DIR, 'multi_repos_nested.xunit-bad.xml')
        try:
            self._check_exit_code(diff_command, exit_code=0)
            # with-open instead of open(...).read(): no leaked file handle.
            with open(xunit_report.name, 'r') as fh:
                compare = fh.read()
            if diff(
                self._make_deterministic(ElementTree.parse(known_good_xunit_report).getroot()),
                self._make_deterministic(ElementTree.fromstring(compare)),
                reporter=sys.stdout.write
            ):
                sys.stdout.write(compare)
                assert False, "XUnit report different from multi_repos_nested.xunit.xml."
            io.write_file(
                join(f, "cat1", "related_file"),
                "A related non-tool file (modified).\n",
            )
            self._check_exit_code(diff_command, exit_code=1)
            with open(xunit_report.name, 'r') as fh:
                compare = fh.read()
            if diff(
                self._make_deterministic(ElementTree.parse(known_bad_xunit_report).getroot()),
                self._make_deterministic(ElementTree.fromstring(compare)),
                reporter=sys.stdout.write
            ):
                sys.stdout.write(compare)
                assert False, "XUnit report different from multi_repos_nested.xunit-bad.xml."
        finally:
            # Guarantee the temp report is removed even if an assertion fails.
            os.unlink(xunit_report.name)
def test_shed(self):
    """End-to-end shed lifecycle: create, update, upload and download a repository."""
    shed_url = os.environ.get("TEST_TOOL_SHED_URL", "http://localhost:9009")
    shed_api_key = os.environ.get("TEST_TOOL_SHED_API_KEY")
    tsi = toolshed.ToolShedInstance(shed_url, key=shed_api_key)
    owner = username(tsi)
    # Randomized name so repeated runs don't collide on the shed.
    name = "planemotestrepo%d" % random.randint(0, 1000000)
    with self._isolate():
        shed_yml_contents = SHED_TEMPLATE.safe_substitute(
            owner=owner,
            name=name,
        )
        io.write_file(".shed.yml", shed_yml_contents)
        test_path = os.path.join(TEST_DIR, "tool_dependencies_good_1.xml")
        # with-open instead of open(...).read(): no leaked file handle.
        with open(test_path) as fh:
            contents = fh.read()
        io.write_file("tool_dependencies.xml", contents)
        init_cmd = [
            "shed_create", "--shed_key", shed_api_key,
            "--shed_target", shed_url
        ]
        self._check_exit_code(init_cmd)
        with open(".shed.yml", "r") as fh:
            contents_dict = yaml.safe_load(fh)
        contents_dict["description"] = "Update test repository."
        io.write_file(".shed.yml", yaml.dump(contents_dict))
        update_cmd = [
            "shed_update", "--shed_key", shed_api_key,
            "--shed_target", shed_url
        ]
        self._check_exit_code(update_cmd)
        upload_cmd = [
            "shed_upload", "--shed_key", shed_api_key,
            "--shed_target", shed_url
        ]
        self._check_exit_code(upload_cmd)
        download_cmd = [
            "shed_download", "--shed_target", shed_url,
            "--destination", "shed_download.tar.gz"
        ]
        self._check_exit_code(download_cmd)
def test_shed(self):
    """End-to-end shed lifecycle: create, update, upload and download a repository."""
    shed_url = os.environ.get("TEST_TOOL_SHED_URL", "http://localhost:9009")
    shed_api_key = os.environ.get("TEST_TOOL_SHED_API_KEY")
    tsi = toolshed.ToolShedInstance(shed_url, key=shed_api_key)
    owner = username(tsi)
    # Randomized name so repeated runs don't collide on the shed.
    name = "planemotestrepo%d" % random.randint(0, 1000000)
    with self._isolate():
        shed_yml_contents = SHED_TEMPLATE.safe_substitute(
            owner=owner,
            name=name,
        )
        io.write_file(".shed.yml", shed_yml_contents)
        test_path = os.path.join(TEST_DIR, "tool_dependencies_good_1.xml")
        # with-open instead of open(...).read(): no leaked file handle.
        with open(test_path) as fh:
            contents = fh.read()
        io.write_file("tool_dependencies.xml", contents)
        init_cmd = [
            "shed_create", "--shed_key", shed_api_key,
            "--shed_target", shed_url
        ]
        self._check_exit_code(init_cmd)
        # yaml.safe_load instead of yaml.load: the file is plain data and
        # yaml.load can construct arbitrary Python objects; also closes the
        # handle that the old yaml.load(open(...)) form leaked.
        with open(".shed.yml", "r") as fh:
            contents_dict = yaml.safe_load(fh)
        contents_dict["description"] = "Update test repository."
        io.write_file(".shed.yml", yaml.dump(contents_dict))
        update_cmd = [
            "shed_update", "--shed_key", shed_api_key,
            "--shed_target", shed_url
        ]
        self._check_exit_code(update_cmd)
        upload_cmd = [
            "shed_upload", "--shed_key", shed_api_key,
            "--shed_target", shed_url
        ]
        self._check_exit_code(upload_cmd)
        download_cmd = [
            "shed_download", "--shed_target", shed_url,
            "--destination", "shed_download.tar.gz"
        ]
        self._check_exit_code(download_cmd)
def write_tool_description(ctx, tool_description, **kwds):
    """Write a tool description to the file system guided by supplied CLI kwds."""
    tool_id = kwds.get("id")
    output = kwds.get("tool")
    if not output:
        output = "%s.%s" % (tool_id, "cwl" if kwds.get("cwl") else "xml")
    if not io.can_write_to_path(output, **kwds):
        ctx.exit(1)
    io.write_file(output, tool_description.contents)
    io.info("Tool written to %s" % output)

    # Write any companion files (those with contents) alongside the tool.
    for tool_file in tool_description.tool_files:
        if tool_file.contents is None:
            continue
        path = tool_file.filename
        if not io.can_write_to_path(path, **kwds):
            ctx.exit(1)
        io.write_file(path, tool_file.contents)
        io.info("Tool %s written to %s" % (tool_file.description, path))

    macros = kwds["macros"]
    macros_file = "macros.xml"
    if macros:
        if os.path.exists(macros_file):
            io.info(REUSING_MACROS_MESSAGE)
        else:
            io.write_file(macros_file, tool_description.macro_contents)

    if tool_description.test_files:
        if not os.path.exists("test-data"):
            io.info("No test-data directory, creating one.")
            os.makedirs("test-data")
        for test_file in tool_description.test_files:
            io.info("Copying test-file %s" % test_file)
            try:
                shutil.copy(test_file, "test-data")
            except Exception as e:
                # Best-effort copy: report the failure and continue with the rest.
                io.info("Copy of %s failed: %s" % (test_file, e))
def galaxy_config(ctx, tool_paths, for_tests=False, **kwds):
    # Context manager (generator): builds a throw-away Galaxy configuration
    # directory for the supplied tools, yields a GalaxyConfig, and removes the
    # directory afterwards (unless the caller supplied one or set no_cleanup).
    test_data_dir = _find_test_data(tool_paths, **kwds)
    tool_data_table = _find_tool_data_table(
        tool_paths,
        test_data_dir=test_data_dir,
        **kwds
    )
    galaxy_root = _check_galaxy(ctx, **kwds)
    # No existing Galaxy root means one must be installed below.
    install_galaxy = galaxy_root is None
    config_directory = kwds.get("config_directory", None)

    def config_join(*args):
        # Helper: path inside the (possibly not-yet-created) config directory.
        return os.path.join(config_directory, *args)

    created_config_directory = False
    if not config_directory:
        created_config_directory = True
        config_directory = mkdtemp()
    try:
        latest_galaxy = False
        if _install_galaxy_if_needed(ctx, config_directory, kwds):
            latest_galaxy = True
            galaxy_root = config_join("galaxy-dev")
        _handle_dependency_resolution(config_directory, kwds)
        _handle_job_metrics(config_directory, kwds)
        tool_definition = _tool_conf_entry_for(tool_paths)
        empty_tool_conf = config_join("empty_tool_conf.xml")
        shed_tool_conf = _shed_tool_conf(install_galaxy, config_directory)
        tool_conf = config_join("tool_conf.xml")
        database_location = config_join("galaxy.sqlite")
        shed_tools_path = config_join("shed_tools")
        sheds_config_path = _configure_sheds_config_file(
            config_directory, **kwds
        )
        preseeded_database = True
        master_api_key = kwds.get("master_api_key", "test_key")
        dependency_dir = os.path.join(config_directory, "deps")
        try:
            _download_database_template(
                galaxy_root,
                database_location,
                latest=latest_galaxy
            )
        except Exception as e:
            print(e)
            # No network access - just roll forward from null.
            preseeded_database = False
        os.makedirs(shed_tools_path)
        # Random server name keeps concurrent planemo runs from colliding.
        server_name = "planemo%d" % random.randint(0, 100000)
        port = kwds.get("port", 9090)
        # Values substituted into the web-server/tool-conf templates below.
        template_args = dict(
            port=port,
            host="127.0.0.1",
            server_name=server_name,
            temp_directory=config_directory,
            shed_tools_path=shed_tools_path,
            database_location=database_location,
            tool_definition=tool_definition,
            tool_conf=tool_conf,
            debug=kwds.get("debug", "true"),
            master_api_key=master_api_key,
            id_secret=kwds.get("id_secret", "test_secret"),
            log_level=kwds.get("log_level", "DEBUG"),
        )
        tool_config_file = "%s,%s" % (tool_conf, shed_tool_conf)
        # Galaxy .ini properties; ${...} placeholders are resolved against
        # template_args by _build_env_for_galaxy.
        properties = dict(
            tool_dependency_dir=dependency_dir,
            file_path="${temp_directory}/files",
            new_file_path="${temp_directory}/tmp",
            tool_config_file=tool_config_file,
            tool_sheds_config_file=sheds_config_path,
            check_migrate_tools="False",
            manage_dependency_relationships="False",
            job_working_directory="${temp_directory}/job_working_directory",
            template_cache_path="${temp_directory}/compiled_templates",
            citation_cache_type="file",
            citation_cache_data_dir="${temp_directory}/citations/data",
            citation_cache_lock_dir="${temp_directory}/citations/lock",
            collect_outputs_from="job_working_directory",
            database_auto_migrate="True",
            cleanup_job="never",
            master_api_key="${master_api_key}",
            id_secret="${id_secret}",
            log_level="${log_level}",
            debug="${debug}",
            watch_tools="auto",
            tool_data_table_config_path=tool_data_table,
            integrated_tool_panel_config=("${temp_directory}/"
                                          "integrated_tool_panel_conf.xml"),
            # Use in-memory database for kombu to avoid database contention
            # during tests.
            amqp_internal_connection="sqlalchemy+sqlite://",
            migrated_tools_config=empty_tool_conf,
            test_data_dir=test_data_dir,  # TODO: make gx respect this
        )
        if not for_tests:
            properties["database_connection"] = \
                "sqlite:///${database_location}?isolation_level=IMMEDIATE"
        _handle_kwd_overrides(properties, kwds)
        # TODO: consider following property
        # watch_tool = False
        # datatypes_config_file = config/datatypes_conf.xml
        # welcome_url = /static/welcome.html
        # logo_url = /
        # sanitize_all_html = True
        # serve_xss_vulnerable_mimetypes = False
        # track_jobs_in_database = None
        # outputs_to_working_directory = False
        # retry_job_output_collection = 0
        env = _build_env_for_galaxy(properties, template_args)
        if install_galaxy:
            _build_eggs_cache(ctx, env, kwds)
        _build_test_env(properties, env)
        env['GALAXY_TEST_SHED_TOOL_CONF'] = shed_tool_conf
        # No need to download twice - would GALAXY_TEST_DATABASE_CONNECTION
        # work?
        if preseeded_database:
            env["GALAXY_TEST_DB_TEMPLATE"] = os.path.abspath(database_location)
        env["GALAXY_TEST_UPLOAD_ASYNC"] = "false"
        env["GALAXY_DEVELOPMENT_ENVIRONMENT"] = "1"
        # Materialize the config files from the templates built above.
        web_config = _sub(WEB_SERVER_CONFIG_TEMPLATE, template_args)
        write_file(config_join("galaxy.ini"), web_config)
        tool_conf_contents = _sub(TOOL_CONF_TEMPLATE, template_args)
        write_file(tool_conf, tool_conf_contents)
        write_file(empty_tool_conf, EMPTY_TOOL_CONF_TEMPLATE)
        shed_tool_conf_contents = _sub(SHED_TOOL_CONF_TEMPLATE, template_args)
        write_file(shed_tool_conf, shed_tool_conf_contents)
        yield GalaxyConfig(
            galaxy_root,
            config_directory,
            env,
            test_data_dir,
            port,
            server_name,
            master_api_key,
        )
    finally:
        # Only remove the directory if this function created it.
        cleanup = not kwds.get("no_cleanup", False)
        if created_config_directory and cleanup:
            shutil.rmtree(config_directory)
def docker_galaxy_config(ctx, runnables, for_tests=False, **kwds):
    """Set up a ``GalaxyConfig`` for Docker container."""
    tool_paths = [r.path for r in runnables if r.has_tools]
    test_data_dir = _find_test_data(tool_paths, **kwds)
    with _config_directory(ctx, **kwds) as config_directory:
        def config_join(*args):
            # Helper: path inside the managed config directory.
            return os.path.join(config_directory, *args)

        ensure_dependency_resolvers_conf_configured(
            ctx, kwds, os.path.join(config_directory, "resolvers_conf.xml"))
        _handle_job_metrics(config_directory, kwds)
        # Path is relative to the container's Galaxy config, not the host.
        shed_tool_conf = "config/shed_tool_conf.xml"
        all_tool_paths = _all_tool_paths(runnables, **kwds)
        tool_directories = set([])  # Things to mount...
        for tool_path in all_tool_paths:
            directory = os.path.dirname(os.path.normpath(tool_path))
            if os.path.exists(directory):
                tool_directories.add(directory)
        # TODO: remap these.
        tool_volumes = []
        for tool_directory in tool_directories:
            volume = simple_docker_volume(tool_directory)
            tool_volumes.append(volume)
        empty_tool_conf = config_join("empty_tool_conf.xml")
        tool_conf = config_join("tool_conf.xml")
        shed_tool_path = kwds.get("shed_tool_path") or config_join(
            "shed_tools")
        _ensure_directory(shed_tool_path)
        sheds_config_path = _configure_sheds_config_file(
            ctx, config_directory, **kwds)
        port = _get_port(kwds)
        properties = _shared_galaxy_properties(config_directory, kwds, for_tests=for_tests)
        _handle_container_resolution(ctx, kwds, properties)
        master_api_key = _get_master_api_key(kwds)
        template_args = dict(
            shed_tool_path=shed_tool_path,
            tool_conf=tool_conf,
        )
        tool_config_file = "%s,%s" % (tool_conf, shed_tool_conf)
        _write_tool_conf(ctx, all_tool_paths, tool_conf)
        write_file(empty_tool_conf, EMPTY_TOOL_CONF_TEMPLATE)
        properties.update(
            dict(
                tool_config_file=tool_config_file,
                tool_sheds_config_file=sheds_config_path,
                migrated_tools_config=empty_tool_conf,
            ))
        # Random server name keeps concurrent planemo runs from colliding.
        server_name = "planemo%d" % random.randint(0, 100000)
        # Value substitutions in Galaxy properties - for consistency with
        # non-Dockerized version.
        template_args = dict()
        env = _build_env_for_galaxy(properties, template_args)
        env["NONUSE"] = "nodejs,proftp,reports"
        if ctx.verbose:
            env["GALAXY_LOGGING"] = "full"
        # TODO: setup FTP upload dir and disable FTP server in container.
        _build_test_env(properties, env)
        docker_target_kwds = docker_host_args(**kwds)
        volumes = tool_volumes + [simple_docker_volume(config_directory)]
        export_directory = kwds.get("export_directory", None)
        if export_directory is not None:
            volumes.append(DockerVolume("%s:/export:rw" % export_directory))
        # TODO: Allow this to real Docker volumes and allow multiple.
        extra_volume = kwds.get("docker_extra_volume")
        if extra_volume:
            volumes.append(simple_docker_volume(extra_volume))
        yield DockerGalaxyConfig(
            ctx,
            config_directory,
            env,
            test_data_dir,
            port,
            server_name,
            master_api_key,
            runnables,
            docker_target_kwds=docker_target_kwds,
            volumes=volumes,
            export_directory=export_directory,
            kwds=kwds,
        )
def galaxy_config(ctx, tool_paths, for_tests=False, **kwds):
    """Set up a ``GalaxyConfig`` in an auto-cleaned context."""
    test_data_dir = _find_test_data(tool_paths, **kwds)
    tool_data_table = _find_tool_data_table(
        tool_paths,
        test_data_dir=test_data_dir,
        **kwds
    )
    galaxy_root = _check_galaxy(ctx, **kwds)
    # No existing Galaxy root means one must be installed below.
    install_galaxy = galaxy_root is None
    config_directory = kwds.get("config_directory", None)

    def config_join(*args):
        # Helper: path inside the (possibly not-yet-created) config directory.
        return os.path.join(config_directory, *args)

    created_config_directory = False
    if not config_directory:
        created_config_directory = True
        config_directory = mkdtemp()
    try:
        latest_galaxy = False
        install_env = {}
        if install_galaxy:
            _build_eggs_cache(ctx, install_env, kwds)
            _install_galaxy(ctx, config_directory, install_env, kwds)
            latest_galaxy = True
            galaxy_root = config_join("galaxy-dev")
        _handle_dependency_resolution(config_directory, kwds)
        _handle_job_metrics(config_directory, kwds)
        # Caller-supplied paths win; otherwise use config-directory defaults.
        file_path = kwds.get("file_path") or config_join("files")
        _ensure_directory(file_path)
        tool_dependency_dir = kwds.get("tool_dependency_dir") or config_join("deps")
        _ensure_directory(tool_dependency_dir)
        shed_tool_conf = kwds.get("shed_tool_conf") or config_join("shed_tools_conf.xml")
        tool_definition = _tool_conf_entry_for(tool_paths)
        empty_tool_conf = config_join("empty_tool_conf.xml")
        tool_conf = config_join("tool_conf.xml")
        database_location = config_join("galaxy.sqlite")
        shed_tool_path = kwds.get("shed_tool_path") or config_join("shed_tools")
        _ensure_directory(shed_tool_path)
        sheds_config_path = _configure_sheds_config_file(
            ctx, config_directory, **kwds
        )
        master_api_key = kwds.get("master_api_key", DEFAULT_MASTER_API_KEY)
        dependency_dir = os.path.join(config_directory, "deps")
        preseeded_database = attempt_database_preseed(
            galaxy_root,
            database_location,
            latest_galaxy=latest_galaxy,
            **kwds
        )
        # NOTE(review): shed_tool_path was already ensured above — this second
        # call looks redundant; confirm before removing.
        _ensure_directory(shed_tool_path)
        # Random server name keeps concurrent planemo runs from colliding.
        server_name = "planemo%d" % random.randint(0, 100000)
        port = int(kwds.get("port", 9090))
        # Values substituted into the web-server/tool-conf templates below.
        template_args = dict(
            port=port,
            host=kwds.get("host", "127.0.0.1"),
            server_name=server_name,
            temp_directory=config_directory,
            shed_tool_path=shed_tool_path,
            database_location=database_location,
            tool_definition=tool_definition,
            tool_conf=tool_conf,
            debug=kwds.get("debug", "true"),
            master_api_key=master_api_key,
            id_secret=kwds.get("id_secret", "test_secret"),
            log_level=kwds.get("log_level", "DEBUG"),
        )
        tool_config_file = "%s,%s" % (tool_conf, shed_tool_conf)
        user_email = kwds.get("galaxy_email")
        # Galaxy .ini properties; ${...} placeholders are resolved against
        # template_args by _build_env_for_galaxy.
        properties = dict(
            single_user=user_email,
            admin_users=user_email,
            ftp_upload_dir_template="${ftp_upload_dir}",
            ftp_upload_purge="False",
            ftp_upload_dir=test_data_dir or os.path.abspath('.'),
            ftp_upload_site="Test Data",
            tool_dependency_dir=dependency_dir,
            file_path=file_path,
            new_file_path="${temp_directory}/tmp",
            tool_config_file=tool_config_file,
            tool_sheds_config_file=sheds_config_path,
            check_migrate_tools="False",
            manage_dependency_relationships="False",
            job_working_directory="${temp_directory}/job_working_directory",
            template_cache_path="${temp_directory}/compiled_templates",
            citation_cache_type="file",
            citation_cache_data_dir="${temp_directory}/citations/data",
            citation_cache_lock_dir="${temp_directory}/citations/lock",
            collect_outputs_from="job_working_directory",
            database_auto_migrate="True",
            cleanup_job="never",
            master_api_key="${master_api_key}",
            enable_beta_tool_formats="True",
            id_secret="${id_secret}",
            log_level="${log_level}",
            debug="${debug}",
            watch_tools="auto",
            default_job_shell="/bin/bash",  # For conda dependency resolution
            tool_data_table_config_path=tool_data_table,
            integrated_tool_panel_config=("${temp_directory}/"
                                          "integrated_tool_panel_conf.xml"),
            # Use in-memory database for kombu to avoid database contention
            # during tests.
            amqp_internal_connection="sqlalchemy+sqlite://",
            migrated_tools_config=empty_tool_conf,
            test_data_dir=test_data_dir,  # TODO: make gx respect this
        )
        if not for_tests:
            properties["database_connection"] = _database_connection(database_location, **kwds)
        _handle_kwd_overrides(properties, kwds)
        # TODO: consider following property
        # watch_tool = False
        # datatypes_config_file = config/datatypes_conf.xml
        # welcome_url = /static/welcome.html
        # logo_url = /
        # sanitize_all_html = True
        # serve_xss_vulnerable_mimetypes = False
        # track_jobs_in_database = None
        # outputs_to_working_directory = False
        # retry_job_output_collection = 0
        env = _build_env_for_galaxy(properties, template_args)
        env.update(install_env)
        _build_test_env(properties, env)
        env['GALAXY_TEST_SHED_TOOL_CONF'] = shed_tool_conf
        # No need to download twice - would GALAXY_TEST_DATABASE_CONNECTION
        # work?
        if preseeded_database:
            env["GALAXY_TEST_DB_TEMPLATE"] = os.path.abspath(database_location)
        env["GALAXY_TEST_UPLOAD_ASYNC"] = "false"
        env["GALAXY_DEVELOPMENT_ENVIRONMENT"] = "1"
        # Materialize the config files from the templates built above.
        web_config = _sub(WEB_SERVER_CONFIG_TEMPLATE, template_args)
        write_file(config_join("galaxy.ini"), web_config)
        tool_conf_contents = _sub(TOOL_CONF_TEMPLATE, template_args)
        write_file(tool_conf, tool_conf_contents)
        write_file(empty_tool_conf, EMPTY_TOOL_CONF_TEMPLATE)
        shed_tool_conf_contents = _sub(SHED_TOOL_CONF_TEMPLATE, template_args)
        # Write a new shed_tool_conf.xml if needed.
        write_file(shed_tool_conf, shed_tool_conf_contents, force=False)
        pid_file = kwds.get("pid_file") or config_join("galaxy.pid")
        yield GalaxyConfig(
            galaxy_root,
            pid_file,
            config_directory,
            env,
            test_data_dir,
            port,
            server_name,
            master_api_key,
        )
    finally:
        # Only remove the directory if this function created it.
        cleanup = not kwds.get("no_cleanup", False)
        if created_config_directory and cleanup:
            shutil.rmtree(config_directory)
def docker_galaxy_config(ctx, runnables, for_tests=False, **kwds):
    """Set up a ``GalaxyConfig`` for Docker container."""
    tool_paths = [r.path for r in runnables if r.has_tools]
    test_data_dir = _find_test_data(tool_paths, **kwds)
    with _config_directory(ctx, **kwds) as config_directory:
        def config_join(*args):
            # Helper: path inside the managed config directory.
            return os.path.join(config_directory, *args)

        ensure_dependency_resolvers_conf_configured(ctx, kwds, os.path.join(config_directory, "resolvers_conf.xml"))
        _handle_job_metrics(config_directory, kwds)
        # Path is relative to the container's Galaxy config, not the host.
        shed_tool_conf = "config/shed_tool_conf.xml"
        all_tool_paths = _all_tool_paths(runnables, **kwds)
        tool_directories = set([])  # Things to mount...
        for tool_path in all_tool_paths:
            directory = os.path.dirname(os.path.normpath(tool_path))
            if os.path.exists(directory):
                tool_directories.add(directory)
        # TODO: remap these.
        tool_volumes = []
        for tool_directory in tool_directories:
            volume = simple_docker_volume(tool_directory)
            tool_volumes.append(volume)
        empty_tool_conf = config_join("empty_tool_conf.xml")
        tool_conf = config_join("tool_conf.xml")
        shed_tool_path = kwds.get("shed_tool_path") or config_join("shed_tools")
        _ensure_directory(shed_tool_path)
        sheds_config_path = _configure_sheds_config_file(
            ctx, config_directory, **kwds
        )
        port = _get_port(kwds)
        properties = _shared_galaxy_properties(config_directory, kwds, for_tests=for_tests)
        _handle_container_resolution(ctx, kwds, properties)
        master_api_key = _get_master_api_key(kwds)
        template_args = dict(
            shed_tool_path=shed_tool_path,
            tool_conf=tool_conf,
        )
        tool_config_file = "%s,%s" % (tool_conf, shed_tool_conf)
        _write_tool_conf(ctx, all_tool_paths, tool_conf)
        write_file(empty_tool_conf, EMPTY_TOOL_CONF_TEMPLATE)
        properties.update(dict(
            tool_config_file=tool_config_file,
            tool_sheds_config_file=sheds_config_path,
            migrated_tools_config=empty_tool_conf,
        ))
        # Random server name keeps concurrent planemo runs from colliding.
        server_name = "planemo%d" % random.randint(0, 100000)
        # Value substitutions in Galaxy properties - for consistency with
        # non-Dockerized version.
        template_args = dict(
        )
        env = _build_env_for_galaxy(properties, template_args)
        env["NONUSE"] = "nodejs,proftp,reports"
        if ctx.verbose:
            env["GALAXY_LOGGING"] = "full"
        # TODO: setup FTP upload dir and disable FTP server in container.
        _build_test_env(properties, env)
        docker_target_kwds = docker_host_args(**kwds)
        volumes = tool_volumes + [simple_docker_volume(config_directory)]
        export_directory = kwds.get("export_directory", None)
        if export_directory is not None:
            volumes.append(DockerVolume("%s:/export:rw" % export_directory))
        # TODO: Allow this to real Docker volumes and allow multiple.
        extra_volume = kwds.get("docker_extra_volume")
        if extra_volume:
            volumes.append(simple_docker_volume(extra_volume))
        yield DockerGalaxyConfig(
            ctx,
            config_directory,
            env,
            test_data_dir,
            port,
            server_name,
            master_api_key,
            runnables,
            docker_target_kwds=docker_target_kwds,
            volumes=volumes,
            export_directory=export_directory,
            kwds=kwds,
        )
def local_galaxy_config(ctx, runnables, for_tests=False, **kwds):
    """Set up a ``GalaxyConfig`` in an auto-cleaned context.

    Resolves (or installs) a local Galaxy root, writes all configuration
    files (galaxy.ini, tool confs, logging.ini, job conf, ...) into a
    temporary config directory, builds the environment variables Galaxy
    needs, and yields a ``LocalGalaxyConfig`` for the caller to serve/run.

    :param ctx: planemo application context.
    :param runnables: runnable objects; those with ``has_tools`` contribute
        tool paths to the generated tool conf.
    :param for_tests: when True, skip forcing a database_connection so the
        test framework can manage the database itself.
    :param kwds: raw CLI options (galaxy_root, port, host, debug, ...).
    """
    tool_paths = [r.path for r in runnables if r.has_tools]
    test_data_dir = _find_test_data(tool_paths, **kwds)
    tool_data_table = _find_tool_data_table(
        tool_paths,
        test_data_dir=test_data_dir,
        **kwds
    )
    galaxy_root = _find_galaxy_root(ctx, **kwds)
    install_galaxy = kwds.get("install_galaxy", False)
    if galaxy_root is not None:
        # An empty pre-existing directory is treated as absent (removed so a
        # fresh install can happen); a non-empty one blocks reinstalling.
        if os.path.isdir(galaxy_root) and not os.listdir(galaxy_root):
            os.rmdir(galaxy_root)
        if os.path.isdir(galaxy_root) and install_galaxy:
            raise Exception("%s is an existing non-empty directory, cannot install Galaxy again" % galaxy_root)

    # Duplicate block in docker variant above.
    if kwds.get("mulled_containers", False) and not kwds.get("docker", False):
        if ctx.get_option_source("docker") != OptionSource.cli:
            kwds["docker"] = True
        else:
            raise Exception("Specified no docker and mulled containers together.")

    with _config_directory(ctx, **kwds) as config_directory:
        def config_join(*args):
            return os.path.join(config_directory, *args)

        install_env = {}
        if kwds.get('galaxy_skip_client_build', True):
            install_env['GALAXY_SKIP_CLIENT_BUILD'] = '1'
        if galaxy_root is None:
            galaxy_root = config_join("galaxy-dev")
        if not os.path.isdir(galaxy_root):
            _build_eggs_cache(ctx, install_env, kwds)
            _install_galaxy(ctx, galaxy_root, install_env, kwds)

        if parse_version(kwds.get('galaxy_python_version') or DEFAULT_PYTHON_VERSION) >= parse_version('3'):
            # on python 3 we use gunicorn,
            # which requires 'main' as server name
            server_name = 'main'
        else:
            server_name = "planemo%d" % random.randint(0, 100000)
        # Once we don't have to support earlier than 18.01 - try putting these files
        # somewhere better than with Galaxy.
        log_file = "%s.log" % server_name
        pid_file = "%s.pid" % server_name
        ensure_dependency_resolvers_conf_configured(ctx, kwds, os.path.join(config_directory, "resolvers_conf.xml"))
        _handle_job_config_file(config_directory, server_name, kwds)
        _handle_job_metrics(config_directory, kwds)
        file_path = kwds.get("file_path") or config_join("files")
        _ensure_directory(file_path)
        tool_dependency_dir = kwds.get("tool_dependency_dir") or config_join("deps")
        _ensure_directory(tool_dependency_dir)
        shed_tool_conf = kwds.get("shed_tool_conf") or config_join("shed_tools_conf.xml")
        all_tool_paths = _all_tool_paths(runnables, **kwds)
        empty_tool_conf = config_join("empty_tool_conf.xml")
        tool_conf = config_join("tool_conf.xml")
        shed_data_manager_config_file = config_join("shed_data_manager_conf.xml")
        shed_tool_path = kwds.get("shed_tool_path") or config_join("shed_tools")
        _ensure_directory(shed_tool_path)
        sheds_config_path = _configure_sheds_config_file(
            ctx, config_directory, **kwds
        )
        database_location = config_join("galaxy.sqlite")
        master_api_key = _get_master_api_key(kwds)
        dependency_dir = os.path.join(config_directory, "deps")
        preseeded_database = attempt_database_preseed(
            ctx,
            galaxy_root,
            database_location,
            **kwds
        )
        _ensure_directory(shed_tool_path)
        port = _get_port(kwds)
        # Values substituted into the various *_TEMPLATE config files below.
        template_args = dict(
            port=port,
            host=kwds.get("host", "127.0.0.1"),
            server_name=server_name,
            temp_directory=config_directory,
            shed_tool_path=shed_tool_path,
            database_location=database_location,
            tool_conf=tool_conf,
            debug=kwds.get("debug", "true"),
            id_secret=kwds.get("id_secret", "test_secret"),
            log_level="DEBUG" if ctx.verbose else "INFO",
        )
        tool_config_file = "%s,%s" % (tool_conf, shed_tool_conf)
        # Setup both galaxy_email and older test user [email protected]
        # as admins for command_line, etc...
        properties = _shared_galaxy_properties(config_directory, kwds, for_tests=for_tests)
        # NOTE(review): server_name is hard-coded to "main" here even though
        # the local variable may be "planemo<N>" on python 2 -- presumably
        # intentional for the app config vs. the web-server config; confirm.
        properties.update(dict(
            server_name="main",
            ftp_upload_dir_template="${ftp_upload_dir}",
            ftp_upload_purge="False",
            ftp_upload_dir=test_data_dir or os.path.abspath('.'),
            ftp_upload_site="Test Data",
            check_upload_content="False",
            tool_dependency_dir=dependency_dir,
            file_path=file_path,
            new_file_path="${temp_directory}/tmp",
            tool_config_file=tool_config_file,
            tool_sheds_config_file=sheds_config_path,
            manage_dependency_relationships="False",
            job_working_directory="${temp_directory}/job_working_directory",
            template_cache_path="${temp_directory}/compiled_templates",
            citation_cache_type="file",
            citation_cache_data_dir="${temp_directory}/citations/data",
            citation_cache_lock_dir="${temp_directory}/citations/lock",
            database_auto_migrate="True",
            enable_beta_tool_formats="True",
            id_secret="${id_secret}",
            log_level="${log_level}",
            debug="${debug}",
            watch_tools="auto",
            default_job_shell="/bin/bash",  # For conda dependency resolution
            tool_data_table_config_path=tool_data_table,
            integrated_tool_panel_config=("${temp_directory}/"
                                          "integrated_tool_panel_conf.xml"),
            # Use in-memory database for kombu to avoid database contention
            # during tests.
            amqp_internal_connection="sqlalchemy+sqlite://",
            migrated_tools_config=empty_tool_conf,
            test_data_dir=test_data_dir,  # TODO: make gx respect this
            shed_data_manager_config_file=shed_data_manager_config_file,
        ))
        _handle_container_resolution(ctx, kwds, properties)
        write_file(config_join("logging.ini"), _sub(LOGGING_TEMPLATE, template_args))
        if not for_tests:
            properties["database_connection"] = _database_connection(database_location, **kwds)

        _handle_kwd_overrides(properties, kwds)

        # TODO: consider following property
        # watch_tool = False
        # datatypes_config_file = config/datatypes_conf.xml
        # welcome_url = /static/welcome.html
        # logo_url = /
        # sanitize_all_html = True
        # serve_xss_vulnerable_mimetypes = False
        # track_jobs_in_database = None
        # outputs_to_working_directory = False
        # retry_job_output_collection = 0

        env = _build_env_for_galaxy(properties, template_args)
        env.update(install_env)
        _build_test_env(properties, env)
        env['GALAXY_TEST_SHED_TOOL_CONF'] = shed_tool_conf

        # No need to download twice - would GALAXY_TEST_DATABASE_CONNECTION
        # work?
        if preseeded_database:
            env["GALAXY_TEST_DB_TEMPLATE"] = os.path.abspath(database_location)
        env["GALAXY_TEST_UPLOAD_ASYNC"] = "false"
        env["GALAXY_TEST_LOGGING_CONFIG"] = config_join("logging.ini")
        env["GALAXY_DEVELOPMENT_ENVIRONMENT"] = "1"
        # Following are needed in 18.01 to prevent Galaxy from changing log and pid.
        # https://github.com/galaxyproject/planemo/issues/788
        env["GALAXY_LOG"] = log_file
        env["GALAXY_PID"] = pid_file
        web_config = _sub(WEB_SERVER_CONFIG_TEMPLATE, template_args)
        write_file(config_join("galaxy.ini"), web_config)
        _write_tool_conf(ctx, all_tool_paths, tool_conf)
        write_file(empty_tool_conf, EMPTY_TOOL_CONF_TEMPLATE)
        shed_tool_conf_contents = _sub(SHED_TOOL_CONF_TEMPLATE, template_args)
        # Write a new shed_tool_conf.xml if needed.
        write_file(shed_tool_conf, shed_tool_conf_contents, force=False)
        write_file(shed_data_manager_config_file, SHED_DATA_MANAGER_CONF_TEMPLATE)
        yield LocalGalaxyConfig(
            ctx,
            config_directory,
            env,
            test_data_dir,
            port,
            server_name,
            master_api_key,
            runnables,
            galaxy_root,
            kwds,
        )
def local_galaxy_config(ctx, runnables, for_tests=False, **kwds):
    """Set up a ``GalaxyConfig`` in an auto-cleaned context.

    Older variant: installs Galaxy into the config directory when no root is
    found, writes all config files, and yields a ``LocalGalaxyConfig``
    (taking a ``pid_file`` rather than the planemo context).

    :param ctx: planemo application context.
    :param runnables: runnable objects contributing tool paths.
    :param for_tests: when True, leave database_connection unset.
    :param kwds: raw CLI options.
    """
    tool_paths = [r.path for r in runnables if r.has_tools]
    test_data_dir = _find_test_data(tool_paths, **kwds)
    tool_data_table = _find_tool_data_table(tool_paths,
                                            test_data_dir=test_data_dir,
                                            **kwds)
    galaxy_root = _check_galaxy(ctx, **kwds)
    # No explicit install flag here: absence of a Galaxy root implies install.
    install_galaxy = galaxy_root is None

    # Duplicate block in docker variant above.
    if kwds.get("mulled_containers", False) and not kwds.get("docker", False):
        if ctx.get_option_source("docker") != OptionSource.cli:
            kwds["docker"] = True
        else:
            raise Exception(
                "Specified no docker and mulled containers together.")

    with _config_directory(ctx, **kwds) as config_directory:
        def config_join(*args):
            return os.path.join(config_directory, *args)

        latest_galaxy = False
        install_env = {}
        if install_galaxy:
            _build_eggs_cache(ctx, install_env, kwds)
            _install_galaxy(ctx, config_directory, install_env, kwds)
            latest_galaxy = True
            galaxy_root = config_join("galaxy-dev")

        server_name = "planemo%d" % random.randint(0, 100000)
        _handle_dependency_resolution(ctx, config_directory, kwds)
        _handle_job_config_file(config_directory, server_name, kwds)
        _handle_job_metrics(config_directory, kwds)
        file_path = kwds.get("file_path") or config_join("files")
        _ensure_directory(file_path)
        tool_dependency_dir = kwds.get("tool_dependency_dir") or config_join(
            "deps")
        _ensure_directory(tool_dependency_dir)
        shed_tool_conf = kwds.get("shed_tool_conf") or config_join(
            "shed_tools_conf.xml")
        all_tool_paths = list(tool_paths) + list(kwds.get("extra_tools", []))
        tool_definition = _tool_conf_entry_for(all_tool_paths)
        empty_tool_conf = config_join("empty_tool_conf.xml")
        tool_conf = config_join("tool_conf.xml")
        shed_data_manager_config_file = config_join(
            "shed_data_manager_conf.xml")
        shed_tool_path = kwds.get("shed_tool_path") or config_join(
            "shed_tools")
        _ensure_directory(shed_tool_path)
        sheds_config_path = _configure_sheds_config_file(
            ctx, config_directory, **kwds)
        database_location = config_join("galaxy.sqlite")
        master_api_key = _get_master_api_key(kwds)
        dependency_dir = os.path.join(config_directory, "deps")
        preseeded_database = attempt_database_preseed(
            galaxy_root,
            database_location,
            latest_galaxy=latest_galaxy,
            **kwds)
        _ensure_directory(shed_tool_path)
        port = _get_port(kwds)
        # Values substituted into the various *_TEMPLATE config files below.
        template_args = dict(
            port=port,
            host=kwds.get("host", "127.0.0.1"),
            server_name=server_name,
            temp_directory=config_directory,
            shed_tool_path=shed_tool_path,
            database_location=database_location,
            tool_definition=tool_definition,
            tool_conf=tool_conf,
            debug=kwds.get("debug", "true"),
            id_secret=kwds.get("id_secret", "test_secret"),
            log_level="DEBUG" if ctx.verbose else "INFO",
        )
        tool_config_file = "%s,%s" % (tool_conf, shed_tool_conf)
        # Setup both galaxy_email and older test user [email protected]
        # as admins for command_line, etc...
        properties = _shared_galaxy_properties(config_directory, kwds, for_tests=for_tests)
        properties.update(
            dict(
                server_name="main",
                ftp_upload_dir_template="${ftp_upload_dir}",
                ftp_upload_purge="False",
                ftp_upload_dir=test_data_dir or os.path.abspath('.'),
                ftp_upload_site="Test Data",
                tool_dependency_dir=dependency_dir,
                file_path=file_path,
                new_file_path="${temp_directory}/tmp",
                tool_config_file=tool_config_file,
                tool_sheds_config_file=sheds_config_path,
                manage_dependency_relationships="False",
                job_working_directory="${temp_directory}/job_working_directory",
                template_cache_path="${temp_directory}/compiled_templates",
                citation_cache_type="file",
                citation_cache_data_dir="${temp_directory}/citations/data",
                citation_cache_lock_dir="${temp_directory}/citations/lock",
                database_auto_migrate="True",
                enable_beta_tool_formats="True",
                id_secret="${id_secret}",
                log_level="${log_level}",
                debug="${debug}",
                watch_tools="auto",
                default_job_shell="/bin/bash",  # For conda dependency resolution
                tool_data_table_config_path=tool_data_table,
                integrated_tool_panel_config=(
                    "${temp_directory}/"
                    "integrated_tool_panel_conf.xml"),
                # Use in-memory database for kombu to avoid database contention
                # during tests.
                amqp_internal_connection="sqlalchemy+sqlite://",
                migrated_tools_config=empty_tool_conf,
                test_data_dir=test_data_dir,  # TODO: make gx respect this
                shed_data_manager_config_file=shed_data_manager_config_file,
            ))
        _handle_container_resolution(ctx, kwds, properties)
        write_file(config_join("logging.ini"), _sub(LOGGING_TEMPLATE, template_args))
        if not for_tests:
            properties["database_connection"] = _database_connection(
                database_location, **kwds)

        _handle_kwd_overrides(properties, kwds)

        # TODO: consider following property
        # watch_tool = False
        # datatypes_config_file = config/datatypes_conf.xml
        # welcome_url = /static/welcome.html
        # logo_url = /
        # sanitize_all_html = True
        # serve_xss_vulnerable_mimetypes = False
        # track_jobs_in_database = None
        # outputs_to_working_directory = False
        # retry_job_output_collection = 0

        env = _build_env_for_galaxy(properties, template_args)
        env.update(install_env)
        _build_test_env(properties, env)
        env['GALAXY_TEST_SHED_TOOL_CONF'] = shed_tool_conf

        # No need to download twice - would GALAXY_TEST_DATABASE_CONNECTION
        # work?
        if preseeded_database:
            env["GALAXY_TEST_DB_TEMPLATE"] = os.path.abspath(database_location)
        env["GALAXY_TEST_UPLOAD_ASYNC"] = "false"
        env["GALAXY_TEST_LOGGING_CONFIG"] = config_join("logging.ini")
        env["GALAXY_DEVELOPMENT_ENVIRONMENT"] = "1"
        web_config = _sub(WEB_SERVER_CONFIG_TEMPLATE, template_args)
        write_file(config_join("galaxy.ini"), web_config)
        tool_conf_contents = _sub(TOOL_CONF_TEMPLATE, template_args)
        write_file(tool_conf, tool_conf_contents)
        write_file(empty_tool_conf, EMPTY_TOOL_CONF_TEMPLATE)
        shed_tool_conf_contents = _sub(SHED_TOOL_CONF_TEMPLATE, template_args)
        # Write a new shed_tool_conf.xml if needed.
        write_file(shed_tool_conf, shed_tool_conf_contents, force=False)
        write_file(shed_data_manager_config_file, SHED_DATA_MANAGER_CONF_TEMPLATE)
        pid_file = kwds.get("pid_file") or config_join("galaxy.pid")
        yield LocalGalaxyConfig(
            config_directory,
            env,
            test_data_dir,
            port,
            server_name,
            master_api_key,
            runnables,
            galaxy_root,
            pid_file,
        )
def local_galaxy_config(ctx, runnables, for_tests=False, **kwds):
    """Set up a ``GalaxyConfig`` in an auto-cleaned context.

    Variant using ``_shared_galaxy_properties(kwds)``; installs Galaxy when
    no root is found, writes config files and yields a ``LocalGalaxyConfig``.

    Fix: removed a duplicated ``tool_conf = config_join("tool_conf.xml")``
    statement (identical assignment appeared twice in a row).

    :param ctx: planemo application context.
    :param runnables: runnable objects contributing tool paths.
    :param for_tests: when True, leave database_connection unset.
    :param kwds: raw CLI options.
    """
    tool_paths = [r.path for r in runnables if r.has_tools]
    test_data_dir = _find_test_data(tool_paths, **kwds)
    tool_data_table = _find_tool_data_table(
        tool_paths,
        test_data_dir=test_data_dir,
        **kwds
    )
    galaxy_root = _check_galaxy(ctx, **kwds)
    install_galaxy = galaxy_root is None

    # Duplicate block in docker variant above.
    if kwds.get("mulled_containers", False) and not kwds.get("docker", False):
        if ctx.get_option_source("docker") != OptionSource.cli:
            kwds["docker"] = True
        else:
            raise Exception("Specified no docker and mulled containers together.")

    with _config_directory(ctx, **kwds) as config_directory:
        def config_join(*args):
            return os.path.join(config_directory, *args)

        latest_galaxy = False
        install_env = {}
        if install_galaxy:
            _build_eggs_cache(ctx, install_env, kwds)
            _install_galaxy(ctx, config_directory, install_env, kwds)
            latest_galaxy = True
            galaxy_root = config_join("galaxy-dev")

        server_name = "planemo%d" % random.randint(0, 100000)
        _handle_dependency_resolution(ctx, config_directory, kwds)
        _handle_job_config_file(config_directory, server_name, kwds)
        _handle_job_metrics(config_directory, kwds)
        file_path = kwds.get("file_path") or config_join("files")
        _ensure_directory(file_path)
        tool_dependency_dir = kwds.get("tool_dependency_dir") or config_join("deps")
        _ensure_directory(tool_dependency_dir)
        shed_tool_conf = kwds.get("shed_tool_conf") or config_join("shed_tools_conf.xml")
        all_tool_paths = list(tool_paths) + list(kwds.get("extra_tools", []))
        tool_definition = _tool_conf_entry_for(all_tool_paths)
        empty_tool_conf = config_join("empty_tool_conf.xml")
        tool_conf = config_join("tool_conf.xml")
        shed_tool_path = kwds.get("shed_tool_path") or config_join("shed_tools")
        _ensure_directory(shed_tool_path)
        sheds_config_path = _configure_sheds_config_file(
            ctx, config_directory, **kwds
        )
        database_location = config_join("galaxy.sqlite")
        master_api_key = _get_master_api_key(kwds)
        dependency_dir = os.path.join(config_directory, "deps")
        preseeded_database = attempt_database_preseed(
            galaxy_root,
            database_location,
            latest_galaxy=latest_galaxy,
            **kwds
        )
        _ensure_directory(shed_tool_path)
        port = _get_port(kwds)
        # Values substituted into the various *_TEMPLATE config files below.
        template_args = dict(
            port=port,
            host=kwds.get("host", "127.0.0.1"),
            server_name=server_name,
            temp_directory=config_directory,
            shed_tool_path=shed_tool_path,
            database_location=database_location,
            tool_definition=tool_definition,
            tool_conf=tool_conf,
            debug=kwds.get("debug", "true"),
            id_secret=kwds.get("id_secret", "test_secret"),
            log_level=kwds.get("log_level", "DEBUG"),
        )
        tool_config_file = "%s,%s" % (tool_conf, shed_tool_conf)
        # Setup both galaxy_email and older test user [email protected]
        # as admins for command_line, etc...
        properties = _shared_galaxy_properties(kwds)
        properties.update(dict(
            server_name="main",
            ftp_upload_dir_template="${ftp_upload_dir}",
            ftp_upload_purge="False",
            ftp_upload_dir=test_data_dir or os.path.abspath('.'),
            ftp_upload_site="Test Data",
            tool_dependency_dir=dependency_dir,
            file_path=file_path,
            new_file_path="${temp_directory}/tmp",
            tool_config_file=tool_config_file,
            tool_sheds_config_file=sheds_config_path,
            manage_dependency_relationships="False",
            job_working_directory="${temp_directory}/job_working_directory",
            template_cache_path="${temp_directory}/compiled_templates",
            citation_cache_type="file",
            citation_cache_data_dir="${temp_directory}/citations/data",
            citation_cache_lock_dir="${temp_directory}/citations/lock",
            database_auto_migrate="True",
            enable_beta_tool_formats="True",
            id_secret="${id_secret}",
            log_level="${log_level}",
            debug="${debug}",
            watch_tools="auto",
            default_job_shell="/bin/bash",  # For conda dependency resolution
            tool_data_table_config_path=tool_data_table,
            integrated_tool_panel_config=("${temp_directory}/"
                                          "integrated_tool_panel_conf.xml"),
            # Use in-memory database for kombu to avoid database contention
            # during tests.
            amqp_internal_connection="sqlalchemy+sqlite://",
            migrated_tools_config=empty_tool_conf,
            test_data_dir=test_data_dir,  # TODO: make gx respect this
        ))
        _handle_container_resolution(ctx, kwds, properties)
        if not for_tests:
            properties["database_connection"] = _database_connection(database_location, **kwds)

        _handle_kwd_overrides(properties, kwds)

        # TODO: consider following property
        # watch_tool = False
        # datatypes_config_file = config/datatypes_conf.xml
        # welcome_url = /static/welcome.html
        # logo_url = /
        # sanitize_all_html = True
        # serve_xss_vulnerable_mimetypes = False
        # track_jobs_in_database = None
        # outputs_to_working_directory = False
        # retry_job_output_collection = 0

        env = _build_env_for_galaxy(properties, template_args)
        env.update(install_env)
        _build_test_env(properties, env)
        env['GALAXY_TEST_SHED_TOOL_CONF'] = shed_tool_conf

        # No need to download twice - would GALAXY_TEST_DATABASE_CONNECTION
        # work?
        if preseeded_database:
            env["GALAXY_TEST_DB_TEMPLATE"] = os.path.abspath(database_location)
        env["GALAXY_TEST_UPLOAD_ASYNC"] = "false"
        env["GALAXY_DEVELOPMENT_ENVIRONMENT"] = "1"
        web_config = _sub(WEB_SERVER_CONFIG_TEMPLATE, template_args)
        write_file(config_join("galaxy.ini"), web_config)
        tool_conf_contents = _sub(TOOL_CONF_TEMPLATE, template_args)
        write_file(tool_conf, tool_conf_contents)
        write_file(empty_tool_conf, EMPTY_TOOL_CONF_TEMPLATE)
        shed_tool_conf_contents = _sub(SHED_TOOL_CONF_TEMPLATE, template_args)
        # Write a new shed_tool_conf.xml if needed.
        write_file(shed_tool_conf, shed_tool_conf_contents, force=False)
        pid_file = kwds.get("pid_file") or config_join("galaxy.pid")
        yield LocalGalaxyConfig(
            config_directory,
            env,
            test_data_dir,
            port,
            server_name,
            master_api_key,
            runnables,
            galaxy_root,
            pid_file,
        )
def galaxy_config(ctx, tool_paths, for_tests=False, **kwds):
    """Set up a ``GalaxyConfig`` in an auto-cleaned context.

    Oldest visible variant: operates on raw ``tool_paths`` (not runnables),
    manages its own temporary config directory with try/finally cleanup, and
    yields a ``GalaxyConfig``.

    :param ctx: planemo application context.
    :param tool_paths: paths of tools to serve.
    :param for_tests: when True, leave database_connection unset.
    :param kwds: raw CLI options (config_directory, port, no_cleanup, ...).
    """
    test_data_dir = _find_test_data(tool_paths, **kwds)
    tool_data_table = _find_tool_data_table(tool_paths,
                                            test_data_dir=test_data_dir,
                                            **kwds)
    galaxy_root = _check_galaxy(ctx, **kwds)
    install_galaxy = galaxy_root is None
    config_directory = kwds.get("config_directory", None)

    # Closure reads config_directory late, so it picks up the mkdtemp()
    # reassignment below.
    def config_join(*args):
        return os.path.join(config_directory, *args)

    created_config_directory = False
    if not config_directory:
        created_config_directory = True
        config_directory = mkdtemp()
    try:
        latest_galaxy = False
        install_env = {}
        if install_galaxy:
            _build_eggs_cache(ctx, install_env, kwds)
            _install_galaxy(ctx, config_directory, install_env, kwds)
            latest_galaxy = True
            galaxy_root = config_join("galaxy-dev")

        _handle_dependency_resolution(config_directory, kwds)
        _handle_job_metrics(config_directory, kwds)
        file_path = kwds.get("file_path") or config_join("files")
        _ensure_directory(file_path)
        tool_dependency_dir = kwds.get("tool_dependency_dir") or config_join(
            "deps")
        _ensure_directory(tool_dependency_dir)
        shed_tool_conf = kwds.get("shed_tool_conf") or config_join(
            "shed_tools_conf.xml")
        tool_definition = _tool_conf_entry_for(tool_paths)
        empty_tool_conf = config_join("empty_tool_conf.xml")
        tool_conf = config_join("tool_conf.xml")
        database_location = config_join("galaxy.sqlite")
        shed_tool_path = kwds.get("shed_tool_path") or config_join(
            "shed_tools")
        _ensure_directory(shed_tool_path)
        sheds_config_path = _configure_sheds_config_file(
            ctx, config_directory, **kwds)
        master_api_key = kwds.get("master_api_key", DEFAULT_MASTER_API_KEY)
        dependency_dir = os.path.join(config_directory, "deps")
        preseeded_database = attempt_database_preseed(
            galaxy_root,
            database_location,
            latest_galaxy=latest_galaxy,
            **kwds)
        _ensure_directory(shed_tool_path)
        server_name = "planemo%d" % random.randint(0, 100000)
        port = int(kwds.get("port", 9090))
        # Values substituted into the various *_TEMPLATE config files below.
        template_args = dict(
            port=port,
            host=kwds.get("host", "127.0.0.1"),
            server_name=server_name,
            temp_directory=config_directory,
            shed_tool_path=shed_tool_path,
            database_location=database_location,
            tool_definition=tool_definition,
            tool_conf=tool_conf,
            debug=kwds.get("debug", "true"),
            master_api_key=master_api_key,
            id_secret=kwds.get("id_secret", "test_secret"),
            log_level=kwds.get("log_level", "DEBUG"),
        )
        tool_config_file = "%s,%s" % (tool_conf, shed_tool_conf)
        user_email = kwds.get("galaxy_email")
        properties = dict(
            single_user=user_email,
            admin_users=user_email,
            ftp_upload_dir_template="${ftp_upload_dir}",
            ftp_upload_purge="False",
            ftp_upload_dir=test_data_dir or os.path.abspath('.'),
            ftp_upload_site="Test Data",
            tool_dependency_dir=dependency_dir,
            file_path=file_path,
            new_file_path="${temp_directory}/tmp",
            tool_config_file=tool_config_file,
            tool_sheds_config_file=sheds_config_path,
            check_migrate_tools="False",
            manage_dependency_relationships="False",
            job_working_directory="${temp_directory}/job_working_directory",
            template_cache_path="${temp_directory}/compiled_templates",
            citation_cache_type="file",
            citation_cache_data_dir="${temp_directory}/citations/data",
            citation_cache_lock_dir="${temp_directory}/citations/lock",
            collect_outputs_from="job_working_directory",
            database_auto_migrate="True",
            cleanup_job="never",
            master_api_key="${master_api_key}",
            enable_beta_tool_formats="True",
            id_secret="${id_secret}",
            log_level="${log_level}",
            debug="${debug}",
            watch_tools="auto",
            default_job_shell="/bin/bash",  # For conda dependency resolution
            tool_data_table_config_path=tool_data_table,
            integrated_tool_panel_config=("${temp_directory}/"
                                          "integrated_tool_panel_conf.xml"),
            # Use in-memory database for kombu to avoid database contention
            # during tests.
            amqp_internal_connection="sqlalchemy+sqlite://",
            migrated_tools_config=empty_tool_conf,
            test_data_dir=test_data_dir,  # TODO: make gx respect this
        )
        if not for_tests:
            properties["database_connection"] = _database_connection(
                database_location, **kwds)

        _handle_kwd_overrides(properties, kwds)

        # TODO: consider following property
        # watch_tool = False
        # datatypes_config_file = config/datatypes_conf.xml
        # welcome_url = /static/welcome.html
        # logo_url = /
        # sanitize_all_html = True
        # serve_xss_vulnerable_mimetypes = False
        # track_jobs_in_database = None
        # outputs_to_working_directory = False
        # retry_job_output_collection = 0

        env = _build_env_for_galaxy(properties, template_args)
        env.update(install_env)
        _build_test_env(properties, env)
        env['GALAXY_TEST_SHED_TOOL_CONF'] = shed_tool_conf

        # No need to download twice - would GALAXY_TEST_DATABASE_CONNECTION
        # work?
        if preseeded_database:
            env["GALAXY_TEST_DB_TEMPLATE"] = os.path.abspath(database_location)
        env["GALAXY_TEST_UPLOAD_ASYNC"] = "false"
        env["GALAXY_DEVELOPMENT_ENVIRONMENT"] = "1"
        web_config = _sub(WEB_SERVER_CONFIG_TEMPLATE, template_args)
        write_file(config_join("galaxy.ini"), web_config)
        tool_conf_contents = _sub(TOOL_CONF_TEMPLATE, template_args)
        write_file(tool_conf, tool_conf_contents)
        write_file(empty_tool_conf, EMPTY_TOOL_CONF_TEMPLATE)
        shed_tool_conf_contents = _sub(SHED_TOOL_CONF_TEMPLATE, template_args)
        # Write a new shed_tool_conf.xml if needed.
        write_file(shed_tool_conf, shed_tool_conf_contents, force=False)
        pid_file = kwds.get("pid_file") or config_join("galaxy.pid")
        yield GalaxyConfig(
            galaxy_root,
            pid_file,
            config_directory,
            env,
            test_data_dir,
            port,
            server_name,
            master_api_key,
        )
    finally:
        # Only remove the config directory if we created it ourselves and
        # the user did not ask to keep it around.
        cleanup = not kwds.get("no_cleanup", False)
        if created_config_directory and cleanup:
            shutil.rmtree(config_directory)
def local_galaxy_config(ctx, runnables, for_tests=False, **kwds):
    """Set up a ``GalaxyConfig`` in an auto-cleaned context.

    Variant close to the L31 version but without the kombu
    ``amqp_internal_connection`` property in the merged dict.

    :param ctx: planemo application context.
    :param runnables: runnable objects contributing tool paths.
    :param for_tests: when True, leave database_connection unset.
    :param kwds: raw CLI options.
    """
    tool_paths = [r.path for r in runnables if r.has_tools]
    test_data_dir = _find_test_data(tool_paths, **kwds)
    tool_data_table = _find_tool_data_table(tool_paths,
                                            test_data_dir=test_data_dir,
                                            **kwds)
    galaxy_root = _find_galaxy_root(ctx, **kwds)
    install_galaxy = kwds.get("install_galaxy", False)
    if galaxy_root is not None:
        # An empty pre-existing directory is treated as absent; a non-empty
        # one blocks reinstalling.
        if os.path.isdir(galaxy_root) and not os.listdir(galaxy_root):
            os.rmdir(galaxy_root)
        if os.path.isdir(galaxy_root) and install_galaxy:
            raise Exception(
                "%s is an existing non-empty directory, cannot install Galaxy again" % galaxy_root)

    # Duplicate block in docker variant above.
    if kwds.get("mulled_containers", False) and not kwds.get("docker", False):
        if ctx.get_option_source("docker") != OptionSource.cli:
            kwds["docker"] = True
        else:
            raise Exception(
                "Specified no docker and mulled containers together.")

    with _config_directory(ctx, **kwds) as config_directory:
        def config_join(*args):
            return os.path.join(config_directory, *args)

        install_env = {}
        if kwds.get('galaxy_skip_client_build', True):
            install_env['GALAXY_SKIP_CLIENT_BUILD'] = '1'
        if galaxy_root is None:
            galaxy_root = config_join("galaxy-dev")
        if not os.path.isdir(galaxy_root):
            _build_eggs_cache(ctx, install_env, kwds)
            _install_galaxy(ctx, galaxy_root, install_env, kwds)

        if parse_version(
                kwds.get('galaxy_python_version') or DEFAULT_PYTHON_VERSION) >= parse_version('3'):
            # on python 3 we use gunicorn,
            # which requires 'main' as server name
            server_name = 'main'
        else:
            server_name = "planemo%d" % random.randint(0, 100000)
        # Once we don't have to support earlier than 18.01 - try putting these files
        # somewhere better than with Galaxy.
        log_file = "%s.log" % server_name
        pid_file = "%s.pid" % server_name
        ensure_dependency_resolvers_conf_configured(
            ctx, kwds, os.path.join(config_directory, "resolvers_conf.xml"))
        _handle_job_config_file(config_directory, server_name, kwds)
        _handle_job_metrics(config_directory, kwds)
        file_path = kwds.get("file_path") or config_join("files")
        _ensure_directory(file_path)
        tool_dependency_dir = kwds.get("tool_dependency_dir") or config_join(
            "deps")
        _ensure_directory(tool_dependency_dir)
        shed_tool_conf = kwds.get("shed_tool_conf") or config_join(
            "shed_tools_conf.xml")
        all_tool_paths = _all_tool_paths(runnables, **kwds)
        empty_tool_conf = config_join("empty_tool_conf.xml")
        tool_conf = config_join("tool_conf.xml")
        shed_data_manager_config_file = config_join(
            "shed_data_manager_conf.xml")
        shed_tool_path = kwds.get("shed_tool_path") or config_join(
            "shed_tools")
        _ensure_directory(shed_tool_path)
        sheds_config_path = _configure_sheds_config_file(
            ctx, config_directory, **kwds)
        database_location = config_join("galaxy.sqlite")
        master_api_key = _get_master_api_key(kwds)
        dependency_dir = os.path.join(config_directory, "deps")
        preseeded_database = attempt_database_preseed(ctx,
                                                      galaxy_root,
                                                      database_location,
                                                      **kwds)
        _ensure_directory(shed_tool_path)
        port = _get_port(kwds)
        # Values substituted into the various *_TEMPLATE config files below.
        template_args = dict(
            port=port,
            host=kwds.get("host", "127.0.0.1"),
            server_name=server_name,
            temp_directory=config_directory,
            shed_tool_path=shed_tool_path,
            database_location=database_location,
            tool_conf=tool_conf,
            debug=kwds.get("debug", "true"),
            id_secret=kwds.get("id_secret", "test_secret"),
            log_level="DEBUG" if ctx.verbose else "INFO",
        )
        tool_config_file = "%s,%s" % (tool_conf, shed_tool_conf)
        # Setup both galaxy_email and older test user [email protected]
        # as admins for command_line, etc...
        properties = _shared_galaxy_properties(config_directory, kwds, for_tests=for_tests)
        properties.update(
            dict(
                server_name="main",
                ftp_upload_dir_template="${ftp_upload_dir}",
                ftp_upload_purge="False",
                ftp_upload_dir=test_data_dir or os.path.abspath('.'),
                ftp_upload_site="Test Data",
                check_upload_content="False",
                tool_dependency_dir=dependency_dir,
                file_path=file_path,
                new_file_path="${temp_directory}/tmp",
                tool_config_file=tool_config_file,
                tool_sheds_config_file=sheds_config_path,
                manage_dependency_relationships="False",
                job_working_directory="${temp_directory}/job_working_directory",
                template_cache_path="${temp_directory}/compiled_templates",
                citation_cache_type="file",
                citation_cache_data_dir="${temp_directory}/citations/data",
                citation_cache_lock_dir="${temp_directory}/citations/lock",
                database_auto_migrate="True",
                enable_beta_tool_formats="True",
                id_secret="${id_secret}",
                log_level="${log_level}",
                debug="${debug}",
                watch_tools="auto",
                default_job_shell="/bin/bash",  # For conda dependency resolution
                tool_data_table_config_path=tool_data_table,
                integrated_tool_panel_config=(
                    "${temp_directory}/"
                    "integrated_tool_panel_conf.xml"),
                migrated_tools_config=empty_tool_conf,
                test_data_dir=test_data_dir,  # TODO: make gx respect this
                shed_data_manager_config_file=shed_data_manager_config_file,
            ))
        _handle_container_resolution(ctx, kwds, properties)
        write_file(config_join("logging.ini"), _sub(LOGGING_TEMPLATE, template_args))
        if not for_tests:
            properties["database_connection"] = _database_connection(
                database_location, **kwds)

        _handle_kwd_overrides(properties, kwds)

        # TODO: consider following property
        # watch_tool = False
        # datatypes_config_file = config/datatypes_conf.xml
        # welcome_url = /static/welcome.html
        # logo_url = /
        # sanitize_all_html = True
        # serve_xss_vulnerable_mimetypes = False
        # track_jobs_in_database = None
        # outputs_to_working_directory = False
        # retry_job_output_collection = 0

        env = _build_env_for_galaxy(properties, template_args)
        env.update(install_env)
        _build_test_env(properties, env)
        env['GALAXY_TEST_SHED_TOOL_CONF'] = shed_tool_conf

        # No need to download twice - would GALAXY_TEST_DATABASE_CONNECTION
        # work?
        if preseeded_database:
            env["GALAXY_TEST_DB_TEMPLATE"] = os.path.abspath(database_location)
        env["GALAXY_TEST_UPLOAD_ASYNC"] = "false"
        env["GALAXY_TEST_LOGGING_CONFIG"] = config_join("logging.ini")
        env["GALAXY_DEVELOPMENT_ENVIRONMENT"] = "1"
        # Following are needed in 18.01 to prevent Galaxy from changing log and pid.
        # https://github.com/galaxyproject/planemo/issues/788
        env["GALAXY_LOG"] = log_file
        env["GALAXY_PID"] = pid_file
        web_config = _sub(WEB_SERVER_CONFIG_TEMPLATE, template_args)
        write_file(config_join("galaxy.ini"), web_config)
        _write_tool_conf(ctx, all_tool_paths, tool_conf)
        write_file(empty_tool_conf, EMPTY_TOOL_CONF_TEMPLATE)
        shed_tool_conf_contents = _sub(SHED_TOOL_CONF_TEMPLATE, template_args)
        # Write a new shed_tool_conf.xml if needed.
        write_file(shed_tool_conf, shed_tool_conf_contents, force=False)
        write_file(shed_data_manager_config_file, SHED_DATA_MANAGER_CONF_TEMPLATE)
        yield LocalGalaxyConfig(
            ctx,
            config_directory,
            env,
            test_data_dir,
            port,
            server_name,
            master_api_key,
            runnables,
            galaxy_root,
            kwds,
        )
def galaxy_config(ctx, tool_paths, for_tests=False, **kwds):
    """Set up a (possibly temporary) Galaxy configuration directory and yield a config object.

    Builds everything a local Galaxy instance needs to serve/test the tools in
    ``tool_paths``: tool conf files, a job/dependency setup, a seeded SQLite
    database (downloaded when possible), a rendered ``galaxy.ini``, and an
    environment-variable dict.  Yields a ``GalaxyConfig`` wrapping all of it.

    NOTE(review): this is a generator that yields exactly once and cleans up in
    ``finally`` — presumably decorated with ``@contextlib.contextmanager`` at the
    definition site (decorator not visible in this chunk; confirm).

    :param ctx: planemo application context (used for logging/installation helpers).
    :param tool_paths: tool file paths used to derive test data, data tables and
        the tool conf entries.
    :param for_tests: when False, a ``database_connection`` property pointing at
        the local SQLite file is added to the generated properties.
    :param kwds: assorted CLI options; keys read here include ``config_directory``,
        ``master_api_key``, ``port``, ``debug``, ``id_secret``, ``log_level`` and
        ``no_cleanup``.
    """
    test_data_dir = _find_test_data(tool_paths, **kwds)
    tool_data_table = _find_tool_data_table(
        tool_paths,
        test_data_dir=test_data_dir,
        **kwds
    )
    # A galaxy_root of None means no existing Galaxy checkout was found and
    # one must be installed into the config directory below.
    galaxy_root = _check_galaxy(ctx, **kwds)
    install_galaxy = galaxy_root is None
    config_directory = kwds.get("config_directory", None)

    def config_join(*args):
        # Helper: path relative to the (possibly just-created) config directory.
        return os.path.join(config_directory, *args)

    # Track whether we created the directory ourselves so the finally block
    # only removes directories this function owns.
    created_config_directory = False
    if not config_directory:
        created_config_directory = True
        config_directory = mkdtemp()
    try:
        latest_galaxy = False
        if install_galaxy:
            _install_galaxy(ctx, config_directory, kwds)
            latest_galaxy = True
            # _install_galaxy presumably checks out into "galaxy-dev" under the
            # config directory — TODO confirm against that helper.
            galaxy_root = config_join("galaxy-dev")
        _handle_dependency_resolution(config_directory, kwds)
        _handle_job_metrics(config_directory, kwds)
        tool_definition = _tool_conf_entry_for(tool_paths)
        empty_tool_conf = config_join("empty_tool_conf.xml")
        shed_tool_conf = _shed_tool_conf(install_galaxy, config_directory)
        tool_conf = config_join("tool_conf.xml")
        database_location = config_join("galaxy.sqlite")
        shed_tools_path = config_join("shed_tools")
        sheds_config_path = _configure_sheds_config_file(
            config_directory, **kwds)
        preseeded_database = True
        master_api_key = kwds.get("master_api_key", "test_key")
        dependency_dir = os.path.join(config_directory, "deps")
        try:
            # Try to seed the SQLite database from a downloaded template so
            # Galaxy does not have to run every migration from scratch.
            _download_database_template(galaxy_root, database_location, latest=latest_galaxy)
        except Exception as e:
            print(e)
            # No network access - just roll forward from null.
            preseeded_database = False
        os.makedirs(shed_tools_path)
        # Randomized server name avoids collisions between concurrent planemo runs.
        server_name = "planemo%d" % random.randint(0, 100000)
        port = kwds.get("port", 9090)
        # Values substituted into the ini/conf templates via _sub below.
        template_args = dict(
            port=port,
            host="127.0.0.1",
            server_name=server_name,
            temp_directory=config_directory,
            shed_tools_path=shed_tools_path,
            database_location=database_location,
            tool_definition=tool_definition,
            tool_conf=tool_conf,
            debug=kwds.get("debug", "true"),
            master_api_key=master_api_key,
            id_secret=kwds.get("id_secret", "test_secret"),
            log_level=kwds.get("log_level", "DEBUG"),
        )
        tool_config_file = "%s,%s" % (tool_conf, shed_tool_conf)
        # Galaxy app properties; "${...}" placeholders are resolved by Galaxy's
        # own config interpolation, not by this function.
        properties = dict(
            tool_dependency_dir=dependency_dir,
            file_path="${temp_directory}/files",
            new_file_path="${temp_directory}/tmp",
            tool_config_file=tool_config_file,
            tool_sheds_config_file=sheds_config_path,
            check_migrate_tools="False",
            manage_dependency_relationships="False",
            job_working_directory="${temp_directory}/job_working_directory",
            template_cache_path="${temp_directory}/compiled_templates",
            citation_cache_type="file",
            citation_cache_data_dir="${temp_directory}/citations/data",
            citation_cache_lock_dir="${temp_directory}/citations/lock",
            collect_outputs_from="job_working_directory",
            database_auto_migrate="True",
            cleanup_job="never",
            master_api_key="${master_api_key}",
            id_secret="${id_secret}",
            log_level="${log_level}",
            debug="${debug}",
            watch_tools="auto",
            tool_data_table_config_path=tool_data_table,
            integrated_tool_panel_config=("${temp_directory}/"
                                          "integrated_tool_panel_conf.xml"),
            # Use in-memory database for kombu to avoid database contention
            # during tests.
            amqp_internal_connection="sqlalchemy+sqlite://",
            migrated_tools_config=empty_tool_conf,
            test_data_dir=test_data_dir,  # TODO: make gx respect this
        )
        if not for_tests:
            properties["database_connection"] = \
                "sqlite:///${database_location}?isolation_level=IMMEDIATE"
        # User-supplied kwds may override any of the defaults set above.
        _handle_kwd_overrides(properties, kwds)

        # TODO: consider following property
        # watch_tool = False
        # datatypes_config_file = config/datatypes_conf.xml
        # welcome_url = /static/welcome.html
        # logo_url = /
        # sanitize_all_html = True
        # serve_xss_vulnerable_mimetypes = False
        # track_jobs_in_database = None
        # outputs_to_working_directory = False
        # retry_job_output_collection = 0

        env = _build_env_for_galaxy(properties, template_args)
        if install_galaxy:
            _build_eggs_cache(ctx, env, kwds)
        _build_test_env(properties, env)
        env['GALAXY_TEST_SHED_TOOL_CONF'] = shed_tool_conf
        # No need to download twice - would GALAXY_TEST_DATABASE_CONNECTION
        # work?
        if preseeded_database:
            env["GALAXY_TEST_DB_TEMPLATE"] = os.path.abspath(database_location)
        env["GALAXY_TEST_UPLOAD_ASYNC"] = "false"
        env["GALAXY_DEVELOPMENT_ENVIRONMENT"] = "1"
        # Render and write out the server/tool configuration files.
        web_config = _sub(WEB_SERVER_CONFIG_TEMPLATE, template_args)
        write_file(config_join("galaxy.ini"), web_config)
        tool_conf_contents = _sub(TOOL_CONF_TEMPLATE, template_args)
        write_file(tool_conf, tool_conf_contents)
        write_file(empty_tool_conf, EMPTY_TOOL_CONF_TEMPLATE)
        shed_tool_conf_contents = _sub(SHED_TOOL_CONF_TEMPLATE, template_args)
        write_file(shed_tool_conf, shed_tool_conf_contents)
        yield GalaxyConfig(
            galaxy_root,
            config_directory,
            env,
            test_data_dir,
            port,
            server_name,
            master_api_key,
        )
    finally:
        # Only remove the config directory if this function created it and the
        # caller did not ask to keep it around (no_cleanup).
        cleanup = not kwds.get("no_cleanup", False)
        if created_config_directory and cleanup:
            shutil.rmtree(config_directory)