def cli(ctx, workflow_identifier, output=None, force=False, **kwds):
    """Convert Format 2 workflows to native Galaxy workflows, and vice-versa.

    ``.ga`` inputs are converted in-process to Format 2 YAML; any other
    identifier is round-tripped through a served Galaxy instance to export
    the native JSON representation.
    """
    assert is_galaxy_engine(**kwds)
    # Dependency resolution is irrelevant for a pure format conversion.
    kwds["no_dependency_resolution"] = True
    if workflow_identifier.endswith(".ga"):
        # Native (.ga JSON) -> Format 2 YAML; no Galaxy server needed.
        if output is None:
            output = os.path.splitext(workflow_identifier)[0] + ".gxwf.yml"
        with open(workflow_identifier, "r") as f:
            workflow_dict = json.load(f)
        format2_wrapper = from_galaxy_native(workflow_dict, json_wrapper=True)
        with open(output, "w") as f:
            f.write(format2_wrapper["yaml_content"])
    else:
        # Format 2 -> native: upload to a served Galaxy and export its JSON.
        if output is None:
            output = os.path.splitext(workflow_identifier)[0] + ".ga"
        runnable = for_path(workflow_identifier)
        with engine_context(ctx, **kwds) as galaxy_engine:
            with galaxy_engine.ensure_runnables_served([runnable]) as config:
                workflow_id = config.workflow_id(workflow_identifier)
                output_dict = config.gi.workflows.export_workflow_dict(workflow_id)
                # indent/sort_keys keep the exported JSON diff-friendly.
                output_contents = json.dumps(output_dict, indent=4, sort_keys=True)
                write_file(output, output_contents, force=force)
def cli(ctx, workflow_path, output=None, force=False, **kwds):
    """Convert Format 2 workflows to native Galaxy workflows, and vice-versa.

    Despite the historical one-line summary, this command is bidirectional:
    ``.ga`` files are converted to Format 2 YAML (first branch), everything
    else is exported as native Galaxy JSON via a served Galaxy instance.
    """
    assert is_galaxy_engine(**kwds)
    # Dependency resolution is irrelevant for a pure format conversion.
    kwds["no_dependency_resolution"] = True
    if workflow_path.endswith(".ga"):
        # Native (.ga JSON) -> Format 2 YAML; no Galaxy server needed.
        if output is None:
            output = os.path.splitext(workflow_path)[0] + ".gxwf.yml"
        with open(workflow_path, "r") as f:
            workflow_dict = json.load(f)
        format2_wrapper = from_galaxy_native(workflow_dict, json_wrapper=True)
        with open(output, "w") as f:
            f.write(format2_wrapper["yaml_content"])
    else:
        # Format 2 -> native: upload to a served Galaxy and export its JSON.
        if output is None:
            output = os.path.splitext(workflow_path)[0] + ".ga"
        runnable = for_path(workflow_path)
        with engine_context(ctx, **kwds) as galaxy_engine:
            with galaxy_engine.ensure_runnables_served([runnable]) as config:
                workflow_id = config.workflow_id(workflow_path)
                output_dict = config.gi.workflows.export_workflow_dict(workflow_id)
                output_contents = json.dumps(output_dict)
                write_file(output, output_contents, force=force)
def test_tutorial_create_hands_on_tutorial():
    """Test :func:`planemo.training.tutorial.tutorial.create_hands_on_tutorial`."""
    tuto = Tutorial(
        training=training,
        topic=topic)
    os.makedirs(tuto.wf_dir)
    # with init_wf_id and no Galaxy URL: should refuse to proceed
    tuto.init_wf_id = 'ID'
    tuto.training.galaxy_url = None
    exp_exception = "No Galaxy URL given"
    with assert_raises_regexp(Exception, exp_exception):
        tuto.create_hands_on_tutorial(CTX)
    # with init_wf_id and no Galaxy API key: should refuse to proceed
    tuto.init_wf_id = 'ID'
    tuto.training.galaxy_url = 'http://%s:%s' % (KWDS['host'], KWDS['port'])
    tuto.training.galaxy_api_key = None
    exp_exception = "No API key to access the given Galaxy instance"
    with assert_raises_regexp(Exception, exp_exception):
        tuto.create_hands_on_tutorial(CTX)
    # with init_wf_id against a served Galaxy instance
    assert is_galaxy_engine(**KWDS)
    with engine_context(CTX, **KWDS) as galaxy_engine:
        with galaxy_engine.ensure_runnables_served([RUNNABLE]) as config:
            tuto.init_wf_id = config.workflow_id(WF_FP)
            tuto.training.galaxy_api_key = config.user_api_key
            tuto.create_hands_on_tutorial(CTX)
            assert os.path.exists(tuto.tuto_fp)
            os.remove(tuto.tuto_fp)
    # with init_wf_fp: workflow read from a local file, no server needed
    tuto.init_wf_id = None
    tuto.init_wf_fp = WF_FP
    tuto.create_hands_on_tutorial(CTX)
    assert os.path.exists(tuto.tuto_fp)
    # clean up the topic tree created by the tutorial scaffolding
    shutil.rmtree("topics")
def get_hands_on_boxes_from_local_galaxy(kwds, wf_filepath, ctx):
    """Serve a local Galaxy and get the workflow dictionary.

    The workflow at ``wf_filepath`` is uploaded to a temporarily served
    Galaxy, exported back as a dict, and rendered into tutorial hands-on
    markdown via ``format_wf_steps``.
    """
    assert is_galaxy_engine(**kwds)
    runnable = for_path(wf_filepath)
    tuto_body = ''
    with engine_context(ctx, **kwds) as galaxy_engine:
        with galaxy_engine.ensure_runnables_served([runnable]) as config:
            workflow_id = config.workflow_id(wf_filepath)
            wf = config.gi.workflows.export_workflow_dict(workflow_id)
            tuto_body = format_wf_steps(wf, config.gi)
    return tuto_body
def test_get_hands_on_boxes_from_running_galaxy():
    """Test :func:`planemo.training.tutorial.get_hands_on_boxes_from_running_galaxy`."""
    assert is_galaxy_engine(**KWDS)
    galaxy_url = 'http://%s:%s' % (KWDS['host'], KWDS['port'])
    with engine_context(CTX, **KWDS) as engine:
        with engine.ensure_runnables_served([RUNNABLE]) as served:
            # Resolve the uploaded workflow's id, then render it through the
            # running instance using the served user's API key.
            workflow_id = served.workflow_id(WF_FP)
            body = get_hands_on_boxes_from_running_galaxy(workflow_id, galaxy_url, served.user_api_key)
            # Each expected workflow step should have a hands-on sub-section.
            assert '## Sub-step with **FastQC**' in body
            assert '## Sub-step with **Query Tabular**' in body
            assert '## Sub-step with **Select first**' in body
def test_format_wf_steps():
    """Test :func:`planemo.training.tutorial.format_wf_steps`."""
    assert is_galaxy_engine(**KWDS)
    with engine_context(CTX, **KWDS) as galaxy_engine:
        with galaxy_engine.ensure_runnables_served([RUNNABLE]) as config:
            # Export the served workflow and render it to tutorial markdown.
            workflow_id = config.workflow_id(WF_FP)
            wf = config.gi.workflows.export_workflow_dict(workflow_id)
            body = format_wf_steps(wf, config.gi)
            # Each workflow step should produce a hands-on sub-section.
            assert '## Sub-step with **FastQC**' in body
            assert '## Sub-step with **Query Tabular**' in body
            assert '## Sub-step with **Select first**' in body
def get_hands_on_boxes_from_local_galaxy(kwds, wf_filepath, ctx):
    """Serve a local Galaxy and render the workflow as hands-on markdown."""
    assert is_galaxy_engine(**kwds)
    workflow_runnable = for_path(wf_filepath)
    body = ''
    with engine_context(ctx, **kwds) as engine:
        with engine.ensure_runnables_served([workflow_runnable]) as served:
            # Export the served workflow back as a dict and format it.
            wf_id = served.workflow_id(wf_filepath)
            workflow_dict = served.gi.workflows.export_workflow_dict(wf_id)
            body = format_wf_steps(workflow_dict, served.gi)
    return body
def cli(ctx, workflow_path, output=None, force=False, **kwds):
    """Open a synchronized Galaxy workflow editor."""
    assert is_galaxy_engine(**kwds)
    # Serve workflows from their on-disk path so edits stay synchronized.
    kwds["workflows_from_path"] = True
    workflow_runnable = for_path(workflow_path)
    with engine_context(ctx, **kwds) as engine:
        with engine.ensure_runnables_served([workflow_runnable]) as served_config:
            editor_url = "%s/workflow/editor?id=%s" % (
                served_config.galaxy_url,
                served_config.workflow_id(workflow_path),
            )
            # Launch the browser, then block so the served Galaxy stays up.
            click.launch(editor_url)
            sleep_for_serve()
def cli(ctx, workflow_identifier, output=None, force=False, **kwds):
    """Open a synchronized Galaxy workflow editor.

    ``workflow_identifier`` may be any runnable identifier resolvable by
    ``for_runnable_identifier`` (e.g. a path or a stored workflow id).
    """
    assert is_galaxy_engine(**kwds)
    runnable = for_runnable_identifier(ctx, workflow_identifier, kwds.get("profile"))
    # Serve workflows from their on-disk path so edits stay synchronized.
    kwds["workflows_from_path"] = True
    with engine_context(ctx, **kwds) as galaxy_engine:
        with galaxy_engine.ensure_runnables_served([runnable]) as config:
            workflow_id = config.workflow_id_for_runnable(runnable)
            url = "%s/workflow/editor?id=%s" % (config.galaxy_url, workflow_id)
            click.launch(url)
            # An external Galaxy keeps running on its own; only a locally
            # served instance needs this process to block to stay alive.
            if kwds["engine"] != "external_galaxy":
                sleep_for_serve()
def cli(ctx, workflow_path, output=None, force=False, **kwds):
    """Convert Format 2 workflow to a native Galaxy workflow."""
    assert is_galaxy_engine(**kwds)
    # Dependency resolution is irrelevant for a pure format conversion.
    kwds["no_dependency_resolution"] = True
    if output is None:
        output = os.path.splitext(workflow_path)[0] + ".ga"
    workflow_runnable = for_path(workflow_path)
    with engine_context(ctx, **kwds) as engine:
        with engine.serve_runnables([workflow_runnable]) as served_config:
            # Export the uploaded workflow back as native Galaxy JSON.
            wf_id = served_config.workflow_id(workflow_path)
            native_workflow = served_config.gi.workflows.export_workflow_dict(wf_id)
            write_file(output, json.dumps(native_workflow), force=force)
def cli(ctx, paths, **kwds):
    """Run specified tool's tests within Galaxy.

    All referenced tools (by default all the tools in the current working
    directory) will be tested and the results quickly summarized.

    To run these tests planemo needs a Galaxy instance to utilize, planemo
    will search parent directories to see if any is a Galaxy instance
    - but one can pick the Galaxy instance to use with the --galaxy_root
    option or force planemo to download a disposable instance with the
    ``--install_galaxy`` flag.

    In addition to the quick summary printed to the console - various
    detailed output summaries can be configured. ``tool_test_output.html``
    (settable via ``--test_output``) will contain a human consumable HTML
    report describing the test run. A JSON file (settable via
    ``--test_output_json`` and defaulting to ``tool_test_output.json``)
    will also be created. These files can be disabled by passing in empty
    arguments or globally by setting the values ``default_test_output``
    and/or ``default_test_output_json`` in ``~/.planemo.yml`` to ``null``.
    For continuous integration testing an xUnit-style report can be
    configured using the ``--test_output_xunit``.

    planemo uses temporarily generated config files and environment
    variables to attempt to shield this execution of Galaxy from manually
    launched runs against that same Galaxy root - but this may not be
    bullet proof yet so please be careful and do not try this against
    production Galaxy instances.
    """
    runnables = for_paths(paths)
    # Beta engine-based testing is needed whenever any runnable is not a
    # plain Galaxy tool/directory, or when a non-Galaxy engine is selected.
    # (Generator expression here - no need to materialize a list for any().)
    enable_beta_test = any(
        r.type not in [RunnableType.galaxy_tool, RunnableType.directory] for r in runnables
    )
    enable_beta_test = enable_beta_test or not is_galaxy_engine(**kwds)
    if enable_beta_test:
        info("Enable beta testing mode to test artifact that isn't a Galaxy tool.")
        with engine_context(ctx, **kwds) as engine:
            test_data = engine.test(runnables)
            return_value = handle_reports_and_summary(ctx, test_data.structured_data, kwds=kwds)
    else:
        # Classic path: run the tool tests inside a configured Galaxy.
        kwds["for_tests"] = True
        with galaxy_config(ctx, runnables, **kwds) as config:
            return_value = run_in_config(ctx, config, **kwds)
    ctx.exit(return_value)
def test_tutorial_export_workflow_file():
    """Test :func:`planemo.training.tutorial.tutorial.export_workflow_file`."""
    tuto = Tutorial(training=training, topic=topic)
    os.makedirs(tuto.wf_dir)
    # with workflow file path: export reads the local file directly
    tuto.init_wf_fp = WF_FP
    tuto.export_workflow_file()
    assert os.path.exists(tuto.wf_fp)
    # with workflow id: export must fetch from a served Galaxy instance
    tuto.init_wf_fp = None
    os.remove(tuto.wf_fp)
    assert is_galaxy_engine(**KWDS)
    galaxy_url = 'http://%s:%s' % (KWDS['host'], KWDS['port'])
    with engine_context(CTX, **KWDS) as galaxy_engine:
        with galaxy_engine.ensure_runnables_served([RUNNABLE]) as config:
            tuto.init_wf_id = config.workflow_id(WF_FP)
            tuto.training.galaxy_url = galaxy_url
            tuto.training.galaxy_api_key = config.user_api_key
            tuto.export_workflow_file()
            assert os.path.exists(tuto.wf_fp)
    # clean up the topic tree created by the tutorial scaffolding
    shutil.rmtree("topics")
def cli(ctx, workflow_identifier, output=None, force=False, **kwds):
    """Open a synchronized Galaxy workflow editor.

    ``workflow_identifier`` may be a local workflow file path or a Galaxy
    stored workflow id (possibly given via a profile alias).
    """
    assert is_galaxy_engine(**kwds)
    # Resolve profile-defined aliases before deciding path vs. id.
    workflow_identifier = translate_alias(ctx, workflow_identifier, kwds.get('profile'))
    if os.path.exists(workflow_identifier):
        runnable = for_path(workflow_identifier)
    else:  # assume galaxy workflow id
        runnable = for_id(workflow_identifier)
    # Serve workflows from their on-disk path so edits stay synchronized.
    kwds["workflows_from_path"] = True
    with engine_context(ctx, **kwds) as galaxy_engine:
        with galaxy_engine.ensure_runnables_served([runnable]) as config:
            workflow_id = config.workflow_id_for_runnable(runnable)
            url = "%s/workflow/editor?id=%s" % (config.galaxy_url, workflow_id)
            click.launch(url)
            # An external Galaxy keeps running on its own; only a locally
            # served instance needs this process to block to stay alive.
            if kwds["engine"] != "external_galaxy":
                sleep_for_serve()
def test_tutorial_export_workflow_file():
    """Test :func:`planemo.training.tutorial.tutorial.export_workflow_file`."""
    tutorial = Tutorial(training=training, topic=topic)
    os.makedirs(tutorial.wf_dir)
    # First case: export from a local workflow file path.
    tutorial.init_wf_fp = WF_FP
    tutorial.export_workflow_file()
    assert os.path.exists(tutorial.wf_fp)
    # Second case: export from a workflow id on a served Galaxy.
    tutorial.init_wf_fp = None
    os.remove(tutorial.wf_fp)
    assert is_galaxy_engine(**KWDS)
    galaxy_url = 'http://%s:%s' % (KWDS['host'], KWDS['port'])
    with engine_context(CTX, **KWDS) as engine:
        with engine.ensure_runnables_served([RUNNABLE]) as served:
            tutorial.init_wf_id = served.workflow_id(WF_FP)
            tutorial.training.galaxy_url = galaxy_url
            tutorial.training.galaxy_api_key = served.user_api_key
            tutorial.export_workflow_file()
            assert os.path.exists(tutorial.wf_fp)
    # Remove the topic tree created by the tutorial scaffolding.
    shutil.rmtree("topics")
def cli(ctx, paths, **kwds):  # noqa C901
    """Auto-update tool requirements by checking against Conda and updating if newer versions are available.

    Galaxy tools found under ``paths`` are updated in place; Galaxy
    workflows are refreshed against a (local or external) Galaxy instance.
    With ``--test``, tests are then run for the updated artifacts.
    """
    assert_tools = kwds.get("assert_tools", True)
    recursive = kwds.get("recursive", False)
    exit_codes = []
    modified_files = set()
    # Read the optional skiplist up front; use a context manager so the
    # file handle is closed (the original inline open() leaked it).
    if kwds['skiplist']:
        with open(kwds['skiplist']) as skiplist_file:
            tools_to_skip = [line.rstrip() for line in skiplist_file]
    else:
        tools_to_skip = []
    runnables = for_paths(paths)
    if any(r.type in {RunnableType.galaxy_tool, RunnableType.directory} for r in runnables):
        # update Galaxy tools
        for (tool_path, tool_xml) in yield_tool_sources_on_paths(ctx, paths, recursive):
            if tool_path.split('/')[-1] in tools_to_skip:
                info("Skipping tool %s" % tool_path)
                continue
            info("Auto-updating tool %s" % tool_path)
            try:
                updated = autoupdate.autoupdate_tool(
                    ctx, tool_path, modified_files=modified_files, **kwds)
                if updated:
                    modified_files.update(updated)
            except Exception as e:
                # Best-effort: report the failure and keep updating others.
                error(
                    f"{tool_path} could not be updated - the following error was raised: {e.__str__()}"
                )
            if handle_tool_load_error(tool_path, tool_xml):
                exit_codes.append(EXIT_CODE_GENERIC_FAILURE)
                continue
            else:
                exit_codes.append(EXIT_CODE_OK)
    workflows = [r for r in runnables if r.type == RunnableType.galaxy_workflow]
    modified_workflows = []
    if workflows:
        assert is_galaxy_engine(**kwds)
        # For a locally served engine, install the most recent tool revisions
        # so the workflow update can be computed against them.
        if kwds.get("engine") != "external_galaxy":
            kwds["install_most_recent_revision"] = True
            kwds["install_resolver_dependencies"] = False
            kwds["install_repository_dependencies"] = False
            kwds['shed_install'] = True
        with engine_context(ctx, **kwds) as galaxy_engine:
            with galaxy_engine.ensure_runnables_served(workflows) as config:
                for workflow in workflows:
                    if config.updated_repos.get(workflow.path) or kwds.get(
                            "engine") == "external_galaxy":
                        info("Auto-updating workflow %s" % workflow.path)
                        updated_workflow = autoupdate.autoupdate_wf(ctx, config, workflow)
                        if workflow.path.endswith(".ga"):
                            # Native workflow: rewrite the JSON in place.
                            with open(workflow.path, 'w') as f:
                                json.dump(updated_workflow, f, indent=4, sort_keys=True)
                        else:
                            # Format 2 workflow: convert back to YAML first.
                            format2_wrapper = from_galaxy_native(updated_workflow, json_wrapper=True)
                            with open(workflow.path, "w") as f:
                                f.write(format2_wrapper["yaml_content"])
                        modified_workflows.append(workflow.path)
                    else:
                        info(
                            "No newer tool versions were found, so the workflow was not updated."
                        )
    if kwds['test']:
        if not modified_files:
            info("No tools were updated, so no tests were run.")
        else:
            with temp_directory(dir=ctx.planemo_directory) as temp_path:
                # only test tools in updated directories
                modified_paths = [
                    path for path, tool_xml in yield_tool_sources_on_paths(ctx, paths, recursive)
                    if path in modified_files
                ]
                info(
                    f"Running tests for the following auto-updated tools: {', '.join(modified_paths)}"
                )
                runnables = for_paths(modified_paths + modified_workflows, temp_path=temp_path)
                kwds["engine"] = "galaxy"
                return_value = test_runnables(ctx, runnables, original_paths=paths, **kwds)
                exit_codes.append(return_value)
    return coalesce_return_codes(exit_codes, assert_at_least_one=assert_tools)