def write_tool_description(ctx, tool_description, **kwds):
    """Write a tool description to the file system guided by supplied CLI kwds.

    Writes the tool file (name from ``--tool`` or derived from the tool id),
    an optional ``*tests.yml`` companion, an optional ``macros.xml``, and
    copies any referenced test files into ``test-data``. Exits the context
    with status 1 if any target path is not writable.
    """
    tool_id = kwds.get("id")
    output = kwds.get("tool")
    if not output:
        extension = "cwl" if kwds.get("cwl") else "xml"
        output = "%s.%s" % (tool_id, extension)
    if not io.can_write_to_path(output, **kwds):
        ctx.exit(1)

    io.write_file(output, tool_description.contents)
    io.info("Tool written to %s" % output)

    test_contents = tool_description.test_contents
    if test_contents:
        # Match the separator style already used in the tool id.
        sep = "-" if "-" in tool_id else "_"
        # Reuse tool_id rather than a second kwds.get("id") lookup.
        tests_path = "%s%stests.yml" % (tool_id, sep)
        if not io.can_write_to_path(tests_path, **kwds):
            ctx.exit(1)
        io.write_file(tests_path, test_contents)
        io.info("Tool tests written to %s" % tests_path)

    macros = kwds["macros"]
    macros_file = "macros.xml"
    if macros and not os.path.exists(macros_file):
        io.write_file(macros_file, tool_description.macro_contents)
    elif macros:
        io.info(REUSING_MACROS_MESSAGE)

    if tool_description.test_files:
        if not os.path.exists("test-data"):
            io.info("No test-data directory, creating one.")
            io.shell("mkdir -p 'test-data'")
        for test_file in tool_description.test_files:
            io.info("Copying test-file %s" % test_file)
            # NOTE(review): single-quote shell quoting breaks for filenames
            # containing "'"; other variants of this helper use shutil.copy.
            io.shell("cp '%s' 'test-data'" % test_file)
def shed_init(ctx, path, **kwds):
    """Initialize a new shed repository."""
    if not os.path.exists(path):
        os.makedirs(path)
    shed_config_path = os.path.join(path, SHED_CONFIG_NAME)
    if not can_write_to_path(shed_config_path, **kwds):
        # .shed.yml exists and no --force sent.
        return 1
    if _create_shed_config(ctx, shed_config_path, **kwds):
        return 1
    from_workflow = kwds.get("from_workflow")
    if from_workflow:
        # Copy the workflow alongside the shed config unless already present.
        workflow_target = os.path.join(path, os.path.basename(from_workflow))
        if not os.path.exists(workflow_target):
            shutil.copyfile(from_workflow, workflow_target)
        repo_dependencies_path = os.path.join(path, REPO_DEPENDENCIES_CONFIG_NAME)
        if not can_write_to_path(repo_dependencies_path, **kwds):
            return 1
        # Derive repository dependencies from the tools used in the workflow.
        dependencies = RepositoryDependencies(_parse_repos_from_workflow(from_workflow))
        dependencies.write_to_path(repo_dependencies_path)
    return 0
def shed_init(ctx, path, **kwds):
    """Initialize a new shed repository directory at ``path``."""
    if not os.path.exists(path):
        os.makedirs(path)
    shed_config_path = os.path.join(path, SHED_CONFIG_NAME)
    if not can_write_to_path(shed_config_path, **kwds):
        # .shed.yml exists and no --force sent.
        return 1
    if _create_shed_config(ctx, shed_config_path, **kwds):
        return 1
    from_workflow = kwds.get("from_workflow")
    if from_workflow:
        # Place a copy of the workflow next to the shed config if not there yet.
        workflow_target = os.path.join(path, os.path.basename(from_workflow))
        if not os.path.exists(workflow_target):
            shutil.copyfile(from_workflow, workflow_target)
        repo_dependencies_path = os.path.join(path, REPO_DEPENDENCIES_CONFIG_NAME)
        if not can_write_to_path(repo_dependencies_path, **kwds):
            return 1
        # Build the dependencies object then attach the parsed (owner, name) pairs.
        dependencies = RepositoryDependencies()
        dependencies.repo_pairs = _parse_repos_from_workflow(from_workflow)
        dependencies.write_to_path(repo_dependencies_path)
    return 0
def shed_init(ctx, path, **kwds):
    """Initialize a new shed repository directory at ``path``."""
    if not os.path.exists(path):
        os.makedirs(path)
    shed_config_path = os.path.join(path, SHED_CONFIG_NAME)
    if not can_write_to_path(shed_config_path, **kwds):
        # .shed.yml exists and no --force sent.
        return 1
    _create_shed_config(ctx, shed_config_path, **kwds)
    from_workflow = kwds.get("from_workflow")
    if from_workflow:
        # Copy the workflow into the repository unless already present.
        workflow_target = os.path.join(path, os.path.basename(from_workflow))
        if not os.path.exists(workflow_target):
            shutil.copyfile(from_workflow, workflow_target)
        repo_dependencies_path = os.path.join(path, "repository_dependencies.xml")
        if not can_write_to_path(repo_dependencies_path, **kwds):
            return 1
        # Emit one <repository/> entry per (owner, name) pair from the workflow.
        pieces = ['<repositories description="">']
        for owner, name in _parse_repos_from_workflow(from_workflow):
            pieces.append(' <repository owner="%s" name="%s" />' % (owner, name))
        pieces.append("</repositories>")
        with open(repo_dependencies_path, "w") as f:
            f.write("".join(pieces))
    return 0
def write_tool_description(ctx, tool_description, **kwds):
    """Write a tool description to the file system guided by supplied CLI kwds.

    Writes the tool file (name from ``--tool`` or derived from the tool id),
    an optional ``*tests.yml`` companion, an optional ``macros.xml``, and
    copies any referenced test files into ``test-data``. Exits the context
    with status 1 if any target path is not writable.
    """
    tool_id = kwds.get("id")
    output = kwds.get("tool")
    if not output:
        extension = "cwl" if kwds.get("cwl") else "xml"
        output = "%s.%s" % (tool_id, extension)
    if not io.can_write_to_path(output, **kwds):
        ctx.exit(1)

    io.write_file(output, tool_description.contents)
    io.info("Tool written to %s" % output)

    test_contents = tool_description.test_contents
    if test_contents:
        # Match the separator style already used in the tool id.
        sep = "-" if "-" in tool_id else "_"
        # Reuse tool_id rather than a second kwds.get("id") lookup.
        tests_path = "%s%stests.yml" % (tool_id, sep)
        if not io.can_write_to_path(tests_path, **kwds):
            ctx.exit(1)
        io.write_file(tests_path, test_contents)
        io.info("Tool tests written to %s" % tests_path)

    macros = kwds["macros"]
    macros_file = "macros.xml"
    if macros and not os.path.exists(macros_file):
        io.write_file(macros_file, tool_description.macro_contents)
    elif macros:
        io.info(REUSING_MACROS_MESSAGE)

    if tool_description.test_files:
        if not os.path.exists("test-data"):
            io.info("No test-data directory, creating one.")
            io.shell("mkdir -p 'test-data'")
        for test_file in tool_description.test_files:
            io.info("Copying test-file %s" % test_file)
            # NOTE(review): single-quote shell quoting breaks for filenames
            # containing "'"; other variants of this helper use shutil.copy.
            io.shell("cp '%s' 'test-data'" % test_file)
def cli(ctx, workflow_path, output=None, split_test=False, **kwds):
    """Initialize a Galaxy workflow test description for supplied workflow.

    Be sure to your lint your workflow with ``workflow_lint`` before calling
    this to ensure inputs and outputs comply with best practices that make
    workflow testing easier.
    """
    path_basename = os.path.basename(workflow_path)
    job = job_template(workflow_path)
    if output is None:
        output = new_workflow_associated_path(workflow_path)
        # (Removed a dead job_output assignment here — the split-test job
        # path is only needed, and is recomputed, in the split_test branch.)
    if not can_write_to_path(output, **kwds):
        ctx.exit(1)
    test_description = [{
        'doc': 'Test outline for %s' % path_basename,
        'job': job,
        'outputs': output_stubs_for_workflow(workflow_path),
    }]
    if split_test:
        # Write the job to its own file and reference it from the test.
        job_output = new_workflow_associated_path(workflow_path, suffix="job1")
        if not can_write_to_path(job_output, **kwds):
            ctx.exit(1)
        test_description[0]['job'] = os.path.basename(job_output)
        with open(job_output, "w") as f_job:
            yaml.dump(job, f_job)
    with open(output, "w") as f:
        yaml.dump(test_description, f)
def cli(ctx, **kwds):
    """Generate a tool outline from supplied arguments. """
    invalid = _validate_kwds(kwds)
    if invalid:
        return invalid
    output = kwds.get("tool")
    if not output:
        output = "%s.xml" % kwds.get("id")
    if not io.can_write_to_path(output, **kwds):
        sys.exit(1)
    tool_description = tool_builder.build(**kwds)
    # Use a context manager so the handle is closed deterministically
    # (the original open(...).write(...) leaked the file object to GC).
    with open(output, "w") as f:
        f.write(tool_description.contents)
    io.info("Tool written to %s" % output)
    macros = kwds["macros"]
    macros_file = "macros.xml"
    if macros and not os.path.exists(macros_file):
        with open(macros_file, "w") as f:
            f.write(tool_description.macro_contents)
    elif macros:
        io.info(REUSING_MACROS_MESSAGE)
    if tool_description.test_files:
        if not os.path.exists("test-data"):
            io.info("No test-data directory, creating one.")
            io.shell("mkdir -p 'test-data'")
        for test_file in tool_description.test_files:
            io.info("Copying test-file %s" % test_file)
            io.shell("cp '%s' 'test-data'" % test_file)
def cli(ctx, **kwds):
    """Generate a tool outline from supplied arguments. """
    invalid = _validate_kwds(kwds)
    if invalid:
        return invalid
    output = kwds.get("tool")
    if not output:
        output = "%s.xml" % kwds.get("id")
    if not io.can_write_to_path(output, **kwds):
        sys.exit(1)
    tool_description = tool_builder.build(**kwds)
    # Context manager so the handle is closed deterministically
    # (the original open(...).write(...) leaked the file object to GC).
    with open(output, "w") as f:
        f.write(tool_description.contents)
    io.info("Tool written to %s" % output)
    macros = kwds["macros"]
    macros_file = "macros.xml"
    if macros and not os.path.exists(macros_file):
        with open(macros_file, "w") as f:
            f.write(tool_description.macro_contents)
    elif macros:
        # BUGFIX: was a bare `else:`, which printed the "reusing macros"
        # message even when no macros were requested at all.
        io.info(REUSING_MACROS_MESSAGE)
    if tool_description.test_files:
        if not os.path.exists("test-data"):
            io.info("No test-data directory, creating one.")
            io.shell("mkdir -p 'test-data'")
        for test_file in tool_description.test_files:
            io.info("Copying test-file %s" % test_file)
            io.shell("cp '%s' 'test-data'" % test_file)
def cli(ctx, workflow_identifier, output=None, **kwds):
    """Initialize a Galaxy workflow job description for supplied workflow.

    Be sure to your lint your workflow with ``workflow_lint`` before calling
    this to ensure inputs and outputs comply with best practices that make
    workflow testing easier.

    Jobs can be run with the planemo run command (``planemo run workflow.ga
    job.yml``). Planemo run works with Galaxy tools and CWL artifacts (both
    tools and workflows) as well so this command may be renamed to to job_init
    at something along those lines at some point.
    """
    from_invocation = kwds["from_invocation"]
    if from_invocation:
        # Fetching a workflow from an invocation may also download test data.
        if not os.path.isdir('test-data'):
            ctx.log("Creating test-data directory.")
            os.makedirs("test-data")
        path_basename = get_workflow_from_invocation_id(
            workflow_identifier, kwds["galaxy_url"], kwds["galaxy_user_key"])
    job = job_template(workflow_identifier, **kwds)
    if output is None:
        # Derive the job file name from whichever artifact we actually have.
        source = path_basename if from_invocation else workflow_identifier
        output = new_workflow_associated_path(source, suffix="job")
    if not can_write_to_path(output, **kwds):
        ctx.exit(1)
    with open(output, "w") as job_file:
        yaml.dump(job, job_file)
def write_tool_description(ctx, tool_description, **kwds):
    """Write a tool description to the file system guided by supplied CLI kwds."""
    tool_id = kwds.get("id")
    output = kwds.get("tool")
    if not output:
        suffix = "cwl" if kwds.get("cwl") else "xml"
        output = "%s.%s" % (tool_id, suffix)
    if not io.can_write_to_path(output, **kwds):
        ctx.exit(1)

    io.write_file(output, tool_description.contents)
    io.info("Tool written to %s" % output)

    # Write any companion files (e.g. scripts) produced with the tool.
    for companion in tool_description.tool_files:
        if companion.contents is None:
            continue
        companion_path = companion.filename
        if not io.can_write_to_path(companion_path, **kwds):
            ctx.exit(1)
        io.write_file(companion_path, companion.contents)
        io.info("Tool %s written to %s" % (companion.description, companion_path))

    macros = kwds["macros"]
    macros_file = "macros.xml"
    if macros:
        if os.path.exists(macros_file):
            io.info(REUSING_MACROS_MESSAGE)
        else:
            io.write_file(macros_file, tool_description.macro_contents)

    if tool_description.test_files:
        if not os.path.exists("test-data"):
            io.info("No test-data directory, creating one.")
            os.makedirs('test-data')
        for test_file in tool_description.test_files:
            io.info("Copying test-file %s" % test_file)
            try:
                shutil.copy(test_file, 'test-data')
            except Exception as e:
                # Best-effort copy: report and continue with remaining files.
                io.info("Copy of %s failed: %s" % (test_file, e))
def write_tool_description(ctx, tool_description, **kwds):
    """Write a tool description to the file system guided by supplied CLI kwds."""
    tool_id = kwds.get("id")
    output = kwds.get("tool")
    if not output:
        suffix = "cwl" if kwds.get("cwl") else "xml"
        output = "%s.%s" % (tool_id, suffix)
    if not io.can_write_to_path(output, **kwds):
        ctx.exit(1)

    io.write_file(output, tool_description.contents)
    io.info("Tool written to %s" % output)

    # Write any companion files (e.g. scripts) produced with the tool.
    for companion in tool_description.tool_files:
        if companion.contents is None:
            continue
        companion_path = companion.filename
        if not io.can_write_to_path(companion_path, **kwds):
            ctx.exit(1)
        io.write_file(companion_path, companion.contents)
        io.info("Tool %s written to %s" % (companion.description, companion_path))

    macros = kwds["macros"]
    macros_file = "macros.xml"
    if macros:
        if os.path.exists(macros_file):
            io.info(REUSING_MACROS_MESSAGE)
        else:
            io.write_file(macros_file, tool_description.macro_contents)

    if tool_description.test_files:
        if not os.path.exists("test-data"):
            io.info("No test-data directory, creating one.")
            os.makedirs('test-data')
        for test_file in tool_description.test_files:
            io.info("Copying test-file %s" % test_file)
            try:
                shutil.copy(test_file, 'test-data')
            except Exception as e:
                # Best-effort copy: report and continue with remaining files.
                io.info("Copy of %s failed: %s" % (test_file, e))
def cli(ctx, workflow_identifier, output=None, split_test=False, **kwds):
    """Initialize a Galaxy workflow test description for supplied workflow.

    Be sure to your lint your workflow with ``workflow_lint`` before calling
    this to ensure inputs and outputs comply with best practices that make
    workflow testing easier.
    """
    if kwds["from_invocation"]:
        # Fetching a workflow from an invocation may also download test data.
        if not os.path.isdir('test-data'):
            ctx.log("Creating test-data directory.")
            os.makedirs("test-data")
        path_basename = get_workflow_from_invocation_id(workflow_identifier, kwds["galaxy_url"], kwds["galaxy_user_key"])
    else:
        path_basename = os.path.basename(workflow_identifier)
    job = job_template(workflow_identifier, **kwds)
    if output is None:
        output = new_workflow_associated_path(path_basename if kwds["from_invocation"] else workflow_identifier)
        # (Removed a dead job_output assignment here — the split-test job
        # path is only needed, and is recomputed, in the split_test branch.)
    if not can_write_to_path(output, **kwds):
        ctx.exit(1)
    test_description = [{
        'doc': 'Test outline for %s' % path_basename,
        'job': job,
        'outputs': output_stubs_for_workflow(workflow_identifier, **kwds),
    }]
    if split_test:
        # Write the job to its own file and reference it from the test.
        job_output = new_workflow_associated_path(path_basename if kwds["from_invocation"] else workflow_identifier, suffix="job1")
        if not can_write_to_path(job_output, **kwds):
            ctx.exit(1)
        test_description[0]['job'] = os.path.basename(job_output)
        with open(job_output, "w") as f_job:
            yaml.dump(job, f_job)
    with open(output, "w") as f:
        yaml.dump(test_description, f)
def cli(ctx, workflow_identifier, output=None, **kwds):
    """Initialize a Galaxy workflow job description for supplied workflow.

    Be sure to your lint your workflow with ``workflow_lint`` before calling
    this to ensure inputs and outputs comply with best practices that make
    workflow testing easier.

    Jobs can be run with the planemo run command (``planemo run workflow.ga
    job.yml``). Planemo run works with Galaxy tools and CWL artifacts (both
    tools and workflows) as well so this command may be renamed to to job_init
    at something along those lines at some point.
    """
    job = job_template(workflow_identifier)
    if output is None:
        # Default the job file name to a path derived from the workflow.
        output = new_workflow_associated_path(workflow_identifier, suffix="job")
    if not can_write_to_path(output, **kwds):
        ctx.exit(1)
    # Serialize the job template as YAML to the chosen output path.
    with open(output, "w") as job_file:
        yaml.dump(job, job_file)