def cli(ctx, **kwds):
    """Generate a tool outline from supplied arguments."""
    invalid = _validate_kwds(kwds)
    if invalid:
        return invalid
    output = kwds.get("tool")
    if not output:
        output = "%s.xml" % kwds.get("id")
    if not io.can_write_to_path(output, **kwds):
        sys.exit(1)
    tool_description = tool_builder.build(**kwds)
    # Use context managers so file handles are closed deterministically
    # (original used open(...).write(...) and leaked the handle).
    with open(output, "w") as f:
        f.write(tool_description.contents)
    io.info("Tool written to %s" % output)
    macros = kwds["macros"]
    macros_file = "macros.xml"
    if macros and not os.path.exists(macros_file):
        with open(macros_file, "w") as f:
            f.write(tool_description.macro_contents)
    elif macros:
        # BUGFIX: only report reuse when macros were actually requested; the
        # original bare 'else' printed this message even without --macros.
        io.info(REUSING_MACROS_MESSAGE)
    if tool_description.test_files:
        if not os.path.exists("test-data"):
            io.info("No test-data directory, creating one.")
            io.shell("mkdir -p 'test-data'")
        for test_file in tool_description.test_files:
            io.info("Copying test-file %s" % test_file)
            io.shell("cp '%s' 'test-data'" % test_file)
def write_tool_description(ctx, tool_description, **kwds):
    """Write a tool description to the file system guided by supplied CLI kwds."""
    tool_id = kwds.get("id")
    output = kwds.get("tool")
    if not output:
        extension = "cwl" if kwds.get("cwl") else "xml"
        output = "%s.%s" % (tool_id, extension)
    if not io.can_write_to_path(output, **kwds):
        ctx.exit(1)
    io.write_file(output, tool_description.contents)
    io.info("Tool written to %s" % output)
    test_contents = tool_description.test_contents
    if test_contents:
        # Follow the tool id's own naming convention (dashes vs underscores).
        sep = "-" if "-" in tool_id else "_"
        # CONSISTENCY FIX: reuse the already-bound tool_id rather than
        # re-fetching kwds.get("id") a second time.
        tests_path = "%s%stests.yml" % (tool_id, sep)
        if not io.can_write_to_path(tests_path, **kwds):
            ctx.exit(1)
        io.write_file(tests_path, test_contents)
        io.info("Tool tests written to %s" % tests_path)
    macros = kwds["macros"]
    macros_file = "macros.xml"
    if macros and not os.path.exists(macros_file):
        io.write_file(macros_file, tool_description.macro_contents)
    elif macros:
        io.info(REUSING_MACROS_MESSAGE)
    if tool_description.test_files:
        if not os.path.exists("test-data"):
            io.info("No test-data directory, creating one.")
            io.shell("mkdir -p 'test-data'")
        for test_file in tool_description.test_files:
            io.info("Copying test-file %s" % test_file)
            io.shell("cp '%s' 'test-data'" % test_file)
def _diff_in(ctx, working, realized_repository, **kwds):
    """Diff the realized repository against its Tool Shed copy inside ``working``.

    Returns a truthy value when a difference is detected, 0 otherwise.
    """
    path = realized_repository.path
    shed_target_source = kwds.get("shed_target_source", None)
    # Label A is the "mine" side: a second shed, or the local checkout.
    label_a = "_%s_" % (shed_target_source if shed_target_source else "local")
    shed_target = kwds.get("shed_target", "B")
    if "/" in shed_target:
        # URLs make bad directory names; collapse to a placeholder label.
        shed_target = "custom_shed"
    label_b = "_%s_" % shed_target
    mine = os.path.join(working, label_a)
    other = os.path.join(working, label_b)
    tsi = tool_shed_client(ctx, read_only=True, **kwds)
    # In order to download the tarball, require repository ID...
    repo_id = realized_repository.find_repository_id(ctx, tsi)
    if repo_id is None:
        error(
            "Repository [%s] does not exist in the targeted Tool Shed, can't do shed_diff"
            % realized_repository.name)
        # TODO - Should this return an error code which can be checked for in recursive mode?
        return 0
    # Side B: the shed's current copy of the repository.
    download_tarball(ctx, tsi, realized_repository, destination=other, clean=True, destination_is_pattern=False, **kwds)
    if shed_target_source:
        # Side A: copy downloaded from a second Tool Shed.
        new_kwds = kwds.copy()
        new_kwds["shed_target"] = shed_target_source
        tsi = tool_shed_client(ctx, read_only=True, **new_kwds)
        download_tarball(ctx, tsi, realized_repository, destination=mine, clean=True, destination_is_pattern=False, **new_kwds)
    else:
        # Side A: local checkout realized as a tarball, then unpacked.
        tar_path = build_tarball(path)
        cmd_template = 'mkdir "%s"; tar -xzf "%s" -C "%s"; rm -rf %s'
        shell(cmd_template % (mine, tar_path, mine, tar_path))
    output = kwds.get("output", None)
    raw = kwds.get("raw", False)
    is_diff = 0
    if not raw:
        # Semantically compare shed XML files first (matches are removed so
        # the raw diff below only sees genuinely different files).
        if output:
            with open(output, "w") as f:
                is_diff = diff_and_remove(working, label_a, label_b, f)
        else:
            is_diff = diff_and_remove(working, label_a, label_b, sys.stdout)
    # Plain recursive diff over whatever remains.
    cmd = 'cd "%s"; diff -r %s %s' % (working, label_a, label_b)
    if output:
        cmd += ">> '%s'" % output
    exit = shell(cmd) or is_diff
    return exit
def cli(ctx, **kwds):
    """Generate a tool outline from supplied arguments."""
    invalid = _validate_kwds(kwds)
    if invalid:
        return invalid
    output = kwds.get("tool")
    if not output:
        output = "%s.xml" % kwds.get("id")
    if not io.can_write_to_path(output, **kwds):
        sys.exit(1)
    tool_description = tool_builder.build(**kwds)
    # BUGFIX: write via context managers; the original open(...).write(...)
    # calls leaked file handles.
    with open(output, "w") as f:
        f.write(tool_description.contents)
    io.info("Tool written to %s" % output)
    macros = kwds["macros"]
    macros_file = "macros.xml"
    if macros and not os.path.exists(macros_file):
        with open(macros_file, "w") as f:
            f.write(tool_description.macro_contents)
    elif macros:
        io.info(REUSING_MACROS_MESSAGE)
    if tool_description.test_files:
        if not os.path.exists("test-data"):
            io.info("No test-data directory, creating one.")
            io.shell("mkdir -p 'test-data'")
        for test_file in tool_description.test_files:
            io.info("Copying test-file %s" % test_file)
            io.shell("cp '%s' 'test-data'" % test_file)
def test_upload_from_git(self):
    """shed_update from a git+ URI should record revision info in the commit message."""
    with self._isolate() as workdir:
        with self._git_configured():
            dest = join(workdir, "single_tool")
            self._copy_repo("single_tool", dest)
            init_steps = (
                "cd %s" % dest,
                "git init",
                "git add .",
                "git commit -m 'initial commit'",
            )
            shell(" && ".join(init_steps))
            revision = git.rev(None, "single_tool")
            upload_command = [
                "shed_update",
                "--force_repository_creation",
                "git+single_tool/.git",
            ]
            upload_command.extend(self._shed_args())
            self._check_exit_code(upload_command)
            self._verify_single_uploaded(workdir, ["single_tool"])
            model = self.mock_shed.model
            repo_id = self.repository_by_name("single_tool")["id"]
            message = model._repositories_msg[repo_id][0]
            assert "planemo upload for repository " in message
            assert "repository https://github.com/galaxyproject" in message
            assert revision in message
def _ensure_galaxy_repository_available(ctx):
    """Make sure a bare cached clone of Galaxy exists in the workspace."""
    gx_repo = os.path.join(ctx.workspace, "gx_repo")
    if not os.path.exists(gx_repo):
        remote_repo = "https://github.com/galaxyproject/galaxy"
        shell("git clone --bare %s %s" % (remote_repo, gx_repo))
    else:
        # Attempt fetch - but don't fail if not interweb, etc...
        shell("git --git-dir %s fetch >/dev/null 2>&1" % gx_repo)
def _install_with_command(config_directory, command):
    """Run the supplied install command and bootstrap a virtualenv for galaxy-dev."""
    steps = (
        "cd %s" % config_directory,
        command,
        "cd galaxy-dev",
        # Only create a venv when the virtualenv binary is available.
        "type virtualenv >/dev/null 2>&1 && virtualenv .venv",
        galaxy_run.ACTIVATE_COMMAND,
    )
    shell(";".join(steps))
def kill(self):
    """Kill the process tracked by this runner's pid file, printing diagnostics."""
    if self._ctx.verbose:
        shell(["ps", "ax"])
    exists = os.path.exists(self.pid_file)
    print("Killing pid file [%s]" % self.pid_file)
    print("pid_file exists? [%s]" % exists)
    if exists:
        # BUGFIX: read via a context manager; the original open(...).read()
        # leaked the file handle.
        with open(self.pid_file, "r") as f:
            print("pid_file contents are [%s]" % f.read())
    kill_pid_file(self.pid_file)
def _install_with_command(ctx, galaxy_root, env, kwds):
    """Bootstrap Galaxy's virtualenv and common startup inside ``galaxy_root``."""
    # Clear the macOS venv launcher variable so pip/venv behave predictably.
    env['__PYVENV_LAUNCHER__'] = ''
    steps = [
        setup_venv(ctx, kwds),
        setup_common_startup_args(),
        COMMAND_STARTUP_COMMAND,
    ]
    shell(shell_join(*steps), cwd=galaxy_root, env=env)
def _install_with_command(ctx, command, galaxy_root, env, kwds):
    """Run the supplied command, then set up Galaxy's venv inside ``galaxy_root``."""
    env['__PYVENV_LAUNCHER__'] = ''
    steps = [
        command,
        ['cd', galaxy_root],
        setup_venv(ctx, kwds),
        setup_common_startup_args(),
        COMMAND_STARTUP_COMMAND,
    ]
    shell(shell_join(*steps), env=env)
def _install_with_command(ctx, config_directory, command, env, kwds):
    """Run the install command in ``config_directory`` and bootstrap galaxy-dev."""
    env['__PYVENV_LAUNCHER__'] = ''
    steps = [
        ['cd', config_directory],
        command,
        ['cd', 'galaxy-dev'],
        setup_venv(ctx, kwds),
        setup_common_startup_args(),
        COMMAND_STARTUP_COMMAND,
    ]
    shell(shell_join(*steps), env=env)
def cli(ctx):
    """Download linuxbrew install and run it with ruby.

    Linuxbrew is a fork of Homebrew (http://brew.sh/linuxbrew/).
    For more information on installing linuxbrew and pre-requisites
    see https://github.com/Homebrew/linuxbrew#installation. Homebrew
    or linuxbrew are required in order to use the other commands
    ``brew`` and ``brew_shell``.
    """
    # Fetch the install script to a temporary path, then execute with ruby.
    fname = mkstemp('install_brew')
    # NOTE(review): urllib.urlretrieve is Python 2 only; Python 3 needs
    # urllib.request.urlretrieve — confirm the targeted interpreter.
    urllib.urlretrieve(INSTALL_SCRIPT, fname)
    shell(["ruby", fname])
def _ensure_galaxy_repository_available(ctx, kwds):
    """Ensure a mirror clone of the requested Galaxy source exists; return its path."""
    workspace = ctx.workspace
    cwl = kwds.get("cwl", False)
    galaxy_source = kwds.get('galaxy_source')
    if galaxy_source and galaxy_source != DEFAULT_GALAXY_SOURCE:
        # Non-default sources are cached under a name derived from the URL.
        sanitized = "".join(
            ch if ch.isalnum() else '_' for ch in kwds['galaxy_source']).rstrip()[:255]
        gx_repo = os.path.join(workspace, "gx_repo_%s" % sanitized)
    else:
        gx_repo = os.path.join(workspace, "gx_repo")
    if cwl:
        gx_repo += "_cwl"
    if not os.path.exists(gx_repo):
        clone_command = git.command_clone(ctx, _galaxy_source(kwds), gx_repo, mirror=True)
        shell(clone_command)
    else:
        # Upgrade any pre-existing bare clone to mirror configuration.
        config_base = ['git', '--git-dir', gx_repo, 'config']
        shell(config_base + ['remote.origin.fetch', '+refs/*:refs/*'])
        shell(config_base + ['remote.origin.mirror', 'true'])
        # Attempt remote update - but don't fail if not interweb, etc...
        shell("git --git-dir %s remote update >/dev/null 2>&1" % gx_repo)
    return gx_repo
def _ensure_galaxy_repository_available(ctx, kwds):
    """Ensure a bare cached clone of Galaxy exists in the workspace; return its path."""
    repo_path = os.path.join(ctx.workspace, "gx_repo")
    if kwds.get("cwl", False):
        repo_path += "_cwl"
    if not os.path.exists(repo_path):
        clone_command = git.command_clone(ctx, _galaxy_source(kwds), repo_path, bare=True)
        shell(clone_command)
    else:
        # Attempt fetch - but don't fail if not interweb, etc...
        shell("git --git-dir %s fetch >/dev/null 2>&1" % repo_path)
    return repo_path
def test_tar_from_git(self):
    """shed_update should accept a git+ URI pointing at a local repository."""
    with self._isolate() as workdir:
        dest = join(workdir, "single_tool")
        self._copy_repo("single_tool", dest)
        git_setup = (
            "cd single_tool",
            "git init",
            "git add .",
            "git commit -m 'initial commit'",
        )
        shell(" && ".join(git_setup))
        command = [
            "shed_update",
            "--force_repository_creation",
            "git+single_tool/.git",
        ]
        command.extend(self._shed_args())
        self._check_exit_code(command)
        self._verify_single_uploaded(workdir, ["single_tool"])
def kill(self):
    """Kill planemo container..."""
    command = docker_util.kill_command(
        self.server_name,
        **self.docker_target_kwds
    )
    # Propagate the shell exit code to the caller.
    return shell(command)
def _install_galaxy_via_git(ctx, galaxy_root, env, kwds):
    """Clone Galaxy from the cached repository and run the install command.

    Raises an Exception when the git clone itself fails.
    """
    gx_repo = _ensure_galaxy_repository_available(ctx, kwds)
    branch = _galaxy_branch(kwds)
    command = git.command_clone(ctx, gx_repo, galaxy_root, branch=branch)
    exit_code = shell(command, env=env)
    if exit_code != 0:
        # BUGFIX: corrected typo "glone" -> "clone" in the error message.
        raise Exception("Failed to clone Galaxy via git")
    _install_with_command(ctx, galaxy_root, env, kwds)
def _ensure_galaxy_repository_available(ctx, kwds):
    """Ensure a bare cache of the (possibly CWL fork) Galaxy repo; return its path."""
    use_cwl = kwds.get("cwl", False)
    gx_repo = os.path.join(ctx.workspace, "gx_repo")
    if use_cwl:
        gx_repo += "_cwl"
    if os.path.exists(gx_repo):
        # Attempt fetch - but don't fail if not interweb, etc...
        shell("git --git-dir %s fetch >/dev/null 2>&1" % gx_repo)
        return gx_repo
    # No cache yet - clone the appropriate upstream as a bare repository.
    if use_cwl:
        remote_repo = "https://github.com/common-workflow-language/galaxy"
    else:
        remote_repo = "https://github.com/galaxyproject/galaxy"
    shell(git.command_clone(ctx, remote_repo, gx_repo, bare=True))
    return gx_repo
def __handle_upload(ctx, realized_repository, **kwds):
    """Upload a tool directory as a tarball to a tool shed.

    Returns 0 on success, -1 on failure.
    """
    path = realized_repository.path
    tar_path = kwds.get("tar", None)
    if not tar_path:
        tar_path = shed.build_tarball(path, **kwds)
    if kwds["tar_only"]:
        # Just save the tarball locally instead of uploading it.
        suffix = ""
        if realized_repository.multiple:
            name = realized_repository.config["name"]
            suffix = "_%s" % name.replace("-", "_")
        shell("cp %s shed_upload%s.tar.gz" % (tar_path, suffix))
        return 0
    tsi = shed.tool_shed_client(ctx, **kwds)
    update_kwds = {}
    message = kwds.get("message", None)
    if message:
        update_kwds["commit_message"] = message
    # TODO: this needs to use realized repository
    repo_id = realized_repository.find_repository_id(ctx, tsi)
    if repo_id is None and kwds["force_repository_creation"]:
        repo_id = realized_repository.create(ctx, tsi)
    # failing to create the repo, give up
    if repo_id is None:
        return -1
    # TODO: support updating repo information if it changes in the config file
    try:
        tsi.repositories.update_repository(repo_id, tar_path, **update_kwds)
    except Exception as e:
        exception_content = e.read()
        try:
            # Galaxy passes nice JSON messages as their errors, which bioblend
            # blindly returns. Attempt to parse those.
            upstream_error = json.loads(exception_content)
            error(upstream_error['err_msg'])
        except Exception as e2:
            error("Could not update %s" % realized_repository.name)
            error(exception_content)
            # BUGFIX: e2 is a JSON decode error and has no read() method;
            # the original error(e2.read()) raised AttributeError here,
            # masking the real failure.
            error(str(e2))
        return -1
    info("Repository %s updated successfully." % realized_repository.name)
    return 0
def _install_with_command(ctx, config_directory, command, env, kwds):
    """Run the install command then bootstrap galaxy-dev's virtualenv."""
    # TODO: --watchdog
    pip_installs = []
    if not pip_installs:
        pip_install_command = ""
    else:
        pip_install_command = ['pip', 'install'] + pip_installs
    install_cmd = shell_join(
        ['cd', config_directory],
        command,
        ['cd', 'galaxy-dev'],
        setup_venv(ctx, kwds),
        pip_install_command,
        setup_common_startup_args(),
        COMMAND_STARTUP_COMMAND,
    )
    shell(install_cmd, env=env)
def _install_with_command(ctx, config_directory, command, env, kwds):
    """Run the install command (shell-string form) and bootstrap galaxy-dev."""
    # TODO: --watchdog
    pip_installs = []
    if not pip_installs:
        pip_install_command = ""
    else:
        pip_install_command = PIP_INSTALL_CMD % " ".join(pip_installs)
    full_command = shell_join(
        "cd %s" % config_directory,
        command,
        "cd galaxy-dev",
        setup_venv(ctx, kwds),
        pip_install_command,
        setup_common_startup_args(),
        COMMAND_STARTUP_COMMAND,
    )
    shell(full_command, env=env)
def cli(ctx, path, template=None, **kwds):
    """Initialize a new tool project (demo only right now).
    """
    if template is None:
        warn("Creating empty project, this function doesn't do much yet.")
    if not os.path.exists(path):
        os.makedirs(path)
    if template is None:
        return
    tempdir = tempfile.mkdtemp()
    try:
        # Download and unpack the template archive, then move the chosen
        # template's contents into the project directory.
        untar_to(DOWNLOAD_URL, tempdir, UNTAR_ARGS % (tempdir))
        shell("ls '%s'" % tempdir)
        shell("mv '%s/%s'/* '%s'" % (tempdir, template, path))
    finally:
        shutil.rmtree(tempdir)
def upload_repository(ctx, realized_repository, **kwds):
    """Upload a tool directory as a tarball to a tool shed."""
    path = realized_repository.path
    tar_path = kwds.get("tar", None)
    if not tar_path:
        tar_path = build_tarball(path, **kwds)
    if kwds.get("tar_only", False):
        # Only save the tarball locally - skip the upload entirely.
        name = realized_repository.pattern_to_file_name("shed_upload.tar.gz")
        shell("cp '%s' '%s'" % (tar_path, name))
        return 0
    shed_context = get_shed_context(ctx, **kwds)
    update_kwds = {}
    _update_commit_message(ctx, realized_repository, update_kwds, **kwds)
    repo_id = handle_force_create(realized_repository, ctx, shed_context, **kwds)
    # failing to create the repo, give up
    if repo_id is None:
        return report_non_existent_repository(realized_repository)
    if kwds.get("check_diff", False):
        # Skip the upload when the remote copy already matches the local one.
        is_diff = diff_repo(ctx, realized_repository, **kwds) != 0
        if not is_diff:
            name = realized_repository.name
            info("Repository [%s] not different, skipping upload." % name)
            return 0
    # TODO: support updating repo information if it changes in the config file
    try:
        shed_context.tsi.repositories.update_repository(
            str(repo_id), tar_path, **update_kwds)
    except Exception as e:
        # The shed responds to a no-op upload with a specific 400; treat it
        # as success with a warning rather than a failure.
        if isinstance(e, bioblend.ConnectionError) and e.status_code == 400 and \
                e.body == '{"content_alert": "", "err_msg": "No changes to repository."}':
            warn(
                "Repository %s was not updated because there were no changes" % realized_repository.name)
            return 0
        message = api_exception_to_message(e)
        error("Could not update %s" % realized_repository.name)
        error(message)
        return -1
    info("Repository %s updated successfully." % realized_repository.name)
    return 0
def diff_in(ctx, working, path, **kwds):
    """Diff the repository at ``path`` against its Tool Shed copy inside ``working``."""
    shed_target_source = kwds.get("shed_target_source", None)
    # Label A is the "mine" side: a second shed, or the local checkout.
    label_a = "_%s_" % (shed_target_source if shed_target_source else "local")
    shed_target = kwds.get("shed_target", "B")
    if "/" in shed_target:
        # URLs make bad directory names; collapse to a placeholder label.
        shed_target = "custom_shed"
    label_b = "_%s_" % shed_target
    mine = os.path.join(working, label_a)
    other = os.path.join(working, label_b)
    tsi = shed.tool_shed_client(ctx, read_only=True, **kwds)
    # Side B: the shed's current copy of the repository.
    shed.download_tarball(
        ctx,
        tsi,
        path,
        destination=other,
        clean=True,
        **kwds
    )
    if shed_target_source:
        # Side A: copy downloaded from a second Tool Shed.
        new_kwds = kwds.copy()
        new_kwds["shed_target"] = shed_target_source
        tsi = shed.tool_shed_client(ctx, read_only=True, **new_kwds)
        shed.download_tarball(
            ctx,
            tsi,
            path,
            destination=mine,
            clean=True,
            **new_kwds
        )
    else:
        # Side A: local checkout realized as a tarball, then unpacked.
        tar_path = shed.build_tarball(path)
        cmd_template = 'mkdir "%s"; tar -xzf "%s" -C "%s"; rm -rf %s'
        shell(cmd_template % (mine, tar_path, mine, tar_path))
    cmd = 'cd "%s"; diff -r %s %s' % (working, label_a, label_b)
    if kwds["output"]:
        cmd += "> '%s'" % kwds["output"]
    shell(cmd)
def upload_repository(ctx, realized_repository, **kwds):
    """Upload a tool directory as a tarball to a tool shed.

    Returns 0 on success, 2 when the repository does not exist remotely,
    and -1 when the update call fails.
    """
    path = realized_repository.path
    tar_path = kwds.get("tar", None)
    if not tar_path:
        tar_path = build_tarball(path, **kwds)
    if kwds.get("tar_only", False):
        # Only save the tarball locally - skip the upload entirely.
        name = realized_repository.pattern_to_file_name("shed_upload.tar.gz")
        shell("cp '%s' '%s'" % (tar_path, name))
        return 0
    shed_context = get_shed_context(ctx, **kwds)
    update_kwds = {}
    _update_commit_message(ctx, realized_repository, update_kwds, **kwds)
    repo_id = realized_repository.find_repository_id(ctx, shed_context)
    if repo_id is None and kwds["force_repository_creation"]:
        repo_id = realized_repository.create(ctx, shed_context)
    # failing to create the repo, give up
    if repo_id is None:
        name = realized_repository.name
        error("Repository [%s] does not exist in the targeted Tool Shed." % name)
        return 2
    if kwds.get("check_diff", False):
        # Skip the upload when the remote copy already matches the local one.
        is_diff = diff_repo(ctx, realized_repository, **kwds)
        if not is_diff:
            name = realized_repository.name
            info("Repository [%s] not different, skipping upload." % name)
            return 0
    # TODO: support updating repo information if it changes in the config file
    try:
        shed_context.tsi.repositories.update_repository(
            repo_id, tar_path, **update_kwds
        )
    except Exception as e:
        message = api_exception_to_message(e)
        error("Could not update %s" % realized_repository.name)
        error(message)
        return -1
    info("Repository %s updated successfully." % realized_repository.name)
    return 0
def _ensure_galaxy_repository_available(ctx, kwds):
    """Ensure a mirror clone of the configured Galaxy source; return its path."""
    workspace = ctx.workspace
    source = kwds.get('galaxy_source')
    if source and source != DEFAULT_GALAXY_SOURCE:
        # Non-default sources are cached under a name derived from the URL.
        safe_name = "".join(
            c if c.isalnum() else '_' for c in kwds['galaxy_source']).rstrip()[:255]
        gx_repo = os.path.join(workspace, "gx_repo_%s" % safe_name)
    else:
        gx_repo = os.path.join(workspace, "gx_repo")
    if kwds.get("cwl", False):
        gx_repo += "_cwl"
    if os.path.exists(gx_repo):
        # Attempt fetch - but don't fail if not interweb, etc...
        shell("git --git-dir %s remote update >/dev/null 2>&1" % gx_repo)
    else:
        shell(git.command_clone(ctx, _galaxy_source(kwds), gx_repo, mirror=True))
    return gx_repo
def upload_repository(ctx, realized_repository, **kwds):
    """Upload a tool directory as a tarball to a tool shed."""
    path = realized_repository.path
    tar_path = kwds.get("tar", None)
    if not tar_path:
        tar_path = build_tarball(path, **kwds)
    if kwds.get("tar_only", False):
        # Only save the tarball locally - skip the upload entirely.
        name = realized_repository.pattern_to_file_name("shed_upload.tar.gz")
        shell("cp '%s' '%s'" % (tar_path, name))
        return 0
    shed_context = get_shed_context(ctx, **kwds)
    update_kwds = {}
    _update_commit_message(ctx, realized_repository, update_kwds, **kwds)
    repo_id = handle_force_create(realized_repository, ctx, shed_context, **kwds)
    # failing to create the repo, give up
    if repo_id is None:
        return report_non_existent_repository(realized_repository)
    if kwds.get("check_diff", False):
        # Skip the upload when the remote copy already matches the local one.
        is_diff = diff_repo(ctx, realized_repository, **kwds) != 0
        if not is_diff:
            name = realized_repository.name
            info("Repository [%s] not different, skipping upload." % name)
            return 0
    # TODO: support updating repo information if it changes in the config file
    try:
        shed_context.tsi.repositories.update_repository(
            str(repo_id), tar_path, **update_kwds
        )
    except Exception as e:
        # The shed responds to a no-op upload with a specific 400; treat it
        # as success with a warning rather than a failure.
        if isinstance(e, bioblend.ConnectionError) and e.status_code == 400 and \
                e.body == '{"content_alert": "", "err_msg": "No changes to repository."}':
            warn("Repository %s was not updated because there were no changes" % realized_repository.name)
            return 0
        message = api_exception_to_message(e)
        error("Could not update %s" % realized_repository.name)
        error(message)
        return -1
    info("Repository %s updated successfully." % realized_repository.name)
    return 0
def _install_with_command(ctx, config_directory, command, kwds):
    """Run the install command, optionally adding cwltool, then set up galaxy-dev."""
    # TODO: --watchdog
    extra_packages = []
    if kwds.get("cwl", False):
        extra_packages.append("cwltool")
    if extra_packages:
        pip_install_command = PIP_INSTALL_CMD % " ".join(extra_packages)
    else:
        pip_install_command = ""
    full_command = shell_join(
        "cd %s" % config_directory,
        command,
        "cd galaxy-dev",
        galaxy_run.setup_venv(ctx, kwds),
        pip_install_command,
        galaxy_run.setup_common_startup_args(),
        COMMAND_STARTUP_COMMAND,
    )
    shell(full_command)
def test_io_capture():
    """Test :func:`planemo.io.conditionally_captured_io`."""
    # warn() output is captured when capturing is enabled.
    with io.conditionally_captured_io(True, tee=False) as capture:
        io.warn("Problem...")
    assert_equal(capture[0]["data"], "Problem...")
    # shell() captures both the command line and its output.
    with io.conditionally_captured_io(True, tee=False) as capture:
        io.shell("echo 'Problem...'")
    assert_equal(capture[0]["data"], "echo 'Problem...'")
    assert_equal(capture[1]["data"], "Problem...")
    # communicate() behaves the same way.
    with io.conditionally_captured_io(True, tee=False) as capture:
        io.communicate("echo 'Problem...'")
    assert_equal(capture[0]["data"], "echo 'Problem...'")
    assert_equal(capture[1]["data"], "Problem...")
    # With capturing disabled nothing is collected at all.
    with io.conditionally_captured_io(False, tee=False) as capture:
        io.communicate("echo 'Test...'")
    assert capture is None
def shed_serve(ctx, install_args_list, **kwds):
    """Serve a daemon instance of Galaxy with specified repositories installed."""
    with serve_daemon(ctx, **kwds) as config:
        install_deps = not kwds.get("skip_dependencies", False)
        # NOTE(review): the message is emitted twice (stdout print plus
        # io.info) - possibly leftover debugging; confirm before removing.
        print(INSTALLING_MESSAGE)
        io.info(INSTALLING_MESSAGE)
        for install_args in install_args_list:
            # Force dependency resolution options for every repository.
            install_args["install_tool_dependencies"] = install_deps
            install_args["install_repository_dependencies"] = True
            install_args["new_tool_panel_section_label"] = "Shed Installs"
            config.install_repo(**install_args)
        try:
            config.wait_for_all_installed()
        except Exception:
            # Surface Galaxy's log for debugging, then re-raise.
            if ctx.verbose:
                print("Failed to install tool repositories, Galaxy log:")
                print(config.log_contents)
                print("Galaxy root:")
                io.shell(['ls', config.galaxy_root])
            raise
        # Generator-style context: hand the configured instance to the caller.
        yield config
def cli(ctx):
    """This command is used internally by planemo to assist in contineous
    testing of tools with Travis CI (https://travis-ci.org/).
    """
    build_dir = os.environ.get("TRAVIS_BUILD_DIR", None)
    if not build_dir:
        raise Exception("Failed to determine ${TRAVIS_BUILD_DIR}")
    build_travis_dir = os.path.join(build_dir, ".travis")
    if not os.path.exists(build_travis_dir):
        os.makedirs(build_travis_dir)
    build_bin_dir = os.path.join(build_travis_dir, "bin")
    if not os.path.exists(build_bin_dir):
        os.makedirs(build_bin_dir)
    build_env_path = os.path.join(build_travis_dir, "env.sh")
    template_vars = {
        "BUILD_TRAVIS_DIR": build_travis_dir,
        "BUILD_BIN_DIR": build_bin_dir,
        "BUILD_ENV_PATH": build_env_path,
    }
    build_env = string.Template(BUILD_ENVIRONMENT_TEMPLATE).safe_substitute(
        **template_vars
    )
    # BUGFIX: append via a context manager; the original open(...).write(...)
    # leaked the file handle.
    with open(build_env_path, "a") as f:
        f.write(build_env)
    shell(FIX_EGGS_DIR)
    shell(INSTALL_SAMTOOLS)
    setup_file = os.path.join(build_travis_dir, SETUP_FILE_NAME)
    if os.path.exists(setup_file):
        # Source the environment, then run the project's setup script.
        shell(
            ". %s && bash -x %s" % (build_env_path, setup_file),
            env=template_vars
        )
def cli(ctx, path, template=None, **kwds):
    """(Experimental) Initialize a new tool project.

    This is only a proof-of-concept demo right now.
    """
    if template is None:
        warn("Creating empty project, this function doesn't do much yet.")
    if not os.path.exists(path):
        os.makedirs(path)
    if template is None:
        return
    tempdir = tempfile.mkdtemp()
    try:
        untar_to(DOWNLOAD_URL, tempdir, UNTAR_ARGS % (tempdir))
        template_dir = os.path.join(tempdir, template)
        shell("ls '%s'" % (template_dir))
        shell("mv '%s'/* '%s'" % (template_dir, path))
        # The glob above skips hidden files; move them explicitly.
        dot_files = [
            os.path.join(template_dir, name)
            for name in os.listdir(template_dir)
            if name.startswith(".")
        ]
        if dot_files:
            quoted = "'" + "' '".join(dot_files) + "'"
            shell("mv %s '%s'" % (quoted, path))
    finally:
        shutil.rmtree(tempdir)
def cli(ctx, path, template=None, **kwds):
    """(Experimental) Initialize a new tool project.

    This is only a proof-of-concept demo right now.
    """
    if template is None:
        warn("Creating empty project, this function doesn't do much yet.")
    if not os.path.exists(path):
        os.makedirs(path)
    if template is None:
        return
    staging = tempfile.mkdtemp()
    try:
        untar_to(DOWNLOAD_URL, staging, UNTAR_ARGS % (staging))
        source_dir = os.path.join(staging, template)
        shell("ls '%s'" % (source_dir))
        shell("mv '%s'/* '%s'" % (source_dir, path))
        # Shell globbing skips dot files, so move any hidden entries too.
        hidden = [os.path.join(source_dir, entry)
                  for entry in os.listdir(source_dir)
                  if entry.startswith(".")]
        if len(hidden) > 0:
            hidden_quoted = "'" + "' '".join(hidden) + "'"
            shell("mv %s '%s'" % (hidden_quoted, path))
    finally:
        shutil.rmtree(staging)
def __handle_upload(ctx, path, **kwds):
    """Upload a tool directory as a tarball to a tool shed.

    Returns 0 on success, -1 on failure.
    """
    tar_path = kwds.get("tar", None)
    if not tar_path:
        tar_path = shed.build_tarball(path, **kwds)
    if kwds["tar_only"]:
        # Just save the tarball locally instead of uploading it.
        shell("cp %s shed_upload.tar.gz" % tar_path)
        return 0
    tsi = shed.tool_shed_client(ctx, **kwds)
    update_kwds = {}
    message = kwds.get("message", None)
    if message:
        update_kwds["commit_message"] = message
    repo_id = __find_repository(ctx, tsi, path, **kwds)
    if repo_id is None and kwds["force_repository_creation"]:
        repo_id = __create_repository(ctx, tsi, path, **kwds)
    # failing to create the repo, give up
    if repo_id is None:
        return -1
    # TODO: support updating repo information if it changes in the config file
    try:
        tsi.repositories.update_repository(repo_id, tar_path, **update_kwds)
    except Exception as e:
        exception_content = e.read()
        try:
            # Galaxy passes nice JSON messages as their errors, which bioblend
            # blindly returns. Attempt to parse those.
            upstream_error = json.loads(exception_content)
            error(upstream_error['err_msg'])
        except Exception as e2:
            error("Could not update %s" % path)
            error(exception_content)
            # BUGFIX: e2 is a JSON decode error and has no read() method;
            # the original error(e2.read()) raised AttributeError here.
            error(str(e2))
        return -1
    info("Repository %s updated successfully." % path)
    return 0
def _install_with_command(ctx, galaxy_root, env, kwds):
    """Run Galaxy's venv/startup setup inside ``galaxy_root`` and verify the result.

    Raises an Exception when the command fails or the expected layout is missing.
    """
    install_cmd = shell_join(
        setup_venv(ctx, kwds),
        setup_common_startup_args(),
        COMMAND_STARTUP_COMMAND,
    )
    if shell(install_cmd, cwd=galaxy_root, env=env) != 0:
        raise Exception("Failed to install Galaxy via command [%s]" % install_cmd)
    # Sanity-check the resulting directory layout.
    if not os.path.exists(galaxy_root):
        raise Exception("Failed to create Galaxy directory [%s]" % galaxy_root)
    if not os.path.exists(os.path.join(galaxy_root, "lib")):
        raise Exception("Failed to create Galaxy directory [%s], lib missing" % galaxy_root)
def cli(ctx):
    """Internal command for GitHub/TravisCI testing.

    This command is used internally by planemo to assist in contineous
    testing of tools with Travis CI (https://travis-ci.org/).
    """
    build_dir = os.environ.get("TRAVIS_BUILD_DIR", None)
    if not build_dir:
        raise Exception("Failed to determine ${TRAVIS_BUILD_DIR}")
    build_travis_dir = os.path.join(build_dir, ".travis")
    if not os.path.exists(build_travis_dir):
        os.makedirs(build_travis_dir)
    build_bin_dir = os.path.join(build_travis_dir, "bin")
    if not os.path.exists(build_bin_dir):
        os.makedirs(build_bin_dir)
    build_env_path = os.path.join(build_travis_dir, "env.sh")
    template_vars = {
        "BUILD_TRAVIS_DIR": build_travis_dir,
        "BUILD_BIN_DIR": build_bin_dir,
        "BUILD_ENV_PATH": build_env_path,
    }
    build_env = string.Template(BUILD_ENVIRONMENT_TEMPLATE).safe_substitute(
        **template_vars
    )
    # BUGFIX: append via a context manager; the original open(...).write(...)
    # leaked the file handle.
    with open(build_env_path, "a") as f:
        f.write(build_env)
    # Egg cache must not be world-accessible or Python refuses to use it.
    eggs_dir = os.path.join(os.getenv('HOME'), '.python-eggs')
    if not os.path.exists(eggs_dir):
        os.makedirs(eggs_dir, 0o700)
    else:
        os.chmod(eggs_dir, 0o700)
    # samtools essentially required by Galaxy
    shell(['wget', SAMTOOLS_URL])
    shell(['sudo', 'dpkg', '-i', SAMTOOLS_DEB])
    setup_file = os.path.join(build_travis_dir, SETUP_FILE_NAME)
    if os.path.exists(setup_file):
        shell(
            ". %s && bash -x %s" % (build_env_path, setup_file),
            env=template_vars
        )
def _copy_repo(self, name, dest):
    """Copy the named test repository's contents into ``dest``."""
    source = os.path.join(TEST_REPOS_DIR, name)
    # Trailing '/.' copies directory contents (including dot files).
    io.shell("cp -r '%s/.' '%s'" % (source, dest))
def clone(*args, **kwds):
    """Clone a git repository; return the shell exit code."""
    return io.shell(command_clone(*args, **kwds))
def _install_galaxy_via_git(ctx, galaxy_root, env, kwds):
    """Clone Galaxy from the cached repository, then run the install command."""
    gx_repo = _ensure_galaxy_repository_available(ctx, kwds)
    clone_command = git.command_clone(
        ctx, gx_repo, galaxy_root, branch=_galaxy_branch(kwds))
    shell(clone_command, env=env)
    _install_with_command(ctx, galaxy_root, env, kwds)
def is_rev_dirty(ctx, directory):
    """Return True when the git repository at ``directory`` has uncommitted changes."""
    # `git diff --quiet` exits non-zero exactly when the working tree is dirty.
    return io.shell("cd '%s' && git diff --quiet" % directory) != 0
def _diff_in(ctx, working, realized_repository, **kwds):
    """Diff the realized repository against its Tool Shed copy inside ``working``.

    Returns 0 when no differences are found, non-zero otherwise
    (2 when the repository does not exist remotely).
    """
    path = realized_repository.path
    shed_target_source = kwds.get("shed_target_source")
    # Label A is the "mine" side: a second shed, or the working directory.
    label_a = "_%s_" % (shed_target_source if shed_target_source else "workingdir")
    shed_target = kwds.get("shed_target", "B")
    if "/" in shed_target:
        # URLs make bad directory names; collapse to a placeholder label.
        shed_target = "custom_shed"
    label_b = "_%s_" % shed_target
    mine = os.path.join(working, label_a)
    other = os.path.join(working, label_b)
    shed_context = get_shed_context(ctx, read_only=True, **kwds)
    # In order to download the tarball, require repository ID...
    repo_id = realized_repository.find_repository_id(ctx, shed_context)
    if repo_id is None:
        error("shed_diff: Repository [%s] does not exist in the targeted Tool Shed."
              % realized_repository.name)
        # $ diff README.rst not_a_file 2&>1 /dev/null; echo $?
        # 2
        return 2
    info("Diffing repository [%s]" % realized_repository.name)
    # Side B: the shed's current copy of the repository.
    download_tarball(
        ctx,
        shed_context,
        realized_repository,
        destination=other,
        clean=True,
        destination_is_pattern=False,
        **kwds
    )
    if shed_target_source:
        # Side A: copy downloaded from a second Tool Shed.
        new_kwds = kwds.copy()
        new_kwds["shed_target"] = shed_target_source
        shed_context = get_shed_context(ctx, read_only=True, **new_kwds)
        download_tarball(
            ctx,
            shed_context,
            realized_repository,
            destination=mine,
            clean=True,
            destination_is_pattern=False,
            **new_kwds
        )
    else:
        # Side A: local checkout realized as a tarball, then unpacked.
        tar_path = build_tarball(path)
        os.mkdir(mine)
        shell(['tar', '-xzf', tar_path, '-C', mine])
        shutil.rmtree(tar_path, ignore_errors=True)
    output = kwds.get("output")
    raw = kwds.get("raw", False)
    xml_diff = 0
    if not raw:
        # Semantically compare shed XML files first (matches are removed so
        # the raw diff below only sees genuinely different files).
        if output:
            with open(output, "w") as f:
                xml_diff = diff_and_remove(working, label_a, label_b, f)
        else:
            xml_diff = diff_and_remove(working, label_a, label_b, sys.stdout)
    # Plain recursive diff over whatever remains.
    cmd = ['diff', '-r', label_a, label_b]
    if output:
        with open(output, 'ab') as fh:
            raw_diff = shell(cmd, cwd=working, stdout=fh)
    else:
        raw_diff = shell(cmd, cwd=working)
    exit = raw_diff or xml_diff
    if not raw:
        if xml_diff:
            ctx.vlog("One or more shed XML file(s) different!")
        if raw_diff:
            ctx.vlog("One or more non-shed XML file(s) different.")
        if not xml_diff and not raw_diff:
            ctx.vlog("No differences.")
    return exit
def _diff_in(ctx, working, realized_repository, **kwds):
    """Diff the realized repository against its Tool Shed copy inside ``working``.

    Returns a truthy value when a difference is detected, 0 otherwise.
    """
    path = realized_repository.path
    shed_target_source = kwds.get("shed_target_source", None)
    # Label A is the "mine" side: a second shed, or the local checkout.
    label_a = "_%s_" % (shed_target_source if shed_target_source else "local")
    shed_target = kwds.get("shed_target", "B")
    if "/" in shed_target:
        # URLs make bad directory names; collapse to a placeholder label.
        shed_target = "custom_shed"
    label_b = "_%s_" % shed_target
    mine = os.path.join(working, label_a)
    other = os.path.join(working, label_b)
    tsi = tool_shed_client(ctx, read_only=True, **kwds)
    # In order to download the tarball, require repository ID...
    repo_id = realized_repository.find_repository_id(ctx, tsi)
    if repo_id is None:
        error("Repository [%s] does not exist in the targeted Tool Shed, can't do shed_diff"
              % realized_repository.name)
        # TODO - Should this return an error code which can be checked for in recursive mode?
        return 0
    # Side B: the shed's current copy of the repository.
    download_tarball(
        ctx,
        tsi,
        realized_repository,
        destination=other,
        clean=True,
        destination_is_pattern=False,
        **kwds
    )
    if shed_target_source:
        # Side A: copy downloaded from a second Tool Shed.
        new_kwds = kwds.copy()
        new_kwds["shed_target"] = shed_target_source
        tsi = tool_shed_client(ctx, read_only=True, **new_kwds)
        download_tarball(
            ctx,
            tsi,
            realized_repository,
            destination=mine,
            clean=True,
            destination_is_pattern=False,
            **new_kwds
        )
    else:
        # Side A: local checkout realized as a tarball, then unpacked.
        tar_path = build_tarball(path)
        cmd_template = 'mkdir "%s"; tar -xzf "%s" -C "%s"; rm -rf %s'
        shell(cmd_template % (mine, tar_path, mine, tar_path))
    output = kwds.get("output", None)
    raw = kwds.get("raw", False)
    is_diff = 0
    if not raw:
        # Semantically compare shed XML files first (matches are removed so
        # the raw diff below only sees genuinely different files).
        if output:
            with open(output, "w") as f:
                is_diff = diff_and_remove(working, label_a, label_b, f)
        else:
            is_diff = diff_and_remove(working, label_a, label_b, sys.stdout)
    # Plain recursive diff over whatever remains.
    cmd = 'cd "%s"; diff -r %s %s' % (working, label_a, label_b)
    if output:
        cmd += ">> '%s'" % output
    exit = shell(cmd) or is_diff
    return exit
def is_rev_dirty(ctx, directory):
    """Check if specified git repository has uncommitted changes."""
    # TODO: Use ENV instead of cd.
    # `git diff --quiet` exits non-zero exactly when the tree is dirty.
    status = io.shell("cd '%s' && git diff --quiet" % directory)
    return status != 0
def _diff_in(ctx, working, realized_repository, **kwds):
    """Diff the realized repository against its Tool Shed copy inside ``working``.

    Returns 0 when no differences are found, non-zero otherwise
    (2 when the repository does not exist remotely).
    """
    path = realized_repository.path
    shed_target_source = kwds.get("shed_target_source", None)
    # Label A is the "mine" side: a second shed, or the working directory.
    label_a = "_%s_" % (shed_target_source if shed_target_source else "workingdir")
    shed_target = kwds.get("shed_target", "B")
    if "/" in shed_target:
        # URLs make bad directory names; collapse to a placeholder label.
        shed_target = "custom_shed"
    label_b = "_%s_" % shed_target
    mine = os.path.join(working, label_a)
    other = os.path.join(working, label_b)
    shed_context = get_shed_context(ctx, read_only=True, **kwds)
    # In order to download the tarball, require repository ID...
    repo_id = realized_repository.find_repository_id(ctx, shed_context)
    if repo_id is None:
        error("shed_diff: Repository [%s] does not exist in the targeted Tool Shed."
              % realized_repository.name)
        # $ diff README.rst not_a_file 2&>1 /dev/null; echo $?
        # 2
        return 2
    info("Diffing repository [%s]" % realized_repository.name)
    # Side B: the shed's current copy of the repository.
    download_tarball(
        ctx,
        shed_context,
        realized_repository,
        destination=other,
        clean=True,
        destination_is_pattern=False,
        **kwds
    )
    if shed_target_source:
        # Side A: copy downloaded from a second Tool Shed.
        new_kwds = kwds.copy()
        new_kwds["shed_target"] = shed_target_source
        shed_context = get_shed_context(ctx, read_only=True, **new_kwds)
        download_tarball(
            ctx,
            shed_context,
            realized_repository,
            destination=mine,
            clean=True,
            destination_is_pattern=False,
            **new_kwds
        )
    else:
        # Side A: local checkout realized as a tarball, then unpacked.
        tar_path = build_tarball(path)
        cmd_template = 'mkdir "%s"; tar -xzf "%s" -C "%s"; rm -rf %s'
        shell(cmd_template % (mine, tar_path, mine, tar_path))
    output = kwds.get("output", None)
    raw = kwds.get("raw", False)
    xml_diff = 0
    if not raw:
        # Semantically compare shed XML files first (matches are removed so
        # the raw diff below only sees genuinely different files).
        if output:
            with open(output, "w") as f:
                xml_diff = diff_and_remove(working, label_a, label_b, f)
        else:
            xml_diff = diff_and_remove(working, label_a, label_b, sys.stdout)
    # Plain recursive diff over whatever remains, appended to any output file.
    cmd = 'cd "%s"; diff -r %s %s' % (working, label_a, label_b)
    if output:
        cmd += " >> '%s'" % output
    raw_diff = shell(cmd)
    exit = raw_diff or xml_diff
    if not raw:
        if xml_diff:
            ctx.vlog("One or more shed XML file(s) different!")
        if raw_diff:
            ctx.vlog("One or more non-shed XML file(s) different.")
        if not xml_diff and not raw_diff:
            ctx.vlog("No differences.")
    return exit