def r_library_installer(config):
    """Install R libraries using CRAN and Bioconductor.

    Writes a temporary install_packages.R script describing the requested
    packages, runs it with the first usable Rscript (preferring the one
    bundled with conda), then removes the script.
    """
    if not (config.get("cran") or config.get("bioc") or config.get("github")):
        return
    with shared._make_tmp_dir() as tmp_dir:
        with cd(tmp_dir):
            # Create an Rscript file with install details.
            script = os.path.join(tmp_dir, "install_packages.R")
            _make_install_script(script, config)
            # Prefer the Rscript shipped alongside conda, falling back to PATH.
            candidates = []
            conda_bin = shared._conda_cmd(env)
            if conda_bin:
                conda_rscript = os.path.join(os.path.dirname(conda_bin), "Rscript")
                candidates.append(fabutils.find_cmd(env, conda_rscript, "--version"))
            candidates.append(fabutils.find_cmd(env, "Rscript", "--version"))
            ran = False
            for candidate in candidates:
                if candidate:
                    env.safe_run("%s %s" % (candidate, script))
                    ran = True
                    break
            if not ran:
                env.logger.warn("Rscript not found; skipping install of R libraries.")
            # Clean up the generated script whether or not it ran.
            env.safe_run("rm -f %s" % script)
def install_packages(env, to_install=None, packages=None):
    """Install conda packages from packages-conda.yaml or an explicit list.

    Resolves the package list from the flavor configuration when
    ``to_install`` is given, installs with the configured channels, links
    installed binaries into place and works around a bioconda ncurses/R
    interaction.
    """
    if shared._is_anaconda(env):
        conda_bin = shared._conda_cmd(env)
        config_file = get_config_file(env, "packages-conda.yaml")
        if config_file.base is None:
            # Fix: previously this path only handled packages=None and left
            # `channels` unbound; explicit packages without a config file
            # crashed. Later revisions of this installer bind channels here.
            packages = packages or []
            channels = ""
        else:
            if to_install:
                (packages, _) = _yaml_to_packages(config_file.base, to_install,
                                                  config_file.dist)
            with open(config_file.base) as in_handle:
                channels = " ".join(["-c %s" % x for x in
                                     yaml.safe_load(in_handle).get("channels", [])])
        conda_info = json.loads(env.safe_run_output("{conda_bin} info --json".format(**locals())))
        # install our customized packages
        if len(packages) > 0:
            pkgs_str = " ".join(packages)
            env.safe_run("{conda_bin} install -y {channels} {pkgs_str}".format(**locals()))
            for package in packages:
                _link_bin(package, env, conda_info, conda_bin)
        # work around ncurses issues -- we don't always get the R version
        # https://github.com/bioconda/bioconda-recipes/issues/637
        env.safe_run("{conda_bin} update -y -c r ncurses".format(**locals()))
        for pkg in ["python", "conda", "pip"]:
            _link_bin(pkg, env, conda_info, conda_bin, [pkg], "bcbio_")
def install_packages(env, to_install=None, packages=None):
    """Install conda packages from the flavor configuration.

    Installs configured packages with their channels, links their binaries,
    then uninstalls packages (curl) that the system should supply instead.
    """
    if shared._is_anaconda(env):
        conda_bin = shared._conda_cmd(env)
        config_file = get_config_file(env, "packages-conda.yaml")
        if config_file.base is None and packages is None:
            packages = []
            # Fix: bind channels on the no-config path; it was previously
            # left undefined here (later revisions set it the same way).
            channels = ""
        else:
            if to_install:
                (packages, _) = _yaml_to_packages(config_file.base, to_install,
                                                  config_file.dist)
            with open(config_file.base) as in_handle:
                channels = " ".join(["-c %s" % x for x in
                                     yaml.safe_load(in_handle).get("channels", [])])
        conda_info = json.loads(env.safe_run_output("{conda_bin} info --json".format(**locals())))
        if len(packages) > 0:
            pkgs_str = " ".join(packages)
            env.safe_run("{conda_bin} install -y {channels} {pkgs_str}".format(**locals()))
            for package in packages:
                _link_bin(package, env, conda_info, conda_bin)
        for pkg in ["python", "conda", "pip"]:
            _link_bin(pkg, env, conda_info, conda_bin, [pkg], "bcbio_")
        # remove packages we want the system to supply
        # curl https://github.com/ContinuumIO/anaconda-issues/issues/72
        system_packages = ["curl"]
        pkgs_str = " ".join(system_packages)
        with settings(warn_only=True):
            env.safe_run("{conda_bin} uninstall -y {pkgs_str}".format(**locals()))
def install_packages(env, to_install=None, packages=None):
    """Install conda packages, symlink their binaries and drop problem packages.

    Variant using ``_symlink_bin`` for binary linking; removes curl at the
    end since the conda build can cause failures.
    """
    if shared._is_anaconda(env):
        conda_bin = shared._conda_cmd(env)
        config_file = get_config_file(env, "packages-conda.yaml")
        if config_file.base is None and packages is None:
            packages = []
            # Fix: bind channels on the no-config path; it was previously
            # left undefined here (later revisions set it the same way).
            channels = ""
        else:
            if to_install:
                (packages, _) = _yaml_to_packages(config_file.base, to_install,
                                                  config_file.dist)
            with open(config_file.base) as in_handle:
                channels = " ".join(["-c %s" % x for x in
                                     yaml.safe_load(in_handle).get("channels", [])])
        conda_info = json.loads(env.safe_run_output("{conda_bin} info --json".format(**locals())))
        if len(packages) > 0:
            pkgs_str = " ".join(packages)
            env.safe_run("{conda_bin} install -y {channels} {pkgs_str}".format(**locals()))
            for package in packages:
                _symlink_bin(package, env, conda_info, conda_bin)
        for pkg in ["python", "conda", "pip"]:
            _symlink_bin(pkg, env, conda_info, conda_bin, [pkg], "bcbio_")
        # remove packages that can cause failures
        # curl https://github.com/ContinuumIO/anaconda-issues/issues/72
        problem_packages = ["curl"]
        pkgs_str = " ".join(problem_packages)
        with settings(warn_only=True):
            env.safe_run("{conda_bin} uninstall -y {pkgs_str}".format(**locals()))
def r_library_installer(config):
    """Install R libraries using CRAN and Bioconductor.

    Generates a temporary R script from the configuration, runs it with the
    best available Rscript binary and removes the script afterwards.
    """
    has_libs = any(config.get(key) for key in ("cran", "bioc", "github"))
    if has_libs:
        with shared._make_tmp_dir() as tmp_dir:
            with cd(tmp_dir):
                # Create an Rscript file with install details.
                out_file = os.path.join(tmp_dir, "install_packages.R")
                _make_install_script(out_file, config)
                # Locate Rscript: the conda-provided copy takes priority over
                # anything on the standard PATH.
                rscripts = []
                conda_bin = shared._conda_cmd(env)
                if conda_bin:
                    rscripts.append(fabutils.find_cmd(
                        env, os.path.join(os.path.dirname(conda_bin), "Rscript"),
                        "--version"))
                rscripts.append(fabutils.find_cmd(env, "Rscript", "--version"))
                rscript = next((found for found in rscripts if found), None)
                if rscript:
                    env.safe_run("%s %s" % (rscript, out_file))
                else:
                    env.logger.warn("Rscript not found; skipping install of R libraries.")
                # Remove the generated script in either case.
                env.safe_run("rm -f %s" % out_file)
def install_packages(env, to_install=None, packages=None):
    """Install conda packages, migrating perl to perl-threaded first.

    Uninstalls plain perl so the threaded build is used, installs the
    configured packages, links binaries and removes system-supplied
    packages (curl) at the end.
    """
    if shared._is_anaconda(env):
        conda_bin = shared._conda_cmd(env)
        config_file = get_config_file(env, "packages-conda.yaml")
        if config_file.base is None and packages is None:
            packages = []
            # Fix: bind channels on the no-config path; it was previously
            # left undefined here (later revisions set it the same way).
            channels = ""
        else:
            if to_install:
                (packages, _) = _yaml_to_packages(config_file.base, to_install,
                                                  config_file.dist)
            with open(config_file.base) as in_handle:
                channels = " ".join(["-c %s" % x for x in
                                     yaml.safe_load(in_handle).get("channels", [])])
        conda_info = json.loads(env.safe_run_output("{conda_bin} info --json".format(**locals())))
        # Transition change -- ensure installed perl is perl-threaded
        system_packages = ["perl", "perl-threaded"]
        pkgs_str = " ".join(system_packages)
        with settings(warn_only=True):
            env.safe_run("{conda_bin} uninstall -y {pkgs_str}".format(**locals()))
        # install our customized packages
        if len(packages) > 0:
            pkgs_str = " ".join(packages)
            env.safe_run("{conda_bin} install -y {channels} {pkgs_str}".format(**locals()))
            for package in packages:
                _link_bin(package, env, conda_info, conda_bin)
        for pkg in ["python", "conda", "pip"]:
            _link_bin(pkg, env, conda_info, conda_bin, [pkg], "bcbio_")
        # remove packages we want the system to supply
        # curl https://github.com/ContinuumIO/anaconda-issues/issues/72
        system_packages = ["curl"]
        pkgs_str = " ".join(system_packages)
        with settings(warn_only=True):
            env.safe_run("{conda_bin} uninstall -y {pkgs_str}".format(**locals()))
def install_packages(env, to_install=None, packages=None):
    """Install conda packages, honoring an env-provided conda YAML override.

    ``env.conda_yaml`` (when set) replaces the flavor's packages-conda.yaml.
    Installs quietly, links binaries and updates ncurses to work around an
    R version detection issue.
    """
    if shared._is_anaconda(env):
        conda_bin = shared._conda_cmd(env)
        if hasattr(env, "conda_yaml"):
            Config = collections.namedtuple("Config", "base dist")
            config_file = Config(base=env.conda_yaml, dist=None)
        else:
            config_file = get_config_file(env, "packages-conda.yaml")
        if config_file.base is None and packages is None:
            packages = []
            # Fix: bind channels on the no-config path; it was previously
            # left undefined here (later revisions set it the same way).
            channels = ""
        else:
            if to_install:
                (packages, _) = _yaml_to_packages(config_file.base, to_install,
                                                  config_file.dist)
            with open(config_file.base) as in_handle:
                channels = " ".join(["-c %s" % x for x in
                                     yaml.safe_load(in_handle).get("channels", [])])
        conda_info = json.loads(env.safe_run_output("{conda_bin} info --json".format(**locals())))
        # install our customized packages
        if len(packages) > 0:
            pkgs_str = " ".join(packages)
            env.safe_run("{conda_bin} install --quiet -y {channels} {pkgs_str}".format(**locals()))
            for package in packages:
                _link_bin(package, env, conda_info, conda_bin)
        # work around ncurses issues -- we don't always get the R version
        # https://github.com/bioconda/bioconda-recipes/issues/637
        env.safe_run("{conda_bin} update -y -c r ncurses".format(**locals()))
        for pkg in ["python", "conda", "pip"]:
            _link_bin(pkg, env, conda_info, conda_bin, [pkg], "bcbio_")
def install_packages(env, to_install=None, packages=None):
    """Install conda packages from the flavor config, one package at a time.

    Resolves packages via ``to_install`` when given; installs each package
    individually with the configured channels.
    """
    if shared._is_anaconda(env):
        conda_bin = shared._conda_cmd(env)
        config_file = get_config_file(env, "packages-conda.yaml")
        if to_install:
            (packages, _) = _yaml_to_packages(config_file.base, to_install,
                                              config_file.dist)
        with open(config_file.base) as in_handle:
            channels = " ".join(["-c %s" % x for x in
                                 yaml.safe_load(in_handle).get("channels", [])])
        # Fix: guard with truthiness -- the original `len(packages) > 0`
        # raised TypeError when to_install was not given and packages was
        # left as its default None.
        if packages:
            for pkg in packages:
                env.safe_run("{conda_bin} install -y {channels} {pkg}".format(**locals()))
def install_packages(env, to_install=None, packages=None):
    """Install conda packages, optionally split across isolated conda envs.

    Reads packages/channels from ``env.conda_yaml`` or the flavor's
    packages-conda.yaml, creates per-group conda environments, removes
    known-problematic packages, installs each group and links binaries.
    """
    if shared._is_anaconda(env):
        conda_bin = shared._conda_cmd(env)
        # env.conda_yaml, when present, overrides the flavor configuration.
        if hasattr(env, "conda_yaml"):
            Config = collections.namedtuple("Config", "base dist")
            config_file = Config(base=env.conda_yaml, dist=None)
        else:
            config_file = get_config_file(env, "packages-conda.yaml")
        if config_file.base is None and packages is None:
            packages = []
            channels = ""
        else:
            if to_install:
                (packages, _) = _yaml_to_packages(config_file.base, to_install,
                                                  config_file.dist)
            with open(config_file.base) as in_handle:
                channels = " ".join(["-c %s" % x for x in
                                     yaml.safe_load(in_handle).get("channels", [])])
        # Create any named conda environments the package list requires,
        # then clean them before installing into them.
        conda_envs = _create_environments(env, conda_bin, packages)
        for env_dir in conda_envs.values():
            _clean_environment(env_dir)
        conda_info = json.loads(env.safe_run_output("{conda_bin} info --json".format(**locals())))
        # libedit pins to curses 6.0 but bioconda requires 5.9
        # Ensure we have conda-forge conda installed, otherwise creates resolution
        # and package issues with removed libedit. Hopefully can remove along with libedit
        # hack when conda-forge synchronizes ncurses and conda with the base install.
        env.safe_run("{conda_bin} install -y {channels} conda python=2".format(**locals()))
        # Uninstall old R packages that clash with updated versions
        # Temporary fix to allow upgrades from older versions that have migrated
        # r-tximport is now bioconductor-tximport
        # py2cairo is incompatible with r 3.4.1
        for problem in ["r-tximport", "py2cairo", "libedit"]:
            cur_packages = [x["name"] for x in
                            json.loads(env.safe_run_output("{conda_bin} list --json {problem}".format(**locals())))]
            if problem in cur_packages:
                env.safe_run("{conda_bin} remove --force -y {problem}".format(**locals()))
        # install our customized packages
        if len(packages) > 0:
            # Packages are grouped by target conda environment; an empty
            # env_name means the base environment.
            for env_name, env_packages in _split_by_condaenv(packages):
                if env_name:
                    assert env_name in conda_envs, (env_name, conda_envs)
                    env_str = "-n %s" % env_name
                else:
                    env_str = ""
                pkgs_str = " ".join(env_packages)
                env.safe_run("{conda_bin} install -y {env_str} {channels} {pkgs_str}".format(**locals()))
                conda_pkg_list = json.loads(env.safe_run_output(
                    "{conda_bin} list --json {env_str}".format(**locals())))
                for package in env_packages:
                    _link_bin(package, env, conda_info, conda_bin, conda_pkg_list,
                              conda_envdir=conda_envs.get(env_name))
        # Link base-environment python/conda/pip with a bcbio_ prefix.
        conda_pkg_list = json.loads(env.safe_run_output("{conda_bin} list --json".format(**locals())))
        for pkg in ["python", "conda", "pip"]:
            _link_bin(pkg, env, conda_info, conda_bin, conda_pkg_list,
                      files=[pkg], prefix="bcbio_")
def install_packages(env, to_install=None, packages=None):
    """Old installation, based on pre-configured fabric inputs."""
    from cloudbio.flavor.config import get_config_file
    from cloudbio.custom import shared
    if not shared._is_anaconda(env):
        return
    conda_bin = shared._conda_cmd(env)
    # The fabric environment may point directly at a conda YAML file;
    # otherwise fall back to the flavor's packages-conda.yaml.
    if hasattr(env, "conda_yaml"):
        FileConfig = collections.namedtuple("Config", "base dist")
        config_file = FileConfig(base=env.conda_yaml, dist=None)
    else:
        config_file = get_config_file(env, "packages-conda.yaml")
    install_in(conda_bin, env.system_install, config_file.base, packages)
def _python_library_installer(config):
    """Install python specific libraries using easy_install.

    Handles using isolated anaconda environments.
    """
    if shared._is_anaconda(env):
        # Anaconda: conda-managed packages first; pip runs without sudo
        # since the environment is user-writable.
        for package in env.flavor.rewrite_config_items("python", config.get("conda", [])):
            env.safe_run("{0} install --yes {1}".format(shared._conda_cmd(env), package))
        cmd = env.safe_run
    else:
        # System python: refresh pip via easy_install, then install with sudo.
        version_ext = "-%s" % env.python_version_ext if env.python_version_ext else ""
        env.safe_sudo("easy_install%s -U pip" % version_ext)
        cmd = env.safe_sudo
    for package in env.flavor.rewrite_config_items("python", config['pypi']):
        cmd("{0} install --upgrade {1}".format(shared._pip_cmd(env), package))
def _python_library_installer(config):
    """Install python specific libraries using easy_install.

    Handles using isolated anaconda environments.
    """
    is_conda = shared._is_anaconda(env)
    if is_conda:
        # Conda environments are user-writable, so no sudo needed.
        for pname in env.flavor.rewrite_config_items("python", config.get("conda", [])):
            env.safe_run("{0} install --yes {1}".format(
                shared._conda_cmd(env), pname))
    else:
        # Make sure pip is current on the system python before installing.
        version_ext = "-%s" % env.python_version_ext if env.python_version_ext else ""
        env.safe_sudo("easy_install%s -U pip" % version_ext)
    cmd = env.safe_run if is_conda else env.safe_sudo
    for pname in env.flavor.rewrite_config_items("python", config['pypi']):
        cmd("{0} install --upgrade {1}".format(shared._pip_cmd(env), pname))
def install_packages(env, to_install=None, packages=None):
    """Install conda packages split across isolated conda environments.

    Reads packages/channels from ``env.conda_yaml`` or the flavor's
    packages-conda.yaml, creates per-group conda environments, removes
    packages known to clash with upgrades, then installs each group quietly
    and links the resulting binaries.
    """
    if shared._is_anaconda(env):
        conda_bin = shared._conda_cmd(env)
        # env.conda_yaml, when present, overrides the flavor configuration.
        if hasattr(env, "conda_yaml"):
            Config = collections.namedtuple("Config", "base dist")
            config_file = Config(base=env.conda_yaml, dist=None)
        else:
            config_file = get_config_file(env, "packages-conda.yaml")
        if config_file.base is None and packages is None:
            packages = []
            # Fix: bind channels on the no-config path; it was previously
            # left undefined here (the follow-up revision sets it this way).
            channels = ""
        else:
            if to_install:
                (packages, _) = _yaml_to_packages(config_file.base, to_install,
                                                  config_file.dist)
            with open(config_file.base) as in_handle:
                channels = " ".join(["-c %s" % x for x in
                                     yaml.safe_load(in_handle).get("channels", [])])
        conda_envs = _create_environments(env, conda_bin, packages)
        conda_info = json.loads(env.safe_run_output("{conda_bin} info --json".format(**locals())))
        # Uninstall old R packages that clash with updated versions
        # Temporary fix to allow upgrades from older versions that have migrated
        # r-tximport is now bioconductor-tximport
        # py2cairo is incompatible with r 3.4.1
        # libedit pins to curses 6.0 but bioconda requires 5.9
        for problem in ["r-tximport", "py2cairo", "libedit"]:
            cur_packages = [x["name"] for x in
                            json.loads(env.safe_run_output("{conda_bin} list --json {problem}".format(**locals())))]
            if problem in cur_packages:
                env.safe_run("{conda_bin} remove --force -y {problem}".format(**locals()))
        # install our customized packages
        if len(packages) > 0:
            for env_name, env_packages in _split_by_condaenv(packages):
                if env_name:
                    assert env_name in conda_envs, (env_name, conda_envs)
                    env_str = "-n %s" % env_name
                else:
                    env_str = ""
                pkgs_str = " ".join(env_packages)
                env.safe_run("{conda_bin} install --quiet -y {env_str} {channels} {pkgs_str}".format(**locals()))
                conda_pkg_list = json.loads(env.safe_run_output(
                    "{conda_bin} list --json {env_str}".format(**locals())))
                for package in env_packages:
                    _link_bin(package, env, conda_info, conda_bin, conda_pkg_list,
                              conda_envdir=conda_envs.get(env_name))
        conda_pkg_list = json.loads(env.safe_run_output("{conda_bin} list --json".format(**locals())))
        for pkg in ["python", "conda", "pip"]:
            _link_bin(pkg, env, conda_info, conda_bin, conda_pkg_list,
                      files=[pkg], prefix="bcbio_")
def install_packages(env, to_install=None, packages=None):
    """Install conda packages listed in the flavor's packages-conda.yaml.

    Minimal variant: resolves packages and channels, then performs a single
    conda install.
    """
    if shared._is_anaconda(env):
        conda_bin = shared._conda_cmd(env)
        config_file = get_config_file(env, "packages-conda.yaml")
        if config_file.base is None and packages is None:
            packages = []
            # Fix: bind channels on the no-config path; it was previously
            # left undefined here (later revisions set it the same way).
            channels = ""
        else:
            if to_install:
                (packages, _) = _yaml_to_packages(config_file.base, to_install,
                                                  config_file.dist)
            with open(config_file.base) as in_handle:
                channels = " ".join(["-c %s" % x for x in
                                     yaml.safe_load(in_handle).get("channels", [])])
        if len(packages) > 0:
            pkgs_str = " ".join(packages)
            env.safe_run("{conda_bin} install -y {channels} {pkgs_str}".format(**locals()))
def _python_library_installer(config):
    """Install python specific libraries using pip, conda and easy_install.

    Handles using isolated anaconda environments.
    """
    if shared._is_anaconda(env):
        conda_bin = shared._conda_cmd(env)
        for lib in env.flavor.rewrite_config_items("python", config.get("conda", [])):
            env.safe_run("{0} install --yes {1}".format(conda_bin, lib))
        cmd = env.safe_run
        # Refresh distribute with conda's own easy_install; failures are
        # tolerated (warn_only).
        conda_easy_install = os.path.join(os.path.dirname(conda_bin), "easy_install")
        with settings(warn_only=True):
            cmd("%s -U distribute" % conda_easy_install)
    else:
        pip_bin = shared._pip_cmd(env)
        ei_bin = pip_bin.replace("pip", "easy_install")
        env.safe_sudo("%s -U pip" % ei_bin)
        with settings(warn_only=True):
            env.safe_sudo("%s -U distribute" % ei_bin)
        cmd = env.safe_sudo
    for lib in env.flavor.rewrite_config_items("python", config['pypi']):
        # --allow-unverified/--allow-external: fixes problem with packages not being in pypi
        cmd("{0} install --upgrade {1} --allow-unverified {1} --allow-external {1}".format(shared._pip_cmd(env), lib))
def install_packages(env, to_install=None, packages=None):
    """Install conda packages across isolated conda environments.

    Reads packages/channels from ``env.conda_yaml`` or the flavor's
    packages-conda.yaml, pins conda below 4.6.0 for resolution speed,
    removes packages known to clash with upgrades, installs each
    environment group with a pinned python version and links binaries.
    """
    if shared._is_anaconda(env):
        conda_bin = shared._conda_cmd(env)
        # env.conda_yaml, when present, overrides the flavor configuration.
        if hasattr(env, "conda_yaml"):
            Config = collections.namedtuple("Config", "base dist")
            config_file = Config(base=env.conda_yaml, dist=None)
        else:
            config_file = get_config_file(env, "packages-conda.yaml")
        if config_file.base is None and packages is None:
            packages = []
            channels = ""
        else:
            if to_install:
                (packages, _) = _yaml_to_packages(config_file.base, to_install,
                                                  config_file.dist)
            with open(config_file.base) as in_handle:
                channels = " ".join([
                    "-c %s" % x
                    for x in yaml.safe_load(in_handle).get("channels", [])
                ])
        # Create any named conda environments the package list requires,
        # then clean them before installing into them.
        conda_envs = _create_environments(env, conda_bin, packages)
        for env_dir in conda_envs.values():
            _clean_environment(env_dir)
        conda_info = json.loads(
            env.safe_run_output("{conda_bin} info --json".format(**locals())))
        # Temporary workaround:
        # Ensure we have conda-forge conda installed, < 4.6.0, since 4.6.0 resolves slowly
        # with conda-forge https://groups.google.com/d/msg/biovalidation/ZfcH1K7I-_I/q8FxBu9BDgAJ
        py_version = ENV_PY_VERSIONS[None]
        conda_max_version = "4.5.12"
        # NOTE(review): check_output returns bytes on python 3; this
        # LooseVersion comparison looks python 2 oriented -- confirm.
        conda_version = subprocess.check_output(
            [conda_bin, "--version"], stderr=subprocess.STDOUT).split()[-1]
        if LooseVersion(conda_version) > LooseVersion(conda_max_version):
            env.safe_run(
                "{conda_bin} install -y {channels} 'conda={conda_max_version}' {py_version}"
                .format(**locals()))
        # Uninstall old R packages that clash with updated versions
        # Temporary fix to allow upgrades from older versions that have migrated
        # r-tximport is now bioconductor-tximport
        # py2cairo is incompatible with r 3.4.1
        for problem in ["r-tximport", "py2cairo", "libedit"]:
            cur_packages = [
                x["name"] for x in json.loads(
                    env.safe_run_output("{conda_bin} list --json {problem}".
                                        format(**locals())))
            ]
            if problem in cur_packages:
                env.safe_run("{conda_bin} remove --force -y {problem}".format(
                    **locals()))
        # install our customized packages
        if len(packages) > 0:
            # Packages are grouped by target conda environment; an empty
            # env_name means the base environment.
            for env_name, env_packages in _split_by_condaenv(packages):
                if env_name:
                    assert env_name in conda_envs, (env_name, conda_envs)
                    env_str = "-n %s" % env_name
                else:
                    env_str = ""
                pkgs_str = " ".join(["'%s'" % x for x in sorted(env_packages)])
                py_version = ENV_PY_VERSIONS[env_name]
                if "deepvariant" in env_packages:
                    # Ignore /etc/boto.cfg which creates conflicts with conda gsutils
                    # https://github.com/GoogleCloudPlatform/gsutil/issues/516
                    exports = "export BOTO_CONFIG=/ignoreglobal && "
                else:
                    exports = ""
                env.safe_run(
                    "{exports}{conda_bin} install -y {env_str} {channels} "
                    "{py_version} {pkgs_str}".format(**locals()))
                conda_pkg_list = json.loads(
                    env.safe_run_output(
                        "{conda_bin} list --json {env_str}".format(
                            **locals())))
                for package in env_packages:
                    _link_bin(package, env, conda_info, conda_bin, conda_pkg_list,
                              conda_envdir=conda_envs.get(env_name))
        # Link base-environment python/conda/pip with a bcbio_ prefix.
        conda_pkg_list = json.loads(
            env.safe_run_output("{conda_bin} list --json".format(**locals())))
        for pkg in ["python", "conda", "pip"]:
            _link_bin(pkg, env, conda_info, conda_bin, conda_pkg_list,
                      files=[pkg], prefix="bcbio_")