Exemplo n.º 1
0
def install_packages(env, to_install=None, packages=None):
    """Install conda packages and link their binaries.

    Configuration comes from ``env.conda_yaml`` when set, otherwise from the
    ``packages-conda.yaml`` config file.

    `to_install` is a CloudBioLinux compatible set of top level items to add,
    alternatively `packages` is a list of raw package names.
    """
    if shared._is_anaconda(env):
        conda_bin = shared._conda_cmd(env)
        if hasattr(env, "conda_yaml"):
            # Direct YAML override on the environment; mimic the (base, dist)
            # interface returned by get_config_file.
            Config = collections.namedtuple("Config", "base dist")
            config_file = Config(base=env.conda_yaml, dist=None)
        else:
            config_file = get_config_file(env, "packages-conda.yaml")
        if config_file.base is None:
            # No config file available: fall back to explicitly passed
            # packages. Previously this crashed with open(None) below when
            # `packages` was supplied without a config file.
            packages = packages or []
            channels = ""
        else:
            if to_install:
                (packages, _) = _yaml_to_packages(config_file.base, to_install, config_file.dist)
            with open(config_file.base) as in_handle:
                channels = " ".join(["-c %s" % x for x in yaml.safe_load(in_handle).get("channels", [])])
        # Guard against None when neither to_install nor packages were given.
        packages = packages or []
        conda_info = json.loads(env.safe_run_output("{conda_bin} info --json".format(**locals())))
        # install our customized packages
        if len(packages) > 0:
            pkgs_str = " ".join(packages)
            env.safe_run("{conda_bin} install --quiet -y {channels} {pkgs_str}".format(**locals()))
            for package in packages:
                _link_bin(package, env, conda_info, conda_bin)
        # work around ncurses issues -- we don't always get the R version
        # https://github.com/bioconda/bioconda-recipes/issues/637
        env.safe_run("{conda_bin} update -y -c r ncurses".format(**locals()))
        for pkg in ["python", "conda", "pip"]:
            _link_bin(pkg, env, conda_info, conda_bin, [pkg], "bcbio_")
Exemplo n.º 2
0
def install_packages(env, to_install=None, packages=None):
    """Install conda packages from ``packages-conda.yaml`` and link binaries.

    `to_install` is a CloudBioLinux compatible set of top level items to add,
    alternatively `packages` is a list of raw package names.
    """
    if shared._is_anaconda(env):
        conda_bin = shared._conda_cmd(env)
        config_file = get_config_file(env, "packages-conda.yaml")
        if config_file.base is None:
            # No config file available: fall back to explicitly passed
            # packages instead of crashing on open(None) below.
            packages = packages or []
            channels = ""
        else:
            if to_install:
                (packages, _) = _yaml_to_packages(config_file.base, to_install,
                                                  config_file.dist)
            with open(config_file.base) as in_handle:
                channels = " ".join([
                    "-c %s" % x
                    for x in yaml.safe_load(in_handle).get("channels", [])
                ])
        # Guard against None when neither to_install nor packages were given.
        packages = packages or []
        conda_info = json.loads(
            env.safe_run_output("{conda_bin} info --json".format(**locals())))
        # install our customized packages
        if len(packages) > 0:
            pkgs_str = " ".join(packages)
            env.safe_run("{conda_bin} install -y {channels} {pkgs_str}".format(
                **locals()))
            for package in packages:
                _link_bin(package, env, conda_info, conda_bin)
        # work around ncurses issues -- we don't always get the R version
        # https://github.com/bioconda/bioconda-recipes/issues/637
        env.safe_run("{conda_bin} update -y -c r ncurses".format(**locals()))
        for pkg in ["python", "conda", "pip"]:
            _link_bin(pkg, env, conda_info, conda_bin, [pkg], "bcbio_")
Exemplo n.º 3
0
def install_packages(env, to_install=None, packages=None):
    """Install packages using the home brew package manager.

    Handles upgrading brew, tapping required repositories and installing or upgrading
    packages as appropriate.

    `to_install` is a CloudBioLinux compatible set of top level items to add,
    alternatively `packages` is a list of raw package names.
    """
    config_file = get_config_file(env, "packages-homebrew.yaml")
    if to_install:
        (packages, _) = _yaml_to_packages(config_file.base, to_install, config_file.dist)
    # Guard against None so `len(packages)` below cannot raise a TypeError
    # when neither `to_install` nor `packages` was supplied.
    packages = packages or []
    # if we have no packages to install, do not try to install or update brew
    if len(packages) == 0:
        return
    system.install_homebrew(env)
    brew_cmd = _brew_cmd(env)
    formula_repos = ["homebrew/science", "chapmanb/cbl"]
    current_taps = set([x.strip() for x in env.safe_run_output("%s tap" % brew_cmd).split()])
    _safe_update(env, brew_cmd, formula_repos, current_taps)
    for repo in formula_repos:
        if repo not in current_taps:
            env.safe_run("%s tap %s" % (brew_cmd, repo))
    env.safe_run("%s tap --repair" % brew_cmd)
    ipkgs = {"outdated": set([x.strip() for x in env.safe_run_output("%s outdated" % brew_cmd).split()]),
             "current": _get_current_pkgs(env, brew_cmd)}
    _install_brew_baseline(env, brew_cmd, ipkgs, packages)
    for pkg_str in packages:
        _install_pkg(env, pkg_str, brew_cmd, ipkgs)
Exemplo n.º 4
0
def install_packages(env, to_install=None, packages=None):
    """Install packages using the home brew package manager.

    Handles upgrading brew, tapping required repositories and installing or upgrading
    packages as appropriate.

    `to_install` is a CloudBioLinux compatible set of top level items to add,
    alternatively `packages` is a list of raw package names.
    """
    config_file = get_config_file(env, "packages-homebrew.yaml")
    if to_install:
        (packages, _) = _yaml_to_packages(config_file.base, to_install, config_file.dist)
    # Guard against None so the iteration below cannot raise a TypeError
    # when neither `to_install` nor `packages` was supplied.
    packages = packages or []
    brew_cmd = _brew_cmd(env)
    formula_repos = ["homebrew/science"]
    env.safe_run("%s update" % brew_cmd)
    current_taps = set([x.strip() for x in env.safe_run_output("%s tap" % brew_cmd).split()])
    for repo in formula_repos:
        if repo not in current_taps:
            env.safe_run("%s tap %s" % (brew_cmd, repo))
    current_pkgs = set([x.strip() for x in env.safe_run_output("%s list" % brew_cmd).split()])
    outdated_pkgs = set([x.strip() for x in env.safe_run_output("%s outdated" % brew_cmd).split()])
    for pkg in packages:
        # Upgrade outdated packages, install missing ones, skip current ones.
        if pkg in outdated_pkgs:
            brew_subcmd = "upgrade"
        elif pkg in current_pkgs:
            brew_subcmd = None
        else:
            brew_subcmd = "install"
        if brew_subcmd:
            env.safe_run("%s %s %s" % (brew_cmd, brew_subcmd, pkg))
Exemplo n.º 5
0
def _parse_fabricrc(env):
    """Defaults from fabricrc.txt file; loaded if not specified at commandline.
    """
    env.config_dir = os.path.join(os.path.dirname(__file__), "..", "config")
    # `dict.has_key` was removed in Python 3; `in` behaves identically on 2 and 3.
    if "distribution" not in env and "system_install" not in env:
        env.logger.info("Reading default fabricrc.txt")
        env.update(load_settings(get_config_file(env, "fabricrc.txt").base))
Exemplo n.º 6
0
def install_gatk(env):
    """GATK-lite: library for writing efficient analysis tools using next-generation sequencing data
    http://www.broadinstitute.org/gatk/
    """
    # Fetch and install the main GATK-lite java executable.
    version = "2.3-9-gdcdccbb"
    ext = ".tar.bz2"
    url = ("ftp://ftp.broadinstitute.org/pub/gsa/GenomeAnalysisTK/"
           "GenomeAnalysisTKLite-%s%s" % (version, ext))
    _java_install("gatk", version, url, env)
    # Install R gsalib for report and pdf generation
    # XXX Currently have issues with gsalib R installation.
    # Need to make this into a proper R package and re-enable
    if False:
        with quiet():
            have_gsalib = env.safe_run("Rscript -e '\"gsalib\" %in% installed.packages()'")
        if have_gsalib and "FALSE" in have_gsalib:
            # gsalib is missing: pull in its R dependencies first.
            rlib_config = get_config_file(env, "r-libs.yaml").base
            with open(rlib_config) as in_handle:
                config = yaml.load(in_handle)
            config.update({"bioc": [],
                           "update_packages": False,
                           "cran": ["ggplot2", "gplots"]})
            libraries.r_library_installer(config)
            # Then build gsalib itself from the GATK repository.
            git_repo = "git clone --depth 1 https://github.com/broadgsa/gatk.git"

            def install_gsalib(env):
                env.safe_sudo("ant gsalib")

            _get_install(git_repo, env, install_gsalib)
Exemplo n.º 7
0
def _setup_env(env):
    """
    Setup the system environment required to run CloudMan. This means
    installing required system-level packages (as defined in CBL's
    ``packages.yaml``, or a flavor thereof) and Python dependencies
    (i.e., libraries) as defined in CloudMan's ``requirements.txt`` file.
    """
    # System-level packages are only handled for Debian-derived distributions.
    if env.distribution in ["debian", "ubuntu"]:
        config_file = get_config_file(env, "packages.yaml")
        (packages, _) = _yaml_to_packages(config_file.base, 'cloudman')
        # The edition and then the flavor each get a chance to rewrite the list.
        for rewriter in (env.edition, env.flavor):
            packages = rewriter.rewrite_config_items("packages", packages)
        _setup_apt_automation()
        _apt_packages(pkg_list=packages)
    elif env.distribution in ["centos", "scientificlinux"]:
        env.logger.warn("No CloudMan system package dependencies for CentOS")
    # Build CloudMan's Python virtualenv from its requirements file.
    with _make_tmp_dir() as tmp_dir:
        with cd(tmp_dir):
            reqs_url = os.path.join(CM_REPO_ROOT_URL, 'requirements.txt')
            _create_python_virtualenv(env, 'CM', reqs_url=reqs_url)
    # Fetch a custom vimrc into the system-wide location.
    vimrc_url = os.path.join(MI_REPO_ROOT_URL, 'conf_files', 'vimrc')
    remote_file = '/etc/vim/vimrc'
    sudo("wget --output-document=%s %s" % (remote_file, vimrc_url))
    env.logger.debug("Added a custom vimrc to {0}".format(remote_file))
    # Convenience shell aliases for interactive use.
    for alias in ('alias lt="ls -ltr"', 'alias ll="ls -l"'):
        _add_to_profiles(alias, ['/etc/bash.bashrc'])
    env.logger.info("Done setting up CloudMan's environment")
Exemplo n.º 8
0
def _parse_fabricrc(env):
    """Defaults from fabricrc.txt file; loaded if not specified at commandline.
    """
    env.config_dir = os.path.join(os.path.dirname(__file__), "..", "config")
    # `dict.has_key` was removed in Python 3; `in` behaves identically on 2 and 3.
    if "distribution" not in env and "system_install" not in env:
        env.logger.info("Reading default fabricrc.txt")
        env.update(load_settings(get_config_file(env, "fabricrc.txt").base))
Exemplo n.º 9
0
def install_gatk(env):
    """GATK-lite: library for writing efficient analysis tools using next-generation sequencing data
    http://www.broadinstitute.org/gatk/
    """
    # Install main gatk executable
    version = "2.3-9-gdcdccbb"
    ext = ".tar.bz2"
    url = "ftp://ftp.broadinstitute.org/pub/gsa/GenomeAnalysisTK/"\
          "GenomeAnalysisTKLite-%s%s" % (version, ext)
    _java_install("gatk", version, url, env)
    # Install R gsalib for report and pdf generation
    # XXX Currently have issues with gsalib R installation.
    # Need to make this into a proper R package and re-enable
    # NOTE: the block below is deliberately disabled (`if False`) until the
    # gsalib installation issues mentioned above are resolved.
    if False:
        with quiet():
            # Only proceed when the R membership test reports gsalib missing
            # (output contains "FALSE").
            have_gsalib = run(
                "Rscript -e '\"gsalib\" %in% installed.packages()'")
        if have_gsalib and "FALSE" in have_gsalib:
            # install dependencies for gsalib
            rlib_config = get_config_file(env, "r-libs.yaml").base
            with open(rlib_config) as in_handle:
                # NOTE(review): yaml.load without an explicit Loader is
                # deprecated/unsafe in newer PyYAML; this config is
                # project-controlled, but consider yaml.safe_load.
                config = yaml.load(in_handle)
            # Restrict the R library install to the two CRAN dependencies.
            config["bioc"] = []
            config["update_packages"] = False
            config["cran"] = ["ggplot2", "gplots"]
            libraries.r_library_installer(config)
            # install gsalib
            git_repo = "git clone --depth 1 https://github.com/broadgsa/gatk.git"

            def install_gsalib(env):
                # Build target provided by the GATK repository's ant build.
                env.safe_sudo("ant gsalib")

            _get_install(git_repo, env, install_gsalib)
Exemplo n.º 10
0
def install_packages(env, to_install=None, packages=None):
    """Install conda packages from ``packages-conda.yaml`` and link binaries.

    Also removes conda-provided curl so the system version is used.

    `to_install` is a CloudBioLinux compatible set of top level items to add,
    alternatively `packages` is a list of raw package names.
    """
    if shared._is_anaconda(env):
        conda_bin = shared._conda_cmd(env)
        config_file = get_config_file(env, "packages-conda.yaml")
        if config_file.base is None:
            # No config file available: fall back to explicitly passed
            # packages instead of crashing on open(None) below.
            packages = packages or []
            channels = ""
        else:
            if to_install:
                (packages, _) = _yaml_to_packages(config_file.base, to_install,
                                                  config_file.dist)
            with open(config_file.base) as in_handle:
                channels = " ".join([
                    "-c %s" % x
                    for x in yaml.safe_load(in_handle).get("channels", [])
                ])
        # Guard against None when neither to_install nor packages were given.
        packages = packages or []
        conda_info = json.loads(
            env.safe_run_output("{conda_bin} info --json".format(**locals())))
        if len(packages) > 0:
            pkgs_str = " ".join(packages)
            env.safe_run("{conda_bin} install -y {channels} {pkgs_str}".format(
                **locals()))
            for package in packages:
                _link_bin(package, env, conda_info, conda_bin)
        for pkg in ["python", "conda", "pip"]:
            _link_bin(pkg, env, conda_info, conda_bin, [pkg], "bcbio_")
        # remove packages we want the system to supply
        # curl https://github.com/ContinuumIO/anaconda-issues/issues/72
        system_packages = ["curl"]
        pkgs_str = " ".join(system_packages)
        with settings(warn_only=True):
            env.safe_run(
                "{conda_bin} uninstall -y {pkgs_str}".format(**locals()))
Exemplo n.º 11
0
def _setup_env(env):
    """
    Setup the system environment required to run CloudMan. This means
    installing required system-level packages (as defined in CBL's
    ``packages.yaml``, or a flavor thereof) and Python dependencies
    (i.e., libraries) as defined in CloudMan's ``requirements.txt`` file.
    """
    # System packages are only defined for Debian-derived distributions.
    if env.distribution in ["debian", "ubuntu"]:
        config_file = get_config_file(env, "packages.yaml")
        (packages, _) = _yaml_to_packages(config_file.base, 'cloudman')
        # The active flavor may rewrite the computed package list.
        packages = env.flavor.rewrite_config_items("packages", packages)
        _setup_apt_automation()
        _apt_packages(pkg_list=packages)
    elif env.distribution in ["centos", "scientificlinux"]:
        env.logger.warn("No CloudMan system package dependencies for CentOS")
    # Build CloudMan's Python virtualenv from its requirements file.
    with _make_tmp_dir() as tmp_dir:
        with cd(tmp_dir):
            reqs_url = os.path.join(CM_REPO_ROOT_URL, 'requirements.txt')
            _create_python_virtualenv(env, 'CM', reqs_url=reqs_url)
    # Install a custom vimrc, but only when vim's config directory exists.
    vimrc_url = os.path.join(MI_REPO_ROOT_URL, 'conf_files', 'vimrc')
    remote_file = '/etc/vim/vimrc'
    if env.safe_exists("/etc/vim"):
        env.safe_sudo("wget --output-document=%s %s" % (remote_file, vimrc_url))
        env.logger.debug("Added a custom vimrc to {0}".format(remote_file))
    # Convenience aliases for interactive shells.
    for alias in ('alias lt="ls -ltr"', 'alias ll="ls -l"'):
        _add_to_profiles(alias, ['/etc/bash.bashrc'])
    env.logger.info("Done setting up CloudMan's environment")
Exemplo n.º 12
0
def install_packages(env, to_install=None, packages=None):
    """Install conda packages from ``packages-conda.yaml``.

    Also migrates perl installs to perl-threaded and removes conda-provided
    curl in favor of the system version.

    `to_install` is a CloudBioLinux compatible set of top level items to add,
    alternatively `packages` is a list of raw package names.
    """
    if shared._is_anaconda(env):
        conda_bin = shared._conda_cmd(env)
        config_file = get_config_file(env, "packages-conda.yaml")
        if config_file.base is None:
            # No config file available: fall back to explicitly passed
            # packages instead of crashing on open(None) below.
            packages = packages or []
            channels = ""
        else:
            if to_install:
                (packages, _) = _yaml_to_packages(config_file.base, to_install, config_file.dist)
            with open(config_file.base) as in_handle:
                channels = " ".join(["-c %s" % x for x in yaml.safe_load(in_handle).get("channels", [])])
        # Guard against None when neither to_install nor packages were given.
        packages = packages or []
        conda_info = json.loads(env.safe_run_output("{conda_bin} info --json".format(**locals())))
        # Transition change -- ensure installed perl is perl-threaded
        system_packages = ["perl", "perl-threaded"]
        pkgs_str = " ".join(system_packages)
        with settings(warn_only=True):
            env.safe_run("{conda_bin} uninstall -y {pkgs_str}".format(**locals()))
        # install our customized packages
        if len(packages) > 0:
            pkgs_str = " ".join(packages)
            env.safe_run("{conda_bin} install -y {channels} {pkgs_str}".format(**locals()))
            for package in packages:
                _link_bin(package, env, conda_info, conda_bin)
        for pkg in ["python", "conda", "pip"]:
            _link_bin(pkg, env, conda_info, conda_bin, [pkg], "bcbio_")
        # remove packages we want the system to supply
        # curl https://github.com/ContinuumIO/anaconda-issues/issues/72
        system_packages = ["curl"]
        pkgs_str = " ".join(system_packages)
        with settings(warn_only=True):
            env.safe_run("{conda_bin} uninstall -y {pkgs_str}".format(**locals()))
Exemplo n.º 13
0
def install_packages(env, to_install=None, packages=None):
    """Install conda packages from ``packages-conda.yaml`` and symlink binaries.

    `to_install` is a CloudBioLinux compatible set of top level items to add,
    alternatively `packages` is a list of raw package names.
    """
    if shared._is_anaconda(env):
        conda_bin = shared._conda_cmd(env)
        config_file = get_config_file(env, "packages-conda.yaml")
        if config_file.base is None:
            # No config file available: fall back to explicitly passed
            # packages instead of crashing on open(None) below.
            packages = packages or []
            channels = ""
        else:
            if to_install:
                (packages, _) = _yaml_to_packages(config_file.base, to_install, config_file.dist)
            with open(config_file.base) as in_handle:
                channels = " ".join(["-c %s" % x for x in yaml.safe_load(in_handle).get("channels", [])])
        # Guard against None when neither to_install nor packages were given.
        packages = packages or []
        conda_info = json.loads(env.safe_run_output("{conda_bin} info --json".format(**locals())))
        if len(packages) > 0:
            pkgs_str = " ".join(packages)
            env.safe_run("{conda_bin} install -y {channels} {pkgs_str}".format(**locals()))
            for package in packages:
                _symlink_bin(package, env, conda_info, conda_bin)
        for pkg in ["python", "conda", "pip"]:
            _symlink_bin(pkg, env, conda_info, conda_bin, [pkg], "bcbio_")
        # remove packages that can cause failures
        # curl https://github.com/ContinuumIO/anaconda-issues/issues/72
        problem_packages = ["curl"]
        pkgs_str = " ".join(problem_packages)
        with settings(warn_only=True):
            env.safe_run("{conda_bin} uninstall -y {pkgs_str}".format(**locals()))
Exemplo n.º 14
0
def _custom_installs(to_install, ignore=None):
    """Install custom programs from ``custom.yaml`` for the requested groups.

    `ignore` is an optional collection of package names to skip.
    """
    if not env.safe_exists(env.local_install) and env.local_install:
        env.safe_run("mkdir -p %s" % env.local_install)
    pkg_config = get_config_file(env, "custom.yaml").base
    packages, pkg_to_group = _yaml_to_packages(pkg_config, to_install)
    packages = [name for name in packages
                if ignore is None or name not in ignore]
    for pkg in env.flavor.rewrite_config_items("custom", packages):
        install_custom(pkg, True, pkg_to_group)
Exemplo n.º 15
0
def _custom_installs(to_install, ignore=None):
    """Resolve the custom program list from ``custom.yaml`` and install each entry.

    Entries named in `ignore` are filtered out before installation.
    """
    if not env.safe_exists(env.local_install) and env.local_install:
        env.safe_run("mkdir -p %s" % env.local_install)
    pkg_config = get_config_file(env, "custom.yaml").base
    packages, pkg_to_group = _yaml_to_packages(pkg_config, to_install)
    skip = ignore if ignore is not None else []
    packages = [name for name in packages if name not in skip]
    for name in env.flavor.rewrite_config_items("custom", packages):
        install_custom(name, True, pkg_to_group)
Exemplo n.º 16
0
def _configure_chef(env, chef):
    """Populate the chef object with node JSON properties and installer choice."""
    # Node-specific JSON properties come from the ``node_extra.json`` config.
    node_json_path = get_config_file(env, "node_extra.json").base
    chef.json = _build_chef_properties(env, node_json_path)
    # A string flag decides whether to bootstrap Chef via the Opscode
    # Omnibus Installer; "true"/"yes" (any case) enable it.
    omnibus_flag = env.get("use_chef_omnibus_installer", "false")
    chef.use_omnibus_installer = omnibus_flag.upper() in ["TRUE", "YES"]
Exemplo n.º 17
0
def _configure_chef(env, chef):
    """Configure chef: node JSON properties plus installer selection."""
    # Build the chef node properties from the ``node_extra.json`` config file.
    chef.json = _build_chef_properties(
        env, get_config_file(env, "node_extra.json").base)
    # ``use_chef_omnibus_installer`` accepts truthy strings such as
    # "true" or "yes" in any case.
    chef.use_omnibus_installer = (
        env.get("use_chef_omnibus_installer", "false").upper() in ["TRUE", "YES"])
Exemplo n.º 18
0
def _provision_puppet_classes(to_install, ignore=None):
    """
    Much like _custom_installs, read config file, determine what to install,
    and install it.
    """
    pkg_config = get_config_file(env, "puppet_classes.yaml").base
    packages, _ = _yaml_to_packages(pkg_config, to_install)
    packages = [p for p in packages if ignore is None or p not in ignore]
    classes = [recipe for recipe in env.flavor.rewrite_config_items("puppet_classes", packages)]
    # Previously the computed class list was discarded, making this function
    # a no-op; apply it as the complete provisioning helpers do.
    if classes:  # Don't bother running puppet if nothing to configure
        install_puppet_class(classes, True)
Exemplo n.º 19
0
def install_packages(env):
    """Install perl libraries listed in ``perl-libs.yaml`` using cpanm."""
    config_file = get_config_file(env, "perl-libs.yaml")
    (packages, _) = _yaml_to_packages(config_file.base,
                                      subs_yaml_file=config_file.dist,
                                      namesort=False)
    cpanm_cmd = find_cmd(env, "cpanm", "--version")
    for package in packages:
        # Entries with more than one "==" take the URL install path --
        # presumably a direct download spec; verify against the config format.
        installer = _install_from_url if package.count("==") > 1 else _install_from_cpan
        installer(env, cpanm_cmd, package)
Exemplo n.º 20
0
def install_packages(env, to_install=None, packages=None):
    """Install packages using the home brew package manager.

    Handles upgrading brew, tapping required repositories and installing or upgrading
    packages as appropriate.

    `to_install` is a CloudBioLinux compatible set of top level items to add,
    alternatively `packages` is a list of raw package names.
    """
    config_file = get_config_file(env, "packages-homebrew.yaml")
    if to_install:
        (packages, _) = _yaml_to_packages(config_file.base, to_install,
                                          config_file.dist)
    # Guard against None so len()/iteration below cannot raise when neither
    # `to_install` nor `packages` was supplied.
    packages = packages or []
    # if we have no packages to install, do not try to install or update brew
    if len(packages) == 0:
        _remove_old(env, config_file.base)
        return
    system.install_homebrew(env)
    brew_cmd = _brew_cmd(env)
    formula_repos = ["homebrew/science", "chapmanb/cbl", "homebrew/dupes"]
    current_taps = set(
        [x.strip() for x in env.safe_run_output("%s tap" % brew_cmd).split()])
    _safe_update(env, brew_cmd, formula_repos, current_taps)
    # Re-read taps: _safe_update may have changed them.
    current_taps = set(
        [x.strip() for x in env.safe_run_output("%s tap" % brew_cmd).split()])
    for repo in formula_repos:
        if repo not in current_taps:
            env.safe_run("%s tap %s" % (brew_cmd, repo))
    env.safe_run("%s tap --repair" % brew_cmd)
    ipkgs = {
        "outdated":
        set([
            x.strip()
            for x in env.safe_run_output("%s outdated" % brew_cmd).split()
        ]),
        "current":
        _get_current_pkgs(env, brew_cmd)
    }
    _install_brew_baseline(env, brew_cmd, ipkgs, packages)
    # Refresh installed/outdated state: the baseline install above may have
    # changed it, so recompute before handling individual packages.
    ipkgs = {
        "outdated":
        set([
            x.strip()
            for x in env.safe_run_output("%s outdated" % brew_cmd).split()
        ]),
        "current":
        _get_current_pkgs(env, brew_cmd)
    }
    for pkg_str in packages:
        _install_pkg(env, pkg_str, brew_cmd, ipkgs)
    # Unlink build helpers that conflict with system-provided versions.
    for pkg_str in ["pkg-config", "openssl", "cmake", "unzip"]:
        _safe_unlink_pkg(env, pkg_str, brew_cmd)
    # Uninstall curl plus anything the config explicitly marks for removal.
    # safe_load matches the file's other config reads and avoids arbitrary
    # object construction from YAML.
    with open(config_file.base) as in_handle:
        to_remove = yaml.safe_load(in_handle).get("to_remove", [])
    for pkg_str in ["curl"] + to_remove:
        _safe_uninstall_pkg(env, pkg_str, brew_cmd)
Exemplo n.º 21
0
def _provision_chef_recipes(to_install, ignore=None):
    """
    Read the chef recipe configuration, work out which recipes apply for
    `to_install` and run chef for them (mirrors _custom_installs).
    """
    pkg_config = get_config_file(env, "chef_recipes.yaml").base
    packages, _ = _yaml_to_packages(pkg_config, to_install)
    skip = ignore if ignore is not None else []
    packages = [name for name in packages if name not in skip]
    recipes = list(env.flavor.rewrite_config_items("chef_recipes", packages))
    # Skip the chef run entirely when there is nothing to configure.
    if recipes:
        install_chef_recipe(recipes, True)
Exemplo n.º 22
0
def _provision_puppet_classes(to_install, ignore=None):
    """
    Read the puppet class configuration, work out which classes apply for
    `to_install` and run puppet for them (mirrors _custom_installs).
    """
    pkg_config = get_config_file(env, "puppet_classes.yaml").base
    packages, _ = _yaml_to_packages(pkg_config, to_install)
    skip = ignore if ignore is not None else []
    packages = [name for name in packages if name not in skip]
    classes = list(env.flavor.rewrite_config_items("puppet_classes", packages))
    # Skip the puppet run entirely when there is nothing to configure.
    if classes:
        install_puppet_class(classes, True)
Exemplo n.º 23
0
def install_packages(env, to_install=None, packages=None):
    """Install conda packages one at a time from ``packages-conda.yaml``.

    `to_install` is a CloudBioLinux compatible set of top level items to add,
    alternatively `packages` is a list of raw package names.
    """
    if shared._is_anaconda(env):
        conda_bin = shared._conda_cmd(env)
        config_file = get_config_file(env, "packages-conda.yaml")
        if to_install:
            (packages, _) = _yaml_to_packages(config_file.base, to_install, config_file.dist)
        # Guard against None (no to_install and no packages) which previously
        # crashed at len(packages) below.
        packages = packages or []
        channels = ""
        if config_file.base:
            # Channel flags come from the config file's `channels` section;
            # previously open(None) crashed when no config file was present.
            with open(config_file.base) as in_handle:
                channels = " ".join(["-c %s" % x for x in yaml.safe_load(in_handle).get("channels", [])])
        if len(packages) > 0:
            for pkg in packages:
                env.safe_run("{conda_bin} install -y {channels} {pkg}".format(**locals()))
Exemplo n.º 24
0
def _nix_packages(to_install):
    """Install packages available via nixpkgs (optional)
    """
    if not env.nixpkgs:
        return
    env.logger.info("Update and install NixPkgs packages")
    pkg_config_file = get_config_file(env, "packages-nix.yaml").base
    sudo("nix-channel --update")
    # Resolve the final package names, letting the flavor rewrite them.
    (packages, _) = _yaml_to_packages(pkg_config_file, to_install)
    for name in env.flavor.rewrite_config_items("packages", packages):
        sudo("nix-env -b -i %s" % name)
Exemplo n.º 25
0
def _nix_packages(to_install):
    """Install packages available via nixpkgs (optional)
    """
    if env.nixpkgs:
        env.logger.info("Update and install NixPkgs packages")
        config_path = get_config_file(env, "packages-nix.yaml").base
        sudo("nix-channel --update")
        # Map the requested high-level groups to concrete nix package names.
        (pkg_names, _) = _yaml_to_packages(config_path, to_install)
        pkg_names = env.flavor.rewrite_config_items("packages", pkg_names)
        for pkg_name in pkg_names:
            sudo("nix-env -b -i %s" % pkg_name)
Exemplo n.º 26
0
def install_packages(env, to_install=None, packages=None):
    """Install conda packages, splitting them across isolated conda environments.

    Package and channel configuration comes from ``env.conda_yaml`` when set,
    otherwise from the ``packages-conda.yaml`` config file.

    `to_install` is a CloudBioLinux compatible set of top level items to add,
    alternatively `packages` is a list of raw package names.
    """
    if shared._is_anaconda(env):
        conda_bin = shared._conda_cmd(env)
        if hasattr(env, "conda_yaml"):
            # Direct YAML override supplied on the environment; mimic the
            # (base, dist) interface returned by get_config_file.
            Config = collections.namedtuple("Config", "base dist")
            config_file = Config(base=env.conda_yaml, dist=None)
        else:
            config_file = get_config_file(env, "packages-conda.yaml")
        if config_file.base is None and packages is None:
            packages = []
            channels = ""
        else:
            if to_install:
                (packages, _) = _yaml_to_packages(config_file.base, to_install, config_file.dist)
            # Channel flags ("-c name ...") come from the config file's
            # `channels` section.
            with open(config_file.base) as in_handle:
                channels = " ".join(["-c %s" % x for x in yaml.safe_load(in_handle).get("channels", [])])
        # Create (and clean out) any per-tool conda environments up front.
        conda_envs = _create_environments(env, conda_bin, packages)
        for env_dir in conda_envs.values():
            _clean_environment(env_dir)
        conda_info = json.loads(env.safe_run_output("{conda_bin} info --json".format(**locals())))
        # libedit pins to curses 6.0 but bioconda requires 5.9
        # Ensure we have conda-forge conda installed, otherwise creates resolution
        # and package issues with removed libedit. Hopefully can remove along with libedit
        # hack when conda-forge synchronizes ncurses and conda with the base install.
        env.safe_run("{conda_bin} install -y {channels} conda python=2".format(**locals()))
        # Uninstall old R packages that clash with updated versions
        # Temporary fix to allow upgrades from older versions that have migrated
        # r-tximport is now bioconductor-tximport
        # py2cairo is incompatible with r 3.4.1
        for problem in ["r-tximport", "py2cairo", "libedit"]:
            # Only force-remove a problem package when `conda list --json`
            # reports it as actually installed.
            cur_packages = [x["name"] for x in
                            json.loads(env.safe_run_output("{conda_bin} list --json {problem}".format(**locals())))]
            if problem in cur_packages:
                env.safe_run("{conda_bin} remove --force -y {problem}".format(**locals()))
        # install our customized packages
        if len(packages) > 0:
            for env_name, env_packages in _split_by_condaenv(packages):
                # Packages tied to a named environment install with "-n <env>";
                # an empty name targets the base conda install.
                if env_name:
                    assert env_name in conda_envs, (env_name, conda_envs)
                    env_str = "-n %s" % env_name
                else:
                    env_str = ""
                pkgs_str = " ".join(env_packages)
                env.safe_run("{conda_bin} install -y {env_str} {channels} {pkgs_str}".format(**locals()))
                conda_pkg_list = json.loads(env.safe_run_output(
                    "{conda_bin} list --json {env_str}".format(**locals())))
                for package in env_packages:
                    _link_bin(package, env, conda_info, conda_bin, conda_pkg_list,
                              conda_envdir=conda_envs.get(env_name))
        # Link the core tools from the base install under a "bcbio_" prefix
        # via _link_bin.
        conda_pkg_list = json.loads(env.safe_run_output("{conda_bin} list --json".format(**locals())))
        for pkg in ["python", "conda", "pip"]:
            _link_bin(pkg, env, conda_info, conda_bin, conda_pkg_list, files=[pkg], prefix="bcbio_")
Exemplo n.º 27
0
def install_custom(p, automated=False, pkg_to_group=None, flavor=None):
    """
    Install a single custom program or package by name.

    This method fetches program name from ``config/custom.yaml`` and delegates
    to a method in ``custom/*name*.py`` to proceed with the installation.
    Alternatively, if a program install method is defined in the appropriate
    package, it will be called directly (see param ``p``).

    Usage: fab [-i key] [-u user] -H host install_custom:program_name

    :type p:  string
    :param p: A name of the custom program to install. This has to be either a name
              that is listed in ``custom.yaml`` as a subordinate to a group name or a
              program name whose install method is defined in either ``cloudbio`` or
              ``custom`` packages
              (e.g., ``cloudbio/custom/cloudman.py -> install_cloudman``).

    :type automated:  bool
    :param automated: If set to True, the environment is not loaded and reading of
                      the ``custom.yaml`` is skipped.
    """
    _setup_logging(env)
    p = p.lower()  # All packages listed in custom.yaml are in lower case
    time_start = _print_time_stats("Custom install for '{0}'".format(p),
                                   "start")
    if not automated:
        _configure_fabric_environment(env, flavor, ignore_distcheck=True)
        pkg_config = get_config_file(env, "custom.yaml").base
        packages, pkg_to_group = _yaml_to_packages(pkg_config, None)
    # In automated mode callers may omit the mapping; use an empty one so the
    # membership checks below cannot raise a TypeError on None.
    if pkg_to_group is None:
        pkg_to_group = {}
    # Allow direct calling of a program install method, even if the program
    # is not listed in the custom list (ie, not contained as a key value in
    # pkg_to_group). For an example, see 'install_cloudman' or use p=cloudman.
    mod_name = pkg_to_group[p] if p in pkg_to_group else p
    try:
        env.logger.debug("Import %s" % p)
        mod = __import__("cloudbio.custom.%s" % mod_name,
                         fromlist=["cloudbio", "custom"])
    except ImportError:
        # Report the resolved module name: indexing pkg_to_group here could
        # mask the real failure with a KeyError for unmapped programs.
        raise ImportError("Need to write a %s module in custom." % mod_name)
    replace_chars = ["-"]
    try:
        # Dashes are not valid in python identifiers; normalize the name
        # before looking up the matching install function.
        for to_replace in replace_chars:
            p = p.replace(to_replace, "_")
        fn = getattr(mod, "install_%s" % p)
    except AttributeError:
        raise ImportError("Need to write a install_%s function in custom.%s" %
                          (p, mod_name))
    fn(env)
    _print_time_stats("Custom install for '%s'" % p, "end", time_start)
Exemplo n.º 28
0
def _custom_installs(to_install, ignore=None, add=None):
    """Install custom programs from ``custom.yaml``.

    `ignore` lists package names to skip; `add` maps group names to extra
    package names appended to the configured list.
    """
    if not env.safe_exists(env.local_install) and env.local_install:
        env.safe_run("mkdir -p %s" % env.local_install)
    pkg_config = get_config_file(env, "custom.yaml").base
    packages, pkg_to_group = _yaml_to_packages(pkg_config, to_install)
    packages = [p for p in packages if ignore is None or p not in ignore]
    if add is not None:
        # dict.iteritems was removed in Python 3; items() behaves the same here.
        for key, vals in add.items():
            for v in vals:
                pkg_to_group[v] = key
                packages.append(v)
    for p in env.flavor.rewrite_config_items("custom", packages):
        install_custom(p, True, pkg_to_group)
Exemplo n.º 29
0
def _custom_installs(to_install, ignore=None, add=None):
    """Install custom packages for the requested groups from custom.yaml.

    :param to_install: groups in the custom configuration to install.
    :param ignore: optional iterable of package names to skip.
    :param add: optional mapping of group name -> extra package names to
                install in addition to the configured ones.
    """
    # Make sure the local install directory exists before installers write to it.
    if not env.safe_exists(env.local_install) and env.local_install:
        env.safe_run("mkdir -p %s" % env.local_install)
    pkg_config = get_config_file(env, "custom.yaml").base
    packages, pkg_to_group = _yaml_to_packages(pkg_config, to_install)
    packages = [p for p in packages if ignore is None or p not in ignore]
    if add is not None:
        # .items() instead of the Python 2-only .iteritems() keeps this
        # code working on both Python 2 and 3.
        for key, vals in add.items():
            for v in vals:
                pkg_to_group[v] = key
                packages.append(v)
    for p in env.flavor.rewrite_config_items("custom", packages):
        install_custom(p, True, pkg_to_group)
Exemplo n.º 30
0
def install_custom(p, automated=False, pkg_to_group=None, flavor=None):
    """
    Install a single custom program or package by name.

    This method fetches program name from ``config/custom.yaml`` and delegates
    to a method in ``custom/*name*.py`` to proceed with the installation.
    Alternatively, if a program install method is defined in the appropriate
    package, it will be called directly (see param ``p``).

    Usage: fab [-i key] [-u user] -H host install_custom:program_name

    :type p:  string
    :param p: A name of the custom program to install. This has to be either a name
              that is listed in ``custom.yaml`` as a subordinate to a group name or a
              program name whose install method is defined in either ``cloudbio`` or
              ``custom`` packages
              (e.g., ``cloudbio/custom/cloudman.py -> install_cloudman``).

    :type automated:  bool
    :param automated: If set to True, the environment is not loaded and reading of
                      the ``custom.yaml`` is skipped.
    """
    _setup_logging(env)
    p = p.lower() # All packages listed in custom.yaml are in lower case
    time_start = _print_time_stats("Custom install for '{0}'".format(p), "start")
    if not automated:
        _configure_fabric_environment(env, flavor)
        pkg_config = get_config_file(env, "custom.yaml").base
        packages, pkg_to_group = _yaml_to_packages(pkg_config, None)

    try:
        env.logger.debug("Import %s" % p)
        # Allow direct calling of a program install method, even if the program
        # is not listed in the custom list (ie, not contained as a key value in
        # pkg_to_group). For an example, see 'install_cloudman' or use p=cloudman.
        mod_name = pkg_to_group[p] if p in pkg_to_group else p
        mod = __import__("cloudbio.custom.%s" % mod_name,
                         fromlist=["cloudbio", "custom"])
    except ImportError:
        # Report mod_name rather than pkg_to_group[p]: the latter raises a
        # KeyError (hiding this message) exactly when p is not listed in
        # custom.yaml, which is the direct-call case allowed above.
        raise ImportError("Need to write a %s module in custom." % mod_name)
    replace_chars = ["-"]
    try:
        # Package names may use '-', but Python identifiers cannot.
        for to_replace in replace_chars:
            p = p.replace(to_replace, "_")
        fn = getattr(mod, "install_%s" % p)
    except AttributeError:
        # Again use mod_name: p was rewritten with underscores and may no
        # longer be a valid pkg_to_group key.
        raise ImportError("Need to write a install_%s function in custom.%s" %
                          (p, mod_name))
    fn(env)
    _print_time_stats("Custom install for '%s'" % p, "end", time_start)
Exemplo n.º 31
0
def _parse_fabricrc(env):
    """Defaults from fabricrc.txt file; loaded if not specified at commandline.
    """
    env.config_dir = os.path.join(os.path.dirname(__file__), "..", "config")
    env.tool_data_table_conf_file = os.path.join(env.config_dir, "..",
                                                 "installed_files",
                                                 "tool_data_table_conf.xml")
    # `in` instead of the removed-in-Python-3 dict.has_key; fabric's env is a
    # dict subclass so containment checks work the same way.
    if "distribution" not in env and "system_install" not in env:
        env.logger.info("Reading default fabricrc.txt")
        env.update(load_settings(get_config_file(env, "fabricrc.txt").base))
    if "shell_config" not in env:
        env.shell_config = "~/.bashrc"
    if "shell" not in env:
        env.shell = "/bin/bash -i -c"
Exemplo n.º 32
0
def install_packages(env, to_install=None, packages=None):
    """Old installation, based on pre-configured fabric inputs.
    """
    from cloudbio.flavor.config import get_config_file
    from cloudbio.custom import shared

    # Nothing to do unless this is an anaconda-managed install.
    if not shared._is_anaconda(env):
        return
    conda_bin = shared._conda_cmd(env)
    if hasattr(env, "conda_yaml"):
        # An explicit conda YAML on the environment overrides the standard
        # configuration file lookup.
        CondaConfig = collections.namedtuple("Config", "base dist")
        config_file = CondaConfig(base=env.conda_yaml, dist=None)
    else:
        config_file = get_config_file(env, "packages-conda.yaml")
    install_in(conda_bin, env.system_install, config_file.base, packages)
Exemplo n.º 33
0
def _parse_fabricrc(env):
    """Defaults from fabricrc.txt file; loaded if not specified at commandline.
    """
    env.config_dir = os.path.join(os.path.dirname(__file__), "..", "config")
    env.tool_data_table_conf_file = os.path.join(env.config_dir, "..",
                                                 "installed_files",
                                                 "tool_data_table_conf.xml")
    # `in` instead of the removed-in-Python-3 dict.has_key; fabric's env is a
    # dict subclass so containment checks work the same way.
    if "distribution" not in env and "system_install" not in env:
        env.logger.info("Reading default fabricrc.txt")
        env.update(load_settings(get_config_file(env, "fabricrc.txt").base))
    if "shell_config" not in env:
        env.shell_config = "~/.bashrc"
    if "shell" not in env:
        env.shell = "/bin/bash -i -c"
Exemplo n.º 34
0
def _read_main_config():
    """Pull a list of groups to install based on our main configuration YAML.

    Reads 'main.yaml' and returns packages and libraries
    """
    yaml_file = get_config_file(env, "main.yaml").base
    with open(yaml_file) as in_handle:
        # safe_load: the config is plain data; yaml.load without a loader is
        # deprecated and can construct arbitrary objects.
        full_data = yaml.safe_load(in_handle)
    # .get avoids a KeyError on configs that omit a section; `or []` also
    # normalizes explicit null values, matching the previous truthiness check.
    packages = full_data.get('packages') or []
    libraries = full_data.get('libraries') or []
    custom_ignore = full_data.get('custom_ignore', [])
    env.logger.info("Meta-package information from {2}\n- Packages: {0}\n- Libraries: "
            "{1}".format(",".join(packages), ",".join(libraries), yaml_file))
    return packages, sorted(libraries), custom_ignore
Exemplo n.º 35
0
def _yum_packages(to_install):
    """Install rpm packages available via yum.
    """
    package_file = ("packages-scientificlinux.yaml"
                    if env.distribution == "scientificlinux"
                    else "packages-yum.yaml")
    pkg_config = get_config_file(env, package_file).base
    # check-update exits non-zero when updates are available, so tolerate it.
    with settings(warn_only=True):
        env.safe_sudo("yum check-update")
    env.safe_sudo("yum -y upgrade")
    # Read the configured packages, let the flavor rewrite the list, then
    # install each one individually.
    pkg_names, _ = _yaml_to_packages(pkg_config, to_install)
    for pkg in env.flavor.rewrite_config_items("packages", pkg_names):
        env.safe_sudo("yum -y install %s" % pkg)
Exemplo n.º 36
0
def _yum_packages(to_install):
    """Install rpm packages available via yum.
    """
    is_sl = env.distribution == "scientificlinux"
    package_file = "packages-scientificlinux.yaml" if is_sl else "packages-yum.yaml"
    pkg_config = get_config_file(env, package_file).base
    # check-update exits non-zero when updates are available, so tolerate it.
    with settings(warn_only=True):
        sudo("yum check-update")
    sudo("yum -y upgrade")
    # Read the configured packages and allow the flavor to rewrite the list
    # before installing each one.
    pkg_names, _ = _yaml_to_packages(pkg_config, to_install)
    pkg_names = env.flavor.rewrite_config_items("packages", pkg_names)
    for pkg in pkg_names:
        sudo("yum -y install %s" % pkg)
Exemplo n.º 37
0
def install_packages(env, to_install=None, packages=None):
    """Install conda packages and link their binaries for bcbio use.

    :param env: fabric-style environment with anaconda install details.
    :param to_install: groups from the conda package YAML to install.
    :param packages: explicit conda package list.
    """
    if shared._is_anaconda(env):
        conda_bin = shared._conda_cmd(env)
        if hasattr(env, "conda_yaml"):
            # An explicit conda YAML on the environment wins over the
            # standard configuration lookup.
            Config = collections.namedtuple("Config", "base dist")
            config_file = Config(base=env.conda_yaml, dist=None)
        else:
            config_file = get_config_file(env, "packages-conda.yaml")
        if config_file.base is None:
            # No configuration file: fall back to any explicit package list
            # and skip channel configuration. Previously `channels` was left
            # unassigned here and an explicit package list crashed later on
            # open(None).
            packages = packages or []
            channels = ""
        else:
            if to_install:
                (packages, _) = _yaml_to_packages(config_file.base, to_install, config_file.dist)
            with open(config_file.base) as in_handle:
                channels = " ".join(["-c %s" % x for x in yaml.safe_load(in_handle).get("channels", [])])
        conda_envs = _create_environments(env, conda_bin, packages)
        conda_info = json.loads(env.safe_run_output("{conda_bin} info --json".format(**locals())))
        # Uninstall old R packages that clash with updated versions
        # Temporary fix to allow upgrades from older versions that have migrated
        # r-tximport is now bioconductor-tximport
        # py2cairo is incompatible with r 3.4.1
        # libedit pins to curses 6.0 but bioconda requires 5.9
        for problem in ["r-tximport", "py2cairo", "libedit"]:
            cur_packages = [x["name"] for x in
                            json.loads(env.safe_run_output("{conda_bin} list --json {problem}".format(**locals())))]
            if problem in cur_packages:
                env.safe_run("{conda_bin} remove --force -y {problem}".format(**locals()))
        # install our customized packages
        if len(packages) > 0:
            for env_name, env_packages in _split_by_condaenv(packages):
                if env_name:
                    assert env_name in conda_envs, (env_name, conda_envs)
                    env_str = "-n %s" % env_name
                else:
                    env_str = ""
                pkgs_str = " ".join(env_packages)
                env.safe_run("{conda_bin} install --quiet -y {env_str} {channels} {pkgs_str}".format(**locals()))
                conda_pkg_list = json.loads(env.safe_run_output(
                    "{conda_bin} list --json {env_str}".format(**locals())))
                for package in env_packages:
                    _link_bin(package, env, conda_info, conda_bin, conda_pkg_list,
                              conda_envdir=conda_envs.get(env_name))
        # Expose prefixed python/conda/pip so bcbio tooling can find them.
        conda_pkg_list = json.loads(env.safe_run_output("{conda_bin} list --json".format(**locals())))
        for pkg in ["python", "conda", "pip"]:
            _link_bin(pkg, env, conda_info, conda_bin, conda_pkg_list, files=[pkg], prefix="bcbio_")
Exemplo n.º 38
0
def _read_main_config():
    """Pull a list of groups to install based on our main configuration YAML.

    Reads 'main.yaml' and returns packages and libraries
    """
    yaml_file = get_config_file(env, "main.yaml").base
    with open(yaml_file) as in_handle:
        # safe_load: the config is plain data; yaml.load without a loader is
        # deprecated and can construct arbitrary objects.
        full_data = yaml.safe_load(in_handle)
    packages = full_data.get('packages', [])
    libraries = full_data.get('libraries', [])
    custom_ignore = full_data.get('custom_ignore', [])
    # Explicit null values in the YAML come back as None; normalize to lists.
    if packages is None: packages = []
    if libraries is None: libraries = []
    if custom_ignore is None: custom_ignore = []
    env.logger.info(
        "Meta-package information from {2}\n- Packages: {0}\n- Libraries: "
        "{1}".format(",".join(packages), ",".join(libraries), yaml_file))
    return packages, sorted(libraries), custom_ignore
Exemplo n.º 39
0
def _apt_packages(to_install=None, pkg_list=None):
    """
    Install packages available via apt-get.
    Note that ``to_install`` and ``pkg_list`` arguments cannot be used simultaneously.

    :type to_install:  list
    :param to_install: A list of strings (ie, groups) present in the ``main.yaml``
                       config file that will be used to filter out the specific
                       packages to be installed.

    :type pkg_list:  list
    :param pkg_list: An explicit list of packages to install. No other files,
                     flavors, or editions are considered.
    """
    if env.edition.short_name not in ["minimal"]:
        env.logger.info("Update the system")
        with settings(warn_only=True):
            env.safe_sudo("apt-get update")
    if to_install is not None:
        config_file = get_config_file(env, "packages.yaml")
        env.edition.apt_upgrade_system(env=env)
        packages, _ = _yaml_to_packages(config_file.base, to_install,
                                        config_file.dist)
        # Both the edition and the flavor may rewrite the package list.
        for rewriter in (env.edition, env.flavor):
            packages = rewriter.rewrite_config_items("packages", packages)
    elif pkg_list is not None:
        env.logger.info("Will install specific packages: {0}".format(pkg_list))
        packages = pkg_list
    else:
        raise ValueError("Need a file with packages or a list of packages")
    # Install in batches of 30: a single long command line is much faster than
    # one call per package, but the command line length is capped.
    group_size = 30
    total = len(packages)
    env.logger.info("Installing %i packages" % total)
    for offset in range(0, total, group_size):
        env.logger.info("Package install progress: {0}/{1}".format(
            offset, total))
        env.safe_sudo("apt-get -y --force-yes install %s" %
                      " ".join(packages[offset:offset + group_size]))
    env.safe_sudo("apt-get clean")
Exemplo n.º 40
0
def install_packages(env, to_install=None, packages=None):
    """Install conda packages into an anaconda-managed environment.

    :param env: fabric-style environment with anaconda install details.
    :param to_install: groups from packages-conda.yaml to install.
    :param packages: explicit conda package list.
    """
    if shared._is_anaconda(env):
        conda_bin = shared._conda_cmd(env)
        config_file = get_config_file(env, "packages-conda.yaml")
        if config_file.base is None:
            # No configuration file: fall back to any explicit package list
            # and skip channel configuration. Previously `channels` was left
            # unassigned here and an explicit package list crashed later on
            # open(None).
            packages = packages or []
            channels = ""
        else:
            if to_install:
                (packages, _) = _yaml_to_packages(config_file.base, to_install,
                                                  config_file.dist)
            with open(config_file.base) as in_handle:
                channels = " ".join([
                    "-c %s" % x
                    for x in yaml.safe_load(in_handle).get("channels", [])
                ])
        if len(packages) > 0:
            pkgs_str = " ".join(packages)
            env.safe_run("{conda_bin} install -y {channels} {pkgs_str}".format(
                **locals()))
Exemplo n.º 41
0
def _apt_packages(to_install=None, pkg_list=None):
    """
    Install packages available via apt-get.
    Note that ``to_install`` and ``pkg_list`` arguments cannot be used simultaneously.

    :type to_install:  list
    :param to_install: A list of strings (ie, groups) present in the ``main.yaml``
                       config file that will be used to filter out the specific
                       packages to be installed.

    :type pkg_list:  list
    :param pkg_list: An explicit list of packages to install. No other files,
                     flavors, or editions are considered.
    """
    if env.edition.short_name not in ["minimal"]:
        env.logger.info("Update the system")
        with settings(warn_only=True):
            sudo("apt-get update")
    if to_install is not None:
        config_file = get_config_file(env, "packages.yaml")
        env.edition.apt_upgrade_system()
        packages, _ = _yaml_to_packages(config_file.base, to_install,
                                        config_file.dist)
        # Editions and flavors may each rewrite the configured package list.
        packages = env.edition.rewrite_config_items("packages", packages)
        packages = env.flavor.rewrite_config_items("packages", packages)
    elif pkg_list is not None:
        env.logger.info("Will install specific packages: {0}".format(pkg_list))
        packages = pkg_list
    else:
        raise ValueError("Need a file with packages or a list of packages")
    # Batch installs 30 at a time: one long command line is faster than one
    # call per package, but shell argument length is limited.
    group_size = 30
    num_packages = len(packages)
    env.logger.info("Installing %i packages" % num_packages)
    for start in range(0, num_packages, group_size):
        env.logger.info("Package install progress: {0}/{1}".format(
            start, num_packages))
        batch = " ".join(packages[start:start + group_size])
        sudo("apt-get -y --force-yes install %s" % batch)
    sudo("apt-get clean")
Exemplo n.º 42
0
def _read_main_config():
    """Pull a list of groups to install based on our main configuration YAML.

    Reads 'main.yaml' and returns packages, libraries, packages to ignore in
    custom installs, and additional custom packages.
    """
    yaml_file = get_config_file(env, "main.yaml").base
    with open(yaml_file) as in_handle:
        # safe_load: the config is plain data; yaml.load without a loader is
        # deprecated and can construct arbitrary objects.
        full_data = yaml.safe_load(in_handle)
    packages = full_data.get('packages', [])
    packages = env.flavor.rewrite_config_items("main_packages", packages)
    libraries = full_data.get('libraries', [])
    custom_ignore = full_data.get('custom_ignore', [])
    # custom_add is intentionally left as None when absent; callers check it.
    custom_add = full_data.get("custom_additional")
    # Explicit null values in the YAML come back as None; normalize to lists.
    if packages is None: packages = []
    if libraries is None: libraries = []
    if custom_ignore is None: custom_ignore = []
    env.logger.info("Meta-package information from {2}\n- Packages: {0}\n- Libraries: "
            "{1}".format(",".join(packages), ",".join(libraries), yaml_file))
    return packages, sorted(libraries), custom_ignore, custom_add
Exemplo n.º 43
0
def install_custom(p, automated=False, pkg_to_group=None, flavor=None):
    """
    Install a single custom program or package by name.

    This method fetches program name from ``config/custom.yaml`` and delegates
    to a method in ``custom/*name*.py`` to proceed with the installation.
    Alternatively, if a program install method is defined in the appropriate
    package, it will be called directly (see param ``p``).

    Usage: fab [-i key] [-u user] -H host install_custom:program_name

    :type p:  string
    :param p: A name of the custom program to install. This has to be either a name
              that is listed in ``custom.yaml`` as a subordinate to a group name or a
              program name whose install method is defined in either ``cloudbio`` or
              ``custom`` packages
              (e.g., ``cloudbio/custom/cloudman.py -> install_cloudman``).

    :type automated:  bool
    :param automated: If set to True, the environment is not loaded and reading of
                      the ``custom.yaml`` is skipped.
    """
    p = p.lower()  # custom.yaml package names are stored in lower case
    if not automated:
        _setup_logging(env)
        _configure_fabric_environment(env, flavor, ignore_distcheck=True)
        custom_yaml = get_config_file(env, "custom.yaml").base
        _, pkg_to_group = _yaml_to_packages(custom_yaml, None)
    time_start = _print_time_stats("Custom install for '{0}'".format(p),
                                   "start")
    _custom_install_function(env, p, pkg_to_group)(env)
    ## TODO: Replace the previous lines with a plain _install_custom call,
    ## barring objections. Slightly different behavior because pkg_to_group
    ## would be loaded regardless of automated if it is None, but IMO this
    ## shouldn't matter because the steps above look like they would fail if
    ## automated is True and pkg_to_group is None.
    # _install_custom(p, pkg_to_group)
    _print_time_stats("Custom install for '%s'" % p, "end", time_start)
Exemplo n.º 44
0
def install_custom(p, automated=False, pkg_to_group=None, flavor=None):
    """
    Install a single custom program or package by name.

    This method fetches program name from ``config/custom.yaml`` and delegates
    to a method in ``custom/*name*.py`` to proceed with the installation.
    Alternatively, if a program install method is defined in the appropriate
    package, it will be called directly (see param ``p``).

    Usage: fab [-i key] [-u user] -H host install_custom:program_name

    :type p:  string
    :param p: A name of the custom program to install. This has to be either a name
              that is listed in ``custom.yaml`` as a subordinate to a group name or a
              program name whose install method is defined in either ``cloudbio`` or
              ``custom`` packages
              (e.g., ``cloudbio/custom/cloudman.py -> install_cloudman``).

    :type automated:  bool
    :param automated: If set to True, the environment is not loaded and reading of
                      the ``custom.yaml`` is skipped.
    """
    p = p.lower()  # all names in custom.yaml are lower case
    if not automated:
        # Interactive use: configure logging and fabric, then load the
        # package -> group mapping from custom.yaml.
        _setup_logging(env)
        _configure_fabric_environment(env, flavor, ignore_distcheck=True)
        pkg_config = get_config_file(env, "custom.yaml").base
        packages, pkg_to_group = _yaml_to_packages(pkg_config, None)
    time_start = _print_time_stats("Custom install for '{0}'".format(p), "start")
    install_fn = _custom_install_function(env, p, pkg_to_group)
    install_fn(env)
    ## TODO: Replace the previous lines with a plain _install_custom call,
    ## barring objections. Slightly different behavior because pkg_to_group
    ## would be loaded regardless of automated if it is None, but IMO this
    ## shouldn't matter because the steps above look like they would fail if
    ## automated is True and pkg_to_group is None.
    # _install_custom(p, pkg_to_group)
    _print_time_stats("Custom install for '%s'" % p, "end", time_start)
Exemplo n.º 45
0
def _do_library_installs(to_install):
    """Run the configured installer for each requested library collection.

    Each name in ``to_install`` maps to a ``<name>.yaml`` config file and an
    entry in the module-level ``lib_installers`` dispatch table.
    """
    for iname in to_install:
        yaml_file = get_config_file(env, "%s.yaml" % iname).base
        with open(yaml_file) as in_handle:
            # safe_load: configs are plain data; yaml.load without a loader
            # is deprecated and can construct arbitrary objects.
            config = yaml.safe_load(in_handle)
        lib_installers[iname](config)
Exemplo n.º 46
0
def install_packages(env, to_install=None, packages=None):
    """Install conda packages across bcbio-managed conda environments.

    Reads channels and package groups from packages-conda.yaml (or an explicit
    ``env.conda_yaml``), pins conda below 4.6, removes known-conflicting
    packages, installs per conda environment, and links selected binaries.

    :param env: fabric-style environment with anaconda install details.
    :param to_install: groups from the conda package YAML to install.
    :param packages: explicit conda package list.
    """
    if shared._is_anaconda(env):
        conda_bin = shared._conda_cmd(env)
        if hasattr(env, "conda_yaml"):
            # An explicit conda YAML on the environment overrides the
            # standard configuration file lookup.
            Config = collections.namedtuple("Config", "base dist")
            config_file = Config(base=env.conda_yaml, dist=None)
        else:
            config_file = get_config_file(env, "packages-conda.yaml")
        if config_file.base is None and packages is None:
            # No configuration available: nothing to install, no channels.
            packages = []
            channels = ""
        else:
            if to_install:
                (packages, _) = _yaml_to_packages(config_file.base, to_install,
                                                  config_file.dist)
            # Build the "-c chan1 -c chan2 ..." argument string from the
            # channels section of the config file.
            with open(config_file.base) as in_handle:
                channels = " ".join([
                    "-c %s" % x
                    for x in yaml.safe_load(in_handle).get("channels", [])
                ])
        conda_envs = _create_environments(env, conda_bin, packages)
        for env_dir in conda_envs.values():
            _clean_environment(env_dir)
        conda_info = json.loads(
            env.safe_run_output("{conda_bin} info --json".format(**locals())))
        # Temporary workaround:
        # Ensure we have conda-forge conda installed, < 4.6.0, since 4.6.0 resolves slowly
        # with conda-forge https://groups.google.com/d/msg/biovalidation/ZfcH1K7I-_I/q8FxBu9BDgAJ
        py_version = ENV_PY_VERSIONS[None]
        conda_max_version = "4.5.12"
        conda_version = subprocess.check_output(
            [conda_bin, "--version"], stderr=subprocess.STDOUT).split()[-1]
        if LooseVersion(conda_version) > LooseVersion(conda_max_version):
            env.safe_run(
                "{conda_bin} install -y {channels} 'conda={conda_max_version}' {py_version}"
                .format(**locals()))
        # Uninstall old R packages that clash with updated versions
        # Temporary fix to allow upgrades from older versions that have migrated
        # r-tximport is now bioconductor-tximport
        # py2cairo is incompatible with r 3.4.1
        for problem in ["r-tximport", "py2cairo", "libedit"]:
            cur_packages = [
                x["name"] for x in json.loads(
                    env.safe_run_output("{conda_bin} list --json {problem}".
                                        format(**locals())))
            ]
            if problem in cur_packages:
                env.safe_run("{conda_bin} remove --force -y {problem}".format(
                    **locals()))
        # install our customized packages
        if len(packages) > 0:
            # Packages are grouped by target conda environment; env_name None
            # means the base environment (no -n flag).
            for env_name, env_packages in _split_by_condaenv(packages):
                if env_name:
                    assert env_name in conda_envs, (env_name, conda_envs)
                    env_str = "-n %s" % env_name
                else:
                    env_str = ""
                # Quote and sort package specs for a stable install command.
                pkgs_str = " ".join(["'%s'" % x for x in sorted(env_packages)])
                py_version = ENV_PY_VERSIONS[env_name]
                if "deepvariant" in env_packages:
                    # Ignore /etc/boto.cfg which creates conflicts with conda gsutils
                    # https://github.com/GoogleCloudPlatform/gsutil/issues/516
                    exports = "export BOTO_CONFIG=/ignoreglobal && "
                else:
                    exports = ""
                env.safe_run(
                    "{exports}{conda_bin} install -y {env_str} {channels} "
                    "{py_version} {pkgs_str}".format(**locals()))
                # Re-query the environment's package list so _link_bin sees
                # the freshly installed versions.
                conda_pkg_list = json.loads(
                    env.safe_run_output(
                        "{conda_bin} list --json {env_str}".format(
                            **locals())))
                for package in env_packages:
                    _link_bin(package,
                              env,
                              conda_info,
                              conda_bin,
                              conda_pkg_list,
                              conda_envdir=conda_envs.get(env_name))
        # Expose prefixed python/conda/pip (bcbio_python, ...) from the base
        # environment so bcbio tooling can locate them.
        conda_pkg_list = json.loads(
            env.safe_run_output("{conda_bin} list --json".format(**locals())))
        for pkg in ["python", "conda", "pip"]:
            _link_bin(pkg,
                      env,
                      conda_info,
                      conda_bin,
                      conda_pkg_list,
                      files=[pkg],
                      prefix="bcbio_")
Exemplo n.º 47
0
def _do_library_installs(to_install):
    """Run the configured installer for each requested library collection.

    Each name in ``to_install`` maps to a ``<name>.yaml`` config file and an
    entry in the module-level ``lib_installers`` dispatch table.
    """
    for iname in to_install:
        yaml_file = get_config_file(env, "%s.yaml" % iname).base
        with open(yaml_file) as in_handle:
            # safe_load: configs are plain data; yaml.load without a loader
            # is deprecated and can construct arbitrary objects.
            config = yaml.safe_load(in_handle)
        lib_installers[iname](config)
Exemplo n.º 48
0
def _install_custom(p, pkg_to_group=None):
    """Install custom program ``p``, loading custom.yaml if no mapping given."""
    if pkg_to_group is None:
        config_path = get_config_file(env, "custom.yaml").base
        _, pkg_to_group = _yaml_to_packages(config_path, None)
    _custom_install_function(env, p, pkg_to_group)(env)
Exemplo n.º 49
0
def _install_custom(p, pkg_to_group=None):
    """Dispatch to the custom installer function for program ``p``."""
    if pkg_to_group is None:
        # No mapping supplied: derive it from the custom.yaml configuration.
        packages, pkg_to_group = _yaml_to_packages(
            get_config_file(env, "custom.yaml").base, None)
    installer = _custom_install_function(env, p, pkg_to_group)
    installer(env)