Example #1
    def _solve_env(self, env, all_outputs):
        if self.requirements.get(env):
            console.print(f"Finalizing [yellow]{env}[/yellow] for {self.name}")
            specs = self.requirements[env]
            for s in specs:
                if s.is_pin:
                    s.eval_pin_subpackage(all_outputs)
                if env == "run" and s.is_pin_compatible:
                    s.eval_pin_compatible(self.requirements["build"],
                                          self.requirements["host"])

            # save finalized requirements in data for usage in metadata
            self.data["requirements"][env] = [
                s.final for s in self.requirements[env]
            ]

            spec_map = {s.final_name: s for s in specs}
            specs = [str(x) for x in specs]

            pkg_cache = PackageCacheData.first_writable().pkgs_dir
            if env in ("host", "run") and not self.config.subdirs_same:
                subdir = self.config.host_subdir
            else:
                subdir = self.config.build_subdir

            solver, pkg_cache = get_solver(subdir)
            t = solver.solve(specs, [pkg_cache])

            _, install_pkgs, _ = t.to_conda()
            for _, _, p in install_pkgs:
                p = json.loads(p)
                if p["name"] in spec_map:
                    spec_map[p["name"]].final_version = (
                        p["version"],
                        p["build_string"],
                    )
                    spec_map[p["name"]].channel = p["channel"]
                else:
                    cbs = CondaBuildSpec(f"{p['name']}")
                    cbs.is_transitive_dependency = True
                    cbs.final_version = (p["version"], p["build_string"])
                    cbs.channel = p["channel"]
                    self.requirements[env].append(cbs)

            self.transactions[env] = {
                "transaction": t,
                "pkg_cache": pkg_cache,
            }

            downloaded = t.fetch_extract_packages(
                pkg_cache,
                solver.repos + list(solver.local_repos.values()),
            )
            if not downloaded:
                raise RuntimeError("Did not succeed in downloading packages.")

            if env in ("build", "host"):
                self.propagate_run_exports(env,
                                           self.transactions[env]["pkg_cache"])
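All of these examples locate conda's writable package cache through the same call. A minimal standalone sketch of that lookup, assuming conda is importable in the running environment:

from conda.core.package_cache_data import PackageCacheData

# first_writable() picks the first package cache conda can write to;
# pkgs_dir is its location on disk (e.g. ~/miniconda3/pkgs).
pkgs_dir = PackageCacheData.first_writable().pkgs_dir
print(pkgs_dir)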
Example #2
    def propagate_run_exports(self, env):
        # find all run exports
        collected_run_exports = []
        for s in self.requirements[env]:
            if s.is_transitive_dependency:
                continue
            if s.name in self.sections['build'].get('ignore_run_exports', []):
                continue
            if hasattr(s, "final_version"):
                final_triple = (
                    f"{s.final_name}-{s.final_version[0]}-{s.final_version[1]}"
                )
            else:
                print(f"{s} has no final version")
                continue
            path = os.path.join(
                PackageCacheData.first_writable().pkgs_dir,
                final_triple,
                "info/run_exports.json",
            )
            if os.path.exists(path):
                with open(path) as fi:
                    run_exports_info = json.load(fi)
                    s.run_exports_info = run_exports_info
                    collected_run_exports.append(run_exports_info)
            else:
                s.run_exports_info = None

        def append_or_replace(env, spec):
            spec = CondaBuildSpec(spec)
            name = spec.name
            spec.from_run_export = True
            for idx, r in enumerate(self.requirements[env]):
                if r.final_name == name:
                    self.requirements[env][idx] = spec
                    return
            self.requirements[env].append(spec)

        if env == "build":
            for rex in collected_run_exports:
                if "strong" in rex:
                    for r in rex["strong"]:
                        append_or_replace("host", r)
                        append_or_replace("run", r)
                if "weak" in rex:
                    for r in rex["weak"]:
                        append_or_replace("host", r)

        if env == "host":
            for rex in collected_run_exports:
                if "strong" in rex:
                    for r in rex["strong"]:
                        append_or_replace("run", r)
                if "weak" in rex:
                    for r in rex["weak"]:
                        append_or_replace("run", r)
Example #3
def get_solver(subdir):
    pkg_cache = PackageCacheData.first_writable().pkgs_dir
    if subdir == "noarch":
        subdir = context.subdir
    elif subdir != context.subdir:
        pkg_cache = os.path.join(pkg_cache, subdir)
        if not os.path.exists(pkg_cache):
            os.makedirs(pkg_cache, exist_ok=True)

    if not solver_cache.get(subdir):
        solver_cache[subdir] = MambaSolver([], subdir)

    return solver_cache[subdir], pkg_cache
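A hedged usage sketch of get_solver as called from Examples 1 and 11; the subdir string is illustrative, and because the MambaSolver above is created with an empty channel list, replace_channels() (or equivalent channel setup) is expected before solving:

solver, pkg_cache = get_solver("linux-64")   # illustrative subdir
solver.replace_channels()                    # load channel repodata, as in Examples 7 and 11
transaction = solver.solve(["python >=3.9"], [pkg_cache])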
Example #4
    def _solve_env(self, env, all_outputs, solver):
        if self.requirements.get(env):
            if not self.requirements[env]:
                return
            console.print(f"Finalizing [yellow]{env}[/yellow] for {self.name}")
            specs = self.requirements[env]
            for s in specs:
                if s.is_pin:
                    s.eval_pin_subpackage(all_outputs)
                if env == "run" and s.is_pin_compatible:
                    s.eval_pin_compatible(self.requirements["build"],
                                          self.requirements["host"])

            spec_map = {s.final_name: s for s in specs}
            specs = [str(x) for x in specs]
            t = solver.solve(specs, "")

            _, install_pkgs, _ = t.to_conda()
            for _, _, p in install_pkgs:
                p = json.loads(p)
                if p["name"] in spec_map:
                    spec_map[p["name"]].final_version = (
                        p["version"],
                        p["build_string"],
                    )
                    spec_map[p["name"]].channel = p["channel"]
                else:
                    cbs = CondaBuildSpec(f"{p['name']}")
                    cbs.is_transitive_dependency = True
                    cbs.final_version = (p["version"], p["build_string"])
                    cbs.channel = p["channel"]
                    self.requirements[env].append(cbs)

            self.transactions[env] = t

            downloaded = t.fetch_extract_packages(
                PackageCacheData.first_writable().pkgs_dir,
                solver.repos + list(solver.local_repos.values()),
            )
            if not downloaded:
                raise RuntimeError("Did not succeed in downloading packages.")

            if env in ("build", "host"):
                self.propagate_run_exports(env)
Example #5
def make_temp_channel(packages):
    package_reqs = [pkg.replace("-", "=") for pkg in packages]
    package_names = [pkg.split("-")[0] for pkg in packages]

    with make_temp_env(*package_reqs) as prefix:
        for package in packages:
            assert package_is_installed(prefix, package.replace("-", "="))
        data = [
            p for p in PrefixData(prefix).iter_records()
            if p["name"] in package_names
        ]
        run_command(Commands.REMOVE, prefix, *package_names)
        for package in packages:
            assert not package_is_installed(prefix, package.replace("-", "="))

    repodata = {"info": {}, "packages": {}}
    tarfiles = {}
    for package_data in data:
        pkg_data = package_data
        fname = pkg_data["fn"]
        tarfiles[fname] = join(PackageCacheData.first_writable().pkgs_dir,
                               fname)

        pkg_data = pkg_data.dump()
        for field in ("url", "channel", "schannel"):
            pkg_data.pop(field, None)
        repodata["packages"][fname] = PackageRecord(**pkg_data)

    with make_temp_env() as channel:
        subchan = join(channel, context.subdir)
        noarch_dir = join(channel, "noarch")
        channel = path_to_url(channel)
        os.makedirs(subchan)
        os.makedirs(noarch_dir)
        for fname, tar_old_path in tarfiles.items():
            tar_new_path = join(subchan, fname)
            copyfile(tar_old_path, tar_new_path)

        with open(join(subchan, "repodata.json"), "w") as f:
            f.write(json.dumps(repodata, cls=EntityEncoder))
        with open(join(noarch_dir, "repodata.json"), "w") as f:
            f.write(json.dumps({}, cls=EntityEncoder))

        yield channel
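Since make_temp_channel yields, it is presumably wrapped as a context manager in the original test module; a hypothetical usage sketch (the package spec is illustrative):

# Hypothetical usage, assuming @contextlib.contextmanager decorates
# make_temp_channel in the original module.
with make_temp_channel(["flask-1.1.2"]) as channel:
    print(channel)  # file:// URL of the temporary local channel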
Example #6
    def solve(self, specs, pkg_cache_path=None):
        """Solve given a set of specs.
        Parameters
        ----------
        specs : list of str
            A list of package specs. You can use `conda.models.match_spec.MatchSpec`
            to get them to the right form by calling
            `MatchSpec(mypec).conda_build_form()`
        Returns
        -------
        solvable : bool
            True if the set of specs has a solution, False otherwise.
        """
        solver_options = [(mamba_api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]
        api_solver = mamba_api.Solver(self.pool, solver_options)
        _specs = specs

        api_solver.add_jobs(_specs, mamba_api.SOLVER_INSTALL)
        success = api_solver.solve()

        if not success:
            error_string = "Mamba failed to solve:\n"
            for s in _specs:
                error_string += f" - {s}\n"
            error_string += "\nwith channels:\n"
            for c in self.channels:
                error_string += f" - {c}\n"
            pstring = api_solver.problems_to_str()
            pstring = "\n".join(["   " + el for el in pstring.split("\n")])
            error_string += f"\nThe reported errors are:\n{pstring}"
            print(error_string)
            exit(1)

        if pkg_cache_path is None:
            # use values from conda
            pkg_cache_path = pkgs_dirs
            writeable_dir = PackageCacheData.first_writable().pkgs_dir
        else:
            writeable_dir = pkg_cache_path[0]

        package_cache = mamba_api.MultiPackageCache(pkg_cache_path)
        t = mamba_api.Transaction(api_solver, package_cache, writeable_dir)
        return t
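The Transaction returned here is consumed elsewhere in this document (Examples 1, 4 and 11) roughly as in the following sketch; solver and pkg_cache_path are assumed to be set up as in those examples:

t = solver.solve(["numpy"], [pkg_cache_path])
_, install_pkgs, _ = t.to_conda()
downloaded = t.fetch_extract_packages(
    pkg_cache_path,
    solver.repos + list(solver.local_repos.values()),
)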
Example #7
def build_recipe(args, recipe_path, cbc, config):

    if args.features:
        assert args.features.startswith("[") and args.features.endswith("]")
        features = [f.strip() for f in args.features[1:-1].split(",")]
    else:
        features = []

    selected_features = {}
    for f in features:
        if f.startswith("~"):
            selected_features[f[1:]] = False
        else:
            selected_features[f] = True

    ydoc = render(recipe_path, config=config)
    # We need to assemble the variants for each output
    variants = {}
    # if we have an outputs section, use it to order the outputs
    if ydoc.get("outputs"):
        for o in ydoc["outputs"]:
            # inherit from global package
            pkg_meta = {}
            pkg_meta.update(ydoc["package"])
            pkg_meta.update(o["package"])
            o["package"] = pkg_meta

            build_meta = {}
            build_meta.update(ydoc.get("build"))
            build_meta.update(o.get("build") or {})
            o["build"] = build_meta

            o["selected_features"] = selected_features

            variants[o["package"]["name"]] = get_dependency_variants(
                o.get("requirements", {}), cbc, config, features
            )
    else:
        # we only have one output
        variants[ydoc["package"]["name"]] = get_dependency_variants(
            ydoc.get("requirements", {}), cbc, config, features
        )

    # this takes in all variants and outputs, builds a dependency tree and returns
    # the final metadata
    sorted_outputs = to_build_tree(ydoc, variants, config, selected_features)

    # then we need to solve and build from the bottom up
    # we can't first solve all packages without finalizing everything
    #
    # FLOW:
    # =====
    # - solve the package
    #   - solv build, add weak run exports to
    # - add run exports from deps!

    if args.command == "render":
        for o in sorted_outputs:
            console.print(o)
        exit()

    # TODO this should be done cleaner
    top_name = ydoc["package"]["name"]
    o0 = sorted_outputs[0]
    o0.is_first = True
    o0.config.compute_build_id(top_name)

    console.print("\n[yellow]Initializing mamba solver[/yellow]\n")
    solver = MambaSolver([], context.subdir)

    console.print("\n[yellow]Downloading source[/yellow]\n")
    download_source(MetaData(recipe_path, o0), args.interactive)
    cached_source = o0.sections["source"]

    for o in sorted_outputs:
        console.print(
            f"\n[yellow]Preparing environment for [bold]{o.name}[/bold][/yellow]\n"
        )
        solver.replace_channels()
        o.finalize_solve(sorted_outputs, solver)

        o.config._build_id = o0.config.build_id

        if "build" in o.transactions:
            if os.path.isdir(o.config.build_prefix):
                rm_rf(o.config.build_prefix)
            mkdir_p(o.config.build_prefix)
            try:
                o.transactions["build"].execute(
                    PrefixData(o.config.build_prefix),
                    PackageCacheData.first_writable().pkgs_dir,
                )
            except Exception:
                # This currently enables windows-multi-build...
                print("Could not instantiate build environment")

        if "host" in o.transactions:
            mkdir_p(o.config.host_prefix)
            o.transactions["host"].execute(
                PrefixData(o.config.host_prefix),
                PackageCacheData.first_writable().pkgs_dir,
            )

        meta = MetaData(recipe_path, o)
        o.set_final_build_id(meta)

        if cached_source != o.sections["source"]:
            download_source(meta, args.interactive)

        console.print(f"\n[yellow]Starting build for [bold]{o.name}[/bold][/yellow]\n")

        build(meta, None, allow_interactive=args.interactive)

    for o in sorted_outputs:
        print("\n\n")
        console.print(o)
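The --features argument parsed at the top of build_recipe is expected to look like "[feat1, ~feat2]"; a small standalone sketch of that parsing (values are illustrative):

features_arg = "[static, ~docs]"  # illustrative command-line value
assert features_arg.startswith("[") and features_arg.endswith("]")
features = [f.strip() for f in features_arg[1:-1].split(",")]
selected_features = {f.lstrip("~"): not f.startswith("~") for f in features}
print(selected_features)  # {'static': True, 'docs': False}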
Example #8
def install(args, parser, command="install"):
    """
    mamba install, mamba update, and mamba create
    """
    context.validate_configuration()
    check_non_admin()

    init_api_context(use_mamba_experimental)

    newenv = bool(command == "create")
    isinstall = bool(command == "install")
    solver_task = api.SOLVER_INSTALL

    isupdate = bool(command == "update")
    if isupdate:
        solver_task = api.SOLVER_UPDATE
        solver_options.clear()

    if newenv:
        ensure_name_or_prefix(args, command)
    prefix = context.target_prefix
    if newenv:
        check_prefix(prefix, json=context.json)
    if context.force_32bit and prefix == context.root_prefix:
        raise CondaValueError("cannot use CONDA_FORCE_32BIT=1 in base env")
    if isupdate and not (args.file or args.packages or context.update_modifier
                         == UpdateModifier.UPDATE_ALL):
        raise CondaValueError("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)

    if not newenv:
        if isdir(prefix):
            if on_win:
                delete_trash(prefix)

            if not isfile(join(prefix, "conda-meta", "history")):
                if paths_equal(prefix, context.conda_prefix):
                    raise NoBaseEnvironmentError()
                else:
                    if not path_is_clean(prefix):
                        raise DirectoryNotACondaEnvironmentError(prefix)
            else:
                # fall-through expected under normal operation
                pass
        else:
            if hasattr(args, "mkdir") and args.mkdir:
                try:
                    mkdir_p(prefix)
                except EnvironmentError as e:
                    raise CondaOSError("Could not create directory: %s" %
                                       prefix,
                                       caused_by=e)
            else:
                raise EnvironmentLocationNotFound(prefix)

    prefix = context.target_prefix

    #############################
    # Get SPECS                 #
    #############################

    args_packages = [s.strip("\"'") for s in args.packages]
    if newenv and not args.no_default_packages:
        # Override defaults if they are specified at the command line
        # TODO: rework in 4.4 branch using MatchSpec
        args_packages_names = [
            pkg.replace(" ", "=").split("=", 1)[0] for pkg in args_packages
        ]
        for default_pkg in context.create_default_packages:
            default_pkg_name = default_pkg.replace(" ", "=").split("=", 1)[0]
            if default_pkg_name not in args_packages_names:
                args_packages.append(default_pkg)

    num_cp = sum(s.endswith(".tar.bz2") for s in args_packages)
    if num_cp:
        if num_cp == len(args_packages):
            explicit(args_packages,
                     prefix,
                     verbose=not (context.quiet or context.json))
            return
        else:
            raise CondaValueError(
                "cannot mix specifications with conda package"
                " filenames")

    specs = []

    index_args = {
        "use_cache": args.use_index_cache,
        "channel_urls": context.channels,
        "unknown": args.unknown,
        "prepend": not args.override_channels,
        "use_local": args.use_local,
    }

    if args.file:
        file_specs = []
        for fpath in args.file:
            try:
                file_specs += specs_from_url(fpath, json=context.json)
            except UnicodeError:
                raise CondaValueError(
                    "Error reading file, file should be a text file containing"
                    " packages \nconda create --help for details")
        if "@EXPLICIT" in file_specs:
            explicit(
                file_specs,
                prefix,
                verbose=not (context.quiet or context.json),
                index_args=index_args,
            )
            return

        specs.extend([MatchSpec(s) for s in file_specs])

    specs.extend(specs_from_args(args_packages, json=context.json))

    # update channels from package specs (e.g. mychannel::mypackage adds mychannel)
    channels = [c for c in context.channels]
    for spec in specs:
        # CONDA TODO: correct handling for subdir isn't yet done
        spec_channel = spec.get_exact_value("channel")
        if spec_channel and spec_channel not in channels:
            channels.append(spec_channel)

    index_args["channel_urls"] = channels

    installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)

    if isinstall and args.revision:
        get_revision(args.revision, json=context.json)
    elif isinstall and not (args.file or args_packages):
        raise CondaValueError(
            "too few arguments, "
            "must supply command line package specs or --file")

    installed_names = [i_rec.name for i_rec in installed_pkg_recs]
    # for 'conda update', make sure the requested specs actually exist in the prefix
    # and that they are name-only specs
    if isupdate and context.update_modifier == UpdateModifier.UPDATE_ALL:
        for i in installed_names:
            if i != "python":
                specs.append(MatchSpec(i))

        prefix_data = PrefixData(prefix)
        for s in args_packages:
            s = MatchSpec(s)
            if s.name == "python":
                specs.append(s)
            if not s.is_name_only_spec:
                raise CondaValueError("Invalid spec for 'conda update': %s\n"
                                      "Use 'conda install' instead." % s)
            if not prefix_data.get(s.name, None):
                raise PackageNotInstalledError(prefix, s.name)

    elif context.update_modifier == UpdateModifier.UPDATE_DEPS:
        # find the deps for each package and add to the update job
        # solver_task |= api.SOLVER_FORCEBEST
        final_specs = specs
        for spec in specs:
            prec = installed_pkg_recs[installed_names.index(spec.name)]
            for dep in prec.depends:
                ms = MatchSpec(dep)
                if ms.name != "python":
                    final_specs.append(MatchSpec(ms.name))
        specs = set(final_specs)

    if newenv and args.clone:
        if args.packages:
            raise TooManyArgumentsError(
                0,
                len(args.packages),
                list(args.packages),
                "did not expect any arguments for --clone",
            )

        clone(
            args.clone,
            prefix,
            json=context.json,
            quiet=(context.quiet or context.json),
            index_args=index_args,
        )
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
        return

    if not (context.quiet or context.json):
        print("\nLooking for: {}\n".format([str(s) for s in specs]))

    spec_names = [s.name for s in specs]

    # If python was not specified, check if it is installed.
    # If yes, add the installed python to the specs to prevent updating it.
    python_constraint = None

    if "python" not in spec_names:
        if "python" in installed_names:
            i = installed_names.index("python")
            version = installed_pkg_recs[i].version
            python_constraint = MatchSpec("python==" +
                                          version).conda_build_form()

    mamba_solve_specs = [s.__str__() for s in specs]

    if context.channel_priority is ChannelPriority.STRICT:
        solver_options.append((api.SOLVER_FLAG_STRICT_REPO_PRIORITY, 1))

    pool = api.Pool()

    repos = []

    prefix_data = api.PrefixData(context.target_prefix)
    prefix_data.load()

    # add installed
    if use_mamba_experimental:
        repo = api.Repo(pool, prefix_data)
        repos.append(repo)
    else:
        repo = api.Repo(pool, "installed", installed_json_f.name, "")
        repo.set_installed()
        repos.append(repo)

    if newenv and not specs:
        # creating an empty environment with e.g. "mamba create -n my_env"
        # should not download the repodata
        index = []
        specs_to_add = []
        specs_to_remove = []
        to_link = []
        to_unlink = []
        installed_pkg_recs = []
    else:
        index = load_channels(pool, channels, repos)

        if context.force_reinstall:
            solver = api.Solver(pool, solver_options, prefix_data)
        else:
            solver = api.Solver(pool, solver_options)

        solver.set_postsolve_flags([
            (api.MAMBA_NO_DEPS, context.deps_modifier == DepsModifier.NO_DEPS),
            (api.MAMBA_ONLY_DEPS,
             context.deps_modifier == DepsModifier.ONLY_DEPS),
            (api.MAMBA_FORCE_REINSTALL, context.force_reinstall),
        ])

        if context.update_modifier is UpdateModifier.FREEZE_INSTALLED:
            solver.add_jobs([p for p in prefix_data.package_records],
                            api.SOLVER_LOCK)

        solver.add_jobs(mamba_solve_specs, solver_task)

        if not context.force_reinstall:
            # as a security feature this will _always_ attempt to upgrade
            # certain packages
            for a_pkg in [_.name for _ in context.aggressive_update_packages]:
                if a_pkg in installed_names:
                    solver.add_jobs([a_pkg], api.SOLVER_UPDATE)

        pinned_specs_info = ""
        if python_constraint:
            solver.add_pin(python_constraint)
            pinned_specs_info += f"  - {python_constraint}\n"

        pinned_specs = get_pinned_specs(context.target_prefix)
        if pinned_specs:
            conda_prefix_data = PrefixData(context.target_prefix)
        for s in pinned_specs:
            x = conda_prefix_data.query(s.name)
            if x:
                for el in x:
                    if not s.match(el):
                        print(
                            "Your pinning does not match what's currently installed."
                            " Please remove the pin and fix your installation")
                        print("  Pin: {}".format(s))
                        print("  Currently installed: {}".format(el))
                        exit(1)

            try:
                final_spec = s.conda_build_form()
                pinned_specs_info += f"  - {final_spec}\n"
                solver.add_pin(final_spec)
            except AssertionError:
                print(f"\nERROR: could not add pinned spec {s}. Make sure pin"
                      "is of the format\n"
                      "libname VERSION BUILD, for example libblas=*=*mkl\n")

        if pinned_specs_info and not (context.quiet or context.json):
            print(f"\nPinned packages:\n{pinned_specs_info}\n")

        success = solver.solve()
        if not success:
            print(solver.problems_to_str())
            exit_code = 1
            return exit_code

        package_cache = api.MultiPackageCache(context.pkgs_dirs)
        transaction = api.Transaction(
            solver, package_cache,
            PackageCacheData.first_writable().pkgs_dir)
        mmb_specs, to_link, to_unlink = transaction.to_conda()

        specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]
        specs_to_remove = [MatchSpec(m) for m in mmb_specs[1]]

        transaction.log_json()

        downloaded = transaction.prompt(repos)
        if not downloaded:
            exit(0)
        PackageCacheData.first_writable().reload()

    # if use_mamba_experimental and not os.name == "nt":
    if use_mamba_experimental:
        if newenv and not isdir(context.target_prefix) and not context.dry_run:
            mkdir_p(prefix)

        transaction.execute(prefix_data)
    else:
        conda_transaction = to_txn(
            specs_to_add,
            specs_to_remove,
            prefix,
            to_link,
            to_unlink,
            installed_pkg_recs,
            index,
        )
        handle_txn(conda_transaction, prefix, args, newenv)

    try:
        installed_json_f.close()
        os.unlink(installed_json_f.name)
    except Exception:
        pass
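Pins passed to solver.add_pin above are produced with MatchSpec.conda_build_form(), part of conda's model API; a minimal sketch (the spec string is illustrative):

from conda.models.match_spec import MatchSpec

# conda_build_form() renders a spec as the space-separated
# "name version build" string expected by solver.add_pin.
pin = MatchSpec("libblas=*=*mkl").conda_build_form()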
Example #9
def remove(args, parser):
    if not (args.all or args.package_names):
        raise CondaValueError("no package names supplied,\n"
                              '       try "mamba remove -h" for more details')

    prefix = context.target_prefix
    check_non_admin()
    init_api_context()

    if args.all and prefix == context.default_prefix:
        raise CondaEnvironmentError("cannot remove current environment. \
                                     deactivate and run mamba remove again")

    if args.all and path_is_clean(prefix):
        # full environment removal was requested, but environment doesn't exist anyway
        return 0

    if args.all:
        if prefix == context.root_prefix:
            raise CondaEnvironmentError(
                "cannot remove root environment,\n"
                "       add -n NAME or -p PREFIX option")
        print("\nRemove all packages in environment %s:\n" % prefix,
              file=sys.stderr)

        if "package_names" in args:
            stp = PrefixSetup(
                target_prefix=prefix,
                unlink_precs=tuple(PrefixData(prefix).iter_records()),
                link_precs=(),
                remove_specs=(),
                update_specs=(),
                neutered_specs=(),
            )
            txn = UnlinkLinkTransaction(stp)
            try:
                handle_txn(txn, prefix, args, False, True)
            except PackagesNotFoundError:
                print(
                    "No packages found in %s. Continuing environment removal" %
                    prefix)

        rm_rf(prefix, clean_empty_parents=True)
        unregister_env(prefix)

        return

    else:
        if args.features:
            specs = tuple(
                MatchSpec(track_features=f) for f in set(args.package_names))
        else:
            specs = [s for s in specs_from_args(args.package_names)]
        if not context.quiet:
            print("Removing specs: {}".format(
                [s.conda_build_form() for s in specs]))

        installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)

        mamba_solve_specs = [s.conda_build_form() for s in specs]

        solver_options.append((api.SOLVER_FLAG_ALLOW_UNINSTALL, 1))

        if context.channel_priority is ChannelPriority.STRICT:
            solver_options.append((api.SOLVER_FLAG_STRICT_REPO_PRIORITY, 1))

        pool = api.Pool()
        repos = []

        # add installed
        if use_mamba_experimental:
            prefix_data = api.PrefixData(context.target_prefix)
            prefix_data.load()
            repo = api.Repo(pool, prefix_data)
            repos.append(repo)
        else:
            repo = api.Repo(pool, "installed", installed_json_f.name, "")
            repo.set_installed()
            repos.append(repo)

        solver = api.Solver(pool, solver_options)

        history = api.History(context.target_prefix)
        history_map = history.get_requested_specs_map()
        solver.add_jobs(
            [ms.conda_build_form() for ms in history_map.values()],
            api.SOLVER_USERINSTALLED,
        )

        solver.add_jobs(mamba_solve_specs,
                        api.SOLVER_ERASE | api.SOLVER_CLEANDEPS)
        success = solver.solve()
        if not success:
            print(solver.problems_to_str())
            exit_code = 1
            return exit_code

        package_cache = api.MultiPackageCache(context.pkgs_dirs)
        transaction = api.Transaction(
            solver, package_cache,
            PackageCacheData.first_writable().pkgs_dir)
        downloaded = transaction.prompt(repos)
        if not downloaded:
            exit(0)

        mmb_specs, to_link, to_unlink = transaction.to_conda()
        transaction.log_json()

        specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]
        specs_to_remove = [MatchSpec(m) for m in mmb_specs[1]]

        conda_transaction = to_txn(
            specs_to_add,
            specs_to_remove,
            prefix,
            to_link,
            to_unlink,
            installed_pkg_recs,
        )
        handle_txn(conda_transaction, prefix, args, False, True)
Example #10
def main(config=None):
    print(banner)

    parser = argparse.ArgumentParser(
        description="Boa, the fast, mamba powered-build tool for conda packages."
    )
    subparsers = parser.add_subparsers(help="sub-command help", dest="command")
    parent_parser = argparse.ArgumentParser(add_help=False)
    parent_parser.add_argument("recipe_dir", type=str)

    render_parser = subparsers.add_parser(
        "render", parents=[parent_parser], help="render a recipe"
    )
    convert_parser = subparsers.add_parser(
        "convert", parents=[parent_parser], help="convert recipe.yaml to old-style meta.yaml"
    )
    build_parser = subparsers.add_parser(
        "build", parents=[parent_parser], help="build a recipe"
    )
    args = parser.parse_args()

    command = args.command

    if command == 'convert':
        from boa.cli import convert
        convert.main(args.recipe_dir)
        exit()

    folder = args.recipe_dir
    cbc, config = get_config(folder)

    if not os.path.exists(config.output_folder):
        mkdir_p(config.output_folder)
    print(f"Updating build index: {(config.output_folder)}\n")
    update_index(config.output_folder, verbose=config.debug, threads=1)

    recipe_path = os.path.join(folder, "recipe.yaml")

    # step 1: parse YAML
    with open(recipe_path) as fi:
        loader = YAML(typ="safe")
        ydoc = loader.load(fi)

    # step 2: fill out context dict
    context_dict = ydoc.get("context") or {}
    jenv = jinja2.Environment()
    for key, value in context_dict.items():
        if isinstance(value, str):
            tmpl = jenv.from_string(value)
            context_dict[key] = tmpl.render(context_dict)

    if ydoc.get("context"):
        del ydoc["context"]

    # step 3: recursively loop over the entire recipe and render jinja with context
    jenv.globals.update(jinja_functions(config, context_dict))
    for key in ydoc:
        render_recursive(ydoc[key], context_dict, jenv)

    flatten_selectors(ydoc, ns_cfg(config))
    normalize_recipe(ydoc)

    # pprint(ydoc)
    # We need to assemble the variants for each output
    variants = {}
    # if we have an outputs section, use it to order the outputs
    if ydoc.get("outputs"):
        for o in ydoc["outputs"]:
            # inherit from global package
            pkg_meta = {}
            pkg_meta.update(ydoc["package"])
            pkg_meta.update(o["package"])
            o["package"] = pkg_meta

            build_meta = {}
            build_meta.update(ydoc.get("build"))
            build_meta.update(o.get("build") or {})
            o["build"] = build_meta
            variants[o["package"]["name"]] = get_dependency_variants(
                o.get("requirements", {}), cbc, config
            )
    else:
        # we only have one output
        variants[ydoc["package"]["name"]] = get_dependency_variants(
            ydoc.get("requirements", {}), cbc, config
        )

    # this takes in all variants and outputs, builds a dependency tree and returns
    # the final metadata
    sorted_outputs = to_build_tree(ydoc, variants, config)

    # then we need to solve and build from the bottom up
    # we can't first solve all packages without finalizing everything
    #
    # FLOW:
    # =====
    # - solve the package
    #   - solv build, add weak run exports to
    # - add run exports from deps!

    print('\n')
    if command == "render":
        for o in sorted_outputs:
            print(o)
        exit()


    # TODO this should be done cleaner
    top_name = ydoc['package']['name']
    o0 = sorted_outputs[0]
    o0.is_first = True
    o0.config.compute_build_id(top_name)

    solver = MambaSolver(["conda-forge"], context.subdir)
    print("\n")

    download_source(MetaData(recipe_path, o0))
    cached_source = o0.sections['source']

    for o in sorted_outputs:
        solver.replace_channels()
        o.finalize_solve(sorted_outputs, solver)
        # print(o)

        o.config._build_id = o0.config.build_id

        if "build" in o.transactions:
            if isdir(o.config.build_prefix):
                utils.rm_rf(o.config.build_prefix)
            mkdir_p(o.config.build_prefix)
            o.transactions['build'].execute(
                PrefixData(o.config.build_prefix),
                PackageCacheData.first_writable().pkgs_dir,
            )

        if "host" in o.transactions:
            mkdir_p(o.config.host_prefix)
            o.transactions['host'].execute(
                PrefixData(o.config.host_prefix),
                PackageCacheData.first_writable().pkgs_dir,
            )

        meta = MetaData(recipe_path, o)
        o.final_build_id = meta.build_id()

        if cached_source != o.sections['source']:
            download_source(meta)

        build(meta, None)

    for o in sorted_outputs:
        print("\n")
        print(o)
Example #11
def run_test(
    recipedir_or_package_or_metadata,
    config,
    stats,
    move_broken=True,
    provision_only=False,
    solver=None,
):
    """
    Execute any test scripts for the given package.

    :param recipedir_or_package_or_metadata: the package's metadata, a recipe
        directory, or a built package file.
    :type recipedir_or_package_or_metadata: MetaData or str
    """

    # we want to know if we're dealing with package input.  If so, we can move the input on success.
    hash_input = {}

    # store this name to keep it consistent.  By changing files, we change the hash later.
    #    It matches the build hash now, so let's keep it around.
    test_package_name = (
        recipedir_or_package_or_metadata.dist()
        if hasattr(recipedir_or_package_or_metadata, "dist")
        else recipedir_or_package_or_metadata
    )

    if not provision_only:
        print("TEST START:", test_package_name)

    if hasattr(recipedir_or_package_or_metadata, "config"):
        metadata = recipedir_or_package_or_metadata
        utils.rm_rf(metadata.config.test_dir)
    else:
        metadata, hash_input = construct_metadata_for_test(
            recipedir_or_package_or_metadata, config
        )

    trace = "-x " if metadata.config.debug else ""

    # Must download *after* computing build id, or else computing build id will change
    #     folder destination
    _extract_test_files_from_package(metadata)

    # When testing a .tar.bz2 in the pkgs dir, clean_pkg_cache() will remove it.
    # Prevent this. When https://github.com/conda/conda/issues/5708 gets fixed
    # I think we can remove this call to clean_pkg_cache().
    in_pkg_cache = (
        not hasattr(recipedir_or_package_or_metadata, "config")
        and os.path.isfile(recipedir_or_package_or_metadata)
        and recipedir_or_package_or_metadata.endswith(CONDA_PACKAGE_EXTENSIONS)
        and os.path.dirname(recipedir_or_package_or_metadata) in pkgs_dirs[0]
    )
    if not in_pkg_cache:
        environ.clean_pkg_cache(metadata.dist(), metadata.config)

    copy_test_source_files(metadata, metadata.config.test_dir)
    # this is also copying tests/source_files from work_dir to testing workdir

    _, pl_files, py_files, r_files, lua_files, shell_files = create_all_test_files(
        metadata
    )

    if (
        not any([py_files, shell_files, pl_files, lua_files, r_files])
        and not metadata.config.test_run_post
    ):
        print("Nothing to test for:", test_package_name)
        return True

    if metadata.config.remove_work_dir:
        for name, prefix in (
            ("host", metadata.config.host_prefix),
            ("build", metadata.config.build_prefix),
        ):
            if os.path.isdir(prefix):
                # move host folder to force hardcoded paths to host env to break during tests
                #    (so that they can be properly addressed by recipe author)
                dest = os.path.join(
                    os.path.dirname(prefix),
                    "_".join(
                        (
                            "%s_prefix_moved" % name,
                            metadata.dist(),
                            getattr(metadata.config, "%s_subdir" % name),
                        )
                    ),
                )
                # Needs to come after create_files in case there's test/source_files
                shutil_move_more_retrying(prefix, dest, "{} prefix".format(prefix))

        # nested if so that there's no warning when we just leave the empty workdir in place
        if metadata.source_provided:
            dest = os.path.join(
                os.path.dirname(metadata.config.work_dir),
                "_".join(("work_moved", metadata.dist(), metadata.config.host_subdir)),
            )
            # Needs to come after create_files in case there's test/source_files
            shutil_move_more_retrying(config.work_dir, dest, "work")
    else:
        log.warn(
            "Not moving work directory after build.  Your package may depend on files "
            "in the work directory that are not included with your package"
        )

    # looks like a dead function to me
    # get_build_metadata(metadata)

    specs = metadata.get_test_deps(py_files, pl_files, lua_files, r_files)

    with utils.path_prepended(metadata.config.test_prefix):
        env = dict(os.environ.copy())
        env.update(environ.get_dict(m=metadata, prefix=config.test_prefix))
        env["CONDA_BUILD_STATE"] = "TEST"
        env["CONDA_BUILD"] = "1"
        if env_path_backup_var_exists:
            env["CONDA_PATH_BACKUP"] = os.environ["CONDA_PATH_BACKUP"]

    if not metadata.config.activate or metadata.name() == "conda":
        # prepend bin (or Scripts) directory
        env = utils.prepend_bin_path(
            env, metadata.config.test_prefix, prepend_prefix=True
        )

    if utils.on_win:
        env["PATH"] = metadata.config.test_prefix + os.pathsep + env["PATH"]

    env["PREFIX"] = metadata.config.test_prefix
    if "BUILD_PREFIX" in env:
        del env["BUILD_PREFIX"]

    # In the future, we will need to support testing cross compiled
    #     packages on physical hardware. until then it is expected that
    #     something like QEMU or Wine will be used on the build machine,
    #     therefore, for now, we use host_subdir.

    # ensure that the test prefix isn't kept between variants
    utils.rm_rf(metadata.config.test_prefix)

    if solver is None:
        solver, pkg_cache_path = get_solver(metadata.config.host_subdir)
    else:
        pkg_cache_path = PackageCacheData.first_writable().pkgs_dir

    solver.replace_channels()
    transaction = solver.solve(specs, [pkg_cache_path])

    downloaded = transaction.fetch_extract_packages(
        pkg_cache_path, solver.repos + list(solver.local_repos.values()),
    )
    if not downloaded:
        raise RuntimeError("Did not succeed in downloading packages.")

    mkdir_p(metadata.config.test_prefix)
    transaction.execute(
        PrefixData(metadata.config.test_prefix), pkg_cache_path,
    )

    with utils.path_prepended(metadata.config.test_prefix):
        env = dict(os.environ.copy())
        env.update(environ.get_dict(m=metadata, prefix=metadata.config.test_prefix))
        env["CONDA_BUILD_STATE"] = "TEST"
        if env_path_backup_var_exists:
            env["CONDA_PATH_BACKUP"] = os.environ["CONDA_PATH_BACKUP"]

    if config.test_run_post:
        from conda_build.utils import get_installed_packages

        installed = get_installed_packages(metadata.config.test_prefix)
        files = installed[metadata.meta["package"]["name"]]["files"]
        replacements = get_all_replacements(metadata.config)
        try_download(metadata, False, True)
        create_info_files(metadata, replacements, files, metadata.config.test_prefix)
        post_build(metadata, files, None, metadata.config.test_prefix, True)

    # when workdir is removed, the source files are unavailable.  There's the test/source_files
    #    entry that lets people keep these files around.  The files are copied into test_dir for
    #    intuitive relative path behavior, though, not work_dir, so we need to adjust where
    #    SRC_DIR points.  The initial CWD during tests is test_dir.
    if metadata.config.remove_work_dir:
        env["SRC_DIR"] = metadata.config.test_dir

    test_script, _ = write_test_scripts(
        metadata, env, py_files, pl_files, lua_files, r_files, shell_files, trace
    )

    if utils.on_win:
        cmd = [os.environ.get("COMSPEC", "cmd.exe"), "/d", "/c", test_script]
    else:
        cmd = (
            [shell_path]
            + (["-x"] if metadata.config.debug else [])
            + ["-o", "errexit", test_script]
        )
    try:
        test_stats = {}
        if not provision_only:
            # rewrite long paths in stdout back to their env variables
            if metadata.config.debug or metadata.config.no_rewrite_stdout_env:
                rewrite_env = None
            else:
                rewrite_env = {k: env[k] for k in ["PREFIX", "SRC_DIR"] if k in env}
                if metadata.config.verbose:
                    for k, v in rewrite_env.items():
                        print(
                            "{0} {1}={2}".format(
                                "set" if test_script.endswith(".bat") else "export",
                                k,
                                v,
                            )
                        )
            utils.check_call_env(
                cmd,
                env=env,
                cwd=metadata.config.test_dir,
                stats=test_stats,
                rewrite_stdout_env=rewrite_env,
            )
            log_stats(test_stats, "testing {}".format(metadata.name()))
            # TODO need to implement metadata.get_used_loop_vars
            # if stats is not None and metadata.config.variants:
            #     stats[
            #         stats_key(metadata, "test_{}".format(metadata.name()))
            #     ] = test_stats
            if os.path.exists(join(metadata.config.test_dir, "TEST_FAILED")):
                raise subprocess.CalledProcessError(-1, "")
            print("TEST END:", test_package_name)

    except subprocess.CalledProcessError as _:  # noqa
        tests_failed(
            metadata,
            move_broken=move_broken,
            broken_dir=metadata.config.broken_dir,
            config=metadata.config,
        )
        raise

    if config.need_cleanup and config.recipe_dir is not None and not provision_only:
        utils.rm_rf(config.recipe_dir)

    return True
Example #12
def mamba_install(prefix, specs, args, env, *_, **kwargs):
    # TODO: support all various ways this happens
    init_api_context()
    api.Context().target_prefix = prefix

    match_specs = [MatchSpec(s) for s in specs]

    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = [chan for chan in env.channels if chan != "nodefaults"]

    if "nodefaults" not in env.channels:
        channel_urls.extend(context.channels)

    for spec in match_specs:
        # CONDA TODO: correct handling for subdir isn't yet done
        spec_channel = spec.get_exact_value("channel")
        if spec_channel and spec_channel not in channel_urls:
            channel_urls.append(str(spec_channel))

    pool = api.Pool()
    repos = []
    index = load_channels(pool, channel_urls, repos, prepend=False)

    if not (context.quiet or context.json):
        print("\n\nLooking for: {}\n\n".format(specs))

    solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]

    installed_pkg_recs = []

    # We check for installed packages even while creating a new
    # Conda environment as virtual packages such as __glibc are
    # always available regardless of the environment.
    installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)
    repo = api.Repo(pool, "installed", installed_json_f.name, "")
    repo.set_installed()
    repos.append(repo)

    solver = api.Solver(pool, solver_options)

    # Also pin the Python version if it's installed
    # If python was not specified, check if it is installed.
    # If yes, add the installed python to the specs to prevent updating it.
    installed_names = [i_rec.name for i_rec in installed_pkg_recs]
    if "python" not in [s.name for s in match_specs]:
        if "python" in installed_names:
            i = installed_names.index("python")
            version = installed_pkg_recs[i].version
            python_constraint = MatchSpec("python==" + version).conda_build_form()
            solver.add_pin(python_constraint)

    pinned_specs = get_pinned_specs(prefix)
    pinned_specs_info = ""
    if pinned_specs:
        conda_prefix_data = PrefixData(prefix)
    for s in pinned_specs:
        x = conda_prefix_data.query(s.name)
        if x:
            for el in x:
                if not s.match(el):
                    print(
                        "Your pinning does not match what's currently installed."
                        " Please remove the pin and fix your installation"
                    )
                    print("  Pin: {}".format(s))
                    print("  Currently installed: {}".format(el))
                    exit(1)

        try:
            final_spec = s.conda_build_form()
            pinned_specs_info += f"  - {final_spec}\n"
            solver.add_pin(final_spec)
        except AssertionError:
            print(
                f"\nERROR: could not add pinned spec {s}. Make sure pin "
                "is of the format\n"
                "libname VERSION BUILD, for example libblas=*=*mkl\n"
            )

    if pinned_specs_info:
        print(f"\n  Pinned packages:\n\n{pinned_specs_info}\n")

    install_specs = [s for s in specs if MatchSpec(s).name not in installed_names]
    solver.add_jobs(install_specs, api.SOLVER_INSTALL)

    update_specs = [s for s in specs if MatchSpec(s).name in installed_names]
    solver.add_jobs(update_specs, api.SOLVER_UPDATE)

    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        exit(1)

    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(
        solver, package_cache, PackageCacheData.first_writable().pkgs_dir
    )
    if not (context.quiet or context.json):
        transaction.print()
    mmb_specs, to_link, to_unlink = transaction.to_conda()

    specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]

    conda_transaction = to_txn(
        specs_to_add, [], prefix, to_link, to_unlink, installed_pkg_recs, index
    )

    pfe = conda_transaction._get_pfe()
    pfe.execute()
    conda_transaction.execute()
Example #13
def install(args, parser, command='install'):
    """
    mamba install, mamba update, and mamba create
    """
    context.validate_configuration()
    check_non_admin()

    init_api_context()

    newenv = bool(command == 'create')
    isinstall = bool(command == 'install')
    solver_task = api.SOLVER_INSTALL

    isupdate = bool(command == 'update')
    if isupdate:
        solver_task = api.SOLVER_UPDATE

    if newenv:
        ensure_name_or_prefix(args, command)
    prefix = context.target_prefix
    if newenv:
        check_prefix(prefix, json=context.json)
    if context.force_32bit and prefix == context.root_prefix:
        raise CondaValueError("cannot use CONDA_FORCE_32BIT=1 in base env")
    if isupdate and not (args.file or args.packages or context.update_modifier
                         == UpdateModifier.UPDATE_ALL):
        raise CondaValueError("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)

    if not newenv:
        if isdir(prefix):
            delete_trash(prefix)
            if not isfile(join(prefix, 'conda-meta', 'history')):
                if paths_equal(prefix, context.conda_prefix):
                    raise NoBaseEnvironmentError()
                else:
                    if not path_is_clean(prefix):
                        raise DirectoryNotACondaEnvironmentError(prefix)
            else:
                # fall-through expected under normal operation
                pass
        else:
            if args.mkdir:
                try:
                    mkdir_p(prefix)
                except EnvironmentError as e:
                    raise CondaOSError("Could not create directory: %s" %
                                       prefix,
                                       caused_by=e)
            else:
                raise EnvironmentLocationNotFound(prefix)

    prefix = context.target_prefix

    index_args = {
        'use_cache': args.use_index_cache,
        'channel_urls': context.channels,
        'unknown': args.unknown,
        'prepend': not args.override_channels,
        'use_local': args.use_local
    }

    args_packages = [s.strip('"\'') for s in args.packages]
    if newenv and not args.no_default_packages:
        # Override defaults if they are specified at the command line
        # TODO: rework in 4.4 branch using MatchSpec
        args_packages_names = [
            pkg.replace(' ', '=').split('=', 1)[0] for pkg in args_packages
        ]
        for default_pkg in context.create_default_packages:
            default_pkg_name = default_pkg.replace(' ', '=').split('=', 1)[0]
            if default_pkg_name not in args_packages_names:
                args_packages.append(default_pkg)

    num_cp = sum(s.endswith('.tar.bz2') for s in args_packages)
    if num_cp:
        if num_cp == len(args_packages):
            explicit(args_packages,
                     prefix,
                     verbose=not (context.quiet or context.json))
            return
        else:
            raise CondaValueError(
                "cannot mix specifications with conda package"
                " filenames")

    index = get_index(channel_urls=index_args['channel_urls'],
                      prepend=index_args['prepend'],
                      platform=None,
                      use_local=index_args['use_local'],
                      use_cache=index_args['use_cache'],
                      unknown=index_args['unknown'],
                      prefix=prefix)

    channel_json = []
    strict_priority = (context.channel_priority == ChannelPriority.STRICT)

    if strict_priority:
        # first, count unique channels
        n_channels = len(set([channel.canonical_name for _, channel in index]))
        current_channel = index[0][1].canonical_name
        channel_prio = n_channels

    for subdir, chan in index:
        # add priority here
        if strict_priority:
            if chan.canonical_name != current_channel:
                channel_prio -= 1
                current_channel = chan.canonical_name
            priority = channel_prio
        else:
            priority = 0

        subpriority = 0 if chan.platform == 'noarch' else 1

        if not subdir.loaded() and chan.platform != 'noarch':
            # ignore subdirs that failed to load unless the channel platform is noarch
            continue

        if context.verbosity != 0:
            print("Cache path: ", subdir.cache_path())
        channel_json.append((chan, subdir.cache_path(), priority, subpriority))

    installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)

    specs = []

    if args.file:
        file_specs = []
        for fpath in args.file:
            try:
                file_specs += specs_from_url(fpath, json=context.json)
            except UnicodeError:
                raise CondaValueError(
                    "Error reading file, file should be a text file containing"
                    " packages \nconda create --help for details")
        if '@EXPLICIT' in file_specs:
            explicit(file_specs,
                     prefix,
                     verbose=not (context.quiet or context.json),
                     index_args=index_args)
            return
        specs.extend([MatchSpec(s) for s in file_specs])

    specs.extend(specs_from_args(args_packages, json=context.json))

    if isinstall and args.revision:
        get_revision(args.revision, json=context.json)
    elif isinstall and not (args.file or args_packages):
        raise CondaValueError(
            "too few arguments, "
            "must supply command line package specs or --file")

    # for 'conda update', make sure the requested specs actually exist in the prefix
    # and that they are name-only specs
    if isupdate and context.update_modifier == UpdateModifier.UPDATE_ALL:
        # Note: History(prefix).get_requested_specs_map()
        print(
            "Currently, mamba can only update explicit packages! (e.g. mamba update numpy python ...)"
        )
        exit()

    if isupdate and context.update_modifier != UpdateModifier.UPDATE_ALL:
        prefix_data = PrefixData(prefix)
        for s in args_packages:
            s = MatchSpec(s)
            if not s.is_name_only_spec:
                raise CondaValueError("Invalid spec for 'conda update': %s\n"
                                      "Use 'conda install' instead." % s)
            if not prefix_data.get(s.name, None):
                raise PackageNotInstalledError(prefix, s.name)

    if newenv and args.clone:
        if args.packages:
            raise TooManyArgumentsError(
                0, len(args.packages), list(args.packages),
                'did not expect any arguments for --clone')

        clone(args.clone,
              prefix,
              json=context.json,
              quiet=(context.quiet or context.json),
              index_args=index_args)
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
        return

    spec_names = [s.name for s in specs]

    if not (context.quiet or context.json):
        print("\nLooking for: {}\n".format(spec_names))

    # If python was not specified, check if it is installed.
    # If yes, add the installed python to the specs to prevent updating it.
    python_added = False
    if 'python' not in spec_names:
        installed_names = [i_rec.name for i_rec in installed_pkg_recs]
        if 'python' in installed_names:
            i = installed_names.index('python')
            version = installed_pkg_recs[i].version
            specs.append(MatchSpec('python==' + version))
            python_added = True

    mamba_solve_specs = [s.conda_build_form() for s in specs]

    pool = api.Pool()

    repos = []

    # add installed
    if use_mamba_experimental:
        prefix_data = api.PrefixData(context.target_prefix)
        prefix_data.load()
        repo = api.Repo(pool, prefix_data)
        repos.append(repo)
    else:
        repo = api.Repo(pool, "installed", installed_json_f.name, "")
        repo.set_installed()
        repos.append(repo)

    for channel, cache_file, priority, subpriority in channel_json:
        repo = api.Repo(pool, str(channel), cache_file,
                        channel.url(with_credentials=True))
        repo.set_priority(priority, subpriority)
        repos.append(repo)

    solver = api.Solver(pool, solver_options)
    solver.add_jobs(mamba_solve_specs, solver_task)
    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        exit_code = 1
        return exit_code

    transaction = api.Transaction(solver)
    to_link, to_unlink = transaction.to_conda()
    transaction.log_json()

    downloaded = transaction.prompt(PackageCacheData.first_writable().pkgs_dir,
                                    repos)
    if not downloaded:
        exit(0)
    PackageCacheData.first_writable().reload()

    if python_added:
        specs = [s for s in specs if s.name != 'python']

    if use_mamba_experimental and os.name != 'nt':
        if command == 'create' and not isdir(context.target_prefix):
            mkdir_p(prefix)
        transaction.execute(prefix_data,
                            PackageCacheData.first_writable().pkgs_dir)
    else:
        conda_transaction = to_txn(specs, (), prefix, to_link, to_unlink,
                                   index)
        handle_txn(conda_transaction, prefix, args, newenv)

    try:
        # best-effort cleanup of the temporary installed-packages json
        installed_json_f.close()
        os.unlink(installed_json_f.name)
    except Exception:
        pass
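
The pattern in this tail (and in the full install() below) is mamba's core solve loop: build an api.Pool, register the installed environment and each channel subdir as api.Repo objects with priorities, add the requested specs as solver jobs, and turn the solved result into conda link/unlink actions via api.Transaction. A minimal sketch of that flow, following the older two-argument Transaction/to_conda signatures used in the tail above (the bindings import path and the placeholder inputs are assumptions, not taken from these examples):

from mamba import mamba_api as api  # assumed import path for the bindings used above


def solve_specs(specs, installed_json_path, channel_repodata):
    """Solve `specs` against already-fetched repodata.

    `installed_json_path` points at a json listing of installed packages;
    `channel_repodata` is a list of (name, repodata_path, url, priority)
    tuples. Both are placeholders for whatever the caller has cached.
    """
    pool = api.Pool()
    repos = []

    # the current environment takes part in the solve as a special "installed" repo
    installed = api.Repo(pool, "installed", installed_json_path, "")
    installed.set_installed()
    repos.append(installed)

    # one repo per channel subdir; (priority, subpriority) decides which
    # channel wins when several of them provide the same package
    for name, repodata_path, url, priority in channel_repodata:
        repo = api.Repo(pool, name, repodata_path, url)
        repo.set_priority(priority, 0)
        repos.append(repo)

    solver = api.Solver(pool, [])
    solver.add_jobs(specs, api.SOLVER_INSTALL)
    if not solver.solve():
        raise RuntimeError(solver.problems_to_str())

    transaction = api.Transaction(solver)
    to_link, to_unlink = transaction.to_conda()
    return transaction, to_link, to_unlink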
Example No. 14
0
def install(args, parser, command='install'):
    """
    mamba install, mamba update, and mamba create
    """
    context.validate_configuration()
    check_non_admin()

    init_api_context(use_mamba_experimental)

    newenv = bool(command == 'create')
    isinstall = bool(command == 'install')
    solver_task = api.SOLVER_INSTALL

    isupdate = bool(command == 'update')
    if isupdate:
        solver_task = api.SOLVER_UPDATE
        solver_options.clear()

    if newenv:
        ensure_name_or_prefix(args, command)
    prefix = context.target_prefix
    if newenv:
        check_prefix(prefix, json=context.json)
    if context.force_32bit and prefix == context.root_prefix:
        raise CondaValueError("cannot use CONDA_FORCE_32BIT=1 in base env")
    if isupdate and not (args.file or args.packages or context.update_modifier
                         == UpdateModifier.UPDATE_ALL):
        raise CondaValueError("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)

    if not newenv:
        if isdir(prefix):
            if on_win:
                delete_trash(prefix)

            if not isfile(join(prefix, 'conda-meta', 'history')):
                if paths_equal(prefix, context.conda_prefix):
                    raise NoBaseEnvironmentError()
                else:
                    if not path_is_clean(prefix):
                        raise DirectoryNotACondaEnvironmentError(prefix)
            else:
                # fall-through expected under normal operation
                pass
        else:
            if hasattr(args, "mkdir") and args.mkdir:
                try:
                    mkdir_p(prefix)
                except EnvironmentError as e:
                    raise CondaOSError("Could not create directory: %s" %
                                       prefix,
                                       caused_by=e)
            else:
                raise EnvironmentLocationNotFound(prefix)

    prefix = context.target_prefix

    #############################
    # Get SPECS                 #
    #############################

    args_packages = [s.strip('"\'') for s in args.packages]
    if newenv and not args.no_default_packages:
        # Override defaults if they are specified at the command line
        # TODO: rework in 4.4 branch using MatchSpec
        args_packages_names = [
            pkg.replace(' ', '=').split('=', 1)[0] for pkg in args_packages
        ]
        for default_pkg in context.create_default_packages:
            default_pkg_name = default_pkg.replace(' ', '=').split('=', 1)[0]
            if default_pkg_name not in args_packages_names:
                args_packages.append(default_pkg)

    num_cp = sum(s.endswith('.tar.bz2') for s in args_packages)
    if num_cp:
        if num_cp == len(args_packages):
            explicit(args_packages,
                     prefix,
                     verbose=not (context.quiet or context.json))
            return
        else:
            raise CondaValueError(
                "cannot mix specifications with conda package"
                " filenames")

    specs = []

    index_args = {
        'use_cache': args.use_index_cache,
        'channel_urls': context.channels,
        'unknown': args.unknown,
        'prepend': not args.override_channels,
        'use_local': args.use_local
    }

    if args.file:
        file_specs = []
        for fpath in args.file:
            try:
                file_specs += specs_from_url(fpath, json=context.json)
            except Unicode:
                raise CondaValueError(
                    "Error reading file, file should be a text file containing"
                    " packages \nconda create --help for details")
        if '@EXPLICIT' in file_specs:
            explicit(file_specs,
                     prefix,
                     verbose=not (context.quiet or context.json),
                     index_args=index_args)
            return
        specs.extend([MatchSpec(s) for s in file_specs])

    specs.extend(specs_from_args(args_packages, json=context.json))

    # update channels from package specs (e.g. mychannel::mypackage adds mychannel)
    channels = [c for c in context.channels]
    for spec in specs:
        # CONDA TODO: correct handling for subdir isn't yet done
        spec_channel = spec.get_exact_value('channel')
        if spec_channel and spec_channel not in channels:
            channels.append(spec_channel)

    index_args['channel_urls'] = channels

    index = get_index(channel_urls=index_args['channel_urls'],
                      prepend=index_args['prepend'],
                      platform=None,
                      use_local=index_args['use_local'],
                      use_cache=index_args['use_cache'],
                      unknown=index_args['unknown'],
                      prefix=prefix)

    channel_json = []
    strict_priority = (context.channel_priority == ChannelPriority.STRICT)
    subprio_index = len(index)
    if strict_priority:
        # first, count unique channels
        n_channels = len(set([channel.canonical_name for _, channel in index]))
        current_channel = index[0][1].canonical_name
        channel_prio = n_channels

    for subdir, chan in index:
        # add priority here
        if strict_priority:
            if chan.canonical_name != current_channel:
                channel_prio -= 1
                current_channel = chan.canonical_name
            priority = channel_prio
        else:
            priority = 0
        if strict_priority:
            subpriority = 0 if chan.platform == 'noarch' else 1
        else:
            subpriority = subprio_index
            subprio_index -= 1

        if not subdir.loaded() and chan.platform != 'noarch':
            # skip subdirs that are not loaded, except for the noarch subdir
            continue

        if context.verbosity != 0:
            print("Channel: {}, prio: {} : {}".format(chan, priority,
                                                      subpriority))
            print("Cache path: ", subdir.cache_path())

        channel_json.append((chan, subdir, priority, subpriority))

    installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)

    if isinstall and args.revision:
        get_revision(args.revision, json=context.json)
    elif isinstall and not (args.file or args_packages):
        raise CondaValueError(
            "too few arguments, "
            "must supply command line package specs or --file")

    installed_names = [i_rec.name for i_rec in installed_pkg_recs]
    # for 'conda update', make sure the requested specs actually exist in the prefix
    # and that they are name-only specs
    if isupdate and context.update_modifier == UpdateModifier.UPDATE_ALL:
        history_dict = History(prefix).get_requested_specs_map()
        pins = {pin.name: pin for pin in get_pinned_specs(prefix)}
        # for key, match_spec in history_dict.items():
        for key in installed_names:
            if key == 'python':
                i = installed_names.index('python')
                version = installed_pkg_recs[i].version
                py_ver = ".".join(version.split(".")[:2]) + '.*'
                # specs.append(MatchSpec(name="python", version=py_ver))
            else:
                if key in pins:
                    specs.append(pins[key])
                else:
                    specs.append(MatchSpec(key))

        prefix_data = PrefixData(prefix)
        for s in args_packages:
            s = MatchSpec(s)
            if not s.is_name_only_spec:
                raise CondaValueError("Invalid spec for 'conda update': %s\n"
                                      "Use 'conda install' instead." % s)
            if not prefix_data.get(s.name, None):
                raise PackageNotInstalledError(prefix, s.name)

    elif context.update_modifier == UpdateModifier.UPDATE_DEPS:
        # find the deps for each package and add to the update job
        # solver_task |= api.SOLVER_FORCEBEST
        final_specs = list(specs)  # copy, so appending deps below does not grow the list being iterated
        for spec in specs:
            prec = installed_pkg_recs[installed_names.index(spec.name)]
            for dep in prec.depends:
                ms = MatchSpec(dep)
                if ms.name != 'python':
                    final_specs.append(MatchSpec(ms.name))
        specs = set(final_specs)

    if newenv and args.clone:
        if args.packages:
            raise TooManyArgumentsError(
                0, len(args.packages), list(args.packages),
                'did not expect any arguments for --clone')

        clone(args.clone,
              prefix,
              json=context.json,
              quiet=(context.quiet or context.json),
              index_args=index_args)
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
        return

    if not (context.quiet or context.json):
        print("\nLooking for: {}\n".format([str(s) for s in specs]))

    spec_names = [s.name for s in specs]

    # If python was not specified, check if it is installed.
    # If yes, add the installed python to the specs to prevent updating it.
    python_constraint = None
    additional_specs = []

    if 'python' not in spec_names:
        if 'python' in installed_names:
            i = installed_names.index('python')
            version = installed_pkg_recs[i].version
            python_constraint = MatchSpec('python==' +
                                          version).conda_build_form()

    mamba_solve_specs = [str(s) for s in specs]

    pool = api.Pool()

    repos = []

    if use_mamba_experimental or context.force_reinstall:
        prefix_data = api.PrefixData(context.target_prefix)
        prefix_data.load()

    # add installed
    if use_mamba_experimental:
        repo = api.Repo(pool, prefix_data)
        repos.append(repo)
    else:
        repo = api.Repo(pool, "installed", installed_json_f.name, "")
        repo.set_installed()
        repos.append(repo)

    for channel, subdir, priority, subpriority in channel_json:
        repo = subdir.create_repo(pool)
        repo.set_priority(priority, subpriority)
        repos.append(repo)

    if context.force_reinstall:
        solver = api.Solver(pool, solver_options, prefix_data)
    else:
        solver = api.Solver(pool, solver_options)

    solver.set_postsolve_flags([
        (api.MAMBA_NO_DEPS, context.deps_modifier == DepsModifier.NO_DEPS),
        (api.MAMBA_ONLY_DEPS, context.deps_modifier == DepsModifier.ONLY_DEPS),
        (api.MAMBA_FORCE_REINSTALL, context.force_reinstall)
    ])
    solver.add_jobs(mamba_solve_specs, solver_task)

    # as a security feature this will _always_ attempt to upgrade certain packages
    for a_pkg in [_.name for _ in context.aggressive_update_packages]:
        if a_pkg in installed_names:
            solver.add_jobs([a_pkg], api.SOLVER_UPDATE)

    if python_constraint:
        solver.add_constraint(python_constraint)

    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        exit_code = 1
        return exit_code

    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(solver, package_cache)
    mmb_specs, to_link, to_unlink = transaction.to_conda()

    specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]
    specs_to_remove = [MatchSpec(m) for m in mmb_specs[1]]

    transaction.log_json()

    downloaded = transaction.prompt(PackageCacheData.first_writable().pkgs_dir,
                                    repos)
    if not downloaded:
        exit(0)
    PackageCacheData.first_writable().reload()

    if use_mamba_experimental and os.name != 'nt':
        if newenv and not isdir(context.target_prefix) and not context.dry_run:
            mkdir_p(prefix)

        transaction.execute(prefix_data,
                            PackageCacheData.first_writable().pkgs_dir)
    else:
        conda_transaction = to_txn(specs_to_add, specs_to_remove, prefix,
                                   to_link, to_unlink, installed_pkg_recs,
                                   index)
        handle_txn(conda_transaction, prefix, args, newenv)

    try:
        # best-effort cleanup of the temporary installed-packages json
        installed_json_f.close()
        os.unlink(installed_json_f.name)
    except Exception:
        pass
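
One detail worth calling out in install() above is the strict channel priority block: each distinct channel gets a decreasing priority number, and within a channel the noarch subdir is ranked just below the platform subdir. A standalone sketch of that assignment, using plain strings instead of conda Channel/SubdirData objects (the example index at the bottom is made up):

def assign_priorities(index, strict=True):
    """Return (channel, platform, priority, subpriority) tuples for an index
    of (platform, channel) pairs, mirroring the loop in install() above.

    `index` is assumed to be ordered so that all subdirs of one channel are
    adjacent, which is how conda's get_index returns it.
    """
    result = []
    subprio_index = len(index)
    if strict:
        n_channels = len({channel for _, channel in index})
        current_channel = index[0][1]
        channel_prio = n_channels

    for platform, channel in index:
        if strict:
            if channel != current_channel:
                channel_prio -= 1
                current_channel = channel
            priority = channel_prio
            # within one channel, the platform subdir outranks noarch
            subpriority = 0 if platform == 'noarch' else 1
        else:
            priority = 0
            subpriority = subprio_index
            subprio_index -= 1
        result.append((channel, platform, priority, subpriority))
    return result


# hypothetical index: conda-forge listed before defaults, two subdirs each
print(assign_priorities([
    ('linux-64', 'conda-forge'), ('noarch', 'conda-forge'),
    ('linux-64', 'defaults'), ('noarch', 'defaults'),
]))
# conda-forge keeps priority 2, defaults drops to 1; noarch gets subpriority 0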
Example No. 15
0
def main(config=None):
    print(banner)

    parser = argparse.ArgumentParser(
        description='Boa, the fast build tool for conda packages.')
    subparsers = parser.add_subparsers(help='sub-command help', dest='command')
    parent_parser = argparse.ArgumentParser(add_help=False)
    parent_parser.add_argument('recipe_dir', type=str)

    render_parser = subparsers.add_parser('render',
                                          parents=[parent_parser],
                                          help='render a recipe')
    build_parser = subparsers.add_parser('build',
                                         parents=[parent_parser],
                                         help='build a recipe')
    args = parser.parse_args()

    command = args.command

    folder = args.recipe_dir
    config = get_or_merge_config(None, {})
    config_files = find_config_files(folder)
    parsed_cfg = collections.OrderedDict()
    for f in config_files:
        parsed_cfg[f] = parse_config_file(f, config)
        normalized = {}
        for k in parsed_cfg[f].keys():
            if "_" in k:
                n = k.replace("_", "-")
                normalized[n] = parsed_cfg[f][k]
        parsed_cfg[f].update(normalized)

    # TODO just using latest config here, should merge!
    if config_files:
        cbc = parsed_cfg[config_files[-1]]
    else:
        cbc = {}

    update_index(os.path.dirname(config.output_folder),
                 verbose=config.debug,
                 threads=1)

    recipe_path = os.path.join(folder, "recipe.yaml")

    # step 1: parse YAML
    with open(recipe_path) as fi:
        loader = YAML(typ="safe")
        ydoc = loader.load(fi)

    # step 2: fill out context dict
    context_dict = ydoc.get("context") or {}
    jenv = jinja2.Environment()
    for key, value in context_dict.items():
        if isinstance(value, str):
            tmpl = jenv.from_string(value)
            context_dict[key] = tmpl.render(context_dict)

    if ydoc.get("context"):
        del ydoc["context"]

    # step 3: recursively loop over the entire recipe and render jinja with context
    jenv.globals.update(jinja_functions(config, context_dict))
    for key in ydoc:
        render_recursive(ydoc[key], context_dict, jenv)

    flatten_selectors(ydoc, ns_cfg(config))

    # We need to assemble the variants for each output

    variants = {}
    # if we have an outputs section, use it to order the outputs
    if ydoc.get("outputs"):

        # if ydoc.get("build"):
        #     raise InvalidRecipeError("You can either declare outputs, or build?")
        for o in ydoc["outputs"]:

            # inherit from global package
            pkg_meta = {}
            pkg_meta.update(ydoc["package"])
            pkg_meta.update(o["package"])
            o["package"] = pkg_meta

            build_meta = {}
            build_meta.update(ydoc.get("build") or {})
            build_meta.update(o.get("build") or {})
            o["build"] = build_meta
            variants[o["package"]["name"]] = get_dependency_variants(
                o["requirements"], cbc, config)
    else:
        # we only have one output
        variants[ydoc["package"]["name"]] = get_dependency_variants(
            ydoc["requirements"], cbc, config)

    # this takes in all variants and outputs, builds a dependency tree and returns
    # the final metadata
    sorted_outputs = to_build_tree(ydoc, variants, config)

    # then we need to solve and build from the bottom up
    # we can't first solve all packages without finalizing everything

    # - solve the package
    #   - solve the build environment, add its weak run exports
    # - add run exports from deps!

    if command == 'render':
        for o in sorted_outputs:
            print(o)
        exit()

    solver = MambaSolver(["conda-forge"], "linux-64")
    for o in sorted_outputs:
        solver.replace_channels()
        o.finalize_solve(sorted_outputs, solver)
        print(o)

        o.config.compute_build_id(o.name)

        print(o.config.host_prefix)

        if 'build' in o.transactions:
            mkdir_p(o.config.build_prefix)
            print(o.transactions)
            o.transactions['build'].execute(
                PrefixData(o.config.build_prefix),
                PackageCacheData.first_writable().pkgs_dir)
        if 'host' in o.transactions:
            mkdir_p(o.config.host_prefix)
            print(o.transactions)
            o.transactions['host'].execute(
                PrefixData(o.config.host_prefix),
                PackageCacheData.first_writable().pkgs_dir)
        print(o.sections)
        stats = {}

        print("Final variant config")
        print(config.variant)
        print(o.variant)
        build(MetaData(recipe_path, o), None)

    # sorted_outputs
    # print(sorted_outputs[0].config.host_prefix)
    exit()

    for o in sorted_outputs:
        print("\n")
        print(o)
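
Step 2 of this main() resolves the recipe's context block by pushing every string value through Jinja, so later context entries can reference the ones defined before them. A small self-contained sketch of that pass, using the same jinja2 calls as above (the example context values are made up):

import jinja2


def render_context(context_dict):
    """Render each string value in a recipe's context block with Jinja,
    letting later entries use the values rendered before them."""
    jenv = jinja2.Environment()
    rendered = dict(context_dict)
    for key, value in rendered.items():
        if isinstance(value, str):
            rendered[key] = jenv.from_string(value).render(rendered)
    return rendered


# hypothetical context block from a recipe.yaml
print(render_context({
    "name": "xtensor",
    "version": "0.21.4",
    "build_string": "{{ name }}_{{ version }}",
}))
# -> {'name': 'xtensor', 'version': '0.21.4', 'build_string': 'xtensor_0.21.4'}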