Example #1
File: mambabuild.py, Project: hbcarlos/boa
    def __init__(self, channels, platform):

        api_ctx = mamba_api.Context()
        api_ctx.conda_prefix = context.conda_prefix

        self.channels = channels
        self.platform = platform
        self.index = get_index(channels, platform=platform)
        self.local_index = []
        self.pool = mamba_api.Pool()
        self.repos = []

        start_prio = len(channels)
        subpriority = 0  # all repos currently share the same subpriority
        for subdir, channel in self.index:
            repo = mamba_api.Repo(
                self.pool,
                str(channel),
                subdir.cache_path(),
                channel.url(with_credentials=True),
            )
            repo.set_priority(start_prio, subpriority)
            start_prio -= 1
            self.repos.append(repo)

        self.local_repos = {}
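
A minimal usage sketch of the constructor above. The class name and import path are not shown in the excerpt, so both are assumptions (a solver-style class referred to here as MambaSolver):

# Hypothetical usage; the class name and import path below are assumptions.
from boa.mambabuild import MambaSolver  # hypothetical import

solver = MambaSolver(["conda-forge", "defaults"], "linux-64")
print(f"{len(solver.repos)} repos registered in the solver pool")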
Example #2
File: utils.py, Project: xhochy/mamba
def init_api_context():
    api_ctx = api.Context()

    api_ctx.json = context.json
    api_ctx.dry_run = context.dry_run
    if context.json:
        context.always_yes = True
        context.quiet = True
        context.json = False
        context.dry_run = False
    api_ctx.set_verbosity(context.verbosity)
    api_ctx.quiet = context.quiet
    api_ctx.offline = context.offline
    api_ctx.local_repodata_ttl = context.local_repodata_ttl
    api_ctx.use_index_cache = context.use_index_cache
    api_ctx.always_yes = context.always_yes
    if context.ssl_verify is False:
        api_ctx.ssl_verify = "<false>"
    elif context.ssl_verify is not True:
        api_ctx.ssl_verify = context.ssl_verify
    api_ctx.target_prefix = context.target_prefix

    api_ctx.read_timeout_secs = int(round(context.remote_read_timeout_secs))
    api_ctx.connect_timeout_secs = int(
        round(context.remote_connect_timeout_secs))
    api_ctx.max_retries = context.remote_max_retries
    api_ctx.retry_backoff = context.remote_backoff_factor
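
A hedged sketch of how this helper is typically used: call it once, before any other mamba api calls, so that the libmamba Context mirrors conda's context settings (the follow-up calls are placeholders taken from the later examples):

# Assumes `api` and `init_api_context` come from the module shown above.
init_api_context()

pool = api.Pool()   # solver pool, populated via load_channels / create_repo later
repos = []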
Example #3
def init_api_context(use_mamba_experimental=False):
    api_ctx = api.Context()

    api_ctx.json = context.json
    api_ctx.dry_run = context.dry_run
    if context.json:
        context.always_yes = True
        context.quiet = True
        if use_mamba_experimental:
            context.json = False
    api_ctx.set_verbosity(context.verbosity)
    api_ctx.quiet = context.quiet
    api_ctx.offline = context.offline
    api_ctx.local_repodata_ttl = context.local_repodata_ttl
    api_ctx.use_index_cache = context.use_index_cache
    api_ctx.always_yes = context.always_yes
    api_ctx.channels = context.channels

    if context.ssl_verify is False:
        api_ctx.ssl_verify = "<false>"
    elif context.ssl_verify is not True:
        api_ctx.ssl_verify = context.ssl_verify
    api_ctx.target_prefix = context.target_prefix
    api_ctx.root_prefix = context.root_prefix
    api_ctx.conda_prefix = context.conda_prefix
    api_ctx.pkgs_dirs = context.pkgs_dirs
    api_ctx.envs_dirs = context.envs_dirs

    api_ctx.connect_timeout_secs = int(round(context.remote_connect_timeout_secs))
    api_ctx.max_retries = context.remote_max_retries
    api_ctx.retry_backoff = context.remote_backoff_factor
    api_ctx.add_pip_as_python_dependency = context.add_pip_as_python_dependency
Example #4
File: solver.py, Project: ocefpaf/boa
    def __init__(self, channels, platform):

        api_ctx = mamba_api.Context()
        api_ctx.root_prefix = context.conda_prefix
        api_ctx.conda_prefix = context.conda_prefix
        api_ctx.envs_dirs = [os.path.join(context.conda_prefix, "envs")]
        api_ctx.pkgs_dirs = [os.path.join(context.conda_prefix, "pkgs")]

        self.channels = channels
        self.platform = platform
        self.pool = mamba_api.Pool()
        self.repos = []
        self.index = load_channels(
            self.pool, self.channels, self.repos, platform=platform
        )
        self.local_index = []
        self.local_repos = {}
Example #5
def init_api_context(use_mamba_experimental=False):
    api_ctx = api.Context()

    api_ctx.json = context.json
    api_ctx.dry_run = context.dry_run
    if context.json:
        context.always_yes = True
        context.quiet = True
        if use_mamba_experimental:
            context.json = False
    api_ctx.set_verbosity(context.verbosity)
    api_ctx.quiet = context.quiet
    api_ctx.offline = context.offline
    api_ctx.local_repodata_ttl = context.local_repodata_ttl
    api_ctx.use_index_cache = context.use_index_cache
    api_ctx.always_yes = context.always_yes
    api_ctx.channels = context.channels

    if context.ssl_verify is False:
        api_ctx.ssl_verify = "<false>"
    elif context.ssl_verify is not True:
        api_ctx.ssl_verify = context.ssl_verify
    api_ctx.target_prefix = context.target_prefix
    api_ctx.root_prefix = context.root_prefix
    api_ctx.conda_prefix = context.conda_prefix
    api_ctx.pkgs_dirs = context.pkgs_dirs
    api_ctx.envs_dirs = context.envs_dirs

    api_ctx.connect_timeout_secs = int(round(context.remote_connect_timeout_secs))
    api_ctx.max_retries = context.remote_max_retries
    api_ctx.retry_backoff = context.remote_backoff_factor
    api_ctx.add_pip_as_python_dependency = context.add_pip_as_python_dependency
    api_ctx.use_only_tar_bz2 = context.use_only_tar_bz2

    if context.channel_priority is ChannelPriority.STRICT:
        api_ctx.channel_priority = api.ChannelPriority.kStrict
    elif context.channel_priority is ChannelPriority.FLEXIBLE:
        api_ctx.channel_priority = api.ChannelPriority.kFlexible
    elif context.channel_priority is ChannelPriority.DISABLED:
        api_ctx.channel_priority = api.ChannelPriority.kDisabled
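
The channel-priority branch at the end of this example can also be written as a lookup table; a hedged, behavior-preserving alternative assuming the same enum members used above:

# Equivalent mapping from conda's ChannelPriority to libmamba's enum.
_CHANNEL_PRIORITY_MAP = {
    ChannelPriority.STRICT: api.ChannelPriority.kStrict,
    ChannelPriority.FLEXIBLE: api.ChannelPriority.kFlexible,
    ChannelPriority.DISABLED: api.ChannelPriority.kDisabled,
}
if context.channel_priority in _CHANNEL_PRIORITY_MAP:
    api_ctx.channel_priority = _CHANNEL_PRIORITY_MAP[context.channel_priority]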
Example #6
    def __init__(self, channels, platform, output_folder=None):

        api_ctx = mamba_api.Context()
        api_ctx.root_prefix = context.conda_prefix
        api_ctx.conda_prefix = context.conda_prefix
        # api_ctx.set_verbosity(1)
        api_ctx.offline = context.offline
        api_ctx.envs_dirs = [os.path.join(context.conda_prefix, "envs")]
        api_ctx.pkgs_dirs = [os.path.join(context.conda_prefix, "pkgs")]

        self.channels = channels
        self.platform = platform
        self.output_folder = output_folder or "local"
        self.pool = mamba_api.Pool()
        self.repos = []
        self.index = load_channels(self.pool,
                                   self.channels,
                                   self.repos,
                                   platform=platform)
        self.local_index = []
        self.local_repos = {}
        # load local repo, too
        self.replace_channels()
Example #7
File: mamba_env.py, Project: wzmucas/mamba
def mamba_install(prefix, specs, args, env, *_, **kwargs):
    # TODO: support all various ways this happens
    init_api_context()
    api.Context().target_prefix = prefix
    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = [chan for chan in env.channels if chan != "nodefaults"]

    if "nodefaults" not in env.channels:
        channel_urls.extend(context.channels)
    _channel_priority_map = prioritize_channels(channel_urls)

    index = get_index(tuple(_channel_priority_map.keys()), prepend=False)

    channel_json = []

    for subdir, chan in index:
        # add priority here
        priority = (len(_channel_priority_map) -
                    _channel_priority_map[chan.url(with_credentials=True)][1])
        subpriority = 0 if chan.platform == "noarch" else 1
        if not subdir.loaded() and chan.platform != "noarch":
            # ignore non-loaded subdir if channel is != noarch
            continue

        channel_json.append((chan, subdir, priority, subpriority))

    if not (context.quiet or context.json):
        print("\n\nLooking for: {}\n\n".format(specs))

    solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]

    pool = api.Pool()
    repos = []

    # if using update
    installed_pkg_recs = []
    python_constraint = None
    if "update" in args.func:
        installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)
        repo = api.Repo(pool, "installed", installed_json_f.name, "")
        repo.set_installed()
        repos.append(repo)

        # Also pin the Python version if it's installed
        # If python was not specified, check if it is installed.
        # If yes, add the installed python to the specs to prevent updating it.
        if "python" not in [MatchSpec(s).name for s in specs]:
            installed_names = [i_rec.name for i_rec in installed_pkg_recs]
            if "python" in installed_names:
                i = installed_names.index("python")
                version = installed_pkg_recs[i].version
                python_constraint = MatchSpec("python==" +
                                              version).conda_build_form()

    for _, subdir, priority, subpriority in channel_json:
        repo = subdir.create_repo(pool)
        repo.set_priority(priority, subpriority)
        repos.append(repo)

    solver = api.Solver(pool, solver_options)
    solver.add_jobs(specs, api.SOLVER_INSTALL)

    if python_constraint:
        solver.add_pin(python_constraint)

    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        exit(1)

    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(solver, package_cache)
    if not (context.quiet or context.json):
        transaction.print()
    mmb_specs, to_link, to_unlink = transaction.to_conda()

    specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]

    conda_transaction = to_txn(specs_to_add, [], prefix, to_link, to_unlink,
                               installed_pkg_recs, index)

    pfe = conda_transaction._get_pfe()
    pfe.execute()
    conda_transaction.execute()
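
Stripped of the update and pinning logic, the core flow of this example is: build a Pool, register channel repos with priorities, solve the install job, and hand the result to a Transaction. A condensed, hedged sketch, substituting load_channels (used in the later examples) for the manual get_index/create_repo loop; the channel and spec names are placeholders:

# Condensed solve flow; assumes init_api_context() has already been called.
pool = api.Pool()
repos = []
index = load_channels(pool, ["conda-forge"], repos, prepend=False)

solver = api.Solver(pool, [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)])
solver.add_jobs(["numpy"], api.SOLVER_INSTALL)

if solver.solve():
    transaction = api.Transaction(solver, api.MultiPackageCache(context.pkgs_dirs))
    transaction.print()
else:
    print(solver.problems_to_str())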
Example #8
def mamba_install(prefix, specs, args, env, *_, **kwargs):
    # TODO: support all various ways this happens
    init_api_context()
    api.Context().target_prefix = prefix

    match_specs = [MatchSpec(s) for s in specs]

    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = [chan for chan in env.channels if chan != "nodefaults"]

    if "nodefaults" not in env.channels:
        channel_urls.extend(context.channels)

    for spec in match_specs:
        # CONDA TODO: correct handling for subdir isn't yet done
        spec_channel = spec.get_exact_value("channel")
        if spec_channel and spec_channel not in channel_urls:
            channel_urls.append(str(spec_channel))

    ordered_channels_dict = prioritize_channels(channel_urls)

    pool = api.Pool()
    repos = []
    index = load_channels(pool,
                          tuple(ordered_channels_dict.keys()),
                          repos,
                          prepend=False)

    if not (context.quiet or context.json):
        print("\n\nLooking for: {}\n\n".format(specs))

    solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]

    installed_pkg_recs = []

    # We check for installed packages even while creating a new
    # Conda environment as virtual packages such as __glibc are
    # always available regardless of the environment.
    installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)
    repo = api.Repo(pool, "installed", installed_json_f.name, "")
    repo.set_installed()
    repos.append(repo)

    solver = api.Solver(pool, solver_options)

    # Also pin the Python version if it's installed
    # If python was not specified, check if it is installed.
    # If yes, add the installed python to the specs to prevent updating it.
    if "python" not in [s.name for s in match_specs]:
        installed_names = [i_rec.name for i_rec in installed_pkg_recs]
        if "python" in installed_names:
            i = installed_names.index("python")
            version = installed_pkg_recs[i].version
            python_constraint = MatchSpec("python==" +
                                          version).conda_build_form()
            solver.add_pin(python_constraint)

    pinned_specs = get_pinned_specs(prefix)
    pinned_specs_info = ""
    if pinned_specs:
        conda_prefix_data = PrefixData(prefix)
    for s in pinned_specs:
        x = conda_prefix_data.query(s.name)
        if x:
            for el in x:
                if not s.match(el):
                    print(
                        "Your pinning does not match what's currently installed."
                        " Please remove the pin and fix your installation")
                    print("  Pin: {}".format(s))
                    print("  Currently installed: {}".format(el))
                    exit(1)

        try:
            final_spec = s.conda_build_form()
            pinned_specs_info += f"  - {final_spec}\n"
            solver.add_pin(final_spec)
        except AssertionError:
            print(f"\nERROR: could not add pinned spec {s}. Make sure pin"
                  "is of the format\n"
                  "libname VERSION BUILD, for example libblas=*=*mkl\n")

    if pinned_specs_info:
        print(f"\n  Pinned packages:\n\n{pinned_specs_info}\n")

    solver.add_jobs(specs, api.SOLVER_INSTALL)

    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        exit(1)

    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(solver, package_cache)
    if not (context.quiet or context.json):
        transaction.print()
    mmb_specs, to_link, to_unlink = transaction.to_conda()

    specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]

    conda_transaction = to_txn(specs_to_add, [], prefix, to_link, to_unlink,
                               installed_pkg_recs, index)

    pfe = conda_transaction._get_pfe()
    pfe.execute()
    conda_transaction.execute()
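
solver.add_pin expects pins in conda_build_form, i.e. space-separated "name version build" strings, which is why the error message above refers to the "libname VERSION BUILD" format. A hedged illustration using the pin from that error message:

# Illustration only; the exact output of conda_build_form() may vary by conda version.
from conda.models.match_spec import MatchSpec

pin = MatchSpec("libblas=*=*mkl").conda_build_form()
print(pin)  # expected: something like "libblas * *mkl"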
Example #9
def mamba_install(prefix, specs, args, env, *_, **kwargs):
    # TODO: support all various ways this happens
    init_api_context()
    api.Context().target_prefix = prefix
    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = [chan for chan in env.channels if chan != 'nodefaults']

    if 'nodefaults' not in env.channels:
        channel_urls.extend(context.channels)
    _channel_priority_map = prioritize_channels(channel_urls)

    index = get_index(tuple(_channel_priority_map.keys()), prepend=False)

    channel_json = []

    for subdir, chan in index:
        # add priority here
        priority = len(_channel_priority_map) - _channel_priority_map[chan.url(
            with_credentials=True)][1]
        subpriority = 0 if chan.platform == 'noarch' else 1
        if not subdir.loaded() and chan.platform != 'noarch':
            # ignore non-loaded subdir if channel is != noarch
            continue

        channel_json.append((chan, subdir, priority, subpriority))

    if not (context.quiet or context.json):
        print("\n\nLooking for: {}\n\n".format(specs))

    solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]

    pool = api.Pool()
    repos = []

    for channel, subdir, priority, subpriority in channel_json:
        repo = subdir.create_repo(pool)
        repo.set_priority(priority, subpriority)
        repos.append(repo)

    solver = api.Solver(pool, solver_options)
    solver.add_jobs(specs, api.SOLVER_INSTALL)
    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        exit(1)

    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(solver, package_cache)
    if not (context.quiet or context.json):
        transaction.print()
    mmb_specs, to_link, to_unlink = transaction.to_conda()

    specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]

    final_precs = IndexedSet()

    lookup_dict = {}
    for _, c in index:
        lookup_dict[c.url(with_credentials=True)] = c

    for c, pkg, jsn_s in to_link:
        sdir = lookup_dict[c]
        rec = to_package_record_from_subjson(sdir, pkg, jsn_s)
        final_precs.add(rec)

    unlink_precs, link_precs = diff_for_unlink_link_precs(
        prefix,
        final_precs=IndexedSet(PrefixGraph(final_precs).graph),
        specs_to_add=specs_to_add,
        force_reinstall=context.force_reinstall)

    pref_setup = PrefixSetup(target_prefix=prefix,
                             unlink_precs=unlink_precs,
                             link_precs=link_precs,
                             remove_specs=(),
                             update_specs=specs_to_add,
                             neutered_specs=())

    conda_transaction = UnlinkLinkTransaction(pref_setup)

    pfe = conda_transaction._get_pfe()
    pfe.execute()
    conda_transaction.execute()
Example #10
def mamba_install(prefix, specs, args, env, *_, **kwargs):
    # TODO: support all various ways this happens
    init_api_context()
    api.Context().target_prefix = prefix
    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = [chan for chan in env.channels if chan != 'nodefaults']

    if 'nodefaults' not in env.channels:
        channel_urls.extend(context.channels)
    _channel_priority_map = prioritize_channels(channel_urls)

    index = get_index(tuple(_channel_priority_map.keys()), prepend=False)

    channel_json = []

    for subdir, chan in index:
        # add priority here
        priority = len(_channel_priority_map) - _channel_priority_map[chan.url(with_credentials=True)][1]
        subpriority = 0 if chan.platform == 'noarch' else 1
        if not subdir.loaded() and chan.platform != 'noarch':
            # ignore non-loaded subdir if channel is != noarch
            continue

        channel_json.append((chan, subdir, priority, subpriority))

    if not (context.quiet or context.json):
        print("\n\nLooking for: {}\n\n".format(specs))

    solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]

    pool = api.Pool()
    repos = []

    # if using update
    installed_pkg_recs = []
    if 'update' in args.func:
        installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)
        repo = api.Repo(pool, "installed", installed_json_f.name, "")
        repo.set_installed()
        repos.append(repo)

    for channel, subdir, priority, subpriority in channel_json:
        repo = subdir.create_repo(pool)
        repo.set_priority(priority, subpriority)
        repos.append(repo)

    solver = api.Solver(pool, solver_options)
    solver.add_jobs(specs, api.SOLVER_INSTALL)
    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        exit(1)

    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(solver, package_cache)
    if not (context.quiet or context.json):
        transaction.print()
    mmb_specs, to_link, to_unlink = transaction.to_conda()

    specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]

    conda_transaction = to_txn(specs_to_add, [], prefix, to_link, to_unlink, installed_pkg_recs, index)

    pfe = conda_transaction._get_pfe()
    pfe.execute()
    conda_transaction.execute()
Example #11
File: mamba_env.py, Project: rowhit/mamba-1
def mamba_install(prefix, specs, args, env, *_, **kwargs):
    # TODO: support all various ways this happens
    init_api_context()
    api.Context().target_prefix = prefix

    match_specs = [MatchSpec(s) for s in specs]

    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = [chan for chan in env.channels if chan != "nodefaults"]

    if "nodefaults" not in env.channels:
        channel_urls.extend(context.channels)

    for spec in match_specs:
        # CONDA TODO: correct handling for subdir isn't yet done
        spec_channel = spec.get_exact_value("channel")
        if spec_channel and spec_channel not in channel_urls:
            channel_urls.append(str(spec_channel))

    ordered_channels_dict = prioritize_channels(channel_urls)

    pool = api.Pool()
    repos = []
    index = load_channels(pool,
                          tuple(ordered_channels_dict.keys()),
                          repos,
                          prepend=False)

    if not (context.quiet or context.json):
        print("\n\nLooking for: {}\n\n".format(specs))

    solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]

    installed_pkg_recs = []
    python_constraint = None

    # We check for installed packages even while creating a new
    # Conda environment as virtual packages such as __glibc are
    # always available regardless of the environment.
    installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)
    repo = api.Repo(pool, "installed", installed_json_f.name, "")
    repo.set_installed()
    repos.append(repo)

    # Also pin the Python version if it's installed
    # If python was not specified, check if it is installed.
    # If yes, add the installed python to the specs to prevent updating it.
    if "python" not in [s.name for s in match_specs]:
        installed_names = [i_rec.name for i_rec in installed_pkg_recs]
        if "python" in installed_names:
            i = installed_names.index("python")
            version = installed_pkg_recs[i].version
            python_constraint = MatchSpec("python==" +
                                          version).conda_build_form()

    solver = api.Solver(pool, solver_options)
    solver.add_jobs(specs, api.SOLVER_INSTALL)

    if python_constraint:
        solver.add_pin(python_constraint)

    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        exit(1)

    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(solver, package_cache)
    if not (context.quiet or context.json):
        transaction.print()
    mmb_specs, to_link, to_unlink = transaction.to_conda()

    specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]

    conda_transaction = to_txn(specs_to_add, [], prefix, to_link, to_unlink,
                               installed_pkg_recs, index)

    pfe = conda_transaction._get_pfe()
    pfe.execute()
    conda_transaction.execute()
Example #12
def init_api_context(use_mamba_experimental=False):
    api_ctx = api.Context()

    api_ctx.json = context.json
    api_ctx.dry_run = context.dry_run
    if context.json:
        context.always_yes = True
        context.quiet = True
        if use_mamba_experimental:
            context.json = False

    api_ctx.set_verbosity(context.verbosity)
    api_ctx.quiet = context.quiet
    api_ctx.offline = context.offline
    api_ctx.local_repodata_ttl = context.local_repodata_ttl
    api_ctx.use_index_cache = context.use_index_cache
    api_ctx.always_yes = context.always_yes
    api_ctx.channels = context.channels
    api_ctx.platform = context.subdir

    def get_base_url(url, name=None):
        tmp = url.rsplit("/", 1)[0]
        if name:
            if tmp.endswith(name):
                return tmp.rsplit("/", 1)[0]
        return tmp
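    # Hedged illustration of get_base_url (the URLs below are examples only):
    #   get_base_url("https://conda.anaconda.org/conda-forge")
    #       -> "https://conda.anaconda.org"
    #   get_base_url("https://example.com/my-channel/noarch", "my-channel")
    #       -> "https://example.com"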

    api_ctx.channel_alias = str(
        get_base_url(context.channel_alias.url(with_credentials=True)))

    additional_custom_channels = {}
    for el in context.custom_channels:
        if context.custom_channels[el].canonical_name not in [
                "local", "defaults"
        ]:
            additional_custom_channels[el] = get_base_url(
                context.custom_channels[el].url(with_credentials=True), el)
    api_ctx.custom_channels = additional_custom_channels

    if context.ssl_verify is False:
        api_ctx.ssl_verify = "<false>"
    elif context.ssl_verify is not True:
        api_ctx.ssl_verify = context.ssl_verify
    api_ctx.target_prefix = context.target_prefix
    api_ctx.root_prefix = context.root_prefix
    api_ctx.conda_prefix = context.conda_prefix
    api_ctx.pkgs_dirs = context.pkgs_dirs
    api_ctx.envs_dirs = context.envs_dirs

    api_ctx.connect_timeout_secs = int(
        round(context.remote_connect_timeout_secs))
    api_ctx.max_retries = context.remote_max_retries
    api_ctx.retry_backoff = context.remote_backoff_factor
    api_ctx.add_pip_as_python_dependency = context.add_pip_as_python_dependency
    api_ctx.use_only_tar_bz2 = context.use_only_tar_bz2

    if context.channel_priority is ChannelPriority.STRICT:
        api_ctx.channel_priority = api.ChannelPriority.kStrict
    elif context.channel_priority is ChannelPriority.FLEXIBLE:
        api_ctx.channel_priority = api.ChannelPriority.kFlexible
    elif context.channel_priority is ChannelPriority.DISABLED:
        api_ctx.channel_priority = api.ChannelPriority.kDisabled