def test_subdirs_env_var(self):
    """CONDA_SUBDIRS must drive both Channel.urls() and prioritize_channels().

    Sets a custom subdir list via the environment, then checks that URL
    expansion honors it, and that an explicit ``subdirs=`` argument to
    prioritize_channels overrides the context value.
    """
    subdirs = ('linux-highest', 'linux-64', 'noarch')

    def _channel_urls(channels=None):
        # Expected URL expansion: every channel crossed with every subdir,
        # in channel-major order.
        for channel in channels or DEFAULT_CHANNELS:
            channel = Channel(channel)
            for subdir in subdirs:
                yield join_url(channel.base_url, subdir)

    with env_var('CONDA_SUBDIRS', ','.join(subdirs), reset_context):
        # 'defaults' expands to all DEFAULT_CHANNELS.
        c = Channel('defaults')
        assert c.urls() == list(_channel_urls())

        c = Channel('conda-forge')
        assert c.urls() == list(_channel_urls(('conda-forge',)))

        # Priority numbers are per-channel, shared by all of its subdir URLs.
        channels = ('bioconda', 'conda-forge')
        prioritized = prioritize_channels(channels)
        assert prioritized == OrderedDict((
            ("https://conda.anaconda.org/bioconda/linux-highest", ("bioconda", 0)),
            ("https://conda.anaconda.org/bioconda/linux-64", ("bioconda", 0)),
            ("https://conda.anaconda.org/bioconda/noarch", ("bioconda", 0)),
            ("https://conda.anaconda.org/conda-forge/linux-highest", ("conda-forge", 1)),
            ("https://conda.anaconda.org/conda-forge/linux-64", ("conda-forge", 1)),
            ("https://conda.anaconda.org/conda-forge/noarch", ("conda-forge", 1)),
        ))

        # Explicit subdirs= takes precedence over CONDA_SUBDIRS.
        prioritized = prioritize_channels(channels, subdirs=('linux-again', 'noarch'))
        assert prioritized == OrderedDict((
            ("https://conda.anaconda.org/bioconda/linux-again", ("bioconda", 0)),
            ("https://conda.anaconda.org/bioconda/noarch", ("bioconda", 0)),
            ("https://conda.anaconda.org/conda-forge/linux-again", ("conda-forge", 1)),
            ("https://conda.anaconda.org/conda-forge/noarch", ("conda-forge", 1)),
        ))
def install(prefix, specs, args, env, prune=False):
    """Install *specs* into the environment at *prefix*.

    Channel-qualified specs (``channel::package``) contribute their channel
    to the lookup list; listing ``'nodefaults'`` in the env's channels
    disables prepending the default channels.
    """
    # Split "channel::package" specs into a channel part and a bare spec.
    bare_specs = []
    spec_channels = set()
    for spec in specs:
        if "::" not in spec:
            bare_specs.append(spec)
        else:
            parts = spec.split("::")
            spec_channels.add(parts[0])
            bare_specs.append(parts[-1])
    specs = bare_specs

    # Spec-derived channels come first; 'nodefaults' is a marker, not a
    # real channel, so it is filtered out of the env channel list.
    channel_urls = list(spec_channels)
    channel_urls += [chan for chan in env.channels if chan != 'nodefaults']

    index = get_index(channel_urls=channel_urls,
                      prepend='nodefaults' not in env.channels,
                      prefix=prefix)
    priority_map = prioritize_channels(channel_urls)
    txn = get_install_transaction(
        prefix, index, specs, prune=prune,
        channel_priority_map=priority_map)

    with common.json_progress_bars(json=args.json and not args.quiet):
        # Fetch/extract packages first, then apply the unlink/link plan.
        txn.get_pfe().execute()
        txn.execute()
def test_channels_with_dashes(self):
    # regression test for #5763: channel URLs containing dashes must
    # survive prioritization intact.
    assert context.channels[0] == 'http://test/conda/anaconda-cluster'
    prioritized = tuple(prioritize_channels(context.channels).items())
    expected_first = ('http://test/conda/anaconda-cluster/%s' % context.subdir,
                      ('http://test/conda/anaconda-cluster', 0))
    expected_second = ('http://test/conda/anaconda-cluster/noarch',
                       ('http://test/conda/anaconda-cluster', 0))
    assert prioritized[0] == expected_first
    assert prioritized[1] == expected_second
def install(prefix, specs, args, env, *_, **kwargs):
    """Solve *specs* against the env's channels and execute the transaction.

    ``channel::package`` specs contribute their channel; 'nodefaults' in the
    env channel list suppresses the context's default channels.
    """
    # Separate channel qualifiers from the package specs themselves.
    bare_specs = []
    spec_channels = set()
    for spec in specs:
        if "::" not in spec:
            bare_specs.append(spec)
        else:
            parts = spec.split("::")
            spec_channels.add(parts[0])
            bare_specs.append(parts[-1])
    specs = bare_specs

    channel_urls = list(spec_channels)
    channel_urls += [chan for chan in env.channels if chan != 'nodefaults']
    if 'nodefaults' not in env.channels:
        channel_urls.extend(context.channels)

    priority_map = prioritize_channels(channel_urls)
    channels = IndexedSet(Channel(url) for url in priority_map)
    subdirs = IndexedSet(basename(url) for url in priority_map)

    solver = Solver(prefix, channels, subdirs, specs_to_add=specs)
    txn = solver.solve_for_transaction(prune=getattr(args, 'prune', False))
    # Download/extract first, then apply the unlink/link plan.
    txn._get_pfe().execute()
    txn.execute()
def install(prefix, specs, args, env, prune=False):
    """Solve *specs* against the env's channels and execute the transaction.

    ``channel::package`` specs contribute their channel to the lookup list;
    'nodefaults' in the env channel list is a marker (not a real channel)
    and is filtered out.
    """
    # TODO: support all various ways this happens
    # Including 'nodefaults' in the channels list disables the defaults
    new_specs = []
    channel_urls = set()
    for elem in specs:
        if "::" in elem:
            # "channel::package": remember the channel, keep the bare spec.
            channel_urls.add(elem.split("::")[0])
            new_specs.append(elem.split("::")[-1])
        else:
            new_specs.append(elem)
    specs = new_specs
    channel_urls = list(channel_urls)
    # TODO: support all various ways this happens
    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = channel_urls + [
        chan for chan in env.channels if chan != 'nodefaults'
    ]
    _channel_priority_map = prioritize_channels(channel_urls)
    # Collapse the per-subdir URLs back to canonical channel names, then to
    # Channel objects; IndexedSet dedupes while preserving priority order.
    channel_names = IndexedSet(
        Channel(url).canonical_name for url in _channel_priority_map)
    channels = IndexedSet(Channel(cn) for cn in channel_names)
    subdirs = IndexedSet(basename(url) for url in _channel_priority_map)
    solver = Solver(prefix, channels, subdirs, specs_to_add=specs)
    unlink_link_transaction = solver.solve_for_transaction(prune=prune)
    # Fetch/extract packages first, then apply the unlink/link plan.
    pfe = unlink_link_transaction.get_pfe()
    pfe.execute()
    unlink_link_transaction.execute()
def test_env_var_file_urls(self):
    """file:// channels (UNC and local paths) must survive CONDA_CHANNELS
    parsing and prioritization unmangled."""
    channels = (
        "file://\\\\network_share\\shared_folder\\path\\conda",
        "https://some.url/ch_name",
        "file:///some/place/on/my/machine",
    )
    with env_var("CONDA_CHANNELS", ','.join(channels)):
        new_context = Context((), APP_NAME)
        # Raw channel strings are preserved verbatim on the context.
        assert new_context.channels == (
            "file://\\\\network_share\\shared_folder\\path\\conda",
            "https://some.url/ch_name",
            "file:///some/place/on/my/machine",
        )

        # Prioritization normalizes backslashes to forward slashes and
        # expands each channel to <url>/<subdir> and <url>/noarch.
        prioritized = prioritize_channels(new_context.channels)
        assert prioritized == OrderedDict((
            ("file://network_share/shared_folder/path/conda/%s" % context.subdir,
             ("file://network_share/shared_folder/path/conda", 0)),
            ("file://network_share/shared_folder/path/conda/noarch",
             ("file://network_share/shared_folder/path/conda", 0)),
            ("https://some.url/ch_name/%s" % context.subdir,
             ("https://some.url/ch_name", 1)),
            ("https://some.url/ch_name/noarch",
             ("https://some.url/ch_name", 1)),
            ("file:///some/place/on/my/machine/%s" % context.subdir,
             ("file:///some/place/on/my/machine", 2)),
            ("file:///some/place/on/my/machine/noarch",
             ("file:///some/place/on/my/machine", 2)),
        ))
def install(prefix, specs, args, env, prune=False):
    """Install *specs* into *prefix* via a list of plan action sets.

    ``channel::package`` specs contribute their channel; 'nodefaults' in
    the env channel list disables prepending the default channels.
    Re-raises plan failures as conda exception types so the CLI error
    handler can render them.
    """
    # TODO: support all various ways this happens
    # Including 'nodefaults' in the channels list disables the defaults
    new_specs = []
    channel_urls = set()
    for elem in specs:
        if "::" in elem:
            # "channel::package": remember the channel, keep the bare spec.
            channel_urls.add(elem.split("::")[0])
            new_specs.append(elem.split("::")[-1])
        else:
            new_specs.append(elem)
    specs = new_specs
    channel_urls = list(channel_urls)
    # TODO: support all various ways this happens
    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = channel_urls + [chan for chan in env.channels if chan != "nodefaults"]
    index = get_index(channel_urls=channel_urls,
                      prepend="nodefaults" not in env.channels,
                      prefix=prefix)
    _channel_priority_map = prioritize_channels(channel_urls)
    action_set = plan.install_actions_list(
        prefix, index, specs, prune=prune,
        channel_priority_map=_channel_priority_map
    )
    with common.json_progress_bars(json=args.json and not args.quiet):
        for actions in action_set:
            try:
                plan.execute_actions(actions, index, verbose=not args.quiet)
            except RuntimeError as e:
                # Lock contention surfaces as a RuntimeError whose message
                # contains "LOCKERROR"; map it to the dedicated exception.
                if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                    raise LockError("Already locked: %s" % text_type(e))
                else:
                    raise CondaRuntimeError("RuntimeError: %s" % e)
            except SystemExit as e:
                raise CondaSystemExit("Exiting", e)
def test_determine_all_envs_with_channel_priority(self):
    """determine_all_envs should honor channel priority when picking which
    env provides each spec."""
    # tuple literal instead of redundant tuple([...]) wrapper.
    prioritized_channel_map = prioritize_channels(("rando_chnl", "defaults"))
    specs_for_envs_w_channel_priority = plan.determine_all_envs(
        self.res, self.specs, prioritized_channel_map)
    expected_output = [plan.SpecForEnv(env="ranenv", spec="test-spec"),
                       plan.SpecForEnv(env="test1", spec="test-spec2")]
    # assertEqual: assertEquals is a deprecated alias removed in Python 3.12.
    self.assertEqual(specs_for_envs_w_channel_priority, expected_output)
def install(prefix, specs, args, env, *_, **kwargs):
    """Solve and apply *specs* in *prefix*, preferring to freeze installed
    packages; falls back to an unconstrained solve on failure.

    Returns the first legacy action group, or None if nothing to do.
    """
    # TODO: support all various ways this happens
    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = [chan for chan in env.channels if chan != 'nodefaults']
    if 'nodefaults' not in env.channels:
        channel_urls.extend(context.channels)
    _channel_priority_map = prioritize_channels(channel_urls)

    channels = IndexedSet(Channel(url) for url in _channel_priority_map)
    subdirs = IndexedSet(basename(url) for url in _channel_priority_map)

    solver = Solver(prefix, channels, subdirs, specs_to_add=specs)
    try:
        # First attempt: keep already-installed packages at their versions.
        unlink_link_transaction = solver.solve_for_transaction(
            prune=getattr(args, 'prune', False),
            update_modifier=UpdateModifier.FREEZE_INSTALLED)
    except (UnsatisfiableError, SystemExit):
        # Frozen solve failed; retry without the freeze constraint.
        unlink_link_transaction = solver.solve_for_transaction(
            prune=getattr(args, 'prune', False), update_modifier=NULL)

    if unlink_link_transaction.nothing_to_do:
        return None
    unlink_link_transaction.download_and_extract()
    unlink_link_transaction.execute()
    return unlink_link_transaction._make_legacy_action_groups()[0]
def test_channels_with_dashes(self):
    # regression test for #5763: dash-containing channel URLs must be
    # expanded to subdir URLs without mangling.
    assert context.channels == ('http://test/conda/anaconda-cluster',)
    expected = odict((
        ('http://test/conda/anaconda-cluster/%s' % context.subdir,
         ('http://test/conda/anaconda-cluster', 0)),
        ('http://test/conda/anaconda-cluster/noarch',
         ('http://test/conda/anaconda-cluster', 0)),
    ))
    assert prioritize_channels(context.channels) == expected
def test_subdirs_env_var(self):
    """CONDA_SUBDIRS must drive Channel.urls() and prioritize_channels();
    an explicit ``subdirs=`` argument overrides the context value."""
    subdirs = ('linux-highest', 'linux-64', 'noarch')

    def _channel_urls(channels=None):
        # Expected expansion: each channel crossed with each subdir.
        for channel in channels or DEFAULT_CHANNELS:
            channel = Channel(channel)
            for subdir in subdirs:
                yield join_url(channel.base_url, subdir)

    with env_var('CONDA_SUBDIRS', ','.join(subdirs), reset_context):
        # 'defaults' expands to all DEFAULT_CHANNELS.
        c = Channel('defaults')
        assert c.urls() == list(_channel_urls())

        c = Channel('conda-forge')
        assert c.urls() == list(_channel_urls(('conda-forge',)))

        # Priority index is per-channel, shared across its subdir URLs.
        channels = ('bioconda', 'conda-forge')
        prioritized = prioritize_channels(channels)
        assert prioritized == OrderedDict((
            ("https://conda.anaconda.org/bioconda/linux-highest", ("bioconda", 0)),
            ("https://conda.anaconda.org/bioconda/linux-64", ("bioconda", 0)),
            ("https://conda.anaconda.org/bioconda/noarch", ("bioconda", 0)),
            ("https://conda.anaconda.org/conda-forge/linux-highest", ("conda-forge", 1)),
            ("https://conda.anaconda.org/conda-forge/linux-64", ("conda-forge", 1)),
            ("https://conda.anaconda.org/conda-forge/noarch", ("conda-forge", 1)),
        ))

        # Explicit subdirs= wins over CONDA_SUBDIRS.
        prioritized = prioritize_channels(channels, subdirs=('linux-again', 'noarch'))
        assert prioritized == OrderedDict((
            ("https://conda.anaconda.org/bioconda/linux-again", ("bioconda", 0)),
            ("https://conda.anaconda.org/bioconda/noarch", ("bioconda", 0)),
            ("https://conda.anaconda.org/conda-forge/linux-again", ("conda-forge", 1)),
            ("https://conda.anaconda.org/conda-forge/noarch", ("conda-forge", 1)),
        ))
def create_env(pkgs, target, pkg_cache):
    """Deploy the (source, package) pairs in *pkgs* into the env at *target*.

    Unlinks packages not in the manifest, then fetches/extracts/links each
    requested distribution, using *pkg_cache* for per-distribution locking.
    """
    # We lock the specific environment we are wanting to create. If other
    # requests come in for the exact same environment, they will have to
    # wait for this to finish (good).
    with Locked(target):
        pkg_names = set(pkg for _, pkg in pkgs)
        if os.path.exists(target):
            # The environment we want to deploy already exists. We should
            # just double check that there aren't already packages in there
            # which we need to remove before we install anything new.
            linked = conda.install.linked(target)
            for pkg in linked:
                if pkg not in pkg_names:
                    conda.install.unlink(target, pkg)
        else:
            linked = []

        if set(linked) == pkg_names:
            # We don't need to re-link everything - it is already as
            # expected. The downside is that we are not verifying that each
            # package is installed correctly.
            return

        try:
            # Support conda>4.1
            from conda.models.channel import prioritize_channels
        except ImportError:
            # Older conda has no prioritization; use an identity mapping.
            # (PEP 8 E731: a def, not a lambda assigned to a name.)
            def prioritize_channels(channels):
                return channels

        for source, pkg in pkgs:
            index = conda.fetch.fetch_index(prioritize_channels([source]),
                                            use_cache=False)
            # Deal with the fact that a recent conda includes the source in
            # the index key.  (Renamed the comprehension variable so it no
            # longer shadows the loop's `pkg`.)
            index = {rec['fn']: rec for rec in index.values()}
            tar_name = pkg + '.tar.bz2'
            pkg_info = index.get(tar_name, None)
            if pkg_info is None:
                raise ValueError('Distribution {} is no longer available in the channel.'.format(tar_name))
            dist_name = pkg
            # We force a lock on retrieving anything which needs access to a
            # distribution of this name. If other requests come in to get the
            # exact same package they will have to wait for this to finish
            # (good). If conda itself is fetching these packages then there is
            # the potential for a race condition (bad) - there is no solution
            # to this unless conda/conda is updated to be more precise with
            # its locks.
            lock_name = os.path.join(pkg_cache, dist_name)
            with Locked(lock_name):
                schannel_dist_name = dist_name
                if pkg_info['schannel'] != 'defaults':
                    schannel_dist_name = '{}::{}'.format(pkg_info['schannel'], dist_name)
                if not conda.install.is_extracted(schannel_dist_name):
                    if not conda.install.is_fetched(schannel_dist_name):
                        print('Fetching {}'.format(dist_name))
                        conda.fetch.fetch_pkg(pkg_info)
                    conda.install.extract(schannel_dist_name)
                conda.install.link(target, schannel_dist_name)
def test_determine_all_envs_with_channel_priority(self):
    """determine_all_envs should prefer the higher-priority channel's
    package when the same spec is available from several channels."""
    self.res = generate_mocked_resolve([
        (None, "test-spec", "defaults", "5"),
        ("ranenv", "test-spec", "rando_chnl", "1"),
        ("test1", "test-spec2", "defaults", "1")
    ])
    # tuple literal instead of redundant tuple([...]) wrapper.
    prioritized_channel_map = prioritize_channels(("rando_chnl", "defaults"))
    specs_for_envs_w_channel_priority = plan.determine_all_envs(
        self.res, self.specs, prioritized_channel_map)
    expected_output = (plan.SpecForEnv(env="ranenv", spec="test-spec"),
                       plan.SpecForEnv(env="test1", spec="test-spec2"))
    # assertEqual: assertEquals is a deprecated alias removed in Python 3.12.
    self.assertEqual(specs_for_envs_w_channel_priority, expected_output)
def _solve(prefix, specs, args, env, *_, **kwargs):
    """Build and return a Solver for *specs* using the env's channel list."""
    # TODO: support all various ways this happens
    # 'nodefaults' is a marker that disables the default channels, not a
    # real channel, so it is filtered out here.
    channel_urls = [chan for chan in env.channels if chan != 'nodefaults']
    if 'nodefaults' not in env.channels:
        channel_urls.extend(context.channels)
    priority_map = prioritize_channels(channel_urls)
    channels = IndexedSet(Channel(url) for url in priority_map)
    subdirs = IndexedSet(basename(url) for url in priority_map)
    return Solver(prefix, channels, subdirs, specs_to_add=specs)
def test_multichannel_priority():
    """prioritize_channels must expand the 'defaults' multichannel into its
    component URLs, numbering channels in order; on Windows 'defaults'
    additionally contains the msys2 channel."""
    channels = ['conda-test', 'defaults', 'conda-forge']
    subdirs = ['new-optimized-subdir', 'linux-32', 'noarch']
    channel_priority_map = prioritize_channels(channels, with_credentials=True, subdirs=subdirs)
    if on_win:
        assert channel_priority_map == OrderedDict([
            ('https://conda.anaconda.org/conda-test/new-optimized-subdir', ('conda-test', 0)),
            ('https://conda.anaconda.org/conda-test/linux-32', ('conda-test', 0)),
            ('https://conda.anaconda.org/conda-test/noarch', ('conda-test', 0)),
            ('https://repo.anaconda.com/pkgs/main/new-optimized-subdir', ('defaults', 1)),
            ('https://repo.anaconda.com/pkgs/main/linux-32', ('defaults', 1)),
            ('https://repo.anaconda.com/pkgs/main/noarch', ('defaults', 1)),
            ('https://repo.anaconda.com/pkgs/free/new-optimized-subdir', ('defaults', 2)),
            ('https://repo.anaconda.com/pkgs/free/linux-32', ('defaults', 2)),
            ('https://repo.anaconda.com/pkgs/free/noarch', ('defaults', 2)),
            ('https://repo.anaconda.com/pkgs/r/new-optimized-subdir', ('defaults', 3)),
            ('https://repo.anaconda.com/pkgs/r/linux-32', ('defaults', 3)),
            ('https://repo.anaconda.com/pkgs/r/noarch', ('defaults', 3)),
            ('https://repo.anaconda.com/pkgs/pro/new-optimized-subdir', ('defaults', 4)),
            ('https://repo.anaconda.com/pkgs/pro/linux-32', ('defaults', 4)),
            ('https://repo.anaconda.com/pkgs/pro/noarch', ('defaults', 4)),
            ('https://repo.anaconda.com/pkgs/msys2/new-optimized-subdir', ('defaults', 5)),
            ('https://repo.anaconda.com/pkgs/msys2/linux-32', ('defaults', 5)),
            ('https://repo.anaconda.com/pkgs/msys2/noarch', ('defaults', 5)),
            ('https://conda.anaconda.org/conda-forge/new-optimized-subdir', ('conda-forge', 6)),
            ('https://conda.anaconda.org/conda-forge/linux-32', ('conda-forge', 6)),
            ('https://conda.anaconda.org/conda-forge/noarch', ('conda-forge', 6)),
        ])
    else:
        # Non-Windows: same expansion without the msys2 channel, so
        # conda-forge lands at priority 5 instead of 6.
        assert channel_priority_map == OrderedDict([
            ('https://conda.anaconda.org/conda-test/new-optimized-subdir', ('conda-test', 0)),
            ('https://conda.anaconda.org/conda-test/linux-32', ('conda-test', 0)),
            ('https://conda.anaconda.org/conda-test/noarch', ('conda-test', 0)),
            ('https://repo.anaconda.com/pkgs/main/new-optimized-subdir', ('defaults', 1)),
            ('https://repo.anaconda.com/pkgs/main/linux-32', ('defaults', 1)),
            ('https://repo.anaconda.com/pkgs/main/noarch', ('defaults', 1)),
            ('https://repo.anaconda.com/pkgs/free/new-optimized-subdir', ('defaults', 2)),
            ('https://repo.anaconda.com/pkgs/free/linux-32', ('defaults', 2)),
            ('https://repo.anaconda.com/pkgs/free/noarch', ('defaults', 2)),
            ('https://repo.anaconda.com/pkgs/r/new-optimized-subdir', ('defaults', 3)),
            ('https://repo.anaconda.com/pkgs/r/linux-32', ('defaults', 3)),
            ('https://repo.anaconda.com/pkgs/r/noarch', ('defaults', 3)),
            ('https://repo.anaconda.com/pkgs/pro/new-optimized-subdir', ('defaults', 4)),
            ('https://repo.anaconda.com/pkgs/pro/linux-32', ('defaults', 4)),
            ('https://repo.anaconda.com/pkgs/pro/noarch', ('defaults', 4)),
            ('https://conda.anaconda.org/conda-forge/new-optimized-subdir', ('conda-forge', 5)),
            ('https://conda.anaconda.org/conda-forge/linux-32', ('conda-forge', 5)),
            ('https://conda.anaconda.org/conda-forge/noarch', ('conda-forge', 5)),
        ])
def test_multichannel_priority():
    """prioritize_channels must expand the 'defaults' multichannel into its
    component URLs (legacy repo.continuum.io host), numbering channels in
    order; on Windows 'defaults' additionally contains the msys2 channel."""
    channels = ['conda-test', 'defaults', 'conda-forge']
    subdirs = ['new-optimized-subdir', 'linux-32', 'noarch']
    channel_priority_map = prioritize_channels(channels, with_credentials=True, subdirs=subdirs)
    if on_win:
        assert channel_priority_map == OrderedDict([
            ('https://conda.anaconda.org/conda-test/new-optimized-subdir', ('conda-test', 0)),
            ('https://conda.anaconda.org/conda-test/linux-32', ('conda-test', 0)),
            ('https://conda.anaconda.org/conda-test/noarch', ('conda-test', 0)),
            ('https://repo.continuum.io/pkgs/main/new-optimized-subdir', ('defaults', 1)),
            ('https://repo.continuum.io/pkgs/main/linux-32', ('defaults', 1)),
            ('https://repo.continuum.io/pkgs/main/noarch', ('defaults', 1)),
            ('https://repo.continuum.io/pkgs/free/new-optimized-subdir', ('defaults', 2)),
            ('https://repo.continuum.io/pkgs/free/linux-32', ('defaults', 2)),
            ('https://repo.continuum.io/pkgs/free/noarch', ('defaults', 2)),
            ('https://repo.continuum.io/pkgs/r/new-optimized-subdir', ('defaults', 3)),
            ('https://repo.continuum.io/pkgs/r/linux-32', ('defaults', 3)),
            ('https://repo.continuum.io/pkgs/r/noarch', ('defaults', 3)),
            ('https://repo.continuum.io/pkgs/pro/new-optimized-subdir', ('defaults', 4)),
            ('https://repo.continuum.io/pkgs/pro/linux-32', ('defaults', 4)),
            ('https://repo.continuum.io/pkgs/pro/noarch', ('defaults', 4)),
            ('https://repo.continuum.io/pkgs/msys2/new-optimized-subdir', ('defaults', 5)),
            ('https://repo.continuum.io/pkgs/msys2/linux-32', ('defaults', 5)),
            ('https://repo.continuum.io/pkgs/msys2/noarch', ('defaults', 5)),
            ('https://conda.anaconda.org/conda-forge/new-optimized-subdir', ('conda-forge', 6)),
            ('https://conda.anaconda.org/conda-forge/linux-32', ('conda-forge', 6)),
            ('https://conda.anaconda.org/conda-forge/noarch', ('conda-forge', 6)),
        ])
    else:
        # Non-Windows: no msys2 channel, so conda-forge is priority 5.
        assert channel_priority_map == OrderedDict([
            ('https://conda.anaconda.org/conda-test/new-optimized-subdir', ('conda-test', 0)),
            ('https://conda.anaconda.org/conda-test/linux-32', ('conda-test', 0)),
            ('https://conda.anaconda.org/conda-test/noarch', ('conda-test', 0)),
            ('https://repo.continuum.io/pkgs/main/new-optimized-subdir', ('defaults', 1)),
            ('https://repo.continuum.io/pkgs/main/linux-32', ('defaults', 1)),
            ('https://repo.continuum.io/pkgs/main/noarch', ('defaults', 1)),
            ('https://repo.continuum.io/pkgs/free/new-optimized-subdir', ('defaults', 2)),
            ('https://repo.continuum.io/pkgs/free/linux-32', ('defaults', 2)),
            ('https://repo.continuum.io/pkgs/free/noarch', ('defaults', 2)),
            ('https://repo.continuum.io/pkgs/r/new-optimized-subdir', ('defaults', 3)),
            ('https://repo.continuum.io/pkgs/r/linux-32', ('defaults', 3)),
            ('https://repo.continuum.io/pkgs/r/noarch', ('defaults', 3)),
            ('https://repo.continuum.io/pkgs/pro/new-optimized-subdir', ('defaults', 4)),
            ('https://repo.continuum.io/pkgs/pro/linux-32', ('defaults', 4)),
            ('https://repo.continuum.io/pkgs/pro/noarch', ('defaults', 4)),
            ('https://conda.anaconda.org/conda-forge/new-optimized-subdir', ('conda-forge', 5)),
            ('https://conda.anaconda.org/conda-forge/linux-32', ('conda-forge', 5)),
            ('https://conda.anaconda.org/conda-forge/noarch', ('conda-forge', 5)),
        ])
def install(prefix, specs, args, env, *_, **kwargs):
    """Solve *specs* against the env's channel list and execute the
    resulting unlink/link transaction in *prefix*."""
    # TODO: support all various ways this happens
    # 'nodefaults' is a marker that disables the default channels.
    channel_urls = [chan for chan in env.channels if chan != 'nodefaults']
    if 'nodefaults' not in env.channels:
        channel_urls.extend(context.channels)
    priority_map = prioritize_channels(channel_urls)
    channels = IndexedSet(Channel(url) for url in priority_map)
    subdirs = IndexedSet(basename(url) for url in priority_map)
    solver = Solver(prefix, channels, subdirs, specs_to_add=specs)
    txn = solver.solve_for_transaction(prune=getattr(args, 'prune', False))
    # Download/extract packages first, then apply the plan.
    txn._get_pfe().execute()
    txn.execute()
def test_env_var_file_urls(self):
    """file:// channels (UNC and local paths) set via CONDA_CHANNELS must
    survive context parsing and prioritization unmangled."""
    channels = ("file://\\\\network_share\\shared_folder\\path\\conda,"
                "https://some.url/ch_name,"
                "file:///some/place/on/my/machine")
    with env_var("CONDA_CHANNELS", channels, reset_context):
        # The comma-joined env value is split back into individual channels.
        assert context.channels == (
            "file://\\\\network_share\\shared_folder\\path\\conda",
            "https://some.url/ch_name",
            "file:///some/place/on/my/machine",
        )

        # Prioritization normalizes backslashes and expands each channel to
        # <url>/<subdir> and <url>/noarch with a per-channel priority index.
        prioritized = prioritize_channels(context.channels)
        assert prioritized == OrderedDict((
            ("file://network_share/shared_folder/path/conda/%s" % context.subdir,
             ("file://network_share/shared_folder/path/conda", 0)),
            ("file://network_share/shared_folder/path/conda/noarch",
             ("file://network_share/shared_folder/path/conda", 0)),
            ("https://some.url/ch_name/%s" % context.subdir,
             ("https://some.url/ch_name", 1)),
            ("https://some.url/ch_name/noarch",
             ("https://some.url/ch_name", 1)),
            ("file:///some/place/on/my/machine/%s" % context.subdir,
             ("file:///some/place/on/my/machine", 2)),
            ("file:///some/place/on/my/machine/noarch",
             ("file:///some/place/on/my/machine", 2)),
        ))
def install(prefix, specs, args, env, prune=False):
    """Install *specs* into *prefix* via a list of plan action sets.

    ``channel::package`` specs contribute their channel; 'nodefaults' in
    the env channel list disables prepending the default channels.
    Re-raises plan failures as conda exception types.
    """
    # TODO: support all various ways this happens
    # Including 'nodefaults' in the channels list disables the defaults
    new_specs = []
    channel_urls = set()
    for elem in specs:
        if "::" in elem:
            # "channel::package": remember the channel, keep the bare spec.
            channel_urls.add(elem.split("::")[0])
            new_specs.append(elem.split("::")[-1])
        else:
            new_specs.append(elem)
    specs = new_specs
    channel_urls = list(channel_urls)
    # TODO: support all various ways this happens
    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = channel_urls + [
        chan for chan in env.channels if chan != 'nodefaults'
    ]
    index = get_index(channel_urls=channel_urls,
                      prepend='nodefaults' not in env.channels,
                      prefix=prefix)
    _channel_priority_map = prioritize_channels(channel_urls)
    action_set = plan.install_actions_list(
        prefix, index, specs, prune=prune,
        channel_priority_map=_channel_priority_map)

    with common.json_progress_bars(json=args.json and not args.quiet):
        for actions in action_set:
            try:
                plan.execute_actions(actions, index, verbose=not args.quiet)
            except RuntimeError as e:
                if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                    raise LockError('Already locked: %s' % text_type(e))
                else:
                    # A generic RuntimeError is not an HTTP failure; raise
                    # CondaRuntimeError (consistent with the sibling
                    # implementation) instead of the misleading
                    # CondaHTTPError. Requires CondaRuntimeError to be
                    # imported from conda.exceptions.
                    raise CondaRuntimeError('RuntimeError: %s' % e)
            except SystemExit as e:
                raise CondaSystemExit('Exiting', e)
def main(info, verbose=True, dry_run=False, use_conda=False):
    """Resolve, validate, and fetch the distributions for an installer build.

    Mutates the module-level ``index`` and ``dists`` globals and writes the
    final distribution list back into ``info['_dists']``.
    """
    if 'channels' in info:
        # The fetched index is shared with the other module-level helpers.
        global index
        if use_conda:
            from conda.models.channel import prioritize_channels
            from conda.exports import fetch_index
            # Expand each channel URL to <url>/<platform>/ and <url>/noarch/.
            channels = tuple('%s/%s/' % (url.rstrip('/'), platform)
                             for url in info['channels']
                             for platform in (info['_platform'], 'noarch'))
            index = fetch_index(prioritize_channels(channels))
        else:
            # Legacy path: libconda has no channel prioritization.
            from libconda.fetch import fetch_index
            index = fetch_index(
                tuple('%s/%s/' % (url.rstrip('/'), platform)
                      for url in info['channels']
                      for platform in (info['_platform'], 'noarch')))
    if 'specs' in info:
        resolve(info, verbose, use_conda)
    exclude_packages(info)
    if 'packages' in info:
        handle_packages(info)
    if not info.get('install_in_dependency_order'):
        dists.sort()
    # NOTE(review): reconstructed from flattened source — move_python_first()
    # is assumed to run unconditionally (python must be linked before
    # everything else); confirm against upstream.
    move_python_first()

    all_names = set(name_dist(fn) for fn in dists)
    for name in info.get('menu_packages', []):
        if name not in all_names:
            print("WARNING: no such package (in menu_packages): %s" % name)

    if verbose:
        show(info)
    check_dists()
    if dry_run:
        # Stop before any downloads happen.
        return
    fetch(info, use_conda)

    info['_dists'] = list(dists)
def create_env(repo, pkgs, target):
    """Create the environment at *target* from the (url, package) pairs in
    *pkgs*, using the channel list from the repo's ``env.spec`` file."""
    with Locked(target):
        spec_fname = os.path.join(repo.working_dir, 'env.spec')
        with open(spec_fname, 'r') as fh:
            spec = yaml.safe_load(fh)
        channels = prioritize_channels(spec.get('channels', []))
        # Build reverse look-up from channel URL to channel name.
        channel_by_url = {url: channel for url, (channel, _) in channels.items()}
        index = fetch_index(channels, use_cache=False)
        resolver = Resolve(index)

        # Create the package distribution from the manifest. Ensure to replace
        # channel-URLs with channel names, otherwise the fetch-extract may fail.
        dists = [Dist.from_string(pkg, channel_override=channel_by_url.get(url, url))
                 for url, pkg in pkgs]

        # Use the resolver to sort packages into the appropriate dependency
        # order.
        sorted_dists = resolver.dependency_sort({dist.name: dist for dist in dists})

        # Fetch and extract everything before touching the target prefix.
        pfe = ProgressiveFetchExtract(index, sorted_dists)
        pfe.execute()
        mkdir_p(target)
        txn = UnlinkLinkTransaction.create_from_dists(index, target, (), sorted_dists)
        txn.execute()
def pkg_env(environment_file: Path, coex_path: Path, cache_dir: Path) -> None: """Resolve, fetch, and repackage conda env into coex /pkgs directory. Resolve conda environment file to a specific package list via conda solver, then fetch and unpack target packages. Repack into .coex package data in cache_dir or reuse if pre-packed, and assemble into /pkgs under coex_path. Args: environment_file: Standard conda env file, can not contain pip deps. coex_path: Output coex build path. cache_dir: Coex build cache directory. """ # Resolve environment file to dependencies # Logic culled from conda-env spec = YamlFileSpec(filename=str(environment_file)) env = spec.environment logging.info(env.dependencies) assert set(env.dependencies) == { "conda" }, f"coex environments do not support pip dependencies: {env}" channel_urls = [chan for chan in env.channels if chan != "nodefaults"] if "nodefaults" not in env.channels: channel_urls.extend(context.channels) _channel_priority_map = prioritize_channels(channel_urls) # Setup an dummpy environment resolution for install into /dev/null # Execute fetch-and-extract operations for required conda packages prefix = "/dev/null" channels = IndexedSet(Channel(url) for url in _channel_priority_map) subdirs = IndexedSet( os.path.basename(url) for url in _channel_priority_map) solver = Solver(prefix, channels, subdirs, specs_to_add=env.dependencies["conda"]) transaction: UnlinkLinkTransaction = solver.solve_for_transaction() logging.info(transaction) transaction.download_and_extract() # Resolve all the, now extracted, target packages in the filesystem fetcher: ProgressiveFetchExtract = transaction._pfe target_records: Set[PackageRecord] = set(fetcher.link_precs) logging.debug("target_records=%s", target_records) extracted: Set[PackageCacheRecord] = { next( (pcrec for pcrec in chain(*(PackageCacheData(pkgs_dir).query(precord) for pkgs_dir in context.pkgs_dirs)) if pcrec.is_extracted), None, ) for precord in target_records } 
logging.debug("extracted=%s", extracted) # Repackage into a single-file .zst in the cache, then copy into the output # package. output_path = coex_path / "pkgs" for e in extracted: extracted_dir = Path(e.extracted_package_dir) pkgname = extracted_dir.name + ".tar.zst" cache_dir.mkdir(parents=True, exist_ok=True) if not (cache_dir / pkgname).exists(): pkg_cmd = ( # tar filtered through zstd # Seeing errors on macos 10.13 image when using --use-compress-program # with arguments, consider (a) installing conda-forge tar or (b) using # a wrapper script if zstd arguments are needed [ "tar", "--use-compress-program", "zstd -T0" if platform.system() != "Darwin" else "zstd", ] # write to archive file + ["-f", str(cache_dir / pkgname)] # chdir to extracted package directory + ["-C", str(extracted_dir)] # and add all package dirs + (["-c"] + [f.name for f in extracted_dir.iterdir()])) logging.info("packaging: %s", pkg_cmd) subprocess.check_call(pkg_cmd) output_path.mkdir(parents=True, exist_ok=True) shutil.copyfile(cache_dir / pkgname, output_path / pkgname)
def mamba_install(prefix, specs, args, env, *_, **kwargs):
    """Solve *specs* with mamba's libsolv backend and apply the resulting
    conda unlink/link transaction to *prefix*.

    'nodefaults' in the env channel list disables the default channels.
    """
    # TODO: support all various ways this happens
    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = [chan for chan in env.channels if chan != 'nodefaults']
    if 'nodefaults' not in env.channels:
        channel_urls.extend(context.channels)
    _channel_priority_map = prioritize_channels(channel_urls)

    index = get_env_index(_channel_priority_map)
    channel_json = []
    for x in index:
        # Invert the priority number: libsolv treats larger values as more
        # preferred, while prioritize_channels numbers from 0 = best.
        priority = len(_channel_priority_map) - _channel_priority_map[
            x.url_w_subdir][1]
        # noarch repos rank below the platform repo of the same channel.
        subpriority = 0 if x.channel.platform == 'noarch' else 1
        # Prefer the pre-parsed .solv cache over raw repodata.json.
        if os.path.exists(x.cache_path_solv):
            cache_file = x.cache_path_solv
        else:
            cache_file = x.cache_path_json
        channel_json.append(
            (str(x.channel), cache_file, priority, subpriority))

    specs = [MatchSpec(s) for s in specs]
    mamba_solve_specs = [s.conda_build_form() for s in specs]

    print("\n\nLooking for: {}\n\n".format(mamba_solve_specs))

    # TODO! the solver API wants a file of installed packages; hand it an
    # empty temp file until real prefix state is wired through.
    installed_json_f = tempfile.NamedTemporaryFile('w', delete=False)
    installed_json_f.write("")
    installed_json_f.flush()

    solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]

    to_link, to_unlink = api.solve(channel_json, installed_json_f.name,
                                   mamba_solve_specs, solver_options,
                                   api.SOLVER_INSTALL, False, context.quiet)

    to_link_records, to_unlink_records = [], []
    final_precs = IndexedSet(PrefixData(prefix).iter_records())

    def get_channel(c):
        # Map the solver's channel string back to its index entry.
        for x in index:
            if str(x.channel) == c:
                return x

    for c, pkg in to_unlink:
        # NOTE(review): `installed_pkg_recs` is not defined anywhere in this
        # function; this loop would raise NameError if to_unlink were
        # non-empty. Left unchanged pending confirmation of its source.
        for i_rec in installed_pkg_recs:
            if i_rec.fn == pkg:
                final_precs.remove(i_rec)
                to_unlink_records.append(i_rec)
                break
        else:
            print("No package record found!")

    for c, pkg, jsn_s in to_link:
        sdir = get_channel(c)
        rec = to_package_record_from_subjson(sdir, pkg, jsn_s)
        final_precs.add(rec)
        to_link_records.append(rec)

    unlink_precs, link_precs = diff_for_unlink_link_precs(
        prefix,
        final_precs=IndexedSet(PrefixGraph(final_precs).graph),
        specs_to_add=specs,
        force_reinstall=context.force_reinstall)

    pref_setup = PrefixSetup(target_prefix=prefix,
                             unlink_precs=unlink_precs,
                             link_precs=link_precs,
                             remove_specs=[],
                             update_specs=specs)

    conda_transaction = UnlinkLinkTransaction(pref_setup)
    pfe = conda_transaction._get_pfe()
    pfe.execute()
    conda_transaction.execute()

    # Best-effort cleanup of the temp file. Previously a bare `except:`,
    # which also swallowed KeyboardInterrupt/SystemExit; only filesystem
    # errors should be ignored here.
    try:
        installed_json_f.close()
        os.unlink(installed_json_f.name)
    except OSError:
        pass
def mamba_install(prefix, specs, args, env, *_, **kwargs):
    """Install *specs* into *prefix* using the mamba solver.

    Channels come from ``env.channels`` (plus the context channels unless
    'nodefaults' is listed). When invoked via an ``update`` subcommand the
    currently installed packages are also loaded into the solver, and the
    installed Python version is pinned unless python itself was requested.
    """
    # TODO: support all various ways this happens
    init_api_context()
    api.Context().target_prefix = prefix
    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = [chan for chan in env.channels if chan != "nodefaults"]
    if "nodefaults" not in env.channels:
        channel_urls.extend(context.channels)
    # Maps each channel URL to a (channel_name, priority_index) tuple.
    _channel_priority_map = prioritize_channels(channel_urls)

    index = get_index(tuple(_channel_priority_map.keys()), prepend=False)

    # Build (channel, subdir, priority, subpriority) tuples for the solver.
    channel_json = []
    for subdir, chan in index:
        # Higher number == higher priority; invert the 0-based priority index.
        priority = (len(_channel_priority_map) -
                    _channel_priority_map[chan.url(with_credentials=True)][1])
        # noarch subdirs rank below platform-specific ones.
        subpriority = 0 if chan.platform == "noarch" else 1
        if not subdir.loaded() and chan.platform != "noarch":
            # ignore non-loaded subdir if channel is != noarch
            continue
        channel_json.append((chan, subdir, priority, subpriority))

    if not (context.quiet or context.json):
        print("\n\nLooking for: {}\n\n".format(specs))

    solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]

    pool = api.Pool()
    repos = []

    # When updating, seed the solver with the currently installed packages.
    installed_pkg_recs = []
    python_constraint = None
    if "update" in args.func:
        installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)
        repo = api.Repo(pool, "installed", installed_json_f.name, "")
        repo.set_installed()
        repos.append(repo)

        # Also pin the Python version if it's installed
        # If python was not specified, check if it is installed.
        # If yes, add the installed python to the specs to prevent updating it.
        if "python" not in [MatchSpec(s).name for s in specs]:
            installed_names = [i_rec.name for i_rec in installed_pkg_recs]
            if "python" in installed_names:
                i = installed_names.index("python")
                version = installed_pkg_recs[i].version
                python_constraint = MatchSpec("python==" + version).conda_build_form()

    # Register one solver repo per loaded channel subdir.
    for _, subdir, priority, subpriority in channel_json:
        repo = subdir.create_repo(pool)
        repo.set_priority(priority, subpriority)
        repos.append(repo)

    solver = api.Solver(pool, solver_options)
    solver.add_jobs(specs, api.SOLVER_INSTALL)
    if python_constraint:
        solver.add_pin(python_constraint)
    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        exit(1)

    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(solver, package_cache)
    if not (context.quiet or context.json):
        transaction.print()
    # mmb_specs[0] holds the final spec strings; re-parse them as MatchSpecs.
    mmb_specs, to_link, to_unlink = transaction.to_conda()

    specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]

    conda_transaction = to_txn(specs_to_add, [], prefix, to_link, to_unlink,
                               installed_pkg_recs, index)

    # Fetch/extract packages, then apply the transaction.
    pfe = conda_transaction._get_pfe()
    pfe.execute()
    conda_transaction.execute()
def mamba_install(prefix, specs, args, env, *_, **kwargs):
    """Install *specs* into *prefix* with the mamba solver, honouring both
    spec-embedded channels ("channel::pkg") and any pins configured for the
    prefix; aborts when a pin contradicts what is already installed.
    """
    # TODO: support all various ways this happens
    init_api_context()
    api.Context().target_prefix = prefix

    match_specs = [MatchSpec(s) for s in specs]

    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = [chan for chan in env.channels if chan != "nodefaults"]
    if "nodefaults" not in env.channels:
        channel_urls.extend(context.channels)

    # Specs of the form "channel::pkg" carry their own channel; append it.
    for spec in match_specs:
        # CONDA TODO: correct handling for subdir isn't yet done
        spec_channel = spec.get_exact_value("channel")
        if spec_channel and spec_channel not in channel_urls:
            channel_urls.append(str(spec_channel))

    ordered_channels_dict = prioritize_channels(channel_urls)

    pool = api.Pool()
    repos = []
    index = load_channels(pool, tuple(ordered_channels_dict.keys()), repos,
                          prepend=False)

    if not (context.quiet or context.json):
        print("\n\nLooking for: {}\n\n".format(specs))

    solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]

    installed_pkg_recs = []

    # We check for installed packages even while creating a new
    # Conda environment as virtual packages such as __glibc are
    # always available regardless of the environment.
    installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)
    repo = api.Repo(pool, "installed", installed_json_f.name, "")
    repo.set_installed()
    repos.append(repo)

    solver = api.Solver(pool, solver_options)

    # Also pin the Python version if it's installed
    # If python was not specified, check if it is installed.
    # If yes, add the installed python to the specs to prevent updating it.
    if "python" not in [s.name for s in match_specs]:
        installed_names = [i_rec.name for i_rec in installed_pkg_recs]
        if "python" in installed_names:
            i = installed_names.index("python")
            version = installed_pkg_recs[i].version
            python_constraint = MatchSpec("python==" + version).conda_build_form()
            solver.add_pin(python_constraint)

    # Apply user-configured pins; refuse to proceed when a pin contradicts
    # what is already installed in the prefix.
    pinned_specs = get_pinned_specs(prefix)
    pinned_specs_info = ""
    if pinned_specs:
        conda_prefix_data = PrefixData(prefix)
        for s in pinned_specs:
            x = conda_prefix_data.query(s.name)
            if x:
                for el in x:
                    if not s.match(el):
                        print(
                            "Your pinning does not match what's currently installed."
                            " Please remove the pin and fix your installation")
                        print(" Pin: {}".format(s))
                        print(" Currently installed: {}".format(el))
                        exit(1)

            # conda_build_form() inside the try is what raises AssertionError
            # for malformed pins.
            try:
                final_spec = s.conda_build_form()
                pinned_specs_info += f" - {final_spec}"
                solver.add_pin(final_spec)
            except AssertionError:
                print(f"\nERROR: could not add pinned spec {s}. Make sure pin"
                      "is of the format\n"
                      "libname VERSION BUILD, for example libblas=*=*mkl\n")

    if pinned_specs_info:
        print(f"\n Pinned packages:\n\n{pinned_specs_info}\n")

    solver.add_jobs(specs, api.SOLVER_INSTALL)
    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        exit(1)

    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(solver, package_cache)
    if not (context.quiet or context.json):
        transaction.print()
    # mmb_specs[0] holds the final spec strings; re-parse them as MatchSpecs.
    mmb_specs, to_link, to_unlink = transaction.to_conda()
    specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]

    conda_transaction = to_txn(specs_to_add, [], prefix, to_link, to_unlink,
                               installed_pkg_recs, index)

    # Fetch/extract packages, then apply the transaction.
    pfe = conda_transaction._get_pfe()
    pfe.execute()
    conda_transaction.execute()
def mamba_install(prefix, specs, args, env, *_, **kwargs):
    """Solve *specs* with the mamba/libsolv API and apply the resulting
    conda unlink/link transaction to *prefix*.

    prefix: path of the target conda environment.
    specs:  iterable of package spec strings to install.
    env:    environment description providing ``env.channels``.
    """
    # TODO: support all various ways this happens
    init_api_context()

    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = [chan for chan in env.channels if chan != 'nodefaults']
    if 'nodefaults' not in env.channels:
        channel_urls.extend(context.channels)
    # Maps each channel URL to a (channel_name, priority_index) tuple.
    _channel_priority_map = prioritize_channels(channel_urls)

    index = get_index(tuple(_channel_priority_map.keys()))

    # Build (channel, cache_path, priority, subpriority) tuples for the solver.
    channel_json = []
    for subdir, chan in index:
        # Higher number == higher priority; invert the 0-based priority index.
        priority = len(_channel_priority_map) - _channel_priority_map[chan.url(
            with_credentials=True)][1]
        # noarch subdirs rank below platform-specific ones.
        subpriority = 0 if chan.platform == 'noarch' else 1
        if not subdir.loaded() and chan.platform != 'noarch':
            # ignore non-loaded subdir if channel is != noarch
            continue
        channel_json.append((chan, subdir.cache_path(), priority, subpriority))

    specs = [MatchSpec(s) for s in specs]
    mamba_solve_specs = [s.conda_build_form() for s in specs]

    print("\n\nLooking for: {}\n\n".format(mamba_solve_specs))

    solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]

    pool = api.Pool()
    repos = []
    for channel, cache_file, priority, subpriority in channel_json:
        repo = api.Repo(pool, str(channel), cache_file,
                        channel.url(with_credentials=True))
        repo.set_priority(priority, subpriority)
        repos.append(repo)

    solver = api.Solver(pool, solver_options)
    solver.add_jobs(mamba_solve_specs, api.SOLVER_INSTALL)
    success = solver.solve()
    if not success:
        # Abort on an unsolvable request instead of silently continuing with
        # an invalid transaction (matches the other install entry points).
        print(solver.problems_to_str())
        exit(1)

    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(solver, package_cache)
    to_link, to_unlink = transaction.to_conda()

    to_link_records = []
    # Start from everything currently installed in the prefix.
    final_precs = IndexedSet(PrefixData(prefix).iter_records())

    def get_channel(c):
        # Resolve a channel name back to its Channel object in the index.
        for _, chan in index:
            if str(chan) == c:
                return chan

    # Convert each solver result into a conda PackageRecord.
    for c, pkg, jsn_s in to_link:
        sdir = get_channel(c)
        rec = to_package_record_from_subjson(sdir, pkg, jsn_s)
        final_precs.add(rec)
        to_link_records.append(rec)

    # Let conda compute the minimal unlink/link delta for the prefix.
    unlink_precs, link_precs = diff_for_unlink_link_precs(
        prefix,
        final_precs=IndexedSet(PrefixGraph(final_precs).graph),
        specs_to_add=specs,
        force_reinstall=context.force_reinstall)

    pref_setup = PrefixSetup(target_prefix=prefix,
                             unlink_precs=unlink_precs,
                             link_precs=link_precs,
                             remove_specs=[],
                             update_specs=specs,
                             neutered_specs=())

    conda_transaction = UnlinkLinkTransaction(pref_setup)

    # Fetch/extract packages, then apply the transaction.
    pfe = conda_transaction._get_pfe()
    pfe.execute()
    conda_transaction.execute()
def fetch_index(channel_urls): return _fetch_index(prioritize_channels(channel_urls))
def mamba_install(prefix, specs, args, env, *_, **kwargs):
    """Install *specs* into *prefix* using the mamba solver.

    Channels come from ``env.channels`` (plus the context channels unless
    'nodefaults' is listed); when invoked via an ``update`` subcommand the
    currently installed packages are also loaded into the solver.
    """
    # TODO: support all various ways this happens
    init_api_context()
    api.Context().target_prefix = prefix
    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = [chan for chan in env.channels if chan != 'nodefaults']
    if 'nodefaults' not in env.channels:
        channel_urls.extend(context.channels)
    # Maps each channel URL to a (channel_name, priority_index) tuple.
    _channel_priority_map = prioritize_channels(channel_urls)

    index = get_index(tuple(_channel_priority_map.keys()), prepend=False)

    # Build (channel, subdir, priority, subpriority) tuples for the solver.
    channel_json = []
    for subdir, chan in index:
        # Higher number == higher priority; invert the 0-based priority index.
        priority = len(_channel_priority_map) - _channel_priority_map[
            chan.url(with_credentials=True)][1]
        # noarch subdirs rank below platform-specific ones.
        subpriority = 0 if chan.platform == 'noarch' else 1
        if not subdir.loaded() and chan.platform != 'noarch':
            # ignore non-loaded subdir if channel is != noarch
            continue
        channel_json.append((chan, subdir, priority, subpriority))

    if not (context.quiet or context.json):
        print("\n\nLooking for: {}\n\n".format(specs))

    solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]

    pool = api.Pool()
    repos = []

    # When updating, seed the solver with the currently installed packages.
    installed_pkg_recs = []
    if 'update' in args.func:
        installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)
        repo = api.Repo(pool, "installed", installed_json_f.name, "")
        repo.set_installed()
        repos.append(repo)

    # Register one solver repo per loaded channel subdir.
    for channel, subdir, priority, subpriority in channel_json:
        repo = subdir.create_repo(pool)
        repo.set_priority(priority, subpriority)
        repos.append(repo)

    solver = api.Solver(pool, solver_options)
    solver.add_jobs(specs, api.SOLVER_INSTALL)
    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        exit(1)

    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(solver, package_cache)
    if not (context.quiet or context.json):
        transaction.print()
    # mmb_specs[0] holds the final spec strings; re-parse them as MatchSpecs.
    mmb_specs, to_link, to_unlink = transaction.to_conda()

    specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]

    conda_transaction = to_txn(specs_to_add, [], prefix, to_link, to_unlink,
                               installed_pkg_recs, index)

    # Fetch/extract packages, then apply the transaction.
    pfe = conda_transaction._get_pfe()
    pfe.execute()
    conda_transaction.execute()
def mamba_install(prefix, specs, args, env, *_, **kwargs):
    """Install *specs* into *prefix* with the mamba solver, pinning the
    installed Python version when python itself is not being requested.

    Channels come from ``env.channels`` (plus the context channels unless
    'nodefaults' is listed) and from any "channel::pkg" spec prefixes.
    """
    # TODO: support all various ways this happens
    init_api_context()
    api.Context().target_prefix = prefix

    match_specs = [MatchSpec(s) for s in specs]

    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = [chan for chan in env.channels if chan != "nodefaults"]
    if "nodefaults" not in env.channels:
        channel_urls.extend(context.channels)

    # Specs of the form "channel::pkg" carry their own channel; append it.
    for spec in match_specs:
        # CONDA TODO: correct handling for subdir isn't yet done
        spec_channel = spec.get_exact_value("channel")
        if spec_channel and spec_channel not in channel_urls:
            channel_urls.append(str(spec_channel))

    ordered_channels_dict = prioritize_channels(channel_urls)

    pool = api.Pool()
    repos = []
    index = load_channels(pool, tuple(ordered_channels_dict.keys()), repos,
                          prepend=False)

    if not (context.quiet or context.json):
        print("\n\nLooking for: {}\n\n".format(specs))

    solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]

    installed_pkg_recs = []
    python_constraint = None

    # We check for installed packages even while creating a new
    # Conda environment as virtual packages such as __glibc are
    # always available regardless of the environment.
    installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)
    repo = api.Repo(pool, "installed", installed_json_f.name, "")
    repo.set_installed()
    repos.append(repo)

    # Also pin the Python version if it's installed
    # If python was not specified, check if it is installed.
    # If yes, add the installed python to the specs to prevent updating it.
    if "python" not in [s.name for s in match_specs]:
        installed_names = [i_rec.name for i_rec in installed_pkg_recs]
        if "python" in installed_names:
            i = installed_names.index("python")
            version = installed_pkg_recs[i].version
            python_constraint = MatchSpec("python==" + version).conda_build_form()

    solver = api.Solver(pool, solver_options)
    solver.add_jobs(specs, api.SOLVER_INSTALL)
    if python_constraint:
        solver.add_pin(python_constraint)
    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        exit(1)

    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(solver, package_cache)
    if not (context.quiet or context.json):
        transaction.print()
    # mmb_specs[0] holds the final spec strings; re-parse them as MatchSpecs.
    mmb_specs, to_link, to_unlink = transaction.to_conda()

    specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]

    conda_transaction = to_txn(specs_to_add, [], prefix, to_link, to_unlink,
                               installed_pkg_recs, index)

    # Fetch/extract packages, then apply the transaction.
    pfe = conda_transaction._get_pfe()
    pfe.execute()
    conda_transaction.execute()