def do_list_available_packages():
    import conda
    version = conda.__version__
    version_splitted = version.split(".")
    if len(version_splitted) < 2:
        sys.stderr.write("Conda version %s" % version)
        sys.stderr.flush()
        return
    major_version = int(version_splitted[0])
    minor_version = int(version_splitted[1])
    if major_version >= 4 and minor_version >= 4:
        init_context()
        from conda.core.index import get_index
        index = get_index()
    elif major_version == 4 and minor_version >= 2:
        from conda.api import get_index
        index = get_index()
    elif major_version == 4 and minor_version == 1:
        from conda.cli.main_search import get_index
        index = get_index()
    else:
        from conda.cli.main_search import common
        index = common.get_index_trap()
    for pkg in index.values():
        sys.stdout.write("\t".join([pkg["name"], pkg["version"], ":".join(pkg["depends"])]) + chr(10))
        sys.stdout.flush()

def do_list_available_packages():
    import conda
    version = conda.__version__
    version_splitted = version.split(".")
    if len(version_splitted) < 2:
        sys.stderr.write("Conda version %s" % version)
        sys.stderr.flush()
        return
    major_version = int(version_splitted[0])
    minor_version = int(version_splitted[1])
    if major_version >= 4 and minor_version >= 4:
        from conda.core.index import get_index
        index = get_index()
    elif major_version == 4 and minor_version >= 2:
        from conda.api import get_index
        index = get_index()
    elif major_version == 4 and minor_version == 1:
        from conda.cli.main_search import get_index
        index = get_index()
    else:
        from conda.cli.main_search import common
        index = common.get_index_trap()
    for pkg in index.values():
        sys.stdout.write("\t".join([pkg["name"], pkg["version"], ":".join(pkg["depends"])]) + chr(10))
        sys.stdout.flush()

def test_get_index_no_platform_with_offline_cache(self):
    import conda.core.subdir_data
    with env_var('CONDA_REPODATA_TIMEOUT_SECS', '0', stack_callback=conda_tests_ctxt_mgmt_def_pol):
        with patch.object(conda.core.subdir_data, 'read_mod_and_etag') as read_mod_and_etag:
            read_mod_and_etag.return_value = {}

            channel_urls = ('https://repo.anaconda.com/pkgs/pro',)
            with env_var('CONDA_REPODATA_TIMEOUT_SECS', '0', stack_callback=conda_tests_ctxt_mgmt_def_pol):
                this_platform = context.subdir
                index = get_index(channel_urls=channel_urls, prepend=False)
                for dist, record in iteritems(index):
                    assert platform_in_record(this_platform, record), (this_platform, record.url)

            # When unknown=True (which is implicitly engaged when context.offline is
            # True), there may be additional items in the cache that are included in
            # the index. But where those items coincide with entries already in the
            # cache, they must not change the record in any way. TODO: add one or
            # more packages to the cache so these tests affirmatively exercise
            # supplement_index_from_cache on CI?

            for unknown in (None, False, True):
                with env_var('CONDA_OFFLINE', 'yes', stack_callback=conda_tests_ctxt_mgmt_def_pol):
                    with patch.object(conda.core.subdir_data, 'fetch_repodata_remote_request') as remote_request:
                        index2 = get_index(channel_urls=channel_urls, prepend=False, unknown=unknown)
                        assert all(index2.get(k) == rec for k, rec in iteritems(index))
                        assert unknown is not False or len(index) == len(index2)
                        assert remote_request.call_count == 0

            for unknown in (False, True):
                with env_var('CONDA_REPODATA_TIMEOUT_SECS', '0', stack_callback=conda_tests_ctxt_mgmt_def_pol):
                    with patch.object(conda.core.subdir_data, 'fetch_repodata_remote_request') as remote_request:
                        remote_request.side_effect = Response304ContentUnchanged()
                        index3 = get_index(channel_urls=channel_urls, prepend=False, unknown=unknown)
                        assert all(index3.get(k) == rec for k, rec in iteritems(index))
                        assert unknown or len(index) == len(index3)

def test_get_index_no_platform_with_offline_cache(self):
    import conda.core.subdir_data
    with env_var('CONDA_REPODATA_TIMEOUT_SECS', '0', reset_context):
        with patch.object(conda.core.subdir_data, 'read_mod_and_etag') as read_mod_and_etag:
            read_mod_and_etag.return_value = {}

            channel_urls = ('https://repo.continuum.io/pkgs/pro',)
            with env_var('CONDA_REPODATA_TIMEOUT_SECS', '0', reset_context):
                this_platform = context.subdir
                index = get_index(channel_urls=channel_urls, prepend=False)
                for dist, record in iteritems(index):
                    assert platform_in_record(this_platform, record), (this_platform, record.url)

            # When unknown=True (which is implicitly engaged when context.offline is
            # True), there may be additional items in the cache that are included in
            # the index. But where those items coincide with entries already in the
            # cache, they must not change the record in any way. TODO: add one or
            # more packages to the cache so these tests affirmatively exercise
            # supplement_index_from_cache on CI?

            for unknown in (None, False, True):
                with env_var('CONDA_OFFLINE', 'yes', reset_context):
                    with patch.object(conda.core.subdir_data, 'fetch_repodata_remote_request') as remote_request:
                        index2 = get_index(channel_urls=channel_urls, prepend=False, unknown=unknown)
                        assert all(index2.get(k) == rec for k, rec in iteritems(index))
                        assert unknown is not False or len(index) == len(index2)
                        assert remote_request.call_count == 0

            for unknown in (False, True):
                with env_var('CONDA_REPODATA_TIMEOUT_SECS', '0', reset_context):
                    with patch.object(conda.core.subdir_data, 'fetch_repodata_remote_request') as remote_request:
                        remote_request.side_effect = Response304ContentUnchanged()
                        index3 = get_index(channel_urls=channel_urls, prepend=False, unknown=unknown)
                        assert all(index3.get(k) == rec for k, rec in iteritems(index))
                        assert unknown or len(index) == len(index3)

def test_get_index_no_platform_with_offline_cache(self):
    import conda.core.index
    with env_var('CONDA_REPODATA_TIMEOUT_SECS', '0', reset_context):
        with patch.object(conda.core.index, 'read_mod_and_etag') as read_mod_and_etag:
            read_mod_and_etag.return_value = {}

            channel_urls = ('https://repo.continuum.io/pkgs/pro',)
            with env_var('CONDA_REPODATA_TIMEOUT_SECS', '0', reset_context):
                this_platform = context.subdir
                index = get_index(channel_urls=channel_urls, prepend=False)
                for dist, record in iteritems(index):
                    assert platform_in_record(this_platform, record), (this_platform, record.url)

            with env_var('CONDA_OFFLINE', 'yes', reset_context):
                with patch.object(conda.core.index, 'fetch_repodata_remote_request') as remote_request:
                    index2 = get_index(channel_urls=channel_urls, prepend=False)
                    assert index2 == index
                    assert remote_request.call_count == 0

            with env_var('CONDA_REPODATA_TIMEOUT_SECS', '0', reset_context):
                with patch.object(conda.core.index, 'fetch_repodata_remote_request') as remote_request:
                    remote_request.side_effect = Response304ContentUnchanged()
                    index3 = get_index(channel_urls=channel_urls, prepend=False)
                    assert index3 == index

def install(prefix, specs, args, env, prune=False):
    # TODO: support all various ways this happens
    # Including 'nodefaults' in the channels list disables the defaults
    new_specs = []
    channel_urls = set()
    for elem in specs:
        if "::" in elem:
            channel_urls.add(elem.split("::")[0])
            new_specs.append(elem.split("::")[-1])
        else:
            new_specs.append(elem)
    specs = new_specs
    channel_urls = list(channel_urls)
    # TODO: support all various ways this happens
    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = channel_urls + [chan for chan in env.channels if chan != 'nodefaults']
    index = get_index(channel_urls=channel_urls,
                      prepend='nodefaults' not in env.channels,
                      prefix=prefix)
    _channel_priority_map = prioritize_channels(channel_urls)
    unlink_link_transaction = get_install_transaction(
        prefix, index, specs, prune=prune,
        channel_priority_map=_channel_priority_map)

    with common.json_progress_bars(json=args.json and not args.quiet):
        pfe = unlink_link_transaction.get_pfe()
        pfe.execute()
        unlink_link_transaction.execute()

def test_check_whitelist():
    whitelist = ('defaults', 'conda-forge', 'https://beta.conda.anaconda.org/conda-test')
    with env_vars({'CONDA_WHITELIST_CHANNELS': ','.join(whitelist)},
                  stack_callback=conda_tests_ctxt_mgmt_def_pol):
        with pytest.raises(ChannelNotAllowed):
            get_index(("conda-canary",))
        with pytest.raises(ChannelNotAllowed):
            get_index(("https://repo.anaconda.com/pkgs/denied",))
        check_whitelist(("defaults",))
        check_whitelist((DEFAULT_CHANNELS[0], DEFAULT_CHANNELS[1]))
        check_whitelist(("https://conda.anaconda.org/conda-forge/linux-64",))

    check_whitelist(("conda-canary",))

def test_check_whitelist():
    # get_index(channel_urls=(), prepend=True, platform=None, use_local=False, use_cache=False, unknown=None, prefix=None)
    whitelist = ('defaults', 'conda-forge', 'https://beta.conda.anaconda.org/conda-test')
    with env_var('CONDA_WHITELIST_CHANNELS', ','.join(whitelist), reset_context):
        with pytest.raises(ChannelNotAllowed):
            get_index(("conda-canary",))
        with pytest.raises(ChannelNotAllowed):
            get_index(("https://repo.anaconda.com/pkgs/denied",))
        check_whitelist(("defaults",))
        check_whitelist((DEFAULT_CHANNELS[0], DEFAULT_CHANNELS[1]))
        check_whitelist(("https://conda.anaconda.org/conda-forge/linux-64",))

    check_whitelist(("conda-canary",))

def get_index_trap(*args, **kwargs):
    """
    Retrieves the package index, but traps exceptions and reports them as
    JSON if necessary.
    """
    from conda.core.index import get_index
    kwargs.pop('json', None)
    return get_index(*args, **kwargs)

def test_ProgressiveFetchExtract_prefers_conda_v2_format():
    index = get_index([CONDA_PKG_REPO], prepend=False)
    rec = next(iter(index))
    for rec in index:
        # zlib is the one package in the test index that has a .conda file record
        if rec.name == 'zlib':
            break

    cache_action, extract_action = ProgressiveFetchExtract.make_actions_for_record(rec)
    assert cache_action.target_package_basename.endswith('.conda')
    assert extract_action.source_full_path.endswith('.conda')

def _get_items(self):
    # TODO: Include .tar.bz2 files for local installs.
    from conda.core.index import get_index
    args = self.parsed_args
    call_dict = dict(channel_urls=args.channel or (),
                     use_cache=True,
                     prepend=not args.override_channels,
                     unknown=args.unknown)
    if hasattr(args, 'platform'):  # in search
        call_dict['platform'] = args.platform
    index = get_index(**call_dict)
    return [record.name for record in index]

def test_ProgressiveFetchExtract_prefers_conda_v2_format():
    index = get_index([CONDA_PKG_REPO], prepend=False)
    rec = next(iter(index))
    cache_action, extract_action = pcd.ProgressiveFetchExtract.make_actions_for_record(rec)
    assert cache_action.target_package_basename.endswith('.conda')
    assert cache_action.sha256sum == rec.conda_outer_sha256
    assert cache_action.expected_size_in_bytes == rec.conda_size
    assert extract_action.source_full_path.endswith('.conda')
    assert extract_action.sha256sum == rec.conda_outer_sha256

def test_ProgressiveFetchExtract_prefers_conda_v2_format():
    # force this to False, because otherwise tests fail when run with old conda-build
    with env_var('CONDA_USE_ONLY_TAR_BZ2', False, stack_callback=conda_tests_ctxt_mgmt_def_pol):
        index = get_index([CONDA_PKG_REPO], prepend=False)
        rec = next(iter(index))
        for rec in index:
            # zlib is the one package in the test index that has a .conda file record
            if rec.name == 'zlib':
                break

        cache_action, extract_action = ProgressiveFetchExtract.make_actions_for_record(rec)
        assert cache_action.target_package_basename.endswith('.conda')
        assert extract_action.source_full_path.endswith('.conda')

def compute_matrix(cls, meta, index=None, extra_conditions=None):
    if index is None:
        with vn_matrix.override_conda_logging('WARN'):
            index = get_index()

    cases = vn_matrix.special_case_version_matrix(meta, index)

    if extra_conditions:
        cases = list(vn_matrix.filter_cases(cases, index, extra_conditions))

    result = []
    for case in cases:
        dist = cls(meta, case)
        if not dist.skip():
            result.append(dist)
    return result

def main(self):
    recipe_metas = self.fetch_all_metas()
    index = get_index()

    print('Resolving distributions from {} recipes... '.format(len(recipe_metas)))

    all_distros = []
    for meta in recipe_metas:
        distros = BakedDistribution.compute_matrix(
            meta, index, getattr(self, 'extra_build_conditions', []))
        all_distros.extend(distros)

    print('Computed that there are {} distributions from the {} '
          'recipes:'.format(len(all_distros), len(recipe_metas)))
    recipes_to_build = self.recipes_to_build(all_distros)

    for meta, build_dist in zip(all_distros, recipes_to_build):
        if build_dist:
            self.build(meta)
        self.post_build(meta, build_occured=build_dist)

def localmirror(packages, channels=None, platform=None, target_dir=None):
    base_dir = Path.cwd() / target_dir
    base_dir.mkdir(exist_ok=True)
    platforms = [platform, 'noarch']

    # defaults aka 'main' URLs
    urls = []
    defaults_base = 'https://repo.anaconda.com/pkgs/'
    paths = ['main', 'r']
    for p in paths:
        for a in platforms:
            urls.append(defaults_base + p + '/' + a + '/current_repodata.json')

    channel_base = 'https://conda.anaconda.org/'
    for c in channels:
        if c == DEFAULT_CHANNEL[0]:
            continue
        for a in platforms:
            urls.append(channel_base + c + '/' + a + '/current_repodata.json')
            urls.append(channel_base + c + '/' + a + '/repodata.json')

    for u in urls:
        # always want to make sure we have the most current repo/dependency information
        download(u, base_dir, overwite=True)

    if 'defaults' not in channels:
        channels = list(channels)
        channels.append('defaults')

    index = get_index(
        channel_urls=channels,
        platform=platform,
        prepend=False,
    )
    solver = Resolve(index, channels=channels)
    to_download = solver.install(packages)
    for d in to_download:
        download(d.url, base_dir)

def clone_env(prefix1, prefix2, verbose=True, quiet=False, index_args=None):
    """
    clone existing prefix1 into new prefix2
    """
    untracked_files = untracked(prefix1)

    # Discard conda and any package that depends on it
    drecs = linked_data(prefix1)
    filter = {}
    found = True
    while found:
        found = False
        for dist, info in iteritems(drecs):
            name = info['name']
            if name in filter:
                continue
            if name == 'conda':
                filter['conda'] = dist
                found = True
                break
            for dep in info.get('depends', []):
                if MatchSpec(dep).name in filter:
                    filter[name] = dist
                    found = True

    if filter:
        if not quiet:
            print('The following packages cannot be cloned out of the root environment:')
            for pkg in itervalues(filter):
                print(' - ' + pkg)
        drecs = {dist: info for dist, info in iteritems(drecs) if info['name'] not in filter}

    # Resolve URLs for packages that do not have URLs
    r = None
    index = {}
    unknowns = [dist for dist, info in iteritems(drecs) if not info.get('url')]
    notfound = []
    if unknowns:
        index_args = index_args or {}
        index = get_index(**index_args)
        r = Resolve(index, sort=True)
        for dist in unknowns:
            name = dist.dist_name
            fn = dist.to_filename()
            fkeys = [d for d in r.index.keys() if r.index[d]['fn'] == fn]
            if fkeys:
                del drecs[dist]
                dist_str = sorted(fkeys, key=r.version_key, reverse=True)[0]
                drecs[Dist(dist_str)] = r.index[dist_str]
            else:
                notfound.append(fn)
    if notfound:
        what = "Package%s " % ('' if len(notfound) == 1 else 's')
        notfound = '\n'.join(' - ' + fn for fn in notfound)
        msg = '%s missing in current %s channels:%s' % (what, context.subdir, notfound)
        raise CondaRuntimeError(msg)

    # Assemble the URL and channel list
    urls = {}
    for dist, info in iteritems(drecs):
        fkey = dist
        if fkey not in index:
            info['not_fetched'] = True
            index[fkey] = info
            r = None
        urls[dist] = info['url']

    if r is None:
        r = Resolve(index)
    dists = r.dependency_sort({d.quad[0]: d for d in urls.keys()})
    urls = [urls[d] for d in dists]

    if verbose:
        print('Packages: %d' % len(dists))
        print('Files: %d' % len(untracked_files))

    for f in untracked_files:
        src = join(prefix1, f)
        dst = join(prefix2, f)
        dst_dir = dirname(dst)
        if islink(dst_dir) or isfile(dst_dir):
            rm_rf(dst_dir)
        if not isdir(dst_dir):
            os.makedirs(dst_dir)
        if islink(src):
            os.symlink(os.readlink(src), dst)
            continue

        try:
            with open(src, 'rb') as fi:
                data = fi.read()
        except IOError:
            continue

        try:
            s = data.decode('utf-8')
            s = s.replace(prefix1, prefix2)
            data = s.encode('utf-8')
        except UnicodeDecodeError:
            # data is binary
            pass

        with open(dst, 'wb') as fo:
            fo.write(data)
        shutil.copystat(src, dst)

    actions = explicit(urls, prefix2, verbose=not quiet, index=index,
                       force_extract=False, index_args=index_args)
    return actions, untracked_files

def test_get_index_linux64_platform(self):
    linux64 = 'linux-64'
    index = get_index(platform=linux64)
    for dist, record in iteritems(index):
        assert platform_in_record(linux64, record), (linux64, record.url)

def test_get_index(self):
    with patch('conda.core.repodata.fetch_repodata') as fetch_repodata:
        linux64 = 'linux-64'
        index = _index.get_index(platform=linux64)

def execute(args, parser):
    import conda.plan as plan
    import conda.instructions as inst
    from conda.gateways.disk.delete import rm_rf
    from conda.core.linked_data import linked_data

    if not (args.all or args.package_names):
        raise CondaValueError('no package names supplied,\n'
                              ' try "conda remove -h" for more details')

    prefix = context.prefix_w_legacy_search
    if args.all and prefix == context.default_prefix:
        msg = "cannot remove current environment. deactivate and run conda remove again"
        raise CondaEnvironmentError(msg)
    check_write('remove', prefix, json=context.json)
    ensure_use_local(args)
    ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()
    if not args.features and args.all:
        index = linked_data(prefix)
        index = {dist: info for dist, info in iteritems(index)}
    else:
        index = get_index(channel_urls=channel_urls,
                          prepend=not args.override_channels,
                          use_local=args.use_local,
                          use_cache=args.use_index_cache,
                          prefix=prefix)
    specs = None
    if args.features:
        features = set(args.package_names)
        actions = plan.remove_features_actions(prefix, index, features)
    elif args.all:
        if plan.is_root_prefix(prefix):
            raise CondaEnvironmentError('cannot remove root environment,\n'
                                        ' add -n NAME or -p PREFIX option')
        actions = {inst.PREFIX: prefix}
        for dist in sorted(iterkeys(index)):
            plan.add_unlink(actions, dist)
    else:
        specs = specs_from_args(args.package_names)
        # import pdb; pdb.set_trace()
        if (context.conda_in_root and plan.is_root_prefix(prefix) and
                names_in_specs(ROOT_NO_RM, specs) and not args.force):
            raise CondaEnvironmentError('cannot remove %s from root environment' %
                                        ', '.join(ROOT_NO_RM))
        actions = plan.remove_actions(prefix, specs, index=index,
                                      force=args.force, pinned=args.pinned)

    delete_trash()
    if plan.nothing_to_do(actions):
        if args.all:
            print("\nRemove all packages in environment %s:\n" % prefix, file=sys.stderr)
            if not context.json:
                confirm_yn(args)
            rm_rf(prefix)

            if context.json:
                stdout_json({
                    'success': True,
                    'actions': actions
                })
            return
        raise PackageNotFoundError('', 'no packages found to remove from '
                                   'environment: %s' % prefix)

    if not context.json:
        print()
        print("Package plan for package removal in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if context.json and args.dry_run:
        stdout_json({
            'success': True,
            'dry_run': True,
            'actions': actions
        })
        return

    if not context.json:
        confirm_yn(args)

    if context.json and not context.quiet:
        with json_progress_bars():
            plan.execute_actions(actions, index, verbose=not context.quiet)
    else:
        plan.execute_actions(actions, index, verbose=not context.quiet)

    if specs:
        try:
            with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                f.write('# remove specs: %s\n' % ','.join(specs))
        except IOError as e:
            if e.errno == errno.EACCES:
                log.debug("Can't write the history file")
            else:
                raise

    if args.all:
        rm_rf(prefix)

    if context.json:
        stdout_json({
            'success': True,
            'actions': actions
        })

def test_get_index_win64_platform(self):
    win64 = 'win-64'
    index = get_index(platform=win64)
    for dist, record in iteritems(index):
        assert platform_in_record(win64, record), (win64, record.url)

def test_ProgressiveFetchExtract_prefers_conda_v2_format():
    index = get_index([CONDA_PKG_REPO], prepend=False)
    rec = next(iter(index))
    cache_action, extract_action = ProgressiveFetchExtract.make_actions_for_record(rec)
    assert cache_action.target_package_basename.endswith('.conda')
    assert extract_action.source_full_path.endswith('.conda')

def explicit(specs, prefix, verbose=False, force_extract=True, index_args=None, index=None):
    actions = defaultdict(list)
    actions['PREFIX'] = prefix
    actions['op_order'] = RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT, UNLINK, LINK, SYMLINK_CONDA

    linked = {dist.dist_name: dist for dist in install_linked(prefix)}
    index_args = index_args or {}
    index = index or {}
    verifies = []  # List[Tuple(filename, md5)]
    channels = set()
    for spec in specs:
        if spec == '@EXPLICIT':
            continue

        # Format: (url|path)(:#md5)?
        m = url_pat.match(spec)
        if m is None:
            raise ParseError('Could not parse explicit URL: %s' % spec)
        url_p, fn, md5 = m.group('url_p'), m.group('fn'), m.group('md5')
        if not is_url(url_p):
            if url_p is None:
                url_p = curdir
            elif not isdir(url_p):
                raise CondaFileNotFoundError(join(url_p, fn))
            url_p = path_to_url(url_p).rstrip('/')
        url = "{0}/{1}".format(url_p, fn)

        # is_local: if the tarball is stored locally (file://)
        # is_cache: if the tarball is sitting in our cache
        is_local = not is_url(url) or url.startswith('file://')
        prefix = cached_url(url) if is_local else None
        is_cache = prefix is not None
        if is_cache:
            # Channel information from the cache
            schannel = DEFAULTS if prefix == '' else prefix[:-2]
        else:
            # Channel information from the URL
            channel, schannel = Channel(url).url_channel_wtf
            prefix = '' if schannel == DEFAULTS else schannel + '::'

        fn = prefix + fn
        dist = Dist(fn[:-8])
        # Add explicit file to index so we'll be sure to see it later
        if is_local:
            index[dist] = Record(**{
                'fn': dist.to_filename(),
                'url': url,
                'md5': md5,
                'build': dist.quad[2],
                'build_number': dist.build_number(),
                'name': dist.quad[0],
                'version': dist.quad[1],
            })
            verifies.append((fn, md5))

        pkg_path = is_fetched(dist)
        dir_path = is_extracted(dist)

        # Don't re-fetch unless there is an MD5 mismatch
        # Also remove explicit tarballs from cache, unless the path *is* to the cache
        if pkg_path and not is_cache and (is_local or md5 and md5_file(pkg_path) != md5):
            # This removes any extracted copies as well
            actions[RM_FETCHED].append(dist)
            pkg_path = dir_path = None

        # Don't re-extract unless forced, or if we can't check the md5
        if dir_path and (force_extract or md5 and not pkg_path):
            actions[RM_EXTRACTED].append(dist)
            dir_path = None

        if not dir_path:
            if not pkg_path:
                pkg_path, conflict = find_new_location(dist)
                pkg_path = join(pkg_path, dist.to_filename())
                if conflict:
                    actions[RM_FETCHED].append(Dist(conflict))
                if not is_local:
                    if dist not in index or index[dist].get('not_fetched'):
                        channels.add(schannel)
                    verifies.append((dist.to_filename(), md5))
                actions[FETCH].append(dist)
            actions[EXTRACT].append(dist)

        # unlink any installed package with that name
        name = dist.dist_name
        if name in linked:
            actions[UNLINK].append(linked[name])

        ######################################
        # copied from conda/plan.py TODO: refactor
        ######################################

        # check for link action
        fetched_dist = dir_path or pkg_path[:-8]
        fetched_dir = dirname(fetched_dist)
        try:
            # Determine what kind of linking is necessary
            if not dir_path:
                # If not already extracted, create some dummy
                # data to test with
                rm_rf(fetched_dist)
                ppath = join(fetched_dist, 'info')
                os.makedirs(ppath)
                index_json = join(ppath, 'index.json')
                with open(index_json, 'w'):
                    pass
            if context.always_copy:
                lt = LINK_COPY
            elif try_hard_link(fetched_dir, prefix, dist):
                lt = LINK_HARD
            elif context.allow_softlinks and not on_win:
                lt = LINK_SOFT
            else:
                lt = LINK_COPY
            actions[LINK].append('%s %d' % (dist, lt))
        except (OSError, IOError):
            actions[LINK].append('%s %d' % (dist, LINK_COPY))
        finally:
            if not dir_path:
                # Remove the dummy data
                try:
                    rm_rf(fetched_dist)
                except (OSError, IOError):
                    pass

    ######################################
    # ^^^^^^^^^^ copied from conda/plan.py
    ######################################

    # Pull the repodata for channels we are using
    if channels:
        index_args = index_args or {}
        index_args = index_args.copy()
        index_args['prepend'] = False
        index_args['channel_urls'] = list(channels)
        index.update(get_index(**index_args))

    # Finish the MD5 verification
    for fn, md5 in verifies:
        info = index.get(Dist(fn))
        if info is None:
            raise PackageNotFoundError(fn, "no package '%s' in index" % fn)
        if md5 and 'md5' not in info:
            sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
        if md5 and info['md5'] != md5:
            raise MD5MismatchError('MD5 mismatch for: %s\n spec: %s\n repo: %s'
                                   % (fn, md5, info['md5']))

    execute_actions(actions, index=index, verbose=verbose)
    return actions

def test_get_index_osx64_platform(self):
    osx64 = 'osx-64'
    index = get_index(platform=osx64)
    for dist, record in iteritems(index):
        assert platform_in_record(osx64, record), (osx64, record.url)