def display_actions(actions, index=None):
    """Print a human-readable summary of a plan's FETCH/UNLINK/LINK actions.

    actions : dict mapping action names (FETCH, UNLINK, LINK) to lists of
              dists / link args.
    index   : optional package index keyed by '<dist>.tar.bz2'; when given,
              download sizes (and channels, if configured) are shown.
    """
    if actions.get(FETCH):
        print("\nThe following packages will be downloaded:\n")
        disp_lst = []
        for dist in actions[FETCH]:
            # Bug fix: 'index' defaults to None but was dereferenced
            # unconditionally here, so displaying FETCH actions without an
            # index raised TypeError.  Fall back to showing no size info.
            info = index[dist + '.tar.bz2'] if index else None
            if info is None:
                extra = ''
            else:
                extra = '%15s' % human_bytes(info['size'])
                if config.show_channel_urls:
                    extra += ' %s' % config.canonical_channel_name(
                        info.get('channel'))
            disp_lst.append((dist, extra))
        print_dists(disp_lst)
        if index and len(actions[FETCH]) > 1:
            # separator line plus the grand total of all download sizes
            print(' ' * 4 + '-' * 60)
            print(" " * 43 + "Total: %14s" % human_bytes(
                sum(index[dist + '.tar.bz2']['size']
                    for dist in actions[FETCH])))
    if actions.get(UNLINK):
        print("\nThe following packages will be UN-linked:\n")
        print_dists([(dist, None) for dist in actions[UNLINK]])
    if actions.get(LINK):
        print("\nThe following packages will be linked:\n")
        lst = []
        for arg in actions[LINK]:
            dist, pkgs_dir, lt = split_linkarg(arg)
            # show the link type (hard-link, soft-link, copy, ...)
            extra = ' %s' % install.link_name_map.get(lt)
            lst.append((dist, extra))
        print_dists(lst)
    print()
def list_packages(prefix, installed, regex=None, format='human'):
    """Build display lines for the packages in *installed* matching *regex*.

    Returns a (exit_code, lines) pair: exit_code is 0 when at least one
    package matched, 1 otherwise.  'format' selects the line style:
    'canonical' (raw dist), 'export' ('name=version=build'), or the default
    human-readable aligned columns.
    """
    lines = []
    exit_code = 1
    for dist in get_packages(installed, regex):
        exit_code = 0
        if format == 'canonical':
            lines.append(dist)
        elif format == 'export':
            lines.append('='.join(dist.rsplit('-', 2)))
        else:
            try:
                # is_linked() returns None when no meta-file exists (e.g. a
                # pip-installed package) -> AttributeError handled below
                info = install.is_linked(prefix, dist)
                feats = set(info.get('features', '').split())
                entry = '%(name)-25s %(version)-15s %(build)15s' % info
                entry += ' %s' % common.disp_features(feats)
                if config.show_channel_urls:
                    entry += ' %s' % config.canonical_channel_name(
                        info.get('url'))
            except (AttributeError, IOError, KeyError, ValueError) as exc:
                log.debug(str(exc))
                # fall back to the bare name/version/build from the dist name
                entry = '%-25s %-15s %15s' % tuple(dist.rsplit('-', 2))
            lines.append(entry)
    return exit_code, lines
def display_actions(actions, index=None):
    # Print a human-readable summary of the FETCH/UNLINK/LINK actions in a plan.
    # NOTE(review): 'index' defaults to None but is dereferenced unconditionally
    # in the FETCH branch below — calling this with FETCH actions and no index
    # raises TypeError.  Confirm callers always pass an index alongside FETCH.
    if actions.get(FETCH):
        print("\nThe following packages will be downloaded:\n")
        disp_lst = []
        for dist in actions[FETCH]:
            info = index[dist + '.tar.bz2']
            # right-aligned human-readable download size
            extra = '%15s' % human_bytes(info['size'])
            if config.show_channel_urls:
                extra += ' %s' % config.canonical_channel_name(
                    info.get('channel'))
            disp_lst.append((dist, extra))
        print_dists(disp_lst)
        if index and len(actions[FETCH]) > 1:
            # separator line plus the grand total of all download sizes
            print(' ' * 4 + '-' * 60)
            print(" " * 43 + "Total: %14s" %
                  human_bytes(sum(index[dist + '.tar.bz2']['size']
                                  for dist in actions[FETCH])))
    if actions.get(UNLINK):
        print("\nThe following packages will be UN-linked:\n")
        print_dists([(dist, None) for dist in actions[UNLINK]])
    if actions.get(LINK):
        print("\nThe following packages will be linked:\n")
        lst = []
        for arg in actions[LINK]:
            dist, pkgs_dir, lt = split_linkarg(arg)
            # show the link type (hard-link, soft-link, copy, ...)
            extra = ' %s' % install.link_name_map.get(lt)
            lst.append((dist, extra))
        print_dists(lst)
    print()
def pretty_package(pkg):
    """Print a formatted, human-readable description of *pkg*."""
    from conda.utils import human_bytes
    from conda.config import canonical_channel_name

    # keys already covered by the fixed header entries below
    skip = {'build', 'depends', 'requires', 'channel', 'name',
            'version', 'build_number', 'size'}
    details = OrderedDict([
        ('file name', pkg.fn),
        ('name', pkg.name),
        ('version', pkg.version),
        ('build number', pkg.build_number),
        ('build string', pkg.build),
        ('channel', canonical_channel_name(pkg.channel)),
        ('size', human_bytes(pkg.info['size'])),
    ])
    # append any remaining metadata keys, alphabetically
    for key in sorted(pkg.info):
        if key not in skip:
            details[key] = pkg.info[key]

    print()
    header = "%s %s %s" % (details['name'], details['version'],
                           details['build string'])
    print(header)
    print('-' * len(header))
    for key, value in details.items():
        print("%-12s: %s" % (key, value))
    print('dependencies:')
    for dep in pkg.info['depends']:
        print(' %s' % dep)
def format(s, pkg):
    # Render one old/new table row: fill the new-style template *s* with the
    # package's versions, canonical channel names and feature strings.
    # Uses the enclosing scope's packages/Packages/features/lead variables.
    chans = []
    for vers in packages[pkg]:
        if vers:
            chans.append(config.canonical_channel_name(
                Packages[pkg + '-' + vers].channel))
        else:
            chans.append('')
    return lead + s.format(pkg=pkg + ':',
                           vers=packages[pkg],
                           channel=chans,
                           features=features[pkg])
def get_index(channel_urls=(), prepend=True, platform=None,
              use_cache=False, unknown=False, offline=False,
              prefix=None):
    """
    Return the index of packages available on the channels

    If prepend=False, only the channels passed in as arguments are used.
    If platform=None, then the current platform is used.
    If prefix is supplied, then the packages installed in that prefix are added.
    """
    channel_urls = config.normalize_urls(channel_urls, platform, offline)
    if prepend:
        # shift the default channels' priorities above every explicit channel
        # (assumes the values of channel_urls compare/add sensibly here —
        #  presumably priorities; TODO confirm normalize_urls' value shape)
        pri0 = max(itervalues(channel_urls)) if channel_urls else 0
        for url, rec in iteritems(config.get_channel_urls(platform, offline)):
            channel_urls[url] = (rec[0], rec[1] + pri0)
    index = fetch_index(channel_urls, use_cache=use_cache, unknown=unknown)
    if prefix:
        # merge in locally installed packages that the repodata may not know
        for dist, info in iteritems(install.linked_data(prefix)):
            fn = dist + '.tar.bz2'
            channel = info.get('channel', '')
            if channel not in channel_urls:
                # unknown channel: register it with priority 0
                channel_urls[channel] = (
                    config.canonical_channel_name(channel, True, True), 0)
            url_s, priority = channel_urls[channel]
            # index keys are 'schannel::fn', or bare fn for the default channel
            key = url_s + '::' + fn if url_s else fn
            if key not in index:
                # only if the package is not in the repodata, use local
                # conda-meta (with 'depends' defaulting to [])
                info.setdefault('depends', [])
                info['fn'] = fn
                info['schannel'] = url_s
                info['channel'] = channel
                info['url'] = channel + fn
                info['priority'] = priority
                index[key] = info
    return index
def pretty_package(pkg):
    # Print a formatted description of *pkg*: core metadata, any extra info
    # keys, the environments it is installed in, and its dependencies.
    import conda.config as config
    from conda.utils import human_bytes
    from conda.api import app_is_installed
    d = OrderedDict([
        ('file name', pkg.fn),
        ('name', pkg.name),
        ('version', pkg.version),
        ('build number', pkg.build_number),
        ('build string', pkg.build),
        ('channel', config.canonical_channel_name(pkg.channel)),
        ('size', human_bytes(pkg.info['size'])),
    ])
    # defensive copy of the metadata dict before iterating it
    rest = pkg.info.copy()
    for key in sorted(rest):
        # skip keys already covered by the fixed entries above
        if key in ['build', 'depends', 'requires', 'channel', 'name',
                   'version', 'build_number', 'size']:
            continue
        d[key] = rest[key]
    print()
    header = "%s %s %s" % (d['name'], d['version'], d['build string'])
    print(header)
    print('-'*len(header))
    for key in d:
        print("%-12s: %s" % (key, d[key]))
    print("installed environments:")
    for env in app_is_installed(pkg.fn):
        print(' %s' % env)
    print('dependencies:')
    for dep in pkg.info['depends']:
        print(' %s' % dep)
def format(s, pkg):
    # Fill the row template *s* (a new-style format string) for *pkg*,
    # prefixing the common indent.  Relies on the enclosing scope's
    # packages / Packages / features / lead variables.
    channel = ['', '']
    for i in range(2):
        # slot 0 = old version, slot 1 = new version; resolve the channel
        # only for slots that actually hold a version string
        if packages[pkg][i]:
            channel[i] = config.canonical_channel_name(
                Packages[pkg + '-' + packages[pkg][i]].channel)
    return lead + s.format(pkg=pkg+':', vers=packages[pkg],
                           channel=channel, features=features[pkg])
def list_packages(prefix, installed, regex=None, format="human",
                  show_channel_urls=config.show_channel_urls):
    """Build display lines for the packages in *installed* matching *regex*.

    Returns (exit_code, lines): exit_code is 0 when at least one package
    matched, 1 otherwise.  'format' selects 'canonical' (raw dist),
    'export' ('name=version=build'), or human-readable aligned columns;
    show_channel_urls appends the canonical channel to human output.
    """
    out = []
    status = 1
    for dist in get_packages(installed, regex):
        status = 0
        if format == "canonical":
            out.append(dist)
        elif format == "export":
            out.append("=".join(dist.rsplit("-", 2)))
        else:
            try:
                # is_linked() returns None when there is no meta-file
                # (e.g. pip install) -> AttributeError handled below
                info = install.is_linked(prefix, dist)
                feats = set(info.get("features", "").split())
                line = "%(name)-25s %(version)-15s %(build)15s" % info
                line += " %s" % common.disp_features(feats)
                if show_channel_urls:
                    line += " %s" % config.canonical_channel_name(
                        info.get("url"))
            except (AttributeError, IOError, KeyError, ValueError) as exc:
                log.debug(str(exc))
                # fall back to the name/version/build parsed from the dist
                line = "%-25s %-15s %15s" % tuple(dist.rsplit("-", 2))
            out.append(line)
    return status, out
def __init__(self, fn, info):
    # Build a package record from a filename and its repodata/info dict.
    self.fn = fn
    for attr in ('name', 'version', 'build', 'build_number',
                 'channel', 'schannel'):
        setattr(self, attr, info.get(attr))
    if self.schannel is None:
        # derive the short channel name when not supplied explicitly
        self.schannel = canonical_channel_name(self.channel)
    try:
        # pre-compute a comparable version key; malformed versions are fatal
        self.norm_version = normalized_version(self.version)
    except ValueError:
        stderrlog.error("\nThe following stack trace is in reference to "
                        "package:\n\n\t%s\n\n" % fn)
        raise
    self.info = info
def __init__(self, fn, info):
    # Initialize a package record from its filename and metadata dict.
    self.fn = fn
    self.name = info.get('name')
    self.version = info.get('version')
    self.build = info.get('build')
    self.build_number = info.get('build_number')
    self.channel = info.get('channel')
    self.schannel = info.get('schannel')
    if self.schannel is None:
        # derive the short/canonical channel name when not given explicitly
        self.schannel = config.canonical_channel_name(self.channel)
    try:
        # pre-compute a comparable version key; a malformed version is fatal
        self.norm_version = normalized_version(self.version)
    except ValueError:
        stderrlog.error("\nThe following stack trace is in reference to "
                        "package:\n\n\t%s\n\n" % fn)
        raise
    self.info = info
def list_packages(prefix, regex=None, format='human', piplist=False):
    """Print the packages installed in *prefix* that match *regex*.

    format: 'human' (aligned columns with an environment header),
    'canonical' (raw dists), or 'export' ('name=version=build' lines).
    piplist additionally merges pip-installed packages into human output.
    Returns 1 when nothing matched, 0 otherwise.  Exits via sys.exit when
    *prefix* is not an existing directory.
    """
    if not isdir(prefix):
        sys.exit("""\
Error: environment does not exist: %s
#
# Use 'conda create' to create an environment before listing its packages.""" % prefix)
    pat = re.compile(regex, re.I) if regex else None
    if format == 'human':
        print('# packages in environment at %s:' % prefix)
        print('#')
    res = 1
    if format == 'export':
        print_export_header()
    installed = install.linked(prefix)
    if piplist and config.use_pip and format == 'human':
        add_pip_installed(prefix, installed)
    for dist in sorted(installed):
        name = dist.rsplit('-', 2)[0]
        if pat and pat.search(name) is None:
            continue
        res = 0
        if format == 'canonical':
            print(dist)
            continue
        if format == 'export':
            print('='.join(dist.rsplit('-', 2)))
            continue
        try:
            # Returns None if no meta-file found (e.g. pip install)
            info = install.is_linked(prefix, dist)
            features = set(info.get('features', '').split())
            disp = '%(name)-25s %(version)-15s %(build)15s' % info
            disp += ' %s' % common.disp_features(features)
            if config.show_channel_urls:
                disp += ' %s' % config.canonical_channel_name(info.get('url'))
            print(disp)
        except (AttributeError, IOError, KeyError, ValueError):
            # was a bare 'except:', which also swallowed SystemExit and
            # KeyboardInterrupt; catch only the failures the fallback is for
            # (same tuple as the sibling list_packages implementation)
            print('%-25s %-15s %15s' % tuple(dist.rsplit('-', 2)))
    return res
def list_packages(prefix, regex=None, format='human', piplist=False):
    """Print the packages installed in *prefix* that match *regex*.

    format: 'human' (aligned columns with an environment header),
    'canonical' (raw dists), or 'export' ('name=version=build' lines).
    piplist additionally merges pip-installed packages into human output.
    Returns 1 when nothing matched, 0 otherwise.  Exits via sys.exit when
    *prefix* is not an existing directory.
    """
    if not isdir(prefix):
        sys.exit("""\
Error: environment does not exist: %s
#
# Use 'conda create' to create an environment before listing its packages.""" % prefix)
    pat = re.compile(regex, re.I) if regex else None
    if format == 'human':
        print('# packages in environment at %s:' % prefix)
        print('#')
    if format == 'export':
        print_export_header()
    res = 1
    installed = install.linked(prefix)
    if piplist and config.use_pip and format == 'human':
        add_pip_installed(prefix, installed)
    for dist in sorted(installed):
        name = dist.rsplit('-', 2)[0]
        if pat and pat.search(name) is None:
            continue
        res = 0
        if format == 'canonical':
            print(dist)
            continue
        if format == 'export':
            print('='.join(dist.rsplit('-', 2)))
            continue
        try:
            # Returns None if no meta-file found (e.g. pip install)
            info = install.is_linked(prefix, dist)
            features = set(info.get('features', '').split())
            disp = '%(name)-25s %(version)-15s %(build)15s' % info
            disp += ' %s' % common.disp_features(features)
            if config.show_channel_urls:
                disp += ' %s' % config.canonical_channel_name(info.get('url'))
            print(disp)
        except (AttributeError, IOError, KeyError, ValueError):
            # was a bare 'except:', which also swallowed SystemExit and
            # KeyboardInterrupt; catch only the failures the fallback is for
            # (same tuple as the sibling list_packages implementation)
            print('%-25s %-15s %15s' % tuple(dist.rsplit('-', 2)))
    return res
def display_actions(actions, index):
    # Print a full human-readable summary of a plan: the download list plus
    # an aligned old->new table of NEW / REMOVED / UPDATED / DOWNGRADED
    # packages, with channels, features and link types where relevant.
    if actions.get(FETCH):
        print("\nThe following packages will be downloaded:\n")
        disp_lst = []
        for dist in actions[FETCH]:
            info = index[dist + '.tar.bz2']
            extra = '%15s' % human_bytes(info['size'])
            if config.show_channel_urls:
                extra += ' %s' % config.canonical_channel_name(
                    info.get('channel'))
            disp_lst.append((dist, extra))
        print_dists(disp_lst)
        if index and len(actions[FETCH]) > 1:
            # separator plus a grand total of all download sizes
            print(' ' * 4 + '-' * 60)
            print(" " * 43 + "Total: %14s" % human_bytes(
                sum(index[dist + '.tar.bz2']['size']
                    for dist in actions[FETCH])))
    # package -> [oldver-oldbuild, newver-newbuild]
    packages = defaultdict(lambda: list(('', '')))
    features = defaultdict(lambda: list(('', '')))
    # This assumes each package will appear in LINK no more than once.
    Packages = {}
    linktypes = {}
    for arg in actions.get(LINK, []):
        dist, pkgs_dir, lt = split_linkarg(arg)
        pkg, ver, build = dist.rsplit('-', 2)
        packages[pkg][1] = ver + '-' + build
        Packages[dist] = Package(dist + '.tar.bz2', index[dist + '.tar.bz2'])
        linktypes[pkg] = lt
        features[pkg][1] = index[dist + '.tar.bz2'].get('features', '')
    for arg in actions.get(UNLINK, []):
        dist, pkgs_dir, lt = split_linkarg(arg)
        pkg, ver, build = dist.rsplit('-', 2)
        packages[pkg][0] = ver + '-' + build
        # If the package is not in the index (e.g., an installed
        # package that is not in the index any more), we just have
        # to fake the metadata.
        info = index.get(dist + '.tar.bz2',
                         dict(name=pkg, version=ver,
                              build_number=int(build) if build.isdigit() else 0,
                              build=build, channel=None))
        Packages[dist] = Package(dist + '.tar.bz2', info)
        features[pkg][0] = info.get('features', '')

    # Column widths for the aligned table below.
    #              Put a minimum length here---.    .--For the ':'
    #                                          v    v
    maxpkg = max(len(max(packages or [''], key=len)), 0) + 1
    maxoldver = len(max(packages.values() or [['']],
                        key=lambda i: len(i[0]))[0])
    maxnewver = len(max(packages.values() or [['', '']],
                        key=lambda i: len(i[1]))[1])
    maxoldfeatures = len(max(features.values() or [['']],
                             key=lambda i: len(i[0]))[0])
    maxnewfeatures = len(max(features.values() or [['', '']],
                             key=lambda i: len(i[1]))[1])
    maxoldchannel = len(max([config.canonical_channel_name(
                                 Packages[pkg + '-' + packages[pkg][0]].channel)
                             for pkg in packages if packages[pkg][0]] or [''],
                            key=len))
    maxnewchannel = len(max([config.canonical_channel_name(
                                 Packages[pkg + '-' + packages[pkg][1]].channel)
                             for pkg in packages if packages[pkg][1]] or [''],
                            key=len))
    # classify: no old version -> new install; no new version -> removal
    new = {pkg for pkg in packages if not packages[pkg][0]}
    removed = {pkg for pkg in packages if not packages[pkg][1]}
    updated = set()
    downgraded = set()
    oldfmt = {}
    newfmt = {}
    for pkg in packages:
        # That's right. I'm using old-style string formatting to generate a
        # string with new-style string formatting.
        oldfmt[pkg] = '{pkg:<%s} {vers[0]:<%s}' % (maxpkg, maxoldver)
        if config.show_channel_urls:
            oldfmt[pkg] += ' {channel[0]:<%s}' % maxoldchannel
        if packages[pkg][0]:
            newfmt[pkg] = '{vers[1]:<%s}' % maxnewver
        else:
            newfmt[pkg] = '{pkg:<%s} {vers[1]:<%s}' % (maxpkg, maxnewver)
        if config.show_channel_urls:
            newfmt[pkg] += ' {channel[1]:<%s}' % maxnewchannel
        # TODO: Should we also care about the old package's link type?
        if pkg in linktypes and linktypes[pkg] != install.LINK_HARD:
            # annotate non-default link types (soft-link / copy)
            newfmt[pkg] += ' (%s)' % install.link_name_map[linktypes[pkg]]
        if features[pkg][0]:
            oldfmt[pkg] += ' [{features[0]:<%s}]' % maxoldfeatures
        if features[pkg][1]:
            newfmt[pkg] += ' [{features[1]:<%s}]' % maxnewfeatures
        if pkg in new or pkg in removed:
            continue
        P0 = Packages[pkg + '-' + packages[pkg][0]]
        P1 = Packages[pkg + '-' + packages[pkg][1]]
        try:
            # <= here means that unchanged packages will be put in updated
            newer = (P0.name, P0.norm_version, P0.build_number) <= (
                P1.name, P1.norm_version, P1.build_number)
        except TypeError:
            # norm_version values that don't compare: fall back to raw strings
            newer = (P0.name, P0.version, P0.build_number) <= (
                P1.name, P1.version, P1.build_number)
        if newer:
            updated.add(pkg)
        else:
            downgraded.add(pkg)
    arrow = ' --> '
    lead = ' ' * 4

    def format(s, pkg):
        # fill a row template with versions, channels and features
        channel = ['', '']
        for i in range(2):
            if packages[pkg][i]:
                channel[i] = config.canonical_channel_name(
                    Packages[pkg + '-' + packages[pkg][i]].channel)
        return lead + s.format(pkg=pkg + ':', vers=packages[pkg],
                               channel=channel, features=features[pkg])

    if new:
        print("\nThe following NEW packages will be INSTALLED:\n")
        for pkg in sorted(new):
            print(format(newfmt[pkg], pkg))
    if removed:
        print("\nThe following packages will be REMOVED:\n")
        for pkg in sorted(removed):
            print(format(oldfmt[pkg], pkg))
    if updated:
        print("\nThe following packages will be UPDATED:\n")
        for pkg in sorted(updated):
            print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg))
    if downgraded:
        print("\nThe following packages will be DOWNGRADED:\n")
        for pkg in sorted(downgraded):
            print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg))
    print()
def execute_search(args, parser):
    # Implement 'conda search': list packages matching a regex or --spec,
    # marking each dist as installed ('*'), extracted ('.') or neither,
    # with optional JSON output.
    import re
    import sys
    from conda.api import get_index
    from conda.resolve import MatchSpec, Resolve

    pat = None
    ms = None
    if args.regex:
        if args.spec:
            # --spec: interpret 'name=version=build' as a MatchSpec
            ms = MatchSpec(' '.join(args.regex.split('=')))
        else:
            try:
                pat = re.compile(args.regex, re.I)
            except re.error as e:
                common.error_and_exit(
                    "%r is not a valid regex pattern (exception: %s)" %
                    (args.regex, e),
                    json=args.json,
                    error_type="ValueError")

    prefix = common.get_prefix(args)
    if not args.canonical:
        # installed/extracted status is only shown in non-canonical output
        import conda.config
        import conda.install

        linked = conda.install.linked(prefix)
        extracted = set()
        for pkgs_dir in conda.config.pkgs_dirs:
            extracted.update(conda.install.extracted(pkgs_dir))

    # XXX: Make this work with more than one platform
    platform = args.platform or ''
    if platform and platform != config.subdir:
        args.unknown = False
    common.ensure_override_channels_requires_channel(args, dashc=False,
                                                    json=args.json)
    channel_urls = args.channel or ()
    index = common.get_index_trap(channel_urls=channel_urls,
                                  prepend=not args.override_channels,
                                  platform=args.platform,
                                  use_cache=args.use_index_cache,
                                  unknown=args.unknown,
                                  json=args.json)

    r = Resolve(index)
    # NOTE: 'json' here shadows the name used for args.json output mode:
    # a list for canonical output, a dict of name -> records otherwise
    if args.canonical:
        json = []
    else:
        json = {}

    for name in sorted(r.groups):
        disp_name = name
        if pat and pat.search(name) is None:
            continue
        if ms and name != ms.name:
            continue
        if ms:
            ms_name = ms
        else:
            ms_name = MatchSpec(name)
        if not args.canonical:
            json[name] = []
        if args.outdated:
            # only show packages whose installed version is not the latest
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            if not vers_inst:
                continue
            assert len(vers_inst) == 1, name
            pkgs = sorted(r.get_pkgs(ms_name))
            if not pkgs:
                continue
            latest = pkgs[-1]
            if latest.version == vers_inst[0]:
                continue
        for pkg in sorted(r.get_pkgs(ms_name)):
            dist = pkg.fn[:-8]   # strip the '.tar.bz2' suffix
            if args.canonical:
                if not args.json:
                    print(dist)
                else:
                    json.append(dist)
                continue
            if dist in linked:
                inst = '*'
            elif dist in extracted:
                inst = '.'
            else:
                inst = ' '
            if not args.json:
                print('%-25s %s %-15s %15s %-15s %s' % (
                    disp_name, inst,
                    pkg.version,
                    pkg.build,
                    config.canonical_channel_name(pkg.channel),
                    common.disp_features(r.features(pkg.fn)),
                ))
                # only print the name on the first row of its group
                disp_name = ''
            else:
                data = {}
                data.update(pkg.info)
                data.update({
                    'fn': pkg.fn,
                    'installed': inst == '*',
                    'extracted': inst in '*.',
                    'version': pkg.version,
                    'build': pkg.build,
                    'build_number': pkg.build_number,
                    'channel': config.canonical_channel_name(pkg.channel),
                    'full_channel': pkg.channel,
                    'features': list(r.features(pkg.fn)),
                    'license': pkg.info.get('license'),
                    'size': pkg.info.get('size'),
                    'depends': pkg.info.get('depends'),
                    'type': pkg.info.get('type')
                })
                if data['type'] == 'app':
                    data['icon'] = make_icon_url(pkg.info)
                json[name].append(data)

    if args.json:
        common.stdout_json(json)
def execute(args, parser):
    # 'conda search' entry point: print every package matching args.regex,
    # with an installed ('*') / extracted ('.') status marker per dist.
    import re
    import sys
    from conda.api import get_index
    from conda.resolve import MatchSpec, Resolve

    if args.regex:
        try:
            pat = re.compile(args.regex, re.I)
        except re.error as e:
            sys.exit("Error: %r is not a valid regex pattern (exception: %s)" %
                     (args.regex, e))
    else:
        pat = None

    prefix = common.get_prefix(args)
    if not args.canonical:
        # status markers are only shown in non-canonical output
        import conda.config
        import conda.install

        linked = conda.install.linked(prefix)
        extracted = set()
        for pkgs_dir in conda.config.pkgs_dirs:
            extracted.update(conda.install.extracted(pkgs_dir))

    # XXX: Make this work with more than one platform
    platform = args.platform or ''
    if platform and platform != config.subdir:
        args.unknown = False
    common.ensure_override_channels_requires_channel(args, dashc=False)
    channel_urls = args.channel or ()
    index = get_index(channel_urls=channel_urls,
                      prepend=not args.override_channels,
                      platform=args.platform,
                      use_cache=args.use_index_cache,
                      unknown=args.unknown)

    r = Resolve(index)
    for name in sorted(r.groups):
        disp_name = name
        if pat and pat.search(name) is None:
            continue
        if args.outdated:
            # only show packages whose installed version is not the latest
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            if not vers_inst:
                continue
            assert len(vers_inst) == 1, name
            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                continue
            latest = pkgs[-1]
            if latest.version == vers_inst[0]:
                continue
        for pkg in sorted(r.get_pkgs(MatchSpec(name))):
            dist = pkg.fn[:-8]   # strip the '.tar.bz2' suffix
            if args.canonical:
                print(dist)
                continue
            if dist in linked:
                inst = '*'
            elif dist in extracted:
                inst = '.'
            else:
                inst = ' '
            print('%-25s %s %-15s %15s %-15s %s' % (
                disp_name, inst,
                pkg.version,
                r.index[pkg.fn]['build'],
                config.canonical_channel_name(pkg.channel),
                common.disp_features(r.features(pkg.fn)),
            ))
            # only print the name on the first row of its group
            disp_name = ''
def execute(args, parser):
    # 'conda search' entry point: print every package matching args.regex,
    # with an installed ('*') / extracted ('.') status marker per dist.
    # NOTE(review): unlike the sibling variant, a bad regex here raises
    # re.error to the caller instead of exiting with a message.
    import re
    from conda.api import get_index
    from conda.resolve import MatchSpec, Resolve

    if args.regex:
        pat = re.compile(args.regex, re.I)
    else:
        pat = None

    prefix = common.get_prefix(args)
    if not args.canonical:
        # status markers are only shown in non-canonical output
        import conda.config
        import conda.install

        linked = conda.install.linked(prefix)
        extracted = set()
        for pkgs_dir in conda.config.pkgs_dirs:
            extracted.update(conda.install.extracted(pkgs_dir))

    # XXX: Make this work with more than one platform
    platform = args.platform or ''
    if platform and platform != config.subdir:
        args.unknown = False
    common.ensure_override_channels_requires_channel(args, dashc=False)
    channel_urls = args.channel or ()
    index = get_index(channel_urls=channel_urls,
                      prepend=not args.override_channels,
                      platform=args.platform,
                      use_cache=args.use_index_cache,
                      unknown=args.unknown)

    r = Resolve(index)
    for name in sorted(r.groups):
        disp_name = name
        if pat and pat.search(name) is None:
            continue
        if args.outdated:
            # only show packages whose installed version is not the latest
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            if not vers_inst:
                continue
            assert len(vers_inst) == 1, name
            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                continue
            latest = pkgs[-1]
            if latest.version == vers_inst[0]:
                continue
        for pkg in sorted(r.get_pkgs(MatchSpec(name))):
            dist = pkg.fn[:-8]   # strip the '.tar.bz2' suffix
            if args.canonical:
                print(dist)
                continue
            if dist in linked:
                inst = '*'
            elif dist in extracted:
                inst = '.'
            else:
                inst = ' '
            print('%-25s %s %-15s %15s %-15s %s' % (
                disp_name, inst,
                pkg.version,
                r.index[pkg.fn]['build'],
                config.canonical_channel_name(pkg.channel),
                common.disp_features(r.features(pkg.fn)),
            ))
            # only print the name on the first row of its group
            disp_name = ''
def execute_search(args, parser):
    # 'conda search' with --reverse-dependency, --full-name, --names-only,
    # --use-local and --outdated support; optional JSON output.
    import re
    from conda.resolve import Resolve

    if args.reverse_dependency:
        if not args.regex:
            parser.error("--reverse-dependency requires at least one package name")
        if args.spec:
            parser.error("--reverse-dependency does not work with --spec")

    pat = None
    ms = None
    if args.regex:
        if args.spec:
            # --spec: 'name=version=build' string, matched by name below
            ms = ' '.join(args.regex.split('='))
        else:
            regex = args.regex
            if args.full_name:
                # --full-name: anchor the pattern to the whole name
                regex = r'^%s$' % regex
            try:
                pat = re.compile(regex, re.I)
            except re.error as e:
                common.error_and_exit(
                    "'%s' is not a valid regex pattern (exception: %s)" %
                    (regex, e),
                    json=args.json,
                    error_type="ValueError")

    prefix = common.get_prefix(args)

    import conda.config
    import conda.install

    linked = conda.install.linked(prefix)
    extracted = set()
    for pkgs_dir in conda.config.pkgs_dirs:
        extracted.update(conda.install.extracted(pkgs_dir))

    # XXX: Make this work with more than one platform
    platform = args.platform or ''
    if platform and platform != config.subdir:
        args.unknown = False
    common.ensure_override_channels_requires_channel(args, dashc=False,
                                                    json=args.json)
    channel_urls = args.channel or ()
    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit(
                "you need to have 'conda-build >= 1.7.1' installed"
                " to use the --use-local option",
                json=args.json,
                error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        if exists(croot):
            channel_urls = [url_path(croot)] + list(channel_urls)
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      unknown=args.unknown,
                                      prefix=prefix,
                                      json=args.json,
                                      platform=args.platform,
                                      offline=args.offline)
    else:
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      platform=args.platform,
                                      use_cache=args.use_index_cache,
                                      prefix=prefix,
                                      unknown=args.unknown,
                                      json=args.json,
                                      offline=args.offline)

    r = Resolve(index)
    # NOTE: local 'json' shadows the output-mode flag in args.json:
    # a list for canonical output, a dict of name -> records otherwise
    if args.canonical:
        json = []
    else:
        json = {}

    names = []
    for name in sorted(r.groups):
        if '@' in name:
            # skip feature/metapackage entries
            continue
        if args.reverse_dependency:
            ms_name = ms
            # collect every package in this group that depends on a
            # package matching the pattern
            for pkg in r.groups[name]:
                for dep in r.ms_depends(pkg):
                    if pat.search(dep.name):
                        names.append((name, Package(pkg, r.index[pkg])))
        else:
            if pat and pat.search(name) is None:
                continue
            if ms and name != ms.name:
                continue
            if ms:
                ms_name = ms
            else:
                ms_name = name
            pkgs = sorted(r.get_pkgs(ms_name))
            names.append((name, pkgs))

    if args.reverse_dependency:
        # group the (name, Package) pairs back into (name, [pkgs]) lists
        new_names = []
        old = None
        for name, pkg in sorted(names, key=lambda x: (x[0], x[1].name, x[1])):
            if name == old:
                new_names[-1][1].append(pkg)
            else:
                new_names.append((name, [pkg]))
            old = name
        names = new_names

    for name, pkgs in names:
        if args.reverse_dependency:
            disp_name = pkgs[0].name
        else:
            disp_name = name

        if args.names_only and not args.outdated:
            print(name)
            continue

        if not args.canonical:
            json[name] = []

        if args.outdated:
            # only show packages whose installed version is not the latest
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            if not vers_inst:
                continue
            assert len(vers_inst) == 1, name
            if not pkgs:
                continue
            latest = pkgs[-1]
            if latest.version == vers_inst[0]:
                continue

        if args.names_only:
            print(name)
            continue

        for pkg in pkgs:
            dist = pkg.fn[:-8]   # strip the '.tar.bz2' suffix
            if args.canonical:
                if not args.json:
                    print(dist)
                else:
                    json.append(dist)
                continue
            if platform and platform != config.subdir:
                # cross-platform search: local status markers are meaningless
                inst = ' '
            elif dist in linked:
                inst = '*'
            elif dist in extracted:
                inst = '.'
            else:
                inst = ' '
            if not args.json:
                print('%-25s %s %-15s %15s %-15s %s' % (
                    disp_name, inst,
                    pkg.version,
                    pkg.build,
                    config.canonical_channel_name(pkg.channel),
                    common.disp_features(r.features(pkg.fn)),
                ))
                # only print the name on the first row of its group
                disp_name = ''
            else:
                data = {}
                data.update(pkg.info)
                data.update({
                    'fn': pkg.fn,
                    'installed': inst == '*',
                    'extracted': inst in '*.',
                    'version': pkg.version,
                    'build': pkg.build,
                    'build_number': pkg.build_number,
                    'channel': config.canonical_channel_name(pkg.channel),
                    'full_channel': pkg.channel,
                    'features': list(r.features(pkg.fn)),
                    'license': pkg.info.get('license'),
                    'size': pkg.info.get('size'),
                    'depends': pkg.info.get('depends'),
                    'type': pkg.info.get('type')
                })
                if data['type'] == 'app':
                    data['icon'] = make_icon_url(pkg.info)
                json[name].append(data)

    if args.json:
        common.stdout_json(json)
def execute_search(args, parser):
    # 'conda search' variant using module-level subdir/canonical_channel_name
    # and dist2quad; supports --reverse-dependency, --full-name, --names-only,
    # --use-local and --outdated, with optional JSON output.
    import re
    from conda.resolve import Resolve

    if args.reverse_dependency:
        if not args.regex:
            parser.error("--reverse-dependency requires at least one package name")
        if args.spec:
            parser.error("--reverse-dependency does not work with --spec")

    pat = None
    ms = None
    if args.regex:
        if args.spec:
            # --spec: 'name=version=build' string, matched by name below
            ms = ' '.join(args.regex.split('='))
        else:
            regex = args.regex
            if args.full_name:
                # --full-name: anchor the pattern to the whole name
                regex = r'^%s$' % regex
            try:
                pat = re.compile(regex, re.I)
            except re.error as e:
                common.error_and_exit(
                    "'%s' is not a valid regex pattern (exception: %s)" %
                    (regex, e),
                    json=args.json,
                    error_type="ValueError")

    prefix = common.get_prefix(args)

    import conda.install

    linked = conda.install.linked(prefix)
    extracted = conda.install.extracted()

    # XXX: Make this work with more than one platform
    platform = args.platform or ''
    if platform and platform != subdir:
        args.unknown = False
    common.ensure_use_local(args)
    common.ensure_override_channels_requires_channel(args, dashc=False)
    channel_urls = args.channel or ()
    index = common.get_index_trap(channel_urls=channel_urls,
                                  prepend=not args.override_channels,
                                  platform=args.platform,
                                  use_local=args.use_local,
                                  use_cache=args.use_index_cache,
                                  prefix=prefix,
                                  unknown=args.unknown,
                                  json=args.json,
                                  offline=args.offline)

    r = Resolve(index)
    # NOTE: local 'json' shadows the output-mode flag in args.json:
    # a list for canonical output, a dict of name -> records otherwise
    if args.canonical:
        json = []
    else:
        json = {}

    names = []
    for name in sorted(r.groups):
        if '@' in name:
            # skip feature/metapackage entries
            continue
        if args.reverse_dependency:
            ms_name = ms
            # collect every package in this group that depends on a
            # package matching the pattern
            for pkg in r.groups[name]:
                for dep in r.ms_depends(pkg):
                    if pat.search(dep.name):
                        names.append((name, Package(pkg, r.index[pkg])))
        else:
            if pat and pat.search(name) is None:
                continue
            # ms is a plain spec string here, so compare on its name part
            if ms and name != ms.split()[0]:
                continue
            if ms:
                ms_name = ms
            else:
                ms_name = name
            pkgs = sorted(r.get_pkgs(ms_name))
            names.append((name, pkgs))

    if args.reverse_dependency:
        # group the (name, Package) pairs back into (name, [pkgs]) lists
        new_names = []
        old = None
        for name, pkg in sorted(names, key=lambda x: (x[0], x[1].name, x[1])):
            if name == old:
                new_names[-1][1].append(pkg)
            else:
                new_names.append((name, [pkg]))
            old = name
        names = new_names

    for name, pkgs in names:
        if args.reverse_dependency:
            disp_name = pkgs[0].name
        else:
            disp_name = name

        if args.names_only and not args.outdated:
            print(name)
            continue

        if not args.canonical:
            json[name] = []

        if args.outdated:
            # only show packages whose installed version is not the latest
            vers_inst = [dist[1] for dist in map(dist2quad, linked)
                         if dist[0] == name]
            if not vers_inst:
                continue
            assert len(vers_inst) == 1, name
            if not pkgs:
                continue
            latest = pkgs[-1]
            if latest.version == vers_inst[0]:
                continue

        if args.names_only:
            print(name)
            continue

        for pkg in pkgs:
            dist = pkg.fn[:-8]   # strip the '.tar.bz2' suffix
            if args.canonical:
                if not args.json:
                    print(dist)
                else:
                    json.append(dist)
                continue
            if platform and platform != subdir:
                # cross-platform search: local status markers are meaningless
                inst = ' '
            elif dist in linked:
                inst = '*'
            elif dist in extracted:
                inst = '.'
            else:
                inst = ' '
            if not args.json:
                print('%-25s %s %-15s %15s %-15s %s' % (
                    disp_name, inst,
                    pkg.version,
                    pkg.build,
                    canonical_channel_name(pkg.channel),
                    common.disp_features(r.features(pkg.fn)),
                ))
                # only print the name on the first row of its group
                disp_name = ''
            else:
                data = {}
                data.update(pkg.info)
                data.update({
                    'fn': pkg.fn,
                    'installed': inst == '*',
                    'extracted': inst in '*.',
                    'version': pkg.version,
                    'build': pkg.build,
                    'build_number': pkg.build_number,
                    'channel': canonical_channel_name(pkg.channel),
                    'full_channel': pkg.channel,
                    'features': list(r.features(pkg.fn)),
                    'license': pkg.info.get('license'),
                    'size': pkg.info.get('size'),
                    'depends': pkg.info.get('depends'),
                    'type': pkg.info.get('type')
                })
                if data['type'] == 'app':
                    data['icon'] = make_icon_url(pkg.info)
                json[name].append(data)

    if args.json:
        common.stdout_json(json)
def execute_search(args, parser):
    # 'conda search' with --reverse-dependency, --full-name, --names-only,
    # --use-local and --outdated; MatchSpec-based matching, optional JSON.
    import re
    from conda.resolve import MatchSpec, Resolve

    if args.reverse_dependency:
        if not args.regex:
            parser.error("--reverse-dependency requires at least one package name")
        if args.spec:
            parser.error("--reverse-dependency does not work with --spec")

    pat = None
    ms = None
    if args.regex:
        if args.spec:
            # --spec: interpret 'name=version=build' as a MatchSpec
            ms = MatchSpec(' '.join(args.regex.split('=')))
        else:
            regex = args.regex
            if args.full_name:
                # --full-name: anchor the pattern to the whole name
                regex = r'^%s$' % regex
            try:
                pat = re.compile(regex, re.I)
            except re.error as e:
                common.error_and_exit(
                    "'%s' is not a valid regex pattern (exception: %s)" %
                    (regex, e),
                    json=args.json,
                    error_type="ValueError")

    prefix = common.get_prefix(args)

    import conda.config
    import conda.install

    linked = conda.install.linked(prefix)
    extracted = set()
    for pkgs_dir in conda.config.pkgs_dirs:
        extracted.update(conda.install.extracted(pkgs_dir))

    # XXX: Make this work with more than one platform
    platform = args.platform or ''
    if platform and platform != config.subdir:
        args.unknown = False
    common.ensure_override_channels_requires_channel(args, dashc=False,
                                                    json=args.json)
    channel_urls = args.channel or ()
    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit(
                "you need to have 'conda-build >= 1.7.1' installed"
                " to use the --use-local option",
                json=args.json,
                error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        index = common.get_index_trap(channel_urls=[url_path(croot)] +
                                      list(channel_urls),
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      unknown=args.unknown,
                                      json=args.json,
                                      platform=args.platform,
                                      offline=args.offline)
    else:
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      platform=args.platform,
                                      use_cache=args.use_index_cache,
                                      unknown=args.unknown,
                                      json=args.json,
                                      offline=args.offline)

    r = Resolve(index)
    # NOTE: local 'json' shadows the output-mode flag in args.json:
    # a list for canonical output, a dict of name -> records otherwise
    if args.canonical:
        json = []
    else:
        json = {}

    names = []
    for name in sorted(r.groups):
        if args.reverse_dependency:
            ms_name = ms
            # collect every package in this group that depends on a
            # package matching the pattern
            for pkg in r.groups[name]:
                for dep in r.ms_depends(pkg):
                    if pat.search(dep.name):
                        names.append((name, Package(pkg, r.index[pkg])))
        else:
            if pat and pat.search(name) is None:
                continue
            if ms and name != ms.name:
                continue
            if ms:
                ms_name = ms
            else:
                ms_name = MatchSpec(name)
            pkgs = sorted(r.get_pkgs(ms_name))
            names.append((name, pkgs))

    if args.reverse_dependency:
        # group the (name, Package) pairs back into (name, [pkgs]) lists
        new_names = []
        old = None
        for name, pkg in sorted(names, key=lambda x: (x[0], x[1].name, x[1])):
            if name == old:
                new_names[-1][1].append(pkg)
            else:
                new_names.append((name, [pkg]))
            old = name
        names = new_names

    for name, pkgs in names:
        if args.reverse_dependency:
            disp_name = pkgs[0].name
        else:
            disp_name = name

        if args.names_only and not args.outdated:
            print(name)
            continue

        if not args.canonical:
            json[name] = []

        if args.outdated:
            # only show packages whose installed version is not the latest
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            if not vers_inst:
                continue
            assert len(vers_inst) == 1, name
            if not pkgs:
                continue
            latest = pkgs[-1]
            if latest.version == vers_inst[0]:
                continue

        if args.names_only:
            print(name)
            continue

        for pkg in pkgs:
            dist = pkg.fn[:-8]   # strip the '.tar.bz2' suffix
            if args.canonical:
                if not args.json:
                    print(dist)
                else:
                    json.append(dist)
                continue
            if platform and platform != config.subdir:
                # cross-platform search: local status markers are meaningless
                inst = ' '
            elif dist in linked:
                inst = '*'
            elif dist in extracted:
                inst = '.'
            else:
                inst = ' '
            if not args.json:
                print('%-25s %s %-15s %15s %-15s %s' % (
                    disp_name, inst,
                    pkg.version,
                    pkg.build,
                    config.canonical_channel_name(pkg.channel),
                    common.disp_features(r.features(pkg.fn)),
                ))
                # only print the name on the first row of its group
                disp_name = ''
            else:
                data = {}
                data.update(pkg.info)
                data.update({
                    'fn': pkg.fn,
                    'installed': inst == '*',
                    'extracted': inst in '*.',
                    'version': pkg.version,
                    'build': pkg.build,
                    'build_number': pkg.build_number,
                    'channel': config.canonical_channel_name(pkg.channel),
                    'full_channel': pkg.channel,
                    'features': list(r.features(pkg.fn)),
                    'license': pkg.info.get('license'),
                    'size': pkg.info.get('size'),
                    'depends': pkg.info.get('depends'),
                    'type': pkg.info.get('type')
                })
                if data['type'] == 'app':
                    data['icon'] = make_icon_url(pkg.info)
                json[name].append(data)

    if args.json:
        common.stdout_json(json)
def display_actions(actions, index):
    """Print a human-readable summary of a plan's actions.

    Shows the packages to be downloaded (FETCH, with sizes and, optionally,
    channels), then groups LINK/UNLINK entries per package name into
    NEW / REMOVED / UPDATED / DOWNGRADED sections with aligned columns.

    actions: dict mapping action names (FETCH, LINK, UNLINK, ...) to lists
        of dists / link args.
    index: repodata index mapping '<dist>.tar.bz2' -> package info dict.
    """
    if actions.get(FETCH):
        print("\nThe following packages will be downloaded:\n")

        disp_lst = []
        for dist in actions[FETCH]:
            info = index[dist + '.tar.bz2']
            extra = '%15s' % human_bytes(info['size'])
            if config.show_channel_urls:
                extra += ' %s' % config.canonical_channel_name(
                    info.get('channel'))
            disp_lst.append((dist, extra))
        print_dists(disp_lst)

        if index and len(actions[FETCH]) > 1:
            print(' ' * 4 + '-' * 60)
            print(" " * 43 + "Total: %14s" %
                  human_bytes(sum(index[dist + '.tar.bz2']['size']
                                  for dist in actions[FETCH])))

    # package name -> [oldver-oldbuild, newver-newbuild]; empty string means
    # the package is only being linked (new) or only unlinked (removed).
    packages = defaultdict(lambda: list(('', '')))
    features = defaultdict(lambda: list(('', '')))

    # This assumes each package will appear in LINK no more than once.
    Packages = {}
    linktypes = {}
    for arg in actions.get(LINK, []):
        dist, pkgs_dir, lt = split_linkarg(arg)
        pkg, ver, build = dist.rsplit('-', 2)
        packages[pkg][1] = ver + '-' + build
        Packages[dist] = Package(dist + '.tar.bz2', index[dist + '.tar.bz2'])
        linktypes[pkg] = lt
        features[pkg][1] = index[dist + '.tar.bz2'].get('features', '')
    for arg in actions.get(UNLINK, []):
        dist, pkgs_dir, lt = split_linkarg(arg)
        pkg, ver, build = dist.rsplit('-', 2)
        packages[pkg][0] = ver + '-' + build
        # If the package is not in the index (e.g., an installed
        # package that is not in the index any more), we just have to
        # fake the metadata.
        info = index.get(dist + '.tar.bz2',
                         dict(name=pkg,
                              version=ver,
                              build_number=int(build) if build.isdigit() else 0,
                              build=build,
                              channel=None))
        Packages[dist] = Package(dist + '.tar.bz2', info)
        features[pkg][0] = info.get('features', '')

    # Column widths for alignment.  The outer max(..., 0) enforces a minimum
    # length; the trailing + 1 leaves room for the ':' appended to each
    # package name when formatting.
    maxpkg = max(len(max(packages or [''], key=len)), 0) + 1
    maxoldver = len(max(packages.values() or [['']],
                        key=lambda i: len(i[0]))[0])
    maxnewver = len(max(packages.values() or [['', '']],
                        key=lambda i: len(i[1]))[1])
    maxoldfeatures = len(max(features.values() or [['']],
                             key=lambda i: len(i[0]))[0])
    maxnewfeatures = len(max(features.values() or [['', '']],
                             key=lambda i: len(i[1]))[1])
    maxoldchannel = len(max([config.canonical_channel_name(
        Packages[pkg + '-' + packages[pkg][0]].channel)
        for pkg in packages if packages[pkg][0]] or [''], key=len))
    maxnewchannel = len(max([config.canonical_channel_name(
        Packages[pkg + '-' + packages[pkg][1]].channel)
        for pkg in packages if packages[pkg][1]] or [''], key=len))

    new = {pkg for pkg in packages if not packages[pkg][0]}
    removed = {pkg for pkg in packages if not packages[pkg][1]}
    updated = set()
    downgraded = set()
    oldfmt = {}
    newfmt = {}
    for pkg in packages:
        # That's right. I'm using old-style string formatting to generate a
        # string with new-style string formatting.
        oldfmt[pkg] = '{pkg:<%s} {vers[0]:<%s}' % (maxpkg, maxoldver)
        if config.show_channel_urls:
            oldfmt[pkg] += ' {channel[0]:<%s}' % maxoldchannel
        if packages[pkg][0]:
            newfmt[pkg] = '{vers[1]:<%s}' % maxnewver
        else:
            newfmt[pkg] = '{pkg:<%s} {vers[1]:<%s}' % (maxpkg, maxnewver)
        if config.show_channel_urls:
            newfmt[pkg] += ' {channel[1]:<%s}' % maxnewchannel
        # TODO: Should we also care about the old package's link type?
        if pkg in linktypes and linktypes[pkg] != install.LINK_HARD:
            newfmt[pkg] += ' (%s)' % install.link_name_map[linktypes[pkg]]

        if features[pkg][0]:
            oldfmt[pkg] += ' [{features[0]:<%s}]' % maxoldfeatures
        if features[pkg][1]:
            newfmt[pkg] += ' [{features[1]:<%s}]' % maxnewfeatures

        if pkg in new or pkg in removed:
            continue
        P0 = Packages[pkg + '-' + packages[pkg][0]]
        P1 = Packages[pkg + '-' + packages[pkg][1]]
        try:
            # <= here means that unchanged packages will be put in updated
            newer = ((P0.name, P0.norm_version, P0.build_number) <=
                     (P1.name, P1.norm_version, P1.build_number))
        except TypeError:
            # norm_version comparison can fail across incompatible version
            # schemes; fall back to the raw version string.
            newer = ((P0.name, P0.version, P0.build_number) <=
                     (P1.name, P1.version, P1.build_number))
        if newer:
            updated.add(pkg)
        else:
            downgraded.add(pkg)

    arrow = ' --> '
    lead = ' ' * 4

    # Renamed from `format` to avoid shadowing the builtin of the same name.
    def _fmt(s, pkg):
        channel = ['', '']
        for i in range(2):
            if packages[pkg][i]:
                channel[i] = config.canonical_channel_name(
                    Packages[pkg + '-' + packages[pkg][i]].channel)
        return lead + s.format(pkg=pkg + ':', vers=packages[pkg],
                               channel=channel, features=features[pkg])

    if new:
        print("\nThe following NEW packages will be INSTALLED:\n")
        for pkg in sorted(new):
            print(_fmt(newfmt[pkg], pkg))

    if removed:
        print("\nThe following packages will be REMOVED:\n")
        for pkg in sorted(removed):
            print(_fmt(oldfmt[pkg], pkg))

    if updated:
        print("\nThe following packages will be UPDATED:\n")
        for pkg in sorted(updated):
            print(_fmt(oldfmt[pkg] + arrow + newfmt[pkg], pkg))

    if downgraded:
        print("\nThe following packages will be DOWNGRADED:\n")
        for pkg in sorted(downgraded):
            print(_fmt(oldfmt[pkg] + arrow + newfmt[pkg], pkg))

    print()
def execute_search(args, parser):
    """Run `conda search`: list packages in the channel index.

    Filters package groups by a regex (``args.regex``) or a match spec
    (``args.spec``), optionally restricting to outdated installed packages
    (``args.outdated``).  Output is either human-readable text, canonical
    dist names (``args.canonical``), or JSON (``args.json``).
    """
    import re
    from conda.resolve import MatchSpec, Resolve

    pat = None
    ms = None
    if args.regex:
        if args.spec:
            ms = MatchSpec(' '.join(args.regex.split('=')))
        else:
            try:
                pat = re.compile(args.regex, re.I)
            except re.error as e:
                common.error_and_exit(
                    "%r is not a valid regex pattern (exception: %s)" %
                    (args.regex, e),
                    json=args.json,
                    error_type="ValueError")

    prefix = common.get_prefix(args)

    import conda.config
    import conda.install

    linked = conda.install.linked(prefix)
    extracted = set()
    for pkgs_dir in conda.config.pkgs_dirs:
        extracted.update(conda.install.extracted(pkgs_dir))

    # XXX: Make this work with more than one platform
    platform = args.platform or ''
    if platform and platform != config.subdir:
        # Searching a foreign platform: local unknown packages don't apply.
        args.unknown = False
    common.ensure_override_channels_requires_channel(args, dashc=False,
                                                    json=args.json)
    channel_urls = args.channel or ()
    index = common.get_index_trap(channel_urls=channel_urls,
                                  prepend=not args.override_channels,
                                  platform=args.platform,
                                  use_cache=args.use_index_cache,
                                  unknown=args.unknown,
                                  json=args.json)

    r = Resolve(index)

    # Renamed from `json` so the local doesn't shadow the stdlib module name.
    # Canonical mode emits a flat list of dists; otherwise a name -> records
    # mapping.
    json_out = [] if args.canonical else {}

    for name in sorted(r.groups):
        disp_name = name
        if pat and pat.search(name) is None:
            continue
        if ms and name != ms.name:
            continue

        if ms:
            ms_name = ms
        else:
            ms_name = MatchSpec(name)

        if not args.canonical:
            json_out[name] = []

        # Compute once; the original sorted r.get_pkgs(ms_name) both in the
        # --outdated branch and again for the display loop.
        pkgs = sorted(r.get_pkgs(ms_name))

        if args.outdated:
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            if not vers_inst:
                continue
            assert len(vers_inst) == 1, name
            if not pkgs:
                continue
            latest = pkgs[-1]
            if latest.version == vers_inst[0]:
                # Installed version is already the newest available.
                continue

        for pkg in pkgs:
            dist = pkg.fn[:-8]  # strip the '.tar.bz2' suffix
            if args.canonical:
                if not args.json:
                    print(dist)
                else:
                    json_out.append(dist)
                continue
            # '*' = linked into the prefix, '.' = extracted in a pkgs cache.
            if dist in linked:
                inst = '*'
            elif dist in extracted:
                inst = '.'
            else:
                inst = ' '

            if not args.json:
                print('%-25s %s %-15s %15s %-15s %s' % (
                    disp_name, inst,
                    pkg.version,
                    pkg.build,
                    config.canonical_channel_name(pkg.channel),
                    common.disp_features(r.features(pkg.fn)),
                ))
                # Only show the group name on the first row.
                disp_name = ''
            else:
                data = {}
                data.update(pkg.info)
                data.update({
                    'fn': pkg.fn,
                    'installed': inst == '*',
                    'extracted': inst in '*.',
                    'version': pkg.version,
                    'build': pkg.build,
                    'build_number': pkg.build_number,
                    'channel': config.canonical_channel_name(pkg.channel),
                    'full_channel': pkg.channel,
                    'features': list(r.features(pkg.fn)),
                    'license': pkg.info.get('license'),
                    'size': pkg.info.get('size'),
                    'depends': pkg.info.get('depends'),
                    'type': pkg.info.get('type')
                })

                if data['type'] == 'app':
                    data['icon'] = make_icon_url(pkg.info)
                json_out[name].append(data)

    if args.json:
        common.stdout_json(json_out)