def get_index(channel_urls=(), prepend=True, platform=None, use_cache=False,
              unknown=False, offline=False, prefix=None):
    """
    Return the index of packages available on the channels

    If prepend=False, only the channels passed in as arguments are used.
    If platform=None, then the current platform is used.
    If prefix is supplied, then the packages installed in that prefix are added.
    """
    # channel_urls is normalized into a mapping keyed by channel URL; the
    # values are unpacked as (canonical name, priority) 2-tuples below.
    channel_urls = config.normalize_urls(channel_urls, platform, offline)
    if prepend:
        # Offset the default channels' priorities so explicitly passed
        # channels always take precedence.
        # NOTE(review): max() here runs over the raw dict values while the
        # code below unpacks those values as (name, priority) tuples; adding
        # a tuple-valued pri0 to rec[1] would fail -- confirm the value
        # shape returned by config.normalize_urls.
        pri0 = max(itervalues(channel_urls)) if channel_urls else 0
        for url, rec in iteritems(config.get_channel_urls(platform, offline)):
            channel_urls[url] = (rec[0], rec[1] + pri0)
    index = fetch_index(channel_urls, use_cache=use_cache, unknown=unknown)
    if prefix:
        # Fold installed packages (from conda-meta) into the index so the
        # resolver can see them even when absent from the repodata.
        for dist, info in iteritems(install.linked_data(prefix)):
            fn = dist + '.tar.bz2'
            channel = info.get('channel', '')
            if channel not in channel_urls:
                # Unknown channel: register it with priority 0.
                channel_urls[channel] = (config.canonical_channel_name(channel, True, True), 0)
            url_s, priority = channel_urls[channel]
            # Index keys are 'schannel::fn', or bare 'fn' for the default channel.
            key = url_s + '::' + fn if url_s else fn
            if key not in index:
                # only if the package in not in the repodata, use local
                # conda-meta (with 'depends' defaulting to [])
                info.setdefault('depends', [])
                info['fn'] = fn
                info['schannel'] = url_s
                info['channel'] = channel
                # NOTE(review): no separator between channel and fn here --
                # presumably `channel` already ends with '/'; confirm.
                info['url'] = channel + fn
                info['priority'] = priority
                index[key] = info
    return index
def get_index(channel_urls=(), prepend=True, platform=None, use_cache=False,
              unknown=False, offline=False, prefix=None):
    """
    Return the index of packages available on the channels

    If prepend=False, only the channels passed in as arguments are used.
    If platform=None, then the current platform is used.
    If prefix is supplied, then the packages installed in that prefix are added.
    """
    channel_urls = config.normalize_urls(channel_urls, platform=platform)
    if prepend:
        channel_urls += config.get_channel_urls(platform=platform)
    if offline:
        # Offline mode: restrict the search to local file:// channels.
        channel_urls = [url for url in channel_urls if url.startswith('file:')]
    index = fetch_index(tuple(channel_urls), use_cache=use_cache, unknown=unknown)
    if prefix:
        # Merge locally installed packages (conda-meta) into the index.
        for fn, info in iteritems(install.linked_data(prefix)):
            fn = fn + '.tar.bz2'
            orec = index.get(fn)
            if orec is not None:
                # Identical md5: the repodata record wins; keep it as-is.
                # NOTE(review): if both records lack 'md5' this compares
                # None == None and also skips -- confirm that is intended.
                if orec.get('md5',None) == info.get('md5',None):
                    continue
                # Different md5: prefer the local conda-meta record, but
                # inherit 'depends' from repodata when conda-meta lacks it.
                info.setdefault('depends',orec.get('depends',[]))
            index[fn] = info
    return index
def get_index(channel_urls=(), prepend=True, platform=None, use_local=False,
              use_cache=False, unknown=False, offline=False, prefix=None):
    """
    Return the index of packages available on the channels

    If prepend=False, only the channels passed in as arguments are used.
    If platform=None, then the current platform is used.
    If use_local=True, the special 'local' channel is searched first.
    If prefix is supplied, then the packages installed in that prefix are added.
    """
    if use_local:
        channel_urls = ['local'] + list(channel_urls)
    channel_urls = normalize_urls(channel_urls, platform, offline)
    if prepend:
        channel_urls.extend(get_channel_urls(platform, offline))
    channel_urls = prioritize_channels(channel_urls)
    index = fetch_index(channel_urls, use_cache=use_cache, unknown=unknown)
    if prefix:
        # Map canonical channel name -> priority for installed packages.
        priorities = {c: p for c, p in itervalues(channel_urls)}
        for dist, info in iteritems(install.linked_data(prefix)):
            fn = info['fn']
            schannel = info['schannel']
            # FIX: this local was previously named `prefix`, shadowing the
            # function parameter of the same name inside the loop; renamed
            # to key_prefix (behavior unchanged -- the parameter was not
            # read again after this point).
            key_prefix = '' if schannel == 'defaults' else schannel + '::'
            priority = priorities.get(schannel, 0)
            key = key_prefix + fn
            if key in index:
                # Copy the link information so the resolver knows this is installed
                index[key]['link'] = info.get('link')
            else:
                # only if the package in not in the repodata, use local
                # conda-meta (with 'depends' defaulting to [])
                info.setdefault('depends', [])
                info['priority'] = priority
                index[key] = info
    return index
def get_index(channel_urls=(), prepend=True, platform=None, use_cache=False,
              unknown=False, offline=False, prefix=None):
    """
    Return the index of packages available on the channels

    If prepend=False, only the channels passed in as arguments are used.
    If platform=None, then the current platform is used.
    If prefix is supplied, then the packages installed in that prefix are added.
    """
    urls = config.normalize_urls(channel_urls, platform=platform)
    if prepend:
        urls += config.get_channel_urls(platform=platform)
    if offline:
        # Offline mode: keep only local file:// channels.
        urls = [u for u in urls if u.startswith('file:')]
    index = fetch_index(tuple(urls), use_cache=use_cache, unknown=unknown)
    if prefix:
        # Add installed packages that the repodata does not already cover.
        for dist, meta in iteritems(install.linked_data(prefix)):
            key = dist + '.tar.bz2'
            if key in index:
                continue
            # only if the package is not in the repodata, use local
            # conda-meta (with 'depends' defaulting to [])
            meta.setdefault('depends', [])
            index[key] = meta
    return index
def get_package_obj_files(dist, prefix):
    """Return the files of `dist` in `prefix` (relative paths) that are
    object (binary) files, as determined by post.is_obj()."""
    record = linked_data(prefix).get(dist)
    if not record:
        # Package not linked in this prefix: nothing to report.
        return []
    return [f for f in record.get('files', [])
            if post.is_obj(join(prefix, f))]
def test_tarball_install_and_bad_metadata(self):
    """Integration test: tarball installs (local channel, full path,
    relative path) and robustness against a corrupt conda-meta file."""
    with make_temp_env("python flask=0.10.1") as prefix:
        assert_package_is_installed(prefix, 'flask-0.10.1')
        # Grab flask's metadata record before removing it.
        flask_data = [
            p for p in itervalues(linked_data(prefix))
            if p['name'] == 'flask'
        ][0]
        run_command(Commands.REMOVE, prefix, 'flask')
        assert not package_is_installed(prefix, 'flask-0.10.1')
        assert_package_is_installed(prefix, 'python')

        # Regression test for 2812
        # install from local channel
        from conda.config import pkgs_dirs
        flask_fname = flask_data['fn']
        tar_old_path = join(pkgs_dirs[0], flask_fname)
        # Strip channel-identifying fields so the record can be served
        # from the hand-built local channel below.
        for field in ('url', 'channel', 'schannel'):
            del flask_data[field]
        repodata = {'info': {}, 'packages': {flask_fname: flask_data}}
        with make_temp_env() as channel:
            subchan = join(channel, subdir)
            channel = url_path(channel)
            os.makedirs(subchan)
            tar_new_path = join(subchan, flask_fname)
            copyfile(tar_old_path, tar_new_path)
            with bz2.BZ2File(join(subchan, 'repodata.json.bz2'), 'w') as f:
                f.write(json.dumps(repodata).encode('utf-8'))
            run_command(Commands.INSTALL, prefix, '-c', channel, 'flask')
            assert_package_is_installed(prefix, channel + '::' + 'flask-')

        # regression test for #2626
        # install tarball with full path
        tar_new_path = join(prefix, flask_fname)
        copyfile(tar_old_path, tar_new_path)
        run_command(Commands.INSTALL, prefix, tar_new_path)
        assert_package_is_installed(prefix, 'flask-0')

        run_command(Commands.REMOVE, prefix, 'flask')
        assert not package_is_installed(prefix, 'flask-0')

        # regression test for #2626
        # install tarball with relative path
        tar_new_path = relpath(tar_new_path)
        run_command(Commands.INSTALL, prefix, tar_new_path)
        assert_package_is_installed(prefix, 'flask-0.')

        # regression test for #2599
        # a stray conda-meta JSON must not make 'flask' look installed
        linked_data_.clear()
        flask_metadata = glob(
            join(prefix, 'conda-meta', flask_fname[:-8] + '.json'))[-1]
        bad_metadata = join(prefix, 'conda-meta', 'flask.json')
        copyfile(flask_metadata, bad_metadata)
        assert not package_is_installed(prefix, 'flask', exact=True)
        assert_package_is_installed(prefix, 'flask-0.')
def test_tarball_install_and_bad_metadata(self):
    """Exercise tarball-based installs and verify that a bogus
    conda-meta entry does not register as an installed package."""
    with make_temp_env("python flask=0.10.1") as prefix:
        assert_package_is_installed(prefix, 'flask-0.10.1')
        # Keep flask's index record around for building a local channel.
        flask_data = [p for p in itervalues(linked_data(prefix))
                      if p['name'] == 'flask'][0]
        run_command(Commands.REMOVE, prefix, 'flask')
        assert not package_is_installed(prefix, 'flask-0.10.1')
        assert_package_is_installed(prefix, 'python')

        # Regression test for 2812
        # install from local channel
        from conda.config import pkgs_dirs
        flask_fname = flask_data['fn']
        tar_old_path = join(pkgs_dirs[0], flask_fname)
        # Remove channel-identifying fields before re-serving the record.
        for field in ('url', 'channel', 'schannel'):
            del flask_data[field]
        repodata = {'info': {}, 'packages':{flask_fname: flask_data}}
        with make_temp_env() as channel:
            subchan = join(channel, subdir)
            channel = url_path(channel)
            os.makedirs(subchan)
            tar_new_path = join(subchan, flask_fname)
            copyfile(tar_old_path, tar_new_path)
            with bz2.BZ2File(join(subchan, 'repodata.json.bz2'), 'w') as f:
                f.write(json.dumps(repodata).encode('utf-8'))
            run_command(Commands.INSTALL, prefix, '-c', channel, 'flask')
            assert_package_is_installed(prefix, channel + '::' + 'flask-')

        # regression test for #2626
        # install tarball with full path
        tar_new_path = join(prefix, flask_fname)
        copyfile(tar_old_path, tar_new_path)
        run_command(Commands.INSTALL, prefix, tar_new_path)
        assert_package_is_installed(prefix, 'flask-0')

        run_command(Commands.REMOVE, prefix, 'flask')
        assert not package_is_installed(prefix, 'flask-0')

        # regression test for #2626
        # install tarball with relative path
        tar_new_path = relpath(tar_new_path)
        run_command(Commands.INSTALL, prefix, tar_new_path)
        assert_package_is_installed(prefix, 'flask-0.')

        # regression test for #2599
        linked_data_.clear()
        flask_metadata = glob(join(prefix, 'conda-meta',
                                   flask_fname[:-8] + '.json'))[-1]
        bad_metadata = join(prefix, 'conda-meta', 'flask.json')
        copyfile(flask_metadata, bad_metadata)
        assert not package_is_installed(prefix, 'flask', exact=True)
        assert_package_is_installed(prefix, 'flask-0.')
def create_bundle(prefix=None, data_path=None, bundle_name=None, extra_meta=None):
    """
    Create a "bundle" of the environment located in `prefix`, and return
    the full path to the created package, which is going to be located in
    the current working directory, unless specified otherwise.
    """
    # Base metadata for the bundle package; version is derived from it below.
    meta = dict(
        name='bundle',
        build='0',
        build_number=0,
        type='bundle',
        bundle_name=bundle_name,
        creator=os.getenv('USER'),
        platform=platform,
        arch=arch_name,
        ctime=time.strftime(ISO8601),
        depends=[],
    )
    meta['version'] = get_version(meta)

    tar_path = join('bundle-%(version)s-0.tar.bz2' % meta)
    t = tarfile.open(tar_path, 'w:bz2')
    if prefix:
        prefix = abspath(prefix)
        # NOTE(review): environments under /opt/anaconda are skipped here;
        # presumably those are system-managed installs -- confirm.
        if not prefix.startswith('/opt/anaconda'):
            for f in sorted(untracked(prefix, exclude_self_build=True)):
                # Refuse to bundle untracked files under the bundle data path.
                if f.startswith(BDP):
                    raise RuntimeError('bad untracked file: %s' % f)
                if f.startswith('info/'):
                    continue
                path = join(prefix, f)
                add_file(t, path, f)
        meta['bundle_prefix'] = prefix
        # Record every linked package as a dependency of the bundle.
        meta['depends'] = [
            '%(name)s %(version)s %(build)s' % info
            for info in itervalues(install.linked_data(prefix))
        ]

    if data_path:
        add_data(t, data_path)

    if extra_meta:
        meta.update(extra_meta)

    add_info_files(t, meta)
    t.close()
    return tar_path
def create_prefix_spec_map_with_deps(r, specs, default_prefix):
    """Group `specs` by the prefix they belong to, adding to private
    environments any installed package that depends on the spec."""
    prefix_spec_map = {}
    for spec in specs:
        target = prefix_if_in_private_env(spec)
        if target is None:
            target = default_prefix
        prefix_spec_map.setdefault(target, set()).add(spec)

        # For a private env, carry along installed dependents of this spec
        # so they are handled together with it.
        if is_private_env(prefix_to_env_name(target, context.root_prefix)):
            for linked_spec in linked_data(target):
                if (not linked_spec.name.startswith(spec)
                        and r.depends_on(spec, linked_spec)):
                    prefix_spec_map[target].add(linked_spec.name)
    return prefix_spec_map
def test_install_tarball_from_local_channel(self):
    """Integration test: install a package tarball served from a
    hand-built local channel and from the conda-bld build channel."""
    with make_temp_env("python flask=0.10.1") as prefix:
        assert_package_is_installed(prefix, 'flask-0.10.1')
        # Capture flask's index record before removal.
        flask_data = [p for p in itervalues(linked_data(prefix))
                      if p['name'] == 'flask'][0]
        run_command(Commands.REMOVE, prefix, 'flask')
        assert not package_is_installed(prefix, 'flask-0.10.1')
        assert_package_is_installed(prefix, 'python')

        flask_fname = flask_data['fn']
        tar_old_path = join(context.pkgs_dirs[0], flask_fname)

        # Regression test for #2812
        # install from local channel
        for field in ('url', 'channel', 'schannel'):
            del flask_data[field]
        repodata = {'info': {}, 'packages': {flask_fname: flask_data}}
        with make_temp_env() as channel:
            subchan = join(channel, context.subdir)
            channel = path_to_url(channel)
            os.makedirs(subchan)
            tar_new_path = join(subchan, flask_fname)
            copyfile(tar_old_path, tar_new_path)
            with bz2.BZ2File(join(subchan, 'repodata.json.bz2'), 'w') as f:
                f.write(json.dumps(repodata, cls=EntityEncoder).encode('utf-8'))
            run_command(Commands.INSTALL, prefix, '-c', channel, 'flask')
            assert_package_is_installed(prefix, channel + '::' + 'flask-')

            run_command(Commands.REMOVE, prefix, 'flask')
            assert not package_is_installed(prefix, 'flask-0')

            # Regression test for 2970
            # install from build channel as a tarball
            conda_bld = join(sys.prefix, 'conda-bld')
            conda_bld_sub = join(conda_bld, context.subdir)
            tar_bld_path = join(conda_bld_sub, flask_fname)
            if os.path.exists(conda_bld):
                # conda-bld already exists: just drop the tarball in place;
                # ignore a failed rename (e.g. already present).
                try:
                    os.rename(tar_new_path, tar_bld_path)
                except OSError:
                    pass
            else:
                # No conda-bld yet: promote our temp subchannel to it.
                os.makedirs(conda_bld)
                os.rename(subchan, conda_bld_sub)
            run_command(Commands.INSTALL, prefix, tar_bld_path)
            assert_package_is_installed(prefix, 'flask-')
def create_bundle(prefix=None, data_path=None, bundle_name=None, extra_meta=None):
    """
    Create a "bundle" of the environment located in `prefix`, and return
    the full path to the created package, which is going to be located in
    the current working directory, unless specified otherwise.
    """
    # Seed metadata; 'version' is computed from this dict just below.
    meta = dict(
        name='bundle',
        build='0',
        build_number=0,
        type='bundle',
        bundle_name=bundle_name,
        creator=os.getenv('USER'),
        platform=platform,
        arch=arch_name,
        ctime=time.strftime(ISO8601),
        depends=[],
    )
    meta['version'] = get_version(meta)

    tar_path = join('bundle-%(version)s-0.tar.bz2' % meta)
    t = tarfile.open(tar_path, 'w:bz2')
    if prefix:
        prefix = abspath(prefix)
        # NOTE(review): prefixes under /opt/anaconda are not scanned for
        # untracked files -- presumably managed installs; confirm.
        if not prefix.startswith('/opt/anaconda'):
            for f in sorted(untracked(prefix, exclude_self_build=True)):
                # Untracked files under the bundle data path are an error.
                if f.startswith(BDP):
                    raise RuntimeError('bad untracked file: %s' % f)
                if f.startswith('info/'):
                    continue
                path = join(prefix, f)
                add_file(t, path, f)
        meta['bundle_prefix'] = prefix
        # Every linked package in the prefix becomes a bundle dependency.
        meta['depends'] = ['%(name)s %(version)s %(build)s' % info
                           for info in itervalues(install.linked_data(prefix))]

    if data_path:
        add_data(t, data_path)

    if extra_meta:
        meta.update(extra_meta)

    add_info_files(t, meta)
    t.close()
    return tar_path
def test_tarball_install_and_bad_metadata(self):
    """Tarball installs from the package cache and from arbitrary paths,
    plus a check that corrupt conda-meta does not fake an install."""
    with make_temp_env("python flask=0.10.1") as prefix:
        assert_package_is_installed(prefix, 'flask-0.10.1')
        flask_data = [p for p in itervalues(linked_data(prefix))
                      if p['name'] == 'flask'][0]
        run_command(Commands.REMOVE, prefix, 'flask')
        assert not package_is_installed(prefix, 'flask-0.10.1')
        assert_package_is_installed(prefix, 'python')

        flask_fname = flask_data['fn']
        tar_old_path = join(context.pkgs_dirs[0], flask_fname)

        # regression test for #2886 (part 1 of 2)
        # install tarball from package cache, default channel
        run_command(Commands.INSTALL, prefix, tar_old_path)
        assert_package_is_installed(prefix, 'flask-0.')

        # regression test for #2626
        # install tarball with full path, outside channel
        tar_new_path = join(prefix, flask_fname)
        copyfile(tar_old_path, tar_new_path)
        run_command(Commands.INSTALL, prefix, tar_new_path)
        assert_package_is_installed(prefix, 'flask-0')

        # regression test for #2626
        # install tarball with relative path, outside channel
        run_command(Commands.REMOVE, prefix, 'flask')
        assert not package_is_installed(prefix, 'flask-0.10.1')
        tar_new_path = relpath(tar_new_path)
        run_command(Commands.INSTALL, prefix, tar_new_path)
        assert_package_is_installed(prefix, 'flask-0.')

        # regression test for #2886 (part 2 of 2)
        # install tarball from package cache, local channel
        run_command(Commands.REMOVE, prefix, 'flask')
        assert not package_is_installed(prefix, 'flask-0')
        run_command(Commands.INSTALL, prefix, tar_old_path)
        # The last install was from the `local::` channel
        assert_package_is_installed(prefix, 'flask-')

        # regression test for #2599
        # a stray conda-meta JSON must not count as an installed package
        linked_data_.clear()
        flask_metadata = glob(join(prefix, 'conda-meta',
                                   flask_fname[:-8] + '.json'))[-1]
        bad_metadata = join(prefix, 'conda-meta', 'flask.json')
        copyfile(flask_metadata, bad_metadata)
        assert not package_is_installed(prefix, 'flask', exact=True)
        assert_package_is_installed(prefix, 'flask-0.')
def create_prefix_spec_map_with_deps(r, specs, default_prefix):
    """Build a mapping of target prefix -> set of specs, pulling dependent
    installed packages into the set for private environments."""
    prefix_spec_map = {}
    for spec in specs:
        spec_prefix = prefix_if_in_private_env(spec)
        # Fall back to the default prefix when the spec does not live in a
        # private environment.
        spec_prefix = spec_prefix if spec_prefix is not None else default_prefix
        if spec_prefix in prefix_spec_map.keys():
            prefix_spec_map[spec_prefix].add(spec)
        else:
            prefix_spec_map[spec_prefix] = {spec}

        if is_private_env(prefix_to_env_name(spec_prefix, context.root_prefix)):
            # Also include any installed package that depends on this spec
            # so it is processed together with the spec.
            linked = linked_data(spec_prefix)
            for linked_spec in linked:
                if not linked_spec.name.startswith(spec) and r.depends_on(
                        spec, linked_spec):
                    prefix_spec_map[spec_prefix].add(linked_spec.name)
    return prefix_spec_map
def get_index(channel_urls=(), prepend=True, platform=None, use_local=False,
              use_cache=False, unknown=False, offline=False, prefix=None):
    """
    Return the index of packages available on the channels

    If prepend=False, only the channels passed in as arguments are used.
    If platform=None, then the current platform is used.
    If use_local=True, the special 'local' channel is searched first.
    If prefix is supplied, then the packages installed in that prefix are added.
    """
    if use_local:
        channel_urls = ['local'] + list(channel_urls)
    channel_urls = normalize_urls(channel_urls, platform, offline)
    if prepend:
        channel_urls.extend(get_channel_urls(platform, offline))
    channel_urls = prioritize_channels(channel_urls)
    index = fetch_index(channel_urls, use_cache=use_cache, unknown=unknown)
    if prefix:
        # Map canonical channel name -> priority; packages from channels we
        # do not know get a priority one past the current maximum.
        priorities = {c: p for c, p in itervalues(channel_urls)}
        maxp = max(itervalues(priorities)) + 1 if priorities else 1
        for dist, info in iteritems(install.linked_data(prefix)):
            fn = info['fn']
            schannel = info['schannel']
            # FIX: this local was previously named `prefix`, shadowing the
            # function parameter inside the loop; renamed to key_prefix
            # (behavior unchanged -- the parameter was not read afterwards).
            key_prefix = '' if schannel == 'defaults' else schannel + '::'
            priority = priorities.get(schannel, maxp)
            key = key_prefix + fn
            if key in index:
                # Copy the link information so the resolver knows this is installed
                index[key]['link'] = info.get('link')
            else:
                # only if the package in not in the repodata, use local
                # conda-meta (with 'depends' defaulting to [])
                info.setdefault('depends', [])
                info['priority'] = priority
                index[key] = info
    return index
def get_egg_info(prefix, all_pkgs=False):
    """
    Return a set of canonical names of all Python packages (in `prefix`),
    by inspecting the .egg-info files inside site-packages.

    By default, only untracked (not conda installed) .egg-info files are
    considered. Setting `all_pkgs` to True changes this.
    """
    installed_pkgs = linked_data(prefix)
    sp_dir = get_site_packages_dir(installed_pkgs)
    if sp_dir is None:
        # No site-packages in this prefix: nothing to inspect.
        return set()

    # All files installed by conda itself, as prefix-relative paths.
    conda_files = set()
    for rec in itervalues(installed_pkgs):
        conda_files.update(rec.get('files', []))

    result = set()
    for egg_path in get_egg_info_files(join(prefix, sp_dir)):
        relative = rel_path(prefix, egg_path)
        if not all_pkgs and relative in conda_files:
            # Tracked by conda; skip unless the caller asked for everything.
            continue
        dist = parse_egg_info(egg_path)
        if dist:
            result.add(dist)
    return result
def test_tarball_install_and_bad_metadata(self):
    """Full tarball-install matrix: package cache, explicit paths, local
    channel, conda-bld channel; ends with a corrupt conda-meta check."""
    with make_temp_env("python flask=0.10.1") as prefix:
        assert_package_is_installed(prefix, 'flask-0.10.1')
        flask_data = [p for p in itervalues(linked_data(prefix))
                      if p['name'] == 'flask'][0]
        run_command(Commands.REMOVE, prefix, 'flask')
        assert not package_is_installed(prefix, 'flask-0.10.1')
        assert_package_is_installed(prefix, 'python')

        flask_fname = flask_data['fn']
        tar_old_path = join(context.pkgs_dirs[0], flask_fname)

        # regression test for #2886 (part 1 of 2)
        # install tarball from package cache, default channel
        run_command(Commands.INSTALL, prefix, tar_old_path)
        assert_package_is_installed(prefix, 'flask-0.')

        # regression test for #2626
        # install tarball with full path, outside channel
        tar_new_path = join(prefix, flask_fname)
        copyfile(tar_old_path, tar_new_path)
        run_command(Commands.INSTALL, prefix, tar_new_path)
        assert_package_is_installed(prefix, 'flask-0')

        # regression test for #2626
        # install tarball with relative path, outside channel
        run_command(Commands.REMOVE, prefix, 'flask')
        assert not package_is_installed(prefix, 'flask-0.10.1')
        tar_new_path = relpath(tar_new_path)
        run_command(Commands.INSTALL, prefix, tar_new_path)
        assert_package_is_installed(prefix, 'flask-0.')

        # Regression test for 2812
        # install from local channel
        for field in ('url', 'channel', 'schannel'):
            del flask_data[field]
        repodata = {'info': {}, 'packages':{flask_fname: flask_data}}
        with make_temp_env() as channel:
            subchan = join(channel, context.subdir)
            channel = path_to_url(channel)
            os.makedirs(subchan)
            tar_new_path = join(subchan, flask_fname)
            copyfile(tar_old_path, tar_new_path)
            with bz2.BZ2File(join(subchan, 'repodata.json.bz2'), 'w') as f:
                f.write(json.dumps(repodata).encode('utf-8'))
            run_command(Commands.INSTALL, prefix, '-c', channel, 'flask')
            assert_package_is_installed(prefix, channel + '::' + 'flask-')

            run_command(Commands.REMOVE, prefix, 'flask')
            assert not package_is_installed(prefix, 'flask-0')

            # Regression test for 2970
            # install from build channel as a tarball
            conda_bld = join(sys.prefix, 'conda-bld')
            conda_bld_sub = join(conda_bld, context.subdir)
            tar_bld_path = join(conda_bld_sub, flask_fname)
            if os.path.exists(conda_bld):
                # conda-bld exists: move the tarball in; tolerate a failed
                # rename (e.g. file already present).
                try:
                    os.rename(tar_new_path, tar_bld_path)
                except OSError:
                    pass
            else:
                # No conda-bld yet: promote our temp subchannel to it.
                os.makedirs(conda_bld)
                os.rename(subchan, conda_bld_sub)
            run_command(Commands.INSTALL, prefix, tar_bld_path)
            assert_package_is_installed(prefix, 'flask-')

        # regression test for #2886 (part 2 of 2)
        # install tarball from package cache, local channel
        run_command(Commands.REMOVE, prefix, 'flask')
        assert not package_is_installed(prefix, 'flask-0')
        run_command(Commands.INSTALL, prefix, tar_old_path)
        # The last install was from the `local::` channel
        assert_package_is_installed(prefix, 'flask-')

        # regression test for #2599
        linked_data_.clear()
        flask_metadata = glob(join(prefix, 'conda-meta',
                                   flask_fname[:-8] + '.json'))[-1]
        bad_metadata = join(prefix, 'conda-meta', 'flask.json')
        copyfile(flask_metadata, bad_metadata)
        assert not package_is_installed(prefix, 'flask', exact=True)
        assert_package_is_installed(prefix, 'flask-0.')
def get_installed_version(prefix, name):
    """Return the version string of package `name` installed in `prefix`,
    or None when the package is not linked there."""
    match = next((rec for rec in itervalues(install.linked_data(prefix))
                  if rec['name'] == name), None)
    return str(match['version']) if match is not None else None
def execute(args, parser):
    """Implement `conda info`: print root prefix, package details, or a
    summary of the conda installation, as plain text or JSON."""
    import os
    from os.path import dirname

    import conda
    from conda.config import (root_dir, get_channel_urls, subdir, pkgs_dirs,
                              root_writable, envs_dirs, default_prefix,
                              rc_path, user_rc_path, sys_rc_path, foreign,
                              hide_binstar_tokens, platform, offline)
    from conda.resolve import Resolve
    from conda.api import get_index

    # --root: print only the root prefix and exit.
    if args.root:
        if args.json:
            stdout_json({'root_prefix': root_dir})
        else:
            print(root_dir)
        return

    # Explicit package arguments: show matching records from the index.
    if args.packages:
        index = get_index()
        r = Resolve(index)
        if args.json:
            stdout_json({
                package: [p._asdict()
                          for p in sorted(r.get_pkgs(arg2spec(package)))]
                for package in args.packages
            })
        else:
            for package in args.packages:
                versions = r.get_pkgs(arg2spec(package))
                for pkg in sorted(versions):
                    pretty_package(pkg)
        return

    options = 'envs', 'system', 'license'

    # Best-effort collection of environment facts below: each probe guards
    # against a missing/broken component and records a placeholder string.
    try:
        from conda.install import linked_data
        root_pkgs = linked_data(sys.prefix)
    except:
        root_pkgs = None

    try:
        from .._vendor.requests import __version__ as requests_version
    except ImportError:
        requests_version = "could not import"
    except Exception as e:
        requests_version = "Error %s" % e

    try:
        cenv = [p for p in itervalues(root_pkgs) if p['name'] == 'conda-env']
        conda_env_version = cenv[0]['version']
    except:
        conda_env_version = "not installed"

    try:
        import conda_build
    except ImportError:
        conda_build_version = "not installed"
    except Exception as e:
        conda_build_version = "Error %s" % e
    else:
        conda_build_version = conda_build.__version__

    channels = get_channel_urls(offline=offline)

    info_dict = dict(
        platform=subdir,
        conda_version=conda.__version__,
        conda_env_version=conda_env_version,
        conda_build_version=conda_build_version,
        root_prefix=root_dir,
        root_writable=root_writable,
        pkgs_dirs=pkgs_dirs,
        envs_dirs=envs_dirs,
        default_prefix=default_prefix,
        channels=channels,
        rc_path=rc_path,
        user_rc_path=user_rc_path,
        sys_rc_path=sys_rc_path,
        is_foreign=bool(foreign),
        offline=offline,
        envs=[],
        python_version='.'.join(map(str, sys.version_info)),
        requests_version=requests_version,
    )

    if args.unsafe_channels:
        # --unsafe-channels: show raw URLs (tokens included) and exit.
        if not args.json:
            print("\n".join(info_dict["channels"]))
        else:
            print(json.dumps({"channels": info_dict["channels"]}))
        return 0
    else:
        # Otherwise scrub any binstar tokens from the displayed URLs.
        info_dict['channels'] = [hide_binstar_tokens(c) for c in
                                 info_dict['channels']]

    if args.all or args.json:
        for option in options:
            setattr(args, option, True)

    # Default (no sub-option) or --all: print the summary block.
    if args.all or all(not getattr(args, opt) for opt in options):
        for key in 'pkgs_dirs', 'envs_dirs', 'channels':
            # Continuation lines align under the 24-column label field.
            info_dict['_' + key] = ('\n' + 24 * ' ').join(info_dict[key])
        info_dict['_rtwro'] = ('writable' if info_dict['root_writable'] else
                               'read only')
        print("""\
Current conda install:

             platform : %(platform)s
        conda version : %(conda_version)s
    conda-env version : %(conda_env_version)s
  conda-build version : %(conda_build_version)s
       python version : %(python_version)s
     requests version : %(requests_version)s
     root environment : %(root_prefix)s  (%(_rtwro)s)
  default environment : %(default_prefix)s
     envs directories : %(_envs_dirs)s
        package cache : %(_pkgs_dirs)s
         channel URLs : %(_channels)s
          config file : %(rc_path)s
         offline mode : %(offline)s
    is foreign system : %(is_foreign)s
""" % info_dict)

    if args.envs:
        handle_envs_list(info_dict['envs'], not args.json)

    if args.system and not args.json:
        from conda.cli.find_commands import find_commands, find_executable
        print("sys.version: %s..." % (sys.version[:40]))
        print("sys.prefix: %s" % sys.prefix)
        print("sys.executable: %s" % sys.executable)
        print("conda location: %s" % dirname(conda.__file__))
        for cmd in sorted(set(find_commands() + ['build'])):
            print("conda-%s: %s" % (cmd, find_executable('conda-' + cmd)))
        print("user site dirs: ", end='')
        site_dirs = get_user_site()
        if site_dirs:
            print(site_dirs[0])
        else:
            print()
        for site_dir in site_dirs[1:]:
            print('    %s' % site_dir)
        print()

        evars = ['PATH', 'PYTHONPATH', 'PYTHONHOME', 'CONDA_DEFAULT_ENV',
                 'CIO_TEST', 'CONDA_ENVS_PATH']
        if platform == 'linux':
            evars.append('LD_LIBRARY_PATH')
        elif platform == 'osx':
            evars.append('DYLD_LIBRARY_PATH')
        for ev in sorted(evars):
            print("%s: %s" % (ev, os.getenv(ev, '<not set>')))
        print()

    if args.license and not args.json:
        try:
            from _license import show_info
            show_info()
        except ImportError:
            print("""\
WARNING: could not import _license.show_info
# try:
# $ conda install -n root _license""")

    if args.json:
        stdout_json(info_dict)
def execute(args, parser):
    """Implement `conda info` (context-based variant): print root prefix,
    package details, or a summary of the installation, text or JSON."""
    import os
    from os.path import dirname

    import conda
    from conda.base.context import context
    from conda.models.channel import offline_keep
    from conda.resolve import Resolve
    from conda.api import get_index
    from conda.connection import user_agent

    # --root: print only the root prefix and exit.
    if args.root:
        if context.json:
            stdout_json({'root_prefix': context.root_dir})
        else:
            print(context.root_dir)
        return

    # Explicit package arguments: dump/pretty-print matching index records.
    if args.packages:
        index = get_index()
        r = Resolve(index)
        if context.json:
            stdout_json({
                package: [dump_record(r.index[d])
                          for d in r.get_dists_for_spec(arg2spec(package))]
                for package in args.packages
            })
        else:
            for package in args.packages:
                for dist in r.get_dists_for_spec(arg2spec(package)):
                    pretty_package(dist, r.index[dist])
        return

    options = 'envs', 'system', 'license'

    # Best-effort probes: each guards against missing components and falls
    # back to a descriptive placeholder string.
    try:
        from conda.install import linked_data
        root_pkgs = linked_data(context.root_prefix)
    except:
        root_pkgs = None

    try:
        import requests
        requests_version = requests.__version__
    except ImportError:
        requests_version = "could not import"
    except Exception as e:
        requests_version = "Error %s" % e

    try:
        import conda_env
        conda_env_version = conda_env.__version__
    except:
        # Fall back to the version recorded in the root env's packages.
        try:
            cenv = [p for p in itervalues(root_pkgs)
                    if p['name'] == 'conda-env']
            conda_env_version = cenv[0]['version']
        except:
            conda_env_version = "not installed"

    try:
        import conda_build
    except ImportError:
        conda_build_version = "not installed"
    except Exception as e:
        conda_build_version = "Error %s" % e
    else:
        conda_build_version = conda_build.__version__

    channels = context.channels
    if args.unsafe_channels:
        # --unsafe-channels: show channels without masking tokens; exit.
        if not context.json:
            print("\n".join(channels))
        else:
            print(json.dumps({"channels": channels}))
        return 0

    channels = list(prioritize_channels(channels).keys())
    if not context.json:
        # Annotate channels that would be skipped in offline mode.
        channels = [c + ('' if offline_keep(c) else '  (offline)')
                    for c in channels]
    channels = [mask_anaconda_token(c) for c in channels]

    info_dict = dict(
        platform=context.subdir,
        conda_version=conda.__version__,
        conda_env_version=conda_env_version,
        conda_build_version=conda_build_version,
        root_prefix=context.root_dir,
        conda_prefix=context.conda_prefix,
        conda_private=context.conda_private,
        root_writable=context.root_writable,
        pkgs_dirs=context.pkgs_dirs,
        envs_dirs=context.envs_dirs,
        default_prefix=context.default_prefix,
        channels=channels,
        rc_path=rc_path,
        user_rc_path=user_rc_path,
        sys_rc_path=sys_rc_path,
        # is_foreign=bool(foreign),
        offline=context.offline,
        envs=[],
        python_version='.'.join(map(str, sys.version_info)),
        requests_version=requests_version,
        user_agent=user_agent,
    )

    if not on_win:
        # POSIX only: report effective user/group ids.
        info_dict['UID'] = os.geteuid()
        info_dict['GID'] = os.getegid()

    if args.all or context.json:
        for option in options:
            setattr(args, option, True)

    # Default (no sub-option) or --all, text mode: print the summary block.
    if (args.all or all(not getattr(args, opt)
                        for opt in options)) and not context.json:
        for key in 'pkgs_dirs', 'envs_dirs', 'channels':
            # Continuation lines align under the 26-column label field.
            info_dict['_' + key] = ('\n' + 26 * ' ').join(info_dict[key])
        info_dict['_rtwro'] = ('writable' if info_dict['root_writable']
                               else 'read only')
        print("""\
Current conda install:

               platform : %(platform)s
          conda version : %(conda_version)s
       conda is private : %(conda_private)s
      conda-env version : %(conda_env_version)s
    conda-build version : %(conda_build_version)s
         python version : %(python_version)s
       requests version : %(requests_version)s
       root environment : %(root_prefix)s  (%(_rtwro)s)
    default environment : %(default_prefix)s
       envs directories : %(_envs_dirs)s
          package cache : %(_pkgs_dirs)s
           channel URLs : %(_channels)s
            config file : %(rc_path)s
           offline mode : %(offline)s
             user-agent : %(user_agent)s\
""" % info_dict)
        if not on_win:
            print("""\
                UID:GID : %(UID)s:%(GID)s
""" % info_dict)
        else:
            print()

    if args.envs:
        handle_envs_list(info_dict['envs'], not context.json)

    if args.system:
        from conda.cli.find_commands import find_commands, find_executable
        site_dirs = get_user_site()
        evars = ['PATH', 'PYTHONPATH', 'PYTHONHOME', 'CONDA_DEFAULT_ENV',
                 'CIO_TEST', 'CONDA_ENVS_PATH']
        if context.platform == 'linux':
            evars.append('LD_LIBRARY_PATH')
        elif context.platform == 'osx':
            evars.append('DYLD_LIBRARY_PATH')

        if context.json:
            info_dict['sys.version'] = sys.version
            info_dict['sys.prefix'] = sys.prefix
            info_dict['sys.executable'] = sys.executable
            info_dict['site_dirs'] = get_user_site()
            info_dict['env_vars'] = {ev: os.getenv(ev, '<not set>')
                                     for ev in evars}
        else:
            print("sys.version: %s..." % (sys.version[:40]))
            print("sys.prefix: %s" % sys.prefix)
            print("sys.executable: %s" % sys.executable)
            print("conda location: %s" % dirname(conda.__file__))
            for cmd in sorted(set(find_commands() + ['build'])):
                print("conda-%s: %s" % (cmd, find_executable('conda-' + cmd)))
            print("user site dirs: ", end='')
            if site_dirs:
                print(site_dirs[0])
            else:
                print()
            for site_dir in site_dirs[1:]:
                print('                %s' % site_dir)
            print()

            for ev in sorted(evars):
                print("%s: %s" % (ev, os.getenv(ev, '<not set>')))
            print()

    if args.license and not context.json:
        try:
            from _license import show_info
            show_info()
        except ImportError:
            print("""\
WARNING: could not import _license.show_info
# try:
# $ conda install -n root _license""")

    if context.json:
        stdout_json(info_dict)
def clone_env(prefix1, prefix2, verbose=True, quiet=False, fetch_args=None):
    """
    clone existing prefix1 into new prefix2

    Packages are re-fetched from their recorded URLs; untracked files are
    copied directly (with prefix1 rewritten to prefix2 in text files).
    Returns (actions, untracked_files).
    """
    untracked_files = untracked(prefix1)

    # Discard conda and any package that depends on it.
    # Fixed-point loop: keep sweeping until no new dependents are found.
    # NOTE(review): `filter` shadows the Python builtin of the same name.
    drecs = install.linked_data(prefix1)
    filter = {}
    found = True
    while found:
        found = False
        for dist, info in iteritems(drecs):
            name = info['name']
            if name in filter:
                continue
            if name == 'conda':
                filter['conda'] = dist
                found = True
                break
            for dep in info.get('depends', []):
                if MatchSpec(dep).name in filter:
                    filter[name] = dist
                    found = True
    if not quiet and filter:
        print('The following packages cannot be cloned out of the root environment:')
        for pkg in itervalues(filter):
            print(' - ' + pkg)

    # Assemble the URL and channel list
    urls = {}
    index = {}
    for dist, info in iteritems(drecs):
        if info['name'] in filter:
            continue
        url = info.get('url')
        if url is None:
            # Without a recorded URL the package cannot be re-fetched.
            sys.exit('Error: no URL found for package: %s' % dist)
        # NOTE(review): schannel is extracted here but never used below --
        # possibly leftover; confirm.
        _, schannel = url_channel(url)
        index[dist + '.tar.bz2'] = info
        urls[dist] = url

    # Sort packages so dependencies are installed before dependents.
    r = Resolve(index)
    dists = r.dependency_sort(urls.keys())
    urls = [urls[d] for d in dists]

    if verbose:
        print('Packages: %d' % len(dists))
        print('Files: %d' % len(untracked_files))

    for f in untracked_files:
        src = join(prefix1, f)
        dst = join(prefix2, f)
        dst_dir = dirname(dst)
        # A link or plain file where a directory is needed must be removed.
        if islink(dst_dir) or isfile(dst_dir):
            os.unlink(dst_dir)
        if not isdir(dst_dir):
            os.makedirs(dst_dir)
        if islink(src):
            # Recreate symlinks as symlinks rather than copying the target.
            os.symlink(os.readlink(src), dst)
            continue

        try:
            with open(src, 'rb') as fi:
                data = fi.read()
        except IOError:
            # Unreadable source file: skip it silently (best-effort copy).
            continue

        try:
            # Text files: rewrite embedded occurrences of the old prefix.
            s = data.decode('utf-8')
            s = s.replace(prefix1, prefix2)
            data = s.encode('utf-8')
        except UnicodeDecodeError:  # data is binary
            pass

        with open(dst, 'wb') as fo:
            fo.write(data)
        # Preserve permissions and timestamps on the copy.
        shutil.copystat(src, dst)

    actions = explicit(urls, prefix2, verbose=not quiet,
                       force_extract=False, fetch_args=fetch_args)
    return actions, untracked_files
def test_tarball_install_and_bad_metadata(self):
    """Integration test: installing from tarballs (cache paths, absolute and
    relative paths, local channels, conda-bld channel) plus handling of a
    corrupt conda-meta entry.  Covers regressions #2886, #2626, #2812, #2970
    and #2599."""
    with make_temp_env("python flask=0.10.1") as prefix:
        assert_package_is_installed(prefix, 'flask-0.10.1')
        flask_data = [p for p in itervalues(linked_data(prefix))
                      if p['name'] == 'flask'][0]
        run_command(Commands.REMOVE, prefix, 'flask')
        assert not package_is_installed(prefix, 'flask-0.10.1')
        assert_package_is_installed(prefix, 'python')

        flask_fname = flask_data['fn']
        tar_old_path = join(context.pkgs_dirs[0], flask_fname)

        # regression test for #2886 (part 1 of 2)
        # install tarball from package cache, default channel
        run_command(Commands.INSTALL, prefix, tar_old_path)
        assert_package_is_installed(prefix, 'flask-0.')

        # regression test for #2626
        # install tarball with full path, outside channel
        tar_new_path = join(prefix, flask_fname)
        copyfile(tar_old_path, tar_new_path)
        run_command(Commands.INSTALL, prefix, tar_new_path)
        assert_package_is_installed(prefix, 'flask-0')

        # regression test for #2626
        # install tarball with relative path, outside channel
        run_command(Commands.REMOVE, prefix, 'flask')
        assert not package_is_installed(prefix, 'flask-0.10.1')
        tar_new_path = relpath(tar_new_path)
        run_command(Commands.INSTALL, prefix, tar_new_path)
        assert_package_is_installed(prefix, 'flask-0.')

        # Regression test for 2812
        # install from local channel
        # Strip channel-identifying fields so the record can serve as fresh
        # repodata for a scratch local channel.
        for field in ('url', 'channel', 'schannel'):
            del flask_data[field]
        repodata = {'info': {}, 'packages': {flask_fname: flask_data}}
        with make_temp_env() as channel:
            subchan = join(channel, context.subdir)
            channel = path_to_url(channel)
            os.makedirs(subchan)
            tar_new_path = join(subchan, flask_fname)
            copyfile(tar_old_path, tar_new_path)
            with bz2.BZ2File(join(subchan, 'repodata.json.bz2'), 'w') as f:
                f.write(json.dumps(repodata).encode('utf-8'))
            run_command(Commands.INSTALL, prefix, '-c', channel, 'flask')
            assert_package_is_installed(prefix, channel + '::' + 'flask-')

            run_command(Commands.REMOVE, prefix, 'flask')
            assert not package_is_installed(prefix, 'flask-0')

            # Regression test for 2970
            # install from build channel as a tarball
            conda_bld = join(sys.prefix, 'conda-bld')
            conda_bld_sub = join(conda_bld, context.subdir)
            tar_bld_path = join(conda_bld_sub, flask_fname)
            if os.path.exists(conda_bld):
                try:
                    os.rename(tar_new_path, tar_bld_path)
                except OSError:
                    # best-effort: tolerate cross-device or existing-file failures
                    pass
            else:
                os.makedirs(conda_bld)
                os.rename(subchan, conda_bld_sub)
            run_command(Commands.INSTALL, prefix, tar_bld_path)
            assert_package_is_installed(prefix, 'flask-')

        # regression test for #2886 (part 2 of 2)
        # install tarball from package cache, local channel
        run_command(Commands.REMOVE, prefix, 'flask')
        assert not package_is_installed(prefix, 'flask-0')
        run_command(Commands.INSTALL, prefix, tar_old_path)
        # The last install was from the `local::` channel
        assert_package_is_installed(prefix, 'flask-')

        # regression test for #2599
        # A stale/corrupt conda-meta JSON must not make the package appear installed.
        linked_data_.clear()
        flask_metadata = glob(join(prefix, 'conda-meta',
                                   flask_fname[:-8] + '.json'))[-1]
        bad_metadata = join(prefix, 'conda-meta', 'flask.json')
        copyfile(flask_metadata, bad_metadata)
        assert not package_is_installed(prefix, 'flask', exact=True)
        assert_package_is_installed(prefix, 'flask-0.')
def execute(args, parser):
    """Implements `conda remove` (args.json-era variant): validate arguments,
    build the package index, compute removal actions, confirm, and execute.

    Raises CondaValueError / CondaEnvironmentError / PackageNotFoundError on
    invalid usage; writes removed specs to the prefix history file.
    """
    import conda.plan as plan
    import conda.instructions as inst
    from conda.install import rm_rf, linked_data

    if not (args.all or args.package_names):
        raise CondaValueError('no package names supplied,\n'
                              ' try "conda remove -h" for more details',
                              args.json)

    prefix = get_prefix(args)
    # Refuse to remove the environment we are currently running from
    if args.all and prefix == default_prefix:
        msg = "cannot remove current environment. deactivate and run conda remove again"
        raise CondaEnvironmentError(msg)
    check_write('remove', prefix, json=args.json)
    ensure_use_local(args)
    ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    if not args.features and args.all:
        # Removing everything: the linked packages themselves are a sufficient
        # index — no need to hit the channels.
        index = linked_data(prefix)
        index = {dist + '.tar.bz2': info for dist, info in iteritems(index)}
    else:
        index = get_index_trap(channel_urls=channel_urls,
                               prepend=not args.override_channels,
                               use_local=args.use_local,
                               use_cache=args.use_index_cache,
                               json=args.json,
                               offline=args.offline,
                               prefix=prefix)

    specs = None
    if args.features:
        features = set(args.package_names)
        actions = plan.remove_features_actions(prefix, index, features)
    elif args.all:
        if plan.is_root_prefix(prefix):
            raise CondaEnvironmentError('cannot remove root environment,\n'
                                        ' add -n NAME or -p PREFIX option',
                                        args.json)
        actions = {inst.PREFIX: prefix}
        # fkey ends in '.tar.bz2'; strip the 8-char suffix to get the dist name
        for fkey in sorted(iterkeys(index)):
            plan.add_unlink(actions, fkey[:-8])
    else:
        specs = specs_from_args(args.package_names)
        # Protect conda's own dependencies in the root environment
        if (plan.is_root_prefix(prefix) and
                names_in_specs(root_no_rm, specs)):
            raise CondaEnvironmentError('cannot remove %s from root environment' %
                                        ', '.join(root_no_rm), args.json)
        actions = plan.remove_actions(prefix, specs, index=index,
                                      force=args.force, pinned=args.pinned)

    if plan.nothing_to_do(actions):
        if args.all:
            # Nothing linked, but --all still means "delete the prefix"
            rm_rf(prefix)
            if args.json:
                stdout_json({
                    'success': True,
                    'actions': actions
                })
            return
        raise PackageNotFoundError('no packages found to remove from '
                                   'environment: %s' % prefix, args.json)

    if not args.json:
        print()
        print("Package plan for package removal in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if args.json and args.dry_run:
        stdout_json({
            'success': True,
            'dry_run': True,
            'actions': actions
        })
        return

    if not args.json:
        confirm_yn(args)

    if args.json and not args.quiet:
        with json_progress_bars():
            plan.execute_actions(actions, index, verbose=not args.quiet)
    else:
        plan.execute_actions(actions, index, verbose=not args.quiet)

    if specs:
        # Record the removal in the environment's history file (best-effort)
        try:
            with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                f.write('# remove specs: %s\n' % specs)
        except IOError as e:
            if e.errno == errno.EACCES:
                log.debug("Can't write the history file")
            else:
                raise

    if args.all:
        rm_rf(prefix)

    if args.json:
        stdout_json({
            'success': True,
            'actions': actions
        })
def clone_env(prefix1, prefix2, verbose=True, quiet=False, fetch_args=None):
    """
    clone existing prefix1 into new prefix2

    Packages are re-installed via `explicit()` from their recorded URLs;
    untracked files are copied directly (with prefix paths rewritten in
    text files).  Returns (actions, untracked_files).
    """
    untracked_files = untracked(prefix1)

    # Discard conda and any package that depends on it.
    # Iterate to a fixed point so transitive dependents are excluded too.
    # NOTE(review): 'filter' shadows the builtin of the same name.
    drecs = install.linked_data(prefix1)
    filter = {}
    found = True
    while found:
        found = False
        for dist, info in iteritems(drecs):
            name = info['name']
            if name in filter:
                continue
            if name == 'conda':
                filter['conda'] = dist
                found = True
                break
            for dep in info.get('depends', []):
                if MatchSpec(dep).name in filter:
                    filter[name] = dist
                    found = True
    if not quiet and filter:
        print(
            'The following packages cannot be cloned out of the root environment:'
        )
        for pkg in itervalues(filter):
            print(' - ' + pkg)

    # Assemble the URL and channel list
    urls = {}
    index = {}
    for dist, info in iteritems(drecs):
        if info['name'] in filter:
            continue
        url = info.get('url')
        if url is None:
            # A package without a recorded URL cannot be re-fetched
            sys.exit('Error: no URL found for package: %s' % dist)
        _, schannel = url_channel(url)
        index[dist + '.tar.bz2'] = info
        urls[dist] = url

    # Topologically order the URLs so dependencies install first
    r = Resolve(index)
    dists = r.dependency_sort(urls.keys())
    urls = [urls[d] for d in dists]

    if verbose:
        print('Packages: %d' % len(dists))
        print('Files: %d' % len(untracked_files))

    for f in untracked_files:
        src = join(prefix1, f)
        dst = join(prefix2, f)
        dst_dir = dirname(dst)
        # Clear any non-directory occupying the destination directory path
        if islink(dst_dir) or isfile(dst_dir):
            os.unlink(dst_dir)
        if not isdir(dst_dir):
            os.makedirs(dst_dir)
        if islink(src):
            # Recreate symlinks rather than copying their targets
            os.symlink(os.readlink(src), dst)
            continue
        try:
            with open(src, 'rb') as fi:
                data = fi.read()
        except IOError:
            # Skip unreadable files; cloning is best-effort
            continue
        try:
            s = data.decode('utf-8')
            # Rewrite embedded absolute paths pointing at the old prefix
            s = s.replace(prefix1, prefix2)
            data = s.encode('utf-8')
        except UnicodeDecodeError:  # data is binary
            pass
        with open(dst, 'wb') as fo:
            fo.write(data)
        shutil.copystat(src, dst)

    actions = explicit(urls, prefix2, verbose=not quiet, force_extract=False,
                       fetch_args=fetch_args)
    return actions, untracked_files
def execute(args, parser):
    """Implements `conda remove` (context-era variant): validate arguments,
    build the index, compute and confirm removal actions, then execute them.

    Raises CondaValueError / CondaEnvironmentError / PackageNotFoundError on
    invalid usage; appends removed specs to the prefix history file.
    """
    import conda.plan as plan
    import conda.instructions as inst
    from conda.install import linked_data
    from conda.common.disk import rm_rf

    if not (args.all or args.package_names):
        raise CondaValueError('no package names supplied,\n'
                              ' try "conda remove -h" for more details')

    prefix = context.prefix_w_legacy_search
    # Refuse to remove the environment we are currently running from
    if args.all and prefix == context.default_prefix:
        msg = "cannot remove current environment. deactivate and run conda remove again"
        raise CondaEnvironmentError(msg)
    check_write('remove', prefix, json=context.json)
    ensure_use_local(args)
    ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    if not args.features and args.all:
        # Removing everything: the linked packages are a sufficient index
        index = linked_data(prefix)
        index = {dist + '.tar.bz2': info for dist, info in iteritems(index)}
    else:
        index = get_index(channel_urls=channel_urls,
                          prepend=not args.override_channels,
                          use_local=args.use_local,
                          use_cache=args.use_index_cache,
                          prefix=prefix)

    specs = None
    if args.features:
        features = set(args.package_names)
        actions = plan.remove_features_actions(prefix, index, features)
    elif args.all:
        if plan.is_root_prefix(prefix):
            raise CondaEnvironmentError(
                'cannot remove root environment,\n'
                ' add -n NAME or -p PREFIX option')
        actions = {inst.PREFIX: prefix}
        # fkey ends in '.tar.bz2'; strip the 8-char suffix to get the dist name
        for fkey in sorted(iterkeys(index)):
            plan.add_unlink(actions, fkey[:-8])
    else:
        specs = specs_from_args(args.package_names)
        # Protect conda's own dependencies in the root environment
        if (context.conda_in_root and plan.is_root_prefix(prefix) and
                names_in_specs(root_no_rm, specs)):
            raise CondaEnvironmentError(
                'cannot remove %s from root environment' %
                ', '.join(root_no_rm))
        actions = plan.remove_actions(prefix, specs, index=index,
                                      force=args.force, pinned=args.pinned)

    delete_trash()
    if plan.nothing_to_do(actions):
        if args.all:
            # Nothing linked, but --all still means "delete the prefix"
            print()
            print("Remove all packages in environment %s:\n" % prefix)
            if not context.json:
                confirm_yn(args)
            rm_rf(prefix)
            if context.json:
                stdout_json({'success': True, 'actions': actions})
            return
        raise PackageNotFoundError(
            '', 'no packages found to remove from '
            'environment: %s' % prefix)

    if not context.json:
        print()
        print("Package plan for package removal in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if context.json and args.dry_run:
        stdout_json({'success': True, 'dry_run': True, 'actions': actions})
        return

    if not context.json:
        confirm_yn(args)

    if context.json and not context.quiet:
        with json_progress_bars():
            plan.execute_actions(actions, index, verbose=not context.quiet)
    else:
        plan.execute_actions(actions, index, verbose=not context.quiet)

    if specs:
        # Record the removal in the environment's history file (best-effort)
        try:
            with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                f.write('# remove specs: %s\n' % specs)
        except IOError as e:
            if e.errno == errno.EACCES:
                log.debug("Can't write the history file")
            else:
                raise

    if args.all:
        rm_rf(prefix)

    if context.json:
        stdout_json({'success': True, 'actions': actions})
def execute(args, parser):
    """Implements `conda info` (context-era variant).

    Handles --root, package queries, --unsafe-channels, and the default
    report of platform/version/path information; emits JSON when
    context.json is set.

    Fix: the three formerly-bare ``except:`` clauses now catch
    ``Exception`` so that SystemExit/KeyboardInterrupt are no longer
    swallowed while probing optional packages.
    """
    import os
    from os.path import dirname

    import conda
    from conda.base.context import context
    from conda.models.channel import offline_keep
    from conda.resolve import Resolve
    from conda.api import get_index
    from conda.connection import user_agent

    if args.root:
        if context.json:
            stdout_json({'root_prefix': context.root_prefix})
        else:
            print(context.root_prefix)
        return

    if args.packages:
        # Query mode: dump matching records for each requested spec
        index = get_index()
        r = Resolve(index)
        if context.json:
            stdout_json({
                package: [dump_record(r.index[d])
                          for d in r.get_dists_for_spec(arg2spec(package))]
                for package in args.packages
            })
        else:
            for package in args.packages:
                for dist in r.get_dists_for_spec(arg2spec(package)):
                    pretty_package(dist, r.index[dist])
        return

    options = 'envs', 'system', 'license'

    # Best-effort probes: these report versions of optional components, so
    # failures are tolerated — but only Exception, not SystemExit/KeyboardInterrupt.
    try:
        from conda.install import linked_data
        root_pkgs = linked_data(context.root_prefix)
    except Exception:
        root_pkgs = None

    try:
        import requests
        requests_version = requests.__version__
    except ImportError:
        requests_version = "could not import"
    except Exception as e:
        requests_version = "Error %s" % e

    try:
        import conda_env
        conda_env_version = conda_env.__version__
    except Exception:
        try:
            # Fall back to the linked-package record in the root prefix
            cenv = [p for p in itervalues(root_pkgs) if p['name'] == 'conda-env']
            conda_env_version = cenv[0]['version']
        except Exception:
            conda_env_version = "not installed"

    try:
        import conda_build
    except ImportError:
        conda_build_version = "not installed"
    except Exception as e:
        conda_build_version = "Error %s" % e
    else:
        conda_build_version = conda_build.__version__

    channels = context.channels
    if args.unsafe_channels:
        # Print channels with tokens intact and stop
        if not context.json:
            print("\n".join(channels))
        else:
            print(json.dumps({"channels": channels}))
        return 0

    channels = list(prioritize_channels(channels).keys())
    if not context.json:
        channels = [c + ('' if offline_keep(c) else ' (offline)')
                    for c in channels]
    channels = [mask_anaconda_token(c) for c in channels]

    info_dict = dict(
        platform=context.subdir,
        conda_version=conda.__version__,
        conda_env_version=conda_env_version,
        conda_build_version=conda_build_version,
        root_prefix=context.root_prefix,
        conda_prefix=context.conda_prefix,
        conda_private=context.conda_private,
        root_writable=context.root_writable,
        pkgs_dirs=context.pkgs_dirs,
        envs_dirs=context.envs_dirs,
        default_prefix=context.default_prefix,
        channels=channels,
        rc_path=rc_path,
        user_rc_path=user_rc_path,
        sys_rc_path=sys_rc_path,
        # is_foreign=bool(foreign),
        offline=context.offline,
        envs=[],
        python_version='.'.join(map(str, sys.version_info)),
        requests_version=requests_version,
        user_agent=user_agent,
    )
    if not on_win:
        info_dict['UID'] = os.geteuid()
        info_dict['GID'] = os.getegid()

    # JSON output implies reporting every section
    if args.all or context.json:
        for option in options:
            setattr(args, option, True)

    if (args.all or all(not getattr(args, opt) for opt in options)) and not context.json:
        # Continuation lines of multi-valued fields align under the values
        for key in 'pkgs_dirs', 'envs_dirs', 'channels':
            info_dict['_' + key] = ('\n' + 26 * ' ').join(info_dict[key])
        info_dict['_rtwro'] = ('writable' if info_dict['root_writable']
                               else 'read only')
        print("""\
Current conda install:

               platform : %(platform)s
          conda version : %(conda_version)s
       conda is private : %(conda_private)s
      conda-env version : %(conda_env_version)s
    conda-build version : %(conda_build_version)s
         python version : %(python_version)s
       requests version : %(requests_version)s
       root environment : %(root_prefix)s  (%(_rtwro)s)
    default environment : %(default_prefix)s
       envs directories : %(_envs_dirs)s
          package cache : %(_pkgs_dirs)s
           channel URLs : %(_channels)s
            config file : %(rc_path)s
           offline mode : %(offline)s
             user-agent : %(user_agent)s\
""" % info_dict)
        if not on_win:
            print("""\
                UID:GID : %(UID)s:%(GID)s
""" % info_dict)
        else:
            print()

    if args.envs:
        handle_envs_list(info_dict['envs'], not context.json)

    if args.system:
        from conda.cli.find_commands import find_commands, find_executable
        site_dirs = get_user_site()
        evars = ['PATH', 'PYTHONPATH', 'PYTHONHOME', 'CONDA_DEFAULT_ENV',
                 'CIO_TEST', 'CONDA_ENVS_PATH']
        if context.platform == 'linux':
            evars.append('LD_LIBRARY_PATH')
        elif context.platform == 'osx':
            evars.append('DYLD_LIBRARY_PATH')
        if context.json:
            info_dict['sys.version'] = sys.version
            info_dict['sys.prefix'] = sys.prefix
            info_dict['sys.executable'] = sys.executable
            info_dict['site_dirs'] = get_user_site()
            info_dict['env_vars'] = {ev: os.getenv(ev, '<not set>')
                                     for ev in evars}
        else:
            print("sys.version: %s..." % (sys.version[:40]))
            print("sys.prefix: %s" % sys.prefix)
            print("sys.executable: %s" % sys.executable)
            print("conda location: %s" % dirname(conda.__file__))
            for cmd in sorted(set(find_commands() + ['build'])):
                print("conda-%s: %s" % (cmd, find_executable('conda-' + cmd)))
            print("user site dirs: ", end='')
            if site_dirs:
                print(site_dirs[0])
            else:
                print()
            for site_dir in site_dirs[1:]:
                print('                %s' % site_dir)
            print()
            for ev in sorted(evars):
                print("%s: %s" % (ev, os.getenv(ev, '<not set>')))
            print()

    if args.license and not context.json:
        try:
            from _license import show_info
            show_info()
        except ImportError:
            print("""\
WARNING: could not import _license.show_info
# try:
# $ conda install -n root _license""")

    if context.json:
        stdout_json(info_dict)
def execute(args, parser):
    """Implements `conda info` (config-era variant).

    Handles --root, package queries, --unsafe-channels, and the default
    report of platform/version/path information; emits JSON when
    args.json is set.

    Fix: the two formerly-bare ``except:`` clauses now catch ``Exception``
    so that SystemExit/KeyboardInterrupt are no longer swallowed while
    probing optional packages.
    """
    import os
    from os.path import dirname

    import conda
    from conda.config import (root_dir, get_channel_urls, subdir, pkgs_dirs,
                              root_writable, envs_dirs, default_prefix,
                              rc_path, user_rc_path, sys_rc_path, foreign,
                              hide_binstar_tokens, platform, offline)
    from conda.resolve import Resolve
    from conda.api import get_index

    if args.root:
        if args.json:
            stdout_json({'root_prefix': root_dir})
        else:
            print(root_dir)
        return

    if args.packages:
        # Query mode: dump matching package records for each requested spec
        index = get_index()
        r = Resolve(index)
        if args.json:
            stdout_json({
                package: [p._asdict()
                          for p in sorted(r.get_pkgs(arg2spec(package)))]
                for package in args.packages
            })
        else:
            for package in args.packages:
                versions = r.get_pkgs(arg2spec(package))
                for pkg in sorted(versions):
                    pretty_package(pkg)
        return

    options = 'envs', 'system', 'license'

    # Best-effort probes: failures are tolerated, but only Exception —
    # not SystemExit/KeyboardInterrupt.
    try:
        from conda.install import linked_data
        root_pkgs = linked_data(sys.prefix)
    except Exception:
        root_pkgs = None

    try:
        import requests
        requests_version = requests.__version__
    except ImportError:
        requests_version = "could not import"
    except Exception as e:
        requests_version = "Error %s" % e

    try:
        # Read the conda-env version from the linked-package record
        cenv = [p for p in itervalues(root_pkgs) if p['name'] == 'conda-env']
        conda_env_version = cenv[0]['version']
    except Exception:
        conda_env_version = "not installed"

    try:
        import conda_build
    except ImportError:
        conda_build_version = "not installed"
    except Exception as e:
        conda_build_version = "Error %s" % e
    else:
        conda_build_version = conda_build.__version__

    channels = get_channel_urls(offline=offline)

    info_dict = dict(
        platform=subdir,
        conda_version=conda.__version__,
        conda_env_version=conda_env_version,
        conda_build_version=conda_build_version,
        root_prefix=root_dir,
        root_writable=root_writable,
        pkgs_dirs=pkgs_dirs,
        envs_dirs=envs_dirs,
        default_prefix=default_prefix,
        channels=channels,
        rc_path=rc_path,
        user_rc_path=user_rc_path,
        sys_rc_path=sys_rc_path,
        is_foreign=bool(foreign),
        offline=offline,
        envs=[],
        python_version='.'.join(map(str, sys.version_info)),
        requests_version=requests_version,
    )

    if args.unsafe_channels:
        # Print channels with tokens intact and stop
        if not args.json:
            print("\n".join(info_dict["channels"]))
        else:
            print(json.dumps({"channels": info_dict["channels"]}))
        return 0
    else:
        info_dict['channels'] = [hide_binstar_tokens(c)
                                 for c in info_dict['channels']]

    # JSON output implies reporting every section
    if args.all or args.json:
        for option in options:
            setattr(args, option, True)

    if args.all or all(not getattr(args, opt) for opt in options):
        # Continuation lines of multi-valued fields align under the values
        for key in 'pkgs_dirs', 'envs_dirs', 'channels':
            info_dict['_' + key] = ('\n' + 24 * ' ').join(info_dict[key])
        info_dict['_rtwro'] = ('writable' if info_dict['root_writable']
                               else 'read only')
        print("""\
Current conda install:

             platform : %(platform)s
        conda version : %(conda_version)s
    conda-env version : %(conda_env_version)s
  conda-build version : %(conda_build_version)s
       python version : %(python_version)s
     requests version : %(requests_version)s
     root environment : %(root_prefix)s  (%(_rtwro)s)
  default environment : %(default_prefix)s
     envs directories : %(_envs_dirs)s
        package cache : %(_pkgs_dirs)s
         channel URLs : %(_channels)s
          config file : %(rc_path)s
         offline mode : %(offline)s
    is foreign system : %(is_foreign)s
""" % info_dict)

    if args.envs:
        handle_envs_list(info_dict['envs'], not args.json)

    if args.system and not args.json:
        from conda.cli.find_commands import find_commands, find_executable
        print("sys.version: %s..." % (sys.version[:40]))
        print("sys.prefix: %s" % sys.prefix)
        print("sys.executable: %s" % sys.executable)
        print("conda location: %s" % dirname(conda.__file__))
        for cmd in sorted(set(find_commands() + ['build'])):
            print("conda-%s: %s" % (cmd, find_executable('conda-' + cmd)))
        print("user site dirs: ", end='')
        site_dirs = get_user_site()
        if site_dirs:
            print(site_dirs[0])
        else:
            print()
        for site_dir in site_dirs[1:]:
            print('                %s' % site_dir)
        print()
        evars = ['PATH', 'PYTHONPATH', 'PYTHONHOME', 'CONDA_DEFAULT_ENV',
                 'CIO_TEST', 'CONDA_ENVS_PATH']
        if platform == 'linux':
            evars.append('LD_LIBRARY_PATH')
        elif platform == 'osx':
            evars.append('DYLD_LIBRARY_PATH')
        for ev in sorted(evars):
            print("%s: %s" % (ev, os.getenv(ev, '<not set>')))
        print()

    if args.license and not args.json:
        try:
            from _license import show_info
            show_info()
        except ImportError:
            print("""\
WARNING: could not import _license.show_info
# try:
# $ conda install -n root _license""")

    if args.json:
        stdout_json(info_dict)
def execute(args, parser):
    """Implements `conda inspect` subcommands: `channels` (installability
    test), `linkages` (dynamic-library dependency report, Linux/OS X only)
    and `objects` (object-file/rpath report, OS X only)."""
    if not args.subcommand:
        parser.print_help()
        # NOTE(review): bare builtin exit(), not sys.exit()
        exit()

    if args.subcommand == 'channels':
        if not args.test_installable:
            parser.error("At least one option (--test-installable) is required.")
        else:
            # Exit status 0 iff the channel is fully installable
            sys.exit(not test_installable(channel=args.channel,
                                          verbose=args.verbose))

    prefix = get_prefix(args)
    # Map package name -> dist string for everything linked into the prefix
    installed = ci.linked_data(prefix)
    installed = {rec['name']: dist for dist, rec in iteritems(installed)}

    if not args.packages and not args.untracked and not args.all:
        parser.error("At least one package or --untracked or --all must be provided")

    if args.all:
        args.packages = sorted(installed.keys())

    if args.untracked:
        # Sentinel "package" representing files not owned by any package
        args.packages.append(untracked_package)

    if args.subcommand == 'linkages':
        pkgmap = {}
        for pkg in args.packages:
            if pkg == untracked_package:
                dist = untracked_package
            elif pkg not in installed:
                sys.exit("Package %s is not installed in %s" % (pkg, prefix))
            else:
                dist = installed[pkg]
            if not sys.platform.startswith(('linux', 'darwin')):
                sys.exit("Error: conda inspect linkages is only implemented "
                         "in Linux and OS X")
            if dist == untracked_package:
                obj_files = get_untracked_obj_files(prefix)
            else:
                obj_files = get_package_obj_files(dist, prefix)
            linkages = get_linkages(obj_files, prefix)
            # depmap: owning package (or 'untracked'/'not found'/'system')
            #         -> list of (lib, path, binary)
            depmap = defaultdict(list)
            pkgmap[pkg] = depmap
            depmap['not found'] = []
            for binary in linkages:
                for lib, path in linkages[binary]:
                    # Resolve @rpath/@loader_path-style entries to real paths
                    path = replace_path(binary, path, prefix) \
                        if path not in {'', 'not found'} else path
                    if path.startswith(prefix):
                        deps = list(which_package(path))
                        if len(deps) > 1:
                            print("Warning: %s comes from multiple "
                                  "packages: %s" % (path, comma_join(deps)),
                                  file=sys.stderr)
                        if not deps:
                            if exists(path):
                                depmap['untracked'].append(
                                    (lib, path.split(prefix + '/', 1)[-1],
                                     binary))
                            else:
                                depmap['not found'].append(
                                    (lib, path.split(prefix + '/', 1)[-1],
                                     binary))
                        for d in deps:
                            depmap[d].append((lib,
                                              path.split(prefix + '/', 1)[-1],
                                              binary))
                    elif path == 'not found':
                        depmap['not found'].append((lib, path, binary))
                    else:
                        # Library outside the prefix: provided by the system
                        depmap['system'].append((lib, path, binary))

        if args.groupby == 'package':
            for pkg in args.packages:
                print(pkg)
                print('-' * len(str(pkg)))
                print()
                print_linkages(pkgmap[pkg], show_files=args.show_files)
        elif args.groupby == 'dependency':
            # {pkg: {dep: [files]}} -> {dep: {pkg: [files]}}
            inverted_map = defaultdict(lambda: defaultdict(list))
            for pkg in pkgmap:
                for dep in pkgmap[pkg]:
                    if pkgmap[pkg][dep]:
                        inverted_map[dep][pkg] = pkgmap[pkg][dep]

            # print system and not found last
            k = sorted(set(inverted_map.keys()) - {'system', 'not found'})
            for dep in k + ['system', 'not found']:
                print(dep)
                print('-' * len(str(dep)))
                print()
                print_linkages(inverted_map[dep], show_files=args.show_files)
        else:
            raise ValueError("Unrecognized groupby: %s" % args.groupby)

    if args.subcommand == 'objects':
        for pkg in args.packages:
            if pkg == untracked_package:
                dist = untracked_package
            elif pkg not in installed:
                sys.exit("Package %s is not installed in %s" % (pkg, prefix))
            else:
                dist = installed[pkg]
            print(pkg)
            print('-' * len(str(pkg)))
            print()
            if not sys.platform.startswith('darwin'):
                sys.exit("Error: conda inspect objects is only implemented "
                         "in OS X")
            if dist == untracked_package:
                obj_files = get_untracked_obj_files(prefix)
            else:
                obj_files = get_package_obj_files(dist, prefix)
            info = []
            # Collect file type and rpath information for each object file
            for f in obj_files:
                f_info = {}
                path = join(prefix, f)
                f_info['filetype'] = human_filetype(path)
                f_info['rpath'] = ':'.join(get_rpaths(path))
                f_info['filename'] = f
                info.append(f_info)
            print_object_info(info, args.groupby)