def list_packages(prefix, installed, regex=None, format="human", show_channel_urls=config.show_channel_urls): res = 1 result = [] for dist in get_packages(installed, regex): res = 0 if format == "canonical": result.append(dist) continue if format == "export": result.append("=".join(dist.rsplit("-", 2))) continue try: # Returns None if no meta-file found (e.g. pip install) info = install.is_linked(prefix, dist) features = set(info.get("features", "").split()) disp = "%(name)-25s %(version)-15s %(build)15s" % info disp += " %s" % common.disp_features(features) if show_channel_urls: disp += " %s" % config.canonical_channel_name(info.get("url")) result.append(disp) except (AttributeError, IOError, KeyError, ValueError) as e: log.debug(str(e)) result.append("%-25s %-15s %15s" % tuple(dist.rsplit("-", 2))) return res, result
def list_packages(prefix, installed, regex=None, format='human', show_channel_urls=show_channel_urls): res = 1 result = [] for dist in get_packages(installed, regex): res = 0 if format == 'canonical': result.append(dist) continue if format == 'export': result.append('='.join(dist2quad(dist)[:3])) continue try: # Returns None if no meta-file found (e.g. pip install) info = install.is_linked(prefix, dist) features = set(info.get('features', '').split()) disp = '%(name)-25s %(version)-15s %(build)15s' % info disp += ' %s' % common.disp_features(features) schannel = info.get('schannel') if show_channel_urls or show_channel_urls is None and schannel != 'defaults': disp += ' %s' % schannel result.append(disp) except (AttributeError, IOError, KeyError, ValueError) as e: log.debug(str(e)) result.append('%-25s %-15s %s' % dist2quad(dist)[:3]) return res, result
def list_packages(prefix, installed, regex=None, format='human'): res = 1 result = [] for dist in get_packages(installed, regex): res = 0 if format == 'canonical': result.append(dist) continue if format == 'export': result.append('='.join(dist.rsplit('-', 2))) continue try: # Returns None if no meta-file found (e.g. pip install) info = install.is_linked(prefix, dist) features = set(info.get('features', '').split()) disp = '%(name)-25s %(version)-15s %(build)15s' % info disp += ' %s' % common.disp_features(features) if config.show_channel_urls: disp += ' %s' % config.canonical_channel_name(info.get('url')) result.append(disp) except (AttributeError, IOError, KeyError, ValueError) as e: log.debug(str(e)) result.append('%-25s %-15s %15s' % tuple(dist.rsplit('-', 2))) return res, result
def list_packages(prefix, regex=None, verbose=True): import re import conda.install as install pat = re.compile(regex, re.I) if regex else None if verbose: print('# packages in environment at %s:' % prefix) print('#') packages = False for dist in sorted(install.linked(prefix)): name = dist.rsplit('-', 2)[0] if pat and pat.search(name) is None: continue if not verbose: print(dist) continue try: info = install.is_linked(prefix, dist) features = set(info.get('features', '').split()) print('%-25s %-15s %15s %s' % (info['name'], info['version'], info['build'], common.disp_features(features)) ) except: # IOError, KeyError, ValueError print('%-25s %-15s %15s' % tuple(dist.rsplit('-', 2))) packages = True if not packages: # Be Unix-friendly sys.exit(1)
def install_local_packages(prefix, paths, verbose=False): # copy packages to pkgs dir pkgs_dir = config.pkgs_dirs[0] dists = [] for src_path in paths: assert src_path.endswith('.tar.bz2') fn = basename(src_path) dists.append(fn[:-8]) dst_path = join(pkgs_dir, fn) if abspath(src_path) == abspath(dst_path): continue shutil.copyfile(src_path, dst_path) actions = defaultdict(list) actions['PREFIX'] = prefix actions['op_order'] = RM_EXTRACTED, EXTRACT, UNLINK, LINK for dist in dists: actions[RM_EXTRACTED].append(dist) actions[EXTRACT].append(dist) if install.is_linked(prefix, dist): actions[UNLINK].append(dist) actions[LINK].append(dist) execute_actions(actions, verbose=verbose) depends = [] for dist in dists: try: with open(join(pkgs_dir, dist, 'info', 'index.json')) as fi: meta = json.load(fi) depends.extend(meta['depends']) except (IOError, KeyError): continue return depends
def launch(fn, prefix=config.root_dir, additional_args=None): info = install.is_linked(prefix, fn[:-8]) if info is None: return None if not info.get('type') == 'app': raise Exception('Not an application: %s' % fn) # prepend the bin directory to the path fmt = r'%s\Scripts;%s' if sys.platform == 'win32' else '%s/bin:%s' env = {'PATH': fmt % (abspath(prefix), os.getenv('PATH'))} # copy existing environment variables, but not anything with PATH in it for k, v in iteritems(os.environ): if 'PATH' not in k: env[k] = v # allow updating environment variables from metadata if 'app_env' in info: env.update(info['app_env']) # call the entry command args = info['app_entry'].split() args = [a.replace('${PREFIX}', prefix) for a in args] arg0 = find_executable(args[0], env['PATH']) if arg0 is None: raise Exception('Executable not found: %s' % args[0]) args[0] = arg0 cwd = abspath(expanduser('~')) if additional_args: args.extend(additional_args) return subprocess.Popen(args, cwd=cwd, env=env)
def install_local_packages(prefix, paths, verbose=False): # copy packages to pkgs dir pkgs_dir = config.pkgs_dirs[0] dists = [] for src_path in paths: assert src_path.endswith('.tar.bz2') fn = basename(src_path) dists.append(fn[:-8]) dst_path = join(pkgs_dir, fn) if abspath(src_path) == abspath(dst_path): continue shutil.copyfile(src_path, dst_path) actions = defaultdict(list) actions['PREFIX'] = prefix actions['op_order'] = RM_EXTRACTED, EXTRACT, UNLINK, LINK for dist in dists: actions[RM_EXTRACTED].append(dist) actions[EXTRACT].append(dist) if install.is_linked(prefix, dist): actions[UNLINK].append(dist) actions[LINK].append(dist) execute_actions(actions, verbose=verbose) depends = [] for dist in dists: try: with open(join(pkgs_dir, dist, 'info', 'index.json')) as fi: meta = json.load(fi) depends.extend(meta['depends']) except (IOError, KeyError): continue print('depends: %r' % depends) return depends
def launch(fn, prefix=config.root_dir, additional_args=None): info = install.is_linked(prefix, fn[:-8]) if info is None: return None if not info.get('type') == 'app': raise Exception('Not an application: %s' % fn) # prepend the bin directory to the path fmt = r'%s\Scripts;%s' if sys.platform == 'win32' else '%s/bin:%s' env = {'PATH': fmt % (abspath(prefix), os.getenv('PATH'))} # copy existing environment variables, but not anything with PATH in it for k, v in iteritems(os.environ): if 'PATH' not in k: env[k] = v # allow updating environment variables from metadata if 'app_env' in info: env.update(info['app_env']) # call the entry command args = info['app_entry'].split() args = [a.replace('${PREFIX}', prefix) for a in args] arg0 = find_executable(args[0], env['PATH']) if arg0 is None: raise Exception('Executable not found: %s' % args[0]) args[0] = arg0 cwd = abspath(expanduser('~')) if additional_args: args.extend(additional_args) return subprocess.Popen(args, cwd=cwd , env=env)
def launch(fn, prefix=config.root_dir, additional_args=None): info = install.is_linked(prefix, fn[:-8]) if info is None: return None if not info.get("type") == "app": raise Exception("Not an application: %s" % fn) # prepend the bin directory to the path fmt = r"%s\Scripts;%s" if sys.platform == "win32" else "%s/bin:%s" env = {"PATH": fmt % (abspath(prefix), os.getenv("PATH"))} # copy existing environment variables, but not anything with PATH in it for k, v in iteritems(os.environ): if "PATH" not in k: env[k] = v # allow updating environment variables from metadata if "app_env" in info: env.update(info["app_env"]) # call the entry command args = info["app_entry"].split() args = [a.replace("${PREFIX}", prefix) for a in args] arg0 = find_executable(args[0], env["PATH"]) if arg0 is None: raise Exception("Executable not found: %s" % args[0]) args[0] = arg0 cwd = abspath(expanduser("~")) if additional_args: args.extend(additional_args) return subprocess.Popen(args, cwd=cwd, env=env)
def launch(fn, prefix=config.root_dir, additional_args=None, background=False): info = install.is_linked(prefix, fn[:-8]) if info is None: return None if not info.get('type') == 'app': raise TypeError('Not an application: %s' % fn) binpath, env = environment_for_conda_environment(prefix) # allow updating environment variables from metadata if 'app_env' in info: env.update(info['app_env']) # call the entry command args = info['app_entry'].split() args = [a.replace('${PREFIX}', prefix) for a in args] arg0 = find_executable(args[0], env['PATH']) if arg0 is None: raise Exception('Executable not found: %s' % args[0]) args[0] = arg0 cwd = abspath(expanduser('~')) if additional_args: args.extend(additional_args) if sys.platform == 'win32' and background: return subprocess.Popen(args, cwd=cwd, env=env, close_fds=False, creationflags=subprocess.CREATE_NEW_CONSOLE) else: return subprocess.Popen(args, cwd=cwd, env=env, close_fds=False)
def get_requires(prefix): res = [] for dist in install.linked(prefix): meta = install.is_linked(prefix, dist) assert meta if 'file_hash' not in meta: res.append('%(name)s %(version)s %(build)s' % meta) res.sort() return res
def list_package_sources(prefix): """ List the sources of all the packages installed in the given environment. """ installed = install.linked(prefix) sources = [] for dist in conda.cli.main_list.get_packages(installed, None): info = install.is_linked(prefix, dist) sources.append(info['link']['source']) return sources
def conda_installed_files(prefix, exclude_self_build=False): """ Return the set of files which have been installed (using conda) into a given prefix. """ res = set() for dist in install.linked(prefix): meta = install.is_linked(prefix, dist) if exclude_self_build and 'file_hash' in meta: continue res.update(set(meta['files'])) return res
def app_is_installed(fn): """ Return the list of prefix directories in which `fn` in installed into, which might be an empty list. """ prefixes = [config.root_dir] for fn2 in os.listdir(config.envs_dir): prefix = join(config.envs_dir, fn2) if isdir(prefix): prefixes.append(prefix) dist = fn[:-8] return [prefix for prefix in prefixes if install.is_linked(prefix, dist)]
def app_is_installed(fn): """ Return the list of prefix directories in which `fn` in installed into, which might be an empty list. """ prefixes = [config.root_dir] for envs_dir in config.envs_dirs: for fn2 in os.listdir(envs_dir): prefix = join(envs_dir, fn2) if isdir(prefix): prefixes.append(prefix) dist = fn[:-8] return [prefix for prefix in prefixes if install.is_linked(prefix, dist)]
def which_package(path): """ given the path (of a (presumably) conda installed file) iterate over the conda packages the file came from. Usually the iteration yields only one package. """ path = abspath(path) prefix = which_prefix(path) if prefix is None: raise RuntimeError("could not determine conda prefix from: %s" % path) for dist in install.linked(prefix): meta = install.is_linked(prefix, dist) if any(abspath(join(prefix, f)) == path for f in meta['files']): yield dist
def ensure_linked_actions(dists, prefix): actions = defaultdict(list) actions[inst.PREFIX] = prefix for dist in dists: if install.is_linked(prefix, dist): continue extracted_in = extracted_where(dist) if extracted_in: if config.always_copy: lt = install.LINK_COPY elif install.try_hard_link(extracted_in, prefix, dist): lt = install.LINK_HARD else: lt = (install.LINK_SOFT if (config.allow_softlinks and sys.platform != 'win32') else install.LINK_COPY) actions[inst.LINK].append('%s %s %d' % (dist, extracted_in, lt)) else: # Make a guess from the first pkgs dir, which is where it will be # extracted try: os.makedirs(join(config.pkgs_dirs[0], dist, 'info')) index_json = join(config.pkgs_dirs[0], dist, 'info', 'index.json') with open(index_json, 'w'): pass if config.always_copy: lt = install.LINK_COPY elif install.try_hard_link(config.pkgs_dirs[0], prefix, dist): lt = install.LINK_HARD else: lt = (install.LINK_SOFT if (config.allow_softlinks and sys.platform != 'win32') else install.LINK_COPY) actions[inst.LINK].append('%s %s %d' % (dist, config.pkgs_dirs[0], lt)) except (OSError, IOError): actions[inst.LINK].append(dist) finally: try: install.rm_rf(join(config.pkgs_dirs[0], dist)) except (OSError, IOError): pass actions[inst.EXTRACT].append(dist) if install.is_fetched(config.pkgs_dirs[0], dist): continue actions[inst.FETCH].append(dist) return actions
def ensure_linked_actions(dists, prefix): actions = defaultdict(list) actions[PREFIX] = prefix for dist in dists: if install.is_linked(prefix, dist): continue actions[LINK].append(dist) if install.is_extracted(config.pkgs_dir, dist): continue actions[EXTRACT].append(dist) if install.is_fetched(config.pkgs_dir, dist): continue actions[FETCH].append(dist) return actions
def app_is_installed(fn, prefixes=None): """ Return the list of prefix directories in which `fn` in installed into, which might be an empty list. """ if prefixes is None: prefixes = [config.root_dir] for envs_dir in config.envs_dirs: for fn2 in os.listdir(envs_dir): prefix = join(envs_dir, fn2) if isdir(prefix): prefixes.append(prefix) dist = fn[:-8] return [p for p in prefixes if install.is_linked(p, dist)]
def list_packages(prefix, regex=None, format='human', piplist=False): if not isdir(prefix): sys.exit("""\ Error: environment does not exist: %s # # Use 'conda create' to create an environment before listing its packages.""" % prefix) pat = re.compile(regex, re.I) if regex else None if format == 'human': print('# packages in environment at %s:' % prefix) print('#') res = 1 if format == 'export': print_export_header() installed = install.linked(prefix) if piplist and config.use_pip and format == 'human': add_pip_installed(prefix, installed) for dist in sorted(installed): name = dist.rsplit('-', 2)[0] if pat and pat.search(name) is None: continue res = 0 if format == 'canonical': print(dist) continue if format == 'export': print('='.join(dist.rsplit('-', 2))) continue try: # Returns None if no meta-file found (e.g. pip install) info = install.is_linked(prefix, dist) features = set(info.get('features', '').split()) disp = '%(name)-25s %(version)-15s %(build)15s' % info disp += ' %s' % common.disp_features(features) if config.show_channel_urls: disp += ' %s' % config.canonical_channel_name(info.get('url')) print(disp) except: # (IOError, KeyError, ValueError): print('%-25s %-15s %15s' % tuple(dist.rsplit('-', 2))) return res
def list_packages(prefix, regex=None, format='human', piplist=False): if not isdir(prefix): sys.exit("""\ Error: environment does not exist: %s # # Use 'conda create' to create an environment before listing its packages.""" % prefix) pat = re.compile(regex, re.I) if regex else None if format == 'human': print('# packages in environment at %s:' % prefix) print('#') if format == 'export': print_export_header() res = 1 installed = install.linked(prefix) if piplist and config.use_pip and format == 'human': add_pip_installed(prefix, installed) for dist in sorted(installed): name = dist.rsplit('-', 2)[0] if pat and pat.search(name) is None: continue res = 0 if format == 'canonical': print(dist) continue if format == 'export': print('='.join(dist.rsplit('-', 2))) continue try: # Returns None if no meta-file found (e.g. pip install) info = install.is_linked(prefix, dist) features = set(info.get('features', '').split()) disp = '%(name)-25s %(version)-15s %(build)15s' % info disp += ' %s' % common.disp_features(features) if config.show_channel_urls: disp += ' %s' % config.canonical_channel_name(info.get('url')) print(disp) except: # (IOError, KeyError, ValueError): print('%-25s %-15s %15s' % tuple(dist.rsplit('-', 2))) return res
def install_local_packages(prefix, paths, verbose=False): # copy packages to pkgs dir dists = [] for src_path in paths: assert src_path.endswith('.tar.bz2') fn = basename(src_path) dists.append(fn[:-8]) dst_path = join(config.pkgs_dirs[0], fn) if abspath(src_path) == abspath(dst_path): continue shutil.copyfile(src_path, dst_path) actions = defaultdict(list) actions['PREFIX'] = prefix actions['op_order'] = RM_EXTRACTED, EXTRACT, UNLINK, LINK for dist in dists: actions[RM_EXTRACTED].append(dist) actions[EXTRACT].append(dist) if install.is_linked(prefix, dist): actions[UNLINK].append(dist) actions[LINK].append(dist) execute_actions(actions, verbose=verbose)
def ensure_linked_actions(dists, prefix): actions = defaultdict(list) actions[PREFIX] = prefix for dist in dists: if install.is_linked(prefix, dist): continue extracted_in = extracted_where(dist) if extracted_in: if install.try_hard_link(extracted_in, prefix, dist): lt = install.LINK_HARD else: lt = install.LINK_COPY if sys.platform == "win32" else install.LINK_SOFT actions[LINK].append("%s %s %d" % (dist, extracted_in, lt)) continue actions[LINK].append(dist) actions[EXTRACT].append(dist) if install.is_fetched(config.pkgs_dir, dist): continue actions[FETCH].append(dist) return actions
def ensure_linked_actions(dists, prefix): actions = defaultdict(list) actions[PREFIX] = prefix for dist in dists: if install.is_linked(prefix, dist): continue extracted_in = extracted_where(dist) if extracted_in: if install.try_hard_link(extracted_in, prefix, dist): lt = install.LINK_HARD else: lt = (install.LINK_SOFT if (config.allow_softlinks and sys.platform != 'win32') else install.LINK_COPY) actions[LINK].append('%s %s %d' % (dist, extracted_in, lt)) continue actions[LINK].append(dist) actions[EXTRACT].append(dist) if install.is_fetched(config.pkgs_dirs[0], dist): continue actions[FETCH].append(dist) return actions
def ensure_linked_actions(dists, prefix, index=None, force=False, always_copy=False): actions = defaultdict(list) actions[inst.PREFIX] = prefix actions['op_order'] = (inst.RM_FETCHED, inst.FETCH, inst.RM_EXTRACTED, inst.EXTRACT, inst.UNLINK, inst.LINK) for dist in dists: fetched_in = install.is_fetched(dist) extracted_in = install.is_extracted(dist) if fetched_in and index is not None: # Test the MD5, and possibly re-fetch fn = dist + '.tar.bz2' try: if md5_file(fetched_in) != index[fn]['md5']: # RM_FETCHED now removes the extracted data too actions[inst.RM_FETCHED].append(dist) # Re-fetch, re-extract, re-link fetched_in = extracted_in = None force = True except KeyError: sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn) if not force and install.is_linked(prefix, dist): continue if extracted_in and force: # Always re-extract in the force case actions[inst.RM_EXTRACTED].append(dist) extracted_in = None # Otherwise we need to extract, and possibly fetch if not extracted_in and not fetched_in: # If there is a cache conflict, clean it up fetched_in, conflict = install.find_new_location(dist) if conflict is not None: actions[inst.RM_FETCHED].append(conflict) actions[inst.FETCH].append(dist) if not extracted_in: actions[inst.EXTRACT].append(dist) fetched_dist = extracted_in or fetched_in[:-8] fetched_dir = dirname(fetched_dist) try: # Determine what kind of linking is necessary if not extracted_in: # If not already extracted, create some dummy # data to test with install.rm_rf(fetched_dist) ppath = join(fetched_dist, 'info') os.makedirs(ppath) index_json = join(ppath, 'index.json') with open(index_json, 'w'): pass if config.always_copy or always_copy: lt = install.LINK_COPY elif install.try_hard_link(fetched_dir, prefix, dist): lt = install.LINK_HARD elif config.allow_softlinks and sys.platform != 'win32': lt = install.LINK_SOFT else: lt = install.LINK_COPY actions[inst.LINK].append('%s %d' % (dist, lt)) except (OSError, IOError): actions[inst.LINK].append(dist) finally: if not extracted_in: # Remove the dummy data try: install.rm_rf(fetched_dist) except (OSError, IOError): pass return actions
def execute(args, parser): import os from os.path import basename, dirname import conda import conda.config as config import conda.misc as misc from conda.cli.main_init import is_initialized from conda.api import get_package_versions, app_is_installed from conda.install import is_linked if args.args: results = defaultdict(list) for arg in args.args: if isfile(arg): from conda.misc import which_package path = arg for dist in which_package(path): if args.json: results[arg].append(dist) else: print('%-50s %s' % (path, dist)) elif arg.endswith('.tar.bz2'): info = None for prefix in misc.list_prefixes(): info = is_linked(prefix, arg[:-8]) if info: break if not info: if args.json: results[arg] = {'installed': []} else: print("Package %s is not installed" % arg) continue info['installed'] = app_is_installed(arg) if args.json: results[arg] = info else: print(arg) print(' %-15s %30s' % ('installed', bool(info.get('installed')))) for key in ('name', 'version', 'build', 'license', 'platform', 'arch', 'size', 'summary'): print(' %-15s %30s' % (key, info.get(key))) else: if args.json: for pkg in get_package_versions(arg): results[arg].append(pkg._asdict()) else: show_pkg_info(arg) if args.json: common.stdout_json(results) return options = 'envs', 'system', 'license' try: import requests requests_version = requests.__version__ except ImportError: requests_version = "could not import" except Exception as e: requests_version = "Error %s" % e try: import conda_build except ImportError: conda_build_version = "not installed" except Exception as e: conda_build_version = "Error %s" % e else: conda_build_version = conda_build.__version__ info_dict = dict( platform=config.subdir, conda_version=conda.__version__, conda_build_version=conda_build_version, root_prefix=config.root_dir, root_writable=config.root_writable, pkgs_dirs=config.pkgs_dirs, envs_dirs=config.envs_dirs, default_prefix=config.default_prefix, channels=config.get_channel_urls(), rc_path=config.rc_path, is_foreign=bool(config.foreign), envs=[], python_version='.'.join(map(str, sys.version_info)), requests_version=requests_version, ) if args.all or args.json: for option in options: setattr(args, option, True) info_dict['channels_disp'] = [ config.hide_binstar_tokens(c) for c in info_dict['channels'] ] if args.all or all(not getattr(args, opt) for opt in options): for key in 'pkgs_dirs', 'envs_dirs', 'channels_disp': info_dict['_' + key] = ('\n' + 24 * ' ').join(info_dict[key]) info_dict['_rtwro'] = ('writable' if info_dict['root_writable'] else 'read only') print("""\ Current conda install: platform : %(platform)s conda version : %(conda_version)s conda-build version : %(conda_build_version)s python version : %(python_version)s requests version : %(requests_version)s root environment : %(root_prefix)s (%(_rtwro)s) default environment : %(default_prefix)s envs directories : %(_envs_dirs)s package cache : %(_pkgs_dirs)s channel URLs : %(_channels_disp)s config file : %(rc_path)s is foreign system : %(is_foreign)s """ % info_dict) if not is_initialized(): print("""\ # NOTE: # root directory '%s' uninitalized, # use 'conda init' to initialize.""" % config.root_dir) del info_dict['channels_disp'] if args.envs: if not args.json: print("# conda environments:") print("#") def disp_env(prefix): fmt = '%-20s %s %s' default = '*' if prefix == config.default_prefix else ' ' name = (config.root_env_name if prefix == config.root_dir else basename(prefix)) if not args.json: print(fmt % (name, default, prefix)) for prefix in misc.list_prefixes(): disp_env(prefix) 
if prefix != config.root_dir: info_dict['envs'].append(prefix) print() if args.system and not args.json: from conda.cli.find_commands import find_commands, find_executable print("sys.version: %s..." % (sys.version[:40])) print("sys.prefix: %s" % sys.prefix) print("sys.executable: %s" % sys.executable) print("conda location: %s" % dirname(conda.__file__)) for cmd in sorted(set(find_commands() + ['build'])): print("conda-%s: %s" % (cmd, find_executable(cmd))) print("user site dirs: ", end='') site_dirs = get_user_site() if site_dirs: print(site_dirs[0]) else: print() for site_dir in site_dirs[1:]: print(' %s' % site_dir) print() evars = [ 'PATH', 'PYTHONPATH', 'PYTHONHOME', 'CONDA_DEFAULT_ENV', 'CIO_TEST', 'CONDA_ENVS_PATH' ] if config.platform == 'linux': evars.append('LD_LIBRARY_PATH') elif config.platform == 'osx': evars.append('DYLD_LIBRARY_PATH') for ev in sorted(evars): print("%s: %s" % (ev, os.getenv(ev, '<not set>'))) print() if args.license and not args.json: try: from _license import show_info show_info() except ImportError: print("""\ WARNING: could import _license.show_info # try: # $ conda install -n root _license""") if args.json: common.stdout_json(info_dict)
def install(args, parser, command='install'): """ conda install, conda update, and conda create """ newenv = bool(command == 'create') if newenv: common.ensure_name_or_prefix(args, command) prefix = common.get_prefix(args, search=not newenv) if newenv: check_prefix(prefix, json=args.json) if command == 'update': if args.all: if args.packages: common.error_and_exit("""--all cannot be used with packages""", json=args.json, error_type="ValueError") else: if len(args.packages) == 0: common.error_and_exit("""no package names supplied # If you want to update to a newer version of Anaconda, type: # # $ conda update --prefix %s anaconda """ % prefix, json=args.json, error_type="ValueError") if command == 'update': linked = ci.linked(prefix) for name in args.packages: common.arg2spec(name, json=args.json) if '=' in name: common.error_and_exit("Invalid package name: '%s'" % (name), json=args.json, error_type="ValueError") if name not in set(ci.name_dist(d) for d in linked): common.error_and_exit("package '%s' is not installed in %s" % (name, prefix), json=args.json, error_type="ValueError") if newenv and args.clone: if args.packages: common.error_and_exit('did not expect any arguments for --clone', json=args.json, error_type="ValueError") clone(args.clone, prefix, json=args.json, quiet=args.quiet) touch_nonadmin(prefix) if not args.json: print_activate(args.name if args.name else prefix) return if newenv and not args.no_default_packages: default_packages = config.create_default_packages[:] # Override defaults if they are specified at the command line for default_pkg in config.create_default_packages: if any(pkg.split('=')[0] == default_pkg for pkg in args.packages): default_packages.remove(default_pkg) args.packages.extend(default_packages) common.ensure_override_channels_requires_channel(args) channel_urls = args.channel or () specs = [] if args.file: specs.extend(common.specs_from_url(args.file, json=args.json)) elif getattr(args, 'all', False): linked = ci.linked(prefix) for pkg in linked: name, ver, build = pkg.rsplit('-', 2) if name == 'python' and ver.startswith('2'): # Oh Python 2... 
specs.append('%s >=%s,<3' % (name, ver)) else: specs.append('%s >=%s' % (name, ver)) specs.extend(common.specs_from_args(args.packages, json=args.json)) if command == 'install' and args.revision: get_revision(args.revision, json=args.json) else: common.check_specs(prefix, specs, json=args.json) if args.use_local: from conda.fetch import fetch_index from conda.utils import url_path try: from conda_build import config as build_config except ImportError: common.error_and_exit( "you need to have 'conda-build' installed" " to use the --use-local option", json=args.json, error_type="RuntimeError") # remove the cache such that a refetch is made, # this is necessary because we add the local build repo URL fetch_index.cache = {} index = common.get_index_trap([url_path(build_config.croot)], use_cache=args.use_index_cache, unknown=args.unknown, json=args.json) else: index = common.get_index_trap(channel_urls=channel_urls, prepend=not args.override_channels, use_cache=args.use_index_cache, unknown=args.unknown, json=args.json) # Don't update packages that are already up-to-date if command == 'update' and not args.all: r = Resolve(index) orig_packages = args.packages[:] for name in orig_packages: installed_metadata = [ ci.is_linked(prefix, dist) for dist in linked ] vers_inst = [ dist.rsplit('-', 2)[1] for dist in linked if dist.rsplit('-', 2)[0] == name ] build_inst = [ m['build_number'] for m in installed_metadata if m['name'] == name ] try: assert len(vers_inst) == 1, name assert len(build_inst) == 1, name except AssertionError as e: if args.json: common.exception_and_exit(e, json=True) else: raise pkgs = sorted(r.get_pkgs(MatchSpec(name))) if not pkgs: # Shouldn't happen? continue latest = pkgs[-1] if latest.version == vers_inst[ 0] and latest.build_number == build_inst[0]: args.packages.remove(name) if not args.packages: from conda.cli.main_list import print_packages if not args.json: regex = '^(%s)$' % '|'.join(orig_packages) print('# All requested packages already installed.') print_packages(prefix, regex) else: common.stdout_json_success( message='All requested packages already installed.') return # handle tar file containing conda packages if len(args.packages) == 1: tar_path = args.packages[0] if tar_path.endswith('.tar'): install_tar(prefix, tar_path, verbose=not args.quiet) return # handle explicit installs of conda packages if args.packages and all(s.endswith('.tar.bz2') for s in args.packages): from conda.misc import install_local_packages install_local_packages(prefix, args.packages, verbose=not args.quiet) return if any(s.endswith('.tar.bz2') for s in args.packages): common.error_and_exit( "cannot mix specifications with conda package filenames", json=args.json, error_type="ValueError") if args.force: args.no_deps = True spec_names = set(s.split()[0] for s in specs) if args.no_deps: only_names = spec_names else: only_names = None if not isdir(prefix) and not newenv: if args.mkdir: try: os.makedirs(prefix) except OSError: common.error_and_exit("Error: could not create directory: %s" % prefix, json=args.json, error_type="OSError") else: common.error_and_exit("""\ environment does not exist: %s # # Use 'conda create' to create an environment before installing packages # into it. 
#""" % prefix, json=args.json, error_type="NoEnvironmentFound") try: if command == 'install' and args.revision: actions = plan.revert_actions(prefix, get_revision(args.revision)) else: actions = plan.install_actions(prefix, index, specs, force=args.force, only_names=only_names, pinned=args.pinned, minimal_hint=args.alt_hint) except NoPackagesFound as e: error_message = e.args[0] packages = {index[fn]['name'] for fn in index} for pkg in e.pkgs: close = get_close_matches(pkg, packages) if close: error_message += "\n\nDid you mean one of these?\n %s" % ( ', '.join(close)) error_message += '\n\nYou can search for this package on Binstar with' error_message += '\n\n binstar search -t conda %s' % pkg error_message += '\n\nYou may need to install the Binstar command line client with' error_message += '\n\n conda install binstar' common.error_and_exit(error_message, json=args.json) except SystemExit as e: # Unsatisfiable package specifications/no such revision/import error error_type = 'UnsatisfiableSpecifications' if e.args and 'could not import' in e.args[0]: error_type = 'ImportError' common.exception_and_exit(e, json=args.json, newline=True, error_text=False, error_type=error_type) if plan.nothing_to_do(actions): from conda.cli.main_list import print_packages if not args.json: regex = '^(%s)$' % '|'.join(spec_names) print('\n# All requested packages already installed.') print_packages(prefix, regex) else: common.stdout_json_success( message='All requested packages already installed.') return if not args.json: print() print("Package plan for installation in environment %s:" % prefix) plan.display_actions(actions, index) if command in {'install', 'update'}: common.check_write(command, prefix) if not args.json: if not pscheck.main(args): common.confirm_yn(args) else: if (sys.platform == 'win32' and not args.force_pscheck and not pscheck.check_processes(verbose=False)): common.error_and_exit( "Cannot continue operation while processes " "from packages are running without --force-pscheck.", json=True, error_type="ProcessesStillRunning") elif args.dry_run: common.stdout_json_success(actions=actions, dry_run=True) sys.exit(0) with common.json_progress_bars(json=args.json and not args.quiet): try: plan.execute_actions(actions, index, verbose=not args.quiet) except RuntimeError as e: if len(e.args) > 0 and "LOCKERROR" in e.args[0]: error_type = "AlreadyLocked" else: error_type = "RuntimeError" common.exception_and_exit(e, error_type=error_type, json=args.json) except SystemExit as e: common.exception_and_exit(e, json=args.json) if newenv: touch_nonadmin(prefix) if not args.json: print_activate(args.name if args.name else prefix) if args.json: common.stdout_json_success(actions=actions)
def install(args, parser, command='install'): """ conda install, conda update, and conda create """ newenv = bool(command == 'create') if newenv: common.ensure_name_or_prefix(args, command) prefix = common.get_prefix(args, search=not newenv) if newenv: check_prefix(prefix, json=args.json) if command == 'update': if args.all: if args.packages: common.error_and_exit("""--all cannot be used with packages""", json=args.json, error_type="ValueError") else: if len(args.packages) == 0: common.error_and_exit("""no package names supplied # If you want to update to a newer version of Anaconda, type: # # $ conda update --prefix %s anaconda """ % prefix, json=args.json, error_type="ValueError") if command == 'update': linked = ci.linked(prefix) for name in args.packages: common.arg2spec(name, json=args.json) if '=' in name: common.error_and_exit("Invalid package name: '%s'" % (name), json=args.json, error_type="ValueError") if name not in set(ci.name_dist(d) for d in linked): common.error_and_exit("package '%s' is not installed in %s" % (name, prefix), json=args.json, error_type="ValueError") if newenv and args.clone: if args.packages: common.error_and_exit('did not expect any arguments for --clone', json=args.json, error_type="ValueError") clone(args.clone, prefix, json=args.json, quiet=args.quiet) touch_nonadmin(prefix) if not args.json: print_activate(args.name if args.name else prefix) return if newenv and not args.no_default_packages: default_packages = config.create_default_packages[:] # Override defaults if they are specified at the command line for default_pkg in config.create_default_packages: if any(pkg.split('=')[0] == default_pkg for pkg in args.packages): default_packages.remove(default_pkg) args.packages.extend(default_packages) common.ensure_override_channels_requires_channel(args) channel_urls = args.channel or () specs = [] if args.file: specs.extend(common.specs_from_url(args.file, json=args.json)) elif getattr(args, 'all', False): linked = ci.linked(prefix) for pkg in linked: name, ver, build = pkg.rsplit('-', 2) if name == 'python' and ver.startswith('2'): # Oh Python 2... 
specs.append('%s >=%s,<3' % (name, ver)) else: specs.append('%s >=%s' % (name, ver)) specs.extend(common.specs_from_args(args.packages, json=args.json)) if command == 'install' and args.revision: get_revision(args.revision, json=args.json) else: common.check_specs(prefix, specs, json=args.json) if args.use_local: from conda.fetch import fetch_index from conda.utils import url_path try: from conda_build import config as build_config except ImportError: common.error_and_exit("you need to have 'conda-build' installed" " to use the --use-local option", json=args.json, error_type="RuntimeError") # remove the cache such that a refetch is made, # this is necessary because we add the local build repo URL fetch_index.cache = {} index = common.get_index_trap([url_path(build_config.croot)], use_cache=args.use_index_cache, unknown=args.unknown, json=args.json) else: index = common.get_index_trap(channel_urls=channel_urls, prepend=not args.override_channels, use_cache=args.use_index_cache, unknown=args.unknown, json=args.json) # Don't update packages that are already up-to-date if command == 'update' and not args.all: r = Resolve(index) orig_packages = args.packages[:] for name in orig_packages: installed_metadata = [ci.is_linked(prefix, dist) for dist in linked] vers_inst = [dist.rsplit('-', 2)[1] for dist in linked if dist.rsplit('-', 2)[0] == name] build_inst = [m['build_number'] for m in installed_metadata if m['name'] == name] try: assert len(vers_inst) == 1, name assert len(build_inst) == 1, name except AssertionError as e: if args.json: common.exception_and_exit(e, json=True) else: raise pkgs = sorted(r.get_pkgs(MatchSpec(name))) if not pkgs: # Shouldn't happen? continue latest = pkgs[-1] if latest.version == vers_inst[0] and latest.build_number == build_inst[0]: args.packages.remove(name) if not args.packages: from conda.cli.main_list import print_packages if not args.json: regex = '^(%s)$' % '|'.join(orig_packages) print('# All requested packages already installed.') print_packages(prefix, regex) else: common.stdout_json_success(message='All requested packages already installed.') return # handle tar file containing conda packages if len(args.packages) == 1: tar_path = args.packages[0] if tar_path.endswith('.tar'): install_tar(prefix, tar_path, verbose=not args.quiet) return # handle explicit installs of conda packages if args.packages and all(s.endswith('.tar.bz2') for s in args.packages): from conda.misc import install_local_packages install_local_packages(prefix, args.packages, verbose=not args.quiet) return if any(s.endswith('.tar.bz2') for s in args.packages): common.error_and_exit("cannot mix specifications with conda package filenames", json=args.json, error_type="ValueError") if args.force: args.no_deps = True spec_names = set(s.split()[0] for s in specs) if args.no_deps: only_names = spec_names else: only_names = None if not isdir(prefix) and not newenv: if args.mkdir: try: os.makedirs(prefix) except OSError: common.error_and_exit("Error: could not create directory: %s" % prefix, json=args.json, error_type="OSError") else: common.error_and_exit("""\ environment does not exist: %s # # Use 'conda create' to create an environment before installing packages # into it. 
#""" % prefix, json=args.json, error_type="NoEnvironmentFound") try: if command == 'install' and args.revision: actions = plan.revert_actions(prefix, get_revision(args.revision)) else: actions = plan.install_actions(prefix, index, specs, force=args.force, only_names=only_names, pinned=args.pinned, minimal_hint=args.alt_hint) except NoPackagesFound as e: common.exception_and_exit(e, json=args.json) except SystemExit as e: # Unsatisfiable package specifications/no such revision/import error error_type = 'UnsatisfiableSpecifications' if e.args and 'could not import' in e.args[0]: error_type = 'ImportError' common.exception_and_exit(e, json=args.json, newline=True, error_text=False, error_type=error_type) if plan.nothing_to_do(actions): from conda.cli.main_list import print_packages if not args.json: regex = '^(%s)$' % '|'.join(spec_names) print('\n# All requested packages already installed.') print_packages(prefix, regex) else: common.stdout_json_success(message='All requested packages already installed.') return if not args.json: print() print("Package plan for installation in environment %s:" % prefix) plan.display_actions(actions, index) if command in {'install', 'update'}: common.check_write(command, prefix) if not args.json: if not pscheck.main(args): common.confirm_yn(args) else: if (sys.platform == 'win32' and not args.force_pscheck and not pscheck.check_processes(verbose=False)): common.error_and_exit("Cannot continue operation while processes " "from packages are running without --force-pscheck.", json=True, error_type="ProcessesStillRunning") elif args.dry_run: common.stdout_json_success(actions=actions, dry_run=True) sys.exit(0) with common.json_progress_bars(json=args.json and not args.quiet): try: plan.execute_actions(actions, index, verbose=not args.quiet) except RuntimeError as e: if len(e.args) > 0 and "LOCKERROR" in e.args[0]: error_type = "AlreadyLocked" else: error_type = "RuntimeError" common.exception_and_exit(e, error_type=error_type, json=args.json) except SystemExit as e: common.exception_and_exit(e, json=args.json) if newenv: touch_nonadmin(prefix) if not args.json: print_activate(args.name if args.name else prefix) if args.json: common.stdout_json_success(actions=actions)
dists.append(fn[:-8]) dst_path = join(pkgs_dir, fn) if abspath(src_path) == abspath(dst_path): continue shutil.copyfile(src_path, dst_path) <<<<<<< HEAD force_extract_and_link(dists, prefix, verbose=verbose) ======= actions = defaultdict(list) actions['PREFIX'] = [prefix] actions['op_order'] = RM_EXTRACTED, EXTRACT, UNLINK, LINK for dist in dists: actions[RM_EXTRACTED].append(dist) actions[EXTRACT].append(dist) if install.is_linked(prefix, dist): actions[UNLINK].append(dist) actions[LINK].append(dist) execute_actions(actions, verbose=verbose) depends = [] for dist in dists: try: with open(join(pkgs_dir, dist, 'info', 'index.json')) as fi: meta = json.load(fi) depends.extend(meta['depends']) except (IOError, KeyError): continue print('depends: %r' % depends) return depends >>>>>>> conda/feature/instruction-arguments
def execute(args, parser): from conda import config from conda.api import get_package_versions, app_is_installed from conda.install import is_linked if args.args: results = defaultdict(list) for arg in args.args: if isfile(arg): from conda.misc import which_package path = arg for dist in which_package(path): if args.json: results[arg].append(dist) else: print('%-50s %s' % (path, dist)) elif arg.endswith('.tar.bz2'): info = is_linked(config.root_dir, arg[:-8]) if not info: if args.json: results[arg] = { 'installed': [] } else: print("Package %s is not installed" % arg) continue info['installed'] = app_is_installed(arg) if args.json: results[arg] = info else: print(arg) print(' %-15s %30s' % ('installed', bool(info.get('installed')))) for key in ('name', 'version', 'build', 'license', 'platform', 'arch', 'size', 'summary'): print(' %-15s %30s' % (key, info.get(key))) else: if args.json: for pkg in get_package_versions(arg): results[arg].append(pkg._asdict()) else: show_pkg_info(arg) if args.json: common.stdout_json(results) return import os from os.path import basename, dirname, isdir, join import conda import conda.config as config from conda.cli.main_init import is_initialized options = 'envs', 'system', 'license' info_dict = dict(platform=config.subdir, conda_version=conda.__version__, root_prefix=config.root_dir, root_writable=config.root_writable, pkgs_dirs=config.pkgs_dirs, envs_dirs=config.envs_dirs, default_prefix=config.default_prefix, channels=config.get_channel_urls(), rc_path=config.rc_path, is_foreign=bool(config.foreign), envs=[], python_version='.'.join(map(str, sys.version_info)), ) if args.all or args.json: for option in options: setattr(args, option, True) t_pat = re.compile(r'binstar\.org/(t/[0-9a-f\-]{4,})') info_dict['channels_disp'] = [t_pat.sub('binstar.org/t/<TOKEN>', c) for c in info_dict['channels']] if args.all or all(not getattr(args, opt) for opt in options): for key in 'pkgs_dirs', 'envs_dirs', 'channels_disp': info_dict['_' + key] = ('\n' + 24 * ' ').join(info_dict[key]) info_dict['_rtwro'] = ('writable' if info_dict['root_writable'] else 'read only') print("""\ Current conda install: platform : %(platform)s conda version : %(conda_version)s python version : %(python_version)s root environment : %(root_prefix)s (%(_rtwro)s) default environment : %(default_prefix)s envs directories : %(_envs_dirs)s package cache : %(_pkgs_dirs)s channel URLs : %(_channels_disp)s config file : %(rc_path)s is foreign system : %(is_foreign)s """ % info_dict) if not is_initialized(): print("""\ # NOTE: # root directory '%s' uninitalized, # use 'conda init' to initialize.""" % config.root_dir) del info_dict['channels_disp'] if args.envs: if not args.json: print("# conda environments:") print("#") def disp_env(prefix): fmt = '%-20s %s %s' default = '*' if prefix == config.default_prefix else ' ' name = (config.root_env_name if prefix == config.root_dir else basename(prefix)) if not args.json: print(fmt % (name, default, prefix)) for envs_dir in config.envs_dirs: if not isdir(envs_dir): continue for dn in sorted(os.listdir(envs_dir)): if dn.startswith('.'): continue prefix = join(envs_dir, dn) if isdir(prefix): prefix = join(envs_dir, dn) disp_env(prefix) info_dict['envs'].append(prefix) disp_env(config.root_dir) print() if args.system and not args.json: from conda.cli.find_commands import find_commands, find_executable print("sys.version: %s..." 
% (sys.version[:40])) print("sys.prefix: %s" % sys.prefix) print("sys.executable: %s" % sys.executable) print("conda location: %s" % dirname(conda.__file__)) for cmd in sorted(set(find_commands() + ['build'])): print("conda-%s: %s" % (cmd, find_executable(cmd))) print() evars = ['PATH', 'PYTHONPATH', 'PYTHONHOME', 'CONDA_DEFAULT_ENV', 'CIO_TEST', 'CONDA_ENVS_PATH'] if config.platform == 'linux': evars.append('LD_LIBRARY_PATH') elif config.platform == 'osx': evars.append('DYLD_LIBRARY_PATH') for ev in sorted(evars): print("%s: %s" % (ev, os.getenv(ev, '<not set>'))) print() if args.license and not args.json: try: from _license import show_info show_info() except ImportError: print("""\ WARNING: could import _license.show_info # try: # $ conda install -n root _license""") if args.json: common.stdout_json(info_dict)
def install(args, parser, command='install'): """ conda install, conda update, and conda create """ newenv = bool(command == 'create') if newenv: common.ensure_name_or_prefix(args, command) prefix = common.get_prefix(args, search=not newenv) if newenv: check_prefix(prefix, json=args.json) if config.force_32bit and plan.is_root_prefix(prefix): common.error_and_exit("cannot use CONDA_FORCE_32BIT=1 in root env") if command == 'update': if not args.file: if not args.all and len(args.packages) == 0: common.error_and_exit("""no package names supplied # If you want to update to a newer version of Anaconda, type: # # $ conda update --prefix %s anaconda """ % prefix, json=args.json, error_type="ValueError") if command == 'update' and not args.all: linked = ci.linked(prefix) for name in args.packages: common.arg2spec(name, json=args.json) if '=' in name: common.error_and_exit("Invalid package name: '%s'" % (name), json=args.json, error_type="ValueError") if name not in set(ci.name_dist(d) for d in linked): common.error_and_exit("package '%s' is not installed in %s" % (name, prefix), json=args.json, error_type="ValueError") if newenv and not args.no_default_packages: default_packages = config.create_default_packages[:] # Override defaults if they are specified at the command line for default_pkg in config.create_default_packages: if any(pkg.split('=')[0] == default_pkg for pkg in args.packages): default_packages.remove(default_pkg) args.packages.extend(default_packages) else: default_packages = [] common.ensure_override_channels_requires_channel(args) channel_urls = args.channel or () specs = [] if args.file: for fpath in args.file: specs.extend(common.specs_from_url(fpath, json=args.json)) elif getattr(args, 'all', False): linked = ci.linked(prefix) if not linked: common.error_and_exit("There are no packages installed in the " "prefix %s" % prefix) for pkg in linked: name, ver, build = pkg.rsplit('-', 2) if name in getattr(args, '_skip', ['anaconda']): continue if name == 'python' and ver.startswith('2'): # Oh Python 2... 
specs.append('%s >=%s,<3' % (name, ver)) else: specs.append('%s' % name) specs.extend(common.specs_from_args(args.packages, json=args.json)) if command == 'install' and args.revision: get_revision(args.revision, json=args.json) elif not (newenv and args.clone): common.check_specs(prefix, specs, json=args.json, create=(command == 'create')) num_cp = sum(s.endswith('.tar.bz2') for s in args.packages) if num_cp: if num_cp == len(args.packages): depends = misc.install_local_packages(prefix, args.packages, verbose=not args.quiet) if args.no_deps: depends = [] specs = list(set(depends)) args.unknown = True else: common.error_and_exit( "cannot mix specifications with conda package filenames", json=args.json, error_type="ValueError") # handle tar file containing conda packages if len(args.packages) == 1: tar_path = args.packages[0] if tar_path.endswith('.tar'): depends = install_tar(prefix, tar_path, verbose=not args.quiet) if args.no_deps: depends = [] specs = list(set(depends)) args.unknown = True if args.use_local: from conda.fetch import fetch_index from conda.utils import url_path try: from conda_build.config import croot except ImportError: common.error_and_exit( "you need to have 'conda-build >= 1.7.1' installed" " to use the --use-local option", json=args.json, error_type="RuntimeError") # remove the cache such that a refetch is made, # this is necessary because we add the local build repo URL fetch_index.cache = {} if exists(croot): channel_urls = [url_path(croot)] + list(channel_urls) index = common.get_index_trap(channel_urls=channel_urls, prepend=not args.override_channels, use_cache=args.use_index_cache, unknown=args.unknown, json=args.json, offline=args.offline) if newenv and args.clone: if set(args.packages) - set(default_packages): common.error_and_exit('did not expect any arguments for --clone', json=args.json, error_type="ValueError") clone(args.clone, prefix, json=args.json, quiet=args.quiet, index=index) misc.append_env(prefix) misc.touch_nonadmin(prefix) if not args.json: print_activate(args.name if args.name else prefix) return # Don't update packages that are already up-to-date if command == 'update' and not (args.all or args.force): r = Resolve(index) orig_packages = args.packages[:] for name in orig_packages: installed_metadata = [ci.is_linked(prefix, dist) for dist in linked] vers_inst = [dist.rsplit('-', 2)[1] for dist in linked if dist.rsplit('-', 2)[0] == name] build_inst = [m['build_number'] for m in installed_metadata if m['name'] == name] try: assert len(vers_inst) == 1, name assert len(build_inst) == 1, name except AssertionError as e: if args.json: common.exception_and_exit(e, json=True) else: raise pkgs = sorted(r.get_pkgs(MatchSpec(name))) if not pkgs: # Shouldn't happen? 
continue latest = pkgs[-1] if (latest.version == vers_inst[0] and latest.build_number == build_inst[0]): args.packages.remove(name) if not args.packages: from conda.cli.main_list import print_packages if not args.json: regex = '^(%s)$' % '|'.join(orig_packages) print('# All requested packages already installed.') print_packages(prefix, regex) else: common.stdout_json_success( message='All requested packages already installed.') return if args.force: args.no_deps = True spec_names = set(s.split()[0] for s in specs) if args.no_deps: only_names = spec_names else: only_names = None if not isdir(prefix) and not newenv: if args.mkdir: try: os.makedirs(prefix) except OSError: common.error_and_exit("Error: could not create directory: %s" % prefix, json=args.json, error_type="OSError") else: common.error_and_exit("""\ environment does not exist: %s # # Use 'conda create' to create an environment before installing packages # into it. #""" % prefix, json=args.json, error_type="NoEnvironmentFound") try: if command == 'install' and args.revision: actions = plan.revert_actions(prefix, get_revision(args.revision)) else: with common.json_progress_bars(json=args.json and not args.quiet): actions = plan.install_actions(prefix, index, specs, force=args.force, only_names=only_names, pinned=args.pinned, minimal_hint=args.alt_hint, update_deps=args.update_deps) if config.always_copy or args.copy: new_link = [] for pkg in actions["LINK"]: dist, pkgs_dir, lt = inst.split_linkarg(pkg) lt = ci.LINK_COPY new_link.append("%s %s %d" % (dist, pkgs_dir, lt)) actions["LINK"] = new_link except NoPackagesFound as e: error_message = e.args[0] if command == 'update' and args.all: # Packages not found here just means they were installed but # cannot be found any more. Just skip them. if not args.json: print("Warning: %s, skipping" % error_message) else: # Not sure what to do here pass args._skip = getattr(args, '_skip', ['anaconda']) for pkg in e.pkgs: p = pkg.split()[0] if p in args._skip: # Avoid infinite recursion. This can happen if a spec # comes from elsewhere, like --file raise args._skip.append(p) return install(args, parser, command=command) else: packages = {index[fn]['name'] for fn in index} for pkg in e.pkgs: close = get_close_matches(pkg, packages, cutoff=0.7) if close: error_message += ("\n\nDid you mean one of these?" 
"\n\n %s" % (', '.join(close))) error_message += '\n\nYou can search for this package on anaconda.org with' error_message += '\n\n anaconda search -t conda %s' % pkg if len(e.pkgs) > 1: # Note this currently only happens with dependencies not found error_message += '\n\n (and similarly for the other packages)' if not find_executable('anaconda', include_others=False): error_message += '\n\nYou may need to install the anaconda-client command line client with' error_message += '\n\n conda install anaconda-client' pinned_specs = plan.get_pinned_specs(prefix) if pinned_specs: error_message += "\n\nNote that you have pinned specs in %s:" % join(prefix, 'conda-meta', 'pinned') error_message += "\n\n %r" % pinned_specs common.error_and_exit(error_message, json=args.json) except SystemExit as e: # Unsatisfiable package specifications/no such revision/import error error_type = 'UnsatisfiableSpecifications' if e.args and 'could not import' in e.args[0]: error_type = 'ImportError' common.exception_and_exit(e, json=args.json, newline=True, error_text=False, error_type=error_type) if plan.nothing_to_do(actions): from conda.cli.main_list import print_packages if not args.json: regex = '^(%s)$' % '|'.join(spec_names) print('\n# All requested packages already installed.') print_packages(prefix, regex) else: common.stdout_json_success( message='All requested packages already installed.') return if not args.json: print() print("Package plan for installation in environment %s:" % prefix) plan.display_actions(actions, index, show_channel_urls=args.show_channel_urls) if command in {'install', 'update'}: common.check_write(command, prefix) if not args.json: common.confirm_yn(args) elif args.dry_run: common.stdout_json_success(actions=actions, dry_run=True) sys.exit(0) with common.json_progress_bars(json=args.json and not args.quiet): try: plan.execute_actions(actions, index, verbose=not args.quiet) if not (command == 'update' and args.all): try: with open(join(prefix, 'conda-meta', 'history'), 'a') as f: f.write('# %s specs: %s\n' % (command, specs)) except IOError as e: if e.errno == errno.EACCES: log.debug("Can't write the history file") else: raise except RuntimeError as e: if len(e.args) > 0 and "LOCKERROR" in e.args[0]: error_type = "AlreadyLocked" else: error_type = "RuntimeError" common.exception_and_exit(e, error_type=error_type, json=args.json) except SystemExit as e: common.exception_and_exit(e, json=args.json) if newenv: misc.append_env(prefix) misc.touch_nonadmin(prefix) if not args.json: print_activate(args.name if args.name else prefix) if args.json: common.stdout_json_success(actions=actions)
def ensure_linked_actions(dists, prefix, index=None, force=False, always_copy=False): actions = defaultdict(list) actions[inst.PREFIX] = prefix actions['op_order'] = (inst.RM_FETCHED, inst.FETCH, inst.RM_EXTRACTED, inst.EXTRACT, inst.UNLINK, inst.LINK) for dist in dists: fetched_in = install.is_fetched(dist) extracted_in = install.is_extracted(dist) if fetched_in and index is not None: # Test the MD5, and possibly re-fetch fn = dist + '.tar.bz2' try: if md5_file(fetched_in) != index[fn]['md5']: # RM_FETCHED now removes the extracted data too actions[inst.RM_FETCHED].append(dist) # Re-fetch, re-extract, re-link fetched_in = extracted_in = None force = True except KeyError: sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn) if not force and install.is_linked(prefix, dist): continue if extracted_in and force: # Always re-extract in the force case actions[inst.RM_EXTRACTED].append(dist) extracted_in = None # Otherwise we need to extract, and possibly fetch if not extracted_in and not fetched_in: # If there is a cache conflict, clean it up fetched_in, conflict = install.find_new_location(dist) fetched_in = join(fetched_in, install._dist2filename(dist)) if conflict is not None: actions[inst.RM_FETCHED].append(conflict) actions[inst.FETCH].append(dist) if not extracted_in: actions[inst.EXTRACT].append(dist) fetched_dist = extracted_in or fetched_in[:-8] fetched_dir = dirname(fetched_dist) try: # Determine what kind of linking is necessary if not extracted_in: # If not already extracted, create some dummy # data to test with install.rm_rf(fetched_dist) ppath = join(fetched_dist, 'info') os.makedirs(ppath) index_json = join(ppath, 'index.json') with open(index_json, 'w'): pass if config_always_copy or always_copy: lt = install.LINK_COPY elif install.try_hard_link(fetched_dir, prefix, dist): lt = install.LINK_HARD elif allow_softlinks and sys.platform != 'win32': lt = install.LINK_SOFT else: lt = install.LINK_COPY actions[inst.LINK].append('%s %d' % (dist, lt)) except (OSError, IOError): actions[inst.LINK].append(dist) finally: if not extracted_in: # Remove the dummy data try: install.rm_rf(fetched_dist) except (OSError, IOError): pass return actions
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create
    """
    newenv = bool(command == 'create')
    isupdate = bool(command == 'update')
    isinstall = bool(command == 'install')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix, json=args.json)
    if force_32bit and plan.is_root_prefix(prefix):
        common.error_and_exit("cannot use CONDA_FORCE_32BIT=1 in root env")

    if isupdate and not (args.file or args.all or args.packages):
        common.error_and_exit("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix,
                              json=args.json,
                              error_type="ValueError")

    linked = ci.linked(prefix)
    lnames = {ci.name_dist(d) for d in linked}
    if isupdate and not args.all:
        for name in args.packages:
            common.arg2spec(name, json=args.json, update=True)
            if name not in lnames:
                common.error_and_exit("Package '%s' is not installed in %s" %
                                      (name, prefix),
                                      json=args.json,
                                      error_type="ValueError")

    if newenv and not args.no_default_packages:
        default_packages = create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)
    else:
        default_packages = []

    common.ensure_use_local(args)
    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    specs = []
    if args.file:
        for fpath in args.file:
            specs.extend(common.specs_from_url(fpath, json=args.json))
        if '@EXPLICIT' in specs:
            misc.explicit(specs, prefix, verbose=not args.quiet)
            return
    elif getattr(args, 'all', False):
        if not linked:
            common.error_and_exit("There are no packages installed in the "
                                  "prefix %s" % prefix)
        specs.extend(nm for nm in lnames)
    specs.extend(common.specs_from_args(args.packages, json=args.json))

    if isinstall and args.revision:
        get_revision(args.revision, json=args.json)
    elif not (newenv and args.clone):
        common.check_specs(prefix, specs, json=args.json,
                           create=(command == 'create'))

    num_cp = sum(s.endswith('.tar.bz2') for s in args.packages)
    if num_cp:
        if num_cp == len(args.packages):
            misc.explicit(args.packages, prefix, verbose=not args.quiet)
            return
        else:
            common.error_and_exit(
                "cannot mix specifications with conda package filenames",
                json=args.json,
                error_type="ValueError")

    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            install_tar(prefix, tar_path, verbose=not args.quiet)
            return

    if newenv and args.clone:
        if set(args.packages) - set(default_packages):
            common.error_and_exit('did not expect any arguments for --clone',
                                  json=args.json,
                                  error_type="ValueError")
        clone(args.clone, prefix, json=args.json, quiet=args.quiet,
              fetch_args={'use_cache': args.use_index_cache,
                          'unknown': args.unknown})
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)
        return

    index = common.get_index_trap(channel_urls=channel_urls,
                                  prepend=not args.override_channels,
                                  use_local=args.use_local,
                                  use_cache=args.use_index_cache,
                                  unknown=args.unknown,
                                  json=args.json,
                                  offline=args.offline,
                                  prefix=prefix)
    r = Resolve(index)
    ospecs = list(specs)
    plan.add_defaults_to_specs(r, linked, specs, update=isupdate)

    # Don't update packages that are already up-to-date
    if isupdate and not (args.all or args.force):
        orig_packages = args.packages[:]
        installed_metadata = [ci.is_linked(prefix, dist) for dist in linked]
        for name in orig_packages:
            vers_inst = [m['version'] for m in installed_metadata
                         if m['name'] == name]
            build_inst = [m['build_number'] for m in installed_metadata
                          if m['name'] == name]

            try:
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
            except AssertionError as e:
                if args.json:
                    common.exception_and_exit(e, json=True)
                else:
                    raise

            pkgs = sorted(r.get_pkgs(name))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]

            if (latest.version == vers_inst[0] and
                    latest.build_number == build_inst[0]):
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import print_packages

            if not args.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(
                    message='All requested packages already installed.')
            return

    if args.force:
        args.no_deps = True

    if args.no_deps:
        only_names = set(s.split()[0] for s in ospecs)
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                common.error_and_exit("Error: could not create directory: %s" % prefix,
                                      json=args.json,
                                      error_type="OSError")
        else:
            common.error_and_exit("""\
environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix,
                                  json=args.json,
                                  error_type="NoEnvironmentFound")

    try:
        if isinstall and args.revision:
            actions = plan.revert_actions(prefix, get_revision(args.revision))
        else:
            with common.json_progress_bars(json=args.json and not args.quiet):
                actions = plan.install_actions(prefix, index, specs,
                                               force=args.force,
                                               only_names=only_names,
                                               pinned=args.pinned,
                                               always_copy=args.copy,
                                               minimal_hint=args.alt_hint,
                                               update_deps=args.update_deps)
    except NoPackagesFound as e:
        error_message = e.args[0]

        if isupdate and args.all:
            # Packages not found here just means they were installed but
            # cannot be found any more. Just skip them.
            if not args.json:
                print("Warning: %s, skipping" % error_message)
            else:
                # Not sure what to do here
                pass
            args._skip = getattr(args, '_skip', ['anaconda'])
            for pkg in e.pkgs:
                p = pkg.split()[0]
                if p in args._skip:
                    # Avoid infinite recursion. This can happen if a spec
                    # comes from elsewhere, like --file
                    raise
                args._skip.append(p)

            return install(args, parser, command=command)
        else:
            packages = {index[fn]['name'] for fn in index}

            nfound = 0
            for pkg in sorted(e.pkgs):
                pkg = pkg.split()[0]
                if pkg in packages:
                    continue
                close = get_close_matches(pkg, packages, cutoff=0.7)
                if not close:
                    continue
                if nfound == 0:
                    error_message += "\n\nClose matches found; did you mean one of these?\n"
                error_message += "\n %s: %s" % (pkg, ', '.join(close))
                nfound += 1
            error_message += '\n\nYou can search for packages on anaconda.org with'
            error_message += '\n\n anaconda search -t conda %s' % pkg
            if len(e.pkgs) > 1:
                # Note this currently only happens with dependencies not found
                error_message += '\n\n(and similarly for the other packages)'

            if not find_executable('anaconda', include_others=False):
                error_message += '\n\nYou may need to install the anaconda-client'
                error_message += ' command line client with'
                error_message += '\n\n conda install anaconda-client'

            pinned_specs = plan.get_pinned_specs(prefix)
            if pinned_specs:
                path = join(prefix, 'conda-meta', 'pinned')
                error_message += "\n\nNote that you have pinned specs in %s:" % path
                error_message += "\n\n %r" % pinned_specs

            common.error_and_exit(error_message, json=args.json)
    except (Unsatisfiable, SystemExit) as e:
        # Unsatisfiable package specifications/no such revision/import error
        error_type = 'UnsatisfiableSpecifications'
        if e.args and 'could not import' in e.args[0]:
            error_type = 'ImportError'
        common.exception_and_exit(e, json=args.json, newline=True,
                                  error_text=False, error_type=error_type)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import print_packages

        if not args.json:
            regex = '^(%s)$' % '|'.join(s.split()[0] for s in ospecs)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(
                message='All requested packages already installed.')
        return

    if not args.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        plan.display_actions(actions, index,
                             show_channel_urls=args.show_channel_urls)

    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    if not args.json:
        common.confirm_yn(args)
    elif args.dry_run:
        common.stdout_json_success(actions=actions, dry_run=True)
        sys.exit(0)

    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
            if not (command == 'update' and args.all):
                try:
                    with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                        f.write('# %s specs: %s\n' % (command, specs))
                except IOError as e:
                    if e.errno == errno.EACCES:
                        log.debug("Can't write the history file")
                    else:
                        raise
        except RuntimeError as e:
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            common.exception_and_exit(e, error_type=error_type, json=args.json)
        except SystemExit as e:
            common.exception_and_exit(e, json=args.json)

    if newenv:
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)

    if args.json:
        common.stdout_json_success(actions=actions)
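# --- Illustrative sketch (not part of conda) ---------------------------------
# The NoPackagesFound handler above suggests near-miss names with
# difflib.get_close_matches(pkg, packages, cutoff=0.7). A standalone demo of
# that call; the package names here are made up for illustration.
from difflib import get_close_matches

available = {'numpy', 'scipy', 'pandas', 'matplotlib', 'scikit-learn'}
for requested in ('numpi', 'marplotlib', 'doesnotexist'):
    close = get_close_matches(requested, available, cutoff=0.7)
    if close:
        print('%s: did you mean one of these? %s' % (requested, ', '.join(close)))
    else:
        print('%s: no close match found' % requested)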
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create
    """
    newenv = bool(command == 'create')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix, json=args.json)
    if config.force_32bit and plan.is_root_prefix(prefix):
        common.error_and_exit("cannot use CONDA_FORCE_32BIT=1 in root env")

    if command == 'update':
        if args.all:
            if args.packages:
                common.error_and_exit("""--all cannot be used with packages""",
                                      json=args.json,
                                      error_type="ValueError")
        elif not args.file:
            if len(args.packages) == 0:
                common.error_and_exit("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix,
                                      json=args.json,
                                      error_type="ValueError")

    if command == 'update':
        linked = ci.linked(prefix)
        for name in args.packages:
            common.arg2spec(name, json=args.json)
            if '=' in name:
                common.error_and_exit("Invalid package name: '%s'" % (name),
                                      json=args.json,
                                      error_type="ValueError")
            if name not in set(ci.name_dist(d) for d in linked):
                common.error_and_exit("package '%s' is not installed in %s" %
                                      (name, prefix),
                                      json=args.json,
                                      error_type="ValueError")

    if newenv and not args.no_default_packages:
        default_packages = config.create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in config.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)
    else:
        default_packages = []

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    specs = []
    if args.file:
        for fpath in args.file:
            specs.extend(common.specs_from_url(fpath, json=args.json))
    elif getattr(args, 'all', False):
        linked = ci.linked(prefix)
        if not linked:
            common.error_and_exit("There are no packages installed in the "
                                  "prefix %s" % prefix)
        for pkg in linked:
            name, ver, build = pkg.rsplit('-', 2)
            if name in getattr(args, '_skip', ['anaconda']):
                continue
            if name == 'python' and ver.startswith('2'):
                # Oh Python 2...
                specs.append('%s >=%s,<3' % (name, ver))
            else:
                specs.append('%s' % name)
    specs.extend(common.specs_from_args(args.packages, json=args.json))

    if command == 'install' and args.revision:
        get_revision(args.revision, json=args.json)
    elif not (newenv and args.clone):
        common.check_specs(prefix, specs, json=args.json,
                           create=(command == 'create'))

    num_cp = sum(s.endswith('.tar.bz2') for s in args.packages)
    if num_cp:
        if num_cp == len(args.packages):
            depends = misc.install_local_packages(prefix, args.packages,
                                                  verbose=not args.quiet)
            if args.no_deps:
                depends = []
            specs = list(set(depends))
            args.unknown = True
        else:
            common.error_and_exit(
                "cannot mix specifications with conda package filenames",
                json=args.json,
                error_type="ValueError")

    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            depends = install_tar(prefix, tar_path, verbose=not args.quiet)
            if args.no_deps:
                depends = []
            specs = list(set(depends))
            args.unknown = True

    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit(
                "you need to have 'conda-build >= 1.7.1' installed"
                " to use the --use-local option",
                json=args.json,
                error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        if exists(croot):
            channel_urls = [url_path(croot)] + list(channel_urls)

    index = common.get_index_trap(channel_urls=channel_urls,
                                  prepend=not args.override_channels,
                                  use_cache=args.use_index_cache,
                                  unknown=args.unknown,
                                  json=args.json,
                                  offline=args.offline)

    if newenv and args.clone:
        if set(args.packages) - set(default_packages):
            common.error_and_exit('did not expect any arguments for --clone',
                                  json=args.json,
                                  error_type="ValueError")
        clone(args.clone, prefix, json=args.json, quiet=args.quiet, index=index)
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)
        return

    # Don't update packages that are already up-to-date
    if command == 'update' and not (args.all or args.force):
        r = Resolve(index)
        orig_packages = args.packages[:]
        for name in orig_packages:
            installed_metadata = [ci.is_linked(prefix, dist)
                                  for dist in linked]
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            build_inst = [m['build_number'] for m in installed_metadata
                          if m['name'] == name]

            try:
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
            except AssertionError as e:
                if args.json:
                    common.exception_and_exit(e, json=True)
                else:
                    raise

            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]

            if (latest.version == vers_inst[0] and
                    latest.build_number == build_inst[0]):
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import print_packages

            if not args.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(
                    message='All requested packages already installed.')
            return

    if args.force:
        args.no_deps = True

    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                common.error_and_exit("Error: could not create directory: %s" % prefix,
                                      json=args.json,
                                      error_type="OSError")
        else:
            common.error_and_exit("""\
environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix,
                                  json=args.json,
                                  error_type="NoEnvironmentFound")

    try:
        if command == 'install' and args.revision:
            actions = plan.revert_actions(prefix, get_revision(args.revision))
        else:
            with common.json_progress_bars(json=args.json and not args.quiet):
                actions = plan.install_actions(prefix, index, specs,
                                               force=args.force,
                                               only_names=only_names,
                                               pinned=args.pinned,
                                               minimal_hint=args.alt_hint)
            if args.copy:
                new_link = []
                for pkg in actions["LINK"]:
                    dist, pkgs_dir, lt = inst.split_linkarg(pkg)
                    lt = ci.LINK_COPY
                    new_link.append("%s %s %d" % (dist, pkgs_dir, lt))
                actions["LINK"] = new_link
    except NoPackagesFound as e:
        error_message = e.args[0]

        if command == 'update' and args.all:
            # Packages not found here just means they were installed but
            # cannot be found any more. Just skip them.
            if not args.json:
                print("Warning: %s, skipping" % error_message)
            else:
                # Not sure what to do here
                pass
            args._skip = getattr(args, '_skip', ['anaconda'])
            for pkg in e.pkgs:
                p = pkg.split()[0]
                if p in args._skip:
                    # Avoid infinite recursion. This can happen if a spec
                    # comes from elsewhere, like --file
                    raise
                args._skip.append(p)

            return install(args, parser, command=command)
        else:
            packages = {index[fn]['name'] for fn in index}

            for pkg in e.pkgs:
                close = get_close_matches(pkg, packages, cutoff=0.7)
                if close:
                    error_message += ("\n\nDid you mean one of these?"
                                      "\n\n %s" % (', '.join(close)))
                error_message += '\n\nYou can search for this package on anaconda.org with'
                error_message += '\n\n anaconda search -t conda %s' % pkg
            if len(e.pkgs) > 1:
                # Note this currently only happens with dependencies not found
                error_message += '\n\n (and similarly for the other packages)'

            if not find_executable('anaconda', include_others=False):
                error_message += '\n\nYou may need to install the anaconda-client command line client with'
                error_message += '\n\n conda install anaconda-client'

            pinned_specs = plan.get_pinned_specs(prefix)
            if pinned_specs:
                error_message += "\n\nNote that you have pinned specs in %s:" % join(prefix, 'conda-meta', 'pinned')
                error_message += "\n\n %r" % pinned_specs

            common.error_and_exit(error_message, json=args.json)
    except SystemExit as e:
        # Unsatisfiable package specifications/no such revision/import error
        error_type = 'UnsatisfiableSpecifications'
        if e.args and 'could not import' in e.args[0]:
            error_type = 'ImportError'
        common.exception_and_exit(e, json=args.json, newline=True,
                                  error_text=False, error_type=error_type)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import print_packages

        if not args.json:
            regex = '^(%s)$' % '|'.join(spec_names)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(
                message='All requested packages already installed.')
        return

    if not args.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    if not args.json:
        common.confirm_yn(args)
    elif args.dry_run:
        common.stdout_json_success(actions=actions, dry_run=True)
        sys.exit(0)

    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
            if not (command == 'update' and args.all):
                try:
                    with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                        f.write('# %s specs: %s\n' % (command, specs))
                except IOError as e:
                    if e.errno == errno.EACCES:
                        log.debug("Can't write the history file")
                    else:
                        raise
        except RuntimeError as e:
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            common.exception_and_exit(e, error_type=error_type, json=args.json)
        except SystemExit as e:
            common.exception_and_exit(e, json=args.json)

    if newenv:
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)

    if args.json:
        common.stdout_json_success(actions=actions)
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create
    """
    newenv = bool(command == 'create')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix)

    if command == 'update':
        if args.all:
            if args.packages:
                sys.exit("""Error: --all cannot be used with packages""")
        else:
            if len(args.packages) == 0:
                sys.exit("""Error: no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)

    if command == 'update':
        linked = ci.linked(prefix)
        for name in args.packages:
            common.arg2spec(name)
            if '=' in name:
                sys.exit("Invalid package name: '%s'" % (name))
            if name not in set(ci.name_dist(d) for d in linked):
                sys.exit("Error: package '%s' is not installed in %s" %
                         (name, prefix))

    if newenv and args.clone:
        if args.packages:
            sys.exit('Error: did not expect any arguments for --clone')
        clone(args.clone, prefix)
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
        return

    if newenv and not args.no_default_packages:
        default_packages = config.create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in config.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    if args.file:
        specs = common.specs_from_url(args.file)
    elif getattr(args, 'all', False):
        specs = []
        linked = ci.linked(prefix)
        for pkg in linked:
            name, ver, build = pkg.rsplit('-', 2)
            if name == 'python' and ver.startswith('2'):
                # Oh Python 2...
                specs.append('%s >=%s,<3' % (name, ver))
            else:
                specs.append('%s >=%s' % (name, ver))
    else:
        specs = common.specs_from_args(args.packages)

    if command == 'install' and args.revision:
        get_revision(args.revision)
    else:
        common.check_specs(prefix, specs)

    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build import config as build_config
        except ImportError:
            sys.exit("Error: you need to have 'conda-build' installed"
                     " to use the --use-local option")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        index = get_index([url_path(build_config.croot)],
                          use_cache=args.use_index_cache,
                          unknown=args.unknown)
    else:
        index = get_index(channel_urls=channel_urls,
                          prepend=not args.override_channels,
                          use_cache=args.use_index_cache,
                          unknown=args.unknown)

    # Don't update packages that are already up-to-date
    if command == 'update' and not args.all:
        r = Resolve(index)
        orig_packages = args.packages[:]
        for name in orig_packages:
            installed_metadata = [ci.is_linked(prefix, dist)
                                  for dist in linked]
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            build_inst = [m['build_number'] for m in installed_metadata
                          if m['name'] == name]
            assert len(vers_inst) == 1, name
            assert len(build_inst) == 1, name
            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]
            if latest.version == vers_inst[0] and latest.build_number == build_inst[0]:
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import list_packages

            regex = '^(%s)$' % '|'.join(orig_packages)
            print('# All requested packages already installed.')
            list_packages(prefix, regex)
            return

    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            install_tar(prefix, tar_path, verbose=not args.quiet)
            return

    # handle explicit installs of conda packages
    if args.packages and all(s.endswith('.tar.bz2') for s in args.packages):
        from conda.misc import install_local_packages
        install_local_packages(prefix, args.packages, verbose=not args.quiet)
        return

    if any(s.endswith('.tar.bz2') for s in args.packages):
        sys.exit("cannot mix specifications with conda package filenames")

    if args.force:
        args.no_deps = True

    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                sys.exit("Error: could not create directory: %s" % prefix)
        else:
            sys.exit("""\
Error: environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix)

    if command == 'install' and args.revision:
        actions = plan.revert_actions(prefix, get_revision(args.revision))
    else:
        actions = plan.install_actions(prefix, index, specs,
                                       force=args.force,
                                       only_names=only_names,
                                       pinned=args.pinned,
                                       minimal_hint=args.alt_hint)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import list_packages

        regex = '^(%s)$' % '|'.join(spec_names)
        print('\n# All requested packages already installed.')
        list_packages(prefix, regex)
        return

    print()
    print("Package plan for installation in environment %s:" % prefix)
    plan.display_actions(actions, index)

    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    if not pscheck.main(args):
        common.confirm_yn(args)

    plan.execute_actions(actions, index, verbose=not args.quiet)

    if newenv:
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
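# --- Illustrative sketch (not part of conda) ---------------------------------
# The --all branches above recover (name, version, build) by splitting the dist
# string from the right, and pin Python 2 as 'python >=2.x,<3' so an update
# cannot jump to Python 3. A standalone demo of that parsing; the dist strings
# below are hypothetical.
dists = ['python-2.7.10-0', 'scikit-learn-0.16.1-np19py27_0']
specs = []
for dist in dists:
    name, ver, build = dist.rsplit('-', 2)
    if name == 'python' and ver.startswith('2'):
        specs.append('%s >=%s,<3' % (name, ver))
    else:
        specs.append(name)
print(specs)  # ['python >=2.7.10,<3', 'scikit-learn']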