def clone(src_arg, dst_prefix, json=False, quiet=False):
    from conda.misc import clone_env

    if os.sep in src_arg:
        src_prefix = abspath(src_arg)
        if not isdir(src_prefix):
            common.error_and_exit('no such directory: %s' % src_arg,
                                  json=json,
                                  error_type="NoEnvironmentFound")
    else:
        src_prefix = common.find_prefix_name(src_arg)
        if src_prefix is None:
            common.error_and_exit('could not find environment: %s' % src_arg,
                                  json=json,
                                  error_type="NoEnvironmentFound")

    if not json:
        print("src_prefix: %r" % src_prefix)
        print("dst_prefix: %r" % dst_prefix)

    with common.json_progress_bars(json=json and not quiet):
        actions, untracked_files = clone_env(src_prefix, dst_prefix,
                                             verbose=not json,
                                             quiet=quiet)

    if json:
        common.stdout_json_success(actions=actions,
                                   untracked_files=list(untracked_files),
                                   src_prefix=src_prefix,
                                   dst_prefix=dst_prefix)
def clone(src_arg, dst_prefix, json=False, quiet=False, index=None):
    if os.sep in src_arg:
        src_prefix = abspath(src_arg)
        if not isdir(src_prefix):
            common.error_and_exit('no such directory: %s' % src_arg,
                                  json=json,
                                  error_type="NoEnvironmentFound")
    else:
        src_prefix = common.find_prefix_name(src_arg)
        if src_prefix is None:
            common.error_and_exit('could not find environment: %s' % src_arg,
                                  json=json,
                                  error_type="NoEnvironmentFound")

    if not json:
        print("src_prefix: %r" % src_prefix)
        print("dst_prefix: %r" % dst_prefix)

    with common.json_progress_bars(json=json and not quiet):
        actions, untracked_files = misc.clone_env(src_prefix, dst_prefix,
                                                  verbose=not json,
                                                  quiet=quiet,
                                                  index=index)

    if json:
        common.stdout_json_success(
            actions=actions,
            untracked_files=list(untracked_files),
            src_prefix=src_prefix,
            dst_prefix=dst_prefix
        )
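# Illustrative sketch (not part of conda): both clone() variants above decide whether
# `src_arg` names a directory or an environment purely by the presence of a path
# separator. The helper below mirrors only that dispatch rule; the helper name is
# hypothetical and the example arguments are made up.
import os


def _looks_like_path(src_arg):
    # Anything containing a path separator is treated as a source prefix on disk;
    # everything else is looked up by environment name (common.find_prefix_name above).
    return os.sep in src_arg


assert _looks_like_path(os.path.join("envs", "py310"))
assert not _looks_like_path("py310")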
def print_result(args, prefix, result):
    if context.json:
        if result["conda"] is None and result["pip"] is None:
            cli_common.stdout_json_success(
                message='All requested packages already installed.')
        else:
            if result["conda"] is not None:
                actions = result["conda"]
            else:
                actions = {}
            if result["pip"] is not None:
                actions["PIP"] = result["pip"]
            cli_common.stdout_json_success(prefix=prefix, actions=actions)
    else:
        cli_install.print_activate(args.name if args.name else prefix)
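# Illustrative sketch (not part of conda): print_result() above folds the conda and pip
# halves of `result` into a single "actions" mapping before emitting JSON. The helper
# below replays only that merging logic on plain dicts; the helper name and the sample
# package strings are made up.
def _merge_actions(result):
    actions = dict(result["conda"]) if result["conda"] is not None else {}
    if result["pip"] is not None:
        actions["PIP"] = result["pip"]
    return actions


assert _merge_actions({"conda": {"LINK": ["numpy-1.26.0-py311_0"]},
                       "pip": ["requests==2.31.0"]}) == \
    {"LINK": ["numpy-1.26.0-py311_0"], "PIP": ["requests==2.31.0"]}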
def handle_txn(unlink_link_transaction, prefix, args, newenv, remove_op=False):
    if unlink_link_transaction.nothing_to_do:
        if remove_op:
            # No packages found to remove from environment
            raise PackagesNotFoundError(args.package_names)
        elif not newenv:
            if context.json:
                cli_common.stdout_json_success(
                    message="All requested packages already installed."
                )
            return

    if context.dry_run:
        actions = unlink_link_transaction._make_legacy_action_groups()[0]
        if context.json:
            cli_common.stdout_json_success(prefix=prefix, actions=actions, dry_run=True)
        raise DryRunExit()

    try:
        unlink_link_transaction.download_and_extract()
        if context.download_only:
            raise CondaExitZero(
                "Package caches prepared. UnlinkLinkTransaction cancelled with "
                "--download-only option."
            )
        unlink_link_transaction.execute()
    except SystemExit as e:
        raise CondaSystemExit("Exiting", e)

    if context.json:
        actions = unlink_link_transaction._make_legacy_action_groups()[0]
        cli_common.stdout_json_success(prefix=prefix, actions=actions)
def install(args, parser, command='install'): """ conda install, conda update, and conda create """ newenv = bool(command == 'create') if newenv: common.ensure_name_or_prefix(args, command) prefix = common.get_prefix(args, search=not newenv) if newenv: check_prefix(prefix, json=args.json) if config.force_32bit and plan.is_root_prefix(prefix): common.error_and_exit("cannot use CONDA_FORCE_32BIT=1 in root env") if command == 'update': if args.all: if args.packages: common.error_and_exit("""--all cannot be used with packages""", json=args.json, error_type="ValueError") elif not args.file: if len(args.packages) == 0: common.error_and_exit("""no package names supplied # If you want to update to a newer version of Anaconda, type: # # $ conda update --prefix %s anaconda """ % prefix, json=args.json, error_type="ValueError") if command == 'update': linked = ci.linked(prefix) for name in args.packages: common.arg2spec(name, json=args.json) if '=' in name: common.error_and_exit("Invalid package name: '%s'" % (name), json=args.json, error_type="ValueError") if name not in set(ci.name_dist(d) for d in linked): common.error_and_exit("package '%s' is not installed in %s" % (name, prefix), json=args.json, error_type="ValueError") if newenv and not args.no_default_packages: default_packages = config.create_default_packages[:] # Override defaults if they are specified at the command line for default_pkg in config.create_default_packages: if any(pkg.split('=')[0] == default_pkg for pkg in args.packages): default_packages.remove(default_pkg) args.packages.extend(default_packages) else: default_packages = [] common.ensure_override_channels_requires_channel(args) channel_urls = args.channel or () specs = [] if args.file: for fpath in args.file: specs.extend(common.specs_from_url(fpath, json=args.json)) elif getattr(args, 'all', False): linked = ci.linked(prefix) if not linked: common.error_and_exit("There are no packages installed in the " "prefix %s" % prefix) for pkg in linked: name, ver, build = pkg.rsplit('-', 2) if name in getattr(args, '_skip', ['anaconda']): continue if name == 'python' and ver.startswith('2'): # Oh Python 2... 
specs.append('%s >=%s,<3' % (name, ver)) else: specs.append('%s' % name) specs.extend(common.specs_from_args(args.packages, json=args.json)) if command == 'install' and args.revision: get_revision(args.revision, json=args.json) elif not (newenv and args.clone): common.check_specs(prefix, specs, json=args.json, create=(command == 'create')) num_cp = sum(s.endswith('.tar.bz2') for s in args.packages) if num_cp: if num_cp == len(args.packages): depends = misc.install_local_packages(prefix, args.packages, verbose=not args.quiet) if args.no_deps: depends = [] specs = list(set(depends)) args.unknown = True else: common.error_and_exit( "cannot mix specifications with conda package filenames", json=args.json, error_type="ValueError") # handle tar file containing conda packages if len(args.packages) == 1: tar_path = args.packages[0] if tar_path.endswith('.tar'): depends = install_tar(prefix, tar_path, verbose=not args.quiet) if args.no_deps: depends = [] specs = list(set(depends)) args.unknown = True if args.use_local: from conda.fetch import fetch_index from conda.utils import url_path try: from conda_build.config import croot except ImportError: common.error_and_exit( "you need to have 'conda-build >= 1.7.1' installed" " to use the --use-local option", json=args.json, error_type="RuntimeError") # remove the cache such that a refetch is made, # this is necessary because we add the local build repo URL fetch_index.cache = {} if exists(croot): channel_urls = [url_path(croot)] + list(channel_urls) index = common.get_index_trap(channel_urls=channel_urls, prepend=not args.override_channels, use_cache=args.use_index_cache, unknown=args.unknown, json=args.json, offline=args.offline) if newenv and args.clone: if set(args.packages) - set(default_packages): common.error_and_exit('did not expect any arguments for --clone', json=args.json, error_type="ValueError") clone(args.clone, prefix, json=args.json, quiet=args.quiet, index=index) misc.append_env(prefix) misc.touch_nonadmin(prefix) if not args.json: print_activate(args.name if args.name else prefix) return # Don't update packages that are already up-to-date if command == 'update' and not (args.all or args.force): r = Resolve(index) orig_packages = args.packages[:] for name in orig_packages: installed_metadata = [ci.is_linked(prefix, dist) for dist in linked] vers_inst = [dist.rsplit('-', 2)[1] for dist in linked if dist.rsplit('-', 2)[0] == name] build_inst = [m['build_number'] for m in installed_metadata if m['name'] == name] try: assert len(vers_inst) == 1, name assert len(build_inst) == 1, name except AssertionError as e: if args.json: common.exception_and_exit(e, json=True) else: raise pkgs = sorted(r.get_pkgs(MatchSpec(name))) if not pkgs: # Shouldn't happen? 
continue latest = pkgs[-1] if (latest.version == vers_inst[0] and latest.build_number == build_inst[0]): args.packages.remove(name) if not args.packages: from conda.cli.main_list import print_packages if not args.json: regex = '^(%s)$' % '|'.join(orig_packages) print('# All requested packages already installed.') print_packages(prefix, regex) else: common.stdout_json_success( message='All requested packages already installed.') return if args.force: args.no_deps = True spec_names = set(s.split()[0] for s in specs) if args.no_deps: only_names = spec_names else: only_names = None if not isdir(prefix) and not newenv: if args.mkdir: try: os.makedirs(prefix) except OSError: common.error_and_exit("Error: could not create directory: %s" % prefix, json=args.json, error_type="OSError") else: common.error_and_exit("""\ environment does not exist: %s # # Use 'conda create' to create an environment before installing packages # into it. #""" % prefix, json=args.json, error_type="NoEnvironmentFound") try: if command == 'install' and args.revision: actions = plan.revert_actions(prefix, get_revision(args.revision)) else: with common.json_progress_bars(json=args.json and not args.quiet): actions = plan.install_actions(prefix, index, specs, force=args.force, only_names=only_names, pinned=args.pinned, minimal_hint=args.alt_hint) if args.copy: new_link = [] for pkg in actions["LINK"]: dist, pkgs_dir, lt = inst.split_linkarg(pkg) lt = ci.LINK_COPY new_link.append("%s %s %d" % (dist, pkgs_dir, lt)) actions["LINK"] = new_link except NoPackagesFound as e: error_message = e.args[0] if command == 'update' and args.all: # Packages not found here just means they were installed but # cannot be found any more. Just skip them. if not args.json: print("Warning: %s, skipping" % error_message) else: # Not sure what to do here pass args._skip = getattr(args, '_skip', ['anaconda']) for pkg in e.pkgs: p = pkg.split()[0] if p in args._skip: # Avoid infinite recursion. This can happen if a spec # comes from elsewhere, like --file raise args._skip.append(p) return install(args, parser, command=command) else: packages = {index[fn]['name'] for fn in index} for pkg in e.pkgs: close = get_close_matches(pkg, packages, cutoff=0.7) if close: error_message += ("\n\nDid you mean one of these?" 
"\n\n %s" % (', '.join(close))) error_message += '\n\nYou can search for this package on anaconda.org with' error_message += '\n\n anaconda search -t conda %s' % pkg if len(e.pkgs) > 1: # Note this currently only happens with dependencies not found error_message += '\n\n (and similarly for the other packages)' if not find_executable('anaconda', include_others=False): error_message += '\n\nYou may need to install the anaconda-client command line client with' error_message += '\n\n conda install anaconda-client' pinned_specs = plan.get_pinned_specs(prefix) if pinned_specs: error_message += "\n\nNote that you have pinned specs in %s:" % join(prefix, 'conda-meta', 'pinned') error_message += "\n\n %r" % pinned_specs common.error_and_exit(error_message, json=args.json) except SystemExit as e: # Unsatisfiable package specifications/no such revision/import error error_type = 'UnsatisfiableSpecifications' if e.args and 'could not import' in e.args[0]: error_type = 'ImportError' common.exception_and_exit(e, json=args.json, newline=True, error_text=False, error_type=error_type) if plan.nothing_to_do(actions): from conda.cli.main_list import print_packages if not args.json: regex = '^(%s)$' % '|'.join(spec_names) print('\n# All requested packages already installed.') print_packages(prefix, regex) else: common.stdout_json_success( message='All requested packages already installed.') return if not args.json: print() print("Package plan for installation in environment %s:" % prefix) plan.display_actions(actions, index) if command in {'install', 'update'}: common.check_write(command, prefix) if not args.json: common.confirm_yn(args) elif args.dry_run: common.stdout_json_success(actions=actions, dry_run=True) sys.exit(0) with common.json_progress_bars(json=args.json and not args.quiet): try: plan.execute_actions(actions, index, verbose=not args.quiet) if not (command == 'update' and args.all): try: with open(join(prefix, 'conda-meta', 'history'), 'a') as f: f.write('# %s specs: %s\n' % (command, specs)) except IOError as e: if e.errno == errno.EACCES: log.debug("Can't write the history file") else: raise except RuntimeError as e: if len(e.args) > 0 and "LOCKERROR" in e.args[0]: error_type = "AlreadyLocked" else: error_type = "RuntimeError" common.exception_and_exit(e, error_type=error_type, json=args.json) except SystemExit as e: common.exception_and_exit(e, json=args.json) if newenv: misc.append_env(prefix) misc.touch_nonadmin(prefix) if not args.json: print_activate(args.name if args.name else prefix) if args.json: common.stdout_json_success(actions=actions)
def install(args, parser, command='install'): """ conda install, conda update, and conda create """ newenv = bool(command == 'create') if newenv: common.ensure_name_or_prefix(args, command) prefix = common.get_prefix(args, search=not newenv) if newenv: check_prefix(prefix, json=args.json) if command == 'update': if args.all: if args.packages: common.error_and_exit("""--all cannot be used with packages""", json=args.json, error_type="ValueError") else: if len(args.packages) == 0: common.error_and_exit("""no package names supplied # If you want to update to a newer version of Anaconda, type: # # $ conda update --prefix %s anaconda """ % prefix, json=args.json, error_type="ValueError") if command == 'update': linked = ci.linked(prefix) for name in args.packages: common.arg2spec(name, json=args.json) if '=' in name: common.error_and_exit("Invalid package name: '%s'" % (name), json=args.json, error_type="ValueError") if name not in set(ci.name_dist(d) for d in linked): common.error_and_exit("package '%s' is not installed in %s" % (name, prefix), json=args.json, error_type="ValueError") if newenv and args.clone: if args.packages: common.error_and_exit('did not expect any arguments for --clone', json=args.json, error_type="ValueError") clone(args.clone, prefix, json=args.json, quiet=args.quiet) touch_nonadmin(prefix) if not args.json: print_activate(args.name if args.name else prefix) return if newenv and not args.no_default_packages: default_packages = config.create_default_packages[:] # Override defaults if they are specified at the command line for default_pkg in config.create_default_packages: if any(pkg.split('=')[0] == default_pkg for pkg in args.packages): default_packages.remove(default_pkg) args.packages.extend(default_packages) common.ensure_override_channels_requires_channel(args) channel_urls = args.channel or () specs = [] if args.file: specs.extend(common.specs_from_url(args.file, json=args.json)) elif getattr(args, 'all', False): linked = ci.linked(prefix) for pkg in linked: name, ver, build = pkg.rsplit('-', 2) if name == 'python' and ver.startswith('2'): # Oh Python 2... 
specs.append('%s >=%s,<3' % (name, ver)) else: specs.append('%s >=%s' % (name, ver)) specs.extend(common.specs_from_args(args.packages, json=args.json)) if command == 'install' and args.revision: get_revision(args.revision, json=args.json) else: common.check_specs(prefix, specs, json=args.json) if args.use_local: from conda.fetch import fetch_index from conda.utils import url_path try: from conda_build import config as build_config except ImportError: common.error_and_exit( "you need to have 'conda-build' installed" " to use the --use-local option", json=args.json, error_type="RuntimeError") # remove the cache such that a refetch is made, # this is necessary because we add the local build repo URL fetch_index.cache = {} index = common.get_index_trap([url_path(build_config.croot)], use_cache=args.use_index_cache, unknown=args.unknown, json=args.json) else: index = common.get_index_trap(channel_urls=channel_urls, prepend=not args.override_channels, use_cache=args.use_index_cache, unknown=args.unknown, json=args.json) # Don't update packages that are already up-to-date if command == 'update' and not args.all: r = Resolve(index) orig_packages = args.packages[:] for name in orig_packages: installed_metadata = [ ci.is_linked(prefix, dist) for dist in linked ] vers_inst = [ dist.rsplit('-', 2)[1] for dist in linked if dist.rsplit('-', 2)[0] == name ] build_inst = [ m['build_number'] for m in installed_metadata if m['name'] == name ] try: assert len(vers_inst) == 1, name assert len(build_inst) == 1, name except AssertionError as e: if args.json: common.exception_and_exit(e, json=True) else: raise pkgs = sorted(r.get_pkgs(MatchSpec(name))) if not pkgs: # Shouldn't happen? continue latest = pkgs[-1] if latest.version == vers_inst[ 0] and latest.build_number == build_inst[0]: args.packages.remove(name) if not args.packages: from conda.cli.main_list import print_packages if not args.json: regex = '^(%s)$' % '|'.join(orig_packages) print('# All requested packages already installed.') print_packages(prefix, regex) else: common.stdout_json_success( message='All requested packages already installed.') return # handle tar file containing conda packages if len(args.packages) == 1: tar_path = args.packages[0] if tar_path.endswith('.tar'): install_tar(prefix, tar_path, verbose=not args.quiet) return # handle explicit installs of conda packages if args.packages and all(s.endswith('.tar.bz2') for s in args.packages): from conda.misc import install_local_packages install_local_packages(prefix, args.packages, verbose=not args.quiet) return if any(s.endswith('.tar.bz2') for s in args.packages): common.error_and_exit( "cannot mix specifications with conda package filenames", json=args.json, error_type="ValueError") if args.force: args.no_deps = True spec_names = set(s.split()[0] for s in specs) if args.no_deps: only_names = spec_names else: only_names = None if not isdir(prefix) and not newenv: if args.mkdir: try: os.makedirs(prefix) except OSError: common.error_and_exit("Error: could not create directory: %s" % prefix, json=args.json, error_type="OSError") else: common.error_and_exit("""\ environment does not exist: %s # # Use 'conda create' to create an environment before installing packages # into it. 
#""" % prefix, json=args.json, error_type="NoEnvironmentFound") try: if command == 'install' and args.revision: actions = plan.revert_actions(prefix, get_revision(args.revision)) else: actions = plan.install_actions(prefix, index, specs, force=args.force, only_names=only_names, pinned=args.pinned, minimal_hint=args.alt_hint) except NoPackagesFound as e: error_message = e.args[0] packages = {index[fn]['name'] for fn in index} for pkg in e.pkgs: close = get_close_matches(pkg, packages) if close: error_message += "\n\nDid you mean one of these?\n %s" % ( ', '.join(close)) error_message += '\n\nYou can search for this package on Binstar with' error_message += '\n\n binstar search -t conda %s' % pkg error_message += '\n\nYou may need to install the Binstar command line client with' error_message += '\n\n conda install binstar' common.error_and_exit(error_message, json=args.json) except SystemExit as e: # Unsatisfiable package specifications/no such revision/import error error_type = 'UnsatisfiableSpecifications' if e.args and 'could not import' in e.args[0]: error_type = 'ImportError' common.exception_and_exit(e, json=args.json, newline=True, error_text=False, error_type=error_type) if plan.nothing_to_do(actions): from conda.cli.main_list import print_packages if not args.json: regex = '^(%s)$' % '|'.join(spec_names) print('\n# All requested packages already installed.') print_packages(prefix, regex) else: common.stdout_json_success( message='All requested packages already installed.') return if not args.json: print() print("Package plan for installation in environment %s:" % prefix) plan.display_actions(actions, index) if command in {'install', 'update'}: common.check_write(command, prefix) if not args.json: if not pscheck.main(args): common.confirm_yn(args) else: if (sys.platform == 'win32' and not args.force_pscheck and not pscheck.check_processes(verbose=False)): common.error_and_exit( "Cannot continue operation while processes " "from packages are running without --force-pscheck.", json=True, error_type="ProcessesStillRunning") elif args.dry_run: common.stdout_json_success(actions=actions, dry_run=True) sys.exit(0) with common.json_progress_bars(json=args.json and not args.quiet): try: plan.execute_actions(actions, index, verbose=not args.quiet) except RuntimeError as e: if len(e.args) > 0 and "LOCKERROR" in e.args[0]: error_type = "AlreadyLocked" else: error_type = "RuntimeError" common.exception_and_exit(e, error_type=error_type, json=args.json) except SystemExit as e: common.exception_and_exit(e, json=args.json) if newenv: touch_nonadmin(prefix) if not args.json: print_activate(args.name if args.name else prefix) if args.json: common.stdout_json_success(actions=actions)
def install(args, parser, command='install'): """ conda install, conda update, and conda create """ newenv = bool(command == 'create') if newenv: common.ensure_name_or_prefix(args, command) prefix = common.get_prefix(args, search=not newenv) if newenv: check_prefix(prefix, json=args.json) if config.force_32bit and plan.is_root_prefix(prefix): common.error_and_exit("cannot use CONDA_FORCE_32BIT=1 in root env") if command == 'update': if not args.file: if not args.all and len(args.packages) == 0: common.error_and_exit("""no package names supplied # If you want to update to a newer version of Anaconda, type: # # $ conda update --prefix %s anaconda """ % prefix, json=args.json, error_type="ValueError") if command == 'update' and not args.all: linked = ci.linked(prefix) for name in args.packages: common.arg2spec(name, json=args.json) if '=' in name: common.error_and_exit("Invalid package name: '%s'" % (name), json=args.json, error_type="ValueError") if name not in set(ci.name_dist(d) for d in linked): common.error_and_exit("package '%s' is not installed in %s" % (name, prefix), json=args.json, error_type="ValueError") if newenv and not args.no_default_packages: default_packages = config.create_default_packages[:] # Override defaults if they are specified at the command line for default_pkg in config.create_default_packages: if any(pkg.split('=')[0] == default_pkg for pkg in args.packages): default_packages.remove(default_pkg) args.packages.extend(default_packages) else: default_packages = [] common.ensure_override_channels_requires_channel(args) channel_urls = args.channel or () specs = [] if args.file: for fpath in args.file: specs.extend(common.specs_from_url(fpath, json=args.json)) elif getattr(args, 'all', False): linked = ci.linked(prefix) if not linked: common.error_and_exit("There are no packages installed in the " "prefix %s" % prefix) for pkg in linked: name, ver, build = pkg.rsplit('-', 2) if name in getattr(args, '_skip', ['anaconda']): continue if name == 'python' and ver.startswith('2'): # Oh Python 2... 
specs.append('%s >=%s,<3' % (name, ver)) else: specs.append('%s' % name) specs.extend(common.specs_from_args(args.packages, json=args.json)) if command == 'install' and args.revision: get_revision(args.revision, json=args.json) elif not (newenv and args.clone): common.check_specs(prefix, specs, json=args.json, create=(command == 'create')) num_cp = sum(s.endswith('.tar.bz2') for s in args.packages) if num_cp: if num_cp == len(args.packages): depends = misc.install_local_packages(prefix, args.packages, verbose=not args.quiet) if args.no_deps: depends = [] specs = list(set(depends)) args.unknown = True else: common.error_and_exit( "cannot mix specifications with conda package filenames", json=args.json, error_type="ValueError") # handle tar file containing conda packages if len(args.packages) == 1: tar_path = args.packages[0] if tar_path.endswith('.tar'): depends = install_tar(prefix, tar_path, verbose=not args.quiet) if args.no_deps: depends = [] specs = list(set(depends)) args.unknown = True if args.use_local: from conda.fetch import fetch_index from conda.utils import url_path try: from conda_build.config import croot except ImportError: common.error_and_exit( "you need to have 'conda-build >= 1.7.1' installed" " to use the --use-local option", json=args.json, error_type="RuntimeError") # remove the cache such that a refetch is made, # this is necessary because we add the local build repo URL fetch_index.cache = {} if exists(croot): channel_urls = [url_path(croot)] + list(channel_urls) index = common.get_index_trap(channel_urls=channel_urls, prepend=not args.override_channels, use_cache=args.use_index_cache, unknown=args.unknown, json=args.json, offline=args.offline) if newenv and args.clone: if set(args.packages) - set(default_packages): common.error_and_exit('did not expect any arguments for --clone', json=args.json, error_type="ValueError") clone(args.clone, prefix, json=args.json, quiet=args.quiet, index=index) misc.append_env(prefix) misc.touch_nonadmin(prefix) if not args.json: print_activate(args.name if args.name else prefix) return # Don't update packages that are already up-to-date if command == 'update' and not (args.all or args.force): r = Resolve(index) orig_packages = args.packages[:] for name in orig_packages: installed_metadata = [ci.is_linked(prefix, dist) for dist in linked] vers_inst = [dist.rsplit('-', 2)[1] for dist in linked if dist.rsplit('-', 2)[0] == name] build_inst = [m['build_number'] for m in installed_metadata if m['name'] == name] try: assert len(vers_inst) == 1, name assert len(build_inst) == 1, name except AssertionError as e: if args.json: common.exception_and_exit(e, json=True) else: raise pkgs = sorted(r.get_pkgs(MatchSpec(name))) if not pkgs: # Shouldn't happen? 
continue latest = pkgs[-1] if (latest.version == vers_inst[0] and latest.build_number == build_inst[0]): args.packages.remove(name) if not args.packages: from conda.cli.main_list import print_packages if not args.json: regex = '^(%s)$' % '|'.join(orig_packages) print('# All requested packages already installed.') print_packages(prefix, regex) else: common.stdout_json_success( message='All requested packages already installed.') return if args.force: args.no_deps = True spec_names = set(s.split()[0] for s in specs) if args.no_deps: only_names = spec_names else: only_names = None if not isdir(prefix) and not newenv: if args.mkdir: try: os.makedirs(prefix) except OSError: common.error_and_exit("Error: could not create directory: %s" % prefix, json=args.json, error_type="OSError") else: common.error_and_exit("""\ environment does not exist: %s # # Use 'conda create' to create an environment before installing packages # into it. #""" % prefix, json=args.json, error_type="NoEnvironmentFound") try: if command == 'install' and args.revision: actions = plan.revert_actions(prefix, get_revision(args.revision)) else: with common.json_progress_bars(json=args.json and not args.quiet): actions = plan.install_actions(prefix, index, specs, force=args.force, only_names=only_names, pinned=args.pinned, minimal_hint=args.alt_hint, update_deps=args.update_deps) if config.always_copy or args.copy: new_link = [] for pkg in actions["LINK"]: dist, pkgs_dir, lt = inst.split_linkarg(pkg) lt = ci.LINK_COPY new_link.append("%s %s %d" % (dist, pkgs_dir, lt)) actions["LINK"] = new_link except NoPackagesFound as e: error_message = e.args[0] if command == 'update' and args.all: # Packages not found here just means they were installed but # cannot be found any more. Just skip them. if not args.json: print("Warning: %s, skipping" % error_message) else: # Not sure what to do here pass args._skip = getattr(args, '_skip', ['anaconda']) for pkg in e.pkgs: p = pkg.split()[0] if p in args._skip: # Avoid infinite recursion. This can happen if a spec # comes from elsewhere, like --file raise args._skip.append(p) return install(args, parser, command=command) else: packages = {index[fn]['name'] for fn in index} for pkg in e.pkgs: close = get_close_matches(pkg, packages, cutoff=0.7) if close: error_message += ("\n\nDid you mean one of these?" 
"\n\n %s" % (', '.join(close))) error_message += '\n\nYou can search for this package on anaconda.org with' error_message += '\n\n anaconda search -t conda %s' % pkg if len(e.pkgs) > 1: # Note this currently only happens with dependencies not found error_message += '\n\n (and similarly for the other packages)' if not find_executable('anaconda', include_others=False): error_message += '\n\nYou may need to install the anaconda-client command line client with' error_message += '\n\n conda install anaconda-client' pinned_specs = plan.get_pinned_specs(prefix) if pinned_specs: error_message += "\n\nNote that you have pinned specs in %s:" % join(prefix, 'conda-meta', 'pinned') error_message += "\n\n %r" % pinned_specs common.error_and_exit(error_message, json=args.json) except SystemExit as e: # Unsatisfiable package specifications/no such revision/import error error_type = 'UnsatisfiableSpecifications' if e.args and 'could not import' in e.args[0]: error_type = 'ImportError' common.exception_and_exit(e, json=args.json, newline=True, error_text=False, error_type=error_type) if plan.nothing_to_do(actions): from conda.cli.main_list import print_packages if not args.json: regex = '^(%s)$' % '|'.join(spec_names) print('\n# All requested packages already installed.') print_packages(prefix, regex) else: common.stdout_json_success( message='All requested packages already installed.') return if not args.json: print() print("Package plan for installation in environment %s:" % prefix) plan.display_actions(actions, index, show_channel_urls=args.show_channel_urls) if command in {'install', 'update'}: common.check_write(command, prefix) if not args.json: common.confirm_yn(args) elif args.dry_run: common.stdout_json_success(actions=actions, dry_run=True) sys.exit(0) with common.json_progress_bars(json=args.json and not args.quiet): try: plan.execute_actions(actions, index, verbose=not args.quiet) if not (command == 'update' and args.all): try: with open(join(prefix, 'conda-meta', 'history'), 'a') as f: f.write('# %s specs: %s\n' % (command, specs)) except IOError as e: if e.errno == errno.EACCES: log.debug("Can't write the history file") else: raise except RuntimeError as e: if len(e.args) > 0 and "LOCKERROR" in e.args[0]: error_type = "AlreadyLocked" else: error_type = "RuntimeError" common.exception_and_exit(e, error_type=error_type, json=args.json) except SystemExit as e: common.exception_and_exit(e, json=args.json) if newenv: misc.append_env(prefix) misc.touch_nonadmin(prefix) if not args.json: print_activate(args.name if args.name else prefix) if args.json: common.stdout_json_success(actions=actions)
def execute_config(args, parser): try: import yaml except ImportError: common.error_and_exit("pyyaml is required to modify configuration", json=args.json, error_type="ImportError") json_warnings = [] json_get = {} if args.system: rc_path = config.sys_rc_path elif args.file: rc_path = args.file else: rc_path = config.user_rc_path # Create the file if it doesn't exist if not os.path.exists(rc_path): if args.add and 'channels' in list(zip( *args.add))[0] and not ['channels', 'defaults'] in args.add: # If someone adds a channel and their .condarc doesn't exist, make # sure it includes the defaults channel, or else they will end up # with a broken conda. rc_text = """\ channels: - defaults """ else: rc_text = "" else: with open(rc_path, 'r') as rc: rc_text = rc.read() rc_config = yaml.load(rc_text) if rc_config is None: rc_config = {} # Get if args.get is not None: if args.get == []: args.get = sorted(rc_config.keys()) for key in args.get: if key not in config.rc_list_keys + config.rc_bool_keys: if key not in config.rc_other: if not args.json: message = "unknown key %s" % key print(message, file=sys.stderr) else: json_warnings.append(message) continue if key not in rc_config: continue if args.json: json_get[key] = rc_config[key] continue if isinstance(rc_config[key], bool): print("--set", key, rc_config[key]) else: # Note, since conda config --add prepends, these are printed in # the reverse order so that entering them in this order will # recreate the same file for item in reversed(rc_config.get(key, [])): # Use repr so that it can be pasted back in to conda config --add print("--add", key, repr(item)) # PyYaml does not support round tripping, so if we use yaml.dump, it # will clear all comments and structure from the configuration file. # There are no yaml parsers that do this. Our best bet is to do a # simple parsing of the file ourselves. We can check the result at # the end to see if we did it right. 
# First, do it the pyyaml way new_rc_config = deepcopy(rc_config) # Add for key, item in args.add: try: if item in rc_config.get(key, []): # Right now, all list keys should not contain duplicates message = "Skipping %s: %s, item already exists" % (key, item) if not args.json: print(message, file=sys.stderr) else: json_warnings.append(message) continue except TypeError: common.error_and_exit("key must be one of %s, not %s" % (config.rc_list_keys, key), json=args.json, error_type="ValueError") new_rc_config.setdefault(key, []).insert(0, item) # Set for key, item in args.set: yamlitem = yaml.load(item) if not isinstance(yamlitem, bool): common.error_and_exit("%r is not a boolean" % item, json=args.json, error_type="TypeError") new_rc_config[key] = yamlitem # Remove for key, item in args.remove: if key not in new_rc_config: common.error_and_exit("key %r is not in the config file" % key, json=args.json, error_type="KeyError") if item not in new_rc_config[key]: common.error_and_exit( "%r is not in the %r key of the config file" % (item, key), json=args.json, error_type="KeyError") new_rc_config[key] = [i for i in new_rc_config[key] if i != item] # Remove Key for key, in args.remove_key: if key not in new_rc_config: common.error_and_exit("key %r is not in the config file" % key, json=args.json, error_type="KeyError") del new_rc_config[key] if args.force: # Note, force will also remove any checking that the keys are in # config.rc_keys with open(rc_path, 'w') as rc: rc.write(yaml.dump(new_rc_config, default_flow_style=False)) if args.json: common.stdout_json_success(rc_path=rc_path, warnings=json_warnings, get=json_get) return # Now, try to parse the condarc file. # Just support " key: " for now listkeyregexes = { key: re.compile(r"( *)%s *" % key) for key in dict(args.add) } setkeyregexes = { key: re.compile(r"( *)%s( *):( *)" % key) for key in dict(args.set) } new_rc_text = rc_text[:].split("\n") for key, item in args.add: if key not in config.rc_list_keys: common.error_and_exit("key must be one of %s, not %s" % (config.rc_list_keys, key), json=args.json, error_type="ValueError") if item in rc_config.get(key, []): # Skip duplicates. See above continue added = False for pos, line in enumerate(new_rc_text[:]): matched = listkeyregexes[key].match(line) if matched: leading_space = matched.group(1) # TODO: Try to guess how much farther to indent the # item. Right now, it is fixed at 2 spaces. new_rc_text.insert(pos + 1, "%s - %s" % (leading_space, item)) added = True if not added: if key in rc_config: # We should have found it above raise CouldntParse("existing list key couldn't be found") # TODO: Try to guess the correct amount of leading space for the # key. Right now it is zero. 
new_rc_text += ['%s:' % key, ' - %s' % item] if key == 'channels' and ['channels', 'defaults'] not in args.add: # If channels key is added for the first time, make sure it # includes 'defaults' new_rc_text += [' - defaults'] new_rc_config['channels'].append('defaults') for key, item in args.set: if key not in config.rc_bool_keys: common.error_and_exit("Error key must be one of %s, not %s" % (config.rc_bool_keys, key), json=args.json, error_type="ValueError") added = False for pos, line in enumerate(new_rc_text[:]): matched = setkeyregexes[key].match(line) if matched: leading_space = matched.group(1) precol_space = matched.group(2) postcol_space = matched.group(3) new_rc_text[pos] = '%s%s%s:%s%s' % ( leading_space, key, precol_space, postcol_space, item) added = True if not added: if key in rc_config: raise CouldntParse("existing bool key couldn't be found") new_rc_text += ['%s: %s' % (key, item)] for key, item in args.remove: raise NotImplementedError( "--remove without --force is not implemented " "yet") for key, in args.remove_key: raise NotImplementedError("--remove-key without --force is not " "implemented yet") if args.add or args.set: # Verify that the new rc text parses to the same thing as if we had # used yaml. try: parsed_new_rc_text = yaml.load('\n'.join(new_rc_text).strip('\n')) except yaml.parser.ParserError: raise CouldntParse("couldn't parse modified yaml") else: if not parsed_new_rc_text == new_rc_config: raise CouldntParse("modified yaml doesn't match what it " "should be") if args.add or args.set: with open(rc_path, 'w') as rc: rc.write('\n'.join(new_rc_text).strip('\n')) rc.write('\n') if args.json: common.stdout_json_success(rc_path=rc_path, warnings=json_warnings, get=json_get)
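# Illustrative sketch (not part of conda): the comment in execute_config() above notes that
# PyYAML cannot round-trip a .condarc, which is why the non --force path edits the file as
# text instead of dumping the parsed document. Dumping drops comments, as shown below
# (assumes PyYAML is installed; the sample .condarc text is made up).
import yaml

condarc_text = "# channels I trust\nchannels:\n  - defaults\n"
parsed = yaml.safe_load(condarc_text)
print(yaml.dump(parsed, default_flow_style=False))
# prints "channels:\n- defaults\n" -- the comment is gone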
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create
    """
    newenv = bool(command == 'create')
    isupdate = bool(command == 'update')
    isinstall = bool(command == 'install')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix, json=args.json)
    if force_32bit and plan.is_root_prefix(prefix):
        common.error_and_exit("cannot use CONDA_FORCE_32BIT=1 in root env")

    if isupdate and not (args.file or args.all or args.packages):
        common.error_and_exit("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix,
                              json=args.json,
                              error_type="ValueError")

    linked = ci.linked(prefix)
    lnames = {ci.name_dist(d) for d in linked}
    if isupdate and not args.all:
        for name in args.packages:
            common.arg2spec(name, json=args.json, update=True)
            if name not in lnames:
                common.error_and_exit("Package '%s' is not installed in %s" %
                                      (name, prefix),
                                      json=args.json,
                                      error_type="ValueError")

    if newenv and not args.no_default_packages:
        default_packages = create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)
    else:
        default_packages = []

    common.ensure_use_local(args)
    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    specs = []
    if args.file:
        for fpath in args.file:
            specs.extend(common.specs_from_url(fpath, json=args.json))
        if '@EXPLICIT' in specs:
            misc.explicit(specs, prefix, verbose=not args.quiet)
            return
    elif getattr(args, 'all', False):
        if not linked:
            common.error_and_exit("There are no packages installed in the "
                                  "prefix %s" % prefix)
        specs.extend(nm for nm in lnames)
    specs.extend(common.specs_from_args(args.packages, json=args.json))

    if isinstall and args.revision:
        get_revision(args.revision, json=args.json)
    elif not (newenv and args.clone):
        common.check_specs(prefix, specs, json=args.json,
                           create=(command == 'create'))

    num_cp = sum(s.endswith('.tar.bz2') for s in args.packages)
    if num_cp:
        if num_cp == len(args.packages):
            misc.explicit(args.packages, prefix, verbose=not args.quiet)
            return
        else:
            common.error_and_exit(
                "cannot mix specifications with conda package filenames",
                json=args.json,
                error_type="ValueError")

    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            install_tar(prefix, tar_path, verbose=not args.quiet)
            return

    if newenv and args.clone:
        if set(args.packages) - set(default_packages):
            common.error_and_exit('did not expect any arguments for --clone',
                                  json=args.json,
                                  error_type="ValueError")
        clone(args.clone, prefix, json=args.json, quiet=args.quiet,
              fetch_args={'use_cache': args.use_index_cache,
                          'unknown': args.unknown})
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)
        return

    index = common.get_index_trap(channel_urls=channel_urls,
                                  prepend=not args.override_channels,
                                  use_local=args.use_local,
                                  use_cache=args.use_index_cache,
                                  unknown=args.unknown,
                                  json=args.json,
                                  offline=args.offline,
                                  prefix=prefix)
    r = Resolve(index)
    ospecs = list(specs)
    plan.add_defaults_to_specs(r, linked, specs, update=isupdate)

    # Don't update packages that are already up-to-date
    if isupdate and not (args.all or args.force):
        orig_packages = args.packages[:]
        installed_metadata = [ci.is_linked(prefix, dist) for dist in linked]
        for name in orig_packages:
            vers_inst = [m['version'] for m in installed_metadata
                         if m['name'] == name]
            build_inst = [m['build_number'] for m in installed_metadata
                          if m['name'] == name]
            try:
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
            except AssertionError as e:
                if args.json:
                    common.exception_and_exit(e, json=True)
                else:
                    raise

            pkgs = sorted(r.get_pkgs(name))
            if not pkgs:
                # Shouldn't happen?
                continue

            latest = pkgs[-1]
            if (latest.version == vers_inst[0] and
                    latest.build_number == build_inst[0]):
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import print_packages

            if not args.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(
                    message='All requested packages already installed.')
            return

    if args.force:
        args.no_deps = True

    if args.no_deps:
        only_names = set(s.split()[0] for s in ospecs)
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                common.error_and_exit("Error: could not create directory: %s" % prefix,
                                      json=args.json,
                                      error_type="OSError")
        else:
            common.error_and_exit("""\
environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix,
                                  json=args.json,
                                  error_type="NoEnvironmentFound")

    try:
        if isinstall and args.revision:
            actions = plan.revert_actions(prefix, get_revision(args.revision))
        else:
            with common.json_progress_bars(json=args.json and not args.quiet):
                actions = plan.install_actions(prefix, index, specs,
                                               force=args.force,
                                               only_names=only_names,
                                               pinned=args.pinned,
                                               always_copy=args.copy,
                                               minimal_hint=args.alt_hint,
                                               update_deps=args.update_deps)
    except NoPackagesFound as e:
        error_message = e.args[0]

        if isupdate and args.all:
            # Packages not found here just means they were installed but
            # cannot be found any more. Just skip them.
            if not args.json:
                print("Warning: %s, skipping" % error_message)
            else:
                # Not sure what to do here
                pass
            args._skip = getattr(args, '_skip', ['anaconda'])
            for pkg in e.pkgs:
                p = pkg.split()[0]
                if p in args._skip:
                    # Avoid infinite recursion. This can happen if a spec
                    # comes from elsewhere, like --file
                    raise
                args._skip.append(p)

            return install(args, parser, command=command)
        else:
            packages = {index[fn]['name'] for fn in index}
            nfound = 0
            for pkg in sorted(e.pkgs):
                pkg = pkg.split()[0]
                if pkg in packages:
                    continue
                close = get_close_matches(pkg, packages, cutoff=0.7)
                if not close:
                    continue
                if nfound == 0:
                    error_message += "\n\nClose matches found; did you mean one of these?\n"
                error_message += "\n %s: %s" % (pkg, ', '.join(close))
                nfound += 1
            error_message += '\n\nYou can search for packages on anaconda.org with'
            error_message += '\n\n anaconda search -t conda %s' % pkg
            if len(e.pkgs) > 1:
                # Note this currently only happens with dependencies not found
                error_message += '\n\n(and similarly for the other packages)'

            if not find_executable('anaconda', include_others=False):
                error_message += '\n\nYou may need to install the anaconda-client'
                error_message += ' command line client with'
                error_message += '\n\n conda install anaconda-client'

            pinned_specs = plan.get_pinned_specs(prefix)
            if pinned_specs:
                path = join(prefix, 'conda-meta', 'pinned')
                error_message += "\n\nNote that you have pinned specs in %s:" % path
                error_message += "\n\n %r" % pinned_specs

        common.error_and_exit(error_message, json=args.json)
    except (Unsatisfiable, SystemExit) as e:
        # Unsatisfiable package specifications/no such revision/import error
        error_type = 'UnsatisfiableSpecifications'
        if e.args and 'could not import' in e.args[0]:
            error_type = 'ImportError'
        common.exception_and_exit(e, json=args.json, newline=True,
                                  error_text=False,
                                  error_type=error_type)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import print_packages

        if not args.json:
            regex = '^(%s)$' % '|'.join(s.split()[0] for s in ospecs)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(
                message='All requested packages already installed.')
        return

    if not args.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        plan.display_actions(actions, index,
                             show_channel_urls=args.show_channel_urls)

    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    if not args.json:
        common.confirm_yn(args)
    elif args.dry_run:
        common.stdout_json_success(actions=actions, dry_run=True)
        sys.exit(0)

    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
            if not (command == 'update' and args.all):
                try:
                    with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                        f.write('# %s specs: %s\n' % (command, specs))
                except IOError as e:
                    if e.errno == errno.EACCES:
                        log.debug("Can't write the history file")
                    else:
                        raise
        except RuntimeError as e:
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            common.exception_and_exit(e, error_type=error_type, json=args.json)
        except SystemExit as e:
            common.exception_and_exit(e, json=args.json)

    if newenv:
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)

    if args.json:
        common.stdout_json_success(actions=actions)
def execute_config(args, parser): try: import yaml except ImportError: common.error_and_exit("pyyaml is required to modify configuration", json=args.json, error_type="ImportError") json_warnings = [] json_get = {} if args.system: rc_path = config.sys_rc_path elif args.file: rc_path = args.file else: rc_path = config.user_rc_path # Create the file if it doesn't exist if not os.path.exists(rc_path): if args.add and 'channels' in list(zip(*args.add))[0] and not ['channels', 'defaults'] in args.add: # If someone adds a channel and their .condarc doesn't exist, make # sure it includes the defaults channel, or else they will end up # with a broken conda. rc_text = """\ channels: - defaults """ else: rc_text = "" else: with open(rc_path, 'r') as rc: rc_text = rc.read() rc_config = yaml.load(rc_text) if rc_config is None: rc_config = {} # Get if args.get is not None: if args.get == []: args.get = sorted(rc_config.keys()) for key in args.get: if key not in config.rc_list_keys + config.rc_bool_keys: if key not in config.rc_other: if not args.json: message = "unknown key %s" % key print(message, file=sys.stderr) else: json_warnings.append(message) continue if key not in rc_config: continue if args.json: json_get[key] = rc_config[key] continue if isinstance(rc_config[key], bool): print("--set", key, rc_config[key]) else: # Note, since conda config --add prepends, these are printed in # the reverse order so that entering them in this order will # recreate the same file for item in reversed(rc_config.get(key, [])): # Use repr so that it can be pasted back in to conda config --add print("--add", key, repr(item)) # PyYaml does not support round tripping, so if we use yaml.dump, it # will clear all comments and structure from the configuration file. # There are no yaml parsers that do this. Our best bet is to do a # simple parsing of the file ourselves. We can check the result at # the end to see if we did it right. # First, do it the pyyaml way new_rc_config = deepcopy(rc_config) # Add for key, item in args.add: if key not in config.rc_list_keys: common.error_and_exit("key must be one of %s, not %r" % (config.rc_list_keys, key), json=args.json, error_type="ValueError") if not isinstance(rc_config.get(key, []), list): raise CouldntParse("key %r should be a list, not %s." 
% (key, rc_config[key].__class__.__name__)) if item in rc_config.get(key, []): # Right now, all list keys should not contain duplicates message = "Skipping %s: %s, item already exists" % (key, item) if not args.json: print(message, file=sys.stderr) else: json_warnings.append(message) continue new_rc_config.setdefault(key, []).insert(0, item) # Set for key, item in args.set: yamlitem = yaml.load(item) if not isinstance(yamlitem, bool): common.error_and_exit("%r is not a boolean" % item, json=args.json, error_type="TypeError") new_rc_config[key] = yamlitem # Remove for key, item in args.remove: if key not in new_rc_config: common.error_and_exit("key %r is not in the config file" % key, json=args.json, error_type="KeyError") if item not in new_rc_config[key]: common.error_and_exit("%r is not in the %r key of the config file" % (item, key), json=args.json, error_type="KeyError") new_rc_config[key] = [i for i in new_rc_config[key] if i != item] # Remove Key for key, in args.remove_key: if key not in new_rc_config: common.error_and_exit("key %r is not in the config file" % key, json=args.json, error_type="KeyError") del new_rc_config[key] if args.force: # Note, force will also remove any checking that the keys are in # config.rc_keys with open(rc_path, 'w') as rc: rc.write(yaml.dump(new_rc_config, default_flow_style=False)) if args.json: common.stdout_json_success( rc_path=rc_path, warnings=json_warnings, get=json_get ) return # Now, try to parse the condarc file. # Just support " key: " for now listkeyregexes = {key:re.compile(r"( *)%s *" % key) for key in dict(args.add) } setkeyregexes = {key:re.compile(r"( *)%s( *):( *)" % key) for key in dict(args.set) } new_rc_text = rc_text[:].split("\n") for key, item in args.add: if key not in config.rc_list_keys: common.error_and_exit("key must be one of %s, not %s" % (config.rc_list_keys, key), json=args.json, error_type="ValueError") if item in rc_config.get(key, []): # Skip duplicates. See above continue added = False for pos, line in enumerate(new_rc_text[:]): matched = listkeyregexes[key].match(line) if matched: leading_space = matched.group(1) # TODO: Try to guess how much farther to indent the # item. Right now, it is fixed at 2 spaces. new_rc_text.insert(pos + 1, "%s - %s" % (leading_space, item)) added = True if not added: if key in rc_config: # We should have found it above raise CouldntParse("existing list key couldn't be found") # TODO: Try to guess the correct amount of leading space for the # key. Right now it is zero. 
new_rc_text += ['%s:' % key, ' - %s' % item] if key == 'channels' and ['channels', 'defaults'] not in args.add: # If channels key is added for the first time, make sure it # includes 'defaults' new_rc_text += [' - defaults'] new_rc_config['channels'].append('defaults') for key, item in args.set: if key not in config.rc_bool_keys: common.error_and_exit("Error key must be one of %s, not %s" % (config.rc_bool_keys, key), json=args.json, error_type="ValueError") added = False for pos, line in enumerate(new_rc_text[:]): matched = setkeyregexes[key].match(line) if matched: leading_space = matched.group(1) precol_space = matched.group(2) postcol_space = matched.group(3) new_rc_text[pos] = '%s%s%s:%s%s' % (leading_space, key, precol_space, postcol_space, item) added = True if not added: if key in rc_config: raise CouldntParse("existing bool key couldn't be found") new_rc_text += ['%s: %s' % (key, item)] for key, item in args.remove: raise NotImplementedError("--remove without --force is not implemented " "yet") for key, in args.remove_key: raise NotImplementedError("--remove-key without --force is not " "implemented yet") if args.add or args.set: # Verify that the new rc text parses to the same thing as if we had # used yaml. try: parsed_new_rc_text = yaml.load('\n'.join(new_rc_text).strip('\n')) except yaml.parser.ParserError: raise CouldntParse("couldn't parse modified yaml") else: if not parsed_new_rc_text == new_rc_config: raise CouldntParse("modified yaml doesn't match what it " "should be") if args.add or args.set: with open(rc_path, 'w') as rc: rc.write('\n'.join(new_rc_text).strip('\n')) rc.write('\n') if args.json: common.stdout_json_success( rc_path=rc_path, warnings=json_warnings, get=json_get )
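# Illustrative sketch (not part of conda): the non --force path in the execute_config()
# versions above locates an existing list key in the .condarc text with a regex of the form
# r"( *)%s *" so the new item can be inserted right after it with the same leading
# whitespace. Example with key = "channels" and a made-up condarc line:
import re

channels_line = re.compile(r"( *)%s *" % "channels")
match = channels_line.match("  channels:")
assert match and match.group(1) == "  "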
def execute_config(args, parser):
    json_warnings = []
    json_get = {}

    if args.system:
        rc_path = config.sys_rc_path
    elif args.file:
        rc_path = args.file
    else:
        rc_path = config.user_rc_path

    # Create the file if it doesn't exist
    if not os.path.exists(rc_path):
        has_defaults = ['channels', 'defaults'] in args.add
        if args.add and 'channels' in list(zip(*args.add))[0] and not has_defaults:
            # If someone adds a channel and their .condarc doesn't exist, make
            # sure it includes the defaults channel, or else they will end up
            # with a broken conda.
            rc_text = """\
channels:
  - defaults
"""
        else:
            rc_text = ""
    else:
        with open(rc_path, 'r') as rc:
            rc_text = rc.read()
    rc_config = yaml_load(rc_text)
    if rc_config is None:
        rc_config = {}

    # Get
    if args.get is not None:
        if args.get == []:
            args.get = sorted(rc_config.keys())
        for key in args.get:
            if key not in config.rc_list_keys + config.rc_bool_keys + config.rc_string_keys:
                if key not in config.rc_other:
                    message = "unknown key %s" % key
                    if not args.json:
                        print(message, file=sys.stderr)
                    else:
                        json_warnings.append(message)
                    continue
            if key not in rc_config:
                continue

            if args.json:
                json_get[key] = rc_config[key]
                continue

            if isinstance(rc_config[key], (bool, string_types)):
                print("--set", key, rc_config[key])
            else:
                # Note, since conda config --add prepends, these are printed in
                # the reverse order so that entering them in this order will
                # recreate the same file
                for item in reversed(rc_config.get(key, [])):
                    # Use repr so that it can be pasted back in to conda config --add
                    print("--add", key, repr(item))

    # Add
    for key, item in args.add:
        if key not in config.rc_list_keys:
            common.error_and_exit("key must be one of %s, not %r" %
                                  (', '.join(config.rc_list_keys), key),
                                  json=args.json,
                                  error_type="ValueError")
        if not isinstance(rc_config.get(key, []), list):
            bad = rc_config[key].__class__.__name__
            raise CouldntParse("key %r should be a list, not %s." % (key, bad))
        if key == 'default_channels' and rc_path != config.sys_rc_path:
            msg = "'default_channels' is only configurable for system installs"
            raise NotImplementedError(msg)
        if item in rc_config.get(key, []):
            # Right now, all list keys should not contain duplicates
            message = "Skipping %s: %s, item already exists" % (key, item)
            if not args.json:
                print(message, file=sys.stderr)
            else:
                json_warnings.append(message)
            continue
        rc_config.setdefault(key, []).insert(0, item)

    # Set
    set_bools, set_strings = set(config.rc_bool_keys), set(config.rc_string_keys)
    for key, item in args.set:
        # Check key and value
        yamlitem = yaml_load(item)
        if key in set_bools:
            if not isinstance(yamlitem, bool):
                common.error_and_exit("Key: %s; %s is not a YAML boolean." % (key, item),
                                      json=args.json,
                                      error_type="TypeError")
            rc_config[key] = yamlitem
        elif key in set_strings:
            rc_config[key] = yamlitem
        else:
            common.error_and_exit("Error key must be one of %s, not %s" %
                                  (', '.join(set_bools | set_strings), key),
                                  json=args.json,
                                  error_type="ValueError")

    # Remove
    for key, item in args.remove:
        if key not in rc_config:
            common.error_and_exit("key %r is not in the config file" % key,
                                  json=args.json,
                                  error_type="KeyError")
        if item not in rc_config[key]:
            common.error_and_exit("%r is not in the %r key of the config file" %
                                  (item, key),
                                  json=args.json,
                                  error_type="KeyError")
        rc_config[key] = [i for i in rc_config[key] if i != item]

    # Remove Key
    for key, in args.remove_key:
        if key not in rc_config:
            common.error_and_exit("key %r is not in the config file" % key,
                                  json=args.json,
                                  error_type="KeyError")
        del rc_config[key]

    # config.rc_keys
    with open(rc_path, 'w') as rc:
        rc.write(yaml_dump(rc_config))

    if args.json:
        common.stdout_json_success(
            rc_path=rc_path,
            warnings=json_warnings,
            get=json_get
        )
    return
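# Illustrative sketch (not part of conda): `conda config --add` in the versions above
# prepends new items via rc_config.setdefault(key, []).insert(0, item), so the most
# recently added channel ends up first in the list. The values below are made up.
rc_config = {"channels": ["defaults"]}
rc_config.setdefault("channels", []).insert(0, "conda-forge")
assert rc_config["channels"] == ["conda-forge", "defaults"]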
def execute_config(args, parser):
    json_warnings = []
    json_get = {}

    if args.system:
        rc_path = sys_rc_path
    elif args.file:
        rc_path = args.file
    else:
        rc_path = user_rc_path

    # Create the file if it doesn't exist
    if not os.path.exists(rc_path):
        has_defaults = ['channels', 'defaults'] in args.add
        if args.add and 'channels' in list(zip(*args.add))[0] and not has_defaults:
            # If someone adds a channel and their .condarc doesn't exist, make
            # sure it includes the defaults channel, or else they will end up
            # with a broken conda.
            rc_text = """\
channels:
  - defaults
"""
        else:
            rc_text = ""
    else:
        with open(rc_path, 'r') as rc:
            rc_text = rc.read()
    rc_config = yaml_load(rc_text)
    if rc_config is None:
        rc_config = {}

    # Get
    if args.get is not None:
        if args.get == []:
            args.get = sorted(rc_config.keys())
        for key in args.get:
            if key not in rc_list_keys + rc_bool_keys + rc_string_keys:
                if key not in rc_other:
                    message = "unknown key %s" % key
                    if not args.json:
                        print(message, file=sys.stderr)
                    else:
                        json_warnings.append(message)
                continue
            if key not in rc_config:
                continue

            if args.json:
                json_get[key] = rc_config[key]
                continue

            if isinstance(rc_config[key], (bool, string_types)):
                print("--set", key, rc_config[key])
            else:
                # Note, since conda config --add prepends, these are printed in
                # the reverse order so that entering them in this order will
                # recreate the same file
                for item in reversed(rc_config.get(key, [])):
                    # Use repr so that it can be pasted back in to conda config --add
                    print("--add", key, repr(item))

    # Add
    for key, item in args.add:
        if key not in rc_list_keys:
            common.error_and_exit("key must be one of %s, not %r"
                                  % (', '.join(rc_list_keys), key),
                                  json=args.json, error_type="ValueError")
        if not isinstance(rc_config.get(key, []), list):
            bad = rc_config[key].__class__.__name__
            raise CouldntParse("key %r should be a list, not %s." % (key, bad))
        if key == 'default_channels' and rc_path != sys_rc_path:
            msg = "'default_channels' is only configurable for system installs"
            raise NotImplementedError(msg)
        if item in rc_config.get(key, []):
            # Right now, all list keys should not contain duplicates
            message = "Skipping %s: %s, item already exists" % (key, item)
            if not args.json:
                print(message, file=sys.stderr)
            else:
                json_warnings.append(message)
            continue
        rc_config.setdefault(key, []).insert(0, item)

    # Set
    set_bools, set_strings = set(rc_bool_keys), set(rc_string_keys)
    for key, item in args.set:
        # Check key and value
        yamlitem = yaml_load(item)
        if key in set_bools:
            if not isinstance(yamlitem, bool):
                common.error_and_exit("Key: %s; %s is not a YAML boolean."
                                      % (key, item), json=args.json, error_type="TypeError")
            rc_config[key] = yamlitem
        elif key in set_strings:
            rc_config[key] = yamlitem
        else:
            common.error_and_exit("Error key must be one of %s, not %s"
                                  % (', '.join(set_bools | set_strings), key),
                                  json=args.json, error_type="ValueError")

    # Remove
    for key, item in args.remove:
        if key not in rc_config:
            common.error_and_exit("key %r is not in the config file" % key,
                                  json=args.json, error_type="KeyError")
        if item not in rc_config[key]:
            common.error_and_exit(
                "%r is not in the %r key of the config file" % (item, key),
                json=args.json, error_type="KeyError")
        rc_config[key] = [i for i in rc_config[key] if i != item]

    # Remove Key
    for key, in args.remove_key:
        if key not in rc_config:
            common.error_and_exit("key %r is not in the config file" % key,
                                  json=args.json, error_type="KeyError")
        del rc_config[key]

    # config.rc_keys
    with open(rc_path, 'w') as rc:
        rc.write(yaml_dump(rc_config))

    if args.json:
        common.stdout_json_success(rc_path=rc_path, warnings=json_warnings,
                                   get=json_get)
    return
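# --- Illustrative sketch (not conda code): the .condarc read-modify-write cycle ---
# execute_config() loads the rc file as YAML, mutates a plain dict, and writes the
# dump back. The standalone sketch below reproduces that cycle for
# `conda config --add channels <name>`, using PyYAML's safe_load/safe_dump in place
# of conda's yaml_load/yaml_dump helpers; the function name and file path are
# hypothetical.
import os
import yaml

def _add_channel_sketch(rc_path, channel):
    rc_text = ""
    if os.path.exists(rc_path):
        with open(rc_path, 'r') as rc:
            rc_text = rc.read()
    rc_config = yaml.safe_load(rc_text) or {}
    channels = rc_config.setdefault('channels', [])
    if channel not in channels:
        # `conda config --add` prepends, so the newest entry gets highest priority
        channels.insert(0, channel)
    with open(rc_path, 'w') as rc:
        rc.write(yaml.safe_dump(rc_config, default_flow_style=False))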
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create
    """
    newenv = bool(command == 'create')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix, json=args.json)

    if command == 'update':
        if args.all:
            if args.packages:
                common.error_and_exit("""--all cannot be used with packages""",
                                      json=args.json,
                                      error_type="ValueError")
        else:
            if len(args.packages) == 0:
                common.error_and_exit("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix,
                                      json=args.json,
                                      error_type="ValueError")

    if command == 'update':
        linked = ci.linked(prefix)
        for name in args.packages:
            common.arg2spec(name, json=args.json)
            if '=' in name:
                common.error_and_exit("Invalid package name: '%s'" % (name),
                                      json=args.json,
                                      error_type="ValueError")
            if name not in set(ci.name_dist(d) for d in linked):
                common.error_and_exit("package '%s' is not installed in %s" %
                                      (name, prefix),
                                      json=args.json,
                                      error_type="ValueError")

    if newenv and args.clone:
        if args.packages:
            common.error_and_exit('did not expect any arguments for --clone',
                                  json=args.json,
                                  error_type="ValueError")
        clone(args.clone, prefix, json=args.json, quiet=args.quiet)
        touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)
        return

    if newenv and not args.no_default_packages:
        default_packages = config.create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in config.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    specs = []
    if args.file:
        specs.extend(common.specs_from_url(args.file, json=args.json))
    elif getattr(args, 'all', False):
        linked = ci.linked(prefix)
        for pkg in linked:
            name, ver, build = pkg.rsplit('-', 2)
            if name == 'python' and ver.startswith('2'):
                # Oh Python 2...
                specs.append('%s >=%s,<3' % (name, ver))
            else:
                specs.append('%s >=%s' % (name, ver))
    specs.extend(common.specs_from_args(args.packages, json=args.json))

    if command == 'install' and args.revision:
        get_revision(args.revision, json=args.json)
    else:
        common.check_specs(prefix, specs, json=args.json)

    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build import config as build_config
        except ImportError:
            common.error_and_exit("you need to have 'conda-build' installed"
                                  " to use the --use-local option",
                                  json=args.json,
                                  error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        index = common.get_index_trap([url_path(build_config.croot)],
                                      use_cache=args.use_index_cache,
                                      unknown=args.unknown,
                                      json=args.json)
    else:
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      unknown=args.unknown,
                                      json=args.json)

    # Don't update packages that are already up-to-date
    if command == 'update' and not args.all:
        r = Resolve(index)
        orig_packages = args.packages[:]
        for name in orig_packages:
            installed_metadata = [ci.is_linked(prefix, dist) for dist in linked]
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            build_inst = [m['build_number'] for m in installed_metadata
                          if m['name'] == name]
            try:
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
            except AssertionError as e:
                if args.json:
                    common.exception_and_exit(e, json=True)
                else:
                    raise

            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]

            if latest.version == vers_inst[0] and latest.build_number == build_inst[0]:
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import print_packages

            if not args.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(
                    message='All requested packages already installed.')
            return

    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            install_tar(prefix, tar_path, verbose=not args.quiet)
            return

    # handle explicit installs of conda packages
    if args.packages and all(s.endswith('.tar.bz2') for s in args.packages):
        from conda.misc import install_local_packages
        install_local_packages(prefix, args.packages, verbose=not args.quiet)
        return

    if any(s.endswith('.tar.bz2') for s in args.packages):
        common.error_and_exit("cannot mix specifications with conda package filenames",
                              json=args.json,
                              error_type="ValueError")

    if args.force:
        args.no_deps = True

    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                common.error_and_exit("Error: could not create directory: %s" % prefix,
                                      json=args.json,
                                      error_type="OSError")
        else:
            common.error_and_exit("""\
environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix,
                                  json=args.json,
                                  error_type="NoEnvironmentFound")

    try:
        if command == 'install' and args.revision:
            actions = plan.revert_actions(prefix, get_revision(args.revision))
        else:
            actions = plan.install_actions(prefix, index, specs,
                                           force=args.force,
                                           only_names=only_names,
                                           pinned=args.pinned,
                                           minimal_hint=args.alt_hint)
    except NoPackagesFound as e:
        common.exception_and_exit(e, json=args.json)
    except SystemExit as e:
        # Unsatisfiable package specifications/no such revision/import error
        error_type = 'UnsatisfiableSpecifications'
        if e.args and 'could not import' in e.args[0]:
            error_type = 'ImportError'
        common.exception_and_exit(e, json=args.json, newline=True,
                                  error_text=False,
                                  error_type=error_type)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import print_packages

        if not args.json:
            regex = '^(%s)$' % '|'.join(spec_names)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(
                message='All requested packages already installed.')
        return

    if not args.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    if not args.json:
        if not pscheck.main(args):
            common.confirm_yn(args)
    else:
        if (sys.platform == 'win32' and not args.force_pscheck and
                not pscheck.check_processes(verbose=False)):
            common.error_and_exit("Cannot continue operation while processes "
                                  "from packages are running without --force-pscheck.",
                                  json=True,
                                  error_type="ProcessesStillRunning")
        elif args.dry_run:
            common.stdout_json_success(actions=actions, dry_run=True)
            sys.exit(0)

    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
        except RuntimeError as e:
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            common.exception_and_exit(e, error_type=error_type, json=args.json)
        except SystemExit as e:
            common.exception_and_exit(e, json=args.json)

    if newenv:
        touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)

    if args.json:
        common.stdout_json_success(actions=actions)