def print_packages(prefix, regex=None, format='human', piplist=False, json=False):
    """Print (or JSON-dump) the packages linked into *prefix*; return exit code."""
    if not isdir(prefix):
        common.error_and_exit("""\
Error: environment does not exist: %s
#
# Use 'conda create' to create an environment before listing its packages.""" %
                              prefix,
                              json=json,
                              error_type="NoEnvironmentFound")

    # Human/export headers only make sense for plain-text output.
    if not json:
        if format == 'human':
            print('# packages in environment at %s:' % prefix)
            print('#')
        if format == 'export':
            print_export_header()

    installed = install.linked(prefix)
    if piplist and config.use_pip and format == 'human':
        # Merge pip-installed packages into the listing.
        add_pip_installed(prefix, installed, json=json)

    exitcode, output = list_packages(prefix, installed, regex, format=format)
    if json:
        common.stdout_json(output)
    else:
        print('\n'.join(output))
    return exitcode
def execute(args, parser):
    """``conda list`` entry point: resolve prefix and format, then list packages."""
    prefix = common.get_prefix(args)

    regex = args.regex
    if args.full_name:
        # Anchor the pattern so only exact name matches survive.
        regex = r'^%s$' % regex

    if args.revisions:
        from conda.history import History
        h = History(prefix)
        if isfile(h.path):
            if args.json:
                common.stdout_json(h.object_log())
            else:
                h.print_log()
        else:
            common.error_and_exit("No revision log found: %s\n" % h.path,
                                  json=args.json,
                                  error_type="NoRevisionLog")
        return

    # JSON output always uses the canonical format.
    if args.json or args.canonical:
        format = 'canonical'
    elif args.export:
        format = 'export'
    else:
        format = 'human'

    exitcode = print_packages(prefix, regex, format, piplist=args.pip,
                              json=args.json)
    sys.exit(exitcode)
def print_packages(
    prefix, regex=None, format="human", piplist=False, json=False, show_channel_urls=config.show_channel_urls
):
    """Print (or JSON-dump) packages in *prefix*, optionally with channel URLs.

    ``show_channel_urls`` defaults to the config value captured at import time.
    """
    if not isdir(prefix):
        common.error_and_exit(
            """\
Error: environment does not exist: %s
#
# Use 'conda create' to create an environment before listing its packages.""" % prefix,
            json=json,
            error_type="NoEnvironmentFound",
        )

    if not json:
        if format == "human":
            print("# packages in environment at %s:" % prefix)
            print("#")
        if format == "export":
            print_export_header()

    installed = install.linked(prefix)
    if piplist and config.use_pip and format == "human":
        add_pip_installed(prefix, installed, json=json)

    exitcode, output = list_packages(prefix, installed, regex, format=format,
                                     show_channel_urls=show_channel_urls)
    if json:
        common.stdout_json(output)
    else:
        print("\n".join(output))
    return exitcode
def get_revision(arg, json=False):
    """Parse *arg* as an integer revision number; exit with an error if it isn't one."""
    try:
        return int(arg)
    except ValueError:
        common.error_and_exit("expected revision number, not: '%s'" % arg,
                              json=json,
                              error_type="ValueError")
def print_packages(prefix, regex=None, format='human', piplist=False, json=False):
    """Print (or JSON-dump) packages in *prefix* and exit the process with the list exit code."""
    if not isdir(prefix):
        common.error_and_exit("""\
Error: environment does not exist: %s
#
# Use 'conda create' to create an environment before listing its packages.""" %
                              prefix,
                              json=json,
                              error_type="NoEnvironmentFound")

    if not json:
        if format == 'human':
            print('# packages in environment at %s:' % prefix)
            print('#')
        if format == 'export':
            print_export_header()

    installed = install.linked(prefix)
    if piplist and config.use_pip and format == 'human':
        add_pip_installed(prefix, installed, json=json)

    exitcode, output = list_packages(prefix, installed, regex, format=format)
    if json:
        common.stdout_json(output)
    else:
        print('\n'.join(output))
    # Unlike the returning variant, this one terminates the process here.
    sys.exit(exitcode)
def execute(args, parser):
    """``conda env export`` entry point: dump the active/named environment as YAML.

    Writes to stdout unless ``--file`` was given, in which case the YAML is
    written to that file.
    """
    if not args.name:
        # Note, this is a hack for get_prefix that assumes argparse results
        # TODO Refactor common.get_prefix
        name = os.environ.get('CONDA_DEFAULT_ENV', False)
        if not name:
            msg = "Unable to determine environment\n\n"
            msg += textwrap.dedent("""
                Please re-run this command with one of the following options:

                * Provide an environment name via --name or -n
                * Re-run this command inside an activated conda environment.""").lstrip()
            # TODO Add json support
            common.error_and_exit(msg, json=False)
        args.name = name
    else:
        name = args.name
    prefix = common.get_prefix(args)
    env = from_environment(name, prefix, no_builds=args.no_builds)

    if args.override_channels:
        env.remove_channels()

    if args.channel is not None:
        env.add_channels(args.channel)

    if args.file is None:
        print(env.to_yaml())
    else:
        # BUG FIX: the file handle was previously opened and never closed,
        # leaking the descriptor and risking unflushed output. Use a context
        # manager so the file is always flushed and closed.
        with open(args.file, 'wb') as fp:
            env.to_yaml(stream=fp)
def execute(args, parser):
    """``conda list`` entry point (no --full-name support in this variant)."""
    prefix = common.get_prefix(args)

    if args.revisions:
        from conda.history import History
        h = History(prefix)
        if isfile(h.path):
            if args.json:
                common.stdout_json(h.object_log())
            else:
                h.print_log()
        else:
            common.error_and_exit("No revision log found: %s\n" % h.path,
                                  json=args.json,
                                  error_type="NoRevisionLog")
        return

    # JSON output always uses the canonical format.
    if args.json or args.canonical:
        format = "canonical"
    elif args.export:
        format = "export"
    else:
        format = "human"

    print_packages(prefix, args.regex, format, piplist=args.pip,
                   json=args.json)
def clone(src_arg, dst_prefix, json=False, quiet=False, index=None):
    """Clone the environment named (or located) by *src_arg* into *dst_prefix*."""
    if os.sep in src_arg:
        # The argument looks like a filesystem path to an environment.
        src_prefix = abspath(src_arg)
        if not isdir(src_prefix):
            common.error_and_exit('no such directory: %s' % src_arg,
                                  json=json,
                                  error_type="NoEnvironmentFound")
    else:
        # Otherwise resolve it as a named environment.
        src_prefix = common.find_prefix_name(src_arg)
        if src_prefix is None:
            common.error_and_exit('could not find environment: %s' % src_arg,
                                  json=json,
                                  error_type="NoEnvironmentFound")

    if not json:
        print("src_prefix: %r" % src_prefix)
        print("dst_prefix: %r" % dst_prefix)

    with common.json_progress_bars(json=json and not quiet):
        actions, untracked_files = misc.clone_env(src_prefix, dst_prefix,
                                                  verbose=not json,
                                                  quiet=quiet,
                                                  index=index)

    if json:
        common.stdout_json_success(
            actions=actions,
            untracked_files=list(untracked_files),
            src_prefix=src_prefix,
            dst_prefix=dst_prefix,
        )
def execute(args, parser):
    """``conda env export`` entry point (stdout fallback variant).

    Writes the environment YAML to stdout, or to ``--file`` when given.
    """
    if not args.name:
        # Note, this is a hack for get_prefix that assumes argparse results
        # TODO Refactor common.get_prefix
        name = os.environ.get('CONDA_DEFAULT_ENV', False)
        if not name:
            msg = "Unable to determine environment\n\n"
            msg += textwrap.dedent("""
                Please re-run this command with one of the following options:

                * Provide an environment name via --name or -n
                * Re-run this command inside an activated conda environment.""").lstrip()
            # TODO Add json support
            common.error_and_exit(msg, json=False)
        args.name = name
    else:
        name = args.name
    prefix = common.get_prefix(args)
    env = from_environment(name, prefix)

    if args.file is None:
        env.to_yaml(stream=sys.stdout)
    else:
        # BUG FIX: the output file handle was previously never closed,
        # leaking the descriptor and risking unflushed output. Only the
        # file we open ourselves is closed (never sys.stdout).
        with open(args.file, 'wb') as fp:
            env.to_yaml(stream=fp)
def clone(src_arg, dst_prefix, json=False, quiet=False):
    """Clone the environment named (or located) by *src_arg* into *dst_prefix*."""
    from conda.misc import clone_env

    if os.sep in src_arg:
        # Path-like argument: use it directly as an environment directory.
        src_prefix = abspath(src_arg)
        if not isdir(src_prefix):
            common.error_and_exit('no such directory: %s' % src_arg,
                                  json=json,
                                  error_type="NoEnvironmentFound")
    else:
        # Name-like argument: look it up among known environments.
        src_prefix = common.find_prefix_name(src_arg)
        if src_prefix is None:
            common.error_and_exit('could not find environment: %s' % src_arg,
                                  json=json,
                                  error_type="NoEnvironmentFound")

    if not json:
        print("src_prefix: %r" % src_prefix)
        print("dst_prefix: %r" % dst_prefix)

    with common.json_progress_bars(json=json and not quiet):
        actions, untracked_files = clone_env(src_prefix, dst_prefix,
                                             verbose=not json,
                                             quiet=quiet)

    if json:
        common.stdout_json_success(actions=actions,
                                   untracked_files=list(untracked_files),
                                   src_prefix=src_prefix,
                                   dst_prefix=dst_prefix)
def execute(args, parser):
    """``conda env update`` entry point: install dependencies from an env file."""
    try:
        env = from_file(args.file)
    except exceptions.EnvironmentFileNotFound as e:
        msg = 'Unable to locate environment file: %s\n\n' % e.filename
        msg += "\n".join(textwrap.wrap(textwrap.dedent("""
            Please verify that the above file is present and that you have
            permission read the file's contents.  Note, you can specify the
            file to use by explictly adding --file=/path/to/file when calling
            conda env update.""").lstrip()))
        common.error_and_exit(msg, json=args.json)

    if not args.name:
        if not env.name:
            # Note, this is a hack for get_prefix that assumes argparse results
            # TODO Refactor common.get_prefix
            name = os.environ.get('CONDA_DEFAULT_ENV', False)
            if not name:
                msg = "Unable to determine environment\n\n"
                msg += textwrap.dedent("""
                    Please re-run this command with one of the following options:

                    * Provide an environment name via --name or -n
                    * Re-run this command inside an activated conda environment.""").lstrip()
                # TODO Add json support
                common.error_and_exit(msg, json=False)

        # Note: stubbing out the args object as all of the
        # conda.cli.common code thinks that name will always
        # be specified.
        args.name = env.name

    prefix = common.get_prefix(args, search=False)
    # CAN'T Check with this function since it assumes we will create prefix.
    # cli_install.check_prefix(prefix, json=args.json)

    # TODO, add capability
    # common.ensure_override_channels_requires_channel(args)
    # channel_urls = args.channel or ()

    for installer_type, specs in env.dependencies.items():
        try:
            installer = get_installer(installer_type)
            installer.install(prefix, specs, args, env)
        except InvalidInstaller:
            sys.stderr.write(textwrap.dedent("""
                Unable to install package for {0}.

                Please double check and ensure you dependencies file has
                the correct spelling.  You might also try installing the
                conda-env-{0} package to see if provides the required
                installer.
                """).lstrip().format(installer_type))
            return -1

    touch_nonadmin(prefix)
    if not args.json:
        cli_install.print_activate(args.name if args.name else prefix)
def args_func(args, p):
    """Dispatch to the subcommand, converting uncaught errors to user messages."""
    from conda.cli import common
    use_json = getattr(args, 'json', False)
    try:
        args.func(args, p)
    except RuntimeError as e:
        common.error_and_exit(str(e), json=use_json)
    except Exception as e:
        # YAML parse errors already carry a readable message; for anything
        # else, ask the user to file an issue.
        if e.__class__.__name__ not in ('ScannerError', 'ParserError'):
            message = """\
An unexpected error has occurred, please consider sending the
following traceback to the conda GitHub issue tracker at:

    https://github.com/conda/conda/issues

Include the output of the command 'conda info' in your report.

"""
            if use_json:
                import traceback
                common.error_and_exit(message + traceback.format_exc(),
                                      error_type="UnexpectedError", json=True)
            print(message)
        raise  # as if we did not catch it
def execute(args, parser):
    """``conda list`` entry point (ignores print_packages' exit code)."""
    prefix = common.get_prefix(args)

    regex = args.regex
    if args.full_name:
        # Anchor the pattern so only exact name matches survive.
        regex = r'^%s$' % regex

    if args.revisions:
        from conda.history import History
        h = History(prefix)
        if isfile(h.path):
            if args.json:
                common.stdout_json(h.object_log())
            else:
                h.print_log()
        else:
            common.error_and_exit("No revision log found: %s\n" % h.path,
                                  json=args.json,
                                  error_type="NoRevisionLog")
        return

    # JSON output always uses the canonical format.
    if args.json or args.canonical:
        format = 'canonical'
    elif args.export:
        format = 'export'
    else:
        format = 'human'

    print_packages(prefix, regex, format, piplist=args.pip, json=args.json)
def execute(args, parser):
    """``conda env create`` entry point: build an environment from a spec."""
    name = args.remote_definition or args.name

    try:
        spec = install_specs.detect(name=name, filename=args.file,
                                    directory=os.getcwd())
        env = spec.environment
    except exceptions.SpecNotFound as e:
        common.error_and_exit(str(e), json=args.json)

    if not args.name:
        if not env.name:
            # Note, this is a hack for get_prefix that assumes argparse results
            # TODO Refactor common.get_prefix
            name = os.environ.get('CONDA_DEFAULT_ENV', False)
            if not name:
                msg = "Unable to determine environment\n\n"
                msg += textwrap.dedent("""
                    Please re-run this command with one of the following options:

                    * Provide an environment name via --name or -n
                    * Re-run this command inside an activated conda environment.""").lstrip()
                # TODO Add json support
                common.error_and_exit(msg, json=False)

        # Note: stubbing out the args object as all of the
        # conda.cli.common code thinks that name will always
        # be specified.
        args.name = env.name

    prefix = common.get_prefix(args, search=False)
    # CAN'T Check with this function since it assumes we will create prefix.
    # cli_install.check_prefix(prefix, json=args.json)

    # TODO, add capability
    # common.ensure_override_channels_requires_channel(args)
    # channel_urls = args.channel or ()

    for installer_type, specs in env.dependencies.items():
        try:
            installer = get_installer(installer_type)
            installer.install(prefix, specs, args, env)
        except InvalidInstaller:
            sys.stderr.write(textwrap.dedent("""
                Unable to install package for {0}.

                Please double check and ensure you dependencies file has
                the correct spelling.  You might also try installing the
                conda-env-{0} package to see if provides the required
                installer.
                """).lstrip().format(installer_type))
            return -1

    touch_nonadmin(prefix)
    if not args.json:
        cli_install.print_activate(args.name if args.name else prefix)
def execute(args, parser):
    """``conda env create`` entry point (duplicate variant of the spec-driven flow)."""
    name = args.remote_definition or args.name

    try:
        spec = install_specs.detect(name=name,
                                    filename=args.file,
                                    directory=os.getcwd())
        env = spec.environment
    except exceptions.SpecNotFound as e:
        common.error_and_exit(str(e), json=args.json)

    if not args.name:
        if not env.name:
            # Note, this is a hack for get_prefix that assumes argparse results
            # TODO Refactor common.get_prefix
            name = os.environ.get('CONDA_DEFAULT_ENV', False)
            if not name:
                msg = "Unable to determine environment\n\n"
                msg += textwrap.dedent("""
                    Please re-run this command with one of the following options:

                    * Provide an environment name via --name or -n
                    * Re-run this command inside an activated conda environment.""").lstrip()
                # TODO Add json support
                common.error_and_exit(msg, json=False)

        # Note: stubbing out the args object as all of the
        # conda.cli.common code thinks that name will always
        # be specified.
        args.name = env.name

    prefix = common.get_prefix(args, search=False)
    # CAN'T Check with this function since it assumes we will create prefix.
    # cli_install.check_prefix(prefix, json=args.json)

    # TODO, add capability
    # common.ensure_override_channels_requires_channel(args)
    # channel_urls = args.channel or ()

    for installer_type, specs in env.dependencies.items():
        try:
            installer = get_installer(installer_type)
            installer.install(prefix, specs, args, env)
        except InvalidInstaller:
            sys.stderr.write(textwrap.dedent("""
                Unable to install package for {0}.

                Please double check and ensure you dependencies file has
                the correct spelling.  You might also try installing the
                conda-env-{0} package to see if provides the required
                installer.
                """).lstrip().format(installer_type))
            return -1

    touch_nonadmin(prefix)
    if not args.json:
        cli_install.print_activate(args.name if args.name else prefix)
def execute(args, parser):
    """``conda clean`` entry point: remove locks, tarballs, caches and packages."""
    json_result = {'success': True}

    if args.lock or args.all:
        locks = list(find_lock())
        json_result['lock'] = {'files': locks}
        rm_lock(locks, verbose=not args.json)

    if args.tarballs or args.all:
        pkgs_dirs, totalsize = find_tarballs()
        first = sorted(pkgs_dirs)[0] if pkgs_dirs else ''
        json_result['tarballs'] = {
            'pkgs_dir': first,  # Backwards compatibility
            'pkgs_dirs': dict(pkgs_dirs),
            'files': pkgs_dirs[first],  # Backwards compatibility
            'total_size': totalsize
        }
        rm_tarballs(args, pkgs_dirs, totalsize, verbose=not args.json)

    if args.index_cache or args.all:
        # BUG FIX: was `config_pkgs_dirs[0]`, an undefined name that raised
        # NameError whenever --index-cache (or --all) was used; the sibling
        # implementation correctly uses `config.pkgs_dirs[0]`.
        json_result['index_cache'] = {
            'files': [join(config.pkgs_dirs[0], 'cache')]
        }
        rm_index_cache()

    if args.packages or args.all:
        pkgs_dirs, warnings, totalsize, pkgsizes = find_pkgs()
        first = sorted(pkgs_dirs)[0] if pkgs_dirs else ''
        json_result['packages'] = {
            'pkgs_dir': first,  # Backwards compatibility
            'pkgs_dirs': dict(pkgs_dirs),
            'files': pkgs_dirs[first],  # Backwards compatibility
            'total_size': totalsize,
            'warnings': warnings,
            'pkg_sizes': {i: dict(zip(pkgs_dirs[i], pkgsizes[i])) for i in pkgs_dirs},
        }
        rm_pkgs(args, pkgs_dirs, warnings, totalsize, pkgsizes,
                verbose=not args.json)

    if args.source_cache or args.all:
        json_result['source_cache'] = find_source_cache()
        rm_source_cache(args, **json_result['source_cache'])

    if not any((args.lock, args.tarballs, args.index_cache, args.packages,
                args.source_cache, args.all)):
        common.error_and_exit(
            "One of {--lock, --tarballs, --index-cache, --packages, "
            "--source-cache, --all} required",
            error_type="ValueError")

    if args.json:
        common.stdout_json(json_result)
def execute(args, parser):
    """``conda clean`` entry point: remove locks, tarballs, caches and packages."""
    json_result = {'success': True}

    if args.lock or args.all:
        locks = list(find_lock())
        json_result['lock'] = {'files': locks}
        rm_lock(locks, verbose=not args.json)

    if args.tarballs or args.all:
        pkgs_dirs, totalsize = find_tarballs()
        first = sorted(pkgs_dirs)[0] if pkgs_dirs else ''
        json_result['tarballs'] = {
            'pkgs_dir': first,  # Backwards compatibility
            'pkgs_dirs': dict(pkgs_dirs),
            'files': pkgs_dirs[first],  # Backwards compatibility
            'total_size': totalsize
        }
        rm_tarballs(args, pkgs_dirs, totalsize, verbose=not args.json)

    if args.index_cache or args.all:
        json_result['index_cache'] = {
            'files': [join(config.pkgs_dirs[0], 'cache')]
        }
        rm_index_cache()

    if args.packages or args.all:
        pkgs_dirs, warnings, totalsize, pkgsizes = find_pkgs()
        first = sorted(pkgs_dirs)[0] if pkgs_dirs else ''
        json_result['packages'] = {
            'pkgs_dir': first,  # Backwards compatibility
            'pkgs_dirs': dict(pkgs_dirs),
            'files': pkgs_dirs[first],  # Backwards compatibility
            'total_size': totalsize,
            'warnings': warnings,
            'pkg_sizes': {i: dict(zip(pkgs_dirs[i], pkgsizes[i])) for i in pkgs_dirs},
        }
        rm_pkgs(args, pkgs_dirs, warnings, totalsize, pkgsizes,
                verbose=not args.json)

    if args.source_cache or args.all:
        json_result['source_cache'] = find_source_cache()
        rm_source_cache(args, **json_result['source_cache'])

    if not any((args.lock, args.tarballs, args.index_cache, args.packages,
                args.source_cache, args.all)):
        common.error_and_exit(
            "One of {--lock, --tarballs, --index-cache, --packages, "
            "--source-cache, --all} required",
            error_type="ValueError")

    if args.json:
        common.stdout_json(json_result)
def execute(args, parser):
    """``conda run`` entry point: launch an installed app or fall back to a command."""
    if not args.package:
        parser.print_help()
        return

    import conda.install
    import conda.resolve
    from conda.api import get_package_versions, app_is_installed
    from conda.misc import launch

    prefix = common.get_prefix(args)
    if args.quiet:
        logging.disable(logging.CRITICAL)

    if args.package.endswith('.tar.bz2'):
        # Explicit tarball name: it must already be installed.
        if app_is_installed(args.package, prefixes=[prefix]):
            fn = args.package
        else:
            error_message = "Package {} not installed.".format(args.package)
            common.error_and_exit(error_message, json=args.json,
                                  error_type="PackageNotInstalled")
    else:
        # Collect installed candidates matching the requested name.
        installed = []
        for pkg in get_package_versions(args.package):
            if app_is_installed(pkg.fn, prefixes=[prefix]):
                installed.append(pkg)
        for pkg in conda.install.linked(prefix):
            name, version, build = pkg.rsplit('-', 2)
            if name == args.package:
                installed = [conda.resolve.Package(
                    pkg + '.tar.bz2', conda.install.is_linked(prefix, pkg))]
                break

        if installed:
            # Launch the newest matching package.
            package = max(installed)
            fn = package.fn
            try:
                subprocess = launch(fn, prefix=prefix,
                                    additional_args=args.arguments,
                                    background=args.json)
                if args.json:
                    common.stdout_json(dict(fn=fn, pid=subprocess.pid))
                elif not args.quiet:
                    print("Started app. Some apps may take a while to finish loading.")
            except TypeError:
                execute_command(args.package, prefix, args.arguments, args.json)
            except Exception as e:
                common.exception_and_exit(e, json=args.json)
        else:
            # Try interpreting it as a command
            execute_command(args.package, prefix, args.arguments, args.json)
def execute(args, parser):
    """``conda env create`` entry point: build an environment from an env file."""
    try:
        env = from_file(args.file)
    except exceptions.EnvironmentFileNotFound as e:
        msg = 'Unable to locate environment file: %s\n\n' % e.filename
        msg += "\n".join(textwrap.wrap(textwrap.dedent("""
            Please verify that the above file is present and that you have
            permission read the file's contents.  Note, you can specify the
            file to use by explictly adding --file=/path/to/file when calling
            conda env create.""").lstrip()))
        common.error_and_exit(msg, json=args.json)

    if not args.name:
        if not env.name:
            # TODO It would be nice to be able to format this more cleanly
            common.error_and_exit(
                'An environment name is required.\n\n'
                'You can either specify one directly with --name or you can add\n'
                'a name property to your %s file.' % args.file,
                json=args.json
            )
        # Note: stubbing out the args object as all of the
        # conda.cli.common code thinks that name will always
        # be specified.
        args.name = env.name

    prefix = common.get_prefix(args, search=False)
    cli_install.check_prefix(prefix, json=args.json)

    # TODO, add capability
    # common.ensure_override_channels_requires_channel(args)
    # channel_urls = args.channel or ()

    for installer_type, specs in env.dependencies.items():
        try:
            installer = get_installer(installer_type)
            installer.install(prefix, specs, args, env)
        except InvalidInstaller:
            sys.stderr.write(textwrap.dedent("""
                Unable to install package for {0}.

                Please double check and ensure you dependencies file has
                the correct spelling.  You might also try installing the
                conda-env-{0} package to see if provides the required
                installer.
                """).lstrip().format(installer_type))
            return -1

    touch_nonadmin(prefix)
    if not args.json:
        cli_install.print_activate(args.name if args.name else prefix)
def check_prefix(prefix, json=False):
    """Validate that *prefix* is a legal, not-yet-existing environment location.

    Checks are applied in order; a later failing check overrides the message
    of an earlier one, and only the final message is reported.
    """
    name = basename(prefix)
    error = None
    if name.startswith('.'):
        error = "environment name cannot start with '.': %s" % name
    if name == config.root_env_name:
        error = "'%s' is a reserved environment name" % name
    if exists(prefix):
        error = "prefix already exists: %s" % prefix

    if error:
        common.error_and_exit(error, json=json, error_type="ValueError")
def execute(args, parser):
    """``conda clean`` entry point (single-pkgs_dir variant, no --all flag)."""
    json_result = {'success': True}

    if args.lock:
        locks = list(find_lock())
        json_result['lock'] = {'files': locks}
        rm_lock(locks, verbose=not args.json)

    if args.tarballs:
        pkgs_dir, rmlist, totalsize = find_tarballs()
        json_result['tarballs'] = {
            'pkgs_dir': pkgs_dir,
            'files': rmlist,
            'total_size': totalsize
        }
        rm_tarballs(args, pkgs_dir, rmlist, totalsize, verbose=not args.json)

    if args.index_cache:
        json_result['index_cache'] = {
            'files': [join(config.pkgs_dirs[0], 'cache')]
        }
        rm_index_cache()

    if args.packages:
        pkgs_dir, rmlist, warnings, totalsize, pkgsizes = find_pkgs()
        json_result['packages'] = {
            'pkgs_dir': pkgs_dir,
            'files': rmlist,
            'total_size': totalsize,
            'warnings': warnings,
            'pkg_sizes': dict(zip(rmlist, pkgsizes))
        }
        rm_pkgs(args, pkgs_dir, rmlist, warnings, totalsize, pkgsizes,
                verbose=not args.json)

    if args.source_cache:
        json_result['source_cache'] = find_source_cache()
        rm_source_cache(args, **json_result['source_cache'])

    if not (args.lock or args.tarballs or args.index_cache or args.packages
            or args.source_cache):
        common.error_and_exit(
            "One of {--lock, --tarballs, --index-cache, --packages, --source-cache} required",
            error_type="ValueError")

    if args.json:
        common.stdout_json(json_result)
def execute_command(cmd, prefix, additional_args, json=False):
    """Run *cmd* inside the environment at *prefix*; exit with its status code."""
    from conda.misc import execute_in_environment
    try:
        process = execute_in_environment(
            cmd, prefix=prefix, additional_args=additional_args,
            inherit=not json)
        if json:
            common.stdout_json(dict(cmd=cmd, pid=process.pid))
        else:
            # Block until the child finishes and propagate its exit status.
            sys.exit(process.wait())
    except OSError:
        error_message = "App {} not installed.".format(cmd)
        common.error_and_exit(error_message, json=json,
                              error_type="AppNotInstalled")
def execute(args, parser):
    """``conda clean`` entry point (single-pkgs_dir variant, no --all flag)."""
    json_result = {'success': True}

    if args.lock:
        locks = list(find_lock())
        json_result['lock'] = {'files': locks}
        rm_lock(locks, verbose=not args.json)

    if args.tarballs:
        pkgs_dir, rmlist, totalsize = find_tarballs()
        json_result['tarballs'] = {
            'pkgs_dir': pkgs_dir,
            'files': rmlist,
            'total_size': totalsize
        }
        rm_tarballs(args, pkgs_dir, rmlist, totalsize, verbose=not args.json)

    if args.index_cache:
        json_result['index_cache'] = {
            'files': [join(config.pkgs_dirs[0], 'cache')]
        }
        rm_index_cache()

    if args.packages:
        pkgs_dir, rmlist, warnings, totalsize, pkgsizes = find_pkgs()
        json_result['packages'] = {
            'pkgs_dir': pkgs_dir,
            'files': rmlist,
            'total_size': totalsize,
            'warnings': warnings,
            'pkg_sizes': dict(zip(rmlist, pkgsizes))
        }
        rm_pkgs(args, pkgs_dir, rmlist, warnings, totalsize, pkgsizes,
                verbose=not args.json)

    if args.source_cache:
        json_result['source_cache'] = find_source_cache()
        rm_source_cache(args, **json_result['source_cache'])

    if not (args.lock or args.tarballs or args.index_cache or args.packages
            or args.source_cache):
        common.error_and_exit(
            "One of {--lock, --tarballs, --index-cache, --packages, --source-cache} required",
            error_type="ValueError")

    if args.json:
        common.stdout_json(json_result)
def args_func(args, p):
    """Dispatch to the subcommand, routing failures to the issue-message helper."""
    from conda.cli import common
    use_json = getattr(args, 'json', False)
    try:
        args.func(args, p)
    except RuntimeError as e:
        # Recursion-depth blowups get the full "please file an issue" treatment;
        # other runtime errors are reported as plain user errors.
        if 'maximum recursion depth exceeded' in str(e):
            print_issue_message(e, use_json=use_json)
            raise
        common.error_and_exit(str(e), json=use_json)
    except Exception as e:
        print_issue_message(e, use_json=use_json)
        raise  # as if we did not catch it
def print_explicit(prefix):
    """Print an @EXPLICIT package list (one URL per linked package) for *prefix*."""
    import json

    if not isdir(prefix):
        common.error_and_exit("Error: environment does not exist: %s" % prefix)

    print_export_header()
    print("@EXPLICIT")

    meta_dir = join(prefix, 'conda-meta')
    for fn in sorted(os.listdir(meta_dir)):
        if not fn.endswith('.json'):
            continue
        with open(join(meta_dir, fn)) as fh:
            meta = json.load(fh)
        # fn[:-5] strips the '.json' suffix to recover the package name.
        print(meta.get('url') or '# no URL for: %s' % fn[:-5])
def print_explicit(prefix):
    """Print an @EXPLICIT package list (one URL per linked package) for *prefix*."""
    import json

    if not isdir(prefix):
        common.error_and_exit("Error: environment does not exist: %s" % prefix)

    print_export_header()
    print("@EXPLICIT")

    meta_dir = join(prefix, "conda-meta")
    for fn in sorted(os.listdir(meta_dir)):
        if not fn.endswith(".json"):
            continue
        with open(join(meta_dir, fn)) as fh:
            meta = json.load(fh)
        # fn[:-5] strips the ".json" suffix to recover the package name.
        print(meta.get("url") or "# no URL for: %s" % fn[:-5])
def execute(args, parser):
    """``conda env create`` entry point (selectors + --force variant)."""
    name = args.remote_definition or args.name

    try:
        spec = specs.detect(name=name, filename=args.file,
                            directory=os.getcwd(), selectors=args.select)
        env = spec.environment

        # FIXME conda code currently requires args to have a name or prefix
        if args.prefix is None:
            args.name = env.name
    except exceptions.SpecNotFound as e:
        common.error_and_exit(str(e), json=args.json)

    prefix = common.get_prefix(args, search=False)

    # --force wipes an existing (non-root) environment before recreating it.
    if args.force and not is_root_prefix(prefix) and os.path.exists(prefix):
        rm_rf(prefix)
    cli_install.check_prefix(prefix, json=args.json)

    # TODO, add capability
    # common.ensure_override_channels_requires_channel(args)
    # channel_urls = args.channel or ()

    for installer_type, pkg_specs in env.dependencies.items():
        try:
            installer = get_installer(installer_type)
            installer.install(prefix, pkg_specs, args, env)
        except InvalidInstaller:
            sys.stderr.write(textwrap.dedent("""
                Unable to install package for {0}.

                Please double check and ensure you dependencies file has
                the correct spelling.  You might also try installing the
                conda-env-{0} package to see if provides the required
                installer.
                """).lstrip().format(installer_type))
            return -1

    touch_nonadmin(prefix)
    if not args.json:
        cli_install.print_activate(args.name if args.name else prefix)
def execute(args, parser):
    """``conda env create`` entry point (handles the deprecated --name spelling)."""
    name = None
    if args.old_name:
        # Old-style invocation: warn on stderr but keep working.
        print("--name is deprecated. Use the following command instead:\n"
              " conda env create {}".format(args.old_name), file=sys.stderr)
        name = args.old_name
    elif args.name:
        name = args.name

    try:
        spec = specs.detect(name=name, filename=args.file,
                            directory=os.getcwd())
        env = spec.environment
        # FIXME conda code currently requires args to have a name or prefix
        args.name = env.name
    except exceptions.SpecNotFound as e:
        common.error_and_exit(str(e), json=args.json)

    prefix = common.get_prefix(args, search=False)
    cli_install.check_prefix(prefix, json=args.json)

    # TODO, add capability
    # common.ensure_override_channels_requires_channel(args)
    # channel_urls = args.channel or ()

    for installer_type, pkg_specs in env.dependencies.items():
        try:
            installer = get_installer(installer_type)
            installer.install(prefix, pkg_specs, args, env)
        except InvalidInstaller:
            sys.stderr.write(textwrap.dedent("""
                Unable to install package for {0}.

                Please double check and ensure you dependencies file has
                the correct spelling.  You might also try installing the
                conda-env-{0} package to see if provides the required
                installer.
                """).lstrip().format(installer_type))
            return -1

    touch_nonadmin(prefix)
    if not args.json:
        cli_install.print_activate(args.name if args.name else prefix)
def print_issue_message(e, use_json=False):
    """Report an unexpected exception, asking the user to file a GitHub issue.

    YAML parse errors (ScannerError/ParserError) are skipped since they
    already carry a readable message.
    """
    from conda.cli import common
    message = ""
    if e.__class__.__name__ not in ('ScannerError', 'ParserError'):
        message = """\
An unexpected error has occurred, please consider sending the
following traceback to the conda GitHub issue tracker at:

    https://github.com/conda/conda/issues

Include the output of the command 'conda info' in your report.

"""
        if use_json:
            import traceback
            common.error_and_exit(message + traceback.format_exc(),
                                  error_type="UnexpectedError", json=True)
        print(message)
def print_explicit(prefix, add_md5=False):
    """Print an @EXPLICIT package list for *prefix*, optionally with #md5 fragments."""
    import json

    if not isdir(prefix):
        common.error_and_exit("Error: environment does not exist: %s" % prefix)

    print_export_header()
    print("@EXPLICIT")

    meta_dir = join(prefix, 'conda-meta')
    for fn in sorted(os.listdir(meta_dir)):
        if not fn.endswith('.json'):
            continue
        with open(join(meta_dir, fn)) as fh:
            meta = json.load(fh)
        if not meta.get('url'):
            # fn[:-5] strips the '.json' suffix to recover the package name.
            print('# no URL for: %s' % fn[:-5])
            continue
        md5 = meta.get('md5')
        print(meta['url'] + ('#%s' % md5 if add_md5 and md5 else ''))
def root_read_only(command, prefix, json=False):
    """Exit with a 'root not writable' message, suggesting a clone into $HOME."""
    assert command in {'install', 'update', 'remove'}

    # Prefer the packaged message template; fall back to the built-in one.
    msg = read_message('ro.txt')
    if not msg:
        msg = """\
Missing write permissions in: ${root_dir}
#
# You don't appear to have the necessary permissions to ${command} packages
# into the install area '${root_dir}'.
# However you can clone this environment into your home directory and
# then make changes to it.
# This may be done using the command:
#
#   $ conda create -n my_${name} --clone=${prefix}
"""
    # Substitute all template placeholders.
    for placeholder, value in (('${root_dir}', config.root_dir),
                               ('${prefix}', prefix),
                               ('${name}', name_prefix(prefix)),
                               ('${command}', command)):
        msg = msg.replace(placeholder, value)
    error_and_exit(msg, json=json, error_type='RootNotWritable')
def print_explicit(prefix, add_md5=False):
    """Print an @EXPLICIT package list for *prefix*, reconstructing URLs when needed."""
    import json

    if not isdir(prefix):
        common.error_and_exit("Error: environment does not exist: %s" % prefix)

    print_export_header()
    print("@EXPLICIT")

    meta_dir = join(prefix, 'conda-meta')
    for fn in sorted(os.listdir(meta_dir)):
        if not fn.endswith('.json'):
            continue
        with open(join(meta_dir, fn)) as fi:
            meta = json.load(fi)
        url = meta.get('url')

        def format_url():
            # Rebuild the download URL from channel + dist fields; raises
            # KeyError when any of them is missing from the metadata.
            return '%s%s-%s-%s.tar.bz2' % (meta['channel'], meta['name'],
                                           meta['version'], meta['build'])

        # two cases in which we want to try to format the url:
        # 1. There is no url key in the metadata
        # 2. The url key in the metadata is referencing a file on the local
        #    machine
        if not url:
            try:
                url = format_url()
            except KeyError:
                # Declare failure :-(
                print('# no URL for: %s' % fn[:-5])
                continue
        if url.startswith('file'):
            try:
                url = format_url()
            except KeyError:
                # declare failure and allow the url to be the file from which it was
                # originally installed
                continue
        md5 = meta.get('md5')
        print(url + ('#%s' % md5 if add_md5 and md5 else ''))
def execute(args, parser):
    """``conda env export`` entry point: emit name + dependencies as YAML.

    Conda packages become ``name=version=build`` specs; pip packages are
    grouped under a ``pip`` key with ``name==version`` specs.
    """
    if not args.name:
        # Note, this is a hack for get_prefix that assumes argparse results
        # TODO Refactor common.get_prefix
        name = os.environ.get('CONDA_DEFAULT_ENV', False)
        if not name:
            msg = "Unable to determine environment\n\n"
            msg += textwrap.dedent("""
                Please re-run this command with one of the following options:

                * Provide an environment name via --name or -n
                * Re-run this command inside an activated conda environment.""").lstrip()
            # TODO Add json support
            common.error_and_exit(msg, json=False)
        args.name = name

    prefix = common.get_prefix(args)

    installed = install.linked(prefix)
    conda_pkgs = copy(installed)
    # json=True hides the output, data is added to installed
    main_list.add_pip_installed(prefix, installed, json=True)

    pip_pkgs = sorted(installed - conda_pkgs)

    dependencies = ['='.join(a.rsplit('-', 2)) for a in sorted(conda_pkgs)]
    if len(pip_pkgs) > 0:
        dependencies.append({'pip': ['=='.join(a.rsplit('-', 2)[:2]) for a in pip_pkgs]})

    data = {
        'name': args.name,
        'dependencies': dependencies,
    }

    if args.file is None:
        yaml.dump(data, default_flow_style=False, stream=sys.stdout)
    else:
        # BUG FIX: the output file handle was previously opened and never
        # closed, leaking the descriptor and risking unflushed output. Only
        # the file we open ourselves is closed (never sys.stdout).
        with open(args.file, 'wb') as fp:
            yaml.dump(data, default_flow_style=False, stream=fp)
def print_packages(prefix, regex=None, format='human', piplist=False,
                   json=False, show_channel_urls=show_channel_urls):
    """Print (or JSON-dump) packages in *prefix*; return the list exit code.

    ``show_channel_urls`` defaults to the module-level value captured at
    function definition time.
    """
    if not isdir(prefix):
        common.error_and_exit("""\
Error: environment does not exist: %s
#
# Use 'conda create' to create an environment before listing its packages.""" %
                              prefix,
                              json=json,
                              error_type="NoEnvironmentFound")

    if not json:
        if format == 'human':
            print('# packages in environment at %s:' % prefix)
            print('#')
        if format == 'export':
            print_export_header()

    installed = install.linked(prefix)
    if piplist and use_pip and format == 'human':
        # Fold pip-installed eggs into the set of installed packages.
        installed.update(get_egg_info(prefix))

    exitcode, output = list_packages(prefix, installed, regex, format=format,
                                     show_channel_urls=show_channel_urls)
    if json:
        common.stdout_json(output)
    else:
        print('\n'.join(output))
    return exitcode
def execute(args, parser):
    """Entry point for ``conda remove``.

    Builds a removal plan for the named packages (or the whole environment
    with ``--all``), confirms with the user, and executes it.  Destructive:
    may ``rm_rf`` the entire prefix.
    """
    import sys

    import conda.plan as plan
    from conda.cli import pscheck
    from conda.install import rm_rf, linked
    from conda import config

    # Either --all or at least one package name is required.
    if not (args.all or args.package_names):
        common.error_and_exit('no package names supplied,\n'
                              ' try "conda remove -h" for more details',
                              json=args.json,
                              error_type="ValueError")

    prefix = common.get_prefix(args)
    # Refuse to delete the environment that is currently activated.
    if args.all and prefix == config.default_prefix:
        common.error_and_exit("cannot remove current environment. deactivate and run conda remove again")
    common.check_write('remove', prefix, json=args.json)

    common.ensure_override_channels_requires_channel(args, json=args.json)
    channel_urls = args.channel or ()
    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit("you need to have 'conda-build >= 1.7.1' installed"
                                  " to use the --use-local option",
                                  json=args.json,
                                  error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        index = common.get_index_trap(channel_urls=[url_path(croot)] + list(channel_urls),
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      json=args.json)
    else:
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      json=args.json)

    # Build the action plan: by feature set, whole environment, or by spec.
    if args.features:
        features = set(args.package_names)
        actions = plan.remove_features_actions(prefix, index, features)
    elif args.all:
        if plan.is_root_prefix(prefix):
            common.error_and_exit('cannot remove root environment,\n'
                                  ' add -n NAME or -p PREFIX option',
                                  json=args.json,
                                  error_type="CantRemoveRoot")
        actions = {plan.PREFIX: prefix,
                   plan.UNLINK: sorted(linked(prefix))}
    else:
        specs = common.specs_from_args(args.package_names)
        # Certain packages (root_no_rm) must never be removed from root.
        if (plan.is_root_prefix(prefix) and
                common.names_in_specs(common.root_no_rm, specs)):
            common.error_and_exit('cannot remove %s from root environment' %
                                  ', '.join(common.root_no_rm),
                                  json=args.json,
                                  error_type="CantRemoveFromRoot")
        actions = plan.remove_actions(prefix, specs, index=index, pinned=args.pinned)

    if plan.nothing_to_do(actions):
        if args.all:
            # --all on an already-empty environment: just delete the tree.
            rm_rf(prefix)
            if args.json:
                common.stdout_json({'success': True, 'actions': actions})
            return
        common.error_and_exit('no packages found to remove from '
                              'environment: %s' % prefix,
                              json=args.json,
                              error_type="PackageNotInstalled")

    if not args.json:
        print()
        print("Package plan for package removal in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if args.json and args.dry_run:
        common.stdout_json({
            'success': True,
            'dry_run': True,
            'actions': actions
        })
        return

    if not args.json:
        # Interactive path: warn about running processes, then confirm.
        if not pscheck.main(args):
            common.confirm_yn(args)
    elif (sys.platform == 'win32' and not args.force_pscheck and
          not pscheck.check_processes(verbose=False)):
        # JSON/non-interactive path on Windows: running processes would hold
        # file locks, so abort unless explicitly overridden.
        common.error_and_exit("Cannot continue removal while processes "
                              "from packages are running without --force-pscheck.",
                              json=True,
                              error_type="ProcessesStillRunning")

    if args.json and not args.quiet:
        with json_progress_bars():
            plan.execute_actions(actions, index, verbose=not args.quiet)
    else:
        plan.execute_actions(actions, index, verbose=not args.quiet)

    if args.all:
        rm_rf(prefix)

    if args.json:
        common.stdout_json({'success': True, 'actions': actions})
def execute(args, parser):
    """Entry point for ``conda env update``.

    Loads the environment file, resolves the target prefix, and dispatches
    each dependency group (e.g. 'conda', 'pip') to its installer plugin.
    Returns -1 when no installer exists for a group.
    """
    try:
        env = from_file(args.file)
    except exceptions.EnvironmentFileNotFound as e:
        msg = 'Unable to locate environment file: %s\n\n' % e.filename
        msg += "\n".join(textwrap.wrap(textwrap.dedent("""
            Please verify that the above file is present and that you have
            permission read the file's contents.  Note, you can specify the
            file to use by explictly adding --file=/path/to/file when calling
            conda env update.""").lstrip()))
        common.error_and_exit(msg, json=args.json)

    if not args.name:
        if not env.name:
            # Note, this is a hack for get_prefix that assumes argparse results
            # TODO Refactor common.get_prefix
            name = os.environ.get('CONDA_DEFAULT_ENV', False)
            if not name:
                msg = "Unable to determine environment\n\n"
                msg += textwrap.dedent("""
                    Please re-run this command with one of the following options:
                    * Provide an environment name via --name or -n
                    * Re-run this command inside an activated conda environment.""").lstrip()
                # TODO Add json support
                common.error_and_exit(msg, json=False)
            # NOTE(review): the looked-up ``name`` is never assigned back to
            # ``env.name`` or ``args.name``, so the fallback below still uses
            # the (empty) ``env.name`` -- confirm this is intended.
        # Note: stubbing out the args object as all of the
        # conda.cli.common code thinks that name will always
        # be specified.
        args.name = env.name

    prefix = common.get_prefix(args, search=False)
    # CAN'T Check with this function since it assumes we will create prefix.
    # cli_install.check_prefix(prefix, json=args.json)

    # TODO, add capability
    # common.ensure_override_channels_requires_channel(args)
    # channel_urls = args.channel or ()

    # Dispatch each dependency group to the matching installer plugin; an
    # unknown group aborts with a hint about the conda-env-<type> package.
    for installer_type, specs in env.dependencies.items():
        try:
            installer = get_installer(installer_type)
            installer.install(prefix, specs, args, env)
        except InvalidInstaller:
            sys.stderr.write(textwrap.dedent("""
                Unable to install package for {0}.

                Please double check and ensure you dependencies file has
                the correct spelling.  You might also try installing the
                conda-env-{0} package to see if provides the required
                installer.
                """).lstrip().format(installer_type))
            return -1

    touch_nonadmin(prefix)
    if not args.json:
        cli_install.print_activate(args.name if args.name else prefix)
def execute_search(args, parser):
    """Entry point for ``conda search``.

    Matches packages in the channel index against a regex, spec, or
    reverse-dependency query and prints a table (or emits JSON).
    """
    import re
    from conda.resolve import Resolve

    if args.reverse_dependency:
        if not args.regex:
            parser.error("--reverse-dependency requires at least one package name")
        if args.spec:
            parser.error("--reverse-dependency does not work with --spec")

    # Either a compiled regex (pat) or a spec string (ms) drives matching.
    pat = None
    ms = None
    if args.regex:
        if args.spec:
            ms = ' '.join(args.regex.split('='))
        else:
            regex = args.regex
            if args.full_name:
                regex = r'^%s$' % regex
            try:
                pat = re.compile(regex, re.I)
            except re.error as e:
                common.error_and_exit("'%s' is not a valid regex pattern (exception: %s)" % (regex, e),
                                      json=args.json,
                                      error_type="ValueError")

    prefix = common.get_prefix(args)

    import conda.config
    import conda.install

    # Installed/extracted sets are used only to annotate the listing
    # ('*' linked, '.' extracted).
    linked = conda.install.linked(prefix)
    extracted = set()
    for pkgs_dir in conda.config.pkgs_dirs:
        extracted.update(conda.install.extracted(pkgs_dir))

    # XXX: Make this work with more than one platform
    platform = args.platform or ''
    if platform and platform != config.subdir:
        args.unknown = False
    common.ensure_override_channels_requires_channel(args, dashc=False,
                                                     json=args.json)
    channel_urls = args.channel or ()
    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit("you need to have 'conda-build >= 1.7.1' installed"
                                  " to use the --use-local option",
                                  json=args.json,
                                  error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        if exists(croot):
            channel_urls = [url_path(croot)] + list(channel_urls)
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      unknown=args.unknown,
                                      prefix=prefix,
                                      json=args.json,
                                      platform=args.platform,
                                      offline=args.offline)
    else:
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      platform=args.platform,
                                      use_cache=args.use_index_cache,
                                      prefix=prefix,
                                      unknown=args.unknown,
                                      json=args.json,
                                      offline=args.offline)

    r = Resolve(index)

    # NOTE(review): ``json`` here shadows the builtin module/name; it is the
    # accumulator for --json output (list for --canonical, dict otherwise).
    if args.canonical:
        json = []
    else:
        json = {}

    names = []
    for name in sorted(r.groups):
        if '@' in name:
            continue
        if args.reverse_dependency:
            # Collect every package whose dependencies mention the pattern.
            ms_name = ms
            for pkg in r.groups[name]:
                for dep in r.ms_depends(pkg):
                    if pat.search(dep.name):
                        names.append((name, Package(pkg, r.index[pkg])))
        else:
            if pat and pat.search(name) is None:
                continue
            if ms and name != ms.name:
                continue
            if ms:
                ms_name = ms
            else:
                ms_name = name
            pkgs = sorted(r.get_pkgs(ms_name))
            names.append((name, pkgs))

    if args.reverse_dependency:
        # Group (name, pkg) pairs into (name, [pkgs]) lists, preserving order.
        new_names = []
        old = None
        for name, pkg in sorted(names, key=lambda x: (x[0], x[1].name, x[1])):
            if name == old:
                new_names[-1][1].append(pkg)
            else:
                new_names.append((name, [pkg]))
            old = name
        names = new_names

    for name, pkgs in names:
        if args.reverse_dependency:
            disp_name = pkgs[0].name
        else:
            disp_name = name

        if args.names_only and not args.outdated:
            print(name)
            continue

        if not args.canonical:
            json[name] = []

        if args.outdated:
            # Skip packages that are not installed, or whose newest available
            # version matches the installed one.
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            if not vers_inst:
                continue
            assert len(vers_inst) == 1, name
            if not pkgs:
                continue
            latest = pkgs[-1]
            if latest.version == vers_inst[0]:
                continue
            if args.names_only:
                print(name)
                continue

        for pkg in pkgs:
            dist = pkg.fn[:-8]
            if args.canonical:
                if not args.json:
                    print(dist)
                else:
                    json.append(dist)
                continue
            # Installation marker: '*' linked, '.' extracted, ' ' otherwise
            # (always ' ' when searching a foreign platform).
            if platform and platform != config.subdir:
                inst = ' '
            elif dist in linked:
                inst = '*'
            elif dist in extracted:
                inst = '.'
            else:
                inst = ' '

            if not args.json:
                print('%-25s %s %-15s %15s %-15s %s' % (
                    disp_name, inst,
                    pkg.version,
                    pkg.build,
                    config.canonical_channel_name(pkg.channel),
                    common.disp_features(r.features(pkg.fn)),
                ))
                # Only show the name on the first row of each group.
                disp_name = ''
            else:
                data = {}
                data.update(pkg.info)
                data.update({
                    'fn': pkg.fn,
                    'installed': inst == '*',
                    'extracted': inst in '*.',
                    'version': pkg.version,
                    'build': pkg.build,
                    'build_number': pkg.build_number,
                    'channel': config.canonical_channel_name(pkg.channel),
                    'full_channel': pkg.channel,
                    'features': list(r.features(pkg.fn)),
                    'license': pkg.info.get('license'),
                    'size': pkg.info.get('size'),
                    'depends': pkg.info.get('depends'),
                    'type': pkg.info.get('type')
                })
                if data['type'] == 'app':
                    data['icon'] = make_icon_url(pkg.info)
                json[name].append(data)

    if args.json:
        common.stdout_json(json)
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create

    Shared driver for the three commands: validates arguments, resolves the
    channel index, computes an action plan via ``conda.plan``, confirms with
    the user, and executes it.
    """
    newenv = bool(command == 'create')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix, json=args.json)

    # Argument validation for 'update': --all excludes package names, and a
    # bare 'conda update' with nothing to update is an error.
    if command == 'update':
        if args.all:
            if args.packages:
                common.error_and_exit("""--all cannot be used with packages""",
                                      json=args.json,
                                      error_type="ValueError")
        else:
            if len(args.packages) == 0:
                common.error_and_exit("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix,
                                      json=args.json,
                                      error_type="ValueError")

    if command == 'update':
        linked = ci.linked(prefix)
        for name in args.packages:
            common.arg2spec(name, json=args.json)
            # 'update' takes bare names only; versioned specs are rejected.
            if '=' in name:
                common.error_and_exit("Invalid package name: '%s'" % (name),
                                      json=args.json,
                                      error_type="ValueError")
            if name not in set(ci.name_dist(d) for d in linked):
                common.error_and_exit("package '%s' is not installed in %s" %
                                      (name, prefix),
                                      json=args.json,
                                      error_type="ValueError")

    # 'conda create --clone' short-circuits the planning machinery entirely.
    if newenv and args.clone:
        if args.packages:
            common.error_and_exit('did not expect any arguments for --clone',
                                  json=args.json,
                                  error_type="ValueError")
        clone(args.clone, prefix, json=args.json, quiet=args.quiet)
        touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)
        return

    if newenv and not args.no_default_packages:
        default_packages = config.create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in config.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    # Build the list of specs from --file, from --all (pin every linked
    # package to at least its current version), and from the command line.
    specs = []
    if args.file:
        specs.extend(common.specs_from_url(args.file, json=args.json))
    elif getattr(args, 'all', False):
        linked = ci.linked(prefix)
        for pkg in linked:
            name, ver, build = pkg.rsplit('-', 2)
            if name == 'python' and ver.startswith('2'):
                # Oh Python 2...
                specs.append('%s >=%s,<3' % (name, ver))
            else:
                specs.append('%s >=%s' % (name, ver))
    specs.extend(common.specs_from_args(args.packages, json=args.json))

    if command == 'install' and args.revision:
        # Validates the revision number; the actual revert happens below.
        get_revision(args.revision, json=args.json)
    else:
        common.check_specs(prefix, specs, json=args.json)

    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build import config as build_config
        except ImportError:
            common.error_and_exit("you need to have 'conda-build' installed"
                                  " to use the --use-local option",
                                  json=args.json,
                                  error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        index = common.get_index_trap([url_path(build_config.croot)],
                                      use_cache=args.use_index_cache,
                                      unknown=args.unknown,
                                      json=args.json)
    else:
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      unknown=args.unknown,
                                      json=args.json)

    # Don't update packages that are already up-to-date
    if command == 'update' and not args.all:
        r = Resolve(index)
        orig_packages = args.packages[:]
        for name in orig_packages:
            installed_metadata = [ci.is_linked(prefix, dist)
                                  for dist in linked]
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            build_inst = [m['build_number'] for m in installed_metadata
                          if m['name'] == name]
            try:
                # A name should match exactly one linked dist.
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
            except AssertionError as e:
                if args.json:
                    common.exception_and_exit(e, json=True)
                else:
                    raise
            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]
            if latest.version == vers_inst[0] and latest.build_number == build_inst[0]:
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import print_packages
            if not args.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(
                    message='All requested packages already installed.')
            return

    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            install_tar(prefix, tar_path, verbose=not args.quiet)
            return

    # handle explicit installs of conda packages
    if args.packages and all(s.endswith('.tar.bz2') for s in args.packages):
        from conda.misc import install_local_packages
        install_local_packages(prefix, args.packages, verbose=not args.quiet)
        return

    if any(s.endswith('.tar.bz2') for s in args.packages):
        common.error_and_exit("cannot mix specifications with conda package filenames",
                              json=args.json,
                              error_type="ValueError")

    if args.force:
        args.no_deps = True

    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                common.error_and_exit("Error: could not create directory: %s" % prefix,
                                      json=args.json,
                                      error_type="OSError")
        else:
            common.error_and_exit("""\
environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix,
                                  json=args.json,
                                  error_type="NoEnvironmentFound")

    try:
        if command == 'install' and args.revision:
            actions = plan.revert_actions(prefix, get_revision(args.revision))
        else:
            actions = plan.install_actions(prefix, index, specs,
                                           force=args.force,
                                           only_names=only_names,
                                           pinned=args.pinned,
                                           minimal_hint=args.alt_hint)
    except NoPackagesFound as e:
        # Enrich the error with fuzzy name suggestions and a Binstar hint.
        error_message = e.args[0]
        packages = {index[fn]['name'] for fn in index}
        for pkg in e.pkgs:
            close = get_close_matches(pkg, packages)
            if close:
                error_message += "\n\nDid you mean one of these?\n %s" % (
                    ', '.join(close))
            error_message += '\n\nYou can search for this package on Binstar with'
            error_message += '\n\n binstar search -t conda %s' % pkg
            error_message += '\n\nYou may need to install the Binstar command line client with'
            error_message += '\n\n conda install binstar'
        common.error_and_exit(error_message, json=args.json)
    except SystemExit as e:
        # Unsatisfiable package specifications/no such revision/import error
        error_type = 'UnsatisfiableSpecifications'
        if e.args and 'could not import' in e.args[0]:
            error_type = 'ImportError'
        common.exception_and_exit(e, json=args.json,
                                  newline=True,
                                  error_text=False,
                                  error_type=error_type)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import print_packages
        if not args.json:
            regex = '^(%s)$' % '|'.join(spec_names)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(
                message='All requested packages already installed.')
        return

    if not args.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    if not args.json:
        # Interactive path: warn about running processes, then confirm.
        if not pscheck.main(args):
            common.confirm_yn(args)
    else:
        if (sys.platform == 'win32' and not args.force_pscheck and
                not pscheck.check_processes(verbose=False)):
            common.error_and_exit("Cannot continue operation while processes "
                                  "from packages are running without --force-pscheck.",
                                  json=True,
                                  error_type="ProcessesStillRunning")
        elif args.dry_run:
            common.stdout_json_success(actions=actions, dry_run=True)
            sys.exit(0)

    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
        except RuntimeError as e:
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            common.exception_and_exit(e, error_type=error_type, json=args.json)
        except SystemExit as e:
            common.exception_and_exit(e, json=args.json)

    if newenv:
        touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)

    if args.json:
        common.stdout_json_success(actions=actions)
def execute(args, parser):
    """Entry point for ``conda env remove``: unlink every package in the
    target environment and delete its directory tree.
    """
    from conda import config, plan
    from conda.install import linked, rm_rf

    prefix = common.get_prefix(args)

    # Guard: the root environment may never be removed.
    if plan.is_root_prefix(prefix):
        common.error_and_exit('cannot remove root environment,\n'
                              ' add -n NAME or -p PREFIX option',
                              json=args.json,
                              error_type="CantRemoveRoot")

    # Guard: neither may the currently-activated environment.
    if prefix == config.default_prefix:
        # FIXME The way the "name" is determined now is handled by
        # looking at the basename of the prefix. This is brittle
        # and underlines a use-case for an Environment object that
        # is capable of providing a name attribute.
        common.error_and_exit(textwrap.dedent("""
            Conda cannot remove the current environment.

            Please deactivate and run conda env remove again with the name specified.

            conda env remove --name %s
        """ % basename(prefix)).lstrip())

    # TODO Why do we need an index for removing packages?
    index = common.get_index_trap(json=args.json)

    # The whole plan is: unlink everything currently linked into the prefix.
    actions = {plan.PREFIX: prefix,
               plan.UNLINK: sorted(linked(prefix))}
    result = {'success': True, 'actions': actions}

    if plan.nothing_to_do(actions):
        # TODO Should this automatically remove even *before* confirmation?
        # TODO Should this display an error when removing something that
        # doesn't exist?
        rm_rf(prefix)
        if args.json:
            common.stdout_json(result)
        return

    if args.json and args.dry_run:
        common.stdout_json({'success': True,
                            'dry_run': True,
                            'actions': actions})
        return

    if not args.json:
        print()
        print("Remove the following packages in environment %s:" % prefix)
        plan.display_actions(actions, index)
        common.confirm_yn(args)

    plan.execute_actions(actions, index, verbose=not args.quiet)
    rm_rf(prefix)

    if args.json:
        common.stdout_json(result)
# NOTE(review): unresolved merge-conflict residue was found here
# ('=======' / '>>>>>>> origin/feature/instruction-arguments' /
# '>>>>>>> conda/feature/instruction-arguments' markers, now removed).
# The fragment below is a headless duplicate of the start of the
# `execute` (conda remove) body defined above -- confirm intent and
# delete or re-merge as appropriate.
from conda.cli import pscheck
from conda.install import rm_rf, linked
from conda import config
if not (args.all or args.package_names):
    common.error_and_exit('no package names supplied,\n'
                          ' try "conda remove -h" for more details',
                          json=args.json,
                          error_type="ValueError")
prefix = common.get_prefix(args)
if args.all and prefix == config.default_prefix:
    common.error_and_exit("cannot remove current environment. deactivate and run conda remove again")
common.check_write('remove', prefix, json=args.json)
common.ensure_override_channels_requires_channel(args, json=args.json)
channel_urls = args.channel or ()
if args.use_local:
    from conda.fetch import fetch_index
    from conda.utils import url_path
    try:
        from conda_build.config import croot
    except ImportError:
        common.error_and_exit("you need to have 'conda-build >= 1.7.1' installed"
                              " to use the --use-local option",
                              json=args.json,
                              error_type="RuntimeError")
    # remove the cache such that a refetch is made,
    # this is necessary because we add the local build repo URL
def execute_config(args, parser):
    """Entry point for ``conda config``.

    Reads, adds, sets, or removes keys in a .condarc file.  Without --force
    it edits the file textually (to preserve comments/structure) and then
    verifies the edited text parses to the same data that a pure-PyYAML
    rewrite would have produced, raising CouldntParse on any mismatch.
    """
    try:
        import yaml
    except ImportError:
        common.error_and_exit("pyyaml is required to modify configuration",
                              json=args.json, error_type="ImportError")

    json_warnings = []
    json_get = {}

    # Pick the target rc file: system-wide, explicit path, or per-user.
    if args.system:
        rc_path = config.sys_rc_path
    elif args.file:
        rc_path = args.file
    else:
        rc_path = config.user_rc_path

    # Create the file if it doesn't exist
    if not os.path.exists(rc_path):
        if args.add and 'channels' in list(zip(*args.add))[0] and not ['channels', 'defaults'] in args.add:
            # If someone adds a channel and their .condarc doesn't exist, make
            # sure it includes the defaults channel, or else they will end up
            # with a broken conda.
            rc_text = """\
channels:
  - defaults
"""
        else:
            rc_text = ""
    else:
        with open(rc_path, 'r') as rc:
            rc_text = rc.read()
    # NOTE(review): plain yaml.load (no SafeLoader) on the rc file -- the
    # file is user-owned, but consider yaml.safe_load; confirm.
    rc_config = yaml.load(rc_text)
    if rc_config is None:
        rc_config = {}

    # Get
    if args.get is not None:
        if args.get == []:
            # Bare --get lists every key present.
            args.get = sorted(rc_config.keys())
        for key in args.get:
            if key not in config.rc_list_keys + config.rc_bool_keys:
                if key not in config.rc_other:
                    if not args.json:
                        message = "unknown key %s" % key
                        print(message, file=sys.stderr)
                    else:
                        # NOTE(review): ``message`` is only assigned in the
                        # non-json branch above -- this looks like a latent
                        # UnboundLocalError under --json; confirm.
                        json_warnings.append(message)
                    continue
            if key not in rc_config:
                continue

            if args.json:
                json_get[key] = rc_config[key]
                continue

            if isinstance(rc_config[key], bool):
                print("--set", key, rc_config[key])
            else:
                # Note, since conda config --add prepends, these are printed in
                # the reverse order so that entering them in this order will
                # recreate the same file
                for item in reversed(rc_config.get(key, [])):
                    # Use repr so that it can be pasted back in to conda config --add
                    print("--add", key, repr(item))

    # PyYaml does not support round tripping, so if we use yaml.dump, it
    # will clear all comments and structure from the configuration file.
    # There are no yaml parsers that do this. Our best bet is to do a
    # simple parsing of the file ourselves. We can check the result at
    # the end to see if we did it right.

    # First, do it the pyyaml way
    new_rc_config = deepcopy(rc_config)

    # Add
    for key, item in args.add:
        if key not in config.rc_list_keys:
            common.error_and_exit("key must be one of %s, not %r" %
                                  (config.rc_list_keys, key),
                                  json=args.json,
                                  error_type="ValueError")
        if not isinstance(rc_config.get(key, []), list):
            raise CouldntParse("key %r should be a list, not %s." %
                               (key, rc_config[key].__class__.__name__))
        if item in rc_config.get(key, []):
            # Right now, all list keys should not contain duplicates
            message = "Skipping %s: %s, item already exists" % (key, item)
            if not args.json:
                print(message, file=sys.stderr)
            else:
                json_warnings.append(message)
            continue
        # --add prepends (see the --get note above).
        new_rc_config.setdefault(key, []).insert(0, item)

    # Set
    for key, item in args.set:
        # Parse the value with yaml so 'true'/'false'/'yes'/'no' all work.
        yamlitem = yaml.load(item)
        if not isinstance(yamlitem, bool):
            common.error_and_exit("%r is not a boolean" % item,
                                  json=args.json,
                                  error_type="TypeError")
        new_rc_config[key] = yamlitem

    # Remove
    for key, item in args.remove:
        if key not in new_rc_config:
            common.error_and_exit("key %r is not in the config file" % key,
                                  json=args.json,
                                  error_type="KeyError")
        if item not in new_rc_config[key]:
            common.error_and_exit("%r is not in the %r key of the config file" %
                                  (item, key),
                                  json=args.json,
                                  error_type="KeyError")
        new_rc_config[key] = [i for i in new_rc_config[key] if i != item]

    # Remove Key
    for key, in args.remove_key:
        if key not in new_rc_config:
            common.error_and_exit("key %r is not in the config file" % key,
                                  json=args.json,
                                  error_type="KeyError")
        del new_rc_config[key]

    if args.force:
        # Note, force will also remove any checking that the keys are in
        # config.rc_keys
        with open(rc_path, 'w') as rc:
            rc.write(yaml.dump(new_rc_config, default_flow_style=False))
        if args.json:
            common.stdout_json_success(rc_path=rc_path,
                                       warnings=json_warnings,
                                       get=json_get)
        return

    # Now, try to parse the condarc file.

    # Just support " key: " for now
    listkeyregexes = {key: re.compile(r"( *)%s *" % key)
                      for key in dict(args.add)}
    setkeyregexes = {key: re.compile(r"( *)%s( *):( *)" % key)
                     for key in dict(args.set)}

    new_rc_text = rc_text[:].split("\n")

    for key, item in args.add:
        if key not in config.rc_list_keys:
            common.error_and_exit("key must be one of %s, not %s" %
                                  (config.rc_list_keys, key),
                                  json=args.json,
                                  error_type="ValueError")

        if item in rc_config.get(key, []):
            # Skip duplicates. See above
            continue
        added = False
        for pos, line in enumerate(new_rc_text[:]):
            matched = listkeyregexes[key].match(line)
            if matched:
                leading_space = matched.group(1)
                # TODO: Try to guess how much farther to indent the
                # item. Right now, it is fixed at 2 spaces.
                new_rc_text.insert(pos + 1, "%s  - %s" % (leading_space, item))
                added = True
        if not added:
            if key in rc_config:
                # We should have found it above
                raise CouldntParse("existing list key couldn't be found")
            # TODO: Try to guess the correct amount of leading space for the
            # key. Right now it is zero.
            new_rc_text += ['%s:' % key, '  - %s' % item]
            if key == 'channels' and ['channels', 'defaults'] not in args.add:
                # If channels key is added for the first time, make sure it
                # includes 'defaults'
                new_rc_text += ['  - defaults']
                new_rc_config['channels'].append('defaults')

    for key, item in args.set:
        if key not in config.rc_bool_keys:
            common.error_and_exit("Error key must be one of %s, not %s" %
                                  (config.rc_bool_keys, key),
                                  json=args.json,
                                  error_type="ValueError")
        added = False
        for pos, line in enumerate(new_rc_text[:]):
            matched = setkeyregexes[key].match(line)
            if matched:
                # Preserve the original whitespace around the colon.
                leading_space = matched.group(1)
                precol_space = matched.group(2)
                postcol_space = matched.group(3)
                new_rc_text[pos] = '%s%s%s:%s%s' % (leading_space, key,
                                                    precol_space,
                                                    postcol_space, item)
                added = True
        if not added:
            if key in rc_config:
                raise CouldntParse("existing bool key couldn't be found")
            new_rc_text += ['%s: %s' % (key, item)]

    for key, item in args.remove:
        raise NotImplementedError("--remove without --force is not implemented "
                                  "yet")

    for key, in args.remove_key:
        raise NotImplementedError("--remove-key without --force is not "
                                  "implemented yet")

    if args.add or args.set:
        # Verify that the new rc text parses to the same thing as if we had
        # used yaml.
        try:
            parsed_new_rc_text = yaml.load('\n'.join(new_rc_text).strip('\n'))
        except yaml.parser.ParserError:
            raise CouldntParse("couldn't parse modified yaml")
        else:
            if not parsed_new_rc_text == new_rc_config:
                raise CouldntParse("modified yaml doesn't match what it "
                                   "should be")

    if args.add or args.set:
        with open(rc_path, 'w') as rc:
            rc.write('\n'.join(new_rc_text).strip('\n'))
            rc.write('\n')

    if args.json:
        common.stdout_json_success(rc_path=rc_path,
                                   warnings=json_warnings,
                                   get=json_get)
def install(args, parser, command='install'):
    """
    Shared entry point for ``conda install``, ``conda update`` and
    ``conda create``; the ``command`` argument selects which of the three
    behaviors is wanted.

    Errors terminate the process via common.error_and_exit /
    common.exception_and_exit; on success the computed plan is executed
    and the function returns None.
    """
    newenv = bool(command == 'create')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix, json=args.json)
    if config.force_32bit and plan.is_root_prefix(prefix):
        common.error_and_exit("cannot use CONDA_FORCE_32BIT=1 in root env")

    # 'conda update' needs something to act on: a --file, --all, or at
    # least one package name.
    if command == 'update':
        if not args.file:
            if not args.all and len(args.packages) == 0:
                common.error_and_exit("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix, json=args.json, error_type="ValueError")

    # Updating named packages: each must be a bare name (no '=version')
    # and already linked into the prefix.
    if command == 'update' and not args.all:
        linked = ci.linked(prefix)
        for name in args.packages:
            common.arg2spec(name, json=args.json)
            if '=' in name:
                common.error_and_exit("Invalid package name: '%s'" % (name),
                                      json=args.json,
                                      error_type="ValueError")
            if name not in set(ci.name_dist(d) for d in linked):
                common.error_and_exit("package '%s' is not installed in %s" %
                                      (name, prefix),
                                      json=args.json,
                                      error_type="ValueError")

    if newenv and not args.no_default_packages:
        default_packages = config.create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in config.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)
    else:
        default_packages = []

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    # Build the list of match specs from --file, --all, and positional args.
    specs = []
    if args.file:
        for fpath in args.file:
            specs.extend(common.specs_from_url(fpath, json=args.json))
    elif getattr(args, 'all', False):
        linked = ci.linked(prefix)
        if not linked:
            common.error_and_exit("There are no packages installed in the "
                                  "prefix %s" % prefix)
        for pkg in linked:
            name, ver, build = pkg.rsplit('-', 2)
            if name in getattr(args, '_skip', ['anaconda']):
                continue
            if name == 'python' and ver.startswith('2'):
                # Oh Python 2...
                specs.append('%s >=%s,<3' % (name, ver))
            else:
                specs.append('%s' % name)
    specs.extend(common.specs_from_args(args.packages, json=args.json))

    if command == 'install' and args.revision:
        get_revision(args.revision, json=args.json)
    elif not (newenv and args.clone):
        common.check_specs(prefix, specs, json=args.json,
                           create=(command == 'create'))

    # Explicit .tar.bz2 filenames bypass the solver: install them directly
    # and keep only their dependencies as specs.
    num_cp = sum(s.endswith('.tar.bz2') for s in args.packages)
    if num_cp:
        if num_cp == len(args.packages):
            depends = misc.install_local_packages(prefix, args.packages,
                                                  verbose=not args.quiet)
            if args.no_deps:
                depends = []
            specs = list(set(depends))
            args.unknown = True
        else:
            common.error_and_exit(
                "cannot mix specifications with conda package filenames",
                json=args.json,
                error_type="ValueError")

    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            depends = install_tar(prefix, tar_path,
                                  verbose=not args.quiet)
            if args.no_deps:
                depends = []
            specs = list(set(depends))
            args.unknown = True

    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit(
                "you need to have 'conda-build >= 1.7.1' installed"
                " to use the --use-local option",
                json=args.json,
                error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        if exists(croot):
            channel_urls = [url_path(croot)] + list(channel_urls)

    index = common.get_index_trap(channel_urls=channel_urls,
                                  prepend=not args.override_channels,
                                  use_cache=args.use_index_cache,
                                  unknown=args.unknown,
                                  json=args.json,
                                  offline=args.offline)

    # --clone copies an existing environment and returns early; no solve.
    if newenv and args.clone:
        if set(args.packages) - set(default_packages):
            common.error_and_exit('did not expect any arguments for --clone',
                                  json=args.json,
                                  error_type="ValueError")
        clone(args.clone, prefix, json=args.json, quiet=args.quiet,
              index=index)
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)
        return

    # Don't update packages that are already up-to-date
    if command == 'update' and not (args.all or args.force):
        r = Resolve(index)
        orig_packages = args.packages[:]
        for name in orig_packages:
            installed_metadata = [ci.is_linked(prefix, dist)
                                  for dist in linked]
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            build_inst = [m['build_number'] for m in installed_metadata
                          if m['name'] == name]
            try:
                # A linked package should appear exactly once.
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
            except AssertionError as e:
                if args.json:
                    common.exception_and_exit(e, json=True)
                else:
                    raise
            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]
            if (latest.version == vers_inst[0] and
                    latest.build_number == build_inst[0]):
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import print_packages
            if not args.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(
                    message='All requested packages already installed.')
            return

    if args.force:
        args.no_deps = True

    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                common.error_and_exit("Error: could not create directory: %s" %
                                      prefix,
                                      json=args.json,
                                      error_type="OSError")
        else:
            common.error_and_exit("""\
environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix,
                                  json=args.json,
                                  error_type="NoEnvironmentFound")

    try:
        if command == 'install' and args.revision:
            actions = plan.revert_actions(prefix, get_revision(args.revision))
        else:
            with common.json_progress_bars(json=args.json and not args.quiet):
                actions = plan.install_actions(prefix, index, specs,
                                               force=args.force,
                                               only_names=only_names,
                                               pinned=args.pinned,
                                               minimal_hint=args.alt_hint,
                                               update_deps=args.update_deps)
            if config.always_copy or args.copy:
                # Rewrite every LINK action to use copy instead of link.
                new_link = []
                for pkg in actions["LINK"]:
                    dist, pkgs_dir, lt = inst.split_linkarg(pkg)
                    lt = ci.LINK_COPY
                    new_link.append("%s %s %d" % (dist, pkgs_dir, lt))
                actions["LINK"] = new_link
    except NoPackagesFound as e:
        error_message = e.args[0]

        if command == 'update' and args.all:
            # Packages not found here just means they were installed but
            # cannot be found any more. Just skip them.
            if not args.json:
                print("Warning: %s, skipping" % error_message)
            else:
                # Not sure what to do here
                pass
            args._skip = getattr(args, '_skip', ['anaconda'])
            for pkg in e.pkgs:
                p = pkg.split()[0]
                if p in args._skip:
                    # Avoid infinite recursion. This can happen if a spec
                    # comes from elsewhere, like --file
                    raise
                args._skip.append(p)
            # Retry the whole install with the offending packages skipped.
            return install(args, parser, command=command)
        else:
            packages = {index[fn]['name'] for fn in index}

            for pkg in e.pkgs:
                close = get_close_matches(pkg, packages, cutoff=0.7)
                if close:
                    error_message += ("\n\nDid you mean one of these?"
                                      "\n\n %s" % (', '.join(close)))
                error_message += '\n\nYou can search for this package on anaconda.org with'
                error_message += '\n\n anaconda search -t conda %s' % pkg
                if len(e.pkgs) > 1:
                    # Note this currently only happens with dependencies not found
                    error_message += '\n\n (and similarly for the other packages)'

            if not find_executable('anaconda', include_others=False):
                error_message += '\n\nYou may need to install the anaconda-client command line client with'
                error_message += '\n\n conda install anaconda-client'

            pinned_specs = plan.get_pinned_specs(prefix)
            if pinned_specs:
                error_message += "\n\nNote that you have pinned specs in %s:" % \
                    join(prefix, 'conda-meta', 'pinned')
                error_message += "\n\n %r" % pinned_specs

            common.error_and_exit(error_message, json=args.json)
    except SystemExit as e:
        # Unsatisfiable package specifications/no such revision/import error
        error_type = 'UnsatisfiableSpecifications'
        if e.args and 'could not import' in e.args[0]:
            error_type = 'ImportError'
        common.exception_and_exit(e, json=args.json, newline=True,
                                  error_text=False,
                                  error_type=error_type)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import print_packages
        if not args.json:
            regex = '^(%s)$' % '|'.join(spec_names)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(
                message='All requested packages already installed.')
        return

    if not args.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        plan.display_actions(actions, index,
                             show_channel_urls=args.show_channel_urls)

    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    if not args.json:
        common.confirm_yn(args)
    elif args.dry_run:
        common.stdout_json_success(actions=actions, dry_run=True)
        sys.exit(0)

    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
            if not (command == 'update' and args.all):
                try:
                    # Record the requested specs in the environment history.
                    with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                        f.write('# %s specs: %s\n' % (command, specs))
                except IOError as e:
                    if e.errno == errno.EACCES:
                        # Read-only environment: history is best-effort.
                        log.debug("Can't write the history file")
                    else:
                        raise
        except RuntimeError as e:
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            common.exception_and_exit(e, error_type=error_type, json=args.json)
        except SystemExit as e:
            common.exception_and_exit(e, json=args.json)

    if newenv:
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)

    if args.json:
        common.stdout_json_success(actions=actions)
def execute_search(args, parser):
    """
    Implement ``conda search``: match packages in the channel index
    against a regex or spec and print (or JSON-emit) the matches.
    """
    import re
    from conda.resolve import MatchSpec, Resolve

    if args.reverse_dependency:
        if not args.regex:
            parser.error("--reverse-dependency requires at least one package name")
        if args.spec:
            parser.error("--reverse-dependency does not work with --spec")

    # pat: compiled regex for name matching; ms: MatchSpec when --spec is
    # used. At most one of the two is set.
    pat = None
    ms = None
    if args.regex:
        if args.spec:
            ms = MatchSpec(' '.join(args.regex.split('=')))
        else:
            regex = args.regex
            if args.full_name:
                regex = r'^%s$' % regex
            try:
                pat = re.compile(regex, re.I)
            except re.error as e:
                common.error_and_exit(
                    "'%s' is not a valid regex pattern (exception: %s)" %
                    (regex, e),
                    json=args.json,
                    error_type="ValueError")

    prefix = common.get_prefix(args)

    import conda.config
    import conda.install

    # Linked dists in the prefix and extracted dists in the package caches,
    # used below to annotate each hit ('*' linked, '.' extracted).
    linked = conda.install.linked(prefix)
    extracted = set()
    for pkgs_dir in conda.config.pkgs_dirs:
        extracted.update(conda.install.extracted(pkgs_dir))

    # XXX: Make this work with more than one platform
    platform = args.platform or ''
    if platform and platform != config.subdir:
        args.unknown = False
    common.ensure_override_channels_requires_channel(args, dashc=False,
                                                    json=args.json)
    channel_urls = args.channel or ()
    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit("you need to have 'conda-build >= 1.7.1' installed"
                                  " to use the --use-local option",
                                  json=args.json,
                                  error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        index = common.get_index_trap(channel_urls=[url_path(croot)] +
                                      list(channel_urls),
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      unknown=args.unknown,
                                      json=args.json,
                                      platform=args.platform,
                                      offline=args.offline)
    else:
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      platform=args.platform,
                                      use_cache=args.use_index_cache,
                                      unknown=args.unknown,
                                      json=args.json,
                                      offline=args.offline)

    r = Resolve(index)
    # NOTE(review): this local deliberately(?) shadows the builtin name
    # 'json'; it accumulates the --json output (list for --canonical,
    # dict keyed by package name otherwise).
    if args.canonical:
        json = []
    else:
        json = {}

    names = []
    for name in sorted(r.groups):
        if args.reverse_dependency:
            ms_name = ms
            # Collect every package whose dependencies match the pattern.
            for pkg in r.groups[name]:
                for dep in r.ms_depends(pkg):
                    if pat.search(dep.name):
                        names.append((name, Package(pkg, r.index[pkg])))
        else:
            if pat and pat.search(name) is None:
                continue
            if ms and name != ms.name:
                continue

            if ms:
                ms_name = ms
            else:
                ms_name = MatchSpec(name)

            pkgs = sorted(r.get_pkgs(ms_name))
            names.append((name, pkgs))

    if args.reverse_dependency:
        # Group consecutive hits for the same name into (name, [pkgs]).
        new_names = []
        old = None
        for name, pkg in sorted(names, key=lambda x: (x[0], x[1].name, x[1])):
            if name == old:
                new_names[-1][1].append(pkg)
            else:
                new_names.append((name, [pkg]))
            old = name
        names = new_names

    for name, pkgs in names:
        if args.reverse_dependency:
            disp_name = pkgs[0].name
        else:
            disp_name = name

        if args.names_only and not args.outdated:
            print(name)
            continue

        if not args.canonical:
            json[name] = []

        if args.outdated:
            # Only report names installed at an older version than the
            # newest one available in the index.
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            if not vers_inst:
                continue
            assert len(vers_inst) == 1, name
            if not pkgs:
                continue
            latest = pkgs[-1]
            if latest.version == vers_inst[0]:
                continue
            if args.names_only:
                print(name)
                continue

        for pkg in pkgs:
            # Strip the '.tar.bz2' suffix (8 chars) to get the dist name.
            dist = pkg.fn[:-8]
            if args.canonical:
                if not args.json:
                    print(dist)
                else:
                    json.append(dist)
                continue
            # Install-state marker: '*' linked, '.' extracted, ' ' neither
            # (always ' ' when searching a foreign platform).
            if platform and platform != config.subdir:
                inst = ' '
            elif dist in linked:
                inst = '*'
            elif dist in extracted:
                inst = '.'
            else:
                inst = ' '

            if not args.json:
                print('%-25s %s %-15s %15s %-15s %s' % (
                    disp_name, inst,
                    pkg.version,
                    pkg.build,
                    config.canonical_channel_name(pkg.channel),
                    common.disp_features(r.features(pkg.fn)),
                ))
                # Only show the name on the first row of its group.
                disp_name = ''
            else:
                data = {}
                data.update(pkg.info)
                data.update({
                    'fn': pkg.fn,
                    'installed': inst == '*',
                    'extracted': inst in '*.',
                    'version': pkg.version,
                    'build': pkg.build,
                    'build_number': pkg.build_number,
                    'channel': config.canonical_channel_name(pkg.channel),
                    'full_channel': pkg.channel,
                    'features': list(r.features(pkg.fn)),
                    'license': pkg.info.get('license'),
                    'size': pkg.info.get('size'),
                    'depends': pkg.info.get('depends'),
                    'type': pkg.info.get('type')
                })

                if data['type'] == 'app':
                    data['icon'] = make_icon_url(pkg.info)
                json[name].append(data)

    if args.json:
        common.stdout_json(json)
def execute_config(args, parser):
    """
    Implement ``conda config``: get, add, set, remove, and remove-key
    operations against a .condarc file.

    Without --force the file is edited textually (regex-based) so that
    comments and layout survive, then the result is verified against a
    pyyaml round-trip; --force rewrites the file with yaml.dump.

    Fix: ``message`` is now assigned before the json/non-json branch in
    the --get unknown-key path; previously the --json branch referenced
    ``message`` before assignment (NameError, or a stale message).
    """
    try:
        import yaml
    except ImportError:
        common.error_and_exit("pyyaml is required to modify configuration",
                              json=args.json, error_type="ImportError")

    json_warnings = []
    json_get = {}

    if args.system:
        rc_path = config.sys_rc_path
    elif args.file:
        rc_path = args.file
    else:
        rc_path = config.user_rc_path

    # Create the file if it doesn't exist
    if not os.path.exists(rc_path):
        if args.add and 'channels' in list(zip(*args.add))[0] and not \
                ['channels', 'defaults'] in args.add:
            # If someone adds a channel and their .condarc doesn't exist, make
            # sure it includes the defaults channel, or else they will end up
            # with a broken conda.
            rc_text = """\
channels:
 - defaults
"""
        else:
            rc_text = ""
    else:
        with open(rc_path, 'r') as rc:
            rc_text = rc.read()
    rc_config = yaml.load(rc_text)
    if rc_config is None:
        rc_config = {}

    # Get
    if args.get is not None:
        if args.get == []:
            args.get = sorted(rc_config.keys())
        for key in args.get:
            if key not in config.rc_list_keys + config.rc_bool_keys:
                if key not in config.rc_other:
                    # BUGFIX: build the message unconditionally; the json
                    # branch previously used it before assignment.
                    message = "unknown key %s" % key
                    if not args.json:
                        print(message, file=sys.stderr)
                    else:
                        json_warnings.append(message)
                continue
            if key not in rc_config:
                continue

            if args.json:
                json_get[key] = rc_config[key]
                continue

            if isinstance(rc_config[key], bool):
                print("--set", key, rc_config[key])
            else:
                # Note, since conda config --add prepends, these are printed in
                # the reverse order so that entering them in this order will
                # recreate the same file
                for item in reversed(rc_config.get(key, [])):
                    # Use repr so that it can be pasted back in to conda config --add
                    print("--add", key, repr(item))

    # PyYaml does not support round tripping, so if we use yaml.dump, it
    # will clear all comments and structure from the configuration file.
    # There are no yaml parsers that do this. Our best bet is to do a
    # simple parsing of the file ourselves. We can check the result at
    # the end to see if we did it right.

    # First, do it the pyyaml way
    new_rc_config = deepcopy(rc_config)

    # Add
    for key, item in args.add:
        try:
            if item in rc_config.get(key, []):
                # Right now, all list keys should not contain duplicates
                message = "Skipping %s: %s, item already exists" % (key, item)
                if not args.json:
                    print(message, file=sys.stderr)
                else:
                    json_warnings.append(message)
                continue
        except TypeError:
            common.error_and_exit("key must be one of %s, not %s" %
                                  (config.rc_list_keys, key), json=args.json,
                                  error_type="ValueError")
        new_rc_config.setdefault(key, []).insert(0, item)

    # Set
    for key, item in args.set:
        yamlitem = yaml.load(item)
        if not isinstance(yamlitem, bool):
            common.error_and_exit("%r is not a boolean" % item, json=args.json,
                                  error_type="TypeError")

        new_rc_config[key] = yamlitem

    # Remove
    for key, item in args.remove:
        if key not in new_rc_config:
            common.error_and_exit("key %r is not in the config file" % key,
                                  json=args.json, error_type="KeyError")
        if item not in new_rc_config[key]:
            common.error_and_exit("%r is not in the %r key of the config file" %
                                  (item, key), json=args.json,
                                  error_type="KeyError")
        new_rc_config[key] = [i for i in new_rc_config[key] if i != item]

    # Remove Key
    for key, in args.remove_key:
        if key not in new_rc_config:
            common.error_and_exit("key %r is not in the config file" % key,
                                  json=args.json, error_type="KeyError")
        del new_rc_config[key]

    if args.force:
        # Note, force will also remove any checking that the keys are in
        # config.rc_keys
        with open(rc_path, 'w') as rc:
            rc.write(yaml.dump(new_rc_config, default_flow_style=False))
        if args.json:
            common.stdout_json_success(rc_path=rc_path,
                                       warnings=json_warnings,
                                       get=json_get)
        return

    # Now, try to parse the condarc file.

    # Just support "   key:  " for now
    listkeyregexes = {key: re.compile(r"( *)%s *" % key)
                      for key in dict(args.add)}
    setkeyregexes = {key: re.compile(r"( *)%s( *):( *)" % key)
                     for key in dict(args.set)}

    new_rc_text = rc_text[:].split("\n")

    for key, item in args.add:
        if key not in config.rc_list_keys:
            common.error_and_exit("key must be one of %s, not %s" %
                                  (config.rc_list_keys, key), json=args.json,
                                  error_type="ValueError")

        if item in rc_config.get(key, []):
            # Skip duplicates. See above
            continue
        added = False
        for pos, line in enumerate(new_rc_text[:]):
            matched = listkeyregexes[key].match(line)
            if matched:
                leading_space = matched.group(1)
                # TODO: Try to guess how much farther to indent the
                # item. Right now, it is fixed at 2 spaces.
                new_rc_text.insert(pos + 1, "%s - %s" % (leading_space, item))
                added = True
        if not added:
            if key in rc_config:
                # We should have found it above
                raise CouldntParse("existing list key couldn't be found")
            # TODO: Try to guess the correct amount of leading space for the
            # key. Right now it is zero.
            new_rc_text += ['%s:' % key, ' - %s' % item]
            if key == 'channels' and ['channels', 'defaults'] not in args.add:
                # If channels key is added for the first time, make sure it
                # includes 'defaults'
                new_rc_text += [' - defaults']
                new_rc_config['channels'].append('defaults')

    for key, item in args.set:
        if key not in config.rc_bool_keys:
            common.error_and_exit("Error key must be one of %s, not %s" %
                                  (config.rc_bool_keys, key), json=args.json,
                                  error_type="ValueError")
        added = False
        for pos, line in enumerate(new_rc_text[:]):
            matched = setkeyregexes[key].match(line)
            if matched:
                leading_space = matched.group(1)
                precol_space = matched.group(2)
                postcol_space = matched.group(3)
                new_rc_text[pos] = '%s%s%s:%s%s' % (leading_space, key,
                                                    precol_space,
                                                    postcol_space, item)
                added = True
        if not added:
            if key in rc_config:
                raise CouldntParse("existing bool key couldn't be found")
            new_rc_text += ['%s: %s' % (key, item)]

    for key, item in args.remove:
        raise NotImplementedError("--remove without --force is not implemented "
                                  "yet")

    for key, in args.remove_key:
        raise NotImplementedError("--remove-key without --force is not "
                                  "implemented yet")

    if args.add or args.set:
        # Verify that the new rc text parses to the same thing as if we had
        # used yaml.
        try:
            parsed_new_rc_text = yaml.load('\n'.join(new_rc_text).strip('\n'))
        except yaml.parser.ParserError:
            raise CouldntParse("couldn't parse modified yaml")
        else:
            if not parsed_new_rc_text == new_rc_config:
                raise CouldntParse("modified yaml doesn't match what it "
                                   "should be")

    if args.add or args.set:
        with open(rc_path, 'w') as rc:
            rc.write('\n'.join(new_rc_text).strip('\n'))
            rc.write('\n')

    if args.json:
        common.stdout_json_success(rc_path=rc_path, warnings=json_warnings,
                                   get=json_get)
def execute_config(args, parser):
    """
    Implement ``conda config`` (pyyaml-dump variant): apply --get, --add,
    --set, --remove, and --remove-key against the selected .condarc file,
    then rewrite it with ``yaml_dump``.
    """
    json_warnings = []
    json_get = {}

    if args.system:
        rc_path = sys_rc_path
    elif args.file:
        rc_path = args.file
    else:
        rc_path = user_rc_path

    # Create the file if it doesn't exist
    if not os.path.exists(rc_path):
        has_defaults = ['channels', 'defaults'] in args.add
        if args.add and 'channels' in list(zip(*args.add))[0] and not \
                has_defaults:
            # If someone adds a channel and their .condarc doesn't exist, make
            # sure it includes the defaults channel, or else they will end up
            # with a broken conda.
            rc_text = """\
channels:
 - defaults
"""
        else:
            rc_text = ""
    else:
        with open(rc_path, 'r') as rc:
            rc_text = rc.read()
    rc_config = yaml_load(rc_text)
    if rc_config is None:
        rc_config = {}

    # Get
    if args.get is not None:
        if args.get == []:
            # Bare --get dumps every key present in the file.
            args.get = sorted(rc_config.keys())
        for key in args.get:
            if key not in rc_list_keys + rc_bool_keys + rc_string_keys:
                if key not in rc_other:
                    message = "unknown key %s" % key
                    if not args.json:
                        print(message, file=sys.stderr)
                    else:
                        json_warnings.append(message)
                continue
            if key not in rc_config:
                continue

            if args.json:
                json_get[key] = rc_config[key]
                continue

            if isinstance(rc_config[key], (bool, string_types)):
                print("--set", key, rc_config[key])
            else:
                # Note, since conda config --add prepends, these are printed in
                # the reverse order so that entering them in this order will
                # recreate the same file
                for item in reversed(rc_config.get(key, [])):
                    # Use repr so that it can be pasted back in to conda config --add
                    print("--add", key, repr(item))

    # Add
    for key, item in args.add:
        if key not in rc_list_keys:
            common.error_and_exit("key must be one of %s, not %r" %
                                  (', '.join(rc_list_keys), key),
                                  json=args.json,
                                  error_type="ValueError")
        if not isinstance(rc_config.get(key, []), list):
            # The key exists in the file but with a non-list value.
            bad = rc_config[key].__class__.__name__
            raise CouldntParse("key %r should be a list, not %s." %
                               (key, bad))
        if key == 'default_channels' and rc_path != sys_rc_path:
            msg = "'default_channels' is only configurable for system installs"
            raise NotImplementedError(msg)
        if item in rc_config.get(key, []):
            # Right now, all list keys should not contain duplicates
            message = "Skipping %s: %s, item already exists" % (key, item)
            if not args.json:
                print(message, file=sys.stderr)
            else:
                json_warnings.append(message)
            continue
        rc_config.setdefault(key, []).insert(0, item)

    # Set
    set_bools, set_strings = set(rc_bool_keys), set(rc_string_keys)
    for key, item in args.set:
        # Check key and value
        yamlitem = yaml_load(item)
        if key in set_bools:
            if not isinstance(yamlitem, bool):
                common.error_and_exit("Key: %s; %s is not a YAML boolean." %
                                      (key, item), json=args.json,
                                      error_type="TypeError")
            rc_config[key] = yamlitem
        elif key in set_strings:
            rc_config[key] = yamlitem
        else:
            common.error_and_exit("Error key must be one of %s, not %s" %
                                  (', '.join(set_bools | set_strings), key),
                                  json=args.json,
                                  error_type="ValueError")

    # Remove
    for key, item in args.remove:
        if key not in rc_config:
            common.error_and_exit("key %r is not in the config file" % key,
                                  json=args.json, error_type="KeyError")
        if item not in rc_config[key]:
            common.error_and_exit("%r is not in the %r key of the config file" %
                                  (item, key), json=args.json,
                                  error_type="KeyError")
        rc_config[key] = [i for i in rc_config[key] if i != item]

    # Remove Key
    for key, in args.remove_key:
        if key not in rc_config:
            common.error_and_exit("key %r is not in the config file" % key,
                                  json=args.json, error_type="KeyError")
        del rc_config[key]

    # config.rc_keys
    # NOTE(review): unlike the regex-editing variant, this rewrite drops
    # any comments/layout present in the original file (yaml_dump output).
    with open(rc_path, 'w') as rc:
        rc.write(yaml_dump(rc_config))

    if args.json:
        common.stdout_json_success(rc_path=rc_path, warnings=json_warnings,
                                   get=json_get)
    return
def install(args, parser, command='install'):
    """
    Shared entry point for ``conda install``, ``conda update`` and
    ``conda create``; ``command`` selects the behavior.

    This variant rejects ``update --all`` combined with package names,
    and honors --copy (but not a global always-copy setting) for LINK
    actions.
    """
    newenv = bool(command == 'create')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix, json=args.json)
    if config.force_32bit and plan.is_root_prefix(prefix):
        common.error_and_exit("cannot use CONDA_FORCE_32BIT=1 in root env")

    # 'conda update' argument validation: --all excludes explicit names;
    # otherwise a --file or at least one package name is required.
    if command == 'update':
        if args.all:
            if args.packages:
                common.error_and_exit("""--all cannot be used with packages""",
                                      json=args.json,
                                      error_type="ValueError")
        elif not args.file:
            if len(args.packages) == 0:
                common.error_and_exit("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix, json=args.json, error_type="ValueError")

    # Each named update target must be a bare name already linked here.
    if command == 'update':
        linked = ci.linked(prefix)
        for name in args.packages:
            common.arg2spec(name, json=args.json)
            if '=' in name:
                common.error_and_exit("Invalid package name: '%s'" % (name),
                                      json=args.json,
                                      error_type="ValueError")
            if name not in set(ci.name_dist(d) for d in linked):
                common.error_and_exit("package '%s' is not installed in %s" %
                                      (name, prefix),
                                      json=args.json,
                                      error_type="ValueError")

    if newenv and not args.no_default_packages:
        default_packages = config.create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in config.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)
    else:
        default_packages = []

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    # Build the list of match specs from --file, --all, and positional args.
    specs = []
    if args.file:
        for fpath in args.file:
            specs.extend(common.specs_from_url(fpath, json=args.json))
    elif getattr(args, 'all', False):
        linked = ci.linked(prefix)
        if not linked:
            common.error_and_exit("There are no packages installed in the "
                                  "prefix %s" % prefix)
        for pkg in linked:
            name, ver, build = pkg.rsplit('-', 2)
            if name in getattr(args, '_skip', ['anaconda']):
                continue
            if name == 'python' and ver.startswith('2'):
                # Oh Python 2...
                specs.append('%s >=%s,<3' % (name, ver))
            else:
                specs.append('%s' % name)
    specs.extend(common.specs_from_args(args.packages, json=args.json))

    if command == 'install' and args.revision:
        get_revision(args.revision, json=args.json)
    elif not (newenv and args.clone):
        common.check_specs(prefix, specs, json=args.json,
                           create=(command == 'create'))

    # Explicit .tar.bz2 filenames bypass the solver: install them directly
    # and keep only their dependencies as specs.
    num_cp = sum(s.endswith('.tar.bz2') for s in args.packages)
    if num_cp:
        if num_cp == len(args.packages):
            depends = misc.install_local_packages(prefix, args.packages,
                                                  verbose=not args.quiet)
            if args.no_deps:
                depends = []
            specs = list(set(depends))
            args.unknown = True
        else:
            common.error_and_exit(
                "cannot mix specifications with conda package filenames",
                json=args.json,
                error_type="ValueError")

    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            depends = install_tar(prefix, tar_path,
                                  verbose=not args.quiet)
            if args.no_deps:
                depends = []
            specs = list(set(depends))
            args.unknown = True

    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit(
                "you need to have 'conda-build >= 1.7.1' installed"
                " to use the --use-local option",
                json=args.json,
                error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        if exists(croot):
            channel_urls = [url_path(croot)] + list(channel_urls)

    index = common.get_index_trap(channel_urls=channel_urls,
                                  prepend=not args.override_channels,
                                  use_cache=args.use_index_cache,
                                  unknown=args.unknown,
                                  json=args.json,
                                  offline=args.offline)

    # --clone copies an existing environment and returns early; no solve.
    if newenv and args.clone:
        if set(args.packages) - set(default_packages):
            common.error_and_exit('did not expect any arguments for --clone',
                                  json=args.json,
                                  error_type="ValueError")
        clone(args.clone, prefix, json=args.json, quiet=args.quiet,
              index=index)
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)
        return

    # Don't update packages that are already up-to-date
    if command == 'update' and not (args.all or args.force):
        r = Resolve(index)
        orig_packages = args.packages[:]
        for name in orig_packages:
            installed_metadata = [ci.is_linked(prefix, dist)
                                  for dist in linked]
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            build_inst = [m['build_number'] for m in installed_metadata
                          if m['name'] == name]
            try:
                # A linked package should appear exactly once.
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
            except AssertionError as e:
                if args.json:
                    common.exception_and_exit(e, json=True)
                else:
                    raise
            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]
            if (latest.version == vers_inst[0] and
                    latest.build_number == build_inst[0]):
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import print_packages
            if not args.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(
                    message='All requested packages already installed.')
            return

    if args.force:
        args.no_deps = True

    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                common.error_and_exit("Error: could not create directory: %s" %
                                      prefix,
                                      json=args.json,
                                      error_type="OSError")
        else:
            common.error_and_exit("""\
environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix,
                                  json=args.json,
                                  error_type="NoEnvironmentFound")

    try:
        if command == 'install' and args.revision:
            actions = plan.revert_actions(prefix, get_revision(args.revision))
        else:
            with common.json_progress_bars(json=args.json and not args.quiet):
                actions = plan.install_actions(prefix, index, specs,
                                               force=args.force,
                                               only_names=only_names,
                                               pinned=args.pinned,
                                               minimal_hint=args.alt_hint)
            if args.copy:
                # Rewrite every LINK action to use copy instead of link.
                new_link = []
                for pkg in actions["LINK"]:
                    dist, pkgs_dir, lt = inst.split_linkarg(pkg)
                    lt = ci.LINK_COPY
                    new_link.append("%s %s %d" % (dist, pkgs_dir, lt))
                actions["LINK"] = new_link
    except NoPackagesFound as e:
        error_message = e.args[0]

        if command == 'update' and args.all:
            # Packages not found here just means they were installed but
            # cannot be found any more. Just skip them.
            if not args.json:
                print("Warning: %s, skipping" % error_message)
            else:
                # Not sure what to do here
                pass
            args._skip = getattr(args, '_skip', ['anaconda'])
            for pkg in e.pkgs:
                p = pkg.split()[0]
                if p in args._skip:
                    # Avoid infinite recursion. This can happen if a spec
                    # comes from elsewhere, like --file
                    raise
                args._skip.append(p)
            # Retry the whole install with the offending packages skipped.
            return install(args, parser, command=command)
        else:
            packages = {index[fn]['name'] for fn in index}

            for pkg in e.pkgs:
                close = get_close_matches(pkg, packages, cutoff=0.7)
                if close:
                    error_message += ("\n\nDid you mean one of these?"
                                      "\n\n %s" % (', '.join(close)))
                error_message += '\n\nYou can search for this package on anaconda.org with'
                error_message += '\n\n anaconda search -t conda %s' % pkg
                if len(e.pkgs) > 1:
                    # Note this currently only happens with dependencies not found
                    error_message += '\n\n (and similarly for the other packages)'

            if not find_executable('anaconda', include_others=False):
                error_message += '\n\nYou may need to install the anaconda-client command line client with'
                error_message += '\n\n conda install anaconda-client'

            pinned_specs = plan.get_pinned_specs(prefix)
            if pinned_specs:
                error_message += "\n\nNote that you have pinned specs in %s:" % \
                    join(prefix, 'conda-meta', 'pinned')
                error_message += "\n\n %r" % pinned_specs

            common.error_and_exit(error_message, json=args.json)
    except SystemExit as e:
        # Unsatisfiable package specifications/no such revision/import error
        error_type = 'UnsatisfiableSpecifications'
        if e.args and 'could not import' in e.args[0]:
            error_type = 'ImportError'
        common.exception_and_exit(e, json=args.json, newline=True,
                                  error_text=False,
                                  error_type=error_type)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import print_packages
        if not args.json:
            regex = '^(%s)$' % '|'.join(spec_names)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(
                message='All requested packages already installed.')
        return

    if not args.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    if not args.json:
        common.confirm_yn(args)
    elif args.dry_run:
        common.stdout_json_success(actions=actions, dry_run=True)
        sys.exit(0)

    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
            if not (command == 'update' and args.all):
                try:
                    # Record the requested specs in the environment history.
                    with open(join(prefix, 'conda-meta', 'history'),
                              'a') as f:
                        f.write('# %s specs: %s\n' % (command, specs))
                except IOError as e:
                    if e.errno == errno.EACCES:
                        # Read-only environment: history is best-effort.
                        log.debug("Can't write the history file")
                    else:
                        raise
        except RuntimeError as e:
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            common.exception_and_exit(e, error_type=error_type, json=args.json)
        except SystemExit as e:
            common.exception_and_exit(e, json=args.json)

    if newenv:
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)

    if args.json:
        common.stdout_json_success(actions=actions)
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create

    Shared driver for the three commands; `command` selects behavior:
    'create' builds a new environment, 'update' requires the named
    packages to already be installed, 'install' adds packages to an
    existing environment.  Exits the process (via common.error_and_exit /
    exception_and_exit) on every error path.
    """
    newenv = bool(command == 'create')
    isupdate = bool(command == 'update')
    isinstall = bool(command == 'install')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    # For 'create' the prefix must not be searched for (it does not exist yet).
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix, json=args.json)
    if force_32bit and plan.is_root_prefix(prefix):
        common.error_and_exit("cannot use CONDA_FORCE_32BIT=1 in root env")
    if isupdate and not (args.file or args.all or args.packages):
        common.error_and_exit("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix,
                              json=args.json,
                              error_type="ValueError")

    linked = ci.linked(prefix)
    lnames = {ci.name_dist(d) for d in linked}
    if isupdate and not args.all:
        # 'conda update NAME' only makes sense for packages already installed.
        for name in args.packages:
            common.arg2spec(name, json=args.json, update=True)
            if name not in lnames:
                common.error_and_exit("Package '%s' is not installed in %s" %
                                      (name, prefix),
                                      json=args.json,
                                      error_type="ValueError")

    if newenv and not args.no_default_packages:
        default_packages = create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)
    else:
        default_packages = []

    common.ensure_use_local(args)
    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    # Collect specs from --file, --all, and positional arguments (in that order).
    specs = []
    if args.file:
        for fpath in args.file:
            specs.extend(common.specs_from_url(fpath, json=args.json))
        if '@EXPLICIT' in specs:
            # Explicit spec files bypass the solver entirely.
            misc.explicit(specs, prefix, verbose=not args.quiet)
            return
    elif getattr(args, 'all', False):
        if not linked:
            common.error_and_exit("There are no packages installed in the "
                                  "prefix %s" % prefix)
        specs.extend(nm for nm in lnames)
    specs.extend(common.specs_from_args(args.packages, json=args.json))

    if isinstall and args.revision:
        # Validate the revision number early; actions are built further below.
        get_revision(args.revision, json=args.json)
    elif not (newenv and args.clone):
        common.check_specs(prefix, specs, json=args.json,
                           create=(command == 'create'))

    # Direct .tar.bz2 filenames may not be mixed with name/version specs.
    num_cp = sum(s.endswith('.tar.bz2') for s in args.packages)
    if num_cp:
        if num_cp == len(args.packages):
            misc.explicit(args.packages, prefix, verbose=not args.quiet)
            return
        else:
            common.error_and_exit(
                "cannot mix specifications with conda package filenames",
                json=args.json,
                error_type="ValueError")

    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            install_tar(prefix, tar_path, verbose=not args.quiet)
            return

    if newenv and args.clone:
        # --clone copies an existing environment; extra package arguments
        # (other than the injected defaults) are rejected.
        if set(args.packages) - set(default_packages):
            common.error_and_exit('did not expect any arguments for --clone',
                                  json=args.json,
                                  error_type="ValueError")
        clone(args.clone, prefix, json=args.json, quiet=args.quiet,
              fetch_args={'use_cache': args.use_index_cache,
                          'unknown': args.unknown})
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)
        return

    index = common.get_index_trap(channel_urls=channel_urls,
                                  prepend=not args.override_channels,
                                  use_local=args.use_local,
                                  use_cache=args.use_index_cache,
                                  unknown=args.unknown,
                                  json=args.json,
                                  offline=args.offline,
                                  prefix=prefix)
    r = Resolve(index)
    # Keep a copy of the user-supplied specs before defaults are mixed in;
    # used below for --no-deps name filtering and the "already installed" regex.
    ospecs = list(specs)
    plan.add_defaults_to_specs(r, linked, specs, update=isupdate)

    # Don't update packages that are already up-to-date
    if isupdate and not (args.all or args.force):
        orig_packages = args.packages[:]
        installed_metadata = [ci.is_linked(prefix, dist) for dist in linked]
        for name in orig_packages:
            vers_inst = [m['version'] for m in installed_metadata
                         if m['name'] == name]
            build_inst = [m['build_number'] for m in installed_metadata
                          if m['name'] == name]
            try:
                # Exactly one linked distribution is expected per package name.
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
            except AssertionError as e:
                if args.json:
                    common.exception_and_exit(e, json=True)
                else:
                    raise

            pkgs = sorted(r.get_pkgs(name))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]
            if (latest.version == vers_inst[0] and
                    latest.build_number == build_inst[0]):
                # Installed version/build already matches the newest available.
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import print_packages
            if not args.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(
                    message='All requested packages already installed.')
            return

    if args.force:
        args.no_deps = True

    if args.no_deps:
        # Restrict the solver to exactly the names the user asked for.
        only_names = set(s.split()[0] for s in ospecs)
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                common.error_and_exit("Error: could not create directory: %s" % prefix,
                                      json=args.json,
                                      error_type="OSError")
        else:
            common.error_and_exit("""\
environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix,
                                  json=args.json,
                                  error_type="NoEnvironmentFound")

    try:
        if isinstall and args.revision:
            actions = plan.revert_actions(prefix, get_revision(args.revision))
        else:
            with common.json_progress_bars(json=args.json and not args.quiet):
                actions = plan.install_actions(prefix, index, specs,
                                               force=args.force,
                                               only_names=only_names,
                                               pinned=args.pinned,
                                               always_copy=args.copy,
                                               minimal_hint=args.alt_hint,
                                               update_deps=args.update_deps)
    except NoPackagesFound as e:
        error_message = e.args[0]

        if isupdate and args.all:
            # Packages not found here just means they were installed but
            # cannot be found any more. Just skip them.
            if not args.json:
                print("Warning: %s, skipping" % error_message)
            else:
                # Not sure what to do here
                pass
            args._skip = getattr(args, '_skip', ['anaconda'])
            for pkg in e.pkgs:
                p = pkg.split()[0]
                if p in args._skip:
                    # Avoid infinite recursion. This can happen if a spec
                    # comes from elsewhere, like --file
                    raise
                args._skip.append(p)

            # Retry with the offending packages recorded in args._skip.
            return install(args, parser, command=command)
        else:
            packages = {index[fn]['name'] for fn in index}

            nfound = 0
            for pkg in sorted(e.pkgs):
                pkg = pkg.split()[0]
                if pkg in packages:
                    continue
                close = get_close_matches(pkg, packages, cutoff=0.7)
                if not close:
                    continue
                if nfound == 0:
                    error_message += "\n\nClose matches found; did you mean one of these?\n"
                error_message += "\n    %s: %s" % (pkg, ', '.join(close))
                nfound += 1
            # NOTE(review): `pkg` below is whatever the loop left bound last,
            # not necessarily the package the user typed first.
            error_message += '\n\nYou can search for packages on anaconda.org with'
            error_message += '\n\n    anaconda search -t conda %s' % pkg
            if len(e.pkgs) > 1:
                # Note this currently only happens with dependencies not found
                error_message += '\n\n(and similarly for the other packages)'

            if not find_executable('anaconda', include_others=False):
                error_message += '\n\nYou may need to install the anaconda-client'
                error_message += ' command line client with'
                error_message += '\n\n    conda install anaconda-client'

            pinned_specs = plan.get_pinned_specs(prefix)
            if pinned_specs:
                path = join(prefix, 'conda-meta', 'pinned')
                error_message += "\n\nNote that you have pinned specs in %s:" % path
                error_message += "\n\n    %r" % pinned_specs

            common.error_and_exit(error_message, json=args.json)
    except (Unsatisfiable, SystemExit) as e:
        # Unsatisfiable package specifications/no such revision/import error
        error_type = 'UnsatisfiableSpecifications'
        if e.args and 'could not import' in e.args[0]:
            error_type = 'ImportError'
        common.exception_and_exit(e, json=args.json, newline=True,
                                  error_text=False,
                                  error_type=error_type)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import print_packages
        if not args.json:
            regex = '^(%s)$' % '|'.join(s.split()[0] for s in ospecs)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(
                message='All requested packages already installed.')
        return

    if not args.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        plan.display_actions(actions, index,
                             show_channel_urls=args.show_channel_urls)

    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    if not args.json:
        common.confirm_yn(args)
    elif args.dry_run:
        common.stdout_json_success(actions=actions, dry_run=True)
        sys.exit(0)

    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
            if not (command == 'update' and args.all):
                # Record the request in the environment's history file;
                # a permission error is tolerated (best-effort logging).
                try:
                    with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                        f.write('# %s specs: %s\n' % (command, specs))
                except IOError as e:
                    if e.errno == errno.EACCES:
                        log.debug("Can't write the history file")
                    else:
                        raise
        except RuntimeError as e:
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            common.exception_and_exit(e, error_type=error_type, json=args.json)
        except SystemExit as e:
            common.exception_and_exit(e, json=args.json)

    if newenv:
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)

    if args.json:
        common.stdout_json_success(actions=actions)
def execute(args, parser):
    """Entry point for ``conda remove``.

    Validates the arguments, builds a removal plan (by feature set, for
    the whole environment with --all, or from explicit specs), shows and
    confirms the plan, then executes it.  With --all the environment
    directory itself is also deleted.  Errors exit the process through
    ``common.error_and_exit``.
    """
    import sys
    import conda.plan as plan
    from conda.cli import pscheck
    from conda.install import rm_rf, linked
    from conda import config

    # Guard: the caller must name packages unless the whole env goes.
    if not (args.all or args.package_names):
        common.error_and_exit('no package names supplied,\n'
                              '       try "conda remove -h" for more details',
                              json=args.json,
                              error_type="ValueError")

    prefix = common.get_prefix(args)
    common.check_write('remove', prefix, json=args.json)
    common.ensure_override_channels_requires_channel(args, json=args.json)
    index = common.get_index_trap(channel_urls=args.channel or (),
                                  use_cache=args.use_index_cache,
                                  prepend=not args.override_channels,
                                  json=args.json)

    # Build the action plan for one of the three removal modes.
    if args.features:
        actions = plan.remove_features_actions(prefix, index,
                                               set(args.package_names))
    elif args.all:
        if plan.is_root_prefix(prefix):
            common.error_and_exit('cannot remove root environment,\n'
                                  '       add -n NAME or -p PREFIX option',
                                  json=args.json,
                                  error_type="CantRemoveRoot")
        # Unlink every package currently linked into the environment.
        actions = {plan.PREFIX: prefix, plan.UNLINK: sorted(linked(prefix))}
    else:
        specs = common.specs_from_args(args.package_names)
        in_root = plan.is_root_prefix(prefix)
        if in_root and common.names_in_specs(common.root_no_rm, specs):
            common.error_and_exit('cannot remove %s from root environment'
                                  % ', '.join(common.root_no_rm),
                                  json=args.json,
                                  error_type="CantRemoveFromRoot")
        actions = plan.remove_actions(prefix, specs, pinned=args.pinned)

    if plan.nothing_to_do(actions):
        if args.all:
            # No packages left to unlink, but --all still removes the tree.
            rm_rf(prefix)
            if args.json:
                common.stdout_json({'success': True, 'actions': actions})
            return
        common.error_and_exit('no packages found to remove from '
                              'environment: %s' % prefix,
                              json=args.json,
                              error_type="PackageNotInstalled")

    if not args.json:
        print()
        print("Package plan for package removal in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if args.json and args.dry_run:
        common.stdout_json({'success': True, 'dry_run': True,
                            'actions': actions})
        return

    if not args.json:
        # NOTE(review): confirmation is skipped when pscheck.main() returns
        # truthy — presumably it already interacted with the user; confirm
        # against pscheck's contract.
        if not pscheck.main(args):
            common.confirm_yn(args)
    elif (sys.platform == 'win32' and not args.force_pscheck and
          not pscheck.check_processes(verbose=False)):
        common.error_and_exit("Cannot continue removal while processes "
                              "from packages are running without --force-pscheck.",
                              json=True,
                              error_type="ProcessesStillRunning")

    # In json mode (unless quiet) wrap execution in json progress bars.
    if args.json and not args.quiet:
        with json_progress_bars():
            plan.execute_actions(actions, index, verbose=not args.quiet)
    else:
        plan.execute_actions(actions, index, verbose=not args.quiet)

    if args.all:
        rm_rf(prefix)

    if args.json:
        common.stdout_json({'success': True, 'actions': actions})
def execute(args, parser):
    """Entry point for ``conda remove`` (variant with --use-local support).

    Builds a removal plan (by feature set, whole environment, or explicit
    specs), displays and confirms it, executes it, and appends the removed
    specs to the environment's history file.  Errors exit the process via
    ``common.error_and_exit``.
    """
    import conda.plan as plan
    import conda.instructions as inst
    from conda.install import rm_rf, linked
    from conda import config

    # Guard: package names are required unless removing the whole env.
    if not (args.all or args.package_names):
        common.error_and_exit('no package names supplied,\n'
                              '       try "conda remove -h" for more details',
                              json=args.json,
                              error_type="ValueError")

    prefix = common.get_prefix(args)
    if args.all and prefix == config.default_prefix:
        # Removing the environment we are currently running from is refused.
        common.error_and_exit("cannot remove current environment. deactivate and run conda remove again")
    common.check_write('remove', prefix, json=args.json)
    common.ensure_override_channels_requires_channel(args, json=args.json)
    channel_urls = args.channel or ()
    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit(
                "you need to have 'conda-build >= 1.7.1' installed"
                " to use the --use-local option",
                json=args.json,
                error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        if exists(croot):
            # Prepend the local build root so locally built packages win.
            channel_urls = [url_path(croot)] + list(channel_urls)
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      json=args.json,
                                      offline=args.offline)
    else:
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      json=args.json,
                                      offline=args.offline)
    # `specs` stays None for the --features and --all modes; only explicit
    # spec removals are recorded in the history file below.
    specs = None
    if args.features:
        features = set(args.package_names)
        actions = plan.remove_features_actions(prefix, index, features)
    elif args.all:
        if plan.is_root_prefix(prefix):
            common.error_and_exit('cannot remove root environment,\n'
                                  '       add -n NAME or -p PREFIX option',
                                  json=args.json,
                                  error_type="CantRemoveRoot")
        actions = {inst.PREFIX: prefix}
        # Queue an unlink instruction for every linked package.
        for dist in sorted(linked(prefix)):
            plan.add_unlink(actions, dist)
    else:
        specs = common.specs_from_args(args.package_names)
        if (plan.is_root_prefix(prefix) and
                common.names_in_specs(common.root_no_rm, specs)):
            common.error_and_exit('cannot remove %s from root environment' %
                                  ', '.join(common.root_no_rm),
                                  json=args.json,
                                  error_type="CantRemoveFromRoot")
        actions = plan.remove_actions(prefix, specs, index=index,
                                      pinned=args.pinned)

    if plan.nothing_to_do(actions):
        if args.all:
            # Nothing to unlink, but --all still deletes the directory tree.
            rm_rf(prefix)
            if args.json:
                common.stdout_json({'success': True, 'actions': actions})
            return
        common.error_and_exit('no packages found to remove from '
                              'environment: %s' % prefix,
                              json=args.json,
                              error_type="PackageNotInstalled")

    if not args.json:
        print()
        print("Package plan for package removal in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if args.json and args.dry_run:
        common.stdout_json({
            'success': True,
            'dry_run': True,
            'actions': actions
        })
        return

    if not args.json:
        common.confirm_yn(args)

    if args.json and not args.quiet:
        with json_progress_bars():
            plan.execute_actions(actions, index, verbose=not args.quiet)
    else:
        plan.execute_actions(actions, index, verbose=not args.quiet)
        # NOTE(review): the history write sits inside this branch, so in
        # json+non-quiet mode removals are not recorded — confirm intended.
        if specs:
            try:
                with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                    f.write('# remove specs: %s\n' % specs)
            except IOError as e:
                if e.errno == errno.EACCES:
                    # Best-effort logging: a read-only env is tolerated.
                    log.debug("Can't write the history file")
                else:
                    raise

    if args.all:
        rm_rf(prefix)

    if args.json:
        common.stdout_json({'success': True, 'actions': actions})