def install_local_packages(prefix, paths, verbose=False):
    """
    Install local .tar.bz2 conda packages into the environment at `prefix`.

    Copies each package into the first package cache directory, then
    re-extracts and (re)links every package.  Returns the combined list of
    dependency spec strings read from each package's info/index.json.
    """
    # copy packages to pkgs dir
    pkgs_dir = config.pkgs_dirs[0]
    dists = []
    for src_path in paths:
        assert src_path.endswith('.tar.bz2')
        fn = basename(src_path)
        dists.append(fn[:-8])  # strip '.tar.bz2' to get the dist name
        dst_path = join(pkgs_dir, fn)
        if abspath(src_path) == abspath(dst_path):
            # already inside the package cache, no copy needed
            continue
        shutil.copyfile(src_path, dst_path)

    actions = defaultdict(list)
    actions['PREFIX'] = prefix
    # force a fresh extract, and unlink before relinking if already installed
    actions['op_order'] = RM_EXTRACTED, EXTRACT, UNLINK, LINK
    for dist in dists:
        actions[RM_EXTRACTED].append(dist)
        actions[EXTRACT].append(dist)
        if install.is_linked(prefix, dist):
            actions[UNLINK].append(dist)
        actions[LINK].append(dist)
    execute_actions(actions, verbose=verbose)

    depends = []
    for dist in dists:
        try:
            with open(join(pkgs_dir, dist, 'info', 'index.json')) as fi:
                meta = json.load(fi)
                depends.extend(meta['depends'])
        except (IOError, KeyError):
            # best-effort: skip packages with missing/unreadable metadata
            continue
    print('depends: %r' % depends)
    return depends
def install(prefix, specs, args, env, prune=False):
    """
    Install `specs` into the environment at `prefix`, honoring any
    'channel::package' prefixes in the specs and the channels from `env`.
    """
    # TODO: support all various ways this happens
    # Including 'nodefaults' in the channels list disables the defaults
    new_specs = []
    channel_urls = set()
    for elem in specs:
        if "::" in elem:
            # 'channel::spec' syntax: remember the channel, keep the bare spec
            channel_urls.add(elem.split("::")[0])
            new_specs.append(elem.split("::")[-1])
        else:
            new_specs.append(elem)
    specs = new_specs
    channel_urls = list(channel_urls)
    # TODO: support all various ways this happens
    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = channel_urls + [chan for chan in env.channels
                                   if chan != "nodefaults"]
    index = get_index(channel_urls=channel_urls,
                      prepend="nodefaults" not in env.channels,
                      prefix=prefix)
    _channel_priority_map = prioritize_channels(channel_urls)
    action_set = plan.install_actions_list(
        prefix, index, specs, prune=prune,
        channel_priority_map=_channel_priority_map
    )

    with common.json_progress_bars(json=args.json and not args.quiet):
        for actions in action_set:
            try:
                plan.execute_actions(actions, index, verbose=not args.quiet)
            except RuntimeError as e:
                # plan surfaces lock contention as a RuntimeError whose
                # message contains 'LOCKERROR'
                if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                    raise LockError("Already locked: %s" % text_type(e))
                else:
                    raise CondaRuntimeError("RuntimeError: %s" % e)
            except SystemExit as e:
                raise CondaSystemExit("Exiting", e)
def clone_bundle(path, prefix):
    """
    Clone the bundle (located at `path`) by creating a new environment at
    `prefix`.

    The directory `path` is located in should be some temp directory or
    some other directory OUTSIDE /opt/anaconda (this function handles
    copying of the file if necessary for you).  After calling this
    function, the original file (at `path`) may be removed.
    """
    assert not abspath(path).startswith(abspath(config.root_dir))
    assert not isdir(prefix)
    fn = basename(path)
    assert re.match(r'share-[0-9a-f]{40}-\d+\.tar\.bz2$', fn), fn
    dist = fn[:-8]  # strip '.tar.bz2'

    if not install.is_extracted(config.pkgs_dir, dist):
        shutil.copyfile(path, join(config.pkgs_dir, dist + '.tar.bz2'))
        plan.execute_plan(['%s %s' % (plan.EXTRACT, dist)])
    assert install.is_extracted(config.pkgs_dir, dist)
    with open(join(config.pkgs_dir, dist, 'info', 'index.json')) as fi:
        meta = json.load(fi)

    # for backwards compatibility, use "requires" when "depends" is not there
    # BUGFIX: default the inner get() to [] so a bundle with neither key
    # no longer raises TypeError when None is iterated
    dists = ['-'.join(r.split())
             for r in meta.get('depends', meta.get('requires', []))
             if not r.startswith('conda ')]
    dists.append(dist)

    actions = plan.ensure_linked_actions(dists, prefix)
    index = get_index()
    plan.display_actions(actions, index)
    plan.execute_actions(actions, index, verbose=True)

    # the bundle itself should not appear as an installed package in the
    # new environment
    os.unlink(join(prefix, 'conda-meta', dist + '.json'))
def clone_bundle(path, prefix=None, bundle_name=None):
    """
    Clone the bundle (located at `path`) by creating a new environment at
    `prefix` (unless prefix is None or the prefix directory already exists)
    """
    try:
        t = tarfile.open(path, 'r:*')
        meta = json.load(t.extractfile('info/index.json'))
    except tarfile.ReadError:
        raise RuntimeError('bad tar archive: %s' % path)
    except KeyError:
        raise RuntimeError("no archive 'info/index.json' in: %s" % (path))

    if prefix and not isdir(prefix):
        for m in t.getmembers():
            # skip bundle-data files and package metadata; everything else
            # is environment content restored directly under `prefix`
            if m.path.startswith((BDP, 'info/')):
                continue
            t.extract(m, path=prefix)
        dists = discard_conda('-'.join(s.split())
                              for s in meta.get('depends', []))
        actions = plan.ensure_linked_actions(dists, prefix)
        index = get_index()
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=True)

    bundle_dir = abspath(expanduser('~/bundles/%s' %
                                    (bundle_name or meta.get('bundle_name'))))
    for m in t.getmembers():
        if m.path.startswith(BDP):
            targetpath = join(bundle_dir, m.path[len(BDP):])
            # NOTE(review): relies on tarfile's private _extract_member
            # API; verify against the supported tarfile version
            t._extract_member(m, targetpath)

    t.close()
def create_env(prefix, specs, clear_cache=True, verbose=True,
               channel_urls=(), override_channels=False):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    if not isdir(config.bldpkgs_dir):
        os.makedirs(config.bldpkgs_dir)
    update_index(config.bldpkgs_dir)
    if specs:  # Don't waste time if there is nothing to do
        if clear_cache:
            # remove the cache such that a refetch is made,
            # this is necessary because we add the local build repo URL
            fetch_index.cache = {}
        index = get_index(channel_urls=[url_path(config.croot)] +
                          list(channel_urls),
                          prepend=not override_channels)
        warn_on_old_conda_build(index)
        cc.pkgs_dirs = cc.pkgs_dirs[:1]  # only use the first package cache
        actions = plan.install_actions(prefix, index, specs)
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=verbose)
    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)
def create_env(prefix, specs, clear_cache=True, verbose=True):
    '''
    Create a conda environment for the given prefix and specs.

    :param verbose: passed through to plan.execute_actions.  BUGFIX: the
        previous version referenced an undefined name `verbose`, raising
        NameError whenever `specs` was non-empty; adding the keyword
        parameter (default True) is backward-compatible for all callers.
    '''
    specs = list(specs)
    # enable any tracked features by appending 'feature@' specs
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)
    for d in config.bldpkgs_dirs:
        if not isdir(d):
            os.makedirs(d)
        update_index(d)
    if specs:  # Don't waste time if there is nothing to do
        index = get_build_index(clear_cache=True)
        warn_on_old_conda_build(index)
        cc.pkgs_dirs = cc.pkgs_dirs[:1]  # only use the first package cache
        actions = plan.install_actions(prefix, index, specs)
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=verbose)
    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)
def execute(args, parser):
    """Entry point for `conda remove`: plan and perform package removal."""
    import sys

    import conda.plan as plan
    from conda.api import get_index
    from conda.cli import pscheck
    from conda.install import rm_rf, linked

    if not (args.all or args.package_names):
        sys.exit('Error: no package names supplied,\n'
                 ' try "conda remove -h" for more details')

    prefix = common.get_prefix(args)
    common.check_write('remove', prefix)

    index = None
    if args.features:
        # removing tracked features requires the package index
        channel_urls = args.channel or ()
        common.ensure_override_channels_requires_channel(args)
        index = get_index(channel_urls=channel_urls,
                          prepend=not args.override_channels)
        features = set(args.package_names)
        actions = plan.remove_features_actions(prefix, index, features)

    elif args.all:
        if plan.is_root_prefix(prefix):
            sys.exit('Error: cannot remove root environment,\n'
                     ' add -n NAME or -p PREFIX option')
        # unlink everything currently installed in the environment
        actions = {plan.PREFIX: prefix,
                   plan.UNLINK: sorted(linked(prefix))}

    else:
        specs = common.specs_from_args(args.package_names)
        if (plan.is_root_prefix(prefix) and
                common.names_in_specs(common.root_no_rm, specs)):
            sys.exit('Error: cannot remove %s from root environment' %
                     ', '.join(common.root_no_rm))
        actions = plan.remove_actions(prefix, specs)

    if plan.nothing_to_do(actions):
        if args.all:
            # no packages to unlink; just delete the environment directory
            rm_rf(prefix)
            return
        sys.exit('Error: no packages found to remove from '
                 'environment: %s' % prefix)

    print()
    print("Package plan for package removal in environment %s:" % prefix)
    plan.display_actions(actions)

    if not pscheck.main(args):
        common.confirm_yn(args)

    plan.execute_actions(actions, index, verbose=not args.quiet)

    if args.all:
        rm_rf(prefix)
def execute(args, parser):
    """Entry point for `conda create`: build a new environment from specs."""
    import sys
    from os.path import exists

    import conda.plan as plan
    from conda.api import get_index

    if len(args.package_specs) == 0 and not args.file:
        sys.exit('Error: too few arguments, must supply command line '
                 'package specs or --file')

    common.ensure_name_or_prefix(args, 'create')
    prefix = common.get_prefix(args)

    if exists(prefix):
        # refuse to clobber an existing environment directory
        if args.prefix:
            raise RuntimeError("'%s' already exists, must supply new "
                               "directory for -p/--prefix" % prefix)
        else:
            raise RuntimeError("'%s' already exists, must supply new "
                               "directory for -n/--name" % prefix)

    if args.file:
        specs = common.specs_from_file(args.file)
    else:
        specs = common.specs_from_args(args.package_specs)

    common.check_specs(prefix, specs)

    channel_urls = args.channel or ()
    common.ensure_override_channels_requires_channel(args)
    index = get_index(channel_urls=channel_urls,
                      prepend=not args.override_channels)

    actions = plan.install_actions(prefix, index, specs)
    if plan.nothing_to_do(actions):
        print('No matching packages could be found, nothing to do')
        return

    print()
    print("Package plan for creating environment at %s:" % prefix)
    plan.display_actions(actions, index)

    common.confirm_yn(args)
    plan.execute_actions(actions, index, verbose=not args.quiet)

    if sys.platform != 'win32':
        # activation hint (`source activate` is POSIX-shell only)
        activate_name = prefix
        if args.name:
            activate_name = args.name
        print("#")
        print("# To activate this environment, use:")
        print("# $ source activate %s" % activate_name)
        print("#")
        print("# To deactivate this environment, use:")
        print("# $ source deactivate")
        print("#")
def execute(args, parser):
    """Entry point for `conda install`: install packages into an env."""
    import conda.plan as plan
    from conda.api import get_index
    from conda.cli import pscheck

    prefix = common.get_prefix(args)

    # handle explicit installs of conda packages (all args are filenames)
    if args.packages and all(s.endswith('.tar.bz2') for s in args.packages):
        from conda.misc import install_local_packages
        install_local_packages(prefix, args.packages,
                               verbose=not args.quiet)
        return

    if any(s.endswith('.tar.bz2') for s in args.packages):
        raise RuntimeError("cannot mix specifications with conda package "
                           "filenames")

    if args.force:
        # --force implies no dependency resolution
        args.no_deps = True

    if args.file:
        specs = common.specs_from_file(args.file)
    else:
        specs = common.specs_from_args(args.packages)

    common.check_specs(prefix, specs)

    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()
    index = get_index(channel_urls=channel_urls,
                      prepend=not args.override_channels)

    actions = plan.install_actions(prefix, index, specs,
                                   force=args.force, only_names=only_names)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import list_packages

        regex = '^(%s)$' % '|'.join(spec_names)
        print('# All requested packages already installed.')
        list_packages(prefix, regex)
        return

    print()
    print("Package plan for installation in environment %s:" % prefix)
    plan.display_actions(actions, index)

    if not pscheck.main(args):
        common.confirm_yn(args)

    plan.execute_actions(actions, index, verbose=not args.quiet)
def app_install(fn, prefix=config.root_dir):
    """
    Install the application `fn` into `prefix` (defaults to the root
    environment).
    """
    import conda.plan as plan

    spec = _fn2spec(fn)
    index = get_index()
    actions = plan.install_actions(prefix, index, [spec])
    plan.execute_actions(actions, index)
def execute(args, parser):
    """Entry point for `conda create`, with --clone support."""
    import conda.config as config
    import conda.plan as plan
    from conda.api import get_index
    from conda.misc import touch_nonadmin

    common.ensure_name_or_prefix(args, 'create')
    prefix = common.get_prefix(args, search=False)
    check_prefix(prefix)
    config.set_pkgs_dirs(prefix)

    if args.clone:
        # --clone copies an existing environment; no specs allowed
        if args.package_specs:
            sys.exit('Error: did not expect any arguments for --clone')
        clone(args.clone, prefix)
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
        return

    if not args.no_default_packages:
        args.package_specs.extend(config.create_default_packages)

    if len(args.package_specs) == 0 and not args.file:
        sys.exit('Error: too few arguments, must supply command line '
                 'package specs or --file')

    if args.file:
        specs = common.specs_from_url(args.file)
    else:
        specs = common.specs_from_args(args.package_specs)

    common.check_specs(prefix, specs)

    channel_urls = args.channel or ()
    common.ensure_override_channels_requires_channel(args)
    index = get_index(channel_urls=channel_urls,
                      prepend=not args.override_channels)

    actions = plan.install_actions(prefix, index, specs)
    if plan.nothing_to_do(actions):
        print('No matching packages could be found, nothing to do')
        return

    print()
    print("Package plan for creating environment at %s:" % prefix)
    plan.display_actions(actions, index)

    common.confirm_yn(args)
    plan.execute_actions(actions, index, verbose=not args.quiet)
    touch_nonadmin(prefix)
    print_activate(args.name if args.name else prefix)
def create_env(pref, specs):
    """Create a conda environment at `pref` containing `specs`.

    Refreshes the local build-package index first so that locally built
    packages are visible to the solver.
    """
    if not isdir(bldpkgs_dir):
        os.makedirs(bldpkgs_dir)
    update_index(bldpkgs_dir)
    # drop the cached index so the local build repo URL is re-fetched
    fetch_index.cache = {}
    repo_index = get_index([url_path(config.croot)])
    install_plan = plan.install_actions(pref, repo_index, specs)
    plan.display_actions(install_plan, repo_index)
    plan.execute_actions(install_plan, repo_index, verbose=True)
    # the prefix directory must exist even when specs is empty
    if not isdir(pref):
        os.makedirs(pref)
def execute(args, parser):
    """Entry point for `conda update`: update named installed packages."""
    import sys

    import conda.install as ci
    import conda.config as config
    import conda.plan as plan
    from conda.api import get_index
    from conda.cli import pscheck

    if len(args.pkg_names) == 0:
        sys.exit("""Error: no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update anaconda
""")

    prefix = common.get_prefix(args)
    config.set_pkgs_dirs(prefix)

    # names of packages currently linked into the environment
    linked = set(ci.name_dist(d) for d in ci.linked(prefix))
    for name in args.pkg_names:
        common.arg2spec(name)  # validates the argument; result unused here
        if '=' in name:
            sys.exit("Invalid package name: '%s'" % (name))
        if name not in linked:
            sys.exit("Error: package '%s' is not installed in %s" %
                     (name, prefix))

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()
    index = get_index(channel_urls=channel_urls,
                      prepend=not args.override_channels)
    actions = plan.install_actions(prefix, index, args.pkg_names)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import list_packages

        regex = '^(%s)$' % '|'.join(args.pkg_names)
        print('# All packages already at latest version, nothing to do.')
        list_packages(prefix, regex)
        return

    print("Updating conda environment at %s" % prefix)
    plan.display_actions(actions, index)

    common.check_write('update', prefix)

    if not pscheck.main(args):
        common.confirm_yn(args)

    plan.execute_actions(actions, index, verbose=not args.quiet)
def clone_env(prefix1, prefix2, verbose=True, quiet=False, index=None):
    """
    clone existing prefix1 into new prefix2

    Copies untracked files (rewriting embedded prefix paths in text files)
    and relinks the same set of packages, sorted in dependency order.
    Returns (actions, untracked_files).
    """
    untracked_files = untracked(prefix1)
    dists = discard_conda(install.linked(prefix1))

    if verbose:
        print('Packages: %d' % len(dists))
        print('Files: %d' % len(untracked_files))

    for f in untracked_files:
        src = join(prefix1, f)
        dst = join(prefix2, f)
        dst_dir = dirname(dst)
        # a file/symlink where the directory should be would block makedirs
        if islink(dst_dir) or isfile(dst_dir):
            os.unlink(dst_dir)
        if not isdir(dst_dir):
            os.makedirs(dst_dir)
        if islink(src):
            # recreate symlinks as-is instead of copying their targets
            os.symlink(os.readlink(src), dst)
            continue

        try:
            with open(src, 'rb') as fi:
                data = fi.read()
        except IOError:
            # unreadable file: skip it (best-effort clone)
            continue

        try:
            s = data.decode('utf-8')
            # rewrite embedded absolute paths to point at the new prefix
            s = s.replace(prefix1, prefix2)
            data = s.encode('utf-8')
        except UnicodeDecodeError:  # data is binary
            pass

        with open(dst, 'wb') as fo:
            fo.write(data)
        shutil.copystat(src, dst)

    if index is None:
        index = get_index()

    r = Resolve(index)
    # link in dependency order so each package's requirements come first
    sorted_dists = r.dependency_sort(dists)

    actions = ensure_linked_actions(sorted_dists, prefix2)
    execute_actions(actions, index=index, verbose=not quiet)

    return actions, untracked_files
def create_env(pref, specs):
    """Create a conda environment at `pref` containing `specs`."""
    if not isdir(bldpkgs_dir):
        os.makedirs(bldpkgs_dir)
    update_index(bldpkgs_dir)
    # remove the cache such that a refetch is made,
    # this is necessary because we add the local build repo URL
    fetch_index.cache = {}
    pkg_index = get_index([url_path(config.croot)])
    install_plan = plan.install_actions(pref, pkg_index, specs)
    plan.display_actions(install_plan, pkg_index)
    plan.execute_actions(install_plan, pkg_index, verbose=True)
    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(pref):
        os.makedirs(pref)
def test_install_prune(self):
    """Installing with prune=True drops packages no longer required."""
    with make_temp_env("python=2 decorator") as prefix:
        assert_package_is_installed(prefix, 'decorator')

        # prune is a feature used by conda-env
        # conda itself does not provide a public API for it
        index = get_index_trap(prefix=prefix)
        actions = plan.install_actions(prefix, index, specs=['flask'],
                                       prune=True)
        plan.execute_actions(actions, index, verbose=True)

        assert_package_is_installed(prefix, 'flask')
        # 'decorator' was only requested initially, so pruning removes it
        assert not package_is_installed(prefix, 'decorator')
def force_extract_and_link(dists, prefix, verbose=False):
    """Re-extract and (re)link `dists` into `prefix`, unlinking any
    installed package with the same name first."""
    # map installed package names to the dist currently linked
    installed_by_name = {}
    for linked_dist in install.linked(prefix):
        installed_by_name[install.name_dist(linked_dist)] = linked_dist

    plan_actions = defaultdict(list)
    plan_actions['PREFIX'] = prefix
    plan_actions['op_order'] = RM_EXTRACTED, EXTRACT, UNLINK, LINK
    for dist in dists:
        plan_actions[RM_EXTRACTED].append(dist)
        plan_actions[EXTRACT].append(dist)
        # unlink any installed package with that name
        previous = installed_by_name.get(install.name_dist(dist))
        if previous is not None:
            plan_actions[UNLINK].append(previous)
        plan_actions[LINK].append(dist)
    execute_actions(plan_actions, verbose=verbose)
def create_env(prefix, specs, clear_cache=True, debug=False):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    if not debug:
        # This squelches a ton of conda output that is not hugely relevant
        logging.getLogger("conda.install").setLevel(logging.ERROR)
        logging.getLogger("fetch").setLevel(logging.WARN)
        logging.getLogger("print").setLevel(logging.WARN)
        logging.getLogger("progress").setLevel(logging.WARN)
        logging.getLogger("dotupdate").setLevel(logging.WARN)
        logging.getLogger("stdoutlog").setLevel(logging.WARN)
        logging.getLogger("requests.packages.urllib3.connectionpool").setLevel(logging.WARN)

    specs = list(specs)
    # enable any tracked features by appending 'feature@' specs
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)

    for d in config.bldpkgs_dirs:
        if not isdir(d):
            os.makedirs(d)
        update_index(d)
    if specs:  # Don't waste time if there is nothing to do
        # FIXME: stupid hack to put test prefix on PATH so that runtime libs can be found
        old_path = os.environ['PATH']
        os.environ['PATH'] = prepend_bin_path(os.environ.copy(), prefix,
                                              True)['PATH']

        index = get_build_index(clear_cache=True)
        warn_on_old_conda_build(index)
        cc.pkgs_dirs = cc.pkgs_dirs[:1]  # only use the first package cache
        actions = plan.install_actions(prefix, index, specs)
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=debug)

        os.environ['PATH'] = old_path  # restore the original PATH

    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)

    if on_win:
        shell = "cmd.exe"
    else:
        shell = "bash"
    # make the `conda` command usable from within the new environment
    symlink_conda(prefix, sys.prefix, shell)
def create_env(prefix, specs, clear_cache=True, verbose=True,
               channel_urls=(), override_channels=False):
    """
    Create a conda environment for the given prefix and specs.
    """
    if not isdir(config.bldpkgs_dir):
        os.makedirs(config.bldpkgs_dir)
    update_index(config.bldpkgs_dir)
    if specs:  # Don't waste time if there is nothing to do
        index = get_build_index(clear_cache=True,
                                channel_urls=channel_urls,
                                override_channels=override_channels)
        warn_on_old_conda_build(index)
        cc.pkgs_dirs = cc.pkgs_dirs[:1]  # only use the first package cache
        actions = plan.install_actions(prefix, index, specs)
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=verbose)
    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)
def install(prefix, specs, args, env):
    """Install `specs` into `prefix` using the channels from `env`."""
    # TODO: do we need this?
    common.check_specs(prefix, specs, json=args.json)

    # TODO: support all various ways this happens
    index = common.get_index_trap(channel_urls=env.channels)
    actions = plan.install_actions(prefix, index, specs)

    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
        except RuntimeError as e:
            # 'LOCKERROR' in the message marks lock contention
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            common.exception_and_exit(e, error_type=error_type,
                                      json=args.json)
        except SystemExit as e:
            common.exception_and_exit(e, json=args.json)
def clone_env(prefix1, prefix2, verbose=True, quiet=False):
    """
    clone existing prefix1 into new prefix2

    Copies untracked files (rewriting embedded prefix paths in text files)
    and relinks the same package set.  Returns (actions, untracked_files).
    """
    untracked_files = untracked(prefix1)
    dists = discard_conda(install.linked(prefix1))

    if verbose:
        print('Packages: %d' % len(dists))
        print('Files: %d' % len(untracked_files))

    for f in untracked_files:
        src = join(prefix1, f)
        dst = join(prefix2, f)
        dst_dir = dirname(dst)
        # a file/symlink where the directory should be would block makedirs
        if islink(dst_dir) or isfile(dst_dir):
            os.unlink(dst_dir)
        if not isdir(dst_dir):
            os.makedirs(dst_dir)

        try:
            with open(src, 'rb') as fi:
                data = fi.read()
        except IOError:
            # unreadable file: skip it (best-effort clone)
            continue

        try:
            s = data.decode('utf-8')
            # rewrite embedded absolute paths to point at the new prefix
            s = s.replace(prefix1, prefix2)
            data = s.encode('utf-8')
        except UnicodeDecodeError:  # data is binary
            pass

        with open(dst, 'wb') as fo:
            fo.write(data)
        shutil.copystat(src, dst)

    actions = ensure_linked_actions(dists, prefix2)
    execute_actions(actions, index=get_index(), verbose=not quiet)
    return actions, untracked_files
def app_uninstall(fn, prefix=config.root_dir):
    """
    Uninstall application `fn` (but not its dependencies).

    Equivalent to `conda remove fn`.
    """
    import conda.cli.common as common
    import conda.plan as plan

    index = None
    specs = [_fn2spec(fn)]
    protected = (plan.is_root_prefix(prefix) and
                 common.names_in_specs(common.root_no_rm, specs))
    if protected:
        raise ValueError("Cannot remove %s from the root environment"
                         % ", ".join(common.root_no_rm))

    actions = plan.remove_actions(prefix, specs, index=index)
    if plan.nothing_to_do(actions):
        raise ValueError("Nothing to do")
    plan.execute_actions(actions, index)
def create_env(pref, specs, clear_cache=True, verbose=True):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    if not isdir(config.bldpkgs_dir):
        os.makedirs(config.bldpkgs_dir)
    update_index(config.bldpkgs_dir)
    if specs:  # Don't waste time if there is nothing to do
        if clear_cache:
            # remove the cache such that a refetch is made,
            # this is necessary because we add the local build repo URL
            fetch_index.cache = {}
        index = get_index([url_path(config.croot)])
        cc.pkgs_dirs = cc.pkgs_dirs[:1]  # only use the first package cache
        actions = plan.install_actions(pref, index, specs)
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=verbose)
    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(pref):
        os.makedirs(pref)
def create_env(pref, specs, pypi=False):
    """Create a conda environment at `pref`; optionally resolve some
    specs from PyPI first."""
    if not isdir(bldpkgs_dir):
        os.makedirs(bldpkgs_dir)
    update_index(bldpkgs_dir)
    # remove the cache such that a refetch is made,
    # this is necessary because we add the local build repo URL
    fetch_index.cache = {}
    index = get_index([url_path(config.croot)])
    cc.pkgs_dirs = cc.pkgs_dirs[:1]  # only use the first package cache
    if pypi:
        from conda.from_pypi import install_from_pypi
        # presumably returns the specs still to be installed by conda
        # after handling the PyPI ones — verify against from_pypi
        specs = install_from_pypi(pref, index, specs)
    actions = plan.install_actions(pref, index, specs)
    plan.display_actions(actions, index)
    plan.execute_actions(actions, index, verbose=True)
    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(pref):
        os.makedirs(pref)
def install(prefix, specs, args, env, prune=False):
    """Install `specs` into `prefix`, honoring 'channel::spec' syntax and
    the channels listed in `env`."""
    # TODO: support all various ways this happens
    # Including 'nodefaults' in the channels list disables the defaults
    new_specs = []
    channel_urls = set()
    for elem in specs:
        if "::" in elem:
            # 'channel::spec' syntax: remember the channel, keep the bare spec
            channel_urls.add(elem.split("::")[0])
            new_specs.append(elem.split("::")[-1])
        else:
            new_specs.append(elem)
    specs = new_specs
    channel_urls = list(channel_urls)

    # TODO: support all various ways this happens
    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = channel_urls + [chan for chan in env.channels
                                   if chan != 'nodefaults']
    index = get_index(channel_urls=channel_urls,
                      prepend='nodefaults' not in env.channels,
                      prefix=prefix)
    _channel_priority_map = prioritize_channels(channel_urls)
    action_set = plan.install_actions_list(
        prefix, index, specs, prune=prune,
        channel_priority_map=_channel_priority_map)

    with common.json_progress_bars(json=args.json and not args.quiet):
        for actions in action_set:
            try:
                plan.execute_actions(actions, index, verbose=not args.quiet)
            except RuntimeError as e:
                # 'LOCKERROR' in the message marks lock contention
                if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                    raise LockError('Already locked: %s' % text_type(e))
                else:
                    # NOTE(review): a generic RuntimeError is re-raised as
                    # CondaHTTPError here — confirm this mapping is intended
                    raise CondaHTTPError('CondaHTTPError: %s' % e)
            except SystemExit as e:
                raise CondaSystemExit('Exiting', e)
def install(prefix, specs, args, env, prune=False):
    """Install `specs` into `prefix` using the channels from `env`."""
    # TODO: do we need this?
    common.check_specs(prefix, specs, json=args.json)

    # TODO: support all various ways this happens
    # Including 'nodefaults' in the channels list disables the defaults
    index = common.get_index_trap(
        channel_urls=[chan for chan in env.channels if chan != 'nodefaults'],
        prepend='nodefaults' not in env.channels)
    actions = plan.install_actions(prefix, index, specs, prune=prune)

    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
        except RuntimeError as e:
            # 'LOCKERROR' in the message marks lock contention
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            common.exception_and_exit(e, error_type=error_type,
                                      json=args.json)
        except SystemExit as e:
            common.exception_and_exit(e, json=args.json)
def install_local_packages(prefix, paths, verbose=False):
    """Install local .tar.bz2 conda packages into `prefix`.

    Copies each file into the first package cache, then re-extracts and
    (re)links every package.
    """
    # copy packages to pkgs dir
    dists = []
    for src_path in paths:
        assert src_path.endswith('.tar.bz2')
        fn = basename(src_path)
        dists.append(fn[:-8])  # strip '.tar.bz2' to get the dist name
        dst_path = join(config.pkgs_dirs[0], fn)
        if abspath(src_path) == abspath(dst_path):
            # already inside the package cache, no copy needed
            continue
        shutil.copyfile(src_path, dst_path)

    actions = defaultdict(list)
    actions['PREFIX'] = prefix
    # force a fresh extract, and unlink before relinking if installed
    actions['op_order'] = RM_EXTRACTED, EXTRACT, UNLINK, LINK
    for dist in dists:
        actions[RM_EXTRACTED].append(dist)
        actions[EXTRACT].append(dist)
        if install.is_linked(prefix, dist):
            actions[UNLINK].append(dist)
        actions[LINK].append(dist)
    execute_actions(actions, verbose=verbose)
def install(prefix, specs, args, env, prune=False):
    """Install `specs` into `prefix` using the channels from `env`."""
    # TODO: do we need this?
    check_specs(prefix, specs, json=args.json)

    # TODO: support all various ways this happens
    # Including 'nodefaults' in the channels list disables the defaults
    index = get_index_trap(channel_urls=[chan for chan in env.channels
                                         if chan != 'nodefaults'],
                           prepend='nodefaults' not in env.channels)
    actions = plan.install_actions(prefix, index, specs, prune=prune)

    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
        except RuntimeError as e:
            # 'LOCKERROR' in the message marks lock contention
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            exception_and_exit(e, error_type=error_type, json=args.json)
        except SystemExit as e:
            exception_and_exit(e, json=args.json)
def install(prefix, specs, args, env):
    """Install `specs` into `prefix` using the channels from `env`."""
    # TODO: do we need this?
    common.check_specs(prefix, specs, json=args.json)

    # TODO: support all various ways this happens
    index = common.get_index_trap(channel_urls=env.channels)
    actions = plan.install_actions(prefix, index, specs)

    if plan.nothing_to_do(actions):
        sys.stderr.write('# TODO handle more gracefully')
        sys.exit(-1)

    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
        except RuntimeError as e:
            # 'LOCKERROR' in the message marks lock contention
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            common.exception_and_exit(e, error_type=error_type,
                                      json=args.json)
        except SystemExit as e:
            common.exception_and_exit(e, json=args.json)
def clone_analysispackage(path, prefix=None, analysispackage_name=None, data_path=None):
    """
    Clone the analysispackage (located at `path`) by creating a new
    environment at `prefix` (unless prefix is None or the prefix
    directory already exists)
    """
    try:
        t = tarfile.open(path, 'r:*')
        meta = json.load(t.extractfile('info/index.json'))
    except tarfile.ReadError:
        raise RuntimeError('bad tar archive: %s' % path)
    except KeyError:
        raise RuntimeError("no archive 'info/index.json' in: %s" % (path))

    if prefix and isdir(prefix):
        print("erasing old environment at %s" % prefix)
        shutil.rmtree(prefix)
    if prefix and not isdir(prefix):
        for m in t.getmembers():
            # bundle-data files and package metadata are handled separately
            if m.path.startswith((BDP, 'info/')):
                continue
            t.extract(m, path=prefix)
        dists = discard_conda('-'.join(s.split())
                              for s in meta.get('depends', []))
        actions = plan.ensure_linked_actions(dists, prefix)
        index = get_index()
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=False)

    if not data_path:
        analysispackage_dir = abspath(expanduser(
            '~/analysispackages/%s' % (analysispackage_name or
                                       meta.get('analysispackage_name'))))
    else:
        analysispackage_dir = data_path
    for m in t.getmembers():
        if m.path.startswith(BDP):
            targetpath = join(analysispackage_dir, m.path[len(BDP):])
            # NOTE: relies on tarfile's private _extract_member API
            t._extract_member(m, targetpath)
    # BUGFIX: write the metadata next to the extracted data; the previous
    # join(data_path, ...) raised TypeError when data_path was None (the
    # default)
    with open(join(analysispackage_dir, ".metadata.json"), "w+") as f:
        json.dump({'env': prefix}, f)

    t.close()
def app_uninstall(fn, prefix=config.root_dir):
    """
    Uninstall application `fn` (but not its dependencies).

    Equivalent to `conda remove fn`.
    """
    import conda.cli.common as common
    import conda.plan as plan

    specs = [_fn2spec(fn)]
    index = None
    if plan.is_root_prefix(prefix):
        # certain core packages may never be removed from the root env
        if common.names_in_specs(common.root_no_rm, specs):
            raise ValueError("Cannot remove %s from the root environment"
                             % ', '.join(common.root_no_rm))

    actions = plan.remove_actions(prefix, specs, index=index)
    if plan.nothing_to_do(actions):
        raise ValueError("Nothing to do")
    plan.execute_actions(actions, index)
def old_clone_bundle(path, prefix):
    """
    Clone the bundle (located at `path`) by creating a new environment at
    `prefix`.

    The directory `path` is located in should be some temp directory or
    some other directory OUTSIDE /opt/anaconda (this function handles
    copying of the file if necessary for you).  After calling this
    function, the original file (at `path`) may be removed.
    """
    assert not abspath(path).startswith(abspath(config.root_dir))
    assert not isdir(prefix)
    fn = basename(path)
    assert re.match(r'share-[0-9a-f]{40}-\d+\.tar\.bz2$', fn), fn
    dist = fn[:-8]  # strip '.tar.bz2'

    pkgs_dir = config.pkgs_dirs[0]
    if not install.is_extracted(pkgs_dir, dist):
        shutil.copyfile(path, join(pkgs_dir, dist + '.tar.bz2'))
        plan.execute_plan(['%s %s' % (plan.EXTRACT, dist)])
    assert install.is_extracted(pkgs_dir, dist)
    with open(join(pkgs_dir, dist, 'info', 'index.json')) as fi:
        meta = json.load(fi)

    # for backwards compatibility, use "requires" when "depends" is not there
    dists = ['-'.join(r.split())
             for r in meta.get('depends', meta.get('requires', []))
             if not r.startswith('conda ')]
    dists.append(dist)

    actions = plan.ensure_linked_actions(dists, prefix)
    index = get_index()
    plan.execute_actions(actions, index, verbose=False)

    # the bundle itself should not appear as an installed package in the
    # new environment
    os.unlink(join(prefix, 'conda-meta', dist + '.json'))
def clone_env(prefix1, prefix2, verbose=True):
    """
    clone existing prefix1 into new prefix2

    Copies untracked files (rewriting embedded prefix paths in text files)
    and relinks the same set of packages.
    """
    untracked_files = untracked(prefix1)
    dists = install.linked(prefix1)
    print('Packages: %d' % len(dists))
    print('Files: %d' % len(untracked_files))

    for f in untracked_files:
        src = join(prefix1, f)
        dst = join(prefix2, f)
        dst_dir = dirname(dst)
        # a file/symlink where the directory should be would block makedirs
        if islink(dst_dir) or isfile(dst_dir):
            os.unlink(dst_dir)
        if not isdir(dst_dir):
            os.makedirs(dst_dir)

        try:
            with open(src, 'rb') as fi:
                data = fi.read()
        except IOError:
            # unreadable file: skip it (best-effort clone)
            continue

        try:
            s = data.decode('utf-8')
            # rewrite embedded absolute paths to point at the new prefix
            s = s.replace(prefix1, prefix2)
            data = s.encode('utf-8')
        except UnicodeDecodeError:  # data is binary
            pass

        with open(dst, 'wb') as fo:
            fo.write(data)
        shutil.copystat(src, dst)

    actions = ensure_linked_actions(dists, prefix2)
    execute_actions(actions, index=get_index(), verbose=verbose)
def install(prefix, specs, args, data):
    """Install `specs` into `prefix` using channels from the `data` dict."""
    # TODO: do we need this?
    common.check_specs(prefix, specs, json=args.json)

    # TODO: support all various ways this happens
    index = common.get_index_trap(
        channel_urls=data.get('channels', ())
    )
    actions = plan.install_actions(prefix, index, specs)

    if plan.nothing_to_do(actions):
        sys.stderr.write('# TODO handle more gracefully')
        sys.exit(-1)

    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
        except RuntimeError as e:
            # 'LOCKERROR' in the message marks lock contention
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            common.exception_and_exit(e, error_type=error_type,
                                      json=args.json)
        except SystemExit as e:
            common.exception_and_exit(e, json=args.json)
def build(m, post=None, include_recipe=True, keep_old_work=False,
          need_source_download=True, verbose=True, dirty=False, activate=True):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    :type keep_old_work: bool: Keep any previous work directory.
    :type need_source_download: bool: if rendering failed to download source
    (due to missing tools), retry here after build env is populated
    '''
    if (m.get_value('build/detect_binary_files_with_prefix') or
            m.binary_has_prefix_files()) and not on_win:
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if m.skip():
        print("Skipped: The %s recipe defines build/skip for this "
              "configuration." % m.dist())
        return

    with Locked(cc.root_dir):
        # If --keep-old-work, then move the contents of source.WORK_DIR to a
        # temporary directory for the duration of the build.
        # The source unpacking procedure is too varied and complex
        # to allow this to be written cleanly (see source.get_dir() for example)
        if keep_old_work:
            old_WORK_DIR = tempfile.mkdtemp()
            old_sub_dirs = [name for name in os.listdir(source.WORK_DIR)
                            if os.path.isdir(os.path.join(source.WORK_DIR, name))]
            if len(old_sub_dirs):
                print("Keeping old work directory backup: %s => %s"
                      % (old_sub_dirs, old_WORK_DIR))
                for old_sub in old_sub_dirs:
                    shutil.move(os.path.join(source.WORK_DIR, old_sub), old_WORK_DIR)

        if post in [False, None]:
            print("Removing old build environment")
            print("BUILD START:", m.dist())
            if on_win:
                # On Windows in-use files cannot be deleted; moving them to
                # the trash sidesteps that.
                if isdir(config.short_build_prefix):
                    move_to_trash(config.short_build_prefix, '')
                if isdir(config.long_build_prefix):
                    move_to_trash(config.long_build_prefix, '')
            else:
                rm_rf(config.short_build_prefix)
                rm_rf(config.long_build_prefix)

            # Display the name only
            # Version number could be missing due to dependency on source info.
            create_env(config.build_prefix,
                       [ms.spec for ms in m.ms_depends('build')])

            if need_source_download:
                # Execute any commands fetching the source (e.g., git) in the _build environment.
                # This makes it possible to provide source fetchers (eg. git, hg, svn) as build
                # dependencies.
                m, need_source_download = parse_or_try_download(
                    m, no_download_source=False, force_download=True,
                    verbose=verbose, dirty=dirty)
                assert not need_source_download, "Source download failed. Please investigate."

            if m.name() in [i.rsplit('-', 2)[0] for i in linked(config.build_prefix)]:
                print("%s is installed as a build dependency. Removing."
                      % m.name())
                index = get_build_index(clear_cache=False)
                actions = plan.remove_actions(config.build_prefix, [m.name()], index=index)
                assert not plan.nothing_to_do(actions), actions
                plan.display_actions(actions, index)
                plan.execute_actions(actions, index)

            print("Package:", m.dist())

            assert isdir(source.WORK_DIR)
            src_dir = source.get_dir()
            contents = os.listdir(src_dir)
            if contents:
                print("source tree in:", src_dir)
            else:
                print("no source")

            rm_rf(config.info_dir)
            # files1 is the baseline: files already present before the build.
            files1 = prefix_files()
            for pat in m.always_include_files():
                has_matches = False
                for f in set(files1):
                    if fnmatch.fnmatch(f, pat):
                        print("Including in package existing file", f)
                        # Dropping from the baseline forces the file into the package.
                        files1.discard(f)
                        has_matches = True
                if not has_matches:
                    sys.exit(
                        "Error: Glob %s from always_include_files does not match any files" % pat)
            # Save this for later
            with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
                f.write(u'\n'.join(sorted(list(files1))))
                f.write(u'\n')

            # Use script from recipe?
            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)

            if on_win:
                build_file = join(m.path, 'bld.bat')
                if script:
                    build_file = join(source.get_dir(), 'bld.bat')
                    with open(join(source.get_dir(), 'bld.bat'), 'w') as bf:
                        bf.write(script)
                import conda_build.windows as windows
                windows.build(m, build_file, dirty=dirty, activate=activate)
            else:
                build_file = join(m.path, 'build.sh')

                # There is no sense in trying to run an empty build script.
                if isfile(build_file) or script:
                    env = environ.get_dict(m, dirty=dirty)
                    work_file = join(source.get_dir(), 'conda_build.sh')
                    if script:
                        with open(work_file, 'w') as bf:
                            bf.write(script)
                    if activate:
                        # Prepend "source activate" so the build runs inside
                        # the build environment.
                        if isfile(build_file):
                            # NOTE(review): file handle not closed explicitly
                            data = open(build_file).read()
                        else:
                            # NOTE(review): file handle not closed explicitly
                            data = open(work_file).read()
                        with open(work_file, 'w') as bf:
                            bf.write("source activate {build_prefix}\n".format(
                                build_prefix=config.build_prefix))
                            bf.write(data)
                    else:
                        if not isfile(work_file):
                            shutil.copy(build_file, work_file)
                    os.chmod(work_file, 0o766)

                    if isfile(work_file):
                        cmd = [shell_path, '-x', '-e', work_file]
                        _check_call(cmd, env=env, cwd=src_dir)

        if post in [True, None]:
            if post:
                # post-only run: reload the pre-build file baseline.
                with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                    files1 = set(f.read().splitlines())

            get_build_metadata(m)
            create_post_scripts(m)
            create_entry_points(m.get_value('build/entry_points'))
            assert not exists(config.info_dir)
            files2 = prefix_files()

            post_process(sorted(files2 - files1),
                         preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')))

            # The post processing may have deleted some files (like easy-install.pth)
            files2 = prefix_files()
            if any(config.meta_dir in join(config.build_prefix, f)
                   for f in files2 - files1):
                sys.exit(indent(
                    """Error: Untracked file(s) %s found in conda-meta directory.
This error usually comes from using conda in the build script.
Avoid doing this, as it can lead to packages that include their dependencies."""
                    % (tuple(f for f in files2 - files1
                             if config.meta_dir in join(config.build_prefix, f)),)))
            post_build(m, sorted(files2 - files1))
            create_info_files(m, sorted(files2 - files1),
                              include_recipe=bool(m.path) and include_recipe)
            if m.get_value('build/noarch_python'):
                import conda_build.noarch_python as noarch_python
                noarch_python.transform(m, sorted(files2 - files1))

            files3 = prefix_files()
            fix_permissions(files3 - files1)

            path = bldpkg_path(m)
            t = tarfile.open(path, 'w:bz2')

            def order(f):
                # we don't care about empty files so send them back via 100000
                fsize = os.stat(join(config.build_prefix, f)).st_size or 100000
                # info/* records will be False == 0, others will be 1.
                info_order = int(os.path.dirname(f) != 'info')
                return info_order, fsize

            # add files in order of a) in info directory, b) increasing size so
            # we can access small manifest or json files without decompressing
            # possible large binary or data files
            for f in sorted(files3 - files1, key=order):
                t.add(join(config.build_prefix, f), f)
            t.close()

            print("BUILD END:", m.dist())

            # we're done building, perform some checks
            tarcheck.check_all(path)
            update_index(config.bldpkgs_dir)
        else:
            print("STOPPING BUILD BEFORE POST:", m.dist())

        # Restore anything we stashed away for --keep-old-work.
        if keep_old_work and len(old_sub_dirs):
            print("Restoring old work directory backup: %s :: %s => %s"
                  % (old_WORK_DIR, old_sub_dirs, source.WORK_DIR))
            for old_sub in old_sub_dirs:
                if os.path.exists(os.path.join(source.WORK_DIR, old_sub)):
                    print("Not restoring old source directory %s over new build's version" % (old_sub))
                else:
                    shutil.move(os.path.join(old_WORK_DIR, old_sub), source.WORK_DIR)
            shutil.rmtree(old_WORK_DIR, ignore_errors=True)
def build(m, get_src=True, verbose=True, post=None, channel_urls=(),
          override_channels=False):
    """
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    """
    if m.get_value("build/detect_binary_files_with_prefix") or m.binary_has_prefix_files():
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if post in [False, None]:
        print("Removing old build directory")
        rm_rf(config.short_build_prefix)
        rm_rf(config.long_build_prefix)
        print("Removing old work directory")
        rm_rf(source.WORK_DIR)

        # Display the name only
        # Version number could be missing due to dependency on source info.
        print("BUILD START:", m.dist())
        create_env(
            config.build_prefix,
            [ms.spec for ms in m.ms_depends("build")],
            verbose=verbose,
            channel_urls=channel_urls,
            override_channels=override_channels,
        )

        if m.name() in [i.rsplit("-", 2)[0] for i in linked(config.build_prefix)]:
            # The package being built is itself linked into the build
            # environment; unlink it so the new build is not contaminated.
            print("%s is installed as a build dependency. Removing." % m.name())
            index = get_build_index(clear_cache=False, channel_urls=channel_urls,
                                    override_channels=override_channels)
            actions = plan.remove_actions(config.build_prefix, [m.name()], index=index)
            assert not plan.nothing_to_do(actions), actions
            plan.display_actions(actions, index)
            plan.execute_actions(actions, index)

        if get_src:
            source.provide(m.path, m.get_section("source"))
            # Parse our metadata again because we did not initialize the source
            # information before.
            m.parse_again()

        print("Package:", m.dist())

        assert isdir(source.WORK_DIR)
        src_dir = source.get_dir()
        contents = os.listdir(src_dir)
        if contents:
            print("source tree in:", src_dir)
        else:
            print("no source")

        rm_rf(config.info_dir)
        # files1 is the baseline: files present in the prefix before building.
        files1 = prefix_files()
        for rx in m.always_include_files():
            pat = re.compile(rx)
            has_matches = False
            for f in set(files1):
                if pat.match(f):
                    print("Including in package existing file", f)
                    # Dropping from the baseline forces the file into the package.
                    files1.discard(f)
                    has_matches = True
            if not has_matches:
                sys.exit("Error: Regex %s from always_include_files does not match any files" % rx)
        # Save this for later
        with open(join(config.croot, "prefix_files.txt"), "w") as f:
            f.write(u"\n".join(sorted(list(files1))))
            f.write(u"\n")

        if sys.platform == "win32":
            import conda_build.windows as windows
            windows.build(m)
        else:
            env = environ.get_dict(m)
            build_file = join(m.path, "build.sh")

            # An inline build/script in the recipe overrides build.sh.
            script = m.get_value("build/script", None)
            if script:
                if isinstance(script, list):
                    script = "\n".join(script)
                build_file = join(source.get_dir(), "conda_build.sh")
                with open(build_file, "w") as bf:
                    bf.write(script)
                os.chmod(build_file, 0o766)

            if exists(build_file):
                cmd = ["/bin/bash", "-x", "-e", build_file]
                _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post == True:
            # post-only run: reload the pre-build file baseline.
            with open(join(config.croot, "prefix_files.txt"), "r") as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m)
        create_post_scripts(m)
        create_entry_points(m.get_value("build/entry_points"))
        assert not exists(config.info_dir)
        files2 = prefix_files()

        post_process(sorted(files2 - files1),
                     preserve_egg_dir=bool(m.get_value("build/preserve_egg_dir")))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files()
        assert not any(config.meta_dir in join(config.build_prefix, f)
                       for f in files2 - files1)
        post_build(m, sorted(files2 - files1))
        create_info_files(m, sorted(files2 - files1),
                          include_recipe=bool(m.path))
        if m.get_value("build/noarch_python"):
            import conda_build.noarch_python as noarch_python
            noarch_python.transform(m, sorted(files2 - files1))

        files3 = prefix_files()
        fix_permissions(files3 - files1)

        # Package everything the build added into a .tar.bz2.
        path = bldpkg_path(m)
        t = tarfile.open(path, "w:bz2")
        for f in sorted(files3 - files1):
            t.add(join(config.build_prefix, f), f)
        t.close()

        print("BUILD END:", m.dist())

        # we're done building, perform some checks
        tarcheck.check_all(path)
        update_index(config.bldpkgs_dir)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())
def build(m, verbose=True, channel_urls=(), override_channels=False, wheel_dir="./build"):
    '''
    Build a wheel for the package with the specified metadata.

    Runs the recipe's ``build_wheel.sh`` (or a generated
    ``python setup.py bdist_wheel`` script) inside the build environment and
    copies any produced wheels into `wheel_dir`.

    :param m: Package metadata
    :type m: Metadata
    :param wheel_dir: directory the produced ``*.whl`` files are copied into
    '''
    if (m.get_value('build/detect_binary_files_with_prefix') or
            m.binary_has_prefix_files()):
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if m.skip():
        print("Skipped: The %s recipe defines build/skip for this "
              "configuration." % m.dist())
        return

    print("Removing old build environment")
    if on_win:
        # On Windows in-use files cannot be deleted; move them to the trash.
        if isdir(config.short_build_prefix):
            move_to_trash(config.short_build_prefix, '')
        if isdir(config.long_build_prefix):
            move_to_trash(config.long_build_prefix, '')
    else:
        rm_rf(config.short_build_prefix)
        rm_rf(config.long_build_prefix)
    print("Removing old work directory")
    if on_win:
        if isdir(source.WORK_DIR):
            move_to_trash(source.WORK_DIR, '')
    else:
        rm_rf(source.WORK_DIR)

    # Display the name only
    # Version number could be missing due to dependency on source info.
    print("BUILD START:", m.dist())
    create_env(config.build_prefix,
               [ms.spec for ms in m.ms_depends('build')],
               verbose=verbose, channel_urls=channel_urls,
               override_channels=override_channels)

    if m.name() in [i.rsplit('-', 2)[0] for i in linked(config.build_prefix)]:
        # The package being built is itself a build dependency; unlink it.
        print("%s is installed as a build dependency. Removing." % m.name())
        index = get_build_index(clear_cache=False, channel_urls=channel_urls,
                                override_channels=override_channels)
        actions = plan.remove_actions(config.build_prefix, [m.name()], index=index)
        assert not plan.nothing_to_do(actions), actions
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index)

    # download source code...
    source.provide(m.path, m.get_section('source'))

    # Parse our metadata again because we did not initialize the source
    # information before.
    m.parse_again()

    print("Package:", m.dist())

    assert isdir(source.WORK_DIR)
    src_dir = source.get_dir()
    contents = os.listdir(src_dir)
    if contents:
        print("source tree in:", src_dir)
    else:
        print("no source")

    rm_rf(config.info_dir)
    # files1 is the baseline: files present in the prefix before building.
    files1 = prefix_files()
    for pat in m.always_include_files():
        has_matches = False
        for f in set(files1):
            if fnmatch.fnmatch(f, pat):
                print("Including in package existing file", f)
                files1.discard(f)
                has_matches = True
        if not has_matches:
            sys.exit(
                "Error: Glob %s from always_include_files does not match any files" % pat)
    # Save this for later
    with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
        f.write(u'\n'.join(sorted(list(files1))))
        f.write(u'\n')

    print("Source dir: %s" % src_dir)
    if sys.platform == 'win32':
        windows_build(m)
    else:
        env = environ.get_dict(m)
        build_file = join(m.path, 'build_wheel.sh')
        if not isfile(build_file):
            # No recipe-provided script: generate a default bdist_wheel one.
            print("Using plain 'python setup.py bdist_wheel' as build script")
            build_file = join(src_dir, 'build_wheel.sh')
            with open(build_file, 'w') as fo:
                fo.write('\n')
                fo.write('# Autogenerated build command:\n')
                fo.write('python setup.py bdist_wheel\n')
                fo.write('\n')
        cmd = [shell_path, '-x', '-e', build_file]
        _check_call(cmd, env=env, cwd=src_dir)

    all_wheels = glob(join(src_dir, "dist", '*.whl'))
    if len(all_wheels) == 0:
        print("No wheels produced!")
    else:
        # BUG FIX: this condition was `== 1`, which warned about "more than
        # one wheel" exactly when a single wheel was produced (and stayed
        # silent for 2+ wheels).  It must be `> 1`.
        if len(all_wheels) > 1:
            print("More than one wheel produced!")
        try:
            os.makedirs(wheel_dir)
            print("Created wheel dir: %s:" % wheel_dir)
        except OSError:
            # Directory already exists; re-raise any other failure.
            if not isdir(wheel_dir):
                raise
        print("Copying to %s:" % wheel_dir)
        for wheel in all_wheels:
            shutil.copy(wheel, wheel_dir)
            print(" %s" % basename(wheel))
def build(m, get_src=True, verbose=True, post=None, channel_urls=(),
          override_channels=False, include_recipe=True):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    '''
    if (m.get_value('build/detect_binary_files_with_prefix') or
            m.binary_has_prefix_files()):
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if post in [False, None]:
        print("Removing old build environment")
        if on_win:
            # On Windows in-use files cannot be deleted; move them to the trash.
            if isdir(config.short_build_prefix):
                move_to_trash(config.short_build_prefix, '')
            if isdir(config.long_build_prefix):
                move_to_trash(config.long_build_prefix, '')
        else:
            rm_rf(config.short_build_prefix)
            rm_rf(config.long_build_prefix)
        print("Removing old work directory")
        if on_win:
            if isdir(source.WORK_DIR):
                move_to_trash(source.WORK_DIR, '')
        else:
            rm_rf(source.WORK_DIR)

        # Display the name only
        # Version number could be missing due to dependency on source info.
        print("BUILD START:", m.dist())
        create_env(config.build_prefix,
                   [ms.spec for ms in m.ms_depends('build')],
                   verbose=verbose, channel_urls=channel_urls,
                   override_channels=override_channels)

        if m.name() in [i.rsplit('-', 2)[0] for i in linked(config.build_prefix)]:
            # The package being built is itself linked into the build
            # environment; unlink it before building anew.
            print("%s is installed as a build dependency. Removing."
                  % m.name())
            index = get_build_index(clear_cache=False, channel_urls=channel_urls,
                                    override_channels=override_channels)
            actions = plan.remove_actions(config.build_prefix, [m.name()], index=index)
            assert not plan.nothing_to_do(actions), actions
            plan.display_actions(actions, index)
            plan.execute_actions(actions, index)

        if get_src:
            source.provide(m.path, m.get_section('source'))
            # Parse our metadata again because we did not initialize the source
            # information before.
            m.parse_again()

        print("Package:", m.dist())

        assert isdir(source.WORK_DIR)
        src_dir = source.get_dir()
        contents = os.listdir(src_dir)
        if contents:
            print("source tree in:", src_dir)
        else:
            print("no source")

        rm_rf(config.info_dir)
        # files1 is the baseline: files present in the prefix before building.
        files1 = prefix_files()
        for pat in m.always_include_files():
            has_matches = False
            for f in set(files1):
                if fnmatch.fnmatch(f, pat):
                    print("Including in package existing file", f)
                    # Dropping from the baseline forces the file into the package.
                    files1.discard(f)
                    has_matches = True
            if not has_matches:
                sys.exit(
                    "Error: Glob %s from always_include_files does not match any files" % pat)
        # Save this for later
        with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
            f.write(u'\n'.join(sorted(list(files1))))
            f.write(u'\n')

        if sys.platform == 'win32':
            import conda_build.windows as windows
            windows.build(m)
        else:
            env = environ.get_dict(m)
            build_file = join(m.path, 'build.sh')

            # An inline build/script in the recipe overrides build.sh.
            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)
                build_file = join(source.get_dir(), 'conda_build.sh')
                with open(build_file, 'w') as bf:
                    bf.write(script)
                os.chmod(build_file, 0o766)

            if isfile(build_file):
                cmd = ['/bin/bash', '-x', '-e', build_file]
                _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post == True:
            # post-only run: reload the pre-build file baseline.
            with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m)
        create_post_scripts(m)
        create_entry_points(m.get_value('build/entry_points'))
        assert not exists(config.info_dir)
        files2 = prefix_files()

        post_process(sorted(files2 - files1),
                     preserve_egg_dir=bool(
                         m.get_value('build/preserve_egg_dir')))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files()
        if any(config.meta_dir in join(config.build_prefix, f)
               for f in files2 - files1):
            sys.exit(indent(
                """Error: Untracked file(s) %s found in conda-meta directory.
This error usually comes from using conda in the build script.
Avoid doing this, as it can lead to packages that include their dependencies."""
                % (tuple(f for f in files2 - files1
                         if config.meta_dir in join(config.build_prefix, f)),)))
        post_build(m, sorted(files2 - files1))
        create_info_files(m, sorted(files2 - files1),
                          include_recipe=bool(m.path) and include_recipe)
        if m.get_value('build/noarch_python'):
            import conda_build.noarch_python as noarch_python
            noarch_python.transform(m, sorted(files2 - files1))

        files3 = prefix_files()
        fix_permissions(files3 - files1)

        # Package everything the build added into a .tar.bz2.
        path = bldpkg_path(m)
        t = tarfile.open(path, 'w:bz2')
        for f in sorted(files3 - files1):
            t.add(join(config.build_prefix, f), f)
        t.close()

        print("BUILD END:", m.dist())

        # we're done building, perform some checks
        tarcheck.check_all(path)
        update_index(config.bldpkgs_dir)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create
    """
    newenv = command == 'create'
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix)
    config.set_pkgs_dirs(prefix)

    if command == 'update':
        if len(args.packages) == 0:
            sys.exit("""Error: no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)

    if command == 'update':
        # Every named package must already be installed in the prefix.
        linked = ci.linked(prefix)
        for name in args.packages:
            common.arg2spec(name)
            if '=' in name:
                sys.exit("Invalid package name: '%s'" % (name))
            if name not in set(ci.name_dist(d) for d in linked):
                sys.exit("Error: package '%s' is not installed in %s" %
                         (name, prefix))

    if newenv and args.clone:
        if args.packages:
            sys.exit('Error: did not expect any arguments for --clone')
        clone(args.clone, prefix)
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
        return

    if newenv and not args.no_default_packages:
        default_packages = config.create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in config.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build import config as build_config
        except ImportError:
            sys.exit("Error: you need to have 'conda-build' installed"
                     " to use the --use-local option")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        index = get_index([url_path(build_config.croot)],
                          use_cache=args.use_cache)
    else:
        index = get_index(channel_urls=channel_urls,
                          prepend=not args.override_channels,
                          use_cache=args.use_cache)

    # Don't update packages that are already up-to-date
    if command == 'update':
        r = Resolve(index)
        orig_packages = args.packages[:]
        for name in orig_packages:
            # A package appears exactly once in linked, so each list has one entry.
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            build_inst = [dist.rsplit('-', 2)[2].rsplit('.tar.bz2', 1)[0]
                          for dist in linked
                          if dist.rsplit('-', 2)[0] == name]
            assert len(vers_inst) == 1, name
            assert len(build_inst) == 1, name
            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                # Shouldn't happen?
                continue
            # This won't do the right thing for python 2
            latest = pkgs[-1]
            if latest.version == vers_inst[0] and latest.build == build_inst[0]:
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import list_packages
            regex = '^(%s)$' % '|'.join(orig_packages)
            print('# All requested packages already installed.')
            list_packages(prefix, regex)
            return

    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            install_tar(prefix, tar_path, verbose=not args.quiet)
            return

    # handle explicit installs of conda packages
    if args.packages and all(s.endswith('.tar.bz2') for s in args.packages):
        from conda.misc import install_local_packages
        install_local_packages(prefix, args.packages, verbose=not args.quiet)
        return

    if any(s.endswith('.tar.bz2') for s in args.packages):
        sys.exit("cannot mix specifications with conda package filenames")

    if args.force:
        args.no_deps = True

    if args.file:
        specs = common.specs_from_url(args.file)
    else:
        specs = common.specs_from_args(args.packages)

    common.check_specs(prefix, specs)

    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                sys.exit("Error: could not create directory: %s" % prefix)
        else:
            sys.exit("""\
Error: environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix)

    actions = plan.install_actions(prefix, index, specs,
                                   force=args.force, only_names=only_names)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import list_packages
        regex = '^(%s)$' % '|'.join(spec_names)
        print('# All requested packages already installed.')
        list_packages(prefix, regex)
        return

    print()
    print("Package plan for installation in environment %s:" % prefix)
    plan.display_actions(actions, index)

    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    # Only prompt for confirmation when pscheck did not already handle it.
    if not pscheck.main(args):
        common.confirm_yn(args)

    plan.execute_actions(actions, index, verbose=not args.quiet)

    if newenv:
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
def execute(args, parser):
    # Entry point for "conda remove": compute and run the unlink plan for the
    # named packages, a feature set, or the whole environment (--all).
    import conda.plan as plan
    import conda.instructions as inst
    from conda.install import rm_rf, linked_data

    if not (args.all or args.package_names):
        raise CondaValueError('no package names supplied,\n'
                              ' try "conda remove -h" for more details',
                              args.json)

    prefix = get_prefix(args)
    if args.all and prefix == default_prefix:
        msg = "cannot remove current environment. deactivate and run conda remove again"
        raise CondaEnvironmentError(msg)
    check_write('remove', prefix, json=args.json)
    ensure_use_local(args)
    ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()
    if not args.features and args.all:
        # Removing everything: the installed packages themselves serve as the
        # index, no channel fetch needed.
        index = linked_data(prefix)
        index = {dist + '.tar.bz2': info for dist, info in iteritems(index)}
    else:
        index = get_index_trap(channel_urls=channel_urls,
                               prepend=not args.override_channels,
                               use_local=args.use_local,
                               use_cache=args.use_index_cache,
                               json=args.json,
                               offline=args.offline,
                               prefix=prefix)
    specs = None
    if args.features:
        features = set(args.package_names)
        actions = plan.remove_features_actions(prefix, index, features)
    elif args.all:
        if plan.is_root_prefix(prefix):
            raise CondaEnvironmentError('cannot remove root environment,\n'
                                        ' add -n NAME or -p PREFIX option',
                                        args.json)
        actions = {inst.PREFIX: prefix}
        for fkey in sorted(iterkeys(index)):
            # fkey[:-8] strips the ".tar.bz2" suffix to get the dist name.
            plan.add_unlink(actions, fkey[:-8])
    else:
        specs = specs_from_args(args.package_names)
        if (plan.is_root_prefix(prefix) and
                names_in_specs(root_no_rm, specs)):
            raise CondaEnvironmentError('cannot remove %s from root environment' %
                                        ', '.join(root_no_rm), args.json)
        actions = plan.remove_actions(prefix, specs, index=index,
                                      force=args.force, pinned=args.pinned)

    if plan.nothing_to_do(actions):
        if args.all:
            # Nothing to unlink, but --all still deletes the prefix itself.
            rm_rf(prefix)
            if args.json:
                stdout_json({
                    'success': True,
                    'actions': actions
                })
            return
        raise PackageNotFoundError('no packages found to remove from '
                                   'environment: %s' % prefix, args.json)

    if not args.json:
        print()
        print("Package plan for package removal in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if args.json and args.dry_run:
        stdout_json({
            'success': True,
            'dry_run': True,
            'actions': actions
        })
        return

    if not args.json:
        confirm_yn(args)

    if args.json and not args.quiet:
        with json_progress_bars():
            plan.execute_actions(actions, index, verbose=not args.quiet)
    else:
        plan.execute_actions(actions, index, verbose=not args.quiet)
        if specs:
            try:
                with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                    f.write('# remove specs: %s\n' % specs)
            except IOError as e:
                if e.errno == errno.EACCES:
                    # Read-only environment: skip the history entry.
                    log.debug("Can't write the history file")
                else:
                    raise

    if args.all:
        rm_rf(prefix)

    if args.json:
        stdout_json({
            'success': True,
            'actions': actions
        })
def install(args, parser, command='install'):
    """
    Shared implementation of ``conda install``, ``conda update`` and
    ``conda create`` (the active subcommand is passed via *command*).

    High-level flow: validate arguments -> collect specs (from --file,
    --all, positional names, or local .tar.bz2/.tar files) -> build the
    channel index -> compute an action plan -> display/confirm -> execute,
    recording the specs in the prefix's history file.

    May call itself recursively during ``update --all`` to skip packages
    that have disappeared from the index (tracked via ``args._skip``).
    """
    newenv = bool(command == 'create')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix, json=args.json)
    if config.force_32bit and plan.is_root_prefix(prefix):
        common.error_and_exit("cannot use CONDA_FORCE_32BIT=1 in root env")

    # 'conda update' with no spec source at all is an error.
    if command == 'update':
        if not args.file:
            if not args.all and len(args.packages) == 0:
                common.error_and_exit("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix,
                                      json=args.json,
                                      error_type="ValueError")

    # Updating named packages: each must be a bare name already installed.
    if command == 'update' and not args.all:
        linked = ci.linked(prefix)
        for name in args.packages:
            common.arg2spec(name, json=args.json)
            if '=' in name:
                common.error_and_exit("Invalid package name: '%s'" % (name),
                                      json=args.json,
                                      error_type="ValueError")
            if name not in set(ci.name_dist(d) for d in linked):
                common.error_and_exit("package '%s' is not installed in %s" %
                                      (name, prefix),
                                      json=args.json,
                                      error_type="ValueError")

    if newenv and not args.no_default_packages:
        default_packages = config.create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in config.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)
    else:
        default_packages = []

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    # Collect the requested specs from --file / --all / positional args.
    specs = []
    if args.file:
        for fpath in args.file:
            specs.extend(common.specs_from_url(fpath, json=args.json))
    elif getattr(args, 'all', False):
        linked = ci.linked(prefix)
        if not linked:
            common.error_and_exit("There are no packages installed in the "
                                  "prefix %s" % prefix)
        for pkg in linked:
            name, ver, build = pkg.rsplit('-', 2)
            if name in getattr(args, '_skip', ['anaconda']):
                continue
            if name == 'python' and ver.startswith('2'):
                # Oh Python 2...
                specs.append('%s >=%s,<3' % (name, ver))
            else:
                specs.append('%s' % name)
    specs.extend(common.specs_from_args(args.packages, json=args.json))

    if command == 'install' and args.revision:
        get_revision(args.revision, json=args.json)
    elif not (newenv and args.clone):
        common.check_specs(prefix, specs, json=args.json,
                           create=(command == 'create'))

    # Installing local .tar.bz2 files directly: either all arguments are
    # conda package files, or none are.
    num_cp = sum(s.endswith('.tar.bz2') for s in args.packages)
    if num_cp:
        if num_cp == len(args.packages):
            depends = misc.install_local_packages(prefix, args.packages,
                                                  verbose=not args.quiet)
            if args.no_deps:
                depends = []
            specs = list(set(depends))
            args.unknown = True
        else:
            common.error_and_exit(
                "cannot mix specifications with conda package filenames",
                json=args.json,
                error_type="ValueError")

    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            depends = install_tar(prefix, tar_path,
                                  verbose=not args.quiet)
            if args.no_deps:
                depends = []
            specs = list(set(depends))
            args.unknown = True

    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit(
                "you need to have 'conda-build >= 1.7.1' installed"
                " to use the --use-local option",
                json=args.json,
                error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        if exists(croot):
            channel_urls = [url_path(croot)] + list(channel_urls)

    index = common.get_index_trap(channel_urls=channel_urls,
                                  prepend=not args.override_channels,
                                  use_cache=args.use_index_cache,
                                  unknown=args.unknown,
                                  json=args.json,
                                  offline=args.offline)

    if newenv and args.clone:
        if set(args.packages) - set(default_packages):
            common.error_and_exit('did not expect any arguments for --clone',
                                  json=args.json,
                                  error_type="ValueError")
        clone(args.clone, prefix, json=args.json, quiet=args.quiet,
              index=index)
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)
        return

    # Don't update packages that are already up-to-date
    if command == 'update' and not (args.all or args.force):
        r = Resolve(index)
        orig_packages = args.packages[:]
        for name in orig_packages:
            # NOTE(review): installed_metadata is loop-invariant and could
            # be hoisted above the for; left as-is here.
            installed_metadata = [ci.is_linked(prefix, dist)
                                  for dist in linked]
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            build_inst = [m['build_number'] for m in installed_metadata
                          if m['name'] == name]
            try:
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
            except AssertionError as e:
                if args.json:
                    common.exception_and_exit(e, json=True)
                else:
                    raise
            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]
            if (latest.version == vers_inst[0] and
                    latest.build_number == build_inst[0]):
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import print_packages
            if not args.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(
                    message='All requested packages already installed.')
            return

    if args.force:
        args.no_deps = True

    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                common.error_and_exit("Error: could not create directory: %s" % prefix,
                                      json=args.json,
                                      error_type="OSError")
        else:
            common.error_and_exit("""\
environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix,
                                  json=args.json,
                                  error_type="NoEnvironmentFound")

    try:
        if command == 'install' and args.revision:
            actions = plan.revert_actions(prefix, get_revision(args.revision))
        else:
            with common.json_progress_bars(json=args.json and not args.quiet):
                actions = plan.install_actions(prefix, index, specs,
                                               force=args.force,
                                               only_names=only_names,
                                               pinned=args.pinned,
                                               minimal_hint=args.alt_hint,
                                               update_deps=args.update_deps)
            # Rewrite LINK instructions to copy instead of hard/soft link.
            if config.always_copy or args.copy:
                new_link = []
                for pkg in actions["LINK"]:
                    dist, pkgs_dir, lt = inst.split_linkarg(pkg)
                    lt = ci.LINK_COPY
                    new_link.append("%s %s %d" % (dist, pkgs_dir, lt))
                actions["LINK"] = new_link
    except NoPackagesFound as e:
        error_message = e.args[0]

        if command == 'update' and args.all:
            # Packages not found here just means they were installed but
            # cannot be found any more. Just skip them.
            if not args.json:
                print("Warning: %s, skipping" % error_message)
            else:
                # Not sure what to do here
                pass
            args._skip = getattr(args, '_skip', ['anaconda'])
            for pkg in e.pkgs:
                p = pkg.split()[0]
                if p in args._skip:
                    # Avoid infinite recursion. This can happen if a spec
                    # comes from elsewhere, like --file
                    raise
                args._skip.append(p)

            return install(args, parser, command=command)
        else:
            # Suggest close matches and anaconda.org search hints.
            packages = {index[fn]['name'] for fn in index}

            for pkg in e.pkgs:
                close = get_close_matches(pkg, packages, cutoff=0.7)
                if close:
                    error_message += ("\n\nDid you mean one of these?"
                                      "\n\n %s" % (', '.join(close)))
                error_message += '\n\nYou can search for this package on anaconda.org with'
                error_message += '\n\n anaconda search -t conda %s' % pkg
                if len(e.pkgs) > 1:
                    # Note this currently only happens with dependencies not found
                    error_message += '\n\n (and similarly for the other packages)'

                if not find_executable('anaconda', include_others=False):
                    error_message += '\n\nYou may need to install the anaconda-client command line client with'
                    error_message += '\n\n conda install anaconda-client'

            pinned_specs = plan.get_pinned_specs(prefix)
            if pinned_specs:
                error_message += "\n\nNote that you have pinned specs in %s:" % join(prefix, 'conda-meta', 'pinned')
                error_message += "\n\n %r" % pinned_specs

            common.error_and_exit(error_message, json=args.json)
    except SystemExit as e:
        # Unsatisfiable package specifications/no such revision/import error
        error_type = 'UnsatisfiableSpecifications'
        if e.args and 'could not import' in e.args[0]:
            error_type = 'ImportError'
        common.exception_and_exit(e, json=args.json, newline=True,
                                  error_text=False,
                                  error_type=error_type)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import print_packages
        if not args.json:
            regex = '^(%s)$' % '|'.join(spec_names)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(
                message='All requested packages already installed.')
        return

    if not args.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        plan.display_actions(actions, index,
                             show_channel_urls=args.show_channel_urls)

    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    if not args.json:
        common.confirm_yn(args)
    elif args.dry_run:
        common.stdout_json_success(actions=actions, dry_run=True)
        sys.exit(0)

    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
            # update --all is not recorded in history (specs are synthetic).
            if not (command == 'update' and args.all):
                try:
                    with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                        f.write('# %s specs: %s\n' % (command, specs))
                except IOError as e:
                    if e.errno == errno.EACCES:
                        log.debug("Can't write the history file")
                    else:
                        raise
        except RuntimeError as e:
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            common.exception_and_exit(e, error_type=error_type, json=args.json)
        except SystemExit as e:
            common.exception_and_exit(e, json=args.json)

    if newenv:
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)

    if args.json:
        common.stdout_json_success(actions=actions)
def explicit(specs, prefix, verbose=False, force_extract=True, fetch_args=None):
    """Install an explicit list of package URLs or file paths (the
    ``@EXPLICIT`` format emitted by ``conda list --explicit``) into *prefix*.

    For each spec, decides whether the package needs (re)fetching and
    (re)extraction based on the local caches and an optional ``#md5``
    suffix, unlinks any same-named installed package, links the new one,
    verifies MD5s against the fetched repodata, executes the plan, and
    returns the actions dict.
    """
    actions = defaultdict(list)
    actions['PREFIX'] = prefix
    actions[
        'op_order'] = RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT, UNLINK, LINK
    linked = {install.name_dist(dist): dist for dist in install.linked(prefix)}
    fetch_args = fetch_args or {}
    index = {}
    verifies = []   # (filename, md5) pairs to check after repodata arrives
    channels = {}   # channel url -> (schannel, priority) for fetch_index
    for spec in specs:
        if spec == '@EXPLICIT':
            continue

        # Format: (url|path)(:#md5)?
        m = url_pat.match(spec)
        if m is None:
            sys.exit('Could not parse explicit URL: %s' % spec)
        url, md5 = m.group('url') + '/' + m.group('fn'), m.group('md5')
        if not is_url(url):
            if not isfile(url):
                sys.exit('Error: file not found: %s' % url)
            url = utils.url_path(url)
        url_p, fn = url.rsplit('/', 1)

        # See if the URL refers to a package in our cache
        # NOTE: from here on, the local name `prefix` is reused as the
        # *channel-name* prefix for fn — the environment prefix was
        # already captured in actions['PREFIX'] above.
        prefix = pkg_path = dir_path = None
        if url_p.startswith('file://'):
            prefix = install.cached_url(url)

        # If not, determine the channel name from the URL
        if prefix is None:
            _, schannel = url_channel(url)
            prefix = '' if schannel == 'defaults' else schannel + '::'
        fn = prefix + fn
        dist = fn[:-8]

        pkg_path = install.is_fetched(dist)
        dir_path = install.is_extracted(dist)

        # Don't re-fetch unless there is an MD5 mismatch
        if pkg_path and (md5 and md5_file(pkg_path) != md5):
            # This removes any extracted copies as well
            actions[RM_FETCHED].append(dist)
            pkg_path = dir_path = None

        # Don't re-extract unless forced, or if we can't check the md5
        if dir_path and (force_extract or md5 and not pkg_path):
            actions[RM_EXTRACTED].append(dist)
            dir_path = None

        if not dir_path:
            if not pkg_path:
                # A same-named tarball from another channel may occupy the
                # cache slot; evict it before fetching.
                _, conflict = install.find_new_location(dist)
                if conflict:
                    actions[RM_FETCHED].append(conflict)
                channels[url_p + '/'] = (schannel, 0)
                actions[FETCH].append(dist)
                verifies.append((dist + '.tar.bz2', md5))
            actions[EXTRACT].append(dist)

        # unlink any installed package with that name
        name = install.name_dist(dist)
        if name in linked:
            actions[UNLINK].append(linked[name])
        actions[LINK].append(dist)

    # Pull the repodata for channels we are using
    if channels:
        index.update(fetch_index(channels, **fetch_args))

    # Finish the MD5 verification
    for fn, md5 in verifies:
        info = index.get(fn)
        if info is None:
            sys.exit("Error: no package '%s' in index" % fn)
        if md5 and 'md5' not in info:
            sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
        if md5 and info['md5'] != md5:
            sys.exit('MD5 mismatch for: %s\n spec: %s\n repo: %s' %
                     (fn, md5, info['md5']))

    execute_actions(actions, index=index, verbose=verbose)
    return actions
continue shutil.copyfile(src_path, dst_path) <<<<<<< HEAD force_extract_and_link(dists, prefix, verbose=verbose) ======= actions = defaultdict(list) actions['PREFIX'] = [prefix] actions['op_order'] = RM_EXTRACTED, EXTRACT, UNLINK, LINK for dist in dists: actions[RM_EXTRACTED].append(dist) actions[EXTRACT].append(dist) if install.is_linked(prefix, dist): actions[UNLINK].append(dist) actions[LINK].append(dist) execute_actions(actions, verbose=verbose) depends = [] for dist in dists: try: with open(join(pkgs_dir, dist, 'info', 'index.json')) as fi: meta = json.load(fi) depends.extend(meta['depends']) except (IOError, KeyError): continue print('depends: %r' % depends) return depends >>>>>>> conda/feature/instruction-arguments def environment_for_conda_environment(prefix=config.root_dir):
def create_env(prefix, specs, clear_cache=True, debug=False):
    '''
    Create a conda environment for the given prefix and specs.

    :param prefix: target prefix directory for the new environment
    :param specs: iterable of package specs to install into it
    :param clear_cache: when True, rebuild the package index instead of
        reusing a cached one (forwarded to ``get_build_index``)
    :param debug: when True, raise all conda/conda-build loggers to
        DEBUG and run plan execution verbosely
    '''
    # The same set of logger names is tuned up or down as a group instead
    # of ten copy-pasted getLogger/setLevel pairs.
    log_names = ("conda", "binstar", "install", "conda.install", "fetch",
                 "print", "progress", "dotupdate", "stdoutlog", "requests")
    if debug:
        for name in log_names:
            logging.getLogger(name).setLevel(logging.DEBUG)
    else:
        # This squelches a ton of conda output that is not hugely relevant
        for name in log_names:
            # The install loggers are the noisiest, so they are forced all
            # the way down to ERROR rather than WARN.
            level = (logging.ERROR if name in ("install", "conda.install")
                     else logging.WARN)
            logging.getLogger(name).setLevel(level)

    specs = list(specs)
    # Tracked features are requested via the 'name@' spec syntax.
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)

    # Make sure every local build dir exists and carries a repodata index.
    for d in config.bldpkgs_dirs:
        if not isdir(d):
            os.makedirs(d)
        update_index(d)

    if specs:  # Don't waste time if there is nothing to do
        # FIXME: stupid hack to put test prefix on PATH so that runtime
        # libs can be found
        old_path = os.environ['PATH']
        os.environ['PATH'] = prepend_bin_path(os.environ.copy(), prefix,
                                              True)['PATH']

        # BUG FIX: honor the clear_cache argument — previously it was
        # ignored and the index was always refetched.
        index = get_build_index(clear_cache=clear_cache)
        warn_on_old_conda_build(index)

        cc.pkgs_dirs = cc.pkgs_dirs[:1]
        actions = plan.install_actions(prefix, index, specs)
        plan.display_actions(actions, index)
        try:
            plan.execute_actions(actions, index, verbose=debug)
        except SystemExit as exc:
            # str(exc), not exc.message: BaseException.message is Python 2
            # only (removed per PEP 352).
            msg = str(exc)
            if "too short in" in msg and config.prefix_length > 80:
                log.warn("Build prefix failed with prefix length {0}.".format(
                    config.prefix_length))
                log.warn("Error was: ")
                log.warn(msg)
                log.warn(
                    "One or more of your package dependencies needs to be rebuilt with a "
                    "longer prefix length.")
                log.warn(
                    "Falling back to legacy prefix length of 80 characters.")
                log.warn(
                    "Your package will not install into prefixes longer than 80 characters."
                )
                config.prefix_length = 80
                # Retry the whole environment creation with the short prefix.
                create_env(prefix, specs, clear_cache=clear_cache, debug=debug)
            else:
                # BUG FIX: any unrelated SystemExit was previously swallowed
                # silently, hiding real failures; re-raise it instead.
                raise
        os.environ['PATH'] = old_path

    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)
    if on_win:
        shell = "cmd.exe"
    else:
        shell = "bash"
    symlink_conda(prefix, sys.prefix, shell)
def install(args, parser, command='install'):
    """
    Shared implementation of ``conda install``, ``conda update`` and
    ``conda create`` (older variant; the active subcommand is passed
    via *command*).

    Flow: validate arguments -> collect specs (--file, --all, positional
    names, or local .tar.bz2/.tar files) -> build the channel index ->
    compute an action plan -> display/confirm -> execute, recording the
    specs in the prefix's history file. May recurse during
    ``update --all`` to skip packages missing from the index
    (tracked via ``args._skip``).
    """
    newenv = bool(command == 'create')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix, json=args.json)
    if config.force_32bit and plan.is_root_prefix(prefix):
        common.error_and_exit("cannot use CONDA_FORCE_32BIT=1 in root env")

    # Argument validation specific to 'conda update'.
    if command == 'update':
        if args.all:
            if args.packages:
                common.error_and_exit("""--all cannot be used with packages""",
                                      json=args.json,
                                      error_type="ValueError")
        elif not args.file:
            if len(args.packages) == 0:
                common.error_and_exit("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix,
                                      json=args.json,
                                      error_type="ValueError")

    # Named updates must be bare names of already-installed packages.
    # (With --all, args.packages is empty so this loop is a no-op.)
    if command == 'update':
        linked = ci.linked(prefix)
        for name in args.packages:
            common.arg2spec(name, json=args.json)
            if '=' in name:
                common.error_and_exit("Invalid package name: '%s'" % (name),
                                      json=args.json,
                                      error_type="ValueError")
            if name not in set(ci.name_dist(d) for d in linked):
                common.error_and_exit("package '%s' is not installed in %s" %
                                      (name, prefix),
                                      json=args.json,
                                      error_type="ValueError")

    if newenv and not args.no_default_packages:
        default_packages = config.create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in config.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)
    else:
        default_packages = []

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    # Collect the requested specs from --file / --all / positional args.
    specs = []
    if args.file:
        for fpath in args.file:
            specs.extend(common.specs_from_url(fpath, json=args.json))
    elif getattr(args, 'all', False):
        linked = ci.linked(prefix)
        if not linked:
            common.error_and_exit("There are no packages installed in the "
                                  "prefix %s" % prefix)
        for pkg in linked:
            name, ver, build = pkg.rsplit('-', 2)
            if name in getattr(args, '_skip', ['anaconda']):
                continue
            if name == 'python' and ver.startswith('2'):
                # Oh Python 2...
                specs.append('%s >=%s,<3' % (name, ver))
            else:
                specs.append('%s' % name)
    specs.extend(common.specs_from_args(args.packages, json=args.json))

    if command == 'install' and args.revision:
        get_revision(args.revision, json=args.json)
    elif not (newenv and args.clone):
        common.check_specs(prefix, specs, json=args.json,
                           create=(command == 'create'))

    # Installing local .tar.bz2 files directly: either all arguments are
    # conda package files, or none are.
    num_cp = sum(s.endswith('.tar.bz2') for s in args.packages)
    if num_cp:
        if num_cp == len(args.packages):
            depends = misc.install_local_packages(prefix, args.packages,
                                                  verbose=not args.quiet)
            if args.no_deps:
                depends = []
            specs = list(set(depends))
            args.unknown = True
        else:
            common.error_and_exit(
                "cannot mix specifications with conda package filenames",
                json=args.json,
                error_type="ValueError")

    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            depends = install_tar(prefix, tar_path,
                                  verbose=not args.quiet)
            if args.no_deps:
                depends = []
            specs = list(set(depends))
            args.unknown = True

    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit(
                "you need to have 'conda-build >= 1.7.1' installed"
                " to use the --use-local option",
                json=args.json,
                error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        if exists(croot):
            channel_urls = [url_path(croot)] + list(channel_urls)

    index = common.get_index_trap(channel_urls=channel_urls,
                                  prepend=not args.override_channels,
                                  use_cache=args.use_index_cache,
                                  unknown=args.unknown,
                                  json=args.json,
                                  offline=args.offline)

    if newenv and args.clone:
        if set(args.packages) - set(default_packages):
            common.error_and_exit('did not expect any arguments for --clone',
                                  json=args.json,
                                  error_type="ValueError")
        clone(args.clone, prefix, json=args.json, quiet=args.quiet,
              index=index)
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)
        return

    # Don't update packages that are already up-to-date
    if command == 'update' and not (args.all or args.force):
        r = Resolve(index)
        orig_packages = args.packages[:]
        for name in orig_packages:
            # NOTE(review): installed_metadata is loop-invariant and could
            # be hoisted above the for; left as-is here.
            installed_metadata = [ci.is_linked(prefix, dist)
                                  for dist in linked]
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            build_inst = [m['build_number'] for m in installed_metadata
                          if m['name'] == name]
            try:
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
            except AssertionError as e:
                if args.json:
                    common.exception_and_exit(e, json=True)
                else:
                    raise
            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]
            if (latest.version == vers_inst[0] and
                    latest.build_number == build_inst[0]):
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import print_packages
            if not args.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(
                    message='All requested packages already installed.')
            return

    if args.force:
        args.no_deps = True

    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                common.error_and_exit("Error: could not create directory: %s" % prefix,
                                      json=args.json,
                                      error_type="OSError")
        else:
            common.error_and_exit("""\
environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix,
                                  json=args.json,
                                  error_type="NoEnvironmentFound")

    try:
        if command == 'install' and args.revision:
            actions = plan.revert_actions(prefix, get_revision(args.revision))
        else:
            with common.json_progress_bars(json=args.json and not args.quiet):
                actions = plan.install_actions(prefix, index, specs,
                                               force=args.force,
                                               only_names=only_names,
                                               pinned=args.pinned,
                                               minimal_hint=args.alt_hint)
            # Rewrite LINK instructions to copy instead of hard/soft link.
            if args.copy:
                new_link = []
                for pkg in actions["LINK"]:
                    dist, pkgs_dir, lt = inst.split_linkarg(pkg)
                    lt = ci.LINK_COPY
                    new_link.append("%s %s %d" % (dist, pkgs_dir, lt))
                actions["LINK"] = new_link
    except NoPackagesFound as e:
        error_message = e.args[0]

        if command == 'update' and args.all:
            # Packages not found here just means they were installed but
            # cannot be found any more. Just skip them.
            if not args.json:
                print("Warning: %s, skipping" % error_message)
            else:
                # Not sure what to do here
                pass
            args._skip = getattr(args, '_skip', ['anaconda'])
            for pkg in e.pkgs:
                p = pkg.split()[0]
                if p in args._skip:
                    # Avoid infinite recursion. This can happen if a spec
                    # comes from elsewhere, like --file
                    raise
                args._skip.append(p)

            return install(args, parser, command=command)
        else:
            # Suggest close matches and anaconda.org search hints.
            packages = {index[fn]['name'] for fn in index}

            for pkg in e.pkgs:
                close = get_close_matches(pkg, packages, cutoff=0.7)
                if close:
                    error_message += ("\n\nDid you mean one of these?"
                                      "\n\n %s" % (', '.join(close)))
                error_message += '\n\nYou can search for this package on anaconda.org with'
                error_message += '\n\n anaconda search -t conda %s' % pkg
                if len(e.pkgs) > 1:
                    # Note this currently only happens with dependencies not found
                    error_message += '\n\n (and similarly for the other packages)'

                if not find_executable('anaconda', include_others=False):
                    error_message += '\n\nYou may need to install the anaconda-client command line client with'
                    error_message += '\n\n conda install anaconda-client'

            pinned_specs = plan.get_pinned_specs(prefix)
            if pinned_specs:
                error_message += "\n\nNote that you have pinned specs in %s:" % join(prefix, 'conda-meta', 'pinned')
                error_message += "\n\n %r" % pinned_specs

            common.error_and_exit(error_message, json=args.json)
    except SystemExit as e:
        # Unsatisfiable package specifications/no such revision/import error
        error_type = 'UnsatisfiableSpecifications'
        if e.args and 'could not import' in e.args[0]:
            error_type = 'ImportError'
        common.exception_and_exit(e, json=args.json, newline=True,
                                  error_text=False,
                                  error_type=error_type)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import print_packages
        if not args.json:
            regex = '^(%s)$' % '|'.join(spec_names)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(
                message='All requested packages already installed.')
        return

    if not args.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    if not args.json:
        common.confirm_yn(args)
    elif args.dry_run:
        common.stdout_json_success(actions=actions, dry_run=True)
        sys.exit(0)

    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
            # update --all is not recorded in history (specs are synthetic).
            if not (command == 'update' and args.all):
                try:
                    with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                        f.write('# %s specs: %s\n' % (command, specs))
                except IOError as e:
                    if e.errno == errno.EACCES:
                        log.debug("Can't write the history file")
                    else:
                        raise
        except RuntimeError as e:
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            common.exception_and_exit(e, error_type=error_type, json=args.json)
        except SystemExit as e:
            common.exception_and_exit(e, json=args.json)

    if newenv:
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)

    if args.json:
        common.stdout_json_success(actions=actions)
def execute(args, parser):
    """Entry point for ``conda remove`` (older variant that reports
    errors through ``error_and_exit`` rather than raising conda
    exceptions).

    Builds an unlink plan for the requested packages or features (or for
    every linked package with ``--all``), shows/confirms it, executes it,
    appends to the prefix's history file, and deletes the prefix
    directory itself when ``--all`` was given.
    """
    import conda.plan as plan
    import conda.instructions as inst
    from conda.install import rm_rf, linked

    if not (args.all or args.package_names):
        error_and_exit(
            'no package names supplied,\n'
            ' try "conda remove -h" for more details',
            json=args.json,
            error_type="ValueError")

    prefix = get_prefix(args)
    # Removing the environment we are currently running from would pull
    # the rug out from under this very process.
    if args.all and prefix == default_prefix:
        msg = "cannot remove current environment. deactivate and run conda remove again"
        error_and_exit(msg)
    check_write('remove', prefix, json=args.json)
    ensure_use_local(args)
    ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    if not args.features and args.all:
        # --all needs no repodata at all here; the plan is built purely
        # from what is linked into the prefix.
        index = {}
    else:
        index = get_index_trap(channel_urls=channel_urls,
                               prepend=not args.override_channels,
                               use_local=args.use_local,
                               use_cache=args.use_index_cache,
                               json=args.json,
                               offline=args.offline, prefix=prefix)

    # specs stays None unless individual packages were named; it gates the
    # history-file write at the bottom.
    specs = None
    if args.features:
        features = set(args.package_names)
        actions = plan.remove_features_actions(prefix, index, features)
    elif args.all:
        if plan.is_root_prefix(prefix):
            error_and_exit(
                'cannot remove root environment,\n'
                ' add -n NAME or -p PREFIX option',
                json=args.json,
                error_type="CantRemoveRoot")
        actions = {inst.PREFIX: prefix}
        for dist in sorted(linked(prefix)):
            plan.add_unlink(actions, dist)
    else:
        specs = specs_from_args(args.package_names)
        # Protect packages conda itself depends on in the root env.
        if (plan.is_root_prefix(prefix) and
                names_in_specs(root_no_rm, specs)):
            error_and_exit('cannot remove %s from root environment' %
                           ', '.join(root_no_rm),
                           json=args.json,
                           error_type="CantRemoveFromRoot")
        actions = plan.remove_actions(prefix, specs, index=index,
                                      force=args.force, pinned=args.pinned)

    if plan.nothing_to_do(actions):
        if args.all:
            # Empty environment: just delete the directory tree.
            rm_rf(prefix)
            if args.json:
                stdout_json({'success': True, 'actions': actions})
            return
        error_and_exit('no packages found to remove from '
                       'environment: %s' % prefix,
                       json=args.json,
                       error_type="PackageNotInstalled")

    if not args.json:
        print()
        print("Package plan for package removal in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if args.json and args.dry_run:
        stdout_json({'success': True, 'dry_run': True, 'actions': actions})
        return

    if not args.json:
        confirm_yn(args)

    if args.json and not args.quiet:
        with json_progress_bars():
            plan.execute_actions(actions, index, verbose=not args.quiet)
    else:
        plan.execute_actions(actions, index, verbose=not args.quiet)

    if specs:
        # Best-effort history append; a read-only prefix is not fatal.
        try:
            with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                f.write('# remove specs: %s\n' % specs)
        except IOError as e:
            if e.errno == errno.EACCES:
                log.debug("Can't write the history file")
            else:
                raise

    if args.all:
        rm_rf(prefix)

    if args.json:
        stdout_json({'success': True, 'actions': actions})
def execute(args, parser): from conda import config, plan from conda.install import linked, rm_rf prefix = common.get_prefix(args) if plan.is_root_prefix(prefix): common.error_and_exit( 'cannot remove root environment,\n' ' add -n NAME or -p PREFIX option', json=args.json, error_type="CantRemoveRoot") if prefix == config.default_prefix: # FIXME The way the "name" is determined now is handled by # looking at the basename of the prefix. This is brittle # and underlines a use-case for an Environment object that # is capable of providing a name attribute. common.error_and_exit( textwrap.dedent(""" Conda cannot remove the current environment. Please deactivate and run conda env remove again with the name specified. conda env remove --name %s """ % basename(prefix)).lstrip()) # TODO Why do we need an index for removing packages? index = common.get_index_trap(json=args.json) actions = {plan.PREFIX: prefix, plan.UNLINK: sorted(linked(prefix))} if plan.nothing_to_do(actions): # TODO Should this automatically remove even *before* confirmation? # TODO Should this display an error when removing something that # doesn't exist? rm_rf(prefix) if args.json: common.stdout_json({'success': True, 'actions': actions}) return if args.json and args.dry_run: common.stdout_json({ 'success': True, 'dry_run': True, 'actions': actions }) return if not args.json: print() print("Remove the following packages in environment %s:" % prefix) plan.display_actions(actions, index) common.confirm_yn(args) plan.execute_actions(actions, index, verbose=not args.quiet) rm_rf(prefix) if args.json: common.stdout_json({'success': True, 'actions': actions})
if args.json and args.dry_run: common.stdout_json({ 'success': True, 'dry_run': True, 'actions': actions }) return if not args.json: common.confirm_yn(args) if args.json and not args.quiet: with json_progress_bars(): plan.execute_actions(actions, index, verbose=not args.quiet) else: plan.execute_actions(actions, index, verbose=not args.quiet) if specs: try: with open(join(prefix, 'conda-meta', 'history'), 'a') as f: f.write('# remove specs: %s\n' % specs) except IOError as e: if e.errno == errno.EACCES: log.debug("Can't write the history file") else: raise if args.all: rm_rf(prefix)
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create

    Shared entry point for the three commands; ``command`` selects the
    behavior.  Flow: validate arguments -> build/choose the package index ->
    drop already-up-to-date packages (update only) -> compute an action plan
    -> confirm -> execute.  Exits via common.error_and_exit on any
    validation failure.
    """
    newenv = bool(command == 'create')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix, json=args.json)

    if command == 'update':
        if args.all:
            if args.packages:
                common.error_and_exit("""--all cannot be used with packages""",
                                      json=args.json,
                                      error_type="ValueError")
        else:
            if len(args.packages) == 0:
                common.error_and_exit("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix,
                                      json=args.json,
                                      error_type="ValueError")

    if command == 'update':
        # For update, every named package must already be installed and
        # must not carry a version constraint ('=').
        linked = ci.linked(prefix)
        for name in args.packages:
            common.arg2spec(name, json=args.json)
            if '=' in name:
                common.error_and_exit("Invalid package name: '%s'" % (name),
                                      json=args.json,
                                      error_type="ValueError")
            if name not in set(ci.name_dist(d) for d in linked):
                common.error_and_exit("package '%s' is not installed in %s" %
                                      (name, prefix),
                                      json=args.json,
                                      error_type="ValueError")

    if newenv and args.clone:
        # --clone copies an existing environment and returns early.
        if args.packages:
            common.error_and_exit('did not expect any arguments for --clone',
                                  json=args.json,
                                  error_type="ValueError")
        clone(args.clone, prefix, json=args.json, quiet=args.quiet)
        touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)
        return

    if newenv and not args.no_default_packages:
        default_packages = config.create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in config.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    specs = []
    if args.file:
        specs.extend(common.specs_from_url(args.file, json=args.json))
    elif getattr(args, 'all', False):
        # `conda update --all`: build a spec per linked package from its
        # dist string 'name-version-build'.
        linked = ci.linked(prefix)
        for pkg in linked:
            name, ver, build = pkg.rsplit('-', 2)
            if name == 'python' and ver.startswith('2'):
                # Oh Python 2...
                specs.append('%s >=%s,<3' % (name, ver))
            else:
                specs.append('%s >=%s' % (name, ver))
    specs.extend(common.specs_from_args(args.packages, json=args.json))

    if command == 'install' and args.revision:
        get_revision(args.revision, json=args.json)
    else:
        common.check_specs(prefix, specs, json=args.json)

    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build import config as build_config
        except ImportError:
            common.error_and_exit(
                "you need to have 'conda-build' installed"
                " to use the --use-local option",
                json=args.json,
                error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        index = common.get_index_trap([url_path(build_config.croot)],
                                      use_cache=args.use_index_cache,
                                      unknown=args.unknown,
                                      json=args.json)
    else:
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      unknown=args.unknown,
                                      json=args.json)

    # Don't update packages that are already up-to-date
    if command == 'update' and not args.all:
        r = Resolve(index)
        orig_packages = args.packages[:]
        for name in orig_packages:
            # NOTE(review): installed_metadata is loop-invariant (it does not
            # depend on `name`) and is rebuilt on every iteration; a later
            # revision of this function hoists it out of the loop.
            installed_metadata = [ci.is_linked(prefix, dist)
                                  for dist in linked]
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            build_inst = [m['build_number'] for m in installed_metadata
                          if m['name'] == name]
            try:
                # Exactly one installed version/build is expected per name.
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
            except AssertionError as e:
                if args.json:
                    common.exception_and_exit(e, json=True)
                else:
                    raise
            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]
            if latest.version == vers_inst[0] and \
               latest.build_number == build_inst[0]:
                # Already at the newest version+build: drop from the request.
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import print_packages
            if not args.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(
                    message='All requested packages already installed.')
            return

    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            install_tar(prefix, tar_path, verbose=not args.quiet)
            return

    # handle explicit installs of conda packages
    if args.packages and all(s.endswith('.tar.bz2') for s in args.packages):
        from conda.misc import install_local_packages
        install_local_packages(prefix, args.packages, verbose=not args.quiet)
        return

    if any(s.endswith('.tar.bz2') for s in args.packages):
        common.error_and_exit(
            "cannot mix specifications with conda package filenames",
            json=args.json,
            error_type="ValueError")

    if args.force:
        # --force implies no dependency resolution.
        args.no_deps = True

    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                common.error_and_exit("Error: could not create directory: %s" % prefix,
                                      json=args.json,
                                      error_type="OSError")
        else:
            common.error_and_exit("""\
environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix,
                                  json=args.json,
                                  error_type="NoEnvironmentFound")

    try:
        if command == 'install' and args.revision:
            actions = plan.revert_actions(prefix, get_revision(args.revision))
        else:
            actions = plan.install_actions(prefix, index, specs,
                                           force=args.force,
                                           only_names=only_names,
                                           pinned=args.pinned,
                                           minimal_hint=args.alt_hint)
    except NoPackagesFound as e:
        # Offer close name matches and a Binstar search hint for every
        # package the resolver could not find.
        error_message = e.args[0]
        packages = {index[fn]['name'] for fn in index}
        for pkg in e.pkgs:
            close = get_close_matches(pkg, packages)
            if close:
                error_message += "\n\nDid you mean one of these?\n %s" % (
                    ', '.join(close))
            error_message += '\n\nYou can search for this package on Binstar with'
            error_message += '\n\n binstar search -t conda %s' % pkg
            error_message += '\n\nYou may need to install the Binstar command line client with'
            error_message += '\n\n conda install binstar'
        common.error_and_exit(error_message, json=args.json)
    except SystemExit as e:
        # Unsatisfiable package specifications/no such revision/import error
        error_type = 'UnsatisfiableSpecifications'
        if e.args and 'could not import' in e.args[0]:
            error_type = 'ImportError'
        common.exception_and_exit(e, json=args.json, newline=True,
                                  error_text=False,
                                  error_type=error_type)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import print_packages
        if not args.json:
            regex = '^(%s)$' % '|'.join(spec_names)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(
                message='All requested packages already installed.')
        return

    if not args.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    if not args.json:
        # Interactive: let pscheck (running-process check) gate confirmation.
        if not pscheck.main(args):
            common.confirm_yn(args)
    else:
        if (sys.platform == 'win32' and not args.force_pscheck and
                not pscheck.check_processes(verbose=False)):
            common.error_and_exit("Cannot continue operation while processes "
                                  "from packages are running without "
                                  "--force-pscheck.",
                                  json=True,
                                  error_type="ProcessesStillRunning")
        elif args.dry_run:
            common.stdout_json_success(actions=actions, dry_run=True)
            sys.exit(0)

    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
        except RuntimeError as e:
            # A "LOCKERROR" in the message means another conda holds the lock.
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            common.exception_and_exit(e, error_type=error_type, json=args.json)
        except SystemExit as e:
            common.exception_and_exit(e, json=args.json)

    if newenv:
        touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)

    if args.json:
        common.stdout_json_success(actions=actions)
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create

    Later revision of the shared entry point.  Differences from the older
    variant visible in this file include: multiple ``--file`` arguments,
    ``@EXPLICIT`` spec files, ``--copy``/``--update-deps``/``--offline``
    support, a recursive retry that skips packages missing from the index
    during ``update --all``, and history-file logging after execution.
    """
    newenv = bool(command == 'create')
    isupdate = bool(command == 'update')
    isinstall = bool(command == 'install')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix, json=args.json)
    if force_32bit and plan.is_root_prefix(prefix):
        common.error_and_exit("cannot use CONDA_FORCE_32BIT=1 in root env")

    if isupdate and not (args.file or args.all or args.packages):
        common.error_and_exit("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix,
                              json=args.json,
                              error_type="ValueError")

    linked = ci.linked(prefix)
    # Names of all packages currently linked into the prefix.
    lnames = {ci.name_dist(d) for d in linked}

    if isupdate and not args.all:
        for name in args.packages:
            common.arg2spec(name, json=args.json, update=True)
            if name not in lnames:
                common.error_and_exit("Package '%s' is not installed in %s" %
                                      (name, prefix),
                                      json=args.json,
                                      error_type="ValueError")

    if newenv and not args.no_default_packages:
        default_packages = create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)
    else:
        default_packages = []

    common.ensure_use_local(args)
    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    specs = []
    if args.file:
        for fpath in args.file:
            specs.extend(common.specs_from_url(fpath, json=args.json))
        if '@EXPLICIT' in specs:
            # An explicit spec file lists exact package URLs: install
            # directly, no solver involved.
            misc.explicit(specs, prefix, verbose=not args.quiet)
            return
    elif getattr(args, 'all', False):
        if not linked:
            common.error_and_exit("There are no packages installed in the "
                                  "prefix %s" % prefix)
        specs.extend(nm for nm in lnames)
    specs.extend(common.specs_from_args(args.packages, json=args.json))

    if isinstall and args.revision:
        get_revision(args.revision, json=args.json)
    elif not (newenv and args.clone):
        common.check_specs(prefix, specs, json=args.json,
                           create=(command == 'create'))

    # .tar.bz2 arguments are explicit conda packages: all-or-nothing.
    num_cp = sum(s.endswith('.tar.bz2') for s in args.packages)
    if num_cp:
        if num_cp == len(args.packages):
            misc.explicit(args.packages, prefix, verbose=not args.quiet)
            return
        else:
            common.error_and_exit(
                "cannot mix specifications with conda package filenames",
                json=args.json,
                error_type="ValueError")

    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            install_tar(prefix, tar_path, verbose=not args.quiet)
            return

    if newenv and args.clone:
        if set(args.packages) - set(default_packages):
            common.error_and_exit('did not expect any arguments for --clone',
                                  json=args.json,
                                  error_type="ValueError")
        clone(args.clone, prefix, json=args.json, quiet=args.quiet,
              fetch_args={'use_cache': args.use_index_cache,
                          'unknown': args.unknown})
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)
        return

    index = common.get_index_trap(channel_urls=channel_urls,
                                  prepend=not args.override_channels,
                                  use_local=args.use_local,
                                  use_cache=args.use_index_cache,
                                  unknown=args.unknown,
                                  json=args.json,
                                  offline=args.offline,
                                  prefix=prefix)
    r = Resolve(index)
    # Keep the original (pre-defaults) specs for reporting below.
    ospecs = list(specs)
    plan.add_defaults_to_specs(r, linked, specs, update=isupdate)

    # Don't update packages that are already up-to-date
    if isupdate and not (args.all or args.force):
        orig_packages = args.packages[:]
        installed_metadata = [ci.is_linked(prefix, dist) for dist in linked]
        for name in orig_packages:
            vers_inst = [m['version'] for m in installed_metadata
                         if m['name'] == name]
            build_inst = [m['build_number'] for m in installed_metadata
                          if m['name'] == name]
            try:
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
            except AssertionError as e:
                if args.json:
                    common.exception_and_exit(e, json=True)
                else:
                    raise
            pkgs = sorted(r.get_pkgs(name))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]
            if (latest.version == vers_inst[0] and
                    latest.build_number == build_inst[0]):
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import print_packages
            if not args.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(
                    message='All requested packages already installed.')
            return

    if args.force:
        args.no_deps = True

    if args.no_deps:
        only_names = set(s.split()[0] for s in ospecs)
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                common.error_and_exit("Error: could not create directory: %s" % prefix,
                                      json=args.json,
                                      error_type="OSError")
        else:
            common.error_and_exit("""\
environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix,
                                  json=args.json,
                                  error_type="NoEnvironmentFound")

    try:
        if isinstall and args.revision:
            actions = plan.revert_actions(prefix, get_revision(args.revision))
        else:
            with common.json_progress_bars(json=args.json and not args.quiet):
                actions = plan.install_actions(prefix, index, specs,
                                               force=args.force,
                                               only_names=only_names,
                                               pinned=args.pinned,
                                               always_copy=args.copy,
                                               minimal_hint=args.alt_hint,
                                               update_deps=args.update_deps)
    except NoPackagesFound as e:
        error_message = e.args[0]

        if isupdate and args.all:
            # Packages not found here just means they were installed but
            # cannot be found any more. Just skip them.
            if not args.json:
                print("Warning: %s, skipping" % error_message)
            else:
                # Not sure what to do here
                pass
            # Track skipped names across recursive calls on args itself.
            args._skip = getattr(args, '_skip', ['anaconda'])
            for pkg in e.pkgs:
                p = pkg.split()[0]
                if p in args._skip:
                    # Avoid infinite recursion. This can happen if a spec
                    # comes from elsewhere, like --file
                    raise
                args._skip.append(p)

            return install(args, parser, command=command)
        else:
            # Suggest close name matches and an anaconda.org search hint.
            packages = {index[fn]['name'] for fn in index}
            nfound = 0
            for pkg in sorted(e.pkgs):
                pkg = pkg.split()[0]
                if pkg in packages:
                    continue
                close = get_close_matches(pkg, packages, cutoff=0.7)
                if not close:
                    continue
                if nfound == 0:
                    error_message += "\n\nClose matches found; did you mean one of these?\n"
                error_message += "\n %s: %s" % (pkg, ', '.join(close))
                nfound += 1
            error_message += '\n\nYou can search for packages on anaconda.org with'
            error_message += '\n\n anaconda search -t conda %s' % pkg
            if len(e.pkgs) > 1:
                # Note this currently only happens with dependencies not found
                error_message += '\n\n(and similarly for the other packages)'

            if not find_executable('anaconda', include_others=False):
                error_message += '\n\nYou may need to install the anaconda-client'
                error_message += ' command line client with'
                error_message += '\n\n conda install anaconda-client'

            pinned_specs = plan.get_pinned_specs(prefix)
            if pinned_specs:
                path = join(prefix, 'conda-meta', 'pinned')
                error_message += "\n\nNote that you have pinned specs in %s:" % path
                error_message += "\n\n %r" % pinned_specs

            common.error_and_exit(error_message, json=args.json)
    except (Unsatisfiable, SystemExit) as e:
        # Unsatisfiable package specifications/no such revision/import error
        error_type = 'UnsatisfiableSpecifications'
        if e.args and 'could not import' in e.args[0]:
            error_type = 'ImportError'
        common.exception_and_exit(e, json=args.json, newline=True,
                                  error_text=False,
                                  error_type=error_type)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import print_packages
        if not args.json:
            regex = '^(%s)$' % '|'.join(s.split()[0] for s in ospecs)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(
                message='All requested packages already installed.')
        return

    if not args.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        plan.display_actions(actions, index,
                             show_channel_urls=args.show_channel_urls)

    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    if not args.json:
        common.confirm_yn(args)
    elif args.dry_run:
        common.stdout_json_success(actions=actions, dry_run=True)
        sys.exit(0)

    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
            if not (command == 'update' and args.all):
                # Record the request in the environment's history file;
                # tolerate read-only environments (EACCES).
                try:
                    with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                        f.write('# %s specs: %s\n' % (command, specs))
                except IOError as e:
                    if e.errno == errno.EACCES:
                        log.debug("Can't write the history file")
                    else:
                        raise
        except RuntimeError as e:
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            common.exception_and_exit(e, error_type=error_type, json=args.json)
        except SystemExit as e:
            common.exception_and_exit(e, json=args.json)

    if newenv:
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)

    if args.json:
        common.stdout_json_success(actions=actions)
def execute(args, parser):
    """Entry point for ``conda remove`` (context-based revision).

    Three modes: ``--features`` removes tracked features, ``--all`` unlinks
    every package and deletes the prefix, otherwise the named package specs
    are removed.  Raises Conda* exceptions instead of calling error_and_exit.
    """
    import conda.plan as plan
    import conda.instructions as inst
    from conda.gateways.disk.delete import rm_rf
    from conda.core.linked_data import linked_data

    if not (args.all or args.package_names):
        raise CondaValueError('no package names supplied,\n'
                              ' try "conda remove -h" for more details')

    prefix = context.prefix_w_legacy_search
    if args.all and prefix == context.default_prefix:
        msg = "cannot remove current environment. deactivate and run conda remove again"
        raise CondaEnvironmentError(msg)
    check_write('remove', prefix, json=context.json)
    ensure_use_local(args)
    ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()
    if not args.features and args.all:
        # For a plain --all we only need what is already linked locally;
        # no channel fetch required.
        index = linked_data(prefix)
        index = {dist: info for dist, info in iteritems(index)}
    else:
        index = get_index(channel_urls=channel_urls,
                          prepend=not args.override_channels,
                          use_local=args.use_local,
                          use_cache=args.use_index_cache,
                          prefix=prefix)
    specs = None
    if args.features:
        features = set(args.package_names)
        actions = plan.remove_features_actions(prefix, index, features)
    elif args.all:
        if plan.is_root_prefix(prefix):
            raise CondaEnvironmentError('cannot remove root environment,\n'
                                        ' add -n NAME or -p PREFIX option')
        actions = {inst.PREFIX: prefix}
        for dist in sorted(iterkeys(index)):
            plan.add_unlink(actions, dist)
    else:
        specs = specs_from_args(args.package_names)
        # import pdb; pdb.set_trace()
        # Protect core packages in the root environment unless --force.
        if (context.conda_in_root and plan.is_root_prefix(prefix) and
                names_in_specs(ROOT_NO_RM, specs) and not args.force):
            raise CondaEnvironmentError('cannot remove %s from root environment' %
                                        ', '.join(ROOT_NO_RM))
        actions = plan.remove_actions(prefix, specs, index=index,
                                      force=args.force, pinned=args.pinned)

    delete_trash()
    if plan.nothing_to_do(actions):
        if args.all:
            print("\nRemove all packages in environment %s:\n" % prefix,
                  file=sys.stderr)
            if not context.json:
                confirm_yn(args)
            rm_rf(prefix)

            if context.json:
                stdout_json({
                    'success': True,
                    'actions': actions
                })
            return
        raise PackageNotFoundError('', 'no packages found to remove from '
                                   'environment: %s' % prefix)

    if not context.json:
        print()
        print("Package plan for package removal in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if context.json and args.dry_run:
        stdout_json({
            'success': True,
            'dry_run': True,
            'actions': actions
        })
        return

    if not context.json:
        confirm_yn(args)

    if context.json and not context.quiet:
        with json_progress_bars():
            plan.execute_actions(actions, index, verbose=not context.quiet)
    else:
        plan.execute_actions(actions, index, verbose=not context.quiet)

    if specs:
        # Best-effort history append; read-only envs are tolerated.
        try:
            with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                f.write('# remove specs: %s\n' % ','.join(specs))
        except IOError as e:
            if e.errno == errno.EACCES:
                log.debug("Can't write the history file")
            else:
                raise
    if args.all:
        rm_rf(prefix)

    if context.json:
        stdout_json({
            'success': True,
            'actions': actions
        })
def execute(args, parser):
    """Entry point for ``conda remove``: plan a removal, confirm, execute.

    Supports three modes — ``--features``, ``--all`` (wipe the environment),
    or removal of the named package specs — and honors ``--json``,
    ``--dry-run`` and ``--quiet``.
    """
    import sys
    import conda.plan as plan
    from conda.cli import pscheck
    from conda.install import rm_rf, linked
    from conda import config

    # Guard clause: something to remove must have been named.
    if not (args.all or args.package_names):
        common.error_and_exit('no package names supplied,\n'
                              ' try "conda remove -h" for more details',
                              json=args.json,
                              error_type="ValueError")

    env_prefix = common.get_prefix(args)
    common.check_write('remove', env_prefix, json=args.json)
    common.ensure_override_channels_requires_channel(args, json=args.json)

    channels = args.channel or ()
    pkg_index = common.get_index_trap(channel_urls=channels,
                                      use_cache=args.use_index_cache,
                                      prepend=not args.override_channels,
                                      json=args.json)

    # Build the removal plan according to the requested mode.
    if args.features:
        requested_features = set(args.package_names)
        removal_plan = plan.remove_features_actions(env_prefix, pkg_index,
                                                    requested_features)
    elif args.all:
        if plan.is_root_prefix(env_prefix):
            common.error_and_exit('cannot remove root environment,\n'
                                  ' add -n NAME or -p PREFIX option',
                                  json=args.json,
                                  error_type="CantRemoveRoot")
        removal_plan = {plan.PREFIX: env_prefix,
                        plan.UNLINK: sorted(linked(env_prefix))}
    else:
        requested_specs = common.specs_from_args(args.package_names)
        root_protected = (plan.is_root_prefix(env_prefix) and
                          common.names_in_specs(common.root_no_rm,
                                                requested_specs))
        if root_protected:
            common.error_and_exit('cannot remove %s from root environment' %
                                  ', '.join(common.root_no_rm),
                                  json=args.json,
                                  error_type="CantRemoveFromRoot")
        removal_plan = plan.remove_actions(env_prefix, requested_specs,
                                           pinned=args.pinned)

    if plan.nothing_to_do(removal_plan):
        if args.all:
            # Nothing linked, but --all still deletes the directory itself.
            rm_rf(env_prefix)
            if args.json:
                common.stdout_json({'success': True,
                                    'actions': removal_plan})
            return
        common.error_and_exit('no packages found to remove from '
                              'environment: %s' % env_prefix,
                              json=args.json,
                              error_type="PackageNotInstalled")

    if not args.json:
        print()
        print("Package plan for package removal in environment %s:" % env_prefix)
        plan.display_actions(removal_plan, pkg_index)

    if args.json and args.dry_run:
        common.stdout_json({'success': True,
                            'dry_run': True,
                            'actions': removal_plan})
        return

    if not args.json:
        # Interactive mode: pscheck gates the confirmation prompt.
        if not pscheck.main(args):
            common.confirm_yn(args)
    elif (sys.platform == 'win32' and not args.force_pscheck and
            not pscheck.check_processes(verbose=False)):
        common.error_and_exit("Cannot continue removal while processes "
                              "from packages are running without --force-pscheck.",
                              json=True,
                              error_type="ProcessesStillRunning")

    show_bars = args.json and not args.quiet
    if show_bars:
        with json_progress_bars():
            plan.execute_actions(removal_plan, pkg_index,
                                 verbose=not args.quiet)
    else:
        plan.execute_actions(removal_plan, pkg_index, verbose=not args.quiet)

    if args.all:
        rm_rf(env_prefix)

    if args.json:
        common.stdout_json({'success': True, 'actions': removal_plan})
def execute(args, parser):
    """Entry point for ``conda remove`` (variant with ``--use-local``).

    Builds the package index (optionally including the local conda-build
    repo), computes removal actions for ``--features`` / ``--all`` / named
    specs, confirms, and executes.  Honors ``--json``, ``--dry-run`` and
    ``--quiet``.
    """
    import sys
    import conda.plan as plan
    from conda.cli import pscheck
    from conda.install import rm_rf, linked
    from conda import config

    if not (args.all or args.package_names):
        common.error_and_exit('no package names supplied,\n'
                              ' try "conda remove -h" for more details',
                              json=args.json,
                              error_type="ValueError")

    prefix = common.get_prefix(args)
    if args.all and prefix == config.default_prefix:
        # FIX: pass json=args.json so --json callers get a JSON-formatted
        # error, consistent with every other error_and_exit call here.
        common.error_and_exit(
            "cannot remove current environment. deactivate and run conda remove again",
            json=args.json,
            error_type="CantRemoveCurrent")
    common.check_write('remove', prefix, json=args.json)
    common.ensure_override_channels_requires_channel(args, json=args.json)
    channel_urls = args.channel or ()
    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit(
                "you need to have 'conda-build >= 1.7.1' installed"
                " to use the --use-local option",
                json=args.json,
                error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        index = common.get_index_trap(channel_urls=[url_path(croot)] + list(channel_urls),
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      json=args.json)
    else:
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      json=args.json)

    if args.features:
        features = set(args.package_names)
        actions = plan.remove_features_actions(prefix, index, features)
    elif args.all:
        if plan.is_root_prefix(prefix):
            common.error_and_exit('cannot remove root environment,\n'
                                  ' add -n NAME or -p PREFIX option',
                                  json=args.json,
                                  error_type="CantRemoveRoot")
        actions = {plan.PREFIX: prefix,
                   plan.UNLINK: sorted(linked(prefix))}
    else:
        specs = common.specs_from_args(args.package_names)
        # Protect conda's own core packages in the root environment.
        if (plan.is_root_prefix(prefix) and
                common.names_in_specs(common.root_no_rm, specs)):
            common.error_and_exit('cannot remove %s from root environment' %
                                  ', '.join(common.root_no_rm),
                                  json=args.json,
                                  error_type="CantRemoveFromRoot")
        actions = plan.remove_actions(prefix, specs, index=index,
                                      pinned=args.pinned)

    if plan.nothing_to_do(actions):
        if args.all:
            # --all still deletes the directory even when nothing is linked.
            rm_rf(prefix)
            if args.json:
                common.stdout_json({'success': True, 'actions': actions})
            return
        common.error_and_exit('no packages found to remove from '
                              'environment: %s' % prefix,
                              json=args.json,
                              error_type="PackageNotInstalled")

    if not args.json:
        print()
        print("Package plan for package removal in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if args.json and args.dry_run:
        common.stdout_json({
            'success': True,
            'dry_run': True,
            'actions': actions
        })
        return

    if not args.json:
        # Interactive mode: pscheck gates the confirmation prompt.
        if not pscheck.main(args):
            common.confirm_yn(args)
    elif (sys.platform == 'win32' and not args.force_pscheck and
            not pscheck.check_processes(verbose=False)):
        common.error_and_exit("Cannot continue removal while processes "
                              "from packages are running without --force-pscheck.",
                              json=True,
                              error_type="ProcessesStillRunning")

    if args.json and not args.quiet:
        with json_progress_bars():
            plan.execute_actions(actions, index, verbose=not args.quiet)
    else:
        plan.execute_actions(actions, index, verbose=not args.quiet)

    if args.all:
        rm_rf(prefix)

    if args.json:
        common.stdout_json({'success': True, 'actions': actions})
def execute(args, parser):
    """Entry point for ``conda remove`` (variant with history logging).

    Like the sibling variants: builds an index (optionally including the
    local conda-build repo when it exists), computes removal actions for
    ``--features`` / ``--all`` / named specs, confirms, executes, and
    appends removed specs to the environment history file.
    """
    import conda.plan as plan
    import conda.instructions as inst
    from conda.install import rm_rf, linked
    from conda import config

    if not (args.all or args.package_names):
        common.error_and_exit('no package names supplied,\n'
                              ' try "conda remove -h" for more details',
                              json=args.json,
                              error_type="ValueError")

    prefix = common.get_prefix(args)
    if args.all and prefix == config.default_prefix:
        # NOTE(review): unlike every other error_and_exit call in this
        # function, this one does not pass json=args.json, so in --json mode
        # the error is presumably not emitted as JSON — confirm and fix.
        common.error_and_exit(
            "cannot remove current environment. deactivate and run conda remove again"
        )
    common.check_write('remove', prefix, json=args.json)
    common.ensure_override_channels_requires_channel(args, json=args.json)
    channel_urls = args.channel or ()
    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit(
                "you need to have 'conda-build >= 1.7.1' installed"
                " to use the --use-local option",
                json=args.json,
                error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        # Only prepend the local build channel if it actually exists on disk.
        if exists(croot):
            channel_urls = [url_path(croot)] + list(channel_urls)
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      json=args.json,
                                      offline=args.offline)
    else:
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      json=args.json,
                                      offline=args.offline)
    specs = None
    if args.features:
        features = set(args.package_names)
        actions = plan.remove_features_actions(prefix, index, features)
    elif args.all:
        if plan.is_root_prefix(prefix):
            common.error_and_exit('cannot remove root environment,\n'
                                  ' add -n NAME or -p PREFIX option',
                                  json=args.json,
                                  error_type="CantRemoveRoot")
        actions = {inst.PREFIX: prefix}
        for dist in sorted(linked(prefix)):
            plan.add_unlink(actions, dist)
    else:
        specs = common.specs_from_args(args.package_names)
        # Protect conda's own core packages in the root environment.
        if (plan.is_root_prefix(prefix) and
                common.names_in_specs(common.root_no_rm, specs)):
            common.error_and_exit('cannot remove %s from root environment' %
                                  ', '.join(common.root_no_rm),
                                  json=args.json,
                                  error_type="CantRemoveFromRoot")
        actions = plan.remove_actions(prefix, specs, index=index,
                                      pinned=args.pinned)

    if plan.nothing_to_do(actions):
        if args.all:
            rm_rf(prefix)
            if args.json:
                common.stdout_json({'success': True, 'actions': actions})
            return
        common.error_and_exit('no packages found to remove from '
                              'environment: %s' % prefix,
                              json=args.json,
                              error_type="PackageNotInstalled")

    if not args.json:
        print()
        print("Package plan for package removal in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if args.json and args.dry_run:
        common.stdout_json({
            'success': True,
            'dry_run': True,
            'actions': actions
        })
        return

    if not args.json:
        common.confirm_yn(args)

    if args.json and not args.quiet:
        with json_progress_bars():
            plan.execute_actions(actions, index, verbose=not args.quiet)
    else:
        plan.execute_actions(actions, index, verbose=not args.quiet)

    if specs:
        # Best-effort history append; read-only envs (EACCES) are tolerated.
        try:
            with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                f.write('# remove specs: %s\n' % specs)
        except IOError as e:
            if e.errno == errno.EACCES:
                log.debug("Can't write the history file")
            else:
                raise

    if args.all:
        rm_rf(prefix)

    if args.json:
        common.stdout_json({'success': True, 'actions': actions})