def create_env(prefix, specs, clear_cache=True, verbose=True,
               channel_urls=(), override_channels=False):
    '''Create a conda environment for the given prefix and specs.

    Parameters
    ----------
    prefix : str
        Target environment directory; created even when ``specs`` is empty.
    specs : list of str
        Package specs to install; an empty list still creates the prefix.
    clear_cache : bool
        When True, drop ``fetch_index``'s cache so the local build channel
        (which changes as packages are built) is re-read from disk.
    verbose : bool
        Passed through to ``plan.execute_actions``.
    channel_urls : iterable of str
        Extra channels appended after the local build channel.
    override_channels : bool
        When True, do not prepend the default channels (``prepend=False``).
    '''
    # Make sure the local build directory exists and its repodata is fresh
    # before it is used as a channel below.
    if not isdir(config.bldpkgs_dir):
        os.makedirs(config.bldpkgs_dir)
    update_index(config.bldpkgs_dir)
    if specs:  # Don't waste time if there is nothing to do
        if clear_cache:
            # remove the cache such that a refetch is made,
            # this is necessary because we add the local build repo URL
            fetch_index.cache = {}
        # The local build root is always the first channel consulted.
        index = get_index(channel_urls=[url_path(config.croot)] + list(channel_urls),
                          prepend=not override_channels)
        warn_on_old_conda_build(index)
        # NOTE(review): truncating to the first pkgs dir presumably pins the
        # package cache used for the build env — confirm against conda docs.
        cc.pkgs_dirs = cc.pkgs_dirs[:1]
        actions = plan.install_actions(prefix, index, specs)
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=verbose)
    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)
def get_build_index(clear_cache=True, channel_urls=(), override_channels=False):
    """Return a package index with the local build channel first.

    When ``clear_cache`` is true the fetch cache is dropped so the local
    build repo URL is re-read rather than served stale.
    """
    if clear_cache:
        # Drop the cache to force a refetch; required because the local
        # build repo contents change between calls.
        fetch_index.cache = {}
    urls = [url_path(config.croot)]
    urls.extend(channel_urls)
    return get_index(channel_urls=urls, prepend=not override_channels)
def build_missing_metas(metas):
    """Build, in dependency order, each meta not already in the local index."""
    metas = sort_dependency_order(metas)
    if os.path.exists(conda_build.config.config.bldpkgs_dir):
        # Index only what has already been built locally.
        index = conda.api.get_index([url_path(conda_build.config.config.croot)],
                                    prepend=False)
    else:
        # No local builds exist yet, so nothing can be "already built".
        index = {}
    already_builts = []
    needs_building = []
    for meta in metas:
        # A meta counts as built when its tarball name appears in the index.
        if '{}.tar.bz2'.format(meta.dist()) in index:
            already_builts.append(meta)
        else:
            needs_building.append(meta)
    print('-' * 60)
    if already_builts:
        print('Packages which are already built:\n ' + '\n '.join(meta.name() for meta in already_builts))
    if needs_building:
        print('Packages which will be built (in order):\n ' + '\n '.join(meta.name() for meta in needs_building))
    print('-' * 60)
    for meta in needs_building:
        build(meta, test=True)
def get_build_index(clear_cache=True, channel_urls=(), override_channels=False):
    """Return a package index with the local build channel first.

    Parameters
    ----------
    clear_cache : bool
        When True, drop ``fetch_index``'s cache so the local build repo URL
        (whose contents change between builds) is re-read.
    channel_urls : iterable of str
        Extra channel URLs appended after the local build channel.
    override_channels : bool
        When True, do not prepend the default channels.
    """
    if clear_cache:
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
    # BUG FIX: channel_urls and override_channels were referenced here but
    # were never defined (the signature only had clear_cache), so every call
    # raised NameError.  They are now keyword parameters with backward-
    # compatible defaults, matching the fuller variant of this function.
    return get_index(channel_urls=[url_path(config.croot)] + list(channel_urls),
                     prepend=not override_channels)
def test_tarball_install_and_bad_metadata(self):
    """Integration test: local-channel install, tarball installs by full and
    relative path, and tolerance of a stray bad conda-meta record.

    Covers regressions #2812, #2626 and #2599.
    """
    with make_temp_env("python flask=0.10.1") as prefix:
        assert_package_is_installed(prefix, 'flask-0.10.1')
        # Grab flask's linked-data record before removing it; it supplies
        # the cached tarball filename used by all later steps.
        flask_data = [
            p for p in itervalues(linked_data(prefix)) if p['name'] == 'flask'
        ][0]
        run_command(Commands.REMOVE, prefix, 'flask')
        assert not package_is_installed(prefix, 'flask-0.10.1')
        assert_package_is_installed(prefix, 'python')

        # Regression test for 2812
        # install from local channel
        from conda.config import pkgs_dirs
        flask_fname = flask_data['fn']
        tar_old_path = join(pkgs_dirs[0], flask_fname)
        # Strip channel-identifying fields so the record is channel-neutral
        # when served from the scratch local channel below.
        for field in ('url', 'channel', 'schannel'):
            del flask_data[field]
        repodata = {'info': {}, 'packages': {flask_fname: flask_data}}
        with make_temp_env() as channel:
            subchan = join(channel, subdir)
            channel = url_path(channel)
            os.makedirs(subchan)
            tar_new_path = join(subchan, flask_fname)
            copyfile(tar_old_path, tar_new_path)
            # Hand-write a minimal bz2-compressed repodata for the channel.
            with bz2.BZ2File(join(subchan, 'repodata.json.bz2'), 'w') as f:
                f.write(json.dumps(repodata).encode('utf-8'))
            run_command(Commands.INSTALL, prefix, '-c', channel, 'flask')
            assert_package_is_installed(prefix, channel + '::' + 'flask-')

        # regression test for #2626
        # install tarball with full path
        tar_new_path = join(prefix, flask_fname)
        copyfile(tar_old_path, tar_new_path)
        run_command(Commands.INSTALL, prefix, tar_new_path)
        assert_package_is_installed(prefix, 'flask-0')
        run_command(Commands.REMOVE, prefix, 'flask')
        assert not package_is_installed(prefix, 'flask-0')

        # regression test for #2626
        # install tarball with relative path
        tar_new_path = relpath(tar_new_path)
        run_command(Commands.INSTALL, prefix, tar_new_path)
        assert_package_is_installed(prefix, 'flask-0.')

        # regression test for #2599
        # A bogus conda-meta/flask.json must not mask the real record.
        linked_data_.clear()
        flask_metadata = glob(
            join(prefix, 'conda-meta', flask_fname[:-8] + '.json'))[-1]
        bad_metadata = join(prefix, 'conda-meta', 'flask.json')
        copyfile(flask_metadata, bad_metadata)
        assert not package_is_installed(prefix, 'flask', exact=True)
        assert_package_is_installed(prefix, 'flask-0.')
def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
    """
    Adds a new package to the cache. The URL is used to determine the
    package filename and channel, and the directory pdir is scanned for
    both a compressed and an extracted version of that package. If
    urlstxt=True, this URL will be appended to the urls.txt file in the
    cache, so that subsequent runs will correctly identify the package.
    """
    # Ensure the cache tables are initialized before mutating them.
    package_cache()
    # A bare filename (no '/') carries no channel info; drop the url.
    if '/' in url:
        dist = url.rsplit('/', 1)[-1]
    else:
        dist = url
        url = None
    # Normalize: fname keeps the .tar.bz2 suffix, dist does not.
    if dist.endswith('.tar.bz2'):
        fname = dist
        dist = dist[:-8]
    else:
        fname = dist + '.tar.bz2'
    xpkg = join(pdir, fname)
    if not overwrite and xpkg in fname_table_:
        return
    if not isfile(xpkg):
        xpkg = None
    # The extracted dir only counts if its info/ metadata is intact.
    xdir = join(pdir, dist)
    if not (isdir(xdir) and
            isfile(join(xdir, 'info', 'files')) and
            isfile(join(xdir, 'info', 'index.json'))):
        xdir = None
    # Neither a tarball nor an extracted tree: nothing to record.
    if not (xpkg or xdir):
        return
    if url:
        url = remove_binstar_tokens(url)
    # NOTE(review): url may be None here; assumes url_channel handles None
    # and yields the default channel — confirm against conda.utils.
    _, schannel = url_channel(url)
    prefix = '' if schannel == 'defaults' else schannel + '::'
    # Register both the filesystem path and its file:// URL form.
    xkey = xpkg or (xdir + '.tar.bz2')
    fname_table_[xkey] = fname_table_[url_path(xkey)] = prefix
    fkey = prefix + dist
    rec = package_cache_.get(fkey)
    if rec is None:
        rec = package_cache_[fkey] = dict(files=[], dirs=[], urls=[])
    if url and url not in rec['urls']:
        rec['urls'].append(url)
    if xpkg and xpkg not in rec['files']:
        rec['files'].append(xpkg)
    if xdir and xdir not in rec['dirs']:
        rec['dirs'].append(xdir)
    if urlstxt:
        # Best-effort append; a read-only cache dir is not an error.
        try:
            with open(join(pdir, 'urls.txt'), 'a') as fa:
                fa.write('%s\n' % url)
        except IOError:
            pass
def compute_source_indices(env_sources):
    """Generate a dictionary mapping source name to source index."""
    indices = {}
    for source_group in env_sources:
        for name in source_group:
            with conda_build_croot_for_source(name):
                # Default to an empty index when nothing has been built yet.
                built = {}
                if os.path.exists(conda_bld_config.bldpkgs_dir):
                    # Get hold of just the built packages.
                    built = conda.api.get_index([url_path(conda_bld_config.croot)],
                                                prepend=False)
                indices[name] = built
    return indices
def test_tarball_install_and_bad_metadata(self):
    """Integration test: local-channel install, tarball installs by full and
    relative path, and tolerance of a stray bad conda-meta record.

    Covers regressions #2812, #2626 and #2599.
    """
    with make_temp_env("python flask=0.10.1") as prefix:
        assert_package_is_installed(prefix, 'flask-0.10.1')
        # Grab flask's linked-data record before removing it; it supplies
        # the cached tarball filename used by all later steps.
        flask_data = [p for p in itervalues(linked_data(prefix))
                      if p['name'] == 'flask'][0]
        run_command(Commands.REMOVE, prefix, 'flask')
        assert not package_is_installed(prefix, 'flask-0.10.1')
        assert_package_is_installed(prefix, 'python')

        # Regression test for 2812
        # install from local channel
        from conda.config import pkgs_dirs
        flask_fname = flask_data['fn']
        tar_old_path = join(pkgs_dirs[0], flask_fname)
        # Strip channel-identifying fields so the record is channel-neutral
        # when served from the scratch local channel below.
        for field in ('url', 'channel', 'schannel'):
            del flask_data[field]
        repodata = {'info': {}, 'packages': {flask_fname: flask_data}}
        with make_temp_env() as channel:
            subchan = join(channel, subdir)
            channel = url_path(channel)
            os.makedirs(subchan)
            tar_new_path = join(subchan, flask_fname)
            copyfile(tar_old_path, tar_new_path)
            # Hand-write a minimal bz2-compressed repodata for the channel.
            with bz2.BZ2File(join(subchan, 'repodata.json.bz2'), 'w') as f:
                f.write(json.dumps(repodata).encode('utf-8'))
            run_command(Commands.INSTALL, prefix, '-c', channel, 'flask')
            assert_package_is_installed(prefix, channel + '::' + 'flask-')

        # regression test for #2626
        # install tarball with full path
        tar_new_path = join(prefix, flask_fname)
        copyfile(tar_old_path, tar_new_path)
        run_command(Commands.INSTALL, prefix, tar_new_path)
        assert_package_is_installed(prefix, 'flask-0')
        run_command(Commands.REMOVE, prefix, 'flask')
        assert not package_is_installed(prefix, 'flask-0')

        # regression test for #2626
        # install tarball with relative path
        tar_new_path = relpath(tar_new_path)
        run_command(Commands.INSTALL, prefix, tar_new_path)
        assert_package_is_installed(prefix, 'flask-0.')

        # regression test for #2599
        # A bogus conda-meta/flask.json must not mask the real record.
        linked_data_.clear()
        flask_metadata = glob(join(prefix, 'conda-meta',
                                   flask_fname[:-8] + '.json'))[-1]
        bad_metadata = join(prefix, 'conda-meta', 'flask.json')
        copyfile(flask_metadata, bad_metadata)
        assert not package_is_installed(prefix, 'flask', exact=True)
        assert_package_is_installed(prefix, 'flask-0.')
def create_env(pref, specs):
    """Create a conda environment at *pref* satisfying *specs*."""
    # Make sure the local build channel exists and is freshly indexed.
    if not isdir(bldpkgs_dir):
        os.makedirs(bldpkgs_dir)
    update_index(bldpkgs_dir)
    # Clear the fetch cache so the local build repo URL just indexed
    # above is re-read instead of served stale.
    fetch_index.cache = {}
    index = get_index([url_path(config.croot)])
    env_actions = plan.install_actions(pref, index, specs)
    plan.display_actions(env_actions, index)
    plan.execute_actions(env_actions, index, verbose=True)
    # Even an empty spec list must leave a prefix directory behind.
    if not isdir(pref):
        os.makedirs(pref)
def compute_source_indices(env_sources):
    """Generate a dictionary mapping source name to source index."""
    src_index = {}
    for sources in env_sources:
        for source_name in sources:
            # Each source has its own build root; switch into it for lookup.
            with conda_build_croot_for_source(source_name):
                if os.path.exists(conda_bld_config.bldpkgs_dir):
                    # Get hold of just the built packages.
                    src_urls = [url_path(conda_bld_config.croot)]
                    index = conda.api.get_index(src_urls, prepend=False)
                    src_index[source_name] = index
                else:
                    # NOTE(review): a missing build dir silently maps to an
                    # empty index; an alternative design would raise
                    # ValueError telling the caller to fetch sources first.
                    src_index[source_name] = {}
    return src_index
def rm_fetched(dist):
    """
    Checks to see if the requested package is in the cache; and if so, it
    removes both the package itself and its extracted contents.
    """
    record = package_cache().get(dist)
    if record is None:
        # Nothing cached under this dist name.
        return
    for path in record['files']:
        # Forget both lookup keys (path and file:// URL) before deleting
        # the tarball itself.
        del fname_table_[path]
        del fname_table_[url_path(path)]
        with Locked(dirname(path)):
            rm_rf(path)
    for path in record['dirs']:
        with Locked(dirname(path)):
            rm_rf(path)
    del package_cache_[dist]
def get_local_urls(clear_cache=True):
    """Return (and memoize in ``local_channel``) the local build channel URLs."""
    if clear_cache:
        # Invalidate the fetch cache so the local build repo URL is
        # re-read on the next index fetch.
        from conda.fetch import fetch_index
        fetch_index.cache = {}
    if not local_channel:
        # Populate the memo on first use, if conda-build is available.
        from os.path import exists
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            pass
        else:
            if exists(croot):
                local_channel.append(url_path(croot))
    return local_channel
def create_env(pref, specs, pypi=False):
    """Create a conda environment at *pref* satisfying *specs*.

    When *pypi* is true, specs unavailable as conda packages are first
    installed from PyPI and the remaining specs are returned for conda.
    """
    # Make sure the local build channel exists and is freshly indexed.
    if not isdir(bldpkgs_dir):
        os.makedirs(bldpkgs_dir)
    update_index(bldpkgs_dir)
    # Clear the fetch cache so the local build repo URL just indexed
    # above is re-read instead of served stale.
    fetch_index.cache = {}
    index = get_index([url_path(config.croot)])
    # Restrict the package cache to the first pkgs dir only.
    cc.pkgs_dirs = cc.pkgs_dirs[:1]
    if pypi:
        from conda.from_pypi import install_from_pypi
        specs = install_from_pypi(pref, index, specs)
    env_actions = plan.install_actions(pref, index, specs)
    plan.display_actions(env_actions, index)
    plan.execute_actions(env_actions, index, verbose=True)
    # Even an empty spec list must leave a prefix directory behind.
    if not isdir(pref):
        os.makedirs(pref)
def explicit(specs, prefix, verbose=False, force_extract=True, fetch_args=None):
    """Install an explicit list of package URLs/paths (an '@EXPLICIT' spec
    file) into *prefix*, building and executing a full action plan.

    Each spec is '(url|path)(#md5)?'; local paths are converted to file://
    URLs.  Returns the actions dict that was executed.
    """
    actions = defaultdict(list)
    actions['PREFIX'] = prefix
    actions['op_order'] = RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT, UNLINK, LINK
    # Currently linked packages, keyed by package name, for UNLINK planning.
    linked = {install.name_dist(dist): dist for dist in install.linked(prefix)}
    fetch_args = fetch_args or {}
    index = {}
    verifies = []   # (tarball filename, expected md5) pairs to check later
    channels = {}   # channel url -> (schannel, priority) for index fetch
    for spec in specs:
        if spec == '@EXPLICIT':
            continue
        # Format: (url|path)(:#md5)?
        m = url_pat.match(spec)
        if m is None:
            sys.exit('Could not parse explicit URL: %s' % spec)
        url, md5 = m.group('url') + '/' + m.group('fn'), m.group('md5')
        if not is_url(url):
            if not isfile(url):
                sys.exit('Error: file not found: %s' % url)
            url = utils.url_path(url)
        url_p, fn = url.rsplit('/', 1)
        # See if the URL refers to a package in our cache
        # (note: rebinds 'prefix' to the channel prefix from here on)
        prefix = pkg_path = dir_path = None
        if url_p.startswith('file://'):
            prefix = install.cached_url(url)
        # If not, determine the channel name from the URL
        if prefix is None:
            _, schannel = url_channel(url)
            prefix = '' if schannel == 'defaults' else schannel + '::'
        fn = prefix + fn
        dist = fn[:-8]
        pkg_path = install.is_fetched(dist)
        dir_path = install.is_extracted(dist)
        # Don't re-fetch unless there is an MD5 mismatch
        if pkg_path and (md5 and md5_file(pkg_path) != md5):
            # This removes any extracted copies as well
            actions[RM_FETCHED].append(dist)
            pkg_path = dir_path = None
        # Don't re-extract unless forced, or if we can't check the md5
        if dir_path and (force_extract or md5 and not pkg_path):
            actions[RM_EXTRACTED].append(dist)
            dir_path = None
        if not dir_path:
            if not pkg_path:
                _, conflict = install.find_new_location(dist)
                if conflict:
                    actions[RM_FETCHED].append(conflict)
                actions[FETCH].append(dist)
                if md5:
                    # Need to verify against the package index
                    verifies.append((dist + '.tar.bz2', md5))
                # NOTE(review): if 'prefix' came from install.cached_url()
                # above, 'schannel' was never assigned on this iteration and
                # this line would raise NameError — confirm whether the
                # cached-url path can coexist with a needed fetch.
                channels[url_p + '/'] = (schannel, 0)
            actions[EXTRACT].append(dist)
        # unlink any installed package with that name
        name = install.name_dist(dist)
        if name in linked:
            actions[UNLINK].append(linked[name])
        actions[LINK].append(dist)
    # Finish the MD5 verification
    if verifies:
        index = fetch_index(channels, **fetch_args)
        for fn, md5 in verifies:
            info = index.get(fn)
            if info is None:
                sys.exit("Error: no package '%s' in index" % fn)
            if 'md5' not in info:
                sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
            if info['md5'] != md5:
                sys.exit('MD5 mismatch for: %s\n spec: %s\n repo: %s' %
                         (fn, md5, info['md5']))
    execute_actions(actions, index=index, verbose=verbose)
    return actions
# Build the package index, optionally prepending the local conda-build
# channel when --use-local was given.
channel_urls = args.channel or ()
if args.use_local:
    from conda.fetch import fetch_index
    from conda.utils import url_path
    try:
        from conda_build.config import croot
    except ImportError:
        common.error_and_exit("you need to have 'conda-build >= 1.7.1' installed"
                              " to use the --use-local option",
                              json=args.json,
                              error_type="RuntimeError")
    # remove the cache such that a refetch is made,
    # this is necessary because we add the local build repo URL
    fetch_index.cache = {}
    # Only prepend the local channel if the build root actually exists.
    if exists(croot):
        channel_urls = [url_path(croot)] + list(channel_urls)
    index = common.get_index_trap(channel_urls=channel_urls,
                                  prepend=not args.override_channels,
                                  use_cache=args.use_index_cache,
                                  json=args.json,
                                  offline=args.offline)
else:
    index = common.get_index_trap(channel_urls=channel_urls,
                                  prepend=not args.override_channels,
                                  use_cache=args.use_index_cache,
                                  json=args.json,
                                  offline=args.offline)
specs = None
# --features removes by feature set rather than by package spec.
if args.features:
    features = set(args.package_names)
    actions = plan.remove_features_actions(prefix, index, features)
# Legacy Python 2 fragment (note the print statement below): for each
# environment, index every source's build directory and register those
# directories as conda channels, then collect and sort its recipes.
envs = load_envs(args.envs)
for env in envs:
    env_sources = env['sources']
    orig_build_root = conda_bld_config.croot
    channels = []
    for sources in env_sources:
        for source_name in sources:
            source_build_directory = conda_manifest.config.src_distributions_dir(source_name)
            # Each channel needs a platform subdir with a valid index.
            s = os.path.join(source_build_directory, conda.config.subdir)
            if not os.path.exists(s):
                os.makedirs(s)
            import conda_build.index
            conda_build.index.update_index(s)
            channels.append(url_path(source_build_directory))
    # Point conda's runtime channel config at the per-source channels.
    conda.config.rc['channels'] = channels
    print 'Channels:', channels
    env_recipe_dir = conda_manifest.config.env_recipes_dir(env=env)
    metas = list(find_all_recipes([env_recipe_dir]))
    stdoutlog.debug('Found the following recipes:\n{}\n-------------------'
                    ''.format('\n'.join(meta.name() for meta in metas)))
    metas = sort_dependency_order(metas)
    stdoutlog.debug('Metas sorted into the following order:\n{}\n---------'
                    ''.format('\n'.join(meta.name() for meta in metas)))
    src_index = {}
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create
    """
    newenv = bool(command == 'create')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix, json=args.json)
    # --- argument validation for 'update' -------------------------------
    if command == 'update':
        if args.all:
            if args.packages:
                common.error_and_exit("""--all cannot be used with packages""",
                                      json=args.json,
                                      error_type="ValueError")
        else:
            if len(args.packages) == 0:
                common.error_and_exit("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix,
                                      json=args.json,
                                      error_type="ValueError")
    if command == 'update':
        linked = ci.linked(prefix)
        for name in args.packages:
            common.arg2spec(name, json=args.json)
            # 'update' takes bare names, not version-pinned specs.
            if '=' in name:
                common.error_and_exit("Invalid package name: '%s'" % (name),
                                      json=args.json,
                                      error_type="ValueError")
            if name not in set(ci.name_dist(d) for d in linked):
                common.error_and_exit("package '%s' is not installed in %s" %
                                      (name, prefix),
                                      json=args.json,
                                      error_type="ValueError")
    # --- 'create --clone' short-circuits everything below ---------------
    if newenv and args.clone:
        if args.packages:
            common.error_and_exit('did not expect any arguments for --clone',
                                  json=args.json,
                                  error_type="ValueError")
        clone(args.clone, prefix, json=args.json, quiet=args.quiet)
        touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)
        return
    if newenv and not args.no_default_packages:
        default_packages = config.create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in config.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)
    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()
    # --- assemble the spec list -----------------------------------------
    specs = []
    if args.file:
        specs.extend(common.specs_from_url(args.file, json=args.json))
    elif getattr(args, 'all', False):
        linked = ci.linked(prefix)
        for pkg in linked:
            name, ver, build = pkg.rsplit('-', 2)
            if name == 'python' and ver.startswith('2'):
                # Oh Python 2...
                specs.append('%s >=%s,<3' % (name, ver))
            else:
                specs.append('%s >=%s' % (name, ver))
    specs.extend(common.specs_from_args(args.packages, json=args.json))
    if command == 'install' and args.revision:
        get_revision(args.revision, json=args.json)
    else:
        common.check_specs(prefix, specs, json=args.json)
    # --- build the package index (optionally with the local channel) ----
    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build import config as build_config
        except ImportError:
            common.error_and_exit(
                "you need to have 'conda-build' installed"
                " to use the --use-local option",
                json=args.json,
                error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        index = common.get_index_trap([url_path(build_config.croot)],
                                      use_cache=args.use_index_cache,
                                      unknown=args.unknown,
                                      json=args.json)
    else:
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      unknown=args.unknown,
                                      json=args.json)
    # Don't update packages that are already up-to-date
    if command == 'update' and not args.all:
        r = Resolve(index)
        orig_packages = args.packages[:]
        for name in orig_packages:
            installed_metadata = [ci.is_linked(prefix, dist) for dist in linked]
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            build_inst = [m['build_number'] for m in installed_metadata
                          if m['name'] == name]
            try:
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
            except AssertionError as e:
                if args.json:
                    common.exception_and_exit(e, json=True)
                else:
                    raise
            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]
            if latest.version == vers_inst[0] and latest.build_number == build_inst[0]:
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import print_packages
            if not args.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(
                    message='All requested packages already installed.')
            return
    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            install_tar(prefix, tar_path, verbose=not args.quiet)
            return
    # handle explicit installs of conda packages
    if args.packages and all(s.endswith('.tar.bz2') for s in args.packages):
        from conda.misc import install_local_packages
        install_local_packages(prefix, args.packages, verbose=not args.quiet)
        return
    if any(s.endswith('.tar.bz2') for s in args.packages):
        common.error_and_exit(
            "cannot mix specifications with conda package filenames",
            json=args.json,
            error_type="ValueError")
    # --force implies --no-deps
    if args.force:
        args.no_deps = True
    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None
    # --- make sure the target prefix exists -----------------------------
    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                common.error_and_exit("Error: could not create directory: %s" % prefix,
                                      json=args.json,
                                      error_type="OSError")
        else:
            common.error_and_exit("""\
environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix,
                                  json=args.json,
                                  error_type="NoEnvironmentFound")
    # --- compute the action plan ----------------------------------------
    try:
        if command == 'install' and args.revision:
            actions = plan.revert_actions(prefix, get_revision(args.revision))
        else:
            actions = plan.install_actions(prefix, index, specs,
                                           force=args.force,
                                           only_names=only_names,
                                           pinned=args.pinned,
                                           minimal_hint=args.alt_hint)
    except NoPackagesFound as e:
        error_message = e.args[0]
        packages = {index[fn]['name'] for fn in index}
        for pkg in e.pkgs:
            # Suggest close name matches and a Binstar search for each miss.
            close = get_close_matches(pkg, packages)
            if close:
                error_message += "\n\nDid you mean one of these?\n %s" % (
                    ', '.join(close))
            error_message += '\n\nYou can search for this package on Binstar with'
            error_message += '\n\n binstar search -t conda %s' % pkg
            error_message += '\n\nYou may need to install the Binstar command line client with'
            error_message += '\n\n conda install binstar'
        common.error_and_exit(error_message, json=args.json)
    except SystemExit as e:
        # Unsatisfiable package specifications/no such revision/import error
        error_type = 'UnsatisfiableSpecifications'
        if e.args and 'could not import' in e.args[0]:
            error_type = 'ImportError'
        common.exception_and_exit(e, json=args.json, newline=True,
                                  error_text=False,
                                  error_type=error_type)
    if plan.nothing_to_do(actions):
        from conda.cli.main_list import print_packages
        if not args.json:
            regex = '^(%s)$' % '|'.join(spec_names)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(
                message='All requested packages already installed.')
        return
    if not args.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        plan.display_actions(actions, index)
    if command in {'install', 'update'}:
        common.check_write(command, prefix)
    # --- confirm / dry-run / process checks -----------------------------
    if not args.json:
        if not pscheck.main(args):
            common.confirm_yn(args)
    else:
        if (sys.platform == 'win32' and not args.force_pscheck and
                not pscheck.check_processes(verbose=False)):
            common.error_and_exit("Cannot continue operation while processes "
                                  "from packages are running without --force-pscheck.",
                                  json=True,
                                  error_type="ProcessesStillRunning")
        elif args.dry_run:
            common.stdout_json_success(actions=actions, dry_run=True)
            sys.exit(0)
    # --- execute ---------------------------------------------------------
    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
        except RuntimeError as e:
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            common.exception_and_exit(e, error_type=error_type, json=args.json)
        except SystemExit as e:
            common.exception_and_exit(e, json=args.json)
    if newenv:
        touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)
    if args.json:
        common.stdout_json_success(actions=actions)
def execute_search(args, parser):
    """Implement 'conda search': list packages matching a regex or spec,
    optionally restricted to outdated/installed or reverse dependencies,
    printing either human-readable rows or JSON.
    """
    import re
    from conda.resolve import MatchSpec, Resolve
    if args.reverse_dependency:
        if not args.regex:
            parser.error("--reverse-dependency requires at least one package name")
        if args.spec:
            parser.error("--reverse-dependency does not work with --spec")
    # Either a compiled regex (pat) or a MatchSpec (ms) drives filtering.
    pat = None
    ms = None
    if args.regex:
        if args.spec:
            ms = MatchSpec(' '.join(args.regex.split('=')))
        else:
            regex = args.regex
            if args.full_name:
                regex = r'^%s$' % regex
            try:
                pat = re.compile(regex, re.I)
            except re.error as e:
                common.error_and_exit(
                    "'%s' is not a valid regex pattern (exception: %s)" % (regex, e),
                    json=args.json,
                    error_type="ValueError")
    prefix = common.get_prefix(args)
    import conda.config
    import conda.install
    linked = conda.install.linked(prefix)
    extracted = set()
    for pkgs_dir in conda.config.pkgs_dirs:
        extracted.update(conda.install.extracted(pkgs_dir))
    # XXX: Make this work with more than one platform
    platform = args.platform or ''
    if platform and platform != config.subdir:
        args.unknown = False
    common.ensure_override_channels_requires_channel(args, dashc=False,
                                                     json=args.json)
    channel_urls = args.channel or ()
    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit("you need to have 'conda-build >= 1.7.1' installed"
                                  " to use the --use-local option",
                                  json=args.json,
                                  error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        index = common.get_index_trap(channel_urls=[url_path(croot)] + list(channel_urls),
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      unknown=args.unknown,
                                      json=args.json,
                                      platform=args.platform,
                                      offline=args.offline)
    else:
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      platform=args.platform,
                                      use_cache=args.use_index_cache,
                                      unknown=args.unknown,
                                      json=args.json,
                                      offline=args.offline)
    r = Resolve(index)
    # NOTE: 'json' here deliberately shadows the module name; it is the
    # accumulator for --json output (list for --canonical, dict otherwise).
    if args.canonical:
        json = []
    else:
        json = {}
    # Collect (group name, packages) pairs to display.
    names = []
    for name in sorted(r.groups):
        if args.reverse_dependency:
            ms_name = ms
            # Match against each package's dependencies, not its own name.
            for pkg in r.groups[name]:
                for dep in r.ms_depends(pkg):
                    if pat.search(dep.name):
                        names.append((name, Package(pkg, r.index[pkg])))
        else:
            if pat and pat.search(name) is None:
                continue
            if ms and name != ms.name:
                continue
            if ms:
                ms_name = ms
            else:
                ms_name = MatchSpec(name)
            pkgs = sorted(r.get_pkgs(ms_name))
            names.append((name, pkgs))
    if args.reverse_dependency:
        # Group the per-package reverse-dependency hits back by name.
        new_names = []
        old = None
        for name, pkg in sorted(names, key=lambda x: (x[0], x[1].name, x[1])):
            if name == old:
                new_names[-1][1].append(pkg)
            else:
                new_names.append((name, [pkg]))
            old = name
        names = new_names
    for name, pkgs in names:
        if args.reverse_dependency:
            disp_name = pkgs[0].name
        else:
            disp_name = name
        if args.names_only and not args.outdated:
            print(name)
            continue
        if not args.canonical:
            json[name] = []
        if args.outdated:
            # Only show packages installed at a version older than latest.
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            if not vers_inst:
                continue
            assert len(vers_inst) == 1, name
            if not pkgs:
                continue
            latest = pkgs[-1]
            if latest.version == vers_inst[0]:
                continue
            if args.names_only:
                print(name)
                continue
        for pkg in pkgs:
            dist = pkg.fn[:-8]
            if args.canonical:
                if not args.json:
                    print(dist)
                else:
                    json.append(dist)
                continue
            # Installed-state marker: '*' linked, '.' extracted, ' ' neither.
            if platform and platform != config.subdir:
                inst = ' '
            elif dist in linked:
                inst = '*'
            elif dist in extracted:
                inst = '.'
            else:
                inst = ' '
            if not args.json:
                print('%-25s %s %-15s %15s %-15s %s' % (
                    disp_name, inst,
                    pkg.version,
                    pkg.build,
                    config.canonical_channel_name(pkg.channel),
                    common.disp_features(r.features(pkg.fn)),
                ))
                # Only print the group name on its first row.
                disp_name = ''
            else:
                data = {}
                data.update(pkg.info)
                data.update({
                    'fn': pkg.fn,
                    'installed': inst == '*',
                    'extracted': inst in '*.',
                    'version': pkg.version,
                    'build': pkg.build,
                    'build_number': pkg.build_number,
                    'channel': config.canonical_channel_name(pkg.channel),
                    'full_channel': pkg.channel,
                    'features': list(r.features(pkg.fn)),
                    'license': pkg.info.get('license'),
                    'size': pkg.info.get('size'),
                    'depends': pkg.info.get('depends'),
                    'type': pkg.info.get('type')
                })
                if data['type'] == 'app':
                    data['icon'] = make_icon_url(pkg.info)
                json[name].append(data)
    if args.json:
        common.stdout_json(json)
def execute(args, parser):
    """Implement 'conda remove': plan and execute removal of packages,
    features, or (with --all) an entire environment.
    """
    import sys
    import conda.plan as plan
    from conda.cli import pscheck
    from conda.install import rm_rf, linked
    from conda import config
    if not (args.all or args.package_names):
        common.error_and_exit('no package names supplied,\n'
                              ' try "conda remove -h" for more details',
                              json=args.json,
                              error_type="ValueError")
    prefix = common.get_prefix(args)
    # Refuse to delete the environment the user is currently inside.
    if args.all and prefix == config.default_prefix:
        common.error_and_exit("cannot remove current environment. deactivate and run conda remove again")
    common.check_write('remove', prefix, json=args.json)
    common.ensure_override_channels_requires_channel(args, json=args.json)
    channel_urls = args.channel or ()
    # --- build the package index (optionally with the local channel) ----
    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit(
                "you need to have 'conda-build >= 1.7.1' installed"
                " to use the --use-local option",
                json=args.json,
                error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        index = common.get_index_trap(channel_urls=[url_path(croot)] + list(channel_urls),
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      json=args.json)
    else:
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      json=args.json)
    # --- compute the removal plan ---------------------------------------
    if args.features:
        features = set(args.package_names)
        actions = plan.remove_features_actions(prefix, index, features)
    elif args.all:
        if plan.is_root_prefix(prefix):
            common.error_and_exit('cannot remove root environment,\n'
                                  ' add -n NAME or -p PREFIX option',
                                  json=args.json,
                                  error_type="CantRemoveRoot")
        # Unlink everything currently in the prefix.
        actions = {plan.PREFIX: prefix,
                   plan.UNLINK: sorted(linked(prefix))}
    else:
        specs = common.specs_from_args(args.package_names)
        # Protect essential packages in the root environment.
        if (plan.is_root_prefix(prefix) and
                common.names_in_specs(common.root_no_rm, specs)):
            common.error_and_exit('cannot remove %s from root environment' %
                                  ', '.join(common.root_no_rm),
                                  json=args.json,
                                  error_type="CantRemoveFromRoot")
        actions = plan.remove_actions(prefix, specs, index=index, pinned=args.pinned)
    if plan.nothing_to_do(actions):
        if args.all:
            # Nothing left to unlink; just delete the directory itself.
            rm_rf(prefix)
            if args.json:
                common.stdout_json({'success': True, 'actions': actions})
            return
        common.error_and_exit('no packages found to remove from '
                              'environment: %s' % prefix,
                              json=args.json,
                              error_type="PackageNotInstalled")
    if not args.json:
        print()
        print("Package plan for package removal in environment %s:" % prefix)
        plan.display_actions(actions, index)
    if args.json and args.dry_run:
        common.stdout_json({
            'success': True,
            'dry_run': True,
            'actions': actions
        })
        return
    # --- confirm / process checks ---------------------------------------
    if not args.json:
        if not pscheck.main(args):
            common.confirm_yn(args)
    elif (sys.platform == 'win32' and not args.force_pscheck and
          not pscheck.check_processes(verbose=False)):
        common.error_and_exit("Cannot continue removal while processes "
                              "from packages are running without --force-pscheck.",
                              json=True,
                              error_type="ProcessesStillRunning")
    # --- execute ---------------------------------------------------------
    if args.json and not args.quiet:
        with json_progress_bars():
            plan.execute_actions(actions, index, verbose=not args.quiet)
    else:
        plan.execute_actions(actions, index, verbose=not args.quiet)
    if args.all:
        rm_rf(prefix)
    if args.json:
        common.stdout_json({'success': True, 'actions': actions})
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create

    Shared driver for three subcommands; ``command`` selects the behavior
    (``'create'`` makes a new environment).  Errors are reported with
    ``sys.exit`` in this (pre-json) version.
    """
    # 'create' is the only command that builds a brand-new prefix.
    newenv = bool(command == 'create')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix)
    config.set_pkgs_dirs(prefix)

    if command == 'update':
        if args.all:
            if args.packages:
                sys.exit("""Error: --all cannot be used with packages""")
        else:
            if len(args.packages) == 0:
                sys.exit("""Error: no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)

    if command == 'update':
        linked = ci.linked(prefix)
        # Every package named for update must be plain (no '=version') and
        # already installed in the target prefix.
        for name in args.packages:
            common.arg2spec(name)
            if '=' in name:
                sys.exit("Invalid package name: '%s'" % (name))
            if name not in set(ci.name_dist(d) for d in linked):
                sys.exit("Error: package '%s' is not installed in %s" %
                         (name, prefix))

    if newenv and args.clone:
        # --clone copies an existing environment; no specs allowed.
        if args.packages:
            sys.exit('Error: did not expect any arguments for --clone')
        clone(args.clone, prefix)
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
        return

    if newenv and not args.no_default_packages:
        default_packages = config.create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in config.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    # Build the list of match specs from --file, --all, or positional args.
    if args.file:
        specs = common.specs_from_url(args.file)
    elif getattr(args, 'all', False):
        specs = []
        linked = ci.linked(prefix)
        for pkg in linked:
            name, ver, build = pkg.rsplit('-', 2)
            if name == 'python' and ver.startswith('2'):
                # Oh Python 2...
                # Pin python 2 below 3 so 'update --all' never jumps majors.
                specs.append('%s >=%s,<3' % (name, ver))
            else:
                specs.append('%s >=%s' % (name, ver))
    else:
        specs = common.specs_from_args(args.packages)

    if command == 'install' and args.revision:
        # Validates the revision number before any index work.
        get_revision(args.revision)
    else:
        common.check_specs(prefix, specs)

    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build import config as build_config
        except ImportError:
            sys.exit("Error: you need to have 'conda-build' installed"
                     " to use the --use-local option")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        index = get_index([url_path(build_config.croot)],
                          use_cache=args.use_index_cache,
                          unknown=args.unknown)
    else:
        index = get_index(channel_urls=channel_urls,
                          prepend=not args.override_channels,
                          use_cache=args.use_index_cache,
                          unknown=args.unknown)

    # Don't update packages that are already up-to-date
    if command == 'update' and not args.all:
        r = Resolve(index)
        orig_packages = args.packages[:]
        for name in orig_packages:
            # Installed version/build are parsed out of the dist string
            # 'name-version-build'.
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            build_inst = [dist.rsplit('-', 2)[2].rsplit('.tar.bz2', 1)[0]
                          for dist in linked
                          if dist.rsplit('-', 2)[0] == name]
            assert len(vers_inst) == 1, name
            assert len(build_inst) == 1, name
            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]
            if latest.version == vers_inst[0] and latest.build == build_inst[0]:
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import list_packages
            regex = '^(%s)$' % '|'.join(orig_packages)
            print('# All requested packages already installed.')
            list_packages(prefix, regex)
            return

    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            install_tar(prefix, tar_path, verbose=not args.quiet)
            return

    # handle explicit installs of conda packages
    if args.packages and all(s.endswith('.tar.bz2') for s in args.packages):
        from conda.misc import install_local_packages
        install_local_packages(prefix, args.packages, verbose=not args.quiet)
        return

    if any(s.endswith('.tar.bz2') for s in args.packages):
        sys.exit("cannot mix specifications with conda package filenames")

    if args.force:
        # --force implies --no-deps.
        args.no_deps = True

    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                sys.exit("Error: could not create directory: %s" % prefix)
        else:
            sys.exit("""\
Error: environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix)

    if command == 'install' and args.revision:
        actions = plan.revert_actions(prefix, get_revision(args.revision))
    else:
        actions = plan.install_actions(prefix, index, specs,
                                       force=args.force,
                                       only_names=only_names)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import list_packages
        regex = '^(%s)$' % '|'.join(spec_names)
        print('# All requested packages already installed.')
        list_packages(prefix, regex)
        return

    print()
    print("Package plan for installation in environment %s:" % prefix)
    plan.display_actions(actions, index)

    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    # Process check, then interactive confirmation before executing the plan.
    if not pscheck.main(args):
        common.confirm_yn(args)

    plan.execute_actions(actions, index, verbose=not args.quiet)

    if newenv:
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
def execute(args, parser):
    """Implement ``conda remove`` (variant without the current-env guard).

    Same flow as the other remove implementation in this file: resolve the
    index, build an unlink plan, confirm (non-json mode), execute.  Unlike
    the sibling variant, this one does NOT refuse to remove the currently
    activated environment.
    """
    import sys
    import conda.plan as plan
    from conda.cli import pscheck
    from conda.install import rm_rf, linked
    from conda import config

    # Either explicit package names or --all must be supplied.
    if not (args.all or args.package_names):
        common.error_and_exit('no package names supplied,\n'
                              ' try "conda remove -h" for more details',
                              json=args.json,
                              error_type="ValueError")

    prefix = common.get_prefix(args)
    common.check_write('remove', prefix, json=args.json)
    common.ensure_override_channels_requires_channel(args, json=args.json)
    channel_urls = args.channel or ()
    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            # Only available when conda-build is installed.
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit("you need to have 'conda-build >= 1.7.1' installed"
                                  " to use the --use-local option",
                                  json=args.json,
                                  error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        index = common.get_index_trap(channel_urls=[url_path(croot)] + list(channel_urls),
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      json=args.json)
    else:
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      json=args.json)

    # Three removal modes: by feature, whole environment, or by spec.
    if args.features:
        features = set(args.package_names)
        actions = plan.remove_features_actions(prefix, index, features)
    elif args.all:
        if plan.is_root_prefix(prefix):
            common.error_and_exit('cannot remove root environment,\n'
                                  ' add -n NAME or -p PREFIX option',
                                  json=args.json,
                                  error_type="CantRemoveRoot")
        # Unlink everything currently linked into the prefix.
        actions = {plan.PREFIX: prefix, plan.UNLINK: sorted(linked(prefix))}
    else:
        specs = common.specs_from_args(args.package_names)
        # Protected packages (common.root_no_rm) may not leave the root env.
        if (plan.is_root_prefix(prefix) and
                common.names_in_specs(common.root_no_rm, specs)):
            common.error_and_exit('cannot remove %s from root environment' %
                                  ', '.join(common.root_no_rm),
                                  json=args.json,
                                  error_type="CantRemoveFromRoot")
        actions = plan.remove_actions(prefix, specs, index=index,
                                      pinned=args.pinned)

    if plan.nothing_to_do(actions):
        if args.all:
            # --all still deletes the prefix directory even with an empty plan.
            rm_rf(prefix)
            if args.json:
                common.stdout_json({
                    'success': True,
                    'actions': actions
                })
            return
        common.error_and_exit('no packages found to remove from '
                              'environment: %s' % prefix,
                              json=args.json,
                              error_type="PackageNotInstalled")

    if not args.json:
        print()
        print("Package plan for package removal in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if args.json and args.dry_run:
        common.stdout_json({
            'success': True,
            'dry_run': True,
            'actions': actions
        })
        return

    if not args.json:
        # Interactive mode: process check then yes/no confirmation.
        if not pscheck.main(args):
            common.confirm_yn(args)
    elif (sys.platform == 'win32' and not args.force_pscheck and
          not pscheck.check_processes(verbose=False)):
        # On Windows, files of running processes cannot be unlinked.
        common.error_and_exit("Cannot continue removal while processes "
                              "from packages are running without --force-pscheck.",
                              json=True,
                              error_type="ProcessesStillRunning")

    if args.json and not args.quiet:
        # Progress is emitted as json records instead of terminal bars.
        with json_progress_bars():
            plan.execute_actions(actions, index, verbose=not args.quiet)
    else:
        plan.execute_actions(actions, index, verbose=not args.quiet)

    if args.all:
        rm_rf(prefix)

    if args.json:
        common.stdout_json({
            'success': True,
            'actions': actions
        })
def execute(args, parser):
    """Implement ``conda remove`` (variant with --offline and history log).

    Differences from the other remove variants in this file: honours
    ``args.offline``, only adds the local build channel when its directory
    exists, builds unlink actions through ``conda.instructions`` /
    ``plan.add_unlink``, and appends the removed specs to the environment's
    ``conda-meta/history`` file after execution.
    """
    import conda.plan as plan
    import conda.instructions as inst
    from conda.install import rm_rf, linked
    from conda import config

    # Either explicit package names or --all must be supplied.
    if not (args.all or args.package_names):
        common.error_and_exit(
            'no package names supplied,\n'
            ' try "conda remove -h" for more details',
            json=args.json,
            error_type="ValueError")

    prefix = common.get_prefix(args)
    # Refuse to delete the environment the user is currently inside of.
    if args.all and prefix == config.default_prefix:
        common.error_and_exit(
            "cannot remove current environment. deactivate and run conda remove again"
        )
    common.check_write('remove', prefix, json=args.json)
    common.ensure_override_channels_requires_channel(args, json=args.json)
    channel_urls = args.channel or ()
    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            # Only available when conda-build is installed.
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit(
                "you need to have 'conda-build >= 1.7.1' installed"
                " to use the --use-local option",
                json=args.json,
                error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        # Only prepend the local channel if the build root actually exists.
        if exists(croot):
            channel_urls = [url_path(croot)] + list(channel_urls)
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      json=args.json,
                                      offline=args.offline)
    else:
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      json=args.json,
                                      offline=args.offline)

    # specs stays None unless the by-spec branch runs; checked again below
    # when deciding whether to append to the history file.
    specs = None
    if args.features:
        features = set(args.package_names)
        actions = plan.remove_features_actions(prefix, index, features)
    elif args.all:
        if plan.is_root_prefix(prefix):
            common.error_and_exit(
                'cannot remove root environment,\n'
                ' add -n NAME or -p PREFIX option',
                json=args.json,
                error_type="CantRemoveRoot")
        # Queue an unlink instruction for every linked dist in the prefix.
        actions = {inst.PREFIX: prefix}
        for dist in sorted(linked(prefix)):
            plan.add_unlink(actions, dist)
    else:
        specs = common.specs_from_args(args.package_names)
        # Protected packages (common.root_no_rm) may not leave the root env.
        if (plan.is_root_prefix(prefix) and
                common.names_in_specs(common.root_no_rm, specs)):
            common.error_and_exit('cannot remove %s from root environment' %
                                  ', '.join(common.root_no_rm),
                                  json=args.json,
                                  error_type="CantRemoveFromRoot")
        actions = plan.remove_actions(prefix, specs, index=index,
                                      pinned=args.pinned)

    if plan.nothing_to_do(actions):
        if args.all:
            # --all still deletes the prefix directory even with an empty plan.
            rm_rf(prefix)
            if args.json:
                common.stdout_json({'success': True, 'actions': actions})
            return
        common.error_and_exit('no packages found to remove from '
                              'environment: %s' % prefix,
                              json=args.json,
                              error_type="PackageNotInstalled")

    if not args.json:
        print()
        print("Package plan for package removal in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if args.json and args.dry_run:
        common.stdout_json({
            'success': True,
            'dry_run': True,
            'actions': actions
        })
        return

    if not args.json:
        common.confirm_yn(args)

    if args.json and not args.quiet:
        # Progress is emitted as json records instead of terminal bars.
        with json_progress_bars():
            plan.execute_actions(actions, index, verbose=not args.quiet)
    else:
        plan.execute_actions(actions, index, verbose=not args.quiet)

    if specs:
        # Best-effort history logging; an unwritable history file is not fatal.
        try:
            with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                f.write('# remove specs: %s\n' % specs)
        except IOError as e:
            if e.errno == errno.EACCES:
                log.debug("Can't write the history file")
            else:
                raise

    if args.all:
        rm_rf(prefix)

    if args.json:
        common.stdout_json({'success': True, 'actions': actions})
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create

    json-aware variant of the shared driver: every error path goes through
    ``common.error_and_exit`` / ``common.exception_and_exit`` so output
    honours the --json flag, and success messages use
    ``common.stdout_json_success``.
    """
    # 'create' is the only command that builds a brand-new prefix.
    newenv = bool(command == 'create')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix, json=args.json)

    if command == 'update':
        if args.all:
            if args.packages:
                common.error_and_exit("""--all cannot be used with packages""",
                                      json=args.json,
                                      error_type="ValueError")
        else:
            if len(args.packages) == 0:
                common.error_and_exit("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix,
                                      json=args.json,
                                      error_type="ValueError")

    if command == 'update':
        linked = ci.linked(prefix)
        # Every package named for update must be plain (no '=version') and
        # already installed in the target prefix.
        for name in args.packages:
            common.arg2spec(name, json=args.json)
            if '=' in name:
                common.error_and_exit("Invalid package name: '%s'" % (name),
                                      json=args.json,
                                      error_type="ValueError")
            if name not in set(ci.name_dist(d) for d in linked):
                common.error_and_exit("package '%s' is not installed in %s" %
                                      (name, prefix),
                                      json=args.json,
                                      error_type="ValueError")

    if newenv and args.clone:
        # --clone copies an existing environment; no specs allowed.
        if args.packages:
            common.error_and_exit('did not expect any arguments for --clone',
                                  json=args.json,
                                  error_type="ValueError")
        clone(args.clone, prefix, json=args.json, quiet=args.quiet)
        touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)
        return

    if newenv and not args.no_default_packages:
        default_packages = config.create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in config.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    # Collect match specs from --file and/or --all and positional args.
    specs = []
    if args.file:
        specs.extend(common.specs_from_url(args.file, json=args.json))
    elif getattr(args, 'all', False):
        linked = ci.linked(prefix)
        for pkg in linked:
            name, ver, build = pkg.rsplit('-', 2)
            if name == 'python' and ver.startswith('2'):
                # Oh Python 2...
                # Pin python 2 below 3 so 'update --all' never jumps majors.
                specs.append('%s >=%s,<3' % (name, ver))
            else:
                specs.append('%s >=%s' % (name, ver))
    specs.extend(common.specs_from_args(args.packages, json=args.json))

    if command == 'install' and args.revision:
        # Validates the revision number before any index work.
        get_revision(args.revision, json=args.json)
    else:
        common.check_specs(prefix, specs, json=args.json)

    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build import config as build_config
        except ImportError:
            common.error_and_exit("you need to have 'conda-build' installed"
                                  " to use the --use-local option",
                                  json=args.json,
                                  error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        index = common.get_index_trap([url_path(build_config.croot)],
                                      use_cache=args.use_index_cache,
                                      unknown=args.unknown,
                                      json=args.json)
    else:
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      unknown=args.unknown,
                                      json=args.json)

    # Don't update packages that are already up-to-date
    if command == 'update' and not args.all:
        r = Resolve(index)
        orig_packages = args.packages[:]
        for name in orig_packages:
            # Installed build number comes from on-disk metadata; version is
            # parsed out of the dist string 'name-version-build'.
            installed_metadata = [ci.is_linked(prefix, dist) for dist in linked]
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            build_inst = [m['build_number'] for m in installed_metadata
                          if m['name'] == name]
            try:
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
            except AssertionError as e:
                if args.json:
                    common.exception_and_exit(e, json=True)
                else:
                    raise
            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]
            if latest.version == vers_inst[0] and latest.build_number == build_inst[0]:
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import print_packages
            if not args.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(message='All requested packages already installed.')
            return

    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            install_tar(prefix, tar_path, verbose=not args.quiet)
            return

    # handle explicit installs of conda packages
    if args.packages and all(s.endswith('.tar.bz2') for s in args.packages):
        from conda.misc import install_local_packages
        install_local_packages(prefix, args.packages, verbose=not args.quiet)
        return

    if any(s.endswith('.tar.bz2') for s in args.packages):
        common.error_and_exit("cannot mix specifications with conda package filenames",
                              json=args.json,
                              error_type="ValueError")

    if args.force:
        # --force implies --no-deps.
        args.no_deps = True

    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                common.error_and_exit("Error: could not create directory: %s" % prefix,
                                      json=args.json,
                                      error_type="OSError")
        else:
            common.error_and_exit("""\
environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix,
                                  json=args.json,
                                  error_type="NoEnvironmentFound")

    try:
        if command == 'install' and args.revision:
            actions = plan.revert_actions(prefix, get_revision(args.revision))
        else:
            actions = plan.install_actions(prefix, index, specs,
                                           force=args.force,
                                           only_names=only_names,
                                           pinned=args.pinned,
                                           minimal_hint=args.alt_hint)
    except NoPackagesFound as e:
        common.exception_and_exit(e, json=args.json)
    except SystemExit as e:
        # Unsatisfiable package specifications/no such revision/import error
        error_type = 'UnsatisfiableSpecifications'
        if e.args and 'could not import' in e.args[0]:
            error_type = 'ImportError'
        common.exception_and_exit(e, json=args.json, newline=True,
                                  error_text=False,
                                  error_type=error_type)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import print_packages
        if not args.json:
            regex = '^(%s)$' % '|'.join(spec_names)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(message='All requested packages already installed.')
        return

    if not args.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    if not args.json:
        # Interactive mode: process check then yes/no confirmation.
        if not pscheck.main(args):
            common.confirm_yn(args)
    else:
        # On Windows, files of running processes cannot be unlinked.
        if (sys.platform == 'win32' and not args.force_pscheck and
                not pscheck.check_processes(verbose=False)):
            common.error_and_exit("Cannot continue operation while processes "
                                  "from packages are running without --force-pscheck.",
                                  json=True,
                                  error_type="ProcessesStillRunning")
        elif args.dry_run:
            common.stdout_json_success(actions=actions, dry_run=True)
            sys.exit(0)

    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
        except RuntimeError as e:
            # "LOCKERROR" in the message indicates another conda process
            # holds the package-cache lock.
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            common.exception_and_exit(e, error_type=error_type,
                                      json=args.json)
        except SystemExit as e:
            common.exception_and_exit(e, json=args.json)

    if newenv:
        touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)

    if args.json:
        common.stdout_json_success(actions=actions)
def test_tarball_install_and_bad_metadata(self):
    """Integration test: install flask from tarballs in several ways.

    Covers a chain of regression scenarios (#2886, #2626, #2812, #2970,
    #2599): installing a .tar.bz2 from the package cache, from absolute and
    relative paths outside a channel, from a hand-built local channel, from
    a conda-bld channel, and finally verifying that a stray/bad
    conda-meta json file does not make the package appear installed.
    The steps are order-dependent; each reuses state from the previous one.
    """
    with make_temp_env("python flask=0.10.1") as prefix:
        assert_package_is_installed(prefix, 'flask-0.10.1')
        # Grab flask's link metadata before removing it, so we can find
        # its cached tarball by filename afterwards.
        flask_data = [p for p in itervalues(linked_data(prefix))
                      if p['name'] == 'flask'][0]
        run_command(Commands.REMOVE, prefix, 'flask')
        assert not package_is_installed(prefix, 'flask-0.10.1')
        assert_package_is_installed(prefix, 'python')

        from conda.config import pkgs_dirs
        flask_fname = flask_data['fn']
        tar_old_path = join(pkgs_dirs[0], flask_fname)

        # regression test for #2886 (part 1 of 2)
        # install tarball from package cache, default channel
        run_command(Commands.INSTALL, prefix, tar_old_path)
        assert_package_is_installed(prefix, 'flask-0.')

        # regression test for #2626
        # install tarball with full path, outside channel
        tar_new_path = join(prefix, flask_fname)
        copyfile(tar_old_path, tar_new_path)
        run_command(Commands.INSTALL, prefix, tar_new_path)
        assert_package_is_installed(prefix, 'flask-0')

        # regression test for #2626
        # install tarball with relative path, outside channel
        run_command(Commands.REMOVE, prefix, 'flask')
        assert not package_is_installed(prefix, 'flask-0.10.1')
        tar_new_path = relpath(tar_new_path)
        run_command(Commands.INSTALL, prefix, tar_new_path)
        assert_package_is_installed(prefix, 'flask-0.')

        # Regression test for 2812
        # install from local channel
        # Strip channel-identifying fields so the record matches the
        # hand-built local channel below.
        for field in ('url', 'channel', 'schannel'):
            del flask_data[field]
        repodata = {'info': {}, 'packages':{flask_fname: flask_data}}
        with make_temp_env() as channel:
            subchan = join(channel, subdir)
            channel = url_path(channel)
            os.makedirs(subchan)
            tar_new_path = join(subchan, flask_fname)
            copyfile(tar_old_path, tar_new_path)
            # Local channels serve bz2-compressed repodata.
            with bz2.BZ2File(join(subchan, 'repodata.json.bz2'), 'w') as f:
                f.write(json.dumps(repodata).encode('utf-8'))
            run_command(Commands.INSTALL, prefix, '-c', channel, 'flask')
            assert_package_is_installed(prefix, channel + '::' + 'flask-')

            run_command(Commands.REMOVE, prefix, 'flask')
            assert not package_is_installed(prefix, 'flask-0')

            # Regression test for 2970
            # install from build channel as a tarball
            conda_bld = join(sys.prefix, 'conda-bld')
            conda_bld_sub = join(conda_bld, subdir)
            tar_bld_path = join(conda_bld_sub, flask_fname)
            if os.path.exists(conda_bld):
                # Move into the existing conda-bld tree; a failed rename
                # (e.g. cross-device) is tolerated and the stale path used.
                try:
                    os.rename(tar_new_path, tar_bld_path)
                except OSError:
                    pass
            else:
                os.makedirs(conda_bld)
                os.rename(subchan, conda_bld_sub)
            run_command(Commands.INSTALL, prefix, tar_bld_path)
            assert_package_is_installed(prefix, 'flask-')

        # regression test for #2886 (part 2 of 2)
        # install tarball from package cache, local channel
        run_command(Commands.REMOVE, prefix, 'flask')
        assert not package_is_installed(prefix, 'flask-0')
        run_command(Commands.INSTALL, prefix, tar_old_path)
        # The last install was from the `local::` channel
        assert_package_is_installed(prefix, 'flask-')

        # regression test for #2599
        # Clear the in-process linked-data cache so the copied metadata
        # file below is actually re-read from disk.
        linked_data_.clear()
        flask_metadata = glob(join(prefix, 'conda-meta', flask_fname[:-8] + '.json'))[-1]
        bad_metadata = join(prefix, 'conda-meta', 'flask.json')
        copyfile(flask_metadata, bad_metadata)
        # The bogus 'flask.json' must not register as an installed package.
        assert not package_is_installed(prefix, 'flask', exact=True)
        assert_package_is_installed(prefix, 'flask-0.')
def execute_search(args, parser):
    """Implement ``conda search``: list packages matching a regex or spec.

    Builds the channel index (optionally restricted to a platform or the
    local build channel), then walks every package group in the resolver,
    printing matching dists in table form, canonical-name form
    (--canonical), or collecting them into a json structure (--json).
    """
    import re
    import sys
    from conda.api import get_index
    from conda.resolve import MatchSpec, Resolve

    # Interpret the positional argument either as a MatchSpec (--spec)
    # or as a case-insensitive regex on package names.
    pat = None
    ms = None
    if args.regex:
        if args.spec:
            ms = MatchSpec(' '.join(args.regex.split('=')))
        else:
            try:
                pat = re.compile(args.regex, re.I)
            except re.error as e:
                common.error_and_exit(
                    "%r is not a valid regex pattern (exception: %s)" %
                    (args.regex, e),
                    json=args.json,
                    error_type="ValueError")

    prefix = common.get_prefix(args)

    import conda.config
    import conda.install

    # linked/extracted state is used below to annotate each dist with
    # '*' (installed) or '.' (extracted in a package cache).
    linked = conda.install.linked(prefix)
    extracted = set()
    for pkgs_dir in conda.config.pkgs_dirs:
        extracted.update(conda.install.extracted(pkgs_dir))

    # XXX: Make this work with more than one platform
    platform = args.platform or ''
    if platform and platform != config.subdir:
        # Unknown (local) packages only make sense for the native platform.
        args.unknown = False
    common.ensure_override_channels_requires_channel(args, dashc=False,
                                                     json=args.json)
    channel_urls = args.channel or ()
    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            # Only available when conda-build is installed.
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit(
                "you need to have 'conda-build >= 1.7.1' installed"
                " to use the --use-local option",
                json=args.json,
                error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        index = common.get_index_trap(channel_urls=[url_path(croot)] + list(channel_urls),
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      unknown=args.unknown,
                                      json=args.json,
                                      platform=args.platform)
    else:
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      platform=args.platform,
                                      use_cache=args.use_index_cache,
                                      unknown=args.unknown,
                                      json=args.json)

    r = Resolve(index)

    # --canonical accumulates a flat list; otherwise json maps
    # package name -> list of per-dist records.
    if args.canonical:
        json = []
    else:
        json = {}

    for name in sorted(r.groups):
        # disp_name is blanked after the first table row of each group so
        # the name only prints once per package.
        disp_name = name
        if pat and pat.search(name) is None:
            continue
        if ms and name != ms.name:
            continue

        if ms:
            ms_name = ms
        else:
            ms_name = MatchSpec(name)

        if not args.canonical:
            json[name] = []

        if args.outdated:
            # Keep only groups where an older version is installed locally.
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            if not vers_inst:
                continue
            assert len(vers_inst) == 1, name
            pkgs = sorted(r.get_pkgs(ms_name))
            if not pkgs:
                continue
            latest = pkgs[-1]
            if latest.version == vers_inst[0]:
                continue

        for pkg in sorted(r.get_pkgs(ms_name)):
            dist = pkg.fn[:-8]
            if args.canonical:
                if not args.json:
                    print(dist)
                else:
                    json.append(dist)
                continue
            # Installation marker: '*' linked, '.' extracted, ' ' otherwise;
            # foreign-platform dists can never be installed locally.
            if platform and platform != config.subdir:
                inst = ' '
            elif dist in linked:
                inst = '*'
            elif dist in extracted:
                inst = '.'
            else:
                inst = ' '

            if not args.json:
                print('%-25s %s  %-15s %15s  %-15s %s' % (
                    disp_name, inst,
                    pkg.version,
                    pkg.build,
                    config.canonical_channel_name(pkg.channel),
                    common.disp_features(r.features(pkg.fn)),
                ))
                disp_name = ''
            else:
                data = {}
                data.update(pkg.info)
                data.update({
                    'fn': pkg.fn,
                    'installed': inst == '*',
                    'extracted': inst in '*.',
                    'version': pkg.version,
                    'build': pkg.build,
                    'build_number': pkg.build_number,
                    'channel': config.canonical_channel_name(pkg.channel),
                    'full_channel': pkg.channel,
                    'features': list(r.features(pkg.fn)),
                    'license': pkg.info.get('license'),
                    'size': pkg.info.get('size'),
                    'depends': pkg.info.get('depends'),
                    'type': pkg.info.get('type')
                })

                if data['type'] == 'app':
                    data['icon'] = make_icon_url(pkg.info)
                json[name].append(data)

    if args.json:
        common.stdout_json(json)
def execute(args, parser):
    """Implement the oldest install-style command in this file.

    Handles .tar bundles and explicit .tar.bz2 installs first, then
    resolves specs against the channel index (with optional local build
    channel and optional pip fallback) and executes the install plan.
    Errors are reported with ``sys.exit`` in this pre-json version.
    """
    import sys
    from os.path import isdir

    import conda.config as config
    import conda.plan as plan
    from conda.api import get_index
    from conda.cli import pscheck

    prefix = common.get_prefix(args)
    config.set_pkgs_dirs(prefix)

    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            install_tar(prefix, tar_path, verbose=not args.quiet)
            return

    # handle explicit installs of conda packages
    if args.packages and all(s.endswith('.tar.bz2') for s in args.packages):
        from conda.misc import install_local_packages
        install_local_packages(prefix, args.packages, verbose=not args.quiet)
        return

    if any(s.endswith('.tar.bz2') for s in args.packages):
        sys.exit("cannot mix specifications with conda package filenames")

    if args.force:
        # --force implies --no-deps.
        args.no_deps = True

    if args.file:
        specs = common.specs_from_url(args.file)
    else:
        specs = common.specs_from_args(args.packages)

    common.check_specs(prefix, specs)

    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None

    if not isdir(prefix):
        sys.exit("""\
Error: environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix)

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()
    index = get_index(channel_urls=channel_urls,
                      prepend=not args.override_channels)

    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        from conda.builder import config as build_config
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        # NOTE(review): this replaces (not extends) the index fetched above,
        # so --use-local searches only the local build channel here.
        index = get_index([url_path(build_config.croot)])

    if args.pip and config.use_pip:
        # Remove from specs packages that are not in conda index
        # and install them using pip
        # Return the updated specs
        specs = install_with_pip(prefix, index, specs)
        if not specs:
            return

    actions = plan.install_actions(prefix, index, specs,
                                   force=args.force,
                                   only_names=only_names)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import list_packages
        regex = '^(%s)$' % '|'.join(spec_names)
        print('# All requested packages already installed.')
        list_packages(prefix, regex)
        return

    print()
    print("Package plan for installation in environment %s:" % prefix)
    plan.display_actions(actions, index)

    common.check_write('install', prefix)

    # Process check, then interactive confirmation before executing the plan.
    if not pscheck.main(args):
        common.confirm_yn(args)

    plan.execute_actions(actions, index, verbose=not args.quiet)
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create

    Shared implementation for the three commands: validates arguments,
    builds the spec list (from packages, --file, or --all), computes an
    action plan against the channel index, and executes it.  JSON and
    interactive output paths are handled throughout via ``args.json``.
    """
    newenv = bool(command == 'create')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix, json=args.json)
    if config.force_32bit and plan.is_root_prefix(prefix):
        common.error_and_exit("cannot use CONDA_FORCE_32BIT=1 in root env")

    # 'conda update' needs either package names, --all, or --file
    if command == 'update':
        if not args.file:
            if not args.all and len(args.packages) == 0:
                common.error_and_exit("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix,
                                      json=args.json,
                                      error_type="ValueError")

    # for a targeted update, every named package must already be installed
    if command == 'update' and not args.all:
        linked = ci.linked(prefix)
        for name in args.packages:
            common.arg2spec(name, json=args.json)
            if '=' in name:
                common.error_and_exit("Invalid package name: '%s'" % (name),
                                      json=args.json,
                                      error_type="ValueError")
            if name not in set(ci.name_dist(d) for d in linked):
                common.error_and_exit("package '%s' is not installed in %s" %
                                      (name, prefix),
                                      json=args.json,
                                      error_type="ValueError")

    if newenv and not args.no_default_packages:
        default_packages = config.create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in config.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)
    else:
        default_packages = []

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    # build the list of specs from --file, --all, or positional packages
    specs = []
    if args.file:
        for fpath in args.file:
            specs.extend(common.specs_from_url(fpath, json=args.json))
    elif getattr(args, 'all', False):
        linked = ci.linked(prefix)
        if not linked:
            common.error_and_exit("There are no packages installed in the "
                                  "prefix %s" % prefix)
        for pkg in linked:
            name, ver, build = pkg.rsplit('-', 2)
            if name in getattr(args, '_skip', ['anaconda']):
                continue
            if name == 'python' and ver.startswith('2'):
                # Oh Python 2...  pin updates below Python 3
                specs.append('%s >=%s,<3' % (name, ver))
            else:
                specs.append('%s' % name)
    specs.extend(common.specs_from_args(args.packages, json=args.json))

    if command == 'install' and args.revision:
        get_revision(args.revision, json=args.json)
    elif not (newenv and args.clone):
        common.check_specs(prefix, specs, json=args.json,
                           create=(command == 'create'))

    # explicit .tar.bz2 filenames: install them directly, then resolve
    # only their dependencies
    num_cp = sum(s.endswith('.tar.bz2') for s in args.packages)
    if num_cp:
        if num_cp == len(args.packages):
            depends = misc.install_local_packages(prefix, args.packages,
                                                  verbose=not args.quiet)
            if args.no_deps:
                depends = []
            specs = list(set(depends))
            args.unknown = True
        else:
            common.error_and_exit(
                "cannot mix specifications with conda package filenames",
                json=args.json,
                error_type="ValueError")

    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            depends = install_tar(prefix, tar_path, verbose=not args.quiet)
            if args.no_deps:
                depends = []
            specs = list(set(depends))
            args.unknown = True

    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit(
                "you need to have 'conda-build >= 1.7.1' installed"
                " to use the --use-local option",
                json=args.json,
                error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        if exists(croot):
            channel_urls = [url_path(croot)] + list(channel_urls)

    index = common.get_index_trap(channel_urls=channel_urls,
                                  prepend=not args.override_channels,
                                  use_cache=args.use_index_cache,
                                  unknown=args.unknown,
                                  json=args.json,
                                  offline=args.offline)

    # --clone copies an existing environment; no spec arguments allowed
    if newenv and args.clone:
        if set(args.packages) - set(default_packages):
            common.error_and_exit('did not expect any arguments for --clone',
                                  json=args.json,
                                  error_type="ValueError")
        clone(args.clone, prefix, json=args.json, quiet=args.quiet,
              index=index)
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)
        return

    # Don't update packages that are already up-to-date
    if command == 'update' and not (args.all or args.force):
        r = Resolve(index)
        orig_packages = args.packages[:]
        for name in orig_packages:
            installed_metadata = [ci.is_linked(prefix, dist)
                                  for dist in linked]
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            build_inst = [m['build_number'] for m in installed_metadata
                          if m['name'] == name]
            try:
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
            except AssertionError as e:
                if args.json:
                    common.exception_and_exit(e, json=True)
                else:
                    raise
            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]
            # drop packages whose newest available build is already linked
            if (latest.version == vers_inst[0] and
                    latest.build_number == build_inst[0]):
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import print_packages
            if not args.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(
                    message='All requested packages already installed.')
            return

    # --force implies skipping dependency resolution
    if args.force:
        args.no_deps = True

    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                common.error_and_exit("Error: could not create directory: %s" % prefix,
                                      json=args.json,
                                      error_type="OSError")
        else:
            common.error_and_exit("""\
environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix,
                                  json=args.json,
                                  error_type="NoEnvironmentFound")

    try:
        if command == 'install' and args.revision:
            actions = plan.revert_actions(prefix, get_revision(args.revision))
        else:
            with common.json_progress_bars(json=args.json and not args.quiet):
                actions = plan.install_actions(prefix, index, specs,
                                               force=args.force,
                                               only_names=only_names,
                                               pinned=args.pinned,
                                               minimal_hint=args.alt_hint,
                                               update_deps=args.update_deps)
            # --copy / always_copy: rewrite LINK actions to copy instead of
            # hard/soft link
            if config.always_copy or args.copy:
                new_link = []
                for pkg in actions["LINK"]:
                    dist, pkgs_dir, lt = inst.split_linkarg(pkg)
                    lt = ci.LINK_COPY
                    new_link.append("%s %s %d" % (dist, pkgs_dir, lt))
                actions["LINK"] = new_link
    except NoPackagesFound as e:
        error_message = e.args[0]

        if command == 'update' and args.all:
            # Packages not found here just means they were installed but
            # cannot be found any more. Just skip them.
            if not args.json:
                print("Warning: %s, skipping" % error_message)
            else:
                # Not sure what to do here
                pass
            args._skip = getattr(args, '_skip', ['anaconda'])
            for pkg in e.pkgs:
                p = pkg.split()[0]
                if p in args._skip:
                    # Avoid infinite recursion. This can happen if a spec
                    # comes from elsewhere, like --file
                    raise
                args._skip.append(p)

            # retry without the unavailable packages
            return install(args, parser, command=command)
        else:
            packages = {index[fn]['name'] for fn in index}

            # suggest close name matches for each missing package
            for pkg in e.pkgs:
                close = get_close_matches(pkg, packages, cutoff=0.7)
                if close:
                    error_message += ("\n\nDid you mean one of these?"
                                      "\n\n %s" % (', '.join(close)))
                error_message += '\n\nYou can search for this package on anaconda.org with'
                error_message += '\n\n anaconda search -t conda %s' % pkg
                if len(e.pkgs) > 1:
                    # Note this currently only happens with dependencies not found
                    error_message += '\n\n (and similarly for the other packages)'

            if not find_executable('anaconda', include_others=False):
                error_message += '\n\nYou may need to install the anaconda-client command line client with'
                error_message += '\n\n conda install anaconda-client'

            pinned_specs = plan.get_pinned_specs(prefix)
            if pinned_specs:
                error_message += "\n\nNote that you have pinned specs in %s:" % join(prefix, 'conda-meta', 'pinned')
                error_message += "\n\n %r" % pinned_specs

            common.error_and_exit(error_message, json=args.json)
    except SystemExit as e:
        # Unsatisfiable package specifications/no such revision/import error
        error_type = 'UnsatisfiableSpecifications'
        if e.args and 'could not import' in e.args[0]:
            error_type = 'ImportError'
        common.exception_and_exit(e, json=args.json, newline=True,
                                  error_text=False,
                                  error_type=error_type)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import print_packages

        if not args.json:
            regex = '^(%s)$' % '|'.join(spec_names)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(
                message='All requested packages already installed.')
        return

    if not args.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        plan.display_actions(actions, index,
                             show_channel_urls=args.show_channel_urls)

    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    if not args.json:
        common.confirm_yn(args)
    elif args.dry_run:
        common.stdout_json_success(actions=actions, dry_run=True)
        sys.exit(0)

    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
            # record the request in conda-meta/history (best effort)
            if not (command == 'update' and args.all):
                try:
                    with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                        f.write('# %s specs: %s\n' % (command, specs))
                except IOError as e:
                    if e.errno == errno.EACCES:
                        log.debug("Can't write the history file")
                    else:
                        raise
        except RuntimeError as e:
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            common.exception_and_exit(e, error_type=error_type,
                                      json=args.json)
        except SystemExit as e:
            common.exception_and_exit(e, json=args.json)

    if newenv:
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)

    if args.json:
        common.stdout_json_success(actions=actions)
def execute_search(args, parser):
    """Entry point for the ``conda search`` CLI command.

    Matches packages in the channel index by regex (or exact spec with
    --spec, or dependents with --reverse-dependency) and prints one line
    per matching build, or emits JSON when --json is given.
    """
    import re
    from conda.resolve import Resolve

    if args.reverse_dependency:
        if not args.regex:
            parser.error("--reverse-dependency requires at least one package name")
        if args.spec:
            parser.error("--reverse-dependency does not work with --spec")

    # pat: compiled regex filter; ms: MatchSpec string (for --spec mode)
    pat = None
    ms = None
    if args.regex:
        if args.spec:
            ms = ' '.join(args.regex.split('='))
        else:
            regex = args.regex
            if args.full_name:
                regex = r'^%s$' % regex
            try:
                pat = re.compile(regex, re.I)
            except re.error as e:
                common.error_and_exit(
                    "'%s' is not a valid regex pattern (exception: %s)" %
                    (regex, e),
                    json=args.json,
                    error_type="ValueError")

    prefix = common.get_prefix(args)

    import conda.config
    import conda.install

    # installed/extracted state, used for the status column in the output
    linked = conda.install.linked(prefix)
    extracted = set()
    for pkgs_dir in conda.config.pkgs_dirs:
        extracted.update(conda.install.extracted(pkgs_dir))

    # XXX: Make this work with more than one platform
    platform = args.platform or ''
    if platform and platform != config.subdir:
        args.unknown = False
    common.ensure_override_channels_requires_channel(args, dashc=False,
                                                    json=args.json)
    channel_urls = args.channel or ()
    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit("you need to have 'conda-build >= 1.7.1' installed"
                                  " to use the --use-local option",
                                  json=args.json,
                                  error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        if exists(croot):
            channel_urls = [url_path(croot)] + list(channel_urls)
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      unknown=args.unknown,
                                      prefix=prefix,
                                      json=args.json,
                                      platform=args.platform,
                                      offline=args.offline)
    else:
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      platform=args.platform,
                                      use_cache=args.use_index_cache,
                                      prefix=prefix,
                                      unknown=args.unknown,
                                      json=args.json,
                                      offline=args.offline)

    r = Resolve(index)

    # NOTE(review): local name 'json' shadows the stdlib module name; it
    # accumulates output (list for --canonical, dict otherwise).
    if args.canonical:
        json = []
    else:
        json = {}

    names = []
    for name in sorted(r.groups):
        if '@' in name:
            continue
        if args.reverse_dependency:
            ms_name = ms
            # collect (name, Package) pairs whose dependencies match the regex
            for pkg in r.groups[name]:
                for dep in r.ms_depends(pkg):
                    if pat.search(dep.name):
                        names.append((name, Package(pkg, r.index[pkg])))
        else:
            if pat and pat.search(name) is None:
                continue
            if ms and name != ms.name:
                continue
            if ms:
                ms_name = ms
            else:
                ms_name = name

            pkgs = sorted(r.get_pkgs(ms_name))
            names.append((name, pkgs))

    if args.reverse_dependency:
        # group the per-package hits back into (name, [pkgs]) entries
        new_names = []
        old = None
        for name, pkg in sorted(names, key=lambda x:(x[0], x[1].name, x[1])):
            if name == old:
                new_names[-1][1].append(pkg)
            else:
                new_names.append((name, [pkg]))
            old = name
        names = new_names

    for name, pkgs in names:
        if args.reverse_dependency:
            disp_name = pkgs[0].name
        else:
            disp_name = name

        if args.names_only and not args.outdated:
            print(name)
            continue

        if not args.canonical:
            json[name] = []

        if args.outdated:
            # only list packages that are installed but not at the latest
            # available version
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            if not vers_inst:
                continue
            assert len(vers_inst) == 1, name
            if not pkgs:
                continue
            latest = pkgs[-1]
            if latest.version == vers_inst[0]:
                continue
            if args.names_only:
                print(name)
                continue

        for pkg in pkgs:
            dist = pkg.fn[:-8]  # strip '.tar.bz2'
            if args.canonical:
                if not args.json:
                    print(dist)
                else:
                    json.append(dist)
                continue
            # status column: '*' installed, '.' extracted, ' ' otherwise
            if platform and platform != config.subdir:
                inst = ' '
            elif dist in linked:
                inst = '*'
            elif dist in extracted:
                inst = '.'
            else:
                inst = ' '

            if not args.json:
                print('%-25s %s %-15s %15s %-15s %s' % (
                    disp_name, inst,
                    pkg.version,
                    pkg.build,
                    config.canonical_channel_name(pkg.channel),
                    common.disp_features(r.features(pkg.fn)),
                ))
                # only print the name on the first row of a group
                disp_name = ''
            else:
                data = {}
                data.update(pkg.info)
                data.update({
                    'fn': pkg.fn,
                    'installed': inst == '*',
                    'extracted': inst in '*.',
                    'version': pkg.version,
                    'build': pkg.build,
                    'build_number': pkg.build_number,
                    'channel': config.canonical_channel_name(pkg.channel),
                    'full_channel': pkg.channel,
                    'features': list(r.features(pkg.fn)),
                    'license': pkg.info.get('license'),
                    'size': pkg.info.get('size'),
                    'depends': pkg.info.get('depends'),
                    'type': pkg.info.get('type')
                })

                if data['type'] == 'app':
                    data['icon'] = make_icon_url(pkg.info)
                json[name].append(data)

    if args.json:
        common.stdout_json(json)
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create

    Shared implementation for the three commands (older, pre-JSON
    version): validates arguments, builds specs, prunes already-current
    packages on update, computes a plan, and executes it.
    """
    newenv = command == 'create'
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix)

    config.set_pkgs_dirs(prefix)

    if command == 'update':
        if len(args.packages) == 0:
            sys.exit("""Error: no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)

    # every package named for update must already be installed
    if command == 'update':
        linked = ci.linked(prefix)
        for name in args.packages:
            common.arg2spec(name)
            if '=' in name:
                sys.exit("Invalid package name: '%s'" % (name))
            if name not in set(ci.name_dist(d) for d in linked):
                sys.exit("Error: package '%s' is not installed in %s" %
                         (name, prefix))

    # --clone copies an existing environment; no spec arguments allowed
    if newenv and args.clone:
        if args.packages:
            sys.exit('Error: did not expect any arguments for --clone')
        clone(args.clone, prefix)
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
        return

    if newenv and not args.no_default_packages:
        default_packages = config.create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in config.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()
    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build import config as build_config
        except ImportError:
            sys.exit("Error: you need to have 'conda-build' installed"
                     " to use the --use-local option")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        index = get_index([url_path(build_config.croot)],
                          use_cache=args.use_cache)
    else:
        index = get_index(channel_urls=channel_urls,
                          prepend=not args.override_channels,
                          use_cache=args.use_cache)

    # Don't update packages that are already up-to-date
    if command == 'update':
        r = Resolve(index)
        orig_packages = args.packages[:]
        for name in orig_packages:
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            build_inst = [dist.rsplit('-', 2)[2].rsplit('.tar.bz2', 1)[0]
                          for dist in linked
                          if dist.rsplit('-', 2)[0] == name]
            assert len(vers_inst) == 1, name
            assert len(build_inst) == 1, name
            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                # Shouldn't happen?
                continue
            # This won't do the right thing for python 2
            latest = pkgs[-1]
            if latest.version == vers_inst[0] and latest.build == build_inst[0]:
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import list_packages

            regex = '^(%s)$' % '|'.join(orig_packages)
            print('# All requested packages already installed.')
            list_packages(prefix, regex)
            return

    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            install_tar(prefix, tar_path, verbose=not args.quiet)
            return

    # handle explicit installs of conda packages
    if args.packages and all(s.endswith('.tar.bz2') for s in args.packages):
        from conda.misc import install_local_packages
        install_local_packages(prefix, args.packages, verbose=not args.quiet)
        return

    if any(s.endswith('.tar.bz2') for s in args.packages):
        sys.exit("cannot mix specifications with conda package filenames")

    # --force implies skipping dependency resolution
    if args.force:
        args.no_deps = True

    if args.file:
        specs = common.specs_from_url(args.file)
    else:
        specs = common.specs_from_args(args.packages)

    common.check_specs(prefix, specs)

    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                sys.exit("Error: could not create directory: %s" % prefix)
        else:
            sys.exit("""\
Error: environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix)

    actions = plan.install_actions(prefix, index, specs,
                                   force=args.force, only_names=only_names)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import list_packages

        regex = '^(%s)$' % '|'.join(spec_names)
        print('# All requested packages already installed.')
        list_packages(prefix, regex)
        return

    print()
    print("Package plan for installation in environment %s:" % prefix)
    plan.display_actions(actions, index)

    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    # pscheck warns about running processes; confirm with the user
    if not pscheck.main(args):
        common.confirm_yn(args)

    plan.execute_actions(actions, index, verbose=not args.quiet)

    if newenv:
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create

    Shared implementation for the three commands (earliest version in
    this file): validates arguments, builds specs, computes an action
    plan against the channel index, and executes it.
    """
    newenv = command == 'create'
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix)

    config.set_pkgs_dirs(prefix)

    if command == 'update':
        if len(args.packages) == 0:
            sys.exit("""Error: no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)

    # every package named for update must already be installed
    if command == 'update':
        linked = set(ci.name_dist(d) for d in ci.linked(prefix))
        for name in args.packages:
            common.arg2spec(name)
            if '=' in name:
                sys.exit("Invalid package name: '%s'" % (name))
            if name not in linked:
                sys.exit("Error: package '%s' is not installed in %s" %
                         (name, prefix))

    # --clone copies an existing environment; no spec arguments allowed
    if newenv and args.clone:
        if args.packages:
            sys.exit('Error: did not expect any arguments for --clone')
        clone(args.clone, prefix)
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
        return

    if newenv and not args.no_default_packages:
        args.packages.extend(config.create_default_packages)

    # handle a tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            install_tar(prefix, tar_path, verbose=not args.quiet)
            return

    # handle explicit installs of conda packages
    if args.packages and all(s.endswith('.tar.bz2') for s in args.packages):
        from conda.misc import install_local_packages
        install_local_packages(prefix, args.packages, verbose=not args.quiet)
        return

    if any(s.endswith('.tar.bz2') for s in args.packages):
        sys.exit("cannot mix specifications with conda package filenames")

    # --force implies skipping dependency resolution
    if args.force:
        args.no_deps = True

    if args.file:
        specs = common.specs_from_url(args.file)
    else:
        specs = common.specs_from_args(args.packages)

    common.check_specs(prefix, specs)

    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                sys.exit("Error: could not create directory: %s" % prefix)
        else:
            sys.exit("""\
Error: environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix)

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()
    index = get_index(channel_urls=channel_urls,
                      prepend=not args.override_channels)

    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build import config as build_config
        except ImportError:
            sys.exit("Error: you need to have 'conda-build' installed"
                     " to use the --use-local option")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        # NOTE(review): this rebuilds the index from the local build root
        # only, discarding channel_urls computed above — confirm intended.
        index = get_index([url_path(build_config.croot)])

    actions = plan.install_actions(prefix, index, specs,
                                   force=args.force, only_names=only_names)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import list_packages

        regex = '^(%s)$' % '|'.join(spec_names)
        print('# All requested packages already installed.')
        list_packages(prefix, regex)
        return

    print()
    print("Package plan for installation in environment %s:" % prefix)
    plan.display_actions(actions, index)

    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    # pscheck warns about running processes; confirm with the user
    if not pscheck.main(args):
        common.confirm_yn(args)

    plan.execute_actions(actions, index, verbose=not args.quiet)

    if newenv:
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
def explicit(specs, prefix, verbose=False, force_extract=True, fetch_args=None):
    """Install an explicit list of package URLs/paths into ``prefix``.

    Each spec is a URL or local path to a ``.tar.bz2`` package,
    optionally suffixed with ``:#md5``; the sentinel line ``@EXPLICIT``
    is skipped.  Builds a plan of fetch/extract/unlink/link actions,
    verifies MD5s against the fetched repodata, and executes the plan.
    Returns the actions mapping.
    """
    actions = defaultdict(list)
    actions['PREFIX'] = prefix
    actions['op_order'] = RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT, UNLINK, LINK
    # map of installed package name -> its linked dist, for unlinking below
    linked = {install.name_dist(dist): dist for dist in install.linked(prefix)}
    fetch_args = fetch_args or {}
    index = {}
    verifies = []    # (fn, md5) pairs to verify after repodata is fetched
    channels = {}    # channel URL -> (schannel, priority) for fetch_index
    for spec in specs:
        if spec == '@EXPLICIT':
            continue

        # Format: (url|path)(:#md5)?
        m = url_pat.match(spec)
        if m is None:
            sys.exit('Could not parse explicit URL: %s' % spec)
        url, md5 = m.group('url') + '/' + m.group('fn'), m.group('md5')
        if not is_url(url):
            if not isfile(url):
                sys.exit('Error: file not found: %s' % url)
            url = utils.url_path(url)
        url_p, fn = url.rsplit('/', 1)

        # See if the URL refers to a package in our cache
        # (note: 'prefix' is reused here as the channel-name prefix of the
        # dist, not the environment prefix)
        prefix = pkg_path = dir_path = None
        if url_p.startswith('file://'):
            prefix = install.cached_url(url)

        # If not, determine the channel name from the URL
        # NOTE(review): when cached_url() returns a prefix, schannel is not
        # assigned in this iteration, yet it is referenced below when the
        # package still needs fetching — verify this path.
        if prefix is None:
            _, schannel = url_channel(url)
            prefix = '' if schannel == 'defaults' else schannel + '::'
        fn = prefix + fn
        dist = fn[:-8]  # strip '.tar.bz2'

        pkg_path = install.is_fetched(dist)
        dir_path = install.is_extracted(dist)

        # Don't re-fetch unless there is an MD5 mismatch
        if pkg_path and (md5 and md5_file(pkg_path) != md5):
            # This removes any extracted copies as well
            actions[RM_FETCHED].append(dist)
            pkg_path = dir_path = None

        # Don't re-extract unless forced, or if we can't check the md5
        if dir_path and (force_extract or md5 and not pkg_path):
            actions[RM_EXTRACTED].append(dist)
            dir_path = None

        if not dir_path:
            if not pkg_path:
                _, conflict = install.find_new_location(dist)
                if conflict:
                    actions[RM_FETCHED].append(conflict)
                channels[url_p + '/'] = (schannel, 0)
                actions[FETCH].append(dist)
                verifies.append((dist + '.tar.bz2', md5))
            actions[EXTRACT].append(dist)

        # unlink any installed package with that name
        name = install.name_dist(dist)
        if name in linked:
            actions[UNLINK].append(linked[name])
        actions[LINK].append(dist)

    # Pull the repodata for channels we are using
    if channels:
        index.update(fetch_index(channels, **fetch_args))

    # Finish the MD5 verification
    for fn, md5 in verifies:
        info = index.get(fn)
        if info is None:
            sys.exit("Error: no package '%s' in index" % fn)
        if md5 and 'md5' not in info:
            sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
        if md5 and info['md5'] != md5:
            sys.exit('MD5 mismatch for: %s\n spec: %s\n repo: %s' %
                     (fn, md5, info['md5']))

    execute_actions(actions, index=index, verbose=verbose)
    return actions
def execute(args, parser):
    """Entry point for the ``conda remove`` CLI command.

    Builds removal actions from package names, --features, or --all,
    shows the plan, asks for confirmation (unless --json), executes it,
    and records removed specs in conda-meta/history.  With --all the
    entire environment directory is deleted afterwards.
    """
    import conda.plan as plan
    import conda.instructions as inst
    from conda.install import rm_rf, linked
    from conda import config

    if not (args.all or args.package_names):
        common.error_and_exit('no package names supplied,\n'
                              ' try "conda remove -h" for more details',
                              json=args.json,
                              error_type="ValueError")

    prefix = common.get_prefix(args)
    # refuse to delete the environment we are currently running from
    if args.all and prefix == config.default_prefix:
        common.error_and_exit("cannot remove current environment. deactivate and run conda remove again")
    common.check_write('remove', prefix, json=args.json)
    common.ensure_override_channels_requires_channel(args, json=args.json)
    channel_urls = args.channel or ()
    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit("you need to have 'conda-build >= 1.7.1' installed"
                                  " to use the --use-local option",
                                  json=args.json,
                                  error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        if exists(croot):
            channel_urls = [url_path(croot)] + list(channel_urls)
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      json=args.json,
                                      offline=args.offline)
    else:
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      json=args.json,
                                      offline=args.offline)

    specs = None
    if args.features:
        # with --features, the positional args are feature names
        features = set(args.package_names)
        actions = plan.remove_features_actions(prefix, index, features)
    elif args.all:
        if plan.is_root_prefix(prefix):
            common.error_and_exit('cannot remove root environment,\n'
                                  ' add -n NAME or -p PREFIX option',
                                  json=args.json,
                                  error_type="CantRemoveRoot")
        # unlink every linked package in the environment
        actions = {inst.PREFIX: prefix}
        for dist in sorted(linked(prefix)):
            plan.add_unlink(actions, dist)
    else:
        specs = common.specs_from_args(args.package_names)
        # protect conda's own dependencies in the root environment
        if (plan.is_root_prefix(prefix) and
                common.names_in_specs(common.root_no_rm, specs)):
            common.error_and_exit('cannot remove %s from root environment' %
                                  ', '.join(common.root_no_rm),
                                  json=args.json,
                                  error_type="CantRemoveFromRoot")
        actions = plan.remove_actions(prefix, specs, index=index,
                                      pinned=args.pinned)

    if plan.nothing_to_do(actions):
        if args.all:
            # environment already empty; just delete the directory
            rm_rf(prefix)

            if args.json:
                common.stdout_json({
                    'success': True,
                    'actions': actions
                })
            return
        common.error_and_exit('no packages found to remove from '
                              'environment: %s' % prefix,
                              json=args.json,
                              error_type="PackageNotInstalled")

    if not args.json:
        print()
        print("Package plan for package removal in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if args.json and args.dry_run:
        common.stdout_json({
            'success': True,
            'dry_run': True,
            'actions': actions
        })
        return

    if not args.json:
        common.confirm_yn(args)

    if args.json and not args.quiet:
        with json_progress_bars():
            plan.execute_actions(actions, index, verbose=not args.quiet)
    else:
        plan.execute_actions(actions, index, verbose=not args.quiet)
        # record the removed specs in conda-meta/history (best effort)
        if specs:
            try:
                with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                    f.write('# remove specs: %s\n' % specs)
            except IOError as e:
                if e.errno == errno.EACCES:
                    log.debug("Can't write the history file")
                else:
                    raise

    if args.all:
        rm_rf(prefix)

    if args.json:
        common.stdout_json({
            'success': True,
            'actions': actions
        })
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create

    Shared implementation behind three CLI commands; *command* selects the
    behavior ('install', 'update' or 'create').  Builds a list of specs from
    the command line and/or --file, fetches the channel index, computes an
    action plan, confirms it with the user, and executes it.  Exits via
    common.error_and_exit / common.exception_and_exit on any user error.
    """
    newenv = bool(command == 'create')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix, json=args.json)
    if config.force_32bit and plan.is_root_prefix(prefix):
        common.error_and_exit("cannot use CONDA_FORCE_32BIT=1 in root env")

    # --- argument validation specific to 'conda update' ---
    if command == 'update':
        if args.all:
            if args.packages:
                common.error_and_exit("""--all cannot be used with packages""",
                                      json=args.json,
                                      error_type="ValueError")
        elif not args.file:
            if len(args.packages) == 0:
                common.error_and_exit("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix,
                                      json=args.json,
                                      error_type="ValueError")

    if command == 'update':
        # NOTE: 'linked' is also read further down by the up-to-date check;
        # that branch only runs when command == 'update', so it is defined.
        linked = ci.linked(prefix)
        for name in args.packages:
            common.arg2spec(name, json=args.json)
            # 'update' takes bare names only; versions come from the resolver.
            if '=' in name:
                common.error_and_exit("Invalid package name: '%s'" % (name),
                                      json=args.json,
                                      error_type="ValueError")
            if name not in set(ci.name_dist(d) for d in linked):
                common.error_and_exit("package '%s' is not installed in %s" %
                                      (name, prefix),
                                      json=args.json,
                                      error_type="ValueError")

    if newenv and not args.no_default_packages:
        default_packages = config.create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in config.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)
    else:
        default_packages = []

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    # --- build the list of match specs ---
    specs = []
    if args.file:
        for fpath in args.file:
            specs.extend(common.specs_from_url(fpath, json=args.json))
    elif getattr(args, 'all', False):
        # 'update --all': spec every linked package (minus skips).
        linked = ci.linked(prefix)
        if not linked:
            common.error_and_exit("There are no packages installed in the "
                                  "prefix %s" % prefix)
        for pkg in linked:
            name, ver, build = pkg.rsplit('-', 2)
            if name in getattr(args, '_skip', ['anaconda']):
                continue
            if name == 'python' and ver.startswith('2'):
                # Oh Python 2...
                # Keep a py2 environment on py2 rather than jumping to py3.
                specs.append('%s >=%s,<3' % (name, ver))
            else:
                specs.append('%s' % name)
    specs.extend(common.specs_from_args(args.packages, json=args.json))

    if command == 'install' and args.revision:
        # Validates the revision argument early; result is discarded here.
        get_revision(args.revision, json=args.json)
    elif not (newenv and args.clone):
        common.check_specs(prefix, specs, json=args.json,
                           create=(command == 'create'))

    # --- direct .tar.bz2 package-file arguments (all-or-nothing) ---
    num_cp = sum(s.endswith('.tar.bz2') for s in args.packages)
    if num_cp:
        if num_cp == len(args.packages):
            depends = misc.install_local_packages(prefix, args.packages,
                                                  verbose=not args.quiet)
            if args.no_deps:
                depends = []
            # Remaining work is resolving the local packages' dependencies.
            specs = list(set(depends))
            args.unknown = True
        else:
            common.error_and_exit(
                "cannot mix specifications with conda package filenames",
                json=args.json,
                error_type="ValueError")

    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            depends = install_tar(prefix, tar_path, verbose=not args.quiet)
            if args.no_deps:
                depends = []
            specs = list(set(depends))
            args.unknown = True

    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit(
                "you need to have 'conda-build >= 1.7.1' installed"
                " to use the --use-local option",
                json=args.json,
                error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        if exists(croot):
            channel_urls = [url_path(croot)] + list(channel_urls)

    index = common.get_index_trap(channel_urls=channel_urls,
                                  prepend=not args.override_channels,
                                  use_cache=args.use_index_cache,
                                  unknown=args.unknown,
                                  json=args.json,
                                  offline=args.offline)

    # --- 'create --clone' is a complete, early-returning code path ---
    if newenv and args.clone:
        if set(args.packages) - set(default_packages):
            common.error_and_exit('did not expect any arguments for --clone',
                                  json=args.json,
                                  error_type="ValueError")
        clone(args.clone, prefix, json=args.json, quiet=args.quiet, index=index)
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)
        return

    # Don't update packages that are already up-to-date
    if command == 'update' and not (args.all or args.force):
        r = Resolve(index)
        orig_packages = args.packages[:]
        for name in orig_packages:
            installed_metadata = [ci.is_linked(prefix, dist) for dist in linked]
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            build_inst = [m['build_number'] for m in installed_metadata
                          if m['name'] == name]
            try:
                # An earlier branch verified the package is installed, so
                # exactly one version/build pair is expected here.
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
            except AssertionError as e:
                if args.json:
                    common.exception_and_exit(e, json=True)
                else:
                    raise
            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]
            if (latest.version == vers_inst[0] and
                    latest.build_number == build_inst[0]):
                # Already newest available; drop it from the request.
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import print_packages
            if not args.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(
                    message='All requested packages already installed.')
            return

    if args.force:
        args.no_deps = True

    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                common.error_and_exit("Error: could not create directory: %s" % prefix,
                                      json=args.json,
                                      error_type="OSError")
        else:
            common.error_and_exit("""\
environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix,
                                  json=args.json,
                                  error_type="NoEnvironmentFound")

    # --- compute the action plan ---
    try:
        if command == 'install' and args.revision:
            actions = plan.revert_actions(prefix, get_revision(args.revision))
        else:
            with common.json_progress_bars(json=args.json and not args.quiet):
                actions = plan.install_actions(prefix, index, specs,
                                               force=args.force,
                                               only_names=only_names,
                                               pinned=args.pinned,
                                               minimal_hint=args.alt_hint)
            if args.copy:
                # Rewrite each planned link to use copy semantics.
                new_link = []
                for pkg in actions["LINK"]:
                    dist, pkgs_dir, lt = inst.split_linkarg(pkg)
                    lt = ci.LINK_COPY
                    new_link.append("%s %s %d" % (dist, pkgs_dir, lt))
                actions["LINK"] = new_link
    except NoPackagesFound as e:
        error_message = e.args[0]

        if command == 'update' and args.all:
            # Packages not found here just means they were installed but
            # cannot be found any more. Just skip them.
            if not args.json:
                print("Warning: %s, skipping" % error_message)
            else:
                # Not sure what to do here
                pass
            args._skip = getattr(args, '_skip', ['anaconda'])
            for pkg in e.pkgs:
                p = pkg.split()[0]
                if p in args._skip:
                    # Avoid infinite recursion. This can happen if a spec
                    # comes from elsewhere, like --file
                    raise
                args._skip.append(p)

            # Retry with the missing packages excluded via args._skip.
            return install(args, parser, command=command)
        else:
            packages = {index[fn]['name'] for fn in index}

            for pkg in e.pkgs:
                # Suggest likely misspellings from the available packages.
                close = get_close_matches(pkg, packages, cutoff=0.7)
                if close:
                    error_message += ("\n\nDid you mean one of these?"
                                      "\n\n    %s" % (', '.join(close)))
                error_message += '\n\nYou can search for this package on anaconda.org with'
                error_message += '\n\n    anaconda search -t conda %s' % pkg
                if len(e.pkgs) > 1:
                    # Note this currently only happens with dependencies not found
                    error_message += '\n\n (and similarly for the other packages)'

                if not find_executable('anaconda', include_others=False):
                    error_message += '\n\nYou may need to install the anaconda-client command line client with'
                    error_message += '\n\n    conda install anaconda-client'

            pinned_specs = plan.get_pinned_specs(prefix)
            if pinned_specs:
                error_message += "\n\nNote that you have pinned specs in %s:" % join(prefix, 'conda-meta', 'pinned')
                error_message += "\n\n    %r" % pinned_specs

            common.error_and_exit(error_message, json=args.json)
    except SystemExit as e:
        # Unsatisfiable package specifications/no such revision/import error
        error_type = 'UnsatisfiableSpecifications'
        if e.args and 'could not import' in e.args[0]:
            error_type = 'ImportError'
        common.exception_and_exit(e, json=args.json, newline=True,
                                  error_text=False,
                                  error_type=error_type)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import print_packages
        if not args.json:
            regex = '^(%s)$' % '|'.join(spec_names)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(
                message='All requested packages already installed.')
        return

    if not args.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    # --- confirm and execute the plan ---
    if not args.json:
        common.confirm_yn(args)
    elif args.dry_run:
        common.stdout_json_success(actions=actions, dry_run=True)
        sys.exit(0)

    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
            if not (command == 'update' and args.all):
                # Best-effort history entry; read-only prefixes are tolerated.
                try:
                    with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                        f.write('# %s specs: %s\n' % (command, specs))
                except IOError as e:
                    if e.errno == errno.EACCES:
                        log.debug("Can't write the history file")
                    else:
                        raise
        except RuntimeError as e:
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            common.exception_and_exit(e, error_type=error_type, json=args.json)
        except SystemExit as e:
            common.exception_and_exit(e, json=args.json)

    if newenv:
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)

    if args.json:
        common.stdout_json_success(actions=actions)
def execute(args, parser):
    """Entry point for ``conda remove`` (pscheck-aware variant).

    Removes individual packages, feature sets (``--features``), or a whole
    environment (``--all``) from the prefix selected by *args*.  On Windows
    it first runs the process check (pscheck) so files held open by running
    programs are not unlinked.  Honors ``--json``, ``--dry-run`` and
    ``--use-local``.
    """
    import sys

    import conda.plan as plan
    import conda.instructions as inst
    from conda.cli import pscheck
    from conda.install import rm_rf, linked
    from conda import config

    if not (args.all or args.package_names):
        common.error_and_exit(
            "no package names supplied,\n"
            '       try "conda remove -h" for more details',
            json=args.json,
            error_type="ValueError",
        )

    prefix = common.get_prefix(args)
    # Refuse to delete the environment we are currently running from.
    if args.all and prefix == config.default_prefix:
        common.error_and_exit("cannot remove current environment. deactivate and run conda remove again")
    common.check_write("remove", prefix, json=args.json)
    common.ensure_override_channels_requires_channel(args, json=args.json)
    channel_urls = args.channel or ()

    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path

        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit(
                "you need to have 'conda-build >= 1.7.1' installed"
                " to use the --use-local option",
                json=args.json,
                error_type="RuntimeError",
            )
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        channel_urls = [url_path(croot)] + list(channel_urls)

    # Single index fetch; previously this call was duplicated in both arms
    # of the use_local conditional, differing only in the channel list.
    index = common.get_index_trap(
        channel_urls=channel_urls,
        prepend=not args.override_channels,
        use_cache=args.use_index_cache,
        json=args.json,
        offline=args.offline,
    )

    specs = None
    if args.features:
        # Treat the positional arguments as feature names, not package names.
        features = set(args.package_names)
        actions = plan.remove_features_actions(prefix, index, features)
    elif args.all:
        if plan.is_root_prefix(prefix):
            common.error_and_exit(
                "cannot remove root environment,\n"
                "       add -n NAME or -p PREFIX option",
                json=args.json,
                error_type="CantRemoveRoot",
            )
        # Unlink everything currently linked into the prefix.
        actions = {inst.PREFIX: prefix, inst.UNLINK: sorted(linked(prefix))}
    else:
        specs = common.specs_from_args(args.package_names)
        # Protect packages conda itself depends on from removal in root.
        if plan.is_root_prefix(prefix) and common.names_in_specs(common.root_no_rm, specs):
            common.error_and_exit(
                "cannot remove %s from root environment" % ", ".join(common.root_no_rm),
                json=args.json,
                error_type="CantRemoveFromRoot",
            )
        actions = plan.remove_actions(prefix, specs, index=index, pinned=args.pinned)

    if plan.nothing_to_do(actions):
        if args.all:
            # Nothing linked, but --all still wipes the directory itself.
            rm_rf(prefix)
            if args.json:
                common.stdout_json({"success": True, "actions": actions})
            return
        common.error_and_exit(
            "no packages found to remove from " "environment: %s" % prefix,
            json=args.json,
            error_type="PackageNotInstalled",
        )

    if not args.json:
        print()
        print("Package plan for package removal in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if args.json and args.dry_run:
        common.stdout_json({"success": True, "dry_run": True, "actions": actions})
        return

    if not args.json:
        # Interactive mode: pscheck prompts the user itself when processes
        # are running; only ask for plain confirmation when it did not.
        if not pscheck.main(args):
            common.confirm_yn(args)
    elif sys.platform == "win32" and not args.force_pscheck and not pscheck.check_processes(verbose=False):
        common.error_and_exit(
            "Cannot continue removal while processes " "from packages are running without --force-pscheck.",
            json=True,
            error_type="ProcessesStillRunning",
        )

    if args.json and not args.quiet:
        with json_progress_bars():
            plan.execute_actions(actions, index, verbose=not args.quiet)
    else:
        plan.execute_actions(actions, index, verbose=not args.quiet)

    if specs:
        # Best-effort history entry.  Guarded like the sibling remove/install
        # implementations: a read-only prefix (EACCES) must not turn an
        # otherwise-successful removal into a crash.
        try:
            with open(join(prefix, "conda-meta", "history"), "a") as f:
                f.write("# remove specs: %s\n" % specs)
        except IOError as e:
            if e.errno == errno.EACCES:
                log.debug("Can't write the history file")
            else:
                raise

    if args.all:
        rm_rf(prefix)

    if args.json:
        common.stdout_json({"success": True, "actions": actions})