def clone_bundle(path, prefix=None, bundle_name=None):
    """
    Clone the bundle (located at `path`) by creating a new environment at
    `prefix` (unless prefix is None or the prefix directory already exists).

    Bundle payload files (members under the BDP prefix) are extracted into
    ~/bundles/<bundle_name>; all other members are extracted into `prefix`.
    """
    try:
        t = tarfile.open(path, 'r:*')
        meta = json.load(t.extractfile('info/index.json'))
    except tarfile.ReadError:
        raise RuntimeError('bad tar archive: %s' % path)
    except KeyError:
        # extractfile raises KeyError when the member is absent
        raise RuntimeError("no archive 'info/index.json' in: %s" % (path))

    if prefix and not isdir(prefix):
        # materialize the environment files, skipping bundle payload and
        # metadata members
        for m in t.getmembers():
            if m.path.startswith((BDP, 'info/')):
                continue
            t.extract(m, path=prefix)
        dists = discard_conda('-'.join(s.split())
                              for s in meta.get('depends', []))
        actions = plan.ensure_linked_actions(dists, prefix)
        index = get_index()
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=True)

    bundle_dir = abspath(expanduser('~/bundles/%s' %
                                    (bundle_name or meta.get('bundle_name'))))
    for m in t.getmembers():
        if m.path.startswith(BDP):
            targetpath = join(bundle_dir, m.path[len(BDP):])
            # NOTE(review): relies on the private TarFile._extract_member to
            # extract to an arbitrary target path — may break across Python
            # versions; confirm before upgrading the interpreter
            t._extract_member(m, targetpath)

    t.close()
def install(prefix, specs, args, env, prune=False):
    """Install *specs* into the environment at *prefix*.

    Specs of the form "channel::package" have their channel split out and
    added to the channel list; including 'nodefaults' in env.channels
    disables the default channels.
    """
    # TODO: support all various ways this happens
    # Split "channel::package" specs into channel and bare package parts.
    new_specs = []
    channel_urls = set()
    for elem in specs:
        if "::" in elem:
            channel_urls.add(elem.split("::")[0])
            new_specs.append(elem.split("::")[-1])
        else:
            new_specs.append(elem)
    specs = new_specs
    channel_urls = list(channel_urls)
    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = channel_urls + [chan for chan in env.channels
                                   if chan != 'nodefaults']
    index = get_index(channel_urls=channel_urls,
                      prepend='nodefaults' not in env.channels,
                      prefix=prefix)
    _channel_priority_map = prioritize_channels(channel_urls)
    unlink_link_transaction = get_install_transaction(
        prefix, index, specs, prune=prune,
        channel_priority_map=_channel_priority_map)
    with common.json_progress_bars(json=args.json and not args.quiet):
        # fetch/extract the packages first, then apply the unlink/link plan
        pfe = unlink_link_transaction.get_pfe()
        pfe.execute()
        unlink_link_transaction.execute()
def create_env(prefix, specs, clear_cache=True, verbose=True, channel_urls=(),
               override_channels=False):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    if not isdir(config.bldpkgs_dir):
        os.makedirs(config.bldpkgs_dir)
    update_index(config.bldpkgs_dir)
    if specs:  # Don't waste time if there is nothing to do
        if clear_cache:
            # remove the cache such that a refetch is made,
            # this is necessary because we add the local build repo URL
            fetch_index.cache = {}
        index = get_index(channel_urls=[url_path(config.croot)] +
                          list(channel_urls),
                          prepend=not override_channels)
        warn_on_old_conda_build(index)
        # restrict to the first package cache dir — reason not evident here;
        # presumably to keep downloads in a single known location
        cc.pkgs_dirs = cc.pkgs_dirs[:1]
        actions = plan.install_actions(prefix, index, specs)
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=verbose)
    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)
def distribution_exists_on_channel(binstar_cli, meta, fname, owner,
                                   channel='main'):
    """
    Determine whether a distribution exists on a specific channel.

    Note from @pelson: As far as I can see, there is no easy way to do this
    on binstar.
    """
    basename = os.path.basename(fname)
    label_url = '/'.join([owner, 'label', channel])
    channel_index = get_index([label_url], prepend=False, use_cache=False)
    try:
        return channel_index[basename]['subdir'] == conda.config.subdir
    except KeyError:
        # either the file or its 'subdir' entry is missing from the index
        return False
def clone_bundle(path, prefix=None, bundle_name=None):
    """
    Clone the bundle (located at `path`) by creating a new environment at
    `prefix` (unless prefix is None or the prefix directory already exists).

    Bundle payload files (members under the BDP prefix) are extracted into
    ~/bundles/<bundle_name>; all other members are extracted into `prefix`.

    Fix: the tarfile is now closed via try/finally, so the handle is no
    longer leaked when metadata parsing, extraction, or plan execution
    raises part-way through.
    """
    t = None
    try:
        try:
            t = tarfile.open(path, 'r:*')
            meta = json.load(t.extractfile('info/index.json'))
        except tarfile.ReadError:
            raise RuntimeError('bad tar archive: %s' % path)
        except KeyError:
            # extractfile raises KeyError when the member is absent
            raise RuntimeError("no archive 'info/index.json' in: %s" % (path))

        if prefix and not isdir(prefix):
            # materialize the environment files, skipping bundle payload
            # and metadata members
            for m in t.getmembers():
                if m.path.startswith((BDP, 'info/')):
                    continue
                t.extract(m, path=prefix)
            dists = discard_conda('-'.join(s.split())
                                  for s in meta.get('depends', []))
            actions = plan.ensure_linked_actions(dists, prefix)
            index = get_index()
            plan.display_actions(actions, index)
            plan.execute_actions(actions, index, verbose=True)

        bundle_dir = abspath(expanduser('~/bundles/%s' %
                                        (bundle_name or
                                         meta.get('bundle_name'))))
        for m in t.getmembers():
            if m.path.startswith(BDP):
                targetpath = join(bundle_dir, m.path[len(BDP):])
                # TarFile has no public "extract to an arbitrary target
                # path" API, so the private helper is used deliberately here.
                t._extract_member(m, targetpath)
    finally:
        if t is not None:
            t.close()
def execute(args, parser):
    """conda search: print matching packages available on the configured
    channels, optionally only those that are outdated in the prefix."""
    import re
    import conda.install as install
    from conda.api import get_index
    from conda.resolve import MatchSpec, Resolve

    if args.regex:
        pat = re.compile(args.regex, re.I)
    else:
        pat = None
    prefix = common.get_prefix(args)
    if not args.canonical:
        linked = install.linked(prefix)

    # XXX: Make this work with more than one platform
    platform = args.platform or ''  # NOTE(review): assigned but never used

    common.ensure_override_channels_requires_channel(args, dashc=False)
    channel_urls = args.channel or ()
    index = get_index(channel_urls=channel_urls,
                      prepend=not args.override_channels,
                      platform=args.platform)
    r = Resolve(index)
    for name in sorted(r.groups):
        disp_name = name
        if pat and pat.search(name) is None:
            continue
        if args.outdated:
            # skip packages not installed, or whose single installed version
            # is already the latest available
            vers_inst = [
                dist.rsplit('-', 2)[1] for dist in linked
                if dist.rsplit('-', 2)[0] == name
            ]
            if not vers_inst:
                continue
            assert len(vers_inst) == 1, name
            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                continue
            latest = pkgs[-1]
            if latest.version == vers_inst[0]:
                continue
        for pkg in sorted(r.get_pkgs(MatchSpec(name))):
            dist = pkg.fn[:-8]  # strip '.tar.bz2'
            if args.canonical:
                print(dist)
                continue
            inst = '*' if dist in linked else ' '
            print('%-25s %s %-15s %15s %-15s %s' % (
                disp_name, inst,
                pkg.version,
                r.index[pkg.fn]['build'],
                canonical_channel_name(pkg.channel),
                common.disp_features(r.features(pkg.fn)),
            ))
            disp_name = ''  # only print the name on the group's first row
def get_build_index(clear_cache=True, channel_urls=(), override_channels=False):
    """Return a package index that includes the local build channel.

    Fix: ``channel_urls`` and ``override_channels`` were read but never
    defined in this scope (NameError at call time); they are now keyword
    parameters with backward-compatible defaults, mirroring the fixed
    variant of this function elsewhere in the codebase.
    """
    if clear_cache:
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
    return get_index(channel_urls=[url_path(config.croot)] + list(channel_urls),
                     prepend=not override_channels)
def install(prefix, specs, args, env, prune=False):
    """Install *specs* into the environment at *prefix*.

    Specs of the form "channel::package" have their channel split out and
    added to the channel list; including 'nodefaults' in env.channels
    disables the default channels.
    """
    # TODO: support all various ways this happens
    # Split "channel::package" specs into channel and bare package parts.
    new_specs = []
    channel_urls = set()
    for elem in specs:
        if "::" in elem:
            channel_urls.add(elem.split("::")[0])
            new_specs.append(elem.split("::")[-1])
        else:
            new_specs.append(elem)
    specs = new_specs
    channel_urls = list(channel_urls)
    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = channel_urls + [chan for chan in env.channels
                                   if chan != "nodefaults"]
    index = get_index(channel_urls=channel_urls,
                      prepend="nodefaults" not in env.channels,
                      prefix=prefix)
    _channel_priority_map = prioritize_channels(channel_urls)
    action_set = plan.install_actions_list(
        prefix, index, specs, prune=prune,
        channel_priority_map=_channel_priority_map
    )
    with common.json_progress_bars(json=args.json and not args.quiet):
        for actions in action_set:
            try:
                plan.execute_actions(actions, index, verbose=not args.quiet)
            except RuntimeError as e:
                # translate low-level failures into conda exception types
                if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                    raise LockError("Already locked: %s" % text_type(e))
                else:
                    raise CondaRuntimeError("RuntimeError: %s" % e)
            except SystemExit as e:
                raise CondaSystemExit("Exiting", e)
def main(self): index = get_index(use_cache=True) # If it is not already defined with environment variables, we set the CONDA_NPY # to the latest possible value. Since we compute a build matrix anyway, this is # useful to prevent conda-build bailing if the recipe depends on it (e.g. # ``numpy x.x``), and to ensure that recipes that don't care which version they want # at build/test time get a sensible version. if conda_build.config.config.CONDA_NPY is None: resolver = conda.resolve.Resolve(index) npy = resolver.get_pkgs('numpy', emptyok=True) if npy: version = ''.join(max(npy).version.split('.')[:2]) conda_build.config.config.CONDA_NPY = version recipe_metas = self.fetch_all_metas() print('Resolving distributions from {} recipes... '.format(len(recipe_metas))) all_distros = self.compute_build_distros(index, recipe_metas) print('Computed that there are {} distributions from the {} ' 'recipes:'.format(len(all_distros), len(recipe_metas))) recipes_and_dist_locn = self.find_existing_built_dists(all_distros) print('Resolved dependencies, will be built in the following order: \n\t{}'.format( '\n\t'.join(['{} (will be built: {})'.format(meta.dist(), dist_locn is None) for meta, dist_locn in recipes_and_dist_locn]))) for meta, built_dist_location in recipes_and_dist_locn: was_built = built_dist_location is None if was_built: built_dist_location = self.build(meta) self.post_build(meta, built_dist_location, was_built)
def clone_bundle(path, prefix):
    """
    Clone the bundle (located at `path`) by creating a new environment at
    `prefix`.

    The directory `path` is located in should be some temp directory or some
    other directory OUTSIDE /opt/anaconda (this function handles copying of
    the file if necessary for you). After calling this function, the
    original file (at `path`) may be removed.

    Fix: ``meta.get('requires')`` now defaults to ``[]`` so metadata with
    neither "depends" nor "requires" no longer raises ``TypeError`` when the
    comprehension iterates over None (matches old_clone_bundle).
    """
    assert not abspath(path).startswith(abspath(config.root_dir))
    assert not isdir(prefix)
    fn = basename(path)
    assert re.match(r'share-[0-9a-f]{40}-\d+\.tar\.bz2$', fn), fn
    dist = fn[:-8]  # strip '.tar.bz2'

    if not install.is_extracted(config.pkgs_dir, dist):
        shutil.copyfile(path, join(config.pkgs_dir, dist + '.tar.bz2'))
        plan.execute_plan(['%s %s' % (plan.EXTRACT, dist)])
    assert install.is_extracted(config.pkgs_dir, dist)

    with open(join(config.pkgs_dir, dist, 'info', 'index.json')) as fi:
        meta = json.load(fi)

    # for backwards compatibility, use "requires" when "depends" is not there
    dists = ['-'.join(r.split())
             for r in meta.get('depends', meta.get('requires', []))
             if not r.startswith('conda ')]
    dists.append(dist)

    actions = plan.ensure_linked_actions(dists, prefix)
    index = get_index()
    plan.display_actions(actions, index)
    plan.execute_actions(actions, index, verbose=True)

    # drop the conda-meta record for the bundle dist itself — presumably so
    # it is not tracked as an installed package
    os.unlink(join(prefix, 'conda-meta', dist + '.json'))
def find_existing_built_dists(self, recipe_metas):
    """Return [meta, location] pairs, one per recipe.

    ``location`` is the channel or local directory where a matching built
    distribution already exists, or None if it still needs to be built.
    """
    recipes = tuple([meta, None] for meta in recipe_metas)
    if self.inspection_channels:
        # For an unknown reason we are unable to cache the get_index call.
        # There is a test which fails against v3.18.6 if use_cache is True.
        index = get_index(self.inspection_channels, prepend=False,
                          use_cache=False)
        # We look to see if a distribution exists in the channel. Note: This
        # is not checking there is a distribution for this platform. This
        # isn't a big deal, as channels are typically split by platform.
        # If this changes, we would need to re-consider how this is
        # implemented.
        # We temporarily workaround the index containing the channel
        # information in the key. We should deal with this properly though.
        index = {meta['fn']: meta for meta in index.values()}
        for recipe_pair in recipes:
            meta, dist_location = recipe_pair
            if meta.pkg_fn() in index:
                recipe_pair[1] = index[meta.pkg_fn()]['channel']
    if self.inspection_directories:
        for directory in self.inspection_directories:
            files = glob.glob(os.path.join(directory, '*.tar.bz2'))
            fnames = [os.path.basename(fpath) for fpath in files]
            for recipe_pair in recipes:
                meta, dist_location = recipe_pair
                # only fill in a directory if no channel already provided one
                if dist_location is None and meta.pkg_fn() in fnames:
                    recipe_pair[1] = directory
    return recipes
def get_build_index(clear_cache=True, channel_urls=(), override_channels=False):
    """Build a package index with the local build channel prepended to the
    requested channels."""
    if clear_cache:
        # Drop the cached index so the freshly updated local build repo is
        # refetched rather than served stale.
        fetch_index.cache = {}
    urls = [url_path(config.croot)]
    urls.extend(channel_urls)
    return get_index(channel_urls=urls, prepend=not override_channels)
def install(prefix, specs, args, env, prune=False):
    """Install *specs* into the environment at *prefix*.

    Specs of the form "channel::package" have their channel split out and
    added to the channel list; including 'nodefaults' in env.channels
    disables the default channels.
    """
    # TODO: support all various ways this happens
    # Split "channel::package" specs into channel and bare package parts.
    new_specs = []
    channel_urls = set()
    for elem in specs:
        if "::" in elem:
            channel_urls.add(elem.split("::")[0])
            new_specs.append(elem.split("::")[-1])
        else:
            new_specs.append(elem)
    specs = new_specs
    channel_urls = list(channel_urls)
    # Including 'nodefaults' in the channels list disables the defaults
    index = get_index(channel_urls=channel_urls +
                      [chan for chan in env.channels if chan != 'nodefaults'],
                      prepend='nodefaults' not in env.channels,
                      prefix=prefix)
    actions = plan.install_actions(prefix, index, specs, prune=prune)
    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
        except RuntimeError as e:
            # translate low-level failures into conda exception types
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                raise LockError('Already locked: %s' % text_type(e))
            else:
                raise CondaRuntimeError('RuntimeError: %s' % e)
        except SystemExit as e:
            raise CondaSystemExit('Exiting', e)
def main():
    """Parse package specs and a target platform from the command line, try
    to solve the specs, and print either the solution or a hint.

    Returns 0 on success, 1 when no solution was found.

    Fix: removed a leftover debug ``print(args)`` that dumped the parsed
    Namespace to stdout on every run.
    """
    p = argparse.ArgumentParser(description=__doc__,
                                formatter_class=argparse.RawTextHelpFormatter)
    p.add_argument('specs', nargs='+',
                   help='One or more package specifications. '
                        'Note that to use spaces inside\na spec, you need to enclose it in '
                        'quotes on the command line. \nExamples: \'numpy 1.9*\' scikit-learn \'python 3.5*\'')
    p.add_argument('-p', '--platform',
                   choices=['linux-64', 'linux-32', 'osx-64', 'win-32', 'win-64'],
                   default=conda.config.subdir,
                   help='The platform. Default: \'%s\'' % conda.config.subdir)
    args = p.parse_args()

    # point the rest of conda at the requested platform
    conda.config.platform = args.platform.split('-')[0]
    conda.config.subdir = args.platform

    index = get_index()
    resolver = Resolve(index)

    fns = solve(args.specs, resolver)
    if fns is not False:
        print('\n\nFound solution:')
        print(' ', '\n '.join(fns))
        return 0
    else:
        print("Generating hint: %s" % (', '.join(args.specs)))
        execute(args.specs, resolver)
        return 1
def old_clone_bundle(path, prefix):
    """
    Clone the bundle (located at `path`) by creating a new environment at
    `prefix`.

    The directory `path` is located in should be some temp directory or some
    other directory OUTSIDE /opt/anaconda (this function handles copying of
    the file if necessary for you). After calling this function, the
    original file (at `path`) may be removed.
    """
    assert not abspath(path).startswith(abspath(config.root_dir))
    assert not isdir(prefix)
    fn = basename(path)
    assert re.match(r'share-[0-9a-f]{40}-\d+\.tar\.bz2$', fn), fn
    dist = fn[:-8]  # strip '.tar.bz2'

    pkgs_dir = config.pkgs_dirs[0]
    if not install.is_extracted(pkgs_dir, dist):
        shutil.copyfile(path, join(pkgs_dir, dist + '.tar.bz2'))
        inst.execute_instructions([(inst.EXTRACT, (dist,))])
    assert install.is_extracted(pkgs_dir, dist)

    with open(join(pkgs_dir, dist, 'info', 'index.json')) as fi:
        meta = json.load(fi)

    # for backwards compatibility, use "requires" when "depends" is not there
    dists = ['-'.join(r.split())
             for r in meta.get('depends', meta.get('requires', []))
             if not r.startswith('conda ')]
    dists.append(dist)

    actions = plan.ensure_linked_actions(dists, prefix)
    index = get_index()
    plan.execute_actions(actions, index, verbose=False)

    # drop the conda-meta record for the bundle dist itself — presumably so
    # it is not tracked as an installed package
    os.unlink(join(prefix, 'conda-meta', dist + '.json'))
def execute(args, parser):
    """conda remove: remove packages, a feature set, or (with --all) the
    entire environment at the selected prefix."""
    import sys

    import conda.plan as plan
    from conda.api import get_index
    from conda.cli import pscheck
    from conda.install import rm_rf, linked

    if not (args.all or args.package_names):
        sys.exit('Error: no package names supplied,\n'
                 ' try "conda remove -h" for more details')

    prefix = common.get_prefix(args)
    common.check_write('remove', prefix)

    index = None
    if args.features:
        # removing features needs the full channel index
        channel_urls = args.channel or ()
        common.ensure_override_channels_requires_channel(args)
        index = get_index(channel_urls=channel_urls,
                          prepend=not args.override_channels)
        features = set(args.package_names)
        actions = plan.remove_features_actions(prefix, index, features)
    elif args.all:
        if plan.is_root_prefix(prefix):
            sys.exit('Error: cannot remove root environment,\n'
                     ' add -n NAME or -p PREFIX option')
        actions = {plan.PREFIX: prefix,
                   plan.UNLINK: sorted(linked(prefix))}
    else:
        specs = common.specs_from_args(args.package_names)
        # refuse to strip protected packages from the root environment
        if (plan.is_root_prefix(prefix) and
                common.names_in_specs(common.root_no_rm, specs)):
            sys.exit('Error: cannot remove %s from root environment' %
                     ', '.join(common.root_no_rm))
        actions = plan.remove_actions(prefix, specs)

    if plan.nothing_to_do(actions):
        if args.all:
            # nothing linked, but --all still means: delete the prefix
            rm_rf(prefix)
            return
        sys.exit('Error: no packages found to remove from '
                 'environment: %s' % prefix)

    print()
    print("Package plan for package removal in environment %s:" % prefix)
    plan.display_actions(actions)

    if not pscheck.main(args):
        common.confirm_yn(args)

    plan.execute_actions(actions, index, verbose=not args.quiet)

    if args.all:
        rm_rf(prefix)
def main():
    """Solve for 'anaconda 2.4.1' and print the tarball path of every
    distribution in the solution."""
    resolver = Resolve(get_index())
    solution = resolver.solve(['anaconda 2.4.1'], features=set(),
                              installed=set(), update_deps=True)
    for filename in solution:
        print(os.path.join('tarballs', filename))
def _get_items(self):
    """Return the package names present in the index (completion items)."""
    # TODO: Include .tar.bz2 files for local installs.
    from conda.api import get_index

    args = self.parsed_args
    query = dict(channel_urls=args.channel or (),
                 use_cache=True,
                 prepend=not args.override_channels,
                 unknown=args.unknown,
                 offline=args.offline)
    index = get_index(**query)
    # index keys look like "<name>-<version>-<build>"; keep only the name
    return [pkg_key.rsplit('-', 2)[0] for pkg_key in index]
def execute(args, parser):
    """conda create: create a new environment from command-line specs or a
    spec file."""
    import sys
    from os.path import exists

    import conda.plan as plan
    from conda.api import get_index

    if len(args.package_specs) == 0 and not args.file:
        sys.exit('Error: too few arguments, must supply command line '
                 'package specs or --file')

    common.ensure_name_or_prefix(args, 'create')
    prefix = common.get_prefix(args)

    if exists(prefix):
        # creating over an existing directory is an error either way; the
        # message depends on how the target was specified
        if args.prefix:
            raise RuntimeError("'%s' already exists, must supply new "
                               "directory for -p/--prefix" % prefix)
        else:
            raise RuntimeError("'%s' already exists, must supply new "
                               "directory for -n/--name" % prefix)

    if args.file:
        specs = common.specs_from_file(args.file)
    else:
        specs = common.specs_from_args(args.package_specs)

    common.check_specs(prefix, specs)

    channel_urls = args.channel or ()
    common.ensure_override_channels_requires_channel(args)
    index = get_index(channel_urls=channel_urls,
                      prepend=not args.override_channels)
    actions = plan.install_actions(prefix, index, specs)

    if plan.nothing_to_do(actions):
        print('No matching packages could be found, nothing to do')
        return

    print()
    print("Package plan for creating environment at %s:" % prefix)
    plan.display_actions(actions, index)
    common.confirm_yn(args)
    plan.execute_actions(actions, index, verbose=not args.quiet)

    if sys.platform != 'win32':
        # show the user how to (de)activate the new environment
        activate_name = prefix
        if args.name:
            activate_name = args.name
        print("#")
        print("# To activate this environment, use:")
        print("# $ source activate %s" % activate_name)
        print("#")
        print("# To deactivate this environment, use:")
        print("# $ source deactivate")
        print("#")
def check_install(packages, platform=None, channel_urls=(), prepend=True,
                  minimal_hint=False):
    """Dry-run an install of *packages* into a throwaway prefix.

    Fix: the temporary prefix is created *before* the try block; previously
    a failure inside ``tempfile.mkdtemp`` would leave ``prefix`` unbound and
    the ``finally`` clause would raise NameError, masking the real error.
    """
    prefix = tempfile.mkdtemp('conda')
    try:
        specs = common.specs_from_args(packages)
        index = get_index(channel_urls=channel_urls, prepend=prepend,
                          platform=platform)
        plan.install_actions(prefix, index, specs, pinned=False,
                             minimal_hint=minimal_hint)
    finally:
        ci.rm_rf(prefix)
def check_install(packages, platform=None, channel_urls=(), prepend=True):
    """Dry-run an install of *packages* into a throwaway prefix.

    Fix: the temporary prefix is created *before* the try block; previously
    a failure inside ``tempfile.mkdtemp`` would leave ``prefix`` unbound and
    the ``finally`` clause would raise NameError, masking the real error.
    """
    prefix = tempfile.mkdtemp('conda')
    try:
        specs = common.specs_from_args(packages)
        index = get_index(channel_urls=channel_urls, prepend=prepend,
                          platform=platform)
        plan.install_actions(prefix, index, specs)
    finally:
        ci.rm_rf(prefix)
def execute(args, parser):
    """conda search: print matching packages available on the configured
    channels, optionally only those that are outdated in the prefix."""
    import re
    import conda.install as install
    from conda.api import get_index
    from conda.resolve import MatchSpec, Resolve

    if args.regex:
        pat = re.compile(args.regex, re.I)
    else:
        pat = None
    prefix = common.get_prefix(args)
    if not args.canonical:
        linked = install.linked(prefix)

    # XXX: Make this work with more than one platform
    platform = args.platform or ''  # NOTE(review): assigned but never used

    common.ensure_override_channels_requires_channel(args, dashc=False)
    channel_urls = args.channel or ()
    index = get_index(channel_urls=channel_urls,
                      prepend=not args.override_channels,
                      platform=args.platform)
    r = Resolve(index)
    for name in sorted(r.groups):
        disp_name = name
        if pat and pat.search(name) is None:
            continue
        if args.outdated:
            # skip packages not installed, or whose single installed version
            # is already the latest available
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            if not vers_inst:
                continue
            assert len(vers_inst) == 1, name
            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                continue
            latest = pkgs[-1]
            if latest.version == vers_inst[0]:
                continue
        for pkg in sorted(r.get_pkgs(MatchSpec(name))):
            dist = pkg.fn[:-8]  # strip '.tar.bz2'
            if args.canonical:
                print(dist)
                continue
            inst = '*' if dist in linked else ' '
            print('%-25s %s %-15s %15s %-15s %s' % (
                disp_name, inst,
                pkg.version,
                r.index[pkg.fn]['build'],
                canonical_channel_name(pkg.channel),
                common.disp_features(r.features(pkg.fn)),
            ))
            disp_name = ''  # only print the name on the group's first row
def execute(args, parser):
    """conda remove: remove packages, a feature set, or (with --all) the
    entire environment at the selected prefix."""
    import sys

    import conda.plan as plan
    from conda.api import get_index
    from conda.cli import pscheck
    from conda.install import rm_rf, linked

    if not (args.all or args.package_names):
        sys.exit('Error: no package names supplied,\n'
                 ' try "conda remove -h" for more details')

    prefix = common.get_prefix(args)
    common.check_write('remove', prefix)

    index = None
    if args.features:
        # removing features needs the full channel index
        common.ensure_override_channels_requires_channel(args)
        channel_urls = args.channel or ()
        index = get_index(channel_urls=channel_urls,
                          prepend=not args.override_channels)
        features = set(args.package_names)
        actions = plan.remove_features_actions(prefix, index, features)
    elif args.all:
        if plan.is_root_prefix(prefix):
            sys.exit('Error: cannot remove root environment,\n'
                     ' add -n NAME or -p PREFIX option')
        actions = {plan.PREFIX: prefix,
                   plan.UNLINK: sorted(linked(prefix))}
    else:
        specs = common.specs_from_args(args.package_names)
        # refuse to strip protected packages from the root environment
        if (plan.is_root_prefix(prefix) and
                common.names_in_specs(common.root_no_rm, specs)):
            sys.exit('Error: cannot remove %s from root environment' %
                     ', '.join(common.root_no_rm))
        actions = plan.remove_actions(prefix, specs)

    if plan.nothing_to_do(actions):
        if args.all:
            # nothing linked, but --all still means: delete the prefix
            rm_rf(prefix)
            return
        sys.exit('Error: no packages found to remove from '
                 'environment: %s' % prefix)

    print()
    print("Package plan for package removal in environment %s:" % prefix)
    plan.display_actions(actions)

    if not pscheck.main(args):
        common.confirm_yn(args)

    plan.execute_actions(actions, index, verbose=not args.quiet)

    if args.all:
        rm_rf(prefix)
def load_index(cachefn):
    """Return the package index, using *cachefn* as an on-disk cache.

    On a cache miss the index is fetched via ``get_index`` and written to
    *cachefn* for next time.

    Fix: EAFP — open the file and handle ``FileNotFoundError`` instead of
    the ``os.path.exists``/``open`` pair, which raced if the file vanished
    between the check and the read.
    """
    try:
        with open(cachefn) as f:
            return json.load(f)
    except FileNotFoundError:
        index = get_index()
        with open(cachefn, 'w') as f:
            json.dump(index, f)
        return index
def execute(args, parser):
    """conda install: install package specs (or local .tar.bz2 files) into
    the selected environment."""
    import conda.plan as plan
    from conda.api import get_index
    from conda.cli import pscheck

    prefix = common.get_prefix(args)

    # handle explict installs of conda packages
    if args.packages and all(s.endswith('.tar.bz2') for s in args.packages):
        from conda.misc import install_local_packages
        install_local_packages(prefix, args.packages, verbose=not args.quiet)
        return

    if any(s.endswith('.tar.bz2') for s in args.packages):
        raise RuntimeError("cannot mix specifications with conda package "
                           "filenames")

    if args.force:
        # --force implies installing exactly what was asked for, no deps
        args.no_deps = True

    if args.file:
        specs = common.specs_from_file(args.file)
    else:
        specs = common.specs_from_args(args.packages)

    common.check_specs(prefix, specs)

    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()
    index = get_index(channel_urls=channel_urls,
                      prepend=not args.override_channels)

    actions = plan.install_actions(prefix, index, specs,
                                   force=args.force, only_names=only_names)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import list_packages
        regex = '^(%s)$' % '|'.join(spec_names)
        print('# All requested packages already installed.')
        list_packages(prefix, regex)
        return

    print()
    print("Package plan for installation in environment %s:" % prefix)
    plan.display_actions(actions, index)

    if not pscheck.main(args):
        common.confirm_yn(args)

    plan.execute_actions(actions, index, verbose=not args.quiet)
def test_installable(channel='defaults', verbose=True):
    """Attempt a dry-run install of every package on *channel* for each
    platform, printing failures to stderr.

    NOTE(review): the returned flag is named ``success`` but is set to True
    only when a failure occurs — callers appear to treat a truthy return as
    "something failed"; confirm the intent before renaming or inverting.
    """
    if not verbose:
        # NOTE(review): stdout is redirected to devnull and never restored
        sys.stdout = open(os.devnull, 'w')

    success = False
    has_py = re.compile(r'py(\d)(\d)')
    for platform in ['osx-64', 'linux-32', 'linux-64', 'win-32', 'win-64']:
        print("######## Testing platform %s ########" % platform)
        channels = [channel] + get_default_urls()
        index = get_index(channel_urls=channels, prepend=False,
                          platform=platform)
        for package in sorted(index):
            if channel != 'defaults':
                # If we give channels at the command line, only look at
                # packages from those channels (not defaults).
                if index[package]['channel'] not in normalize_urls(
                        [channel], platform=platform):
                    continue
            name, version, build = package.rsplit('.tar.bz2', 1)[0].rsplit('-', 2)
            if name in {'conda', 'conda-build'}:
                # conda can only be installed in the root environment
                continue
            # Don't fail just because the package is a different version of
            # Python than the default. We should probably check depends
            # rather than the build string.
            match = has_py.search(build)
            assert match if 'py' in build else True, build
            if match:
                additional_packages = ['python=%s.%s' % (match.group(1),
                                                         match.group(2))]
            else:
                additional_packages = []

            print('Testing %s=%s' % (name, version))
            # if additional_packages:
            #     print("Including %s" % additional_packages[0])

            try:
                check_install([name + '=' + version] + additional_packages,
                              channel_urls=channels, prepend=False,
                              platform=platform)
            except KeyboardInterrupt:
                raise
            # sys.exit raises an exception that doesn't subclass from Exception
            except BaseException as e:
                success = True
                print("FAIL: %s %s on %s with %s (%s)" %
                      (name, version, platform, additional_packages, e),
                      file=sys.stderr)
    return success
def _get_items(self):
    """Return the package names present in the index (completion items)."""
    # TODO: Include .tar.bz2 files for local installs.
    from conda.api import get_index

    args = self.parsed_args
    kwargs = {'channel_urls': args.channel or (),
              'use_cache': True,
              'prepend': not args.override_channels,
              'unknown': args.unknown,
              'offline': args.offline}
    if hasattr(args, 'platform'):  # only the search sub-command has this
        kwargs['platform'] = args.platform
    # index keys look like "<name>-<version>-<build>"; keep only the name
    return [pkg_key.rsplit('-', 2)[0] for pkg_key in get_index(**kwargs)]
def check_install(packages, platform=None, channel_urls=(), prepend=True,
                  minimal_hint=False):
    """Compute and display (but do not execute) install actions for
    *packages* in a throwaway prefix, returning the actions.

    Fix: the temporary prefix is created *before* the try block; previously
    a failure inside ``tempfile.mkdtemp`` would leave ``prefix`` unbound and
    the ``finally`` clause would raise NameError, masking the real error.
    """
    prefix = tempfile.mkdtemp('conda')
    try:
        specs = common.specs_from_args(packages)
        index = get_index(channel_urls=channel_urls, prepend=prepend,
                          platform=platform)
        actions = plan.install_actions(prefix, index, specs, pinned=False,
                                       minimal_hint=minimal_hint)
        plan.display_actions(actions, index)
        return actions
    finally:
        ci.rm_rf(prefix)
def execute(args, parser):
    """conda create: create (or clone) an environment at the requested
    prefix."""
    import conda.config as config
    import conda.plan as plan
    from conda.api import get_index
    from conda.misc import touch_nonadmin

    common.ensure_name_or_prefix(args, 'create')
    prefix = common.get_prefix(args, search=False)
    check_prefix(prefix)
    config.set_pkgs_dirs(prefix)

    if args.clone:
        # --clone copies an existing environment; specs make no sense here
        if args.package_specs:
            sys.exit('Error: did not expect any arguments for --clone')
        clone(args.clone, prefix)
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
        return

    if not args.no_default_packages:
        args.package_specs.extend(config.create_default_packages)

    if len(args.package_specs) == 0 and not args.file:
        sys.exit('Error: too few arguments, must supply command line '
                 'package specs or --file')

    if args.file:
        specs = common.specs_from_url(args.file)
    else:
        specs = common.specs_from_args(args.package_specs)

    common.check_specs(prefix, specs)

    channel_urls = args.channel or ()
    common.ensure_override_channels_requires_channel(args)
    index = get_index(channel_urls=channel_urls,
                      prepend=not args.override_channels)
    actions = plan.install_actions(prefix, index, specs)

    if plan.nothing_to_do(actions):
        print('No matching packages could be found, nothing to do')
        return

    print()
    print("Package plan for creating environment at %s:" % prefix)
    plan.display_actions(actions, index)
    common.confirm_yn(args)
    plan.execute_actions(actions, index, verbose=not args.quiet)
    touch_nonadmin(prefix)
    print_activate(args.name if args.name else prefix)
def create_env(pref, specs):
    """Create a conda environment at *pref* containing *specs*, resolved
    against the local build channel."""
    if not isdir(bldpkgs_dir):
        os.makedirs(bldpkgs_dir)
    update_index(bldpkgs_dir)
    # invalidate the fetch cache so the freshly indexed local repo is re-read
    fetch_index.cache = {}
    local_index = get_index([url_path(config.croot)])
    env_actions = plan.install_actions(pref, local_index, specs)
    plan.display_actions(env_actions, local_index)
    plan.execute_actions(env_actions, local_index, verbose=True)
    # ensure the prefix exists, even if empty (i.e. when specs are empty)
    if not isdir(pref):
        os.makedirs(pref)
def show_pkg_info(name):
    """Print each available version/build of package *name*, or a note that
    it is unavailable."""
    from conda.api import get_index
    from conda.resolve import Resolve

    r = Resolve(get_index())
    print(name)
    if name not in r.groups:
        print(' not available')
        return
    for pkg in sorted(r.get_pkgs(name)):
        features = disp_features(r.features(pkg.fn))
        print(' %-15s %15s %s' % (pkg.version, pkg.build, features))
def execute(args, parser):
    """conda update: update the named installed packages to their latest
    available versions."""
    import sys

    import conda.install as ci
    import conda.config as config
    import conda.plan as plan
    from conda.api import get_index
    from conda.cli import pscheck

    if len(args.pkg_names) == 0:
        sys.exit("""Error: no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update anaconda
""")

    prefix = common.get_prefix(args)
    config.set_pkgs_dirs(prefix)

    # every requested name must already be installed in the prefix
    linked = set(ci.name_dist(d) for d in ci.linked(prefix))
    for name in args.pkg_names:
        common.arg2spec(name)  # presumably validates spec syntax; result discarded
        if '=' in name:
            sys.exit("Invalid package name: '%s'" % (name))
        if name not in linked:
            sys.exit("Error: package '%s' is not installed in %s" %
                     (name, prefix))

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()
    index = get_index(channel_urls=channel_urls,
                      prepend=not args.override_channels)
    actions = plan.install_actions(prefix, index, args.pkg_names)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import list_packages
        regex = '^(%s)$' % '|'.join(args.pkg_names)
        print('# All packages already at latest version, nothing to do.')
        list_packages(prefix, regex)
        return

    print("Updating conda environment at %s" % prefix)
    plan.display_actions(actions, index)
    common.check_write('update', prefix)

    if not pscheck.main(args):
        common.confirm_yn(args)

    plan.execute_actions(actions, index, verbose=not args.quiet)
def main():
    """Read a conda plan from the file named on the command line and
    execute it against the current index.

    Fix: the plan file is now opened with a ``with`` statement so the handle
    is closed deterministically instead of being leaked by the inline
    ``open()`` in the ``execute_plan`` call.
    """
    p = OptionParser(usage="usage: %prog [options] FILENAME",
                     description="execute an conda plan")
    p.add_option('-q', '--quiet', action="store_true")
    opts, args = p.parse_args()

    logging.basicConfig()

    if len(args) != 1:
        p.error('exactly one argument required')

    with open(args[0]) as plan_file:
        execute_plan(plan_file, get_index(), not opts.quiet)
def latest_pkg_version(pkg):
    """
    :returns: the latest version of the specified conda package available
    """
    resolver = Resolve(get_index())
    try:
        candidates = sorted(resolver.get_pkgs(MatchSpec(pkg)))
    except RuntimeError:
        candidates = None
    if not candidates:
        return None
    return LooseVersion(candidates[-1].version)
def latest_pkg_version(pkg):
    '''
    :returns: the latest version of the specified conda package available
    '''
    r = Resolve(get_index())
    try:
        matches = sorted(r.get_pkgs(MatchSpec(pkg)))
    except RuntimeError:
        matches = None
    # the last element of the sorted match list is the newest version
    return LooseVersion(matches[-1].version) if matches else None
def clone_env(prefix1, prefix2, verbose=True, quiet=False, index=None):
    """
    Clone the existing environment at `prefix1` into a new one at `prefix2`.

    Untracked files are copied across (with embedded prefix paths rewritten
    in text files), then all non-conda packages are re-linked into the new
    prefix. Returns ``(actions, untracked_files)``.
    """
    untracked_files = untracked(prefix1)
    dists = discard_conda(install.linked(prefix1))

    if verbose:
        print('Packages: %d' % len(dists))
        print('Files: %d' % len(untracked_files))

    for f in untracked_files:
        src = join(prefix1, f)
        dst = join(prefix2, f)
        dst_dir = dirname(dst)
        # clear any file/symlink occupying the destination directory path
        if islink(dst_dir) or isfile(dst_dir):
            os.unlink(dst_dir)
        if not isdir(dst_dir):
            os.makedirs(dst_dir)
        if islink(src):
            # recreate symlinks rather than copying their targets
            os.symlink(os.readlink(src), dst)
            continue

        try:
            with open(src, 'rb') as fi:
                data = fi.read()
        except IOError:
            # unreadable source file: skip it
            continue

        try:
            s = data.decode('utf-8')
            # rewrite embedded absolute paths to point at the new prefix
            s = s.replace(prefix1, prefix2)
            data = s.encode('utf-8')
        except UnicodeDecodeError:  # data is binary
            pass

        with open(dst, 'wb') as fo:
            fo.write(data)
        shutil.copystat(src, dst)

    if index is None:
        index = get_index()

    r = Resolve(index)
    # link in dependency order
    sorted_dists = r.dependency_sort(dists)

    actions = ensure_linked_actions(sorted_dists, prefix2)
    execute_actions(actions, index=index, verbose=not quiet)

    return actions, untracked_files
def show_pkg_info(name):
    """Print every available version/build of *name*, or a notice if absent."""
    #import conda.install as install
    from conda.api import get_index
    from conda.resolve import MatchSpec, Resolve

    r = Resolve(get_index())
    print(name)
    if name not in r.groups:
        print(' not available')
        return
    for pkg in sorted(r.get_pkgs(MatchSpec(name))):
        features = common.disp_features(r.features(pkg.fn))
        print(' %-15s %15s %s' % (pkg.version, pkg.build, features))
def create_env(pref, specs):
    """Create (or update) the environment at *pref* containing *specs*,
    resolving against the local build repository."""
    if not isdir(bldpkgs_dir):
        os.makedirs(bldpkgs_dir)
    update_index(bldpkgs_dir)
    # Drop the cached index so the next get_index() call refetches it;
    # necessary because the local build repo URL was just (re)indexed.
    fetch_index.cache = {}
    local_index = get_index([url_path(config.croot)])

    link_actions = plan.install_actions(pref, local_index, specs)
    plan.display_actions(link_actions, local_index)
    plan.execute_actions(link_actions, local_index, verbose=True)

    # With empty specs nothing gets linked, so force the prefix dir to exist.
    if not isdir(pref):
        os.makedirs(pref)
def show_pkg_info(name):
    """Print every available version/build of *name*, or a notice if absent."""
    from conda.api import get_index
    from conda.resolve import MatchSpec, Resolve
    index = get_index()
    r = Resolve(index)
    print(name)
    if name in r.groups:
        # CONSISTENCY FIX: query through MatchSpec like the sibling
        # show_pkg_info implementations do -- Resolve.get_pkgs expects a
        # spec, not a bare package name, in current conda versions.
        for pkg in sorted(r.get_pkgs(MatchSpec(name))):
            print(' %-15s %15s %s' % (
                pkg.version, pkg.build,
                common.disp_features(r.features(pkg.fn))))
    else:
        print(' not available')
def check_install(packages, platform=None, channel_urls=(), prepend=True,
                  minimal_hint=False):
    """Dry-run resolve *packages* into a throwaway prefix and display the
    install plan.

    :param packages: package spec strings to resolve
    :param platform: target platform passed through to get_index()
    :param channel_urls: extra channels to search
    :param prepend: whether to prepend the default channels
    :param minimal_hint: passed through to plan.install_actions
    :returns: the computed actions dict
    """
    # BUG FIX: create the scratch prefix *before* entering the try block.
    # If mkdtemp itself raised inside the try, the finally clause would
    # hit a NameError on the still-undefined `prefix`.
    prefix = tempfile.mkdtemp('conda')
    try:
        specs = common.specs_from_args(packages)
        index = get_index(channel_urls=channel_urls, prepend=prepend,
                          platform=platform, prefix=prefix)
        linked = ci.linked(prefix)
        plan.add_defaults_to_specs(Resolve(index), linked, specs)
        actions = plan.install_actions(prefix, index, specs, pinned=False,
                                       minimal_hint=minimal_hint)
        plan.display_actions(actions, index)
        return actions
    finally:
        # Always remove the scratch prefix, even on resolution failure.
        ci.rm_rf(prefix)
def show_pkg_info(name):
    """Print every available version/build of *name*, or a notice if absent."""
    # import conda.install as install
    from conda.api import get_index
    from conda.resolve import MatchSpec, Resolve

    r = Resolve(get_index())
    print(name)
    if name not in r.groups:
        print(" not available on channels")
        return
    for pkg in sorted(r.get_pkgs(MatchSpec(name))):
        line = " %-15s %15s %s" % (
            pkg.version,
            r.index[pkg.fn]["build"],
            common.disp_features(r.features(pkg.fn)),
        )
        print(line)
def test_installable(channel='defaults', verbose=True):
    """Dry-run install every package on *channel* across all platforms.

    :param channel: channel to test (plus the default channels)
    :param verbose: when False, stdout is silenced via os.devnull
    :returns: True if every tested package resolved, False if any failed
    """
    if not verbose:
        sys.stdout = open(os.devnull, 'w')
    # BUG FIX: the flag was initialised to False and flipped to True on
    # *failure*, so the function reported the opposite of what happened.
    # It now starts True and is cleared whenever a package fails.
    success = True
    has_py = re.compile(r'py(\d)(\d)')
    for platform in ['osx-64', 'linux-32', 'linux-64', 'win-32', 'win-64']:
        print("######## Testing platform %s ########" % platform)
        channels = [channel] + get_default_urls()
        index = get_index(channel_urls=channels, prepend=False,
                          platform=platform)
        for package, rec in iteritems(index):
            # If we give channels at the command line, only look at
            # packages from those channels (not defaults).
            if channel != 'defaults' and rec.get('schannel', 'defaults') == 'defaults':
                continue
            name = rec['name']
            if name in {'conda', 'conda-build'}:
                # conda can only be installed in the root environment
                continue
            # Don't fail just because the package is a different version of
            # Python than the default. We should probably check depends
            # rather than the build string.
            build = rec['build']
            match = has_py.search(build)
            assert match if 'py' in build else True, build
            if match:
                additional_packages = ['python=%s.%s' % (match.group(1),
                                                         match.group(2))]
            else:
                additional_packages = []

            version = rec['version']
            print('Testing %s=%s' % (name, version))
            try:
                check_install([name + '=' + version] + additional_packages,
                              channel_urls=channels, prepend=False,
                              platform=platform)
            except KeyboardInterrupt:
                raise
            # sys.exit raises an exception that doesn't subclass from Exception
            except BaseException as e:
                success = False
                print("FAIL: %s %s on %s with %s (%s)" %
                      (name, version, platform, additional_packages, e),
                      file=sys.stderr)
    return success
def main(self):
    """Resolve the build matrix for all recipes, then build and
    post-process every distribution that is not already available."""
    index = get_index(use_cache=False)
    if hasattr(conda_build, 'api'):
        # newer conda-build exposes a Config object through the api module
        build_config = conda_build.api.Config()
    else:
        # older conda-build: fall back to the module-level config object
        build_config = conda_build.config.config

    # If it is not already defined with environment variables, we set the CONDA_NPY
    # to the latest possible value. Since we compute a build matrix anyway, this is
    # useful to prevent conda-build bailing if the recipe depends on it (e.g.
    # ``numpy x.x``), and to ensure that recipes that don't care which version they want
    # at build/test time get a sensible version.
    if build_config.CONDA_NPY is None:
        resolver = conda.resolve.Resolve(index)
        npy = resolver.get_pkgs('numpy', emptyok=True)
        if npy:
            # First two version components concatenated, e.g. '1.11.2' -> '111',
            # the form conda-build expects for CONDA_NPY.
            version = ''.join(max(npy).version.split('.')[:2])
            build_config.CONDA_NPY = version

    recipe_metas = self.fetch_all_metas(build_config)
    print('Resolving distributions from {} recipes... '.format(
        len(recipe_metas)))

    all_distros = self.compute_build_distros(index, recipe_metas, build_config)
    print('Computed that there are {} distributions from the {} '
          'recipes:'.format(len(all_distros), len(recipe_metas)))
    recipes_and_dist_locn = self.find_existing_built_dists(all_distros)

    print(
        'Resolved dependencies, will be built in the following order: \n\t{}'
        .format('\n\t'.join([
            '{} (will be built: {})'.format(meta.dist(), dist_locn is None)
            for meta, dist_locn in recipes_and_dist_locn
        ])))

    if self.dry_run:
        print('Dry run: no distributions built')
        return

    for meta, built_dist_location in recipes_and_dist_locn:
        # A missing location means no pre-built distribution was found,
        # so this recipe needs building.
        was_built = built_dist_location is None
        if was_built:
            built_dist_location = self.build(meta, build_config)
        self.post_build(meta, built_dist_location, was_built,
                        config=build_config)
def compute_matrix(cls, meta, index=None, extra_conditions=None):
    """Return the non-skipped distributions for every special-case
    version combination of *meta*."""
    if index is None:
        with vn_matrix.override_conda_logging('WARN'):
            index = get_index()

    cases = vn_matrix.special_case_version_matrix(meta, index)
    if extra_conditions:
        cases = list(vn_matrix.filter_cases(cases, index, extra_conditions))

    # Instantiate one distribution per case, dropping the skipped ones.
    return [dist
            for dist in (cls(meta, case) for case in cases)
            if not dist.skip()]
def main():
    """Command-line entry point: parse options and execute a conda plan file.

    Expects exactly one positional argument, the plan FILENAME.
    """
    p = OptionParser(
        usage="usage: %prog [options] FILENAME",
        description="execute a conda plan")
    p.add_option('-q', '--quiet', action="store_true")
    opts, args = p.parse_args()

    logging.basicConfig()

    if len(args) != 1:
        p.error('exactly one argument required')

    # Use a context manager so the plan file is closed even if
    # execute_plan raises (the original leaked the file handle).
    with open(args[0]) as plan_file:
        execute_plan(plan_file, get_index(), not opts.quiet)
def clone_analysispackage(path, prefix=None, analysispackage_name=None,
                          data_path=None):
    """
    Clone the analysispackage (located at `path`) by creating a new
    environment at `prefix` (unless prefix is None or the prefix directory
    already exists)
    """
    try:
        t = tarfile.open(path, 'r:*')
        meta = json.load(t.extractfile('info/index.json'))
    except tarfile.ReadError:
        raise RuntimeError('bad tar archive: %s' % path)
    except KeyError:
        raise RuntimeError("no archive 'info/index.json' in: %s" % (path))

    if prefix and isdir(prefix):
        print("erasing old environment at %s" % prefix)
        shutil.rmtree(prefix)

    if prefix and not isdir(prefix):
        # Extract everything except bundle data and metadata into the
        # new environment, then link the declared dependencies.
        for m in t.getmembers():
            if m.path.startswith((BDP, 'info/')):
                continue
            t.extract(m, path=prefix)
        dists = discard_conda('-'.join(s.split())
                              for s in meta.get('depends', []))
        actions = plan.ensure_linked_actions(dists, prefix)
        index = get_index()
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=False)

    if not data_path:
        analysispackage_dir = abspath(expanduser(
            '~/analysispackages/%s' % (analysispackage_name or
                                       meta.get('analysispackage_name'))))
    else:
        analysispackage_dir = data_path
    for m in t.getmembers():
        if m.path.startswith(BDP):
            targetpath = join(analysispackage_dir, m.path[len(BDP):])
            # NOTE(review): tarfile._extract_member is a private API;
            # kept to preserve the exact extraction behavior.
            t._extract_member(m, targetpath)
    # BUG FIX: the metadata file was written under `data_path`, which is
    # None when the caller did not supply one, making join() raise
    # TypeError.  Write it under analysispackage_dir instead; behavior
    # is unchanged when data_path is given, since then
    # analysispackage_dir == data_path.
    with open(join(analysispackage_dir, ".metadata.json"), "w+") as f:
        json.dump({'env': prefix}, f)
    t.close()
def get_file_names_on_anaconda_channel(channel):
    """Get the names of **all** the files on a channel

    Parameters
    ----------
    channel : str

    Returns
    -------
    set
        The file names of all files on an anaconda channel. Something like
        'linux-64/album-0.0.2.post0-0_g6b05c00_py27.tar.bz2'
    """
    index = get_index([channel], prepend=False)
    # '<subdir>/<fname>' where subdir is the last path segment of the
    # record's channel URL and fname follows the '::' in the index key.
    return {rec['channel'].split('/')[-1] + '/' + key.split('::')[1]
            for key, rec in index.items()}
def distribution_exists_on_channel(binstar_cli, owner, metadata, channel='main'):
    """
    Determine whether a distribution exists on a specific channel.

    Note from @pelson: As far as I can see, there is no easy way to do this on
    binstar.

    """
    fname = '{}.tar.bz2'.format(metadata.dist())
    channel_url = '/'.join([owner, 'label', channel])

    index = get_index([channel_url], prepend=False, use_cache=False)
    try:
        # Present only if the index has the file for our current subdir.
        return index[fname]['subdir'] == conda.config.subdir
    except KeyError:
        return False
def main(self):
    """Resolve all recipe distributions and build those not yet available."""
    recipe_metas = self.fetch_all_metas()
    index = get_index(use_cache=True)

    print('Resolving distributions from {} recipes... '.format(len(recipe_metas)))
    all_distros = self.compute_build_distros(index, recipe_metas)
    print('Computed that there are {} distributions from the {} '
          'recipes:'.format(len(all_distros), len(recipe_metas)))

    recipes_and_dist_locn = self.find_existing_built_dists(all_distros)
    summary = '\n\t'.join(
        '{} (will be built: {})'.format(meta.dist(), locn is None)
        for meta, locn in recipes_and_dist_locn)
    print('Resolved dependencies, will be built in the following order: \n\t{}'.format(summary))

    for meta, dist_locn in recipes_and_dist_locn:
        # No known location means the distribution must be built here.
        needs_build = dist_locn is None
        if needs_build:
            dist_locn = self.build(meta)
        self.post_build(meta, dist_locn, needs_build)
def get_index_trap(*args, **kwargs):
    """
    Retrieves the package index, but traps exceptions and reports them as
    JSON if necessary.

    The keyword argument ``json`` (default False) is consumed here and is
    NOT forwarded to conda's get_index().
    """
    from conda.api import get_index

    # dict.pop with a default replaces the original membership-test /
    # del dance, and the local rename avoids shadowing the stdlib `json`
    # module name.
    as_json = kwargs.pop('json', False)
    try:
        return get_index(*args, **kwargs)
    except BaseException as e:
        if as_json:
            exception_and_exit(e, json=as_json)
        else:
            raise
def resolve_all(cls, meta, index=None, extra_conditions=None):
    """
    Given a package, return a list of ResolvedDistributions, one for each
    possible (necessary) version permutation.
    """
    if index is None:
        with vn_matrix.override_conda_logging('WARN'):
            index = get_index()

    cases = sorted(vn_matrix.special_case_version_matrix(meta, index))
    if extra_conditions:
        cases = list(vn_matrix.filter_cases(cases, extra_conditions))

    resolved = []
    for case in cases:
        candidate = cls(meta, case)
        if candidate.skip():
            # this permutation is excluded by the recipe
            continue
        resolved.append(candidate)
    return resolved
def conda_package_exists(pkgname, version=None):
    """Return True if a build of *pkgname* (optionally at exactly
    *version*) matching the running Python version exists in the index."""
    from conda.api import get_index
    from conda.resolve import MatchSpec, Resolve

    # BUG FIX: sys.version[:3] yields '3.1' for Python 3.10+ (two-digit
    # minor versions), producing a wrong 'pyXY' tag.  Build the tag from
    # sys.version_info instead.
    pyver = 'py%d%d' % sys.version_info[:2]
    index = get_index(use_cache=True)
    r = Resolve(index)
    try:
        pkgs = r.get_pkgs(MatchSpec(pkgname))
    except RuntimeError:
        return False
    for pkg in pkgs:
        if not pkg.build.startswith(pyver):
            # built for a different Python version
            continue
        if version and pkg.version != version:
            continue
        return True
    return False
def main(self):
    """Compute the build matrix for all recipes and build what is needed."""
    recipe_metas = self.fetch_all_metas()
    index = get_index()

    print('Resolving distributions from {} recipes... '.format(
        len(recipe_metas)))

    all_distros = []
    for meta in recipe_metas:
        # Expand each recipe into its per-case distributions.
        all_distros.extend(BakedDistribution.compute_matrix(
            meta, index, getattr(self, 'extra_build_conditions', [])))

    print('Computed that there are {} distributions from the {} '
          'recipes:'.format(len(all_distros), len(recipe_metas)))

    recipes_to_build = self.recipes_to_build(all_distros)
    for meta, build_dist in zip(all_distros, recipes_to_build):
        if build_dist:
            self.build(meta)
        self.post_build(meta, build_occured=build_dist)