Example #1
def test_channel_priority():
    fn1 = 'pandas-0.10.1-np17py27_0.tar.bz2'
    fn2 = 'other::' + fn1
    spec = ['pandas', 'python 2.7*']
    index2 = index.copy()
    index2[fn2] = index2[fn1].copy()
    r2 = Resolve(index2)
    rec = r2.index[fn2]
    import conda.resolve
    conda.resolve.channel_priority = True
    rec['priority'] = 0
    # Should select the "other", older package because it
    # has a lower channel priority number
    installed1 = r2.install(spec)
    # Should select the newer package because now the "other"
    # package has a higher priority number
    rec['priority'] = 2
    installed2 = r2.install(spec)
    # Should also select the newer package because we have
    # turned off channel priority altogether
    conda.resolve.channel_priority = False
    rec['priority'] = 0
    installed3 = r2.install(spec)
    assert installed1 != installed2
    assert installed1 != installed3
    assert installed2 == installed3
Example #2
def main():
    p = argparse.ArgumentParser()
    p.add_argument('pkg_fn', type=os.path.basename)
    p.add_argument('output_fn')
    p.add_argument('--dependsdata')
    p.add_argument('--index-cache', default='.index-cache.json')

    args = p.parse_args()

    r = Resolve(load_index(args.index_cache))
    with open(args.dependsdata) as f:
        dependsdata = json.load(f)

    try:
        plan = r.solve([_fn2fullspec(args.pkg_fn)],
                       features=set(),
                       installed=set(),
                       update_deps=False)

    except SystemExit:
        print('\n\nSolver raised SystemExit; recording no unmet dependencies\n\n')
        with open(args.output_fn, 'w') as f:
            json.dump({'pkg_fn': args.pkg_fn, 'unmet_depends': []}, f)
            return

    depends_provides = setreduce(dependsdata[fn]['provides'] for fn in plan)
    requires = setreduce(dependsdata[fn]['requires'] for fn in plan)

    with open(args.output_fn, 'w') as f:
        json.dump(
            {
                'pkg_fn': args.pkg_fn,
                'unmet_depends': list(requires - depends_provides)
            }, f)
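
The script above relies on two helpers, _fn2fullspec and setreduce, whose definitions are not part of this example. A minimal sketch of what they could look like, assuming the usual name-version-build.tar.bz2 filename layout seen throughout these examples (the real implementations may differ):

from functools import reduce

def _fn2fullspec(fn):
    # assumed: 'pandas-0.10.1-np17py27_0.tar.bz2' -> 'pandas 0.10.1 np17py27_0'
    return ' '.join(fn[:-8].rsplit('-', 2))

def setreduce(iterables):
    # assumed: union of all member iterables as a single set
    return reduce(set.union, (set(x) for x in iterables), set())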
Example #3
def test_channel_priority():
    fn1 = 'pandas-0.10.1-np17py27_0.tar.bz2'
    fn2 = 'other::' + fn1
    spec = ['pandas', 'python 2.7*']
    index2 = index.copy()
    index2[Dist(fn2)] = index2[Dist(fn1)].copy()
    index2 = {Dist(key): value for key, value in iteritems(index2)}
    r2 = Resolve(index2)
    rec = r2.index[Dist(fn2)]

    os.environ['CONDA_CHANNEL_PRIORITY'] = 'True'
    reset_context(())

    rec['priority'] = 0
    # Should select the "other", older package because it
    # has a lower channel priority number
    installed1 = r2.install(spec)
    # Should select the newer package because now the "other"
    # package has a higher priority number
    rec['priority'] = 2
    installed2 = r2.install(spec)
    # Should also select the newer package because we have
    # turned off channel priority altogether

    os.environ['CONDA_CHANNEL_PRIORITY'] = 'False'
    reset_context(())

    rec['priority'] = 0
    installed3 = r2.install(spec)
    assert installed1 != installed2
    assert installed1 != installed3
    assert installed2 == installed3
Example #4
def remove_actions(prefix, specs, index, force=False, pinned=True):
    r = Resolve(index)
    linked = [d+'.tar.bz2' for d in install.linked(prefix)]
    mss = list(map(MatchSpec, specs))

    if force:
        nlinked = {r.package_name(fn):fn[:-8] for fn in linked if not any(r.match(ms, fn) for ms in mss)}
    else:
        if config.track_features:
            specs.extend(x + '@' for x in config.track_features)
        nlinked = {r.package_name(fn):fn[:-8] for fn in r.remove(specs, linked)}

    if pinned:
        pinned_specs = get_pinned_specs(prefix)
        log.debug("Pinned specs=%s" % pinned_specs)

    linked = {r.package_name(fn):fn[:-8] for fn in linked}

    actions = ensure_linked_actions(r.dependency_sort(nlinked), prefix)
    for old_fn in reversed(r.dependency_sort(linked)):
        dist = old_fn + '.tar.bz2'
        name = r.package_name(dist)
        if old_fn == nlinked.get(name,''):
            continue
        if pinned and any(r.match(ms, dist) for ms in pinned_specs):
            raise RuntimeError(
                "Cannot remove %s because it is pinned. Use --no-pin to override." % dist)
        if name == 'conda' and name not in nlinked:
            if any(ms.name == 'conda' for ms in mss):
                sys.exit("Error: 'conda' cannot be removed from the root environment")
            else:
                sys.exit("Error: this 'remove' command cannot be executed because it\nwould require removing 'conda' dependencies.")
        add_unlink(actions, old_fn)

    return actions
Example #5
def remove_actions(prefix, specs, index=None, pinned=True):
    linked = install.linked(prefix)

    mss = [MatchSpec(spec) for spec in specs]

    if index:
        r = Resolve(index)
    else:
        r = None

    pinned_specs = get_pinned_specs(prefix)

    actions = defaultdict(list)
    actions[inst.PREFIX] = prefix
    for dist in sorted(linked):
        fn = dist + '.tar.bz2'
        if any(ms.match(fn) for ms in mss):
            if pinned and any(MatchSpec(spec).match('%s.tar.bz2' % dist)
                              for spec in pinned_specs):
                raise RuntimeError("Cannot remove %s because it is pinned. Use --no-pin to override." % dist)

            actions[inst.UNLINK].append(dist)
            if r and fn in index and r.track_features(fn):
                features_actions = remove_features_actions(prefix, index, r.track_features(fn))
                for action in features_actions:
                    if isinstance(actions[action], list):
                        for item in features_actions[action]:
                            if item not in actions[action]:
                                actions[action].append(item)
                    else:
                        assert actions[action] == features_actions[action]

    return actions
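
A minimal usage sketch for the function above; the prefix path and spec are purely illustrative, and get_index plus the inst instruction constants are assumed to be imported as in the surrounding module:

from conda.api import get_index

actions = remove_actions('/opt/envs/demo', ['pandas'], index=get_index(), pinned=True)
print(actions[inst.UNLINK])  # distributions that would be unlinked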
Example #6
def get_package_versions(package):
    index = get_index()
    r = Resolve(index)
    if package in r.groups:
        return r.get_pkgs(MatchSpec(package))
    else:
        return []
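
A brief, hypothetical use of the helper above; 'numpy' is only an illustrative name, and the version/build attributes follow the package objects that r.get_pkgs returns in the other examples:

for pkg in get_package_versions('numpy'):
    print(pkg.version, pkg.build)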
Example #7
def warn_on_old_conda_build(index):
    root_linked = linked(cc.root_dir)
    vers_inst = [dist.rsplit("-", 2)[1] for dist in root_linked if dist.rsplit("-", 2)[0] == "conda-build"]
    if not len(vers_inst) == 1:
        print("WARNING: Could not detect installed version of conda-build", file=sys.stderr)
        return
    r = Resolve(index)
    try:
        pkgs = sorted(r.get_pkgs(MatchSpec("conda-build")))
    except NoPackagesFound:
        print("WARNING: Could not find any versions of conda-build in the channels", file=sys.stderr)
        return
    if pkgs[-1].version != vers_inst[0]:
        print(
            """
WARNING: conda-build appears to be out of date. You have version %s but the
latest version is %s. Run

conda update -n root conda-build

to get the latest version.
"""
            % (vers_inst[0], pkgs[-1].version),
            file=sys.stderr,
        )
Example #8
def install_with_pip(prefix, index, specs):
    r = Resolve(index)
    for_conda = []

    try:
        next(r.find_matches(MatchSpec('pip')))
    except StopIteration:
        print("Pip not found, running `conda install pip` ...")
        try:
            install_package(prefix, 'pip')
        except Exception as e:
            print("Could not install pip --- continuing...")
            return specs

    for s in specs:
        try:
            next(r.find_matches(MatchSpec(s)))
        except StopIteration:
            if s == 'pip':
                for_conda.append(s)
                continue
            print("Conda package not available for %s, attempting to install "
                  "via pip" % s)
            pip_install(prefix, s)
        else:
            for_conda.append(s)
    return for_conda
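
A hypothetical call of install_with_pip; the prefix and spec names are illustrative only. Specs that conda cannot satisfy are handed to pip, and the remainder comes back for a normal conda install:

from conda.api import get_index

leftover = install_with_pip('/opt/envs/demo', get_index(), ['numpy', 'some-pypi-only-package'])
print('Install with conda:', leftover)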
Example #9
def install_actions(prefix, index, specs, force=False, only_names=None,
                    pinned=True, minimal_hint=False, update_deps=True):
    r = Resolve(index)
    linked = install.linked(prefix)

    if config.self_update and is_root_prefix(prefix):
        specs.append('conda')

    if pinned:
        pinned_specs = get_pinned_specs(prefix)
        log.debug("Pinned specs=%s" % pinned_specs)
        specs += pinned_specs
        # TODO: Improve error messages here
    add_defaults_to_specs(r, linked, specs)

    must_have = {}
    for fn in r.solve(specs, [d + '.tar.bz2' for d in linked],
                      config.track_features, minimal_hint=minimal_hint,
                      update_deps=update_deps):
        dist = fn[:-8]
        name = install.name_dist(dist)
        if only_names and name not in only_names:
            continue
        must_have[name] = dist

    if is_root_prefix(prefix):
        if install.on_win:
            for name in install.win_ignore_root:
                if name in must_have:
                    del must_have[name]
        for name in config.foreign:
            if name in must_have:
                del must_have[name]
    elif basename(prefix).startswith('_'):
        # anything (including conda) can be installed into environments
        # starting with '_', mainly to allow conda-build to build conda
        pass
    else:
        # disallow conda from being installed into all other environments
        if 'conda' in must_have:
            sys.exit("Error: 'conda' can only be installed into the "
                     "root environment")

    smh = r.graph_sort(must_have)

    if force:
        actions = force_linked_actions(smh, index, prefix)
    else:
        actions = ensure_linked_actions(smh, prefix)

    if actions[inst.LINK] and sys.platform != 'win32' and prefix != config.root_dir:
        actions[inst.SYMLINK_CONDA] = [config.root_dir]

    for dist in sorted(linked):
        name = install.name_dist(dist)
        if name in must_have and dist != must_have[name]:
            add_unlink(actions, dist)

    return actions
Example #10
def install_actions(prefix, index, specs, force=False, only_names=None, always_copy=False,
                    pinned=True, minimal_hint=False, update_deps=True, prune=False):
    r = Resolve(index)
    linked = r.installed

    if config.self_update and is_root_prefix(prefix):
        specs.append('conda')

    if pinned:
        pinned_specs = get_pinned_specs(prefix)
        log.debug("Pinned specs=%s" % pinned_specs)
        specs += pinned_specs

    must_have = {}
    if config.track_features:
        specs.extend(x + '@' for x in config.track_features)

    pkgs = r.install(specs, linked, update_deps=update_deps)

    for fn in pkgs:
        dist = fn[:-8]
        name = install.name_dist(dist)
        if not name or only_names and name not in only_names:
            continue
        must_have[name] = dist

    if is_root_prefix(prefix):
        for name in config.foreign:
            if name in must_have:
                del must_have[name]
    elif basename(prefix).startswith('_'):
        # anything (including conda) can be installed into environments
        # starting with '_', mainly to allow conda-build to build conda
        pass
    else:
        # disallow conda from being installed into all other environments
        if 'conda' in must_have or 'conda-env' in must_have:
            sys.exit("Error: 'conda' can only be installed into the "
                     "root environment")

    smh = r.dependency_sort(must_have)

    actions = ensure_linked_actions(
        smh, prefix,
        index=index if force else None,
        force=force, always_copy=always_copy)

    if actions[inst.LINK]:
        actions[inst.SYMLINK_CONDA] = [config.root_dir]

    for fkey in sorted(linked):
        dist = fkey[:-8]
        name = install.name_dist(dist)
        replace_existing = name in must_have and dist != must_have[name]
        prune_it = prune and dist not in smh
        if replace_existing or prune_it:
            add_unlink(actions, dist)

    return actions
Example #11
def main():
    r = Resolve(get_index())
    plan = r.solve(['anaconda 2.4.1'],
                   features=set(),
                   installed=set(),
                   update_deps=True)
    for fn in plan:
        print(os.path.join('tarballs', fn))
Example #12
def execute(args, parser):
    import re

    import conda.install as install
    from conda.api import get_index
    from conda.resolve import MatchSpec, Resolve


    if args.regex:
        pat = re.compile(args.regex, re.I)
    else:
        pat = None

    prefix = common.get_prefix(args)
    if not args.canonical:
        linked = install.linked(prefix)

    # XXX: Make this work with more than one platform
    platform = args.platform or ''
    common.ensure_override_channels_requires_channel(args, dashc=False)
    channel_urls = args.channel or ()
    index = get_index(channel_urls=channel_urls, prepend=not
                      args.override_channels, platform=args.platform)

    r = Resolve(index)
    for name in sorted(r.groups):
        disp_name = name
        if pat and pat.search(name) is None:
            continue

        if args.outdated:
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            if not vers_inst:
                continue
            assert len(vers_inst) == 1, name
            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                continue
            latest = pkgs[-1]
            if latest.version == vers_inst[0]:
                continue

        for pkg in sorted(r.get_pkgs(MatchSpec(name))):
            dist = pkg.fn[:-8]
            if args.canonical:
                print(dist)
                continue
            inst = '*' if dist in linked else ' '
            print('%-25s %s  %-15s %15s  %-15s %s' % (
                disp_name, inst,
                pkg.version,
                r.index[pkg.fn]['build'],
                canonical_channel_name(pkg.channel),
                common.disp_features(r.features(pkg.fn)),
                ))
            disp_name = ''
Example #13
def test_multiple_solution():
    index2 = index.copy()
    fn = 'pandas-0.11.0-np16py27_1.tar.bz2'
    res1 = set([fn])
    for k in range(1,15):
        fn2 = '%s_%d.tar.bz2'%(fn[:-8],k)
        index2[fn2] = index[fn]
        res1.add(fn2)
    r = Resolve(index2)
    res = r.solve(['pandas', 'python 2.7*', 'numpy 1.6*'], returnall=True)
    res = set([x[3] for x in res])
    assert res <= res1
Example #14
def install_actions(prefix, index, specs, force=False, only_names=None, pinned=True, minimal_hint=False):

    r = Resolve(index)
    linked = install.linked(prefix)

    if config.self_update and is_root_prefix(prefix):
        specs.append('conda')
    add_defaults_to_specs(r, linked, specs)
    if pinned:
        pinned_specs = get_pinned_specs(prefix)
        specs += pinned_specs
        # TODO: Improve error messages here

    must_have = {}
    for fn in r.solve(specs, [d + '.tar.bz2' for d in linked],
                      config.track_features, minimal_hint=minimal_hint):
        dist = fn[:-8]
        name = install.name_dist(dist)
        if only_names and name not in only_names:
            continue
        must_have[name] = dist

    if is_root_prefix(prefix):
        if install.on_win:
            for name in install.win_ignore_root:
                if name in must_have:
                    del must_have[name]
        for name in config.foreign:
            if name in must_have:
                del must_have[name]
    else:
        # discard conda from other environments
        if 'conda' in must_have:
            sys.exit("Error: 'conda' can only be installed into "
                     "root environment")

    smh = r.graph_sort(must_have)

    if force:
        actions = force_linked_actions(smh, index, prefix)
    else:
        actions = ensure_linked_actions(smh, prefix)

    if actions[LINK] and sys.platform != 'win32':
        actions[SYMLINK_CONDA] = [config.root_dir]

    for dist in sorted(linked):
        name = install.name_dist(dist)
        if name in must_have and dist != must_have[name]:
            actions[UNLINK].append(dist)

    return actions
Example #15
def test_multiple_solution():
    index2 = index.copy()
    fn = 'pandas-0.11.0-np16py27_1.tar.bz2'
    res1 = set([fn])
    for k in range(1,15):
        fn2 = Dist('%s_%d.tar.bz2'%(fn[:-8],k))
        index2[fn2] = index[Dist(fn)]
        res1.add(fn2)
    index2 = {Dist(key): value for key, value in iteritems(index2)}
    r = Resolve(index2)
    res = r.solve(['pandas', 'python 2.7*', 'numpy 1.6*'], returnall=True)
    res = set([y for x in res for y in x if r.package_name(y).startswith('pandas')])
    assert len(res) <= len(res1)
Example #16
def remove_features_actions(prefix, index, features):
    linked = install.linked(prefix)
    r = Resolve(index)

    actions = defaultdict(list)
    actions[inst.PREFIX] = [prefix]
    _linked = [d + '.tar.bz2' for d in linked]
    to_link = []
    for dist in sorted(linked):
        fn = dist + '.tar.bz2'
        if fn not in index:
            continue
        if r.track_features(fn).intersection(features):
Example #17
def resolve(info):
    if not index:
        sys.exit("Error: index is empty, maybe 'channels' are missing?")
    specs = info['specs']
    r = Resolve(index)
    add_defaults_to_specs(r, [], specs)
    res = list(r.solve(specs))
    sys.stdout.write('\n')

    if 'install_in_dependency_order' in info:
        sort_info = {name_dist(d): d[:-8] for d in res}
        dists.extend(d + '.tar.bz2' for d in r.graph_sort(sort_info))
    else:
        dists.extend(res)
Example #18
def clone_env(prefix1, prefix2, verbose=True, quiet=False, index=None):
    """
    clone existing prefix1 into new prefix2
    """
    untracked_files = untracked(prefix1)
    dists = discard_conda(install.linked(prefix1))

    if verbose:
        print('Packages: %d' % len(dists))
        print('Files: %d' % len(untracked_files))

    for f in untracked_files:
        src = join(prefix1, f)
        dst = join(prefix2, f)
        dst_dir = dirname(dst)
        if islink(dst_dir) or isfile(dst_dir):
            os.unlink(dst_dir)
        if not isdir(dst_dir):
            os.makedirs(dst_dir)
        if islink(src):
            os.symlink(os.readlink(src), dst)
            continue

        try:
            with open(src, 'rb') as fi:
                data = fi.read()
        except IOError:
            continue

        try:
            s = data.decode('utf-8')
            s = s.replace(prefix1, prefix2)
            data = s.encode('utf-8')
        except UnicodeDecodeError:  # data is binary
            pass

        with open(dst, 'wb') as fo:
            fo.write(data)
        shutil.copystat(src, dst)

    if index is None:
        index = get_index()

    r = Resolve(index)
    sorted_dists = r.dependency_sort(dists)

    actions = ensure_linked_actions(sorted_dists, prefix2)
    execute_actions(actions, index=index, verbose=not quiet)

    return actions, untracked_files
Example #19
def latest_pkg_version(pkg):
    """
    :returns: the latest version of the specified conda package available
    """
    r = Resolve(get_index())
    try:
        pkg_list = sorted(r.get_pkgs(MatchSpec(pkg)))
    except RuntimeError:
        pkg_list = None
    if pkg_list:
        pkg_version = LooseVersion(pkg_list[-1].version)
    else:
        pkg_version = None
    return pkg_version
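
A usage sketch for latest_pkg_version; the installed version below is hypothetical, and the comparison relies on the same LooseVersion semantics the function uses:

from distutils.version import LooseVersion

installed = LooseVersion('0.10.1')
latest = latest_pkg_version('pandas')
if latest is not None and latest > installed:
    print('pandas %s is available (installed: %s)' % (latest, installed))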
Example #20
def show_pkg_info(name):
    from conda.api import get_index
    from conda.resolve import Resolve

    index = get_index()
    r = Resolve(index)
    print(name)
    if name in r.groups:
        for pkg in sorted(r.get_pkgs(name)):
            print('    %-15s %15s  %s' % (
                    pkg.version,
                    pkg.build,
                    common.disp_features(r.features(pkg.fn))))
    else:
        print('    not available')
Example #21
def install_from_pypi(prefix, index, specs):
    r = Resolve(index)
    for_conda = []
    for s in specs:
        try:
            next(r.find_matches(MatchSpec(s)))
        except StopIteration:
            print("Conda package not available for %s, attempting to create "
                  "and install conda package from pypi" % s)
            recipedir = create_recipe(s)
            pkgname = build_package(prefix, recipedir)
            install_package(prefix, pkgname)
        else:
            for_conda.append(s)
    return for_conda
Example #22
def show_pkg_info(name):
    # import conda.install as install
    from conda.api import get_index
    from conda.resolve import MatchSpec, Resolve

    index = get_index()
    r = Resolve(index)
    print(name)
    if name in r.groups:
        for pkg in sorted(r.get_pkgs(MatchSpec(name))):
            print(
                "    %-15s %15s  %s" % (pkg.version, r.index[pkg.fn]["build"], common.disp_features(r.features(pkg.fn)))
            )
    else:
        print("    not available on channels")
Example #23
def test_broken_install():
    installed = r.install(['pandas', 'python 2.7*', 'numpy 1.6*'])
    assert installed == [Dist(fname) for fname in [
        'dateutil-2.1-py27_1.tar.bz2',
        'numpy-1.6.2-py27_4.tar.bz2',
        'openssl-1.0.1c-0.tar.bz2',
        'pandas-0.11.0-np16py27_1.tar.bz2',
        'python-2.7.5-0.tar.bz2',
        'pytz-2013b-py27_0.tar.bz2',
        'readline-6.2-0.tar.bz2',
        'scipy-0.12.0-np16py27_0.tar.bz2',
        'six-1.3.0-py27_0.tar.bz2',
        'sqlite-3.7.13-0.tar.bz2',
        'system-5.8-1.tar.bz2',
        'tk-8.5.13-0.tar.bz2',
        'zlib-1.2.7-0.tar.bz2']]

    # Add a fake package and an incompatible numpy
    installed2 = list(installed)
    installed2[1] = Dist('numpy-1.7.1-py33_p0.tar.bz2')
    installed2.append(Dist('notarealpackage-2.0-0.tar.bz2'))
    assert r.install([], installed2) == installed2
    installed3 = r.install(['numpy'], installed2)
    installed4 = r.remove(['pandas'], installed2)
    assert set(installed4) == set(installed2[:3] + installed2[4:])

    # Remove the installed version of pandas from the index
    index2 = index.copy()
    rec = index2[Dist('pandas-0.11.0-np16py27_1.tar.bz2')]
    index2[Dist('pandas-0.11.0-np16py27_1.tar.bz2')] = rec = rec.copy()
    rec['priority'] = MAX_CHANNEL_PRIORITY
    r2 = Resolve(index2)
    installed2 = r2.install(['pandas', 'python 2.7*', 'numpy 1.6*'], installed)
    assert installed2 == [Dist(d) for d in [
        'dateutil-2.1-py27_1.tar.bz2',
        'numpy-1.6.2-py27_4.tar.bz2',
        'openssl-1.0.1c-0.tar.bz2',
        'pandas-0.10.1-np16py27_0.tar.bz2',
        'python-2.7.5-0.tar.bz2',
        'pytz-2013b-py27_0.tar.bz2',
        'readline-6.2-0.tar.bz2',
        'scipy-0.11.0-np16py27_3.tar.bz2',
        'six-1.3.0-py27_0.tar.bz2',
        'sqlite-3.7.13-0.tar.bz2',
        'system-5.8-1.tar.bz2',
        'tk-8.5.13-0.tar.bz2',
        'zlib-1.2.7-0.tar.bz2']]
Example #24
def resolve(info):
    """
    sets global DISTS and INDEX
    """
    global DISTS

    specs = info['specs']
    r = Resolve(INDEX)
    add_defaults_to_specs(r, [], specs)
    DISTS = list(r.solve(specs))

    sort_info = {}
    for d in DISTS:
        name, unused_version, unused_build = d.rsplit('-', 2)
        sort_info[name] = d.rsplit('.tar.bz2', 1)[0]

    DISTS = map(lambda d: d + '.tar.bz2', r.graph_sort(sort_info))
Example #25
def install_actions(prefix, index, specs, force=False, only_names=None):
    r = Resolve(index)
    linked = install.linked(prefix)

    if is_root_prefix(prefix):
        specs.append('conda')
    add_defaults_to_specs(r, linked, specs)

    must_have = {}
    for fn in r.solve(specs, [d + '.tar.bz2' for d in linked],
                      config.track_features):
        dist = fn[:-8]
        name = install.name_dist(dist)
        if only_names and name not in only_names:
            continue
        must_have[name] = dist

    if is_root_prefix(prefix):
        if not (force or only_names or r.explicit(specs)):
            # ensure conda is in root environment
            assert 'conda' in must_have
        if install.on_win:
            for name in install.win_ignore_root:
                if name in must_have:
                    del must_have[name]
        for name in config.foreign:
            if name in must_have:
                del must_have[name]
    else:
        # discard conda from other environments
        if 'conda' in must_have:
            sys.exit("Error: 'conda' can only be installed into "
                     "root environment")

    smh = sorted(must_have.values())
    if force:
        actions = force_linked_actions(smh, index, prefix)
    else:
        actions = ensure_linked_actions(smh, prefix)

    for dist in sorted(linked):
        name = install.name_dist(dist)
        if name in must_have and dist != must_have[name]:
            actions[UNLINK].append(dist)

    return actions
Example #26
def app_info_packages(fn):
    """
    given the filename of a package, return which packages (and their sizes)
    still need to be downloaded in order to install the package.  That is,
    the package itself and its dependencies.
    Returns a list of tuples (pkg_name, pkg_version, size,
    fetched? True or False).
    """
    from conda.resolve import Resolve

    index = get_index()
    r = Resolve(index)
    res = []
    for fn2 in r.solve([fn2spec(fn)]):
        info = index[fn2]
        res.append((info['name'], info['version'], info['size'],
                    install.is_fetched(config.pkgs_dir, fn2[:-8])))
    return res
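
A hypothetical caller of app_info_packages; the package filename is illustrative and would have to exist in the configured channels:

for name, version, size, fetched in app_info_packages('anaconda-1.9.2-np17py27_0.tar.bz2'):
    state = 'cached' if fetched else 'needs download'
    print('%-20s %-10s %12d bytes  %s' % (name, version, size, state))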
Example #27
def install_actions(prefix, index, specs, force=False, only_names=None):
    r = Resolve(index)
    linked = install.linked(prefix)

    # Here is a temporary fix to prevent adding conda to the specs;
    # Bootstrapping problem: conda is not available as a conda package for
    # py3k yet.
    import sys
    PY3 = sys.version_info[0] == 3

    if is_root_prefix(prefix) and not PY3:
        specs.append('conda')
    add_defaults_to_specs(r, linked, specs)

    must_have = {}
    for fn in r.solve(specs, [d + '.tar.bz2' for d in linked]):
        dist = fn[:-8]
        name = name_dist(dist)
        if only_names and name not in only_names:
            continue
        must_have[name] = dist

    if is_root_prefix(prefix) and not PY3:
        if not force:
            # ensure conda is in root environment
            assert 'conda' in must_have
    else:
        # discard conda from other environments
        if 'conda' in must_have:
            del must_have['conda']

    smh = sorted(must_have.values())
    if force:
        actions = force_linked_actions(smh, index, prefix)
    else:
        actions = ensure_linked_actions(smh, prefix)

    for dist in sorted(linked):
        name = name_dist(dist)
        if name in must_have and dist != must_have[name]:
            actions[UNLINK].append(dist)

    return actions
Example #28
File: api.py  Project: 3kwa/conda
def app_info_packages(fn, prefix=config.root_dir):
    """
    given the filename of a package, return which packages (and their sizes)
    still need to be downloaded in order to install the package.  That is,
    the package itself and its dependencies.
    Returns a list of tuples (pkg_name, pkg_version, size,
    fetched? True or False).
    """
    from conda.resolve import Resolve

    index = get_index(prefix=prefix)
    r = Resolve(index)
    res = []
    for fn2 in r.solve([_fn2fullspec(fn)], installed=install.linked(prefix)):
        info = index[fn2]
        if 'link' not in info:
            res.append((info['name'], info['version'], info['size'],
                        any(install.is_fetched(pkgs_dir, fn2[:-8])
                            for pkgs_dir in config.pkgs_dirs)))
    return res
Example #29
def install_actions(prefix, index, specs, force=False, only_names=None):
    r = Resolve(index)
    linked = install.linked(prefix)

    if is_root_prefix(prefix):
        specs.append("conda")
    add_defaults_to_specs(r, linked, specs)

    must_have = {}
    for fn in r.solve(specs, [d + ".tar.bz2" for d in linked]):
        dist = fn[:-8]
        name = name_dist(dist)
        if only_names and name not in only_names:
            continue
        must_have[name] = dist

    if is_root_prefix(prefix):
        if not (force or only_names):
            # ensure conda is in root environment
            assert "conda" in must_have
        if install.on_win:
            for name in install.win_ignore_root:
                if name in must_have:
                    del must_have[name]
    else:
        # discard conda from other environments
        if "conda" in must_have:
            del must_have["conda"]

    smh = sorted(must_have.values())
    if force:
        actions = force_linked_actions(smh, index, prefix)
    else:
        actions = ensure_linked_actions(smh, prefix)

    for dist in sorted(linked):
        name = name_dist(dist)
        if name in must_have and dist != must_have[name]:
            actions[UNLINK].append(dist)

    return actions
Example #30
def conda_package_exists(pkgname, version=None):
    from conda.api import get_index
    from conda.resolve import MatchSpec, Resolve

    pyver = 'py%s' % sys.version[:3].replace('.','')
    index = get_index(use_cache = True)
    r = Resolve(index)
    try:
        pkgs = r.get_pkgs(MatchSpec(pkgname))
    except RuntimeError:
        return False
    exists = False
    for pkg in pkgs:
        match_pyver = pkg.build.startswith(pyver)
        if not match_pyver:
            continue
        if version and pkg.version != version:
            continue
        exists = True
        break
    return exists
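
A hypothetical check with conda_package_exists before falling back to pip; the package name and version are illustrative:

if conda_package_exists('requests', version='2.0.0'):
    print('requests 2.0.0 is available as a conda package for this Python')
else:
    print('falling back to pip')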
Example #31
def test_install_package_with_feature():
    index2 = index.copy()
    index2['mypackage-1.0-featurepy33_0.tar.bz2'] = IndexRecord(**{
        'build': 'featurepy33_0',
        'build_number': 0,
        'depends': ['python 3.3*'],
        'name': 'mypackage',
        'version': '1.0',
        'features': 'feature',
    })
    index2['feature-1.0-py33_0.tar.bz2'] = IndexRecord(**{
        'build': 'py33_0',
        'build_number': 0,
        'depends': ['python 3.3*'],
        'name': 'feature',
        'version': '1.0',
        'track_features': 'feature',
    })

    index2 = {Dist(key): value for key, value in iteritems(index2)}
    r = Resolve(index2)

    # It should not raise
    r.install(['mypackage','feature 1.0'])
Example #32
def remove_actions(prefix, specs, index=None, pinned=True):
    linked = install.linked(prefix)

    mss = [MatchSpec(spec) for spec in specs]

    if index:
        r = Resolve(index)
    else:
        r = None

    pinned_specs = get_pinned_specs(prefix)

    actions = defaultdict(list)
    actions[inst.PREFIX] = [prefix]
    for dist in sorted(linked):
        fn = dist + '.tar.bz2'
        if any(ms.match(fn) for ms in mss):
Example #33
def get_index_r_3():
    with open(join(dirname(__file__), 'index3.json')) as fi:
        packages = json.load(fi)
        repodata = {
            "info": {
                "subdir": context.subdir,
                "arch": context.arch_name,
                "platform": context.platform,
            },
            "packages": packages,
        }

    index = {}
    channel = Channel('defaults')
    supplement_index_with_repodata(index, repodata, channel, 1)
    add_feature_records(index)
    index = frozendict(index)
    r = Resolve(index)
    index = r.index
    return index, r
Example #34
def test_circular_dependencies():
    index2 = index.copy()
    index2['package1-1.0-0.tar.bz2'] = IndexRecord(
        **{
            "channel": "defaults",
            "subdir": context.subdir,
            "md5": "0123456789",
            "fn": "doesnt-matter-here",
            'build': '0',
            'build_number': 0,
            'depends': ['package2'],
            'name': 'package1',
            'requires': ['package2'],
            'version': '1.0',
        })
    index2['package2-1.0-0.tar.bz2'] = IndexRecord(
        **{
            "channel": "defaults",
            "subdir": context.subdir,
            "md5": "0123456789",
            "fn": "doesnt-matter-here",
            'build': '0',
            'build_number': 0,
            'depends': ['package1'],
            'name': 'package2',
            'requires': ['package1'],
            'version': '1.0',
        })
    index2 = {Dist(key): value for key, value in iteritems(index2)}
    r = Resolve(index2)

    assert set(r.find_matches(MatchSpec('package1'))) == {
        Dist('package1-1.0-0.tar.bz2'),
    }
    assert set(r.get_reduced_index(['package1']).keys()) == {
        Dist('package1-1.0-0.tar.bz2'),
        Dist('package2-1.0-0.tar.bz2'),
    }
    assert r.install(['package1']) == r.install(['package2']) == \
        r.install(['package1', 'package2']) == [
        Dist('package1-1.0-0.tar.bz2'),
        Dist('package2-1.0-0.tar.bz2'),
    ]
Example #35
def create_rpm_installer(target, python_spec='python'):
    index = conda.api.get_index()
    matches = Resolve(index).get_pkgs(MatchSpec(python_spec))
    if not matches:
        raise RuntimeError('No python found in the channels.')
    pkg_info = matches[-1].info
    dist_name = '{}-{}-{}'.format(pkg_info['name'], pkg_info['version'],
                                  pkg_info['build'])
    pkg_cache = os.path.join(target, 'SOURCES')
    if not conda.install.is_fetched(pkg_cache, dist_name):
        print('Fetching {}'.format(dist_name))
        conda.fetch.fetch_pkg(pkg_info, pkg_cache)

    installer_source = os.path.join(os.path.dirname(__file__), 'install.py')
    installer_target = os.path.join(pkg_cache, 'install.py')

    shutil.copyfile(installer_source, installer_target)

    specfile = os.path.join(target, 'SPECS', 'SciTools-installer.spec')
    with open(specfile, 'w') as fh:
        fh.write(generate.render_installer(pkg_info))
Example #36
def get_index_r_2(subdir=context.subdir):
    with open(join(dirname(__file__), 'data', 'index2.json')) as fi:
        packages = json.load(fi)
        repodata = {
            "info": {
                "subdir": subdir,
                "arch": context.arch_name,
                "platform": context.platform,
            },
            "packages": packages,
        }

    channel = Channel('https://conda.anaconda.org/channel-2/%s' % subdir)
    sd = SubdirData(channel)
    with env_var("CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY", "false", reset_context):
        sd._process_raw_repodata_str(json.dumps(repodata))
    sd._loaded = True
    SubdirData._cache_[channel.url(with_credentials=True)] = sd

    index = {prec: prec for prec in sd._package_records}
    r = Resolve(index, channels=(channel,))
    return index, r
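
A hedged usage sketch for the fixture above: load the channel-2 test index and query the resolver. The MatchSpec import path mirrors the other examples and is an assumption about the conda version in use:

from conda.resolve import MatchSpec

index, r = get_index_r_2()
records = list(r.find_matches(MatchSpec('python')))
print('python records in the channel-2 test index:', len(records))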
Example #37
def check_install(packages,
                  platform=None,
                  channel_urls=(),
                  prepend=True,
                  minimal_hint=False):
    try:
        prefix = tempfile.mkdtemp('conda')
        specs = common.specs_from_args(packages)
        index = get_index(channel_urls=channel_urls,
                          prepend=prepend,
                          platform=platform,
                          prefix=prefix)
        linked = ci.linked(prefix)
        plan.add_defaults_to_specs(Resolve(index), linked, specs)
        actions = plan.install_actions(prefix,
                                       index,
                                       specs,
                                       pinned=False,
                                       minimal_hint=minimal_hint)
        plan.display_actions(actions, index)
        return actions
    finally:
        ci.rm_rf(prefix)
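
A hypothetical dry run with check_install: solve a couple of illustrative specs in a throwaway prefix and display the planned actions without installing anything:

actions = check_install(['numpy 1.9*', 'python 3.5*'], platform='linux-64')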
Example #38
def main():
    p = argparse.ArgumentParser(description=__doc__,
                                formatter_class=argparse.RawTextHelpFormatter)
    p.add_argument(
        'specs',
        nargs='+',
        help='One or more package specifications. '
        'Note that to use spaces inside\na spec, you need to enclose it in '
        'quotes on the command line. \nExamples: \'numpy 1.9*\' scikit-learn \'python 3.5*\''
    )
    p.add_argument(
        '-p',
        '--platform',
        choices=['linux-64', 'linux-32', 'osx-64', 'win-32', 'win-64'],
        default=conda.config.subdir,
        help='The platform. Default: \'%s\'' % conda.config.subdir)

    args = p.parse_args()
    print(args)

    conda.config.platform = args.platform.split('-')[0]
    conda.config.subdir = args.platform

    index = get_index()
    resolver = Resolve(index)

    fns = solve(args.specs, resolver)
    if fns is not False:
        print('\n\nFound solution:')
        print(' ', '\n  '.join(fns))
        return 0
    else:
        print("Generating hint: %s" % (', '.join(args.specs)))
        execute(args.specs, resolver)

    return 1
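
The same flow can be driven programmatically; the specs below are illustrative, and solve() and execute() are the helpers main() calls above (their definitions are not shown in this example):

index = get_index()
resolver = Resolve(index)
fns = solve(['numpy 1.9*', 'python 3.5*'], resolver)
if fns is False:
    execute(['numpy 1.9*', 'python 3.5*'], resolver)  # print an unsatisfiability hint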
Example #39
def test_no_features():
    # Without this, there would be another solution including 'scipy-0.11.0-np16py26_p3.tar.bz2'.
    assert r.install(['python 2.6*', 'numpy 1.6*', 'scipy 0.11*'],
        returnall=True) == [[Dist(add_defaults_if_no_channel(fname)) for fname in [
            'numpy-1.6.2-py26_4.tar.bz2',
            'openssl-1.0.1c-0.tar.bz2',
            'python-2.6.8-6.tar.bz2',
            'readline-6.2-0.tar.bz2',
            'scipy-0.11.0-np16py26_3.tar.bz2',
            'sqlite-3.7.13-0.tar.bz2',
            'system-5.8-1.tar.bz2',
            'tk-8.5.13-0.tar.bz2',
            'zlib-1.2.7-0.tar.bz2',
            ]]]

    assert r.install(['python 2.6*', 'numpy 1.6*', 'scipy 0.11*', 'mkl@'],
        returnall=True) == [[Dist(add_defaults_if_no_channel(fname)) for fname in [
            'mkl-rt-11.0-p0.tar.bz2',           # This,
            'numpy-1.6.2-py26_p4.tar.bz2',      # this,
            'openssl-1.0.1c-0.tar.bz2',
            'python-2.6.8-6.tar.bz2',
            'readline-6.2-0.tar.bz2',
            'scipy-0.11.0-np16py26_p3.tar.bz2', # and this are different.
            'sqlite-3.7.13-0.tar.bz2',
            'system-5.8-1.tar.bz2',
            'tk-8.5.13-0.tar.bz2',
            'zlib-1.2.7-0.tar.bz2',
            ]]]

    index2 = index.copy()
    index2["defaults::pandas-0.12.0-np16py27_0.tar.bz2"] = IndexRecord(**{
            "build": "np16py27_0",
            "build_number": 0,
            "depends": [
              "dateutil",
              "numpy 1.6*",
              "python 2.7*",
              "pytz"
            ],
            "name": "pandas",
            "requires": [
              "dateutil 1.5",
              "numpy 1.6",
              "python 2.7",
              "pytz"
            ],
            "version": "0.12.0"
        })
    # Make it want to choose the pro version by having it be newer.
    index2["defaults::numpy-1.6.2-py27_p5.tar.bz2"] = IndexRecord(**{
            "build": "py27_p5",
            "build_number": 5,
            "depends": [
              "mkl-rt 11.0",
              "python 2.7*"
            ],
            "features": "mkl",
            "name": "numpy",
            "pub_date": "2013-04-29",
            "requires": [
              "mkl-rt 11.0",
              "python 2.7"
            ],
            "version": "1.6.2"
        })

    index2 = {Dist(key): value for key, value in iteritems(index2)}
    r2 = Resolve(index2)

    # This should not pick any mkl packages (the difference here is that none
    # of the specs directly have mkl versions)
    assert r2.solve(['pandas 0.12.0 np16py27_0', 'python 2.7*'],
        returnall=True) == [[Dist(add_defaults_if_no_channel(fname)) for fname in [
            'dateutil-2.1-py27_1.tar.bz2',
            'numpy-1.6.2-py27_4.tar.bz2',
            'openssl-1.0.1c-0.tar.bz2',
            'pandas-0.12.0-np16py27_0.tar.bz2',
            'python-2.7.5-0.tar.bz2',
            'pytz-2013b-py27_0.tar.bz2',
            'readline-6.2-0.tar.bz2',
            'six-1.3.0-py27_0.tar.bz2',
            'sqlite-3.7.13-0.tar.bz2',
            'system-5.8-1.tar.bz2',
            'tk-8.5.13-0.tar.bz2',
            'zlib-1.2.7-0.tar.bz2',
            ]]]

    assert r2.solve(['pandas 0.12.0 np16py27_0', 'python 2.7*', 'mkl@'],
        returnall=True)[0] == [[Dist(add_defaults_if_no_channel(fname)) for fname in [
            'dateutil-2.1-py27_1.tar.bz2',
            'mkl-rt-11.0-p0.tar.bz2',           # This
            'numpy-1.6.2-py27_p5.tar.bz2',      # and this are different.
            'openssl-1.0.1c-0.tar.bz2',
            'pandas-0.12.0-np16py27_0.tar.bz2',
            'python-2.7.5-0.tar.bz2',
            'pytz-2013b-py27_0.tar.bz2',
            'readline-6.2-0.tar.bz2',
            'six-1.3.0-py27_0.tar.bz2',
            'sqlite-3.7.13-0.tar.bz2',
            'system-5.8-1.tar.bz2',
            'tk-8.5.13-0.tar.bz2',
            'zlib-1.2.7-0.tar.bz2',
            ]]][0]
Example #40
def test_nonexistent_deps():
    index2 = index.copy()
    index2['mypackage-1.0-py33_0.tar.bz2'] = IndexRecord(**{
        'build': 'py33_0',
        'build_number': 0,
        'depends': ['nose', 'python 3.3*', 'notarealpackage 2.0*'],
        'name': 'mypackage',
        'requires': ['nose 1.2.1', 'python 3.3'],
        'version': '1.0',
    })
    index2['mypackage-1.1-py33_0.tar.bz2'] = IndexRecord(**{
        'build': 'py33_0',
        'build_number': 0,
        'depends': ['nose', 'python 3.3*'],
        'name': 'mypackage',
        'requires': ['nose 1.2.1', 'python 3.3'],
        'version': '1.1',
    })
    index2['anotherpackage-1.0-py33_0.tar.bz2'] = IndexRecord(**{
        'build': 'py33_0',
        'build_number': 0,
        'depends': ['nose', 'mypackage 1.1'],
        'name': 'anotherpackage',
        'requires': ['nose', 'mypackage 1.1'],
        'version': '1.0',
    })
    index2['anotherpackage-2.0-py33_0.tar.bz2'] = IndexRecord(**{
        'build': 'py33_0',
        'build_number': 0,
        'depends': ['nose', 'mypackage'],
        'name': 'anotherpackage',
        'requires': ['nose', 'mypackage'],
        'version': '2.0',
    })
    index2 = {Dist(key): value for key, value in iteritems(index2)}
    r = Resolve(index2)

    assert set(r.find_matches(MatchSpec('mypackage'))) == {
        Dist('mypackage-1.0-py33_0.tar.bz2'),
        Dist('mypackage-1.1-py33_0.tar.bz2'),
    }
    assert set(d.to_filename() for d in r.get_reduced_index(['mypackage']).keys()) == {
        'mypackage-1.1-py33_0.tar.bz2',
        'nose-1.1.2-py33_0.tar.bz2',
        'nose-1.2.1-py33_0.tar.bz2',
        'nose-1.3.0-py33_0.tar.bz2',
        'openssl-1.0.1c-0.tar.bz2',
        'python-3.3.0-2.tar.bz2',
        'python-3.3.0-3.tar.bz2',
        'python-3.3.0-4.tar.bz2',
        'python-3.3.0-pro0.tar.bz2',
        'python-3.3.0-pro1.tar.bz2',
        'python-3.3.1-0.tar.bz2',
        'python-3.3.2-0.tar.bz2',
        'readline-6.2-0.tar.bz2',
        'sqlite-3.7.13-0.tar.bz2',
        'system-5.8-0.tar.bz2',
        'system-5.8-1.tar.bz2',
        'tk-8.5.13-0.tar.bz2',
        'zlib-1.2.7-0.tar.bz2'}

    target_result = r.install(['mypackage'])
    assert target_result == r.install(['mypackage 1.1'])
    assert target_result == [
        Dist(add_defaults_if_no_channel(dname)) for dname in [
        '<unknown>::mypackage-1.1-py33_0.tar.bz2',
        'nose-1.3.0-py33_0.tar.bz2',
        'openssl-1.0.1c-0.tar.bz2',
        'python-3.3.2-0.tar.bz2',
        'readline-6.2-0.tar.bz2',
        'sqlite-3.7.13-0.tar.bz2',
        'system-5.8-1.tar.bz2',
        'tk-8.5.13-0.tar.bz2',
        'zlib-1.2.7-0.tar.bz2',
    ]]
    assert raises(NoPackagesFoundError, lambda: r.install(['mypackage 1.0']))
    assert raises(NoPackagesFoundError, lambda: r.install(['mypackage 1.0', 'burgertime 1.0']))

    assert r.install(['anotherpackage 1.0']) == [
        Dist(add_defaults_if_no_channel(dname)) for dname in [
        '<unknown>::anotherpackage-1.0-py33_0.tar.bz2',
        '<unknown>::mypackage-1.1-py33_0.tar.bz2',
        'nose-1.3.0-py33_0.tar.bz2',
        'openssl-1.0.1c-0.tar.bz2',
        'python-3.3.2-0.tar.bz2',
        'readline-6.2-0.tar.bz2',
        'sqlite-3.7.13-0.tar.bz2',
        'system-5.8-1.tar.bz2',
        'tk-8.5.13-0.tar.bz2',
        'zlib-1.2.7-0.tar.bz2',
    ]]

    assert r.install(['anotherpackage']) == [
        Dist(add_defaults_if_no_channel(dname)) for dname in [
        '<unknown>::anotherpackage-2.0-py33_0.tar.bz2',
        '<unknown>::mypackage-1.1-py33_0.tar.bz2',
        'nose-1.3.0-py33_0.tar.bz2',
        'openssl-1.0.1c-0.tar.bz2',
        'python-3.3.2-0.tar.bz2',
        'readline-6.2-0.tar.bz2',
        'sqlite-3.7.13-0.tar.bz2',
        'system-5.8-1.tar.bz2',
        'tk-8.5.13-0.tar.bz2',
        'zlib-1.2.7-0.tar.bz2',
    ]]

    # This time, the latest version is messed up
    index3 = index.copy()
    index3['mypackage-1.1-py33_0.tar.bz2'] = IndexRecord(**{
        'build': 'py33_0',
        'build_number': 0,
        'depends': ['nose', 'python 3.3*', 'notarealpackage 2.0*'],
        'name': 'mypackage',
        'requires': ['nose 1.2.1', 'python 3.3'],
        'version': '1.1',
    })
    index3['mypackage-1.0-py33_0.tar.bz2'] = IndexRecord(**{
        'build': 'py33_0',
        'build_number': 0,
        'depends': ['nose', 'python 3.3*'],
        'name': 'mypackage',
        'requires': ['nose 1.2.1', 'python 3.3'],
        'version': '1.0',
    })
    index3['anotherpackage-1.0-py33_0.tar.bz2'] = IndexRecord(**{
        'build': 'py33_0',
        'build_number': 0,
        'depends': ['nose', 'mypackage 1.0'],
        'name': 'anotherpackage',
        'requires': ['nose', 'mypackage 1.0'],
        'version': '1.0',
    })
    index3['anotherpackage-2.0-py33_0.tar.bz2'] = IndexRecord(**{
        'build': 'py33_0',
        'build_number': 0,
        'depends': ['nose', 'mypackage'],
        'name': 'anotherpackage',
        'requires': ['nose', 'mypackage'],
        'version': '2.0',
    })
    index3 = {Dist(key): value for key, value in iteritems(index3)}
    r = Resolve(index3)

    assert set(d.to_filename() for d in r.find_matches(MatchSpec('mypackage'))) == {
        'mypackage-1.0-py33_0.tar.bz2',
        'mypackage-1.1-py33_0.tar.bz2',
        }
    assert set(d.to_filename() for d in r.get_reduced_index(['mypackage']).keys()) == {
        'mypackage-1.0-py33_0.tar.bz2',
        'nose-1.1.2-py33_0.tar.bz2',
        'nose-1.2.1-py33_0.tar.bz2',
        'nose-1.3.0-py33_0.tar.bz2',
        'openssl-1.0.1c-0.tar.bz2',
        'python-3.3.0-2.tar.bz2',
        'python-3.3.0-3.tar.bz2',
        'python-3.3.0-4.tar.bz2',
        'python-3.3.0-pro0.tar.bz2',
        'python-3.3.0-pro1.tar.bz2',
        'python-3.3.1-0.tar.bz2',
        'python-3.3.2-0.tar.bz2',
        'readline-6.2-0.tar.bz2',
        'sqlite-3.7.13-0.tar.bz2',
        'system-5.8-0.tar.bz2',
        'system-5.8-1.tar.bz2',
        'tk-8.5.13-0.tar.bz2',
        'zlib-1.2.7-0.tar.bz2'}

    assert r.install(['mypackage']) == r.install(['mypackage 1.0']) == [
        Dist(add_defaults_if_no_channel(dname)) for dname in [
        '<unknown>::mypackage-1.0-py33_0.tar.bz2',
        'nose-1.3.0-py33_0.tar.bz2',
        'openssl-1.0.1c-0.tar.bz2',
        'python-3.3.2-0.tar.bz2',
        'readline-6.2-0.tar.bz2',
        'sqlite-3.7.13-0.tar.bz2',
        'system-5.8-1.tar.bz2',
        'tk-8.5.13-0.tar.bz2',
        'zlib-1.2.7-0.tar.bz2',
    ]]
    assert raises(NoPackagesFoundError, lambda: r.install(['mypackage 1.1']))

    assert r.install(['anotherpackage 1.0']) == [
        Dist(add_defaults_if_no_channel(dname)) for dname in [
        '<unknown>::anotherpackage-1.0-py33_0.tar.bz2',
        '<unknown>::mypackage-1.0-py33_0.tar.bz2',
        'nose-1.3.0-py33_0.tar.bz2',
        'openssl-1.0.1c-0.tar.bz2',
        'python-3.3.2-0.tar.bz2',
        'readline-6.2-0.tar.bz2',
        'sqlite-3.7.13-0.tar.bz2',
        'system-5.8-1.tar.bz2',
        'tk-8.5.13-0.tar.bz2',
        'zlib-1.2.7-0.tar.bz2',
    ]]

    # If recursive checking is working correctly, this will give
    # anotherpackage 2.0, not anotherpackage 1.0
    assert r.install(['anotherpackage']) == [
        Dist(add_defaults_if_no_channel(dname)) for dname in [
        '<unknown>::anotherpackage-2.0-py33_0.tar.bz2',
        '<unknown>::mypackage-1.0-py33_0.tar.bz2',
        'nose-1.3.0-py33_0.tar.bz2',
        'openssl-1.0.1c-0.tar.bz2',
        'python-3.3.2-0.tar.bz2',
        'readline-6.2-0.tar.bz2',
        'sqlite-3.7.13-0.tar.bz2',
        'system-5.8-1.tar.bz2',
        'tk-8.5.13-0.tar.bz2',
        'zlib-1.2.7-0.tar.bz2',
    ]]
Example #41
def test_generate_eq():
    dists = r.get_reduced_index(['anaconda'])
    r2 = Resolve(dists, True, True)
    C = r2.gen_clauses()
    eqv, eqb = r2.generate_version_metrics(C, list(r2.groups.keys()))
    # Should satisfy the following criteria:
    # - lower versions of the same package should have higher
    #   coefficients.
    # - the same versions of the same package (e.g., different build strings)
    #   should have the same coefficients.
    # - a package that only has one version should not appear, unless
    #   include=True as it will have a 0 coefficient. The same is true of the
    #   latest version of a package.
    eqv = {Dist(key).to_filename(): value for key, value in iteritems(eqv)}
    eqb = {Dist(key).to_filename(): value for key, value in iteritems(eqb)}
    assert eqv == {
        'anaconda-1.4.0-np15py26_0.tar.bz2': 1,
        'anaconda-1.4.0-np15py27_0.tar.bz2': 1,
        'anaconda-1.4.0-np16py26_0.tar.bz2': 1,
        'anaconda-1.4.0-np16py27_0.tar.bz2': 1,
        'anaconda-1.4.0-np17py26_0.tar.bz2': 1,
        'anaconda-1.4.0-np17py27_0.tar.bz2': 1,
        'anaconda-1.4.0-np17py33_0.tar.bz2': 1,
        'astropy-0.2-np15py26_0.tar.bz2': 1,
        'astropy-0.2-np15py27_0.tar.bz2': 1,
        'astropy-0.2-np16py26_0.tar.bz2': 1,
        'astropy-0.2-np16py27_0.tar.bz2': 1,
        'astropy-0.2-np17py26_0.tar.bz2': 1,
        'astropy-0.2-np17py27_0.tar.bz2': 1,
        'astropy-0.2-np17py33_0.tar.bz2': 1,
        'biopython-1.60-np15py26_0.tar.bz2': 1,
        'biopython-1.60-np15py27_0.tar.bz2': 1,
        'biopython-1.60-np16py26_0.tar.bz2': 1,
        'biopython-1.60-np16py27_0.tar.bz2': 1,
        'biopython-1.60-np17py26_0.tar.bz2': 1,
        'biopython-1.60-np17py27_0.tar.bz2': 1,
        'bitarray-0.8.0-py26_0.tar.bz2': 1,
        'bitarray-0.8.0-py27_0.tar.bz2': 1,
        'bitarray-0.8.0-py33_0.tar.bz2': 1,
        'boto-2.8.0-py26_0.tar.bz2': 1,
        'boto-2.8.0-py27_0.tar.bz2': 1,
        'conda-1.4.4-py27_0.tar.bz2': 1,
        'cython-0.18-py26_0.tar.bz2': 1,
        'cython-0.18-py27_0.tar.bz2': 1,
        'cython-0.18-py33_0.tar.bz2': 1,
        'distribute-0.6.34-py26_1.tar.bz2': 1,
        'distribute-0.6.34-py27_1.tar.bz2': 1,
        'distribute-0.6.34-py33_1.tar.bz2': 1,
        'gevent-0.13.7-py26_0.tar.bz2': 1,
        'gevent-0.13.7-py27_0.tar.bz2': 1,
        'ipython-0.13.1-py26_1.tar.bz2': 1,
        'ipython-0.13.1-py27_1.tar.bz2': 1,
        'ipython-0.13.1-py33_1.tar.bz2': 1,
        'llvmpy-0.11.1-py26_0.tar.bz2': 1,
        'llvmpy-0.11.1-py27_0.tar.bz2': 1,
        'llvmpy-0.11.1-py33_0.tar.bz2': 1,
        'lxml-3.0.2-py26_0.tar.bz2': 1,
        'lxml-3.0.2-py27_0.tar.bz2': 1,
        'lxml-3.0.2-py33_0.tar.bz2': 1,
        'matplotlib-1.2.0-np15py26_1.tar.bz2': 1,
        'matplotlib-1.2.0-np15py27_1.tar.bz2': 1,
        'matplotlib-1.2.0-np16py26_1.tar.bz2': 1,
        'matplotlib-1.2.0-np16py27_1.tar.bz2': 1,
        'matplotlib-1.2.0-np17py26_1.tar.bz2': 1,
        'matplotlib-1.2.0-np17py27_1.tar.bz2': 1,
        'matplotlib-1.2.0-np17py33_1.tar.bz2': 1,
        'nose-1.2.1-py26_0.tar.bz2': 1,
        'nose-1.2.1-py27_0.tar.bz2': 1,
        'nose-1.2.1-py33_0.tar.bz2': 1,
        'numba-0.7.0-np16py26_1.tar.bz2': 1,
        'numba-0.7.0-np16py27_1.tar.bz2': 1,
        'numba-0.7.0-np17py26_1.tar.bz2': 1,
        'numba-0.7.0-np17py27_1.tar.bz2': 1,
        'numpy-1.5.1-py26_3.tar.bz2': 3,
        'numpy-1.5.1-py27_3.tar.bz2': 3,
        'numpy-1.6.2-py26_3.tar.bz2': 2,
        'numpy-1.6.2-py26_4.tar.bz2': 2,
        'numpy-1.6.2-py26_p4.tar.bz2': 2,
        'numpy-1.6.2-py27_3.tar.bz2': 2,
        'numpy-1.6.2-py27_4.tar.bz2': 2,
        'numpy-1.6.2-py27_p4.tar.bz2': 2,
        'numpy-1.7.0-py26_0.tar.bz2': 1,
        'numpy-1.7.0-py27_0.tar.bz2': 1,
        'numpy-1.7.0-py33_0.tar.bz2': 1,
        'pandas-0.10.0-np16py26_0.tar.bz2': 2,
        'pandas-0.10.0-np16py27_0.tar.bz2': 2,
        'pandas-0.10.0-np17py26_0.tar.bz2': 2,
        'pandas-0.10.0-np17py27_0.tar.bz2': 2,
        'pandas-0.10.1-np16py26_0.tar.bz2': 1,
        'pandas-0.10.1-np16py27_0.tar.bz2': 1,
        'pandas-0.10.1-np17py26_0.tar.bz2': 1,
        'pandas-0.10.1-np17py27_0.tar.bz2': 1,
        'pandas-0.10.1-np17py33_0.tar.bz2': 1,
        'pandas-0.8.1-np16py26_0.tar.bz2': 5,
        'pandas-0.8.1-np16py27_0.tar.bz2': 5,
        'pandas-0.8.1-np17py26_0.tar.bz2': 5,
        'pandas-0.8.1-np17py27_0.tar.bz2': 5,
        'pandas-0.9.0-np16py26_0.tar.bz2': 4,
        'pandas-0.9.0-np16py27_0.tar.bz2': 4,
        'pandas-0.9.0-np17py26_0.tar.bz2': 4,
        'pandas-0.9.0-np17py27_0.tar.bz2': 4,
        'pandas-0.9.1-np16py26_0.tar.bz2': 3,
        'pandas-0.9.1-np16py27_0.tar.bz2': 3,
        'pandas-0.9.1-np17py26_0.tar.bz2': 3,
        'pandas-0.9.1-np17py27_0.tar.bz2': 3,
        'pip-1.2.1-py26_1.tar.bz2': 1,
        'pip-1.2.1-py27_1.tar.bz2': 1,
        'pip-1.2.1-py33_1.tar.bz2': 1,
        'psutil-0.6.1-py26_0.tar.bz2': 1,
        'psutil-0.6.1-py27_0.tar.bz2': 1,
        'psutil-0.6.1-py33_0.tar.bz2': 1,
        'pyflakes-0.6.1-py26_0.tar.bz2': 1,
        'pyflakes-0.6.1-py27_0.tar.bz2': 1,
        'pyflakes-0.6.1-py33_0.tar.bz2': 1,
        'python-2.6.8-6.tar.bz2': 4,
        'python-2.7.3-7.tar.bz2': 3,
        'python-2.7.4-0.tar.bz2': 2,
        'python-3.3.0-4.tar.bz2': 1,
        'pytz-2012j-py26_0.tar.bz2': 1,
        'pytz-2012j-py27_0.tar.bz2': 1,
        'pytz-2012j-py33_0.tar.bz2': 1,
        'requests-0.13.9-py26_0.tar.bz2': 1,
        'requests-0.13.9-py27_0.tar.bz2': 1,
        'requests-0.13.9-py33_0.tar.bz2': 1,
        'scikit-learn-0.13-np15py26_1.tar.bz2': 1,
        'scikit-learn-0.13-np15py27_1.tar.bz2': 1,
        'scikit-learn-0.13-np16py26_1.tar.bz2': 1,
        'scikit-learn-0.13-np16py27_1.tar.bz2': 1,
        'scikit-learn-0.13-np17py26_1.tar.bz2': 1,
        'scikit-learn-0.13-np17py27_1.tar.bz2': 1,
        'scipy-0.11.0-np15py26_3.tar.bz2': 1,
        'scipy-0.11.0-np15py27_3.tar.bz2': 1,
        'scipy-0.11.0-np16py26_3.tar.bz2': 1,
        'scipy-0.11.0-np16py27_3.tar.bz2': 1,
        'scipy-0.11.0-np17py26_3.tar.bz2': 1,
        'scipy-0.11.0-np17py27_3.tar.bz2': 1,
        'scipy-0.11.0-np17py33_3.tar.bz2': 1,
        'six-1.2.0-py26_0.tar.bz2': 1,
        'six-1.2.0-py27_0.tar.bz2': 1,
        'six-1.2.0-py33_0.tar.bz2': 1,
        'spyder-2.1.13-py27_0.tar.bz2': 1,
        'sqlalchemy-0.7.8-py26_0.tar.bz2': 1,
        'sqlalchemy-0.7.8-py27_0.tar.bz2': 1,
        'sqlalchemy-0.7.8-py33_0.tar.bz2': 1,
        'sympy-0.7.1-py26_0.tar.bz2': 1,
        'sympy-0.7.1-py27_0.tar.bz2': 1,
        'tornado-2.4.1-py26_0.tar.bz2': 1,
        'tornado-2.4.1-py27_0.tar.bz2': 1,
        'tornado-2.4.1-py33_0.tar.bz2': 1,
        'xlrd-0.9.0-py26_0.tar.bz2': 1,
        'xlrd-0.9.0-py27_0.tar.bz2': 1,
        'xlrd-0.9.0-py33_0.tar.bz2': 1,
        'xlwt-0.7.4-py26_0.tar.bz2': 1,
        'xlwt-0.7.4-py27_0.tar.bz2': 1}
    assert eqb == {
        'cairo-1.12.2-0.tar.bz2': 1,
        'cubes-0.10.2-py27_0.tar.bz2': 1,
        'dateutil-2.1-py26_0.tar.bz2': 1,
        'dateutil-2.1-py27_0.tar.bz2': 1,
        'dateutil-2.1-py33_0.tar.bz2': 1,
        'gevent-websocket-0.3.6-py26_1.tar.bz2': 1,
        'gevent-websocket-0.3.6-py27_1.tar.bz2': 1,
        'gevent_zeromq-0.2.5-py26_1.tar.bz2': 1,
        'gevent_zeromq-0.2.5-py27_1.tar.bz2': 1,
        'libnetcdf-4.2.1.1-0.tar.bz2': 1,
        'numexpr-2.0.1-np16py26_1.tar.bz2': 2,
        'numexpr-2.0.1-np16py26_2.tar.bz2': 1,
        'numexpr-2.0.1-np16py26_ce0.tar.bz2': 3,
        'numexpr-2.0.1-np16py26_p1.tar.bz2': 2,
        'numexpr-2.0.1-np16py26_p2.tar.bz2': 1,
        'numexpr-2.0.1-np16py26_pro0.tar.bz2': 3,
        'numexpr-2.0.1-np16py27_1.tar.bz2': 2,
        'numexpr-2.0.1-np16py27_2.tar.bz2': 1,
        'numexpr-2.0.1-np16py27_ce0.tar.bz2': 3,
        'numexpr-2.0.1-np16py27_p1.tar.bz2': 2,
        'numexpr-2.0.1-np16py27_p2.tar.bz2': 1,
        'numexpr-2.0.1-np16py27_pro0.tar.bz2': 3,
        'numexpr-2.0.1-np17py26_1.tar.bz2': 2,
        'numexpr-2.0.1-np17py26_2.tar.bz2': 1,
        'numexpr-2.0.1-np17py26_ce0.tar.bz2': 3,
        'numexpr-2.0.1-np17py26_p1.tar.bz2': 2,
        'numexpr-2.0.1-np17py26_p2.tar.bz2': 1,
        'numexpr-2.0.1-np17py26_pro0.tar.bz2': 3,
        'numexpr-2.0.1-np17py27_1.tar.bz2': 2,
        'numexpr-2.0.1-np17py27_2.tar.bz2': 1,
        'numexpr-2.0.1-np17py27_ce0.tar.bz2': 3,
        'numexpr-2.0.1-np17py27_p1.tar.bz2': 2,
        'numexpr-2.0.1-np17py27_p2.tar.bz2': 1,
        'numexpr-2.0.1-np17py27_pro0.tar.bz2': 3,
        'numpy-1.6.2-py26_3.tar.bz2': 1,
        'numpy-1.6.2-py27_3.tar.bz2': 1,
        'py2cairo-1.10.0-py26_0.tar.bz2': 1,
        'py2cairo-1.10.0-py27_0.tar.bz2': 1,
        'pycurl-7.19.0-py26_0.tar.bz2': 1,
        'pycurl-7.19.0-py27_0.tar.bz2': 1,
        'pysal-1.5.0-np15py27_0.tar.bz2': 1,
        'pysal-1.5.0-np16py27_0.tar.bz2': 1,
        'pysal-1.5.0-np17py27_0.tar.bz2': 1,
        'pytest-2.3.4-py26_0.tar.bz2': 1,
        'pytest-2.3.4-py27_0.tar.bz2': 1,
        'pyzmq-2.2.0.1-py26_0.tar.bz2': 1,
        'pyzmq-2.2.0.1-py27_0.tar.bz2': 1,
        'pyzmq-2.2.0.1-py33_0.tar.bz2': 1,
        'scikit-image-0.8.2-np16py26_0.tar.bz2': 1,
        'scikit-image-0.8.2-np16py27_0.tar.bz2': 1,
        'scikit-image-0.8.2-np17py26_0.tar.bz2': 1,
        'scikit-image-0.8.2-np17py27_0.tar.bz2': 1,
        'scikit-image-0.8.2-np17py33_0.tar.bz2': 1,
        'sphinx-1.1.3-py26_2.tar.bz2': 1,
        'sphinx-1.1.3-py27_2.tar.bz2': 1,
        'sphinx-1.1.3-py33_2.tar.bz2': 1,
        'statsmodels-0.4.3-np16py26_0.tar.bz2': 1,
        'statsmodels-0.4.3-np16py27_0.tar.bz2': 1,
        'statsmodels-0.4.3-np17py26_0.tar.bz2': 1,
        'statsmodels-0.4.3-np17py27_0.tar.bz2': 1,
        'system-5.8-0.tar.bz2': 1,
        'theano-0.5.0-np15py26_0.tar.bz2': 1,
        'theano-0.5.0-np15py27_0.tar.bz2': 1,
        'theano-0.5.0-np16py26_0.tar.bz2': 1,
        'theano-0.5.0-np16py27_0.tar.bz2': 1,
        'theano-0.5.0-np17py26_0.tar.bz2': 1,
        'theano-0.5.0-np17py27_0.tar.bz2': 1,
        'zeromq-2.2.0-0.tar.bz2': 1}
Beispiel #42
0
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create
    """
    newenv = bool(command == 'create')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix, json=args.json)

    if command == 'update':
        if args.all:
            if args.packages:
                common.error_and_exit("""--all cannot be used with packages""",
                                      json=args.json,
                                      error_type="ValueError")
        else:
            if len(args.packages) == 0:
                common.error_and_exit("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix,
                                      json=args.json,
                                      error_type="ValueError")

    if command == 'update':
        linked = ci.linked(prefix)
        for name in args.packages:
            common.arg2spec(name, json=args.json)
            if '=' in name:
                common.error_and_exit("Invalid package name: '%s'" % (name),
                                      json=args.json,
                                      error_type="ValueError")
            if name not in set(ci.name_dist(d) for d in linked):
                common.error_and_exit("package '%s' is not installed in %s" %
                                      (name, prefix),
                                      json=args.json,
                                      error_type="ValueError")

    if newenv and args.clone:
        if args.packages:
            common.error_and_exit('did not expect any arguments for --clone',
                                  json=args.json,
                                  error_type="ValueError")
        clone(args.clone, prefix, json=args.json, quiet=args.quiet)
        touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)
        return

    if newenv and not args.no_default_packages:
        default_packages = config.create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in config.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    specs = []
    if args.file:
        specs.extend(common.specs_from_url(args.file, json=args.json))
    elif getattr(args, 'all', False):
        linked = ci.linked(prefix)
        for pkg in linked:
            name, ver, build = pkg.rsplit('-', 2)
            if name == 'python' and ver.startswith('2'):
                # Oh Python 2...
                specs.append('%s >=%s,<3' % (name, ver))
            else:
                specs.append('%s >=%s' % (name, ver))
    specs.extend(common.specs_from_args(args.packages, json=args.json))

    if command == 'install' and args.revision:
        get_revision(args.revision, json=args.json)
    else:
        common.check_specs(prefix, specs, json=args.json)

    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build import config as build_config
        except ImportError:
            common.error_and_exit(
                "you need to have 'conda-build' installed"
                " to use the --use-local option",
                json=args.json,
                error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        index = common.get_index_trap([url_path(build_config.croot)],
                                      use_cache=args.use_index_cache,
                                      unknown=args.unknown,
                                      json=args.json)
    else:
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      unknown=args.unknown,
                                      json=args.json)

    # Don't update packages that are already up-to-date
    if command == 'update' and not args.all:
        r = Resolve(index)
        orig_packages = args.packages[:]
        for name in orig_packages:
            installed_metadata = [
                ci.is_linked(prefix, dist) for dist in linked
            ]
            vers_inst = [
                dist.rsplit('-', 2)[1] for dist in linked
                if dist.rsplit('-', 2)[0] == name
            ]
            build_inst = [
                m['build_number'] for m in installed_metadata
                if m['name'] == name
            ]

            try:
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
            except AssertionError as e:
                if args.json:
                    common.exception_and_exit(e, json=True)
                else:
                    raise

            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]

            if (latest.version == vers_inst[0] and
                    latest.build_number == build_inst[0]):
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import print_packages

            if not args.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(
                    message='All requested packages already installed.')
            return

    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            install_tar(prefix, tar_path, verbose=not args.quiet)
            return

    # handle explicit installs of conda packages
    if args.packages and all(s.endswith('.tar.bz2') for s in args.packages):
        from conda.misc import install_local_packages
        install_local_packages(prefix, args.packages, verbose=not args.quiet)
        return

    if any(s.endswith('.tar.bz2') for s in args.packages):
        common.error_and_exit(
            "cannot mix specifications with conda package filenames",
            json=args.json,
            error_type="ValueError")

    if args.force:
        args.no_deps = True

    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                common.error_and_exit("Error: could not create directory: %s" %
                                      prefix,
                                      json=args.json,
                                      error_type="OSError")
        else:
            common.error_and_exit("""\
environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix,
                                  json=args.json,
                                  error_type="NoEnvironmentFound")

    try:
        if command == 'install' and args.revision:
            actions = plan.revert_actions(prefix, get_revision(args.revision))
        else:
            actions = plan.install_actions(prefix,
                                           index,
                                           specs,
                                           force=args.force,
                                           only_names=only_names,
                                           pinned=args.pinned,
                                           minimal_hint=args.alt_hint)
    except NoPackagesFound as e:
        error_message = e.args[0]

        packages = {index[fn]['name'] for fn in index}

        for pkg in e.pkgs:
            close = get_close_matches(pkg, packages)
            if close:
                error_message += "\n\nDid you mean one of these?\n    %s" % (
                    ', '.join(close))
            error_message += '\n\nYou can search for this package on Binstar with'
            error_message += '\n\n    binstar search -t conda %s' % pkg
            error_message += '\n\nYou may need to install the Binstar command line client with'
            error_message += '\n\n    conda install binstar'
        common.error_and_exit(error_message, json=args.json)
    except SystemExit as e:
        # Unsatisfiable package specifications/no such revision/import error
        error_type = 'UnsatisfiableSpecifications'
        if e.args and 'could not import' in e.args[0]:
            error_type = 'ImportError'
        common.exception_and_exit(e,
                                  json=args.json,
                                  newline=True,
                                  error_text=False,
                                  error_type=error_type)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import print_packages

        if not args.json:
            regex = '^(%s)$' % '|'.join(spec_names)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(
                message='All requested packages already installed.')
        return

    if not args.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    if not args.json:
        if not pscheck.main(args):
            common.confirm_yn(args)
    else:
        if (sys.platform == 'win32' and not args.force_pscheck
                and not pscheck.check_processes(verbose=False)):
            common.error_and_exit(
                "Cannot continue operation while processes "
                "from packages are running without --force-pscheck.",
                json=True,
                error_type="ProcessesStillRunning")
        elif args.dry_run:
            common.stdout_json_success(actions=actions, dry_run=True)
            sys.exit(0)

    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
        except RuntimeError as e:
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            common.exception_and_exit(e, error_type=error_type, json=args.json)
        except SystemExit as e:
            common.exception_and_exit(e, json=args.json)

    if newenv:
        touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)

    if args.json:
        common.stdout_json_success(actions=actions)
Beispiel #43
0
def execute(specs: List[str], r: Resolve) -> None:
    mspecs = [MatchSpec(m) for m in specs]

    unmet_dependency = None  # type: Union[None, str]
    pkgs = implicated_packages(specs, r)
    unmet_depgraph = {}  # type: Dict[str, Set[str]]

    # mapping from package name to all of the filenames that are plausible
    # installation candidates for that package
    valid = {}  # type: Dict

    # when all of the plausible installation candidates for a package are
    # removed from ``valid``, we record the reason for that exclusion in
    # this dict, which maps package names to descriptions (English strings).
    exclusion_reasons = OrderedDict()  # type: OrderedDict

    for pkg in pkgs:
        try:
            # if we have a matchspec for this package, get the valid files
            # for it.
            ms = next((ms for ms in mspecs if ms.name == MatchSpec(pkg).name))
        except StopIteration:
            # if this package is an indirect dependency, we just have the name,
            # so we get all of the candidate files
            ms = MatchSpec(pkg)
        valid[pkg] = list(r.find_matches(ms))

    while True:
        # in each iteration of this loop, we try to prune out some packages
        # from the valid dict.

        # first, record the number of filenames in the valid dict. use this
        # to detect convergence and control termination of the while loop.
        pre_length = sum(len(fns) for fns in valid.values())

        # iterate over the items in ``valid`` with the keys in the order
        # of pkgs, so that we go up the dependency chain.
        for key, fns in zip(pkgs, (valid[pkg] for pkg in pkgs)):

            # map filenames to a dict whose keys are the MatchSpecs
            # that this file depends on, and the values are whether
            # or not that MatchSpec currently has *any* valid files that
            # would satisfy it.
            satisfied = {fn: deps_are_satisfiable(fn, valid, r) for fn in fns}

            # files can only stay in valid if each of their dependencies
            # is satisfiable.
            valid[key] = {
                fn
                for fn, sat in satisfied.items() if all(sat.values())
            }

            # if a certain package now has zero valid installation candidates,
            # we want to record a string to help explain why.
            if len(valid[key]) == 0 and key not in exclusion_reasons:
                unmet_depgraph[key] = {
                    ms.name
                    for ms2sat in satisfied.values() for ms in ms2sat
                    if not ms2sat[ms]
                }

                fn2coloreddeps = {}  # type: Dict[str, str]
                for fn, sat in satisfied.items():
                    parts = [
                        colored(d.spec, 'green' if sat[d] else 'red')
                        for d in sorted(sat, key=lambda m: m.spec)
                    ]
                    fn2coloreddeps[fn] = ', '.join(parts)

                lines = ['No %s binary matches specs:' % colored(key, 'blue')]
                max_fn_length = max(len(fn) for fn in fn2coloreddeps.keys())
                fn_spec = '%-{0:d}s'.format(max_fn_length - 6)
                for fn in sorted(fn2coloreddeps.keys(), reverse=True):
                    coloreddeps = fn2coloreddeps[fn]
                    # strip off the '.tar.bz2' when making the printout
                    lines.append(''.join(
                        ('  ', fn_spec % (fn[:-8] + ': '), coloreddeps)))
                exclusion_reasons[key] = '\n'.join(lines)

                # if a package with zero installation candidates is *required*
                # (in the user's supplied specs), then we know we've failed.
                if any(key == ms.name for ms in mspecs):
                    unmet_dependency = key

        # convergence without any invalidated packages, so we can't generate
        # a hint :(
        post_length = sum(len(fns) for fns in valid.values())
        if pre_length == post_length:
            break

    if unmet_dependency is not None:
        print_output(unmet_dependency, exclusion_reasons, unmet_depgraph)

    return None
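The loop above is a fixed-point iteration: candidates keep getting pruned until a pass removes nothing. The toy sketch below uses an invented dependency map (plain dicts and sets, not conda data structures) to show the same idea in isolation.

# Toy illustration of the fixed-point prune, with made-up package data.
# Each candidate filename maps to the set of package names it depends on;
# a candidate survives only while every dependency still has a candidate left.
candidates = {
    'app':  {'app-1.0': {'libA'}, 'app-2.0': {'libB'}},
    'libA': {'libA-1.0': set()},
    'libB': {},                     # nothing provides libB
}

changed = True
while changed:
    changed = False
    for cands in candidates.values():
        for fn, needs in list(cands.items()):
            if any(not candidates.get(dep) for dep in needs):
                del cands[fn]
                changed = True

print(candidates)
# -> {'app': {'app-1.0': {'libA'}}, 'libA': {'libA-1.0': set()}, 'libB': {}}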
Beispiel #44
0
def deps_are_satisfiable(fn: str, valid: Dict[str, List[str]],
                         r: Resolve) -> Dict[MatchSpec, bool]:
    return {
        ms: any(depfn in valid[ms.name] for depfn in r.find_matches(ms))
        for ms in r.ms_depends(fn)
    }
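A minimal usage sketch for the helper above, assuming the legacy conda Resolve API these examples target; the two-package index, the filenames, and the ``valid`` contents are invented for illustration.

from conda.resolve import Resolve

# Invented index: 'mypackage' depends on 'python 3.3*', and one python build exists.
tiny_index = {
    'python-3.3.2-0.tar.bz2': {'name': 'python', 'version': '3.3.2',
                               'build': '0', 'build_number': 0, 'depends': []},
    'mypackage-1.0-py33_0.tar.bz2': {'name': 'mypackage', 'version': '1.0',
                                     'build': 'py33_0', 'build_number': 0,
                                     'depends': ['python 3.3*']},
}
r = Resolve(tiny_index)
valid = {'python': ['python-3.3.2-0.tar.bz2'],
         'mypackage': ['mypackage-1.0-py33_0.tar.bz2']}

sat = deps_are_satisfiable('mypackage-1.0-py33_0.tar.bz2', valid, r)
# expected shape: {MatchSpec('python 3.3*'): True}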
Beispiel #45
0
def get_package_versions(package, offline=False):
    index = get_index(offline=offline)
    r = Resolve(index)
    return r.get_pkgs(package, emptyok=True)
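A hedged usage sketch for the wrapper above: it forwards the name to Resolve.get_pkgs over the fetched index, so iterating the result yields one package record per available build (attribute names assume the legacy Package class).

# Sketch only: print every available build of a package known to the channels.
for pkg in get_package_versions('numpy'):
    # fn / version / build_number are assumed from the legacy Package class
    print(pkg.fn, pkg.version, pkg.build_number)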
Beispiel #46
0
import json
import unittest
from os.path import dirname, join

from conda.config import default_python, pkgs_dirs
from conda.install import LINK_HARD
import conda.plan as plan
from conda.resolve import Resolve

with open(join(dirname(__file__), 'index.json')) as fi:
    r = Resolve(json.load(fi))


def solve(specs):
    return [fn[:-8] for fn in r.solve(specs)]


class TestMisc(unittest.TestCase):
    def test_split_linkarg(self):
        for arg, res in [
            ('w3-1.2-0', ('w3-1.2-0', pkgs_dirs[0], LINK_HARD)),
            ('w3-1.2-0 /opt/pkgs 1', ('w3-1.2-0', '/opt/pkgs', 1)),
            (' w3-1.2-0  /opt/pkgs  1  ', ('w3-1.2-0', '/opt/pkgs', 1)),
            (r'w3-1.2-0 C:\A B\pkgs 2', ('w3-1.2-0', r'C:\A B\pkgs', 2))
        ]:
            self.assertEqual(plan.split_linkarg(arg), res)


class TestAddDeaultsToSpec(unittest.TestCase):
    # tests for plan.add_defaults_to_specs(r, linked, specs)
Beispiel #47
0
def install_actions(prefix,
                    index,
                    specs,
                    force=False,
                    only_names=None,
                    pinned=True,
                    minimal_hint=False,
                    update_deps=True):
    r = Resolve(index)
    linked = install.linked(prefix)

    if config.self_update and is_root_prefix(prefix):
        specs.append('conda')

    if pinned:
        pinned_specs = get_pinned_specs(prefix)
        log.debug("Pinned specs=%s" % pinned_specs)
        specs += pinned_specs
        # TODO: Improve error messages here
    add_defaults_to_specs(r, linked, specs)

    must_have = {}
    for fn in r.solve(specs, [d + '.tar.bz2' for d in linked],
                      config.track_features,
                      minimal_hint=minimal_hint,
                      update_deps=update_deps):
        dist = fn[:-8]
        name = install.name_dist(dist)
        if only_names and name not in only_names:
            continue
        must_have[name] = dist

    if is_root_prefix(prefix):
        if install.on_win:
            for name in install.win_ignore_root:
                if name in must_have:
                    del must_have[name]
        for name in config.foreign:
            if name in must_have:
                del must_have[name]
    elif basename(prefix).startswith('_'):
        # anything (including conda) can be installed into environments
        # starting with '_', mainly to allow conda-build to build conda
        pass
    else:
        # disallow conda from being installed into all other environments
        if 'conda' in must_have:
            sys.exit("Error: 'conda' can only be installed into the "
                     "root environment")

    smh = r.graph_sort(must_have)

    if force:
        actions = force_linked_actions(smh, index, prefix)
    else:
        actions = ensure_linked_actions(smh, prefix)

    if (actions[inst.LINK] and sys.platform != 'win32'
            and prefix != config.root_dir):
        actions[inst.SYMLINK_CONDA] = [config.root_dir]

    for dist in sorted(linked):
        name = install.name_dist(dist)
        if name in must_have and dist != must_have[name]:
            add_unlink(actions, dist)

    return actions
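For orientation, a hedged sketch of how this planner is driven from calling code; the environment prefix is a placeholder, and the flow mirrors what the install() examples above do after fetching an index.

import conda.plan as plan
from conda.api import get_index

index = get_index()                                    # channel index, as in the examples above
actions = plan.install_actions('/opt/envs/analysis',   # hypothetical environment prefix
                               index,
                               ['pandas', 'python 2.7*'])
plan.execute_actions(actions, index, verbose=True)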
Beispiel #48
0
def test_no_features():
    # Features that aren't specified shouldn't be selected.
    r.msd_cache = {}

    # Without this, there would be another solution including 'scipy-0.11.0-np16py26_p3.tar.bz2'.
    assert r.solve2(['python 2.6*', 'numpy 1.6*', 'scipy 0.11*'], set(),
        returnall=True) == [[
            'numpy-1.6.2-py26_4.tar.bz2',
            'openssl-1.0.1c-0.tar.bz2',
            'python-2.6.8-6.tar.bz2',
            'readline-6.2-0.tar.bz2',
            'scipy-0.11.0-np16py26_3.tar.bz2',
            'sqlite-3.7.13-0.tar.bz2',
            'system-5.8-1.tar.bz2',
            'tk-8.5.13-0.tar.bz2',
            'zlib-1.2.7-0.tar.bz2',
            ]]

    assert r.solve2(['python 2.6*', 'numpy 1.6*', 'scipy 0.11*'], f_mkl,
        returnall=True) == [[
            'mkl-rt-11.0-p0.tar.bz2',           # This,
            'numpy-1.6.2-py26_p4.tar.bz2',      # this,
            'openssl-1.0.1c-0.tar.bz2',
            'python-2.6.8-6.tar.bz2',
            'readline-6.2-0.tar.bz2',
            'scipy-0.11.0-np16py26_p3.tar.bz2', # and this are different.
            'sqlite-3.7.13-0.tar.bz2',
            'system-5.8-1.tar.bz2',
            'tk-8.5.13-0.tar.bz2',
            'zlib-1.2.7-0.tar.bz2',
            ]]

    index2 = index.copy()
    index2["pandas-0.12.0-np16py27_0.tar.bz2"] = \
        {
            "build": "np16py27_0",
            "build_number": 0,
            "depends": [
              "dateutil",
              "numpy 1.6*",
              "python 2.7*",
              "pytz"
            ],
            "name": "pandas",
            "requires": [
              "dateutil 1.5",
              "numpy 1.6",
              "python 2.7",
              "pytz"
            ],
            "version": "0.12.0"
        }
    # Make it want to choose the pro version by having it be newer.
    index2["numpy-1.6.2-py27_p5.tar.bz2"] = \
        {
            "build": "py27_p5",
            "build_number": 5,
            "depends": [
              "mkl-rt 11.0",
              "python 2.7*"
            ],
            "features": "mkl",
            "name": "numpy",
            "pub_date": "2013-04-29",
            "requires": [
              "mkl-rt 11.0",
              "python 2.7"
            ],
            "version": "1.6.2"
        }

    r2 = Resolve(index2)

    # This should not pick any mkl packages (the difference here is that none
    # of the specs directly have mkl versions)
    assert r2.solve2(['pandas 0.12.0 np16py27_0', 'python 2.7*'], set(),
        returnall=True) == [[
            'dateutil-2.1-py27_1.tar.bz2',
            'numpy-1.6.2-py27_4.tar.bz2',
            'openssl-1.0.1c-0.tar.bz2',
            'pandas-0.12.0-np16py27_0.tar.bz2',
            'python-2.7.5-0.tar.bz2',
            'pytz-2013b-py27_0.tar.bz2',
            'readline-6.2-0.tar.bz2',
            'six-1.3.0-py27_0.tar.bz2',
            'sqlite-3.7.13-0.tar.bz2',
            'system-5.8-1.tar.bz2',
            'tk-8.5.13-0.tar.bz2',
            'zlib-1.2.7-0.tar.bz2',
            ]]

    assert r2.solve2(['pandas 0.12.0 np16py27_0', 'python 2.7*'], f_mkl,
        returnall=True)[0] == [[
            'dateutil-2.1-py27_1.tar.bz2',
            'mkl-rt-11.0-p0.tar.bz2',           # This
            'numpy-1.6.2-py27_p5.tar.bz2',      # and this are different.
            'openssl-1.0.1c-0.tar.bz2',
            'pandas-0.12.0-np16py27_0.tar.bz2',
            'python-2.7.5-0.tar.bz2',
            'pytz-2013b-py27_0.tar.bz2',
            'readline-6.2-0.tar.bz2',
            'six-1.3.0-py27_0.tar.bz2',
            'sqlite-3.7.13-0.tar.bz2',
            'system-5.8-1.tar.bz2',
            'tk-8.5.13-0.tar.bz2',
            'zlib-1.2.7-0.tar.bz2',
            ]][0]
Beispiel #49
0
def execute(args, parser):
    import os
    from os.path import basename, dirname

    import conda
    import conda.config as config
    import conda.misc as misc
    from conda.resolve import Resolve, MatchSpec
    from conda.cli.main_init import is_initialized
    from conda.api import get_index, get_package_versions

    if args.packages:
        if args.json:
            results = defaultdict(list)
            for arg in args.packages:
                for pkg in get_package_versions(arg):
                    results[arg].append(pkg._asdict())
            common.stdout_json(results)
            return
        index = get_index()
        r = Resolve(index)
        specs = map(common.arg2spec, args.packages)

        for spec in specs:
            versions = r.get_pkgs(MatchSpec(spec))
            for pkg in versions:
                pretty_package(pkg)

        return

    options = 'envs', 'system', 'license'

    try:
        import requests
        requests_version = requests.__version__
    except ImportError:
        requests_version = "could not import"
    except Exception as e:
        requests_version = "Error %s" % e

    try:
        import conda_build
    except ImportError:
        conda_build_version = "not installed"
    except Exception as e:
        conda_build_version = "Error %s" % e
    else:
        conda_build_version = conda_build.__version__

    info_dict = dict(
        platform=config.subdir,
        conda_version=conda.__version__,
        conda_build_version=conda_build_version,
        root_prefix=config.root_dir,
        root_writable=config.root_writable,
        pkgs_dirs=config.pkgs_dirs,
        envs_dirs=config.envs_dirs,
        default_prefix=config.default_prefix,
        channels=config.get_channel_urls(),
        rc_path=config.rc_path,
        is_foreign=bool(config.foreign),
        envs=[],
        python_version='.'.join(map(str, sys.version_info)),
        requests_version=requests_version,
    )

    if args.all or args.json:
        for option in options:
            setattr(args, option, True)

    info_dict['channels_disp'] = [
        config.hide_binstar_tokens(c) for c in info_dict['channels']
    ]

    if args.all or all(not getattr(args, opt) for opt in options):
        for key in 'pkgs_dirs', 'envs_dirs', 'channels_disp':
            info_dict['_' + key] = ('\n' + 24 * ' ').join(info_dict[key])
        info_dict['_rtwro'] = ('writable'
                               if info_dict['root_writable'] else 'read only')
        print("""\
Current conda install:

             platform : %(platform)s
        conda version : %(conda_version)s
  conda-build version : %(conda_build_version)s
       python version : %(python_version)s
     requests version : %(requests_version)s
     root environment : %(root_prefix)s  (%(_rtwro)s)
  default environment : %(default_prefix)s
     envs directories : %(_envs_dirs)s
        package cache : %(_pkgs_dirs)s
         channel URLs : %(_channels_disp)s
          config file : %(rc_path)s
    is foreign system : %(is_foreign)s
""" % info_dict)
        if not is_initialized():
            print("""\
# NOTE:
#     root directory '%s' is uninitialized""" % config.root_dir)

    del info_dict['channels_disp']

    if args.envs:
        if not args.json:
            print("# conda environments:")
            print("#")

        def disp_env(prefix):
            fmt = '%-20s  %s  %s'
            default = '*' if prefix == config.default_prefix else ' '
            name = (config.root_env_name
                    if prefix == config.root_dir else basename(prefix))
            if not args.json:
                print(fmt % (name, default, prefix))

        for prefix in misc.list_prefixes():
            disp_env(prefix)
            if prefix != config.root_dir:
                info_dict['envs'].append(prefix)

        print()

    if args.system and not args.json:
        from conda.cli.find_commands import find_commands, find_executable

        print("sys.version: %s..." % (sys.version[:40]))
        print("sys.prefix: %s" % sys.prefix)
        print("sys.executable: %s" % sys.executable)
        print("conda location: %s" % dirname(conda.__file__))
        for cmd in sorted(set(find_commands() + ['build'])):
            print("conda-%s: %s" % (cmd, find_executable('conda-' + cmd)))
        print("user site dirs: ", end='')
        site_dirs = get_user_site()
        if site_dirs:
            print(site_dirs[0])
        else:
            print()
        for site_dir in site_dirs[1:]:
            print('                %s' % site_dir)
        print()

        evars = [
            'PATH', 'PYTHONPATH', 'PYTHONHOME', 'CONDA_DEFAULT_ENV',
            'CIO_TEST', 'CONDA_ENVS_PATH'
        ]
        if config.platform == 'linux':
            evars.append('LD_LIBRARY_PATH')
        elif config.platform == 'osx':
            evars.append('DYLD_LIBRARY_PATH')
        for ev in sorted(evars):
            print("%s: %s" % (ev, os.getenv(ev, '<not set>')))
        print()

    if args.license and not args.json:
        try:
            from _license import show_info
            show_info()
        except ImportError:
            print("""\
WARNING: could not import _license.show_info
# try:
# $ conda install -n root _license""")

    if args.json:
        common.stdout_json(info_dict)
Beispiel #50
0
def test_nonexistent_deps():
    index2 = index.copy()
    index2['mypackage-1.0-py33_0.tar.bz2'] = {
        'build': 'py33_0',
        'build_number': 0,
        'depends': ['nose', 'python 3.3*', 'notarealpackage 2.0*'],
        'name': 'mypackage',
        'requires': ['nose 1.2.1', 'python 3.3'],
        'version': '1.0',
    }
    index2['mypackage-1.1-py33_0.tar.bz2'] = {
        'build': 'py33_0',
        'build_number': 0,
        'depends': ['nose', 'python 3.3*'],
        'name': 'mypackage',
        'requires': ['nose 1.2.1', 'python 3.3'],
        'version': '1.1',
    }
    index2['anotherpackage-1.0-py33_0.tar.bz2'] = {
        'build': 'py33_0',
        'build_number': 0,
        'depends': ['nose', 'mypackage 1.1'],
        'name': 'anotherpackage',
        'requires': ['nose', 'mypackage 1.1'],
        'version': '1.0',
    }
    index2['anotherpackage-2.0-py33_0.tar.bz2'] = {
        'build': 'py33_0',
        'build_number': 0,
        'depends': ['nose', 'mypackage'],
        'name': 'anotherpackage',
        'requires': ['nose', 'mypackage'],
        'version': '2.0',
    }
    r = Resolve(index2)

    assert set(r.find_matches(MatchSpec('mypackage'))) == {
        'mypackage-1.0-py33_0.tar.bz2',
        'mypackage-1.1-py33_0.tar.bz2',
    }
    assert set(r.get_dists(['mypackage']).keys()) == {
        'mypackage-1.1-py33_0.tar.bz2',
        'nose-1.1.2-py26_0.tar.bz2',
        'nose-1.1.2-py27_0.tar.bz2',
        'nose-1.1.2-py33_0.tar.bz2',
        'nose-1.2.1-py26_0.tar.bz2',
        'nose-1.2.1-py27_0.tar.bz2',
        'nose-1.2.1-py33_0.tar.bz2',
        'nose-1.3.0-py26_0.tar.bz2',
        'nose-1.3.0-py27_0.tar.bz2',
        'nose-1.3.0-py33_0.tar.bz2',
        'openssl-1.0.1c-0.tar.bz2',
        'python-2.6.8-1.tar.bz2',
        'python-2.6.8-2.tar.bz2',
        'python-2.6.8-3.tar.bz2',
        'python-2.6.8-4.tar.bz2',
        'python-2.6.8-5.tar.bz2',
        'python-2.6.8-6.tar.bz2',
        'python-2.7.3-2.tar.bz2',
        'python-2.7.3-3.tar.bz2',
        'python-2.7.3-4.tar.bz2',
        'python-2.7.3-5.tar.bz2',
        'python-2.7.3-6.tar.bz2',
        'python-2.7.3-7.tar.bz2',
        'python-2.7.4-0.tar.bz2',
        'python-2.7.5-0.tar.bz2',
        'python-3.3.0-2.tar.bz2',
        'python-3.3.0-3.tar.bz2',
        'python-3.3.0-4.tar.bz2',
        'python-3.3.0-pro0.tar.bz2',
        'python-3.3.0-pro1.tar.bz2',
        'python-3.3.1-0.tar.bz2',
        'python-3.3.2-0.tar.bz2',
        'readline-6.2-0.tar.bz2',
        'sqlite-3.7.13-0.tar.bz2',
        'system-5.8-0.tar.bz2',
        'system-5.8-1.tar.bz2',
        'tk-8.5.13-0.tar.bz2',
        'zlib-1.2.7-0.tar.bz2',
    }

    assert set(r.get_dists(['mypackage'], max_only=True).keys()) == {
        'mypackage-1.1-py33_0.tar.bz2',
        'nose-1.3.0-py26_0.tar.bz2',
        'nose-1.3.0-py27_0.tar.bz2',
        'nose-1.3.0-py33_0.tar.bz2',
        'openssl-1.0.1c-0.tar.bz2',
        'python-2.6.8-6.tar.bz2',
        'python-2.7.5-0.tar.bz2',
        'python-3.3.2-0.tar.bz2',
        'readline-6.2-0.tar.bz2',
        'sqlite-3.7.13-0.tar.bz2',
        'system-5.8-1.tar.bz2',
        'tk-8.5.13-0.tar.bz2',
        'zlib-1.2.7-0.tar.bz2',
    }

    assert r.solve(['mypackage']) == r.solve(['mypackage 1.1']) == [
        'mypackage-1.1-py33_0.tar.bz2',
        'nose-1.3.0-py33_0.tar.bz2',
        'openssl-1.0.1c-0.tar.bz2',
        'python-3.3.2-0.tar.bz2',
        'readline-6.2-0.tar.bz2',
        'sqlite-3.7.13-0.tar.bz2',
        'system-5.8-1.tar.bz2',
        'tk-8.5.13-0.tar.bz2',
        'zlib-1.2.7-0.tar.bz2',
    ]
    assert raises(NoPackagesFound, lambda: r.solve(['mypackage 1.0']))

    assert r.solve(['anotherpackage 1.0']) == [
        'anotherpackage-1.0-py33_0.tar.bz2',
        'mypackage-1.1-py33_0.tar.bz2',
        'nose-1.3.0-py33_0.tar.bz2',
        'openssl-1.0.1c-0.tar.bz2',
        'python-3.3.2-0.tar.bz2',
        'readline-6.2-0.tar.bz2',
        'sqlite-3.7.13-0.tar.bz2',
        'system-5.8-1.tar.bz2',
        'tk-8.5.13-0.tar.bz2',
        'zlib-1.2.7-0.tar.bz2',
    ]

    assert r.solve(['anotherpackage']) == [
        'anotherpackage-2.0-py33_0.tar.bz2',
        'mypackage-1.1-py33_0.tar.bz2',
        'nose-1.3.0-py33_0.tar.bz2',
        'openssl-1.0.1c-0.tar.bz2',
        'python-3.3.2-0.tar.bz2',
        'readline-6.2-0.tar.bz2',
        'sqlite-3.7.13-0.tar.bz2',
        'system-5.8-1.tar.bz2',
        'tk-8.5.13-0.tar.bz2',
        'zlib-1.2.7-0.tar.bz2',
    ]

    # This time, the latest version is messed up
    index3 = index.copy()
    index3['mypackage-1.1-py33_0.tar.bz2'] = {
        'build': 'py33_0',
        'build_number': 0,
        'depends': ['nose', 'python 3.3*', 'notarealpackage 2.0*'],
        'name': 'mypackage',
        'requires': ['nose 1.2.1', 'python 3.3'],
        'version': '1.1',
    }
    index3['mypackage-1.0-py33_0.tar.bz2'] = {
        'build': 'py33_0',
        'build_number': 0,
        'depends': ['nose', 'python 3.3*'],
        'name': 'mypackage',
        'requires': ['nose 1.2.1', 'python 3.3'],
        'version': '1.0',
    }
    index3['anotherpackage-1.0-py33_0.tar.bz2'] = {
        'build': 'py33_0',
        'build_number': 0,
        'depends': ['nose', 'mypackage 1.0'],
        'name': 'anotherpackage',
        'requires': ['nose', 'mypackage 1.0'],
        'version': '1.0',
    }
    index3['anotherpackage-2.0-py33_0.tar.bz2'] = {
        'build': 'py33_0',
        'build_number': 0,
        'depends': ['nose', 'mypackage'],
        'name': 'anotherpackage',
        'requires': ['nose', 'mypackage'],
        'version': '2.0',
    }
    r = Resolve(index3)

    assert set(r.find_matches(MatchSpec('mypackage'))) == {
        'mypackage-1.0-py33_0.tar.bz2',
        'mypackage-1.1-py33_0.tar.bz2',
        }
    assert set(r.get_dists(['mypackage']).keys()) == {
        'mypackage-1.0-py33_0.tar.bz2',
        'nose-1.1.2-py26_0.tar.bz2',
        'nose-1.1.2-py27_0.tar.bz2',
        'nose-1.1.2-py33_0.tar.bz2',
        'nose-1.2.1-py26_0.tar.bz2',
        'nose-1.2.1-py27_0.tar.bz2',
        'nose-1.2.1-py33_0.tar.bz2',
        'nose-1.3.0-py26_0.tar.bz2',
        'nose-1.3.0-py27_0.tar.bz2',
        'nose-1.3.0-py33_0.tar.bz2',
        'openssl-1.0.1c-0.tar.bz2',
        'python-2.6.8-1.tar.bz2',
        'python-2.6.8-2.tar.bz2',
        'python-2.6.8-3.tar.bz2',
        'python-2.6.8-4.tar.bz2',
        'python-2.6.8-5.tar.bz2',
        'python-2.6.8-6.tar.bz2',
        'python-2.7.3-2.tar.bz2',
        'python-2.7.3-3.tar.bz2',
        'python-2.7.3-4.tar.bz2',
        'python-2.7.3-5.tar.bz2',
        'python-2.7.3-6.tar.bz2',
        'python-2.7.3-7.tar.bz2',
        'python-2.7.4-0.tar.bz2',
        'python-2.7.5-0.tar.bz2',
        'python-3.3.0-2.tar.bz2',
        'python-3.3.0-3.tar.bz2',
        'python-3.3.0-4.tar.bz2',
        'python-3.3.0-pro0.tar.bz2',
        'python-3.3.0-pro1.tar.bz2',
        'python-3.3.1-0.tar.bz2',
        'python-3.3.2-0.tar.bz2',
        'readline-6.2-0.tar.bz2',
        'sqlite-3.7.13-0.tar.bz2',
        'system-5.8-0.tar.bz2',
        'system-5.8-1.tar.bz2',
        'tk-8.5.13-0.tar.bz2',
        'zlib-1.2.7-0.tar.bz2',
    }

    assert raises(NoPackagesFound, lambda: r.get_dists(['mypackage'], max_only=True))

    assert r.solve(['mypackage']) == r.solve(['mypackage 1.0']) == [
        'mypackage-1.0-py33_0.tar.bz2',
        'nose-1.3.0-py33_0.tar.bz2',
        'openssl-1.0.1c-0.tar.bz2',
        'python-3.3.2-0.tar.bz2',
        'readline-6.2-0.tar.bz2',
        'sqlite-3.7.13-0.tar.bz2',
        'system-5.8-1.tar.bz2',
        'tk-8.5.13-0.tar.bz2',
        'zlib-1.2.7-0.tar.bz2',
    ]
    assert raises(NoPackagesFound, lambda: r.solve(['mypackage 1.1']))


    assert r.solve(['anotherpackage 1.0']) == [
        'anotherpackage-1.0-py33_0.tar.bz2',
        'mypackage-1.0-py33_0.tar.bz2',
        'nose-1.3.0-py33_0.tar.bz2',
        'openssl-1.0.1c-0.tar.bz2',
        'python-3.3.2-0.tar.bz2',
        'readline-6.2-0.tar.bz2',
        'sqlite-3.7.13-0.tar.bz2',
        'system-5.8-1.tar.bz2',
        'tk-8.5.13-0.tar.bz2',
        'zlib-1.2.7-0.tar.bz2',
    ]

    # If recursive checking is working correctly, this will give
    # anotherpackage 2.0, not anotherpackage 1.0
    assert r.solve(['anotherpackage']) == [
        'anotherpackage-2.0-py33_0.tar.bz2',
        'mypackage-1.0-py33_0.tar.bz2',
        'nose-1.3.0-py33_0.tar.bz2',
        'openssl-1.0.1c-0.tar.bz2',
        'python-3.3.2-0.tar.bz2',
        'readline-6.2-0.tar.bz2',
        'sqlite-3.7.13-0.tar.bz2',
        'system-5.8-1.tar.bz2',
        'tk-8.5.13-0.tar.bz2',
        'zlib-1.2.7-0.tar.bz2',
    ]
Beispiel #51
0
def test_generate_eq():
    specs = ['anaconda']
    dists, specs = r.get_dists(specs)
    r2 = Resolve(dists, True, True)
    C = r2.gen_clauses(specs)
    eqv, eqb = r2.generate_version_metrics(C, specs)
    # Should satisfy the following criteria:
    # - lower versions of the same package should have higher
    #   coefficients.
    # - the same versions of the same package (e.g., different build strings)
    #   should have the same coefficients.
    # - a package that only has one version should not appear, unless
    #   include=True as it will have a 0 coefficient. The same is true of the
    #   latest version of a package.
    assert eqv == {
        'astropy-0.2-np15py26_0.tar.bz2': 1,
        'astropy-0.2-np16py26_0.tar.bz2': 1,
        'astropy-0.2-np17py26_0.tar.bz2': 1,
        'astropy-0.2-np17py33_0.tar.bz2': 1,
        'bitarray-0.8.0-py26_0.tar.bz2': 1,
        'bitarray-0.8.0-py33_0.tar.bz2': 1,
        'cython-0.18-py26_0.tar.bz2': 1,
        'cython-0.18-py33_0.tar.bz2': 1,
        'distribute-0.6.34-py26_1.tar.bz2': 1,
        'distribute-0.6.34-py33_1.tar.bz2': 1,
        'ipython-0.13.1-py26_1.tar.bz2': 1,
        'ipython-0.13.1-py33_1.tar.bz2': 1,
        'llvmpy-0.11.1-py26_0.tar.bz2': 1,
        'llvmpy-0.11.1-py33_0.tar.bz2': 1,
        'lxml-3.0.2-py26_0.tar.bz2': 1,
        'lxml-3.0.2-py33_0.tar.bz2': 1,
        'matplotlib-1.2.0-np15py26_1.tar.bz2': 1,
        'matplotlib-1.2.0-np16py26_1.tar.bz2': 1,
        'matplotlib-1.2.0-np17py26_1.tar.bz2': 1,
        'matplotlib-1.2.0-np17py33_1.tar.bz2': 1,
        'nose-1.2.1-py26_0.tar.bz2': 1,
        'nose-1.2.1-py33_0.tar.bz2': 1,
        'numpy-1.5.1-py26_3.tar.bz2': 3,
        'numpy-1.6.2-py26_3.tar.bz2': 2,
        'numpy-1.6.2-py26_4.tar.bz2': 2,
        'numpy-1.6.2-py27_4.tar.bz2': 2,
        'numpy-1.7.0-py26_0.tar.bz2': 1,
        'numpy-1.7.0-py33_0.tar.bz2': 1,
        'pip-1.2.1-py26_1.tar.bz2': 1,
        'pip-1.2.1-py33_1.tar.bz2': 1,
        'psutil-0.6.1-py26_0.tar.bz2': 1,
        'psutil-0.6.1-py33_0.tar.bz2': 1,
        'pyflakes-0.6.1-py26_0.tar.bz2': 1,
        'pyflakes-0.6.1-py33_0.tar.bz2': 1,
        'python-2.6.8-6.tar.bz2': 3,
        'python-2.7.4-0.tar.bz2': 2,
        'python-3.3.0-4.tar.bz2': 1,
        'pytz-2012j-py26_0.tar.bz2': 1,
        'pytz-2012j-py33_0.tar.bz2': 1,
        'requests-0.13.9-py26_0.tar.bz2': 1,
        'requests-0.13.9-py33_0.tar.bz2': 1,
        'scipy-0.11.0-np15py26_3.tar.bz2': 1,
        'scipy-0.11.0-np16py26_3.tar.bz2': 1,
        'scipy-0.11.0-np17py26_3.tar.bz2': 1,
        'scipy-0.11.0-np17py33_3.tar.bz2': 1,
        'six-1.2.0-py26_0.tar.bz2': 1,
        'six-1.2.0-py33_0.tar.bz2': 1,
        'sqlalchemy-0.7.8-py26_0.tar.bz2': 1,
        'sqlalchemy-0.7.8-py33_0.tar.bz2': 1,
        'tornado-2.4.1-py26_0.tar.bz2': 1,
        'tornado-2.4.1-py33_0.tar.bz2': 1,
        'xlrd-0.9.0-py26_0.tar.bz2': 1,
        'xlrd-0.9.0-py33_0.tar.bz2': 1
    }
    assert eqb == {
        'dateutil-2.1-py26_0.tar.bz2': 1,
        'dateutil-2.1-py33_0.tar.bz2': 1,
        'numpy-1.6.2-py26_3.tar.bz2': 1,
        'pyzmq-2.2.0.1-py26_0.tar.bz2': 1,
        'pyzmq-2.2.0.1-py33_0.tar.bz2': 1,
        'sphinx-1.1.3-py26_2.tar.bz2': 1,
        'sphinx-1.1.3-py33_2.tar.bz2': 1,
        'system-5.8-0.tar.bz2': 1,
        'zeromq-2.2.0-0.tar.bz2': 1
    }
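The criteria listed in the comments of test_generate_eq can be illustrated with a small standalone sketch (invented data, not the conda implementation): rank a package's distinct versions from newest to oldest and give older versions larger coefficients, so equal versions share a coefficient and the newest version drops out of the mapping.

# Standalone illustration of the version-metric criteria, using made-up files.
def version_coefficients(files):
    """Map filename -> coefficient; the newest version gets 0 and is omitted."""
    versions = sorted({v for v in files.values()},
                      key=lambda v: tuple(int(x) for x in v.split('.')),
                      reverse=True)
    rank = {v: i for i, v in enumerate(versions)}
    return {fn: rank[v] for fn, v in files.items() if rank[v] > 0}

print(version_coefficients({
    'foo-1.0-0.tar.bz2': '1.0',
    'foo-1.1-0.tar.bz2': '1.1',   # two builds of the same version...
    'foo-1.1-1.tar.bz2': '1.1',   # ...get the same coefficient
    'foo-2.0-0.tar.bz2': '2.0',   # newest version: coefficient 0, omitted
}))
# -> {'foo-1.0-0.tar.bz2': 2, 'foo-1.1-0.tar.bz2': 1, 'foo-1.1-1.tar.bz2': 1}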
Beispiel #52
0
def remove_actions(prefix, specs, index, force=False, pinned=True):
    r = Resolve(index)
    linked = [d + '.tar.bz2' for d in install.linked(prefix)]
    mss = list(map(MatchSpec, specs))

    if force:
        nlinked = {
            r.package_name(fn): fn[:-8]
            for fn in linked if not any(r.match(ms, fn) for ms in mss)
        }
    else:
        if pinned:
            pinned_specs = get_pinned_specs(prefix)
            log.debug("Pinned specs=%s" % pinned_specs)
        if config.track_features:
            specs.extend(x + '@' for x in config.track_features)
        nlinked = {
            r.package_name(fn): fn[:-8]
            for fn in r.remove(specs, linked)
        }
    linked = {r.package_name(fn): fn[:-8] for fn in linked}

    actions = ensure_linked_actions(r.dependency_sort(nlinked), prefix)
    for old_fn in reversed(r.dependency_sort(linked)):
        dist = old_fn + '.tar.bz2'
        name = r.package_name(dist)
        if old_fn == nlinked.get(name, ''):
            continue
        if pinned and any(r.match(ms, dist) for ms in pinned_specs):
            raise RuntimeError(
                "Cannot remove %s because it is pinned. Use --no-pin to override."
                % dist)
        if name == 'conda' and name not in nlinked:
            if any(ms.name == 'conda' for ms in mss):
                sys.exit(
                    "Error: 'conda' cannot be removed from the root environment"
                )
            else:
                sys.exit(
                    "Error: this 'remove' command cannot be executed because it\nwould require removing 'conda' dependencies."
                )
        add_unlink(actions, old_fn)

    return actions
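And a hedged counterpart for removal (placeholder prefix again): build the removal plan for one package and execute it, much as `conda remove` does.

import conda.plan as plan
from conda.api import get_index

index = get_index()
actions = plan.remove_actions('/opt/envs/analysis',    # hypothetical environment prefix
                              ['pandas'], index, pinned=True)
plan.execute_actions(actions, index, verbose=True)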
Beispiel #53
0
def clone_env(prefix1, prefix2, verbose=True, quiet=False, fetch_args=None):
    """
    clone existing prefix1 into new prefix2
    """
    untracked_files = untracked(prefix1)

    # Discard conda and any package that depends on it
    drecs = install.linked_data(prefix1)
    filter = {}
    found = True
    while found:
        found = False
        for dist, info in iteritems(drecs):
            name = info['name']
            if name in filter:
                continue
            if name == 'conda':
                filter['conda'] = dist
                found = True
                break
            for dep in info.get('depends', []):
                if MatchSpec(dep).name in filter:
                    filter[name] = dist
                    found = True
    if not quiet and filter:
        print(
            'The following packages cannot be cloned out of the root environment:'
        )
        for pkg in itervalues(filter):
            print(' - ' + pkg)

    # Assemble the URL and channel list
    urls = {}
    index = {}
    for dist, info in iteritems(drecs):
        if info['name'] in filter:
            continue
        url = info.get('url')
        if url is None:
            sys.exit('Error: no URL found for package: %s' % dist)
        _, schannel = url_channel(url)
        index[dist + '.tar.bz2'] = info
        urls[dist] = url

    r = Resolve(index)
    dists = r.dependency_sort(urls.keys())
    urls = [urls[d] for d in dists]

    if verbose:
        print('Packages: %d' % len(dists))
        print('Files: %d' % len(untracked_files))

    for f in untracked_files:
        src = join(prefix1, f)
        dst = join(prefix2, f)
        dst_dir = dirname(dst)
        if islink(dst_dir) or isfile(dst_dir):
            os.unlink(dst_dir)
        if not isdir(dst_dir):
            os.makedirs(dst_dir)
        if islink(src):
            os.symlink(os.readlink(src), dst)
            continue

        try:
            with open(src, 'rb') as fi:
                data = fi.read()
        except IOError:
            continue

        try:
            s = data.decode('utf-8')
            s = s.replace(prefix1, prefix2)
            data = s.encode('utf-8')
        except UnicodeDecodeError:  # data is binary
            pass

        with open(dst, 'wb') as fo:
            fo.write(data)
        shutil.copystat(src, dst)

    actions = explicit(urls,
                       prefix2,
                       verbose=not quiet,
                       force_extract=False,
                       fetch_args=fetch_args)
    return actions, untracked_files
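A brief hedged sketch of calling the cloning helper above; both prefixes are placeholders, and this is essentially what `conda create --clone` drives.

# Sketch only: copy an existing environment into a new prefix.
actions, untracked = clone_env('/opt/envs/analysis',        # source prefix (hypothetical)
                               '/opt/envs/analysis-copy',   # target prefix (hypothetical)
                               verbose=True, quiet=False)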
Beispiel #54
0
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create
    """
    newenv = bool(command == 'create')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix, json=args.json)
    if config.force_32bit and plan.is_root_prefix(prefix):
        common.error_and_exit("cannot use CONDA_FORCE_32BIT=1 in root env")

    if command == 'update':
        if not args.file:
            if not args.all and len(args.packages) == 0:
                common.error_and_exit("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix,
                                      json=args.json,
                                      error_type="ValueError")

    if command == 'update' and not args.all:
        linked = ci.linked(prefix)
        for name in args.packages:
            common.arg2spec(name, json=args.json)
            if '=' in name:
                common.error_and_exit("Invalid package name: '%s'" % (name),
                                      json=args.json,
                                      error_type="ValueError")
            if name not in set(ci.name_dist(d) for d in linked):
                common.error_and_exit("package '%s' is not installed in %s" %
                                      (name, prefix),
                                      json=args.json,
                                      error_type="ValueError")

    if newenv and not args.no_default_packages:
        default_packages = config.create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in config.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)
    else:
        default_packages = []

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    specs = []
    if args.file:
        for fpath in args.file:
            specs.extend(common.specs_from_url(fpath, json=args.json))
    elif getattr(args, 'all', False):
        linked = ci.linked(prefix)
        if not linked:
            common.error_and_exit("There are no packages installed in the "
                "prefix %s" % prefix)
        for pkg in linked:
            name, ver, build = pkg.rsplit('-', 2)
            if name in getattr(args, '_skip', ['anaconda']):
                continue
            if name == 'python' and ver.startswith('2'):
                # Oh Python 2...
                specs.append('%s >=%s,<3' % (name, ver))
            else:
                specs.append('%s' % name)
    specs.extend(common.specs_from_args(args.packages, json=args.json))

    if command == 'install' and args.revision:
        get_revision(args.revision, json=args.json)
    elif not (newenv and args.clone):
        common.check_specs(prefix, specs, json=args.json,
                           create=(command == 'create'))


    num_cp = sum(s.endswith('.tar.bz2') for s in args.packages)
    if num_cp:
        if num_cp == len(args.packages):
            depends = misc.install_local_packages(prefix, args.packages,
                                                  verbose=not args.quiet)
            if args.no_deps:
                depends = []
            specs = list(set(depends))
            args.unknown = True
        else:
            common.error_and_exit(
                "cannot mix specifications with conda package filenames",
                json=args.json,
                error_type="ValueError")

    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            depends = install_tar(prefix, tar_path, verbose=not args.quiet)
            if args.no_deps:
                depends = []
            specs = list(set(depends))
            args.unknown = True

    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit(
                    "you need to have 'conda-build >= 1.7.1' installed"
                    " to use the --use-local option",
                    json=args.json,
                    error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        if exists(croot):
            channel_urls = [url_path(croot)] + list(channel_urls)

    index = common.get_index_trap(channel_urls=channel_urls,
                                  prepend=not args.override_channels,
                                  use_cache=args.use_index_cache,
                                  unknown=args.unknown,
                                  json=args.json,
                                  offline=args.offline)

    if newenv and args.clone:
        if set(args.packages) - set(default_packages):
            common.error_and_exit('did not expect any arguments for --clone',
                                  json=args.json,
                                  error_type="ValueError")
        clone(args.clone, prefix, json=args.json, quiet=args.quiet, index=index)
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)
        return

    # Don't update packages that are already up-to-date
    if command == 'update' and not (args.all or args.force):
        r = Resolve(index)
        orig_packages = args.packages[:]
        for name in orig_packages:
            installed_metadata = [ci.is_linked(prefix, dist)
                                  for dist in linked]
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            build_inst = [m['build_number'] for m in installed_metadata if
                          m['name'] == name]

            try:
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
            except AssertionError as e:
                if args.json:
                    common.exception_and_exit(e, json=True)
                else:
                    raise

            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]

            if (latest.version == vers_inst[0] and
                       latest.build_number == build_inst[0]):
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import print_packages

            if not args.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(
                    message='All requested packages already installed.')
            return

    if args.force:
        args.no_deps = True

    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                common.error_and_exit("Error: could not create directory: %s" % prefix,
                                      json=args.json,
                                      error_type="OSError")
        else:
            common.error_and_exit("""\
environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix,
                                  json=args.json,
                                  error_type="NoEnvironmentFound")

    try:
        if command == 'install' and args.revision:
            actions = plan.revert_actions(prefix, get_revision(args.revision))
        else:
            with common.json_progress_bars(json=args.json and not args.quiet):

                actions = plan.install_actions(prefix, index, specs,
                                               force=args.force,
                                               only_names=only_names,
                                               pinned=args.pinned,
                                               minimal_hint=args.alt_hint,
                                               update_deps=args.update_deps)
            if config.always_copy or args.copy:
                new_link = []
                for pkg in actions["LINK"]:
                    dist, pkgs_dir, lt = inst.split_linkarg(pkg)
                    lt = ci.LINK_COPY
                    new_link.append("%s %s %d" % (dist, pkgs_dir, lt))
                actions["LINK"] = new_link
    except NoPackagesFound as e:
        error_message = e.args[0]

        if command == 'update' and args.all:
            # Packages not found here just means they were installed but
            # cannot be found any more. Just skip them.
            if not args.json:
                print("Warning: %s, skipping" % error_message)
            else:
                # Not sure what to do here
                pass
            args._skip = getattr(args, '_skip', ['anaconda'])
            for pkg in e.pkgs:
                p = pkg.split()[0]
                if p in args._skip:
                    # Avoid infinite recursion. This can happen if a spec
                    # comes from elsewhere, like --file
                    raise
                args._skip.append(p)

            return install(args, parser, command=command)
        else:
            packages = {index[fn]['name'] for fn in index}

            for pkg in e.pkgs:
                close = get_close_matches(pkg, packages, cutoff=0.7)
                if close:
                    error_message += ("\n\nDid you mean one of these?"
                                      "\n\n    %s" % (', '.join(close)))
            error_message += '\n\nYou can search for this package on anaconda.org with'
            error_message += '\n\n    anaconda search -t conda %s' % pkg
            if len(e.pkgs) > 1:
                # Note this currently only happens with dependencies not found
                error_message += '\n\n (and similarly for the other packages)'

            if not find_executable('anaconda', include_others=False):
                error_message += '\n\nYou may need to install the anaconda-client command line client with'
                error_message += '\n\n    conda install anaconda-client'

            pinned_specs = plan.get_pinned_specs(prefix)
            if pinned_specs:
                error_message += "\n\nNote that you have pinned specs in %s:" % join(prefix, 'conda-meta', 'pinned')
                error_message += "\n\n    %r" % pinned_specs

            common.error_and_exit(error_message, json=args.json)
    except SystemExit as e:
        # Unsatisfiable package specifications/no such revision/import error
        error_type = 'UnsatisfiableSpecifications'
        if e.args and 'could not import' in e.args[0]:
            error_type = 'ImportError'
        common.exception_and_exit(e, json=args.json, newline=True,
                                  error_text=False,
                                  error_type=error_type)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import print_packages

        if not args.json:
            regex = '^(%s)$' % '|'.join(spec_names)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(
                message='All requested packages already installed.')
        return

    if not args.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        plan.display_actions(actions, index, show_channel_urls=args.show_channel_urls)

    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    if not args.json:
        common.confirm_yn(args)
    elif args.dry_run:
        common.stdout_json_success(actions=actions, dry_run=True)
        sys.exit(0)

    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
            if not (command == 'update' and args.all):
                try:
                    with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                        f.write('# %s specs: %s\n' % (command, specs))
                except IOError as e:
                    if e.errno == errno.EACCES:
                        log.debug("Can't write the history file")
                    else:
                        raise

        except RuntimeError as e:
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            common.exception_and_exit(e, error_type=error_type, json=args.json)
        except SystemExit as e:
            common.exception_and_exit(e, json=args.json)

    if newenv:
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)

    if args.json:
        common.stdout_json_success(actions=actions)
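
The "Did you mean one of these?" hint above comes from difflib.get_close_matches with a 0.7 cutoff. A minimal, self-contained sketch of that lookup, using hypothetical package names:

from difflib import get_close_matches

available = {'numpy', 'numexpr', 'numba', 'pandas'}   # hypothetical index names
requested = 'nunpy'                                   # hypothetical misspelled spec
close = get_close_matches(requested, available, cutoff=0.7)
if close:
    print("Did you mean one of these?\n\n    %s" % ', '.join(close))
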
Beispiel #55
def execute_search(args, parser):
    import re
    from conda.resolve import Resolve

    if args.reverse_dependency:
        if not args.regex:
            parser.error(
                "--reverse-dependency requires at least one package name")
        if args.spec:
            parser.error("--reverse-dependency does not work with --spec")

    pat = None
    ms = None
    if args.regex:
        if args.spec:
            ms = ' '.join(args.regex.split('='))
        else:
            regex = args.regex
            if args.full_name:
                regex = r'^%s$' % regex
            try:
                pat = re.compile(regex, re.I)
            except re.error as e:
                raise CommandArgumentError(
                    "Failed to compile regex pattern for "
                    "search: %(regex)s\n"
                    "regex error: %(regex_error)s",
                    regex=regex,
                    regex_error=repr(e))

    prefix = context.prefix_w_legacy_search

    from ..core.linked_data import linked as linked_data
    from ..core.package_cache import PackageCache

    linked = linked_data(prefix)
    extracted = set(pc_entry.dist.name
                    for pc_entry in PackageCache.get_all_extracted_entries())

    # XXX: Make this work with more than one platform
    platform = args.platform or ''
    if platform and platform != context.subdir:
        args.unknown = False
    ensure_use_local(args)
    ensure_override_channels_requires_channel(args, dashc=False)
    channel_urls = args.channel or ()
    index = get_index(channel_urls=channel_urls,
                      prepend=not args.override_channels,
                      platform=args.platform,
                      use_local=args.use_local,
                      use_cache=args.use_index_cache,
                      prefix=None,
                      unknown=args.unknown)

    r = Resolve(index)

    if args.canonical:
        json = []
    else:
        json = {}

    names = []
    for name in sorted(r.groups):
        if '@' in name:
            continue
        if args.reverse_dependency:
            ms_name = ms
            for pkg in r.groups[name]:
                for dep in r.ms_depends(pkg):
                    if pat.search(dep.name):
                        names.append((name, Package(pkg, r.index[pkg])))
        else:
            if pat and pat.search(name) is None:
                continue
            if ms and name != ms.split()[0]:
                continue

            if ms:
                ms_name = ms
            else:
                ms_name = name

            pkgs = sorted(r.get_pkgs(ms_name))
            names.append((name, pkgs))

    if args.reverse_dependency:
        new_names = []
        old = None
        for name, pkg in sorted(names, key=lambda x: (x[0], x[1].name, x[1])):
            if name == old:
                new_names[-1][1].append(pkg)
            else:
                new_names.append((name, [pkg]))
            old = name
        names = new_names

    for name, pkgs in names:
        if args.reverse_dependency:
            disp_name = pkgs[0].name
        else:
            disp_name = name

        if args.names_only and not args.outdated:
            print(name)
            continue

        if not args.canonical:
            json[name] = []

        if args.outdated:
            vers_inst = [
                dist.quad[1] for dist in linked if dist.quad[0] == name
            ]
            if not vers_inst:
                continue
            assert len(vers_inst) == 1, name
            if not pkgs:
                continue
            latest = pkgs[-1]
            if latest.version == vers_inst[0]:
                continue
            if args.names_only:
                print(name)
                continue

        for pkg in pkgs:
            dist = Dist(pkg)
            if args.canonical:
                if not context.json:
                    print(dist.dist_name)
                else:
                    json.append(dist.dist_name)
                continue
            if platform and platform != context.subdir:
                inst = ' '
            elif dist in linked:
                inst = '*'
            elif dist in extracted:
                inst = '.'
            else:
                inst = ' '

            features = r.features(dist)

            if not context.json:
                print('%-25s %s  %-15s %15s  %-15s %s' % (
                    disp_name,
                    inst,
                    pkg.version,
                    pkg.build,
                    pkg.schannel,
                    disp_features(features),
                ))
                disp_name = ''
            else:
                data = {}
                data.update(pkg.info.dump())
                data.update({
                    'fn': pkg.fn,
                    'installed': inst == '*',
                    'extracted': inst in '*.',
                    'version': pkg.version,
                    'build': pkg.build,
                    'build_number': pkg.build_number,
                    'channel': pkg.schannel,
                    'full_channel': pkg.channel,
                    'features': list(features),
                    'license': pkg.info.get('license'),
                    'size': pkg.info.get('size'),
                    'depends': pkg.info.get('depends'),
                    'type': pkg.info.get('type')
                })

                if data['type'] == 'app':
                    data['icon'] = make_icon_url(pkg.info)
                json[name].append(data)

    if context.json:
        stdout_json(json)
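
The --reverse-dependency branch above first collects (name, package) pairs and then folds consecutive pairs that share a name into (name, [packages]) entries. A minimal sketch of the same grouping with itertools.groupby, using hypothetical dist strings in place of Package objects:

from itertools import groupby

pairs = [('pandas', 'numpy-1.10.4-py27_0'),
         ('scipy', 'numpy-1.9.2-py27_0'),
         ('scipy', 'numpy-1.10.4-py27_0')]    # hypothetical (name, dist) pairs
grouped = [(name, [dist for _, dist in grp])
           for name, grp in groupby(sorted(pairs), key=lambda x: x[0])]
print(grouped)    # [('pandas', [...]), ('scipy', [...])]
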
Beispiel #56
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create
    """

    newenv = command == 'create'
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix)
    config.set_pkgs_dirs(prefix)

    if command == 'update':
        if len(args.packages) == 0:
            sys.exit("""Error: no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)

    if command == 'update':
        linked = ci.linked(prefix)
        for name in args.packages:
            common.arg2spec(name)
            if '=' in name:
                sys.exit("Invalid package name: '%s'" % (name))
            if name not in set(ci.name_dist(d) for d in linked):
                sys.exit("Error: package '%s' is not installed in %s" %
                         (name, prefix))

    if newenv and args.clone:
        if args.packages:
            sys.exit('Error: did not expect any arguments for --clone')
        clone(args.clone, prefix)
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
        return

    if newenv and not args.no_default_packages:
        default_packages = config.create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in config.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build import config as build_config
        except ImportError:
            sys.exit("Error: you need to have 'conda-build' installed"
                     " to use the --use-local option")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        index = get_index([url_path(build_config.croot)],
                          use_cache=args.use_cache)
    else:
        index = get_index(channel_urls=channel_urls, prepend=not
                          args.override_channels,
                          use_cache=args.use_cache)

    # Don't update packages that are already up-to-date
    if command == 'update':
        r = Resolve(index)
        orig_packages = args.packages[:]
        for name in orig_packages:
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                if dist.rsplit('-', 2)[0] == name]
            build_inst = [dist.rsplit('-', 2)[2].rsplit('.tar.bz2', 1)[0]
                          for dist in linked
                          if dist.rsplit('-', 2)[0] == name]
            assert len(vers_inst) == 1, name
            assert len(build_inst) == 1, name
            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                # Shouldn't happen?
                continue
            # This won't do the right thing for python 2
            latest = pkgs[-1]
            if latest.version == vers_inst[0] and latest.build == build_inst[0]:
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import list_packages

            regex = '^(%s)$' % '|'.join(orig_packages)
            print('# All requested packages already installed.')
            list_packages(prefix, regex)
            return

    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            install_tar(prefix, tar_path, verbose=not args.quiet)
            return

    # handle explicit installs of conda packages
    if args.packages and all(s.endswith('.tar.bz2') for s in args.packages):
        from conda.misc import install_local_packages
        install_local_packages(prefix, args.packages, verbose=not args.quiet)
        return

    if any(s.endswith('.tar.bz2') for s in args.packages):
        sys.exit("cannot mix specifications with conda package filenames")

    if args.force:
        args.no_deps = True

    if args.file:
        specs = common.specs_from_url(args.file)
    else:
        specs = common.specs_from_args(args.packages)

    common.check_specs(prefix, specs)

    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                sys.exit("Error: could not create directory: %s" % prefix)
        else:
            sys.exit("""\
Error: environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix)

    actions = plan.install_actions(prefix, index, specs,
                                   force=args.force, only_names=only_names)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import list_packages

        regex = '^(%s)$' % '|'.join(spec_names)
        print('# All requested packages already installed.')
        list_packages(prefix, regex)
        return

    print()
    print("Package plan for installation in environment %s:" % prefix)
    plan.display_actions(actions, index)
    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    if not pscheck.main(args):
        common.confirm_yn(args)

    plan.execute_actions(actions, index, verbose=not args.quiet)
    if newenv:
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
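
Both versions of the update check rely on the same dist-string convention: a linked dist of the form name-version-build splits cleanly with rsplit('-', 2), because by the filename convention the version and build fields do not contain hyphens even when the package name does. A short illustration with a hypothetical dist string:

dist = 'scikit-learn-0.17.1-np110py27_0'    # hypothetical dist string
name, version, build = dist.rsplit('-', 2)
assert (name, version, build) == ('scikit-learn', '0.17.1', 'np110py27_0')
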
Beispiel #57
def execute_search(args, parser):
    import re
    from conda.resolve import Resolve

    if args.reverse_dependency:
        if not args.regex:
            parser.error("--reverse-dependency requires at least one package name")
        if args.spec:
            parser.error("--reverse-dependency does not work with --spec")

    pat = None
    ms = None
    if args.regex:
        if args.spec:
            ms = ' '.join(args.regex.split('='))
        else:
            regex = args.regex
            if args.full_name:
                regex = r'^%s$' % regex
            try:
                pat = re.compile(regex, re.I)
            except re.error as e:
                common.error_and_exit(
                    "'%s' is not a valid regex pattern (exception: %s)" %
                    (regex, e),
                    json=args.json,
                    error_type="ValueError")

    prefix = common.get_prefix(args)

    import conda.config
    import conda.install

    linked = conda.install.linked(prefix)
    extracted = set()
    for pkgs_dir in conda.config.pkgs_dirs:
        extracted.update(conda.install.extracted(pkgs_dir))

    # XXX: Make this work with more than one platform
    platform = args.platform or ''
    if platform and platform != config.subdir:
        args.unknown = False
    common.ensure_override_channels_requires_channel(args, dashc=False,
                                                     json=args.json)
    channel_urls = args.channel or ()
    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit("you need to have 'conda-build >= 1.7.1' installed"
                                  " to use the --use-local option",
                                  json=args.json,
                                  error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        if exists(croot):
            channel_urls = [url_path(croot)] + list(channel_urls)
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      unknown=args.unknown, prefix=prefix,
                                      json=args.json, platform=args.platform, offline=args.offline)
    else:
        index = common.get_index_trap(channel_urls=channel_urls, prepend=not
                                      args.override_channels, platform=args.platform,
                                      use_cache=args.use_index_cache, prefix=prefix,
                                      unknown=args.unknown, json=args.json, offline=args.offline)

    r = Resolve(index)

    if args.canonical:
        json = []
    else:
        json = {}

    names = []
    for name in sorted(r.groups):
        if '@' in name:
            continue
        if args.reverse_dependency:
            ms_name = ms
            for pkg in r.groups[name]:
                for dep in r.ms_depends(pkg):
                    if pat.search(dep.name):
                        names.append((name, Package(pkg, r.index[pkg])))
        else:
            if pat and pat.search(name) is None:
                continue
            if ms and name != ms.name:
                continue

            if ms:
                ms_name = ms
            else:
                ms_name = name

            pkgs = sorted(r.get_pkgs(ms_name))
            names.append((name, pkgs))

    if args.reverse_dependency:
        new_names = []
        old = None
        for name, pkg in sorted(names, key=lambda x:(x[0], x[1].name, x[1])):
            if name == old:
                new_names[-1][1].append(pkg)
            else:
                new_names.append((name, [pkg]))
            old = name
        names = new_names

    for name, pkgs in names:
        if args.reverse_dependency:
            disp_name = pkgs[0].name
        else:
            disp_name = name

        if args.names_only and not args.outdated:
            print(name)
            continue

        if not args.canonical:
            json[name] = []

        if args.outdated:
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            if not vers_inst:
                continue
            assert len(vers_inst) == 1, name
            if not pkgs:
                continue
            latest = pkgs[-1]
            if latest.version == vers_inst[0]:
                continue
            if args.names_only:
                print(name)
                continue

        for pkg in pkgs:
            dist = pkg.fn[:-8]
            if args.canonical:
                if not args.json:
                    print(dist)
                else:
                    json.append(dist)
                continue
            if platform and platform != config.subdir:
                inst = ' '
            elif dist in linked:
                inst = '*'
            elif dist in extracted:
                inst = '.'
            else:
                inst = ' '

            if not args.json:
                print('%-25s %s  %-15s %15s  %-15s %s' % (
                    disp_name, inst,
                    pkg.version,
                    pkg.build,
                    config.canonical_channel_name(pkg.channel),
                    common.disp_features(r.features(pkg.fn)),
                    ))
                disp_name = ''
            else:
                data = {}
                data.update(pkg.info)
                data.update({
                    'fn': pkg.fn,
                    'installed': inst == '*',
                    'extracted': inst in '*.',
                    'version': pkg.version,
                    'build': pkg.build,
                    'build_number': pkg.build_number,
                    'channel': config.canonical_channel_name(pkg.channel),
                    'full_channel': pkg.channel,
                    'features': list(r.features(pkg.fn)),
                    'license': pkg.info.get('license'),
                    'size': pkg.info.get('size'),
                    'depends': pkg.info.get('depends'),
                    'type': pkg.info.get('type')
                })

                if data['type'] == 'app':
                    data['icon'] = make_icon_url(pkg.info)
                json[name].append(data)

    if args.json:
        common.stdout_json(json)
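
The --full-name handling above anchors the user-supplied pattern so that only exact (case-insensitive) name matches survive; an unanchored search would also hit superstrings. A small illustration with hypothetical names:

import re

regex = 'numpy'                                  # hypothetical user input
pat = re.compile(r'^%s$' % regex, re.I)
print(bool(pat.search('NumPy')))                 # True: exact, case-insensitive match
print(bool(pat.search('numpydoc')))              # False: superstring rejected
print(bool(re.search(regex, 'numpydoc', re.I)))  # True without the anchors
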
Beispiel #58
def execute(args, parser):
    import os
    from os.path import dirname

    import conda
    from conda.config import (root_dir, get_channel_urls, subdir, pkgs_dirs,
                              root_writable, envs_dirs, default_prefix,
                              rc_path, user_rc_path, sys_rc_path, foreign,
                              hide_binstar_tokens, platform, offline)
    from conda.resolve import Resolve
    from conda.api import get_index

    if args.root:
        if args.json:
            stdout_json({'root_prefix': root_dir})
        else:
            print(root_dir)
        return

    if args.packages:
        index = get_index()
        r = Resolve(index)
        if args.json:
            stdout_json({
                package:
                [p._asdict() for p in sorted(r.get_pkgs(arg2spec(package)))]
                for package in args.packages
            })
        else:
            for package in args.packages:
                versions = r.get_pkgs(arg2spec(package))
                for pkg in sorted(versions):
                    pretty_package(pkg)
        return

    options = 'envs', 'system', 'license'

    try:
        from conda.install import linked_data
        root_pkgs = linked_data(sys.prefix)
    except:
        root_pkgs = None

    try:
        import requests
        requests_version = requests.__version__
    except ImportError:
        requests_version = "could not import"
    except Exception as e:
        requests_version = "Error %s" % e

    try:
        cenv = [p for p in itervalues(root_pkgs) if p['name'] == 'conda-env']
        conda_env_version = cenv[0]['version']
    except:
        conda_env_version = "not installed"

    try:
        import conda_build
    except ImportError:
        conda_build_version = "not installed"
    except Exception as e:
        conda_build_version = "Error %s" % e
    else:
        conda_build_version = conda_build.__version__

    channels = get_channel_urls(offline=offline)

    info_dict = dict(
        platform=subdir,
        conda_version=conda.__version__,
        conda_env_version=conda_env_version,
        conda_build_version=conda_build_version,
        root_prefix=root_dir,
        root_writable=root_writable,
        pkgs_dirs=pkgs_dirs,
        envs_dirs=envs_dirs,
        default_prefix=default_prefix,
        channels=channels,
        rc_path=rc_path,
        user_rc_path=user_rc_path,
        sys_rc_path=sys_rc_path,
        is_foreign=bool(foreign),
        offline=offline,
        envs=[],
        python_version='.'.join(map(str, sys.version_info)),
        requests_version=requests_version,
    )

    if args.unsafe_channels:
        if not args.json:
            print("\n".join(info_dict["channels"]))
        else:
            print(json.dumps({"channels": info_dict["channels"]}))
        return 0
    else:
        info_dict['channels'] = [
            hide_binstar_tokens(c) for c in info_dict['channels']
        ]
    if args.all or args.json:
        for option in options:
            setattr(args, option, True)

    if args.all or all(not getattr(args, opt) for opt in options):
        for key in 'pkgs_dirs', 'envs_dirs', 'channels':
            info_dict['_' + key] = ('\n' + 24 * ' ').join(info_dict[key])
        info_dict['_rtwro'] = ('writable'
                               if info_dict['root_writable'] else 'read only')
        print("""\
Current conda install:

             platform : %(platform)s
        conda version : %(conda_version)s
    conda-env version : %(conda_env_version)s
  conda-build version : %(conda_build_version)s
       python version : %(python_version)s
     requests version : %(requests_version)s
     root environment : %(root_prefix)s  (%(_rtwro)s)
  default environment : %(default_prefix)s
     envs directories : %(_envs_dirs)s
        package cache : %(_pkgs_dirs)s
         channel URLs : %(_channels)s
          config file : %(rc_path)s
         offline mode : %(offline)s
    is foreign system : %(is_foreign)s
""" % info_dict)

    if args.envs:
        handle_envs_list(info_dict['envs'], not args.json)

    if args.system and not args.json:
        from conda.cli.find_commands import find_commands, find_executable

        print("sys.version: %s..." % (sys.version[:40]))
        print("sys.prefix: %s" % sys.prefix)
        print("sys.executable: %s" % sys.executable)
        print("conda location: %s" % dirname(conda.__file__))
        for cmd in sorted(set(find_commands() + ['build'])):
            print("conda-%s: %s" % (cmd, find_executable('conda-' + cmd)))
        print("user site dirs: ", end='')
        site_dirs = get_user_site()
        if site_dirs:
            print(site_dirs[0])
        else:
            print()
        for site_dir in site_dirs[1:]:
            print('                %s' % site_dir)
        print()

        evars = [
            'PATH', 'PYTHONPATH', 'PYTHONHOME', 'CONDA_DEFAULT_ENV',
            'CIO_TEST', 'CONDA_ENVS_PATH'
        ]
        if platform == 'linux':
            evars.append('LD_LIBRARY_PATH')
        elif platform == 'osx':
            evars.append('DYLD_LIBRARY_PATH')
        for ev in sorted(evars):
            print("%s: %s" % (ev, os.getenv(ev, '<not set>')))
        print()

    if args.license and not args.json:
        try:
            from _license import show_info
            show_info()
        except ImportError:
            print("""\
WARNING: could not import _license.show_info
# try:
# $ conda install -n root _license""")

    if args.json:
        stdout_json(info_dict)
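
The report above pre-joins multi-valued fields (package cache, envs directories, channel URLs) with a newline plus 24 spaces, so continuation lines line up under the first value of the fixed-width template. A minimal sketch with hypothetical URLs:

channels = ['https://repo.example.com/free/linux-64/',
            'https://repo.example.com/pro/linux-64/']    # hypothetical channel URLs
info = {'_channels': ('\n' + 24 * ' ').join(channels)}
print('         channel URLs : %(_channels)s' % info)
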
Beispiel #59
import json
import unittest
from os.path import dirname, join

import conda.config
from conda.config import pkgs_dirs
from conda.install import LINK_HARD
import conda.plan as plan
import conda.instructions as inst
from conda.plan import display_actions
from conda.resolve import Resolve

# FIXME This should be a relative import
from tests.helpers import captured
from conda.exceptions import CondaException

from .helpers import mock

with open(join(dirname(__file__), 'index.json')) as fi:
    index = json.load(fi)
    r = Resolve(index)


def solve(specs):
    return [fn[:-8] for fn in r.solve(specs)]


class TestMisc(unittest.TestCase):

    def test_split_linkarg(self):
        for arg, res in [
            ('w3-1.2-0', ('w3-1.2-0', pkgs_dirs[0], LINK_HARD)),
            ('w3-1.2-0 /opt/pkgs 1', ('w3-1.2-0', '/opt/pkgs', 1)),
            (' w3-1.2-0  /opt/pkgs  1  ', ('w3-1.2-0', '/opt/pkgs', 1)),
            (r'w3-1.2-0 C:\A B\pkgs 2', ('w3-1.2-0', r'C:\A B\pkgs', 2))]:
            self.assertEqual(inst.split_linkarg(arg), res)
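
For reference, here is one parser consistent with the four test cases above (a sketch only, not necessarily conda's implementation; the defaults stand in for pkgs_dirs[0] and LINK_HARD). Everything between the first and last whitespace-separated fields is rejoined, so the package directory may contain spaces.

def split_linkarg_sketch(arg, default_dir='/opt/anaconda/pkgs', default_linktype=1):
    # default_dir and default_linktype are hypothetical stand-ins for
    # pkgs_dirs[0] and LINK_HARD
    parts = arg.split()
    dist = parts[0]
    pkgs_dir = default_dir if len(parts) < 2 else ' '.join(parts[1:-1])
    linktype = default_linktype if len(parts) < 3 else int(parts[-1])
    return dist, pkgs_dir, linktype

assert split_linkarg_sketch('w3-1.2-0 /opt/pkgs 1') == ('w3-1.2-0', '/opt/pkgs', 1)
assert split_linkarg_sketch(r'w3-1.2-0 C:\A B\pkgs 2') == ('w3-1.2-0', r'C:\A B\pkgs', 2)
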
Beispiel #60
def execute(args, parser):
    import re

    from conda.api import get_index
    from conda.resolve import MatchSpec, Resolve

    if args.regex:
        pat = re.compile(args.regex, re.I)
    else:
        pat = None

    prefix = common.get_prefix(args)
    if not args.canonical:
        import conda.config
        import conda.install

        linked = conda.install.linked(prefix)
        extracted = set()
        for pkgs_dir in conda.config.pkgs_dirs:
            extracted.update(conda.install.extracted(pkgs_dir))

    # XXX: Make this work with more than one platform
    platform = args.platform or ''
    if platform and platform != config.subdir:
        args.unknown = False
    common.ensure_override_channels_requires_channel(args, dashc=False)
    channel_urls = args.channel or ()
    index = get_index(channel_urls=channel_urls,
                      prepend=not args.override_channels,
                      platform=args.platform,
                      use_cache=args.use_index_cache,
                      unknown=args.unknown)

    r = Resolve(index)
    for name in sorted(r.groups):
        disp_name = name
        if pat and pat.search(name) is None:
            continue

        if args.outdated:
            vers_inst = [
                dist.rsplit('-', 2)[1] for dist in linked
                if dist.rsplit('-', 2)[0] == name
            ]
            if not vers_inst:
                continue
            assert len(vers_inst) == 1, name
            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                continue
            latest = pkgs[-1]
            if latest.version == vers_inst[0]:
                continue

        for pkg in sorted(r.get_pkgs(MatchSpec(name))):
            dist = pkg.fn[:-8]
            if args.canonical:
                print(dist)
                continue
            if dist in linked:
                inst = '*'
            elif dist in extracted:
                inst = '.'
            else:
                inst = ' '

            print('%-25s %s  %-15s %15s  %-15s %s' % (
                disp_name,
                inst,
                pkg.version,
                r.index[pkg.fn]['build'],
                config.canonical_channel_name(pkg.channel),
                common.disp_features(r.features(pkg.fn)),
            ))
            disp_name = ''
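
The dist = pkg.fn[:-8] idiom here (and in Beispiel #57 above) simply strips the 8-character '.tar.bz2' suffix from a package filename to recover the dist name:

fn = 'zlib-1.2.8-0.tar.bz2'     # hypothetical package filename
assert len('.tar.bz2') == 8
assert fn[:-8] == 'zlib-1.2.8-0'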