Example #1
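# Implementation of a conda "info"-style CLI command: prints the platform,
# conda/conda-build/python/requests versions, root and default environments,
# package caches, channel URLs, config paths, and (on request) environment,
# system, and license details, either as text or as JSON.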
def execute(args, parser):
    import os
    import sys
    from collections import defaultdict
    from os.path import basename, dirname

    import conda
    import conda.config as config
    import conda.misc as misc
    from conda.cli import common
    from conda.resolve import Resolve, MatchSpec
    from conda.cli.main_init import is_initialized
    from conda.api import get_index, get_package_versions
    # pretty_package() and get_user_site(), used below, are helpers defined
    # alongside this function in the conda CLI module.

    if args.root:
        if args.json:
            common.stdout_json({'root_prefix': config.root_dir})
        else:
            print(config.root_dir)
        return

    if args.packages:
        if args.json:
            results = defaultdict(list)
            for arg in args.packages:
                for pkg in get_package_versions(arg):
                    results[arg].append(pkg._asdict())
            common.stdout_json(results)
            return
        index = get_index()
        r = Resolve(index)
        specs = map(common.arg2spec, args.packages)

        for spec in specs:
            versions = r.get_pkgs(MatchSpec(spec))
            for pkg in versions:
                pretty_package(pkg)

        return

    options = 'envs', 'system', 'license'

    try:
        import requests
        requests_version = requests.__version__
    except ImportError:
        requests_version = "could not import"
    except Exception as e:
        requests_version = "Error %s" % e

    try:
        import conda_build
    except ImportError:
        conda_build_version = "not installed"
    except Exception as e:
        conda_build_version = "Error %s" % e
    else:
        conda_build_version = conda_build.__version__

    info_dict = dict(
        platform=config.subdir,
        conda_version=conda.__version__,
        conda_build_version=conda_build_version,
        root_prefix=config.root_dir,
        root_writable=config.root_writable,
        pkgs_dirs=config.pkgs_dirs,
        envs_dirs=config.envs_dirs,
        default_prefix=config.default_prefix,
        channels=config.get_channel_urls(),
        rc_path=config.rc_path,
        user_rc_path=config.user_rc_path,
        sys_rc_path=config.sys_rc_path,
        is_foreign=bool(config.foreign),
        envs=[],
        python_version='.'.join(map(str, sys.version_info)),
        requests_version=requests_version,
    )

    if args.all or args.json:
        for option in options:
            setattr(args, option, True)

    info_dict['channels'] = [
        config.hide_binstar_tokens(c) for c in info_dict['channels']
    ]

    if args.all or all(not getattr(args, opt) for opt in options):
        for key in 'pkgs_dirs', 'envs_dirs', 'channels':
            info_dict['_' + key] = ('\n' + 24 * ' ').join(info_dict[key])
        info_dict['_rtwro'] = ('writable'
                               if info_dict['root_writable'] else 'read only')
        print("""\
Current conda install:

             platform : %(platform)s
        conda version : %(conda_version)s
  conda-build version : %(conda_build_version)s
       python version : %(python_version)s
     requests version : %(requests_version)s
     root environment : %(root_prefix)s  (%(_rtwro)s)
  default environment : %(default_prefix)s
     envs directories : %(_envs_dirs)s
        package cache : %(_pkgs_dirs)s
         channel URLs : %(_channels)s
          config file : %(rc_path)s
    is foreign system : %(is_foreign)s
""" % info_dict)
        if not is_initialized():
            print("""\
# NOTE:
#     root directory '%s' is uninitialized""" % config.root_dir)

    if args.envs:
        common.handle_envs_list(info_dict['envs'], not args.json)

    if args.system and not args.json:
        from conda.cli.find_commands import find_commands, find_executable

        print("sys.version: %s..." % (sys.version[:40]))
        print("sys.prefix: %s" % sys.prefix)
        print("sys.executable: %s" % sys.executable)
        print("conda location: %s" % dirname(conda.__file__))
        for cmd in sorted(set(find_commands() + ['build'])):
            print("conda-%s: %s" % (cmd, find_executable('conda-' + cmd)))
        print("user site dirs: ", end='')
        site_dirs = get_user_site()
        if site_dirs:
            print(site_dirs[0])
        else:
            print()
        for site_dir in site_dirs[1:]:
            print('                %s' % site_dir)
        print()

        evars = [
            'PATH', 'PYTHONPATH', 'PYTHONHOME', 'CONDA_DEFAULT_ENV',
            'CIO_TEST', 'CONDA_ENVS_PATH'
        ]
        if config.platform == 'linux':
            evars.append('LD_LIBRARY_PATH')
        elif config.platform == 'osx':
            evars.append('DYLD_LIBRARY_PATH')
        for ev in sorted(evars):
            print("%s: %s" % (ev, os.getenv(ev, '<not set>')))
        print()

    if args.license and not args.json:
        try:
            from _license import show_info
            show_info()
        except ImportError:
            print("""\
WARNING: could not import _license.show_info
# try:
# $ conda install -n root _license""")

    if args.json:
        common.stdout_json(info_dict)
Example #2
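# Test of Resolve.generate_version_metrics(): builds a reduced index for the
# 'anaconda' spec and checks the per-package version (eqv) and build (eqb)
# coefficient maps against expected values.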
def test_generate_eq():
    dists = r.get_reduced_index(['anaconda'])
    r2 = Resolve(dists, True, True)
    C = r2.gen_clauses()
    eqv, eqb = r2.generate_version_metrics(C, list(r2.groups.keys()))
    # Should satisfy the following criteria:
    # - lower versions of the same package should have higher coefficients.
    # - the same versions of the same package (e.g., different build strings)
    #   should have the same coefficients.
    # - a package that only has one version should not appear, unless
    #   include=True, as it will have a 0 coefficient. The same is true of the
    #   latest version of a package.
    #   (A standalone sketch of this ranking rule follows this test.)
    eqv = {Dist(key).to_filename(): value for key, value in iteritems(eqv)}
    eqb = {Dist(key).to_filename(): value for key, value in iteritems(eqb)}
    assert eqv == {
        'anaconda-1.4.0-np15py26_0.tar.bz2': 1,
        'anaconda-1.4.0-np15py27_0.tar.bz2': 1,
        'anaconda-1.4.0-np16py26_0.tar.bz2': 1,
        'anaconda-1.4.0-np16py27_0.tar.bz2': 1,
        'anaconda-1.4.0-np17py26_0.tar.bz2': 1,
        'anaconda-1.4.0-np17py27_0.tar.bz2': 1,
        'anaconda-1.4.0-np17py33_0.tar.bz2': 1,
        'astropy-0.2-np15py26_0.tar.bz2': 1,
        'astropy-0.2-np15py27_0.tar.bz2': 1,
        'astropy-0.2-np16py26_0.tar.bz2': 1,
        'astropy-0.2-np16py27_0.tar.bz2': 1,
        'astropy-0.2-np17py26_0.tar.bz2': 1,
        'astropy-0.2-np17py27_0.tar.bz2': 1,
        'astropy-0.2-np17py33_0.tar.bz2': 1,
        'biopython-1.60-np15py26_0.tar.bz2': 1,
        'biopython-1.60-np15py27_0.tar.bz2': 1,
        'biopython-1.60-np16py26_0.tar.bz2': 1,
        'biopython-1.60-np16py27_0.tar.bz2': 1,
        'biopython-1.60-np17py26_0.tar.bz2': 1,
        'biopython-1.60-np17py27_0.tar.bz2': 1,
        'bitarray-0.8.0-py26_0.tar.bz2': 1,
        'bitarray-0.8.0-py27_0.tar.bz2': 1,
        'bitarray-0.8.0-py33_0.tar.bz2': 1,
        'boto-2.8.0-py26_0.tar.bz2': 1,
        'boto-2.8.0-py27_0.tar.bz2': 1,
        'conda-1.4.4-py27_0.tar.bz2': 1,
        'cython-0.18-py26_0.tar.bz2': 1,
        'cython-0.18-py27_0.tar.bz2': 1,
        'cython-0.18-py33_0.tar.bz2': 1,
        'distribute-0.6.34-py26_1.tar.bz2': 1,
        'distribute-0.6.34-py27_1.tar.bz2': 1,
        'distribute-0.6.34-py33_1.tar.bz2': 1,
        'gevent-0.13.7-py26_0.tar.bz2': 1,
        'gevent-0.13.7-py27_0.tar.bz2': 1,
        'ipython-0.13.1-py26_1.tar.bz2': 1,
        'ipython-0.13.1-py27_1.tar.bz2': 1,
        'ipython-0.13.1-py33_1.tar.bz2': 1,
        'llvmpy-0.11.1-py26_0.tar.bz2': 1,
        'llvmpy-0.11.1-py27_0.tar.bz2': 1,
        'llvmpy-0.11.1-py33_0.tar.bz2': 1,
        'lxml-3.0.2-py26_0.tar.bz2': 1,
        'lxml-3.0.2-py27_0.tar.bz2': 1,
        'lxml-3.0.2-py33_0.tar.bz2': 1,
        'matplotlib-1.2.0-np15py26_1.tar.bz2': 1,
        'matplotlib-1.2.0-np15py27_1.tar.bz2': 1,
        'matplotlib-1.2.0-np16py26_1.tar.bz2': 1,
        'matplotlib-1.2.0-np16py27_1.tar.bz2': 1,
        'matplotlib-1.2.0-np17py26_1.tar.bz2': 1,
        'matplotlib-1.2.0-np17py27_1.tar.bz2': 1,
        'matplotlib-1.2.0-np17py33_1.tar.bz2': 1,
        'nose-1.2.1-py26_0.tar.bz2': 1,
        'nose-1.2.1-py27_0.tar.bz2': 1,
        'nose-1.2.1-py33_0.tar.bz2': 1,
        'numba-0.7.0-np16py26_1.tar.bz2': 1,
        'numba-0.7.0-np16py27_1.tar.bz2': 1,
        'numba-0.7.0-np17py26_1.tar.bz2': 1,
        'numba-0.7.0-np17py27_1.tar.bz2': 1,
        'numpy-1.5.1-py26_3.tar.bz2': 3,
        'numpy-1.5.1-py27_3.tar.bz2': 3,
        'numpy-1.6.2-py26_3.tar.bz2': 2,
        'numpy-1.6.2-py26_4.tar.bz2': 2,
        'numpy-1.6.2-py26_p4.tar.bz2': 2,
        'numpy-1.6.2-py27_3.tar.bz2': 2,
        'numpy-1.6.2-py27_4.tar.bz2': 2,
        'numpy-1.6.2-py27_p4.tar.bz2': 2,
        'numpy-1.7.0-py26_0.tar.bz2': 1,
        'numpy-1.7.0-py27_0.tar.bz2': 1,
        'numpy-1.7.0-py33_0.tar.bz2': 1,
        'pandas-0.10.0-np16py26_0.tar.bz2': 2,
        'pandas-0.10.0-np16py27_0.tar.bz2': 2,
        'pandas-0.10.0-np17py26_0.tar.bz2': 2,
        'pandas-0.10.0-np17py27_0.tar.bz2': 2,
        'pandas-0.10.1-np16py26_0.tar.bz2': 1,
        'pandas-0.10.1-np16py27_0.tar.bz2': 1,
        'pandas-0.10.1-np17py26_0.tar.bz2': 1,
        'pandas-0.10.1-np17py27_0.tar.bz2': 1,
        'pandas-0.10.1-np17py33_0.tar.bz2': 1,
        'pandas-0.8.1-np16py26_0.tar.bz2': 5,
        'pandas-0.8.1-np16py27_0.tar.bz2': 5,
        'pandas-0.8.1-np17py26_0.tar.bz2': 5,
        'pandas-0.8.1-np17py27_0.tar.bz2': 5,
        'pandas-0.9.0-np16py26_0.tar.bz2': 4,
        'pandas-0.9.0-np16py27_0.tar.bz2': 4,
        'pandas-0.9.0-np17py26_0.tar.bz2': 4,
        'pandas-0.9.0-np17py27_0.tar.bz2': 4,
        'pandas-0.9.1-np16py26_0.tar.bz2': 3,
        'pandas-0.9.1-np16py27_0.tar.bz2': 3,
        'pandas-0.9.1-np17py26_0.tar.bz2': 3,
        'pandas-0.9.1-np17py27_0.tar.bz2': 3,
        'pip-1.2.1-py26_1.tar.bz2': 1,
        'pip-1.2.1-py27_1.tar.bz2': 1,
        'pip-1.2.1-py33_1.tar.bz2': 1,
        'psutil-0.6.1-py26_0.tar.bz2': 1,
        'psutil-0.6.1-py27_0.tar.bz2': 1,
        'psutil-0.6.1-py33_0.tar.bz2': 1,
        'pyflakes-0.6.1-py26_0.tar.bz2': 1,
        'pyflakes-0.6.1-py27_0.tar.bz2': 1,
        'pyflakes-0.6.1-py33_0.tar.bz2': 1,
        'python-2.6.8-6.tar.bz2': 4,
        'python-2.7.3-7.tar.bz2': 3,
        'python-2.7.4-0.tar.bz2': 2,
        'python-3.3.0-4.tar.bz2': 1,
        'pytz-2012j-py26_0.tar.bz2': 1,
        'pytz-2012j-py27_0.tar.bz2': 1,
        'pytz-2012j-py33_0.tar.bz2': 1,
        'requests-0.13.9-py26_0.tar.bz2': 1,
        'requests-0.13.9-py27_0.tar.bz2': 1,
        'requests-0.13.9-py33_0.tar.bz2': 1,
        'scikit-learn-0.13-np15py26_1.tar.bz2': 1,
        'scikit-learn-0.13-np15py27_1.tar.bz2': 1,
        'scikit-learn-0.13-np16py26_1.tar.bz2': 1,
        'scikit-learn-0.13-np16py27_1.tar.bz2': 1,
        'scikit-learn-0.13-np17py26_1.tar.bz2': 1,
        'scikit-learn-0.13-np17py27_1.tar.bz2': 1,
        'scipy-0.11.0-np15py26_3.tar.bz2': 1,
        'scipy-0.11.0-np15py27_3.tar.bz2': 1,
        'scipy-0.11.0-np16py26_3.tar.bz2': 1,
        'scipy-0.11.0-np16py27_3.tar.bz2': 1,
        'scipy-0.11.0-np17py26_3.tar.bz2': 1,
        'scipy-0.11.0-np17py27_3.tar.bz2': 1,
        'scipy-0.11.0-np17py33_3.tar.bz2': 1,
        'six-1.2.0-py26_0.tar.bz2': 1,
        'six-1.2.0-py27_0.tar.bz2': 1,
        'six-1.2.0-py33_0.tar.bz2': 1,
        'spyder-2.1.13-py27_0.tar.bz2': 1,
        'sqlalchemy-0.7.8-py26_0.tar.bz2': 1,
        'sqlalchemy-0.7.8-py27_0.tar.bz2': 1,
        'sqlalchemy-0.7.8-py33_0.tar.bz2': 1,
        'sympy-0.7.1-py26_0.tar.bz2': 1,
        'sympy-0.7.1-py27_0.tar.bz2': 1,
        'tornado-2.4.1-py26_0.tar.bz2': 1,
        'tornado-2.4.1-py27_0.tar.bz2': 1,
        'tornado-2.4.1-py33_0.tar.bz2': 1,
        'xlrd-0.9.0-py26_0.tar.bz2': 1,
        'xlrd-0.9.0-py27_0.tar.bz2': 1,
        'xlrd-0.9.0-py33_0.tar.bz2': 1,
        'xlwt-0.7.4-py26_0.tar.bz2': 1,
        'xlwt-0.7.4-py27_0.tar.bz2': 1
    }
    assert eqb == {
        'cairo-1.12.2-0.tar.bz2': 1,
        'cubes-0.10.2-py27_0.tar.bz2': 1,
        'dateutil-2.1-py26_0.tar.bz2': 1,
        'dateutil-2.1-py27_0.tar.bz2': 1,
        'dateutil-2.1-py33_0.tar.bz2': 1,
        'gevent-websocket-0.3.6-py26_1.tar.bz2': 1,
        'gevent-websocket-0.3.6-py27_1.tar.bz2': 1,
        'gevent_zeromq-0.2.5-py26_1.tar.bz2': 1,
        'gevent_zeromq-0.2.5-py27_1.tar.bz2': 1,
        'libnetcdf-4.2.1.1-0.tar.bz2': 1,
        'numexpr-2.0.1-np16py26_1.tar.bz2': 2,
        'numexpr-2.0.1-np16py26_2.tar.bz2': 1,
        'numexpr-2.0.1-np16py26_ce0.tar.bz2': 3,
        'numexpr-2.0.1-np16py26_p1.tar.bz2': 2,
        'numexpr-2.0.1-np16py26_p2.tar.bz2': 1,
        'numexpr-2.0.1-np16py26_pro0.tar.bz2': 3,
        'numexpr-2.0.1-np16py27_1.tar.bz2': 2,
        'numexpr-2.0.1-np16py27_2.tar.bz2': 1,
        'numexpr-2.0.1-np16py27_ce0.tar.bz2': 3,
        'numexpr-2.0.1-np16py27_p1.tar.bz2': 2,
        'numexpr-2.0.1-np16py27_p2.tar.bz2': 1,
        'numexpr-2.0.1-np16py27_pro0.tar.bz2': 3,
        'numexpr-2.0.1-np17py26_1.tar.bz2': 2,
        'numexpr-2.0.1-np17py26_2.tar.bz2': 1,
        'numexpr-2.0.1-np17py26_ce0.tar.bz2': 3,
        'numexpr-2.0.1-np17py26_p1.tar.bz2': 2,
        'numexpr-2.0.1-np17py26_p2.tar.bz2': 1,
        'numexpr-2.0.1-np17py26_pro0.tar.bz2': 3,
        'numexpr-2.0.1-np17py27_1.tar.bz2': 2,
        'numexpr-2.0.1-np17py27_2.tar.bz2': 1,
        'numexpr-2.0.1-np17py27_ce0.tar.bz2': 3,
        'numexpr-2.0.1-np17py27_p1.tar.bz2': 2,
        'numexpr-2.0.1-np17py27_p2.tar.bz2': 1,
        'numexpr-2.0.1-np17py27_pro0.tar.bz2': 3,
        'numpy-1.6.2-py26_3.tar.bz2': 1,
        'numpy-1.6.2-py27_3.tar.bz2': 1,
        'py2cairo-1.10.0-py26_0.tar.bz2': 1,
        'py2cairo-1.10.0-py27_0.tar.bz2': 1,
        'pycurl-7.19.0-py26_0.tar.bz2': 1,
        'pycurl-7.19.0-py27_0.tar.bz2': 1,
        'pysal-1.5.0-np15py27_0.tar.bz2': 1,
        'pysal-1.5.0-np16py27_0.tar.bz2': 1,
        'pysal-1.5.0-np17py27_0.tar.bz2': 1,
        'pytest-2.3.4-py26_0.tar.bz2': 1,
        'pytest-2.3.4-py27_0.tar.bz2': 1,
        'pyzmq-2.2.0.1-py26_0.tar.bz2': 1,
        'pyzmq-2.2.0.1-py27_0.tar.bz2': 1,
        'pyzmq-2.2.0.1-py33_0.tar.bz2': 1,
        'scikit-image-0.8.2-np16py26_0.tar.bz2': 1,
        'scikit-image-0.8.2-np16py27_0.tar.bz2': 1,
        'scikit-image-0.8.2-np17py26_0.tar.bz2': 1,
        'scikit-image-0.8.2-np17py27_0.tar.bz2': 1,
        'scikit-image-0.8.2-np17py33_0.tar.bz2': 1,
        'sphinx-1.1.3-py26_2.tar.bz2': 1,
        'sphinx-1.1.3-py27_2.tar.bz2': 1,
        'sphinx-1.1.3-py33_2.tar.bz2': 1,
        'statsmodels-0.4.3-np16py26_0.tar.bz2': 1,
        'statsmodels-0.4.3-np16py27_0.tar.bz2': 1,
        'statsmodels-0.4.3-np17py26_0.tar.bz2': 1,
        'statsmodels-0.4.3-np17py27_0.tar.bz2': 1,
        'system-5.8-0.tar.bz2': 1,
        'theano-0.5.0-np15py26_0.tar.bz2': 1,
        'theano-0.5.0-np15py27_0.tar.bz2': 1,
        'theano-0.5.0-np16py26_0.tar.bz2': 1,
        'theano-0.5.0-np16py27_0.tar.bz2': 1,
        'theano-0.5.0-np17py26_0.tar.bz2': 1,
        'theano-0.5.0-np17py27_0.tar.bz2': 1,
        'zeromq-2.2.0-0.tar.bz2': 1
    }
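
The comment block at the top of test_generate_eq describes the rule these coefficient maps must follow: within a single package, older versions get larger coefficients, all builds of one version share a coefficient, and the newest version maps to 0 and is omitted. The snippet below is a minimal standalone sketch of that ranking rule for illustration only; rank_versions is a hypothetical helper with a naive dotted-integer sort, not conda's VersionOrder or generate_version_metrics.

# Hypothetical illustration (not conda code) of the ranking rule checked above:
# older versions of a package get larger coefficients, all builds of one
# version share a coefficient, and the newest version maps to 0 and is dropped.
from collections import defaultdict


def rank_versions(filenames):
    """filenames: 'name-version-build.tar.bz2' strings for a single package."""
    by_version = defaultdict(list)
    for fn in filenames:
        name, version, build = fn[:-len('.tar.bz2')].rsplit('-', 2)
        by_version[version].append(fn)
    # Naive dotted-integer ordering; conda's real VersionOrder handles far more.
    ordered = sorted(by_version, key=lambda v: tuple(int(x) for x in v.split('.')))
    coeffs = {}
    for penalty, version in enumerate(reversed(ordered)):
        if penalty == 0:
            continue  # newest version -> coefficient 0, omitted from the map
        for fn in by_version[version]:
            coeffs[fn] = penalty
    return coeffs


print(rank_versions([
    'python-2.6.8-6.tar.bz2', 'python-2.7.3-7.tar.bz2',
    'python-2.7.4-0.tar.bz2', 'python-3.3.0-4.tar.bz2',
]))
# {'python-2.7.4-0.tar.bz2': 1, 'python-2.7.3-7.tar.bz2': 2, 'python-2.6.8-6.tar.bz2': 3}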
Example #3
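# Test that the resolver copes with packages whose dependencies are missing
# from the index: such versions are skipped in favor of installable ones, and
# requesting them explicitly raises NoPackagesFoundError.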
def test_nonexistent_deps():
    index2 = index.copy()
    index2['mypackage-1.0-py33_0.tar.bz2'] = IndexRecord(
        **{
            'build': 'py33_0',
            'build_number': 0,
            'depends': ['nose', 'python 3.3*', 'notarealpackage 2.0*'],
            'name': 'mypackage',
            'requires': ['nose 1.2.1', 'python 3.3'],
            'version': '1.0',
        })
    index2['mypackage-1.1-py33_0.tar.bz2'] = IndexRecord(
        **{
            'build': 'py33_0',
            'build_number': 0,
            'depends': ['nose', 'python 3.3*'],
            'name': 'mypackage',
            'requires': ['nose 1.2.1', 'python 3.3'],
            'version': '1.1',
        })
    index2['anotherpackage-1.0-py33_0.tar.bz2'] = IndexRecord(
        **{
            'build': 'py33_0',
            'build_number': 0,
            'depends': ['nose', 'mypackage 1.1'],
            'name': 'anotherpackage',
            'requires': ['nose', 'mypackage 1.1'],
            'version': '1.0',
        })
    index2['anotherpackage-2.0-py33_0.tar.bz2'] = IndexRecord(
        **{
            'build': 'py33_0',
            'build_number': 0,
            'depends': ['nose', 'mypackage'],
            'name': 'anotherpackage',
            'requires': ['nose', 'mypackage'],
            'version': '2.0',
        })
    index2 = {Dist(key): value for key, value in iteritems(index2)}
    r = Resolve(index2)

    assert set(r.find_matches(MatchSpec('mypackage'))) == {
        Dist('mypackage-1.0-py33_0.tar.bz2'),
        Dist('mypackage-1.1-py33_0.tar.bz2'),
    }
    assert set(d.to_filename()
               for d in r.get_reduced_index(['mypackage']).keys()) == {
                   'mypackage-1.1-py33_0.tar.bz2', 'nose-1.1.2-py33_0.tar.bz2',
                   'nose-1.2.1-py33_0.tar.bz2', 'nose-1.3.0-py33_0.tar.bz2',
                   'openssl-1.0.1c-0.tar.bz2', 'python-3.3.0-2.tar.bz2',
                   'python-3.3.0-3.tar.bz2', 'python-3.3.0-4.tar.bz2',
                   'python-3.3.0-pro0.tar.bz2', 'python-3.3.0-pro1.tar.bz2',
                   'python-3.3.1-0.tar.bz2', 'python-3.3.2-0.tar.bz2',
                   'readline-6.2-0.tar.bz2', 'sqlite-3.7.13-0.tar.bz2',
                   'system-5.8-0.tar.bz2', 'system-5.8-1.tar.bz2',
                   'tk-8.5.13-0.tar.bz2', 'zlib-1.2.7-0.tar.bz2'
               }

    assert r.install(['mypackage']) == r.install(['mypackage 1.1']) == [
        Dist(dname) for dname in [
            'mypackage-1.1-py33_0.tar.bz2',
            'nose-1.3.0-py33_0.tar.bz2',
            'openssl-1.0.1c-0.tar.bz2',
            'python-3.3.2-0.tar.bz2',
            'readline-6.2-0.tar.bz2',
            'sqlite-3.7.13-0.tar.bz2',
            'system-5.8-1.tar.bz2',
            'tk-8.5.13-0.tar.bz2',
            'zlib-1.2.7-0.tar.bz2',
        ]
    ]
    assert raises(NoPackagesFoundError, lambda: r.install(['mypackage 1.0']))
    assert raises(NoPackagesFoundError,
                  lambda: r.install(['mypackage 1.0', 'burgertime 1.0']))

    assert r.install(['anotherpackage 1.0']) == [
        Dist(dname) for dname in [
            'anotherpackage-1.0-py33_0.tar.bz2',
            'mypackage-1.1-py33_0.tar.bz2',
            'nose-1.3.0-py33_0.tar.bz2',
            'openssl-1.0.1c-0.tar.bz2',
            'python-3.3.2-0.tar.bz2',
            'readline-6.2-0.tar.bz2',
            'sqlite-3.7.13-0.tar.bz2',
            'system-5.8-1.tar.bz2',
            'tk-8.5.13-0.tar.bz2',
            'zlib-1.2.7-0.tar.bz2',
        ]
    ]

    assert r.install(['anotherpackage']) == [
        Dist(dname) for dname in [
            'anotherpackage-2.0-py33_0.tar.bz2',
            'mypackage-1.1-py33_0.tar.bz2',
            'nose-1.3.0-py33_0.tar.bz2',
            'openssl-1.0.1c-0.tar.bz2',
            'python-3.3.2-0.tar.bz2',
            'readline-6.2-0.tar.bz2',
            'sqlite-3.7.13-0.tar.bz2',
            'system-5.8-1.tar.bz2',
            'tk-8.5.13-0.tar.bz2',
            'zlib-1.2.7-0.tar.bz2',
        ]
    ]

    # This time, the latest version is messed up
    index3 = index.copy()
    index3['mypackage-1.1-py33_0.tar.bz2'] = IndexRecord(
        **{
            'build': 'py33_0',
            'build_number': 0,
            'depends': ['nose', 'python 3.3*', 'notarealpackage 2.0*'],
            'name': 'mypackage',
            'requires': ['nose 1.2.1', 'python 3.3'],
            'version': '1.1',
        })
    index3['mypackage-1.0-py33_0.tar.bz2'] = IndexRecord(
        **{
            'build': 'py33_0',
            'build_number': 0,
            'depends': ['nose', 'python 3.3*'],
            'name': 'mypackage',
            'requires': ['nose 1.2.1', 'python 3.3'],
            'version': '1.0',
        })
    index3['anotherpackage-1.0-py33_0.tar.bz2'] = IndexRecord(
        **{
            'build': 'py33_0',
            'build_number': 0,
            'depends': ['nose', 'mypackage 1.0'],
            'name': 'anotherpackage',
            'requires': ['nose', 'mypackage 1.0'],
            'version': '1.0',
        })
    index3['anotherpackage-2.0-py33_0.tar.bz2'] = IndexRecord(
        **{
            'build': 'py33_0',
            'build_number': 0,
            'depends': ['nose', 'mypackage'],
            'name': 'anotherpackage',
            'requires': ['nose', 'mypackage'],
            'version': '2.0',
        })
    index3 = {Dist(key): value for key, value in iteritems(index3)}
    r = Resolve(index3)

    assert set(d.to_filename()
               for d in r.find_matches(MatchSpec('mypackage'))) == {
                   'mypackage-1.0-py33_0.tar.bz2',
                   'mypackage-1.1-py33_0.tar.bz2',
               }
    assert set(d.to_filename()
               for d in r.get_reduced_index(['mypackage']).keys()) == {
                   'mypackage-1.0-py33_0.tar.bz2', 'nose-1.1.2-py33_0.tar.bz2',
                   'nose-1.2.1-py33_0.tar.bz2', 'nose-1.3.0-py33_0.tar.bz2',
                   'openssl-1.0.1c-0.tar.bz2', 'python-3.3.0-2.tar.bz2',
                   'python-3.3.0-3.tar.bz2', 'python-3.3.0-4.tar.bz2',
                   'python-3.3.0-pro0.tar.bz2', 'python-3.3.0-pro1.tar.bz2',
                   'python-3.3.1-0.tar.bz2', 'python-3.3.2-0.tar.bz2',
                   'readline-6.2-0.tar.bz2', 'sqlite-3.7.13-0.tar.bz2',
                   'system-5.8-0.tar.bz2', 'system-5.8-1.tar.bz2',
                   'tk-8.5.13-0.tar.bz2', 'zlib-1.2.7-0.tar.bz2'
               }

    assert r.install(['mypackage']) == r.install(['mypackage 1.0']) == [
        Dist(dname) for dname in [
            'mypackage-1.0-py33_0.tar.bz2',
            'nose-1.3.0-py33_0.tar.bz2',
            'openssl-1.0.1c-0.tar.bz2',
            'python-3.3.2-0.tar.bz2',
            'readline-6.2-0.tar.bz2',
            'sqlite-3.7.13-0.tar.bz2',
            'system-5.8-1.tar.bz2',
            'tk-8.5.13-0.tar.bz2',
            'zlib-1.2.7-0.tar.bz2',
        ]
    ]
    assert raises(NoPackagesFoundError, lambda: r.install(['mypackage 1.1']))

    assert r.install(['anotherpackage 1.0']) == [
        Dist(dname) for dname in [
            'anotherpackage-1.0-py33_0.tar.bz2',
            'mypackage-1.0-py33_0.tar.bz2',
            'nose-1.3.0-py33_0.tar.bz2',
            'openssl-1.0.1c-0.tar.bz2',
            'python-3.3.2-0.tar.bz2',
            'readline-6.2-0.tar.bz2',
            'sqlite-3.7.13-0.tar.bz2',
            'system-5.8-1.tar.bz2',
            'tk-8.5.13-0.tar.bz2',
            'zlib-1.2.7-0.tar.bz2',
        ]
    ]

    # If recursive checking is working correctly, this will give
    # anotherpackage 2.0, not anotherpackage 1.0
    assert r.install(['anotherpackage']) == [
        Dist(dname) for dname in [
            'anotherpackage-2.0-py33_0.tar.bz2',
            'mypackage-1.0-py33_0.tar.bz2',
            'nose-1.3.0-py33_0.tar.bz2',
            'openssl-1.0.1c-0.tar.bz2',
            'python-3.3.2-0.tar.bz2',
            'readline-6.2-0.tar.bz2',
            'sqlite-3.7.13-0.tar.bz2',
            'system-5.8-1.tar.bz2',
            'tk-8.5.13-0.tar.bz2',
            'zlib-1.2.7-0.tar.bz2',
        ]
    ]
Example #4
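# Shared CLI implementation behind `conda install`, `conda update`, and
# `conda create`: validates arguments, collects specs, fetches the package
# index, builds an action plan, and executes it. Relies on module-level
# helpers (common, ci, config, plan, pscheck, ...) imported elsewhere in
# the module.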
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create
    """
    newenv = bool(command == 'create')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix, json=args.json)

    if command == 'update':
        if args.all:
            if args.packages:
                common.error_and_exit("""--all cannot be used with packages""",
                                      json=args.json,
                                      error_type="ValueError")
        else:
            if len(args.packages) == 0:
                common.error_and_exit("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix,
                                      json=args.json,
                                      error_type="ValueError")

    if command == 'update':
        linked = ci.linked(prefix)
        for name in args.packages:
            common.arg2spec(name, json=args.json)
            if '=' in name:
                common.error_and_exit("Invalid package name: '%s'" % (name),
                                      json=args.json,
                                      error_type="ValueError")
            if name not in set(ci.name_dist(d) for d in linked):
                common.error_and_exit("package '%s' is not installed in %s" %
                                      (name, prefix),
                                      json=args.json,
                                      error_type="ValueError")

    if newenv and args.clone:
        if args.packages:
            common.error_and_exit('did not expect any arguments for --clone',
                                  json=args.json,
                                  error_type="ValueError")
        clone(args.clone, prefix, json=args.json, quiet=args.quiet)
        touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)
        return

    if newenv and not args.no_default_packages:
        default_packages = config.create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in config.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    specs = []
    if args.file:
        specs.extend(common.specs_from_url(args.file, json=args.json))
    elif getattr(args, 'all', False):
        linked = ci.linked(prefix)
        for pkg in linked:
            name, ver, build = pkg.rsplit('-', 2)
            if name in getattr(args, '_skip', []):
                continue
            if name == 'python' and ver.startswith('2'):
                # Oh Python 2...
                specs.append('%s >=%s,<3' % (name, ver))
            else:
                specs.append('%s >=%s' % (name, ver))
    specs.extend(common.specs_from_args(args.packages, json=args.json))

    if command == 'install' and args.revision:
        get_revision(args.revision, json=args.json)
    else:
        common.check_specs(prefix, specs, json=args.json)

    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit("you need to have 'conda-build >= 1.7.1' installed"
                                  " to use the --use-local option",
                                  json=args.json,
                                  error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        index = common.get_index_trap(channel_urls=[url_path(croot)] + list(channel_urls),
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      unknown=args.unknown,
                                      json=args.json)
    else:
        index = common.get_index_trap(channel_urls=channel_urls, prepend=not
                                      args.override_channels,
                                      use_cache=args.use_index_cache,
                                      unknown=args.unknown, json=args.json)

    # Don't update packages that are already up-to-date
    if command == 'update' and not args.all:
        r = Resolve(index)
        orig_packages = args.packages[:]
        for name in orig_packages:
            installed_metadata = [ci.is_linked(prefix, dist)
                                  for dist in linked]
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            build_inst = [m['build_number'] for m in installed_metadata if
                          m['name'] == name]

            try:
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
            except AssertionError as e:
                if args.json:
                    common.exception_and_exit(e, json=True)
                else:
                    raise

            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]

            if latest.version == vers_inst[0] and latest.build_number == build_inst[0]:
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import print_packages

            if not args.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(message='All requested packages already installed.')
            return

    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            install_tar(prefix, tar_path, verbose=not args.quiet)
            return

    # handle explicit installs of conda packages
    if args.packages and all(s.endswith('.tar.bz2') for s in args.packages):
        from conda.misc import install_local_packages
        install_local_packages(prefix, args.packages, verbose=not args.quiet)
        return

    if any(s.endswith('.tar.bz2') for s in args.packages):
        common.error_and_exit("cannot mix specifications with conda package filenames",
                              json=args.json,
                              error_type="ValueError")

    if args.force:
        args.no_deps = True

    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                common.error_and_exit("Error: could not create directory: %s" % prefix,
                                      json=args.json,
                                      error_type="OSError")
        else:
            common.error_and_exit("""\
environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix,
                                  json=args.json,
                                  error_type="NoEnvironmentFound")

    try:
        if command == 'install' and args.revision:
            actions = plan.revert_actions(prefix, get_revision(args.revision))
        else:
            actions = plan.install_actions(prefix, index, specs, force=args.force,
                                           only_names=only_names, pinned=args.pinned, minimal_hint=args.alt_hint)
    except NoPackagesFound as e:
        error_message = e.args[0]

        if command == 'update' and args.all:
            # Packages not found here just means they were installed but
            # cannot be found any more. Just skip them.
            if not args.json:
                print("Warning: %s, skipping" % error_message)
            else:
                # Not sure what to do here
                pass
            args._skip = getattr(args, '_skip', [])
            args._skip.extend([i.split()[0] for i in e.pkgs])
            return install(args, parser, command=command)
        else:
            packages = {index[fn]['name'] for fn in index}

            for pkg in e.pkgs:
                close = get_close_matches(pkg, packages, cutoff=0.7)
                if close:
                    error_message += "\n\nDid you mean one of these?\n\n    %s" % (', '.join(close))
                error_message += '\n\nYou can search for this package on Binstar with'
                error_message += '\n\n    binstar search -t conda %s' % pkg
                binstar = find_executable('binstar', include_others=False)
                if not binstar:
                    error_message += '\n\nYou may need to install the Binstar command line client with'
                    error_message += '\n\n    conda install binstar'
            common.error_and_exit(error_message, json=args.json)
    except SystemExit as e:
        # Unsatisfiable package specifications/no such revision/import error
        error_type = 'UnsatisfiableSpecifications'
        if e.args and 'could not import' in e.args[0]:
            error_type = 'ImportError'
        common.exception_and_exit(e, json=args.json, newline=True,
                                  error_text=False,
                                  error_type=error_type)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import print_packages

        if not args.json:
            regex = '^(%s)$' % '|'.join(spec_names)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(message='All requested packages already installed.')
        return

    if not args.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    if not args.json:
        if not pscheck.main(args):
            common.confirm_yn(args)
    else:
        if (sys.platform == 'win32' and not args.force_pscheck and
            not pscheck.check_processes(verbose=False)):
            common.error_and_exit("Cannot continue operation while processes "
                                  "from packages are running without --force-pscheck.",
                                  json=True,
                                  error_type="ProcessesStillRunning")
        elif args.dry_run:
            common.stdout_json_success(actions=actions, dry_run=True)
            sys.exit(0)

    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
        except RuntimeError as e:
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            common.exception_and_exit(e, error_type=error_type, json=args.json)
        except SystemExit as e:
            common.exception_and_exit(e, json=args.json)

    if newenv:
        touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)

    if args.json:
        common.stdout_json_success(actions=actions)
Example #5
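# Test that the solver selects 'mkl'-featured builds only when the 'mkl@'
# feature is explicitly requested in the specs.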
def test_no_features():
    # Without this, there would be another solution including 'scipy-0.11.0-np16py26_p3.tar.bz2'.
    assert r.install(['python 2.6*', 'numpy 1.6*', 'scipy 0.11*'],
                     returnall=True) == [[
                         Dist(add_defaults_if_no_channel(fname)) for fname in [
                             'numpy-1.6.2-py26_4.tar.bz2',
                             'openssl-1.0.1c-0.tar.bz2',
                             'python-2.6.8-6.tar.bz2',
                             'readline-6.2-0.tar.bz2',
                             'scipy-0.11.0-np16py26_3.tar.bz2',
                             'sqlite-3.7.13-0.tar.bz2',
                             'system-5.8-1.tar.bz2',
                             'tk-8.5.13-0.tar.bz2',
                             'zlib-1.2.7-0.tar.bz2',
                         ]
                     ]]

    assert r.install(
        ['python 2.6*', 'numpy 1.6*', 'scipy 0.11*', 'mkl@'],
        returnall=True) == [[
            Dist(add_defaults_if_no_channel(fname)) for fname in [
                'mkl-rt-11.0-p0.tar.bz2',  # This,
                'numpy-1.6.2-py26_p4.tar.bz2',  # this,
                'openssl-1.0.1c-0.tar.bz2',
                'python-2.6.8-6.tar.bz2',
                'readline-6.2-0.tar.bz2',
                'scipy-0.11.0-np16py26_p3.tar.bz2',  # and this are different.
                'sqlite-3.7.13-0.tar.bz2',
                'system-5.8-1.tar.bz2',
                'tk-8.5.13-0.tar.bz2',
                'zlib-1.2.7-0.tar.bz2',
            ]
        ]]

    index2 = index.copy()
    index2["defaults::pandas-0.12.0-np16py27_0.tar.bz2"] = IndexRecord(
        **{
            "channel": "defaults",
            "subdir": context.subdir,
            "md5": "0123456789",
            "fn": "doesnt-matter-here",
            "build": "np16py27_0",
            "build_number": 0,
            "depends": ["dateutil", "numpy 1.6*", "python 2.7*", "pytz"],
            "name": "pandas",
            "requires": ["dateutil 1.5", "numpy 1.6", "python 2.7", "pytz"],
            "version": "0.12.0"
        })
    # Make it want to choose the pro version by having it be newer.
    index2["defaults::numpy-1.6.2-py27_p5.tar.bz2"] = IndexRecord(
        **{
            "channel": "defaults",
            "subdir": context.subdir,
            "md5": "0123456789",
            "fn": "doesnt-matter-here",
            "build": "py27_p5",
            "build_number": 5,
            "depends": ["mkl-rt 11.0", "python 2.7*"],
            "features": "mkl",
            "name": "numpy",
            "pub_date": "2013-04-29",
            "requires": ["mkl-rt 11.0", "python 2.7"],
            "version": "1.6.2"
        })

    index2 = {Dist(key): value for key, value in iteritems(index2)}
    r2 = Resolve(index2)

    # This should not pick any mkl packages (the difference here is that none
    # of the specs directly have mkl versions)
    assert r2.solve(['pandas 0.12.0 np16py27_0', 'python 2.7*'],
                    returnall=True) == [[
                        Dist(add_defaults_if_no_channel(fname)) for fname in [
                            'dateutil-2.1-py27_1.tar.bz2',
                            'numpy-1.6.2-py27_4.tar.bz2',
                            'openssl-1.0.1c-0.tar.bz2',
                            'pandas-0.12.0-np16py27_0.tar.bz2',
                            'python-2.7.5-0.tar.bz2',
                            'pytz-2013b-py27_0.tar.bz2',
                            'readline-6.2-0.tar.bz2',
                            'six-1.3.0-py27_0.tar.bz2',
                            'sqlite-3.7.13-0.tar.bz2',
                            'system-5.8-1.tar.bz2',
                            'tk-8.5.13-0.tar.bz2',
                            'zlib-1.2.7-0.tar.bz2',
                        ]
                    ]]

    assert r2.solve(
        ['pandas 0.12.0 np16py27_0', 'python 2.7*', 'mkl@'],
        returnall=True)[0] == [[
            Dist(add_defaults_if_no_channel(fname)) for fname in [
                'dateutil-2.1-py27_1.tar.bz2',
                'mkl-rt-11.0-p0.tar.bz2',  # This
                'numpy-1.6.2-py27_p5.tar.bz2',  # and this are different.
                'openssl-1.0.1c-0.tar.bz2',
                'pandas-0.12.0-np16py27_0.tar.bz2',
                'python-2.7.5-0.tar.bz2',
                'pytz-2013b-py27_0.tar.bz2',
                'readline-6.2-0.tar.bz2',
                'six-1.3.0-py27_0.tar.bz2',
                'sqlite-3.7.13-0.tar.bz2',
                'system-5.8-1.tar.bz2',
                'tk-8.5.13-0.tar.bz2',
                'zlib-1.2.7-0.tar.bz2',
            ]
        ]][0]
Example #6
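# `conda search`-style command: filters the package index by regex, MatchSpec,
# or reverse dependency, and prints matches as a table or emits them as JSON.
# Relies on module-level names (context, MatchSpec, get_index, arg2spec, ...)
# imported elsewhere in the module.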
def execute_search(args, parser):
    import re
    from conda.resolve import Resolve

    if args.reverse_dependency:
        if not args.regex:
            parser.error("--reverse-dependency requires at least one package name")
        if args.spec:
            parser.error("--reverse-dependency does not work with --spec")

    pat = None
    ms = None
    if args.regex:
        if args.spec:
            ms = MatchSpec(arg2spec(args.regex))
        else:
            regex = args.regex
            if args.full_name:
                regex = r'^%s$' % regex
            try:
                pat = re.compile(regex, re.I)
            except re.error as e:
                raise CommandArgumentError("Failed to compile regex pattern for "
                                           "search: %(regex)s\n"
                                           "regex error: %(regex_error)s",
                                           regex=regex, regex_error=repr(e))

    prefix = context.prefix_w_legacy_search

    from ..core.linked_data import linked as linked_data
    from ..core.package_cache import PackageCache

    linked = linked_data(prefix)
    extracted = set(pc_entry.dist.name for pc_entry in PackageCache.get_all_extracted_entries())

    # XXX: Make this work with more than one platform
    platform = args.platform or ''
    if platform and platform != context.subdir:
        args.unknown = False
    ensure_use_local(args)
    ensure_override_channels_requires_channel(args, dashc=False)
    index = get_index(channel_urls=context.channels, prepend=not args.override_channels,
                      platform=args.platform, use_local=args.use_local,
                      use_cache=args.use_index_cache, prefix=None,
                      unknown=args.unknown)

    r = Resolve(index)

    if args.canonical:
        json = []
    else:
        json = {}

    names = []
    for name in sorted(r.groups):
        if '@' in name:
            continue
        res = []
        if args.reverse_dependency:
            res = [dist for dist in r.get_dists_for_spec(name)
                   if any(pat.search(dep.name) for dep in r.ms_depends(dist))]
        elif ms is not None:
            if ms.name == name:
                res = r.get_dists_for_spec(ms)
        elif pat is None or pat.search(name):
            res = r.get_dists_for_spec(name)
        if res:
            names.append((name, res))

    for name, pkgs in names:
        disp_name = name

        if args.names_only and not args.outdated:
            print(name)
            continue

        if not args.canonical:
            json[name] = []

        if args.outdated:
            vers_inst = [dist.quad[1] for dist in linked if dist.quad[0] == name]
            if not vers_inst:
                continue
            assert len(vers_inst) == 1, name
            if not pkgs:
                continue
            latest = pkgs[-1]
            if latest.version == vers_inst[0]:
                continue
            if args.names_only:
                print(name)
                continue

        for dist in pkgs:
            index_record = r.index[dist]
            if args.canonical:
                if not context.json:
                    print(dist.dist_name)
                else:
                    json.append(dist.dist_name)
                continue
            if platform and platform != context.subdir:
                inst = ' '
            elif dist in linked:
                inst = '*'
            elif dist in extracted:
                inst = '.'
            else:
                inst = ' '

            features = r.features(dist)

            if not context.json:
                print('%-25s %s  %-15s %15s  %-15s %s' % (
                    disp_name, inst,
                    index_record.version,
                    index_record.build,
                    index_record.schannel,
                    disp_features(features),
                ))
                disp_name = ''
            else:
                data = {}
                data.update(index_record.dump())
                data.update({
                    'fn': index_record.fn,
                    'installed': inst == '*',
                    'extracted': inst in '*.',
                    'version': index_record.version,
                    'build': index_record.build,
                    'build_number': index_record.build_number,
                    'channel': index_record.schannel,
                    'full_channel': index_record.channel,
                    'features': list(features),
                    'license': index_record.get('license'),
                    'size': index_record.get('size'),
                    'depends': index_record.get('depends'),
                    'type': index_record.get('type')
                })

                if data['type'] == 'app':
                    data['icon'] = make_icon_url(index_record.info)
                json[name].append(data)

    if context.json:
        stdout_json(json)
Example #7
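# Variant of the nonexistent-deps test (compare Example #3) that uses plain
# dict index records and Resolve.solve()/get_dists() instead of IndexRecord
# and install().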
def test_nonexistent_deps():
    index2 = index.copy()
    index2['mypackage-1.0-py33_0.tar.bz2'] = {
        'build': 'py33_0',
        'build_number': 0,
        'depends': ['nose', 'python 3.3*', 'notarealpackage 2.0*'],
        'name': 'mypackage',
        'requires': ['nose 1.2.1', 'python 3.3'],
        'version': '1.0',
    }
    index2['mypackage-1.1-py33_0.tar.bz2'] = {
        'build': 'py33_0',
        'build_number': 0,
        'depends': ['nose', 'python 3.3*'],
        'name': 'mypackage',
        'requires': ['nose 1.2.1', 'python 3.3'],
        'version': '1.1',
    }
    index2['anotherpackage-1.0-py33_0.tar.bz2'] = {
        'build': 'py33_0',
        'build_number': 0,
        'depends': ['nose', 'mypackage 1.1'],
        'name': 'anotherpackage',
        'requires': ['nose', 'mypackage 1.1'],
        'version': '1.0',
    }
    index2['anotherpackage-2.0-py33_0.tar.bz2'] = {
        'build': 'py33_0',
        'build_number': 0,
        'depends': ['nose', 'mypackage'],
        'name': 'anotherpackage',
        'requires': ['nose', 'mypackage'],
        'version': '2.0',
    }
    r = Resolve(index2)

    assert set(r.find_matches(MatchSpec('mypackage'))) == {
        'mypackage-1.0-py33_0.tar.bz2',
        'mypackage-1.1-py33_0.tar.bz2',
    }
    assert set(r.get_dists(['mypackage']).keys()) == {
        'mypackage-1.1-py33_0.tar.bz2',
        'nose-1.1.2-py26_0.tar.bz2',
        'nose-1.1.2-py27_0.tar.bz2',
        'nose-1.1.2-py33_0.tar.bz2',
        'nose-1.2.1-py26_0.tar.bz2',
        'nose-1.2.1-py27_0.tar.bz2',
        'nose-1.2.1-py33_0.tar.bz2',
        'nose-1.3.0-py26_0.tar.bz2',
        'nose-1.3.0-py27_0.tar.bz2',
        'nose-1.3.0-py33_0.tar.bz2',
        'openssl-1.0.1c-0.tar.bz2',
        'python-2.6.8-1.tar.bz2',
        'python-2.6.8-2.tar.bz2',
        'python-2.6.8-3.tar.bz2',
        'python-2.6.8-4.tar.bz2',
        'python-2.6.8-5.tar.bz2',
        'python-2.6.8-6.tar.bz2',
        'python-2.7.3-2.tar.bz2',
        'python-2.7.3-3.tar.bz2',
        'python-2.7.3-4.tar.bz2',
        'python-2.7.3-5.tar.bz2',
        'python-2.7.3-6.tar.bz2',
        'python-2.7.3-7.tar.bz2',
        'python-2.7.4-0.tar.bz2',
        'python-2.7.5-0.tar.bz2',
        'python-3.3.0-2.tar.bz2',
        'python-3.3.0-3.tar.bz2',
        'python-3.3.0-4.tar.bz2',
        'python-3.3.0-pro0.tar.bz2',
        'python-3.3.0-pro1.tar.bz2',
        'python-3.3.1-0.tar.bz2',
        'python-3.3.2-0.tar.bz2',
        'readline-6.2-0.tar.bz2',
        'sqlite-3.7.13-0.tar.bz2',
        'system-5.8-0.tar.bz2',
        'system-5.8-1.tar.bz2',
        'tk-8.5.13-0.tar.bz2',
        'zlib-1.2.7-0.tar.bz2',
    }

    assert set(r.get_dists(['mypackage'], max_only=True).keys()) == {
        'mypackage-1.1-py33_0.tar.bz2',
        'nose-1.3.0-py26_0.tar.bz2',
        'nose-1.3.0-py27_0.tar.bz2',
        'nose-1.3.0-py33_0.tar.bz2',
        'openssl-1.0.1c-0.tar.bz2',
        'python-2.6.8-6.tar.bz2',
        'python-2.7.5-0.tar.bz2',
        'python-3.3.2-0.tar.bz2',
        'readline-6.2-0.tar.bz2',
        'sqlite-3.7.13-0.tar.bz2',
        'system-5.8-1.tar.bz2',
        'tk-8.5.13-0.tar.bz2',
        'zlib-1.2.7-0.tar.bz2',
    }

    assert r.solve(['mypackage']) == r.solve(['mypackage 1.1']) == [
        'mypackage-1.1-py33_0.tar.bz2',
        'nose-1.3.0-py33_0.tar.bz2',
        'openssl-1.0.1c-0.tar.bz2',
        'python-3.3.2-0.tar.bz2',
        'readline-6.2-0.tar.bz2',
        'sqlite-3.7.13-0.tar.bz2',
        'system-5.8-1.tar.bz2',
        'tk-8.5.13-0.tar.bz2',
        'zlib-1.2.7-0.tar.bz2',
    ]
    assert raises(NoPackagesFound, lambda: r.solve(['mypackage 1.0']))

    assert r.solve(['anotherpackage 1.0']) == [
        'anotherpackage-1.0-py33_0.tar.bz2',
        'mypackage-1.1-py33_0.tar.bz2',
        'nose-1.3.0-py33_0.tar.bz2',
        'openssl-1.0.1c-0.tar.bz2',
        'python-3.3.2-0.tar.bz2',
        'readline-6.2-0.tar.bz2',
        'sqlite-3.7.13-0.tar.bz2',
        'system-5.8-1.tar.bz2',
        'tk-8.5.13-0.tar.bz2',
        'zlib-1.2.7-0.tar.bz2',
    ]

    assert r.solve(['anotherpackage']) == [
        'anotherpackage-2.0-py33_0.tar.bz2',
        'mypackage-1.1-py33_0.tar.bz2',
        'nose-1.3.0-py33_0.tar.bz2',
        'openssl-1.0.1c-0.tar.bz2',
        'python-3.3.2-0.tar.bz2',
        'readline-6.2-0.tar.bz2',
        'sqlite-3.7.13-0.tar.bz2',
        'system-5.8-1.tar.bz2',
        'tk-8.5.13-0.tar.bz2',
        'zlib-1.2.7-0.tar.bz2',
    ]

    # This time, the latest version is messed up
    index3 = index.copy()
    index3['mypackage-1.1-py33_0.tar.bz2'] = {
        'build': 'py33_0',
        'build_number': 0,
        'depends': ['nose', 'python 3.3*', 'notarealpackage 2.0*'],
        'name': 'mypackage',
        'requires': ['nose 1.2.1', 'python 3.3'],
        'version': '1.1',
    }
    index3['mypackage-1.0-py33_0.tar.bz2'] = {
        'build': 'py33_0',
        'build_number': 0,
        'depends': ['nose', 'python 3.3*'],
        'name': 'mypackage',
        'requires': ['nose 1.2.1', 'python 3.3'],
        'version': '1.0',
    }
    index3['anotherpackage-1.0-py33_0.tar.bz2'] = {
        'build': 'py33_0',
        'build_number': 0,
        'depends': ['nose', 'mypackage 1.0'],
        'name': 'anotherpackage',
        'requires': ['nose', 'mypackage 1.0'],
        'version': '1.0',
    }
    index3['anotherpackage-2.0-py33_0.tar.bz2'] = {
        'build': 'py33_0',
        'build_number': 0,
        'depends': ['nose', 'mypackage'],
        'name': 'anotherpackage',
        'requires': ['nose', 'mypackage'],
        'version': '2.0',
    }
    r = Resolve(index3)

    assert set(r.find_matches(MatchSpec('mypackage'))) == {
        'mypackage-1.0-py33_0.tar.bz2',
        'mypackage-1.1-py33_0.tar.bz2',
    }
    assert set(r.get_dists(['mypackage']).keys()) == {
        'mypackage-1.0-py33_0.tar.bz2',
        'nose-1.1.2-py26_0.tar.bz2',
        'nose-1.1.2-py27_0.tar.bz2',
        'nose-1.1.2-py33_0.tar.bz2',
        'nose-1.2.1-py26_0.tar.bz2',
        'nose-1.2.1-py27_0.tar.bz2',
        'nose-1.2.1-py33_0.tar.bz2',
        'nose-1.3.0-py26_0.tar.bz2',
        'nose-1.3.0-py27_0.tar.bz2',
        'nose-1.3.0-py33_0.tar.bz2',
        'openssl-1.0.1c-0.tar.bz2',
        'python-2.6.8-1.tar.bz2',
        'python-2.6.8-2.tar.bz2',
        'python-2.6.8-3.tar.bz2',
        'python-2.6.8-4.tar.bz2',
        'python-2.6.8-5.tar.bz2',
        'python-2.6.8-6.tar.bz2',
        'python-2.7.3-2.tar.bz2',
        'python-2.7.3-3.tar.bz2',
        'python-2.7.3-4.tar.bz2',
        'python-2.7.3-5.tar.bz2',
        'python-2.7.3-6.tar.bz2',
        'python-2.7.3-7.tar.bz2',
        'python-2.7.4-0.tar.bz2',
        'python-2.7.5-0.tar.bz2',
        'python-3.3.0-2.tar.bz2',
        'python-3.3.0-3.tar.bz2',
        'python-3.3.0-4.tar.bz2',
        'python-3.3.0-pro0.tar.bz2',
        'python-3.3.0-pro1.tar.bz2',
        'python-3.3.1-0.tar.bz2',
        'python-3.3.2-0.tar.bz2',
        'readline-6.2-0.tar.bz2',
        'sqlite-3.7.13-0.tar.bz2',
        'system-5.8-0.tar.bz2',
        'system-5.8-1.tar.bz2',
        'tk-8.5.13-0.tar.bz2',
        'zlib-1.2.7-0.tar.bz2',
    }

    assert raises(NoPackagesFound,
                  lambda: r.get_dists(['mypackage'], max_only=True))

    assert r.solve(['mypackage']) == r.solve(['mypackage 1.0']) == [
        'mypackage-1.0-py33_0.tar.bz2',
        'nose-1.3.0-py33_0.tar.bz2',
        'openssl-1.0.1c-0.tar.bz2',
        'python-3.3.2-0.tar.bz2',
        'readline-6.2-0.tar.bz2',
        'sqlite-3.7.13-0.tar.bz2',
        'system-5.8-1.tar.bz2',
        'tk-8.5.13-0.tar.bz2',
        'zlib-1.2.7-0.tar.bz2',
    ]
    assert raises(NoPackagesFound, lambda: r.solve(['mypackage 1.1']))

    assert r.solve(['anotherpackage 1.0']) == [
        'anotherpackage-1.0-py33_0.tar.bz2',
        'mypackage-1.0-py33_0.tar.bz2',
        'nose-1.3.0-py33_0.tar.bz2',
        'openssl-1.0.1c-0.tar.bz2',
        'python-3.3.2-0.tar.bz2',
        'readline-6.2-0.tar.bz2',
        'sqlite-3.7.13-0.tar.bz2',
        'system-5.8-1.tar.bz2',
        'tk-8.5.13-0.tar.bz2',
        'zlib-1.2.7-0.tar.bz2',
    ]

    # If recursive checking is working correctly, this will give
    # anotherpackage 2.0, not anotherpackage 1.0
    assert r.solve(['anotherpackage']) == [
        'anotherpackage-2.0-py33_0.tar.bz2',
        'mypackage-1.0-py33_0.tar.bz2',
        'nose-1.3.0-py33_0.tar.bz2',
        'openssl-1.0.1c-0.tar.bz2',
        'python-3.3.2-0.tar.bz2',
        'readline-6.2-0.tar.bz2',
        'sqlite-3.7.13-0.tar.bz2',
        'system-5.8-1.tar.bz2',
        'tk-8.5.13-0.tar.bz2',
        'zlib-1.2.7-0.tar.bz2',
    ]
Example #8
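# Variant of the search command (compare Example #6) built on
# conda.api.get_index and conda.install, with the same regex/spec filtering
# and table/JSON output. Relies on module-level names (common, config,
# make_icon_url) imported elsewhere in the module.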
def execute_search(args, parser):
    import re
    import sys

    from conda.api import get_index
    from conda.resolve import MatchSpec, Resolve

    pat = None
    ms = None
    if args.regex:
        if args.spec:
            ms = MatchSpec(' '.join(args.regex.split('=')))
        else:
            try:
                pat = re.compile(args.regex, re.I)
            except re.error as e:
                common.error_and_exit("%r is not a valid regex pattern (exception: %s)" %
                                      (args.regex, e),
                                      json=args.json,
                                      error_type="ValueError")


    prefix = common.get_prefix(args)

    import conda.config
    import conda.install

    linked = conda.install.linked(prefix)
    extracted = set()
    for pkgs_dir in conda.config.pkgs_dirs:
        extracted.update(conda.install.extracted(pkgs_dir))

    # XXX: Make this work with more than one platform
    platform = args.platform or ''
    if platform and platform != config.subdir:
        args.unknown = False
    common.ensure_override_channels_requires_channel(args, dashc=False,
                                                     json=args.json)
    channel_urls = args.channel or ()
    index = common.get_index_trap(channel_urls=channel_urls, prepend=not
                                  args.override_channels, platform=args.platform,
                                  use_cache=args.use_index_cache,
                                  unknown=args.unknown, json=args.json)

    r = Resolve(index)

    if args.canonical:
        json = []
    else:
        json = {}

    for name in sorted(r.groups):
        disp_name = name
        if pat and pat.search(name) is None:
            continue
        if ms and name != ms.name:
            continue

        if ms:
            ms_name = ms
        else:
            ms_name = MatchSpec(name)

        if not args.canonical:
            json[name] = []

        if args.outdated:
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            if not vers_inst:
                continue
            assert len(vers_inst) == 1, name
            pkgs = sorted(r.get_pkgs(ms_name))
            if not pkgs:
                continue
            latest = pkgs[-1]
            if latest.version == vers_inst[0]:
                continue

        for pkg in sorted(r.get_pkgs(ms_name)):
            dist = pkg.fn[:-8]
            if args.canonical:
                if not args.json:
                    print(dist)
                else:
                    json.append(dist)
                continue
            if dist in linked:
                inst = '*'
            elif dist in extracted:
                inst = '.'
            else:
                inst = ' '

            if not args.json:
                print('%-25s %s  %-15s %15s  %-15s %s' % (
                    disp_name, inst,
                    pkg.version,
                    pkg.build,
                    config.canonical_channel_name(pkg.channel),
                    common.disp_features(r.features(pkg.fn)),
                    ))
                disp_name = ''
            else:
                data = {}
                data.update(pkg.info)
                data.update({
                    'fn': pkg.fn,
                    'installed': inst == '*',
                    'extracted': inst in '*.',
                    'version': pkg.version,
                    'build': pkg.build,
                    'build_number': pkg.build_number,
                    'channel': config.canonical_channel_name(pkg.channel),
                    'full_channel': pkg.channel,
                    'features': list(r.features(pkg.fn)),
                    'license': pkg.info.get('license'),
                    'size': pkg.info.get('size'),
                    'depends': pkg.info.get('depends'),
                    'type': pkg.info.get('type')
                })

                if data['type'] == 'app':
                    data['icon'] = make_icon_url(pkg.info)
                json[name].append(data)

    if args.json:
        common.stdout_json(json)
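
A minimal sketch of the spec-vs-regex branching at the top of this example (argument values are made up, and conda must be importable): with --spec, an argument such as numpy=1.7 is rewritten into the MatchSpec string 'numpy 1.7'; a plain argument is compiled as a case-insensitive regex instead.

import re
from conda.resolve import MatchSpec

spec_arg = 'numpy=1.7'                            # hypothetical --spec argument
ms = MatchSpec(' '.join(spec_arg.split('=')))     # becomes MatchSpec('numpy 1.7')

regex_arg = 'num.*'                               # hypothetical plain argument
pat = re.compile(regex_arg, re.I)
print(ms, bool(pat.search('NumPy')))              # the match is case-insensitive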
Example #9
def get_index_must_unfreeze(subdir=context.subdir):
    repodata = {
        "info": {
            "subdir": subdir,
            "arch": context.arch_name,
            "platform": context.platform,
        },
        "packages": {
            "foobar-1.0-0.tar.bz2": {
                "build": "0",
                "build_number": 0,
                "depends": ["libbar 2.0.*", "libfoo 1.0.*"],
                "md5": "11ec1194bcc56b9a53c127142a272772",
                "name": "foobar",
                "timestamp": 1562861325613,
                "version": "1.0"
            },
            "foobar-2.0-0.tar.bz2": {
                "build": "0",
                "build_number": 0,
                "depends": ["libbar 2.0.*", "libfoo 2.0.*"],
                "md5": "f8eb5a7fa1ff6dead4e360631a6cd048",
                "name": "foobar",
                "version": "2.0"
            },
            "libbar-1.0-0.tar.bz2": {
                "build": "0",
                "build_number": 0,
                "depends": [],
                "md5": "f51f4d48a541b7105b5e343704114f0f",
                "name": "libbar",
                "timestamp": 1562858881022,
                "version": "1.0"
            },
            "libbar-2.0-0.tar.bz2": {
                "build": "0",
                "build_number": 0,
                "depends": [],
                "md5": "27f4e717ed263f909074f64d9cbf935d",
                "name": "libbar",
                "timestamp": 1562858881748,
                "version": "2.0"
            },
            "libfoo-1.0-0.tar.bz2": {
                "build": "0",
                "build_number": 0,
                "depends": [],
                "md5": "ad7c088566ffe2389958daedf8ff312c",
                "name": "libfoo",
                "timestamp": 1562858763881,
                "version": "1.0"
            },
            "libfoo-2.0-0.tar.bz2": {
                "build": "0",
                "build_number": 0,
                "depends": [],
                "md5": "daf7af7086d8f22be49ae11bdc41f332",
                "name": "libfoo",
                "timestamp": 1562858836924,
                "version": "2.0"
            },
            "qux-1.0-0.tar.bz2": {
                "build": "0",
                "build_number": 0,
                "depends": ["libbar 2.0.*", "libfoo 1.0.*"],
                "md5": "18604cbe4f789fe853232eef4babd4f9",
                "name": "qux",
                "timestamp": 1562861393808,
                "version": "1.0"
            },
            "qux-2.0-0.tar.bz2": {
                "build": "0",
                "build_number": 0,
                "depends": ["libbar 1.0.*", "libfoo 2.0.*"],
                "md5": "892aa4b9ec64b67045a46866ef1ea488",
                "name": "qux",
                "timestamp": 1562861394828,
                "version": "2.0"
            }
        }
    }
    channel = Channel('https://conda.anaconda.org/channel-freeze/%s' % subdir)
    sd = SubdirData(channel)
    with env_var("CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY",
                 "false",
                 stack_callback=conda_tests_ctxt_mgmt_def_pol):
        sd._process_raw_repodata_str(json.dumps(repodata))
    sd._loaded = True
    SubdirData._cache_[channel.url(with_credentials=True)] = sd

    index = {prec: prec for prec in sd._package_records}
    r = Resolve(index, channels=(channel, ))

    return index, r
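
A small usage sketch for the fixture above (purely illustrative, and it relies on the same module-level imports as the fixture): once the index is built, the candidate records for a package can be read straight out of it by name.

index, r = get_index_must_unfreeze('linux-64')
foobar = [prec for prec in index if prec.name == 'foobar']
for prec in sorted(foobar, key=lambda p: p.version):
    print(prec.name, prec.version, prec.build)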
Example #10
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create
    """
    newenv = bool(command == 'create')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix)

    if command == 'update':
        if args.all:
            if args.packages:
                sys.exit("""Error: --all cannot be used with packages""")
        else:
            if len(args.packages) == 0:
                sys.exit("""Error: no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)

    if command == 'update':
        linked = ci.linked(prefix)
        for name in args.packages:
            common.arg2spec(name)
            if '=' in name:
                sys.exit("Invalid package name: '%s'" % (name))
            if name not in set(ci.name_dist(d) for d in linked):
                sys.exit("Error: package '%s' is not installed in %s" %
                         (name, prefix))

    if newenv and args.clone:
        if args.packages:
            sys.exit('Error: did not expect any arguments for --clone')
        clone(args.clone, prefix)
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
        return

    if newenv and not args.no_default_packages:
        default_packages = config.create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in config.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    if args.file:
        specs = common.specs_from_url(args.file)
    elif getattr(args, 'all', False):
        specs = []
        linked = ci.linked(prefix)
        for pkg in linked:
            name, ver, build = pkg.rsplit('-', 2)
            if name == 'python' and ver.startswith('2'):
                # Oh Python 2...
                specs.append('%s >=%s,<3' % (name, ver))
            else:
                specs.append('%s >=%s' % (name, ver))
    else:
        specs = common.specs_from_args(args.packages)

    if command == 'install' and args.revision:
        get_revision(args.revision)
    else:
        common.check_specs(prefix, specs)

    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build import config as build_config
        except ImportError:
            sys.exit("Error: you need to have 'conda-build' installed"
                     " to use the --use-local option")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        index = get_index([url_path(build_config.croot)],
                          use_cache=args.use_index_cache,
                          unknown=args.unknown)
    else:
        index = get_index(channel_urls=channel_urls,
                          prepend=not args.override_channels,
                          use_cache=args.use_index_cache,
                          unknown=args.unknown)

    # Don't update packages that are already up-to-date
    if command == 'update' and not args.all:
        r = Resolve(index)
        orig_packages = args.packages[:]
        for name in orig_packages:
            vers_inst = [
                dist.rsplit('-', 2)[1] for dist in linked
                if dist.rsplit('-', 2)[0] == name
            ]
            build_inst = [
                dist.rsplit('-', 2)[2].rsplit('.tar.bz2', 1)[0]
                for dist in linked if dist.rsplit('-', 2)[0] == name
            ]
            assert len(vers_inst) == 1, name
            assert len(build_inst) == 1, name
            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]
            if latest.version == vers_inst[0] and latest.build == build_inst[0]:
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import list_packages

            regex = '^(%s)$' % '|'.join(orig_packages)
            print('# All requested packages already installed.')
            list_packages(prefix, regex)
            return

    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            install_tar(prefix, tar_path, verbose=not args.quiet)
            return

    # handle explicit installs of conda packages
    if args.packages and all(s.endswith('.tar.bz2') for s in args.packages):
        from conda.misc import install_local_packages
        install_local_packages(prefix, args.packages, verbose=not args.quiet)
        return

    if any(s.endswith('.tar.bz2') for s in args.packages):
        sys.exit("cannot mix specifications with conda package filenames")

    if args.force:
        args.no_deps = True

    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                sys.exit("Error: could not create directory: %s" % prefix)
        else:
            sys.exit("""\
Error: environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix)

    if command == 'install' and args.revision:
        actions = plan.revert_actions(prefix, get_revision(args.revision))
    else:
        actions = plan.install_actions(prefix,
                                       index,
                                       specs,
                                       force=args.force,
                                       only_names=only_names)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import list_packages

        regex = '^(%s)$' % '|'.join(spec_names)
        print('# All requested packages already installed.')
        list_packages(prefix, regex)
        return

    print()
    print("Package plan for installation in environment %s:" % prefix)
    plan.display_actions(actions, index)
    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    if not pscheck.main(args):
        common.confirm_yn(args)

    plan.execute_actions(actions, index, verbose=not args.quiet)
    if newenv:
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
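
An isolated illustration of the spec construction used by the --all branch above: a linked dist name is split into name/version/build, and an installed Python 2 is pinned below 3 so the update cannot jump across the major version.

def spec_for(dist):
    name, ver, build = dist.rsplit('-', 2)
    if name == 'python' and ver.startswith('2'):
        return '%s >=%s,<3' % (name, ver)      # keep the environment on Python 2
    return '%s >=%s' % (name, ver)

print(spec_for('python-2.7.9-1'))       # python >=2.7.9,<3
print(spec_for('numpy-1.9.2-py27_0'))   # numpy >=1.9.2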
Example #11
def execute_search(args, parser):
    import re
    from conda.resolve import Resolve

    if args.reverse_dependency:
        if not args.regex:
            parser.error(
                "--reverse-dependency requires at least one package name")
        if args.spec:
            parser.error("--reverse-dependency does not work with --spec")

    pat = None
    ms = None
    if args.regex:
        if args.spec:
            ms = ' '.join(args.regex.split('='))
        else:
            regex = args.regex
            if args.full_name:
                regex = r'^%s$' % regex
            try:
                pat = re.compile(regex, re.I)
            except re.error as e:
                raise CondaValueError(
                    "'%s' is not a valid regex pattern (exception: %s)" %
                    (regex, e), args.json)

    prefix = context.prefix_w_legacy_search

    import conda.install

    linked = conda.install.linked(prefix)
    extracted = conda.install.extracted()

    # XXX: Make this work with more than one platform
    platform = args.platform or ''
    if platform and platform != context.subdir:
        args.unknown = False
    ensure_use_local(args)
    ensure_override_channels_requires_channel(args, dashc=False)
    channel_urls = args.channel or ()
    index = get_index(channel_urls=channel_urls,
                      prepend=not args.override_channels,
                      platform=args.platform,
                      use_local=args.use_local,
                      use_cache=args.use_index_cache,
                      prefix=prefix,
                      unknown=args.unknown)

    r = Resolve(index)

    if args.canonical:
        json = []
    else:
        json = {}

    names = []
    for name in sorted(r.groups):
        if '@' in name:
            continue
        if args.reverse_dependency:
            ms_name = ms
            for pkg in r.groups[name]:
                for dep in r.ms_depends(pkg):
                    if pat.search(dep.name):
                        names.append((name, Package(pkg, r.index[pkg])))
        else:
            if pat and pat.search(name) is None:
                continue
            if ms and name != ms.split()[0]:
                continue

            if ms:
                ms_name = ms
            else:
                ms_name = name

            pkgs = sorted(r.get_pkgs(ms_name))
            names.append((name, pkgs))

    if args.reverse_dependency:
        new_names = []
        old = None
        for name, pkg in sorted(names, key=lambda x: (x[0], x[1].name, x[1])):
            if name == old:
                new_names[-1][1].append(pkg)
            else:
                new_names.append((name, [pkg]))
            old = name
        names = new_names

    for name, pkgs in names:
        if args.reverse_dependency:
            disp_name = pkgs[0].name
        else:
            disp_name = name

        if args.names_only and not args.outdated:
            print(name)
            continue

        if not args.canonical:
            json[name] = []

        if args.outdated:
            vers_inst = [
                dist[1] for dist in map(dist2quad, linked) if dist[0] == name
            ]
            if not vers_inst:
                continue
            assert len(vers_inst) == 1, name
            if not pkgs:
                continue
            latest = pkgs[-1]
            if latest.version == vers_inst[0]:
                continue
            if args.names_only:
                print(name)
                continue

        for pkg in pkgs:
            dist = pkg.fn[:-8]
            if args.canonical:
                if not args.json:
                    print(dist)
                else:
                    json.append(dist)
                continue
            if platform and platform != context.subdir:
                inst = ' '
            elif dist in linked:
                inst = '*'
            elif dist in extracted:
                inst = '.'
            else:
                inst = ' '

            if not args.json:
                print('%-25s %s  %-15s %15s  %-15s %s' % (
                    disp_name,
                    inst,
                    pkg.version,
                    pkg.build,
                    Channel(pkg.channel).canonical_name,
                    disp_features(r.features(pkg.fn)),
                ))
                disp_name = ''
            else:
                data = {}
                data.update(pkg.info)
                data.update({
                    'fn': pkg.fn,
                    'installed': inst == '*',
                    'extracted': inst in '*.',
                    'version': pkg.version,
                    'build': pkg.build,
                    'build_number': pkg.build_number,
                    'channel': Channel(pkg.channel).canonical_name,
                    'full_channel': pkg.channel,
                    'features': list(r.features(pkg.fn)),
                    'license': pkg.info.get('license'),
                    'size': pkg.info.get('size'),
                    'depends': pkg.info.get('depends'),
                    'type': pkg.info.get('type')
                })

                if data['type'] == 'app':
                    data['icon'] = make_icon_url(pkg.info)
                json[name].append(data)

    if args.json:
        stdout_json(json)
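
The reverse-dependency regrouping above collapses adjacent (name, pkg) pairs that share a name into (name, [pkgs]); a toy run with made-up values:

names = [('scipy', 'pkg1'), ('scipy', 'pkg2'), ('statsmodels', 'pkg3')]
new_names, old = [], None
for name, pkg in names:                  # input is assumed to be sorted by name
    if name == old:
        new_names[-1][1].append(pkg)
    else:
        new_names.append((name, [pkg]))
    old = name
print(new_names)   # [('scipy', ['pkg1', 'pkg2']), ('statsmodels', ['pkg3'])]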
Example #12
def install_actions(prefix,
                    index,
                    specs,
                    force=False,
                    only_names=None,
                    always_copy=False,
                    pinned=True,
                    minimal_hint=False,
                    update_deps=True,
                    prune=False):
    r = Resolve(index)
    linked = r.installed

    if self_update and is_root_prefix(prefix):
        specs.append('conda')

    if pinned:
        pinned_specs = get_pinned_specs(prefix)
        log.debug("Pinned specs=%s" % pinned_specs)
        specs += pinned_specs

    must_have = {}
    if track_features:
        specs.extend(x + '@' for x in track_features)

    pkgs = r.install(specs, linked, update_deps=update_deps)

    for fn in pkgs:
        dist = fn[:-8]
        name = install.name_dist(dist)
        if not name or only_names and name not in only_names:
            continue
        must_have[name] = dist

    if is_root_prefix(prefix):
        for name in foreign:
            if name in must_have:
                del must_have[name]
    elif basename(prefix).startswith('_'):
        # anything (including conda) can be installed into environments
        # starting with '_', mainly to allow conda-build to build conda
        pass
    else:
        # disallow conda from being installed into all other environments
        if 'conda' in must_have or 'conda-env' in must_have:
            sys.exit("Error: 'conda' can only be installed into the "
                     "root environment")

    smh = r.dependency_sort(must_have)

    actions = ensure_linked_actions(smh,
                                    prefix,
                                    index=index if force else None,
                                    force=force,
                                    always_copy=always_copy)

    if actions[inst.LINK]:
        actions[inst.SYMLINK_CONDA] = [root_dir]

    for fkey in sorted(linked):
        dist = fkey[:-8]
        name = install.name_dist(dist)
        replace_existing = name in must_have and dist != must_have[name]
        prune_it = prune and dist not in smh
        if replace_existing or prune_it:
            add_unlink(actions, dist)

    return actions
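
A hypothetical call, assuming an index obtained from get_index() and an existing prefix (neither is part of the snippet above); the returned actions map instruction names to lists of dists, and the 'LINK' key used here is an assumption about that plan format.

# Hypothetical usage -- the prefix, the spec, and `index` are illustrative.
actions = install_actions('/opt/conda/envs/demo', index, ['numpy >=1.7'])
for dist in actions.get('LINK', []):     # assumed key for packages to be linked
    print('would link:', dist)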
Example #13
def clone_env(prefix1, prefix2, verbose=True, quiet=False, fetch_args=None):
    """
    clone existing prefix1 into new prefix2
    """
    untracked_files = untracked(prefix1)

    # Discard conda and any package that depends on it
    drecs = install.linked_data(prefix1)
    filter = {}
    found = True
    while found:
        found = False
        for dist, info in iteritems(drecs):
            name = info['name']
            if name in filter:
                continue
            if name == 'conda':
                filter['conda'] = dist
                found = True
                break
            for dep in info.get('depends', []):
                if MatchSpec(dep).name in filter:
                    filter[name] = dist
                    found = True
    if not quiet and filter:
        print(
            'The following packages cannot be cloned out of the root environment:'
        )
        for pkg in itervalues(filter):
            print(' - ' + pkg)

    # Assemble the URL and channel list
    urls = {}
    index = {}
    for dist, info in iteritems(drecs):
        if info['name'] in filter:
            continue
        url = info.get('url')
        if url is None:
            sys.exit('Error: no URL found for package: %s' % dist)
        _, schannel = url_channel(url)
        index[dist + '.tar.bz2'] = info
        urls[dist] = url

    r = Resolve(index)
    dists = r.dependency_sort(urls.keys())
    urls = [urls[d] for d in dists]

    if verbose:
        print('Packages: %d' % len(dists))
        print('Files: %d' % len(untracked_files))

    for f in untracked_files:
        src = join(prefix1, f)
        dst = join(prefix2, f)
        dst_dir = dirname(dst)
        if islink(dst_dir) or isfile(dst_dir):
            os.unlink(dst_dir)
        if not isdir(dst_dir):
            os.makedirs(dst_dir)
        if islink(src):
            os.symlink(os.readlink(src), dst)
            continue

        try:
            with open(src, 'rb') as fi:
                data = fi.read()
        except IOError:
            continue

        try:
            s = data.decode('utf-8')
            s = s.replace(prefix1, prefix2)
            data = s.encode('utf-8')
        except UnicodeDecodeError:  # data is binary
            pass

        with open(dst, 'wb') as fo:
            fo.write(data)
        shutil.copystat(src, dst)

    actions = explicit(urls,
                       prefix2,
                       verbose=not quiet,
                       force_extract=False,
                       fetch_args=fetch_args)
    return actions, untracked_files
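
A standalone sketch of the path-rewriting step above: text files get the old prefix substituted for the new one, while binary files fail to decode and are copied unchanged.

def rewrite_prefix(data, prefix1, prefix2):
    try:
        s = data.decode('utf-8')
        return s.replace(prefix1, prefix2).encode('utf-8')
    except UnicodeDecodeError:           # binary data: leave untouched
        return data

print(rewrite_prefix(b'#!/envs/old/bin/python\n', '/envs/old', '/envs/new'))
print(rewrite_prefix(b'\xff\xfe\x00', '/envs/old', '/envs/new'))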
Example #14
def create_rpmbuild_for_tag(repo,
                            tag_name,
                            target,
                            config,
                            api_user=None,
                            api_key=None):
    try:
        # Python3...
        from urllib.parse import urlparse
    except ImportError:
        # Python2...
        from urlparse import urlparse

    rpm_prefix = config['rpm']['prefix']
    print("CREATE FOR {}".format(tag_name))
    tag = repo.tags[tag_name]
    # Checkout the tag in a detached head form.
    repo.head.reference = tag.commit
    repo.head.reset(working_tree=True)

    manifest_fname = os.path.join(repo.working_dir, 'env.manifest')
    if not os.path.exists(manifest_fname):
        emsg = "The tag '{}' doesn't have a manifested environment."
        raise ValueError(emsg.format(tag_name))
    with open(manifest_fname, 'r') as fh:
        manifest = sorted(line.strip().split('\t') for line in fh)
        if api_user and api_key:
            # Inject the API user and key into the channel URLs...
            for i, (url, _) in enumerate(manifest):
                parts = urlparse(url)
                api_url = '{}://{}:{}@{}{}'.format(parts.scheme, api_user,
                                                   api_key, parts.netloc,
                                                   parts.path)
                manifest[i][0] = api_url

    create_rpmbuild_for_env(manifest, target, config)

    index = conda.fetch.fetch_index(list(set([url for url, _ in manifest])),
                                    use_cache=False)
    resolver = Resolve(index)

    # To sort, the distributions must match the format of the keys of the
    # index. For example, most will look like `http://channel::pkg`.
    # However, channels on anaconda.org go by their name rather than their
    # URL, i.e. `conda-forge::pkg`.
    dists = []
    for url, pkg in manifest:
        anaconda_url = 'https://conda.anaconda.org/'
        if url.startswith(anaconda_url):
            url = url[len(anaconda_url):]
        dists.append('::'.join([os.path.dirname(url), pkg]))
    sorted_dists = resolver.dependency_sort(dists)
    sorted_pkgs = [dist.split('::')[-1] for dist in sorted_dists]

    spec_fname = os.path.join(repo.working_dir, 'env.spec')
    if not os.path.exists(spec_fname):
        emsg = "The tag '{}' doesn't have an environment specification."
        raise ValueError(emsg.format(tag_name))
    with open(spec_fname, 'r') as fh:
        env_spec = yaml.safe_load(fh).get('env', [])

    env_name, tag = tag_name.split('-', 2)[1:]
    fname = '{}-env-{}-tag-{}.spec'.format(rpm_prefix, env_name, tag)
    with open(os.path.join(target, 'SPECS', fname), 'w') as fh:
        fh.write(
            generate.render_taggedenv(env_name, tag, sorted_pkgs, config,
                                      env_spec))
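
The dist strings handed to resolver.dependency_sort above are built roughly like this (URLs and package names are illustrative): anaconda.org channels are reduced to their bare name, while other channels keep their URL.

import os

def dist_key(url, pkg):
    anaconda_url = 'https://conda.anaconda.org/'
    if url.startswith(anaconda_url):
        url = url[len(anaconda_url):]
    return '::'.join([os.path.dirname(url), pkg])

print(dist_key('https://conda.anaconda.org/conda-forge/linux-64',
               'zlib-1.2.11-0'))   # conda-forge::zlib-1.2.11-0
print(dist_key('http://example.com/channel/linux-64',
               'foo-1.0-0'))       # http://example.com/channel::foo-1.0-0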
Example #15
def get_package_versions(package, offline=False):
    index = get_index(offline=offline)
    r = Resolve(index)
    return r.get_pkgs(package, emptyok=True)
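
Typical use of this helper might look as follows (the package name is illustrative, and the attributes match the Package objects seen in the other examples):

for pkg in get_package_versions('numpy'):
    print(pkg.fn, pkg.version, pkg.build)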
Example #16
def execute(args, parser):
    import os
    from os.path import dirname

    import conda
    from conda.base.context import context
    from conda.models.channel import offline_keep
    from conda.resolve import Resolve
    from conda.api import get_index
    from conda.connection import user_agent

    if args.root:
        if context.json:
            stdout_json({'root_prefix': context.root_dir})
        else:
            print(context.root_dir)
        return

    if args.packages:
        index = get_index()
        r = Resolve(index)
        if context.json:
            stdout_json({
                package: [
                    dump_record(r.index[d])
                    for d in r.get_dists_for_spec(arg2spec(package))
                ]
                for package in args.packages
            })
        else:
            for package in args.packages:
                for dist in r.get_dists_for_spec(arg2spec(package)):
                    pretty_package(dist, r.index[dist])
        return

    options = 'envs', 'system', 'license'

    try:
        from conda.install import linked_data
        root_pkgs = linked_data(context.root_prefix)
    except:
        root_pkgs = None

    try:
        import requests
        requests_version = requests.__version__
    except ImportError:
        requests_version = "could not import"
    except Exception as e:
        requests_version = "Error %s" % e

    try:
        import conda_env
        conda_env_version = conda_env.__version__
    except:
        try:
            cenv = [
                p for p in itervalues(root_pkgs) if p['name'] == 'conda-env'
            ]
            conda_env_version = cenv[0]['version']
        except:
            conda_env_version = "not installed"

    try:
        import conda_build
    except ImportError:
        conda_build_version = "not installed"
    except Exception as e:
        conda_build_version = "Error %s" % e
    else:
        conda_build_version = conda_build.__version__

    channels = context.channels

    if args.unsafe_channels:
        if not context.json:
            print("\n".join(channels))
        else:
            print(json.dumps({"channels": channels}))
        return 0

    channels = list(prioritize_channels(channels).keys())
    if not context.json:
        channels = [
            c + ('' if offline_keep(c) else '  (offline)') for c in channels
        ]
    channels = [mask_anaconda_token(c) for c in channels]

    info_dict = dict(
        platform=context.subdir,
        conda_version=conda.__version__,
        conda_env_version=conda_env_version,
        conda_build_version=conda_build_version,
        root_prefix=context.root_dir,
        conda_prefix=context.conda_prefix,
        conda_private=context.conda_private,
        root_writable=context.root_writable,
        pkgs_dirs=context.pkgs_dirs,
        envs_dirs=context.envs_dirs,
        default_prefix=context.default_prefix,
        channels=channels,
        rc_path=rc_path,
        user_rc_path=user_rc_path,
        sys_rc_path=sys_rc_path,
        # is_foreign=bool(foreign),
        offline=context.offline,
        envs=[],
        python_version='.'.join(map(str, sys.version_info)),
        requests_version=requests_version,
        user_agent=user_agent,
    )
    if not on_win:
        info_dict['UID'] = os.geteuid()
        info_dict['GID'] = os.getegid()

    if args.all or context.json:
        for option in options:
            setattr(args, option, True)

    if (args.all or all(not getattr(args, opt)
                        for opt in options)) and not context.json:
        for key in 'pkgs_dirs', 'envs_dirs', 'channels':
            info_dict['_' + key] = ('\n' + 26 * ' ').join(info_dict[key])
        info_dict['_rtwro'] = ('writable'
                               if info_dict['root_writable'] else 'read only')
        print("""\
Current conda install:

               platform : %(platform)s
          conda version : %(conda_version)s
       conda is private : %(conda_private)s
      conda-env version : %(conda_env_version)s
    conda-build version : %(conda_build_version)s
         python version : %(python_version)s
       requests version : %(requests_version)s
       root environment : %(root_prefix)s  (%(_rtwro)s)
    default environment : %(default_prefix)s
       envs directories : %(_envs_dirs)s
          package cache : %(_pkgs_dirs)s
           channel URLs : %(_channels)s
            config file : %(rc_path)s
           offline mode : %(offline)s
             user-agent : %(user_agent)s\
""" % info_dict)

        if not on_win:
            print("""\
                UID:GID : %(UID)s:%(GID)s
""" % info_dict)
        else:
            print()

    if args.envs:
        handle_envs_list(info_dict['envs'], not context.json)

    if args.system:
        from conda.cli.find_commands import find_commands, find_executable

        site_dirs = get_user_site()
        evars = [
            'PATH', 'PYTHONPATH', 'PYTHONHOME', 'CONDA_DEFAULT_ENV',
            'CIO_TEST', 'CONDA_ENVS_PATH'
        ]

        if context.platform == 'linux':
            evars.append('LD_LIBRARY_PATH')
        elif context.platform == 'osx':
            evars.append('DYLD_LIBRARY_PATH')

        if context.json:
            info_dict['sys.version'] = sys.version
            info_dict['sys.prefix'] = sys.prefix
            info_dict['sys.executable'] = sys.executable
            info_dict['site_dirs'] = get_user_site()
            info_dict['env_vars'] = {
                ev: os.getenv(ev, '<not set>')
                for ev in evars
            }
        else:
            print("sys.version: %s..." % (sys.version[:40]))
            print("sys.prefix: %s" % sys.prefix)
            print("sys.executable: %s" % sys.executable)
            print("conda location: %s" % dirname(conda.__file__))
            for cmd in sorted(set(find_commands() + ['build'])):
                print("conda-%s: %s" % (cmd, find_executable('conda-' + cmd)))
            print("user site dirs: ", end='')
            if site_dirs:
                print(site_dirs[0])
            else:
                print()
            for site_dir in site_dirs[1:]:
                print('                %s' % site_dir)
            print()

            for ev in sorted(evars):
                print("%s: %s" % (ev, os.getenv(ev, '<not set>')))
            print()

    if args.license and not context.json:
        try:
            from _license import show_info
            show_info()
        except ImportError:
            print("""\
WARNING: could not import _license.show_info
# try:
# $ conda install -n root _license""")

    if context.json:
        stdout_json(info_dict)
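
A trimmed-down sketch of the environment-variable report produced by the --system branch above, covering only a subset of the variables in evars:

import os

evars = ['PATH', 'PYTHONPATH', 'CONDA_DEFAULT_ENV', 'CONDA_ENVS_PATH']
for ev in sorted(evars):
    print("%s: %s" % (ev, os.getenv(ev, '<not set>')))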
Example #17
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create
    """
    newenv = bool(command == 'create')
    isupdate = bool(command == 'update')
    isinstall = bool(command == 'install')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix, json=args.json)
    if force_32bit and plan.is_root_prefix(prefix):
        common.error_and_exit("cannot use CONDA_FORCE_32BIT=1 in root env")

    if isupdate and not (args.file or args.all or args.packages):
        common.error_and_exit("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix,
                              json=args.json,
                              error_type="ValueError")

    linked = ci.linked(prefix)
    lnames = {ci.name_dist(d) for d in linked}
    if isupdate and not args.all:
        for name in args.packages:
            common.arg2spec(name, json=args.json, update=True)
            if name not in lnames:
                common.error_and_exit("Package '%s' is not installed in %s" %
                                      (name, prefix),
                                      json=args.json,
                                      error_type="ValueError")

    if newenv and not args.no_default_packages:
        default_packages = create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)
    else:
        default_packages = []

    common.ensure_use_local(args)
    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    specs = []
    if args.file:
        for fpath in args.file:
            specs.extend(common.specs_from_url(fpath, json=args.json))
        if '@EXPLICIT' in specs:
            misc.explicit(specs, prefix, verbose=not args.quiet)
            return
    elif getattr(args, 'all', False):
        if not linked:
            common.error_and_exit("There are no packages installed in the "
                                  "prefix %s" % prefix)
        specs.extend(nm for nm in lnames)
    specs.extend(common.specs_from_args(args.packages, json=args.json))

    if isinstall and args.revision:
        get_revision(args.revision, json=args.json)
    elif not (newenv and args.clone):
        common.check_specs(prefix,
                           specs,
                           json=args.json,
                           create=(command == 'create'))

    num_cp = sum(s.endswith('.tar.bz2') for s in args.packages)
    if num_cp:
        if num_cp == len(args.packages):
            misc.explicit(args.packages, prefix, verbose=not args.quiet)
            return
        else:
            common.error_and_exit(
                "cannot mix specifications with conda package filenames",
                json=args.json,
                error_type="ValueError")

    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            install_tar(prefix, tar_path, verbose=not args.quiet)
            return

    if newenv and args.clone:
        if set(args.packages) - set(default_packages):
            common.error_and_exit('did not expect any arguments for --clone',
                                  json=args.json,
                                  error_type="ValueError")
        clone(args.clone,
              prefix,
              json=args.json,
              quiet=args.quiet,
              fetch_args={
                  'use_cache': args.use_index_cache,
                  'unknown': args.unknown
              })
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)
        return

    index = common.get_index_trap(channel_urls=channel_urls,
                                  prepend=not args.override_channels,
                                  use_local=args.use_local,
                                  use_cache=args.use_index_cache,
                                  unknown=args.unknown,
                                  json=args.json,
                                  offline=args.offline,
                                  prefix=prefix)
    r = Resolve(index)
    ospecs = list(specs)
    plan.add_defaults_to_specs(r, linked, specs, update=isupdate)

    # Don't update packages that are already up-to-date
    if isupdate and not (args.all or args.force):
        orig_packages = args.packages[:]
        installed_metadata = [ci.is_linked(prefix, dist) for dist in linked]
        for name in orig_packages:
            vers_inst = [
                m['version'] for m in installed_metadata if m['name'] == name
            ]
            build_inst = [
                m['build_number'] for m in installed_metadata
                if m['name'] == name
            ]

            try:
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
            except AssertionError as e:
                if args.json:
                    common.exception_and_exit(e, json=True)
                else:
                    raise

            pkgs = sorted(r.get_pkgs(name))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]

            if (latest.version == vers_inst[0]
                    and latest.build_number == build_inst[0]):
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import print_packages

            if not args.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(
                    message='All requested packages already installed.')
            return

    if args.force:
        args.no_deps = True

    if args.no_deps:
        only_names = set(s.split()[0] for s in specs)
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                common.error_and_exit("Error: could not create directory: %s" %
                                      prefix,
                                      json=args.json,
                                      error_type="OSError")
        else:
            common.error_and_exit("""\
environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix,
                                  json=args.json,
                                  error_type="NoEnvironmentFound")

    try:
        if isinstall and args.revision:
            actions = plan.revert_actions(prefix, get_revision(args.revision))
        else:
            with common.json_progress_bars(json=args.json and not args.quiet):
                actions = plan.install_actions(prefix,
                                               index,
                                               specs,
                                               force=args.force,
                                               only_names=only_names,
                                               pinned=args.pinned,
                                               always_copy=args.copy,
                                               minimal_hint=args.alt_hint,
                                               update_deps=args.update_deps)
    except NoPackagesFound as e:
        error_message = e.args[0]

        if isupdate and args.all:
            # Packages not found here just means they were installed but
            # cannot be found any more. Just skip them.
            if not args.json:
                print("Warning: %s, skipping" % error_message)
            else:
                # Not sure what to do here
                pass
            args._skip = getattr(args, '_skip', ['anaconda'])
            for pkg in e.pkgs:
                p = pkg.split()[0]
                if p in args._skip:
                    # Avoid infinite recursion. This can happen if a spec
                    # comes from elsewhere, like --file
                    raise
                args._skip.append(p)

            return install(args, parser, command=command)
        else:
            packages = {index[fn]['name'] for fn in index}

            for pkg in e.pkgs:
                close = get_close_matches(pkg, packages, cutoff=0.7)
                if close:
                    error_message += ("\n\nDid you mean one of these?"
                                      "\n\n    %s" % (', '.join(close)))
            error_message += '\n\nYou can search for this package on anaconda.org with'
            error_message += '\n\n    anaconda search -t conda %s' % pkg
            if len(e.pkgs) > 1:
                # Note this currently only happens with dependencies not found
                error_message += '\n\n (and similarly for the other packages)'

            if not find_executable('anaconda', include_others=False):
                error_message += '\n\nYou may need to install the anaconda-client'
                error_message += ' command line client with'
                error_message += '\n\n    conda install anaconda-client'

            pinned_specs = plan.get_pinned_specs(prefix)
            if pinned_specs:
                path = join(prefix, 'conda-meta', 'pinned')
                error_message += "\n\nNote that you have pinned specs in %s:" % path
                error_message += "\n\n    %r" % pinned_specs

            common.error_and_exit(error_message, json=args.json)
    except (Unsatisfiable, SystemExit) as e:
        # Unsatisfiable package specifications/no such revision/import error
        error_type = 'UnsatisfiableSpecifications'
        if e.args and 'could not import' in e.args[0]:
            error_type = 'ImportError'
        common.exception_and_exit(e,
                                  json=args.json,
                                  newline=True,
                                  error_text=False,
                                  error_type=error_type)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import print_packages

        if not args.json:
            regex = '^(%s)$' % '|'.join(s.split()[0] for s in ospecs)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(
                message='All requested packages already installed.')
        return

    if not args.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        plan.display_actions(actions,
                             index,
                             show_channel_urls=args.show_channel_urls)

    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    if not args.json:
        common.confirm_yn(args)
    elif args.dry_run:
        common.stdout_json_success(actions=actions, dry_run=True)
        sys.exit(0)

    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
            if not (command == 'update' and args.all):
                try:
                    with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                        f.write('# %s specs: %s\n' % (command, specs))
                except IOError as e:
                    if e.errno == errno.EACCES:
                        log.debug("Can't write the history file")
                    else:
                        raise

        except RuntimeError as e:
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            common.exception_and_exit(e, error_type=error_type, json=args.json)
        except SystemExit as e:
            common.exception_and_exit(e, json=args.json)

    if newenv:
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)

    if args.json:
        common.stdout_json_success(actions=actions)
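
The explicit-filename check near the top of this example boils down to: either every positional argument is a .tar.bz2 package (explicit install) or none of them may be. A standalone sketch:

def classify(packages):
    num_cp = sum(s.endswith('.tar.bz2') for s in packages)
    if num_cp == 0:
        return 'specs'
    if num_cp == len(packages):
        return 'explicit'
    raise ValueError('cannot mix specifications with conda package filenames')

print(classify(['numpy-1.9.2-py27_0.tar.bz2']))   # explicit
print(classify(['numpy >=1.9', 'scipy']))         # specs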
Example #18
def test_generate_eq():
    specs = ['anaconda']
    dists, specs = r.get_dists(specs)
    r2 = Resolve(dists, True, True)
    C = r2.gen_clauses(specs)
    eqv, eqb = r2.generate_version_metrics(C, specs)
    # Should satisfy the following criteria:
    # - lower versions of the same package should have higher
    #   coefficients.
    # - the same versions of the same package (e.g., different build strings)
    #   should have the same coefficients.
    # - a package that only has one version should not appear, unless
    #   include=True as it will have a 0 coefficient. The same is true of the
    #   latest version of a package.
    assert eqv == {
        'astropy-0.2-np15py26_0.tar.bz2': 1,
        'astropy-0.2-np16py26_0.tar.bz2': 1,
        'astropy-0.2-np17py26_0.tar.bz2': 1,
        'astropy-0.2-np17py33_0.tar.bz2': 1,
        'bitarray-0.8.0-py26_0.tar.bz2': 1,
        'bitarray-0.8.0-py33_0.tar.bz2': 1,
        'cython-0.18-py26_0.tar.bz2': 1,
        'cython-0.18-py33_0.tar.bz2': 1,
        'distribute-0.6.34-py26_1.tar.bz2': 1,
        'distribute-0.6.34-py33_1.tar.bz2': 1,
        'ipython-0.13.1-py26_1.tar.bz2': 1,
        'ipython-0.13.1-py33_1.tar.bz2': 1,
        'llvmpy-0.11.1-py26_0.tar.bz2': 1,
        'llvmpy-0.11.1-py33_0.tar.bz2': 1,
        'lxml-3.0.2-py26_0.tar.bz2': 1,
        'lxml-3.0.2-py33_0.tar.bz2': 1,
        'matplotlib-1.2.0-np15py26_1.tar.bz2': 1,
        'matplotlib-1.2.0-np16py26_1.tar.bz2': 1,
        'matplotlib-1.2.0-np17py26_1.tar.bz2': 1,
        'matplotlib-1.2.0-np17py33_1.tar.bz2': 1,
        'nose-1.2.1-py26_0.tar.bz2': 1,
        'nose-1.2.1-py33_0.tar.bz2': 1,
        'numpy-1.5.1-py26_3.tar.bz2': 3,
        'numpy-1.6.2-py26_3.tar.bz2': 2,
        'numpy-1.6.2-py26_4.tar.bz2': 2,
        'numpy-1.6.2-py27_4.tar.bz2': 2,
        'numpy-1.7.0-py26_0.tar.bz2': 1,
        'numpy-1.7.0-py33_0.tar.bz2': 1,
        'pip-1.2.1-py26_1.tar.bz2': 1,
        'pip-1.2.1-py33_1.tar.bz2': 1,
        'psutil-0.6.1-py26_0.tar.bz2': 1,
        'psutil-0.6.1-py33_0.tar.bz2': 1,
        'pyflakes-0.6.1-py26_0.tar.bz2': 1,
        'pyflakes-0.6.1-py33_0.tar.bz2': 1,
        'python-2.6.8-6.tar.bz2': 3,
        'python-2.7.4-0.tar.bz2': 2,
        'python-3.3.0-4.tar.bz2': 1,
        'pytz-2012j-py26_0.tar.bz2': 1,
        'pytz-2012j-py33_0.tar.bz2': 1,
        'requests-0.13.9-py26_0.tar.bz2': 1,
        'requests-0.13.9-py33_0.tar.bz2': 1,
        'scipy-0.11.0-np15py26_3.tar.bz2': 1,
        'scipy-0.11.0-np16py26_3.tar.bz2': 1,
        'scipy-0.11.0-np17py26_3.tar.bz2': 1,
        'scipy-0.11.0-np17py33_3.tar.bz2': 1,
        'six-1.2.0-py26_0.tar.bz2': 1,
        'six-1.2.0-py33_0.tar.bz2': 1,
        'sqlalchemy-0.7.8-py26_0.tar.bz2': 1,
        'sqlalchemy-0.7.8-py33_0.tar.bz2': 1,
        'tornado-2.4.1-py26_0.tar.bz2': 1,
        'tornado-2.4.1-py33_0.tar.bz2': 1,
        'xlrd-0.9.0-py26_0.tar.bz2': 1,
        'xlrd-0.9.0-py33_0.tar.bz2': 1}
    assert eqb == {
        'dateutil-2.1-py26_0.tar.bz2': 1,
        'dateutil-2.1-py33_0.tar.bz2': 1,
        'numpy-1.6.2-py26_3.tar.bz2': 1,
        'pyzmq-2.2.0.1-py26_0.tar.bz2': 1,
        'pyzmq-2.2.0.1-py33_0.tar.bz2': 1,
        'sphinx-1.1.3-py26_2.tar.bz2': 1,
        'sphinx-1.1.3-py33_2.tar.bz2': 1,
        'system-5.8-0.tar.bz2': 1,
        'zeromq-2.2.0-0.tar.bz2': 1}
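
The numpy coefficients above read as a rank over distinct versions counted back from the newest one in the index: assuming the test index also contains a newer numpy (say 1.7.1), that latest version gets coefficient 0 and is therefore omitted, 1.7.0 gets 1, every 1.6.2 build gets 2, and 1.5.1 gets 3. A sketch of that ranking:

# The version list is an assumption; 1.7.1 stands in for the omitted latest version.
versions = ['1.5.1', '1.6.2', '1.7.0', '1.7.1']   # oldest to newest
coeff = {v: len(versions) - 1 - i for i, v in enumerate(versions)}
print(coeff)   # {'1.5.1': 3, '1.6.2': 2, '1.7.0': 1, '1.7.1': 0}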
Example #19
def test_no_features():
    # Features that aren't specified shouldn't be selected.
    r.msd_cache = {}

    # Without this, there would be another solution including 'scipy-0.11.0-np16py26_p3.tar.bz2'.
    assert r.solve2(['python 2.6*', 'numpy 1.6*', 'scipy 0.11*'],
                    set(),
                    returnall=True) == [[
                        'numpy-1.6.2-py26_4.tar.bz2',
                        'openssl-1.0.1c-0.tar.bz2',
                        'python-2.6.8-6.tar.bz2',
                        'readline-6.2-0.tar.bz2',
                        'scipy-0.11.0-np16py26_3.tar.bz2',
                        'sqlite-3.7.13-0.tar.bz2',
                        'system-5.8-1.tar.bz2',
                        'tk-8.5.13-0.tar.bz2',
                        'zlib-1.2.7-0.tar.bz2',
                    ]]

    assert r.solve2(
        ['python 2.6*', 'numpy 1.6*', 'scipy 0.11*'], f_mkl,
        returnall=True) == [[
            'mkl-rt-11.0-p0.tar.bz2',  # This,
            'numpy-1.6.2-py26_p4.tar.bz2',  # this,
            'openssl-1.0.1c-0.tar.bz2',
            'python-2.6.8-6.tar.bz2',
            'readline-6.2-0.tar.bz2',
            'scipy-0.11.0-np16py26_p3.tar.bz2',  # and this are different.
            'sqlite-3.7.13-0.tar.bz2',
            'system-5.8-1.tar.bz2',
            'tk-8.5.13-0.tar.bz2',
            'zlib-1.2.7-0.tar.bz2',
        ]]

    index2 = index.copy()
    index2["pandas-0.12.0-np16py27_0.tar.bz2"] = \
        {
            "build": "np16py27_0",
            "build_number": 0,
            "depends": [
              "dateutil",
              "numpy 1.6*",
              "python 2.7*",
              "pytz"
            ],
            "name": "pandas",
            "requires": [
              "dateutil 1.5",
              "numpy 1.6",
              "python 2.7",
              "pytz"
            ],
            "version": "0.12.0"
        }
    # Make it want to choose the pro version by having it be newer.
    index2["numpy-1.6.2-py27_p5.tar.bz2"] = \
        {
            "build": "py27_p5",
            "build_number": 5,
            "depends": [
              "mkl-rt 11.0",
              "python 2.7*"
            ],
            "features": "mkl",
            "name": "numpy",
            "pub_date": "2013-04-29",
            "requires": [
              "mkl-rt 11.0",
              "python 2.7"
            ],
            "version": "1.6.2"
        }

    r2 = Resolve(index2)

    # This should not pick any mkl packages (the difference here is that none
    # of the specs directly have mkl versions)
    assert r2.solve2(['pandas 0.12.0 np16py27_0', 'python 2.7*'],
                     set(),
                     returnall=True) == [[
                         'dateutil-2.1-py27_1.tar.bz2',
                         'numpy-1.6.2-py27_4.tar.bz2',
                         'openssl-1.0.1c-0.tar.bz2',
                         'pandas-0.12.0-np16py27_0.tar.bz2',
                         'python-2.7.5-0.tar.bz2',
                         'pytz-2013b-py27_0.tar.bz2',
                         'readline-6.2-0.tar.bz2',
                         'six-1.3.0-py27_0.tar.bz2',
                         'sqlite-3.7.13-0.tar.bz2',
                         'system-5.8-1.tar.bz2',
                         'tk-8.5.13-0.tar.bz2',
                         'zlib-1.2.7-0.tar.bz2',
                     ]]

    assert r2.solve2(
        ['pandas 0.12.0 np16py27_0', 'python 2.7*'], f_mkl,
        returnall=True)[0] == [[
            'dateutil-2.1-py27_1.tar.bz2',
            'mkl-rt-11.0-p0.tar.bz2',  # This
            'numpy-1.6.2-py27_p5.tar.bz2',  # and this are different.
            'openssl-1.0.1c-0.tar.bz2',
            'pandas-0.12.0-np16py27_0.tar.bz2',
            'python-2.7.5-0.tar.bz2',
            'pytz-2013b-py27_0.tar.bz2',
            'readline-6.2-0.tar.bz2',
            'six-1.3.0-py27_0.tar.bz2',
            'sqlite-3.7.13-0.tar.bz2',
            'system-5.8-1.tar.bz2',
            'tk-8.5.13-0.tar.bz2',
            'zlib-1.2.7-0.tar.bz2',
        ]][0]
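
f_mkl is defined elsewhere in the test module; given how it is passed to solve2 as the feature argument, it is presumably just the feature-name set:

f_mkl = set(['mkl'])   # assumption: the 'mkl' feature set used by the asserts above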
Example #20
def execute(args, parser):
    import conda.plan as plan
    import conda.instructions as inst
    from conda.gateways.disk.delete import rm_rf
    from conda.core.linked_data import linked_data

    if not (args.all or args.package_names):
        raise CondaValueError('no package names supplied,\n'
                              '       try "conda remove -h" for more details')

    prefix = context.prefix_w_legacy_search
    if args.all and prefix == context.default_prefix:
        msg = "cannot remove current environment. deactivate and run conda remove again"
        raise CondaEnvironmentError(msg)
    check_write('remove', prefix, json=context.json)
    ensure_use_local(args)
    ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()
    if not args.features and args.all:
        index = linked_data(prefix)
        index = {dist: info for dist, info in iteritems(index)}
    else:
        index = get_index(channel_urls=channel_urls,
                          prepend=not args.override_channels,
                          use_local=args.use_local,
                          use_cache=args.use_index_cache,
                          prefix=prefix)
    specs = None
    if args.features:
        features = set(args.package_names)
        actions = plan.remove_features_actions(prefix, index, features)
        action_groups = actions,
    elif args.all:
        if plan.is_root_prefix(prefix):
            raise CondaEnvironmentError(
                'cannot remove root environment,\n'
                '       add -n NAME or -p PREFIX option')
        actions = {inst.PREFIX: prefix}
        for dist in sorted(iterkeys(index)):
            plan.add_unlink(actions, dist)
        action_groups = actions,
    else:
        specs = specs_from_args(args.package_names)
        r = Resolve(index)
        prefix_spec_map = create_prefix_spec_map_with_deps(r, specs, prefix)

        if (context.conda_in_root and plan.is_root_prefix(prefix)
                and names_in_specs(ROOT_NO_RM, specs) and not args.force):
            raise CondaEnvironmentError(
                'cannot remove %s from root environment' %
                ', '.join(ROOT_NO_RM))
        actions = []
        for prfx, spcs in iteritems(prefix_spec_map):
            index = linked_data(prfx)
            index = {dist: info for dist, info in iteritems(index)}
            actions.append(
                plan.remove_actions(prfx,
                                    list(spcs),
                                    index=index,
                                    force=args.force,
                                    pinned=args.pinned))
        action_groups = tuple(actions)

    delete_trash()
    if any(plan.nothing_to_do(actions) for actions in action_groups):
        if args.all:
            print("\nRemove all packages in environment %s:\n" % prefix,
                  file=sys.stderr)
            if not context.json:
                confirm_yn(args)
            rm_rf(prefix)

            if context.json:
                stdout_json({'success': True, 'actions': action_groups})
            return
        raise PackageNotFoundError(
            '', 'no packages found to remove from '
            'environment: %s' % prefix)
    for action in action_groups:
        if not context.json:
            print()
            print("Package plan for package removal in environment %s:" %
                  action["PREFIX"])
            plan.display_actions(action, index)

        if context.json and args.dry_run:
            stdout_json({
                'success': True,
                'dry_run': True,
                'actions': action_groups
            })
            return

    if not context.json:
        confirm_yn(args)

    for actions in action_groups:
        if context.json and not context.quiet:
            with json_progress_bars():
                plan.execute_actions(actions, index, verbose=not context.quiet)
        else:
            plan.execute_actions(actions, index, verbose=not context.quiet)
            if specs:
                try:
                    with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                        f.write('# remove specs: %s\n' % ','.join(specs))
                except IOError as e:
                    if e.errno == errno.EACCES:
                        log.debug("Can't write the history file")
                    else:
                        raise

        target_prefix = actions["PREFIX"]
        if (is_private_env(
                prefix_to_env_name(target_prefix, context.root_prefix))
                and linked_data(target_prefix) == {}):
            rm_rf(target_prefix)

    if args.all:
        rm_rf(prefix)

    if context.json:
        stdout_json({'success': True, 'actions': actions})
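
A small sketch of the history-file append at the end of this example (the prefix path and specs are made up):

import os
from os.path import join

prefix = '/tmp/demo-env'                      # illustrative prefix
os.makedirs(join(prefix, 'conda-meta'), exist_ok=True)
with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
    f.write('# remove specs: %s\n' % ','.join(['numpy', 'scipy']))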
Example #21
def execute_search(args, parser):
    import re
    from conda.resolve import MatchSpec, Resolve

    if args.reverse_dependency:
        if not args.regex:
            parser.error("--reverse-dependency requires at least one package name")
        if args.spec:
            parser.error("--reverse-dependency does not work with --spec")

    pat = None
    ms = None
    if args.regex:
        if args.spec:
            ms = MatchSpec(' '.join(args.regex.split('=')))
        else:
            regex = args.regex
            if args.full_name:
                regex = r'^%s$' % regex
            try:
                pat = re.compile(regex, re.I)
            except re.error as e:
                common.error_and_exit(
                    "'%s' is not a valid regex pattern (exception: %s)" %
                    (regex, e),
                    json=args.json,
                    error_type="ValueError")

    prefix = common.get_prefix(args)

    import conda.config
    import conda.install

    linked = conda.install.linked(prefix)
    extracted = set()
    for pkgs_dir in conda.config.pkgs_dirs:
        extracted.update(conda.install.extracted(pkgs_dir))

    # XXX: Make this work with more than one platform
    platform = args.platform or ''
    if platform and platform != config.subdir:
        args.unknown = False
    common.ensure_override_channels_requires_channel(args, dashc=False,
                                                     json=args.json)
    channel_urls = args.channel or ()
    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit("you need to have 'conda-build >= 1.7.1' installed"
                                  " to use the --use-local option",
                                  json=args.json,
                                  error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        if exists(croot):
            channel_urls = [url_path(croot)] + list(channel_urls)
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      unknown=args.unknown,
                                      json=args.json,
                                      platform=args.platform,
                                      offline=args.offline)
    else:
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      unknown=args.unknown,
                                      json=args.json,
                                      platform=args.platform,
                                      offline=args.offline)

    r = Resolve(index)

    if args.canonical:
        json = []
    else:
        json = {}

    names = []
    for name in sorted(r.groups):
        if args.reverse_dependency:
            ms_name = ms
            for pkg in r.groups[name]:
                for dep in r.ms_depends(pkg):
                    if pat.search(dep.name):
                        names.append((name, Package(pkg, r.index[pkg])))
        else:
            if pat and pat.search(name) is None:
                continue
            if ms and name != ms.name:
                continue

            if ms:
                ms_name = ms
            else:
                ms_name = MatchSpec(name)

            pkgs = sorted(r.get_pkgs(ms_name))
            names.append((name, pkgs))

    if args.reverse_dependency:
        new_names = []
        old = None
        for name, pkg in sorted(names, key=lambda x: (x[0], x[1].name, x[1])):
            if name == old:
                new_names[-1][1].append(pkg)
            else:
                new_names.append((name, [pkg]))
            old = name
        names = new_names

    for name, pkgs in names:
        if args.reverse_dependency:
            disp_name = pkgs[0].name
        else:
            disp_name = name

        if args.names_only and not args.outdated:
            print(name)
            continue

        if not args.canonical:
            json[name] = []

        if args.outdated:
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            if not vers_inst:
                continue
            assert len(vers_inst) == 1, name
            if not pkgs:
                continue
            latest = pkgs[-1]
            if latest.version == vers_inst[0]:
                continue
            if args.names_only:
                print(name)
                continue

        for pkg in pkgs:
            dist = pkg.fn[:-8]
            if args.canonical:
                if not args.json:
                    print(dist)
                else:
                    json.append(dist)
                continue
            if platform and platform != config.subdir:
                inst = ' '
            elif dist in linked:
                inst = '*'
            elif dist in extracted:
                inst = '.'
            else:
                inst = ' '

            if not args.json:
                print('%-25s %s  %-15s %15s  %-15s %s' % (
                    disp_name, inst,
                    pkg.version,
                    pkg.build,
                    config.canonical_channel_name(pkg.channel),
                    common.disp_features(r.features(pkg.fn)),
                    ))
                disp_name = ''
            else:
                data = {}
                data.update(pkg.info)
                data.update({
                    'fn': pkg.fn,
                    'installed': inst == '*',
                    'extracted': inst in '*.',
                    'version': pkg.version,
                    'build': pkg.build,
                    'build_number': pkg.build_number,
                    'channel': config.canonical_channel_name(pkg.channel),
                    'full_channel': pkg.channel,
                    'features': list(r.features(pkg.fn)),
                    'license': pkg.info.get('license'),
                    'size': pkg.info.get('size'),
                    'depends': pkg.info.get('depends'),
                    'type': pkg.info.get('type')
                })

                if data['type'] == 'app':
                    data['icon'] = make_icon_url(pkg.info)
                json[name].append(data)

    if args.json:
        common.stdout_json(json)
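The name filtering in execute_search reduces to three rules: --full-name anchors the pattern with ^...$, matching is case-insensitive (re.I), and an invalid pattern is reported with error_type='ValueError'. A self-contained sketch of just that step; filter_names and the sample package names are illustrative.

import re

def filter_names(names, regex, full_name=False):
    if full_name:
        regex = r'^%s$' % regex        # --full-name: match the whole name
    try:
        pat = re.compile(regex, re.I)  # case-insensitive, as above
    except re.error as e:
        raise ValueError("'%s' is not a valid regex pattern (exception: %s)"
                         % (regex, e))
    return [name for name in names if pat.search(name)]

# filter_names(['numpy', 'numpydoc', 'scipy'], 'numpy') keeps both numpy
# entries; with full_name=True only 'numpy' survives.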
Example #22
0
def install_actions(prefix,
                    index,
                    specs,
                    force=False,
                    only_names=None,
                    pinned=True,
                    minimal_hint=False,
                    update_deps=True):
    r = Resolve(index)
    linked = install.linked(prefix)

    if config.self_update and is_root_prefix(prefix):
        specs.append('conda')

    if pinned:
        pinned_specs = get_pinned_specs(prefix)
        log.debug("Pinned specs=%s" % pinned_specs)
        specs += pinned_specs
        # TODO: Improve error messages here
    add_defaults_to_specs(r, linked, specs)

    must_have = {}
    for fn in r.solve(specs, [d + '.tar.bz2' for d in linked],
                      config.track_features,
                      minimal_hint=minimal_hint,
                      update_deps=update_deps):
        dist = fn[:-8]
        name = install.name_dist(dist)
        if only_names and name not in only_names:
            continue
        must_have[name] = dist

    if is_root_prefix(prefix):
        for name in config.foreign:
            if name in must_have:
                del must_have[name]
    elif basename(prefix).startswith('_'):
        # anything (including conda) can be installed into environments
        # starting with '_', mainly to allow conda-build to build conda
        pass
    else:
        # disallow conda from being installed into all other environments
        if 'conda' in must_have:
            sys.exit("Error: 'conda' can only be installed into the "
                     "root environment")

    smh = r.graph_sort(must_have)

    if force:
        actions = force_linked_actions(smh, index, prefix)
    else:
        actions = ensure_linked_actions(smh, prefix)

    if (actions[inst.LINK] and sys.platform != 'win32'
            and prefix != config.root_dir):
        actions[inst.SYMLINK_CONDA] = [config.root_dir]

    for dist in sorted(linked):
        name = install.name_dist(dist)
        if name in must_have and dist != must_have[name]:
            add_unlink(actions, dist)

    return actions
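install_actions leans on a filename convention rather than an API: a package filename ends in '.tar.bz2' (hence fn[:-8]), and the resulting dist string is 'name-version-build', so the name comes back out with rsplit('-', 2)[0] (the same split the search code above applies to linked dists; conda's own helper for this is install.name_dist). A tiny sketch with illustrative helper names:

def fn_to_dist(fn):
    # 'numpy-1.11.3-py35_0.tar.bz2' -> 'numpy-1.11.3-py35_0'
    assert fn.endswith('.tar.bz2')
    return fn[:-8]

def dist_to_name(dist):
    # 'numpy-1.11.3-py35_0' -> 'numpy'
    return dist.rsplit('-', 2)[0]

assert dist_to_name(fn_to_dist('numpy-1.11.3-py35_0.tar.bz2')) == 'numpy'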
Example #23
0
import json
import unittest
from os.path import dirname, join
from collections import defaultdict

from conda.config import default_python, pkgs_dirs
import conda.config
from conda.install import LINK_HARD
import conda.plan as plan
from conda.plan import display_actions
from conda.resolve import Resolve

from tests.helpers import captured

with open(join(dirname(__file__), 'index.json')) as fi:
    index = json.load(fi)
    r = Resolve(index)


def solve(specs):
    return [fn[:-8] for fn in r.solve(specs)]


class TestMisc(unittest.TestCase):
    def test_split_linkarg(self):
        for arg, res in [
            ('w3-1.2-0', ('w3-1.2-0', pkgs_dirs[0], LINK_HARD)),
            ('w3-1.2-0 /opt/pkgs 1', ('w3-1.2-0', '/opt/pkgs', 1)),
            (' w3-1.2-0  /opt/pkgs  1  ', ('w3-1.2-0', '/opt/pkgs', 1)),
            (r'w3-1.2-0 C:\A B\pkgs 2', ('w3-1.2-0', r'C:\A B\pkgs', 2))
        ]:
            self.assertEqual(plan.split_linkarg(arg), res)
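Judging from these cases alone, split_linkarg parses a whitespace-separated 'dist [pkgs_dir [linktype]]' string, defaults to pkgs_dirs[0] and LINK_HARD, and tolerates a space inside the pkgs_dir path. A rough re-implementation inferred only from the test data (not conda's actual function; the default values here are placeholders):

def split_linkarg_sketch(arg, default_pkgs_dir='/opt/anaconda/pkgs', link_hard=1):
    parts = arg.split()
    dist = parts[0]
    if len(parts) == 1:
        return dist, default_pkgs_dir, link_hard
    linktype = int(parts[-1])
    pkgs_dir = ' '.join(parts[1:-1])  # rejoin a path that contains a space
    return dist, pkgs_dir, linktype

assert split_linkarg_sketch('w3-1.2-0 /opt/pkgs 1') == ('w3-1.2-0', '/opt/pkgs', 1)
assert split_linkarg_sketch(r'w3-1.2-0 C:\A B\pkgs 2') == ('w3-1.2-0', r'C:\A B\pkgs', 2)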
Example #24
0
def test_optional_dependencies():
    index2 = index.copy()
    index2['package1-1.0-0.tar.bz2'] = IndexRecord(
        **{
            "channel": "defaults",
            "subdir": context.subdir,
            "md5": "0123456789",
            "fn": "doesnt-matter-here",
            'build': '0',
            'build_number': 0,
            'constrains': ['package2 >1.0'],
            'name': 'package1',
            'requires': ['package2'],
            'version': '1.0',
        })
    index2['package2-1.0-0.tar.bz2'] = IndexRecord(
        **{
            "channel": "defaults",
            "subdir": context.subdir,
            "md5": "0123456789",
            "fn": "doesnt-matter-here",
            'build': '0',
            'build_number': 0,
            'depends': [],
            'name': 'package2',
            'requires': [],
            'version': '1.0',
        })
    index2['package2-2.0-0.tar.bz2'] = IndexRecord(
        **{
            "channel": "defaults",
            "subdir": context.subdir,
            "md5": "0123456789",
            "fn": "doesnt-matter-here",
            'build': '0',
            'build_number': 0,
            'depends': [],
            'name': 'package2',
            'requires': [],
            'version': '2.0',
        })
    index2 = {Dist(key): value for key, value in iteritems(index2)}
    r = Resolve(index2)

    assert set(r.find_matches(MatchSpec('package1'))) == {
        Dist('package1-1.0-0.tar.bz2'),
    }
    assert set(r.get_reduced_index(['package1']).keys()) == {
        Dist('package1-1.0-0.tar.bz2'),
        Dist('package2-2.0-0.tar.bz2'),
    }
    assert r.install(['package1']) == [
        Dist('package1-1.0-0.tar.bz2'),
    ]
    assert r.install(['package1', 'package2']) == r.install(
        ['package1', 'package2 >1.0']) == [
            Dist('package1-1.0-0.tar.bz2'),
            Dist('package2-2.0-0.tar.bz2'),
        ]
    assert raises(UnsatisfiableError,
                  lambda: r.install(['package1', 'package2 <2.0']))
    assert raises(UnsatisfiableError,
                  lambda: r.install(['package1', 'package2 1.0']))
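Reading the assertions together: 'constrains': ['package2 >1.0'] does not pull package2 into the solution (installing package1 alone works), but once package2 is requested next to package1 its version must be greater than 1.0, so with only 1.0 and 2.0 in the index both 'package2 1.0' and 'package2 <2.0' are unsatisfiable. A toy checker encoding only this one rule, nothing like conda's real solver:

def allowed_with_package1(requested):
    # requested maps package name -> version tuple the user asked for
    if 'package2' not in requested:
        return True                         # optional: package1 alone is fine
    return requested['package2'] > (1, 0)   # constrained: must be > 1.0

assert allowed_with_package1({})                        # install(['package1'])
assert allowed_with_package1({'package2': (2, 0)})      # 2.0 satisfies >1.0
assert not allowed_with_package1({'package2': (1, 0)})  # the UnsatisfiableError case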