Example #1
    def test_merge_single_name(self):
        specs = (MatchSpec('exact'), MatchSpec('exact 1.2.3 1'), MatchSpec('exact >1.0,<2'))
        merged_specs = MatchSpec.merge(specs)
        print(merged_specs)
        assert len(merged_specs) == 1
        merged_spec = merged_specs[0]
        print(merged_spec)
        assert str(merged_spec) == "exact[version='1.2.3,>1.0,<2',build=1]"
        assert merged_spec.match({
            'name': 'exact',
            'version': '1.2.3',
            'build': '1',
            'build_number': 1,
        })
        assert not merged_spec.match({
            'name': 'exact',
            'version': '1.2.2',
            'build': '1',
            'build_number': 1,
        })

        specs = (MatchSpec('exact 1.2.3 1'), MatchSpec('exact 1.2.3 2'))
        with pytest.raises(ValueError):
            MatchSpec.merge(specs)

        merged_specs = MatchSpec.merge((MatchSpec('exact 1.2.3 1'),))
        assert len(merged_specs) == 1
        assert str(merged_specs[0]) == "exact==1.2.3=1"
Example #2
 def test_index_record(self):
     dst = Dist('defaults::foo-1.2.3-4.tar.bz2')
     rec = DPkg(dst)
     a = MatchSpec(dst)
     b = MatchSpec(rec)
     assert b.match(rec.dump())
     assert b.match(rec)
     assert a.match(rec)
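
The DPkg helper used here and in several later examples is defined elsewhere in conda's test suite and is not shown on this page. A minimal stand-in, assuming it only needs to expose the record fields that MatchSpec.match() inspects plus arbitrary keyword overrides (for example track_features), might look like the sketch below; the real helper may differ.

from conda.models.dist import Dist


class DPkg:
    """Hypothetical sketch of the DPkg test helper used in these examples."""

    def __init__(self, dist, **kwargs):
        dist = dist if isinstance(dist, Dist) else Dist(dist)
        self.name = dist.name
        self.version = dist.version
        self.build = dist.build_string
        # best-effort guess of the build number from the trailing "_N" of the build string
        try:
            self.build_number = int(dist.build_string.rsplit('_', 1)[-1])
        except ValueError:
            self.build_number = 0
        self.channel = dist.channel
        vars(self).update(kwargs)  # e.g. track_features='mkl'

    def dump(self):
        # plain-dict form, mirroring the dict-based match() calls in Example #1
        return dict(vars(self))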
Example #3
    def test_build_number_merge(self):
        specs = (MatchSpec('python[build_number=1]'), MatchSpec('python=1.2.3=py27_7'), MatchSpec('conda-forge::python<=8[build_number=1]'))
        merged = MatchSpec.merge(specs)
        assert len(merged) == 1
        assert str(merged[0]) == "conda-forge::python[version='1.2.3,<=8',build=py27_7,build_number=1]"

        specs = (MatchSpec('python[build_number=2]'), MatchSpec('python=1.2.3=py27_7'), MatchSpec('python<=8[build_number=1]'))
        with pytest.raises(ValueError):
            MatchSpec.merge(specs)
Example #4
    def test_md5_merge_with_name(self):
        specs = (MatchSpec('python[md5=deadbeef]'), MatchSpec('python=1.2.3'), MatchSpec('conda-forge::python[md5=deadbeef]'))
        merged = MatchSpec.merge(specs)
        assert len(merged) == 1
        assert str(merged[0]) == "conda-forge::python=1.2.3[md5=deadbeef]"

        specs = (MatchSpec('python[md5=FFBADD11]'), MatchSpec('python=1.2.3'), MatchSpec('python[md5=ffbadd11]'))
        with pytest.raises(ValueError):
            MatchSpec.merge(specs)
Example #5
    def test_subdir_merge(self):
        specs = (MatchSpec('pkgs/main/linux-64::python'), MatchSpec('pkgs/main/linux-32::python'))
        with pytest.raises(ValueError):
            MatchSpec.merge(specs)

        specs = (MatchSpec('defaults/win-32::python'), MatchSpec('defaults/win-64::python'))
        with pytest.raises(ValueError):
            MatchSpec.merge(specs)

        specs = (MatchSpec('pkgs/free/linux-64::python'), MatchSpec('pkgs/free::python 1.2.3'))
        merged = MatchSpec.merge(specs)
        assert len(merged) == 1
        assert str(merged[0]) == "pkgs/free/linux-64::python==1.2.3"
        assert merged[0] == MatchSpec(channel='pkgs/free', subdir='linux-64', name='python', version='1.2.3')
Example #6
    def test_match_1(self):
        for spec, result in [
            ('numpy 1.7*', True),          ('numpy 1.7.1', True),
            ('numpy 1.7', False),          ('numpy 1.5*', False),
            ('numpy >=1.5', True),         ('numpy >=1.5,<2', True),
            ('numpy >=1.8,<1.9', False),   ('numpy >1.5,<2,!=1.7.1', False),
            ('numpy >1.8,<2|==1.7', False),('numpy >1.8,<2|>=1.7.1', True),
            ('numpy >=1.8|1.7*', True),    ('numpy ==1.7', False),
            ('numpy >=1.5,>1.6', True),    ('numpy ==1.7.1', True),
            ('numpy ==1.7.1.0', True),     ('numpy==1.7.1.0.0', True),
            ('numpy >=1,*.7.*', True),     ('numpy *.7.*,>=1', True),
            ('numpy >=1,*.8.*', False),    ('numpy >=2,*.7.*', False),
            ('numpy 1.6*|1.7*', True),     ('numpy 1.6*|1.8*', False),
            ('numpy 1.6.2|1.7*', True),    ('numpy 1.6.2|1.7.1', True),
            ('numpy 1.6.2|1.7.0', False),  ('numpy 1.7.1 py27_0', True),
            ('numpy 1.7.1 py26_0', False), ('numpy >1.7.1a', True),
            ('python', False),
        ]:
            m = MatchSpec(spec)
            assert m.match(DPkg('numpy-1.7.1-py27_0.tar.bz2')) == result
            assert 'name' in m
            assert m.name == 'python' or 'version' in m

        # both version numbers conforming to PEP 440
        assert not MatchSpec('numpy >=1.0.1').match(DPkg('numpy-1.0.1a-0.tar.bz2'))
        # both version numbers non-conforming to PEP 440
        assert not MatchSpec('numpy >=1.0.1.vc11').match(DPkg('numpy-1.0.1a.vc11-0.tar.bz2'))
        assert MatchSpec('numpy >=1.0.1*.vc11').match(DPkg('numpy-1.0.1a.vc11-0.tar.bz2'))
        # one conforming, other non-conforming to PEP 440
        assert MatchSpec('numpy <1.0.1').match(DPkg('numpy-1.0.1.vc11-0.tar.bz2'))
        assert MatchSpec('numpy <1.0.1').match(DPkg('numpy-1.0.1a.vc11-0.tar.bz2'))
        assert not MatchSpec('numpy >=1.0.1.vc11').match(DPkg('numpy-1.0.1a-0.tar.bz2'))
        assert MatchSpec('numpy >=1.0.1a').match(DPkg('numpy-1.0.1z-0.tar.bz2'))
        assert MatchSpec('numpy >=1.0.1a py27*').match(DPkg('numpy-1.0.1z-py27_1.tar.bz2'))
        assert MatchSpec('blas * openblas_0').match(DPkg('blas-1.0-openblas_0.tar.bz2'))

        assert MatchSpec('blas')._is_simple()
        assert not MatchSpec('blas 1.0')._is_simple()
        assert not MatchSpec('blas 1.0 1')._is_simple()

        m = MatchSpec('blas 1.0', optional=True)
        m2 = MatchSpec(m, optional=False)
        m3 = MatchSpec(m2, target='blas-1.0-0.tar.bz2')
        m4 = MatchSpec(m3, target=None, optional=True)
        assert m.spec == m2.spec and m.optional != m2.optional
        assert m2.spec == m3.spec and m2.optional == m3.optional and m2.target != m3.target
        assert m == m4

        self.assertRaises(ValueError, MatchSpec, (1, 2, 3))
Example #7
    def test_merge_multiple_name(self):
        specs = tuple(MatchSpec(s) for s in (
            'exact', 'exact 1.2.3 1',
            'bounded >=1.0,<2.0', 'bounded >=1.5', 'bounded <=1.8',
            'exact >1.0,<2',
        ))
        merged_specs = MatchSpec.merge(specs)
        print(merged_specs)
        assert len(merged_specs) == 2

        exact_spec = next(s for s in merged_specs if s.name == 'exact')
        bounded_spec = next(s for s in merged_specs if s.name == 'bounded')

        assert str(exact_spec) == "exact[version='1.2.3,>1.0,<2',build=1]"
        assert str(bounded_spec) == "bounded[version='>=1.0,<2.0,>=1.5,<=1.8']"

        assert not bounded_spec.match({
            'name': 'bounded',
            'version': '1',
            'build': '6',
            'build_number': 6,
        })
        assert bounded_spec.match({
            'name': 'bounded',
            'version': '1.5',
            'build': '7',
            'build_number': 7,
        })
        assert not bounded_spec.match({
            'name': 'bounded',
            'version': '2',
            'build': '8',
            'build_number': 8,
        })
Example #8
 def test_md5_merge_wo_name(self):
     specs = (MatchSpec('*[md5=deadbeef]'), MatchSpec('*[md5=FFBADD11]'))
     merged = MatchSpec.merge(specs)
     assert len(merged) == 2
     str_specs = ('*[md5=deadbeef]', '*[md5=FFBADD11]')
     assert str(merged[0]) in str_specs
     assert str(merged[1]) in str_specs
     assert str(merged[0]) != str(merged[1])
Example #9
    def test_to_filename(self):
        m1 = MatchSpec(fn='foo-1.7-52.tar.bz2')
        m2 = MatchSpec(name='foo', version='1.7', build='52')
        m3 = MatchSpec(Dist('defaults::foo-1.7-52'))
        assert m1._to_filename_do_not_use() == 'foo-1.7-52.tar.bz2'
        assert m2._to_filename_do_not_use() == 'foo-1.7-52.tar.bz2'
        assert m3._to_filename_do_not_use() == 'foo-1.7-52.tar.bz2'

        for spec in 'bitarray', 'pycosat 0.6.0', 'numpy 1.6*':
            ms = MatchSpec(spec)
            assert ms._to_filename_do_not_use() is None
Example #10
    def test_dist(self):
        with env_unmodified(conda_tests_ctxt_mgmt_def_pol):
            dst = Dist('defaults::foo-1.2.3-4.tar.bz2')
            a = MatchSpec(dst)
            b = MatchSpec(a)
            c = MatchSpec(dst, optional=True, target='burg')
            d = MatchSpec(a, build='5')

            assert a == b
            assert hash(a) == hash(b)
            assert a is b

            assert a != c
            assert hash(a) != hash(c)

            assert a != d
            assert hash(a) != hash(d)

            p = MatchSpec(channel='defaults',name='python',version=VersionSpec('3.5*'))
            assert p.match(Dist(channel='defaults', dist_name='python-3.5.3-1', name='python',
                                version='3.5.3', build_string='1', build_number=1, base_url=None,
                                platform=None))

            assert not p.match(Dist(channel='defaults', dist_name='python-3.6.0-0', name='python',
                                    version='3.6.0', build_string='0', build_number=0, base_url=None,
                                    platform=None))

            assert p.match(Dist(channel='defaults', dist_name='python-3.5.1-0', name='python',
                                version='3.5.1', build_string='0', build_number=0, base_url=None,
                                platform=None))
            assert p.match(PackageRecord(name='python', version='3.5.1', build='0', build_number=0,
                                         depends=('openssl 1.0.2*', 'readline 6.2*', 'sqlite',
                                                   'tk 8.5*', 'xz 5.0.5', 'zlib 1.2*', 'pip'),
                                         channel=Channel(scheme='https', auth=None,
                                                          location='repo.anaconda.com', token=None,
                                                          name='pkgs/main', platform='osx-64',
                                                          package_filename=None),
                                         subdir='osx-64', fn='python-3.5.1-0.tar.bz2',
                                         md5='a813bc0a32691ab3331ac9f37125164c', size=14678857,
                                         priority=0,
                                         url='https://repo.anaconda.com/pkgs/main/osx-64/python-3.5.1-0.tar.bz2'))
Example #11
    def test_channel_merge(self):
        specs = (MatchSpec('pkgs/main::python'), MatchSpec('defaults::python'))
        with pytest.raises(ValueError):
            MatchSpec.merge(specs)

        specs = (MatchSpec('defaults::python'), MatchSpec('pkgs/main::python'))
        with pytest.raises(ValueError):
            MatchSpec.merge(specs)

        specs = (MatchSpec('defaults::python'), MatchSpec('defaults::python 1.2.3'))
        merged = MatchSpec.merge(specs)
        assert len(merged) == 1
        assert str(merged[0]) == "defaults::python==1.2.3"

        specs = (MatchSpec('pkgs/free::python'), MatchSpec('pkgs/free::python 1.2.3'))
        merged = MatchSpec.merge(specs)
        assert len(merged) == 1
        assert str(merged[0]) == "pkgs/free::python==1.2.3"
Example #12
 def test_no_name_match_spec(self):
     ms = MatchSpec(track_features="mkl")
     assert str(ms) == '*[track_features=mkl]'
Example #13
 def cb_form(self, spec_str):
     return MatchSpec(spec_str).conda_build_form()
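
For orientation, a rough usage sketch (an assumption about typical output, not taken from the tests on this page; the import path shown is the one used in recent conda versions): conda_build_form() renders a spec in the space-separated name/version/build layout used by conda-build recipes.

from conda.models.match_spec import MatchSpec

print(MatchSpec('zlib 1.2.7 0').conda_build_form())  # expected: 'zlib 1.2.7 0'
print(MatchSpec('numpy >=1.7').conda_build_form())   # expected: 'numpy >=1.7'
print(MatchSpec('python').conda_build_form())        # expected: 'python'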
Example #14
    def test_track_features_match(self):
        dst = Dist('defaults::foo-1.2.3-4.tar.bz2')
        a = MatchSpec(features='test')
        assert text_type(a) == "*[features=test]"
        assert not a.match(DPkg(dst))
        assert not a.match(DPkg(dst, track_features=''))

        a = MatchSpec(track_features='test')
        assert a.match(DPkg(dst, track_features='test'))
        assert not a.match(DPkg(dst, track_features='test2'))
        assert not a.match(DPkg(dst, track_features='test me'))
        assert not a.match(DPkg(dst, track_features='you test'))
        assert not a.match(DPkg(dst, track_features='you test me'))
        assert a.get_exact_value('track_features') == frozenset(('test',))

        b = MatchSpec(track_features='mkl')
        assert not b.match(DPkg(dst))
        assert b.match(DPkg(dst, track_features='mkl'))
        assert b.match(DPkg(dst, track_features='mkl'))
        assert not b.match(DPkg(dst, track_features='mkl debug'))
        assert not b.match(DPkg(dst, track_features='debug'))

        c = MatchSpec(track_features='nomkl')
        assert not c.match(DPkg(dst))
        assert not c.match(DPkg(dst, track_features='mkl'))
        assert c.match(DPkg(dst, track_features='nomkl'))
        assert not c.match(DPkg(dst, track_features='nomkl debug'))

        # regression test for #6860
        d = MatchSpec(track_features='')
        assert d.get_exact_value('track_features') == frozenset()
        d = MatchSpec(track_features=' ')
        assert d.get_exact_value('track_features') == frozenset()
        d = MatchSpec(track_features=('', ''))
        assert d.get_exact_value('track_features') == frozenset()
        d = MatchSpec(track_features=('', '', 'test'))
        assert d.get_exact_value('track_features') == frozenset(('test',))
Example #15
    def test_subdir_merge(self):
        specs = (MatchSpec('pkgs/main/linux-64::python'),
                 MatchSpec('pkgs/main/linux-32::python'))
        with pytest.raises(ValueError):
            MatchSpec.merge(specs)

        specs = (MatchSpec('defaults/win-32::python'),
                 MatchSpec('defaults/win-64::python'))
        with pytest.raises(ValueError):
            MatchSpec.merge(specs)

        specs = (MatchSpec('pkgs/free/linux-64::python'),
                 MatchSpec('pkgs/free::python 1.2.3'))
        merged = MatchSpec.merge(specs)
        assert len(merged) == 1
        assert str(merged[0]) == "pkgs/free/linux-64::python==1.2.3"
        assert merged[0] == MatchSpec(channel='pkgs/free',
                                      subdir='linux-64',
                                      name='python',
                                      version='1.2.3')
Example #16
    def test_match_1(self):
        for spec, result in [
            ('numpy 1.7*', True),          ('numpy 1.7.1', True),
            ('numpy 1.7', False),          ('numpy 1.5*', False),
            ('numpy >=1.5', True),         ('numpy >=1.5,<2', True),
            ('numpy >=1.8,<1.9', False),   ('numpy >1.5,<2,!=1.7.1', False),
            ('numpy >1.8,<2|==1.7', False),('numpy >1.8,<2|>=1.7.1', True),
            ('numpy >=1.8|1.7*', True),    ('numpy ==1.7', False),
            ('numpy >=1.5,>1.6', True),    ('numpy ==1.7.1', True),
            ('numpy ==1.7.1.0', True),     ('numpy==1.7.1.0.0', True),
            ('numpy >=1,*.7.*', True),     ('numpy *.7.*,>=1', True),
            ('numpy >=1,*.8.*', False),    ('numpy >=2,*.7.*', False),
            ('numpy 1.6*|1.7*', True),     ('numpy 1.6*|1.8*', False),
            ('numpy 1.6.2|1.7*', True),    ('numpy 1.6.2|1.7.1', True),
            ('numpy 1.6.2|1.7.0', False),  ('numpy 1.7.1 py27_0', True),
            ('numpy 1.7.1 py26_0', False), ('numpy >1.7.1a', True),
            ('python', False),
        ]:
            m = MatchSpec(spec)
            assert m.match(DPkg('numpy-1.7.1-py27_0.tar.bz2')) == result
            assert 'name' in m
            assert m.name == 'python' or 'version' in m

        # both version numbers conforming to PEP 440
        assert not MatchSpec('numpy >=1.0.1').match(DPkg('numpy-1.0.1a-0.tar.bz2'))
        # both version numbers non-conforming to PEP 440
        assert not MatchSpec('numpy >=1.0.1.vc11').match(DPkg('numpy-1.0.1a.vc11-0.tar.bz2'))
        assert MatchSpec('numpy >=1.0.1*.vc11').match(DPkg('numpy-1.0.1a.vc11-0.tar.bz2'))
        # one conforming, other non-conforming to PEP 440
        assert MatchSpec('numpy <1.0.1').match(DPkg('numpy-1.0.1.vc11-0.tar.bz2'))
        assert MatchSpec('numpy <1.0.1').match(DPkg('numpy-1.0.1a.vc11-0.tar.bz2'))
        assert not MatchSpec('numpy >=1.0.1.vc11').match(DPkg('numpy-1.0.1a-0.tar.bz2'))
        assert MatchSpec('numpy >=1.0.1a').match(DPkg('numpy-1.0.1z-0.tar.bz2'))
        assert MatchSpec('numpy >=1.0.1a py27*').match(DPkg('numpy-1.0.1z-py27_1.tar.bz2'))
        assert MatchSpec('blas * openblas_0').match(DPkg('blas-1.0-openblas_0.tar.bz2'))

        assert MatchSpec('blas')._is_simple()
        assert not MatchSpec('blas 1.0')._is_simple()
        assert not MatchSpec('blas 1.0 1')._is_simple()

        m = MatchSpec('blas 1.0', optional=True)
        m2 = MatchSpec(m, optional=False)
        m3 = MatchSpec(m2, target='blas-1.0-0.tar.bz2')
        m4 = MatchSpec(m3, target=None, optional=True)
        assert m.spec == m2.spec and m.optional != m2.optional
        assert m2.spec == m3.spec and m2.optional == m3.optional and m2.target != m3.target
        assert m == m4

        self.assertRaises(ValueError, MatchSpec, (1, 2, 3))
Example #17
 def test_build_number_and_filename(self):
     ms = MatchSpec('zlib 1.2.7 0')
     assert ms.get_exact_value('name') == 'zlib'
     assert ms.get_exact_value('version') == '1.2.7'
     assert ms.get_exact_value('build') == '0'
     assert ms._to_filename_do_not_use() == 'zlib-1.2.7-0.tar.bz2'
Example #18
def test_remove_youngest_descendant_nodes_with_specs():
    records, specs = get_conda_build_record_set()
    graph = PrefixGraph(records,
                        tuple(specs) + (MatchSpec("python:requests"), ))

    removed_nodes = graph.remove_youngest_descendant_nodes_with_specs()

    remaining_nodes = tuple(rec.name for rec in graph.records)
    pprint(remaining_nodes)
    order = (
        'ca-certificates',
        'conda-env',
        'libgcc-ng',
        'libstdcxx-ng',
        'libffi',
        'ncurses',
        'openssl',
        'patchelf',
        'tk',
        'xz',
        'yaml',
        'zlib',
        'libedit',
        'readline',
        'sqlite',
        'python',
        'asn1crypto',
        'beautifulsoup4',
        'certifi',
        'chardet',
        'cryptography-vectors',
        'filelock',
        'glob2',
        'idna',
        'markupsafe',
        'pkginfo',
        'psutil',
        'pycosat',
        'pycparser',
        'pysocks',
        'pyyaml',
        'ruamel_yaml',
        'six',
        'cffi',
        'setuptools',
        'cryptography',
        'jinja2',
        'pyopenssl',
        'urllib3',
        'requests',
        'conda',
    )
    assert remaining_nodes == order

    order = (
        'intel-openmp',
        'conda-build',
    )
    removed_nodes = tuple(rec.name for rec in removed_nodes)
    pprint(removed_nodes)
    assert removed_nodes == order

    # again
    removed_nodes = graph.remove_youngest_descendant_nodes_with_specs()

    remaining_nodes = tuple(rec.name for rec in graph.records)
    pprint(remaining_nodes)
    order = (
        'conda-env',
        'ca-certificates',
        'libgcc-ng',
        'libstdcxx-ng',
        'libffi',
        'ncurses',
        'openssl',
        'patchelf',
        'tk',
        'xz',
        'yaml',
        'zlib',
        'libedit',
        'readline',
        'sqlite',
        'python',
        'asn1crypto',
        'beautifulsoup4',
        'certifi',
        'chardet',
        'cryptography-vectors',
        'filelock',
        'glob2',
        'idna',
        'markupsafe',
        'pkginfo',
        'psutil',
        'pycosat',
        'pycparser',
        'pysocks',
        'pyyaml',
        'ruamel_yaml',
        'six',
        'cffi',
        'setuptools',
        'cryptography',
        'jinja2',
        'pyopenssl',
        'urllib3',
        'requests',
    )
    assert remaining_nodes == order

    order = ('conda', )
    removed_nodes = tuple(rec.name for rec in removed_nodes)
    pprint(removed_nodes)
    assert removed_nodes == order

    # now test prune
    removed_nodes = graph.prune()

    remaining_nodes = tuple(rec.name for rec in graph.records)
    pprint(remaining_nodes)
    order = (
        'ca-certificates',
        'libgcc-ng',
        'libstdcxx-ng',
        'libffi',
        'ncurses',
        'openssl',
        'tk',
        'xz',
        'zlib',
        'libedit',
        'readline',
        'sqlite',
        'python',
        'asn1crypto',
        'certifi',
        'chardet',
        'cryptography-vectors',
        'idna',
        'pycparser',
        'pysocks',
        'six',
        'cffi',
        'cryptography',
        'pyopenssl',
        'urllib3',
        'requests',
    )
    assert remaining_nodes == order

    order = (
        'conda-env',
        'patchelf',
        'yaml',
        'beautifulsoup4',
        'filelock',
        'glob2',
        'markupsafe',
        'pkginfo',
        'psutil',
        'pycosat',
        'pyyaml',
        'ruamel_yaml',
        'setuptools',
        'jinja2',
    )
    removed_nodes = tuple(rec.name for rec in removed_nodes)
    pprint(removed_nodes)
    assert removed_nodes == order
Example #19
def get_pandas_record_set():
    specs = MatchSpec("pandas"), MatchSpec("python=2.7"), MatchSpec(
        "numpy 1.13")
    with get_solver_4(specs) as solver:
        final_state = solver.solve_final_state()
    return final_state, frozenset(specs)
Example #20
def get_conda_build_record_set():
    specs = MatchSpec("conda"), MatchSpec("conda-build"), MatchSpec(
        "intel-openmp"),
    with get_solver_4(specs) as solver:
        final_state = solver.solve_final_state()
    return final_state, frozenset(specs)
Example #21
def install(args, parser, command='install'):
    """
    mamba install, mamba update, and mamba create
    """
    context.validate_configuration()
    check_non_admin()

    newenv = bool(command == 'create')
    isupdate = bool(command == 'update')
    isinstall = bool(command == 'install')
    if newenv:
        ensure_name_or_prefix(args, command)
    prefix = context.target_prefix
    if newenv:
        check_prefix(prefix, json=context.json)
    if context.force_32bit and prefix == context.root_prefix:
        raise CondaValueError("cannot use CONDA_FORCE_32BIT=1 in base env")
    if isupdate and not (args.file or args.packages or context.update_modifier
                         == UpdateModifier.UPDATE_ALL):
        raise CondaValueError("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)

    if not newenv:
        if isdir(prefix):
            delete_trash(prefix)
            if not isfile(join(prefix, 'conda-meta', 'history')):
                if paths_equal(prefix, context.conda_prefix):
                    raise NoBaseEnvironmentError()
                else:
                    if not path_is_clean(prefix):
                        raise DirectoryNotACondaEnvironmentError(prefix)
            else:
                # fall-through expected under normal operation
                pass
        else:
            if args.mkdir:
                try:
                    mkdir_p(prefix)
                except EnvironmentError as e:
                    raise CondaOSError("Could not create directory: %s" %
                                       prefix,
                                       caused_by=e)
            else:
                raise EnvironmentLocationNotFound(prefix)

    # context.__init__(argparse_args=args)

    prepend = not args.override_channels
    prefix = context.target_prefix

    index_args = {
        'use_cache': args.use_index_cache,
        'channel_urls': context.channels,
        'unknown': args.unknown,
        'prepend': not args.override_channels,
        'use_local': args.use_local
    }

    args_packages = [s.strip('"\'') for s in args.packages]
    if newenv and not args.no_default_packages:
        # Override defaults if they are specified at the command line
        # TODO: rework in 4.4 branch using MatchSpec
        args_packages_names = [
            pkg.replace(' ', '=').split('=', 1)[0] for pkg in args_packages
        ]
        for default_pkg in context.create_default_packages:
            default_pkg_name = default_pkg.replace(' ', '=').split('=', 1)[0]
            if default_pkg_name not in args_packages_names:
                args_packages.append(default_pkg)

    num_cp = sum(s.endswith('.tar.bz2') for s in args_packages)
    if num_cp:
        if num_cp == len(args_packages):
            explicit(args_packages, prefix, verbose=not context.quiet)
            return
        else:
            raise CondaValueError(
                "cannot mix specifications with conda package"
                " filenames")

    index = get_index(channel_urls=index_args['channel_urls'],
                      prepend=index_args['prepend'],
                      platform=None,
                      use_local=index_args['use_local'],
                      use_cache=index_args['use_cache'],
                      unknown=index_args['unknown'],
                      prefix=prefix)

    channel_json = [(str(x.channel), x.cache_path_json) for x in index]

    installed_pkg_recs, output = get_installed_packages(prefix,
                                                        show_channel_urls=True)
    installed_json_f = tempfile.NamedTemporaryFile('w', delete=False)
    installed_json_f.write(json_dump(output))
    installed_json_f.flush()

    specs = []
    if args.file:
        for fpath in args.file:
            try:
                specs.extend(specs_from_url(fpath, json=context.json))
            except UnicodeError:
                raise CondaError(
                    "Error reading file, file should be a text file containing"
                    " packages \nconda create --help for details")
        if '@EXPLICIT' in specs:
            explicit(specs,
                     prefix,
                     verbose=not context.quiet,
                     index_args=index_args)
            return

    specs.extend(specs_from_args(args_packages, json=context.json))

    if isinstall and args.revision:
        get_revision(args.revision, json=context.json)
    elif isinstall and not (args.file or args_packages):
        raise CondaValueError(
            "too few arguments, "
            "must supply command line package specs or --file")

    # for 'conda update', make sure the requested specs actually exist in the prefix
    # and that they are name-only specs
    if isupdate and context.update_modifier == UpdateModifier.UPDATE_ALL:
        print(
            "Currently, mamba can only update explicit packages! (e.g. mamba update numpy python ...)"
        )
        exit()

    if isupdate and context.update_modifier != UpdateModifier.UPDATE_ALL:
        prefix_data = PrefixData(prefix)
        for spec in specs:
            spec = MatchSpec(spec)
            if not spec.is_name_only_spec:
                raise CondaError("Invalid spec for 'conda update': %s\n"
                                 "Use 'conda install' instead." % spec)
            if not prefix_data.get(spec.name, None):
                raise PackageNotInstalledError(prefix, spec.name)

    if newenv and args.clone:
        if args.packages:
            raise TooManyArgumentsError(
                0, len(args.packages), list(args.packages),
                'did not expect any arguments for --clone')

        clone(args.clone,
              prefix,
              json=context.json,
              quiet=context.quiet,
              index_args=index_args)
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
        return

    specs = [MatchSpec(s) for s in specs]
    mamba_solve_specs = [s.conda_build_form() for s in specs]

    print("\n\nLooking for: {}\n\n".format(specs))

    strict_priority = (context.channel_priority == ChannelPriority.STRICT)
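    # Hand the solve to libmamba: the channel repodata caches, the snapshot of
    # installed packages, and the requested specs produce the lists of packages
    # to link and unlink.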
    to_link, to_unlink = api.solve(channel_json, installed_json_f.name,
                                   mamba_solve_specs, isupdate,
                                   strict_priority)

    to_link_records, to_unlink_records = [], []

    final_precs = IndexedSet(PrefixData(prefix).iter_records())

    def get_channel(c):
        for x in index:
            if str(x.channel) == c:
                return x

    for c, pkg in to_unlink:
        for i_rec in installed_pkg_recs:
            if i_rec.fn == pkg:
                final_precs.remove(i_rec)
                to_unlink_records.append(i_rec)
                break
        else:
            print("No package record found!")

    for c, pkg, jsn_s in to_link:
        sdir = get_channel(c)
        rec = to_package_record_from_subjson(sdir, pkg, jsn_s)
        final_precs.add(rec)
        to_link_records.append(rec)

    unlink_precs, link_precs = diff_for_unlink_link_precs(
        prefix,
        final_precs=IndexedSet(PrefixGraph(final_precs).graph),
        specs_to_add=specs,
        force_reinstall=context.force_reinstall)

    pref_setup = PrefixSetup(target_prefix=prefix,
                             unlink_precs=unlink_precs,
                             link_precs=link_precs,
                             remove_specs=[],
                             update_specs=specs)

    conda_transaction = UnlinkLinkTransaction(pref_setup)
    handle_txn(conda_transaction, prefix, args, newenv)

    try:
        installed_json_f.close()
        os.unlink(installed_json_f.name)
    except Exception:
        pass
Example #22
def install(args, parser, command="install"):
    """
    mamba install, mamba update, and mamba create
    """
    context.validate_configuration()
    check_non_admin()

    init_api_context(use_mamba_experimental)

    newenv = bool(command == "create")
    isinstall = bool(command == "install")
    solver_task = api.SOLVER_INSTALL

    isupdate = bool(command == "update")
    if isupdate:
        solver_task = api.SOLVER_UPDATE
        solver_options.clear()

    if newenv:
        ensure_name_or_prefix(args, command)
    prefix = context.target_prefix
    if newenv:
        check_prefix(prefix, json=context.json)
    if context.force_32bit and prefix == context.root_prefix:
        raise CondaValueError("cannot use CONDA_FORCE_32BIT=1 in base env")
    if isupdate and not (args.file or args.packages or context.update_modifier
                         == UpdateModifier.UPDATE_ALL):
        raise CondaValueError("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)

    if not newenv:
        if isdir(prefix):
            if on_win:
                delete_trash(prefix)

            if not isfile(join(prefix, "conda-meta", "history")):
                if paths_equal(prefix, context.conda_prefix):
                    raise NoBaseEnvironmentError()
                else:
                    if not path_is_clean(prefix):
                        raise DirectoryNotACondaEnvironmentError(prefix)
            else:
                # fall-through expected under normal operation
                pass
        else:
            if hasattr(args, "mkdir") and args.mkdir:
                try:
                    mkdir_p(prefix)
                except EnvironmentError as e:
                    raise CondaOSError("Could not create directory: %s" %
                                       prefix,
                                       caused_by=e)
            else:
                raise EnvironmentLocationNotFound(prefix)

    prefix = context.target_prefix

    #############################
    # Get SPECS                 #
    #############################

    args_packages = [s.strip("\"'") for s in args.packages]
    if newenv and not args.no_default_packages:
        # Override defaults if they are specified at the command line
        # TODO: rework in 4.4 branch using MatchSpec
        args_packages_names = [
            pkg.replace(" ", "=").split("=", 1)[0] for pkg in args_packages
        ]
        for default_pkg in context.create_default_packages:
            default_pkg_name = default_pkg.replace(" ", "=").split("=", 1)[0]
            if default_pkg_name not in args_packages_names:
                args_packages.append(default_pkg)

    num_cp = sum(s.endswith(".tar.bz2") for s in args_packages)
    if num_cp:
        if num_cp == len(args_packages):
            explicit(args_packages,
                     prefix,
                     verbose=not (context.quiet or context.json))
            return
        else:
            raise CondaValueError(
                "cannot mix specifications with conda package"
                " filenames")

    specs = []

    index_args = {
        "use_cache": args.use_index_cache,
        "channel_urls": context.channels,
        "unknown": args.unknown,
        "prepend": not args.override_channels,
        "use_local": args.use_local,
    }

    if args.file:
        file_specs = []
        for fpath in args.file:
            try:
                file_specs += specs_from_url(fpath, json=context.json)
            except UnicodeError:
                raise CondaValueError(
                    "Error reading file, file should be a text file containing"
                    " packages \nconda create --help for details")
        if "@EXPLICIT" in file_specs:
            explicit(
                file_specs,
                prefix,
                verbose=not (context.quiet or context.json),
                index_args=index_args,
            )
            return
        specs.extend([MatchSpec(s) for s in file_specs])

    specs.extend(specs_from_args(args_packages, json=context.json))

    # update channels from package specs (e.g. mychannel::mypackage adds mychannel)
    channels = [c for c in context.channels]
    for spec in specs:
        # CONDA TODO: correct handling for subdir isn't yet done
        spec_channel = spec.get_exact_value("channel")
        if spec_channel and spec_channel not in channels:
            channels.append(spec_channel)

    index_args["channel_urls"] = channels

    installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)

    if isinstall and args.revision:
        get_revision(args.revision, json=context.json)
    elif isinstall and not (args.file or args_packages):
        raise CondaValueError(
            "too few arguments, "
            "must supply command line package specs or --file")

    installed_names = [i_rec.name for i_rec in installed_pkg_recs]
    # for 'conda update', make sure the requested specs actually exist in the prefix
    # and that they are name-only specs
    if isupdate and context.update_modifier == UpdateModifier.UPDATE_ALL:
        for i in installed_names:
            if i != "python":
                specs.append(MatchSpec(i))

        prefix_data = PrefixData(prefix)
        for s in args_packages:
            s = MatchSpec(s)
            if s.name == "python":
                specs.append(s)
            if not s.is_name_only_spec:
                raise CondaValueError("Invalid spec for 'conda update': %s\n"
                                      "Use 'conda install' instead." % s)
            if not prefix_data.get(s.name, None):
                raise PackageNotInstalledError(prefix, s.name)

    elif context.update_modifier == UpdateModifier.UPDATE_DEPS:
        # find the deps for each package and add to the update job
        # solver_task |= api.SOLVER_FORCEBEST
        final_specs = specs
        for spec in specs:
            prec = installed_pkg_recs[installed_names.index(spec.name)]
            for dep in prec.depends:
                ms = MatchSpec(dep)
                if ms.name != "python":
                    final_specs.append(MatchSpec(ms.name))
        specs = set(final_specs)

    if newenv and args.clone:
        if args.packages:
            raise TooManyArgumentsError(
                0,
                len(args.packages),
                list(args.packages),
                "did not expect any arguments for --clone",
            )

        clone(
            args.clone,
            prefix,
            json=context.json,
            quiet=(context.quiet or context.json),
            index_args=index_args,
        )
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
        return

    if not (context.quiet or context.json):
        print("\nLooking for: {}\n".format([str(s) for s in specs]))

    spec_names = [s.name for s in specs]

    # If python was not specified, check if it is installed.
    # If yes, add the installed python to the specs to prevent updating it.
    python_constraint = None

    if "python" not in spec_names:
        if "python" in installed_names:
            i = installed_names.index("python")
            version = installed_pkg_recs[i].version
            python_constraint = MatchSpec("python==" +
                                          version).conda_build_form()

    mamba_solve_specs = [str(s) for s in specs]

    pool = api.Pool()

    repos = []

    if use_mamba_experimental or context.force_reinstall:
        prefix_data = api.PrefixData(context.target_prefix)
        prefix_data.load()

    # add installed
    if use_mamba_experimental:
        repo = api.Repo(pool, prefix_data)
        repos.append(repo)
    else:
        repo = api.Repo(pool, "installed", installed_json_f.name, "")
        repo.set_installed()
        repos.append(repo)

    if newenv and not specs:
        # creating an empty environment with e.g. "mamba create -n my_env"
        # should not download the repodata
        index = []
        specs_to_add = []
        specs_to_remove = []
        to_link = []
        to_unlink = []
        installed_pkg_recs = []
    else:
        index = load_channels(pool, channels, repos)

        if context.force_reinstall:
            solver = api.Solver(pool, solver_options, prefix_data)
        else:
            solver = api.Solver(pool, solver_options)

        solver.set_postsolve_flags([
            (api.MAMBA_NO_DEPS, context.deps_modifier == DepsModifier.NO_DEPS),
            (api.MAMBA_ONLY_DEPS,
             context.deps_modifier == DepsModifier.ONLY_DEPS),
            (api.MAMBA_FORCE_REINSTALL, context.force_reinstall),
        ])
        solver.add_jobs(mamba_solve_specs, solver_task)

        if not context.force_reinstall:
            # as a security feature this will _always_ attempt to upgrade
            # certain packages
            for a_pkg in [_.name for _ in context.aggressive_update_packages]:
                if a_pkg in installed_names:
                    solver.add_jobs([a_pkg], api.SOLVER_UPDATE)

        if python_constraint:
            solver.add_pin(python_constraint)

        pinned_specs = get_pinned_specs(context.target_prefix)
        if pinned_specs:
            conda_prefix_data = PrefixData(context.target_prefix)
        for s in pinned_specs:
            x = conda_prefix_data.query(s.name)
            if x:
                for el in x:
                    if not s.match(el):
                        print(
                            "Your pinning does not match what's currently installed."
                            " Please remove the pin and fix your installation")
                        print("  Pin: {}".format(s))
                        print("  Currently installed: {}".format(el))
                        exit(1)
            solver.add_pin(str(s))

        success = solver.solve()
        if not success:
            print(solver.problems_to_str())
            exit_code = 1
            return exit_code

        package_cache = api.MultiPackageCache(context.pkgs_dirs)
        transaction = api.Transaction(solver, package_cache)
        mmb_specs, to_link, to_unlink = transaction.to_conda()

        specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]
        specs_to_remove = [MatchSpec(m) for m in mmb_specs[1]]

        transaction.log_json()

        downloaded = transaction.prompt(
            PackageCacheData.first_writable().pkgs_dir, repos)
        if not downloaded:
            exit(0)
        PackageCacheData.first_writable().reload()

    # if use_mamba_experimental and not os.name == "nt":
    if use_mamba_experimental:
        if newenv and not isdir(context.target_prefix) and not context.dry_run:
            mkdir_p(prefix)

        transaction.execute(prefix_data,
                            PackageCacheData.first_writable().pkgs_dir)
    else:
        conda_transaction = to_txn(
            specs_to_add,
            specs_to_remove,
            prefix,
            to_link,
            to_unlink,
            installed_pkg_recs,
            index,
        )
        handle_txn(conda_transaction, prefix, args, newenv)

    try:
        installed_json_f.close()
        os.unlink(installed_json_f.name)
    except Exception:
        pass
Example #23
def remove(args, parser):
    if not (args.all or args.package_names):
        raise CondaValueError("no package names supplied,\n"
                              '       try "mamba remove -h" for more details')

    prefix = context.target_prefix
    check_non_admin()
    init_api_context()

    if args.all and prefix == context.default_prefix:
        raise CondaEnvironmentError("cannot remove current environment. \
                                     deactivate and run mamba remove again")

    if args.all and path_is_clean(prefix):
        # full environment removal was requested, but environment doesn't exist anyway
        return 0

    if args.all:
        if prefix == context.root_prefix:
            raise CondaEnvironmentError(
                "cannot remove root environment,\n"
                "       add -n NAME or -p PREFIX option")
        print("\nRemove all packages in environment %s:\n" % prefix,
              file=sys.stderr)

        if "package_names" in args:
            stp = PrefixSetup(
                target_prefix=prefix,
                unlink_precs=tuple(PrefixData(prefix).iter_records()),
                link_precs=(),
                remove_specs=(),
                update_specs=(),
                neutered_specs=(),
            )
            txn = UnlinkLinkTransaction(stp)
            try:
                handle_txn(txn, prefix, args, False, True)
            except PackagesNotFoundError:
                print(
                    "No packages found in %s. Continuing environment removal" %
                    prefix)

        rm_rf(prefix, clean_empty_parents=True)
        unregister_env(prefix)

        return

    else:
        if args.features:
            specs = tuple(
                MatchSpec(track_features=f) for f in set(args.package_names))
        else:
            specs = [s for s in specs_from_args(args.package_names)]
        if not context.quiet:
            print("Removing specs: {}".format(
                [s.conda_build_form() for s in specs]))

        installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)

        mamba_solve_specs = [s.conda_build_form() for s in specs]

        solver_options.append((api.SOLVER_FLAG_ALLOW_UNINSTALL, 1))

        pool = api.Pool()
        repos = []

        # add installed
        if use_mamba_experimental:
            prefix_data = api.PrefixData(context.target_prefix)
            prefix_data.load()
            repo = api.Repo(pool, prefix_data)
            repos.append(repo)
        else:
            repo = api.Repo(pool, "installed", installed_json_f.name, "")
            repo.set_installed()
            repos.append(repo)

        solver = api.Solver(pool, solver_options)
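        # Request erasure of the given specs; SOLVER_FLAG_ALLOW_UNINSTALL (set
        # above) lets the solver remove installed packages to satisfy the job.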
        solver.add_jobs(mamba_solve_specs, api.SOLVER_ERASE)
        success = solver.solve()
        if not success:
            print(solver.problems_to_str())
            exit_code = 1
            return exit_code

        package_cache = api.MultiPackageCache(context.pkgs_dirs)
        transaction = api.Transaction(solver, package_cache)
        downloaded = transaction.prompt(
            PackageCacheData.first_writable().pkgs_dir, repos)
        if not downloaded:
            exit(0)

        mmb_specs, to_link, to_unlink = transaction.to_conda()
        transaction.log_json()

        specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]
        specs_to_remove = [MatchSpec(m) for m in mmb_specs[1]]

        conda_transaction = to_txn(
            specs_to_add,
            specs_to_remove,
            prefix,
            to_link,
            to_unlink,
            installed_pkg_recs,
        )
        handle_txn(conda_transaction, prefix, args, False, True)
Example #24
def mamba_install(prefix, specs, args, env, *_, **kwargs):
    # TODO: support all various ways this happens
    init_api_context()
    api.Context().target_prefix = prefix
    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = [chan for chan in env.channels if chan != 'nodefaults']

    if 'nodefaults' not in env.channels:
        channel_urls.extend(context.channels)
    _channel_priority_map = prioritize_channels(channel_urls)

    index = get_index(tuple(_channel_priority_map.keys()), prepend=False)

    channel_json = []

    for subdir, chan in index:
        # add priority here
        priority = len(_channel_priority_map) - _channel_priority_map[chan.url(with_credentials=True)][1]
        subpriority = 0 if chan.platform == 'noarch' else 1
        if not subdir.loaded() and chan.platform != 'noarch':
            # skip a subdir whose repodata failed to load, unless the channel is noarch
            continue

        channel_json.append((chan, subdir, priority, subpriority))

    if not (context.quiet or context.json):
        print("\n\nLooking for: {}\n\n".format(specs))

    solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]

    pool = api.Pool()
    repos = []

    # if using update
    installed_pkg_recs = []
    if 'update' in args.func:
        installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)
        repo = api.Repo(pool, "installed", installed_json_f.name, "")
        repo.set_installed()
        repos.append(repo)

    for channel, subdir, priority, subpriority in channel_json:
        repo = subdir.create_repo(pool)
        repo.set_priority(priority, subpriority)
        repos.append(repo)

    solver = api.Solver(pool, solver_options)
    solver.add_jobs(specs, api.SOLVER_INSTALL)
    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        exit(1)

    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(solver, package_cache)
    if not (context.quiet or context.json):
        transaction.print()
    mmb_specs, to_link, to_unlink = transaction.to_conda()

    specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]

    final_precs = IndexedSet()

    conda_transaction = to_txn(specs_to_add, [], prefix, to_link, to_unlink, installed_pkg_recs, index)

    pfe = conda_transaction._get_pfe()
    pfe.execute()
    conda_transaction.execute()
Example #25
def test_general_graph_bfs_simple():
    a = PackageRecord(name="a",
                      version="1",
                      build="0",
                      build_number=0,
                      depends=["b", "c", "d"])
    b = PackageRecord(name="b",
                      version="1",
                      build="0",
                      build_number=0,
                      depends=["e"])
    c = PackageRecord(name="c", version="1", build="0", build_number=0)
    d = PackageRecord(name="d",
                      version="1",
                      build="0",
                      build_number=0,
                      depends=["f", "g"])
    e = PackageRecord(name="e", version="1", build="0", build_number=0)
    f = PackageRecord(name="f", version="1", build="0", build_number=0)
    g = PackageRecord(name="g", version="1", build="0", build_number=0)
    records = [a, b, c, d, e, f, g]
    graph = GeneralGraph(records)

    a_to_c = graph.breadth_first_search_by_name(MatchSpec("a"), MatchSpec("c"))
    assert a_to_c == [MatchSpec("a"), MatchSpec("c")]

    a_to_f = graph.breadth_first_search_by_name(MatchSpec("a"), MatchSpec("f"))
    assert a_to_f == [MatchSpec("a"), MatchSpec("d"), MatchSpec("f")]

    a_to_a = graph.breadth_first_search_by_name(MatchSpec("a"), MatchSpec("a"))
    assert a_to_a == [MatchSpec("a")]

    a_to_not_exist = graph.breadth_first_search_by_name(
        MatchSpec("a"), MatchSpec("z"))
    assert a_to_not_exist is None

    backwards = graph.breadth_first_search_by_name(MatchSpec("d"),
                                                   MatchSpec("a"))
    assert backwards is None
Example #26
    def test_exact_values(self):
        assert MatchSpec("*").get_exact_value('name') is None
        assert MatchSpec("numpy").get_exact_value('name') == 'numpy'

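        # "=1.7" is a fuzzy constraint (equivalent to 1.7.*), so there is no exact
        # version value; "==1.7" and "[version=1.7]" pin the version exactly.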
        assert MatchSpec("numpy=1.7").get_exact_value('version') is None
        assert MatchSpec("numpy==1.7").get_exact_value('version') == '1.7'
        assert MatchSpec("numpy[version=1.7]").get_exact_value('version') == '1.7'

        assert MatchSpec("numpy=1.7=py3*_2").get_exact_value('version') == '1.7'
        assert MatchSpec("numpy=1.7=py3*_2").get_exact_value('build') is None
        assert MatchSpec("numpy=1.7=py3*_2").get_exact_value('version') == '1.7'
        assert MatchSpec("numpy=1.7=py3*_2").get_exact_value('build') is None
        assert MatchSpec("numpy=1.7.*=py37_2").get_exact_value('version') is None
        assert MatchSpec("numpy=1.7.*=py37_2").get_exact_value('build') == 'py37_2'
Example #27
def get_sqlite_cyclical_record_set():
    # sqlite-3.20.1-haaaaaaa_4
    specs = MatchSpec("sqlite=3.20.1[build_number=4]"), MatchSpec("flask"),
    with get_solver_4(specs) as solver:
        final_state = solver.solve_final_state()
    return final_state, frozenset(specs)
Example #28
    def test_track_features_match(self):
        dst = Dist('defaults::foo-1.2.3-4.tar.bz2')
        a = MatchSpec(features='test')
        assert text_type(a) == "*[features=test]"
        assert not a.match(DPkg(dst))
        assert not a.match(DPkg(dst, track_features=''))

        a = MatchSpec(track_features='test')
        assert a.match(DPkg(dst, track_features='test'))
        assert not a.match(DPkg(dst, track_features='test2'))
        assert not a.match(DPkg(dst, track_features='test me'))
        assert not a.match(DPkg(dst, track_features='you test'))
        assert not a.match(DPkg(dst, track_features='you test me'))
        assert a.get_exact_value('track_features') == frozenset(('test',))

        b = MatchSpec(track_features='mkl')
        assert not b.match(DPkg(dst))
        assert b.match(DPkg(dst, track_features='mkl'))
        assert b.match(DPkg(dst, track_features='mkl'))
        assert not b.match(DPkg(dst, track_features='mkl debug'))
        assert not b.match(DPkg(dst, track_features='debug'))

        c = MatchSpec(track_features='nomkl')
        assert not c.match(DPkg(dst))
        assert not c.match(DPkg(dst, track_features='mkl'))
        assert c.match(DPkg(dst, track_features='nomkl'))
        assert not c.match(DPkg(dst, track_features='nomkl debug'))

        # regression test for #6860
        d = MatchSpec(track_features='')
        assert d.get_exact_value('track_features') == frozenset()
        d = MatchSpec(track_features=' ')
        assert d.get_exact_value('track_features') == frozenset()
        d = MatchSpec(track_features=('', ''))
        assert d.get_exact_value('track_features') == frozenset()
        d = MatchSpec(track_features=('', '', 'test'))
        assert d.get_exact_value('track_features') == frozenset(('test',))
Example #29
def test_prefix_graph_1():
    # Basic initial test for public methods of PrefixGraph.

    records, specs = get_conda_build_record_set()
    graph = PrefixGraph(records, specs)

    nodes = tuple(rec.name for rec in graph.records)
    pprint(nodes)
    order = (
        'intel-openmp',
        'ca-certificates',
        'conda-env',
        'libgcc-ng',
        'libstdcxx-ng',
        'libffi',
        'ncurses',
        'openssl',
        'patchelf',
        'tk',
        'xz',
        'yaml',
        'zlib',
        'libedit',
        'readline',
        'sqlite',
        'python',
        'asn1crypto',
        'beautifulsoup4',
        'certifi',
        'chardet',
        'cryptography-vectors',
        'filelock',
        'glob2',
        'idna',
        'markupsafe',
        'pkginfo',
        'psutil',
        'pycosat',
        'pycparser',
        'pysocks',
        'pyyaml',
        'ruamel_yaml',
        'six',
        'cffi',
        'setuptools',
        'cryptography',
        'jinja2',
        'pyopenssl',
        'urllib3',
        'requests',
        'conda',
        'conda-build',
    )
    assert nodes == order

    python_node = graph.get_node_by_name('python')
    python_ancestors = graph.all_ancestors(python_node)
    nodes = tuple(rec.name for rec in python_ancestors)
    pprint(nodes)
    order = (
        'ca-certificates',
        'libgcc-ng',
        'libstdcxx-ng',
        'libffi',
        'ncurses',
        'openssl',
        'tk',
        'xz',
        'zlib',
        'libedit',
        'readline',
        'sqlite',
    )
    assert nodes == order

    python_descendants = graph.all_descendants(python_node)
    nodes = tuple(rec.name for rec in python_descendants)
    pprint(nodes)
    order = (
        'asn1crypto',
        'beautifulsoup4',
        'certifi',
        'chardet',
        'cryptography-vectors',
        'filelock',
        'glob2',
        'idna',
        'markupsafe',
        'pkginfo',
        'psutil',
        'pycosat',
        'pycparser',
        'pysocks',
        'pyyaml',
        'ruamel_yaml',
        'six',
        'cffi',
        'setuptools',
        'cryptography',
        'jinja2',
        'pyopenssl',
        'urllib3',
        'requests',
        'conda',
        'conda-build',
    )
    assert nodes == order

    # test remove_specs
    removed_nodes = graph.remove_spec(MatchSpec("requests"))
    nodes = tuple(rec.name for rec in removed_nodes)
    pprint(nodes)
    order = (
        'requests',
        'conda',
        'conda-build',
    )
    assert nodes == order

    nodes = tuple(rec.name for rec in graph.records)
    pprint(nodes)
    order = (
        'conda-env',
        'intel-openmp',
        'ca-certificates',
        'libgcc-ng',
        'libstdcxx-ng',
        'libffi',
        'ncurses',
        'openssl',
        'patchelf',
        'tk',
        'xz',
        'yaml',
        'zlib',
        'libedit',
        'readline',
        'sqlite',
        'python',
        'asn1crypto',
        'beautifulsoup4',
        'certifi',
        'chardet',
        'cryptography-vectors',
        'filelock',
        'glob2',
        'idna',
        'markupsafe',
        'pkginfo',
        'psutil',
        'pycosat',
        'pycparser',
        'pysocks',
        'pyyaml',
        'ruamel_yaml',
        'six',
        'cffi',
        'setuptools',
        'cryptography',
        'jinja2',
        'pyopenssl',
        'urllib3',
    )
    assert nodes == order

    spec_matches = {
        'channel-4::intel-openmp-2018.0.3-0': {'intel-openmp'},
    }
    assert {
        node.dist_str(): set(str(ms) for ms in specs)
        for node, specs in graph.spec_matches.items()
    } == spec_matches

    removed_nodes = graph.prune()
    nodes = tuple(rec.dist_str() for rec in graph.records)
    pprint(nodes)
    order = ('channel-4::intel-openmp-2018.0.3-0', )
    assert nodes == order

    removed_nodes = tuple(rec.name for rec in removed_nodes)
    order = (
        'conda-env',
        'ca-certificates',
        'libgcc-ng',
        'libstdcxx-ng',
        'libffi',
        'ncurses',
        'openssl',
        'patchelf',
        'tk',
        'xz',
        'yaml',
        'zlib',
        'libedit',
        'readline',
        'sqlite',
        'python',
        'asn1crypto',
        'beautifulsoup4',
        'certifi',
        'chardet',
        'cryptography-vectors',
        'filelock',
        'glob2',
        'idna',
        'markupsafe',
        'pkginfo',
        'psutil',
        'pycosat',
        'pycparser',
        'pysocks',
        'pyyaml',
        'ruamel_yaml',
        'six',
        'cffi',
        'setuptools',
        'cryptography',
        'jinja2',
        'pyopenssl',
        'urllib3',
    )
    pprint(removed_nodes)
    assert removed_nodes == order
Example #30
def install(args, parser, command='install'):
    """
    mamba install, mamba update, and mamba create
    """
    context.validate_configuration()
    check_non_admin()

    init_api_context()

    newenv = bool(command == 'create')
    isinstall = bool(command == 'install')
    solver_task = api.SOLVER_INSTALL

    isupdate = bool(command == 'update')
    if isupdate:
        solver_task = api.SOLVER_UPDATE

    if newenv:
        ensure_name_or_prefix(args, command)
    prefix = context.target_prefix
    if newenv:
        check_prefix(prefix, json=context.json)
    if context.force_32bit and prefix == context.root_prefix:
        raise CondaValueError("cannot use CONDA_FORCE_32BIT=1 in base env")
    if isupdate and not (args.file or args.packages
                         or context.update_modifier == UpdateModifier.UPDATE_ALL):
        raise CondaValueError("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)

    if not newenv:
        if isdir(prefix):
            delete_trash(prefix)
            if not isfile(join(prefix, 'conda-meta', 'history')):
                if paths_equal(prefix, context.conda_prefix):
                    raise NoBaseEnvironmentError()
                else:
                    if not path_is_clean(prefix):
                        raise DirectoryNotACondaEnvironmentError(prefix)
            else:
                # fall-through expected under normal operation
                pass
        else:
            if args.mkdir:
                try:
                    mkdir_p(prefix)
                except EnvironmentError as e:
                    raise CondaOSError("Could not create directory: %s" % prefix, caused_by=e)
            else:
                raise EnvironmentLocationNotFound(prefix)

    prefix = context.target_prefix

    index_args = {
        'use_cache': args.use_index_cache,
        'channel_urls': context.channels,
        'unknown': args.unknown,
        'prepend': not args.override_channels,
        'use_local': args.use_local
    }

    args_packages = [s.strip('"\'') for s in args.packages]
    if newenv and not args.no_default_packages:
        # Override defaults if they are specified at the command line
        # TODO: rework in 4.4 branch using MatchSpec
        args_packages_names = [pkg.replace(' ', '=').split('=', 1)[0] for pkg in args_packages]
        for default_pkg in context.create_default_packages:
            default_pkg_name = default_pkg.replace(' ', '=').split('=', 1)[0]
            if default_pkg_name not in args_packages_names:
                args_packages.append(default_pkg)

    num_cp = sum(s.endswith('.tar.bz2') for s in args_packages)
    if num_cp:
        if num_cp == len(args_packages):
            explicit(args_packages, prefix, verbose=not (context.quiet or context.json))
            return
        else:
            raise CondaValueError("cannot mix specifications with conda package"
                                  " filenames")

    index = get_index(channel_urls=index_args['channel_urls'],
                      prepend=index_args['prepend'], platform=None,
                      use_local=index_args['use_local'], use_cache=index_args['use_cache'],
                      unknown=index_args['unknown'], prefix=prefix)

    channel_json = []
    strict_priority = (context.channel_priority == ChannelPriority.STRICT)

    if strict_priority:
        # first, count unique channels
        n_channels = len(set([channel.canonical_name for _, channel in index]))
        current_channel = index[0][1].canonical_name
        channel_prio = n_channels

    for subdir, chan in index:
        # add priority here
        if strict_priority:
            if chan.canonical_name != current_channel:
                channel_prio -= 1
                current_channel = chan.canonical_name
            priority = channel_prio
        else:
            priority = 0

        subpriority = 0 if chan.platform == 'noarch' else 1

        if not subdir.loaded() and chan.platform != 'noarch':
            # skip subdirs that failed to load, unless this is the noarch subdir
            continue

        if context.verbosity != 0:
            print("Cache path: ", subdir.cache_path())
        channel_json.append((chan, subdir.cache_path(), priority, subpriority))

    # unpack both the temp json file and the installed records used further below
    installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)

    specs = []

    if args.file:
        for fpath in args.file:
            try:
                file_specs = specs_from_url(fpath, json=context.json)
            except UnicodeError:
                raise CondaValueError("Error reading file, file should be a text file containing"
                                 " packages \nconda create --help for details")
        if '@EXPLICIT' in file_specs:
            explicit(file_specs, prefix, verbose=not (context.quiet or context.json), index_args=index_args)
            return
        specs.extend([MatchSpec(s) for s in file_specs])

    specs.extend(specs_from_args(args_packages, json=context.json))

    if isinstall and args.revision:
        get_revision(args.revision, json=context.json)
    elif isinstall and not (args.file or args_packages):
        raise CondaValueError("too few arguments, "
                              "must supply command line package specs or --file")

    # for 'conda update', make sure the requested specs actually exist in the prefix
    # and that they are name-only specs
    if isupdate and context.update_modifier == UpdateModifier.UPDATE_ALL:
        # Note: History(prefix).get_requested_specs_map()
        print("Currently, mamba can only update explicit packages! (e.g. mamba update numpy python ...)")
        exit()

    if isupdate and context.update_modifier != UpdateModifier.UPDATE_ALL:
        prefix_data = PrefixData(prefix)
        for s in args_packages:
            s = MatchSpec(s)
            if not s.is_name_only_spec:
                raise CondaValueError("Invalid spec for 'conda update': %s\n"
                                 "Use 'conda install' instead." % s)
            if not prefix_data.get(s.name, None):
                raise PackageNotInstalledError(prefix, s.name)

    if newenv and args.clone:
        if args.packages:
            raise TooManyArgumentsError(0, len(args.packages), list(args.packages),
                                        'did not expect any arguments for --clone')

        clone(args.clone, prefix, json=context.json, quiet=(context.quiet or context.json), index_args=index_args)
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
        return

    spec_names = [s.name for s in specs]

    if not (context.quiet or context.json):
        print("\nLooking for: {}\n".format(spec_names))

    # If python was not specified, check if it is installed.
    # If yes, add the installed python to the specs to prevent updating it.
    python_added = False
    if 'python' not in spec_names:
        installed_names = [i_rec.name for i_rec in installed_pkg_recs]
        if 'python' in installed_names:
            i = installed_names.index('python')
            version = installed_pkg_recs[i].version
            specs.append(MatchSpec('python==' + version))
            python_added = True

    mamba_solve_specs = [s.conda_build_form() for s in specs]

    pool = api.Pool()

    repos = []

    # add installed
    if use_mamba_experimental:
        prefix_data = api.PrefixData(context.target_prefix)
        prefix_data.load()
        repo = api.Repo(pool, prefix_data)
        repos.append(repo)
    else:
        repo = api.Repo(pool, "installed", installed_json_f.name, "")
        repo.set_installed()
        repos.append(repo)

    for channel, cache_file, priority, subpriority in channel_json:
        repo = api.Repo(pool, str(channel), cache_file, channel.url(with_credentials=True))
        repo.set_priority(priority, subpriority)
        repos.append(repo)

    solver = api.Solver(pool, solver_options)
    solver.add_jobs(mamba_solve_specs, solver_task)
    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        exit_code = 1
        return exit_code

    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(solver, package_cache)
    to_link, to_unlink = transaction.to_conda()
    transaction.log_json()

    downloaded = transaction.prompt(PackageCacheData.first_writable().pkgs_dir, repos)
    if not downloaded:
        exit(0)
    PackageCacheData.first_writable().reload()

    if python_added:
        specs = [s for s in specs if s.name != 'python']

    if use_mamba_experimental and not os.name == 'nt':
        if command == 'create' and not isdir(context.target_prefix):
            mkdir_p(prefix)
        transaction.execute(prefix_data, PackageCacheData.first_writable().pkgs_dir)
    else:
        conda_transaction = to_txn(specs, (), prefix, to_link, to_unlink, index)
        handle_txn(conda_transaction, prefix, args, newenv)

    try:
        installed_json_f.close()
        os.unlink(installed_json_f.name)
    except:
        pass
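
The strict-priority branch above walks the index in channel order, decrementing the channel priority each time the canonical channel name changes and giving noarch subdirs a lower subpriority than platform subdirs. The same bookkeeping in isolation, over hypothetical (platform, channel) pairs:

# Hypothetical channel/subdir listing; priorities mirror the strict-priority branch above.
index = [
    ('linux-64', 'conda-forge'),
    ('noarch', 'conda-forge'),
    ('linux-64', 'defaults'),
    ('noarch', 'defaults'),
]

n_channels = len({chan for _, chan in index})
current_channel = index[0][1]
channel_prio = n_channels

priorities = []
for platform, chan in index:
    if chan != current_channel:
        channel_prio -= 1
        current_channel = chan
    subpriority = 0 if platform == 'noarch' else 1
    priorities.append((chan, platform, channel_prio, subpriority))

# conda-forge keeps priority 2, defaults drops to 1; noarch always gets subpriority 0.
assert priorities == [
    ('conda-forge', 'linux-64', 2, 1),
    ('conda-forge', 'noarch', 2, 0),
    ('defaults', 'linux-64', 1, 1),
    ('defaults', 'noarch', 1, 0),
]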
Example #31
0
 def test_aggressive_update_packages(self):
     assert context.aggressive_update_packages == tuple()
     specs = ['certifi', 'openssl>=1.1']
     with env_var('CONDA_AGGRESSIVE_UPDATE_PACKAGES', ','.join(specs), stack_callback=conda_tests_ctxt_mgmt_def_pol):
         assert context.aggressive_update_packages == tuple(MatchSpec(s) for s in specs)
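
As the test above implies, the context turns the comma-separated environment value into one MatchSpec per entry. A minimal sketch of that conversion (the helper name is made up for illustration):

from conda.models.match_spec import MatchSpec

def parse_aggressive_update_packages(value):
    """Split a comma-separated env-var value into MatchSpec objects."""
    return tuple(MatchSpec(s) for s in value.split(',') if s)

specs = parse_aggressive_update_packages('certifi,openssl>=1.1')
assert [s.name for s in specs] == ['certifi', 'openssl']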
Example #32
0
 def test_build_number_and_filename(self):
     ms = MatchSpec('zlib 1.2.7 0')
     assert ms.get_exact_value('name') == 'zlib'
     assert ms.get_exact_value('version') == '1.2.7'
     assert ms.get_exact_value('build') == '0'
     assert ms._to_filename_do_not_use() == 'zlib-1.2.7-0.tar.bz2'
Example #33
0
 def test_basic_get_reduced_index(self):
     get_reduced_index(None, (Channel('defaults'), Channel('conda-test')),
                       context.subdirs, (MatchSpec('flask'), ),
                       'repodata.json')
Example #34
0
    def test_channel_merge(self):
        specs = (MatchSpec('pkgs/main::python'), MatchSpec('defaults::python'))
        with pytest.raises(ValueError):
            MatchSpec.merge(specs)

        specs = (MatchSpec('defaults::python'), MatchSpec('pkgs/main::python'))
        with pytest.raises(ValueError):
            MatchSpec.merge(specs)

        specs = (MatchSpec('defaults::python'),
                 MatchSpec('defaults::python 1.2.3'))
        merged = MatchSpec.merge(specs)
        assert len(merged) == 1
        assert str(merged[0]) == "defaults::python==1.2.3"

        specs = (MatchSpec('pkgs/free::python'),
                 MatchSpec('pkgs/free::python 1.2.3'))
        merged = MatchSpec.merge(specs)
        assert len(merged) == 1
        assert str(merged[0]) == "pkgs/free::python==1.2.3"
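
Because merge() raises ValueError when channels (or other fields) cannot be reconciled, callers usually wrap it; a small sketch under that assumption (try_merge is a hypothetical helper):

from conda.models.match_spec import MatchSpec

def try_merge(*spec_strings):
    """Return the merged specs, or None if the constraints are incompatible."""
    try:
        return MatchSpec.merge([MatchSpec(s) for s in spec_strings])
    except ValueError:
        return None

assert try_merge('pkgs/main::python', 'defaults::python') is None
merged = try_merge('defaults::python', 'defaults::python 1.2.3')
assert [str(m) for m in merged] == ['defaults::python==1.2.3']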
Example #35
0
def mamba_install(prefix, specs, args, env, *_, **kwargs):
    # TODO: support all various ways this happens
    init_api_context()
    api.Context().target_prefix = prefix

    match_specs = [MatchSpec(s) for s in specs]

    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = [chan for chan in env.channels if chan != "nodefaults"]

    if "nodefaults" not in env.channels:
        channel_urls.extend(context.channels)

    for spec in match_specs:
        # CONDA TODO: correct handling for subdir isn't yet done
        spec_channel = spec.get_exact_value("channel")
        if spec_channel and spec_channel not in channel_urls:
            channel_urls.append(str(spec_channel))

    ordered_channels_dict = prioritize_channels(channel_urls)

    pool = api.Pool()
    repos = []
    index = load_channels(pool,
                          tuple(ordered_channels_dict.keys()),
                          repos,
                          prepend=False)

    if not (context.quiet or context.json):
        print("\n\nLooking for: {}\n\n".format(specs))

    solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]

    installed_pkg_recs = []

    # We check for installed packages even while creating a new
    # Conda environment as virtual packages such as __glibc are
    # always available regardless of the environment.
    installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)
    repo = api.Repo(pool, "installed", installed_json_f.name, "")
    repo.set_installed()
    repos.append(repo)

    solver = api.Solver(pool, solver_options)

    # Also pin the Python version if it's installed
    # If python was not specified, check if it is installed.
    # If yes, add the installed python to the specs to prevent updating it.
    if "python" not in [s.name for s in match_specs]:
        installed_names = [i_rec.name for i_rec in installed_pkg_recs]
        if "python" in installed_names:
            i = installed_names.index("python")
            version = installed_pkg_recs[i].version
            python_constraint = MatchSpec("python==" +
                                          version).conda_build_form()
            solver.add_pin(python_constraint)

    pinned_specs = get_pinned_specs(prefix)
    pinned_specs_info = ""
    if pinned_specs:
        conda_prefix_data = PrefixData(prefix)
    for s in pinned_specs:
        x = conda_prefix_data.query(s.name)
        if x:
            for el in x:
                if not s.match(el):
                    print(
                        "Your pinning does not match what's currently installed."
                        " Please remove the pin and fix your installation")
                    print("  Pin: {}".format(s))
                    print("  Currently installed: {}".format(el))
                    exit(1)

        try:
            final_spec = s.conda_build_form()
            pinned_specs_info += f"  - {final_spec}\n"
            solver.add_pin(final_spec)
        except AssertionError:
            print(f"\nERROR: could not add pinned spec {s}. Make sure pin"
                  "is of the format\n"
                  "libname VERSION BUILD, for example libblas=*=*mkl\n")

    if pinned_specs_info:
        print(f"\n  Pinned packages:\n\n{pinned_specs_info}\n")

    solver.add_jobs(specs, api.SOLVER_INSTALL)

    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        exit(1)

    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(solver, package_cache,
                                  PackageCacheData.first_writable().pkgs_dir)
    if not (context.quiet or context.json):
        transaction.print()
    mmb_specs, to_link, to_unlink = transaction.to_conda()

    specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]

    conda_transaction = to_txn(specs_to_add, [], prefix, to_link, to_unlink,
                               installed_pkg_recs, index)

    pfe = conda_transaction._get_pfe()
    pfe.execute()
    conda_transaction.execute()
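
The pin check above reduces to a single predicate: every installed record with the pinned name must satisfy the pin. A standalone sketch over plain record dicts (hypothetical data; the real code queries PrefixData):

from conda.models.match_spec import MatchSpec

def pin_is_consistent(pin, installed_records):
    """True if every installed record with the pinned name satisfies the pin."""
    pin = MatchSpec(pin)
    same_name = [rec for rec in installed_records if rec['name'] == pin.name]
    return all(pin.match(rec) for rec in same_name)

installed = [
    {'name': 'openssl', 'version': '1.1.1k', 'build': 'h27cfd23_0', 'build_number': 0},
]
assert pin_is_consistent('openssl >=1.1', installed)
assert not pin_is_consistent('openssl <1.1', installed)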
Example #36
0
    def test_dist(self):
        dst = Dist('defaults::foo-1.2.3-4.tar.bz2')
        a = MatchSpec(dst)
        b = MatchSpec(a)
        c = MatchSpec(dst, optional=True, target='burg')
        d = MatchSpec(a, build='5')

        assert a == b
        assert hash(a) == hash(b)
        assert a is b

        assert a != c
        assert hash(a) != hash(c)

        assert a != d
        assert hash(a) != hash(d)

        p = MatchSpec(channel='defaults',
                      name='python',
                      version=VersionSpec('3.5*'))
        assert p.match(
            Dist(channel='defaults',
                 dist_name='python-3.5.3-1',
                 name='python',
                 version='3.5.3',
                 build_string='1',
                 build_number=1,
                 base_url=None,
                 platform=None))

        assert not p.match(
            Dist(channel='defaults',
                 dist_name='python-3.6.0-0',
                 name='python',
                 version='3.6.0',
                 build_string='0',
                 build_number=0,
                 base_url=None,
                 platform=None))

        assert p.match(
            Dist(channel='defaults',
                 dist_name='python-3.5.1-0',
                 name='python',
                 version='3.5.1',
                 build_string='0',
                 build_number=0,
                 base_url=None,
                 platform=None))
        assert p.match(
            PackageRecord(
                name='python',
                version='3.5.1',
                build='0',
                build_number=0,
                depends=('openssl 1.0.2*', 'readline 6.2*', 'sqlite',
                         'tk 8.5*', 'xz 5.0.5', 'zlib 1.2*', 'pip'),
                channel=Channel(scheme='https',
                                auth=None,
                                location='repo.anaconda.com',
                                token=None,
                                name='pkgs/free',
                                platform='osx-64',
                                package_filename=None),
                subdir='osx-64',
                fn='python-3.5.1-0.tar.bz2',
                md5='a813bc0a32691ab3331ac9f37125164c',
                size=14678857,
                priority=0,
                url=
                'https://repo.anaconda.com/pkgs/free/osx-64/python-3.5.1-0.tar.bz2'
            ))
Example #37
0
    def test_legacy_features_canonical_string_forms(self):
        assert m("mkl@") == "*[track_features=mkl]"

        # assert m("@mkl") == "*[features=mkl]"
        assert text_type(MatchSpec(features="mkl")) == "*[features=mkl]"
Example #38
0
    def test_to_filename(self):
        m1 = MatchSpec(fn='foo-1.7-52.tar.bz2')
        m2 = MatchSpec(name='foo', version='1.7', build='52')
        m3 = MatchSpec(Dist('defaults::foo-1.7-52'))
        assert m1._to_filename_do_not_use() == 'foo-1.7-52.tar.bz2'
        assert m2._to_filename_do_not_use() == 'foo-1.7-52.tar.bz2'
        assert m3._to_filename_do_not_use() == 'foo-1.7-52.tar.bz2'

        for spec in 'bitarray', 'pycosat 0.6.0', 'numpy 1.6*':
            ms = MatchSpec(spec)
            assert ms._to_filename_do_not_use() is None
Example #39
0
def m(string):
    return text_type(MatchSpec(string))
Example #40
0
def test_deep_cyclical_dependency():
    # Basically, the whole purpose of this test is to make sure nothing blows up with
    # recursion errors or anything like that.  Cyclical dependencies will always lead to
    # problems, and the tests here document the behavior.

    # "sqlite-3.20.1-haaaaaaa_4.tar.bz2": {
    #   "build": "haaaaaaa_4",
    #   "build_number": 4,
    #   "depends": [
    #     "libedit",
    #     "libgcc-ng >=7.2.0",
    #     "jinja2 2.9.6"
    #   ],
    #   "license": "Public-Domain (http://www.sqlite.org/copyright.html)",
    #   "md5": "deadbeefdd677bc3ed98ddd4deadbeef",
    #   "name": "sqlite",
    #   "sha256": "deadbeefabd915d2f13da177a29e264e59a0ae3c6fd2a31267dcc6a8deadbeef",
    #   "size": 1540584,
    #   "subdir": "linux-64",
    #   "timestamp": 1505666646842,
    #   "version": "3.20.1"
    # },
    graph = PrefixGraph(*get_sqlite_cyclical_record_set())

    nodes = tuple(rec.name for rec in graph.records)
    pprint(nodes)
    order = (
        'ca-certificates',
        'libgcc-ng',
        'libstdcxx-ng',
        'libffi',
        'ncurses',
        'openssl',
        'tk',
        'xz',
        'zlib',
        'libedit',
        'readline',
        'certifi',
        'click',
        'itsdangerous',
        'markupsafe',
        'python',
        'setuptools',
        'werkzeug',
        'jinja2',
        'flask',
        'sqlite',  # deep cyclical dependency; guess this is what we get
    )
    assert nodes == order

    # test remove spec
    # because of this deep cyclical dependency, removing jinja2 will remove sqlite and python
    expected_removal = (
        'certifi',
        'click',
        'itsdangerous',
        'markupsafe',
        'python',
        'setuptools',
        'werkzeug',
        'jinja2',
        'flask',
        'sqlite',
    )

    removed_nodes = graph.remove_spec(MatchSpec("sqlite"))
    removed_nodes = tuple(rec.name for rec in removed_nodes)
    pprint(removed_nodes)
    assert removed_nodes == expected_removal

    graph = PrefixGraph(*get_sqlite_cyclical_record_set())
    removed_nodes = graph.remove_spec(MatchSpec("python"))
    removed_nodes = tuple(rec.name for rec in removed_nodes)
    pprint(removed_nodes)
    assert removed_nodes == expected_removal

    graph = PrefixGraph(*get_sqlite_cyclical_record_set())
    removed_nodes = graph.remove_spec(MatchSpec("jinja2"))
    removed_nodes = tuple(rec.name for rec in removed_nodes)
    pprint(removed_nodes)
    assert removed_nodes == expected_removal

    graph = PrefixGraph(*get_sqlite_cyclical_record_set())
    removed_nodes = graph.remove_spec(MatchSpec("markupsafe"))
    removed_nodes = tuple(rec.name for rec in removed_nodes)
    pprint(removed_nodes)
    assert removed_nodes == expected_removal

    graph = PrefixGraph(*get_sqlite_cyclical_record_set())
    removed_nodes = graph.remove_youngest_descendant_nodes_with_specs()
    removed_nodes = tuple(rec.name for rec in removed_nodes)
    pprint(removed_nodes)
    expected_removal = ('flask', )
    assert removed_nodes == expected_removal

    removed_nodes = graph.prune()
    removed_nodes = tuple(rec.name for rec in removed_nodes)
    pprint(removed_nodes)
    expected_removal = (
        'click',
        'itsdangerous',
        'werkzeug',
    )
    assert removed_nodes == expected_removal

    removed_nodes = graph.remove_youngest_descendant_nodes_with_specs()
    removed_nodes = tuple(rec.name for rec in removed_nodes)
    pprint(removed_nodes)
    expected_removal = (
        # None, because of the cyclical dependency?
    )
    assert removed_nodes == expected_removal

    graph = PrefixGraph(*get_sqlite_cyclical_record_set())
    markupsafe_node = graph.get_node_by_name('markupsafe')
    markupsafe_ancestors = graph.all_ancestors(markupsafe_node)
    nodes = tuple(rec.name for rec in markupsafe_ancestors)
    pprint(nodes)
    order = (
        'ca-certificates',
        'libgcc-ng',
        'libstdcxx-ng',
        'libffi',
        'ncurses',
        'openssl',
        'tk',
        'xz',
        'zlib',
        'libedit',
        'readline',
        'certifi',
        'markupsafe',
        'python',
        'setuptools',
        'jinja2',
        'sqlite',
    )
    assert nodes == order

    markupsafe_descendants = graph.all_descendants(markupsafe_node)
    nodes = tuple(rec.name for rec in markupsafe_descendants)
    pprint(nodes)
    order = (
        'certifi',
        'click',
        'itsdangerous',
        'markupsafe',
        'python',
        'setuptools',
        'werkzeug',
        'jinja2',
        'flask',
        'sqlite',
    )
    assert nodes == order
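
A quick illustration of why the cyclical case needs care: a naive depth-first walk of such a dependency mapping revisits nodes forever unless it tracks state. A toy sketch with hypothetical data mirroring the sqlite -> jinja2 -> python -> sqlite cycle documented above:

# Toy dependency cycle (hypothetical data, not real repodata).
deps = {
    'sqlite': ['jinja2'],
    'jinja2': ['markupsafe', 'python'],
    'markupsafe': ['python'],
    'python': ['sqlite'],
}

def has_cycle(deps):
    """Detect a cycle with a colored depth-first search."""
    WHITE, GRAY, BLACK = 0, 1, 2
    color = {node: WHITE for node in deps}

    def visit(node):
        color[node] = GRAY
        for dep in deps.get(node, ()):
            if color.get(dep, WHITE) == GRAY:
                return True  # back edge found: there is a cycle
            if color.get(dep, WHITE) == WHITE and visit(dep):
                return True
        color[node] = BLACK
        return False

    return any(color[n] == WHITE and visit(n) for n in deps)

assert has_cycle(deps)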
Example #41
0
def install(args, parser, command='install'):
    """
    mamba install, mamba update, and mamba create
    """
    context.validate_configuration()
    check_non_admin()

    init_api_context(use_mamba_experimental)

    newenv = bool(command == 'create')
    isinstall = bool(command == 'install')
    solver_task = api.SOLVER_INSTALL

    isupdate = bool(command == 'update')
    if isupdate:
        solver_task = api.SOLVER_UPDATE
        solver_options.clear()

    if newenv:
        ensure_name_or_prefix(args, command)
    prefix = context.target_prefix
    if newenv:
        check_prefix(prefix, json=context.json)
    if context.force_32bit and prefix == context.root_prefix:
        raise CondaValueError("cannot use CONDA_FORCE_32BIT=1 in base env")
    if isupdate and not (args.file or args.packages or context.update_modifier
                         == UpdateModifier.UPDATE_ALL):
        raise CondaValueError("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)

    if not newenv:
        if isdir(prefix):
            if on_win:
                delete_trash(prefix)

            if not isfile(join(prefix, 'conda-meta', 'history')):
                if paths_equal(prefix, context.conda_prefix):
                    raise NoBaseEnvironmentError()
                else:
                    if not path_is_clean(prefix):
                        raise DirectoryNotACondaEnvironmentError(prefix)
            else:
                # fall-through expected under normal operation
                pass
        else:
            if hasattr(args, "mkdir") and args.mkdir:
                try:
                    mkdir_p(prefix)
                except EnvironmentError as e:
                    raise CondaOSError("Could not create directory: %s" %
                                       prefix,
                                       caused_by=e)
            else:
                raise EnvironmentLocationNotFound(prefix)

    prefix = context.target_prefix

    #############################
    # Get SPECS                 #
    #############################

    args_packages = [s.strip('"\'') for s in args.packages]
    if newenv and not args.no_default_packages:
        # Override defaults if they are specified at the command line
        # TODO: rework in 4.4 branch using MatchSpec
        args_packages_names = [
            pkg.replace(' ', '=').split('=', 1)[0] for pkg in args_packages
        ]
        for default_pkg in context.create_default_packages:
            default_pkg_name = default_pkg.replace(' ', '=').split('=', 1)[0]
            if default_pkg_name not in args_packages_names:
                args_packages.append(default_pkg)

    num_cp = sum(s.endswith('.tar.bz2') for s in args_packages)
    if num_cp:
        if num_cp == len(args_packages):
            explicit(args_packages,
                     prefix,
                     verbose=not (context.quiet or context.json))
            return
        else:
            raise CondaValueError(
                "cannot mix specifications with conda package"
                " filenames")

    specs = []

    index_args = {
        'use_cache': args.use_index_cache,
        'channel_urls': context.channels,
        'unknown': args.unknown,
        'prepend': not args.override_channels,
        'use_local': args.use_local
    }

    if args.file:
        file_specs = []
        for fpath in args.file:
            try:
                file_specs += specs_from_url(fpath, json=context.json)
            except UnicodeError:
                raise CondaValueError(
                    "Error reading file, file should be a text file containing"
                    " packages \nconda create --help for details")
        if '@EXPLICIT' in file_specs:
            explicit(file_specs,
                     prefix,
                     verbose=not (context.quiet or context.json),
                     index_args=index_args)
            return
        specs.extend([MatchSpec(s) for s in file_specs])

    specs.extend(specs_from_args(args_packages, json=context.json))

    # update channels from package specs (e.g. mychannel::mypackage adds mychannel)
    channels = [c for c in context.channels]
    for spec in specs:
        # CONDA TODO: correct handling for subdir isn't yet done
        spec_channel = spec.get_exact_value('channel')
        if spec_channel and spec_channel not in channels:
            channels.append(spec_channel)

    index_args['channel_urls'] = channels

    index = get_index(channel_urls=index_args['channel_urls'],
                      prepend=index_args['prepend'],
                      platform=None,
                      use_local=index_args['use_local'],
                      use_cache=index_args['use_cache'],
                      unknown=index_args['unknown'],
                      prefix=prefix)

    channel_json = []
    strict_priority = (context.channel_priority == ChannelPriority.STRICT)
    subprio_index = len(index)
    if strict_priority:
        # first, count unique channels
        n_channels = len(set([channel.canonical_name for _, channel in index]))
        current_channel = index[0][1].canonical_name
        channel_prio = n_channels

    for subdir, chan in index:
        # add priority here
        if strict_priority:
            if chan.canonical_name != current_channel:
                channel_prio -= 1
                current_channel = chan.canonical_name
            priority = channel_prio
        else:
            priority = 0
        if strict_priority:
            subpriority = 0 if chan.platform == 'noarch' else 1
        else:
            subpriority = subprio_index
            subprio_index -= 1

        if not subdir.loaded() and chan.platform != 'noarch':
            # skip subdirs that failed to load, unless this is the noarch subdir
            continue

        if context.verbosity != 0:
            print("Channel: {}, prio: {} : {}".format(chan, priority,
                                                      subpriority))
            print("Cache path: ", subdir.cache_path())

        channel_json.append((chan, subdir, priority, subpriority))

    # for c in channel_json:
    #     print(c[0], c[2], c[3])

    # unpack both the temp json file and the installed records used further below
    installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)

    if isinstall and args.revision:
        get_revision(args.revision, json=context.json)
    elif isinstall and not (args.file or args_packages):
        raise CondaValueError(
            "too few arguments, "
            "must supply command line package specs or --file")

    installed_names = [i_rec.name for i_rec in installed_pkg_recs]
    # for 'conda update', make sure the requested specs actually exist in the prefix
    # and that they are name-only specs
    if isupdate and context.update_modifier == UpdateModifier.UPDATE_ALL:
        history_dict = History(prefix).get_requested_specs_map()
        pins = {pin.name: pin for pin in get_pinned_specs(prefix)}
        # for key, match_spec in history_dict.items():
        for key in installed_names:
            if key == 'python':
                i = installed_names.index('python')
                version = installed_pkg_recs[i].version
                py_ver = ".".join(version.split(".")[:2]) + '.*'
                # specs.append(MatchSpec(name="python", version=py_ver))
            else:
                if key in pins:
                    specs.append(pins[key])
                else:
                    specs.append(MatchSpec(key))

        prefix_data = PrefixData(prefix)
        for s in args_packages:
            s = MatchSpec(s)
            if not s.is_name_only_spec:
                raise CondaValueError("Invalid spec for 'conda update': %s\n"
                                      "Use 'conda install' instead." % s)
            if not prefix_data.get(s.name, None):
                raise PackageNotInstalledError(prefix, s.name)

    elif context.update_modifier == UpdateModifier.UPDATE_DEPS:
        # find the deps for each package and add to the update job
        # solver_task |= api.SOLVER_FORCEBEST
        final_specs = specs
        for spec in specs:
            prec = installed_pkg_recs[installed_names.index(spec.name)]
            for dep in prec.depends:
                ms = MatchSpec(dep)
                if ms.name != 'python':
                    final_specs.append(MatchSpec(ms.name))
        specs = set(final_specs)

    if newenv and args.clone:
        if args.packages:
            raise TooManyArgumentsError(
                0, len(args.packages), list(args.packages),
                'did not expect any arguments for --clone')

        clone(args.clone,
              prefix,
              json=context.json,
              quiet=(context.quiet or context.json),
              index_args=index_args)
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
        return

    if not (context.quiet or context.json):
        print("\nLooking for: {}\n".format([str(s) for s in specs]))

    spec_names = [s.name for s in specs]

    # If python was not specified, check if it is installed.
    # If yes, add the installed python to the specs to prevent updating it.
    python_constraint = None
    additional_specs = []

    if 'python' not in spec_names:
        if 'python' in installed_names:
            i = installed_names.index('python')
            version = installed_pkg_recs[i].version
            python_constraint = MatchSpec('python==' +
                                          version).conda_build_form()

    mamba_solve_specs = [str(s) for s in specs]

    pool = api.Pool()

    repos = []

    if use_mamba_experimental or context.force_reinstall:
        prefix_data = api.PrefixData(context.target_prefix)
        prefix_data.load()

    # add installed
    if use_mamba_experimental:
        repo = api.Repo(pool, prefix_data)
        repos.append(repo)
    else:
        repo = api.Repo(pool, "installed", installed_json_f.name, "")
        repo.set_installed()
        repos.append(repo)

    for channel, subdir, priority, subpriority in channel_json:
        repo = subdir.create_repo(pool)
        repo.set_priority(priority, subpriority)
        repos.append(repo)

    if context.force_reinstall:
        solver = api.Solver(pool, solver_options, prefix_data)
    else:
        solver = api.Solver(pool, solver_options)

    solver.set_postsolve_flags([
        (api.MAMBA_NO_DEPS, context.deps_modifier == DepsModifier.NO_DEPS),
        (api.MAMBA_ONLY_DEPS, context.deps_modifier == DepsModifier.ONLY_DEPS),
        (api.MAMBA_FORCE_REINSTALL, context.force_reinstall)
    ])
    solver.add_jobs(mamba_solve_specs, solver_task)

    # as a security feature this will _always_ attempt to upgrade certain packages
    for a_pkg in [_.name for _ in context.aggressive_update_packages]:
        if a_pkg in installed_names:
            solver.add_jobs([a_pkg], api.SOLVER_UPDATE)

    if python_constraint:
        solver.add_constraint(python_constraint)

    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        exit_code = 1
        return exit_code

    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(solver, package_cache)
    mmb_specs, to_link, to_unlink = transaction.to_conda()

    specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]
    specs_to_remove = [MatchSpec(m) for m in mmb_specs[1]]

    transaction.log_json()

    downloaded = transaction.prompt(PackageCacheData.first_writable().pkgs_dir,
                                    repos)
    if not downloaded:
        exit(0)
    PackageCacheData.first_writable().reload()

    if use_mamba_experimental and not os.name == 'nt':
        if newenv and not isdir(context.target_prefix) and not context.dry_run:
            mkdir_p(prefix)

        transaction.execute(prefix_data,
                            PackageCacheData.first_writable().pkgs_dir)
    else:
        conda_transaction = to_txn(specs_to_add, specs_to_remove, prefix,
                                   to_link, to_unlink, index)
        handle_txn(conda_transaction, prefix, args, newenv)

    try:
        installed_json_f.close()
        os.unlink(installed_json_f.name)
    except:
        pass
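
The UPDATE_DEPS branch above widens the job set: for each requested spec it also adds a name-only spec for every direct dependency of the installed record, skipping python. A minimal sketch of that expansion over plain record dicts (hypothetical data and helper name):

from conda.models.match_spec import MatchSpec

def expand_update_deps(requested, installed_records):
    """Add name-only specs for the direct deps of each requested package."""
    by_name = {rec['name']: rec for rec in installed_records}
    final = [MatchSpec(s) for s in requested]
    for spec in list(final):
        for dep in by_name[spec.name].get('depends', ()):
            ms = MatchSpec(dep)
            if ms.name != 'python':
                final.append(MatchSpec(ms.name))
    return set(final)

installed = [
    {'name': 'requests', 'depends': ['urllib3 >=1.21', 'idna', 'python >=3.6']},
    {'name': 'urllib3', 'depends': ['python >=3.6']},
    {'name': 'idna', 'depends': ['python >=3.6']},
]
specs = expand_update_deps(['requests'], installed)
assert {s.name for s in specs} == {'requests', 'urllib3', 'idna'}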
Example #42
0
    def get_variants(env):
        specs = {}
        variants = {}  # not shown in this snippet (likely from the enclosing scope); needed for the assignments below

        for var in sys_var_stubs:
            if var in conda_build_config:
                variants[var] = ensure_list(conda_build_config[var])

        for s in env:
            spec = CondaBuildSpec(s)
            specs[spec.name] = spec

        for n, cb_spec in specs.items():
            if cb_spec.is_compiler:
                # This is a compiler package
                _, lang = cb_spec.raw.split()
                compiler = conda_build.jinja_context.compiler(lang, config)
                cb_spec.final = compiler
                config_key = f"{lang}_compiler"
                config_version_key = f"{lang}_compiler_version"

                if conda_build_config.get(config_key):
                    variants[config_key] = conda_build_config[config_key]
                if conda_build_config.get(config_version_key):
                    variants[config_version_key] = conda_build_config[
                        config_version_key]

            # Note: as a historical artifact we __have to__ use underscore-replaced
            # names here!
            variant_key = n.replace("-", "_")
            vlist = None
            if variant_key in conda_build_config:
                vlist = conda_build_config[variant_key]
            elif variant_key in default_variant:
                vlist = [default_variant[variant_key]]
            if vlist:
                # we need to check if v matches the spec
                if cb_spec.is_simple:
                    variants[variant_key] = vlist
                elif cb_spec.is_pin:
                    # ignore variants?
                    pass
                else:
                    # check intersection of MatchSpec and variants
                    ms = MatchSpec(cb_spec.raw)
                    filtered = []
                    for var in vlist:
                        vsplit = var.split()
                        if len(vsplit) == 1:
                            p = {
                                "name": n,
                                "version": vsplit[0],
                                "build_number": 0,
                                "build": "",
                            }
                        elif len(vsplit) == 2:
                            p = {
                                "name": n,
                                "version": var.split()[0],
                                "build": var.split()[1],
                                "build_number": 0,
                            }
                        else:
                            raise RuntimeError("Check your conda_build_config")

                        if ms.match(p):
                            filtered.append(var)
                        else:
                            console.print(
                                f"Configured variant ignored because of the recipe requirement:\n  {cb_spec.raw} : {var}\n"
                            )

                    if len(filtered):
                        variants[variant_key] = filtered

        return variants
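
The variant filtering above boils down to turning each configured variant string ('1.1' or '1.1 h2_0') into a minimal record dict and keeping it only when the recipe's MatchSpec matches. A standalone sketch of just that step (hypothetical helper and config values):

from conda.models.match_spec import MatchSpec

def filter_variants(name, spec_string, variant_values):
    """Keep only variant strings compatible with the recipe requirement."""
    ms = MatchSpec(spec_string)
    kept = []
    for value in variant_values:
        parts = value.split()
        record = {
            'name': name,
            'version': parts[0],
            'build': parts[1] if len(parts) > 1 else '',
            'build_number': 0,
        }
        if ms.match(record):
            kept.append(value)
    return kept

assert filter_variants('python', 'python >=3.8', ['3.7', '3.8', '3.9']) == ['3.8', '3.9']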
Example #43
0
    def test_key_value_features_match(self):
        dst = Dist('defaults::foo-1.2.3-4.tar.bz2')
        a = MatchSpec(features='test')
        assert text_type(a) == "*[provides_features='test=true']"
        assert not a.match(DPkg(dst))
        assert not a.match(DPkg(dst, track_features=''))
        assert a.match(DPkg(dst, track_features='test'))
        assert not a.match(DPkg(dst, track_features='test2'))
        assert a.match(DPkg(dst, track_features='test me'))
        assert a.match(DPkg(dst, track_features='you test'))
        assert a.match(DPkg(dst, track_features='you test me'))
        assert a.get_exact_value('provides_features') == frozendict(
            {'test': 'true'})

        b = MatchSpec(features='mkl')
        assert not b.match(DPkg(dst))
        assert b.match(DPkg(dst, track_features='mkl'))
        assert b.match(DPkg(dst, track_features='blas=mkl'))
        assert b.match(DPkg(dst, track_features='blas=mkl debug'))
        assert not b.match(DPkg(dst, track_features='debug'))

        c = MatchSpec(features='nomkl')
        assert not c.match(DPkg(dst))
        assert not c.match(DPkg(dst, track_features='mkl'))
        assert c.match(DPkg(dst, track_features='nomkl'))
        assert c.match(DPkg(dst, track_features='blas=nomkl debug'))