def test_test_deptype():
    """Ensure that test-only dependencies are only included for specified
    packages in the following spec DAG::

            w
           /|
          x y
            |
            z

    w->y deptypes are (link, build), w->x and y->z deptypes are (test)
    """
    default = ('build', 'link')
    test_only = ('test',)

    # y depends on z only for tests; w depends on x only for tests and on
    # y with the default (build, link) types.
    x = MockPackage('x', [], [])
    z = MockPackage('z', [], [])
    y = MockPackage('y', [z], [test_only])
    w = MockPackage('w', [x, y], [test_only, default])

    mock_repo = MockPackageMultiRepo([w, x, y, z])
    with spack.repo.swap(mock_repo):
        spec = Spec('w')
        # Tests are enabled only for 'w', so w's test dep 'x' is pulled
        # in, while y's test dep 'z' is not.
        spec.concretize(tests=(w.name,))

        assert ('x' in spec)
        assert ('z' not in spec)
def test_external_and_virtual(self):
    """Concretize a package depending on an external tool and an external
    provider of a virtual package."""
    spec = Spec('externaltest')
    spec.concretize()
    # Both the concrete and the virtual dependency resolve to externals.
    self.assertEqual(spec['externaltool'].external, '/path/to/external_tool')
    self.assertEqual(spec['stuff'].external, '/path/to/external_virtual_gcc')
    # The externals are configured with gcc as their compiler.
    self.assertTrue(spec['externaltool'].compiler.satisfies('gcc'))
    self.assertTrue(spec['stuff'].compiler.satisfies('gcc'))
def test_equal(self):
    """Specs with the same dependencies expressed differently compare
    equal with ==, while eq_dag() distinguishes DAG structure."""
    # Different spec structures to test for equality
    flat = Spec.from_literal(
        {'mpileaks ^callpath ^libelf ^libdwarf': None}
    )

    flat_init = Spec.from_literal({
        'mpileaks': {
            'callpath': None,
            'libdwarf': None,
            'libelf': None
        }
    })

    flip_flat = Spec.from_literal({
        'mpileaks': {
            'libelf': None,
            'libdwarf': None,
            'callpath': None
        }
    })

    dag = Spec.from_literal({
        'mpileaks': {
            'callpath': {
                'libdwarf': {
                    'libelf': None
                }
            }
        }
    })

    flip_dag = Spec.from_literal({
        'mpileaks': {
            'callpath': {
                'libelf': {
                    'libdwarf': None
                }
            }
        }
    })

    # All these are equal to each other with regular ==
    specs = (flat, flat_init, flip_flat, dag, flip_dag)
    # BUG FIX: zip(specs, specs) only paired each spec with itself, so
    # cross-spec equality was never checked.  Compare every pair.
    for lhs in specs:
        for rhs in specs:
            assert lhs == rhs
            assert str(lhs) == str(rhs)

    # Same DAGs constructed different ways are equal
    assert flat.eq_dag(flat_init)

    # order at same level does not matter -- (dep on same parent)
    assert flat.eq_dag(flip_flat)

    # DAGs should be unequal if nesting is different
    assert not flat.eq_dag(dag)
    assert not flat.eq_dag(flip_dag)
    assert not flip_flat.eq_dag(dag)
    assert not flip_flat.eq_dag(flip_dag)
    assert not dag.eq_dag(flip_dag)
def get_matching_versions(specs, **kwargs):
    """Get a spec for EACH known version matching any spec in the list.

    Args:
        specs (list): abstract specs to match against known versions.

    Keyword Args:
        num_versions (int): maximum number of versions to generate per
            spec; 0 (the default) means no limit.

    Returns:
        list: one Spec per matching known version.
    """
    # Hoisted out of the loop: the limit is the same for every spec.
    num_versions = kwargs.get('num_versions', 0)

    matching = []
    for spec in specs:
        pkg = spec.package

        # Skip any package that has no known versions.
        if not pkg.versions:
            tty.msg("No safe (checksummed) versions for package %s"
                    % pkg.name)
            continue

        matching_spec = []
        # Walk known versions from newest to oldest.
        for i, v in enumerate(reversed(sorted(pkg.versions))):
            # Generate no more than num_versions versions for each spec.
            if num_versions and i >= num_versions:
                break

            # Generate only versions that satisfy the spec.
            if v.satisfies(spec.versions):
                s = Spec(pkg.name)
                s.versions = VersionList([v])
                s.variants = spec.variants.copy()
                # This is needed to avoid hanging references during the
                # concretization phase
                s.variants.spec = s
                matching_spec.append(s)

        if not matching_spec:
            tty.warn("No known version matches spec: %s" % spec)
        matching.extend(matching_spec)

    return matching
def remove(self, query_spec, force=False):
    """Remove specs from an environment that match a query_spec

    Args:
        query_spec: spec (or spec string) to match against.
        force: if True, also drop matching concretized specs and their
            hash bookkeeping, not just the user specs.

    Raises:
        SpackEnvironmentError: if nothing in the environment matches.
    """
    query_spec = Spec(query_spec)

    # try abstract specs first
    matches = []
    if not query_spec.concrete:
        matches = [s for s in self.user_specs if s.satisfies(query_spec)]

    if not matches:
        # concrete specs match against concrete specs in the env
        specs_hashes = zip(
            self.concretized_user_specs, self.concretized_order)
        matches = [
            s for s, h in specs_hashes if query_spec.dag_hash() == h]

    if not matches:
        raise SpackEnvironmentError("Not found: {0}".format(query_spec))

    for spec in matches:
        if spec in self.user_specs:
            self.user_specs.remove(spec)

        if force and spec in self.concretized_user_specs:
            # Keep the three parallel structures (concretized_user_specs,
            # concretized_order, specs_by_hash) consistent.
            i = self.concretized_user_specs.index(spec)
            del self.concretized_user_specs[i]

            dag_hash = self.concretized_order[i]
            del self.concretized_order[i]
            del self.specs_by_hash[dag_hash]
def test_find_spec_sibling(self):
    """find_spec locates a sibling (or a sibling's child) that matches
    the predicate."""
    s = Spec.from_literal({
        'a': {
            'b +foo': {
                'c': None,
                'd': None
            },
            'e +foo': None
        }
    })

    # 'b' and 'e' are siblings: each finds the other via the +foo test.
    assert 'e' == find_spec(s['b'], lambda s: '+foo' in s).name
    assert 'b' == find_spec(s['e'], lambda s: '+foo' in s).name

    s = Spec.from_literal({
        'a': {
            'b +foo': {
                'c': None,
                'd': None
            },
            'e': {
                'f +foo': None
            }
        }
    })

    # The match may also be found below a sibling ('f' under 'e').
    assert 'f' == find_spec(s['b'], lambda s: '+foo' in s).name
def test_conflicts_in_spec(self, conflict_spec):
    # Check that an exception is raised and caught by the appropriate
    # exception types (ConflictsInSpecError subclasses the other two).
    for exc_type in (ConflictsInSpecError, RuntimeError, SpecError):
        s = Spec(conflict_spec)
        with pytest.raises(exc_type):
            s.concretize()
def check_concretize(abstract_spec):
    """Concretize the given abstract spec string and sanity-check the
    abstract/concrete pair before returning the concrete spec."""
    original = Spec(abstract_spec)
    result = original.concretized()

    # The result is fully concrete, while the input stays abstract.
    assert result.concrete
    assert not original.concrete
    check_spec(original, result)

    return result
def test_regression_issue_7705(self):
    """Regression test for issue 7705.

    spec.package.provides(name) must account for conditional constraints
    in the concretized spec.
    """
    spec = Spec('simple-inheritance~openblas')
    spec.concretize()
    assert not spec.package.provides('lapack')
def test_external_and_virtual(self):
    """Externals are used for both a concrete and a virtual dependency."""
    spec = Spec('externaltest')
    spec.concretize()
    # Both dependencies must resolve to their configured external paths.
    assert spec['externaltool'].external_path == '/path/to/external_tool'
    assert spec['stuff'].external_path == '/path/to/external_virtual_gcc'
    # The externals are configured with gcc as their compiler.
    assert spec['externaltool'].compiler.satisfies('gcc')
    assert spec['stuff'].compiler.satisfies('gcc')
def test_copy_satisfies_transitive(self):
    """A concrete spec and its copy must satisfy each other node by node."""
    original = Spec('dttop')
    original.concretize()
    duplicate = original.copy()
    for node in original.traverse():
        twin = duplicate[node.name]
        assert node.satisfies(twin)
        assert twin.satisfies(node)
def test_yaml_subdag(config, builtin_mock):
    """Round-tripping a concrete spec through YAML preserves sub-DAGs."""
    spec = Spec('mpileaks^mpich+debug')
    spec.concretize()
    restored = Spec.from_yaml(spec.to_yaml())

    for name in ('callpath', 'mpich', 'dyninst', 'libdwarf', 'libelf'):
        assert spec[name].eq_dag(restored[name])
def test_using_ordered_dict(builtin_mock):
    """Checks that dicts are ordered

    Necessary to make sure that dag_hash is stable across python
    versions and processes.
    """
    def descend_and_check(iterable, level=0):
        # Every mapping inside the node dict must be an ordered
        # syaml_dict; returns the maximum nesting depth reached.
        if isinstance(iterable, Mapping):
            assert isinstance(iterable, syaml_dict)
            return descend_and_check(iterable.values(), level=level + 1)
        max_level = level
        for value in iterable:
            if isinstance(value, Iterable) and not isinstance(value, str):
                nlevel = descend_and_check(value, level=level + 1)
                if nlevel > max_level:
                    max_level = nlevel
        return max_level

    specs = ['mpileaks ^zmpi', 'dttop', 'dtuse']
    for spec in specs:
        dag = Spec(spec)
        dag.normalize()
        # BUG FIX: removed leftover debug output (pprint) and a stray
        # 'break' that made the loop check only the first spec.
        level = descend_and_check(dag.to_node_dict())

        # level just makes sure we are doing something here
        assert level >= 5
def test_with_or_without(self):
    """Exercise the with_or_without/enable_or_disable helpers on a mock
    package's variants."""
    s = Spec('a')
    s.concretize()
    pkg = spack.repo.get(s)

    # Called without parameters
    options = pkg.with_or_without('foo')
    assert '--with-bar' in options
    assert '--without-baz' in options
    assert '--no-fee' in options

    def activate(value):
        return 'something'

    # A custom activation value is appended to the '--with-' flag.
    options = pkg.with_or_without('foo', activation_value=activate)
    assert '--with-bar=something' in options
    assert '--without-baz' in options
    assert '--no-fee' in options

    # enable_or_disable produces --enable/--disable style flags instead.
    options = pkg.enable_or_disable('foo')
    assert '--enable-bar' in options
    assert '--disable-baz' in options
    assert '--disable-fee' in options

    options = pkg.with_or_without('bvv')
    assert '--with-bvv' in options
def test_conditional_patched_deps_with_conditions(mock_packages, config):
    """Test whether conditional patched dependencies with conditions work."""
    spec = Spec('patch-several-dependencies @1.0 ^libdwarf@20111030')
    spec.concretize()

    # basic patch on libelf
    assert 'patches' in list(spec['libelf'].variants.keys())
    # foo
    assert ('b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c'
            in spec['libelf'].variants['patches'].value)

    # conditional patch on libdwarf
    assert 'patches' in list(spec['libdwarf'].variants.keys())
    # bar
    assert ('7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730'
            in spec['libdwarf'].variants['patches'].value)
    # baz is conditional on libdwarf version (no guarantee on order w/conds)
    assert ('bf07a7fbb825fc0aae7bf4a1177b2b31fcf8a3feeaf7092761e18c859ee52a9c'
            in spec['libdwarf'].variants['patches'].value)

    # URL patches
    assert 'patches' in list(spec['fake'].variants.keys())
    # urlpatch.patch, urlpatch.patch.gz
    assert (('1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd',
             'abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234')
            == spec['fake'].variants['patches'].value)
def test_getitem_query(self):
    """spec['name'] records query metadata (last_query) on the result."""
    s = Spec('mpileaks')
    s.concretize()

    # Check a query to a non-virtual package
    a = s['callpath']

    query = a.last_query
    assert query.name == 'callpath'
    assert len(query.extra_parameters) == 0
    assert not query.isvirtual

    # Check a query to a virtual package
    a = s['mpi']

    query = a.last_query
    assert query.name == 'mpi'
    assert len(query.extra_parameters) == 0
    assert query.isvirtual

    # Check a query to a virtual package with
    # extra parameters after query
    a = s['mpi:cxx,fortran']

    query = a.last_query
    assert query.name == 'mpi'
    assert len(query.extra_parameters) == 2
    assert 'cxx' in query.extra_parameters
    assert 'fortran' in query.extra_parameters
    assert query.isvirtual
def test_install_overwrite(
        mock_packages, mock_archive, mock_fetch, config, install_mockery
):
    """'spack install --overwrite' replaces a previous installation."""
    # It's not possible to overwrite something that is not yet installed
    with pytest.raises(AssertionError):
        install('--overwrite', 'libdwarf')

    # --overwrite requires a single spec
    with pytest.raises(AssertionError):
        install('--overwrite', 'libdwarf', 'libelf')

    # Try to install a spec and then to reinstall it.
    spec = Spec('libdwarf')
    spec.concretize()

    install('libdwarf')

    assert os.path.exists(spec.prefix)
    expected_md5 = fs.hash_directory(spec.prefix)

    # Modify the first installation to be sure the content is not the same
    # as the one after we reinstalled
    with open(os.path.join(spec.prefix, 'only_in_old'), 'w') as f:
        f.write('This content is here to differentiate installations.')

    bad_md5 = fs.hash_directory(spec.prefix)

    assert bad_md5 != expected_md5

    install('--overwrite', '-y', 'libdwarf')

    # After overwrite the prefix content matches the pristine install.
    assert os.path.exists(spec.prefix)
    assert fs.hash_directory(spec.prefix) == expected_md5
    assert fs.hash_directory(spec.prefix) != bad_md5
def test_patched_dependency(
        mock_packages, config, install_mockery, mock_fetch):
    """Test whether patched dependencies work."""
    spec = Spec('patch-a-dependency')
    spec.concretize()
    assert 'patches' in list(spec['libelf'].variants.keys())

    # make sure the patch makes it into the dependency spec
    assert (('c45c1564f70def3fc1a6e22139f62cb21cd190cc3a7dbe6f4120fa59ce33dcb8',)
            == spec['libelf'].variants['patches'].value)

    # make sure the patch in the dependent's directory is applied to the
    # dependency
    libelf = spec['libelf']
    pkg = libelf.package
    pkg.do_patch()
    with pkg.stage:
        with working_dir(pkg.stage.source_path):
            # output a Makefile with 'echo Patched!' as the default target
            configure = Executable('./configure')
            configure()

            # Make sure the Makefile contains the patched text
            with open('Makefile') as mf:
                assert 'Patched!' in mf.read()
def test_external_package(self):
    """An external package uses its configured path and compiler."""
    spec = Spec('externaltool')
    spec.concretize()

    self.assertEqual(spec['externaltool'].external, '/path/to/external_tool')
    # The external's prerequisite must not be pulled into the DAG.
    self.assertFalse('externalprereq' in spec)
    self.assertTrue(spec['externaltool'].compiler.satisfies('gcc'))
def test_dynamic_dot_graph_mpileaks(mock_packages):
    """Test dynamically graphing the mpileaks package."""
    s = Spec('mpileaks').normalized()

    stream = StringIO()
    graph_dot([s], static=False, out=stream)

    dot = stream.getvalue()

    # Gather the (hash, label) pair for each node expected in the graph.
    mpileaks_hash, mpileaks_lbl = s.dag_hash(), s.format('$_$/')
    mpi_hash, mpi_lbl = s['mpi'].dag_hash(), s['mpi'].format('$_$/')
    callpath_hash, callpath_lbl = (
        s['callpath'].dag_hash(), s['callpath'].format('$_$/'))
    dyninst_hash, dyninst_lbl = (
        s['dyninst'].dag_hash(), s['dyninst'].format('$_$/'))
    libdwarf_hash, libdwarf_lbl = (
        s['libdwarf'].dag_hash(), s['libdwarf'].format('$_$/'))
    libelf_hash, libelf_lbl = (
        s['libelf'].dag_hash(), s['libelf'].format('$_$/'))

    # Every node must appear with its label ...
    assert ' "%s" [label="%s"]\n' % (mpileaks_hash, mpileaks_lbl) in dot
    assert ' "%s" [label="%s"]\n' % (callpath_hash, callpath_lbl) in dot
    assert ' "%s" [label="%s"]\n' % (mpi_hash, mpi_lbl) in dot
    assert ' "%s" [label="%s"]\n' % (dyninst_hash, dyninst_lbl) in dot
    assert ' "%s" [label="%s"]\n' % (libdwarf_hash, libdwarf_lbl) in dot
    assert ' "%s" [label="%s"]\n' % (libelf_hash, libelf_lbl) in dot

    # ... and every dependency edge must be present.
    assert ' "%s" -> "%s"\n' % (dyninst_hash, libdwarf_hash) in dot
    assert ' "%s" -> "%s"\n' % (callpath_hash, dyninst_hash) in dot
    assert ' "%s" -> "%s"\n' % (mpileaks_hash, mpi_hash) in dot
    assert ' "%s" -> "%s"\n' % (libdwarf_hash, libelf_hash) in dot
    assert ' "%s" -> "%s"\n' % (callpath_hash, mpi_hash) in dot
    assert ' "%s" -> "%s"\n' % (mpileaks_hash, callpath_hash) in dot
    assert ' "%s" -> "%s"\n' % (dyninst_hash, libelf_hash) in dot
def read_spec(self, path):
    """Read the contents of a file and parse them as a spec.

    Args:
        path: path of the spec file to read.

    Returns:
        Spec: a normalized, concrete spec when its whole DAG is still
        known to spack; otherwise the raw parsed spec marked normal and
        concrete as a best-effort fallback.
    """
    # FIX: file objects are context managers themselves, so the old
    # contextlib.closing() wrapper was redundant.
    with open(path) as spec_file:
        # Specs from files are assumed normal and concrete
        spec = Spec(spec_file.read().replace('\n', ''))

    if all(spack.db.exists(s.name) for s in spec.traverse()):
        copy = spec.copy()

        # TODO: It takes a lot of time to normalize every spec on read.
        # TODO: Storing graph info with spec files would fix this.
        copy.normalize()
        if copy.concrete:
            return copy   # These are specs spack still understands.

    # If we get here, either the spec is no longer in spack, or
    # something about its dependencies has changed. So we need to
    # just assume the read spec is correct.  We'll lose graph
    # information if we do this, but this is just for best effort
    # for commands like uninstall and find.  Currently Spack
    # doesn't do anything that needs the graph info after install.

    # TODO: store specs with full connectivity information, so
    # that we don't have to normalize or reconstruct based on
    # changing dependencies in the Spack tree.
    spec._normal = True
    spec._concrete = True
    return spec
def test_spec_contains_deps(self):
    """After normalization, callpath's DAG contains all its dependencies."""
    spec = Spec('callpath')
    spec.normalize()
    for dependency in ('dyninst', 'libdwarf', 'libelf', 'mpi'):
        assert dependency in spec
def test_deptype_traversal_run(self):
    """Traversing with deptype='run' visits only run dependencies."""
    dag = Spec('dttop')
    dag.normalize()

    expected = ['dttop', 'dtrun1', 'dtrun3']
    visited = [node.name for node in dag.traverse(deptype='run')]
    assert visited == expected
def test_normalize_diamond_deptypes(self):
    """Ensure that dependency types are preserved even if the same thing is
    depended on in two different ways."""
    s = Spec('dt-diamond')
    s.normalize()

    self.check_diamond_deptypes(s)
    self.check_diamond_normalized_dag(s)
def test_yaml_subdag(self):
    """Round-tripping through YAML must preserve every dependency sub-DAG."""
    spec = Spec('mpileaks^mpich+debug')
    spec.concretize()
    yaml_spec = Spec.from_yaml(spec.to_yaml())

    for dep in ('callpath', 'mpich', 'dyninst', 'libdwarf', 'libelf'):
        self.assertTrue(spec[dep].eq_dag(yaml_spec[dep]))
def test_architecture_inheritance(self):
    """test_architecture_inheritance is likely to fail with an
    UnavailableCompilerVersionError if the architecture is concretized
    incorrectly.
    """
    spec = Spec('cmake-client %[email protected] os=fe ^ cmake')
    spec.concretize()
    # The dependency must inherit the root's architecture.
    assert spec['cmake'].architecture == spec.architecture
def test_compiler_inheritance(self):
    """A compiler set on a node propagates to that node's dependencies."""
    spec = Spec('mpileaks')
    spec.normalize()

    spec['dyninst'].compiler = CompilerSpec('clang')
    spec.concretize()

    # TODO: not exactly the syntax I would like.
    assert spec['libdwarf'].compiler.satisfies('clang')
    assert spec['libelf'].compiler.satisfies('clang')
def target_factory(spec_string, target_concrete):
    """Build a Spec from a string, optionally marking it concrete.

    When target_concrete is true the spec is marked concrete and its
    abstract variants are substituted with concrete values.
    """
    result = Spec(spec_string)

    if target_concrete:
        result._mark_concrete()
        substitute_abstract_variants(result)

    return result
def test_nobuild_package(self):
    """Concretizing this spec must raise NoBuildError."""
    spec = Spec('externaltool%clang')
    # IMPROVED: replaced the manual try/except + boolean-flag pattern
    # with the idiomatic assertRaises context manager.
    with self.assertRaises(spack.concretize.NoBuildError):
        spec.concretize()
def check_concretize(self, abstract_spec):
    """Concretize abstract_spec, verify the abstract/concrete pair, and
    return the concrete spec."""
    abstract = Spec(abstract_spec)
    concrete = abstract.concretized()

    # concretized() must not mutate the original abstract spec.
    self.assertFalse(abstract.concrete)
    self.assertTrue(concrete.concrete)
    self.check_spec(abstract, concrete)

    return concrete
def test_spec_formatting(self):
    """Check spec.format() against str(spec) and a battery of named
    format tokens (plain, sigil, dotted, and path/hash tokens)."""
    spec = Spec("multivalue_variant cflags=-O2")
    spec.concretize()

    # Since the default is the full spec see if the string rep of
    # spec is the same as the output of spec.format()
    # ignoring whitespace (though should we?) and ignoring dependencies
    spec_string = str(spec)
    idx = spec_string.index(' ^')
    assert spec_string[:idx] == spec.format().strip()

    # Testing named strings ie {string} and whether we get
    # the correct component
    # Mixed case intentional to test both
    package_segments = [("{NAME}", "name"),
                        ("{VERSION}", "versions"),
                        ("{compiler}", "compiler"),
                        ("{compiler_flags}", "compiler_flags"),
                        ("{variants}", "variants"),
                        ("{architecture}", "architecture")]

    sigil_package_segments = [("{@VERSIONS}", '@' + str(spec.version)),
                              ("{%compiler}", '%' + str(spec.compiler)),
                              ("{arch=architecture}",
                               ' arch=' + str(spec.architecture))]

    compiler_segments = [("{compiler.name}", "name"),
                         ("{compiler.version}", "versions")]

    sigil_compiler_segments = [
        ("{%compiler.name}", '%' + spec.compiler.name),
        ("{@compiler.version}", '@' + str(spec.compiler.version))
    ]

    architecture_segments = [("{architecture.platform}", "platform"),
                             ("{architecture.os}", "os"),
                             ("{architecture.target}", "target")]

    other_segments = [('{spack_root}', spack.paths.spack_root),
                      ('{spack_install}', spack.store.layout.root),
                      ('{hash:7}', spec.dag_hash(7)),
                      ('{/hash}', '/' + spec.dag_hash())]

    # Tokens naming a spec attribute must render as str(attribute).
    for named_str, prop in package_segments:
        expected = getattr(spec, prop, "")
        actual = spec.format(named_str)
        assert str(expected) == actual

    # Sigil tokens prepend their sigil to the rendered value.
    for named_str, expected in sigil_package_segments:
        actual = spec.format(named_str)
        assert expected == actual

    compiler = spec.compiler
    for named_str, prop in compiler_segments:
        expected = getattr(compiler, prop, "")
        actual = spec.format(named_str)
        assert str(expected) == actual

    for named_str, expected in sigil_compiler_segments:
        actual = spec.format(named_str)
        assert expected == actual

    arch = spec.architecture
    for named_str, prop in architecture_segments:
        expected = getattr(arch, prop, "")
        actual = spec.format(named_str)
        assert str(expected) == actual

    for named_str, expected in other_segments:
        actual = spec.format(named_str)
        assert expected == actual
def graph_dot(specs, deptype='all', static=False, out=None):
    """Generate a graph in dot format of all provided specs.

    Print out a dot formatted graph of all the dependencies between
    package.  Output can be passed to graphviz, e.g.:

    .. code-block:: console

        spack graph --dot qt | dot -Tpdf > spack-graph.pdf

    Args:
        specs (list): specs to graph; must be non-empty.
        deptype: dependency types to follow (canonicalized below).
        static (bool): if True, graph everything a package COULD depend
            on (from package definitions) instead of the concrete DAG.
        out: file-like object to write to; defaults to sys.stdout.

    Raises:
        ValueError: if no specs are provided.
    """
    if out is None:
        out = sys.stdout

    deptype = canonical_deptype(deptype)

    out.write('digraph G {\n')
    out.write(' labelloc = "b"\n')
    out.write(' rankdir = "TB"\n')
    out.write(' ranksep = "5"\n')
    out.write('node[\n')
    out.write(' fontname=Monaco,\n')
    out.write(' penwidth=2,\n')
    out.write(' fontsize=12,\n')
    out.write(' margin=.1,\n')
    out.write(' shape=box,\n')
    out.write(' fillcolor=lightblue,\n')
    out.write(' style="rounded,filled"]\n')
    out.write('\n')

    def q(string):
        return '"%s"' % string

    if not specs:
        # FIX: corrected typo in the error message ("ot" -> "to").
        raise ValueError("Must provide specs to graph_dot")

    # Static graph includes anything a package COULD depend on.
    if static:
        names = set.union(*[
            s.package.possible_dependencies(expand_virtuals=False)
            for s in specs
        ])
        specs = [Spec(name) for name in names]

    labeled = set()

    # Emit a label line for a node at most once.
    def label(key, label):
        if key not in labeled:
            out.write(' "%s" [label="%s"]\n' % (key, label))
            labeled.add(key)

    deps = set()
    for spec in specs:
        if static:
            out.write(' "%s" [label="%s"]\n' % (spec.name, spec.name))

            # Skip virtual specs (we'll find out about them from
            # concrete ones).
            if spec.virtual:
                continue

            # Add edges for each depends_on in the package.
            for dep_name, dep in iteritems(spec.package.dependencies):
                deps.add((spec.name, dep_name))

            # If the package provides something, add an edge for that.
            for provider in set(s.name for s in spec.package.provided):
                deps.add((provider, spec.name))

        else:
            # Dynamic graph: nodes are keyed by DAG hash so distinct
            # concrete specs of the same package stay distinct.
            def key_label(s):
                return s.dag_hash(), "%s/%s" % (s.name, s.dag_hash(7))

            for s in spec.traverse(deptype=deptype):
                skey, slabel = key_label(s)
                out.write(' "%s" [label="%s"]\n' % (skey, slabel))

                for d in s.dependencies(deptype=deptype):
                    dkey, _ = key_label(d)
                    deps.add((skey, dkey))

    out.write('\n')

    # Edges are emitted last, after all node labels.
    for pair in deps:
        out.write(' "%s" -> "%s"\n' % pair)
    out.write('}\n')
def test_config_perms_fail_write_gt_read(self, configure_permissions):
    """Configured write permissions more permissive than read permissions
    must raise a ConfigError."""
    spec = Spec('callpath')
    with pytest.raises(ConfigError):
        spack.package_prefs.get_package_permissions(spec)
def test_compiler_flags_from_user_are_grouped(self):
    """A quoted multi-token flag value must survive as a single entry."""
    spec = Spec('a%gcc cflags="-O -foo-flag foo-val" platform=test')
    spec.concretize()
    # Membership test is equivalent to the any(x == ...) scan.
    assert '-foo-flag foo-val' in spec.compiler_flags['cflags']
def test_architecture_compatibility(target, constraint, expected):
    """Parametrized check of ABI architecture compatibility."""
    result = ABI().architecture_compatible(Spec(target), Spec(constraint))
    assert result == expected
def test_concretize_two_virtuals_with_one_bound(
        self, mutable_mock_repo
):
    """Test a package with multiple virtual dependencies and one preset."""
    # Must concretize without raising.
    Spec('hypre ^openblas').concretize()
def concretize_difficult_packages(self, a, b):
    """Test a couple of large packages that are often broken due
    to current limitations in the concretizer

    NOTE(review): the name lacks the ``test_`` prefix, so pytest will not
    collect it as a test -- confirm whether that is intentional.
    """
    s = Spec(a + '@' + b)
    s.concretize()
    assert s[a].version == ver(b)
def test_indirect_unsatisfied_single_valued_variant(self):
    """The concretized DAG must not contain [email protected]."""
    dependent = Spec('singlevalue-variant-dependent')
    dependent.concretize()
    assert '[email protected]' not in dependent
def _specify(spec_like):
    """Return spec_like unchanged if it is already a Spec, else parse it."""
    return spec_like if isinstance(spec_like, Spec) else Spec(spec_like)
def test_satisfies_virtual(self):
    # Don't use check_satisfies: it checks constrain() too, and
    # you can't constrain a non-virtual by a virtual.
    for provider in ('mpich', 'mpich2', 'zmpi'):
        assert Spec(provider).satisfies(Spec('mpi'))
def test_unsatisfiable_multi_value_variant(self):
    """Multi-valued variant semantics differ between concrete and
    abstract specs: concrete specs cannot be constrained further,
    abstract ones can (and may still fail at concretization)."""
    # Semantics for a multi-valued variant is different
    # Depending on whether the spec is concrete or not

    a = make_spec('multivalue_variant foo="bar"', concrete=True)
    spec_str = 'multivalue_variant foo="bar,baz"'
    b = Spec(spec_str)
    assert not a.satisfies(b)
    assert not a.satisfies(spec_str)
    # A concrete spec cannot be constrained further
    with pytest.raises(UnsatisfiableSpecError):
        a.constrain(b)

    a = Spec('multivalue_variant foo="bar"')
    spec_str = 'multivalue_variant foo="bar,baz"'
    b = Spec(spec_str)
    # The specs are abstract and they **could** be constrained
    assert a.satisfies(b)
    assert a.satisfies(spec_str)
    # An abstract spec can instead be constrained
    assert a.constrain(b)

    a = make_spec('multivalue_variant foo="bar,baz"', concrete=True)
    spec_str = 'multivalue_variant foo="bar,baz,quux"'
    b = Spec(spec_str)
    assert not a.satisfies(b)
    assert not a.satisfies(spec_str)
    # A concrete spec cannot be constrained further
    with pytest.raises(UnsatisfiableSpecError):
        a.constrain(b)

    a = Spec('multivalue_variant foo="bar,baz"')
    spec_str = 'multivalue_variant foo="bar,baz,quux"'
    b = Spec(spec_str)
    # The specs are abstract and they **could** be constrained
    assert a.satisfies(b)
    assert a.satisfies(spec_str)
    # An abstract spec can instead be constrained
    assert a.constrain(b)
    # ...but will fail during concretization if there are
    # values in the variant that are not allowed
    with pytest.raises(InvalidVariantValueError):
        a.concretize()

    # This time we'll try to set a single-valued variant
    a = Spec('multivalue_variant fee="bar"')
    spec_str = 'multivalue_variant fee="baz"'
    b = Spec(spec_str)
    # The specs are abstract and they **could** be constrained,
    # as before concretization I don't know which type of variant
    # I have (if it is not a BV)
    assert a.satisfies(b)
    assert a.satisfies(spec_str)
    # A variant cannot be parsed as single-valued until we try to
    # concretize. This means that we can constrain the variant above
    assert a.constrain(b)
    # ...but will fail during concretization if there are
    # multiple values set
    with pytest.raises(MultipleValuesInExclusiveVariantError):
        a.concretize()
def test_self_index(self):
    """Indexing a spec by its own name yields the spec itself."""
    spec = Spec('callpath')
    assert spec['callpath'] == spec
def check_invalid_constraint(spec, constraint):
    """Constraining ``spec`` by ``constraint`` must be unsatisfiable."""
    lhs = Spec(spec)
    rhs = Spec(constraint)
    with pytest.raises(UnsatisfiableSpecError):
        lhs.constrain(rhs)
def check_constrain_not_changed(spec, constraint):
    """Applying ``constraint`` must report no change to ``spec``."""
    target = Spec(spec)
    changed = target.constrain(constraint)
    assert not changed
def test_concretize_two_virtuals_with_dual_provider(self):
    """Test a package with multiple virtual dependencies and force a
    provider that provides both.
    """
    # Must concretize without raising.
    Spec('hypre ^openblas-with-lapack').concretize()
def test_default_rpaths_create_install_default_layout(tmpdir,
                                                      mirror_directory_def,
                                                      install_mockery):
    """
    Test the creation and installation of buildcaches with default rpaths
    into the default directory layout scheme.
    """
    gspec = Spec('garply')
    gspec.concretize()
    cspec = Spec('corge')
    cspec.concretize()

    iparser = argparse.ArgumentParser()
    install.setup_parser(iparser)
    # Install some packages with dependent packages
    iargs = iparser.parse_args(['--no-cache', cspec.name])
    install.install(iparser, iargs)

    # Register a test mirror pointing at the fixture-provided directory.
    global mirror_path_def
    mirror_path_def = mirror_directory_def
    mparser = argparse.ArgumentParser()
    mirror.setup_parser(mparser)
    margs = mparser.parse_args([
        'add', '--scope', 'site', 'test-mirror-def',
        'file://%s' % mirror_path_def
    ])
    mirror.mirror(mparser, margs)
    margs = mparser.parse_args(['list'])
    mirror.mirror(mparser, margs)

    # setup argument parser
    parser = argparse.ArgumentParser()
    buildcache.setup_parser(parser)

    # Set default buildcache args
    create_args = [
        'create', '-a', '-u', '-d', str(mirror_path_def), cspec.name
    ]
    install_args = ['install', '-a', '-u', cspec.name]

    # Create a buildcache
    args = parser.parse_args(create_args)
    buildcache.buildcache(parser, args)
    # Test force overwrite create buildcache (-f before the existing -a)
    create_args.insert(create_args.index('-a'), '-f')
    args = parser.parse_args(create_args)
    buildcache.buildcache(parser, args)

    # create mirror index
    args = parser.parse_args(
        ['update-index', '-d', 'file://%s' % str(mirror_path_def)])
    buildcache.buildcache(parser, args)
    # list the buildcaches in the mirror
    args = parser.parse_args(['list', '-a', '-l', '-v'])
    buildcache.buildcache(parser, args)

    # Uninstall the package and deps
    uparser = argparse.ArgumentParser()
    uninstall.setup_parser(uparser)
    uargs = uparser.parse_args(['-y', '--dependents', gspec.name])
    uninstall.uninstall(uparser, uargs)

    # test install
    args = parser.parse_args(install_args)
    buildcache.buildcache(parser, args)

    # This gives warning that spec is already installed
    buildcache.buildcache(parser, args)

    # test overwrite install
    install_args.insert(install_args.index('-a'), '-f')
    args = parser.parse_args(install_args)
    buildcache.buildcache(parser, args)

    args = parser.parse_args(['keys', '-f'])
    buildcache.buildcache(parser, args)

    args = parser.parse_args(['list'])
    buildcache.buildcache(parser, args)

    args = parser.parse_args(['list', '-a'])
    buildcache.buildcache(parser, args)

    args = parser.parse_args(['list', '-l', '-v'])
    buildcache.buildcache(parser, args)

    # Reset cached state and remove the test mirror again.
    bindist._cached_specs = set()
    spack.stage.purge()
    margs = mparser.parse_args(
        ['rm', '--scope', 'site', 'test-mirror-def'])
    mirror.mirror(mparser, margs)
def test_concretize_two_virtuals_with_two_bound(self):
    """Test a package with multiple virtual deps and two of them preset."""
    # Must concretize without raising.
    Spec('hypre ^openblas ^netlib-lapack').concretize()
def upload_spec(args):
    """Upload a spec's buildcache artifacts to an S3 bucket.

    Requires either ``args.spec`` (a spec string to concretize) or
    ``args.spec_yaml`` (a path to a spec yaml file), plus
    ``args.base_dir`` pointing at the local buildcache. Exits with
    status 1 on any argument or parsing error.
    """
    if not args.spec and not args.spec_yaml:
        tty.error('Cannot upload spec without spec arg or path to spec yaml')
        sys.exit(1)

    if not args.base_dir:
        tty.error('No base directory for buildcache specified')
        sys.exit(1)

    if args.spec:
        try:
            spec = Spec(args.spec)
            spec.concretize()
        except Exception as e:
            tty.debug(e)
            # FIX: corrected 'concrectize' typo in the error message.
            tty.error('Unable to concretize spec from string {0}'.format(
                args.spec))
            sys.exit(1)
    else:
        try:
            with open(args.spec_yaml, 'r') as fd:
                spec = Spec.from_yaml(fd.read())
        except Exception as e:
            tty.debug(e)
            # FIX: corrected 'concrectize' typo in the error message.
            tty.error('Unable to concretize spec from yaml {0}'.format(
                args.spec_yaml))
            sys.exit(1)

    s3, bucket_name = get_s3_session(args.endpoint_url)

    build_cache_dir = bindist.build_cache_relative_path()

    # Local paths / bucket keys for the tarball, spec file and cdash id.
    tarball_key = os.path.join(
        build_cache_dir, bindist.tarball_path_name(spec, '.spack'))
    tarball_path = os.path.join(args.base_dir, tarball_key)

    specfile_key = os.path.join(
        build_cache_dir, bindist.tarball_name(spec, '.spec.yaml'))
    specfile_path = os.path.join(args.base_dir, specfile_key)

    cdashidfile_key = os.path.join(
        build_cache_dir, bindist.tarball_name(spec, '.cdashid'))
    cdashidfile_path = os.path.join(args.base_dir, cdashidfile_key)

    tty.msg('Uploading {0}'.format(tarball_key))
    s3.meta.client.upload_file(
        tarball_path, bucket_name,
        os.path.join('mirror', tarball_key),
        ExtraArgs={'ACL': 'public-read'})

    tty.msg('Uploading {0}'.format(specfile_key))
    s3.meta.client.upload_file(
        specfile_path, bucket_name,
        os.path.join('mirror', specfile_key),
        ExtraArgs={'ACL': 'public-read'})

    # The cdashid file is optional; upload it only if present.
    if os.path.exists(cdashidfile_path):
        tty.msg('Uploading {0}'.format(cdashidfile_key))
        s3.meta.client.upload_file(
            cdashidfile_path, bucket_name,
            os.path.join('mirror', cdashidfile_key),
            ExtraArgs={'ACL': 'public-read'})
def test_concretize_two_virtuals(self):
    """Test a package with multiple virtual dependencies."""
    spec = Spec('hypre')
    spec.concretize()
def test_noversion_pkg(self, spec):
    """Test concretization failures for no-version packages."""
    # The raised error must mention that no valid versions exist.
    with pytest.raises(NoValidVersionError, match="no valid versions"):
        Spec(spec).concretized()
def concretize_multi_provider(self):
    """Concretizing with a pinned provider version keeps that version.

    NOTE(review): the name lacks the ``test_`` prefix, so pytest will not
    collect it as a test -- confirm whether that is intentional.
    """
    s = Spec('mpileaks ^[email protected]')
    s.concretize()
    assert s['mpi'].version == ver('1.10.3')
def test_compiler_child(self):
    """A compiler forced on a dependency does not leak to the root."""
    spec = Spec('mpileaks%clang ^dyninst%gcc')
    spec.concretize()
    assert spec['mpileaks'].satisfies('%clang')
    assert spec['dyninst'].satisfies('%gcc')
def test_compatibility(target, constraint, expected):
    """Parametrized check of overall ABI compatibility (loose/strict)."""
    result = ABI().compatible(Spec(target), Spec(constraint), loose=loose)
    assert result == expected
def test_external_package(self):
    """An external package uses its configured path and skips its deps."""
    spec = Spec('externaltool%gcc')
    spec.concretize()

    assert spec['externaltool'].external_path == '/path/to/external_tool'
    # The external's prerequisite must not be pulled into the DAG.
    assert 'externalprereq' not in spec
    assert spec['externaltool'].compiler.satisfies('gcc')
def from_dict(d):
    """Reconstruct a TestSuite from its serialized dictionary form."""
    # 'alias' is optional in the serialized form; default to None.
    restored = [Spec.from_dict(node) for node in d['specs']]
    return TestSuite(restored, d.get('alias'))
def test_my_dep_depends_on_provider_of_my_virtual_dep(self):
    """indirect-mpich must normalize and concretize without error."""
    s = Spec('indirect-mpich')
    s.normalize()
    s.concretize()
def concretize(abstract_spec):
    """Return the concretized Spec for an abstract spec (string or Spec)."""
    abstract = Spec(abstract_spec)
    return abstract.concretized()
def test_no_compilers_for_arch(self):
    """Concretizing for an arch with no configured compilers must fail."""
    s = Spec('a arch=linux-rhel0-x86_64')
    with pytest.raises(spack.concretize.NoCompilersForArchError):
        s.concretize()
def check_constrain(expected, spec, constraint):
    """Constrain ``spec`` by ``constraint`` and compare with ``expected``."""
    expected_spec = Spec(expected)
    constrained = Spec(spec)
    constrained.constrain(Spec(constraint))
    assert expected_spec == constrained
def test_virtual_index(self):
    """Indexing a concrete spec by a virtual name returns the provider."""
    s = Spec('callpath')
    s.concretize()

    s_mpich = Spec('callpath ^mpich')
    s_mpich.concretize()

    s_mpich2 = Spec('callpath ^mpich2')
    s_mpich2.concretize()

    s_zmpi = Spec('callpath ^zmpi')
    s_zmpi.concretize()

    # 'mpi' always resolves to some concrete provider, never to itself.
    assert s['mpi'].name != 'mpi'
    assert s_mpich['mpi'].name == 'mpich'
    assert s_mpich2['mpi'].name == 'mpich2'
    assert s_zmpi['zmpi'].name == 'zmpi'

    for spec in [s, s_mpich, s_mpich2, s_zmpi]:
        assert 'mpi' in spec