def read_spec(self, path):
    """Read the contents of a file and parse them as a spec"""
    with closing(open(path)) as spec_file:
        # Specs from files are assumed normal and concrete
        spec = Spec(spec_file.read().replace('\n', ''))

    if all(spack.db.exists(s.name) for s in spec.traverse()):
        copy = spec.copy()

        # TODO: It takes a lot of time to normalize every spec on read.
        # TODO: Storing graph info with spec files would fix this.
        copy.normalize()
        if copy.concrete:
            return copy   # These are specs spack still understands.

    # If we get here, either the spec is no longer in spack, or
    # something about its dependencies has changed. So we need to
    # just assume the read spec is correct. We'll lose graph
    # information if we do this, but this is just for best effort
    # for commands like uninstall and find. Currently Spack
    # doesn't do anything that needs the graph info after install.

    # TODO: store specs with full connectivity information, so
    # that we don't have to normalize or reconstruct based on
    # changing dependencies in the Spack tree.
    spec._normal = True
    spec._concrete = True
    return spec

def test_copy_satisfies_transitive(self):
    """Ensure every node of a concrete spec and the corresponding node
    of its copy satisfy each other."""
    spec = Spec('dttop')
    spec.concretize()
    copy = spec.copy()
    for s in spec.traverse():
        assert s.satisfies(copy[s.name])
        assert copy[s.name].satisfies(s)

def test_copy_deptypes(self):
    """Ensure that dependency types are preserved by spec copy."""
    s1 = Spec('dt-diamond')
    s1.normalize()
    self.check_diamond_deptypes(s1)
    self.check_diamond_normalized_dag(s1)

    s2 = s1.copy()
    self.check_diamond_normalized_dag(s2)
    self.check_diamond_deptypes(s2)

    s3 = Spec('dt-diamond')
    s3.concretize()
    self.check_diamond_deptypes(s3)

    s4 = s3.copy()
    self.check_diamond_deptypes(s4)

def test_normalize_twice(self):
    """Make sure normalize can be run twice on the same spec,
    and that it is idempotent."""
    spec = Spec('mpileaks')
    spec.normalize()
    n1 = spec.copy()

    spec.normalize()
    self.assertEqual(n1, spec)

def test_normalize_twice(self):
    """Make sure normalize can be run twice on the same spec,
    and that it is idempotent."""
    spec = Spec('mpileaks')
    spec.normalize()
    n1 = spec.copy()

    spec.normalize()
    assert n1 == spec

def test_copy_normalized(self):
    """Check that copying a normalized spec preserves equality, DAG
    structure, and link consistency without sharing any nodes."""
    orig = Spec('mpileaks')
    orig.normalize()
    copy = orig.copy()
    check_links(copy)

    assert orig == copy
    assert orig.eq_dag(copy)

    # ensure no shared nodes between orig and copy.
    orig_ids = set(id(s) for s in orig.traverse())
    copy_ids = set(id(s) for s in copy.traverse())
    assert not orig_ids.intersection(copy_ids)

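# check_links, used by test_copy_normalized above and the test_copy_simple
# variants below, is not defined in this section. A minimal sketch of what
# it needs to verify is given here, assuming only the public
# Spec.traverse(), Spec.dependencies(), and Spec.dependents() accessors
# (an assumption; this is not necessarily the helper used in Spack's own
# test suite):


def check_links(spec_to_check):
    """Verify that every dependency edge in the DAG is mirrored: each
    dependent lists this node as a dependency, and each dependency
    lists this node as a dependent."""
    for spec in spec_to_check.traverse():
        for dependent in spec.dependents():
            # the parent must know about this node as a dependency
            assert spec.name in [dep.name for dep in dependent.dependencies()]
        for dependency in spec.dependencies():
            # the child must know about this node as a dependent
            assert spec.name in [dep.name for dep in dependency.dependents()]
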
def test_satisfies_same_spec_with_different_hash(self):
    """Ensure that concrete specs are matched *exactly* by hash."""
    s1 = Spec('mpileaks').concretized()
    s2 = s1.copy()

    assert s1.satisfies(s2)
    assert s2.satisfies(s1)

    # Simulate specs that were installed before and after a change to
    # Spack's hashing algorithm. This just reverses s2's hash.
    s2._hash = s1.dag_hash()[-1::-1]

    assert not s1.satisfies(s2)
    assert not s2.satisfies(s1)

def test_copy_simple(self):
    """Check that copying an abstract spec preserves equality, DAG
    structure, and the _normal/_concrete flags without sharing nodes."""
    orig = Spec('mpileaks')
    copy = orig.copy()
    check_links(copy)

    assert orig == copy
    assert orig.eq_dag(copy)
    assert orig._normal == copy._normal
    assert orig._concrete == copy._concrete

    # ensure no shared nodes between orig and copy.
    orig_ids = set(id(s) for s in orig.traverse())
    copy_ids = set(id(s) for s in copy.traverse())
    assert not orig_ids.intersection(copy_ids)

def test_copy_simple(self):
    """unittest-style variant of the test_copy_simple check above."""
    orig = Spec('mpileaks')
    copy = orig.copy()
    self.check_links(copy)

    self.assertEqual(orig, copy)
    self.assertTrue(orig.eq_dag(copy))
    self.assertEqual(orig._normal, copy._normal)
    self.assertEqual(orig._concrete, copy._concrete)

    # ensure no shared nodes between orig and copy.
    orig_ids = set(id(s) for s in orig.traverse())
    copy_ids = set(id(s) for s in copy.traverse())
    self.assertFalse(orig_ids.intersection(copy_ids))

def test_copy_through_spec_build_interface(self):
    """Check that copying dependencies using id(node) as a fast
    identifier of the node works when the spec is wrapped in a
    SpecBuildInterface object.
    """
    s = Spec('mpileaks').concretized()

    c0 = s.copy()
    assert c0 == s

    # Single indirection
    c1 = s['mpileaks'].copy()
    assert c0 == c1 == s

    # Double indirection
    c2 = s['mpileaks']['mpileaks'].copy()
    assert c0 == c1 == c2 == s

def test_ordered_read_not_required_for_consistent_dag_hash(
        config, mock_packages):
    """Make sure ordered serialization isn't required to preserve hashes.

    For consistent hashes, we require that YAML and json documents
    have their keys serialized in a deterministic order. However, we
    don't want to require them to be serialized in order. This
    ensures that is not required.
    """
    specs = ['mpileaks ^zmpi', 'dttop', 'dtuse']
    for spec in specs:
        spec = Spec(spec)
        spec.concretize()

        #
        # Dict & corresponding YAML & JSON from the original spec.
        #
        spec_dict = spec.to_dict()
        spec_yaml = spec.to_yaml()
        spec_json = spec.to_json()

        #
        # Make a spec with reversed OrderedDicts for every
        # OrderedDict in the original.
        #
        reversed_spec_dict = reverse_all_dicts(spec.to_dict())

        #
        # Dump to YAML and JSON
        #
        yaml_string = syaml.dump(spec_dict, default_flow_style=False)
        reversed_yaml_string = syaml.dump(reversed_spec_dict,
                                          default_flow_style=False)
        json_string = sjson.dump(spec_dict)
        reversed_json_string = sjson.dump(reversed_spec_dict)

        #
        # Do many consistency checks
        #

        # spec yaml is ordered like the spec dict
        assert yaml_string == spec_yaml
        assert json_string == spec_json

        # reversed string is different from the original, so it
        # *would* generate a different hash
        assert yaml_string != reversed_yaml_string
        assert json_string != reversed_json_string

        # build specs from the "wrongly" ordered data
        round_trip_yaml_spec = Spec.from_yaml(yaml_string)
        round_trip_json_spec = Spec.from_json(json_string)
        round_trip_reversed_yaml_spec = Spec.from_yaml(reversed_yaml_string)
        round_trip_reversed_json_spec = Spec.from_json(reversed_json_string)

        # TODO: remove this when build deps are in provenance.
        spec = spec.copy(deps=('link', 'run'))

        # specs are equal to the original
        assert spec == round_trip_yaml_spec
        assert spec == round_trip_json_spec
        assert spec == round_trip_reversed_yaml_spec
        assert spec == round_trip_reversed_json_spec
        assert round_trip_yaml_spec == round_trip_reversed_yaml_spec
        assert round_trip_json_spec == round_trip_reversed_json_spec

        # dag_hashes are equal
        assert spec.dag_hash() == round_trip_yaml_spec.dag_hash()
        assert spec.dag_hash() == round_trip_json_spec.dag_hash()
        assert spec.dag_hash() == round_trip_reversed_yaml_spec.dag_hash()
        assert spec.dag_hash() == round_trip_reversed_json_spec.dag_hash()

        # full_hashes are equal
        spec.concretize()
        round_trip_yaml_spec.concretize()
        round_trip_json_spec.concretize()
        round_trip_reversed_yaml_spec.concretize()
        round_trip_reversed_json_spec.concretize()
        assert spec.full_hash() == round_trip_yaml_spec.full_hash()
        assert spec.full_hash() == round_trip_json_spec.full_hash()
        assert spec.full_hash() == round_trip_reversed_yaml_spec.full_hash()
        assert spec.full_hash() == round_trip_reversed_json_spec.full_hash()

def test_ordered_read_not_required_for_consistent_dag_hash(
        config, builtin_mock
):
    """Make sure ordered serialization isn't required to preserve hashes.

    For consistent hashes, we require that YAML and json documents
    have their keys serialized in a deterministic order. However, we
    don't want to require them to be serialized in order. This
    ensures that is not required.
    """
    specs = ['mpileaks ^zmpi', 'dttop', 'dtuse']
    for spec in specs:
        spec = Spec(spec)
        spec.concretize()

        #
        # Dict & corresponding YAML & JSON from the original spec.
        #
        spec_dict = spec.to_dict()
        spec_yaml = spec.to_yaml()
        spec_json = spec.to_json()

        #
        # Make a spec with reversed OrderedDicts for every
        # OrderedDict in the original.
        #
        reversed_spec_dict = reverse_all_dicts(spec.to_dict())

        #
        # Dump to YAML and JSON
        #
        yaml_string = syaml.dump(spec_dict, default_flow_style=False)
        reversed_yaml_string = syaml.dump(reversed_spec_dict,
                                          default_flow_style=False)
        json_string = sjson.dump(spec_dict)
        reversed_json_string = sjson.dump(reversed_spec_dict)

        #
        # Do many consistency checks
        #

        # spec yaml is ordered like the spec dict
        assert yaml_string == spec_yaml
        assert json_string == spec_json

        # reversed string is different from the original, so it
        # *would* generate a different hash
        assert yaml_string != reversed_yaml_string
        assert json_string != reversed_json_string

        # build specs from the "wrongly" ordered data
        round_trip_yaml_spec = Spec.from_yaml(yaml_string)
        round_trip_json_spec = Spec.from_json(json_string)
        round_trip_reversed_yaml_spec = Spec.from_yaml(
            reversed_yaml_string
        )
        round_trip_reversed_json_spec = Spec.from_json(
            reversed_json_string
        )

        # TODO: remove this when build deps are in provenance.
        spec = spec.copy(deps=('link', 'run'))

        # specs are equal to the original
        assert spec == round_trip_yaml_spec
        assert spec == round_trip_json_spec
        assert spec == round_trip_reversed_yaml_spec
        assert spec == round_trip_reversed_json_spec
        assert round_trip_yaml_spec == round_trip_reversed_yaml_spec
        assert round_trip_json_spec == round_trip_reversed_json_spec

        # dag_hashes are equal
        assert spec.dag_hash() == round_trip_yaml_spec.dag_hash()
        assert spec.dag_hash() == round_trip_json_spec.dag_hash()
        assert spec.dag_hash() == round_trip_reversed_yaml_spec.dag_hash()
        assert spec.dag_hash() == round_trip_reversed_json_spec.dag_hash()

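# reverse_all_dicts, used by both round-trip tests above, is not shown in
# this section. The only property the tests rely on is that it returns an
# equivalent document whose dictionary keys appear in a different (reversed)
# order. A minimal sketch under that assumption, using a plain
# collections.OrderedDict instead of Spack's ordered syaml dict type, is:

from collections import OrderedDict


def reverse_all_dicts(data):
    """Return a deep copy of `data` with every dict's key order reversed,
    recursing into nested dicts, lists, and tuples; leaves are unchanged."""
    if isinstance(data, dict):
        return OrderedDict(
            (key, reverse_all_dicts(value))
            for key, value in reversed(list(data.items()))
        )
    elif isinstance(data, (list, tuple)):
        return type(data)(reverse_all_dicts(item) for item in data)
    return data
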
def test_copy_dependencies(self):
    """Ensure that a dependency's version constraint survives a spec copy."""
    # The '@1.0' constraint below is an assumed example value.
    s1 = Spec('mpileaks ^[email protected]')
    s2 = s1.copy()

    assert '^[email protected]' in s2
    assert '^mpich2' in s2