def update(self, spec):
    if type(spec) != spack.spec.Spec:
        spec = spack.spec.Spec(spec)

    assert not spec.virtual

    pkg = spec.package
    for provided_spec, provider_spec in pkg.provided.iteritems():
        if provider_spec.satisfies(spec, deps=False):
            provided_name = provided_spec.name

            provider_map = self.providers.setdefault(provided_name, {})
            if provided_spec not in provider_map:
                provider_map[provided_spec] = set()

            if self.restrict:
                provider_set = provider_map[provided_spec]

                # If this package existed in the index before,
                # need to take the old versions out, as they're
                # now more constrained.
                old = set([s for s in provider_set if s.name == spec.name])
                provider_set.difference_update(old)

                # Now add the new version.
                provider_set.add(spec)
            else:
                # Before putting the spec in the map, constrain it so that
                # it provides what was asked for.
                constrained = spec.copy()
                constrained.constrain(provider_spec)
                provider_map[provided_spec].add(constrained)
def update(self, spec):
    if type(spec) != spack.spec.Spec:
        spec = spack.spec.Spec(spec)

    assert not spec.virtual

    pkg = spec.package
    for provided_spec, provider_spec in pkg.provided.iteritems():
        if provider_spec.satisfies(spec, deps=False):
            provided_name = provided_spec.name
            if provided_name not in self.providers:
                self.providers[provided_name] = {}

            provider_map = self.providers[provided_name]
            if provided_spec not in provider_map:
                provider_map[provided_spec] = set()

            if self.restrict:
                provider_map[provided_spec].add(spec)
            else:
                # Before putting the spec in the map, constrain it so that
                # it provides what was asked for.
                constrained = spec.copy()
                constrained.constrain(provider_spec)
                provider_map[provided_spec].add(constrained)
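# The update() variants above all build the same nested structure:
# virtual name -> provided spec -> set of provider specs. A minimal,
# self-contained toy of that setdefault-based indexing pattern follows;
# plain strings stand in for Spec objects, the restrict/constrain
# handling is elided, and the package names are purely illustrative.
providers = {}  # virtual name -> provided spec -> set of provider specs

def record_provider(virtual, provided, provider):
    provider_map = providers.setdefault(virtual, {})
    provider_map.setdefault(provided, set()).add(provider)

record_provider('mpi', 'mpi@3', 'mvapich2@2.3')
record_provider('mpi', 'mpi@3', 'openmpi@4.1.1')
record_provider('mpi', 'mpi@1', 'mpich@1.9')
assert providers['mpi']['mpi@3'] == {'mvapich2@2.3', 'openmpi@4.1.1'}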
def check_installed(self, spec):
    _check_concrete(spec)
    path = self.path_for_spec(spec)
    spec_file_path = self.spec_file_path(spec)

    if not os.path.isdir(path):
        return None

    if not os.path.isfile(spec_file_path):
        raise InconsistentInstallDirectoryError(
            'Install prefix exists but contains no spec.yaml:',
            " " + path)

    installed_spec = self.read_spec(spec_file_path)
    if installed_spec == spec:
        return path

    # DAG hashes currently do not include build dependencies.
    #
    # TODO: remove this when we do better concretization and don't
    # ignore build-only deps in hashes.
    elif installed_spec == spec.copy(deps=('link', 'run')):
        return path

    if spec.dag_hash() == installed_spec.dag_hash():
        raise SpecHashCollisionError(spec, installed_spec)
    else:
        raise InconsistentInstallDirectoryError(
            'Spec file in %s does not match hash!' % spec_file_path)
def check_installed(self, spec):
    _check_concrete(spec)
    path = self.path_for_spec(spec)
    spec_file_path = self.spec_file_path(spec)

    if not os.path.isdir(path):
        return None

    if not os.path.isfile(spec_file_path):
        raise InconsistentInstallDirectoryError(
            'Install prefix exists but contains no spec.yaml:',
            " " + path)

    installed_spec = self.read_spec(spec_file_path)
    if installed_spec == spec:
        return path

    # DAG hashes currently do not include build dependencies.
    #
    # TODO: remove this when we do better concretization and don't
    # ignore build-only deps in hashes.
    elif (installed_spec.copy(deps=('link', 'run')) ==
          spec.copy(deps=('link', 'run'))):
        # The directory layout prefix is based on the dag hash, so among
        # specs with differing full-hash but matching dag-hash, only one
        # may be installed. This means for example that for two instances
        # that differ only in CMake version used to build, only one will
        # be installed.
        return path

    if spec.dag_hash() == installed_spec.dag_hash():
        raise SpecHashCollisionError(spec, installed_spec)
    else:
        raise InconsistentInstallDirectoryError(
            'Spec file in %s does not match hash!' % spec_file_path)
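# Hedged usage sketch for check_installed() above: None means "nothing
# installed at the expected prefix", a path means "installed here", and
# layout errors signal a corrupt or inconsistent install directory.
# Assumes a Spack checkout is importable; the module path for the error
# base class is an assumption based on the _add() snippets below, which
# catch DirectoryLayoutError.
from spack.directory_layout import DirectoryLayoutError  # assumed location

def classify_install_state(layout, concrete_spec):
    """Return 'absent', 'installed', or 'corrupt' for a concrete spec."""
    try:
        prefix = layout.check_installed(concrete_spec)
    except DirectoryLayoutError:
        return 'corrupt'
    return 'absent' if prefix is None else 'installed'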
def test_spec_installed_upstream(upstream_and_downstream_db, config,
                                 monkeypatch):
    """Test whether Spec.installed_upstream() works."""
    upstream_write_db, upstream_db, upstream_layout, \
        downstream_db, downstream_layout = upstream_and_downstream_db

    # a known installed spec should say that it's installed
    mock_repo = MockPackageMultiRepo()
    mock_repo.add_package('x', [], [])

    with spack.repo.use_repositories(mock_repo):
        spec = spack.spec.Spec("x").concretized()
        assert not spec.installed
        assert not spec.installed_upstream

        upstream_write_db.add(spec, upstream_layout)
        upstream_db._read()

        monkeypatch.setattr(spack.store, "db", downstream_db)
        assert spec.installed
        assert spec.installed_upstream
        assert spec.copy().installed

    # an abstract spec should say it's not installed
    spec = spack.spec.Spec("not-a-real-package")
    assert not spec.installed
    assert not spec.installed_upstream
def validate_dependencies(self):
    """Ensure that this package and its dependencies all have consistent
    constraints on them.

    NOTE that this will NOT find sanity problems through a virtual
    dependency.  Virtual deps complicate the problem because we don't
    know in advance which ones conflict with others in the dependency
    DAG.  If there's more than one virtual dependency, it's a full-on
    SAT problem, so hold off on this for now.  The vdeps are actually
    skipped in preorder_traversal, so see that for details.

    TODO: investigate validating virtual dependencies.
    """
    # This algorithm just attempts to merge all the constraints on the
    # same package together, losing information about the source of
    # the conflict.  What we'd really like to know is exactly which two
    # constraints conflict, but that algorithm is more expensive, so
    # we'll do it the simple, less informative way for now.
    merged = spack.spec.DependencyMap()

    try:
        for pkg in self.preorder_traversal():
            for name, spec in pkg.dependencies.iteritems():
                if name not in merged:
                    merged[name] = spec.copy()
                else:
                    merged[name].constrain(spec)

    except spack.spec.UnsatisfiableSpecError as e:
        raise InvalidPackageDependencyError(
            "Package %s has inconsistent dependency constraints: %s"
            % (self.name, e.message))
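# A runnable toy of the merge-all-constraints idea in
# validate_dependencies() above, with version sets standing in for
# Specs: merging means intersecting, and an empty intersection is a
# conflict. All names and versions here are illustrative.
class Unsatisfiable(Exception):
    pass

def merge_constraints(constraint_maps):
    merged = {}
    for constraints in constraint_maps:
        for name, versions in constraints.items():
            if name not in merged:
                merged[name] = set(versions)
            else:
                merged[name] &= set(versions)  # the constrain() step
                if not merged[name]:
                    raise Unsatisfiable(name)
    return merged

# The 'zlib' constraints are compatible; the 'mpi' constraints are not.
a = {'zlib': {'1.2', '1.3'}, 'mpi': {'2'}}
b = {'zlib': {'1.3'}, 'mpi': {'3'}}
try:
    merge_constraints([a, b])
except Unsatisfiable as e:
    print('inconsistent dependency constraints on:', e)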
def regenerate(self, all_specs, roots):
    specs_for_view = []
    specs = all_specs if self.link == 'all' else roots

    for spec in specs:
        # The view does not store build deps, so if we want it to
        # recognize environment specs (which do store build deps),
        # then they need to be stripped.
        if spec.concrete:  # Do not link unconcretized roots
            specs_for_view.append(spec.copy(deps=('link', 'run')))

    if self.select:
        specs_for_view = list(filter(self.select_fn, specs_for_view))

    if self.exclude:
        specs_for_view = list(filter(self.exclude_fn, specs_for_view))

    installed_specs_for_view = set(
        s for s in specs_for_view if s.package.installed)

    view = self.view()

    view.clean()
    specs_in_view = set(view.get_all_specs())
    tty.msg("Updating view at {0}".format(self.root))

    rm_specs = specs_in_view - installed_specs_for_view
    view.remove_specs(*rm_specs, with_dependents=False)

    add_specs = installed_specs_for_view - specs_in_view
    view.add_specs(*add_specs, with_dependencies=False)
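# The reconciliation at the core of regenerate(), reduced to a toy:
# two set differences decide what leaves the view and what enters it.
# The spec names are placeholders.
specs_in_view = {'a', 'b', 'c'}             # currently linked
installed_specs_for_view = {'b', 'c', 'd'}  # what should be linked
rm_specs = specs_in_view - installed_specs_for_view   # leaves the view
add_specs = installed_specs_for_view - specs_in_view  # enters the view
assert rm_specs == {'a'} and add_specs == {'d'}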
def _add(self, spec, directory_layout=None, explicit=False):
    """Add an install record for this spec to the database.

    Assumes spec is installed in ``layout.path_for_spec(spec)``.

    Also ensures dependencies are present and updated in the DB as
    either installed or missing.
    """
    if not spec.concrete:
        raise NonConcreteSpecAddError(
            "Specs added to DB must be concrete.")

    for dep in spec.dependencies(_tracked_deps):
        dkey = dep.dag_hash()
        if dkey not in self._data:
            self._add(dep, directory_layout, explicit=False)

    key = spec.dag_hash()
    if key not in self._data:
        installed = False
        path = None
        if not spec.external and directory_layout:
            path = directory_layout.path_for_spec(spec)
            try:
                directory_layout.check_installed(spec)
                installed = True
            except DirectoryLayoutError as e:
                tty.warn(
                    'Dependency missing due to corrupt install directory:',
                    path, str(e))

        # Create a new install record with no deps initially.
        new_spec = spec.copy(deps=False)
        self._data[key] = InstallRecord(
            new_spec, path, installed, ref_count=0, explicit=explicit)

        # Connect dependencies from the DB to the new copy.
        for name, dep in spec.dependencies_dict(_tracked_deps).iteritems():
            dkey = dep.spec.dag_hash()
            new_spec._add_dependency(self._data[dkey].spec, dep.deptypes)
            self._data[dkey].ref_count += 1

        # Mark concrete once everything is built, and preserve
        # the original hash of concrete specs.
        new_spec._mark_concrete()
        new_spec._hash = key

    else:
        # If it is already there, mark it as installed.
        self._data[key].installed = True
        self._data[key].explicit = explicit
def mock_get_specs_multiarch(database, monkeypatch):
    specs = [spec.copy() for spec in database.query_local()]

    # make one spec that is NOT the test architecture
    for spec in specs:
        if spec.name == "mpileaks":
            spec.architecture = spack.spec.ArchSpec('linux-rhel7-x86_64')
            break

    monkeypatch.setattr(
        spack.binary_distribution, 'get_specs', lambda: specs)
def _add(self, spec, directory_layout=None, explicit=False):
    """Add an install record for this spec to the database.

    Assumes spec is installed in ``layout.path_for_spec(spec)``.

    Also ensures dependencies are present and updated in the DB as
    either installed or missing.
    """
    if not spec.concrete:
        raise NonConcreteSpecAddError(
            "Specs added to DB must be concrete.")

    for dep in spec.dependencies(_tracked_deps):
        dkey = dep.dag_hash()
        if dkey not in self._data:
            self._add(dep, directory_layout, explicit=False)

    key = spec.dag_hash()
    if key not in self._data:
        installed = bool(spec.external)
        path = None
        if not spec.external and directory_layout:
            path = directory_layout.path_for_spec(spec)
            try:
                directory_layout.check_installed(spec)
                installed = True
            except DirectoryLayoutError as e:
                tty.warn(
                    'Dependency missing due to corrupt install directory:',
                    path, str(e))

        # Create a new install record with no deps initially.
        new_spec = spec.copy(deps=False)
        self._data[key] = InstallRecord(
            new_spec, path, installed, ref_count=0, explicit=explicit)

        # Connect dependencies from the DB to the new copy.
        for name, dep in iteritems(spec.dependencies_dict(_tracked_deps)):
            dkey = dep.spec.dag_hash()
            new_spec._add_dependency(self._data[dkey].spec, dep.deptypes)
            self._data[dkey].ref_count += 1

        # Mark concrete once everything is built, and preserve
        # the original hash of concrete specs.
        new_spec._mark_concrete()
        new_spec._hash = key

    else:
        # If it is already there, mark it as installed.
        self._data[key].installed = True
        self._data[key].explicit = explicit
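# A self-contained toy of the dependency-first insertion in the _add()
# variants above: children are recorded before their parents, and each
# parent edge bumps the child's ref count exactly once. Node names are
# placeholders for dag hashes.
records = {}  # name -> {'ref_count': int}

def add(node, deps):
    if node in records:
        return  # already present; its own edges were counted earlier
    for child in deps.get(node, ()):
        add(child, deps)
    records[node] = {'ref_count': 0}
    for child in deps.get(node, ()):
        records[child]['ref_count'] += 1

deps = {'app': ['libA', 'libB'], 'libA': ['libB'], 'libB': []}
add('app', deps)
assert records['libB']['ref_count'] == 2  # referenced by app and libA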
def _increment_ref_count(self, spec, directory_layout=None):
    """Recursively examine dependencies and update their DB entries."""
    key = spec.dag_hash()
    if key not in self._data:
        installed = False
        path = None
        if directory_layout:
            path = directory_layout.path_for_spec(spec)
            installed = os.path.isdir(path)

        self._data[key] = InstallRecord(spec.copy(), path, installed)

        for dep in spec.dependencies.values():
            self._increment_ref_count(dep)

    self._data[key].ref_count += 1
def get(self, spec, new=False):
    if spec.virtual:
        raise UnknownPackageError(spec.name)

    if spec.namespace and spec.namespace != self.namespace:
        raise UnknownPackageError(
            "Repository %s does not contain package %s"
            % (self.namespace, spec.fullname))

    key = hash(spec)
    if new or key not in self._instances:
        package_class = self.get_pkg_class(spec.name)
        try:
            copy = spec.copy()  # defensive copy.  Package owns its spec.
            self._instances[key] = package_class(copy)
        except Exception:
            if spack.debug:
                sys.excepthook(*sys.exc_info())
            raise FailedConstructorError(spec.fullname, *sys.exc_info())

    return self._instances[key]
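# Hedged usage sketch of the memoizing get() above; `repo` and `spec`
# are hypothetical names, and the behavior shown follows directly from
# the code: equal specs hash alike, so repeated lookups hit the
# per-repo cache unless new=True forces a fresh instance.
pkg_a = repo.get(spec)            # constructs the Package, caches it
pkg_b = repo.get(spec)            # cache hit: pkg_b is pkg_a
pkg_c = repo.get(spec, new=True)  # rebuilds and replaces the cached one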
def __init__(self, pkg, spec, type=default_deptype):
    """Create a new Dependency.

    Args:
        pkg (type): Package that has this dependency
        spec (Spec): Spec indicating dependency requirements
        type (sequence): strings describing dependency relationship
    """
    assert isinstance(spec, spack.spec.Spec)

    self.pkg = pkg
    self.spec = spec.copy()

    # This dict maps condition specs to lists of Patch objects, just
    # as the patches dict on packages does.
    self.patches = {}

    if type is None:
        self.type = set(default_deptype)
    else:
        self.type = set(type)
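# Hedged usage sketch for Dependency.__init__ above. The deptype
# strings ('build', 'link', 'run', 'test') are Spack's standard
# vocabulary; `MyPackage` is a hypothetical package class. Note that
# the constructor copies the spec, so the caller's Spec stays unshared.
dep = Dependency(MyPackage,
                 spack.spec.Spec('zlib@1.2.11:'),
                 type=('build', 'link'))
assert dep.type == {'build', 'link'}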
def update(self, spec):
    if type(spec) != spack.spec.Spec:
        spec = spack.spec.Spec(spec)

    assert not spec.virtual

    pkg = spec.package
    for provided_spec, provider_spec in pkg.provided.iteritems():
        if provider_spec.satisfies(spec, deps=False):
            provided_name = provided_spec.name
            if provided_name not in self.providers:
                self.providers[provided_name] = {}

            if self.restrict:
                self.providers[provided_name][provided_spec] = spec
            else:
                # Before putting the spec in the map, constrain it so that
                # it provides what was asked for.
                constrained = spec.copy()
                constrained.constrain(provider_spec)
                self.providers[provided_name][provided_spec] = constrained
def _add(
        self,
        spec,
        directory_layout=None,
        explicit=False,
        installation_time=None
):
    """Add an install record for this spec to the database.

    Assumes spec is installed in ``layout.path_for_spec(spec)``.

    Also ensures dependencies are present and updated in the DB as
    either installed or missing.

    Args:
        spec: spec to be added
        directory_layout: layout of the spec installation
        explicit: possible values are True, False, and any. A spec
            that was installed following a specific user request is
            marked as explicit; if instead it was pulled in as a
            dependency of a user-requested spec, it's considered
            implicit.
        installation_time: date and time of installation
    """
    if not spec.concrete:
        raise NonConcreteSpecAddError(
            "Specs added to DB must be concrete.")

    # Retrieve optional arguments
    installation_time = installation_time or _now()

    for dep in spec.dependencies(_tracked_deps):
        dkey = dep.dag_hash()
        if dkey not in self._data:
            extra_args = {
                'explicit': False,
                'installation_time': installation_time
            }
            self._add(dep, directory_layout, **extra_args)

    key = spec.dag_hash()
    if key not in self._data:
        installed = bool(spec.external)
        path = None
        if not spec.external and directory_layout:
            path = directory_layout.path_for_spec(spec)
            try:
                directory_layout.check_installed(spec)
                installed = True
            except DirectoryLayoutError as e:
                tty.warn(
                    'Dependency missing due to corrupt install directory:',
                    path, str(e))

        # Create a new install record with no deps initially.
        new_spec = spec.copy(deps=False)
        extra_args = {
            'explicit': explicit,
            'installation_time': installation_time
        }
        self._data[key] = InstallRecord(
            new_spec, path, installed, ref_count=0, **extra_args
        )

        # Connect dependencies from the DB to the new copy.
        for name, dep in iteritems(spec.dependencies_dict(_tracked_deps)):
            dkey = dep.spec.dag_hash()
            new_spec._add_dependency(self._data[dkey].spec, dep.deptypes)
            self._data[dkey].ref_count += 1

        # Mark concrete once everything is built, and preserve
        # the original hash of concrete specs.
        new_spec._mark_concrete()
        new_spec._hash = key

    else:
        # If it is already there, mark it as installed.
        self._data[key].installed = True
        self._data[key].explicit = explicit
def test_ordered_read_not_required_for_consistent_dag_hash(
        hash_type, config, mock_packages):
    """Make sure ordered serialization isn't required to preserve hashes.

    For consistent hashes, we require that YAML and JSON documents have
    their keys serialized in a deterministic order. However, we don't
    want to require that documents be *read back* in that order. This
    test ensures that reading is order-independent.
    """
    specs = ['mpileaks ^zmpi', 'dttop', 'dtuse']
    for spec in specs:
        spec = Spec(spec)
        spec.concretize()

        #
        # Dict & corresponding YAML & JSON from the original spec.
        #
        spec_dict = spec.to_dict(hash=hash_type)
        spec_yaml = spec.to_yaml(hash=hash_type)
        spec_json = spec.to_json(hash=hash_type)

        #
        # Make a spec with reversed OrderedDicts for every
        # OrderedDict in the original.
        #
        reversed_spec_dict = reverse_all_dicts(spec.to_dict(hash=hash_type))

        #
        # Dump to YAML and JSON
        #
        yaml_string = syaml.dump(spec_dict, default_flow_style=False)
        reversed_yaml_string = syaml.dump(reversed_spec_dict,
                                          default_flow_style=False)
        json_string = sjson.dump(spec_dict)
        reversed_json_string = sjson.dump(reversed_spec_dict)

        #
        # Do many consistency checks
        #

        # spec yaml is ordered like the spec dict
        assert yaml_string == spec_yaml
        assert json_string == spec_json

        # reversed string is different from the original, so it
        # *would* generate a different hash
        assert yaml_string != reversed_yaml_string
        assert json_string != reversed_json_string

        # build specs from the "wrongly" ordered data
        round_trip_yaml_spec = Spec.from_yaml(yaml_string)
        round_trip_json_spec = Spec.from_json(json_string)
        round_trip_reversed_yaml_spec = Spec.from_yaml(reversed_yaml_string)
        round_trip_reversed_json_spec = Spec.from_json(reversed_json_string)

        # Strip spec if we stripped the yaml
        spec = spec.copy(deps=hash_type.deptype)

        # specs are equal to the original
        assert spec == round_trip_yaml_spec
        assert spec == round_trip_json_spec
        assert spec == round_trip_reversed_yaml_spec
        assert spec == round_trip_reversed_json_spec
        assert round_trip_yaml_spec == round_trip_reversed_yaml_spec
        assert round_trip_json_spec == round_trip_reversed_json_spec

        # dag_hashes are equal
        assert spec.dag_hash() == round_trip_yaml_spec.dag_hash()
        assert spec.dag_hash() == round_trip_json_spec.dag_hash()
        assert spec.dag_hash() == round_trip_reversed_yaml_spec.dag_hash()
        assert spec.dag_hash() == round_trip_reversed_json_spec.dag_hash()

        # full_hashes are equal if we round-tripped by build_hash or
        # full_hash
        if hash_type in (ht.build_hash, ht.full_hash):
            spec.concretize()
            round_trip_yaml_spec.concretize()
            round_trip_json_spec.concretize()
            round_trip_reversed_yaml_spec.concretize()
            round_trip_reversed_json_spec.concretize()
            assert spec.full_hash() == round_trip_yaml_spec.full_hash()
            assert spec.full_hash() == round_trip_json_spec.full_hash()
            assert (spec.full_hash() ==
                    round_trip_reversed_yaml_spec.full_hash())
            assert (spec.full_hash() ==
                    round_trip_reversed_json_spec.full_hash())
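# The test above leans on a reverse_all_dicts helper that is not shown
# in this section. A plausible sketch of its behavior, assuming plain
# dicts that preserve insertion order (Python 3.7+); the real helper
# presumably operates on the ordered-dict type used by the YAML layer.
def reverse_all_dicts(data):
    """Return a copy of data with every dict's keys in reversed order."""
    if isinstance(data, dict):
        return dict((k, reverse_all_dicts(data[k]))
                    for k in reversed(list(data)))
    elif isinstance(data, (list, tuple)):
        return type(data)(reverse_all_dicts(x) for x in data)
    return data

out = reverse_all_dicts({'a': 1, 'b': {'c': 2, 'd': 3}})
assert list(out) == ['b', 'a'] and list(out['b']) == ['d', 'c']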