def check_installed(self, spec):
    """Verify whether ``spec`` is installed in this directory layout.

    Returns:
        The install path if the prefix exists and the stored spec
        matches (exactly, or when ignoring build-only dependencies),
        or ``None`` if the prefix directory does not exist.

    Raises:
        InconsistentInstallDirectoryError: the prefix exists with no
            spec.yaml, or the stored spec does not match the hash.
        SpecHashCollisionError: a different spec with the same dag
            hash is already installed.
    """
    _check_concrete(spec)
    path = self.path_for_spec(spec)
    spec_file_path = self.spec_file_path(spec)

    # No prefix at all: not installed.
    if not os.path.isdir(path):
        return None

    if not os.path.isfile(spec_file_path):
        raise InconsistentInstallDirectoryError(
            'Install prefix exists but contains no spec.yaml:',
            " " + path)

    installed_spec = self.read_spec(spec_file_path)
    if installed_spec == spec:
        return path

    # DAG hashes currently do not include build dependencies.
    #
    # TODO: remove this when we do better concretization and don't
    # ignore build-only deps in hashes.
    elif (installed_spec.copy(deps=('link', 'run')) ==
          spec.copy(deps=('link', 'run'))):
        # The directory layout prefix is based on the dag hash, so among
        # specs with differing full-hash but matching dag-hash, only one
        # may be installed. This means for example that for two instances
        # that differ only in CMake version used to build, only one will
        # be installed.
        return path

    if spec.dag_hash() == installed_spec.dag_hash():
        raise SpecHashCollisionError(spec, installed_spec)
    else:
        raise InconsistentInstallDirectoryError(
            'Spec file in %s does not match hash!' % spec_file_path)
def test_install_monitor_save_local(install_mockery_mutable_config,
                                    mock_fetch, tmpdir_factory):
    """
    Mock installing and saving monitor results to file.
    """
    # Point the monitor at a temporary reports directory for this test.
    reports_dir = tmpdir_factory.mktemp('reports')
    spack.config.set('config:monitor_dir', str(reports_dir))
    out = install('--monitor', '--monitor-save-local', 'dttop')
    assert "Successfully installed dttop" in out

    # The reports directory should not be empty (timestamped folders)
    assert os.listdir(str(reports_dir))

    # Get the spec name
    spec = spack.spec.Spec("dttop")
    spec.concretize()

    # Ensure we have monitor results saved: each timestamped folder must
    # contain both the build metadata and the spec file for dttop.
    for dirname in os.listdir(str(reports_dir)):
        dated_dir = os.path.join(str(reports_dir), dirname)
        build_metadata = "build-metadata-%s.json" % spec.dag_hash()
        assert build_metadata in os.listdir(dated_dir)
        spec_file = "spec-dttop-%s-config.json" % spec.version
        assert spec_file in os.listdir(dated_dir)

    # Restore the default monitor directory so later tests are unaffected.
    # NOTE(review): this cleanup is skipped if an assert above fails —
    # consider a fixture/finalizer instead.
    spack.config.set('config:monitor_dir', "~/.spack/reports/monitor")
def test_add_to_upstream_after_downstream(upstream_and_downstream_db):
    """An upstream DB can add a package after it is installed in the
    downstream DB. When a package is recorded as installed in both, the
    results should refer to the downstream DB.
    """
    upstream_write_db, upstream_db, upstream_layout,\
        downstream_db, downstream_layout = (upstream_and_downstream_db)

    mock_repo = MockPackageMultiRepo()
    mock_repo.add_package('x', [], [])

    with spack.repo.use_repositories(mock_repo):
        spec = spack.spec.Spec('x')
        spec.concretize()
        # Install downstream first, then record the same spec upstream.
        downstream_db.add(spec, downstream_layout)
        upstream_write_db.add(spec, upstream_layout)
        # Re-read so the (read-only) upstream handle sees the new record.
        upstream_db._read()

        upstream, record = downstream_db.query_by_spec_hash(spec.dag_hash())
        # Even though the package is recorded as installed in the upstream
        # DB, we prefer the locally-installed instance
        assert not upstream

        qresults = downstream_db.query('x')
        assert len(qresults) == 1
        queried_spec, = qresults
        # Temporarily swap the global store DB so spec.prefix resolves
        # against the downstream database.
        # NOTE(review): orig_db is assigned inside the try; if that line
        # ever raised, the finally would hit an unbound name. Move the
        # assignment before the try.
        try:
            orig_db = spack.store.db
            spack.store.db = downstream_db
            assert queried_spec.prefix == downstream_layout.path_for_spec(
                spec)
        finally:
            spack.store.db = orig_db
def specs_deprecated_by(self, spec):
    """Return all specs deprecated in favor of the given spec."""
    target_hash = spec.dag_hash()
    with self.read_transaction():
        deprecated = []
        for record in self._data.values():
            if record.deprecated_for == target_hash:
                deprecated.append(record.spec)
        return deprecated
def _add(self, spec, path, directory_layout=None):
    """Add an install record for spec at path to the database.

    This assumes that the spec is not already installed. It
    updates the ref counts on dependencies of the spec
    in the DB.

    This operation is in-memory, and does not lock the DB.
    """
    key = spec.dag_hash()
    if key in self._data:
        # A record already exists: just mark it installed again.
        rec = self._data[key]
        rec.installed = True

        # TODO: this overwrites a previous install path (when path !=
        # self._data[key].path), and the old path still has a
        # dependent in the DB. We could consider re-RPATH-ing the
        # dependents. This case is probably infrequent and may not be
        # worth fixing, but this is where we can discover it.
        rec.path = path
    else:
        # New record; dependencies get their ref counts bumped.
        self._data[key] = InstallRecord(spec, path, True)
        # NOTE(review): spec.dependencies is accessed as a dict here
        # (old Spec API) — confirm against the Spec class in this
        # revision before touching this code.
        for dep in spec.dependencies.values():
            self._increment_ref_count(dep, directory_layout)
def _add(self, spec, path, directory_layout=None, explicit=False):
    """Add an install record for spec at path to the database.

    This assumes that the spec is not already installed. It
    updates the ref counts on dependencies of the spec
    in the DB.

    This operation is in-memory, and does not lock the DB.

    ``explicit`` marks the record as a direct user request rather
    than a pulled-in dependency.
    """
    key = spec.dag_hash()
    if key in self._data:
        # Existing record: mark installed; explicit flag is NOT updated
        # on this path (only new records receive it).
        rec = self._data[key]
        rec.installed = True

        # TODO: this overwrites a previous install path (when path !=
        # self._data[key].path), and the old path still has a
        # dependent in the DB. We could consider re-RPATH-ing the
        # dependents. This case is probably infrequent and may not be
        # worth fixing, but this is where we can discover it.
        rec.path = path
    else:
        self._data[key] = InstallRecord(spec, path, True,
                                        explicit=explicit)
        # NOTE(review): spec.dependencies accessed as a dict (old Spec
        # API) — confirm against the Spec class in this revision.
        for dep in spec.dependencies.values():
            self._increment_ref_count(dep, directory_layout)
def installed_relatives(self, spec, direction='children', transitive=True):
    """Return installed specs related to this one.

    ``direction`` selects dependents ('parents') or dependencies
    ('children'); ``transitive`` walks the full DAG instead of only
    immediate relatives. Relatives missing from the DB or not
    installed are skipped (with a warning for missing ones).
    """
    if direction not in ('parents', 'children'):
        raise ValueError("Invalid direction: %s" % direction)

    relatives = set()
    # NOTE(review): the loop variable shadows the ``spec`` parameter —
    # intentional here (each query result is processed), but fragile.
    for spec in self.query(spec):
        if transitive:
            to_add = spec.traverse(direction=direction, root=False)
        elif direction == 'parents':
            to_add = spec.dependents()
        else:  # direction == 'children'
            to_add = spec.dependencies()

        for relative in to_add:
            hash_key = relative.dag_hash()
            if hash_key not in self._data:
                # DB is missing a record for a relative: warn, don't fail.
                reltype = ('Dependent' if direction == 'parents'
                           else 'Dependency')
                tty.warn("Inconsistent state! %s %s of %s not in DB"
                         % (reltype, hash_key, spec.dag_hash()))
                continue
            if not self._data[hash_key].installed:
                continue
            relatives.add(relative)
    return relatives
def check_installed(self, spec):
    """Verify whether ``spec`` is installed in this directory layout.

    Returns the install path when the stored spec matches (exactly,
    or after dropping build-only deps from the query spec), ``None``
    when the prefix directory is absent.

    Raises:
        InconsistentInstallDirectoryError: prefix exists without a
            spec.yaml, or stored spec does not match the hash.
        SpecHashCollisionError: different spec, same dag hash.
    """
    _check_concrete(spec)
    path = self.path_for_spec(spec)
    spec_file_path = self.spec_file_path(spec)

    if not os.path.isdir(path):
        return None

    if not os.path.isfile(spec_file_path):
        raise InconsistentInstallDirectoryError(
            'Install prefix exists but contains no spec.yaml:',
            " " + path)

    installed_spec = self.read_spec(spec_file_path)
    if installed_spec == spec:
        return path

    # DAG hashes currently do not include build dependencies.
    #
    # TODO: remove this when we do better concretization and don't
    # ignore build-only deps in hashes.
    elif installed_spec == spec.copy(deps=('link', 'run')):
        return path

    if spec.dag_hash() == installed_spec.dag_hash():
        raise SpecHashCollisionError(spec, installed_spec)
    else:
        raise InconsistentInstallDirectoryError(
            'Spec file in %s does not match hash!' % spec_file_path)
def installed_relatives(self, spec, direction='children', transitive=True,
                        deptype='all'):
    """Return installed specs related to this one.

    ``direction`` selects dependents ('parents') or dependencies
    ('children'); ``transitive`` walks the whole DAG; ``deptype``
    filters which dependency types are followed. Missing relatives
    raise if ``self._fail_when_missing_deps`` is set, else warn.
    """
    if direction not in ('parents', 'children'):
        raise ValueError("Invalid direction: %s" % direction)

    relatives = set()
    # NOTE(review): the loop variable shadows the ``spec`` parameter.
    for spec in self.query(spec):
        if transitive:
            to_add = spec.traverse(
                direction=direction, root=False, deptype=deptype)
        elif direction == 'parents':
            to_add = spec.dependents(deptype=deptype)
        else:  # direction == 'children'
            to_add = spec.dependencies(deptype=deptype)

        for relative in to_add:
            hash_key = relative.dag_hash()
            # Lookup covers both local and upstream databases.
            upstream, record = self.query_by_spec_hash(hash_key)
            if not record:
                reltype = ('Dependent' if direction == 'parents'
                           else 'Dependency')
                msg = ("Inconsistent state! %s %s of %s not in DB"
                       % (reltype, hash_key, spec.dag_hash()))
                if self._fail_when_missing_deps:
                    raise MissingDependenciesError(msg)
                tty.warn(msg)
                continue
            if not record.installed:
                continue
            relatives.add(relative)
    return relatives
def install_all(self, args=None):
    """Install all concretized specs in an environment.

    Args:
        args: optional parsed CLI namespace; install-related options
            are translated into ``Package.do_install`` kwargs.
    """
    # Make sure log directory exists
    log_path = self.log_path
    fs.mkdirp(log_path)

    for concretized_hash in self.concretized_order:
        spec = self.specs_by_hash[concretized_hash]

        # Parse cli arguments and construct a dictionary
        # that will be passed to Package.do_install API
        kwargs = dict()
        if args:
            spack.cmd.install.update_kwargs_from_args(args, kwargs)
        with fs.working_dir(self.path):
            spec.package.do_install(**kwargs)

            # Link the resulting log file into logs dir
            build_log_link = os.path.join(
                log_path, '%s-%s.log' % (spec.name, spec.dag_hash(7)))
            # Use lexists, not exists: a broken symlink must also be
            # removed or os.symlink below raises FileExistsError.
            # (Matches the newer _install/install_all implementations.)
            if os.path.lexists(build_log_link):
                os.remove(build_log_link)
            os.symlink(spec.package.build_log_path, build_log_link)
def make_configuration(spec):
    """Returns the lmod configuration for spec"""
    key = spec.dag_hash()
    if key not in configuration_registry:
        # First request for this spec: build and memoize the config.
        configuration_registry[key] = LmodConfiguration(spec)
    return configuration_registry[key]
def all_specs_by_hash(self):
    """Map of hashes to spec for all specs in this environment."""
    by_hash = {}
    for concrete_hash in self.concretized_order:
        root = self.specs_by_hash[concrete_hash]
        for node in root.traverse(deptype=('link', 'run')):
            by_hash[node.dag_hash()] = node
    return by_hash
def _increment_ref_count(self, spec): key = spec.dag_hash() if key not in self._data: return rec = self._data[key] rec.ref_count += 1
def display_specs_as_json(specs, deps=False):
    """Convert specs to a list of json records."""
    emitted = set()
    records = []
    for spec in specs:
        spec_hash = spec.dag_hash()
        if spec_hash in emitted:
            continue
        emitted.add(spec_hash)
        records.append(spec.to_record_dict())

        if not deps:
            continue
        # Also emit each (not-yet-seen) node of the spec's DAG.
        for dep in spec.traverse():
            dep_hash = dep.dag_hash()
            if dep_hash in emitted:
                continue
            emitted.add(dep_hash)
            records.append(dep.to_record_dict())

    sjson.dump(records, sys.stdout)
def _get_matching_spec_key(self, spec, **kwargs): """Get the exact spec OR get a single spec that matches.""" key = spec.dag_hash() if key not in self._data: match = self.query_one(spec, **kwargs) if match: return match.dag_hash() raise KeyError("No such spec in database! %s" % spec) return key
def match_downloaded_specs(pkgs, allow_multiple_matches=False, force=False, other_arch=False): """Returns a list of specs matching the not necessarily concretized specs given from cli Args: specs: list of specs to be matched against buildcaches on mirror allow_multiple_matches : if True multiple matches are admitted Return: list of specs """ # List of specs that match expressions given via command line specs_from_cli = [] has_errors = False try: specs = bindist.update_cache_and_get_specs() except bindist.FetchCacheError as e: tty.error(e) if not other_arch: arch = spack.spec.Spec.default_arch() specs = [s for s in specs if s.satisfies(arch)] for pkg in pkgs: matches = [] tty.msg("buildcache spec(s) matching %s \n" % pkg) for spec in sorted(specs): if pkg.startswith('/'): pkghash = pkg.replace('/', '') if spec.dag_hash().startswith(pkghash): matches.append(spec) else: if spec.satisfies(pkg): matches.append(spec) # For each pkg provided, make sure it refers to only one package. # Fail and ask user to be unambiguous if it doesn't if not allow_multiple_matches and len(matches) > 1: tty.error('%s matches multiple downloaded packages:' % pkg) for match in matches: tty.msg('"%s"' % match.format()) has_errors = True # No downloaded package matches the query if len(matches) == 0: tty.error('%s does not match any downloaded packages.' % pkg) has_errors = True specs_from_cli.extend(matches) if has_errors: tty.die('use one of the matching specs above') return specs_from_cli
def _add(self, spec, directory_layout=None, explicit=False):
    """Add an install record for this spec to the database.

    Assumes spec is installed in ``layout.path_for_spec(spec)``.

    Also ensures dependencies are present and updated in the DB as
    either installed or missing.
    """
    if not spec.concrete:
        raise NonConcreteSpecAddError(
            "Specs added to DB must be concrete.")

    # Recursively ensure every tracked dependency has a record first,
    # so the ref-count wiring below can assume they exist.
    for dep in spec.dependencies(_tracked_deps):
        dkey = dep.dag_hash()
        if dkey not in self._data:
            self._add(dep, directory_layout, explicit=False)

    key = spec.dag_hash()
    if key not in self._data:
        installed = False
        path = None
        if not spec.external and directory_layout:
            path = directory_layout.path_for_spec(spec)
            try:
                directory_layout.check_installed(spec)
                installed = True
            except DirectoryLayoutError as e:
                # Record exists but prefix is broken: keep it as missing.
                tty.warn(
                    'Dependency missing due to corrupt install directory:',
                    path, str(e))

        # Create a new install record with no deps initially.
        new_spec = spec.copy(deps=False)
        self._data[key] = InstallRecord(new_spec, path, installed,
                                        ref_count=0, explicit=explicit)

        # Connect dependencies from the DB to the new copy.
        # NOTE(review): ``.iteritems()`` is Python 2 only — this
        # revision appears to predate Python 3 support.
        for name, dep in spec.dependencies_dict(_tracked_deps).iteritems():
            dkey = dep.spec.dag_hash()
            new_spec._add_dependency(self._data[dkey].spec, dep.deptypes)
            self._data[dkey].ref_count += 1

        # Mark concrete once everything is built, and preserve
        # the original hash of concrete specs.
        new_spec._mark_concrete()
        new_spec._hash = key

    else:
        # If it is already there, mark it as installed.
        self._data[key].installed = True
        self._data[key].explicit = explicit
def _get_matching_spec_key(self, spec, **kwargs): """Get the exact spec OR get a single spec that matches.""" key = spec.dag_hash() upstream, record = self.query_by_spec_hash(key) if not record: match = self.query_one(spec, **kwargs) if match: return match.dag_hash() raise KeyError("No such spec in database! %s" % spec) return key
def _add(self, spec, directory_layout=None, explicit=False):
    """Add an install record for this spec to the database.

    Assumes spec is installed in ``layout.path_for_spec(spec)``.

    Also ensures dependencies are present and updated in the DB as
    either installed or missing.
    """
    if not spec.concrete:
        raise NonConcreteSpecAddError(
            "Specs added to DB must be concrete.")

    # Recursively ensure every tracked dependency has a record first.
    for dep in spec.dependencies(_tracked_deps):
        dkey = dep.dag_hash()
        if dkey not in self._data:
            self._add(dep, directory_layout, explicit=False)

    key = spec.dag_hash()
    if key not in self._data:
        # External specs have no layout prefix; treat them as installed.
        installed = bool(spec.external)
        path = None
        if not spec.external and directory_layout:
            path = directory_layout.path_for_spec(spec)
            try:
                directory_layout.check_installed(spec)
                installed = True
            except DirectoryLayoutError as e:
                # Broken prefix: keep the record but mark not installed.
                tty.warn(
                    'Dependency missing due to corrupt install directory:',
                    path, str(e))

        # Create a new install record with no deps initially.
        new_spec = spec.copy(deps=False)
        self._data[key] = InstallRecord(
            new_spec, path, installed, ref_count=0, explicit=explicit)

        # Connect dependencies from the DB to the new copy.
        for name, dep in iteritems(spec.dependencies_dict(_tracked_deps)):
            dkey = dep.spec.dag_hash()
            new_spec._add_dependency(self._data[dkey].spec, dep.deptypes)
            self._data[dkey].ref_count += 1

        # Mark concrete once everything is built, and preserve
        # the original hash of concrete specs.
        new_spec._mark_concrete()
        new_spec._hash = key

    else:
        # If it is already there, mark it as installed.
        self._data[key].installed = True
        self._data[key].explicit = explicit
def all_specs_by_hash(self):
    """Map of hashes to spec for all specs in this environment.

    Note: keys are dag-hashes computed without build dependencies,
    while the environment itself tracks specs by hashes computed with
    all dependencies. This function should therefore not be used by an
    Environment object for management of its own data structures.
    """
    return {
        node.dag_hash(): node
        for concrete_hash in self.concretized_order
        for node in self.specs_by_hash[concrete_hash].traverse(
            deptype=('link', 'run'))
    }
def listspecs(args):
    """list binary packages available from mirrors"""
    specs = bindist.get_specs(args.force)
    if args.packages:
        pkgs = set(args.packages)
        for pkg in pkgs:
            # BUG FIX: the header previously interpolated the whole set
            # (``% pkgs``) instead of the current package (``% pkg``),
            # printing e.g. "matching {'a', 'b'}" for every package.
            tty.msg("buildcache spec(s) matching " +
                    "%s and commands to install them" % pkg)
            for spec in sorted(specs):
                if spec.satisfies(pkg):
                    tty.msg('Enter\nspack buildcache install /%s\n' %
                            spec.dag_hash(7) +
                            ' to install "%s"' % spec.format())
    else:
        tty.msg("buildcache specs and commands to install them")
        for spec in sorted(specs):
            tty.msg('Enter\nspack buildcache install /%s\n' %
                    spec.dag_hash(7) +
                    ' to install "%s"' % spec.format())
def _install(self, spec, **install_args): spec.package.do_install(**install_args) # Make sure log directory exists log_path = self.log_path fs.mkdirp(log_path) with fs.working_dir(self.path): # Link the resulting log file into logs dir build_log_link = os.path.join( log_path, '%s-%s.log' % (spec.name, spec.dag_hash(7))) if os.path.lexists(build_log_link): os.remove(build_log_link) os.symlink(spec.package.build_log_path, build_log_link)
def relative_path_for_spec(self, spec):
    """Return the root-relative install path for a concrete spec."""
    _check_concrete(spec)

    # External packages live at their recorded external location.
    if spec.external:
        return spec.external

    compiler_dir = "%s-%s" % (spec.compiler.name, spec.compiler.version)
    leaf_dir = "%s-%s-%s" % (
        spec.name, spec.version, spec.dag_hash(self.hash_len))
    return join_path(spec.architecture, compiler_dir, leaf_dir)
def reindex(self, directory_layout): """Build database index from scratch based on a directory layout. Locks the DB if it isn't locked already. """ # Special transaction to avoid recursive reindex calls and to # ignore errors if we need to rebuild a corrupt database. def _read_suppress_error(): try: if os.path.isfile(self._index_path): self._read_from_file(self._index_path) except CorruptDatabaseError as e: self._error = e self._data = {} transaction = WriteTransaction(self.lock, _read_suppress_error, self._write, _db_lock_timeout) with transaction: if self._error: tty.warn( "Spack database was corrupt. Will rebuild. Error was:", str(self._error)) self._error = None old_data = self._data try: self._data = {} # Ask the directory layout to traverse the filesystem. for spec in directory_layout.all_specs(): # Try to recover explicit value from old DB, but # default it to False if DB was corrupt. explicit = False if old_data is not None: old_info = old_data.get(spec.dag_hash()) if old_info is not None: explicit = old_info.explicit self._add(spec, directory_layout, explicit=explicit) self._check_ref_counts() except: # If anything explodes, restore old data, skip write. self._data = old_data raise
def _decrement_ref_count(self, spec):
    """Drop a reference to ``spec``; delete and recurse when it hits 0."""
    key = spec.dag_hash()
    record = self._data.get(key)
    if record is None:
        # TODO: print something here? DB is corrupt, but
        # not much we can do.
        return

    record.ref_count -= 1
    if record.ref_count == 0 and not record.installed:
        # Unreferenced, not installed: remove the record and release
        # the references it held on its own dependencies.
        del self._data[key]
        for dep in spec.dependencies(_tracked_deps):
            self._decrement_ref_count(dep)
def reindex(self, directory_layout): """Build database index from scratch based on a directory layout. Locks the DB if it isn't locked already. """ # Special transaction to avoid recursive reindex calls and to # ignore errors if we need to rebuild a corrupt database. def _read_suppress_error(): try: if os.path.isfile(self._index_path): self._read_from_file(self._index_path) except CorruptDatabaseError as e: self._error = e self._data = {} transaction = WriteTransaction( self.lock, _read_suppress_error, self._write, _db_lock_timeout) with transaction: if self._error: tty.warn( "Spack database was corrupt. Will rebuild. Error was:", str(self._error)) self._error = None old_data = self._data try: self._data = {} # Ask the directory layout to traverse the filesystem. for spec in directory_layout.all_specs(): # Try to recover explicit value from old DB, but # default it to False if DB was corrupt. explicit = False if old_data is not None: old_info = old_data.get(spec.dag_hash()) if old_info is not None: explicit = old_info.explicit self._add(spec, directory_layout, explicit=explicit) self._check_ref_counts() except: # If anything explodes, restore old data, skip write. self._data = old_data raise
def _increment_ref_count(self, spec, directory_layout=None):
    """Recursively examine dependencies and update their DB entries."""
    key = spec.dag_hash()
    if key not in self._data:
        installed = False
        path = None
        if directory_layout:
            path = directory_layout.path_for_spec(spec)
            installed = os.path.isdir(path)

        # No record yet: create one with ref_count 0; it is bumped below.
        self._data[key] = InstallRecord(spec.copy(), path, installed)

        # NOTE(review): the recursive call does not forward
        # ``directory_layout``, so transitive dependencies are recorded
        # with path=None / installed=False — confirm this is intended.
        # NOTE(review): spec.dependencies accessed as a dict (old Spec
        # API) — confirm against the Spec class in this revision.
        for dep in spec.dependencies.values():
            self._increment_ref_count(dep)

    self._data[key].ref_count += 1
def path_for_spec(self, spec):
    """Return absolute path from the root to a directory for the spec.

    Raises:
        SpackError: when the spec is recorded as installed in an
            upstream store (its path belongs to that store's layout).
    """
    _check_concrete(spec)

    # External packages are not laid out by us.
    if spec.external:
        return spec.external_path
    if self.check_upstream:
        upstream, record = spack.store.db.query_by_spec_hash(
            spec.dag_hash())
        if upstream:
            raise SpackError(
                "Internal error: attempted to call path_for_spec on"
                " upstream-installed package.")

    path = self.relative_path_for_spec(spec)
    # Sanity check: the relative path must not already be absolute
    # under root. NOTE(review): assert is stripped under ``python -O``.
    assert(not path.startswith(self.root))
    return os.path.join(self.root, path)
def relative_path_for_spec(self, spec):
    """Return the root-relative install directory for a concrete spec."""
    _check_concrete(spec)

    # Externals keep their own recorded location.
    if spec.external:
        return spec.external

    pkg_dir = "%s-%s-%s" % (
        spec.name, spec.version, spec.dag_hash(self.hash_len))
    compiler_part = "%s-%s" % (spec.compiler.name, spec.compiler.version)
    return join_path(spec.architecture, compiler_part, pkg_dir)
def test_recursive_upstream_dbs(tmpdir_factory, test_store, gen_mock_layout):
    """Chained upstream DBs (a -> b -> c) resolve each spec to the right
    database and keep consistent ref counts."""
    roots = [str(tmpdir_factory.mktemp(x)) for x in ['a', 'b', 'c']]
    layouts = [gen_mock_layout(x) for x in ['/ra/', '/rb/', '/rc/']]

    # x depends on y depends on z, via build+link deps.
    default = ('build', 'link')
    z = MockPackage('z', [], [])
    y = MockPackage('y', [z], [default])
    x = MockPackage('x', [y], [default])
    mock_repo = MockPackageMultiRepo([x, y, z])

    with spack.repo.swap(mock_repo):
        spec = spack.spec.Spec('x')
        spec.concretize()
        # Install each node into a different DB along the upstream chain.
        db_c = spack.database.Database(roots[2])
        db_c.add(spec['z'], layouts[2])

        db_b = spack.database.Database(roots[1], upstream_dbs=[db_c])
        db_b.add(spec['y'], layouts[1])

        db_a = spack.database.Database(roots[0], upstream_dbs=[db_b, db_c])
        db_a.add(spec['x'], layouts[0])

        # Reconstruct the upstream chain purely from install roots.
        upstream_dbs_from_scratch = (
            spack.store._construct_upstream_dbs_from_install_roots(
                [roots[1], roots[2]], _test=True))
        db_a_from_scratch = spack.database.Database(
            roots[0], upstream_dbs=upstream_dbs_from_scratch)

        # Each node resolves to the DB it was installed in.
        assert db_a_from_scratch.db_for_spec_hash(
            spec.dag_hash()) == (db_a_from_scratch)
        assert db_a_from_scratch.db_for_spec_hash(
            spec['y'].dag_hash()) == (upstream_dbs_from_scratch[0])
        assert db_a_from_scratch.db_for_spec_hash(
            spec['z'].dag_hash()) == (upstream_dbs_from_scratch[1])

        db_a_from_scratch._check_ref_counts()
        upstream_dbs_from_scratch[0]._check_ref_counts()
        upstream_dbs_from_scratch[1]._check_ref_counts()

        # Relatives are found across database boundaries.
        assert (db_a_from_scratch.installed_relatives(spec) == set(
            spec.traverse(root=False)))
        assert (db_a_from_scratch.installed_relatives(
            spec['z'], direction='parents') == set([spec, spec['y']]))
def match_downloaded_specs(pkgs, allow_multiple_matches=False, force=False): """Returns a list of specs matching the not necessarily concretized specs given from cli Args: specs: list of specs to be matched against buildcaches on mirror allow_multiple_matches : if True multiple matches are admitted Return: list of specs """ # List of specs that match expressions given via command line specs_from_cli = [] has_errors = False specs = bindist.get_specs(force) for pkg in pkgs: matches = [] tty.msg("buildcache spec(s) matching %s \n" % pkg) for spec in sorted(specs): if pkg.startswith('/'): pkghash = pkg.replace('/', '') if spec.dag_hash().startswith(pkghash): matches.append(spec) else: if spec.satisfies(pkg): matches.append(spec) # For each pkg provided, make sure it refers to only one package. # Fail and ask user to be unambiguous if it doesn't if not allow_multiple_matches and len(matches) > 1: tty.error('%s matches multiple downloaded packages:' % pkg) for match in matches: tty.msg('"%s"' % match.format()) has_errors = True # No downloaded package matches the query if len(matches) == 0: tty.error('%s does not match any downloaded packages.' % pkg) has_errors = True specs_from_cli.extend(matches) if has_errors: tty.die('use one of the matching specs above') return specs_from_cli
def install(self, user_spec, concrete_spec=None, **install_args):
    """Install a single spec into an environment.

    This will automatically concretize the single spec, but it won't
    affect other as-yet unconcretized specs.

    Args:
        user_spec: abstract spec string or Spec to install
        concrete_spec: optional pre-concretized spec to use instead of
            concretizing ``user_spec`` here
        **install_args: forwarded to ``Package.do_install``
    """
    spec = Spec(user_spec)

    if self.add(spec):
        # Newly added to the environment: concretize and record it.
        concrete = concrete_spec if concrete_spec else spec.concretized()
        self._add_concrete_spec(spec, concrete)
    else:
        # spec might be in the user_specs, but not installed.
        # NOTE(review): ``next`` with no default raises StopIteration if
        # no user spec has a matching name — confirm add() guarantees one.
        spec = next(s for s in self.user_specs if s.name == spec.name)
        concrete = self.specs_by_hash.get(spec.dag_hash())
        if not concrete:
            concrete = spec.concretized()
            self._add_concrete_spec(spec, concrete)

    concrete.package.do_install(**install_args)
def display_specs_as_json(specs, deps=False):
    """Convert specs to a list of json records."""
    emitted = set()
    records = []
    for spec in specs:
        spec_hash = spec.dag_hash()
        if spec_hash in emitted:
            continue
        records.append(spec.node_dict_with_hashes())
        emitted.add(spec_hash)

        if not deps:
            continue
        # Emit every not-yet-seen node of this spec's DAG as well.
        for dep in spec.traverse():
            dep_hash = dep.dag_hash()
            if dep_hash in emitted:
                continue
            records.append(dep.node_dict_with_hashes())
            emitted.add(dep_hash)

    sjson.dump(records, sys.stdout)
def _construct_entry_from_directory_layout(self, directory_layout,
                                           old_data, spec,
                                           deprecator=None):
    """Rebuild one DB record from an on-disk prefix during reindex.

    Recovers ``explicit`` and the installation time from ``old_data``
    when available; otherwise defaults conservatively.
    """
    # Try to recover explicit value from old DB, but
    # default it to True if DB was corrupt. This is
    # just to be conservative in case a command like
    # "autoremove" is run by the user after a reindex.
    tty.debug('RECONSTRUCTING FROM SPEC.YAML: {0}'.format(spec))
    explicit = True
    inst_time = os.stat(spec.prefix).st_ctime
    if old_data is not None:
        old_info = old_data.get(spec.dag_hash())
        if old_info is not None:
            explicit = old_info.explicit
            inst_time = old_info.installation_time

    extra_args = {'explicit': explicit, 'installation_time': inst_time}
    self._add(spec, directory_layout, **extra_args)
    if deprecator:
        self._deprecate(spec, deprecator)
def install_all(self, args=None):
    """Install all concretized specs in an environment.

    Args:
        args: optional parsed CLI namespace; install-related options
            are translated into ``Package.do_install`` kwargs.
    """
    for concretized_hash in self.concretized_order:
        spec = self.specs_by_hash[concretized_hash]

        # Parse cli arguments and construct a dictionary
        # that will be passed to Package.do_install API
        kwargs = dict()
        if args:
            spack.cmd.install.update_kwargs_from_args(args, kwargs)

        self._install(spec, **kwargs)

        if not spec.external:
            # Link the resulting log file into logs dir
            build_log_link = os.path.join(
                self.log_path,
                '%s-%s.log' % (spec.name, spec.dag_hash(7)))
            # lexists so a stale broken symlink is also replaced.
            if os.path.lexists(build_log_link):
                os.remove(build_log_link)
            os.symlink(spec.package.build_log_path, build_log_link)

    self.regenerate_views()
def _read_from_yaml(self, stream):
    """
    Fill database from YAML, do not maintain old data
    Translate the spec portions from node-dict form to spec form

    Does not do any locking.
    """
    try:
        # NOTE(review): ``basestring`` is Python 2 only; this revision
        # appears to predate Python 3 support.
        # NOTE(review): yaml.load without an explicit Loader is unsafe
        # on untrusted input — acceptable only because the index file
        # is written by Spack itself.
        if isinstance(stream, basestring):
            with open(stream, 'r') as f:
                yfile = yaml.load(f)
        else:
            yfile = yaml.load(stream)

    except MarkedYAMLError as e:
        raise SpackYAMLError("error parsing YAML database:", str(e))

    # Empty file: nothing to load.
    if yfile is None:
        return

    def check(cond, msg):
        if not cond:
            raise CorruptDatabaseError(self._index_path, msg)

    check('database' in yfile, "No 'database' attribute in YAML.")

    # High-level file checks
    db = yfile['database']
    check('installs' in db, "No 'installs' in YAML DB.")
    check('version' in db, "No 'version' in YAML DB.")

    # TODO: better version checking semantics.
    version = Version(db['version'])
    if version != _db_version:
        raise InvalidDatabaseVersionError(_db_version, version)

    # Iterate through database and check each record.
    installs = db['installs']
    data = {}
    for hash_key, rec in installs.items():
        try:
            # This constructs a spec DAG from the list of all installs
            spec = self._read_spec_from_yaml(hash_key, installs)

            # Validate the spec by ensuring the stored and actual
            # hashes are the same.
            spec_hash = spec.dag_hash()
            if not spec_hash == hash_key:
                tty.warn(
                    "Hash mismatch in database: %s -> spec with hash %s"
                    % (hash_key, spec_hash))
                continue  # TODO: is skipping the right thing to do?

            # Insert the brand new spec in the database. Each
            # spec has its own copies of its dependency specs.
            # TODO: would a more immmutable spec implementation simplify
            # this?
            data[hash_key] = InstallRecord.from_dict(spec, rec)

        except Exception as e:
            # NOTE(review): "reecord" typo in the warning below is in a
            # runtime string; left untouched here, fix separately.
            tty.warn("Invalid database reecord:",
                     "file: %s" % self._index_path,
                     "hash: %s" % hash_key,
                     "cause: %s" % str(e))
            raise

    self._data = data
def gray_hash(spec, length):
    """Return ``spec``'s truncated dag hash rendered in gray."""
    hash_str = spec.dag_hash(length)
    return colorize('@K{%s}' % hash_str)
def gray_hash(spec, length):
    """Gray-colorized truncated hash; dashes when spec is not concrete."""
    if spec.concrete:
        text = spec.dag_hash(length)
    else:
        # Non-concrete specs have no hash yet: show a placeholder.
        text = '-' * length
    return colorize('@K{%s}' % text)
def missing(self, spec):
    """Whether the DB records ``spec`` but it is not installed."""
    with self.read_transaction():
        record = self._data.get(spec.dag_hash())
        return record is not None and not record.installed
def reindex(self, directory_layout):
    """Build database index from scratch based on a directory layout.

    Locks the DB if it isn't locked already.
    """
    # Special transaction to avoid recursive reindex calls and to
    # ignore errors if we need to rebuild a corrupt database.
    def _read_suppress_error():
        try:
            if os.path.isfile(self._index_path):
                self._read_from_file(self._index_path)
        except CorruptDatabaseError as e:
            self._error = e
            self._data = {}

    transaction = WriteTransaction(
        self.lock, _read_suppress_error, self._write
    )

    with transaction:
        if self._error:
            tty.warn(
                "Spack database was corrupt. Will rebuild. Error was:",
                str(self._error)
            )
            self._error = None

        # Read first the `spec.yaml` files in the prefixes. They should be
        # considered authoritative with respect to DB reindexing, as
        # entries in the DB may be corrupted in a way that still makes
        # them readable. If we considered DB entries authoritative
        # instead, we would perpetuate errors over a reindex.

        old_data = self._data
        try:
            # Initialize data in the reconstructed DB
            self._data = {}

            # Start inspecting the installed prefixes
            processed_specs = set()

            for spec in directory_layout.all_specs():
                # Try to recover explicit value from old DB, but
                # default it to True if DB was corrupt. This is
                # just to be conservative in case a command like
                # "autoremove" is run by the user after a reindex.
                tty.debug(
                    'RECONSTRUCTING FROM SPEC.YAML: {0}'.format(spec))
                explicit = True
                inst_time = os.stat(spec.prefix).st_ctime
                if old_data is not None:
                    old_info = old_data.get(spec.dag_hash())
                    if old_info is not None:
                        explicit = old_info.explicit
                        inst_time = old_info.installation_time
                extra_args = {
                    'explicit': explicit,
                    'installation_time': inst_time
                }
                self._add(spec, directory_layout, **extra_args)

                processed_specs.add(spec)

            for key, entry in old_data.items():
                # We already took care of this spec using
                # `spec.yaml` from its prefix.
                if entry.spec in processed_specs:
                    msg = 'SKIPPING RECONSTRUCTION FROM OLD DB: {0}'
                    msg += ' [already reconstructed from spec.yaml]'
                    tty.debug(msg.format(entry.spec))
                    continue

                # If we arrived here it very likely means that
                # we have external specs that are not dependencies
                # of other specs. This may be the case for externally
                # installed compilers or externally installed
                # applications.
                tty.debug(
                    'RECONSTRUCTING FROM OLD DB: {0}'.format(entry.spec))
                try:
                    layout = spack.store.layout
                    if entry.spec.external:
                        layout = None
                        install_check = True
                    else:
                        install_check = layout.check_installed(entry.spec)

                    if install_check:
                        kwargs = {
                            'spec': entry.spec,
                            'directory_layout': layout,
                            'explicit': entry.explicit,
                            'installation_time': entry.installation_time  # noqa: E501
                        }
                        self._add(**kwargs)
                        processed_specs.add(entry.spec)
                except Exception as e:
                    # Something went wrong, so the spec was not restored
                    # from old data
                    # NOTE(review): ``e.message`` is Python 2 only; on
                    # Python 3 this raises AttributeError inside the
                    # handler and masks the original error — should be
                    # str(e). Confirm which interpreter this targets.
                    tty.debug(e.message)
                    pass

            self._check_ref_counts()
        except BaseException:
            # If anything explodes, restore old data, skip write.
            self._data = old_data
            raise
def specs_by_hash(self):
    """Return a dict mapping dag hash -> spec for all specs here."""
    return {installed.dag_hash(): installed
            for installed in self.all_specs()}
def _add(
        self,
        spec,
        directory_layout=None,
        explicit=False,
        installation_time=None
):
    """Add an install record for this spec to the database.

    Assumes spec is installed in ``layout.path_for_spec(spec)``.

    Also ensures dependencies are present and updated in the DB as
    either installed or missing.

    Args:
        spec: spec to be added
        directory_layout: layout of the spec installation
        explicit (bool): a spec that was installed following a specific
            user request is marked as explicit. If instead it was
            pulled-in as a dependency of a user requested spec it's
            considered implicit.
        installation_time: date and time of installation; defaults to
            the current time when not given.
    """
    if not spec.concrete:
        raise NonConcreteSpecAddError(
            "Specs added to DB must be concrete.")

    # Retrieve optional arguments
    installation_time = installation_time or _now()

    # Ensure every tracked dependency has a record first; they inherit
    # this spec's installation time but are implicit.
    for dep in spec.dependencies(_tracked_deps):
        dkey = dep.dag_hash()
        if dkey not in self._data:
            extra_args = {
                'explicit': False,
                'installation_time': installation_time
            }
            self._add(dep, directory_layout, **extra_args)

    key = spec.dag_hash()
    if key not in self._data:
        # Externals have no layout prefix; treat them as installed.
        installed = bool(spec.external)
        path = None
        if not spec.external and directory_layout:
            path = directory_layout.path_for_spec(spec)
            try:
                directory_layout.check_installed(spec)
                installed = True
            except DirectoryLayoutError as e:
                # Keep the record, but flag the prefix as missing.
                tty.warn(
                    'Dependency missing due to corrupt install directory:',
                    path, str(e))

        # Create a new install record with no deps initially.
        new_spec = spec.copy(deps=False)
        extra_args = {
            'explicit': explicit,
            'installation_time': installation_time
        }
        self._data[key] = InstallRecord(
            new_spec, path, installed, ref_count=0, **extra_args
        )

        # Connect dependencies from the DB to the new copy.
        for name, dep in iteritems(spec.dependencies_dict(_tracked_deps)):
            dkey = dep.spec.dag_hash()
            new_spec._add_dependency(self._data[dkey].spec, dep.deptypes)
            self._data[dkey].ref_count += 1

        # Mark concrete once everything is built, and preserve
        # the original hash of concrete specs.
        new_spec._mark_concrete()
        new_spec._hash = key
    else:
        # If it is already there, mark it as installed.
        self._data[key].installed = True
        self._data[key].explicit = explicit