def _write_to_file(self, stream):
    """Write out the database to a JSON file.

    This function does not do any locking or transactions.

    Args:
        stream: writable file-like object handed to ``sjson.dump``.

    Raises:
        SpackYAMLError: if serialization of the database fails.
    """
    # map from per-spec hash code to installation record.
    installs = dict((k, v.to_dict()) for k, v in self._data.items())

    # database includes installation list and version.
    #
    # NOTE: this DB version does not handle multiple installs of
    # the same spec well. If there are 2 identical specs with
    # different paths, it can't differentiate.
    # TODO: fix this before we support multiple install locations.
    database = {
        'database': {
            'installs': installs,
            'version': str(_db_version)
        }
    }

    try:
        sjson.dump(database, stream)
    except YAMLError as e:
        # We serialize JSON here (sjson), so say so in the diagnostic;
        # the exception type stays SpackYAMLError for existing callers.
        raise syaml.SpackYAMLError(
            "error writing JSON database:", str(e))
def from_yaml(stream, name=None):
    """Build a ``Mirror`` from a YAML stream.

    Args:
        stream: YAML input to parse.
        name: optional mirror name, forwarded to ``Mirror.from_dict``.

    Returns:
        The ``Mirror`` described by the stream.

    Raises:
        SpackYAMLError: if the stream is not valid YAML.
    """
    try:
        parsed = syaml.load(stream)
        mirror = Mirror.from_dict(parsed, name)
    except yaml_error.MarkedYAMLError as e:
        raise six.raise_from(
            syaml.SpackYAMLError("error parsing YAML mirror:", str(e)),
            e,
        )
    return mirror
def process_config_path(path):
    """Split a colon-separated config path into override-annotated parts.

    A component followed by ``::`` (or a trailing ``:``) is an *override*
    indicator: the component is wrapped in ``syaml.syaml_str`` and its
    ``override`` attribute is set to ``True``.

    Args:
        path (str): colon-separated config path, e.g. ``'config::build_jobs'``.

    Returns:
        list: the path components, override components as ``syaml_str``.

    Raises:
        SpackYAMLError: on a leading ``:`` or a second override indicator.
    """
    # Keep the caller's full path for diagnostics: the loop below consumes
    # ``path`` via partition(), so formatting ``path`` later would report
    # only the unconsumed remainder (the old, misleading behavior).
    original = path
    result = []
    if path.startswith(':'):
        raise syaml.SpackYAMLError("Illegal leading `:' in path `{0}'".
                                   format(original), '')
    seen_override_in_path = False
    while path:
        front, sep, path = path.partition(':')
        # An override is a trailing ':' (sep with nothing after) or '::'.
        if (sep and not path) or path.startswith(':'):
            if seen_override_in_path:
                raise syaml.SpackYAMLError("Meaningless second override"
                                           " indicator `::' in path `{0}'".
                                           format(original), '')
            path = path.lstrip(':')
            front = syaml.syaml_str(front)
            front.override = True
            seen_override_in_path = True
        result.append(front)
    return result
def _read_from_file(self, stream, format='json'):
    """Fill database from file, do not maintain old data.

    Translate the spec portions from node-dict form to spec form.

    Does not do any locking.

    Args:
        stream: open file object, or a path (str) that will be opened.
        format (str): ``'json'`` or ``'yaml'``; selects the parser.

    Raises:
        ValueError: if ``format`` is not ``'json'`` or ``'yaml'``.
        CorruptDatabaseError: if the file cannot be parsed or fails
            structural checks.
        InvalidDatabaseVersionError: if the on-disk version is newer
            than this Spack's ``_db_version``.
    """
    # Choose the parser from the requested format.
    if format.lower() == 'json':
        load = sjson.load
    elif format.lower() == 'yaml':
        load = syaml.load
    else:
        raise ValueError("Invalid database format: %s" % format)

    try:
        # ``stream`` may be either a path or an already-open file.
        if isinstance(stream, string_types):
            with open(stream, 'r') as f:
                fdata = load(f)
        else:
            fdata = load(stream)
    except MarkedYAMLError as e:
        raise syaml.SpackYAMLError("error parsing YAML database:", str(e))
    except Exception as e:
        raise CorruptDatabaseError("error parsing database:", str(e))

    # Empty file: nothing to load, leave current state untouched.
    if fdata is None:
        return

    def check(cond, msg):
        # Small helper: structural assertion over the parsed file.
        if not cond:
            raise CorruptDatabaseError(
                "Spack database is corrupt: %s" % msg, self._index_path)

    check('database' in fdata, "No 'database' attribute in YAML.")

    # High-level file checks
    db = fdata['database']
    check('installs' in db, "No 'installs' in YAML DB.")
    check('version' in db, "No 'version' in YAML DB.")

    installs = db['installs']

    # TODO: better version checking semantics.
    version = Version(db['version'])
    if version > _db_version:
        # File was written by a newer Spack; refuse to guess at it.
        raise InvalidDatabaseVersionError(_db_version, version)
    elif version < _db_version:
        # Older on-disk version: rebuild the index from the install
        # layout, then re-derive ``installs`` from the refreshed data.
        self.reindex(spack.store.layout)
        installs = dict((k, v.to_dict()) for k, v in self._data.items())

    def invalid_record(hash_key, error):
        # Wrap any per-record failure in a CorruptDatabaseError that
        # identifies the offending hash and the underlying cause.
        msg = ("Invalid record in Spack database: "
               "hash: %s, cause: %s: %s")
        msg %= (hash_key, type(error).__name__, str(error))
        raise CorruptDatabaseError(msg, self._index_path)

    # Build up the database in three passes:
    #
    # 1. Read in all specs without dependencies.
    # 2. Hook dependencies up among specs.
    # 3. Mark all specs concrete.
    #
    # The database is built up so that ALL specs in it share nodes
    # (i.e., its specs are a true Merkle DAG, unlike most specs.)

    # Pass 1: Iterate through database and build specs w/o dependencies
    data = {}
    for hash_key, rec in installs.items():
        try:
            # This constructs a spec DAG from the list of all installs
            spec = self._read_spec_from_dict(hash_key, installs)

            # Insert the brand new spec in the database.  Each
            # spec has its own copies of its dependency specs.
            # TODO: would a more immutable spec implementation simplify
            # this?
            data[hash_key] = InstallRecord.from_dict(spec, rec)
        except Exception as e:
            invalid_record(hash_key, e)

    # Pass 2: Assign dependencies once all specs are created.
    for hash_key in data:
        try:
            self._assign_dependencies(hash_key, installs, data)
        except Exception as e:
            invalid_record(hash_key, e)

    # Pass 3: Mark all specs concrete.  Specs representing real
    # installations must be explicitly marked.
    # We do this *after* all dependencies are connected because if we
    # do it *while* we're constructing specs, it causes hashes to be
    # cached prematurely.
    for hash_key, rec in data.items():
        rec.spec._mark_concrete()

    self._data = data
def from_yaml(stream, name=None):
    """Build a ``MirrorCollection`` from a YAML stream.

    Args:
        stream: YAML input describing a collection of mirrors.
        name: accepted for signature compatibility with
            ``Mirror.from_yaml``; not used here.

    Returns:
        The ``MirrorCollection`` described by the stream.

    Raises:
        SpackYAMLError: if the stream is not valid YAML.
    """
    try:
        data = syaml.load(stream)
        return MirrorCollection(data)
    except yaml_error.MarkedYAMLError as e:
        # Fix copy-pasted diagnostic (said "YAML spec") and chain the
        # cause, matching the sibling Mirror.from_yaml.
        raise six.raise_from(
            syaml.SpackYAMLError(
                "error parsing YAML mirror collection:", str(e)),
            e,
        )