Example #1
File: database.py  Project: LLNL/spack
    def _write_to_file(self, stream):
        """Write out the databsae to a JSON file.

        This function does not do any locking or transactions.
        """
        # map from per-spec hash code to installation record.
        installs = dict((k, v.to_dict()) for k, v in self._data.items())

        # database includes installation list and version.

        # NOTE: this DB version does not handle multiple installs of
        # the same spec well.  If there are 2 identical specs with
        # different paths, it can't differentiate.
        # TODO: fix this before we support multiple install locations.
        database = {
            'database': {
                'installs': installs,
                'version': str(_db_version)
            }
        }

        try:
            sjson.dump(database, stream)
        except (TypeError, ValueError) as e:
            raise sjson.SpackJSONError(
                "error writing JSON database:", str(e))
Example #2
File: environment.py  Project: LLNL/spack
    def write(self):
        """Writes an in-memory environment to its location on disk.

        This will also write out package files for each newly concretized spec.
        """
        # ensure path in var/spack/environments
        fs.mkdirp(self.path)

        if self.specs_by_hash:
            # ensure the prefix/.env directory exists
            fs.mkdirp(self.env_subdir_path)

            for spec in self.new_specs:
                for dep in spec.traverse():
                    if not dep.concrete:
                        raise ValueError('specs passed to environment.write() '
                                         'must be concrete!')

                    root = os.path.join(self.repos_path, dep.namespace)
                    repo = spack.repo.create_or_construct(root, dep.namespace)
                    pkg_dir = repo.dirname_for_package_name(dep.name)

                    fs.mkdirp(pkg_dir)
                    spack.repo.path.dump_provenance(dep, pkg_dir)
            self.new_specs = []

            # write the lock file last
            with fs.write_tmp_and_move(self.lock_path) as f:
                sjson.dump(self._to_lockfile_dict(), stream=f)
        else:
            if os.path.exists(self.lock_path):
                os.unlink(self.lock_path)

        # invalidate _repo cache
        self._repo = None

        # put the new user specs in the YAML
        yaml_spec_list = config_dict(self.yaml).setdefault('specs', [])
        yaml_spec_list[:] = [str(s) for s in self.user_specs]

        # if all that worked, write out the manifest file at the top level
        with fs.write_tmp_and_move(self.manifest_path) as f:
            _write_yaml(self.yaml, f)
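Both the lock file and the manifest go through fs.write_tmp_and_move, a write-to-a-temp-file-then-rename pattern that keeps readers from ever seeing a half-written file. A self-contained sketch of that pattern (an illustration of the technique, not Spack's actual implementation):

import contextlib
import os
import tempfile

@contextlib.contextmanager
def write_tmp_and_move(filename):
    """Write to a temp file in the target's directory, then atomically
    rename it over the target once the write has completed."""
    dirname = os.path.dirname(os.path.abspath(filename))
    fd, tmp_path = tempfile.mkstemp(dir=dirname, suffix='.tmp')
    try:
        with os.fdopen(fd, 'w') as f:
            yield f
        os.replace(tmp_path, filename)  # atomic rename
    except BaseException:
        os.unlink(tmp_path)
        raise

Keeping the temp file in the same directory matters: os.replace is only atomic within a single filesystem.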
Example #3
def test_ordered_read_not_required_for_consistent_dag_hash(
        config, builtin_mock
):
    """Make sure ordered serialization isn't required to preserve hashes.

    For consistent hashes, we require that YAML and json documents
    have their keys serialized in a deterministic order. However, we
    don't want to require them to be serialized in order. This
    ensures that is not required.
    """
    specs = ['mpileaks ^zmpi', 'dttop', 'dtuse']
    for spec in specs:
        spec = Spec(spec)
        spec.concretize()

        #
        # Dict & corresponding YAML & JSON from the original spec.
        #
        spec_dict = spec.to_dict()
        spec_yaml = spec.to_yaml()
        spec_json = spec.to_json()

        #
        # Make a spec with reversed OrderedDicts for every
        # OrderedDict in the original.
        #
        reversed_spec_dict = reverse_all_dicts(spec.to_dict())

        #
        # Dump to YAML and JSON
        #
        yaml_string = syaml.dump(spec_dict, default_flow_style=False)
        reversed_yaml_string = syaml.dump(reversed_spec_dict,
                                          default_flow_style=False)
        json_string = sjson.dump(spec_dict)
        reversed_json_string = sjson.dump(reversed_spec_dict)

        #
        # Do many consistency checks
        #

        # spec yaml is ordered like the spec dict
        assert yaml_string == spec_yaml
        assert json_string == spec_json

        # reversed string is different from the original, so it
        # *would* generate a different hash
        assert yaml_string != reversed_yaml_string
        assert json_string != reversed_json_string

        # build specs from the "wrongly" ordered data
        round_trip_yaml_spec = Spec.from_yaml(yaml_string)
        round_trip_json_spec = Spec.from_json(json_string)
        round_trip_reversed_yaml_spec = Spec.from_yaml(
            reversed_yaml_string
        )
        round_trip_reversed_json_spec = Spec.from_json(
            reversed_json_string
        )

        # TODO: remove this when build deps are in provenance.
        spec = spec.copy(deps=('link', 'run'))
        # specs are equal to the original
        assert spec == round_trip_yaml_spec
        assert spec == round_trip_json_spec
        assert spec == round_trip_reversed_yaml_spec
        assert spec == round_trip_reversed_json_spec
        assert round_trip_yaml_spec == round_trip_reversed_yaml_spec
        assert round_trip_json_spec == round_trip_reversed_json_spec
        # dag_hashes are equal
        assert spec.dag_hash() == round_trip_yaml_spec.dag_hash()
        assert spec.dag_hash() == round_trip_json_spec.dag_hash()
        assert spec.dag_hash() == round_trip_reversed_yaml_spec.dag_hash()
        assert spec.dag_hash() == round_trip_reversed_json_spec.dag_hash()
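The reverse_all_dicts helper is not shown in this snippet. A plausible reconstruction, assuming it recursively rebuilds every mapping with its keys in reversed insertion order (the name matches the call above; the body here is a guess):

from collections import OrderedDict

def reverse_all_dicts(data):
    """Rebuild every mapping as an OrderedDict with its keys in reversed
    insertion order, descending through nested dicts and lists."""
    if isinstance(data, dict):
        return OrderedDict((k, reverse_all_dicts(v))
                           for k, v in reversed(list(data.items())))
    elif isinstance(data, (list, tuple)):
        return type(data)(reverse_all_dicts(elt) for elt in data)
    return data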
Example #4
    def _save_distutil_vars(self, prefix):
        """
        Run before changing automatically generated contents of the
        _sysconfigdata.py, which is used by distutils to figure out what
        executables to use while compiling and linking extensions. If we build
        extensions with spack those executables should be spack's wrappers.
        Spack partially covers this by setting environment variables that
        are also accounted for by distutils. Currently there is one more known
        variable that must be set, which is LDSHARED, so the method saves its
        autogenerated value to pass it to the dependent package's setup script.
        """

        self._distutil_vars = {}

        input_filename = None
        for filename in [join_path(lib_dir,
                                   'python{0}'.format(self.version.up_to(2)),
                                   '_sysconfigdata.py')
                         for lib_dir in [prefix.lib, prefix.lib64]]:
            if os.path.isfile(filename):
                input_filename = filename
                break

        if not input_filename:
            return

        input_dict = None
        try:
            with open(input_filename) as input_file:
                match = re.search(r'build_time_vars\s*=\s*(?P<dict>{.*})',
                                  input_file.read(),
                                  flags=re.DOTALL)

                if match:
                    input_dict = ast.literal_eval(match.group('dict'))
        except (IOError, SyntaxError):
            pass

        if not input_dict:
            tty.warn('Failed to find \'build_time_vars\' dictionary in file '
                     '\'%s\'. This might cause the extensions that are '
                     'installed with distutils to call compilers directly '
                     'avoiding Spack\'s wrappers.' % input_filename)
            return

        for var_name in Python._DISTUTIL_VARS_TO_SAVE:
            if var_name in input_dict:
                self._distutil_vars[var_name] = input_dict[var_name]
            else:
                tty.warn('Failed to find key \'%s\' in \'build_time_vars\' '
                         'dictionary in file \'%s\'. This might cause the '
                         'extensions that are installed with distutils to '
                         'call compilers directly avoiding Spack\'s wrappers.'
                         % (var_name, input_filename))

        if len(self._distutil_vars) > 0:
            output_filename = None
            try:
                output_filename = join_path(
                    spack.store.layout.metadata_path(self.spec),
                    Python._DISTUTIL_CACHE_FILENAME)
                with open(output_filename, 'w') as output_file:
                    sjson.dump(self._distutil_vars, output_file)
            except Exception:
                tty.warn('Failed to save metadata for distutils. This might '
                         'cause the extensions that are installed with '
                         'distutils to call compilers directly avoiding '
                         'Spack\'s wrappers.')
                # We make the cache empty if we failed to save it to file
                # to provide the same behaviour as in the case when the cache
                # is initialized by the method load_distutils_data().
                self._distutil_vars = {}
                if output_filename:
                    force_remove(output_filename)
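The extraction step at the heart of this method, isolated: build_time_vars is a plain dict literal inside _sysconfigdata.py, so a DOTALL regex capture followed by ast.literal_eval recovers it without importing (or executing) the module. A self-contained toy version with fabricated file contents:

import ast
import re

# Fabricated stand-in for the text of _sysconfigdata.py.
source = """
# system configuration generated and used by the sysconfig module
build_time_vars = {
    'CC': 'gcc',
    'LDSHARED': 'gcc -shared',
}
"""

match = re.search(r'build_time_vars\s*=\s*(?P<dict>{.*})',
                  source, flags=re.DOTALL)
if match:
    build_time_vars = ast.literal_eval(match.group('dict'))
    assert build_time_vars['LDSHARED'] == 'gcc -shared'

Because literal_eval accepts only literals, a malformed or booby-trapped file raises an exception instead of running code, which is why the method prefers it over importing the module.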
Example #5
File: mirror.py  Project: mcuma/spack
    def to_json(self, stream=None):
        return sjson.dump(self.to_dict(True), stream)
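Note the stream=None default: judging from the call sites in these examples, sjson.dump folds the json.dump/json.dumps split into one function, returning the serialized string when no stream is given and writing to the stream otherwise. The convention, sketched with the standard library:

import io
import json

def dump(data, stream=None):
    """Return a JSON string if stream is None; otherwise write to stream."""
    if stream is None:
        return json.dumps(data)
    json.dump(data, stream)

assert dump({'a': 1}) == '{"a": 1}'
buf = io.StringIO()
dump({'a': 1}, buf)
assert buf.getvalue() == '{"a": 1}'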
Example #6
def write_test_suite_file(suite):
    """Write the test suite to its lock file."""
    with open(suite.stage.join(test_suite_filename), 'w') as f:
        sjson.dump(suite.to_dict(), stream=f)
Example #7
    def to_json(self, stream):
        sjson.dump({'patches': self.index}, stream)
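A plausible read-side counterpart for a writer like this, shown on a toy class so the round trip is runnable (PatchIndex and its fields are hypothetical, not the project's actual class):

import io
import json  # stand-in for sjson

class PatchIndex(object):
    """Toy index that round-trips through a top-level 'patches' key."""
    def __init__(self, index=None):
        self.index = index or {}

    def to_json(self, stream):
        json.dump({'patches': self.index}, stream)

    @classmethod
    def from_json(cls, stream):
        return cls(json.load(stream)['patches'])

buf = io.StringIO()
PatchIndex({'sha256abc': {'owner': 'builtin.mpich'}}).to_json(buf)
buf.seek(0)
assert PatchIndex.from_json(buf).index == {'sha256abc': {'owner': 'builtin.mpich'}}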
Example #8
File: package.py  Project: wdmapp/spack
    def _save_distutil_vars(self):
        """
        Run before changing automatically generated contents of the
        _sysconfigdata.py, which is used by distutils to figure out what
        executables to use while compiling and linking extensions. If we build
        extensions with spack those executables should be spack's wrappers.
        Spack partially covers this by setting environment variables that
        are also accounted for by distutils. Currently there is one more known
        variable that must be set, which is LDSHARED, so the method saves its
        autogenerated value to pass it to the dependent package's setup script.
        """

        self._distutil_vars = {}

        input_filename = self.get_sysconfigdata_name()
        input_dict = None
        try:
            with open(input_filename) as input_file:
                match = re.search(r'build_time_vars\s*=\s*(?P<dict>{.*})',
                                  input_file.read(),
                                  flags=re.DOTALL)

                if match:
                    input_dict = ast.literal_eval(match.group('dict'))
        except (IOError, SyntaxError):
            pass

        if not input_dict:
            tty.warn("Failed to find 'build_time_vars' dictionary in file "
                     "'%s'. This might cause the extensions that are "
                     "installed with distutils to call compilers directly "
                     "avoiding Spack's wrappers." % input_filename)
            return

        for var_name in Python._DISTUTIL_VARS_TO_SAVE:
            if var_name in input_dict:
                self._distutil_vars[var_name] = input_dict[var_name]
            else:
                tty.warn("Failed to find key '%s' in 'build_time_vars' "
                         "dictionary in file '%s'. This might cause the "
                         "extensions that are installed with distutils to "
                         "call compilers directly avoiding Spack's wrappers." %
                         (var_name, input_filename))

        if len(self._distutil_vars) > 0:
            output_filename = None
            try:
                output_filename = join_path(
                    spack.store.layout.metadata_path(self.spec),
                    Python._DISTUTIL_CACHE_FILENAME)
                with open(output_filename, 'w') as output_file:
                    sjson.dump(self._distutil_vars, output_file)
            except Exception:
                tty.warn("Failed to save metadata for distutils. This might "
                         "cause the extensions that are installed with "
                         "distutils to call compilers directly avoiding "
                         "Spack's wrappers.")
                # We make the cache empty if we failed to save it to file
                # to provide the same behaviour as in the case when the cache
                # is initialized by the method load_distutils_data().
                self._distutil_vars = {}
                if output_filename:
                    force_remove(output_filename)
Example #9
def test_ordered_read_not_required_for_consistent_dag_hash(
        config, mock_packages
):
    """Make sure ordered serialization isn't required to preserve hashes.

    For consistent hashes, we require that YAML and json documents
    have their keys serialized in a deterministic order. However, we
    don't want to require them to be serialized in order. This
    ensures that is not required.
    """
    specs = ['mpileaks ^zmpi', 'dttop', 'dtuse']
    for spec in specs:
        spec = Spec(spec)
        spec.concretize()

        #
        # Dict & corresponding YAML & JSON from the original spec.
        #
        spec_dict = spec.to_dict()
        spec_yaml = spec.to_yaml()
        spec_json = spec.to_json()

        #
        # Make a spec with reversed OrderedDicts for every
        # OrderedDict in the original.
        #
        reversed_spec_dict = reverse_all_dicts(spec.to_dict())

        #
        # Dump to YAML and JSON
        #
        yaml_string = syaml.dump(spec_dict, default_flow_style=False)
        reversed_yaml_string = syaml.dump(reversed_spec_dict,
                                          default_flow_style=False)
        json_string = sjson.dump(spec_dict)
        reversed_json_string = sjson.dump(reversed_spec_dict)

        #
        # Do many consistency checks
        #

        # spec yaml is ordered like the spec dict
        assert yaml_string == spec_yaml
        assert json_string == spec_json

        # reversed string is different from the original, so it
        # *would* generate a different hash
        assert yaml_string != reversed_yaml_string
        assert json_string != reversed_json_string

        # build specs from the "wrongly" ordered data
        round_trip_yaml_spec = Spec.from_yaml(yaml_string)
        round_trip_json_spec = Spec.from_json(json_string)
        round_trip_reversed_yaml_spec = Spec.from_yaml(
            reversed_yaml_string
        )
        round_trip_reversed_json_spec = Spec.from_json(
            reversed_json_string
        )

        # TODO: remove this when build deps are in provenance.
        spec = spec.copy(deps=('link', 'run'))
        # specs are equal to the original
        assert spec == round_trip_yaml_spec
        assert spec == round_trip_json_spec
        assert spec == round_trip_reversed_yaml_spec
        assert spec == round_trip_reversed_json_spec
        assert round_trip_yaml_spec == round_trip_reversed_yaml_spec
        assert round_trip_json_spec == round_trip_reversed_json_spec
        # dag_hashes are equal
        assert spec.dag_hash() == round_trip_yaml_spec.dag_hash()
        assert spec.dag_hash() == round_trip_json_spec.dag_hash()
        assert spec.dag_hash() == round_trip_reversed_yaml_spec.dag_hash()
        assert spec.dag_hash() == round_trip_reversed_json_spec.dag_hash()
        # full_hashes are equal
        spec.concretize()
        round_trip_yaml_spec.concretize()
        round_trip_json_spec.concretize()
        round_trip_reversed_yaml_spec.concretize()
        round_trip_reversed_json_spec.concretize()
        assert spec.full_hash() == round_trip_yaml_spec.full_hash()
        assert spec.full_hash() == round_trip_json_spec.full_hash()
        assert spec.full_hash() == round_trip_reversed_yaml_spec.full_hash()
        assert spec.full_hash() == round_trip_reversed_json_spec.full_hash()
Example #10
    def save(self):
        """
        Save the data to file.
        """
        with spack.caches.misc_cache.write_transaction(self.cache_key) as (old, new):
            sjson.dump(self.data, new)
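A hypothetical load counterpart, assuming misc_cache follows Spack's FileCache interface, where read_transaction(key) takes the cache lock and yields the cache file opened for reading (the method name and the existence of the cache entry are assumptions here):

    def load(self):
        """
        Read the data back from the cache file written by save().
        """
        # Assumes the entry exists; read_transaction is expected to yield
        # the open cache file under a read lock.
        with spack.caches.misc_cache.read_transaction(self.cache_key) as f:
            self.data = sjson.load(f)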