Example #1
    def write(self):
        """Writes an in-memory environment to its location on disk.

        This will also write out package files for each newly concretized spec.
        """
        # ensure path in var/spack/environments
        fs.mkdirp(self.path)

        if self.specs_by_hash:
            # ensure the prefix/.env directory exists
            fs.mkdirp(self.env_subdir_path)

            for spec in self.new_specs:
                for dep in spec.traverse():
                    if not dep.concrete:
                        raise ValueError('specs passed to environment.write() '
                                         'must be concrete!')

                    root = os.path.join(self.repos_path, dep.namespace)
                    repo = spack.repo.create_or_construct(root, dep.namespace)
                    pkg_dir = repo.dirname_for_package_name(dep.name)

                    fs.mkdirp(pkg_dir)
                    spack.repo.path.dump_provenance(dep, pkg_dir)
            self.new_specs = []

            # write the lock file last
            with fs.write_tmp_and_move(self.lock_path) as f:
                sjson.dump(self._to_lockfile_dict(), stream=f)
        else:
            if os.path.exists(self.lock_path):
                os.unlink(self.lock_path)

        # invalidate _repo cache
        self._repo = None

        # put the new user specs in the YAML
        yaml_spec_list = config_dict(self.yaml).setdefault('specs', [])
        yaml_spec_list[:] = [str(s) for s in self.user_specs]

        # if all that worked, write out the manifest file at the top level
        with fs.write_tmp_and_move(self.manifest_path) as f:
            _write_yaml(self.yaml, f)
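For context, a minimal usage sketch of how this write() method is typically reached. It assumes spack.environment is importable as ev and uses a placeholder environment path; the exact call sequence can vary between Spack versions.

import spack.environment as ev

# Hypothetical sketch: create an environment, add a spec, concretize it
# in memory, then persist the manifest, lock file, and package provenance.
env = ev.Environment('/tmp/my-env')  # assumed path, for illustration only
env.add('zlib')                      # queue a user spec
env.concretize()                     # populates specs_by_hash / new_specs
env.write()                          # writes spack.yaml, spack.lock, and repo provenance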
Example #2
    def write(self):
        """Writes an in-memory environment to its location on disk.

        This will also write out package files for each newly concretized spec.
        """
        # ensure path in var/spack/environments
        fs.mkdirp(self.path)

        if self.specs_by_hash:
            # ensure the prefix/.env directory exists
            fs.mkdirp(self.env_subdir_path)

            for spec in self.new_specs:
                for dep in spec.traverse():
                    if not dep.concrete:
                        raise ValueError('specs passed to environment.write() '
                                         'must be concrete!')

                    root = os.path.join(self.repos_path, dep.namespace)
                    repo = spack.repo.create_or_construct(root, dep.namespace)
                    pkg_dir = repo.dirname_for_package_name(dep.name)

                    fs.mkdirp(pkg_dir)
                    spack.repo.path.dump_provenance(dep, pkg_dir)
            self.new_specs = []

            # write the lock file last
            with fs.write_tmp_and_move(self.lock_path) as f:
                sjson.dump(self._to_lockfile_dict(), stream=f)
        else:
            if os.path.exists(self.lock_path):
                os.unlink(self.lock_path)

        # invalidate _repo cache
        self._repo = None

        # put any changes in the definitions in the YAML
        for name, speclist in self.spec_lists.items():
            if name == user_speclist_name:
                # The primary list is handled differently
                continue

            conf = config_dict(self.yaml)
            active_yaml_lists = [
                l for l in conf.get('definitions', [])
                if name in l and _eval_conditional(l.get('when', 'True'))
            ]

            # Remove any specs in yaml that are not in internal representation
            for ayl in active_yaml_lists:
                # If it's not a string, it's a matrix. Those can't have changed
                # If it is a string that starts with '$', it's a reference.
                # Those also can't have changed.
                ayl[name][:] = [
                    s for s in ayl.setdefault(name, [])
                    if (not isinstance(s, six.string_types))
                    or s.startswith('$') or Spec(s) in speclist.specs
                ]

            # Put the new specs into the first active list from the yaml
            new_specs = [
                entry for entry in speclist.yaml_list
                if isinstance(entry, six.string_types) and not any(
                    entry in ayl[name] for ayl in active_yaml_lists)
            ]
            list_for_new_specs = active_yaml_lists[0].setdefault(name, [])
            list_for_new_specs[:] = list_for_new_specs + new_specs

        # put the new user specs in the YAML.
        # This can be done directly because there can't be multiple definitions
        # nor when clauses for `specs` list.
        yaml_spec_list = config_dict(self.yaml).setdefault(
            user_speclist_name, [])
        yaml_spec_list[:] = self.user_specs.yaml_list

        default_name = default_view_name
        if self.views and len(self.views) == 1 and default_name in self.views:
            path = self.default_view.root
            if self.default_view == ViewDescriptor(self.view_path_default):
                view = True
            elif self.default_view == ViewDescriptor(path):
                view = path
            else:
                view = dict((name, view.to_dict())
                            for name, view in self.views.items())
        elif self.views:
            view = dict(
                (name, view.to_dict()) for name, view in self.views.items())
        else:
            view = False

        yaml_dict = config_dict(self.yaml)
        if view is not True:
            # The default case is to keep an active view inside of the
            # Spack environment directory. To avoid cluttering the config,
            # we omit the setting in this case.
            yaml_dict['view'] = view
        elif 'view' in yaml_dict:
            del yaml_dict['view']

        # if all that worked, write out the manifest file at the top level
        with fs.write_tmp_and_move(self.manifest_path) as f:
            _write_yaml(self.yaml, f)

        # TODO: for operations that just add to the env (install etc.) this
        # could just call update_view
        self.regenerate_views()
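A complementary sketch for this newer variant, which also serializes named definitions and the view setting. The environment name and spec are placeholders, and ev.read() returning the named environment is an assumption about the surrounding API.

import spack.environment as ev

# Hypothetical sketch: open a named environment, change its user specs,
# and rewrite the manifest. With no newly concretized specs, write() only
# refreshes spack.yaml (definitions, specs, view) and regenerates views;
# a stale spack.lock is removed only if nothing is concretized at all.
env = ev.read('myenv')   # assumed environment name
env.remove('zlib')       # drop a user spec from the in-memory spec lists
env.write()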