示例#1
0
def find_spec(spec, condition):
    """Search the DAG around ``spec``, nearest relatives first, and
    return the first node satisfying ``condition`` (or None)."""
    # Nearby relatives first: parents, then children, following
    # build/link edges only.
    deptype = ('build', 'link')
    nearby = chain(
        spec.traverse(direction='parents', deptype=deptype, root=False),
        spec.traverse(direction='children', deptype=deptype, root=False))

    examined = set()
    for candidate in nearby:
        if condition(candidate):
            return candidate
        examined.add(id(candidate))

    # Fall back to every other node in the DAG, skipping spec itself
    # and anything already examined above.
    for candidate in spec.root.traverse(deptypes=spack.alldeps):
        if candidate is spec or id(candidate) in examined:
            continue
        if condition(candidate):
            return candidate

    # spec itself is checked last.
    return spec if condition(spec) else None
示例#2
0
def find_spec(spec, condition):
    """Walk the DAG outward from spec in an intelligent order and return
    the first spec for which condition is true, or None."""
    deptype = ('build', 'link')
    checked = set()

    # Pass 1: the immediate neighborhood, parents before children,
    # following build/link edges only.
    for direction in ('parents', 'children'):
        for rel in spec.traverse(direction=direction, deptype=deptype,
                                 root=False):
            if condition(rel):
                return rel
            checked.add(id(rel))

    # Pass 2: everything else reachable from the root, except spec
    # itself and nodes already checked in pass 1.
    for rel in spec.root.traverse(deptypes=spack.alldeps):
        if rel is spec:
            continue
        if id(rel) in checked:
            continue
        if condition(rel):
            return rel

    # Pass 3: finally, spec itself.
    if condition(spec):
        return spec

    return None
示例#3
0
文件: environment.py 项目: LLNL/spack
    def _get_environment_specs(self, recurse_dependencies=True):
        """Returns the specs of all the packages in an environment.

        If these specs appear under different user_specs, only one copy
        is added to the list returned.
        """
        package_to_spec = {}
        spec_list = list()

        for spec_hash in self.concretized_order:
            spec = self.specs_by_hash[spec_hash]

            specs = (spec.traverse(deptype=('link', 'run'))
                     if recurse_dependencies else (spec,))

            for dep in specs:
                prior = package_to_spec.get(dep.name)
                if prior and prior != dep:
                    tty.debug("{0} takes priority over {1}"
                              .format(package_to_spec[dep.name].format(),
                                      dep.format()))
                else:
                    package_to_spec[dep.name] = dep
                    spec_list.append(dep)

        return spec_list
示例#4
0
文件: environment.py 项目: LLNL/spack
    def _to_lockfile_dict(self):
        """Build the dictionary that is written out as this environment's
        lockfile."""
        # Collect every concrete node (roots and dependencies), keyed by
        # DAG hash so each node is serialized only once.
        concrete_specs = {}
        for root in self.specs_by_hash.values():
            for node in root.traverse():
                key = node.dag_hash()
                if key not in concrete_specs:
                    concrete_specs[key] = node.to_node_dict(all_deps=True)

        # User specs + hashes are the 'roots' of the environment.
        roots = [
            {'hash': h, 'spec': str(s)}
            for h, s in zip(self.concretized_order,
                            self.concretized_user_specs)
        ]

        return {
            # metadata about the format
            '_meta': {
                'file-type': 'spack-lockfile',
                'lockfile-version': lockfile_format_version,
            },
            'roots': roots,
            # concrete specs by hash, including dependencies
            'concrete_specs': concrete_specs,
        }
示例#5
0
文件: database.py 项目: LLNL/spack
    def installed_relatives(self, spec, direction='children', transitive=True):
        """Return installed specs related to this one."""
        if direction not in ('parents', 'children'):
            raise ValueError("Invalid direction: %s" % direction)

        result = set()
        for matched in self.query(spec):
            # Candidate relatives: either the full transitive closure in
            # the requested direction, or just the immediate neighbors.
            if not transitive:
                if direction == 'parents':
                    candidates = matched.dependents()
                else:
                    candidates = matched.dependencies()
            else:
                candidates = matched.traverse(direction=direction, root=False)

            for rel in candidates:
                key = rel.dag_hash()
                if key not in self._data:
                    reltype = ('Dependent' if direction == 'parents'
                               else 'Dependency')
                    tty.warn("Inconsistent state! %s %s of %s not in DB"
                             % (reltype, key, matched.dag_hash()))
                    continue
                # Only relatives actually installed are reported.
                if self._data[key].installed:
                    result.add(rel)
        return result
示例#6
0
 def installed_dependents(self, spec):
     """List the installed specs that depend on this one."""
     result = set()
     for match in self.query(spec):
         # Everything upstream of the match (excluding itself).
         result.update(match.traverse(direction='parents', root=False))
     return result
示例#7
0
    def installed_relatives(self, spec, direction='children', transitive=True):
        """Return installed specs related to this one."""
        if direction not in ('parents', 'children'):
            raise ValueError("Invalid direction: %s" % direction)

        found = set()
        for queried in self.query(spec):
            if transitive:
                neighborhood = queried.traverse(direction=direction,
                                                root=False)
            elif direction == 'parents':
                neighborhood = queried.dependents()
            else:
                neighborhood = queried.dependencies()

            for node in neighborhood:
                node_hash = node.dag_hash()
                # A relative missing from the DB indicates corruption;
                # warn and move on.
                record = (self._data[node_hash]
                          if node_hash in self._data else None)
                if record is None:
                    kind = ('Dependent' if direction == 'parents'
                            else 'Dependency')
                    tty.warn("Inconsistent state! %s %s of %s not in DB"
                             % (kind, node_hash, queried.dag_hash()))
                elif record.installed:
                    found.add(node)

        return found
示例#8
0
def test_installed_upstream(upstream_and_downstream_db):
    """Specs installed upstream must be visible, with correct paths,
    through the downstream database."""
    (upstream_db, upstream_layout,
     downstream_db, downstream_layout) = upstream_and_downstream_db

    default = ('build', 'link')
    x = MockPackage('x', [], [])
    z = MockPackage('z', [], [])
    y = MockPackage('y', [z], [default])
    w = MockPackage('w', [x, y], [default, default])

    with spack.repo.swap(MockPackageMultiRepo([w, x, y, z])):
        spec = spack.spec.Spec('w')
        spec.concretize()

        # Install every dependency of w into the upstream DB only.
        for dep in spec.traverse(root=False):
            upstream_db.add(dep, upstream_layout)

        # Install w itself downstream; its deps should resolve upstream.
        new_spec = spack.spec.Spec('w')
        new_spec.concretize()
        downstream_db.add(new_spec, downstream_layout)

        for dep in new_spec.traverse(root=False):
            upstream, record = downstream_db.query_by_spec_hash(
                dep.dag_hash())
            assert upstream
            assert record.path == upstream_layout.path_for_spec(dep)

        upstream, record = downstream_db.query_by_spec_hash(
            new_spec.dag_hash())
        assert not upstream
        assert record.installed

        upstream_db._check_ref_counts()
        downstream_db._check_ref_counts()
示例#9
0
 def installed_dependents(self, spec):
     """List the installed specs that depend on this one."""
     # Union of all ancestors of every spec matching the query.
     return set(
         parent
         for queried in self.query(spec)
         for parent in queried.traverse(direction='parents', root=False)
     )
示例#10
0
    def make_concretization_repository(abstract_specs):
        """Returns the path to a temporary repository created to contain
        a fake package that depends on all of the abstract specs.
        """
        repo_root, _ = spack.repo.create_repo(tempfile.mkdtemp())
        tty.debug(
            '[CONCRETIZATION]: Creating helper repository in {0}'.format(
                repo_root))

        pkg_dir = os.path.join(repo_root, 'packages', 'concretizationroot')
        fs.mkdirp(pkg_dir)
        template = spack.tengine.make_environment().get_template(
            'misc/coconcretization.pyt')

        # Split recursive specs into single nodes: the concretizer has
        # trouble respecting conditions on dependents expressed like
        # depends_on('foo ^bar@<version>'), see issue #11160
        split_specs = []
        for root in abstract_specs:
            for node in root.traverse(root=True):
                split_specs.append(node.copy(deps=False))

        with open(os.path.join(pkg_dir, 'package.py'), 'w') as f:
            f.write(template.render(specs=[str(s) for s in split_specs]))

        return spack.repo.Repo(repo_root)
示例#11
0
文件: database.py 项目: lorak41/spack
    def installed_relatives(self, spec, direction='children', transitive=True,
                            deptype='all'):
        """Return installed specs related to this one."""
        if direction not in ('parents', 'children'):
            raise ValueError("Invalid direction: %s" % direction)

        found = set()
        for queried in self.query(spec):
            # Candidates: transitive closure, or immediate neighbors only.
            if transitive:
                candidates = queried.traverse(
                    direction=direction, root=False, deptype=deptype)
            elif direction == 'parents':
                candidates = queried.dependents(deptype=deptype)
            else:
                candidates = queried.dependencies(deptype=deptype)

            for node in candidates:
                node_hash = node.dag_hash()
                _, record = self.query_by_spec_hash(node_hash)
                if not record:
                    # A relative with no DB record means the DB is
                    # inconsistent: raise or warn per configuration.
                    kind = ('Dependent' if direction == 'parents'
                            else 'Dependency')
                    msg = ("Inconsistent state! %s %s of %s not in DB"
                           % (kind, node_hash, queried.dag_hash()))
                    if self._fail_when_missing_deps:
                        raise MissingDependenciesError(msg)
                    tty.warn(msg)
                    continue

                if record.installed:
                    found.add(node)

        return found
示例#12
0
    def format_list(specs):
        """Display a single list of specs, with no groups"""
        # Build (rendered string, spec) pairs for every spec, plus
        # depth-indented dependencies when requested.
        formatted = []
        for spec in specs:
            formatted.append((fmt(spec), spec))
            if deps:
                formatted.extend(
                    (fmt(dep, depth), dep)
                    for depth, dep in spec.traverse(root=False, depth=True))
                formatted.append(('', None))  # mark newlines

        # With neither deps nor paths requested, colify and be done.
        if not any((deps, paths)):
            colify((pair[0] for pair in formatted), indent=indent)
            return

        # Otherwise print one per line, padded so paths line up.
        widest = max(len(pair[0]) for pair in formatted)
        path_fmt = "%%-%ds%%s" % (widest + 2)

        # getting lots of prefixes requires DB lookups. Ensure
        # all spec.prefix calls are in one transaction.
        with spack.store.db.read_transaction():
            for text, spec in formatted:
                if not text:
                    print()  # the newline marker appended above
                    continue

                if paths:
                    print(path_fmt % (text, spec.prefix))
                else:
                    print(text)
示例#13
0
    def _to_lockfile_dict(self):
        """Create the dictionary written out as this environment's
        lockfile."""
        # Serialize every concrete node exactly once, keyed by DAG hash.
        concrete_specs = {}
        for root in self.specs_by_hash.values():
            for node in root.traverse():
                key = node.dag_hash()
                if key in concrete_specs:
                    continue
                concrete_specs[key] = node.to_node_dict(all_deps=True)

        data = {
            # metadata about the format
            '_meta': {
                'file-type': 'spack-lockfile',
                'lockfile-version': lockfile_format_version,
            },

            # user specs + hashes are the 'roots' of the environment
            'roots': [
                {'hash': h, 'spec': str(s)}
                for h, s in zip(self.concretized_order,
                                self.concretized_user_specs)
            ],

            # concrete specs by hash, including dependencies
            'concrete_specs': concrete_specs,
        }

        return data
示例#14
0
    def _get_environment_specs(self, recurse_dependencies=True):
        """Returns the specs of all the packages in an environment.

        If these specs appear under different user_specs, only one copy
        is added to the list returned.
        """
        package_to_spec = {}
        spec_list = list()

        for spec_hash in self.concretized_order:
            spec = self.specs_by_hash[spec_hash]

            specs = (spec.traverse(
                deptype=('link', 'run')) if recurse_dependencies else (spec, ))

            for dep in specs:
                prior = package_to_spec.get(dep.name)
                if prior and prior != dep:
                    tty.debug("{0} takes priority over {1}".format(
                        package_to_spec[dep.name].format(), dep.format()))
                else:
                    package_to_spec[dep.name] = dep
                    spec_list.append(dep)

        return spec_list
示例#15
0
    def remove_specs(self, *specs, **kwargs):
        """Remove concrete specs from this view.

        Args:
            *specs: concrete specs to remove.

        Keyword Args:
            with_dependents (bool): also remove packages that depend on
                the given specs (default True). When False, dependents
                are kept but a warning lists them as unusable.
            with_dependencies (bool): also remove the dependencies of
                the given specs (default False).
            all_specs: precomputed result of get_all_specs(), which is
                expensive; computed here when not supplied.
            exclude: forwarded to filter_exclude() to trim the removal
                set — presumably spec patterns to keep; verify against
                filter_exclude's contract.
        """
        assert all((s.concrete for s in specs))
        with_dependents = kwargs.get("with_dependents", True)
        with_dependencies = kwargs.get("with_dependencies", False)

        # caller can pass this in, as get_all_specs() is expensive
        all_specs = kwargs.get("all_specs", None) or set(self.get_all_specs())

        specs = set(specs)

        if with_dependencies:
            specs = get_dependencies(specs)

        if kwargs.get("exclude", None):
            specs = set(filter_exclude(specs, kwargs["exclude"]))

        to_deactivate = specs
        to_keep = all_specs - to_deactivate

        # Specs that remain in the view but depend on something being
        # removed: they either get removed too, or flagged as unusable.
        dependents = find_dependents(to_keep, to_deactivate)

        if with_dependents:
            # remove all packages depending on the ones to remove
            if len(dependents) > 0:
                tty.warn(self._croot +
                         "The following dependents will be removed: %s" %
                         ", ".join((s.name for s in dependents)))
                to_deactivate.update(dependents)
        elif len(dependents) > 0:
            tty.warn(self._croot +
                     "The following packages will be unusable: %s" %
                     ", ".join((s.name for s in dependents)))

        # Determine the order that packages should be removed from the view;
        # dependents come before their dependencies.
        to_deactivate_sorted = list()
        # Map each spec being removed to the subset of to_deactivate it
        # depends on (its in-view dependency set).
        depmap = dict()
        for spec in to_deactivate:
            depmap[spec] = set(d for d in spec.traverse(root=False)
                               if d in to_deactivate)

        # Repeatedly peel off specs whose dependency set is empty, then
        # discard them from every remaining entry: a reverse topological
        # order. NOTE(review): assumes the removal set is acyclic — a
        # dependency cycle here would loop forever; confirm specs form
        # a DAG.
        while depmap:
            for spec in [s for s, d in depmap.items() if not d]:
                to_deactivate_sorted.append(spec)
                for s in depmap.keys():
                    depmap[s].discard(spec)
                depmap.pop(spec)
        # Reverse so dependents come before their dependencies.
        to_deactivate_sorted.reverse()

        # Ensure that the sorted list contains all the packages
        assert set(to_deactivate_sorted) == to_deactivate

        # Remove the packages from the view
        for spec in to_deactivate_sorted:
            if spec.package.is_extension:
                self.remove_extension(spec, with_dependents=with_dependents)
            else:
                self.remove_standalone(spec)

        self._purge_empty_directories()
示例#16
0
def test_update_failed_no_dependent_task(install_mockery):
    """_update_failed(mark=False) on a task with no dependent build
    tasks should record a None entry for it."""
    spec, installer = create_installer('dependent-install')

    for dep in spec.traverse(root=False):
        dep_task = create_build_task(dep.package)
        installer._update_failed(dep_task, mark=False)
        assert installer.failed[dep_task.pkg_id] is None
示例#17
0
def _mirror(args):
    """Create a local bootstrap mirror under ``args.root_dir`` containing
    sources for all bootstrap root specs (and, when requested, binary
    packages), then print instructions for registering it.
    """
    mirror_dir = spack.util.path.canonicalize_path(
        os.path.join(args.root_dir, LOCAL_MIRROR_DIR))

    # TODO: Here we are adding gnuconfig manually, but this can be fixed
    # TODO: as soon as we have an option to add to a mirror all the possible
    # TODO: dependencies of a spec
    root_specs = spack.bootstrap.all_root_specs(
        development=args.dev) + ['gnuconfig']
    for spec_str in root_specs:
        msg = 'Adding "{0}" and dependencies to the mirror at {1}'
        llnl.util.tty.msg(msg.format(spec_str, mirror_dir))
        # Suppress tty from the call below for terser messages
        llnl.util.tty.set_msg_enabled(False)
        spec = spack.spec.Spec(spec_str).concretized()
        # Mirror every node of the concretized DAG, one at a time.
        for node in spec.traverse():
            spack.mirror.create(mirror_dir, [node])
        llnl.util.tty.set_msg_enabled(True)

    if args.binary_packages:
        # Fetch the binary tarball, expand it, and move its build_cache
        # directory into the mirror.
        msg = 'Adding binary packages from "{0}" to the mirror at {1}'
        llnl.util.tty.msg(msg.format(BINARY_TARBALL, mirror_dir))
        llnl.util.tty.set_msg_enabled(False)
        stage = spack.stage.Stage(BINARY_TARBALL, path=tempfile.mkdtemp())
        stage.create()
        stage.fetch()
        stage.expand_archive()
        build_cache_dir = os.path.join(stage.source_path, 'build_cache')
        shutil.move(build_cache_dir, mirror_dir)
        llnl.util.tty.set_msg_enabled(True)

    def write_metadata(subdir, metadata):
        # Write metadata.yaml under root_dir/metadata/<subdir> and return
        # (directory containing it, its path relative to root_dir).
        metadata_rel_dir = os.path.join('metadata', subdir)
        metadata_yaml = os.path.join(args.root_dir, metadata_rel_dir,
                                     'metadata.yaml')
        llnl.util.filesystem.mkdirp(os.path.dirname(metadata_yaml))
        with open(metadata_yaml, mode='w') as f:
            spack.util.spack_yaml.dump(metadata, stream=f)
        return os.path.dirname(metadata_yaml), metadata_rel_dir

    # Tell the user how to register the mirror on the target platform.
    instructions = (
        '\nTo register the mirror on the platform where it\'s supposed '
        'to be used, move "{0}" to its final location and run the '
        'following command(s):\n\n').format(args.root_dir)
    cmd = '  % spack bootstrap add --trust {0} <final-path>/{1}\n'
    _, rel_directory = write_metadata(subdir='sources',
                                      metadata=SOURCE_METADATA)
    instructions += cmd.format('local-sources', rel_directory)
    if args.binary_packages:
        abs_directory, rel_directory = write_metadata(subdir='binaries',
                                                      metadata=BINARY_METADATA)
        # Ship the clingo/gnupg buildcache index JSON next to the metadata.
        shutil.copy(spack.util.path.canonicalize_path(CLINGO_JSON),
                    abs_directory)
        shutil.copy(spack.util.path.canonicalize_path(GNUPG_JSON),
                    abs_directory)
        instructions += cmd.format('local-binaries', rel_directory)
    print(instructions)
示例#18
0
    def concretize_compiler_flags(self, spec):
        """
        The compiler flags are updated to match those of the spec whose
        compiler is used, defaulting to no compiler flags in the spec.
        Default specs set at the compiler level will still be added later.

        Returns:
            bool: True if the spec was modified (or if concretization
            must keep running), False otherwise.
        """
        # Pass on concretizing the compiler flags if the target or operating
        # system is not set.
        if not spec.architecture.concrete:
            # We haven't changed, but other changes need to happen before we
            # continue. `return True` here to force concretization to keep
            # running.
            return True

        # PEP 8 (E731): use a def instead of assigning a lambda to a name.
        def compiler_match(other):
            """True when other uses this spec's compiler and architecture."""
            return (spec.compiler == other.compiler and
                    spec.architecture == other.architecture)

        ret = False
        for flag in spack.spec.FlagMap.valid_compiler_flags():
            if flag not in spec.compiler_flags:
                spec.compiler_flags[flag] = list()
            try:
                # Find the nearest ancestor with a matching compiler that
                # sets this flag, and inherit any flags we don't have yet.
                nearest = next(p for p in spec.traverse(direction='parents')
                               if (compiler_match(p) and
                                   (p is not spec) and
                                   flag in p.compiler_flags))
                nearest_flags = nearest.compiler_flags.get(flag, [])
                flags = spec.compiler_flags.get(flag, [])
                if set(nearest_flags) - set(flags):
                    # dedupe preserves order while removing duplicates.
                    spec.compiler_flags[flag] = list(
                        llnl.util.lang.dedupe(nearest_flags + flags)
                    )
                    ret = True
            except StopIteration:
                # No ancestor with a matching compiler sets this flag.
                pass

        # Include the compiler flag defaults from the config files
        # This ensures that spack will detect conflicts that stem from a change
        # in default compiler flags.
        try:
            compiler = spack.compilers.compiler_for_spec(
                spec.compiler, spec.architecture)
        except spack.compilers.NoCompilerForSpecError:
            if self.check_for_compiler_existence:
                raise
            return ret
        for flag in compiler.flags:
            config_flags = compiler.flags.get(flag, [])
            flags = spec.compiler_flags.get(flag, [])
            spec.compiler_flags[flag] = list(
                llnl.util.lang.dedupe(config_flags + flags)
            )
            if set(config_flags) - set(flags):
                ret = True

        return ret
示例#19
0
    def concretize_compiler_flags(self, spec):
        """
        The compiler flags are updated to match those of the spec whose
        compiler is used, defaulting to no compiler flags in the spec.
        Default specs set at the compiler level will still be added later.

        Returns:
            bool: True if the spec was modified (or if concretization
            must keep running), False otherwise.
        """
        # Pass on concretizing the compiler flags if the target or operating
        # system is not set.
        if not (spec.architecture.platform_os and spec.architecture.target):
            # We haven't changed, but other changes need to happen before we
            # continue. `return True` here to force concretization to keep
            # running.
            return True

        # PEP 8 (E731): use a def instead of assigning a lambda to a name.
        def compiler_match(other):
            """True when other uses this spec's compiler and architecture."""
            return (spec.compiler == other.compiler and
                    spec.architecture == other.architecture)

        ret = False
        for flag in spack.spec.FlagMap.valid_compiler_flags():
            if flag not in spec.compiler_flags:
                spec.compiler_flags[flag] = list()
            try:
                # Find the nearest ancestor with a matching compiler that
                # sets this flag, and inherit any flags we don't have yet.
                nearest = next(p for p in spec.traverse(direction='parents')
                               if (compiler_match(p) and
                                   (p is not spec) and
                                   flag in p.compiler_flags))
                nearest_flags = set(nearest.compiler_flags.get(flag, []))
                flags = set(spec.compiler_flags.get(flag, []))
                if (nearest_flags - flags):
                    # TODO: these set operations may reorder the flags, which
                    # for some orders of flags can be invalid. See:
                    # https://github.com/spack/spack/issues/6154#issuecomment-342365573
                    spec.compiler_flags[flag] = list(nearest_flags | flags)
                    ret = True
            except StopIteration:
                # No ancestor with a matching compiler sets this flag.
                pass

        # Include the compiler flag defaults from the config files
        # This ensures that spack will detect conflicts that stem from a change
        # in default compiler flags.
        try:
            compiler = spack.compilers.compiler_for_spec(
                spec.compiler, spec.architecture)
        except spack.compilers.NoCompilerForSpecError:
            if self.check_for_compiler_existence:
                raise
            return ret
        for flag in compiler.flags:
            config_flags = set(compiler.flags.get(flag, []))
            flags = set(spec.compiler_flags.get(flag, []))
            spec.compiler_flags[flag] = list(config_flags | flags)
            if (config_flags - flags):
                ret = True

        return ret
示例#20
0
def test_update_failed_no_dependent_task(install_mockery):
    """_update_failed(mark=False) on a task with no dependent build
    tasks should record a None entry for it."""
    installer = create_installer(installer_args(['dependent-install'], {}))
    spec = installer.build_requests[0].pkg.spec

    for dep in spec.traverse(root=False):
        dep_task = create_build_task(dep.package)
        installer._update_failed(dep_task, mark=False)
        assert installer.failed[dep_task.pkg_id] is None
示例#21
0
def test_installed_upstream(upstream_and_downstream_db):
    """Dependencies installed upstream must be visible downstream, and
    the read-only upstream DB must refuse downstream lock access."""
    (upstream_write_db, upstream_db, upstream_layout,
     downstream_db, downstream_layout) = upstream_and_downstream_db

    default = ('build', 'link')
    mock_repo = MockPackageMultiRepo()
    x = mock_repo.add_package('x', [], [])
    z = mock_repo.add_package('z', [], [])
    y = mock_repo.add_package('y', [z], [default])
    mock_repo.add_package('w', [x, y], [default, default])

    with spack.repo.use_repositories(mock_repo):
        spec = spack.spec.Spec('w')
        spec.concretize()

        # Install all of w's dependencies via the upstream write DB,
        # then refresh the read-only upstream DB.
        for dep in spec.traverse(root=False):
            upstream_write_db.add(dep, upstream_layout)
        upstream_db._read()

        # Deps are visible downstream; direct upstream access is locked.
        for dep in spec.traverse(root=False):
            assert downstream_db.get_by_hash(dep.dag_hash()) is not None
            with pytest.raises(spack.database.ForbiddenLockError):
                upstream_db.get_by_hash(dep.dag_hash())

        # Install w itself downstream; its deps resolve upstream.
        new_spec = spack.spec.Spec('w')
        new_spec.concretize()
        downstream_db.add(new_spec, downstream_layout)

        for dep in new_spec.traverse(root=False):
            upstream, record = downstream_db.query_by_spec_hash(
                dep.dag_hash())
            assert upstream
            assert record.path == upstream_layout.path_for_spec(dep)

        upstream, record = downstream_db.query_by_spec_hash(
            new_spec.dag_hash())
        assert not upstream
        assert record.installed

        upstream_db._check_ref_counts()
        downstream_db._check_ref_counts()
示例#22
0
    def concretize_compiler_flags(self, spec):
        """
        The compiler flags are updated to match those of the spec whose
        compiler is used, defaulting to no compiler flags in the spec.
        Default specs set at the compiler level will still be added later.

        Returns:
            bool: True if the spec was modified (or if concretization
            must keep running), False otherwise.
        """
        # Pass on concretizing the compiler flags if the target or operating
        # system is not set.
        if not (spec.architecture.platform_os and spec.architecture.target):
            # We haven't changed, but other changes need to happen before we
            # continue. `return True` here to force concretization to keep
            # running.
            return True

        # PEP 8 (E731): use a def instead of assigning a lambda to a name.
        def compiler_match(other):
            """True when other uses this spec's compiler and architecture."""
            return (spec.compiler == other.compiler and
                    spec.architecture == other.architecture)

        ret = False
        for flag in spack.spec.FlagMap.valid_compiler_flags():
            if flag not in spec.compiler_flags:
                spec.compiler_flags[flag] = list()
            try:
                # Find the nearest ancestor with a matching compiler that
                # sets this flag, and inherit any flags we don't have yet.
                nearest = next(p for p in spec.traverse(direction='parents')
                               if (compiler_match(p) and
                                   (p is not spec) and
                                   flag in p.compiler_flags))
                nearest_flags = set(nearest.compiler_flags.get(flag, []))
                flags = set(spec.compiler_flags.get(flag, []))
                if (nearest_flags - flags):
                    # NOTE: set union may reorder flags; sibling versions of
                    # this method track that as a known issue.
                    spec.compiler_flags[flag] = list(nearest_flags | flags)
                    ret = True
            except StopIteration:
                # No ancestor with a matching compiler sets this flag.
                pass

        # Include the compiler flag defaults from the config files
        # This ensures that spack will detect conflicts that stem from a change
        # in default compiler flags.
        compiler = spack.compilers.compiler_for_spec(
            spec.compiler, spec.architecture)
        for flag in compiler.flags:
            config_flags = set(compiler.flags.get(flag, []))
            flags = set(spec.compiler_flags.get(flag, []))
            spec.compiler_flags[flag] = list(config_flags | flags)
            if (config_flags - flags):
                ret = True

        return ret
示例#23
0
def view_copy(src, dst, view, spec=None):
    """
    Copy a file from src to dst.

    Use spec and view to generate relocations
    """
    shutil.copy2(src, dst)

    # Without a (non-external) spec this is metadata: nothing to relocate.
    if not spec or spec.external:
        return

    # Get information on where to relocate from/to

    # This is vestigial code for the *old* location of sbang. Previously,
    # sbang was a bash script, and it lived in the spack prefix. It is
    # now a POSIX script that lives in the install prefix. Old packages
    # will have the old sbang location in their shebangs.
    # TODO: Not sure which one to use...
    import spack.hooks.sbang as sbang

    # Break a package include cycle
    import spack.relocate

    orig_sbang = '#!/bin/bash {0}/bin/sbang'.format(spack.paths.spack_root)
    new_sbang = sbang.sbang_shebang_line()

    # Map every non-external prefix in the DAG to its view projection.
    prefix_to_projection = collections.OrderedDict(
        {spec.prefix: view.get_projection_for_spec(spec)})
    for dep in spec.traverse():
        if not dep.external:
            prefix_to_projection[dep.prefix] = \
                view.get_projection_for_spec(dep)

    if spack.relocate.is_binary(dst):
        spack.relocate.relocate_text_bin(
            binaries=[dst], prefixes=prefix_to_projection)
    else:
        prefix_to_projection[spack.store.layout.root] = view._root
        prefix_to_projection[orig_sbang] = new_sbang
        spack.relocate.relocate_text(
            files=[dst], prefixes=prefix_to_projection)

    # Preserve the source file's ownership on the copy, best-effort.
    try:
        stat = os.stat(src)
        os.chown(dst, stat.st_uid, stat.st_gid)
    except OSError:
        tty.debug('Can\'t change the permissions for %s' % dst)
示例#24
0
    def _get_environment_specs(self, recurse_dependencies=True):
        """Returns the specs of all the packages in an environment.

        If these specs appear under different user_specs, only one copy
        is added to the list returned.
        """
        spec_list = list()

        for spec_hash in self.concretized_order:
            spec = self.specs_by_hash[spec_hash]

            specs = (spec.traverse(
                deptype=('link', 'run')) if recurse_dependencies else (spec, ))

            spec_list.extend(specs)

        return spec_list
示例#25
0
文件: __init__.py 项目: wangvsa/spack
def display_specs_as_json(specs, deps=False):
    """Convert specs to a list of json records."""
    # Each DAG hash is emitted at most once.
    seen = set()
    records = []

    for spec in specs:
        root_hash = spec.dag_hash()
        if root_hash in seen:
            continue
        seen.add(root_hash)
        records.append(spec.to_record_dict())

        if not deps:
            continue
        for dep in spec.traverse():
            dep_hash = dep.dag_hash()
            if dep_hash not in seen:
                seen.add(dep_hash)
                records.append(dep.to_record_dict())

    sjson.dump(records, sys.stdout)
示例#26
0
def test_recursive_upstream_dbs(tmpdir_factory, test_store, gen_mock_layout):
    # Build a three-level chain of databases (a -> b -> c) mirroring the
    # x -> y -> z dependency chain, then verify that reconstructing the
    # upstream databases purely from install roots yields the same
    # spec-to-database mapping, ref counts, and relative queries.
    roots = [str(tmpdir_factory.mktemp(x)) for x in ['a', 'b', 'c']]
    layouts = [gen_mock_layout(x) for x in ['/ra/', '/rb/', '/rc/']]

    # x depends on y depends on z, via the default build/link dep types.
    default = ('build', 'link')
    z = MockPackage('z', [], [])
    y = MockPackage('y', [z], [default])
    x = MockPackage('x', [y], [default])

    mock_repo = MockPackageMultiRepo([x, y, z])

    with spack.repo.swap(mock_repo):
        spec = spack.spec.Spec('x')
        spec.concretize()
        # Deepest upstream database holds only z.
        db_c = spack.database.Database(roots[2])
        db_c.add(spec['z'], layouts[2])

        # Middle database holds y and can resolve z through db_c.
        db_b = spack.database.Database(roots[1], upstream_dbs=[db_c])
        db_b.add(spec['y'], layouts[1])

        # Local database holds x and sees y/z upstream.
        db_a = spack.database.Database(roots[0], upstream_dbs=[db_b, db_c])
        db_a.add(spec['x'], layouts[0])

        # Rebuild the upstream chain from just the install roots and check
        # each spec resolves to the expected database.
        upstream_dbs_from_scratch = (
            spack.store._construct_upstream_dbs_from_install_roots(
                [roots[1], roots[2]], _test=True))
        db_a_from_scratch = spack.database.Database(
            roots[0], upstream_dbs=upstream_dbs_from_scratch)

        assert db_a_from_scratch.db_for_spec_hash(
            spec.dag_hash()) == (db_a_from_scratch)
        assert db_a_from_scratch.db_for_spec_hash(
            spec['y'].dag_hash()) == (upstream_dbs_from_scratch[0])
        assert db_a_from_scratch.db_for_spec_hash(
            spec['z'].dag_hash()) == (upstream_dbs_from_scratch[1])

        # Reference counts must be internally consistent at every level.
        db_a_from_scratch._check_ref_counts()
        upstream_dbs_from_scratch[0]._check_ref_counts()
        upstream_dbs_from_scratch[1]._check_ref_counts()

        # Relatives queries should span the upstream boundary in both
        # directions (children of x; parents of z).
        assert (db_a_from_scratch.installed_relatives(spec) == set(
            spec.traverse(root=False)))
        assert (db_a_from_scratch.installed_relatives(
            spec['z'], direction='parents') == set([spec, spec['y']]))
Example #27
0
File: environment.py  Project: LLNL/spack
    def write(self):
        """Writes an in-memory environment to its location on disk.

        This will also write out package files for each newly concretized spec.
        """
        # ensure path in var/spack/environments
        fs.mkdirp(self.path)

        if not self.specs_by_hash:
            # Nothing concretized: any existing lock file is stale.
            if os.path.exists(self.lock_path):
                os.unlink(self.lock_path)
        else:
            # ensure the prefix/.env directory exists
            fs.mkdirp(self.env_subdir_path)

            for fresh in self.new_specs:
                for node in fresh.traverse():
                    if not node.concrete:
                        raise ValueError('specs passed to environment.write() '
                                         'must be concrete!')

                    # Dump package provenance into a per-namespace repo.
                    namespace_root = os.path.join(self.repos_path,
                                                  node.namespace)
                    repo = spack.repo.create_or_construct(namespace_root,
                                                          node.namespace)
                    pkg_dir = repo.dirname_for_package_name(node.name)

                    fs.mkdirp(pkg_dir)
                    spack.repo.path.dump_provenance(node, pkg_dir)
            self.new_specs = []

            # write the lock file last
            with fs.write_tmp_and_move(self.lock_path) as f:
                sjson.dump(self._to_lockfile_dict(), stream=f)

        # invalidate _repo cache
        self._repo = None

        # put the new user specs in the YAML
        yaml_spec_list = config_dict(self.yaml).setdefault('specs', [])
        yaml_spec_list[:] = [str(s) for s in self.user_specs]

        # if all that worked, write out the manifest file at the top level
        with fs.write_tmp_and_move(self.manifest_path) as f:
            _write_yaml(self.yaml, f)
示例#28
0
    def write(self):
        """Writes an in-memory environment to its location on disk.

        This will also write out package files for each newly concretized spec.
        """
        # ensure path in var/spack/environments
        fs.mkdirp(self.path)

        if not self.specs_by_hash:
            # Nothing concretized: any existing lock file is stale.
            if os.path.exists(self.lock_path):
                os.unlink(self.lock_path)
        else:
            # ensure the prefix/.env directory exists
            fs.mkdirp(self.env_subdir_path)

            for fresh in self.new_specs:
                for node in fresh.traverse():
                    if not node.concrete:
                        raise ValueError('specs passed to environment.write() '
                                         'must be concrete!')

                    # Dump package provenance into a per-namespace repo.
                    namespace_root = os.path.join(self.repos_path,
                                                  node.namespace)
                    repo = spack.repo.create_or_construct(namespace_root,
                                                          node.namespace)
                    pkg_dir = repo.dirname_for_package_name(node.name)

                    fs.mkdirp(pkg_dir)
                    spack.repo.path.dump_provenance(node, pkg_dir)
            self.new_specs = []

            # write the lock file last
            with fs.write_tmp_and_move(self.lock_path) as f:
                sjson.dump(self._to_lockfile_dict(), stream=f)

        # invalidate _repo cache
        self._repo = None

        # put the new user specs in the YAML
        yaml_spec_list = config_dict(self.yaml).setdefault('specs', [])
        yaml_spec_list[:] = [str(s) for s in self.user_specs]

        # if all that worked, write out the manifest file at the top level
        with fs.write_tmp_and_move(self.manifest_path) as f:
            _write_yaml(self.yaml, f)
示例#29
0
文件: __init__.py 项目: key4hep/spack
def display_specs_as_json(specs, deps=False):
    """Convert specs to a list of json records."""
    emitted = set()
    records = []
    for spec in specs:
        root_hash = spec.dag_hash()
        # Skip the whole spec (deps included) if we already emitted it.
        if root_hash not in emitted:
            emitted.add(root_hash)
            records.append(spec.node_dict_with_hashes())

            if deps:
                for dep in spec.traverse():
                    child_hash = dep.dag_hash()
                    if child_hash not in emitted:
                        emitted.add(child_hash)
                        records.append(dep.node_dict_with_hashes())

    sjson.dump(records, sys.stdout)
示例#30
0
def view_copy(src, dst, view, spec=None):
    """
    Copy a file from src to dst.

    Use spec and view to generate relocations
    """
    shutil.copyfile(src, dst)
    if not spec:
        # Metadata file: a plain copy is all that is needed.
        return

    # Not metadata, we have to relocate it

    # Get information on where to relocate from/to

    # This is vestigial code for the *old* location of sbang. Previously,
    # sbang was a bash script, and it lived in the spack prefix. It is
    # now a POSIX script that lives in the install prefix. Old packages
    # will have the old sbang location in their shebangs.
    # TODO: Not sure which one to use...
    import spack.hooks.sbang as sbang
    orig_sbang = '#!/bin/bash {0}/bin/sbang'.format(spack.paths.spack_root)
    new_sbang = sbang.sbang_shebang_line()

    # Ordered mapping of install-tree prefixes to view projections.
    prefix_to_projection = OrderedDict()
    prefix_to_projection[spec.prefix] = view.get_projection_for_spec(spec)
    prefix_to_projection[spack.paths.spack_root] = view._root
    for dep in spec.traverse():
        prefix_to_projection[dep.prefix] = view.get_projection_for_spec(dep)

    if spack.relocate.is_binary(dst):
        spack.relocate.relocate_text_bin(binaries=[dst],
                                         prefixes=prefix_to_projection)
    else:
        # Text files additionally get layout-root and sbang rewrites.
        prefix_to_projection[spack.store.layout.root] = view._root
        prefix_to_projection[orig_sbang] = new_sbang
        spack.relocate.relocate_text(files=[dst],
                                     prefixes=prefix_to_projection)
Example #31
0
    def concretize_compiler(self, spec):
        """If the spec already has a compiler, we're done.  If not, then take
           the compiler used for the nearest ancestor with a compiler
           spec and use that.  If the ancestor's compiler is not
           concrete, then give it a valid version.  If there is no
           ancestor with a compiler, use the system default compiler.

           Intuition: Use the system default if no package that depends on
           this one has a strict compiler requirement.  Otherwise, try to
           build with the compiler that will be used by libraries that
           link to this one, to maximize compatibility.

           Returns True if the spec was changed, False otherwise.
        """
        all_compilers = spack.compilers.all_compilers()

        # Already pinned to a known, concrete compiler: nothing to do.
        if (spec.compiler and
            spec.compiler.concrete and
            spec.compiler in all_compilers):
            return False

        try:
            # Nearest ancestor (walking toward the root) with a compiler set.
            nearest = next(p for p in spec.traverse(direction='parents')
                           if p.compiler is not None).compiler

            if nearest not in all_compilers:
                # Take the newest compiler that satisfies the spec
                matches = sorted(spack.compilers.find(nearest))
                if not matches:
                    raise UnavailableCompilerVersionError(nearest)

                # copy concrete version into nearest spec
                nearest.versions = matches[-1].versions.copy()
                assert nearest.concrete

            spec.compiler = nearest.copy()

        except StopIteration:
            # No ancestor pins a compiler: fall back to the system default.
            spec.compiler = spack.compilers.default_compiler().copy()

        return True  # things changed.
示例#32
0
def view_copy(src, dst, view, spec=None):
    """
    Copy a file from src to dst.

    Use spec and view to generate relocations
    """
    shutil.copyfile(src, dst)
    if not spec:
        # Metadata file: a plain copy is all that is needed.
        return

    # Not metadata, we have to relocate it

    # Get information on where to relocate from/to: every dependency
    # prefix maps to its projection inside the view.
    prefix_to_projection = {
        dep.prefix: view.get_projection_for_spec(dep)
        for dep in spec.traverse()
    }

    if spack.relocate.is_binary(dst):
        # relocate binaries
        spack.relocate.relocate_text_bin(
            binaries=[dst],
            orig_install_prefix=spec.prefix,
            new_install_prefix=view.get_projection_for_spec(spec),
            orig_spack=spack.paths.spack_root,
            new_spack=view._root,
            new_prefixes=prefix_to_projection
        )
    else:
        # relocate text
        spack.relocate.relocate_text(
            files=[dst],
            orig_layout_root=spack.store.layout.root,
            new_layout_root=view._root,
            orig_install_prefix=spec.prefix,
            new_install_prefix=view.get_projection_for_spec(spec),
            orig_spack=spack.paths.spack_root,
            new_spack=view._root,
            new_prefixes=prefix_to_projection
        )
示例#33
0
    def concretize_architecture(self, spec):
        """If the spec is empty provide the defaults of the platform. If the
        architecture is not a string type, then check if either the platform,
        target or operating system are concretized. If any of the fields are
        changed then return True. If everything is concretized (i.e the
        architecture attribute is a namedtuple of classes) then return False.
        If the target is a string type, then convert the string into a
        concretized architecture. If it has no architecture and the root of the
        DAG has an architecture, then use the root otherwise use the defaults
        on the platform.
        """
        try:
            # Get the nearest architecture with any fields set
            nearest = next(p for p in spec.traverse(direction='parents')
                           if (p.architecture and p is not spec))
            nearest_arch = nearest.architecture
        except StopIteration:
            # Default to the system architecture if nothing set
            nearest_arch = spack.spec.ArchSpec(spack.architecture.sys_type())

        changed = False

        # ensure type safety for the architecture
        if spec.architecture is None:
            spec.architecture = spack.spec.ArchSpec()
            changed = True

        # Fill each unset field (platform, os, target) from the nearest
        # architecture, one field at a time.
        nearest_dict = nearest_arch.to_cmp_dict()
        for field, value in iteritems(nearest_dict):
            if value and not getattr(spec.architecture, field):
                setattr(spec.architecture, field, getattr(nearest_arch, field))
                changed = True

        return changed
示例#34
0
    def concretize_compiler(self, spec):
        """If the spec already has a compiler, we're done.  If not, then take
           the compiler used for the nearest ancestor with a compiler
           spec and use that.  If the ancestor's compiler is not
           concrete, then give it a valid version.  If there is no
           ancestor with a compiler, use the system default compiler.

           Intuition: Use the system default if no package that depends on
           this one has a strict compiler requirement.  Otherwise, try to
           build with the compiler that will be used by libraries that
           link to this one, to maximize compatibility.
        """
        all_compilers = spack.compilers.all_compilers()

        # Already pinned to a known, concrete compiler: nothing to do.
        # NOTE(review): this version returns None on every path; confirm
        # callers do not rely on a True/False "changed" result.
        if (spec.compiler and
            spec.compiler.concrete and
            spec.compiler in all_compilers):
            return

        try:
            # Nearest ancestor (walking toward the root) with a compiler set.
            nearest = next(p for p in spec.traverse(direction='parents')
                           if p.compiler is not None).compiler

            if nearest not in all_compilers:
                # Take the newest compiler that satisfies the spec
                matches = sorted(spack.compilers.find(nearest))
                if not matches:
                    raise UnavailableCompilerVersionError(nearest)

                # copy concrete version into nearest spec
                nearest.versions = matches[-1].versions.copy()
                assert nearest.concrete

            spec.compiler = nearest.copy()

        except StopIteration:
            # No ancestor pins a compiler: fall back to the system default.
            spec.compiler = spack.compilers.default_compiler().copy()
示例#35
0
    def concretize_compiler_flags(self, spec):
        """
        The compiler flags are updated to match those of the spec whose
        compiler is used, defaulting to no compiler flags in the spec.
        Default specs set at the compiler level will still be added later.
        """
        # Pass on concretizing the compiler flags if the target or operating
        # system is not set.
        if not (spec.architecture.platform_os and spec.architecture.target):
            # We haven't changed, but other changes need to happen before we
            # continue. `return True` here to force concretization to keep
            # running.
            return True

        ret = False
        for flag in spack.spec.FlagMap.valid_compiler_flags():
            try:
                # Nearest ancestor using the same compiler that sets this flag.
                nearest = next(
                    p for p in spec.traverse(direction='parents')
                    if ((p.compiler == spec.compiler and p is not spec)
                        and flag in p.compiler_flags))
                # Merge ancestor flags in unless ours is already a superset.
                if flag not in spec.compiler_flags or \
                        not (sorted(spec.compiler_flags[flag]) >=
                             sorted(nearest.compiler_flags[flag])):
                    if flag in spec.compiler_flags:
                        spec.compiler_flags[flag] = list(
                            set(spec.compiler_flags[flag])
                            | set(nearest.compiler_flags[flag]))
                    else:
                        spec.compiler_flags[flag] = nearest.compiler_flags[
                            flag]
                    ret = True

            except StopIteration:
                # No suitable ancestor sets the flag: inherit from the root
                # of the DAG if it differs from what we have.
                if (flag in spec.root.compiler_flags
                        and ((flag not in spec.compiler_flags)
                             or sorted(spec.compiler_flags[flag]) != sorted(
                                 spec.root.compiler_flags[flag]))):
                    if flag in spec.compiler_flags:
                        spec.compiler_flags[flag] = list(
                            set(spec.compiler_flags[flag])
                            | set(spec.root.compiler_flags[flag]))
                    else:
                        spec.compiler_flags[flag] = spec.root.compiler_flags[
                            flag]
                    ret = True
                else:
                    # Ensure every valid flag key exists, even if empty.
                    if flag not in spec.compiler_flags:
                        spec.compiler_flags[flag] = []

        # Include the compiler flag defaults from the config files
        # This ensures that spack will detect conflicts that stem from a change
        # in default compiler flags.
        compiler = spack.compilers.compiler_for_spec(spec.compiler,
                                                     spec.architecture)
        for flag in compiler.flags:
            if flag not in spec.compiler_flags:
                spec.compiler_flags[flag] = compiler.flags[flag]
                if compiler.flags[flag] != []:
                    ret = True
            else:
                # Union in config defaults unless ours is already a superset.
                if ((sorted(spec.compiler_flags[flag]) != sorted(
                        compiler.flags[flag]))
                        and (not set(spec.compiler_flags[flag]) >= set(
                            compiler.flags[flag]))):
                    ret = True
                    spec.compiler_flags[flag] = list(
                        set(spec.compiler_flags[flag])
                        | set(compiler.flags[flag]))

        return ret
示例#36
0
def get_executable(exe, spec=None, install=False):
    """Find an executable named exe, either in PATH or in Spack

    Args:
        exe (str): needed executable name
        spec (Spec or str): spec to search for exe in (default exe)
        install (bool): install spec if not available

    Raises:
        RuntimeError: if the executable cannot be found anywhere.

    When ``install`` is True, Spack will use the python used to run Spack as an
    external. The ``install`` option should only be used with packages that
    install quickly (when using external python) or are guaranteed by Spack
    organization to be in a binary mirror (clingo)."""
    def _executable_in_prefix(search_spec):
        # Return an Executable for ``exe`` under ``search_spec.prefix`` with
        # the spec's environment modifications attached, or None if no such
        # executable file exists there.
        # filter out directories of the same name as the executable
        exe_path = [exe_p for exe_p in fs.find(search_spec.prefix, exe)
                    if fs.is_exe(exe_p)]
        if not exe_path:
            return None
        found = spack.util.executable.Executable(exe_path[0])
        envmod = EnvironmentModifications()
        for dep in search_spec.traverse(root=True, order='post'):
            envmod.extend(uenv.environment_modifications_for_spec(dep))
        found.add_default_envmod(envmod)
        return found

    def _raise_error(executable, exe_spec):
        error_msg = 'cannot find the executable "{0}"'.format(executable)
        if exe_spec:
            # Bug fix: the spec name was missing its closing quote.
            error_msg += ' from spec "{0}"'.format(exe_spec)
        raise RuntimeError(error_msg)

    # Search the system first
    runner = spack.util.executable.which(exe)
    if runner:
        return runner

    # Check whether it's already installed
    spec = spack.spec.Spec(spec or exe)
    installed_specs = spack.store.db.query(spec, installed=True)
    for ispec in installed_specs:
        runner = _executable_in_prefix(ispec)
        if runner:
            return runner
        tty.warn('Exe %s not found in prefix %s' % (exe, ispec.prefix))

    # If we're not allowed to install this for ourselves, we can't find it
    if not install:
        _raise_error(exe, spec)

    with spack_python_interpreter():
        # We will install for ourselves, using this python if needed
        # Concretize the spec
        spec.concretize()

    spec.package.do_install()
    runner = _executable_in_prefix(spec)
    if runner:
        return runner

    _raise_error(exe, spec)
示例#37
0
    def concretize_compiler_flags(self, spec):
        """
        The compiler flags are updated to match those of the spec whose
        compiler is used, defaulting to no compiler flags in the spec.
        Default specs set at the compiler level will still be added later.

        Returns True if any flag on the spec changed, False otherwise.
        """
        ret = False
        for flag in spack.spec.FlagMap.valid_compiler_flags():
            try:
                # Nearest ancestor using the same compiler that sets this flag.
                nearest = next(
                    p for p in spec.traverse(direction='parents')
                    if ((p.compiler == spec.compiler and p is not spec)
                        and flag in p.compiler_flags))
                if ((flag not in spec.compiler_flags)
                        or sorted(spec.compiler_flags[flag]) != sorted(
                            nearest.compiler_flags[flag])):
                    if flag in spec.compiler_flags:
                        spec.compiler_flags[flag] = list(
                            set(spec.compiler_flags[flag])
                            | set(nearest.compiler_flags[flag]))
                    else:
                        spec.compiler_flags[flag] = nearest.compiler_flags[
                            flag]
                    ret = True

            except StopIteration:
                # No suitable ancestor sets the flag: inherit from the root
                # of the DAG if it differs from what we have.
                if (flag in spec.root.compiler_flags
                        and ((flag not in spec.compiler_flags)
                             or sorted(spec.compiler_flags[flag]) != sorted(
                                 spec.root.compiler_flags[flag]))):
                    if flag in spec.compiler_flags:
                        spec.compiler_flags[flag] = list(
                            set(spec.compiler_flags[flag])
                            | set(spec.root.compiler_flags[flag]))
                    else:
                        spec.compiler_flags[flag] = spec.root.compiler_flags[
                            flag]
                    ret = True
                else:
                    # Ensure every valid flag key exists, even if empty.
                    if flag not in spec.compiler_flags:
                        spec.compiler_flags[flag] = []

        # Include the compiler flag defaults from the config files
        # This ensures that spack will detect conflicts that stem from a change
        # in default compiler flags.
        compiler = spack.compilers.compiler_for_spec(spec.compiler)
        for flag in compiler.flags:
            if flag not in spec.compiler_flags:
                spec.compiler_flags[flag] = compiler.flags[flag]
                if compiler.flags[flag] != []:
                    ret = True
            else:
                # Union in config defaults unless ours is already a superset.
                if ((sorted(spec.compiler_flags[flag]) != sorted(
                        compiler.flags[flag]))
                        and (not set(spec.compiler_flags[flag]) >= set(
                            compiler.flags[flag]))):
                    ret = True
                    spec.compiler_flags[flag] = list(
                        set(spec.compiler_flags[flag])
                        | set(compiler.flags[flag]))

        return ret
示例#38
0
    def write(self):
        """Writes an in-memory environment to its location on disk.

        This will also write out package files for each newly concretized spec.
        """
        # ensure path in var/spack/environments
        fs.mkdirp(self.path)

        if self.specs_by_hash:
            # ensure the prefix/.env directory exists
            fs.mkdirp(self.env_subdir_path)

            for spec in self.new_specs:
                for dep in spec.traverse():
                    if not dep.concrete:
                        raise ValueError('specs passed to environment.write() '
                                         'must be concrete!')

                    # Dump package provenance into a per-namespace repo.
                    root = os.path.join(self.repos_path, dep.namespace)
                    repo = spack.repo.create_or_construct(root, dep.namespace)
                    pkg_dir = repo.dirname_for_package_name(dep.name)

                    fs.mkdirp(pkg_dir)
                    spack.repo.path.dump_provenance(dep, pkg_dir)
            self.new_specs = []

            # write the lock file last
            with fs.write_tmp_and_move(self.lock_path) as f:
                sjson.dump(self._to_lockfile_dict(), stream=f)
        else:
            # No concretized specs: any existing lock file is stale.
            if os.path.exists(self.lock_path):
                os.unlink(self.lock_path)

        # invalidate _repo cache
        self._repo = None

        # put any changes in the definitions in the YAML
        for name, speclist in self.spec_lists.items():
            if name == user_speclist_name:
                # The primary list is handled differently
                continue

            # Only definition entries whose `when:` condition currently
            # evaluates true are eligible for updates.
            conf = config_dict(self.yaml)
            active_yaml_lists = [
                l for l in conf.get('definitions', [])
                if name in l and _eval_conditional(l.get('when', 'True'))
            ]

            # Remove any specs in yaml that are not in internal representation
            for ayl in active_yaml_lists:
                # If it's not a string, it's a matrix. Those can't have changed
                # If it is a string that starts with '$', it's a reference.
                # Those also can't have changed.
                ayl[name][:] = [
                    s for s in ayl.setdefault(name, [])
                    if (not isinstance(s, six.string_types))
                    or s.startswith('$') or Spec(s) in speclist.specs
                ]

            # Put the new specs into the first active list from the yaml
            new_specs = [
                entry for entry in speclist.yaml_list
                if isinstance(entry, six.string_types) and not any(
                    entry in ayl[name] for ayl in active_yaml_lists)
            ]
            list_for_new_specs = active_yaml_lists[0].setdefault(name, [])
            list_for_new_specs[:] = list_for_new_specs + new_specs

        # put the new user specs in the YAML.
        # This can be done directly because there can't be multiple definitions
        # nor when clauses for `specs` list.
        yaml_spec_list = config_dict(self.yaml).setdefault(
            user_speclist_name, [])
        yaml_spec_list[:] = self.user_specs.yaml_list

        # Normalize the view configuration: a single default view collapses
        # to True (default path) or a bare path; otherwise keep a full dict.
        default_name = default_view_name
        if self.views and len(self.views) == 1 and default_name in self.views:
            path = self.default_view.root
            if self.default_view == ViewDescriptor(self.view_path_default):
                view = True
            elif self.default_view == ViewDescriptor(path):
                view = path
            else:
                view = dict((name, view.to_dict())
                            for name, view in self.views.items())
        elif self.views:
            view = dict(
                (name, view.to_dict()) for name, view in self.views.items())
        else:
            view = False

        yaml_dict = config_dict(self.yaml)
        if view is not True:
            # The default case is to keep an active view inside of the
            # Spack environment directory. To avoid cluttering the config,
            # we omit the setting in this case.
            yaml_dict['view'] = view
        elif 'view' in yaml_dict:
            del yaml_dict['view']

        # if all that worked, write out the manifest file at the top level
        with fs.write_tmp_and_move(self.manifest_path) as f:
            _write_yaml(self.yaml, f)

        # TODO: for operations that just add to the env (install etc.) this
        # could just call update_view
        self.regenerate_views()
示例#39
0
    def concretize_compiler_flags(self, spec):
        """
        The compiler flags are updated to match those of the spec whose
        compiler is used, defaulting to no compiler flags in the spec.
        Default specs set at the compiler level will still be added later.
        """
        # Pass on concretizing the compiler flags if the target or operating
        # system is not set.
        if not (spec.architecture.platform_os and spec.architecture.target):
            # We haven't changed, but other changes need to happen before we
            # continue. `return True` here to force concretization to keep
            # running.
            return True

        ret = False
        for flag in spack.spec.FlagMap.valid_compiler_flags():
            try:
                # Nearest ancestor using the same compiler that sets this flag.
                nearest = next(p for p in spec.traverse(direction='parents')
                               if ((p.compiler == spec.compiler and
                                    p is not spec) and
                                   flag in p.compiler_flags))
                # Merge ancestor flags in unless ours is already a superset.
                if flag not in spec.compiler_flags or \
                        not (sorted(spec.compiler_flags[flag]) >=
                             sorted(nearest.compiler_flags[flag])):
                    if flag in spec.compiler_flags:
                        spec.compiler_flags[flag] = list(
                            set(spec.compiler_flags[flag]) |
                            set(nearest.compiler_flags[flag]))
                    else:
                        spec.compiler_flags[
                            flag] = nearest.compiler_flags[flag]
                    ret = True

            except StopIteration:
                # No suitable ancestor sets the flag: inherit from the root
                # of the DAG if it differs from what we have.
                if (flag in spec.root.compiler_flags and
                    ((flag not in spec.compiler_flags) or
                     sorted(spec.compiler_flags[flag]) !=
                     sorted(spec.root.compiler_flags[flag]))):
                    if flag in spec.compiler_flags:
                        spec.compiler_flags[flag] = list(
                            set(spec.compiler_flags[flag]) |
                            set(spec.root.compiler_flags[flag]))
                    else:
                        spec.compiler_flags[
                            flag] = spec.root.compiler_flags[flag]
                    ret = True
                else:
                    # Ensure every valid flag key exists, even if empty.
                    if flag not in spec.compiler_flags:
                        spec.compiler_flags[flag] = []

        # Include the compiler flag defaults from the config files
        # This ensures that spack will detect conflicts that stem from a change
        # in default compiler flags.
        compiler = spack.compilers.compiler_for_spec(
            spec.compiler, spec.architecture)
        for flag in compiler.flags:
            if flag not in spec.compiler_flags:
                spec.compiler_flags[flag] = compiler.flags[flag]
                if compiler.flags[flag] != []:
                    ret = True
            else:
                # Union in config defaults unless ours is already a superset.
                if ((sorted(spec.compiler_flags[flag]) !=
                     sorted(compiler.flags[flag])) and
                    (not set(spec.compiler_flags[flag]) >=
                     set(compiler.flags[flag]))):
                    ret = True
                    spec.compiler_flags[flag] = list(
                        set(spec.compiler_flags[flag]) |
                        set(compiler.flags[flag]))

        return ret