def mirror_add(args):
    """Add a mirror to Spack."""
    url = args.url
    if url.startswith('/'):
        url = 'file://' + url

    mirrors = spack.config.get_config('mirrors', scope=args.scope)
    if not mirrors:
        mirrors = syaml_dict()

    for name, u in mirrors.items():
        if name == args.name:
            tty.die("Mirror with name %s already exists." % name)
        if u == url:
            tty.die("Mirror with url %s already exists." % url)
        # should only be one item per mirror dict.

    items = [(n, u) for n, u in mirrors.items()]
    items.insert(0, (args.name, url))
    mirrors = syaml_dict(items)

    spack.config.update_config('mirrors', mirrors, scope=args.scope)
def get_config(section, scope=None):
    """Get configuration settings for a section.

    If ``scope`` is ``None`` or not provided, return the merged contents
    of all of Spack's configuration scopes.  If ``scope`` is provided,
    return only the configuration as specified in that scope.

    This strips off the top-level name from the YAML section.  That is,
    for a YAML config file that looks like this::

       config:
         install_tree: $spack/opt/spack
         module_roots:
           lmod: $spack/share/spack/lmod

    ``get_config('config')`` will return::

       { 'install_tree': '$spack/opt/spack',
         'module_roots': {
             'lmod': '$spack/share/spack/lmod'
         }
       }

    """
    validate_section_name(section)
    merged_section = syaml.syaml_dict()

    if scope is None:
        scopes = config_scopes.values()
    else:
        scopes = [validate_scope(scope)]

    for scope in scopes:
        # read potentially cached data from the scope.
        data = scope.get_section(section)

        # Skip empty configs
        if not data or not isinstance(data, dict):
            continue

        if section not in data:
            tty.warn("Skipping bad configuration file: '%s'" % scope.path)
            continue

        merged_section = _merge_yaml(merged_section, data)

    # no config files -- empty config.
    if section not in merged_section:
        return {}

    # take the top key off before returning.
    return merged_section[section]
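# Illustrative usage sketch, not part of the original source: the merged view
# combines every configuration scope, while passing a scope name (here the
# assumed scope 'user') restricts the lookup to that single scope.
merged = get_config('config')              # merged across all scopes
user_only = get_config('config', 'user')   # only the 'user' scope
install_tree = merged.get('install_tree')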
def _generate_pkg_config(external_pkg_entries):
    """Generate config according to the packages.yaml schema for a single
    package.

    This does not generate the entire packages.yaml. For example, given some
    external entries for the CMake package, this could return::

        {
            'externals': [{
                'spec': '[email protected]',
                'prefix': '/opt/cmake-3.17.1/'
            }, {
                'spec': '[email protected]',
                'prefix': '/opt/cmake-3.16.5/'
            }]
        }
    """
    pkg_dict = syaml.syaml_dict()
    pkg_dict['externals'] = []
    for e in external_pkg_entries:
        if not _spec_is_valid(e.spec):
            continue

        external_items = [('spec', str(e.spec)), ('prefix', e.base_dir)]
        if e.spec.external_modules:
            external_items.append(('modules', e.spec.external_modules))

        if e.spec.extra_attributes:
            external_items.append(
                ('extra_attributes',
                 syaml.syaml_dict(e.spec.extra_attributes.items()))
            )

        # external_items.extend(e.spec.extra_attributes.items())
        pkg_dict['externals'].append(
            syaml.syaml_dict(external_items)
        )

    return pkg_dict
def generate_module_index(root, modules):
    entries = syaml.syaml_dict()
    for m in modules:
        entry = {
            'path': m.layout.filename,
            'use_name': m.layout.use_name
        }
        entries[m.spec.dag_hash()] = entry
    index = {'module_index': entries}
    index_path = os.path.join(root, 'module-index.yaml')
    llnl.util.filesystem.mkdirp(root)
    with open(index_path, 'w') as index_file:
        syaml.dump(index, index_file, default_flow_style=False)
def _generate_pkg_config(external_pkg_entries):
    """Generate config according to the packages.yaml schema for a single
    package.

    This does not generate the entire packages.yaml. For example, given some
    external entries for the CMake package, this could return::

        {
            'paths': {
                '[email protected]': '/opt/cmake-3.17.1/',
                '[email protected]': '/opt/cmake-3.16.5/'
            }
        }
    """
    paths_dict = syaml.syaml_dict()
    for e in external_pkg_entries:
        if not _spec_is_valid(e.spec):
            continue

        paths_dict[str(e.spec)] = e.base_dir

    pkg_dict = syaml.syaml_dict()
    pkg_dict['paths'] = paths_dict
    return pkg_dict
def test_ordereddict_merge_order():
    """Test that source keys come before dest keys in merge_yaml results."""
    source = syaml.syaml_dict([
        ("k1", "v1"),
        ("k2", "v2"),
        ("k3", "v3"),
    ])

    dest = syaml.syaml_dict([
        ("k4", "v4"),
        ("k3", "WRONG"),
        ("k5", "v5"),
    ])

    result = spack.config.merge_yaml(dest, source)
    assert "WRONG" not in result.values()

    expected_keys = ["k1", "k2", "k3", "k4", "k5"]
    expected_items = [
        ("k1", "v1"), ("k2", "v2"), ("k3", "v3"), ("k4", "v4"), ("k5", "v5")
    ]
    assert expected_keys == list(result.keys())
    assert expected_items == list(result.items())
def _get_config_memoized(self, section, scope):
    _validate_section_name(section)

    if scope is None:
        scopes = self.scopes.values()
    else:
        scopes = [self._validate_scope(scope)]

    merged_section = syaml.syaml_dict()
    for scope in scopes:
        # read potentially cached data from the scope.
        data = scope.get_section(section)

        # Skip empty configs
        if not data or not isinstance(data, dict):
            continue

        if section not in data:
            continue

        # We might be reading configuration files in an old format,
        # thus read data and update it in memory if need be.
        changed = _update_in_memory(data, section)
        if changed:
            self.format_updates[section].append(scope)

        merged_section = merge_yaml(merged_section, data)

    # no config files -- empty config.
    if section not in merged_section:
        return syaml.syaml_dict()

    # take the top key off before returning.
    ret = merged_section[section]
    if isinstance(ret, dict):
        ret = syaml.syaml_dict(ret)
    return ret
def mirror_remove(args):
    """Remove a mirror by name."""
    name = args.name

    mirrors = spack.config.get_config('mirrors', scope=args.scope)
    if not mirrors:
        mirrors = syaml_dict()

    if name not in mirrors:
        tty.die("No mirror with name %s" % name)

    old_value = mirrors.pop(name)
    spack.config.update_config('mirrors', mirrors, scope=args.scope)
    tty.msg("Removed mirror %s with url %s" % (name, old_value))
def add(name, url, scope, args={}):
    """Add a named mirror in the given scope"""
    mirrors = spack.config.get('mirrors', scope=scope)
    if not mirrors:
        mirrors = syaml_dict()

    if name in mirrors:
        tty.die("Mirror with name %s already exists." % name)

    items = [(n, u) for n, u in mirrors.items()]
    mirror_data = url
    key_values = ["s3_access_key_id", "s3_access_token", "s3_profile"]
    # On creation, assume connection data is set for both
    if any(value for value in key_values if value in args):
        url_dict = {"url": url,
                    "access_pair": (args.s3_access_key_id,
                                    args.s3_access_key_secret),
                    "access_token": args.s3_access_token,
                    "profile": args.s3_profile,
                    "endpoint_url": args.s3_endpoint_url}
        mirror_data = {"fetch": url_dict, "push": url_dict}

    items.insert(0, (name, mirror_data))
    mirrors = syaml_dict(items)
    spack.config.set('mirrors', mirrors, scope=scope)
def _mark_overrides(data):
    if isinstance(data, list):
        return syaml.syaml_list(_mark_overrides(elt) for elt in data)

    elif isinstance(data, dict):
        marked = syaml.syaml_dict()
        for key, val in iteritems(data):
            if isinstance(key, string_types) and key.endswith(':'):
                key = syaml.syaml_str(key[:-1])
                key.override = True
            marked[key] = _mark_overrides(val)
        return marked

    else:
        return data
def to_dict_or_value(self):
    """Returns a dict or a value representing the current target.

    String values are used to keep backward compatibility with generic
    targets, like e.g. x86_64 or ppc64. More specific micro-architectures
    will return a dictionary which contains information on the name,
    features, vendor, generation and parents of the current target.
    """
    # Generic targets represent either an architecture
    # family (like x86_64) or a custom micro-architecture
    if self.microarchitecture.vendor == 'generic':
        return str(self)

    return syaml_dict(
        self.microarchitecture.to_dict(return_list_of_items=True))
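# Illustrative sketch of the two return shapes, with assumed example values
# (not taken from the original source):
#
#   generic family     -> 'x86_64'
#   micro-architecture -> syaml_dict([('name', 'haswell'),
#                                     ('vendor', 'GenuineIntel'),
#                                     ('features', [...]),
#                                     ('generation', 0),
#                                     ('parents', [...])])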
def update_config(self, section, update_data, scope=None, force=False):
    """Update the configuration file for a particular scope.

    Overwrites contents of a section in a scope with update_data,
    then writes out the config file.

    update_data should have the top-level section name stripped off
    (it will be re-added).  Data itself can be a list, dict, or any
    other yaml-ish structure.

    Configuration scopes that are still written in an old schema
    format will fail to update unless ``force`` is True.

    Args:
        section (str): section of the configuration to be updated
        update_data (dict): data to be used for the update
        scope (str): scope to be updated
        force (bool): force the update
    """
    if self.format_updates.get(section) and not force:
        msg = ('The "{0}" section of the configuration needs to be written'
               ' to disk, but is currently using a deprecated format. '
               'Please update it using:\n\n'
               '\tspack config [--scope=<scope>] update {0}\n\n'
               'Note that previous versions of Spack will not be able to '
               'use the updated configuration.')
        msg = msg.format(section)
        raise RuntimeError(msg)

    _validate_section_name(section)  # validate section name
    scope = self._validate_scope(scope)  # get ConfigScope object

    # manually preserve comments
    need_comment_copy = (section in scope.sections and
                         scope.sections[section] is not None)
    if need_comment_copy:
        comments = getattr(scope.sections[section][section],
                           yaml.comments.Comment.attrib,
                           None)

    # read only the requested section's data.
    scope.sections[section] = syaml.syaml_dict({section: update_data})
    if need_comment_copy and comments:
        setattr(scope.sections[section][section],
                yaml.comments.Comment.attrib,
                comments)

    scope._write_section(section)
def generate_module_index(root, modules, overwrite=False):
    index_path = os.path.join(root, 'module-index.yaml')
    if overwrite or not os.path.exists(index_path):
        entries = syaml.syaml_dict()
    else:
        with open(index_path) as index_file:
            yaml_content = syaml.load(index_file)
            entries = yaml_content['module_index']

    for m in modules:
        entry = {'path': m.layout.filename, 'use_name': m.layout.use_name}
        entries[m.spec.dag_hash()] = entry
    index = {'module_index': entries}
    llnl.util.filesystem.mkdirp(root)
    with open(index_path, 'w') as index_file:
        syaml.dump(index, default_flow_style=False, stream=index_file)
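# Sketch of the module-index.yaml layout written above; the hash, path, and
# use_name are illustrative placeholders, not from the original source:
#
#   module_index:
#     <spec dag hash>:
#       path: /path/to/modulefile
#       use_name: gcc/9.3.0-abcdefg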
def manifest(self):
    """The spack.yaml file that should be used in the image"""
    import jsonschema

    # Copy in the part of spack.yaml prescribed in the configuration file
    manifest = copy.deepcopy(self.config)
    manifest.pop('container')

    # Ensure that a few paths are where they need to be
    manifest.setdefault('config', syaml.syaml_dict())
    manifest['config']['install_tree'] = self.paths.store
    manifest['view'] = self.paths.view
    manifest = {'spack': manifest}

    # Validate the manifest file
    jsonschema.validate(manifest, schema=spack.schema.env.schema)

    return syaml.dump(manifest, default_flow_style=False).strip()
def _mark_internal(data, name):
    """Add a simple name mark to raw YAML/JSON data.

    This is used by `spack config blame` to show where config lines came from.
    """
    if isinstance(data, dict):
        d = syaml.syaml_dict((_mark_internal(k, name), _mark_internal(v, name))
                             for k, v in data.items())
    elif isinstance(data, list):
        d = syaml.syaml_list(_mark_internal(e, name) for e in data)
    else:
        d = syaml.syaml_type(data)

    if syaml.markable(d):
        d._start_mark = yaml.Mark(name, None, None, None, None, None)
        d._end_mark = yaml.Mark(name, None, None, None, None, None)

    return d
def test_view_multiple_projections(tmpdir, mock_packages, mock_archive,
                                   mock_fetch, config, install_mockery):
    install('libdwarf@20130207')
    install('[email protected]%gcc')

    viewpath = str(tmpdir.mkdir('view'))
    view_projection = s_yaml.syaml_dict(
        [('extendee', '{name}-{compiler.name}'),
         ('all', '{name}-{version}')]
    )

    projection_file = create_projection_file(tmpdir, view_projection)
    view('add', viewpath, '--projection-file={0}'.format(projection_file),
         'libdwarf', 'extendee')

    libdwarf_prefix = os.path.join(viewpath, 'libdwarf-20130207/libdwarf')
    extendee_prefix = os.path.join(viewpath, 'extendee-gcc/bin')
    assert os.path.exists(libdwarf_prefix)
    assert os.path.exists(extendee_prefix)
def tutorial(parser, args):
    if not spack.cmd.spack_is_git_repo():
        tty.die("This command requires a git installation of Spack!")

    if not args.yes_to_all:
        tty.msg("This command will set up Spack for the tutorial at "
                "https://spack-tutorial.readthedocs.io.",
                "")
        tty.warn("This will modify your Spack configuration by:",
                 "  - deleting some configuration in ~/.spack",
                 "  - adding a mirror and trusting its public key",
                 "  - checking out a particular branch of Spack",
                 "")
        if not tty.get_yes_or_no("Are you sure you want to proceed?"):
            tty.die("Aborted")

    rm_cmds = ["rm -f %s" % f for f in rm_configs]
    tty.msg("Reverting compiler and repository configuration", *rm_cmds)
    for path in rm_configs:
        if os.path.exists(path):
            shutil.rmtree(path, ignore_errors=True)

    tty.msg("Ensuring that the tutorial binary mirror is configured:",
            "spack mirror add tutorial %s" % tutorial_mirror)
    mirror_config = syaml_dict()
    mirror_config["tutorial"] = tutorial_mirror
    spack.config.set('mirrors', mirror_config, scope="user")

    tty.msg("Ensuring that we trust tutorial binaries",
            "spack gpg trust %s" % tutorial_key)
    spack.util.gpg.trust(tutorial_key)

    # Note that checkout MUST be last. It changes Spack under our feet.
    # If you don't put this last, you'll get import errors for the code
    # that follows (exacerbated by the various lazy singletons we use)
    tty.msg("Ensuring we're on the releases/v{0}.{1} branch".format(
        *spack.spack_version_info[:2]
    ))
    git = which("git", required=True)
    with working_dir(spack.paths.prefix):
        git("checkout", tutorial_branch)
def write_includes(self, path):
    abspath = os.path.abspath(path)
    sections = list(spack.config.section_schemas.keys())
    data = syaml.syaml_dict()
    try:
        for s in sections:
            # we have to check that there is data in each scope
            # or else ill-formatted output can occur
            has_data = False
            for scope in self.config.scopes.values():
                if scope.get_section(s) is not None:
                    has_data = True
            if has_data:
                temp = self.config.get_config(s)
                data[s] = temp
    except (yaml.YAMLError, IOError):
        raise spack.config.ConfigError("Error reading configuration: %s" % s)

    with open(abspath, 'w') as fout:
        syaml.dump_config(data, stream=fout, default_flow_style=False,
                          blame=False)
def set(self, path, value, scope=None):
    """Convenience function for setting single values in config files.

    Accepts the path syntax described in ``get()``.
    """
    if ':' not in path:
        # handle bare section name as path
        self.update_config(path, value, scope=scope)
        return

    parts = process_config_path(path)
    section = parts.pop(0)

    section_data = self.get_config(section, scope=scope)

    data = section_data
    while len(parts) > 1:
        key = parts.pop(0)

        if _override(key):
            new = type(data[key])()
            del data[key]
        else:
            new = data[key]

        if isinstance(new, dict):
            # Make it an ordered dict
            new = syaml.syaml_dict(new)
            # reattach to parent object
            data[key] = new
        data = new

    if _override(parts[0]):
        data.pop(parts[0], None)

    # update new value
    data[parts[0]] = value

    self.update_config(section, section_data, scope=scope)
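# Illustrative usage sketch; `config` stands in for a Configuration instance
# and is an assumption, not part of the original source. A bare section name
# replaces the whole section, while a colon-separated path sets one nested key.
config.set('config', {'install_tree': '/opt/spack'})   # whole section
config.set('config:install_tree', '/opt/spack')        # single nested key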
def test_compiler_config_modifications(monkeypatch):
    s = spack.spec.Spec('cmake')
    s.concretize()
    pkg = s.package

    os.environ['SOME_VAR_STR'] = ''
    os.environ['SOME_VAR_NUM'] = '0'
    os.environ['PATH_LIST'] = '/path/third:/path/forth'
    os.environ['EMPTY_PATH_LIST'] = ''
    os.environ.pop('NEW_PATH_LIST', None)

    env_mod = syaml_dict()
    set_cmd = syaml_dict()
    env_mod[syaml_str('set')] = set_cmd

    set_cmd[syaml_str('SOME_VAR_STR')] = syaml_str('SOME_STR')
    set_cmd[syaml_str('SOME_VAR_NUM')] = 1

    monkeypatch.setattr(pkg.compiler, 'environment', env_mod)
    spack.build_environment.setup_package(pkg, False)
    assert os.environ['SOME_VAR_STR'] == 'SOME_STR'
    assert os.environ['SOME_VAR_NUM'] == str(1)

    env_mod = syaml_dict()
    unset_cmd = syaml_dict()
    env_mod[syaml_str('unset')] = unset_cmd

    unset_cmd[syaml_str('SOME_VAR_STR')] = None

    monkeypatch.setattr(pkg.compiler, 'environment', env_mod)
    assert 'SOME_VAR_STR' in os.environ
    spack.build_environment.setup_package(pkg, False)
    assert 'SOME_VAR_STR' not in os.environ

    env_mod = syaml_dict()
    set_cmd = syaml_dict()
    env_mod[syaml_str('set')] = set_cmd
    append_cmd = syaml_dict()
    env_mod[syaml_str('append-path')] = append_cmd
    unset_cmd = syaml_dict()
    env_mod[syaml_str('unset')] = unset_cmd
    prepend_cmd = syaml_dict()
    env_mod[syaml_str('prepend-path')] = prepend_cmd

    set_cmd[syaml_str('EMPTY_PATH_LIST')] = syaml_str('/path/middle')

    append_cmd[syaml_str('PATH_LIST')] = syaml_str('/path/last')
    append_cmd[syaml_str('EMPTY_PATH_LIST')] = syaml_str('/path/last')
    append_cmd[syaml_str('NEW_PATH_LIST')] = syaml_str('/path/last')

    unset_cmd[syaml_str('SOME_VAR_NUM')] = None

    prepend_cmd[syaml_str('PATH_LIST')] = syaml_str('/path/first:/path/second')
    prepend_cmd[syaml_str('EMPTY_PATH_LIST')] = syaml_str('/path/first')
    prepend_cmd[syaml_str('NEW_PATH_LIST')] = syaml_str('/path/first')
    prepend_cmd[syaml_str('SOME_VAR_NUM')] = syaml_str('/8')

    assert 'SOME_VAR_NUM' in os.environ

    monkeypatch.setattr(pkg.compiler, 'environment', env_mod)
    spack.build_environment.setup_package(pkg, False)

    # Check that the order of modifications is respected and the
    # variable was unset before it was prepended.
    assert os.environ['SOME_VAR_NUM'] == '/8'

    expected = '/path/first:/path/second:/path/third:/path/forth:/path/last'
    assert os.environ['PATH_LIST'] == expected

    expected = '/path/first:/path/middle:/path/last'
    assert os.environ['EMPTY_PATH_LIST'] == expected

    expected = '/path/first:/path/last'
    assert os.environ['NEW_PATH_LIST'] == expected

    os.environ.pop('SOME_VAR_STR', None)
    os.environ.pop('SOME_VAR_NUM', None)
    os.environ.pop('PATH_LIST', None)
    os.environ.pop('EMPTY_PATH_LIST', None)
    os.environ.pop('NEW_PATH_LIST', None)
def add_pr_mirror(url):
    cfg_scope = cfg.default_modify_scope()
    mirrors = cfg.get('mirrors', scope=cfg_scope)
    items = [(n, u) for n, u in mirrors.items()]
    items.insert(0, ('ci_pr_mirror', url))
    cfg.set('mirrors', syaml.syaml_dict(items), scope=cfg_scope)
def clear(self):
    """Empty cached config information."""
    self.sections = syaml.syaml_dict()
def to_dict(self):
    return syaml_dict([('name', self.name), ('version', self.version)])
def __init__(self, name, path):
    self.name = name  # scope name.
    self.path = path  # path to directory containing configs.
    self.sections = syaml.syaml_dict()  # sections read from config files.
def to_dict(self):
    return syaml_dict((
        ('platform', str(self.platform) if self.platform else None),
        ('platform_os',
         str(self.platform_os) if self.platform_os else None),
        ('target', str(self.target) if self.target else None)))
def to_dict(self):
    if self._push_url is None:
        return self._fetch_url
    else:
        return syaml_dict([('fetch', self._fetch_url),
                           ('push', self._push_url)])
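# Illustrative sketch of the two shapes produced above; the URLs are
# placeholders, not from the original source:
#
#   fetch-only mirror  -> 'https://mirror.example.com/spack'
#   separate push URL  -> syaml_dict([('fetch', 'https://mirror.example.com/spack'),
#                                     ('push', 's3://example-bucket/spack')])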
def to_dict(self, recursive=False):
    return syaml_dict(
        sorted(
            ((k, (v.to_dict() if recursive else v))
             for (k, v) in self._mirrors.items()),
            key=operator.itemgetter(0)))
def export(parser, args):
    q_args = {"explicit": True if args.explicit else any}
    specs = args.specs(**q_args)

    # Exit early if no package matches the constraint
    if not args.specs and args.constraint:
        msg = "No package matches the query: {0}"
        msg = msg.format(' '.join(args.constraint))
        tty.msg(msg)
        return

    packages = spack.config.get('packages', scope=args.scope)

    # If tags have been specified on the command line, filter by tags
    if args.tags:
        packages_with_tags = spack.repo.path.packages_with_tags(*args.tags)
        specs = [x for x in specs if x.name in packages_with_tags]

    if args.exclude:
        specs = set(filter_exclude(specs, args.exclude))

    cls = None
    if args.module:
        cls = spack.modules.module_types[args.module]

    # Collect packages per package name
    pkgs = {}
    for spec in specs:
        pkgs.setdefault(spec.name, []).append(spec)

    pymods = {}

    # Dump per package, make sure that none are forgotten
    for pkg, pkg_specs in pkgs.items():
        paths = syaml_dict()
        modules = syaml_dict()
        package = packages.setdefault(pkg, syaml_dict())
        versions = None
        if 'version' in package:
            versions = [str(v) for v in package['version']]

        for spec in pkg_specs:
            key = spec.format(args.format)

            sflags = []
            bflags = []
            for k, v in spec.variants.items():
                default = None
                if k in spec.package.variants:
                    default = spec.package.variants[k].default
                if v.value != default or args.variants == 'all':
                    if v.value in (True, False):
                        bflags.append(v)
                    elif v.name != 'patches':
                        sflags.append(v)
            sflags = ' '.join(str(f) for f in sorted(sflags))
            bflags = ''.join(str(f) for f in sorted(bflags))
            key = ' '.join([e for e in (key, sflags, bflags) if len(e) > 0])
            key = str(key)

            if isinstance(spec.package, PythonPackage):
                py = spec['python']
                if args.dependencies:
                    key += " ^{0}".format(py.format("$_$@"))
                if not spec.package.is_activated(py.package.view()):
                    # For external packages, setup_environment is not
                    # called, and thus they are not included in
                    # PYTHON_PATH.
                    msg = "python package not activated, skipping: {0}"
                    msg = msg.format(spec.format("$_$@"))
                    tty.warn(msg)
                    # paths[key] = str(spec.prefix)
                else:
                    mod = pymods.setdefault(py, cls(py) if cls else None)
                    if mod and not mod.conf.blacklisted:
                        if os.path.exists(mod.layout.filename):
                            paths[key] = '/activated'
                            # modules[key] = mod.layout.use_name
                            # paths[key] = str(spec.prefix)
                        else:
                            msg = ("python package activated in inactive "
                                   "module, skipping: {0}")
                            msg = msg.format(spec.format("$_$@"))
                            tty.warn(msg)
                            continue
            else:
                mod = cls(spec) if cls else None
                if mod and not mod.conf.blacklisted:
                    if os.path.exists(mod.layout.filename):
                        modules[key] = str(mod.layout.use_name)
                    else:
                        msg = "module not present for {0}"
                        msg = msg.format(spec.format("$_$@"))
                        tty.warn(msg)

                # Even with modules, the path needs to be present to, e.g.,
                # have `spack setup` work!
                paths.setdefault(key, []).append(spec)

            if versions and str(spec.version) not in versions:
                versions.append(str(spec.version))

        if versions:
            package['version'] = syaml_list(sorted(versions, reverse=True))

        if len(paths) > 0:
            def install_date(s):
                _, record = spack.store.db.query_by_spec_hash(s.dag_hash())
                return record.installation_time

            for k in paths.keys():
                values = paths[k]
                if values == '/activated':
                    continue
                paths[k] = str(
                    sorted(values, key=install_date, reverse=True)[0].prefix)
            package.setdefault('paths', syaml_dict()).update(paths)

        if len(modules) > 0:
            package.setdefault('modules', syaml_dict()).update(modules)

    # Trim empty items from the yaml
    for cfg in packages.values():
        for k, v in list(cfg.items()):
            if (k == 'buildable' and v) or \
                    (hasattr(v, '__iter__') and len(v) == 0):
                del cfg[k]

    # Restore ordering
    packages = syaml_dict(
        sorted((k, v) for (k, v) in packages.items() if len(v) > 0))
    if 'all' in packages:
        packages['all'] = packages.pop('all')

    yaml.dump({'packages': packages}, stream=sys.stdout,
              default_flow_style=False, Dumper=PackagesDumper)
def to_dict(self):
    str_or_none = lambda v: str(v) if v else None
    d = syaml_dict([
        ('platform', str_or_none(self.platform)),
        ('platform_os', str_or_none(self.os)),
        ('target', self.target.to_dict_or_value())])
    return syaml_dict([('arch', d)])
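# Illustrative sketch of the nested dict produced above; the values are
# placeholders, not from the original source:
#
#   syaml_dict([('arch', syaml_dict([('platform', 'linux'),
#                                    ('platform_os', 'ubuntu20.04'),
#                                    ('target', 'x86_64')]))])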
def to_dict(self):
    """Generate human-readable dict for YAML."""
    if self.concrete:
        return syaml_dict([('version', str(self[0]))])
    else:
        return syaml_dict([('versions', [str(v) for v in self])])