def _read_config_file(filename, schema):
    """Load a YAML configuration file and validate it against ``schema``."""
    # A nonexistent file is not an error; callers treat it as "no config".
    if not os.path.exists(filename):
        return None

    if not os.path.isfile(filename):
        raise ConfigFileError(
            "Invalid configuration. %s exists but is not a file." % filename)

    if not os.access(filename, os.R_OK):
        raise ConfigFileError("Config file is not readable: %s" % filename)

    try:
        tty.debug("Reading config file %s" % filename)
        with open(filename) as stream:
            loaded = syaml.load_config(stream)
        # Empty files are returned as-is without validation.
        if loaded:
            validate(loaded, schema)
        return loaded
    except MarkedYAMLError as err:
        raise ConfigFileError(
            "Error parsing yaml%s: %s" % (str(err.context_mark), err.problem))
    except IOError as err:
        raise ConfigFileError(
            "Error reading configuration file %s: %s" % (filename, str(err)))
def test_external_module(self, monkeypatch):
    """Test that packages can find externals specified by module

    The specific code for parsing the module is tested elsewhere.
    This just tests that the preference is accounted for"""
    # Replace the real module command with one that reports a dummy path.
    def fake_module(cmd, module):
        return 'prepend-path PATH /dummy/path'
    monkeypatch.setattr(spack.util.module_cmd, 'module', fake_module)

    # Without configuration, 'mpi' must not concretize to an external.
    plain_spec = Spec('mpi')
    plain_spec.concretize()
    assert not plain_spec['mpi'].external

    # Register a non-buildable external provider found via a module.
    external_conf = syaml.load_config("""\
all:
  providers:
    mpi: [mpich]
mpi:
  buildable: false
  externals:
  - spec: [email protected]
    modules: [dummy]
""")
    spack.config.set('packages', external_conf, scope='concretize')

    # Once the config is in place the external path must be used.
    external_spec = Spec('mpi')
    external_spec.concretize()
    assert external_spec['mpich'].external_path == '/dummy/path'
def test_external_prefixes_last(mutable_config, mock_packages, working_env,
                                monkeypatch):
    """External package link directories must be ordered last."""
    # Sanity check: under normal circumstances paths associated with
    # dt-diamond-left would appear first. We'll mark it as external in
    # the test to check if the associated paths are placed last.
    assert 'dt-diamond-left' < 'dt-diamond-right'

    external_cfg = syaml.load_config("""\
dt-diamond-left:
  externals:
  - spec: [email protected]
    prefix: /fake/path1
  buildable: false
""")
    spack.config.set("packages", external_cfg)
    top = spack.spec.Spec('dt-diamond').concretized()

    # The fake prefix does not exist; pretend every path is a directory.
    def _trust_me_its_a_dir(path):
        return True
    monkeypatch.setattr(os.path, 'isdir', _trust_me_its_a_dir)

    modifications = EnvironmentModifications()
    spack.build_environment.set_wrapper_variables(top.package, modifications)
    modifications.apply_modifications()

    link_dirs = os.environ['SPACK_LINK_DIRS'].split(':')
    external_lib_paths = set(['/fake/path1/lib', '/fake/path1/lib64'])
    # The external lib paths should be the last two entries of the list and
    # should not appear anywhere before the last two entries
    tail = set(os.path.normpath(p) for p in link_dirs[-2:])
    head = set(os.path.normpath(p) for p in link_dirs[:-2])
    assert tail == external_lib_paths
    assert not (head & external_lib_paths)
def _impl(filename):
    """Patch module configuration from the named test YAML file."""
    config_path = os.path.join(root_for_conf, filename + '.yaml')
    with open(config_path) as stream:
        parsed = syaml.load_config(stream)

    # Whole-configuration accessor used by spack.modules.common.
    def fake_configuration():
        return parsed

    # Writer-specific slice of the configuration.
    def fake_writer_configuration():
        return fake_configuration()[writer_key]

    monkeypatch.setattr(
        spack.modules.common, 'configuration', fake_configuration
    )
    monkeypatch.setattr(
        writer_mod, 'configuration', fake_writer_configuration
    )
    # Reset any cached configuration the writer may have registered.
    monkeypatch.setattr(
        writer_mod, 'configuration_registry', {}
    )
def add(fullpath, scope=None):
    """Add the given configuration to the specified config scope.
    Add accepts a path. If you want to add from a filename, use add_from_file"""
    components = process_config_path(fullpath)

    has_existing_value = True
    path = ''
    override = False
    # Walk every component except the last (which is the value to set),
    # building up `path` until we fall off the end of the existing config.
    for idx, name in enumerate(components[:-1]):
        # First handle double colons in constructing path
        # ('::' marks an override of the previous component).
        colon = '::' if override else ':' if path else ''
        path += colon + name
        if getattr(name, 'override', False):
            override = True
        else:
            override = False

        # Test whether there is an existing value at this level
        existing = get(path, scope=scope)

        if existing is None:
            has_existing_value = False
            # We've nested further than existing config, so we need the
            # type information for validation to know how to handle bare
            # values appended to lists.
            existing = get_valid_type(path)

            # construct value from this point down: parse the final
            # component as YAML, then wrap it in the remaining
            # intermediate keys (innermost first).
            value = syaml.load_config(components[-1])
            for component in reversed(components[idx + 1:-1]):
                value = {component: value}
            break

    if has_existing_value:
        # The whole path already exists: split off the last component as
        # the value and re-fetch the existing value at the parent path.
        path, _, value = fullpath.rpartition(':')
        value = syaml.load_config(value)
        existing = get(path, scope=scope)

    # append values to lists
    if isinstance(existing, list) and not isinstance(value, list):
        value = [value]

    # merge value into existing
    new = merge_yaml(existing, value)
    config.set(path, new, scope)
def __call__(self, filename):
    """Patch module configuration from the named test YAML file."""
    config_path = os.path.join(self.root_for_conf, filename + '.yaml')
    with open(config_path) as stream:
        fake_config = MockConfig(syaml.load_config(stream), self.writer_key)

    patch = self.monkeypatch.setattr
    patch(spack.modules.common, 'configuration',
          fake_config.configuration)
    patch(self.writer_mod, 'configuration',
          fake_config.writer_configuration)
    # Reset any cached configuration the writer may have registered.
    patch(self.writer_mod, 'configuration_registry', {})
def configure_user_perms():
    """Set packages config: world-readable, user-writable permissions."""
    perms_yaml = """\
all:
  permissions:
    read: world
    write: user
"""
    spack.config.set('packages', syaml.load_config(perms_yaml), scope='user')

    yield
def test_all_is_not_a_virtual(self):
    """Verify that `all` is allowed in packages.yaml."""
    packages_yaml = syaml.load_config("""\
all:
  variants: [+mpi]
""")
    spack.config.set('packages', packages_yaml, scope='concretize')

    # should be no error for 'all':
    spack.package_prefs.get_packages_config()
def test_buildable_false_all(self):
    """A global `buildable: false` marks every package non-buildable."""
    packages_yaml = syaml.load_config("""\
all:
  buildable: false
""")
    spack.config.set('packages', packages_yaml, scope='concretize')

    for pkg_name in ('libelf', 'mpich'):
        assert not spack.package_prefs.is_spec_buildable(Spec(pkg_name))
def configure_group_perms():
    """Set packages config: group-writable, using the caller's group."""
    group_name = grp.getgrgid(os.getegid()).gr_name
    perms_yaml = """\
all:
  permissions:
    read: world
    write: group
    group: {0}
""".format(group_name)
    spack.config.set('packages', syaml.load_config(perms_yaml), scope='user')

    yield
def test_no_virtuals_in_packages_yaml(self):
    """Verify that virtuals are not allowed in packages.yaml."""
    # set up a packages.yaml file with a vdep as a key. We use
    # syaml.load_config here to make sure source lines in the config are
    # attached to parsed strings, as the error message uses them.
    vdep_conf = syaml.load_config("""\
mpi:
  paths:
    [email protected]: /path/to/lapack
""")
    spack.config.set('packages', vdep_conf, scope='concretize')

    # now when we get the packages.yaml config, there should be an error
    with pytest.raises(spack.package_prefs.VirtualInPackagesYAMLError):
        spack.package_prefs.get_packages_config()
def data():
    """Returns the data loaded from a test file"""
    yaml_text = """\
config_file:
  x86_64:
    foo: /path/to/foo
    bar: /path/to/bar
    baz: /path/to/baz
  some_list:
  - item 1
  - item 2
  - item 3
  another_list: [ 1, 2, 3 ]
  some_key: some_string
"""
    return syaml.load_config(yaml_text)
def config_updater(cfg_type, cfg_file):
    """Upgrade a configuration file to the latest schema format.

    Courtesy of Greg Becker.
    """
    # Get a function to update the format
    update_fn = spack.config.ensure_latest_format_fn(cfg_type)

    with open(cfg_file) as src:
        contents = syaml.load_config(src) or {}
        section_data = contents.pop(cfg_type, {})

    update_fn(section_data)

    # Keep a backup copy of the original before rewriting the file.
    bkp_file = cfg_file + '.bkp'
    shutil.copy(cfg_file, bkp_file)

    with open(cfg_file, 'w') as dst:
        syaml.dump_config({cfg_type: section_data}, stream=dst,
                          default_flow_style=False)

    tty.msg('File "{0}" updated [backup={1}]'.format(cfg_file, bkp_file))
def read_config_file(filename, schema=None):
    """Read a YAML configuration file.

    User can provide a schema for validation. If no schema is provided,
    we will infer the schema from the top-level key."""
    # Dev: Inferring schema and allowing it to be provided directly allows us
    # to preserve flexibility in calling convention (don't need to provide
    # schema when it's not necessary) while allowing us to validate against a
    # known schema when the top-level key could be incorrect.
    # A nonexistent file is not an error; callers treat it as "no config".
    if not os.path.exists(filename):
        return None

    if not os.path.isfile(filename):
        raise ConfigFileError(
            "Invalid configuration. %s exists but is not a file." % filename)

    if not os.access(filename, os.R_OK):
        raise ConfigFileError("Config file is not readable: %s" % filename)

    try:
        tty.debug("Reading config file %s" % filename)
        with open(filename) as stream:
            loaded = syaml.load_config(stream)
        if loaded:
            if not schema:
                # Infer the schema from the top-level key of the data.
                top_key = next(iter(loaded))
                schema = all_schemas[top_key]
            validate(loaded, schema)
        return loaded
    except StopIteration:
        # next() above found no top-level key to infer a schema from.
        raise ConfigFileError(
            "Config file is empty or is not a valid YAML dict: %s" % filename)
    except MarkedYAMLError as err:
        raise ConfigFileError(
            "Error parsing yaml%s: %s" % (str(err.context_mark), err.problem))
    except IOError as err:
        raise ConfigFileError(
            "Error reading configuration file %s: %s" % (filename, str(err)))
def check_compiler_yaml_version():
    """Move out-of-date compilers.yaml files out of the way.

    Scans every configuration file scope for a ``compilers.yaml`` written
    in the old format (not a list, or entries missing the
    ``operating_system`` key) and renames such files to
    ``_old_compilers.yaml`` with a warning, so a fresh compilers config
    can be generated.
    """
    config = spack.config.config

    for scope in config.file_scopes:
        file_name = os.path.join(scope.path, 'compilers.yaml')
        data = None
        if os.path.isfile(file_name):
            with open(file_name) as f:
                data = syaml.load_config(f)

        if not data:
            continue

        compilers = data.get('compilers')
        # An empty/missing 'compilers' section needs no migration; a
        # non-empty sequence is truthy, so the old `len(...) > 0` check
        # was redundant.
        if not compilers:
            continue

        # Old format: either not a list at all, or entries missing the
        # 'operating_system' key required by the current schema.
        if (not isinstance(compilers, list) or
                'operating_system' not in compilers[0]['compiler']):
            new_file = os.path.join(scope.path, '_old_compilers.yaml')
            # Fixed ungrammatical user-facing warning text.
            tty.warn('%s is in an out of date compilers format. '
                     'Moved to %s. Spack will automatically generate '
                     'a compilers config file.' % (file_name, new_file))
            os.rename(file_name, new_file)
def configure_permissions():
    """Set per-package permissions config covering several packages."""
    perms_config = syaml.load_config("""\
all:
  permissions:
    read: group
    write: group
    group: all
mpich:
  permissions:
    read: user
    write: user
mpileaks:
  permissions:
    write: user
    group: mpileaks
callpath:
  permissions:
    write: world
""")
    spack.config.set('packages', perms_config, scope='concretize')

    yield
def test_external_mpi(self):
    """A non-buildable external provider must satisfy a virtual dep."""
    # Without configuration, 'mpi' must not concretize to an external.
    plain_spec = Spec('mpi')
    plain_spec.concretize()
    assert not plain_spec['mpi'].external

    # Register mpich as a non-buildable external provider of mpi.
    external_conf = syaml.load_config("""\
all:
  providers:
    mpi: [mpich]
mpich:
  buildable: false
  paths:
    [email protected]: /dummy/path
""")
    spack.config.set('packages', external_conf, scope='concretize')

    # Once the config is in place the external path must be used.
    external_spec = Spec('mpi')
    external_spec.concretize()
    assert external_spec['mpich'].external_path == '/dummy/path'
def config_update(args):
    """Update config files in-scope to the latest schema format,
    backing up each file before it is rewritten."""
    # Read the configuration files
    spack.config.config.get_config(args.section, scope=args.scope)
    updates = spack.config.config.format_updates[args.section]

    # Check write permissions for every scope that needs an update.
    cannot_overwrite, skip_system_scope = [], False
    for scope in updates:
        cfg_file = spack.config.config.get_config_filename(
            scope.name, args.section
        )
        scope_dir = scope.path
        can_be_updated = _can_update_config_file(scope_dir, cfg_file)
        if not can_be_updated:
            if scope.name == 'system':
                # System scope is skipped with a warning rather than
                # aborting the whole update.
                skip_system_scope = True
                msg = ('Not enough permissions to write to "system" scope. '
                       'Skipping update at that location [cfg={0}]')
                tty.warn(msg.format(cfg_file))
                continue
            cannot_overwrite.append((scope, cfg_file))

    # Any other unwritable scope aborts the command.
    if cannot_overwrite:
        msg = 'Detected permission issues with the following scopes:\n\n'
        for scope, cfg_file in cannot_overwrite:
            msg += '\t[scope={0}, cfg={1}]\n'.format(scope.name, cfg_file)
        msg += ('\nEither ensure that you have sufficient permissions to '
                'modify these files or do not include these scopes in the '
                'update.')
        tty.die(msg)

    if skip_system_scope:
        updates = [x for x in updates if x.name != 'system']

    # Report if there are no updates to be done
    if not updates:
        msg = 'No updates needed for "{0}" section.'
        tty.msg(msg.format(args.section))
        return

    # Ask the user for confirmation unless -y/--yes-to-all was given.
    proceed = True
    if not args.yes_to_all:
        msg = ('The following configuration files are going to be updated to'
               ' the latest schema format:\n\n')
        for scope in updates:
            cfg_file = spack.config.config.get_config_filename(
                scope.name, args.section
            )
            msg += '\t[scope={0}, file={1}]\n'.format(scope.name, cfg_file)
        msg += ('\nIf the configuration files are updated, versions of Spack '
                'that are older than this version may not be able to read '
                'them. Spack stores backups of the updated files which can '
                'be retrieved with "spack config revert"')
        tty.msg(msg)
        proceed = tty.get_yes_or_no('Do you want to proceed?', default=False)

    if not proceed:
        tty.die('Operation aborted.')

    # Get a function to update the format
    update_fn = spack.config.ensure_latest_format_fn(args.section)
    for scope in updates:
        cfg_file = spack.config.config.get_config_filename(
            scope.name, args.section
        )
        with open(cfg_file) as f:
            data = syaml.load_config(f) or {}
            data = data.pop(args.section, {})
        # Mutates `data` in place to the latest schema.
        update_fn(data)

        # Make a backup copy and rewrite the file
        bkp_file = cfg_file + '.bkp'
        shutil.copy(cfg_file, bkp_file)
        spack.config.config.update_config(
            args.section, data, scope=scope.name, force=True
        )
        msg = 'File "{0}" updated [backup={1}]'
        tty.msg(msg.format(cfg_file, bkp_file))
def create_env(parser, args):
    """Create (or initialize) a Spack environment directory.

    The location comes from ``--directory``, then ``--name`` (under
    ``$SPACK_MANAGER/environments``), and finally the current working
    directory. Optionally seeds the environment from a user YAML file,
    adds specs, and wires in machine-specific include configs. Returns
    the environment directory; its path is also written to
    ``$SPACK_MANAGER/.tmp/created_env_path.txt``.
    """
    # Resolve the environment directory, creating it if needed.
    if args.directory is not None:
        if not os.path.exists(args.directory):
            print("making", args.directory)
            os.makedirs(args.directory)
        theDir = args.directory
    elif args.name is not None:
        theDir = os.path.join(os.environ['SPACK_MANAGER'],
                              'environments', args.name)
        if not os.path.exists(theDir):
            print("making", theDir)
            os.makedirs(theDir)
    else:
        theDir = os.getcwd()

    has_view = False
    if args.yaml:
        # NOTE(review): assert is stripped under -O; consider raising a
        # real error for a missing YAML file.
        assert (os.path.isfile(args.yaml))
        with open(args.yaml, 'r') as fyaml:
            # Removed stray debug `print(fyaml)` that printed the file
            # object's repr.
            user_yaml = syaml.load_config(fyaml)
            user_view = environment.config_dict(user_yaml).get('view')
            if user_view:
                has_view = True

    env = environment.Environment(theDir, init_file=args.yaml,
                                  with_view=has_view, keep_relative=True)
    yaml = env.yaml

    def _unify_already_set(yaml):
        # True when the user YAML already configures concretizer unification.
        return ('spack' in yaml and 'concretizer' in yaml['spack'] and
                'unify' in yaml['spack']['concretizer'])

    # Default to unified concretization unless the user already chose.
    if not args.yaml or not _unify_already_set(yaml):
        yaml['spack']['concretizer'] = {'unify': True}
        with env.write_transaction():
            env.write()

    # Determine the target machine, either explicit or auto-detected.
    if args.machine is not None:
        machine = args.machine
        if machine not in fm.machine_list.keys():
            raise Exception('Specified machine %s is not defined' % machine)
    else:
        machine = find_machine(verbose=False)

    if args.spec:
        spec_list = spack.cmd.parse_specs(args.spec)
        for s in spec_list:
            env.add(s)

    # Assemble base + machine-specific include configuration.
    inc_creator = IncludesCreator()
    genPath = os.path.join(os.environ['SPACK_MANAGER'], 'configs', 'base')
    inc_creator.add_scope('base', genPath)
    hostPath = os.path.join(os.environ['SPACK_MANAGER'], 'configs', machine)

    if os.path.exists(hostPath):
        inc_creator.add_scope('machine', hostPath)
    else:
        print('Host not setup in spack-manager: %s' % hostPath)

    include_file_name = 'include.yaml'
    include_file = os.path.join(theDir, include_file_name)
    inc_creator.write_includes(include_file)
    if 'include' in yaml['spack']:
        yaml['spack']['include'].append(include_file_name)
    else:
        yaml['spack']['include'] = [include_file_name]
    env.write()

    # Record the created environment path for follow-up tooling.
    fpath = os.path.join(os.environ['SPACK_MANAGER'], '.tmp')
    os.makedirs(fpath, exist_ok=True)
    storage = os.path.join(fpath, 'created_env_path.txt')
    with open(storage, 'w') as f:
        f.write(theDir)
    return theDir
def config_add(args):
    """Add the given configuration to the specified config scope

    This is a stateful operation that edits the config files."""
    if not (args.file or args.path):
        tty.error("No changes requested. Specify a file or value.")
        setup_parser.add_parser.print_help()
        exit(1)

    scope, section = _get_scope_and_section(args)

    # Updates from file
    if args.file:
        # Get file as config dict
        data = spack.config.read_config_file(args.file)
        if any(k in data for k in spack.schema.env.keys):
            # Unwrap an environment manifest to its config sections.
            data = ev.config_dict(data)

        # update all sections from config dict
        # We have to iterate on keys to keep overrides from the file
        for section in data.keys():
            if section in spack.config.section_schemas.keys():
                # Special handling for compiler scope difference
                # Has to be handled after we choose a section
                if scope is None:
                    scope = spack.config.default_modify_scope(section)

                value = data[section]
                existing = spack.config.get(section, scope=scope)
                new = spack.config.merge_yaml(existing, value)

                set_config(args, section, new, scope)

    # Updates from a colon-separated key path (e.g. "config:debug:true").
    if args.path:
        components = spack.config.process_config_path(args.path)

        has_existing_value = True
        path = ''
        override = False
        # Walk every component except the last (the value to set),
        # building up `path` until we fall off the existing config.
        for idx, name in enumerate(components[:-1]):
            # First handle double colons in constructing path
            # ('::' marks an override of the previous component).
            colon = '::' if override else ':' if path else ''
            path += colon + name
            if getattr(name, 'override', False):
                override = True
            else:
                override = False

            # Test whether there is an existing value at this level
            existing = spack.config.get(path, scope=scope)
            if existing is None:
                has_existing_value = False
                # We've nested further than existing config, so we need the
                # type information for validation to know how to handle bare
                # values appended to lists.
                existing = spack.config.get_valid_type(path)

                # construct value from this point down: parse the final
                # component as YAML and wrap it in the remaining
                # intermediate keys (innermost first).
                value = syaml.load_config(components[-1])
                for component in reversed(components[idx + 1:-1]):
                    value = {component: value}
                break

        if has_existing_value:
            # The whole path already exists: split off the last component
            # as the value and re-fetch the existing parent value.
            path, _, value = args.path.rpartition(':')
            value = syaml.load_config(value)
            existing = spack.config.get(path, scope=scope)

        # append values to lists
        if isinstance(existing, list) and not isinstance(value, list):
            value = [value]

        # merge value into existing
        new = spack.config.merge_yaml(existing, value)
        set_config(args, path, new, scope)
def check_schema(name, file_contents):
    """Check a Spack YAML schema against some data"""
    stream = StringIO(file_contents)
    parsed = syaml.load_config(stream)
    spack.config.validate(parsed, name)