def verify(self, distro, origins_fn):
    """Verify this persona against a distro and an origins file.

    Filters out wanted components that are disabled in the origins file,
    then sanity-checks that the given distro supports this persona and
    every remaining wanted component.

    :param distro: loaded distro object to validate against
    :param origins_fn: path to the origins yaml file
    :raises RuntimeError: if the distro is not supported by this persona
        or a wanted component is unknown to the distro
    """
    # Filter out components that are disabled in origins file
    origins = utils.load_yaml(origins_fn)
    for c in self.wanted_components:
        if c not in origins:
            if c in self.no_origins:
                LOG.debug("Automatically enabling component %s, not"
                          " present in origins file %s but present in"
                          " desired persona %s (origin not required).",
                          c, origins_fn, self.source)
                origins[c] = {
                    'disabled': False,
                }
            else:
                # FIX: warn() is a deprecated alias of warning(); the other
                # loaders in this file already use warning().
                LOG.warning("Automatically disabling %s, not present in"
                            " origin file but present in desired"
                            " persona (origin required).",
                            colorizer.quote(c, quote_color='red'))
                origins[c] = {
                    'disabled': True,
                }
    disabled_components = set(key
                              for key, value in six.iteritems(origins)
                              if value.get('disabled'))
    self.wanted_components = [c for c in self.wanted_components
                              if c not in disabled_components]
    # Some sanity checks against the given distro/persona
    d_name = distro.name
    if d_name not in self.distro_support:
        raise RuntimeError("Persona does not support the loaded distro")
    for c in self.wanted_components:
        if not distro.known_component(c):
            raise RuntimeError("Persona provided component %s but its"
                               " not supported by the loaded distro" % (c))
def load(path, distros_patch=None):
    """Load configuration for all distros found in path.

    :param path: path containing distro configuration in yaml format
    :param distros_patch: distros file patch, jsonpath format (rfc6902)
    """
    patch = None
    if distros_patch:
        patch = jsonpatch.JsonPatch(distros_patch)
    candidate_files = glob.glob(sh.joinpths(path, '*.yaml'))
    if not candidate_files:
        raise excp.ConfigException('Did not find any distro definition files in %r' % path)
    possibles = []
    for candidate in candidate_files:
        LOG.debug("Attempting to load distro definition from %r", candidate)
        try:
            cls_kvs = utils.load_yaml(candidate)
            # Apply any user specified patches to distros file
            if patch:
                patch.apply(cls_kvs, in_place=True)
        except Exception as err:
            LOG.warning('Could not load distro definition from %r: %s',
                        candidate, err)
            continue
        if 'name' not in cls_kvs:
            cls_kvs['name'] = os.path.splitext(sh.basename(candidate))[0]
        possibles.append(Distro(**cls_kvs))
    matches = _match_distros(possibles)
    LOG.debug("Matched distros %s", [m.name for m in matches])
    return matches
def load(path, distros_patch=None):
    """Load configuration for all distros found in path.

    :param path: path containing distro configuration in yaml format
    :param distros_patch: distros file patch, jsonpath format (rfc6902)
    """
    patch = jsonpatch.JsonPatch(distros_patch) if distros_patch else None
    input_files = glob.glob(sh.joinpths(path, '*.yaml'))
    if not input_files:
        raise excp.ConfigException(
            'Did not find any distro definition files in %r' % path)
    distro_possibles = []
    for fn in input_files:
        LOG.debug("Attempting to load distro definition from %r", fn)
        try:
            data = utils.load_yaml(fn)
            # Apply any user specified patches to distros file
            if patch is not None:
                patch.apply(data, in_place=True)
        except Exception as err:
            LOG.warning('Could not load distro definition from %r: %s',
                        fn, err)
        else:
            # Default the distro name from the file name when absent.
            data.setdefault('name', os.path.splitext(sh.basename(fn))[0])
            distro_possibles.append(Distro(**data))
    matches = _match_distros(distro_possibles)
    LOG.debug("Matched distros %s", [m.name for m in matches])
    return matches
def _cache(self, conf):
    """Cache config file into memory to avoid re-reading it from disk."""
    if conf in self._cached:
        return
    path = sh.joinpths(self._path, conf + self._conf_ext)
    if not sh.isfile(path):
        raise exceptions.YamlConfigNotFoundException(path)
    loaded = utils.load_yaml(path)
    # Treat an empty yaml document as an empty mapping.
    self._cached[conf] = loaded if loaded else {}
def load_previous_settings():
    """Best-effort load of previously persisted settings.

    Returns None when the settings file is missing, unreadable, or
    malformed.
    """
    try:
        return utils.load_yaml(SETTINGS_FILE)
    except Exception:
        # Errors could be expected on format problems
        # or on the file not being readable....
        return None
def _process_includes(self, root):
    """Load and expand the include file for root, memoized in self.included."""
    if root in self.included:
        return
    path = sh.joinpths(self.base, "%s.yaml" % (root))
    if not sh.isfile(path):
        self.included[root] = {}
        return
    # NOTE(review): the raw yaml is stored under root *before* expansion —
    # keep this two-step assignment, it appears to guard re-entry while
    # _do_include runs.
    self.included[root] = utils.load_yaml(path)
    self.included[root] = self._do_include(self.included[root])
def load(filename, patch_file=None):
    """Load an origins file, optionally applying a json patch to it."""
    base = utils.load_yaml(filename)
    if patch_file:
        jsonpatch.JsonPatch(patch_file).apply(base, in_place=True)
    origin = Origin(filename, patched=bool(patch_file))
    origin.update(base)
    return origin
def _establish_passwords(self):
    """Merge passwords from any existing password files into the cache."""
    read_files = []
    for candidate in self.password_files:
        if sh.isfile(candidate):
            self.passwords.cache.update(utils.load_yaml(candidate))
            read_files.append(candidate)
    if read_files:
        utils.log_iterable(read_files,
                           header="Updated passwords to be used from %s files" % len(read_files),
                           logger=LOG)
def load(path):
    """Load distro definitions from all yaml files found under path.

    :param path: directory containing distro yaml definition files
    :raises excp.ConfigException: when no definition files are found
    :returns: result of _match_distro over the successfully loaded distros
    """
    distro_possibles = []
    input_files = glob.glob(sh.joinpths(path, '*.yaml'))
    if not input_files:
        raise excp.ConfigException('Did not find any distro definition files in %r' % path)
    for fn in input_files:
        LOG.debug("Attempting to load distro definition from %r", fn)
        try:
            cls_kvs = utils.load_yaml(fn)
        except Exception as err:
            LOG.warning('Could not load distro definition from %r: %s', fn, err)
        else:
            # BUG FIX: only append when the yaml actually loaded; previously
            # the append ran unconditionally, so a failed load fell through
            # with cls_kvs undefined (NameError) or stale from a prior file.
            distro_possibles.append(Distro(**cls_kvs))
    return _match_distro(distro_possibles)
def load_examples():
    """Collect (identifier, example) pairs from every example yaml file."""
    examples = []
    for filename in glob.glob(EXAMPLE_GLOB):
        if not sh.isfile(filename):
            continue
        # The test generator will use the first element as the test
        # identifer so provide a filename + index based test identifer to
        # be able to connect test failures to the example which caused it.
        try:
            ident = re.sub(r"[.\s]", "_", sh.basename(filename))
            for idx, example in enumerate(utils.load_yaml(filename)):
                examples.append(("%s_%s" % (ident, idx), example))
        except IOError:
            pass
    return examples
def load(path):
    """Load distro definitions from all yaml files found under path.

    :param path: directory containing distro yaml definition files
    :raises excp.ConfigException: when no definition files are found
    :returns: result of _match_distro over the successfully loaded distros
    """
    distro_possibles = []
    input_files = glob.glob(sh.joinpths(path, '*.yaml'))
    if not input_files:
        raise excp.ConfigException(
            'Did not find any distro definition files in %r' % path)
    for fn in input_files:
        LOG.debug("Attempting to load distro definition from %r", fn)
        try:
            cls_kvs = utils.load_yaml(fn)
        except Exception as err:
            LOG.warning('Could not load distro definition from %r: %s',
                        fn, err)
        else:
            # BUG FIX: append only on successful load; the original appended
            # unconditionally, using an undefined/stale cls_kvs after a
            # load failure.
            distro_possibles.append(Distro(**cls_kvs))
    return _match_distro(distro_possibles)
def match(self, distros, origins_fn):
    """Filter wanted components by the origins file and pick a distro.

    Components missing from the origins file are auto-enabled when no
    origin is required, otherwise auto-disabled.  The first distro that
    supports this persona and all remaining components is selected.

    :param distros: iterable of candidate distro objects
    :param origins_fn: path to the origins yaml file
    :raises RuntimeError: when none of the distros can satisfy the persona
    :returns: the selected distro object
    """
    # Filter out components that are disabled in origins file
    origins = utils.load_yaml(origins_fn)
    for c in self.wanted_components:
        if c not in origins:
            if c in self.no_origins:
                LOG.debug("Automatically enabling component %s, not"
                          " present in origins file %s but present in"
                          " desired persona %s (origin not required).",
                          c, origins_fn, self.source)
                origins[c] = {
                    'disabled': False,
                }
            else:
                # FIX: warn() is a deprecated alias of warning(); the other
                # loaders in this file already use warning().
                LOG.warning("Automatically disabling %s, not present in"
                            " origin file but present in desired"
                            " persona (origin required).",
                            colorizer.quote(c, quote_color='red'))
                origins[c] = {
                    'disabled': True,
                }
    disabled_components = set(key
                              for key, value in six.iteritems(origins)
                              if value.get('disabled'))
    self.wanted_components = [c for c in self.wanted_components
                              if c not in disabled_components]
    # Pick which of potentially many distros will work...
    distro_names = set()
    selected_distro = None
    for distro in distros:
        distro_names.add(distro.name)
        if distro.name not in self.distro_support:
            continue
        will_work = True
        for component in self.wanted_components:
            if not distro.known_component(component):
                will_work = False
                break
        if will_work:
            selected_distro = distro
            break
    if selected_distro is None:
        raise RuntimeError("Persona does not support any of the loaded"
                           " distros: %s" % list(distro_names))
    else:
        return selected_distro
def download(self):
    """Download sources needed to build the component, if any."""
    app_dir = self.get_option('app_dir')
    cfg = utils.load_yaml(self._origins_fn).get(self.name, {})
    if not app_dir or not cfg:
        return []
    repo_uri = cfg.pop('repo', None)
    if not repo_uri:
        raise ValueError(("Could not find repo uri for %r component from the %r "
                          "config file." % (self.name, self._origins_fn)))
    uris = [repo_uri]
    utils.log_iterable(uris, logger=LOG,
                       header="Downloading from %s uris" % (len(uris)))
    sh.mkdirslist(app_dir, tracewriter=self.tracewriter)
    # Record the download destination first so it gets cleaned up even
    # when the fetch itself fails.
    self.tracewriter.download_happened(app_dir, repo_uri)
    down.GitDownloader(repo_uri, app_dir, **cfg).download()
    return uris
def _update_passwords(self):
    """Persist cached passwords back to the password files (when enabled)."""
    if not self.store_passwords or not self.passwords.cache:
        return
    targets = [fn for fn in self.password_files if sh.isfile(fn)]
    if not targets:
        targets = [self.default_password_file]
    written = []
    for fn in targets:
        contents = utils.load_yaml(fn) if sh.isfile(fn) else {}
        contents.update(self.passwords.cache)
        sh.write_file(fn, utils.add_header(fn, utils.prettify_yaml(contents)))
        written.append(fn)
    utils.log_iterable(written,
                       header="Updated/created %s password files" % len(written),
                       logger=LOG)
def download(self):
    """Download sources needed to build the component, if any."""
    target_dir = self.get_option('app_dir')
    origins = utils.load_yaml(self._origins_fn)
    download_cfg = origins.get(self.name, {})
    if not target_dir or not download_cfg:
        return []
    uri = download_cfg.pop('repo', None)
    if not uri:
        raise ValueError(
            ("Could not find repo uri for %r component from the %r "
             "config file." % (self.name, self._origins_fn)))
    uris = [uri]
    utils.log_iterable(uris, logger=LOG,
                       header="Downloading from %s uris" % (len(uris)))
    sh.mkdirslist(target_dir, tracewriter=self.tracewriter)
    # This is used to delete what is downloaded (done before
    # fetching to ensure its cleaned up even on download failures)
    self.tracewriter.download_happened(target_dir, uri)
    downloader = down.GitDownloader(uri, target_dir, **download_cfg)
    downloader.download()
    return uris
def load(self, distro, component, persona=None):
    """Build the merged option mapping for a component."""
    # NOTE (vnovikov): applying takes place before loading reference links
    self._apply_persona(component, persona)
    dir_opts = self._get_dir_opts(component)
    distro_opts = distro.options
    origins_opts = {}
    if self._origins_path:
        try:
            origins_opts = utils.load_yaml(self._origins_path)[component]
        except KeyError:
            pass
    general_opts = self._base_loader.load('general')
    component_opts = self._base_loader.load(component)
    # NOTE (vnovikov): merge order is the same as arguments order below.
    return utils.merge_dicts(dir_opts, distro_opts, origins_opts,
                             general_opts, component_opts)
def main():
    """Compare pip requirements found under the given root dirs against the
    pip/pip_to_package mappings in a distro yaml file.

    Usage: prog distro_yaml root_dir ...
    Prints mappings that may be unneeded and requirements with no mapping.

    :returns: total count of mismatches (0 means fully consistent), or 1 on
        bad usage
    """
    if len(sys.argv) < 3:
        print("%s distro_yaml root_dir ..." % sys.argv[0])
        return 1
    root_dirs = sys.argv[2:]
    yaml_fn = sh.abspth(sys.argv[1])
    requires_files = []
    for d in root_dirs:
        all_contents = sh.listdir(d, recursive=True, files_only=True)
        # BUG FIX: accumulate across every root dir; previously the list was
        # reassigned each iteration, keeping only the last dir's files.
        requires_files.extend(
            sh.abspth(f) for f in all_contents
            if re.search(r"(test|pip)[-]requires$", f, re.I))
    requires_files = sorted(list(set(requires_files)))
    requirements = []
    source_requirements = {}
    for fn in requires_files:
        source_requirements[fn] = []
        for req in pip_helper.parse_requirements(sh.load_file(fn)):
            requirements.append(req.key.lower().strip())
            source_requirements[fn].append(req.key.lower().strip())
    print("Comparing pips/pip2pkgs in %s to those found in %s" % (yaml_fn, root_dirs))
    for fn in sorted(requires_files):
        print(" + " + str(fn))
    requirements = set(requirements)
    print("All known requirements:")
    for r in sorted(requirements):
        print("+ " + str(r))
    distro_yaml = utils.load_yaml(yaml_fn)
    components = distro_yaml.get('components', {})
    all_known_names = []
    components_pips = {}
    for (c, details) in components.items():
        components_pips[c] = []
        pip2pkgs = details.get('pip_to_package', [])
        pips = details.get('pips', [])
        known_names = []
        for item in pip2pkgs:
            known_names.append(item['name'].lower().strip())
        for item in pips:
            known_names.append(item['name'].lower().strip())
        components_pips[c].extend(known_names)
        all_known_names.extend(known_names)
    all_known_names = sorted(list(set(all_known_names)))
    not_needed = []
    for n in all_known_names:
        if n not in requirements:
            not_needed.append(n)
    if not_needed:
        print("The following distro yaml mappings may not be needed:")
        for n in sorted(not_needed):
            msg = " + %s (" % (n)
            # Find which components said they need this...
            for (c, known_names) in components_pips.items():
                if n in known_names:
                    msg += c + ","
            msg += ")"
            print(msg)
    not_found = []
    for n in requirements:
        name = n.lower().strip()
        if name not in all_known_names:
            not_found.append(name)
    not_found = sorted(list(set(not_found)))
    if not_found:
        print("The following distro yaml mappings may be required but were not found:")
        for n in sorted(not_found):
            msg = " + %s" % (n)
            msg += " ("
            # Find which file/s said they need this...
            for (fn, reqs) in source_requirements.items():
                matched = False
                for r in reqs:
                    # BUG FIX: compare against the current entry (n); the
                    # original compared against 'name', a stale leftover from
                    # the previous loop, so every entry matched the same file.
                    if r.lower().strip() == n:
                        matched = True
                if matched:
                    msg += fn + ","
            msg += ")"
            print(msg)
    return len(not_found) + len(not_needed)
def load(fn):
    """Create a Persona instance from the yaml definition at fn."""
    cls_kvs = utils.load_yaml(fn)
    # Record where this persona came from for later reporting.
    cls_kvs['source'] = fn
    return Persona(**cls_kvs)
def main():
    """Compare pip requirements found under the given root dirs against the
    pip/pip_to_package mappings in a distro yaml file.

    Usage: prog distro_yaml root_dir ...
    Prints mappings that may be unneeded and requirements with no mapping.

    :returns: total count of mismatches (0 means fully consistent), or 1 on
        bad usage
    """
    if len(sys.argv) < 3:
        print("%s distro_yaml root_dir ..." % sys.argv[0])
        return 1
    root_dirs = sys.argv[2:]
    yaml_fn = sh.abspth(sys.argv[1])
    requires_files = []
    for d in root_dirs:
        all_contents = sh.listdir(d, recursive=True, files_only=True)
        # BUG FIX: accumulate across every root dir; previously the list was
        # reassigned each iteration, keeping only the last dir's files.
        requires_files.extend(
            sh.abspth(f) for f in all_contents
            if re.search(r"(test|pip)[-]requires$", f, re.I))
    requires_files = sorted(list(set(requires_files)))
    requirements = []
    source_requirements = {}
    for fn in requires_files:
        source_requirements[fn] = []
        for req in pip_helper.parse_requirements(sh.load_file(fn)):
            requirements.append(req.key.lower().strip())
            source_requirements[fn].append(req.key.lower().strip())
    print("Comparing pips/pip2pkgs in %s to those found in %s" % (yaml_fn, root_dirs))
    for fn in sorted(requires_files):
        print(" + " + str(fn))
    requirements = set(requirements)
    print("All known requirements:")
    for r in sorted(requirements):
        print("+ " + str(r))
    distro_yaml = utils.load_yaml(yaml_fn)
    components = distro_yaml.get('components', {})
    all_known_names = []
    components_pips = {}
    for (c, details) in components.items():
        components_pips[c] = []
        pip2pkgs = details.get('pip_to_package', [])
        pips = details.get('pips', [])
        known_names = []
        for item in pip2pkgs:
            known_names.append(item['name'].lower().strip())
        for item in pips:
            known_names.append(item['name'].lower().strip())
        components_pips[c].extend(known_names)
        all_known_names.extend(known_names)
    all_known_names = sorted(list(set(all_known_names)))
    not_needed = []
    for n in all_known_names:
        if n not in requirements:
            not_needed.append(n)
    if not_needed:
        print("The following distro yaml mappings may not be needed:")
        for n in sorted(not_needed):
            msg = " + %s (" % (n)
            # Find which components said they need this...
            for (c, known_names) in components_pips.items():
                if n in known_names:
                    msg += c + ","
            msg += ")"
            print(msg)
    not_found = []
    for n in requirements:
        name = n.lower().strip()
        if name not in all_known_names:
            not_found.append(name)
    not_found = sorted(list(set(not_found)))
    if not_found:
        print("The following distro yaml mappings may be required but were not found:")
        for n in sorted(not_found):
            msg = " + %s" % (n)
            msg += " ("
            # Find which file/s said they need this...
            for (fn, reqs) in source_requirements.items():
                matched = False
                for r in reqs:
                    # BUG FIX: compare against the current entry (n); the
                    # original compared against 'name', a stale leftover from
                    # the previous loop, so every entry matched the same file.
                    if r.lower().strip() == n:
                        matched = True
                if matched:
                    msg += fn + ","
            msg += ")"
            print(msg)
    return len(not_found) + len(not_needed)