def __call__(self):
    """ Evaluate the logical collection map and return the overall result.

    The raw input can be a dict of operator groups, a list of items or a
    single item. Per-operator results are reduced with the corresponding
    logical function (and/or/not) and the final result is the AND of all
    per-operator results.

    @return: boolean result of evaluating the entire collection.
    """
    if isinstance(self.logicmap.raw, dict):
        if self.is_group(self.logicmap.raw):
            log.debug("items groups provided")
            results = self.process_group(self.logicmap.raw)
        else:
            log.debug("single items provided")
            results = {self.FINAL_RESULT_OP:
                       [self.process_single(self.logicmap.raw,
                                            copy_cache=True)]}
    elif isinstance(self.logicmap.raw, list):
        log.debug("list of %s items provided", len(self.logicmap.raw))
        results = self.process_list(self.logicmap.raw)
    else:
        results = {self.FINAL_RESULT_OP:
                   [self.process_single(self.logicmap.raw,
                                        copy_cache=True)]}

    final_results = []
    for op, op_results in results.items():
        if op == 'and':
            final_results.append(all(op_results))
        elif op == 'or':
            final_results.append(any(op_results))
        elif op == 'not':
            # this is a NOR
            final_results.append(not any(op_results))
        else:
            log.debug("unknown operator '%s' found in requirement", op)

    result = all(final_results)
    log.debug("final result=%s", result)
    return result
def _search_task_wrapper(self, path, term_key):
    """ Run the search task identified by term_key against the file at path.

    The file is first opened as gzip and a one-byte read is attempted to
    detect whether it really is compressed; if that raises OSError the
    file is re-opened as plain text. Files that cannot be decoded are
    skipped. Any other failure is converted to FileSearchException.

    @param path: path of the file to search.
    @param term_key: key identifying the search terms to apply.
    @return: result of self._search_task() or None if the file is skipped.
    """
    try:
        with gzip.open(path, 'r') as fd:
            try:
                # test if file is gzip
                fd.read(1)
                fd.seek(0)
                return self._search_task(term_key, fd, path)
            except OSError:
                # not gzip - fall through and open as plain text
                pass

        with open(path) as fd:
            return self._search_task(term_key, fd, path)
    except UnicodeDecodeError:
        # ignore the file if it can't be decoded
        log.debug("caught UnicodeDecodeError for path %s - skipping",
                  path)
    except EOFError as e:
        msg = ("an exception occured while searching {} - {}".format(
            path, e))
        raise FileSearchException(msg) from e
    except Exception as e:
        msg = (
            "an unknown exception occured while searching {} - {}".format(
                path, e))
        raise FileSearchException(msg) from e
def path(self):
    """ Return the path to use as search input.

    If a filesystem path is configured it is returned (suffixed with a
    glob wildcard when all-logs is enabled and not disabled for this
    input). Otherwise, if a command is configured, its output is dumped
    to a temporary file and that path is returned (cached for subsequent
    calls).

    @return: path string or None if no input is provided.
    """
    if self.fs_path:  # pylint: disable=W0125
        path = os.path.join(HotSOSConfig.DATA_ROOT, self.fs_path)
        if (HotSOSConfig.USE_ALL_LOGS and not
                self.options['disable-all-logs']):
            path = "{}*".format(path)

        return path

    if self.command:  # pylint: disable=W0125
        if self.cmd_tmp_path:
            return self.cmd_tmp_path

        args_callback = self.options['args-callback']
        if args_callback:
            args, kwargs = self.get_method(args_callback)
        else:
            args = self.options['args']
            kwargs = self.options['kwargs']

        # get command output
        out = getattr(CLIHelper(), self.command)(*args, **kwargs)
        # store in temp file to make it searchable
        # NOTE: we don't need to delete this at the end since it is
        # created in the plugin tmp dir which is wiped at the end of the
        # plugin run.
        if isinstance(out, list):
            out = ''.join(out)
        elif isinstance(out, dict):
            out = str(out)

        self.cmd_tmp_path = mktemp_dump(out)
        return self.cmd_tmp_path

    log.debug("no input provided")
def message_with_format_dict_applied(self, property=None, checks=None):
    """
    If a format-dict is provided this will resolve any cache references
    then format the message. Returns formatted message.

    Either property or checks must be provided (but not both).

    @params property: optional YPropertyOverride object.
    @params checks: optional dict of YPropertyChecks objects.
    """
    fdict = self.format_dict
    if not fdict:
        return self.message

    for key, value in fdict.items():
        if not PropertyCacheRefResolver.is_valid_cache_ref(value):
            continue

        resolver = PropertyCacheRefResolver(value, property=property,
                                            checks=checks)
        rvalue = resolver.resolve()
        log.debug("updating format-dict key=%s with cached %s (%s)", key,
                  value, rvalue)
        fdict[key] = rvalue

    message = self.message
    if message is None:
        return message

    return str(message).format(**fdict)
def apply_ops(self, ops, input=None, normalise_value_types=False):
    """
    Takes a list of operations and processes each one where each takes as
    input the output of the previous.

    @param ops: list of tuples of operations and optional args.
    @param input: the value that is used as input to the first operation.
    @param normalise_value_types: if an operation has an expected value
                                  and this is True, the type of the input
                                  will be cast to that of the expected
                                  value.
    @return: output of the final operation.
    @raises TypeError: if ops is not a list.
    """
    log.debug("ops=%s, input=%s", ops, input)
    if not isinstance(ops, list):
        raise TypeError("Expected list of ops but got {}".format(ops))

    for op in ops:
        expected = None
        force_expected = False
        if len(op) > 1:
            # if an expected value was provided we must use it regardless
            # of what it is.
            force_expected = True
            expected = op[1]

        if expected is not None and normalise_value_types:
            # cast input to the type of the expected value
            input = type(expected)(input)

        input = self.apply_op(op[0], input=input, expected=expected,
                              force_expected=force_expected)

    return input
def apply_op(self, op, input=None, expected=None, force_expected=False):
    """ Apply a single operator function to the given input.

    @param op: name of a function from the operator module.
    @param input: value passed as the first argument to the operator.
    @param expected: optional second argument for the operator.
    @param force_expected: if True, expected is passed even when None.
    @return: result of the operator call.
    """
    log.debug("op=%s, input=%s, expected=%s, force_expected=%s", op,
              input, expected, force_expected)
    opfunc = getattr(operator, op)
    if force_expected or expected is not None:
        return opfunc(input, expected)

    return opfunc(input)
def set(self, key, data):
    """ Store data in the cache under key.

    If both the current value and the new data are dicts, the new data
    is merged into the existing value, otherwise the existing value is
    replaced.

    @param key: cache key.
    @param data: value to store.
    """
    log.debug("%s: caching key=%s with value=%s", id(self), key, data)
    _current = self._data.get(key)
    if isinstance(_current, dict) and isinstance(data, dict):
        self._data[key].update(data)
    else:
        self._data[key] = data
def _get_defs_recursive(self, path):
    """ Recursively find all yaml/files beneath a directory.

    A file whose name matches its parent directory is treated as
    directory globals and its content is merged into the top level of the
    result for that directory.

    @param path: directory to walk.
    @return: dict of definitions keyed by file/dir name.
    """
    defs = {}
    for entry in os.listdir(path):
        abs_path = os.path.join(path, entry)
        if os.path.isdir(abs_path):
            defs[os.path.basename(abs_path)] = \
                self._get_defs_recursive(abs_path)
            continue

        if not self._is_def(abs_path):
            continue

        if self._get_yname(abs_path) == os.path.basename(path):
            with open(abs_path) as fd:
                log.debug("applying dir globals %s", entry)
                self.stats_num_files_loaded += 1
                defs.update(yaml.safe_load(fd.read()) or {})

            continue

        with open(abs_path) as fd:
            self.stats_num_files_loaded += 1
            defs[self._get_yname(abs_path)] = \
                yaml.safe_load(fd.read()) or {}

    return defs
def cache_save(self, data):
    """ Save stats for this network port to the cache.

    @param data: json-serialisable object to save.
    """
    log.debug("saving network port %s to cache", self.name)
    # exist_ok avoids a race between checking for and creating the dir
    os.makedirs(self.cache_path_root, exist_ok=True)
    path = os.path.join(self.cache_path_root, 'stats.json')
    with open(path, 'w') as fd:
        fd.write(json.dumps(data))
def _run(self, plugin):
    """ Run all parts of the requested plugin.

    @param plugin: plugin name - must exist in PLUGIN_CATALOG.
    @return: result of running the plugin's parts.
    """
    log.debug("running plugin %s", plugin)
    if plugin not in PLUGIN_CATALOG:
        raise Exception("unknown plugin {}".format(plugin))

    setup_config(PLUGIN_NAME=plugin)
    return plugintools.PluginRunner().run_parts(PLUGIN_CATALOG[plugin])
def get_cls(self, import_str):
    """ Import and return a class object.

    @param import_str: dotted path to the class e.g. a.b.MyClass.
    @return: the class object.
    """
    log.debug("instantiating class %s", import_str)
    mod, _, class_name = import_str.rpartition('.')
    try:
        return getattr(importlib.import_module(mod), class_name)
    except Exception:
        log.exception("failed to import class %s from %s", class_name,
                      mod)
        raise
def load_plugin_defs(self):
    """ Load yaml definitions for the current plugin.

    Definitions from the plugin defs tree are preferred, falling back to
    the legacy single-file format if none are found.

    @return: dict of plugin definitions.
    """
    log.debug('loading %s definitions for plugin=%s', self.ytype,
              HotSOSConfig.PLUGIN_NAME)
    return self.plugin_defs or self.plugin_defs_legacy
def plugin_defs(self):
    """ Load definitions from the plugin defs tree (if it exists).

    @return: dict of definitions or None if no defs dir exists for this
             plugin.
    """
    path = os.path.join(HotSOSConfig.PLUGIN_YAML_DEFS, self.ytype,
                        HotSOSConfig.PLUGIN_NAME)
    # reset
    self.stats_num_files_loaded = 0
    if not os.path.isdir(path):
        return None

    _defs = self._get_defs_recursive(path)
    log.debug("YDefsLoader: plugin %s loaded %s files",
              HotSOSConfig.PLUGIN_NAME, self.stats_num_files_loaded)
    return _defs
def get_process_cmd_from_line(self, line, expr):
    """ Try to extract a process name from the given line.

    Each expression template is tried in turn until one matches.

    @param line: line to match against.
    @param expr: expression inserted into each template.
    @return: matched process name or None if no template matched.
    """
    for expr_type, expr_tmplt in SVC_EXPR_TEMPLATES.items():
        if expr_type == 'relative' and not self.ps_allow_relative:
            continue

        ret = re.compile(expr_tmplt.format(expr)).match(line)
        if not ret:
            continue

        svc = ret.group(1)
        log.debug("matched process %s with %s expr", svc, expr_type)
        return svc
def cache_save(self, data, namespaces=False):
    """ Save network helper info to the cache.

    @param data: json-serialisable object to save.
    @param namespaces: if True, save to the namespaces cache file.
    """
    log.debug("saving network helper info to cache (namespaces=%s)",
              namespaces)
    fname = 'ns_interfaces.json' if namespaces else 'interfaces.json'
    path = os.path.join(self.cache_path_root, fname)
    with open(path, 'w') as fd:
        fd.write(json.dumps(data))
def plugin_defs_legacy(self):
    """ Load definitions from the legacy single-file format.

    @return: dict of definitions for the current plugin or {} if the
             legacy file does not exist.
    """
    path = os.path.join(HotSOSConfig.PLUGIN_YAML_DEFS,
                        '{}.yaml'.format(self.ytype))
    if not os.path.exists(path):
        return {}

    log.debug("using legacy defs path %s", path)
    with open(path) as fd:
        content = yaml.safe_load(fd.read()) or {}

    return content.get(HotSOSConfig.PLUGIN_NAME, {})
def cache_load(self):
    """ Load stats for this network port from the cache.

    @return: cached data or None if unavailable or unparseable.
    """
    path = os.path.join(self.cache_path_root, 'stats.json')
    if not os.path.exists(path):
        log.debug("network port %s not found in cache", self.name)
        return None

    log.debug("loading network port %s from cache", self.name)
    try:
        with open(path) as fd:
            return json.loads(fd.read())
    except json.decoder.JSONDecodeError:
        log.warning("failed to load networkport from cache")
        return None
def passes(self):
    """ Content can either be a single requirement, dict of requirement
    groups or list of requirements. List may contain individual
    requirements or groups.

    @return: True if the requirement(s) are met, otherwise False.
    """
    log.debug("running requirement")
    logicmap = LogicalCollectionMap(self.content, self.REQ_TYPES,
                                    cache=self.cache)
    handler = LogicalCollectionHandler(logicmap)
    result = handler()
    self.cache.set('passes', result)
    return result
def check_service(self, svc, ops, started_after_svc_obj=None):
    """ Apply ops to the state of the given service.

    If started_after_svc_obj is provided and both services have a start
    time, the check fails (returns False) when svc started before it.

    @param svc: service object under test.
    @param ops: list of operations to apply to the service state.
    @param started_after_svc_obj: optional service that svc must have
                                  started after.
    @return: boolean result.
    """
    if started_after_svc_obj:
        this_started = svc.start_time
        other_started = started_after_svc_obj.start_time
        if this_started and other_started:
            log.debug("%s started=%s, %s started=%s", svc.name,
                      this_started, started_after_svc_obj.name,
                      other_started)
            if other_started > this_started:
                log.debug("svc %s started before %s", svc.name,
                          started_after_svc_obj.name)
                return False

    return self.apply_ops(ops, input=svc.state)
def run(self):
    """ Run all scenarios and, for each, register issues from the highest
    priority conclusion(s) reached.

    One or more conclusions may share the same priority in which case all
    that match are used.
    """
    mgr = IssuesManager()
    for scenario in self.scenarios:
        log.debug("running scenario: %s", scenario.name)
        results = {}
        # run all conclusions and group them by priority.
        for name, conc in scenario.conclusions.items():
            if not conc.reached(scenario.checks):
                continue

            priority = conc.priority.value if conc.priority else 1
            results.setdefault(priority, []).append(conc)
            log.debug("conclusion reached: %s (priority=%s)", name,
                      priority)

        if not results:
            log.debug("no conclusions reached")
            continue

        highest = max(results)
        log.debug("selecting highest priority=%s conclusions (%s)",
                  highest, len(results[highest]))
        for conc in results[highest]:
            mgr.add(conc.issue, context=conc.context)
def get_method(self, import_str):
    """ Import a class, instantiate it and call the named method/property.

    @param import_str: dotted path of the form a.b.MyClass.method.
    @return: return value of the call.
    """
    log.debug("calling method %s", import_str)
    base, _, method_name = import_str.rpartition('.')
    mod, _, class_name = base.rpartition('.')
    cls = getattr(importlib.import_module(mod), class_name)
    try:
        ret = getattr(cls(), method_name)()
    except Exception:
        log.exception("failed to import and call method %s", import_str)
        raise

    return ret
def process_group(self, item):
    """ Process a dict of operator groups.

    @param item: dict keyed by logical operator where each value is
                 either a single item or a list of items.
    @return: dict keyed by operator with a list of per-item results.
    """
    results = {}
    for op, _items in item.items():
        if op not in results:
            results[op] = []

        if not isinstance(_items, list):
            results[op].append(self.process_single(_items))
        else:
            log.debug("op=%s has %s items(s)", op, len(_items))
            for _item in _items:
                results[op].append(self.process_single(_item))

    return results
def handler(self):
    """ Check all configured assertions against actual config values.

    Each assertion's ops are applied to the actual value for its key and
    a False result short-circuits (unless invert-result is set, in which
    case the aggregated result is negated at the end).

    @return: boolean result of evaluating all assertions.
    """
    invert_result = self.settings.get('invert-result', False)
    handler = self.settings['handler']
    obj = self.get_cls(handler)
    path = self.settings.get('path')
    if path:
        path = os.path.join(HotSOSConfig.DATA_ROOT, path)
        cfg = obj(path)
    else:
        cfg = obj()

    results = []
    for key, assertion in self.settings['assertions'].items():
        ops = assertion.get('ops')
        section = assertion.get('section')
        if section:
            actual = cfg.get(key, section=section)
        else:
            actual = cfg.get(key)

        log.debug("requirement check: config %s %s (actual=%s)", key,
                  self.ops_to_str(ops), actual)
        if ops:
            if actual is None:
                # unset values are only allowed if explicitly permitted
                result = assertion.get('allow-unset', False)
            else:
                result = self.apply_ops(ops, input=actual,
                                        normalise_value_types=True)
        else:
            # no ops so just assert the value is set
            result = self.apply_ops([['ne', None]], input=actual)

        # This is a bit iffy since it only gives us the final config
        # assertion checked.
        self.cache.set('key', key)
        self.cache.set('ops', self.ops_to_str(ops))
        self.cache.set('value_actual', actual)

        # return on first fail
        if not result and not invert_result:
            return False

        results.append(result)

    if invert_result:
        return not all(results)

    return all(results)
def apply(self, item, copy_cache=False):
    """ Apply the backend handler for the given item and return its
    result.

    @param item: either a string (used as both key and value) or a
                 single-entry dict of {key: value}.
    @param copy_cache: if True, merge the handler's cache into our own.
    @return: result of the handler.
    """
    if not isinstance(item, dict):
        # i.e. it must be a string
        key = value = item
    else:
        # dict should only have one key
        key, value = copy.deepcopy(item).popitem()

    ret = self.backend[key](value)
    result = ret.result
    if copy_cache and self.cache:
        log.debug("merging cache with item of type %s",
                  ret.__class__.__name__)
        self.cache.merge(ret.cache)

    return result
def get_attribute(self, import_str):
    """ Import and return a module attribute.

    @param import_str: dotted path to the attribute e.g. a.b.attr.
    @return: the attribute.
    @raises ImportError: if the attribute does not exist.
    """
    log.debug("fetching attribute %s", import_str)
    mod, _, attr = import_str.rpartition('.')
    try:
        ret = getattr(importlib.import_module(mod), attr)
    except Exception as exc:
        log.exception("failed to get module attribute %s", import_str)
        # ystruct.YAMLDefOverrideBase swallows AttributeError so need to
        # convert to something else. Use isinstance so subclasses of
        # AttributeError are converted too.
        if isinstance(exc, AttributeError):
            raise ImportError from exc

        raise

    return ret
def minimise_master_output(summary_yaml, mode):
    """ Converts the master output to include issues and bugs. """
    log.debug("Minimising output (mode=%s).", mode)
    if not summary_yaml:
        return summary_yaml

    if mode == 'short':
        return _get_short_format(summary_yaml)

    if mode == 'very-short':
        return _get_very_short_format(summary_yaml)

    log.debug("Unknown minimalmode '%s'", mode)
    return summary_yaml
def cache_load(self, namespaces=False):
    """ Load network helper info from the cache.

    @param namespaces: if True, load from the namespaces cache file.
    @return: cached data or None if unavailable or unparseable.
    """
    fname = 'ns_interfaces.json' if namespaces else 'interfaces.json'
    path = os.path.join(self.cache_path_root, fname)
    if not os.path.exists(path):
        log.debug("network helper info not available in cache "
                  "(namespaces=%s)", namespaces)
        return None

    log.debug("loading network helper info from cache "
              "(namespaces=%s)", namespaces)
    try:
        with open(path) as fd:
            return json.loads(fd.read())
    except json.decoder.JSONDecodeError:
        log.warning("failed to load interfaces from cache")
        return None
def handler(self):
    """ Check a property value against ops and return the result.

    Settings can either be a plain string path (in which case the default
    'truth' op is applied) or a dict providing a path and optional ops.

    @return: boolean result of applying ops to the property value.
    """
    default_ops = [['truth']]
    if not isinstance(self.settings, dict):
        path = self.settings
        # default is get bool (True/False) for value
        ops = default_ops
    else:
        path = self.settings['path']
        ops = self.settings.get('ops', default_ops)

    actual = self.get_property(path)
    result = self.apply_ops(ops, input=actual)
    log.debug('requirement check: property %s %s (result=%s)', path,
              self.ops_to_str(ops), result)
    self.cache.set('property', path)
    self.cache.set('ops', self.ops_to_str(ops))
    self.cache.set('value_actual', actual)
    return result
def apply_output_formatting(summary_yaml, format, html_escape=False,
                            minimal_mode=None):
    """ Apply the requested output format to the summary.

    @param summary_yaml: summary dict.
    @param format: 'json' or anything else for yaml (default).
    @param html_escape: if True, html-escape the final output.
    @param minimal_mode: optional minimisation mode.
    @return: formatted output string.
    """
    filtered = summary_yaml
    if minimal_mode:
        filtered = minimise_master_output(filtered, minimal_mode)

    if format == 'json':
        log.debug('Converting master yaml file to %s', format)
        filtered = json.dumps(filtered, indent=2, sort_keys=True)
    else:
        filtered = plugintools.dump(filtered)

    if not html_escape:
        return filtered

    log.debug('Encoding output file to html')
    return html.escape(filtered)
def reached(self, checks):
    """
    Return True/False result of this conclusion and prepare issue info.

    If the decision is reached, context is collected from check search
    results (or the requires cache) and the issue message is formatted
    and stored in self.issue ready for registration.

    @param checks: dict of check objects used to evaluate the decision.
    @return: boolean result of the decision.
    """
    log.debug("running conclusion %s", self.name)
    logicmap = LogicalCollectionMap(self.decision.content,
                                    {name: lambda name: checks[name]
                                     for name in checks})
    result = LogicalCollectionHandler(logicmap)()
    if not result:
        return False

    search_results = None
    for name, check in checks.items():
        if check.expr and check.expr.cache.results:
            search_results = check.expr.cache.results.find_by_tag(name)
            if search_results:
                # Save some context for the issue
                self.context.set(**{r.source: r.linenumber
                                    for r in search_results})
        elif check.requires:
            # Dump the requires cache into the context. We improve this
            # later by adding more info.
            self.context.set(**check.requires.cache.cache)

    if self.raises.format_groups:
        if search_results:
            # we only use the first result
            message = self.raises.message_with_format_list_applied(
                search_results[0])
        else:
            message = self.raises.message
            log.warning("no search results found so not applying format "
                        "groups")
    else:
        message = self.raises.message_with_format_dict_applied(
            checks=checks)

    if self.raises.type.ISSUE_TYPE == 'bug':
        self.issue = self.raises.type(self.raises.bug_id, message)
    else:
        self.issue = self.raises.type(message)

    return result