def expand_tag(
    some_or_tag_el: _Element, only_expand_types: Iterable[str] = None
) -> List[_Element]:
    """
    Substitute a tag element with elements which the tag refers to.

    some_or_tag_el -- an already expanded element or a tag element to expand
    only_expand_types -- if specified, return only elements of these types
    """
    if some_or_tag_el.tag != TAG_TAG:
        # Not a tag element, so it is already "expanded".
        return [some_or_tag_el]

    conf_section = find_parent(some_or_tag_el, "configuration")
    if conf_section is None:
        # Without a configuration section there is nothing to look up in.
        return []

    referenced_ids = [
        str(obj_ref.get("id", ""))
        for obj_ref in some_or_tag_el.iterfind(TAG_OBJREF)
    ]
    result: List[_Element] = []
    for referenced_id in referenced_ids:
        if only_expand_types:
            # Restrict the lookup to the requested element types.
            searcher = ElementSearcher(
                only_expand_types, referenced_id, conf_section
            )
            if searcher.element_found():
                result.append(searcher.get_element())
        else:
            result.extend(
                get_configuration_elements_by_id(conf_section, referenced_id)
            )
    return result
def verify(
    topology_el: _Element,
    resources_el: _Element,
    cluster_status_nodes: Sequence[StateElement],
) -> ReportItemList:
    """
    Check if all cluster nodes and stonith devices used in fencing levels exist.

    topology_el -- fencing levels to check
    resources_el -- resources definitions
    cluster_status_nodes -- list of status of existing cluster nodes
    """
    referenced_devices: Set[str] = set()
    referenced_nodes: Set[str] = set()
    for level_el in topology_el.iterfind("fencing-level"):
        # The "devices" attribute holds a comma-separated list of device ids.
        referenced_devices.update(
            str(level_el.get("devices", "")).split(",")
        )
        target = level_el.get("target")
        if target is not None:
            referenced_nodes.add(str(target))

    report_list: ReportItemList = []
    if referenced_devices:
        report_list.extend(
            _validate_devices(
                resources_el, sorted(referenced_devices), allow_force=False
            )
        )
    for node in sorted(referenced_nodes):
        report_list.extend(
            _validate_target_valuewise(
                cluster_status_nodes, TARGET_TYPE_NODE, node, allow_force=False
            )
        )
    return report_list
def get_operations_from_transitions(
    transitions: _Element,
) -> List[SimulationOperation]:
    """
    Extract resource operations from simulated transitions

    transitions -- simulated transitions from crm_simulate
    """
    extracted: List[SimulationOperation] = []
    for rsc_op in transitions.iterfind("synapse/action_set/rsc_op"):
        op_type = SimulationOperationType(
            rsc_op.get("operation", "").lower()
        )
        if op_type not in KNOWN_OPERATIONS:
            # Skip operations we are not interested in.
            continue
        for primitive in rsc_op.iterfind("primitive"):
            primitive_id = str(primitive.attrib["id"])
            extracted.append(
                SimulationOperation(
                    operation_id=int(rsc_op.attrib["id"]),
                    primitive_id=primitive_id,
                    # Fall back to the plain id when no long-id is present.
                    primitive_long_id=(
                        primitive.get("long-id") or primitive_id
                    ),
                    operation_type=op_type,
                    on_node=str(rsc_op.attrib["on_node"]),
                )
            )
    # Stable sort keeps the relative order of operations with equal ids.
    return sorted(extracted, key=lambda sim_op: sim_op.operation_id)
def _find_actors(self, e: Et._Element, actor_type: str, actor_dict: dict): for a in e.iterfind(f"./*[@type='{actor_type}']"): actor = a.find('./actor').text value = a.find('./value').text self.xml_dict[e.get('name')][actor] = { 'value': value, 'type': actor_type } self.__update_actor_dict(actor_dict, actor, value)
def _read_response(self, r_xml: Et._Element) -> bool: for e in r_xml.iterfind(self.response_xpath): if e.text: self.result.append(e.text) if self.result: LOGGER.debug('AsConnector successfully got all material names from scene.') return True if self.result else False
def is_stonith_enabled(crm_config_el: _Element) -> bool:
    """Return False only when an nvpair explicitly disables stonith."""
    # We should read the default value from pacemaker. However, that may slow
    # pcs down as we need to run 'pacemaker-schedulerd metadata' to get it.
    # TODO properly support multiple cluster_property_set with rules
    stonith_nvpair_xpath = (
        "cluster_property_set/nvpair[@name='stonith-enabled']"
    )
    for nvpair in crm_config_el.iterfind(stonith_nvpair_xpath):
        if is_false(nvpair.get("value")):
            # First explicitly-false value wins.
            return False
    # No disabling nvpair found; assume pacemaker's default of enabled.
    return True
def _read_response(self, r_xml: Et._Element) -> bool: self.result: List[str] = list() for e in r_xml.iterfind(self.response_xpath): if e.text: self.result.append(e.text) if self.result: LOGGER.debug('AsConnector SceneGetAll request successful. Found scenes %s', self.result) return True if self.result else False
def _read_response(self, r_xml: Et._Element) -> bool: if r_xml is None: return False self.scene_root = NodeInfo.get_node_from_as_connector_element(r_xml.find(self.response_xpath)) for idx, n in enumerate(r_xml.iterfind(self.response_xpath)): node = NodeInfo.get_node_from_as_connector_element(n) node.knecht_id = None if node.knecht_id == 'None' else node.knecht_id self.result[node.knecht_id or node.as_id or idx] = node return True
def nvset_element_to_dto(nvset_el: _Element) -> CibNvsetDto:
    """
    Export an nvset xml element to its DTO
    """
    rule_el = nvset_el.find("./rule")
    rule_dto = rule_element_to_dto(rule_el) if rule_el is not None else None
    nvpair_dtos = [
        nvpair_element_to_dto(nvpair_el)
        for nvpair_el in nvset_el.iterfind("./nvpair")
    ]
    return CibNvsetDto(
        str(nvset_el.get("id", "")),
        _tag_to_type[str(nvset_el.tag)],
        export_attributes(nvset_el, with_id=False),
        rule_dto,
        nvpair_dtos,
    )
def get_misconfigured_resources(
    resources_el: _Element,
) -> Tuple[List[_Element], List[_Element], List[_Element]]:
    """
    Return stonith: all, 'action' option set, 'method' option set to 'cycle'
    """
    all_stonith = []
    with_action = []
    with_cycle_method = []
    for stonith_el in resources_el.iterfind("primitive[@class='stonith']"):
        all_stonith.append(stonith_el)
        for nvpair in stonith_el.iterfind("instance_attributes/nvpair"):
            name = nvpair.get("name")
            value = nvpair.get("value")
            # An 'action' nvpair only counts when its value is non-empty.
            if name == "action" and value:
                with_action.append(stonith_el)
            if name == "method" and value == "cycle":
                with_cycle_method.append(stonith_el)
    return all_stonith, with_action, with_cycle_method
def nvset_element_to_dto(
    nvset_el: _Element, rule_in_effect_eval: RuleInEffectEval
) -> CibNvsetDto:
    """
    Export an nvset xml element to its DTO

    nvset_el -- an nvset element to be exported
    rule_in_effect_eval -- a class for evaluating if a rule is in effect
    """
    rule_el = nvset_el.find("./rule")
    rule_dto = (
        rule_element_to_dto(rule_in_effect_eval, rule_el)
        if rule_el is not None
        else None
    )
    return CibNvsetDto(
        str(nvset_el.get("id", "")),
        export_attributes(nvset_el, with_id=False),
        rule_dto,
        [
            nvpair_element_to_dto(nvpair_el)
            for nvpair_el in nvset_el.iterfind("./nvpair")
        ],
    )
def _parse_f_value(f):
    """Extract the value of a single <tei:f> feature element.

    A childless <tei:f> yields its text; a single child is interpreted by
    its tag (symbol/numeric/string/binary/nested fs); anything else raises.
    """
    if len(f) == 0:
        return f.text
    if len(f) != 1:
        raise ValueError("Features with more than one children are not supported")
    value_elem = f[0]
    tag = value_elem.tag
    if tag in (f"{TEI}symbol", f"{TEI}numeric"):
        return value_elem.attrib[f"{TEI}value"]
    if tag == f"{TEI}string":
        return value_elem.text
    if tag == f"{TEI}binary":
        value_str = value_elem.attrib[f"{TEI}value"]
        if value_str in ("true", "1"):
            return True
        if value_str in ("false", "0"):
            return False
        raise ValueError(f"Invalid value for <tei:binary>: {value_str!r}.")
    if tag == f"{TEI}fs":
        # Nested feature structure: recurse.
        return parse_fs(value_elem)
    raise ValueError(f"Unsupported feature type: {value_elem.tag!r}")


def parse_fs(fs: etree._Element) -> FeatureStructure:
    """Parse a <tei:fs> element

    Note that this doesn't handle all the convoluted ways to specify fs in TEI
    but only the relatively simple subset we need here.
    """
    if fs.tag != f"{TEI}fs":
        raise ValueError(
            f"Attempting to parse a {fs.tag} element as a feature structure."
        )
    # Later duplicates of a feature name overwrite earlier ones, as before.
    # NOTE(review): attribute keys are looked up namespaced ({TEI}name) —
    # confirm the input really namespaces attributes, not just elements.
    return {
        f.attrib[f"{TEI}name"]: _parse_f_value(f)
        for f in fs.iterfind(f"{TEI}f")
    }