def get_property_set(model_obj):
    """Return the property set referenced from model_obj's model.

    Falls back to the metamodel's default property set when the model
    does not reference one explicitly.
    """
    mm = get_metamodel(model_obj)
    referenced = find(
        model_obj,
        "",
        "+m:^~property_set",
        obj_cls=mm["PropertySet"],
    )
    if referenced is not None:
        return referenced
    return get_default_property_set(mm)
def get_fixed_dimension(attr):
    """Return the fixed (compile-time) dimension of an attribute.

    Scalar and variant attributes always have dimension 1; array
    attributes evaluate their size formula, which must be fixed.

    Raises:
        TextXSemanticError: for an array attribute without a fixed size.
    """
    if textx.textx_isinstance(attr, textx.get_metamodel(attr)["ScalarAttribute"]):
        return 1
    elif textx.textx_isinstance(attr, textx.get_metamodel(attr)["VariantAttribute"]):
        return 1
    elif textx.textx_isinstance(attr, textx.get_metamodel(attr)["ArrayAttribute"]):
        if attr.has_fixed_size():
            return attr.compute_formula()
        else:
            raise TextXSemanticError(
                "unexpected: no fixed dimension available for {}".format(
                    attr.name),
                **get_location(attr))
    # NOTE(review): any other attribute kind falls through and implicitly
    # returns None — confirm this is intended.
def get_all_referenced_structs(s):
    """Return the set of structs transitively referenced by struct s.

    The result always includes s itself; references are collected from
    scalar attributes, array attributes and variant mappings.
    """
    mm = get_metamodel(s)
    struct_cls = mm["Struct"]
    result = {s}
    previous_size = 0
    # Iterate to a fixed point: keep scanning until no new structs show up.
    while previous_size != len(result):
        previous_size = len(result)
        for current in list(result):
            for attr in get_children_of_type("ScalarAttribute", current):
                if textx_isinstance(attr.type, struct_cls):
                    result.add(attr.type)
            for attr in get_children_of_type("ArrayAttribute", current):
                if textx_isinstance(attr.type, struct_cls):
                    result.add(attr.type)
            for mapping in get_children_of_type("VariantMapping", current):
                if textx_isinstance(mapping.type, struct_cls):
                    result.add(mapping.type)
    return result
def generate_py_for_enum(enum_obj, output_file):
    """Generate a python module for the given enum into output_file.

    The file is only (re)written when the enum's model is newer than the
    existing output file.
    """
    mm = get_metamodel(enum_obj)
    if obj_is_newer_than_file(enum_obj, output_file):
        with open(output_file, "w") as f:
            # BUGFIX: the header comment previously rendered as
            # "# for enum fMyEnum" — a stray 'f' (leftover f-string
            # prefix) sat directly before the placeholder.
            f.write(f"""# generated code
# for enum {enum_obj.name}
import numpy as np
from enum import Enum
""")
            # Imports for all elements the enum refers to.
            for r in get_referenced_elements_of_enum(enum_obj):
                f.write("import {}\n".format(module_name(r)))
            f.write("\n")
            # Per-entry property metadata.
            f.write("meta = {\n")
            for ev in enum_obj.enum_entries:
                pdefs = get_all_possible_properties(ev)
                pdefs = sorted(pdefs.keys())
                f.write(f"    '{ev.name}':{{")
                output_properties(f, ev, pdefs)
                f.write("},\n")
            f.write("}\n")
            # The Enum class itself, entries typed via the enum's raw type.
            f.write(f"class {enum_obj.name}(Enum):\n")
            t = fqn(enum_obj.type)
            for ev in enum_obj.enum_entries:
                v = ev.value.render_formula(**fp(enum_obj))
                f.write(f"    {ev.name} = {t}({v})\n")
            f.write("    def __repr__(self):\n")
            f.write("        return self.name\n")
            f.write("    def __str__(self):\n")
            f.write("        return self.name\n")
def __call__(self, current_obj, attr, obj_ref):
    """
    find an object

    Args:
        current_obj: object corresponding to an instance of an object (rule instance)
        attr: the referencing attribute (unused)
        obj_ref: ObjCrossRef to be resolved

    Returns:
        None or the referenced object
    """
    obj_cls, obj_name = obj_ref.cls, obj_ref.obj_name
    if self.split_string is None:
        # No explicit separator configured: derive it from the grammar
        # rule that matched the reference (the peg rule may carry a
        # 'split' attribute); default to '.'.
        from textx import get_metamodel
        rule = get_metamodel(current_obj)[obj_ref.match_rule_name]
        if hasattr(rule._tx_peg_rule, 'split'):
            split = rule._tx_peg_rule.split
        else:
            split = '.'
    else:
        split = self.split_string
    return find(current_obj, obj_name, self.rrel_tree, obj_cls,
                split_string=split, use_proxy=self.use_proxy)
def check_Val(val_object):
    """Validate that a Val's reference matches its value classificator."""
    mm = get_metamodel(val_object)
    classificator = val_object.valueClassificator
    if classificator is None:
        return
    # Each classificator keyword maps to the metamodel class the
    # referenced value must be an instance of.
    expected = {"ENUM": "EnumEntry", "CONST": "Constant"}
    if classificator in expected:
        textx_assert(
            textx_isinstance(val_object.ref.ref, mm[expected[classificator]]),
            val_object,
            "referenced value is not matching classificator '{}'".format(
                classificator),
        )
    else:
        textx_assert(
            False,
            val_object,
            "unexpected classificator '{}'".format(classificator),
        )
def _get_referenced_if_attributes(self):
    """Return all AttrRefs inside if_attr that reference an Attribute."""
    mm = get_metamodel(self)
    attr_cls = mm["Attribute"]
    return [
        ref
        for ref in get_children_of_type("AttrRef", self.if_attr)
        if textx_isinstance(ref.ref, attr_cls)
    ]
def check_Property(p):
    """Validate a property attached to an attribute or variant mapping.

    Checks applicability of the property definition, exclusivity of
    default values, and consistency of fixpoint meta information.
    """
    mm = get_metamodel(p)
    if textx_isinstance(p.parent, mm["VariantMapping"]):
        textx_assert(
            is_applicable(p),
            p,
            f"{p.parent.parent.name}.{p.definition.name} not "
            f"applicable for variant mapping {p.parent.type.name}",
        )
    else:
        textx_assert(is_applicable(p), p,
                     f"{p.parent.name}.{p.definition.name} not applicable")
    prop_value = get_property(p.parent, p.definition.name)  # throws on error
    if p.definition.name == "defaultStringValue":
        # defaultStringValue and defaultValue are mutually exclusive.
        textx_assert(
            not has_property(p.parent, "defaultValue"),
            p.parent,
            "only one default is allowed",
        )
        if textx_isinstance(p.parent, mm["ScalarAttribute"]):
            # A scalar char default must be a single character.
            textx_assert(
                len(prop_value) == 1, p,
                "only exactly one char is allowed as default")
    # if p.definition.name == "fixedSizeInBytes":
    #     struct_type = p.parent.type
    #     assert textx_isinstance(struct_type, mm['Struct'])
    #     value = get_property(p.parent, "fixedSizeInBytes")
    #     textx_assert(
    #         value>=struct_type."getmaxsize MISSING TODO",
    #         p.parent.parent,
    #         f"max size of {struct_type.name} is smaler than {value}",
    #     )
    if p.definition.name in [
            "fixpointLsbValue",
            "fixpointMsbValue",
            "fixpointOffsetValue",
    ]:
        a = p.parent  # must be an attribute (restriction of fixpoint pros)
        # MSB and LSB are alternative anchors: exactly one must be given.
        textx_assert(
            not (has_property(a, "fixpointLsbValue")
                 and has_property(a, "fixpointMsbValue")),
            a,
            "specify either MSB or LSB (and not both at the same time)",
        )
        textx_assert(
            has_property(a, "fixpointLsbValue")
            or has_property(a, "fixpointMsbValue"),
            p,
            "specify either MSB or LSB (you need at least one of them for fixpoint values)",
        )
        textx_assert(
            a.type.internaltype in ["UINT", "INT"],
            a,
            "fixpoint meta information only possible with integral values",
        )
def fqn(t):
    """Return the fully qualified name of a type.

    Raw types resolve through the _m name mapping (falling back to their
    own name); all other types are qualified with their package path.
    """
    mm = get_metamodel(t)
    if not textx_isinstance(t, mm["RawType"]):
        return "::".join(get_package_names_of_obj(t)) + "::" + t.name
    return _m.get(t.name, t.name)
def get(ps):
    """Collect all property definitions of property set ps.

    Definitions inherited via 'extends' are included recursively; a set
    without a parent additionally pulls in the default property set's
    definitions.
    """
    res = set(ps.property_definitions)
    if ps.extends is not None:
        res = res | get(ps.extends)
    else:
        # NOTE(review): 'model_obj' is not defined inside this function —
        # presumably a closure variable from an enclosing scope not shown
        # here; confirm it should not be 'ps' instead.
        res = res | set(
            get_default_property_set(
                get_metamodel(model_obj)).property_definitions)
    return res
def check_Attribute(a):
    """Validate an attribute: naming rules, type/bit-size constraints,
    mandatory properties, and container/embedded restrictions.

    Raises:
        TextXSemanticError: on any violated constraint.
    """
    # Reserved name prefixes.
    if a.name.startswith("item_"):
        raise TextXSemanticError(
            "attribute may not start with 'item_' " + a.name,
            **get_location(a))
    if a.name.startswith("_"):
        raise TextXSemanticError("attribute may not start with '_' " + a.name,
                                 **get_location(a))
    if hasattr(a, "type"):
        if a.embedded:
            # BUGFIX: the message previously read "char may be be used as
            # embedded field"; the assertion forbids char here, so the
            # message must say "may not".
            textx_assert(a.type.name != "char", a,
                         "char may not be used as embedded field")
        if textx_isinstance(a.type, RawType):
            # Non-embedded integral raw types must be power-of-two sized.
            if a.type.internaltype in ["INT", "UINT"] and not a.embedded:
                if get_bits(a.type) not in [8, 16, 32, 64, 128]:
                    raise TextXSemanticError(
                        "attribute {} must have a bit size of a power of two.".
                        format(a.name),
                        **get_location(a),
                    )
            # Embedded fields must be integral.
            if a.type.internaltype not in ["INT", "UINT", "BOOL"
                                           ] and a.embedded:
                raise TextXSemanticError(
                    "attribute {} must be an integral type.".format(a.name),
                    **get_location(a),
                )
        elif textx_isinstance(a.type, get_metamodel(a)["Enum"]):
            if get_bits(a.type) not in [8, 16, 32, 64, 128] and not a.embedded:
                raise TextXSemanticError(
                    "attribute {} must have a bit size of a power of two.".
                    format(a.name),
                    **get_location(a),
                )
    # check mandatory properties in attributes
    mandatory_prop_defs = get_all_possible_mandatory_properties(a)
    attr_prop_defs = [p.definition for p in a.properties]
    for d in mandatory_prop_defs.values():
        textx_assert(d in attr_prop_defs, a,
                     f"missing mandatory property '{d.name}'")
    if a.is_container():
        textx_assert(
            a.if_attr is None,
            a,
            "restricted attributes may not be used as container (put them into a separate substruct)",
        )
    if a.is_embedded():
        textx_assert(
            a.if_attr is None,
            a,
            "restricted attributes may not be embedded (put them into a separate substruct)",
        )
def get_referenceed_dim_attributes(self):
    """Return every AttrRef in this attribute's dimensions that
    references an Attribute.

    (The historical typo in the method name is kept for API
    compatibility.)
    """
    mm = get_metamodel(self)
    attr_cls = mm["Attribute"]
    refs = []
    for dim in self.dims:
        refs.extend(
            ref for ref in get_children_of_type("AttrRef", dim)
            if textx_isinstance(ref.ref, attr_cls))
    return refs
def check_VariantMapping(mapping):
    """Check that a variant mapping id matches its selector's type."""
    mm = get_metamodel(mapping)
    selector_type = mapping.parent.variant_selector.ref.type
    if not textx_isinstance(selector_type, mm["Enum"]):
        return
    # Enum-typed selector: the mapping id must be an enum value as well.
    if not mapping.id.is_enum():
        raise TextXSemanticError(
            "bad type (enum of type {} is expected)".format(
                selector_type.name),
            **get_location(mapping),
        )
def person_definer_scope(knows, attr, attr_ref):
    """Scope provider resolving a person by name.

    When no person with the looked-up name exists in the model yet, a
    new Person instance is created and appended to the model.
    """
    model = get_model(knows)    # model of the currently processed element
    wanted = attr_ref.obj_name  # name of the element being looked up
    for person in model.persons:
        if person.name == wanted:
            return person       # an existing person wins
    # Not found: create the person on the fly and register it.
    mm = get_metamodel(model)
    person = mm['Person']()
    person.name = wanted
    person.parent = model
    model.persons.append(person)
    return person
def get_packages_of_obj(obj):
    """Return the package chain containing obj, outermost package first."""
    from textx import textx_isinstance, get_metamodel
    pkgs = []
    node = obj
    while node is not None:
        if textx_isinstance(node, get_metamodel(node)["AnyPackage"]):
            # Prepend so outer packages end up in front.
            pkgs.insert(0, node)
        node = getattr(node, "parent", None)
    return pkgs
def _assert_attr_defined_before_beeing_used_in_formula(a, f, d):
    """Assert that every scalar attribute referenced in formula f is
    declared before attribute a."""
    mm = get_metamodel(d)
    scalar_cls = mm["ScalarAttribute"]
    for attr_ref in get_children_of_type("AttrRef", f):
        # Only the first element of a reference path has to be checked.
        first = attr_ref.ref._tx_path[0]
        if textx_isinstance(first, scalar_cls):
            textx_assert(
                is_attribute_before_other_attribute(first, a),
                d,
                f"{first.name} must be defined before {a.name}",
            )
def check_Sum(sum):
    """Reject formulas that mix enum entries into arithmetic: a sum
    referencing enum entries must itself be a plain enum value."""
    from textx import get_children_of_type, textx_isinstance, get_metamodel
    mm = get_metamodel(sum)
    enum_entry_cls = mm["EnumEntry"]
    references_enum = any(
        v.ref is not None and textx_isinstance(v.ref.ref, enum_entry_cls)
        for v in get_children_of_type("Val", sum))
    if references_enum and not sum.is_enum():
        raise TextXSemanticError("enum must not be part of a formula",
                                 **get_location(sum))
def prop_scope(refItem, attr, attr_ref):
    """Scope provider resolving a property definition by name.

    Walks the property-set 'extends' chain starting at the item's
    property set; when the chain ends without an explicit parent, the
    default property set is consulted once as a final fallback.

    Returns:
        The matching property definition, a Postponed marker when the
        property set is not yet resolvable, or None when not found.
    """
    ps = get_property_set(refItem)
    defaultPropertySet = get_default_property_set(
        textx.get_metamodel(refItem))
    if isinstance(ps, textx.scoping.Postponed):
        # Resolution has to be retried later by textX.
        return ps
    while ps is not None:
        for pd in ps.property_definitions:
            if pd.name == attr_ref.obj_name:
                return pd
        if ps.extends is None and ps is not defaultPropertySet:
            # End of the explicit chain: fall back to the default set.
            # The identity guard prevents looping forever on the default
            # set itself.
            ps = defaultPropertySet
        else:
            ps = ps.extends
    return None
def get_signed_or_unsigned(t):
    """Return "signed" or "unsigned" for a raw type or an enum's base type.

    Raises:
        Exception: for unsupported internal types or type kinds.
    """
    mm = get_metamodel(t)
    if textx_isinstance(t, mm["Enum"]):
        # Enums delegate to their underlying raw type.
        return get_signed_or_unsigned(t.type)
    if textx_isinstance(t, mm["RawType"]):
        signedness = {
            "INT": "signed",
            "UINT": "unsigned",
            "BOOL": "unsigned",
        }.get(t.internaltype)
        if signedness is None:
            raise Exception("unexpected")
        return signedness
    raise Exception("unexpected")
def _assert_restricted_attr_may_not_be_used_in_formula(f,
                                                       d,
                                                       info_where="dimension"):
    """Assert that formula f references no restricted (if_attr-guarded)
    scalar attribute anywhere along its reference paths.

    Args:
        f: the formula to scan for AttrRef children.
        d: the object reported on assertion failure.
        info_where: human-readable location used in the error message.
    """
    from itertools import chain
    mm = get_metamodel(d)
    scalar_cls = mm["ScalarAttribute"]
    # Flatten all reference paths lazily. The original used
    # reduce(lambda a, b: a + b, ...) which repeatedly concatenates
    # lists (quadratic); chain.from_iterable is linear and handles the
    # empty case without a special guard.
    all_path_elements = chain.from_iterable(
        ref.ref._tx_path for ref in get_children_of_type("AttrRef", f))
    for r in all_path_elements:
        if textx_isinstance(r, scalar_cls):
            textx_assert(
                r.if_attr is None,
                d,
                f"restricted attribute {r.name} may not be used in {info_where}",
            )
def apply(self, obj, lookup_list, matched_path, first_element):
    """
    Args:
        obj: model object
        lookup_list: non-empty name list (not used)

    Returns:
        The parent of the specified type or None.
    """
    from textx import get_metamodel, textx_isinstance
    t = get_metamodel(obj)[self.type]
    # Walk up the parent chain until an instance of the requested
    # metamodel class is found.
    while hasattr(obj, "parent"):
        obj = obj.parent
        if textx_isinstance(obj, t):
            return obj, lookup_list, matched_path
    # No matching ancestor: signal "not found" with obj=None.
    return None, lookup_list, matched_path
def get_bits(x):
    """Return the bit width of a type or attribute.

    Raises:
        TextXSemanticError: when no bit width can be derived for x.
    """
    mm = textx.get_metamodel(x)
    if textx.textx_isinstance(x, mm["RawType"]):
        return x.bits
    if textx.textx_isinstance(x, mm["Enum"]):
        return x.type.bits
    if textx.textx_isinstance(x, mm["ScalarAttribute"]):
        return get_bits(x.type)
    if textx.textx_isinstance(x, mm["ArrayAttribute"]):
        # Arrays contribute element bits times their fixed element count.
        textx_assert(x.has_fixed_size(), x,
                     "embedded arrays must have fixed size")
        return get_bits(x.type) * x.compute_formula()
    raise TextXSemanticError(
        "unexpected: no bits available for {}".format(x.name),
        **get_location(x))
def get_enum(self):
    """Return the referenced enum entry when this formula tree is a
    single plain enum reference; otherwise None."""
    from textx import textx_isinstance, get_metamodel
    # Descend through the four single-child expression levels; any
    # fan-out means this is a real expression, not a bare reference.
    node = self
    for _ in range(4):
        if len(node.parts) != 1:
            return None
        node = node.parts[0]
    mm = get_metamodel(node)
    if node.ref is not None and textx_isinstance(node.ref.ref,
                                                 mm["EnumEntry"]):
        return node.ref.ref._tx_obj
    return None
def _get_ctor_param_type(a):
    """Return the C++ constructor parameter type string for attribute a.

    Raises:
        Exception: for unsupported attribute kinds.
    """
    mm = get_metamodel(a)
    is_scalar = textx.textx_isinstance(a, mm["ScalarAttribute"])
    is_array = textx.textx_isinstance(a, mm["ArrayAttribute"])
    if a.is_embedded():
        # Embedded fields use the plain C++ return type of the element.
        if is_scalar:
            return get_cpp_return_type(a.type)
        if is_array:
            return (f"std::array<{get_cpp_return_type(a.type)},"
                    f"{a.compute_formula()}>")
    if is_scalar:
        return fqn(a.type)
    if is_array:
        if a.has_fixed_size():
            return f"std::array<{fqn(a.type)},{a.compute_formula()}>"
        return f"std::vector<{fqn(a.type)}>"
    if textx.textx_isinstance(a, mm["VariantAttribute"]):
        return f"std::variant<{get_variant_types(a)}>"
    raise Exception("unexpected type")
def get_all_unique_properties(struct_obj):
    """
    :param struct_obj: the struct obj
    :return: a list of pairs (property_definition, [path to obj as list of attributes])
    """
    mm = get_metamodel(struct_obj)
    ret = []
    for a in struct_obj.attributes:
        # Collect properties whose definition restricts them to at most
        # one occurrence.
        for p in a.properties:
            if p.definition.numberOfPropRestriction is not None:
                if p.definition.numberOfPropRestriction.max == 1:
                    ret.append((p.definition, [a]))
        # Recurse into nested structs (scalar struct-typed attributes),
        # unless the nested struct is flagged as payload.
        if textx_isinstance(a, mm["ScalarAttribute"]) and textx_isinstance(
                a.type, mm["Struct"]):
            do_break = get_property(a, "is_payload")  # None or bool
            if do_break is None or not do_break:
                # Prefix each recursive result path with the current attribute.
                ret += map(lambda x: (x[0], [a] + x[1]),
                           get_all_unique_properties(a.type))
    return ret
def get_referenced_elements_of_struct(i):
    """Return the sorted, deduplicated list of enums, structs and
    constants blocks referenced by struct i (excluding i itself).

    Raises:
        Exception: for unexpected attribute kinds.
    """
    from textx import get_metamodel, textx_isinstance, get_children_of_type
    mm = get_metamodel(i)
    collected = []

    def add_constant_parents(node):
        # Constants referenced in a formula contribute their enclosing
        # Constants block.
        for ref in get_children_of_type("AttrRef", node):
            if textx_isinstance(ref.ref, mm["Constant"]):
                collected.append(ref.ref.parent)

    for a in i.attributes:
        if textx_isinstance(a, mm["ScalarAttribute"]):
            collected.append(a.type)
        elif textx_isinstance(a, mm["ArrayAttribute"]):
            collected.append(a.type)
            for d in a.dims:
                add_constant_parents(d.dim)
        elif textx_isinstance(a, mm["VariantAttribute"]):
            for m in a.mappings:
                collected.append(m.type)
        else:
            raise Exception("unexpected")
        for p in a.properties:
            if p.numberValue:
                add_constant_parents(p.numberValue.x)
    for entry in i.constant_entries:
        add_constant_parents(entry.value)
    relevant = [
        x for x in collected
        if (textx_isinstance(x, mm["Enum"]) or textx_isinstance(
            x, mm["Struct"]) or textx_isinstance(x, mm["Constants"]))
        and x != i
    ]
    return sorted(set(relevant), key=lambda x: x.name)
def find_builtin(obj, lookup_list, rrel_tree, obj_cls=None):
    """
    Find a object from the buildin_models

    :param obj: an object of the model to get the metamodel from
    :param lookup_list: search path (FQN)
    :param rrel_tree: search path (in the abstract syntax tree)
    :param obj_cls: class to be searched for (optional)
    :return: the object or None
    """
    from textx.scoping.rrel import find
    from textx import get_metamodel
    # Probe each builtin model in turn; the first hit wins.
    for builtin_model in get_metamodel(obj).builtin_models:
        hit = find(builtin_model, lookup_list, rrel_tree, obj_cls)
        if hit is not None:
            return hit
    return None
def load_models_using_filepattern(self,
                                  filename_pattern,
                                  model,
                                  glob_args,
                                  is_main_model=False,
                                  encoding='utf-8',
                                  add_to_local_models=True,
                                  model_params=None):
    """
    Add a new model to all relevant objects.

    Args:
        filename_pattern: models to be loaded
        model: model holding the loaded models in its
            _tx_model_repository field (may be None).
        glob_args: arguments passed to the glob.glob function.

    Returns:
        the list of loaded models

    Raises:
        IOError: when the pattern matches no file.
    """
    from textx import get_metamodel
    if model is not None:
        # Register the anchor model itself before loading models
        # relative to it.
        self.update_model_in_repo_based_on_filename(model)
        the_metamodel = get_metamodel(model)  # default metamodel
    else:
        the_metamodel = None
    filenames = glob.glob(filename_pattern, **glob_args)
    if len(filenames) == 0:
        raise IOError(errno.ENOENT, os.strerror(errno.ENOENT),
                      filename_pattern)
    loaded_models = []
    for filename in filenames:
        # Each file may map to a different metamodel (e.g. by file
        # extension); fall back to the default metamodel otherwise.
        the_metamodel = metamodel_for_file_or_default_metamodel(
            filename, the_metamodel)
        loaded_models.append(
            self.load_model(the_metamodel,
                            filename,
                            is_main_model,
                            encoding=encoding,
                            add_to_local_models=add_to_local_models,
                            model_params=model_params))
    return loaded_models
def compute_formula(self, **kwargs):
    """Evaluate this formula node to a constant value.

    Keyword Args:
        use_max_for_attributes: when True, a reference to a scalar
            attribute is substituted by its 'maxValue' property instead
            of raising (attributes are not constexpr otherwise).

    Raises:
        Exception: when an attribute is referenced and
            use_max_for_attributes is not set.
    """
    use_max_for_attributes = kwargs.get("use_max_for_attributes", False)
    if self.ref:
        from textx import textx_isinstance, get_metamodel
        if textx_isinstance(self.ref.ref, get_metamodel(self)["Attribute"]):
            if use_max_for_attributes:
                from item_lang.properties import has_property, get_property
                assert self.ref.ref.is_scalar()
                assert has_property(self.ref.ref, "maxValue")
                return get_property(self.ref.ref, "maxValue")
            else:
                raise Exception("no constexpr")
        else:
            # Reference to a constant-like element: evaluate its value.
            return self.ref.ref.value.compute_formula(**kwargs)
    elif self.sum:
        return self.sum.compute_formula(**kwargs)
    else:
        # Plain literal value.
        return self.value
def get_referenced_elements_of_constants(cs):
    """Return the sorted, deduplicated list of enums, structs and other
    constants blocks referenced by the constants block cs (excluding cs)."""
    from textx import get_metamodel, textx_isinstance, get_children_of_type
    mm = get_metamodel(cs)
    collected = []
    for entry in cs.constant_entries:
        for ref in get_children_of_type("AttrRef", entry.value):
            if textx_isinstance(ref.ref, mm["Constant"]):
                # A referenced constant contributes its enclosing block.
                collected.append(ref.ref.parent)
    relevant = [
        x for x in collected
        if (textx_isinstance(x, mm["Enum"]) or textx_isinstance(
            x, mm["Struct"]) or textx_isinstance(x, mm["Constants"]))
        and x != cs
    ]
    return sorted(set(relevant), key=lambda x: x.name)