def check_EnumEntry(a):
    """Verify that enum entry *a* carries every mandatory property."""
    required = get_all_possible_mandatory_properties(a)
    present = [prop.definition for prop in a.properties]
    for definition in required.values():
        textx_assert(definition in present, a,
                     f"missing mandatory property '{definition.name}'")
def check_ArrayAttribute(a):
    """Check dimension restrictions of an array attribute.

    tests: see filebased_tests/char
    """
    one_dim = len(a.dims) == 1
    if a.type.name == "char":
        textx_assert(one_dim, a, "no multidimensional strings allowed")
    if a.is_embedded():
        textx_assert(one_dim, a,
                     "no multidimensional embedded arrays allowed")
def _assert_attr_defined_before_beeing_used_in_formula(a, f, d):
    """Assert that every scalar attribute referenced in formula *f* is
    defined before attribute *a*."""
    mm = get_metamodel(d)
    scalar_type = mm["ScalarAttribute"]
    # only the first element of a reference path has to be checked
    heads = (x.ref._tx_path[0] for x in get_children_of_type("AttrRef", f))
    for ref in heads:
        if not textx_isinstance(ref, scalar_type):
            continue
        textx_assert(
            is_attribute_before_other_attribute(ref, a),
            d,
            f"{ref.name} must be defined before {a.name}",
        )
def _assert_restricted_attr_may_not_be_used_in_formula(f, d, info_where="dimension"):
    """Assert that no restricted (`if_attr`) scalar attribute appears
    anywhere in formula *f*."""
    mm = get_metamodel(d)
    paths = [x.ref._tx_path for x in get_children_of_type("AttrRef", f)]
    if not paths:
        return
    scalar_type = mm["ScalarAttribute"]
    # concatenate all reference paths into one flat list
    for ref in reduce(lambda lhs, rhs: lhs + rhs, paths):
        if not textx_isinstance(ref, scalar_type):
            continue
        textx_assert(
            ref.if_attr is None,
            d,
            f"restricted attribute {ref.name} may not be used in {info_where}",
        )
def check_Constants(c):
    """Check that the names of all constant entries of *c* are unique.

    Reports (via textx_assert) the first entry whose name duplicates an
    earlier entry, anchored at that duplicate entry.

    Replaces the previous destructive list/set scan, which was O(n^2)
    (repeated `del list[0]`) and would raise a spurious KeyError on
    `set.remove` after a failed assertion if `textx_assert` ever
    collected errors instead of raising immediately.
    """
    seen = set()
    for entry in c.constant_entries:
        textx_assert(
            entry.name not in seen,
            entry,
            f"constant name {entry.name} is not unique",
        )
        seen.add(entry.name)
def check_Attribute(a):
    """Validate attribute *a*.

    Checks, in order: reserved name prefixes, the char/embedded
    restriction, bit-size rules for raw and enum types, mandatory
    properties, and that restricted (`if_attr`) attributes are neither
    containers nor embedded.

    Raises:
        TextXSemanticError: on any violation (directly or via textx_assert).
    """
    # reserved name prefixes
    if a.name.startswith("item_"):
        raise TextXSemanticError(
            "attribute may not start with 'item_' " + a.name, **get_location(a))
    if a.name.startswith("_"):
        raise TextXSemanticError("attribute may not start with '_' " + a.name,
                                 **get_location(a))
    if hasattr(a, "type"):
        if a.embedded:
            # BUGFIX: message used to read "char may be be used as embedded
            # field" -- garbled and inverted w.r.t. the asserted condition.
            textx_assert(a.type.name != "char", a,
                         "char may not be used as embedded field")
        if textx_isinstance(a.type, RawType):
            # non-embedded integral raw types must have a power-of-two size
            if a.type.internaltype in ["INT", "UINT"] and not a.embedded:
                if get_bits(a.type) not in [8, 16, 32, 64, 128]:
                    raise TextXSemanticError(
                        "attribute {} must have a bit size of a power of two.".
                        format(a.name),
                        **get_location(a),
                    )
            # embedded fields must be integral
            if a.type.internaltype not in ["INT", "UINT", "BOOL"
                                           ] and a.embedded:
                raise TextXSemanticError(
                    "attribute {} must be an integral type.".format(a.name),
                    **get_location(a),
                )
        elif textx_isinstance(a.type, get_metamodel(a)["Enum"]):
            if get_bits(a.type) not in [8, 16, 32, 64, 128] and not a.embedded:
                raise TextXSemanticError(
                    "attribute {} must have a bit size of a power of two.".
                    format(a.name),
                    **get_location(a),
                )
    # check mandatory properties in attributes
    mandatory_prop_defs = get_all_possible_mandatory_properties(a)
    attr_prop_defs = list(map(lambda p: p.definition, a.properties))
    for d in mandatory_prop_defs.values():
        textx_assert(d in attr_prop_defs, a,
                     f"missing mandatory property '{d.name}'")
    if a.is_container():
        textx_assert(
            a.if_attr is None,
            a,
            "restricted attributes may not be used as container (put them into a separate substruct)",
        )
    if a.is_embedded():
        textx_assert(
            a.if_attr is None,
            a,
            "restricted attributes may not be embedded (put them into a separate substruct)",
        )
def check_Val(val_object):
    """Check that a value's classificator matches the referenced object
    (ENUM -> EnumEntry, CONST -> Constant); no classificator is fine."""
    mm = get_metamodel(val_object)
    classificator = val_object.valueClassificator
    if classificator is None:
        return
    expected_rule = {"ENUM": "EnumEntry", "CONST": "Constant"}.get(classificator)
    if expected_rule is None:
        textx_assert(
            False,
            val_object,
            "unexpected classificator '{}'".format(classificator),
        )
        return
    textx_assert(
        textx_isinstance(val_object.ref.ref, mm[expected_rule]),
        val_object,
        "referenced value is not matching classificator '{}'".format(
            classificator),
    )
def _assert_attr_used_in_formula_must_have_a_max_and_default_value(f, d):
    """Assert that every raw-typed scalar attribute used in formula *f*
    declares both a 'maxValue' and a 'defaultValue' property."""
    mm = get_metamodel(d)
    # _tx_path is a list of references down to the final variable,
    # e.g. header.n yields [header, n].
    paths = [x.ref._tx_path for x in get_children_of_type("AttrRef", f)]
    if not paths:
        return
    scalar_type = mm["ScalarAttribute"]
    # merge all ref lists into one flat list
    for ref in reduce(lambda lhs, rhs: lhs + rhs, paths):
        if not (textx_isinstance(ref, scalar_type) and ref.has_rawtype()):
            continue
        textx_assert(
            has_property(ref, "maxValue"),
            d,
            f"attribute {ref.name} used in dim formula must have a 'maxValue'",
        )
        textx_assert(
            has_property(ref, "defaultValue"),
            d,
            f"attribute {ref.name} used in dim formula must have a 'defaultValue'",
        )
def get_property_type(attr, prop_name):
    """Return the type of property *prop_name* declared on *attr*.

    For an ATTRTYPE property this is the attribute's own (raw/enum) type;
    otherwise a builtin model type (or plain ``str``) matching the
    property definition's internal type.

    Raises:
        Exception: if the property is missing or its internal type is unknown.
    """

    def resolve(internaltype):
        # map the definition's internal type onto a builtin model type
        if internaltype == "STRING":
            return str
        try:
            builtin_name = {
                "INT": "built_in.int32",
                "UINT": "built_in.uint32",
                "BOOL": "built_in.bool",
                "FLOAT": "built_in.double",
            }[internaltype]
        except KeyError:
            raise Exception("unknown internal type {}".format(internaltype))
        # the "+m" modifier is not strictly required here
        return find_builtin(attr, builtin_name, "+m:^package.items")

    matches = [p for p in attr.properties if p.definition.name == prop_name]
    if not matches:
        raise Exception("property {} not found".format(prop_name))
    mm = get_metamodel(attr)
    textx_assert(len(matches) == 1, attr, prop_name + " must be unique")
    prop = matches[0]
    internaltype = prop.definition.internaltype
    if internaltype != "ATTRTYPE":
        return resolve(internaltype)
    textx_assert(
        not textx_isinstance(attr, mm["VariantAttribute"]),
        attr,
        prop_name + " not supported for variants",
    )
    textx_assert(
        textx_isinstance(attr.type, mm["RawType"])
        or textx_isinstance(attr.type, mm["Enum"]),
        attr,
        prop_name + " only applicable for rawtypes/enums",
    )
    return attr.type
# NOTE(review): this module-level `get_value` looks like an accidental
# copy of the helper nested inside `get_property` below -- it references
# `attr` and `prop_name`, which are NOT defined at module scope, so
# calling it as a free function would raise NameError. Confirm whether
# it can be removed.
def get_value(res, internaltype):
    # Extract the concrete value of property instance `res` according to
    # its definition's internal type; STRING returns the text directly,
    # numeric kinds fall through to the fixpoint-aware conversion at the
    # bottom, unknown kinds return None.
    if internaltype == "STRING":
        textx_assert(res.textValue is not None, attr,
                     prop_name + " must be a STRING")
        return res.textValue.x
    elif internaltype == "INT":
        textx_assert(res.numberValue is not None, attr,
                     prop_name + " must be an NUMBER/INT")
    elif internaltype == "UINT":
        textx_assert(res.numberValue is not None, attr,
                     prop_name + " must be an NUMBER/UINT")
    elif internaltype == "BOOL":
        textx_assert(
            res.numberValue is not None,
            attr,
            prop_name + " must be an NUMBER/BOOL as int",
        )
    elif internaltype == "FLOAT":
        textx_assert(res.numberValue is not None, attr,
                     prop_name + " must be an NUMBER")
        assert res.numberValue is not None
    elif internaltype == "ENUM":
        textx_assert(res.numberValue is not None, attr,
                     prop_name + " must be an ENUM")
        assert res.numberValue is not None
    else:
        # unknown internal type: signal "could not be interpreted"
        return None
    # all numeric branches fall through to the shared conversion
    return compute_formula_for_internaltype(res.numberValue.x, internaltype,
                                            prop_name)
def get_property(attr, prop_name):
    """Return the interpreted value of property *prop_name* on *attr*.

    Returns None if the property is not present. Asserts (via
    textx_assert) that the property is unique and that its value matches
    the internal type of its definition; for ATTRTYPE properties the
    internal type of the attribute's own raw/enum type is used instead.
    """

    def get_value(res, internaltype):
        # STRING returns the text directly; the numeric branches only
        # validate and then fall through to the shared conversion at the
        # bottom; unknown internal types return None.
        if internaltype == "STRING":
            textx_assert(res.textValue is not None, attr,
                         prop_name + " must be a STRING")
            return res.textValue.x
        elif internaltype == "INT":
            textx_assert(res.numberValue is not None, attr,
                         prop_name + " must be an NUMBER/INT")
        elif internaltype == "UINT":
            textx_assert(res.numberValue is not None, attr,
                         prop_name + " must be an NUMBER/UINT")
        elif internaltype == "BOOL":
            textx_assert(
                res.numberValue is not None,
                attr,
                prop_name + " must be an NUMBER/BOOL as int",
            )
        elif internaltype == "FLOAT":
            textx_assert(res.numberValue is not None, attr,
                         prop_name + " must be an NUMBER")
            assert res.numberValue is not None
        elif internaltype == "ENUM":
            textx_assert(res.numberValue is not None, attr,
                         prop_name + " must be an ENUM")
            assert res.numberValue is not None
        else:
            return None
        return compute_formula_for_internaltype(res.numberValue.x,
                                                internaltype, prop_name)

    res = list(
        filter(lambda x: x.definition.name == prop_name, attr.properties))
    if len(res) == 0:
        # property not set on this attribute
        return None
    else:
        mm = get_metamodel(attr)
        textx_assert(len(res) == 1, attr, prop_name + " must be unique")
        res = res[0]
        internaltype = res.definition.internaltype
        if internaltype == "ATTRTYPE":
            # ATTRTYPE delegates to the attribute's own type; not allowed
            # on variants, and only meaningful for rawtypes/enums
            textx_assert(
                not textx_isinstance(attr, mm["VariantAttribute"]),
                attr,
                prop_name + " not supported for variants",
            )
            correct_instance = textx_isinstance(
                attr.type, mm["RawType"]) or textx_isinstance(
                    attr.type, mm["Enum"])
            textx_assert(
                correct_instance,
                attr,
                prop_name + " only applicable for rawtypes/enums",
            )
            internaltype = attr.type.internaltype
        value = get_value(res, internaltype)
        textx_assert(
            value is not None,
            attr,
            prop_name + " could not be interpreted (unexpected)",
        )
        return value
def check_Property(p):
    """Validate a property instance: applicability to its owner,
    interpretability of its value, and the special rules for string
    defaults and fixpoint meta information."""
    mm = get_metamodel(p)
    owner = p.parent
    prop_name = p.definition.name
    if textx_isinstance(owner, mm["VariantMapping"]):
        textx_assert(
            is_applicable(p),
            p,
            f"{owner.parent.name}.{prop_name} not "
            f"applicable for variant mapping {owner.type.name}",
        )
    else:
        textx_assert(is_applicable(p), p,
                     f"{owner.name}.{prop_name} not applicable")
    prop_value = get_property(owner, prop_name)  # throws on error
    if prop_name == "defaultStringValue":
        textx_assert(
            not has_property(owner, "defaultValue"),
            owner,
            "only one default is allowed",
        )
        if textx_isinstance(owner, mm["ScalarAttribute"]):
            textx_assert(
                len(prop_value) == 1, p,
                "only exactly one char is allowed as default")
    # TODO (kept from original): once a "get max size" helper exists,
    # validate fixedSizeInBytes against the referenced struct's maximum
    # size here.
    if prop_name in [
            "fixpointLsbValue",
            "fixpointMsbValue",
            "fixpointOffsetValue",
    ]:
        a = owner  # must be an attribute (restriction of fixpoint props)
        textx_assert(
            not (has_property(a, "fixpointLsbValue")
                 and has_property(a, "fixpointMsbValue")),
            a,
            "specify either MSB or LSB (and not both at the same time)",
        )
        textx_assert(
            has_property(a, "fixpointLsbValue")
            or has_property(a, "fixpointMsbValue"),
            p,
            "specify either MSB or LSB (you need at least one of them for fixpoint values)",
        )
        textx_assert(
            a.type.internaltype in ["UINT", "INT"],
            a,
            "fixpoint meta information only possible with integral values",
        )
def check_Struct(s):
    """Validate struct *s*: mandatory properties, per-message property
    count restrictions, unique attribute names and unique constants.

    Raises (via textx_assert): TextXSemanticError on any violation.
    """
    mm = get_metamodel(s)
    # check mandatory properties in attributes.
    # BUGFIX: attr_prop_defs must be materialized as a list. The previous
    # code kept the bare map() iterator, which the first `in` membership
    # test exhausts -- so with two or more mandatory properties, every
    # property after the first was falsely reported missing. The sibling
    # checks (check_EnumEntry, check_Attribute) already use list().
    mandatory_prop_defs = get_all_possible_mandatory_properties(s)
    attr_prop_defs = list(map(lambda p: p.definition, s.properties))
    for d in mandatory_prop_defs.values():
        textx_assert(d in attr_prop_defs, s,
                     f"missing mandatory property '{d.name}'")

    # check if max count of properties is not violated
    # ("... to ... times per message")
    def get_all_properties_of_struct(s):
        # Collect the properties of s's attributes and, recursively, of
        # all scalar sub-structs that are not marked as payload.
        lst = []
        for a in s.attributes:
            lst = lst + a.properties
            if textx_isinstance(a, mm["ScalarAttribute"]) and textx_isinstance(
                    a.type, mm["Struct"]):
                do_break = get_property(a, "is_payload")  # None or bool
                if do_break is None or not do_break:
                    lst = lst + get_all_properties_of_struct(a.type)
        return lst

    properties = get_all_properties_of_struct(s)
    properties_per_def = {}
    for p in properties:
        properties_per_def.setdefault(p.definition, []).append(p)
    property_defs = get_all_possible_properties(
        s, filter_applicable_to_model_object=False).values()
    for d in property_defs:
        if d.numberOfPropRestriction is not None:
            n = len(properties_per_def.get(d, []))
            textx_assert(
                n >= d.numberOfPropRestriction.min,
                s,
                f'need at least {d.numberOfPropRestriction.min} of property "{d.name}"',
            )
            textx_assert(
                n <= d.numberOfPropRestriction.max,
                s,
                f'not more than {d.numberOfPropRestriction.max} of property "{d.name}" allowed',
            )
    # unique attribute names: report the duplicate occurrence itself
    all_attribute_names = list(map(lambda x: x.name, s.attributes))
    all_attribute_names_unique = set(all_attribute_names)
    if len(all_attribute_names) != len(all_attribute_names_unique):
        idx = 0
        while len(all_attribute_names) > 0:
            first = all_attribute_names[0]
            del all_attribute_names[0]
            textx_assert(
                first in all_attribute_names_unique,
                s.attributes[idx],
                f"attribute name {first} is not unique",
            )
            all_attribute_names_unique.remove(first)
            idx += 1
    check_Constants(s)