def check_ScalarAttribute(a):
    """Validate a scalar attribute that acts as an embedded container.

    A container attribute must be of an unsigned integral raw type, and
    the bit widths of its embedded elements (each multiplied by its
    fixed dimension) must exactly fill the container's bit width.

    Raises:
        TextXSemanticError: if the container type is not an unsigned
            integral raw type, or the element bits do not sum up to the
            container's bit width.
    """
    # tests: see filebased_tests/embedded
    if not a.is_container():
        return
    # Short-circuit keeps evaluation order of the original if/elif:
    # internaltype is only read when the type is a RawType.
    if (not textx_isinstance(a.type, RawType)
            or a.type.internaltype != "UINT"):
        raise TextXSemanticError(
            "container {} must be an unsigned integral type.".format(
                a.name),
            **get_location(a),
        )
    # sum() over a generator replaces reduce(lambda a, b: a + b, ...):
    # it does not shadow the parameter `a`, and an empty element list
    # yields 0 (reduce without an initial value raised TypeError),
    # which then triggers the proper semantic error below.
    num_bits = sum(
        get_bits(e.type) * get_fixed_dimension(e)
        for e in a.get_container_elements()
    )
    if num_bits != get_bits(a.type):
        raise TextXSemanticError(
            "embedded elements of container {} ({}) do not sum up to {}.".
            format(a.name, num_bits, get_bits(a.type)),
            **get_location(a),
        )
def check_Attribute(a):
    """Validate general attribute constraints.

    Checks naming rules (no 'item_' or '_' prefix), embedded/raw-type
    bit-size restrictions, mandatory properties, and restrictions for
    container/embedded attributes.

    Raises:
        TextXSemanticError: on any violated constraint.
    """
    if a.name.startswith("item_"):
        raise TextXSemanticError(
            "attribute may not start with 'item_' " + a.name,
            **get_location(a))
    if a.name.startswith("_"):
        raise TextXSemanticError("attribute may not start with '_' " + a.name,
                                 **get_location(a))
    # The original had two separate `if hasattr(a, "type")` blocks;
    # they are merged here (check order inside is preserved).
    if hasattr(a, "type"):
        if a.embedded:
            # message fixed: original read "char may be be used as
            # embedded field" (doubled word, inverted meaning).
            textx_assert(a.type.name != "char", a,
                         "char may not be used as embedded field")
        if textx_isinstance(a.type, RawType):
            if a.type.internaltype in ["INT", "UINT"] and not a.embedded:
                if get_bits(a.type) not in [8, 16, 32, 64, 128]:
                    raise TextXSemanticError(
                        "attribute {} must have a bit size of a power of two.".
                        format(a.name),
                        **get_location(a),
                    )
            if a.type.internaltype not in ["INT", "UINT", "BOOL"
                                           ] and a.embedded:
                raise TextXSemanticError(
                    "attribute {} must be an integral type.".format(a.name),
                    **get_location(a),
                )
        elif textx_isinstance(a.type, get_metamodel(a)["Enum"]):
            if get_bits(a.type) not in [8, 16, 32, 64, 128] and not a.embedded:
                raise TextXSemanticError(
                    "attribute {} must have a bit size of a power of two.".
                    format(a.name),
                    **get_location(a),
                )
    # check mandatory properties in attributes
    mandatory_prop_defs = get_all_possible_mandatory_properties(a)
    attr_prop_defs = [p.definition for p in a.properties]
    for d in mandatory_prop_defs.values():
        textx_assert(d in attr_prop_defs, a,
                     f"missing mandatory property '{d.name}'")
    if a.is_container():
        textx_assert(
            a.if_attr is None,
            a,
            "restricted attributes may not be used as container (put them into a separate substruct)",
        )
    if a.is_embedded():
        textx_assert(
            a.if_attr is None,
            a,
            "restricted attributes may not be embedded (put them into a separate substruct)",
        )
def check_type(t):
    """Reject capitalized type names unless the model parameter
    'type_name_check' is set to 'off'."""
    check_mode = get_model(t)._tx_model_params.get(
        'type_name_check', default='on')
    assert check_mode in ['on', 'off']
    if check_mode == 'off':
        return
    if t.name[0].isupper():
        raise TextXSyntaxError("types must be lowercase", **get_location(t))
def check_VariantMapping(mapping):
    """Require the mapping id to be an enum when the variant selector's
    type is an Enum."""
    mm = get_metamodel(mapping)
    sel_type = mapping.parent.variant_selector.ref.type
    if not textx_isinstance(sel_type, mm["Enum"]):
        return
    if mapping.id.is_enum():
        return
    raise TextXSemanticError(
        "bad type (enum of type {} is expected)".format(sel_type.name),
        **get_location(mapping),
    )
def get_container(atype):
    """Return the nearest preceding container attribute of an embedded
    attribute.

    Walks backwards through the parent's attribute list starting at
    *atype* until a container attribute is found.

    Raises:
        TextXSemanticError: if *atype* is not embedded, or no container
            precedes it (the latter indicates an internal error).
    """
    _assert_is_embedded(atype)
    p = atype.parent.get_prev_attr(atype)
    while p is not None and not p.is_container():
        p = atype.parent.get_prev_attr(p)
    if p is None:
        # message fixed: "did not found" -> "did not find"
        raise TextXSemanticError(
            "unexpected: did not find container of embedded attribute {}".
            format(atype),
            **get_location(atype))
    return p
def has_property(attr, prop_name, raises_if_not_applicable=True):
    """Return True if *attr* carries the property *prop_name*.

    If the property is absent and is not even applicable to *attr*, a
    TextXSemanticError is raised unless raises_if_not_applicable is
    False.
    """
    matches = [p for p in attr.properties
               if p.definition.name == prop_name]
    if matches:
        assert len(matches) == 1
        return True
    # evaluation order kept from the original: applicability is checked
    # before the raises_if_not_applicable flag
    if (prop_name not in get_all_possible_properties(attr)
            and raises_if_not_applicable):
        raise TextXSemanticError(
            "{} not a possible property".format(prop_name),
            **get_location(attr))
    return False
def check_Sum(sum):
    """Forbid enum entries inside a sum that is not itself an enum."""
    from textx import get_children_of_type, textx_isinstance, get_metamodel
    mm = get_metamodel(sum)
    has_enum_entry = any(
        v.ref is not None and textx_isinstance(v.ref.ref, mm["EnumEntry"])
        for v in get_children_of_type("Val", sum))
    if has_enum_entry and not sum.is_enum():
        raise TextXSemanticError("enum must not be part of a formula",
                                 **get_location(sum))
def get_fixed_dimension(attr):
    """Return the fixed (model-time) element count of an attribute.

    Scalar and variant attributes count as one element; a fixed-size
    array evaluates its size formula.

    Raises:
        TextXSemanticError: for an array attribute without a fixed size.

    NOTE(review): if *attr* is none of the three handled kinds the
    function falls through and returns None — confirm this is intended.
    """
    mm = textx.get_metamodel(attr)
    if textx.textx_isinstance(attr, mm["ScalarAttribute"]):
        return 1
    if textx.textx_isinstance(attr, mm["VariantAttribute"]):
        return 1
    if textx.textx_isinstance(attr, mm["ArrayAttribute"]):
        if attr.has_fixed_size():
            return attr.compute_formula()
        raise TextXSemanticError(
            "unexpected: no fixed dimension available for {}".format(
                attr.name),
            **get_location(attr))
def get_bits(x):
    """Return the bit width of a raw type, enum, scalar attribute, or
    fixed-size array attribute (recursing into the element type).

    Raises:
        TextXSemanticError: if *x* is an unsupported kind, or an array
            attribute without a fixed size (via textx_assert).
    """
    mm = textx.get_metamodel(x)
    if textx.textx_isinstance(x, mm["RawType"]):
        return x.bits
    if textx.textx_isinstance(x, mm["Enum"]):
        return x.type.bits
    if textx.textx_isinstance(x, mm["ScalarAttribute"]):
        return get_bits(x.type)
    if textx.textx_isinstance(x, mm["ArrayAttribute"]):
        textx_assert(x.has_fixed_size(), x,
                     "embedded arrays must have fixed size")
        return get_bits(x.type) * x.compute_formula()
    raise TextXSemanticError(
        "unexpected: no bits available for {}".format(x.name),
        **get_location(x))
def range_start_pragma_processor(pragma: RangePragma, parse_context: ParseContext):
    """Track BEGIN/END range pragmas on a stack and index them by line
    and requirement.

    On BEGIN the pragma is pushed and registered; on END the matching
    BEGIN is popped, its reqs are validated against the END's, and its
    end line is recorded.

    Raises:
        A begin/end reqs mismatch error, an end-without-begin error, or
        NotImplementedError for an unknown begin_or_end value.
    """
    location = get_location(pragma)
    line = location["line"]
    pragma.ng_source_line_begin = line
    if pragma.begin_or_end == "BEGIN":
        parse_context.pragma_stack.append(pragma)
        parse_context.map_lines_to_pragmas[line] = pragma
        for req in pragma.reqs:
            parse_context.map_reqs_to_pragmas.setdefault(req, []).append(pragma)
    elif pragma.begin_or_end == "END":
        # keep the try body minimal: only the pop can raise IndexError
        # (the original also wrapped the mismatch check in the try);
        # `from None` drops the irrelevant IndexError from the chain
        try:
            current_top_pragma: RangePragma = parse_context.pragma_stack.pop()
        except IndexError:
            raise create_end_without_begin_error(location) from None
        if pragma.reqs != current_top_pragma.reqs:
            raise create_begin_end_range_reqs_mismatch_error(
                location, current_top_pragma.reqs, pragma.reqs)
        current_top_pragma.ng_source_line_end = line
    else:
        raise NotImplementedError
def algo_check(a):
    """Reject algorithm parameters that carry a datatype flag."""
    flagged = (p for p in a.parameters if p.datatype is not None)
    for p in flagged:
        raise TextXSemanticError(
            "parameter is not allowed to have {} flag".format(p.datatype),
            **get_location(p))
def my_processor(m):
    """Raise an artificial semantic error for the element named 'd1'
    (used to exercise error reporting)."""
    from textx.exceptions import TextXSemanticError
    if m.name != "d1":
        return
    raise TextXSemanticError("d1 triggers artifical error", **get_location(m))
def semantic_check(model, metamodel):
    """Reject a root mode named 'WrongMode'."""
    if model.name != "WrongMode":
        return
    raise TextXSemanticError(
        'The root mode cannot be called "Wrong Mode".',
        **get_location(model))
def _assert_is_embedded(atype): if not atype.is_embedded(): raise TextXSemanticError( "unexpected: get_start_end_bit called for an non-embedded attribute {}" .format(atype), **get_location(atype))
def textx_assert(req_true, o, text):
    """Raise a TextXSemanticError located at *o* unless *req_true* holds."""
    if req_true:
        return
    raise TextXSemanticError(text + " for " + str(o), **get_location(o))
def mode_obj_processor(mode):
    """Require mode names to start with an uppercase letter."""
    starts_lower = mode.name[0].islower()
    if not starts_lower:
        return
    raise TextXSemanticError(
        f'Mode name "{mode.name}" must be capitalized.',
        **get_location(mode))
def check_type(t):
    """Require type names to be lowercase."""
    if not t.name[0].isupper():
        return
    raise textx.exceptions.TextXSyntaxError(
        "types must be lowercase", **get_location(t))
def check_flow(f):
    """Require the output type of algo1 to match the input type of algo2."""
    types_match = f.algo1.outp == f.algo2.inp
    if not types_match:
        raise textx.exceptions.TextXSemanticError(
            "algo data types must match", **get_location(f))