def test_match_rule_suppress():
    """
    Test suppressing operator in match rules.
    """
    grammar = r"""
    FullyQualifiedID[noskipws]: /\s*/- QuotedID+['.'] /\s*/- ;
    QuotedID: '"'?- ID '"'?- ;
    """
    meta = metamodel_from_str(grammar)

    model = meta.model_from_str('''
    first."second".third."fourth"
    ''')
    assert model == 'first.second.third.fourth'

    # Checking suppress rule reference
    grammar = """
    First: 'a' Second- Third;
    Second: 'b';
    Third: Second;
    """
    meta = metamodel_from_str(grammar)
    model = meta.model_from_str('a b b')
    # Second b should be suppressed
    assert model == 'ab'

def test_obj_processor_sequence_match_rule():
    grammar = """
    First: i=MyFixedInt 'end' ;
    MyFixedInt: '0' '0' '04' ;
    """
    model = '0004 end'

    mm = metamodel_from_str(grammar)
    m = mm.model_from_str(model)
    assert type(m.i) is text

    processors = {
        'MyFixedInt': lambda x: int(x)
    }

    mm = metamodel_from_str(grammar)
    mm.register_obj_processors(processors)
    m = mm.model_from_str(model)
    assert type(m.i) is int

def test_ignore_case():
    langdef = """
    Model: 'start' rules*='first' 'second';
    """

    meta = metamodel_from_str(langdef)

    # By default case is not ignored.
    with pytest.raises(TextXSyntaxError):
        meta.model_from_str('Start first First Second')

    meta = metamodel_from_str(langdef, ignore_case=True)
    meta.model_from_str('Start first First Second')

def test_that_passing_a_non_unicode_raises_exception():
    # Test metamodel construction
    with pytest.raises(TextXError,
                       match=r'textX accepts only unicode strings.'):
        metamodel = metamodel_from_str(42)

    metamodel = metamodel_from_str('First: INT;')
    metamodel.model_from_str('42')

    # Test model construction
    with pytest.raises(TextXError,
                       match=r'textX accepts only unicode strings.'):
        metamodel.model_from_str(42)

def test_skipws():
    langdef = """
    Model: 'start' rules*='first' 'second';
    """

    meta = metamodel_from_str(langdef)

    # By default ws are skipped.
    meta.model_from_str('start first first second')

    meta = metamodel_from_str(langdef, skipws=False)
    with pytest.raises(TextXSyntaxError):
        meta.model_from_str('start first first second')
    meta.model_from_str('startfirstfirstsecond')

def test_syntactic_predicate_not():
    """
    Test negative lookahead using `not` syntactic predicate.
    """
    grammar = """
    Expression: Let | MyID | NUMBER;
    Let: 'let' expr+=Expression 'end' ;
    Keyword: 'let' | 'end';
    MyID: !Keyword ID;
    """
    meta = metamodel_from_str(grammar)

    model = meta.model_from_str("""
        let
            let
                let 34 end
                let foo end
            end
        end
    """)
    assert model
    assert len(model.expr) == 1
    assert model.expr[0].expr[0].expr[0] == 34
    assert model.expr[0].expr[1].expr[0] == 'foo'

def test_sequence_ordered_choice():
    """
    Test ordered choice of sequences.
    """
    grammar = """
    Model: ('first' a=INT b?='a_is_here'
            | 'second' c=INT d?='c_is_here'
            | e=RuleA) 'END'
    ;
    RuleA: 'rule' name=ID;
    """
    meta = metamodel_from_str(grammar, debug=True)
    assert meta
    assert set([x.__name__ for x in meta]) == \
        set(['Model', 'RuleA'])\
        .union(set(ALL_TYPE_NAMES))

    model = meta.model_from_str('first 23 a_is_here END')
    assert model.a == 23
    assert model.c == 0
    assert model.b is True
    assert model.d is False

    model = meta.model_from_str('second 32 END')
    assert model.a == 0
    assert model.c == 32
    assert model.b is False
    assert model.d is False

    model = meta.model_from_str('rule A END')
    assert model.a == 0
    assert model.c == 0
    assert model.b is False
    assert model.d is False

def test_abstract_rule_and_object_reference():
    grammar = """
    Model: 'start' rules*=RuleA 'ref' ref=[RuleA];
    RuleA: Rule1|Rule2;
    Rule1: RuleI|RuleE;
    Rule2: 'r2' name=ID;
    RuleI: 'rI' name=ID;
    RuleE: 'rE' name=ID;
    """
    meta = metamodel_from_str(grammar)
    assert meta
    assert set([x.__name__ for x in meta]) == \
        set(['Model', 'RuleA', 'Rule1', 'Rule2', 'RuleI', 'RuleE'])\
        .union(set(ALL_TYPE_NAMES))

    model = meta.model_from_str('start r2 rule1 rE rule2 ref rule2')
    assert model
    assert hasattr(model, 'rules')
    assert hasattr(model, 'ref')
    assert model.rules
    assert model.ref

    # Reference resolves to the second object (the RuleE named 'rule2')
    assert model.ref is model.rules[1]
    assert model.ref.__class__.__name__ == "RuleE"

def test_bool_match():
    grammar = """
    Model: 'start' rule?='rule' rule2?=Rule;    // rule and rule2 attr should be
    Rule: Rule1|Rule2|Rule3;                    // true where match succeeds
    Rule1: a=INT;
    Rule2: b=STRING;
    Rule3: c=ID;
    """
    meta = metamodel_from_str(grammar)
    assert meta
    assert set([x.__name__ for x in meta]) == \
        set(['Model', 'Rule', 'Rule1', 'Rule2', 'Rule3'])\
        .union(set(ALL_TYPE_NAMES))

    model = meta.model_from_str('start rule 34')
    assert model
    assert hasattr(model, 'rule')
    assert hasattr(model, 'rule2')
    assert model.rule is True
    assert model.rule2 is True

    model = meta.model_from_str('start 34')
    assert model.rule is False
    assert model.rule2 is True

    model = meta.model_from_str('start')
    assert model.rule is False
    assert model.rule2 is False

def test_repetition_separator_modifier():
    """
    Match list with regex separator.
    """
    grammar = """
    Model: 'start' attr+=Rule[/,|;/];   // Here a regex match is used to
                                        // define , or ; as a separator
    Rule: Rule1|Rule2|Rule3;
    Rule1: a=INT;
    Rule2: b=STRING;
    Rule3: c=ID;
    """
    meta = metamodel_from_str(grammar)
    assert meta
    assert set([x.__name__ for x in meta]) == \
        set(['Model', 'Rule', 'Rule1', 'Rule2', 'Rule3'])\
        .union(set(ALL_TYPE_NAMES))

    model = meta.model_from_str('start 34, "foo"; ident')
    assert model
    assert model.attr
    assert model.attr[0].a == 34
    assert model.attr[1].b == "foo"
    assert model.attr[2].c == "ident"
    assert model.attr[0].__class__.__name__ == 'Rule1'
    assert model.attr[1].__class__.__name__ == 'Rule2'
    assert model.attr[2].__class__.__name__ == 'Rule3'

    # There must be at least one Rule matched after 'start'
    with pytest.raises(TextXSyntaxError):
        model = meta.model_from_str('start')
    assert model

def test_assignment_optional():
    grammar = """
    Model: 'start' (attr=Rule)?;    // There should be at most one Rule
                                    // after 'start'
    Rule: Rule1|Rule2|Rule3;
    Rule1: a=INT;
    Rule2: b=STRING;
    Rule3: c=ID;
    """
    meta = metamodel_from_str(grammar)
    assert meta
    assert set([x.__name__ for x in meta]) == \
        set(['Model', 'Rule', 'Rule1', 'Rule2', 'Rule3'])\
        .union(set(ALL_TYPE_NAMES))

    model = meta.model_from_str('start')
    assert model

    model = meta.model_from_str('start 34')
    assert model
    assert model.attr
    assert model.attr.a == 34
    assert model.attr.__class__.__name__ == 'Rule1'

    # There must be at most one Rule matched after 'start'
    with pytest.raises(TextXSyntaxError):
        model = meta.model_from_str('start 34 "foo"')
    assert model

def test_assignment_multiple_simple():
    """
    Test that multiple assignments to the same attribute will result in the
    list of values.
    """
    grammar = """
    Model: 'start' a=INT a=INT (a=INT)?;
    """
    meta = metamodel_from_str(grammar)
    model = meta.model_from_str('start 34 23 45')

    assert meta['Model']._tx_attrs['a'].cls.__name__ == 'INT'
    assert meta['Model']._tx_attrs['a'].mult == '1..*'
    assert meta['Model']._tx_attrs['a'].cont
    assert not meta['Model']._tx_attrs['a'].ref

    assert model
    assert model.a
    assert type(model.a) is list
    assert len(model.a) == 3
    assert model.a == [34, 23, 45]

    model = meta.model_from_str('start 34 23')
    assert model.a == [34, 23]

def test_assignment_zeroormore():
    grammar = """
    Model: 'start' attr*=Rule;  // There should be zero or more Rule-s after
                                // 'start'
    Rule: Rule1|Rule2|Rule3;
    Rule1: a=INT;
    Rule2: b=STRING;
    Rule3: c=ID;
    """
    meta = metamodel_from_str(grammar)
    assert meta
    assert set([x.__name__ for x in meta]) == \
        set(['Model', 'Rule', 'Rule1', 'Rule2', 'Rule3'])\
        .union(set(ALL_TYPE_NAMES))

    model = meta.model_from_str('start 34 "foo"')
    assert model
    assert model.attr
    assert model.attr[0].a == 34
    assert model.attr[1].b == "foo"
    assert model.attr[0].__class__.__name__ == 'Rule1'
    assert model.attr[1].__class__.__name__ == 'Rule2'

    model = meta.model_from_str('start')
    assert model

def test_float_int_number():
    """
    Test that numbers are recognized correctly.
    """
    grammar = """
    Rule: a=NUMBER b=INT c=FLOAT
    ;
    """
    meta = metamodel_from_str(grammar)

    model = meta.model_from_str('3.4 5 .3')
    assert model.a == 3.4
    assert type(model.a) is float
    assert model.b == 5
    assert model.c == 0.3

    model = meta.model_from_str('3 5 2.0')
    assert model.a == 3
    assert type(model.a) is int
    assert model.b == 5
    assert model.c == 2
    assert type(model.c) is float

def test_match_complex_recursive_peg_rule_resolve():
    """
    Test that recursive match rules are properly resolved.
    """
    grammar = """
    calc: expression;
    factor: INT | ('(' expression ')');
    term: factor (term_op factor)*;
    term_op: '*' | '/';
    expression: term (expr_op term)*;
    expr_op: '+' | '-';
    """
    metamodel = metamodel_from_str(grammar)

    assert metamodel._parser_blueprint.parser_model.nodes[0].rule_name == \
        'expression'
    assert type(metamodel._parser_blueprint.parser_model.nodes[0]) is Sequence

    calc_rule = metamodel['calc']._tx_peg_rule
    expression_rule = metamodel['expression']._tx_peg_rule
    assert calc_rule is expression_rule
    assert type(calc_rule) is Sequence

    assert type(metamodel['term_op']._tx_peg_rule) is OrderedChoice

    # Recursive factor rule
    factor_rule = metamodel['factor']._tx_peg_rule
    # Find expression reference
    expr_ref = factor_rule.nodes[1].nodes[1]
    assert expr_ref.rule_name == 'expression'
    assert type(expr_ref) is Sequence
    assert expr_ref is expression_rule

def test_no_import_for_string():
    """
    Test that import can't be used if meta-model is loaded from string.
    """
    grammar = """
    import relativeimport.first
    Second: a = First
    ;
    """
    with pytest.raises(AssertionError):
        metamodel_from_str(grammar)

def test_children():
    """
    This test checks the get_children function
    """
    #################################
    # META MODEL DEF
    #################################
    my_metamodel = metamodel_from_str(metamodel_str)
    my_metamodel.register_scope_providers({"*.*": scoping_providers.FQN()})

    #################################
    # MODEL PARSING
    #################################
    my_model = my_metamodel.model_from_str('''
    package P1 {
        class Part1 {
        }
    }
    package P2 {
        class Part2 {
            attr C2 rec;
        }
        class C2 {
            attr P1.Part1 p1;
            attr Part2 p2a;
            attr P2.Part2 p2b;
        }
    }
    ''')

    #################################
    # TEST
    #################################
    res = get_children_of_type("Class", my_model)
    res.sort(key=lambda x: x.name)
    assert len(res) == 3
    assert all(map(eq, map(lambda x: x.name, res), ["C2", "Part1", "Part2"]))
    assert not all(map(eq, map(lambda x: x.name, res),
                       ["Part1", "Part2", "C2"]))
    for x in res:
        assert x.__class__.__name__ == "Class"

    res = get_children_of_type("Attribute", my_model)
    res.sort(key=lambda x: x.name)
    assert len(res) == 4
    assert all(map(eq, map(lambda x: x.name, res),
                   ["p1", "p2a", "p2b", "rec"]))
    for x in res:
        assert x.__class__.__name__ == "Attribute"

    res = get_children(lambda x: hasattr(x, "name") and re.match(
        ".*2.*", x.name), my_model)
    res.sort(key=lambda x: x.name)
    assert len(res) == 5
    assert all(map(eq, map(lambda x: x.name, res),
                   ["C2", "P2", "Part2", "p2a", "p2b"]))

def test_issue108_obj_proc_multifile():
    """
    see issue 108 for a detailed error report
    """
    mm = textx.metamodel_from_str('''
        Model: imports*=Import classes*=Class;
        Import: 'import' importURI=STRING ';';
        Class: 'class' name=ID '{' '}';
    ''')

    lst_class_names = []
    lst_models = []

    def print_obj(x):
        lst_class_names.append(x.name)

    def print_model(m, mm):
        lst_models.append(m)

    mm.register_scope_providers(
        {'*.*': scoping_providers.PlainNameImportURI()})
    mm.register_obj_processors({'Class': print_obj})
    mm.register_model_processor(print_model)

    current_dir = dirname(__file__)
    mm.model_from_file(join(current_dir, 'issue108', 'a.dsl'))

    assert 2 == len(lst_models)
    assert 2 == len(lst_class_names)

def test_objcrossref_positions():
    # get positions from string

    # definition positions
    test1_def_pos = modelstr.find('first Test1')
    test2_def_pos = modelstr.find('first Test2')

    # reference positions (search after the 'second' section)
    second_rule_pos = modelstr.find('second')
    test1_ref_pos = modelstr.find('Test1', second_rule_pos)
    test2_ref_pos = modelstr.find('Test2', second_rule_pos)

    # textx_tools_support enabled
    mm = metamodel_from_str(grammar, textx_tools_support=True)
    model = mm.model_from_str(modelstr)

    # compare positions with crossref list items
    test1_crossref = model._pos_crossref_list[0]
    test2_crossref = model._pos_crossref_list[1]

    # test1
    assert test1_crossref.ref_pos_start == test1_ref_pos
    assert test1_crossref.def_pos_start == test1_def_pos

    # test2
    assert test2_crossref.ref_pos_start == test2_ref_pos
    assert test2_crossref.def_pos_start == test2_def_pos

def test_nested_match_rules():
    """
    Test calling processors for nested match rules.
    """
    grammar = r"""
    Model: objects*=MyObject;
    MyObject: HowMany | MyNumber;
    HowMany: '+'+;    // We will register processor that returns a count of '+'
    MyNumber: MyFloat | INT;
    MyFloat: /[+-]?(((\d+\.(\d*)?|\.\d+)([eE][+-]?\d+)?)|((\d+)([eE][+-]?\d+)))(?<=[\w\.])(?![\w\.])/;
    """  # noqa

    def howmany_processor(x):
        return len(x)

    mm = metamodel_from_str(grammar)
    mm.register_obj_processors({'HowMany': howmany_processor,
                                'MyFloat': lambda x: float(x)})
    model = mm.model_from_str('3.4 ++ + ++ 6')
    assert model.objects[0] == 3.4
    assert model.objects[1] == 5
    assert model.objects[2] == 6
    assert type(model.objects[2]) is int

    # Now we will add another processor for `MyObject` to test if we can
    # change the result returned from match processors lower in hierarchy.
    def myobject_processor(x):
        assert type(x) in [int, float]
        return '#{}'.format(text(x))

    mm.register_obj_processors({'HowMany': howmany_processor,
                                'MyFloat': lambda x: float(x),
                                'MyObject': myobject_processor})
    model = mm.model_from_str('3.4 ++ + ++ 6')
    assert model.objects[0] == '#3.4'
    assert model.objects[1] == '#5'

def test_object_processors():
    """
    Test that object processors are called.
    They should be called after each model object construction.
    """
    call_order = []

    def first_obj_processor(first):
        first._first_called = True
        call_order.append(1)

    def second_obj_processor(second):
        second._second_called = True
        call_order.append(2)

        # test that parent is fully initialised.
        # b should be True
        assert second.parent.b

    obj_processors = {
        'First': first_obj_processor,
        'Second': second_obj_processor,
    }

    metamodel = metamodel_from_str(grammar)
    metamodel.register_obj_processors(obj_processors)

    model_str = 'first 34 45 7 A 45 65 B true C "dfdf"'
    first = metamodel.model_from_str(model_str)

    assert hasattr(first, '_first_called')
    for s in first.seconds:
        assert hasattr(s, '_second_called')
    assert call_order == [2, 2, 2, 1]

def test_object_processor_replace_object():
    """
    Test that what is returned from object processor is value used in the
    output model.
    """
    def second_obj_processor(second):
        return second.sec / 2

    def string_obj_processor(mystr):
        return "[{}]".format(mystr)

    obj_processors = {
        'Second': second_obj_processor,
        'STRING': string_obj_processor,
    }

    metamodel = metamodel_from_str(grammar)
    metamodel.register_obj_processors(obj_processors)

    model_str = 'first 34 45 7 A 45 65 B true C "dfdf"'
    first = metamodel.model_from_str(model_str)

    assert len(first.seconds) == 3
    assert first.seconds[0] == 17
    assert first.c == '["dfdf"]'

def test_model_modification_through_scoping_custom_lookup_addon_fix():
    mm = metamodel_from_str(grammar_addon)
    mm.register_scope_providers({'Knows.*': person_definer_scope,
                                 'Greeting.*': Postponer()})

    m = mm.model_from_str(r'''
    Tom knows Jerry
    *hello Tom
    ''')
    assert len(m.persons) == 2
    assert len(m.knows) == 1
    assert len(m.greetings) == 1
    assert m.greetings[0].person == m.knows[0].person1

    m = mm.model_from_str(r'''
    *hello Tom
    Tom knows Jerry
    ''')
    assert len(m.persons) == 2
    assert len(m.knows) == 1
    assert len(m.greetings) == 1
    assert m.greetings[0].person == m.knows[0].person1

    # Unknown elements still produce an error, as expected
    with raises(TextXSemanticError, match=r'.*Unresolvable.*Berry.*'):
        mm.model_from_str(r'''
        Tom knows Jerry
        *hello Berry
        ''')

def main(debug=False):

    calc_mm = metamodel_from_str(grammar,
                                 classes=[Calc, Expression, Term,
                                          Factor, Operand],
                                 debug=debug)

    this_folder = dirname(__file__)
    if debug:
        metamodel_export(calc_mm, join(this_folder, 'calc_metamodel.dot'))

    input_expr = '''
        a = 10;
        b = 2 * a + 17;
        -(4-1)*a+(2+4.67)+b*5.89/(.2+7)
    '''

    model = calc_mm.model_from_str(input_expr)
    if debug:
        model_export(model, join(this_folder, 'calc_model.dot'))

    # Getting value property from the Calc instance will start evaluation.
    result = model.value
    assert abs(model.value - 6.93805555) < 0.0001
    print("Result is", result)

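# Note (added for clarity, not part of the original example): the expected
# value in the assert above follows from input_expr by hand evaluation:
#   a = 10
#   b = 2 * 10 + 17 = 37
#   -(4-1)*10 + (2+4.67) + 37*5.89/(.2+7)
#       = -30 + 6.67 + 217.93/7.2
#       = -30 + 6.67 + 30.26805...
#       = 6.93805...
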
def test_free_text_with_references():
    model_str = r'''
    ENTRY Hello:    """a way to say hello\@mail (see @[Hi])"""
    ENTRY Hi:       """another way to say hello (see @[Hello])"""
    ENTRY Salut:    """french "hello" (@[Hello]@[Hi]@[Bonjour]@[Salut]@[Hallo])"""
    ENTRY Hallo:    """german way to say hello (see ""@[Hello]"")"""
    ENTRY Bonjour:  """another french "\@@[Hello]", see @[Salut]"""
    ENTRY NoLink:   """Just text"""
    ENTRY Empty:    """"""
    '''  # noqa
    metamodel = metamodel_from_str(grammar, classes=[Entry],
                                   use_regexp_group=True)
    m = metamodel.model_from_str(model_str)

    assert 1 == len(m.entries[0].data)
    assert 1 == len(m.entries[1].data)
    assert 5 == len(m.entries[2].data)
    assert 1 == len(m.entries[3].data)
    assert 2 == len(m.entries[4].data)
    assert 0 == len(m.entries[5].data)
    assert 0 == len(m.entries[6].data)

    assert 'Hi' == m.entries[0].data[0].ref.name
    assert m.entries[1] == m.entries[0].data[0].ref

    assert 'a way to say hello@mail (see @[Hi])' == str(m.entries[0])
    assert 'german way to say hello (see ""@[Hello]"")' == str(m.entries[3])
    assert 'another french "@@[Hello]", see @[Salut]' == str(m.entries[4])
    assert 'Just text' == str(m.entries[5])
    assert '' == str(m.entries[6])

def test_issue78_quickcheck_no_obj_processors_called_for_references():
    """
    This test represents just a plausibility check.
    """
    grammarA = """
    Model: a+=A | b+=B;
    A:'A' name=ID;
    B:'B' name=ID '->' a=[A];
    """
    mm = textx.metamodel_from_str(grammarA)

    import textx.scoping.providers as scoping_providers
    global_repo_provider = scoping_providers.PlainNameGlobalRepo()
    mm.register_scope_providers({"*.*": global_repo_provider})

    test_list = []
    mm.register_obj_processors({
        'A': lambda o: test_list.append(o.name),
    })

    # no references to A: --> obj proc called
    m1 = mm.model_from_str('''
    A a1 A a2 A a3
    ''')
    assert ['a1', 'a2', 'a3'] == test_list

    # only references to A: --> obj proc not called
    global_repo_provider.add_model(m1)
    mm.model_from_str('''
    B b1 -> a1 B b2 -> a2 B b3 -> a3
    ''')
    assert ['a1', 'a2', 'a3'] == test_list  # unchanged...

def test_get_model():
    metamodel = metamodel_from_str(grammar)
    model = metamodel.model_from_str(model_str)

    t = model.a[0].y
    assert get_model(t) is model

def test_resolve_model_path_simple_case_with_refs():
    #################################
    # META MODEL DEF
    #################################
    grammar = r'''
    Model: name=ID b=B;
    B: 'B:' name=ID ('->' b=B | '-->' bref=[B] );
    '''
    mm = metamodel_from_str(grammar)

    #################################
    # MODEL PARSING
    #################################
    model = mm.model_from_str(r'''
    My_Model
    B: Level0_B ->
        B: Level1_B --> Level0_B
    ''')

    #################################
    # TEST MODEL
    #################################
    # test normal functionality (with refs)
    level0B = resolve_model_path(model, "b")
    assert level0B.name == "Level0_B"
    level1B = resolve_model_path(model, "b.b")
    assert level1B.name == "Level1_B"
    bref = resolve_model_path(model, "b.b.bref")
    assert bref.name == "Level0_B"
    assert bref == level0B

def get_metamodel():
    mm = metamodel_from_str(r'''
        reference data as d
        Model: includes*=Include algos+=Algo flows+=Flow;
        Algo: 'algo' name=ID ':' inp=[d.Data] '->' outp=[d.Data];
        Flow: 'connect' algo1=[Algo] '->' algo2=[Algo] ;
        Include: '#include' importURI=STRING;
        Comment: /\/\/.*$/;
        ''', global_repository=global_repo)
    mm.register_scope_providers(
        {"*.*": scoping_providers.FQNImportURI()})

    def check_flow(f):
        if f.algo1.outp != f.algo2.inp:
            raise textx.exceptions.TextXSemanticError(
                "algo data types must match",
                **tools.get_location(f)
            )

    mm.register_obj_processors({
        'Flow': check_flow
    })
    return mm

def main(debug=False):

    bool_mm = metamodel_from_str(grammar,
                                 classes=[Bool, Or, And, Not, Operand],
                                 ignore_case=True,
                                 debug=debug)

    this_folder = dirname(__file__)
    if debug:
        metamodel_export(bool_mm, join(this_folder, 'bool_metamodel.dot'))

    input_expr = '''
        a = true;
        b = not a and true;
        a and false or not b
    '''

    model = bool_mm.model_from_str(input_expr)
    if debug:
        model_export(model, join(this_folder, 'bool_model.dot'))

    # Getting value property from the Bool instance will start evaluation.
    result = model.value
    assert model.value is True
    print("Result is", result)

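# Note (added for clarity, not part of the original example): the asserted
# result follows from input_expr by hand evaluation:
#   a = true
#   b = not a and true = false and true = false
#   a and false or not b = (true and false) or (not false)
#                        = false or true = true
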
def test_multi_metamodel_references1():
    global_repo = scoping.GlobalModelRepository()

    mm_A = metamodel_from_str(grammarA, global_repository=global_repo)
    mm_B = metamodel_from_str(grammarB, global_repository=global_repo,
                              referenced_metamodels=[mm_A])

    global_repo_provider = scoping_providers.PlainNameGlobalRepo()
    mm_B.register_scope_providers({"*.*": global_repo_provider})
    mm_A.register_scope_providers({"*.*": global_repo_provider})

    mA = mm_A.model_from_str('''
    A a1 A a2 A a3
    ''')
    global_repo_provider.add_model(mA)

    mm_B.model_from_str('''
    B b1 -> a1 B b2 -> a2 B b3 -> a3
    ''')

    with raises(textx.exceptions.TextXSemanticError, match=r'.*UNKNOWN.*'):
        mm_B.model_from_str('''
        B b1 -> a1 B b2 -> a2 B b3 -> UNKNOWN
        ''')

def test_unreferenced_abstract_rule():
    """
    Test that unreferenced abstract rule is properly recognized.
    """
    grammar = """
    First: name=ID;
    Second: Third;
    Third: a=STRING;
    """
    mm = metamodel_from_str(grammar)
    assert mm['First']._tx_type == RULE_COMMON
    assert mm['Second']._tx_type == RULE_ABSTRACT
    assert mm['Second']._tx_inh_by == [mm['Third']]
    assert mm['Third']._tx_type == RULE_COMMON

def test_basetype_match_rule_is_match():
    """
    Test that ordered choice of basetypes is a match rule.
    """
    grammar = """
    Rule: INT|ID;
    """
    meta = metamodel_from_str(grammar)
    assert meta
    assert meta['Rule']._tx_type is RULE_MATCH

    model = meta.model_from_str('34')
    assert model
    assert model.__class__ == int
    assert model == 34

def test_abstract_rule_with_multiple_rule_refs():
    """
    Test that a single alternative of abstract rule can reference multiple
    match rules with a single common rule.
    """
    grammar = """
    Rule: STRING|Rule1|ID|Prefix Rule1 Sufix;
    Rule1: a=INT;   // common rule
    Prefix: '#';
    Sufix: '--';
    """
    meta = metamodel_from_str(grammar)
    model = meta.model_from_str('# 23 --')
    assert meta['Rule']._tx_type is RULE_ABSTRACT
    assert model.a == 23

def test_base_type_obj_processor_override():
    grammar = """
    First: 'begin' i=INT 'end' ;
    """
    processors = {
        'INT': lambda x: float(x)
    }

    mm = metamodel_from_str(grammar)
    mm.register_obj_processors(processors)
    m = mm.model_from_str('begin 34 end')
    assert type(m.i) is float

def test_obj_processor_simple_match_rule():
    grammar = r"""
    First: a=MyFloat 'end' ;
    MyFloat: /\d+\.(\d+)?/ ;
    """
    model = '3. end'

    mm = metamodel_from_str(grammar)
    m = mm.model_from_str(model)
    assert type(m.a) is text

    processors = {
        'MyFloat': lambda x: float(x)
    }
    print('filters')
    mm = metamodel_from_str(grammar)
    mm.register_obj_processors(processors)
    m = mm.model_from_str(model)
    assert type(m.a) is float

def test_match_rule_complex():
    """
    Test match rule that has complex expressions.
    """
    grammar = r"""
    Rule: ('one' /\d+/)* 'foo'+ |'two'|'three';
    """
    meta = metamodel_from_str(grammar)
    assert meta
    assert meta['Rule']._tx_type is RULE_MATCH

    model = meta.model_from_str('one 45 one 78 foo foo foo')
    assert model
    assert model.__class__ == text
    assert model == "one45one78foofoofoo"

def test_match_single_peg_rule_resolve():
    """
    Test that match rules with a single reference in rule body are
    properly resolved.
    """
    model = """
    Rule1: Rule2;
    Rule2: Rule3;
    Rule3: 'a' INT;
    """
    metamodel = metamodel_from_str(model)
    assert metamodel['Rule1']._tx_peg_rule == \
        metamodel['Rule2']._tx_peg_rule == \
        metamodel['Rule3']._tx_peg_rule
    assert type(metamodel['Rule1']._tx_peg_rule) is Sequence

def setUp(self):
    self.tile_size = [233, 0]
    self.haoda_type = ir.Type('uint16')
    self.unroll_factor = 1
    self.expr_ref = ir.Ref(name='foo', idx=(233, 42), lat=None)
    self.expr = ir.Expr(operand=(self.expr_ref,), operator=())
    self.input_stmt = grammar.InputStmt(
        haoda_type=self.haoda_type, name='foo_i',
        tile_size=self.tile_size, dram=())
    self.param_stmt = grammar.ParamStmt(
        haoda_type=self.haoda_type, name='foo_p', attr=(), size=(), dram=())
    self.local_ref = ir.Ref(name='foo_l', idx=(0, 0), lat=None)
    self.local_stmt = grammar.LocalStmt(
        haoda_type=self.haoda_type, let=(), ref=self.local_ref,
        expr=self.expr)
    self.output_ref = ir.Ref(name='foo_o', idx=(0, 0), lat=None)
    self.output_stmt = grammar.OutputStmt(
        haoda_type=self.haoda_type, let=(), ref=self.output_ref,
        expr=self.expr, dram=())
    self.args = {
        'burst_width': 512,
        'border': 'ignore',
        'iterate': 2,
        'cluster': 'none',
        'app_name': 'foo_bar',
        'input_stmts': [self.input_stmt],
        'param_stmts': [self.param_stmt],
        'local_stmts': [self.local_stmt],
        'output_stmts': [self.output_stmt],
        'dim': len(self.tile_size),
        'tile_size': self.tile_size,
        'unroll_factor': self.unroll_factor,
        'replication_factor': self.unroll_factor}
    self.soda_mm = textx.metamodel_from_str(
        grammar.GRAMMAR, classes=grammar.CLASSES)
    self.blur = self.soda_mm.model_from_str(r'''
kernel: blur
burst width: 512
unroll factor: 16
input uint16: input(2000, *)
local uint16: tmp(0,0)=(input(-1,0)+input(0,0)+input(1,0))/3
output uint16: output(0,0)=(tmp(0,-1)+tmp(0,0)+tmp(0,1))/3
iterate: 2
border: preserve
cluster: none
''')
    args = {**self.blur.__dict__, **{'replication_factor': 1}}
    self.stencil = core.Stencil(**args)

def test_issue89_get_obj_pos_in_text():
    mm = textx.metamodel_from_str('''
    Model: objs+=Obj;
    Obj: 'obj' name=ID;
    ''')
    # Note: the leading spaces in the model below are significant; they are
    # what the line/column asserts check.
    m = mm.model_from_str('''obj A
obj B
 obj C
         obj D
''')
    assert (1, 1) == textx.get_model(m.objs[0])._tx_parser.pos_to_linecol(
        m.objs[0]._tx_position)
    assert (2, 1) == m._tx_parser.pos_to_linecol(m.objs[1]._tx_position)
    assert (3, 2) == m._tx_parser.pos_to_linecol(m.objs[2]._tx_position)
    assert (4, 10) == m._tx_parser.pos_to_linecol(m.objs[3]._tx_position)

def test_match_rule_multiple():
    """
    If match rule has multiple simple matches resulting string should be
    made by concatenation of simple matches.
    """
    grammar = """
    Rule: 'one' 'two' | 'three';
    """
    meta = metamodel_from_str(grammar)
    assert meta
    assert meta['Rule']._tx_type is RULE_MATCH

    model = meta.model_from_str(' one two')
    assert model
    assert model.__class__ == text
    assert model == "onetwo"

def get_metamodel():
    mm = metamodel_from_str(r'''
        Model: types+=Type;
        Type: 'type' name=ID;
        Comment: /\/\/.*$/;
        ''', global_repository=global_repo)

    def check_type(t):
        if t.name[0].isupper():
            raise textx.exceptions.TextXSyntaxError(
                "types must be lowercase",
                **tools.get_location(t))

    mm.register_obj_processors({'Type': check_type})
    return mm

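# A minimal standalone sketch (added for illustration, not part of the
# original module) showing how the 'Type' object processor above rejects
# capitalized names. It repeats the grammar without the global repository
# wiring and assumes textx.get_location provides the location kwargs the
# way tools.get_location does.
def type_names_must_be_lowercase_sketch():
    import pytest
    import textx

    mm = textx.metamodel_from_str(r'''
    Model: types+=Type;
    Type: 'type' name=ID;
    ''')

    def check_type(t):
        # Reject type names that start with an uppercase letter.
        if t.name[0].isupper():
            raise textx.exceptions.TextXSyntaxError(
                "types must be lowercase", **textx.get_location(t))

    mm.register_obj_processors({'Type': check_type})

    mm.model_from_str('type int type bool')   # accepted
    with pytest.raises(textx.exceptions.TextXSyntaxError):
        mm.model_from_str('type Integer')     # rejected by the processor
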
def test_custom_base_type_with_builtin_alternatives():
    grammar = r"""
    Model: i*=MyNumber;
    MyNumber: MyFloat | INT;
    MyFloat: /[+-]?(((\d+\.(\d*)?|\.\d+)([eE][+-]?\d+)?)|((\d+)([eE][+-]?\d+)))(?<=[\w\.])(?![\w\.])/;
    """  # noqa
    mm = metamodel_from_str(grammar)
    model = mm.model_from_str('3.4 6')
    assert type(model.i[0]) is text
    assert type(model.i[1]) is int

    mm.register_obj_processors({'MyFloat': lambda x: float(x)})
    model = mm.model_from_str('3.4 6')
    assert type(model.i[0]) is float
    assert type(model.i[1]) is int

def test_model_modification_through_scoping_custom_lookup_addon_failure():
    mm = metamodel_from_str(grammar_addon)
    mm.register_scope_providers({'Knows.*': person_definer_scope})

    # Here, at least one case produces an error, since
    # Tom is not part of the model until the "Knows" rule
    # is resolved.
    with raises(TextXSemanticError, match=r'.*Unknown object.*Tom.*'):
        mm.model_from_str(r'''
        Tom knows Jerry
        *hello Tom
        ''')
        mm.model_from_str(r'''
        *hello Tom
        Tom knows Jerry
        ''')

def test_builtin_models_are_searched_by_rrel():
    register_language('builtin_types', '*.type', metamodel=types_mm)
    builtin_models = ModelRepository()
    builtin_models.add_model(types_mm.model_from_str('type int type bool'))

    mm = metamodel_from_str(entity_mm_str, builtin_models=builtin_models)
    model = mm.model_from_str(r'''
    entity First {
        first : bool
    }
    ''')

    assert model.entities[0].properties[0].type.__class__.__name__ \
        == 'BaseType'
    assert model.entities[0].properties[0].type.name == 'bool'

def test_unordered_group_choice_with_sequences(grammar):
    """
    Test the equivalence of using ordered choice and a sequence of
    parenthesized groups.
    """
    mm = metamodel_from_str(grammar)

    model = mm.model_from_str('begin first second third')
    assert model.first == 'first' and model.second == 'second' \
        and model.third == 'third'

    model = mm.model_from_str('begin third first second')
    assert model.first == 'first' and model.second == 'second' \
        and model.third == 'third'

    with pytest.raises(TextXSyntaxError):
        mm.model_from_str('begin second first third')

    with pytest.raises(TextXSyntaxError):
        mm.model_from_str('begin third second first')

def test_unordered_group_sequence_choice_nonoptional(grammar):
    """
    Test the equivalence of using sequence and ordered choice in unordered
    groups.
    """
    mm = metamodel_from_str(grammar)

    model = mm.model_from_str('begin first second')
    assert model.first == 'first' and model.second == 'second'

    model = mm.model_from_str('begin second first')
    assert model.first == 'first' and model.second == 'second'

    with pytest.raises(TextXSyntaxError):
        mm.model_from_str('begin first second first')

    with pytest.raises(TextXSyntaxError):
        mm.model_from_str('begin first first')

def test_match_rule():
    """
    Match rule always returns string.
    """
    grammar = """
    Rule: 'one'|'two'|'three';
    """
    meta = metamodel_from_str(grammar)
    assert meta
    assert meta['Rule']._tx_type is RULE_MATCH

    model = meta.model_from_str('two')
    assert model
    assert model.__class__ == text
    assert model == "two"

def test_rrel_repetitions():
    """
    This is a basic extra test to demonstrate `()*`
    in RREL expressions.
    """
    my_metamodel = metamodel_from_str(r'''
        Model: entries*=Entry;
        Entry: name=ID (':' ref=[Entry])?;
    ''')
    my_model = my_metamodel.model_from_str(r'''
        a: b
        c
        b: a
    ''')

    a = find(my_model, "a", "entries.ref*")
    assert a.name == 'a'
    b = find(my_model, "b", "entries.ref*")
    assert b.name == 'b'
    c = find(my_model, "c", "entries.ref*")
    assert c.name == 'c'

    a2 = find(my_model, "a.b.a", "entries.ref*")
    assert a2 == a
    b2 = find(my_model, "b.a.b", "entries.ref*")
    assert b2 == b

    res, objpath = find_object_with_path(my_model, "b.a.b", "entries.ref*")
    assert res == b
    assert len(objpath) == 3
    assert objpath[-1] == res
    assert ".".join(map(lambda x: x.name, objpath)) == 'b.a.b'

    a2 = find(my_model, "b.a.b.a", "entries.ref*")
    assert a2 == a
    res, objpath = find_object_with_path(my_model, "b.a.b.a", "entries.ref*")
    assert res == a
    assert len(objpath) == 4
    assert objpath[-1] == res
    assert ".".join(map(lambda x: x.name, objpath)) == 'b.a.b.a'

    a2 = find(my_model, "b.a.b.a.b.a.b.a.b.a", "entries.ref*")
    assert a2 == a

def test_buildins_fully_qualified_name():
    """
    This test is used to check if a model with builtins works correctly.
    The test uses fully qualified name scoping (to check that exchanging
    the scope provider globally does not harm the builtins feature).

    The test loads
    - one model w/o builtins
    - one model with builtins
    - one model with unknown references (errors)
    """
    #################################
    # META MODEL DEF
    #################################
    type_builtins = {
        'OneThing': Thing(name="OneThing"),
        'OtherThing': Thing(name="OtherThing")
    }

    my_metamodel = metamodel_from_str(metamodel_str,
                                      classes=[Thing],
                                      builtins=type_builtins)
    my_metamodel.register_scope_providers({"*.*": scoping_providers.FQN()})

    #################################
    # MODEL PARSING
    #################################
    my_metamodel.model_from_str('''
    thing A {}
    thing B {}
    thing C {A B}
    ''')
    my_metamodel.model_from_str('''
    thing A {}
    thing B {}
    thing C {A B OneThing OtherThing}
    ''')
    with raises(textx.exceptions.TextXSemanticError,
                match=r'.*Unknown object.*UnknownPart.*'):
        my_metamodel.model_from_str('''
        thing A {}
        thing B {}
        thing C {A B OneThing OtherThing UnknownPart}
        ''')

def test_regexp_with_groups_deactivated():
    """
    Test the grammar with regexp groups deactivated.
    """
    model_str = '''
    data = """
    This is a multiline
    text!
    """
    '''
    metamodel = metamodel_from_str(grammar)
    m = metamodel.model_from_str(model_str)

    assert '"""' in m.entries[0].data    # """ is not removed
    assert 'This' in m.entries[0].data   # This and text in model
    assert 'text!' in m.entries[0].data  # This and text in model

def test_combined_scope_provider():
    grammar = """
    F:
        definitions *= Event1
        (start = Start )?
    ;
    Event1:
        'event' name=ID
        '(' parameters+=Variable[','] ')' ':'
        argumentos+=Repetition[',']
        'end'
    ;
    Start:
        'start' name=ID 'link' event=[Event1]
        '(' argumentos+=Repetition[','] ')'
        'end'
    ;
    Repetition: name=ID '=' paramName=[Variable];
    Variable: name=ID;
    """
    meta = metamodel_from_str(grammar)
    meta.register_scope_providers({
        '*.*': FQN(),
        'Repetition.paramName': MyScope(meta)
    })

    code = '''
    event eventA (varA,varB,varC):
        x1=varA, x2=varB, x3=varC
    end
    start eventoB link eventA (
        x1=varA, x2=varB, x3=varC
    )
    end
    '''
    meta.model_from_str(code)

def test_base_type_obj_processor_override():
    grammar = """
    First: 'begin' i=INT 'end' ;
    """

    def to_float_with_str_check(x):
        assert type(x) is text
        return float(x)

    processors = {'INT': to_float_with_str_check}

    mm = metamodel_from_str(grammar)
    mm.register_obj_processors(processors)
    m = mm.model_from_str('begin 34 end')
    assert type(m.i) is float

def test_obj_processor_exception_wrap_for_common_rules():
    grammar = r"""
    Model: a+=A;
    A: name=ID ('(' other=[A] ')')?;
    """

    @textxerror_wrap
    def p(a):
        if a.name == 'E':
            raise Exception("test")

    mm = metamodel_from_str(grammar)
    mm.register_obj_processors({'A': p})

    from textx.exceptions import TextXError
    mm.model_from_str('X Y Z (X)')
    with pytest.raises(TextXError, match=r'None:1:3:.*test'):
        mm.model_from_str('X E Z (X)')

def test_obj_processor_exception_wrap_for_match_rules():
    grammar = r"""
    Model: a+=A;
    A: /\w+/;
    """

    @textxerror_wrap
    def p(a):
        if a == 'E':
            raise Exception("test")

    mm = metamodel_from_str(grammar)
    mm.register_obj_processors({'A': p})

    from textx.exceptions import TextXError
    mm.model_from_str('X Y Z')
    with pytest.raises(TextXError, match=r'None:1:3:.*test'):
        mm.model_from_str('X E Z')

def test_rule_call_forward_backward_reference():
    grammar = """
    Model: 'start' attr=Rule2;
    Rule1: 'one'|'two'|'three';
    Rule2: 'rule2' attr=Rule1;
    """
    meta = metamodel_from_str(grammar)
    assert meta
    assert set([x.__name__ for x in meta]) == \
        set(['Model', 'Rule1', 'Rule2'])\
        .union(set(ALL_TYPE_NAMES))

    model = meta.model_from_str('start rule2 three')
    assert model
    assert model.attr
    assert model.attr.attr
    assert model.attr.attr == "three"

def get_metamodel():
    mm = metamodel_from_str(r'''
        reference types as t
        Model: includes*=Include data+=Data;
        Data: 'data' name=ID '{' attributes+=Attribute '}';
        Attribute: name=ID ':' type=[t.Type];
        Include: '#include' importURI=STRING;
        Comment: /\/\/.*$/;
        ''', global_repository=global_repo)
    mm.register_scope_providers(
        {"*.*": scoping_providers.FQNImportURI()})
    return mm

def test_object_processors_user_classes():
    """
    Test that object processors are called.
    They should be called after each model object construction.
    """
    def first_obj_processor(first):
        first._first_called = True
        first._a_copy = first.a

    def second_obj_processor(second):
        second._second_called = True
        second._sec_copy = second.sec

        # test that parent is fully initialised.
        # b should be True
        assert second.parent.b is not None

    obj_processors = {
        'First': first_obj_processor,
        'Second': second_obj_processor,
    }

    class First(object):
        def __init__(self, seconds, a, b, c):
            self.seconds = seconds
            self.a = a
            self.b = b
            self.c = c

    class Second(object):
        def __init__(self, sec, parent):
            self.sec = sec
            self.parent = parent

    metamodel = metamodel_from_str(grammar, classes=[First, Second])
    metamodel.register_obj_processors(obj_processors)

    model_str = 'first 34 45 7 A 45 65 B true C "dfdf"'
    first = metamodel.model_from_str(model_str)

    assert hasattr(first, '_first_called')
    assert first._a_copy == first.a
    for s in first.seconds:
        assert hasattr(s, '_second_called')
        assert s._sec_copy == s.sec

def test_buildins():
    """
    This test is used to check if a model with builtins works correctly.
    The test uses no special scoping.

    The test loads
    - one model w/o builtins
    - one model with builtins
    - one model with unknown references (errors)
    """
    #################################
    # META MODEL DEF
    #################################
    type_builtins = {
        'OneThing': Thing(name="OneThing"),
        'OtherThing': Thing(name="OtherThing")
    }

    my_metamodel = metamodel_from_str(metamodel_str,
                                      classes=[Thing],
                                      builtins=type_builtins)

    #################################
    # MODEL PARSING
    #################################
    my_metamodel.model_from_str('''
    thing A {}
    thing B {}
    thing C {A B}
    ''')
    my_metamodel.model_from_str('''
    thing A {}
    thing B {}
    thing C {A B OneThing OtherThing}
    ''')
    with raises(textx.exceptions.TextXSemanticError,
                match=r'.*Unknown object.*UnknownPart.*'):
        my_metamodel.model_from_str('''
        thing A {}
        thing B {}
        thing C {A B OneThing OtherThing UnknownPart}
        ''')

def test_reference_to_python_attribute():
    # This test demonstrates how to link python objects to
    # a textX model.
    # "access" objects access python attributes.
    from collections import namedtuple
    Person = namedtuple('Person', 'first_name last_name zip_code')
    p1 = Person('Tim', 'Foo', 123)
    p2 = Person('Tom', 'Bar', 456)

    sp = PythonScopeProvider({"p1": p1, "p2": p2})

    # create meta model
    my_metamodel = metamodel_from_str(metamodel_str)
    my_metamodel.register_scope_providers({
        "Access.pyobj": sp,
        "Access.pyattr": sp
    })

    # read model
    my_model = my_metamodel.model_from_str('''
    access A_Tim p1.first_name
    access A_123 p1.zip_code
    access A_456 p2.zip_code
    ''')

    # check that the references are OK
    A_Tim = get_unique_named_object(my_model, "A_Tim").pyattr
    assert A_Tim == "Tim"
    A_123 = get_unique_named_object(my_model, "A_123").pyattr
    assert A_123 == 123
    A_456 = get_unique_named_object(my_model, "A_456").pyattr
    assert A_456 == 456

    with raises(Exception, match=r'.*unknown.*'):
        my_metamodel.model_from_str('''
        access A1 p1.unknown
        ''')

    with raises(Exception, match=r'.*p3.*'):
        my_metamodel.model_from_str('''
        access A1 p3.first_anme
        ''')