def test_metamodel_provider_utf_16_le_basic_test():
    """
    This test checks that the global MetaModel Provider works (basic
    function). It uses utf-16-le for the model files.
    """
    #################################
    # META MODEL DEF
    #################################
    mm_components = metamodel_from_file(
        join(abspath(dirname(__file__)),
             'metamodel_provider_utf-16-le', 'Components.tx'))
    mm_components.register_scope_providers({
        "*.*": scoping_providers.FQNImportURI(),
        "Connection.from_port":
            scoping_providers.RelativeName("from_inst.component.slots"),
        "Connection.to_port":
            scoping_providers.RelativeName("to_inst.component.slots"),
    })

    mm_users = metamodel_from_file(
        join(abspath(dirname(__file__)),
             'metamodel_provider_utf-16-le', 'Users.tx'))
    mm_users.register_scope_providers({
        "*.*": scoping_providers.FQNImportURI(),
    })

    clear_language_registrations()
    register_language(
        'components-dsl',
        pattern='*.components',
        description='demo',
        metamodel=mm_components  # or a factory
    )
    register_language(
        'users-dsl',
        pattern='*.users',
        description='demo',
        metamodel=mm_users  # or a factory
    )

    #################################
    # MODEL PARSING
    #################################
    my_model = mm_users.model_from_file(
        join(abspath(dirname(__file__)),
             "metamodel_provider_utf-16-le", "example.users"),
        encoding='utf-16-le')

    #################################
    # TEST MODEL
    #################################
    user = get_unique_named_object_in_all_models(my_model, "pi")
    action1 = get_unique_named_object_in_all_models(my_model, "action1")
    assert user.instance is action1
def test_model_with_local_scope_postponed():
    """
    This is a test for the local scope provider which checks that the scope
    resolution is postponed at an intermediate stage.

    This must be the case, since the order of object references is exchanged
    in two different metamodels. Thus, we argue that (in the absence of an
    additional sorting mechanism) in one of both cases the required reference
    to the "from_instance" must be unresolved in the first resolution pass.
    The check is done using white box information (postponed_counter).
    """
    #################################
    # META MODEL DEF
    #################################
    sp1 = scoping_providers.RelativeName("from_inst.component.slots")
    my_meta_model1 = metamodel_from_file(
        join(abspath(dirname(__file__)), 'components_model1',
             'Components.tx'))
    my_meta_model1.register_scope_providers({
        "*.*": scoping_providers.FQN(),
        "Connection.from_port": sp1,
        "Connection.to_port":
            scoping_providers.RelativeName("to_inst.component.slots")
    })

    sp2 = scoping_providers.RelativeName("from_inst.component.slots")
    my_meta_model2 = metamodel_from_file(
        join(abspath(dirname(__file__)), 'components_model2',
             'Components.tx'))
    my_meta_model2.register_scope_providers({
        "*.*": scoping_providers.FQN(),
        "Connection.from_port": sp2,
        "Connection.to_port":
            scoping_providers.RelativeName("to_inst.component.slots")
    })

    #################################
    # MODEL PARSING
    #################################
    my_meta_model1.model_from_file(
        join(abspath(dirname(__file__)), "components_model1",
             "example.components"))
    my_meta_model2.model_from_file(
        join(abspath(dirname(__file__)), "components_model2",
             "example.components"))

    #################################
    # TEST MODEL
    #################################
    assert sp1.postponed_counter > 0 or sp2.postponed_counter > 0
def test_model_with_local_scope_and_bad_model_path():
    """
    This is a basic test for the local scope provider
    """
    #################################
    # META MODEL DEF
    #################################
    my_meta_model = metamodel_from_file(
        join(abspath(dirname(__file__)), 'components_model1',
             'Components.tx'))
    my_meta_model.register_scope_providers({
        "*.*": scoping_providers.FQN(),
        "Connection.from_port":  # error (component is not a list)
            scoping_providers.RelativeName("from_inst.component"),
        "Connection.to_port":
            scoping_providers.RelativeName("to_inst.component.slots"),
    })

    #################################
    # MODEL PARSING
    #################################
    with raises(textx.exceptions.TextXError,
                match=r'.*expected path to list in the model '
                      r'\(from_inst.component\).*'):
        my_meta_model.model_from_file(
            join(abspath(dirname(__file__)), "components_model1",
                 "example.components"))
def test_model_with_imports_and_global_repo():
    """
    Basic test for FQNImportURI + global_repository
    """
    #################################
    # META MODEL DEF
    #################################
    my_meta_model = metamodel_from_file(
        join(abspath(dirname(__file__)), 'interface_model1', 'Interface.tx'),
        global_repository=True)
    my_meta_model.register_scope_providers(
        {"*.*": scoping_providers.FQNImportURI()})

    #################################
    # MODEL PARSING
    #################################
    my_model = my_meta_model.model_from_file(
        join(abspath(dirname(__file__)),
             "interface_model1", "model_b", "app.if"))
    my_model2 = my_meta_model.model_from_file(
        join(abspath(dirname(__file__)),
             "interface_model1", "model_b", "app.if"))

    #################################
    # TEST MODEL
    #################################
    userid = get_unique_named_object(my_model, "userid")
    userid2 = get_unique_named_object(my_model2, "userid")
    assert userid == userid2
    assert userid.ref == userid2.ref
    assert userid.ref.__class__.__name__ == "RawType"
def test_model_with_multi_import():
    """
    Basic test for FQNImportURI + multi imports (import "*.if")
    """
    #################################
    # META MODEL DEF
    #################################
    my_meta_model = metamodel_from_file(
        join(abspath(dirname(__file__)), 'interface_model1', 'Interface.tx'))
    my_meta_model.register_scope_providers(
        {"*.*": scoping_providers.FQNImportURI()})

    #################################
    # MODEL PARSING
    #################################
    my_model = my_meta_model.model_from_file(
        join(abspath(dirname(__file__)),
             "interface_model1", "model_c", "A_multi_import.if"))

    #################################
    # TEST MODEL
    #################################
    imports = get_children_of_type("Import", my_model)
    assert 1 == len(imports)
    i = imports[0]
    assert 4 == len(i._tx_loaded_models)       # 4 files
    assert 4 == len(set(i._tx_loaded_models))  # 4 different files
def test_model_with_local_scope_wrong_type():
    """
    This is a basic test for the local scope provider
    (bad case with wrong type).
    """
    #################################
    # META MODEL DEF
    #################################
    my_meta_model = metamodel_from_file(
        join(abspath(dirname(__file__)), 'components_model1',
             'Components.tx'))
    my_meta_model.register_scope_providers({
        "*.*": scoping_providers.FQN(),
        "Connection.from_port":
            scoping_providers.RelativeName("from_inst.component.slots"),
        "Connection.to_port":
            scoping_providers.RelativeName("to_inst.component.slots"),
    })

    #################################
    # MODEL PARSING
    #################################
    with raises(textx.exceptions.TextXSemanticError,
                match=r'.*wrong_port.*'):
        my_meta_model.model_from_file(
            join(abspath(dirname(__file__)), "components_model1",
                 "example_wrong_type.components"))
def test_postponed_resolution_error():
    """
    This test checks that an unresolvable scope provider induces an
    exception. This is checked by using a scope provider which always
    returns a postponed object.
    """
    #################################
    # META MODEL DEF
    #################################
    def from_port(obj, attr, obj_ref):
        return scoping.Postponed()

    def to_port(obj, attr, obj_ref):
        return scoping.Postponed()

    my_meta_model = metamodel_from_file(
        join(abspath(dirname(__file__)), 'components_model1',
             'Components.tx'))
    my_meta_model.register_scope_providers({
        "*.*": scoping_providers.FQN(),
        "Connection.from_port": from_port,
        "Connection.to_port": to_port
    })

    #################################
    # MODEL PARSING
    #################################
    with raises(textx.exceptions.TextXSemanticError,
                match=r'.*Unresolvable cross references.*'):
        my_meta_model.model_from_file(
            join(abspath(dirname(__file__)), "components_model1",
                 "example.components"))
def callgraph_nomemoization():
    mm = metamodel_from_file('rhapsody.tx')

    # Small file
    this_folder = dirname(__file__)
    model = mm.model_from_file(join(this_folder, 'test_inputs',
                                    'LightSwitch.rpy'))
def test_model_without_imports():
    """
    Basic test for FQNImportURI (with a model not using imports)
    """
    #################################
    # META MODEL DEF
    #################################
    my_meta_model = metamodel_from_file(
        join(abspath(dirname(__file__)), 'interface_model1', 'Interface.tx'))
    my_meta_model.register_scope_providers(
        {"*.*": scoping_providers.FQNImportURI()})

    #################################
    # MODEL PARSING
    #################################
    my_model = my_meta_model.model_from_file(
        join(abspath(dirname(__file__)),
             "interface_model1", "model_a", "all_in_one.if"))

    #################################
    # TEST MODEL
    #################################

    # check that "socket" is an interface
    check_unique_named_object_has_class(my_model, "socket", "Interface")

    # check that "s.s1" is a reference to the socket interface
    a = get_unique_named_object(my_model, "socket")
    s1 = get_unique_named_object(my_model, "s1")
    assert a == s1.ref
def test_model_with_imports_and_errors():
    """
    Basic test for FQNImportURI (bad case)
    """
    #################################
    # META MODEL DEF
    #################################
    my_meta_model = metamodel_from_file(
        join(abspath(dirname(__file__)), 'interface_model1', 'Interface.tx'))
    my_meta_model.register_scope_providers(
        {"*.*": scoping_providers.FQNImportURI()})

    #################################
    # MODEL PARSING
    #################################
    with raises(textx.exceptions.TextXSemanticError,
                match=r'.*Unknown object.*types.int.*'):
        my_meta_model.model_from_file(
            join(abspath(dirname(__file__)),
                 "interface_model1", "model_b", "app_error1.if"))

    with raises(IOError, match=r'.*file_not_found\.if.*'):
        my_meta_model.model_from_file(
            join(abspath(dirname(__file__)),
                 "interface_model1", "model_b", "app_error2.if"))
def test_resolve_model_path_with_lists():
    #################################
    # META MODEL DEF
    #################################
    my_meta_model = metamodel_from_file(
        join(abspath(dirname(__file__)), 'components_model1',
             'Components.tx'))
    my_meta_model.register_scope_providers({
        "*.*": scoping_providers.FQN(),
        "Connection.from_port": scoping_providers.ExtRelativeName(
            "from_inst.component", "slots", "extends"),
        "Connection.to_port": scoping_providers.ExtRelativeName(
            "to_inst.component", "slots", "extends"),
    })

    #################################
    # MODEL PARSING
    #################################
    my_model = my_meta_model.model_from_file(
        join(abspath(dirname(__file__)), "components_model1",
             "example_inherit2.components"))

    #################################
    # TEST MODEL
    #################################
    action2a = resolve_model_path(my_model,
                                  "packages.usage.instances.action2", True)
    action2b = get_unique_named_object(my_model, "action2")
    assert action2a is action2b

    middle_a = resolve_model_path(my_model,
                                  "packages.base.components.Middle", True)
    middle_b = get_unique_named_object(my_model, "Middle")
    assert middle_a is middle_b

    # test parent(...) with lists
    action2a_with_parent = resolve_model_path(
        action2a, "parent(Model).packages.usage.instances.action2", True)
    assert action2a_with_parent == action2a

    # test "normal" parent with lists
    action2a_with_parent2 = resolve_model_path(
        action2a, "parent.instances.action2", True)
    assert action2a_with_parent2 == action2a

    with raises(Exception, match=r'.*unexpected: got list in path for '
                                 r'get_referenced_object.*'):
        resolve_model_path(my_model,
                           "packages.usage.instances.action2", False)
def main(path=None, debug=False, reportfilename=None):
    if path is None:
        path = join(dirname(__file__), "..", "..")
    if reportfilename is None:
        reportfilename = join(dirname(__file__), "REPORT.md")

    print("render_all_grammars.py - example program")

    matches = []
    for root, dirnames, filenames in os.walk(path):
        for filename in fnmatch.filter(filenames, '*.tx'):
            matches.append((root, filename))

    with open(reportfilename, "wt") as md:
        md.write("# All grammars (*.tx)\n")
        for m in matches:
            inname = join(m[0], m[1])
            outfname_base = "{}_{}".format(
                m[0].replace(path, '').lstrip(sep).replace(sep, '_'),
                m[1][:-len('.tx')])  # strip the '.tx' suffix

            destpath = join(dirname(reportfilename), "dot")
            if not exists(destpath):
                os.mkdir(destpath)
            dest_dot = join(destpath, outfname_base + ".dot")
            dest_dot_png = join(destpath, outfname_base + ".dot.png")

            destpath = join(dirname(reportfilename), "pu")
            if not exists(destpath):
                os.mkdir(destpath)
            dest_pu = join(destpath, outfname_base + ".pu")
            dest_pu_png = join(destpath, outfname_base + ".png")

            print(dest_dot)
            mm = metamodel_from_file(inname, debug=debug)
            metamodel_export(mm, dest_dot)
            metamodel_export(mm, dest_pu, renderer=PlantUmlRenderer())

            md.write("## {}\n".format(m[1]))
            md.write(" * source: {}/{}\n".format(m[0], m[1]))
            md.write(" * basename: {}\n".format(outfname_base))
            md.write('\n')
            with open(inname, "rt") as gr:
                for l in gr:
                    md.write("\t\t" + l)
            md.write('\n')
            rel_dest_dot_png = os.path.relpath(
                dest_dot_png, dirname(reportfilename))
            rel_dest_pu_png = os.path.relpath(
                dest_pu_png, dirname(reportfilename))
            md.write('<img width="49%" src="{}" alt="{}">\n'.format(
                rel_dest_pu_png, rel_dest_pu_png))
            md.write('<img width="49%" src="{}" alt="{}">\n'.format(
                rel_dest_dot_png, rel_dest_dot_png))
            md.write('\n\n')

    print("-------------------------")
    print("how to process and display the output:")
    print(" dot -O -Tpng dot/*.dot")
    print(" plantuml pu/*.pu")
    print("open the generated {}".format(reportfilename))
def test_model_with_circular_imports():
    """
    Basic test for FQNImportURI + circular imports
    """
    #################################
    # META MODEL DEF
    #################################
    my_meta_model = metamodel_from_file(
        join(abspath(dirname(__file__)), 'interface_model1', 'Interface.tx'))
    my_meta_model.register_scope_providers(
        {"*.*": scoping_providers.FQNImportURI()})

    #################################
    # MODEL PARSING
    #################################
    my_model = my_meta_model.model_from_file(
        join(abspath(dirname(__file__)),
             "interface_model1", "model_c", "A.if"))

    #################################
    # TEST MODEL
    #################################
    imports = get_children_of_type("Import", my_model)
    assert len(imports) > 0
    for i in imports:
        assert 1 == len(i._tx_loaded_models)  # one file / load import
        assert i.importURI in i._tx_loaded_models[0]._tx_filename

    check_unique_named_object_has_class(my_model, "A", "Interface")
    a = get_unique_named_object(my_model, "A")

    a_self = get_children(lambda x: hasattr(x, 'name') and x.name == "self", a)
    assert len(a_self) == 1
    a_self = a_self[0]

    a_other = get_children(
        lambda x: hasattr(x, 'name') and x.name == "other", a)
    assert len(a_other) == 1
    a_other = a_other[0]

    a_other_self = get_children(
        lambda x: hasattr(x, 'name') and x.name == "self", a_other.ref)
    assert len(a_other_self) == 1
    a_other_self = a_other_self[0]

    a_other_other = get_children(
        lambda x: hasattr(x, 'name') and x.name == "other", a_other.ref)
    assert len(a_other_other) == 1
    a_other_other = a_other_other[0]

    assert a_self.ref == a_other_other.ref
    assert a_self.ref != a_other.ref
    assert a_other.ref == a_other_self.ref
    assert a_other.ref != a_other_other.ref
def test_model_with_local_scope_and_circular_ref_via_two_models():
    """
    Test for FQNGlobalRepo + circular references.
    """
    #################################
    # META MODEL DEF
    #################################
    my_meta_model = metamodel_from_file(
        join(abspath(dirname(__file__)), 'components_model1',
             'Components.tx'),
        global_repository=True)
    global_scope = scoping_providers.FQNGlobalRepo(
        join(abspath(dirname(__file__)), "components_model1",
             "example_?.components"))
    my_meta_model.register_scope_providers({
        "*.*": global_scope,
        "Connection.from_port":
            scoping_providers.RelativeName("from_inst.component.slots"),
        "Connection.to_port":
            scoping_providers.RelativeName("to_inst.component.slots")
    })

    #################################
    # MODEL PARSING
    #################################
    my_model_a = my_meta_model.model_from_file(
        join(abspath(dirname(__file__)), "components_model1",
             "example_A.components"))
    my_model_b = my_meta_model.model_from_file(
        join(abspath(dirname(__file__)), "components_model1",
             "example_B.components"))

    a_my_a = get_unique_named_object(my_model_a, "mya")
    a_my_b = get_unique_named_object(my_model_a, "myb")
    b_my_a = get_unique_named_object(my_model_b, "mya")
    b_my_b = get_unique_named_object(my_model_b, "myb")

    assert a_my_a != b_my_a
    assert a_my_b != b_my_b

    assert a_my_a.component == b_my_a.component  # same component "class"
    assert a_my_b.component == b_my_b.component  # same component "class"

    a_connections = get_children_of_type("Connection", my_model_a)
    b_connections = get_children_of_type("Connection", my_model_b)

    a_connection = list(filter(
        lambda x: x.from_inst == a_my_a and x.to_inst == a_my_b,
        a_connections))
    b_connection = list(filter(
        lambda x: x.from_inst == b_my_a and x.to_inst == b_my_b,
        b_connections))
    assert len(a_connection) == 1
    assert len(b_connection) == 1
def get_meta_model(global_repo, grammar_file_name):
    mm = metamodel_from_file(join(this_folder, grammar_file_name),
                             debug=False)
    mm.register_scope_providers({
        "*.*": global_repo,
        "Ingredient.unit": scoping_providers.ExtRelativeName(
            "type", "units", "extends")
    })
    return mm
def get_meta_model(provider, grammar_file_name):
    mm = metamodel_from_file(join(this_folder, grammar_file_name),
                             debug=False)
    mm.register_scope_providers({
        "*.*": provider,
        "Call.method": scoping_providers.ExtRelativeName(
            "obj.ref", "methods", "extends")
    })
    return mm
def main(debug=False):
    this_folder = dirname(__file__)

    pyflies_mm = metamodel_from_file(join(this_folder, 'pyflies.tx'),
                                     debug=debug)
    metamodel_export(pyflies_mm, join(this_folder, 'pyflies_meta.dot'))

    experiment = pyflies_mm.model_from_file(join(this_folder,
                                                 'experiment.pf'))
    model_export(experiment, join(this_folder, 'experiment.dot'))
def memoization():
    mm = metamodel_from_file('rhapsody.tx', memoization=True)

    # Small file
    this_folder = dirname(__file__)
    model = mm.model_from_file(join(this_folder, 'test_inputs',
                                    'LightSwitch.rpy'))

    # Large file
    model2 = mm.model_from_file(join(this_folder, 'test_inputs',
                                     'LightSwitchDouble.rpy'))
def test_get_list_of_concatenated_objects():
    #################################
    # META MODEL DEF
    #################################
    my_meta_model = metamodel_from_file(
        join(abspath(dirname(__file__)), 'components_model1',
             'Components.tx'))
    my_meta_model.register_scope_providers({
        "*.*": scoping_providers.FQN(),
        "Connection.from_port": scoping_providers.ExtRelativeName(
            "from_inst.component", "slots", "extends"),
        "Connection.to_port": scoping_providers.ExtRelativeName(
            "to_inst.component", "slots", "extends"),
    })

    #################################
    # MODEL PARSING
    #################################
    my_model1 = my_meta_model.model_from_file(
        join(abspath(dirname(__file__)), "components_model1",
             "example_inherit1.components"))
    my_model2 = my_meta_model.model_from_file(
        join(abspath(dirname(__file__)), "components_model1",
             "example_inherit2.components"))

    #################################
    # TEST MODEL
    #################################

    # test extends A,B
    start = get_unique_named_object(my_model1, "Start")
    middle = get_unique_named_object(my_model1, "Middle")
    end = get_unique_named_object(my_model1, "End")
    inherited_classes = get_list_of_concatenated_objects(middle, "extends")
    assert len(inherited_classes) == 3
    assert inherited_classes[0] is middle
    assert inherited_classes[1] is start
    assert inherited_classes[2] is end

    # test extends A extends B
    start = get_unique_named_object(my_model2, "Start")
    middle = get_unique_named_object(my_model2, "Middle")
    end = get_unique_named_object(my_model2, "End")
    inherited_classes = get_list_of_concatenated_objects(middle, "extends")
    assert len(inherited_classes) == 3
    assert inherited_classes[0] is middle
    assert inherited_classes[1] is start
    assert inherited_classes[2] is end
def test_multi_metamodel_obj_proc():
    global_repo = scoping.GlobalModelRepository()
    repo = scoping_providers.PlainNameGlobalRepo()
    repo.register_models(os.path.dirname(__file__) + "/issue140/*.a")

    mm_A = metamodel_from_file(
        os.path.join(os.path.dirname(__file__), "issue140", "A.tx"),
        global_repository=global_repo, classes=[C1])
    mm_B = metamodel_from_file(
        os.path.join(os.path.dirname(__file__), "issue140", "B.tx"),
        global_repository=global_repo, classes=[C1])
    mm_B.register_scope_providers({"*.*": repo})

    def proc(a):
        print(a)

    mm_A.register_obj_processors({"C1": proc})
    mm_B.register_obj_processors({"C1": proc})

    clear_language_registrations()
    register_language('a-dsl', pattern='*.a',
                      description='Test Lang A', metamodel=mm_A)
    register_language('b-dsl', pattern='*.b',
                      description='Test Lang B', metamodel=mm_B)

    mm_B.model_from_file(
        os.path.join(os.path.dirname(__file__), "issue140", "b.b"))
def data_dsl():
    """
    An example DSL for data definition
    """
    current_dir = os.path.dirname(__file__)
    p = os.path.join(current_dir, 'Data.tx')
    data_mm = metamodel_from_file(p, global_repository=True)
    data_mm.register_scope_providers(
        {"*.*": scoping_providers.FQNImportURI()})
    return data_mm
def get_entity_mm():
    """
    Builds and returns a meta-model for Entity language.
    """
    type_builtins = {
        'integer': SimpleType(None, 'integer'),
        'string': SimpleType(None, 'string')
    }
    entity_mm = metamodel_from_file(join(this_folder, 'entity.tx'),
                                    classes=[SimpleType],
                                    builtins=type_builtins)
    return entity_mm
def test_metaclass_relative_paths(filename):
    current_dir = os.path.dirname(__file__)
    mm = metamodel_from_file(os.path.join(current_dir, 'test_import',
                                          'importoverride', filename))
    Third = mm['Third']
    ThirdMasked = mm['relative.third.Third']
    assert Third is not ThirdMasked
    model = mm.model_from_str('first 12 45 third "abc" "xyz"')
    inner_second = model.first[0]
    assert all(type(x) is ThirdMasked for x in inner_second.second)
    assert all(type(x) is Third for x in model.third)
def recipe_lang():
    this_folder = dirname(abspath(__file__))
    mm = metamodel_from_file(join(this_folder, "grammar", "Recipe.tx"),
                             classes=[Ingredient])
    config_provider = scoping_providers.PlainNameGlobalRepo(
        "**/*.config", glob_args={"recursive": True})
    ingredient_type_provider = scoping_providers.PlainNameGlobalRepo(
        "**/*.ingredient", glob_args={"recursive": True})
    mm.register_scope_providers({
        "Recipe.persons": config_provider,
        "Ingredient.type": ingredient_type_provider,
        "Ingredient.unit": scoping_providers.ExtRelativeName(
            "type", "units", "extends"),
    })
    return mm
def bibtex_language():
    "The BibTeX language"
    current_dir = os.path.dirname(__file__)
    mm = metamodel_from_file(os.path.join(current_dir, 'bibtex.tx'))

    def strip_value(v):
        if (v.startswith('{') and v.endswith('}')) or \
                (v.startswith('"') and v.endswith('"')):
            return v[1:-1]
        return v

    mm.register_obj_processors({'Value': strip_value})

    return mm
def get_meta(file_name, classes=None):
    builtin_models = scoping.ModelRepository()
    cmm = metamodel_for_language('pyflies-comp')
    component_folder = join(dirname(pyflies.__file__), 'components')
    for comp_file in os.listdir(component_folder):
        cm = cmm.model_from_file(join(component_folder, comp_file))
        builtin_models.add_model(cm)

    if classes is None:
        classes = model_classes + [Model]

    mm = metamodel_from_file(join(this_folder, file_name),
                             classes=classes,
                             builtin_models=builtin_models)
    mm.register_model_processor(processor)
    return mm
def main(debug=False):
    this_folder = dirname(__file__)

    robot_mm = metamodel_from_file(join(this_folder, 'robot.tx'), debug=False)
    metamodel_export(robot_mm, join(this_folder, 'robot_meta.dot'))

    # Register object processor for MoveCommand
    robot_mm.register_obj_processors({'MoveCommand': move_command_processor})

    robot_model = robot_mm.model_from_file(join(this_folder, 'program.rbt'))
    model_export(robot_model, join(this_folder, 'program.dot'))

    robot = Robot()
    robot.interpret(robot_model)
def main(debug=False):
    gui_mm = metamodel_from_file('tx_easgui/easgui.tx')
    gui_mm.register_model_processor(checkSemantics)
    gui_mm.register_obj_processors({'CreateWindow': createWindowProcessor})

    gui_model = gui_mm.model_from_file('program.eui')
    # gui_model = gui_mm.model_from_file(str(sys.argv[1]))

    root = Tk()
    my_gui = GUI(root)
    my_gui.interpret(gui_model)
    root.mainloop()
def test_model_with_local_scope():
    """
    This is a basic test for the local scope provider (good case).
    """
    #################################
    # META MODEL DEF
    #################################
    my_meta_model = metamodel_from_file(
        join(abspath(dirname(__file__)), 'components_model1',
             'Components.tx'))
    my_meta_model.register_scope_providers({
        "*.*": scoping_providers.FQN(),
        "Connection.from_port":
            scoping_providers.RelativeName("from_inst.component.slots"),
        "Connection.to_port":
            scoping_providers.RelativeName("to_inst.component.slots"),
    })

    #################################
    # MODEL PARSING
    #################################
    my_model = my_meta_model.model_from_file(
        join(abspath(dirname(__file__)), "components_model1",
             "example.components"))

    #################################
    # TEST MODEL
    #################################

    # test local refs
    action2 = get_unique_named_object(my_model, "action2")
    action3 = get_unique_named_object(my_model, "action3")
    connections = get_children_of_type("Connection", my_model)
    selected_connections = list(filter(
        lambda x: x.from_inst == action2 and x.to_inst == action3,
        connections))
    assert len(selected_connections) == 1

    # test list of formats
    input2 = get_unique_named_object(my_model, "input2")
    assert len(input2.formats) == 3
    # materialize the names in a list so that the repeated membership
    # checks below do not exhaust a one-shot iterator
    format_names = [x.name for x in input2.formats]
    assert "A" in format_names
    assert "B" in format_names
    assert "C" in format_names
    assert "D" not in format_names
def flow_dsl_s():
    mm_flow = metamodel_from_file(os.path.join(current_dir, 'Flow.tx'),
                                  global_repository=True)
    # Note, it is better to share a common repo, instead of having one
    # for each meta model separately.
    mm_flow.register_scope_providers(
        {"*.*": scoping_providers.FQNImportURI()})

    def check_flow(f):
        if f.algo1.outp != f.algo2.inp:
            raise TextXSemanticError("algo data types must match",
                                     **tools.get_location(f))

    mm_flow.register_obj_processors({'Flow': check_flow})
    return mm_flow
def flow_dsl():
    """
    An example DSL for data flow processing definition
    """
    current_dir = os.path.dirname(__file__)
    p = os.path.join(current_dir, 'Flow.tx')
    flow_mm = metamodel_from_file(p, global_repository=True)

    def check_flow(f):
        if f.algo1.outp != f.algo2.inp:
            raise TextXSemanticError("algo data types must match",
                                     **get_location(f))

    flow_mm.register_obj_processors({'Flow': check_flow})
    return flow_mm
def types_dsl():
    """
    An example DSL for simple types definition
    """
    current_dir = os.path.dirname(__file__)
    p = os.path.join(current_dir, 'Types.tx')
    types_mm = metamodel_from_file(p, global_repository=True)

    def check_type(t):
        if t.name[0].isupper():
            raise TextXSyntaxError("types must be lowercase",
                                   **tools.get_location(t))

    types_mm.register_obj_processors({'Type': check_type})
    return types_mm
def get_mm(debug=False, global_scope=True):
    """ """
    mm = metamodel_from_file(join(this_dir, 'grammar', 'goal_dsl.tx'),
                             global_repository=global_scope,
                             debug=debug)
    mm.register_scope_providers({
        "*.*": scoping_providers.FQNImportURI(
            importAs=True,
            # importURI_to_scope_name=importURI_to_scope_name
        )
    })
    return mm
def main():
    backend_name = f"backend.{ARGUMENTS['backend']}"
    if not backend_exists(backend_name):
        print(f"ERROR: Specified backend `{ARGUMENTS['backend']}` does not exist")
        exit(1)

    generator_name = f"{backend_name}.generators.{ARGUMENTS['format']}_generator"
    if not generator_exists(generator_name):
        print(f"ERROR: Specified generator `{generator_name}` does not exist")
        exit(1)

    generator_module = importlib.import_module(generator_name)

    # Check that the generator has the required `generate` function
    if not hasattr(generator_module, 'generate'):
        print(
            f'Module {generator_name} does not have required `generate` function')
        exit(1)

    # Generator is OK, we can continue processing the program
    metamodel = metamodel_from_file('model.tx')

    from textx.exceptions import TextXSyntaxError
    try:
        model = metamodel.model_from_file(ARGUMENTS['filename'])
    except TextXSyntaxError as err:
        print(f'ERROR: line {err.line}, column {err.col}: {err.message}')
        exit(1)

    mp = ModelParser(model)
    output = mp.parse_model()

    # Remove the extension from the output filename if the user added it
    # despite the help message
    try:
        extension_index = ARGUMENTS['output'].rindex('.')
        ARGUMENTS['output'] = ARGUMENTS['output'][0:extension_index]
    except ValueError:
        pass

    if ARGUMENTS['check']:
        from check import check
        check(output, ARGUMENTS['check'])
        print('Everything OK')
    else:
        generator_module.generate(output, ARGUMENTS['output'])
def test_model_with_circular_imports():
    #################################
    # META MODEL DEF
    #################################
    my_meta_model = metamodel_from_file(
        abspath(dirname(__file__)) + '/interface_model1/Interface.tx')
    my_meta_model.register_scope_providers(
        {"*.*": scoping_providers.FQNImportURI()})

    #################################
    # MODEL PARSING
    #################################
    my_model = my_meta_model.model_from_file(
        abspath(dirname(__file__)) + "/interface_model1/model_c/A.if")

    #################################
    # TEST MODEL
    #################################
    check_unique_named_object_has_class(my_model, "A", "Interface")
    a = get_unique_named_object(my_model, "A")

    a_self = get_children(lambda x: hasattr(x, 'name') and x.name == "self", a)
    assert len(a_self) == 1
    a_self = a_self[0]

    a_other = get_children(
        lambda x: hasattr(x, 'name') and x.name == "other", a)
    assert len(a_other) == 1
    a_other = a_other[0]

    a_other_self = get_children(
        lambda x: hasattr(x, 'name') and x.name == "self", a_other.ref)
    assert len(a_other_self) == 1
    a_other_self = a_other_self[0]

    a_other_other = get_children(
        lambda x: hasattr(x, 'name') and x.name == "other", a_other.ref)
    assert len(a_other_other) == 1
    a_other_other = a_other_other[0]

    assert a_self.ref == a_other_other.ref
    assert a_self.ref != a_other.ref
    assert a_other.ref == a_other_self.ref
    assert a_other.ref != a_other_other.ref
def test_diamond_import():
    """
    Test that diamond rule import results in the same class.
    """
    current_dir = os.path.dirname(__file__)
    mm = metamodel_from_file(
        os.path.join(current_dir, 'test_import', 'importoverride',
                     'first_diamond.tx'))
    First = mm['First']
    MyDiamondRule = mm['diamond.last.MyDiamondRule']
    model = mm.model_from_str('second 12 45 third 4 5')
    assert type(model) is First
    assert all(type(x.diamond) is MyDiamondRule for x in model.seconds)
    assert all(type(x.diamond) is MyDiamondRule for x in model.thirds)
def get_entity_mm(debug=False):
    """
    Builds and returns a meta-model for Entity language.
    """
    # Built-in simple types
    # Each model will have these simple types during reference resolving but
    # they will not be a part of the `types` list of EntityModel.
    type_builtins = {
        'integer': SimpleType(None, 'integer'),
        'string': SimpleType(None, 'string')
    }
    entity_mm = metamodel_from_file(join(this_folder, 'entity.tx'),
                                    classes=[SimpleType],
                                    builtins=type_builtins,
                                    debug=debug)
    return entity_mm
def test_globalimports_basic_test_with_single_model_file_and_global_repo():
    """
    Basic test for the FQNGlobalRepo + global_repository.
    Tests that two parses of the same input yield the same objects
    (when global_repository is used).
    """
    #################################
    # META MODEL DEF
    #################################
    my_meta_model = metamodel_from_file(
        join(abspath(dirname(__file__)), 'interface_model2',
             'Interface.tx'),
        global_repository=True)
    my_meta_model.register_scope_providers({
        "*.*": scoping_providers.FQNGlobalRepo(
            join(abspath(dirname(__file__)),
                 'interface_model2', 'model_a', '*.if'))
    })

    #################################
    # MODEL PARSING
    #################################
    my_model = my_meta_model.model_from_file(
        join(abspath(dirname(__file__)),
             "interface_model2", "model_a", "all_in_one.if"))
    my_model2 = my_meta_model.model_from_file(
        join(abspath(dirname(__file__)),
             "interface_model2", "model_a", "all_in_one.if"))

    #################################
    # TEST MODEL
    #################################

    # check that "socket" is an interface
    check_unique_named_object_has_class(my_model, "socket", "Interface")

    # check that "s.s1" is a reference to the socket interface
    a = get_unique_named_object(my_model, "socket")
    s1 = get_unique_named_object(my_model, "s1")
    assert a == s1.ref

    a2 = get_unique_named_object(my_model2, "socket")
    assert a2 == a  # with global repository
def main(debug=False):
    this_folder = dirname(__file__)

    # Get meta-model from language description
    hello_meta = metamodel_from_file(join(this_folder, 'hello.tx'),
                                     debug=debug)

    # Optionally export meta-model to dot
    metamodel_export(hello_meta, join(this_folder, 'hello_meta.dot'))

    # Instantiate model
    example_hello_model = hello_meta.model_from_file(
        join(this_folder, 'example.hello'))

    # Optionally export model to dot
    model_export(example_hello_model, join(this_folder, 'example.dot'))
def test_model_with_imports():
    """
    Basic test for FQNImportURI (good case)
    """
    #################################
    # META MODEL DEF
    #################################
    my_meta_model = metamodel_from_file(
        join(abspath(dirname(__file__)), 'interface_model1', 'Interface.tx'))
    my_meta_model.register_scope_providers(
        {"*.*": scoping_providers.FQNImportURI()})

    #################################
    # MODEL PARSING
    #################################
    my_model = my_meta_model.model_from_file(
        join(abspath(dirname(__file__)),
             "interface_model1", "model_b", "app.if"))
    my_model2 = my_meta_model.model_from_file(
        join(abspath(dirname(__file__)),
             "interface_model1", "model_b", "app.if"))

    #################################
    # TEST MODEL
    #################################

    # check that "socket" is an interface
    inner_model = my_model._tx_model_repository.all_models.filename_to_model[
        join(abspath(dirname(__file__)),
             "interface_model1", "model_b", "base.if")]
    check_unique_named_object_has_class(inner_model, "socket", "Interface")

    # check that "s.s1" is a reference to the socket interface
    a = get_unique_named_object(inner_model, "socket")
    s1 = get_unique_named_object(inner_model, "s1")
    userid = get_unique_named_object(my_model, "userid")
    assert a == s1.ref

    userid2 = get_unique_named_object(my_model2, "userid")
    assert userid != userid2
    assert userid.ref != userid2.ref
    assert userid.ref.__class__.__name__ == "RawType"
def get_question_types_mm():
    """Returns the meta-model for question-types-dsl language."""
    current_dir = os.path.dirname(__file__)
    grammar_path = os.path.join(current_dir, 'question_types.tx')
    object_processors = {'QuestionType': question_type_object_processor}

    # build metamodel
    metamodel = metamodel_from_file(grammar_path,
                                    classes=[Parameter, QuestionType],
                                    builtins=get_built_in_question_types(),
                                    global_repository=True)
    metamodel.register_obj_processors(object_processors)

    return metamodel
def main(debug=False):
    this_folder = dirname(__file__)

    # Create metamodel from textX description
    workflow_mm = metamodel_from_file(
        join(this_folder, 'workflow.tx'), debug=debug)

    # Export to dot
    # Create png image with: dot -Tpng -O workflow_meta.dot
    metamodel_export(workflow_mm, join(this_folder, 'workflow_meta.dot'))

    # Load example model
    example = workflow_mm.model_from_file(join(this_folder, 'example.wf'))

    # Export to dot
    # Create png image with: dot -Tpng -O example.dot
    model_export(example, join(this_folder, 'example.dot'))
def __init__(self):
    self.python_ast = ast.Module(body=[])
    self.variables = []
    self.meta_model = metamodel_from_file(
        os.path.dirname(__file__) + "/pseudocode.tx", debug=False,
        classes={
            DeclareStmt, InputStmt, PrintStmt, DelStmt, FuncCallStmt,
            IfStmt, WhileStmt, ForStmt, FuncDef, Parameters, TypedArgsList,
            ReturnStmt, BinaryOp, OrTest, AndTest, Comparison, Expr,
            XorExpr, AndExpr, ShiftExpr, ArithExpr, Term, Power, UnaryOp,
            Factor, NotTest, Statement, ExprStmt, TestListStarExpr,
            TestList, AtomExpr, Atom, Number, Name, String, NoneType,
            Boolean
        })
    self.meta_model.register_obj_processors({
        'Stmt': self.convert,
    })
def load_model(filename):
    try:
        if settings.DEBUG_MODE:
            print("Loading patterns...")
            parsing_begin = time.time()
        metamodel = metamodel_from_file(
            os.path.join(os.path.dirname(os.path.realpath(__file__)),
                         'grammar.tx'),
            memoization=True)
        model = metamodel.model_from_file(filename)
        if settings.DEBUG_MODE:
            parsing_end = time.time()
            parsing_delta = parsing_end - parsing_begin
            print("Done. Loading patterns took %.2f second(s)."
                  % parsing_delta)
        return model
    except Exception as e:
        # exceptions have no `.message` attribute in Python 3
        print(e)
        return None
def get_meta_model(provider, grammar_file_name):
    mm = metamodel_from_file(join(this_folder, grammar_file_name),
                             debug=False)
    mm.register_scope_providers({
        "*.*": provider,
        "Call.method": scoping_providers.ExtRelativeName(
            "obj.ref", "methods", "extends")
    })

    def my_processor(m):
        from textx.exceptions import TextXSemanticError
        if m.name == "d1":
            raise TextXSemanticError("d1 triggers artifical error",
                                     **get_location(m))

    mm.register_obj_processors({"Method": my_processor})
    return mm
def main(debug=False):
    processors = {
        'Expression': expression_action,
        'Term': term_action,
        'Factor': factor_action,
    }

    calc_mm = metamodel_from_file("calc.tx", auto_init_attributes=False,
                                  debug=debug)
    calc_mm.register_obj_processors(processors)

    input_expr = '''
        3 + 3 * 5
    '''

    expr = calc_mm.model_from_str(input_expr)

    result = expression_action(expr)

    print("Result is", result)
def test_component_specification():
    """
    Test component specification language
    """
    mm = metamodel_from_file(join(this_folder, 'components.tx'),
                             classes=common_classes)

    model_str = r'''
    abstract component abs_comp
    """
    This component is used in inheritance
    """
    {
        param abs_param: int = 20
    }

    component test_comp extends abs_comp
    """
    This is test component
    """
    {
        param first_param: string = 'First param'

        param second_param: int = 5
        """
        Parameter description
        """

        // Third param can be of multiple types
        param multi_type: [int, string, symbol] = 10
    }
    '''

    model = mm.model_from_str(model_str)

    comp = model.comp_types[0]
    assert comp.abstract

    comp = model.comp_types[1]
    assert comp.param_types[1].description.strip() == 'Parameter description'
    assert comp.param_types[2].types == ['int', 'string', 'symbol']
    assert comp.param_types[2].default.eval() == 10
    assert comp.extends[0].param_types[0].default.eval() == 20
def main():
    logging.basicConfig(level=logging.DEBUG)

    parser = argparse.ArgumentParser()
    parser.add_argument('dsl_file')
    args = parser.parse_args()

    # source_filename = 'example.dsl'
    source_filename = args.dsl_file
    mm = textx.metamodel_from_file('grammar.tx')
    m = mm.model_from_file(source_filename)
    model_export(m, source_filename + '.dot')
    print(m)

    filename = source_filename + '.bin'
    list_filename = source_filename + '.lst'
    with open(list_filename, 'w') as lst_file:
        cgen = CodeGenerator(lst_file, filename)
        cgen.compile(m)
def robot():
    "A language for defining robot behaviour"

    def semantic_check(model, metamodel):
        if model.name == "WrongMode":
            raise TextXSemanticError(
                'The root mode cannot be called "Wrong Mode".',
                **get_location(model))

    def mode_obj_processor(mode):
        if mode.name[0].islower():
            raise TextXSemanticError(
                f'Mode name "{mode.name}" must be capitalized.',
                **get_location(mode))

    metamodel = metamodel_from_file(join(dirname(__file__), "robot.tx"))
    metamodel.register_model_processor(semantic_check)
    metamodel.register_obj_processors({"Mode": mode_obj_processor})
    return metamodel
def get_metamodel():
    this_folder = dirname(abspath(__file__))

    # get the "mydsl" meta model
    other_meta_model = mydsl.get_metamodel()

    # create the meta model and reference "mydsl"
    meta_model = metamodel_from_file(
        join(this_folder, "MyDsl1.tx"),
        referenced_metamodels=[other_meta_model])

    # register scope provider (allow import of models into mydsl1 models)
    meta_model.register_scope_providers(
        {"*.*": scoping_providers.PlainNameImportURI()})

    # register file endings
    scoping.MetaModelProvider.add_metamodel("*.mydsl", other_meta_model)
    scoping.MetaModelProvider.add_metamodel("*.mydsl1", meta_model)

    return meta_model
def sbag_language():
    "sbag language"

    builtin_types = {
        'int': BaseType(None, 'int'),
        'string': BaseType(None, 'string'),
        'float': BaseType(None, 'float'),
        'boolean': BaseType(None, 'boolean')
    }

    mm = metamodel_from_file(os.path.join(current_dir, 'sbag.tx'),
                             classes=[BaseType, Entity],
                             builtins=builtin_types, debug=True)

    # Here if necessary register object processors or scope providers
    # http://textx.github.io/textX/stable/metamodel/#object-processors
    # http://textx.github.io/textX/stable/scoping/

    return mm
def test_import():
    """
    Test grammar import.
    """
    current_dir = os.path.dirname(__file__)
    mm = metamodel_from_file(
        os.path.join(current_dir, 'relativeimport', 'first.tx'))
    metamodel_export(mm, 'import_test_mm.dot')

    model = """
    first
        second "1" "2"
        third true false true 12 false
    endfirst
    """

    model = mm.model_from_str(model)
    model_export(model, 'import_test_model.dot')
def pyflies_component_language(**kwargs):
    "A language for PyFlies component specification"
    global_repo_provider = scoping.providers.PlainNameGlobalRepo()
    mm = metamodel_from_file(join(current_dir, 'components.tx'),
                             classes=component_classes, **kwargs)

    # Load base components which can be referenced in other component
    # definitions
    component_folder = join(dirname(pyflies.__file__), '..', 'components')
    m = mm.model_from_file(join(component_folder, 'base.pfc'))
    reduce_exp(m)
    global_repo_provider.add_model(m)
    mm.register_scope_providers({'*.*': global_repo_provider})

    # Here if necessary register object processors or scope providers
    # http://textx.github.io/textX/stable/metamodel/#object-processors
    # http://textx.github.io/textX/stable/scoping/

    return mm