def test_metamodel_provider_utf_16_le_basic_test():
    """
    Check that the global MetaModel Provider works (basic function)
    when the model files are encoded as utf-16-le.
    """
    #################################
    # META MODEL DEF
    #################################
    grammar_dir = join(abspath(dirname(__file__)),
                       'metamodel_provider_utf-16-le')

    mm_components = metamodel_from_file(join(grammar_dir, 'Components.tx'))
    mm_components.register_scope_providers({
        "*.*": scoping_providers.FQNImportURI(),
        "Connection.from_port":
            scoping_providers.RelativeName("from_inst.component.slots"),
        "Connection.to_port":
            scoping_providers.RelativeName("to_inst.component.slots"),
    })

    mm_users = metamodel_from_file(join(grammar_dir, 'Users.tx'))
    mm_users.register_scope_providers({
        "*.*": scoping_providers.FQNImportURI(),
    })

    clear_language_registrations()
    register_language('components-dsl',
                      pattern='*.components',
                      description='demo',
                      metamodel=mm_components)  # or a factory
    register_language('users-dsl',
                      pattern='*.users',
                      description='demo',
                      metamodel=mm_users)  # or a factory

    #################################
    # MODEL PARSING
    #################################
    my_model = mm_users.model_from_file(join(grammar_dir, "example.users"),
                                        encoding='utf-16-le')

    #################################
    # TEST MODEL
    #################################
    user = get_unique_named_object_in_all_models(my_model, "pi")
    action1 = get_unique_named_object_in_all_models(my_model, "action1")
    assert user.instance is action1
def test_metamodel_provider_basic_test():
    """
    Check that the global MetaModel Provider works (basic function):

    - a filename pattern may not be registered twice;
    - the correct metamodel is used to load each model file (a model
      constellation spanning two metamodels is loaded).
    """
    #################################
    # META MODEL DEF
    #################################
    base = join(abspath(dirname(__file__)), 'metamodel_provider')

    mm_components = metamodel_from_file(join(base, 'Components.tx'))
    mm_components.register_scope_providers({
        "*.*": scoping_providers.FQNImportURI(),
        "Connection.from_port":
            scoping_providers.RelativeName("from_inst.component.slots"),
        "Connection.to_port":
            scoping_providers.RelativeName("to_inst.component.slots"),
    })

    mm_users = metamodel_from_file(join(base, 'Users.tx'))
    mm_users.register_scope_providers({
        "*.*": scoping_providers.FQNImportURI(),
    })

    scoping.MetaModelProvider.add_metamodel("*.components", mm_components)
    scoping.MetaModelProvider.add_metamodel("*.users", mm_users)
    # registering the same pattern again must fail
    with raises(Exception, match=r'.*pattern.*already registered.*'):
        scoping.MetaModelProvider.add_metamodel("*.users", mm_users)

    #################################
    # MODEL PARSING
    #################################
    my_model = mm_users.model_from_file(join(base, "example.users"))

    #################################
    # TEST MODEL
    #################################
    user = get_unique_named_object_in_all_models(my_model, "pi")
    action1 = get_unique_named_object_in_all_models(my_model, "action1")
    assert user.instance is action1
def test_model_with_imports_and_global_repo():
    """
    FQNImportURI with global_repository=True: loading the same model
    file twice must yield the very same objects (shared repository).
    """
    #################################
    # META MODEL DEF
    #################################
    this_folder = abspath(dirname(__file__))
    # os.path.join instead of '+' string concatenation for portable
    # paths (consistent with the other tests in this file).
    my_meta_model = metamodel_from_file(
        join(this_folder, 'interface_model1', 'Interface.tx'),
        global_repository=True)
    my_meta_model.register_scope_providers(
        {"*.*": scoping_providers.FQNImportURI()})

    #################################
    # MODEL PARSING
    #################################
    my_model = my_meta_model.model_from_file(
        join(this_folder, "interface_model1", "model_b", "app.if"))
    my_model2 = my_meta_model.model_from_file(
        join(this_folder, "interface_model1", "model_b", "app.if"))

    #################################
    # TEST MODEL
    #################################
    userid = get_unique_named_object(my_model, "userid")
    userid2 = get_unique_named_object(my_model2, "userid")
    # with a global repository both loads return the same objects
    assert userid == userid2
    assert userid.ref == userid2.ref
    assert userid.ref.__class__.__name__ == "RawType"
def test_model_without_imports():
    """
    Basic test for FQNImportURI with a model that does not use imports.
    """
    #################################
    # META MODEL DEF
    #################################
    this_folder = abspath(dirname(__file__))
    # os.path.join instead of '+' string concatenation for portable
    # paths (consistent with the other tests in this file).
    my_meta_model = metamodel_from_file(
        join(this_folder, 'interface_model1', 'Interface.tx'))
    my_meta_model.register_scope_providers(
        {"*.*": scoping_providers.FQNImportURI()})

    #################################
    # MODEL PARSING
    #################################
    my_model = my_meta_model.model_from_file(
        join(this_folder, "interface_model1", "model_a", "all_in_one.if"))

    #################################
    # TEST MODEL
    #################################
    # check that "socket" is an interface
    check_unique_named_object_has_class(my_model, "socket", "Interface")

    # check that "s.s1" is a reference to the socket interface
    a = get_unique_named_object(my_model, "socket")
    s1 = get_unique_named_object(my_model, "s1")
    assert a == s1.ref
def test_exception_from_included_model():
    """
    Check that an error induced by an included model (thrown via an
    object processor) is (a) thrown and (b) reports the correct model
    location (file, line and column).
    """
    #################################
    # META MODEL DEF
    #################################
    this_folder = dirname(abspath(__file__))

    def get_meta_model(provider, grammar_file_name):
        # grammar_file_name is already absolute here; the original
        # joined it with this_folder again, which is a no-op for
        # absolute paths -- use it directly.
        mm = metamodel_from_file(grammar_file_name, debug=False)
        mm.register_scope_providers({
            "*.*": provider,
            "Call.method": scoping_providers.ExtRelativeName(
                "obj.ref", "methods", "extends")
        })

        def my_processor(m):
            from textx.exceptions import TextXSemanticError
            from textx.scoping.tools import get_location
            # Typo fix: "artifical" -> "artificial" (the match regex
            # below is updated consistently in this function).
            if m.name == "d1":
                raise TextXSemanticError("d1 triggers artificial error",
                                         **get_location(m))

        mm.register_obj_processors({"Method": my_processor})
        return mm

    import_lookup_provider = scoping_providers.FQNImportURI()

    a_mm = get_meta_model(
        import_lookup_provider,
        join(this_folder, "metamodel_provider3", "A.tx"))
    b_mm = get_meta_model(
        import_lookup_provider,
        join(this_folder, "metamodel_provider3", "B.tx"))
    c_mm = get_meta_model(
        import_lookup_provider,
        join(this_folder, "metamodel_provider3", "C.tx"))

    scoping.MetaModelProvider.clear()
    scoping.MetaModelProvider.add_metamodel("*.a", a_mm)
    scoping.MetaModelProvider.add_metamodel("*.b", b_mm)
    scoping.MetaModelProvider.add_metamodel("*.c", c_mm)

    #################################
    # MODEL PARSING / TEST
    #################################
    import textx.exceptions
    # the error originates from the included model_d.b at line 5, col 3
    with raises(textx.exceptions.TextXSemanticError,
                match=r'.*model_d\.b:5:3:.*d1 triggers artificial error'):
        a_mm.model_from_file(
            join(this_folder, "metamodel_provider3",
                 "inheritance2", "model_a.a"))

    #################################
    # END
    #################################
    scoping.MetaModelProvider.clear()
def test_model_with_multi_import():
    """
    Basic test for FQNImportURI + multi imports (import "*.if").
    """
    #################################
    # META MODEL DEF
    #################################
    base = abspath(dirname(__file__))
    my_meta_model = metamodel_from_file(
        join(base, 'interface_model1', 'Interface.tx'))
    my_meta_model.register_scope_providers(
        {"*.*": scoping_providers.FQNImportURI()})

    #################################
    # MODEL PARSING
    #################################
    my_model = my_meta_model.model_from_file(
        join(base, "interface_model1", "model_c", "A_multi_import.if"))

    #################################
    # TEST MODEL
    #################################
    imports = get_children_of_type("Import", my_model)
    assert 1 == len(imports)
    wildcard_import = imports[0]
    # the single wildcard import pulled in four distinct files
    assert 4 == len(wildcard_import._tx_loaded_models)
    assert 4 == len(set(wildcard_import._tx_loaded_models))
def test_model_with_imports_and_global_repo():
    """
    Basic test for FQNImportURI + global_repository.
    """
    #################################
    # META MODEL DEF
    #################################
    base = abspath(dirname(__file__))
    my_meta_model = metamodel_from_file(
        join(base, 'interface_model1', 'Interface.tx'),
        global_repository=True)
    my_meta_model.register_scope_providers(
        {"*.*": scoping_providers.FQNImportURI()})

    #################################
    # MODEL PARSING
    #################################
    app_path = join(base, "interface_model1", "model_b", "app.if")
    my_model = my_meta_model.model_from_file(app_path)
    my_model2 = my_meta_model.model_from_file(app_path)

    #################################
    # TEST MODEL
    #################################
    userid = get_unique_named_object(my_model, "userid")
    userid2 = get_unique_named_object(my_model2, "userid")
    # the shared repository makes both loads yield the same objects
    assert userid == userid2
    assert userid.ref == userid2.ref
    assert userid.ref.__class__.__name__ == "RawType"
def library_init(repo_selector):
    """
    Initialize the LibData meta model.

    repo_selector selects how models are shared:
      * "no global scope" -- no shared repository
      * "global repo"     -- reuse the repository of the LibTypes
                             meta model (shared across languages)
    """
    if repo_selector == "no global scope":
        global_repo = False
    elif repo_selector == "global repo":
        # get the global repo from the inherited meta model:
        global_repo = LibTypes.get_metamodel()._tx_model_repository
    else:
        raise Exception("unexpected parameter 'repo_selector={}'"
                        .format(repo_selector))

    LibData._mm = metamodel_from_str(r'''
            Model: includes*=Include data+=Data;
            Data: 'data' name=ID '{' attributes+=Attribute '}';
            Attribute: name=ID ':' type=[Type];
            Include: '#include' importURI=STRING;
            Comment: /\/\/.*$/;
        ''',
        global_repository=global_repo,
        referenced_metamodels=[LibTypes.get_metamodel()])
    LibData._mm.register_scope_providers(
        {"*.*": scoping_providers.FQNImportURI()})
    textx.scoping.MetaModelProvider.add_metamodel(
        "*.data", LibData.get_metamodel())
def library_init(repo_selector):
    """
    Initialize the LibFlow meta model.

    repo_selector selects how models are shared:
      * "no global scope" -- no shared repository
      * "global repo"     -- reuse the repository of the LibData
                             meta model (shared across languages)
    """
    if repo_selector == "no global scope":
        global_repo = False
    elif repo_selector == "global repo":
        # get the global repo from the inherited meta model:
        global_repo = LibData.get_metamodel()._tx_model_repository
    else:
        raise Exception("unexpected parameter 'repo_selector={}'"
                        .format(repo_selector))

    LibFlow._mm = metamodel_from_str(r'''
            Model: includes*=Include algos+=Algo flows+=Flow;
            Algo: 'algo' name=ID ':' inp=[Data] '->' outp=[Data];
            Flow: 'connect' algo1=[Algo] '->' algo2=[Algo] ;
            Include: '#include' importURI=STRING;
            Comment: /\/\/.*$/;
        ''',
        global_repository=global_repo,
        referenced_metamodels=[LibData.get_metamodel()])
    LibFlow._mm.register_scope_providers(
        {"*.*": scoping_providers.FQNImportURI()})
    textx.scoping.MetaModelProvider.add_metamodel(
        "*.flow", LibFlow.get_metamodel())

    def check_flow(f):
        # connected algos must agree on the exchanged data type
        if f.algo1.outp != f.algo2.inp:
            raise textx.exceptions.TextXSemanticError(
                "algo data types must match",
                **tools.get_location(f)
            )

    LibFlow._mm.register_obj_processors({
        'Flow': check_flow
    })
def test_importURI_variations_import_as_ok1():
    """
    FQNImportURI with a custom importURI converter and importAs=True
    (good case 1).
    """
    #################################
    # META MODEL DEF
    #################################
    my_meta_model = metamodel_from_str(grammar)

    def conv(i):
        # map dotted import names onto file paths
        return i.replace(".", "/") + ".model"

    my_meta_model.register_scope_providers({
        "*.*": scoping_providers.FQNImportURI(importURI_converter=conv,
                                              importAs=True)
    })

    #################################
    # MODEL PARSING
    #################################
    my_model = my_meta_model.model_from_file(
        join(abspath(dirname(__file__)), "importAs", "b_ok1.model"))

    #################################
    # TEST MODEL
    #################################
    pkg = my_model.packages[0]
    assert pkg.name == "B"
    assert pkg.objects[0].name == "A1"
    assert pkg.objects[0].ref.text == "from A1"
    assert pkg.objects[1].name == "A2"
    assert pkg.objects[1].ref.text == "from A2"
def test_model_without_imports():
    """
    Basic test for FQNImportURI (with a model not using imports).
    """
    #################################
    # META MODEL DEF
    #################################
    base = abspath(dirname(__file__))
    my_meta_model = metamodel_from_file(
        join(base, 'interface_model1', 'Interface.tx'))
    my_meta_model.register_scope_providers(
        {"*.*": scoping_providers.FQNImportURI()})

    #################################
    # MODEL PARSING
    #################################
    my_model = my_meta_model.model_from_file(
        join(base, "interface_model1", "model_a", "all_in_one.if"))

    #################################
    # TEST MODEL
    #################################
    # "socket" must be an interface
    check_unique_named_object_has_class(my_model, "socket", "Interface")

    # "s.s1" must reference the socket interface
    a = get_unique_named_object(my_model, "socket")
    s1 = get_unique_named_object(my_model, "s1")
    assert a == s1.ref
def test_model_with_imports_and_errors():
    """
    Basic test for FQNImportURI (bad cases: an unresolved reference in
    an imported model, and an import of a non-existing file).
    """
    #################################
    # META MODEL DEF
    #################################
    base = abspath(dirname(__file__))
    my_meta_model = metamodel_from_file(
        join(base, 'interface_model1', 'Interface.tx'))
    my_meta_model.register_scope_providers(
        {"*.*": scoping_providers.FQNImportURI()})

    #################################
    # MODEL PARSING
    #################################
    # unresolved reference inside the model
    with raises(textx.exceptions.TextXSemanticError,
                match=r'.*Unknown object.*types.int.*'):
        my_meta_model.model_from_file(
            join(base, "interface_model1", "model_b", "app_error1.if"))

    # import of a file that does not exist
    with raises(IOError, match=r'.*file_not_found\.if.*'):
        my_meta_model.model_from_file(
            join(base, "interface_model1", "model_b", "app_error2.if"))
def test_metamodel_provider_utf_16_le_basic_test():
    """
    Check that the global MetaModel Provider works (basic function)
    with utf-16-le encoded model files.
    """
    #################################
    # META MODEL DEF
    #################################
    this_folder = abspath(dirname(__file__))
    # os.path.join instead of '+' string concatenation for portable
    # paths (consistent with the other tests in this file).
    mm_components = metamodel_from_file(
        join(this_folder, 'metamodel_provider_utf-16-le', 'Components.tx'))
    mm_components.register_scope_providers({
        "*.*": scoping_providers.FQNImportURI(),
        "Connection.from_port":
            scoping_providers.RelativeName("from_inst.component.slots"),
        "Connection.to_port":
            scoping_providers.RelativeName("to_inst.component.slots"),
    })

    mm_users = metamodel_from_file(
        join(this_folder, 'metamodel_provider_utf-16-le', 'Users.tx'))
    mm_users.register_scope_providers({
        "*.*": scoping_providers.FQNImportURI(),
    })

    scoping.MetaModelProvider.add_metamodel("*.components", mm_components)
    scoping.MetaModelProvider.add_metamodel("*.users", mm_users)

    #################################
    # MODEL PARSING
    #################################
    my_model = mm_users.model_from_file(
        join(this_folder, "metamodel_provider_utf-16-le", "example.users"),
        encoding='utf-16-le')

    #################################
    # TEST MODEL
    #################################
    user = get_unique_named_object_in_all_models(my_model, "pi")
    action1 = get_unique_named_object_in_all_models(my_model, "action1")
    assert user.instance is action1
def get_BwithImport_mm():
    """Build the B meta model (importURI variant) on the shared repo."""
    mm_B = metamodel_from_str(grammarBWithImport,
                              global_repository=global_repo)
    # default scope provider with importURI support
    mm_B.register_scope_providers(
        {"*.*": scoping_providers.FQNImportURI()})
    return mm_B
def test_model_with_circular_imports():
    """
    Basic test for FQNImportURI + circular imports.
    """
    #################################
    # META MODEL DEF
    #################################
    base = abspath(dirname(__file__))
    my_meta_model = metamodel_from_file(
        join(base, 'interface_model1', 'Interface.tx'))
    my_meta_model.register_scope_providers(
        {"*.*": scoping_providers.FQNImportURI()})

    #################################
    # MODEL PARSING
    #################################
    my_model = my_meta_model.model_from_file(
        join(base, "interface_model1", "model_c", "A.if"))

    #################################
    # TEST MODEL
    #################################
    imports = get_children_of_type("Import", my_model)
    assert len(imports) > 0
    for imp in imports:
        # each import statement loaded exactly one file
        assert 1 == len(imp._tx_loaded_models)
        assert imp.importURI in imp._tx_loaded_models[0]._tx_filename

    check_unique_named_object_has_class(my_model, "A", "Interface")
    a = get_unique_named_object(my_model, "A")

    def unique_named_child(name, root):
        # return the single child of root with the given name
        found = get_children(
            lambda x: hasattr(x, 'name') and x.name == name, root)
        assert len(found) == 1
        return found[0]

    a_self = unique_named_child("self", a)
    a_other = unique_named_child("other", a)
    a_other_self = unique_named_child("self", a_other.ref)
    a_other_other = unique_named_child("other", a_other.ref)

    # references resolved correctly across the circular import
    assert a_self.ref == a_other_other.ref
    assert a_self.ref != a_other.ref
    assert a_other.ref == a_other_self.ref
    assert a_other.ref != a_other_other.ref
def data_dsl_s():
    """Data DSL meta model; uses a per-meta-model global repository."""
    mm_data = metamodel_from_file(os.path.join(current_dir, 'Data.tx'),
                                  global_repository=True)
    # Note: it is better to share one common repo instead of one repo
    # per meta model.
    mm_data.register_scope_providers(
        {"*.*": scoping_providers.FQNImportURI()})
    return mm_data
def main(debug=False):
    """
    Load a GRS model (argv[1]), resolve its imports, interpret it and
    save the resulting ROS system as an XMI model (argv[2], optional,
    defaults to ../models/generos.xmi).
    """
    # Go to the script's own directory so relative paths resolve.
    if str(os.path.dirname(__file__)) != '':
        os.chdir(str(os.path.dirname(__file__)))

    # Check argument count.
    if len(sys.argv) < 2 or len(sys.argv) > 3:
        print('Please give at least a GRS file name (input model) and '
              'optionally an XMI file name (output model)')
        sys.exit(0)

    # Obtain GRS model filename.
    grs_filename = os.path.relpath(sys.argv[1], str(os.getcwd()))

    # Obtain XMI model filename.
    if len(sys.argv) == 3:
        xmi_filename = sys.argv[2]
    else:
        xmi_filename = '../models/generos.xmi'

    # Load grammar.  BUGFIX: honour the 'debug' parameter instead of
    # hard-coding debug=False.
    dsl_metamodel = metamodel_from_file('generos.tx', debug=debug)

    # Convert importURI string (if needed).
    # NOTE(review): 'conv' is defined but never passed to FQNImportURI
    # (importURI_converter=conv) -- confirm whether that was intended.
    def conv(i):
        return i.replace(".", "/") + ".grs"

    # Scope providers.
    dsl_metamodel.register_scope_providers(
        {"*.*": scoping_providers.FQNImportURI(importAs=True)})

    def resolve_imports(current_model):
        """Recursively attach the commands of imported models."""
        imports = get_children_of_type("Import", current_model)
        for i in imports:
            for m in i._tx_loaded_models:
                # Recursively attach commands of deeper imports first.
                m = resolve_imports(m)
                # Attach commands of the submodel to the main model.
                current_model.commands.extend(m.commands)
        return current_model

    # Load model.
    model = dsl_metamodel.model_from_file(grs_filename)
    resolve_imports(model)

    # Fire up the generation.
    system = RosSystem()
    system.interpret(model)

    # Create resource set for the XMI export.
    global_registry[Ecore.nsURI] = Ecore
    rset = ResourceSet()
    rset.metamodel_registry[metageneros.nsURI] = metageneros
    model_res = rset.create_resource(URI(xmi_filename))

    # Save.
    model_res.append(system.rosystem)
    model_res.save()
def test_model_export():
    """
    Check that the export function (to graphviz dot format) works with
    a model distributed across different files: all filenames and some
    elements from every model file must appear in the output.
    """
    #################################
    # META MODEL DEF
    #################################
    this_folder = dirname(abspath(__file__))

    def get_meta_model(provider, grammar_file_name):
        mm = metamodel_from_file(join(this_folder, grammar_file_name),
                                 debug=False)
        mm.register_scope_providers({
            "*.*": provider,
            "Call.method": scoping_providers.ExtRelativeName(
                "obj.ref", "methods", "extends")
        })
        return mm

    import_lookup_provider = scoping_providers.FQNImportURI()

    a_mm = get_meta_model(
        import_lookup_provider,
        join(this_folder, "metamodel_provider3", "A.tx"))
    b_mm = get_meta_model(
        import_lookup_provider,
        join(this_folder, "metamodel_provider3", "B.tx"))
    c_mm = get_meta_model(
        import_lookup_provider,
        join(this_folder, "metamodel_provider3", "C.tx"))

    scoping.MetaModelProvider.clear()
    scoping.MetaModelProvider.add_metamodel("*.a", a_mm)
    scoping.MetaModelProvider.add_metamodel("*.b", b_mm)
    scoping.MetaModelProvider.add_metamodel("*.c", c_mm)

    #################################
    # MODEL PARSING
    #################################
    m = a_mm.model_from_file(
        join(this_folder, "metamodel_provider3", "inheritance", "model_a.a"))

    out_file = io.StringIO()
    # export.model_export(
    #     None, "debug_test.dot", m._tx_model_repository.all_models )
    export.model_export_to_file(out_file, m)
    text = out_file.getvalue()
    print(text)

    assert "a2_very_long_name" in text
    assert "b2_very_long_name" in text
    # FIX: the original asserted the same condition twice; one check
    # of the imported model's filename suffices.
    assert "inheritance{}model_b.b".format(sep) in text
def data_dsl():
    """
    An example DSL for data definition.
    """
    grammar_path = os.path.join(os.path.dirname(__file__), 'Data.tx')
    data_mm = metamodel_from_file(grammar_path, global_repository=True)
    data_mm.register_scope_providers(
        {"*.*": scoping_providers.FQNImportURI()})
    return data_mm
def test_metamodel_provder_basic_test():
    """
    Check that the global MetaModel Provider works (basic function):
    the correct metamodel is used per file pattern, and registering
    a pattern twice fails.

    NOTE(review): the function name contains a typo ("provder"); it is
    kept unchanged so test discovery and any callers are unaffected.
    """
    #################################
    # META MODEL DEF
    #################################
    this_folder = abspath(dirname(__file__))
    # os.path.join instead of '+' string concatenation for portable
    # paths (consistent with the other tests in this file).
    mm_components = metamodel_from_file(
        join(this_folder, 'metamodel_provider', 'Components.tx'))
    mm_components.register_scope_providers({
        "*.*": scoping_providers.FQNImportURI(),
        "Connection.from_port":
            scoping_providers.RelativeName("from_inst.component.slots"),
        "Connection.to_port":
            scoping_providers.RelativeName("to_inst.component.slots"),
    })

    mm_users = metamodel_from_file(
        join(this_folder, 'metamodel_provider', 'Users.tx'))
    mm_users.register_scope_providers({
        "*.*": scoping_providers.FQNImportURI(),
    })

    scoping.MetaModelProvider.add_metamodel("*.components", mm_components)
    scoping.MetaModelProvider.add_metamodel("*.users", mm_users)
    # registering the same pattern again must fail
    with raises(Exception, match=r'.*pattern.*already registered.*'):
        scoping.MetaModelProvider.add_metamodel("*.users", mm_users)

    #################################
    # MODEL PARSING
    #################################
    my_model = mm_users.model_from_file(
        join(this_folder, "metamodel_provider", "example.users"))

    #################################
    # TEST MODEL
    #################################
    user = get_unique_named_object_in_all_models(my_model, "pi")
    action1 = get_unique_named_object_in_all_models(my_model, "action1")
    assert user.instance is action1
def _library_init():
    """
    Build the three demo meta models (types, data, flow), register
    them with the MetaModelProvider and attach their validators.  All
    three share a global model repository.
    """
    global _mm_types, _mm_data, _mm_flow
    global_repo = True
    current_dir = os.path.dirname(__file__)

    # -- types language ------------------------------------------------
    _mm_types = metamodel_from_file(os.path.join(current_dir, 'Types.tx'),
                                    global_repository=global_repo)
    textx.scoping.MetaModelProvider.add_metamodel("*.type", _mm_types)

    def check_type(t):
        # type names must start lowercase
        if t.name[0].isupper():
            raise textx.exceptions.TextXSyntaxError(
                "types must be lowercase", **tools.get_location(t))

    _mm_types.register_obj_processors({'Type': check_type})

    # -- data language -------------------------------------------------
    _mm_data = metamodel_from_file(os.path.join(current_dir, 'Data.tx'),
                                   global_repository=global_repo)
    textx.scoping.MetaModelProvider.add_metamodel("*.data", _mm_data)
    _mm_data.register_scope_providers(
        {"*.*": scoping_providers.FQNImportURI()})

    # -- flow language -------------------------------------------------
    _mm_flow = metamodel_from_file(os.path.join(current_dir, 'Flow.tx'),
                                   global_repository=global_repo)
    textx.scoping.MetaModelProvider.add_metamodel("*.flow", _mm_flow)
    _mm_flow.register_scope_providers(
        {"*.*": scoping_providers.FQNImportURI()})

    def check_flow(f):
        # connected algos must agree on the exchanged data type
        if f.algo1.outp != f.algo2.inp:
            raise textx.exceptions.TextXSemanticError(
                "algo data types must match", **tools.get_location(f))

    _mm_flow.register_obj_processors({'Flow': check_flow})
def lang():
    """Meta model with FQNImportURI scoping and an Algo validator."""
    mm = textx.metamodel_from_str(grammar)
    mm.register_scope_providers({"*.*": scoping_providers.FQNImportURI()})

    def algo_check(a):
        # Algo parameters must not carry a datatype flag.
        for p in a.parameters:
            if p.datatype is not None:
                raise TextXSemanticError(
                    "parameter is not allowed to have {} flag".format(
                        p.datatype), **get_location(p))

    mm.register_obj_processors({"Algo": algo_check})
    return mm
def second_language():
    """
    Meta model referencing 'first-test-lang' (registered elsewhere)
    and supporting includes via FQNImportURI.
    """
    mm = metamodel_from_str(r'''
        reference first-test-lang as f
        Model: includes*=Include refs+=Reference;
        Reference: 'ref' ref=[f.First];
        Include: 'include' importURI=STRING;
    ''', global_repository=True)
    mm.register_scope_providers({"*.*": scoping_providers.FQNImportURI()})
    return mm
def get_mm(debug=False, global_scope=True):
    """Build the goal_dsl meta model with importAs-style imports."""
    mm = metamodel_from_file(join(this_dir, 'grammar', 'goal_dsl.tx'),
                             global_repository=global_scope,
                             debug=debug)
    mm.register_scope_providers({
        "*.*": scoping_providers.FQNImportURI(
            importAs=True,
            # importURI_to_scope_name=importURI_to_scope_name
        )
    })
    return mm
def flow_dsl_s():
    """Flow DSL meta model; uses a per-meta-model global repository."""
    mm_flow = metamodel_from_file(os.path.join(current_dir, 'Flow.tx'),
                                  global_repository=True)
    # Note: it is better to share one common repo instead of one repo
    # per meta model.
    mm_flow.register_scope_providers(
        {"*.*": scoping_providers.FQNImportURI()})

    def check_flow(f):
        # connected algos must agree on the exchanged data type
        if f.algo1.outp != f.algo2.inp:
            raise TextXSemanticError("algo data types must match",
                                     **tools.get_location(f))

    mm_flow.register_obj_processors({'Flow': check_flow})
    return mm_flow
def test_model_with_circular_imports():
    """
    FQNImportURI with circular imports between model files: references
    must resolve to the expected objects across the cycle.
    """
    #################################
    # META MODEL DEF
    #################################
    this_folder = abspath(dirname(__file__))
    # os.path.join instead of '+' string concatenation for portable
    # paths (consistent with the other tests in this file).
    my_meta_model = metamodel_from_file(
        join(this_folder, 'interface_model1', 'Interface.tx'))
    my_meta_model.register_scope_providers(
        {"*.*": scoping_providers.FQNImportURI()})

    #################################
    # MODEL PARSING
    #################################
    my_model = my_meta_model.model_from_file(
        join(this_folder, "interface_model1", "model_c", "A.if"))

    #################################
    # TEST MODEL
    #################################
    check_unique_named_object_has_class(my_model, "A", "Interface")
    a = get_unique_named_object(my_model, "A")

    a_self = get_children(
        lambda x: hasattr(x, 'name') and x.name == "self", a)
    assert len(a_self) == 1
    a_self = a_self[0]

    a_other = get_children(
        lambda x: hasattr(x, 'name') and x.name == "other", a)
    assert len(a_other) == 1
    a_other = a_other[0]

    a_other_self = get_children(
        lambda x: hasattr(x, 'name') and x.name == "self", a_other.ref)
    assert len(a_other_self) == 1
    a_other_self = a_other_self[0]

    a_other_other = get_children(
        lambda x: hasattr(x, 'name') and x.name == "other", a_other.ref)
    assert len(a_other_other) == 1
    a_other_other = a_other_other[0]

    # references resolved correctly across the circular import
    assert a_self.ref == a_other_other.ref
    assert a_self.ref != a_other.ref
    assert a_other.ref == a_other_self.ref
    assert a_other.ref != a_other_other.ref
def get_metamodel():
    """Data meta model referencing the 'types' language."""
    mm = metamodel_from_str(r'''
        reference types as t
        Model: includes*=Include data+=Data;
        Data: 'data' name=ID '{' attributes+=Attribute '}';
        Attribute: name=ID ':' type=[t.Type];
        Include: '#include' importURI=STRING;
        Comment: /\/\/.*$/;
    ''', global_repository=global_repo)
    mm.register_scope_providers(
        {"*.*": scoping_providers.FQNImportURI()})
    return mm
def flow_dsl():
    """
    An example DSL for data flow processing definition.
    """
    grammar_path = os.path.join(os.path.dirname(__file__), 'Flow.tx')
    flow_mm = metamodel_from_file(grammar_path, global_repository=True)
    flow_mm.register_scope_providers(
        {"*.*": scoping_providers.FQNImportURI()})

    def check_flow(f):
        # connected algos must agree on the exchanged data type
        if f.algo1.outp != f.algo2.inp:
            raise TextXSemanticError("algo data types must match",
                                     **tools.get_location(f))

    flow_mm.register_obj_processors({'Flow': check_flow})
    return flow_mm
def test_model_with_imports():
    """
    Basic test for FQNImportURI (good case).
    """
    #################################
    # META MODEL DEF
    #################################
    base = abspath(dirname(__file__))
    my_meta_model = metamodel_from_file(
        join(base, 'interface_model1', 'Interface.tx'))
    my_meta_model.register_scope_providers(
        {"*.*": scoping_providers.FQNImportURI()})

    #################################
    # MODEL PARSING
    #################################
    app_path = join(base, "interface_model1", "model_b", "app.if")
    my_model = my_meta_model.model_from_file(app_path)
    my_model2 = my_meta_model.model_from_file(app_path)

    #################################
    # TEST MODEL
    #################################
    # "socket" lives in the imported file base.if
    inner_model = my_model._tx_model_repository.all_models.filename_to_model[
        join(base, "interface_model1", "model_b", "base.if")]
    check_unique_named_object_has_class(inner_model, "socket", "Interface")

    # "s.s1" references the socket interface
    a = get_unique_named_object(inner_model, "socket")
    s1 = get_unique_named_object(inner_model, "s1")
    userid = get_unique_named_object(my_model, "userid")
    assert a == s1.ref

    # without a global repository each load produces distinct objects
    userid2 = get_unique_named_object(my_model2, "userid")
    assert userid != userid2
    assert userid.ref != userid2.ref
    assert userid.ref.__class__.__name__ == "RawType"
def test_multi_metamodel_references_with_importURI():
    """
    Two meta models sharing one GlobalModelRepository, where B-models
    may reference objects from A-models via importURI.
    """
    # A shared global repo avoids double instantiation of model
    # elements with circular or diamond shaped includes.  If B includes
    # A but not vice versa, each meta model could also use its own
    # per-meta-model repo (global_repository=True).  Circular
    # meta-model dependencies would require shared grammars (see
    # test_metamodel_provider3.py), since a meta model cannot reference
    # another one that has not been constructed yet (mm_A cannot
    # reference mm_B if mm_B already references mm_A; one of them must
    # be constructed first).
    global_repo = scoping.GlobalModelRepository()

    # Two meta models on the shared repo; the second may reference
    # the first.
    mm_A = metamodel_from_str(grammarA, global_repository=global_repo)
    mm_B = metamodel_from_str(grammarBWithImport,
                              global_repository=global_repo,
                              referenced_metamodels=[mm_A])

    # default scope provider supporting the importURI feature
    mm_B.register_scope_providers({"*.*": scoping_providers.FQNImportURI()})

    # map file endings to the meta models
    scoping.MetaModelProvider.clear()
    scoping.MetaModelProvider.add_metamodel("*.a", mm_A)
    scoping.MetaModelProvider.add_metamodel("*.b", mm_B)

    # load a model from B which includes a model from A
    current_dir = os.path.dirname(__file__)
    model = mm_B.model_from_file(
        os.path.join(current_dir, 'multi_metamodel', 'refs', 'b.b'))

    # check that the classes of the correct meta model are used
    # (and that the model was loaded at all)
    assert model.b[0].__class__ == mm_B[model.b[0].__class__.__name__]
    assert model.b[0].a.__class__ == mm_A[model.b[0].a.__class__.__name__]

    # clean up
    scoping.MetaModelProvider.clear()