def library_init(repo_selector):
    """Register the 'data' language.

    repo_selector chooses between a private model repository
    ("no global scope") and sharing the repository of the referenced
    'types' meta-model ("global repo").
    """
    if repo_selector == "no global scope":
        global_repo = False
    elif repo_selector == "global repo":
        # get the global repo from the inherited meta model:
        global_repo = LibTypes.get_metamodel()._tx_model_repository
    else:
        raise Exception("unexpected parameter 'repo_selector={}'".format(
            repo_selector))

    def get_metamodel():
        # 'data' models reference Type objects from the 'types' DSL
        metamodel = metamodel_from_str(r'''
            reference types as t
            Model: includes*=Include data+=Data;
            Data: 'data' name=ID '{' attributes+=Attribute '}';
            Attribute: name=ID ':' type=[t.Type];
            Include: '#include' importURI=STRING;
            Comment: /\/\/.*$/;
        ''', global_repository=global_repo)
        metamodel.register_scope_providers(
            {"*.*": scoping_providers.FQNImportURI()})
        return metamodel

    register_language('data', pattern='*.data', metamodel=get_metamodel)
def library_init(repo_selector):
    """Register the 'flow' language.

    repo_selector chooses between a private model repository
    ("no global scope") and sharing the repository of the referenced
    'data' meta-model ("global repo").
    """
    if repo_selector == "no global scope":
        global_repo = False
    elif repo_selector == "global repo":
        # get the global repo from the inherited meta model:
        global_repo = LibData.get_metamodel()._tx_model_repository
    else:
        raise Exception("unexpected parameter 'repo_selector={}'".format(
            repo_selector))

    def get_metamodel():
        # 'flow' models reference Data objects from the 'data' DSL
        metamodel = metamodel_from_str(r'''
            reference data as d
            Model: includes*=Include algos+=Algo flows+=Flow;
            Algo: 'algo' name=ID ':' inp=[d.Data] '->' outp=[d.Data];
            Flow: 'connect' algo1=[Algo] '->' algo2=[Algo] ;
            Include: '#include' importURI=STRING;
            Comment: /\/\/.*$/;
        ''', global_repository=global_repo)
        metamodel.register_scope_providers(
            {"*.*": scoping_providers.FQNImportURI()})

        def check_flow(flow):
            # a connection is only valid if output/input data types agree
            if flow.algo1.outp != flow.algo2.inp:
                raise textx.exceptions.TextXSemanticError(
                    "algo data types must match", **tools.get_location(flow))

        metamodel.register_obj_processors({'Flow': check_flow})
        return metamodel

    register_language('flow', pattern='*.flow', metamodel=get_metamodel)
def test_register_language():
    """
    Test both style of language registration.
    """
    def check_registration():
        # the registered language must be retrievable with all attributes
        language = language_description('test-lang')
        assert type(language) is LanguageDesc
        assert language.name == 'test-lang'
        assert language.pattern == '*.test'
        assert language.description == 'test-lang description'
        assert language.metamodel == mymetamodel_callable

    # style 1: keyword-argument registration
    clear_language_registrations()
    register_language('test-lang',
                      pattern='*.test',
                      description='test-lang description',
                      metamodel=mymetamodel_callable)
    check_registration()

    # style 2: registration via a LanguageDesc instance
    clear_language_registrations()
    register_language(LanguageDesc('test-lang',
                                   pattern='*.test',
                                   description='test-lang description',
                                   metamodel=mymetamodel_callable))
    check_registration()
def library_init(repo_selector):
    """Register the 'types' language.

    repo_selector selects whether a fresh global repository is used
    ("global repo") or none at all ("no global scope").
    """
    if repo_selector == "no global scope":
        global_repo = False
    elif repo_selector == "global repo":
        global_repo = True
    else:
        raise Exception("unexpected parameter 'repo_selector={}'".format(
            repo_selector))

    def get_metamodel():
        metamodel = metamodel_from_str(r'''
            Model: types+=Type;
            Type: 'type' name=ID;
            Comment: /\/\/.*$/;
        ''', global_repository=global_repo)

        def check_type(type_obj):
            # naming convention enforced via object processor
            if type_obj.name[0].isupper():
                raise textx.exceptions.TextXSyntaxError(
                    "types must be lowercase",
                    **tools.get_location(type_obj))

        metamodel.register_obj_processors({'Type': check_type})
        return metamodel

    register_language('types', pattern='*.type', metamodel=get_metamodel)
def library_init(repo_selector):
    """Register the 'types' language (see repo_selector for repo sharing)."""
    if repo_selector == "no global scope":
        global_repo = False
    elif repo_selector == "global repo":
        global_repo = True
    else:
        raise Exception("unexpected parameter 'repo_selector={}'"
                        .format(repo_selector))

    def get_metamodel():
        mm = metamodel_from_str(
            r'''
            Model: types+=Type;
            Type: 'type' name=ID;
            Comment: /\/\/.*$/;
            ''',
            global_repository=global_repo)

        def check_type(t):
            # object processor: reject capitalized type names
            if t.name[0].isupper():
                raise textx.exceptions.TextXSyntaxError(
                    "types must be lowercase", **tools.get_location(t))

        mm.register_obj_processors({'Type': check_type})
        return mm

    register_language('types', pattern='*.type', metamodel=get_metamodel)
def library_init(repo_selector):
    """Register the 'data' language; may share the 'types' repository."""
    if repo_selector == "no global scope":
        global_repo = False
    elif repo_selector == "global repo":
        # get the global repo from the inherited meta model:
        global_repo = LibTypes.get_metamodel()._tx_model_repository
    else:
        raise Exception("unexpected parameter 'repo_selector={}'"
                        .format(repo_selector))

    def get_metamodel():
        mm = metamodel_from_str(
            r'''
            reference types as t
            Model: includes*=Include data+=Data;
            Data: 'data' name=ID '{' attributes+=Attribute '}';
            Attribute: name=ID ':' type=[t.Type];
            Include: '#include' importURI=STRING;
            Comment: /\/\/.*$/;
            ''',
            global_repository=global_repo)
        # importURI-based cross-file references with fully-qualified names
        mm.register_scope_providers(
            {"*.*": scoping_providers.FQNImportURI()})
        return mm

    register_language('data', pattern='*.data', metamodel=get_metamodel)
def test_metamodel_provider_utf_16_le_basic_test():
    """
    This test checks that the global MetaModel Provider works (basic
    function). It uses utf-16-le for the model files.
    """
    #################################
    # META MODEL DEF
    #################################
    def here(*parts):
        # all artifacts of this test live in one folder next to this file
        return join(abspath(dirname(__file__)),
                    'metamodel_provider_utf-16-le', *parts)

    mm_components = metamodel_from_file(here('Components.tx'))
    mm_components.register_scope_providers({
        "*.*": scoping_providers.FQNImportURI(),
        "Connection.from_port":
            scoping_providers.RelativeName("from_inst.component.slots"),
        "Connection.to_port":
            scoping_providers.RelativeName("to_inst.component.slots"),
    })

    mm_users = metamodel_from_file(here('Users.tx'))
    mm_users.register_scope_providers({
        "*.*": scoping_providers.FQNImportURI(),
    })

    clear_language_registrations()
    register_language('components-dsl',
                      pattern='*.components',
                      description='demo',
                      metamodel=mm_components)  # or a factory
    register_language('users-dsl',
                      pattern='*.users',
                      description='demo',
                      metamodel=mm_users)  # or a factory

    #################################
    # MODEL PARSING
    #################################
    my_model = mm_users.model_from_file(here("example.users"),
                                        encoding='utf-16-le')

    #################################
    # TEST MODEL
    #################################
    user = get_unique_named_object_in_all_models(my_model, "pi")
    action1 = get_unique_named_object_in_all_models(my_model, "action1")
    assert user.instance is action1
def test_metamodel_provider_utf_16_le_basic_test():
    """
    This test checks that the global MetaModel Provider works (basic
    function). It uses utf-16-le for the model files.
    """
    #################################
    # META MODEL DEF
    #################################
    def model_path(*parts):
        # resolve a path inside the utf-16-le fixture folder
        return join(abspath(dirname(__file__)),
                    'metamodel_provider_utf-16-le', *parts)

    mm_components = metamodel_from_file(model_path('Components.tx'))
    mm_components.register_scope_providers({
        "*.*": scoping_providers.FQNImportURI(),
        "Connection.from_port":
            scoping_providers.RelativeName("from_inst.component.slots"),
        "Connection.to_port":
            scoping_providers.RelativeName("to_inst.component.slots"),
    })

    mm_users = metamodel_from_file(model_path('Users.tx'))
    mm_users.register_scope_providers({
        "*.*": scoping_providers.FQNImportURI(),
    })

    clear_language_registrations()
    register_language('components-dsl',
                      pattern='*.components',
                      description='demo',
                      metamodel=mm_components)  # or a factory
    register_language('users-dsl',
                      pattern='*.users',
                      description='demo',
                      metamodel=mm_users)  # or a factory

    #################################
    # MODEL PARSING
    #################################
    my_model = mm_users.model_from_file(model_path("example.users"),
                                        encoding='utf-16-le')

    #################################
    # TEST MODEL
    #################################
    user = get_unique_named_object_in_all_models(my_model, "pi")
    action1 = get_unique_named_object_in_all_models(my_model, "action1")
    assert user.instance is action1
def test_metamodel_callable_must_return_a_metamodel():
    """
    Test that meta-model callable must return an instance of TextXMetaModel.
    """
    clear_language_registrations()

    def invalid_metamodel_callable():
        # deliberately not a TextXMetaModel
        return 42

    register_language('test-lang',
                      pattern='*.test',
                      description='test-lang description',
                      metamodel=invalid_metamodel_callable)

    # resolving the meta-model must detect the wrong return type
    with pytest.raises(TextXRegistrationError,
                       match='.*Meta-model type for language.*'):
        metamodel_for_language('test-lang')
def test_builtin_models_are_searched_by_rrel():
    """RREL reference resolution must also search built-in models."""
    register_language('builtin_types', '*.type', metamodel=types_mm)

    # provide 'int' and 'bool' as built-in model objects
    repo = ModelRepository()
    repo.add_model(types_mm.model_from_str('type int type bool'))

    entity_mm = metamodel_from_str(entity_mm_str, builtin_models=repo)
    model = entity_mm.model_from_str(r'''
        entity First {
            first : bool
        }
    ''')

    prop_type = model.entities[0].properties[0].type
    assert prop_type.__class__.__name__ == 'BaseType'
    assert prop_type.name == 'bool'
def test_register_already_existing_language():
    """
    Test that trying to register a language with the name already registered
    will raise `TextXRegistrationError`.
    """
    clear_language_registrations()

    # identical registration arguments, used twice
    reg_kwargs = dict(pattern='*.test',
                      description='test-lang description',
                      metamodel=mymetamodel_callable)

    register_language('test-lang', **reg_kwargs)

    with pytest.raises(TextXRegistrationError, match='.*already registered.*'):
        register_language('test-lang', **reg_kwargs)
def test_multiple_languages_for_the_same_pattern():
    """
    If multiple languages are registered for the same file pattern
    `language_for_file` shall raise `TextXRegistrationError`.
    """
    clear_language_registrations()

    # two distinct languages claiming the same '*.test' pattern
    for name, descr in (('test-lang', 'test-lang description'),
                        ('test-lang2', 'test-lang2 description')):
        register_language(name,
                          pattern='*.test',
                          description=descr,
                          metamodel=mymetamodel_callable)

    with pytest.raises(TextXRegistrationError,
                       match='Multiple languages can parse.*'):
        language_for_file('Somefile.test')

    # the plural query, however, must return both languages
    assert len(languages_for_file('Somefile.test')) == 2
def test_multi_metamodel_obj_proc():
    """Object processors must run for models loaded via multiple meta-models
    sharing one global repository (regression test for issue #140)."""
    folder = os.path.join(os.path.dirname(__file__), "issue140")

    global_repo = scoping.GlobalModelRepository()
    repo = scoping_providers.PlainNameGlobalRepo()
    repo.register_models(os.path.dirname(__file__) + "/issue140/*.a")

    mm_A = metamodel_from_file(os.path.join(folder, "A.tx"),
                               global_repository=global_repo,
                               classes=[C1])
    mm_B = metamodel_from_file(os.path.join(folder, "B.tx"),
                               global_repository=global_repo,
                               classes=[C1])
    mm_B.register_scope_providers({"*.*": repo})

    def proc(a):
        print(a)

    # same processor on both meta-models
    mm_A.register_obj_processors({"C1": proc})
    mm_B.register_obj_processors({"C1": proc})

    clear_language_registrations()
    register_language('a-dsl',
                      pattern='*.a',
                      description='Test Lang A',
                      metamodel=mm_A)
    register_language('b-dsl',
                      pattern='*.b',
                      description='Test Lang B',
                      metamodel=mm_B)

    mm_B.model_from_file(os.path.join(folder, "b.b"))
def library_init(repo_selector):
    """Register the 'flow' language; may share the 'data' repository."""
    if repo_selector == "no global scope":
        global_repo = False
    elif repo_selector == "global repo":
        # get the global repo from the inherited meta model:
        global_repo = LibData.get_metamodel()._tx_model_repository
    else:
        raise Exception("unexpected parameter 'repo_selector={}'"
                        .format(repo_selector))

    def get_metamodel():
        mm = metamodel_from_str(
            r'''
            reference data as d
            Model: includes*=Include algos+=Algo flows+=Flow;
            Algo: 'algo' name=ID ':' inp=[d.Data] '->' outp=[d.Data];
            Flow: 'connect' algo1=[Algo] '->' algo2=[Algo] ;
            Include: '#include' importURI=STRING;
            Comment: /\/\/.*$/;
            ''',
            global_repository=global_repo)
        mm.register_scope_providers(
            {"*.*": scoping_providers.FQNImportURI()})

        def check_flow(f):
            # connected algos must agree on the exchanged data type
            if f.algo1.outp != f.algo2.inp:
                raise textx.exceptions.TextXSemanticError(
                    "algo data types must match", **tools.get_location(f))

        mm.register_obj_processors({'Flow': check_flow})
        return mm

    register_language('flow', pattern='*.flow', metamodel=get_metamodel)
def register_languages():
    """Register languages A, B and BwithImport, all sharing one global
    model repository; return the shared PlainNameGlobalRepo provider."""
    clear_language_registrations()

    global_repo = scoping.GlobalModelRepository()
    global_repo_provider = scoping_providers.PlainNameGlobalRepo()

    def plain_mm_factory(grammar):
        # A and B only differ in their grammar; both use the shared
        # plain-name global-repo scope provider.
        def factory():
            mm = metamodel_from_str(grammar, global_repository=global_repo)
            mm.register_scope_providers({"*.*": global_repo_provider})
            return mm
        return factory

    def get_BwithImport_mm():
        mm = metamodel_from_str(grammarBWithImport,
                                global_repository=global_repo)
        # define a default scope provider supporting the importURI feature
        mm.register_scope_providers(
            {"*.*": scoping_providers.FQNImportURI()})
        return mm

    register_language('A', pattern="*.a",
                      metamodel=plain_mm_factory(grammarA))
    register_language('B', pattern="*.b",
                      metamodel=plain_mm_factory(grammarB))
    register_language('BwithImport', pattern="*.b",
                      metamodel=get_BwithImport_mm)

    return global_repo_provider
def test_language_reference_keyword():
    """The 'reference' grammar keyword must resolve to a language that was
    registered earlier via the @language decorator."""
    @language('first-test-lang', '*.ftest')
    def first_language():
        return metamodel_from_str(r'''
            Model: firsts*=First;
            First: name=ID num=INT;
        ''')

    register_language(first_language)

    @language('second-test-lang', '*.stest')
    def second_language():
        # 'first-test-lang' can be referenced here as it was registered above
        mm = metamodel_from_str(r'''
            reference first-test-lang as f
            Model: includes*=Include refs+=Reference;
            Reference: 'ref' ref=[f.First];
            Include: 'include' importURI=STRING;
        ''', global_repository=True)
        mm.register_scope_providers(
            {"*.*": scoping_providers.FQNImportURI()})
        return mm

    register_language(second_language)

    model_file = os.path.join(os.path.dirname(__file__), 'model.stest')
    model = metamodel_for_language('second-test-lang').model_from_file(
        model_file)

    assert len(model.refs) == 2
    first_ref = model.refs[0].ref
    assert first_ref.name == 'first1'
    assert first_ref.num == 42
def test_language_reference_keyword():
    """Cross-language references via the 'reference' keyword must work for
    decorator-registered languages."""
    @language('first-test-lang', '*.ftest')
    def first_language():
        return metamodel_from_str(r'''
            Model: firsts*=First;
            First: name=ID num=INT;
        ''')

    register_language(first_language)

    @language('second-test-lang', '*.stest')
    def second_language():
        # 'first-test-lang' is resolvable because it was registered above
        mm = metamodel_from_str(r'''
            reference first-test-lang as f
            Model: includes*=Include refs+=Reference;
            Reference: 'ref' ref=[f.First];
            Include: 'include' importURI=STRING;
        ''', global_repository=True)
        mm.register_scope_providers(
            {"*.*": scoping_providers.FQNImportURI()})
        return mm

    register_language(second_language)

    mm = metamodel_for_language('second-test-lang')
    model = mm.model_from_file(
        os.path.join(os.path.dirname(__file__), 'model.stest'))

    assert len(model.refs) == 2
    assert model.refs[0].ref.name == 'first1'
    assert model.refs[0].ref.num == 42
def register_languages():
    """Register languages A (with a custom, write-protected class), B and
    BwithImport sharing a global repository; return the repo provider."""
    clear_language_registrations()

    global_repo = scoping.GlobalModelRepository()
    global_repo_provider = scoping_providers.PlainNameGlobalRepo()

    class A(object):
        # Instances are populated once in __init__ and are read-only
        # afterwards (see __setattr__ below).
        def __init__(self, **kwargs):
            super(A, self).__init__()
            # write via __dict__ to bypass the blocked __setattr__
            for k, v in kwargs.items():
                self.__dict__[k] = v

        def __setattr__(self, name, value):
            raise Exception("test: this is not allowed.")

    def get_A_mm():
        mm_A = metamodel_from_str(grammarA,
                                  global_repository=global_repo,
                                  classes=[A])
        mm_A.register_scope_providers({"*.*": global_repo_provider})
        return mm_A

    def get_B_mm():
        mm_B = metamodel_from_str(grammarB,
                                  global_repository=global_repo)
        mm_B.register_scope_providers({"*.*": global_repo_provider})
        return mm_B

    def get_BwithImport_mm():
        mm_B = metamodel_from_str(grammarBWithImport,
                                  global_repository=global_repo)
        # define a default scope provider supporting the importURI feature
        mm_B.register_scope_providers(
            {"*.*": scoping_providers.FQNImportURI()})
        return mm_B

    register_language('A', pattern="*.a", metamodel=get_A_mm)
    register_language('B', pattern="*.b", metamodel=get_B_mm)
    register_language('BwithImport', pattern="*.b",
                      metamodel=get_BwithImport_mm)

    return global_repo_provider
def test_model_export():
    """
    This test checks that the export function (to graphdotviz) works with a
    model distributed across different files. It is checked that all
    filenames are included in the output and that some elements from every
    model file are included in the output.
    """
    #################################
    # META MODEL DEF
    #################################
    this_folder = dirname(abspath(__file__))

    def get_meta_model(provider, grammar_file_name):
        # shared meta-model setup for A/B/C languages
        mm = metamodel_from_file(join(this_folder, grammar_file_name),
                                 debug=False)
        mm.register_scope_providers({
            "*.*": provider,
            "Call.method": scoping_providers.ExtRelativeName("obj.ref",
                                                             "methods",
                                                             "extends")
        })
        return mm

    import_lookup_provider = scoping_providers.FQNImportURI()

    a_mm = get_meta_model(
        import_lookup_provider,
        join(this_folder, "metamodel_provider3", "A.tx"))
    b_mm = get_meta_model(
        import_lookup_provider,
        join(this_folder, "metamodel_provider3", "B.tx"))
    c_mm = get_meta_model(
        import_lookup_provider,
        join(this_folder, "metamodel_provider3", "C.tx"))

    clear_language_registrations()
    register_language(
        'a-dsl', pattern='*.a', description='Test Lang A', metamodel=a_mm)
    register_language(
        'b-dsl', pattern='*.b', description='Test Lang B', metamodel=b_mm)
    register_language(
        'c-dsl', pattern='*.c', description='Test Lang C', metamodel=c_mm)

    #################################
    # MODEL PARSING
    #################################
    m = a_mm.model_from_file(
        join(this_folder, "metamodel_provider3", "inheritance", "model_a.a"))

    out_file = io.StringIO()
    export.model_export_to_file(out_file, m)
    text = out_file.getvalue()
    print(text)

    # elements from both model files must appear in the export
    assert "a2_very_long_name" in text
    assert "b2_very_long_name" in text
    # Fix: the original asserted "model_b.b" twice (copy/paste error);
    # per the docstring every filename must be present, so check
    # model_a.a as well as model_b.b.
    assert "inheritance{}model_a.a".format(sep) in text
    assert "inheritance{}model_b.b".format(sep) in text
def test_metamodel_provider_basic_test():
    """
    This test checks that the global MetaModel Provider works (basic
    function):
    It is checked that no filename patterns are used twice.
    It is checked that the correct metamodel is used to load a model (by
    loading a model constellation using two metamodels).
    Note: the MetaModelProvider is obsolete. This test is fixed in terms of
    how to handle the filename --> metamodel resolution in textx >= 2.x
    """
    #################################
    # META MODEL DEF
    #################################
    def fixture(*parts):
        # resolve a path inside the 'metamodel_provider' fixture folder
        return join(abspath(dirname(__file__)), 'metamodel_provider', *parts)

    mm_components = metamodel_from_file(fixture('Components.tx'))
    mm_components.register_scope_providers({
        "*.*": scoping_providers.FQNImportURI(),
        "Connection.from_port":
            scoping_providers.RelativeName("from_inst.component.slots"),
        "Connection.to_port":
            scoping_providers.RelativeName("to_inst.component.slots"),
    })

    mm_users = metamodel_from_file(fixture('Users.tx'))
    mm_users.register_scope_providers({
        "*.*": scoping_providers.FQNImportURI(),
    })

    clear_language_registrations()
    register_language('components-dsl',
                      pattern='*.components',
                      description='demo',
                      metamodel=mm_components)  # or a factory
    register_language('users-dsl',
                      pattern='*.users',
                      description='demo',
                      metamodel=mm_users)  # or a factory

    # a second registration under the same name must be rejected
    with raises(Exception, match=r'.*already registered.*'):
        register_language('users-dsl',
                          pattern='*.users',
                          description='demo',
                          metamodel=mm_users)  # or a factory

    #################################
    # MODEL PARSING
    #################################
    my_model = mm_users.model_from_file(fixture("example.users"))

    #################################
    # TEST MODEL
    #################################
    user = get_unique_named_object_in_all_models(my_model, "pi")
    action1 = get_unique_named_object_in_all_models(my_model, "action1")
    assert user.instance is action1
def test_model_export():
    """
    This test checks that the export function (to graphdotviz) works with a
    model distributed across different files. It is checked that all
    filenames are included in the output and that some elements from every
    model file are included in the output.
    """
    #################################
    # META MODEL DEF
    #################################
    this_folder = dirname(abspath(__file__))

    def get_meta_model(provider, grammar_file_name):
        # shared meta-model setup for the A/B/C test languages
        mm = metamodel_from_file(join(this_folder, grammar_file_name),
                                 debug=False)
        mm.register_scope_providers({
            "*.*": provider,
            "Call.method": scoping_providers.ExtRelativeName("obj.ref",
                                                             "methods",
                                                             "extends")
        })
        return mm

    import_lookup_provider = scoping_providers.FQNImportURI()

    a_mm = get_meta_model(import_lookup_provider,
                          join(this_folder, "metamodel_provider3", "A.tx"))
    b_mm = get_meta_model(import_lookup_provider,
                          join(this_folder, "metamodel_provider3", "B.tx"))
    c_mm = get_meta_model(import_lookup_provider,
                          join(this_folder, "metamodel_provider3", "C.tx"))

    clear_language_registrations()
    register_language('a-dsl', pattern='*.a', description='Test Lang A',
                      metamodel=a_mm)
    register_language('b-dsl', pattern='*.b', description='Test Lang B',
                      metamodel=b_mm)
    register_language('c-dsl', pattern='*.c', description='Test Lang C',
                      metamodel=c_mm)

    #################################
    # MODEL PARSING
    #################################
    m = a_mm.model_from_file(
        join(this_folder, "metamodel_provider3", "inheritance", "model_a.a"))

    out_file = io.StringIO()
    export.model_export_to_file(out_file, m)
    text = out_file.getvalue()
    print(text)

    # elements from both model files must appear in the export
    assert "a2_very_long_name" in text
    assert "b2_very_long_name" in text
    # Fix: the original asserted "model_b.b" twice (copy/paste error);
    # per the docstring every filename must be present, so check
    # model_a.a as well as model_b.b.
    assert "inheritance{}model_a.a".format(sep) in text
    assert "inheritance{}model_b.b".format(sep) in text
def language_registered():
    """Helper: start from a clean registry with only 'test-lang' registered."""
    clear_language_registrations()
    register_language('test-lang',
                      metamodel=mymetamodel_callable,
                      pattern='*.test',
                      description='test-lang description')
def test_exception_from_included_model():
    """
    This test checks that an error induced by an included model (thrown
    via an object processor) is (a) thrown and (b) indicates the correct
    model location (file, line and col).
    """
    #################################
    # META MODEL DEF
    #################################
    this_folder = dirname(abspath(__file__))

    def get_meta_model(provider, grammar_file_name):
        mm = metamodel_from_file(join(this_folder, grammar_file_name),
                                 debug=False)
        mm.register_scope_providers({
            "*.*": provider,
            "Call.method": scoping_providers.ExtRelativeName("obj.ref",
                                                             "methods",
                                                             "extends")
        })

        def my_processor(m):
            from textx.exceptions import TextXSemanticError
            from textx.scoping.tools import get_location
            # 'd1' in the included model deliberately triggers the error
            if m.name == "d1":
                raise TextXSemanticError("d1 triggers artifical error",
                                         **get_location(m))

        mm.register_obj_processors({"Method": my_processor})
        return mm

    import_lookup_provider = scoping_providers.FQNImportURI()

    a_mm = get_meta_model(
        import_lookup_provider,
        join(this_folder, "metamodel_provider3", "A.tx"))
    b_mm = get_meta_model(
        import_lookup_provider,
        join(this_folder, "metamodel_provider3", "B.tx"))
    c_mm = get_meta_model(
        import_lookup_provider,
        join(this_folder, "metamodel_provider3", "C.tx"))

    clear_language_registrations()
    register_language(
        'a-dsl', pattern='*.a', description='Test Lang A', metamodel=a_mm)
    register_language(
        'b-dsl', pattern='*.b', description='Test Lang B', metamodel=b_mm)
    register_language(
        'c-dsl', pattern='*.c', description='Test Lang C', metamodel=c_mm)

    #################################
    # MODEL PARSING / TEST
    #################################
    import textx.exceptions
    # error location (file:line:col) of the included model must be reported
    with raises(textx.exceptions.TextXSemanticError,
                match=r'.*model_d\.b:5:3:.*d1 triggers artifical error'):
        a_mm.model_from_file(
            join(this_folder, "metamodel_provider3", "inheritance2",
                 "model_a.a"))

    #################################
    # END
    #################################
    clear_language_registrations()
def test_metamodel_provider_advanced_test3_global():
    """
    Advanced test for ExtRelativeName and PlainNameGlobalRepo.

    Here we have a global model repository shared between different
    meta models. The meta models interact (refer to each other,
    different directions).
    """
    #################################
    # META MODEL DEF
    #################################
    this_folder = dirname(abspath(__file__))

    def get_meta_model(global_repo, grammar_file_name):
        mm = metamodel_from_file(join(this_folder, grammar_file_name),
                                 debug=False, classes=[Cls, Obj])
        mm.register_scope_providers({
            "*.*": global_repo,
        })
        return mm

    global_repo_provider = scoping_providers.PlainNameGlobalRepo()
    # register all three model kinds from the 'circular' fixture folder
    for pattern in ("*.a", "*.b", "*.c"):
        global_repo_provider.register_models(
            join(this_folder, "metamodel_provider3", "circular", pattern))

    a_mm = get_meta_model(
        global_repo_provider,
        join(this_folder, "metamodel_provider3", "A.tx"))
    b_mm = get_meta_model(
        global_repo_provider,
        join(this_folder, "metamodel_provider3", "B.tx"))
    c_mm = get_meta_model(
        global_repo_provider,
        join(this_folder, "metamodel_provider3", "C.tx"))

    clear_language_registrations()
    register_language(
        'a-dsl', pattern='*.a', description='Test Lang A', metamodel=a_mm)
    register_language(
        'b-dsl', pattern='*.b', description='Test Lang B', metamodel=b_mm)
    register_language(
        'c-dsl', pattern='*.c', description='Test Lang C', metamodel=c_mm)

    #################################
    # MODEL PARSING
    #################################
    model_repo = global_repo_provider.load_models_in_model_repo().all_models

    #################################
    # TEST MODEL
    #################################
    def get_all(model_repo, what):
        # gather all objects of the given type from every loaded model
        return [obj
                for m in model_repo.filename_to_model.values()
                for obj in get_children_of_type(what, m)]

    lst = get_all(model_repo, "Obj")
    assert len(lst) == 3

    # check some references to be resolved (!=None)
    for a in lst:
        assert a.ref

    # check meta classes
    assert a_mm["Cls"]._tx_fqn == b_mm["Cls"]._tx_fqn

    # instances must be recognized by all three meta-models
    from textx import textx_isinstance
    for a in lst:
        assert textx_isinstance(a, a_mm["Obj"])
        assert textx_isinstance(a, b_mm["Obj"])
        assert textx_isinstance(a, c_mm["Obj"])

    #################################
    # END
    #################################
    clear_language_registrations()
def test_metamodel_provider_advanced_test3_diamond():
    """
    More complicated model (see test above): here we have a diamond shared
    dependency. It is also checked that the parsers are correctly cloned.
    """
    #################################
    # META MODEL DEF
    #################################
    this_folder = dirname(abspath(__file__))

    def get_meta_model(provider, grammar_file_name):
        mm = metamodel_from_file(join(this_folder, grammar_file_name),
                                 debug=False, classes=[Cls, Obj])
        mm.register_scope_providers({
            "*.*": provider,
            "Call.method": scoping_providers.ExtRelativeName("obj.ref",
                                                             "methods",
                                                             "extends")
        })
        return mm

    import_lookup_provider = scoping_providers.FQNImportURI()

    a_mm = get_meta_model(
        import_lookup_provider,
        join(this_folder, "metamodel_provider3", "A.tx"))
    b_mm = get_meta_model(
        import_lookup_provider,
        join(this_folder, "metamodel_provider3", "B.tx"))
    c_mm = get_meta_model(
        import_lookup_provider,
        join(this_folder, "metamodel_provider3", "C.tx"))

    clear_language_registrations()
    register_language(
        'a-dsl', pattern='*.a', description='Test Lang A', metamodel=a_mm)
    register_language(
        'b-dsl', pattern='*.b', description='Test Lang B', metamodel=b_mm)
    register_language(
        'c-dsl', pattern='*.c', description='Test Lang C', metamodel=c_mm)

    #################################
    # MODEL PARSING
    #################################
    m = a_mm.model_from_file(
        join(this_folder, "metamodel_provider3", "diamond",
             "A_includes_B_C.a"))
    model_repo = m._tx_model_repository.all_models

    #################################
    # TEST MODEL (inheritance, diamond include structure)
    # - check all references are resolved
    # - check all models have an own parser
    #################################
    def get_all(model_repo, what):
        # gather all objects of the given type from every loaded model
        return [obj
                for model in model_repo.filename_to_model.values()
                for obj in get_children_of_type(what, model)]

    lst = get_all(model_repo, "Call")
    assert len(lst) > 0

    # check all references to be resolved (!=None)
    for a in lst:
        assert a.method

    # check that all models have different parsers
    parsers = [model._tx_parser
               for model in model_repo.filename_to_model.values()]
    assert 4 == len(parsers)       # 4 files -> 4 parsers
    assert 4 == len(set(parsers))  # 4 different parsers

    #################################
    # END
    #################################
    clear_language_registrations()
def test_metamodel_provider_advanced_test3_inheritance():
    """
    Advanced test for ExtRelativeName and FQNImportURI.

    Here we have a global model repository shared between different meta
    models. The meta models interact (refer to each other, different
    directions, A inherits from B, the B from A, etc.).
    It is checked that all relevant references are resolved.
    """
    #################################
    # META MODEL DEF
    #################################
    this_folder = dirname(abspath(__file__))

    def get_meta_model(provider, grammar_file_name):
        mm = metamodel_from_file(join(this_folder, grammar_file_name),
                                 debug=False, classes=[Cls, Obj])
        mm.register_scope_providers({
            "*.*": provider,
            "Call.method": scoping_providers.ExtRelativeName("obj.ref",
                                                             "methods",
                                                             "extends")
        })
        return mm

    import_lookup_provider = scoping_providers.FQNImportURI()

    a_mm = get_meta_model(
        import_lookup_provider,
        join(this_folder, "metamodel_provider3", "A.tx"))
    b_mm = get_meta_model(
        import_lookup_provider,
        join(this_folder, "metamodel_provider3", "B.tx"))
    c_mm = get_meta_model(
        import_lookup_provider,
        join(this_folder, "metamodel_provider3", "C.tx"))

    clear_language_registrations()
    register_language(
        'a-dsl', pattern='*.a', description='Test Lang A', metamodel=a_mm)
    register_language(
        'b-dsl', pattern='*.b', description='Test Lang B', metamodel=b_mm)
    register_language(
        'c-dsl', pattern='*.c', description='Test Lang C', metamodel=c_mm)

    #################################
    # MODEL PARSING
    #################################
    m = a_mm.model_from_file(
        join(this_folder, "metamodel_provider3", "inheritance", "model_a.a"))
    model_repo = m._tx_model_repository.all_models

    #################################
    # TEST MODEL (dependencies from one file to the other and back again)
    # - checks all references are resolved
    #################################
    def get_all(model_repo, what):
        # gather all objects of the given type from every loaded model
        return [obj
                for model in model_repo.filename_to_model.values()
                for obj in get_children_of_type(what, model)]

    lst = get_all(model_repo, "Call")
    assert len(lst) > 0

    # check all references to be resolved (!=None)
    for a in lst:
        assert a.method

    #################################
    # END
    #################################
    clear_language_registrations()
def test_metamodel_provider_advanced_test3_global_single_metamodel():
    """
    simplified test with only one meta model
    """
    #################################
    # META MODEL DEF
    #################################
    this_folder = dirname(abspath(__file__))

    def get_meta_model(global_repo, grammar_file_name):
        mm = metamodel_from_file(join(this_folder, grammar_file_name),
                                 debug=False, classes=[Cls, Obj])
        mm.register_scope_providers({
            "*.*": global_repo,
        })
        return mm

    global_repo_provider = scoping_providers.PlainNameGlobalRepo()
    global_repo_provider.register_models(
        join(this_folder, "metamodel_provider3", "single", "*.a"))

    a_mm = get_meta_model(
        global_repo_provider,
        join(this_folder, "metamodel_provider3", "A.tx"))

    clear_language_registrations()
    register_language(
        'a-dsl', pattern='*.a', description='Test Lang A', metamodel=a_mm)

    #################################
    # MODEL PARSING
    #################################
    model_repo = global_repo_provider.load_models_in_model_repo().all_models

    #################################
    # TEST MODEL
    #################################
    def get_all(model_repo, what):
        # gather all objects of the given type from every loaded model
        lst = []
        for m in model_repo.filename_to_model.values():
            lst = lst + get_children_of_type(what, m)
        return lst

    lst = get_all(model_repo, "Obj")
    assert len(lst) == 3

    # check some references to be resolved (!=None)
    for a in lst:
        assert a.ref

    # Fix: the original compared a_mm["Cls"]._tx_fqn with itself and
    # repeated the identical textx_isinstance assertion three times --
    # leftovers from the multi-meta-model variant of this test. With a
    # single meta-model, one check each suffices.
    assert a_mm["Cls"]._tx_fqn

    from textx import textx_isinstance
    for a in lst:
        assert textx_isinstance(a, a_mm["Obj"])

    #################################
    # END
    #################################
    clear_language_registrations()
def test_metamodel_provider_advanced_test3_import():
    """
    Advanced test for ExtRelativeName and PlainNameImportURI.

    Three meta models (A, B, C) share one plain-name import-URI scope
    provider and the models refer to each other across files (circular
    dependencies). Checks that all Obj references are resolved.
    """
    #################################
    # META MODEL DEF
    #################################
    this_folder = dirname(abspath(__file__))

    def build_metamodel(provider, grammar_path):
        # Every meta model uses the same shared import-URI provider.
        meta = metamodel_from_file(join(this_folder, grammar_path),
                                   debug=False,
                                   classes=[Cls, Obj])
        meta.register_scope_providers({
            "*.*": provider,
        })
        return meta

    shared_provider = scoping_providers.PlainNameImportURI()
    a_mm = build_metamodel(
        shared_provider, join(this_folder, "metamodel_provider3", "A.tx"))
    b_mm = build_metamodel(
        shared_provider, join(this_folder, "metamodel_provider3", "B.tx"))
    c_mm = build_metamodel(
        shared_provider, join(this_folder, "metamodel_provider3", "C.tx"))

    clear_language_registrations()
    for lang_name, ext, desc, meta in (
            ('a-dsl', '*.a', 'Test Lang A', a_mm),
            ('b-dsl', '*.b', 'Test Lang B', b_mm),
            ('c-dsl', '*.c', 'Test Lang C', c_mm)):
        register_language(lang_name, pattern=ext, description=desc,
                          metamodel=meta)

    #################################
    # MODEL PARSING
    #################################
    root = a_mm.model_from_file(
        join(this_folder, "metamodel_provider3", "circular", "model_a.a"))
    model_repo = root._tx_model_repository.all_models

    #################################
    # TEST MODEL
    #################################
    def collect(repo, type_name):
        # Gather all instances of type_name across every loaded model.
        found = []
        for model in repo.filename_to_model.values():
            found.extend(get_children_of_type(type_name, model))
        return found

    objs = collect(model_repo, "Obj")
    assert len(objs) == 3
    # check all references to be resolved (!=None)
    for obj in objs:
        assert obj.ref

    #################################
    # END
    #################################
    clear_language_registrations()
def test_exception_from_included_model():
    """
    Check that an error raised by an object processor for an element of an
    *included* model is (a) propagated to the caller and (b) reported with
    the correct location (file, line and column) inside the included model.
    """
    #################################
    # META MODEL DEF
    #################################
    this_folder = dirname(abspath(__file__))

    def build_metamodel(provider, grammar_path):
        meta = metamodel_from_file(join(this_folder, grammar_path),
                                   debug=False)
        meta.register_scope_providers({
            "*.*": provider,
            "Call.method": scoping_providers.ExtRelativeName("obj.ref",
                                                             "methods",
                                                             "extends")
        })

        def method_checker(m):
            # Deliberately fail on the method named 'd1' so the test can
            # verify the reported location of the included model.
            from textx.exceptions import TextXSemanticError
            if m.name == "d1":
                raise TextXSemanticError("d1 triggers artifical error",
                                         **get_location(m))

        meta.register_obj_processors({"Method": method_checker})
        return meta

    shared_provider = scoping_providers.FQNImportURI()
    a_mm = build_metamodel(shared_provider,
                           join(this_folder, "metamodel_provider3", "A.tx"))
    b_mm = build_metamodel(shared_provider,
                           join(this_folder, "metamodel_provider3", "B.tx"))
    c_mm = build_metamodel(shared_provider,
                           join(this_folder, "metamodel_provider3", "C.tx"))

    clear_language_registrations()
    register_language('a-dsl', pattern='*.a', description='Test Lang A',
                      metamodel=a_mm)
    register_language('b-dsl', pattern='*.b', description='Test Lang B',
                      metamodel=b_mm)
    register_language('c-dsl', pattern='*.c', description='Test Lang C',
                      metamodel=c_mm)

    #################################
    # MODEL PARSING / TEST
    #################################
    import textx.exceptions
    with raises(textx.exceptions.TextXSemanticError,
                match=r'.*model_d\.b:5:3:.*d1 triggers artifical error'):
        a_mm.model_from_file(
            join(this_folder, "metamodel_provider3", "inheritance2",
                 "model_a.a"))

    #################################
    # END
    #################################
    clear_language_registrations()
def test_metamodel_provider_advanced_test(): """ Advanced test for ExtRelativeName and PlainNameGlobalRepo. Here we have a global model repository shared between different meta models. The meta models interact (refer to each other; one direction). """ ################################# # META MODEL DEF ################################# this_folder = dirname(abspath(__file__)) def get_meta_model(global_repo, grammar_file_name): mm = metamodel_from_file(join(this_folder, grammar_file_name), debug=False) mm.register_scope_providers({ "*.*": global_repo, "Ingredient.unit": scoping_providers.ExtRelativeName("type", "units", "extends") }) return mm global_repo = scoping_providers.PlainNameGlobalRepo() global_repo.register_models( join(this_folder, "metamodel_provider2", "*.recipe")) global_repo.register_models( join(this_folder, "metamodel_provider2", "*.ingredient")) i_mm = get_meta_model( global_repo, join(this_folder, "metamodel_provider2", "Ingredient.tx")) r_mm = get_meta_model( global_repo, join(this_folder, "metamodel_provider2", "Recipe.tx")) clear_language_registrations() register_language( 'recipe-dsl', pattern='*.recipe', description='demo', metamodel=r_mm # or a factory ) register_language( 'ingredient-dsl', pattern='*.ingredient', description='demo', metamodel=i_mm # or a factory ) ################################# # MODEL PARSING ################################# model_repo = global_repo.load_models_in_model_repo().all_models ################################# # TEST MODEL ################################# def get_all(model_repo, what): lst = [] for m in model_repo: lst = lst + get_children_of_type(what, m) return lst lst_i = get_all(model_repo, "IngredientType") lst_r = get_all(model_repo, "Recipe") assert len(lst_i) == 2 assert len(lst_r) == 2 # check some references to be resolved (!=None) assert lst_r[0].ingredients[0].type assert lst_r[0].ingredients[0].unit
def test_model_params_generate_cli():
    """
    Test that model parameters are passed through generate cli command.
    """
    # register test language
    @language('testlang', '*.mpt')
    def param_lang():
        def processor(model, metamodel):
            # Just to be sure that processor sees the model parameters
            model.model_params = model._tx_model_params

        mm = metamodel_from_str(grammar)
        mm.model_param_defs.add('meaning_of_life', 'The Meaning of Life')
        mm.register_model_processor(processor)
        return mm

    register_language(param_lang)

    # register language generator
    @generator('testlang', 'testtarget')
    def target_gen(metamodel, model, output_path, overwrite, debug=False,
                   **custom_args):
        # Dump custom args for testing
        txt = '\n'.join("{}={}".format(arg_name, arg_value)
                        for arg_name, arg_value in custom_args.items())
        # Dump model params processed by model processor for testing
        txt += '\nModel params:'
        txt += '\n'.join("{}={}".format(param_name, param_value)
                         for param_name, param_value
                         in model.model_params.items())

        output_file = get_output_filename(model._tx_filename, None,
                                          'testtarget')

        def write_output():
            with open(output_file, 'w') as f:
                f.write(txt)

        gen_file(model._tx_filename, output_file, write_output, overwrite)

    register_generator(target_gen)

    # Run generator from CLI
    this_folder = os.path.abspath(os.path.dirname(__file__))
    runner = CliRunner()
    model_file = os.path.join(this_folder, 'model_param_generate_test.mpt')
    result = runner.invoke(textx, [
        'generate', '--language', 'testlang', '--target', 'testtarget',
        '--overwrite', model_file, '--meaning_of_life', '42',
        '--someparam', 'somevalue'
    ])

    assert result.exit_code == 0

    # The generator must have dumped both the custom arg and the declared
    # model parameter into the output file.
    output_file = os.path.join(this_folder,
                               'model_param_generate_test.testtarget')
    with open(output_file, 'r') as f:
        content = f.read()
    assert 'someparam=somevalue' in content
    assert 'Model params:meaning_of_life=42' in content
def test_metamodel_provider_advanced_test():
    """
    Advanced test for ExtRelativeName and PlainNameGlobalRepo.

    Recipe and ingredient meta models share one global model repository
    and refer to each other in one direction. Checks that the type and
    unit references of the first recipe's first ingredient are resolved.
    """
    #################################
    # META MODEL DEF
    #################################
    this_folder = dirname(abspath(__file__))

    def build_metamodel(repo, grammar_path):
        # Plain names resolve through the shared repo; units resolve via
        # ExtRelativeName along the 'extends' chain of the ingredient type.
        meta = metamodel_from_file(join(this_folder, grammar_path),
                                   debug=False)
        meta.register_scope_providers({
            "*.*": repo,
            "Ingredient.unit": scoping_providers.ExtRelativeName("type",
                                                                 "units",
                                                                 "extends")
        })
        return meta

    shared_repo = scoping_providers.PlainNameGlobalRepo()
    shared_repo.register_models(
        join(this_folder, "metamodel_provider2", "*.recipe"))
    shared_repo.register_models(
        join(this_folder, "metamodel_provider2", "*.ingredient"))
    i_mm = build_metamodel(
        shared_repo,
        join(this_folder, "metamodel_provider2", "Ingredient.tx"))
    r_mm = build_metamodel(
        shared_repo, join(this_folder, "metamodel_provider2", "Recipe.tx"))

    clear_language_registrations()
    register_language(
        'recipe-dsl',
        pattern='*.recipe',
        description='demo',
        metamodel=r_mm  # or a factory
    )
    register_language(
        'ingredient-dsl',
        pattern='*.ingredient',
        description='demo',
        metamodel=i_mm  # or a factory
    )

    #################################
    # MODEL PARSING
    #################################
    model_repo = shared_repo.load_models_in_model_repo().all_models

    #################################
    # TEST MODEL
    #################################
    def collect(repo, type_name):
        # Gather all instances of type_name across every loaded model.
        found = []
        for model in repo.filename_to_model.values():
            found.extend(get_children_of_type(type_name, model))
        return found

    ingredient_types = collect(model_repo, "IngredientType")
    recipes = collect(model_repo, "Recipe")
    assert len(ingredient_types) == 2
    assert len(recipes) == 2

    # check some references to be resolved (!=None)
    first_ingredient = recipes[0].ingredients[0]
    assert first_ingredient.type
    assert first_ingredient.unit
def init_metamodel(path):
    """
    Initialize the exam/exercise/config meta models and load all models.

    All three meta models share one GlobalModelRepository so that
    cross-model references resolve. All '*.config' files below *path*
    are registered (recursively) and loaded.

    Parameters:
        path: root folder scanned recursively for '*.config' models.

    Returns:
        (mm_exam, all_models, config): the exam meta model, the loaded
        model repository, and the single Config instance found.

    Raises:
        Exception: if zero or more than one Config model is found.
    """
    this_folder = dirname(abspath(__file__))

    # One repository shared by every meta model below.
    global_repo = GlobalModelRepository()
    global_repo_provider = scoping_providers.FQNGlobalRepo(
        glob_args={"recursive": True})
    global_repo_provider.register_models(path + "/**/*.config")

    all_classes = [
        cl.PExam, cl.PExamContentContainer, cl.PExerciseRef, cl.PExercise,
        cl.PAsciiContent, cl.PCodeContent, cl.PFreeSpaceContent, cl.PImage,
        cl.PLatexContent, cl.PPlantUmlContent
    ]

    # Numeric terminal converters, shared by exercise and exam meta models
    # (builtins replace the original 'lambda x: float(x)' wrappers).
    numeric_processors = {"MYFLOAT": float, "MYINT": int}

    mm_exercise = metamodel_from_file(join(this_folder, "Exercise.tx"),
                                      global_repository=global_repo,
                                      use_regexp_group=True,
                                      classes=all_classes)
    mm_exercise.register_obj_processors(dict(numeric_processors))

    def name2file(name):
        # Map a dotted exercise name to its file path for importURI lookup.
        return name.replace(".", "/") + ".exercise"

    mm_exam = metamodel_from_file(join(this_folder, "Exam.tx"),
                                  global_repository=global_repo,
                                  use_regexp_group=True,
                                  classes=all_classes)
    mm_exam.register_scope_providers({
        "*.*": global_repo_provider,
        "dummy.dummy": scoping_providers.FQNImportURI(
            importURI_converter=name2file, search_path=[path]),
    })
    mm_exam.register_obj_processors(
        dict(numeric_processors, PExam=validation.check_exam))

    mm_config = metamodel_from_file(join(this_folder, "Config.tx"),
                                    global_repository=global_repo,
                                    use_regexp_group=True)

    clear_language_registrations()
    register_language("exam-exercise-lang", "*.exercise",
                      metamodel=mm_exercise)
    register_language("exam-lang", "*.exam", metamodel=mm_exam)
    register_language("exam-config-lang", "*.config", metamodel=mm_config)

    all_models = global_repo_provider.load_models_in_model_repo().all_models

    configs = get_all(all_models, what='Config')
    if len(configs) > 1:
        # Fixed joiner: ' and ' (the original used 'and ' without a
        # leading space, producing e.g. 'a.configand b.config').
        raise Exception("found more than one config: {}".format(
            " and ".join(c._tx_filename for c in configs)))
    if len(configs) != 1:
        raise Exception("found no config")
    return mm_exam, all_models, configs[0]