def test_multi_metamodel_types_data_flow2():
    """With a shared global repository, type objects loaded through one
    metamodel are the very same objects seen through the other metamodels.
    """
    # This initialisation normally happens in the python module of the
    # third party lib.
    scope_mode = "global repo"
    clear_language_registrations()
    for lib in (LibTypes, LibData, LibFlow):
        lib.library_init(scope_mode)

    base = os.path.join(os.path.dirname(__file__),
                        'multi_metamodel', 'types_data_flow')

    flow_model = LibFlow.get_metamodel().model_from_file(
        os.path.join(base, 'data_flow.flow'))
    # Although types.type is included twice, it is only present once.
    assert len(
        flow_model._tx_model_repository.all_models.filename_to_model) == 3

    # Load the type model also used by flow_model.
    data_model = LibData.get_metamodel().model_from_file(
        os.path.join(base, 'data_structures.data'))
    # Load the type model also used by flow_model and data_model.
    type_model = LibTypes.get_metamodel().model_from_file(
        os.path.join(base, 'types.type'))

    # The types (reloaded by the second model) are shared with the first
    # model --> global repo.
    shared_type = flow_model.algos[0].inp.attributes[0].type
    assert shared_type in data_model.includes[0]._tx_loaded_models[0].types
    assert shared_type in type_model.types
def test_register_language():
    """
    Test both styles of language registration.
    """
    def check_registered():
        # Shared assertions: the registry must report exactly what was
        # registered, regardless of the registration style used.
        desc = language_description('test-lang')
        assert type(desc) is LanguageDesc
        assert desc.name == 'test-lang'
        assert desc.pattern == '*.test'
        assert desc.description == 'test-lang description'
        assert desc.metamodel == mymetamodel_callable

    # Style 1: keyword arguments.
    clear_language_registrations()
    register_language('test-lang',
                      pattern='*.test',
                      description='test-lang description',
                      metamodel=mymetamodel_callable)
    check_registered()

    # Style 2: a pre-built LanguageDesc instance.
    clear_language_registrations()
    register_language(LanguageDesc('test-lang',
                                   pattern='*.test',
                                   description='test-lang description',
                                   metamodel=mymetamodel_callable))
    check_registered()
def register_languages():
    """Register languages A, B and BwithImport on one shared global model
    repository and return the shared PlainNameGlobalRepo scope provider so
    callers can register model files with it.
    """
    clear_language_registrations()

    repo = scoping.GlobalModelRepository()
    name_provider = scoping_providers.PlainNameGlobalRepo()

    def make_a():
        mm = metamodel_from_str(grammarA, global_repository=repo)
        mm.register_scope_providers({"*.*": name_provider})
        return mm

    def make_b():
        mm = metamodel_from_str(grammarB, global_repository=repo)
        mm.register_scope_providers({"*.*": name_provider})
        return mm

    def make_b_with_import():
        mm = metamodel_from_str(grammarBWithImport, global_repository=repo)
        # Default scope provider supporting the importURI feature.
        mm.register_scope_providers(
            {"*.*": scoping_providers.FQNImportURI()})
        return mm

    register_language('A', pattern="*.a", metamodel=make_a)
    register_language('B', pattern="*.b", metamodel=make_b)
    register_language('BwithImport', pattern="*.b",
                      metamodel=make_b_with_import)

    return name_provider
def test_metamodel_provider_utf_16_le_basic_test():
    """
    This test checks that the global MetaModel Provider works (basic
    function). It uses utf-16-le for the model files.
    """
    base = join(abspath(dirname(__file__)), 'metamodel_provider_utf-16-le')

    #################################
    # META MODEL DEF
    #################################
    mm_components = metamodel_from_file(join(base, 'Components.tx'))
    mm_components.register_scope_providers({
        "*.*": scoping_providers.FQNImportURI(),
        "Connection.from_port":
            scoping_providers.RelativeName("from_inst.component.slots"),
        "Connection.to_port":
            scoping_providers.RelativeName("to_inst.component.slots"),
    })

    mm_users = metamodel_from_file(join(base, 'Users.tx'))
    mm_users.register_scope_providers({
        "*.*": scoping_providers.FQNImportURI(),
    })

    clear_language_registrations()
    register_language('components-dsl',
                      pattern='*.components',
                      description='demo',
                      metamodel=mm_components)  # or a factory
    register_language('users-dsl',
                      pattern='*.users',
                      description='demo',
                      metamodel=mm_users)  # or a factory

    #################################
    # MODEL PARSING
    #################################
    my_model = mm_users.model_from_file(join(base, "example.users"),
                                        encoding='utf-16-le')

    #################################
    # TEST MODEL
    #################################
    user = get_unique_named_object_in_all_models(my_model, "pi")
    action1 = get_unique_named_object_in_all_models(my_model, "action1")
    assert user.instance is action1
def test_metamodel_provider_utf_16_le_basic_test():
    """
    This test checks that the global MetaModel Provider works (basic
    function). It uses utf-16-le for the model files.
    """
    here = abspath(dirname(__file__))

    # --- meta-model definitions -----------------------------------
    mm_components = metamodel_from_file(
        join(here, 'metamodel_provider_utf-16-le', 'Components.tx'))
    mm_components.register_scope_providers({
        "*.*": scoping_providers.FQNImportURI(),
        "Connection.from_port":
            scoping_providers.RelativeName("from_inst.component.slots"),
        "Connection.to_port":
            scoping_providers.RelativeName("to_inst.component.slots"),
    })

    mm_users = metamodel_from_file(
        join(here, 'metamodel_provider_utf-16-le', 'Users.tx'))
    mm_users.register_scope_providers({
        "*.*": scoping_providers.FQNImportURI(),
    })

    # --- language registration ------------------------------------
    clear_language_registrations()
    for lang_name, file_pattern, mm in (
            ('components-dsl', '*.components', mm_components),
            ('users-dsl', '*.users', mm_users)):
        register_language(lang_name,
                          pattern=file_pattern,
                          description='demo',
                          metamodel=mm)  # or a factory

    # --- model parsing (note the explicit encoding) ---------------
    my_model = mm_users.model_from_file(
        join(here, "metamodel_provider_utf-16-le", "example.users"),
        encoding='utf-16-le')

    # --- checks ---------------------------------------------------
    user = get_unique_named_object_in_all_models(my_model, "pi")
    action1 = get_unique_named_object_in_all_models(my_model, "action1")
    assert user.instance is action1
def test_language_for_file():
    """
    Test providing language description for a given file name or pattern.
    """
    clear_language_registrations()
    expected = language_description('textx')
    # Both a concrete file name and the raw pattern must resolve to the
    # very same registered description object.
    for file_name_or_pattern in ('test.tx', '*.tx'):
        assert language_for_file(file_name_or_pattern) is expected
def test_declaratively_registered_languages_always_available():
    """
    Declaratively registered languages will be re-registered at the first
    API call. textX language is declaratively registered and thus is
    always accessible.
    """
    clear_language_registrations()
    desc = language_description('textx')
    assert (desc.name, desc.pattern, desc.project_name) == \
        ('textX', '*.tx', 'textX')
def test_data_dsl():
    """
    Test loading of correct data dsl.
    """
    clear_language_registrations()
    model_path = os.path.join(os.path.dirname(__file__),
                              'models', 'data_structures.edata1')
    model = metamodel_for_language('data-dsl').model_from_file(model_path)
    assert model is not None
    assert len(model.data) == 3
def test_data_dsl():
    """
    Test loading of correct data dsl.
    """
    clear_language_registrations()
    mm = metamodel_for_language('data-dsl')
    model = mm.model_from_file(
        os.path.join(os.path.dirname(__file__),
                     'models', 'data_structures.edata'))
    assert model is not None
    assert len(model.data) == 3
def test_multi_metamodel_types_data_flow_validation_error_in_types():
    """An included type model violating the lowercase rule must raise a
    TextXSyntaxError when the flow model is loaded."""
    clear_language_registrations()
    for lib in (LibTypes, LibData, LibFlow):
        lib.library_init("no global scope")

    bad_model = os.path.join(os.path.dirname(__file__),
                             'multi_metamodel', 'types_data_flow',
                             'data_flow_including_error.flow')
    with raises(textx.exceptions.TextXSyntaxError, match=r'.*lowercase.*'):
        LibFlow.get_metamodel().model_from_file(bad_model)
async def install_project_async(
    folder_or_wheel: str,
    python_path: str,
    editable: bool = False,
    msg_handler: Optional[Callable] = None,
) -> Tuple[str, str, str]:
    """Installs textX project.

    Args:
        folder_or_wheel: path to the folder or wheel of textX language
            project
        python_path: python path from virtual environment that extension
            is using
        editable: flag if project should be installed in editable mode
        msg_handler: a callable which is called with message argument when
            process writes to stdout
    Returns:
        A tuple of project name, version and package dist location if
        project is installed successfully
    Raises:
        InstallTextXProjectError: If project is not installed, or if the
            post-install registration check fails (in which case the
            project is uninstalled again before raising)
    """
    project_name, version = get_project_name_and_version(folder_or_wheel)
    dist_location = None

    # pip install is run with the extension's interpreter, not ours.
    cmd = [python_path, "-m", "pip", "install", folder_or_wheel]
    if editable:
        # Insert "-e" right before the install target.
        cmd.insert(4, "-e")

    retcode, output = await run_async(cmd, msg_handler)
    # Not installed
    if retcode != 0:
        raise InstallTextXProjectError(project_name, dist_location, output)

    # Manually add package to sys.path if installed with -e flag
    if editable and folder_or_wheel not in sys.path:
        sys.path.append(folder_or_wheel)

    # Force re-discovery of registered languages on the next lookup.
    clear_language_registrations()

    # Checks if language with the same name already exist
    try:
        language_descriptions()
        dist_location = get_distribution(project_name).location
    except Exception as e:
        # Registration clashed or dist lookup failed: roll the install
        # back so the environment is left as it was.
        await uninstall_project_async(project_name, python_path)
        raise InstallTextXProjectError(project_name, dist_location,
                                       output) from e

    return project_name, version, dist_location
def test_metamodel_callable_must_return_a_metamodel():
    """
    Test that meta-model callable must return an instance of
    TextXMetaModel.
    """
    def bogus_metamodel_factory():
        # Deliberately returns something that is not a TextXMetaModel.
        return 42

    clear_language_registrations()
    register_language('test-lang',
                      pattern='*.test',
                      description='test-lang description',
                      metamodel=bogus_metamodel_factory)

    # The error surfaces lazily, when the metamodel is first requested.
    with pytest.raises(TextXRegistrationError,
                       match='.*Meta-model type for language.*'):
        metamodel_for_language('test-lang')
def test_multi_metamodel_types_data_flow_validation_error_in_data_flow():
    """A flow model whose connected attributes have mismatching data types
    must raise a TextXSemanticError."""
    clear_language_registrations()
    for lib in (LibTypes, LibData, LibFlow):
        lib.library_init("no global scope")

    bad_model = os.path.join(os.path.dirname(__file__),
                             'multi_metamodel', 'types_data_flow',
                             'data_flow_with_error.flow')
    with raises(textx.exceptions.TextXSemanticError,
                match=r'.*data types must match.*'):
        LibFlow.get_metamodel().model_from_file(bad_model)
def test_register_already_existing_language():
    """
    Test that trying to register a language with the name already
    registered will raise `TextXRegistrationError`.
    """
    clear_language_registrations()

    def do_register():
        register_language('test-lang',
                          pattern='*.test',
                          description='test-lang description',
                          metamodel=mymetamodel_callable)

    # First registration succeeds; the identical second one must fail.
    do_register()
    with pytest.raises(TextXRegistrationError,
                       match='.*already registered.*'):
        do_register()
def test_register_language_with_decorator():
    """
    Test using `language` decorator to register a language definition.
    """
    clear_language_registrations()

    @language('test-lang', '*.test')
    def test_lang():
        "This is a test language"
        return mymetamodel_callable()

    # The decorator replaces the function with a LanguageDesc whose
    # description is taken from the function docstring.
    assert type(test_lang) is LanguageDesc
    assert (test_lang.name, test_lang.pattern) == ('test-lang', '*.test')
    assert test_lang.description == 'This is a test language'
    assert callable(test_lang.metamodel)
def register_languages():
    """Register languages A, B and BwithImport on one shared global model
    repository; language A is mapped to a user class whose attributes are
    frozen after construction. Returns the shared PlainNameGlobalRepo
    scope provider.
    """
    clear_language_registrations()

    repo = scoping.GlobalModelRepository()
    name_provider = scoping_providers.PlainNameGlobalRepo()

    class A(object):
        # User class for rule A: attributes may only be set through the
        # constructor kwargs; any later assignment raises.
        def __init__(self, **kwargs):
            super(A, self).__init__()
            for key, value in kwargs.items():
                self.__dict__[key] = value

        def __setattr__(self, name, value):
            raise Exception("test: this is not allowed.")

    def make_a():
        mm = metamodel_from_str(grammarA, global_repository=repo,
                                classes=[A])
        mm.register_scope_providers({"*.*": name_provider})
        return mm

    def make_b():
        mm = metamodel_from_str(grammarB, global_repository=repo)
        mm.register_scope_providers({"*.*": name_provider})
        return mm

    def make_b_with_import():
        mm = metamodel_from_str(grammarBWithImport, global_repository=repo)
        # Default scope provider supporting the importURI feature.
        mm.register_scope_providers(
            {"*.*": scoping_providers.FQNImportURI()})
        return mm

    register_language('A', pattern="*.a", metamodel=make_a)
    register_language('B', pattern="*.b", metamodel=make_b)
    register_language('BwithImport', pattern="*.b",
                      metamodel=make_b_with_import)

    return name_provider
def test_multiple_languages_for_the_same_pattern():
    """
    If multiple languages are registered for the same file pattern
    `language_for_file` shall raise `TextXRegistrationError`.
    """
    clear_language_registrations()
    register_language('test-lang', pattern='*.test',
                      description='test-lang description',
                      metamodel=mymetamodel_callable)
    register_language('test-lang2', pattern='*.test',
                      description='test-lang2 description',
                      metamodel=mymetamodel_callable)

    # The singular lookup is ambiguous and must fail...
    with pytest.raises(TextXRegistrationError,
                       match='Multiple languages can parse.*'):
        language_for_file('Somefile.test')

    # ...while the plural variant simply returns all candidates.
    assert len(languages_for_file('Somefile.test')) == 2
def test_multi_metamodel_obj_proc():
    """Object processors registered on two metamodels sharing one global
    repository are applied while loading a model (regression, issue #140).
    """
    issue_dir = os.path.join(os.path.dirname(__file__), "issue140")

    global_repo = scoping.GlobalModelRepository()
    repo = scoping_providers.PlainNameGlobalRepo()
    repo.register_models(os.path.dirname(__file__) + "/issue140/*.a")

    mm_A = metamodel_from_file(os.path.join(issue_dir, "A.tx"),
                               global_repository=global_repo,
                               classes=[C1])
    mm_B = metamodel_from_file(os.path.join(issue_dir, "B.tx"),
                               global_repository=global_repo,
                               classes=[C1])
    mm_B.register_scope_providers({"*.*": repo})

    def proc(obj):
        print(obj)

    # Same processor on both metamodels for the shared rule C1.
    mm_A.register_obj_processors({"C1": proc})
    mm_B.register_obj_processors({"C1": proc})

    clear_language_registrations()
    register_language('a-dsl', pattern='*.a',
                      description='Test Lang A', metamodel=mm_A)
    register_language('b-dsl', pattern='*.b',
                      description='Test Lang B', metamodel=mm_B)

    # Loading the B model pulls in the A models via the global repo.
    mm_B.model_from_file(os.path.join(issue_dir, "b.b"))
def test_multi_metamodel_types_data_flow1():
    """Without a global repository, the same type model loaded through
    different metamodels yields distinct (unshared) objects."""
    # This initialisation normally happens in the python module of the
    # third party lib.
    clear_language_registrations()
    for lib in (LibTypes, LibData, LibFlow):
        lib.library_init("no global scope")

    base = os.path.join(os.path.dirname(__file__),
                        'multi_metamodel', 'types_data_flow')

    flow_model = LibFlow.get_metamodel().model_from_file(
        os.path.join(base, 'data_flow.flow'))
    # Although types.type is included twice, it is only present once
    # (scope providers share a common repo within one model and all
    # loaded models in that model).
    assert len(
        flow_model._tx_model_repository.all_models.filename_to_model) == 3

    # Load the type model also used by flow_model.
    data_model = LibData.get_metamodel().model_from_file(
        os.path.join(base, 'data_structures.data'))
    # Load the type model also used by flow_model and data_model.
    type_model = LibTypes.get_metamodel().model_from_file(
        os.path.join(base, 'types.type'))

    # The types (reloaded by the second model) are NOT shared with the
    # first model --> no global repo.
    flow_type = flow_model.algos[0].inp.attributes[0].type
    assert flow_type not in data_model.includes[0]._tx_loaded_models[0].types
    assert flow_type not in type_model.types
def register_languages():
    """Register languages A, B and BwithImport sharing one global model
    repository; returns the shared PlainNameGlobalRepo scope provider."""
    clear_language_registrations()

    global_repo = scoping.GlobalModelRepository()
    global_repo_provider = scoping_providers.PlainNameGlobalRepo()

    def shared_repo_factory(grammar):
        # Produce a metamodel factory bound to the shared repository and
        # the shared plain-name scope provider.
        def factory():
            mm = metamodel_from_str(grammar, global_repository=global_repo)
            mm.register_scope_providers({"*.*": global_repo_provider})
            return mm
        return factory

    def get_BwithImport_mm():
        mm = metamodel_from_str(grammarBWithImport,
                                global_repository=global_repo)
        # Default scope provider supporting the importURI feature.
        mm.register_scope_providers(
            {"*.*": scoping_providers.FQNImportURI()})
        return mm

    register_language('A', pattern="*.a",
                      metamodel=shared_repo_factory(grammarA))
    register_language('B', pattern="*.b",
                      metamodel=shared_repo_factory(grammarB))
    register_language('BwithImport', pattern="*.b",
                      metamodel=get_BwithImport_mm)

    return global_repo_provider
async def uninstall_project_async(
        project_name: str,
        python_path: str,
        msg_handler: Optional[Callable] = None) -> None:
    """Uninstalls textX project.

    Args:
        project_name: project name
        python_path: python path from virtual environment that extension
            is using
        msg_handler: a callable which is called with message argument when
            process writes to stdout
    Returns:
        None
    Raises:
        UninstallTextXProjectError: If project is not uninstalled, or if
            the package dist is not found (DistributionNotFound is chained
            as the cause in that case)
    """
    cmd = [python_path, "-m", "pip", "uninstall", project_name, "-y"]

    # Get dist location before uninstalling with pip
    try:
        dist_location = get_distribution(project_name).location
    except DistributionNotFound as e:
        raise UninstallTextXProjectError(project_name, "") from e

    # Call pip uninstall command
    retcode, output = await run_async(cmd, msg_handler)
    if retcode != 0:
        raise UninstallTextXProjectError(project_name, output)

    # Manually remove package from sys.path if needed
    # (editable installs were added to sys.path at install time).
    is_editable = dist_is_editable(dist_location, project_name)
    if is_editable and dist_location in sys.path:
        sys.path.remove(dist_location)

    # Drop any languages the removed project had registered.
    clear_language_registrations()
def test_metamodel_provider_advanced_test3_inheritance():
    """
    Advanced test for ExtRelativeName and FQNImportURI.

    Here we have a global model repository shared between different meta
    models. The meta models interact (refer to each other, different
    directions, A inherits from B, the B from A, etc.). It is checked
    that all relevant references are resolved.
    """
    this_folder = dirname(abspath(__file__))
    mm_folder = join(this_folder, "metamodel_provider3")

    # --- meta-model definitions -----------------------------------
    def get_meta_model(provider, grammar_file_name):
        mm = metamodel_from_file(join(this_folder, grammar_file_name),
                                 debug=False, classes=[Cls, Obj])
        mm.register_scope_providers({
            "*.*": provider,
            "Call.method": scoping_providers.ExtRelativeName("obj.ref",
                                                             "methods",
                                                             "extends")
        })
        return mm

    import_lookup_provider = scoping_providers.FQNImportURI()
    a_mm = get_meta_model(import_lookup_provider, join(mm_folder, "A.tx"))
    b_mm = get_meta_model(import_lookup_provider, join(mm_folder, "B.tx"))
    c_mm = get_meta_model(import_lookup_provider, join(mm_folder, "C.tx"))

    clear_language_registrations()
    register_language('a-dsl', pattern='*.a',
                      description='Test Lang A', metamodel=a_mm)
    register_language('b-dsl', pattern='*.b',
                      description='Test Lang B', metamodel=b_mm)
    register_language('c-dsl', pattern='*.c',
                      description='Test Lang C', metamodel=c_mm)

    # --- model parsing --------------------------------------------
    m = a_mm.model_from_file(join(mm_folder, "inheritance", "model_a.a"))
    model_repo = m._tx_model_repository.all_models

    # --- checks (dependencies from one file to the other and back) -
    def get_all(repo, what):
        found = []
        for model in repo.filename_to_model.values():
            found += get_children_of_type(what, model)
        return found

    calls = get_all(model_repo, "Call")
    assert len(calls) > 0
    # All method references must be resolved (not None).
    for call in calls:
        assert call.method

    clear_language_registrations()
def language_registered():
    """Start from a clean registry holding exactly one test language."""
    clear_language_registrations()
    register_language('test-lang',
                      pattern='*.test',
                      description='test-lang description',
                      metamodel=mymetamodel_callable)
def test_model_export():
    """
    This test checks that the export function (to graphdotviz) works with
    a model distributed across different files.

    It is checked that all filenames are included in the output and that
    some elements from every model file are included in the output.
    """
    #################################
    # META MODEL DEF
    #################################
    this_folder = dirname(abspath(__file__))

    def get_meta_model(provider, grammar_file_name):
        mm = metamodel_from_file(join(this_folder, grammar_file_name),
                                 debug=False)
        mm.register_scope_providers({
            "*.*": provider,
            "Call.method": scoping_providers.ExtRelativeName("obj.ref",
                                                             "methods",
                                                             "extends")
        })
        return mm

    import_lookup_provider = scoping_providers.FQNImportURI()

    a_mm = get_meta_model(import_lookup_provider,
                          join(this_folder, "metamodel_provider3", "A.tx"))
    b_mm = get_meta_model(import_lookup_provider,
                          join(this_folder, "metamodel_provider3", "B.tx"))
    c_mm = get_meta_model(import_lookup_provider,
                          join(this_folder, "metamodel_provider3", "C.tx"))

    clear_language_registrations()
    register_language('a-dsl', pattern='*.a',
                      description='Test Lang A', metamodel=a_mm)
    register_language('b-dsl', pattern='*.b',
                      description='Test Lang B', metamodel=b_mm)
    register_language('c-dsl', pattern='*.c',
                      description='Test Lang C', metamodel=c_mm)

    #################################
    # MODEL PARSING
    #################################
    m = a_mm.model_from_file(
        join(this_folder, "metamodel_provider3", "inheritance", "model_a.a"))

    out_file = io.StringIO()
    export.model_export_to_file(out_file, m)
    text = out_file.getvalue()
    print(text)

    # Elements from both model files appear in the export.
    assert "a2_very_long_name" in text
    assert "b2_very_long_name" in text
    # Both model file names appear in the export. The original test
    # asserted model_b.b twice and never checked model_a.a (copy-paste
    # defect); the docstring requires *all* filenames to be present.
    assert "inheritance{}model_a.a".format(sep) in text
    assert "inheritance{}model_b.b".format(sep) in text
def test_model_export():
    """
    This test checks that the export function (to graphdotviz) works with
    a model distributed across different files.

    It is checked that all filenames are included in the output and that
    some elements from every model file are included in the output.
    """
    #################################
    # META MODEL DEF
    #################################
    this_folder = dirname(abspath(__file__))

    def get_meta_model(provider, grammar_file_name):
        mm = metamodel_from_file(join(this_folder, grammar_file_name),
                                 debug=False)
        mm.register_scope_providers({
            "*.*": provider,
            "Call.method": scoping_providers.ExtRelativeName("obj.ref",
                                                             "methods",
                                                             "extends")
        })
        return mm

    import_lookup_provider = scoping_providers.FQNImportURI()

    a_mm = get_meta_model(
        import_lookup_provider,
        join(this_folder, "metamodel_provider3", "A.tx"))
    b_mm = get_meta_model(
        import_lookup_provider,
        join(this_folder, "metamodel_provider3", "B.tx"))
    c_mm = get_meta_model(
        import_lookup_provider,
        join(this_folder, "metamodel_provider3", "C.tx"))

    clear_language_registrations()
    register_language(
        'a-dsl', pattern='*.a', description='Test Lang A', metamodel=a_mm)
    register_language(
        'b-dsl', pattern='*.b', description='Test Lang B', metamodel=b_mm)
    register_language(
        'c-dsl', pattern='*.c', description='Test Lang C', metamodel=c_mm)

    #################################
    # MODEL PARSING
    #################################
    m = a_mm.model_from_file(
        join(this_folder, "metamodel_provider3", "inheritance", "model_a.a"))

    out_file = io.StringIO()
    export.model_export_to_file(out_file, m)
    text = out_file.getvalue()
    print(text)

    # Elements from both model files appear in the export.
    assert "a2_very_long_name" in text
    assert "b2_very_long_name" in text
    # Both model file names appear in the export. The original asserted
    # model_b.b twice and never model_a.a (copy-paste defect); the
    # docstring requires *all* filenames to be present.
    assert "inheritance{}model_a.a".format(sep) in text
    assert "inheritance{}model_b.b".format(sep) in text
def test_metamodel_provider_advanced_test3_global():
    """
    Advanced test for ExtRelativeName and PlainNameGlobalRepo.

    Here we have a global model repository shared between different meta
    models. The meta models interact (refer to each other, different
    directions).
    """
    this_folder = dirname(abspath(__file__))
    mm_folder = join(this_folder, "metamodel_provider3")

    # --- meta-model definitions -----------------------------------
    def get_meta_model(global_repo, grammar_file_name):
        mm = metamodel_from_file(join(this_folder, grammar_file_name),
                                 debug=False, classes=[Cls, Obj])
        mm.register_scope_providers({
            "*.*": global_repo,
        })
        return mm

    global_repo_provider = scoping_providers.PlainNameGlobalRepo()
    for pattern in ("*.a", "*.b", "*.c"):
        global_repo_provider.register_models(
            join(mm_folder, "circular", pattern))

    a_mm = get_meta_model(global_repo_provider, join(mm_folder, "A.tx"))
    b_mm = get_meta_model(global_repo_provider, join(mm_folder, "B.tx"))
    c_mm = get_meta_model(global_repo_provider, join(mm_folder, "C.tx"))

    clear_language_registrations()
    register_language('a-dsl', pattern='*.a',
                      description='Test Lang A', metamodel=a_mm)
    register_language('b-dsl', pattern='*.b',
                      description='Test Lang B', metamodel=b_mm)
    register_language('c-dsl', pattern='*.c',
                      description='Test Lang C', metamodel=c_mm)

    # --- model parsing --------------------------------------------
    model_repo = global_repo_provider.load_models_in_model_repo().all_models

    # --- checks ---------------------------------------------------
    def get_all(repo, what):
        found = []
        for model in repo.filename_to_model.values():
            found += get_children_of_type(what, model)
        return found

    objs = get_all(model_repo, "Obj")
    assert len(objs) == 3
    # Some references must be resolved (not None).
    for obj in objs:
        assert obj.ref

    # Meta classes of the same rule share a fully qualified name.
    assert a_mm["Cls"]._tx_fqn == b_mm["Cls"]._tx_fqn

    # Instances are recognised by all three metamodels.
    from textx import textx_isinstance
    for obj in objs:
        assert textx_isinstance(obj, a_mm["Obj"])
        assert textx_isinstance(obj, b_mm["Obj"])
        assert textx_isinstance(obj, c_mm["Obj"])

    clear_language_registrations()
def clear_all():
    """Reset the textX language registry to a clean state."""
    clear_language_registrations()
def test_metamodel_provider_basic_test():
    """
    This test checks that the global MetaModel Provider works (basic
    function):

    It is checked that no filename patterns are used twice. It is checked
    that the correct metamodel is used to load a model (by loading a model
    constellation using two metamodels).

    Note: the MetaModelProvider is obsolete. This test is fixed in terms
    of how to handle the filename --> metamodel resolution in
    textx >= 2.x
    """
    base = join(abspath(dirname(__file__)), 'metamodel_provider')

    # --- meta-model definitions -----------------------------------
    mm_components = metamodel_from_file(join(base, 'Components.tx'))
    mm_components.register_scope_providers({
        "*.*": scoping_providers.FQNImportURI(),
        "Connection.from_port":
            scoping_providers.RelativeName("from_inst.component.slots"),
        "Connection.to_port":
            scoping_providers.RelativeName("to_inst.component.slots"),
    })

    mm_users = metamodel_from_file(join(base, 'Users.tx'))
    mm_users.register_scope_providers({
        "*.*": scoping_providers.FQNImportURI(),
    })

    clear_language_registrations()
    register_language('components-dsl',
                      pattern='*.components',
                      description='demo',
                      metamodel=mm_components)  # or a factory
    register_language('users-dsl',
                      pattern='*.users',
                      description='demo',
                      metamodel=mm_users)  # or a factory

    # Re-using an already registered language name must fail.
    with raises(Exception, match=r'.*already registered.*'):
        register_language('users-dsl',
                          pattern='*.users',
                          description='demo',
                          metamodel=mm_users)  # or a factory

    # --- model parsing --------------------------------------------
    my_model = mm_users.model_from_file(join(base, "example.users"))

    # --- checks ---------------------------------------------------
    user = get_unique_named_object_in_all_models(my_model, "pi")
    action1 = get_unique_named_object_in_all_models(my_model, "action1")
    assert user.instance is action1
def test_metamodel_provider_advanced_test3_global_single_metamodel():
    """
    Simplified variant of the advanced global-repo test using only one
    meta model.
    """
    #################################
    # META MODEL DEF
    #################################
    this_folder = dirname(abspath(__file__))

    def get_meta_model(global_repo, grammar_file_name):
        mm = metamodel_from_file(join(this_folder, grammar_file_name),
                                 debug=False, classes=[Cls, Obj])
        mm.register_scope_providers({
            "*.*": global_repo,
        })
        return mm

    global_repo_provider = scoping_providers.PlainNameGlobalRepo()
    global_repo_provider.register_models(
        join(this_folder, "metamodel_provider3", "single", "*.a"))

    a_mm = get_meta_model(
        global_repo_provider,
        join(this_folder, "metamodel_provider3", "A.tx"))

    clear_language_registrations()
    register_language('a-dsl', pattern='*.a',
                      description='Test Lang A', metamodel=a_mm)

    #################################
    # MODEL PARSING
    #################################
    model_repo = global_repo_provider.load_models_in_model_repo().all_models

    #################################
    # TEST MODEL
    #################################
    def get_all(repo, what):
        lst = []
        for m in repo.filename_to_model.values():
            lst = lst + get_children_of_type(what, m)
        return lst

    lst = get_all(model_repo, "Obj")
    assert len(lst) == 3
    # check all references to be resolved (!=None)
    for a in lst:
        assert a.ref

    # All objects are instances of the metamodel's Obj class.
    # NOTE: the original copied the multi-metamodel checks verbatim —
    # it compared a_mm["Cls"]._tx_fqn with itself (tautology) and
    # asserted the same textx_isinstance three times. The always-true
    # duplicates were removed; behavior is unchanged.
    from textx import textx_isinstance
    for a in lst:
        assert textx_isinstance(a, a_mm["Obj"])

    #################################
    # END
    #################################
    clear_language_registrations()
def test_metamodel_provider_basic_test():
    """
    This test checks that the global MetaModel Provider works (basic
    function):

    It is checked that no filename patterns are used twice. It is checked
    that the correct metamodel is used to load a model (by loading a model
    constellation using two metamodels).

    Note: the MetaModelProvider is obsolete. This test is fixed in terms
    of how to handle the filename --> metamodel resolution in
    textx >= 2.x
    """
    here = abspath(dirname(__file__))

    # --- meta-model definitions -----------------------------------
    mm_components = metamodel_from_file(
        join(here, 'metamodel_provider', 'Components.tx'))
    mm_components.register_scope_providers({
        "*.*": scoping_providers.FQNImportURI(),
        "Connection.from_port":
            scoping_providers.RelativeName("from_inst.component.slots"),
        "Connection.to_port":
            scoping_providers.RelativeName("to_inst.component.slots"),
    })

    mm_users = metamodel_from_file(
        join(here, 'metamodel_provider', 'Users.tx'))
    mm_users.register_scope_providers({
        "*.*": scoping_providers.FQNImportURI(),
    })

    # --- language registration ------------------------------------
    clear_language_registrations()
    for lang_name, file_pattern, mm in (
            ('components-dsl', '*.components', mm_components),
            ('users-dsl', '*.users', mm_users)):
        register_language(lang_name,
                          pattern=file_pattern,
                          description='demo',
                          metamodel=mm)  # or a factory

    # Re-registering an existing language name must fail.
    with raises(Exception, match=r'.*already registered.*'):
        register_language('users-dsl',
                          pattern='*.users',
                          description='demo',
                          metamodel=mm_users)  # or a factory

    # --- model parsing --------------------------------------------
    my_model = mm_users.model_from_file(
        join(here, "metamodel_provider", "example.users"))

    # --- checks ---------------------------------------------------
    user = get_unique_named_object_in_all_models(my_model, "pi")
    action1 = get_unique_named_object_in_all_models(my_model, "action1")
    assert user.instance is action1
def test_metamodel_provider_advanced_test3_diamond():
    """
    More complicated model (see test above): here we have a diamond shared
    dependency between the included models.
    It is also checked that the parsers are correctly cloned
    (one parser instance per loaded model file).
    """
    #################################
    # META MODEL DEF
    #################################
    this_folder = dirname(abspath(__file__))

    def get_meta_model(provider, grammar_file_name):
        # All three grammars share the same scope-provider configuration.
        mm = metamodel_from_file(join(this_folder, grammar_file_name),
                                 debug=False, classes=[Cls, Obj])
        mm.register_scope_providers({
            "*.*": provider,
            "Call.method": scoping_providers.ExtRelativeName("obj.ref",
                                                             "methods",
                                                             "extends")
        })
        return mm

    import_lookup_provider = scoping_providers.FQNImportURI()

    a_mm = get_meta_model(
        import_lookup_provider, join(this_folder, "metamodel_provider3",
                                     "A.tx"))
    b_mm = get_meta_model(
        import_lookup_provider, join(this_folder, "metamodel_provider3",
                                     "B.tx"))
    c_mm = get_meta_model(
        import_lookup_provider, join(this_folder, "metamodel_provider3",
                                     "C.tx"))

    clear_language_registrations()
    register_language(
        'a-dsl',
        pattern='*.a',
        description='Test Lang A',
        metamodel=a_mm)
    register_language(
        'b-dsl',
        pattern='*.b',
        description='Test Lang B',
        metamodel=b_mm)
    register_language(
        'c-dsl',
        pattern='*.c',
        description='Test Lang C',
        metamodel=c_mm)

    #################################
    # MODEL PARSING
    #################################

    # Loading the root model pulls in the whole diamond include structure.
    m = a_mm.model_from_file(
        join(this_folder, "metamodel_provider3", "diamond",
             "A_includes_B_C.a"))
    model_repo = m._tx_model_repository.all_models

    #################################
    # TEST MODEL (inheritance, diamond include structure)
    # - check all references are resolved
    # - check all models have an own parser
    #################################

    def get_all(model_repo, what):
        # Collect all objects of meta class `what` over all loaded models.
        lst = []
        for m in model_repo.filename_to_model.values():
            lst = lst + get_children_of_type(what, m)
        return lst

    lst = get_all(model_repo, "Call")
    assert len(lst) > 0

    # check all references to be resolved (!=None)
    for a in lst:
        assert a.method

    # check that all models have different parsers
    parsers = list(
        map(lambda x: x._tx_parser, model_repo.filename_to_model.values()))
    assert 4 == len(parsers)  # 4 files -> 4 parsers
    assert 4 == len(set(parsers))  # 4 different parsers

    #################################
    # END
    #################################
    clear_language_registrations()
def test_exception_from_included_model():
    """
    An error raised (via an object processor) inside an *included* model
    must (a) propagate to the caller and (b) point at the correct model
    location (file, line and col).
    """
    #################################
    # META MODEL DEF
    #################################
    here = dirname(abspath(__file__))

    def build_mm(provider, grammar_file_name):
        mm = metamodel_from_file(join(here, grammar_file_name),
                                 debug=False)
        mm.register_scope_providers({
            "*.*": provider,
            "Call.method": scoping_providers.ExtRelativeName(
                "obj.ref", "methods", "extends")
        })

        def raise_on_d1(m):
            # Deliberately fail for one specific object of an included file.
            from textx.exceptions import TextXSemanticError
            from textx.scoping.tools import get_location
            if m.name == "d1":
                raise TextXSemanticError("d1 triggers artifical error",
                                         **get_location(m))

        mm.register_obj_processors({"Method": raise_on_d1})
        return mm

    fqn_import = scoping_providers.FQNImportURI()

    a_mm = build_mm(fqn_import, join(here, "metamodel_provider3", "A.tx"))
    b_mm = build_mm(fqn_import, join(here, "metamodel_provider3", "B.tx"))
    c_mm = build_mm(fqn_import, join(here, "metamodel_provider3", "C.tx"))

    clear_language_registrations()
    for lang, ext, descr, mm in (
            ('a-dsl', '*.a', 'Test Lang A', a_mm),
            ('b-dsl', '*.b', 'Test Lang B', b_mm),
            ('c-dsl', '*.c', 'Test Lang C', c_mm)):
        register_language(lang, pattern=ext, description=descr, metamodel=mm)

    #################################
    # MODEL PARSING / TEST
    #################################
    import textx.exceptions
    with raises(textx.exceptions.TextXSemanticError,
                match=r'.*model_d\.b:5:3:.*d1 triggers artifical error'):
        a_mm.model_from_file(
            join(here, "metamodel_provider3", "inheritance2", "model_a.a"))

    #################################
    # END
    #################################
    clear_language_registrations()
def test_metamodel_provider_advanced_test():
    """
    Advanced test for ExtRelativeName and PlainNameGlobalRepo: a global
    model repository is shared between two meta models, and the meta
    models interact (refer to each other; one direction).
    """
    #################################
    # META MODEL DEF
    #################################
    here = dirname(abspath(__file__))

    def build_mm(global_repo, grammar_file_name):
        mm = metamodel_from_file(join(here, grammar_file_name),
                                 debug=False)
        mm.register_scope_providers({
            "*.*": global_repo,
            "Ingredient.unit": scoping_providers.ExtRelativeName(
                "type", "units", "extends")
        })
        return mm

    global_repo = scoping_providers.PlainNameGlobalRepo()
    global_repo.register_models(
        join(here, "metamodel_provider2", "*.recipe"))
    global_repo.register_models(
        join(here, "metamodel_provider2", "*.ingredient"))

    i_mm = build_mm(global_repo,
                    join(here, "metamodel_provider2", "Ingredient.tx"))
    r_mm = build_mm(global_repo,
                    join(here, "metamodel_provider2", "Recipe.tx"))

    clear_language_registrations()
    register_language(
        'recipe-dsl',
        pattern='*.recipe',
        description='demo',
        metamodel=r_mm  # or a factory
    )
    register_language(
        'ingredient-dsl',
        pattern='*.ingredient',
        description='demo',
        metamodel=i_mm  # or a factory
    )

    #################################
    # MODEL PARSING
    #################################
    model_repo = global_repo.load_models_in_model_repo().all_models

    #################################
    # TEST MODEL
    #################################
    def get_all(model_repo, what):
        # Flatten all objects of the requested meta class across models.
        return [obj
                for m in model_repo.filename_to_model.values()
                for obj in get_children_of_type(what, m)]

    lst_i = get_all(model_repo, "IngredientType")
    lst_r = get_all(model_repo, "Recipe")
    assert len(lst_i) == 2
    assert len(lst_r) == 2

    # check some references to be resolved (!=None)
    assert lst_r[0].ingredients[0].type
    assert lst_r[0].ingredients[0].unit
def test_exception_from_included_model():
    """
    This test checks that an error induced by an included model
    (thrown via an object processor) is (a) thrown and
    (b) indicates the correct model location (file, line and col).

    NOTE(review): a function with this exact name is defined earlier in
    this file; this later definition shadows it, so pytest collects only
    this version -- consider renaming one of the two.
    """
    #################################
    # META MODEL DEF
    #################################
    this_folder = dirname(abspath(__file__))

    def get_meta_model(provider, grammar_file_name):
        mm = metamodel_from_file(join(this_folder, grammar_file_name),
                                 debug=False)
        mm.register_scope_providers({
            "*.*": provider,
            "Call.method": scoping_providers.ExtRelativeName(
                "obj.ref", "methods", "extends")
        })

        def my_processor(m):
            from textx.exceptions import TextXSemanticError
            # fix: import get_location locally, exactly as the sibling
            # version of this test does; the original relied on a
            # module-level import that is not visible here, and a missing
            # name would surface as NameError instead of the expected
            # TextXSemanticError when the processor fires.
            from textx.scoping.tools import get_location
            if m.name == "d1":
                raise TextXSemanticError("d1 triggers artifical error",
                                         **get_location(m))

        mm.register_obj_processors({"Method": my_processor})
        return mm

    import_lookup_provider = scoping_providers.FQNImportURI()

    a_mm = get_meta_model(import_lookup_provider,
                          join(this_folder, "metamodel_provider3", "A.tx"))
    b_mm = get_meta_model(import_lookup_provider,
                          join(this_folder, "metamodel_provider3", "B.tx"))
    c_mm = get_meta_model(import_lookup_provider,
                          join(this_folder, "metamodel_provider3", "C.tx"))

    clear_language_registrations()
    register_language('a-dsl',
                      pattern='*.a',
                      description='Test Lang A',
                      metamodel=a_mm)
    register_language('b-dsl',
                      pattern='*.b',
                      description='Test Lang B',
                      metamodel=b_mm)
    register_language('c-dsl',
                      pattern='*.c',
                      description='Test Lang C',
                      metamodel=c_mm)

    #################################
    # MODEL PARSING / TEST
    #################################
    import textx.exceptions
    with raises(textx.exceptions.TextXSemanticError,
                match=r'.*model_d\.b:5:3:.*d1 triggers artifical error'):
        a_mm.model_from_file(
            join(this_folder, "metamodel_provider3",
                 "inheritance2", "model_a.a"))

    #################################
    # END
    #################################
    clear_language_registrations()
def test_metamodel_provider_advanced_test():
    """
    Advanced test for ExtRelativeName and PlainNameGlobalRepo.
    Here we have a global model repository shared between different
    meta models. The meta models interact (refer to each other;
    one direction).

    NOTE(review): a function with this exact name is defined earlier in
    this file; this later definition shadows it, so pytest collects only
    this version -- consider renaming one of the two.
    """
    #################################
    # META MODEL DEF
    #################################
    this_folder = dirname(abspath(__file__))

    def get_meta_model(global_repo, grammar_file_name):
        mm = metamodel_from_file(join(this_folder, grammar_file_name),
                                 debug=False)
        mm.register_scope_providers({
            "*.*": global_repo,
            "Ingredient.unit": scoping_providers.ExtRelativeName(
                "type", "units", "extends")
        })
        return mm

    global_repo = scoping_providers.PlainNameGlobalRepo()
    global_repo.register_models(
        join(this_folder, "metamodel_provider2", "*.recipe"))
    global_repo.register_models(
        join(this_folder, "metamodel_provider2", "*.ingredient"))

    i_mm = get_meta_model(
        global_repo, join(this_folder, "metamodel_provider2",
                          "Ingredient.tx"))
    r_mm = get_meta_model(
        global_repo, join(this_folder, "metamodel_provider2", "Recipe.tx"))

    clear_language_registrations()
    register_language(
        'recipe-dsl',
        pattern='*.recipe',
        description='demo',
        metamodel=r_mm  # or a factory
    )
    register_language(
        'ingredient-dsl',
        pattern='*.ingredient',
        description='demo',
        metamodel=i_mm  # or a factory
    )

    #################################
    # MODEL PARSING
    #################################
    model_repo = global_repo.load_models_in_model_repo().all_models

    #################################
    # TEST MODEL
    #################################
    def get_all(model_repo, what):
        lst = []
        # fix: iterate the loaded model instances; the original iterated
        # `model_repo` itself, unlike every sibling test in this file,
        # which all iterate `filename_to_model.values()`.
        for m in model_repo.filename_to_model.values():
            lst = lst + get_children_of_type(what, m)
        return lst

    lst_i = get_all(model_repo, "IngredientType")
    lst_r = get_all(model_repo, "Recipe")
    assert len(lst_i) == 2
    assert len(lst_r) == 2

    # check some references to be resolved (!=None)
    assert lst_r[0].ingredients[0].type
    assert lst_r[0].ingredients[0].unit
def test_metamodel_provider_advanced_test3_import():
    """
    Advanced test for ExtRelativeName and PlainNameImportURI: a global
    model repository is shared between different meta models which refer
    to each other in different directions.
    """
    #################################
    # META MODEL DEF
    #################################
    here = dirname(abspath(__file__))

    def build_mm(provider, grammar_file_name):
        mm = metamodel_from_file(join(here, grammar_file_name),
                                 debug=False, classes=[Cls, Obj])
        mm.register_scope_providers({
            "*.*": provider,
        })
        return mm

    plain_import = scoping_providers.PlainNameImportURI()

    a_mm = build_mm(plain_import,
                    join(here, "metamodel_provider3", "A.tx"))
    b_mm = build_mm(plain_import,
                    join(here, "metamodel_provider3", "B.tx"))
    c_mm = build_mm(plain_import,
                    join(here, "metamodel_provider3", "C.tx"))

    clear_language_registrations()
    for lang, ext, descr, mm in (
            ('a-dsl', '*.a', 'Test Lang A', a_mm),
            ('b-dsl', '*.b', 'Test Lang B', b_mm),
            ('c-dsl', '*.c', 'Test Lang C', c_mm)):
        register_language(lang, pattern=ext, description=descr, metamodel=mm)

    #################################
    # MODEL PARSING
    #################################
    m = a_mm.model_from_file(
        join(here, "metamodel_provider3", "circular", "model_a.a"))
    model_repo = m._tx_model_repository.all_models

    #################################
    # TEST MODEL
    #################################
    def get_all(model_repo, what):
        # Flatten all objects of the requested meta class across models.
        return [obj
                for mdl in model_repo.filename_to_model.values()
                for obj in get_children_of_type(what, mdl)]

    objs = get_all(model_repo, "Obj")
    assert len(objs) == 3

    # check all references to be resolved (!=None)
    for o in objs:
        assert o.ref

    #################################
    # END
    #################################
    clear_language_registrations()
def clear_all():
    """Drop every registered language so tests start from a clean slate."""
    clear_language_registrations()
def init_metamodel(path):
    """
    Build the exam-related meta models (exercise, exam, config), register
    their languages, and load all models found below `path`.

    Args:
        path: root folder scanned recursively for model files
            (``*.config`` files drive the initial load).

    Returns:
        Tuple ``(mm_exam, all_models, config)``: the exam meta model, the
        repository of all loaded models, and the single ``Config`` object.

    Raises:
        Exception: if zero or more than one ``Config`` model is found.
    """
    this_folder = dirname(abspath(__file__))
    global_repo = GlobalModelRepository()
    global_repo_provider = scoping_providers.FQNGlobalRepo(
        glob_args={"recursive": True})
    global_repo_provider.register_models(path + "/**/*.config")

    # custom classes shared by the exercise and exam grammars
    all_classes = [
        cl.PExam, cl.PExamContentContainer, cl.PExerciseRef, cl.PExercise,
        cl.PAsciiContent, cl.PCodeContent, cl.PFreeSpaceContent, cl.PImage,
        cl.PLatexContent, cl.PPlantUmlContent
    ]

    mm_exercise = metamodel_from_file(join(this_folder, "Exercise.tx"),
                                      global_repository=global_repo,
                                      use_regexp_group=True,
                                      classes=all_classes)
    mm_exercise.register_obj_processors({
        "MYFLOAT": lambda x: float(x),
        "MYINT": lambda x: int(x),
    })

    def name2file(name):
        # Map a dotted reference name to the exercise file it lives in.
        return name.replace(".", "/") + ".exercise"

    mm_exam = metamodel_from_file(join(this_folder, "Exam.tx"),
                                  global_repository=global_repo,
                                  use_regexp_group=True,
                                  classes=all_classes)
    mm_exam.register_scope_providers({
        "*.*": global_repo_provider,
        "dummy.dummy": scoping_providers.FQNImportURI(
            importURI_converter=name2file, search_path=[path]),
    })
    mm_exam.register_obj_processors({
        "MYFLOAT": lambda x: float(x),
        "MYINT": lambda x: int(x),
        "PExam": validation.check_exam
    })

    mm_config = metamodel_from_file(join(this_folder, "Config.tx"),
                                    global_repository=global_repo,
                                    use_regexp_group=True)

    clear_language_registrations()
    register_language("exam-exercise-lang", "*.exercise",
                      metamodel=mm_exercise)
    register_language("exam-lang", "*.exam", metamodel=mm_exam)
    register_language("exam-config-lang", "*.config", metamodel=mm_config)

    all_models = global_repo_provider.load_models_in_model_repo().all_models

    configs = get_all(all_models, what='Config')
    if len(configs) > 1:
        # fix: join with " and " -- the original separator "and " glued the
        # word "and" directly onto the preceding filename.
        raise Exception("found more than one config: {}".format(
            " and ".join(map(lambda x: x._tx_filename, configs))))
    if len(configs) != 1:
        raise Exception("found no config")
    return mm_exam, all_models, configs[0]