def load_models(self, model, encoding='utf-8'):
    from textx.model import get_metamodel
    from textx.scoping import GlobalModelRepository

    # Do we already have loaded models (analysis)? No -> check/load them.
    if not hasattr(model, "_tx_model_repository"):
        # Seed the model's repository from the meta-model's repository,
        # if one exists, so already loaded files are reused.
        if hasattr(get_metamodel(model), "_tx_model_repository"):
            model_repository = GlobalModelRepository(
                get_metamodel(model)._tx_model_repository.all_models)
        else:
            model_repository = GlobalModelRepository()
        model._tx_model_repository = model_repository
    self._load_referenced_models(model, encoding=encoding)
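
# A minimal usage sketch (not part of the source above); "Flow.tx" and the
# model file names are hypothetical. With a global repository on the
# meta-model and an ImportURI-based provider registered, each loaded model
# receives its own repository seeded from the shared one, so referenced
# files are parsed only once.
from textx import metamodel_from_file
import textx.scoping.providers as scoping_providers

mm = metamodel_from_file("Flow.tx", global_repository=True)
mm.register_scope_providers({"*.*": scoping_providers.FQNImportURI()})

m1 = mm.model_from_file("first.flow")
m2 = mm.model_from_file("second.flow")
assert hasattr(m1, "_tx_model_repository")  # set up by load_models above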
def _pre_ref_resolution_callback(other_model):
    from textx.scoping import GlobalModelRepository
    filename = other_model._tx_filename
    assert filename
    # print("METAMODEL PRE-CALLBACK => {}".format(filename))
    other_model._tx_model_repository = GlobalModelRepository(
        self._tx_model_repository.all_models)
    self._tx_model_repository.all_models\
        .filename_to_model[filename] = other_model
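
# Sketch of the callback's observable effect (file names hypothetical):
# models pulled in during reference resolution are registered in the shared
# repository under their absolute file names, keyed in filename_to_model.
from textx import metamodel_from_file
import textx.scoping.providers as scoping_providers

mm = metamodel_from_file("Flow.tx", global_repository=True)
mm.register_scope_providers({"*.*": scoping_providers.FQNImportURI()})
model = mm.model_from_file("main.flow")  # "main.flow" imports "common.flow"

repo = mm._tx_model_repository.all_models
print(list(repo.filename_to_model))  # includes "common.flow", loaded once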
def __init__(self, file_name=None, classes=None, builtins=None,
             auto_init_attributes=True, ignore_case=False, skipws=True,
             ws=None, autokwd=False, memoization=False,
             textx_tools_support=False, use_regexp_group=False,
             **kwargs):

    # evaluate optional parameter "global_repository"
    global_repository = kwargs.pop("global_repository", False)
    if global_repository:
        from textx.scoping import GlobalModelRepository
        if isinstance(global_repository, GlobalModelRepository):
            self._tx_model_repository = global_repository
        else:
            self._tx_model_repository = GlobalModelRepository()

    super(TextXMetaModel, self).__init__(**kwargs)

    self.file_name = file_name
    self.rootcls = None
    self.builtins = builtins

    # Convert classes to dict for easier lookup
    self.user_classes = {}
    if classes:
        for c in classes:
            self.user_classes[c.__name__] = c

    self.auto_init_attributes = auto_init_attributes
    self.ignore_case = ignore_case
    self.skipws = skipws
    self.ws = ws
    self.autokwd = autokwd
    self.memoization = memoization
    self.textx_tools_support = textx_tools_support
    self.use_regexp_group = use_regexp_group

    # Registered model processors
    self._model_processors = []

    # Registered object processors
    self.obj_processors = {}

    # Match rule and base type conversion callables
    self.type_convertors = {
        'BOOL': lambda x: x == '1' or x.lower() == 'true',
        'INT': lambda x: int(x),
        'FLOAT': lambda x: float(x),
        'STRICTFLOAT': lambda x: float(x),
        'STRING': lambda x: x[1:-1].replace(r'\"', r'"')
                                   .replace(r"\'", "'"),
    }

    # Registered scope providers
    self.scope_providers = {}

    # Namespaces
    self.namespaces = {}
    self._namespace_stack = []

    # Imported namespaces
    self._imported_namespaces = {}

    # Referenced languages
    self.referenced_languages = {}

    # Create new namespace for BASETYPE classes
    self._enter_namespace('__base__')

    # Base types hierarchy should exist in each meta-model
    base_id = self._new_class('ID', ID, 0)
    base_string = self._new_class('STRING', STRING, 0)
    base_bool = self._new_class('BOOL', BOOL, 0)
    base_int = self._new_class('INT', INT, 0)
    base_float = self._new_class('FLOAT', FLOAT, 0)
    base_strictfloat = self._new_class('STRICTFLOAT', STRICTFLOAT, 0)
    base_number = self._new_class('NUMBER', NUMBER, 0,
                                  inherits=[base_strictfloat, base_int])
    base_type = self._new_class('BASETYPE', BASETYPE, 0,
                                inherits=[base_number, base_float,
                                          base_bool, base_id, base_string])
    self._new_class('OBJECT', OBJECT, 0, inherits=[base_type],
                    rule_type=RULE_ABSTRACT)

    self._leave_namespace()

    # Resolve file name to absolute path.
    if file_name:
        file_name = os.path.abspath(file_name)

    # Root path will be dir name of the file if loaded from file.
    # If the grammar is not loaded from file the 'import' statement
    # can't be used.
    self.root_path = os.path.dirname(file_name) if file_name else None

    # Enter namespace for given file or None if the meta-model is
    # constructed from string.
    self._enter_namespace(self._namespace_for_file_name(file_name))
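
# A minimal sketch of the "global_repository" keyword evaluated above; the
# grammar file names are hypothetical. Passing True lets the meta-model
# create its own repository; passing a GlobalModelRepository instance
# shares one repository (and thus loaded models) across meta-models.
from textx import metamodel_from_file
from textx.scoping import GlobalModelRepository

mm_a = metamodel_from_file("A.tx", global_repository=True)

repo = GlobalModelRepository()
mm_b = metamodel_from_file("B.tx", global_repository=repo)
mm_c = metamodel_from_file("C.tx", global_repository=repo)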
# Assumed module-level imports for this snippet; "cl" (the model classes),
# "validation", and "get_all" are project-local helpers not shown here.
from os.path import abspath, dirname, join

from textx import (metamodel_from_file, register_language,
                   clear_language_registrations)
from textx.scoping import GlobalModelRepository
import textx.scoping.providers as scoping_providers


def init_metamodel(path):
    this_folder = dirname(abspath(__file__))

    # One repository shared by all three meta-models, so that exams,
    # exercises, and configs can reference each other's models.
    global_repo = GlobalModelRepository()
    global_repo_provider = scoping_providers.FQNGlobalRepo(
        glob_args={"recursive": True})
    global_repo_provider.register_models(path + "/**/*.config")

    all_classes = [cl.PExam, cl.PExamContentContainer, cl.PExerciseRef,
                   cl.PExercise, cl.PAsciiContent, cl.PCodeContent,
                   cl.PFreeSpaceContent, cl.PImage, cl.PLatexContent,
                   cl.PPlantUmlContent]

    mm_exercise = metamodel_from_file(join(this_folder, "Exercise.tx"),
                                      global_repository=global_repo,
                                      use_regexp_group=True,
                                      classes=all_classes)
    mm_exercise.register_obj_processors({
        "MYFLOAT": lambda x: float(x),
        "MYINT": lambda x: int(x),
    })

    def name2file(name):
        # Map a fully qualified name like "a.b.c" to "a/b/c.exercise".
        return name.replace(".", "/") + ".exercise"

    mm_exam = metamodel_from_file(join(this_folder, "Exam.tx"),
                                  global_repository=global_repo,
                                  use_regexp_group=True,
                                  classes=all_classes)
    mm_exam.register_scope_providers({
        "*.*": global_repo_provider,
        "dummy.dummy": scoping_providers.FQNImportURI(
            importURI_converter=name2file, search_path=[path]),
    })
    mm_exam.register_obj_processors({
        "MYFLOAT": lambda x: float(x),
        "MYINT": lambda x: int(x),
        "PExam": validation.check_exam,
    })

    mm_config = metamodel_from_file(join(this_folder, "Config.tx"),
                                    global_repository=global_repo,
                                    use_regexp_group=True)

    clear_language_registrations()
    register_language("exam-exercise-lang", "*.exercise",
                      metamodel=mm_exercise)
    register_language("exam-lang", "*.exam", metamodel=mm_exam)
    register_language("exam-config-lang", "*.config", metamodel=mm_config)

    all_models = global_repo_provider.load_models_in_model_repo().all_models

    # Exactly one Config model must exist below the given path.
    configs = get_all(all_models, what='Config')
    if len(configs) > 1:
        raise Exception("found more than one config: {}".format(
            " and ".join(c._tx_filename for c in configs)))
    if len(configs) == 0:
        raise Exception("found no config")
    return mm_exam, all_models, configs[0]
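
# Hypothetical call site for init_metamodel; the paths are placeholders.
# The returned exam meta-model is then used to load individual exam files.
mm_exam, all_models, config = init_metamodel("/data/exams")
exam = mm_exam.model_from_file("/data/exams/term1/final.exam")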