Code example #1
def _remove_all_affected_models_in_construction(model):
    """
    Remove all models related to model being constructed
    from any model repository.
    This function is private to model.py, since the models
    being constructed are identified by having a reference
    resolver (this is an internal design decision of model.py).
    See: _start_model_construction
    """
    all_affected_models = get_included_models(model)
    models_to_be_removed = list(
        filter(lambda x: hasattr(x, "_tx_reference_resolver"),
               all_affected_models))
    remove_models_from_repositories(all_affected_models, models_to_be_removed)
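
The "being constructed" marker this helper tests for is simply the presence of the _tx_reference_resolver attribute, which code example #2 below sets during construction and which is removed again when construction ends. A minimal standalone sketch of that convention (the _Dummy class is hypothetical; only the attribute name comes from the source):

class _Dummy:
    pass

m = _Dummy()
m._tx_reference_resolver = object()          # mark: model being constructed
assert hasattr(m, "_tx_reference_resolver")  # would be selected for removal
# _end_model_construction is expected to drop the marker again:
del m._tx_reference_resolver
assert not hasattr(m, "_tx_reference_resolver")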
Code example #2
def parse_tree_to_objgraph(parser,
                           parse_tree,
                           file_name=None,
                           pre_ref_resolution_callback=None,
                           is_main_model=True,
                           encoding='utf-8'):
    """
    Transforms parse_tree to object graph representing model in a
    new language.
    """

    metamodel = parser.metamodel

    if metamodel.textx_tools_support:
        pos_rule_dict = {}
    pos_crossref_list = []
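    # Positional bookkeeping for textx-tools support (e.g. go-to-definition
    # in a language server); both containers are attached to the model at
    # the bottom of this function.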

    def process_match(nt):
        """
        Process subtree for match rules.
        """
        line, col = parser.pos_to_linecol(nt.position)
        if isinstance(nt, Terminal):
            return metamodel.process(nt.value,
                                     nt.rule_name,
                                     filename=parser.file_name,
                                     line=line,
                                     col=col)
        else:
            # If the RHS of an assignment is a NonTerminal, it is the
            # product of a complex match rule. Convert the nodes to text
            # and join them.
            if len(nt) > 1:
                result = "".join([text(process_match(n)) for n in nt])
            else:
                result = process_match(nt[0])
            return metamodel.process(result,
                                     nt.rule_name,
                                     filename=parser.file_name,
                                     line=line,
                                     col=col)

    def process_node(node):
        line, col = parser.pos_to_linecol(node.position)
        if isinstance(node, Terminal):
            from arpeggio import RegExMatch
            if metamodel.use_regexp_group and \
                    isinstance(node.rule, RegExMatch):
                if node.rule.regex.groups == 1:
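                    # With use_regexp_group, a match whose regex has
                    # exactly one group is reduced to that group's text.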
                    value = node.extra_info.group(1)
                    return metamodel.process(value,
                                             node.rule_name,
                                             filename=parser.file_name,
                                             line=line,
                                             col=col)
                else:
                    return metamodel.process(node.value,
                                             node.rule_name,
                                             filename=parser.file_name,
                                             line=line,
                                             col=col)
            else:
                return metamodel.process(node.value,
                                         node.rule_name,
                                         filename=parser.file_name,
                                         line=line,
                                         col=col)

        assert node.rule.root, \
            "Not a root node: {}".format(node.rule.rule_name)
        # If this node is created by some root rule
        # create metaclass instance.
        inst = None
        if not node.rule_name.startswith('__asgn'):
            # If not assignment
            # Get class
            mclass = node.rule._tx_class

            if mclass._tx_type == RULE_ABSTRACT:
                # If this meta-class is the product of an abstract rule,
                # replace it with the matched concrete meta-class further
                # down the inheritance tree. An abstract meta-class should
                # never be instantiated.
                if len(node) > 1:
                    try:
                        return process_node(
                            next(n for n in node if type(n) is not Terminal and
                                 n.rule._tx_class is not RULE_MATCH))  # noqa
                    except StopIteration:
                        # All nodes are match rules, do concatenation
                        return ''.join(text(n) for n in node)
                else:
                    return process_node(node[0])
            elif mclass._tx_type == RULE_MATCH:
                # If this is the product of a match rule, handle it as the
                # RHS of an assignment and return the converted Python type.
                return process_match(node)

            if parser.debug:
                parser.dprint("CREATING INSTANCE {}".format(node.rule_name))

            # If user class is given
            # use it instead of generic one
            is_user = False
            if node.rule_name in metamodel.user_classes:
                user_class = metamodel.user_classes[node.rule_name]

                # Object initialization will be done afterwards.
                # At this point we only need the object to be allocated
                # so that nested objects get a correct reference.
                inst = user_class.__new__(user_class)
                user_class._tx_obj_attrs[id(inst)] = {}
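                # Attribute values for user classes are parked in this
                # per-instance dict; the class's own __init__ is invoked
                # later, after parsing (see parser._user_class_inst below).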
                is_user = True

            else:
                # Generic class will call attributes init
                # from the constructor
                inst = mclass.__new__(mclass)

            # Initialize object attributes
            parser.metamodel._init_obj_attrs(inst)

            # Collect attributes directly on meta-class instance
            obj_attrs = inst

            inst._tx_position = node.position
            inst._tx_position_end = node.position_end

            # Push the instance on the stack (here the real object and
            # the attribute carrier are the same object)
            parser._inst_stack.append((inst, obj_attrs))

            for n in node:
                if parser.debug:
                    parser.dprint("Recursing into {} = '{}'".format(
                        type(n).__name__, text(n)))
                process_node(n)

            parser._inst_stack.pop()

            if is_user:
                parser._user_class_inst.append(inst)

            # If this object is nested add 'parent' reference
            if parser._inst_stack:
                setattr(obj_attrs, 'parent', parser._inst_stack[-1][0])

            # Special case for 'name' attrib. It is used for cross-referencing
            if hasattr(inst, 'name') and inst.name:
                # Objects of each class are in its own namespace
                if not id(inst.__class__) in parser._instances:
                    parser._instances[id(inst.__class__)] = {}
                try:
                    parser._instances[id(inst.__class__)][inst.name] = inst
                except TypeError as e:
                    if 'unhashable type' in e.args[0]:
                        raise TextXSemanticError(
                            'Object name can\'t be of unhashable type.'
                            ' Please see the note in the docs'
                            ' section http://textx.github.io/textX/stable/grammar/#references'
                        )  # noqa
                    raise

            if parser.debug:
                parser.dprint("LEAVING INSTANCE {}".format(node.rule_name))

        else:
            # Handle assignments
            attr_name = node.rule._attr_name
            op = node.rule_name.split('_')[-1]
            model_obj, obj_attr = parser._inst_stack[-1]
            cls = type(model_obj)
            metaattr = cls._tx_attrs[attr_name]

            if parser.debug:
                parser.dprint('Handling assignment: {} {}...'.format(
                    op, attr_name))

            if op == 'optional':
                setattr(obj_attr, attr_name, True)

            elif op == 'plain':
                attr_value = getattr(obj_attr, attr_name)
                if attr_value and type(attr_value) is not list:
                    fmt = "Multiple assignments to attribute {} at {}"
                    raise TextXSemanticError(message=fmt.format(
                        attr_name, parser.pos_to_linecol(node.position)),
                                             err_type=MULT_ASSIGN_ERROR)

                # Convert the tree below the assignment to a proper value
                value = process_node(node[0])

                if metaattr.ref and not metaattr.cont:
                    # If this is non-containing reference create ObjCrossRef
                    p = metaattr.scope_provider
                    rn = metaattr.match_rule_name
                    value = ObjCrossRef(obj_name=value,
                                        cls=metaattr.cls,
                                        position=node[0].position,
                                        scope_provider=p,
                                        match_rule_name=rn)
                    parser._crossrefs.append((model_obj, metaattr, value))
                    return model_obj

                if type(attr_value) is list:
                    attr_value.append(value)
                else:
                    setattr(obj_attr, attr_name, value)

            elif op in ['list', 'oneormore', 'zeroormore']:
                for n in node:
                    # If the node is a separator, skip it
                    if n.rule_name != 'sep':
                        # Convert node to proper type
                        # Rule links will be resolved later
                        value = process_node(n)

                        if metaattr.ref and not metaattr.cont:
                            # If this is non-containing reference
                            # create ObjCrossRef

                            p = metaattr.scope_provider
                            rn = metaattr.match_rule_name
                            value = ObjCrossRef(obj_name=value,
                                                cls=metaattr.cls,
                                                position=n.position,
                                                scope_provider=p,
                                                match_rule_name=rn)

                            parser._crossrefs.append(
                                (obj_attr, metaattr, value))
                            continue

                        if not hasattr(obj_attr, attr_name) or \
                                getattr(obj_attr, attr_name) is None:
                            setattr(obj_attr, attr_name, [])
                        getattr(obj_attr, attr_name).append(value)
            else:
                # This shouldn't happen
                assert False

        # Collect rules for textx-tools
        if inst is not None and metamodel.textx_tools_support:
            pos = (inst._tx_position, inst._tx_position_end)
            pos_rule_dict[pos] = inst

        return inst

    def call_obj_processors(metamodel,
                            model_obj,
                            metaclass_of_grammar_rule=None):
        """
        Depth-first model object processing.
        """
        try:
            if metaclass_of_grammar_rule is None:
                metaclass_of_grammar_rule = \
                    metamodel[model_obj.__class__.__name__]
        except KeyError:
            raise TextXSemanticError('Unknown meta-class "{}".'.format(
                model_obj.__class__.__name__))

        if metaclass_of_grammar_rule._tx_type is RULE_MATCH:
            # Object processors for match rules are already called
            # in `process_match`
            return

        many = [MULT_ONEORMORE, MULT_ZEROORMORE]

        # return value of obj_processor
        return_value_grammar = None
        return_value_current = None

        # Enter the recursive visit of attributes only if the class of the
        # object being processed is a meta-class of the current meta-model.
        if model_obj.__class__.__name__ in metamodel:
            if hasattr(model_obj, '_tx_fqn'):
                current_metaclass_of_obj = metamodel[model_obj._tx_fqn]
            else:
                # fallback (not used - unsure if this case is required...):
                current_metaclass_of_obj = metamodel[
                    model_obj.__class__.__name__]
            assert current_metaclass_of_obj is not None

            for metaattr in current_metaclass_of_obj._tx_attrs.values():
                # If the attribute is a base type or a containment
                # reference, go down
                if metaattr.cont:
                    attr = getattr(model_obj, metaattr.name)
                    if attr:
                        if metaattr.mult in many:
                            for idx, obj in enumerate(attr):
                                if obj is not None:
                                    result = call_obj_processors(
                                        metamodel, obj, metaattr.cls)
                                    if result is not None:
                                        attr[idx] = result
                        else:
                            result = call_obj_processors(
                                metamodel, attr, metaattr.cls)
                            if result is not None:
                                setattr(model_obj, metaattr.name, result)

            # call obj_proc of the current meta_class if type == RULE_ABSTRACT
            if current_metaclass_of_obj._tx_fqn !=\
                    metaclass_of_grammar_rule._tx_fqn:
                assert RULE_ABSTRACT == metaclass_of_grammar_rule._tx_type
                if metamodel.has_obj_processor(
                        current_metaclass_of_obj.__name__):
                    return_value_current = metamodel.process(
                        model_obj, current_metaclass_of_obj.__name__,
                        **get_location(model_obj))

        # call obj_proc of rule found in grammar
        if metamodel.has_obj_processor(metaclass_of_grammar_rule.__name__):
            loc = get_location(model_obj)
            return_value_grammar = metamodel.process(
                model_obj, metaclass_of_grammar_rule.__name__, **loc)

        # Both obj_processors are called if two different processors are
        # defined for the object meta-class and the grammar meta-class
        # (this can happen with type == RULE_ABSTRACT), e.g.:
        #   Base: Special1|Special2;
        #   RuleCurrentlyChecked: att_to_be_checked=[Base]
        # with object processors defined for Base, Special1, and Special2.
        #
        # Both processors are called, but for the return value the
        # obj_processor corresponding to the object (e.g. of type Special1)
        # dominates over the obj_processor of the grammar rule (Base).
        #
        # The order they are called is: first object (e.g., Special1), then
        # the grammar based metaclass object processor (e.g., Base).
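        #
        # A hedged sketch of registering such processors via the public
        # API (rule names taken from the example above; mm stands for an
        # illustrative metamodel object):
        #
        #   mm.register_obj_processors({
        #       'Base':     lambda o: ...,
        #       'Special1': lambda o: ...,
        #   })
        #
        # For an object parsed as Special1, both processors run (Special1
        # first), and a non-None return value from the Special1 processor
        # takes precedence over the one from Base.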
        if return_value_current is not None:
            return return_value_current
        else:
            return return_value_grammar  # may be None

    # load model from file (w/o reference resolution)
    # Note: if an exception happens here, the model was not yet
    # added to any repository. Thus, we have no special exception
    # safety handling at this point...
    model = process_node(parse_tree)

    # At this point the initial version of the model has been created.
    # We catch any exceptions here to clean up cached models introduced
    # by, e.g., scope providers. Models are marked as "being constructed"
    # if they are non-trivial (i.e. not just a string); see below
    # ("model being constructed").
    try:
        # Register filename of the model for later use (e.g. imports/scoping).
        is_immutable_obj = False
        try:
            model._tx_filename = file_name
            model._tx_metamodel = metamodel
            # mark model as "model being constructed"
            _start_model_construction(model)
        except AttributeError:
            # model is of some immutable type
            is_immutable_obj = True

        if pre_ref_resolution_callback:
            pre_ref_resolution_callback(model)

        if hasattr(model, '_tx_metamodel'):
            assert hasattr(model, '_tx_model_params')

        for scope_provider in metamodel.scope_providers.values():
            from textx.scoping import ModelLoader
            if isinstance(scope_provider, ModelLoader):
                scope_provider.load_models(model, encoding=encoding)

        for crossref in parser._crossrefs:
            crossref = crossref[2]
            if crossref.scope_provider is not None:
                from textx.scoping import ModelLoader
                scope_provider = crossref.scope_provider
                if isinstance(scope_provider, ModelLoader):
                    scope_provider.load_models(model, encoding=encoding)

        if not is_immutable_obj:
            model._tx_reference_resolver = ReferenceResolver(
                parser, model, pos_crossref_list)
            model._tx_parser = parser

        if is_main_model:
            models = get_included_models(model)
            try:
                # filter out all models w/o resolver:
                models = list(
                    filter(lambda x: hasattr(x, "_tx_reference_resolver"),
                           models))

                resolved_count = 1
                unresolved_count = 1
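                # Both counters start at 1 only to force a first pass; the
                # loop below is a fixed-point iteration that stops once
                # everything is resolved or a pass makes no progress.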
                while unresolved_count > 0 and resolved_count > 0:
                    resolved_count = 0
                    unresolved_count = 0
                    # print("***RESOLVING {} models".format(len(models)))
                    for m in models:
                        resolved_count_for_this_model, delayed_crossrefs = \
                            m._tx_reference_resolver.resolve_one_step()
                        resolved_count += resolved_count_for_this_model
                        unresolved_count += len(delayed_crossrefs)
                    # print("DEBUG: delayed #:{} unresolved #:{}".
                    #      format(unresolved_count,unresolved_count))
                if unresolved_count > 0:
                    error_text = "Unresolvable cross references:"

                    for m in models:
                        for _, _, delayed \
                                in m._tx_reference_resolver.delayed_crossrefs:
                            line, col = parser.pos_to_linecol(delayed.position)
                            error_text += ' "{}" of class "{}" at {}'.format(
                                delayed.obj_name, delayed.cls.__name__,
                                (line, col))
                    raise TextXSemanticError(error_text, line=line, col=col)

                for m in models:
                    assert not m._tx_reference_resolver.parser._inst_stack

                # cleanup
                for m in models:
                    _end_model_construction(m)

                # final check that everything went ok
                for m in models:
                    assert 0 == len(
                        get_children_of_type(Postponed, m))

                    # The model is loaded and all links are resolved,
                    # so we do a depth-first call of the object
                    # processors.
                    if parser.debug:
                        parser.dprint("CALLING OBJECT PROCESSORS")
                    call_obj_processors(m._tx_metamodel, m)

            except:  # noqa
                # Remove all processed models from the (global) repositories,
                # if present (all of them, not only the model with errors,
                # since models with errors may be included in other models)
                remove_models_from_repositories(models, models)
                raise

        if metamodel.textx_tools_support \
                and type(model) not in PRIMITIVE_PYTHON_TYPES:
            # Cross-references for go-to definition language server support
            # Already sorted based on ref_pos_start attr
            # (required for binary search)
            model._pos_crossref_list = pos_crossref_list

            # Dict of rule instances keyed by their position in the text,
            # sorted in reverse positional order so that nested rules
            # precede the rules that contain them.
            model._pos_rule_dict = OrderedDict(
                sorted(pos_rule_dict.items(), key=lambda x: x[0],
                       reverse=True))
    # exception occurred during model creation
    except:  # noqa
        _remove_all_affected_models_in_construction(model)
        raise

    return model
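
Code example #2 is internal machinery; in textX it is normally reached indirectly through the public metamodel API. A minimal usage sketch (the grammar and the input string are made up for illustration):

from textx import metamodel_from_str

GRAMMAR = '''
Model: points+=Point;
Point: 'point' name=ID x=INT y=INT;
'''

mm = metamodel_from_str(GRAMMAR)
model = mm.model_from_str('point p1 1 2 point p2 3 4')
for p in model.points:
    print(p.name, p.x, p.y)   # -> p1 1 2, then p2 3 4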