""" from __future__ import absolute_import, division, print_function from langkit.dsl import ASTNode from langkit.expressions import Not, Self, langkit_property from langkit.parsers import Grammar, Predicate from lexer_example import Token from utils import build_and_run class FooNode(ASTNode): pass class Name(FooNode): token_node = True @langkit_property() def is_not_class_id(): return Not(Self.symbol == 'class') foo_grammar = Grammar('main_rule') foo_grammar.add_rules( main_rule=Predicate(Name(Token.Identifier), Name.is_not_class_id), ) build_and_run(foo_grammar, 'main.py') print('Done')
        # Attributes, needed because of 'Class: A'Class'(...)
        AttributeRef(A.qual_name_internal, "'", A.identifier,
                     Null(AdaNode)),
        A.direct_name),

    name=Or(
        CallExpr(A.name, "(", cut(), A.call_suffix, ")"),
        ExplicitDeref(A.name, ".", "all"),
        DottedName(A.name, ".", cut(), A.direct_name),

        # Special case for 'Update
        UpdateAttributeRef(A.name, "'",
                           Identifier(L.Identifier(match_text="Update")),
                           A.update_attr_aggregate),

        # General Attributes
        AttributeRef(A.name, "'",
                     Predicate(A.identifier, T.Identifier.is_attr_with_args),
                     Opt("(", A.call_suffix, ")")),

        # Class attribute
        AttributeRef(A.name, "'", A.identifier, Null(A.call_suffix)),

        QualExpr(A.name, "'", Or(A.paren_expr, A.regular_aggregate)),

        A.direct_name_or_target_name,
    ),

    defining_name=DefiningName(A.static_name),
    direct_name_or_target_name=Or(A.direct_name, A.target_name),
    target_name=TargetName("@"),

    update_attr_aggregate=Or(
        A.regular_aggregate,
        Aggregate("(", cut(), Null(Expr), A.update_attr_content, ")")),

    update_attr_content=List(A.multidim_array_assoc, sep=",",
def lower(rule):
    """
    Helper to lower one parser.

    :param liblktlang.GrammarExpr rule: Grammar rule to lower.
    :rtype: Parser
    """
    # For convenience, accept null input rules, as we generally want to
    # forward them as-is to the lower level parsing machinery.
    if rule is None:
        return None

    loc = ctx.lkt_loc(rule)
    with ctx.lkt_context(rule):
        if isinstance(rule, liblktlang.ParseNodeExpr):
            node = resolve_node_ref(rule.f_node_name)

            # Lower the subparsers
            subparsers = [lower(subparser)
                          for subparser in rule.f_sub_exprs]

            # Qualifier nodes are a special case: we produce one subclass
            # or the other depending on whether the subparsers accept the
            # input.
            if node._type.is_bool_node:
                return Opt(*subparsers, location=loc).as_bool(node)

            # Likewise for enum nodes
            elif node._type.base and node._type.base.is_enum_node:
                return _Transform(_Row(*subparsers, location=loc),
                                  node.type_ref,
                                  location=loc)

            # For other nodes, always create the node when the subparsers
            # accept the input.
            else:
                return _Transform(parser=_Row(*subparsers), typ=node,
                                  location=loc)

        elif isinstance(rule, liblktlang.GrammarToken):
            token_name = rule.f_token_name.text
            try:
                val = tokens[token_name]
            except KeyError:
                check_source_language(
                    False, 'Unknown token: {}'.format(token_name))

            match_text = ''
            if rule.f_expr:
                # The grammar is supposed to maintain this invariant
                assert isinstance(rule.f_expr, liblktlang.TokenLit)
                match_text = denoted_string_literal(rule.f_expr)

            return _Token(val=val, match_text=match_text, location=loc)

        elif isinstance(rule, liblktlang.TokenLit):
            return _Token(denoted_string_literal(rule), location=loc)

        elif isinstance(rule, liblktlang.GrammarList):
            return List(lower(rule.f_expr),
                        empty_valid=rule.f_kind.text == '*',
                        list_cls=resolve_node_ref(rule.f_list_type),
                        sep=lower(rule.f_sep),
                        location=loc)

        elif isinstance(rule, (liblktlang.GrammarImplicitPick,
                               liblktlang.GrammarPick)):
            return Pick(*[lower(subparser) for subparser in rule.f_exprs],
                        location=loc)

        elif isinstance(rule, liblktlang.GrammarRuleRef):
            return getattr(grammar, rule.f_node_name.text)

        elif isinstance(rule, liblktlang.GrammarOrExpr):
            return Or(*[lower(subparser)
                        for subparser in rule.f_sub_exprs],
                      location=loc)

        elif isinstance(rule, liblktlang.GrammarOpt):
            return Opt(lower(rule.f_expr), location=loc)

        elif isinstance(rule, liblktlang.GrammarOptGroup):
            return Opt(*[lower(subparser) for subparser in rule.f_expr],
                       location=loc)

        elif isinstance(rule, liblktlang.GrammarExprList):
            return Pick(*[lower(subparser) for subparser in rule],
                        location=loc)

        elif isinstance(rule, liblktlang.GrammarDiscard):
            return Discard(lower(rule.f_expr), location=loc)

        elif isinstance(rule, liblktlang.GrammarNull):
            return Null(resolve_node_ref(rule.f_name), location=loc)

        elif isinstance(rule, liblktlang.GrammarSkip):
            return Skip(resolve_node_ref(rule.f_name), location=loc)

        elif isinstance(rule, liblktlang.GrammarDontSkip):
            return DontSkip(lower(rule.f_expr),
                            lower(rule.f_dont_skip),
                            location=loc)

        elif isinstance(rule, liblktlang.GrammarPredicate):
            # Predicate parsers reference their property through a dotted
            # name (Node.property): resolve the node, then look the
            # property up on it.
            check_source_language(
                isinstance(rule.f_prop_ref, liblktlang.DotExpr),
                'Invalid property reference')
            node = resolve_node_ref(rule.f_prop_ref.f_prefix)
            prop_name = rule.f_prop_ref.f_suffix.text
            try:
                prop = getattr(node, prop_name)
            except AttributeError:
                check_source_language(
                    False,
                    '{} has no {} property'.format(
                        node._name.camel_with_underscores, prop_name))
            return Predicate(lower(rule.f_expr), prop, location=loc)

        else:
            raise NotImplementedError('unhandled parser: {}'.format(rule))
Op.alt_rem("rem")), A.factor), A.factor), unop_term=Or(UnOp(Op.alt_plus("+") | Op.alt_minus("-"), A.term), A.term), simple_expr=Or( BinOp(A.simple_expr, Or(Op.alt_plus("+"), Op.alt_minus("-"), Op.alt_concat("&")), A.term), A.unop_term), boolean_op=Or( Op.alt_xor("xor"), Op.alt_and_then("and", "then"), Op.alt_and("and"), Op.alt_or_else("or", "else"), Op.alt_or("or"), ), discrete_range=Or( BinOp(A.simple_expr, Op.alt_double_dot(".."), A.simple_expr), Predicate(A.name, T.Name.is_range_attribute)), choice=Or(A.discrete_range, A.discrete_subtype_indication, A.expr, A.others_designator), choice_list=List(A.choice, sep="|", list_cls=AlternativesList), rel_op=Or( Op.alt_not_in("not", "in"), Op.alt_in("in"), ), membership_choice=Or( A.discrete_range, DiscreteSubtypeExpr(A.discrete_subtype_indication), A.simple_expr, ), membership_choice_list=List(A.membership_choice, sep="|", list_cls=ExprAlternativesList),
    name=Or(
        CallExpr(A.name, "(", cut(), A.call_suffix, ")"),
        ExplicitDeref(A.name, ".", "all"),
        DottedName(A.name, ".", cut(), A.direct_name),

        # Special case for 'Update
        UpdateAttributeRef(
            A.name, "'",
            Identifier(L.Identifier(match_text="Update")),
            A.update_attr_aggregate
        ),

        # General Attributes
        AttributeRef(A.name, "'",
                     Predicate(A.identifier, T.Identifier.is_not_class_id),
                     Opt("(", A.call_suffix, ")")),

        AttributeRef(A.name, "'",
                     Identifier(L.Identifier(match_text='Class')),
                     Null(A.call_suffix)),

        QualExpr(A.name, "'", Or(A.paren_expr, A.aggregate)),

        A.direct_name_or_target_name,
    ),

    defining_name=DefiningName(A.static_name),
    direct_name_or_target_name=Or(A.direct_name, A.target_name),
    target_name=TargetName("@"),
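# A minimal sketch of the node-level predicate the grammar above relies
# on: Predicate parsers need a parameterless boolean property, so
# Identifier would expose one that rejects the 'class' symbol, mirroring
# the test property shown earlier. The import list, the BaseId base class
# and the property body are assumptions, not Libadalang's actual
# implementation.

from langkit.expressions import Not, Self, langkit_property


class Identifier(BaseId):
    token_node = True

    @langkit_property()
    def is_not_class_id():
        return Not(Self.symbol == 'class')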