Example #1
0
def test_invalid_rule_2(l):
    """
    Register a 'def' case rule whose conditional alternative deliberately
    sends Token.LexingFailure (exercises invalid-rule diagnostics).
    """
    # After a Token.Var, matching 'def' is an error for this rule set.
    failing_alt = Alt(
        prev_token_cond=(Token.Var, ),
        send=Token.LexingFailure,
        match_size=3,
    )
    # Otherwise, 'def' lexes normally as Token.Def.
    default_alt = Alt(send=Token.Def, match_size=3)
    l.add_rules(Case(Literal('def'), failing_alt, default_alt))
Example #2
0
    def lower_case_alt(alt):
        """
        Lower the alternative of a case lexing rule.

        :type alt: liblktlang.BaseLexerCaseRuleAlt
        :rtype: Alt
        """
        # Only conditional alternatives carry a previous-token condition;
        # unconditional ones lower with ``prev_token_cond=None``.
        cond = (
            [lower_token_ref(ref) for ref in alt.f_cond_exprs]
            if isinstance(alt, liblktlang.LexerCaseRuleCondAlt)
            else None
        )
        send_action = alt.f_send
        return Alt(
            prev_token_cond=cond,
            send=lower_token_ref(send_action.f_sent),
            match_size=int(send_action.f_match_size.text),
        )
Example #3
0
]

# Those keywords can also be attributes, which is why they have special
# handling, for example as in A'Access.
for kw_text, kw_token in [
    ("access", Token.Access),
    ("range", Token.Range),
    ("digits", Token.Digits),
    ("delta", Token.Delta),
    ("mod", Token.Mod),
]:
    # Right after a tick (A'Access), the word is an attribute reference,
    # so emit an identifier rather than the keyword token.
    as_attribute = Alt(
        prev_token_cond=(Token.Tick, ),
        send=Token.Identifier,
        match_size=len(kw_text),
    )
    as_keyword = Alt(send=kw_token, match_size=len(kw_text))
    rules.append(Case(NoCaseLit(kw_text), as_attribute, as_keyword))

rules += [
    # Keywords
    (NoCaseLit("abort"), Token.Abort),
    (NoCaseLit("else"), Token.Else),
    (NoCaseLit("new"), Token.New),
    (NoCaseLit("return"), Token.Return),
    (NoCaseLit("abs"), Token.Abs),
    (NoCaseLit("elsif"), Token.Elsif),
    (NoCaseLit("not"), Token.Not),
    (NoCaseLit("reverse"), Token.Reverse),
    (NoCaseLit("end"), Token.End),
    (NoCaseLit("null"), Token.Null),
rflx_lexer = Lexer(Token)

# Base rules: whitespace is skipped entirely, "--" line comments are kept
# as Token.Comment.
base_rules = [
    (Pattern(r"[ \t\r\n]+"), Ignore()),
    (Pattern(r"--.*"), Token.Comment),
]
rflx_lexer.add_rules(*base_rules)

# Hack to support keywords that equal attributes
# Inspired by Libadalang grammar (ada/language/lexer.py)
attribute_cases = []
for text, token in [
    ("First", Token.First),
    ("Last", Token.Last),
    ("Size", Token.Size),
    ("Valid_Checksum", Token.ValidChecksum),
    ("Has_Data", Token.HasData),
    ("Head", Token.Head),
    ("Opaque", Token.Opaque),
    ("Present", Token.Present),
    ("Valid", Token.Valid),
]:
    # Only treat the word as the attribute token when it follows a tick;
    # in any other position it is a plain unqualified identifier.
    after_tick = Alt(
        prev_token_cond=(Token.Tick, ),
        send=token,
        match_size=len(text),
    )
    otherwise = Alt(send=Token.UnqualifiedIdentifier, match_size=len(text))
    attribute_cases.append(Case(Literal(text), after_tick, otherwise))
rflx_lexer.add_rules(*attribute_cases)