Example #1
def ignore_constructor(start_ignore_layout, end_ignore_layout):
    """
    Adapter to build an Ignore instance with the same API as WithText
    constructors.
    """
    del start_ignore_layout, end_ignore_layout
    return Ignore()
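A minimal sketch of how such an adapter can be used (assuming the langkit.lexer import path seen in the other snippets here; the TOKEN_CONSTRUCTORS table is hypothetical):

from langkit.lexer import Ignore, WithText

# Both callables now share the same keyword signature, so a table of token
# constructors can treat ignored and text-carrying kinds uniformly.
TOKEN_CONSTRUCTORS = {
    "Whitespace": ignore_constructor,
    "RCurl": WithText,
}

action = TOKEN_CONSTRUCTORS["Whitespace"](start_ignore_layout=False,
                                          end_ignore_layout=True)
assert isinstance(action, Ignore)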
Example #2
class Token(LexerToken):
    LshAssign = WithText()
    BinOr = WithText()
    Rcurl = WithText(end_ignore_layout=True)
    With = WithText()
    Plus = WithText()
    Lt = WithText()
    Number = WithText()
    String = WithText()
    Comment = WithTrivia()

    Identifier = WithSymbol()


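# track_indent makes the lexer track indentation-based layout; the pre-rule
# below discards backslash line continuations (and the whitespace that
# follows them) so they do not disturb that layout tracking.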
python_lexer = Lexer(Token,
                     track_indent=True,
                     pre_rules=[(Pattern(r'\\\n[ \r\t]*'), Ignore())])

python_lexer.add_patterns(
    ("STRING_DBQ", r'\"(\\\"|[^\n\"])*\"'),
    ("STRING_SQ", r"'(\\'|[^\n'])*'"),
    ("MLSTRING_DBQ", r'\"\"\"([^"]|("[^"])|(""[^"])|\n)*\"\"\"'),
    ("MLSTRING_SQ", r"'''([^']|('[^'])|(''[^'])|\n)*'''"),
)

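# Named patterns registered via add_patterns can be spliced into later
# Pattern strings using "{NAME}" interpolation, as the string rule below does.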
python_lexer.add_rules(
    (Pattern('(u|U)?(r|R)?'
             '({MLSTRING_SQ}|{MLSTRING_DBQ}'
             '|{STRING_SQ}|{STRING_DBQ})'), Token.String),
    (Pattern(r'[ \r\t]+'), Ignore()),
    (Pattern(r"#(.?)+"), Token.Comment),
    (Literal('>>='), Token.RshAssign),
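    # (The snippet is cut off here. A hedged, purely illustrative continuation
    # for the operator tokens declared above might read:)
    (Literal('<<='), Token.LshAssign),
    (Literal('|'), Token.BinOr),
    (Literal('+'), Token.Plus),
    (Literal('<'), Token.Lt),
    (Pattern('[0-9]+'), Token.Number),
    (Pattern('[a-zA-Z_][a-zA-Z0-9_]*'), Token.Identifier),
)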
Example #3
class BaseToken(LexerToken):
    Example = WithText()
    Whitespace = Ignore()
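For context, a minimal sketch of how such a token class can be wired to a lexer, reusing only calls that appear in the other snippets on this page (the import path and the rule choices are assumptions):

from langkit.lexer import Ignore, Lexer, LexerToken, Literal, Pattern, WithText


class BaseToken(LexerToken):
    Example = WithText()
    Whitespace = Ignore()


# Hypothetical wiring: whitespace is matched but discarded through the Ignore
# action declared above, while the literal word 'example' yields Example.
base_lexer = Lexer(BaseToken)
base_lexer.add_rules(
    (Pattern(r'[ \n\r\t]+'), BaseToken.Whitespace),
    (Literal('example'), BaseToken.Example),
)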
Example #4
class Token(LexerToken):
    Comma = WithText()
    Dot = WithText()
    LPar = WithText()
    RPar = WithText()
    LBrace = WithText()
    RBrace = WithText()
    Equal = WithText()
    Plus = WithText()

    Number = WithText()
    Identifier = WithSymbol()


foo_lexer = Lexer(Token)
foo_lexer.add_rules(
    (Pattern(r'[ \n\r\t]+'), Ignore()),
    (Eof(), Token.Termination),
    (Literal('def'), Token.Def),
    (Literal('error'), Token.Error),
    (Literal('example'), Token.Example),
    (Literal('null'), Token.Null),
    (Literal(','), Token.Comma),
    (Literal('.'), Token.Dot),
    (Literal('('), Token.LPar),
    (Literal(')'), Token.RPar),
    (Literal('{'), Token.LBrace),
    (Literal('}'), Token.RBrace),
    (Literal('='), Token.Equal),
    (Literal('+'), Token.Plus),
    (Pattern('[0-9]+'), Token.Number),
    (Pattern('[a-zA-Z_][a-zA-Z0-9_]*'), Token.Identifier),
)
Example #5
class Token(LexerToken):
    Colon = WithText()

    # Comment
    Comment = WithTrivia()

    # Numeric
    Numeral = WithText()

    # String
    StringLiteral = WithText()


rflx_lexer = Lexer(Token)

rflx_lexer.add_rules(
    (Pattern(r"[ \t\r\n]+"), Ignore()),
    (Pattern(r"--.*"), Token.Comment),
)

# Hack to support keywords that equal attributes
# Inspired by Libadalang grammar (ada/language/lexer.py)
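# Each Case rule matches one keyword literal and chooses between two Alts:
# when the previous token is a Tick (attribute position) the keyword-specific
# token is sent, otherwise the word lexes as a plain UnqualifiedIdentifier.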
rflx_lexer.add_rules(*[
    Case(
        Literal(text),
        Alt(
            prev_token_cond=(Token.Tick, ),
            send=token,
            match_size=len(text),
        ),
        Alt(send=Token.UnqualifiedIdentifier, match_size=len(text)),
    ) for text, token in [
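        # (The snippet is cut off here. An illustrative, hypothetical
        # completion of the keyword/token pair list and of the call:)
        ("First", Token.First),
        ("Size", Token.Size),
    ]
])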