Example #1
    def load_from_ast(self, ast_root):
        # Walk the grammar AST: token definitions map a name to a regex,
        # rule definitions are simplified into the internal rule form.
        for st in ast_root.statements:
            if isinstance(st, ast.TokenDefinition):
                self.tokens[st.name] = st.regexp
            elif isinstance(st, ast.RuleDefinition):
                self.rules[st.name] = ast_simplify.convert_ast_to_rule(self, st.name, st.expression)

        # Register the literal tokens, then build the tokenizer and the parse table.
        lexer.add_literal_tokens(self.tokens, self.token_literals)
        self.lexer = lexer.Tokenizer(self.tokens)

        self.table = parsing_table.build_parse_table(self.rules, self.tokens)
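
For context, the loop above only needs the AST nodes to expose a name plus either a regexp or an expression. Below is a minimal sketch of such nodes and a driver call; the namedtuples are hypothetical stand-ins for the project's own ast.TokenDefinition and ast.RuleDefinition classes, and grammar is a hypothetical object providing load_from_ast.

# Hypothetical stand-ins for the project's AST node classes; the real
# definitions live in the project's own `ast` module, not the stdlib one.
from collections import namedtuple

TokenDefinition = namedtuple("TokenDefinition", ["name", "regexp"])
RuleDefinition = namedtuple("RuleDefinition", ["name", "expression"])
Module = namedtuple("Module", ["statements"])

# A tiny grammar: one token definition and one rule referencing it.
ast_root = Module(statements=[
    TokenDefinition("@number", r"([0-9]+)"),
    RuleDefinition("#expr", ...),   # rule expression AST omitted here
])

# grammar.load_from_ast(ast_root) would then fill grammar.tokens and
# grammar.rules, build the Tokenizer and the parse table.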
Example #2
def create_lexer():
    # Token patterns for the grammar description language itself:
    # whitespace/comments to skip, token and rule identifiers, and
    # quoted token literals and token match expressions.
    tokens_info = {
        "@@skip" : r"(?:\s*(?://[^\n]*\n)?)*",
        "@token-id" : r"(@@?[a-z]+(?:-[a-z]+)*)",
        "@rule-id" : r"(#?[a-z]+(?:-[a-z]+)*)",
        "@token-literal" : r"'((?:\\.|[^'\n])+)'",
        "@token-match" : '"' + r"((?:\\.|[^\"\n])+)" + '"'
    }

    # Punctuation used by the grammar syntax, added as literal tokens.
    literals = [':=', '(', ')', '?', '*', '=>', '|', ';']
    lexer.add_literal_tokens(tokens_info, literals)

    return lexer.Tokenizer(tokens_info)
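
The lexer.add_literal_tokens helper used above belongs to this project and its body is not shown here. A minimal sketch of what such a helper could look like, assuming it turns each literal into its own token whose pattern is the escaped literal, and keeps longer literals ahead of shorter ones so ':=' is tried before ':':

import re

def add_literal_tokens(tokens_info, literals):
    # Sketch only: longest literals first, then each literal becomes a
    # token named after itself, with the re.escape()d text wrapped in a
    # capture group like the other token patterns.
    for lit in sorted(literals, key=len, reverse=True):
        tokens_info[lit] = "(" + re.escape(lit) + ")"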
Example #3
from collections import OrderedDict

def create_lexer():
    # Insertion order matters: tokens_info is an OrderedDict, so the
    # keyword and operator literals registered first are tried before
    # the generic patterns added below.
    tokens_info = OrderedDict()

    literals = {'int', 'real', 'char', 'bool', 'string', 'list',
            'true', 'false',
            'not', 'and', 'or',
            'input', 'output', 'if', 'then', 'else', 'end', 'while', 'do', 'var',
            '&', '[', ']', '(', ')',
            '+', '-', '*', '/', '%',
            '==', '!=', '<', '<=', '>', '>=',
            ',', ':=', ':', ';'
            }
    lexer.add_literal_tokens(tokens_info, literals)

    # General-purpose tokens: skipped whitespace/comments, identifiers,
    # and the numeric, character and string literals of the language.
    tokens_info.update([
        ("@@skip", r"(?:\s*(?://[^\n]*\n)?)*"),
        ("@identifier", r"([a-zA-Z_][a-zA-Z0-9_]*)"),
        ("@real-literal", r"([0-9]+\.[0-9]+)"),
        ("@int-literal", r"([0-9]+)"),
        ("@char-literal", r"'(.)'"),
        ("@string-literal", '"' + r"([^\"]*)" + '"')
        ])

    return lexer.Tokenizer(tokens_info)
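
Because the keyword literals are inserted into the OrderedDict before @identifier, a tokenizer that tries patterns in insertion order will classify 'while' as the keyword token rather than as an identifier. A minimal sketch of that matching step, assuming (hypothetically) that the project's Tokenizer works roughly this way:

import re

def first_match(tokens_info, text, pos):
    # Sketch only: try each pattern in insertion order and return the
    # first one that matches at `pos`; keyword literals therefore
    # shadow the more general @identifier pattern.
    for name, pattern in tokens_info.items():
        m = re.match(pattern, text[pos:])
        if m:
            return name, m.group(0)
    return None, ""

A real tokenizer would typically also prefer the longest match or require word boundaries, so that an identifier such as 'interval' is not cut off after the 'int' keyword; how the project's Tokenizer resolves this is not shown in the snippet.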