def test_simple(self):
    token_list = [
        Token("num", 6),
        Token("/"),
        Token("num", 2),
        Token("<EOF>")
    ]

    sp = SkeletonParser("output.yaml", token_list)
    self.assertTrue(sp.parse())
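
# Context sketch (an assumption; the original shows only the method): a
# minimal unittest harness that makes the fragment above runnable, using
# imports confirmed by the other snippets. "output.yaml" is the LL(1)
# grammar file that the other examples produce via Generator.write_yaml().
import unittest

from MicroCompiler.SkeletonParser import SkeletonParser, Token


class SkeletonParserTest(unittest.TestCase):  # hypothetical class name
    def test_simple(self):
        token_list = [
            Token("num", 6),
            Token("/"),
            Token("num", 2),
            Token("<EOF>")
        ]

        sp = SkeletonParser("output.yaml", token_list)
        self.assertTrue(sp.parse())


if __name__ == "__main__":
    unittest.main()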
Example #2
def render_with_string(input_string, data):
    raw_token_list = [i[1] for i in lex_analysis(input_string, lexer_define)]
    # remove whitespace tokens
    token_list = list(
        filter(lambda x: not isinstance(x, WhiteSpaceToken), raw_token_list)
    )
    # append EOF token
    token_list.append(Token("<EOF>"))

    return render_with_tokens(token_list, data)
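
# Usage sketch (not part of the original snippet): with the template-engine
# lexer_define from Example #7 in scope, this runs the full lex-and-render
# pipeline. The literal text is uppercased because that lexer's "const"
# pattern only matches uppercase letters and commas.
if __name__ == "__main__":
    print(render_with_string("HELLO, {{ name }}", {"name": "Xiaoquan"}))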
Example #3
def main(input_string):
    raw_token_list = [i[1] for i in lex_analysis(input_string, lexer_define)]
    # remove whitespace tokens
    token_list = list(filter(lambda x: not isinstance(x, WhiteSpaceToken), raw_token_list))
    # append EOF token
    token_list.append(Token("<EOF>"))

    g = Generator(bnf_file)
    g.generate()
    g.write_yaml(ll1_grammar_file)

    result = arithmetic_calculator(ll1_grammar_file, token_list, user_level_parser, graph_file)

    return result
Example #4
import operator

from demo.arithmetic_calculator.arithmetic_calculator import arithmetic_calculator
from demo.arithmetic_calculator.user_level_parser import Parser
from MicroCompiler.SkeletonParser import Token

user_level_parser = Parser()


def main(token_list):
    g = Generator("calculator.mbnf")
    g.generate()
    g.write_yaml("calculator.yaml")

    result = arithmetic_calculator("calculator.yaml", token_list,
                                   user_level_parser, "calculator.graphml")
    print(result)


if __name__ == "__main__":
    # equivalent to: 6 * (2 + 2)
    token_list = [
        Token("num", 6),
        Token("*", operator.mul),
        Token("("),
        Token("num", 2),
        Token("+", operator.add),
        Token("num", 2),
        Token(")"),
        Token("<EOF>"),
    ]

    main(token_list)
Example #5
import operator

from MicroCompiler.SkeletonParser import Token, WhiteSpaceToken

lexer_define = [
    # token type, token regex, token action
    ["num", r"(0|1|2|3|4|5|6|7|8|9)+", lambda x: Token("num", int(x))],
    ["+", r"\+", lambda x: Token("+", operator.add)],
    ["-", r"-", lambda x: Token("-", operator.sub)],
    ["*", r"\*", lambda x: Token("*", operator.mul)],
    ["/", r"/", lambda x: Token("/", operator.truediv)],
    ["(", r"\(", lambda x: Token("(")],
    [")", r"\)", lambda x: Token(")")],
    ["white space", r" +", lambda x: WhiteSpaceToken(x)],
]
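
# Usage sketch (not part of the original snippet): feeding "6 * (2 + 2)"
# through lex_analysis with this lexer_define should reproduce the hand-built
# token list from Example #4. lex_analysis is assumed to be importable from
# MicroCompiler's lexer module; its import is elided here, as in the other
# snippets.
raw_token_list = [i[1] for i in lex_analysis("6 * (2 + 2)", lexer_define)]
token_list = [t for t in raw_token_list if not isinstance(t, WhiteSpaceToken)]
token_list.append(Token("<EOF>"))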
Example #6
from demo.template_engine.render_engine import render_engine
from demo.template_engine.user_level_parser import Parser
from MicroCompiler.SkeletonParser import Token


def render_with_tokens(token_list, data):
    # BNF to LL1
    g = Generator("syntax.mbnf")
    g.generate()
    g.write_yaml("syntax.yaml")

    # Node walker
    user_level_parser = Parser(data)

    return render_engine(
        "syntax.yaml", token_list, user_level_parser, "syntax.graphml"
    )


if __name__ == "__main__":
    # equivalent to: `Hello, {{ name }}`
    token_list = [
        Token("const", "Hello, "),
        Token("{{", None),
        Token("var", "name"),
        Token("}}", None),
        Token("<EOF>"),
    ]

    result = render_with_tokens(token_list, {"name": "Xiaoquan"})
    print(result)
Example #7
from MicroCompiler.SkeletonParser import Token, WhiteSpaceToken

lexer_define = [
    # token type, token regex, token action
    [
        "var",
        (
            r"(a|b|c|d|e|f|g|h|i|j|k|l|m|n|o|p|q|r|s|t|u|v|w|x|y|z)+"
            r"(a|b|c|d|e|f|g|h|i|j|k|l|m|n|o|p|q|r|s|t|u|v|w|x|y|z|0|1|2|3|4|5|6|7|8|9)*"
        ),
        lambda x: Token("var", str(x)),
    ],
    [
        "const",
        r"(A|B|C|D|E|F|G|H|I|J|K|L|M|N|O|P|Q|R|S|T|U|V|W|X|Y|Z|,)+",
        lambda x: Token("const", str(x)),
    ],
    ["{{", "{{", lambda x: Token("{{", None)],
    ["}}", "}}", lambda x: Token("}}", None)],
    ["white space", r" +", lambda x: WhiteSpaceToken(x)],
]
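
# Usage sketch (not part of the original snippet): lexing a template with
# this lexer_define, then dropping whitespace and appending <EOF>, as the
# other snippets do. Note that "const" only matches uppercase letters and
# commas, so lowercase literal text would lex as "var" tokens instead.
# lex_analysis is again assumed to come from MicroCompiler's lexer module.
raw_token_list = [i[1] for i in lex_analysis("HELLO, {{ name }}", lexer_define)]
token_list = [t for t in raw_token_list if not isinstance(t, WhiteSpaceToken)]
token_list.append(Token("<EOF>"))
# token_list should be: const "HELLO,", {{, var "name", }}, <EOF>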
Example #8
import operator

from MicroCompiler.SkeletonParser import Token, SkeletonParser, Epsilon
from MicroCompiler.parser_builder import ParserBuilder
from MicroCompiler.postfix_expression.operator import PythonBuiltinOperator

token_list = [
    Token("num", 6),
    Token("/", operator.truediv),
    Token("num", 2),
    Token("<EOF>"),
]
# token_list = [
#     Token("num", 6),
#     Token("/", operator.truediv),
#     Token("num", 2),
#     Token("/", operator.truediv),
#     Token("num", 2),
#     Token("<EOF>"),
# ]


sp = SkeletonParser("output.yaml", token_list)
sp.parse()

from MicroCompiler.abstract_syntax_tree.abstract_syntax_tree import (
    AbstractSyntaxTree as AST,
)
from MicroCompiler.abstract_syntax_tree.node import create_or_get_node

import matplotlib.pyplot as plt

import networkx as nx

DG = nx.DiGraph()
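
# Sketch (an assumption; the original snippet cuts off after creating DG):
# the AST built from sp.parse() would be walked to populate DG, which can
# then be drawn with matplotlib. The node labels below are hypothetical
# placeholders, not output of the real parse.
DG.add_edge("Expression", "Term")
DG.add_edge("Term", "Factor")
nx.draw(DG, with_labels=True)
plt.show()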