def makeInterpreter(self, text):
    """Wire up the full Lexer → Parser → Interpreter pipeline for *text*.

    Imports are kept local (lazy) as in the surrounding examples.
    """
    from src.Lexer import Lexer
    from src.Parser import Parser
    from src.Interpreter import Interpreter
    return Interpreter(Parser(Lexer(text)))
Beispiel #2
0
 def makeInterpreter(self, text):
     """Parse *text*, run the symbol-table pass over the AST, and return an
     Interpreter bound to that tree.
     """
     from src.Lexer import Lexer
     from src.Parser import Parser
     from src.Interpreter import Interpreter
     from src.SymbolTable import SymbolTableBuilder

     ast = Parser(Lexer(text)).parse()
     # Build the symbol table first so undeclared identifiers are caught
     # before interpretation begins.
     SymbolTableBuilder().visit(ast)
     return Interpreter(ast)
Beispiel #3
0
def main():
    """Parse CLI arguments, build the AST for the given Pascal source file,
    and print its DOT representation to stdout.
    """
    argparser = argparse.ArgumentParser(
        description='Generate an AST DOT file.')
    argparser.add_argument('fname', help='Pascal source file')
    args = argparser.parse_args()

    # Use a context manager so the source file handle is closed promptly;
    # the original open(...).read() leaked the handle.
    with open(args.fname, 'r') as src:
        text = src.read()

    lexer = Lexer(text)
    parser = Parser(lexer)
    viz = ASTVisualizer(parser)
    content = viz.gendot()
    print(content)
Beispiel #4
0
def main(fname):
    """Generate a DOT rendering of *fname*'s AST: print it and write it to 'ast.dot'.

    Parameters
    ----------
    fname : str
        Path to the Pascal source file.
    """
    # 'with' guarantees both file handles are closed (and the output
    # flushed) even on error; the original leaked both, and the writer
    # was never closed, risking an unflushed/truncated ast.dot.
    with open(fname, 'r') as src:
        text = src.read()

    lexer = Lexer(text)
    parser = Parser(lexer)
    viz = ASTVisualizer(parser)
    content = viz.gendot()
    print(content)

    with open('ast.dot', 'w') as out:
        out.write(content)
Beispiel #5
0
def get_tokens_from_raw(raw: str) -> List[Token]:
    """Tokenise *raw* program text with a fresh Lexer.

    Parameters
    ----------
    raw : str
        The raw source text to tokenise.

    Returns
    -------
    list[Token]
        Every token found in `raw`.

    Raises
    ------
    PyllowException
        If `raw` contains an error such as a syntax error.
    """
    return Lexer().lex(raw)
Beispiel #6
0
 def setUp(self):
     # Fresh Lexer per test; tokenise the fixture file before each test runs.
     # NOTE(review): error_path='test' presumably controls where lexing errors
     # are reported — confirm against Lexer.lex_file.
     self.Lexer = Lexer()
     self.Lexer.lex_file('test/Lexer_test.plw', error_path='test')
Beispiel #7
0
 def setUp(self):
     # Fresh Lexer per test; tokenise the in-memory fixture text before each
     # test runs. NOTE(review): path='test' presumably labels the source for
     # error reporting — confirm against Lexer.lex.
     self.Lexer = Lexer()
     self.Lexer.lex(RAW_TEXT, path='test')
        self.scope.insert(var_symbol)

    def visit_Assign(self, node):
        """Check an assignment node: visit the RHS expression, then the LHS target.

        The RHS is visited first so names used in the expression are checked
        before the assignment target itself.
        """
        # right-hand side
        self.visit(node.right)
        # left-hand side
        self.visit(node.left)

    def visit_Var(self, node):
        """Verify that the variable referenced by *node* was declared in scope.

        Raises an Exception when the identifier cannot be resolved.
        """
        name = node.value
        if self.scope.lookup(name) is None:
            raise Exception("Error: Symbol(identifier) not found '%s'" % name)


if __name__ == '__main__':
    # Smoke-test the semantic analyzer on a tiny Pascal program.
    source = """
program Main;
   var x, y : integer;
begin
   x := x + y;
end.
"""
    ast = Parser(Lexer(source)).parse()
    analyzer = SemanticAnalyzer()
    analyzer.visit(ast)

    print(analyzer.scope)
 def __init__(self, path, target):
     # path: source input location; target: output destination
     # (exact semantics not visible here — confirm against callers).
     self.path = path
     self.target = target
     # Pipeline components constructed up front, one per instance.
     self.lexer = Lexer()
     self.parser = Parser()
     self.builder = Builder()
Beispiel #10
0
 def makeLexer(self, text):
     """Construct and return a Lexer over *text* (lazy import from src)."""
     from src.Lexer import Lexer
     return Lexer(text)
from src.Lexer import Lexer
from src.Parser import Parser

# Module-level lexer/parser singletons shared by interpret().
lexer = Lexer()
parser = Parser()


def interpret(query):
    """Lex *query* with the module-level lexer, then feed the token output
    to the module-level parser.
    """
    tokens, symbol_table = lexer.analyze(query)
    parser.parse(tokens)
Beispiel #12
0
 def set_stream(self, raw):
     """Lex *raw*, install the resulting TokenStream on self.tree, and
     advance past the first token.
     """
     tokens = Lexer().lex(raw)
     self.tree._stream = TokenStream(tokens)
     self.tree._stream.next()