Esempio n. 1
0
def runmain():
    """Lex, parse and semantically analyze a hard-coded Pascal program.

    Prints any semantic error raised by the analyzer, then dumps the
    token stream produced by the lexer. Runs exactly once (the loop body
    ends in ``break``); the loop shell only exists to support the
    disabled interactive ``input('calc> ')`` mode.
    """
    # BUG FIX: Lexer is instantiated below but was never imported here.
    from src.Lexer import Lexer
    from src.Semantic_Analyzer import SemanticAnalyzer
    from src.Parser import Parser

    while True:
        try:
            text = \
                """PROGRAM Test;
                   VAR
                       a : INTEGER;
                   BEGIN
                       a := 1
                   END.
                """
            # input('calc> ')  # interactive mode, currently disabled
        except EOFError:
            break
        if not text:
            continue

        lexer = Lexer(text)
        parser = Parser(lexer)
        tree = parser.parse()

        semantic_analyzer = SemanticAnalyzer()

        try:
            semantic_analyzer.visit(tree)
        # NOTE(review): catching SystemError is unusual — presumably the
        # analyzer raises it for semantic errors; confirm against
        # SemanticAnalyzer's actual error type.
        except SystemError as sys_error:
            print(sys_error)

        for token in lexer.tokens:
            print(token)

        break
Esempio n. 2
0
 def makeInterpreter(self, text):
     """Wire lexer -> parser -> symbol-table pass and return an Interpreter.

     Parses *text* into an AST, runs a SymbolTableBuilder over it for its
     side effects, and returns an Interpreter bound to that AST.
     """
     from src.Lexer import Lexer
     from src.Parser import Parser
     from src.Interpreter import Interpreter
     from src.SymbolTable import SymbolTableBuilder

     ast = Parser(Lexer(text)).parse()
     SymbolTableBuilder().visit(ast)
     return Interpreter(ast)
class Preprocessor:
    """Compile a source file at *path* into an HTML file at *target*.

    Collaborators (Lexer, Parser, Builder, EventHandler, Observer,
    Namespace, sleep) are expected to be imported elsewhere in this
    module — they are not defined here.
    """

    def __init__(self, path, target):
        self.path = path      # input source file
        self.target = target  # output HTML file
        self.lexer = Lexer()
        self.parser = Parser()
        self.builder = Builder()

    def compile(self, minify):
        """Read the source, run lexer/parser/builder, write the HTML output."""
        print(f'Compiling {self.path}')
        with open(self.path, 'r') as file:
            raw = file.read()
        self.lexer.tokenize(raw)
        self.parser.parse(self.lexer.tokens)
        html = self.builder.build(self.parser.dom)
        if minify:
            html = self.builder.minify(html)
        # newline='\n' keeps the emitted line endings consistent across OSes.
        with open(self.target, 'w', newline='\n') as file:
            file.write(html)

    def watch(self, minify):
        """Recompile on every change to the source file until Ctrl-C."""
        print(f'Watching {self.path}')
        path = self.getSanitizedPath()
        eventHandler = EventHandler(path.fn, self.compile, [minify])
        observer = Observer()
        observer.schedule(eventHandler, path.dir, recursive=False)
        observer.start()
        try:
            while True:
                sleep(.1)
        except KeyboardInterrupt:
            observer.stop()
        observer.join()

    def getSanitizedPath(self):
        """Split self.path into a Namespace(dir=..., fn=...) using '/' only.

        BUG FIX: an unreachable stray statement
        ``self.scope.insert(var_symbol)`` that followed the return (paste
        residue from another file; this class has no ``scope``) was removed.
        """
        pathChunks = self.path.replace('\\', '/').split('/')
        return Namespace(dir='/'.join(pathChunks[:-1]), fn=pathChunks[-1])

    def visit_Assign(self, node):
        """Analyze an assignment node: visit the RHS first, then the LHS target."""
        for side in (node.right, node.left):
            self.visit(side)

    def visit_Var(self, node):
        """Ensure the variable was declared; raise if the symbol is unknown."""
        symbol = self.scope.lookup(node.value)
        if symbol is None:
            raise Exception(
                "Error: Symbol(identifier) not found '%s'" % node.value)


if __name__ == '__main__':
    text = """
program Main;
   var x, y : integer;
begin
   x := x + y;
end.
"""
    # Build the AST and run the semantic pass over it, then dump the scope.
    tree = Parser(Lexer(text)).parse()
    semantic_analyzer = SemanticAnalyzer()
    semantic_analyzer.visit(tree)

    print(semantic_analyzer.scope)
Esempio n. 5
0
def test_parser():
    """Exercise the Lox Parser over hand-built token lists.

    Each case constructs a Parser from explicit Token objects, parses,
    and compares the Printer's s-expression rendering of the resulting
    AST against the expected string. Error cases use assert_exception.
    """
    l = Lox()
    printer = Printer()
    # check()/match() on a single '!=' token.
    parserE = Parser([
        Token(type=TokenTypes.BANG_EQUAL,
              lexeme='!=',
              literal=None,
              line_number=0)
    ], l)

    assert (parserE.check(TokenTypes.BANG_EQUAL) == True)
    assert (parserE.match(TokenTypes.BANG_EQUAL) == True)

    # true != false
    parser = Parser([
        Token(type=TokenTypes.TRUE, lexeme='true', literal=True,
              line_number=0),
        Token(type=TokenTypes.BANG_EQUAL,
              lexeme='!=',
              literal=None,
              line_number=0),
        Token(type=TokenTypes.FALSE,
              lexeme='false',
              literal=False,
              line_number=0)
    ], l)
    assert (printer.print(parser.parse()) == '(!= True False)')

    # NOTE(review): this 2 * 3 + 5 fixture is never parsed or asserted —
    # it is overwritten by the next Parser below. Likely a leftover.
    parser = Parser([
        Token(type=TokenTypes.NUMBER, lexeme='2', literal=2, line_number=0),
        Token(type=TokenTypes.STAR, lexeme='*', literal=None, line_number=0),
        Token(type=TokenTypes.NUMBER, lexeme='3', literal=3, line_number=0),
        Token(type=TokenTypes.PLUS, lexeme='+', literal=None, line_number=0),
        Token(type=TokenTypes.NUMBER, lexeme='5', literal=5, line_number=0),
    ], l)

    parser = Parser([
        Token(type=TokenTypes.LEFT_PAREN,
              lexeme='(',
              literal=None,
              line_number=0),
        Token(type=TokenTypes.NUMBER, lexeme='5', literal=5, line_number=0),
        Token(type=TokenTypes.RIGHT_PAREN,
              lexeme=')',
              literal=None,
              line_number=0),
    ], l)
    # (5)
    assert (printer.print(parser.parse()) == '(group 5)')

    parser = Parser([
        Token(type=TokenTypes.NUMBER, lexeme='2', literal=2, line_number=0),
        Token(type=TokenTypes.STAR, lexeme='*', literal=None, line_number=0),
        Token(type=TokenTypes.LEFT_PAREN,
              lexeme='(',
              literal=None,
              line_number=0),
        Token(type=TokenTypes.NUMBER, lexeme='3', literal=3, line_number=0),
        Token(type=TokenTypes.PLUS, lexeme='+', literal=None, line_number=0),
        Token(type=TokenTypes.NUMBER, lexeme='5', literal=5, line_number=0),
        Token(type=TokenTypes.RIGHT_PAREN,
              lexeme=')',
              literal=None,
              line_number=0),
    ], l)
    # 2 * (3 + 5)

    assert (printer.print(parser.parse()) == '(* 2 (group (+ 3 5)))')

    parser = Parser([
        Token(type=TokenTypes.TRUE, lexeme='true', literal=True,
              line_number=0),
        Token(type=TokenTypes.EQUAL_EQUAL,
              lexeme='==',
              literal=None,
              line_number=0),
        Token(type=TokenTypes.LEFT_PAREN,
              lexeme='(',
              literal=None,
              line_number=0),
        Token(type=TokenTypes.NUMBER, lexeme='3', literal=3, line_number=0),
        Token(type=TokenTypes.PLUS, lexeme='+', literal=None, line_number=0),
        Token(type=TokenTypes.NUMBER, lexeme='5', literal=5, line_number=0),
        Token(type=TokenTypes.RIGHT_PAREN,
              lexeme=')',
              literal=None,
              line_number=0),
        Token(type=TokenTypes.GREATER_EQUAL,
              lexeme='>=',
              literal=None,
              line_number=0),
        Token(type=TokenTypes.NUMBER, lexeme='8', literal=8, line_number=0),
    ], l)
    # True == (3 + 5) >= 8
    assert (printer.print(
        parser.parse()) == '(== True (>= (group (+ 3 5)) 8))')

    parser = Parser([
        Token(type=TokenTypes.LEFT_PAREN,
              lexeme='(',
              literal=None,
              line_number=0),
        Token(type=TokenTypes.NUMBER, lexeme='5', literal=5, line_number=0),
    ], l)

    # (5 unbalanced paren
    assert_exception(lambda: printer.print(parser.parse()),
                     '[LOX ERROR] line 0: Expected `)` token')

    # ** Unexpected token
    assert_exception(
        lambda: printer.print(
            Parser([
                Token(type=TokenTypes.STAR,
                      lexeme="*",
                      literal=None,
                      line_number=99),
            ], l).expression()),
        '[LOX ERROR] line 99: Unexpected token TokenTypes.STAR: `*`')

    # 3 + 2 followed by a dangling `*` with no right operand
    assert_exception(
        lambda: printer.print(
            Parser([
                Token(type=TokenTypes.NUMBER,
                      lexeme='3',
                      literal=3,
                      line_number=0),
                Token(type=TokenTypes.PLUS,
                      lexeme='+',
                      literal=None,
                      line_number=0),
                Token(type=TokenTypes.NUMBER,
                      lexeme='2',
                      literal=2,
                      line_number=0),
                Token(type=TokenTypes.STAR,
                      lexeme="*",
                      literal=None,
                      line_number=99),
            ], l).expression()),
        '[LOX ERROR] line 99: Unexpected token TokenTypes.STAR: `*`')

    # comma operator
    parser = Parser([
        Token(type=TokenTypes.NUMBER, lexeme='12', literal=12, line_number=0),
        Token(type=TokenTypes.SLASH, lexeme='/', literal=None, line_number=0),
        Token(type=TokenTypes.NUMBER, lexeme='9', literal=9, line_number=0),
        Token(type=TokenTypes.COMMA, lexeme=',', literal=None, line_number=0),
        Token(type=TokenTypes.NUMBER, lexeme='3', literal=3, line_number=0),
        Token(type=TokenTypes.PLUS, lexeme='+', literal=None, line_number=0),
        Token(type=TokenTypes.NUMBER, lexeme='2', literal=2, line_number=0),
    ], l)
    # 12/9, 3+2
    assert (printer.print(parser.parse()) == '(, (/ 12 9) (+ 3 2))')

    # TODO:comma operator wth function
    # parser = Parser([
    #     Token(type=TokenTypes.IDENTIFIER, lexeme='foo', literal=None, line_number=0),
    #     Token(type=TokenTypes.LEFT_PAREN, lexeme='(', literal=None, line_number=0),
    #     Token(type=TokenTypes.NUMBER, lexeme='9', literal=9, line_number=0),
    #     Token(type=TokenTypes.COMMA, lexeme=',', literal=None, line_number=0),
    #     Token(type=TokenTypes.NUMBER, lexeme='3', literal=3, line_number=0),
    #     Token(type=TokenTypes.RIGHT_PAREN, lexeme=')', literal=None, line_number=0),
    # ], l)
    # assert(printer.print(parser.parse()) ==
    #        '(foo 9 3)')

    # ternary operator
    # a == b ? 0 : 1
    parser = Parser([
        Token(type=TokenTypes.NUMBER, lexeme='9', literal=9, line_number=0),
        Token(type=TokenTypes.EQUAL_EQUAL,
              lexeme='==',
              literal=None,
              line_number=0),
        Token(type=TokenTypes.NUMBER, lexeme='3', literal=3, line_number=0),
        Token(type=TokenTypes.QUESTION_MARK,
              lexeme='?',
              literal=None,
              line_number=0),
        Token(type=TokenTypes.NUMBER, lexeme='0', literal=0, line_number=0),
        Token(type=TokenTypes.COLON, lexeme=':', literal=None, line_number=0),
        Token(type=TokenTypes.NUMBER, lexeme='1', literal=1, line_number=0),
    ], l)
    # 9 == 3 ? 0 : 1
    assert (printer.print(parser.parse()) == '(?: (== 9 3) 0 1)')

    # 9 - 3
    parser = Parser([
        Token(type=TokenTypes.NUMBER, lexeme='9', literal=9, line_number=0),
        Token(type=TokenTypes.MINUS, lexeme='-', literal=None, line_number=0),
        Token(type=TokenTypes.NUMBER, lexeme='3', literal=3, line_number=0),
    ], l)
    assert (printer.print(parser.parse()) == '(- 9 3)')

    # !0
    parser = Parser([
        Token(type=TokenTypes.BANG, lexeme='!', literal=None, line_number=0),
        Token(type=TokenTypes.NUMBER, lexeme='0', literal=0, line_number=0),
    ], l)
    assert (printer.print(parser.parse()) == '(! 0)')
    # -4
    parser = Parser([
        Token(type=TokenTypes.MINUS, lexeme='-', literal=None, line_number=0),
        Token(type=TokenTypes.NUMBER, lexeme='4', literal=4, line_number=0),
    ], l)
    assert (printer.print(parser.parse()) == '(- 4)')

    # -4 * 3
    # NOTE(review): this fixture is built but never parsed/asserted —
    # the function ends here; possibly a truncated final test case.
    parser = Parser([
        Token(type=TokenTypes.MINUS, lexeme='-', literal=None, line_number=0),
        Token(type=TokenTypes.NUMBER, lexeme='4', literal=4, line_number=0),
        Token(type=TokenTypes.STAR, lexeme='*', literal=None, line_number=0),
        Token(type=TokenTypes.NUMBER, lexeme='3', literal=3, line_number=0),
    ], l)
Esempio n. 6
0
from config import ROOT_DIR
import os

from src.ImageCutter import ImageCutter
from src.Parser import Parser

xml_directory = os.path.join(ROOT_DIR, "data", "labels")
img_directory = os.path.join(ROOT_DIR, "data", "images")
out_directory = os.path.join(ROOT_DIR, "data", "output")

# Parse every label file and cut the matching image regions into out_directory.
# Iterates entry names directly as str; the original encoded the directory to
# bytes with os.fsencode() and decoded each entry back with os.fsdecode(),
# a needless round-trip that yields the same names.
for file_name in os.listdir(xml_directory):
    file_path = os.path.join(xml_directory, file_name)
    image_info = Parser.parse(file_path)
    ImageCutter.cut(image_info, img_directory, out_directory)