def main():
    """Run the code-gen sample suite (tests/code_gen/samples/T1..T27).

    For each sample: build a scanner/parser over the sample's input,
    generate and run the code, then diff the produced expected.txt
    against the sample's reference expected.txt. Logs a per-test result
    line, a "."/"F" summary string, and an overall pass/fail verdict.
    """
    number_of_tests = 27
    test_passes = True
    status = ""
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    for i in range(1, number_of_tests + 1):
        prefix = f"tests/code_gen/samples/T{i}/"

        sc = build_scanner(f"{prefix}input.txt")
        parser = LL1(sc, grammar, CodeGen())

        # Reset the global tables so state from the previous sample does
        # not leak into this run; "output" is re-seeded at address 5.
        tables.get_token_table().tokens = []
        tables.get_symbol_table().clear()
        tables.symbol_table.add_symbol(Token(TokenType.ID, "output"))
        tables.symbol_table.fetch("output").address = 5

        tables.get_error_table().parse_trees = []

        parser.generate_parse_tree()
        parser.code_gen.execute_from("main")
        parser.export_code("output.txt")
        os.system(test_command[platform.system()])

        # Read each file exactly once, via context managers, instead of
        # re-opening both files for the log line and again for the check
        # (the original leaked four file handles per iteration).
        with open("expected.txt") as actual_f, \
                open(f"{prefix}expected.txt") as expected_f:
            test_status = actual_f.read().strip() == expected_f.read().strip()

        logger.warning(f"test no.{i}:")
        logger.warning(f"\texpected.txt:\t{test_status}")

        test_passes = test_passes and test_status
        status += "." if test_status else "F"

    logger.warning("".ljust(60, "="))
    logger.warning(status)
    logger.warning("test was successful!" if test_passes else "test failed")
Example #2
0
def error_gen(line_no, lexeme):
    """Record a lexical error for *lexeme* and return an ERROR token.

    Classification (checked in order):
      * starts with an ASCII digit and ends with an ASCII letter -> "Invalid number"
      * starts with "/*"                                         -> "Unclosed comment"
        (lexemes of 8+ characters are truncated to 7 chars plus "...")
      * exactly "*/"                                             -> "Unmatched comment"
      * anything else                                            -> "Invalid input"
    """
    first, last = lexeme[0], lexeme[-1]
    # Character-range comparisons replace the original ord() arithmetic:
    # '0'..'9' for the first char, 'A'..'Z' or 'a'..'z' for the last.
    if "0" <= first <= "9" and ("A" <= last <= "Z" or "a" <= last <= "z"):
        error = tables.Error(line_no, lexeme, "Invalid number")
    elif lexeme.startswith("/*"):
        # Long unclosed comments are truncated in the error report.
        reported = lexeme if len(lexeme) < 8 else lexeme[0:7] + "..."
        error = tables.Error(line_no, reported, "Unclosed comment")
    elif lexeme == "*/":
        error = tables.Error(line_no, lexeme, "Unmatched comment")
    else:
        error = tables.Error(line_no, lexeme, "Invalid input")
    # Every branch recorded the error identically; do it once here.
    tables.get_error_table().add_lexical_error(error)
    return Token(TokenType.ERROR, lexeme)
 def add_symbol(self, token):
     """Intern *token* in the current scope and return it.

     If the token's lexeme is a reserved keyword, no symbol is added and
     a fresh KEYWORD token is returned instead. Otherwise the token is
     appended to the current scope (honoring the declaration flag, which
     is then cleared) and returned unchanged.
     """
     lexeme = token.lexeme
     if lexeme not in self.keyword:
         self.get_current_scope().append(token, self.is_declaration)
         # A declaration applies to a single identifier; reset the flag.
         self.set_declaration(False)
         return token
     return Token(TokenType.KEYWORD, lexeme)
Example #4
0
def num_token_gen(line_no, lexeme):
    """Create a NUM token for *lexeme*, record it in the token table, and return it."""
    token = Token(TokenType.NUM, lexeme)
    # Use the accessor for the token table, consistent with the other
    # *_token_gen helpers (e.g. symbol_token_gen) and the rest of the file.
    tables.get_token_table().add_token(line_no, token)
    return token
Example #5
0
def whitespace_token_gen(line_no, lexeme):
    """Map a whitespace lexeme to a token.

    chr(26) (the ASCII SUB character, used as the end-of-input marker)
    yields an EOF token with lexeme "$"; any other whitespace lexeme
    yields a WHITE_SPACE token. Nothing is recorded in the tables.
    """
    if lexeme != chr(26):
        return Token(TokenType.WHITE_SPACE, lexeme)
    return Token(TokenType.EOF, "$")
Example #6
0
def comment_token_gen(line_no, lexeme):
    """Return a COMMENT token wrapping *lexeme*.

    line_no is accepted for signature parity with the other *_token_gen
    helpers but unused: comments are not recorded in the token table.
    """
    comment_token = Token(TokenType.COMMENT, lexeme)
    return comment_token
Example #7
0
def symbol_token_gen(line_no, lexeme):
    """Build a token for a symbol lexeme, record it, and return it.

    The TokenType is looked up by the sum of the lexeme's character
    codes, so each symbol's enum value must equal that ordinal sum.
    """
    kind = TokenType(sum(map(ord, lexeme)))
    token = Token(kind, lexeme)
    tables.get_token_table().add_token(line_no, token)
    return token
Example #8
0
def id_token_gen(line_no, lexeme):
    """Intern *lexeme* in the symbol table, record the result, and return it.

    The symbol table's add_symbol may hand back a KEYWORD token instead
    of the ID token when the lexeme is a reserved word.
    """
    token = tables.get_symbol_table().add_symbol(Token(TokenType.ID, lexeme))
    # Use the accessor for the token table, consistent with the other
    # *_token_gen helpers (e.g. symbol_token_gen) and the rest of the file.
    tables.get_token_table().add_token(line_no, token)
    return token
from Parser import init_grammar
from Parser.parser import LL1
from code_gen import CodeGen
from scanner.default_scanner import build_scanner
from scanner.tokens import Token, TokenType
from tables import tables

# Arshia Akhavan 97110422
# Ghazal Shenavar 97101897

# Pre-seed the symbol table with the identifier "output" at address 5
# (presumably the runtime's built-in output routine — confirm against the
# code generator).
tables.symbol_table.add_symbol(Token(TokenType.ID, "output"))
tables.symbol_table.fetch("output").address = 5
# Build the pipeline: scanner over input.txt feeding an LL(1) parser that
# drives code generation.
parser = LL1(build_scanner("input.txt"), init_grammar(), CodeGen())
parser.generate_parse_tree()
parser.export_parse_tree("parse_tree.txt")

# Run the generated code starting from "main" and dump the emitted code.
parser.code_gen.execute_from("main")
parser.export_code("output.txt")

# Export the diagnostics gathered during the run.
parser.export_syntax_error("syntax_errors.txt")
tables.get_error_table().export("lexical_errors.txt")
# tables.get_symbol_table().export("symbol_table.txt")
tables.get_token_table().export("tokens.txt")