Example #1
0
    def test_break(self):
        with io.StringIO() as buf, redirect_stdout(buf):
            with FileManager(
                    "Testing/Final_Test_Cases/break_in_nested_while.txt"
            ) as file_manager:
                tokeniser = Tokeniser(file_manager)
                analysed_tokens = []
                analysed_tokens.append(Token("START_OF_FILE", "", 0))
                while (analysed_tokens[-1].type is not "END_OF_FILE"):
                    analysed_tokens.append(tokeniser.get_token())

                analysed_tokens = analysed_tokens[1:]

                parsed = ProgramNode(analysed_tokens[::-1], file_manager)
                parsed.execute()

                solution = """1
10
100
1
10
100
1
10
100"""

                for line_sol, line_test in zip(solution.rstrip('\r'),
                                               buf.getvalue().rstrip('\r')):
                    self.compare_output(line_sol, line_test)
def loop_test(fm):
    """Tokenise the whole input held by *fm* and return the token list.

    A synthetic START_OF_FILE token seeds the loop and is stripped from the
    returned list; the trailing END_OF_FILE token is kept.
    """
    tokeniser = Tokeniser(fm)
    analysed_tokens = [Token("START_OF_FILE", "", 0)]
    # `!=`, not `is not`: string identity comparison only worked by virtue
    # of CPython literal interning.
    while analysed_tokens[-1].type != "END_OF_FILE":
        analysed_tokens.append(tokeniser.get_token())

    return analysed_tokens[1:]
Example #3
0
def run_program(fm):
    """Tokenise, parse, and execute the program held by *fm*.

    The token list is reversed before parsing because ProgramNode consumes
    tokens from the end (as the original call sites demonstrate).
    """
    tokeniser = Tokeniser(fm)
    analysed_tokens = [Token("START_OF_FILE", "", 0)]
    # `!=`, not `is not`: don't rely on string interning for the sentinel.
    while analysed_tokens[-1].type != "END_OF_FILE":
        analysed_tokens.append(tokeniser.get_token())

    # Drop the synthetic START_OF_FILE sentinel before parsing.
    analysed_tokens = analysed_tokens[1:]
    parsed = ProgramNode(analysed_tokens[::-1], fm)

    parsed.execute()
Example #4
0
def run(path):
    """Tokenise, dump, parse, pretty-print, and execute the program at *path*.

    Prints the token stream, then the parse tree, a separator, and finally
    runs the program.
    """
    file_manager = FileManager(path)
    with file_manager as fm:
        tokeniser = Tokeniser(fm)
        analysed_tokens = [Token("START_OF_FILE", "", 0)]
        # `!=`, not `is not`: string identity is an interning accident.
        while analysed_tokens[-1].type != "END_OF_FILE":
            analysed_tokens.append(tokeniser.get_token())

        # Drop the synthetic START_OF_FILE sentinel.
        analysed_tokens = analysed_tokens[1:]
        print_tokens(analysed_tokens)

        # NOTE(review): ProgramNode receives `file_manager`, not `fm` —
        # presumably FileManager.__enter__ returns self; confirm before
        # unifying the two names.
        parsed = ProgramNode(analysed_tokens[::-1], file_manager)
        print_tree(parsed, 0)

        print("==========")
        parsed.execute()
 def test_realnumber_too_long(self, mock_error, mock_open):
     """Tokenising an over-long real number must raise ErrorManager and
     trigger the mocked error reporter."""
     with FileManager("") as file_mgr:
         tokeniser = Tokeniser(file_mgr)
         self.assertRaises(error_manager.ErrorManager, tokeniser.get_token)
         self.assertTrue(mock_error.called)
 def assert_equal_tokens(self, expected):
     """Assert that successive tokens match *expected* (type, value) pairs.

     A fresh Tokeniser is built per pair, mirroring how the suite drives
     the mocked file input."""
     with FileManager("") as file_mgr:
         for expected_type, expected_value in expected:
             produced = Tokeniser(file_mgr).get_token()
             self.assertEqual(expected_type, produced.type)
             self.assertEqual(expected_value, produced.value)
 def test_unknown_char(self, mock_error, mock_open):
     """An unrecognised character must raise ErrorManager and report an
     error through the mocked handler."""
     with FileManager("") as file_mgr:
         tokeniser = Tokeniser(file_mgr)
         self.assertRaises(error_manager.ErrorManager, tokeniser.get_token)
         self.assertTrue(mock_error.called)
 def test_number_starting_with_0(self, mock_error, mock_open):
     """A number with a leading zero must raise ErrorManager and report an
     error through the mocked handler."""
     with FileManager("") as file_mgr:
         tokeniser = Tokeniser(file_mgr)
         self.assertRaises(error_manager.ErrorManager, tokeniser.get_token)
         self.assertTrue(mock_error.called)