def test_break(self):
    """Run break_in_nested_while.txt and compare its stdout, line by line,
    against the expected output (three iterations printing 1, 10, 100)."""
    with io.StringIO() as buf, redirect_stdout(buf):
        with FileManager(
            "Testing/Final_Test_Cases/break_in_nested_while.txt"
        ) as file_manager:
            tokeniser = Tokeniser(file_manager)
            # Seed with a sentinel token so the loop condition can read
            # analysed_tokens[-1] on the first iteration.
            analysed_tokens = [Token("START_OF_FILE", "", 0)]
            # BUGFIX: was `is not "END_OF_FILE"` — identity comparison with a
            # string literal is interning-dependent; use `!=` for equality.
            while analysed_tokens[-1].type != "END_OF_FILE":
                analysed_tokens.append(tokeniser.get_token())
            # Drop the START_OF_FILE sentinel before parsing.
            analysed_tokens = analysed_tokens[1:]
            # Parser consumes tokens from the end, hence the reversal.
            parsed = ProgramNode(analysed_tokens[::-1], file_manager)
            parsed.execute()
        solution = "\n".join(["1", "10", "100"] * 3)
        # BUGFIX: zipping the raw strings iterated character-by-character;
        # split into lines so compare_output receives whole lines.
        for line_sol, line_test in zip(
            solution.splitlines(), buf.getvalue().splitlines()
        ):
            self.compare_output(line_sol, line_test)
def loop_test(fm):
    """Tokenise the whole input from *fm* and return the token list.

    The returned list includes the final END_OF_FILE token but not the
    START_OF_FILE sentinel used to prime the loop.
    """
    tokeniser = Tokeniser(fm)
    # Sentinel lets the loop condition inspect analysed_tokens[-1] immediately.
    analysed_tokens = [Token("START_OF_FILE", "", 0)]
    # BUGFIX: was `is not "END_OF_FILE"` — identity comparison with a string
    # literal is interning-dependent and may loop forever; use `!=`.
    while analysed_tokens[-1].type != "END_OF_FILE":
        analysed_tokens.append(tokeniser.get_token())
    return analysed_tokens[1:]
def run_program(fm):
    """Tokenise, parse, and execute the program read from *fm*."""
    tokeniser = Tokeniser(fm)
    # Sentinel lets the loop condition inspect analysed_tokens[-1] immediately.
    analysed_tokens = [Token("START_OF_FILE", "", 0)]
    # BUGFIX: was `is not "END_OF_FILE"` — identity comparison with a string
    # literal is interning-dependent and may loop forever; use `!=`.
    while analysed_tokens[-1].type != "END_OF_FILE":
        analysed_tokens.append(tokeniser.get_token())
    # Drop the START_OF_FILE sentinel before parsing.
    analysed_tokens = analysed_tokens[1:]
    # Parser consumes tokens from the end, hence the reversal.
    parsed = ProgramNode(analysed_tokens[::-1], fm)
    parsed.execute()
def run(path):
    """Tokenise, parse, and execute the program at *path*, printing the
    token stream and parse tree as debug output before execution."""
    file_manager = FileManager(path)
    with file_manager as fm:
        tokeniser = Tokeniser(fm)
        # Sentinel lets the loop condition inspect analysed_tokens[-1]
        # immediately.
        analysed_tokens = [Token("START_OF_FILE", "", 0)]
        # BUGFIX: was `is not "END_OF_FILE"` — identity comparison with a
        # string literal is interning-dependent and may loop forever; use `!=`.
        while analysed_tokens[-1].type != "END_OF_FILE":
            analysed_tokens.append(tokeniser.get_token())
        # Drop the START_OF_FILE sentinel before parsing.
        analysed_tokens = analysed_tokens[1:]
        print_tokens(analysed_tokens)
        # NOTE(review): `file_manager` is passed here while `fm` (the value
        # returned by __enter__) is used for the Tokeniser — presumably the
        # same object, but verify FileManager.__enter__ returns self.
        parsed = ProgramNode(analysed_tokens[::-1], file_manager)
        print_tree(parsed, 0)
        print("==========")
        parsed.execute()