def test_lexical_analyze_divide_single_line_multi_line(self):
    """The lexer emits divide, single_line_comment and multi_line_comment tokens."""
    import os

    # NOTE(review): the original literal layout was lost in formatting; the
    # expected token line numbers (divide=1, single_line_comment=2,
    # multi_line_comment=3) imply the source spans multiple lines — confirm.
    source_code = """var a = 1 / 2
// Hello World
/* Bye World
*/
"""
    with open("testing.simc", "w") as file:
        file.write(source_code)
    # Remove the temporary source file even if an assertion below fails.
    self.addCleanup(os.remove, "testing.simc")

    table = SymbolTable()
    tokens = lexical_analyze("testing.simc", table)

    divide = Token("divide", "", 1)
    single_line_comment = Token("single_line_comment", " Hello World", 2)
    multi_line_comment = Token(
        "multi_line_comment",
        """ Bye World
""",
        3,
    )

    self.assertEqual(tokens[4], divide)
    self.assertEqual(tokens[7], single_line_comment)
    # Bug fix: multi_line_comment was constructed but never checked.
    self.assertIn(multi_line_comment, tokens)
def test_skip_all_nextlines_no_nextline(self):
    """With no newline token at the cursor, skip_all_nextlines returns index 1.

    NOTE(review): the expected value (1, not 0) suggests the helper always
    advances at least one position — grounded only in this expectation,
    not in the helper's source.
    """
    toks = [Token("print", "", 1), Token("var", "", 1)]
    # Fixed: dropped the dead `i = 0` store that was immediately overwritten;
    # only the returned index is under test.
    new_index = skip_all_nextlines(tokens=toks, i=0)
    self.assertEqual(new_index, 1)
def test_check_ptr_false_case(self):
    """check_ptr reports no pointer when the token at i is not a multiply."""
    toks = [Token("print", "", 1), Token("newline", "", 1)]
    is_ptr, star_count, _ = check_ptr(tokens=toks, i=0)
    self.assertEqual(False, is_ptr)
    self.assertEqual(0, star_count)
def test_for_statement_missing_in_keyword(self):
    """A `for <id>` without the `in` keyword must be rejected."""
    bad_tokens = [
        Token("for", "", 1),
        Token("id", 1, 1),
        Token("print", "", 1),
    ]
    self.__test_error_case(bad_tokens)
def test_for_statement_missing_starting_value(self):
    """A `for <id> in` with no starting value must be rejected."""
    bad_tokens = [
        Token("for", "", 1),
        Token("id", 1, 1),
        Token("in", "", 1),
        Token("print", "", 1),
    ]
    self.__test_error_case(bad_tokens)
def test_lexical_analyze_keyword_identifier(self):
    """`var a` lexes to a keyword token followed by an identifier token."""
    self.__setup(source_code="var a")
    tokens, _ = self.lexical_analyzer.lexical_analyze()
    expected = [Token("var", "", 1), Token("id", 1, 1)]
    self.__assertListEquality(tokens, expected)
def test_exit_statement_expected_left_paren_error(self):
    """`exit` not followed by an opening parenthesis aborts with SystemExit."""
    toks = [Token("exit", "", 1), Token("print", "", 1)]
    sym = SymbolTable()
    self.__suppress_print()
    with self.assertRaises(SystemExit):
        exit_statement(tokens=toks, i=1, table=sym, func_ret_type={})
    self.__release_print()
def test_var_statement_array_expected_integer_size_of_array(self):
    """An array declaration whose size is not an integer aborts with SystemExit."""
    toks = [
        Token("var", "", 1),
        Token("id", 1, 1),
        Token("left_bracket", "", 1),
        Token("number", 2, 1),
        Token("right_bracket", "", 1),
        Token("assignment", "", 1),
        Token("left_brace", "", 1),
        Token("number", 3, 1),
        Token("comma", "", 1),
    ]
    sym = SymbolTable()
    # A float constant as the declared size should trigger the error.
    sym.entry("a", "var", "variable")
    sym.entry("3.14", "float", "constant")
    self.__suppress_print()
    with self.assertRaises(SystemExit):
        var_statement(tokens=toks, i=1, table=sym, func_ret_type={})
    self.__release_print()
def test_skip_all_nextlines_some_nextline(self):
    """skip_all_nextlines advances past consecutive newline tokens."""
    toks = [
        Token("newline", "", 1),
        Token("newline", "", 1),
        Token("print", "", 1),
    ]
    # Fixed: dropped the dead `i = 0` store that was immediately overwritten;
    # only the returned index is under test.
    new_index = skip_all_nextlines(tokens=toks, i=0)
    self.assertEqual(new_index, 2)
def test_check_ptr_true_case(self):
    """Two consecutive multiply tokens are recognised as a (double) pointer."""
    toks = [
        Token("multiply", "", 1),
        Token("multiply", "", 1),
        Token("newline", "", 1),
    ]
    is_ptr, star_count, _ = check_ptr(tokens=toks, i=0)
    self.assertEqual(True, is_ptr)
    self.assertEqual(2, star_count)
def test_struct_declaration_statement_expected_struct_name(self):
    """`struct` not followed by a name aborts with SystemExit."""
    toks = [Token("struct", "", 1), Token("print", "", 1)]
    sym = SymbolTable()
    self.__suppress_print()
    with self.assertRaises(SystemExit):
        struct_declaration_statement(tokens=toks, i=1, table=sym)
    self.__release_print()
def test_lexical_analyze_brackets(self):
    """`[]` lexes to a left_bracket then a right_bracket token."""
    self.__setup(source_code="[]")
    tokens, _ = self.lexical_analyzer.lexical_analyze()
    expected = [
        Token("left_bracket", "", 1),
        Token("right_bracket", "", 1),
    ]
    self.__assertListEquality(tokens, expected)
def test_switch_statement_missing_left_paren(self):
    """`switch` not followed by an opening parenthesis aborts with SystemExit."""
    toks = [Token("switch", "", 1), Token("left_brace", "", 1)]
    sym = SymbolTable()
    self.__suppress_print()
    with self.assertRaises(SystemExit):
        switch_statement(tokens=toks, i=1, table=sym, func_ret_type={})
    self.__release_print()
def test_function_definition_statement_func_name_missing(self):
    """`fun` not followed by a function name aborts with SystemExit."""
    toks = [Token("fun", "", 1), Token("left_paren", "", 1)]
    sym = SymbolTable()
    self.__suppress_print()
    with self.assertRaises(SystemExit):
        function_definition_statement(
            tokens=toks, i=1, table=sym, func_ret_type={}
        )
    self.__release_print()
def test_lexical_analyze_single_line_comment(self):
    """A `//` comment lexes to a single_line_comment token plus a newline."""
    # NOTE(review): the expected newline token implies the source literal
    # ends with a newline — original layout lost in formatting; confirm.
    source_code = """// This is a comment
"""
    self.__setup(source_code=source_code)
    tokens, _ = self.lexical_analyzer.lexical_analyze()
    expected = [
        Token("single_line_comment", " This is a comment", 1),
        Token("newline", "", 1),
    ]
    self.__assertListEquality(tokens, expected)
def test_lexical_analyze_call_end_token(self):
    """A closing parenthesis is followed by a synthetic call_end token."""
    self.__setup(source_code="()\n")
    tokens, _ = self.lexical_analyzer.lexical_analyze()
    expected = [
        Token("left_paren", "", 1),
        Token("right_paren", "", 1),
        Token("call_end", "", 1),
    ]
    self.__assertListEquality(tokens, expected)
def test_for_statement_missing_to_keyword(self):
    """A `for <id> in <start>` without the `to` keyword must be rejected."""
    bad_tokens = [
        Token("for", "", 1),
        Token("id", 1, 1),
        Token("in", "", 1),
        Token("number", 2, 1),
        Token("print", "", 1),
    ]
    sym = SymbolTable()
    sym.entry("a", "int", "variable")
    sym.entry("1", "int", "variable")
    self.__test_error_case(bad_tokens, table=sym)
def test_var_statement_general_expected_id(self):
    """`var` not followed by an identifier aborts with SystemExit."""
    toks = [Token("var", "", 1), Token("print", "", 1)]
    sym = SymbolTable()
    self.__suppress_print()
    with self.assertRaises(SystemExit):
        var_statement(tokens=toks, i=1, table=sym, func_ret_type={})
    self.__release_print()
def test_exit_no_error(self):
    """A well-formed exit call produces an exit opcode with value "0"."""
    toks = [
        Token("exit", "", 1),
        Token("left_paren", "", 1),
        Token("number", 1, 1),
        Token("right_paren", "", 1),
    ]
    sym = SymbolTable()
    sym.entry("0", "int", "constant")
    opcode, _, _ = exit_statement(
        tokens=toks, i=1, table=sym, func_ret_type={}
    )
    expected = OpCode("exit", "0", None)
    self.assertEqual(opcode, expected)
def test_while_statement_missing_left_paren(self):
    """`while` not followed by an opening parenthesis aborts with SystemExit."""
    toks = [Token("while", "", 1), Token("id", 1, 1)]
    sym = SymbolTable()
    self.__suppress_print()
    with self.assertRaises(SystemExit):
        while_statement(
            tokens=toks, i=1, table=sym, in_do=False, func_ret_type={}
        )
    self.__release_print()
def test_if_statement_missing_right_paren(self):
    """An if condition missing its closing parenthesis aborts with SystemExit."""
    toks = [
        Token("if", "", 1),
        Token("left_paren", "", 1),
        Token("print", "", 1),
    ]
    sym = SymbolTable()
    self.__suppress_print()
    with self.assertRaises(SystemExit):
        if_statement(tokens=toks, i=1, table=sym, func_ret_type={})
    self.__release_print()
def test_var_statement_var_no_assign(self):
    """Declaring a var with no assignment yields a var_no_assign opcode."""
    toks = [
        Token("var", "", 1),
        Token("id", 1, 1),
        Token("newline", "", 1),
    ]
    sym = SymbolTable()
    # Pre-populate the table directly so id 1 resolves to "b".
    sym.symbol_table = {
        1: ["b", "var", "variable", "", ""],
        2: ["1", "int", "constant", "", ""],
        3: ["a", "var", "variable", "", ""],
    }
    generated = parse(toks, sym)
    self.assertEqual(generated[0], OpCode("var_no_assign", "b", None))
def test_print_statement_no_error(self):
    """A well-formed print of a string constant yields a print opcode."""
    toks = [
        Token("print", "", 1),
        Token("left_paren", "", 1),
        Token("string", 1, 1),
        Token("right_paren", "", 1),
        Token("newline", "", 1),
    ]
    sym = SymbolTable()
    sym.entry('"hello world"', "string", "constant")
    opcode, _, _ = print_statement(
        tokens=toks, i=1, table=sym, func_ret_type={}, num_opcodes=-1
    )
    expected = OpCode("print", '"hello world"', None)
    self.assertEqual(opcode, expected)
def test_case_statement_missing_colon(self):
    """A case value not followed by a colon aborts with SystemExit."""
    toks = [
        Token("case", "", 1),
        Token("number", "", 1),
        Token("print", "", 1),
    ]
    sym = SymbolTable()
    sym.entry("1", "int", "variable")
    self.__suppress_print()
    with self.assertRaises(SystemExit):
        case_statement(tokens=toks, i=1, table=sym, func_ret_type={})
    self.__release_print()
class TestTokenClass(unittest.TestCase):
    """Unit tests for the Token value class (__str__, __eq__, token2dig)."""

    def setUp(self):
        # Two tokens of the same type but different values/lines.
        self.token = Token("number", 1, 2)
        self.other = Token("number", 2, 2)

    def test__str__(self):
        rendered = str(self.token)
        self.assertEqual(rendered, "Token('number', '1', '2')")

    def test__eq__(self):
        # Tokens with different values must compare unequal.
        differs = self.token != self.other
        self.assertTrue(differs)

    def test_token2dig(self):
        # Known token names map to their numeric codes; unknown names map to 0.
        cases = (
            ("string", 2),
            ("multiply", 11),
            ("assignment", 8),
            ("while", 22),
            ("hello", 0),
        )
        for name, code in cases:
            self.assertEqual(self.token.token2dig(name), code)
def test_lexical_analyze_assignment_equal(self):
    """`=` lexes to an assignment token and `==` to an equal token."""
    import os

    # Test assignment and equal
    source_code = "var a = 1 == 1"
    with open("testing.simc", "w") as file:
        file.write(source_code)
    # Fixed: remove the temporary source file even if an assertion fails,
    # instead of leaking it onto disk after the test run.
    self.addCleanup(os.remove, "testing.simc")

    table = SymbolTable()
    tokens = lexical_analyze("testing.simc", table)

    assignment = Token("assignment", "", 1)
    equal = Token("equal", "", 1)
    self.assertEqual(tokens[2], assignment)
    self.assertEqual(tokens[-2], equal)
def test_lexical_analyze_left_right_paren_call_end(self):
    """`(1)` lexes to left_paren, the number, right_paren, then call_end."""
    import os

    # Test left_paren, right_paren, and call_end
    source_code = "var a = (1)"
    with open("testing.simc", "w") as file:
        file.write(source_code)
    # Fixed: remove the temporary source file even if an assertion fails.
    self.addCleanup(os.remove, "testing.simc")

    table = SymbolTable()
    tokens = lexical_analyze("testing.simc", table)

    left_paren = Token("left_paren", "", 1)
    right_paren = Token("right_paren", "", 1)
    call_end = Token("call_end", "", 1)

    self.assertEqual(tokens[3], left_paren)
    self.assertEqual(tokens[5], right_paren)
    # Bug fix: call_end was constructed but never asserted. The lexer emits
    # call_end immediately after right_paren (as the "()" test shows), so it
    # sits at index 6 here.
    self.assertEqual(tokens[6], call_end)
def test_struct_declaration_statement_expected_left_brace(self):
    """A struct name not followed by an opening brace aborts with SystemExit."""
    toks = [
        Token("struct", "", 1),
        Token("id", 1, 1),
        Token("print", "", 1),
    ]
    sym = SymbolTable()
    sym.entry("my_struct", "var", "variable")
    self.__suppress_print()
    with self.assertRaises(SystemExit):
        struct_declaration_statement(tokens=toks, i=1, table=sym)
    self.__release_print()
def test_assign_statement_variable_used_before_declaration(self):
    """Assigning to a variable before its declaration aborts parsing."""
    toks = [
        Token("id", 1, 1),
        Token("assignment", "", 1),
        Token("number", 2, 1),
        Token("newline", "", 1),
    ]
    sym = SymbolTable()
    sym.entry("a", "var", "variable")
    sym.entry("2", "int", "constant")
    self.__suppress_print()
    with self.assertRaises(SystemExit):
        parse(toks, sym)
    self.__release_print()
def test_lexical_analyze_newline(self):
    """A lone newline character lexes to a single newline token."""
    self.__setup(source_code="\n")
    tokens, _ = self.lexical_analyzer.lexical_analyze()
    self.__assertListEquality(tokens, [Token("newline", "", 1)])