def test_struct(self):
    """A struct declaration should lex into the expected token sequence,
    with the stream exhausted (None) after the trailing ';'."""
    program = """ struct foo { int x; }; """
    lexer = new_lexer(program)
    expected_values = ['struct', 'foo', '{', 'int', 'x', ';', '}', ';']
    for expected in expected_values:
        token = lexer.lex()
        # subTest reports every mismatching token instead of stopping
        # at the first failed assertion.
        with self.subTest(expected=expected):
            self.assertEqual(token.value, expected,
                             'Value should be "%s"' % expected)
    # After the final ';' the lexer must signal end-of-input.
    self.assertIsNone(lexer.lex(), 'Value should be None')
def test_single_line_comment(self):
    """A '//' comment should be emitted as a single 'comment' token
    whose value is the whole comment text."""
    source = '// Single line comment.'
    comment_token = new_lexer(source).lex()
    self.assertIsNotNone(comment_token, 'Token should not be None')
    self.assertEqual(comment_token.type, 'comment', 'The type should be comment')
    self.assertEqual(comment_token.value, '// Single line comment.',
                     'Value should be "// Single line comment."')
def test_names(self):
    """An identifier following a type keyword lexes as a 'name' token."""
    lexer = new_lexer('int flag')
    first = lexer.lex()
    self.assertEqual(first.value, 'int', 'Value should be "int"')
    second = lexer.lex()
    self.assertEqual(second.type, 'name', 'Type should be "name"')
    self.assertEqual(second.value, 'flag', 'Value should be "flag"')
def test_numbers(self):
    """Integer literals, including a lone zero, lex as 'number' tokens."""
    lexer = new_lexer('0 12345')
    zero_token = lexer.lex()
    self.assertEqual(zero_token.type, 'number', 'Type should be "number"')
    self.assertEqual(zero_token.value, '0', 'Value should be 0')
    long_token = lexer.lex()
    self.assertEqual(long_token.type, 'number', 'Type should be "number"')
    self.assertEqual(long_token.value, '12345', 'Value should be 12345')
def test_multi_line_comment(self):
    """A '/* ... */' comment is emitted verbatim as one 'comment' token."""
    program = """/* * Multi-line comment */"""
    comment_token = new_lexer(program).lex()
    self.assertIsNotNone(comment_token, 'Token should not be None')
    self.assertEqual(comment_token.type, 'comment', 'The type should be comment')
    # The token value must reproduce the comment source exactly.
    self.assertEqual(comment_token.value, program, 'Value should be equal')
def test_verify_whitespace_skip(self):
    """Whitespace between tokens is skipped, leaving only the tokens."""
    lexer = new_lexer(""" 1 2 """)
    self.assertEqual(lexer.lex().value, '1', 'Value should be "1"')
    self.assertEqual(lexer.lex().value, '2', 'Value should be "2"')
def test_operators(self):
    """Each single-character operator lexes as an 'operator' token, and
    the stream is exhausted (None) afterwards."""
    operators = ['{', '}', '[', ']', ';', ',']
    # Build the program from the same list the asserts use, so the two
    # can never drift apart.
    lexer = new_lexer(' '.join(operators))
    for expected in operators:
        token = lexer.lex()
        # subTest reports every failing operator, not just the first.
        with self.subTest(expected=expected):
            self.assertEqual(token.type, 'operator', 'Type should be "operator"')
            self.assertEqual(token.value, expected,
                             'Value should be "%s"' % expected)
    self.assertIsNone(lexer.lex(), 'Value should be None')
def test_type(self):
    """Every supported type keyword — including multi-word forms such as
    'unsigned long long' — lexes as a single 'type' token, and the
    stream is exhausted (None) after the last one."""
    expected_types = [
        'char', 'signed char', 'unsigned char', 'bool', 'short',
        'unsigned short', 'int', 'unsigned int', 'long', 'unsigned long',
        'float', 'long long', 'unsigned long long', 'double',
    ]
    # Derive the program text from the expected list so the input and
    # the assertions cannot fall out of sync.
    program = ' '.join(expected_types)
    lexer = new_lexer(program)
    for expected in expected_types:
        token = lexer.lex()
        # subTest pinpoints exactly which type keyword failed.
        with self.subTest(expected=expected):
            self.assertEqual(token.type, 'type', 'Type should be type')
            self.assertEqual(token.value, expected,
                             'Value should be "%s"' % expected)
    self.assertIsNone(lexer.lex(), 'Value should be None')