def test_logical_types(self): line = "or and not" predicted = self.lexer._get_tokens_from_line(line) expected = [ BaseToken(TokenType.T_OR), BaseToken(TokenType.T_AND), BaseToken(TokenType.T_NOT) ] self.assertEqual(expected, predicted)
def test_lex_math_operations_types(self): line = "+ - * /" predicted = self.lexer._get_tokens_from_line(line) expected = [ BaseToken(TokenType.T_PLUS), BaseToken(TokenType.T_MINUS), BaseToken(TokenType.T_MUL), BaseToken(TokenType.T_DIV) ] self.assertEqual(expected, predicted)
def test_lex_data_types(self): line = "int double char bool string " predicted = self.lexer._get_tokens_from_line(line) expected = [ BaseToken(TokenType.T_INT), BaseToken(TokenType.T_DOUBLE), BaseToken(TokenType.T_CHAR), BaseToken(TokenType.T_BOOL), BaseToken(TokenType.T_STRING) ] self.assertEqual(expected, predicted)
def test_other_types(self): line = "if else true false return while = void function" predicted = self.lexer._get_tokens_from_line(line) expected = [ BaseToken(TokenType.T_IF), BaseToken(TokenType.T_ELSE), BaseToken(TokenType.T_TRUE), BaseToken(TokenType.T_FALSE), BaseToken(TokenType.T_RETURN), BaseToken(TokenType.T_WHILE), BaseToken(TokenType.T_ASSIGN), BaseToken(TokenType.T_VOID), BaseToken(TokenType.T_FUNCTION) ] self.assertEqual(expected, predicted)
def find_token(self, line):
    # Locate the next lexeme in the line; bail out if nothing matches.
    token_args = self._find_token_args(line)
    if not token_args:
        return None
    token_type, value = token_args
    # Token types that carry a payload become ValueTokens; everything else
    # (keywords, operators, punctuation) is a plain BaseToken.
    if token_type.has_value_field():
        token = ValueToken(token_type, value)
    else:
        token = BaseToken(token_type)
    return token
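# find_token above returns a ValueToken only when the matched token type reports
# has_value_field(); every test in this section exercises the BaseToken branch.
# Below is a minimal sketch of the other branch. The token type name T_NUMBER and
# the payload it carries are assumptions (nothing in this section defines a
# value-bearing type), so treat this as illustrative rather than the project's
# actual API.
def test_value_token_sketch(self):
    line = "42"
    token = self.find_token(line)
    # Assumed: TokenType.T_NUMBER exists, its has_value_field() is True,
    # and the lexer stores the literal's value (shown here as the int 42).
    expected = ValueToken(TokenType.T_NUMBER, 42)
    self.assertEqual(expected, token)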
def test_lex_punctuation_types(self):
    line = ", . : ; { } ( )"
    predicted = self.lexer._get_tokens_from_line(line)
    expected = [
        BaseToken(TokenType.T_COMMA),
        BaseToken(TokenType.T_DOT),
        BaseToken(TokenType.T_COLON),
        BaseToken(TokenType.T_SEMICOLON),
        BaseToken(TokenType.T_LBRACKET),
        BaseToken(TokenType.T_RBRACKET),
        BaseToken(TokenType.T_LPARENT),
        BaseToken(TokenType.T_RPARENT)
    ]
    self.assertEqual(expected, predicted)

def test_math_order_types(self):
    line = "<= < >= > == !="
    predicted = self.lexer._get_tokens_from_line(line)
    expected = [
        BaseToken(TokenType.T_LESS_OR_EQ),
        BaseToken(TokenType.T_LESS),
        BaseToken(TokenType.T_GREATER_OR_EQ),
        BaseToken(TokenType.T_GREATER),
        BaseToken(TokenType.T_EQ),
        BaseToken(TokenType.T_NOT_EQ)
    ]
    self.assertEqual(expected, predicted)
def test_string(self): line = "string" token = self.find_token(line) expected = BaseToken(TokenType.T_STRING) self.assertEqual(expected, token)
def test_char(self): line = "char" token = self.find_token(line) expected = BaseToken(TokenType.T_CHAR) self.assertEqual(expected, token)
def test_double(self): line = "double" token = self.find_token(line) expected = BaseToken(TokenType.T_DOUBLE) self.assertEqual(expected, token)
def test_and(self): line = "and" token = self.find_token(line) expected = BaseToken(TokenType.T_AND) self.assertEqual(expected, token)
def test_void(self): line = "void" token = self.find_token(line) expected = BaseToken(TokenType.T_VOID) self.assertEqual(expected, token)
def test_dot(self): line = "." token = self.find_token(line) expected = BaseToken(TokenType.T_DOT) self.assertEqual(expected, token)
def test_while(self): line = "while" token = self.find_token(line) expected = BaseToken(TokenType.T_WHILE) self.assertEqual(expected, token)
def test_assign(self): line = "=" token = self.find_token(line) expected = BaseToken(TokenType.T_ASSIGN) self.assertEqual(expected, token)
def test_return(self): line = "return" token = self.find_token(line) expected = BaseToken(TokenType.T_RETURN) self.assertEqual(expected, token)
def test_false(self): line = "false" token = self.find_token(line) expected = BaseToken(TokenType.T_FALSE) self.assertEqual(expected, token)
def test_true(self): line = "true" token = self.find_token(line) expected = BaseToken(TokenType.T_TRUE) self.assertEqual(expected, token)
def test_bool(self): line = "bool" token = self.find_token(line) expected = BaseToken(TokenType.T_BOOL) self.assertEqual(expected, token)
def test_function(self): line = "function" token = self.find_token(line) expected = BaseToken(TokenType.T_FUNCTION) self.assertEqual(expected, token)
def test_comma(self): line = "," token = self.find_token(line) expected = BaseToken(TokenType.T_COMMA) self.assertEqual(expected, token)
def test_greater(self): line = ">" token = self.find_token(line) expected = BaseToken(TokenType.T_GREATER) self.assertEqual(expected, token)
def test_greater_or_eq(self): line = ">=" token = self.find_token(line) expected = BaseToken(TokenType.T_GREATER_OR_EQ) self.assertEqual(expected, token)
def test_less(self): line = "<" token = self.find_token(line) expected = BaseToken(TokenType.T_LESS) self.assertEqual(expected, token)
def test_not_eq(self): line = "!=" token = self.find_token(line) expected = BaseToken(TokenType.T_NOT_EQ) self.assertEqual(expected, token)
def test_int(self): line = "int" token = self.find_token(line) expected = BaseToken(TokenType.T_INT) self.assertEqual(expected, token)
def test_arrow(self): line = "->" token = self.find_token(line) expected = BaseToken(TokenType.T_ARROW) self.assertEqual(expected, token)
def test_or(self): line = "or" token = self.find_token(line) expected = BaseToken(TokenType.T_OR) self.assertEqual(expected, token)
def test_specific_to_task_types(self): line = "unit phys" predicted = self.lexer._get_tokens_from_line(line) expected = [BaseToken(TokenType.T_UNIT), BaseToken(TokenType.T_PHYS)] self.assertEqual(expected, predicted)
def test_unit(self): line = "unit" token = self.find_token(line) expected = BaseToken(TokenType.T_UNIT) self.assertEqual(expected, token)