def test_multiple_float_comes_out_as_2_token(self):
     actual = Lexer("""1.9 2.5\n""").tokenize()
     expected = [
         Token(type="Float", value=1.9),
         Token(type="Float", value=2.5)
     ]
     self.assertListEqual(actual, expected)
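# The tests in this file construct Token(type=..., value=...) with keyword
# arguments and compare whole lists with assertListEqual, which needs value
# equality on Token. A minimal compatible sketch (hypothetical; the real
# class may carry extra fields such as a source position):
from dataclasses import dataclass
from typing import Any

@dataclass(frozen=True)
class Token:
    type: str
    value: Any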
 def test_multiple_operator_comes_out_as_2_token(self):
     actual = Lexer("""+ -\n""").tokenize()
     expected = [
         Token(type="Operator", value="+"),
         Token(type="Operator", value="-")
     ]
     self.assertListEqual(actual, expected)
 def test_multiple_variable_comes_out_as_2_token(self):
     actual = Lexer("""testVariableA testVariableB\n""").tokenize()
     expected = [
         Token(type="Variable", value="testVariableA"),
         Token(type="Variable", value="testVariableB")
     ]
     self.assertListEqual(actual, expected)
 def test_multiple_control_comes_out_as_2_token(self):
     actual = Lexer("""if else\n""").tokenize()
     expected = [
         Token(type="Control", value="if"),
         Token(type="Control", value="else")
     ]
     self.assertListEqual(actual, expected)
 def test_multiple_string_inputs_comes_out_as_2_token(self):
     actual = Lexer(""""abc" "xyz"\n""").tokenize()
     expected = [
         Token(type="String", value="abc"),
         Token(type="String", value="xyz")
     ]
     self.assertListEqual(actual, expected)
 def test_arithmetic_expression(self):
     actual = Lexer("""5+7\n""").tokenize()
     expected = [
         Token(type="Int", value=5),
         Token(type="Operator", value="+"),
         Token(type="Int", value=7)
     ]
     self.assertEqual(actual, expected)
 def test_method_call(self):
     actual = Lexer("""print("hello world")\n""").tokenize()
     expected = [
         Token(type="Variable", value="print"),
         Token(type="Operator", value="("),
         Token(type="String", value="hello world"),
         Token(type="Operator", value=")")
     ]
     self.assertEqual(actual, expected)
 def test_arithmetic_assignment_expression(self):
     actual = Lexer("""testVar = 5 + 7\n""").tokenize()
     expected = [
         Token(type="Variable", value="testVar"),
         Token(type="Operator", value="="),
         Token(type="Int", value=5),
         Token(type="Operator", value="+"),
         Token(type="Int", value=7)
     ]
     self.assertEqual(actual, expected)
 def test_assorted_space_characters_between_tokens_are_removed(self):
     actual = Lexer("""  1
     3\t5
     \n""").tokenize()
     expected = [
         Token(type="Int", value=1),
         Token(type="Int", value=3),
         Token(type="Int", value=5)
     ]
     self.assertEqual(actual, expected)
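# The parse_* methods below read the current character through
# self.position.char and step forward with self.advance(), which must return
# a falsy value once the input is exhausted. A minimal scaffold consistent
# with that usage (the names and the EOF sentinel are assumptions, not the
# project's actual implementation):
class Position:
    def __init__(self, text):
        self.text = text
        self.index = 0

    @property
    def char(self):
        # A NUL sentinel fails the isalnum()/isnumeric()/operator checks at EOF
        return self.text[self.index] if self.index < len(self.text) else "\0"


class Lexer:
    def __init__(self, text):
        self.position = Position(text)

    def advance(self):
        # Step to the next character; a falsy return signals end of input
        self.position.index += 1
        return self.position.index < len(self.position.text)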
    def parse_variable(self):
        variable_literal = ""
        while self.position.char.isalnum():
            variable_literal += self.position.char
            self.advance()

        # Reserved words (if, else, ...) are reclassified as Control tokens
        if ControlToken.is_valid_control_token(variable_literal):
            return Token('Control', variable_literal)
        else:
            return Token('Variable', variable_literal)
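# parse_variable above defers keyword detection to
# ControlToken.is_valid_control_token. A hypothetical version, assuming the
# keyword set is exactly the one exercised by test_all_possible_controls
# further down:
class ControlToken:
    KEYWORDS = frozenset({"if", "elif", "else", "for", "while", "in", "or", "and"})

    @staticmethod
    def is_valid_control_token(literal):
        return literal in ControlToken.KEYWORDS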
    def parse_operator(self):
        operator_literal = ''

        # Greedily consume consecutive operator characters so that e.g. "=="
        # is emitted as a single token rather than two "=" tokens
        while OperatorToken.is_operator_char(self.position.char):
            operator_literal += self.position.char
            self.advance()

        return Token("Operator", operator_literal)
    def parse_numeric(self):
        # The integer part is moved into numeric_string_archived when a '.' is
        # seen; a second '.' therefore finds it non-empty and is rejected
        numeric_string_archived = ''
        numeric_string = ''

        while self.position.char.isnumeric() or self.position.char == '.':
            if self.position.char == '.':
                if numeric_string_archived != '':
                    raise Exception("You're only allowed one dot in a number, hooligan")
                numeric_string_archived = numeric_string
                numeric_string = ''

            numeric_string += self.position.char
            self.advance()

        if numeric_string_archived:
            return Token("Float", float(numeric_string_archived + numeric_string))
        else:
            return Token("Int", int(numeric_string))
 def test_all_possible_controls(self):
     actual = Lexer("""if elif else for while in or and\n""").tokenize()
     expected = [
         Token(type="Control", value="if"),
         Token(type="Control", value="elif"),
         Token(type="Control", value="else"),
         Token(type="Control", value="for"),
         Token(type="Control", value="while"),
         Token(type="Control", value="in"),
         Token(type="Control", value="or"),
         Token(type="Control", value="and")
     ]
     self.assertListEqual(actual, expected)
 def test_conditional_expression(self):
     # TODO: should this throw an error?
     actual = Lexer("""
     if 5==5:
         v = 5\n""").tokenize()
     expected = [
         Token(type="Control", value="if"),
         Token(type="Int", value=5),
         Token(type="Operator", value="=="),
         Token(type="Int", value=5),
         Token(type="Operator", value=":"),
         Token(type="Variable", value="v"),
         Token(type="Operator", value="="),
         Token(type="Int", value=5)
     ]
     self.assertEqual(actual, expected)
    def parse_string_literal(self):
        string_literal = ""
        self.advance()  # Advance beyond the opening double quote

        # TODO - Handle escaping quotes if we're feeling it
        while self.position.char not in STRING_DELINEATORS:
            string_literal += self.position.char

            if not self.advance():
                raise Exception("Reached end of input while looking for a terminating string delineator")

        # Advance beyond the closing double quote so we don't start interpreting another string literal
        self.advance()
        return Token("String", string_literal)
 def test_single_control_comes_out_as_1_token(self):
     actual = Lexer("""if\n""").tokenize()
     expected = [Token(type="Control", value="if")]
     self.assertListEqual(actual, expected)
 def test_newlines_between_tokens_are_removed(self):  #???
     actual = Lexer("""1
     3\n""").tokenize()
     expected = [Token(type="Int", value=1), Token(type="Int", value=3)]
     self.assertEqual(actual, expected)
 def test_single_alphanumeric_variables(self):
     actual = Lexer("""testVariable1\n""").tokenize()
     expected = [Token(type="Variable", value="testVariable1")]
     self.assertListEqual(actual, expected)
 def test_single_variable_comes_out_as_1_token(self):
     actual = Lexer("""testVariable\n""").tokenize()
     expected = [Token(type="Variable", value="testVariable")]
     self.assertListEqual(actual, expected)
 def test_multiple_character_string_comes_out_as_1_token(self):
     actual = Lexer(""""abc"\n""").tokenize()
     expected = [Token(type="String", value="abc")]
     self.assertListEqual(actual, expected)
 def test_single_float_comes_out_as_1_token(self):
     actual = Lexer("""1.9\n""").tokenize()
     expected = [Token(type="Float", value=1.9)]
     self.assertListEqual(actual, expected)
 def test_multiple_int_comes_out_as_2_token(self):
     actual = Lexer("""1 2\n""").tokenize()
     expected = [Token(type="Int", value=1), Token(type="Int", value=2)]
     self.assertListEqual(actual, expected)
 def test_space_within_string_comes_out_as_1_token(self):
     actual = Lexer(""""abc xyz"\n""").tokenize()
     expected = [Token(type="String", value="abc xyz")]
     self.assertListEqual(actual, expected)
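    # The tests drive everything through tokenize(). A plausible dispatch loop
    # consistent with the parse_* methods and the hypothetical scaffold
    # sketched earlier (the project's actual tokenize() may differ):
    def tokenize(self):
        tokens = []
        while self.position.char != "\0":  # NUL is the EOF sentinel from the scaffold sketch
            char = self.position.char
            if char.isspace():
                self.advance()  # spaces, tabs and newlines between tokens are dropped
            elif char in STRING_DELINEATORS:
                tokens.append(self.parse_string_literal())
            elif char.isnumeric():
                tokens.append(self.parse_numeric())
            elif OperatorToken.is_operator_char(char):
                tokens.append(self.parse_operator())
            else:
                tokens.append(self.parse_variable())
        return tokens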