def test_tokeniser_recognises_beginning_of_if_statement_with_boolean(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("WALK_PATH_IF_SEE^true")
    self.assertEqual(tokeniser.create_tokens(), [{"IF_START" : "WALK_PATH_IF_SEE"}, {"BOOLEAN" : "true"}])

def test_method_returns_crow_operator_when_passed_in_with_integers(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("5^(*)>^2")
    self.assertEqual(tokeniser.create_tokens(), [{"INTEGER" : "5"}, {"MODULUS" : "(*)>"}, {"INTEGER" : "2"}])

def test_tokeniser_recognises_beginning_of_if_statement_with_argument(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("WALK_PATH_IF_SEE^6")
    self.assertEqual(tokeniser.create_tokens(), [{"IF_START" : "WALK_PATH_IF_SEE"}, {"INTEGER" : "6"}])

def test_method_returns_dead_owl_tokens_when_passed_with_strings(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("<<Superman>>^XvX^<<Batman>>")
    self.assertEqual(tokeniser.create_tokens(), [
        {"STRSTART" : "<<"},
        {"STRING_CONTENT" : "Superman"},
        {"STRSTOP" : ">>"},
        {"NOT_EQUAL" : "XvX"},
        {"STRSTART" : "<<"},
        {"STRING_CONTENT" : "Batman"},
        {"STRSTOP" : ">>"},
    ])

def test_method_returns_crow_operator_when_passed_in(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("(*)>")
    self.assertEqual(tokeniser.create_tokens(), [{"MODULUS" : "(*)>"}])

def test_method_returns_dead_owl_when_passed_in_with_integers(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("7^XvX^8")
    self.assertEqual(tokeniser.create_tokens(), [{"INTEGER" : "7"}, {"NOT_EQUAL" : "XvX"}, {"INTEGER" : "8"}])

def test_method_returns_dead_owl_when_passed_in_with_bools(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("true^XvX^false")
    self.assertEqual(tokeniser.create_tokens(), [{"BOOLEAN" : "true"}, {"NOT_EQUAL" : "XvX"}, {"BOOLEAN" : "false"}])

def test_method_returns_comparison_token_and_bool_for_owl_operator_with_bools(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("true^OvO^false")
    self.assertEqual(tokeniser.create_tokens(), [{"BOOLEAN" : "true"}, {"EQUALS" : "OvO"}, {"BOOLEAN" : "false"}])

def test_method_returns_tokens_for_String_comparison(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("<<this>>^OvO^<<that>>")
    self.assertEqual(tokeniser.create_tokens(), [
        {"STRSTART" : "<<"},
        {"STRING_CONTENT" : "this"},
        {"STRSTOP" : ">>"},
        {"EQUALS" : "OvO"},
        {"STRSTART" : "<<"},
        {"STRING_CONTENT" : "that"},
        {"STRSTOP" : ">>"},
    ])

def test_method_returns_string_token_when_passed_a_longer_string(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("<<a lot of text>>")
    self.assertEqual(tokeniser.create_tokens(), [{"STRSTART" : "<<"}, {"STRING_CONTENT" : "a lot of text"}, {"STRSTOP" : ">>"}])

def test_method_returns_comparison_token_for_owl_operator(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("OvO")
    self.assertEqual(tokeniser.create_tokens(), [{"EQUALS" : "OvO"}])

def test_method_returns_array_split_by_delimiter(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("true^4")
    self.assertEqual(tokeniser.split_input(), ["true", "4"])

def test_method_returns_integer_item_when_passed_number(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("9")
    self.assertEqual(tokeniser.create_tokens(), [{"INTEGER" : "9"}])

def test_tokeniser_recognises_that_false_is_false(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("false")
    self.assertEqual(tokeniser.create_tokens(), [{"BOOLEAN" : "false"}])

def test_tokeniser_recognises_all_tokens_in_text(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("echo^<<Hello Test!>>")
    self.assertEqual(tokeniser.create_tokens(), [{"ECHO" : "echo"}, {"STRSTART" : "<<"}, {"STRING_CONTENT" : "Hello Test!"}, {"STRSTOP" : ">>"}])

def test_method_returns_string_token_when_passed_caps_z(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("echo^<<Z>>")
    self.assertEqual(tokeniser.create_tokens(), [{"ECHO" : "echo"}, {"STRSTART" : "<<"}, {"STRING_CONTENT" : "Z"}, {"STRSTOP" : ">>"}])
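

# ---------------------------------------------------------------------------
# The tests above pin down the token stream the Tokeniser is expected to
# produce: input is split on '^', a string literal "<<...>>" expands into
# STRSTART / STRING_CONTENT / STRSTOP, and the remaining chunks map onto
# keyword, operator, boolean and integer tokens. The class below is only a
# minimal sketch of an implementation that would satisfy these tests; it is
# not the real forest.Tokeniser, and everything beyond the tested method
# names (split_input, create_tokens) is an assumption made for illustration.
class _SketchTokeniser:
    # Chunks that map one-to-one onto a token type, as exercised above.
    KEYWORDS = {
        "echo": "ECHO",
        "WALK_PATH_IF_SEE": "IF_START",
        "(*)>": "MODULUS",
        "XvX": "NOT_EQUAL",
        "OvO": "EQUALS",
        "true": "BOOLEAN",
        "false": "BOOLEAN",
    }

    def __init__(self, text):
        self.text = text

    def split_input(self):
        # Statements are delimited by '^' (see test_method_returns_array_split_by_delimiter).
        return self.text.split("^")

    def create_tokens(self):
        tokens = []
        for chunk in self.split_input():
            if chunk.startswith("<<") and chunk.endswith(">>"):
                # A string literal expands into three tokens.
                tokens.append({"STRSTART": "<<"})
                tokens.append({"STRING_CONTENT": chunk[2:-2]})
                tokens.append({"STRSTOP": ">>"})
            elif chunk in self.KEYWORDS:
                tokens.append({self.KEYWORDS[chunk]: chunk})
            elif chunk.isdigit():
                tokens.append({"INTEGER": chunk})
            else:
                raise ValueError(f"Unrecognised chunk: {chunk!r}")
        return tokens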