Example #1
def test_tokeniser_recognises_beginning_of_if_statement_with_boolean(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("WALK_PATH_IF_SEE^true")
    self.assertEqual(tokeniser.create_tokens(), [{"IF_START": "WALK_PATH_IF_SEE"}, {"BOOLEAN": "true"}])
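These snippets read as methods of a unittest.TestCase subclass. For context, here is a minimal sketch of how Example #1 could sit in a runnable test module; the class name TestTokeniser and the module-level import are assumptions, not part of the original examples.

# Hypothetical harness; only the test method body comes from Example #1 above.
import unittest

from forest import Tokeniser


class TestTokeniser(unittest.TestCase):
    def test_tokeniser_recognises_beginning_of_if_statement_with_boolean(self):
        tokeniser = Tokeniser("WALK_PATH_IF_SEE^true")
        self.assertEqual(tokeniser.create_tokens(), [{"IF_START": "WALK_PATH_IF_SEE"}, {"BOOLEAN": "true"}])


if __name__ == "__main__":
    unittest.main()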
Example #2
def test_method_returns_crow_operator_when_passed_in_with_integers(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("5^(*)>^2")
    self.assertEqual(tokeniser.create_tokens(), [{"INTEGER": "5"}, {"MODULUS": "(*)>"}, {"INTEGER": "2"}])
Example #3
def test_tokeniser_recognises_beginning_of_if_statement_with_argument(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("WALK_PATH_IF_SEE^6")
    self.assertEqual(tokeniser.create_tokens(), [{"IF_START": "WALK_PATH_IF_SEE"}, {"INTEGER": "6"}])
Example #4
def test_method_returns_dead_owl_tokens_when_passed_with_strings(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("<<Superman>>^XvX^<<Batman>>")
    self.assertEqual(tokeniser.create_tokens(), [
        {"STRSTART": "<<"}, {"STRING_CONTENT": "Superman"}, {"STRSTOP": ">>"},
        {"NOT_EQUAL": "XvX"},
        {"STRSTART": "<<"}, {"STRING_CONTENT": "Batman"}, {"STRSTOP": ">>"},
    ])
Example #5
def test_method_returns_crow_operator_when_passed_in(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("(*)>")
    self.assertEqual(tokeniser.create_tokens(), [{"MODULUS": "(*)>"}])
Example #6
def test_method_returns_dead_owl_when_passed_in_with_integers(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("7^XvX^8")
    self.assertEqual(tokeniser.create_tokens(), [{"INTEGER": "7"}, {"NOT_EQUAL": "XvX"}, {"INTEGER": "8"}])
Example #7
def test_method_returns_dead_owl_when_passed_in_with_bools(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("true^XvX^false")
    self.assertEqual(tokeniser.create_tokens(), [{"BOOLEAN": "true"}, {"NOT_EQUAL": "XvX"}, {"BOOLEAN": "false"}])
Example #8
def test_method_returns_comparison_token_and_bool_for_owl_operator_with_bools(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("true^OvO^false")
    self.assertEqual(tokeniser.create_tokens(), [{"BOOLEAN": "true"}, {"EQUALS": "OvO"}, {"BOOLEAN": "false"}])
Example #9
def test_method_returns_tokens_for_String_comparison(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("<<this>>^OvO^<<that>>")
    self.assertEqual(tokeniser.create_tokens(), [
        {"STRSTART": "<<"}, {"STRING_CONTENT": "this"}, {"STRSTOP": ">>"},
        {"EQUALS": "OvO"},
        {"STRSTART": "<<"}, {"STRING_CONTENT": "that"}, {"STRSTOP": ">>"},
    ])
Example #10
def test_method_returns_string_token_when_passed_a_longer_string(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("<<a lot of text>>")
    self.assertEqual(tokeniser.create_tokens(), [
        {"STRSTART": "<<"}, {"STRING_CONTENT": "a lot of text"}, {"STRSTOP": ">>"},
    ])
Example #11
def test_method_returns_comparison_token_for_owl_operator(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("OvO")
    self.assertEqual(tokeniser.create_tokens(), [{"EQUALS": "OvO"}])
Example #12
def test_method_returns_array_split_by_delimiter(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("true^4")
    self.assertEqual(tokeniser.split_input(), ["true", "4"])
Example #13
def test_method_returns_integer_item_when_passed_number(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("9")
    self.assertEqual(tokeniser.create_tokens(), [{"INTEGER": "9"}])
Example #14
def test_tokeniser_recognises_that_false_is_false(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("false")
    self.assertEqual(tokeniser.create_tokens(), [{"BOOLEAN": "false"}])
Example #15
def test_tokeniser_recognises_all_tokens_in_text(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("echo^<<Hello Test!>>")
    self.assertEqual(tokeniser.create_tokens(), [
        {"ECHO": "echo"}, {"STRSTART": "<<"}, {"STRING_CONTENT": "Hello Test!"}, {"STRSTOP": ">>"},
    ])
Example #16
def test_method_returns_string_token_when_passed_caps_z(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("echo^<<Z>>")
    self.assertEqual(tokeniser.create_tokens(), [
        {"ECHO": "echo"}, {"STRSTART": "<<"}, {"STRING_CONTENT": "Z"}, {"STRSTOP": ">>"},
    ])
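Taken together, the examples pin down the Tokeniser's observable behaviour: split_input splits the source text on the "^" delimiter, and create_tokens maps each word to a single-entry dictionary, with "<<...>>" literals expanding into STRSTART, STRING_CONTENT and STRSTOP tokens. Below is a minimal sketch of an implementation that would satisfy these assertions; it is inferred from the tests above and is not the actual forest module.

# Illustrative sketch only; token names and behaviour are taken from the
# assertions in Examples #1-#16, everything else is an assumption.
import re


class Tokeniser:
    # Fixed words that map directly to a single token type.
    KEYWORDS = {
        "WALK_PATH_IF_SEE": "IF_START",
        "(*)>": "MODULUS",
        "XvX": "NOT_EQUAL",
        "OvO": "EQUALS",
        "echo": "ECHO",
    }

    def __init__(self, text):
        self.text = text

    def split_input(self):
        # "^" acts as the word delimiter (see Example #12).
        return self.text.split("^")

    def create_tokens(self):
        tokens = []
        for word in self.split_input():
            if word in self.KEYWORDS:
                tokens.append({self.KEYWORDS[word]: word})
            elif word in ("true", "false"):
                tokens.append({"BOOLEAN": word})
            elif re.fullmatch(r"\d+", word):
                tokens.append({"INTEGER": word})
            elif word.startswith("<<") and word.endswith(">>"):
                # String literals expand into three tokens.
                tokens.append({"STRSTART": "<<"})
                tokens.append({"STRING_CONTENT": word[2:-2]})
                tokens.append({"STRSTOP": ">>"})
            else:
                raise ValueError(f"Unrecognised word: {word!r}")
        return tokens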