Example #1
def test_tokeniser_recognises_beginning_of_if_statement_with_boolean(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("WALK_PATH_IF_SEE^true")
    self.assertEqual(tokeniser.create_tokens(), [{"IF_START": "WALK_PATH_IF_SEE"}, {"BOOLEAN": "true"}])
Example #2
def test_method_returns_crow_operator_when_passed_in_with_integers(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("5^(*)>^2")
    self.assertEqual(tokeniser.create_tokens(), [{"INTEGER": "5"}, {"MODULUS": "(*)>"}, {"INTEGER": "2"}])
Example #3
def test_tokeniser_recognises_beginning_of_if_statement_with_argument(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("WALK_PATH_IF_SEE^6")
    self.assertEqual(tokeniser.create_tokens(), [{"IF_START": "WALK_PATH_IF_SEE"}, {"INTEGER": "6"}])
Example #4
def test_method_returns_dead_owl_tokens_when_passed_with_strings(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("<<Superman>>^XvX^<<Batman>>")
    self.assertEqual(tokeniser.create_tokens(), [{"STRSTART": "<<"}, {"STRING_CONTENT": "Superman"}, {"STRSTOP": ">>"}, {"NOT_EQUAL": "XvX"}, {"STRSTART": "<<"}, {"STRING_CONTENT": "Batman"}, {"STRSTOP": ">>"}])
Example #5
def test_method_returns_crow_operator_when_passed_in(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("(*)>")
    self.assertEqual(tokeniser.create_tokens(), [{"MODULUS": "(*)>"}])
Example #6
def test_method_returns_dead_owl_when_passed_in_with_integers(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("7^XvX^8")
    self.assertEqual(tokeniser.create_tokens(), [{"INTEGER": "7"}, {"NOT_EQUAL": "XvX"}, {"INTEGER": "8"}])
Example #7
def test_method_returns_dead_owl_when_passed_in_with_bools(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("true^XvX^false")
    self.assertEqual(tokeniser.create_tokens(), [{"BOOLEAN": "true"}, {"NOT_EQUAL": "XvX"}, {"BOOLEAN": "false"}])
Example #8
def test_method_returns_comparison_token_and_bool_for_owl_operator_with_bools(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("true^OvO^false")
    self.assertEqual(tokeniser.create_tokens(), [{"BOOLEAN": "true"}, {"EQUALS": "OvO"}, {"BOOLEAN": "false"}])
Example #9
def test_method_returns_tokens_for_String_comparison(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("<<this>>^OvO^<<that>>")
    self.assertEqual(tokeniser.create_tokens(), [{"STRSTART": "<<"}, {"STRING_CONTENT": "this"}, {"STRSTOP": ">>"}, {"EQUALS": "OvO"}, {"STRSTART": "<<"}, {"STRING_CONTENT": "that"}, {"STRSTOP": ">>"}])
Example #10
def test_method_returns_string_token_when_passed_a_longer_string(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("<<a lot of text>>")
    self.assertEqual(tokeniser.create_tokens(), [{"STRSTART": "<<"}, {"STRING_CONTENT": "a lot of text"}, {"STRSTOP": ">>"}])
Example #11
def test_method_returns_comparison_token_for_owl_operator(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("OvO")
    self.assertEqual(tokeniser.create_tokens(), [{"EQUALS": "OvO"}])
Example #12
def test_method_returns_array_split_by_delimiter(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("true^4")
    self.assertEqual(tokeniser.split_input(), ["true", "4"])
Example #13
def test_method_returns_integer_item_when_passed_number(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("9")
    self.assertEqual(tokeniser.create_tokens(), [{"INTEGER": "9"}])
Example #14
def test_tokeniser_recognises_that_false_is_false(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("false")
    self.assertEqual(tokeniser.create_tokens(), [{"BOOLEAN": "false"}])
Example #15
def test_tokeniser_recognises_all_tokens_in_text(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("echo^<<Hello Test!>>")
    self.assertEqual(tokeniser.create_tokens(), [{"ECHO": "echo"}, {"STRSTART": "<<"}, {"STRING_CONTENT": "Hello Test!"}, {"STRSTOP": ">>"}])
Example #16
def test_method_returns_string_token_when_passed_caps_z(self):
    from forest import Tokeniser
    tokeniser = Tokeniser("echo^<<Z>>")
    self.assertEqual(tokeniser.create_tokens(), [{"ECHO": "echo"}, {"STRSTART": "<<"}, {"STRING_CONTENT": "Z"}, {"STRSTOP": ">>"}])