Example #1
 def test_tokeniser_recognises_beginning_of_if_statement_with_argument(self):
     from forest import Tokeniser
     tokeniser = Tokeniser("WALK_PATH_IF_SEE^6")
     self.assertEqual(tokeniser.create_tokens(), [{"IF_START" : "WALK_PATH_IF_SEE"}, {"INTEGER" : "6"}])
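Each example is a single unittest test method; to run one, it has to sit inside a unittest.TestCase subclass. A minimal harness around Example #1 might look like the following (the class name TestTokeniser is an assumption, not part of the source):

import unittest


class TestTokeniser(unittest.TestCase):
    # Hypothetical wrapper class; only the test method itself comes from the example above.
    def test_tokeniser_recognises_beginning_of_if_statement_with_argument(self):
        from forest import Tokeniser
        tokeniser = Tokeniser("WALK_PATH_IF_SEE^6")
        self.assertEqual(tokeniser.create_tokens(),
                         [{"IF_START": "WALK_PATH_IF_SEE"}, {"INTEGER": "6"}])


if __name__ == "__main__":
    unittest.main()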
Example #2
 def test_tokeniser_recognises_beginning_of_if_statement_with_boolean(self):
     from forest import Tokeniser
     tokeniser = Tokeniser("WALK_PATH_IF_SEE^true")
     self.assertEqual(tokeniser.create_tokens(), [{"IF_START" : "WALK_PATH_IF_SEE"}, {"BOOLEAN" : "true"}])
Example #3
 def test_method_returns_crow_operator_when_passed_in(self):
     from forest import Tokeniser
     tokeniser = Tokeniser("(*)>")
     self.assertEqual(tokeniser.create_tokens(), [{"MODULUS" : "(*)>"}])
Example #4
 def test_method_returns_crow_operator_when_passed_in_with_integers(self):
     from forest import Tokeniser
     tokeniser = Tokeniser("5^(*)>^2")
     self.assertEqual(tokeniser.create_tokens(), [{"INTEGER" : "5"}, {"MODULUS" : "(*)>"}, {"INTEGER" : "2"}])
Example #5
 def test_tokenise_variable_assignment_assigner(self):
     from forest import Tokeniser
     tokeniser = Tokeniser("PACK_WITH")
     self.assertEqual(tokeniser.create_tokens(), [{"ASSIGNMENT" : "PACK_WITH"}])
Example #6
 def test_tokeniser_recognises_all_tokens_in_text(self):
     from forest import Tokeniser
     tokeniser = Tokeniser("echo^<<Hello Test!>>")
     self.assertEqual(tokeniser.create_tokens(), [{"ECHO" : "echo"}, {"STRSTART" : "<<"}, {"STRING_CONTENT" : "Hello Test!"}, {"STRSTOP" : ">>"}])
Example #7
 def test_method_returns_tokens_for_String_comparison(self):
     from forest import Tokeniser
     tokeniser = Tokeniser("<<this>>^OvO^<<that>>")
     self.assertEqual(tokeniser.create_tokens(), [{"STRSTART" : "<<"}, {"STRING_CONTENT" : "this"}, {"STRSTOP" : ">>"}, {"EQUALS" : "OvO"}, {"STRSTART" : "<<"}, {"STRING_CONTENT" : "that"}, {"STRSTOP" : ">>"}])
Example #8
 def test_tokeniser_tokenises_fizzbuzz_statement_with_end(self):
     from forest import Tokeniser
     tokeniser = Tokeniser("WALK_PATH_IF_SEE^30^(*)>^15^OvO^0^echo^<<fizzbuzz>>^CAMP")
     self.assertEqual(tokeniser.create_tokens(), [{"IF_START" : "WALK_PATH_IF_SEE"}, {"INTEGER" : "30"}, {"MODULUS" : "(*)>"}, {"INTEGER" : "15"}, {"EQUALS" : "OvO"}, {"INTEGER" : "0"}, {"ECHO" : "echo"}, {"STRSTART" : "<<"}, {"STRING_CONTENT" : "fizzbuzz"}, {"STRSTOP" : ">>"}, {"END" : "CAMP"}])
Example #9
 def test_method_returns_comparison_token_for_owl_operator(self):
     from forest import Tokeniser
     tokeniser = Tokeniser("OvO")
     self.assertEqual(tokeniser.create_tokens(), [{"EQUALS" : "OvO"}])
Example #10
 def test_method_returns_comparison_token_and_bool_for_owl_operator_with_bools(self):
     from forest import Tokeniser
     tokeniser = Tokeniser("true^OvO^false")
     self.assertEqual(tokeniser.create_tokens(), [{"BOOLEAN" : "true"}, {"EQUALS" : "OvO"}, {"BOOLEAN" : "false"}])
Example #11
 def test_method_returns_string_token_when_passed_a_longer_string(self):
     from forest import Tokeniser
     tokeniser = Tokeniser("<<a lot of text>>")
     self.assertEqual(tokeniser.create_tokens(), [{"STRSTART" : "<<"}, {"STRING_CONTENT" : "a lot of text"}, {"STRSTOP" : ">>"}])
Example #12
 def test_method_returns_integer_item_when_passed_number(self):
     from forest import Tokeniser
     tokeniser = Tokeniser("9")
     self.assertEqual(tokeniser.create_tokens(), [{"INTEGER" : "9"}])
Example #13
 def test_tokeniser_recognises_that_false_is_false(self):
     from forest import Tokeniser
     tokeniser = Tokeniser("false")
     self.assertEqual(tokeniser.create_tokens(), [{"BOOLEAN" : "false"}])
Example #14
 def test_tokeniser_recognises_end_of_expression(self):
     from forest import Tokeniser
     tokeniser = Tokeniser("CAMP")
     self.assertEqual(tokeniser.create_tokens(), [{"END" : "CAMP"}])
Example #15
 def test_method_returns_dead_owl_when_passed_in_with_integers(self):
     from forest import Tokeniser
     tokeniser = Tokeniser("7^XvX^8")
     self.assertEqual(tokeniser.create_tokens(), [{"INTEGER" : "7"}, {"NOT_EQUAL" : "XvX"}, {"INTEGER" : "8"}])
Example #16
 def test_tokeniser_tokenises_if_end_statement(self):
     from forest import Tokeniser
     tokeniser = Tokeniser("WALK_PATH_IF_SEE^CAMP")
     self.assertEqual(tokeniser.create_tokens(), [{"IF_START" : "WALK_PATH_IF_SEE"}, {"END" : "CAMP"}])
Example #17
 def test_method_returns_dead_owl_when_passed_in_with_bools(self):
     from forest import Tokeniser
     tokeniser = Tokeniser("true^XvX^false")
     self.assertEqual(tokeniser.create_tokens(), [{"BOOLEAN" : "true"}, {"NOT_EQUAL" : "XvX"}, {"BOOLEAN" : "false"}])
Example #18
 def test_tokenise_variable_assignment(self):
     from forest import Tokeniser
     tokeniser = Tokeniser("BACKPACK")
     self.assertEqual(tokeniser.create_tokens(), [{"VARIABLE" : "BACKPACK"}])
Example #19
 def test_method_returns_dead_owl_tokens_when_passed_with_strings(self):
     from forest import Tokeniser
     tokeniser = Tokeniser("<<Superman>>^XvX^<<Batman>>")
     self.assertEqual(tokeniser.create_tokens(), [{"STRSTART" : "<<"}, {"STRING_CONTENT" : "Superman"}, {"STRSTOP" : ">>"}, {"NOT_EQUAL" : "XvX"}, {"STRSTART" : "<<"}, {"STRING_CONTENT" : "Batman"}, {"STRSTOP" : ">>"}])
Example #20
 def test_tokenise_variable_assignment_with_a_string(self):
     from forest import Tokeniser
     tokeniser = Tokeniser("BACKPACK:arnold^PACK_WITH^<<pikachu>>")
     self.assertEqual(tokeniser.create_tokens(), [{"VARIABLE" : "BACKPACK"}, {"VARIABLE_NAME" : "arnold"}, {"ASSIGNMENT" : "PACK_WITH"},
     {"STRSTART" : "<<"}, {"STRING_CONTENT" : "pikachu"}, {"STRSTOP" : ">>"}])
Example #21
 def test_method_returns_string_token_when_passed_caps_z(self):
     from forest import Tokeniser
     tokeniser = Tokeniser("echo^<<Z>>")
     self.assertEqual(tokeniser.create_tokens(), [{"ECHO" : "echo"}, {"STRSTART" : "<<"}, {"STRING_CONTENT" : "Z"}, {"STRSTOP" : ">>"}])
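Taken together, the examples pin down the contract of Tokeniser.create_tokens(): the input is split on "^", fixed keywords map to fixed token types, <<...>> literals expand into STRSTART, STRING_CONTENT and STRSTOP tokens, and BACKPACK:name yields a VARIABLE token followed by a VARIABLE_NAME token. A minimal sketch that satisfies these examples (the real forest implementation may be organised differently) is:

import re

# Keyword chunks and the token types the examples expect for them.
KEYWORDS = {
    "WALK_PATH_IF_SEE": "IF_START",
    "PACK_WITH": "ASSIGNMENT",
    "echo": "ECHO",
    "OvO": "EQUALS",
    "XvX": "NOT_EQUAL",
    "(*)>": "MODULUS",
    "CAMP": "END",
}


class Tokeniser:
    def __init__(self, text):
        self.text = text

    def create_tokens(self):
        tokens = []
        for chunk in self.text.split("^"):
            if chunk in KEYWORDS:
                tokens.append({KEYWORDS[chunk]: chunk})
            elif chunk.startswith("<<") and chunk.endswith(">>"):
                # A string literal expands into three tokens.
                tokens.append({"STRSTART": "<<"})
                tokens.append({"STRING_CONTENT": chunk[2:-2]})
                tokens.append({"STRSTOP": ">>"})
            elif re.fullmatch(r"\d+", chunk):
                tokens.append({"INTEGER": chunk})
            elif chunk in ("true", "false"):
                tokens.append({"BOOLEAN": chunk})
            elif chunk.startswith("BACKPACK"):
                # "BACKPACK" alone is a VARIABLE token; "BACKPACK:name" adds a VARIABLE_NAME token.
                tokens.append({"VARIABLE": "BACKPACK"})
                if ":" in chunk:
                    tokens.append({"VARIABLE_NAME": chunk.split(":", 1)[1]})
            else:
                raise ValueError(f"unrecognised chunk: {chunk!r}")
        return tokens

Keeping the keyword-to-token mapping in a single dictionary keeps the fixed vocabulary in one place; anything that is not a keyword, string literal, integer, boolean or BACKPACK reference is rejected rather than silently skipped.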