Example #1
def test_detect_joined_strings_first_with_rus_letter(self):
    data = b"#1074#1086#1076#1086 + \r\n#1087#1072#1076)"
    t = Tokenizer(data)
    token = t.get_next_token()
    self.assertEqual(token.value, "водопад")
    token = t.get_next_token()
    self.assertEqual(token.id, ")")
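The `#NNNN` escapes in these fixtures are Delphi-style decimal character codes: each number is the Unicode code point of one character, so `chr(1074)` is the Cyrillic "в". A minimal illustration of the mapping, separate from the Tokenizer under test:

# chr() maps each decimal code point to its character:
# 1074 -> "в", 1086 -> "о", 1076 -> "д", 1087 -> "п", 1072 -> "а"
codes = ["1074", "1086", "1076", "1086", "1087", "1072", "1076"]
assert "".join(chr(int(c)) for c in codes) == "водопад"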
Example #2
def test_detect_joined_strings_first_with_quote(self):
    data = b"'aaaa' + \r\n'bbbb')"
    t = Tokenizer(data)
    token = t.get_next_token()
    self.assertEqual(token.value, "aaaabbbb")
    token = t.get_next_token()
    self.assertEqual(token.id, ")")
Example #3
def test_detect_single_string_tailed_with_rus_letter(self):
    data = b"#1101#1101#1101)"
    t = Tokenizer(data)
    token = t.get_next_token()
    self.assertEqual(token.value, "эээ")
    token = t.get_next_token()
    self.assertEqual(token.id, ")")
Example #4
def test_detect_single_string_tailed_with_quote(self):
    data = b"#1101#1101#1101'...')"
    t = Tokenizer(data)
    token = t.get_next_token()
    self.assertEqual(token.value, "эээ...")
    token = t.get_next_token()
    self.assertEqual(token.id, ")")
Example #5
def test_detect_end_of_file_token(self):
    data = b""
    t = Tokenizer(data)
    token = t.get_next_token()
    self.assertTrue(t.reader.eof)
    self.assertTrue(t.done)
    self.assertEqual(token.id, "END_FILE")
Example #6
def test_detect_splitted_string_with_russian_letters(self):
    # "+" before CRLF continues the string on the next line; the leading
    # spaces after the break are skipped by the tokenizer.
    data = (b"#1063#1077#1084 #1073#1086#1083#1100#1096#1077"
            b" #1089#1080#1083#1072, #1090 +\r\n"
            b"    #1077#1084 #1073#1086#1083#1100#1096#1077"
            b" #1086#1090#1074#1077#1090#1089#1090#1074#1077#1085#1085#1086#1089#1090#1100'.'")

    t = Tokenizer(data)
    token = t.get_next_token()
    self.assertEqual(token.value,
                     "Чем больше сила, тем больше ответственность.")
Example #7
def test_detect_joined_strings_followed_with_single_string_quote(self):
    data = b"'string one'+\r\n' continues here'\r\n#1082#1086#1085#1077#1094)"
    t = Tokenizer(data)
    token = t.get_next_token()
    self.assertEqual(token.value, "string one continues here")
    token = t.get_next_token()
    self.assertEqual(token.value, "конец")
    token = t.get_next_token()
    self.assertEqual(token.id, ")")
Example #8
def test_detect_joined_strings_followed_with_single_string_rus(self):
    data = b"#1074#1086#1076#1086 + \r\n#1087#1072#1076\r\n#1090#1072#1088#1077#1083#1082#1072)"
    t = Tokenizer(data)
    token = t.get_next_token()
    self.assertEqual(token.value, "водопад")
    token = t.get_next_token()
    self.assertEqual(token.value, "тарелка")
    token = t.get_next_token()
    self.assertEqual(token.id, ")")
Example #9
def check_sequence(self, fixture, sequence):
    """
    Extracts all tokens from sequence, then checks them against fixture.
    """
    t = Tokenizer(sequence)
    tokens = []
    while t.has_tokens():
        token = t.get_next_token()
        if token.value == "":
            tokens.append(str(token))
        else:
            tokens.append(token.value)
    self.assertEqual(fixture, tokens)
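A hypothetical call of this helper, reusing the fixture from Example #8 (the expected list is an assumption for illustration; if `has_tokens()` also reports the END_FILE sentinel, its string form would have to be appended to the fixture):

# Assumed usage of check_sequence, not a test from the actual suite:
def test_joined_then_single_string(self):
    self.check_sequence(
        ["водопад", "тарелка"],
        b"#1074#1086#1076#1086 + \r\n#1087#1072#1076\r\n#1090#1072#1088#1077#1083#1082#1072",
    )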
Example #10
def test_decode_string_with_leading_tabs(self):
    data = b"#9#9'from student where id = 1234'"
    t = Tokenizer(data)
    token = t.get_next_token()
    self.assertEqual(token.value, "\t\tfrom student where id = 1234")
Example #11
def test_detect_string_with_russian_letters(self):
    data = b"#1040 #1089#1084#1099#1089#1083'?'"
    t = Tokenizer(data)
    token = t.get_next_token()
    self.assertEqual(token.value, "А смысл?")
Example #12
def test_detect_boolean_token(self):
    data = b"False"
    t = Tokenizer(data)
    token = t.get_next_token()
    self.assertEqual(token.id, "BOOLEAN")
    self.assertEqual(token.value, False)
Example #13
def test_detect_identifier_token(self):
    data = b" someObject: objClass"
    t = Tokenizer(data)
    token = t.get_next_token()
    self.assertEqual(token.id, "IDENTIFIER")
    self.assertEqual(token.value, "someObject")
Example #14
def test_detect_object_token(self):
    data = b"\n  object someObject: objClass"
    t = Tokenizer(data)
    token = t.get_next_token()
    self.assertEqual(token.id, "OBJECT")
Example #15
def test_detect_assignment_token(self):
    data = b" = value"
    t = Tokenizer(data)
    token = t.get_next_token()
    self.assertEqual(token.id, "=")
Example #16
def test_detect_sequence_entry_token(self):
    data = b",value"
    t = Tokenizer(data)
    token = t.get_next_token()
    self.assertEqual(token.id, ",")
Example #17
def test_detect_item_token(self):
    data = b"  item\n"
    t = Tokenizer(data)
    token = t.get_next_token()
    self.assertEqual(token.id, "ITEM")
Example #18
def test_detect_scalar_sequence_end_token(self):
    data = b" )"
    t = Tokenizer(data)
    token = t.get_next_token()
    self.assertEqual(token.id, ")")
Example #19
def test_detect_type_definition_token(self):
    data = b" : integer\n"
    t = Tokenizer(data)
    token = t.get_next_token()
    self.assertEqual(token.id, "TYPEDEF")
    self.assertEqual(token.value, "integer")
Example #20
def test_fetch_word(self):
    data = b" wordToFetch123<"
    t = Tokenizer(data)
    t.move_to_next_token()
    word = t.fetch_word()
    self.assertEqual(word, b"wordToFetch123")
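`fetch_word` evidently consumes a run of identifier characters and stops at the first non-word byte (the `<` here). A plausible standalone sketch of that behavior; the regex, signature, and the name `WORD` are assumptions for illustration, not the Tokenizer's internals:

import re

WORD = re.compile(rb"[A-Za-z0-9_]+")

def fetch_word(buf: bytes, pos: int = 0) -> bytes:
    # Return the longest run of word bytes starting at pos; b"" if none.
    match = WORD.match(buf, pos)
    return match.group(0) if match else b""

assert fetch_word(b"wordToFetch123<") == b"wordToFetch123"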
Example #21
def test_detect_number_token(self):
    data = b" -123.8"
    t = Tokenizer(data)
    token = t.get_next_token()
    self.assertEqual(token.id, "NUMBER")
    self.assertEqual(token.value, -123.8)
Example #22
def test_detect_identifier_sequence_end_token(self):
    data = b" ]"
    t = Tokenizer(data)
    token = t.get_next_token()
    self.assertEqual(token.id, "]")
Example #23
def test_detect_string_token(self):
    data = b"@1SomeThing"
    t = Tokenizer(data)
    token = t.get_next_token()
    self.assertEqual(token.id, "STRING")
    self.assertEqual(token.value, "@1SomeThing")
Example #24
def test_detect_item_sequence_end_token(self):
    data = b" >"
    t = Tokenizer(data)
    token = t.get_next_token()
    self.assertEqual(token.id, ">")
Example #25
def test_detect_quoted_string_token(self):
    data = b"'here goes quoted string'"
    t = Tokenizer(data)
    token = t.get_next_token()
    self.assertEqual(token.value, "here goes quoted string")
Example #26
def test_detect_binary_sequence_end_token(self):
    data = b" }"
    t = Tokenizer(data)
    token = t.get_next_token()
    self.assertEqual(token.id, "}")
Example #27
def test_decode_russian_letters_long_code(self):
    data = b"#1099#1099'21'"
    t = Tokenizer(data)
    token = t.get_next_token()
    self.assertEqual(token.value, "ыы21")
Example #28
def test_detect_block_end_token(self):
    data = b"\n  end"
    t = Tokenizer(data)
    token = t.get_next_token()
    self.assertEqual(token.id, "END_BLOCK")
Example #29
def test_decode_mixed_russian_letters(self):
    data = b"'Abibas - '#1101#1090#1086' '#1089#1080#1083#1072'!'"
    t = Tokenizer(data)
    token = t.get_next_token()
    self.assertEqual(token.value, "Abibas - это сила!")
Example #30
def test_decode_many_sharps(self):
    data = b"####1101'###'"
    t = Tokenizer(data)
    token = t.get_next_token()
    self.assertEqual(token.value, "###э###")
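Taken together, the string tests pin down a small grammar: `#NNNN` decodes to `chr(NNNN)`, `'...'` contributes its text verbatim, a bare `#` with no digits stays literal (Example #30), and adjacent segments concatenate into one token. A minimal reference decoder consistent with these fixtures; it is a sketch of the decoding rule only, not the Tokenizer's implementation, and it assumes any `+`/CRLF continuations were already spliced out:

import re

SEGMENT = re.compile(r"#(\d+)|'([^']*)'|(#)")

def decode_mixed(source: str) -> str:
    # Walk the segments left to right and splice their decoded forms.
    parts = []
    for code, text, sharp in SEGMENT.findall(source):
        if code:            # "#1101" -> chr(1101) == "э"
            parts.append(chr(int(code)))
        elif sharp:         # a lone "#" with no digits stays literal
            parts.append(sharp)
        else:               # "'...'" -> the quoted text itself
            parts.append(text)
    return "".join(parts)

assert decode_mixed("####1101'###'") == "###э###"
assert decode_mixed("'Abibas - '#1101#1090#1086' '#1089#1080#1083#1072'!'") == "Abibas - это сила!"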