Example #1
 def test_normalized_embedded_not_a_string(self):
     with self.assertRaises(TypeError):
         tokens.normalize_embedded({123: 'b'})
     with self.assertRaises(TypeError):
         tokens.normalize_embedded({'a': None})
     with self.assertRaises(TypeError):
         tokens.normalize_embedded({'a': 123})
Example #2
 def test_normalized_embedded_non_ascii(self):
     with self.assertRaises(UnicodeEncodeError):
         tokens.normalize_embedded({u'\u043f': 'b'})
     with self.assertRaises(UnicodeEncodeError):
         tokens.normalize_embedded({'a': u'\u043f'})
Example #3
 def test_normalized_embedded_ascii(self):
     result = tokens.normalize_embedded({u'a': u'b'})
     self.assertEqual({'a': 'b'}, result)
     self.assertTrue(isinstance(result.keys()[0], str))
     self.assertTrue(isinstance(result['a'], str))
Example #4
 def test_normalize_embedded_reserved_keys(self):
     with self.assertRaises(ValueError):
         tokens.normalize_embedded({'_i': ''})
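Taken together, the tests above pin down the contract of tokens.normalize_embedded without showing its body: keys and values must be strings (TypeError otherwise), must be ASCII-encodable (UnicodeEncodeError otherwise), reserved keys such as '_i' are rejected with ValueError, and the result is a plain dict of native str keys and values. The sketch below is a minimal function satisfying that contract, written for Python 3; RESERVED_KEYS and the error messages are illustrative assumptions, not the library's actual code, and the original (Python 2 style, judging by the u'' literals and result.keys()[0]) would presumably return the ASCII-encoded str values instead.

 # Hypothetical sketch inferred from the tests above, not the real
 # tokens.normalize_embedded implementation.
 RESERVED_KEYS = ('_i',)  # assumption: only '_i' appears in the tests


 def normalize_embedded(embedded):
     """Return a copy of *embedded* with ASCII-only string keys and values.

     Raises TypeError for non-string keys or values, UnicodeEncodeError for
     non-ASCII text, and ValueError for reserved keys.
     """
     normalized = {}
     for key, value in embedded.items():
         if not isinstance(key, str) or not isinstance(value, str):
             raise TypeError('embedded keys and values must be strings')
         if key in RESERVED_KEYS:
             raise ValueError('%r is a reserved embedded key' % key)
         # Encoding to ASCII raises UnicodeEncodeError for non-ASCII input.
         key.encode('ascii')
         value.encode('ascii')
         normalized[key] = value
     return normalized

With this sketch, normalize_embedded({u'a': u'b'}) returns {'a': 'b'}, and each of the failing inputs exercised by the tests raises the expected exception.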