def test_normalized_embedded_not_a_string(self):
    """normalize_embedded must raise TypeError for non-string keys or values."""
    bad_mappings = [
        {123: 'b'},    # non-string key
        {'a': None},   # None value
        {'a': 123},    # non-string value
    ]
    for mapping in bad_mappings:
        with self.assertRaises(TypeError):
            tokens.normalize_embedded(mapping)
def test_normalized_embedded_non_ascii(self):
    # normalize_embedded must raise UnicodeEncodeError when either a key
    # or a value contains non-ASCII characters (here, Cyrillic U+043F).
    # NOTE(review): this method is shadowed by a later definition with the
    # exact same name further down in this file, so unittest discovery
    # only ever runs the later copy — one of the two should be removed.
    with self.assertRaises(UnicodeEncodeError):
        tokens.normalize_embedded({u'\u043f': 'b'})
    with self.assertRaises(UnicodeEncodeError):
        tokens.normalize_embedded({'a': u'\u043f'})
def test_normalized_embedded_ascii(self):
    """ASCII unicode input is normalized to native `str` keys and values.

    NOTE(review): shadowed by a later method of the same name in this
    file; only the later copy runs under unittest discovery.
    """
    result = tokens.normalize_embedded({u'a': u'b'})
    self.assertEqual({'a': 'b'}, result)
    # dict.keys() returns a non-subscriptable view on Python 3; wrap in
    # list() so the index works on both Python 2 and Python 3
    # (the original `result.keys()[0]` raises TypeError on Python 3).
    self.assertTrue(isinstance(list(result.keys())[0], str))
    self.assertTrue(isinstance(result['a'], str))
def test_normalize_embedded_reserved_keys(self):
    """A reserved key name ('_i') must be rejected with ValueError."""
    reserved_mapping = {'_i': ''}
    with self.assertRaises(ValueError):
        tokens.normalize_embedded(reserved_mapping)
def test_normalized_embedded_non_ascii(self):
    """Non-ASCII text on either side of the mapping raises UnicodeEncodeError."""
    # Cyrillic 'pe' (U+043F) as the key, then as the value.
    non_ascii_key = {u"\u043f": "b"}
    non_ascii_value = {"a": u"\u043f"}
    with self.assertRaises(UnicodeEncodeError):
        tokens.normalize_embedded(non_ascii_key)
    with self.assertRaises(UnicodeEncodeError):
        tokens.normalize_embedded(non_ascii_value)
def test_normalized_embedded_ascii(self):
    """ASCII unicode input is normalized to native `str` keys and values."""
    result = tokens.normalize_embedded({u"a": u"b"})
    self.assertEqual({"a": "b"}, result)
    # Wrap keys() in list() so indexing works on Python 3, where
    # dict.keys() returns a non-subscriptable view object
    # (the original `result.keys()[0]` raises TypeError on Python 3).
    self.assertTrue(isinstance(list(result.keys())[0], str))
    self.assertTrue(isinstance(result["a"], str))