Example #1
0
 def _break_and_wrap(text_to_tokenize_match):
     """Tokenize matched Chinese text and wrap each token in pinyin-annotated HTML.

     Args:
         text_to_tokenize_match: a regex match object whose ``group(0)`` is the
             Chinese text to tokenize.

     Returns:
         An HTML string with one ``<span class="chinese-word">`` per token,
         holding the token's pinyin followed by the token itself.
     """
     text_to_tokenize = text_to_tokenize_match.group(0)
     # Collect fragments and join once: linear time instead of the quadratic
     # worst case of repeated string concatenation in a loop.
     pieces = []
     for token in Languages.tokenize(Languages.chinese.value, text_to_tokenize):
         # get_or_create_with_translator returns a (instance, created) pair;
         # only the instance is needed here.
         word_zh = WordZH.get_or_create_with_translator(word=token)[0]
         pieces.append(
             '<span class="chinese-word"><span>' + word_zh.pinyin
             + '</span><span>' + token + '</span></span>'
         )
     return ''.join(pieces)
Example #2
0
 def test_can_translate_chinese_word_with_db(self):
     """A saved WordZH with an English translation can be read back from the DB."""
     # Persist a Chinese word together with its pinyin.
     chinese_word = WordZH(word='some_word_zh', pinyin='some_word_zh_pinyin')
     chinese_word.save()
     # Attach an English translation through the reverse relation.
     chinese_word.worden_set.create(word='some_word_en')
     # Re-fetch from the database and inspect the first translation.
     fetched = WordZH.objects.get(word='some_word_zh')
     first_translation = fetched.get_translations().all()[0]
     self.assertEqual(first_translation.word, 'some_word_en')