Example #1
0
def _process_phrase(phrase, process_func, *args, **kwargs):
    """Apply *process_func* to each word token of *phrase*.

    Tokens matching the grouping/space pattern (whitespace, punctuation
    groups) are passed through verbatim. Each word is upper-cased before
    being handed to *process_func*, and the original word's casing is
    restored on the result via ``restore_word_case``.

    Returns the re-assembled phrase; on any processing failure the
    original *phrase* is returned unchanged (deliberate best-effort).
    """
    words = tokenizers.extract_tokens(phrase)
    # Accumulate pieces and join once at the end — avoids quadratic
    # behavior of repeated string += in the loop.
    parts = []
    try:
        for word in words:
            # Spaces / grouping tokens are kept as-is.
            if tokenizers.GROUPING_SPACE_REGEX.match(word):
                parts.append(word)
                continue
            processed = process_func(word.upper(), *args, **kwargs)
            # Fall back to the original word if processing yields nothing.
            parts.append(restore_word_case(processed, word) if processed else word)
    except Exception:
        # Intentional broad catch: any failure means "leave phrase alone".
        return phrase
    return "".join(parts)
Example #2
0
 def assertRestored(self, word, original_word, result):
     """Assert that restoring *original_word*'s case onto *word* gives *result*."""
     restored = restore_word_case(word, original_word)
     self.assertEqualRu(restored, result)