import warnings

# Assumption: `t` refers to gluonnlp's data module, which provides SacreMosesTokenizer.
from gluonnlp import data as t


def test_moses_tokenizer():
    tokenizer = t.SacreMosesTokenizer()
    text = u"Introducing Gluon: An Easy-to-Use Programming Interface for Flexible Deep Learning."
    try:
        ret = tokenizer(text)
    except ImportError:
        # SacreMosesTokenizer depends on the sacremoses package, not NLTK; skip if it is missing.
        warnings.warn("sacremoses not installed, skip test_moses_tokenizer().")
        return
    assert isinstance(ret, list)
    assert len(ret) > 0