Example #1
def test_spacy_tokenizer():
    # Assumes SpaCyTokenizer, DocVectorizer and the test_text fixture are
    # defined at module level in the test file (see the setup sketch below).
    tokenizer = SpaCyTokenizer()
    vectorizer = DocVectorizer(tokenizer=tokenizer)
    result = vectorizer.fit(test_text)
Example #2
def test_docvectorizer_basic():
    # Fits DocVectorizer with its default tokenizer on the same test_text corpus
    vectorizer = DocVectorizer()
    result = vectorizer.fit(test_text)
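Both examples rely on names that are not shown in the snippets: the DocVectorizer and SpaCyTokenizer classes and a module-level test_text corpus. A minimal setup that would let the tests run standalone might look like the sketch below; the textmap import paths and the sample corpus are assumptions, not part of the original snippets.

# Assumed import paths (e.g. the textmap library); adjust to wherever
# DocVectorizer and SpaCyTokenizer are actually defined in your project.
from textmap.vectorizers import DocVectorizer
from textmap.tokenizers import SpaCyTokenizer

# Hypothetical stand-in for the module-level test_text fixture:
# a small list of documents is enough for the fit() calls above.
test_text = [
    "The quick brown fox jumps over the lazy dog.",
    "Document vectorization turns raw text into numeric features.",
]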