# Example no. 1 (score: 0)
def tokenize(tweet):
    """Tokenize *tweet* with the module-level GloVe tokenizer `tokenizer_g`."""
    tokens = tokenizer_g(tweet)
    return tokens
def glove_tokenize(text):
    """Tokenize *text*, drop punctuation tokens, then drop stopwords.

    Runs `tokenizer_g` over the input, removes any token found in
    `punctuation`, rejoins and re-splits on whitespace, and finally
    filters out words present in `STOPWORDS`.
    """
    tokens = tokenizer_g(text)
    # Rejoining then splitting re-tokenizes on whitespace after the
    # punctuation tokens have been removed.
    cleaned = ' '.join(tok for tok in tokens if tok not in punctuation)
    return [word for word in cleaned.split() if word not in STOPWORDS]
# Example no. 3 (score: 0)
def Tokenize(tweet):
    """Tokenize *tweet* with the module-level GloVe tokenizer `tokenizer_g`.

    NOTE(review): an alternative tokenizer (`MyTokenizer.tokenize`) was
    previously commented out here; `tokenizer_g` is the one in use.
    """
    return tokenizer_g(tweet)