from string import punctuation

from nltk.corpus import stopwords

# tokenizer_g (the GloVe-style Twitter tokenizer) is assumed to be provided
# elsewhere in this module, e.g.:
# from preprocess_twitter import tokenize as tokenizer_g

# Stop-word set used by glove_tokenize below (assumed: NLTK English list).
STOPWORDS = set(stopwords.words('english'))


def tokenize(tweet):
    return tokenizer_g(tweet)
def glove_tokenize(text):
    """Tokenize text for GloVe lookup: strip punctuation and stop words."""
    text = tokenizer_g(text)
    # Drop punctuation characters; join with '' (not ' ') so words stay intact.
    text = ''.join([c for c in text if c not in punctuation])
    words = text.split()
    words = [word for word in words if word not in STOPWORDS]
    return words
def Tokenize(tweet):
    #return MyTokenizer.tokenize(tweet)
    #pdb.set_trace()
    return tokenizer_g(tweet)
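# Minimal usage sketch (illustrative only): the exact tokens printed depend on
# the tokenizer_g implementation wired in above, which typically lower-cases
# and space-separates Twitter-specific tokens. The sample tweet is hypothetical.
if __name__ == '__main__':
    sample = "I love #MachineLearning! :)"
    print(tokenize(sample))        # raw tokenizer_g output
    print(glove_tokenize(sample))  # punctuation and stop words removed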