def tokenize(self, block_input):
    # Unpack the (block_id, text_block) pair so results can be traced back to their block.
    block_id, text_block = block_input
    sense_phrases = sense_tokenize(text_block, self.annotator, self.stemmer, self.stop_words)
    return block_id, sense_phrases, text_block
def tokenize(self, text_block):
    # Tokenize a single block and accumulate its phrases on the instance instead of returning them.
    sense_phrases = sense_tokenize(text_block, self.annotator, self.stemmer, self.stop_words)
    self.tokenized_blocks.extend(sense_phrases)
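# A hypothetical usage sketch (not from the source): it assumes the first
# tokenize variant lives on a picklable instance ("tokenizer" below is an
# assumed name) that carries the annotator, stemmer, and stop words. The
# (block_id, sense_phrases, text_block) return shape lets blocks be fanned
# out to worker processes and re-associated with their ids afterwards.
from multiprocessing import Pool

def tokenize_blocks(tokenizer, text_blocks):
    # Attach ids up front so results can be matched to their source blocks.
    inputs = list(enumerate(text_blocks))
    with Pool() as pool:
        results = pool.map(tokenizer.tokenize, inputs)
    # Each result is (block_id, sense_phrases, text_block); index phrases by id.
    return {block_id: phrases for block_id, phrases, _ in results}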