def _tokenise(self, iterator):
    """Generate the list of words for each non-empty parsed line.

    Side effect: ``self.line`` is rebound to the first element of every
    item ``tokens(iterator)`` yields (presumably the raw line text —
    confirm against ``tokens``).
    """
    for self.line, parsed in tokens(iterator):
        if not parsed:
            # ignore # lines: an empty parse carries no tokens
            continue
        # set Location information
        yield [pair[1] for pair in parsed]
def _tokenise(self, iterator):
    """Yield the words of each parsed line, reconstructing ``self.line``.

    Each item from ``tokens(iterator)`` is a sequence of 3-tuples whose
    last element is the word; ``self.line`` is set to the concatenation
    of those words before the list is yielded.
    """
    for parsed in tokens(iterator):
        extracted = []
        for _first, _second, token in parsed:
            extracted.append(token)
        self.line = ''.join(extracted)
        # ignore # lines
        # set Location information
        yield extracted
def _tokenise(self, iterator):
    """Yield per-line word lists; cache the joined text in ``self.line``.

    Assumes ``tokens(iterator)`` yields sequences of 3-tuples with the
    word in the final position — confirm against ``tokens``.
    """
    for parsed in tokens(iterator):
        current = [triple[2] for triple in parsed]
        self.line = ''.join(current)
        # ignore # lines
        # set Location information
        yield current
def _tokenise(self, iterator):
    """Yield the word list for every line whose parse is non-empty.

    ``self.line`` is updated from the first element of each item that
    ``tokens(iterator)`` produces.
    """
    for self.line, parsed in tokens(iterator):
        if parsed:
            # set Location information
            yield [w for _pos, w in parsed]
        # else: ignore # lines — nothing to yield for an empty parse