def tokenize(self, line):
    """
    Create tokens from string.

    @param line: string to scan
    """
    self.rv = []
    GenericScanner.tokenize(self, line)
    return self.rv

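# A minimal sketch of the surrounding idiom, assuming John Aycock's SPARK
# framework: GenericScanner dispatches on t_* methods whose docstrings are
# regular expressions, and each matched handler appends to self.rv, which the
# tokenize() override above collects and returns. The Token class and the
# t_* rules below are illustrative assumptions, not taken from the original.
from spark import GenericScanner  # assumes SPARK (spark.py) is on the path

class Token:
    def __init__(self, type, attr=None, lineno=None):
        self.type = type
        self.attr = attr
        self.lineno = lineno

class NumberScanner(GenericScanner):  # hypothetical subclass
    def tokenize(self, line):
        self.rv = []
        GenericScanner.tokenize(self, line)
        return self.rv

    def t_number(self, s):
        r'\d+'
        # The docstring regex matched; record the token.
        self.rv.append(Token(type='number', attr=s))

    def t_whitespace(self, s):
        r'\s+'
        pass  # skip whitespace
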
def tokenize(self, input):
    self.tokens = []
    lines = input.split("\n")
    for line in lines:
        # Strip inline "//" comments before scanning the line.
        line = line.split("//")[0]
        GenericScanner.tokenize(self, line)
        self.lineno += 1
        # Emit an explicit NEWLINE token so the parser can track line numbers.
        t = Token(type='NEWLINE', lineno=self.lineno)
        self.tokens.append(t)
    return self.tokens

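# A usage sketch for the variant above (the class name is an assumption, and
# self.lineno is presumed to start at 0 in its __init__): inline "//" comments
# are stripped per line, and each line contributes a trailing NEWLINE token.
scanner = LineScanner()
tokens = scanner.tokenize("x = 1 // set x\ny = 2")
# tokens ends each line's output with Token(type='NEWLINE', lineno=n),
# e.g. [..., NEWLINE(lineno=1), ..., NEWLINE(lineno=2)].
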
def tokenize(self, input):
    self.rv = []
    GenericScanner.tokenize(self, input)
    return self.rv

def tokenize(self, input_):
    # The trailing underscore avoids shadowing the builtin input().
    self.rv = []
    GenericScanner.tokenize(self, input_)
    return self.rv

def tokenize(self, input):
    self.tokens = []
    GenericScanner.tokenize(self, input)
    return self.tokens

def tokenize(self, input, outputFile):
    self.rv = []
    # Stash the output file on the instance so t_* handlers can reach it
    # while GenericScanner drives the scan.
    self.outputFile = outputFile
    GenericScanner.tokenize(self, input)
    return self.rv

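# Sketch of why outputFile is kept on the instance (this subclass and its
# t_identifier rule are assumptions, reusing Token and the spark import from
# the first sketch): SPARK's t_* handlers receive only the matched string, so
# any side channel such as an output stream has to travel via self.
class LoggingScanner(GenericScanner):  # hypothetical subclass
    def tokenize(self, input, outputFile):
        self.rv = []
        self.outputFile = outputFile
        GenericScanner.tokenize(self, input)
        return self.rv

    def t_identifier(self, s):
        r'[A-Za-z_][A-Za-z0-9_]*'
        self.rv.append(Token(type='identifier', attr=s))
        self.outputFile.write("identifier: %s\n" % s)  # hypothetical trace output

    def t_whitespace(self, s):
        r'\s+'
        pass  # skip whitespace
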
def tokenize(self, input):
    self.rv = []
    self.line_count = 0  # reset the line counter before the scan begins
    GenericScanner.tokenize(self, input)
    return self.rv