def get_tokens():
    """Tokenize every line of the module-level ``lines`` iterable.

    For each line: strips '#' comments, splits it via ``breakup_line`` /
    ``get_strings``, then prints one s-expression per token —
    ``(PUNC "...")`` for punctuators (preferring a two-character
    punctuator such as '==' when the next token combines with the
    current one), ``(LIT ...)`` for literals; keywords and identifiers
    are deliberately not printed here.  Finishes with ``(ENDMARKER)``.

    Returns:
        The token list of the LAST processed line with a ``'$'``
        end-of-input sentinel appended (an empty ``['$']`` if ``lines``
        is empty).

    Depends on module-level helpers ``breakup_line``, ``get_strings``,
    ``is_punc``, ``is_keyword``, ``is_ID`` and the global ``lines``.
    """
    # Initialize so the post-loop append/return cannot raise NameError
    # when `lines` is empty (previously it would).
    final = []
    skip = False
    for line in lines:
        # Strip a trailing '#' comment before tokenizing.
        if '#' in line:
            line = line[:line.index('#')]
        tokens = breakup_line(line)
        final = get_strings(tokens)
        for c, item in enumerate(final):
            if skip:
                # Second half of a two-character punctuator already
                # emitted on the previous iteration — consume silently.
                skip = False
                continue
            if is_punc(item):
                # Explicit bounds check replaces the old bare `except:`,
                # which would have hidden real errors from is_punc/print
                # in addition to the intended IndexError on lookahead.
                if c + 1 < len(final) and is_punc(item + final[c + 1]):
                    print('(PUNC "%s")' % (item + final[c + 1]))
                    skip = True
                else:
                    print('(PUNC "%s")' % item)
            elif is_keyword(item):
                pass  # keywords are intentionally not printed here
            elif is_ID(item):
                pass  # identifiers are intentionally not printed here
            else:
                print("(LIT %s)" % item)
    print("(ENDMARKER)")
    final.append('$')
    return final
def dot_rest(self, expr):
    """Parse the member-access suffix after a '.' and wrap *expr*.

    Handles ``expr.*`` (wildcard property), and ``expr.name`` where
    *name* is an identifier, a string literal, or a reserved keyword
    used as a property name.

    Args:
        expr: the expression parsed so far (becomes the property base).

    Returns:
        A ``PropertyExpr`` wrapping *expr*.

    Raises:
        ParserException: if the current token cannot be a property name.
    """
    if self.token() == STAR:
        # expr.* — wildcard member access.
        self.accept(STAR)
        return PropertyExpr(expr, "*")
    # Identifier, string literal, and keyword property names were three
    # duplicated branches with identical bodies; merged into one.  The
    # dead `name = None` initializer is gone — every path below either
    # assigns `name` or raises.
    if (self.token() == IDENTIFIER
            or self.token() == LITERAL_STRING
            or is_keyword(self.token())):
        name = self.token_str()
        self.next_token()
    else:
        raise ParserException("error: " + self.token_str())
    return PropertyExpr(expr, name)
# NOTE(review): whitespace-mangled paste — this single physical line fuses two
# unrelated pieces:
#   (a) the tail of a function defined BEFORE this chunk
#       (`new_words.append(w)` / `return new_words`); its header and loop
#       structure are not visible here, so its correct indentation cannot be
#       reconstructed without guessing — do not re-indent blindly.
#   (b) a Python 2 top-level tokenizer script (statement-form `print`, e.g.
#       `print '(PUNC "%s")' % item`) that duplicates the body of
#       get_tokens() above, which uses Python 3 `print()` calls — the file
#       mixes Python 2 and Python 3 syntax and cannot run as-is under either.
# TODO(review): recover the original file layout, pick one Python version,
# and delete whichever copy of the tokenizer loop is redundant.
new_words.append(w) return new_words skip = False for line in lines: if '#' in line: line = line[:line.index('#')] tokens = breakup_line(line) final = get_strings(tokens) for c, item in enumerate(final): if not skip: if is_punc(item): try: if is_punc(item + final[c+1]): print '(PUNC "%s")' % str(item + final[c+1]) skip = True else: print '(PUNC "%s")' % item except: print '(PUNC "%s")' % item elif is_keyword(item): pass elif is_ID(item): pass else: print "(LIT %s)" % item else: skip = False print "(ENDMARKER)"