def read_token(self, token):
    """Turn a Token into a lispy form."""
    if token.type == "symbol":
        return symbol(token.text)
    elif token.type in ["number", "string"]:
        # Number and string tokens are valid Python literals, so let
        # Python evaluate them into the corresponding value.
        return eval(token.text)
    elif token.type == "macro":
        # Dispatch to the reader-macro handler registered for this text.
        return self.macro_funcs[token.text](token)
    assert False, "unexpected token type %r" % token.type

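# read_token only touches token.type and token.text (plus line_no, col and
# linetext in the error paths below), so a minimal stand-in Token for
# exercising it in isolation could be a namedtuple (an assumption for
# illustration; the real Token class comes from the tokenizer):
#
#     from collections import namedtuple
#     Token = namedtuple("Token", "type text line_no col linetext")
#
#     reader.read_token(Token("number", "42", 1, 0, "42"))       # => 42
#     reader.read_token(Token("string", '"hi"', 1, 0, '"hi"'))   # => 'hi'
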
def read_unquote_splicing(self, token):
    """Read the form following ",@" and mark it for splicing."""
    token = self.tokens.next()
    if token.type == "end":
        raise mkSyntaxError("EOF after \",@\" (unquote-splicing)",
                            self.filename, token.line_no, token.col,
                            token.linetext, completable=True)
    return (symbol("__lispy__:unquote-splicing"), self.read_token(token))

def read_quasiquote(self, token):
    """Read the form following "`" and wrap it in a quasiquote."""
    token = self.tokens.next()
    if token.type == "end":
        raise mkSyntaxError("EOF after \"`\" (quasiquote)",
                            self.filename, token.line_no, token.col,
                            token.linetext, completable=True)
    return (symbol("__lispy__:quasiquote"), self.read_token(token))

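# The two reader macros above both desugar into plain list forms, assuming
# the convention used throughout this reader that lists are Python tuples
# and symbol() builds a symbol object:
#
#     ,@xs  reads as  (symbol("__lispy__:unquote-splicing"), symbol("xs"))
#     `x    reads as  (symbol("__lispy__:quasiquote"), symbol("x"))
#
# so a later quasiquote expander can pattern-match on the head symbol.
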
def read(self):
    """Read all top-level forms; wrap several in an implicit do."""
    forms = []
    while 1:
        token = self.tokens.next()
        if token.type == "end":
            break
        forms.append(self.read_token(token))
    if len(forms) == 0:
        return None
    if len(forms) == 1:
        return forms[0]
    # Several top-level forms become one implicit "do" block.
    return (symbol("__lispy__:do"),) + tuple(forms)

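# A sketch of how read() behaves at the top level (a hypothetical session;
# the Reader construction and the tokenize() helper are assumptions, not
# definitions from this file):
#
#     Reader(tokenize("")).read()            # => None
#     Reader(tokenize("(f 1)")).read()       # => (symbol("f"), 1)
#     Reader(tokenize("(f 1) (g 2)")).read()
#     # => (symbol("__lispy__:do"), (symbol("f"), 1), (symbol("g"), 2))
#
# Wrapping multiple forms in "__lispy__:do" lets callers evaluate a whole
# file as a single form.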