def jsemit_ForNode(self, node, indent):
    tok = PythonTokenizer(node.stmt)
    tok.consume_till('in')
    a = node.stmt[:tok.index].strip()  # for i in
    a = a[len("for"):-len("in")].strip()  # strip `for` and `in`

    b = node.stmt[tok.index:-1].strip()  # rest of for stmt excluding :
    b = web.re_compile(r"loop.setup\((.*)\)").match(b).group(1)

    text = ""
    text += indent + f"foreach({py2js(b)}, loop, function(loop, {a}) {{\n"
    text += self.jsemit(node.suite, indent + INDENT)
    text += indent + "});\n"
    return text
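# Hedged sketch (illustration, not from the source): for a loop statement that
# has already been rewritten to the form `for x in loop.setup(items):`,
# jsemit_ForNode splits out the loop variable (`x`) and the original iterable
# (`items`), then wraps the translated suite in a JavaScript `foreach` callback,
# roughly:
#
#     foreach(items, loop, function(loop, x) {
#         ...emitted suite...
#     });
#
# The exact iterable expression in the output depends on what py2js() returns.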
def tokenize(code): """Tokenize python code. >>> list(tokenize("x + y")) ['x', ' ', '+', ' ', 'y'] """ end = 0 tok = PythonTokenizer(code) try: while True: x = tok.next() begin = x.begin[1] if begin > end: yield ' ' * (begin - end) if x.value: yield x.value end = x.end[1] except StopIteration: pass
def tokenize(code): """Tokenize python code.:: >>> list(tokenize("x + y")) ['x', ' ', '+', ' ', 'y'] """ end = 0 tok = PythonTokenizer(code) try: while True: x = tok.next() begin = x.begin[1] if begin > end: yield ' ' * (begin - end) if x.value: yield x.value end = x.end[1] except StopIteration: pass