def Comment(comment):
    """Return a lib2to3 COMMENT leaf whose text is *comment*."""
    leaf = Leaf(token.COMMENT, comment)
    return leaf
def DoubleStar(prefix=None):
    """Return a ``**`` DOUBLESTAR leaf, with optional leading *prefix* text."""
    star_star = Leaf(token.DOUBLESTAR, u'**', prefix=prefix)
    return star_star
def touch_import_top(package, name_to_import, node):
    """Works like `does_tree_import` but adds an import statement at the top
    if it was not imported (but below any __future__ imports) and below any
    comments such as shebang lines).

    Based on lib2to3.fixer_util.touch_import()

    Calling this multiple times adds the imports in reverse order.

    Also adds "standard_library.install_aliases()" after "from future import
    standard_library". This should probably be factored into another function.
    """
    root = find_root(node)

    # Nothing to do if the name is already imported anywhere in the tree.
    if does_tree_import(package, name_to_import, root):
        return

    # Ideally, we would look for whether futurize --all-imports has been run,
    # as indicated by the presence of ``from builtins import (ascii, ...,
    # zip)`` -- and, if it has, we wouldn't import the name again.

    # Look for __future__ imports and insert below them
    found = False
    for name in ['absolute_import', 'division', 'print_function',
                 'unicode_literals']:
        if does_tree_import('__future__', name, root):
            found = True
            break
    if found:
        # At least one __future__ import. We want to loop until we've seen them
        # all.
        start, end = None, None
        for idx, node in enumerate(root.children):
            if check_future_import(node):
                start = idx
                # Start looping
                idx2 = start
                while node:
                    node = node.next_sibling
                    idx2 += 1
                    if not check_future_import(node):
                        # First non-__future__ sibling: insert point found.
                        end = idx2
                        break
                break
        assert start is not None
        assert end is not None
        insert_pos = end
    else:
        # No __future__ imports.
        # We look for a docstring and insert the new node below that. If no docstring
        # exists, just insert the node at the top.
        for idx, node in enumerate(root.children):
            if node.type != syms.simple_stmt:
                break
            if not is_docstring(node):
                # This is the usual case.
                break
        insert_pos = idx

    children_hooks = []
    if package is None:
        # Bare "import <name>" statement.
        import_ = Node(syms.import_name, [
            Leaf(token.NAME, u"import"),
            Leaf(token.NAME, name_to_import, prefix=u" ")
        ])
    else:
        # "from <package> import <name>" statement.
        import_ = FromImport(package,
                             [Leaf(token.NAME, name_to_import, prefix=u" ")])
        if name_to_import == u'standard_library':
            # Add:
            #     standard_library.install_aliases()
            # after:
            #     from future import standard_library
            install_hooks = Node(syms.simple_stmt,
                                 [Node(syms.power,
                                       [Leaf(token.NAME, u'standard_library'),
                                        Node(syms.trailer,
                                             [Leaf(token.DOT, u'.'),
                                              Leaf(token.NAME,
                                                   u'install_aliases')]),
                                        Node(syms.trailer,
                                             [Leaf(token.LPAR, u'('),
                                              Leaf(token.RPAR, u')')])
                                        ])
                                  ]
                                 )
            children_hooks = [install_hooks, Newline()]

    # FromImport(package, [Leaf(token.NAME, name_to_import, prefix=u" ")])

    children_import = [import_, Newline()]
    # Move the displaced statement's prefix (comments/shebang) onto the new
    # import so it stays at the top of the file.
    old_prefix = root.children[insert_pos].prefix
    root.children[insert_pos].prefix = u''
    root.insert_child(insert_pos,
                      Node(syms.simple_stmt, children_import,
                           prefix=old_prefix))
    if len(children_hooks) > 0:
        root.insert_child(insert_pos + 1,
                          Node(syms.simple_stmt, children_hooks))
from __future__ import unicode_literals from lib2to3.fixer_base import BaseFix from .utils import find_indentation from lib2to3.pgen2 import token from lib2to3.pygram import python_symbols as symbols from lib2to3.pytree import Node, Leaf NL = Leaf(token.NEWLINE, '\n') class FixCompoundStatements(BaseFix): """ Compound statements (multiple statements on the same line) are generally discouraged. While sometimes it's okay to put an if/for/while with a small body on the same line, never do this for multi-clause statements. Also avoid folding such long lines! """ def match(self, node): results = {} if (node.prev_sibling and isinstance(node.prev_sibling, Leaf) and node. prev_sibling.type == token.COLON and node.type != symbols.suite): # If it's inside a lambda definition, subscript, or sliceop, leave # it alone # symbols.trailer if node.parent.type in [symbols.lambdef, symbols.subscript, symbols.sliceop, symbols.dictsetmaker, symbols.trailer]: pass else:
def walk_dedent_tree_node(node, children, indent, force_split_next=False):
    """Recursively re-wrap implicitly-concatenated string literals.

    Moves each STRING leaf onto its own line, using ``indent + 8`` spaces for
    strings and ``indent + 4`` for closing brackets.
    NOTE(review): the exact layout contract is inferred from the constants
    below -- confirm against this fixer's tests.
    """
    if six.text_type(node).startswith("\n"):
        # Node already begins on a fresh line.
        if isinstance(node, Leaf):
            prev = node.prev_sibling
            next = node.next_sibling
            # A STRING directly following another STRING is a continuation of
            # an implicit concatenation.
            is_followup = prev and prev.type == token.STRING and node.type == token.STRING
            if is_followup:
                new_value = node.value
                # insert linebreak after last string in braces, so its closing
                # brace moves to new line
                if not node.next_sibling:
                    closing_bracket = node.parent.parent.children[-1]
                    if not six.text_type(closing_bracket).startswith("\n"):
                        new_value = "%s\n%s" % (node.value,
                                                (' ' * (indent + 4)))
                node.replace(
                    Leaf(
                        node.type,
                        new_value,
                        prefix="\n%s" % (' ' * (indent + 8)),
                    ))
            else:
                # Closing brackets sit one level shallower than content.
                if six.text_type(node).strip() in (')', '}'):
                    new_prefix = "\n%s" % (' ' * (indent + 4))
                else:
                    new_prefix = "\n%s" % (' ' * (indent + 8))
                node.replace(Leaf(node.type, node.value, prefix=new_prefix))
        else:
            # Interior node: recurse into its children.
            for item in children:
                walk_dedent_tree_node(item, item.children, indent)
    elif isinstance(node, Leaf):
        if node.type == token.STRING:
            strings_tuple = node.parent.parent
            prev = node.prev_sibling
            next = node.next_sibling
            # First string of a parenthesised concatenation group.
            is_opening = prev is None and six.text_type(
                strings_tuple).strip()[0] == '('
            has_followup = next and next.type == token.STRING
            if is_opening and has_followup:
                node.replace(
                    Leaf(
                        node.type,
                        node.value,
                        prefix="\n%s" % (' ' * (indent + 8)),
                    ))
            elif force_split_next:
                # Caller asked for a forced break after this string.
                node.replace(
                    Leaf(
                        node.type,
                        "%s\n%s" % (node.value, (' ' * (indent + 4))),
                        prefix="\n%s" % (' ' * (indent + 8)),
                    ))
    else:
        for item in children:
            walk_dedent_tree_node(item, item.children, indent)
"""Rudimentary fixer""" from lib2to3.pgen2 import token from lib2to3.pytree import Leaf from lib2to3.fixer_base import BaseFix from lib2to3.fixer_util import Call, Name, Node, String Slash = Leaf(token.SLASH, "/", prefix=" ") class FixPaths(BaseFix): BM_compatible = True PATTERN = """ import_name< 'import' imp=any > | power< module='os' trailer< dot='.' method=('makedirs' | 'mkdir' | 'unlink' | 'remove' | 'rmdir' | 'rename' | 'replace' | 'stat' | 'chmod' | 'link' | 'listdir') > trailer< '(' obj=any ')'> > | power< buildin='open' trailer< '(' obj=any ')'> > | power< module='os' trailer< dot='.' method=('getcwd' | 'listdir') >
self.assert_(x) -> assert x self.assertEquals(x, y) -> assert x == y """ from __future__ import absolute_import from lib2to3.fixer_base import BaseFix from lib2to3.fixer_util import Name, Comma, LParen, RParen from lib2to3.patcomp import PatternCompiler from lib2to3.pgen2 import token from lib2to3.pytree import Leaf, Node from lib2to3.pygram import python_symbols as syms import copy NOT = [Leaf(token.NAME, "not", prefix=" ")] EQUALS = [Leaf(token.EQEQUAL, "==", prefix=" ")] NOTEQUALS = [Leaf(token.NOTEQUAL, "!=", prefix=" ")] IN = [Leaf(token.NAME, "in", prefix=" ")] NOT_IN = NOT + IN IS = [Leaf(token.NAME, "is", prefix=" ")] IS_NOT = IS + NOT NONE = Leaf(token.NAME, "None", prefix=" ") GREATER = [Leaf(token.GREATER, ">", prefix=" ")] GREATER_EQUAL = [Leaf(token.GREATEREQUAL, ">=", prefix=" ")] LESS = [Leaf(token.LESS, "<", prefix=" ")] LESS_EQUAL = [Leaf(token.LESSEQUAL, "<=", prefix=" ")] TRUE = Name("True") FALSE = Name("False")
root_path = Path(root).relative_to(path) # current path current_path = path / root parent_spec = ignores.get(current_path) or next( ignores[p] for p in current_path.parents if p in ignores) spec = _load_ignore(Path(root), parent_spec, ignores) for file_name in files: result = root_path / file_name if (file_name != ".gitignore" and keep(result) and not spec.match_file(str(result))): yield path / result for cur_dir in list(dirs): if spec.match_file(str(root_path / cur_dir)): dirs.remove(cur_dir) _as = Leaf(token.NAME, "as", prefix=" ") _colon = Leaf(token.COLON, ":") _comma = Leaf(token.COMMA, ",") _dot = Leaf(token.DOT, ".") _dstar = Leaf(token.DOUBLESTAR, "**") _eq = Leaf(token.EQUAL, "=", prefix=" ") _lpar = Leaf(token.LPAR, "(") _lsqb = Leaf(token.LSQB, "[") _newline = Leaf(token.NEWLINE, "\n") _none = Leaf(token.NAME, "None") _rarrow = Leaf(token.RARROW, "->", prefix=" ") _rpar = Leaf(token.RPAR, ")") _rsqb = Leaf(token.RSQB, "]") _star = Leaf(token.STAR, "*") _ellipsis = Node(syms.atom, children=[new(_dot), new(_dot), new(_dot)])
def _c_num(n): return Leaf(token.NUMBER, repr(n.n))
def _create_logging_call(self, log_call_args, node):
    """Build a ``logger.info(...)`` call from the original call's arguments.

    Splices literal string arguments into a single aggregation string and
    carries the remaining arguments over; also ensures ``import logging``
    and a module-level logger assignment exist.
    """
    try:
        len_log_call_args = len(log_call_args)
        # Unwrap a single parenthesised atom ``( ... )`` into its contents.
        if (len_log_call_args == 1 and log_call_args[0].type == syms.atom
                and log_call_args[0].children[0].type == token.LPAR
                and log_call_args[0].children[2].type == token.RPAR):
            candidate = log_call_args[0].children[1]
            log_call_args = candidate.children if \
                candidate.type == syms.testlist_gexp else [candidate]
            len_log_call_args = len(log_call_args)
        string_args = self._literal_string_args(log_call_args)
        len_log_call_args = len(log_call_args)
        if not string_args and len_log_call_args > 1:
            # No literal string among several args: append an empty-string
            # anchor so the aggregation logic below has something to extend.
            log_call_args.extend(
                [Leaf(token.COMMA, ','), Leaf(token.STRING, "''")])
            string_args = self._literal_string_args(log_call_args)
        if string_args:
            aggregation_string_args = deque()
            index, leaf = string_args[0]
            if leaf.type == syms.term:
                # Old-style ``"fmt" % args`` term: fold the %-args into the
                # logging call's argument list.
                string_args = leaf.children[2]
                if is_tuple(string_args):
                    _string_args = [
                        c for c in string_args.children[1].children
                    ]
                    aggregation_string_args.extend(
                        [Leaf(token.COMMA, ',')] + _string_args)
                else:
                    aggregation_string_args.extend(
                        [Leaf(token.COMMA, ','), string_args])
                leaf = leaf.children[0]
            lead, aggregation_string, trail = _get_string_contents(
                leaf.value)
            # Merge the arguments after and before the string literal into
            # the aggregation string (prepend=True for the leading ones).
            aggregation_string = _thingy(aggregation_string,
                                         aggregation_string_args,
                                         log_call_args[index + 1:])
            aggregation_string = _thingy(aggregation_string,
                                         aggregation_string_args,
                                         log_call_args[:index],
                                         prepend=True)
            # Drop a dangling trailing comma.
            if (len(aggregation_string_args) > 2
                    and token.COMMA == aggregation_string_args[-1].type):
                aggregation_string_args.pop()
            # Normalise spacing: one space before values, none before commas.
            for arg in aggregation_string_args:
                arg.prefix = ' ' if arg.type != token.COMMA else ''
            log_call_args = [
                String(''.join([lead, aggregation_string, trail]))
            ] + [a.clone() for a in aggregation_string_args]
        new_node = Call(Name(u"%s.info" % LOGGER_NAME),
                        [a.clone() for a in log_call_args],
                        prefix=node.prefix)
        # Make sure the module imports logging and defines the logger.
        touch_import(None, 'logging', node)
        add_global_assignment_after_imports(LOGGER_NAME, LOGGER_GET.clone(),
                                            node)
    except Exception:
        logger.exception('Node is:%s', node)
        raise
    return new_node
def transform(self, node, results):
    # Only act when the tree root records the 'main' future feature; then a
    # '#' leaf is inserted as the first child and the changed node returned.
    # NOTE(review): the source was whitespace-mangled; `return node` is
    # assumed to belong inside the `if` body -- confirm against upstream.
    if 'main' in find_root(node).future_features:
        node.insert_child(0, Leaf(1, '#'))  # 1 == token.NAME
        return node
from abc import ABCMeta from collections import deque from lib2to3 import fixer_base, pytree from lib2to3 import patcomp from lib2to3.fixer_util import Assign, Call, Name, is_import, find_root, \ find_binding, Newline, Comma, touch_import, String, is_tuple from lib2to3.pgen2 import token from lib2to3.pygram import python_symbols as syms from lib2to3.pytree import Leaf, Node logger = logging.getLogger(__name__) parend_expr = patcomp.compile_pattern("""atom< '(' [atom|STRING|NAME] ')' >""") LOGGER_NAME = u"logger" LOGGER_GET = Assign( Leaf(token.NAME, LOGGER_NAME), Call(Name(u"logging.getLogger"), [Leaf(token.NAME, u"__name__")])) # It defaults to being sysms.atom. I don't know why :s LOGGER_GET.type = syms.expr_stmt def is_import_ish_stmt(node): # We also pretend gevent.monkey_patch() is an import because it's found # amongst them, and we don't want to create a LOGGER right after this. return (node.type == syms.simple_stmt and node.children and is_import(node.children[0])) or all( v in set(l.value for l in node.leaves()) for v in {u'eventlet', u'monkey_patch', u'.'}) def add_global_assignment_after_imports(_name, assignment, node):
def transform(self, node, results):
    """Insert a ``#`` leaf as the first child of *node*."""
    hash_leaf = Leaf(1, '#')  # 1 == token.NAME
    node.insert_child(0, hash_leaf)
def File(*nodes):
    """Wrap *nodes* in a ``file_input`` node terminated by an ENDMARKER."""
    children = list(nodes)
    children.append(Leaf(token.ENDMARKER, ""))
    return Node(syms.file_input, children)
def _c_str(s): return Leaf(token.STRING, repr(s.s))
def fix_main(node):
    """Move the module's loose top-level statements into a ``main()`` function.

    Function/class definitions and imports stay at module level; everything
    else is cloned into a new ``def main():`` appended at the end of *node*.
    NOTE(review): magic numbers are raw token codes (1=NAME, 2=NUMBER,
    3=STRING, 4=NEWLINE, 5=INDENT, 6=DEDENT, 7=LPAR, 8=RPAR, 11=COLON).
    """
    main_list = []
    del_list = []
    if (len(node.children) == 0):
        return node
    # Handle a multi-line comment (module docstring) at the head of the file:
    # temporarily detach it so it is not swept into main().
    temp = False
    if (node.children[0].type == syms.simple_stmt) \
            and (node.children[0].children[0].type == 3):
        temp = node.children[0].clone()
        node.children[0].remove()
    for i in node.children:
        # Keep definitions at module level.
        if (i.type == syms.funcdef) | (i.type == syms.async_funcdef) | (i.type == syms.classdef) | (i.type == syms.decorated):
            continue
        if len(i.children) != 0:
            # Keep imports at module level.
            if (i.children[0].type == syms.import_name) | (i.children[0].type == syms.import_from):
                continue
        main_list.append(i.clone())
        del_list.append(i)
    for i in del_list:
        i.remove()
    # Re-attach the docstring at the top, if one was detached.
    if temp:
        node.insert_child(0, temp)
    # Build the ``def main():`` header leaves.
    args = [
        Leaf(1, 'def'),
        Leaf(1, 'main', prefix=' '),
        Node(syms.parameters, [
            Leaf(7, '('),
            Leaf(8, ')'),
        ]),
        Leaf(11, ':'),
        Leaf(4, '\r\n')
    ]
    args += main_list
    # Trailing suite holding ``return 0``.
    # NOTE(review): 'return ' carries a trailing space inside the NAME leaf;
    # preserved as-is.
    args += [Node(syms.suite, [
        Leaf(5, '\t'),
        Node(syms.simple_stmt, [
            Node(syms.return_stmt, [
                Leaf(1, 'return '),
                Leaf(2, '0'),
            ]),
            Leaf(4, '\r\n'),
        ]),
        Leaf(6, '')
    ])]
    main_node = Node(syms.funcdef, args)
    node.append_child(main_node)
def _c_attribute(attr):
    # This is hacky. ¯\_(ツ)_/¯
    # Flattens "<base>.<attr>" into a single NAME leaf instead of building a
    # proper dotted-name node.
    base = convert_annotation(attr.value)
    return Leaf(token.NAME, f"{base}.{attr.attr}")
def add_file_part(file, lst):
    """Append ``>> <file> ,`` nodes to *lst* for a print-with-target statement.

    Does nothing when *file* is absent (None or an isNone() node).
    """
    if file is None or isNone(file):
        return
    lst.extend([
        Leaf(token.RIGHTSHIFT, ">>", prefix=" "),
        file.clone(),
        Comma(),
    ])
def transform(self, node: Node, results):
    """Rewrite matched ``os``/``os.path``/``open`` calls to pathlib form.

    NOTE(review): this source was whitespace-mangled; the indentation below
    is a reconstruction -- in particular `x.append_child(Slash)` is placed
    as a sibling of the preceding `if`, which matches the flattened token
    order but should be confirmed against upstream.
    """
    if "imp" in results:
        # ``import ...`` statement.
        return self._handle_import(node, results)
    else:
        if "buildin" in results:
            # Builtin ``open(...)`` call.
            method_name = results["buildin"].value
            return self._handle_buildin(node, method_name)
        if isinstance(results['method'], Leaf):
            method_name = results["method"].value
        else:
            method_name = results["method"][0].value
        if "obj" not in results:
            # Prefix: Path.
            return self._handle_no_args(node, method_name)
        else:
            obj = results["obj"]
            argname = obj.clone()
            if "submod" in results:
                # ``os.path.<method>(...)`` forms.
                if method_name == "join" and len(argname.children) >= 1:
                    first_arg, remaining_args = self._split_arguments(
                        argname)
                    x = Call(Name("Path"), first_arg, prefix=node.prefix)
                    if len(remaining_args) > 0 and all(
                            a.type in [token.COMMA, token.STRING]
                            for a in remaining_args):
                        if str(remaining_args[0].value).startswith('*'):
                            # Star-args: fall back to Path(...).joinpath(*a).
                            x.append_child(
                                Call(Name('joinpath', prefix=""),
                                     remaining_args,
                                     prefix="."))
                            return x
                    x.append_child(Slash)
                    for i, e in enumerate(remaining_args):
                        if isinstance(e.value, Node):
                            val = e.value
                        elif isinstance(e.value, Leaf):
                            val = e.value.value
                        else:
                            continue
                        if e.type == token.STRING and val != ",":
                            # if self.split_strings and "/" in e.value:
                            #     # TODO: get more robust e.value without quotes
                            #     parts = re.split('(/|\\\\)', e.value[1:-1])
                            #     for part in parts:
                            #         if part in ["/", "\\"]:
                            #             x.append_child(Slash)
                            #         else:
                            #             x.append_child(String('"{}"'.format(part), prefix=" "))
                            # else:
                            if i < 1:
                                p = " "
                            else:
                                p = ""
                            x.append_child(String(e, prefix=p))
                        else:
                            x.append_child(Slash)
                    return x
                else:
                    first_arg, remaining_args = self._split_arguments(
                        argname)
                    x = Call(Name("Path"), first_arg, prefix=node.prefix)
                    # os.path names that become Path methods.
                    new_names = {
                        "isabs": "is_absolute",
                        "isdir": "is_dir",
                        "isfile": "is_file",
                        "islink": "is_symlink",
                        "abspath": "resolve",
                        "realpath": "resolve",
                        "normpath": "resolve",
                        "same": "samefile",
                    }
                    # os.path names that become Path attributes/expressions.
                    new_attribs = {
                        "basename": "name",
                        "dirname": "parent",
                        "getsize": "stat().st_size",
                    }
                    if method_name in new_names:
                        x.append_child(
                            Call(Name(new_names[method_name], prefix=""),
                                 remaining_args,
                                 prefix="."))
                    elif method_name in new_attribs:
                        x.append_child(
                            String(new_attribs[method_name], prefix="."))
                    else:
                        x.append_child(
                            Call(Name(method_name, prefix=""),
                                 remaining_args,
                                 prefix="."))
                    return x
            else:
                # Plain ``os.<method>(...)`` forms.
                arglist = argname
                first_arg, remaining_args = self._split_arguments(arglist)
                x = Call(Name("Path"), first_arg, prefix=node.prefix)
                if method_name == "remove":
                    method_name = "unlink"
                if method_name == "listdir":
                    # os.listdir(p) -> list(Path(p).glob("*"))
                    x.append_child(String('glob("*")', prefix="."))
                    x.prefix = ""
                    return Call(Name('list'), [x], prefix=node.prefix)
                elif method_name == 'makedirs':
                    # os.makedirs(p, ...) -> Path(p).mkdir(..., parents=True)
                    # NOTE(review): magic numbers are token codes
                    # (12=COMMA, 1=NAME, 22=EQUAL, 3=STRING); 260 is assumed
                    # to be syms.argument -- confirm.
                    if len(remaining_args) > 0:
                        children = [
                            Leaf(12, ','),
                            Leaf(1, 'parents', prefix=' '),
                            Leaf(22, '='),
                            Leaf(3, 'True')
                        ]
                        remaining_args += [
                            Node(type=260, children=children)
                        ]
                    else:
                        remaining_args = [
                            Leaf(1, 'parents', prefix=''),
                            Leaf(22, '='),
                            Leaf(3, 'True')
                        ]
                    x.append_child(
                        Call(Name("mkdir"), remaining_args, prefix="."))
                else:
                    x.append_child(
                        Call(Name(method_name), remaining_args, prefix="."))
                return x
"""lib2to3's AST requires unique objects as children.""" if isinstance(n, Leaf): return Leaf(n.type, n.value, prefix=n.prefix if prefix is None else prefix) # this is hacky, we assume complex nodes are just being reused once from the # original AST. n.parent = None if prefix is not None: n.prefix = prefix return n _as = Leaf(token.NAME, 'as', prefix=' ') _colon = Leaf(token.COLON, ':') _comma = Leaf(token.COMMA, ',') _dot = Leaf(token.DOT, '.') _dstar = Leaf(token.DOUBLESTAR, '**') _eq = Leaf(token.EQUAL, '=', prefix=' ') _lpar = Leaf(token.LPAR, '(') _lsqb = Leaf(token.LSQB, '[') _newline = Leaf(token.NEWLINE, '\n') _none = Leaf(token.NAME, 'None') _rarrow = Leaf(token.RARROW, '->', prefix=' ') _rpar = Leaf(token.RPAR, ')') _rsqb = Leaf(token.RSQB, ']') _star = Leaf(token.STAR, '*') _ellipsis = Node(syms.atom, children=[new(_dot), new(_dot), new(_dot)])
def transform(self, node, results):
    """Insert a ``# type: (...) -> ...`` comment into a function's suite."""
    # Respect the global annotation budget, if one was set.
    if FixAnnotate.counter is not None:
        if FixAnnotate.counter <= 0:
            return
    # Check if there's already a long-form annotation for some argument.
    parameters = results.get('parameters')
    if parameters is not None:
        for ch in parameters.pre_order():
            if ch.prefix.lstrip().startswith('# type:'):
                return
    args = results.get('args')
    if args is not None:
        for ch in args.pre_order():
            if ch.prefix.lstrip().startswith('# type:'):
                return
    suite = results['suite']
    children = suite[0].children
    # NOTE: I've reverse-engineered the structure of the parse tree.
    # It's always a list of nodes, the first of which contains the
    # entire suite. Its children seem to be:
    #
    #   [0] NEWLINE
    #   [1] INDENT
    #   [2...n-2] statements (the first may be a docstring)
    #   [n-1] DEDENT
    #
    # Comments before the suite are part of the INDENT's prefix.
    #
    # "Compact" functions (e.g. "def foo(x, y): return max(x, y)")
    # have a different structure (no NEWLINE, INDENT, or DEDENT).
    # Check if there's already an annotation.
    for ch in children:
        if ch.prefix.lstrip().startswith('# type:'):
            return  # There's already a # type: comment here; don't change anything.
    # Compute the annotation
    annot = self.make_annotation(node, results)
    if annot is None:
        return
    # Insert '# type: {annot}' comment.
    # For reference, see lib2to3/fixes/fix_tuple_params.py in stdlib.
    if len(children) >= 1 and children[0].type != token.NEWLINE:
        # Compact function: synthesise NEWLINE/INDENT/DEDENT around the body.
        if children[0].prefix.strip() == '':
            children[0].prefix = ''
            children.insert(0, Leaf(token.NEWLINE, '\n'))
            # NOTE(review): the indent string was whitespace-mangled in the
            # source; four spaces is assumed -- confirm against upstream.
            children.insert(
                1, Leaf(token.INDENT, find_indentation(node) + '    '))
            children.append(Leaf(token.DEDENT, ''))
    if len(children) >= 2 and children[1].type == token.INDENT:
        argtypes, restype = annot
        degen_str = '(...) -> %s' % restype
        short_str = '(%s) -> %s' % (', '.join(argtypes), restype)
        # Fall back to the degenerate "(...)" form for very long signatures,
        # emitting the long per-argument form separately.
        if (len(short_str) > 64
                or len(argtypes) > 5) and len(short_str) > len(degen_str):
            self.insert_long_form(node, results, argtypes)
            annot_str = degen_str
        else:
            annot_str = short_str
        children[1].prefix = '%s# type: %s\n%s' % (
            children[1].value, annot_str, children[1].prefix)
        children[1].changed()
        if FixAnnotate.counter is not None:
            FixAnnotate.counter -= 1
        # Also add 'from typing import Any' at the top if needed.
        self.patch_imports(argtypes + [restype], node)
    else:
        self.log_message(
            "%s:%d: cannot insert annotation for one-line function" %
            (self.filename, node.get_lineno()))
def _r_functiondef(fun, node):
    """Reapply a stub (pyi) function's annotations onto the matching source
    function found among *node*'s children.

    Raises ValueError when the function is missing or the method kind
    (static/class/instance) disagrees between stub and source.
    """
    assert node.type in (syms.file_input, syms.suite)
    name = Leaf(token.NAME, fun.name)
    pyi_decorators = decorator_names(fun.decorator_list)
    pyi_method_decorators = list( \
        filter(is_builtin_method_decorator, pyi_decorators)
    ) or ['instancemethod']
    is_method = (
        node.parent is not None and \
        node.parent.type == syms.classdef
        and "staticmethod" not in pyi_method_decorators
    )
    args, returns = get_function_signature(fun, is_method=is_method)
    for child in flatten_some(node.children):
        decorators = None
        if child.type == syms.decorated:
            # skip decorators
            decorators = child.children[0]
            child = child.children[1]
        if child.type in (syms.async_stmt, syms.async_funcdef):
            # async def in 3.5 and 3.6
            child = child.children[1]
        if child.type != syms.funcdef:
            continue
        offset = 1
        if child.children[offset] == name:
            lineno = child.get_lineno()
            column = 1
            if decorators:
                # Stub and source must agree on the method kind.
                src_decorators = decorator_names(decorators)
                src_method_decorators = list(
                    filter(is_builtin_method_decorator,
                           src_decorators)) or ['instancemethod']
                if pyi_method_decorators != src_method_decorators:
                    raise ValueError(
                        f"Incompatible method kind for {fun.name!r}: " +
                        f"{lineno}:{column}: Expected: " +
                        f"{pyi_method_decorators[0]}, actual: " +
                        f"{src_method_decorators[0]}")
                is_method = "staticmethod" not in pyi_decorators
            try:
                annotate_parameters(child.children[offset + 1], args,
                                    is_method=is_method)
                annotate_return(child.children, returns, offset + 2)
                reapply(fun.body, child.children[-1])
                remove_function_signature_type_comment(child.children[-1])
            except ValueError as ve:
                raise ValueError(
                    f"Annotation problem in function {name.value!r}: " +
                    f"{lineno}:{column}: {ve}")
            break
    else:
        # Loop completed without finding the function.
        raise ValueError(f"Function {name.value!r} not found in source.")
    return []
from pythoscope.util import quoted_block from pythoscope.astvisitor import is_leaf_of_type, is_node_of_type from lib2to3 import pygram, pytree from lib2to3.pgen2 import driver, token from lib2to3.pgen2.parse import ParseError from lib2to3.pygram import python_symbols as syms from lib2to3.pytree import Node, Leaf __all__ = [ "EmptyCode", "Newline", "ParseError", "clone", "create_import", "insert_after", "insert_before", "parse", "parse_fragment", "regenerate" ] EmptyCode = lambda: Node(syms.file_input, []) Newline = lambda: Leaf(token.NEWLINE, "\n") def clone(tree): """Clone the tree, preserving its add_newline attribute. """ if tree is None: return None new_tree = tree.clone() if hasattr(tree, 'added_newline') and tree.added_newline: new_tree.added_newline = True return new_tree def create_import(import_desc):
def _r_annassign(annassign, body):
    """Reapply a stub's annotated assignment onto the matching statement in
    *body*, or insert a bare ``name: type`` declaration if none matches."""
    assert body.type in (syms.file_input, syms.suite)

    target = annassign.target
    if isinstance(target, ast3.Name):
        name = target.id
    elif isinstance(target, ast3.Attribute):
        name = serialize_attribute(target)
    else:
        raise NotImplementedError(f"unexpected assignment target: {target}")

    annotation = convert_annotation(annassign.annotation)
    annotation.prefix = " "
    annassign_node = Node(
        syms.annassign,
        [
            new(_colon),
            annotation,
        ],
    )
    for child in flatten_some(body.children):
        if child.type != syms.simple_stmt:
            continue

        maybe_expr = child.children[0]
        if maybe_expr.type != syms.expr_stmt:
            continue

        expr = maybe_expr.children

        if (expr[0].type in (token.NAME, syms.power)
                and minimize_whitespace(str(expr[0])) == name):
            if expr[1].type == syms.annassign:
                # variable already typed, let's just ensure it's sane
                if len(expr[1].children) > 2 and expr[1].children[2] != _eq:
                    raise NotImplementedError(
                        f"unexpected element after annotation: {str(expr[3])}")
                ensure_annotations_equal(name, annotation,
                                         expr[1].children[1])
                break

            if expr[1] != _eq:
                # If it's not an assignment, we're ignoring it. It could be:
                # - indexing
                # - tuple unpacking
                # - calls
                # - etc. etc.
                continue

            maybe_type_comment = _type_comment_re.match(
                child.children[1].prefix)
            if maybe_type_comment:
                # variable already typed by type comment, let's ensure it's sane...
                type_comment = parse_type_comment(
                    maybe_type_comment.group('type'))
                actual_annotation = convert_annotation(type_comment)
                ensure_annotations_equal(name, annotation, actual_annotation)

                # ...and remove the redundant comment
                child.children[1].prefix = maybe_type_comment.group('nl')

            if len(expr[2:]) > 0 and expr[2:] != [_ellipsis]:
                # copy the value unless it was an old-style variable type
                # comment with no actual value (but just a ... placeholder)
                annassign_node.children.append(new(_eq))
                annassign_node.children.extend(new(elem) for elem in expr[2:])

            maybe_expr.children = [expr[0], annassign_node]
            break
    else:
        # If the variable was used in some `if` statement, etc.; let's define
        # its type from the stub on the top level of the function.
        offset, prefix = get_offset_and_prefix(body, skip_assignments=True)
        body.children.insert(
            offset,
            Node(
                syms.simple_stmt,
                [
                    Node(
                        syms.expr_stmt,
                        [
                            Leaf(token.NAME, name),
                            annassign_node,
                        ],
                    ),
                    new(_newline),
                ],
                prefix=prefix.lstrip('\n'),
            ),
        )
    return []
def transform(self, node, results):
    """If the matched call has exactly one STRING argument, rebuild *node*
    with that string prefixed by ``b`` followed by the remaining nodes."""
    args = results['arg']
    wrap = results["wrapper"]
    if len(args) != 1 or args[0].type != token.STRING:
        return
    bytes_leaf = Leaf(token.STRING, 'b' + args[0].value, prefix=wrap.prefix)
    node.children = [bytes_leaf] + results['rest']
def _c_name(name): return Leaf(token.NAME, name.id)
def Minus(prefix=None):
    """Return a ``-`` MINUS leaf, with optional leading *prefix* text."""
    minus = Leaf(token.MINUS, u'-', prefix=prefix)
    return minus
def _c_nameconstant(const): return Leaf(token.NAME, repr(const.value))
def Star(prefix=None):
    """Return a ``*`` STAR leaf, with optional leading *prefix* text."""
    star = Leaf(token.STAR, u'*', prefix=prefix)
    return star
def Dedent():
    """Return an empty DEDENT leaf."""
    dedent = Leaf(token.DEDENT, "")
    return dedent