def transform(self, node, results):
    """Rename ``filterfalse`` back to itertools' Py2 spelling ``ifilterfalse``.

    Handles three match shapes: an import list (``imports``), bare name
    usages (``names``), and a single function reference (``f``).
    """
    syms = self.syms
    imports = results.get("imports")
    f = results.get("f")
    names = results.get("names")
    if imports:
        # Normalize to a list of import children; the [::2] stride below
        # skips the comma leaves between imported names.
        if imports.type == syms.import_as_name or not imports.children:
            children = [imports]
        else:
            children = imports.children
        for child in children[::2]:
            if isinstance(child, Node):
                # 'filterfalse as x' style: rename the nested name leaf.
                for kid in child.children:
                    if kid.value == "filterfalse":
                        kid.changed()
                        kid.value = "ifilterfalse"
                        break
            elif child.value == "filterfalse":
                child.changed()
                child.value = "ifilterfalse"
                break
    elif names:
        # Bare usages: prefix with 'i' and ensure itertools provides it.
        for name in names:
            if is_probably_builtin(name):
                name.value = "i" + name.value
                touch_import("itertools", name.value, node)
    elif f:
        f.changed()
        f.value = "ifilterfalse"
def transform(self, node, results):
    """Rewrite a bare local name to its fully-qualified mapped replacement.

    Looks the matched name up in ``self._names_to_modules``; when exactly one
    module provides it and a corresponding import was seen, replaces the name
    with the mapped dotted path and ensures the package is imported.
    Returns the replacement node, or ``None`` when no rewrite applies.
    """
    local = results.get("local")
    tail = results.get("tail")
    if local:
        local = local[0]
        local_name = local.value
        modules = self._names_to_modules[local_name]
        if len(modules) > 1:
            # BUG FIX: the format string has two placeholders but was fed a
            # single argument (`% local_name`), raising TypeError whenever
            # this warning fired; also `self.warnings` -> `self.warning`
            # (lib2to3 BaseFix defines `warning`, as used below).
            self.warning(node, "Conflicting name '%s' is present in %s! Ignoring transformation!" % (local_name, modules))
            return
        module = list(modules)[0]
        if all("module" not in res for res in self._import_matches):
            self.warning(node, "Aggressive name matched '%s' but no corresponding import! Fix manually." % local_name)
            return
        new_name = unicode(self.mapping[module + "." + local_name])
        syms = self.syms
        if tail:
            tail = [t.clone() for t in tail]
        new = self.package_tree(new_name)
        new = pytree.Node(syms.power, new + tail, prefix=local.prefix)
        # Make sure the proper package is imported
        package = new_name.rsplit(".", 1)[0]
        touch_import(None, package, node)
        return new
def transform(self, node, results):
    """Rewrite a ``%`` byte-string format into a ``bytesformatter(...)`` call.

    Skips blacklisted files; assumes ``py3kcompat`` supplies
    ``bytesformatter`` for mercurial/util.py -- TODO confirm.
    """
    if self.filename in blacklist:
        return
    elif self.filename == 'mercurial/util.py':
        touch_import('.', 'py3kcompat', node=node)
    formatstr = results['formatstr'].clone()
    data = results['data'].clone()
    formatstr.prefix = ''  # remove spaces from start
    if isnumberremainder(formatstr, data):
        return
    # We have two possibilities:
    # 1- An identifier or name is passed, it is going to be a leaf, thus, we
    #    just need to copy its value as an argument to the formatter;
    # 2- A tuple is explicitly passed. In this case, we're gonna explode it
    #    to pass to the formatter
    # TODO: Check for normal strings. They don't need to be translated
    if is_tuple(data):
        args = [formatstr, Comma().clone()] + \
            [c.clone() for c in data.children[:]]
    else:
        args = [formatstr, Comma().clone(), data]
    call = Call(Name('bytesformatter', prefix=' '), args)
    return call
def transform(self, node, results):
    """Rename imports of mapped modules, and usages of their bare names."""
    import_mod = results.get("module_name")
    if import_mod:
        mod_name = import_mod.value
        new_name = unicode(self.mapping[mod_name])
        import_mod.replace(Name(new_name, prefix=import_mod.prefix))
        if "name_import" in results:
            # If it's not a "from x import x, y" or "import x as y" import,
            # marked its usage to be replaced.
            self.replace[mod_name] = new_name
        if "multiple_imports" in results:
            # This is a nasty hack to fix multiple imports on a line (e.g.,
            # "import StringIO, urlparse"). The problem is that I can't
            # figure out an easy way to make a pattern recognize the keys of
            # MAPPING randomly sprinkled in an import statement.
            results = self.match(node)
            if results:
                self.transform(node, results)
    else:
        # Replace usage of the module.
        bare_name = results["bare_with_attr"][0]
        new_name = self.mapping.get(bare_name.value)
        if new_name:
            bare_name.replace(Name(new_name, prefix=bare_name.prefix))
            touch_import(None, new_name, node)
def transform(self, node, results):
    """Rename ``filterfalse`` back to itertools' Py2 spelling ``ifilterfalse``.

    Same shape as the byte-string twin above, but with unicode literals.
    """
    syms = self.syms
    imports = results.get(u"imports")
    f = results.get(u"f")
    names = results.get(u"names")
    if imports:
        # Normalize to a list of import children; [::2] skips comma leaves.
        if imports.type == syms.import_as_name or not imports.children:
            children = [imports]
        else:
            children = imports.children
        for child in children[::2]:
            if isinstance(child, Node):
                # 'filterfalse as x' style: rename the nested name leaf.
                for kid in child.children:
                    if kid.value == u"filterfalse":
                        kid.changed()
                        kid.value = u"ifilterfalse"
                        break
            elif child.value == u"filterfalse":
                child.changed()
                child.value = u"ifilterfalse"
                break
    elif names:
        # Bare usages: prefix with 'i' and ensure itertools provides it.
        for name in names:
            if is_probably_builtin(name):
                name.value = u"i" + name.value
                touch_import(u"itertools", name.value, node)
    elif f:
        f.changed()
        f.value = u"ifilterfalse"
def transform(self, node, results):
    """Rewrite the matched ``callable(f)`` as ``isinstance(f, collections.Callable)``."""
    target = results["func"]
    touch_import(None, "collections", node=node)
    call_args = [target.clone(), String(", ")]
    call_args.extend(Attr(Name("collections"), Name("Callable")))
    return Call(Name("isinstance"), call_args, prefix=node.prefix)
def handle_one(n):
    # Pass non-NAME nodes through; NAME leaves are kept unless they
    # mention "Orange.".
    if n.type == token.NAME:
        if "Orange." not in n.value:
            new_contents.append(n)
        else:
            # NOTE(review): names containing "Orange." are NOT appended to
            # new_contents -- only the import is added. Confirm the node is
            # meant to be dropped here.
            touch_import(None, "Orange", node)
    else:
        new_contents.append(n)
def transform(self, node, results):
    """Replace ``callable(f)`` with ``isinstance(f, collections.Callable)``."""
    target = results['func']
    touch_import(None, u'collections', node=node)
    isinstance_args = [target.clone(), String(u', ')]
    isinstance_args.extend(Attr(Name(u'collections'), Name(u'Callable')))
    return Call(Name(u'isinstance'), isinstance_args, prefix=node.prefix)
def transform(self, node, results):
    """Replace ``callable(f)`` with ``isinstance(f, collections.Callable)``."""
    target = results['func']
    touch_import(None, 'collections', node=node)
    isinstance_args = [target.clone(), String(', ')]
    isinstance_args.extend(Attr(Name('collections'), Name('Callable')))
    return Call(Name('isinstance'), isinstance_args, prefix=node.prefix)
def transform(self, node, results):
    """Replace the matched ``.next()`` call with ``six.advance_iterator(...)``."""
    assert results
    base = results.get('base')
    if not base:
        return
    touch_import(None, u'six', node)
    cloned = [child.clone() for child in base]
    cloned[0].prefix = u""
    replacement = Call(Name(u"six.advance_iterator", prefix=node.prefix), cloned)
    node.replace(replacement)
def transform(self, node, results):
    """Add a compatibility import for builtin ``hasattr`` calls."""
    # Only rewrite when the call really targets the builtin.
    if is_probably_builtin(node):
        arg1 = results['arg1']
        if 'arg2' not in results:
            # With one argument, only act on *args / **kwargs expansions.
            is_kwarg_expansion = (arg1.type == self.syms.argument and arg1.children[0].value == '**')
            if arg1.type != self.syms.star_expr and not is_kwarg_expansion:
                return
        # NOTE(review): 'syx' looks like a typo for 'six' -- confirm against
        # the package this fixer is meant to import from.
        touch_import('syx', 'hasattr', node)
        return node
def transform(self, node, results):
    """Route the matched method call through the corresponding six helper."""
    assert results
    base = results.get('base')
    if not base:
        return
    method = results['method'][0]
    touch_import(None, u'six', node)
    cloned = [child.clone() for child in base]
    cloned[0].prefix = u""
    six_callee = Name(u"six.%s" % method.value, prefix=node.prefix)
    node.replace(Call(six_callee, cloned))
def transform(self, node, results):
    """Route the matched method call through the corresponding six helper."""
    assert results
    base = results.get('base')
    if not base:
        return
    method = results['method'][0]
    touch_import(None, u'six', node)
    cloned = [child.clone() for child in base]
    cloned[0].prefix = u""
    six_callee = Name(u"six.%s" % method.value, prefix=node.prefix)
    node.replace(Call(six_callee, cloned))
def transform(self, node, results):
    """Rewrite ``unicode`` -> ``six.text_type`` and ``u'...'`` -> ``six.u('...')``."""
    if 'name' in results:
        touch_import(None, u'six', node)
        matched = results['name']
        matched.replace(Name(u'six.text_type', prefix=matched.prefix))
    elif node.type == token.STRING and _literal_re.match(node.value):
        touch_import(None, u'six', node)
        literal = node.clone()
        literal.value = literal.value[1:]  # drop the leading u/U prefix
        literal.prefix = ''
        node.replace(Call(Name(u'six.u', prefix=node.prefix), [literal]))
def add_globals(self, node):
    """Add required globals to the root of node. Idempotent."""
    if self.added_pyi_globals:
        return
    # TODO: get rid of this -- added to prevent adding .parsed_pyi.top_lines every time
    # we annotate a different function in the same file, but can break when we run the tool
    # twice on the same file. Have to do something like what touch_import does.
    self.added_pyi_globals = True
    imports, top_lines = self.parsed_pyi.imports, self.parsed_pyi.top_lines
    # Copy imports if not already present
    for pkg, names in imports:
        if names is None:
            # TODO: do ourselves, touch_import puts stuff above license headers
            touch_import(None, pkg, node)  # == 'import pkg'
        else:
            for name in names:
                touch_import(pkg, name, node)
    root = find_root(node)
    # Indices of existing top-level import statements.
    import_idx = [
        idx for idx, node in enumerate(root.children)
        if self.import_pattern.match(node)
    ]
    if import_idx:
        # __future__ imports go before the first import; other top lines after
        # the last one.
        future_insert_pos = import_idx[0]
        top_insert_pos = import_idx[-1] + 1
    else:
        future_insert_pos = top_insert_pos = 0
        # first string (normally docstring)
        for idx, node in enumerate(root.children):
            if (node.type == syms.simple_stmt and node.children and
                    node.children[0].type == token.STRING):
                future_insert_pos = top_insert_pos = idx + 1
                break
    top_lines = '\n'.join(top_lines)
    top_lines = Util.parse_string(top_lines)  # strips some newlines
    for offset, node in enumerate(top_lines.children[:-1]):
        root.insert_child(top_insert_pos + offset, node)
    # touch_import doesn't do proper order for __future__
    pkg = '__future__'
    future_imports = [
        n for n in self.future_imports if not does_tree_import(pkg, n, root)
    ]
    for offset, name in enumerate(future_imports):
        node = FromImport(pkg, [Leaf(token.NAME, name, prefix=" ")])
        node = Node(syms.simple_stmt, [node, Newline()])
        root.insert_child(future_insert_pos + offset, node)
def transform(self, node, results):
    """Rename ``__unicode__`` to ``__str__`` and decorate the class with
    ``python_2_unicode_compatible``."""
    unifunc = results["unifunc"]
    unifunc.replace(Name("__str__", prefix=unifunc.prefix))
    cloned_class = node.clone()
    cloned_class.prefix = '\n' + find_indentation(node)
    deco = Node(syms.decorator,
                [Leaf(token.AT, "@"), Name('python_2_unicode_compatible')])
    wrapper = Node(syms.decorated, [deco, cloned_class], prefix=node.prefix)
    node.replace(wrapper)
    touch_import('django.utils.encoding', 'python_2_unicode_compatible', wrapper)
def transform(self, node, results):
    """Annotate the matched function from the parsed pyi stub."""
    assert self.parsed_pyi, 'must provide pyi_string'
    signature = FuncSignature(node, results)
    if not self.can_annotate(signature):
        return
    stub_sig = self.parsed_pyi.funcs[signature.full_name]
    if self.annotate_pep484:
        self.insert_annotation(signature, stub_sig)
    else:
        comment = self.get_comment_annotation(signature, stub_sig)
        inserted = signature.try_insert_comment_annotation(comment)
        if inserted and 'Any' in comment:
            touch_import('typing', 'Any', node)
    self.add_globals(node)
def transform(self, node, results):
    """Rename ``__unicode__`` to ``__str__`` and decorate the class with
    ``python_2_unicode_compatible``."""
    unifunc = results["unifunc"]
    unifunc.replace(Name("__str__", prefix=unifunc.prefix))
    cloned_class = node.clone()
    cloned_class.prefix = '\n' + find_indentation(node)
    deco = Node(syms.decorator,
                [Leaf(token.AT, "@"), Name('python_2_unicode_compatible')])
    wrapper = Node(syms.decorated, [deco, cloned_class], prefix=node.prefix)
    node.replace(wrapper)
    touch_import('django.utils.encoding', 'python_2_unicode_compatible', wrapper)
def transform(self, node, results):
    """Insert a '# type: ...' comment for the matched function, if absent."""
    if FixAnnotate.counter is not None:
        if FixAnnotate.counter <= 0:
            return
    suite = results['suite']
    children = suite[0].children
    # NOTE: I've reverse-engineered the structure of the parse tree.
    # It's always a list of nodes, the first of which contains the
    # entire suite. Its children seem to be:
    #
    #   [0] NEWLINE
    #   [1] INDENT
    #   [2...n-2] statements (the first may be a docstring)
    #   [n-1] DEDENT
    #
    # Comments before the suite are part of the INDENT's prefix.
    #
    # "Compact" functions (e.g. "def foo(x, y): return max(x, y)")
    # have a different structure that isn't matched by PATTERN.
    ## print('-'*60)
    ## print(node)
    ## for i, ch in enumerate(children):
    ##     print(i, repr(ch.prefix), repr(ch))
    # Check if there's already an annotation.
    for ch in children:
        if ch.prefix.lstrip().startswith('# type:'):
            return  # There's already a # type: comment here; don't change anything.
    # Compute the annotation
    annot = self.make_annotation(node, results)
    # Insert '# type: {annot}' comment.
    # For reference, see lib2to3/fixes/fix_tuple_params.py in stdlib.
    if len(children) >= 2 and children[1].type == token.INDENT:
        children[1].prefix = '%s# type: %s\n%s' % (
            children[1].value, annot, children[1].prefix)
        children[1].changed()
        if FixAnnotate.counter is not None:
            FixAnnotate.counter -= 1
    # Also add 'from typing import Any' at the top.
    if 'Any' in annot:
        touch_import('typing', 'Any', node)
def transform(self, node, results):
    """Insert a '# type: ...' comment for the matched function, if absent."""
    if FixAnnotate.counter is not None:
        if FixAnnotate.counter <= 0:
            return
    suite = results['suite']
    children = suite[0].children
    # NOTE: I've reverse-engineered the structure of the parse tree.
    # It's always a list of nodes, the first of which contains the
    # entire suite. Its children seem to be:
    #
    #   [0] NEWLINE
    #   [1] INDENT
    #   [2...n-2] statements (the first may be a docstring)
    #   [n-1] DEDENT
    #
    # Comments before the suite are part of the INDENT's prefix.
    #
    # "Compact" functions (e.g. "def foo(x, y): return max(x, y)")
    # have a different structure that isn't matched by PATTERN.
    ## print('-'*60)
    ## print(node)
    ## for i, ch in enumerate(children):
    ##     print(i, repr(ch.prefix), repr(ch))
    # Check if there's already an annotation.
    for ch in children:
        if ch.prefix.lstrip().startswith('# type:'):
            return  # There's already a # type: comment here; don't change anything.
    # Compute the annotation
    annot = self.make_annotation(node, results)
    # Insert '# type: {annot}' comment.
    # For reference, see lib2to3/fixes/fix_tuple_params.py in stdlib.
    if len(children) >= 2 and children[1].type == token.INDENT:
        children[1].prefix = '%s# type: %s\n%s' % (children[1].value, annot,
                                                   children[1].prefix)
        children[1].changed()
        if FixAnnotate.counter is not None:
            FixAnnotate.counter -= 1
    # Also add 'from typing import Any' at the top.
    if 'Any' in annot:
        touch_import('typing', 'Any', node)
def add_globals(self, node):
    """Add required globals to the root of node. Idempotent."""
    if self.added_pyi_globals:
        return
    # TODO(tsudol): get rid of this -- added to prevent adding
    # .parsed_pyi.top_lines every time we annotate a different function in the
    # same file, but can break when we run the tool twice on the same file. Have
    # to do something like what touch_import does.
    self.added_pyi_globals = True
    imports, top_lines = self.parsed_pyi.imports, self.parsed_pyi.top_lines
    # Copy imports if not already present
    for pkg, names in imports:
        if names is None:
            # TODO(tsudol): do ourselves, touch_import puts stuff above license
            # headers.
            touch_import(None, pkg, node)  # == 'import pkg'
        else:
            for name in names:
                touch_import(pkg, name, node)
    root = find_root(node)
    # Indices of existing top-level import statements.
    import_idx = [
        idx for idx, node in enumerate(root.children)
        if self.import_pattern.match(node)
    ]
    if import_idx:
        insert_pos = import_idx[-1] + 1
    else:
        insert_pos = 0
        # first string (normally docstring)
        for idx, node in enumerate(root.children):
            if (node.type == syms.simple_stmt and node.children and
                    node.children[0].type == token.STRING):
                insert_pos = idx + 1
                break
    top_lines = '\n'.join(top_lines)
    top_lines = Util.parse_string(top_lines)  # strips some newlines
    for offset, node in enumerate(top_lines.children[:-1]):
        root.insert_child(insert_pos + offset, node)
def add_end_part(end, file, parent, loc):
    """Append ``; <file>.write(<end>)`` after the print at position ``loc``.

    Does nothing when ``end`` is None or a plain single-space literal
    (the default print terminator behavior). Falls back to ``sys.stdout``
    when no file expression was given.
    """
    if isNone(end):
        return
    if end.type == token.STRING and end.value in ("' '", '" "',
                                                  "u' '", 'u" "',
                                                  "b' '", 'b" "'):
        return
    if file is None:
        touch_import(None, "sys", parent)
        file = Node(syms.power,
                    [Name("sys"), Node(syms.trailer, [Dot(), Name("stdout")])])
    write_call = Node(syms.power, [
        file,
        Node(syms.trailer, [Dot(), Name("write")]),
        Node(syms.trailer, [LParen(), end, RParen()]),
    ])
    write_call.prefix = " "
    parent.insert_child(loc, Leaf(token.SEMI, ";"))
    parent.insert_child(loc + 1, write_call)
def transform(self, node, results):
    """Rewrite ``module.member`` accesses per ``self.mapping`` and import
    the providing package (collapsed to its 'Orange' root when applicable)."""
    member = results.get("member")
    head = results.get("head")
    tail = results.get("tail")
    module = head[0].value
    if member and module in self._modules_to_change:
        node = member[0]
        head = head[0]
        old_name = module + "." + node.value
        if old_name not in self.mapping:
            return
        new_name = unicode(self.mapping[old_name])
        if ":" in new_name:
            # ':' is the delimiter used to separate module namespace
            package = new_name.split(":", 1)[0]
            new_name = new_name.replace(":", ".")
        else:
            package = new_name.rsplit(".", 1)[0]
        syms = self.syms
        if tail:
            tail = [t.clone() for t in tail]
        new = self.package_tree(new_name)
        new = pytree.Node(syms.power, new + tail, prefix=head.prefix)
        # Make sure the proper package is imported
        # if ":" in new_name:
        #     package = new_name.split(":",1)[0]
        # else:
        #     package = new_name.rsplit(".", 1)[0]
        def orange_to_root(package):
            return "Orange" if package.startswith("Orange.") else package
        touch_import(None, orange_to_root(package), node)
        return new
def transform(self, node, results):
    """Rewrite a Py2 print statement as a print() call and import
    ``print_function`` from __future__."""
    assert results
    bare_print = results.get("bare")
    if bare_print:
        # Special-case print all by itself
        bare_print.replace(Call(Name(u"print"), [], prefix=bare_print.prefix))
        return
    assert node.children[0] == Name(u"print")
    args = node.children[1:]
    if len(args) == 1 and parend_expr.match(args[0]):
        # We don't want to keep sticking parens around an
        # already-parenthesised expression.
        return
    sep = end = file = None
    if args and args[-1] == Comma():
        # Trailing comma means 'no newline': emulate with end=" ".
        args = args[:-1]
        end = " "
    if args and args[0] == pytree.Leaf(token.RIGHTSHIFT, u">>"):
        # 'print >>f, ...' form: capture the file expression.
        assert len(args) >= 2
        file = args[1].clone()
        args = args[3:]  # Strip a possible comma after the file expression
    # Now synthesize a print(args, sep=..., end=..., file=...) node.
    l_args = [arg.clone() for arg in args]
    if l_args:
        l_args[0].prefix = u""
    if sep is not None or end is not None or file is not None:
        if sep is not None:
            self.add_kwarg(l_args, u"sep", String(repr(sep)))
        if end is not None:
            self.add_kwarg(l_args, u"end", String(repr(end)))
        if file is not None:
            self.add_kwarg(l_args, u"file", file)
    n_stmt = Call(Name(u"print"), l_args)
    n_stmt.prefix = node.prefix
    touch_import(u'__future__', u'print_function', node)
    return n_stmt
def add_globals(self, node):
    """Add required globals to the root of node. Idempotent."""
    if self.added_pyi_globals:
        return
    # TODO: get rid of this -- added to prevent adding .parsed_pyi.top_lines every time
    # we annotate a different function in the same file, but can break when we run the tool
    # twice on the same file. Have to do something like what touch_import does.
    self.added_pyi_globals = True
    imports, top_lines = self.parsed_pyi.imports, self.parsed_pyi.top_lines
    # Copy imports if not already present
    for pkg, names in imports:
        if names is None:
            # TODO: do ourselves, touch_import puts stuff above license headers
            touch_import(None, pkg, node)  # == 'import pkg'
        else:
            for name in names:
                touch_import(pkg, name, node)
    root = find_root(node)
    # Indices of existing top-level import statements.
    import_idx = [idx for idx, node in enumerate(root.children)
                  if self.import_pattern.match(node)]
    if import_idx:
        insert_pos = import_idx[-1] + 1
    else:
        insert_pos = 0
        # first string (normally docstring)
        for idx, node in enumerate(root.children):
            if (node.type == syms.simple_stmt and node.children and
                    node.children[0].type == token.STRING):
                insert_pos = idx + 1
                break
    top_lines = '\n'.join(top_lines)
    top_lines = Util.parse_string(top_lines)  # strips some newlines
    for offset, node in enumerate(top_lines.children[:-1]):
        root.insert_child(insert_pos + offset, node)
def transform(self, node, results):
    """Annotate the matched function from the parsed pyi stub, honoring the
    class-level counter budget."""
    assert self.parsed_pyi, 'must provide pyi_string'
    budget = FixAnnotate.counter
    if budget is not None and budget <= 0:
        return
    cur_sig = FuncSignature(node, results)
    if not self.can_annotate(cur_sig):
        return
    if FixAnnotate.counter is not None:
        FixAnnotate.counter -= 1
    # Compute the annotation, or directly insert if not self.emit_as_comment
    annot = self.get_or_insert_annotation(cur_sig)
    if annot and not self.annotate_pep484:
        inserted = cur_sig.try_insert_comment_annotation(annot)
        if inserted and 'Any' in annot:
            touch_import('typing', 'Any', node)
    self.add_globals(node)
def transform(self, node, results):
    """Rewrite a bare local name to its fully-qualified mapped replacement.

    Looks the matched name up in ``self._names_to_modules``; when exactly one
    module provides it and a corresponding import was seen, replaces the name
    with the mapped dotted path and ensures the package is imported.
    Returns the replacement node, or ``None`` when no rewrite applies.
    """
    local = results.get("local")
    tail = results.get("tail")
    if local:
        local = local[0]
        local_name = local.value
        modules = self._names_to_modules[local_name]
        if len(modules) > 1:
            # BUG FIX: the format string has two placeholders but was fed a
            # single argument (`% local_name`), raising TypeError whenever
            # this warning fired; also `self.warnings` -> `self.warning`
            # (lib2to3 BaseFix defines `warning`, as used below).
            self.warning(
                node,
                "Conflicting name '%s' is present in %s! Ignoring transformation!"
                % (local_name, modules))
            return
        module = list(modules)[0]
        if all("module" not in res for res in self._import_matches):
            self.warning(
                node,
                "Aggressive name matched '%s' but no corresponding import! Fix manually."
                % local_name)
            return
        new_name = unicode(self.mapping[module + "." + local_name])
        syms = self.syms
        if tail:
            tail = [t.clone() for t in tail]
        new = self.package_tree(new_name)
        new = pytree.Node(syms.power, new + tail, prefix=local.prefix)
        # Make sure the proper package is imported
        package = new_name.rsplit(".", 1)[0]
        touch_import(None, package, node)
        return new
def patch_imports(self, types, node):
    """Emit the queued imports once (sorted), then disable further patching."""
    pending = self.needed_imports
    if pending:
        for module, symbol in sorted(pending):
            touch_import(module, symbol, node)
    self.needed_imports = None
def test_from_import(self):
    """touch_import with a package yields a 'from ... import ...' line."""
    tree = parse('bar()')
    fixer_util.touch_import("cgi", "escape", tree)
    self.assertEqual(str(tree), 'from cgi import escape\nbar()\n\n')
def test_name_import(self):
    """touch_import with no package yields a plain 'import ...' line."""
    tree = parse('bar()')
    fixer_util.touch_import(None, "cgi", tree)
    self.assertEqual(str(tree), 'import cgi\nbar()\n\n')
def test_after_imports(self):
    """A new import lands after the docstring and existing imports."""
    tree = parse('"""foo"""\nimport bar\nbar()')
    fixer_util.touch_import(None, "foo", tree)
    self.assertEqual(str(tree), '"""foo"""\nimport bar\nimport foo\nbar()\n\n')
def test_beginning(self):
    """With no docstring or imports, the new import goes first."""
    tree = parse('bar()')
    fixer_util.touch_import(None, "foo", tree)
    self.assertEqual(str(tree), 'import foo\nbar()\n\n')
def _handle_type2abc(self, node, results, module, abc):
    """Rewrite the match as ``isinstance(obj, <module>.<abc>)``."""
    touch_import(None, module, node)
    target = results['obj'].clone()
    abc_path = u'.'.join([module, abc])
    args = [target, String(u', ' + abc_path)]
    return Call(Name(u'isinstance'), args, prefix=node.prefix)
def transform(self, node, results):
    """Replace ``__metaclass__`` assignments with ``six.with_metaclass(...)``
    in the class's base list, fixing up the suite afterwards."""
    if not has_metaclass(node):
        return
    fixup_parse_tree(node)
    # find metaclasses, keep the last one
    last_metaclass = None
    for suite, i, stmt in find_metas(node):
        last_metaclass = stmt
        stmt.remove()
    text_type = node.children[0].type  # always Leaf(nnn, 'class')
    # figure out what kind of classdef we have
    if len(node.children) == 7:
        # Node(classdef, ['class', 'name', '(', arglist, ')', ':', suite])
        #                 0        1       2    3        4    5    6
        if node.children[3].type == syms.arglist:
            arglist = node.children[3]
        # Node(classdef, ['class', 'name', '(', 'Parent', ')', ':', suite])
        else:
            parent = node.children[3].clone()
            arglist = Node(syms.arglist, [parent])
            node.set_child(3, arglist)
    elif len(node.children) == 6:
        # Node(classdef, ['class', 'name', '(', ')', ':', suite])
        #                 0        1       2    3    4    5
        arglist = Node(syms.arglist, [])
        node.insert_child(3, arglist)
    elif len(node.children) == 4:
        # Node(classdef, ['class', 'name', ':', suite])
        #                 0        1       2    3
        arglist = Node(syms.arglist, [])
        node.insert_child(2, Leaf(token.RPAR, u')'))
        node.insert_child(2, arglist)
        node.insert_child(2, Leaf(token.LPAR, u'('))
    else:
        raise ValueError("Unexpected class definition")
    # BUG FIX: capture the removed __metaclass__ statement's prefix before it
    # is used below -- previously ``orig_meta_prefix`` was never assigned in
    # this function and the empty-suite branch raised NameError.
    meta_txt = last_metaclass.children[0].children[0]
    orig_meta_prefix = meta_txt.prefix
    touch_import(None, u'six', node)
    metaclass = last_metaclass.children[0].children[2].clone()
    metaclass.prefix = u''
    arguments = [metaclass]
    if arglist.children:
        if len(arglist.children) == 1:
            base = arglist.children[0].clone()
            base.prefix = u' '
        else:
            # Unfortunately six.with_metaclass() only allows one base
            # class, so we have to dynamically generate a base class if
            # there is more than one.
            bases = parenthesize(arglist.clone())
            bases.prefix = u' '
            base = Call(Name('type'), [
                String("'NewBase'"),
                Comma(),
                bases,
                Comma(),
                Node(
                    syms.atom,
                    [Leaf(token.LBRACE, u'{'), Leaf(token.RBRACE, u'}')],
                    prefix=u' '
                )
            ], prefix=u' ')
        arguments.extend([Comma(), base])
    arglist.replace(Call(
        Name(u'six.with_metaclass', prefix=arglist.prefix),
        arguments
    ))
    fixup_indent(suite)
    # check for empty suite
    if not suite.children:
        # one-liner that was just __metaclass_
        suite.remove()
        pass_leaf = Leaf(text_type, u'pass')
        pass_leaf.prefix = orig_meta_prefix
        node.append_child(pass_leaf)
        node.append_child(Leaf(token.NEWLINE, u'\n'))
    elif len(suite.children) > 1 and \
            (suite.children[-2].type == token.INDENT and
             suite.children[-1].type == token.DEDENT):
        # there was only one line in the class body and it was __metaclass__
        pass_leaf = Leaf(text_type, u'pass')
        suite.insert_child(-1, pass_leaf)
        suite.insert_child(-1, Leaf(token.NEWLINE, u'\n'))
def transform(self, node, results):
    """Make ``reduce`` available via ``from functools import reduce``."""
    touch_import(u'functools', u'reduce', node)
def transform(self, node, results):
    """Import ``map`` from ``six.moves`` for Py2/Py3 parity."""
    touch_import(u'six.moves', u'map', node)
def touch_import(package, name, node):
    """Thin wrapper delegating to lib2to3's fixer_util.touch_import."""
    fixer_util.touch_import(package, name, node)
def transform(self, node, results):
    """Make ``reload`` available via ``from imp import reload``; keep node."""
    touch_import('imp', 'reload', node)
    return node
def transform(self, node, results):
    """Import ``range`` from six.moves and rename the matched name."""
    touch_import(u'six.moves', u'range', node)
    matched = results['name'][0]
    matched.value = 'range'
def _handle_type2abc(self, node, results, module, abc):
    """Rewrite the match as ``isinstance(obj, <module>.<abc>)``."""
    touch_import(None, module, node)
    target = results["obj"].clone()
    abc_path = ".".join([module, abc])
    args = [target, String(", " + abc_path)]
    return Call(Name("isinstance"), args, prefix=node.prefix)
def touch_import(package, name, node):
    """Ensure ``absolute_import`` is in effect, then add the requested import."""
    add_future(node, 'absolute_import')
    fixer_util.touch_import(package, name, node)
def transform(self, node, results):
    """Import ``filter`` from ``six.moves`` for Py2/Py3 parity."""
    touch_import('six.moves', 'filter', node)
def transform(self, node, results):
    """Replace ``__metaclass__`` assignments with ``with_metaclass(...)``
    (from future.utils) in the class's base list, fixing up the suite."""
    if not has_metaclass(node):
        return
    fixup_parse_tree(node)
    # find metaclasses, keep the last one
    last_metaclass = None
    for suite, i, stmt in find_metas(node):
        last_metaclass = stmt
        stmt.remove()
    text_type = node.children[0].type  # always Leaf(nnn, 'class')
    # figure out what kind of classdef we have
    if len(node.children) == 7:
        # Node(classdef, ['class', 'name', '(', arglist, ')', ':', suite])
        #                 0        1       2    3        4    5    6
        if node.children[3].type == syms.arglist:
            arglist = node.children[3]
        # Node(classdef, ['class', 'name', '(', 'Parent', ')', ':', suite])
        else:
            parent = node.children[3].clone()
            arglist = Node(syms.arglist, [parent])
            node.set_child(3, arglist)
    elif len(node.children) == 6:
        # Node(classdef, ['class', 'name', '(', ')', ':', suite])
        #                 0        1       2    3    4    5
        arglist = Node(syms.arglist, [])
        node.insert_child(3, arglist)
    elif len(node.children) == 4:
        # Node(classdef, ['class', 'name', ':', suite])
        #                 0        1       2    3
        arglist = Node(syms.arglist, [])
        node.insert_child(2, Leaf(token.RPAR, u')'))
        node.insert_child(2, arglist)
        node.insert_child(2, Leaf(token.LPAR, u'('))
    else:
        raise ValueError("Unexpected class definition")
    # now stick the metaclass in the arglist
    meta_txt = last_metaclass.children[0].children[0]
    meta_txt.value = 'metaclass'
    orig_meta_prefix = meta_txt.prefix
    # Was: touch_import(None, u'future.utils', node)
    touch_import(u'future.utils', u'with_metaclass', node)
    metaclass = last_metaclass.children[0].children[2].clone()
    metaclass.prefix = u''
    arguments = [metaclass]
    if arglist.children:
        if len(arglist.children) == 1:
            base = arglist.children[0].clone()
            base.prefix = u' '
        else:
            # Unfortunately six.with_metaclass() only allows one base
            # class, so we have to dynamically generate a base class if
            # there is more than one.
            bases = parenthesize(arglist.clone())
            bases.prefix = u' '
            base = Call(Name('type'), [
                String("'NewBase'"),
                Comma(),
                bases,
                Comma(),
                Node(syms.atom,
                     [Leaf(token.LBRACE, u'{'), Leaf(token.RBRACE, u'}')],
                     prefix=u' ')
            ], prefix=u' ')
        arguments.extend([Comma(), base])
    arglist.replace(
        Call(Name(u'with_metaclass', prefix=arglist.prefix), arguments))
    fixup_indent(suite)
    # check for empty suite
    if not suite.children:
        # one-liner that was just __metaclass_
        suite.remove()
        pass_leaf = Leaf(text_type, u'pass')
        pass_leaf.prefix = orig_meta_prefix
        node.append_child(pass_leaf)
        node.append_child(Leaf(token.NEWLINE, u'\n'))
    elif len(suite.children) > 1 and \
            (suite.children[-2].type == token.INDENT and
             suite.children[-1].type == token.DEDENT):
        # there was only one line in the class body and it was __metaclass__
        pass_leaf = Leaf(text_type, u'pass')
        suite.insert_child(-1, pass_leaf)
        suite.insert_child(-1, Leaf(token.NEWLINE, u'\n'))
def transform(self, node, results):
    """Import ``filter`` from ``six.moves`` for Py2/Py3 parity."""
    touch_import(u'six.moves', u'filter', node)
def transform(self, node, results):
    """Add a compatibility import for builtin ``round`` usages."""
    # Only rewrite names that really refer to the builtin.
    if is_probably_builtin(node):
        # NOTE(review): 'syx' looks like a typo for 'six' -- confirm against
        # the package this fixer is meant to import from.
        touch_import('syx', 'round', node)
        return node
def test_beginning(self):
    """With no docstring or imports, the new import goes first."""
    tree = parse("bar()")
    fixer_util.touch_import(None, "foo", tree)
    self.assertEqual(str(tree), "import foo\nbar()\n\n")
def test_from_import(self):
    """touch_import with a package yields a 'from ... import ...' line."""
    tree = parse("bar()")
    fixer_util.touch_import("cgi", "escape", tree)
    self.assertEqual(str(tree), "from cgi import escape\nbar()\n\n")
def test_name_import(self):
    """touch_import with no package yields a plain 'import ...' line."""
    tree = parse("bar()")
    fixer_util.touch_import(None, "cgi", tree)
    self.assertEqual(str(tree), "import cgi\nbar()\n\n")
def transform(self, node, results):
    """Import ``filter`` from ``six.moves`` (names spelled via six.u)."""
    module = six.u('six.moves')
    symbol = six.u('filter')
    touch_import(module, symbol, node)
def patch_imports(self, types, node):
    """Import typing.Any once if any collected type string mentions it."""
    if any('Any' in typ for typ in types):
        touch_import('typing', 'Any', node)