Example #1
def main(
    debug: bool = False,
    validate: bool = False,
    source_dir: str = ".",
    dest_dir: str = ".",
):
    setup_logger(debug)
    source = Path(source_dir)
    dest = Path(dest_dir)
    stubs = Converter(source, dest).gen_stubs()

    if validate:
        success = True
        from typed_ast.ast3 import parse  # pylint: disable=import-outside-toplevel

        for stub in stubs:
            with open(stub, "r") as f:
                content = f.read()
            try:
                parse(content)
            except Exception:  # pylint: disable=broad-except
                success = False
                log.exception("validation error for %s", stub)

        if not success:
            raise click.ClickException("stub validation failed")
Example #2
def transform(transformer, before):
    tree = parse(before)
    try:
        transformer().visit(tree)
        return unparse(tree).strip()
    except:
        print('Before:')
        print(dump(parse(before)))
        print('After:')
        print(dump(tree))
        raise
Example #3
def parse_single_version(src: str,
                         version: Tuple[int, int]) -> Union[ast.AST, ast3.AST]:
    filename = "<unknown>"
    # typed_ast is needed because of feature version limitations in the builtin ast
    if sys.version_info >= (3, 8) and version >= (3, ):
        return ast.parse(src, filename, feature_version=version)
    elif version >= (3, ):
        if _IS_PYPY:
            return ast3.parse(src, filename)
        else:
            return ast3.parse(src, filename, feature_version=version[1])
    raise AssertionError(
        "INTERNAL ERROR: Tried parsing unsupported Python version!")
Example #4
def test_parse_python_method_args(processor):
    myAst = ast3.parse("""
testing1(1, 2, 3)
testing2(name1=1, name2=2, name3="3")
testing3(1, 2, 3, name=4)
testing4(1, 2, 3, 4, 5, 6, 7)
testing5(1, 2, 3, name3=4)
    """)
    args = processor.parse_python_method_args(myAst.body[0].value,
                                              ['arg1', 'arg2', 'arg3'])
    assert args == {'arg1': 1, 'arg2': 2, 'arg3': 3}

    args = processor.parse_python_method_args(myAst.body[1].value,
                                              ['name1', 'name2', 'name3'])
    assert args == {'name1': 1, 'name2': 2, 'name3': '3'}

    args = processor.parse_python_method_args(myAst.body[2].value,
                                              ['arg1', 'arg2', 'arg3', 'name'])
    assert args == {'arg1': 1, 'arg2': 2, 'arg3': 3, 'name': 4}

    args = processor.parse_python_method_args(myAst.body[3].value,
                                              ['arg1', 'arg2', 'arg3'])
    assert args == {'arg1': 1, 'arg2': 2, 'arg3': 3}

    args = processor.parse_python_method_args(
        myAst.body[4].value,
        ['arg1', 'arg2', 'arg3', 'name1', 'name2', 'name3'])
    assert args == {'arg1': 1, 'arg2': 2, 'arg3': 3, 'name3': 4}
Example #5
def parse_signature_type_comment(type_comment):
    """Parse the fugly signature type comment into AST nodes.

    Caveats: ASTifying **kwargs is impossible with the current grammar so we
    hack it into unary subtraction (to differentiate from Starred in vararg).

    For example from:
    "(str, int, *int, **Any) -> 'SomeReturnType'"

    To:
    ([ast3.Name, ast3.Name, ast3.Name, ast3.Name], ast3.Str)
    """
    try:
        result = ast3.parse(type_comment, "<func_type>", "func_type")
    except SyntaxError as exc:
        raise ValueError(
            f"invalid function signature type comment: {type_comment!r}"
        ) from exc

    assert isinstance(result, ast3.FunctionType)
    if len(result.argtypes) == 1:
        argtypes = result.argtypes[0]
    else:
        argtypes = result.argtypes
    return argtypes, result.returns
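Several of the examples here (and #9, #18 and #26 below) rely on typed_ast's "func_type" parse mode. As a minimal, self-contained sketch (not taken from any of the projects above), this is roughly what that mode returns for a signature type comment:

from typed_ast import ast3

node = ast3.parse("(int, str) -> bool", "<func_type>", "func_type")
assert isinstance(node, ast3.FunctionType)
print([ast3.dump(t) for t in node.argtypes])  # argument type expressions
print(ast3.dump(node.returns))                # return type expression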
Example #6
    def visit_FunctionDef(self, node):
        decorator_names = [
            utils.get_decorator_name(decorator)
            for decorator in node.decorator_list
        ]
        typ = 'property' if '@property' in decorator_names else 'function'
        if any(
                _match(name, self.ignore_decorators)
                for name in decorator_names):
            self._log('Ignoring {} "{}" (decorator whitelisted)'.format(
                typ, node.name))
        elif typ == 'property':
            self._define(self.defined_props, node.name, node)
        else:
            # Function is not a property.
            self._define(self.defined_funcs,
                         node.name,
                         node,
                         ignore=_ignore_function)

        # Detect *args and **kwargs parameters. Python 3 recognizes them
        # in visit_Name. For Python 2 we use this workaround. We can't
        # use visit_arguments, because its node has no lineno.
        for param in [node.args.vararg, node.args.kwarg]:
            if param and isinstance(param, str):
                self._define_variable(param, node, confidence=100)

        if node.type_comment:
            type_comment = ast3.parse(node.type_comment, '<func_type>',
                                      'func_type')
            v = TypeCommentVisitor()
            v.visit(type_comment)
            for name in v.attrs:
                self.used_names.add(name)
Example #7
def main():
    for filename in sys.argv[1:]:
        tree = ast3.parse(pathlib.Path(filename).read_text())
        for mod, cls, name in find_enums(tree):
            old = f"{mod}.{name}"
            new = f"{mod}.{cls}.{name}"
            print(f"{old} {new}")
Example #8
    def visit_Module(self, node: ast3.Module) -> VisitorOutput:
        "Adding the necessary pumbling for the transformed module to work"

        # In the case the transformation is being called from Console, then don't add
        # anything to it
        cursorline_at_end = \
            self.cursorline is not None \
            and len(node.body) > 0 \
            and node.body[-1].lineno < self.cursorline

        self.generic_visit(node)

        if not self.console:
            node.body = (
                ast3.parse(  # type: ignore
                    'import pytropos.internals as pt\n'
                    # 'import pytropos.libs.base\n'
                    'st = pt.Store()\n'
                    'pt.loadBuiltinFuncs(st)\n'
                    # 'st.load_module(pytropos.libs.base, "__builtins__")\n'
                    f'fn = {self.filename!r}\n').body + node.body)

        if cursorline_at_end:
            node.body.append(self._show_store_contents_expr())

        return node
Example #9
def parse_function_type_comment(type_comment: str) -> Optional[FunctionType]:
    """Given a correct type comment, obtain a FunctionType object"""
    if _ast_py3 is None:
        return None

    func_type = _ast_py3.parse(type_comment, "<type_comment>", "func_type")
    return FunctionType(argtypes=func_type.argtypes, returns=func_type.returns)
Example #10
def parse_ast(src: str) -> Union[ast.AST, ast3.AST, ast27.AST]:
    filename = "<unknown>"
    if sys.version_info >= (3, 8):
        # TODO: support Python 4+ ;)
        for minor_version in range(sys.version_info[1], 4, -1):
            try:
                return ast.parse(src,
                                 filename,
                                 feature_version=(3, minor_version))
            except SyntaxError:
                continue
    else:
        for feature_version in (7, 6):
            try:
                return ast3.parse(src,
                                  filename,
                                  feature_version=feature_version)
            except SyntaxError:
                continue
    if ast27.__name__ == "ast":
        raise SyntaxError(
            "The requested source code has invalid Python 3 syntax.\n"
            "If you are trying to format Python 2 files please reinstall Black"
            " with the 'python2' extra: `python3 -m pip install black[python2]`."
        )
    return ast27.parse(src)
Example #11
def get_expression_code(expression, context: Context):
    if isinstance(expression, str):
        expression = ast.parse(expression)
    parser = ExpressionParser(context)
    code = parser.visit(expression)
    code.libraries = parser.libraries
    return code, parser.prepends
Example #12
def _parse(src: str, feature_version: int, filename: str = ""):
  """Call the typed_ast parser with the appropriate feature version."""
  try:
    ast_root_node = ast3.parse(src, filename, feature_version=feature_version)
  except SyntaxError as e:
    raise ParseError(e.msg, line=e.lineno, filename=filename) from e
  return ast_root_node
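The feature_version keyword used above tells typed_ast which Python 3 minor version's syntax to accept. A minimal sketch of the same call pattern outside the helper (the source string and version are made up for illustration):

from typed_ast import ast3

src = "async def fetch():\n    return 42\n"
try:
    tree = ast3.parse(src, "<example>", feature_version=7)
except SyntaxError as e:
    print(f"parse error on line {e.lineno}: {e.msg}")
else:
    print(type(tree).__name__)  # Module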
Example #13
    def __init__(self, source: str, type_graph: TypeLatticeGenerator):
        self.__type_graph = type_graph
        self.__node_to_id: Dict[Any, int] = {}
        self.__id_to_node: List[Any] = []

        self.__symbol_to_supernode_id: Dict[Symbol, int] = {}

        self.__edges: Dict[EdgeType, Dict[int, Set[int]]] = {
            e: defaultdict(set)
            for e in EdgeType
        }

        self.__ast = parse(source)
        self.__scope_symtable = [symtable(source, 'file.py', 'exec')]

        self.__imported_symbols = {
        }  # type: Dict[TypeAnnotationNode, TypeAnnotationNode]

        # For the CHILD edges
        self.__current_parent_node: Optional[AST] = None

        # For the NEXT_TOKEN edges
        self.__backbone_sequence: List[TokenNode] = []
        self.__prev_token_node: Optional[TokenNode] = None

        # For the RETURNS_TO edge
        self.__return_scope: Optional[AST] = None

        # For the OCCURRENCE_OF and Supernodes
        self.__variable_like_symbols: Dict[Any, SymbolInformation] = {}

        # Last Lexical Use
        self.__last_lexical_use: Dict[Any, Any] = {}
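The constructor above pairs a typed_ast parse with the standard-library symtable module for scope information. A minimal sketch of just those two calls, assuming typed_ast is installed (the sample source is made up):

from typed_ast.ast3 import parse
from symtable import symtable

source = "def f(a):\n    b = a + 1\n    return b\n"
tree = parse(source)                         # typed_ast Module node
table = symtable(source, "file.py", "exec")  # top-level scope information
print(type(tree).__name__)                           # Module
print([s.get_name() for s in table.get_symbols()])   # e.g. ['f']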
Example #14
def _parse_string_annotation(node):
    assert hasattr(node, 's')
    try:
        node = parse(node.s, '', mode='eval')
        return _parse_recursive(node.body)
    except SyntaxError:
        return None
Example #15
    def annotate(self, fpath, pred_idx, type_idx):
        self.__reset()

        if pred_idx == -1:
            self.__sift(fpath=fpath)
        else:
            self.__sift(pred_idx=pred_idx)

        # if no proper (i.e. non-property-access) predictions are for this file,
        # or the predictions are fewer than args.top
        if len(self.__rel_lines) == 0 or type_idx >= len(
                self.__rel_lines[0]["predicted_annotation_logprob_dist"]):
            return fpath

        self.__type_idx = type_idx

        with open(fpath) as src:
            self.__tree = parse(src.read())
        # self.__PP.pprint(dump(tree))

        new_tree = self.visit(self.__tree)
        if self.__unmodified:
            return fpath
        self.__add_type_imports(self.__get_types_2_import())
        new_tree = fix_missing_locations(new_tree)

        OLD_EXT = ".py"
        NEW_EXT = f"_tpl_{type_idx}.py"
        new_fpath = rreplace(fpath, OLD_EXT, NEW_EXT, 1)
        with open(new_fpath, "w", encoding="utf8") as dst:
            dst.write(typed_astunparse.unparse(new_tree))

        return new_fpath
Example #16
def use(fn: Callable[..., Any]):
    t = ast.parse(inspect.getsource(fn))
    assert len(t.body) == 1
    body = t.body[0].body

    for node in body:
        print(ast.dump(node))
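A hypothetical call to the helper above (add_one is invented for illustration; inspect.getsource needs the function to be defined in a real source file):

def add_one(x):
    y = x + 1
    return y

use(add_one)  # prints one ast.dump(...) line per statement in add_one's body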
Example #17
def _parse_string_annotation(node):
    assert hasattr(node, "s")
    try:
        node = parse(node.s, "", mode="eval")
        return _parse_recursive(node.body)
    except SyntaxError:
        return None
Example #18
def parse_function_type_comment(type_comment: str) -> Optional[FunctionType]:
    """Given a correct type comment, obtain a FunctionType object"""
    if _ast_py3 is None:
        return None

    func_type = _ast_py3.parse(type_comment, "<type_comment>", "func_type")
    return FunctionType(argtypes=func_type.argtypes, returns=func_type.returns)
Example #19
def test_variables_replacer():
    tree = ast.parse('''
from f.f import f as f
import f as f

class f(f):
    def f(f):
        f = f
        for f in f:
            with f as f:
                yield f
        return f

    ''')
    VariablesReplacer.replace(tree, {'f': 'x'})
    code = unparse(tree)

    expected = '''
from x.x import x as x
import x as x

class x(x):

    def x(x):
        x = x
        for x in x:
            with x as x:
                (yield x)
        return x
    '''

    assert code.strip() == expected.strip()
Example #20
def test_variables_finder():
    tree = ast.parse('''
let(a)
x = 1
let(b)
    ''')
    assert find_variables(tree) == ['a', 'b']
Example #21
    def test_something(self):
        tree = ast.parse(TEST_CODE)
        m = ModuleParser()
        m.visit(tree)
        TypeDB.get_type_by_name("int")
        TypeDB.get_type_by_name("float")
        TypeDB.get_string()
Example #22
def retype_file(src, pyi_dir, targets, *, quiet=False, hg=False):
    """Retype `src`, finding types in `pyi_dir`. Save in `targets`.

    The file should remain formatted exactly as it was before, save for:
    - annotations
    - additional imports needed to satisfy annotations
    - additional module-level names needed to satisfy annotations

    Type comments in sources are normalized to type annotations.
    """
    with tokenize.open(src) as src_buffer:
        src_encoding = src_buffer.encoding
        src_node = lib2to3_parse(src_buffer.read())
    try:
        with open((pyi_dir / src.name).with_suffix('.pyi')) as pyi_file:
            pyi_txt = pyi_file.read()
    except FileNotFoundError:
        if not quiet:
            print(
                f'warning: .pyi file for source {src} not found in {pyi_dir}',
                file=sys.stderr,
            )
    else:
        pyi_ast = ast3.parse(pyi_txt)
        assert isinstance(pyi_ast, ast3.Module)
        reapply_all(pyi_ast.body, src_node)
    fix_remaining_type_comments(src_node)
    targets.mkdir(parents=True, exist_ok=True)
    with open(targets / src.name, 'w', encoding=src_encoding) as target_file:
        target_file.write(lib2to3_unparse(src_node, hg=hg))
    return targets / src.name
Example #23
def _transform(path: str, code: str,
               target: CompilationTarget) -> Tuple[str, List[str]]:
    """Applies all transformation for passed target."""
    dependencies = []  # type: List[str]

    for transformer in transformers:
        tree = ast.parse(code, path)
        if transformer.target < target:
            continue

        try:
            result = transformer.transform(tree)
        except:
            raise TransformationError(path, transformer, dump(tree),
                                      format_exc())

        if not result.tree_changed:
            continue

        dependencies.extend(result.dependencies)

        try:
            code = unparse(tree)
        except:
            raise TransformationError(path, transformer, dump(tree),
                                      format_exc())

    return fix_code(code), dependencies
Example #24
    def load_project(self):
        """
        This method simply finds all the *.py files and loads them into ASTs and adds them all to the python_file_asts
        dictionary for processing.
        :return: None
        """
        # Find all of the python files
        glob_path = os.path.join(self.working_dir, '**', '*.py')
        project_files = glob.glob(glob_path, recursive=True)
        project_files = list(filter(lambda x: os.path.isfile(x),
                                    project_files))

        # Loop through the files looking for endpoints
        for f in project_files:
            try:
                with codecs.open(f, 'r', 'utf-8', 'ignore') as fh:
                    code = fh.read()
            except UnicodeDecodeError as e:
                logger.warning("There was an error decoding '%s': %s",
                               self.strip_work_dir(f), e)
                continue
            # Use typed_ast library to parse the code for easier analysis
            try:
                # Try parsing as python 3.5 code
                tree = ast3.parse(code)
            except SyntaxError:
                tree = self._load_27_code(code)

            if tree:
                self.python_file_asts[self.strip_work_dir(f)] = tree
Example #25
def test_flatten_ast(title, source, expected):
    expected = regex.sub(r" +# .+", "", expected)
    pseudo_hash.reset()
    print(title)
    print("-" * len(title))
    result = flatten_ast(ast.parse(source)).strip()
    print(result)
    assert expected in result
Example #26
def parse_function_type_comment(type_comment: str) -> FunctionType | None:
    """Given a correct type comment, obtain a FunctionType object"""
    if _ast_py3 is None:
        return None

    func_type = _ast_py3.parse(type_comment, "<type_comment>",
                               "func_type")  # type: ignore[attr-defined]
    return FunctionType(argtypes=func_type.argtypes, returns=func_type.returns)
Example #27
    def get_body(self, **snippet_kwargs: Variable) -> List[ast.AST]:
        """Get AST of snippet body with replaced variables."""
        source = get_source(self._fn)
        tree = ast.parse(source)
        variables = self._get_variables(tree, snippet_kwargs)
        extend_tree(tree, variables)
        VariablesReplacer.replace(tree, variables)
        return tree.body[0].body  # type: ignore
Example #28
    def scan_code(self, code: str) -> Sequence[ast.AST]:
        try:
            # horast can parse and preserve comments
            tree = horast.parse(code)
        except (ValueError, TypeError):
            # fallback to regular typed ast
            tree = ast.parse(code)
        return self.scan_ast(tree)
Example #29
    def maybe_visit_statement_type_comment(self, node):
        if node.type_comment:
            type_comment = ast3.parse(node.type_comment, '<type_comment>',
                                      'eval')
            v = TypeCommentVisitor()
            v.visit(type_comment)
            for name in v.attrs:
                self.used_names.add(name)
Example #30
    def _Class(self, node: ET.Element):  # pylint: disable=invalid-name
        context = node.attrib['context']
        assert context in self.namespaces, context

        cls_name = node.attrib['name']
        generic_args = []  # filled only for template classes
        if '<' in cls_name:
            _LOG.warning('processing template class %s', cls_name)
            assert '>' in cls_name
            cls_name, _, rest = cls_name.partition('<')
            rest = rest[:-1]
            generic_args = [_.strip() for _ in rest.split(',')]
            _LOG.warning('found generic args: %s', generic_args)

        full_name = '{}::{}'.format(self.namespaces[context], cls_name)
        is_stl_class = full_name in CPP_STL_CLASSES and generic_args
        value_type = None

        body = []
        for member_id in node.attrib['members'].split():
            if not is_stl_class:
                # TODO: handle non-STL classes too
                break
            if member_id not in self.all_types:
                continue
            member_type = self.all_types[member_id]
            if member_type.tag == 'Typedef' and member_type.attrib[
                    'name'] == 'value_type':
                referenced_id = member_type.attrib['type']
                assert referenced_id in self.all_types
                if referenced_id not in self.relevant_types \
                        and referenced_id not in self._new_relevant_types:
                    self._new_relevant_types[referenced_id] = self.all_types[
                        referenced_id]
                    _LOG.debug(
                        'type marked as relevant due to being container value type %s',
                        ET.tostring(
                            self.all_types[referenced_id]).decode().rstrip())
                body.append(typed_ast3.Expr(typed_ast3.Str(referenced_id, '')))
                value_type = referenced_id
            '''
            if member_id not in self.relevant_types and member_id not in self._new_relevant_types:
                self._new_relevant_types[member_id] = member_type
                _LOG.warning('marked %s as relevant type',
                             ET.tostring(member_type).decode().rstrip())
            body.append(typed_ast3.Expr(typed_ast3.Str(member_id, '')))
            '''

        base_class = typed_ast3.parse(CPP_PYTHON_CLASS_PAIRS[full_name],
                                      mode='eval').body

        if is_stl_class:
            assert value_type is not None
            base_class = typed_ast3.Subscript(
                value=base_class,
                slice=typed_ast3.Index(typed_ast3.Str(value_type, '')),
                ctx=typed_ast3.Load())

        return base_class
Example #31
def parse_type_comment(annotation: str) -> Optional[TypeAnnotationNode]:
    try:
        node = parse(annotation, '', mode='eval')
    except SyntaxError:
        return None
    try:
        return _parse_recursive(node.body)
    except Exception:
        return None
Example #32
def parse_type_comment(type_comment: str, line: int, errors: Optional[Errors]) -> Optional[Type]:
    try:
        typ = ast3.parse(type_comment, '<type_comment>', 'eval')
    except SyntaxError as e:
        if errors is not None:
            errors.report(line, e.offset, TYPE_COMMENT_SYNTAX_ERROR, blocker=True)
            return None
        else:
            raise
    else:
        assert isinstance(typ, ast3.Expression)
        return TypeConverter(errors, line=line).visit(typ.body)
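Bare type comments are parsed in "eval" mode, which wraps the annotation in an ast3.Expression. A standalone sketch (illustrative only) of what the parse call above produces:

from typed_ast import ast3

typ = ast3.parse("List[int]", "<type_comment>", "eval")
assert isinstance(typ, ast3.Expression)
print(ast3.dump(typ.body))  # Subscript(value=Name(id='List', ...), ...)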
Example #33
def parse(source: Union[str, bytes],
          fnam: str,
          module: Optional[str],
          errors: Optional[Errors] = None,
          options: Optional[Options] = None) -> MypyFile:

    """Parse a source file, without doing any semantic analysis.

    Return the parse tree. If errors is not provided, raise ParseError
    on failure. Otherwise, use the errors object to report parse errors.
    """
    raise_on_error = False
    if errors is None:
        errors = Errors()
        raise_on_error = True
    if options is None:
        options = Options()
    errors.set_file(fnam, module)
    is_stub_file = fnam.endswith('.pyi')
    try:
        if is_stub_file:
            feature_version = defaults.PYTHON3_VERSION[1]
        else:
            assert options.python_version[0] >= 3
            feature_version = options.python_version[1]
        ast = ast3.parse(source, fnam, 'exec', feature_version=feature_version)

        tree = ASTConverter(options=options,
                            is_stub=is_stub_file,
                            errors=errors,
                            ).visit(ast)
        tree.path = fnam
        tree.is_stub = is_stub_file
    except SyntaxError as e:
        errors.report(e.lineno, e.offset, e.msg, blocker=True)
        tree = MypyFile([], [], False, set())

    if raise_on_error and errors.is_errors():
        errors.raise_error()

    return tree
Example #34
    def do_func_def(self, n: Union[ast3.FunctionDef, ast3.AsyncFunctionDef],
                    is_coroutine: bool = False) -> Union[FuncDef, Decorator]:
        """Helper shared between visit_FunctionDef and visit_AsyncFunctionDef."""
        no_type_check = bool(n.decorator_list and
                             any(is_no_type_check_decorator(d) for d in n.decorator_list))

        args = self.transform_args(n.args, n.lineno, no_type_check=no_type_check)

        arg_kinds = [arg.kind for arg in args]
        arg_names = [arg.variable.name() for arg in args]  # type: List[Optional[str]]
        arg_names = [None if argument_elide_name(name) else name for name in arg_names]
        if special_function_elide_names(n.name):
            arg_names = [None] * len(arg_names)
        arg_types = []  # type: List[Optional[Type]]
        if no_type_check:
            arg_types = [None] * len(args)
            return_type = None
        elif n.type_comment is not None:
            try:
                func_type_ast = ast3.parse(n.type_comment, '<func_type>', 'func_type')
                assert isinstance(func_type_ast, ast3.FunctionType)
                # for ellipsis arg
                if (len(func_type_ast.argtypes) == 1 and
                        isinstance(func_type_ast.argtypes[0], ast3.Ellipsis)):
                    if n.returns:
                        # PEP 484 disallows both type annotations and type comments
                        self.fail(messages.DUPLICATE_TYPE_SIGNATURES, n.lineno, n.col_offset)
                    arg_types = [a.type_annotation
                                 if a.type_annotation is not None
                                 else AnyType(TypeOfAny.unannotated)
                                 for a in args]
                else:
                    # PEP 484 disallows both type annotations and type comments
                    if n.returns or any(a.type_annotation is not None for a in args):
                        self.fail(messages.DUPLICATE_TYPE_SIGNATURES, n.lineno, n.col_offset)
                    translated_args = (TypeConverter(self.errors, line=n.lineno)
                                       .translate_expr_list(func_type_ast.argtypes))
                    arg_types = [a if a is not None else AnyType(TypeOfAny.unannotated)
                                for a in translated_args]
                return_type = TypeConverter(self.errors,
                                            line=n.lineno).visit(func_type_ast.returns)

                # add implicit self type
                if self.in_class() and len(arg_types) < len(args):
                    arg_types.insert(0, AnyType(TypeOfAny.special_form))
            except SyntaxError:
                self.fail(TYPE_COMMENT_SYNTAX_ERROR, n.lineno, n.col_offset)
                if n.type_comment and n.type_comment[0] != "(":
                    self.note('Suggestion: wrap argument types in parentheses',
                              n.lineno, n.col_offset)
                arg_types = [AnyType(TypeOfAny.from_error)] * len(args)
                return_type = AnyType(TypeOfAny.from_error)
        else:
            arg_types = [a.type_annotation for a in args]
            return_type = TypeConverter(self.errors, line=n.returns.lineno
                                        if n.returns else n.lineno).visit(n.returns)

        for arg, arg_type in zip(args, arg_types):
            self.set_type_optional(arg_type, arg.initializer)

        func_type = None
        if any(arg_types) or return_type:
            if len(arg_types) != 1 and any(isinstance(t, EllipsisType) for t in arg_types):
                self.fail("Ellipses cannot accompany other argument types "
                          "in function type signature.", n.lineno, 0)
            elif len(arg_types) > len(arg_kinds):
                self.fail('Type signature has too many arguments', n.lineno, 0)
            elif len(arg_types) < len(arg_kinds):
                self.fail('Type signature has too few arguments', n.lineno, 0)
            else:
                func_type = CallableType([a if a is not None else
                                          AnyType(TypeOfAny.unannotated) for a in arg_types],
                                         arg_kinds,
                                         arg_names,
                                         return_type if return_type is not None else
                                         AnyType(TypeOfAny.unannotated),
                                         _dummy_fallback)

        func_def = FuncDef(n.name,
                           args,
                           self.as_required_block(n.body, n.lineno),
                           func_type)
        if is_coroutine:
            # A coroutine is also a generator, mostly for internal reasons.
            func_def.is_generator = func_def.is_coroutine = True
        if func_type is not None:
            func_type.definition = func_def
            func_type.line = n.lineno

        if n.decorator_list:
            var = Var(func_def.name())
            var.is_ready = False
            var.set_line(n.decorator_list[0].lineno)

            func_def.is_decorated = True
            func_def.set_line(n.lineno + len(n.decorator_list))
            func_def.body.set_line(func_def.get_line())
            return Decorator(func_def, self.translate_expr_list(n.decorator_list), var)
        else:
            return func_def
Example #35
    def visit_FunctionDef(self, n: ast27.FunctionDef) -> Statement:
        lineno = n.lineno
        converter = TypeConverter(self.errors, line=lineno,
                                  assume_str_is_unicode=self.unicode_literals)
        args, decompose_stmts = self.transform_args(n.args, lineno)

        arg_kinds = [arg.kind for arg in args]
        arg_names = [arg.variable.name() for arg in args]  # type: List[Optional[str]]
        arg_names = [None if argument_elide_name(name) else name for name in arg_names]
        if special_function_elide_names(n.name):
            arg_names = [None] * len(arg_names)

        arg_types = []  # type: List[Optional[Type]]
        type_comment = n.type_comment
        if (n.decorator_list and any(is_no_type_check_decorator(d) for d in n.decorator_list)):
            arg_types = [None] * len(args)
            return_type = None
        elif type_comment is not None and len(type_comment) > 0:
            try:
                func_type_ast = ast3.parse(type_comment, '<func_type>', 'func_type')
                assert isinstance(func_type_ast, FunctionType)
                # for ellipsis arg
                if (len(func_type_ast.argtypes) == 1 and
                        isinstance(func_type_ast.argtypes[0], ast3_Ellipsis)):
                    arg_types = [a.type_annotation
                                 if a.type_annotation is not None
                                 else AnyType(TypeOfAny.unannotated)
                                 for a in args]
                else:
                    # PEP 484 disallows both type annotations and type comments
                    if any(a.type_annotation is not None for a in args):
                        self.fail(messages.DUPLICATE_TYPE_SIGNATURES, lineno, n.col_offset)
                    arg_types = [a if a is not None else AnyType(TypeOfAny.unannotated) for
                                 a in converter.translate_expr_list(func_type_ast.argtypes)]
                return_type = converter.visit(func_type_ast.returns)

                # add implicit self type
                if self.in_class() and len(arg_types) < len(args):
                    arg_types.insert(0, AnyType(TypeOfAny.special_form))
            except SyntaxError:
                self.fail(TYPE_COMMENT_SYNTAX_ERROR, lineno, n.col_offset)
                arg_types = [AnyType(TypeOfAny.from_error)] * len(args)
                return_type = AnyType(TypeOfAny.from_error)
        else:
            arg_types = [a.type_annotation for a in args]
            return_type = converter.visit(None)

        for arg, arg_type in zip(args, arg_types):
            self.set_type_optional(arg_type, arg.initializer)

        func_type = None
        if any(arg_types) or return_type:
            if len(arg_types) != 1 and any(isinstance(t, EllipsisType) for t in arg_types):
                self.fail("Ellipses cannot accompany other argument types "
                          "in function type signature.", lineno, 0)
            elif len(arg_types) > len(arg_kinds):
                self.fail('Type signature has too many arguments', lineno, 0)
            elif len(arg_types) < len(arg_kinds):
                self.fail('Type signature has too few arguments', lineno, 0)
            else:
                any_type = AnyType(TypeOfAny.unannotated)
                func_type = CallableType([a if a is not None else any_type for a in arg_types],
                                        arg_kinds,
                                        arg_names,
                                        return_type if return_type is not None else any_type,
                                        _dummy_fallback)

        body = self.as_required_block(n.body, lineno)
        if decompose_stmts:
            body.body = decompose_stmts + body.body
        func_def = FuncDef(n.name,
                           args,
                           body,
                           func_type)
        if isinstance(func_def.type, CallableType):
            # semanal.py does some in-place modifications we want to avoid
            func_def.unanalyzed_type = func_def.type.copy_modified()
        if func_type is not None:
            func_type.definition = func_def
            func_type.line = lineno

        if n.decorator_list:
            var = Var(func_def.name())
            var.is_ready = False
            var.set_line(n.decorator_list[0].lineno)

            func_def.is_decorated = True
            func_def.set_line(lineno + len(n.decorator_list))
            func_def.body.set_line(func_def.get_line())
            dec = Decorator(func_def, self.translate_expr_list(n.decorator_list), var)
            dec.set_line(lineno, n.col_offset)
            return dec
        else:
            # Overrides set_line -- can't use self.set_line
            func_def.set_line(lineno, n.col_offset)
            return func_def