def check_or_generate_pyi(options, errorlog, loader):
  """Returns generated errors and result pyi or None if it's only check.

  Args:
    options: config.Options object.
    errorlog: errors.ErrorLog object.
    loader: load_pytd.Loader object.

  Returns:
    A tuple, (PYI Ast as string, AST) or None.
  """
  # Defaults used if analysis fails but we still want to emit a stub.
  result = pytd_builtins.DEFAULT_SRC
  ast = pytd_builtins.GetDefaultAst(options.python_version)
  try:
    if options.check:
      check_py(input_filename=options.input, errorlog=errorlog,
               options=options, loader=loader)
      return None
    else:
      result, ast = generate_pyi(input_filename=options.input,
                                 errorlog=errorlog, options=options,
                                 loader=loader)
  except utils.UsageError:
    # Usage errors are fatal; propagate them to the caller unchanged.
    raise
  except pyc.CompileError as e:
    errorlog.python_compiler_error(options.input, e.lineno, e.error)
  except IndentationError as e:
    errorlog.python_compiler_error(options.input, e.lineno, e.msg)
  except tokenize.TokenError as e:
    msg, (lineno, unused_column) = e.args  # pylint: disable=unbalanced-tuple-unpacking
    errorlog.python_compiler_error(options.input, lineno, msg)
  except directors.SkipFile:
    result += "# skip-file found, file not analyzed"
  except Exception as e:  # pylint: disable=broad-except
    if options.nofail:
      # Best-effort mode: log the failure and embed the traceback in the
      # generated stub as comments instead of aborting.
      # NOTE: log.warning replaces the deprecated log.warn alias.
      log.warning("***Caught exception: %s", str(e), exc_info=True)
      if not options.check:
        result += (  # pytype: disable=name-error
            "# Caught error in pytype: " + str(e).replace("\n", "\n#") +
            "\n# " + "\n# ".join(traceback.format_exc().splitlines()))
    else:
      # Prepend the input filename to the exception message, then re-raise.
      e.args = (
          str(utils.message(e)) + "\nFile: %s" % options.input,) + e.args[1:]
      raise
  return (result, ast)
def write_pickle(ast, loader, options):
  """Dump a pickle of the ast to a file.

  Args:
    ast: The TypeDeclUnit to pickle.
    loader: A load_pytd.Loader instance, used to resolve dependencies.
    options: config.Options object.

  Raises:
    AssertionError: If options.verify_pickle is set and the re-loaded AST
      does not match the exported one.
    parser.ParseError: On export failure, unless options.nofail is set.
  """
  try:
    ast = serialize_ast.PrepareForExport(
        options.module_name, options.python_version, ast, loader)
  except parser.ParseError as e:
    if options.nofail:
      # Best-effort mode: fall back to pickling an empty (default) AST.
      ast = serialize_ast.PrepareForExport(
          options.module_name, options.python_version,
          pytd_builtins.GetDefaultAst(options.python_version), loader)
      # NOTE: log.warning replaces the deprecated log.warn alias.
      log.warning("***Caught exception: %s", str(e), exc_info=True)
    else:
      raise
  if options.verify_pickle:
    # Round-trip check: the pickled AST, once re-loaded, must be
    # structurally equal to the AST we are about to store.
    ast1 = ast.Visit(visitors.LateTypeToClassType())
    ast1 = ast1.Visit(visitors.ClearClassPointers())
    ast2 = loader.load_file(options.module_name, options.verify_pickle)
    ast2 = ast2.Visit(visitors.ClearClassPointers())
    if not ast1.ASTeq(ast2):
      # Include a message so the failure is diagnosable (the original raised
      # a bare AssertionError with no context).
      raise AssertionError(
          "Pickled AST does not match the original AST for module %s" %
          options.module_name)
  serialize_ast.StoreAst(ast, options.output)
def infer_types(src, errorlog, options, loader, filename=None, deep=True,
                init_maximum_depth=INIT_MAXIMUM_DEPTH,
                show_library_calls=False, maximum_depth=None, tracer_vm=None,
                **kwargs):
  """Given Python source return its types.

  Args:
    src: A string containing Python source code.
    errorlog: Where error messages go. Instance of errors.ErrorLog.
    options: config.Options object
    loader: A load_pytd.Loader instance to load PYI information.
    filename: Filename of the program we're parsing.
    deep: If True, analyze all functions, even the ones not called by the
      main execution flow.
    init_maximum_depth: Depth of analysis during module loading.
    show_library_calls: If True, call traces are kept in the output.
    maximum_depth: Depth of the analysis. Default: unlimited.
    tracer_vm: An instance of CallTracer, in case the caller wants to
      instantiate and retain the vm used for type inference.
    **kwargs: Additional parameters to pass to vm.VirtualMachine

  Returns:
    A tuple of (ast: TypeDeclUnit, builtins: TypeDeclUnit)

  Raises:
    AssertionError: In case of a bad parameter combination.
  """
  # Reuse the caller-supplied vm when one was provided; otherwise build one.
  if tracer_vm is None:
    tracer = CallTracer(errorlog=errorlog, options=options,
                        generate_unknowns=options.protocols,
                        store_all_calls=not deep, loader=loader, **kwargs)
  else:
    assert isinstance(tracer_vm, CallTracer)
    tracer = tracer_vm
  loc, defs = tracer.run_program(src, filename, init_maximum_depth)
  log.info("===Done running definitions and module-level code===")
  snapshotter = metrics.get_metric("memory", metrics.Snapshot)
  snapshotter.take_snapshot("analyze:infer_types:tracer")
  if deep:
    if maximum_depth is None:
      if not options.quick:
        maximum_depth = MAXIMUM_DEPTH
      elif options.analyze_annotated:
        # Since there's no point in analyzing annotated functions for
        # inference, the presence of this option means that the user wants
        # checking, too.
        maximum_depth = QUICK_CHECK_MAXIMUM_DEPTH
      else:
        maximum_depth = QUICK_INFER_MAXIMUM_DEPTH
    tracer.exitpoint = tracer.analyze(loc, defs, maximum_depth)
  else:
    tracer.exitpoint = loc
  snapshotter.take_snapshot("analyze:infer_types:post")
  ast = tracer.compute_types(defs)
  ast = tracer.loader.resolve_ast(ast)
  has_dynamic_marker = any(
      marker in defs for marker in abstract_utils.DYNAMIC_ATTRIBUTE_MARKERS)
  if tracer.has_unknown_wildcard_imports or has_dynamic_marker:
    # Ensure the module exposes a __getattr__, adding a default one if absent.
    try:
      ast.Lookup("__getattr__")
    except KeyError:
      ast = pytd_utils.Concat(
          ast, builtins.GetDefaultAst(options.python_version))
  # If merged with other if statement, triggers a ValueError: Unresolved class
  # when attempts to load from the protocols file
  if options.protocols:
    protocols_pytd = tracer.loader.import_name("protocols")
  else:
    protocols_pytd = None
  builtins_pytd = tracer.loader.concat_all()
  # Insert type parameters, where appropriate
  ast = ast.Visit(visitors.CreateTypeParametersForSignatures())
  if options.protocols:
    log.info("=========== PyTD to solve =============\n%s",
             pytd_utils.Print(ast))
    ast = convert_structural.convert_pytd(ast, builtins_pytd, protocols_pytd)
  elif not show_library_calls:
    log.info("Solving is turned off. Discarding call traces.")
    # Rename remaining "~unknown" to "?"
    ast = ast.Visit(visitors.RemoveUnknownClasses())
    # Remove "~list" etc.:
    ast = convert_structural.extract_local(ast)
  _maybe_output_debug(options, tracer.program)
  return ast, builtins_pytd
def process_one_file(options):
  """Check a .py file or generate a .pyi for it, according to options.

  Args:
    options: config.Options object.

  Returns:
    An error code (0 means no error).
  """
  log.info("Process %s => %s", options.input, options.output)
  errorlog = errors.ErrorLog()
  # Defaults used if analysis fails but we still want to emit a stub.
  result = pytd_builtins.DEFAULT_SRC
  ast = pytd_builtins.GetDefaultAst(options.python_version)
  loader = load_pytd.create_loader(options)
  try:
    if options.check:
      check_py(input_filename=options.input, errorlog=errorlog,
               options=options, loader=loader)
    else:
      result, ast = generate_pyi(input_filename=options.input,
                                 errorlog=errorlog, options=options,
                                 loader=loader)
  except utils.UsageError as e:
    logging.error("Usage error: %s\n", utils.message(e))
    return 1
  except pyc.CompileError as e:
    errorlog.python_compiler_error(options.input, e.lineno, e.error)
  except IndentationError as e:
    errorlog.python_compiler_error(options.input, e.lineno, e.msg)
  except tokenize.TokenError as e:
    msg, (lineno, unused_column) = e.args  # pylint: disable=unbalanced-tuple-unpacking
    errorlog.python_compiler_error(options.input, lineno, msg)
  except directors.SkipFile:
    result += "# skip-file found, file not analyzed"
  except Exception as e:  # pylint: disable=broad-except
    if options.nofail:
      # Best-effort mode: log the failure and embed the traceback in the
      # generated stub as comments instead of aborting.
      # NOTE: log.warning replaces the deprecated log.warn alias.
      log.warning("***Caught exception: %s", str(e), exc_info=True)
      if not options.check:
        result += (  # pytype: disable=name-error
            "# Caught error in pytype: " + str(e).replace("\n", "\n#") +
            "\n# " + "\n# ".join(traceback.format_exc().splitlines()))
    else:
      # Prepend the input filename to the exception message, then re-raise.
      e.args = (
          str(utils.message(e)) + "\nFile: %s" % options.input,) + e.args[1:]
      raise
  if not options.check:
    if options.output == "-" or not options.output:
      sys.stdout.write(result)
    else:
      log.info("write pyi %r => %r", options.input, options.output)
      with open(options.output, "w") as f:
        f.write(result)
      if options.output_pickled:
        write_pickle(ast, loader, options)
  exit_status = handle_errors(errorlog, options)
  # If we have set return_success, set exit_status to 0 after the regular
  # error handler has been called.
  if options.return_success:
    exit_status = 0
  # Touch output file upon success.
  if options.touch and not exit_status:
    with open(options.touch, "a"):
      os.utime(options.touch, None)
  return exit_status
def infer_types(src, errorlog, options, loader,
                filename=None, run_builtins=True, deep=True,
                cache_unknowns=False, show_library_calls=False,
                analyze_annotated=False,
                init_maximum_depth=INIT_MAXIMUM_DEPTH, maximum_depth=None):
  """Given Python source return its types.

  Args:
    src: A string containing Python source code.
    errorlog: Where error messages go. Instance of errors.ErrorLog.
    options: config.Options object
    loader: A load_pytd.Loader instance to load PYI information.
    filename: Filename of the program we're parsing.
    run_builtins: Whether to preload the native Python builtins when running
      the program.
    deep: If True, analyze all functions, even the ones not called by the
      main execution flow.
    cache_unknowns: If True, do a faster approximation of unknown types.
    show_library_calls: If True, call traces are kept in the output.
    analyze_annotated: If True, analyze methods with type annotations, too.
    init_maximum_depth: Depth of analysis during module loading.
    maximum_depth: Depth of the analysis. Default: unlimited.

  Returns:
    A TypeDeclUnit

  Raises:
    AssertionError: In case of a bad parameter combination.
  """
  tracer = CallTracer(errorlog=errorlog, options=options,
                      module_name=get_module_name(filename, options),
                      cache_unknowns=cache_unknowns,
                      analyze_annotated=analyze_annotated,
                      generate_unknowns=options.protocols,
                      store_all_calls=not deep, loader=loader)
  loc, defs = tracer.run_program(src, filename, init_maximum_depth,
                                 run_builtins)
  log.info("===Done running definitions and module-level code===")
  snapshotter = metrics.get_metric("memory", metrics.Snapshot)
  snapshotter.take_snapshot("infer:infer_types:tracer")
  if deep:
    tracer.exitpoint = tracer.analyze(loc, defs, maximum_depth)
  else:
    tracer.exitpoint = loc
  snapshotter.take_snapshot("infer:infer_types:post")
  ast = tracer.compute_types(defs)
  ast = tracer.loader.resolve_ast(ast)
  if tracer.has_unknown_wildcard_imports or ("HAS_DYNAMIC_ATTRIBUTES" in defs
                                             or "has_dynamic_attributes"
                                             in defs):
    # Ensure the module exposes a __getattr__, adding a default one if absent.
    try:
      ast.Lookup("__getattr__")
    except KeyError:
      ast = pytd_utils.Concat(
          ast, builtins.GetDefaultAst(options.python_version))
  # If merged with other if statement, triggers a ValueError: Unresolved class
  # when attempts to load from the protocols file
  if options.protocols:
    protocols_pytd = tracer.loader.import_name("protocols")
  else:
    protocols_pytd = None
  builtins_pytd = tracer.loader.concat_all()
  # Insert type parameters, where appropriate
  ast = ast.Visit(visitors.CreateTypeParametersForSignatures())
  if options.protocols:
    log.info("=========== PyTD to solve =============\n%s", pytd.Print(ast))
    ast = convert_structural.convert_pytd(ast, builtins_pytd, protocols_pytd)
  elif not show_library_calls:
    log.info("Solving is turned off. Discarding call traces.")
    # Rename remaining "~unknown" to "?"
    ast = ast.Visit(visitors.RemoveUnknownClasses())
    # Remove "~list" etc.:
    ast = convert_structural.extract_local(ast)
  if options.output_cfg or options.output_typegraph:
    if options.output_cfg and options.output_typegraph:
      raise AssertionError("Can output CFG or typegraph, but not both")
    # set() instead of set([]): the empty-list argument was redundant.
    dot = debug.program_to_dot(tracer.program, set(),
                               bool(options.output_cfg))
    proc = subprocess.Popen([
        "/usr/bin/dot", "-T", "svg", "-o",
        options.output_cfg or options.output_typegraph
    ], stdin=subprocess.PIPE)
    proc.stdin.write(dot)
    proc.stdin.close()
  _maybe_output_debug(options, tracer.program)
  return ast, builtins_pytd
def infer_types(src, errorlog, options,
                filename=None, run_builtins=True, deep=True,
                solve_unknowns=True, cache_unknowns=False,
                show_library_calls=False, analyze_annotated=False,
                init_maximum_depth=INIT_MAXIMUM_DEPTH, maximum_depth=None):
  """Given Python source return its types.

  Args:
    src: A string containing Python source code.
    errorlog: Where error messages go. Instance of errors.ErrorLog.
    options: config.Options object
    filename: Filename of the program we're parsing.
    run_builtins: Whether to preload the native Python builtins when running
      the program.
    deep: If True, analyze all functions, even the ones not called by the
      main execution flow.
    solve_unknowns: If yes, try to replace structural types ("~unknowns")
      with nominal types.
    cache_unknowns: If True, do a faster approximation of unknown types.
    show_library_calls: If True, call traces are kept in the output.
    analyze_annotated: If True, analyze methods with type annotations, too.
    init_maximum_depth: Depth of analysis during module loading.
    maximum_depth: Depth of the analysis. Default: unlimited.

  Returns:
    A TypeDeclUnit

  Raises:
    AssertionError: In case of a bad parameter combination.
  """
  tracer = CallTracer(errorlog=errorlog, options=options,
                      module_name=_get_module_name(filename, options),
                      cache_unknowns=cache_unknowns,
                      analyze_annotated=analyze_annotated,
                      generate_unknowns=not options.quick,
                      store_all_calls=not deep)
  loc, defs = tracer.run_program(
      src, filename, init_maximum_depth, run_builtins)
  log.info("===Done running definitions and module-level code===")
  if deep:
    tracer.exitpoint = tracer.analyze(loc, defs, maximum_depth)
  else:
    tracer.exitpoint = loc
  ast = tracer.compute_types(defs)
  ast = tracer.loader.resolve_ast(ast)
  if tracer.has_unknown_wildcard_imports:
    # Ensure the module exposes a __getattr__, adding a default one if absent.
    try:
      ast.Lookup("__getattr__")
    except KeyError:
      ast = pytd_utils.Concat(
          ast, builtins.GetDefaultAst(options.python_version))
  builtins_pytd = tracer.loader.concat_all()
  if solve_unknowns:
    log.info("=========== PyTD to solve =============\n%s", pytd.Print(ast))
    ast = convert_structural.convert_pytd(ast, builtins_pytd)
  elif not show_library_calls:
    log.info("Solving is turned off. Discarding call traces.")
    # Rename "~unknown" to "?"
    ast = ast.Visit(visitors.RemoveUnknownClasses())
    # Remove "~list" etc.:
    ast = convert_structural.extract_local(ast)
  if options.output_cfg or options.output_typegraph:
    if options.output_cfg and options.output_typegraph:
      raise AssertionError("Can output CFG or typegraph, but not both")
    # set() instead of set([]): the empty-list argument was redundant.
    dot = program_to_dot(tracer.program, set(), bool(options.output_cfg))
    proc = subprocess.Popen(["/usr/bin/dot", "-T", "svg", "-o",
                             options.output_cfg or options.output_typegraph],
                            stdin=subprocess.PIPE)
    proc.stdin.write(dot)
    proc.stdin.close()
  _maybe_output_debug(options, tracer.program)
  return ast, builtins_pytd