Example #1
def _LookupClassReferences(serializable_ast, module_map, self_name):
    """Fills .cls references in serializable_ast.ast with ones from module_map.

    Already filled references are not changed. References to the module
    self_name are not filled. Setting self_name=None will fill all references.

    Args:
      serializable_ast: A SerializableAst instance.
      module_map: Used to resolve ClassType.cls links to already loaded
        modules. The loaded module will be added to the dict.
      self_name: A string representation of a module which should not be
        resolved, for example: "foo.bar.module1" or None to resolve all
        modules.

    Returns:
      A SerializableAst with an updated .ast. .class_type_nodes is set to None
      if any of the Nodes needed to be regenerated.
    """

    class_lookup = visitors.LookupExternalTypes(module_map,
                                                self_name=self_name)
    raw_ast = serializable_ast.ast

    for node in (serializable_ast.class_type_nodes or ()):
        try:
            if node is not class_lookup.VisitClassType(node):
                serializable_ast = serializable_ast.Replace(
                    class_type_nodes=None)
                break
        except KeyError as e:
            raise UnrestorableDependencyError("Unresolved class: %r." %
                                              utils.message(e))
    for node in (serializable_ast.function_type_nodes or ()):
        try:
            # Use VisitNamedType, even though this is a FunctionType. We want to
            # do a name lookup, to make sure this is still a function.
            if not isinstance(class_lookup.VisitNamedType(node),
                              pytd.FunctionType):
                serializable_ast = serializable_ast.Replace(
                    function_type_nodes=None)
                break
        except KeyError as e:
            raise UnrestorableDependencyError("Unresolved class: %r." %
                                              utils.message(e))
    if (serializable_ast.class_type_nodes is None
            or serializable_ast.function_type_nodes is None):
        try:
            raw_ast = raw_ast.Visit(class_lookup)
        except KeyError as e:
            raise UnrestorableDependencyError("Unresolved class: %r." %
                                              utils.message(e))
    serializable_ast = serializable_ast.Replace(ast=raw_ast)
    return serializable_ast
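
Every example on this page routes an exception through utils.message(e) before logging or re-raising it. The sketch below is a minimal illustration of such a helper, assuming it simply falls back from the first exception argument to str(e); pytype's actual implementation may differ.

def message(error):
    """Illustrative sketch: extract a human-readable message from an exception."""
    # Python 3 dropped BaseException.message, so fall back to args[0] or str().
    return error.args[0] if error.args else str(error)

# message(KeyError("foo.Bar")) -> "foo.Bar"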
Example #2
 def _verify_pyi(self, pyval, ast_name=None):
     try:
         pyval.Visit(visitors.VerifyLookup(ignore_late_types=True))
     except ValueError as e:
         raise BadDependencyError(utils.message(e), ast_name
                                  or pyval.name) from e
     pyval.Visit(visitors.VerifyContainers())
 def apply_type_comment(self, state, op, name, value):
     """If there is a type comment for the op, return its value."""
     assert op is self.vm.frame.current_opcode
     if op.code.co_filename != self.vm.filename:
         return value
     if not op.type_comment:
         return value
     comment = op.type_comment
     try:
         var = self._eval_expr(state.node, self.vm.frame.f_globals,
                               self.vm.frame.f_locals, comment)
     except EvaluationError as e:
         self.vm.errorlog.invalid_type_comment(self.vm.frames,
                                               comment,
                                               details=utils.message(e))
         value = self.vm.new_unsolvable(state.node)
     else:
         try:
             typ = abstract_utils.get_atomic_value(var)
         except abstract_utils.ConversionError:
             self.vm.errorlog.invalid_type_comment(
                 self.vm.frames, comment, details="Must be constant.")
             value = self.vm.new_unsolvable(state.node)
         else:
             if self.get_type_parameters(typ):
                 self.vm.errorlog.not_supported_yet(
                     self.vm.frames, "using type parameter in type comment")
             try:
                 value = self.init_annotation(typ, name, self.vm.frames,
                                              state.node)
             except self.LateAnnotationError:
                 value = LateAnnotation(typ, name, self.vm.simple_stack())
     return value
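
apply_type_comment() above evaluates the expression stored in op.type_comment against the frame's globals and locals. The snippet below is a generic illustration of PEP 484 type comments of the kind that end up in such an opcode; it is not taken from pytype's sources.

from typing import Dict, List

names = []   # type: List[str]
counts = {}  # type: Dict[str, int]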
Example #4
  def load_file(self, module_name, filename, ast=None):
    """Load (or retrieve from cache) a module and resolve its dependencies."""
    if not is_pickle(filename):
      return super(PickledPyiLoader, self).load_file(module_name, filename, ast)
    existing = self._get_existing_ast(module_name)
    if existing:
      # TODO(kramm): When does this happen?
      return existing
    loaded_ast = pytd_utils.LoadPickle(filename)
    # At this point ast.name and module_name could be different.
    # They are later synced in ProcessAst.
    dependencies = {d: names for d, names in loaded_ast.dependencies
                    if d != loaded_ast.ast.name}
    loaded_ast = serialize_ast.EnsureAstName(loaded_ast, module_name, fix=True)
    self._modules[module_name] = Module(module_name, filename, loaded_ast.ast)
    self._load_ast_dependencies(dependencies, ast, module_name)
    try:
      ast = serialize_ast.ProcessAst(loaded_ast, self._get_module_map())
    except serialize_ast.UnrestorableDependencyError as e:
      del self._modules[module_name]
      raise BadDependencyError(utils.message(e), module_name)
    # Mark all the module's late dependencies as explicitly imported.
    for d, _ in loaded_ast.late_dependencies:
      if d != loaded_ast.ast.name:
        self.add_module_prefixes(d)

    self._modules[module_name].ast = ast
    self._modules[module_name].pickle = None
    self._modules[module_name].dirty = False
    return ast
Example #5
 def _check_arg(self, condition, name, value):
     if condition:
         try:
             condition.check(value)
         except PreconditionError as e:
             raise PreconditionError("argument=%s: %s." %
                                     (name, utils.message(e)))
Example #6
    def call(self, node, _, args):
        try:
            name_var, field_names, field_types = self._getargs(node, args)
        except abstract_utils.ConversionError:
            return node, self.vm.convert.unsolvable.to_variable(node)

        try:
            name = abstract_utils.get_atomic_python_constant(name_var)
        except abstract_utils.ConversionError:
            return node, self.vm.convert.unsolvable.to_variable(node)

        try:
            field_names = self._validate_and_rename_args(
                name, field_names, False)
        except ValueError as e:
            self.vm.errorlog.invalid_namedtuple_arg(self.vm.frames,
                                                    utils.message(e))
            return node, self.vm.convert.unsolvable.to_variable(node)

        annots, late_annots = self.vm.annotations_util.convert_annotations_list(
            moves.zip(field_names, field_types))
        if late_annots:
            # We currently don't support forward references. Report if we find any,
            # then continue by using Unsolvable instead.
            self.vm.errorlog.not_supported_yet(
                self.vm.frames, "Forward references in typing.NamedTuple")
        field_types = [
            annots.get(field_name, self.vm.convert.unsolvable)
            for field_name in field_names
        ]
        cls_var = self._build_namedtuple(name, field_names, field_types, node)
        self.vm.trace_classdef(cls_var)
        return node, cls_var
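
The _getargs/_validate_and_rename_args calls above handle the functional form of typing.NamedTuple. An illustrative call of the kind this overlay analyzes (standard typing usage, not pytype code):

from typing import NamedTuple

Point = NamedTuple("Point", [("x", int), ("y", int)])
origin = Point(x=0, y=0)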
Example #7
 def check_error(self, src, expected_line, message):
     """Check that parsing the src raises the expected error."""
     with self.assertRaises(parser.ParseError) as e:
         parser.parse_string(textwrap.dedent(src).lstrip(),
                             options=self.options)
     self.assertRegex(utils.message(e.exception), re.escape(message))
     self.assertEqual(expected_line, e.exception.line)
Example #8
 def load_file(self, module_name, filename, ast=None):
   """Load (or retrieve from cache) a module and resolve its dependencies."""
   if not os.path.splitext(filename)[1].startswith(".pickled"):
     return super(PickledPyiLoader, self).load_file(module_name, filename, ast)
   existing = self._get_existing_ast(module_name)
   if existing:
     # TODO(kramm): When does this happen?
     return existing
   loaded_ast = pytd_utils.LoadPickle(filename)
   # At this point ast.name and module_name could be different.
   # They are later synced in ProcessAst.
   dependencies = [d for d in loaded_ast.dependencies
                   if d != loaded_ast.ast.name]
   loaded_ast = serialize_ast.EnsureAstName(loaded_ast, module_name, fix=True)
   self._modules[module_name] = Module(module_name, filename, loaded_ast.ast)
   self._load_ast_dependencies(dependencies, ast, module_name)
   try:
     ast = serialize_ast.ProcessAst(loaded_ast, self._get_module_map())
   except serialize_ast.UnrestorableDependencyError as e:
     del self._modules[module_name]
     raise BadDependencyError(utils.message(e), module_name)
   self._modules[module_name].ast = ast
   self._modules[module_name].pickle = None
   self._modules[module_name].dirty = False
   return ast
Example #9
 def _resolve_external_types(self, ast):
   try:
     ast = ast.Visit(visitors.LookupExternalTypes(
         self._get_module_map(), self_name=ast.name))
   except KeyError as e:
     raise BadDependencyError(utils.message(e), ast.name)
   return ast
Example #10
    def call(self, node, _, args):
        try:
            name_var, field_names, field_types = self._getargs(node, args)
        except abstract_utils.ConversionError:
            return node, self.vm.new_unsolvable(node)

        try:
            name = abstract_utils.get_atomic_python_constant(name_var)
        except abstract_utils.ConversionError:
            return node, self.vm.new_unsolvable(node)

        try:
            field_names = self._validate_and_rename_args(
                name, field_names, False)
        except ValueError as e:
            self.vm.errorlog.invalid_namedtuple_arg(self.vm.frames,
                                                    utils.message(e))
            return node, self.vm.new_unsolvable(node)

        annots, late_annots = self.vm.annotations_util.convert_annotations_list(
            moves.zip(field_names, field_types))
        field_types = [
            annots.get(field_name, self.vm.convert.unsolvable)
            for field_name in field_names
        ]
        node, cls_var = self._build_namedtuple(name, field_names, field_types,
                                               late_annots, node)

        self.vm.trace_classdef(cls_var)
        return node, cls_var
Example #11
 def verify(self, mod_ast, *, mod_name=None):
   try:
     mod_ast.Visit(visitors.VerifyLookup(ignore_late_types=True))
   except ValueError as e:
     name = mod_name or mod_ast.name
     raise BadDependencyError(utils.message(e), name) from e
   mod_ast.Visit(visitors.VerifyContainers())
Example #12
def main():
  argument_parser = make_parser()
  opts = argument_parser.parse_args()
  python_version = utils.version_from_string(opts.python_version)
  try:
    utils.validate_version(python_version)
  except utils.UsageError as e:
    sys.stderr.write("Usage error: %s\n" % utils.message(e))
    sys.exit(1)

  with open(opts.input) as fi:
    sourcecode = fi.read()
    try:
      parsed = parser.parse_string(sourcecode, filename=opts.input,
                                   python_version=python_version)
    except parser.ParseError as e:
      sys.stderr.write(str(e))
      sys.exit(1)

  if opts.optimize:
    parsed = optimize.Optimize(parsed,
                               builtins.GetBuiltinsPyTD(python_version),
                               lossy=opts.lossy,
                               use_abcs=opts.use_abcs,
                               max_union=opts.max_union,
                               remove_mutable=opts.remove_mutable,
                               can_do_lookup=False)

  if opts.output is not None:
    out_text = pytd_utils.Print(parsed, opts.multiline_args)
    if opts.output == "-":
      sys.stdout.write(out_text)
    else:
      with open(opts.output, "w") as out:
        out.write(out_text)
Example #13
    def from_function(cls, function: ast3.AST, is_async: bool) -> "NameAndSig":
        """Constructor from an ast.FunctionDef node."""
        name = function.name

        # decorators
        decorators = set(function.decorator_list)
        abstracts = {"abstractmethod", "abc.abstractmethod"}
        coroutines = {"coroutine", "asyncio.coroutine", "coroutines.coroutine"}
        overload = {"overload"}
        ignored = {"type_check_only"}
        is_abstract = bool(decorators & abstracts)
        is_coroutine = bool(decorators & coroutines)
        is_overload = bool(decorators & overload)
        decorators -= abstracts
        decorators -= coroutines
        decorators -= overload
        decorators -= ignored
        # TODO(mdemello): do we need this limitation?
        if len(decorators) > 1:
            raise ParseError("Too many decorators for %s" % name)
        decorator, = decorators if decorators else (None, )

        exceptions = []
        mutators = []
        for i, x in enumerate(function.body):
            if isinstance(x, types.Raise):
                exceptions.append(x.exception)
            elif isinstance(x, Mutator):
                mutators.append(x)
            elif isinstance(x, types.Ellipsis):
                pass
            elif (isinstance(x, ast3.Expr) and isinstance(x.value, ast3.Str)
                  and i == 0):
                # docstring
                pass
            else:
                msg = textwrap.dedent("""
                    Unexpected statement in function body.
                    Only `raise` statements and type mutations are valid
                """).lstrip()
                if isinstance(x, ast3.AST):
                    raise ParseError(msg).at(x)
                else:
                    raise ParseError(msg)

        # exceptions
        sig = _pytd_signature(function, is_async, exceptions=exceptions)

        # mutators
        for mutator in mutators:
            try:
                sig = sig.Visit(mutator)
            except NotImplementedError as e:
                raise ParseError(utils.message(e)) from e
            if not mutator.successful:
                raise ParseError("No parameter named %s" % mutator.name)

        return cls(name, sig, decorator, is_abstract, is_coroutine,
                   is_overload)
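
from_function() tolerates only docstrings, `...`, `raise` statements, and type mutations in a stub function body. Assuming pytype's pyi dialect accepts `raise` in a stub body, which is what the types.Raise handling above suggests, an illustrative stub method it would process is:

def pop(self, key: str) -> int:
    """A docstring is allowed as the first statement; the raise declares an exception."""
    raise KeyError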
Example #14
  def parse(self, src, name, filename):
    """Parse a PYI file and return the corresponding AST.

    Note that parse() should be called exactly once per _Parser instance.  It
    holds aggregated state during parsing and is not designed to be reused.

    Args:
      src: The source text to parse.
      name: The name of the module to be created.
      filename: The name of the source file.

    Returns:
      A pytd.TypeDeclUnit() representing the parsed pyi.

    Raises:
      ParseError: If the PYI source could not be parsed.
    """
    # Ensure instances do not get reused.
    assert not self._used
    self._used = True

    self._filename = filename
    self._ast_name = name
    self._type_map = {}

    try:
      defs = parser_ext.parse(self, src)
      ast = self._build_type_decl_unit(defs)
    except ParseError as e:
      if self._error_location:
        line = e.line or self._error_location[0]
        try:
          text = src.splitlines()[line-1]
        except IndexError:
          text = None
        raise ParseError(utils.message(e), line=line, filename=self._filename,
                         column=self._error_location[1], text=text)
      else:
        raise e

    ast = ast.Visit(_PropertyToConstant())
    ast = ast.Visit(_InsertTypeParameters())
    # TODO(kramm): This is in the wrong place- it should happen after resolving
    # local names, in load_pytd.
    ast = ast.Visit(pep484.ConvertTypingToNative(name))

    if name:
      ast = ast.Replace(name=name)
      ast = ast.Visit(visitors.AddNamePrefix())
    else:
      # If there's no unique name, hash the sourcecode.
      ast = ast.Replace(name=hashlib.md5(src.encode("utf-8")).hexdigest())
    ast = ast.Visit(visitors.StripExternalNamePrefix())

    # Typeshed files that explicitly import and refer to "builtins" need to have
    # that rewritten to __builtin__
    ast = ast.Visit(visitors.RenameBuiltinsPrefix())

    return ast.Replace(is_package=utils.is_pyi_directory_init(filename))
Example #15
 def _resolve_external_types(self, pyval, ast_name=None):
   name = ast_name or pyval.name
   try:
     pyval = pyval.Visit(visitors.LookupExternalTypes(
         self._get_module_map(), self_name=name,
         module_alias_map=self._aliases))
   except KeyError as e:
     raise BadDependencyError(utils.message(e), name)
   return pyval
Example #16
 def resolve_external_types(self, mod_ast, module_map, aliases, *,
                            mod_name=None):
   name = mod_name or mod_ast.name
   try:
     mod_ast = mod_ast.Visit(visitors.LookupExternalTypes(
         module_map, self_name=name, module_alias_map=aliases))
   except KeyError as e:
     raise BadDependencyError(utils.message(e), name) from e
   return mod_ast
Example #17
 def call(self, node, _, args):
     """Call typing.TypeVar()."""
     try:
         param = self._get_typeparam(node, args)
     except TypeVarError as e:
         self.vm.errorlog.invalid_typevar(self.vm.frames, utils.message(e),
                                          e.bad_call)
         return node, self.vm.convert.unsolvable.to_variable(node)
     return node, param.to_variable(node)
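
_get_typeparam() validates the arguments of a typing.TypeVar() call, and the error path above reports malformed ones via invalid_typevar. Illustrative calls (standard typing usage; the commented-out line shows the kind of call that would be reported):

from typing import TypeVar

T = TypeVar("T")            # plain type variable
S = TypeVar("S", int, str)  # constrained type variable
# U = TypeVar(42)           # not a string literal: reported as an invalid TypeVar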
Example #18
 def check(self, value):
     errors = []
     for c in self._choices:
         try:
             c.check(value)
             return
         except PreconditionError as e:
             errors.append(e)
     raise PreconditionError(" or ".join("(%s)" % utils.message(e)
                                         for e in errors))
Example #19
 def call(self, node, _, args, alias_map=None):
     sig = None
     if isinstance(self.func.__self__, _classes.CallableClass):
         sig = function.Signature.from_callable(self.func.__self__)
     args = args.simplify(node, self.ctx, match_signature=sig)
     posargs = [u.AssignToNewVariable(node) for u in args.posargs]
     namedargs = {
         k: u.AssignToNewVariable(node)
         for k, u in args.namedargs.items()
     }
     try:
         inspect.signature(self.func).bind(node, *posargs, **namedargs)
     except ValueError as e:
         # Happens for, e.g.,
         #   def f((x, y)): pass
         #   f((42,))
         raise NotImplementedError(
             "Wrong number of values to unpack") from e
     except TypeError as e:
         # The possible errors here are:
         #   (1) wrong arg count
         #   (2) duplicate keyword
         #   (3) unexpected keyword
         # The way we constructed namedargs rules out (2).
         if "keyword" in utils.message(e):
             # Happens for, e.g.,
             #   def f(*args): pass
             #   f(x=42)
             raise NotImplementedError("Unexpected keyword") from e
         # The function was passed the wrong number of arguments. The signature is
         # ([self, ]node, ...). The length of "..." tells us how many variables
         # are expected.
         expected_argcount = len(inspect.getfullargspec(self.func).args) - 1
         if inspect.ismethod(self.func) and self.func.__self__ is not None:
             expected_argcount -= 1
         actual_argcount = len(posargs) + len(namedargs)
         if (actual_argcount > expected_argcount
                 or (not args.starargs and not args.starstarargs)):
             # If we have too many arguments, or starargs and starstarargs are both
             # empty, then we can be certain of a WrongArgCount error.
             argnames = tuple("_" + str(i)
                              for i in range(expected_argcount))
             sig = function.Signature(self.name, argnames, 0, None, set(),
                                      None, {}, {}, {})
             raise function.WrongArgCount(sig, args, self.ctx)
         assert actual_argcount < expected_argcount
         # Assume that starargs or starstarargs fills in the missing arguments.
         # Instead of guessing where these arguments should go, overwrite all of
         # the arguments with a list of unsolvables of the correct length, which
         # is guaranteed to give us a correct (but imprecise) analysis.
         posargs = [
             self.ctx.new_unsolvable(node) for _ in range(expected_argcount)
         ]
         namedargs = {}
     return self.func(node, *posargs, **namedargs)
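
The TypeError branch above distinguishes an unexpected keyword from a wrong argument count by looking for "keyword" in the exception message. A standalone illustration of that standard-library behaviour (not pytype code):

import inspect

def f(node, x):
    return x

sig = inspect.signature(f)
try:
    sig.bind("node", y=1)       # unexpected keyword argument
except TypeError as e:
    print("keyword" in str(e))  # True ("got an unexpected keyword argument 'y'")
try:
    sig.bind("node", 1, 2)      # wrong argument count
except TypeError as e:
    print("keyword" in str(e))  # False ("too many positional arguments")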
Example #20
 def call(self, node, _, args):
     """Call typing.TypeVar()."""
     try:
         param = self._get_typeparam(node, args)
     except TypeVarError as e:
         self.vm.errorlog.invalid_typevar(self.vm.frames, utils.message(e),
                                          e.bad_call)
         return node, self.vm.new_unsolvable(node)
     if param.has_late_types():
         self.vm.params_with_late_types.append(
             (param, self.vm.simple_stack()))
     return node, param.to_variable(node)
Example #21
def process_file(options, source_text=None, kythe_args=None):
    """Process a single file and return cross references.

    Args:
      options: A dictionary of pytype options.
      source_text: Optional text of the file; will be read from the file
        pointed to by options.input if not supplied.
      kythe_args: Extra args for generating the kythe index.

    Returns:
      An Indexer object with the indexed code, or None if pytype fails.
    """

    # We bind the global ast variable in this function.
    global ast

    errorlog = errors.ErrorLog()
    loader = load_pytd.create_loader(options)
    src = source_text or io.read_source_file(options.input)
    vm = analyze.CallTracer(errorlog=errorlog,
                            options=options,
                            generate_unknowns=options.protocols,
                            store_all_calls=False,
                            loader=loader)
    try:
        analyze.infer_types(src=src,
                            filename=options.input,
                            errorlog=errorlog,
                            options=options,
                            loader=loader,
                            tracer_vm=vm)
    except utils.UsageError as e:
        logging.error("Usage error: %s\n", utils.message(e))
        return None

    major, minor = options.python_version
    if major == 2:
        # python2.7 is the only supported py2 version.
        a = ast27.parse(src, options.input)
        ast = ast27
    else:
        a = ast3.parse(src, options.input, feature_version=minor)
        ast = ast3

    # TODO(mdemello): Get from args
    module_name = "module"
    source = SourceFile(src, vm.opcode_traces, filename=options.input)
    ix = Indexer(source, vm.loader.get_resolved_modules(), module_name,
                 kythe_args)
    ix.index(a)
    ix.finalize()
    return ix
Example #22
 def __str__(self):
   lines = []
   if self._filename or self._line is not None:
     lines.append('  File: "%s", line %s' % (self._filename, self._line))
   if self._column and self._text:
     indent = 4
     stripped = self._text.lstrip()
     lines.append("%*s%s" % (indent, "", stripped))
     # Output a pointer below the error column, adjusting for stripped spaces.
     pos = indent + (self._column - 1) - (len(self._text) - len(stripped))
     lines.append("%*s^" % (pos, ""))
   lines.append("%s: %s" % (type(self).__name__, utils.message(self)))
   return "\n".join(lines)
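
Combined with the constructor keywords seen in Example #14 (line=, filename=, column=, text=), this __str__ renders a compiler-style error block. A rough illustration, assuming utils.message(self) returns the first exception argument:

err = ParseError("syntax error", line=3, filename="foo.pyi",
                 column=7, text="  x = = 1")
print(err)
# Roughly:
#   File: "foo.pyi", line 3
#     x = = 1
#         ^
# ParseError: syntax error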
Example #23
def check_or_generate_pyi(options, errorlog, loader):
    """Returns the generated pyi and its AST, or None if only checking.

    Args:
      options: config.Options object.
      errorlog: errors.ErrorLog object.
      loader: load_pytd.Loader object.

    Returns:
      A tuple (PYI as string, AST), or None.
    """

    result = pytd_builtins.DEFAULT_SRC
    ast = pytd_builtins.GetDefaultAst(options.python_version)
    try:
        if options.check:
            check_py(input_filename=options.input,
                     errorlog=errorlog,
                     options=options,
                     loader=loader)
            return None
        else:
            result, ast = generate_pyi(input_filename=options.input,
                                       errorlog=errorlog,
                                       options=options,
                                       loader=loader)
    except utils.UsageError as e:
        raise
    except pyc.CompileError as e:
        errorlog.python_compiler_error(options.input, e.lineno, e.error)
    except IndentationError as e:
        errorlog.python_compiler_error(options.input, e.lineno, e.msg)
    except tokenize.TokenError as e:
        msg, (lineno, unused_column) = e.args  # pylint: disable=unbalanced-tuple-unpacking
        errorlog.python_compiler_error(options.input, lineno, msg)
    except directors.SkipFile:
        result += "# skip-file found, file not analyzed"
    except Exception as e:  # pylint: disable=broad-except
        if options.nofail:
            log.warn("***Caught exception: %s", str(e), exc_info=True)
            if not options.check:
                result += (  # pytype: disable=name-error
                    "# Caught error in pytype: " +
                    str(e).replace("\n", "\n#") + "\n# " +
                    "\n# ".join(traceback.format_exc().splitlines()))
        else:
            e.args = (str(utils.message(e)) +
                      "\nFile: %s" % options.input, ) + e.args[1:]
            raise

    return (result, ast)
Example #24
 def _parse_source(self, src):
     """Parse a source file, extracting directives from comments."""
     f = moves.StringIO(src)
     defs_start = None
     closing_bracket_lines = set()
     whitespace_lines = set()
     for tok, _, start, _, line in tokenize.generate_tokens(f.readline):
         lineno, col = start
         if defs_start is None and _CLASS_OR_FUNC_RE.match(line):
             defs_start = lineno
         if _CLOSING_BRACKETS_RE.match(line):
             closing_bracket_lines.add(lineno)
         elif _WHITESPACE_RE.match(line):
             whitespace_lines.add(lineno)
         elif _DOCSTRING_RE.match(line):
             self._docstrings.add(lineno)
         else:
             if closing_bracket_lines:
                 self._adjust_type_comments(closing_bracket_lines,
                                            whitespace_lines)
             closing_bracket_lines.clear()
             whitespace_lines.clear()
         if tok == tokenize.COMMENT:
             matches = list(_DIRECTIVE_RE.finditer(line[col:]))
             is_nested = bool(matches) and matches[0].start(0) > 0
             for m in matches:
                 code = line[:col].strip()
                 tool, data = m.groups()
                 open_ended = not code
                 data = data.strip()
                 if tool == "type":
                     self._process_type(lineno, code, data, is_nested)
                 elif tool == "pytype":
                     try:
                         self._process_pytype(lineno, data, open_ended)
                     except _DirectiveError as e:
                         self._errorlog.invalid_directive(
                             self._filename, lineno, utils.message(e))
                 else:
                     pass  # ignore comments for other tools
     if closing_bracket_lines:
         self._adjust_type_comments(closing_bracket_lines, whitespace_lines)
     if defs_start is not None:
         disables = list(self._disables.items())
         # Add "# type: ignore" to the list of disables that we check.
         disables.append(("Type checking", self._ignore))
         for name, lineset in disables:
             lineno = lineset.get_disable_after(defs_start)
             if lineno is not None:
                 self._errorlog.late_directive(self._filename, lineno, name)
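
_DIRECTIVE_RE matches per-tool comments of the form `# tool: data`; `type` comments go to _process_type and `pytype` comments to _process_pytype, with open_ended set when no code precedes the comment. A tiny illustrative source string of the kind this scanner walks (assumed typical usage; "attribute-error" and "name-error" are real pytype error names):

SRC = '''
x = []  # type: List[str]
y = foo.bar  # pytype: disable=attribute-error
# pytype: disable=name-error
'''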
Example #25
def process_one_file(options):
  """Check a .py file or generate a .pyi for it, according to options.

  Args:
    options: config.Options object.

  Returns:
    An error code (0 means no error).
  """

  log.info("Process %s => %s", options.input, options.output)
  errorlog = errors.ErrorLog()
  loader = load_pytd.create_loader(options)
  try:
    generated_values = check_or_generate_pyi(options, errorlog, loader)
  except utils.UsageError as e:
    logging.error("Usage error: %s\n", utils.message(e))
    return 1

  if not options.check:
    result, ast = generated_values
    if options.pickle_output:
      pyi_output = options.verify_pickle
    else:
      pyi_output = options.output
    if pyi_output == "-":
      sys.stdout.write(result)
    elif pyi_output:
      log.info("write pyi %r => %r", options.input, pyi_output)
      with open(pyi_output, "w") as fi:
        fi.write(result)
    if options.pickle_output:
      log.info("write pickle %r => %r", options.input, options.output)
      write_pickle(ast, loader, options)
  exit_status = handle_errors(errorlog, options)

  # If we have set return_success, set exit_status to 0 after the regular error
  # handler has been called.
  if options.return_success:
    exit_status = 0

  # Touch output file upon success.
  if options.touch and not exit_status:
    with open(options.touch, "a"):
      os.utime(options.touch, None)
  return exit_status
Example #26
    def _eval_comparison(self, ident, op, value) -> bool:
        """Evaluate a comparison and return a bool.

        Args:
          ident: A tuple of a dotted name string and an optional __getitem__
            key (int or slice).
          op: One of the comparison operator strings in cmp_slots.COMPARES.
          value: Either a string, an integer, or a tuple of integers.

        Returns:
          The boolean result of the comparison.

        Raises:
          ParseError: If the comparison cannot be evaluated.
        """
        name, key = ident
        if name == "sys.version_info":
            if key is None:
                key = slice(None, None, None)
            assert isinstance(key, (int, slice))
            if isinstance(key, int) and not isinstance(value, int):
                raise ParseError(
                    "an element of sys.version_info must be compared to an integer"
                )
            if isinstance(key, slice) and not _is_int_tuple(value):
                raise ParseError(
                    "sys.version_info must be compared to a tuple of integers")
            try:
                actual = self._version[key]
            except IndexError as e:
                raise ParseError(utils.message(e)) from e
            if isinstance(key, slice):
                actual = _three_tuple(actual)
                value = _three_tuple(value)
        elif name == "sys.platform":
            if not isinstance(value, str):
                raise ParseError("sys.platform must be compared to a string")
            valid_cmps = (cmp_slots.EQ, cmp_slots.NE)
            if op not in valid_cmps:
                raise ParseError(
                    "sys.platform must be compared using %s or %s" %
                    valid_cmps)
            actual = self._platform
        else:
            raise ParseError("Unsupported condition: '%s'." % name)
        return cmp_slots.COMPARES[op](actual, value)
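
The conditions handled here come from `if` blocks in pyi stubs, where only sys.version_info and sys.platform comparisons are supported. Illustrative stub code of the kind that feeds this method (standard PEP 484 stub idioms, not pytype sources):

import sys

if sys.version_info >= (3, 8):
    def f(x: int) -> int: ...
else:
    def f(x: int) -> float: ...

if sys.platform == "win32":
    PATH_SEP: str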
Example #27
    def __init__(self, argv):
        """Parse and encapsulate the command-line options.

        Also sets up some basic logger configuration.

        Args:
          argv: sys.argv[1:] (sys.argv[0] is the main script).

        Raises:
          sys.exit(2): bad option or input filenames.
        """
        argument_parser = make_parser()
        options = argument_parser.parse_args(argv)
        names = set(vars(options))
        try:
            Postprocessor(names, options, self).process()
        except PostprocessingError as e:
            argument_parser.error(utils.message(e))
Example #28
def process_file(options):
    """Process a single file and return cross references."""

    # We bind the global ast variable in this function.
    global ast

    errorlog = errors.ErrorLog()
    loader = load_pytd.create_loader(options)
    src = io.read_source_file(options.input)
    vm = analyze.CallTracer(errorlog=errorlog,
                            options=options,
                            generate_unknowns=options.protocols,
                            store_all_calls=False,
                            loader=loader)
    try:
        analyze.infer_types(src=src,
                            filename=options.input,
                            errorlog=errorlog,
                            options=options,
                            loader=loader,
                            tracer_vm=vm)
    except utils.UsageError as e:
        logging.error("Usage error: %s\n", utils.message(e))
        return 1

    major, minor = options.python_version
    if major == 2:
        # python2.7 is the only supported py2 version.
        a = ast27.parse(src, options.input)
        ast = ast27
    else:
        a = ast3.parse(src, options.input, feature_version=minor)
        ast = ast3

    # TODO(mdemello): Get from args
    module_name = "module"
    source = SourceFile(src, vm.opcode_traces, filename=options.input)
    ix = Indexer(source, module_name)
    ix.index(a)
    ix.lookup_refs()
    ix.process_deflocs()
    ix.process_links()
    return ix
Example #29
def process_one_file(options):
    """Check a .py file or generate a .pyi for it, according to options.

    Args:
      options: config.Options object.

    Returns:
      An error code (0 means no error).
    """

    log.info("Process %s => %s", options.input, options.output)
    loader = load_pytd.create_loader(options)
    try:
        errorlog, result, ast = check_or_generate_pyi(options, loader)
    except utils.UsageError as e:
        logging.error("Usage error: %s\n", utils.message(e))
        return 1

    if not options.check:
        if options.pickle_output:
            pyi_output = options.verify_pickle
        else:
            pyi_output = options.output
        # Write out the pyi file.
        if pyi_output:
            _write_pyi_output(options, result, pyi_output)
        # Write out the pickle file.
        if options.pickle_output:
            log.info("write pickle %r => %r", options.input, options.output)
            write_pickle(ast, options, loader)
    exit_status = handle_errors(errorlog, options)

    # If we have set return_success, set exit_status to 0 after the regular error
    # handler has been called.
    if options.return_success:
        exit_status = 0

    # Touch output file upon success.
    if options.touch and not exit_status:
        with options.open_function(options.touch, "a"):
            os.utime(options.touch, None)
    return exit_status
Example #30
 def _process_comment(self, line, lineno, col, type_comment_set):
     """Process a single comment."""
     matches = list(_DIRECTIVE_RE.finditer(line[col:]))
     is_nested = bool(matches) and matches[0].start(0) > 0
     for m in matches:
         code = line[:col].strip()
         tool, data = m.groups()
         open_ended = not code
         data = data.strip()
         if tool == "type":
             self._process_type(lineno, code, data, is_nested,
                                type_comment_set)
         elif tool == "pytype":
             try:
                 self._process_pytype(lineno, data, open_ended)
             except _DirectiveError as e:
                 self._errorlog.invalid_directive(self._filename, lineno,
                                                  utils.message(e))
         else:
             pass  # ignore comments for other tools