def visit_Assign(self, node):
    """Convert a simple 'name = value' statement into a pytd definition.

    Depending on the value, the result is a TypeVar registration (erased from
    the tree), a constant, or an alias. Inside a function body the assignment
    is interpreted as a type mutation instead.
    """
    targets = node.targets
    # Only single-name assignments are legal; tuple unpacking is rejected.
    if len(targets) > 1 or isinstance(targets[0], ast3.Tuple):
        raise ParseError("Assignments must be of the form 'name = value'")
    self.convert_node_annotations(node)
    name = targets[0].id
    # Record and erase typevar definitions.
    if isinstance(node.value, _TypeVar):
        self.defs.add_type_var(name, node.value)
        return Splice([])
    if node.type_comment:
        # TODO(mdemello): can pyi files have aliases with typecomments?
        ret = pytd.Constant(name, node.type_comment)
    else:
        ret = self.new_alias_or_constant(name, node.value)
    if self.in_function:
        # Should never happen, but this keeps pytype happy.
        if isinstance(ret, types.SlotDecl):
            raise ParseError("Cannot change the type of __slots__")
        return function.Mutator(name, ret.type)
    if self.level == 0:
        self.defs.add_alias_or_constant(ret)
    return ret
def _split_definitions(defs: List[Any]): """Return [constants], [functions] given a mixed list of definitions.""" constants = [] functions = [] aliases = [] slots = None classes = [] for d in defs: if isinstance(d, pytd.Class): classes.append(d) elif isinstance(d, pytd.Constant): if d.name == "__slots__": pass # ignore definitions of __slots__ as a type else: constants.append(d) elif isinstance(d, function.NameAndSig): functions.append(d) elif isinstance(d, pytd.Alias): aliases.append(d) elif isinstance(d, types.SlotDecl): if slots is not None: raise ParseError("Duplicate __slots__ declaration") slots = d.slots elif isinstance(d, types.Ellipsis): pass elif isinstance(d, ast3.Expr): raise ParseError("Unexpected expression").at(d) else: msg = "Unexpected definition" lineno = None if isinstance(d, ast3.AST): lineno = getattr(d, "lineno", None) raise ParseError(msg, line=lineno) return constants, functions, aliases, slots, classes
def _parameterized_type(self, base_type, parameters):
    """Return a parameterized type.

    Args:
        base_type: The base type being parameterized (e.g. List in List[int]).
        parameters: The type parameters; entries may be self.ELLIPSIS.

    Returns:
        A pytd type node.

    Raises:
        ParseError: If the parameters are not valid for the base type.
    """
    if self._is_literal_base_type(base_type):
        return types.pytd_literal(parameters)
    elif any(isinstance(p, types.Constant) for p in parameters):
        # Constant parameters are only handled by the Literal branch above;
        # anywhere else, build a readable description for the error message.
        parameters = ", ".join(
            p.repr_str() if isinstance(p, types.Constant) else "_"
            for p in parameters)
        raise ParseError(
            "%s[%s] not supported" % (pytd_utils.Print(base_type), parameters))
    elif types.is_any(base_type):
        return pytd.AnythingType()
    elif len(parameters) == 2 and parameters[-1] is self.ELLIPSIS and (
            not self._is_callable_base_type(base_type)):
        # X[T, ...] — Callable is excluded because in Callable the ellipsis
        # has its own meaning and is handled by types.pytd_callable below.
        element_type = parameters[0]
        if element_type is self.ELLIPSIS:
            raise ParseError("[..., ...] not supported")
        return pytd.GenericType(base_type=base_type,
                                parameters=(element_type,))
    else:
        # Any remaining ellipsis parameter is treated as Any.
        parameters = tuple(pytd.AnythingType() if p is self.ELLIPSIS else p
                           for p in parameters)
        if self._is_tuple_base_type(base_type):
            return types.heterogeneous_tuple(base_type, parameters)
        elif self._is_callable_base_type(base_type):
            return types.pytd_callable(base_type, parameters)
        else:
            assert parameters
            return pytd.GenericType(base_type=base_type,
                                    parameters=parameters)
def get_parents(
        bases: List[ast3.AST]) -> Tuple[List[pytd_node.Node], Optional[int]]:
    """Collect base classes and the index of a bare NamedTuple base.

    Args:
        bases: The (partially converted) base-class list of a classdef.

    Returns:
        A tuple (parents, namedtuple_index) where namedtuple_index is the
        position of a bare typing.NamedTuple base, or None if absent.

    Raises:
        ParseError: On multiple bare NamedTuple bases or an unexpected base.
    """
    parents = []
    namedtuple_index = None
    for i, p in enumerate(bases):
        if _is_parameterized_protocol(p):
            # From PEP 544: "`Protocol[T, S, ...]` is allowed as a shorthand
            # for `Protocol, Generic[T, S, ...]`."
            # https://www.python.org/dev/peps/pep-0544/#generic-protocols
            parents.append(p.base_type)
            parents.append(
                p.Replace(base_type=pytd.NamedType("typing.Generic")))
        elif isinstance(p, pytd.NamedType) and p.name == "typing.NamedTuple":
            if namedtuple_index is not None:
                raise ParseError(
                    "cannot inherit from bare NamedTuple more than once")
            namedtuple_index = i
            parents.append(p)
        elif isinstance(p, pytd.Type):
            parents.append(p)
        else:
            # Bug fix: the original concatenated a str with a non-str node
            # ("Unexpected class base:" + p), which raised TypeError instead
            # of the intended ParseError.
            raise ParseError("Unexpected class base: %r" % (p,))
    return parents, namedtuple_index
def new_alias_or_constant(self, name, value):
    """Build an alias or constant from a partially converted subtree.

    This lives here rather than in _Definitions because it needs a partially
    converted typed_ast subtree.
    """
    if name == "__slots__":
        is_str_list = (isinstance(value, ast3.List) and
                       all(types.Constant.is_str(x) for x in value.elts))
        if not is_str_list:
            raise ParseError("__slots__ must be a list of strings")
        return types.SlotDecl(tuple(x.value for x in value.elts))
    if isinstance(value, types.Constant):
        return pytd.Constant(name, value.to_pytd())
    if isinstance(value, types.Ellipsis):
        return pytd.Constant(name, pytd.AnythingType())
    if isinstance(value, pytd.NamedType):
        return pytd.Alias(name, self.defs.resolve_type(value.name))
    if isinstance(value, ast3.List):
        # Literal lists are only meaningful for these two special names.
        if name != "__all__":
            raise ParseError(
                "Only __slots__ and __all__ can be literal lists")
        return pytd.Constant(name, types.pytd_list("str"))
    if isinstance(value, ast3.Tuple):
        # TODO(mdemello): Consistent with the current parser, but should it
        # properly be Tuple[Type]?
        return pytd.Constant(name, pytd.NamedType("tuple"))
    if isinstance(value, ast3.Name):
        return pytd.Alias(name, self.defs.resolve_type(value.id))
    # TODO(mdemello): add a case for TypeVar()
    # Convert any complex type aliases.
    return pytd.Alias(name, self.convert_node(value))
def from_function(cls, function: ast3.AST, is_async: bool) -> "NameAndSig":
    """Build a NameAndSig from an ast.FunctionDef node.

    Args:
        function: The (partially converted) FunctionDef node.
        is_async: Whether the function was declared with 'async def'.

    Returns:
        A NameAndSig for the function.

    Raises:
        ParseError: On extra decorators, invalid body statements, or a
            mutation naming a nonexistent parameter.
    """
    name = function.name

    # Classify recognised decorators, then drop them from the working set.
    decorators = set(function.decorator_list)
    abstracts = {"abstractmethod", "abc.abstractmethod"}
    coroutines = {"coroutine", "asyncio.coroutine", "coroutines.coroutine"}
    overload = {"overload"}
    ignored = {"type_check_only"}
    is_abstract = bool(decorators & abstracts)
    is_coroutine = bool(decorators & coroutines)
    is_overload = bool(decorators & overload)
    decorators -= abstracts | coroutines | overload | ignored
    # TODO(mdemello): do we need this limitation?
    if len(decorators) > 1:
        raise ParseError("Too many decorators for %s" % name)
    decorator = next(iter(decorators), None)

    # A pyi function body may only contain raises, mutations, '...', and a
    # leading docstring.
    exceptions = []
    mutators = []
    for i, stmt in enumerate(function.body):
        if isinstance(stmt, types.Raise):
            exceptions.append(stmt.exception)
        elif isinstance(stmt, Mutator):
            mutators.append(stmt)
        elif isinstance(stmt, types.Ellipsis):
            pass
        elif (isinstance(stmt, ast3.Expr) and
              isinstance(stmt.value, ast3.Str) and i == 0):
            pass  # docstring
        else:
            msg = textwrap.dedent(""" Unexpected statement in function body. Only `raise` statements and type mutations are valid """).lstrip()
            if isinstance(stmt, ast3.AST):
                raise ParseError(msg).at(stmt)
            else:
                raise ParseError(msg)

    sig = _pytd_signature(function, is_async, exceptions=exceptions)

    # Apply parameter mutations to the signature.
    for mutator in mutators:
        try:
            sig = sig.Visit(mutator)
        except NotImplementedError as e:
            raise ParseError(utils.message(e)) from e
        if not mutator.successful:
            raise ParseError("No parameter named %s" % mutator.name)

    return cls(name, sig, decorator, is_abstract, is_coroutine, is_overload)
def build_type_decl_unit(self, defs) -> pytd.TypeDeclUnit: """Return a pytd.TypeDeclUnit for the given defs (plus parser state).""" # defs contains both constant and function definitions. constants, functions, aliases, slots, classes = _split_definitions( defs) assert not slots # slots aren't allowed on the module level # TODO(mdemello): alias/constant handling is broken in some weird manner. # assert not aliases # We handle top-level aliases in add_alias_or_constant # constants.extend(self.constants) if self.module_info.module_name == "builtins": constants.extend(types.builtin_keyword_constants()) generated_classes = sum(self.generated_classes.values(), []) classes = generated_classes + classes functions = function.merge_method_signatures(functions) name_to_class = {c.name: c for c in classes} name_to_constant = {c.name: c for c in constants} aliases = [] for a in self.aliases.values(): t = _maybe_resolve_alias(a, name_to_class, name_to_constant) if t is None: continue elif isinstance(t, pytd.Function): functions.append(t) elif isinstance(t, pytd.Constant): constants.append(t) else: assert isinstance(t, pytd.Alias) aliases.append(t) all_names = ([f.name for f in functions] + [c.name for c in constants] + [c.name for c in self.type_params] + [c.name for c in classes] + [c.name for c in aliases]) duplicates = [ name for name, count in collections.Counter(all_names).items() if count >= 2 ] if duplicates: raise ParseError("Duplicate top-level identifier(s): " + ", ".join(duplicates)) properties = [x for x in functions if x.kind == pytd.PROPERTY] if properties: prop_names = ", ".join(p.name for p in properties) raise ParseError( "Module-level functions with property decorators: " + prop_names) return pytd.TypeDeclUnit(name=None, constants=tuple(constants), type_params=tuple(self.type_params), functions=tuple(functions), classes=tuple(classes), aliases=tuple(aliases))
def _convert_typed_dict_args(self, node):
    """Validate the arguments of a functional TypedDict(...) call."""
    # TODO(b/157603915): new_typed_dict currently doesn't do anything with the
    # args, so we don't bother converting them fully.
    msg = "Wrong args: expected TypedDict(name, {field: type, ...})"
    if len(node.args) != 2:
        raise ParseError(msg)
    name, fields = node.args
    if not isinstance(name, ast3.Str) or not isinstance(fields, ast3.Dict):
        raise ParseError(msg)
def _convert_typevar_args(self, node):
    """Normalize the arguments of a TypeVar(...) call, in place."""
    self.annotation_visitor.visit(node.keywords)
    if not node.args:
        raise ParseError("Missing arguments to TypeVar")
    name, *constraints = node.args
    if not isinstance(name, ast3.Str):
        raise ParseError("Bad arguments to TypeVar")
    node.args = [name.s] + [self.convert_node(c) for c in constraints]
    # Special-case late types in bound since typeshed uses it.
    for kw in node.keywords:
        if kw.arg == "bound" and isinstance(kw.value, types.Constant):
            val = types.string_value(kw.value, context="TypeVar bound")
            kw.value = self.annotation_visitor.convert_late_annotation(val)
def visit_BoolOp(self, node):
    """Evaluate a boolean operation over already-evaluated branch values.

    Args:
        node: A BoolOp whose values have already been reduced to booleans.

    Returns:
        The boolean result of or-ing / and-ing node.values.

    Raises:
        ParseError: On an operator other than Or/And.
    """
    if isinstance(node.op, ast3.Or):
        return any(node.values)
    if isinstance(node.op, ast3.And):
        return all(node.values)
    # Bug fix: node.op is an AST operator node, not a str; the original
    # concatenation ("..." + node.op) raised TypeError instead of the
    # intended ParseError. Use the operator's class name in the message.
    raise ParseError("Unexpected boolean operator: " + type(node.op).__name__)
def _is_property(name: str, decorator: str, signature: pytd.Signature) -> bool: """Parse a signature as a property getter, setter, or deleter. Checks that the decorator name matches one of {@property, @foo.getter, @foo.setter, @foo.deleter} and that the corresponding signature is valid. NOTE: This function assumes that all other recognised decorators have already been handled, and will therefore raise if decorator is not a property. Args: name: method name decorator: decorator signature: method signature Returns: True: If we have a valid property decorator False: If decorator is None Raises: ParseError: If we have a non-property decorator. """ if not decorator: return False sigs = _property_decorators(name) if decorator in sigs and sigs[decorator].arity == len(signature.params): return True raise ParseError("Unhandled decorator: %s" % decorator)
def _parse(src: str, feature_version: int, filename: str = ""):
    """Run the typed_ast parser, converting SyntaxError into ParseError."""
    try:
        return ast3.parse(src, filename, feature_version=feature_version)
    except SyntaxError as e:
        # Preserve the location information so callers can report it.
        raise ParseError(e.msg, line=e.lineno, filename=filename) from e
def visit_ImportFrom(self, node):
    """Record a from-import; import statements are only legal at module level."""
    if self.level > 0:
        raise ParseError("Import statements need to be at module level")
    imported_names = [_tuple_of_import(alias) for alias in node.names]
    module = _import_from_module(node.module, node.level)
    self.defs.add_import(module, imported_names)
    return Splice([])
def fail(self, name=None):
    """Raise a ParseError describing an unsupported condition check."""
    if name:
        detail = f"Unsupported condition: '{name}'. "
    else:
        detail = "Unsupported condition. "
    raise ParseError(
        detail + "Supported checks are sys.platform and sys.version_info")
def get_metaclass(keywords: List[ast3.AST], parents: List[pytd_node.Node]):
    """Scan classdef keywords for a metaclass.

    Args:
        keywords: The classdef keyword arguments.
        parents: The already-collected base classes (used to validate 'total').

    Returns:
        The metaclass value if present, else None.

    Raises:
        ParseError: On an unrecognized kwarg, or 'total' outside a TypedDict.
    """
    for kw in keywords:
        key, value = kw.arg, kw.value
        if key not in ("metaclass", "total"):
            raise ParseError("Unexpected classdef kwarg %r" % key)
        if key == "total":
            # 'total' is only meaningful on TypedDict subclasses.
            is_typed_dict = any(
                isinstance(parent, pytd.NamedType) and
                parent.name in _TYPED_DICT_ALIASES
                for parent in parents)
            if not is_typed_dict:
                raise ParseError(
                    "'total' allowed as classdef kwarg only for TypedDict subclasses"
                )
        if key == "metaclass":
            return value
    return None
def _convert_newtype_args(self, node): if len(node.args) != 2: msg = "Wrong args: expected NewType(name, [(field, type), ...])" raise ParseError(msg) name, typ = node.args typ = self.convert_node(typ) node.args = [name.s, typ]
def visit_If(self, node):
    """Splice in the surviving branch of an already-evaluated if statement."""
    # By this point the condition must have been reduced to a plain bool.
    if not isinstance(node.test, bool):
        raise ParseError("Unexpected if statement" + debug.dump(node, ast3))
    return Splice(node.body if node.test else node.orelse)
def visit_AnnAssign(self, node):
    """Convert an annotated assignment into a pytd.Constant."""
    name = node.target.id
    typ = node.annotation
    val = self.convert_node(node.value)
    # In a pyi, an annotated name may only be given '...' (Any) as its value.
    if val and not types.is_any(val):
        raise ParseError(
            f"Default value for {name}: {typ.name} can only be '...', got {val}")
    return pytd.Constant(name, typ)
def _convert_collections_namedtuple_args(self, node): if len(node.args) != 2: msg = "Wrong args: expected namedtuple(name, [field, ...])" raise ParseError(msg) name, fields = node.args fields = self.convert_node(fields) fields = [(types.string_value(n), pytd.AnythingType()) for n in fields] node.args = [name.s, fields]
def new_type(
        self, name: Union[str, pytd_node.Node],
        parameters: Optional[List[pytd_node.Node]] = None) -> pytd_node.Node:
    """Return the AST for a type.

    Args:
        name: The name of the type.
        parameters: List of type parameters.

    Returns:
        A pytd type node.

    Raises:
        ParseError: if the wrong number of parameters is supplied for the
            base_type - e.g., 2 parameters to Optional or no parameters to
            Union.
    """
    base_type = self.resolve_type(name)
    if not isinstance(base_type, pytd.NamedType):
        # We assume that all type parameters have been defined. Since pytype
        # orders type parameters to appear before classes and functions, this
        # assumption is generally safe. AnyStr is special-cased because
        # imported type parameters aren't recognized.
        type_params = self.type_params + [pytd.TypeParameter("typing.AnyStr")]
        base_type = base_type.Visit(_InsertTypeParameters(type_params))
        try:
            # Substitution may already fully resolve the parameterization.
            resolved_type = visitors.MaybeSubstituteParameters(
                base_type, parameters)
        except ValueError as e:
            raise ParseError(str(e)) from e
        if resolved_type:
            return resolved_type
    if parameters is not None:
        # Optional[x, y, ...] with multiple options is a user error.
        if (len(parameters) > 1 and isinstance(base_type, pytd.NamedType) and
                base_type.name == "typing.Optional"):
            raise ParseError("Too many options to %s" % base_type.name)
        return self._parameterized_type(base_type, parameters)
    else:
        # Bare Union/Optional/Intersection (the _TYPING_SETS) need parameters.
        if (isinstance(base_type, pytd.NamedType) and
                base_type.name in _TYPING_SETS):
            raise ParseError("Missing options to %s" % base_type.name)
        return base_type
def _convert_typing_namedtuple_args(self, node): # TODO(mdemello): handle NamedTuple("X", a=int, b=str, ...) if len(node.args) != 2: msg = "Wrong args: expected NamedTuple(name, [(field, type), ...])" raise ParseError(msg) name, fields = node.args fields = self.convert_node(fields) fields = [(types.string_value(n), t) for (n, t) in fields] node.args = [name.s, fields]
def EnterParameter(self, node):
    """Check that a mutated parameter's type only uses in-scope type params."""
    mutated = node.mutated_type
    if not isinstance(mutated, pytd.GenericType):
        return
    # Anything used by the mutation but not currently in scope is an error.
    out_of_scope = (self._GetTypeParameters(mutated) -
                    self.type_params_in_scope[-1])
    if out_of_scope:
        fn = pytd_utils.Print(self.current_function)
        raise ParseError("Type parameter(s) {%s} not in scope in\n\n%s" % (
            ", ".join(sorted(out_of_scope)), fn))
def _preprocess_decorator_list(self, node): decorators = [] for d in node.decorator_list: if isinstance(d, ast3.Name): decorators.append(d.id) elif isinstance(d, ast3.Attribute): decorators.append(f"{d.value.id}.{d.attr}") else: raise ParseError(f"Unexpected decorator: {d}") node.decorator_list = decorators
def check_for_duplicate_defs(methods, constants, aliases) -> None:
    """Check a class's list of definitions for duplicate names.

    Method names are deduplicated first, since overloads legitimately share a
    name; constants and aliases are not.

    Raises:
        ParseError: If any name is defined more than once.
    """
    names = list(set(m.name for m in methods))
    names += [c.name for c in constants]
    names += [a.name for a in aliases]
    dupes = [n for n, c in collections.Counter(names).items() if c > 1]
    if dupes:
        raise ParseError("Duplicate class-level identifier(s): " +
                         ", ".join(dupes))
def add_type_var(self, name, typevar):
    """Add a type variable, <name> = TypeVar(<name_arg>, <args>)."""
    # The assigned name and TypeVar's own name must agree.
    if name != typevar.name:
        raise ParseError("TypeVar name needs to be %r (not %r)" % (
            typevar.name, name))
    bound = typevar.bound
    if isinstance(bound, str):
        # A string bound is a late reference; wrap it as a named type.
        bound = pytd.NamedType(bound)
    constraints = tuple(typevar.constraints or ())
    self.type_params.append(
        pytd.TypeParameter(name=name, constraints=constraints, bound=bound))
def _qualify_name_with_special_dir(self, orig_name): """Handle the case of '.' and '..' as package names.""" if "__PACKAGE__." in orig_name: # Generated from "from . import foo" - see parser.yy prefix, _, name = orig_name.partition("__PACKAGE__.") if prefix: raise ParseError("Cannot resolve import: %s" % orig_name) return self.package_name + "." + name elif "__PARENT__." in orig_name: # Generated from "from .. import foo" - see parser.yy prefix, _, name = orig_name.partition("__PARENT__.") if prefix: raise ParseError("Cannot resolve import: %s" % orig_name) if not self.parent_name: raise ParseError( "Cannot resolve relative import ..: Package %s has no parent" % self.package_name) return self.parent_name + "." + name else: return None
def enter_If(self, node):
    """Evaluate the test and preemptively empty the dead branch.

    Dropping the losing branch up front means we never waste time
    traversing it.
    """
    node.test = conditions.evaluate(node.test, self.version, self.platform)
    if not isinstance(node.test, bool):
        raise ParseError("Unexpected if statement" + debug.dump(node, ast3))
    dead_branch = "orelse" if node.test else "body"
    setattr(node, dead_branch, [])
def _attribute_to_name(node: ast3.Attribute) -> ast3.Name:
    """Flatten a (possibly nested) attribute access into a dotted Name."""
    owner = node.value
    if isinstance(owner, ast3.Name):
        prefix = owner.id
    elif isinstance(owner, ast3.Attribute):
        # Recurse to flatten chained accesses like a.b.c.
        prefix = _attribute_to_name(owner).id
    elif isinstance(owner, (pytd.NamedType, pytd.Module)):
        prefix = owner.name
    else:
        raise ParseError(
            "Unexpected attribute access on %r [%s]" % (owner, type(owner)))
    return ast3.Name(prefix + "." + node.attr)
def from_call(cls, node: ast3.AST) -> "_TypeVar":
    """Construct a _TypeVar from an ast.Call node."""
    name, *constraints = node.args
    # TODO(rechen): We should enforce the PEP 484 guideline that
    # len(constraints) != 1. However, this guideline is currently violated
    # in typeshed (see https://github.com/python/typeshed/pull/806).
    keyword_names = {kw.arg for kw in node.keywords}
    unknown = keyword_names - {"bound", "covariant", "contravariant"}
    if unknown:
        raise ParseError("Unrecognized keyword(s): %s" % ", ".join(unknown))
    # 'bound' is the only keyword argument we currently use.
    bound = next(
        (kw.value for kw in node.keywords if kw.arg == "bound"), None)
    return cls(name, bound, constraints)
def qualify_name(self, orig_name):
    """Qualify an import name."""
    # Doing the "builtins" rename here ensures that we catch alias names.
    orig_name = visitors.RenameBuiltinsPrefixInName(orig_name)
    if not self.package_name:
        return orig_name
    rel_name = self._qualify_name_with_special_dir(orig_name)
    if rel_name:
        return rel_name
    if not orig_name.startswith("."):
        # Absolute names pass through untouched.
        return orig_name
    name = module_utils.get_absolute_name(self.package_name, orig_name)
    if name is None:
        raise ParseError("Cannot resolve relative import %s" %
                         orig_name.rsplit(".", 1)[0])
    return name