def visit_instance(self, t: Instance) -> None:
    """Validate the type arguments of an instance against its type variables.

    Type argument *counts* were already checked in the main semantic
    analyzer pass; only value restrictions and upper bounds are verified
    here.
    """
    info = t.type
    if isinstance(info, FakeInfo):
        # Placeholder TypeInfo; nothing meaningful to check.
        # https://github.com/python/mypy/issues/11079
        return
    for idx, (type_arg, tvar_def) in enumerate(zip(t.args, info.defn.type_vars)):
        if not isinstance(tvar_def, TypeVarType):
            continue
        if isinstance(type_arg, ParamSpecType):
            # TODO: Better message
            self.fail(f'Invalid location for ParamSpec "{type_arg.name}"', t)
            continue
        if tvar_def.values:
            # The type variable has a value restriction; the argument must
            # stay within it.
            if isinstance(type_arg, TypeVarType):
                candidate_values = type_arg.values
                if not candidate_values:
                    # An unrestricted TypeVar cannot be used where a
                    # value-restricted one is expected.
                    self.fail(
                        message_registry.INVALID_TYPEVAR_AS_TYPEARG.format(
                            type_arg.name, info.name),
                        t, code=codes.TYPE_VAR)
                    continue
            else:
                candidate_values = [type_arg]
            self.check_type_var_values(info, candidate_values, tvar_def.name,
                                       tvar_def.values, idx + 1, t)
        if not is_subtype(type_arg, tvar_def.upper_bound):
            self.fail(
                message_registry.INVALID_TYPEVAR_ARG_BOUND.format(
                    format_type(type_arg), info.name,
                    format_type(tvar_def.upper_bound)),
                t, code=codes.TYPE_VAR)
    super().visit_instance(t)
def _infer_type_from_left_and_inferred_right(
    api: SemanticAnalyzerPluginInterface,
    node: Var,
    left_hand_explicit_type: Optional[types.Type],
    python_type_for_type: Union[Instance, UnionType],
) -> Optional[Union[Instance, UnionType]]:
    """Validate type when a left hand annotation is present and we also
    could infer the right hand side::

        attrname: SomeType = Column(SomeDBType)

    Emits an error when the explicit annotation is not a subtype of the
    inferred ORM expression type; always returns the explicit annotation.
    """
    if is_subtype(left_hand_explicit_type, python_type_for_type):
        # Explicit annotation is compatible with the inferred type.
        return left_hand_explicit_type

    # Wrap the inferred type in Mapped[...] purely for the error message.
    descriptor = api.modules["sqlalchemy.orm.attributes"].names["Mapped"]
    effective_type = Instance(descriptor.node, [python_type_for_type])
    util.fail(
        api,
        "Left hand assignment '{}: {}' not compatible "
        "with ORM mapped expression of type {}".format(
            node.name,
            format_type(left_hand_explicit_type),
            format_type(effective_type),
        ),
        node,
    )
    return left_hand_explicit_type
def register_function(ctx: PluginContext, singledispatch_obj: Instance, func: Type,
                      register_arg: Optional[Type] = None) -> None:
    """Register a function with a singledispatch callable.

    Validates that the registered implementation's dispatch type is a
    subtype of the fallback function's first argument; reports an error
    otherwise. Silently does nothing when `func` is not callable or when
    singledispatch metadata is missing (an error was already reported).
    """
    func = get_proper_type(func)
    if not isinstance(func, CallableType):
        return
    metadata = get_singledispatch_info(singledispatch_obj)
    if metadata is None:
        # if we never added the fallback to the type variables, we already
        # reported an error, so just don't do anything here
        return
    dispatch_type = get_dispatch_type(func, register_arg)
    if dispatch_type is None:
        # TODO: report an error here that singledispatch requires at least one argument
        # (might want to do the error reporting in get_dispatch_type)
        return
    fallback = metadata.fallback
    fallback_dispatch_type = fallback.arg_types[0]
    if not is_subtype(dispatch_type, fallback_dispatch_type):
        fail(
            ctx,
            'Dispatch type {} must be subtype of fallback function first argument {}'.format(
                format_type(dispatch_type), format_type(fallback_dispatch_type)),
            func.definition)
        return
    # Fix: the original ended with a redundant duplicated bare `return`
    # (`return return` once collapsed); the dead statement is removed.
def array_constructor_callback(ctx: 'mypy.plugin.FunctionContext') -> Type:
    """Callback to provide an accurate signature for the ctypes.Array constructor."""
    # Extract the element type from the constructor's return type, i. e. the
    # type of the array being constructed.
    element_type = _get_array_element_type(ctx.default_return_type)
    if element_type is None:
        return ctx.default_return_type

    allowed = _autoconvertible_to_cdata(element_type, ctx.api)
    assert len(ctx.arg_types) == 1, \
        "The stub of the ctypes.Array constructor should have a single vararg parameter"
    for arg_num, (arg_kind, arg_type) in enumerate(
            zip(ctx.arg_kinds[0], ctx.arg_types[0]), 1):
        if arg_kind == nodes.ARG_POS and not is_subtype(arg_type, allowed):
            # A positional argument must be convertible to the element type.
            ctx.api.msg.fail(
                'Array constructor argument {} of type {}'
                ' is not convertible to the array element type {}'
                .format(arg_num, format_type(arg_type), format_type(element_type)),
                ctx.context)
        elif arg_kind == nodes.ARG_STAR:
            # A *args argument must be an iterable of convertible values.
            iterable_of_allowed = ctx.api.named_generic_type(
                "typing.Iterable", [allowed])
            if not is_subtype(arg_type, iterable_of_allowed):
                iterable_of_element = ctx.api.named_generic_type(
                    "typing.Iterable", [element_type])
                ctx.api.msg.fail(
                    'Array constructor argument {} of type {}'
                    ' is not convertible to the array element type {}'
                    .format(arg_num, format_type(arg_type),
                            format_type(iterable_of_element)),
                    ctx.context)
    return ctx.default_return_type
def _infer_type_from_left_and_inferred_right(
    api: SemanticAnalyzerPluginInterface,
    node: Var,
    left_hand_explicit_type: ProperType,
    python_type_for_type: ProperType,
    orig_left_hand_type: Optional[ProperType] = None,
    orig_python_type_for_type: Optional[ProperType] = None,
) -> Optional[ProperType]:
    """Validate type when a left hand annotation is present and we also
    could infer the right hand side::

        attrname: SomeType = Column(SomeDBType)

    The ``orig_*`` parameters, when given, are the un-unwrapped types used
    only for error reporting and as the return value.
    """
    # Fall back to the working types when no originals were supplied.
    orig_left_hand_type = (
        left_hand_explicit_type if orig_left_hand_type is None
        else orig_left_hand_type
    )
    orig_python_type_for_type = (
        python_type_for_type if orig_python_type_for_type is None
        else orig_python_type_for_type
    )

    if not is_subtype(left_hand_explicit_type, python_type_for_type):
        # Build Mapped[<inferred>] purely for the error message.
        effective_type = api.named_type(
            "__sa_Mapped", [orig_python_type_for_type]
        )
        util.fail(
            api,
            "Left hand assignment '{}: {}' not compatible "
            "with ORM mapped expression of type {}".format(
                node.name,
                format_type(orig_left_hand_type),
                format_type(effective_type),
            ),
            node,
        )
    return orig_left_hand_type
def _infer_type_from_left_and_inferred_right(
    api: SemanticAnalyzerPluginInterface,
    node: Var,
    left_hand_explicit_type: Optional[types.Type],
    python_type_for_type: Union[Instance, UnionType],
    type_is_a_collection: bool = False,
) -> Optional[Union[Instance, UnionType]]:
    """Validate type when a left hand annotation is present and we also
    could infer the right hand side::

        attrname: SomeType = Column(SomeDBType)

    When ``type_is_a_collection`` is set, the comparison is made on the
    first type argument of each side (e.g. the element type of a
    collection) rather than on the collection types themselves.

    NOTE(review): despite the Optional annotation, callers appear to pass a
    non-None ``left_hand_explicit_type`` here (its ``.args`` is accessed
    unconditionally) — confirm against callers.
    """
    orig_left_hand_type = left_hand_explicit_type
    orig_python_type_for_type = python_type_for_type

    # Fix: also require python_type_for_type.args to be non-empty before
    # unwrapping; the original indexed python_type_for_type.args[0]
    # unconditionally and would raise IndexError (crashing the plugin) when
    # the inferred type had no type arguments.
    if (
        type_is_a_collection
        and left_hand_explicit_type.args
        and python_type_for_type.args
    ):
        left_hand_explicit_type = left_hand_explicit_type.args[0]
        python_type_for_type = python_type_for_type.args[0]

    if not is_subtype(left_hand_explicit_type, python_type_for_type):
        # Build Mapped[<inferred>] purely for the error message.
        descriptor = api.lookup("__sa_Mapped", node)
        effective_type = Instance(descriptor.node, [orig_python_type_for_type])
        msg = ("Left hand assignment '{}: {}' not compatible "
               "with ORM mapped expression of type {}")
        util.fail(
            api,
            msg.format(
                node.name,
                format_type(orig_left_hand_type),
                format_type(effective_type),
            ),
            node,
        )
    return orig_left_hand_type
def process_newtype_declaration(self, s: AssignmentStmt) -> None:
    """Check if s declares a NewType; if yes, store it in symbol table."""
    # Pull the name and the NewType(...) call out of the assignment.
    name, call = self.analyze_newtype_declaration(s)
    if name is None or call is None:
        return

    old_type = self.check_newtype_args(name, call, s)
    call.analyzed = NewTypeExpr(name, old_type, line=call.line, column=call.column)
    if old_type is None:
        return

    # Create the corresponding class definition if the aliased type is subtypeable
    if isinstance(old_type, TupleType):
        class_info = self.build_newtype_typeinfo(
            name, old_type, old_type.partial_fallback)
        class_info.tuple_type = old_type
    elif isinstance(old_type, Instance):
        if old_type.type.is_protocol:
            self.fail("NewType cannot be used with protocol classes", s)
        class_info = self.build_newtype_typeinfo(name, old_type, old_type)
    else:
        message = "Argument 2 to NewType(...) must be subclassable (got {})"
        self.fail(message.format(format_type(old_type)), s)
        return

    check_for_explicit_any(old_type, self.options,
                           self.api.is_typeshed_stub_file, self.msg, context=s)
    if self.options.disallow_any_unimported and has_any_from_unimported_type(old_type):
        self.msg.unimported_type_becomes_any("Argument 2 to NewType(...)",
                                             old_type, s)

    # If so, add it to the symbol table.
    node = self.api.lookup(name, s)
    if node is None:
        self.fail("Could not find {} in current namespace".format(name), s)
        return
    # TODO: why does NewType work in local scopes despite always being of kind GDEF?
    node.kind = GDEF
    call.analyzed.info = node.node = class_info
def array_raw_callback(ctx: 'mypy.plugin.AttributeContext') -> Type:
    """Callback to provide an accurate type for ctypes.Array.raw."""
    element_type = _get_array_element_type(ctx.type)
    if element_type is None:
        return ctx.default_attr_type

    result_types: List[Type] = []
    for item in union_items(element_type):
        is_c_char = (isinstance(item, Instance)
                     and item.type.fullname == 'ctypes.c_char')
        if isinstance(item, AnyType) or is_c_char:
            result_types.append(_get_bytes_type(ctx.api))
        else:
            # .raw is only defined for c_char arrays.
            ctx.api.msg.fail(
                'Array attribute "raw" is only available'
                ' with element type "c_char", not {}'
                .format(format_type(element_type)), ctx.context)
    return make_simplified_union(result_types)
def array_value_callback(ctx: 'mypy.plugin.AttributeContext') -> Type:
    """Callback to provide an accurate type for ctypes.Array.value."""
    element_type = _get_array_element_type(ctx.type)
    if element_type is None:
        return ctx.default_attr_type

    result_types: List[Type] = []
    for item in union_items(element_type):
        if isinstance(item, AnyType):
            result_types.append(
                AnyType(TypeOfAny.from_another_any, source_any=item))
        elif isinstance(item, Instance) and item.type.fullname == 'ctypes.c_char':
            result_types.append(_get_bytes_type(ctx.api))
        elif isinstance(item, Instance) and item.type.fullname == 'ctypes.c_wchar':
            result_types.append(_get_text_type(ctx.api))
        else:
            # .value is only defined for c_char / c_wchar arrays.
            ctx.api.msg.fail(
                'Array attribute "value" is only available'
                ' with element type "c_char" or "c_wchar", not {}'.format(
                    format_type(element_type)), ctx.context)
    return make_simplified_union(result_types)
def process_newtype_declaration(self, s: AssignmentStmt) -> bool:
    """Check if s declares a NewType; if yes, store it in symbol table.

    Return True if it's a NewType declaration. The current target may be
    deferred as a side effect if the base type is not ready, even if
    the return value is True.

    The logic in this function mostly copies the logic for visit_class_def()
    with a single (non-Generic) base.
    """
    name, call = self.analyze_newtype_declaration(s)
    if name is None or call is None:
        # Not a NewType declaration at all.
        return False
    # OK, now we know this is a NewType. But the base type may be not ready yet,
    # add placeholder as we do for ClassDef.

    fullname = self.api.qualified_name(name)
    if (not call.analyzed
            or isinstance(call.analyzed, NewTypeExpr) and not call.analyzed.info):
        # Start from labeling this as a future class, as we do for normal ClassDefs.
        placeholder = PlaceholderNode(fullname, s, s.line, becomes_typeinfo=True)
        self.api.add_symbol(name, placeholder, s, can_defer=False)

    old_type, should_defer = self.check_newtype_args(name, call, s)
    old_type = get_proper_type(old_type)
    if not call.analyzed:
        # First iteration over this declaration: record the analyzed form.
        call.analyzed = NewTypeExpr(name, old_type, line=call.line, column=call.column)
    if old_type is None:
        if should_defer:
            # Base type is not ready.
            self.api.defer()
        # Still a NewType declaration even though analysis is incomplete.
        return True

    # Create the corresponding class definition if the aliased type is subtypeable
    if isinstance(old_type, TupleType):
        newtype_class_info = self.build_newtype_typeinfo(
            name, old_type, old_type.partial_fallback)
        newtype_class_info.tuple_type = old_type
    elif isinstance(old_type, Instance):
        if old_type.type.is_protocol:
            self.fail("NewType cannot be used with protocol classes", s)
        newtype_class_info = self.build_newtype_typeinfo(name, old_type, old_type)
    else:
        if old_type is not None:
            message = "Argument 2 to NewType(...) must be subclassable (got {})"
            self.fail(message.format(format_type(old_type)), s,
                      code=codes.VALID_NEWTYPE)
        # Otherwise the error was already reported.

        # Fall back to Any with an object fallback so analysis can proceed.
        old_type = AnyType(TypeOfAny.from_error)
        object_type = self.api.named_type('__builtins__.object')
        newtype_class_info = self.build_newtype_typeinfo(name, old_type, object_type)
        newtype_class_info.fallback_to_any = True

    check_for_explicit_any(old_type, self.options, self.api.is_typeshed_stub_file,
                           self.msg, context=s)
    if self.options.disallow_any_unimported and has_any_from_unimported_type(old_type):
        self.msg.unimported_type_becomes_any("Argument 2 to NewType(...)", old_type, s)

    # If so, add it to the symbol table.
    assert isinstance(call.analyzed, NewTypeExpr)
    # As we do for normal classes, create the TypeInfo only once, then just
    # update base classes on next iterations (to get rid of placeholders there).
    if not call.analyzed.info:
        call.analyzed.info = newtype_class_info
    else:
        call.analyzed.info.bases = newtype_class_info.bases
    self.api.add_symbol(name, call.analyzed.info, s)
    newtype_class_info.line = s.line
    return True