def py__get__(self, instance, class_value):
    """Descriptor protocol for functions: bind to an instance, or not.

    Accessing ``Foo.bar`` (no instance) yields the plain function;
    accessing ``foo.bar`` yields a method bound to that instance.
    """
    from medi.inference.value.instance import BoundMethod

    if instance is None:
        # Class-level access: no binding happens, return the function itself.
        return ValueSet([self])
    bound = BoundMethod(instance, class_value.as_context(), self)
    return ValueSet([bound])
def to_stub(value):
    """Map *value* to its stub (``.pyi``) counterpart, or ``NO_VALUES``.

    Walks the qualified name of *value* inside the corresponding stub
    module. Instances and bound methods are unwrapped to their class
    first and re-instantiated/re-bound on the stub side afterwards.
    """
    if value.is_stub():
        # Already a stub, nothing to translate.
        return ValueSet([value])

    was_instance = value.is_instance()
    if was_instance:
        # Look up the class in the stub; re-instantiate later.
        value = value.py__class__()

    qualified_names = value.get_qualified_names()
    stub_module = _load_stub_module(value.get_root_context().get_value())
    if stub_module is None or qualified_names is None:
        return NO_VALUES

    was_bound_method = value.is_bound_method()
    if was_bound_method:
        # Infer the object first. We can infer the method later.
        method_name = qualified_names[-1]
        qualified_names = qualified_names[:-1]
        was_instance = True

    # Resolve the dotted path attribute by attribute inside the stub module.
    stub_values = ValueSet([stub_module])
    for name in qualified_names:
        stub_values = stub_values.py__getattribute__(name)

    if was_instance:
        # Re-instantiate the stub classes to mirror the original instance.
        stub_values = ValueSet.from_sets(
            c.execute_with_values()
            for c in stub_values
            if c.is_class()
        )
    if was_bound_method:
        # Now that the instance has been properly created, we can simply get
        # the method.
        stub_values = stub_values.py__getattribute__(method_name)
    return stub_values
def _infer_subscript_list(context, index):
    """
    Handles slices in subscript nodes.
    """
    if index == ':':
        # Bare colon, e.g. ``array[:]``.
        return ValueSet([iterable.Slice(context, None, None, None)])

    if index.type == 'subscript' and index.children[0] != '.':
        # A subscript node basically implies a slice operation, except for
        # Python 2's Ellipsis (the leading-dot case excluded above),
        # e.g. ``array[:3]``.
        parts = []
        for child in index.children:
            if child == ':':
                if not parts:
                    parts.append(None)
            elif child.type == 'sliceop':
                if len(child.children) == 2:
                    parts.append(child.children[1])
            else:
                parts.append(child)
        # Pad to exactly (start, stop, step).
        parts += [None] * (3 - len(parts))
        return ValueSet([iterable.Slice(context, *parts)])

    if index.type == 'subscriptlist':
        return ValueSet([iterable.SequenceLiteralValue(
            context.inference_state, context, index)])

    # Not a slice at all; just infer the index expression.
    return context.infer_node(index)
def execute_annotation(self):
    """Execute this ``typing`` construct as an annotation.

    Special forms (``Union``, ``Optional``, ``Type``, ``ClassVar``) are
    handled directly; the remaining names are dispatched to their
    wrapper classes. Unknown names raise ``KeyError``.
    """
    string_name = self._tree_name.value

    if string_name == 'Union':
        # This is kind of a special case, because we have Unions (in Medi
        # ValueSets).
        return self.gather_annotation_classes().execute_annotation()
    if string_name == 'Optional':
        # Optional[X] is just X-or-None.
        none = ValueSet([builtin_from_name(self.inference_state, u'None')])
        return self.gather_annotation_classes().execute_annotation() | none
    if string_name == 'Type':
        # The type is actually already given in the index_value.
        return self._generics_manager[0]
    if string_name == 'ClassVar':
        # For now don't do anything here, ClassVars are always used.
        return self._generics_manager[0].execute_annotation()

    cls = {
        'Tuple': Tuple,
        'Generic': Generic,
        'Protocol': Protocol,
        'Callable': Callable,
    }[string_name]
    return ValueSet([
        cls(
            self.parent_context,
            self,
            self._tree_name,
            generics_manager=self._generics_manager,
        )
    ])
def define_generics(self, type_var_dict):
    """Substitute type variables in every generic of this class.

    Returns ``ValueSet([self])`` unchanged when no type var actually
    resolved to something new, to keep cached results valid.
    """
    from medi.inference.gradual.type_var import TypeVar

    changed = False
    new_generics = []
    for generic_set in self.get_generics():
        resolved = NO_VALUES
        for generic in generic_set:
            if isinstance(generic, (DefineGenericBaseClass, TypeVar)):
                substituted = generic.define_generics(type_var_dict)
                resolved |= substituted
                if substituted != ValueSet({generic}):
                    changed = True
            else:
                resolved |= ValueSet([generic])
        new_generics.append(resolved)

    if not changed:
        # There might not be any type vars that change. In that case just
        # return itself, because it does not make sense to potentially lose
        # cached results.
        return ValueSet([self])

    manager = TupleGenericManager(tuple(new_generics))
    return ValueSet([self._create_instance_with_generics(manager)])
def py__call__(self, arguments=None):
    """Instantiate this class, producing a TreeInstance (or TypedDict)."""
    from medi.inference.value import TreeInstance
    from medi.inference.gradual.typing import TypedDict

    if self.is_typeddict():
        # TypedDict subclasses get their own dedicated value type.
        return ValueSet([TypedDict(self)])

    instance = TreeInstance(
        self.inference_state, self.parent_context, self, arguments)
    return ValueSet([instance])
def py__getitem__(self, index_value_set, contextualized_node):
    """Subscript this class with generics, one GenericClass per index value."""
    from medi.inference.gradual.base import GenericClass

    if not index_value_set:
        # Nothing to parametrize with; keep the plain class.
        return ValueSet([self])

    def _parametrize(index_value):
        return GenericClass(
            self,
            LazyGenericManager(
                context_of_index=contextualized_node.context,
                index_value=index_value,
            ))

    return ValueSet(_parametrize(v) for v in index_value_set)
def _tuple(self):
    """Yield one zero-argument callable per generic of the tuple index.

    Each callable lazily resolves forward references when invoked.
    """
    def bind(lazy_value):
        # Bind lazy_value now: a closure created directly inside the loop
        # below would late-bind and every callable would see the last
        # element.
        return lambda: ValueSet(_resolve_forward_references(
            self._context_of_index,
            lazy_value.infer(),
        ))

    if isinstance(self._index_value, SequenceLiteralValue):
        # e.g. Tuple[int, str] — one entry per literal element.
        for lazy_value in self._index_value.py__iter__(
                contextualized_node=None):
            yield bind(lazy_value)
    else:
        # Single (non-literal) index value.
        yield lambda: ValueSet(_resolve_forward_references(
            self._context_of_index,
            ValueSet([self._index_value]),
        ))
def load_module_from_path(inference_state, file_io, import_names=None, is_package=None):
    """
    This should pretty much only be used for get_modules_containing_name.
    It's here to ensure that a random path is still properly loaded into
    the Medi module structure.

    When *import_names* is None, they are derived from sys.path; when
    given, *is_package* must be an explicit bool. ``.pyi`` files are
    loaded as stub modules, paired with their runtime counterpart when
    one exists on disk.
    """
    path = file_io.path
    if import_names is None:
        e_sys_path = inference_state.get_sys_path()
        import_names, is_package = sys_path.transform_path_to_dotted(
            e_sys_path, path)
    else:
        assert isinstance(is_package, bool)

    is_stub = file_io.path.endswith('.pyi')
    if is_stub:
        folder_io = file_io.get_parent_folder()
        if folder_io.path.endswith('-stubs'):
            # PEP 561 "-stubs" packages: the runtime package lives in the
            # folder without the suffix.
            folder_io = FolderIO(folder_io.path[:-6])
        if file_io.path.endswith('__init__.pyi'):
            python_file_io = folder_io.get_file_io('__init__.py')
        else:
            python_file_io = folder_io.get_file_io(import_names[-1] + '.py')

        try:
            # Recursively load the runtime (.py) twin of this stub.
            v = load_module_from_path(
                inference_state, python_file_io,
                import_names, is_package=is_package)
            values = ValueSet([v])
        except FileNotFoundError:
            # Stub-only distribution; no runtime module available.
            values = NO_VALUES

        return create_stub_module(
            inference_state, values,
            parse_stub_module(inference_state, file_io),
            file_io, import_names)
    else:
        module = _load_python_module(
            inference_state, file_io,
            import_names=import_names,
            is_package=is_package,
        )
        # Cache under its dotted name so later imports find it.
        inference_state.module_cache.add(import_names, ValueSet([module]))
        return module
def convert_values(values, only_stubs=False, prefer_stubs=False, ignore_compiled=True):
    """Convert between stub and non-stub values.

    With ``only_stubs``/``prefer_stubs`` set, values are mapped to their
    stub counterparts (``prefer_stubs`` keeps the original when no stub
    exists). Otherwise stub values are mapped back to runtime values.
    """
    assert not (only_stubs and prefer_stubs)
    with debug.increase_indent_cm('convert values'):
        if only_stubs or prefer_stubs:
            def _stubify(value):
                stubs = to_stub(value)
                if stubs:
                    return stubs
                # No stub found; keep the original only in "prefer" mode.
                return ValueSet({value}) if prefer_stubs else NO_VALUES

            return ValueSet.from_sets(_stubify(v) for v in values)

        return ValueSet.from_sets(
            _stub_to_python_value_set(v, ignore_compiled=ignore_compiled)
            or ValueSet({v})
            for v in values)
def infer(self):
    """Infer this stub name, special-casing ``sys.version_info``."""
    inferred = super(StubName, self).infer()
    is_version_info = (
        self.string_name == 'version_info'
        and self.get_root_context().py__name__() == 'sys'
    )
    if is_version_info:
        from medi.inference.gradual.stub_value import VersionInfo
        # Wrap so version_info reflects the analyzed environment.
        return ValueSet(VersionInfo(c) for c in inferred)
    return inferred
def infer_return_types(function, arguments):
    """
    Infers the type of a function's return value,
    according to type annotations.

    Falls back to a Python-2 style ``# type: (...) -> X`` comment when
    no ``->`` annotation exists. Type variables found in the annotation
    are resolved against the actual call *arguments*.
    """
    all_annotations = py__annotations__(function.tree_node)
    annotation = all_annotations.get("return", None)
    if annotation is None:
        # If there is no Python 3-type annotation, look for a Python 2-type
        # annotation comment on the same line as the def.
        node = function.tree_node
        comment = parser_utils.get_following_comment_same_line(node)
        if comment is None:
            return NO_VALUES

        match = re.match(r"^#\s*type:\s*\([^#]*\)\s*->\s*([^#]*)", comment)
        if not match:
            return NO_VALUES

        return _infer_annotation_string(
            function.get_default_param_context(),
            match.group(1).strip()
        ).execute_annotation()

    context = function.get_default_param_context()
    unknown_type_vars = find_unknown_type_vars(context, annotation)
    annotation_values = infer_annotation(context, annotation)
    if not unknown_type_vars:
        # No type vars to resolve; just execute the annotation directly.
        return annotation_values.execute_annotation()

    # Match type vars in the annotation against the call-site arguments.
    type_var_dict = infer_type_vars_for_execution(function, arguments, all_annotations)

    return ValueSet.from_sets(
        # Substitute resolved type vars into generic annotations.
        ann.define_generics(type_var_dict)
        if isinstance(ann, (DefineGenericBaseClass, TypeVar))
        else ValueSet({ann})
        for ann in annotation_values
    ).execute_annotation()
def infer(self, context, name):
    """Infer the values a tree name refers to, keyed on its definition type.

    Dispatches on the parso node type of the name's definition
    (classdef, funcdef, expr_stmt, for_stmt, imports, with_stmt, param)
    and falls back to call-of-leaf inference otherwise.
    """
    def_ = name.get_definition(import_name_always=True)
    if def_ is not None:
        type_ = def_.type
        is_classdef = type_ == 'classdef'
        if is_classdef or type_ == 'funcdef':
            if is_classdef:
                c = ClassValue(self, context, name.parent)
            else:
                c = FunctionValue.from_context(context, name.parent)
            return ValueSet([c])

        if type_ == 'expr_stmt':
            # Only handle plain names here; attribute/call targets fall
            # through to the generic leaf inference below.
            is_simple_name = name.parent.type not in ('power', 'trailer')
            if is_simple_name:
                return infer_expr_stmt(context, def_, name)
        if type_ == 'for_stmt':
            # children[3] is the iterated expression of the for statement.
            container_types = context.infer_node(def_.children[3])
            cn = ContextualizedNode(context, def_.children[3])
            for_types = iterate_values(container_types, cn)
            n = TreeNameDefinition(context, name)
            # Handles ``for a, b in ...`` tuple unpacking.
            return check_tuple_assignments(n, for_types)
        if type_ in ('import_from', 'import_name'):
            return imports.infer_import(context, name)
        if type_ == 'with_stmt':
            return tree_name_to_values(self, context, name)
        elif type_ == 'param':
            return context.py__getattribute__(name.value, position=name.end_pos)
    else:
        # Names inside error nodes (broken syntax) may still be imports.
        result = follow_error_node_imports_if_possible(context, name)
        if result is not None:
            return result

    return helpers.infer_call_of_leaf(context, name)
def _apply_decorators(context, node):
    """
    Return the value that should be executed in the end.
    This is also the place where the decorators are processed.

    Decorators are applied innermost-first (hence ``reversed``). If any
    decorator cannot be resolved, the undecorated value is returned as a
    best effort.
    """
    if node.type == 'classdef':
        decoratee_value = ClassValue(
            context.inference_state,
            parent_context=context,
            tree_node=node
        )
    else:
        decoratee_value = FunctionValue.from_context(context, node)
    initial = values = ValueSet([decoratee_value])
    if is_big_annoying_library(context):
        # Skip decorator processing for known-expensive libraries.
        return values
    for dec in reversed(node.get_decorators()):
        debug.dbg('decorator: %s %s', dec, values, color="MAGENTA")
        with debug.increase_indent_cm():
            # children[1] is the decorator expression after the '@'.
            dec_values = context.infer_node(dec.children[1])
            trailer_nodes = dec.children[2:-1]
            if trailer_nodes:
                # Create a trailer and infer it (decorator with arguments,
                # e.g. ``@deco(x)``).
                trailer = tree.PythonNode('trailer', trailer_nodes)
                trailer.parent = dec
                dec_values = infer_trailer(context, dec_values, trailer)

            if not len(dec_values):
                code = dec.get_code(include_prefix=False)
                # For the short future, we don't want to hear about the runtime
                # decorator in typing that was intentionally omitted. This is not
                # "correct", but helps with debugging.
                if code != '@runtime\n':
                    debug.warning('decorator not found: %s on %s', dec, node)
                return initial

            values = dec_values.execute(arguments.ValuesArguments([values]))
            if not len(values):
                debug.warning('not possible to resolve wrappers found %s', node)
                return initial

        debug.dbg('decorator end %s', values, color="MAGENTA")
    if values != initial:
        # Remember the original decoratee so e.g. signatures still resolve.
        return ValueSet([Decoratee(c, decoratee_value) for c in values])
    return values
def get_key_values(self):
    """Return string objects for every attribute name of the definition class."""
    instance_filters = self._definition_class.get_filters(is_instance=True)
    names = itertools.chain.from_iterable(
        f.values() for f in instance_filters)
    return ValueSet({
        create_simple_object(self.inference_state, name.string_name)
        for name in names
    })
def define_generics(self, type_var_dict):
    """Resolve this type variable against *type_var_dict*.

    Falls back to the type var's own classes (bound/constraints) and
    finally to the type var itself when nothing matches.
    """
    found = type_var_dict.get(self.py__name__())
    if found:
        # The type var is bound to a non-empty value set; use it.
        return found
    return self._get_classes() or ValueSet({self})
def execute_operation(self, other, operator):
    """Apply *operator* between this compiled value and *other*.

    Returns NO_VALUES when the underlying objects do not support the
    operator (TypeError from the access layer).
    """
    try:
        access = self.access_handle.execute_operation(
            other.access_handle, operator)
        return ValueSet([
            create_from_access_path(self.inference_state, access)])
    except TypeError:
        return NO_VALUES
def infer(self):
    """Infer a mixed (compiled + tree) name.

    Functions on instances/classes are paired with their tree
    counterparts directly; everything else goes through ``_create``.
    """
    compiled_value = self._wrapped_name.infer_compiled_value()
    tree_value = self._parent_tree_value
    if tree_value.is_instance() or tree_value.is_class():
        tree_values = tree_value.py__getattribute__(self.string_name)
        if compiled_value.is_function():
            return ValueSet(
                MixedObject(compiled_value, v) for v in tree_values)

    module_context = tree_value.get_root_context()
    return _create(self._inference_state, compiled_value, module_context)
def infer(self):
    """Infer the value of this string, or a generic string set when unknown."""
    if self._string_value is None:
        # Unknown content; fall back to a generic string value set.
        return compiled.get_string_value_set(
            self.parent_context.inference_state)

    s = self._string_value
    state = self.parent_context.inference_state
    # Python 2 environments expect bytes for plain strings.
    if state.environment.version_info.major == 2 \
            and not isinstance(s, bytes):
        s = s.encode('utf-8')
    return ValueSet([create_simple_object(state, s)])
def py__getitem__(self, index_value_set, contextualized_node):
    """Create one cached parametrized class per index value."""
    def _make(index_value):
        return self.index_class.create_cached(
            self.inference_state,
            self.parent_context,
            self._tree_name,
            generics_manager=LazyGenericManager(
                context_of_index=contextualized_node.context,
                index_value=index_value,
            ))

    return ValueSet(_make(v) for v in index_value_set)
def _load_stub_module(module):
    """Return the stub counterpart of *module* (or *module* if it is one)."""
    if module.is_stub():
        # Already a stub, nothing to load.
        return module
    state = module.inference_state
    return try_to_load_stub_cached(
        state,
        import_names=module.string_names,
        python_value_set=ValueSet([module]),
        parent_module_value=None,
        sys_path=state.get_sys_path(),
    )
def _create(inference_state, compiled_value, module_context):
    """Pair *compiled_value* with its syntax-tree counterpart(s).

    Locates the source definition of the wrapped Python object (or its
    stub) and returns MixedObjects combining both sides; falls back to
    the bare compiled value when no source can be found.
    """
    # TODO accessing this is bad, but it probably doesn't matter that much,
    # because we're working with interpreteters only here.
    python_object = compiled_value.access_handle.access._obj
    result = _find_syntax_node_name(inference_state, python_object)
    if result is None:
        # TODO Care about generics from stuff like `[1]` and don't return
        # like this.
        if type(python_object) in (dict, list, tuple):
            return ValueSet({compiled_value})

        tree_values = to_stub(compiled_value)
        if not tree_values:
            # Neither source nor stub found; stay purely compiled.
            return ValueSet({compiled_value})
    else:
        module_node, tree_node, file_io, code_lines = result

        if module_context is None or module_context.tree_node != module_node:
            # The found source lives in a different module; build (and
            # cache) a module value for it.
            root_compiled_value = compiled_value.get_root_context().get_value()
            # TODO this __name__ might be wrong.
            name = root_compiled_value.py__name__()
            string_names = tuple(name.split('.'))
            module_value = ModuleValue(
                inference_state, module_node,
                file_io=file_io,
                string_names=string_names,
                code_lines=code_lines,
                is_package=root_compiled_value.is_package(),
            )
            if name is not None:
                inference_state.module_cache.add(
                    string_names, ValueSet([module_value]))
            module_context = module_value.as_context()

        tree_values = ValueSet({module_context.create_value(tree_node)})
        if tree_node.type == 'classdef':
            if not compiled_value.is_class():
                # Is an instance, not a class.
                tree_values = tree_values.execute_with_values()

    return ValueSet(
        MixedObject(compiled_value, tree_value=tree_value)
        for tree_value in tree_values
    )
def py__call__(self, arguments):
    """Call this compiled value.

    Prefers an explicit return annotation; otherwise instantiates a
    class, executes a function, or defers to the generic fallback for
    non-callables.
    """
    return_annotation = self.access_handle.get_return_annotation()
    if return_annotation is not None:
        # TODO the return annotation may also be a string.
        annotation = create_from_access_path(
            self.inference_state, return_annotation)
        return annotation.execute_annotation()

    try:
        self.access_handle.getattr_paths(u'__call__')
    except AttributeError:
        # Not callable at all; defer to the generic implementation.
        return super(CompiledValue, self).py__call__(arguments)

    if self.access_handle.is_class():
        from medi.inference.value import CompiledInstance
        return ValueSet([CompiledInstance(
            self.inference_state, self.parent_context, self, arguments)])
    return ValueSet(self._execute_function(arguments))
def infer(self):
    """Infer a signature parameter from its default and its annotation."""
    param = self._signature_param
    state = self.parent_context.inference_state

    values = NO_VALUES
    if param.has_default:
        values = ValueSet(
            [create_from_access_path(state, param.default)])
    if param.has_annotation:
        # The annotation class is instantiated, then merged with the
        # default value's type.
        annotation = create_from_access_path(state, param.annotation)
        values |= annotation.execute_with_values()
    return values
def py__getitem__(self, index_value_set, contextualized_node):
    """Subscript this compiled value via its access handle."""
    access_paths = self.access_handle.py__getitem__all_values()
    if access_paths is None:
        # This means basically that no __getitem__ has been defined on
        # this object; fall back to the generic implementation.
        return super(CompiledValue, self).py__getitem__(
            index_value_set, contextualized_node)

    return ValueSet(
        create_from_access_path(self.inference_state, access)
        for access in access_paths)
def py__simple_getitem__(self, index):
    """Index this compiled value with a literal *index*.

    Getitem errors from the wrapped object are re-raised through
    ``reraise_getitem_errors``; a missing __getitem__ defers to the
    generic implementation.
    """
    with reraise_getitem_errors(IndexError, KeyError, TypeError):
        try:
            result = self.access_handle.py__simple_getitem__(index)
        except AttributeError:
            return super(CompiledValue, self).py__simple_getitem__(index)
    if result is None:
        return NO_VALUES
    return ValueSet([create_from_access_path(self.inference_state, result)])
def infer(self):
    """Infer the bound type-var values, expanding ``Any`` to constraints."""
    from medi.inference.gradual.typing import AnyClass

    def expanded():
        for value in self._value_set:
            if isinstance(value, AnyClass):
                # Replace Any with the type var's constraints, if any.
                for constraint in self._type_var.constraints:
                    yield constraint
            else:
                yield value

    return ValueSet(expanded())
def infer_param(function_value, param, ignore_stars=False):
    """Infer values for *param*, wrapping star-args appropriately.

    ``*args`` becomes ``tuple[T]`` and ``**kwargs`` becomes
    ``dict[str, T]`` unless *ignore_stars* is set.
    """
    values = _infer_param(function_value, param)
    if ignore_stars or not values:
        return values

    state = function_value.inference_state
    if param.star_count == 1:
        # *args: a tuple of the annotated type.
        tuple_cls = builtin_from_name(state, 'tuple')
        manager = TupleGenericManager((values,))
        return ValueSet([GenericClass(tuple_cls, manager)])
    if param.star_count == 2:
        # **kwargs: a dict mapping str to the annotated type.
        dict_cls = builtin_from_name(state, 'dict')
        generics = (ValueSet([builtin_from_name(state, 'str')]), values)
        return ValueSet([
            GenericClass(dict_cls, TupleGenericManager(generics))])
    return values
def infer_return_for_callable(arguments, param_values, result_values):
    """Infer the return type of a ``Callable[...]`` annotation call.

    Type vars in the parameter list are matched against the actual
    *arguments* and substituted into the result type.
    """
    all_type_vars = {}
    for param_value in param_values:
        if param_value.array_type == 'list':
            all_type_vars.update(_infer_type_vars_for_callable(
                arguments, param_value.py__iter__()))

    def substitute(value):
        if isinstance(value, (DefineGenericBaseClass, TypeVar)):
            return value.define_generics(all_type_vars)
        return ValueSet({value})

    return ValueSet.from_sets(
        substitute(v) for v in result_values
    ).execute_annotation()
def py__stop_iteration_returns(self):
    """Return the generator's declared return type (Generator[..., ..., R])."""
    for cls in self._wrapped_value.class_value.py__mro__():
        name = cls.py__name__()
        if name == 'Generator':
            # The third generic parameter is the return type.
            generics = cls.get_generics()
            try:
                return generics[2].execute_annotation()
            except IndexError:
                pass
        elif name == 'Iterator':
            # Plain iterators implicitly return None when exhausted.
            return ValueSet(
                [builtin_from_name(self.inference_state, u'None')])
    return self._wrapped_value.py__stop_iteration_returns()