class FunctionContext(use_metaclass(CachedMetaClass, AbstractFunction)):
    """
    Needed because of decorators. Decorators are evaluated here.
    """
    @classmethod
    def from_context(cls, context, tree_node):
        """Build a function context, skipping class/instance parents."""
        from jedi.evaluate.context import AbstractInstanceContext

        # Walk upwards until the enclosing context is neither a class nor an
        # instance; functions are bound to the module/function above those.
        parent = context
        while parent.is_class() or isinstance(parent, AbstractInstanceContext):
            parent = parent.parent_context
        return cls(parent.evaluator, parent_context=parent, tree_node=tree_node)

    def get_function_execution(self, arguments=None):
        """Return an execution context; anonymous args when none are given."""
        actual_arguments = AnonymousArguments() if arguments is None else arguments
        return FunctionExecutionContext(
            self.evaluator, self.parent_context, self, actual_arguments)

    def py__class__(self):
        return compiled.get_special_object(self.evaluator, u'FUNCTION_CLASS')
class Generator(use_metaclass(CachedMetaClass, IterableWrapper, GeneratorMixin)):
    """Handling of `yield` functions."""

    # Attribute names that are transparently proxied to the wrapped function.
    _ALLOWED_GETATTR = (
        'start_pos', 'end_pos', 'parent', 'get_imports', 'doc', 'docstr',
        'get_parent_until', 'get_code', 'subscopes',
    )

    def __init__(self, evaluator, func, var_args):
        super(Generator, self).__init__()
        self._evaluator = evaluator
        self.func = func
        self.var_args = var_args

    def iter_content(self):
        """ returns the content of __iter__ """
        # Directly execute it, because with a normal call to py__call__ a
        # Generator will be returned.
        from jedi.evaluate.representation import FunctionExecution
        execution = FunctionExecution(self._evaluator, self.func, self.var_args)
        return execution.get_return_types(check_yields=True)

    def __getattr__(self, name):
        if name in self._ALLOWED_GETATTR:
            return getattr(self.func, name)
        raise AttributeError("Accessing %s of %s is not allowed." % (self, name))

    def __repr__(self):
        return "<%s of %s>" % (type(self).__name__, self.func)
class FunctionContext(use_metaclass(CachedMetaClass, context.TreeContext)):
    """
    Needed because of decorators. Decorators are evaluated here.
    """
    api_type = 'function'

    def __init__(self, evaluator, parent_context, funcdef):
        """ This should not be called directly """
        super(FunctionContext, self).__init__(evaluator, parent_context)
        self.tree_node = funcdef

    def get_filters(self, search_global, until_position=None, origin_scope=None):
        # Global search: names defined inside the function body itself.
        # Otherwise: attributes of the (compiled) function/method class.
        if search_global:
            yield ParserTreeFilter(
                self.evaluator,
                context=self,
                until_position=until_position,
                origin_scope=origin_scope
            )
        else:
            scope = self.py__class__()
            for filter in scope.get_filters(search_global=False,
                                            origin_scope=origin_scope):
                yield filter

    def infer_function_execution(self, function_execution):
        """
        Created to be used by inheritance.
        """
        # Generators wrap the execution; normal functions return the inferred
        # return values directly.
        if self.tree_node.is_generator():
            return set([iterable.Generator(self.evaluator, function_execution)])
        else:
            return function_execution.get_return_values()

    def get_function_execution(self, arguments=None):
        # Without arguments we fall back to an anonymous execution (used e.g.
        # for param inference without a concrete call site).
        e = self.evaluator
        if arguments is None:
            return AnonymousFunctionExecution(e, self.parent_context, self)
        else:
            return FunctionExecutionContext(e, self.parent_context, self, arguments)

    def py__call__(self, arguments):
        function_execution = self.get_function_execution(arguments)
        return self.infer_function_execution(function_execution)

    def py__class__(self):
        # This differentiation is only necessary for Python2. Python3 does not
        # use a different method class.
        if isinstance(self.tree_node.get_parent_scope(), tree.Class):
            name = 'METHOD_CLASS'
        else:
            name = 'FUNCTION_CLASS'
        return compiled.get_special_object(self.evaluator, name)

    @property
    def name(self):
        return ContextName(self, self.tree_node.name)

    def get_param_names(self):
        # Params are resolved against an anonymous execution of this function.
        function_execution = self.get_function_execution()
        return [ParamName(function_execution, param.name)
                for param in self.tree_node.params]
class Generator(use_metaclass(CachedMetaClass, IterableWrapper, GeneratorMixin)):
    """Handling of `yield` functions."""

    # Attributes that may be read straight off the wrapped function object.
    _FORWARDED_ATTRIBUTES = (
        'start_pos', 'end_pos', 'parent', 'get_imports', 'doc', 'docstr',
        'get_parent_until', 'get_code', 'subscopes',
    )

    def __init__(self, evaluator, func, var_args):
        super(Generator, self).__init__()
        self._evaluator = evaluator
        self.func = func
        self.var_args = var_args

    def py__iter__(self):
        """Execute the wrapped function and return its yielded types."""
        from jedi.evaluate.representation import FunctionExecution
        execution = FunctionExecution(self._evaluator, self.func, self.var_args)
        return execution.get_yield_types()

    def __getattr__(self, name):
        if name in self._FORWARDED_ATTRIBUTES:
            return getattr(self.func, name)
        raise AttributeError("Accessing %s of %s is not allowed." % (self, name))

    def __repr__(self):
        return "<%s of %s>" % (type(self).__name__, self.func)
class InstanceElement(use_metaclass(CachedMetaClass, pr.Base)):
    """
    InstanceElement is a wrapper for any object, that is used as an instance
    variable (e.g. self.variable or class methods).
    """
    def __init__(self, evaluator, instance, var, is_class_var=False):
        self._evaluator = evaluator
        # Wrap raw parser functions/classes in their evaluator counterparts so
        # that they behave uniformly below.
        if isinstance(var, pr.Function):
            var = Function(evaluator, var)
        elif isinstance(var, pr.Class):
            var = Class(evaluator, var)
        self.instance = instance
        self.var = var
        self.is_class_var = is_class_var

    @common.safe_property
    @memoize_default(None)
    def parent(self):
        par = self.var.parent
        # If the parent is the instance's own class (wrapped or raw), report
        # the instance itself as parent.
        if isinstance(par, Class) and par == self.instance.base \
                or isinstance(par, pr.Class) \
                and par == self.instance.base.base:
            par = self.instance
        elif not isinstance(par, (pr.Module, compiled.CompiledObject)):
            # Keep intermediate parents bound to the instance as well.
            par = InstanceElement(self.instance._evaluator, self.instance,
                                  par, self.is_class_var)
        return par

    def get_parent_until(self, *args, **kwargs):
        # Use the raw parser traversal, bypassing our `parent` property above.
        return pr.Simple.get_parent_until(self, *args, **kwargs)

    def get_decorated_func(self):
        """ Needed because the InstanceElement should not be stripped """
        func = self.var.get_decorated_func()
        func = InstanceElement(self._evaluator, self.instance, func)
        return func

    def expression_list(self):
        # Copy and modify the array.
        return [InstanceElement(self.instance._evaluator, self.instance,
                                command, self.is_class_var)
                if not isinstance(command, (pr.Operator, Token)) else command
                for command in self.var.expression_list()]

    def __iter__(self):
        for el in self.var.__iter__():
            yield InstanceElement(self.instance._evaluator, self.instance,
                                  el, self.is_class_var)

    def __getattr__(self, name):
        # Everything else is proxied to the wrapped object.
        return getattr(self.var, name)

    def isinstance(self, *cls):
        return isinstance(self.var, cls)

    def __repr__(self):
        return "<%s of %s>" % (type(self).__name__, self.var)
class AbstractObjectOverwrite(use_metaclass(_OverwriteMeta, object)):
    """Base for wrappers that overlay special methods on another object."""

    def get_object(self):
        # Subclasses must return the object whose filters get overlaid.
        raise NotImplementedError

    def get_filters(self, search_global, *args, **kwargs):
        # Overwritten methods win, so their filter is yielded first, followed
        # by the wrapped object's own filters.
        yield SpecialMethodFilter(self, self.overwritten_methods, self.get_object())
        for obj_filter in self.get_object().get_filters(search_global):
            yield obj_filter
class InstanceElement(use_metaclass(cache.CachedMetaClass, pr.Base)):
    """
    InstanceElement is a wrapper for any object, that is used as an instance
    variable (e.g. self.variable or class methods).
    """
    def __init__(self, instance, var, is_class_var=False):
        # Wrap raw parser functions/classes in their evaluator counterparts.
        if isinstance(var, pr.Function):
            var = Function(var)
        elif isinstance(var, pr.Class):
            var = Class(var)
        self.instance = instance
        self.var = var
        self.is_class_var = is_class_var

    @property
    @cache.memoize_default()
    def parent(self):
        par = self.var.parent
        # The parent being the instance's class means the element's logical
        # parent is the instance itself.
        if isinstance(par, Class) and par == self.instance.base \
                or isinstance(par, pr.Class) \
                and par == self.instance.base.base:
            par = self.instance
        elif not isinstance(par, pr.Module):
            # Keep intermediate parents bound to the instance as well.
            par = InstanceElement(self.instance, par, self.is_class_var)
        return par

    def get_parent_until(self, *args, **kwargs):
        # Use the raw parser traversal, bypassing the `parent` property above.
        return pr.Simple.get_parent_until(self, *args, **kwargs)

    def get_decorated_func(self):
        """ Needed because the InstanceElement should not be stripped """
        func = self.var.get_decorated_func(self.instance)
        if func == self.var:
            # Nothing was decorated; keep the wrapper intact.
            return self
        return func

    def get_commands(self):
        # Copy and modify the array.
        return [InstanceElement(self.instance, command, self.is_class_var)
                if not isinstance(command, unicode) else command
                for command in self.var.get_commands()]

    def __iter__(self):
        for el in self.var.__iter__():
            yield InstanceElement(self.instance, el, self.is_class_var)

    def __getattr__(self, name):
        # Everything else is proxied to the wrapped object.
        return getattr(self.var, name)

    def isinstance(self, *cls):
        return isinstance(self.var, cls)

    def __repr__(self):
        return "<%s of %s>" % (type(self).__name__, self.var)
class ImplicitNamespaceContext(
        use_metaclass(CachedMetaClass, context.TreeContext)):
    """
    Provides support for implicit namespace packages
    """
    api_type = 'module'
    parent_context = None

    def __init__(self, evaluator, fullname):
        super(ImplicitNamespaceContext, self).__init__(evaluator,
                                                       parent_context=None)
        self.evaluator = evaluator
        self.fullname = fullname

    def get_filters(self, search_global, until_position=None, origin_scope=None):
        # A namespace package only exposes its submodules.
        yield DictFilter(self._sub_modules_dict())

    @property
    @memoize_default()
    def name(self):
        # The last dotted component of the full package name.
        string_name = self.py__package__().rpartition('.')[-1]
        return ImplicitNSName(self, string_name)

    def py__file__(self):
        # Namespace packages have no __init__.py, hence no file.
        return None

    def py__package__(self):
        """Return the fullname
        """
        return self.fullname

    @property
    def py__path__(self):
        # NOTE(review): `self.paths` is never assigned in __init__;
        # presumably set by the code that creates this context — confirm.
        return lambda: [self.paths]

    @memoize_default()
    def _sub_modules_dict(self):
        # Collect module names from all directories that make up this
        # namespace package (file extensions stripped, __pycache__ skipped).
        names = {}
        paths = self.paths
        file_names = chain.from_iterable(os.listdir(path) for path in paths)
        mods = [
            file_name.rpartition('.')[0] if '.' in file_name else file_name
            for file_name in file_names
            if file_name != '__pycache__'
        ]
        for name in mods:
            names[name] = imports.SubModuleName(self, name)
        return names
class Generator(use_metaclass(CachedMetaClass, pr.Base)):
    """Handling of `yield` functions."""
    def __init__(self, evaluator, func, var_args):
        super(Generator, self).__init__()
        self._evaluator = evaluator
        self.func = func
        self.var_args = var_args

    @underscore_memoization
    def _get_defined_names(self):
        """
        Returns a list of names that define a generator, which can return the
        content of a generator.
        """
        # Names that actually run the generator get a GeneratorMethod parent so
        # executing them yields this generator's content; other names (close,
        # throw, ...) pass through unchanged.
        executes_generator = '__next__', 'send', 'next'
        for name in compiled.generator_obj.get_defined_names():
            if name.name in executes_generator:
                parent = GeneratorMethod(self, name.parent)
                yield helpers.FakeName(name.name, parent)
            else:
                yield name

    def scope_names_generator(self, position=None):
        yield self, self._get_defined_names()

    def iter_content(self):
        """ returns the content of __iter__ """
        return self._evaluator.execute(self.func, self.var_args, True)

    def get_index_types(self, index_array):
        # Subscripting a generator is a type error; report it to analysis.
        #debug.warning('Tried to get array access on a generator: %s', self)
        analysis.add(self._evaluator, 'type-error-generator', index_array)
        return []

    def get_exact_index_types(self, index):
        """
        Exact lookups are used for tuple lookups, which are perfectly fine if
        used with generators.
        """
        # Can raise IndexError for out-of-range tuple lookups.
        return [self.iter_content()[index]]

    def __getattr__(self, name):
        # Only a whitelist of attributes is proxied to the wrapped function.
        if name not in ['start_pos', 'end_pos', 'parent', 'get_imports',
                        'asserts', 'doc', 'docstr', 'get_parent_until',
                        'get_code', 'subscopes']:
            raise AttributeError("Accessing %s of %s is not allowed."
                                 % (self, name))
        return getattr(self.func, name)

    def __repr__(self):
        return "<%s of %s>" % (type(self).__name__, self.func)
class FunctionContext(use_metaclass(CachedMetaClass, FunctionMixin,
                                    FunctionAndClassBase)):
    """
    Needed because of decorators. Decorators are evaluated here.
    """
    def is_function(self):
        return True

    @classmethod
    def from_context(cls, context, tree_node):
        def create(tree_node):
            # NOTE: closes over `parent_context`, which is assigned below
            # (after this def, but before any call to create()).
            if context.is_class():
                return MethodContext(
                    context.evaluator,
                    context,
                    parent_context=parent_context,
                    tree_node=tree_node
                )
            else:
                return cls(
                    context.evaluator,
                    parent_context=parent_context,
                    tree_node=tree_node
                )

        # Sibling definitions decorated with @overload for this function.
        overloaded_funcs = list(_find_overload_functions(context, tree_node))

        # Functions are parented by the first non-class/non-instance context.
        parent_context = context
        while parent_context.is_class() or parent_context.is_instance():
            parent_context = parent_context.parent_context

        function = create(tree_node)

        if overloaded_funcs:
            return OverloadedFunctionContext(
                function,
                [create(f) for f in overloaded_funcs]
            )
        return function

    def py__class__(self):
        c, = contexts_from_qualified_names(self.evaluator, u'types',
                                           u'FunctionType')
        return c

    def get_default_param_context(self):
        return self.parent_context

    def get_signature_functions(self):
        return [self]
class ModuleWrapper(use_metaclass(CachedMetaClass, pr.Module)):
    """Evaluator-side wrapper around a parsed module.

    Adds synthetic module attributes (``__file__`` etc.) and submodule names
    on top of the names defined in the module's source.
    """
    def __init__(self, evaluator, module):
        self._evaluator = evaluator
        self._module = module

    def scope_names_generator(self, position=None):
        """Yield (scope, names) pairs for this module's name sources."""
        yield self, pr.filter_after_position(self._module.get_defined_names(),
                                             position)
        yield self, self._module_attributes()
        # Fix: reuse the computed list instead of calling _sub_modules() a
        # second time (the original re-called the memoized method).
        sub_modules = self._sub_modules()
        if sub_modules:
            yield self, sub_modules

    @memoize_default()
    def _module_attributes(self):
        names = ['__file__', '__package__', '__doc__', '__name__',
                 '__version__']
        # All the additional module attributes are strings.
        parent = Instance(self._evaluator, compiled.create(self._evaluator, str))
        return [helpers.FakeName(n, parent) for n in names]

    @memoize_default()
    def _sub_modules(self):
        """
        Lists modules in the directory of this module (if this module is a
        package).
        """
        path = self._module.path
        names = []
        if path is not None and path.endswith(os.path.sep + '__init__.py'):
            mods = pkgutil.iter_modules([os.path.dirname(path)])
            for module_loader, name, is_pkg in mods:
                name = helpers.FakeName(name)
                # It's obviously a relative import to the current module.
                imp = helpers.FakeImport(name, self, level=1)
                name.parent = imp
                names.append(name)
        return names

    def __getattr__(self, name):
        # Everything else is proxied to the wrapped parser module.
        return getattr(self._module, name)

    def __repr__(self):
        return "<%s: %s>" % (type(self).__name__, self._module)
class Generator(use_metaclass(cache.CachedMetaClass, pr.Base)):
    """ Cares for `yield` statements. """
    def __init__(self, func, var_args):
        super(Generator, self).__init__()
        self.func = func
        self.var_args = var_args

    def get_defined_names(self):
        """
        Returns a list of names that define a generator, which can return the
        content of a generator.
        """
        names = []
        none_pos = (0, 0)
        # Names that execute the generator are parented by this object so that
        # calling them yields this generator's content; the rest belong to the
        # builtin scope.
        executes_generator = ('__next__', 'send')
        for n in ('close', 'throw') + executes_generator:
            name = pr.Name(builtin.Builtin.scope, [(n, none_pos)],
                           none_pos, none_pos)
            if n in executes_generator:
                name.parent = self
            else:
                name.parent = builtin.Builtin.scope
            names.append(name)
        debug.dbg('generator names', names)
        return names

    def iter_content(self):
        """ returns the content of __iter__ """
        return Execution(self.func, self.var_args).get_return_types(True)

    def get_index_types(self, index=None):
        # Subscripting a generator is not possible; warn and return nothing.
        debug.warning('Tried to get array access on a generator', self)
        return []

    def __getattr__(self, name):
        # Only a whitelist of attributes is proxied to the wrapped function.
        if name not in ['start_pos', 'end_pos', 'parent', 'get_imports',
                        'asserts', 'doc', 'docstr', 'get_parent_until',
                        'get_code', 'subscopes']:
            raise AttributeError("Accessing %s of %s is not allowed."
                                 % (self, name))
        return getattr(self.func, name)

    def __repr__(self):
        return "<%s of %s>" % (type(self).__name__, self.func)
class ModuleWrapper(use_metaclass(CachedMetaClass, pr.Module, Wrapper)):
    """Evaluator-side wrapper around a parsed module (names_dicts variant)."""

    def __init__(self, evaluator, module):
        self._evaluator = evaluator
        self.base = self._module = module

    def names_dicts(self, search_global):
        # Name sources in lookup order: the module's own names, synthetic
        # module attributes, star-imported names, globals, and submodules.
        yield self.base.names_dict
        yield self._module_attributes_dict()

        for star_module in self.star_imports():
            yield star_module.names_dict

        yield dict((str(n), [GlobalName(n)]) for n in self.base.global_names)
        yield self._sub_modules_dict()

    @cache_star_import
    @memoize_default([])
    def star_imports(self):
        # Recursively collect the modules reachable through `from x import *`.
        modules = []
        for i in self.base.imports:
            if i.is_star_import():
                name = i.star_import_name()
                new = imports.ImportWrapper(self._evaluator, name).follow()
                for module in new:
                    if isinstance(module, pr.Module):
                        modules += module.star_imports()
                modules += new
        return modules

    @memoize_default()
    def _module_attributes_dict(self):
        def parent_callback():
            # The attribute values are (lazily created) string instances.
            return self._evaluator.execute(
                compiled.create(self._evaluator, str))[0]

        names = ['__file__', '__package__', '__doc__', '__name__']
        # All the additional module attributes are strings.
        return dict((n, [helpers.LazyName(n, parent_callback,
                                          is_definition=True)])
                    for n in names)

    @property
    @memoize_default()
    def name(self):
        return helpers.FakeName(unicode(self.base.name), self, (1, 0))

    @memoize_default()
    def _sub_modules_dict(self):
        """
        Lists modules in the directory of this module (if this module is a
        package).
        """
        path = self._module.path
        names = {}
        if path is not None and path.endswith(os.path.sep + '__init__.py'):
            mods = pkgutil.iter_modules([os.path.dirname(path)])
            for module_loader, name, is_pkg in mods:
                fake_n = helpers.FakeName(name)
                # It's obviously a relative import to the current module.
                imp = helpers.FakeImport(fake_n, self, level=1)
                fake_n.parent = imp
                names[name] = [fake_n]

        # TODO add something like this in the future, its cleaner than the
        #   import hacks.
        # ``os.path`` is a hardcoded exception, because it's a
        # ``sys.modules`` modification.
        #if str(self.name) == 'os':
        #    names.append(helpers.FakeName('path', parent=self))

        return names

    def __getattr__(self, name):
        # Everything else is proxied to the wrapped parser module.
        return getattr(self._module, name)

    def __repr__(self):
        return "<%s: %s>" % (type(self).__name__, self._module)

    def py__bool__(self):
        return True
class Function(use_metaclass(CachedMetaClass, Wrapper)):
    """
    Needed because of decorators. Decorators are evaluated here.
    """
    def __init__(self, evaluator, func, is_decorated=False):
        """ This should not be called directly """
        self._evaluator = evaluator
        self.base = self.base_func = func
        self.is_decorated = is_decorated
        # A property that is set by the decorator resolution.
        self.decorates = None

    @memoize_default()
    def get_decorated_func(self):
        """
        Returns the function, that should to be executed in the end.
        This is also the places where the decorators are processed.
        """
        f = self.base_func
        decorators = self.base_func.get_decorators()

        if not decorators or self.is_decorated:
            return self

        # Only enter it, if has not already been processed.
        if not self.is_decorated:
            # Decorators are applied bottom-up, hence reversed().
            for dec in reversed(decorators):
                debug.dbg('decorator: %s %s', dec, f)
                dec_results = self._evaluator.eval_element(dec.children[1])
                trailer = dec.children[2:-1]
                if trailer:
                    # Create a trailer and evaluate it.
                    trailer = pr.Node('trailer', trailer)
                    dec_results = self._evaluator.eval_trailer(dec_results,
                                                               trailer)

                if not len(dec_results):
                    debug.warning('decorator not found: %s on %s',
                                  dec, self.base_func)
                    return self
                decorator = dec_results.pop()
                if dec_results:
                    debug.warning('multiple decorators found %s %s',
                                  self.base_func, dec_results)

                # Create param array.
                if isinstance(f, Function):
                    old_func = f  # TODO this is just hacky. change.
                else:
                    old_func = Function(self._evaluator, f, is_decorated=True)

                # Call the decorator with the (wrapped) function as argument.
                wrappers = self._evaluator.execute_evaluated(decorator, old_func)
                if not len(wrappers):
                    debug.warning('no wrappers found %s', self.base_func)
                    return self
                if len(wrappers) > 1:
                    # TODO resolve issue with multiple wrappers -> multiple types
                    debug.warning('multiple wrappers found %s %s',
                                  self.base_func, wrappers)
                f = wrappers[0]
                if isinstance(f, (Instance, Function)):
                    f.decorates = self

            debug.dbg('decorator end %s', f)
        return f

    def names_dicts(self, search_global):
        if search_global:
            yield self.names_dict
        else:
            for names_dict in compiled.magic_function_class.names_dicts(False):
                yield names_dict

    @Python3Method
    def py__call__(self, evaluator, params):
        # Generators are wrapped; normal functions are executed for their
        # return types.
        if self.base.is_generator():
            return [iterable.Generator(evaluator, self, params)]
        else:
            return FunctionExecution(evaluator, self, params).get_return_types()

    def py__bool__(self):
        return True

    def __getattr__(self, name):
        # Everything else is proxied to the wrapped parser function.
        return getattr(self.base_func, name)

    def __repr__(self):
        dec = ''
        if self.decorates is not None:
            dec = " decorates " + repr(self.decorates)
        return "<e%s of %s%s>" % (type(self).__name__, self.base_func, dec)
class Class(use_metaclass(CachedMetaClass, Wrapper)):
    """
    This class is not only important to extend `pr.Class`, it is also a
    important for descriptors (if the descriptor methods are evaluated or
    not).
    """
    def __init__(self, evaluator, base):
        self._evaluator = evaluator
        self.base = base

    @memoize_default(default=())
    def py__mro__(self, evaluator):
        def add(cls):
            # De-duplicate while preserving discovery order.
            if cls not in mro:
                mro.append(cls)

        mro = [self]
        # TODO Do a proper mro resolution. Currently we are just listing
        # classes. However, it's a complicated algorithm.
        for cls in self.py__bases__(self._evaluator):
            # TODO detect for TypeError: duplicate base class str,
            # e.g. `class X(str, str): pass`
            try:
                mro_method = cls.py__mro__
            except AttributeError:
                # TODO add a TypeError like:
                """
                >>> class Y(lambda: test): pass
                Traceback (most recent call last):
                  File "<stdin>", line 1, in <module>
                TypeError: function() argument 1 must be code, not str
                >>> class Y(1): pass
                Traceback (most recent call last):
                  File "<stdin>", line 1, in <module>
                TypeError: int() takes at most 2 arguments (3 given)
                """
                pass
            else:
                add(cls)
                for cls_new in mro_method(evaluator):
                    add(cls_new)
        return tuple(mro)

    @memoize_default(default=())
    def py__bases__(self, evaluator):
        arglist = self.base.get_super_arglist()
        if arglist:
            args = param.Arguments(self._evaluator, arglist)
            return list(chain.from_iterable(args.eval_args()))
        else:
            # No explicit bases -> implicitly inherits from object.
            return [compiled.object_obj]

    def py__call__(self, evaluator, params):
        # Calling a class creates an instance.
        return [Instance(evaluator, self, params)]

    def py__getattribute__(self, name):
        return self._evaluator.find_types(self, name)

    @property
    def params(self):
        return self.get_subscope_by_name('__init__').params

    def names_dicts(self, search_global, is_instance=False):
        if search_global:
            yield self.names_dict
        else:
            # Walk the (approximate) mro for attribute lookup.
            for scope in self.py__mro__(self._evaluator):
                if isinstance(scope, compiled.CompiledObject):
                    yield scope.names_dicts(False, is_instance)[0]
                else:
                    yield scope.names_dict

    def is_class(self):
        return True

    def get_subscope_by_name(self, name):
        # Search the mro; reversed() so later definitions in a scope win.
        for s in self.py__mro__(self._evaluator):
            for sub in reversed(s.subscopes):
                if sub.name.value == name:
                    return sub
        raise KeyError("Couldn't find subscope.")

    def __getattr__(self, name):
        # Only a whitelist of attributes is proxied to the wrapped class.
        if name not in ['start_pos', 'end_pos', 'parent', 'raw_doc', 'doc',
                        'get_imports', 'get_parent_until', 'get_code',
                        'subscopes', 'names_dict', 'type']:
            raise AttributeError("Don't touch this: %s of %s !" % (name, self))
        return getattr(self.base, name)

    def __repr__(self):
        return "<e%s of %s>" % (type(self).__name__, self.base)
class InstanceElement(use_metaclass(CachedMetaClass, pr.Base)):
    """
    InstanceElement is a wrapper for any object, that is used as an instance
    variable (e.g. self.variable or class methods).
    """
    def __init__(self, evaluator, instance, var, is_class_var):
        self._evaluator = evaluator
        self.instance = instance
        self.var = var
        self.is_class_var = is_class_var

    @common.safe_property
    @memoize_default()
    def parent(self):
        par = self.var.parent
        # If the parent is the instance's own class (wrapped or raw), the
        # logical parent is the instance itself.
        if isinstance(par, Class) and par == self.instance.base \
                or isinstance(par, pr.Class) \
                and par == self.instance.base.base:
            par = self.instance
        else:
            # Keep intermediate parents bound to the instance as well.
            par = get_instance_el(self._evaluator, self.instance, par,
                                  self.is_class_var)
        return par

    def get_parent_until(self, *args, **kwargs):
        # Use the raw parser traversal, bypassing the `parent` property above.
        return pr.BaseNode.get_parent_until(self, *args, **kwargs)

    def get_definition(self):
        return self.get_parent_until((pr.ExprStmt, pr.IsScope, pr.Import))

    def get_decorated_func(self):
        """ Needed because the InstanceElement should not be stripped """
        func = self.var.get_decorated_func()
        func = get_instance_el(self._evaluator, self.instance, func)
        return func

    def get_rhs(self):
        return get_instance_el(self._evaluator, self.instance,
                               self.var.get_rhs(), self.is_class_var)

    def is_definition(self):
        return self.var.is_definition()

    @property
    def children(self):
        # Copy and modify the array.
        return [get_instance_el(self._evaluator, self.instance, command,
                                self.is_class_var)
                for command in self.var.children]

    @property
    @memoize_default()
    def name(self):
        name = self.var.name
        return helpers.FakeName(unicode(name), self, name.start_pos)

    def __iter__(self):
        for el in self.var.__iter__():
            yield get_instance_el(self._evaluator, self.instance, el,
                                  self.is_class_var)

    def __getitem__(self, index):
        return get_instance_el(self._evaluator, self.instance,
                               self.var[index], self.is_class_var)

    def __getattr__(self, name):
        # Everything else is proxied to the wrapped object.
        return getattr(self.var, name)

    def isinstance(self, *cls):
        return isinstance(self.var, cls)

    def is_scope(self):
        """
        Since we inherit from Base, it would overwrite the action we want
        here.
        """
        return self.var.is_scope()

    def py__call__(self, evaluator, params):
        # Delegate the call behavior to Function (unbound-style call).
        return Function.py__call__(self, evaluator, params)

    def __repr__(self):
        return "<%s of %s>" % (type(self).__name__, self.var)
class Array(use_metaclass(CachedMetaClass, pr.Base)):
    """
    Used as a mirror to pr.Array, if needed. It defines some getter
    methods which are important in this module.
    """
    def __init__(self, evaluator, array):
        self._evaluator = evaluator
        self._array = array

    @memoize_default(NO_DEFAULT)
    def get_index_types(self, indexes=()):
        """
        Get the types of a specific index or all, if not given.

        :param indexes: The index input types.
        """
        result = []
        # Slicing returns the same array type, not element types.
        if [index for index in indexes if isinstance(index, Slice)]:
            return [self]

        if len(indexes) == 1:
            # This is indexing only one element, with a fixed index number,
            # otherwise it just ignores the index (e.g. [1+1]).
            index = indexes[0]
            if isinstance(index, compiled.CompiledObject) \
                    and isinstance(index.obj, (int, str, unicode)):
                with common.ignored(KeyError, IndexError, TypeError):
                    return self.get_exact_index_types(index.obj)

        # Fall back: union of all contained value types plus any additions
        # (append/insert calls elsewhere in the code).
        result = list(_follow_values(self._evaluator, self._array.values))
        result += check_array_additions(self._evaluator, self)
        return result

    def get_exact_index_types(self, mixed_index):
        """ Here the index is an int/str. Raises IndexError/KeyError """
        index = mixed_index
        if self.type == pr.Array.DICT:
            index = None
            for i, key_statement in enumerate(self._array.keys):
                # Because we only want the key to be a string.
                key_expression_list = key_statement.expression_list()
                if len(key_expression_list) != 1:
                    # cannot deal with complex strings
                    continue
                key = key_expression_list[0]
                if isinstance(key, pr.Literal):
                    key = key.value
                elif isinstance(key, pr.Name):
                    key = str(key)
                else:
                    continue

                if mixed_index == key:
                    index = i
                    break
            if index is None:
                raise KeyError('No key found in dictionary')

        # Can raise an IndexError
        values = [self._array.values[index]]
        return _follow_values(self._evaluator, values)

    def get_defined_names(self):
        """
        This method generates all `ArrayMethod` for one pr.Array.
        It returns e.g. for a list: append, pop, ...
        """
        # `array.type` is a string with the type, e.g. 'list'.
        scope = self._evaluator.find_types(compiled.builtin,
                                           self._array.type)[0]
        scope = self._evaluator.execute(scope)[0]  # builtins only have one class
        names = scope.get_defined_names()
        return [ArrayMethod(n) for n in names]

    @common.safe_property
    def parent(self):
        return compiled.builtin

    def get_parent_until(self):
        return compiled.builtin

    def __getattr__(self, name):
        # Only a whitelist of attributes is proxied to the wrapped array.
        if name not in ['type', 'start_pos', 'get_only_subelement', 'parent',
                        'get_parent_until', 'items']:
            raise AttributeError('Strange access on %s: %s.' % (self, name))
        return getattr(self._array, name)

    def __getitem__(self):
        return self._array.__getitem__()

    def __iter__(self):
        return self._array.__iter__()

    def __len__(self):
        return self._array.__len__()

    def __repr__(self):
        return "<e%s of %s>" % (type(self).__name__, self._array)
class IsScope(use_metaclass(IsScopeMeta)):
    # Marker class: its behavior comes entirely from the IsScopeMeta
    # metaclass (not visible here) — presumably instance checks; confirm.
    pass
class FunctionContext(use_metaclass(CachedMetaClass, TreeContext)):
    """
    Needed because of decorators. Decorators are evaluated here.
    """
    api_type = u'function'

    def __init__(self, evaluator, parent_context, funcdef):
        """ This should not be called directly """
        super(FunctionContext, self).__init__(evaluator, parent_context)
        self.tree_node = funcdef

    def get_filters(self, search_global, until_position=None, origin_scope=None):
        # Global search: names defined inside the function body itself.
        # Otherwise: attributes of the (compiled) function/method class.
        if search_global:
            yield ParserTreeFilter(
                self.evaluator,
                context=self,
                until_position=until_position,
                origin_scope=origin_scope
            )
        else:
            scope = self.py__class__()
            for filter in scope.get_filters(search_global=False,
                                            origin_scope=origin_scope):
                yield filter

    def infer_function_execution(self, function_execution):
        """
        Created to be used by inheritance.
        """
        # `async def` bodies are parented by an async_stmt node.
        is_coroutine = self.tree_node.parent.type == 'async_stmt'
        is_generator = bool(get_yield_exprs(self.evaluator, self.tree_node))

        if is_coroutine:
            if is_generator:
                # Async generators require Python 3.6+.
                if self.evaluator.environment.version_info < (3, 6):
                    return NO_CONTEXTS
                return ContextSet(asynchronous.AsyncGenerator(
                    self.evaluator, function_execution))
            else:
                # Coroutines require Python 3.5+.
                if self.evaluator.environment.version_info < (3, 5):
                    return NO_CONTEXTS
                return ContextSet(asynchronous.Coroutine(
                    self.evaluator, function_execution))
        else:
            if is_generator:
                return ContextSet(iterable.Generator(self.evaluator,
                                                     function_execution))
            else:
                return function_execution.get_return_values()

    def get_function_execution(self, arguments=None):
        if arguments is None:
            arguments = AnonymousArguments()

        return FunctionExecutionContext(self.evaluator, self.parent_context,
                                        self, arguments)

    def py__call__(self, arguments):
        function_execution = self.get_function_execution(arguments)
        return self.infer_function_execution(function_execution)

    def py__class__(self):
        # This differentiation is only necessary for Python2. Python3 does not
        # use a different method class.
        if isinstance(parser_utils.get_parent_scope(self.tree_node),
                      tree.Class):
            name = u'METHOD_CLASS'
        else:
            name = u'FUNCTION_CLASS'
        return compiled.get_special_object(self.evaluator, name)

    @property
    def name(self):
        # Lambdas have no name node of their own.
        if self.tree_node.type == 'lambdef':
            return LambdaName(self)
        return ContextName(self, self.tree_node.name)

    def get_param_names(self):
        # Params are resolved against an anonymous execution of this function.
        function_execution = self.get_function_execution()
        return [ParamName(function_execution, param.name)
                for param in self.tree_node.get_params()]
class ModuleWrapper(use_metaclass(CachedMetaClass, tree.Module, Wrapper)):
    """Evaluator-side wrapper around a parsed module with package support."""

    def __init__(self, evaluator, module, parent_module=None):
        self._evaluator = evaluator
        self.base = self._module = module
        self._parent_module = parent_module

    def names_dicts(self, search_global):
        # Name sources in lookup order: the module's own names, synthetic
        # module attributes, star-imported names, globals, and submodules.
        yield self.base.names_dict
        yield self._module_attributes_dict()

        for star_module in self.star_imports():
            yield star_module.names_dict

        yield dict((str(n), [GlobalName(n)]) for n in self.base.global_names)
        yield self._sub_modules_dict()

    # I'm not sure if the star import cache is really that effective anymore
    # with all the other really fast import caches. Recheck. Also we would need
    # to push the star imports into Evaluator.modules, if we reenable this.
    #@cache_star_import
    @memoize_default([])
    def star_imports(self):
        # Recursively collect the modules reachable through `from x import *`.
        modules = []
        for i in self.base.imports:
            if i.is_star_import():
                name = i.star_import_name()
                new = imports.ImportWrapper(self._evaluator, name).follow()
                for module in new:
                    if isinstance(module, tree.Module):
                        modules += module.star_imports()
                modules += new
        return modules

    @memoize_default()
    def _module_attributes_dict(self):
        def parent_callback():
            # Create a string type object (without a defined string in it):
            return list(self._evaluator.execute(
                compiled.create(self._evaluator, str)))[0]

        names = ['__file__', '__package__', '__doc__', '__name__']
        # All the additional module attributes are strings.
        return dict((n, [helpers.LazyName(n, parent_callback,
                                          is_definition=True)])
                    for n in names)

    @property
    @memoize_default()
    def name(self):
        return helpers.FakeName(unicode(self.base.name), self, (1, 0))

    def _get_init_directory(self):
        """
        :return: The path to the directory of a package. None in case it's not
                 a package.
        """
        for suffix, _, _ in imp.get_suffixes():
            ending = '__init__' + suffix
            py__file__ = self.py__file__()
            if py__file__ is not None and py__file__.endswith(ending):
                # Remove the ending, including the separator.
                return self.py__file__()[:-len(ending) - 1]
        return None

    def py__name__(self):
        # The evaluator's module registry maps names to modules; reverse it.
        for name, module in self._evaluator.modules.items():
            if module == self:
                return name

        return '__main__'

    def py__file__(self):
        """
        In contrast to Python's __file__ can be None.
        """
        if self._module.path is None:
            return None

        return os.path.abspath(self._module.path)

    def py__package__(self):
        if self._get_init_directory() is None:
            # Not a package: strip the last dotted component.
            return re.sub(r'\.?[^\.]+$', '', self.py__name__())
        else:
            return self.py__name__()

    def _py__path__(self):
        if self._parent_module is None:
            search_path = self._evaluator.sys_path
        else:
            search_path = self._parent_module.py__path__()
        init_path = self.py__file__()
        if os.path.basename(init_path) == '__init__.py':
            with open(init_path, 'rb') as f:
                content = common.source_to_unicode(f.read())
                # these are strings that need to be used for namespace packages,
                # the first one is ``pkgutil``, the second ``pkg_resources``.
                options = ('declare_namespace(__name__)', 'extend_path(__path__')
                if options[0] in content or options[1] in content:
                    # It is a namespace, now try to find the rest of the
                    # modules on sys_path or whatever the search_path is.
                    paths = set()
                    for s in search_path:
                        other = os.path.join(s, unicode(self.name))
                        if os.path.isdir(other):
                            paths.add(other)
                    return list(paths)
        # Default to this.
        return [self._get_init_directory()]

    @property
    def py__path__(self):
        """
        Not seen here, since it's a property. The callback actually uses a
        variable, so use it like::

            foo.py__path__(sys_path)

        In case of a package, this returns Python's __path__ attribute, which
        is a list of paths (strings).
        Raises an AttributeError if the module is not a package.
        """
        path = self._get_init_directory()

        if path is None:
            raise AttributeError('Only packages have __path__ attributes.')
        else:
            return self._py__path__

    @memoize_default()
    def _sub_modules_dict(self):
        """
        Lists modules in the directory of this module (if this module is a
        package).
        """
        path = self._module.path
        names = {}
        if path is not None and path.endswith(os.path.sep + '__init__.py'):
            mods = pkgutil.iter_modules([os.path.dirname(path)])
            for module_loader, name, is_pkg in mods:
                fake_n = helpers.FakeName(name)
                # It's obviously a relative import to the current module.
                # NOTE: the local `imp` below shadows the `imp` module used in
                # _get_init_directory (different scope, so harmless).
                imp = helpers.FakeImport(fake_n, self, level=1)
                fake_n.parent = imp
                names[name] = [fake_n]

        # TODO add something like this in the future, its cleaner than the id:114 gh:115
        #   import hacks.
        # ``os.path`` is a hardcoded exception, because it's a
        # ``sys.modules`` modification.
        #if str(self.name) == 'os':
        #    names.append(helpers.FakeName('path', parent=self))

        return names

    def py__class__(self):
        return compiled.get_special_object(self._evaluator, 'MODULE_CLASS')

    def __getattr__(self, name):
        # Everything else is proxied to the wrapped parser module.
        return getattr(self._module, name)

    def __repr__(self):
        return "<%s: %s>" % (type(self).__name__, self._module)
class Definition(use_metaclass(CachedMetaClass, BaseDefinition)):
    """
    *Definition* objects are returned from :meth:`api.Script.goto_assignments`
    or :meth:`api.Script.goto_definitions`.
    """
    def __init__(self, evaluator, definition):
        super(Definition, self).__init__(evaluator, definition,
                                         definition.start_pos)

    @property
    @underscore_memoization
    def name(self):
        """
        Name of variable/function/class/module.

        For example, for ``x = None`` it returns ``'x'``.

        :rtype: str or None
        """
        d = self._definition
        if isinstance(d, er.InstanceElement):
            d = d.var

        # Each parser/evaluator node type stores its name differently; walk
        # the known kinds and pull the right attribute out.
        if isinstance(d, (compiled.CompiledObject, compiled.CompiledName)):
            name = d.name
        elif isinstance(d, pr.Name):
            name = d.names[-1]
        elif isinstance(d, iterable.Array):
            name = d.type
        elif isinstance(d, (pr.Class, er.Class, er.Instance,
                            er.Function, pr.Function)):
            name = d.name
        elif isinstance(d, pr.Module):
            name = self.module_name
        elif isinstance(d, pr.Import):
            try:
                name = d.get_defined_names()[0].names[-1]
            except (AttributeError, IndexError):
                return None
        elif isinstance(d, pr.Statement):
            try:
                expression_list = d.assignment_details[0][0]
                name = expression_list[0].name.names[-1]
            except IndexError:
                if isinstance(d, pr.Param):
                    try:
                        return unicode(d.expression_list()[0].name)
                    except (IndexError, AttributeError):
                        # IndexError for syntax error params
                        # AttributeError for *args/**kwargs
                        pass
                return None
        elif isinstance(d, iterable.Generator):
            return None
        elif isinstance(d, pr.NamePart):
            name = d
        return unicode(name)

    @property
    def description(self):
        """
        A description of the :class:`.Definition` object, which is heavily used
        in testing. e.g. for ``isinstance`` it returns ``def isinstance``.

        Example:

        >>> from jedi import Script
        >>> source = '''
        ... def f():
        ...     pass
        ...
        ... class C:
        ...     pass
        ...
        ... variable = f or C'''
        >>> script = Script(source, column=3)  # line is maximum by default
        >>> defs = script.goto_definitions()
        >>> defs = sorted(defs, key=lambda d: d.line)
        >>> defs
        [<Definition def f>, <Definition class C>]
        >>> str(defs[0].description)  # strip literals in python2
        'def f'
        >>> str(defs[1].description)
        'class C'

        """
        d = self._definition
        if isinstance(d, er.InstanceElement):
            d = d.var
        if isinstance(d, pr.Name):
            d = d.parent

        if isinstance(d, compiled.CompiledObject):
            d = d.type() + ' ' + d.name
        elif isinstance(d, iterable.Array):
            d = 'class ' + d.type
        elif isinstance(d, (pr.Class, er.Class, er.Instance)):
            d = 'class ' + unicode(d.name)
        elif isinstance(d, (er.Function, pr.Function)):
            d = 'def ' + unicode(d.name)
        elif isinstance(d, pr.Module):
            # only show module name
            d = 'module %s' % self.module_name
        elif self.is_keyword:
            d = 'keyword %s' % d.name
        else:
            d = d.get_code().replace('\n', '')
        return d

    @property
    def desc_with_module(self):
        """
        In addition to the definition, also return the module.

        .. warning:: Don't use this function yet, its behaviour may change. If
            you really need it, talk to me.

        .. todo:: Add full path. This function should return a
            `module.class.function` path.
        """
        position = '' if self.in_builtin_module else '@%s' % (self.line)
        return "%s:%s%s" % (self.module_name, self.description, position)

    @memoize_default()
    def defined_names(self):
        """
        List sub-definitions (e.g., methods in class).

        :rtype: list of Definition
        """
        defs = self._follow_statements_imports()
        # For now we don't want base classes or evaluate decorators.
        defs = [d.base if isinstance(d, (er.Class, er.Function)) else d
                for d in defs]
        # Chain directly instead of binding an intermediate local named
        # ``iterable``, which shadowed the ``iterable`` module used above.
        return list(chain.from_iterable(defined_names(self._evaluator, d)
                                        for d in defs))

    def __eq__(self, other):
        # NOTE(review): assumes ``other`` is a Definition-like object;
        # comparing with an unrelated type raises AttributeError.
        return self._start_pos == other._start_pos \
            and self.module_path == other.module_path \
            and self.name == other.name \
            and self._evaluator == other._evaluator

    def __ne__(self, other):
        return not self.__eq__(other)

    def __hash__(self):
        return hash((self._start_pos, self.module_path, self.name,
                     self._evaluator))
class Instance(use_metaclass(CachedMetaClass, Executed)):
    """
    This class is used to evaluate instances.
    """
    def __init__(self, evaluator, base, var_args, is_generated=False):
        super(Instance, self).__init__(evaluator, base, var_args)
        # Set by the decorator machinery when this instance wraps another.
        self.decorates = None
        # Generated instances are classes that are just generated by self
        # (No var_args) used.
        self.is_generated = is_generated

        if base.name.get_code() in ['list', 'set'] \
                and compiled.builtin == base.get_parent_until():
            # compare the module path with the builtin name.
            self.var_args = iterable.check_array_instances(evaluator, self)
        elif not is_generated:
            # Need to execute the __init__ function, because the dynamic param
            # searching needs it.
            try:
                method = self.get_subscope_by_name('__init__')
            except KeyError:
                pass
            else:
                evaluator.execute(method, self.var_args)

    @property
    def py__call__(self):
        # Exposed as a property so that merely *having* py__call__ signals
        # callability; raising AttributeError here means "not callable".
        def actual(evaluator, params):
            return evaluator.execute(method, params)

        try:
            method = self.get_subscope_by_name('__call__')
        except KeyError:
            # Means the Instance is not callable.
            raise AttributeError

        return actual

    def py__class__(self, evaluator):
        # The class of an instance is simply the wrapped class object.
        return self.base

    def py__bool__(self):
        # Signalize that we don't know about the bool type.
        return None

    @memoize_default()
    def _get_method_execution(self, func):
        # Bind ``func`` to this instance before executing it, so that
        # ``self``-assignments inside it are attributed to us.
        func = get_instance_el(self._evaluator, self, func, True)
        return FunctionExecution(self._evaluator, func, self.var_args)

    def _get_func_self_name(self, func):
        """
        Returns the name of the first param in a class method (which is
        normally ``self``).
        """
        try:
            return str(func.params[0].name)
        except IndexError:
            # No parameters at all — no ``self`` name to report.
            return None

    def _self_names_dict(self, add_mro=True):
        """Collect ``self.<attr>`` definition names from this class's methods.

        Returns a dict mapping attribute name -> list of instance-bound names.
        NOTE(review): ``add_mro`` is accepted but not read in this body —
        presumably kept for interface symmetry; confirm against callers.
        """
        names = {}
        # This loop adds the names of the self object, copies them and removes
        # the self.
        for sub in self.base.subscopes:
            if isinstance(sub, pr.Class):
                continue
            # Get the self name, if there's one.
            self_name = self._get_func_self_name(sub)
            if self_name is None:
                continue

            if sub.name.value == '__init__' and not self.is_generated:
                # ``__init__`` is special because the params need are injected
                # this way. Therefore an execution is necessary.
                if not sub.get_decorators():
                    # __init__ decorators should generally just be ignored,
                    # because to follow them and their self variables is too
                    # complicated.
                    sub = self._get_method_execution(sub)
            for name_list in sub.names_dict.values():
                for name in name_list:
                    # Only a bare ``self`` (no previous sibling) followed by a
                    # two-child ``.<attr>`` trailer counts as an attribute
                    # definition on this instance.
                    if name.value == self_name and name.prev_sibling() is None:
                        trailer = name.next_sibling()
                        if pr.is_node(trailer, 'trailer') \
                                and len(trailer.children) == 2 \
                                and trailer.children[0] == '.':
                            name = trailer.children[1]  # After dot.
                            if name.is_definition():
                                arr = names.setdefault(name.value, [])
                                arr.append(get_instance_el(self._evaluator,
                                                           self, name))
        return names

    def get_subscope_by_name(self, name):
        # Look the subscope up on the class and bind it to this instance.
        sub = self.base.get_subscope_by_name(name)
        return get_instance_el(self._evaluator, self, sub, True)

    def execute_subscope_by_name(self, name, *args):
        method = self.get_subscope_by_name(name)
        return self._evaluator.execute_evaluated(method, *args)

    def get_descriptor_returns(self, obj):
        """ Throws a KeyError if there's no method. """
        # Arguments in __get__ descriptors are obj, class.
        # `method` is the new parent of the array, don't know if that's good.
        args = [obj, obj.base] if isinstance(obj, Instance) \
            else [compiled.none_obj, obj]
        try:
            return self.execute_subscope_by_name('__get__', *args)
        except KeyError:
            # No descriptor protocol — the attribute evaluates to itself.
            return [self]

    @memoize_default()
    def names_dicts(self, search_global):
        # Own ``self.`` attributes first, then those of every base class in
        # the MRO, then the (lazily wrapped) class-level names.
        yield self._self_names_dict()

        for s in self.base.py__mro__(self._evaluator)[1:]:
            if not isinstance(s, compiled.CompiledObject):
                # Compiled objects don't have `self.` names.
                for inst in self._evaluator.execute(s):
                    yield inst._self_names_dict(add_mro=False)

        for names_dict in self.base.names_dicts(search_global=False,
                                                is_instance=True):
            yield LazyInstanceDict(self._evaluator, self, names_dict)

    def get_index_types(self, evaluator, index_array):
        """Evaluate ``instance[index]`` by executing ``__getitem__``."""
        indexes = iterable.create_indexes_or_slices(self._evaluator,
                                                    index_array)
        if any([isinstance(i, iterable.Slice) for i in indexes]):
            # Slice support in Jedi is very marginal, at the moment, so just
            # ignore them in case of __getitem__.
            # TODO support slices in a more general way.
            indexes = []

        try:
            method = self.get_subscope_by_name('__getitem__')
        except KeyError:
            debug.warning('No __getitem__, cannot access the array.')
            return []
        else:
            return self._evaluator.execute(
                method, [iterable.AlreadyEvaluated(indexes)])

    @property
    @underscore_memoization
    def name(self):
        # Reuse the class's name but re-parent it onto this instance.
        name = self.base.name
        return helpers.FakeName(unicode(name), self, name.start_pos)

    def __getattr__(self, name):
        # Only a whitelisted set of attributes may be transparently delegated
        # to the class; everything else is a programming error here.
        if name not in ['start_pos', 'end_pos', 'get_imports', 'type',
                        'doc', 'raw_doc']:
            raise AttributeError("Instance %s: Don't touch this (%s)!"
                                 % (self, name))
        return getattr(self.base, name)

    def __repr__(self):
        dec = ''
        if self.decorates is not None:
            dec = " decorates " + repr(self.decorates)
        return "<e%s of %s(%s)%s>" % (type(self).__name__, self.base,
                                      self.var_args, dec)
class Definition(use_metaclass(CachedMetaClass, BaseDefinition)):
    """
    *Definition* objects are returned from :meth:`api.Script.goto_assignments`
    or :meth:`api.Script.goto_definitions`.
    """
    def __init__(self, evaluator, definition):
        super(Definition, self).__init__(evaluator, definition)

    @property
    def description(self):
        """
        A description of the :class:`.Definition` object, which is heavily used
        in testing. e.g. for ``isinstance`` it returns ``def isinstance``.

        Example:

        >>> from jedi import Script
        >>> source = '''
        ... def f():
        ...     pass
        ...
        ... class C:
        ...     pass
        ...
        ... variable = f if random.choice([0,1]) else C'''
        >>> script = Script(source, column=3)  # line is maximum by default
        >>> defs = script.goto_definitions()
        >>> defs = sorted(defs, key=lambda d: d.line)
        >>> defs
        [<Definition def f>, <Definition class C>]
        >>> str(defs[0].description)  # strip literals in python2
        'def f'
        >>> str(defs[1].description)
        'class C'

        """
        d = self._definition
        if isinstance(d, er.InstanceElement):
            d = d.var

        if isinstance(d, compiled.CompiledObject):
            typ = d.api_type()
            if typ == 'instance':
                typ = 'class'  # The description should be similar to Py objects.
            d = typ + ' ' + d.name.get_code()
        elif isinstance(d, iterable.Array):
            d = 'class ' + d.type
        elif isinstance(d, (tree.Class, er.Class, er.Instance)):
            d = 'class ' + unicode(d.name)
        elif isinstance(d, (er.Function, tree.Function)):
            d = 'def ' + unicode(d.name)
        elif isinstance(d, tree.Module):
            # only show module name
            d = 'module %s' % self.module_name
        elif isinstance(d, tree.Param):
            d = d.get_code().strip()
            if d.endswith(','):
                d = d[:-1]  # Remove the comma.
        else:  # ExprStmt
            try:
                first_leaf = d.first_leaf()
            except AttributeError:
                # `d` is already a Leaf (Name).
                first_leaf = d
            # Remove the prefix, because that's not what we want for get_code
            # here.
            old, first_leaf.prefix = first_leaf.prefix, ''
            try:
                d = d.get_code()
            finally:
                first_leaf.prefix = old
        # Delete comments.  Raw strings keep the regex escapes literal —
        # '\s' in a plain string is an invalid escape on modern Python.
        d = re.sub(r'#[^\n]+\n', ' ', d)
        # Delete multi spaces/newlines
        return re.sub(r'\s+', ' ', d).strip()

    @property
    def desc_with_module(self):
        """
        In addition to the definition, also return the module.

        .. warning:: Don't use this function yet, its behaviour may change. If
            you really need it, talk to me.

        .. todo:: Add full path. This function should return a
            `module.class.function` path.
        """
        position = '' if self.in_builtin_module else '@%s' % (self.line)
        return "%s:%s%s" % (self.module_name, self.description, position)

    @memoize_default()
    def defined_names(self):
        """
        List sub-definitions (e.g., methods in class).

        :rtype: list of Definition
        """
        defs = self._follow_statements_imports()
        # For now we don't want base classes or evaluate decorators.
        defs = [d.base if isinstance(d, (er.Class, er.Function)) else d
                for d in defs]
        # Chain directly instead of binding an intermediate local named
        # ``iterable``, which shadowed the ``iterable`` module used above.
        return list(chain.from_iterable(defined_names(self._evaluator, d)
                                        for d in defs))

    def is_definition(self):
        """
        Returns True, if defined as a name in a statement, function or class.
        Returns False, if it's a reference to such a definition.
        """
        return self._name.is_definition()

    def __eq__(self, other):
        # NOTE(review): assumes ``other`` is a Definition-like object;
        # comparing with an unrelated type raises AttributeError.
        return self._name.start_pos == other._name.start_pos \
            and self.module_path == other.module_path \
            and self.name == other.name \
            and self._evaluator == other._evaluator

    def __ne__(self, other):
        return not self.__eq__(other)

    def __hash__(self):
        return hash((self._name.start_pos, self.module_path, self.name,
                     self._evaluator))
class AttributeOverwrite(use_metaclass(_OverwriteMeta, _AttributeOverwriteMixin,
                                       ContextWrapper)):
    """
    Empty concrete base combining ``_AttributeOverwriteMixin`` with
    ``ContextWrapper`` under the ``_OverwriteMeta`` metaclass.

    NOTE(review): the metaclass and mixin are defined outside this chunk —
    presumably ``_OverwriteMeta`` collects overwritten attributes at class
    creation; confirm against their definitions.
    """
    pass
class ClassContext(use_metaclass(CachedMetaClass, ClassMixin,
                                 FunctionAndClassBase)):
    """
    This class is not only important to extend `tree.Class`, it is also
    important for descriptors (if the descriptor methods are evaluated or
    not).
    """
    api_type = u'class'

    @evaluator_method_cache()
    def list_type_vars(self):
        """Return the TypeVars referenced (but not defined) in the bases list,
        in first-seen order."""
        found = []
        arglist = self.tree_node.get_super_arglist()
        if arglist is None:
            return []

        for stars, node in unpack_arglist(arglist):
            if stars:
                continue  # These are not relevant for this search.

            from jedi.evaluate.gradual.annotation import find_unknown_type_vars
            for type_var in find_unknown_type_vars(self.parent_context, node):
                if type_var not in found:
                    # The order matters and it's therefore a list.
                    found.append(type_var)
        return found

    def _get_bases_arguments(self):
        # Wrap the class's base-class arglist as TreeArguments, or None when
        # the class has no explicit bases.
        arglist = self.tree_node.get_super_arglist()
        if arglist:
            from jedi.evaluate import arguments
            return arguments.TreeArguments(self.evaluator,
                                           self.parent_context, arglist)
        return None

    @evaluator_method_cache(default=())
    def py__bases__(self):
        """Return the lazy base classes; everything except the builtin
        ``object`` itself implicitly falls back to ``object``."""
        args = self._get_bases_arguments()
        if args is not None:
            # Positional arguments only — keyword args are metaclass etc.
            lst = [value for key, value in args.unpack() if key is None]
            if lst:
                return lst

        if self.py__name__() == 'object' \
                and self.parent_context == self.evaluator.builtins_module:
            return []
        return [LazyKnownContexts(
            self.evaluator.builtins_module.py__getattribute__('object')
        )]

    def py__getitem__(self, index_context_set, contextualized_node):
        """Handle subscription (``Cls[...]``) by producing lazy generic
        classes, one per index context."""
        from jedi.evaluate.gradual.typing import LazyGenericClass
        if not index_context_set:
            return ContextSet([self])
        return ContextSet(
            LazyGenericClass(
                self,
                index_context,
                context_of_index=contextualized_node.context,
            )
            for index_context in index_context_set
        )

    def define_generics(self, type_var_dict):
        """Specialize this class with the TypeVar mapping ``type_var_dict``;
        returns ``self`` unchanged when the mapping is empty."""
        from jedi.evaluate.gradual.typing import GenericClass

        def remap_type_vars():
            """
            The TypeVars in the resulting classes have sometimes different names
            and we need to check for that, e.g. a signature can be:

            def iter(iterable: Iterable[_T]) -> Iterator[_T]: ...

            However, the iterator is defined as Iterator[_T_co], which means it
            has a different type var name.
            """
            for type_var in self.list_type_vars():
                yield type_var_dict.get(type_var.py__name__(), NO_CONTEXTS)

        if type_var_dict:
            return ContextSet([GenericClass(
                self,
                generics=tuple(remap_type_vars())
            )])
        return ContextSet({self})

    @plugin_manager.decorate()
    def get_metaclass_filters(self, metaclass):
        # Default: no plugin handled this metaclass; plugins may override
        # via the decorator above.
        debug.dbg('Unprocessed metaclass %s', metaclass)
        return []

    @evaluator_method_cache(default=NO_CONTEXTS)
    def get_metaclasses(self):
        """Return the metaclass contexts: an explicit ``metaclass=`` keyword
        wins; otherwise the first base class with a metaclass provides it."""
        args = self._get_bases_arguments()
        if args is not None:
            m = [value for key, value in args.unpack() if key == 'metaclass']
            metaclasses = ContextSet.from_sets(lazy_context.infer()
                                               for lazy_context in m)
            metaclasses = ContextSet(m for m in metaclasses if m.is_class())
            if metaclasses:
                return metaclasses

        for lazy_base in self.py__bases__():
            for context in lazy_base.infer():
                if context.is_class():
                    contexts = context.get_metaclasses()
                    if contexts:
                        return contexts
        return NO_CONTEXTS
class Function(use_metaclass(CachedMetaClass, Wrapper)):
    """
    Needed because of decorators. Decorators are evaluated here.
    """
    def __init__(self, evaluator, func, is_decorated=False):
        """ This should not be called directly """
        self._evaluator = evaluator
        self.base = self.base_func = func
        self.is_decorated = is_decorated
        # A property that is set by the decorator resolution.
        self.decorates = None

    @memoize_default()
    def get_decorated_func(self):
        """
        Returns the function, that should to be executed in the end.
        This is also the places where the decorators are processed.

        Returns ``self`` unchanged when there are no decorators, when this
        function is already decorated, or when any decorator/wrapper cannot
        be resolved.
        """
        f = self.base_func
        decorators = self.base_func.get_decorators()

        if not decorators or self.is_decorated:
            return self

        # Only enter it, if has not already been processed.
        # NOTE(review): this guard is always true here — the early return
        # above already handled ``self.is_decorated``.
        if not self.is_decorated:
            # Decorators apply bottom-up, hence ``reversed``.
            for dec in reversed(decorators):
                debug.dbg('decorator: %s %s', dec, f)
                dec_results = self._evaluator.eval_element(dec.children[1])
                trailer = dec.children[2:-1]
                if trailer:
                    # Create a trailer and evaluate it.
                    trailer = tree.Node('trailer', trailer)
                    trailer.parent = dec
                    dec_results = self._evaluator.eval_trailer(dec_results,
                                                               trailer)

                if not len(dec_results):
                    debug.warning('decorator not found: %s on %s',
                                  dec, self.base_func)
                    return self
                decorator = dec_results.pop()
                if dec_results:
                    debug.warning('multiple decorators found %s %s',
                                  self.base_func, dec_results)

                # Create param array.
                if isinstance(f, Function):
                    old_func = f  # TODO this is just hacky. change. id:133 gh:134
                elif f.type == 'funcdef':
                    old_func = Function(self._evaluator, f, is_decorated=True)
                else:
                    old_func = f

                wrappers = self._evaluator.execute_evaluated(decorator,
                                                             old_func)
                if not len(wrappers):
                    debug.warning('no wrappers found %s', self.base_func)
                    return self
                if len(wrappers) > 1:
                    # TODO resolve issue with multiple wrappers -> multiple types id:88 gh:89
                    debug.warning('multiple wrappers found %s %s',
                                  self.base_func, wrappers)
                # Pick an arbitrary wrapper when several were inferred.
                f = list(wrappers)[0]
                if isinstance(f, (Instance, Function)):
                    f.decorates = self

        debug.dbg('decorator end %s', f)
        return f

    def names_dicts(self, search_global):
        if search_global:
            # Names defined inside the function body.
            yield self.names_dict
        else:
            # Attribute access on the function object itself goes through
            # its (compiled) class.
            scope = self.py__class__()
            for names_dict in scope.names_dicts(False):
                yield names_dict

    @Python3Method
    def py__call__(self, params):
        # Generators never execute eagerly — calling them yields a Generator.
        if self.base.is_generator():
            return set([iterable.Generator(self._evaluator, self, params)])
        else:
            return FunctionExecution(self._evaluator, self,
                                     params).get_return_types()

    @memoize_default()
    def py__annotations__(self):
        """Collect the return and parameter annotations into a dict, keyed
        like Python's own ``__annotations__``."""
        parser_func = self.base
        return_annotation = parser_func.annotation()
        if return_annotation:
            dct = {'return': return_annotation}
        else:
            dct = {}
        for function_param in parser_func.params:
            param_annotation = function_param.annotation()
            if param_annotation is not None:
                dct[function_param.name.value] = param_annotation
        return dct

    def py__class__(self):
        # This differentiation is only necessary for Python2. Python3 does not
        # use a different method class.
        if isinstance(self.base.get_parent_scope(), tree.Class):
            name = 'METHOD_CLASS'
        else:
            name = 'FUNCTION_CLASS'
        return compiled.get_special_object(self._evaluator, name)

    def __getattr__(self, name):
        # Delegate everything not defined here to the wrapped parser funcdef.
        return getattr(self.base_func, name)

    def __repr__(self):
        dec = ''
        if self.decorates is not None:
            dec = " decorates " + repr(self.decorates)
        return "<e%s of %s%s>" % (type(self).__name__, self.base_func, dec)