def names_dicts(self, search_global):
    """Yield the names dicts visible from this context.

    With ``search_global`` only this context's own names dict is
    yielded; otherwise the dicts of the special FUNCTION_CLASS
    object are yielded instead.
    """
    if search_global:
        yield self.names_dict
        return

    function_class = compiled.get_special_object(self._evaluator, 'FUNCTION_CLASS')
    for dct in function_class.names_dicts(False):
        yield dct
def __init__(self, grammar, sys_path=None):
    """Initialize the evaluator: grammars, caches and the builtins constant.

    ``sys_path`` defaults to the interpreter's ``sys.path``; an empty
    string entry (the current working directory) is dropped from the copy.
    """
    self.grammar = grammar
    self.latest_grammar = parso.load_grammar(version='3.6')

    # Caches consumed by the various memoize decorators.
    self.memoize_cache = {}
    self.modules = {}  # the evaluator's equivalent of `sys.modules`
    self.compiled_cache = {}  # see `evaluate.compiled.create()`
    self.inferred_element_counts = {}
    self.mixed_cache = {}  # see `evaluate.compiled.mixed._create()`

    self.analysis = []
    self.dynamic_params_depth = 0
    self.is_analysis = False
    self.python_version = sys.version_info[:2]

    path = sys.path if sys_path is None else sys_path
    self.sys_path = copy.copy(path)
    try:
        self.sys_path.remove('')
    except ValueError:
        pass

    self.reset_recursion_limitations()

    # Constants
    self.BUILTINS = compiled.get_special_object(self, 'BUILTINS')
def test_fake_loading():
    """Fake-compiled objects for builtins resolve to Function instances."""
    evaluator = _evaluator()
    assert isinstance(compiled.create(evaluator, next), Function)

    builtin = compiled.get_special_object(evaluator, 'BUILTINS')
    str_scope = builtin.get_subscope_by_name('str')
    init = compiled._create_from_name(evaluator, builtin, str_scope, '__init__')
    assert isinstance(init, Function)
def test_fake_loading():
    """Fake-compiled objects for builtins resolve to FunctionContext instances."""
    evaluator = _evaluator()
    assert isinstance(compiled.create(evaluator, next), FunctionContext)

    builtin = compiled.get_special_object(evaluator, 'BUILTINS')
    str_context, = builtin.py__getattribute__('str')
    init = compiled._create_from_name(evaluator, builtin, str_context, '__init__')
    assert isinstance(init, FunctionContext)
def py__class__(self):
    """Return the special class object for this function-like node.

    Methods (defined directly inside a class) get METHOD_CLASS;
    everything else gets FUNCTION_CLASS. The distinction only matters
    on Python 2 — Python 3 uses a single method/function class.
    """
    parent = parser_utils.get_parent_scope(self.tree_node)
    name = u'METHOD_CLASS' if isinstance(parent, tree.Class) else u'FUNCTION_CLASS'
    return compiled.get_special_object(self.evaluator, name)
def py__class__(self):
    """Return the special class object for this function-like node.

    The METHOD_CLASS/FUNCTION_CLASS distinction is only relevant on
    Python 2; Python 3 does not use a separate method class.
    """
    enclosing = parser_utils.get_parent_scope(self.tree_node)
    if isinstance(enclosing, tree.Class):
        return compiled.get_special_object(self.evaluator, 'METHOD_CLASS')
    return compiled.get_special_object(self.evaluator, 'FUNCTION_CLASS')
def get_filters(self, search_global, until_position=None, origin_scope=None):
    """Yield this generator's special-method filter, then the filters
    of the underlying GENERATOR_OBJECT special object."""
    generator = compiled.get_special_object(self.evaluator, 'GENERATOR_OBJECT')
    yield SpecialMethodFilter(self, self.builtin_methods, generator)
    for flt in generator.get_filters(search_global):
        yield flt
def actual(params):
    """Infer return contexts for a GObject-introspected callable.

    Reads the ``->`` return annotation out of the wrapped object's
    docstring and maps it either to a Python builtin type or to a
    pygobject type string that is then evaluated as a statement.
    Returns a set of evaluated contexts.
    """
    # Parse the docstring to find the return type:
    ret_type = ''
    if '->' in self.obj.__doc__:
        ret_type = self.obj.__doc__.split('->')[1].strip()
        ret_type = ret_type.replace(' or None', '')
    if ret_type.startswith('iter:'):
        ret_type = ret_type[len(
            'iter:'):]  # we don't care if it's an iterator
    # NOTE(review): `__builtins__` is a module only in `__main__`; in other
    # modules it may be a dict, where hasattr() always returns False — verify.
    if hasattr(__builtins__, ret_type):
        # The function we're inspecting returns a builtin python type, that's easy
        # (see test/test_evaluate/test_compiled.py in the jedi source code for usage)
        builtins = get_special_object(self.evaluator, 'BUILTINS')
        builtin_obj = builtins.py__getattribute__(ret_type)
        obj = _create_from_name(self.evaluator, builtins, builtin_obj, "")
        return self.evaluator.execute(obj, params)
    else:
        # The function we're inspecting returns a GObject type
        parent = self.parent_context.obj.__name__
        if parent.startswith('gi.repository'):
            parent = parent[len('gi.repository.'):]
        else:
            # a module with overrides, such as Gtk, behaves differently
            parent_module = self.parent_context.obj.__module__
            if parent_module.startswith('gi.overrides'):
                parent_module = parent_module[
                    len('gi.overrides.'):]
            parent = '%s.%s' % (parent_module, parent)
        if ret_type.startswith(parent):
            # A pygobject type in the same module
            ret_type = ret_type[len(parent):]
        else:
            # A pygobject type in a different module
            return_type_parent = ret_type.split('.', 1)[0]
            ret_type = 'from gi.repository import %s\n%s' % (
                return_type_parent, ret_type)
        result = _evaluate_for_statement_string(
            self.parent_context, ret_type)
        return set(result)
def __init__(self, grammar, project):
    """Initialize the evaluator and register it with *project*."""
    self.grammar = grammar
    self.latest_grammar = parso.load_grammar(version='3.6')

    # Caches consumed by the memoize decorators.
    self.memoize_cache = {}
    self.modules = {}  # the evaluator's equivalent of `sys.modules`
    self.compiled_cache = {}  # see `evaluate.compiled.create()`
    self.inferred_element_counts = {}
    self.mixed_cache = {}  # see `evaluate.compiled.mixed._create()`

    self.analysis = []
    self.dynamic_params_depth = 0
    self.is_analysis = False
    self.python_version = sys.version_info[:2]

    self.project = project
    project.add_evaluator(self)

    self.reset_recursion_limitations()

    # Constants
    self.BUILTINS = compiled.get_special_object(self, 'BUILTINS')
def __init__(self, grammar, sys_path=None):
    """Initialize the evaluator's caches and its copy of ``sys.path``.

    ``sys_path`` defaults to the interpreter's ``sys.path``; an empty
    string entry (the current working directory) is removed from the copy.
    """
    self.grammar = grammar

    # Caches consumed by the memoize decorators.
    self.memoize_cache = {}
    self.modules = {}  # the evaluator's equivalent of `sys.modules`
    self.compiled_cache = {}  # see `evaluate.compiled.create()`
    self.mixed_cache = {}  # see `evaluate.compiled.mixed.create()`

    self.analysis = []
    self.predefined_if_name_dict_dict = {}
    self.is_analysis = False

    if sys_path is None:
        sys_path = sys.path
    self.sys_path = copy.copy(sys_path)
    try:
        self.sys_path.remove('')
    except ValueError:
        pass

    self.reset_recursion_limitations()

    # Constants
    self.BUILTINS = compiled.get_special_object(self, 'BUILTINS')
def actual(params):
    """Infer return contexts for a GObject-introspected callable.

    Reads the ``->`` return annotation out of the wrapped object's
    docstring and maps it either to a Python builtin type or to a
    pygobject type string that is then evaluated as a statement.
    Returns a set of evaluated contexts.
    """
    # Parse the docstring to find the return type:
    ret_type = ''
    if '->' in self.obj.__doc__:
        ret_type = self.obj.__doc__.split('->')[1].strip()
        ret_type = ret_type.replace(' or None', '')
    if ret_type.startswith('iter:'):
        ret_type = ret_type[len('iter:'):]  # we don't care if it's an iterator
    # NOTE(review): `__builtins__` is a module only in `__main__`; in other
    # modules it may be a dict, where hasattr() always returns False — verify.
    if hasattr(__builtins__, ret_type):
        # The function we're inspecting returns a builtin python type, that's easy
        # (see test/test_evaluate/test_compiled.py in the jedi source code for usage)
        builtins = get_special_object(self.evaluator, 'BUILTINS')
        builtin_obj = builtins.py__getattribute__(ret_type)
        obj = _create_from_name(self.evaluator, builtins, builtin_obj, "")
        return self.evaluator.execute(obj, params)
    else:
        # The function we're inspecting returns a GObject type
        parent = self.parent_context.obj.__name__
        if parent.startswith('gi.repository'):
            parent = parent[len('gi.repository.'):]
        else:
            # a module with overrides, such as Gtk, behaves differently
            parent_module = self.parent_context.obj.__module__
            if parent_module.startswith('gi.overrides'):
                parent_module = parent_module[len('gi.overrides.'):]
            parent = '%s.%s' % (parent_module, parent)
        if ret_type.startswith(parent):
            # A pygobject type in the same module
            ret_type = ret_type[len(parent):]
        else:
            # A pygobject type in a different module
            return_type_parent = ret_type.split('.', 1)[0]
            ret_type = 'from gi.repository import %s\n%s' % (return_type_parent, ret_type)
        result = _evaluate_for_statement_string(self.parent_context, ret_type)
        return set(result)
def get_object(self):
    """Resolve and return the special object this name refers to."""
    from jedi.evaluate import compiled
    identifier = self.special_object_identifier
    assert identifier
    return compiled.get_special_object(self.evaluator, identifier)
def test_fake_loading(evaluator):
    """The fake builtins module supplies a tree node for ``str.__init__``."""
    builtins_obj = compiled.get_special_object(evaluator, u'BUILTINS')
    str_context, = builtins_obj.py__getattribute__(u'str')
    init = compiled.context.create_from_name(evaluator, str_context, u'__init__')
    assert init.tree_node
def py__class__(self):
    """Modules report the special MODULE_CLASS object as their class."""
    module_class = compiled.get_special_object(self.evaluator, 'MODULE_CLASS')
    return module_class
def names_dicts(self, search_global=False):
    """Yield the names dict built from the GENERATOR_OBJECT special object.

    ``search_global`` is always False for this context; the parameter
    exists only to match the shared interface.
    """
    generator = compiled.get_special_object(self._evaluator, 'GENERATOR_OBJECT')
    yield self._get_names_dict(generator.names_dict)
def py__class__(self):
    """Bound methods report the special BOUND_METHOD_CLASS as their class."""
    method_class = compiled.get_special_object(self.evaluator, u'BOUND_METHOD_CLASS')
    return method_class
def py__class__(self):
    """Functions report the special FUNCTION_CLASS as their class."""
    function_class = compiled.get_special_object(self._evaluator, 'FUNCTION_CLASS')
    return function_class
def builtins_module(self):
    """Return the special BUILTINS module object for this evaluator."""
    builtins_obj = compiled.get_special_object(self, u'BUILTINS')
    return builtins_obj
def py__class__(self):
    """Delegate to the GENERATOR_OBJECT special object's class."""
    generator = compiled.get_special_object(self.evaluator, 'GENERATOR_OBJECT')
    return generator.py__class__()
def py__class__(self):
    """Functions report the special FUNCTION_CLASS as their class."""
    function_class = compiled.get_special_object(self.evaluator, u'FUNCTION_CLASS')
    return function_class
def py__class__(self):
    """Modules report the special MODULE_CLASS as their class."""
    module_class = compiled.get_special_object(self.evaluator, u'MODULE_CLASS')
    return module_class