def recursion(obj):
    """Recursively shallow-copy ``obj``, re-linking parent references to the
    already-copied counterparts registered in ``new_elements``."""
    new_obj = copy.copy(obj)
    # Register before descending so cyclic parent references resolve to the
    # new copy instead of recursing forever.
    new_elements[obj] = new_obj
    try:
        items = list(new_obj.__dict__.items())
    except AttributeError:
        # __dict__ not available, because of __slots__
        items = []
    before = ()
    for cls in new_obj.__class__.__mro__:
        with common.ignored(AttributeError):
            if before == cls.__slots__:
                continue
            before = cls.__slots__
            items += [(n, getattr(new_obj, n)) for n in before]

    for key, value in items:
        # replace parent (first try _parent and then parent)
        if key in ['parent', '_parent'] and value is not None:
            # BUG FIX: ``items`` holds (key, value) tuples, so the original
            # test ``'_parent' in items`` could never be true; compare
            # against the keys instead.
            if key == 'parent' and any(k == '_parent' for k, _ in items):
                # parent can be a property
                continue
            with common.ignored(KeyError):
                setattr(new_obj, key, new_elements[value])
        elif key in ['parent_function', 'use_as_parent', '_sub_module']:
            continue
        elif isinstance(value, list):
            setattr(new_obj, key, list_rec(value))
        elif isinstance(value, (pr.Simple, pr.Call)):
            setattr(new_obj, key, recursion(value))
    return new_obj
def _check_getattr(self, inst):
    """Checks for both __getattr__ and __getattribute__ methods"""
    # str is important to lose the NamePart!
    name = compiled.create(self._evaluator, str(self.name_str))
    result = []
    with common.ignored(KeyError):
        result = inst.execute_subscope_by_name('__getattr__', [name])
    if result:
        return result
    # `__getattribute__` is executed before anything else. But: I know no
    # use case where consulting it second would make jedi return wrong
    # types. If you ever have something, let me know!
    with common.ignored(KeyError):
        result = inst.execute_subscope_by_name('__getattribute__', [name])
    return result
def _check_getattr(self, inst):
    """Checks for both __getattr__ and __getattribute__ methods"""
    # str is important, because it shouldn't be `Name`!
    name = compiled.create(self._evaluator, str(self.name_str))
    result = []
    with common.ignored(KeyError):
        result = inst.execute_subscope_by_name('__getattr__', name)
    if result:
        return result
    # `__getattribute__` is executed before anything else. But: I know no
    # use case where consulting it second would make jedi return wrong
    # types. If you ever have something, let me know!
    with common.ignored(KeyError):
        result = inst.execute_subscope_by_name('__getattribute__', name)
    return result
def full_name(self):
    """
    Dot-separated path of this object.

    It is in the form of
    ``<module>[.<submodule>[...]][.<object>]``. It is useful when you
    want to look up Python manual of the object at hand.

    Example:

    >>> from jedi import Script
    >>> source = '''
    ... import os
    ... os.path.join'''
    >>> script = Script(source, 3, len('os.path.join'), 'example.py')
    >>> print(script.goto_definitions()[0].full_name)
    os.path.join

    Notice that it correctly returns ``'os.path.join'`` instead of
    (for example) ``'posixpath.join'``.
    """
    path = [unicode(p) for p in self._path()]
    # TODO add further checks, the mapping should only occur on stdlib.
    if not path:
        return None  # for keywords the path is empty
    # Map implementation-module names back to their canonical stdlib names
    # (e.g. posixpath -> os.path).
    with common.ignored(KeyError):
        path[0] = self._mapping[path[0]]
    for key, repl in self._tuple_mapping.items():
        if tuple(path[:len(key)]) == key:
            path = [repl] + path[len(key):]
    # An empty first component is dropped before joining.
    return '.'.join(path if path[0] else path[1:])
def check_getattr(inst, name_str):
    """Checks for both __getattr__ and __getattribute__ methods"""
    # str is important to lose the NamePart!
    module = builtin.Builtin.scope
    name = pr.String(module, "'%s'" % name_str, (0, 0), (0, 0), inst)
    result = []
    with common.ignored(KeyError):
        result = inst.execute_subscope_by_name('__getattr__', [name])
    if result:
        return result
    # `__getattribute__` is executed before anything else. But: I know no
    # use case where consulting it second would make jedi return wrong
    # types. If you ever have something, let me know!
    with common.ignored(KeyError):
        result = inst.execute_subscope_by_name('__getattribute__', [name])
    return result
def sys_path_with_modifications(evaluator, module):
    """Return the sys.path including module-specific modifications
    (django project roots, buildout script paths)."""
    if module.path is None:
        # Support for modules without a path is bad, therefore return the
        # normal path.
        return list(get_sys_path())

    curdir = os.path.abspath(os.curdir)
    # Relative sys.path modifications in the module are resolved against
    # the module's own directory.
    with common.ignored(OSError):
        os.chdir(os.path.dirname(module.path))

    try:
        result = _check_module(evaluator, module)
        result += _detect_django_path(module.path)

        # buildout scripts often contain the same sys.path modifications
        # the set here is used to avoid duplicate sys.path entries
        buildout_paths = set()
        for module_path in _get_buildout_scripts(module.path):
            try:
                with open(module_path, 'rb') as f:
                    source = f.read()
            except IOError:
                pass
            else:
                p = Parser(evaluator.grammar,
                           common.source_to_unicode(source), module_path)
                # BUG FIX: _check_module takes (evaluator, module); the
                # original call here was missing the evaluator argument.
                for path in _check_module(evaluator, p.module):
                    if path not in buildout_paths:
                        buildout_paths.add(path)
                        result.append(path)
    finally:
        # cleanup, back to old directory — restored even on error now
        os.chdir(curdir)
    return list(result)
def imitate_pydoc(string): """ It's not possible to get the pydoc's without starting the annoying pager stuff. """ # str needed because of possible unicode stuff in py2k (pydoc doesn't work # with unicode strings) string = str(string) h = pydoc.help with common.ignored(KeyError): # try to access symbols string = h.symbols[string] string, _, related = string.partition(' ') get_target = lambda s: h.topics.get(s, h.keywords.get(s)) while isinstance(string, str): string = get_target(string) try: # is a tuple now label, related = string except TypeError: return '' try: return pydoc_topics.topics[label] if pydoc_topics else '' except KeyError: return ''
def check_getattr(inst, name_str):
    """Checks for both __getattr__ and __getattribute__ methods"""
    # str is important to lose the NamePart!
    module = builtin.Builtin.scope
    name = pr.Call(module, str(name_str), pr.Call.STRING, (0, 0), inst)
    result = []
    with common.ignored(KeyError):
        result = inst.execute_subscope_by_name('__getattr__', [name])
    if result:
        return result
    # `__getattribute__` is executed before anything else. But: I know no
    # use case where consulting it second would make jedi return wrong
    # types. If you ever have something, let me know!
    with common.ignored(KeyError):
        result = inst.execute_subscope_by_name('__getattribute__', [name])
    return result
def _check_getattr(self, inst):
    """Checks for both __getattr__ and __getattribute__ methods"""
    # str is important, because it shouldn't be `Name`!
    name = compiled.create(self._evaluator, str(self.name_str))
    result = set()
    with common.ignored(KeyError):
        result = inst.execute_subscope_by_name("__getattr__", name)
    if result:
        return result
    # Python executes `__getattribute__` before `__getattr__`, but we check
    # it second: a hand-crafted `__getattribute__` could still call another
    # hand-crafted `__getattr__`, while the reverse is not possible. No known
    # use case returns wrong types this way — if you find one, let me know!
    with common.ignored(KeyError):
        result = inst.execute_subscope_by_name("__getattribute__", name)
    return result
def usages_add_import_modules(evaluator, definitions, search_name):
    """ Adds the modules of the imports """
    extra = set()
    for definition in definitions:
        if not isinstance(definition.parent, pr.Import):
            continue
        wrapper = imports.ImportWrapper(evaluator, definition.parent,
                                        nested_resolve=True)
        with common.ignored(IndexError):
            extra.add(wrapper.follow(is_goto=True)[0])
    return set(definitions) | extra
def get_venv_path(venv):
    """Get sys.path for specified virtual environment."""
    venv_dirs = _get_venv_path_dirs(venv)
    # Drop the empty entry (current directory), if present.
    with common.ignored(ValueError):
        venv_dirs.remove('')
    venv_dirs = _get_sys_path_with_egglinks(venv_dirs)
    # As of now, get_venv_path_dirs does not scan built-in pythonpath and
    # user-local site-packages, let's approximate them using path from Jedi
    # interpreter.
    return venv_dirs + sys.path
def _check_getattr(self, inst):
    """Checks for both __getattr__ and __getattribute__ methods"""
    # str is important, because it shouldn't be `Name`!
    name = compiled.create(self._evaluator, str(self.name_str))
    result = set()
    with common.ignored(KeyError):
        result = inst.execute_subscope_by_name('__getattr__', name)
    if result:
        return result
    # Python executes `__getattribute__` before `__getattr__`, but we check
    # it second: a hand-crafted `__getattribute__` could still call another
    # hand-crafted `__getattr__`, while the reverse is not possible. No known
    # use case returns wrong types this way — if you find one, let me know!
    with common.ignored(KeyError):
        result = inst.execute_subscope_by_name('__getattribute__', name)
    return result
def path(self):
    """The module path as a list of names, outermost scope first."""
    names = []
    if isinstance(self._definition, keywords.Keyword):
        # Keywords have no module path.
        return names
    node = self._definition
    while node is not None:
        # Scopes without a ``name`` attribute are simply skipped.
        with common.ignored(AttributeError):
            names.append(node.name)
        node = node.parent
    names.reverse()
    return names
def _detect_django_path(module_path):
    """Detects the path of the very well known Django library (if used)"""
    found = []
    for parent in _traverse_parents(module_path):
        # A directory containing ``manage.py`` marks a Django project root.
        with common.ignored(IOError):
            with open(parent + os.path.sep + 'manage.py'):
                debug.dbg('Found django path: %s', module_path)
                found.append(parent)
    return found
def wrapper(evaluator, scope, *args, **kwargs):
    """Serve star-import modules from the cache while the entry is fresh."""
    with common.ignored(KeyError):
        created, cached_mods = _star_import_cache[scope]
        if created + settings.star_import_cache_validity > time.time():
            return cached_mods
    # cache is too old and therefore invalid or not available
    _invalidate_star_import_cache_module(scope)
    mods = func(evaluator, scope, *args, **kwargs)
    _star_import_cache[scope] = time.time(), mods
    return mods
def wrapper(scope, *args, **kwargs):
    """Serve star-import modules from the cache while the entry is fresh."""
    with common.ignored(KeyError):
        created, cached_mods = star_import_cache[scope]
        if created + settings.star_import_cache_validity > time.time():
            return cached_mods
    # cache is too old and therefore invalid or not available
    invalidate_star_import_cache(scope)
    mods = func(scope, *args, **kwargs)
    star_import_cache[scope] = time.time(), mods
    return mods
def descriptor_check(result):
    """Processes descriptors"""
    processed = []
    for item in result:
        is_descriptor = (isinstance(scope, (er.Instance, er.Class))
                         and hasattr(item, 'get_descriptor_return'))
        if is_descriptor:
            # handle descriptors; on KeyError fall through to plain append
            with common.ignored(KeyError):
                processed += item.get_descriptor_return(scope)
                continue
        processed.append(item)
    return processed
def test_keyword():
    """ github jedi-vim issue #44 """
    # Keywords such as ``print`` should carry documentation.
    defs = Script("print").goto_definitions()
    assert [d.doc for d in defs]

    # ``import`` has no assignment to go to.
    defs = Script("import").goto_assignments()
    assert len(defs) == 0

    # No completions at the start of a bare ``import``.
    completions = Script("import", 1, 1).completions()
    assert len(completions) == 0

    with common.ignored(jedi.NotFoundError):  # TODO shouldn't throw that.
        defs = Script("assert").goto_definitions()
        assert len(defs) == 1
def eval_results(index):
    # Collect the types a tuple lookup at ``index`` can yield across all
    # candidate results; entries without index support are only logged.
    types = []
    for r in results:
        try:
            func = r.get_exact_index_types
        except AttributeError:
            debug.warning("invalid tuple lookup %s of result %s in %s",
                          tup, results, seek_name)
        else:
            # Out-of-range indexes simply contribute nothing.
            with common.ignored(IndexError):
                types += func(index)
    return types
def _path(self):
    """The module path."""
    path = []
    par = self._definition
    while par is not None:
        if isinstance(par, pr.Import):
            # Imports delegate to the ImportWrapper's resolved path and
            # stop the upward walk.
            path += imports.ImportWrapper(self._evaluator,
                                          self._name).import_path
            break
        # Scopes without a ``name`` attribute are skipped.
        with common.ignored(AttributeError):
            path.insert(0, par.name)
        par = par.parent
    return path
def _resolve_descriptors(self, types):
    """Processes descriptors"""
    resolved = []
    for item in types:
        is_descriptor = (isinstance(self.scope, (er.Instance, er.Class))
                         and hasattr(item, 'get_descriptor_return'))
        if is_descriptor:
            # handle descriptors; on KeyError fall through to plain append
            with common.ignored(KeyError):
                resolved += item.get_descriptor_return(self.scope)
                continue
        resolved.append(item)
    return resolved
def get_code(self):
    """Return the source-code representation of this array literal."""
    # Renamed from ``map`` to avoid shadowing the builtin.
    brackets = {self.NOARRAY: "(%s)",
                self.TUPLE: "(%s)",
                self.LIST: "[%s]",
                self.DICT: "{%s}",
                self.SET: "{%s}"}
    inner = []
    for i, stmt in enumerate(self.values):
        s = ""
        # Dict literals carry a parallel ``keys`` list; other types raise
        # IndexError here and get no "key: " prefix.
        with common.ignored(IndexError):
            key = self.keys[i]
            s += key.get_code(new_line=False) + ": "
        s += stmt.get_code(new_line=False)
        inner.append(s)
    # A one-element tuple needs a trailing comma.
    add = "," if self.type == self.TUPLE and len(self) == 1 else ""
    s = brackets[self.type] % (", ".join(inner) + add)
    return s + super(Array, self).get_code()
def get_defined_names(self, on_import_stmt=False):
    """Return the names this import path defines (modules and scope names)."""
    names = []
    for scope in self.follow():
        if scope is ImportPath.GlobalNamespace:
            if self._is_relative_import() == 0:
                names += self._get_module_names()
            if self.file_path is not None:
                path = os.path.abspath(self.file_path)
                # Walk up one directory per relative level.
                for i in range(self.import_stmt.relative_count - 1):
                    path = os.path.dirname(path)
                names += self._get_module_names([path])
                if self._is_relative_import():
                    rel_path = self._get_relative_path() + '/__init__.py'
                    with common.ignored(IOError):
                        m = modules.Module(rel_path)
                        names += m.parser.module.get_defined_names()
        else:
            if on_import_stmt and isinstance(scope, pr.Module) \
                    and scope.path.endswith('__init__.py'):
                pkg_path = os.path.dirname(scope.path)
                paths = self._namespace_packages(pkg_path, self.import_path)
                names += self._get_module_names([pkg_path] + paths)
            if self.is_just_from:
                # In the case of an import like `from x.` we don't need to
                # add all the variables.
                if ['os'] == self.import_path \
                        and not self._is_relative_import():
                    # os.path is a hardcoded exception, because it's a
                    # ``sys.modules`` modification.
                    p = (0, 0)
                    names.append(pr.Name(self.GlobalNamespace,
                                         [('path', p)], p, p,
                                         self.import_stmt))
                continue
            for s, scope_names in evaluate.get_names_of_scope(
                    scope, include_builtin=False):
                for n in scope_names:
                    if self.import_stmt.from_ns is None \
                            or self.is_partial_import:
                        # from_ns must be defined to access module
                        # values plus a partial import means that there
                        # is something after the import, which
                        # automatically implies that there must not be
                        # any non-module scope.
                        continue
                    names.append(n)
    return names
def __init__(self, base, var_args=()):
    """Instantiate an evaluator Instance for class ``base`` with the given
    call arguments."""
    super(Instance, self).__init__(base, var_args)
    if str(base.name) in ['list', 'set'] \
            and builtin.Builtin.scope == base.get_parent_until():
        # compare the module path with the builtin name.
        self.var_args = dynamic.check_array_instances(self)
    else:
        # need to execute the __init__ function, because the dynamic param
        # searching needs it.
        with common.ignored(KeyError):
            self.execute_subscope_by_name('__init__', self.var_args)
    # Generated instances are classes that are just generated by self
    # (No var_args) used.
    self.is_generated = False
def _path(self):
    """The module path."""
    path = []
    par = self._definition
    while par is not None:
        if isinstance(par, pr.Import):
            # Imports contribute both the ``from`` part and the imported
            # names; absolute imports stop the upward walk here.
            path += par.from_names
            path += par.namespace_names
            if par.relative_count == 0:
                break
        # Scopes without a ``name`` attribute are skipped.
        with common.ignored(AttributeError):
            path.insert(0, par.name)
        par = par.parent
    return path
def __init__(self, evaluator, base, var_args=()):
    """Instantiate an evaluator Instance for class ``base`` with the given
    call arguments."""
    super(Instance, self).__init__(evaluator, base, var_args)
    if str(base.name) in ['list', 'set'] \
            and compiled.builtin == base.get_parent_until():
        # compare the module path with the builtin name.
        self.var_args = iterable.check_array_instances(evaluator, self)
    else:
        # need to execute the __init__ function, because the dynamic param
        # searching needs it.
        with common.ignored(KeyError):
            self.execute_subscope_by_name('__init__', self.var_args)
    # Generated instances are classes that are just generated by self
    # (No var_args) used.
    self.is_generated = False
def test_keyword(self):
    """ github jedi-vim issue #44 """
    # Keywords such as ``print`` should carry documentation.
    defs = self.goto_definitions("print")
    assert [d.doc for d in defs]

    defs = self.goto_definitions("import")
    assert len(defs) == 1 and [1 for d in defs if d.doc]

    # unrelated to #44
    defs = self.goto_assignments("import")
    assert len(defs) == 0

    completions = self.completions("import", (1, 1))
    assert len(completions) == 0

    with common.ignored(jedi.NotFoundError):  # TODO shouldn't throw that.
        defs = self.goto_definitions("assert")
        assert len(defs) == 1
def parser(self):
    """ get the parser lazy """
    if not self._parser:
        # If a cached parser exists for this path, invalidate its star
        # imports first — the module is about to be reparsed.
        with common.ignored(KeyError):
            parser = cache.parser_cache[self.path].parser
            cache.invalidate_star_import_cache(parser.module)
        # Call the parser already here, because it will be used anyways.
        # Also, the position is here important (which will not be used by
        # default), therefore fill the cache here.
        self._parser = fast_parser.FastParser(self.source, self.path,
                                              self.position)
        # don't pickle that module, because it's changing fast
        cache.save_module(self.path, self.name, self._parser,
                          pickling=False)
    return self._parser
def from_param(cls, param, parent, var_args):
    """Alternate constructor: copy all slot attributes from ``param`` onto a
    fresh instance and re-parent it."""
    instance = cls()
    before = ()
    # BUG-RISK FIX: the loop variable was named ``cls``, shadowing the
    # classmethod's own ``cls`` parameter; renamed to ``klass``.
    for klass in param.__class__.__mro__:
        with common.ignored(AttributeError):
            # Skip classes that merely inherit the same __slots__ tuple.
            if before == klass.__slots__:
                continue
            before = klass.__slots__
            for name in before:
                setattr(instance, name, getattr(param, name))
    instance.original_param = param
    instance.is_generated = True
    instance.parent = parent
    instance.var_args = var_args
    return instance
def detect_django_path(module_path):
    """Detects the path of the very well known Django library (if used)"""
    found = []
    while True:
        parent = os.path.dirname(module_path)
        # If the module_path doesn't change anymore, we're finished -> /
        if parent == module_path:
            break
        module_path = parent
        # A directory containing ``manage.py`` marks a Django project root.
        with common.ignored(IOError):
            with open(module_path + os.path.sep + 'manage.py'):
                debug.dbg('Found django path: %s' % module_path)
                found.append(module_path)
    return found
def invalidate_star_import_cache(module, only_main=False):
    """ Important if some new modules are being reparsed """
    # Drop the module's own entry and recursively invalidate the modules it
    # star-imported (KeyError means it was never cached).
    with common.ignored(KeyError):
        t, mods = star_import_cache[module]
        del star_import_cache[module]
        for m in mods:
            invalidate_star_import_cache(m, only_main=True)
    if not only_main:
        # Also invalidate every cached module that star-imports this one.
        # We need a list here because otherwise the list is being changed
        # during the iteration in py3k: iteritems -> items.
        for key, (t, mods) in list(star_import_cache.items()):
            if module in mods:
                invalidate_star_import_cache(key)
def _invalidate_star_import_cache_module(module, only_main=False):
    """ Important if some new modules are being reparsed """
    # Drop the module's own entry and recursively invalidate the modules it
    # star-imported (KeyError means it was never cached).
    with common.ignored(KeyError):
        t, mods = _star_import_cache[module]
        del _star_import_cache[module]
        for m in mods:
            _invalidate_star_import_cache_module(m, only_main=True)
    if not only_main:
        # Also invalidate every cached module that star-imports this one.
        # We need a list here because otherwise the list is being changed
        # during the iteration in py3k: iteritems -> items.
        for key, (t, mods) in list(_star_import_cache.items()):
            if module in mods:
                _invalidate_star_import_cache_module(key)
def get_defined_names(self, on_import_stmt=False):
    """Return the names this import path defines (modules and scope names)."""
    names = []
    for scope in self.follow():
        if scope is ImportPath.GlobalNamespace:
            if self._is_relative_import() == 0:
                names += self._get_module_names()
            if self.file_path is not None:
                path = os.path.abspath(self.file_path)
                # Walk up one directory per relative level.
                for i in range(self.import_stmt.relative_count - 1):
                    path = os.path.dirname(path)
                names += self._get_module_names([path])
                if self._is_relative_import():
                    rel_path = self._get_relative_path() + '/__init__.py'
                    with common.ignored(IOError):
                        m = modules.Module(rel_path)
                        names += m.parser.module.get_defined_names()
        else:
            if on_import_stmt and isinstance(scope, pr.Module) \
                    and scope.path.endswith('__init__.py'):
                pkg_path = os.path.dirname(scope.path)
                paths = self._namespace_packages(pkg_path, self.import_path)
                names += self._get_module_names([pkg_path] + paths)
            if self.is_just_from:
                # In the case of an import like `from x.` we don't need to
                # add all the variables.
                if ['os'] == self.import_path \
                        and not self._is_relative_import():
                    # os.path is a hardcoded exception, because it's a
                    # ``sys.modules`` modification.
                    p = (0, 0)
                    names.append(pr.Name(self.GlobalNamespace,
                                         [('path', p)], p, p,
                                         self.import_stmt))
                continue
            for s, scope_names in evaluate.get_names_of_scope(
                    scope, include_builtin=False):
                for n in scope_names:
                    if self.import_stmt.from_ns is None \
                            or self.is_partial_import:
                        # from_ns must be defined to access module
                        # values plus a partial import means that there
                        # is something after the import, which
                        # automatically implies that there must not be
                        # any non-module scope.
                        continue
                    names.append(n)
    return names
def inline(script):
    """
    Inline the single definition at the script position into all its usages.

    :type script: api.Script
    """
    new_lines = modules.source_to_unicode(script.source).splitlines()
    dct = {}
    definitions = script.goto()
    # Whole body is best-effort: any failed assert aborts silently and an
    # empty Refactoring is returned.
    with common.ignored(AssertionError):
        assert len(definitions) == 1
        stmt = definitions[0]._definition
        usages = script.usages()
        # Exclude the defining statement itself from the replacement sites.
        inlines = [r for r in usages
                   if not stmt.start_pos <= r.start_pos <= stmt.end_pos]
        # Replace bottom-up so earlier positions stay valid.
        inlines = sorted(inlines,
                         key=lambda x: (x.module_path, x.start_pos),
                         reverse=True)
        commands = stmt.get_commands()
        # don't allow multiline refactorings for now.
        assert stmt.start_pos[0] == stmt.end_pos[0]

        index = stmt.start_pos[0] - 1
        line = new_lines[index]
        replace_str = line[commands[0].start_pos[1]:stmt.end_pos[1] + 1]
        replace_str = replace_str.strip()
        # tuples need parentheses
        if commands and isinstance(commands[0], pr.Array):
            arr = commands[0]
            if replace_str[0] not in ['(', '[', '{'] and len(arr) > 1:
                replace_str = '(%s)' % replace_str

        # if it's the only assignment, remove the statement
        if len(stmt.set_vars) == 1:
            line = line[:stmt.start_pos[1]] + line[stmt.end_pos[1]:]

        dct = _rename(inlines, replace_str)
        # remove the empty line
        new_lines = dct[script.source_path][2]
        if line.strip():
            new_lines[index] = line
        else:
            new_lines.pop(index)
    return Refactoring(dct)
def inline(script):
    """
    Inline the single definition at the script position into all its usages.

    :type script: api.Script
    """
    new_lines = modules.source_to_unicode(script.source).splitlines()
    dct = {}
    definitions = script.goto_assignments()
    # Whole body is best-effort: any failed assert aborts silently and an
    # empty Refactoring is returned.
    with common.ignored(AssertionError):
        assert len(definitions) == 1
        stmt = definitions[0]._definition
        usages = script.usages()
        # Exclude the defining statement itself from the replacement sites.
        inlines = [r for r in usages
                   if not stmt.start_pos <= (r.line, r.column) <= stmt.end_pos]
        # Replace bottom-up so earlier positions stay valid.
        inlines = sorted(inlines,
                         key=lambda x: (x.module_path, x.line, x.column),
                         reverse=True)
        commands = stmt.get_commands()
        # don't allow multiline refactorings for now.
        assert stmt.start_pos[0] == stmt.end_pos[0]

        index = stmt.start_pos[0] - 1
        line = new_lines[index]
        replace_str = line[commands[0].start_pos[1]:stmt.end_pos[1] + 1]
        replace_str = replace_str.strip()
        # tuples need parentheses
        if commands and isinstance(commands[0], pr.Array):
            arr = commands[0]
            if replace_str[0] not in ['(', '[', '{'] and len(arr) > 1:
                replace_str = '(%s)' % replace_str

        # if it's the only assignment, remove the statement
        if len(stmt.set_vars) == 1:
            line = line[:stmt.start_pos[1]] + line[stmt.end_pos[1]:]

        dct = _rename(inlines, replace_str)
        # remove the empty line
        new_lines = dct[script.path][2]
        if line.strip():
            new_lines[index] = line
        else:
            new_lines.pop(index)
    return Refactoring(dct)
def test_preload_modules(self):
    """Preloading modules must populate the parser cache."""
    def check_loaded(*modules):
        # + 1 for builtin, +1 for None module (currently used)
        assert len(new) == len(modules) + 2
        for i in modules + ('__builtin__',):
            assert [i in k for k in new.keys() if k is not None]

    from jedi import cache
    # Swap in an empty cache; restored at the end of the test.
    temp_cache, cache.parser_cache = cache.parser_cache, {}
    new = cache.parser_cache
    with common.ignored(KeyError):  # performance of tests -> no reload
        new['__builtin__'] = temp_cache['__builtin__']

    jedi.preload_module('datetime')
    check_loaded('datetime')
    jedi.preload_module('json', 'token')
    check_loaded('datetime', 'json', 'token')

    cache.parser_cache = temp_cache
def get_code(self):
    """Return the source-code representation of this array literal."""
    # Renamed from ``map`` to avoid shadowing the builtin.
    brackets = {self.NOARRAY: '(%s)',
                self.TUPLE: '(%s)',
                self.LIST: '[%s]',
                self.DICT: '{%s}',
                self.SET: '{%s}'}
    inner = []
    for i, stmt in enumerate(self.values):
        s = ''
        # Dict literals carry a parallel ``keys`` list; other types raise
        # IndexError here and get no "key: " prefix.
        with common.ignored(IndexError):
            key = self.keys[i]
            s += key.get_code(new_line=False) + ': '
        s += stmt.get_code(new_line=False)
        inner.append(s)
    # A one-element tuple needs a trailing comma.
    add = ',' if self.type == self.TUPLE and len(self) == 1 else ''
    s = brackets[self.type] % (', '.join(inner) + add)
    return s + super(Array, self).get_code()
def test_preload_modules():
    """Preloading modules must populate the parser cache."""
    def check_loaded(*modules):
        # + 1 for builtin, +1 for None module (currently used)
        assert len(new) == len(modules) + 2
        for i in modules + ('__builtin__', ):
            assert [i in k for k in new.keys() if k is not None]

    from jedi import cache
    # Swap in an empty cache; restored at the end of the test.
    temp_cache, cache.parser_cache = cache.parser_cache, {}
    new = cache.parser_cache
    with common.ignored(KeyError):  # performance of tests -> no reload
        new['__builtin__'] = temp_cache['__builtin__']

    api.preload_module('datetime')
    check_loaded('datetime')
    api.preload_module('json', 'token')
    check_loaded('datetime', 'json', 'token')

    cache.parser_cache = temp_cache
def get_index_types(self, evaluator, index=()):
    """
    Get the types of a specific index or all, if not given.

    :param index: A subscriptlist node (or subnode).
    """
    indexes = create_indexes_or_slices(evaluator, index)
    lookup_done = False
    types = []
    for index in indexes:
        if isinstance(index, Slice):
            # Slicing yields the array itself, not its element types.
            types += [self]
            lookup_done = True
        elif isinstance(index, compiled.CompiledObject) \
                and isinstance(index.obj, (int, str, unicode)):
            # Exact lookups that fail (missing key/index, wrong type) are
            # treated as "no lookup happened".
            with common.ignored(KeyError, IndexError, TypeError):
                types += self.get_exact_index_types(index.obj)
                lookup_done = True
    # Fall back to all contained values when no exact lookup succeeded.
    return types if lookup_done else self.values()
def sys_path_with_modifications(evaluator, module):
    """Return the sys.path including module-specific modifications
    (django project roots, buildout script paths)."""
    if module.path is None:
        # Support for modules without a path is bad, therefore return the
        # normal path.
        return list(get_sys_path())

    curdir = os.path.abspath(os.curdir)
    # Relative sys.path modifications in the module are resolved against
    # the module's own directory.
    with common.ignored(OSError):
        os.chdir(os.path.dirname(module.path))

    buildout_script_paths = set()
    try:
        result = _check_module(evaluator, module)
        result += _detect_django_path(module.path)
        for buildout_script in _get_buildout_scripts(module.path):
            for path in _get_paths_from_buildout_script(evaluator,
                                                        buildout_script):
                buildout_script_paths.add(path)
    finally:
        # cleanup, back to old directory — restored even on error now
        os.chdir(curdir)
    return list(result) + list(buildout_script_paths)
def _path(self):
    """The module path."""
    path = []

    def insert_nonnone(x):
        # Prepend only truthy name parts.
        if x:
            path.insert(0, x)

    if not isinstance(self._definition, keywords.Keyword):
        par = self._definition
        while par is not None:
            if isinstance(par, pr.Import):
                # Imports contribute namespace and ``from`` parts; absolute
                # imports stop the upward walk here.
                insert_nonnone(par.namespace)
                insert_nonnone(par.from_ns)
                if par.relative_count == 0:
                    break
            # Scopes without a ``name`` attribute are skipped.
            with common.ignored(AttributeError):
                path.insert(0, par.name)
            par = par.parent
    return path
def get_index_types(self, index_arr=None):
    """ Get the types of a specific index or all, if not given """
    if index_arr is not None:
        if index_arr and [x for x in index_arr
                          if ':' in x.expression_list()]:
            # array slicing
            return [self]

        index_possibilities = self._follow_values(index_arr)
        if len(index_possibilities) == 1:
            # This is indexing only one element, with a fixed index number,
            # otherwise it just ignores the index (e.g. [1+1]).
            index = index_possibilities[0]
            if isinstance(index, compiled.CompiledObject) \
                    and isinstance(index.obj, (int, str, unicode)):
                # Failed exact lookups fall through to returning all values.
                with common.ignored(KeyError, IndexError, TypeError):
                    return self.get_exact_index_types(index.obj)

    result = list(self._follow_values(self._array.values))
    result += check_array_additions(self._evaluator, self)
    return result