def collections_namedtuple(value, arguments, callback):
    """
    Implementation of the namedtuple function.

    This has to be done by processing the namedtuple class template and
    inferring the result.
    """
    inference_state = value.inference_state

    # Process arguments: the first one is the typename.
    name = u'jedi_unknown_namedtuple'
    for c in _follow_param(inference_state, arguments, 0):
        x = get_str_or_none(c)
        if x is not None:
            name = force_unicode(x)
            break

    # TODO here we only use one of the types, we should use all.
    param_values = _follow_param(inference_state, arguments, 1)
    if not param_values:
        return NO_VALUES
    _fields = list(param_values)[0]
    string = get_str_or_none(_fields)
    if string is not None:
        # A single string: field names separated by commas and/or spaces.
        fields = force_unicode(string).replace(',', ' ').split()
    elif isinstance(_fields, iterable.Sequence):
        # Fix: check for None *before* calling force_unicode; the original
        # called force_unicode(None) for non-string members and only
        # filtered None values afterwards.
        fields = []
        for lazy_value in _fields.py__iter__():
            for v in lazy_value.infer():
                s = get_str_or_none(v)
                if s is not None:
                    fields.append(force_unicode(s))
    else:
        return NO_VALUES

    # Build source code from the class template.
    code = _NAMEDTUPLE_CLASS_TEMPLATE.format(
        typename=name,
        field_names=tuple(fields),
        num_fields=len(fields),
        arg_list=repr(tuple(fields)).replace("u'", "").replace("'", "")[1:-1],
        repr_fmt='',
        field_defs='\n'.join(
            # Use a distinct loop variable so the outer `name` isn't shadowed.
            _NAMEDTUPLE_FIELD_TEMPLATE.format(index=index, name=field)
            for index, field in enumerate(fields)
        ),
    )

    # Parse source code and wrap the generated class in a module context.
    module = inference_state.grammar.parse(code)
    generated_class = next(module.iter_classdefs())
    parent_context = ModuleValue(
        inference_state, module,
        file_io=None,
        string_names=None,
        code_lines=parso.split_lines(code, keepends=True),
    ).as_context()

    return ValueSet([ClassValue(inference_state, parent_context, generated_class)])
def test_module__file__(Script, environment):
    """``__file__`` is only inferable when the module has a known path."""
    # No path given -> nothing to infer.
    assert not Script('__file__').infer()

    definition, = Script('__file__', path='example.py').infer()
    inferred_path = force_unicode(definition._name._value.get_safe_value())
    assert inferred_path.endswith('example.py')

    definition, = Script('import antigravity; antigravity.__file__').infer()
    inferred_path = force_unicode(definition._name._value.get_safe_value())
    assert inferred_path.endswith('.py')
def collections_namedtuple(obj, arguments):
    """
    Implementation of the namedtuple function.

    This has to be done by processing the namedtuple class template and
    evaluating the result.
    """
    evaluator = obj.evaluator

    # First argument: the typename (fall back to a placeholder).
    name = u'jedi_unknown_namedtuple'
    for candidate in _follow_param(evaluator, arguments, 0):
        string_value = get_str_or_none(candidate)
        if string_value is not None:
            name = force_unicode(string_value)
            break

    # TODO here we only use one of the types, we should use all.
    param_contexts = _follow_param(evaluator, arguments, 1)
    if not param_contexts:
        return NO_CONTEXTS
    _fields = list(param_contexts)[0]

    if isinstance(_fields, compiled.CompiledValue):
        # One string: split on commas/whitespace into field names.
        raw = force_unicode(_fields.get_safe_value())
        fields = raw.replace(',', ' ').split()
    elif isinstance(_fields, iterable.Sequence):
        # A sequence of string contexts: collect each literal string.
        fields = []
        for lazy_context in _fields.py__iter__():
            for v in lazy_context.infer():
                if is_string(v):
                    fields.append(force_unicode(v.get_safe_value()))
    else:
        return NO_CONTEXTS

    # Fill in the namedtuple class template.
    field_defs = '\n'.join(
        _NAMEDTUPLE_FIELD_TEMPLATE.format(index=index, name=name)
        for index, name in enumerate(fields)
    )
    code = _NAMEDTUPLE_CLASS_TEMPLATE.format(
        typename=name,
        field_names=tuple(fields),
        num_fields=len(fields),
        arg_list=repr(tuple(fields)).replace("u'", "").replace("'", "")[1:-1],
        repr_fmt='',
        field_defs=field_defs,
    )

    # Parse the generated source and expose the class through a module context.
    module = evaluator.grammar.parse(code)
    generated_class = next(module.iter_classdefs())
    parent_context = ModuleContext(
        evaluator, module,
        file_io=None,
        string_names=None,
        code_lines=parso.split_lines(code, keepends=True),
    )

    return ContextSet([ClassContext(evaluator, parent_context, generated_class)])
def __getattr__(self, name):
    """Forward unknown attribute access as a remote call via _workaround."""
    # Private names and a couple of reserved attributes must never be
    # proxied; reaching here with one of them means unpickling failed.
    if name.startswith('_') or name in ('id', 'access'):
        raise AttributeError("Something went wrong with unpickling")

    return partial(self._workaround, force_unicode(name))
def _send(self, evaluator_id, function, args=(), kwargs=None):
    """
    Pickle a call to the subprocess and return its unpickled result.

    :raises InternalError: if the subprocess has crashed or gets killed.
    """
    if self._crashed:
        raise InternalError("The subprocess %s has crashed." % self._executable)

    # Fix: avoid a mutable default argument; normalize None to a dict.
    if kwargs is None:
        kwargs = {}
    if not is_py3:
        # Python 2 compatibility
        kwargs = {force_unicode(key): value for key, value in kwargs.items()}

    data = evaluator_id, function, args, kwargs
    try:
        pickle_dump(data, self._process.stdin)
    except (socket.error, IOError) as e:
        # Once Python2 will be removed we can just use `BrokenPipeError`.
        # Also, somehow in windows it returns EINVAL instead of EPIPE if
        # the subprocess dies.
        if e.errno not in (errno.EPIPE, errno.EINVAL):
            # Not a broken pipe
            raise
        self.kill()
        raise InternalError("The subprocess %s was killed. Maybe out of memory?"
                            % self._executable)

    try:
        is_exception, traceback, result = pickle_load(self._process.stdout)
    except EOFError:
        self.kill()
        raise InternalError("The subprocess %s has crashed." % self._executable)

    if is_exception:
        # Replace the attribute error message with a the traceback. It's
        # way more informative.
        result.args = (traceback,)
        raise result
    return result
def import_module_by_names(inference_state, import_names, sys_path=None,
                           module_context=None, prefer_stubs=True):
    """Import dotted names step by step, returning the final module set."""
    if sys_path is None:
        sys_path = inference_state.get_sys_path()

    str_import_names = tuple(
        force_unicode(i.value if isinstance(i, tree.Name) else i)
        for i in import_names
    )
    # Seed with None so the first import has no parent module.
    value_set = [None]
    for i, name in enumerate(import_names):
        value_set = ValueSet.from_sets([
            import_module(
                inference_state,
                str_import_names[:i + 1],
                parent_module_value,
                sys_path,
                prefer_stubs=prefer_stubs,
            )
            for parent_module_value in value_set
        ])
        if not value_set:
            message = 'No module named ' + '.'.join(str_import_names)
            if module_context is not None:
                # Report the error on the failing name node.
                _add_error(module_context, name, message)
            else:
                debug.warning(message)
            return NO_VALUES
    return value_set
def _get(self, name, allowed_getattr_callback, in_dir_callback,
         check_has_attribute=False):
    """
    To remove quite a few access calls we introduced the callback here.
    """
    # Always use unicode objects in Python 2 from here.
    name = force_unicode(name)

    # Fix: removed a dead `if self._inference_state.allow_descriptor_getattr:
    # pass` branch that had no effect.
    has_attribute, is_descriptor = allowed_getattr_callback(
        name,
        unsafe=self._inference_state.allow_descriptor_getattr
    )
    if check_has_attribute and not has_attribute:
        return []

    if (is_descriptor or not has_attribute) \
            and not self._inference_state.allow_descriptor_getattr:
        return [self._get_cached_name(name, is_empty=True)]

    # Instances only expose names that actually appear in dir().
    if self.is_instance and not in_dir_callback(name):
        return []
    return [self._get_cached_name(name)]
def check_hasattr(node, suite):
    """
    Return True if ``node`` is a ``hasattr(<payload object>, <payload name>)``
    call located inside ``suite``.  Any failed expectation yields False.
    """
    try:
        assert suite.start_pos <= jedi_name.start_pos < suite.end_pos
        assert node.type in ('power', 'atom_expr')
        callee = node.children[0]
        assert callee.type == 'name' and callee.value == 'hasattr'
        call_trailer = node.children[1]
        assert call_trailer.type == 'trailer'
        arglist = call_trailer.children[1]
        assert arglist.type == 'arglist'
        from jedi.evaluate.arguments import TreeArguments
        unpacked = list(TreeArguments(node_context.evaluator, node_context,
                                      arglist).unpack())
        # Arguments should be very simple
        assert len(unpacked) == 2

        # Second argument must infer to exactly the payload's name string.
        key, lazy_context = unpacked[1]
        inferred_names = list(lazy_context.infer())
        assert len(inferred_names) == 1 and is_string(inferred_names[0])
        assert force_unicode(inferred_names[0].get_safe_value()) == payload[1].value

        # First argument must infer to the payload object.
        key, lazy_context = unpacked[0]
        objects = lazy_context.infer()
        return payload[0] in objects
    except AssertionError:
        return False
def check_hasattr(node, suite):
    """
    Check whether ``node`` represents ``hasattr(obj, name)`` for the current
    payload, positioned within ``suite``; failed assertions mean "no match".
    """
    try:
        assert suite.start_pos <= jedi_name.start_pos < suite.end_pos
        assert node.type in ('power', 'atom_expr')
        first_child = node.children[0]
        assert first_child.type == 'name' and first_child.value == 'hasattr'
        trailer_node = node.children[1]
        assert trailer_node.type == 'trailer'
        argument_list = trailer_node.children[1]
        assert argument_list.type == 'arglist'
        from jedi.evaluate.arguments import TreeArguments
        call_args = list(
            TreeArguments(node_context.evaluator, node_context,
                          argument_list).unpack()
        )
        # Arguments should be very simple
        assert len(call_args) == 2

        # Check name
        _key, lazy_name = call_args[1]
        name_values = list(lazy_name.infer())
        assert len(name_values) == 1 and is_string(name_values[0])
        assert force_unicode(name_values[0].get_safe_value()) == payload[1].value

        # Check objects
        _key, lazy_obj = call_args[0]
        return payload[0] in lazy_obj.infer()
    except AssertionError:
        return False
def __getattr__(self, name):
    """Turn attribute access into a deferred remote call."""
    # Never proxy private names or the reserved `id`/`access` attributes;
    # seeing them here indicates a broken unpickle round-trip.
    blocked = name in ('id', 'access') or name.startswith('_')
    if blocked:
        raise AttributeError("Something went wrong with unpickling")

    return partial(self._workaround, force_unicode(name))
def _eval_comparison_part(evaluator, context, left, operator, right):
    """
    Infer the result of a single binary operation between two contexts.

    Falls back to returning both operands when nothing more precise is known.
    """
    l_is_num = is_number(left)
    r_is_num = is_number(right)
    if isinstance(operator, unicode):
        str_operator = operator
    else:
        str_operator = force_unicode(str(operator.value))

    if str_operator == '*':
        # for iterables, ignore * operations
        if isinstance(left, iterable.Sequence) or is_string(left):
            return ContextSet(left)
        elif isinstance(right, iterable.Sequence) or is_string(right):
            return ContextSet(right)
    elif str_operator == '+':
        if l_is_num and r_is_num or is_string(left) and is_string(right):
            return ContextSet(left.execute_operation(right, str_operator))
        elif _is_tuple(left) and _is_tuple(right) or _is_list(left) and _is_list(right):
            return ContextSet(iterable.MergedArray(evaluator, (left, right)))
    elif str_operator == '-':
        if l_is_num and r_is_num:
            return ContextSet(left.execute_operation(right, str_operator))
    elif str_operator == '%':
        # With strings and numbers the left type typically remains. Except for
        # `int() % float()`.
        return ContextSet(left)
    elif str_operator in COMPARISON_OPERATORS:
        if is_compiled(left) and is_compiled(right):
            # Possible, because the return is not an option. Just compare.
            try:
                return ContextSet(left.execute_operation(right, str_operator))
            except TypeError:
                # Could be True or False.
                pass
        else:
            if str_operator in ('is', '!=', '==', 'is not'):
                operation = COMPARISON_OPERATORS[str_operator]
                bool_ = operation(left, right)
                return ContextSet(_bool_to_context(evaluator, bool_))
        return ContextSet(_bool_to_context(evaluator, True),
                          _bool_to_context(evaluator, False))
    elif str_operator == 'in':
        return NO_CONTEXTS

    def check(obj):
        """Checks if a Jedi object is either a float or an int."""
        return isinstance(obj, CompiledInstance) and \
            obj.name.string_name in ('int', 'float')

    # Static analysis, one is a number, the other one is not.
    if str_operator in ('+', '-') and l_is_num != r_is_num \
            and not (check(left) or check(right)):
        # Fix: report the actual operator instead of always `+`.
        message = "TypeError: unsupported operand type(s) for %s: %s and %s" \
            % (str_operator, left, right)
        analysis.add(context, 'type-error-operation', operator, message)

    return ContextSet(left, right)
def _parse_function_doc(doc):
    """
    Takes a function and returns the params and return value as a tuple.
    This is nothing more than a docstring parser.

    TODO docstrings like utime(path, (atime, mtime)) and a(b [, b]) -> None
    TODO docstrings like 'tuple of integers'
    """
    doc = force_unicode(doc)
    # parse round parentheses: def func(a, (b,c))
    try:
        depth = 0
        open_pos = doc.index('(')
        for offset, ch in enumerate(doc[open_pos:]):
            if ch == '(':
                depth += 1
            elif ch == ')':
                depth -= 1
                if depth == 0:
                    end = open_pos + offset
                    break
        param_str = doc[open_pos + 1:end]
    except (ValueError, UnboundLocalError):
        # ValueError for doc.index
        # UnboundLocalError for undefined end in last line
        debug.dbg('no brackets found - no param')
        end = 0
        param_str = u''
    else:
        # remove square brackets, that show an optional param ( = None)
        def change_options(m):
            options = m.group(1).split(',')
            for idx, option in enumerate(options):
                if option and '=' not in option:
                    options[idx] += '=None'
            return ','.join(options)

        while True:
            param_str, n_changes = re.subn(r' ?\[([^\[\]]+)\]',
                                           change_options, param_str)
            if not n_changes:
                break
    param_str = param_str.replace('-', '_')  # see: isinstance.__doc__

    # parse return value
    r = re.search(u'-[>-]* ', doc[end:end + 7])
    if r is None:
        ret = u''
    else:
        index = end + r.end()
        # get result type, which can contain newlines
        pattern = re.compile(r'(,\n|[^\n-])+')
        ret_str = pattern.match(doc, index).group(0).strip()
        # New object -> object()
        ret_str = re.sub(r'[nN]ew (.*)', r'\1()', ret_str)
        ret = docstr_defaults.get(ret_str, ret_str)

    return param_str, ret
def _parse_function_doc(doc):
    """
    Takes a function and returns the params and return value as a tuple.
    This is nothing more than a docstring parser.

    TODO docstrings like utime(path, (atime, mtime)) and a(b [, b]) -> None
    TODO docstrings like 'tuple of integers'
    """
    doc = force_unicode(doc)
    # parse round parentheses: def func(a, (b,c))
    try:
        nesting = 0
        start = doc.index('(')
        for i, char in enumerate(doc[start:]):
            if char == '(':
                nesting += 1
            elif char == ')':
                nesting -= 1
                if nesting == 0:
                    end = start + i
                    break
        param_str = doc[start + 1:end]
    except (ValueError, UnboundLocalError):
        # ValueError for doc.index
        # UnboundLocalError for undefined end in last line
        debug.dbg('no brackets found - no param')
        end = 0
        param_str = u''
    else:
        # remove square brackets, that show an optional param ( = None)
        def change_options(match):
            pieces = match.group(1).split(',')
            for i, piece in enumerate(pieces):
                if piece and '=' not in piece:
                    pieces[i] += '=None'
            return ','.join(pieces)

        changes = 1
        while changes:
            param_str, changes = re.subn(r' ?\[([^\[\]]+)\]',
                                         change_options, param_str)
    param_str = param_str.replace('-', '_')  # see: isinstance.__doc__

    # parse return value
    arrow = re.search(u'-[>-]* ', doc[end:end + 7])
    if arrow is None:
        ret = u''
    else:
        index = end + arrow.end()
        # get result type, which can contain newlines
        ret_str = re.compile(r'(,\n|[^\n-])+').match(doc, index).group(0).strip()
        # New object -> object()
        ret_str = re.sub(r'[nN]ew (.*)', r'\1()', ret_str)
        ret = docstr_defaults.get(ret_str, ret_str)

    return param_str, ret
def get_dir_infos(self):
    """
    Used to return a couple of infos that are needed when accessing the
    sub objects of an objects
    """
    attr_info = {
        force_unicode(name): self.is_allowed_getattr(name)
        for name in self.dir()
    }
    return self.needs_type_completions(), attr_info
def _eval_comparison_part(evaluator, context, left, operator, right):
    """
    Infer the result of one binary operation between two inferred contexts.

    Returns both operands as a fallback when the result cannot be narrowed.
    """
    l_is_num = is_number(left)
    r_is_num = is_number(right)
    if isinstance(operator, unicode):
        str_operator = operator
    else:
        str_operator = force_unicode(str(operator.value))

    if str_operator == '*':
        # for iterables, ignore * operations
        if isinstance(left, iterable.Sequence) or is_string(left):
            return ContextSet(left)
        elif isinstance(right, iterable.Sequence) or is_string(right):
            return ContextSet(right)
    elif str_operator == '+':
        if l_is_num and r_is_num or is_string(left) and is_string(right):
            return ContextSet(left.execute_operation(right, str_operator))
        elif _is_tuple(left) and _is_tuple(right) or _is_list(left) and _is_list(right):
            return ContextSet(iterable.MergedArray(evaluator, (left, right)))
    elif str_operator == '-':
        if l_is_num and r_is_num:
            return ContextSet(left.execute_operation(right, str_operator))
    elif str_operator == '%':
        # With strings and numbers the left type typically remains. Except for
        # `int() % float()`.
        return ContextSet(left)
    elif str_operator in COMPARISON_OPERATORS:
        if is_compiled(left) and is_compiled(right):
            # Possible, because the return is not an option. Just compare.
            try:
                return ContextSet(left.execute_operation(right, str_operator))
            except TypeError:
                # Could be True or False.
                pass
        else:
            if str_operator in ('is', '!=', '==', 'is not'):
                operation = COMPARISON_OPERATORS[str_operator]
                bool_ = operation(left, right)
                return ContextSet(_bool_to_context(evaluator, bool_))
        return ContextSet(_bool_to_context(evaluator, True),
                          _bool_to_context(evaluator, False))
    elif str_operator == 'in':
        return NO_CONTEXTS

    def check(obj):
        """Checks if a Jedi object is either a float or an int."""
        return isinstance(obj, CompiledInstance) and \
            obj.name.string_name in ('int', 'float')

    # Static analysis, one is a number, the other one is not.
    if str_operator in ('+', '-') and l_is_num != r_is_num \
            and not (check(left) or check(right)):
        # Fix: report the actual operator instead of always `+`.
        message = "TypeError: unsupported operand type(s) for %s: %s and %s" \
            % (str_operator, left, right)
        analysis.add(context, 'type-error-operation', operator, message)

    return ContextSet(left, right)
def builtins_getattr(evaluator, objects, names, defaults=None):
    """Inference for ``getattr``: use the first name that is a literal string."""
    # follow the first param
    for obj in objects:
        for name in names:
            if not is_string(name):
                debug.warning('getattr called without str')
                continue
            return obj.py__getattribute__(force_unicode(name.get_safe_value()))
    return NO_CONTEXTS
def sys_path_with_modifications(self):
    """Return sys.path extended with module-local sys.path modifications."""
    result = self._evaluator.get_sys_path() \
        + sys_path.check_sys_path_modifications(self.module_context)

    is_py2 = self._evaluator.environment.version_info.major == 2
    if is_py2 and self.import_path and self.file_path is not None:
        # Python2 uses an old strange way of importing relative imports.
        result.append(force_unicode(os.path.dirname(self.file_path)))

    return result
def sys_path_with_modifications(self):
    """Compute the effective sys.path, honoring in-module modifications."""
    modified = self._evaluator.get_sys_path()
    modified = modified + sys_path.check_sys_path_modifications(self.module_context)

    if self.import_path and self.file_path is not None \
            and self._evaluator.environment.version_info.major == 2:
        # Python2 uses an old strange way of importing relative imports.
        modified.append(force_unicode(os.path.dirname(self.file_path)))
    return modified
def sys_path_with_modifications(self):
    """
    Return the effective search path: parent-directory candidates first,
    then sys.path plus any in-module modifications.
    """
    in_path = []
    base_paths = self._evaluator.get_sys_path() \
        + sys_path.check_sys_path_modifications(self.module_context)

    if self.file_path is not None:
        # If you edit e.g. gunicorn, there will be imports like this:
        # `from gunicorn import something`. But gunicorn is not in the
        # sys.path. Therefore look if gunicorn is a parent directory, #56.
        if self.import_path:  # TODO is this check really needed?
            for parent in sys_path.traverse_parents(self.file_path):
                if os.path.basename(parent) == self.str_import_path[0]:
                    in_path.append(force_unicode(os.path.dirname(parent)))

        # Since we know nothing about the call location of the sys.path,
        # it's a possibility that the current directory is the origin of
        # the Python execution.
        base_paths.append(force_unicode(os.path.dirname(self.file_path)))

    return in_path + base_paths
def get_dir_infos(self):
    """
    Used to return a couple of infos that are needed when accessing the
    sub objects of an objects
    """
    # TODO is_allowed_getattr might raise an AttributeError
    name_map = {}
    for name in self.dir():
        name_map[force_unicode(name)] = self.is_allowed_getattr(name)
    return self.needs_type_completions(), name_map
def builtins_getattr(objects, names, defaults=None):
    """Inference for ``getattr``: follow the first inferable string name."""
    # follow the first param
    for obj in objects:
        for name in names:
            string = get_str_or_none(name)
            if string is not None:
                return obj.py__getattribute__(force_unicode(string))
            debug.warning('getattr called without str')
    return NO_CONTEXTS
def get_qualified_names(self):
    """Return the dotted qualified-name parts of the wrapped object."""
    def try_to_get_name(obj):
        # Prefer __qualname__, fall back to __name__.
        return getattr(obj, '__qualname__', getattr(obj, '__name__', None))

    if self.is_module():
        return ()

    # Try the object itself first, then its type (e.g. for instances).
    qual = try_to_get_name(self._obj)
    if qual is None:
        qual = try_to_get_name(type(self._obj))
        if qual is None:
            return ()
    return tuple(force_unicode(part) for part in qual.split('.'))
def _send(self, evaluator_id, function, args=(), kwargs=None):
    """
    Pickle a call to the subprocess and return its unpickled result.

    On failure, drains stderr into the debug queue and raises InternalError.
    """
    if self.is_crashed:
        raise InternalError("The subprocess %s has crashed." % self._executable)

    # Fix: avoid a mutable default argument; normalize None to a dict.
    if kwargs is None:
        kwargs = {}
    if not is_py3:
        # Python 2 compatibility
        kwargs = {force_unicode(key): value for key, value in kwargs.items()}

    data = evaluator_id, function, args, kwargs
    try:
        pickle_dump(data, self._get_process().stdin, self._pickle_protocol)
    except (socket.error, IOError) as e:
        # Once Python2 will be removed we can just use `BrokenPipeError`.
        # Also, somehow in windows it returns EINVAL instead of EPIPE if
        # the subprocess dies.
        if e.errno not in (errno.EPIPE, errno.EINVAL):
            # Not a broken pipe
            raise
        self._kill()
        raise InternalError(
            "The subprocess %s was killed. Maybe out of memory?"
            % self._executable)

    try:
        is_exception, traceback, result = pickle_load(self._get_process().stdout)
    except EOFError as eof_error:
        try:
            stderr = self._get_process().stderr.read().decode('utf-8', 'replace')
        except Exception as exc:
            stderr = '<empty/not available (%r)>' % exc
        self._kill()
        _add_stderr_to_debug(self._stderr_queue)
        raise InternalError(
            "The subprocess %s has crashed (%r, stderr=%s)." % (
                self._executable,
                eof_error,
                stderr,
            ))

    _add_stderr_to_debug(self._stderr_queue)

    if is_exception:
        # Replace the attribute error message with a the traceback. It's
        # way more informative.
        result.args = (traceback,)
        raise result
    return result
def _get_forward_reference_node(context, string):
    """Parse a forward-reference annotation string into a tree node, or None."""
    try:
        parsed = context.evaluator.grammar.parse(
            force_unicode(string),
            start_symbol='eval_input',
            error_recovery=False,
        )
    except ParserSyntaxError:
        debug.warning('Annotation not parsed: %s' % string)
        return None

    # Attach the new node past the end of the current module so positions
    # don't collide with existing code.
    module = context.tree_node.get_root_node()
    parser_utils.move(parsed, module.end_pos[0])
    parsed.parent = context.tree_node
    return parsed
def find_statement_documentation(tree_node):
    """Return the docstring following a statement, or an empty string."""
    if tree_node.type == 'expr_stmt':
        tree_node = tree_node.parent  # simple_stmt
        sibling = tree_node.get_next_sibling()
        if sibling is None:
            return ''
        if sibling.type == 'simple_stmt':
            sibling = sibling.children[0]
        if sibling.type == 'string':
            cleaned = cleandoc(safe_literal_eval(sibling.value))
            # Since we want the docstr output to be always unicode, just
            # force it.
            return force_unicode(cleaned)
    return ''
def _abs_path(module_context, path):
    """Resolve ``path`` relative to the module's directory; None if unknown."""
    if os.path.isabs(path):
        return path

    module_path = module_context.py__file__()
    if module_path is None:
        # In this case we have no idea where we actually are in the file
        # system.
        return None

    module_dir = os.path.dirname(module_path)
    return os.path.abspath(os.path.join(module_dir, force_unicode(path)))
def _abs_path(module_context, path):
    """Make ``path`` absolute using the containing module's location."""
    if os.path.isabs(path):
        return path

    module_path = module_context.py__file__()
    if module_path is None:
        # In this case we have no idea where we actually are in the file
        # system.
        return None

    joined = os.path.join(os.path.dirname(module_path), force_unicode(path))
    return os.path.abspath(joined)
def clean_scope_docstring(scope_node):
    """ Returns a cleaned version of the docstring token. """
    doc_node = scope_node.get_doc_node()
    if doc_node is None:
        return ''
    # TODO We have to check next leaves until there are no new
    # leaves anymore that might be part of the docstring. A
    # docstring can also look like this: ``'foo' 'bar'
    # Returns a literal cleaned version of the ``Token``.
    # Since we want the docstr output to be always unicode, just force it.
    return force_unicode(cleandoc(safe_literal_eval(doc_node.value)))
def _get_forward_reference_node(context, string):
    """Parse an annotation string and graft the node onto the module tree."""
    grammar = context.evaluator.grammar
    try:
        new_node = grammar.parse(force_unicode(string),
                                 start_symbol='eval_input',
                                 error_recovery=False)
    except ParserSyntaxError:
        debug.warning('Annotation not parsed: %s' % string)
        return None

    # Move past the module end so the node gets valid, non-overlapping
    # positions, then hook it into the tree.
    root = context.tree_node.get_root_node()
    parser_utils.move(new_node, root.end_pos[0])
    new_node.parent = context.tree_node
    return new_node
def _sys_path_with_modifications(self):
    """Return the (possibly fixed) sys.path including module modifications."""
    if self._fixed_sys_path is not None:
        return self._fixed_sys_path

    result = (self._evaluator.get_sys_path()
              + sys_path.check_sys_path_modifications(self.module_context))

    if self._evaluator.environment.version_info.major == 2:
        file_path = self.module_context.py__file__()
        if file_path is not None:
            # Python2 uses an old strange way of importing relative imports.
            result.append(force_unicode(os.path.dirname(file_path)))

    return result
def py__name__(self):
    """Return the class name of the wrapped object, or None if unavailable."""
    if not _is_class_instance(self._obj) or \
            inspect.ismethoddescriptor(self._obj):  # slots
        cls = self._obj
    else:
        try:
            cls = self._obj.__class__
        except AttributeError:
            # happens with numpy.core.umath._UFUNC_API (you get it
            # automatically by doing `import numpy`.
            return None

    name = getattr(cls, '__name__', None)
    if name is None:
        return None
    return force_unicode(name)
def _add_strings(context, nodes, add_slash=False):
    """
    Join the inferred string values of ``nodes``; return None if any node
    does not infer to exactly one string.
    """
    result = ''
    seen_any = False
    for child_node in nodes:
        inferred = context.infer_node(child_node)
        if len(inferred) != 1:
            return None
        value, = inferred
        text = get_str_or_none(value)
        if text is None:
            return None
        # Insert a path separator between (not before) components.
        if seen_any and add_slash:
            result += os.path.sep
        result += force_unicode(text)
        seen_any = True
    return result
def py__name__(self):
    """Best-effort name of the wrapped object's class; None when unknown."""
    is_plain_class = (not _is_class_instance(self._obj)
                      or inspect.ismethoddescriptor(self._obj))  # slots
    if is_plain_class:
        cls = self._obj
    else:
        try:
            cls = self._obj.__class__
        except AttributeError:
            # happens with numpy.core.umath._UFUNC_API (you get it
            # automatically by doing `import numpy`.
            return None

    try:
        return force_unicode(cls.__name__)
    except AttributeError:
        return None
def _get(self, name, allowed_getattr_callback, dir_callback,
         check_has_attribute=False):
    """
    To remove quite a few access calls we introduced the callback here.
    """
    has_attribute, is_descriptor = allowed_getattr_callback()
    if check_has_attribute and not has_attribute:
        return []

    # Always use unicode objects in Python 2 from here.
    name = force_unicode(name)

    # Descriptors and missing attributes produce an empty cached name.
    if is_descriptor or not has_attribute:
        return [self._get_cached_name(name, is_empty=True)]

    # Instances only see what dir() reports.
    if self.is_instance and name not in dir_callback():
        return []
    return [self._get_cached_name(name)]
def py__getitem__(self, index):
    """Look up ``index`` in the dict, bridging bytes/str on Python 2 envs."""
    if is_py3 and self.evaluator.environment.version_info.major == 2:
        # In Python 2 bytes and unicode compare.
        if isinstance(index, bytes):
            try:
                return self._dct[force_unicode(index)].infer()
            except KeyError:
                pass
        elif isinstance(index, str):
            try:
                return self._dct[index.encode('utf-8')].infer()
            except KeyError:
                pass

    return self._dct[index].infer()
def py__getitem__(self, index):
    """Infer the dict value for ``index``; try the alternate text type first
    when inspecting a Python 2 environment from Python 3."""
    if is_py3 and self.evaluator.environment.version_info.major == 2:
        # In Python 2 bytes and unicode compare.
        alternate = None
        if isinstance(index, bytes):
            alternate = force_unicode(index)
        elif isinstance(index, str):
            alternate = index.encode('utf-8')
        if alternate is not None:
            try:
                return self._dct[alternate].infer()
            except KeyError:
                pass
    return self._dct[index].infer()
def _send(self, evaluator_id, function, args=(), kwargs=None):
    """
    Pickle a call to the subprocess and return its unpickled result.

    Collects stderr for diagnostics and raises InternalError on crashes.
    """
    if self.is_crashed:
        raise InternalError("The subprocess %s has crashed." % self._executable)

    # Fix: avoid a mutable default argument; normalize None to a dict.
    if kwargs is None:
        kwargs = {}
    if not is_py3:
        # Python 2 compatibility
        kwargs = {force_unicode(key): value for key, value in kwargs.items()}

    data = evaluator_id, function, args, kwargs
    try:
        pickle_dump(data, self._process.stdin, self._pickle_protocol)
    except (socket.error, IOError) as e:
        # Once Python2 will be removed we can just use `BrokenPipeError`.
        # Also, somehow in windows it returns EINVAL instead of EPIPE if
        # the subprocess dies.
        if e.errno not in (errno.EPIPE, errno.EINVAL):
            # Not a broken pipe
            raise
        self._kill()
        raise InternalError("The subprocess %s was killed. Maybe out of memory?"
                            % self._executable)

    try:
        is_exception, traceback, result = pickle_load(self._process.stdout)
    except EOFError as eof_error:
        try:
            stderr = self._process.stderr.read().decode('utf-8', 'replace')
        except Exception as exc:
            stderr = '<empty/not available (%r)>' % exc
        self._kill()
        _add_stderr_to_debug(self._stderr_queue)
        raise InternalError(
            "The subprocess %s has crashed (%r, stderr=%s)." % (
                self._executable,
                eof_error,
                stderr,
            ))

    _add_stderr_to_debug(self._stderr_queue)

    if is_exception:
        # Replace the attribute error message with a the traceback. It's
        # way more informative.
        result.args = (traceback,)
        raise result
    return result
def _get(self, name, allowed_getattr_callback, dir_callback,
         check_has_attribute=False):
    """
    To remove quite a few access calls we introduced the callback here.
    """
    has_attribute, is_descriptor = allowed_getattr_callback()
    if check_has_attribute and not has_attribute:
        return []

    # Always use unicode objects in Python 2 from here.
    name = force_unicode(name)

    if is_descriptor or not has_attribute:
        # Unknown or descriptor-backed attributes get an empty placeholder.
        return [self._get_cached_name(name, is_empty=True)]

    if self._is_instance and name not in dir_callback():
        return []
    return [self._get_cached_name(name)]
def _os_path_join(args_set, callback):
    """
    Statically evaluate ``os.path.join`` when called with a single sequence
    of literal strings; otherwise defer to ``callback``.
    """
    if len(args_set) == 1:
        sequence, = args_set
        joined = u''
        first = True
        for lazy_value in sequence.py__iter__():
            string_values = lazy_value.infer()
            # Bail out (to callback) if any element is not a single string.
            if len(string_values) != 1:
                break
            text = get_str_or_none(next(iter(string_values)))
            if text is None:
                break
            if not first:
                joined += os.path.sep
            joined += force_unicode(text)
            first = False
        else:
            # Loop finished without break: every element was a literal.
            return ValueSet([
                compiled.create_simple_object(sequence.inference_state, joined)
            ])
    return callback()
def py__simple_getitem__(self, index):
    """Infer the dict value for ``index``, bridging bytes/str on Py2 envs."""
    if is_py3 and self.inference_state.environment.version_info.major == 2:
        # In Python 2 bytes and unicode compare.
        converted = None
        if isinstance(index, bytes):
            converted = force_unicode(index)
        elif isinstance(index, str):
            converted = index.encode('utf-8')
        if converted is not None:
            try:
                return self._dct[converted].infer()
            except KeyError:
                pass

    # Map KeyError/TypeError onto the getitem error machinery.
    with reraise_getitem_errors(KeyError, TypeError):
        lazy_value = self._dct[index]
    return lazy_value.infer()
def _do_import(self, import_path, sys_path):
    """
    This method is very similar to importlib's `_gcd_import`.

    Uses :meth:`_do_import_` to find the given module. If this fails, the
    available extension import functions are called.

    :returns: The found modules :class:`ContextSet` in case of success or
        :data:`NO_CONTEXTS` otherwise.

    .. seealso:: :meth:`extensions.do_import`
    """
    import_parts = [
        force_unicode(part.value if isinstance(part, tree.Name) else part)
        for part in import_path
    ]
    result = self._do_import_(import_parts, import_path, sys_path)
    if result == NO_CONTEXTS:
        # Fall back to the pluggable extension importers.
        result = extensions.do_import(self, import_parts, import_path, sys_path)
    return result
def follow(self):
    """Resolve this import step by step and return the resulting modules."""
    if not self.import_path or not self._inference_possible:
        return NO_CONTEXTS

    import_names = tuple(
        force_unicode(i.value if isinstance(i, tree.Name) else i)
        for i in self.import_path
    )
    sys_path = self._sys_path_with_modifications()

    # Start with no parent module and import one dotted level at a time.
    context_set = [None]
    for i, name in enumerate(self.import_path):
        context_set = ContextSet.from_sets([
            self._evaluator.import_module(import_names[:i + 1],
                                          parent_module_context,
                                          sys_path)
            for parent_module_context in context_set
        ])
        if not context_set:
            message = 'No module named ' + '.'.join(import_names)
            _add_error(self.module_context, name, message)
            return NO_CONTEXTS
    return context_set
def _send(self, evaluator_id, function, args=(), kwargs=None):
    """
    Pickle a call to the subprocess and return its unpickled result.

    :raises InternalError: if the subprocess crashed or was killed.
    """
    if self._crashed:
        raise InternalError("The subprocess %s has crashed." % self._executable)

    # Fix: avoid a mutable default argument; normalize None to a dict.
    if kwargs is None:
        kwargs = {}
    if not is_py3:
        # Python 2 compatibility
        kwargs = {force_unicode(key): value for key, value in kwargs.items()}

    data = evaluator_id, function, args, kwargs
    try:
        pickle_dump(data, self._process.stdin)
    except (socket.error, IOError) as e:
        # Once Python2 will be removed we can just use `BrokenPipeError`.
        # Also, somehow in windows it returns EINVAL instead of EPIPE if
        # the subprocess dies.
        if e.errno not in (errno.EPIPE, errno.EINVAL):
            # Not a broken pipe
            raise
        self.kill()
        raise InternalError(
            "The subprocess %s was killed. Maybe out of memory?"
            % self._executable)

    try:
        is_exception, traceback, result = pickle_load(self._process.stdout)
    except EOFError:
        self.kill()
        raise InternalError("The subprocess %s has crashed." % self._executable)

    if is_exception:
        # Replace the attribute error message with a the traceback. It's
        # way more informative.
        result.args = (traceback,)
        raise result
    return result
def builtins_isinstance(evaluator, objects, types, arguments):
    """Inference for ``isinstance``: compute possible boolean results."""
    results = set()
    for obj in objects:
        cls = obj.py__class__()
        try:
            get_mro = cls.py__mro__
        except AttributeError:
            # This is temporary. Everything should have a class attribute in
            # Python?! Maybe we'll leave it here, because some numpy objects or
            # whatever might not.
            results = set([True, False])
            break

        mro = get_mro()

        for cls_or_tup in types:
            if cls_or_tup.is_class():
                results.add(cls_or_tup in mro)
            elif cls_or_tup.name.string_name == 'tuple' \
                    and cls_or_tup.get_root_context() == evaluator.builtins_module:
                # Check for tuples.
                classes = ContextSet.from_sets(
                    lazy_context.infer()
                    for lazy_context in cls_or_tup.iterate()
                )
                results.add(any(c in mro for c in classes))
            else:
                # Second argument is neither a class nor a tuple: report it.
                _, lazy_context = list(arguments.unpack())[1]
                if isinstance(lazy_context, LazyTreeContext):
                    node = lazy_context.data
                    message = 'TypeError: isinstance() arg 2 must be a ' \
                              'class, type, or tuple of classes and types, ' \
                              'not %s.' % cls_or_tup
                    analysis.add(lazy_context._context, 'type-error-isinstance',
                                 node, message)

    return ContextSet.from_iterable(
        compiled.builtin_from_name(evaluator, force_unicode(str(b)))
        for b in results
    )
def _do_import(self, import_path, sys_path):
    """
    This method is very similar to importlib's `_gcd_import`.
    """
    # Normalize the import path to plain unicode strings (path elements may
    # be parser Name nodes or already-plain strings).
    import_parts = [
        force_unicode(i.value if isinstance(i, tree.Name) else i)
        for i in import_path
    ]

    # Handle "magic" Flask extension imports:
    # ``flask.ext.foo`` is really ``flask_foo`` or ``flaskext.foo``.
    if len(import_path) > 2 and import_parts[:2] == ['flask', 'ext']:
        # New style.
        ipath = ('flask_' + str(import_parts[2]),) + import_path[3:]
        modules = self._do_import(ipath, sys_path)
        if modules:
            return modules
        else:
            # Old style
            return self._do_import(('flaskext',) + import_path[2:], sys_path)

    # Modules configured for eager loading bypass the search below.
    if import_parts[0] in settings.auto_import_modules:
        module = _load_module(
            self._evaluator,
            import_names=import_parts,
            sys_path=sys_path,
        )
        return ContextSet(module)

    # Cache hit: reuse an already-loaded module.
    module_name = '.'.join(import_parts)
    try:
        return ContextSet(self._evaluator.module_cache.get(module_name))
    except KeyError:
        pass

    if len(import_path) > 1:
        # This is a recursive way of importing that works great with
        # the module cache.
        bases = self._do_import(import_path[:-1], sys_path)
        if not bases:
            return NO_CONTEXTS
        # We can take the first element, because only the os special
        # case yields multiple modules, which is not important for
        # further imports.
        parent_module = list(bases)[0]

        # This is a huge exception, we follow a nested import
        # ``os.path``, because it's a very important one in Python
        # that is being achieved by messing with ``sys.modules`` in
        # ``os``.
        if import_parts == ['os', 'path']:
            return parent_module.py__getattribute__('path')

        try:
            method = parent_module.py__path__
        except AttributeError:
            # The module is not a package.
            _add_error(self.module_context, import_path[-1])
            return NO_CONTEXTS
        else:
            # Search each of the package's paths for the submodule.
            paths = method()
            debug.dbg('search_module %s in paths %s', module_name, paths)
            for path in paths:
                code, module_path, is_pkg = self._evaluator.compiled_subprocess.get_module_info(
                    string=import_parts[-1],
                    path=[force_unicode(path)],
                    full_name=module_name,
                    is_global_search=False,
                )
                if module_path is not None:
                    break
            else:
                # No path contained the submodule.
                _add_error(self.module_context, import_path[-1])
                return NO_CONTEXTS
    else:
        # Top-level import: search the whole sys.path via the subprocess.
        debug.dbg('global search_module %s in %s', import_parts[-1], self.file_path)
        # Override the sys.path. It works only good that way.
        # Injecting the path directly into `find_module` did not work.
        code, module_path, is_pkg = self._evaluator.compiled_subprocess.get_module_info(
            string=import_parts[-1],
            full_name=module_name,
            sys_path=sys_path,
            is_global_search=True,
        )
        if module_path is None:
            # The module is not a package.
            _add_error(self.module_context, import_path[-1])
            return NO_CONTEXTS

    module = _load_module(
        self._evaluator, module_path, code, sys_path,
        import_names=import_parts,
        safe_module_name=True,
    )

    if module is None:
        # The file might raise an ImportError e.g. and therefore not be
        # importable.
        return NO_CONTEXTS

    return ContextSet(module)
def list_module_names(evaluator, search_path):
    """Return the unicode names of all modules found on ``search_path``."""
    names = []
    for _loader, module_name, _is_pkg in iter_modules(search_path):
        names.append(force_unicode(module_name))
    return names
def __init__(self, func):
    """Store ``func`` and derive the check name from its ``py``-prefixed name."""
    self.func = func
    # e.g. `py__call__` -> `__call__`: strip the leading "py".
    self.check_name = force_unicode(func.__name__[2:])
def py__doc__(self, include_call_signature=False):
    """
    Return the wrapped object's docstring as unicode ('' if undocumented).

    Fix: ``inspect.getdoc`` returns None for objects without a docstring;
    the original passed that None to ``force_unicode`` before applying
    ``or u''``, so the fallback came too late. Apply the fallback first.
    """
    return force_unicode(inspect.getdoc(self._obj) or u'')
def _force_unicode_decorator(func):
    """Wrap ``func`` so its return value is always coerced to unicode."""
    def wrapper(*args, **kwargs):
        return force_unicode(func(*args, **kwargs))
    return wrapper
def _bool_to_context(evaluator, bool_):
    """Map a Python bool onto the compiled builtin ``True``/``False`` context."""
    name = force_unicode(str(bool_))
    return compiled.builtin_from_name(evaluator, name)