def get_safe_object():
    """Build a mapping of names in ``safe_list`` to allowed objects.

    Candidates are builtin functions and non-Exception classes, gathered
    from both ``__builtins__`` and this module's globals.  Globals take
    precedence over builtins; a name with no allowed object maps to None.

    Returns
    -------
    dict
        ``{name: object-or-None}`` for every name in ``safe_list``.
    """
    all_builtin = {
        name: obj
        for name, obj in inspect.getmembers(__builtins__)
        if inspect.isbuiltin(obj)
        or (inspect.isclass(obj) and not issubclass(obj, Exception))
    }
    all_global = {
        name: obj
        for name, obj in globals().items()
        if inspect.isbuiltin(obj)
        or (inspect.isclass(obj) and not issubclass(obj, Exception))
    }

    def _get_safe_object(name):
        # Resolve one name, preferring module globals over builtins.
        try:
            if name in all_global:
                debug_info('get `{}` from global.'.format(name))
                return all_global[name]
            elif name in all_builtin:
                debug_info('get `{}` from builtin.'.format(name))
                return all_builtin[name]
            else:
                debug_info("can't find allowed object for `{}`.".format(name))
                return None
        except Exception as ex:
            # Defensive: a failure while logging must not break the lookup.
            debug_info(ex)
            return None

    # C404 fix: dict comprehension instead of dict() over a list of pairs.
    return {k: _get_safe_object(k) for k in safe_list}
def imusim_api_role(role, rawtext, text, lineno, inliner, options={}, content=[]):
    """Sphinx extension role linking `text` into the IMUSim API docs.

    Resolves `text` against ``imusim.all`` (or as an ``imusim`` submodule)
    and builds an epydoc-style URI for the matching module/class/function/
    method.  Returns the standard Sphinx role tuple ``([node], [])``.
    NOTE(review): Python 2 code (statement `raise`, `im_class`, builtin
    `reduce`); mutable default args are epydoc/Sphinx role convention.
    """
    uribase = "../api/"
    try:
        # Look for object in imusim.all.
        obj = reduce(lambda x, y: getattr(x, y), [imusim.all] + text.split('.'))
        if inspect.ismodule(obj):
            file = '%s-module.html' % obj.__name__
        elif inspect.isclass(obj):
            file = '%s.%s-class.html' % (obj.__module__, obj.__name__)
        elif inspect.isfunction(obj):
            # Functions are anchors within their module page.
            file = '%s-module.html#%s' % (obj.__module__, obj.__name__)
        elif inspect.ismethod(obj):
            cls = obj.im_class
            file = '%s.%s-class.html#%s' \
                % (cls.__module__, cls.__name__, obj.__name__)
        elif inspect.isbuiltin(obj):
            if hasattr(obj, '__module__'):
                # Native function
                file = obj.__module__ + '-module.html#' + obj.__name__
            elif hasattr(obj, '__objclass__'):
                # Native method
                cls = obj.__objclass__
                file = '%s.%s-class.html#%s' \
                    % (cls.__module__, cls.__name__, obj.__name__)
            else:
                raise TypeError, \
                    "Don't know how to document native object " + repr(obj)
        else:
            raise TypeError, \
                "Don't know how to document Python object " + repr(obj)
    except AttributeError:
        # Look for object as an imusim submodule.
        __import__("imusim.%s" % text)
        obj = reduce(lambda x, y: getattr(x, y), [imusim] + text.split('.'))
        file = 'imusim.%s-module.html' % text
    except ImportError:
        raise KeyError, "Could not find an IMUSim object called '%s'" % text
    # Methods display their bare name; everything else keeps the raw text.
    if inspect.ismethod(obj) \
            or (inspect.isbuiltin(obj) and hasattr(obj, '__objclass__')):
        name = obj.__name__
    else:
        name = text
    uri = uribase + file
    node = nodes.reference(rawtext, name, refuri=uri, **options)
    return [node], []
def nodoc_check(self, mod, depth, why):
    """
    Walk the tree of modules and classes looking for routines with no doc
    string and report them
    """
    # `count` numbers findings across recursive calls; `self.already`
    # doubles as both a stdlib-name blacklist and a visited set, and is
    # lazily initialised on first call (AttributeError probe below).
    global count
    try:
        already = self.already
    except AttributeError:
        count = 0
        self.already = ['glob', 'fcntl', 're', 'pexpect', 'unittest',
                        'difflib', 'pprint', 'warnings', 'heapq', 'os',
                        'pdb', 'optparse', 'traceback', 'linecache',
                        'bdb', 'logging', 'StringIO', 'inspect', 'stat',
                        'tokenize', 'socket', 'dis', 'getopt', 'shlex',
                        'pickle', 'shutil', 'pytest', ]
        already = self.already
    rval = ''
    # Pass 1: report undocumented, non-private, non-TestCase routines.
    for name, item in inspect.getmembers(mod, inspect.isroutine):
        if all([not inspect.isbuiltin(item),
                name not in dir(unittest.TestCase),
                item.__name__ not in already,
                not name.startswith('_')]):
            already.append(":".join([mod.__name__, name]))
            if item.__doc__ is None:
                try:
                    filename = U.basename(mod.__file__)
                except AttributeError:
                    # `mod` may be a class here (recursive call with 'c'):
                    # fall back to the defining module's file.
                    tmod = sys.modules[mod.__module__]
                    filename = U.basename(tmod.__file__)
                rval += "\n%3d. %s(%s): %s" % (count, filename, why, name)
                try:
                    count += 1
                except NameError:
                    count = 1
    # Pass 2: recurse into test-like classes (those with tearDown).
    for name, item in inspect.getmembers(mod, inspect.isclass):
        if all([hasattr(item, 'tearDown'),
                item.__name__ not in already,
                depth < 5]):
            already.append(item.__name__)
            rval += self.nodoc_check(item, depth+1, 'c')
    # Pass 3: recurse into nested modules, bounded by depth.
    for name, item in inspect.getmembers(mod, inspect.ismodule):
        if all([not inspect.isbuiltin(item),
                item.__name__ not in already,
                not name.startswith('@'),
                not name.startswith('_'),
                depth < 5]):
            already.append(item.__name__)
            rval += self.nodoc_check(item, depth+1, 'm')
    return rval
def testIsBuiltin(self):
    """tf_inspect.isbuiltin must agree with inspect.isbuiltin."""
    candidates = (TestDecoratedClass,
                  test_decorated_function,
                  test_undecorated_function,
                  range,
                  max)
    for candidate in candidates:
        self.assertEqual(tf_inspect.isbuiltin(candidate),
                         inspect.isbuiltin(candidate))
def add_fileline_to_docstring(module, incursive=True):
    """Append the definition position to each function contained in module.

    Examples
    --------
    # Put the following codes at the end of a file
    add_fileline_to_docstring(__name__)
    """
    def _annotate(target):
        """Add fileinto to a object. """
        doc = target.__doc__
        # Skip undocumented objects and ones already annotated.
        if doc is None or 'From:' in doc:
            return
        source = inspect.getsourcefile(target)
        if source is None:
            return
        try:
            lineno = inspect.getsourcelines(target)[-1]
        except IOError:
            return
        target.__doc__ = doc + '\n\nFrom:%s:%d' % (source, lineno)

    if isinstance(module, str):
        module = sys.modules[module]
    for _, member in inspect.getmembers(module):
        if inspect.isbuiltin(member):
            continue
        if inspect.isfunction(member):
            _annotate(member)
        if inspect.ismethod(member):
            _annotate(member.__func__)
        if inspect.isclass(member) and incursive:
            # One level of recursion only: annotate the class's methods.
            add_fileline_to_docstring(member, False)
def eargs (name, imports):
    """Get arglist of NAME for Eldoc &c.  Exec IMPORTS first."""
    # Emacs helper: prints '_emacs_out <signature-or-doc-first-line>'.
    # All failures degrade to printing the bare '_emacs_out ' marker.
    try:
        try: # don't give up if the imports fail
            if imports:
                execit (imports)
            parts = name.split ('.')
            if len (parts) > 1:
                # Dotted name: make sure the top-level package is imported.
                execit ('import ' + parts[0])
        except:
            pass
        # NOTE(review): eval/exec on editor-supplied text — trusted input
        # only (this runs inside the user's own Emacs session).
        func = eval (name)
        if inspect.isbuiltin (func) or inspect.isclass (func):
            # No introspectable signature: fall back to the docstring.
            doc = func.__doc__
            if doc.find (' ->') != -1:
                printit ('_emacs_out ' + doc.split (' ->')[0])
            elif doc.find ('\n') != -1:
                printit ('_emacs_out ' + doc.split ('\n')[0])
            else:
                raise RuntimeError
            return
        if inspect.ismethod (func):
            try:
                func = func.im_func        # Python 2
            except:
                func = func.__func__       # Python 3
        if not inspect.isfunction (func):
            raise RuntimeError
        (args, varargs, varkw, defaults) = inspect.getargspec (func)
        # No space between name and arglist for consistency with builtins.
        printit ('_emacs_out ' + \
                 func.__name__ + \
                 inspect.formatargspec (args, varargs, varkw, defaults))
    except:
        # Bare marker tells Emacs "nothing to show".
        printit ('_emacs_out ')
def _from_module(self, module, object):
    """
    Return true if the given object is defined in the given module.
    """
    if module is None:
        # No module to compare against: accept everything.
        return True
    if inspect.isfunction(object):
        # A function belongs to the module whose namespace it closes over.
        return module.__dict__ is object.__globals__
    if inspect.isbuiltin(object):
        return module.__name__ == object.__module__
    if inspect.isclass(object):
        return module.__name__ == object.__module__
    if inspect.ismethod(object):
        # This one may be a bug in cython that fails to correctly set the
        # __module__ attribute of methods, but since the same error is easy
        # to make by extension code writers, having this safety in place
        # isn't such a bad idea
        return module.__name__ == object.__self__.__class__.__module__
    if inspect.getmodule(object) is not None:
        return module is inspect.getmodule(object)
    if hasattr(object, '__module__'):
        return module.__name__ == object.__module__
    if isinstance(object, property):
        return True  # [XX] no way not be sure.
    if inspect.ismethoddescriptor(object):
        # Unbound PyQt signals reach this point in Python 3.4b3, and we want
        # to avoid throwing an error. See also http://bugs.python.org/issue3158
        return False
    raise ValueError("object must be a class or function, got %r" % object)
def _ScopeInspector_GetFunctionAttributes(definitions):
    """Build completion keywords ("name(argspec)") for the callables in
    *definitions*, and — one level deep — for callable attributes of the
    plain objects it contains.  Non-dict input yields an empty list."""
    if type(definitions) != dict:
        return []
    from mantid.simpleapi import _get_function_spec
    keywords = []
    for name, obj in _iteritems(definitions):
        if name.startswith('_'):
            continue
        if _inspect.isclass(obj) or _inspect.ismodule(obj):
            continue
        if _inspect.isfunction(obj) or _inspect.isbuiltin(obj):
            keywords.append(name + _get_function_spec(obj))
            continue
        # Object could be a proxy so check and use underlying object
        if hasattr(obj, "_getHeldObject"):
            obj = obj._getHeldObject()
        for att in dir(obj):
            try:
                fattr = getattr(obj, att)
            except Exception:
                continue  # not much we do if not even calling it causes an exception
            if att.startswith('_'):
                continue
            if _inspect.isfunction(fattr) or _inspect.ismethod(fattr) \
                    or hasattr(fattr, 'im_func'):
                keywords.append(name + '.' + att + _get_function_spec(fattr))
    return keywords
def extend(self, lst):
    """adds items to the list of expander functions or variables.

    Here is an example:
    >>> a=1
    >>> b=2
    >>> def t(x):
    ...     return x+1
    ...
    >>> block= Block(parse_list=[],external_definitions=globals())
    >>> block.extend(["a","b","t"])
    >>> _pr_set(block.direct_vars)
    set(['a', 'b'])
    >>> _pr_set(block.direct_funcs)
    set(['t'])
    """
    for name in lst:
        value = self.globals_[name]
        if inspect.isbuiltin(value) or inspect.isfunction(value):
            self.direct_funcs.add(name)
        else:
            # anything that is not a function is treated as a variable
            self.direct_vars.add(name)
def get_class_vars(clazz, candidates=()):
    """ Class method to get all (uppercase) class variables of a class as a dict

    Parameters
    ----------
    clazz : type or str
        Class object, or a class name resolved via ``Factory.create_class``.
    candidates : iterable of str
        Names to include regardless of the uppercase/non-callable rules.
        (Default changed from mutable ``[]`` to immutable ``()``; the
        argument is only read, so behaviour is unchanged.)

    Returns
    -------
    dict
        Mapping of qualifying class-variable names to their values.
    """
    import inspect
    if isinstance(clazz, str):
        clazz = Factory.create_class(clazz)
    found = dict()  # renamed from `vars` to avoid shadowing the builtin
    for key, value in inspect.getmembers(clazz):
        if key in candidates:
            found[key] = value
            continue
        # Keep only UPPERCASE, non-dunder, non-callable class attributes.
        if not key.startswith('__') and key.isupper() \
                and not inspect.isclass(value) \
                and not inspect.isfunction(value) \
                and not inspect.isbuiltin(value) \
                and not inspect.ismethod(value):
            found[key] = value
    return found
def do_obj_mirror(self, args): """ For the supplied object, all of it's methods/attributes etc are mirrored in the calling objects namespace This is a dirty way of acting as an object proxy meaning we can be injected in place of another object and be sure we won't break the larger app If no frame is specified then the frame from which the debugger was called is used If "debugger" is given as the frame the debugger frame is used Usage: obj_mirror <instantiated object to mirror> """ import inspect if not args: print "[-] No object supplied to mirror" return arg_list = args.split(" ") s_obj_to_mirror = arg_list[0] if len(arg_list) > 1: frame = arg_list[1] else: frame = None if not frame: ##Use context of calling frame frame_context = self.curframe elif frame == "debugger": ##Use context of the frame the debugger is executing in frame_context = self.debugger_frame else: ##None frame object supplied ... bail print "[-] None frame object supplied - object type was %s"%(type(frame)) return locals = frame_context.f_locals globals = frame_context.f_globals print "[=] Mirroring %s in the context of %s"%(s_obj_to_mirror, frame_context) try: obj_to_mirror = eval(s_obj_to_mirror, globals, locals) except: print "[-] Unknown object specified, cannot mirror" return for x in dir(obj_to_mirror): skip_list = ["__init__", "__builtins__", "__doc__", "__name__"] if inspect.isbuiltin(x) or x in skip_list: print "[-] skipping %s"%(x) continue print "[+] %s -> %s.%s"%(x, obj_to_mirror.__name__, x) exec("%s = %s.%s"%(x, obj_to_mirror.__name__, x), globals, locals)
def __init__(self, py_callable, args=None, kwargs=None, task=None):
    #pylint: disable=W0231
    """Store a python callable plus its positional/keyword arguments,
    rejecting values that cannot be executed later (non-callables,
    classes, builtins, malformed args/kwargs containers)."""
    self.py_callable = py_callable
    self.task = task
    self.out = None
    self.err = None
    self.result = None
    self.values = {}
    self.args = [] if args is None else args
    self.kwargs = {} if kwargs is None else kwargs

    # check valid parameters
    if not hasattr(self.py_callable, '__call__'):
        raise InvalidTask("%r PythonAction must be a 'callable' got %r."
                          % (self.task, self.py_callable))
    if inspect.isclass(self.py_callable):
        raise InvalidTask("%r PythonAction can not be a class got %r."
                          % (self.task, self.py_callable))
    if inspect.isbuiltin(self.py_callable):
        raise InvalidTask("%r PythonAction can not be a built-in got %r."
                          % (self.task, self.py_callable))
    if type(self.args) is not tuple and type(self.args) is not list:
        raise InvalidTask("%r args must be a 'tuple' or a 'list'. got '%s'."
                          % (self.task, self.args))
    if type(self.kwargs) is not dict:
        raise InvalidTask("%r kwargs must be a 'dict'. got '%s'"
                          % (self.task, self.kwargs))
def _faked(module, obj, name):
    """Look up the fake (stub) definition for *obj* (or *name*) inside the
    faked version of *module*; returns None when no fake exists."""
    # Crazy underscore actions to try to escape all the internal madness.
    if module is None:
        module = get_module(obj)
    faked_mod = _load_faked_module(module)
    if faked_mod is None:
        return
    # Having the module as a `parser.representation.module`, we need to scan
    # for methods.
    if name is None:
        if inspect.isbuiltin(obj):
            # Builtin function: look it up directly in the faked module.
            return search_scope(faked_mod, obj.__name__)
        elif not inspect.isclass(obj):
            # object is a method or descriptor
            cls = search_scope(faked_mod, obj.__objclass__.__name__)
            if cls is None:
                return
            return search_scope(cls, obj.__name__)
        # NOTE(review): classes with name=None fall through and return None.
    else:
        if obj == module:
            # Module-level name.
            return search_scope(faked_mod, name)
        else:
            # Attribute of a class: find the faked class first.
            cls = search_scope(faked_mod, obj.__name__)
            if cls is None:
                return
            return search_scope(cls, name)
def hs(arg):
    """ Display the attributes of an object (except methods and those
    starting with an underscore) or an ndarray with composite dtype
    alongside the names of the records. The display is truncated so that
    each name fits in one line. """
    import inspect
    if isinstance(arg, np.ndarray):
        names = arg.dtype.names
        if names is None:
            # Plain array: nothing record-like to tabulate.
            print(arg)
            return
        print(str(arg.size) + ' element' + ('s' if arg.size > 1 else ''))
    else:
        members = inspect.getmembers(
            arg, lambda x: not inspect.ismethod(x) and not inspect.isbuiltin(x))
        names = [name for name, _ in members if not name.startswith('_')]
    # Builtin max over a generator: no numpy round-trip needed here.
    width = max(map(len, names))
    avail = 72 - width - 2  # hoisted: room left for each value's text
    for name in names:
        label = name.ljust(width) + ': '
        value = str(getattr(arg, name))[0:avail].replace('\n', ' ')
        if len(value) == avail:
            # Value filled the line: mark it as truncated.
            value = value[0:-3] + '...'
        print(label + value)
def _default_arguments(self, obj):
    """Return the list of default arguments of obj if it is callable,
    or empty list otherwise."""
    found = []
    target = obj
    plain = inspect.isfunction(obj) or inspect.ismethod(obj)
    if inspect.isbuiltin(obj):
        # Builtins expose nothing useful; leave target as-is.
        pass
    elif not plain:
        if inspect.isclass(obj):
            #for cython embededsignature=True the constructor docstring
            #belongs to the object itself not __init__
            found += self._default_arguments_from_docstring(
                getattr(obj, '__doc__', ''))
            # for classes, check for __init__,__new__
            target = (getattr(obj, '__init__', None) or
                      getattr(obj, '__new__', None))
        # for all others, check if they are __call__able
        elif hasattr(obj, '__call__'):
            target = obj.__call__
        found += self._default_arguments_from_docstring(
            getattr(target, '__doc__', ''))
    keep_kinds = (inspect.Parameter.KEYWORD_ONLY,
                  inspect.Parameter.POSITIONAL_OR_KEYWORD)
    try:
        sig = inspect.signature(target)
    except ValueError:
        pass
    else:
        found.extend(name for name, param in sig.parameters.items()
                     if param.kind in keep_kinds)
    return list(set(found))
def getMembers(cls):
    """Partition the own (non-inherited) public members of *cls* into
    functions, methods and plain attributes; uninteresting member kinds
    (builtins, code objects, modules, descriptors) are dropped."""
    skip_kinds = (inspect.isbuiltin, inspect.iscode, inspect.ismodule,
                  inspect.ismethoddescriptor, inspect.isdatadescriptor)
    lfunctions = []
    lmethods = []
    lattributes = []
    for m in inspect.getmembers(cls):
        m_name, m_object = m
        # Do not print inherited names (truthiness check kept as-is: a
        # falsy own attribute is also skipped, matching prior behaviour).
        if not cls.__dict__.get(m_name):
            continue
        if m_name[0] == "_" or m_name in kobject.ignore_list:
            continue
        if any(check(m_object) for check in skip_kinds):
            continue
        if inspect.ismethod(m_object):
            lmethods.append(m)
        elif inspect.isfunction(m_object):
            lfunctions.append(m)
        elif inspect.isroutine(m_object):
            continue
        else:
            lattributes.append(m)
    return {"functions": lfunctions,
            "methods": lmethods,
            "attributes": lattributes}
def should_be_checked( self, obj, module=None ):
    """returns True, if obj should be checked, False otherwise"""
    # Already visited (tracked by id) — never re-check.
    if id(obj) in self.__checked:
        return False
    if inspect.isbuiltin( obj ):
        return False
    if inspect.ismodule( obj ):
        if obj.__name__ in self.__already_imported:
            return False #do not check already imported modules
        # Modules qualify only if their source lives under an include path.
        source_file = self.getsourcefile(obj)
        if source_file:
            return contains_parent_dir( source_file, self.__include_paths )
        else:
            return False
    # Non-module objects must belong to the module being scanned.
    obj_module = inspect.getmodule( obj )
    if not obj_module is module:
        return False
    # Only introspectable callables/descriptors and classes are checked.
    if inspect.isclass( obj ) \
       or inspect.ismethod( obj ) \
       or inspect.isfunction( obj ) \
       or inspect.isroutine( obj ) \
       or inspect.ismethoddescriptor( obj ) \
       or inspect.isdatadescriptor( obj ):
        return True
    return False
def _from_module(self, module, object):
    """
    Return true if the given object is defined in the given module.
    """
    if module is None:
        # With no module to compare against, accept everything.
        return True
    if inspect.isfunction(object):
        # A function belongs to the module whose namespace it closes over.
        return module.__dict__ is object.__globals__
    if inspect.isbuiltin(object) or inspect.isclass(object):
        return module.__name__ == object.__module__
    if inspect.ismethod(object):
        # This one may be a bug in cython that fails to correctly set the
        # __module__ attribute of methods, but since the same error is easy
        # to make by extension code writers, having this safety in place
        # isn't such a bad idea
        return module.__name__ == object.__self__.__class__.__module__
    if inspect.getmodule(object) is not None:
        return module is inspect.getmodule(object)
    if hasattr(object, "__module__"):
        return module.__name__ == object.__module__
    if isinstance(object, property):
        return True  # [XX] no way not be sure.
    raise ValueError("object must be a class or function, got %r" % object)
def connect(self, slot, connType = QtCore.Qt.AutoCompatConnection):
    """Connect *slot* to this signal, validating Python slots' arity first.

    Python-implemented slots may accept fewer arguments than the signal
    supplies (extras are dropped) but never more mandatory ones.
    NOTE(review): PyQt4-era API (QObject.connect, AutoCompatConnection)
    and `getargspec` — legacy code.
    """
    # Sanity check on slot.
    if not callable(slot):
        raise TypeError('Slot type not callable: %s' % type(slot))
    if not isbuiltin(slot):
        # Slot is implemented in Python; check arguments.
        args, varargs_, varkw_, defaults = getargspec(slot)
        numSlotArgs = len(args)
        # Bound/unbound methods: 'self' is supplied by the binding.
        if numSlotArgs != 0 and args[0] == 'self':
            numSlotArgs -= 1
        # Defaulted parameters are optional, so they don't count as mandatory.
        if defaults is not None:
            numSlotArgs -= len(defaults)
        if numSlotArgs > self.__numArgs:
            raise TypeError(
                'Slot requires %d arguments, while signal only supplies %d'
                % ( numSlotArgs, self.__numArgs )
                )
        # Note: It is allowed for a slot to have less arguments than the
        # signal: the superfluous arguments are ignored.
    # Make connection.
    ok = QtCore.QObject.connect(
        self.__object, self.__macroSignature, slot, connType
        )
    # Note: I have never seen False being returned in practice, even on
    # failed connections.
    assert ok, 'Failed to connect to "%s"' % self.__signature
def _wrap_callback(self, fn):
    """Normalize *fn* into a zero-argument callback: ``None`` passes
    through, a no-arg callable is returned unchanged, and a callable
    taking one mandatory parameter is wrapped so it receives this
    device; anything else raises InputDeviceError."""
    if fn is None:
        return None
    if not callable(fn):
        raise InputDeviceError('value must be None or a callable')
    if inspect.isbuiltin(fn):
        # We can't introspect the prototype of builtins. In this case we
        # assume that the builtin has no (mandatory) parameters; this is
        # the most reasonable assumption on the basis that pre-existing
        # builtins have no knowledge of gpiozero, and the sole parameter
        # we would pass is a gpiozero object
        return fn
    # Try binding ourselves to the argspec of the provided callable.
    # If this works, assume the function is capable of accepting no
    # parameters
    try:
        inspect.getcallargs(fn)
        return fn
    except TypeError:
        pass
    # If the above fails, try binding with a single parameter
    # (ourselves). If this works, wrap the specified callback
    try:
        inspect.getcallargs(fn, self)
    except TypeError:
        raise InputDeviceError(
            'value must be a callable which accepts up to one '
            'mandatory parameter')

    @wraps(fn)
    def wrapper():
        return fn(self)
    return wrapper
def _inspect_isroutine_override(object):
    """Like ``inspect.isroutine`` but additionally treats
    ``functools.partial`` objects as routines."""
    import inspect
    if isinstance(object, partial):
        return True
    checks = (inspect.isbuiltin, inspect.isfunction,
              inspect.ismethod, inspect.ismethoddescriptor)
    return any(check(object) for check in checks)
def get_scope_objects(names):
    """
    Looks for the names defined with dir() in an objects and divides
    them into different object types.
    """
    # NOTE(review): relies on `scope`, `mixin_funcs` and
    # `is_in_base_classes` from an enclosing scope — this is a closure.
    classes = {}
    funcs = {}
    stmts = {}
    members = {}
    for n in names:
        # Skip dunder-ish names unless explicitly whitelisted.
        if '__' in n and n not in mixin_funcs:
            continue
        try:
            # this has a builtin_function_or_method
            exe = getattr(scope, n)
        except AttributeError:
            # happens e.g. in properties of
            # PyQt4.QtGui.QStyleOptionComboBox.currentText
            # -> just set it to None
            members[n] = None
        else:
            if inspect.isclass(scope):
                # Inherited attributes belong to the base class, not here.
                if is_in_base_classes(scope, n, exe):
                    continue
            if inspect.isbuiltin(exe) or inspect.ismethod(exe) \
                    or inspect.ismethoddescriptor(exe):
                funcs[n] = exe
            elif inspect.isclass(exe):
                classes[n] = exe
            elif inspect.ismemberdescriptor(exe):
                members[n] = exe
            else:
                # Everything else is treated as a plain statement/value.
                stmts[n] = exe
    return classes, funcs, stmts, members
def default(self, obj):
    """Recursively convert *obj* into JSON-serialisable primitives:
    datetimes become epoch millis, enums their name, colours their hex
    string, and arbitrary objects a dict of their public data members."""
    if isinstance(obj, datetime):
        return self.default(date_time_2_millis(obj))
    if isinstance(obj, Enum):
        return self.default(obj.name)
    if isinstance(obj, Color):
        return self.default(obj.hex())
    if not hasattr(obj, "__dict__"):
        return obj

    def _keep(key, value):
        # Only plain, non-callable data members make it into the output.
        if value is None or key == "Position" or key.startswith("__"):
            return False
        rejects = (inspect.isabstract, inspect.isbuiltin, inspect.isfunction,
                   inspect.isgenerator, inspect.isgeneratorfunction,
                   inspect.ismethod, inspect.ismethoddescriptor,
                   inspect.isroutine)
        return not any(reject(value) for reject in rejects)

    data = {key: value
            for key, value in inspect.getmembers(obj)
            if _keep(key, value)}
    return self.default(data)
def _func_name(func):
    """Return name of a callable (function, class, partial, etc.)"""
    # NOTE(review): Python 2 attributes in use (`im_class`, `func_code`);
    # on Python 3 the method/class branches would raise inside callers.
    module = ''
    if hasattr(func, '__module__'):
        module = (func.__module__ if func.__module__ else '__main__')
    # Return a human readable name associated with a function
    if inspect.ismethod(func):
        nme = '.'.join([module, func.im_class.__name__, func.__name__])
    elif inspect.isfunction(func):
        nme = '.'.join([module, func.__name__])
    elif inspect.isbuiltin(func):
        # Builtins have no code object: no "at file:line" suffix possible.
        return '.'.join([module, func.__name__])
    elif isinstance(func, partial):
        # Unwrap partials recursively.
        return 'partial_of_' + JobModule._func_name(func.func)
    elif inspect.isclass(func):
        nme = '.'.join([module, func.__name__])
        # Fall through to report the constructor's location when available.
        if hasattr(func, '__init__') and inspect.ismethod(func.__init__):
            func = func.__init__
        else:
            return nme
    else:
        nme = 'type %s' % type(func)
        if hasattr(func, '__name__'):
            nme = '%s of %s' % (func.__name__, type(func))
        return nme
    # Append the definition site for python-level callables.
    nme += ' at ' + ':'.join([func.func_code.co_filename,
                              str(func.func_code.co_firstlineno)])
    return nme
def _finddoc(obj):
    # type: (Any) -> unicode
    """Locate the docstring for *obj*, searching the owning class's MRO.

    Mirrors the inherited-doc lookup of ``inspect.getdoc``: determine
    which class *obj* belongs to, then return the first non-None
    ``__doc__`` found under the same attribute name along that class's
    MRO.  Returns None when no owner or doc can be determined.
    """
    if inspect.isclass(obj):
        # Classes: scan their own MRO directly, skipping ``object``.
        for base in obj.__mro__:
            if base is not object:
                try:
                    doc = base.__doc__
                except AttributeError:
                    continue
                if doc is not None:
                    return doc
        return None

    if inspect.ismethod(obj) and getattr(obj, '__self__', None):
        name = obj.__func__.__name__
        self = obj.__self__
        # NOTE(review): getattr(..., '__func__') has no default here and
        # may raise AttributeError — confirm this is intended.
        if (inspect.isclass(self) and
                getattr(getattr(self, name, None), '__func__') is obj.__func__):
            # classmethod
            cls = self
        else:
            cls = self.__class__
    elif inspect.isfunction(obj) or inspect.ismethod(obj):
        name = obj.__name__
        cls = _findclass(obj)
        if cls is None or getattr(cls, name) != obj:
            return None
    elif inspect.isbuiltin(obj):
        name = obj.__name__
        self = obj.__self__
        if (inspect.isclass(self) and
                self.__qualname__ + '.' + name == obj.__qualname__):
            # classmethod
            cls = self
        else:
            cls = self.__class__
    # Should be tested before isdatadescriptor().
    elif isinstance(obj, property):
        func = obj.fget
        name = func.__name__
        cls = _findclass(func)
        if cls is None or getattr(cls, name) is not obj:
            return None
    elif inspect.ismethoddescriptor(obj) or inspect.isdatadescriptor(obj):
        name = obj.__name__
        cls = obj.__objclass__
        if getattr(cls, name) is not obj:
            return None
    else:
        return None

    # Walk the owning class's MRO for the first non-None doc of `name`.
    for base in cls.__mro__:
        try:
            doc = getattr(base, name).__doc__
        except AttributeError:
            continue
        if doc is not None:
            return doc
    return None
def _store_state(self, tempfolder, localfilename):
    """Pickle this wrapper's execution state (entry point, hashes of the
    functions it calls, and the current config) into
    ``<tempfolder>/<filename>.pck`` so a remote/cached run can validate it."""
    state = {"filename": localfilename,
             "main": self.modtoken.main,
             "called": {},
             "bound": any([main.isbound
                           for main in self.modtoken.functions[self.modtoken.main]])}
    # Record a content hash for every function our entry point may call.
    for name in list(self.modtoken.functions.keys()):
        if name != self.fkt.__name__:
            fkt_hash = None
            # distinguish if a function does not exists or if its an object method
            if not name in self.fkt.__globals__:
                pass
                # raise Exception("Function not accessible form global scope of function: {0} ({1})".format(self.fkt.__name__, name))
                # TODO: remove this!
            elif isinstance(self.fkt.__globals__[name], Wrapper):
                # Already-wrapped function: hash the wrapped original.
                fkt_hash = get_fkt_hash(self.fkt.__globals__[name].fkt)
            elif inspect.isbuiltin(self.fkt.__globals__[name]) and hasattr(cache, str(id(self.fkt.__globals__[name]))):
                # Builtin that shadows a cached original: hash the cached one.
                fkt_hash = get_fkt_hash(getattr(cache, str(id(self.fkt.__globals__[name]))))
            elif inspect.isfunction(self.fkt.__globals__[name]):
                fkt_hash = get_fkt_hash(self.fkt.__globals__[name])
            else:
                raise Exception("Function not a unbound, pure python function: {0} ({1})".format(self.fkt.__name__, name))
            if fkt_hash is not None:
                state["called"][name] = fkt_hash
    # Snapshot every config attribute alongside the call hashes.
    for name in get_config_attrs():
        state[name] = getattr(config, name)
    with open(os.path.join(tempfolder, "{0}.pck".format(self.filename)), "wb") as fp:
        pickle.dump(state, fp)
def trace_dispatch(self, frame, event, arg):
    """Profiler hook: append one timing tuple per 'call'/'c_call' event,
    skipping builtins and property accesses."""
    if event not in ('call', 'c_call'):
        return
    # skip built in funcs
    if inspect.isbuiltin(arg):
        return
    # skip properties, we're only really interested in function calls
    # this will unfortunently skip any important logic that is wrapped
    # in property logic
    code = frame.f_code
    if is_property(code):
        return
    indent, first_parent = self.indent_level(frame)
    f = frame.f_back
    if event == "c_call":
        # C call: no python file/line info; use the callable's name.
        entry = (indent, "", 0, arg.__name__, id(frame), id(first_parent))
    else:
        fcode = frame.f_code
        entry = (indent, fcode.co_filename, fcode.co_firstlineno,
                 fcode.co_name, id(frame), id(first_parent))
    self.timings.append(entry)
def create(cls, func, args=None, kwargs=None, connection=None,
           result_ttl=None, status=None, description=None,
           dependency=None):
    """Creates a new Job instance for the given function, arguments, and
    keyword arguments.
    """
    args = () if args is None else args
    kwargs = {} if kwargs is None else kwargs
    assert isinstance(args, (tuple, list)), '%r is not a valid args list.' % (args,)
    assert isinstance(kwargs, dict), '%r is not a valid kwargs dict.' % (kwargs,)
    job = cls(connection=connection)
    if inspect.ismethod(func):
        # Bound method: remember the instance and the bare method name.
        job._instance = func.__self__
        job._func_name = func.__name__
    elif inspect.isfunction(func) or inspect.isbuiltin(func):
        job._func_name = '%s.%s' % (func.__module__, func.__name__)
    else:
        # we expect a string
        job._func_name = func
    job._args = args
    job._kwargs = kwargs
    job.description = description or job.get_call_string()
    job.result_ttl = result_ttl
    job._status = status
    # dependency could be job instance or id
    if dependency is not None:
        job._dependency_id = dependency.id if isinstance(dependency, Job) else dependency
    return job
def _default_arguments(self, obj):
    """Return the list of default arguments of obj if it is callable,
    or empty list otherwise."""
    # NOTE(review): uses inspect.getargspec, removed in Python 3.11 —
    # this is the legacy variant of the signature()-based implementation.
    call_obj = obj
    ret = []
    if inspect.isbuiltin(obj):
        # Builtins: no argspec, no docstring parsing either.
        pass
    elif not (inspect.isfunction(obj) or inspect.ismethod(obj)):
        if inspect.isclass(obj):
            #for cython embededsignature=True the constructor docstring
            #belongs to the object itself not __init__
            ret += self._default_arguments_from_docstring(
                getattr(obj, '__doc__', ''))
            # for classes, check for __init__,__new__
            call_obj = (getattr(obj, '__init__', None)
                        or getattr(obj, '__new__', None))
        # for all others, check if they are __call__able
        elif hasattr(obj, '__call__'):
            call_obj = obj.__call__
        ret += self._default_arguments_from_docstring(
            getattr(call_obj, '__doc__', ''))
    try:
        # Only the trailing, defaulted arguments are of interest.
        args, _, _1, defaults = inspect.getargspec(call_obj)
        if defaults:
            ret += args[-len(defaults):]
    except TypeError:
        pass
    return list(set(ret))
def eargs (name, imports):
    "Get arglist of NAME for Eldoc &c."
    # Emacs helper: prints '_emacs_out <signature-or-doc-first-line>'.
    # Any failure degrades to printing the bare '_emacs_out ' marker.
    try:
        # NOTE(review): exec/eval of editor-supplied text — trusted input
        # only (runs inside the user's own Emacs session).
        if imports: exec(imports)
        parts = name.split ('.')
        if len (parts) > 1:
            exec('import ' + parts[0]) # might fail
        func = eval (name)
        if inspect.isbuiltin (func) or type(func) is type:
            # No python-level signature: fall back to the docstring.
            doc = func.__doc__
            if doc.find (' ->') != -1:
                print('_emacs_out', doc.split (' ->')[0])
            else:
                print('_emacs_out', doc.split ('\n')[0])
            return
        if inspect.ismethod (func):
            # BUG FIX: this is Python 3 code (print()/exec() functions),
            # where bound methods expose __func__, not the Python 2
            # `im_func` — the old attribute always raised and the bare
            # except silently printed an empty result.
            func = func.__func__
        if not inspect.isfunction (func):
            print('_emacs_out ')
            return
        # NOTE(review): getargspec/formatargspec were removed in Python
        # 3.11; on newer interpreters this falls into the except below.
        (args, varargs, varkw, defaults) = inspect.getargspec (func)
        # No space between name and arglist for consistency with builtins.
        print('_emacs_out', \
              func.__name__ + inspect.formatargspec (args, varargs, varkw, defaults))
    except:
        print("_emacs_out ")
def inspect(self, objectLocationPath="j", recursive=True, parent=None, obj=None):
    """
    walk over objects in memory and create code completion api in jumpscale cfgDir under codecompletionapi
    @param object is start object
    @param objectLocationPath is full location name in object tree e.g. j.sal.fs , no need to fill in
    """
    self.logger.debug(objectLocationPath)
    if obj is None:
        try:
            # NOTE(review): eval of a dotted path — only safe because the
            # path originates from this walker itself.
            obj = eval(objectLocationPath)
        except:
            self.raiseError("could not eval:%s" % objectLocationPath)
            return
    # only process our files
    try:
        if "__file__" in dir(obj):
            filepath = inspect.getabsfile(obj.__file__)
            filepath = os.path.normpath(filepath)  # normalize path
            if not filepath.startswith(self.base):
                return
        else:
            clsfile = inspect.getfile(obj.__class__)
            clsfile = os.path.normpath(clsfile)
            if not clsfile.startswith(self.base):
                return
    except Exception as e:
        # print "COULD NOT DEFINE FILE OF:%s"%objectLocationPath
        pass
    # Cycle guard: every object is visited at most once.
    if obj not in self.visited and obj:
        self.visited.append(obj)
    else:
        self.logger.debug("RECURSIVE:%s" % objectLocationPath)
        return
    attrs = dir(obj)
    ignore = [
        "constructor_args",
        "NOTHING",
        "template_class",
        "redirect_cache"
    ]

    def check(item):
        # Keep the factory hook, drop private/im_*/ignored names.
        if item == "_getFactoryEnabledClasses":
            return True
        if item.startswith("_"):
            return False
        if item.startswith("im_"):
            return False
        if item in ignore:
            return False
        return True

    # if objectLocationPath == 'j.actions.logger.disabled':
    attrs = [item for item in attrs if check(item)]
    for objattributename in attrs:
        filepath = None
        objectLocationPath2 = "%s.%s" % (objectLocationPath, objattributename)
        try:
            objattribute = eval("obj.%s" % objattributename)
        except Exception as e:
            self.logger.error(str(e))
            self.raiseError("cannot eval %s" % objectLocationPath2)
            continue
        if objattributename.upper() == objattributename:
            # is special type or constant
            self.logger.debug("special type: %s" % objectLocationPath2)
            j.sal.fs.writeFile(self.apiFileLocation, "%s?7\n" % objectLocationPath2, True)
            self.jstree[objectLocationPath2] = attrib(
                objattributename, "const", '',
                objectLocationPath2, filepath)
        elif objattributename == "_getFactoryEnabledClasses":
            # Factory hook: expand each advertised class into the tree.
            try:
                for fclparent, name, obj2 in obj._getFactoryEnabledClasses():
                    if fclparent != "":
                        objectLocationPath2 = objectLocationPath + "." + fclparent + "." + name
                    else:
                        objectLocationPath2 = objectLocationPath + "." + name
                    self._processClass(name, objectLocationPath2, obj)
                    if not isinstance(
                            objattribute,
                            (str, bool, int, float, dict, list, tuple)):
                        self.inspect(
                            objectLocationPath=objectLocationPath2,
                            recursive=True,
                            parent=obj,
                            obj=obj2)
            except Exception as e:
                self.logger.error(
                    "the _getFactoryEnabledClasses gives error")
                # NOTE(review): leftover debugging import; does nothing else.
                import ipdb
        elif inspect.isfunction(objattribute) or inspect.ismethod(
                objattribute) or inspect.isbuiltin(
                objattribute) or inspect.isgenerator(objattribute):
            # isinstance(objattribute, (types.BuiltinMethodType,
            # types.BuiltinFunctionType, types.MethodType, types.FunctionType)):
            try:
                methodpath = inspect.getabsfile(objattribute)
                methodargs = ", ".join(objattribute.__code__.co_varnames)
                filepath = methodpath
                if not methodpath.startswith(self.base):
                    # Foreign method: drop its docs and stop this branch.
                    self.classDocs.pop(objectLocationPath2, "")
                    self.logger.info("SKIPPED:%s" % objectLocationPath2)
                    return
            except Exception as e:
                self.logger.error(str(e))
            source, params = self._processMethod(objattributename,
                                                 objattribute,
                                                 objectLocationPath2, obj)
            self.logger.debug("instancemethod: %s" % objectLocationPath2)
            j.sal.fs.writeFile(
                self.apiFileLocation,
                "%s?4(%s)\n" % (objectLocationPath2, params), True)
            self.jstree[objectLocationPath2] = attrib(
                objattributename, "method", objattribute.__doc__,
                objectLocationPath2, filepath, methodargs)
        elif isinstance(objattribute, (str, bool, int, float, list, tuple,
                                       dict, property)) or objattribute is None:
            self.logger.debug("property: %s" % objectLocationPath2)
            j.sal.fs.writeFile(self.apiFileLocation,
                               "%s?8\n" % objectLocationPath2, True)
            self.jstree[objectLocationPath2] = attrib(
                objattributename, "property", objattribute.__doc__,
                objectLocationPath2)
        elif isinstance(objattribute.__class__, type):
            # Class instance: record it and recurse into its attributes.
            j.sal.fs.writeFile(self.apiFileLocation,
                               "%s?8\n" % objectLocationPath2, True)
            self.logger.debug("class or instance: %s" % objectLocationPath2)
            try:
                filepath = inspect.getfile(objattribute.__class__)
            except:
                pass
            self.jstree[objectLocationPath2] = attrib(
                objattributename, "class", objattribute.__doc__,
                objectLocationPath2, filepath)
            try:
                if not isinstance(objattribute,
                                  (str, bool, int, float, dict, list,
                                   tuple)) or objattribute is not None:
                    self.inspect(objectLocationPath2, parent=objattribute)
            except Exception as e:
                self.logger.error(str(e))
        else:
            pass
def dump(self, obj: object):
    """Serialize *obj* into a JSON-friendly structure.

    Primitives pass through unchanged; containers are dumped recursively;
    modules, classes, builtins and instances are replaced by ``.metaid``
    references into ``self.metainfo`` / ``self.proceeded`` so that shared
    objects and cycles are dumped only once.
    """
    obj_id = id(obj)
    # Trivial cases first: None and primitives are returned as-is.
    if is_none(obj):
        return None
    if is_primitive(obj):
        return obj
    # Built-in containers: dicts keep their keys; set-like collections are
    # wrapped with a ".collection_type" tag so they can be rebuilt exactly.
    if type(obj) in [list, set, tuple, dict, frozenset]:
        if isinstance(obj, dict):
            result = {key: self.dump(obj[key]) for key in obj}
        elif type(obj) in [frozenset, set, tuple]:
            result = {".list": [self.dump(el) for el in obj],
                      ".collection_type": f"{obj.__class__.__name__}"}
        else:
            result = [self.dump(el) for el in obj]
        return result
    if isinstance(obj, datetime):
        return {".time": str(obj.isoformat())}
    # Already-dumped objects collapse to a back-reference.
    if obj_id in self.proceeded:
        return {".metaid": str(obj_id)}
    # NOTE(review): parses as `not (getattr(...) in dir(builtins))` — this
    # branch is taken only when the object's name is NOT a builtin name.
    elif not getattr(obj, "__name__", None) in dir(builtins):
        self.proceeded.append(obj_id)
        if inspect.ismodule(obj):
            try:
                if self.metainfo.get(str(obj_id)) == None:
                    # Stdlib/builtin modules are recorded by name only;
                    # user modules also carry their source code.
                    if obj.__name__ in builtin_module_names:
                        self.metainfo.update({str(obj_id): {".metatype": "module", ".name": obj.__name__}})
                    else:
                        self.metainfo.update(
                            {str(obj_id): {".code": get_code(obj), ".metatype": "module", ".name": obj.__name__}})
            except Exception:
                # Best effort: fall back to a name-only record if the
                # source cannot be retrieved.
                self.metainfo.update({str(obj_id): {".metatype": "module", ".name": obj.__name__}})
            return {".metaid": str(obj_id)}
        if getattr(obj, "__name__", None) and not is_basetype(obj):
            # NOTE(review): unreachable — the enclosing elif already
            # excluded names present in dir(builtins); also
            # ``proceeded.remove(str(obj_id))`` removes a *str* while the
            # list holds ints, so it would never match. TODO confirm intent.
            if obj.__name__ in dir(builtins):
                try:
                    self.proceeded.remove(str(obj_id))
                except Exception:
                    pass
                return {".metatype": "builtin", ".builtin": obj.__name__}
        if inspect.ismethod(obj) or inspect.isfunction(obj) or isinstance(obj, staticmethod):
            return self.funcdump(obj)
        if inspect.isbuiltin(obj):
            # C-implemented function: record module + name, no source.
            self.metainfo.update(
                {str(obj_id): {".metatype": "builtin-func", ".module": obj.__module__, ".name": obj.__name__}})
            return {".metaid": str(obj_id)}
        if is_instance(obj):
            # Dump the type first so the instance's fields can refer to it.
            type_, fields = deconstruct_instance(obj)
            type_id = id(type_)
            self.dump(type_)
            data = {key: self.dump(fields[key]) for key in fields}
            return {".metaid": str(type_id), ".fields": data}
        if inspect.isclass(obj):
            # Record the class's MRO references and its attribute triples.
            mro = fetch_typereferences(obj)
            attrs = deconstruct_class(obj)
            mro = [self.dump(el) for el in mro]
            attrs = [self.dump((el[0], self.dump(el[1]), el[2])) for el in attrs]
            if self.metainfo.get(str(obj_id)) == None:
                self.metainfo.update({str(obj_id): {".metatype": "class",
                                                    ".name": obj.__name__,
                                                    ".module": getattr(obj, "__module__", None),
                                                    ".class": {"mro": mro, "attrs": attrs}}})
            return {".metaid": str(obj_id)}
    else:
        # Name matches a builtin: functions/instances are still dumped, but
        # the object is NOT marked as proceeded.
        if inspect.ismethod(obj) or inspect.isfunction(obj) or isinstance(obj, staticmethod):
            return self.funcdump(obj)
        if is_instance(obj):
            type_, fields = deconstruct_instance(obj)
            type_id = id(type_)
            self.dump(type_)
            data = {key: self.dump(fields[key]) for key in fields}
            return {".metaid": str(type_id), ".fields": data}
    # Anything unhandled (e.g. odd descriptors) serializes to None.
    return None
def importable(obj, alias='', source=None, builtin=True):
    """get an importable string (i.e. source code or the import string)
    for the given object, including any required objects from the enclosing
    and global scope

    This function will attempt to discover the name of the object, or the
    repr of the object, or the source code for the object. To attempt to
    force discovery of the source code, use source=True, to attempt to
    force the use of an import, use source=False; otherwise an import will
    be sought for objects not defined in __main__. The intent is to build a
    string that can be imported from a python file.

    obj is the object to inspect. If alias is provided, then rename the
    object with the given alias. If builtin=True, then force an import for
    builtins where possible.
    """
    #NOTE: we always 'force', and 'lstrip' as necessary
    #NOTE: for 'enclosing', use importable(outermost(obj))
    # Decide the initial strategy: prefer source for __main__ objects,
    # prefer an import for builtins (when builtin=True).
    if source is None:
        source = True if isfrommain(obj) else False
    elif builtin and isbuiltin(obj):
        source = False
    # The loop tries one strategy, and on failure flips to the other;
    # a second failure re-raises (each strategy is attempted at most once).
    tried_source = tried_import = False
    while True:
        if not source:  # we want an import
            try:
                if _isinstance(obj):  # for instances, punt to _importable
                    return _importable(obj, alias, source=False, builtin=builtin)
                src = _closuredimport(obj, alias=alias, builtin=builtin)
                # Exactly one closured import is expected; zero or several
                # are unsupported cases.
                if len(src) == 0:
                    raise NotImplementedError('not implemented')
                if len(src) > 1:
                    raise NotImplementedError('not implemented')
                return list(src.values())[0]
            except:
                if tried_source:
                    raise
                tried_import = True
        # we want the source
        try:
            src = _closuredsource(obj, alias=alias)
            if len(src) == 0:
                raise NotImplementedError('not implemented')
            # groan... an inline code stitcher
            def _code_stitcher(block):
                "stitch together the strings in tuple 'block'"
                if block[0] and block[-1]: block = '\n'.join(block)
                elif block[0]: block = block[0]
                elif block[-1]: block = block[-1]
                else: block = ''
                return block
            # get free_vars first
            _src = _code_stitcher(src.pop(None))
            _src = [_src] if _src else []
            # get func_vars
            for xxx in src.values():
                xxx = _code_stitcher(xxx)
                if xxx: _src.append(xxx)
            # make a single source string
            if not len(_src):
                src = ''
            elif len(_src) == 1:
                src = _src[0]
            else:
                src = '\n'.join(_src)
            # get source code of objects referred to by obj in global scope
            from .detect import globalvars
            obj = globalvars(obj)  #XXX: don't worry about alias? recurse? etc?
            obj = list(getsource(_obj, name, force=True)
                       for (name, _obj) in obj.items() if not isbuiltin(_obj))
            obj = '\n'.join(obj) if obj else ''
            # combine all referred-to source (global then enclosing)
            if not obj: return src
            if not src: return obj
            return obj + src
        except:
            if tried_import:
                raise
            tried_source = True
            # Flip to the other strategy for the next loop iteration.
            source = not source
    # should never get here
    return
def is_c_function(obj: object) -> bool:
    """Return True when *obj* is implemented in C.

    This covers both what ``inspect.isbuiltin`` recognizes (builtin
    functions and bound builtin methods) and anything whose concrete type
    equals the type of a known C function (``ord``).
    """
    c_function_type = type(ord)
    if inspect.isbuiltin(obj):
        return True
    return type(obj) is c_function_type
def get_public_fields(obj):
    """Return the names of *obj*'s public data attributes.

    A name qualifies when it does not start with an underscore and its
    value is not a function, method, or builtin — i.e. plain data.

    Bug fix: the original passed the attribute *name* (a str) to the
    ``inspect`` predicates, which therefore never matched, so functions
    and methods were never filtered out. The predicates must be applied
    to the attribute value itself.
    """
    public_fields = []
    for attr in dir(obj):
        if attr.startswith("_"):
            continue
        # Resolve the value; default of None keeps the name when the
        # attribute cannot be read (None fails all three predicates).
        value = getattr(obj, attr, None)
        if inspect.isbuiltin(value) or inspect.isfunction(value) \
                or inspect.ismethod(value):
            continue
        public_fields.append(attr)
    return public_fields
def count(self):
    """Return the total number of objects, across all pages."""
    # Prefer a queryset-style ``count()`` when the object list provides a
    # cheap one; otherwise fall back to ``len()``. Builtin ``count``
    # methods (e.g. ``list.count``) take an argument and are excluded.
    counter = getattr(self.object_list, 'count', None)
    usable = (
        callable(counter)
        and not inspect.isbuiltin(counter)
        and method_has_no_args(counter)
    )
    if usable:
        return counter()
    return len(self.object_list)
def decorator(custom):
    """Return *custom*, wrapped so it mimics the original callable.

    NOTE(review): ``orig_func`` is a free variable from the enclosing
    scope (not visible in this chunk) — presumably the callable being
    replaced; confirm against the outer function.
    """
    if inspect.isbuiltin(orig_func):
        # without function-to-method transformation
        # Builtins do not turn into bound methods when assigned onto a
        # class, so wrap `custom` in a partial (partials are likewise not
        # transformed) and copy its metadata onto the wrapper.
        custom = functools.update_wrapper(
            functools.partial(custom), custom)
    return custom
def create(cls, func, args=None, kwargs=None, connection=None,
           result_ttl=None, ttl=None, status=None, description=None,
           depends_on=None, timeout=None, id=None, origin=None):
    """Creates a new Job instance for the given function, arguments, and
    keyword arguments.
    """
    # Normalize the call arguments, then validate their types.
    args = () if args is None else args
    kwargs = {} if kwargs is None else kwargs
    if not isinstance(args, (tuple, list)):
        raise TypeError('{0!r} is not a valid args list.'.format(args))
    if not isinstance(kwargs, dict):
        raise TypeError('{0!r} is not a valid kwargs dict.'.format(kwargs))

    job = cls(connection=connection)
    if id is not None:
        job.set_id(id)
    if origin is not None:
        job.origin = origin

    # Set the core job tuple properties: resolve the callable into a
    # stored function name plus, where needed, the owning instance.
    job._instance = None
    if inspect.ismethod(func):
        # Bound method: keep the instance and the bare method name.
        job._instance = func.__self__
        job._func_name = func.__name__
    elif inspect.isfunction(func) or inspect.isbuiltin(func):
        # Plain or builtin function: store its dotted module path.
        job._func_name = '%s.%s' % (func.__module__, func.__name__)
    elif isinstance(func, string_types):
        # Caller already supplied a dotted-path string.
        job._func_name = as_text(func)
    elif not inspect.isclass(func) and hasattr(func, '__call__'):
        # A callable class instance: dispatch through its __call__.
        job._instance = func
        job._func_name = '__call__'
    else:
        raise TypeError(
            'Expected a callable or a string, but got: {}'.format(func))
    job._args = args
    job._kwargs = kwargs

    # Extra meta data.
    job.description = description or job.get_call_string()
    job.result_ttl = result_ttl
    job.ttl = ttl
    job.timeout = timeout
    job._status = status

    # The dependency may be given either as a Job instance or as an id.
    if depends_on is not None:
        if isinstance(depends_on, Job):
            job._dependency_id = depends_on.id
        else:
            job._dependency_id = depends_on
    return job
def do_obj_mirror(self, args): """ For the supplied object, all of it's methods/attributes etc are mirrored in the calling objects namespace This is a dirty way of acting as an object proxy meaning we can be injected in place of another object and be sure we won't break the larger app If no frame is specified then the frame from which the debugger was called is used If "debugger" is given as the frame the debugger frame is used Usage: obj_mirror <instantiated object to mirror> """ import inspect if not args: print "[-] No object supplied to mirror" return arg_list = args.split(" ") s_obj_to_mirror = arg_list[0] if len(arg_list) > 1: frame = arg_list[1] else: frame = None if not frame: ##Use context of calling frame frame_context = self.curframe elif frame == "debugger": ##Use context of the frame the debugger is executing in frame_context = self.debugger_frame else: ##None frame object supplied ... bail print "[-] None frame object supplied - object type was %s" % ( type(frame)) return locals = frame_context.f_locals globals = frame_context.f_globals print "[=] Mirroring %s in the context of %s" % (s_obj_to_mirror, frame_context) try: obj_to_mirror = eval(s_obj_to_mirror, globals, locals) except: print "[-] Unknown object specified, cannot mirror" return for x in dir(obj_to_mirror): skip_list = ["__init__", "__builtins__", "__doc__", "__name__"] if inspect.isbuiltin(x) or x in skip_list: print "[-] skipping %s" % (x) continue print "[+] %s -> %s.%s" % (x, obj_to_mirror.__name__, x) exec("%s = %s.%s" % (x, obj_to_mirror.__name__, x), globals, locals)
def write_module(module):
    """Generate a stub ``__init__.py`` for *module* and recurse into its
    submodules.

    Members are bucketed into submodules / builtin functions / classes /
    UPPERCASE constants / everything else, formatted via the module-level
    ``fmt_*`` templates, and written under ``modulepath(module)``.
    """
    modules = []
    functions = []
    classes = []
    constants = []
    others = []
    # Classify every public member of the module.
    for name, value in inspect.getmembers(module):
        if not isprivateobj(name):
            if inspect.ismodule(value):
                modules.append(name)
            elif inspect.isbuiltin(value):
                functions.append((name, value))
            elif inspect.isclass(value):
                classes.append((name, value))
            else:
                # ALL-CAPS names are treated as constants by convention.
                if name.upper() == name:
                    constants.append((name, value))
                else:
                    others.append((name, value))
    out = []
    out_modules = []
    if modules:
        out_modules.append('\nModules:')
        for m in modules:
            fullname = '{0}.{1}'.format(module.__name__, m)
            out_modules.append(' {0}'.format(fullname))
            # NOTE(review): eval of a dotted name assumes the root package
            # is importable in this scope; getattr(module, m) would avoid
            # eval — confirm before changing.
            evalm = eval(fullname)
            write_module(evalm)  # ------------------------------------WRITE
    out_classes = []
    if classes:
        out_classes.append('\nClasses:')
        for name, value in classes:
            fullname = '{0}.{1}'.format(module.__name__, name)
            out_classes.append(' {0}'.format(fullname))
            write_class(module, name, value)  # -----------------------WRITE
    out.append(
        fmt_module.format(modulename=module.__name__,
                          docstring=module.__doc__,
                          modules='\n'.join(out_modules),
                          classes='\n'.join(out_classes)))
    if functions:
        out.append('# {0:-<60}'.format('Functions '))
        for name, value in functions:
            out.append(s_function(name, value.__doc__))
    if constants:
        out.append('# {0:-<60}'.format('Constants '))
        for name, value in constants:
            out.append(fmt_constants.format(name=name, value=value))
    if others:
        out.append('# {0:-<60}'.format('Everything else '))
        for name, value in others:
            out.append(fmt_constants.format(name=name, value=value))
    mpath = modulepath(module)
    if not os.path.exists(mpath):
        os.makedirs(mpath)
    mfile = os.path.join(mpath, '__init__.py')
    try:
        with open(mfile, 'w') as f:
            f.write('\n'.join(out))
            # NOTE(review): close() is redundant inside the with-block.
            f.close()
        print(mfile)
    except IOError:
        # NOTE(review): Python 2 print statement — this file predates py3.
        print "IOError"
def _make_py_item_url(fn):
    """Build a relative ``code.html`` anchor URL for *fn*.

    Returns None for builtins and for callables whose fully-qualified
    name cannot be resolved.
    """
    if inspect.isbuiltin(fn):
        return None
    qualified = func_name(fn, None, mod=1, fqdn=1, human=0)
    return f"../code.html#{qualified}" if qualified else None
def _to_bytes(self, obj: Any) -> bytes:
    """Hash objects to bytes, including code with dependencies.

    Python's built in `hash` does not produce consistent results across
    runs, so each supported type is reduced to a deterministic byte
    string instead. NOTE: branch order matters — several checks (e.g.
    UploadedFile before IOBase, named files before StringIO/BytesIO)
    deliberately shadow later, more general ones.
    """
    if isinstance(obj, unittest.mock.Mock):
        # Mock objects can appear to be infinitely
        # deep, so we don't try to hash them at all.
        return self.to_bytes(id(obj))
    elif isinstance(obj, bytes) or isinstance(obj, bytearray):
        return obj
    elif isinstance(obj, str):
        return obj.encode()
    elif isinstance(obj, float):
        return self.to_bytes(hash(obj))
    elif isinstance(obj, int):
        return _int_to_bytes(obj)
    elif isinstance(obj, (list, tuple)):
        h = hashlib.new("md5")
        for item in obj:
            self.update(h, item)
        return h.digest()
    elif isinstance(obj, dict):
        h = hashlib.new("md5")
        for item in obj.items():
            self.update(h, item)
        return h.digest()
    elif obj is None:
        return b"0"
    elif obj is True:
        return b"1"
    elif obj is False:
        return b"0"
    elif type_util.is_type(
            obj, "pandas.core.frame.DataFrame") or type_util.is_type(
                obj, "pandas.core.series.Series"):
        import pandas as pd

        # Large frames are downsampled deterministically (fixed seed)
        # before hashing to bound the cost.
        if len(obj) >= _PANDAS_ROWS_LARGE:
            obj = obj.sample(n=_PANDAS_SAMPLE_SIZE, random_state=0)
        try:
            return b"%s" % pd.util.hash_pandas_object(obj).sum()
        except TypeError:
            # Use pickle if pandas cannot hash the object for example if
            # it contains unhashable objects.
            return b"%s" % pickle.dumps(obj, pickle.HIGHEST_PROTOCOL)
    elif type_util.is_type(obj, "numpy.ndarray"):
        h = hashlib.new("md5")
        self.update(h, obj.shape)
        # Large arrays are likewise sampled with a fixed seed.
        if obj.size >= _NP_SIZE_LARGE:
            import numpy as np
            state = np.random.RandomState(0)
            obj = state.choice(obj.flat, size=_NP_SAMPLE_SIZE)
        self.update(h, obj.tobytes())
        return h.digest()
    elif inspect.isbuiltin(obj):
        # Builtins have no source to hash; their name must suffice.
        return bytes(obj.__name__.encode())
    elif type_util.is_type(obj, "builtins.mappingproxy") or type_util.is_type(
            obj, "builtins.dict_items"):
        return self.to_bytes(dict(obj))
    elif type_util.is_type(obj, "builtins.getset_descriptor"):
        return bytes(obj.__qualname__.encode())
    elif isinstance(obj, UploadedFile):
        # UploadedFile is a BytesIO (thus IOBase) but has a name.
        # It does not have a timestamp so this must come before
        # temporary files
        h = hashlib.new("md5")
        self.update(h, obj.name)
        self.update(h, obj.tell())
        self.update(h, obj.getvalue())
        return h.digest()
    elif hasattr(obj, "name") and (
            isinstance(obj, io.IOBase)
            # Handle temporary files used during testing
            or isinstance(obj, tempfile._TemporaryFileWrapper)):
        # Hash files as name + last modification date + offset.
        # NB: we're using hasattr("name") to differentiate between
        # on-disk and in-memory StringIO/BytesIO file representations.
        # That means that this condition must come *before* the next
        # condition, which just checks for StringIO/BytesIO.
        h = hashlib.new("md5")
        obj_name = getattr(obj, "name", "wonthappen")  # Just to appease MyPy.
        self.update(h, obj_name)
        self.update(h, os.path.getmtime(obj_name))
        self.update(h, obj.tell())
        return h.digest()
    elif isinstance(obj, Pattern):
        return self.to_bytes([obj.pattern, obj.flags])
    elif isinstance(obj, io.StringIO) or isinstance(obj, io.BytesIO):
        # Hash in-memory StringIO/BytesIO by their full contents
        # and seek position.
        h = hashlib.new("md5")
        self.update(h, obj.tell())
        self.update(h, obj.getvalue())
        return h.digest()
    elif type_util.is_type(obj, "numpy.ufunc"):
        # For numpy.remainder, this returns remainder.
        return bytes(obj.__name__.encode())
    elif inspect.ismodule(obj):
        # TODO: Figure out how to best show this kind of warning to the
        # user. In the meantime, show nothing. This scenario is too common,
        # so the current warning is quite annoying...
        # st.warning(('Streamlit does not support hashing modules. '
        #             'We did not hash `%s`.') % obj.__name__)
        # TODO: Hash more than just the name for internal modules.
        return self.to_bytes(obj.__name__)
    elif inspect.isclass(obj):
        # TODO: Figure out how to best show this kind of warning to the
        # user. In the meantime, show nothing. This scenario is too common,
        # (e.g. in every "except" statement) so the current warning is
        # quite annoying...
        # st.warning(('Streamlit does not support hashing classes. '
        #             'We did not hash `%s`.') % obj.__name__)
        # TODO: Hash more than just the name of classes.
        return self.to_bytes(obj.__name__)
    elif isinstance(obj, functools.partial):
        # The return value of functools.partial is not a plain function:
        # it's a callable object that remembers the original function plus
        # the values you pickled into it. So here we need to special-case it.
        h = hashlib.new("md5")
        self.update(h, obj.args)
        self.update(h, obj.func)
        self.update(h, obj.keywords)
        return h.digest()
    else:
        # As a last resort, hash the output of the object's __reduce__ method
        h = hashlib.new("md5")
        try:
            reduce_data = obj.__reduce__()
        except BaseException as e:
            raise UnhashableTypeError() from e
        for item in reduce_data:
            self.update(h, item)
        return h.digest()
def isbuiltin(obj: Any) -> bool:
    """Check if the object is builtin.

    The object is fully unwrapped first so decorated builtins are still
    recognized.
    """
    unwrapped = unwrap_all(obj)
    return inspect.isbuiltin(unwrapped)
def members_predicate(m):
    """Select members that are classes but not builtin callables."""
    if not inspect.isclass(m):
        return False
    return not inspect.isbuiltin(m)
def get_function_spec(func):
    """
    Get the python function signature for the given function object. First
    the args are inspected followed by varargs, which are set by some
    modules, e.g. mantid.simpleapi algorithm functions

    :param func: A Python function object
    :returns: A string containing the function specification
    """
    # Try the normal route first; fall back to raw code-object args, then
    # to the project's builtin-argspec helper for C functions.
    try:
        argspec = getfullargspec(func)
    except TypeError:
        try:
            args_obj = inspect.getargs(func.__code__)
            argspec = ArgSpec(args_obj.args, args_obj.varargs, args_obj.varkw,
                              defaults=None)
        except (TypeError, AttributeError, ValueError):
            if inspect.isbuiltin(func):
                argspec = get_builtin_argspec(func)
                if not argspec:
                    return ''
            else:
                return ''

    # mantid algorithm functions have varargs set not args
    args = argspec[0]
    if args:
        # For methods strip the self argument
        if callable(func) and args[0] == "self":
            args = args[1:]
        defs = argspec[3]
    elif argspec[1] is not None:
        # Get from varargs/keywords: parse names/defaults back out of the
        # varargs string (mantid encodes them there).
        arg_str = argspec[1].strip().lstrip('\b').replace(',', ', ')
        defs = []
        # Keyword args
        kwargs = argspec[2]
        if kwargs is not None:
            kwargs = kwargs.strip().lstrip('\b\b')
            if kwargs == 'kwargs':
                kwargs = '**' + kwargs + '=None'
            arg_str += ', %s' % kwargs
        # Any default argument appears in the string
        # on the rhs of an equal
        for arg in arg_str.split(', '):
            arg = arg.strip()
            if '=' in arg:
                arg_token = arg.split('=')
                args.append(arg_token[0])
                defs.append(arg_token[1])
            else:
                args.append(arg)
        if len(defs) == 0:
            defs = None
    else:
        # No positional args and no varargs: nothing to show.
        return ''

    if defs is None:
        call_tip = "({})".format(', '.join(args))
    else:
        # The defaults list contains the default values for the last n
        # arguments; walk backwards bracketing the defaulted ones.
        diff = len(args) - len(defs)
        call_tip = ''
        for index in range(len(args) - 1, -1, -1):
            def_index = index - diff
            if def_index >= 0:
                call_tip = '[' + args[index] + '], ' + call_tip
            else:
                call_tip = args[index] + ", " + call_tip
        call_tip = '(' + call_tip.rstrip(', ') + ')'
    return call_tip
def Get_CallTip_Completion(word, Arg_Index=0):
    """Build a call-tip string (signature + docstring) for the dotted name
    *word*, importing whatever modules are needed to resolve it.

    NOTE(review): this is Python 2 code (``apply``, print-statement
    comments); ``Arg_Index`` is accepted but unused in this body.
    """
    #print '********* start CallTip of :', word
    pass
    # The following triple-quoted string is dead code kept by the original
    # author as a commented-out preprocessing step.
    """
    # *********************************************
    # get rid of everything starting at '(...'
    # and return if no left bracket
    # *********************************************
    i = word.find('(')
    if i < 0 :
        return
    word = word [ : i].strip()
    """
    # *********************************************
    # parse the specified word
    # *********************************************
    Word_Parts = word.split('.')
    N_Parts = len(Word_Parts)

    # *********************************************
    # special import packages that can't be detected
    # *********************************************
    if Word_Parts[0] in Special_Imports:
        Imports = Special_Imports[Word_Parts[0]]
        for module in Imports:
            try:
                #print 'Special import :', module
                exec('import ' + module, globals())
                Failed = False
            except:
                pass

    # *********************************************
    # import the necessary module
    # because we don't know how many parts on the left of the word
    # are module / path information ( instead of class information)
    # we start with the largest left part,
    # and each time we don't succeed we try one part less
    # *********************************************
    Failed = True
    N = N_Parts
    while Failed and (N > 0):
        module = '.'.join(Word_Parts[:N])
        N -= 1
        try:
            exec('import ' + module, globals())
            Failed = False
        except:
            pass

    from wx.py import introspect
    import inspect

    # *********************************************
    # Get the object
    # *********************************************
    try:
        # NOTE(review): `object` shadows the builtin of the same name for
        # the rest of this function.
        object = eval(word, locals())
    except:
        return None

    # *********************************************
    # get the objects name
    # *********************************************
    name = ''
    object, dropSelf = introspect.getBaseObject(object)
    try:
        name = object.__name__
    except AttributeError:
        pass

    # *********************************************
    # get arguments
    # *********************************************
    tip1 = ''
    argspec = ''
    if inspect.isbuiltin(object):
        # Builtin functions don't have an argspec that we can get.
        pass
    elif inspect.isfunction(object):
        # tip1 is a string like: "getCallTip(command='', locals=None)"
        # NOTE(review): `apply` is Python 2 only.
        argspec = apply(inspect.formatargspec, inspect.getargspec(object))
        if dropSelf:
            # The first parameter to a method is a reference to an
            # instance, usually coded as "self", and is usually passed
            # automatically by Python; therefore we want to drop it.
            temp = argspec.split(',')
            if len(temp) == 1:
                # No other arguments.
                argspec = '()'
            elif temp[0][:2] == '(*':
                # first param is like *args, not self
                pass
            else:
                # Drop the first argument.
                argspec = '(' + ','.join(temp[1:]).lstrip()
        tip1 = name + argspec

    # *********************************************
    # get doc
    # *********************************************
    doc = ''
    if callable(object):
        try:
            doc = inspect.getdoc(object)
        except:
            pass
    if doc:
        # tip2 is the first separated line of the docstring, like:
        # "Return call tip text for a command."
        # tip3 is the rest of the docstring, like:
        # "The call tip information will be based on ... <snip>
        firstline = doc.split('\n')[0].lstrip()
        # Drop tip1 if the docstring already leads with the signature.
        if tip1 == firstline or firstline[:len(name) + 1] == name + '(':
            tip1 = ''
        else:
            tip1 += '\n\n'
        docpieces = doc.split('\n\n')
        tip2 = docpieces[0]
        tip3 = '\n\n'.join(docpieces[1:])
        tip = '%s%s\n\n%s' % (tip1, tip2, tip3)
    else:
        tip = tip1
    #calltip = (name, argspec[1:-1], tip.strip())
    return tip.strip()
def is_pan_function(name, val):
    """Return True when *val* is function-like.

    Covers plain functions, (bound or unbound) methods, and builtins,
    across Python 2 and 3 semantics. *name* is part of the interface but
    is not consulted.
    """
    predicates = (inspect.isfunction, inspect.ismethod, inspect.isbuiltin)
    return any(predicate(val) for predicate in predicates)
def _is_callable(f): return inspect.isfunction(f) or inspect.isbuiltin(f)
def getCallTip(command='', locals=None):
    """For a command, return a tuple of object name, argspec, tip text.

    The call tip information will be based on the locals namespace."""
    calltip = ('', '', '')  # object name, argspec, tip text.
    # Get the proper chunk of code from the command.
    root = getRoot(command, terminator='(')
    try:
        if locals is not None:
            obj = eval(root, locals)
        else:
            obj = eval(root)
    except:
        # Unresolvable expression: return the empty tip.
        return calltip
    name = ''
    obj, dropSelf = getBaseObject(obj)
    try:
        name = obj.__name__
    except AttributeError:
        pass
    tip1 = ''
    argspec = ''
    if inspect.isbuiltin(obj):
        # Builtin functions don't have an argspec that we can get.
        pass
    elif inspect.isfunction(obj):
        # tip1 is a string like: "getCallTip(command='', locals=None)"
        # NOTE(review): inspect.formatargspec was removed in Python 3.11 —
        # confirm this file targets an older interpreter.
        argspec = inspect.getfullargspec(obj)
        argspec = inspect.formatargspec(*argspec)
        if dropSelf:
            # The first parameter to a method is a reference to an
            # instance, usually coded as "self", and is usually passed
            # automatically by Python; therefore we want to drop it.
            temp = argspec.split(',')
            if len(temp) == 1:
                # No other arguments.
                argspec = '()'
            elif temp[0][:2] == '(*':
                # first param is like *args, not self
                pass
            else:
                # Drop the first argument.
                argspec = '(' + ','.join(temp[1:]).lstrip()
        tip1 = name + argspec
    doc = ''
    if callable(obj):
        try:
            doc = inspect.getdoc(obj)
        except:
            pass
    if doc:
        # tip2 is the first separated line of the docstring, like:
        # "Return call tip text for a command."
        # tip3 is the rest of the docstring, like:
        # "The call tip information will be based on ... <snip>
        firstline = doc.split('\n')[0].lstrip()
        # Skip tip1 when the docstring already leads with the signature.
        if tip1 == firstline or firstline[:len(name)+1] == name+'(':
            tip1 = ''
        else:
            tip1 += '\n\n'
        docpieces = doc.split('\n\n')
        tip2 = docpieces[0]
        tip3 = '\n\n'.join(docpieces[1:])
        tip = '%s%s\n\n%s' % (tip1, tip2, tip3)
    else:
        tip = tip1
    calltip = (name, argspec[1:-1], tip.strip())
    return calltip
def signature(func, variadic=True, markup=True, safe=False):
    """get the input signature of a function

    func: the function to inspect
    variadic: if True, also return names of (*args, **kwds) used in func
    markup: if True, show a "!" before any 'unsettable' parameters
    safe: if True, return (None,None,None,None) instead of throwing an error

    Returns a tuple of variable names and a dict of keywords with defaults.
    If variadic=True, additionally return names of func's (*args, **kwds).

    Python functions, methods, lambdas, and partials can be inspected.
    If safe=False, non-python functions (e.g. builtins) will raise an error.

    For partials, 'fixed' args correspond to positional arguments given in
    when the partial was defined. Partials have 'unsettalble' parameters,
    where, these parameters may be given as input but will throw errors.
    If markup=True, 'unsettable' parameters are denoted by a prepended '!'.

    For example:

    >>> def bar(x,y,z,a=1,b=2,*args):
    ...   return x+y+z+a+b
    ...
    >>> signature(bar)
    (('x', 'y', 'z', 'a', 'b'), {'a': 1, 'b': 2}, 'args', '')
    >>>
    >>> # a partial with a 'fixed' x, thus x is 'unsettable' as a keyword
    >>> p = partial(bar, 0)
    >>> signature(p)
    (('y', 'z', 'a', 'b'), {'a': 1, '!x': 0, 'b': 2}, 'args', '')
    >>> p(0,1)
    4
    >>> p(0,1,2,3,4,5)
    6
    >>>
    >>> # a partial where y is 'unsettable' as a positional argument
    >>> p = partial(bar, y=10)
    >>> signature(p)
    (('x', '!y', 'z', 'a', 'b'), {'a': 1, 'y': 10, 'b': 2}, 'args', '')
    >>> p(0,1,2)
    Traceback (most recent call last):
      File "<stdin>", line 1, in <module>
    TypeError: bar() got multiple values for keyword argument 'y'
    >>> p(0,z=2)
    15
    >>> p(0,y=1,z=2)
    6
    >>>
    >>> # a partial with a 'fixed' x, and positionally 'unsettable' b
    >>> p = partial(bar, 0,b=10)
    >>> signature(p)
    (('y', 'z', 'a', '!b'), {'a': 1, '!x': 0, 'b': 10}, 'args', '')
    >>>
    >>> # apply some options that reduce information content
    >>> signature(p, markup=False)
    (('y', 'z', 'a', 'b'), {'a': 1, 'b': 10}, 'args', '')
    >>> signature(p, markup=False, variadic=False)
    (('y', 'z', 'a', 'b'), {'a': 1, 'b': 10})
    """
    TINY_FAIL = None,None #XXX: or (),{} ?
    LONG_FAIL = None,None,None,None #XXX: or (),{},'','' ?
    # Builtins cannot be argspec'd; bail out early in safe mode (PyPy can
    # introspect some builtins, hence the IS_PYPY exemption).
    if safe and inspect.isbuiltin(func) and not IS_PYPY:
        return LONG_FAIL if variadic else TINY_FAIL
    #"""fixed: if True, include any 'fixed' args in returned keywords"""
    # maybe it's less confusing to tie 'fixed' to 'markup'... so do that.
    fixed = markup
    identified = False
    if not inspect.ismethod(func) and not inspect.isfunction(func):
        try: # then it could be a partial...
            p_args = func.args # list of default arg values
            p_kwds = func.keywords or {} # dict of default kwd values
            func = func.func
            identified = True
        except AttributeError: #XXX: anything else to try? No? Give up.
            pass
    if not identified:
        p_args = ()
        p_kwds = {}
    # NOTE(review): inspect.getargspec was removed in Python 3.11 —
    # presumably this file targets an older interpreter; confirm.
    try:
        arg_spec = inspect.getargspec(func)
    except TypeError:
        if safe:
            return LONG_FAIL if variadic else TINY_FAIL
        raise TypeError('%r is not a Python function' % func)
    if hasattr(arg_spec, 'args'):
        arg_names = arg_spec.args # list of input variable names
        arg_defaults = arg_spec.defaults # list of kwd default values
        arg_keywords = arg_spec.keywords # name of **kwds
        arg_varargs = arg_spec.varargs # name of *args
    else:
        # Older argspec form: a plain 4-tuple.
        arg_names, arg_varargs, arg_keywords, arg_defaults = arg_spec
    if not arg_defaults or not arg_names:
        defaults = {}
        explicit = tuple(arg_names) or ()
    else:
        # Defaults align with the *last* len(arg_defaults) names.
        defaults = dict(zip(arg_names[-len(arg_defaults):],arg_defaults))
        explicit = tuple(arg_names) or () # always return all names
        #explicit = tuple(arg_names[:-len(arg_defaults)]) # only return args
    # for a partial, the first p_args are now at fixed values
    _fixed = dict(zip(arg_names[:len(p_args)],p_args))
    # deal with the stupid case that the partial always fails
    errors = [i for i in _fixed if i in p_kwds]
    if errors:
        if safe:
            return LONG_FAIL if variadic else TINY_FAIL
        raise TypeError("%s() got multiple values for keyword argument '%s'" % (func.__name__,errors[0]))
    # the above could fail if taking a partial of a partial
    # for a partial, arguments given in p_kwds have new defaults
    defaults.update(p_kwds)
    if markup:
        X = '!'
    else:
        X = ''
    # remove args 'fixed' by the partial; prepend 'unsettable' args with '!'
    explicit = tuple(X+i if i in p_kwds else i for i in explicit \
                     if i not in _fixed)
    if fixed:
        #defaults.update(_fixed)
        # Re-key the 'fixed' entries with the markup prefix.
        defaults = dict((k,v) for (k,v) in defaults.items() if k not in _fixed)
        defaults.update(dict((X+k,v) for (k,v) in _fixed.items()))
    # im_self is the Python 2 attribute; __self__ is the fallback.
    if inspect.ismethod(func) and getattr(func, 'im_self', func.__self__):
        # then it's a bound method
        explicit = explicit[1:] #XXX: correct to remove 'self' ?
    if variadic:
        varargs = arg_varargs or ''
        varkwds = arg_keywords or ''
        return explicit, defaults, varargs, varkwds
    return explicit, defaults
def is_class_instance(obj):
    """Like inspect.* methods.

    Heuristically decides whether *obj* is a plain class instance by
    rejecting classes, modules, builtins, methods, method descriptors,
    code objects and generators; everything else counts as an instance.
    """
    rejectors = (
        inspect.isclass,
        inspect.ismodule,
        inspect.isbuiltin,
        inspect.ismethod,
        inspect.ismethoddescriptor,
        inspect.iscode,
        inspect.isgenerator,
    )
    return not any(reject(obj) for reject in rejectors)
from numbers import Number import sys import inspect import dpctl from dpctl.memory import MemoryUSMShared debug = False def dprint(*args): if debug: print(*args) sys.stdout.flush() functions_list = [o[0] for o in getmembers(np) if isfunction(o[1]) or isbuiltin(o[1])] class_list = [o for o in getmembers(np) if isclass(o[1])] array_interface_property = "__sycl_usm_array_interface__" def has_array_interface(x): return hasattr(x, array_interface_property) def _get_usm_base(ary): ob = ary while True: if ob is None: return None elif hasattr(ob, "__sycl_usm_array_interface__"):
def __init__():
    """Post-import setup for the mirheo package.

    Walks the compiled submodules and monkey-patches their classes and
    plugin factory functions so that (a) non-compute MPI tasks get no-op
    constructors and (b) the internal ``state``/``compute_task`` arguments
    are hidden from the public signatures and docstrings.
    """
    # Setup exception handling
    sys.excepthook = handle_exception

    # Wrap everything except for plugins and non-GPU stuff
    # Make the __init__ functions return None if we are not a compute task
    nonGPU_names = [['Interactions', 'MembraneParameters'],
                    ['Interactions', 'KantorBendingParameters'],
                    ['Interactions', 'JuelicherBendingParameters']]

    # Submodules whose classes need the coordinator state injected.
    needing_state = ['Plugins', 'Integrators', 'ParticleVectors',
                     'Interactions', 'BelongingCheckers', 'Bouncers', 'Walls']
    not_needing_state = [['ParticleVectors', 'MembraneMesh'],
                         ['ParticleVectors', 'Mesh']]

    # Collect mirheo submodules and, per submodule, their mirheo classes.
    classes = {}
    submodules = inspect.getmembers(
        sys.modules[__name__],
        lambda member: inspect.ismodule(member) and 'mirheo' in member.__name__)
    for m in submodules:
        classes[m[0]] = inspect.getmembers(
            sys.modules[m[1].__name__],
            lambda member: inspect.isclass(member) and 'mirheo' in member.__module__)

    for module in classes.keys():
        if module != 'Plugins':
            for cls in classes[module]:
                if [module, cls[0]] not in nonGPU_names:
                    need_state = module in needing_state
                    if [module, cls[0]] in not_needing_state:
                        need_state = False
                    # Wrap both __init__ and __new__ so construction is
                    # skipped entirely on non-compute tasks.
                    setattr(cls[1], '__init__', decorate_object(cls[1].__init__, need_state))
                    setattr(cls[1], '__new__', decorate_object(cls[1].__new__ , need_state))
                    # Hide the injected state argument from the docs.
                    getattr(cls[1], '__init__').__doc__ = re.sub(
                        'state: libmirheo.MirState, ', '',
                        getattr(cls[1], '__init__').__doc__)

    # Now wrap plugins creation
    # Also change the names of the function
    # by removing the double underscore
    for m in submodules:
        if m[0] == 'Plugins':
            funcs = inspect.getmembers(
                sys.modules[m[1].__name__],
                lambda member: inspect.isbuiltin(member) and 'mirheo' in member.__module__)
            for f in funcs:
                if '__create' in f[0]:
                    # '__createX' is re-exported as 'createX'.
                    newname = f[0][2:]
                    setattr(m[1], newname, decorate_plugins(f[1]))
                    getattr(m[1], newname).__doc__ = re.sub(
                        '__' + newname, newname,
                        getattr(m[1], newname).__doc__)
                    getattr(m[1], newname).__doc__ = re.sub(
                        'compute_task: bool, ', '',
                        getattr(m[1], newname).__doc__)

    # Wrap initialization of the mirheo coordinator
    Mirheo.__init__ = decorate_coordinator(Mirheo.__init__)

    # Wrap registration of the plugins
    Mirheo.registerPlugins = decorate_register_plugins(Mirheo.registerPlugins)
def _update_hash(self, hash_accumulator, obj, code_context):
    """
    Contains the logic that analyzes the objects and encodes them into
    bytes that are added to the hash_accumulator.

    Dispatches on the runtime type of ``obj``. Every supported type first
    contributes a distinguishing ``TypePrefix`` so that equal byte payloads
    of different types cannot collide. Container types recurse through
    ``self._check_and_hash``; objects of no recognized type fall through to
    ``_update_hash_for_complex_object``.

    Parameters
    ----------
    hash_accumulator:
        Accumulator that ``add_to_hash`` feeds bytes into.
    obj:
        The object to encode.
    code_context:
        Context object threaded through recursive hashing (used when
        hashing routines and code objects).
    """
    if isinstance(obj, bytes):
        add_to_hash(hash_accumulator, type_prefix=TypePrefix.BYTES,
                    obj_bytes=obj)
    elif isinstance(obj, bytearray):
        add_to_hash(hash_accumulator, type_prefix=TypePrefix.BYTEARRAY,
                    obj_bytes=obj)
    elif obj is None:
        add_to_hash(hash_accumulator, type_prefix=TypePrefix.NONE)
    elif obj is Ellipsis:
        add_to_hash(hash_accumulator, type_prefix=TypePrefix.ELLIPSIS)
    elif obj is NotImplemented:
        add_to_hash(hash_accumulator, type_prefix=TypePrefix.NOT_IMPLEMENTED)
    # BUGFIX: bool must be checked *before* int. ``bool`` is a subclass of
    # ``int``, so with the previous ordering True/False were captured by the
    # INT branch (hashing identically to 1/0) and this BOOL branch was
    # unreachable dead code. Note this changes previously-computed hashes
    # for boolean values.
    elif isinstance(obj, bool):
        add_to_hash(
            hash_accumulator,
            type_prefix=TypePrefix.BOOL,
            obj_bytes=str(obj).encode(),
        )
    elif isinstance(obj, int):
        add_to_hash(
            hash_accumulator,
            type_prefix=TypePrefix.INT,
            obj_bytes=str(obj).encode(),
        )
    elif isinstance(obj, float):
        add_to_hash(
            hash_accumulator,
            type_prefix=TypePrefix.FLOAT,
            obj_bytes=str(obj).encode(),
        )
    elif isinstance(obj, complex):
        add_to_hash(
            hash_accumulator,
            type_prefix=TypePrefix.COMPLEX,
            obj_bytes=str(obj).encode(),
        )
    elif isinstance(obj, str):
        add_to_hash(
            hash_accumulator,
            type_prefix=TypePrefix.STRING,
            # Avoid any encoding errors by not using utf-8.
            obj_bytes=obj.encode("raw_unicode_escape"),
        )
    elif isinstance(obj, (list, tuple)):
        if isinstance(obj, list):
            type_prefix = TypePrefix.LIST
        else:
            type_prefix = TypePrefix.TUPLE
        # Length is hashed first so [a, b], [c] cannot collide with
        # [a], [b, c] once the element hashes are appended.
        obj_len_bytes = str(len(obj)).encode()
        add_to_hash(
            hash_accumulator,
            type_prefix=type_prefix,
            obj_bytes=obj_len_bytes,
        )
        for elem in obj:
            add_to_hash(
                hash_accumulator,
                type_prefix=TypePrefix.HASH,
                obj_bytes=self._check_and_hash(elem, code_context),
            )
    elif isinstance(obj, (set, frozenset)):
        if isinstance(obj, set):
            type_prefix = TypePrefix.SET
        else:
            type_prefix = TypePrefix.FROZENSET
        obj_len_bytes = str(len(obj)).encode()
        add_to_hash(
            hash_accumulator,
            type_prefix=type_prefix,
            obj_bytes=obj_len_bytes,
        )
        # set and frozenset are unordered collection and two sets with the
        # same elements can have different iteration order. Since the
        # iteration order is not stable, we first hash the elements and sort
        # the hash of elements instead. This way, sets with the same elements
        # will create the same hash.
        elem_hashes = sorted(
            self._check_and_hash(elem, code_context) for elem in obj)
        add_to_hash(
            hash_accumulator,
            type_prefix=TypePrefix.HASH,
            obj_bytes=self._check_and_hash(elem_hashes, code_context),
        )
    elif isinstance(obj, range):
        # A range is fully determined by its start/stop/step triple.
        members = [obj.start, obj.stop, obj.step]
        add_to_hash(
            hash_accumulator,
            type_prefix=TypePrefix.RANGE,
            obj_bytes=self._check_and_hash(members, code_context),
        )
    elif isinstance(obj, (dict, types.MappingProxyType)):
        if isinstance(obj, dict):
            type_prefix = TypePrefix.DICT
        else:
            type_prefix = TypePrefix.MAPPING_PROXY
        obj_len_bytes = str(len(obj)).encode()
        add_to_hash(
            hash_accumulator,
            type_prefix=type_prefix,
            obj_bytes=obj_len_bytes,
        )
        for key, elem in obj.items():
            add_to_hash(
                hash_accumulator,
                type_prefix=TypePrefix.HASH,
                obj_bytes=self._check_and_hash(key, code_context),
            )
            add_to_hash(
                hash_accumulator,
                type_prefix=TypePrefix.HASH,
                obj_bytes=self._check_and_hash(elem, code_context),
            )
    elif isinstance(obj, ReferenceProxy):
        add_to_hash(
            hash_accumulator,
            type_prefix=TypePrefix.REF_PROXY,
            obj_bytes=obj.val.encode(),
        )
    # This detects only Enum values. The actual Enum class is still
    # detected and handled by inspect.isclass.
    elif isinstance(obj, Enum):
        add_to_hash(hash_accumulator, type_prefix=TypePrefix.ENUM)
        add_to_hash(
            hash_accumulator,
            type_prefix=TypePrefix.HASH,
            obj_bytes=self._check_and_hash(obj.value, code_context),
        )
    elif isinstance(obj, (property, types.DynamicClassAttribute)):
        if isinstance(obj, property):
            type_prefix = TypePrefix.PROPERTY
        else:
            type_prefix = TypePrefix.DYNAMIC_CLASS_ATTR
        add_to_hash(hash_accumulator, type_prefix=type_prefix)
        # A property is identified using fget, fset, fdel, and doc.
        members = [obj.fget, obj.fset, obj.fdel, obj.__doc__]
        add_to_hash(
            hash_accumulator,
            type_prefix=TypePrefix.HASH,
            obj_bytes=self._check_and_hash(members, code_context),
        )
    elif inspect.isbuiltin(obj):
        add_to_hash(hash_accumulator, type_prefix=TypePrefix.BUILTIN)
        # Builtins have no inspectable Python code; their qualified name
        # is the best stable identity available.
        builtin_name = "%s.%s" % (obj.__module__, obj.__name__)
        add_to_hash(
            hash_accumulator,
            type_prefix=TypePrefix.HASH,
            obj_bytes=self._check_and_hash(builtin_name, code_context),
        )
    elif inspect.ismethoddescriptor(obj) or isinstance(
            obj, METHOD_WRAPPER_TYPE):
        if inspect.ismethoddescriptor(obj):
            type_prefix = TypePrefix.METHOD_DESCRIPTOR
        else:
            type_prefix = TypePrefix.METHOD_WRAPPER
        add_to_hash(
            hash_accumulator,
            type_prefix=type_prefix,
            # Descriptors are part of core Python impl and method wrappers
            # wrap the underlying implementation of Pyton (like CPython).
            # It should be sufficient to hash their names.
            obj_bytes=obj.__name__.encode(),
        )
    elif inspect.isroutine(obj):
        if (obj.__module__ is not None
                and obj.__module__.startswith("bionic")) or is_internal_file(
                    obj.__code__.co_filename):
            # It would be nice to hash the module version as well.
            # But it's not easy to get the version of a submodule.
            # Top level modules have a version attribute that we
            # can use, but sub-modules don't have a field like
            # that. We don't want to import parent modules of the
            # submodules either because that can have unnecessary
            # side effects.
            add_to_hash(hash_accumulator,
                        type_prefix=TypePrefix.INTERNAL_ROUTINE)
            routine_name = "%s.%s" % (obj.__module__, obj.__name__)
            # NOTE(review): unlike most branches, no code_context is passed
            # here — presumably _check_and_hash has a suitable default for
            # plain strings; confirm against its signature.
            add_to_hash(
                hash_accumulator,
                type_prefix=TypePrefix.HASH,
                obj_bytes=self._check_and_hash(routine_name),
            )
        else:
            add_to_hash(hash_accumulator, type_prefix=TypePrefix.ROUTINE)
            # User routines are hashed by their defaults plus bytecode, so
            # a behavior change in either invalidates the hash.
            code_context = get_code_context(obj)
            add_to_hash(
                hash_accumulator,
                type_prefix=TypePrefix.HASH,
                obj_bytes=self._check_and_hash(obj.__defaults__,
                                               code_context),
            )
            self._update_hash_for_code(hash_accumulator, obj.__code__,
                                       code_context)
    elif inspect.iscode(obj):
        add_to_hash(hash_accumulator, type_prefix=TypePrefix.CODE)
        self._update_hash_for_code(hash_accumulator, obj, code_context)
    elif obj is attr.NOTHING:
        add_to_hash(hash_accumulator, type_prefix=TypePrefix.ATTR_NOTHING)
    # This hashes the instances of `attr.Attribute` class. The actual class
    # is hashed under the `inspect.isclass` block.
    # We have special handling for `attr.Attribute` objects because `attr`
    # classes contains these objects as one of the field and we should detect
    # any changes to the field in order to detect changes to the class.
    # Without this special handling, the object will be treated as a complex
    # variable and Bionic will warn for it.
    elif isinstance(obj, attr.Attribute):
        add_to_hash(hash_accumulator, type_prefix=TypePrefix.ATTR_ATTRIBUTE)
        self._update_hash_for_members_of_obj(hash_accumulator, obj)
    elif inspect.isclass(obj):
        if is_internal_class(obj):
            add_to_hash(hash_accumulator,
                        type_prefix=TypePrefix.INTERNAL_CLASS)
            class_name = "%s.%s" % (obj.__module__, obj.__name__)
            add_to_hash(
                hash_accumulator,
                type_prefix=TypePrefix.HASH,
                obj_bytes=self._check_and_hash(class_name),
            )
        else:
            add_to_hash(hash_accumulator, type_prefix=TypePrefix.CLASS)
            self._update_hash_for_members_of_obj(hash_accumulator, obj)
    else:
        # Unrecognized type: defer to the generic (and warn-prone)
        # complex-object hasher.
        self._update_hash_for_complex_object(hash_accumulator, obj)
.iconcat(a, b) .__iconcat__(a, b) .index(a) .__index__(a) ''' #Context manager with torch.autograd.profiler.emit_nvtx(): #Start profiler profiler.start() for op in unaryOps: assert hasattr(operator, op) f = getattr(operator, op) assert inspect.isbuiltin(f) c = f(ia) for op in invertOps: assert hasattr(operator, op) f = getattr(operator, op) assert inspect.isbuiltin(f) c = f(ba) for op in binaryOps: assert hasattr(operator, op) f = getattr(operator, op) assert inspect.isbuiltin(f) c = f(ia, ib) c = f(ia, 2)
def isbuiltin(object):  # pylint: disable=redefined-builtin
  """TFDecorator-aware replacement for inspect.isbuiltin.

  Unwraps any TFDecorator layers around `object` before delegating to the
  real `inspect.isbuiltin`, so decorated builtins are classified correctly.
  """
  # unwrap() returns (decorator_list, innermost_target); we only need the
  # innermost target for the builtin check.
  unwrapped_target = tf.__internal__.decorator.unwrap(object)[1]
  return _inspect.isbuiltin(unwrapped_target)
def importable(obj, alias='', source=True, builtin=True):
    """get an importable string (i.e. source code or the import string)
    for the given object, including any required objects from the enclosing
    and global scope

    This function will attempt to discover the name of the object, or the
    repr of the object, or the source code for the object. To attempt to force
    discovery of the source code, use source=True, otherwise an import will be
    sought. The intent is to build a string that can be imported from a python
    file. obj is the object to inspect. If alias is provided, then rename the
    object with the given alias.

    If builtin=True, then force an import for builtins where possible.
    """
    #NOTE: we always 'force', and 'lstrip' as necessary
    #NOTE: for 'enclosing', use importable(outermost(obj))
    # Builtins have no retrievable source; fall straight to the import path.
    if builtin and isbuiltin(obj): source = False
    # Retry state machine: try the preferred strategy (import vs source)
    # first; on failure flip to the other. Once both have failed, the
    # original exception propagates via the `raise` below.
    tried_source = tried_import = False
    while True:
        if not source: # we want an import
            try:
                if _isinstance(obj): # for instances, punt to _importable
                    return _importable(obj, alias, source=False,
                                       builtin=builtin)
                src = _closuredimport(obj, alias=alias, builtin=builtin)
                # Exactly one closure-level entry is supported.
                if len(src) == 0:
                    raise NotImplementedError('not implemented')
                if len(src) > 1:
                    raise NotImplementedError('not implemented')
                return list(src.values())[0]
            # Deliberate bare except: ANY failure here means "try the other
            # strategy", unless the other strategy already failed too.
            except:
                if tried_source: raise
                tried_import = True
        # we want the source
        try:
            src = _closuredsource(obj, alias=alias)
            if len(src) == 0:
                raise NotImplementedError('not implemented')
            if len(src) > 1:
                raise NotImplementedError('not implemented')
            src = list(src.values())[0]
            # src is an (enclosing, object) pair of source fragments; join
            # whichever halves are present.
            if src[0] and src[-1]: src = '\n'.join(src)
            elif src[0]: src = src[0]
            elif src[-1]: src = src[-1]
            else: src = ''
            # get source code of objects referred to by obj in global scope
            from dill.detect import globalvars
            obj = globalvars(obj) #XXX: don't worry about alias?
            obj = list(getsource(_obj, name, force=True)
                       for (name, _obj) in obj.items())
            obj = '\n'.join(obj) if obj else ''
            # combine all referred-to source (global then enclosing)
            if not obj: return src
            if not src: return obj
            return obj + src
        # Deliberate bare except: mirror of the import branch above.
        except:
            if tried_import: raise
            tried_source = True
        # Flip strategy for the next loop iteration.
        source = not source
    # should never get here
    return
def add_dependency(self, obj_):
    """Register the defining module of ``obj_`` with the resolver.

    Builtins are skipped: they carry no Python module worth tracking.
    """
    if inspect.isbuiltin(obj_):
        return
    self.resolver.add_dependencies(inspect.getmodule(obj_))
def __getattr__(self, name):
    """Get the functions from the Gym Environment"""
    # Delegate every unknown attribute lookup to the wrapped environment.
    # Builtin callables are wrapped in functools.partial before being
    # returned (plain attributes and Python functions pass through as-is).
    target = getattr(self.env, name)
    return functools.partial(target) if inspect.isbuiltin(target) else target
def is_c_classmethod(obj: object) -> bool:
    """Return True for builtins and (C-level or Python) classmethod objects.

    Matches either a builtin function/method, or an object whose type is
    named 'classmethod' or 'classmethod_descriptor' (the C implementation
    of a classmethod slot).
    """
    if inspect.isbuiltin(obj):
        return True
    return type(obj).__name__ in ('classmethod', 'classmethod_descriptor')