def bind(self, target):
    if self.func not in dir(target):
        return None

    funcs = []
    if self.read_pattern is not None:
        def getter():
            return getattr(target, self.func)

        if inspect.isdatadescriptor(getattr(type(target), self.func)):
            getter.__doc__ = 'Getter: ' + inspect.getdoc(getattr(type(target), self.func))

        funcs.append(
            Func(getter, self.read_pattern,
                 return_mapping=self.return_mapping, doc=self.doc))

    if self.write_pattern is not None:
        def setter(new_value):
            setattr(target, self.func, new_value)

        if inspect.isdatadescriptor(getattr(type(target), self.func)):
            setter.__doc__ = 'Setter: ' + inspect.getdoc(getattr(type(target), self.func))

        funcs.append(
            Func(setter, self.write_pattern,
                 argument_mappings=self.argument_mappings,
                 return_mapping=self.return_mapping, doc=self.doc))

    return funcs
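# A minimal sketch (not taken from any of the sources in this collection; the
# Sensor class is invented) of why the isdatadescriptor() check above matters:
# a property defined on the class is a data descriptor and carries its own
# docstring, while the value it returns on an instance is not.
import inspect

class Sensor:
    @property
    def value(self):
        """Current sensor reading."""
        return 42

assert inspect.isdatadescriptor(Sensor.value)        # the property object -> True
assert not inspect.isdatadescriptor(Sensor().value)  # the returned int -> False
print(inspect.getdoc(Sensor.value))                  # "Current sensor reading."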
def import_from(s1, module):
    syms = inspect.getmembers(module)
    str_syms = dir(module)
    name_as = ""
    if len(s1) == 4:
        name_as = s1[3][1]
    if not (s1[1][1] in str_syms):
        print("import error")
        exit()
    else:
        for sym in syms:
            if sym[0] == s1[1][1]:
                if inspect.isfunction(sym[1]) or inspect.isbuiltin(sym[1]):
                    if len(s1) == 4:
                        GLOBAL_SYMBOL_LIST.append(Function(name_as))
                    else:
                        GLOBAL_SYMBOL_LIST.append(Function(sym[0]))
                elif inspect.ismethod(sym[1]):
                    pass
                elif inspect.isgeneratorfunction(sym[1]):
                    if len(s1) == 4:
                        GLOBAL_SYMBOL_LIST.append(Function(name_as))
                    else:
                        GLOBAL_SYMBOL_LIST.append(Function(sym[0]))
                elif (inspect.isgenerator(sym[1]) or inspect.istraceback(sym[1])
                        or inspect.isframe(sym[1]) or inspect.iscode(sym[1])
                        or inspect.isroutine(sym[1]) or inspect.isabstract(sym[1])
                        or inspect.ismemberdescriptor(sym[1])
                        or inspect.isdatadescriptor(sym[1])
                        or inspect.isgetsetdescriptor(sym[1])):
                    # These member kinds are deliberately ignored.
                    pass
                elif inspect.isclass(sym[1]):
                    if len(s1) == 4:
                        GLOBAL_SYMBOL_LIST.append(Class(name_as))
                    else:
                        GLOBAL_SYMBOL_LIST.append(Class(sym[0]))
                else:
                    print(sym[0])
def import_name(s1):
    if s1[0] in NON_TERMINAL:
        if s1[0] in NON_TERMINAL and s1[0] == 286:
            dot_name = ""
            module_name = ""
            for name in s1[1]:
                if not isinstance(name, int):
                    module_name += name[1]
            if len(s1) == 2:
                dot_name = module_name
            elif len(s1) == 4:
                dot_name = s1[3][1]
            try:
                module = importlib.import_module(module_name)
            except ImportError:
                print("Import Error, No module named " + module_name)
                exit()
            new_module = Module(module_name)
            new_module.SYMBOL_LIST = []
            syms = inspect.getmembers(module)
            for sym in syms:
                if inspect.isfunction(sym[1]) or inspect.isbuiltin(sym[1]):
                    # new_module.SYMBOL_LIST.append(Function(dot_name + '.' + sym[0]))
                    new_module.SYMBOL_LIST.append(Function(sym[0]))
                elif inspect.ismethod(sym[1]):
                    pass
                elif inspect.isgeneratorfunction(sym[1]):
                    new_module.SYMBOL_LIST.append(Function(sym[0]))
                elif (inspect.isgenerator(sym[1]) or inspect.istraceback(sym[1])
                        or inspect.isframe(sym[1]) or inspect.iscode(sym[1])
                        or inspect.isroutine(sym[1]) or inspect.isabstract(sym[1])
                        or inspect.ismemberdescriptor(sym[1])
                        or inspect.isdatadescriptor(sym[1])
                        or inspect.isgetsetdescriptor(sym[1])):
                    # These member kinds are deliberately ignored.
                    pass
                elif inspect.isclass(sym[1]):
                    new_module.SYMBOL_LIST.append(Class(sym[0], [], []))
                else:
                    print(sym[0])
            self.local_names.append(new_module)
        else:
            for j in range(1, len(s1)):
                import_name(s1[j])
def template_ui_repr(self, imports, name=None, defaults=None, exclude=None):
    """ Creates a user-friendly representation. """
    from inspect import isdatadescriptor

    results = {}
    if name is None:
        name = getattr(self, "__ui_name__", self.__class__.__name__.lower())
    results[None] = "{1} = {0.__class__.__name__}()".format(self, name)
    add_to_imports(self, imports)

    # loop through dictionary attributes first.
    for key, value in self.__dict__.iteritems():
        if key[0] == "_":
            continue
        if exclude is not None and key in exclude:
            continue
        if hasattr(value, "__ui_repr__"):
            default = None if defaults is None else defaults.__dict__.get(key, None)
            newname = name + "." + key
            partial = value.__ui_repr__(imports, newname, default)
            results.update(partial)
        else:
            string = repr(value)
            if (
                defaults is not None
                and key in defaults.__dict__
                and type(value) is type(defaults.__dict__[key])
                and string == repr(defaults.__dict__[key])
            ):
                continue
            key = "{0}.{1}".format(name, key)
            results[key] = string
            add_to_imports(string, imports)

    # then loop through class properties.
    for key in dir(self):
        if key[0] == "_":
            continue
        if key in self.__dict__:
            continue
        if exclude is not None and key in exclude:
            continue
        if not hasattr(self.__class__, key):
            continue
        value = getattr(self.__class__, key)
        if not isdatadescriptor(value):
            continue
        string = repr(getattr(self, key))
        if defaults is None or not hasattr(defaults.__class__, key):
            pass
        elif not isdatadescriptor(getattr(defaults.__class__, key)):
            pass
        else:
            default = getattr(defaults, key)
            if type(getattr(self, key)) is type(default) and repr(default) == string:
                continue
        key = "{0}.{1}".format(name, key)
        results[key] = string
        add_to_imports(string, imports)

    return results
def import_name(s1):
    if s1[0] in NON_TERMINAL:
        if s1[0] in NON_TERMINAL and s1[0] == 286:
            dot_name = ""
            module_name = ""
            for name in s1[1]:
                if not isinstance(name, int):
                    module_name += name[1]
            if len(s1) == 2:
                dot_name = module_name
            elif len(s1) == 4:
                dot_name = s1[3][1]
            try:
                module = importlib.import_module(module_name)
            except ImportError:
                print("Import Error, No module named " + module_name)
                exit()
            a = dir(module)
            syms = inspect.getmembers(module)
            for sym in syms:
                if inspect.isfunction(sym[1]) or inspect.isbuiltin(sym[1]):
                    GLOBAL_SYMBOL_LIST.append(Function(dot_name + "." + sym[0]))
                elif inspect.ismethod(sym[1]):
                    pass
                elif inspect.isgeneratorfunction(sym[1]):
                    GLOBAL_SYMBOL_LIST.append(Function(dot_name + "." + sym[0]))
                elif (inspect.isgenerator(sym[1]) or inspect.istraceback(sym[1])
                        or inspect.isframe(sym[1]) or inspect.iscode(sym[1])
                        or inspect.isroutine(sym[1]) or inspect.isabstract(sym[1])
                        or inspect.ismemberdescriptor(sym[1])
                        or inspect.isdatadescriptor(sym[1])
                        or inspect.isgetsetdescriptor(sym[1])):
                    # These member kinds are deliberately ignored.
                    pass
                elif inspect.isclass(sym[1]):
                    GLOBAL_SYMBOL_LIST.append(Class(dot_name + "." + sym[0], [], []))
                else:
                    print(sym[0])
        else:
            for j in range(1, len(s1)):
                import_name(s1[j])
def parse_import(st):
    if st[0] == 283:
        import_name(st[2])
    elif st[0] == 284:
        module_name = ""
        if not isinstance(st[2], int):
            for name in st[2]:
                if not isinstance(name, int):
                    module_name += name[1]
        try:
            module = importlib.import_module(module_name)
        except ImportError:
            print("Import Error, No module named " + module_name)
            exit()
        if len(st) == 5 and st[4][1] == "*":
            syms = inspect.getmembers(module)
            str_syms = dir(module)
            name_as = ""
            for sym in syms:
                if inspect.isfunction(sym[1]) or inspect.isbuiltin(sym[1]):
                    GLOBAL_SYMBOL_LIST.append(Function(sym[0]))
                elif inspect.ismethod(sym[1]):
                    pass
                elif inspect.isgeneratorfunction(sym[1]):
                    GLOBAL_SYMBOL_LIST.append(Function(sym[0]))
                elif (inspect.isgenerator(sym[1]) or inspect.istraceback(sym[1])
                        or inspect.isframe(sym[1]) or inspect.iscode(sym[1])
                        or inspect.isroutine(sym[1]) or inspect.isabstract(sym[1])
                        or inspect.ismemberdescriptor(sym[1])
                        or inspect.isdatadescriptor(sym[1])
                        or inspect.isgetsetdescriptor(sym[1])):
                    # These member kinds are deliberately ignored.
                    pass
                elif inspect.isclass(sym[1]):
                    GLOBAL_SYMBOL_LIST.append(Class(sym[0]))
                else:
                    print(sym[0])
        else:
            for counter in range(len(st[4])):
                if not isinstance(st[4][counter], int) and st[4][counter][0] == 285:
                    import_from(st[4][counter], module)
def test_valid(self):
    a_value = object()
    b_value = object()
    c_value = object()
    d_value = object()

    class M(pyneric.Metaclass):
        __metadata__ = dict(a=None, b=None, c=c_value)
        __propagate__ = ('a', 'c')

        @property
        def c(cls):
            if hasattr(cls, 'x'):
                return "different value"
            return cls._get_metadata('c')

    class C(with_metaclass(M, object)):
        a = a_value
        d = d_value

    self.assertTrue(inspect.isdatadescriptor(M.a))
    self.assertTrue(inspect.isdatadescriptor(M.b))
    self.assertTrue(inspect.isdatadescriptor(M.c))
    self.assertRaises(AttributeError, getattr, M, 'd')
    expected = dict(a=None, b=None, c=c_value, __propagate__={'a', 'c'},
                    __base_overrides__={})
    self.assertEqual(expected, M._get_metadata())
    expected.update(a=a_value)
    self.assertEqual(expected, C._get_metadata())
    self.assertEqual(a_value, C.a)
    self.assertIsNone(C.b)
    self.assertEqual(c_value, C.c)
    self.assertEqual(d_value, C.d)
    obj = C()
    self.assertEqual(a_value, obj.a)
    self.assertRaises(AttributeError, getattr, obj, 'b')
    self.assertEqual(c_value, obj.c)
    self.assertEqual(d_value, obj.d)
    obj.b = b_value
    new_value = object()
    self.assertRaises(AttributeError, setattr, C, 'a', new_value)
    self.assertRaises(AttributeError, setattr, C, 'b', new_value)
    self.assertRaises(AttributeError, setattr, C, 'c', new_value)
    C.d = new_value
    self.assertEqual(new_value, C.d)
    self.assertEqual(a_value, obj.a)
    self.assertEqual(b_value, obj.b)
    self.assertEqual(c_value, obj.c)
    self.assertEqual(new_value, obj.d)
    self.assertRaises(AttributeError, setattr, obj, 'a', new_value)
    obj.b = new_value
    self.assertRaises(AttributeError, setattr, obj, 'c', new_value)
    newer_value = object()
    obj.d = newer_value
    self.assertEqual(new_value, obj.b)
    self.assertEqual(newer_value, obj.d)
    self.assertEqual(c_value, C.c)
    C.x = None
    self.assertEqual('different value', C.c)
def attrfilter(attr):
    # Only add methods and attributes of the class to the CIX.
    # - "os._Environ" seems to be a particular problem case in that
    #   some methods defined on it are inspect.ismethod() but not
    #   inspect.ismethoddescriptor(). Not so for other examples in
    #   modules that stdcix.py processes, and adding .ismethod() to this
    #   filter adds unwanted methods on C-defined module exception
    #   classes.
    if not (inspect.isdatadescriptor(attr)
            or inspect.ismethoddescriptor(attr)
            or inspect.ismethod(attr)
            or inspect.isfunction(attr)):
        return False
    # Skip inherited attributes in the CIX.
    try:
        attrname = attr.__name__
        for base in obj.__bases__:
            if hasattr(base, attrname) and getattr(base, attrname) is \
                    getattr(obj, attrname):
                return False
    except AttributeError:
        # staticmethod and classmethod objects don't have a __name__
        pass
        # print "Couldn't process: %r, assuming ok" % str(attr)
    return True
def _finddoc(obj):
    # type: (Any) -> unicode
    if inspect.isclass(obj):
        for base in obj.__mro__:
            if base is not object:
                try:
                    doc = base.__doc__
                except AttributeError:
                    continue
                if doc is not None:
                    return doc
        return None

    if inspect.ismethod(obj) and getattr(obj, '__self__', None):
        name = obj.__func__.__name__
        self = obj.__self__
        if (inspect.isclass(self) and
                getattr(getattr(self, name, None), '__func__') is obj.__func__):
            # classmethod
            cls = self
        else:
            cls = self.__class__
    elif inspect.isfunction(obj) or inspect.ismethod(obj):
        name = obj.__name__
        cls = _findclass(obj)
        if cls is None or getattr(cls, name) != obj:
            return None
    elif inspect.isbuiltin(obj):
        name = obj.__name__
        self = obj.__self__
        if (inspect.isclass(self) and
                self.__qualname__ + '.' + name == obj.__qualname__):
            # classmethod
            cls = self
        else:
            cls = self.__class__
    # Should be tested before isdatadescriptor().
    elif isinstance(obj, property):
        func = obj.fget
        name = func.__name__
        cls = _findclass(func)
        if cls is None or getattr(cls, name) is not obj:
            return None
    elif inspect.ismethoddescriptor(obj) or inspect.isdatadescriptor(obj):
        name = obj.__name__
        cls = obj.__objclass__
        if getattr(cls, name) is not obj:
            return None
    else:
        return None

    for base in cls.__mro__:
        try:
            doc = getattr(base, name).__doc__
        except AttributeError:
            continue
        if doc is not None:
            return doc
    return None
def testGettingAttributes(self):
    a = TestMock()
    # dir() returns all names, including properties, attributes, methods
    aDir = dir(a)
    self.assertEqual(('_get1' in aDir), True)
    self.assertEqual(('attr1' in aDir), True)
    self.assertEqual(('method1' in aDir), True)
    self.assertEqual(('property1' in aDir), True)
    # __dict__ stores only attributes
    aDictKeys = a.__dict__.keys()
    self.assertEqual(('attr1' in aDictKeys), True)
    # properties are not found there
    self.assertNotEqual(('property1' in aDictKeys), True)
    # after setting an attribute not defined in __init__ with a property,
    # the new data value is stored in __dict__
    a.property1 = 3
    aDictKeys = a.__dict__.keys()
    self.assertEqual(('attr3' in aDictKeys), True)
    # we cannot use inspect.isdatadescriptor to find properties
    self.assertEqual(inspect.isdatadescriptor(a.property1), False)
    methods, attributes, properties = dirPartitioned(a)
    self.assertEqual(('attr1' in attributes), True)
def check_parameters_match(func, doc=None):
    """Helper to check docstring, returns list of incorrect results"""
    incorrect = []
    name_ = get_name(func)
    if not name_.startswith("vispy."):
        return incorrect
    if inspect.isdatadescriptor(func):
        return incorrect
    args, varargs, varkw, defaults = inspect.getargspec(func)
    # drop self
    if len(args) > 0 and args[0] in ("self", "cls"):
        args = args[1:]

    if doc is None:
        with warnings.catch_warnings(record=True) as w:
            doc = docscrape.FunctionDoc(func)
        if len(w):
            raise RuntimeError("Error for %s:\n%s" % (name_, w[0]))
    # check set
    param_names = [name for name, _, _ in doc["Parameters"]]
    # clean up some docscrape output:
    param_names = [name.split(":")[0].strip("` ") for name in param_names]
    param_names = [name for name in param_names if "*" not in name]
    if len(param_names) != len(args):
        bad = str(sorted(list(set(param_names) - set(args)) +
                         list(set(args) - set(param_names))))
        if not any(d in name_ for d in _ignores):
            incorrect += [name_ + " arg mismatch: " + bad]
    else:
        for n1, n2 in zip(param_names, args):
            if n1 != n2:
                incorrect += [name_ + " " + n1 + " != " + n2]
    return incorrect
def test_no_propagate_attr(self):
    behaviour = pyneric.MetadataBehaviour(propagate_attr=None)
    a_value = object()
    b_value = dict(a=None)
    p_value = {'a'}

    class M(pyneric.Metaclass):
        __metadata_behaviour__ = behaviour
        __metadata__ = dict(a=a_value)
        # Prove that __propagate__ is not part of metadata behaviour.
        __propagate__ = p_value
        # Prove that base overrides are not affected.
        __base_overrides__ = b_value

    class C(with_metaclass(M, object)):
        pass

    self.assertTrue(inspect.isdatadescriptor(M.a))
    # Note that there is no "__propagate__" attribute in the metadata.
    expected = dict(a=a_value, __base_overrides__=b_value)
    self.assertEqual(expected, M._get_metadata())
    expected.update(a=None)
    self.assertEqual(expected, C._get_metadata())
    # Prove that __propagate__ is not part of metadata behaviour.
    self.assertEqual(p_value, M.__propagate__)
    self.assertEqual(p_value, C.__propagate__)
    # Prove that base overrides are not affected.
    self.assertEqual(None, C.a)
    # Prove that propagation of "a" does not occur.
    self.assertRaises(AttributeError, getattr, C(), 'a')
def test_union1(self):
    obj1 = self.__union_prep1()
    obj2 = self.obj_constructor(**self.obj_data[1])
    obj = obj1 + obj2
    for x in inspect.getmembers(obj.__class__):
        if ((inspect.isdatadescriptor(x[1])
                or inspect.isgetsetdescriptor(x[1])
                or inspect.ismemberdescriptor(x[1]))
                and not x[0].startswith("__")
                and not inspect.ismethod(x[1])):
            if getattr(obj2, x[0]):
                self.assertTrue(getattr(obj, x[0]) == getattr(obj2, x[0]))
            else:
                self.assertTrue(getattr(obj, x[0]) == getattr(obj1, x[0]))
    for member, v in _iteritems(self.obj_list_members):
        _list = getattr(obj, member)
        self.assertTrue(len(_list) == 4)
    for member, v in _iteritems(self.obj_dict_members):
        _d1 = dict([(k, v) for k, v in _iteritems(getattr(obj1, member))])
        _d2 = dict([(k, v) for k, v in _iteritems(getattr(obj2, member))])
        _d1.update(_d2)
        _dict = getattr(obj, member)
        _d = dict([(k, v) for k, v in _iteritems(_dict)])
        self.assertTrue(_d == _d1)
def copy_attrs(src_ns, dest, methods, props=True):
    '''Bind methods and properties on src class to dest class'''
    cache = {}
    for name in methods:
        attr = getattr(src_ns, name)
        if inspect.ismethod(attr):
            # WARNING: CPython specific hack - `im_func`
            setattr(dest, name, types.MethodType(attr.im_func, None, dest))
            # track get/set ifaces
            if 'get_' in name or 'set_' in name:
                op, sep, prop = name.rpartition('_')
                cache.setdefault(prop, []).append(op)
        elif inspect.isdatadescriptor(attr):
            attr = functools.partial(attr)
            setattr(dest.__class__, name, property(attr))

    # if there are get and set methods then optionally attach a property
    if props:
        for prop, ops in cache.items():
            if len(ops) == 2:
                setattr(dest, prop, property(
                    getattr(dest, 'get_' + prop),
                    getattr(dest, 'set_' + prop)))
def test_getset(self):
    obj = self.obj_constructor(**self.obj_data[0])
    data_desc = []
    for x in inspect.getmembers(obj.__class__):
        if (inspect.isdatadescriptor(x[1])
                or inspect.isgetsetdescriptor(x[1])
                or inspect.ismemberdescriptor(x[1])) and not x[0].startswith("__"):
            data_desc.append(x)
    for attr in data_desc:
        # z = getattr(obj, attr[0])
        attr_types = self.obj_getset[attr[0]]
        for _type, vals in _iteritems(self.obj_types):
            if _type not in attr_types:
                with self.assertRaises(TypeError):
                    setattr(obj, attr[0], vals[0])
            else:
                for val in vals:
                    setattr(obj, attr[0], val)
        with self.assertRaises(TypeError):
            self.assertTrue(delattr(obj, attr[0]), attr[0])
        setattr(obj, attr[0], self.obj_types[self.obj_getset[attr[0]][0]][0])
        z = getattr(obj, attr[0])
        self.assertTrue(self.obj_types[self.obj_getset[attr[0]][0]][0] == z)
def test_headers():
    '''
    Print the attribute names, sizes and offsets in the C structure

    Assuming that the sizes are correct and add up to an offset of 4100 bytes,
    everything should add up correctly. This information was taken from the
    WinSpec 2.6 Spectroscopy Software User Manual version 2.6B, page 251.

    If this table doesn't add up, something changed in the definitions of the
    datatype widths. Fix this in winspec.structs file and let me know!
    '''
    import inspect, re
    A = Header()
    for i in [Header, AxisCalibration, ROIinfo]:
        fields = []

        print '\n{:30s}[{:4s}]\tsize'.format(i, 'offs')

        for name, obj in inspect.getmembers(i):
            if inspect.isdatadescriptor(obj) and not inspect.ismemberdescriptor(obj) \
                    and not inspect.isgetsetdescriptor(obj):
                fields.append((name, obj))

        fields.sort(key=lambda x: re.search('(?<=ofs=)([0-9]+)', str(x[1])).group(0),
                    cmp=lambda x, y: cmp(int(x), int(y)))

        for name, obj in fields:
            print '{:30s}[{:4d}]\t{:4d}'.format(name, obj.size, obj.offset)
def should_be_checked(self, obj, module=None):
    """returns True, if obj should be checked, False otherwise"""
    if id(obj) in self.__checked:
        return False
    if inspect.isbuiltin(obj):
        return False
    if inspect.ismodule(obj):
        if obj.__name__ in self.__already_imported:
            return False  # do not check already imported modules
        source_file = self.getsourcefile(obj)
        if source_file:
            return contains_parent_dir(source_file, self.__include_paths)
        else:
            return False
    obj_module = inspect.getmodule(obj)
    if obj_module is not module:
        return False
    if inspect.isclass(obj) \
            or inspect.ismethod(obj) \
            or inspect.isfunction(obj) \
            or inspect.isroutine(obj) \
            or inspect.ismethoddescriptor(obj) \
            or inspect.isdatadescriptor(obj):
        return True
    return False
def getMembers(cls):
    lfunctions = []
    lmethods = []
    lattributes = []
    for m in inspect.getmembers(cls):
        m_name = m[0]
        m_object = m[1]
        if cls.__dict__.get(m_name):  # Do not print inherited names
            # print(type(m_object))
            if m_name[0] != "_" and m_name not in kobject.ignore_list:
                if inspect.isbuiltin(m_object):
                    pass
                elif inspect.iscode(m_object):
                    pass
                elif inspect.ismodule(m_object):
                    pass
                elif inspect.ismethoddescriptor(m_object):
                    pass
                elif inspect.isdatadescriptor(m_object):
                    pass
                elif inspect.ismethod(m_object):
                    lmethods.append(m)
                elif inspect.isfunction(m_object):
                    lfunctions.append(m)
                elif inspect.isroutine(m_object):
                    pass
                else:
                    lattributes.append(m)
    return {"functions": lfunctions, "methods": lmethods, "attributes": lattributes}
def get_class_descriptor_types(instance):
    # Get all descriptor names which are TypedParameter of instance's type
    descriptors = {}
    for descriptor_name, descriptor_object in inspect.getmembers(type(instance)):
        if inspect.isdatadescriptor(descriptor_object) and isinstance(descriptor_object, TypedParameter):
            descriptors.update({descriptor_name: type(descriptor_object)})
    return descriptors
def __new__(cls, name, bases, classdict):
    result = type.__new__(cls, name, bases, dict(classdict))
    result._ordered_options = []

    def add_option(base_class, option):
        for i, o in enumerate(result._ordered_options):
            if o.name == option.name:
                if not issubclass(option.__class__, o.__class__):
                    warn("Type (\"{}\") of the \"{}\" option overridden by \"{}\" is "
                         "different than type (\"{}\") defined by one of super classes."
                         .format(option.__class__.__name__, option.name,
                                 base_class.__name__, o.__class__.__name__))
                result._ordered_options.pop(i)
                break
        result._ordered_options.append(option)

    # Get ordered options from base class.
    mro = inspect.getmro(result)
    if len(mro) > 1:
        for base_class in reversed(mro[1:]):
            if hasattr(base_class, '_ordered_options'):
                options = base_class._ordered_options
            else:
                # e.g. mixin
                options = [o for k, o in base_class.__dict__.items()
                           if isinstance(o, BaseOption)]
            for o in options:
                add_option(base_class, o)

    new_options = [v for k, v in classdict.items()
                   if inspect.isdatadescriptor(v) and isinstance(v, BaseOption)]
    for o in new_options:
        add_option(result, o)
    return result
def _capture_descriptors(cls):
    """Work around for not being able to call contribute_to_class.

    Too much code to fake in our meta objects etc to be able to call
    contribute_to_class directly, but we still want fields to be able to set
    custom type descriptors. So we fake a model instead, with the same fields
    as the composite type, and extract any custom descriptors on that.
    """
    attrs = {field_name: field for field_name, field in cls._meta.fields}

    # we need to build a unique app label and model name combination for
    # every composite type so django doesn't complain about model reloads
    class Meta:
        app_label = cls.__module__

    attrs['__module__'] = cls.__module__
    attrs['Meta'] = Meta
    model_name = '_Fake{}Model'.format(cls.__name__)

    fake_model = type(model_name, (models.Model,), attrs)
    for field_name, _ in cls._meta.fields:
        # default None is for django 1.9
        attr = getattr(fake_model, field_name, None)
        if inspect.isdatadescriptor(attr):
            setattr(cls, field_name, attr)
def test_no_base_override_attr(self):
    behaviour = pyneric.MetadataBehaviour(base_override_attr=None)
    a_value = object()
    b_value = dict(a=None)
    p_value = {'a'}

    class M(pyneric.Metaclass):
        __metadata_behaviour__ = behaviour
        __metadata__ = dict(a=a_value)
        # Prove that propagation is not affected.
        __propagate__ = p_value
        # Prove that __base_overrides__ is not part of metadata behaviour.
        __base_overrides__ = b_value

    class C(with_metaclass(M, object)):
        pass

    self.assertTrue(inspect.isdatadescriptor(M.a))
    # Note that there is no "__base_overrides__" attribute in the metadata.
    expected = dict(a=a_value, __propagate__=p_value)
    self.assertEqual(expected, M._get_metadata())
    self.assertEqual(expected, C._get_metadata())
    # Prove that __base_overrides__ is not part of metadata behaviour.
    self.assertEqual(b_value, M.__base_overrides__)
    self.assertEqual(b_value, C.__base_overrides__)
    # Prove that "a" was not overridden.
    self.assertEqual(a_value, C.a)
    # Prove that propagation is not affected.
    self.assertEqual(a_value, C().a)
def Base_getattro(self, name):
    descr = None
    for base in type(self).__mro__:
        if name in base.__dict__:
            descr = base.__dict__[name]
            break

    if descr is not None and inspect.isdatadescriptor(descr):
        return descr.__get__(self, type(self))

    try:
        # Don't do self.__dict__ otherwise you get recursion.
        inst_dict = object.__getattribute__(self, '__dict__')
    except AttributeError:
        pass
    else:
        if name in inst_dict:
            descr = inst_dict[name]
            # If the tp_descr_get of res is of_get, then call it.
            if name == '__parent__' or not isinstance(descr, Base):
                return descr

    if descr is not None:
        descr_get = getattr(descr, '__get__', None)
        if descr_get is None:
            return descr
        return descr_get(self, type(self))

    raise AttributeError("'%.50s' object has no attribute '%s'" % (
        type(self).__name__, name))
def _build_scope(self, get_instance):
    """Build a scope (dictionary) with wrappers of the public methods and
    properties contained in the class of the object returned by get_instance.

    Wrappers in the scope call get_instance to get the value of self.

    Args:
        get_instance: Function returning the instance to be used as self.

    Returns:
        Dictionary containing the built scope.
    """
    obj = get_instance()
    cls = type(obj)
    methods = {n: x for (n, x) in getmembers(cls)
               if n[0] != '_' and isroutine(x)}
    scope = {n: update_wrapper(partial(self._method, get_instance, f), f)
             for (n, f) in methods.items()}
    functions = {n: x for (n, x) in getmembers(obj)
                 if n[0] != '_' and isfunction(x)}
    scope.update(functions)
    properties = {n: x for (n, x) in getmembers(cls)
                  if n[0] != '_' and isdatadescriptor(x)}
    scope.update({n: update_wrapper(partial(self._get_set, get_instance, p), p)
                  for (n, p) in properties.items()})
    return scope
def test_02_000_SimpleClass(self):
    """Import simple single class"""
    self.assertTrue("classa" in dir(self.tm))
    self.assertTrue(inspect.isclass(self.tm.classa))
    self.assertTrue(inspect.isdatadescriptor(self.tm.classa.attr))
    self.assertTrue(inspect.ismethod(self.tm.classa.__init__))
def patch_instance(mod, patches=None, **kwargs_patches):
    if not mod:
        raise ValueError("mod is empty")

    if not patches:
        patches = {}
    patches.update(kwargs_patches)  # combine both dicts
    if not patches:
        raise ValueError("patches dict is empty")

    for name, patch in patches.items():
        val = getattr(mod, name, None)
        if inspect.isroutine(val):
            setattr(mod, name, types.MethodType(patch, mod))
        else:
            if name in mod.__class__.__dict__ and inspect.isdatadescriptor(mod.__class__.__dict__[name]):
                # http://stackoverflow.com/questions/30342212/override-attribute-access-precedence-having-a-data-descriptor/30578922#30578922
                monkey_class = patch_class(mod.__class__, **{name: patch})
                mod.__class__ = monkey_class
            else:
                setattr(mod, name, patch)

    return mod
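# A hypothetical usage sketch for patch_instance() above (the Config class and
# its members are invented for illustration; it assumes patch_instance() and its
# inspect/types imports are in scope). A plain attribute goes through setattr,
# while a method patch is bound to the instance via types.MethodType.
class Config:
    timeout = 5

    def describe(self):
        return "timeout=%d" % self.timeout

cfg = patch_instance(Config(), timeout=30,
                     describe=lambda self: "patched, timeout=%d" % self.timeout)
assert cfg.timeout == 30
assert cfg.describe() == "patched, timeout=30"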
def check_parameters_match(func, doc=None):
    """Helper to check docstring, returns list of incorrect results"""
    from numpydoc import docscrape
    incorrect = []
    name_ = get_name(func)
    if not name_.startswith('mne.') or name_.startswith('mne.externals'):
        return incorrect
    if inspect.isdatadescriptor(func):
        return incorrect
    args = _get_args(func)
    # drop self
    if len(args) > 0 and args[0] == 'self':
        args = args[1:]

    if doc is None:
        with warnings.catch_warnings(record=True) as w:
            doc = docscrape.FunctionDoc(func)
        if len(w):
            raise RuntimeError('Error for %s:\n%s' % (name_, w[0]))
    # check set
    param_names = [name for name, _, _ in doc['Parameters']]
    # clean up some docscrape output:
    param_names = [name.split(':')[0].strip('` ') for name in param_names]
    param_names = [name for name in param_names if '*' not in name]
    if len(param_names) != len(args):
        bad = str(sorted(list(set(param_names) - set(args)) +
                         list(set(args) - set(param_names))))
        if not any(d in name_ for d in _docstring_ignores) and \
                'deprecation_wrapped' not in func.__code__.co_name:
            incorrect += [name_ + ' arg mismatch: ' + bad]
    else:
        for n1, n2 in zip(param_names, args):
            if n1 != n2:
                incorrect += [name_ + ' ' + n1 + ' != ' + n2]
    return incorrect
def _build_help(obj):
    klass = obj.__class__
    items = []
    max_read, max_write = 0, 0
    for name in sorted(dir(obj)):
        if name.startswith('_'):
            continue
        if hasattr(klass, name) and \
                inspect.isdatadescriptor(getattr(klass, name)):
            continue
        member = getattr(obj, name)
        if callable(member):
            mode = getattr(member, '_mode', None)
            if mode is None:
                continue
            name = member._name
        else:
            if hasattr(klass, name):
                continue
            mode = 'rw'
            name = name.upper()
        read = '?' + name if 'r' in mode else ''
        write = name if 'w' in mode else ''
        max_read = max(max_read, len(read))
        max_write = max(max_write, len(write))
        items.append((read, write))
    templ = '{{0:>{0}}} {{1:<}}'.format(max_write + 2)
    return '$\n' + '\n'.join([templ.format(w, r) for r, w in items]) + '\n$'
def _iter_methods(klass, package=None):
    for member in _iter_doc_members(klass, package):
        if inspect.isfunction(member) or inspect.ismethod(member):
            if inspect.isdatadescriptor(member):
                continue
            if _name(member).startswith('jsonrpc_'):
                yield member
def print_offsets():
    '''
    Print the attribute names, sizes and offsets in the C structure

    Assuming that the sizes are correct and add up to an offset of 4100 bytes,
    everything should add up correctly. This information was taken from the
    WinSpec 2.6 Spectroscopy Software User Manual version 2.6B, page 251.

    If this table doesn't add up, something changed in the definitions of the
    datatype widths. Fix this in winspec.structs file and let me know!
    '''
    import inspect, re
    A = Header()
    for i in [Header, AxisCalibration, ROIinfo]:
        fields = []

        print('\n{:30s}[{:4s}]\tsize'.format(repr(i), 'offs'))

        for name, obj in inspect.getmembers(i):
            if inspect.isdatadescriptor(obj) and not inspect.ismemberdescriptor(obj) \
                    and not inspect.isgetsetdescriptor(obj):
                fields.append((name, obj))

        fields = sorted(fields, key=lambda x: x[1].offset)

        for name, obj in fields:
            print('{:30s}[{:4d}]\t{:4d}'.format(name, obj.size, obj.offset))
def get_exposed_property_value(obj, propname, only_exposed=True):
    """
    Return the value of an @exposed @property.
    If the requested property is not a @property or not exposed,
    an AttributeError is raised instead.
    """
    v = getattr(obj.__class__, propname)
    if inspect.isdatadescriptor(v):
        if v.fget and getattr(v.fget, "_pyroExposed", not only_exposed):
            return v.fget(obj)
    raise AttributeError(
        "attempt to access unexposed or unknown remote attribute '%s'" % propname)
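# Hypothetical illustration of get_exposed_property_value() above (the Thing
# class is invented; it assumes the function and its inspect import are in
# scope). The property getter must carry a truthy "_pyroExposed" marker, as
# Pyro's @expose decorator sets, unless only_exposed=False is passed.
class Thing:
    @property
    def answer(self):
        return 42
    answer.fget._pyroExposed = True

assert get_exposed_property_value(Thing(), "answer") == 42
# An unexposed or unknown property raises AttributeError instead.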
def get_errors(self, parent_name: str = "") -> List[str]:
    """
    :param parent_name:
    :return: error messages
    """
    # look for undeclared variables
    field_names = set([fie.name for fie in fields(self)])
    var_names = set()
    f: str
    for f in dir(self):
        attr: Any = getattr(self, f)
        if hasattr(self.__class__, f):
            class_attr: Any = getattr(self.__class__, f)
        else:
            class_attr = None
        if (inspect.ismethod(attr)
                or (class_attr is not None and inspect.isdatadescriptor(class_attr))
                or f.startswith("_")
                or inspect.isclass(attr)
                or f == "meta"):
            continue
        var_names.add(f)

    unset = field_names - var_names
    undeclr = var_names - field_names
    error_msgs: List[str] = []
    if unset:
        error_msgs += [
            f'Variable "{parent_name}.{v}" is unset!' for v in unset
        ]
    if undeclr:
        error_msgs += [
            f'Variable "{parent_name}.{v}" is undeclared!' for v in undeclr
        ]

    fields_to_check = field_names - unset - undeclr
    for f in fields_to_check:
        attr2check: Any = getattr(self, f)
        if issubclass(type(attr2check), BaseEnv):
            error_msgs += attr2check.get_errors(
                parent_name=f"{parent_name}.{f}")
    return error_msgs
def object_build(self, node, obj):
    """recursive method which creates a partial ast from real objects
    (only function, class, and method are handled)
    """
    if obj in self._done:
        return self._done[obj]
    self._done[obj] = node
    for name in dir(obj):
        try:
            member = getattr(obj, name)
        except AttributeError:
            # damned ExtensionClass.Base, I know you're there !
            attach_dummy_node(node, name)
            continue
        if inspect.ismethod(member):
            member = six.get_method_function(member)
        if inspect.isfunction(member):
            _build_from_function(node, name, member, self._module)
        elif inspect.isbuiltin(member):
            if (not _io_discrepancy(member) and
                    self.imported_member(node, member, name)):
                continue
            object_build_methoddescriptor(node, member, name)
        elif inspect.isclass(member):
            if self.imported_member(node, member, name):
                continue
            if member in self._done:
                class_node = self._done[member]
                if class_node not in node.locals.get(name, ()):
                    node.add_local_node(class_node, name)
            else:
                class_node = object_build_class(node, member, name)
                # recursion
                self.object_build(class_node, member)
            if name == '__class__' and class_node.parent is None:
                class_node.parent = self._done[self._module]
        elif inspect.ismethoddescriptor(member):
            assert isinstance(member, object)
            object_build_methoddescriptor(node, member, name)
        elif inspect.isdatadescriptor(member):
            assert isinstance(member, object)
            object_build_datadescriptor(node, member, name)
        elif isinstance(member, _CONSTANTS):
            attach_const_node(node, name, member)
        elif inspect.isroutine(member):
            # This should be called for Jython, where some builtin
            # methods aren't caught by the isbuiltin branch.
            _build_from_function(node, name, member, self._module)
        else:
            # create an empty node so that the name is actually defined
            attach_dummy_node(node, name, member)
def object_build(self, node, obj):
    """recursive method which creates a partial ast from real objects
    (only function, class, and method are handled)
    """
    if obj in self._done:
        return self._done[obj]
    self._done[obj] = node
    for name in dir(obj):
        try:
            member = getattr(obj, name)
        except AttributeError:
            # damned ExtensionClass.Base, I know you're there !
            attach_dummy_node(node, name)
            continue
        if ismethod(member):
            member = member.im_func
        if isfunction(member):
            # verify this is not an imported function
            if member.func_code.co_filename != getattr(
                    self._module, '__file__', None):
                attach_dummy_node(node, name, member)
                continue
            object_build_function(node, member, name)
        elif isbuiltin(member):
            if self.imported_member(node, member, name):
                continue
            object_build_methoddescriptor(node, member, name)
        elif isclass(member):
            if self.imported_member(node, member, name):
                continue
            if member in self._done:
                class_node = self._done[member]
                if class_node not in node.locals.get(name, ()):
                    node.add_local_node(class_node, name)
            else:
                class_node = object_build_class(node, member, name)
                # recursion
                self.object_build(class_node, member)
            if name == '__class__' and class_node.parent is None:
                class_node.parent = self._done[self._module]
        elif ismethoddescriptor(member):
            assert isinstance(member, object)
            object_build_methoddescriptor(node, member, name)
        elif isdatadescriptor(member):
            assert isinstance(member, object)
            object_build_datadescriptor(node, member, name)
        elif isinstance(member, _CONSTANTS):
            attach_const_node(node, name, member)
        else:
            # create an empty node so that the name is actually defined
            attach_dummy_node(node, name, member)
def _str_member_list(self, name):
    """
    Generate a member listing, autosummary:: table where possible,
    and a table where not.

    """
    out = []
    if self[name]:
        out += ['.. rubric:: %s' % name, '']
        prefix = getattr(self, '_name', '')

        if prefix:
            prefix = '~%s.' % prefix

        autosum = []
        others = []
        for param in self[name]:
            param = param._replace(name=param.name.strip())

            # Check if the referenced member can have a docstring or not
            param_obj = getattr(self._obj, param.name, None)
            if not (callable(param_obj)
                    or isinstance(param_obj, property)
                    or inspect.isdatadescriptor(param_obj)):
                param_obj = None

            if param_obj and pydoc.getdoc(param_obj):
                # Referenced object has a docstring
                autosum += [" %s%s" % (prefix, param.name)]
            else:
                others.append(param)

        if autosum:
            out += ['.. autosummary::']
            if self.class_members_toctree:
                out += [' :toctree:']
            out += [''] + autosum

        if others:
            maxlen_0 = max(3, max([len(p.name) + 4 for p in others]))
            hdr = "=" * maxlen_0 + " " + "=" * 10
            fmt = '%%%ds %%s ' % (maxlen_0,)
            out += ['', '', hdr]
            for param in others:
                name = "**" + param.name.strip() + "**"
                desc = " ".join(x.strip() for x in param.desc).strip()
                if param.type:
                    desc = "(%s) %s" % (param.type, desc)
                out += [fmt % (name, desc)]
            out += [hdr]
        out += ['']
    return out
def dumpable(self, deep=False):
    """ Return nested ordered dict of dumpable attributes including brined objects.
        If deep is True then recursively operate ._dumpable on Briner instances.
        This is useful if you want to convert to dumpable full nested Briners
        when using as a standalone function, not as part of dump or dumps.
    """
    if self._Keys is None:
        keys = self.__dict__.keys()  # include instance attribute keys
        if self._Propertied:  # include data descriptor properties from the class
            props = [key for key in dir(self)
                     if hasattr(self.__class__, key) and
                     inspect.isdatadescriptor(getattr(self.__class__, key))]
            keys.extend(props)
        keys.sort()
    else:
        keys = self._Keys

    dumpable = OrderedDict()  # use odict so serialization is ordered
    for name in keys:  # build nested OrderedDict of serializable attributes
        if name.startswith('_'):
            continue  # skip private
        try:  # get the attr associated with name
            attr = getattr(self, name)
        except AttributeError as ex:  # skip if getattr fails
            continue
        if inspect.isroutine(attr):
            continue  # skip methods
        if deep and hasattr(attr, '_Brined'):  # descend into Brined objects
            dumpable[name] = attr._dumpable()  # recursively operate on Briner instances
            continue
        if not hasattr(attr, '_Brined') and self._Safed:
            try:  # last resort, skip attributes that are not json serializable
                temp = json.dumps(attr)
            except TypeError as ex:
                continue
        dumpable[name] = attr  # valid attribute

    if self._Hinted:
        dumpable["@class"] = self.__class__.__name__
    return dumpable
def import_builtin():
    module = importlib.import_module('builtins')
    syms = inspect.getmembers(module)
    for sym in syms:
        if inspect.isfunction(sym[1]) or inspect.isbuiltin(sym[1]):
            GLOBAL_SYMBOL_LIST.append(Function(sym[0]))
        elif inspect.ismethod(sym[1]):
            pass
        elif inspect.isgeneratorfunction(sym[1]):
            GLOBAL_SYMBOL_LIST.append(Function(sym[0]))
        elif (inspect.isgenerator(sym[1]) or inspect.istraceback(sym[1])
                or inspect.isframe(sym[1]) or inspect.iscode(sym[1])
                or inspect.isroutine(sym[1]) or inspect.isabstract(sym[1])
                or inspect.ismemberdescriptor(sym[1])
                or inspect.isdatadescriptor(sym[1])
                or inspect.isgetsetdescriptor(sym[1])):
            # These member kinds are deliberately ignored.
            pass
        elif inspect.isclass(sym[1]):
            GLOBAL_SYMBOL_LIST.append(Class(sym[0]))
        else:
            print(sym[0])
def whatis(self, arguments):
    """Prints the type of the argument.

    Usage:
        whatis <name>...
    """
    arg = " ".join(arguments["argv"][1:])
    try:
        value = eval(arg, self._obj.curframe.f_globals,
                     self._obj.curframe.f_locals)
    except:  # noqa
        v = sys.exc_info()[1]
        self._ui.printf('*** %R{}%N: {}\n'.format(type(v).__name__, v))
        return
    if inspect.ismodule(value):
        filename = value.__file__ if value.__file__ else "builtin module"
        self._ui.print('Module:', filename)
    elif inspect.isasyncgenfunction(value):
        self._ui.print('Async Gen function:', value.__name__, inspect.signature(value))
    elif inspect.isasyncgen(value):
        self._ui.print('Async Gen:', value.__name__, inspect.signature(value))
    elif inspect.iscoroutine(value):
        self._ui.print('Coroutine:', value)
        self._ui.print(' state:', inspect.getcoroutinestate(value))
        if inspect.isawaitable(value):
            self._ui.print(' and awaitable.')
        self._ui.print(' stack:', _coroutine_format_stack(value, complete=False))
    elif inspect.isgenerator(value):
        self._ui.print('Generator:', value)
        self._ui.print(' state:', inspect.getgeneratorstate(value))
        if inspect.isawaitable(value):
            self._ui.print(' and awaitable.')
    elif inspect.iscoroutinefunction(value):
        self._ui.print('Coroutine function:', value.__name__, inspect.signature(value))
    elif inspect.isgeneratorfunction(value):
        self._ui.print('Generator function:', value.__name__, inspect.signature(value))
    elif inspect.isfunction(value):
        self._ui.print('Function:', value.__name__, inspect.signature(value))
    elif inspect.ismethod(value):
        self._ui.print('Method:', value.__name__, inspect.signature(value))
    elif inspect.iscode(value):
        self._ui.print('Code object:', value.co_name)
    elif inspect.isclass(value):
        self._ui.print('Class:', value.__name__)
    elif inspect.ismethoddescriptor(value):
        self._ui.print('Method descriptor:', value.__name__)
    elif inspect.isdatadescriptor(value):
        self._ui.print('Data descriptor:', value.__name__)
    # None of the above...
    else:
        self._ui.print("Type of:", type(value))
def _get_overides(klass: Type, obj: Any) -> List[Override]:
    overrides: List[Override] = []
    # We need to inspect each item in the MRO, until we get to our Type, at that
    # point we'll bail, because those methods are not the overriden methods, but the
    # "real" methods.
    jsii_classes = [klass] + list(
        itertools.chain.from_iterable(
            (getattr(m, "__jsii_ifaces__", []) for m in type(obj).mro())
        )
    )
    for mro_klass in type(obj).mro():
        if (
            mro_klass is klass
            and getattr(mro_klass, "__jsii_type__", "Object") is not None
        ):
            break
        if mro_klass is Object:
            break

        for name, item in mro_klass.__dict__.items():
            # We're only interested in things that also exist on the JSII class or
            # interfaces, and which are themselves, jsii members.
            for jsii_class in jsii_classes:
                original = getattr(jsii_class, name, _nothing)
                if original is not _nothing:
                    if inspect.isfunction(item) and hasattr(original, "__jsii_name__"):
                        if any(
                            entry.method == original.__jsii_name__
                            for entry in overrides
                        ):
                            # Don't re-register an override we already discovered
                            # through a previous type
                            continue
                        overrides.append(
                            Override(method=original.__jsii_name__, cookie=name)
                        )
                    elif inspect.isdatadescriptor(item) and hasattr(
                        getattr(original, "fget", None), "__jsii_name__"
                    ):
                        if any(
                            entry.property == original.fget.__jsii_name__
                            for entry in overrides
                        ):
                            # Don't re-register an override we already discovered
                            # through a previous type
                            continue
                        overrides.append(
                            Override(property=original.fget.__jsii_name__, cookie=name)
                        )
    return overrides
def dump_state(detector, filters='r'):
    klass = type(detector)
    members = ((name, getattr(klass, name)) for name in dir(klass)
               if not name.startswith('_'))
    descriptors = ((name, member) for name, member in members
                   if inspect.isdatadescriptor(member))

    def filt(m):
        if 'r' in filters and not m[1].fget:
            return False
        if 'w' in filters and not m[1].fset:
            return False
        return True

    descriptors = filter(filt, descriptors)
    return {name: getattr(detector, name) for name, _ in descriptors}
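# Hypothetical usage of dump_state() above (the Detector class is invented; it
# assumes dump_state() and its inspect import are in scope): collect all
# readable properties, or only those that are also writable.
class Detector:
    def __init__(self):
        self._gain = 2

    @property
    def gain(self):
        return self._gain

    @gain.setter
    def gain(self, value):
        self._gain = value

    @property
    def serial(self):  # read-only
        return "ABC123"

print(dump_state(Detector()))                # {'gain': 2, 'serial': 'ABC123'}
print(dump_state(Detector(), filters='rw'))  # {'gain': 2}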
def check_parameters_match(func, doc=None, cls=None):
    """Check docstring, return list of incorrect results."""
    from numpydoc import docscrape
    incorrect = []
    name_ = get_name(func, cls=cls)
    if not name_.startswith('mne.') or name_.startswith('mne.externals'):
        return incorrect
    if inspect.isdatadescriptor(func):
        return incorrect
    args = _get_args(func)
    # drop self
    if len(args) > 0 and args[0] == 'self':
        args = args[1:]

    if doc is None:
        with pytest.warns(None) as w:
            try:
                doc = docscrape.FunctionDoc(func)
            except Exception as exp:
                incorrect += [name_ + ' parsing error: ' + str(exp)]
                return incorrect
        if len(w):
            raise RuntimeError('Error for %s:\n%s' % (name_, w[0]))
    # check set
    parameters = doc['Parameters']
    # clean up some docscrape output:
    parameters = [[p[0].split(':')[0].strip('` '), p[2]] for p in parameters]
    parameters = [p for p in parameters if '*' not in p[0]]
    param_names = [p[0] for p in parameters]
    if len(param_names) != len(args):
        bad = str(sorted(list(set(param_names) - set(args)) +
                         list(set(args) - set(param_names))))
        if not any(re.match(d, name_) for d in docstring_ignores) and \
                'deprecation_wrapped' not in func.__code__.co_name:
            incorrect += [name_ + ' arg mismatch: ' + bad]
    else:
        for n1, n2 in zip(param_names, args):
            if n1 != n2:
                incorrect += [name_ + ' ' + n1 + ' != ' + n2]
    for param_name, desc in parameters:
        desc = '\n'.join(desc)
        full_name = name_ + '::' + param_name
        if full_name in docstring_length_ignores:
            assert len(desc) > char_limit  # assert it actually needs to be
        elif len(desc) > char_limit:
            incorrect += ['%s too long (%d > %d chars)'
                          % (full_name, len(desc), char_limit)]
    return incorrect
def __init__(self, wrapped, burrow_deep=False):
    super(WrappedObject, self).__init__()
    __builtin__.print_last_failure = self.print_last_failure
    self._wrapped_calls = {}
    self._access_log = []
    self._burrow_deep = burrow_deep
    self._last_failure = {}
    self.__class__ = type('Wrapped_' + type(wrapped).__name__,
                          (WrappedObject, type(wrapped)), {})
    self._wrapped = wrapped
    for attr_name in dir(wrapped):
        if not attr_name.startswith('__') and (
                inspect.isdatadescriptor(getattr(type(wrapped), attr_name, False)) or
                inspect.ismethoddescriptor(getattr(type(wrapped), attr_name, False))):
            setattr(self.__class__, attr_name, getattr(type(wrapped), attr_name))
        elif not attr_name.startswith('__'):
            attr = getattr(wrapped, attr_name)
            if callable(attr):
                self._wrapped_calls[attr_name] = []
                setattr(self, attr_name, WrappedFunc(attr, self))
            elif attr_name not in ['_access_log', '_wrapped_calls']:
                self._access_log.append(
                    attr_name + ' initialized with value: ' +
                    (str(attr) if len(str(attr)) < 100 else "VALUE TOO LONG."))
                setattr(self.__class__, attr_name, WrappedAttribute(attr_name, attr))
            else:
                setattr(self, attr_name, attr)
        elif attr_name in ['__enter__', '__exit__']:
            original = getattr(wrapped, attr_name)
            new_method = WrappedFunc(original, self)
            setattr(self.__class__, attr_name, new_method)
    self._access_log.append(('-' * 25) + 'INSTANTIATION COMPLETE' + ('-' * 25))
    self._access_log.append(
        Printer.print_padded_message(type(wrapped).__name__ + " wrapped.",
                                     closed=not self._burrow_deep))
    if self._burrow_deep:
        self._access_log.append(
            Printer.print_padded_message(
                "Currently burrowing deep (self is exchanged and all calls will be logged.)"
                + '\nCall burrow_deep(False) to disable.',
                opened=False, closed=False))
    self._access_log.append('-' * 80)
def Base_getattro(self, name, _marker=object()):
    descr = marker = _marker
    # XXX: Why is this looping manually? The C code uses ``_PyType_Lookup``,
    # which is an internal function, but equivalent to ``getattr(type(self), name)``.
    for base in type(self).__mro__:
        if name in base.__dict__:
            descr = base.__dict__[name]
            break

    # A data descriptor in the type has full control.
    if descr is not marker and inspect.isdatadescriptor(descr):
        return descr.__get__(self, type(self))

    # descr either wasn't defined, or it's not a data descriptor.
    try:
        # Don't do self.__dict__ otherwise you get recursion.
        # Not all instances will have dictionaries.
        inst_dict = object.__getattribute__(self, '__dict__')
    except AttributeError:
        pass
    else:
        try:
            descr = inst_dict[name]
        except KeyError:
            pass
        else:
            # If the tp_descr_get of res is of_get, then call it,
            # unless it is __parent__ --- we don't want to wrap that.
            # XXX: This isn't quite what the C implementation does. It actually
            # checks the get function. Here we test the type.
            if name == '__parent__' or not isinstance(descr, BasePy):
                return descr

    # Here, descr could be either a non-data descriptor
    # from the class dictionary, or *any* kind of object
    # from the instance dictionary. Unlike the way normal
    # Python classes handle non-data descriptors, we will invoke
    # __get__ even if it was found in the instance dictionary.
    if descr is not marker:
        try:
            descr_get = descr.__get__
        except AttributeError:
            return descr
        return descr_get(self, type(self))

    raise AttributeError("'%.50s' object has no attribute '%s'" % (
        type(self).__name__, name))
def update_from_form(instance, form_data=None):
    mapper = inspect(instance.__class__)
    cols = {c.key: c for c in mapper.columns if not c.foreign_keys}
    setables = dict(pyinspect.getmembers(
        instance.__class__,
        lambda p: pyinspect.isdatadescriptor(p) and getattr(p, 'fset', None)))
    relns = {r.key: r for r in mapper.relationships
             if not r.uselist and len(r._calculated_foreign_keys) == 1
             and iter(r._calculated_foreign_keys).next().table == mapper.local_table}
    unknown = set(form_data.keys()) - (
        set(cols.keys()).union(set(setables.keys())).union(set(relns.keys())))
    if unknown:
        raise HTTPBadRequest("Unknown keys: " + ",".join(unknown))
    params = dict(form_data)
    # type checking
    columns = {c.key: c for c in mapper.columns}
    for key, value in params.items():
        if key in relns and isinstance(value, string_types):
            val_inst = relns[key].class_.get_instance(value)
            if not val_inst:
                raise HTTPBadRequest("Unknown instance: " + value)
            params[key] = val_inst
        elif key in columns and isinstance(columns[key].type, DeclEnumType) \
                and isinstance(value, string_types):
            val_det = columns[key].type.enum.from_string(value)
            if not val_det:
                raise HTTPBadRequest("Cannot interpret " + value)
            params[key] = val_det
        elif key in columns and columns[key].type.python_type == datetime.datetime \
                and isinstance(value, string_types):
            val_dt = datetime.datetime.strpstr(value)
            if not val_dt:
                raise HTTPBadRequest("Cannot interpret " + value)
            params[key] = val_dt
        elif key in columns and columns[key].type.python_type == int \
                and isinstance(value, string_types):
            try:
                params[key] = int(value)
            except ValueError:
                raise HTTPBadRequest("Not a number: " + value)
        elif key in columns and not isinstance(value, columns[key].type.python_type):
            raise HTTPBadRequest("Value %s for key %s should be a %s" % (
                value, key, columns[key].type.python_type))
    try:
        for key, value in params.items():
            setattr(instance, key, value)
    except:
        raise HTTPBadRequest()
def check_parameters_match(func, doc=None):
    """Helper to check docstring, returns list of incorrect results"""
    incorrect = []
    name_ = get_name(func)
    if not name_.startswith('mne.'):
        return incorrect
    if inspect.isdatadescriptor(func):
        return incorrect
    try:
        args, varargs, varkw, defaults = inspect.getargspec(func)
    except TypeError:
        return incorrect
    # drop self
    if len(args) > 0 and args[0] == 'self':
        args = args[1:]

    if doc is None:
        with warnings.catch_warnings(record=True) as w:
            doc = docscrape.FunctionDoc(func)
        if len(w):
            raise RuntimeError('Error for %s:\n%s' % (name_, w[0]))
    # check set
    param_names = [name for name, _, _ in doc['Parameters']]
    # clean up some docscrape output:
    param_names = [name.split(':')[0].strip('` ') for name in param_names]
    param_names = [name for name in param_names if '*' not in name]
    try:
        args_set = set(args)
    except TypeError:
        # TODO: handle arg tuples
        return incorrect
    extra_params = set(param_names) - args_set
    if extra_params and not varkw:
        incorrect += [get_name(func) + ' in doc ' + str(sorted(extra_params))]
    if defaults:
        none_defaults = [arg for arg, default in zip(args[-len(defaults):], defaults)
                         if default is None]
    else:
        none_defaults = []
    extra_args = args_set - set(param_names) - set(none_defaults)
    if param_names and extra_args:
        incorrect += [get_name(func) + ' in argspec ' + str(sorted(extra_args))]
    # check order?
    return incorrect
def __new__(self, classname, classbases, classdict):
    try:
        frame = inspect.currentframe()
        frame = frame.f_back
        if frame.f_locals.has_key(classname):
            old_class = frame.f_locals.get(classname)
            for name, func in classdict.items():
                # isdatadescriptor matches properties
                if inspect.isfunction(func) or inspect.isdatadescriptor(func):
                    setattr(old_class, name, func)
            return old_class
        return type.__new__(self, classname, classbases, classdict)
    finally:
        del frame
def __init__(cls, name, bases, dct):
    def is_public_member(key):
        return ((key.startswith('__') and key.endswith('__') and len(key) > 4)
                or not key.startswith('_'))

    for key, val in dct.items():
        if ((inspect.isfunction(val) or inspect.isdatadescriptor(val))
                and is_public_member(key)
                and val.__doc__ is None):
            for base in cls.__mro__[1:]:
                super_method = getattr(base, key, None)
                if super_method is not None:
                    val.__doc__ = super_method.__doc__
                    break

    super().__init__(name, bases, dct)
def load(cls, **kwargs):
    attrs = inspect.getmembers(
        cls,
        lambda x: inspect.isdatadescriptor(x) and not x.__name__.startswith('_'))
    ret = cls()
    for name, dtype in attrs:
        if name in kwargs:
            setattr(ret, name, kwargs[name])
        else:
            env = os.getenv(name)
            if env:
                setattr(ret, name, dtype(env))
            else:
                setattr(ret, name, getattr(cls, name))
    return ret
def properties(self):
    result = {
        'index': self.index,
        'class_id': self.class_id,
        'serial': self.serial,
    }
    for name, value in inspect.getmembers(self):
        prop_attr = getattr(self.__class__, name, None)
        if inspect.isdatadescriptor(prop_attr):
            attr = getattr(self, name, None)
            if not isinstance(attr, BaseEntity):
                result[name] = value
    return result
def _render_member_icon(member, cell):
    pbcache = PixbufCache.getInstance()
    if inspect.ismethod(member) or inspect.ismethoddescriptor(member):
        pbname = 'dfeet-method.png'
    elif inspect.isdatadescriptor(member) or (
            hasattr(inspect, 'ismemberdescriptor') and
            inspect.ismemberdescriptor(member)):
        pbname = 'dfeet-property.png'
    else:
        pbname = 'dfeet-object.png'
    pixbuf = pbcache.get(pbname, size=16, trystock=True,
                         stocksize=gtk.ICON_SIZE_MENU)
    cell.set_property('pixbuf', pixbuf)
def copy_dict(mod):
    d = {}
    for k, v in mod.__dict__.items():
        if k.startswith("__"):
            d[k] = v
        else:
            if inspect.isroutine(v):
                d[k] = v
            elif inspect.isdatadescriptor(v):
                d[k] = v
            else:
                d[k] = copy.deepcopy(v)
    return d
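# Hypothetical usage of copy_dict() above (it assumes copy_dict() and its
# inspect/copy imports are in scope; the "math" module is used only for show):
# snapshot a module namespace, deep-copying plain values while keeping routines
# and data descriptors by reference.
import math

snapshot = copy_dict(math)
assert snapshot["pi"] == math.pi
assert snapshot["sqrt"] is math.sqrt  # routines are not copied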
def class_augment(cls, cls_cpp=cls_cpp):
    for name, member in inspect.getmembers(cls):
        if inspect.isfunction(member):
            member.__qualname__ = member.__qualname__.replace(
                cls.__name__, cls_cpp.__name__)
            setattr(cls_cpp, name, member)
        elif inspect.isdatadescriptor(member):
            setattr(cls_cpp, name, member)

    def block_init(self):
        # Prevent initialization of the support class
        raise NotImplementedError(self.__class__.__name__ + '.__init__')

    cls.__init__ = block_init
    return cls
def wrap(cls):
    if not is_dataclass(cls):
        raise TypeError('dataslots can be used only with dataclass')
    cls_dict = dict(cls.__dict__)
    if '__slots__' in cls_dict:
        raise TypeError('do not define __slots__ if dataslots decorator is used')

    # Create only missing slots
    inherited_slots = set().union(*(getattr(c, '__slots__', set()) for c in cls.mro()))
    mro_dict = ChainMap(*(getattr(c, '__dict__', {}) for c in cls.mro()))

    # Create slots list + space for data descriptors
    field_names = set()
    for f in fields(cls):
        if isinstance(mro_dict.get(f.name), DataDescriptor):
            field_names.add(mro_dict[f.name].slot_name)
        elif not isdatadescriptor(mro_dict.get(f.name)):
            field_names.add(f.name)

    if add_dict:
        field_names.add('__dict__')
    if add_weakref:
        field_names.add('__weakref__')

    cls_dict['__slots__'] = tuple(field_names - inherited_slots)

    # Erase field names from class __dict__
    for f in field_names:
        cls_dict.pop(f, None)

    # Erase __dict__ and __weakref__
    cls_dict.pop('__dict__', None)
    cls_dict.pop('__weakref__', None)

    # Pickle fix for frozen dataclass as mentioned in https://bugs.python.org/issue36424
    # Use only if __getstate__ and __setstate__ are not declared and frozen=True
    if all(param not in cls_dict for param in ['__getstate__', '__setstate__']) and \
            cls.__dataclass_params__.frozen:
        cls_dict['__setstate__'] = _slots_setstate

    # Prepare new class with slots
    new_cls = type(cls)(cls.__name__, cls.__bases__, cls_dict)
    new_cls.__qualname__ = getattr(cls, '__qualname__')
    return new_cls
def properties(self):
    if self._cls is None:
        return []

    analyzer = ModuleAnalyzer.for_module(self._cls.__module__)
    instance_members = set([
        attr_name for (class_name, attr_name) in analyzer.find_attr_docs().keys()
        if class_name == self._cls.__name__
    ])
    class_members = set([
        name for name, func in getattr(self._cls, '__dict__').iteritems()
        if not name.startswith('_') and (
            func is None or inspect.isdatadescriptor(func))
    ])

    return instance_members | class_members
def spilldata(msg, attrs, predicate):
    ok, attrs = pydoc._split_list(attrs, predicate)
    if ok:
        push(msg)
        for name, kind, homecls, value in ok:
            if callable(value) or inspect.isdatadescriptor(value):
                doc = pydoc.getdoc(value)
            else:
                doc = None
            push(self.docother(getattr(object, name), name, mod,
                               maxlen=70, doc=doc) + '\n')
    return attrs
def test_signature(self):
    r"""Ensure signature consistency."""
    msg = 'Inconsistent property signature.'
    self.assertTrue(
        inspect.isdatadescriptor(CharDictTokenizer.vocab_size),
        msg=msg
    )
    self.assertFalse(
        inspect.isfunction(CharDictTokenizer.vocab_size),
        msg=msg
    )
    self.assertFalse(
        inspect.ismethod(CharDictTokenizer.vocab_size),
        msg=msg
    )
def __add_endpoints(self):
    for name in self.__order__:
        if name.startswith('_'):
            continue
        func = getattr(type(self), name)
        if inspect.isdatadescriptor(func):
            continue
        bindings = getattr(func, STICKER, [])
        for binding in reversed(bindings):
            factory = binding.pop('endpoint', self.__endpoint)
            respond = getattr(self, name)
            endpoint = factory(self.service, name=name, respond=respond, **binding)
            self.push(endpoint)
def _verify_property(stub: nodes.Decorator, runtime: Any) -> Iterator[str]:
    assert stub.func.is_property
    if isinstance(runtime, property):
        return
    if inspect.isdatadescriptor(runtime):
        # It's enough like a property...
        return
    # Sometimes attributes pretend to be properties, for instance, to express that they
    # are read only. So allowlist if runtime_type matches the return type of stub.
    runtime_type = get_mypy_type_of_runtime_value(runtime)
    func_type = (stub.func.type.ret_type
                 if isinstance(stub.func.type, mypy.types.CallableType) else None)
    if (runtime_type is not None
            and func_type is not None
            and is_subtype_helper(runtime_type, func_type)):
        return
    yield "is inconsistent, cannot reconcile @property on stub with runtime object"
def __getattr__(self, nom_attr):
    """If the attribute name is not found, look it up on the prototype."""
    try:
        attribut = getattr(type(self.prototype), nom_attr)
        if inspect.isdatadescriptor(attribut):
            return attribut.fget(self)
        elif callable(attribut):
            methode = attribut
        else:
            raise AttributeError()
        return MethodeObjet(methode, self)
    except AttributeError:
        return getattr(self.prototype, nom_attr)