def _patch_NeuroTools(target):
    # Stubs accept the bound target plus arbitrary arguments so they can be
    # attached with types.MethodType and called like the real API.
    def StGen(target, *args, **kwargs):
        pass

    def StandardPickleFile(target, *args, **kwargs):
        pass

    def signals(target, *args, **kwargs):
        pass

    def get_display(target, *args, **kwargs):
        pass

    def set_labels(target, *args, **kwargs):
        pass

    def set_axis_limits(target, *args, **kwargs):
        pass

    target.StGen = types.MethodType(StGen, target)
    target.StandardPickleFile = types.MethodType(StandardPickleFile, target)
    target.signals = types.MethodType(signals, target)
    target.ConductanceList = types.ClassType('ConductanceList', (), {})
    target.CurrentList = types.ClassType('CurrentList', (), {})
    target.VmList = types.ClassType('VmList', (), {})
    target.SpikeList = types.ClassType('SpikeList', (), {})
    target.get_display = types.MethodType(get_display, target)
    target.set_labels = types.MethodType(set_labels, target)
    target.set_axis_limits = types.MethodType(set_axis_limits, target)
def __new__(cls, name, bases, attrs):
    # If this isn't a subclass of Model, don't do anything special.
    try:
        parents = [b for b in bases if issubclass(b, Model)]
        if not parents:
            return super(ModelBase, cls).__new__(cls, name, bases, attrs)
    except NameError:
        # 'Model' isn't defined yet, meaning we're looking at Django's own
        # Model class, defined below.
        return super(ModelBase, cls).__new__(cls, name, bases, attrs)

    # Create the class.
    new_class = type.__new__(cls, name, bases,
                             {'__module__': attrs.pop('__module__')})
    new_class.add_to_class('_meta', Options(attrs.pop('Meta', None)))
    new_class.add_to_class('DoesNotExist',
                           types.ClassType('DoesNotExist', (ObjectDoesNotExist,), {}))
    new_class.add_to_class('MultipleObjectsReturned',
                           types.ClassType('MultipleObjectsReturned', (MultipleObjectsReturned,), {}))

    # Build complete list of parents
    for base in parents:
        # Things without _meta aren't functional models, so they're
        # uninteresting parents.
        if hasattr(base, '_meta'):
            new_class._meta.parents.append(base)
            new_class._meta.parents.extend(base._meta.parents)

    if getattr(new_class._meta, 'app_label', None) is None:
        # Figure out the app_label by looking one level up.
        # For 'django.contrib.sites.models', this would be 'sites'.
        model_module = sys.modules[new_class.__module__]
        new_class._meta.app_label = model_module.__name__.split('.')[-2]

    # Bail out early if we have already created this class.
    m = get_model(new_class._meta.app_label, name, False)
    if m is not None:
        return m

    # Add all attributes to the class.
    for obj_name, obj in attrs.items():
        new_class.add_to_class(obj_name, obj)

    # Add Fields inherited from parents
    for parent in new_class._meta.parents:
        for field in parent._meta.fields:
            # Only add parent fields if they aren't defined for this class.
            try:
                new_class._meta.get_field(field.name)
            except FieldDoesNotExist:
                field.contribute_to_class(new_class, field.name)

    new_class._prepare()

    register_models(new_class._meta.app_label, new_class)
    # Because of the way imports happen (recursively), we may or may not be
    # the first class for this model to register with the framework. There
    # should only be one class for each model, so we must always return the
    # registered version.
    return get_model(new_class._meta.app_label, name, False)
def _deserialize_func(funcs, globalDict):
    items = pickle.loads(funcs)
    res = None
    for objType, name, data in items:
        if objType == 'func':
            codeArgs, funcArgs, updatedGlobals = pickle.loads(data)
            code = CodeType(*codeArgs)
            globalDict.update(**updatedGlobals)
            value = FunctionType(code, globalDict, *funcArgs)
        elif objType == 'mod':
            value = __import__(data)
        elif objType == 'oldclass':
            class_name, module, bases, class_dict = data
            value = typesmod.ClassType(class_name, bases,
                                       {k: _deserialize_func(v, globalDict)
                                        for k, v in class_dict.items()})
            value.__module__ = module
        elif objType == 'type':
            raise Exception('deserialize type')
        else:
            raise Exception('Unknown serialization type')
        globalDict[name] = value
        if res is None:
            res = value
    return res
def hook_class(self, cls, hook):
    # Attach a new class type with the original methods on it so that
    # super() works as expected.
    hookname = "_rootpy_{0}_OrigMethods".format(cls.__name__)
    newcls = types.ClassType(hookname, (), {})
    cls.__bases__ = (newcls,) + cls.__bases__

    # For every function-like (or property), replace `cls`'s methods
    for key, value in hook.__dict__.iteritems():
        if not isinstance(value, interesting):
            continue

        # Save the original methods onto the newcls which has been
        # injected onto our bases, so that the originals can be called with
        # super().
        orig_method = getattr(cls, key, None)
        if orig_method:
            newcls.__dict__[key] = orig_method

        newmeth = value
        if uses_super(newmeth):
            assert getattr(hook, "__rootpy_have_super_overridden", None), (
                "Hook class {0} is not decorated with @super_overridden! "
                "See the ``hook`` module to understand why this must be "
                "the case for all classes overridden with @classhook".format(hook))
            # Make super behave as though the class hierarchy is what we'd
            # like.
            newsuper = self.overridden_super(hook, cls)
            newmeth = inject_closure_values(value, super=newsuper)

        setattr(cls, key, newmeth)
def set_meta(mcs, bases, attr):
    """
    Get all of the ``Meta`` classes from bases and combine them with this
    class.

    Pops or creates ``Meta`` from attributes, combines all bases, and adds
    ``_meta`` to the attributes with all combined meta options.

    :param bases: bases of this class
    :param attr: class attributes
    :return: attributes with ``Meta`` class combined from parents
    """
    # pop the meta class from the attributes
    meta = attr.pop(mcs._meta_cls, types.ClassType(mcs._meta_cls, (), {}))
    # get a list of the meta public class attributes
    meta_attrs = get_public_attributes(meta)
    # check all bases for meta
    for base in bases:
        base_meta = getattr(base, mcs._meta_cls, None)
        # skip if base has no meta
        if base_meta is None:
            continue
        # loop over base meta
        for a in get_public_attributes(base_meta, as_list=False):
            # skip if already in meta
            if a in meta_attrs:
                continue
            # copy meta-option attribute from base
            setattr(meta, a, getattr(base_meta, a))

    # set _meta combined from bases
    attr[mcs._meta_attr] = meta
    return attr
def extension_add_event(self, code, evt, name=None):
    """extension_add_event(code, evt, [name])

    Add an extension event.  CODE is the numeric code, and EVT is the
    event class.  EVT will be cloned, and the attribute _code of the new
    event class will be set to CODE.

    If NAME is omitted, it will be set to the name of EVT.  This name is
    used to insert an entry in the DictWrapper extension_event.
    """
    if hasattr(types, 'ClassType'):
        # Python 2: clone the (possibly old-style) event class.
        newevt = types.ClassType(evt.__name__, evt.__bases__,
                                 evt.__dict__.copy())
    else:
        # Python 3: type() takes the namespace directly.  types.new_class
        # would expect it via its exec_body callback, not as a dict argument.
        newevt = type(evt.__name__, evt.__bases__, dict(evt.__dict__))
    newevt._code = code

    self.display.add_extension_event(code, newevt)

    if name is None:
        name = evt.__name__

    setattr(self.extension_event, name, code)
def test_dunder_module(self):
    self.assertEqual(str.__module__, '__builtin__')

    class Foo:
        pass
    Fu = types.ClassType('Fu', (), {})
    for cls in Foo, Fu:
        self.assert_('__module__' in cls.__dict__)
        self.assertEqual(cls.__module__, __name__)
        self.assertEqual(str(cls), '%s.%s' % (__name__, cls.__name__))
        self.assert_(repr(cls).startswith('<class %s.%s at' % (__name__, cls.__name__)))
        obj = cls()
        self.assert_(str(obj).startswith('<%s.%s instance at' % (__name__, cls.__name__)))

    class Bar(object):
        pass

    class Baz(Object):
        pass
    Bang = type('Bang', (), {})
    for cls in Bar, Baz, Bang:
        self.assert_('__module__' in cls.__dict__)
        self.assertEqual(cls.__module__, __name__)
        self.assertEqual(str(cls), "<class '%s.%s'>" % (__name__, cls.__name__))
        self.assertEqual(repr(cls), "<class '%s.%s'>" % (__name__, cls.__name__))

    self.assert_(str(Bar()).startswith('<%s.Bar object at' % __name__))
    self.assert_(str(Baz()).startswith("org.python.proxies.%s$Baz" % __name__))
def extension_add_subevent(self, code, subcode, evt, name=None):
    """extension_add_subevent(code, subcode, evt, [name])

    Add an extension subevent.  CODE is the numeric code, SUBCODE is the
    sub-ID of this event that shares the code ID with other sub-events,
    and EVT is the event class.  EVT will be cloned, and the attribute
    _code of the new event class will be set to CODE.

    If NAME is omitted, it will be set to the name of EVT.  This name is
    used to insert an entry in the DictWrapper extension_event.
    """
    if hasattr(types, 'ClassType'):
        # Python 2: clone the (possibly old-style) event class.
        newevt = types.ClassType(evt.__name__, evt.__bases__,
                                 evt.__dict__.copy())
    else:
        # Python 3: type() takes the namespace directly.  types.new_class
        # would expect it via its exec_body callback, not as a dict argument.
        newevt = type(evt.__name__, evt.__bases__, dict(evt.__dict__))
    newevt._code = code

    self.display.add_extension_event(code, newevt, subcode)

    if name is None:
        name = evt.__name__

    # store subcodes as a tuple of (event code, subcode) in the
    # extension dict maintained in the display object
    setattr(self.extension_event, name, (code, subcode))
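# Illustrative sketch (not part of the original Xlib API): the two methods
# above share a clone-and-tag step, which could be factored into one helper
# that works on both Python 2 (old-style classes) and Python 3.
import types

def _clone_event_class(evt, code):
    # Copy the class namespace, dropping slots that type() would refuse to
    # re-create for classes deriving directly from object.
    ns = {k: v for k, v in evt.__dict__.items()
          if k not in ('__dict__', '__weakref__')}
    if hasattr(types, 'ClassType') and isinstance(evt, types.ClassType):
        newevt = types.ClassType(evt.__name__, evt.__bases__, ns)
    else:
        newevt = type(evt.__name__, evt.__bases__, ns)
    newevt._code = code
    return newevt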
def __new__(cls, name, bases, attrs):
    """Creates a combined appengine and Django model.

    The resulting model will be known to both the appengine libraries
    and Django.
    """
    if name == 'BaseModel':
        # This metaclass only acts on subclasses of BaseModel.
        return super(PropertiedClassWithDjango, cls).__new__(cls, name,
                                                             bases, attrs)

    new_class = super(PropertiedClassWithDjango, cls).__new__(cls, name,
                                                              bases, attrs)

    new_class._meta = ModelOptions(new_class)
    new_class.objects = ModelManager(new_class)
    new_class._default_manager = new_class.objects
    new_class.DoesNotExist = types.ClassType('DoesNotExist',
                                             (ObjectDoesNotExist,), {})

    m = get_model(new_class._meta.app_label, name, False)
    if m:
        return m

    register_models(new_class._meta.app_label, new_class)
    return get_model(new_class._meta.app_label, name, False)
def table_description(classname, nclassname, shape=()):
    """
    Return a table description for testing queries.

    The description consists of all PyTables data types, both in the top
    level and in the ``c_nested`` nested column.  A column of a certain
    TYPE gets called ``c_TYPE``.  An extra integer column ``c_extra`` is
    also provided.  If a `shape` is given, it will be used for all
    columns.  Finally, an extra indexed column ``c_idxextra`` is added as
    well in order to provide some basic tests for multi-index queries.
    """
    classdict = {}
    colpos = append_columns(classdict, shape)

    ndescr = nested_description(nclassname, colpos, shape=shape)
    classdict['c_nested'] = ndescr
    colpos += 1

    extracol = tables.IntCol(shape=shape, pos=colpos)
    classdict['c_extra'] = extracol
    colpos += 1

    idxextracol = tables.IntCol(shape=shape, pos=colpos)
    classdict['c_idxextra'] = idxextracol
    colpos += 1

    return types.ClassType(classname, (tables.IsDescription,), classdict)
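# Hedged usage sketch: a dynamically built IsDescription subclass can be
# handed to PyTables when creating a table.  The file and node names below
# are illustrative only, and tables.open_file/create_table are the
# PyTables >= 3.0 spellings (earlier releases used camelCase names).
import tables

TestDescr = table_description('TestDescr', 'NestedDescr')
with tables.open_file('query_test.h5', mode='w') as h5f:
    h5f.create_table('/', 'test', TestDescr, title='query test table')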
def _patch_mpi4py(target):
    class COMM_WORLD():
        size = 1

    def barrier(target, *args, **kwargs):
        pass

    def bcast(target, *args, **kwargs):
        pass

    d = {'size': 1,
         'rank': 0,
         'bcast': types.MethodType(bcast, target),
         'barrier': types.MethodType(barrier, target)}
    dd = {'COMM_WORLD': types.ClassType('COMM_WORLD', (), d)}
    target.MPI = types.ClassType('MPI', (), dd)
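# Minimal sketch of how such a patch helper might be exercised in a test:
# stub out a module object so code importing mpi4py sees a single-rank
# COMM_WORLD.  The module name and assertions here are illustrative only.
import imp

fake_mpi4py = imp.new_module('mpi4py')
_patch_mpi4py(fake_mpi4py)
assert fake_mpi4py.MPI.COMM_WORLD.rank == 0
assert fake_mpi4py.MPI.COMM_WORLD.size == 1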
def nested_description(classname, pos, shape=()):
    """
    Return a nested column description with all PyTables data types.

    A column of a certain TYPE gets called ``c_TYPE``.  The nested column
    will be placed in the position indicated by `pos`.
    """
    classdict = {}
    append_columns(classdict, shape=shape)
    classdict['_v_pos'] = pos
    return types.ClassType(classname, (tables.IsDescription,), classdict)
def __new__(cls, name, bases, attrs):
    # If this isn't a subclass of Model, don't do anything special.
    if name == 'Model' or not filter(lambda b: issubclass(b, Model), bases):
        return super(ModelBase, cls).__new__(cls, name, bases, attrs)

    # Create the class.
    new_class = type.__new__(cls, name, bases,
                             {'__module__': attrs.pop('__module__')})
    new_class.add_to_class('_meta', Options(attrs.pop('Meta', None)))
    new_class.add_to_class('DoesNotExist',
                           types.ClassType('DoesNotExist', (ObjectDoesNotExist,), {}))

    # Build complete list of parents
    for base in bases:
        # TODO: Checking for the presence of '_meta' is hackish.
        if '_meta' in dir(base):
            new_class._meta.parents.append(base)
            new_class._meta.parents.extend(base._meta.parents)

    model_module = sys.modules[new_class.__module__]

    if getattr(new_class._meta, 'app_label', None) is None:
        # Figure out the app_label by looking one level up.
        # For 'django.contrib.sites.models', this would be 'sites'.
        new_class._meta.app_label = model_module.__name__.split('.')[-2]

    # Bail out early if we have already created this class.
    m = get_model(new_class._meta.app_label, name, False)
    if m is not None:
        return m

    # Add all attributes to the class.
    for obj_name, obj in attrs.items():
        new_class.add_to_class(obj_name, obj)

    # Add Fields inherited from parents
    for parent in new_class._meta.parents:
        for field in parent._meta.fields:
            # Only add parent fields if they aren't defined for this class.
            try:
                new_class._meta.get_field(field.name)
            except FieldDoesNotExist:
                field.contribute_to_class(new_class, field.name)

    new_class._prepare()

    register_models(new_class._meta.app_label, new_class)
    # Because of the way imports happen (recursively), we may or may not be
    # the first class for this model to register with the framework. There
    # should only be one class for each model, so we must always return the
    # registered version.
    return get_model(new_class._meta.app_label, name, False)
def test_newstyle_new_classobj(self):
    # Ensure types.ClassType (new.classobj) can create new-style classes
    # when given a new-style base.
    class Foo(object):
        pass

    def hello(self):
        return 'hello'

    Bar = types.ClassType('Bar', (Foo,), dict(hello=hello))
    self.assertEqual(type(Bar), type)
    self.assert_(issubclass(Bar, Foo))
    self.assert_(hasattr(Bar, 'hello'))
    self.assertEquals(Bar().hello(), 'hello')
def _create_NativeException(cliClass):
    from rpython.translator.cli.support import getattr_ex
    TYPE = cliClass._INSTANCE
    if PythonNet.__name__ in ('CLR', 'clr'):
        # we are using pythonnet -- use the .NET class
        name = '%s.%s' % (TYPE._namespace, TYPE._classname)
        res = getattr_ex(PythonNet, name)
    else:
        # we are not using pythonnet -- create a fake class
        res = types.ClassType(TYPE._classname, (Exception,), {})
    res._rpython_hints = {'NATIVE_INSTANCE': TYPE}
    return res
def watchObject(self, object, identifier, callback):
    """Watch the given object.

    Whenever I think the object might have changed, I'll send an
    ObjectLink of it to the callback.

    The identifier argument is used to generate identifiers for objects
    which are members of this one.
    """
    if type(object) is not types.InstanceType:
        raise TypeError, "Sorry, can only place a watch on Instances."

    # uninstallers = []

    dct = {}
    reflect.addMethodNamesToDict(object.__class__, dct, '')
    for k in object.__dict__.keys():
        dct[k] = 1
    members = dct.keys()

    clazzNS = {}
    clazz = types.ClassType(
        'Watching%s%X' % (object.__class__.__name__, id(object)),
        (_MonkeysSetattrMixin, object.__class__,),
        clazzNS)

    clazzNS['_watchEmitChanged'] = types.MethodType(
        lambda slf, i=identifier, b=self, cb=callback: cb(b.browseObject(slf, i)),
        None, clazz)

    # orig_class = object.__class__
    object.__class__ = clazz

    for name in members:
        m = getattr(object, name)
        # Only hook bound methods.
        if ((type(m) is types.MethodType)
                and (m.im_self is not None)):
            # What's the use of putting watch monkeys on methods
            # in addition to __setattr__?  Well, um, uh, if the
            # methods modify their attributes (i.e. add a key to
            # a dictionary) instead of [re]setting them, then
            # we wouldn't know about it unless we did this.
            # (Is that convincing?)

            monkey = _WatchMonkey(object)
            monkey.install(name)
def __get__(self, instance, model=None):
    if instance != None:
        raise AttributeError, "Manipulator cannot be accessed via instance"
    else:
        if not self.man:
            # Create a class that inherits from the "Manipulator" class
            # given in the model class (if specified) and the automatic
            # manipulator.
            bases = [self.base]
            if hasattr(model, 'Manipulator'):
                bases = [model.Manipulator] + bases
            self.man = types.ClassType(self.name, tuple(bases), {})
            self.man._prepare(model)
        return self.man
def NewTransmissionClass(table):
    cls = types.ClassType(table.TableName.encode("utf-8"),
                          (TransmissionClassBase,), {})
    cls.__storm_table__ = table.TableName
    cls.ID = Int(primary=True)
    cls.TransmissionID = Int()
    cls.Transmission = Reference(cls.TransmissionID, Transmission.ID)
    cls.Order = Int()
    cls.fields = []
    cls.TransmissionTable = table

    for field in table.Fields:
        cls.fields.append(field)
        setattr(cls, field.FieldName, Unicode())

    return cls
def makeSQLTests(base, suffix, globals):
    """
    Make a test case for every db connector which can connect.

    @param base: Base class for test case. Additional base classes will be a
        DBConnector subclass and unittest.TestCase.

    @param suffix: A suffix used to create test case names. Prefixes are
        defined in the DBConnector subclasses.
    """
    connectors = [GadflyConnector, SQLiteConnector, PyPgSQLConnector,
                  PsycopgConnector, MySQLConnector, FirebirdConnector]
    for connclass in connectors:
        name = connclass.TEST_PREFIX + suffix
        klass = types.ClassType(name, (connclass, base, unittest.TestCase),
                                base.__dict__)
        globals[name] = klass
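# Hypothetical usage sketch: called at module import time, the factory
# publishes one concrete TestCase per connector (named TEST_PREFIX + suffix)
# into the module namespace so the test runner discovers them.  The base
# class and its test body below are illustrative only.
class SharedSQLTestBase:
    def test_generated_name(self):
        # Each generated class is named <connector TEST_PREFIX> + suffix.
        self.assertTrue(type(self).__name__.endswith('SQLTestCase'))

makeSQLTests(SharedSQLTestBase, 'SQLTestCase', globals())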
def buildProtocol(self, addr):
    # here comes the EVIL
    obj = self.userAuthObject.instance
    bases = []
    for base in obj.__class__.__bases__:
        if base == connection.SSHConnection:
            bases.append(SSHUnixClientProtocol)
        else:
            bases.append(base)
    newClass = types.ClassType(obj.__class__.__name__, tuple(bases),
                               obj.__class__.__dict__)
    obj.__class__ = newClass
    SSHUnixClientProtocol.__init__(obj)
    log.msg('returning %s' % obj)
    if self.d:
        d = self.d
        self.d = None
        d.callback(None)
    return obj
def custom(klassName, klass='unknown'):
    """Define a custom control.

    klassName: name of the new XRC object class.
    klass: name of an existing XRC object class to copy, or a class
        object defining the class parameters.
    """
    if type(klass) is str:
        # Copy correct xxx class under new name
        kl = xxxDict[klass]
        xxxClass = types.ClassType('xxx' + klassName, kl.__bases__,
                                   kl.__dict__)
    else:
        xxxClass = klass

    # Register param IDs (use xxxClass: klass may be just a name string)
    for param in xxxClass.allParams + xxxClass.paramDict.keys():
        if not paramIDs.has_key(param):
            paramIDs[param] = wx.NewId()

    # Insert in dictionary
    xxxDict[klassName] = xxxClass

    # Add to menu
    g.pullDownMenu.addCustom(klassName)
def load_local_class(bytes):
    t = loads(bytes)
    if not isinstance(t, tuple):
        return classes_loaded[t]

    name, bases, internal, external = t
    if name in classes_loaded:
        return classes_loaded[name]

    if any(isinstance(base, type) for base in bases):
        cls = type(name, bases, internal)
    else:
        assert six.PY2
        cls = types.ClassType(name, bases, internal)
    classes_loaded[name] = cls

    external = loads(external)
    for k, v in external.items():
        if isinstance(k, tuple):
            t, k = k
            if t == 'property':
                fget, fset, fdel, doc = v
                v = property(fget, fset, fdel, doc)
            if t == 'staticmethod':
                v = load_closure(v)
                v = staticmethod(v)
            if t == 'classmethod':
                v = load_closure(v)
                v = classmethod(v)
            if t == 'method':
                v = load_closure(v)
        setattr(cls, k, v)
    return cls
def decorate_row(cls, row, name, bases, cls_attr):
    # store a backref to the container dataset
    row._dataset = cls

    # bind a ref method
    row.ref = Ref(cls, row)

    # fix inherited primary keys
    names_to_uninherit = []
    for name in dir(row):
        if name in cls_attr['_primary_key']:
            if name not in row.__dict__:
                # then this was an inherited value, so we need to nullify it
                # without 1) disturbing the other inherited values and 2)
                # disturbing the inherited class.  is this nuts?
                names_to_uninherit.append(name)
    bases_to_replace = []
    if names_to_uninherit:
        base_pos = 0
        for c in row.__bases__:
            for name in names_to_uninherit:
                if name in c.__dict__:
                    bases_to_replace.append((c, base_pos))
                    # just need to detect one attribute...
                    break
            base_pos += 1
    new_bases = [b for b in row.__bases__]
    for base_c, base_pos in bases_to_replace:
        # this may not work if the row's base was a new-style class
        new_base = types.ClassType(
            base_c.__name__, base_c.__bases__,
            dict([(k, getattr(base_c, k)) for k in dir(base_c)
                  if not k.startswith('_') and
                  k not in names_to_uninherit]))
        new_bases[base_pos] = new_base
    if new_bases:
        row.__bases__ = tuple(new_bases)
def _patch_nest(target):
    def sr(target, *args, **kwargs):
        pass

    def version(target, *args, **kwargs):
        return 'NEST 2.2.2'

    def Models(target, *args, **kwargs):
        return []

    def Install(target, *args, **kwargs):
        pass

    def GetDefaults(target, *args, **kwargs):
        return {'receptor_types': {'AMPA_1': 0, 'AMPA_2': 0,
                                   'NMDA_1': 0, 'NMDA_2': 0,
                                   'GABAA_1': 0, 'GABAA_2': 0, 'GABAA_3': 0}}

    def GetKernelStatus(target, *args, **kwargs):
        return 1

    target.GetKernelStatus = types.MethodType(GetKernelStatus, target)
    target.sr = types.MethodType(sr, target)
    target.version = types.MethodType(version, target)
    target.Models = types.MethodType(Models, target)
    target.Install = types.MethodType(Install, target)
    target.GetDefaults = types.MethodType(GetDefaults, target)

    d = {'pushsli': None, 'runsli': None}
    target.pynestkernel = types.ClassType('pynestkernel', (), d)
class CEDICTBuilderTest(TableBuilderTest, unittest.TestCase):
    BUILDER = builder.CEDICTBuilder
    OPTIONS = [{'enableFTS3': False},
               {'filePath': './test/downloads/CEDICT', 'fileType': '.gz'}]


class CEDICTGRBuilderTest(TableBuilderTest, unittest.TestCase):
    BUILDER = builder.CEDICTGRBuilder
    OPTIONS = [{'enableFTS3': False},
               {'filePath': './test/downloads/CEDICTGR', 'fileType': '.zip'}]


class HanDeDictBuilderTest(TableBuilderTest, unittest.TestCase):
    BUILDER = builder.HanDeDictBuilder
    OPTIONS = [{'enableFTS3': False},
               {'filePath': './test/downloads/HanDeDict', 'fileType': '.tar.bz2'}]


class CFDICTBuilderTest(TableBuilderTest, unittest.TestCase):
    BUILDER = builder.CFDICTBuilder
    OPTIONS = [{'enableFTS3': False},
               {'filePath': './test/downloads/CFDICT', 'fileType': '.zip'}]


# Generate default test classes for TableBuilder without special definitions
for builderClass in DatabaseBuilder.getTableBuilderClasses(
        resolveConflicts=False):
    testClassName = '%sTest' % builderClass.__name__
    if testClassName not in globals():
        globals()[testClassName] = types.ClassType(testClassName,
            (TableBuilderTest, unittest.TestCase), {'BUILDER': builderClass})
    del testClassName
def decorate_row(cls, row, name, bases, cls_attr):
    """Each row (an inner class) assigned to a :class:`DataSet` will be
    customized after it is created.

    This is because it's easier to type::

        class MyData(DataSet):
            class foo:
                col1 = "bz"
                col2 = "bx"

    ... than it is to type::

        class MyData(DataSet):
            class foo(Row):
                col1 = "bz"
                col2 = "bx"

    (Note the subclassing that would be required in inner classes without
    this behavior.)

    But more importantly, rows must be able to inherit from other rows,
    like::

        class MyData(DataSet):
            class joe:
                first_name = "Joe"
                last_name = "Phelps"
            class joe_gibbs(joe):
                last_name = "Gibbs"

    Here is what happens to each inner class object as it is assigned to a
    :class:`DataSet`:

    1. A ``Row._dataset`` property is added which is a reference to the
       :class:`DataSet` instance.
    2. A ``Row.ref()`` property (instance of :class:`Ref`) is added.
    3. Any database primary key inherited from another Row is de-referenced
       since primary keys must be unique per row.  See
       :ref:`Using Dataset <using-dataset>` for an example of referencing
       primary key values that may or may not exist yet.

    """
    # store a backref to the container dataset
    row._dataset = cls

    # bind a ref method
    row.ref = Ref(cls, row)

    # fix inherited primary keys
    names_to_uninherit = []
    for name in dir(row):
        if name in cls_attr['_primary_key']:
            if name not in row.__dict__:
                # then this was an inherited value, so we need to nullify it
                # without 1) disturbing the other inherited values and 2)
                # disturbing the inherited class.  is this nuts?
                names_to_uninherit.append(name)
    bases_to_replace = []
    if names_to_uninherit:
        base_pos = 0
        for c in row.__bases__:
            for name in names_to_uninherit:
                if name in c.__dict__:
                    bases_to_replace.append((c, base_pos))
                    # just need to detect one attribute...
                    break
            base_pos += 1
    new_bases = [b for b in row.__bases__]
    for base_c, base_pos in bases_to_replace:
        # this may not work if the row's base was a new-style class
        new_base = types.ClassType(
            base_c.__name__, base_c.__bases__,
            dict([(k, getattr(base_c, k)) for k in dir(base_c)
                  if not k.startswith('_') and
                  k not in names_to_uninherit]))
        new_bases[base_pos] = new_base
    if new_bases:
        row.__bases__ = tuple(new_bases)
from celery.utils.serialization import subclass_exception
from celery.utils.serialization import find_pickleable_exception as fnpe
from celery.utils.serialization import UnpickleableExceptionWrapper
from celery.utils.serialization import get_pickleable_exception as gpe


class wrapobject(object):

    def __init__(self, *args, **kwargs):
        self.args = args


if sys.version_info[0] == 3 or getattr(sys, 'pypy_version_info', None):
    Oldstyle = None
else:
    Oldstyle = types.ClassType(bytes_if_py2('Oldstyle'), (), {})

Unpickleable = subclass_exception(
    bytes_if_py2('Unpickleable'), KeyError, 'foo.module',
)
Impossible = subclass_exception(
    bytes_if_py2('Impossible'), object, 'foo.module',
)
Lookalike = subclass_exception(
    bytes_if_py2('Lookalike'), wrapobject, 'foo.module',
)


class test_nulldict:

    def test_nulldict(self):
        x = _nulldict()
def construct_deprecated(name, alt):
    doc = """Deprecated alias for :class:`%s`""" % alt.__name__
    cls = types.ClassType(name, (Deprecated, alt),
                          dict(cur=name, alt=alt, __doc__=doc))
    return cls
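# Illustrative only: ``Widget`` is a stand-in current class; ``Deprecated``
# is the module's existing mixin referenced by construct_deprecated().
class Widget(object):
    """Current implementation."""

OldWidget = construct_deprecated('OldWidget', Widget)
assert issubclass(OldWidget, Widget)
assert OldWidget.alt is Widget
assert OldWidget.__doc__ == 'Deprecated alias for :class:`Widget`'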
def subclass_exception(name, parent, unused):
    return types.ClassType(name, (parent,), {})
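# Illustrative sketch: the generated class is a normal exception subclass,
# so it can be raised and caught via the chosen parent.  (The third argument
# is unused in this variant; other versions use it to set __module__.)
LookupTimeout = subclass_exception('LookupTimeout', KeyError, 'myapp.errors')

try:
    raise LookupTimeout('lookup timed out')
except KeyError as exc:
    assert type(exc).__name__ == 'LookupTimeout'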
)
from celery.result import result_from_tuple
from celery.utils import uuid

from celery.tests.case import AppCase, Mock, SkipTest, patch


class wrapobject(object):

    def __init__(self, *args, **kwargs):
        self.args = args


if sys.version_info[0] == 3 or getattr(sys, 'pypy_version_info', None):
    Oldstyle = None
else:
    Oldstyle = types.ClassType('Oldstyle', (), {})

Unpickleable = subclass_exception('Unpickleable', KeyError, 'foo.module')
Impossible = subclass_exception('Impossible', object, 'foo.module')
Lookalike = subclass_exception('Lookalike', wrapobject, 'foo.module')


class test_serialization(AppCase):

    def test_create_exception_cls(self):
        self.assertTrue(serialization.create_exception_cls('FooError', 'm'))
        self.assertTrue(
            serialization.create_exception_cls('FooError', 'm', KeyError))


class test_BaseBackend_interface(AppCase):

    def setup(self):
        self.b = BaseBackend(self.app)