Example #1
def _registerEnumPicklers():
    from copy_reg import constructor, pickle
    def reduce_enum(e):
        enum = type(e).__name__.split('.')[-1]
        return (_tuple2enum, (enum, int(e)))
    constructor(_tuple2enum)
    pickle(openravepy_int.IkParameterizationType, reduce_enum)
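
The _tuple2enum helper used by reduce_enum above is defined elsewhere in the OpenRAVE bindings and is not shown here. A minimal sketch of what such a reconstruction callable could look like, under the assumption that the enum class is reachable as an attribute of openravepy_int and exposes the usual Boost.Python values dict:

# Hypothetical sketch only, not the OpenRAVE implementation: a module-level
# callable that maps (enum class name, integer value) back to the enum member.
def _tuple2enum(enum_name, value):
    enum_cls = getattr(openravepy_int, enum_name)  # assumed attribute lookup
    return enum_cls.values[value]                  # Boost.Python enums expose a values dict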
Example #2
def _registerEnumPicklers():
    from copy_reg import constructor, pickle
    def reduce_enum(e):
        enum = type(e).__name__.split('.')[-1]
        return (_tuple2enum, (enum, int(e)))
    constructor(_tuple2enum)
    pickle(openravepy_int.IkParameterizationType, reduce_enum)
Example #3
    def test_constructor(self):
        # a callable argument is accepted
        copy_reg.constructor(testclass)

        # a non-callable argument raises TypeError
        self.assertRaises(TypeError, copy_reg.constructor, 0)
        self.assertRaises(TypeError, copy_reg.constructor, "Hello")
        self.assertRaises(TypeError, copy_reg.constructor, True)
Example #4
def test_constructor():
    # a callable argument is accepted
    copy_reg.constructor(testclass)

    # a non-callable argument raises TypeError
    AssertError(TypeError, copy_reg.constructor, 0)
    AssertError(TypeError, copy_reg.constructor, "Hello")
    AssertError(TypeError, copy_reg.constructor, True)
Example #5
def _registerEnumPicklers():
    from copy_reg import constructor, pickle
    def reduce_enum(e):
        enum = type(e).__name__.split('.')[-1]
        return (_tuple2enum, (enum, int(e)))
    constructor(_tuple2enum)
    for e in [e for e in vars(ctimb).itervalues() if isEnumType(e)]:
        pickle(e, reduce_enum)
Example #6
    def test_constructor(self):
        # a callable argument is accepted
        copy_reg.constructor(testclass)

        # a non-callable argument raises TypeError
        self.assertRaises(TypeError, copy_reg.constructor, 0)
        self.assertRaises(TypeError, copy_reg.constructor, "Hello")
        self.assertRaises(TypeError, copy_reg.constructor, True)
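
The testclass referenced by these tests is simply some callable defined elsewhere in the test module; a hypothetical stand-in, added only so the snippet reads as self-contained:

# Hypothetical stand-in for the testclass used above; copy_reg.constructor()
# accepts any callable and raises TypeError for anything else.
class testclass(object):
    pass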
Example #7
def _registerEnumPicklers():
    from copy_reg import constructor, pickle
    def reduce_enum(e):
        enum = type(e).__name__.split('.')[-1]
        return (_tuple2enum, (enum, int(e)))
    constructor(_tuple2enum)
    for e in [e for e in vars(libplanner_interfaces_python).itervalues() if _isEnumType(e)]:
        pickle(e, reduce_enum)
Example #8
def test_constructor():
    # a callable argument is accepted
    copy_reg.constructor(testclass)

    # a non-callable argument raises TypeError
    AssertError(TypeError, copy_reg.constructor, 0)
    AssertError(TypeError, copy_reg.constructor, "Hello")
    AssertError(TypeError, copy_reg.constructor, True)
Example #9
from copy_reg import pickle, constructor
from sets import Set as _Set, ImmutableSet as _ImmutableSet


def Set(*args):
    return _Set(*args)


def ImmutableSet(*args):
    return _ImmutableSet(*args)


constructor(Set)
constructor(ImmutableSet)
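
constructor() only marks Set and ImmutableSet as safe reconstructors; wiring them to a reduce function is left to the caller. A sketch of what that wiring might look like (the reduce function below is an illustration, not part of the original module):

# Illustration only: a reduce function that routes unpickling of _Set
# instances through the Set() wrapper registered above.
def _reduce_set(s):
    return (Set, (list(s),))

pickle(_Set, _reduce_set)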
Example #10
if needsGlobal:
    # change dl flags to load dictionaries from pre-linked .so's
    dlflags = sys.getdlopenflags()
    sys.setdlopenflags(0x100 | 0x2)  # RTLD_GLOBAL | RTLD_NOW

import libPyROOT as _root

# reset dl flags if needed
if needsGlobal:
    sys.setdlopenflags(dlflags)
del needsGlobal

## convince 2.2 it's ok to use the expand function
if sys.version[0:3] == '2.2':
    import copy_reg
    copy_reg.constructor(_root._ObjectProxy__expand__)

## convince inspect that PyROOT method proxies are possible drop-ins for python
## methods and classes for pydoc
import inspect

inspect._old_isfunction = inspect.isfunction


def isfunction(object):
    if type(object) == _root.MethodProxy and not object.im_class:
        return True
    return inspect._old_isfunction(object)


inspect.isfunction = isfunction
Example #11
        } else {
            if(CudaNdarray_CopyFromCudaNdarray(%(oname)s, %(iname)s)) {
                PyErr_SetString(PyExc_ValueError,
            "DeepCopyOp: the copy failed into already allocated space!");
                %(fail)s;
            }
        }
        """,
                                            version=3)


# THIS WORKS But CudaNdarray instances don't compare equal to one
# another, and what about __hash__ ?  So the unpickled version doesn't
# equal the pickled version, and the cmodule cache is not happy with
# the situation.
def CudaNdarray_unpickler(npa):
    return cuda.CudaNdarray(npa)


copy_reg.constructor(CudaNdarray_unpickler)


def CudaNdarray_pickler(cnda):
    return (CudaNdarray_unpickler, (numpy.asarray(cnda), ))


# In case cuda is not imported.
if cuda is not None:
    copy_reg.pickle(cuda.CudaNdarray, CudaNdarray_pickler,
                    CudaNdarray_unpickler)
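
The same constructor/pickler pairing can be exercised without a GPU by applying it to an ordinary class; a minimal, self-contained sketch in which the Box names are invented, not Theano's:

# Self-contained sketch of the copy_reg pattern used above, with a plain
# Python class standing in for CudaNdarray so it runs without CUDA.
import copy_reg
import cPickle

class Box(object):
    def __init__(self, payload):
        self.payload = payload

def Box_unpickler(payload):
    return Box(payload)

copy_reg.constructor(Box_unpickler)

def Box_pickler(box):
    return (Box_unpickler, (box.payload,))

copy_reg.pickle(Box, Box_pickler, Box_unpickler)

restored = cPickle.loads(cPickle.dumps(Box([1, 2, 3])))
assert restored.payload == [1, 2, 3]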
Example #12
            'Coconut uses Python 3 "input" instead of Python 2 "raw_input"')

    def xrange(*args):
        """Coconut uses Python 3 "range" instead of Python 2 "xrange"."""
        raise _coconut.NameError(
            'Coconut uses Python 3 "range" instead of Python 2 "xrange"')

    if _coconut_sys.version_info < (2, 7):
        import functools as _coconut_functools, copy_reg as _coconut_copy_reg

        def _coconut_new_partial(func, args, keywords):
            return _coconut_functools.partial(
                func, *(args if args is not None else ()),
                **(keywords if keywords is not None else {}))

        _coconut_copy_reg.constructor(_coconut_new_partial)

        def _coconut_reduce_partial(self):
            return (_coconut_new_partial, (self.func, self.args,
                                           self.keywords))

        _coconut_copy_reg.pickle(_coconut_functools.partial,
                                 _coconut_reduce_partial)
else:
    py_chr, py_filter, py_hex, py_input, py_int, py_map, py_oct, py_open, py_print, py_range, py_str, py_zip, py_filter, py_reversed, py_enumerate = chr, filter, hex, input, int, map, oct, open, print, range, str, zip, filter, reversed, enumerate


class _coconut(object):
    import collections, functools, imp, itertools, operator, types, copy, pickle
    if _coconut_sys.version_info < (3, 3):
        abc = collections
Example #13
import copy_reg
import lxml.etree
import lxml.objectify


def treeFactory(state):
    """Un-Pickle factory."""
    try:
        return lxml.objectify.fromstring(state)
    except Exception as e:
        return lxml.objectify.fromstring(
            '<error><!-- XML ERROR: %s\n\n%s\n\n--></error>' % (e, state))


copy_reg.constructor(treeFactory)


def reduceObjectifiedElement(object):
    """Reduce function for lxml.objectify trees.
    See http://docs.python.org/lib/pickle-protocol.html for details.
    """
    state = lxml.etree.tostring(object.getroottree())
    return (treeFactory, (state, ))


copy_reg.pickle(lxml.objectify.ObjectifiedElement, reduceObjectifiedElement,
                treeFactory)
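
Assuming lxml is installed, the registration above makes objectify trees round-trip through pickle; a short usage check (the sample XML is arbitrary):

# Usage sketch: round-trip an objectify tree through pickle.
import pickle

tree = lxml.objectify.fromstring('<root><value>42</value></root>')
clone = pickle.loads(pickle.dumps(tree))
assert clone.value == 42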
Example #14
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE
#
##############################################################################
"""Provide access to Persistent and PersistentMapping.

$Id$
"""

from cPersistence import Persistent, GHOST, UPTODATE, CHANGED, STICKY
from cPickleCache import PickleCache

from cPersistence import simple_new
import copy_reg
copy_reg.constructor(simple_new)

# Make an interface declaration for Persistent,
# if zope.interface is available.
try:
    from zope.interface import classImplements
except ImportError:
    pass
else:
    from persistent.interfaces import IPersistent
    classImplements(Persistent, IPersistent)
Example #15
def reconstruct_hmm(matrices, training_data):
    # print "Matrices:",matrices
    # print "Training data:", training_data[:5]
    # import sys;sys.stdout.flush()
    data = [flatten_to_emission(d) for d in training_data]
    data = ghmm.SequenceSet(ghmm.Float(), data)
    return HMM(hmm_obj=None,
               training_data=data,
               matrices=matrices,
               hmm_type='ghmm')


def reduce_hmm(hmm):
    assert isinstance(hmm, HMM)
    # print "Multivariate?", hmm.multivariate
    # print "NESTEEEED"#, nest(hmm.training_data)
    data = hmm.hmm_object.obs
    assert isinstance(data, ghmm.SequenceSet)
    if hmm.multivariate:
        data = [nest(d) for d in data]
    else:
        data = [list(d) for d in data]
    assert not isinstance(data, ghmm.EmissionSequence)
    assert not isinstance(data, ghmm.SequenceSet)
    return (reconstruct_hmm, (hmm.hmm_object.asMatrices(),
                              data))

import copy_reg

copy_reg.constructor(reconstruct_hmm)
copy_reg.pickle(HMM, reduce_hmm)
Example #16
            "config.experimental.unpickle_shared_gpu_on_cpu is set to True."
            " Unpickling CudaNdarraySharedVariable as TensorSharedVariable."
        )
        cls = theano.tensor.sharedvar.TensorSharedVariable
        type = theano.tensor.TensorType(
            dtype=npa[1].dtype, broadcastable=npa[1].broadcastable)
        return cls(name=npa[0],
                   type=type,
                   value=numpy.asarray(npa[2].data),
                   strict=None)
    # Mimic what the normal unpickler would do.
    return CudaNdarraySharedVariable(name=npa[0],
                                     type=npa[1],
                                     value=None,
                                     strict=None,
                                     container=npa[2])

copy_reg.constructor(CudaNdarraySharedVariable_unpickler)


def CudaNdarraySharedVariable_pickler(sh_var):
    return (CudaNdarraySharedVariable_unpickler,
            (sh_var.name,
             sh_var.type,
             sh_var.container))


copy_reg.pickle(CudaNdarraySharedVariable,
                CudaNdarraySharedVariable_pickler,
                CudaNdarraySharedVariable_unpickler)
Example #17
                "unpickle_gpu_on_cpu must be also True. Otherwise, "
                "there could be aliasing problems between shared variables.")
        warnings.warn(
            "config.experimental.unpickle_shared_gpu_on_cpu is set to True."
            " Unpickling CudaNdarraySharedVariable as TensorSharedVariable.")
        cls = theano.tensor.sharedvar.TensorSharedVariable
        type = theano.tensor.TensorType(dtype=npa[1].dtype,
                                        broadcastable=npa[1].broadcastable)
        return cls(name=npa[0],
                   type=type,
                   value=numpy.asarray(npa[2].data),
                   strict=None)
    # Mimic what the normal unpickler would do.
    return CudaNdarraySharedVariable(name=npa[0],
                                     type=npa[1],
                                     value=None,
                                     strict=None,
                                     container=npa[2])


copy_reg.constructor(CudaNdarraySharedVariable_unpickler)


def CudaNdarraySharedVariable_pickler(sh_var):
    return (CudaNdarraySharedVariable_unpickler, (sh_var.name, sh_var.type,
                                                  sh_var.container))


copy_reg.pickle(CudaNdarraySharedVariable, CudaNdarraySharedVariable_pickler,
                CudaNdarraySharedVariable_unpickler)
Example #18
        flat, b = __unpack('!%dd' % s, b)
        p.addContour(tuple(__couples(flat)), isHole)
    return p


## support for pickling and unpickling


def __createPolygon(contour, hole):
    """rebuild Polygon from pickled data"""
    p = Polygon()
    map(p.addContour, contour, hole)
    return p


def __tuples(a):
    """map an array or list of lists to a tuple of tuples"""
    return tuple(map(tuple, a))


def __reducePolygon(p):
    """return pickle data for Polygon """
    return (__createPolygon, (tuple([__tuples(x) for x in p]), p.isHole()))


import copy_reg

copy_reg.constructor(__createPolygon)
copy_reg.pickle(PolygonType, __reducePolygon)
del copy_reg
Example #19
from copy_reg import pickle, constructor
from sets import Set as _Set, ImmutableSet as _ImmutableSet

def Set(*args):
  return _Set(*args)
def ImmutableSet(*args):
  return _ImmutableSet(*args)

constructor(Set)
constructor(ImmutableSet)
Example #20
else:
    # Python implementation
    from spark.internal.parse.values_python import \
         Symbol, isSymbol, Variable, isVariable, Structure, isStructure, \
         value_str, append_value_str, inverse_eval, \
         setUnpickleFunctions, VALUE_CONSTRUCTOR



################################################################
# Specify functions for unpickling types that are not standard Python
# These all have to be top level functions for pickling to work.
# For pickling to be compatible across implementations,
# these functions have to be defined in the same module across implementations.

def __Variable(x):
    return Variable(x)

def __Symbol(x):
    return Symbol(x)

def __Structure(f, args):
    return Structure(f, args)

import copy_reg
copy_reg.constructor(__Symbol)
copy_reg.constructor(__Variable)
copy_reg.constructor(__Structure)

setUnpickleFunctions(__Variable, __Symbol, __Structure)
Example #21
        parentCls = cls.__bases__[0]
        parentSlots = getClassStateSlots(parentCls)
        if cls.__stateslots__ == parentCls.__stateslots__:
            slots = parentSlots
        else:
            slots = parentSlots + cls.__stateslots__
        _CLASS_STATE_SLOTS[cls] = slots
    return slots


def _constructObject1(*statevalues):
    #print "EXTRACTING %s, args are %s"%(name, str(args))
    for (i, value) in enumerate(statevalues):
        if i == 0:
            new = value.__new__(value)
            stateslots = getClassStateSlots(value)
        else:
            setattr(new, stateslots[i], value)
    new.__restore__()
    return new


copy_reg.constructor(_constructObject1)
# def _constructObject(cls, *args):
#     #print "EXTRACTING %s, args are %s"%(name, str(args))
#     new = cls.__new__(cls)
#     new.__setstate__(args)
#     return new

# copy_reg.constructor(_constructObject)
Example #22
        }
        """,
        version=3)


# THIS WORKS But CudaNdarray instances don't compare equal to one
# another, and what about __hash__ ?  So the unpickled version doesn't
# equal the pickled version, and the cmodule cache is not happy with
# the situation.
def CudaNdarray_unpickler(npa):

    if config.experimental.unpickle_gpu_on_cpu:
        # directly return numpy array
        warnings.warn("config.experimental.unpickle_gpu_on_cpu is set to True. Unpickling CudaNdarray as numpy.ndarray")
        return npa
    elif cuda:
        return cuda.CudaNdarray(npa)
    else:
        raise ImportError("Cuda not found. Cannot unpickle CudaNdarray")

copy_reg.constructor(CudaNdarray_unpickler)


def CudaNdarray_pickler(cnda):
    return (CudaNdarray_unpickler, (numpy.asarray(cnda),))

# In case cuda is not imported.
if cuda is not None:
    copy_reg.pickle(cuda.CudaNdarray, CudaNdarray_pickler,
                    CudaNdarray_unpickler)
Example #23
        self.__description = description

    def getDescription(self):
        return self.__description

    def getName(self):
        return self.__name

    def __str__(self):
        return "PermissionSetting: %s" % self.__name

    __repr__ = __str__

# register PermissionSettings to be symbolic constants by identity,
# even when pickled and unpickled.
import copy_reg
copy_reg.constructor(PermissionSetting)
copy_reg.pickle(PermissionSetting,
                PermissionSetting.getName,
                PermissionSetting)


Allow = PermissionSetting('Allow',
    'Explicit allow setting for permissions')

Deny = PermissionSetting('Deny',
    'Explicit deny setting for permissions')

Unset = PermissionSetting('Unset',
    'Unset constant that denotes no setting for permission')
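
Because the reduction function here returns the instance's name as a plain string, pickle records a global reference, and unpickling resolves it back to the module-level constant, preserving identity. A minimal sketch of the same idiom with an invented Marker class:

# Sketch of the pickle-by-name idiom used above; Marker is invented for
# illustration, and the module-level constant unpickles to the same object.
import copy_reg
import pickle

class Marker(object):
    def __init__(self, name):
        self.name = name
    def getName(self):
        return self.name

copy_reg.pickle(Marker, Marker.getName, Marker)

ON = Marker('ON')  # must live at module level so pickle can find it by name
assert pickle.loads(pickle.dumps(ON)) is ON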
Example #24

## support for pickling and unpickling

def __createCoordSys(o, u, v, w, tol2D):
    c = CoordSys(o, u, v, w)
    c.setTol2D(tol2D)
    return c


def __reduceCoordSys(c):
    return (__createCoordSys, (c.o(), c.u(), c.v(), c.w(), c.getTol2D()))


import copy_reg
copy_reg.constructor(__createCoordSys)
copy_reg.pickle(CoordSysType, __reduceCoordSys)
del copy_reg


if __name__ == '__main__':
    import math, random, time

    def randomTest():
        def r():
            return math.sin(random.random()*2.0*math.pi)* random.random() * 1000.0
        def tdiff(x, y):
            for i in range(len(x)):
                for j in range(3):
                    if math.fabs(x[i][j] - y[i][j]) > 1.0e-10:
                        return 1
Example #25
 # module readline typically doesn't exist on non-Unix platforms
   pass

## special filter on MacOS X (warnings caused by linking that is still required)
if sys.platform == 'darwin':
   import warnings
   warnings.filterwarnings( action='ignore', category=RuntimeWarning, module='ROOT',\
      message='class \S* already in TClassTable$' )

### load PyROOT C++ extension module, special case for linux and Sun ------------
_root = cppyy._backend

## convince 2.2 it's ok to use the expand function
if sys.version[0:3] == '2.2':
   import copy_reg
   copy_reg.constructor( _root._ObjectProxy__expand__ )

## convince inspect that PyROOT method proxies are possible drop-ins for python
## methods and classes for pydoc
import inspect

inspect._old_isfunction = inspect.isfunction
def isfunction( object ):
   if type(object) == _root.MethodProxy and not object.im_class:
      return True
   return inspect._old_isfunction( object )
inspect.isfunction = isfunction

inspect._old_ismethod = inspect.ismethod
def ismethod( object ):
   if type(object) == _root.MethodProxy:
Example #26
import zet
import string
import tempfile
import os
import sys
import copy_reg
import re

# register de-pickler
copy_reg.constructor(zet.unpickle_search_result)


class ZetIndex(zet.Index):
    """Extends C-wrapper index."""

    DEFAULT_LEN = 20

    def __init__(self, prefix="index"):
        zet.Index.__init__(self, prefix)
        self.prefix = prefix

    def search(self, query, *args, **kys):
        baseResults = zet.Index.search(self, query, *args, **kys)
        return ZetSearchResults(list(baseResults.results),
                                baseResults.total_results)

    def trec_search(self, trec_query, len, *args, **kys):
        return self.search(trec_query.query, 0, len, *args,
                           **kys).to_trec_eval_list(trec_query.topic_num)

    def __getitem__(self, key):
Example #27
# Polyhedron
#-------------------------------------------------------------------------------
def construct_Polyhedron(encoded_string):
    return unpackElementPolyhedron(encoded_string)


def reduce_Polyhedron(obj):
    return construct_Polyhedron, (packElementPolyhedron(obj), )


copy_reg.pickle(type(Polyhedron()), reduce_Polyhedron, construct_Polyhedron)

#------------------------------------------------------------------------------
# std::vectors
#------------------------------------------------------------------------------
vector_template = """
def construct_vector_of_%(value_type)s(encoded_string):
    return string2vector_of_%(value_type)s(encoded_string)
def reduce_vector_of_%(value_type)s(obj):
    return construct_vector_of_%(value_type)s, (vector2string(obj, 20),)
copy_reg.constructor(construct_vector_of_%(value_type)s)
copy_reg.pickle(vector_of_%(value_type)s, reduce_vector_of_%(value_type)s, construct_vector_of_%(value_type)s)
"""

for t in ("int", "unsigned", "ULL", "double", "string", "Vector1d", "Vector2d",
          "Vector3d", "Tensor1d", "Tensor2d", "Tensor3d", "SymTensor1d",
          "SymTensor2d", "SymTensor3d", "ThirdRankTensor1d",
          "ThirdRankTensor2d", "ThirdRankTensor3d"):
    exec(vector_template % {"value_type": t})

#------------------------------------------------------------------------------
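
For reference, substituting value_type = "int" into vector_template expands, via the exec loop above, to the following module-level code (string2vector_of_int, vector2string, and vector_of_int are assumed to be provided by the wrapped extension module):

# Expansion of vector_template for value_type == "int".
def construct_vector_of_int(encoded_string):
    return string2vector_of_int(encoded_string)
def reduce_vector_of_int(obj):
    return construct_vector_of_int, (vector2string(obj, 20),)
copy_reg.constructor(construct_vector_of_int)
copy_reg.pickle(vector_of_int, reduce_vector_of_int, construct_vector_of_int)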
Example #28
__version__ = "1.2.0"

__all__ = "Proxy",

try:
    import copy_reg as copyreg
except ImportError:
    import copyreg

from .utils import identity

copyreg.constructor(identity)

try:
    from .cext import Proxy
    from .cext import identity
except ImportError:
    from .slots import Proxy
else:
    copyreg.constructor(identity)
Example #29
        parentCls = cls.__bases__[0]
        parentSlots = getClassStateSlots(parentCls)
        if cls.__stateslots__ == parentCls.__stateslots__:
            slots = parentSlots
        else:
            slots = parentSlots + cls.__stateslots__
        _CLASS_STATE_SLOTS[cls] = slots
    return slots


def _constructObject1(*statevalues):
    # print "EXTRACTING %s, args are %s"%(name, str(args))
    for (i, value) in enumerate(statevalues):
        if i == 0:
            new = value.__new__(value)
            stateslots = getClassStateSlots(value)
        else:
            setattr(new, stateslots[i], value)
    new.__restore__()
    return new


copy_reg.constructor(_constructObject1)
# def _constructObject(cls, *args):
#     #print "EXTRACTING %s, args are %s"%(name, str(args))
#     new = cls.__new__(cls)
#     new.__setstate__(args)
#     return new

# copy_reg.constructor(_constructObject)
Example #30
# keep namespace clean
__version__ = version
__author__ = author
__license__ = license
del version, author, license


# support functions for pickling and unpickling
def __createPolygon(contour, hole):
    """rebuild Polygon from pickled data"""
    p = Polygon()
    map(p.addContour, contour, hole)
    return p


def __tuples(a):
    """map an array or list of lists to a tuple of tuples"""
    return tuple(map(tuple, a))


def __reducePolygon(p):
    """return pickle data for Polygon """
    return (__createPolygon, (tuple([__tuples(x) for x in p]), p.isHole()))


import copy_reg

copy_reg.constructor(__createPolygon)
copy_reg.pickle(Polygon, __reducePolygon)
del copy_reg
Example #31
import zet
import string
import tempfile
import os
import sys
import copy_reg
import re

# register de-pickler
copy_reg.constructor(zet.unpickle_search_result)

class ZetIndex(zet.Index):
    """Extends C-wrapper index."""

    DEFAULT_LEN=20

    def __init__(self, prefix="index"):
        zet.Index.__init__(self, prefix)
        self.prefix = prefix

    def search(self, query, *args, **kys):
        baseResults = zet.Index.search(self, query, *args, **kys)
        return ZetSearchResults(list(baseResults.results), 
                baseResults.total_results)

    def trec_search(self, trec_query, len, *args, **kys):
        return self.search(trec_query.query, 0, 
                len, *args, **kys).to_trec_eval_list(trec_query.topic_num)

    def __getitem__(self, key):
        return self.search(key)
Example #32
            return _coconut_repr(obj)[1:]
        else:
            return _coconut_repr(obj)
    ascii = repr
    print.__doc__, input.__doc__, repr.__doc__ = _coconut_print.__doc__, _coconut_raw_input.__doc__, _coconut_repr.__doc__
    def raw_input(*args):
        """Coconut uses Python 3 "input" instead of Python 2 "raw_input"."""
        raise _coconut.NameError('Coconut uses Python 3 "input" instead of Python 2 "raw_input"')
    def xrange(*args):
        """Coconut uses Python 3 "range" instead of Python 2 "xrange"."""
        raise _coconut.NameError('Coconut uses Python 3 "range" instead of Python 2 "xrange"')
    if _coconut_sys.version_info < (2, 7):
        import functools as _coconut_functools, copy_reg as _coconut_copy_reg
        def _coconut_new_partial(func, args, keywords):
            return _coconut_functools.partial(func, *(args if args is not None else ()), **(keywords if keywords is not None else {}))
        _coconut_copy_reg.constructor(_coconut_new_partial)
        def _coconut_reduce_partial(self):
            return (_coconut_new_partial, (self.func, self.args, self.keywords))
        _coconut_copy_reg.pickle(_coconut_functools.partial, _coconut_reduce_partial)
else:
    py3_map, py3_zip = map, zip

class _coconut(object):
    import collections, functools, imp, itertools, operator, types
    if _coconut_sys.version_info < (3, 3):
        abc = collections
    else:
        import collections.abc as abc
    IndexError, NameError, ValueError, map, zip, bytearray, dict, frozenset, getattr, hasattr, isinstance, iter, len, list, min, next, object, range, reversed, set, slice, super, tuple, repr = IndexError, NameError, ValueError, map, zip, bytearray, dict, frozenset, getattr, hasattr, isinstance, iter, len, list, min, next, object, range, reversed, set, slice, super, tuple, staticmethod(repr)

class _coconut_MatchError(Exception):
Example #33
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE
#
##############################################################################
"""Provide access to Persistent and PersistentMapping.

$Id: __init__.py 113734 2010-06-21 15:33:46Z ctheune $
"""

from cPersistence import Persistent, GHOST, UPTODATE, CHANGED, STICKY
from cPickleCache import PickleCache

from cPersistence import simple_new
import copy_reg
copy_reg.constructor(simple_new)

# Make an interface declaration for Persistent,
# if zope.interface is available.
try:
    from zope.interface import classImplements
except ImportError:
    pass
else:
    from persistent.interfaces import IPersistent
    classImplements(Persistent, IPersistent)