Esempio n. 1
0
File: base.py Progetto: gityow/fugue
 def __getstate__(self):
     """
     :raises pickle.PicklingError: serialization
       of ``RPCHandler`` is not allowed
     """
     raise pickle.PicklingError(f"{self} is not serializable")
Esempio n. 2
0
    def save_function(self, obj, name=None):
        """ Registered with the dispatch to handle all function types.

        Determines what kind of function obj is (e.g. lambda, defined at
        interactive prompt, etc) and handles the pickling appropriately.

        NOTE(review): relies on module-level helpers not visible in this
        chunk (_BUILTIN_TYPE_CONSTRUCTORS, PY3, islambda, _restore_attr,
        save_function_tuple); comments below hedge where those are opaque.
        """
        try:
            should_special_case = obj in _BUILTIN_TYPE_CONSTRUCTORS
        except TypeError:
            # Methods of builtin types aren't hashable in python 2.
            should_special_case = False

        if should_special_case:
            # We keep a special-cased cache of built-in type constructors at
            # global scope, because these functions are structured very
            # differently in different python versions and implementations (for
            # example, they're instances of types.BuiltinFunctionType in
            # CPython, but they're ordinary types.FunctionType instances in
            # PyPy).
            #
            # If the function we've received is in that cache, we just
            # serialize it as a lookup into the cache.
            return self.save_reduce(_BUILTIN_TYPE_CONSTRUCTORS[obj], (), obj=obj)

        # Bind the raw stream writer once; used for the hand-emitted opcodes
        # at the bottom of this method.
        write = self.write

        if name is None:
            name = obj.__name__
        try:
            # whichmodule() could fail, see
            # https://bitbucket.org/gutworth/six/issues/63/importing-six-breaks-pickling
            modname = pickle.whichmodule(obj, name)
        except Exception:
            modname = None
        # print('which gives %s %s %s' % (modname, obj, name))
        try:
            themodule = sys.modules[modname]
        except KeyError:
            # eval'd items such as namedtuple give invalid items for their function __module__
            modname = '__main__'

        if modname == '__main__':
            themodule = None

        # If the module attribute with this name *is* the object, the
        # function is importable and can be pickled as a plain global ref.
        try:
            lookedup_by_name = getattr(themodule, name, None)
        except Exception:
            lookedup_by_name = None

        if themodule:
            if lookedup_by_name is obj:
                return self.save_global(obj, name)

        # a builtin_function_or_method which comes in as an attribute of some
        # object (e.g., itertools.chain.from_iterable) will end
        # up with modname "__main__" and so end up here. But these functions
        # have no __code__ attribute in CPython, so the handling for
        # user-defined functions below will fail.
        # So we pickle them here using save_reduce; have to do it differently
        # for different python versions.
        if not hasattr(obj, '__code__'):
            if PY3:  # pragma: no branch
                rv = obj.__reduce_ex__(self.proto)
            else:
                if hasattr(obj, '__self__'):
                    # Bound method: reconstruct via getattr on its receiver.
                    rv = (getattr, (obj.__self__, name))
                else:
                    raise pickle.PicklingError("Can't pickle %r" % obj)
            return self.save_reduce(obj=obj, *rv)

        # if func is lambda, def'ed at prompt, is in main, or is nested, then
        # we'll pickle the actual function object rather than simply saving a
        # reference (as is done in default pickler), via save_function_tuple.
        if (islambda(obj)
                or getattr(obj.__code__, 'co_filename', None) == '<stdin>'
                or themodule is None):
            self.save_function_tuple(obj)
            return
        else:
            # func is nested
            if lookedup_by_name is None or lookedup_by_name is not obj:
                self.save_function_tuple(obj)
                return

        if obj.__dict__:
            # essentially save_reduce, but workaround needed to avoid recursion
            # Hand-emits MARK/GLOBAL ... TUPLE/REDUCE so the function's
            # attribute dict is restored via _restore_attr without
            # re-entering save() on the function itself.
            self.save(_restore_attr)
            write(pickle.MARK + pickle.GLOBAL + modname + '\n' + name + '\n')
            self.memoize(obj)
            self.save(obj.__dict__)
            write(pickle.TUPLE + pickle.REDUCE)
        else:
            # Importable function with no extra attributes: raw GLOBAL ref.
            write(pickle.GLOBAL + modname + '\n' + name + '\n')
            self.memoize(obj)
 def save_unsupported(self, obj):
     """Fallback dispatch handler: refuse to pickle *obj*.

     :raises pickle.PicklingError: always, naming ``type(obj)``.
     """
     message = "Cannot pickle objects of type %s" % type(obj)
     raise pickle.PicklingError(message)
Esempio n. 4
0
    def _file_reduce(self):
        import pickle

        raise pickle.PicklingError(
            "Cannot pickle a ZoneInfo file created from a file stream.")
Esempio n. 5
0
 def __reduce__(self):
     raise pickle.PicklingError("not picklable")
Esempio n. 6
0
    def save_file(self, obj):
        """Save a file.

        Pickles an on-disk, readable file object by snapshotting its current
        contents into a StringIO stand-in; raises ``pickle.PicklingError``
        for stdin, ttys, write-only handles, and anything that does not map
        to a real, stat-able file.

        NOTE(review): uses the Python 2 builtin ``file(...)`` to re-open the
        path, so this branch is Python-2-only despite the ``io`` fallback
        import above it.
        """
        try:
            import StringIO as pystringIO  #we can't use cStringIO as it lacks the name attribute
        except ImportError:
            import io as pystringIO

        if not hasattr(obj, 'name') or not hasattr(obj, 'mode'):
            raise pickle.PicklingError(
                "Cannot pickle files that do not map to an actual file")
        # Standard streams are recognized by identity and pickled as
        # attribute lookups on the sys module.
        if obj is sys.stdout:
            return self.save_reduce(getattr, (sys, 'stdout'), obj=obj)
        if obj is sys.stderr:
            return self.save_reduce(getattr, (sys, 'stderr'), obj=obj)
        if obj is sys.stdin:
            raise pickle.PicklingError("Cannot pickle standard input")
        if hasattr(obj, 'isatty') and obj.isatty():
            raise pickle.PicklingError(
                "Cannot pickle files that map to tty objects")
        if 'r' not in obj.mode:
            raise pickle.PicklingError(
                "Cannot pickle files that are not opened for reading")
        name = obj.name
        try:
            fsize = os.stat(name).st_size
        except OSError:
            raise pickle.PicklingError(
                "Cannot pickle file %s as it cannot be stat" % name)

        if obj.closed:
            #create an empty closed string io
            retval = pystringIO.StringIO("")
            retval.close()
        elif not fsize:  #empty file
            retval = pystringIO.StringIO("")
            try:
                # Probe the path: a zero-size stat result may still be a
                # special file that yields data on read.
                tmpfile = file(name)
                tst = tmpfile.read(1)
            except IOError:
                raise pickle.PicklingError(
                    "Cannot pickle file %s as it cannot be read" % name)
            tmpfile.close()
            if tst != '':
                raise pickle.PicklingError(
                    "Cannot pickle file %s as it does not appear to map to a physical, real file"
                    % name)
        else:
            try:
                # Snapshot the full file contents at pickling time.
                tmpfile = file(name)
                contents = tmpfile.read()
                tmpfile.close()
            except IOError:
                raise pickle.PicklingError(
                    "Cannot pickle file %s as it cannot be read" % name)
            retval = pystringIO.StringIO(contents)
            # Preserve the original handle's seek position.
            curloc = obj.tell()
            retval.seek(curloc)

        # Pickle the StringIO stand-in, then memoize the real file object so
        # later references to it resolve to the stand-in.
        retval.name = name
        self.save(retval)
        self.memoize(obj)
Esempio n. 7
0
 def __reduce__(self):
     raise pickle.PicklingError("make this nonpickleable")
Esempio n. 8
0
 def disabled_reduce_ex(self, proto):
     """Replacement ``__reduce_ex__`` that blocks pickling of ``self``.

     :raises pickle.PicklingError: always, naming the concrete type.
     """
     type_name = type(self).__name__
     raise pickle.PicklingError("'%s' is unpicklable" % (type_name, ))
Esempio n. 9
0
 def get_contents(cell):
     """Return the value stored in a closure *cell*.

     :param cell: a closure cell (e.g. ``func.__closure__[i]``).
     :returns: the cell's current value.
     :raises pickle.PicklingError: if the cell is empty, i.e. the free
         variable is referenced before assignment in the enclosing scope.
     """
     try:
         return cell.cell_contents
     # Fixed: ``except ValueError, e`` is Python-2-only syntax (SyntaxError
     # on Python 3), and the bound exception was never used.
     except ValueError:
         raise pickle.PicklingError('Function to be pickled has free variables that are referenced before assignment in enclosing scope')
Esempio n. 10
0
def _no_pickle(obj):
    raise pickle.PicklingError(f"Pickling of {type(obj)} is unsupported")
Esempio n. 11
0
    def _reduce_Logger(logger):
        if logging.getLogger(logger.name) is not logger:
            import pickle

            raise pickle.PicklingError('logger cannot be pickled')
        return logging.getLogger, (logger.name,)
Esempio n. 12
0
 def save_unsupported(self, obj): # pylint: disable=no-self-use
   """Reject pickling: raised for every object routed here."""
   msg = "Cannot pickle objects of type %s" % type(obj)
   raise pickle.PicklingError(msg)
Esempio n. 13
0
 def __getstate__(self):
     raise pickle.PicklingError("You shall not serialize me!")
Esempio n. 14
0
 def __getstate__(self):
     ## This one is too complicated to pickle right now
     raise pickle.PicklingError("Array objects do not support caching")
Esempio n. 15
0
 def __getstate__(self):
     raise pickle.PicklingError(
         "Pickling of datastore_query.Batch is unsupported.")
Esempio n. 16
0
 def __getstate__(self):
     raise pickle.PicklingError()
Esempio n. 17
0
    def save_global(self, obj, name=None, pack=struct.pack):
        """Pickle a class or other module-global object.

        Falls back to pickling the class *by value* (name, bases, filtered
        ``__dict__``) when it cannot be looked up by name in its module,
        e.g. classes defined in ``__main__`` or created dynamically.

        NOTE(review): ``types.ClassType`` (old-style classes) makes this
        Python 2 code; helpers ``_BUILTIN_TYPE_NAMES``, ``_builtin_type``,
        ``_load_class`` and ``_load_namedtuple`` are defined elsewhere.
        """
        # Builtins go through a name -> type table so they pickle
        # identically across interpreters.
        if obj.__module__ == "__builtin__" or obj.__module__ == "builtins":
            if obj in _BUILTIN_TYPE_NAMES:
                return self.save_reduce(_builtin_type,
                                        (_BUILTIN_TYPE_NAMES[obj], ),
                                        obj=obj)

        if name is None:
            name = obj.__name__

        modname = getattr(obj, "__module__", None)
        if modname is None:
            try:
                # whichmodule() could fail, see
                # https://bitbucket.org/gutworth/six/issues/63/importing-six-breaks-pickling
                modname = pickle.whichmodule(obj, name)
            except Exception:
                modname = '__main__'

        if modname == '__main__':
            themodule = None
        else:
            __import__(modname)
            themodule = sys.modules[modname]
            # Track touched modules; self.modules is presumably used for
            # dependency tracking elsewhere -- not visible in this chunk.
            self.modules.add(themodule)

        # Importable by name: delegate to the stock pickler's global ref.
        if hasattr(themodule, name) and getattr(themodule, name) is obj:
            return Pickler.save_global(self, obj, name)

        typ = type(obj)
        if typ is not obj and isinstance(obj, (type, types.ClassType)):
            d = dict(obj.__dict__)  # copy dict proxy to a dict
            if not isinstance(d.get('__dict__', None), property):
                # don't extract dict that are properties
                d.pop('__dict__', None)
            d.pop('__weakref__', None)

            # hack as __new__ is stored differently in the __dict__
            new_override = d.get('__new__', None)
            if new_override:
                d['__new__'] = obj.__new__

            # workaround for namedtuple (hijacked by PySpark)
            if getattr(obj, '_is_namedtuple_', False):
                self.save_reduce(_load_namedtuple, (obj.__name__, obj._fields))
                return

            # Rebuild the class in two steps: create a skeleton class via
            # type(name, bases, {doc}), then apply the remaining dict through
            # _load_class (TUPLE2 + REDUCE emitted by hand below).
            self.save(_load_class)
            self.save_reduce(typ, (obj.__name__, obj.__bases__, {
                "__doc__": obj.__doc__
            }),
                             obj=obj)
            d.pop('__doc__', None)
            # handle property and staticmethod
            dd = {}
            for k, v in d.items():
                if isinstance(v, property):
                    # Tag the key so the loader can rebuild the descriptor.
                    k = ('property', k)
                    v = (v.fget, v.fset, v.fdel, v.__doc__)
                elif isinstance(v, staticmethod) and hasattr(v, '__func__'):
                    k = ('staticmethod', k)
                    v = v.__func__
                elif isinstance(v, classmethod) and hasattr(v, '__func__'):
                    k = ('classmethod', k)
                    v = v.__func__
                dd[k] = v
            self.save(dd)
            self.write(pickle.TUPLE2)
            self.write(pickle.REDUCE)

        else:
            raise pickle.PicklingError("Can't pickle %r" % obj)
Esempio n. 18
0
 def __reduce__(self):
     raise pickle.PicklingError(
         '{0} - Cannot pickle Actor instances'.format(self))
Esempio n. 19
0
 def __reduce__(self):
     raise pickle.PicklingError("to make this error non-serializable")
Esempio n. 20
0
    def save_file(self, obj):
        """Save a file.

        PiCloud variant of file pickling: snapshots the file contents into a
        StringIO, capped by ``SerializingAdapter.max_transmit_data``.

        NOTE(review): Python 2 only -- it imports the top-level ``StringIO``
        module with no ``io`` fallback and re-opens paths with the
        ``file(...)`` builtin.
        """
        import StringIO as pystringIO  #we can't use cStringIO as it lacks the name attribute
        from ..transport.adapter import SerializingAdapter

        if not hasattr(obj, 'name') or not hasattr(obj, 'mode'):
            raise pickle.PicklingError(
                "Cannot pickle files that do not map to an actual file")
        # Standard streams are recognized by their reported name here, not
        # by identity.
        if obj.name == '<stdout>':
            return self.save_reduce(getattr, (sys, 'stdout'), obj=obj)
        if obj.name == '<stderr>':
            return self.save_reduce(getattr, (sys, 'stderr'), obj=obj)
        if obj.name == '<stdin>':
            raise pickle.PicklingError("Cannot pickle standard input")
        if hasattr(obj, 'isatty') and obj.isatty():
            raise pickle.PicklingError(
                "Cannot pickle files that map to tty objects")
        if 'r' not in obj.mode:
            raise pickle.PicklingError(
                "Cannot pickle files that are not opened for reading")
        name = obj.name
        try:
            fsize = os.stat(name).st_size
        except OSError:
            raise pickle.PicklingError(
                "Cannot pickle file %s as it cannot be stat" % name)

        if obj.closed:
            #create an empty closed string io
            retval = pystringIO.StringIO("")
            retval.close()
        elif not fsize:  #empty file
            retval = pystringIO.StringIO("")
            try:
                # Probe: a zero-size stat may still be a readable special file.
                tmpfile = file(name)
                tst = tmpfile.read(1)
            except IOError:
                raise pickle.PicklingError(
                    "Cannot pickle file %s as it cannot be read" % name)
            tmpfile.close()
            if tst != '':
                raise pickle.PicklingError(
                    "Cannot pickle file %s as it does not appear to map to a physical, real file"
                    % name)
        elif fsize > SerializingAdapter.max_transmit_data:
            # Refuse to inline files larger than the transport's cap.
            raise pickle.PicklingError(
                "Cannot pickle file %s as it exceeds cloudconf.py's max_transmit_data of %d"
                % (name, SerializingAdapter.max_transmit_data))
        else:
            try:
                # Snapshot file contents (bounded by the transmit cap).
                tmpfile = file(name)
                contents = tmpfile.read(SerializingAdapter.max_transmit_data)
                tmpfile.close()
            except IOError:
                raise pickle.PicklingError(
                    "Cannot pickle file %s as it cannot be read" % name)
            retval = pystringIO.StringIO(contents)
            # Preserve the original handle's seek position.
            curloc = obj.tell()
            retval.seek(curloc)

        retval.name = name
        self.save(retval)  #save stringIO
        self.memoize(obj)
Esempio n. 21
0
 def _handle_pickling_exception_event(
         self, action):  # Not doctested: mandatory picklability
     raise pickle.PicklingError()
Esempio n. 22
0
    def save_global(self, obj, name=None, pack=struct.pack):
        """Pickle a global (class/type) by reference, or by value if needed.

        NOTE(review): Python 2 only -- ``except AttributeError, a`` syntax,
        ``types.TypeType``/``ClassType``, the ``new`` module, and str opcode
        concatenation all predate Python 3.
        """
        write = self.write
        memo = self.memo

        if name is None:
            name = obj.__name__

        modname = getattr(obj, "__module__", None)
        if modname is None:
            modname = whichmodule(obj, name)

        try:
            __import__(modname)
            themodule = sys.modules[modname]
        except (ImportError, KeyError, AttributeError):  # should never occur
            raise pickle.PicklingError(
                "Can't pickle %r: Module %s cannot be found" %
                (obj, modname))

        if modname == '__main__':
            themodule = None

        if themodule:
            self.modules.add(themodule)

        # sendRef: True -> pickle as a named reference; False -> by value.
        sendRef = True
        typ = type(obj)
        #print 'saving', obj, typ
        try:
            try:  # Deal with case when getattribute fails with exceptions
                klass = getattr(themodule, name)
            except (AttributeError):
                if modname == '__builtin__':  # new.* are misrepeported
                    # Retry the lookup against the legacy 'new' module.
                    modname = 'new'
                    __import__(modname)
                    themodule = sys.modules[modname]
                    try:
                        klass = getattr(themodule, name)
                    except AttributeError, a:
                        #print themodule, name, obj, type(obj)
                        raise pickle.PicklingError("Can't pickle builtin %s" %
                                                   obj)
                else:
                    raise

        except (ImportError, KeyError, AttributeError):
            if isinstance(obj, types.TypeType) or \
                    isinstance(obj, types.ClassType):
                # Unimportable class: fall back to pickling by value.
                sendRef = False
            else:  # we can't deal with this
                raise
        else:
            # NOTE(review): 'isinstance(type, ...)' tests the builtin `type`
            # itself rather than `obj`/`typ` -- looks like a latent upstream
            # bug; left untouched here, verify against original project.
            if klass is not obj and (isinstance(type, types.TypeType) or
                                     isinstance(type, types.ClassType)):
                sendRef = False
        if not sendRef:
            #note: Third party types might crash this - add better checks!
            d = dict(obj.__dict__)  # copy dict proxy to a dict
            d.pop('__dict__', None)
            d.pop('__weakref__', None)
            self.save_reduce(type(obj), (obj.__name__, obj.__bases__,
                                         d), obj=obj)
            return

        if self.proto >= 2:
            # Protocol 2: use the extension registry shortcut when present.
            code = _extension_registry.get((modname, name))
            if code:
                assert code > 0
                if code <= 0xff:
                    write(pickle.EXT1 + chr(code))
                elif code <= 0xffff:
                    write("%c%c%c" % (pickle.EXT2, code & 0xff, code >> 8))
                else:
                    write(pickle.EXT4 + pack("<i", code))
                return

        # Plain GLOBAL opcode: module and name, newline-terminated.
        write(pickle.GLOBAL + modname + '\n' + name + '\n')
        self.memoize(obj)
Esempio n. 23
0
 def __getstate__(self):
     """Refuse pickling; the error message carries the class name."""
     raise pickle.PicklingError(CustomMetric.__name__)
Esempio n. 24
0
 def __getstate__(self):
     raise pickle.PicklingError(
         'Pickling of datastore_query.Query is unsupported.')
Esempio n. 25
0
 def __getstate__(self) -> None:
     raise pickle.PicklingError("PySpark datasets can't be serialized")
Esempio n. 26
0
 def __getstate__(self):
     print 'L getstate '
     raise pickle.PicklingError("Not supported, pickle the dict")
Esempio n. 27
0
    def save_function(self, obj, name=None):
        """ Registered with the dispatch to handle all function types.

        Determines what kind of function obj is (e.g. lambda, defined at
        interactive prompt, etc) and handles the pickling appropriately.

        NOTE(review): depends on helpers not visible in this chunk (PY3,
        islambda, save_function_tuple, _restore_attr, self.modules).
        """
        # Bind the raw stream writer once; used for the hand-emitted opcodes
        # at the bottom of this method.
        write = self.write

        if name is None:
            name = obj.__name__
        try:
            # whichmodule() could fail, see
            # https://bitbucket.org/gutworth/six/issues/63/importing-six-breaks-pickling
            modname = pickle.whichmodule(obj, name)
        except Exception:
            modname = None
        # print('which gives %s %s %s' % (modname, obj, name))
        try:
            themodule = sys.modules[modname]
        except KeyError:
            # eval'd items such as namedtuple give invalid items for their function __module__
            modname = '__main__'

        if modname == '__main__':
            themodule = None

        if themodule:
            self.modules.add(themodule)
            # Importable by name: pickle as a plain global reference.
            if getattr(themodule, name, None) is obj:
                return self.save_global(obj, name)

        # a builtin_function_or_method which comes in as an attribute of some
        # object (e.g., object.__new__, itertools.chain.from_iterable) will end
        # up with modname "__main__" and so end up here. But these functions
        # have no __code__ attribute in CPython, so the handling for
        # user-defined functions below will fail.
        # So we pickle them here using save_reduce; have to do it differently
        # for different python versions.
        if not hasattr(obj, '__code__'):
            if PY3:
                if sys.version_info < (3, 4):
                    raise pickle.PicklingError("Can't pickle %r" % obj)
                else:
                    rv = obj.__reduce_ex__(self.proto)
            else:
                if hasattr(obj, '__self__'):
                    # Bound method: reconstruct via getattr on its receiver.
                    rv = (getattr, (obj.__self__, name))
                else:
                    raise pickle.PicklingError("Can't pickle %r" % obj)
            return Pickler.save_reduce(self, obj=obj, *rv)

        # if func is lambda, def'ed at prompt, is in main, or is nested, then
        # we'll pickle the actual function object rather than simply saving a
        # reference (as is done in default pickler), via save_function_tuple.
        if (islambda(obj)
                or getattr(obj.__code__, 'co_filename', None) == '<stdin>'
                or themodule is None):
            self.save_function_tuple(obj)
            return
        else:
            # func is nested
            klass = getattr(themodule, name, None)
            if klass is None or klass is not obj:
                self.save_function_tuple(obj)
                return

        if obj.__dict__:
            # essentially save_reduce, but workaround needed to avoid recursion
            # Hand-emits MARK/GLOBAL ... TUPLE/REDUCE so the function's
            # attribute dict is restored via _restore_attr without
            # re-entering save() on the function itself.
            self.save(_restore_attr)
            write(pickle.MARK + pickle.GLOBAL + modname + '\n' + name + '\n')
            self.memoize(obj)
            self.save(obj.__dict__)
            write(pickle.TUPLE + pickle.REDUCE)
        else:
            # Importable function with no extra attributes: raw GLOBAL ref.
            write(pickle.GLOBAL + modname + '\n' + name + '\n')
            self.memoize(obj)
Esempio n. 28
0
 def __getstate__(self):
     raise pickle.PicklingError(
         "Pickling of datastore_query.PropertyOrder is unsupported.")
    def save_reduce(self,
                    func,
                    args,
                    state=None,
                    listitems=None,
                    dictitems=None,
                    obj=None):
        """Modified to support __transient__ on new objects.

        Change only affects protocol level 2 (which is always used by
        PiCloud): attributes listed in ``obj.__transient__`` are stripped
        from ``state`` before it is pickled.

        Pickles a ``__reduce__``-style tuple: callable, argument tuple, and
        optional state / list items / dict items.
        """
        # Assert that args is a tuple or None
        if not isinstance(args, tuple):
            raise pickle.PicklingError("args from reduce() should be a tuple")

        # Assert that func is callable
        if not hasattr(func, '__call__'):
            raise pickle.PicklingError("func from reduce should be callable")

        save = self.save
        write = self.write

        # Protocol 2 special case: if func's name is __newobj__, use NEWOBJ
        if self.proto >= 2 and getattr(func, "__name__", "") == "__newobj__":
            #Added fix to allow transient
            cls = args[0]
            if not hasattr(cls, "__new__"):
                raise pickle.PicklingError(
                    "args[0] from __newobj__ args has no __new__")
            if obj is not None and cls is not obj.__class__:
                raise pickle.PicklingError(
                    "args[0] from __newobj__ args has the wrong class")
            # Remaining args feed cls.__new__ via the NEWOBJ opcode.
            args = args[1:]
            save(cls)

            #Don't pickle transient entries
            if hasattr(obj, '__transient__'):
                transient = obj.__transient__
                # Copy before mutating: state may be shared with the object.
                state = state.copy()

                for k in list(state.keys()):
                    if k in transient:
                        del state[k]

            save(args)
            write(pickle.NEWOBJ)
        else:
            save(func)
            save(args)
            write(pickle.REDUCE)

        # modify here to avoid assert error
        # (the stock Pickler asserts obj is not already memoized here).
        if obj is not None and id(obj) not in self.memo:
            self.memoize(obj)

        # More new special cases (that work with older protocols as
        # well): when __reduce__ returns a tuple with 4 or 5 items,
        # the 4th and 5th item should be iterators that provide list
        # items and dict items (as (key, value) tuples), or None.

        if listitems is not None:
            self._batch_appends(listitems)

        if dictitems is not None:
            self._batch_setitems(dictitems)

        if state is not None:
            save(state)
            write(pickle.BUILD)
Esempio n. 30
0
File: base.py Progetto: gityow/fugue
 def __getstate__(self):
     raise pickle.PicklingError(f"{self} is not serializable")