def __reduce__(self):
    mod = self.__fn.__module__
    name = self.__fn.__name__
    try:
        obj = load_back(mod, name)
    except (ImportError, KeyError, AttributeError):
        raise cPickle.PicklingError(
            "Can't pickle as_op(), not found as %s.%s" % (mod, name))
    else:
        if obj is not self:
            raise cPickle.PicklingError(
                "Can't pickle as_op(), not the object "
                "at %s.%s" % (mod, name))
    return load_back, (mod, name)
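# A minimal sketch (an assumption, not the original implementation) of the
# `load_back` helper that `__reduce__` above relies on: re-import the module
# and return the attribute stored under the wrapped function's name, so the
# op is pickled by reference rather than by value.
import importlib

def load_back(mod, name):
    module = importlib.import_module(mod)
    return getattr(module, name)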
def __getstate__(self):
    """ This controls how we pickle and unpickle the objects """
    try:
        thetype = self._vol_theType.__name__
    except AttributeError:
        thetype = self._vol_theType

    # Note: we lose the parent attribute here
    result = dict(offset=self.obj_offset,
                  name=self.obj_name,
                  vm=self.obj_vm,
                  native_vm=self.obj_native_vm,
                  theType=thetype)

    ## Introspect the kwargs for the constructor and store in the dict
    try:
        for arg in self.__init__.func_code.co_varnames:
            if (arg not in result and
                    arg not in "self parent profile args".split()):
                result[arg] = self.__dict__[arg]
    except KeyError:
        debug.post_mortem()
        raise pickle.PicklingError(
            "Object {0} at 0x{1:08x} cannot be cached because of missing attribute {2}"
            .format(self.obj_name, self.obj_offset, arg))

    return result
def from_file(cls, filepath):
    with gzip.open(filepath) as f:
        self = pickle.load(f)
    if not isinstance(self, cls):
        raise pickle.PicklingError(
            "loaded object must be an instance of {}, instead received a {}"
            .format(cls, self.__class__))
    return self
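# A sketch of the write side implied by `from_file` above, so the gzip +
# pickle round trip matches; the helper name `save_to_file` is an assumption
# for illustration only.
import gzip
import pickle

def save_to_file(obj, filepath):
    with gzip.open(filepath, "wb") as f:
        pickle.dump(obj, f, pickle.HIGHEST_PROTOCOL)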
def _send_delete_request(self, table, obj_id):
    """Constructs the message to be sent to delete from the DB on the server."""
    try:
        self.sock.send_by_size(SQLClient.delete_str)
        self.sock.send_by_size(table + "~" + str(obj_id))
    except socket.error:
        raise socket.error("Could not send request to the server.")
    except pickle.PicklingError:
        raise pickle.PicklingError("Could not pickle values.")
def _send_add_request(self, table, **values):
    """Constructs the message to be sent for an add request to the server and sends it."""
    try:
        self.sock.send_by_size(SQLClient.add_str)
        self.sock.send_by_size(
            table + "~" + pickle.dumps(values, pickle.HIGHEST_PROTOCOL))
    except socket.error:
        raise socket.error("Could not send request to the server.")
    except pickle.PicklingError:
        raise pickle.PicklingError("Could not pickle values.")
def _send_update_request(self, table, obj_id, **updates):
    """Constructs the message to be sent to update DB on the server."""
    try:
        self.sock.send_by_size(SQLClient.update_str)
        self.sock.send_by_size(
            table + "~" + str(obj_id) + "~" +
            pickle.dumps(updates, pickle.HIGHEST_PROTOCOL))
    except socket.error:
        raise socket.error("Could not send request to the server.")
    except pickle.PicklingError:
        raise pickle.PicklingError("Could not pickle values.")
def dump(self, value, f):
    try:
        pickle.dump(value, f, 2)
    except pickle.PickleError:
        raise
    except Exception as e:
        msg = "Could not serialize broadcast: " + e.__class__.__name__ + ": " + e.message
        print_exec(sys.stderr)
        raise pickle.PicklingError(msg)
    f.close()
    return f.name
def dump(self, value, f):
    try:
        pickle.dump(value, f, pickle_protocol)
    except pickle.PickleError:
        raise
    except Exception as e:
        msg = "Could not serialize broadcast: %s: %s" \
            % (e.__class__.__name__, _exception_message(e))
        print_exec(sys.stderr)
        raise pickle.PicklingError(msg)
    f.close()
def dump(value, f):
    try:
        pickle.dump(value, f, 2)
    except pickle.PickleError:
        raise
    except Exception as e:
        msg = "Could not serialize broadcast: %s" \
            % (e.message)
        # log the traceback before wrapping the error, as in the
        # sibling implementations above
        print_exec(sys.stderr)
        raise pickle.PicklingError(msg)
    f.close()
    return f.name
def dumps(self, obj):
    try:
        return cloudpickle.dumps(obj, 2)
    except pickle.PickleError:
        raise
    except Exception as e:
        emsg = _exception_message(e)
        if "'i' format requires" in emsg:
            msg = "Object too large to serialize: %s" % emsg
        else:
            msg = "Could not serialize object: %s: %s" % (e.__class__.__name__, emsg)
        cloudpickle.print_exec(sys.stderr)
        raise pickle.PicklingError(msg)
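# Illustrative usage sketch (the helper name `try_serialize` is an assumption,
# not part of the original code): anything the serializer cannot handle
# surfaces as a pickle.PicklingError carrying the descriptive message built
# in `dumps` above.
import pickle
import sys

def try_serialize(serializer, obj):
    try:
        return serializer.dumps(obj)
    except pickle.PicklingError as e:
        sys.stderr.write("serialization failed: %s\n" % e)
        return None

# For example, threading.Lock() is not picklable, so try_serialize would
# report the wrapped error and return None instead of raising.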
def _send_receive_request(self, table, ratio="=", **constraints):
    """Constructs the message to be sent for a receive request to the server and sends it."""
    try:
        self.sock.send_by_size(SQLClient.get)
        if not constraints:
            self.sock.send_by_size(
                str(table)[0].upper() + str(table)[1:].lower())
        else:
            self.sock.send_by_size(
                str(table)[0].upper() + str(table)[1:].lower() + "~" +
                str(ratio) + "~" +
                pickle.dumps(constraints, pickle.HIGHEST_PROTOCOL))
    except socket.error:
        raise socket.error("Could not send request to the server.")
    except pickle.PicklingError:
        raise pickle.PicklingError("Could not pickle values.")
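# The SQLClient request methods above all depend on a `send_by_size` helper
# on the socket wrapper, whose implementation is not shown. A minimal sketch
# under the assumption of a fixed-width, length-prefixed frame (the actual
# header format expected by the original server may differ):
def send_by_size(sock, data, header_len=8):
    """Send `data` preceded by its length, zero-padded to `header_len` digits."""
    if isinstance(data, str):
        data = data.encode()
    sock.sendall(str(len(data)).zfill(header_len).encode() + data)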
def savePickle(location, obj):
    head, tail = os.path.split(location)
    with tempfile.NamedTemporaryFile(prefix='.' + tail + '.', dir=head,
                                     mode='wb', delete=False) as f:
        tempLocation = f.name
        pickle.dump(obj, f, protocol=2)

    # (FIXME : could remove below checks for speed, especially once we're
    # dealing with large pickled files)
    # make sure we can read in again
    objAgain = loadPickle(tempLocation)
    # make sure pickled stream doesn't contain __main__ references
    # (these can't be unpickled from a different script)
    with open(tempLocation, 'rb') as f:
        pickledString = f.read()
    if pickledString.find('__main__') != -1:
        raise pickle.PicklingError('objects to be pickled should be defined in a module and not in the currently-running script')

    # move into place atomically (at least on linux)
    # (FIXME : does windows behavior in fact still give correct result in our
    # use cases?)
    os.rename(tempLocation, location)
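# `savePickle` verifies its output by reading it back through a `loadPickle`
# helper that is not shown above; a minimal sketch consistent with that use:
import pickle

def loadPickle(location):
    with open(location, 'rb') as f:
        return pickle.load(f)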
def __getstate__(self):
    ## This one is too complicated to pickle right now
    raise pickle.PicklingError("Array objects do not support caching")