def __setstate__(self, state): if len(self._objects) < self._i[0]: raise cPickle.PickleError("The workflow instance " "inconsistent state, " "too few objects") self.log = logging.getLogger("workflow.%s" % self.__class__) # default logging self.__dict__ = state
def __setstate__(self, state): if len(self._objects) < self._i[0]: raise cPickle.PickleError( "The workflow instance inconsistent state, " "too few objects") self.__dict__ = state self.add_log()
def __getstate__(self): if not self._picklable_safe: raise cPickle.PickleError("The instance of the workflow engine " "cannot be serialized, " "because it was constructed with " "custom, user-supplied callbacks. " "Either use PickableWorkflowEngine or " "provide your own __getstate__ method.") state = self.__dict__.copy() del state['log'] return state
def _read_cache(self):
    """Load and return the pickled payload stored in ``self.file``.

    :returns: the unpickled cache data.
    :raises pickle.PickleError: if the cache file cannot be unpickled.
    :raises InvalidCacheException: if the file is older than ``self.ttl``
        seconds (cache expired).
    """
    # 'with' guarantees the descriptor is closed even when unpickling
    # fails — the original leaked the handle on the error path.
    # NOTE(review): pickle data is usually opened in binary mode ('rb');
    # 'r' is kept here to preserve the original behavior — confirm.
    with open(self.file, 'r') as f:
        try:
            data = pickle.load(f)
        except Exception as e:
            # Pickle can raise many different exception types on a bad
            # input file; collapse them all into a single PickleError.
            raise pickle.PickleError(e)
    mtime = os.stat(self.file).st_mtime
    if (int(time.time()) - mtime > self.ttl):
        log.debug("[PCH]", "EXPIRED", self.file)
        raise InvalidCacheException("Cache expired")
    else:
        log.debug("[PCH]", "HIT", self.file)
        return data
def __reduce__(self, *args, **kwargs): raise cPickle.PickleError('Not pickleable.')
def __getstate__(self): raise pickle.PickleError("Never pickle generator.runtime.Cache.")