def find_nearest_pickleable_exception(exc):
    """Given an exception instance, walk its class MRO and return an
    instance of the first ancestor exception class that can be pickled.

    The search stops (returning ``None``) once it reaches
    :exc:`Exception`, :exc:`BaseException` or :class:`object`, so a
    caller that gets ``None`` back should fall back to
    :exc:`UnpickleableException` instead.

    :param exc: An exception instance.
    :returns: the nearest pickleable ancestor instance, or ``None`` if
        only :exc:`Exception`/:exc:`BaseException`/:class:`object`
        remain.
    :rtype: :exc:`Exception`
    """
    unwanted = (Exception, BaseException, object)
    mro_ = getattr(exc.__class__, "mro", lambda: [])
    for supercls in mro_():
        # Stop before the generic bases: re-raising a plain Exception
        # would lose the original type information.
        if any(supercls is cls for cls in unwanted):
            return None
        try:
            exc_args = getattr(exc, "args", [])
            superexc = supercls(*exc_args)
            pickle.dumps(superexc)
        # Bug fix: was a bare ``except:``, which also swallowed
        # KeyboardInterrupt/SystemExit.  Any failure here just means
        # "try the next ancestor".
        except Exception:
            pass
        else:
            return superexc
    return None
def prepare_exception(self, exc):
    """Prepare exception for serialization.

    :param exc: the exception instance to be stored.
    :returns: the nearest pickleable ancestor of *exc* if one exists,
        *exc* itself if it pickles as-is, otherwise an
        :class:`UnpickleableExceptionWrapper` describing it.
    """
    nearest = find_nearest_pickleable_exception(exc)
    if nearest:
        return nearest
    try:
        pickle.dumps(exc)
    except pickle.PickleError:
        # Can't be pickled at all: ship a pickleable description
        # of the exception instead.
        return UnpickleableExceptionWrapper(
            exc.__class__.__module__,
            exc.__class__.__name__,
            getattr(exc, "args", []))
    # Bug fix: previously fell through and implicitly returned None
    # here, silently discarding a perfectly pickleable exception.
    return exc
def _store_result(self, task_id, result, status, traceback=None):
    """Store return value and status of an executed task.

    :param task_id: id of the task this is the result of.
    :param result: the (pickleable) return value of the task.
    :param status: current task status.
    :param traceback: traceback string at the point of failure, if any.
    :returns: *result*, for chaining — consistent with the other
        ``_store_result`` backends in this codebase.
    """
    from pymongo.binary import Binary

    meta = {"_id": task_id,
            "status": status,
            "result": Binary(pickle.dumps(result)),
            # NOTE(review): datetime.now() is naive *local* time; the
            # cassandra backend uses utcnow() — confirm which is
            # intended before changing stored data.
            "date_done": datetime.now(),
            "traceback": Binary(pickle.dumps(traceback))}
    db = self._get_database()
    taskmeta_collection = db[self.mongodb_taskmeta_collection]
    taskmeta_collection.save(meta, safe=True)
    # Bug fix: previously returned None, unlike the sibling backends.
    return result
def _store_result(self, task_id, result, status, traceback=None):
    """Store return value and status of an executed task.

    Writes the pickled result metadata under ``task_id`` and records
    the task in a sharded, time-ordered results index.

    :param task_id: id of the task this is the result of.
    :param result: the (pickleable) return value of the task.
    :param status: current task status.
    :param traceback: traceback string at the point of failure, if any.
    """
    import calendar

    cf = self._get_column_family()
    date_done = datetime.utcnow()
    # Spread index writes across self._index_shards row keys.
    index_key = 'celery.results.index!%02x' % (
        random.randrange(self._index_shards))
    # Bug fix: date_done is UTC, but time.mktime() interprets the
    # tuple as *local* time, skewing index timestamps by the local
    # UTC offset.  calendar.timegm() is the UTC counterpart.
    index_column_name = '%8x!%s' % (calendar.timegm(date_done.timetuple()),
                                    task_id)
    meta = {"status": status,
            "result": pickle.dumps(result),
            "date_done": date_done.strftime('%Y-%m-%dT%H:%M:%SZ'),
            "traceback": pickle.dumps(traceback)}
    cf.insert(task_id, meta)
    cf.insert(index_key, {index_column_name: status})
def _store_result(self, task_id, result, status, traceback=None):
    """Persist the outcome of an executed task to the MongoDB
    taskmeta collection, returning *result* for chaining."""
    from pymongo.binary import Binary

    meta = {"_id": task_id,
            "status": status,
            "date_done": datetime.now()}
    meta["result"] = Binary(pickle.dumps(result))
    meta["traceback"] = Binary(pickle.dumps(traceback))

    collection = self._get_database()[self.mongodb_taskmeta_collection]
    collection.save(meta, safe=True)
    return result
def store_result(self, task_id, result, status):
    """Store task result and status in the cache.

    The result is run through the matching prepare hook for its
    status before being pickled.
    """
    if status == "FAILURE":
        result = self.prepare_exception(result)
    elif status == "DONE":
        result = self.prepare_result(result)
    serialized = pickle.dumps({"status": status, "result": result})
    self.set(self.get_cache_key_for_task(task_id), serialized)
def create_message(backend, **data):
    """Create a pickled, binary-encoded :class:`BaseMessage` from
    keyword arguments.

    A unique ``id`` is generated only when the caller did not supply
    one — consistent with the other ``create_message`` helpers in this
    codebase, which use ``setdefault`` instead of unconditionally
    clobbering a caller-provided id.
    """
    data.setdefault("id", gen_unique_id())
    return BaseMessage(
        backend,
        # **data already produced a fresh dict; no copy needed.
        body=pickle.dumps(data),
        content_type="application/x-python-serialize",
        content_encoding="binary",
    )
def map_async(cls, func, args, timeout=None):
    """Distribute processing of the arguments and collect the
    results asynchronously.

    :returns: :class:`celery.result.AsyncResult` instance.
    """
    serialized = pickle.dumps(func)
    return AsynchronousMapTask.delay(serialized, args, timeout=timeout)
class Listener(models.Model):
    """A registered webhook listener: a URL to notify when ``hook`` fires."""

    # Name of the hook this listener subscribes to.
    hook = models.CharField(_("hook"), max_length=255,
                            help_text=_("Connects to hook"))
    # Destination URL for notifications.
    # NOTE(review): ``verify_exists`` is deprecated/removed in modern
    # Django — confirm the targeted Django version.
    url = models.URLField(verify_exists=False,
                          help_text=_("The URL I'm listening at."))
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    # NOTE: these defaults are evaluated once at class-definition time;
    # that is safe here only because the pickled bytes are immutable.
    config = PickledObjectField(_("configuration"),
                                default=pickle.dumps({}),
                                help_text=_("Hook specific configuration."))
    match = PickledObjectField(_(u"conditions"),
                               default=pickle.dumps({}),
                               help_text=_("Hook specific event filter"))

    class Meta:
        verbose_name = _("listener")
        verbose_name_plural = _("listeners")

    def __unicode__(self):
        return "%s match:%s config:%s" % (self.url, self.match, self.config)
def execute_remote(fun, *args, **kwargs):
    r"""Execute arbitrary function/object remotely.

    :param fun: A callable function or object.
    :param \*args: Positional arguments to apply to the function.
    :param \*\*kwargs: Keyword arguments to apply to the function.

    The object must be picklable, so you can't use lambdas or
    functions defined in the REPL (the objects must have an associated
    module).

    :returns: class:`celery.result.AsyncResult`.
    """
    serialized = pickle.dumps(fun)
    return ExecuteRemoteTask.delay(serialized, args, kwargs)
class TestPickle(unittest.TestCase):
    """Verify that custom exceptions survive a pickle round-trip."""

    def test_pickle_regular_exception(self):
        exc = None
        try:
            raise RegularException("RegularException raised")
        # Fix: ``except X, e:`` is Python-2-only syntax; ``as`` works
        # on 2.6+ and 3.x.  Rebind to a second name because Python 3
        # deletes the ``as`` target when the handler exits.
        except RegularException as caught:
            exc = caught
        pickled = pickle.dumps({"exception": exc})
        unpickled = pickle.loads(pickled)
        exception = unpickled.get("exception")
        self.assertTrue(exception)
        self.assertIsInstance(exception, RegularException)
        self.assertTupleEqual(exception.args, ("RegularException raised", ))
def execute_remote(fun, *args, **kwargs):
    r"""Execute arbitrary function/object remotely.

    :param fun: A callable function or object.
    :param \*args: Positional arguments to apply to the function.
    :param \*\*kwargs: Keyword arguments to apply to the function.

    The object must be picklable, so you can't use lambdas or
    functions defined in the REPL (the objects must have an associated
    module).

    :returns: class:`celery.result.AsyncResult`.
    """
    pickled_fun = pickle.dumps(fun)
    return ExecuteRemoteTask.delay(pickled_fun, args, kwargs)
def dmap_async(fun, args, timeout=None):
    """Distribute processing of the arguments and collect the results
    asynchronously.

    :returns: :class:`celery.result.AsyncResult` instance.

    Example

        >>> from celery.task import dmap_async
        >>> import operator
        >>> presult = dmap_async(operator.add, [[2, 2], [4, 4], [8, 8]])
        >>> presult
        <AsyncResult: 373550e8-b9a0-4666-bc61-ace01fa4f91d>
        >>> presult.status
        'SUCCESS'
        >>> presult.result
        [4, 8, 16]

    """
    pickled_fun = pickle.dumps(fun)
    return AsynchronousMapTask.delay(pickled_fun, args, timeout=timeout)
def dmap_async(fun, args, timeout=None):
    """Distribute processing of the arguments and collect the results
    asynchronously.

    :returns: :class:`celery.result.AsyncResult` object.

    Example

        >>> from celery.task import dmap_async
        >>> import operator
        >>> presult = dmap_async(operator.add, [[2, 2], [4, 4], [8, 8]])
        >>> presult
        <AsyncResult: 373550e8-b9a0-4666-bc61-ace01fa4f91d>
        >>> presult.status
        'SUCCESS'
        >>> presult.result
        [4, 8, 16]

    """
    serialized = pickle.dumps(fun)
    return AsynchronousMapTask.delay(serialized, args, timeout=timeout)
def _dmap(fun, args, timeout=None):
    """Pickle *fun*, run it over every element of *args* as a
    :class:`TaskSet`, and block for the joined results (up to
    *timeout* seconds)."""
    serialized = pickle.dumps(fun)
    task_args = [((serialized, arg, {}), {}) for arg in args]
    taskset = TaskSet(ExecuteRemoteTask, task_args)
    return taskset.apply_async().join(timeout=timeout)
def create_message(backend, **data):
    """Build a pickled, binary-encoded :class:`Message`, generating a
    unique id unless the caller supplied one."""
    if "id" not in data:
        data["id"] = gen_unique_id()
    body = pickle.dumps(dict(**data))
    return Message(backend,
                   body=body,
                   content_type="application/x-python-serialize",
                   content_encoding="binary")
def get_db_prep_save(self, value):
    """Pickle-wrap *value* into a :class:`PickledObject` before it is
    written to the database.

    ``None`` and already-wrapped values pass through unchanged.
    """
    if value is None or isinstance(value, PickledObject):
        return value
    return PickledObject(pickle.dumps(value))
def test_execute_remote(self):
    """A pickled function applied remotely yields its return value."""
    outcome = ExecuteRemoteTask.apply(
        args=[pickle.dumps(some_func), [10], {}]).get()
    self.assertEqual(outcome, 100)
def create_message(backend, **data):
    """Build a pickled, binary-encoded :class:`BaseMessage`,
    generating a unique id unless the caller supplied one."""
    data.setdefault("id", gen_unique_id())
    payload = pickle.dumps(dict(**data))
    return BaseMessage(backend,
                       body=payload,
                       content_type="application/x-python-serialize",
                       content_encoding="binary")
def test_execute_remote(self):
    """A pickled function applied remotely yields its return value."""
    # Fix: assertEquals is a deprecated alias; use assertEqual
    # (consistent with the other test modules in this codebase).
    self.assertEqual(ExecuteRemoteTask.apply(
        args=[pickle.dumps(some_func), [10], {}]).get(), 100)
def _store_result(self, task_id, result, status, traceback=None):
    """Serialize and cache the status/result/traceback of a task,
    returning *result* for chaining."""
    serialized = pickle.dumps({"status": status,
                               "result": result,
                               "traceback": traceback})
    self.set(self.get_key_for_task(task_id), serialized)
    return result
def _save_taskset(self, taskset_id, result):
    """Serialize and cache the result of a whole taskset, returning
    *result* for chaining."""
    self.set(self.get_key_for_taskset(taskset_id),
             pickle.dumps({"result": result}))
    return result
def remote_execute(cls, func, args):
    """Apply ``args`` to function by distributing the args to the
    celery server(s)."""
    serialized = pickle.dumps(func)
    task_args = [[[serialized, arg, {}], {}] for arg in args]
    return cls(ExecuteRemoteTask, task_args)
exc = None try: raise RegularException("RegularException raised") except RegularException, exc: pass pickled = pickle.dumps({"exception": exc}) unpickled = pickle.loads(pickled) exception = unpickled.get("exception") self.assertTrue(exception) self.assertIsInstance(exception, RegularException) self.assertTupleEqual(exception.args, ("RegularException raised", )) def test_pickle_arg_override_exception(self): exc = None try: raise ArgOverrideException("ArgOverrideException raised", status_code=100) except ArgOverrideException, exc: pass pickled = pickle.dumps({"exception": exc}) unpickled = pickle.loads(pickled) exception = unpickled.get("exception") self.assertTrue(exception) self.assertIsInstance(exception, ArgOverrideException) self.assertTupleEqual(exception.args, ("ArgOverrideException raised", 100)) self.assertEqual(exception.status_code, 100)
e = None try: raise RegularException("RegularException raised") except RegularException, e: pass pickled = pickle.dumps({"exception": e}) unpickled = pickle.loads(pickled) exception = unpickled.get("exception") self.assertTrue(exception) self.assertTrue(isinstance(exception, RegularException)) self.assertEquals(exception.args, ("RegularException raised", )) def test_pickle_arg_override_exception(self): e = None try: raise ArgOverrideException("ArgOverrideException raised", status_code=100) except ArgOverrideException, e: pass pickled = pickle.dumps({"exception": e}) unpickled = pickle.loads(pickled) exception = unpickled.get("exception") self.assertTrue(exception) self.assertTrue(isinstance(exception, ArgOverrideException)) self.assertEquals(exception.args, ("ArgOverrideException raised", 100)) self.assertEquals(exception.status_code, 100)