def get_job_params(self):
    """Fetch and deserialize this job's parameters from Redis.

    Returns:
        The deserialized parameters stored under the job-params key,
        or ``None`` when the key does not exist in Redis.
    """
    raw = self.connection.get(self.get_job_params_key())
    return loads(raw) if raw is not None else None
def merge_job_params(self, keys, decrement, pipeline):
    """Merge new job parameters into the stored ones.

    Combining the key sets and summing the decrements allows one of the
    overlapping jobs to be skipped later.

    Args:
        keys: Set of keys to union with the stored key set.
        decrement: Amount added to the stored decrement.
        pipeline: Redis pipeline on which the updated value is queued
            (the write happens when the caller executes the pipeline).

    Returns:
        The merged ``(keys, decrement)`` tuple.
    """
    params_key = self.get_job_params_key()
    # NOTE(review): unlike get_job_params, this assumes the key exists —
    # loads() would fail on a missing key; presumably callers guarantee it.
    stored_keys, stored_decrement = loads(self.connection.get(params_key))
    merged = (keys | stored_keys, decrement + stored_decrement)
    pipeline.set(params_key, dumps(merged))
    return merged
def result(self):
    """Return the job's return value, fetched lazily from Redis (coroutine).

    Right after enqueueing, this is ``None``. Once the job has executed
    with a return value or exception, that value (or exception) is
    returned. A job whose function returns ``None`` never writes a result
    back to Redis, so a ``None`` here does NOT prove the job has not run —
    results stored in Redis also expire after a configured time
    (500 seconds by default).
    """
    # Only hit Redis while no result has been cached on this instance.
    if self._result is None:
        raw = yield from self.connection.hget(self.key, 'result')
        if raw is not None:
            self._result = loads(raw)
    return self._result