def refresh(self):  # noqa
    """Overwrite the current instance's properties with the values in the
    corresponding Redis key.

    Will raise a NoSuchJobError if no corresponding Redis key exists.
    """
    key = self.key
    stored = decode_redis_hash(self.connection.hgetall(key))
    if not stored:
        raise NoSuchJobError('No such job: %s' % (key,))

    def to_date(date_str):
        # Absent fields stay None instead of being parsed.
        return None if date_str is None else utcparse(as_text(date_str))

    try:
        self.data = stored['data']
    except KeyError:
        raise NoSuchJobError('Unexpected job format: {0}'.format(stored))
    self.created_at = to_date(as_text(stored.get('created_at')))
    self.origin = as_text(stored.get('origin'))
    self.description = as_text(stored.get('description'))
    self.enqueued_at = to_date(as_text(stored.get('enqueued_at')))
    self.ended_at = to_date(as_text(stored.get('ended_at')))
    result = stored.get('result')
    self._result = unpickle(result) if result else None
    self.exc_info = stored.get('exc_info')
    timeout = stored.get('timeout')
    self.timeout = int(timeout) if timeout else None
    result_ttl = stored.get('result_ttl')
    self.result_ttl = int(result_ttl) if result_ttl else None
    self._status = as_text(stored.get('status') if stored.get('status') else None)
    self._dependency_id = as_text(stored.get('dependency_id', None))
    meta = stored.get('meta')
    self.meta = unpickle(meta) if meta else {}
def refresh(self):  # noqa
    """Overwrite the current instance's properties with the values in the
    corresponding Redis key.

    Will raise a NoSuchJobError if no corresponding Redis key exists.
    """
    key = self.key
    obj = decode_redis_hash(self.connection.hgetall(key))
    if len(obj) == 0:
        raise NoSuchJobError('No such job: {0}'.format(key))

    def to_date(date_str):
        if date_str is None:
            return None
        return utcparse(as_text(date_str))

    def to_int(value):
        # Numeric hash fields arrive as strings; missing/empty becomes None.
        return int(value) if value else None

    try:
        self.data = obj['data']
    except KeyError:
        raise NoSuchJobError('Unexpected job format: {0}'.format(obj))
    self.created_at = to_date(as_text(obj.get('created_at')))
    self.origin = as_text(obj.get('origin'))
    self.description = as_text(obj.get('description'))
    self.enqueued_at = to_date(as_text(obj.get('enqueued_at')))
    self.ended_at = to_date(as_text(obj.get('ended_at')))
    self._result = unpickle(obj.get('result')) if obj.get('result') else None
    self.exc_info = obj.get('exc_info')
    self.timeout = to_int(obj.get('timeout'))
    self.result_ttl = to_int(obj.get('result_ttl'))
    self._status = as_text(obj.get('status') or None)
    self._dependency_id = as_text(obj.get('dependency_id'))
    self.ttl = to_int(obj.get('ttl'))
    self.meta = unpickle(obj.get('meta')) if obj.get('meta') else {}
def restore(self, raw_data):
    """Overwrite properties with the provided values stored in Redis"""
    obj = decode_redis_hash(raw_data)
    try:
        payload = obj['data']
    except KeyError:
        raise NoSuchJobError('Unexpected job format: {0}'.format(obj))
    try:
        self.data = zlib.decompress(payload)
    except zlib.error:
        # Fallback to uncompressed string
        self.data = payload

    self.created_at = str_to_date(obj.get('created_at'))
    self.origin = as_text(obj.get('origin'))
    worker_name = obj.get('worker_name')
    self.worker_name = worker_name.decode() if worker_name else None
    self.description = as_text(obj.get('description'))
    self.enqueued_at = str_to_date(obj.get('enqueued_at'))
    self.started_at = str_to_date(obj.get('started_at'))
    self.ended_at = str_to_date(obj.get('ended_at'))
    self.last_heartbeat = str_to_date(obj.get('last_heartbeat'))

    if obj.get('result'):
        try:
            self._result = self.serializer.loads(obj.get('result'))
        except Exception:
            # Best effort: a payload the serializer cannot read is replaced
            # with a placeholder instead of failing the whole restore.
            self._result = "Unserializable return value"

    self.timeout = parse_timeout(obj.get('timeout')) if obj.get('timeout') else None
    self.result_ttl = int(obj.get('result_ttl')) if obj.get('result_ttl') else None
    self.failure_ttl = int(obj.get('failure_ttl')) if obj.get('failure_ttl') else None
    status = obj.get('status')
    self._status = status.decode() if status else None

    # Prefer the JSON list of dependency ids; fall back to the legacy
    # single 'dependency_id' field for backwards compatibility.
    dep_ids = obj.get('dependency_ids')
    dep_id = obj.get('dependency_id')
    if dep_ids:
        self._dependency_ids = json.loads(dep_ids.decode())
    elif dep_id:
        self._dependency_ids = [dep_id.decode()]
    else:
        self._dependency_ids = []

    self.ttl = int(obj.get('ttl')) if obj.get('ttl') else None
    self.meta = self.serializer.loads(obj.get('meta')) if obj.get('meta') else {}
    self.retries_left = int(obj.get('retries_left')) if obj.get('retries_left') else None
    if obj.get('retry_intervals'):
        self.retry_intervals = json.loads(obj.get('retry_intervals').decode())

    raw_exc_info = obj.get('exc_info')
    if raw_exc_info:
        try:
            self.exc_info = as_text(zlib.decompress(raw_exc_info))
        except zlib.error:
            # Fallback to uncompressed string
            self.exc_info = as_text(raw_exc_info)
def refresh(self, safe=False):  # noqa
    """Reload this job's attributes from its Redis hash.

    :param safe: accepted for interface compatibility; unused in this
        variant (no payload deserialization happens here).
    :raises NoSuchJobError: when the Redis key does not exist.
    """
    obj = decode_redis_hash(self.connection.hgetall(self.key))
    if not obj:
        raise NoSuchJobError('No such job: %s' % (self.key, ))

    def to_date(date_str):
        return None if date_str is None else times.to_universal(as_text(date_str))

    self.created_at = to_date(as_text(obj.get('created_at')))
    self.origin = as_text(obj.get('origin'))
    self.description = as_text(obj.get('description'))
    self.enqueued_at = to_date(as_text(obj.get('enqueued_at')))
    self.ended_at = to_date(as_text(obj.get('ended_at')))
    result = obj.get('result')
    self._result = unpickle(result) if result else None
    self.exc_info = obj.get('exc_info')
    timeout = obj.get('timeout')
    self.timeout = int(timeout) if timeout else None
    result_ttl = obj.get('result_ttl')
    self.result_ttl = int(result_ttl) if result_ttl else None
    self._status = as_text(obj.get('status') or None)
    self._dependency_id = as_text(obj.get('dependency_id'))
    meta = obj.get('meta')
    self.meta = unpickle(meta) if meta else {}
def refresh(self):  # noqa
    """Overwrite the current instance's properties with the values in the
    corresponding Redis key.

    Will raise a NoSuchJobError if no corresponding Redis key exists.
    """
    key = self.key
    stored = decode_redis_hash(self.connection.hgetall(key))
    if not stored:
        raise NoSuchJobError("No such job: %s" % (key,))

    def to_date(date_str):
        # Missing timestamps stay None; everything else is parsed as UTC.
        if date_str is None:
            return
        return utcparse(as_text(date_str))

    try:
        self.data = stored["data"]
    except KeyError:
        raise NoSuchJobError("Unexpected job format: {0}".format(stored))
    self.created_at = to_date(as_text(stored.get("created_at")))
    self.origin = as_text(stored.get("origin"))
    self.description = as_text(stored.get("description"))
    self.enqueued_at = to_date(as_text(stored.get("enqueued_at")))
    self.ended_at = to_date(as_text(stored.get("ended_at")))
    raw_result = stored.get("result")
    self._result = unpickle(raw_result) if raw_result else None
    self.exc_info = stored.get("exc_info")
    raw_timeout = stored.get("timeout")
    self.timeout = int(raw_timeout) if raw_timeout else None
    raw_result_ttl = stored.get("result_ttl")
    self.result_ttl = int(raw_result_ttl) if raw_result_ttl else None
    self._status = as_text(stored.get("status") if stored.get("status") else None)
    self._dependency_id = as_text(stored.get("dependency_id", None))
    raw_meta = stored.get("meta")
    self.meta = unpickle(raw_meta) if raw_meta else {}
def refresh(self, safe=False):  # noqa
    """Reload this job's attributes from its Redis hash.

    :param safe: accepted for interface compatibility; unused here.
    :raises NoSuchJobError: when the Redis key does not exist.
    """
    key = self.key
    obj = decode_redis_hash(self.connection.hgetall(key))
    if len(obj) == 0:
        raise NoSuchJobError('No such job: %s' % (key,))

    def to_date(date_str):
        if date_str is None:
            return None
        else:
            return times.to_universal(as_text(date_str))

    def to_int(value):
        # Hash values are strings; empty or absent values map to None.
        return int(value) if value else None

    self.created_at = to_date(as_text(obj.get('created_at')))
    self.origin = as_text(obj.get('origin'))
    self.description = as_text(obj.get('description'))
    self.enqueued_at = to_date(as_text(obj.get('enqueued_at')))
    self.ended_at = to_date(as_text(obj.get('ended_at')))
    self._result = unpickle(obj.get('result')) if obj.get('result') else None
    self.exc_info = obj.get('exc_info')
    self.timeout = to_int(obj.get('timeout'))
    self.result_ttl = to_int(obj.get('result_ttl'))
    self._status = as_text(obj.get('status') if obj.get('status') else None)
    self._dependency_id = as_text(obj.get('dependency_id', None))
    self.meta = unpickle(obj.get('meta')) if obj.get('meta') else {}
def restore(self, raw_data):
    """Overwrite properties with the provided values stored in Redis"""
    obj = decode_redis_hash(raw_data)
    try:
        payload = obj['data']
    except KeyError:
        raise NoSuchJobError('Unexpected job format: {0}'.format(obj))
    try:
        self.data = zlib.decompress(payload)
    except zlib.error:
        # Fallback to uncompressed string
        self.data = payload

    def to_int(value):
        # Numeric hash fields arrive as strings; absent becomes None.
        return int(value) if value else None

    self.created_at = str_to_date(obj.get('created_at'))
    self.origin = as_text(obj.get('origin'))
    self.description = as_text(obj.get('description'))
    self.enqueued_at = str_to_date(obj.get('enqueued_at'))
    self.started_at = str_to_date(obj.get('started_at'))
    self.ended_at = str_to_date(obj.get('ended_at'))

    if obj.get('result'):
        try:
            self._result = self.serializer.loads(obj.get('result'))
        except Exception:
            # Best effort: an unreadable result payload is replaced by a
            # placeholder rather than aborting the restore.
            self._result = "Unserializable return value"

    self.timeout = parse_timeout(obj.get('timeout')) if obj.get('timeout') else None
    self.result_ttl = to_int(obj.get('result_ttl'))
    self.failure_ttl = to_int(obj.get('failure_ttl'))
    self._status = as_text(obj.get('status')) if obj.get('status') else None

    dependency_id = obj.get('dependency_id', None)
    self._dependency_ids = [as_text(dependency_id)] if dependency_id else []

    self.ttl = to_int(obj.get('ttl'))
    self.meta = self.serializer.loads(obj.get('meta')) if obj.get('meta') else {}

    raw_exc_info = obj.get('exc_info')
    if raw_exc_info:
        try:
            self.exc_info = as_text(zlib.decompress(raw_exc_info))
        except zlib.error:
            # Fallback to uncompressed string
            self.exc_info = as_text(raw_exc_info)
def _get_job_ids(self, sort=True, statuses=None):
    """Collect the ids of this queue's jobs that are in one of *statuses*.

    :param sort: when True, order results by ``ended_at`` (falling back to
        ``enqueued_at``), most recent first.
    :param statuses: iterable of job statuses to include; defaults to
        queued, started and finished jobs.
    :returns: list of job ids (the Redis key minus the namespace prefix).
    """
    # Fix: the previous signature used a mutable list as a default
    # argument; use None and substitute an immutable default instead.
    if statuses is None:
        statuses = (Status.QUEUED, Status.STARTED, Status.FINISHED)
    # Hoisted out of the loop: the field list never changes per key.
    fields = ['origin', 'status', 'enqueued_at', 'ended_at']
    prefix = self.redis_job_namespace_prefix
    jobs = []
    for key in self.connection.keys(prefix + '*'):
        job = dict(zip(fields,
                       decode_redis_hash(self.connection.hmget(key, fields))))
        if job.get('origin') == self.name and job.get('status') in statuses:
            job['id'] = key[len(prefix):]
            jobs.append(job)
    if sort:
        # Finished jobs sort by completion time, pending ones by enqueue time.
        jobs.sort(key=lambda j: j.get('ended_at') or j.get('enqueued_at'),
                  reverse=True)
    return [j['id'] for j in jobs]
def refresh(self, safe=False):  # noqa
    """Overwrite the current instance's properties with the values in the
    corresponding Redis key.

    Will raise a NoSuchJobError if no corresponding Redis key exists.

    :param safe: when True, an UnpickleError from the stored payload is
        swallowed instead of propagated.
    """
    key = self.key
    obj = decode_redis_hash(self.connection.hgetall(key))
    if not obj:
        raise NoSuchJobError('No such job: %s' % (key, ))

    def to_date(date_str):
        return None if date_str is None else times.to_universal(as_text(date_str))

    try:
        self.data = obj['data']
    except KeyError:
        raise NoSuchJobError('Unexpected job format: {0}'.format(obj))
    try:
        self._func_name, self._instance, self._args, self._kwargs = \
            unpickle(self.data)
    except UnpickleError:
        if not safe:
            raise

    self.created_at = to_date(as_text(obj.get('created_at')))
    self.origin = as_text(obj.get('origin'))
    self.description = as_text(obj.get('description'))
    self.enqueued_at = to_date(as_text(obj.get('enqueued_at')))
    self.ended_at = to_date(as_text(obj.get('ended_at')))
    result = obj.get('result')
    self._result = unpickle(result) if result else None
    self.exc_info = obj.get('exc_info')
    timeout = obj.get('timeout')
    self.timeout = int(timeout) if timeout else None
    result_ttl = obj.get('result_ttl')
    self.result_ttl = int(result_ttl) if result_ttl else None
    self._status = as_text(obj.get('status') if obj.get('status') else None)
    meta = obj.get('meta')
    self.meta = unpickle(meta) if meta else {}
def restore(self, raw_data):
    """Overwrite properties with the provided values stored in Redis"""
    obj = decode_redis_hash(raw_data)
    try:
        payload = obj['data']
    except KeyError:
        raise NoSuchJobError('Unexpected job format: {0}'.format(obj))
    try:
        self.data = zlib.decompress(payload)
    except zlib.error:
        # Fallback to uncompressed string
        self.data = payload

    # Jobs enqueued straight from JSON carry their callable metadata in
    # dedicated 'func_*' hash fields instead of the pickled payload.
    json_origin = bool(obj.get('json_origin'))

    self.created_at = str_to_date(obj.get('created_at'))
    self.origin = as_text(obj.get('origin'))
    worker_name = obj.get('worker_name')
    self.worker_name = worker_name.decode() if worker_name else None
    self.description = as_text(obj.get('description'))
    self.enqueued_at = str_to_date(obj.get('enqueued_at'))
    self.started_at = str_to_date(obj.get('started_at'))
    self.ended_at = str_to_date(obj.get('ended_at'))
    self.last_heartbeat = str_to_date(obj.get('last_heartbeat'))

    self._func_name = as_text(obj.get('func_name')) if json_origin else self._func_name
    self._instance = None
    self._args = ()
    kwargs = {}
    if json_origin:
        for field, value in obj.items():
            if field.startswith('func_') and field != 'func_name':
                # 'func_url' -> kwarg 'url', etc.
                kwargs[field.split("func_", 1)[1]] = as_text(value)
    self._kwargs = kwargs

    if obj.get('result'):
        try:
            self._result = self.serializer.loads(obj.get('result'))
        except Exception:
            # Best effort: keep a placeholder instead of failing the restore.
            self._result = "Unserializable return value"

    self.timeout = parse_timeout(obj.get('timeout')) if obj.get('timeout') else None
    self.result_ttl = int(obj.get('result_ttl')) if obj.get('result_ttl') else None
    self.failure_ttl = int(obj.get('failure_ttl')) if obj.get('failure_ttl') else None
    status = obj.get('status')
    self._status = status.decode() if status else None

    dependency_id = obj.get('dependency_id', None)
    self._dependency_ids = [as_text(dependency_id)] if dependency_id else []

    self.ttl = int(obj.get('ttl')) if obj.get('ttl') else None
    self.meta = self.serializer.loads(obj.get('meta')) if obj.get('meta') else {}
    self.retries_left = int(obj.get('retries_left')) if obj.get('retries_left') else None
    if obj.get('retry_intervals'):
        self.retry_intervals = json.loads(obj.get('retry_intervals').decode())

    raw_exc_info = obj.get('exc_info')
    if raw_exc_info:
        try:
            self.exc_info = as_text(zlib.decompress(raw_exc_info))
        except zlib.error:
            # Fallback to uncompressed string
            self.exc_info = as_text(raw_exc_info)
def get_all_jobs(queue=None, redis_conn=None):
    '''Return every RQ job stored in Redis, optionally filtered by queue.

    This is pretty much a ripoff of what RQ does for queued jobs, recreated
    so that we can grab ALL jobs, not only the queued ones.

    :param queue: when given, only jobs whose 'origin' matches this queue
        name are included.
    :param redis_conn: an open Redis connection to read the job hashes from.
    :returns: list of job dicts, sorted by status.
    '''
    def to_date(date_str):
        if date_str is None:
            return None
        # Bug fix: the old version returned the raw string first, leaving
        # the utcparse call as unreachable dead code; parse it properly.
        return utcparse(as_text(date_str))

    def unpickle(pickled_string):
        # Best effort: an unreadable payload is logged and treated as None.
        try:
            return pickle.loads(pickled_string)
        except Exception as e:
            print(str(e))  # bug fix: was Python-2-only 'print str(e)'
            return None

    jobs = []
    for job_id in redis_conn.keys('rq:job:*'):
        obj = decode_redis_hash(redis_conn.hgetall(job_id))
        if len(obj) == 0:
            # Bug fix: was a no-op 'pass' that still appended an empty job;
            # skip keys that expired between KEYS and HGETALL.
            continue
        if queue is not None and queue != as_text(obj.get('origin')):
            # A specific queue was requested and this job isn't in it:
            # don't process its details and don't return it (the old code's
            # 'pass' fell through and returned it anyway).
            continue
        jobs.append({
            'job_id': job_id.replace('rq:job:', ''),
            'created_at': obj.get('created_at'),
            'origin': as_text(obj.get('origin')),
            'description': as_text(obj.get('description')),
            'enqueued_at': to_date(as_text(obj.get('enqueued_at'))),
            'ended_at': to_date(as_text(obj.get('ended_at'))),
            'result': unpickle(obj.get('result')) if obj.get('result') else None,  # noqa
            'exc_info': obj.get('exc_info'),
            'timeout': int(obj.get('timeout')) if obj.get('timeout') else None,
            'result_ttl': int(obj.get('result_ttl')) if obj.get('result_ttl') else None,  # noqa
            'status': as_text(obj.get('status') if obj.get('status') else None),
            'dependency_id': as_text(obj.get('dependency_id', None)),
            'meta': unpickle(obj.get('meta')) if obj.get('meta') else {}
        })
    # 'status' can be None; substitute '' so the sort never compares None
    # against str (a TypeError on Python 3).
    return sorted(jobs, key=lambda k: k['status'] or '')