def refresh(self):
    """Reload this job's attributes from its Redis hash.

    Overwrites the current instance's properties with the values stored
    under ``self.key``.  Raises ``NoSuchJobError`` (tagged with a
    ``job_id`` attribute) when the key is missing or has no ``data``
    field, and re-raises ``UnpickleError`` (also tagged) when the pickled
    payload cannot be loaded.
    """
    obj = yield self.connection.hgetall(self.key)
    if not obj:
        err = NoSuchJobError('No such job: %s' % (self.key,))
        err.job_id = self.id
        raise err

    def to_date(date_str):
        # Absent fields stay None; stored values are parsed to UTC.
        return None if date_str is None else times.to_universal(date_str)

    try:
        self.data = str(obj['data'])
    except KeyError:
        err = NoSuchJobError('Unexpected job format: {0}'.format(obj))
        err.job_id = self.id
        raise err

    try:
        self._func_name, self._instance, self._args, self._kwargs = unpickle(self.data)
    except UnpickleError as err:
        err.job_id = self.id
        raise err

    self.created_at = to_date(obj.get('created_at'))
    self.origin = obj.get('origin')
    self.description = obj.get('description')
    self.enqueued_at = to_date(obj.get('enqueued_at'))
    self.ended_at = to_date(obj.get('ended_at'))

    # Optional fields: empty/missing values collapse to None (or {} for meta).
    result = obj.get('result')
    self._result = unpickle(str(result)) if result else None  # noqa
    exc_info = obj.get('exc_info')
    self.exc_info = exc_info if exc_info else None  # noqa
    timeout = obj.get('timeout')
    self.timeout = int(timeout) if timeout else None
    result_ttl = obj.get('result_ttl')
    self.result_ttl = None if result_ttl is None else int(result_ttl)
    status = obj.get('status')
    self._status = status if status else None
    meta = obj.get('meta')
    self.meta = unpickle(str(meta)) if meta else {}
def queryset(self, request, queryset):
    """Filter the media changelist according to the selected encoding state.

    The id lists below are always computed up front, even though each
    branch only needs some of them.  Not optimal, but readable: computing
    them inside the specific ``if`` statements would be cumbersome.
    """
    # UGLY hack to find the currently running encode job: scan every Redis
    # key and look for an rq job hash whose function is
    # 'archives.admin.encode' and whose status is 'started'.
    # https://github.com/nvie/rq/pull/269
    in_progress_ids = []
    redis_conn = django_rq.get_connection()
    for k in redis_conn.keys():
        try:
            data = unpickle(redis_conn.hget(k, 'data'))
            status = redis_conn.hget(k, 'status')
            if data[0] == 'archives.admin.encode' and status == 'started':
                in_progress_ids = [data[2][0], ]
                break
        except Exception:
            # Best effort: keys that are not rq job hashes (or hold
            # unpicklable data) are skipped.  Narrowed from a bare
            # ``except`` so SystemExit/KeyboardInterrupt still propagate.
            pass

    queue = django_rq.get_queue('default')
    in_queue_ids = [job.args[0] for job in queue.jobs
                    if job.func_name == 'archives.admin.encode']
    failed_queue = django_rq.get_failed_queue('default')
    failed_ids = [job.args[0] for job in failed_queue.jobs
                  if job.func_name == 'archives.admin.encode']

    if self.value() == 'no_file':
        # We can't do file__isnull for queryset because FileField is
        # represented internally as a CharField, and Django stores
        # non-files as an empty string '' in the database.
        return queryset.filter(file="")
    if self.value() == 'in_queue':
        return queryset.filter(id__in=in_queue_ids)
    if self.value() == 'in_progress':
        return queryset.filter(id__in=in_progress_ids)
    if self.value() == 'failed':
        return queryset.filter(id__in=failed_ids)
    if self.value() == 'encoded':
        # Encoded = has a file, no pending/running/failed job, and the
        # per-media is_encoded check passes.
        encoded = [media.id for media in queryset if media.is_encoded]
        return queryset.exclude(file="").exclude(id__in=in_queue_ids)\
            .exclude(id__in=in_progress_ids)\
            .exclude(id__in=failed_ids).filter(id__in=encoded)
    if self.value() == 'not_encoded':
        not_encoded = [media.id for media in queryset
                       if not media.is_encoded]
        return queryset.exclude(file="").exclude(id__in=in_progress_ids)\
            .exclude(id__in=failed_ids).filter(id__in=not_encoded)\
            .exclude(id__in=in_queue_ids)
def _current_file_copied_in_progress(self):
    """Return the ids of files currently being copied (archive jobs).

    Scans every Redis key looking for an rq job hash whose function is
    'archives.admin.archive' and whose status is 'started'; at most one
    such job is reported (the scan stops at the first match).
    """
    _in_progress = []
    redis_conn = django_rq.get_connection()
    for k in redis_conn.keys():
        try:
            data = unpickle(redis_conn.hget(k, 'data'))
            status = redis_conn.hget(k, 'status')
            if data[0] == 'archives.admin.archive' and status == 'started':
                _in_progress = [data[2][0], ]
                break
        except Exception:
            # Best effort: non-job keys and unpicklable payloads are
            # skipped.  Narrowed from a bare ``except`` so
            # SystemExit/KeyboardInterrupt still propagate.
            pass
    return _in_progress
def refresh(self):
    """Overwrite the current instance's properties with the values in the
    corresponding Redis key.

    Will raise a NoSuchJobError if no corresponding Redis key exists.
    """
    key = self.key
    obj = decode_redis_hash((yield from self.connection.hgetall(key)))
    if len(obj) == 0:
        raise NoSuchJobError('No such job: {0}'.format(key))

    def to_date(text):
        # PEP 8 (E731): a named ``def`` instead of assigning a lambda.
        # Absent/empty fields stay None; stored values are parsed as UTC.
        return utcparse(as_text(text)) if text else None

    try:
        self.data = obj['data']
    except KeyError:
        raise NoSuchJobError('Unexpected job format: {0}'.format(obj))

    self.created_at = to_date(obj.get('created_at'))
    self.origin = as_text(obj.get('origin'))
    self.description = as_text(obj.get('description'))
    self.enqueued_at = to_date(obj.get('enqueued_at'))
    self.started_at = to_date(obj.get('started_at'))
    self.ended_at = to_date(obj.get('ended_at'))
    self._result = (unpickle(obj.get('result'))
                    if obj.get('result') else None)
    self.exc_info = obj.get('exc_info')
    self.timeout = int(obj.get('timeout')) if obj.get('timeout') else None
    self.result_ttl = (int(obj.get('result_ttl'))
                       if obj.get('result_ttl') else None)
    self._status = as_text(obj.get('status') if obj.get('status') else None)
    self._dependency_id = as_text(obj.get('dependency_id', None))
    self.ttl = int(obj.get('ttl')) if obj.get('ttl') else None
    self.meta = unpickle(obj.get('meta')) if obj.get('meta') else {}
def _encoding_state(self):
    """Return this media's encoding state constant.

    Checks, in order: no file, encode job running, encode job queued
    (returned together with its queue position), encode job failed, and
    finally encoded / not encoded based on ``is_encoded``.
    """
    # No file associated with the media
    if not self.file:
        return ENCODING_NO_FILE

    # The file is currently processed: scan Redis for a 'started' rq job
    # hash whose function is 'archives.admin.encode' targeting this id.
    redis_conn = django_rq.get_connection()
    for k in redis_conn.keys():
        try:
            data = unpickle(redis_conn.hget(k, 'data'))
            status = redis_conn.hget(k, 'status')
            if data[0] == 'archives.admin.encode' and status == 'started' and self.id == data[2][0]:
                return ENCODING_IN_PROGRESS
        except Exception:
            # Best effort: non-job keys and unpicklable payloads are
            # skipped.  Narrowed from a bare ``except`` so
            # SystemExit/KeyboardInterrupt still propagate.
            pass

    # The file is currently in queue for encoding process; the queue
    # position (index) is returned alongside the state.
    queue = django_rq.get_queue('default')
    for index, job in enumerate(queue.jobs):
        if job.func_name == 'archives.admin.encode':
            if job.args[0] == self.id:
                return ENCODING_IN_QUEUE, index

    # If not, the encoding process should have failed
    failed_queue = django_rq.get_failed_queue('default')
    for job in failed_queue.jobs:
        if job.func_name == 'archives.admin.encode':
            if job.args[0] == self.id:
                return ENCODING_FAILED

    # Or, there's no job for this media.  So, we have two cases: the
    # encoded files are available and media is encoded, or files are not
    # available and we checked before that we weren't processing the file
    # or having a 'failed' encoding process — so, the file is just 'not
    # encoded'.
    # NOTE: self.file is always truthy here (guarded at the top), so this
    # branch is always taken; kept for readability.
    if self.file:
        # Test if files in stream repository exist
        if self.is_encoded:
            return ENCODING_ENCODED
        else:
            return ENCODING_NOT_ENCODED
def serialize_job(job):
    """Build a plain dict describing *job* for serialization.

    Both the refresh and the result unpickling are best-effort: a stale
    job is serialized with whatever data is already loaded, and a result
    that cannot be unpickled is passed through as-is.
    """
    try:
        job.refresh()
    except Exception:
        pass

    try:
        result = unpickle(job._result)
    except UnpickleError:
        result = job._result

    return {
        'id': job.id,
        'created_at': serialize_date(job.created_at),
        'enqueued_at': serialize_date(job.enqueued_at),
        'ended_at': serialize_date(job.ended_at),
        'age': str(get_job_age(job)),
        'origin': job.origin,
        'result': result,
        'exc_info': job.exc_info,
        'description': job.description,
    }