Example #1
File: job.py Project: qq18436558/rq
    def refresh(self):  # noqa
        """Overwrite the current instance's properties with the values in the
        corresponding Redis key.

        Will raise a NoSuchJobError if no corresponding Redis key exists.
        """
        key = self.key
        obj = decode_redis_hash(self.connection.hgetall(key))
        if len(obj) == 0:
            raise NoSuchJobError('No such job: {0}'.format(key))

        def to_date(date_str):
            if date_str is None:
                return
            else:
                return utcparse(as_text(date_str))

        try:
            raw_data = obj['data']
        except KeyError:
            raise NoSuchJobError('Unexpected job format: {0}'.format(obj))

        try:
            self.data = zlib.decompress(raw_data)
        except zlib.error:
            # Fallback to uncompressed string
            self.data = raw_data

        self.created_at = to_date(as_text(obj.get('created_at')))
        self.origin = as_text(obj.get('origin'))
        self.description = as_text(obj.get('description'))
        self.enqueued_at = to_date(as_text(obj.get('enqueued_at')))
        self.started_at = to_date(as_text(obj.get('started_at')))
        self.ended_at = to_date(as_text(obj.get('ended_at')))
        self._result = unpickle(obj.get('result')) if obj.get('result') else None  # noqa
        self.timeout = int(obj.get('timeout')) if obj.get('timeout') else None
        self.result_ttl = int(obj.get('result_ttl')) if obj.get('result_ttl') else None  # noqa
        self._status = as_text(obj.get('status') if obj.get('status') else None)
        self._dependency_id = as_text(obj.get('dependency_id', None))
        self.ttl = int(obj.get('ttl')) if obj.get('ttl') else None
        self.meta = unpickle(obj.get('meta')) if obj.get('meta') else {}

        raw_exc_info = obj.get('exc_info')
        if raw_exc_info:
            try:
                self.exc_info = as_text(zlib.decompress(raw_exc_info))
            except zlib.error:
                # Fallback to uncompressed string
                self.exc_info = as_text(raw_exc_info)
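
Note on the helper these examples share: every snippet on this page passes raw Redis values through as_text before comparing or parsing them. The real helper lives in rq's compat module; the sketch below is only an illustrative approximation of the behaviour the examples rely on (None passes through, bytes are decoded, text comes back unchanged), not the library's actual code.

def as_text(v):
    # Approximate sketch of rq.compat.as_text -- not the real implementation.
    if v is None:
        return None
    if isinstance(v, bytes):
        return v.decode('utf-8')
    return str(v)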
Example #2
File: job.py Project: wangyibing/rq
    def get_call_string(self):  # noqa
        """Returns a string representation of the call, formatted as a regular
        Python function invocation statement.
        """
        if self.func_name is None:
            return None

        arg_list = [as_text(repr(arg)) for arg in self.args]

        kwargs = ['{0}={1}'.format(k, as_text(repr(v))) for k, v in self.kwargs.items()]
        # Sort the kwargs because Python 3.3 and 3.4 would otherwise produce different call strings
        arg_list += sorted(kwargs)
        args = ', '.join(arg_list)

        return '{0}({1})'.format(self.func_name, args)
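
For a hypothetical job wrapping send(3, retry=True), the method above would render a call string like the one below. The names are made up for illustration and simply replay the formatting logic from Example #2.

func_name = 'myapp.tasks.send'   # assumed value of job.func_name
args = (3,)                      # assumed job.args
kwargs = {'retry': True}         # assumed job.kwargs

arg_list = [repr(arg) for arg in args]
arg_list += sorted('{0}={1}'.format(k, repr(v)) for k, v in kwargs.items())
print('{0}({1})'.format(func_name, ', '.join(arg_list)))
# -> myapp.tasks.send(3, retry=True)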
Example #3
    def test_timeouts(self):
        """Worker kills jobs after timeout."""
        sentinel_file = '/tmp/.rq_sentinel'

        q = Queue()
        w = Worker([q])

        # Put it on the queue with a timeout value
        res = q.enqueue(create_file_after_timeout,
                        args=(sentinel_file, 4),
                        timeout=1)

        try:
            os.unlink(sentinel_file)
        except OSError as e:
            # Only ignore "No such file or directory"; re-raise anything else
            if e.errno != 2:
                raise

        self.assertEqual(os.path.exists(sentinel_file), False)
        w.work(burst=True)
        self.assertEqual(os.path.exists(sentinel_file), False)

        # TODO: Having to do the manual refresh() here is really ugly!
        res.refresh()
        self.assertIn('JobTimeoutException', as_text(res.exc_info))
Example #4
    def perform_job(self, job):
        """Performs the actual work of a job.  Will/should only be called
        inside the work horse's process.
        """
        self.prepare_job_execution(job)

        with self.connection._pipeline() as pipeline:
            started_job_registry = StartedJobRegistry(job.origin, self.connection)

            try:
                with self.death_penalty_class(job.timeout or self.queue_class.DEFAULT_TIMEOUT):
                    rv = job.perform()  # execute the job

                # Update the job's state in Redis

                # Pickle the result in the same try-except block since we need
                # to use the same exc handling when pickling fails
                job._result = rv

                self.set_current_job_id(None, pipeline=pipeline)

                result_ttl = job.get_result_ttl(self.default_result_ttl)
                if result_ttl != 0:
                    job.ended_at = utcnow()
                    job._status = JobStatus.FINISHED
                    job.save(pipeline=pipeline)

                    finished_job_registry = FinishedJobRegistry(job.origin, self.connection)
                    finished_job_registry.add(job, result_ttl, pipeline)

                job.cleanup(result_ttl, pipeline=pipeline)
                started_job_registry.remove(job, pipeline=pipeline)  # remove the job from the started-job registry

                pipeline.execute()

            except Exception:
                job.set_status(JobStatus.FAILED, pipeline=pipeline)
                started_job_registry.remove(job, pipeline=pipeline)
                try:
                    pipeline.execute()
                except Exception:
                    # Ensure that custom exception handlers are called
                    # even if Redis is down
                    pass
                self.handle_exception(job, *sys.exc_info())
                return False

        self.log.info(green('Job OK'))
        if rv:
            log_result = "{0!r}".format(as_text(text_type(rv)))
            self.log.debug('Result: {0}'.format(yellow(log_result)))

        if result_ttl == 0:
            self.log.info('Result discarded immediately')
        elif result_ttl > 0:
            self.log.info('Result is kept for {0} seconds'.format(result_ttl))
        else:
            self.log.warning('Result will never expire, clean up result key manually')

        return True
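
The result_ttl branches above encode a small policy: 0 discards the result immediately, a positive value keeps it for that many seconds, and a negative value keeps it forever. A standalone sketch of that mapping (the sample values are illustrative, not rq defaults):

def describe_result_ttl(result_ttl):
    # Mirrors the logging branches of perform_job() above.
    if result_ttl == 0:
        return 'result discarded immediately'
    elif result_ttl > 0:
        return 'result kept for {0} seconds'.format(result_ttl)
    return 'result never expires; clean up the key manually'

print(describe_result_ttl(0))     # discarded
print(describe_result_ttl(500))   # kept for 500 seconds
print(describe_result_ttl(-1))    # never expires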
Example #6
 def test_add(self):
     """Adding a job to DeferredJobsRegistry."""
     job = Job()
     self.registry.add(job)
     job_ids = [as_text(job_id) for job_id in
                self.testconn.zrange(self.registry.key, 0, -1)]
     self.assertEqual(job_ids, [job.id])
Example #8
File: test_job.py Project: Gwill/rq
 def test_register_dependency(self):
     """Test that jobs updates the correct job dependents."""
     job = Job.create(func=say_hello)
     job._dependency_id = 'id'
     job.save()
     job.register_dependency()
     self.assertEqual(as_text(self.testconn.spop('rq:job:id:dependents')), job.id)
Example #9
File: worker.py Project: nugit/rq
    def find_by_key(cls, worker_key, connection=None):
        """Returns a Worker instance, based on the naming conventions for
        naming the internal Redis keys.  Can be used to reverse-lookup Workers
        by their Redis keys.
        """
        prefix = cls.redis_worker_namespace_prefix
        if not worker_key.startswith(prefix):
            raise ValueError('Not a valid RQ worker key: {0}'.format(worker_key))

        if connection is None:
            connection = get_current_connection()
        if not connection.exists(worker_key):
            connection.srem(cls.redis_workers_keys, worker_key)
            return None

        name = worker_key[len(prefix):]
        worker = cls([], name, connection=connection)
        queues = as_text(connection.hget(worker.key, 'queues'))
        worker._state = connection.hget(worker.key, 'state') or '?'
        worker._job_id = connection.hget(worker.key, 'current_job') or None
        sja = connection.hget(worker.key, 'started_job_at')
        worker.started_job_at = None if sja is None else utcparse(sja)
        if queues:
            worker.queues = [cls.queue_class(queue, connection=connection)
                             for queue in queues.split(',')]
        return worker
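
The reverse lookup above works because worker keys follow a fixed naming convention: the class prefix followed by the worker name. A minimal sketch of that round trip, assuming rq's 'rq:worker:' prefix and a made-up worker name:

prefix = 'rq:worker:'              # rq's redis_worker_namespace_prefix
worker_key = prefix + 'my-worker'  # hypothetical worker key
assert worker_key.startswith(prefix)
print(worker_key[len(prefix):])    # -> 'my-worker', the name handed to cls([], name, ...)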
Example #10
    def find_by_key(cls, worker_key, connection=None):
        """Returns a Worker instance, based on the naming conventions for
        naming the internal Redis keys.  Can be used to reverse-lookup Workers
        by their Redis keys.
        """
        prefix = cls.redis_worker_namespace_prefix
        name = worker_key[len(prefix):]
        if not worker_key.startswith(prefix):
            raise ValueError('Not a valid RQ worker key: %s' % (worker_key, ))

        if connection is None:
            connection = get_current_connection()
        if not connection.exists(worker_key):
            connection.srem(cls.redis_workers_keys, worker_key)
            return None

        name = worker_key[len(prefix):]
        worker = cls([], name, connection=connection)
        queues = as_text(connection.hget(worker.key, 'queues'))
        worker._state = connection.hget(worker.key, 'state') or '?'
        if queues:
            worker.queues = [
                Queue(queue, connection=connection)
                for queue in queues.split(',')
            ]
        return worker
Example #11
    def perform_job(self, job):
        """Performs the actual work of a job.  Will/should only be called
        inside the work horse's process.
        """
        self.prepare_job_execution(job)

        with self.connection._pipeline() as pipeline:
            started_job_registry = StartedJobRegistry(job.origin, self.connection)

            try:
                with self.death_penalty_class(job.timeout or self.queue_class.DEFAULT_TIMEOUT):
                    rv = job.perform()

                # Pickle the result in the same try-except block since we need
                # to use the same exc handling when pickling fails
                job._result = rv

                self.set_current_job_id(None, pipeline=pipeline)

                result_ttl = job.get_result_ttl(self.default_result_ttl)
                if result_ttl != 0:
                    job.ended_at = utcnow()
                    job.set_status(JobStatus.FINISHED, pipeline=pipeline)
                    job.save(pipeline=pipeline)

                    finished_job_registry = FinishedJobRegistry(job.origin, self.connection)
                    finished_job_registry.add(job, result_ttl, pipeline)

                job.cleanup(result_ttl, pipeline=pipeline)
                started_job_registry.remove(job, pipeline=pipeline)

                pipeline.execute()

            except Exception:
                job.set_status(JobStatus.FAILED, pipeline=pipeline)
                started_job_registry.remove(job, pipeline=pipeline)
                self.set_current_job_id(None, pipeline=pipeline)
                try:
                    pipeline.execute()
                except Exception:
                    # Ensure that custom exception handlers are called
                    # even if Redis is down
                    pass
                self.handle_exception(job, *sys.exc_info())
                return False

        self.log.info('{0}: {1} ({2})'.format(green(job.origin), blue('Job OK'), job.id))
        if rv:
            log_result = "{0!r}".format(as_text(text_type(rv)))
            self.log.debug('Result: {0}'.format(yellow(log_result)))

        if result_ttl == 0:
            self.log.info('Result discarded immediately')
        elif result_ttl > 0:
            self.log.info('Result is kept for {0} seconds'.format(result_ttl))
        else:
            self.log.warning('Result will never expire, clean up result key manually')

        return True
Example #12
 def all(cls, connection=None):
     """Returns an iterable of all Workers.
     """
     if connection is None:
         connection = get_current_connection()
     reported_working = connection.smembers(cls.redis_workers_keys)
     workers = [cls.find_by_key(as_text(key), connection) for key in reported_working]
     return compact(workers)
Example #13
 def test_register_dependency(self):
     """Test that jobs updates the correct job dependents."""
     job = Job.create(func=say_hello)
     job._dependency_id = 'id'
     job.save()
     job.register_dependency()
     self.assertEqual(as_text(self.testconn.spop('rq:job:id:dependents')),
                      job.id)
Example #14
    def refresh(self):  # noqa
        """Overwrite the current instance's properties with the values in the
        corresponding Redis key.

        Will raise a NoSuchJobError if no corresponding Redis key exists.
        """
        key = self.key
        obj = decode_redis_hash(self.connection.hgetall(key))
        if len(obj) == 0:
            raise NoSuchJobError("No such job: %s" % (key,))

        def to_date(date_str):
            if date_str is None:
                return
            else:
                return utcparse(as_text(date_str))

        try:
            self.data = obj["data"]
        except KeyError:
            raise NoSuchJobError("Unexpected job format: {0}".format(obj))

        self.created_at = to_date(as_text(obj.get("created_at")))
        self.origin = as_text(obj.get("origin"))
        self.description = as_text(obj.get("description"))
        self.enqueued_at = to_date(as_text(obj.get("enqueued_at")))
        self.ended_at = to_date(as_text(obj.get("ended_at")))
        self._result = unpickle(obj.get("result")) if obj.get("result") else None  # noqa
        self.exc_info = obj.get("exc_info")
        self.timeout = int(obj.get("timeout")) if obj.get("timeout") else None
        self.result_ttl = int(obj.get("result_ttl")) if obj.get("result_ttl") else None  # noqa
        self._status = as_text(obj.get("status") if obj.get("status") else None)
        self._dependency_id = as_text(obj.get("dependency_id", None))
        self.meta = unpickle(obj.get("meta")) if obj.get("meta") else {}
Example #15
    def refresh(self):  # noqa
        """Overwrite the current instance's properties with the values in the
        corresponding Redis key.

        Will raise a NoSuchJobError if no corresponding Redis key exists.
        """
        key = self.key
        obj = decode_redis_hash(self.connection.hgetall(key))
        if len(obj) == 0:
            raise NoSuchJobError('No such job: {0}'.format(key))

        def to_date(date_str):
            if date_str is None:
                return
            else:
                return utcparse(as_text(date_str))

        try:
            self.data = obj['data']
        except KeyError:
            raise NoSuchJobError('Unexpected job format: {0}'.format(obj))

        self.created_at = to_date(as_text(obj.get('created_at')))
        self.origin = as_text(obj.get('origin'))
        self.description = as_text(obj.get('description'))
        self.enqueued_at = to_date(as_text(obj.get('enqueued_at')))
        self.ended_at = to_date(as_text(obj.get('ended_at')))
        self._result = unpickle(obj.get('result')) if obj.get('result') else None  # noqa
        self.exc_info = obj.get('exc_info')
        self.timeout = int(obj.get('timeout')) if obj.get('timeout') else None
        self.result_ttl = int(obj.get('result_ttl')) if obj.get('result_ttl') else None  # noqa
        self._status = as_text(obj.get('status') if obj.get('status') else None)
        self._dependency_id = as_text(obj.get('dependency_id', None))
        self.ttl = int(obj.get('ttl')) if obj.get('ttl') else None
        self.meta = unpickle(obj.get('meta')) if obj.get('meta') else {}
Example #16
    def refresh(self, safe=False):  # noqa

        key = self.key
        obj = decode_redis_hash(self.connection.hgetall(key))

        if len(obj) == 0:
            raise NoSuchJobError('No such job: %s' % (key,))

        def to_date(date_str):
            if date_str is None:
                return None
            else:
                return times.to_universal(as_text(date_str))

        self.created_at = to_date(as_text(obj.get('created_at')))
        self.origin = as_text(obj.get('origin'))
        self.description = as_text(obj.get('description'))
        self.enqueued_at = to_date(as_text(obj.get('enqueued_at')))
        self.ended_at = to_date(as_text(obj.get('ended_at')))
        self._result = unpickle(obj.get('result')) if obj.get('result') else None  # noqa
        self.exc_info = obj.get('exc_info')
        self.timeout = int(obj.get('timeout')) if obj.get('timeout') else None
        self.result_ttl = int(obj.get('result_ttl')) if obj.get('result_ttl') else None # noqa
        self._status = as_text(obj.get('status') if obj.get('status') else None)
        self._dependency_id = as_text(obj.get('dependency_id', None))
        self.meta = unpickle(obj.get('meta')) if obj.get('meta') else {}
Example #17
    def refresh(self, safe=False):  # noqa

        key = self.key
        obj = decode_redis_hash(self.connection.hgetall(key))

        if len(obj) == 0:
            raise NoSuchJobError('No such job: %s' % (key, ))

        def to_date(date_str):
            if date_str is None:
                return None
            else:
                return times.to_universal(as_text(date_str))

        self.created_at = to_date(as_text(obj.get('created_at')))
        self.origin = as_text(obj.get('origin'))
        self.description = as_text(obj.get('description'))
        self.enqueued_at = to_date(as_text(obj.get('enqueued_at')))
        self.ended_at = to_date(as_text(obj.get('ended_at')))
        self._result = unpickle(
            obj.get('result')) if obj.get('result') else None  # noqa
        self.exc_info = obj.get('exc_info')
        self.timeout = int(obj.get('timeout')) if obj.get('timeout') else None
        self.result_ttl = int(
            obj.get('result_ttl')) if obj.get('result_ttl') else None  # noqa
        self._status = as_text(
            obj.get('status') if obj.get('status') else None)
        self._dependency_id = as_text(obj.get('dependency_id', None))
        self.meta = unpickle(obj.get('meta')) if obj.get('meta') else {}
Example #18
 def get_job_ids(self, offset=0, length=-1):
     """Returns a slice of job IDs in the Scheduler Queue."""
     start = offset
     if length >= 0:
         end = offset + (length - 1)
     else:
         end = length
     return [as_text(job_id) for job_id in
             self.connection.zrange(self.key, start, end)]
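
The offset/length arithmetic above translates a Python-style slice into the inclusive start/end indices that Redis ZRANGE expects. A quick standalone check of the mapping, with example values:

def slice_bounds(offset=0, length=-1):
    # Same start/end computation as get_job_ids() above.
    start = offset
    end = offset + (length - 1) if length >= 0 else length
    return start, end

print(slice_bounds())      # (0, -1): the whole sorted set
print(slice_bounds(2, 3))  # (2, 4): three job ids starting at index 2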
Example #19
 def all(cls, connection=None):
     """Returns an iterable of all Workers.
     """
     if connection is None:
         connection = get_current_connection()
     reported_working = connection.smembers(cls.redis_workers_keys)
     workers = [cls.find_by_key(as_text(key), connection)
                for key in reported_working]
     return compact(workers)
Example #20
File: job.py Project: alex-jerez/rq
    def get_call_string(self):  # noqa
        """Returns a string representation of the call, formatted as a regular
        Python function invocation statement.
        """
        try:
            if self.func_name is None:
                return None

            arg_list = [as_text(repr(arg)) for arg in self.args]

            kwargs = ["{0}={1}".format(k, as_text(repr(v))) for k, v in self.kwargs.items()]
            # Sort the kwargs because Python 3.3 and 3.4 would otherwise produce different call strings
            arg_list += sorted(kwargs)
            args = ", ".join(arg_list)

            return "{0}({1})".format(self.func_name, args)
        except ValueError:
            return "get_call_string() didnt work :("
Example #21
    def get_call_string(self):  # noqa
        """Returns a string representation of the call, formatted as a regular
        Python function invocation statement.
        """
        if self.func_name is None:
            return None

        arg_list = [as_text(repr(arg)) for arg in self.args]

        kwargs = [
            '{0}={1}'.format(k, as_text(repr(v)))
            for k, v in self.kwargs.items()
        ]
        # Sort the kwargs because Python 3.3 and 3.4 would otherwise produce different call strings
        arg_list += sorted(kwargs)
        args = ', '.join(arg_list)

        return '{0}({1})'.format(self.func_name, args)
Example #22
    def restore(self, raw_data):
        """Overwrite properties with the provided values stored in Redis"""
        obj = decode_redis_hash(raw_data)
        try:
            raw_data = obj['data']
        except KeyError:
            raise NoSuchJobError('Unexpected job format: {0}'.format(obj))

        try:
            self.data = zlib.decompress(raw_data)
        except zlib.error:
            # Fallback to uncompressed string
            self.data = raw_data

        self.created_at = str_to_date(obj.get('created_at'))
        self.origin = as_text(obj.get('origin'))
        self.description = as_text(obj.get('description'))
        self.enqueued_at = str_to_date(obj.get('enqueued_at'))
        self.started_at = str_to_date(obj.get('started_at'))
        self.ended_at = str_to_date(obj.get('ended_at'))
        self._result = unpickle(
            obj.get('result')) if obj.get('result') else None  # noqa
        self.timeout = parse_timeout(
            obj.get('timeout')) if obj.get('timeout') else None
        self.result_ttl = int(
            obj.get('result_ttl')) if obj.get('result_ttl') else None  # noqa
        self.failure_ttl = int(
            obj.get('failure_ttl')) if obj.get('failure_ttl') else None  # noqa
        self._status = obj.get('status') if obj.get('status') else None

        dependency_id = obj.get('dependency_id', None)
        self._dependency_ids = [as_text(dependency_id)
                                ] if dependency_id else []

        self.ttl = int(obj.get('ttl')) if obj.get('ttl') else None
        self.meta = unpickle(obj.get('meta')) if obj.get('meta') else {}

        raw_exc_info = obj.get('exc_info')
        if raw_exc_info:
            try:
                self.exc_info = as_text(zlib.decompress(raw_exc_info))
            except zlib.error:
                # Fallback to uncompressed string
                self.exc_info = as_text(raw_exc_info)
Example #23
    def get_job_ids(self, offset=0, length=-1):
        """Returns a slice of job IDs in the queue."""

        start = offset
        if length >= 0:
            end = offset + (length - 1)
        else:
            end = length
        return [as_text(job_id) for job_id in
                (yield from self.connection.lrange(self.key, start, end))]
Example #24
    def test_get_current_job(self):
        """Ensure worker.get_current_job() works properly"""
        q = Queue()
        worker = Worker([q])
        job = q.enqueue_call(say_hello)

        self.assertEqual(self.testconn.hget(worker.key, "current_job"), None)
        worker.set_current_job_id(job.id)
        self.assertEqual(worker.get_current_job_id(), as_text(self.testconn.hget(worker.key, "current_job")))
        self.assertEqual(worker.get_current_job(), job)
Example #25
File: job.py Project: Kisioj/rq
    def create(cls, func, args=None, kwargs=None, connection=None,
               result_ttl=None, ttl=None, status=None, description=None,
               depends_on=None, timeout=None, id=None, origin=None):
        """Creates a new Job instance for the given function, arguments, and
        keyword arguments.
        """
        if args is None:
            args = ()
        if kwargs is None:
            kwargs = {}

        if not isinstance(args, (tuple, list)):
            raise TypeError('{0!r} is not a valid args list'.format(args))
        if not isinstance(kwargs, dict):
            raise TypeError('{0!r} is not a valid kwargs dict'.format(kwargs))

        job = cls(connection=connection)
        if id is not None:
            job.set_id(id)

        if origin is not None:
            job.origin = origin

        # Set the core job tuple properties
        job._instance = None
        if inspect.ismethod(func):
            job._instance = func.__self__
            job._func_name = func.__name__
        elif inspect.isfunction(func) or inspect.isbuiltin(func):
            job._func_name = '{0}.{1}'.format(func.__module__, func.__name__)
        elif isinstance(func, string_types):
            job._func_name = as_text(func)
        elif not inspect.isclass(func) and hasattr(func, '__call__'):  # a callable class instance
            job._instance = func
            job._func_name = '__call__'
        else:
            raise TypeError('Expected a callable or a string, but got: {}'.format(func))
        job._args = args
        job._kwargs = kwargs

        # Extra meta data
        job.description = description or job.get_call_string()
        job.result_ttl = result_ttl
        job.ttl = ttl
        job.timeout = timeout
        job._status = status

        # dependency could be job instance or job id, or list thereof
        if depends_on:
            if isinstance(depends_on, list):
                job._dependency_ids = [tmp.id for tmp in depends_on]
            else:
                job._dependency_ids = [depends_on.id] if isinstance(depends_on, Job) else [depends_on]

        return job
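
Job.create accepts the callable in several shapes: a bound method, a plain function, a dotted-path string, or a callable instance. The snippet below only illustrates how the dotted name is derived in the plain-function branch; the function is a stand-in defined for the example.

import inspect

def say_hello(name='World'):
    return 'Hello, {0}!'.format(name)

assert inspect.isfunction(say_hello)
func_name = '{0}.{1}'.format(say_hello.__module__, say_hello.__name__)
print(func_name)  # e.g. '__main__.say_hello' when run as a script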
Example #26
    def create(cls,
               func,
               args=None,
               kwargs=None,
               connection=None,
               result_ttl=None,
               status=None,
               description=None,
               depends_on=None,
               timeout=None):
        """Creates a new Job instance for the given function, arguments, and
        keyword arguments.
        """
        if args is None:
            args = ()
        if kwargs is None:
            kwargs = {}

        if not isinstance(args, (tuple, list)):
            raise TypeError('{0!r} is not a valid args list.'.format(args))
        if not isinstance(kwargs, dict):
            raise TypeError('{0!r} is not a valid kwargs dict.'.format(kwargs))

        job = cls(connection=connection)

        # Set the core job tuple properties
        job._instance = None
        if inspect.ismethod(func):
            job._instance = func.__self__
            job._func_name = func.__name__
        elif inspect.isfunction(func) or inspect.isbuiltin(func):
            job._func_name = '%s.%s' % (func.__module__, func.__name__)
        elif isinstance(func, string_types):
            job._func_name = as_text(func)
        elif not inspect.isclass(func) and hasattr(
                func, '__call__'):  # a callable class instance
            job._instance = func
            job._func_name = '__call__'
        else:
            raise TypeError(
                'Expected a callable or a string, but got: {}'.format(func))
        job._args = args
        job._kwargs = kwargs

        # Extra meta data
        job.description = description or job.get_call_string()
        job.result_ttl = result_ttl
        job.timeout = timeout
        job._status = status

        # dependency could be job instance or id
        if depends_on is not None:
            job._dependency_id = depends_on.id if isinstance(
                depends_on, Job) else depends_on
        return job
Example #27
    def perform_job(self, job, queue, heartbeat_ttl=None):
        """Performs the actual work of a job.  Will/should only be called
        inside the work horse's process.
        """
        self.prepare_job_execution(job, heartbeat_ttl)

        push_connection(self.connection)

        started_job_registry = StartedJobRegistry(job.origin,
                                                  self.connection,
                                                  job_class=self.job_class)

        try:
            job.started_at = utcnow()
            timeout = job.timeout or self.queue_class.DEFAULT_TIMEOUT
            with self.death_penalty_class(timeout, JobTimeoutException, job_id=job.id):
                rv = job.perform(self.workerKwargs)

            job.ended_at = utcnow()

            # Pickle the result in the same try-except block since we need
            # to use the same exc handling when pickling fails
            job._result = rv

            self.handle_job_success(job=job,
                                    queue=queue,
                                    started_job_registry=started_job_registry)
        except:
            job.ended_at = utcnow()
            self.handle_job_failure(job=job,
                                    started_job_registry=started_job_registry)
            self.handle_exception(job, *sys.exc_info())
            return False

        finally:
            pop_connection()

        self.log.info('{0}: {1} ({2})'.format(
            green(job.origin), blue('Job OK'), job.id))
        if rv is not None:
            log_result = "{0!r}".format(as_text(text_type(rv)))
            self.log.debug('Result: %s', yellow(log_result))

        if self.log_result_lifespan:
            result_ttl = job.get_result_ttl(self.default_result_ttl)
            if result_ttl == 0:
                self.log.info('Result discarded immediately')
            elif result_ttl > 0:
                self.log.info(
                    'Result is kept for {0} seconds'.format(result_ttl))
            else:
                self.log.warning(
                    'Result will never expire, clean up result key manually')

        return True
Example #28
def test_get_current_job(redis):
    """Ensure worker.get_current_job() works properly."""

    q = Queue()
    worker = Worker([q])
    job = yield from q.enqueue_call(say_hello)

    assert not (yield from redis.hget(worker.key, 'current_job'))
    yield from worker.set_current_job_id(job.id)
    current_id = as_text((yield from redis.hget(worker.key, 'current_job')))
    assert (yield from worker.get_current_job_id()) == current_id
    assert (yield from worker.get_current_job()) == job
Example #29
    def test_get_current_job(self):
        """Ensure worker.get_current_job() works properly"""
        q = Queue()
        worker = Worker([q])
        job = q.enqueue_call(say_hello)

        self.assertEqual(self.testconn.hget(worker.key, 'current_job'), None)
        worker.set_current_job_id(job.id)
        self.assertEqual(
            worker.get_current_job_id(),
            as_text(self.testconn.hget(worker.key, 'current_job')))
        self.assertEqual(worker.get_current_job(), job)
Example #30
    def find_by_key(cls,
                    worker_key,
                    connection=None,
                    job_class=None,
                    queue_class=None):
        """Returns a Worker instance, based on the naming conventions for
        naming the internal Redis keys.  Can be used to reverse-lookup Workers
        by their Redis keys.
        """
        prefix = cls.redis_worker_namespace_prefix
        if not worker_key.startswith(prefix):
            raise ValueError(
                'Not a valid RQ worker key: {0}'.format(worker_key))

        if connection is None:
            connection = get_current_connection()
        if not connection.exists(worker_key):
            connection.srem(cls.redis_workers_keys, worker_key)
            return None

        name = worker_key[len(prefix):]
        worker = cls([],
                     name,
                     connection=connection,
                     job_class=job_class,
                     queue_class=queue_class)
        queues, state, job_id = connection.hmget(worker.key, 'queues', 'state',
                                                 'current_job')
        queues = as_text(queues)
        worker._state = as_text(state or '?')
        worker._job_id = job_id or None
        if queues:
            worker.queues = [
                worker.queue_class(queue,
                                   connection=connection,
                                   job_class=job_class)
                for queue in queues.split(',')
            ]
        return worker
Example #31
    def test_register_dependency(self):
        """Ensure dependency registration works properly."""
        origin = 'some_queue'
        registry = DeferredJobRegistry(origin, self.testconn)

        job = Job.create(func=fixtures.say_hello, origin=origin)
        job._dependency_id = 'id'
        job.save()

        self.assertEqual(registry.get_job_ids(), [])
        job.register_dependency()
        self.assertEqual(as_text(self.testconn.spop('rq:job:id:dependents')), job.id)
        self.assertEqual(registry.get_job_ids(), [job.id])
Example #33
    def compact(self):
        """Removes all "dead" jobs from the queue by cycling through it, while
        guaranteeing FIFO semantics.
        """

        COMPACT_QUEUE = 'rq:queue:_compact:{0}'.format(uuid.uuid4())

        yield from self.connection.rename(self.key, COMPACT_QUEUE)
        while True:
            job_id = as_text((yield from self.connection.lpop(COMPACT_QUEUE)))
            if job_id is None:
                break
            if (yield from self.job_class.exists(job_id, self.connection)):
                (yield from self.connection.rpush(self.key, job_id))
Example #34
    def test_heartbeat(self):
        """Heartbeat saves last_heartbeat"""
        q = Queue()
        w = Worker([q])
        w.register_birth()

        self.assertEqual(str(w.pid), as_text(self.testconn.hget(w.key, 'pid')))
        self.assertEqual(w.hostname,
                         as_text(self.testconn.hget(w.key, 'hostname')))
        last_heartbeat = self.testconn.hget(w.key, 'last_heartbeat')
        self.assertIsNotNone(self.testconn.hget(w.key, 'birth'))
        self.assertTrue(last_heartbeat is not None)
        w = Worker.find_by_key(w.key)
        self.assertIsInstance(w.last_heartbeat, datetime)

        # worker.refresh() shouldn't fail if last_heartbeat is None
        # for compatibility reasons
        self.testconn.hdel(w.key, 'last_heartbeat')
        w.refresh()
        # worker.refresh() shouldn't fail if birth is None
        # for compatibility reasons
        self.testconn.hdel(w.key, 'birth')
        w.refresh()
Example #35
    def perform_job(self, job, queue):
        """Performs the actual work of a job.  Will/should only be called
        inside the work horse's process.
        """
        self.prepare_job_execution(job)

        push_connection(self.connection)

        started_job_registry = StartedJobRegistry(job.origin, self.connection)

        try:
            with self.death_penalty_class(job.timeout or self.queue_class.DEFAULT_TIMEOUT):
                rv = job.perform()

            job.ended_at = utcnow()

            # Pickle the result in the same try-except block since we need
            # to use the same exc handling when pickling fails
            job._result = rv

            self.handle_job_success(
                job=job,
                queue=queue,
                started_job_registry=started_job_registry
            )
        except Exception:
            self.handle_job_failure(
                job=job,
                started_job_registry=started_job_registry
            )
            self.handle_exception(job, *sys.exc_info())
            return False

        finally:
            pop_connection()

        self.log.info('{0}: {1} ({2})'.format(green(job.origin), blue('Job OK'), job.id))
        if rv is not None:
            log_result = "{0!r}".format(as_text(text_type(rv)))
            self.log.debug('Result: {0}'.format(yellow(log_result)))

        result_ttl = job.get_result_ttl(self.default_result_ttl)
        if result_ttl == 0:
            self.log.info('Result discarded immediately')
        elif result_ttl > 0:
            self.log.info('Result is kept for {0} seconds'.format(result_ttl))
        else:
            self.log.warning('Result will never expire, clean up result key manually')

        return True
Example #36
def test_register_dependency(redis):
    """Ensure dependency registration works properly."""

    origin = 'some_queue'
    registry = DeferredJobRegistry(origin, redis)

    job = Job.create(func=say_hello, origin=origin)
    job._dependency_id = 'id'
    yield from job.save()

    assert not (yield from registry.get_job_ids())
    yield from job.register_dependency()
    assert as_text((yield from redis.spop('rq:job:id:dependents'))) == job.id
    assert (yield from registry.get_job_ids()) == [job.id]
Example #38
File: job.py Project: essobi/aiorq
    def refresh(self):
        """Overwrite the current instance's properties with the values in the
        corresponding Redis key.

        Will raise a NoSuchJobError if no corresponding Redis key
        exists.
        """

        key = self.key
        obj = decode_redis_hash((yield from self.connection.hgetall(key)))
        if len(obj) == 0:
            raise NoSuchJobError('No such job: {0}'.format(key))

        to_date = lambda text: utcparse(as_text(text)) if text else None

        try:
            self.data = obj['data']
        except KeyError:
            raise NoSuchJobError('Unexpected job format: {0}'.format(obj))

        self.created_at = to_date(obj.get('created_at'))
        self.origin = as_text(obj.get('origin'))
        self.description = as_text(obj.get('description'))
        self.enqueued_at = to_date(obj.get('enqueued_at'))
        self.started_at = to_date(obj.get('started_at'))
        self.ended_at = to_date(obj.get('ended_at'))
        self._result = (unpickle(obj.get('result'))
                        if obj.get('result') else None)
        self.exc_info = obj.get('exc_info')
        self.timeout = int(obj.get('timeout')) if obj.get('timeout') else None
        self.result_ttl = (int(obj.get('result_ttl'))
                           if obj.get('result_ttl') else None)
        self._status = as_text(obj.get('status')
                               if obj.get('status') else None)
        self._dependency_id = as_text(obj.get('dependency_id', None))
        self.ttl = int(obj.get('ttl')) if obj.get('ttl') else None
        self.meta = unpickle(obj.get('meta')) if obj.get('meta') else {}
Example #39
    def refresh(self, safe=False):  # noqa
        """Overwrite the current instance's properties with the values in the
        corresponding Redis key.

        Will raise a NoSuchJobError if no corresponding Redis key exists.
        """
        key = self.key
        obj = decode_redis_hash(self.connection.hgetall(key))
        if len(obj) == 0:
            raise NoSuchJobError('No such job: %s' % (key,))

        def to_date(date_str):
            if date_str is None:
                return None
            else:
                return times.to_universal(as_text(date_str))

        try:
            self.data = obj['data']
        except KeyError:
            raise NoSuchJobError('Unexpected job format: {0}'.format(obj))

        try:
            self._func_name, self._instance, self._args, self._kwargs = unpickle(self.data)
        except UnpickleError:
            if not safe:
                raise
        self.created_at = to_date(as_text(obj.get('created_at')))
        self.origin = as_text(obj.get('origin'))
        self.description = as_text(obj.get('description'))
        self.enqueued_at = to_date(as_text(obj.get('enqueued_at')))
        self.started_at = to_date(as_text(obj.get('started_at')))
        self.ended_at = to_date(as_text(obj.get('ended_at')))
        self._result = unpickle(obj.get('result')) if obj.get('result') else None  # noqa
        self.exc_info = obj.get('exc_info')
        self.timeout = int(obj.get('timeout')) if obj.get('timeout') else None
        self.result_ttl = int(obj.get('result_ttl')) if obj.get('result_ttl') else None # noqa
        self._status = as_text(obj.get('status') if obj.get('status') else None)
        self._dependency_id = as_text(obj.get('dependency_id', None))
        self.meta = unpickle(obj.get('meta')) if obj.get('meta') else {}
        self._annotations = as_text(obj.get('annotations'))
Example #40
def test_timeouts(set_loop):
    """Worker kills jobs after timeout."""

    q = Queue()
    w = Worker([q])

    # Put it on the queue with a timeout value
    res = yield from q.enqueue(
        touch_a_mock_after_timeout, args=(4,), timeout=1)

    assert not mock.call_count
    yield from w.work(burst=True)
    assert not mock.call_count

    # TODO: Having to do the manual refresh() here is really ugly!
    yield from res.refresh()
    assert 'JobTimeoutException' in as_text(res.exc_info)
    mock.reset_mock()
Example #41
    def test_compressed_exc_info_handling(self):
        """Jobs handle both compressed and uncompressed exc_info"""
        exception_string = 'Some exception'

        job = Job.create(func=fixtures.say_hello, args=('Lionel', ))
        job.exc_info = exception_string
        job.save()

        # exc_info is stored in compressed format
        exc_info = self.testconn.hget(job.key, 'exc_info')
        self.assertEqual(as_text(zlib.decompress(exc_info)), exception_string)

        job.refresh()
        self.assertEqual(job.exc_info, exception_string)

        # Uncompressed exc_info is also handled
        self.testconn.hset(job.key, 'exc_info', exception_string)

        job.refresh()
        self.assertEqual(job.exc_info, exception_string)
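
The compression handling this test (and the refresh()/restore() examples above) depends on is a plain zlib round trip over the UTF-8 encoded traceback text. A self-contained sanity check, reusing the same example string:

import zlib

exc_text = 'Some exception'
stored = zlib.compress(exc_text.encode('utf-8'))             # what save() would write
assert zlib.decompress(stored).decode('utf-8') == exc_text   # what refresh() reads back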
Example #42
File: job.py Project: Gwill/rq
    def create(cls, func, args=None, kwargs=None, connection=None,
               result_ttl=None, status=None, description=None, depends_on=None, timeout=None):
        """Creates a new Job instance for the given function, arguments, and
        keyword arguments.
        """
        if args is None:
            args = ()
        if kwargs is None:
            kwargs = {}

        if not isinstance(args, (tuple, list)):
            raise TypeError('{0!r} is not a valid args list.'.format(args))
        if not isinstance(kwargs, dict):
            raise TypeError('{0!r} is not a valid kwargs dict.'.format(kwargs))

        job = cls(connection=connection)

        # Set the core job tuple properties
        job._instance = None
        if inspect.ismethod(func):
            job._instance = func.__self__
            job._func_name = func.__name__
        elif inspect.isfunction(func) or inspect.isbuiltin(func):
            job._func_name = '%s.%s' % (func.__module__, func.__name__)
        elif isinstance(func, string_types):
            job._func_name = as_text(func)
        else:
            raise TypeError('Expected a function/method/string, but got: {}'.format(func))
        job._args = args
        job._kwargs = kwargs

        # Extra meta data
        job.description = description or job.get_call_string()
        job.result_ttl = result_ttl
        job.timeout = timeout
        job._status = status

        # dependency could be job instance or id
        if depends_on is not None:
            job._dependency_id = depends_on.id if isinstance(depends_on, Job) else depends_on
        return job
Example #43
    def find_by_key(cls, worker_key, connection=None):
        """Returns a Worker instance, based on the naming conventions for
        naming the internal Redis keys.  Can be used to reverse-lookup Workers
        by their Redis keys.
        """
        prefix = cls.redis_worker_namespace_prefix
        if not worker_key.startswith(prefix):
            raise ValueError("Not a valid RQ worker key: %s" % (worker_key,))

        if connection is None:
            connection = get_current_connection()
        if not connection.exists(worker_key):
            connection.srem(cls.redis_workers_keys, worker_key)
            return None

        name = worker_key[len(prefix) :]
        worker = cls([], name, connection=connection)
        queues = as_text(connection.hget(worker.key, "queues"))
        worker._state = connection.hget(worker.key, "state") or "?"
        if queues:
            worker.queues = [Queue(queue, connection=connection) for queue in queues.split(",")]
        return worker
Example #44
    def find_by_key(cls, worker_key, connection=None):
        """Returns a Worker instance, based on the naming conventions for
        naming the internal Redis keys.  Can be used to reverse-lookup Workers
        by their Redis keys.
        """
        prefix = cls.redis_worker_namespace_prefix
        if not worker_key.startswith(prefix):
            raise ValueError('Not a valid RQ worker key: %s' % (worker_key,))

        if connection is None:
            connection = get_current_connection()
        if not connection.exists(worker_key):  # the worker_key no longer exists
            connection.srem(cls.redis_workers_keys, worker_key)  # srem removes the stale key from the workers set
            return None

        name = worker_key[len(prefix):]
        worker = cls([], name, connection=connection)
        queues = as_text(connection.hget(worker.key, 'queues'))
        worker._state = connection.hget(worker.key, 'state') or '?'
        if queues:
            worker.queues = [Queue(queue, connection=connection)
                             for queue in queues.split(',')]  # 'queues' holds comma-separated queue names; see register_birth()
        return worker
Example #45
    def test_compressed_exc_info_handling(self):
        """Jobs handle both compressed and uncompressed exc_info"""
        exception_string = 'Some exception'

        job = Job.create(func=fixtures.say_hello, args=('Lionel',))
        job.exc_info = exception_string
        job.save()

        # exc_info is stored in compressed format
        exc_info = self.testconn.hget(job.key, 'exc_info')
        self.assertEqual(
            as_text(zlib.decompress(exc_info)),
            exception_string
        )

        job.refresh()
        self.assertEqual(job.exc_info, exception_string)

        # Uncompressed exc_info is also handled
        self.testconn.hset(job.key, 'exc_info', exception_string)

        job.refresh()
        self.assertEqual(job.exc_info, exception_string)
Example #46
    def refresh(self, safe=False):  # noqa
        """Overwrite the current instance's properties with the values in the
        corresponding Redis key.

        Will raise a NoSuchJobError if no corresponding Redis key exists.
        """
        key = self.key
        obj = decode_redis_hash(self.connection.hgetall(key))
        if len(obj) == 0:
            raise NoSuchJobError('No such job: %s' % (key, ))

        def to_date(date_str):
            if date_str is None:
                return None
            else:
                return times.to_universal(as_text(date_str))

        try:
            self.data = obj['data']
        except KeyError:
            raise NoSuchJobError('Unexpected job format: {0}'.format(obj))

        try:
            self._func_name, self._instance, self._args, self._kwargs = unpickle(
                self.data)
        except UnpickleError:
            if not safe:
                raise
        self.created_at = to_date(as_text(obj.get('created_at')))
        self.origin = as_text(obj.get('origin'))
        self.description = as_text(obj.get('description'))
        self.enqueued_at = to_date(as_text(obj.get('enqueued_at')))
        self.ended_at = to_date(as_text(obj.get('ended_at')))
        self._result = unpickle(
            obj.get('result')) if obj.get('result') else None  # noqa
        self.exc_info = obj.get('exc_info')
        self.timeout = int(obj.get('timeout')) if obj.get('timeout') else None
        self.result_ttl = int(
            obj.get('result_ttl')) if obj.get('result_ttl') else None  # noqa
        self._status = as_text(
            obj.get('status') if obj.get('status') else None)
        self.meta = unpickle(obj.get('meta')) if obj.get('meta') else {}
Example #47
File: job.py Project: haolloyin/rq
    def refresh(self):  # noqa
        """Overwrite the current instance's properties with the values in the
        corresponding Redis key.

        Will raise a NoSuchJobError if no corresponding Redis key exists.

        Replaces the current job's attributes with the values stored in Redis, roughly re-constructing the job.
        """
        key = self.key
        obj = decode_redis_hash(self.connection.hgetall(key))  # hgetall returns every field and value of the hash at key
        if len(obj) == 0:
            raise NoSuchJobError('No such job: %s' % (key,))

        def to_date(date_str):
            if date_str is None:
                return
            else:
                return utcparse(as_text(date_str))

        try:
            self.data = obj['data']
        except KeyError:
            raise NoSuchJobError('Unexpected job format: {0}'.format(obj))

        self.created_at = to_date(as_text(obj.get('created_at')))
        self.origin = as_text(obj.get('origin'))
        self.description = as_text(obj.get('description'))
        self.enqueued_at = to_date(as_text(obj.get('enqueued_at')))
        self.ended_at = to_date(as_text(obj.get('ended_at')))
        self._result = unpickle(obj.get('result')) if obj.get('result') else None  # noqa
        self.exc_info = obj.get('exc_info')
        self.timeout = int(obj.get('timeout')) if obj.get('timeout') else None
        self.result_ttl = int(obj.get('result_ttl')) if obj.get('result_ttl') else None  # noqa
        self._status = as_text(obj.get('status') if obj.get('status') else None)
        self._dependency_id = as_text(obj.get('dependency_id', None))
        self.meta = unpickle(obj.get('meta')) if obj.get('meta') else {}
Example #49
File: worker.py Project: Kisioj/rq
 def get_current_job_id(self, pipeline=None):
     connection = pipeline if pipeline is not None else self.connection
     return as_text(connection.hget(self.key, 'current_job'))
Example #50
File: worker.py Project: Kisioj/rq
 def death_date(self):
     """Fetches death date from Redis."""
     death_timestamp = self.connection.hget(self.key, 'death')
     if death_timestamp is not None:
         return utcparse(as_text(death_timestamp))
Example #51
    def restore(self, raw_data):
        """Overwrite properties with the provided values stored in Redis"""
        obj = decode_redis_hash(raw_data)
        try:
            raw_data = obj['data']
        except KeyError:
            raise NoSuchJobError('Unexpected job format: {0}'.format(obj))

        try:
            self.data = zlib.decompress(raw_data)
        except zlib.error:
            # Fallback to uncompressed string
            self.data = raw_data

        json_origin = True if obj.get('json_origin') else False

        self.created_at = str_to_date(obj.get('created_at'))
        self.origin = as_text(obj.get('origin'))
        self.worker_name = obj.get('worker_name').decode() if obj.get(
            'worker_name') else None
        self.description = as_text(obj.get('description'))
        self.enqueued_at = str_to_date(obj.get('enqueued_at'))
        self.started_at = str_to_date(obj.get('started_at'))
        self.ended_at = str_to_date(obj.get('ended_at'))
        self.last_heartbeat = str_to_date(obj.get('last_heartbeat'))

        self._func_name = as_text(
            obj.get('func_name')) if json_origin else self._func_name
        self._instance = None
        self._args = ()
        # self._kwargs = {'url': as_text(obj.get('func_url'))} if json_origin else self._kwargs
        temp_args = {}
        if json_origin:
            for key, value in obj.items():
                if key.startswith('func_') and key != 'func_name':
                    temp_args[key.split("func_", 1)[1]] = as_text(value)
            self._kwargs = temp_args

        result = obj.get('result')
        if result:
            try:
                self._result = self.serializer.loads(obj.get('result'))
            except Exception as e:
                self._result = "Unserializable return value"
        self.timeout = parse_timeout(
            obj.get('timeout')) if obj.get('timeout') else None
        self.result_ttl = int(
            obj.get('result_ttl')) if obj.get('result_ttl') else None  # noqa
        self.failure_ttl = int(
            obj.get('failure_ttl')) if obj.get('failure_ttl') else None  # noqa
        self._status = obj.get('status').decode() if obj.get(
            'status') else None

        dependency_id = obj.get('dependency_id', None)
        self._dependency_ids = [as_text(dependency_id)
                                ] if dependency_id else []

        self.ttl = int(obj.get('ttl')) if obj.get('ttl') else None
        self.meta = self.serializer.loads(
            obj.get('meta')) if obj.get('meta') else {}

        self.retries_left = int(
            obj.get('retries_left')) if obj.get('retries_left') else None
        if obj.get('retry_intervals'):
            self.retry_intervals = json.loads(
                obj.get('retry_intervals').decode())

        raw_exc_info = obj.get('exc_info')
        if raw_exc_info:
            try:
                self.exc_info = as_text(zlib.decompress(raw_exc_info))
            except zlib.error:
                # Fallback to uncompressed string
                self.exc_info = as_text(raw_exc_info)
Example #52
def truncate_long_string(data, maxlen=75):
    """ Truncates strings longer than maxlen
    """
    data = as_text(data)
    return (data[:maxlen] + '...') if len(data) > maxlen else data
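
A quick check of the truncation boundary in the helper above: with the default maxlen of 75, anything longer is cut to 75 characters plus a three-dot ellipsis, 78 characters in total.

data = 'x' * 80
truncated = (data[:75] + '...') if len(data) > 75 else data
print(len(truncated))  # 78: 75 kept characters plus '...'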
Example #54
 def birth_date(self):
     """Fetches birth date from Redis."""
     birth_timestamp = self.connection.hget(self.key, 'birth')
     if birth_timestamp is not None:
         return utcparse(as_text(birth_timestamp))
Example #55
def tl(l):
    return [as_text(i) for i in l]
Example #56
 def to_date(date_str):
     if date_str is None:
         return
     else:
         return utcparse(as_text(date_str))
Example #57
 def get_status(self):
     self._status = as_text(self.connection.hget(self.key, 'status'))
     return self._status
Example #58
 def shutdown_requested_date(self):
     """Fetches shutdown_requested_date from Redis."""
     shutdown_requested_timestamp = self.connection.hget(
         self.key, 'shutdown_requested_date')
     if shutdown_requested_timestamp is not None:
         return utcparse(as_text(shutdown_requested_timestamp))
Example #59
File: job.py Project: nmanovic/rq
    def create(cls,
               func,
               args=None,
               kwargs=None,
               connection=None,
               result_ttl=None,
               ttl=None,
               status=None,
               description=None,
               depends_on=None,
               timeout=None,
               id=None,
               origin=None,
               meta=None,
               failure_ttl=None,
               serializer=None):
        """Creates a new Job instance for the given function, arguments, and
        keyword arguments.
        """
        if args is None:
            args = ()
        if kwargs is None:
            kwargs = {}

        if not isinstance(args, (tuple, list)):
            raise TypeError('{0!r} is not a valid args list'.format(args))
        if not isinstance(kwargs, dict):
            raise TypeError('{0!r} is not a valid kwargs dict'.format(kwargs))

        job = cls(connection=connection, serializer=serializer)
        if id is not None:
            job.set_id(id)

        if origin is not None:
            job.origin = origin

        # Set the core job tuple properties
        job._instance = None
        if inspect.ismethod(func):
            job._instance = func.__self__
            job._func_name = func.__name__
        elif inspect.isfunction(func) or inspect.isbuiltin(func):
            if func.__name__ != func.__qualname__:
                class_name = func.__qualname__.rsplit('.', 1)[0]
                module = inspect.getmodule(func)
                job._instance = operator.attrgetter(class_name)(module)
                job._func_name = func.__name__
            else:
                job._func_name = '{0}.{1}'.format(func.__module__,
                                                  func.__name__)
        elif isinstance(func, string_types):
            job._func_name = as_text(func)
        elif not inspect.isclass(func) and hasattr(
                func, '__call__'):  # a callable class instance
            job._instance = func
            job._func_name = '__call__'
        else:
            raise TypeError(
                'Expected a callable or a string, but got: {0}'.format(func))
        job._args = args
        job._kwargs = kwargs

        # Extra meta data
        job.description = description or job.get_call_string()
        job.result_ttl = parse_timeout(result_ttl)
        job.failure_ttl = parse_timeout(failure_ttl)
        job.ttl = parse_timeout(ttl)
        job.timeout = parse_timeout(timeout)
        job._status = status
        job.meta = meta or {}

        # dependency could be job instance or id
        if depends_on is not None:
            job._dependency_ids = [
                depends_on.id if isinstance(depends_on, Job) else depends_on
            ]
        return job
Example #60
File: job.py Project: nmanovic/rq
    def get_status(self, refresh=True):
        if refresh:
            self._status = as_text(self.connection.hget(self.key, 'status'))

        return self._status