示例#1
0
def get_crawl_job(timeout='24h'):
    """Return a callable that enqueues a ``crawl`` job on the Redis queue.

    Wraps ``crawl`` with the RQ ``job`` decorator bound to the 'default'
    queue and returns the decorated function's ``delay`` method, so the
    caller triggers the crawl asynchronously.

    Args:
        timeout (int/string): the maximum runtime of the job
    """
    decorated_crawl = job('default', connection=redis_conn, timeout=timeout)(crawl)
    return decorated_crawl.delay
示例#2
0
    def restart_worker(self, wid, **kwargs):
        """Register ``restart_worker`` as an RQ task on the default queue
        and execute it asynchronously via ``delay``.

        @param wid: uuid of the workflow to be run
        @type wid: string
        """
        task = job(queue='default', connection=redis_conn)(restart_worker)
        return task.delay(wid, **kwargs)
示例#3
0
    def run(self, wname, data, external_save=None):
        """Register the ``runit`` function as an RQ task on the default
        queue and execute it asynchronously via ``delay``.

        @wname: str, name of the workflow to be run
        @data: list of dictionaries, objects for the workflow
        """
        async_runit = job(queue='default', connection=redis_conn)(runit)
        return async_runit.delay(wname, data, external_save=external_save)
示例#4
0
    def restart_worker(self, wid, **kwargs):
        """Enqueue ``restart_worker`` on the default RQ queue; ``delay``
        runs it asynchronously.

        @param wid: uuid of the workflow to be run
        @type wid: string
        """
        decorate = job(queue='default', connection=redis_conn)
        return decorate(restart_worker).delay(wid, **kwargs)
示例#5
0
    def continue_worker(self, oid, restart_point, **kwargs):
        """Register ``continue_worker`` as an RQ task on the default queue
        and execute it asynchronously via ``delay``.

        @param oid: uuid of the object to be started
        @type oid: string

        @param restart_point: sets the start point
        @type restart_point: string
        """
        task = job(queue='default', connection=redis_conn)(continue_worker)
        return task.delay(oid, restart_point, **kwargs)
示例#6
0
    def run_worker(self, workflow_name, data, **kwargs):
        """Register ``run_worker`` as an RQ task on the default queue and
        execute it asynchronously via ``delay``.

        @param workflow_name: name of the workflow to be run
        @type workflow_name: string

        @param data: list of objects for the workflow
        @type data: list
        """
        enqueue = job(queue='default', connection=redis_conn)(run_worker).delay
        return enqueue(workflow_name, data, **kwargs)
示例#7
0
File: rq.py  Project: BrianHicks/emit
    def wrap_node(self, node, options):
        '''Wrap *node* with the RQ ``job`` decorator.

        Nodes can be constructed here, so different queues can be used
        per node without keeping separate queue objects. Each setting is
        read from *options* with a fallback default.
        '''
        defaults = (
            ('queue', 'default'),
            ('connection', self.redis_connection),
            ('timeout', None),
            ('result_ttl', 500),
        )
        job_kwargs = {key: options.get(key, fallback)
                      for key, fallback in defaults}
        return job(**job_kwargs)(node)
示例#8
0
    def continue_worker(self, oid, restart_point, **kwargs):
        """Enqueue ``continue_worker`` on the default RQ queue; ``delay``
        runs it asynchronously.

        @param oid: uuid of the object to be started
        @type oid: string

        @param restart_point: sets the start point
        @type restart_point: string
        """
        decorate = job(queue='default', connection=redis_conn)
        return decorate(continue_worker).delay(oid, restart_point, **kwargs)
示例#9
0
    def run_worker(self, workflow_name, data, **kwargs):
        """Enqueue ``run_worker`` on the default RQ queue; ``delay`` runs
        it asynchronously.

        @param workflow_name: name of the workflow to be run
        @type workflow_name: string

        @param data: list of objects for the workflow
        @type data: list
        """
        wrapped = job(queue='default', connection=redis_conn)(run_worker)
        return wrapped.delay(workflow_name, data, **kwargs)
示例#10
0
    def wrap_node(self, node, options):
        '''Decorate *node* as an RQ job.

        Constructing nodes here lets different queues be used per node
        without holding separate queue objects; every setting falls back
        to a default when absent from *options*.
        '''
        queue = options.get('queue', 'default')
        connection = options.get('connection', self.redis_connection)
        timeout = options.get('timeout', None)
        result_ttl = options.get('result_ttl', 500)

        return job(queue=queue, connection=connection,
                   timeout=timeout, result_ttl=result_ttl)(node)
示例#11
0
    def test_decorator_custom_queue_class(self):
        """Ensure that a custom queue class can be passed to the job decorator"""
        class CustomQueue(Queue):
            pass

        # Stub out enqueue_call so we can count invocations without Redis.
        CustomQueue.enqueue_call = mock.MagicMock(
            spec=lambda *args, **kwargs: None, name='enqueue_call')

        decorator = job(queue='default', queue_class=CustomQueue)
        self.assertIs(decorator.queue_class, CustomQueue)

        @decorator
        def add_numbers(x, y):
            return x + y

        add_numbers.delay(1, 2)
        self.assertEqual(CustomQueue.enqueue_call.call_count, 1)
示例#12
0
    def test_decorator_custom_queue_class(self):
        """Ensure that a custom queue class can be passed to the job decorator"""
        class CustomQueue(Queue):
            pass

        # Replace enqueue_call with a mock so the call count is observable.
        CustomQueue.enqueue_call = mock.MagicMock(
            spec=lambda *args, **kwargs: None,
            name='enqueue_call',
        )

        decorated = job(queue='default', queue_class=CustomQueue)
        self.assertIs(decorated.queue_class, CustomQueue)

        @decorated
        def sum_pair(x, y):
            return x + y

        sum_pair.delay(1, 2)
        self.assertEqual(CustomQueue.enqueue_call.call_count, 1)
示例#13
0
    def restart(self,
                wid,
                data=None,
                restart_point="beginning",
                external_save=None):
        """Register ``restartit`` as an RQ task on the default queue and
        execute it asynchronously via ``delay``.

        @wname: str, name of the workflow to be run
        @data: list of dictionaries, objects for the workflow; set to
            None if not given, in which case they are retrieved from
            the db
        @restart_point: str, sets the restart point
        """
        task = job(queue='default', connection=redis_conn)(restartit)
        return task.delay(wid, data=data, restart_point=restart_point,
                          external_save=external_save)
示例#14
0
File: tasks.py  Project: yxy/django-slack
def get_rq_task(connection=None):
    """Return ``_sender`` wrapped as an RQ job on the configured queue.

    NOTE(review): the ``connection`` parameter is currently unused; it is
    kept for interface compatibility with existing callers.
    """
    from rq.decorators import job
    from django_rq.queues import get_queue

    target_queue = get_queue(app_settings.BACKEND_QUEUE_NAME)
    return job(queue=target_queue)(_sender)