Example #1
import os

import redis
from rq import Worker, Queue, Connection
from rq.handlers import move_to_failed_queue

from exception_handler import post_to_db

listen = ['default']

# Heroku sets the REDISTOGO_URL env variable for the RedisToGo add-on;
# the default redis://redis_worker:6379 points to the local Docker Redis service
redis_url = os.getenv('REDISTOGO_URL', 'redis://redis_worker:6379')
connection = redis.from_url(redis_url)

if __name__ == '__main__':
    with Connection(connection):
        worker = Worker(map(Queue, listen),
                        exception_handlers=[post_to_db, move_to_failed_queue])
        worker.work()
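
The post_to_db handler imported above is not shown in this example. As a rough sketch only (the body and the save_failure helper below are assumptions, not the real exception_handler module), an RQ exception handler receives the failed job plus the exception info, and returning True lets the remaining registered handlers run as well:

# Hypothetical sketch of exception_handler.post_to_db
def post_to_db(job, exc_type, exc_value, traceback):
    # Persist the failure somewhere queryable; save_failure is a placeholder.
    save_failure({
        'job_id': job.id,
        'func_name': job.func_name,
        'error': str(exc_value),
    })
    return True  # let the other registered handlers run too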
Example #2
def start():
    with Connection(redis_conn):
        worker = Worker(map(Queue, listen),
                        exception_handlers=[handle_exception])
        worker.work()
Example #3
import os

import redis
from rq import Worker, Queue, Connection

listen = ['high', 'default', 'low']

redis_url = os.getenv('REDISTOGO_URL', 'redis://localhost:6379')

conn = redis.from_url(redis_url)

if __name__ == '__main__':
    with Connection(conn):
        worker = Worker(map(Queue, listen))
        worker.work()
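
A worker like this only consumes jobs; something else has to enqueue them. As a minimal sketch, assuming a hypothetical tasks.count_words_at_url function, a producer could push work onto one of the queues the worker listens to:

from rq import Queue

high_q = Queue('high', connection=conn)  # same conn as the worker above

# Enqueue by dotted path so the worker process can import the function;
# tasks.count_words_at_url is only an illustrative placeholder.
job = high_q.enqueue('tasks.count_words_at_url', 'http://example.com')
print(job.id)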
Example #4
 def test_custom_job_class(self):
     """Ensure Worker accepts custom job class."""
     q = Queue()
     worker = Worker([q], job_class=CustomJob)
     self.assertEqual(worker.job_class, CustomJob)
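
CustomJob is not defined in this snippet; a minimal stand-in, assuming no behavior is overridden, is simply a Job subclass:

from rq.job import Job

class CustomJob(Job):
    # Hypothetical subclass; a real project might override serialization or hooks.
    pass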
Example #5
 def handle(self, *args, **kwargs):
     if REDIS_QUEUE is not None:
         Worker(REDIS_QUEUE, connection=REDIS_QUEUE.connection).work()
Example #6
 def start_rq_worker(self, queue, burst=True):
     with Connection():
         worker = Worker([queue])
         worker.work(burst=burst)  # honor the burst argument instead of hardcoding True
Example #7
import requests
from rq import Connection, Worker

from constants import *
from backend.logger import log
from backend.storage import rdbq
import backend.util

with Connection(connection=rdbq):
    w = Worker([ 'QMO', 'QEV' ])
    w.work()
Example #8
import stats
from rq import Worker, Queue, Connection

queue = Queue("echo", connection=stats.CONN)
worker = Worker(["searches", "test"],
                name="test_worker",
                connection=stats.CONN)


def teardown_function():
    queue.delete(delete_jobs=True)
    worker.register_death()


def test_stat_scrape():
    queue.enqueue("echo_worker", "HEY!")
    queue.enqueue("echo_worker", "HO!")
    worker.register_birth()

    jobs, workers = stats.scrape()
    assert jobs == [{"queue_name": "echo", "size": 2}]
    assert workers == [{
        "name": "test_worker",
        "queues": "searches,test",
        "state": "?"
    }]
Example #9
def startWorker():
    conn = redisQueue()
    worker = Worker(Queue(connection=conn), connection=conn)
    worker.work()
Example #10
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function,
                        unicode_literals)

from rq import Connection, Queue, Worker

if __name__ == '__main__':
    # Tell rq what Redis connection to use
    with Connection():
        q = Queue()
        Worker(q).work()
Example #11
def run_preload_worker_cmd():
    with Connection():
        w = Worker(current_app.config['PRELOAD_QUEUE'])
        w.work()
Example #12
def runworker():
    with Connection(redis_conn):
        worker = Worker(config['QUEUES'])
        worker.work()
Example #13
import os
import json
import uuid
import time
import pathlib

from redis import Redis
from rq import Queue, Worker, Connection

from config import basedir


redis_conn = Redis()
q = Queue(connection=redis_conn)
w = Worker(q)


class Database(object):

    def __init__(self, dummy=False):
        self.data = []
        self.ids = []
        self.i_am_a_dummy = dummy
        self.timestamp_backup = time.time()

        if self.i_am_a_dummy:
            self.jsonl_fp = init_dummy_dictionary()
        else:
            self.jsonl_fp = os.path.join(basedir, 'data', 'dictionary.jsonl')
            self.backup_dir = os.path.join(basedir, 'data', 'backups')
Example #14
def run_worker(queue):
    with Connection(current_app.redis):
        qs = [queue] if queue else ['default']  # fall back to the default queue when none is given

        worker = Worker(qs)
        worker.work()
Example #15
 def test_create_worker(self):
     """Worker creation."""
     fooq, barq = Queue('foo'), Queue('bar')
     w = Worker([fooq, barq])
     self.assertEqual(w.queues, [fooq, barq])
Example #16
def worker(burst=False):
    with Connection(conn):
        worker = Worker(list(map(Queue, listen)))
        worker.work(burst=burst)
Example #17
#!/usr/bin/env python
import sys
from rq import Connection, Worker

with Connection():
    qs = sys.argv[1:] or ['default']
    w = Worker(qs)
    w.work()
Example #18
import os
import redis
from rq import Connection, Queue, Worker
from url_shortener import create_app

redis_url = os.environ.get('REDISTOGO_URL') or 'redis://localhost:6379'

conn = redis.from_url(redis_url)
try:
    conn.ping()
except redis.ConnectionError as err:
    import sys
    sys.exit(f'Cannot connect to Redis at {redis_url}: {err}')

app = create_app(os.environ.get('FLASK_CONFIG') or 'default')
app.app_context().push()

if __name__ == '__main__':
    with Connection(conn):
        worker = Worker(['default'], connection=conn, name='db_cleaner')
        worker.work()
Example #19
def worker():
    logging.info('this is worker')
    with Connection(conn):
        worker = Worker(map(Queue, listen))
        worker.work()
Example #20
#!/usr/bin/env python
"""Run a worker for the job queue.

:Copyright: 2006-2017 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""

from rq import Worker

from byceps.util.jobqueue import connection, get_queue
from byceps.util.system import get_config_filename_from_env_or_exit

from bootstrap.util import app_context

if __name__ == '__main__':
    config_filename = get_config_filename_from_env_or_exit()

    with app_context(config_filename):
        with connection():
            queues = [get_queue()]

            worker = Worker(queues)
            worker.work()
Example #21
    def test_create_worker(self):
        """Worker creation using various inputs."""

        # With single string argument
        w = Worker('foo')
        self.assertEqual(w.queues[0].name, 'foo')

        # With list of strings
        w = Worker(['foo', 'bar'])
        self.assertEqual(w.queues[0].name, 'foo')
        self.assertEqual(w.queues[1].name, 'bar')

        self.assertEqual(w.queue_keys(), [w.queues[0].key, w.queues[1].key])
        self.assertEqual(w.queue_names(), ['foo', 'bar'])

        # With iterable of strings
        w = Worker(iter(['foo', 'bar']))
        self.assertEqual(w.queues[0].name, 'foo')
        self.assertEqual(w.queues[1].name, 'bar')

        # Also accept byte strings in Python 2
        if PY2:
            # With single byte string argument
            w = Worker(b'foo')
            self.assertEqual(w.queues[0].name, 'foo')

            # With list of byte strings
            w = Worker([b'foo', b'bar'])
            self.assertEqual(w.queues[0].name, 'foo')
            self.assertEqual(w.queues[1].name, 'bar')

            # With iterable of byte strings
            w = Worker(iter([b'foo', b'bar']))
            self.assertEqual(w.queues[0].name, 'foo')
            self.assertEqual(w.queues[1].name, 'bar')

        # With single Queue
        w = Worker(Queue('foo'))
        self.assertEqual(w.queues[0].name, 'foo')

        # With iterable of Queues
        w = Worker(iter([Queue('foo'), Queue('bar')]))
        self.assertEqual(w.queues[0].name, 'foo')
        self.assertEqual(w.queues[1].name, 'bar')

        # With list of Queues
        w = Worker([Queue('foo'), Queue('bar')])
        self.assertEqual(w.queues[0].name, 'foo')
        self.assertEqual(w.queues[1].name, 'bar')

        # With string and serializer
        w = Worker('foo', serializer=json)
        self.assertEqual(w.queues[0].name, 'foo')

        # With queue having serializer
        w = Worker(Queue('foo'), serializer=json)
        self.assertEqual(w.queues[0].name, 'foo')
Example #22
def run_worker():
    redis_connection = Redis(app.config['REDIS_URL'])
    with Connection(redis_connection):
        worker = Worker(['default'], exception_handlers=[rq_error_handler])
        worker.work()
Example #23
 def test_custom_queue_class(self):
     """Ensure Worker accepts custom queue class."""
     q = CustomQueue()
     worker = Worker([q], queue_class=CustomQueue)
     self.assertEqual(worker.queue_class, CustomQueue)
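
As with the custom job class earlier, CustomQueue is not shown here; a minimal placeholder, again assuming no overridden behavior, would be:

from rq import Queue

class CustomQueue(Queue):
    # Hypothetical subclass used only to exercise the queue_class plumbing.
    pass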
Example #24
def run_async_worker():
    worker = Worker([JOB_QUEUE], connection=REDIS_CONNECTION)
    worker.work()
Example #25
def run_worker():
    redis_url = app.config['REDIS_URL']
    redis_connection = redis.from_url(redis_url)
    with Connection(redis_connection):
        worker = Worker(app.config['QUEUES'])
        worker.work()
Example #26
def start_worker(channels):
    with Connection(connection=redis_conn):
        worker = Worker(map(Queue, channels))
        worker.work()
Example #27
def listen():
    worker = Worker(queues=[queue], connection=redis)
    worker.work(with_scheduler=True)
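
work(with_scheduler=True) runs RQ's built-in scheduler alongside the worker, so jobs enqueued for a later time actually get dispatched. A small sketch of a matching producer, assuming a hypothetical tasks.send_reminder function, might look like:

from datetime import timedelta
from rq import Queue

default_q = Queue(connection=redis)  # same connection the worker above uses

# enqueue_in defers execution by the given delay; the job only fires if a
# worker is running with with_scheduler=True.
default_q.enqueue_in(timedelta(minutes=10), 'tasks.send_reminder', 'user@example.com')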
Example #28
    if dt_boxes is None or rec_res is None:
        temp_rec_res = []
        rec_res_data = json.dumps(temp_rec_res, indent=2, ensure_ascii=False)
        elapse = 0
        image = cv2.imencode('.jpg', img)[1]
        img = str(base64.b64encode(image))[2:-1]
    else:
        temp_rec_res = []
        for i, value in enumerate(rec_res):
            temp_rec_res.append([i, value[0], value[1]])
        temp_rec_res = np.array(temp_rec_res)
        rec_res_data = json.dumps(temp_rec_res.tolist(),
                                  indent=2,
                                  ensure_ascii=False)

        det_im = draw_text_det_res(dt_boxes, img)
        image = cv2.imencode('.jpg', det_im)[1]
        img = str(base64.b64encode(image))[2:-1]

        elapse = f'{t2-t1:.3f}'
    return json.dumps({
        'image': img,
        'elapse': elapse,
        'rec_res': rec_res_data
    })


if __name__ == '__main__':
    with Connection(conn):  # establish the connection to the Redis server
        worker = Worker(list(map(Queue, listen)))  # create a worker listening on the given queues
        worker.work()
Example #29
#!/usr/bin/env python
import sys
from rq import Connection, Worker
import os

from retrobiocat_web.app.app import create_app

scheduler = os.environ.get('SCHEDULER') or False
production_mode = os.environ.get('PRODUCTION') or False

if __name__ == '__main__':
    app = create_app(use_talisman=production_mode)
    app.app_context().push()

    with Connection(app.redis):
        qs = sys.argv[1:] or [
            'tasks', 'network', 'pathway', 'db', 'process_blasts', 'alignment',
            'blast', 'preprocess', 'osra'
        ]
        if 'auto_jobs' in qs:
            scheduler = True
        w = Worker(qs, log_job_description=False)
        w.work(with_scheduler=scheduler)
Example #30
def main():
    redis_conn = StrictRedis.from_url(environ.get('REDIS_URL', 'redis://localhost'),
                                      retry_on_timeout=True)
    with Connection(redis_conn):
        worker_name = 'simulation.{}.{:%s}'.format(getpid(), now())
        Worker(['d3a'], name=worker_name).work()
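
This last snippet relies on imports defined elsewhere in its module. The names it uses roughly correspond to the following; the source of now() is an assumption, and any callable returning a datetime would do:

from os import environ, getpid
from redis import StrictRedis
from rq import Connection, Worker
from pendulum import now  # assumption; a datetime-based helper would work equally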