def main():
    max_workers = 5
    if "GOL_WORKERS" in os.environ:
        max_workers = int(os.getenv("GOL_WORKERS"))

    processes = []
    q = None
    for _ in range(0, 3):
        try:
            rq.use_connection(redis.Redis())
            q = rq.Queue("lifeboxQueue")
            break
        except Exception:
            time.sleep(5)

    if q is None:
        print("Could not establish connection to redis, exiting")
        sys.exit(99)

    for _ in range(0, max_workers):
        try:
            proc = multiprocessing.Process(target=rq.Worker(q).work)
            proc.start()
            processes.append(proc)
        except Exception as ex:
            print(ex)
            traceback.print_exc()

    try:
        while True:
            tbd = []
            for proc in processes:
                if not proc.is_alive():
                    try:
                        tbd.append(proc)
                        newproc = multiprocessing.Process(
                            target=rq.Worker(q).work)
                        newproc.start()
                        processes.append(newproc)
                    except Exception as ex:
                        print(ex)
                        traceback.print_exc()

            for proc in tbd:
                processes.remove(proc)

            time.sleep(10)

    except KeyboardInterrupt:
        print("interrupted, exiting and killing workers")
    finally:
        for proc in processes:
            proc.kill()
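# Producer-side sketch for the pool above: the workers only consume jobs, so
# something must enqueue onto the same "lifeboxQueue". The dotted task path
# "tasks.simulate_generation" is hypothetical; any callable importable by
# the workers will do.
def enqueue_example():
    q = rq.Queue("lifeboxQueue", connection=redis.Redis())
    job = q.enqueue("tasks.simulate_generation", board_id=1)
    print("enqueued", job.id)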
 def run(self):
     """ Run the queue-worker """
     with rq.Connection():
         # start the task worker; work() blocks until the worker stops
         self.worker = rq.Worker(self.q)
         print('Worker started')
         self.worker.work()
Example #3
def main():
    # check deps
    planner_url = os.environ.get('KRAKEN_PLANNER_URL',
                                 consts.DEFAULT_PLANNER_URL)
    srvcheck.check_url('planner', planner_url, 7997)

    redis_addr = os.environ.get('KRAKEN_REDIS_ADDR', consts.DEFAULT_REDIS_ADDR)
    srvcheck.check_tcp_service('redis', redis_addr, 6379)
    rds = redis.Redis(host=redis_addr, port=6379, db=consts.REDIS_RQ_DB)

    db_url = os.environ.get('KRAKEN_DB_URL', consts.DEFAULT_DB_URL)
    srvcheck.check_postgresql(db_url)

    logs.setup_logging('rq')
    log.info('Kraken RQ started, version %s', version.version)

    # Create Flask app instance
    app = Flask('Kraken RQ')
    app.config["SQLALCHEMY_ECHO"] = False
    app.config["SQLALCHEMY_DATABASE_URI"] = db_url + '?application_name=rq'
    app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False

    # initialize SqlAlchemy
    db.init_app(app)

    # setup sentry
    with app.app_context():
        sentry_url = get_setting('monitoring', 'sentry_dsn')
        logs.setup_sentry(sentry_url)

    worker = rq.Worker('kq',
                       connection=rds,
                       exception_handlers=[_exception_handler])
    worker.work()
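# Sketch: _exception_handler is not shown in this excerpt. rq exception
# handlers take (job, exc_type, exc_value, traceback); returning False stops
# the remaining handlers, anything else lets rq's default handling (moving
# the job to the failed job registry) proceed. A minimal guess at its shape:
def _exception_handler(job, exc_type, exc_value, tb):
    log.error('job %s failed: %s', job.id, exc_value)
    return True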
Example #4
def test_transport_shutdown(sentry_init):
    sentry_init(integrations=[RqIntegration()])

    events_r, events_w = os.pipe()
    events_r = os.fdopen(events_r, "rb", 0)
    events_w = os.fdopen(events_w, "wb", 0)

    def capture_event(event):
        events_w.write(json.dumps(event).encode("utf-8"))
        events_w.write(b"\n")

    def flush(timeout=None, callback=None):
        events_w.write(b"flush\n")

    Hub.current.client.transport.capture_event = capture_event
    Hub.current.client.flush = flush

    queue = rq.Queue(connection=FakeStrictRedis())
    worker = rq.Worker([queue], connection=queue.connection)

    queue.enqueue(crashing_job, foo=42)
    worker.work(burst=True)

    event = events_r.readline()
    event = json.loads(event.decode("utf-8"))
    exception, = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"

    assert events_r.readline() == b"flush\n"
Example #5
def run():
    q = None
    queue_name = input()
    try:
        redis_url = settings.STRUCTURE_REDIS
    except AttributeError:
        redis_url = None
    # Fall back to a default local Redis if no URL is configured.
    connection = (redis.Redis.from_url(redis_url)
                  if redis_url else redis.Redis())
    too_high_queue = rq.Queue(Consts.QUEUE_PRIORITY_TOO_HIGH,
                              connection=connection)
    high_queue = rq.Queue(Consts.QUEUE_PRIORITY_HIGH, connection=connection)
    normal_queue = rq.Queue(Consts.QUEUE_PRIORITY_NORMAL,
                            connection=connection)
    low_queue = rq.Queue(Consts.QUEUE_PRIORITY_LOW, connection=connection)
    if queue_name == "structure:higher":
        q = too_high_queue
    elif queue_name == "structure:high":
        q = high_queue
    elif queue_name == "structure:normal":
        q = normal_queue
    elif queue_name == "structure:low":
        q = low_queue
    else:
        print("Error: unrecognized queue name: %s" % queue_name)
    if q is not None:
        with rq.Connection(connection):
            w = rq.Worker([q])
            w.work()
Example #6
 def run(self, redis):
     redis_connection = Redis(redis[0], redis[1], password=redis[2])
     with Connection(redis_connection):
         # map() is lazy and always truthy on Python 3, so build a
         # real list of queues instead.
         qs = [Queue(name) for name in self.queues] or [Queue()]
         w = rq.Worker(qs)
         w.work()
         # len() on an rq Queue reports the number of queued jobs.
         print('Items in queue \'{0}\': {1}'.format(self.queues[0],
                                                    len(qs[0])))
Example #7
def main():
    utils.setup_logging()
    config.log()
    gh_pr.monkeypatch_github()
    if config.FLUSH_REDIS_ON_STARTUP:
        utils.get_redis().flushall()
    with rq.Connection(utils.get_redis()):
        worker = rq.Worker(['default'])
        worker.work()
Example #8
def work():
    """Start an rq worker on the connection provided by create_connection."""
    # Preload ROOT module to reduce worker startup time
    import ROOT  # noqa
    with rq.Connection(create_connection()):
        worker = rq.Worker(list(map(create_queue, QUEUES)))
        # Quiet workers to suppress large result output
        # https://github.com/nvie/rq/issues/136
        worker.log.setLevel(logging.WARNING)
        worker.work()
Example #9
def main():
    """Sets up Redis connection and starts the worker."""
    redis_connection = redis.Redis(host="queue-server")
    with rq.Connection(redis_connection):
        queue = rq.Queue('build_n_run_queue')
        worker = rq.Worker([queue])

        while queue.count + queue.deferred_job_registry.count > 0:
            worker.work(burst=True)
            time.sleep(5)
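# Sketch: the loop above counts deferred_job_registry because jobs enqueued
# with depends_on wait there until their parent succeeds. Producer side,
# with hypothetical "tasks.build" / "tasks.run_tests" task paths:
def enqueue_pipeline():
    connection = redis.Redis(host="queue-server")
    q = rq.Queue('build_n_run_queue', connection=connection)
    build_job = q.enqueue("tasks.build")
    # Sits in q.deferred_job_registry until build_job finishes successfully.
    q.enqueue("tasks.run_tests", depends_on=build_job)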
Example #10
def start_worker(queue_name, redis_url, with_scheduler=False):
    redis_conn = redis.from_url(redis_url)

    with rq.Connection(redis_conn):
        worker = rq.Worker(queues=[queue_name], connection=redis_conn)
        worker.work(with_scheduler=with_scheduler)
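# Sketch: with_scheduler=True makes the worker also run rq's scheduler
# (rq >= 1.2), which is what makes enqueue_in/enqueue_at jobs fire. The URL
# and "tasks.ping" below are placeholders:
from datetime import timedelta

def schedule_example():
    conn = redis.from_url("redis://localhost:6379")
    q = rq.Queue("default", connection=conn)
    # Runs ~10 minutes later, but only if a worker runs with the scheduler.
    q.enqueue_in(timedelta(minutes=10), "tasks.ping")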
Example #11
def main():  # pragma: no cover
    utils.setup_logging()
    config.log()
    gh_pr.monkeypatch_github()
    r = utils.get_redis_for_rq()
    if config.FLUSH_REDIS_ON_STARTUP:
        r.flushall()
    with rq.Connection(r):
        worker = rq.Worker(['default'])
        if config.SENTRY_URL:
            client = raven.Client(config.SENTRY_URL, transport=HTTPTransport)
            register_sentry(client, worker)
        worker.work()
Example #12
 def command(self):
     self._load_config()
     queue.rq_config.in_worker = True
     connection = queue.rq_config.connection
     if not connection:
         log.error('No redis connection available')
         exit(1)
     queue_ = queue.rq_config.queue
     if not queue_:
         log.error('No queue available.')
         exit(1)
     worker = rq.Worker([queue_], connection=connection)
     worker.work()
Example #13
def load_app(config):
    database = config.get('database')
    queues = config.get('queues', ['default'])

    redis_conn = redis.from_url(database)

    hostname = socket.gethostname()
    name = '%s-%s' % (hostname, uuid.uuid4().hex)

    with rq.Connection(redis_conn):
        # map() is lazy on Python 3; pass a real list of queues.
        worker = rq.Worker([rq.Queue(q) for q in queues], name=name)

    return worker
Example #14
def main(argv):
    # CONNECT TO DATABASE

    # UNCOMMENT NEXT 4 LINES
    # connect("extensionsASTnpantel", username="******", host="localhost", port=37017)

    # load data from query
    # queue = readFromDatabase()
    # queuetest = ['a','b','c','d','e','f','g','h','i','j','k','l']
    # put them and run them in the queue
    # specialLCSHandler(queue)
    with rq.Connection(redis.Redis(REDIS_HOST, REDIS_PORT)) as connection:

        # print(job.result)
        w = rq.Worker([QUEUE_NAME])
        w.work()
Example #15
def test_transport_shutdown(sentry_init, capture_events_forksafe):
    sentry_init(integrations=[RqIntegration()])

    events = capture_events_forksafe()

    queue = rq.Queue(connection=FakeStrictRedis())
    worker = rq.Worker([queue], connection=queue.connection)

    queue.enqueue(crashing_job, foo=42)
    worker.work(burst=True)

    event = events.read_event()
    events.read_flush()

    (exception, ) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"
Example #16
    def run(self, args):
        redis = StrictRedis(config.REDIS_HOST, config.REDIS_PORT)
        workers = []

        queue_config = parse_queue_config(args.config)
        with rq.Connection(redis):
            for name, count in queue_config:
                queue = rq.Queue(name)

                for i in range(count):
                    w = rq.Worker([queue], name='{}:{}'.format(name, i))
                    workers.append(w)

        procs = [Process(target=start_worker, args=(w, )) for w in workers]
        for p in procs:
            p.start()

        for p in procs:
            p.join()
Example #17
#!/usr/bin/env python
import sys
import rq

# Preload libraries
from app import router
router.resolve_node_modules()

# Provide queue names to listen to as arguments to this script,
# similar to rqworker
with rq.Connection():
    # map() is always truthy on Python 3, so build a real list.
    qs = [rq.Queue(name) for name in sys.argv[1:]] or [rq.Queue()]

    w = rq.Worker(qs)
    w.work()
Example #18
import rq  # type: ignore
from redis import Redis
import os

import init_log
import logging

if __name__ == "__main__":
    init_log.config_worker_log()
    # Tell rq what Redis connection to use
    with rq.Connection(
            Redis.from_url(os.environ.get("REDIS_URL") or "redis://")):
        q = rq.Queue()
        rq.Worker(q).work()
Example #19
parser.add_argument(
    "--burst",
    action="store_const",
    const=True,
    default=False,
    help="enable burst mode")
args = parser.parse_args()

wiring = backend.wiring.Wiring()


class JobWithWiring(Job):

    @property
    def kwargs(self):
        result = dict(super().kwargs)
        result["wiring"] = backend.wiring.Wiring()
        return result

    @kwargs.setter
    def kwargs(self, value):
        # "super().kwargs = value" raises AttributeError (attributes cannot
        # be set through super()); call the parent property's setter instead.
        Job.kwargs.fset(self, value)


with rq.Connection(wiring.redis):
    w = rq.Worker(
        queues=[wiring.settings.TASK_QUEUE_NAME],
        name=uuid.uuid4().hex,
        job_class=JobWithWiring)
    w.work(burst=args.burst)
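# Producer-side sketch for the JobWithWiring pattern above: callers enqueue
# without a "wiring" kwarg; the worker-side kwargs property injects a fresh
# Wiring when the job executes ("backend.tasks.process_task" is hypothetical):
q = rq.Queue(wiring.settings.TASK_QUEUE_NAME, connection=wiring.redis)
q.enqueue("backend.tasks.process_task", task_id=42)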
Example #20
def run_workers_py(q):
    for i in range(2):
        print('Started worker {}'.format(i))
        worker = rq.Worker([q], connection=q.connection)
        worker.work(burst=True)  # Runs enqueued jobs, then exits
Example #21
 def exec_task(self, queue=ExtracterConsts.QUEUE_PRIORITY_NORMAL):
     with rq.Connection():
         w = rq.Worker([queue])
         w.work()
Example #22
def retry_handler(job, exc_type, exc_value, traceback):
    # Too many failures
    if job.meta['failures'] >= MAX_FAILURES:
        logger.warning(
            'job %s: failed too many times - moving to failed queue' %
            job.id)
        job.save()
        return True

    # Requeue job and stop it from being moved into the failed queue
    logger.warning('job %s: failed %d times - retrying' %
                   (job.id, job.meta['failures']))

    for queue in queues:
        if queue.name == job.origin:
            queue.enqueue_job(job, timeout=job.timeout)
            return False

    # Can't find queue, which should basically never happen as we only work jobs that match the given queue names and
    # queues are transient in rq.
    logger.warning('job %s: cannot find queue %s - moving to failed queue' %
                   (job.id, job.origin))
    return True


with rq.Connection():
    # map() is always truthy on Python 3, so build a real list.
    queues = [rq.Queue(name) for name in sys.argv[1:]] or [rq.Queue()]

    worker = rq.Worker(queues)
    worker.push_exc_handler(retry_handler)
    worker.work()
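# Sketch: nothing in this excerpt increments job.meta['failures']. One way is
# a second handler pushed after retry_handler (and before worker.work());
# rq applies handlers in reverse push order, so this one would run first:
def count_failures(job, exc_type, exc_value, traceback):
    job.meta['failures'] = job.meta.get('failures', 0) + 1
    job.save()
    return True  # continue, so retry_handler sees the updated counter

# worker.push_exc_handler(count_failures)  # goes before worker.work() above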
Example #23
# Workaround current bug in docutils:
# http://permalink.gmane.org/gmane.text.docutils.devel/6324
import docutils.utils

# Make sure our PyPI directory is on the sys.path
root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path = [root] + sys.path

import config

conf = config.Config(
    os.environ.get("PYPI_CONFIG", os.path.join(root, "config.ini")))
redis_conn = redis.Redis.from_url(conf.queue_redis_url)

# Create our queues
if sys.argv[1:]:
    queues = [rq.Queue(name, connection=redis_conn) for name in sys.argv[1:]]
else:
    queues = [rq.Queue(connection=redis_conn)]

# Create our Worker
worker = rq.Worker(queues, connection=redis_conn)

# Create our Sentry Client
if conf.sentry_dsn:
    raven_client = raven.Client(conf.sentry_dsn)
    rq.contrib.sentry.register_sentry(raven_client, worker)

# Run our worker, fetching jobs from the queue
worker.work()
Example #24
def main():
    with rq.Connection(redis.from_url(REDIS_URL)):
        worker = rq.Worker(['default'])
        worker.work()
Example #25
import os
import redis
import rq

RQ_REDIS_URL = "redis://redis_server"

with rq.Connection(redis.from_url(RQ_REDIS_URL)):
    worker = rq.Worker(['default'])
    worker.work()
Example #26
                    action="store_const",
                    const=True,
                    default=False,
                    help="enable burst mode")
args = parser.parse_args()

# We need the settings and a connection to Redis.
wiring = backend.wiring.Wiring()


class JobWithWiring(Job):
    @property
    def kwargs(self):
        result = dict(super().kwargs)
        result["wiring"] = backend.wiring.Wiring()
        return result

    @kwargs.setter
    def kwargs(self, value):
        # "super().kwargs = value" raises AttributeError (attributes cannot
        # be set through super()); call the parent property's setter instead.
        Job.kwargs.fset(self, value)


with rq.Connection(wiring.redis):
    w = rq.Worker(
        queues=[wiring.settings.TASK_QUEUE_NAME],
        # If we want to run several workers in different
        # containers, they will need unique names.
        name=uuid.uuid4().hex,
        job_class=JobWithWiring)
    w.work(burst=args.burst)
Example #27
 def run_worker():
     with rq.Connection():
         qs = ['labmanager_unittests']
         w = rq.Worker(qs)
         w.work()
Example #28
def run_worker():
    print("WORKING")
    worker = rq.Worker([queue], connection=queue.connection)
    worker.work()
Example #29
import os
import redis
import rq

listen = ['high', 'default', 'low']

redis_url = os.getenv('REDISTOGO_URL', 'redis://localhost:6379')

connection = redis.from_url(redis_url)

if __name__ == '__main__':
    with rq.Connection(connection):
        # map() is lazy on Python 3; pass a real list of queues.
        worker = rq.Worker([rq.Queue(name) for name in listen])
        worker.work()
Example #30
 def worker(self):
     """Return the worker class."""
     if not self._worker:
         self._worker = rq.Worker(queues=self._queues, name=self._name)
     return self._worker