Code Example #1
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# pylint: disable-msg=W0311

import sys
reload(sys)
sys.setdefaultencoding("utf-8")

import api
from rq import Queue, Worker, Connection

with Connection(api.QUEUE):
    q = Queue(sys.argv[1])
    Worker(q).work()
Code Example #2
from store import create_app
from rq import Connection, Worker
from store.services import DownloadQueueFactory

app = create_app()
app.app_context().push()

with Connection(DownloadQueueFactory.create()):
    worker = Worker(app.config['REDIS_QUEUES'])
    worker.work()
Code Example #3
File: fixtures.py Project: mio2mugi/rq

# Imports needed by this fixtures excerpt: time for long_running_job, plus
# Connection, the job decorator and the PY2 flag from rq.
import time

from rq import Connection
from rq.compat import PY2
from rq.decorators import job


class CallableObject(object):
    def __call__(self):
        return u"I'm callable"


class UnicodeStringObject(object):
    def __repr__(self):
        if PY2:
            return u'é'.encode('utf-8')
        else:
            return u'é'


with Connection():

    @job(queue='default')
    def decorated_job(x, y):
        return x + y


def black_hole(job, *exc_info):
    # Don't fall through to default behaviour (moving to failed queue)
    return False


def long_running_job(timeout=10):
    time.sleep(timeout)
    return 'Done sleeping...'
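
The black_hole fixture above follows rq's custom exception handler protocol: returning False stops the handler chain, so the job is not moved to the failed queue. A minimal sketch of attaching such a handler to a worker, assuming a local Redis server:

from redis import Redis
from rq import Connection, Queue, Worker

with Connection(Redis()):
    q = Queue('default')
    # Handlers run when a job raises; black_hole returning False
    # suppresses the default failure handling.
    w = Worker([q], exception_handlers=[black_hole])
    w.work()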
Code Example #4
# Excerpt: the original snippet starts mid-call; coco is assumed here to be
# a pycocotools COCO dataset loaded from the annotation file.
coco = COCO(
    os.path.join(args.oil_change_data_root, 'annotations', args.ann_file))
cats = {cat['id']: cat for cat in coco.dataset['categories']}

cameras = {camera['name']: camera for camera in coco.dataset['cameras']}
camera_info = cameras[args.camera_name]
camera_parameters = {
    'width': camera_info['width'],
    'height': camera_info['height'],
    'f_x': camera_info['K'][0],
    'f_y': camera_info['K'][4],
    'p_x': camera_info['K'][2],
    'p_y': camera_info['K'][5]
}

cat_ids = [int(c.strip()) for c in args.cat_ids.split(',')]
with Connection(Redis(args.redis_host, args.redis_port)):
    q = Queue(args.queue_name)
    for i, subset_start_index in enumerate(
            tqdm(range(0, args.num_examples, args.subset_size))):
        for cat_id in cat_ids:
            cat = cats[cat_id]
            model_path = os.path.join(args.oil_change_data_root, 'meshes',
                                      cat['mesh'])
            completed_dir = os.path.join(args.output_cache_dir,
                                         args.camera_name, cat['name'],
                                         'completed')
            if not os.path.exists(completed_dir):
                os.makedirs(completed_dir)
            output_dir = os.path.join(args.output_cache_dir, args.camera_name,
                                      cat['name'],
                                      'subset_{:08}'.format(i + 1))
Code Example #5
#!/usr/bin/env python
import sys
from rq import Connection, Worker
import redis
import os

# Provide queue names to listen to as arguments to this script,
# similar to rq worker
r = redis.Redis(host=os.environ["REDIS_HOST"])
with Connection(connection=r):
    qs = sys.argv[1:] or [os.environ["RQ_QUEUE"]]

    w = Worker(qs)

    w.work()
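
The worker above reads queue names from its command-line arguments, falling back to the RQ_QUEUE environment variable. For completeness, a minimal producer sketch that enqueues a job onto that queue; "mymodule.my_task" is a hypothetical import path standing in for a real task function:

import os

import redis
from rq import Queue

r = redis.Redis(host=os.environ["REDIS_HOST"])
q = Queue(os.environ.get("RQ_QUEUE", "default"), connection=r)
# rq can enqueue by dotted path, so the task module only needs to be
# importable on the worker side.
job = q.enqueue("mymodule.my_task", 42)
print(job.id)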
Code Example #6
def sender_task(message, interval, no_wa):
    url = BaseConfig.REDIS_URL
    with Connection(redis.from_url(url)):
        q = Queue()
        q.enqueue(create_task, message, interval, no_wa)
Code Example #7
import os

from dotenv import load_dotenv
from rq import Connection, Queue

load_dotenv(verbose=True)

from recommender.db_config import primary_redis_conn


# import correct worker
if os.environ.get("IS_HEROKU"):
    from rq.worker import HerokuWorker as Worker
else:
    from rq.worker import Worker

# optimization -> Import all libraries used in the consumer function
from recommender.rcv.election_result_update_consumer import ElectionResultUpdateConsumer

QUEUES_TO_WORK = [ElectionResultUpdateConsumer.QUEUE_NAME]

if __name__ == "__main__":
    with Connection(primary_redis_conn):
        worker = Worker(map(Queue, QUEUES_TO_WORK))
        worker.work()
Code Example #8
File: paropt_manager.py Project: ZhuozhaoLi/ParaOpt
@classmethod
def getQueuedJobs(cls):
    """Get a list of currently enqueued jobs."""
    with Connection(redis.from_url(current_app.config['REDIS_URL'])):
        q = Queue()
        return q.jobs  # a list of enqueued Job instances
Code Example #9
File: worker.py Project: latha43/parrot_modified
#!/usr/bin/env python
import sys
from redis import Redis
from rq import Connection, Worker
from utils import serializer

redis = Redis(host=serializer.rq_host, port=serializer.rq_port)

with Connection(connection=redis):
    qs = sys.argv[1:] or ['default']
    w = Worker(qs)
    w.work()
Code Example #10
from rq import Queue, Worker, Connection
from redis import Redis
import crawl_config

if __name__ == '__main__':
    redis_conn = Redis(crawl_config.redis_server)
    with Connection(connection=redis_conn):
        q = Queue() 
        Worker(q).work()
Code Example #11
File: paropt_manager.py Project: ZhuozhaoLi/ParaOpt
@classmethod
def getDeferredExperiments(cls):
    """Get the currently deferred experiment jobs."""
    with Connection(redis.from_url(current_app.config['REDIS_URL'])) as conn:
        registry = DeferredJobRegistry('default', connection=conn)
        return [Job.fetch(job_id, connection=conn)
                for job_id in registry.get_job_ids()]
Code Example #12
File: web.py Project: peerplays-network/bos-auto
def trigger():
    """ This endpoint is used to submit data to the queue so we can process it
        asynchronously to the web requests. The webrequests should be answered
        fast, while the processing might take more time

        The endpoint opens an API according to the ``--port`` and ``--host``
        settings on launch. Thise API provides an endpoint on

            /trigger

        and consumes POST messages with JSON formatted body.

        The body is validated against the incident schema defined in
        bos-incidents

        .. note:: The trigger endpoint stores the incidents through
                  (bos-incidents) already to allow later replaying.
    """
    if request.method == "POST":
        # Don't bother with requests from IPs that are not
        # whitelisted
        if request.remote_addr not in api_whitelist and "0.0.0.0" not in api_whitelist:
            return "Your IP address is not allowed to post here!", 403

        # Obtain message from request body
        incident = request.get_json()

        # Ensure it is json
        try:
            validator.validate_incident(incident)
        except InvalidIncidentFormatException:
            log.error("Received invalid request: {}".format(str(incident)))
            return "Invalid data format", 400

        # Only accept normalizable incidents
        # Normalize incident
        normalizer = IncidentsNormalizer(chain=config.get("network", "beatrice"))
        try:
            incident = normalizer.normalize(incident, True)
        except NotNormalizableException:
            log.warning(
                "Received not normalizable incident, discarding {}".format(
                    str(incident)
                )
            )
            return "Not normalized incident", 400

        try:
            # FIXME, remove copy()
            storage.insert_incident(incident.copy())
        except exceptions.DuplicateIncidentException:
            # We already have this incident stored; alerting anyone
            # wouldn't add anything, so simply pass.
            # traceback.print_exc()
            pass

        # Send incident to redis
        with Connection(redis):
            q = Queue(connection=redis)
            job = q.enqueue(
                work.process,
                args=(incident,),
                kwargs=dict(
                    proposer=app.config.get("BOOKIE_PROPOSER"),
                    approver=app.config.get("BOOKIE_APPROVER"),
                ),
            )
            log.info("Forwarded incident {} to worker via redis".format(str(incident)))
        # Return message with id
        return jsonify(
            dict(
                result="processing",
                message=incident,
                id=str(job.id),
                id_approve=str(job.id),
            )
        )
    return "", 503
Code Example #13
File: worker.py Project: dekked/slack-rekarma
import os

import redis
from rq import Worker, Queue, Connection

listen = ['high', 'default', 'low']

# Redis URL from the REDISTOGO_URL or REDIS_URL environment variable,
# defaulting to a local Redis
REDIS_URL = os.environ.get('REDISTOGO_URL',
                           os.environ.get('REDIS_URL', 'redis://localhost:6379'))

redis_server = redis.from_url(REDIS_URL)

if __name__ == '__main__':
    with Connection(redis_server):
        worker = Worker(map(Queue, listen))
        worker.work()
Code Example #14
def worker(queues='default'):
    if not queues:
        queues = ('default', )
    with Connection(rq_redis_connection):
        w = Worker(queues)
        w.work()
Code Example #15
# Excerpt: the original snippet starts mid-import; reconstructed here as a
# package-relative import of the names used below, plus the rq imports this
# excerpt needs.
from rq import Connection, Queue, Worker

from . import (
    get_repo_object,
    db,
    status,
    get_trans_object
)

from utils import (
    DockerUtils,
    set_server_status
)

logger = status.logger
doc_utils = DockerUtils(status)
doc = doc_utils.doc

with Connection(db):
    transaction_queue = Queue('transactions')
    repo_queue = Queue('update_repo')
    w1 = Worker([transaction_queue])
    w2 = Worker([repo_queue])


def handle_hook():
    saved_status = set_server_status(first=True)

    logger.debug('calling maybe build docker image')

    if not status.iso_flag:
        image = doc_utils.maybe_build_base_devel()
    else:
        status.iso_flag = False
Code Example #16
# Excerpt: the original snippet starts mid-import; reconstructed as the rq
# imports used below.
from rq import (
    Connection,
    Queue,
)

from utils import (all_file_paths_exist, copy_or_symlink, try_run_command,
                   DockerUtils, PacmanPackageCache, remove)

from . import (RedisHash, get_build_object, get_pkg_object, status,
               get_repo_object)

logger = status.logger
doc_util = DockerUtils(status)
doc = doc_util.doc

pkg_cache_obj = PacmanPackageCache()

with Connection(status.db):
    repo_queue = Queue('update_repo')


class TransactionMeta(RedisHash):
    """
    This is the base class for `Transaction`(s). It simply sets up the attributes
    which are stored in redis so they can be properly accessed. This class should
    not be used directly.

    Args:
        See `Transaction` docstring.

    Attributes:
        See `Transaction` docstring.
    """
Code Example #17
File: worker.py Project: ThewApp/gentle-naiad
# Imports needed by this excerpt (the RasaNLUInterpreter import is not
# shown in the original snippet).
import logging
import os
from multiprocessing import Process

from rq import Connection, Queue
from rq_scheduler import Scheduler

import app.logging
from app.rich_menu import RichMenu
from app.scheduling import ReminderJob, ReminderWorker
from rasa.lineagent import LineAgent
from rasa.store import scheduler_store, tracker_store

logger = logging.getLogger(__name__)

logger.debug("Starting worker")

line_access_token = os.getenv('LINE_CHANNEL_ACCESS_TOKEN', None)
rich_menu = RichMenu(line_access_token)
rich_menu.setup()

agent = LineAgent.load("models/dialogue",
                       interpreter=RasaNLUInterpreter("models/current/nlu"),
                       tracker_store=tracker_store)

workerKwargs = {"rich_menu": rich_menu, "agent": agent}

listen = ['high', 'default', 'low']
scheduler = Scheduler(connection=scheduler_store,
                      interval=60,
                      job_class=ReminderJob)
Process(target=scheduler.run).start()
with Connection(scheduler_store):
    worker = ReminderWorker(map(Queue, listen), job_class=ReminderJob)
    logger.info("Worker is ready.")
    worker.work(workerKwargs=workerKwargs)
Code Example #18
File: ECMA.py Project: D4-project/snake-oil-crypto
def report(self, processid):
    with Connection(Redis()):
        q = Queue()
        res = q.fetch_job(processid)
        print(res.return_value)
Code Example #19
import os
from Bio import pairwise2
from Bio import Align

import redis
from rq import Worker, Queue, Connection

listen = ['high', 'default', 'low']

redis_url = os.getenv('REDISTOGO_URL', 'redis://localhost:6379')

conn2 = redis.from_url(redis_url)

if __name__ == '__main__':
    with Connection(conn2):
        worker = Worker(map(Queue, listen))
        worker.work()
Code Example #20
File: doctor.py Project: indictranstech/indic_frappe
def get_workers():
    with Connection(get_redis_conn()):
        workers = Worker.all()
        return workers
Code Example #21
File: worker.py Project: mwcm/spleeter_service
def run_worker():
    redis_connection = redis.from_url(REDIS_URL)
    with Connection(redis_connection):
        worker = Worker(REDIS_QUEUES)
        worker.work()
Code Example #22
def main():
    with Connection(redis_connection):
        worker = Worker(map(Queue, listen))
        worker.work()
Code Example #23
File: manage.py Project: alx-sts/meli-api
def run_worker():
    redis_url = app.config["REDIS_URL"]
    redis_connection = redis.from_url(redis_url)
    with Connection(redis_connection):
        worker = Worker(app.config["QUEUES"])
        worker.work()
Code Example #24
import os
import redis
from rq import Worker, Queue, Connection

listen = ['high', 'default', 'low']

redis_url = os.getenv('REDISTOGO_URL', 'redis://localhost:6379')
# redis_url = os.getenv('REDISTOGO_URL')
r = redis.from_url(redis_url)
# r = redis.Redis()

if __name__ == '__main__':
    with Connection(r):
        worker = Worker(map(Queue, listen))
        worker.work()
Code Example #25
def worker_wrapper(self, worker):
    with silence_stdout():
        with Connection(self.conn):
            worker.work()
Code Example #26
def worker():
    with Connection(Redis("jobqueue.local")):
        qs = sys.argv[1:] or ['default']
        print("foo")
        w = Worker(qs)
        w.work()
Code Example #27
File: worker.py Project: Sean10/SpiderDouban
from redis import Redis,ConnectionPool
from rq import Worker, Queue, Connection
import time

listen = ['high', 'default', 'low']

pool = ConnectionPool(db=0, host='localhost', port=6379)
redis_conn = Redis(connection_pool=pool)

if __name__ == '__main__':
    start = time.time()
    with Connection(redis_conn):
        worker = Worker(map(Queue, listen))
        worker.work()
        stop = time.time()
        print(stop-start)
Code Example #28
def run_worker(redis_connection):
    with Connection(redis_connection):
        worker = Worker(app.config['QUEUES'])
        worker.work()
Code Example #29
import os

import redis
from rq import Worker, Queue, Connection

listen = ['default']

redis_url = os.getenv('REDIS', 'redis://localhost:6379')

conn = redis.from_url(redis_url)

if __name__ == '__main__':
    with Connection(conn):
        worker = Worker(list(map(Queue, listen)))
        worker.work()
Code Example #30
File: rq_worker.py Project: zhangchangwei/IncetOps
# -*- coding: utf-8 -*-
"""
    IncetOps.rq_worker
    ~~~~~~~~~~~~~~

    The working process of the RQ queue.

    :copyright: (c) 2018 by staugur.
    :license: MIT, see LICENSE for more details.
"""

if __name__ == '__main__':
    import setproctitle
    from redis import from_url
    from config import GLOBAL, REDIS
    from rq import Worker, Queue, Connection
    listen = ['high', 'default', 'low']
    setproctitle.setproctitle(GLOBAL['ProcessName'] + '.rq')
    with Connection(from_url(REDIS)):
        worker = Worker(map(Queue, listen))
        worker.work()