Example #1
    def run(self, params):

        # If there are more than this many items on the queue, we don't try to check
        # whether our MongoDB jobs are still queued.
        max_queue_items = params.get("max_queue_items", 1000)

        stats = {"fetched": 0, "requeued": 0}

        all_queues = Queue.all_known()

        for queue_name in all_queues:

            queue = Queue(queue_name)
            queue_size = queue.size()

            if queue.is_raw:
                continue

            log.info("Checking queue %s" % queue_name)

            if queue_size > max_queue_items:
                log.info("Stopping because queue %s has %s items" %
                         (queue_name, queue_size))
                continue

            queue_jobs_ids = set(queue.list_job_ids(limit=max_queue_items + 1))
            if len(queue_jobs_ids) >= max_queue_items:
                log.info(
                    "Stopping because queue %s actually had more than %s items"
                    % (queue_name, len(queue_jobs_ids)))
                continue

            for job_data in connections.mongodb_jobs.mrq_jobs.find(
                {
                    "queue": queue_name,
                    "status": "queued"
                },
                    projection={
                        "_id": 1
                    }).sort([["_id", 1]]):

                stats["fetched"] += 1

                if str(job_data["_id"]) in queue_jobs_ids:
                    log.info("Found job %s on queue %s. Stopping" %
                             (job_data["_id"], queue.id))
                    break

                # At this point, this job is not on the queue and we're sure
                # the queue holds fewer than max_queue_items items,
                # so we can safely requeue the job.
                log.info("Requeueing %s on %s" % (job_data["_id"], queue.id))

                stats["requeued"] += 1
                job = Job(job_data["_id"])
                job.requeue(queue=queue_name)

        return stats
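
A minimal sketch of how this maintenance task might be enqueued, using the send_task helper imported in Example #3. The dotted path "myproject.maintenance.RequeueLostJobs" is a hypothetical placeholder for whatever Task subclass holds this run method, and MRQ connections are assumed to be configured already (e.g. inside a worker or test harness):

    from mrq.queue import send_task

    # Queue one run of the requeue task; "max_queue_items" overrides the
    # default of 1000 read in run() above. The task path is hypothetical.
    send_task(
        "myproject.maintenance.RequeueLostJobs",
        {"max_queue_items": 500},
        queue="default"
    )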
Example #2
    def run(self, params):

        # If there are more than this many items on the queue, we don't try to check
        # whether our MongoDB jobs are still queued.
        max_queue_items = params.get("max_queue_items", 1000)

        stats = {
            "fetched": 0,
            "requeued": 0
        }

        all_queues = Queue.all_known()

        for queue_name in all_queues:

            queue = Queue(queue_name)
            queue_size = queue.size()

            if queue.is_raw:
                continue

            log.info("Checking queue %s" % queue_name)

            if queue_size > max_queue_items:
                log.info("Stopping because queue %s has %s items" %
                         (queue_name, queue_size))
                continue

            queue_jobs_ids = set(queue.list_job_ids(limit=max_queue_items + 1))
            if len(queue_jobs_ids) >= max_queue_items:
                log.info(
                    "Stopping because queue %s actually had more than %s items" %
                    (queue_name, len(queue_jobs_ids)))
                continue

            for job_data in connections.mongodb_jobs.mrq_jobs.find({
                "queue": queue_name,
                "status": "queued"
            }, projection={"_id": 1}).sort([["_id", 1]]):

                stats["fetched"] += 1

                if str(job_data["_id"]) in queue_jobs_ids:
                    log.info("Found job %s on queue %s. Stopping" % (job_data["_id"], queue.id))
                    break

                # At this point, this job is not on the queue and we're sure
                # the queue holds fewer than max_queue_items items,
                # so we can safely requeue the job.
                log.info("Requeueing %s on %s" % (job_data["_id"], queue.id))

                stats["requeued"] += 1
                job = Job(job_data["_id"])
                job.requeue(queue=queue_name)

        return stats
Example #3
def test_known_queues_lifecycle(worker):

    worker.start(
        queues="default_reverse xtest test_timed_set",
        flags="--config tests/fixtures/config-raw1.py --subqueues_refresh_interval=0.1"
    )
    time.sleep(1)
    worker.wait_for_idle()

    # Test known queues
    from mrq.queue import Queue, send_task, send_raw_tasks

    # Just watching queues doesn't add them to the known ones.
    # Note that this doesn't read the worker's config, only the DB/Redis state.
    assert set(Queue.all_known()) == set()

    # Try queueing a task
    send_task("tests.tasks.general.Add", {"a": 41, "b": 1, "sleep": 1}, queue="x")

    jobs = list(worker.mongodb_jobs.mrq_jobs.find())
    assert len(jobs) == 1
    assert jobs[0]["queue"] == "x"

    assert set(Queue.all_known()) == set(["x"])

    Queue("x").empty()

    jobs = list(worker.mongodb_jobs.mrq_jobs.find())
    assert len(jobs) == 0
    assert set(Queue.all_known()) == set()

    all_known = worker.send_task("tests.tasks.general.QueueAllKnown", {}, queue="default")
    # Will get all from config
    assert len(all_known) > 0

    # Now add a job on a raw queue
    send_raw_tasks("test_raw/sub", ["a", "b", "c"])
    time.sleep(1)

    all_known_plus_sub = worker.send_task("tests.tasks.general.QueueAllKnown", {}, queue="default")
    assert set(all_known_plus_sub) == set(all_known).union(set(["test_raw/sub"]))
Example #4
    def run(self, params):
        return list(Queue.all_known())
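
This looks like the body of the tests.tasks.general.QueueAllKnown task invoked in Example #3. A minimal sketch of enqueueing it, assuming MRQ connections are already configured (for instance inside a worker or the test fixtures above):

    from mrq.queue import send_task

    # A worker listening on "default" will execute run() and store
    # list(Queue.all_known()) as the job result.
    send_task("tests.tasks.general.QueueAllKnown", {}, queue="default")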
Example #5
def api_datatables(unit):

    # import time
    # time.sleep(5)

    collection = None
    sort = None
    skip = int(request.args.get("iDisplayStart", 0))
    limit = int(request.args.get("iDisplayLength", 20))

    if unit == "queues":

        queues = []
        for name in Queue.all_known():
            queue = Queue(name)

            q = {
                "name": name,
                "size": queue.size(),  # Redis size
                "is_sorted": queue.is_sorted,
                "is_timed": queue.is_timed,
                "is_raw": queue.is_raw,
                "is_set": queue.is_set
            }

            if queue.is_sorted:
                raw_config = queue.get_config()
                q["graph_config"] = raw_config.get(
                    "dashboard_graph", lambda: {
                        "start": time.time() - (7 * 24 * 3600),
                        "stop": time.time() + (7 * 24 * 3600),
                        "slices": 30
                    } if queue.is_timed else {
                        "start": 0,
                        "stop": 100,
                        "slices": 30
                    })()
                if q["graph_config"]:
                    q["graph"] = queue.get_sorted_graph(**q["graph_config"])

            if queue.is_timed:
                q["jobs_to_dequeue"] = queue.count_jobs_to_dequeue()

            queues.append(q)

        queues.sort(key=lambda x: -x["size"])

        data = {"aaData": queues, "iTotalDisplayRecords": len(queues)}

    elif unit == "workers":
        fields = None
        collection = connections.mongodb_jobs.mrq_workers
        sort = [("datestarted", -1)]

        query = {}
        if request.args.get("id"):
            query["_id"] = ObjectId(request.args["id"])
        else:
            if request.args.get("status"):
                statuses = request.args["status"].split("-")
                query["status"] = {"$in": statuses}
            if request.args.get("ip"):
                query["$or"] = [{
                    "config.local_ip": request.args["ip"]
                }, {
                    "config.external_ip": request.args["ip"]
                }]
            if request.args.get("queue"):
                query["config.queues"] = request.args["queue"]

    elif unit == "agents":
        fields = None
        query = {"status": {"$nin": ["stop"]}}
        collection = connections.mongodb_jobs.mrq_agents
        sort = [("datestarted", -1)]

        if request.args.get("showstopped"):
            query = {}

    elif unit == "scheduled_jobs":
        collection = connections.mongodb_jobs.mrq_scheduled_jobs
        fields = None
        query = {}

    elif unit == "jobs":

        fields = None
        query = build_api_datatables_query(request)
        sort = None  # TODO [("_id", 1)]

        # We can't easily search params because we store them as decoded JSON in mongo :(
        # Add a string index?
        # if request.args.get("sSearch"):
        #   query.update(json.loads(request.args.get("sSearch")))
        collection = connections.mongodb_jobs.mrq_jobs

    if collection is not None:

        cursor = collection.find(query, projection=fields)

        if sort:
            cursor.sort(sort)

        if skip is not None:
            cursor.skip(skip)

        if limit is not None:
            cursor.limit(limit)

        data = {
            "aaData": list(cursor),
            "iTotalDisplayRecords": collection.find(query).count()
        }

    data["sEcho"] = request.args["sEcho"]

    return jsonify(data)
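
The handler above reads the legacy DataTables request parameters (iDisplayStart, iDisplayLength, sEcho) and returns aaData plus iTotalDisplayRecords. A minimal sketch of querying it with the requests library; the /api/datatables/<unit> route and the port are assumptions, since neither appears in this snippet:

    import requests

    # Assumed URL: adjust host, port and route prefix to your dashboard setup.
    resp = requests.get(
        "http://localhost:5555/api/datatables/queues",
        params={
            "iDisplayStart": 0,    # parsed as skip
            "iDisplayLength": 20,  # parsed as limit
            "sEcho": 1             # echoed back verbatim in the response
        }
    )
    data = resp.json()
    print(data["iTotalDisplayRecords"], [q["name"] for q in data["aaData"]])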
Example #6
File: app.py Project: benjisg/mrq
def api_datatables(unit):

    # import time
    # time.sleep(5)

    collection = None
    sort = None
    skip = int(request.args.get("iDisplayStart", 0))
    limit = int(request.args.get("iDisplayLength", 20))

    if unit == "queues":

        queues = []
        for name, jobs in Queue.all_known().items():
            queue = Queue(name)
            q = {
                "name": name,
                "jobs": jobs,  # MongoDB size
                "size": queue.size(),  # Redis size
                "is_sorted": queue.is_sorted,
                "is_timed": queue.is_timed,
                "is_raw": queue.is_raw,
                "is_set": queue.is_set
            }

            if queue.is_sorted:
                raw_config = cfg.get("raw_queues", {}).get(name, {})
                q["graph_config"] = raw_config.get("dashboard_graph", lambda: {
                    "start": time.time() - (7 * 24 * 3600),
                    "stop": time.time() + (7 * 24 * 3600),
                    "slices": 30
                } if queue.is_timed else {
                    "start": 0,
                    "stop": 100,
                    "slices": 30
                })()
                if q["graph_config"]:
                    q["graph"] = queue.get_sorted_graph(**q["graph_config"])

            if queue.is_timed:
                q["jobs_to_dequeue"] = queue.count_jobs_to_dequeue()

            queues.append(q)

        queues.sort(key=lambda x: -(x["jobs"] + x["size"]))

        data = {
            "aaData": queues,
            "iTotalDisplayRecords": len(queues)
        }

    elif unit == "workers":
        fields = None
        query = {"status": {"$nin": ["stop"]}}
        collection = connections.mongodb_jobs.mrq_workers
        sort = [("datestarted", -1)]

        if request.args.get("showstopped"):
            query = {}

    elif unit == "scheduled_jobs":
        collection = connections.mongodb_jobs.mrq_scheduled_jobs
        fields = None
        query = {}

    elif unit == "jobs":

        fields = None
        query = build_api_datatables_query(request)
        sort = [("_id", 1)]

        # We can't easily search params because we store them as decoded JSON in mongo :(
        # Add a string index?
        # if request.args.get("sSearch"):
        #   query.update(json.loads(request.args.get("sSearch")))
        collection = connections.mongodb_jobs.mrq_jobs

    if collection is not None:

        cursor = collection.find(query, projection=fields)

        if sort:
            cursor.sort(sort)

        if skip is not None:
            cursor.skip(skip)

        if limit is not None:
            cursor.limit(limit)

        data = {
            "aaData": list(cursor),
            "iTotalDisplayRecords": collection.find(query).count()
        }

    data["sEcho"] = request.args["sEcho"]

    return jsonify(data)
Example #7
def api_datatables(unit):
    collection = None
    sort = None
    skip = int(request.args.get("iDisplayStart", 0))
    limit = int(request.args.get("iDisplayLength", 20))
    with_mongodb_size = bool(request.args.get("with_mongodb_size"))

    if unit == "queues":

        queues = []
        for name in Queue.all_known():
            queue = Queue(name)

            jobs = None
            if with_mongodb_size:
                jobs = connections.mongodb_jobs.mrq_jobs.count({
                    "queue": name,
                    "status": request.args.get("status") or "queued"
                })

            q = {
                "name": name,
                "jobs": jobs,  # MongoDB size
                "size": queue.size(),  # Redis size
                "is_sorted": queue.is_sorted,
                "is_timed": queue.is_timed,
                "is_raw": queue.is_raw,
                "is_set": queue.is_set
            }

            if queue.is_sorted:
                raw_config = cfg.get("raw_queues", {}).get(name, {})
                q["graph_config"] = raw_config.get(
                    "dashboard_graph", lambda: {
                        "start": time.time() - (7 * 24 * 3600),
                        "stop": time.time() + (7 * 24 * 3600),
                        "slices": 30
                    } if queue.is_timed else {
                        "start": 0,
                        "stop": 100,
                        "slices": 30
                    })()
                if q["graph_config"]:
                    q["graph"] = queue.get_sorted_graph(**q["graph_config"])

            if queue.is_timed:
                q["jobs_to_dequeue"] = queue.count_jobs_to_dequeue()

            queues.append(q)

        sSortField, sSortDirection = _get_sort_args(request, 'size', 'desc')
        queues.sort(key=lambda x: x.get(sSortField, 0),
                    reverse=sSortDirection == 'desc')

        data = {"aaData": queues, "iTotalDisplayRecords": len(queues)}

    elif unit == "workers":
        fields = None
        query = {"status": {"$nin": ["stop"]}}
        collection = connections.mongodb_jobs.mrq_workers
        sSortField, sSortDirection = _get_sort_args(request, 'datestarted',
                                                    'desc')
        sort = [(sSortField, -1 if sSortDirection == 'desc' else 1)]

        if request.args.get("showstopped"):
            query = {}

    elif unit == "scheduled_jobs":
        collection = connections.mongodb_jobs.mrq_scheduled_jobs
        fields = None
        query = {}
        sSortField, sSortDirection = _get_sort_args(request, 'interval',
                                                    'desc')
        sort = [(sSortField, -1 if sSortDirection == 'desc' else 1)]

    elif unit == "jobs":

        fields = None
        query = build_api_datatables_query(request)
        sSortField, sSortDirection = _get_sort_args(request)
        sort = [(sSortField, -1 if sSortDirection == 'desc' else 1)]

        time_s = request.args.get("time_s", '')
        time_e = request.args.get("time_e", '')
        if time_s and not time_e:
            print("datestarted", time_s)
            query.update({'datestarted': {'$gte': str2datetime(time_s)}})
        elif time_e and not time_s:
            print("datestarted", time_e)
            query.update({'datestarted': {'$lte': str2datetime(time_e)}})
        elif time_s and time_e:
            print("datestarted", time_s, time_e)
            query.update({
                'datestarted': {
                    '$gte': str2datetime(time_s),
                    '$lte': str2datetime(time_e)
                }
            })

        # We can't easily search params because we store them as decoded JSON in mongo :(
        # Add a string index?
        # if request.args.get("sSearch"):
        #   query.update(json.loads(request.args.get("sSearch")))
        collection = connections.mongodb_jobs.mrq_jobs

    if collection is not None:

        cursor = collection.find(query, projection=fields)

        if sort:
            cursor.sort(sort)

        if skip is not None:
            cursor.skip(skip)

        if limit is not None:
            cursor.limit(limit)

        data = {
            "aaData": list(cursor),
            "iTotalDisplayRecords": collection.find(query).count()
        }

    data["sEcho"] = request.args["sEcho"]

    return jsonify(data)
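
The _get_sort_args helper is referenced above but not shown in this snippet. Based purely on how it is called (optional default field and direction, returning a (field, direction) pair), a hypothetical sketch could look like the following; the real implementation, including the actual query parameter names, may differ:

    def _get_sort_args(request, default_field="_id", default_direction="asc"):
        # Hypothetical helper inferred from the call sites above: it must
        # return a (field, direction) tuple, falling back to the defaults.
        # The "sSortField"/"sSortDirection" parameter names are guesses.
        field = request.args.get("sSortField") or default_field
        direction = request.args.get("sSortDirection") or default_direction
        if direction not in ("asc", "desc"):
            direction = default_direction
        return field, direction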