Example #1
    def reclaim_task(self, old_node, new_node):
        """Peek at old_node's reserved list and mark its head task as reclaimed by new_node."""
        task_id = self._redis.lindex(self._reserved_key(old_node), 0)

        if task_id is not None:
            self._redis.hset(RedisTask.task_key(task_id), "reclaimed_node", new_node)

        return task_id
Example #2
    def reclaim_task(self, old_node, new_node):
        task_id = self._redis.lindex(self._reserved_key(old_node), 0)

        if task_id is not None:
            self._redis.hset(RedisTask.task_key(task_id), "reclaimed_node",
                             new_node)

        return task_id
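A minimal standalone sketch of the pattern in Examples #1 and #2, assuming redis-py and illustrative key names ("queue:reserved:<node>" and "task:<id>" are stand-ins, not keys from the original class):

import redis

r = redis.Redis(decode_responses=True)

def reclaim_head_task(old_node, new_node):
    # Peek (non-destructively) at the first task the old node had reserved.
    task_id = r.lindex("queue:reserved:%s" % old_node, 0)

    if task_id is not None:
        # Record on the task's hash which node is taking it over.
        r.hset("task:%s" % task_id, "reclaimed_node", new_node)

    return task_id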
Example #3
    def start(self, task_id, node_id, pid):
        """Record that task_id is now running on node_id under the given pid."""
        self._log("starting task %s on %s, pid %i" % (task_id, node_id, pid))
        with self._redis.pipeline() as pipeline:
            pipeline.sadd(self._started_key, self._running_job(node_id, pid, task_id))
            pipeline.hmset(
                RedisTask.task_key(task_id),
                {"status": "started", "pid": pid, "updated": time.time()})

            pipeline.execute()
Example #4
    def start(self, task_id, node_id, pid):
        self._log("starting task %s on %s, pid %i" % (task_id, node_id, pid))
        with self._redis.pipeline() as pipeline:
            pipeline.sadd(self._started_key,
                          self._running_job(node_id, pid, task_id))
            pipeline.hmset(RedisTask.task_key(task_id), {
                "status": "started",
                "pid": pid,
                "updated": time.time()
            })

            pipeline.execute()
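The start pattern from Examples #3 and #4 in standalone form: a single pipeline round trip adds a "node:pid:task" marker to a set of started jobs and updates the task hash. The sketch assumes redis-py 3.5 or newer, where hset(..., mapping=...) replaces the deprecated hmset used in the examples, and the key names are again illustrative:

import time
import redis

r = redis.Redis(decode_responses=True)

def mark_started(task_id, node_id, pid):
    with r.pipeline() as pipe:
        # Track the running job as "node:pid:task" in a set of started jobs.
        pipe.sadd("queue:started", "%s:%i:%s" % (node_id, pid, task_id))
        pipe.hset("task:%s" % task_id,
                  mapping={"status": "started", "pid": pid, "updated": time.time()})
        pipe.execute()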
Example #5
    def delete_task(self, task_id, task_status):
        """Delete a finished (complete or failed) task and its hash."""
        if task_status == "complete":
            finished_queue = self._complete_key
        elif task_status == "failed":
            finished_queue = self._failed_key
        else:
            raise ValueError("Cannot delete task with status %s" % (task_status))

        self._log("deleting task %s with status %s" % (task_id, task_status))

        with self._redis.pipeline() as pipeline:
            pipeline.delete(RedisTask.task_key(task_id))
            pipeline.lrem(finished_queue, 1, task_id)

            pipeline.execute()
Example #6
    def delete_task(self, task_id, task_status):
        if task_status == "complete":
            finished_queue = self._complete_key
        elif task_status == "failed":
            finished_queue = self._failed_key
        else:
            raise ValueError("Cannot delete task with status %s" %
                             (task_status))

        self._log("deleting task %s with status %s" % (task_id, task_status))

        with self._redis.pipeline() as pipeline:
            pipeline.delete(RedisTask.task_key(task_id))
            pipeline.lrem(finished_queue, 1, task_id)

            pipeline.execute()
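The same deletion logic as Examples #5 and #6, reduced to a standalone helper with hypothetical key names; a lookup table stands in for the if/elif chain, but the behaviour is identical:

import redis

r = redis.Redis(decode_responses=True)

FINISHED_LISTS = {"complete": "queue:complete", "failed": "queue:failed"}

def delete_finished_task(task_id, task_status):
    # Only completed or failed tasks may be deleted.
    if task_status not in FINISHED_LISTS:
        raise ValueError("Cannot delete task with status %s" % task_status)

    with r.pipeline() as pipe:
        pipe.delete("task:%s" % task_id)                     # drop the task hash
        pipe.lrem(FINISHED_LISTS[task_status], 1, task_id)   # remove one list entry
        pipe.execute()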
Example #7
        def enqueue_transaction(pipeline):
            """Move due tasks from the scheduled set to the pending list (check-and-set callback)."""
            now = time.time()
            due_tasks = pipeline.zrangebyscore(self._scheduled_key, 0, now)

            if len(due_tasks) == 0:
                self._debug("no due tasks")
                return

            self._log("enqueuing due tasks: %s" % (due_tasks))

            pipeline.multi()

            pipeline.zremrangebyscore(self._scheduled_key, 0, now)

            for task in due_tasks:
                pipeline.lpush(self._pending_name, task)
                pipeline.hmset(RedisTask.task_key(task), {"status": "pending", "updated": now})
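enqueue_transaction takes a pipeline and calls multi() partway through, which is the shape of a check-and-set callback for redis-py's Redis.transaction() helper: after the helper issues WATCH, reads run immediately and return real results, commands issued after multi() are buffered and sent atomically on EXEC, and the callback is retried if a watched key changes in between. A self-contained sketch of that pattern, with assumed key names:

import time
import redis

r = redis.Redis(decode_responses=True)

SCHEDULED_KEY = "queue:scheduled"   # illustrative key names
PENDING_KEY = "queue:pending"

def enqueue_due(pipe):
    # Before multi() the pipeline is in immediate mode, so this read returns
    # the due members instead of queuing the command.
    now = time.time()
    due_tasks = pipe.zrangebyscore(SCHEDULED_KEY, 0, now)
    if not due_tasks:
        return

    # Switch to buffered MULTI mode: everything below runs atomically on EXEC.
    pipe.multi()
    pipe.zremrangebyscore(SCHEDULED_KEY, 0, now)
    for task in due_tasks:
        pipe.lpush(PENDING_KEY, task)

# WATCH the scheduled key; if another client changes it before EXEC,
# redis-py retries enqueue_due from the top.
r.transaction(enqueue_due, SCHEDULED_KEY)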
Example #8
    def fail(self, task_id, node_id, pid, error):
        """Mark task_id as failed, record the error, and move it to the failed list."""
        self._log("failed task %s on %s, pid: %i, error: %s" % (task_id, node_id, pid, error))

        with self._redis.pipeline() as pipeline:
            pipeline.lrem(self._reserved_key(node_id), 1, task_id)
            pipeline.srem(self._started_key, self._running_job(node_id, pid, task_id))

            pipeline.hmset(
                RedisTask.task_key(task_id),
                {
                    "status": "failed",
                    "error": error,
                    "updated": time.time()
                })

            pipeline.lpush(self._failed_key, task_id)

            pipeline.execute()
Example #9
    def fail(self, task_id, node_id, pid, error):
        self._log("failed task %s on %s, pid: %i, error: %s" %
                  (task_id, node_id, pid, error))

        with self._redis.pipeline() as pipeline:
            pipeline.lrem(self._reserved_key(node_id), 1, task_id)
            pipeline.srem(self._started_key,
                          self._running_job(node_id, pid, task_id))

            pipeline.hmset(RedisTask.task_key(task_id), {
                "status": "failed",
                "error": error,
                "updated": time.time()
            })

            pipeline.lpush(self._failed_key, task_id)

            pipeline.execute()
Example #10
    def complete(self, task_id, node_id, pid, result):
        """Mark task_id as complete, store its result, and move it to the complete list."""
        self._log("completing task %s on %s, pid: %i, result: %s" %
                  (task_id, node_id, pid, result))

        with self._redis.pipeline() as pipeline:
            pipeline.lrem(self._reserved_key(node_id), 1, task_id)
            pipeline.srem(self._started_key,
                          self._running_job(node_id, pid, task_id))

            pipeline.hmset(RedisTask.task_key(task_id), {
                "status": "complete",
                "result": result,
                "updated": time.time()
            })

            pipeline.lpush(self._complete_key, task_id)

            pipeline.execute()
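fail and complete (Examples #8 to #10) are mirror images of each other: drop the node's reservation and the started marker, update the task hash, and push the task id onto the matching finished list, all in one pipeline. A combined sketch under the same assumed key layout as the earlier snippets:

import time
import redis

r = redis.Redis(decode_responses=True)

def finish_task(task_id, node_id, pid, status, payload):
    # status is "complete" or "failed"; payload is the result or the error text.
    if status not in ("complete", "failed"):
        raise ValueError("unexpected status %s" % status)

    field = "result" if status == "complete" else "error"

    with r.pipeline() as pipe:
        pipe.lrem("queue:reserved:%s" % node_id, 1, task_id)
        pipe.srem("queue:started", "%s:%i:%s" % (node_id, pid, task_id))
        pipe.hset("task:%s" % task_id,
                  mapping={"status": status, field: payload, "updated": time.time()})
        pipe.lpush("queue:%s" % status, task_id)   # queue:complete or queue:failed
        pipe.execute()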
Example #11
    def dequeue(self, node_id):
        """Atomically reserve the oldest pending task for node_id."""
        self._debug("reserving task on %s" % (node_id))

        task_id = self._redis.rpoplpush(self._pending_name,
                                        self._reserved_key(node_id))

        if task_id is None:
            return None

        self._log("got task %s" % (task_id))

        self._redis.hmset(RedisTask.task_key(task_id), {
            "status": "reserved",
            "node": node_id,
            "updated": time.time()
        })

        return task_id
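dequeue leans on RPOPLPUSH for the reliable-queue pattern: popping the oldest pending task and pushing it onto the node's reserved list happen as one Redis command, so a crash in between cannot lose the task; the reserved entry is only removed later by complete or fail. A standalone sketch with the same assumed key names and redis-py version as above:

import time
import redis

r = redis.Redis(decode_responses=True)

def reserve_task(node_id):
    # Atomically move the oldest pending task onto this node's reserved list.
    task_id = r.rpoplpush("queue:pending", "queue:reserved:%s" % node_id)
    if task_id is None:
        return None

    r.hset("task:%s" % task_id,
           mapping={"status": "reserved", "node": node_id, "updated": time.time()})
    return task_id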
Example #12
    def complete(self, task_id, node_id, pid, result):
        self._log(
            "completing task %s on %s, pid: %i, result: %s" % (task_id, node_id, pid, result))

        with self._redis.pipeline() as pipeline:
            pipeline.lrem(self._reserved_key(node_id), 1, task_id)
            pipeline.srem(self._started_key, self._running_job(node_id, pid, task_id))

            pipeline.hmset(
                RedisTask.task_key(task_id),
                {
                    "status": "complete",
                    "result": result,
                    "updated": time.time()
                })

            pipeline.lpush(self._complete_key, task_id)

            pipeline.execute()
Example #13
    def dequeue(self, node_id):
        self._debug("reserving task on %s" % (node_id))

        task_id = self._redis.rpoplpush(
            self._pending_name, self._reserved_key(node_id))

        if task_id is None:
            return None

        self._log("got task %s" % (task_id))

        self._redis.hmset(
            RedisTask.task_key(task_id),
            {
                "status": "reserved",
                "node": node_id,
                "updated": time.time()
            })

        return task_id
Example #14
        def enqueue_transaction(pipeline):
            now = time.time()
            due_tasks = pipeline.zrangebyscore(self._scheduled_key, 0, now)

            if len(due_tasks) == 0:
                self._debug("no due tasks")
                return

            self._log("enqueuing due tasks: %s" % (due_tasks))

            pipeline.multi()

            pipeline.zremrangebyscore(self._scheduled_key, 0, now)

            for task in due_tasks:
                pipeline.lpush(self._pending_name, task)
                pipeline.hmset(RedisTask.task_key(task), {
                    "status": "pending",
                    "updated": now
                })
Example #15
    def _generate_task(self, pipeline, status, parameters, **kwargs):
        """Queue the creation of a task hash with the given status and register this queue."""
        task_id = self._generate_task_id()

        self._log("adding %s task %s, parameters: %s" % (status, task_id, parameters))

        now = time.time()

        task_data = {
            "status": status,
            "queue": self._name,
            "parameters": parameters,
            "created": now,
            "updated": now
        }

        task_data.update(kwargs)

        pipeline.hmset(RedisTask.task_key(task_id), task_data)

        # Increment by 0 just registers the queue name in the "queues" index
        # (redis-py 3.x argument order: name, amount, value).
        pipeline.zincrby(self._key("queues"), 0, self._name)

        return task_id
Example #16
    def _generate_task(self, pipeline, status, parameters, **kwargs):
        task_id = self._generate_task_id()

        self._log("adding %s task %s, parameters: %s" %
                  (status, task_id, parameters))

        now = time.time()

        task_data = {
            "status": status,
            "queue": self._name,
            "parameters": parameters,
            "created": now,
            "updated": now
        }

        task_data.update(kwargs)

        pipeline.hmset(RedisTask.task_key(task_id), task_data)

        # Increment by 0 just registers the queue name in the "queues" index
        # (redis-py 3.x argument order: name, amount, value).
        pipeline.zincrby(self._key("queues"), 0, self._name)

        return task_id
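_generate_task only queues commands on the pipeline it is given, so the caller decides when the hash write and the queue registration actually execute. A standalone version of the same idea; the uuid-based id, the queue and key names, and the example parameters are assumptions, and zincrby follows the redis-py 3.x argument order (name, amount, value):

import time
import uuid
import redis

r = redis.Redis(decode_responses=True)

def create_task(pipe, queue_name, status, parameters, **extra):
    task_id = uuid.uuid4().hex
    now = time.time()

    task_data = {"status": status, "queue": queue_name, "parameters": parameters,
                 "created": now, "updated": now}
    task_data.update(extra)

    pipe.hset("task:%s" % task_id, mapping=task_data)
    # Incrementing by 0 registers queue_name in the "queues" index without
    # changing its score if it is already present.
    pipe.zincrby("queues", 0, queue_name)
    return task_id

with r.pipeline() as pipe:
    task_id = create_task(pipe, "reports", "pending", "{}")
    pipe.execute()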