def get_task(task_id):
    logger = g.logger.bind(operation="get_task", task_id=task_id)

    logger.debug("Getting task...")
    task = Task.get_by_task_id(task_id)

    if task is None:
        return return_error("Task not found.", "get_task", status=404, logger=logger)

    logger.debug("Task retrieved successfully...")

    # Resolve the task's job references into full Job documents.
    task_jobs = Job.objects(id__in=[str(job_ref.id) for job_ref in task.jobs])

    jobs = []

    for job in task_jobs:
        url = url_for(
            "task.get_job", task_id=task_id, job_id=str(job.job_id), _external=True
        )
        jobs.append(
            {
                "id": str(job.job_id),
                "createdAt": job.created_at.isoformat(),
                "url": url,
            }
        )

    return jsonify({"taskId": task_id, "jobs": jobs})
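# A sketch of the JSON payload get_task() above produces for a task with one
# job. The field names are taken directly from the handler; the concrete ids,
# date, and host are made-up placeholder values:
#
# {
#     "taskId": "my-task",
#     "jobs": [
#         {
#             "id": "471e0ab9-0000-0000-0000-000000000000",
#             "createdAt": "2019-01-01T00:00:00",
#             "url": "http://localhost:10000/tasks/my-task/jobs/471e0ab9-0000-0000-0000-000000000000/"
#         }
#     ]
# }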
def test_enqueue12(client):
    """Test that enqueuing a job works with PUT."""
    with client.application.app_context():
        task_id = str(uuid4())
        job_id = str(uuid4())
        data = {"image": "ubuntu", "command": "ls"}
        response = client.put(
            f"/tasks/{task_id}/jobs/{job_id}/",
            data=dumps(data),
            follow_redirects=True,
        )

        expect(response.status_code).to_equal(200)
        obj = loads(response.data)
        expect(obj).not_to_be_null()

        # PUT must keep the caller-supplied job id.
        new_job_id = obj["jobId"]
        expect(new_job_id).to_equal(job_id)
        expect(obj["queueJobId"]).not_to_be_null()
        expect(obj["executionId"]).not_to_be_null()

        task = Task.get_by_task_id(obj["taskId"])
        expect(task).not_to_be_null()
        expect(task.jobs).not_to_be_empty()

        j = task.jobs[0]
        job = Job.objects(id=j.id).first()
        expect(str(job.job_id)).to_equal(job_id)

        expect(obj["taskUrl"]).to_equal(task.get_url())
        expect(obj).to_be_enqueued()

        count = Task.objects.count()
        expect(count).to_equal(1)
def test_enqueue1(client):
    """Test that enqueuing a job works."""
    with client.application.app_context():
        task_id = str(uuid4())
        data = {"image": "ubuntu", "command": "ls"}
        response = client.post(
            f"/tasks/{task_id}/", data=dumps(data), follow_redirects=True
        )

        expect(response.status_code).to_equal(200)
        obj = loads(response.data)
        expect(obj["taskUrl"]).to_equal(f"http://localhost:10000/tasks/{task_id}/")

        job_id = obj["jobId"]
        expect(job_id).not_to_be_null()
        expect(obj["jobUrl"]).to_equal(
            f"http://localhost:10000/tasks/{task_id}/jobs/{job_id}/"
        )
        expect(obj["queueJobId"]).not_to_be_null()
        expect(obj["executionId"]).not_to_be_null()

        execution_id = obj["executionId"]
        expect(obj["executionUrl"]).to_equal(
            f"http://localhost:10000/tasks/{task_id}/jobs/{job_id}/executions/{execution_id}/"
        )

        task = Task.get_by_task_id(obj["taskId"])
        expect(task).not_to_be_null()
        expect(task.jobs).not_to_be_empty()

        j = task.jobs[0]
        job = Job.objects(id=j.id).first()
        expect(str(job.job_id)).to_equal(job_id)

        expect(obj["taskUrl"]).to_equal(task.get_url())

        # The rq payload must carry the job metadata and the worker call.
        expect(obj).to_be_enqueued()
        expect(obj).to_be_enqueued_with_value("status", "queued")
        expect(obj).to_be_enqueued_with_value("created_at")
        expect(obj).to_be_enqueued_with_value("enqueued_at")
        expect(obj).to_be_enqueued_with_value("data")
        expect(obj).to_be_enqueued_with_value("origin", "jobs")
        expect(obj).to_be_enqueued_with_value(
            "description",
            f"fastlane.worker.job.run_job('{obj['taskId']}', '{job_id}', "
            f"'{execution_id}', 'ubuntu', 'ls')",
        )
        expect(obj).to_be_enqueued_with_value("timeout", "-1")

        count = Task.objects.count()
        expect(count).to_equal(1)
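# The two tests above exercise the enqueue routes through Flask's test client.
# A minimal usage sketch of the same calls against a running server, using the
# requests library; the host/port mirror the URLs asserted in test_enqueue1,
# and task_id/job_id are placeholder values:
import json
from uuid import uuid4

import requests

base = "http://localhost:10000"
task_id = str(uuid4())

# POST lets the server pick the job id (the test_enqueue1 path)...
resp = requests.post(
    f"{base}/tasks/{task_id}/",
    data=json.dumps({"image": "ubuntu", "command": "ls"}),
)
print(resp.json()["jobId"])

# ...while PUT enqueues under a caller-supplied job id (the test_enqueue12 path).
job_id = str(uuid4())
resp = requests.put(
    f"{base}/tasks/{task_id}/jobs/{job_id}/",
    data=json.dumps({"image": "ubuntu", "command": "ls"}),
)
assert resp.json()["jobId"] == job_id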
def status():
    executor = current_app.executor
    version = pkg_resources.get_distribution("fastlane").version
    metadata = {"hosts": [], "containers": {"running": []}}

    containers = executor.get_running_containers()

    for host, port, container_id in containers["running"]:
        metadata["containers"]["running"].append(
            {"host": host, "port": port, "id": container_id}
        )

    metadata["hosts"] = containers["available"] + containers["unavailable"]

    # One entry per worker queue, keyed by queue name.
    metadata["queues"] = {}

    for queue_name in [
        QueueNames.Job,
        QueueNames.Monitor,
        QueueNames.Webhook,
        QueueNames.Notify,
    ]:
        queue = getattr(current_app, f"{queue_name}_queue")
        jobs_queue_size = current_app.redis.llen(queue.queue_name)
        metadata["queues"][queue_name] = {"length": jobs_queue_size}

    # Peek at the next scheduled item; the scheduled queue is a sorted set
    # scored by execution timestamp.
    next_scheduled = current_app.redis.zrange(
        Queue.SCHEDULED_QUEUE_NAME, 0, 0, withscores=True
    )

    if not next_scheduled:
        next_timestamp = None
        next_human = None
    else:
        next_timestamp = next_scheduled[0][1]
        next_human = from_unix(next_timestamp).isoformat()

    metadata["queues"]["scheduled"] = {
        "length": current_app.redis.zcard(Queue.SCHEDULED_QUEUE_NAME),
        "nextTimeStamp": next_timestamp,
        "nextHumanReadableDate": next_human,
    }

    metadata["tasks"] = {"count": Task.objects.count()}
    metadata["jobs"] = {"count": Job.objects.count()}
    metadata["jobs"]["scheduled"] = []

    scheduled_jobs = Job.objects(scheduled=True).all()

    metadata["fastlane"] = {
        "version": version,
        "executor": current_app.config["EXECUTOR"],
    }

    for job in scheduled_jobs:
        j = job.to_dict(
            include_executions=False, blacklist_fn=current_app.blacklist_words_fn
        )
        itr = croniter.croniter(job.metadata["cron"], datetime.utcnow())
        j["nextScheduledAt"] = itr.get_next(datetime).isoformat()

        task_id = job.task.task_id
        job_url = url_for(
            "task.get_job", task_id=task_id, job_id=str(job.job_id), _external=True
        )
        j["url"] = job_url
        stop_job_url = url_for(
            "task.stop_job", task_id=task_id, job_id=str(job.job_id), _external=True
        )
        j["stopUrl"] = stop_job_url

        task_url = url_for("task.get_task", task_id=task_id, _external=True)
        del j["taskId"]
        j["task"] = {"id": task_id, "url": task_url}

        metadata["jobs"]["scheduled"].append(j)

    return jsonify(metadata), 200
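# A sketch of the JSON shape status() above assembles. Keys are taken directly
# from the handler; values are illustrative placeholders:
#
# {
#     "hosts": [...],                      # available + unavailable hosts
#     "containers": {"running": [{"host": ..., "port": ..., "id": ...}]},
#     "queues": {
#         "<queue name>": {"length": 0},
#         "scheduled": {"length": 0, "nextTimeStamp": None,
#                       "nextHumanReadableDate": None},
#     },
#     "tasks": {"count": 0},
#     "jobs": {"count": 0, "scheduled": [...]},
#     "fastlane": {"version": "...", "executor": "..."},
# }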
def status():
    executor = current_app.executor
    version = pkg_resources.get_distribution("fastlane").version
    metadata = {"hosts": [], "containers": {"running": []}}

    containers = executor.get_running_containers()

    for host, port, container_id in containers["running"]:
        metadata["containers"]["running"].append(
            {"host": host, "port": port, "id": container_id}
        )

    metadata["hosts"] = containers["available"] + containers["unavailable"]

    metadata["queues"] = {"jobs": {}, "monitor": {}, "error": {}}

    for queue in ["jobs", "monitor", "error"]:
        jobs_queue_size = current_app.redis.llen(f"rq:queue:{queue}")
        metadata["queues"][queue]["length"] = jobs_queue_size

    metadata["tasks"] = {"count": Task.objects.count()}
    metadata["jobs"] = {"count": Job.objects.count()}
    metadata["jobs"]["scheduled"] = []

    scheduled_jobs = Job.objects(scheduled=True).all()

    metadata["fastlane"] = {
        "version": version,
        "executor": current_app.config["EXECUTOR"],
    }

    for job in scheduled_jobs:
        j = job.to_dict(include_executions=False)
        itr = croniter.croniter(job.metadata["cron"], datetime.utcnow())
        j["nextScheduledAt"] = itr.get_next(datetime).isoformat()

        task_id = job.task.task_id
        job_url = url_for(
            "task.get_job", task_id=task_id, job_id=str(job.id), _external=True
        )
        j["url"] = job_url
        stop_job_url = url_for(
            "task.stop_job", task_id=task_id, job_id=str(job.id), _external=True
        )
        j["stopUrl"] = stop_job_url

        task_url = url_for("task.get_task", task_id=task_id, _external=True)
        del j["taskId"]
        j["task"] = {"id": task_id, "url": task_url}

        metadata["jobs"]["scheduled"].append(j)

    return jsonify(metadata), 200
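# A minimal sketch of how this revision of status() reads queue lengths: rq
# stores each queue as a Redis list under "rq:queue:<name>", so llen gives the
# backlog size. Assumes a local Redis on the default port; the queue names
# match the loop above.
import redis

r = redis.Redis()
for name in ("jobs", "monitor", "error"):
    print(name, r.llen(f"rq:queue:{name}"))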