def test_task_get_tasks(client):
    """Test getting tasks"""
    Task.create_task(str(uuid4()))
    Task.create_task(str(uuid4()))

    tasks = Task.get_tasks()
    expect(tasks.total).to_equal(2)
def test_task_get_by_task_id(client):
    """Test getting a task by task id"""
    task_id = str(uuid4())
    t = Task.create_task(task_id)

    topic = Task.get_by_task_id(t.task_id)
    expect(topic.id).to_equal(t.id)
def test_task_create2(client):
    """Test creating a new task fails when no task_id provided"""
    msg = "ValidationError (Task:None) (Field is required: ['task_id'])"

    with expect.error_to_happen(ValidationError, message=msg):
        Task.create_task(None)

    with expect.error_to_happen(ValidationError, message=msg):
        Task.create_task("")
def test_task_get_by_task_id3(client):
    """Test getting a task by task id fails if task id is empty"""
    msg = "Task ID is required and can't be None or empty."

    with expect.error_to_happen(RuntimeError, message=msg):
        Task.get_by_task_id(None)

    with expect.error_to_happen(RuntimeError, message=msg):
        Task.get_by_task_id("")
def test_enqueue2(client):
    """Test enqueue a job with the same task does not create a new task"""
    task_id = str(uuid4())
    data = {"image": "ubuntu", "command": "ls"}
    options = dict(
        data=dumps(data),
        headers={"Content-Type": "application/json"},
        follow_redirects=True,
    )

    rv = client.post(f"/tasks/{task_id}", **options)
    expect(rv.status_code).to_equal(200)
    obj = loads(rv.data)
    job_id = obj["jobId"]
    expect(job_id).not_to_be_null()
    expect(obj["queueJobId"]).not_to_be_null()

    rv = client.post(f"/tasks/{task_id}", **options)
    expect(rv.status_code).to_equal(200)
    obj = loads(rv.data)
    job_id = obj["jobId"]
    expect(job_id).not_to_be_null()
    expect(obj["queueJobId"]).not_to_be_null()

    task = Task.get_by_task_id(obj["taskId"])
    expect(task).not_to_be_null()
    expect(task.jobs).not_to_be_empty()

    with client.application.app_context():
        count = Task.objects.count()
        expect(count).to_equal(1)
def test_monitor_job_with_retry2(client):
    """Test monitoring a job for a task that fails stops after max retries"""
    with client.application.app_context():
        app = client.application
        app.redis.flushall()

        task_id = str(uuid4())
        t = Task.create_task(task_id)
        j = t.create_job()
        job_id = j.job_id
        j.metadata["retries"] = 3
        j.metadata["retry_count"] = 3
        ex = j.create_execution("image", "command")
        j.save()

        exec_mock = MagicMock()
        exec_mock.get_result.return_value = MagicMock(
            exit_code=1, log="".encode("utf-8"), error="error".encode("utf-8")
        )
        client.application.executor = exec_mock

        queue = Queue("monitor", is_async=False, connection=client.application.redis)
        result = queue.enqueue(job_mod.monitor_job, t.task_id, job_id, ex.execution_id)

        worker = SimpleWorker([queue], connection=queue.connection)
        worker.work(burst=True)

        t.reload()
        expect(t.jobs).to_length(1)

        job = t.jobs[0]
        expect(job.executions).to_length(1)

        execution = job.executions[0]
        expect(execution.image).to_equal("image")
        expect(execution.command).to_equal("command")

        hash_key = f"rq:job:{result.id}"
        res = app.redis.exists(hash_key)
        expect(res).to_be_true()

        res = app.redis.hget(hash_key, "status")
        expect(res).to_equal("finished")

        res = app.redis.hexists(hash_key, "data")
        expect(res).to_be_true()

        keys = app.redis.keys()
        next_job_id = [
            key
            for key in keys
            if key.decode("utf-8").startswith("rq:job")
            and not key.decode("utf-8").endswith(result.id)
        ]
        expect(next_job_id).to_length(0)
def create_task(task_id):
    details = get_details()

    if details is None or details == "":
        msg = "Failed to enqueue task because JSON body could not be parsed."
        g.logger.warn(msg)
        return make_response(msg, 400)

    image = details.get("image", None)
    command = details.get("command", None)

    if image is None or command is None:
        return make_response("image and command must be filled in the request.", 400)

    logger = g.logger.bind(task_id=task_id, image=image, command=command)

    logger.debug("Creating task...")
    # upsert: reuse an existing task with this task_id instead of creating a
    # duplicate, which is why test_enqueue2 ends with Task.objects.count() == 1
    task = Task.objects(task_id=task_id).modify(task_id=task_id, upsert=True, new=True)
    logger.info("Task created successfully.")

    j = create_job(details, task, logger)
    job_id = str(j.id)
    queue_job_id = None

    start_at = details.get("startAt", None)
    start_in = parse_time(details.get("startIn", None))
    cron = details.get("cron", None)

    if len(list(filter(lambda item: item is not None, (start_at, start_in, cron)))) > 1:
        return make_response(
            "Only ONE of 'startAt', 'startIn' and 'cron' should be in the request.", 400
        )

    queue_job_id = enqueue_job(task, j, image, command, start_at, start_in, cron, logger)

    job_url = url_for("task.get_job", task_id=task_id, job_id=job_id, _external=True)

    return jsonify(
        {
            "taskId": task_id,
            "jobId": job_id,
            "queueJobId": queue_job_id,
            "jobUrl": job_url,
            "taskUrl": task.get_url(),
        }
    )
def get_by_id(cls, task_id, job_id):
    from fastlane.models.task import Task

    if task_id is None or task_id == "" or job_id is None or job_id == "":
        raise RuntimeError("Task ID and Job ID are required and can't be None or empty.")

    t = Task.objects(task_id=task_id).first()
    j = cls.objects(task=t, job_id=job_id).first()

    return j
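# Hedged usage sketch for get_by_id (the identifiers below are made up):
#
#     job = Job.get_by_id("my-task", "5c094f0edeadbeef00000000")
#
# It returns the matching job, or None when either the task or the job does
# not exist, and raises RuntimeError for None/empty IDs, as the tests
# test_job_get_by_job_id and test_task_get_by_task_id3 exercise.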
def test_get_tasks_data(client):
    """Test getting tasks resource data"""
    task = Task.create_task('my-task')

    resp = client.get("/tasks")
    data = loads(resp.data)
    task_data = data["items"][0]

    with client.application.app_context():
        expect(task_data.keys()).to_equal(task.to_dict().keys())
def test_job_create(client):
    """Test creating a new job"""
    task_id = str(uuid4())
    t = Task.create_task(task_id)
    j = t.create_job()

    expect(j.job_id).to_equal(str(j.id))
    expect(j.created_at).not_to_be_null()
    expect(j.last_modified_at).not_to_be_null()
    expect(j.executions).to_be_empty()
def test_job_get_by_job_id(client):
    """Test getting a job by id"""
    task_id = str(uuid4())
    t = Task.create_task(task_id)
    j = t.create_job()

    topic = Job.get_by_id(task_id, j.job_id)
    expect(topic).not_to_be_null()
    expect(topic.job_id).to_equal(str(j.id))

    topic = Job.get_by_id("invalid", "invalid")
    expect(topic).to_be_null()
def test_task_get_tasks_pagination(client):
    """Test getting tasks pagination"""
    Task.create_task(str(uuid4()))
    Task.create_task(str(uuid4()))
    Task.create_task(str(uuid4()))

    tasks = Task.get_tasks(page=1, per_page=1)
    expect(tasks.total).to_equal(3)
    expect(tasks.pages).to_equal(3)
    expect(tasks.items).to_length(1)
    expect(tasks.has_next).to_be_true()
    expect(tasks.has_prev).to_be_false()
def test_task_create(client):
    """Test creating a new task"""
    task_id = str(uuid4())
    t = Task.create_task(task_id)
    expect(t.task_id).to_equal(task_id)
    expect(t.created_at).not_to_be_null()
    expect(t.last_modified_at).not_to_be_null()

    created_at = t.created_at
    last_mod = t.last_modified_at

    t.save()
    expect(t.created_at).to_equal(created_at)
    expect(t.last_modified_at).to_be_greater_than(last_mod)
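# A minimal mongoengine-style sketch of the timestamp behavior asserted
# above. This is illustrative only and NOT fastlane's actual Task model;
# the class name and field set are assumptions.
from datetime import datetime

from mongoengine import DateTimeField, Document, StringField


class TimestampedTask(Document):  # hypothetical model
    task_id = StringField(required=True)
    created_at = DateTimeField()
    last_modified_at = DateTimeField()

    def save(self, *args, **kwargs):
        # created_at is set once; last_modified_at is bumped on every save,
        # which is why test_task_create expects it to grow after t.save().
        if self.created_at is None:
            self.created_at = datetime.utcnow()
        self.last_modified_at = datetime.utcnow()
        return super().save(*args, **kwargs)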
def test_get_tasks_pagination(client):
    """Test getting tasks pagination"""
    Task.create_task('my-task-1')
    Task.create_task('my-task-2')
    Task.create_task('my-task-3')
    Task.create_task('my-task-4')

    app = client.application
    server_name = app.config['SERVER_NAME']

    resp = client.get("/tasks?page=2")
    data = loads(resp.data)

    expect(data["total"]).to_equal(4)
    expect(data["page"]).to_equal(2)
    expect(data["hasNext"]).to_be_false()
    expect(data["hasPrev"]).to_be_true()
    expect(data["prevUrl"]).to_equal(f'http://{server_name}/tasks?page=1')
    expect(data["nextUrl"]).to_be_null()
def test_search_tasks1(client):
    """Test searching tasks by task_id."""
    task_id = f"task-search-{str(uuid4())}"
    Task.create_task(task_id)
    Task.create_task(str(uuid4()))
    Task.create_task(str(uuid4()))

    resp = client.get("/search/?query=search")
    expect(resp.status_code).to_equal(200)

    data = loads(resp.data)
    expect(data["items"]).to_length(1)
def test_task_to_dict(client):
    """Test to_dict"""
    task = Task.create_task('my-task')
    app = client.application
    server_name = app.config['SERVER_NAME']

    with app.app_context():
        res = task.to_dict()

    expect(res['taskId']).to_equal('my-task')

    created_at = int(task.created_at.timestamp())
    expect(int(res["createdAt"])).to_equal(created_at)

    last_modified_at = int(task.last_modified_at.timestamp())
    expect(int(res["lastModifiedAt"])).to_equal(last_modified_at)

    expect(res["url"]).to_equal(f'http://{server_name}/tasks/my-task')
    expect(res["jobsCount"]).to_equal(0)
def test_get_task_details(client):
    """Test getting task details"""
    task_id = str(uuid4())
    job_id = str(uuid4())
    task = Task.create_task(task_id)
    task.create_or_update_job(job_id, "ubuntu", "command")

    resp = client.get(f"/tasks/{task_id}/")
    expect(resp.status_code).to_equal(200)

    data = loads(resp.data)
    expect(data).to_include("jobs")
    expect(data["jobs"]).to_length(1)

    job_data = data["jobs"][0]
    expect(job_data).to_include("id")
    expect(job_data["id"]).to_equal(job_id)
    expect(job_data["url"]).to_equal(
        f"http://localhost:10000/tasks/{task_id}/jobs/{job_id}/"
    )
def test_task_to_dict(client):
    """Test to_dict"""
    task = Task.create_task("my-task")
    app = client.application
    server_name = app.config["SERVER_NAME"]

    with app.app_context():
        res = task.to_dict()

    expect(res["taskId"]).to_equal("my-task")

    created_at = task.created_at.isoformat()
    expect(res["createdAt"]).to_equal(created_at)

    last_modified_at = task.last_modified_at.isoformat()
    expect(res["lastModifiedAt"]).to_equal(last_modified_at)

    expect(res["url"]).to_equal(f"http://{server_name}/tasks/my-task/")
    expect(res["jobsCount"]).to_equal(0)
def get_tasks():
    logger = g.logger.bind(operation="get_tasks")

    try:
        page = int(request.args.get('page', 1))
    except ValueError:
        logger.error("Tasks pagination page param should be an integer.")
        abort(404)

    per_page = current_app.config["PAGINATION_PER_PAGE"]

    logger.debug(f"Getting tasks page={page} per_page={per_page}...")
    paginator = Task.get_tasks(page=page, per_page=per_page)
    logger.debug("Tasks retrieved successfully...")

    tasks_url = url_for("task.get_tasks", _external=True)

    next_url = None
    if paginator.has_next:
        next_url = f'{tasks_url}?page={paginator.next_num}'

    prev_url = None
    if paginator.has_prev:
        prev_url = f'{tasks_url}?page={paginator.prev_num}'

    data = {
        "items": [],
        "total": paginator.total,
        "page": paginator.page,
        "pages": paginator.pages,
        "perPage": paginator.per_page,
        "hasNext": paginator.has_next,
        "hasPrev": paginator.has_prev,
        "nextUrl": next_url,
        "prevUrl": prev_url,
    }

    for task in paginator.items:
        data['items'].append(task.to_dict())

    return jsonify(data)
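# Illustrative response shape for GET /tasks?page=2 (values are made up;
# see test_get_tasks_pagination below for real assertions):
#
# {
#     "items": [...], "total": 4, "page": 2, "pages": 2, "perPage": 2,
#     "hasNext": false, "hasPrev": true,
#     "nextUrl": null, "prevUrl": "http://localhost:10000/tasks?page=1"
# }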
def test_get_tasks(client):
    """Test getting tasks"""
    Task.create_task('my-task-1')
    Task.create_task('my-task-2')
    Task.create_task('my-task-3')

    resp = client.get("/tasks")
    expect(resp.status_code).to_equal(200)

    data = loads(resp.data)
    expect(data["items"]).to_length(3)
    expect(data["total"]).to_equal(3)
    expect(data["page"]).to_equal(1)
    expect(data["pages"]).to_equal(1)
    expect(data["perPage"]).to_equal(3)
    expect(data["hasNext"]).to_be_false()
    expect(data["hasPrev"]).to_be_false()
def get_task(task_id):
    logger = g.logger.bind(operation="get_task", task_id=task_id)
    logger.debug("Getting task...")

    task = Task.get_by_task_id(task_id)

    if task is None:
        logger.error("Task not found.")
        abort(404)

    logger.debug("Task retrieved successfully...")

    jobs = []
    for job in task.jobs:
        url = url_for(
            "task.get_job", task_id=task_id, job_id=str(job.id), _external=True
        )
        jobs.append({"id": str(job.id), "url": url})

    return jsonify({"taskId": task_id, "jobs": jobs})
def test_get_tasks(auth_client):
    """Test getting tasks"""
    Task.create_task("my-task-1")
    Task.create_task("my-task-2")
    Task.create_task("my-task-3")

    user_and_pass = b64encode(b"test:auth").decode("ascii")
    headers = {"Authorization": f"Basic {user_and_pass}"}

    resp = auth_client.get("/tasks/", headers=headers)
    expect(resp.status_code).to_equal(200)

    data = loads(resp.data)
    expect(data["items"]).to_length(3)
    expect(data["total"]).to_equal(3)
    expect(data["page"]).to_equal(1)
    expect(data["pages"]).to_equal(1)
    expect(data["perPage"]).to_equal(3)
    expect(data["hasNext"]).to_be_false()
    expect(data["hasPrev"]).to_be_false()
def test_monitor_job_with_retry(client):
    """Test monitoring a job for a task that fails"""
    with client.application.app_context():
        app = client.application
        app.redis.flushall()

        task_id = str(uuid4())
        t = Task.create_task(task_id)
        j = t.create_job()
        job_id = j.job_id
        j.metadata["retries"] = 3
        j.metadata["retry_count"] = 0
        ex = j.create_execution("image", "command")
        j.save()

        exec_mock = MagicMock()
        exec_mock.get_result.return_value = MagicMock(
            exit_code=1, log="".encode("utf-8"), error="error".encode("utf-8")
        )
        exec_class_mock = MagicMock()
        exec_class_mock.Executor.return_value = exec_mock
        client.application.executor_module = exec_class_mock

        queue = Queue("monitor", is_async=False, connection=client.application.redis)
        result = queue.enqueue(job_mod.monitor_job, t.task_id, job_id, ex.execution_id)

        worker = SimpleWorker([queue], connection=queue.connection)
        worker.work(burst=True)

        t.reload()
        expect(t.jobs).to_length(1)

        job = t.jobs[0]
        expect(job.executions).to_length(1)

        execution = job.executions[0]
        expect(execution.image).to_equal("image")
        expect(execution.command).to_equal("command")

        hash_key = f"rq:job:{result.id}"
        res = app.redis.exists(hash_key)
        expect(res).to_be_true()

        res = app.redis.hget(hash_key, "status")
        expect(res).to_equal("finished")

        res = app.redis.hexists(hash_key, "data")
        expect(res).to_be_true()

        res = app.redis.zrange(b"rq:scheduler:scheduled_jobs", 0, -1)
        expect(res).to_length(1)

        time = datetime.now() + timedelta(seconds=2)
        res = app.redis.zscore("rq:scheduler:scheduled_jobs", res[0])
        expect(int(res)).to_equal(int(time.timestamp()))

        nj = app.redis.zrange("rq:scheduler:scheduled_jobs", 0, 0)[0].decode("utf-8")
        next_job_id = f"rq:job:{nj}"

        res = app.redis.exists(next_job_id)
        expect(res).to_be_true()

        res = app.redis.hexists(next_job_id, "data")
        expect(res).to_be_true()

        res = app.redis.hget(next_job_id, "origin")
        expect(res).to_equal("jobs")

        res = app.redis.hget(next_job_id, "description")
        expect(res).to_equal(
            f"fastlane.worker.job.run_job('{task_id}', '{job_id}', 'image', 'command')"
        )

        t.reload()
        expect(t.jobs[0].executions[0].status).to_equal(JobExecution.Status.done)
def test_run_job(client):
    """Test running a new job for a task"""
    with client.application.app_context():
        app = client.application
        app.redis.flushall()

        task_id = str(uuid4())
        t = Task.create_task(task_id)
        j = t.create_job()
        job_id = j.job_id
        t.save()

        exec_mock = MagicMock()
        exec_class_mock = MagicMock()
        exec_class_mock.Executor.return_value = exec_mock
        client.application.executor_module = exec_class_mock

        queue = Queue("jobs", is_async=False, connection=client.application.redis)
        result = queue.enqueue(job_mod.run_job, t.task_id, job_id, "image", "command")

        worker = SimpleWorker([queue], connection=queue.connection)
        worker.work(burst=True)

        t.reload()
        expect(t.jobs).to_length(1)

        job = t.jobs[0]
        expect(job.executions).to_length(1)

        execution = job.executions[0]
        expect(execution.image).to_equal("image")
        expect(execution.command).to_equal("command")

        hash_key = f"rq:job:{result.id}"
        res = app.redis.exists(hash_key)
        expect(res).to_be_true()

        res = app.redis.hget(hash_key, "status")
        expect(res).to_equal("finished")

        res = app.redis.hexists(hash_key, "data")
        expect(res).to_be_true()

        keys = app.redis.keys()
        next_job_id = [
            key
            for key in keys
            if key.decode("utf-8").startswith("rq:job")
            and not key.decode("utf-8").endswith(result.id)
        ]
        expect(next_job_id).to_length(1)
        next_job_id = next_job_id[0]

        res = app.redis.exists(next_job_id)
        expect(res).to_be_true()

        res = app.redis.hget(next_job_id, "status")
        expect(res).to_equal("queued")

        res = app.redis.hexists(next_job_id, "data")
        expect(res).to_be_true()

        res = app.redis.hget(next_job_id, "origin")
        expect(res).to_equal("monitor")

        res = app.redis.hget(next_job_id, "description")
        expect(res).to_equal(
            f"fastlane.worker.job.monitor_job('{task_id}', '{job_id}', '{execution.execution_id}')"
        )

        res = app.redis.hget(next_job_id, "timeout")
        expect(res).to_equal("-1")

        t.reload()
        expect(t.jobs[0].executions[0].status).to_equal(JobExecution.Status.running)
def new(task_id):
    from fastlane.models.task import Task

    return Task.create_task(task_id)
def test_enqueue1(client):
    """Test enqueue a job works"""
    task_id = str(uuid4())
    data = {
        "image": "ubuntu",
        "command": "ls",
    }
    rv = client.post(f'/tasks/{task_id}', data=dumps(data), follow_redirects=True)
    expect(rv.status_code).to_equal(200)

    obj = loads(rv.data)
    job_id = obj['jobId']
    expect(job_id).not_to_be_null()
    expect(obj['queueJobId']).not_to_be_null()

    queue_job_id = obj["queueJobId"]
    hash_key = f'rq:job:{queue_job_id}'

    app = client.application
    res = app.redis.exists(hash_key)
    expect(res).to_be_true()

    res = app.redis.hget(hash_key, 'status')
    expect(res).to_equal('queued')

    res = app.redis.hexists(hash_key, 'created_at')
    expect(res).to_be_true()

    res = app.redis.hexists(hash_key, 'enqueued_at')
    expect(res).to_be_true()

    res = app.redis.hexists(hash_key, 'data')
    expect(res).to_be_true()

    res = app.redis.hget(hash_key, 'origin')
    expect(res).to_equal('jobs')

    res = app.redis.hget(hash_key, 'description')
    expect(res).to_equal(
        f"fastlane.worker.job.run_job('{obj['taskId']}', '{job_id}', 'ubuntu', 'ls')"
    )

    res = app.redis.hget(hash_key, 'timeout')
    expect(res).to_equal('-1')

    task = Task.get_by_task_id(obj['taskId'])
    expect(task).not_to_be_null()
    expect(task.jobs).not_to_be_empty()

    j = task.jobs[0]
    expect(str(j.id)).to_equal(job_id)

    q = 'rq:queue:jobs'
    res = app.redis.llen(q)
    expect(res).to_equal(1)

    res = app.redis.lpop(q)
    expect(res).to_equal(queue_job_id)

    with client.application.app_context():
        count = Task.objects.count()
        expect(count).to_equal(1)
def test_enqueue1(client):
    """Test enqueue a job works"""
    task_id = str(uuid4())
    data = {"image": "ubuntu", "command": "ls"}
    rv = client.post(f"/tasks/{task_id}", data=dumps(data), follow_redirects=True)
    expect(rv.status_code).to_equal(200)

    obj = loads(rv.data)
    job_id = obj["jobId"]
    expect(job_id).not_to_be_null()
    expect(obj["queueJobId"]).not_to_be_null()

    app = client.application
    task = Task.get_by_task_id(obj["taskId"])

    with app.app_context():
        expect(obj["taskUrl"]).to_equal(task.get_url())

    queue_job_id = obj["queueJobId"]
    hash_key = f"rq:job:{queue_job_id}"

    res = app.redis.exists(hash_key)
    expect(res).to_be_true()

    res = app.redis.hget(hash_key, "status")
    expect(res).to_equal("queued")

    res = app.redis.hexists(hash_key, "created_at")
    expect(res).to_be_true()

    res = app.redis.hexists(hash_key, "enqueued_at")
    expect(res).to_be_true()

    res = app.redis.hexists(hash_key, "data")
    expect(res).to_be_true()

    res = app.redis.hget(hash_key, "origin")
    expect(res).to_equal("jobs")

    res = app.redis.hget(hash_key, "description")
    expect(res).to_equal(
        f"fastlane.worker.job.run_job('{obj['taskId']}', '{job_id}', 'ubuntu', 'ls')"
    )

    res = app.redis.hget(hash_key, "timeout")
    expect(res).to_equal("-1")

    expect(task).not_to_be_null()
    expect(task.jobs).not_to_be_empty()

    j = task.jobs[0]
    expect(str(j.id)).to_equal(job_id)

    q = "rq:queue:jobs"
    res = app.redis.llen(q)
    expect(res).to_equal(1)

    res = app.redis.lpop(q)
    expect(res).to_equal(queue_job_id)

    with app.app_context():
        count = Task.objects.count()
        expect(count).to_equal(1)
def test_task_get_by_task_id2(client):
    """Test getting a task by task id returns None if no task exists"""
    task_id = str(uuid4())
    topic = Task.get_by_task_id(task_id)
    expect(topic).to_be_null()
def create_task(task_id):
    details = get_details()

    if details is None or details == "":
        msg = "Failed to enqueue task because JSON body could not be parsed."
        g.logger.warn(msg)
        return make_response(msg, 400)

    image = details.get("image", None)
    command = details.get("command", None)

    if image is None or command is None:
        return make_response("image and command must be filled in the request.", 400)

    logger = g.logger.bind(task_id=task_id, image=image, command=command)

    logger.debug("Creating task...")
    task = Task.objects(task_id=task_id).modify(task_id=task_id, upsert=True, new=True)
    logger.info("Task created successfully.")

    logger.debug("Creating job...")
    retries = details.get("retries", 0)
    expiration = details.get("expiration")

    hard_limit = current_app.config["HARD_EXECUTION_TIMEOUT_SECONDS"]
    timeout = details.get("timeout", hard_limit)
    timeout = min(timeout, hard_limit)  # ensure jobs can't specify more than hard limit

    j = task.create_job()
    j.metadata["retries"] = retries
    j.metadata["retry_count"] = 0
    j.metadata["expiration"] = expiration
    j.metadata["timeout"] = timeout
    j.metadata["envs"] = details.get("envs", {})
    j.save()
    job_id = str(j.id)
    logger.debug("Job created successfully...", job_id=job_id)

    queue_job_id = None
    start_at = details.get("startAt", None)
    start_in = parse_time(details.get("startIn", None))
    cron = details.get("cron", None)

    scheduler = Scheduler("jobs", connection=current_app.redis)
    args = [task_id, job_id, image, command]

    # NOTE: with this elif chain, startAt silently takes precedence if more
    # than one of startAt/startIn/cron is sent in the same request.
    if start_at is not None:
        dt = datetime.utcfromtimestamp(int(start_at))
        logger.debug("Enqueuing job execution in the future...", start_at=dt)
        result = scheduler.enqueue_at(dt, run_job, *args)
        j.metadata["enqueued_id"] = result.id
        j.save()
        logger.info("Job execution enqueued successfully.", start_at=dt)
    elif start_in is not None:
        dt = datetime.now(tz=timezone.utc) + start_in
        logger.debug("Enqueuing job execution in the future...", start_at=dt)
        result = scheduler.enqueue_at(dt, run_job, *args)
        j.metadata["enqueued_id"] = result.id
        j.save()
        logger.info("Job execution enqueued successfully.", start_at=dt)
    elif cron is not None:
        logger.debug("Enqueuing job execution using cron...", cron=cron)
        result = scheduler.cron(
            cron,  # a cron string (e.g. "0 0 * * 0")
            func=run_job,
            args=args,
            repeat=None,
            queue_name="jobs",
        )
        j.metadata["enqueued_id"] = result.id
        j.metadata["cron"] = cron
        j.scheduled = True
        j.save()
        logger.info("Job execution enqueued successfully.", cron=cron)
    else:
        logger.debug("Enqueuing job execution...")
        result = current_app.job_queue.enqueue(run_job, *args, timeout=-1)
        queue_job_id = result.id
        j.metadata["enqueued_id"] = result.id
        j.save()
        logger.info("Job execution enqueued successfully.")

    job_url = url_for("task.get_job", task_id=task_id, job_id=job_id, _external=True)
    task_url = url_for("task.get_task", task_id=task_id, _external=True)

    return jsonify(
        {
            "taskId": task_id,
            "jobId": job_id,
            "queueJobId": queue_job_id,
            "jobUrl": job_url,
            "taskUrl": task_url,
        }
    )
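# Hedged usage sketch for the route above: a client POST with the payload
# keys create_task actually reads. The host/port, task name, and use of the
# `requests` library are assumptions; send only ONE of startAt/startIn/cron,
# since the elif chain above gives startAt precedence.
import requests

payload = {
    "image": "ubuntu",       # required
    "command": "ls -lah",    # required
    "retries": 2,            # optional, defaults to 0
    "timeout": 60,           # optional, capped at HARD_EXECUTION_TIMEOUT_SECONDS
    "startIn": "5m",         # optional schedule trigger, parsed by parse_time
    "envs": {"DEBUG": "1"},  # optional environment variables for the container
}
response = requests.post("http://localhost:10000/tasks/my-task", json=payload)
# queueJobId is None for scheduled (startAt/startIn/cron) jobs, since those
# are handed to the scheduler instead of being enqueued immediately.
print(response.json()["jobId"], response.json()["queueJobId"])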