def test_task_get_tasks(client):
    """Test getting tasks"""
    # Create two tasks with random ids, then verify both are counted.
    for _ in range(2):
        Task.create_task(str(uuid4()))

    result = Task.get_tasks()
    expect(result.total).to_equal(2)
def test_task_create2(client):
    """Test creating a new task fails when no task_id provided"""
    msg = "ValidationError (Task:None) (Field is required: ['task_id'])"

    # Both None and the empty string must be rejected as a missing id.
    for bad_task_id in (None, ""):
        with expect.error_to_happen(ValidationError, message=msg):
            Task.create_task(bad_task_id)
def test_monitor_job_with_retry2(client):
    """Test monitoring a job for a task that fails stops after max retries"""
    with client.application.app_context():
        app = client.application
        app.redis.flushall()

        task_id = str(uuid4())
        task = Task.create_task(task_id)
        job = task.create_job()
        job_id = job.job_id
        # Retry budget already exhausted: the monitor must NOT reschedule.
        job.metadata["retries"] = 3
        job.metadata["retry_count"] = 3
        execution = job.create_execution("image", "command")
        job.save()

        # Fake a failed container run (non-zero exit code).
        exec_mock = MagicMock()
        exec_mock.get_result.return_value = MagicMock(
            exit_code=1,
            log="".encode("utf-8"),
            error="error".encode("utf-8"),
        )
        client.application.executor = exec_mock

        queue = Queue("monitor", is_async=False, connection=app.redis)
        result = queue.enqueue(
            job_mod.monitor_job, task.task_id, job_id, execution.execution_id
        )
        worker = SimpleWorker([queue], connection=queue.connection)
        worker.work(burst=True)

        task.reload()
        expect(task.jobs).to_length(1)

        stored_job = task.jobs[0]
        expect(stored_job.executions).to_length(1)

        stored_execution = stored_job.executions[0]
        expect(stored_execution.image).to_equal("image")
        expect(stored_execution.command).to_equal("command")

        hash_key = f"rq:job:{result.id}"
        expect(app.redis.exists(hash_key)).to_be_true()
        expect(app.redis.hget(hash_key, "status")).to_equal("finished")
        expect(app.redis.hexists(hash_key, "data")).to_be_true()

        # No follow-up rq job may exist since retries are exhausted.
        leftover_jobs = [
            key
            for key in app.redis.keys()
            if key.decode("utf-8").startswith("rq:job")
            and not key.decode("utf-8").endswith(result.id)
        ]
        expect(leftover_jobs).to_length(0)
def test_task_get_by_task_id(client):
    """Test getting a task by task id"""
    created = Task.create_task(str(uuid4()))

    fetched = Task.get_by_task_id(created.task_id)
    expect(fetched.id).to_equal(created.id)
def test_get_tasks_data(client):
    """Test getting tasks resource data"""
    task = Task.create_task("my-task")

    response = client.get("/tasks")
    payload = loads(response.data)
    first_item = payload["items"][0]

    # Serialized keys in the API response must mirror Task.to_dict().
    with client.application.app_context():
        expect(first_item.keys()).to_equal(task.to_dict().keys())
def test_job_create(client):
    """Test creating a new job"""
    task = Task.create_task(str(uuid4()))
    job = task.create_job()

    expect(job.job_id).to_equal(str(job.id))
    expect(job.created_at).not_to_be_null()
    expect(job.last_modified_at).not_to_be_null()
    # A fresh job starts with no executions.
    expect(job.executions).to_be_empty()
def test_job_get_by_job_id(client):
    """Test getting a job by id"""
    task_id = str(uuid4())
    task = Task.create_task(task_id)
    job = task.create_job()

    found = Job.get_by_id(task_id, job.job_id)
    expect(found).not_to_be_null()
    expect(found.job_id).to_equal(str(job.id))

    # Unknown ids must yield None rather than raising.
    missing = Job.get_by_id("invalid", "invalid")
    expect(missing).to_be_null()
def test_task_create(client):
    """Test creating a new task"""
    task_id = str(uuid4())
    task = Task.create_task(task_id)

    expect(task.task_id).to_equal(task_id)
    expect(task.created_at).not_to_be_null()
    expect(task.last_modified_at).not_to_be_null()

    original_created_at = task.created_at
    original_last_modified = task.last_modified_at

    # Saving again must keep created_at but bump last_modified_at.
    task.save()
    expect(task.created_at).to_equal(original_created_at)
    expect(task.last_modified_at).to_be_greater_than(original_last_modified)
def test_get_tasks_pagination(client):
    """Test getting tasks pagination"""
    for index in range(1, 5):
        Task.create_task(f"my-task-{index}")

    app = client.application
    server_name = app.config["SERVER_NAME"]

    # Request the second (and last) page.
    resp = client.get("/tasks?page=2")
    data = loads(resp.data)

    expect(data["total"]).to_equal(4)
    expect(data["page"]).to_equal(2)
    expect(data["hasNext"]).to_be_false()
    expect(data["hasPrev"]).to_be_true()
    expect(data["prevUrl"]).to_equal(f"http://{server_name}/tasks?page=1")
    expect(data["nextUrl"]).to_be_null()
def test_search_tasks1(client):
    """Tests search task by task_id."""
    matching_task_id = f"task-search-{str(uuid4())}"
    Task.create_task(matching_task_id)
    # Two unrelated tasks that must not match the query below.
    Task.create_task(str(uuid4()))
    Task.create_task(str(uuid4()))

    resp = client.get("/search/?query=search")
    expect(resp.status_code).to_equal(200)

    data = loads(resp.data)
    expect(data["items"]).to_length(1)
def test_task_get_tasks_pagination(client):
    """Test getting tasks pagination"""
    for _ in range(3):
        Task.create_task(str(uuid4()))

    # One item per page over three tasks -> three pages.
    page = Task.get_tasks(page=1, per_page=1)
    expect(page.total).to_equal(3)
    expect(page.pages).to_equal(3)
    expect(page.items).to_length(1)
    expect(page.has_next).to_be_true()
    expect(page.has_prev).to_be_false()
def test_task_to_dict(client):
    """Test to_dict"""
    # NOTE(review): a second test_task_to_dict is defined later in this module
    # and shadows this one, so pytest never collects this version (it also
    # asserts an older serialization: integer timestamps and no trailing
    # slash on the url). Rename or remove one of the two.
    task = Task.create_task("my-task")
    app = client.application
    server_name = app.config["SERVER_NAME"]

    with app.app_context():
        res = task.to_dict()
        expect(res["taskId"]).to_equal("my-task")

        expected_created = int(task.created_at.timestamp())
        expect(int(res["createdAt"])).to_equal(expected_created)

        expected_modified = int(task.last_modified_at.timestamp())
        expect(int(res["lastModifiedAt"])).to_equal(expected_modified)

        expect(res["url"]).to_equal(f"http://{server_name}/tasks/my-task")
        expect(res["jobsCount"]).to_equal(0)
def test_task_to_dict(client):
    """Test to_dict"""
    # NOTE(review): this redefinition shadows an earlier test_task_to_dict in
    # this module; only this version (ISO timestamps, trailing-slash url)
    # actually runs under pytest.
    task = Task.create_task("my-task")
    app = client.application
    server_name = app.config["SERVER_NAME"]

    with app.app_context():
        res = task.to_dict()
        expect(res["taskId"]).to_equal("my-task")

        expected_created = task.created_at.isoformat()
        expect(res["createdAt"]).to_equal(expected_created)

        expected_modified = task.last_modified_at.isoformat()
        expect(res["lastModifiedAt"]).to_equal(expected_modified)

        expect(res["url"]).to_equal(f"http://{server_name}/tasks/my-task/")
        expect(res["jobsCount"]).to_equal(0)
def test_get_task_details(client):
    """Test getting tasks"""
    task_id = str(uuid4())
    job_id = str(uuid4())
    task = Task.create_task(task_id)
    task.create_or_update_job(job_id, "ubuntu", "command")

    resp = client.get(f"/tasks/{task_id}/")
    expect(resp.status_code).to_equal(200)

    data = loads(resp.data)
    expect(data).to_include("jobs")
    expect(data["jobs"]).to_length(1)

    job_data = data["jobs"][0]
    expect(job_data).to_include("id")
    expect(job_data["id"]).to_equal(job_id)
    expect(job_data["url"]).to_equal(
        f"http://localhost:10000/tasks/{task_id}/jobs/{job_id}/"
    )
def test_get_tasks(client):
    """Test getting tasks"""
    # NOTE(review): a later test_get_tasks(auth_client) redefinition shadows
    # this one, so pytest never collects this version — rename one of the two.
    for index in range(1, 4):
        Task.create_task(f"my-task-{index}")

    resp = client.get("/tasks")
    expect(resp.status_code).to_equal(200)

    data = loads(resp.data)
    expect(data["items"]).to_length(3)
    expect(data["total"]).to_equal(3)
    expect(data["page"]).to_equal(1)
    expect(data["pages"]).to_equal(1)
    expect(data["perPage"]).to_equal(3)
    expect(data["hasNext"]).to_be_false()
    expect(data["hasPrev"]).to_be_false()
def test_get_tasks(auth_client):
    """Test getting tasks"""
    # NOTE(review): this redefinition shadows an earlier test_get_tasks in
    # this module; only this authenticated variant runs under pytest.
    for index in range(1, 4):
        Task.create_task(f"my-task-{index}")

    credentials = b64encode(b"test:auth").decode("ascii")
    headers = {"Authorization": "Basic %s" % credentials}

    resp = auth_client.get("/tasks/", headers=headers)
    expect(resp.status_code).to_equal(200)

    data = loads(resp.data)
    expect(data["items"]).to_length(3)
    expect(data["total"]).to_equal(3)
    expect(data["page"]).to_equal(1)
    expect(data["pages"]).to_equal(1)
    expect(data["perPage"]).to_equal(3)
    expect(data["hasNext"]).to_be_false()
    expect(data["hasPrev"]).to_be_false()
def test_monitor_job_with_retry(client):
    """Test monitoring a job for a task that fails"""
    with client.application.app_context():
        app = client.application
        app.redis.flushall()

        task_id = str(uuid4())
        task = Task.create_task(task_id)
        job = task.create_job()
        job_id = job.job_id
        # First failure of an allowed 3 retries: the monitor must reschedule.
        job.metadata["retries"] = 3
        job.metadata["retry_count"] = 0
        execution = job.create_execution("image", "command")
        job.save()

        # Fake a failed container run (non-zero exit code).
        exec_mock = MagicMock()
        exec_mock.get_result.return_value = MagicMock(
            exit_code=1,
            log="".encode("utf-8"),
            error="error".encode("utf-8"),
        )
        exec_class_mock = MagicMock()
        exec_class_mock.Executor.return_value = exec_mock
        client.application.executor_module = exec_class_mock

        queue = Queue("monitor", is_async=False, connection=app.redis)
        result = queue.enqueue(
            job_mod.monitor_job, task.task_id, job_id, execution.execution_id
        )
        worker = SimpleWorker([queue], connection=queue.connection)
        worker.work(burst=True)

        task.reload()
        expect(task.jobs).to_length(1)

        stored_job = task.jobs[0]
        expect(stored_job.executions).to_length(1)

        stored_execution = stored_job.executions[0]
        expect(stored_execution.image).to_equal("image")
        expect(stored_execution.command).to_equal("command")

        hash_key = f"rq:job:{result.id}"
        expect(app.redis.exists(hash_key)).to_be_true()
        expect(app.redis.hget(hash_key, "status")).to_equal("finished")
        expect(app.redis.hexists(hash_key, "data")).to_be_true()

        scheduled = app.redis.zrange(b"rq:scheduler:scheduled_jobs", 0, -1)
        expect(scheduled).to_length(1)

        # The retry is scheduled ~2 seconds out.
        # NOTE(review): comparing against datetime.now() taken here is
        # timing-sensitive and can flake on a slow run — confirm acceptable.
        expected_time = datetime.now() + timedelta(seconds=2)
        score = app.redis.zscore("rq:scheduler:scheduled_jobs", scheduled[0])
        expect(int(score)).to_equal(int(expected_time.timestamp()))

        scheduled_id = app.redis.zrange(
            "rq:scheduler:scheduled_jobs", 0, 0
        )[0].decode("utf-8")
        next_job_key = f"rq:job:{scheduled_id}"
        expect(app.redis.exists(next_job_key)).to_be_true()
        expect(app.redis.hexists(next_job_key, "data")).to_be_true()
        expect(app.redis.hget(next_job_key, "origin")).to_equal("jobs")
        expect(app.redis.hget(next_job_key, "description")).to_equal(
            f"fastlane.worker.job.run_job('{task_id}', "
            f"'{job_id}', 'image', 'command')"
        )

        task.reload()
        expect(task.jobs[0].executions[0].status).to_equal(
            JobExecution.Status.done
        )
def test_run_job(client):
    """Test running a new job for a task"""
    with client.application.app_context():
        app = client.application
        app.redis.flushall()

        task_id = str(uuid4())
        task = Task.create_task(task_id)
        job = task.create_job()
        job_id = job.job_id
        task.save()

        exec_mock = MagicMock()
        exec_class_mock = MagicMock()
        exec_class_mock.Executor.return_value = exec_mock
        client.application.executor_module = exec_class_mock

        queue = Queue("jobs", is_async=False, connection=app.redis)
        result = queue.enqueue(
            job_mod.run_job, task.task_id, job_id, "image", "command"
        )
        worker = SimpleWorker([queue], connection=queue.connection)
        worker.work(burst=True)

        task.reload()
        expect(task.jobs).to_length(1)

        stored_job = task.jobs[0]
        expect(stored_job.executions).to_length(1)

        execution = stored_job.executions[0]
        expect(execution.image).to_equal("image")
        expect(execution.command).to_equal("command")

        hash_key = f"rq:job:{result.id}"
        expect(app.redis.exists(hash_key)).to_be_true()
        expect(app.redis.hget(hash_key, "status")).to_equal("finished")
        expect(app.redis.hexists(hash_key, "data")).to_be_true()

        # Exactly one follow-up rq job (the monitor) must have been enqueued.
        followups = [
            key
            for key in app.redis.keys()
            if key.decode("utf-8").startswith("rq:job")
            and not key.decode("utf-8").endswith(result.id)
        ]
        expect(followups).to_length(1)

        monitor_key = followups[0]
        expect(app.redis.exists(monitor_key)).to_be_true()
        expect(app.redis.hget(monitor_key, "status")).to_equal("queued")
        expect(app.redis.hexists(monitor_key, "data")).to_be_true()
        expect(app.redis.hget(monitor_key, "origin")).to_equal("monitor")
        expect(app.redis.hget(monitor_key, "description")).to_equal(
            f"fastlane.worker.job.monitor_job('{task_id}', '{job_id}', '{execution.execution_id}')"
        )
        expect(app.redis.hget(monitor_key, "timeout")).to_equal("-1")

        task.reload()
        expect(task.jobs[0].executions[0].status).to_equal(
            JobExecution.Status.running
        )
def new(task_id):
    """Create and return a new Task with the given *task_id*.

    The import is local to avoid importing the models module (and its
    mongoengine registration side effects) before the app is configured —
    presumably; confirm against how this helper is called.
    """
    from fastlane.models.task import Task

    return Task.create_task(task_id)