Example #1
def get_task_and_details(task_id):
    # Parses the request body and upserts the Task. Returns a
    # (task, details, logger, error_response) tuple; error_response is None
    # on success and a ready-made Flask error response on failure.
    details = get_details()

    if details is None or details == "":
        msg = "Failed to enqueue task because JSON body could not be parsed."
        g.logger.warn(msg)

        return None, None, None, make_response(msg, 400)

    image = details.get("image", None)
    command = details.get("command", None)

    if image is None or command is None:
        return (
            None,
            None,
            None,
            make_response("image and command must be filled in the request.",
                          400),
        )

    logger = g.logger.bind(task_id=task_id, image=image, command=command)

    logger.debug("Creating task...")
    task = Task.objects(task_id=task_id).modify(task_id=task_id,
                                                upsert=True,
                                                new=True)
    logger.info("Task created successfully.")

    return task, details, logger, None
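
Since the error response comes back as the fourth element of the tuple, a caller checks it before touching the task. The handler below is only a sketch of that calling convention; the route path and handler name are assumptions, not code from the project.

# Illustrative only: route and handler names are assumed.
from flask import Blueprint, jsonify

bp = Blueprint("task", __name__)


@bp.route("/tasks/<task_id>/", methods=["POST"])
def enqueue_task(task_id):
    task, details, logger, error = get_task_and_details(task_id)

    if error is not None:
        # get_task_and_details already built the 400 response.
        return error

    logger.info("Task upserted.")

    return jsonify({"taskId": task.task_id})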
Example #2
def get_task(task_id):
    # Returns the task and its jobs as JSON, or a 404 error if the task
    # does not exist.
    logger = g.logger.bind(operation="get_task", task_id=task_id)

    logger.debug("Getting task...")
    task = Task.get_by_task_id(task_id)

    if task is None:
        return return_error("Task not found.",
                            "get_task",
                            status=404,
                            logger=logger)

    logger.debug("Task retrieved successfully...")

    task_jobs = Job.objects(id__in=[str(job_ref.id) for job_ref in task.jobs])

    jobs = []

    for job in task_jobs:
        url = url_for("task.get_job",
                      task_id=task_id,
                      job_id=str(job.job_id),
                      _external=True)
        jobs.append({
            "id": str(job.job_id),
            "createdAt": job.created_at.isoformat(),
            "url": url,
        })

    return jsonify({"taskId": task_id, "jobs": jobs})
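
For reference, the payload assembled above has the following shape (identifier and timestamp values are invented for illustration; the host mirrors the localhost:10000 URLs asserted in the tests below):

# Illustrative get_task response body (values invented):
# {
#     "taskId": "c6a706d8-...",
#     "jobs": [
#         {
#             "id": "7c9b20e5-...",
#             "createdAt": "2019-01-01T12:00:00",
#             "url": "http://localhost:10000/tasks/c6a706d8-.../jobs/7c9b20e5-.../"
#         }
#     ]
# }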
Example #3
def test_enqueue12(client):
    """Test enqueue a job works with PUT"""

    with client.application.app_context():
        task_id = str(uuid4())
        job_id = str(uuid4())
        data = {"image": "ubuntu", "command": "ls"}
        response = client.put(
            f"/tasks/{task_id}/jobs/{job_id}/", data=dumps(data), follow_redirects=True
        )

        expect(response.status_code).to_equal(200)

        obj = loads(response.data)
        expect(obj).not_to_be_null()
        new_job_id = obj["jobId"]
        expect(new_job_id).to_equal(job_id)
        expect(obj["queueJobId"]).not_to_be_null()

        expect(obj["executionId"]).not_to_be_null()

        task = Task.get_by_task_id(obj["taskId"])
        expect(task).not_to_be_null()
        expect(task.jobs).not_to_be_empty()

        j = task.jobs[0]
        job = Job.objects(id=j.id).first()
        expect(str(job.job_id)).to_equal(job_id)

        expect(obj["taskUrl"]).to_equal(task.get_url())
        expect(obj).to_be_enqueued()

        count = Task.objects.count()
        expect(count).to_equal(1)
Example #4
def test_enqueue1(client):
    """Test enqueue a job works"""

    with client.application.app_context():
        task_id = str(uuid4())
        data = {"image": "ubuntu", "command": "ls"}
        response = client.post(f"/tasks/{task_id}/",
                               data=dumps(data),
                               follow_redirects=True)

        expect(response.status_code).to_equal(200)
        obj = loads(response.data)

        expect(obj["taskUrl"]).to_equal(
            f"http://localhost:10000/tasks/{task_id}/")

        job_id = obj["jobId"]
        expect(job_id).not_to_be_null()

        expect(obj["jobUrl"]).to_equal(
            f"http://localhost:10000/tasks/{task_id}/jobs/{job_id}/")

        expect(obj["queueJobId"]).not_to_be_null()

        expect(obj["executionId"]).not_to_be_null()
        execution_id = obj["executionId"]

        expect(obj["executionUrl"]).to_equal(
            f"http://localhost:10000/tasks/{task_id}/jobs/{job_id}/executions/{execution_id}/"
        )

        task = Task.get_by_task_id(obj["taskId"])
        expect(task).not_to_be_null()
        expect(task.jobs).not_to_be_empty()

        j = task.jobs[0]
        job = Job.objects(id=j.id).first()
        expect(str(job.job_id)).to_equal(job_id)

        expect(obj["taskUrl"]).to_equal(task.get_url())
        expect(obj).to_be_enqueued()
        expect(obj).to_be_enqueued_with_value("status", "queued")

        expect(obj).to_be_enqueued_with_value("created_at")
        expect(obj).to_be_enqueued_with_value("enqueued_at")
        expect(obj).to_be_enqueued_with_value("data")
        expect(obj).to_be_enqueued_with_value("origin", "jobs")
        expect(obj).to_be_enqueued_with_value(
            "description",
            f"fastlane.worker.job.run_job('{obj['taskId']}', '{job_id}', '{execution_id}', 'ubuntu', 'ls')",
        )
        expect(obj).to_be_enqueued_with_value("timeout", "-1")

        count = Task.objects.count()
        expect(count).to_equal(1)
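
The same enqueue request can also be issued outside the Flask test client. The snippet below is a sketch using the requests library, not part of the test suite; the host and port mirror the localhost:10000 URLs asserted above.

# Sketch only, not from the test suite: enqueue a task over plain HTTP.
import json
from uuid import uuid4

import requests

task_id = str(uuid4())
response = requests.post(
    f"http://localhost:10000/tasks/{task_id}/",
    data=json.dumps({"image": "ubuntu", "command": "ls"}),
    headers={"Content-Type": "application/json"},
)
response.raise_for_status()
body = response.json()
print(body["jobUrl"], body["executionUrl"])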
Example #5
def search_tasks():
    # Paginated task search; requires a `query` querystring parameter.
    logger = g.logger.bind(operation="search_tasks")

    query = request.args.get("query")

    if not query:
        msg = "The query param is required."

        return return_error(msg, "search_tasks", status=400, logger=logger)

    page, error = get_current_page(logger)

    if error:
        return error

    per_page = current_app.config["PAGINATION_PER_PAGE"]

    logger.debug(f"Getting tasks page={page} per_page={per_page}...")
    paginator = Task.search_tasks(query=query, page=page, per_page=per_page)

    logger.debug("Tasks retrieved successfully...")

    next_url = None

    if paginator.has_next:
        next_url = url_for("task.search_tasks",
                           query=query,
                           page=paginator.next_num,
                           _external=True)

    prev_url = None

    if paginator.has_prev:
        prev_url = url_for("task.search_tasks",
                           query=query,
                           page=paginator.prev_num,
                           _external=True)

    data = {
        "items": [],
        "total": paginator.total,
        "page": paginator.page,
        "pages": paginator.pages,
        "perPage": paginator.per_page,
        "hasNext": paginator.has_next,
        "hasPrev": paginator.has_prev,
        "nextUrl": next_url,
        "prevUrl": prev_url,
    }

    for task in paginator.items:
        data["items"].append(task.to_dict())

    return jsonify(data)
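
Because the payload exposes hasNext and nextUrl, a client can walk every result page by following nextUrl until it is null. The helper below is a sketch of that loop using the requests library; the endpoint URL is whatever this view is registered under, so it is passed in rather than assumed.

# Sketch only (not project code): walk a paginated response via nextUrl.
import requests


def iter_paginated_items(first_url, params=None):
    # first_url points at a paginated endpoint such as the one served by
    # search_tasks or get_tasks (assumed to be reachable).
    url = first_url

    while url is not None:
        page = requests.get(url, params=params).json()

        for item in page["items"]:
            yield item

        # nextUrl is a full external URL that already carries query and page,
        # so later requests do not need the original params.
        url = page["nextUrl"]
        params = None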
Example #6
def test_job_create(client):  # pylint: disable=unused-argument
    """Test creating a new job"""

    task_id = str(uuid4())

    task = Task.create_task(task_id)
    job = task.create_job()

    expect(job.created_at).not_to_be_null()
    expect(job.last_modified_at).not_to_be_null()
    expect(job.executions).to_be_empty()
Example #7
def test_job_create_or_update1(client):  # pylint: disable=unused-argument
    """Test creating or updating a new job"""

    task_id = str(uuid4())
    job_id = str(uuid4())

    task = Task.create_task(task_id)
    job = task.create_or_update_job(job_id, "image", "command")

    expect(job.job_id).to_equal(str(job_id))
    expect(job.created_at).not_to_be_null()
    expect(job.last_modified_at).not_to_be_null()
    expect(job.executions).to_be_empty()
Example #8
def test_job_get_by_job_id(client):  # pylint: disable=unused-argument
    """Test getting a job by id"""

    task_id = str(uuid4())
    task = Task.create_task(task_id)

    job = task.create_job()

    topic = Job.get_by_id(task_id, job.job_id)
    expect(topic).not_to_be_null()
    expect(topic.job_id).to_equal(str(job.job_id))

    topic = Job.get_by_id("invalid", "invalid")
    expect(topic).to_be_null()
Example #9
def test_job_create_or_update2(client):  # pylint: disable=unused-argument
    """Test creating or updating an existing job"""

    task_id = str(uuid4())

    task = Task.create_task(task_id)
    job = task.create_job()

    job_id = str(job.job_id)
    new_job = task.create_or_update_job(job_id)

    expect(str(new_job.id)).to_equal(str(job.id))
    expect(new_job.created_at).not_to_be_null()
    expect(new_job.last_modified_at).not_to_be_null()
    expect(new_job.executions).to_be_empty()
Example #10
def get_tasks():
    # Paginated listing of all tasks.
    logger = g.logger.bind(operation="get_tasks")

    page, error = get_current_page(logger)

    if error:
        return error

    per_page = current_app.config["PAGINATION_PER_PAGE"]

    logger.debug(f"Getting tasks page={page} per_page={per_page}...")
    paginator = Task.get_tasks(page=page, per_page=per_page)

    logger.debug("Tasks retrieved successfully...")

    next_url = None

    if paginator.has_next:
        next_url = url_for("task.get_tasks",
                           page=paginator.next_num,
                           _external=True)

    prev_url = None

    if paginator.has_prev:
        prev_url = url_for("task.get_tasks",
                           page=paginator.prev_num,
                           _external=True)

    data = {
        "items": [],
        "total": paginator.total,
        "page": paginator.page,
        "pages": paginator.pages,
        "perPage": paginator.per_page,
        "hasNext": paginator.has_next,
        "hasPrev": paginator.has_prev,
        "nextUrl": next_url,
        "prevUrl": prev_url,
    }

    for task in paginator.items:
        data["items"].append(task.to_dict())

    return jsonify(data)
Example #11
def test_enqueue2(client):
    """Test enqueue a job with the same task does not create a new task"""

    with client.application.app_context():
        task_id = str(uuid4())

        data = {"image": "ubuntu", "command": "ls"}

        options = dict(
            data=dumps(data),
            headers={"Content-Type": "application/json"},
            follow_redirects=True,
        )

        response = client.post(f"/tasks/{task_id}/", **options)
        expect(response.status_code).to_equal(200)

        obj = loads(response.data)
        job_id = obj["jobId"]
        expect(job_id).not_to_be_null()
        expect(obj["queueJobId"]).not_to_be_null()

        response = client.post(f"/tasks/{task_id}/", **options)
        expect(response.status_code).to_equal(200)
        obj = loads(response.data)
        job_id = obj["jobId"]
        expect(job_id).not_to_be_null()
        expect(obj["queueJobId"]).not_to_be_null()

        expect(obj).to_be_enqueued()

        task = Task.get_by_task_id(obj["taskId"])
        expect(task).not_to_be_null()
        expect(task.jobs).not_to_be_empty()

        count = Task.objects.count()
        expect(count).to_equal(1)
Example #12
    def new(task_id):
        # Nested helper (note the indentation): the Task import is deferred
        # until call time, and a fresh Task is created for the given id.
        from fastlane.models import Task

        return Task.create_task(task_id)
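
The indentation shows that new is defined inside something else. One common way to expose such a helper in a test suite is a pytest factory fixture; the wrapper below is an assumption for illustration only, not code from fastlane's tests.

# Assumed wrapper (hypothetical fixture name), shown only to give the nested
# helper above some context.
import pytest


@pytest.fixture
def task_factory():
    def new(task_id):
        from fastlane.models import Task

        return Task.create_task(task_id)

    return new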