Example #1
0
def tasks():
    """Handle the /tasks collection.

    GET  -- List all tasks ordered by when they were added.  Returns a
            JSON array of task URLs, or of full task records (with an
            'href' added) when the request is expanded.  An optional
            'json' argument restricts the list to tasks whose JSON
            contains the given subdocument.

    POST -- Create a new task: validate the test spec, determine the
            participants, pick a tool common to all of them, check the
            request against the limit system, post the task locally
            (disabled) and to every other participant, then enable it
            for scheduling.  Returns the new task's URL.

    Any other method gets a 405.
    """

    if request.method == 'GET':

        # Whether to return full task records instead of bare URLs.
        expanded = is_expanded()

        query = """
            SELECT json, uuid
            FROM task
            """
        args = []

        try:
            # Parsed/validated form of the 'json' query argument, or
            # None if the argument wasn't supplied.
            json_query = arg_json("json")
        except ValueError as ex:
            return bad_request(str(ex))

        if json_query is not None:
            query += "WHERE json @> %s"
            # NOTE(review): this binds the *raw* query argument rather
            # than the validated json_query parsed above -- confirm
            # that's intentional.
            args.append(request.args.get("json"))

        query += " ORDER BY added"

        try:
            cursor = dbcursor_query(query, args)
        except Exception as ex:
            return error(str(ex))

        result = []
        for row in cursor:
            # row[0] is the task JSON, row[1] its UUID.
            url = base_url(row[1])
            if not expanded:
                result.append(url)
                continue
            row[0]['href'] = url
            result.append(row[0])
        return json_response(result)

    elif request.method == 'POST':

        try:
            task = pscheduler.json_load(request.data)
        except ValueError:
            # NOTE(review): str + bytes concatenation; fine on Python 2,
            # would raise on Python 3 -- confirm target interpreter.
            return bad_request("Invalid JSON:" + request.data)

        # TODO: Validate the JSON against a TaskSpecification


        # See if the task spec is valid

        try:
            # Delegate spec validation to the test plugin itself.
            returncode, stdout, stderr = pscheduler.run_program(
                [ "pscheduler", "internal", "invoke", "test",
                  task['test']['type'], "spec-is-valid" ],
                stdin = pscheduler.json_dump(task['test']['spec'])
                )

            if returncode != 0:
                return error("Invalid test specification: " + stderr)
        except Exception as ex:
            return error("Unable to validate test spec: " + str(ex))

        log.debug("Validated test: %s", pscheduler.json_dump(task['test']))


        # Find the participants

        try:
            # The test plugin reports which hosts take part; a None
            # entry means "this host".
            returncode, stdout, stderr = pscheduler.run_program(
                [ "pscheduler", "internal", "invoke", "test",
                  task['test']['type'], "participants" ],
                stdin = pscheduler.json_dump(task['test']['spec'])
                )

            if returncode != 0:
                return error("Unable to determine participants: " + stderr)

            participants = [ host if host is not None
                             else pscheduler.api_this_host()
                             for host in pscheduler.json_load(stdout)["participants"] ]
        except Exception as ex:
            return error("Unable to determine participants: " + str(ex))
        nparticipants = len(participants)

        # TODO: The participants must be unique.  This should be
        # verified by fetching the host name from each one.

        #
        # TOOL SELECTION
        #

        # TODO: Need to provide for tool being specified by the task
        # package.

        # One entry per participant: the tools that participant offers
        # for this test.
        tools = []

        for participant in participants:

            try:
                # TODO: This will fail with a very large test spec.
                status, result = pscheduler.url_get(
                    pscheduler.api_url(participant, "tools"),
                    params={ 'test': pscheduler.json_dump(task['test']) }
                    )
                if status != 200:
                    raise Exception("%d: %s" % (status, result))
                tools.append(result)
            except Exception as ex:
                return error("Error getting tools from %s: %s" \
                                     % (participant, str(ex)))
            log.debug("Participant %s offers tools %s", participant, tools)

        if len(tools) != nparticipants:
            return error("Didn't get a full set of tool responses")

        # Honor an explicit tool preference list if the task supplied one.
        if "tools" in task:
            tool = pick_tool(tools, pick_from=task['tools'])
        else:
            tool = pick_tool(tools)

        if tool is None:
            # TODO: This could stand some additional diagnostics.
            return no_can_do("Couldn't find a tool in common among the participants.")

        task['tool'] = tool

        #
        # TASK CREATION
        #

        task_data = pscheduler.json_dump(task)
        log.debug("Task data: %s", task_data)

        # URLs of tasks successfully posted to other participants,
        # kept so they can be rolled back on failure.
        tasks_posted = []

        # Evaluate the task against the limits and reject the request
        # if it doesn't pass.

        log.debug("Checking limits on %s", task["test"])

        (processor, whynot) = limitprocessor()
        if processor is None:
            log.debug("Limit processor is not initialized. %s", whynot)
            return no_can_do("Limit processor is not initialized: %s" % whynot)

        # TODO: This is cooked up in two places.  Make a function of it.
        # Hints passed to the limit processor about the requester.
        hints = {
            "ip": request.remote_addr
            }
        hints_data = pscheduler.json_dump(hints)

        log.debug("Processor = %s" % processor)
        passed, diags = processor.process(task["test"], hints)

        if not passed:
            return forbidden("Task forbidden by limits:\n" + diags)

        # Post the lead with the local database, which also assigns
        # its UUID.  Make it disabled so the scheduler doesn't try to
        # do anything with it until the task has been submitted to all
        # of the other participants.

        try:
            cursor = dbcursor_query("SELECT * FROM api_task_post(%s, %s, 0, NULL, FALSE)",
                                    [task_data, hints_data], onerow=True)
        except Exception as ex:
            return error(str(ex.diag.message_primary))

        if cursor.rowcount == 0:
            return error("Task post failed; poster returned nothing.")

        task_uuid = cursor.fetchone()[0]

        log.debug("Tasked lead, UUID %s", task_uuid)

        # Other participants get the UUID forced upon them.

        for participant in range(1,nparticipants):
            part_name = participants[participant]
            try:
                log.debug("Tasking %d@%s: %s", participant, part_name, task_data)
                post_url = pscheduler.api_url(part_name,
                                              'tasks/' + task_uuid)
                log.debug("Posting task to %s", post_url)
                status, result = pscheduler.url_post(
                    post_url,
                    params={ 'participant': participant },
                    data=task_data,
                    json=False,
                    throw=False)
                log.debug("Remote returned %d: %s", status, result)
                if status != 200:
                    raise Exception("Unable to post task to %s: %s"
                                    % (part_name, result))
                tasks_posted.append(result)

            except Exception as ex:

                log.exception()

                # Roll back: remove the task from every participant
                # that already accepted it, and locally.
                for url in tasks_posted:
                    # TODO: Handle failure?
                    # NOTE(review): requests.delete() returns a single
                    # Response object; unpacking it into (status,
                    # result) looks wrong -- confirm.
                    status, result = requests.delete(url)

                    # NOTE(review): the local delete runs once per
                    # posted URL; it probably belongs outside this
                    # loop -- confirm.
                    try:
                        dbcursor_query("SELECT api_task_delete(%s)",
                                       [task_uuid])
                    except Exception as ex:
                        log.exception()

                return error("Error while tasking %d@%s: %s" % (participant, part_name, ex))


        # Enable the task so the scheduler will schedule it.
        try:
            dbcursor_query("SELECT api_task_enable(%s)", [task_uuid])
        except Exception as ex:
            log.exception()
            return error("Failed to enable task %s.  See system logs." % task_uuid)
        log.debug("Task enabled for scheduling.")

        return ok_json("%s/%s" % (request.base_url, task_uuid))

    else:

        return not_allowed()
Example #2
0
def tasks_uuid(uuid):
    """Handle a single task identified by its UUID.

    GET    -- Return the task's JSON, with private fields redacted and,
              when the 'detail' argument is set, server-derived
              scheduling details added.

    POST   -- Accept a task forced onto this host by the lead
              participant (participants 1+): run the limit processor
              and store the task with the lead-assigned UUID.

    DELETE -- Disable the task so it is no longer scheduled.

    Any other method gets a 405.
    """
    if request.method == 'GET':

        # Get a task, adding server-derived details if a 'detail'
        # argument is present.

        try:
            cursor = dbcursor_query("""
                SELECT
                    task.json,
                    task.added,
                    task.start,
                    task.slip,
                    task.duration,
                    task.post,
                    task.runs,
                    task.participants,
                    scheduling_class.anytime,
                    scheduling_class.exclusive,
                    scheduling_class.multi_result,
                    task.participant,
                    task.enabled,
                    task.cli
                FROM
                    task
                    JOIN test ON test.id = task.test
                    JOIN scheduling_class
                        ON scheduling_class.id = test.scheduling_class
                WHERE uuid = %s
            """, [uuid])
        except Exception as ex:
            return error(str(ex))

        if cursor.rowcount == 0:
            return not_found()

        row = cursor.fetchone()
        if row is None:
            return not_found()
        json = row[0]

        # Redact anything in the test spec or archivers that's marked
        # private as well as _key at the top level if there is one.

        if "_key" in json:
            json["_key"] = None 

        json["test"]["spec"] = pscheduler.json_decomment(
            json["test"]["spec"], prefix="_", null=True)

        try:
            for archive in range(0,len(json["archives"])):
                json["archives"][archive]["data"] = pscheduler.json_decomment(
                    json["archives"][archive]["data"], prefix="_", null=True)
        except KeyError:
            pass  # Don't care if not there.

        # Add details if we were asked for them.

        if arg_boolean('detail'):

            part_list = row[7];
            # The database is not supposed to allow this, but spit out
            # a sane default as a last resort in case it happens.
            if part_list is None:
                part_list = [None]
            # NOTE(review): row[10] is scheduling_class.multi_result;
            # comparing it to 0 here looks like it was meant to be
            # row[11] (task.participant) -- confirm.
            if row[10] == 0 and part_list[0] is None:
                part_list[0] = pscheduler.api_this_host()

            # ISO 8601 renderings of the timestamps/intervals, with
            # None passed through for absent values.
            json['detail'] = {
                'added': None if row[1] is None \
                    else pscheduler.datetime_as_iso8601(row[1]),
                'start': None if row[2] is None \
                    else pscheduler.datetime_as_iso8601(row[2]),
                'slip': None if row[3] is None \
                    else pscheduler.timedelta_as_iso8601(row[3]),
                'duration': None if row[4] is None \
                    else pscheduler.timedelta_as_iso8601(row[4]),
                'post': None if row[5] is None \
                    else pscheduler.timedelta_as_iso8601(row[5]),
                'runs': None if row[6] is None \
                    else int(row[6]),
                'participants': part_list,
                'anytime':  row[8],
                'exclusive':  row[9],
                'multi-result':  row[10],
                'enabled':  row[12],
                'cli':  row[13]
                }

        return ok_json(json)

    elif request.method == 'POST':

        log.debug("Posting to %s", uuid)
        log.debug("Data is %s", request.data)

        # TODO: This is only for participant 1+
        # TODO: This should probably a PUT and not a POST.

        try:
            json_in = pscheduler.json_load(request.data)
        except ValueError:
            return bad_request("Invalid JSON")
        log.debug("JSON is %s", json_in)

        try:
            # Which participant slot this host fills (>= 1).
            participant = arg_cardinal('participant')
        except ValueError as ex:
            return bad_request("Invalid participant: " + str(ex))
        log.debug("Participant %d", participant)

        # Evaluate the task against the limits and reject the request
        # if it doesn't pass.

        log.debug("Checking limits on task")

        processor, whynot = limitprocessor()
        if processor is None:
            message = "Limit processor is not initialized: %s" % whynot
            log.debug(message)
            return no_can_do(message)

        # TODO: This is cooked up in two places.  Make a function of it.
        # Hints passed to the limit processor about the requester.
        hints = {
            "ip": request.remote_addr
            }
        hints_data = pscheduler.json_dump(hints)

        passed, diags = processor.process(json_in["test"], hints)

        if not passed:
            return forbidden("Task forbidden by limits:\n" + diags)
        log.debug("Limits passed")

        # TODO: Pluck UUID from URI
        # NOTE(review): this rebinds the 'uuid' parameter from the
        # request URL's last path element -- confirm they always agree.
        uuid = url_last_in_path(request.url)

        log.debug("Posting task %s", uuid)

        try:
            cursor = dbcursor_query(
                "SELECT * FROM api_task_post(%s, %s, %s, %s)",
                [request.data, hints_data, participant, uuid])
        except Exception as ex:
            return error(str(ex))
        if cursor.rowcount == 0:
            return error("Task post failed; poster returned nothing.")
        # TODO: Assert that rowcount is 1
        log.debug("All done: %s", base_url())
        return ok(base_url())

    elif request.method == 'DELETE':

        # Build a URL template ("%s" in the host slot) so the database
        # can address the task on any participant.
        parsed = list(urlparse.urlsplit(request.url))
        parsed[1] = "%s"
        template = urlparse.urlunsplit(parsed)

        try:
            cursor = dbcursor_query(
                "SELECT api_task_disable(%s, %s)", [uuid, template])
        except Exception as ex:
            return error(str(ex))

        return ok()

    else:

        return not_allowed()
Example #3
0
def hostname():
    """Return the name of this host as a JSON response."""
    this_host = pscheduler.api_this_host()
    return ok_json(this_host)
Example #4
0
def root():
    """Return a JSON greeting identifying this pScheduler API server."""
    greeting = "This is the pScheduler API server on %s (%s)." \
               % (server_hostname(), pscheduler.api_this_host())
    return ok_json(greeting)
Example #5
0
def hostname():
    """Return the name of this host as a JSON response."""
    name = pscheduler.api_this_host()
    return ok_json(name)
Example #6
0
def root():
    """Return a plain greeting identifying this pScheduler API server."""
    greeting = '"This is the pScheduler API server on %s."' \
               % pscheduler.api_this_host()
    return ok(greeting)
Example #7
0
def tasks_uuid_runs_run(task, run):
    """Handle a single run of a task.

    GET    -- Return the run's record (times, participants, results,
              state).  'wait-local' / 'wait-merged' poll until the
              corresponding result appears; run == 'first' resolves to
              the task's first run.

    PUT    -- If the run doesn't exist, create it from the posted data
              (participants 1+).  Otherwise update exactly one of
              'part-data-full' or 'result-full'.

    DELETE -- Remove the run.

    Any other method gets a 405.
    """

    if task is None:
        return bad_request("Missing or invalid task")

    if run is None:
        return bad_request("Missing or invalid run")

    if request.method == 'GET':

        # Wait for there to be a local result
        wait_local = arg_boolean('wait-local')

        # Wait for there to be a merged result
        wait_merged = arg_boolean('wait-merged')

        if wait_local and wait_merged:
            return error("Cannot wait on local and merged results")

        # If asked for 'first', dig up the first run and use its UUID.

        if run == 'first':
            # 60 tries at 0.5s intervals == 30 sec.
            # NOTE(review): the sleep below is 1.0s, so this is
            # actually up to 60 seconds -- confirm intent.
            tries = 60
            while tries > 0:
                try:
                    run = __runs_first_run(task)
                except Exception as ex:
                    log.exception()
                    return error(str(ex))
                if run is not None:
                    break
                time.sleep(1.0)
                tries -= 1

            if run is None:
                return not_found()


        # 60 tries at 0.5s intervals == 30 sec.
        tries = 60 if (wait_local or wait_merged) else 1

        while tries:

            try:
                cursor = dbcursor_query(
                    """
                    SELECT
                        lower(run.times),
                        upper(run.times),
                        upper(run.times) - lower(run.times),
                        task.participant,
                        task.nparticipants,
                        task.participants,
                        run.part_data,
                        run.part_data_full,
                        run.result,
                        run.result_full,
                        run.result_merged,
                        run_state.enum,
                        run_state.display,
                        run.errors,
                        run.clock_survey
                    FROM
                        run
                        JOIN task ON task.id = run.task
                        JOIN run_state ON run_state.id = run.state
                    WHERE
                        task.uuid = %s
                        AND run.uuid = %s""", [task, run])
            except Exception as ex:
                log.exception()
                return error(str(ex))

            if cursor.rowcount == 0:
                return not_found()

            row = cursor.fetchone()

            if not (wait_local or wait_merged):
                break
            else:
                # NOTE(review): row[7] is part_data_full and row[9] is
                # result_full; for "local"/"merged" results these look
                # like they should be row[8] (result) and row[10]
                # (result_merged) -- confirm against the column list.
                if (wait_local and row[7] is None) \
                        or (wait_merged and row[9] is None):
                    time.sleep(0.5)
                    tries -= 1
                else:
                    break

        # Return a result whether or not we timed out and let the
        # client sort it out.

        result = {}

        # This strips any query parameters and replaces the last item
        # with the run, which might be needed if the 'first' option
        # was used.

        href_path_parts = urlparse.urlparse(request.url).path.split('/')
        href_path_parts[-1] = run
        href_path = '/'.join(href_path_parts)
        href = urlparse.urljoin( request.url, href_path )

        result['href'] = href
        result['start-time'] = pscheduler.datetime_as_iso8601(row[0])
        result['end-time'] = pscheduler.datetime_as_iso8601(row[1])
        result['duration'] = pscheduler.timedelta_as_iso8601(row[2])
        participant_num = row[3]
        result['participant'] = participant_num
        # A None in participant slot 0 means "this host" when we are
        # the lead participant.
        result['participants'] = [
            pscheduler.api_this_host()
            if participant is None and participant_num == 0
            else participant
            for participant in row[5]
            ]
        result['participant-data'] = row[6]
        result['participant-data-full'] = row[7]
        result['result'] = row[8]
        result['result-full'] = row[9]
        result['result-merged'] = row[10]
        result['state'] = row[11]
        result['state-display'] = row[12]
        result['errors'] = row[13]
        if row[14] is not None:
            result['clock-survey'] = row[14]
        result['task-href'] = root_url('tasks/' + task)
        result['result-href'] = href + '/result'

        return json_response(result)

    elif request.method == 'PUT':

        log.debug("Run PUT %s", request.url)

        # Get the JSON from the body
        try:
            run_data = pscheduler.json_load(request.data)
        except ValueError:
            log.exception()
            log.debug("Run data was %s", request.data)
            return error("Invalid or missing run data")

        # If the run doesn't exist, take the whole thing as if it were
        # a POST.

        try:
            cursor = dbcursor_query(
                "SELECT EXISTS (SELECT * FROM run WHERE uuid = %s)",
                [run], onerow=True)
        except Exception as ex:
            log.exception()
            return error(str(ex))

        if not cursor.fetchone()[0]:

            log.debug("Record does not exist; full PUT.")

            try:
                start_time = \
                    pscheduler.iso8601_as_datetime(run_data['start-time'])
            except KeyError:
                return bad_request("Missing start time")
            except ValueError:
                return bad_request("Invalid start time")

            # NOTE(review): the limit evaluation's result is never
            # checked; a failing 'passed' is silently ignored --
            # confirm.
            passed, diags = __evaluate_limits(task, start_time)

            try:
                cursor = dbcursor_query("SELECT api_run_post(%s, %s, %s)",
                               [task, start_time, run], onerow=True)
                log.debug("Full put of %s, got back %s", run, cursor.fetchone()[0])
            except Exception as ex:
                log.exception()
                return error(str(ex))

            return ok()

        # For anything else, only one thing can be updated at a time,
        # and even that is a select subset.

        log.debug("Record exists; partial PUT.")

        if 'part-data-full' in run_data:

            log.debug("Updating part-data-full from %s", run_data)

            try:
                part_data_full = \
                    pscheduler.json_dump(run_data['part-data-full'])
            except KeyError:
                return bad_request("Missing part-data-full")
            except ValueError:
                return bad_request("Invalid part-data-full")

            log.debug("Full data is: %s", part_data_full)

            try:
                cursor = dbcursor_query("""
                              UPDATE
                                  run
                              SET
                                  part_data_full = %s
                              WHERE
                                  uuid = %s
                                  AND EXISTS (SELECT * FROM task WHERE UUID = %s)
                              """,
                           [ part_data_full, run, task])
            except Exception as ex:
                log.exception()
                return error(str(ex))
            if cursor.rowcount != 1:
                return not_found()

            log.debug("Full data updated")

            return ok()

        elif 'result-full' in run_data:

            log.debug("Updating result-full from %s", run_data)

            try:
                result_full = \
                    pscheduler.json_dump(run_data['result-full'])
            except KeyError:
                return bad_request("Missing result-full")
            except ValueError:
                return bad_request("Invalid result-full")

            try:
                # Whether the run succeeded; drives the final state.
                succeeded = bool(run_data['succeeded'])
            except KeyError:
                return bad_request("Missing success value")
            except ValueError:
                return bad_request("Invalid success value")

            log.debug("Updating result-full: JSON %s", result_full)
            log.debug("Updating result-full: Run  %s", run)
            log.debug("Updating result-full: Task %s", task)
            try:
                cursor = dbcursor_query("""
                              UPDATE
                                  run
                              SET
                                  result_full = %s,
                                  state = CASE %s
                                      WHEN TRUE THEN run_state_finished()
                                      ELSE run_state_failed()
                                      END
                              WHERE
                                  uuid = %s
                                  AND EXISTS (SELECT * FROM task WHERE UUID = %s)
                              """,
                               [ result_full, succeeded, run, task ])
            except Exception as ex:
                log.exception()
                return error(str(ex))

            if cursor.rowcount != 1:
                return not_found()

            return ok()

        # NOTE(review): a partial PUT with neither recognized key falls
        # through here and returns None (no HTTP response) -- confirm.

    elif request.method == 'DELETE':

        # TODO: If this is the lead, the run's counterparts on the
        # other participating nodes need to be removed as well.

        try:
            cursor = dbcursor_query("""
            DELETE FROM run
            WHERE
                task in (SELECT id FROM task WHERE uuid = %s)
                AND uuid = %s 
            """, [task, run])
        except Exception as ex:
            log.exception()
            return error(str(ex))

        return ok() if cursor.rowcount == 1 else not_found()

    else:

        return not_allowed()