Ejemplo n.º 1
0
    def __init__(self, source=None):  # Open file or path to file with limit set
        """Construct a limit processor.

        The 'source' argument can be one of three types:

        string - Path to a file to open and read.

        file - Open file handle to read.

        None - The limit processor becomes inert and passes all limits
        unconditionally.
        """

        # If we were given no source, put the processor into an inert
        # mode where everything passes.
        self.inert = source is None
        if self.inert:
            return

        #
        # Load the validation data
        #

        validation_path = os.path.join(os.path.dirname(__file__), "pscheduler-limits-validate.json")
        # TODO: Throw something nicer than IOError if this fails.
        # Context manager guarantees the file is closed even if parsing raises.
        with open(validation_path, "r") as validation_file:
            validation = pscheduler.json_load(validation_file)

        #
        # Inhale the source and validate it
        #

        if isinstance(source, (str, unicode)):
            source = open(source, "r")
        elif type(source) is file:
            pass  # We're good with this.
        else:
            raise ValueError("Source must be a file or path")

        # At this point, source is a file.

        assert type(source) is file
        limit_config = pscheduler.json_load(source)

        valid, message = pscheduler.json_validate(limit_config, validation)

        if not valid:
            raise ValueError("Invalid limit file: %s" % message)

        #
        # Set up all of the stages
        #

        self.identifiers = IdentifierSet(limit_config["identifiers"])
        self.classifiers = ClassifierSet(limit_config["classifiers"], self.identifiers)
        self.limits = LimitSet(limit_config["limits"])
        self.applications = ApplicationSet(limit_config["applications"], self.classifiers, self.limits)
Ejemplo n.º 2
0
    def evaluate(self, run):  # The proposed run
        """Evaluate a proposed run against this limit.

        Returns a dict with a boolean 'passed' and, when the limit
        fails, a list of 'reasons'.
        """

        # A run of any other test type automatically fails this limit.
        if run["type"] != self.test:
            return {"passed": False, "reasons": ["Test is not '%s'" % self.test]}

        check_input = {"spec": run["spec"], "limit": self.limit}

        returncode, stdout, stderr = pscheduler.run_program(
            ["pscheduler", "internal", "invoke", "test", self.test, "limit-passes"],
            stdin=pscheduler.json_dump(check_input),
            # TODO:  Is this reasonable?
            timeout=5,
        )

        if returncode != 0:
            raise RuntimeError("Failed to validate limit: %s" % stderr)

        verdict = pscheduler.json_load(stdout)

        if verdict["passes"]:
            return {"passed": True}

        return {"passed": False, "reasons": verdict["errors"]}
Ejemplo n.º 3
0
def arg_json(name):
    """Fetch the named request argument and parse it as JSON.

    Returns None when the argument is absent.
    """
    raw = request.args.get(name)
    # json_load() raises ValueError if the text is malformed.
    return pscheduler.json_load(raw) if raw is not None else None
Ejemplo n.º 4
0
def tests_name_spec(name):
    """Convert CLI-style arguments (in the 'args' argument) to a test
    spec using the named test's 'cli-to-spec' method."""

    try:
        cursor = dbcursor_query("SELECT EXISTS (SELECT * FROM test WHERE NAME = %s)",
                                [name])
    except Exception as ex:
        return error(str(ex))

    # No such test installed on this system.
    if not cursor.fetchone()[0]:
        return not_found()

    try:
        args = arg_json('args')
    except ValueError:
        return error("Invalid JSON passed to 'args'")

    status, stdout, stderr = pscheduler.run_program(
        ['pscheduler', 'internal', 'invoke', 'test', name, 'cli-to-spec'],
        stdin=pscheduler.json_dump(args),
        short=True,
    )

    if status != 0:
        return bad_request(stderr)

    # The extra parse here makes 'pretty' work.
    return ok_json(pscheduler.json_load(stdout))
Ejemplo n.º 5
0
 def create_metadata(self, metadata):
     """POST metadata to the server.

     Returns a (success, result) tuple: on success the parsed JSON the
     server returned, otherwise an error string.
     """
     post_url = self.url
     if not post_url.endswith('/'):
         post_url += '/'
     log.debug("Posting metadata to %s: %s" % (post_url, metadata))
     r = requests.post(post_url, data=pscheduler.json_dump(metadata), headers=self.headers, verify=self.verify_ssl)
     if r.status_code not in (200, 201):
         try:
             # Prefer the server's structured error detail if it parses.
             return False, "%d: %s" % (r.status_code, pscheduler.json_load(r.text)['detail'])
         except Exception:
             return False, "%d: %s" % (r.status_code, r.text)
     try:
         rjson = pscheduler.json_load(r.text)
         log.debug("Metadata POST result: %s" % rjson)
     except Exception:
         return False, "Invalid JSON returned from server: %s" % r.text
     return True, rjson
Ejemplo n.º 6
0
 def create_data(self, metadata_key, data_points):
     """PUT a batch of data points under the given metadata key.

     Returns a (success, error-message) tuple; the message is empty on
     success.  A 409 (duplicate data point) is treated as success.
     """
     put_url = self.url
     if not put_url.endswith('/'):
         put_url += '/'
     put_url += ("%s/" % metadata_key)
     data = { 'data': data_points }
     log.debug("Putting data to %s: %s" % (put_url, data))
     r = requests.put(put_url, data=pscheduler.json_dump(data), headers=self.headers, verify=self.verify_ssl)
     if r.status_code == 409:
         # Duplicate data; the server already has this point.
         log.debug("Attempted to add duplicate data point. Skipping")
     elif r.status_code not in (200, 201):
         try:
             # Prefer the server's structured error detail if it parses.
             return False, "%d: %s" % (r.status_code, pscheduler.json_load(r.text)['detail'])
         except Exception:
             return False, "%d: %s" % (r.status_code, r.text)

     return True, ""
Ejemplo n.º 7
0
    def __init__(self, data):  # Data suitable for this class
        """Validate 'data', then ask the test's 'limit-is-valid' method
        to vet the limit itself.  Raises ValueError on invalid input."""

        valid, message = test_data_is_valid(data)
        if not valid:
            raise ValueError("Invalid data: %s" % message)

        self.test = data["test"]
        self.limit = data["limit"]

        status, out, err = pscheduler.run_program(
            ["pscheduler", "internal", "invoke", "test", self.test, "limit-is-valid"],
            stdin=pscheduler.json_dump(self.limit),
            # TODO:  Is this reasonable?
            timeout=5,
        )

        if status != 0:
            raise RuntimeError("Failed to validate limit: %s" % err)

        verdict = pscheduler.json_load(out)
        if not verdict["valid"]:
            raise ValueError("Invalid limit: %s" % verdict["message"])
Ejemplo n.º 8
0
def tasks_uuid_cli(uuid):
    """Return the CLI equivalent of a task's test spec as a JSON list
    whose first element is the test name."""

    try:
        cursor = dbcursor_query(
            """SELECT
                   task.json #>> '{test, spec}',
                   test.name
               FROM
                   task
                   JOIN test on test.id = task.test
               WHERE task.uuid = %s""", [uuid])
    except Exception as ex:
        return error(str(ex))

    if cursor.rowcount == 0:
        return not_found()

    row = cursor.fetchone()
    if row is None:
        return not_found()
    spec_json, test_name = row

    # Have the test plugin translate the spec back into CLI arguments.
    try:
        status, stdout, stderr = pscheduler.run_program(
            [ "pscheduler", "internal", "invoke", "test",
              test_name, "spec-to-cli" ], stdin = spec_json )
        if status != 0:
            return error("Unable to convert test spec: " + stderr)
    except Exception as ex:
        return error("Unable to convert test spec: " + str(ex))

    cli_args = pscheduler.json_load(stdout)
    cli_args.insert(0, test_name)

    return ok(pscheduler.json_dump(cli_args))
Ejemplo n.º 9
0
def tests_name_participants(name):
    """Return the participant list for the test spec passed in the
    'spec' argument."""

    test_spec = request.args.get('spec')
    if test_spec is None:
        return bad_request("No test spec provided")

    program = ["pscheduler", "internal", "invoke", "test", name,
               "participants"]
    try:
        status, out, err = pscheduler.run_program(program, stdin=test_spec)
    except KeyError:
        return bad_request("Invalid spec")
    except Exception as ex:
        return bad_request(ex)

    if status != 0:
        return bad_request(err)

    # If this fails because of bad JSON, an exception will be thrown,
    # caught and logged.
    return json_response(pscheduler.json_load(out))
Ejemplo n.º 10
0
def tests_name_lead(name):
    """Return the lead participant for the test spec passed in the
    'spec' argument."""

    test_spec = request.args.get('spec')
    if test_spec is None:
        return bad_request("No test spec provided")

    try:
        status, out, err = pscheduler.run_program(
            ["pscheduler", "internal", "invoke", "test", name,
             "participants"],
            stdin=test_spec)
    except KeyError:
        return bad_request("Invalid spec")
    except Exception as ex:
        return bad_request(ex)

    if status != 0:
        return bad_request(err)

    # The lead is the first entry in the participant list.
    lead = pscheduler.json_load(out)['participants'][0]

    return json_response(lead)
Ejemplo n.º 11
0
def tasks_uuid(uuid):
    """REST handler for a single task.

    GET    - Return the task's JSON, redacting private data and adding
             server-derived details if a 'detail' argument is present.
    POST   - Store a task posted by its lead participant (intended for
             participants 1 and up).
    DELETE - Disable the task.
    """
    if request.method == 'GET':

        # Get a task, adding server-derived details if a 'detail'
        # argument is present.

        try:
            # Row layout: 0=json, 1=added, 2=start, 3=slip, 4=duration,
            # 5=post, 6=runs, 7=participants, 8=anytime, 9=exclusive,
            # 10=multi_result, 11=participant, 12=enabled, 13=cli
            cursor = dbcursor_query("""
                SELECT
                    task.json,
                    task.added,
                    task.start,
                    task.slip,
                    task.duration,
                    task.post,
                    task.runs,
                    task.participants,
                    scheduling_class.anytime,
                    scheduling_class.exclusive,
                    scheduling_class.multi_result,
                    task.participant,
                    task.enabled,
                    task.cli
                FROM
                    task
                    JOIN test ON test.id = task.test
                    JOIN scheduling_class
                        ON scheduling_class.id = test.scheduling_class
                WHERE uuid = %s
            """, [uuid])
        except Exception as ex:
            return error(str(ex))

        if cursor.rowcount == 0:
            return not_found()

        row = cursor.fetchone()
        if row is None:
            return not_found()
        json = row[0]

        # Redact anything in the test spec or archivers that's marked
        # private as well as _key at the top level if there is one.

        if "_key" in json:
            json["_key"] = None 

        json["test"]["spec"] = pscheduler.json_decomment(
            json["test"]["spec"], prefix="_", null=True)

        try:
            for archive in range(0,len(json["archives"])):
                json["archives"][archive]["data"] = pscheduler.json_decomment(
                    json["archives"][archive]["data"], prefix="_", null=True)
        except KeyError:
            pass  # Don't care if not there.

        # Add details if we were asked for them.

        if arg_boolean('detail'):

            part_list = row[7];
            # The database is not supposed to allow this, but spit out
            # a sane default as a last resort in case it happens.
            if part_list is None:
                part_list = [None]
            # NOTE(review): row[10] is multi_result; the participant
            # number is row[11].  Confirm this index is intended.
            if row[10] == 0 and part_list[0] is None:
                part_list[0] = pscheduler.api_this_host()

            json['detail'] = {
                'added': None if row[1] is None \
                    else pscheduler.datetime_as_iso8601(row[1]),
                'start': None if row[2] is None \
                    else pscheduler.datetime_as_iso8601(row[2]),
                'slip': None if row[3] is None \
                    else pscheduler.timedelta_as_iso8601(row[3]),
                'duration': None if row[4] is None \
                    else pscheduler.timedelta_as_iso8601(row[4]),
                'post': None if row[5] is None \
                    else pscheduler.timedelta_as_iso8601(row[5]),
                'runs': None if row[6] is None \
                    else int(row[6]),
                'participants': part_list,
                'anytime':  row[8],
                'exclusive':  row[9],
                'multi-result':  row[10],
                'enabled':  row[12],
                'cli':  row[13]
                }

        return ok_json(json)

    elif request.method == 'POST':

        log.debug("Posting to %s", uuid)
        log.debug("Data is %s", request.data)

        # TODO: This is only for participant 1+
        # TODO: This should probably be a PUT and not a POST.

        try:
            json_in = pscheduler.json_load(request.data)
        except ValueError:
            return bad_request("Invalid JSON")
        log.debug("JSON is %s", json_in)

        try:
            participant = arg_cardinal('participant')
        except ValueError as ex:
            return bad_request("Invalid participant: " + str(ex))
        log.debug("Participant %d", participant)

        # Evaluate the task against the limits and reject the request
        # if it doesn't pass.

        log.debug("Checking limits on task")

        processor, whynot = limitprocessor()
        if processor is None:
            message = "Limit processor is not initialized: %s" % whynot
            log.debug(message)
            return no_can_do(message)

        # TODO: This is cooked up in two places.  Make a function of it.
        hints = {
            "ip": request.remote_addr
            }
        hints_data = pscheduler.json_dump(hints)

        passed, diags = processor.process(json_in["test"], hints)

        if not passed:
            return forbidden("Task forbidden by limits:\n" + diags)
        log.debug("Limits passed")

        # TODO: Pluck UUID from URI
        uuid = url_last_in_path(request.url)

        log.debug("Posting task %s", uuid)

        try:
            cursor = dbcursor_query(
                "SELECT * FROM api_task_post(%s, %s, %s, %s)",
                [request.data, hints_data, participant, uuid])
        except Exception as ex:
            return error(str(ex))
        if cursor.rowcount == 0:
            return error("Task post failed; poster returned nothing.")
        # TODO: Assert that rowcount is 1
        log.debug("All done: %s", base_url())
        return ok(base_url())

    elif request.method == 'DELETE':

        # Build a URL template with the hostname replaced by '%s' for
        # the database procedure to fill in.
        parsed = list(urlparse.urlsplit(request.url))
        parsed[1] = "%s"
        template = urlparse.urlunsplit(parsed)

        try:
            cursor = dbcursor_query(
                "SELECT api_task_disable(%s, %s)", [uuid, template])
        except Exception as ex:
            return error(str(ex))

        return ok()

    else:

        return not_allowed()
Ejemplo n.º 12
0
def tasks():
    """REST handler for the task collection.

    GET  - List task URLs (full records when expanded), optionally
           filtered by a JSON containment query in the 'json' argument.
    POST - Create a task: validate the test spec, determine the
           participants, pick a common tool, check limits, post the
           task locally and then to the other participants, and
           finally enable it for scheduling.
    """

    if request.method == 'GET':

        expanded = is_expanded()

        query = """
            SELECT json, uuid
            FROM task
            """
        args = []

        try:
            json_query = arg_json("json")
        except ValueError as ex:
            return bad_request(str(ex))

        if json_query is not None:
            query += "WHERE json @> %s"
            args.append(request.args.get("json"))

        query += " ORDER BY added"

        try:
            cursor = dbcursor_query(query, args)
        except Exception as ex:
            return error(str(ex))

        result = []
        for row in cursor:
            url = base_url(row[1])
            if not expanded:
                result.append(url)
                continue
            row[0]['href'] = url
            result.append(row[0])
        return json_response(result)

    elif request.method == 'POST':

        try:
            task = pscheduler.json_load(request.data)
        except ValueError:
            return bad_request("Invalid JSON:" + request.data)

        # TODO: Validate the JSON against a TaskSpecification


        # See if the task spec is valid

        try:
            returncode, stdout, stderr = pscheduler.run_program(
                [ "pscheduler", "internal", "invoke", "test",
                  task['test']['type'], "spec-is-valid" ],
                stdin = pscheduler.json_dump(task['test']['spec'])
                )

            if returncode != 0:
                return error("Invalid test specification: " + stderr)
        except Exception as ex:
            return error("Unable to validate test spec: " + str(ex))

        log.debug("Validated test: %s", pscheduler.json_dump(task['test']))


        # Find the participants

        try:
            returncode, stdout, stderr = pscheduler.run_program(
                [ "pscheduler", "internal", "invoke", "test",
                  task['test']['type'], "participants" ],
                stdin = pscheduler.json_dump(task['test']['spec'])
                )

            if returncode != 0:
                return error("Unable to determine participants: " + stderr)

            # A None participant means the local host leads the test.
            participants = [ host if host is not None
                             else pscheduler.api_this_host()
                             for host in pscheduler.json_load(stdout)["participants"] ]
        except Exception as ex:
            return error("Unable to determine participants: " + str(ex))
        nparticipants = len(participants)

        # TODO: The participants must be unique.  This should be
        # verified by fetching the host name from each one.

        #
        # TOOL SELECTION
        #

        # TODO: Need to provide for tool being specified by the task
        # package.

        tools = []

        for participant in participants:

            try:
                # TODO: This will fail with a very large test spec.
                status, result = pscheduler.url_get(
                    pscheduler.api_url(participant, "tools"),
                    params={ 'test': pscheduler.json_dump(task['test']) }
                    )
                if status != 200:
                    raise Exception("%d: %s" % (status, result))
                tools.append(result)
            except Exception as ex:
                return error("Error getting tools from %s: %s" \
                                     % (participant, str(ex)))
            log.debug("Participant %s offers tools %s", participant, tools)

        if len(tools) != nparticipants:
            return error("Didn't get a full set of tool responses")

        if "tools" in task:
            tool = pick_tool(tools, pick_from=task['tools'])
        else:
            tool = pick_tool(tools)

        if tool is None:
            # TODO: This could stand some additional diagnostics.
            return no_can_do("Couldn't find a tool in common among the participants.")

        task['tool'] = tool

        #
        # TASK CREATION
        #

        task_data = pscheduler.json_dump(task)
        log.debug("Task data: %s", task_data)

        tasks_posted = []

        # Evaluate the task against the limits and reject the request
        # if it doesn't pass.

        log.debug("Checking limits on %s", task["test"])

        (processor, whynot) = limitprocessor()
        if processor is None:
            log.debug("Limit processor is not initialized. %s", whynot)
            return no_can_do("Limit processor is not initialized: %s" % whynot)

        # TODO: This is cooked up in two places.  Make a function of it.
        hints = {
            "ip": request.remote_addr
            }
        hints_data = pscheduler.json_dump(hints)

        log.debug("Processor = %s" % processor)
        passed, diags = processor.process(task["test"], hints)

        if not passed:
            return forbidden("Task forbidden by limits:\n" + diags)

        # Post the lead with the local database, which also assigns
        # its UUID.  Make it disabled so the scheduler doesn't try to
        # do anything with it until the task has been submitted to all
        # of the other participants.

        try:
            cursor = dbcursor_query("SELECT * FROM api_task_post(%s, %s, 0, NULL, FALSE)",
                                    [task_data, hints_data], onerow=True)
        except Exception as ex:
            return error(str(ex.diag.message_primary))

        if cursor.rowcount == 0:
            return error("Task post failed; poster returned nothing.")

        task_uuid = cursor.fetchone()[0]

        log.debug("Tasked lead, UUID %s", task_uuid)

        # Other participants get the UUID forced upon them.

        for participant in range(1,nparticipants):
            part_name = participants[participant]
            try:
                log.debug("Tasking %d@%s: %s", participant, part_name, task_data)
                post_url = pscheduler.api_url(part_name,
                                              'tasks/' + task_uuid)
                log.debug("Posting task to %s", post_url)
                status, result = pscheduler.url_post(
                    post_url,
                    params={ 'participant': participant },
                    data=task_data,
                    json=False,
                    throw=False)
                log.debug("Remote returned %d: %s", status, result)
                if status != 200:
                    raise Exception("Unable to post task to %s: %s"
                                    % (part_name, result))
                tasks_posted.append(result)

            except Exception as ex:

                log.exception()

                # Unwind anything already posted remotely.
                # NOTE(review): requests.delete() returns a single
                # Response object, not a (status, result) tuple -- this
                # unpacking looks broken; confirm.
                for url in tasks_posted:
                    # TODO: Handle failure?
                    status, result = requests.delete(url)

                    # NOTE(review): this local delete runs once per
                    # posted URL and its 'ex' shadows the outer
                    # exception -- confirm it belongs outside the loop.
                    try:
                        dbcursor_query("SELECT api_task_delete(%s)",
                                       [task_uuid])
                    except Exception as ex:
                        log.exception()

                return error("Error while tasking %d@%s: %s" % (participant, part_name, ex))


        # Enable the task so the scheduler will schedule it.
        try:
            dbcursor_query("SELECT api_task_enable(%s)", [task_uuid])
        except Exception as ex:
            log.exception()
            return error("Failed to enable task %s.  See system logs." % task_uuid)
        log.debug("Task enabled for scheduling.")

        return ok_json("%s/%s" % (request.base_url, task_uuid))

    else:

        return not_allowed()
Ejemplo n.º 13
0
def tasks_uuid_runs(task):
    """REST handler for the runs belonging to a task.

    GET  - Return a list of run URLs, optionally filtered by 'start',
           'end', 'upcoming' and 'limit' arguments.
    POST - Schedule a new run of the task at the requested start time.
    """

    if request.method == 'GET':

        # The query is built incrementally; all user-supplied values
        # are bound parameters except LIMIT, which arg_cardinal()
        # guarantees is an integer.
        query = "SELECT '" + base_url() + """/' || run.uuid
             FROM
                 run
                 JOIN task ON task.id = run.task
             WHERE
                task.uuid = %s"""
        args = [task]

        try:

            start_time = arg_datetime('start')
            if start_time is not None:
                query += " AND lower(times) >= %s"
                args.append(start_time)

            end_time = arg_datetime('end')
            if end_time is not None:
                query += " AND upper(times) <= %s"
                args.append(end_time)

            if arg_boolean('upcoming'):
                query += " AND (times @> normalized_now() OR lower(times) > normalized_now())"
                query += " AND state IN (run_state_pending(), run_state_on_deck(), run_state_running(), run_state_nonstart())"

            query += " ORDER BY times"

            limit = arg_cardinal('limit')
            if limit is not None:
                query += " LIMIT " + str(limit)

            # TODO: This should be expandable

        except ValueError as ex:

            return bad_request(str(ex))


        return json_query_simple(query, args, empty_ok=True)

    elif request.method == 'POST':

        log.debug("Run POST: %s --> %s", request.url, request.data)

        try:
            data = pscheduler.json_load(request.data)
            start_time = pscheduler.iso8601_as_datetime(data['start-time'])
        except KeyError:
            return bad_request("Missing start time")
        except ValueError:
            return bad_request("Invalid JSON:" + request.data)


        # NOTE(review): 'passed' is never checked afterward, so the run
        # is posted even if the limit evaluation fails -- confirm
        # whether a forbidden() return is missing here.
        try:
            passed, diags = __evaluate_limits(task, start_time)
        except Exception as ex:
            log.exception()
            return error(str(ex))

        try:
            log.debug("Posting run for task %s starting %s"
                      % (task, start_time))
            cursor = dbcursor_query("SELECT api_run_post(%s, %s, NULL, %s)",
                               [task, start_time, diags], onerow=True)
            uuid = cursor.fetchone()[0]
        except Exception as ex:
            log.exception()
            return error(str(ex))

        url = base_url() + '/' + uuid
        log.debug("New run posted to %s", url)
        return ok_json(url)

    else:

        return not_allowed()
Ejemplo n.º 14
0
def tasks_uuid_runs_run(task, run):
    """REST handler for a single run of a task.

    GET    - Return the run's details, optionally waiting for a local
             or merged result to appear.
    PUT    - Create the run (full PUT) or update one of a small set of
             fields (partial PUT).
    DELETE - Remove the run.
    """

    if task is None:
        return bad_request("Missing or invalid task")

    if run is None:
        return bad_request("Missing or invalid run")

    if request.method == 'GET':

        # Wait for there to be a local result
        wait_local = arg_boolean('wait-local')

        # Wait for there to be a merged result
        wait_merged = arg_boolean('wait-merged')

        if wait_local and wait_merged:
            return error("Cannot wait on local and merged results")

        # If asked for 'first', dig up the first run and use its UUID.

        if run == 'first':
            # 60 tries at 0.5s intervals == 30 sec.
            # NOTE(review): the loop actually sleeps 1.0s per try
            # (60 sec total) -- the comment and the sleep disagree.
            tries = 60
            while tries > 0:
                try:
                    run = __runs_first_run(task)
                except Exception as ex:
                    log.exception()
                    return error(str(ex))
                if run is not None:
                    break
                time.sleep(1.0)
                tries -= 1

            if run is None:
                return not_found()


        # 60 tries at 0.5s intervals == 30 sec.
        tries = 60 if (wait_local or wait_merged) else 1

        while tries:

            try:
                # Row layout: 0=start, 1=end, 2=duration,
                # 3=participant, 4=nparticipants, 5=participants,
                # 6=part_data, 7=part_data_full, 8=result,
                # 9=result_full, 10=result_merged, 11=state enum,
                # 12=state display, 13=errors, 14=clock_survey
                cursor = dbcursor_query(
                    """
                    SELECT
                        lower(run.times),
                        upper(run.times),
                        upper(run.times) - lower(run.times),
                        task.participant,
                        task.nparticipants,
                        task.participants,
                        run.part_data,
                        run.part_data_full,
                        run.result,
                        run.result_full,
                        run.result_merged,
                        run_state.enum,
                        run_state.display,
                        run.errors,
                        run.clock_survey
                    FROM
                        run
                        JOIN task ON task.id = run.task
                        JOIN run_state ON run_state.id = run.state
                    WHERE
                        task.uuid = %s
                        AND run.uuid = %s""", [task, run])
            except Exception as ex:
                log.exception()
                return error(str(ex))

            if cursor.rowcount == 0:
                return not_found()

            row = cursor.fetchone()

            if not (wait_local or wait_merged):
                break
            else:
                # NOTE(review): wait_local polls part_data_full (row[7])
                # and wait_merged polls result_full (row[9]) rather
                # than result (row[8]) / result_merged (row[10]) --
                # confirm these indices are intended.
                if (wait_local and row[7] is None) \
                        or (wait_merged and row[9] is None):
                    time.sleep(0.5)
                    tries -= 1
                else:
                    break

        # Return a result whether or not we timed out and let the
        # client sort it out.

        result = {}

        # This strips any query parameters and replaces the last item
        # with the run, which might be needed if the 'first' option
        # was used.

        href_path_parts = urlparse.urlparse(request.url).path.split('/')
        href_path_parts[-1] = run
        href_path = '/'.join(href_path_parts)
        href = urlparse.urljoin( request.url, href_path )

        result['href'] = href
        result['start-time'] = pscheduler.datetime_as_iso8601(row[0])
        result['end-time'] = pscheduler.datetime_as_iso8601(row[1])
        result['duration'] = pscheduler.timedelta_as_iso8601(row[2])
        participant_num = row[3]
        result['participant'] = participant_num
        # A None participant in slot 0 stands for this host when this
        # host is the lead.
        result['participants'] = [
            pscheduler.api_this_host()
            if participant is None and participant_num == 0
            else participant
            for participant in row[5]
            ]
        result['participant-data'] = row[6]
        result['participant-data-full'] = row[7]
        result['result'] = row[8]
        result['result-full'] = row[9]
        result['result-merged'] = row[10]
        result['state'] = row[11]
        result['state-display'] = row[12]
        result['errors'] = row[13]
        if row[14] is not None:
            result['clock-survey'] = row[14]
        result['task-href'] = root_url('tasks/' + task)
        result['result-href'] = href + '/result'

        return json_response(result)

    elif request.method == 'PUT':

        log.debug("Run PUT %s", request.url)

        # Get the JSON from the body
        try:
            run_data = pscheduler.json_load(request.data)
        except ValueError:
            log.exception()
            log.debug("Run data was %s", request.data)
            return error("Invalid or missing run data")

        # If the run doesn't exist, take the whole thing as if it were
        # a POST.

        try:
            cursor = dbcursor_query(
                "SELECT EXISTS (SELECT * FROM run WHERE uuid = %s)",
                [run], onerow=True)
        except Exception as ex:
            log.exception()
            return error(str(ex))

        if not cursor.fetchone()[0]:

            log.debug("Record does not exist; full PUT.")

            try:
                start_time = \
                    pscheduler.iso8601_as_datetime(run_data['start-time'])
            except KeyError:
                return bad_request("Missing start time")
            except ValueError:
                return bad_request("Invalid start time")

            # NOTE(review): the limit-evaluation result is never
            # checked -- confirm whether a forbidden() return is
            # missing here.
            passed, diags = __evaluate_limits(task, start_time)

            try:
                cursor = dbcursor_query("SELECT api_run_post(%s, %s, %s)",
                               [task, start_time, run], onerow=True)
                log.debug("Full put of %s, got back %s", run, cursor.fetchone()[0])
            except Exception as ex:
                log.exception()
                return error(str(ex))

            return ok()

        # For anything else, only one thing can be updated at a time,
        # and even that is a select subset.

        log.debug("Record exists; partial PUT.")

        if 'part-data-full' in run_data:

            log.debug("Updating part-data-full from %s", run_data)

            try:
                part_data_full = \
                    pscheduler.json_dump(run_data['part-data-full'])
            except KeyError:
                return bad_request("Missing part-data-full")
            except ValueError:
                return bad_request("Invalid part-data-full")

            log.debug("Full data is: %s", part_data_full)

            try:
                cursor = dbcursor_query("""
                              UPDATE
                                  run
                              SET
                                  part_data_full = %s
                              WHERE
                                  uuid = %s
                                  AND EXISTS (SELECT * FROM task WHERE UUID = %s)
                              """,
                           [ part_data_full, run, task])
            except Exception as ex:
                log.exception()
                return error(str(ex))
            if cursor.rowcount != 1:
                return not_found()

            log.debug("Full data updated")

            return ok()

        elif 'result-full' in run_data:

            log.debug("Updating result-full from %s", run_data)

            try:
                result_full = \
                    pscheduler.json_dump(run_data['result-full'])
            except KeyError:
                return bad_request("Missing result-full")
            except ValueError:
                return bad_request("Invalid result-full")

            try:
                succeeded = bool(run_data['succeeded'])
            except KeyError:
                return bad_request("Missing success value")
            except ValueError:
                return bad_request("Invalid success value")

            log.debug("Updating result-full: JSON %s", result_full)
            log.debug("Updating result-full: Run  %s", run)
            log.debug("Updating result-full: Task %s", task)
            try:
                # The run's final state is derived from 'succeeded'.
                cursor = dbcursor_query("""
                              UPDATE
                                  run
                              SET
                                  result_full = %s,
                                  state = CASE %s
                                      WHEN TRUE THEN run_state_finished()
                                      ELSE run_state_failed()
                                      END
                              WHERE
                                  uuid = %s
                                  AND EXISTS (SELECT * FROM task WHERE UUID = %s)
                              """,
                               [ result_full, succeeded, run, task ])
            except Exception as ex:
                log.exception()
                return error(str(ex))

            if cursor.rowcount != 1:
                return not_found()

            return ok()

        # NOTE(review): a PUT with neither recognized field falls
        # through here and implicitly returns None -- confirm a
        # bad_request() isn't intended.

    elif request.method == 'DELETE':

        # TODO: If this is the lead, the run's counterparts on the
        # other participating nodes need to be removed as well.

        try:
            cursor = dbcursor_query("""
            DELETE FROM run
            WHERE
                task in (SELECT id FROM task WHERE uuid = %s)
                AND uuid = %s 
            """, [task, run])
        except Exception as ex:
            log.exception()
            return error(str(ex))

        return ok() if cursor.rowcount == 1 else not_found()

    else:

        return not_allowed()
Ejemplo n.º 15
0
def tasks_uuid_runs(task):
    """Dispatch /tasks/<uuid>/runs.

    GET  - Return a JSON array of URLs for the task's runs, optionally
           filtered by the 'start', 'end', 'upcoming' and 'limit'
           query arguments.
    POST - Schedule a new run of the task at the requested start time
           and return the URL of the new run.
    """

    if not uuid_is_valid(task):
        return not_found()

    if request.method == 'GET':

        query = "SELECT '" + base_url() + """/' || run.uuid
             FROM
                 run
                 JOIN task ON task.id = run.task
             WHERE
                task.uuid = %s"""
        args = [task]

        try:

            # Optional time-range filters.

            start_time = arg_datetime('start')
            if start_time is not None:
                query += " AND lower(times) >= %s"
                args.append(start_time)

            end_time = arg_datetime('end')
            if end_time is not None:
                query += " AND upper(times) <= %s"
                args.append(end_time)

            if arg_boolean('upcoming'):
                # Runs that are in progress or haven't started yet...
                query += " AND (times @> normalized_now() OR lower(times) > normalized_now())"
                # ...and are in a state that can still produce a result.
                query += " AND state IN (run_state_pending(), run_state_on_deck(), run_state_running(), run_state_nonstart())"

            query += " ORDER BY times"

            limit = arg_cardinal('limit')
            if limit is not None:
                # Parameterize rather than concatenate so the value is
                # always handed to the database driver safely.
                query += " LIMIT %s"
                args.append(limit)

            # TODO: This should be expandable

        except ValueError as ex:

            return bad_request(str(ex))

        return json_query_simple(query, args, empty_ok=True)

    elif request.method == 'POST':

        log.debug("Run POST: %s --> %s", request.url, request.data)

        # Make sure the requester is allowed to write to this task.
        try:
            requester, key = task_requester_key(task)
            if requester is None:
                return not_found()

            if not access_write_task(requester, key):
                return forbidden()

        except Exception as ex:
            return error(str(ex))

        # Pull the requested start time out of the posted JSON.
        try:
            data = pscheduler.json_load(request.data, max_schema=1)
            start_time = pscheduler.iso8601_as_datetime(data['start-time'])
        except KeyError:
            return bad_request("Missing start time")
        except ValueError as ex:
            return bad_request("Invalid JSON: %s" % (str(ex)))

        # Run the proposed run through the limit system; a non-None
        # response short-circuits with the limit system's verdict.
        try:
            passed, diags, response = __evaluate_limits(task, start_time)
        except Exception as ex:
            log.exception()
            return error(str(ex))
        if response is not None:
            return response

        try:
            log.debug("Posting run for task %s starting %s" %
                      (task, start_time))
            cursor = dbcursor_query(
                "SELECT * FROM api_run_post(%s, %s, NULL, %s)",
                [task, start_time, diags],
                onerow=True)
            succeeded, uuid, conflicts, error_message = cursor.fetchone()
            cursor.close()
            if conflicts:
                return conflict(error_message)
            # Treat any non-success as an error even if no message came
            # back; otherwise a failed post would fall through and build
            # a URL from a null UUID.  (Matches the PUT handler's check
            # of 'succeeded'.)
            if not succeeded or error_message:
                return error(error_message)
        except Exception as ex:
            log.exception()
            return error(str(ex))

        url = base_url() + '/' + uuid
        log.debug("New run posted to %s", url)
        return ok_json(url)

    else:

        return not_allowed()
Ejemplo n.º 16
0
def tasks_uuid_runs_run(task, run):
    """Dispatch /tasks/<uuid>/runs/<run>.

    The 'run' argument may be a run UUID or, for GET only, the special
    values 'first' or 'next'.

    GET    - Return a JSON description of the run, optionally waiting
             for a local ('wait-local') or merged ('wait-merged')
             result to appear.
    PUT    - Create the run (full PUT) or update one of a small set of
             fields on an existing run (partial PUT).
    DELETE - Remove the run.
    """

    if not uuid_is_valid(task):
        return not_found()

    # 'first' and 'next' are only meaningful for GET; anything else
    # must be a valid run UUID.
    if ((request.method in ['PUT', 'DELETE'] and not uuid_is_valid(run))
            or (run not in ['first', 'next'] and not uuid_is_valid(run))):
        return not_found()

    if request.method == 'GET':

        # Wait for there to be a local result
        wait_local = arg_boolean('wait-local')

        # Wait for there to be a merged result
        wait_merged = arg_boolean('wait-merged')

        if wait_local and wait_merged:
            return bad_request("Cannot wait on local and merged results")

        # Figure out how long to wait in seconds.  Zero means don't
        # wait.

        wait_time = arg_integer('wait')
        if wait_time is None:
            wait_time = 30
        if wait_time < 0:
            return bad_request("Wait time must be >= 0")

        # If asked for 'first' or 'next', dig up the appropriate run
        # and use its UUID.

        if run in ['next', 'first']:
            future = run == 'next'
            wait_interval = 0.5
            tries = int(wait_time / wait_interval) if wait_time > 0 \
                    else 1
            while tries > 0:
                try:
                    run = __runs_first_run(task, future)
                except Exception as ex:
                    log.exception()
                    return error(str(ex))
                if run is not None:
                    break
                if wait_time > 0:
                    # Sleep the interval the try count was computed
                    # from; sleeping longer would make the total wait
                    # exceed 'wait_time'.
                    time.sleep(wait_interval)
                tries -= 1

            if run is None:
                return not_found()

        # 60 tries at 0.5s intervals == 30 sec.
        tries = 60 if (wait_local or wait_merged) else 1

        while tries:

            try:
                cursor = dbcursor_query(
                    """
                    SELECT
                        lower(run.times),
                        upper(run.times),
                        upper(run.times) - lower(run.times),
                        task.participant,
                        task.nparticipants,
                        task.participants,
                        run.part_data,
                        run.part_data_full,
                        run.result,
                        run.result_full,
                        run.result_merged,
                        run_state.enum,
                        run_state.display,
                        run.errors,
                        run.clock_survey,
                        run.id,
                        archiving_json(run.id),
                        run.added,
                        run_state.finished
                    FROM
                        run
                        JOIN task ON task.id = run.task
                        JOIN run_state ON run_state.id = run.state
                    WHERE
                        task.uuid = %s
                        AND run.uuid = %s""", [task, run])
            except Exception as ex:
                log.exception()
                return error(str(ex))

            if cursor.rowcount == 0:
                cursor.close()
                return not_found()

            row = cursor.fetchone()
            cursor.close()

            if not (wait_local or wait_merged):
                break
            else:
                # Keep polling until the requested result column is
                # populated: run.result (row[8]) for local,
                # run.result_merged (row[9] is result_full; merged
                # additionally requires the run state to be finished,
                # row[18]).
                if (wait_local and row[8] is None) \
                   or (wait_merged \
                       and ( (row[9] is None) or (not row[18]) ) ):
                    log.debug("Waiting for merged: %s %s", row[9], row[18])
                    time.sleep(0.5)
                    tries -= 1
                else:
                    break

        # Return a result whether or not we timed out and let the
        # client sort it out.

        result = {}

        # This strips any query parameters and replaces the last item
        # with the run, which might be needed if the 'first' option
        # was used.

        href_path_parts = urlparse.urlparse(request.url).path.split('/')
        href_path_parts[-1] = run
        href_path = '/'.join(href_path_parts)
        href = urlparse.urljoin(request.url, href_path)

        # Map the SELECTed columns (by position) into the JSON the
        # client expects.
        result['href'] = href
        result['start-time'] = pscheduler.datetime_as_iso8601(row[0])
        result['end-time'] = pscheduler.datetime_as_iso8601(row[1])
        result['duration'] = pscheduler.timedelta_as_iso8601(row[2])
        participant_num = row[3]
        result['participant'] = participant_num
        # A null participant entry for participant zero means "this
        # host"; substitute our own network location.
        result['participants'] = [
            server_netloc()
            if participant is None and participant_num == 0 else participant
            for participant in row[5]
        ]
        result['participant-data'] = row[6]
        result['participant-data-full'] = row[7]
        result['result'] = row[8]
        result['result-full'] = row[9]
        result['result-merged'] = row[10]
        result['state'] = row[11]
        result['state-display'] = row[12]
        result['errors'] = row[13]
        if row[14] is not None:
            result['clock-survey'] = row[14]
        if row[16] is not None:
            result['archivings'] = row[16]
        if row[17] is not None:
            result['added'] = pscheduler.datetime_as_iso8601(row[17])
        result['task-href'] = root_url('tasks/' + task)
        result['result-href'] = href + '/result'

        return json_response(result)

    elif request.method == 'PUT':

        log.debug("Run PUT %s", request.url)

        # Make sure the requester is allowed to write to this task.
        try:
            requester, key = task_requester_key(task)
            if requester is None:
                return not_found()

            if not access_write_task(requester, key):
                return forbidden()

        except Exception as ex:
            return error(str(ex))

        # Get the JSON from the body
        try:
            run_data = pscheduler.json_load(request.data, max_schema=1)
        except ValueError:
            log.exception()
            log.debug("Run data was %s", request.data)
            return bad_request("Invalid or missing run data")

        # If the run doesn't exist, take the whole thing as if it were
        # a POST.

        try:
            cursor = dbcursor_query(
                "SELECT EXISTS (SELECT * FROM run WHERE uuid = %s)", [run],
                onerow=True)
        except Exception as ex:
            log.exception()
            return error(str(ex))

        fetched = cursor.fetchone()[0]
        cursor.close()
        if not fetched:

            log.debug("Record does not exist; full PUT.")

            try:
                start_time = \
                    pscheduler.iso8601_as_datetime(run_data['start-time'])
            except KeyError:
                return bad_request("Missing start time")
            except ValueError:
                return bad_request("Invalid start time")

            try:

                passed, diags, response = __evaluate_limits(task, start_time)
                if response is not None:
                    return response

                # NOTE(review): the POST handler calls the
                # four-argument api_run_post(task, start, NULL, diags);
                # confirm this three-argument form (with the run UUID
                # third) is intentional.
                cursor = dbcursor_query(
                    "SELECT * FROM api_run_post(%s, %s, %s)",
                    [task, start_time, run],
                    onerow=True)
                succeeded, uuid, conflicts, error_message = cursor.fetchone()
                cursor.close()
                if conflicts:
                    return conflict(error_message)
                if not succeeded:
                    return error(error_message)
                log.debug("Full put of %s, got back %s", run, uuid)
            except Exception as ex:
                log.exception()
                return error(str(ex))

            return ok()

        # For anything else, only one thing can be updated at a time,
        # and even that is a select subset.

        log.debug("Record exists; partial PUT.")

        if 'part-data-full' in run_data:

            log.debug("Updating part-data-full from %s", run_data)

            try:
                part_data_full = \
                    pscheduler.json_dump(run_data['part-data-full'])
            except KeyError:
                return bad_request("Missing part-data-full")
            except ValueError:
                return bad_request("Invalid part-data-full")

            log.debug("Full data is: %s", part_data_full)

            try:
                cursor = dbcursor_query(
                    """
                              UPDATE
                                  run
                              SET
                                  part_data_full = %s
                              WHERE
                                  uuid = %s
                                  AND EXISTS (SELECT * FROM task WHERE UUID = %s)
                              """, [part_data_full, run, task])
            except Exception as ex:
                log.exception()
                return error(str(ex))

            rowcount = cursor.rowcount
            cursor.close()
            if rowcount != 1:
                return not_found()

            log.debug("Full data updated")

            return ok()

        elif 'result-full' in run_data:

            log.debug("Updating result-full from %s", run_data)

            try:
                result_full = \
                    pscheduler.json_dump(run_data['result-full'])
            except KeyError:
                return bad_request("Missing result-full")
            except ValueError:
                return bad_request("Invalid result-full")

            try:
                succeeded = bool(run_data['succeeded'])
            except KeyError:
                return bad_request("Missing success value")
            except ValueError:
                return bad_request("Invalid success value")

            log.debug("Updating result-full: JSON %s", result_full)
            log.debug("Updating result-full: Run  %s", run)
            log.debug("Updating result-full: Task %s", task)
            try:
                cursor = dbcursor_query(
                    """
                              UPDATE
                                  run
                              SET
                                  result_full = %s,
                                  state = CASE %s
                                      WHEN TRUE THEN run_state_finished()
                                      ELSE run_state_failed()
                                      END
                              WHERE
                                  uuid = %s
                                  AND EXISTS (SELECT * FROM task WHERE UUID = %s)
                              """, [result_full, succeeded, run, task])
            except Exception as ex:
                log.exception()
                return error(str(ex))

            rowcount = cursor.rowcount
            cursor.close()
            if rowcount != 1:
                return not_found()

            return ok()

        else:

            # The partial PUT contained nothing this handler knows how
            # to update.  Without this branch the function would fall
            # through and return None, which Flask treats as an
            # internal server error.
            return bad_request("No updatable field in request")

    elif request.method == 'DELETE':

        # TODO: If this is the lead, the run's counterparts on the
        # other participating nodes need to be removed as well.

        # Make sure the requester is allowed to write to this task.
        try:
            requester, key = task_requester_key(task)
            if requester is None:
                return not_found()

            if not access_write_task(requester, key):
                return forbidden()

        except Exception as ex:
            return error(str(ex))

        try:
            cursor = dbcursor_query(
                """
            DELETE FROM run
            WHERE
                task in (SELECT id FROM task WHERE uuid = %s)
                AND uuid = %s 
            """, [task, run])
        except Exception as ex:
            log.exception()
            return error(str(ex))

        rowcount = cursor.rowcount
        cursor.close()

        return ok() if rowcount == 1 else not_found()

    else:

        return not_allowed()