Code Example #1
    def __init__(
            self,
            data  # Data suitable for this class
    ):

        valid, message = rundaterange_data_is_valid(data)
        if not valid:
            raise ValueError("Invalid data: %s" % message)

        self.start = pscheduler.iso8601_as_datetime(data['start'])
        self.end = pscheduler.iso8601_as_datetime(data['end'])

        if self.end < self.start:
            raise ValueError("End must be after start.")

        self.overlap = data['overlap'] if 'overlap' in data else False
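
As a quick usage sketch (assuming the pscheduler package is importable and using made-up timestamps), the same parsing and validation steps can be exercised outside the class; data.get('overlap', False) is equivalent to the conditional in the constructor above:

import pscheduler

data = {
    "start": "2024-01-01T00:00:00Z",   # ISO 8601 start of the allowed range
    "end": "2024-06-30T00:00:00Z",     # ISO 8601 end of the allowed range
    "overlap": True                    # optional; treated as False when absent
}

start = pscheduler.iso8601_as_datetime(data["start"])
end = pscheduler.iso8601_as_datetime(data["end"])

if end < start:
    raise ValueError("End must be after start.")

overlap = data.get("overlap", False)
print(start, end, overlap)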
Code Example #2
    def evaluate(
            self,
            proposal  # Task and hints
    ):
        """Check that the proposed times don't overlap with this limit"""

        start = pscheduler.iso8601_as_datetime(
            proposal['task']['run_schedule']['start'])
        duration = pscheduler.iso8601_as_timedelta(
            proposal['task']['run_schedule']['duration'])
        end = start + duration

        subset = start >= self.start and end < self.end

        if self.overlap:
            passed = ((start <= self.start < end) or (start <= self.end < end)
                      or subset)
        else:
            passed = subset

        result = {"passed": passed}
        if not passed:
            result['reasons'] = ["Ranges do not match"]

        return result
Code Example #3
File: rundaterange.py  Project: perfsonar/pscheduler
    def evaluate(self,
                 run             # The proposed run
                 ):

        """Check that the proposed times don't overlap with this limit"""

        start = pscheduler.iso8601_as_datetime(run['schedule']['start'])
        duration = pscheduler.iso8601_as_timedelta(run['schedule']['duration'])
        end = start + duration

        subset = start >= self.start and end < self.end

        if self.overlap:
            passed = ( (start <= self.start < end)
                       or (start <= self.end < end)
                       or subset
                   )
        else:
            passed = subset

        result = { "passed": passed }
        if not passed:
            result['reasons'] = [ "Ranges do not match" ]

        return result
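
The predicate above distinguishes a full subset match from a partial overlap with the limit's window. A minimal, self-contained illustration of the same logic using plain datetime values (the window and run times below are invented for illustration):

from datetime import datetime, timedelta

# Limit window, analogous to self.start / self.end above
limit_start = datetime(2024, 1, 1)
limit_end = datetime(2024, 1, 31)
allow_overlap = True                 # analogous to self.overlap

# Proposed run, analogous to start / end derived from the schedule
run_start = datetime(2023, 12, 31, 12)
run_end = run_start + timedelta(hours=36)

subset = run_start >= limit_start and run_end < limit_end

if allow_overlap:
    passed = ((run_start <= limit_start < run_end)
              or (run_start <= limit_end < run_end)
              or subset)
else:
    passed = subset

print(passed)   # True: the run straddles the start of the window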
Code Example #4
File: rundaterange.py  Project: perfsonar/pscheduler
    def __init__(self,
                 data   # Data suitable for this class
                 ):

        
        valid, message = rundaterange_data_is_valid(data)
        if not valid:
            raise ValueError("Invalid data: %s" % message)

        self.start = pscheduler.iso8601_as_datetime(data['start'])
        self.end = pscheduler.iso8601_as_datetime(data['end'])

        if self.end < self.start:
            raise ValueError("End must be after start.")

        self.overlap = data['overlap'] if 'overlap' in data else False
Code Example #5
File: args.py  Project: perfsonar/pscheduler
def arg_datetime(name):
    """Fetch and validate an argument as an ISO8601 date and time,
    returning a datetime if specified, None if not, and throwing a
    ValueError if invalid."""
    argval = request.args.get(name)
    if argval is None:
        return None
    timestamp = pscheduler.iso8601_as_datetime(argval)
    if timestamp is None:
        raise ValueError("Invalid timestamp; expecting ISO8601.")
    return timestamp
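
arg_datetime depends on a Flask-style request object being available in the calling context. A minimal sketch of how it might be wired into a view (the app and the /runs route here are hypothetical, for illustration only; only the helper itself comes from the code above):

from flask import Flask, request, jsonify
import pscheduler

app = Flask(__name__)

def arg_datetime(name):
    """Fetch and validate an argument as an ISO 8601 date and time."""
    argval = request.args.get(name)
    if argval is None:
        return None
    timestamp = pscheduler.iso8601_as_datetime(argval)
    if timestamp is None:
        raise ValueError("Invalid timestamp; expecting ISO8601.")
    return timestamp

@app.route("/runs")                     # hypothetical endpoint
def runs():
    try:
        start = arg_datetime("start")   # e.g. ?start=2024-01-01T00:00:00Z
    except ValueError as ex:
        return str(ex), 400
    return jsonify({"start": None if start is None else start.isoformat()})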
Code Example #6
    def evaluate(
            self,
            proposal  # Task and hints
    ):
        """Check that the proposed times don't overlap with this limit"""

        start = pscheduler.iso8601_as_datetime(
            proposal['task']['run_schedule']['start'])
        duration = pscheduler.iso8601_as_timedelta(
            proposal['task']['run_schedule']['duration'])
        end = start + duration

        # Python's datetime doesn't have methods to get this.  Bravo.
        start_week = start.isocalendar()[1]
        end_week = end.isocalendar()[1]

        match_failures = []

        for name, lower, upper, wrap_after, wrap_to in [
                # Feel free to resurrect me if this ever wraps.  :-)
            ('year', start.year, end.year, 294276, 1),
            ('month', start.month, end.month, 12, 1),
            ('week', start_week, end_week, 53, 1),
            ('weekday', start.isoweekday(), end.isoweekday(), 7, 1),
            ('day', start.day, end.day, 31, 1),
            ('hour', start.hour, end.hour, 23, 0),
            ('minute', start.minute, end.minute, 59, 0),
            ('second', start.second, end.second, 59, 0)
        ]:

            # Don't bother matching things that weren't specified
            if name not in self.matches:
                continue

            if not wrappable_range_overlaps(lower,
                                            upper,
                                            self.matches[name],
                                            wrap_after=wrap_after,
                                            wrap_to=wrap_to,
                                            overlap=self.overlap):
                match_failures.append(name)

        result = {"passed": not match_failures}
        if match_failures:
            result['reasons'] = [
                "Mismatch on " + mis for mis in match_failures
            ]

        return result
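
The method above reads only proposal['task']['run_schedule'] and compares the calendar components of the resulting start and end against whatever ranges were configured in self.matches. A sketch of the proposal shape and the derived values (the timestamp and duration are arbitrary; the structure of self.matches entries and wrappable_range_overlaps are defined elsewhere in pScheduler and not reproduced here):

import pscheduler

proposal = {
    "task": {
        "run_schedule": {
            "start": "2024-03-15T10:30:00Z",   # ISO 8601 start time
            "duration": "PT5M"                 # ISO 8601 duration, 5 minutes
        }
    }
}

schedule = proposal["task"]["run_schedule"]
start = pscheduler.iso8601_as_datetime(schedule["start"])
end = start + pscheduler.iso8601_as_timedelta(schedule["duration"])

# The components evaluate() matches against self.matches:
print(start.year, start.month, start.isocalendar()[1],
      start.isoweekday(), start.day, start.hour, start.minute, start.second)
print(end)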
Code Example #7
File: runschedule.py  Project: perfsonar/pscheduler
    def evaluate(self,
                 run             # The proposed run
                 ):

        """Check that the proposed times don't overlap with this limit"""

        start = pscheduler.iso8601_as_datetime(run['schedule']['start'])
        duration = pscheduler.iso8601_as_timedelta(run['schedule']['duration'])
        end = start + duration

        # Python's datetime doesn't have methods to get this.  Bravo.
        start_week = start.isocalendar()[1]
        end_week = end.isocalendar()[1]

        match_failures = []

        for name, lower, upper, wrap_after, wrap_to in [
                # Feel free to resurrect me if this ever wraps.  :-)
                ('year', start.year, end.year, 294276, 1),
                ('month', start.month, end.month, 12, 1),
                ('week', start_week, end_week, 53, 1),
                ('weekday', start.isoweekday(), end.isoweekday(), 7, 1),
                ('day', start.day, end.day, 31, 1),
                ('hour', start.hour, end.hour, 23, 0),
                ('minute', start.minute, end.minute, 59, 0),
                ('second', start.second, end.second, 59, 0)
                ]:

            # Don't bother matching things that weren't specified
            if name not in self.matches:
                continue

            if not wrappable_range_overlaps(lower, upper, self.matches[name],
                                            wrap_after=wrap_after,
                                            wrap_to=wrap_to,
                                            overlap=self.overlap):
                match_failures.append(name)

        result = { "passed": not match_failures }
        if match_failures:
            result['reasons'] = [ "Mismatch on " + mis
                                  for mis in match_failures ]

        return result
Code Example #8
File: runs.py  Project: krihal/pscheduler
def tasks_uuid_runs_run(task, run):

    if not uuid_is_valid(task):
        return not_found()

    if ((request.method in ['PUT', 'DELETE'] and not uuid_is_valid(run))
            or (run not in ['first', 'next'] and not uuid_is_valid(run))):
        return not_found()

    if request.method == 'GET':

        # Wait for there to be a local result
        wait_local = arg_boolean('wait-local')

        # Wait for there to be a merged result
        wait_merged = arg_boolean('wait-merged')

        if wait_local and wait_merged:
            return bad_request("Cannot wait on local and merged results")

        # Figure out how long to wait in seconds.  Zero means don't
        # wait.

        wait_time = arg_integer('wait')
        if wait_time is None:
            wait_time = 30
        if wait_time < 0:
            return bad_request("Wait time must be >= 0")

        # If asked for 'first', dig up the first run and use its UUID.

        if run in ['next', 'first']:
            future = run == 'next'
            wait_interval = 0.5
            tries = int(wait_time / wait_interval) if wait_time > 0 \
                    else 1
            while tries > 0:
                run = __runs_first_run(task, future)
                if run is not None:
                    break
                if wait_time > 0:
                    time.sleep(1.0)
                tries -= 1

            if run is None:
                return not_found()

        # Obey the wait time with tries at 0.5s intervals
        tries = wait_time * 2 if (wait_local or wait_merged) else 1
        result = {}

        while tries:

            try:
                cursor = dbcursor_query(
                    """
                    SELECT
                        run_json(run.id),
                        run_state.finished
                    FROM
                        task
                        JOIN run ON task.id = run.task
                        JOIN run_state ON run_state.id = run.state
                    WHERE 
                        task.uuid = %s
                        AND run.uuid = %s
                    """, [task, run])
            except Exception as ex:
                log.exception()
                return error(str(ex))

            if cursor.rowcount == 0:
                cursor.close()
                return not_found()

            result, finished = cursor.fetchone()
            cursor.close()

            if not (wait_local or wait_merged):
                break
            else:
                if (wait_local and result['result'] is None) \
                   or (wait_merged \
                       and ( (result['result-full'] is None) or (not finished) ) ):
                    log.debug("Waiting (%d left) for merged: %s %s", tries,
                              result['result-full'], finished)
                    time.sleep(0.5)
                    tries -= 1
                else:
                    log.debug("Got the requested result.")
                    break

        # Even if we timed out waiting, return the last result we saw
        # and let the client sort it out.

        # This strips any query parameters and replaces the last item
        # with the run, which might be needed if the 'first' option
        # was used.

        href_path_parts = urllib.parse.urlparse(request.url).path.split('/')
        href_path_parts[-1] = run
        href_path = '/'.join(href_path_parts)
        href = urllib.parse.urljoin(request.url, href_path)

        result['href'] = href
        result['task-href'] = root_url('tasks/' + task)
        result['result-href'] = href + '/result'

        # For a NULL first participant, fill in the netloc.

        try:
            if result['participants'][0] is None:
                result['participants'][0] = server_netloc()
        except KeyError:
            pass  # Not there?  Don't care.

        return json_response(result)

    elif request.method == 'PUT':

        data = request.data.decode("ascii")

        log.debug("Run PUT %s", request.url)

        requester, key = task_requester_key(task)

        if requester is None:
            return not_found()

        if not access_write_task(requester, key):
            return forbidden()

        # Get the JSON from the body
        try:
            run_data = pscheduler.json_load(data, max_schema=1)
        except ValueError:
            log.exception()
            log.debug("Run data was %s", data)
            return bad_request("Invalid or missing run data")

        # If the run doesn't exist, take the whole thing as if it were
        # a POST.

        cursor = dbcursor_query(
            "SELECT EXISTS (SELECT * FROM run WHERE uuid = %s)", [run],
            onerow=True)

        fetched = cursor.fetchone()[0]
        cursor.close()
        if not fetched:

            log.debug("Record does not exist; full PUT.")

            try:
                start_time = \
                    pscheduler.iso8601_as_datetime(run_data['start-time'])
            except KeyError:
                return bad_request("Missing start time")
            except ValueError:
                return bad_request("Invalid start time")

            try:

                passed, diags, response, priority \
                    = __evaluate_limits(task, start_time)
                if response is not None:
                    return response

                if passed:
                    diag_message = None
                else:
                    diag_message = "Run forbidden by limits:\n%s" % (diags)

                cursor = dbcursor_query(
                    "SELECT * FROM api_run_post(%s, %s, %s, %s, %s, %s)",
                    [task, start_time, run, diag_message, priority, diags],
                    onerow=True)
                succeeded, uuid, conflicts, error_message = cursor.fetchone()
                cursor.close()
                if conflicts:
                    return conflict(error_message)
                if not succeeded:
                    return error(error_message)
                log.debug("Full put of %s, got back %s", run, uuid)
            except Exception as ex:
                log.exception()
                return error(str(ex))

            return ok()

        # For anything else, only one thing can be updated at a time,
        # and even that is a select subset.

        log.debug("Record exists; partial PUT.")

        if 'part-data-full' in run_data:

            log.debug("Updating part-data-full from %s", run_data)

            try:
                part_data_full = \
                    pscheduler.json_dump(run_data['part-data-full'])
            except KeyError:
                return bad_request("Missing part-data-full")
            except ValueError:
                return bad_request("Invalid part-data-full")

            log.debug("Full data is: %s", part_data_full)

            cursor = dbcursor_query(
                """
                          UPDATE
                              run
                          SET
                              part_data_full = %s
                          WHERE
                              uuid = %s
                              AND EXISTS (SELECT * FROM task WHERE UUID = %s)
                          """, [part_data_full, run, task])

            rowcount = cursor.rowcount
            cursor.close()
            if rowcount != 1:
                return not_found()

            log.debug("Full data updated")

            return ok()

        elif 'result-full' in run_data:

            log.debug("Updating result-full from %s", run_data)

            try:
                result_full = \
                    pscheduler.json_dump(run_data['result-full'])
            except KeyError:
                return bad_request("Missing result-full")
            except ValueError:
                return bad_request("Invalid result-full")

            try:
                succeeded = bool(run_data['succeeded'])
            except KeyError:
                return bad_request("Missing success value")
            except ValueError:
                return bad_request("Invalid success value")

            log.debug("Updating result-full: JSON %s", result_full)
            log.debug("Updating result-full: Run  %s", run)
            log.debug("Updating result-full: Task %s", task)
            cursor = dbcursor_query(
                """
                          UPDATE
                              run
                          SET
                              result_full = %s,
                              state = CASE %s
                                  WHEN TRUE THEN run_state_finished()
                                  ELSE run_state_failed()
                                  END
                          WHERE
                              uuid = %s
                              AND EXISTS (SELECT * FROM task WHERE UUID = %s)
                          """, [result_full, succeeded, run, task])

            rowcount = cursor.rowcount
            cursor.close()
            if rowcount != 1:
                return not_found()

            return ok()

    elif request.method == 'DELETE':

        # TODO: If this is the lead, the run's counterparts on the
        # other participating nodes need to be removed as well.

        requester, key = task_requester_key(task)
        if requester is None:
            return not_found()

        if not access_write_task(requester, key):
            return forbidden()

        cursor = dbcursor_query(
            """
        DELETE FROM run
        WHERE
            task in (SELECT id FROM task WHERE uuid = %s)
            AND uuid = %s 
        """, [task, run])

        rowcount = cursor.rowcount
        cursor.close()

        return ok() if rowcount == 1 else not_found()

    else:

        return not_allowed()
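
Seen from the client side, the GET branch above accepts wait-local, wait-merged, and wait query parameters and allows 'first' or 'next' in place of a run UUID. A hedged client sketch using requests (the host and path prefix are placeholders, not taken from the handler above):

import requests

BASE = "https://ps.example.net/pscheduler"           # hypothetical server
TASK = "00000000-0000-0000-0000-000000000000"        # placeholder task UUID

# Ask for the task's next run and wait up to 30 seconds for a merged result.
resp = requests.get(
    BASE + "/tasks/" + TASK + "/runs/next",
    params={"wait-merged": "true", "wait": 30},
    timeout=60,
)
resp.raise_for_status()
run = resp.json()

# Links filled in by the handler above:
print(run["href"], run["task-href"], run["result-href"])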
Code Example #9
File: runs.py  Project: krihal/pscheduler
def tasks_uuid_runs(task):

    if not uuid_is_valid(task):
        return not_found()

    if request.method == 'GET':

        query = "SELECT '" + base_url() + """/' || run.uuid
             FROM
                 run
                 JOIN task ON task.id = run.task
             WHERE
                task.uuid = %s"""
        args = [task]

        try:

            start_time = arg_datetime('start')
            if start_time is not None:
                query += " AND lower(times) >= %s"
                args.append(start_time)

            end_time = arg_datetime('end')
            if end_time is not None:
                query += " AND upper(times) <= %s"
                args.append(end_time)

            if arg_boolean('upcoming'):
                query += " AND (times @> normalized_now() OR lower(times) > normalized_now())"
                query += " AND state IN (run_state_pending(), run_state_on_deck(), run_state_running(), run_state_nonstart())"

            query += " ORDER BY times"

            limit = arg_cardinal('limit')
            if limit is not None:
                query += " LIMIT " + str(limit)

            # TODO: This should be expandable

        except ValueError as ex:
            return bad_request(str(ex))

        return json_query_simple(query, args, empty_ok=True)

    elif request.method == 'POST':

        data = request.data.decode("ascii")

        log.debug("Run POST: %s --> %s", request.url, data)

        requester, key = task_requester_key(task)
        if requester is None:
            return not_found()

        if not access_write_task(requester, key):
            return forbidden()

        try:
            data = pscheduler.json_load(data, max_schema=1)
            start_time = pscheduler.iso8601_as_datetime(data['start-time'])
        except KeyError:
            return bad_request("Missing start time")
        except ValueError as ex:
            return bad_request("Invalid JSON: %s" % (str(ex)))

        try:
            passed, diags, response, priority \
                = __evaluate_limits(task, start_time)
        except Exception as ex:
            log.exception()
            return error(str(ex))
        if response is not None:
            return response

        try:
            log.debug("Posting run for task %s starting %s, priority %s" %
                      (task, start_time, priority))

            if passed:
                diag_message = None
            else:
                diag_message = "Run forbidden by limits:\n%s" % (diags)

            cursor = dbcursor_query(
                "SELECT * FROM api_run_post(%s, %s, NULL, %s, %s, %s)",
                [task, start_time, diag_message, priority, diags],
                onerow=True)
            succeeded, uuid, conflicts, error_message = cursor.fetchone()
            cursor.close()
            if conflicts:
                return conflict(error_message)
            if error_message:
                return error(error_message)
        except Exception as ex:
            log.exception()
            return error(str(ex))

        url = base_url() + '/' + uuid
        log.debug("New run posted to %s", url)
        return ok_json(url)

    else:

        return not_allowed()
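
The POST branch above expects a JSON body whose start-time field is an ISO 8601 timestamp and, on success, returns the URL of the newly created run. A client sketch under the same hypothetical host and path assumptions as the previous sketch:

import requests

BASE = "https://ps.example.net/pscheduler"           # hypothetical server
TASK = "00000000-0000-0000-0000-000000000000"        # placeholder task UUID

resp = requests.post(
    BASE + "/tasks/" + TASK + "/runs",
    json={"start-time": "2024-03-15T10:30:00Z"},     # parsed with iso8601_as_datetime
    timeout=30,
)
resp.raise_for_status()
run_url = resp.json()        # ok_json() responds with the new run's URL
print("New run posted to", run_url)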
Code Example #10
File: runs.py  Project: smritigambhir/pscheduler
def tasks_uuid_runs_run(task, run):

    if not uuid_is_valid(task):
        return not_found()

    if ((request.method in ['PUT', 'DELETE'] and not uuid_is_valid(run))
            or (run not in ['first', 'next'] and not uuid_is_valid(run))):
        return not_found()

    if request.method == 'GET':

        # Wait for there to be a local result
        wait_local = arg_boolean('wait-local')

        # Wait for there to be a merged result
        wait_merged = arg_boolean('wait-merged')

        if wait_local and wait_merged:
            return bad_request("Cannot wait on local and merged results")

        # Figure out how long to wait in seconds.  Zero means don't
        # wait.

        wait_time = arg_integer('wait')
        if wait_time is None:
            wait_time = 30
        if wait_time < 0:
            return bad_request("Wait time must be >= 0")

        # If asked for 'first', dig up the first run and use its UUID.

        if run in ['next', 'first']:
            future = run == 'next'
            wait_interval = 0.5
            tries = int(wait_time / wait_interval) if wait_time > 0 \
                    else 1
            while tries > 0:
                try:
                    run = __runs_first_run(task, future)
                except Exception as ex:
                    log.exception()
                    return error(str(ex))
                if run is not None:
                    break
                if wait_time > 0:
                    time.sleep(1.0)
                tries -= 1

            if run is None:
                return not_found()

        # 60 tries at 0.5s intervals == 30 sec.
        tries = 60 if (wait_local or wait_merged) else 1

        while tries:

            try:
                cursor = dbcursor_query(
                    """
                    SELECT
                        lower(run.times),
                        upper(run.times),
                        upper(run.times) - lower(run.times),
                        task.participant,
                        task.nparticipants,
                        task.participants,
                        run.part_data,
                        run.part_data_full,
                        run.result,
                        run.result_full,
                        run.result_merged,
                        run_state.enum,
                        run_state.display,
                        run.errors,
                        run.clock_survey,
                        run.id,
                        archiving_json(run.id),
                        run.added
                    FROM
                        run
                        JOIN task ON task.id = run.task
                        JOIN run_state ON run_state.id = run.state
                    WHERE
                        task.uuid = %s
                        AND run.uuid = %s""", [task, run])
            except Exception as ex:
                log.exception()
                return error(str(ex))

            if cursor.rowcount == 0:
                cursor.close()
                return not_found()

            row = cursor.fetchone()
            cursor.close()

            if not (wait_local or wait_merged):
                break
            else:
                if (wait_local and row[7] is None) \
                        or (wait_merged and row[9] is None):
                    time.sleep(0.5)
                    tries -= 1
                else:
                    break

        # Return a result whether or not we timed out and let the
        # client sort it out.

        result = {}

        # This strips any query parameters and replaces the last item
        # with the run, which might be needed if the 'first' option
        # was used.

        href_path_parts = urlparse.urlparse(request.url).path.split('/')
        href_path_parts[-1] = run
        href_path = '/'.join(href_path_parts)
        href = urlparse.urljoin(request.url, href_path)

        result['href'] = href
        result['start-time'] = pscheduler.datetime_as_iso8601(row[0])
        result['end-time'] = pscheduler.datetime_as_iso8601(row[1])
        result['duration'] = pscheduler.timedelta_as_iso8601(row[2])
        participant_num = row[3]
        result['participant'] = participant_num
        result['participants'] = [
            server_netloc()
            if participant is None and participant_num == 0 else participant
            for participant in row[5]
        ]
        result['participant-data'] = row[6]
        result['participant-data-full'] = row[7]
        result['result'] = row[8]
        result['result-full'] = row[9]
        result['result-merged'] = row[10]
        result['state'] = row[11]
        result['state-display'] = row[12]
        result['errors'] = row[13]
        if row[14] is not None:
            result['clock-survey'] = row[14]
        if row[16] is not None:
            result['archivings'] = row[16]
        if row[17] is not None:
            result['added'] = pscheduler.datetime_as_iso8601(row[17])
        result['task-href'] = root_url('tasks/' + task)
        result['result-href'] = href + '/result'

        return json_response(result)

    elif request.method == 'PUT':

        log.debug("Run PUT %s", request.url)

        # Get the JSON from the body
        try:
            run_data = pscheduler.json_load(request.data, max_schema=1)
        except ValueError:
            log.exception()
            log.debug("Run data was %s", request.data)
            return bad_request("Invalid or missing run data")

        # If the run doesn't exist, take the whole thing as if it were
        # a POST.

        try:
            cursor = dbcursor_query(
                "SELECT EXISTS (SELECT * FROM run WHERE uuid = %s)", [run],
                onerow=True)
        except Exception as ex:
            log.exception()
            return error(str(ex))

        fetched = cursor.fetchone()[0]
        cursor.close()
        if not fetched:

            log.debug("Record does not exist; full PUT.")

            try:
                start_time = \
                    pscheduler.iso8601_as_datetime(run_data['start-time'])
            except KeyError:
                return bad_request("Missing start time")
            except ValueError:
                return bad_request("Invalid start time")

            try:

                passed, diags, response = __evaluate_limits(task, start_time)
                if response is not None:
                    return response

                cursor = dbcursor_query(
                    "SELECT * FROM api_run_post(%s, %s, %s)",
                    [task, start_time, run],
                    onerow=True)
                succeeded, uuid, conflicts, error_message = cursor.fetchone()
                cursor.close()
                if conflicts:
                    return conflict(error_message)
                if not succeeded:
                    return error(error_message)
                log.debug("Full put of %s, got back %s", run, uuid)
            except Exception as ex:
                log.exception()
                return error(str(ex))

            return ok()

        # For anything else, only one thing can be updated at a time,
        # and even that is a select subset.

        log.debug("Record exists; partial PUT.")

        if 'part-data-full' in run_data:

            log.debug("Updating part-data-full from %s", run_data)

            try:
                part_data_full = \
                    pscheduler.json_dump(run_data['part-data-full'])
            except KeyError:
                return bad_request("Missing part-data-full")
            except ValueError:
                return bad_request("Invalid part-data-full")

            log.debug("Full data is: %s", part_data_full)

            try:
                cursor = dbcursor_query(
                    """
                              UPDATE
                                  run
                              SET
                                  part_data_full = %s
                              WHERE
                                  uuid = %s
                                  AND EXISTS (SELECT * FROM task WHERE UUID = %s)
                              """, [part_data_full, run, task])
            except Exception as ex:
                log.exception()
                return error(str(ex))

            rowcount = cursor.rowcount
            cursor.close()
            if rowcount != 1:
                return not_found()

            log.debug("Full data updated")

            return ok()

        elif 'result-full' in run_data:

            log.debug("Updating result-full from %s", run_data)

            try:
                result_full = \
                    pscheduler.json_dump(run_data['result-full'])
            except KeyError:
                return bad_request("Missing result-full")
            except ValueError:
                return bad_request("Invalid result-full")

            try:
                succeeded = bool(run_data['succeeded'])
            except KeyError:
                return bad_request("Missing success value")
            except ValueError:
                return bad_request("Invalid success value")

            log.debug("Updating result-full: JSON %s", result_full)
            log.debug("Updating result-full: Run  %s", run)
            log.debug("Updating result-full: Task %s", task)
            try:
                cursor = dbcursor_query(
                    """
                              UPDATE
                                  run
                              SET
                                  result_full = %s,
                                  state = CASE %s
                                      WHEN TRUE THEN run_state_finished()
                                      ELSE run_state_failed()
                                      END
                              WHERE
                                  uuid = %s
                                  AND EXISTS (SELECT * FROM task WHERE UUID = %s)
                              """, [result_full, succeeded, run, task])
            except Exception as ex:
                log.exception()
                return error(str(ex))

            rowcount = cursor.rowcount
            cursor.close()
            if rowcount != 1:
                return not_found()

            return ok()

    elif request.method == 'DELETE':

        # TODO: If this is the lead, the run's counterparts on the
        # other participating nodes need to be removed as well.

        try:
            requester = task_requester(task)
            if requester is None:
                return not_found()

            if not access_write_ok(requester):
                return forbidden()

        except Exception as ex:
            return error(str(ex))

        try:
            cursor = dbcursor_query(
                """
            DELETE FROM run
            WHERE
                task in (SELECT id FROM task WHERE uuid = %s)
                AND uuid = %s 
            """, [task, run])
        except Exception as ex:
            log.exception()
            return error(str(ex))

        rowcount = cursor.rowcount
        cursor.close()

        return ok() if rowcount == 1 else not_found()

    else:

        return not_allowed()
Code Example #11
File: runs.py  Project: perfsonar/pscheduler
def tasks_uuid_runs(task):

    if request.method == 'GET':

        query = "SELECT '" + base_url() + """/' || run.uuid
             FROM
                 run
                 JOIN task ON task.id = run.task
             WHERE
                task.uuid = %s"""
        args = [task]

        try:

            start_time = arg_datetime('start')
            if start_time is not None:
                query += " AND lower(times) >= %s"
                args.append(start_time)

            end_time = arg_datetime('end')
            if end_time is not None:
                query += " AND upper(times) <= %s"
                args.append(end_time)

            if arg_boolean('upcoming'):
                query += " AND (times @> normalized_now() OR lower(times) > normalized_now())"
                query += " AND state IN (run_state_pending(), run_state_on_deck(), run_state_running(), run_state_nonstart())"

            query += " ORDER BY times"

            limit = arg_cardinal('limit')
            if limit is not None:
                query += " LIMIT " + str(limit)

            # TODO: This should be expandable

        except ValueError as ex:

            return bad_request(str(ex))


        return json_query_simple(query, args, empty_ok=True)

    elif request.method == 'POST':

        log.debug("Run POST: %s --> %s", request.url, request.data)

        try:
            data = pscheduler.json_load(request.data)
            start_time = pscheduler.iso8601_as_datetime(data['start-time'])
        except KeyError:
            return bad_request("Missing start time")
        except ValueError:
            return bad_request("Invalid JSON:" + request.data)


        try:
            passed, diags = __evaluate_limits(task, start_time)
        except Exception as ex:
            log.exception()
            return error(str(ex))

        try:
            log.debug("Posting run for task %s starting %s"
                      % (task, start_time))
            cursor = dbcursor_query("SELECT api_run_post(%s, %s, NULL, %s)",
                               [task, start_time, diags], onerow=True)
            uuid = cursor.fetchone()[0]
        except Exception as ex:
            log.exception()
            return error(str(ex))

        url = base_url() + '/' + uuid
        log.debug("New run posted to %s", url)
        return ok_json(url)

    else:

        return not_allowed()
Code Example #12
File: runs.py  Project: perfsonar/pscheduler
def tasks_uuid_runs_run(task, run):

    if task is None:
        return bad_request("Missing or invalid task")

    if run is None:
        return bad_request("Missing or invalid run")

    if request.method == 'GET':

        # Wait for there to be a local result
        wait_local = arg_boolean('wait-local')

        # Wait for there to be a merged result
        wait_merged = arg_boolean('wait-merged')

        if wait_local and wait_merged:
            return error("Cannot wait on local and merged results")

        # If asked for 'first', dig up the first run and use its UUID.

        if run == 'first':
            # 60 tries at 0.5s intervals == 30 sec.
            tries = 60
            while tries > 0:
                try:
                    run = __runs_first_run(task)
                except Exception as ex:
                    log.exception()
                    return error(str(ex))
                if run is not None:
                    break
                time.sleep(1.0)
                tries -= 1

            if run is None:
                return not_found()


        # 60 tries at 0.5s intervals == 30 sec.
        tries = 60 if (wait_local or wait_merged) else 1

        while tries:

            try:
                cursor = dbcursor_query(
                    """
                    SELECT
                        lower(run.times),
                        upper(run.times),
                        upper(run.times) - lower(run.times),
                        task.participant,
                        task.nparticipants,
                        task.participants,
                        run.part_data,
                        run.part_data_full,
                        run.result,
                        run.result_full,
                        run.result_merged,
                        run_state.enum,
                        run_state.display,
                        run.errors,
                        run.clock_survey
                    FROM
                        run
                        JOIN task ON task.id = run.task
                        JOIN run_state ON run_state.id = run.state
                    WHERE
                        task.uuid = %s
                        AND run.uuid = %s""", [task, run])
            except Exception as ex:
                log.exception()
                return error(str(ex))

            if cursor.rowcount == 0:
                return not_found()

            row = cursor.fetchone()

            if not (wait_local or wait_merged):
                break
            else:
                if (wait_local and row[7] is None) \
                        or (wait_merged and row[9] is None):
                    time.sleep(0.5)
                    tries -= 1
                else:
                    break

        # Return a result whether or not we timed out and let the
        # client sort it out.

        result = {}

        # This strips any query parameters and replaces the last item
        # with the run, which might be needed if the 'first' option
        # was used.

        href_path_parts = urlparse.urlparse(request.url).path.split('/')
        href_path_parts[-1] = run
        href_path = '/'.join(href_path_parts)
        href = urlparse.urljoin( request.url, href_path )

        result['href'] = href
        result['start-time'] = pscheduler.datetime_as_iso8601(row[0])
        result['end-time'] = pscheduler.datetime_as_iso8601(row[1])
        result['duration'] = pscheduler.timedelta_as_iso8601(row[2])
        participant_num = row[3]
        result['participant'] = participant_num
        result['participants'] = [
            pscheduler.api_this_host()
            if participant is None and participant_num == 0
            else participant
            for participant in row[5]
            ]
        result['participant-data'] = row[6]
        result['participant-data-full'] = row[7]
        result['result'] = row[8]
        result['result-full'] = row[9]
        result['result-merged'] = row[10]
        result['state'] = row[11]
        result['state-display'] = row[12]
        result['errors'] = row[13]
        if row[14] is not None:
            result['clock-survey'] = row[14]
        result['task-href'] = root_url('tasks/' + task)
        result['result-href'] = href + '/result'

        return json_response(result)

    elif request.method == 'PUT':

        log.debug("Run PUT %s", request.url)

        # Get the JSON from the body
        try:
            run_data = pscheduler.json_load(request.data)
        except ValueError:
            log.exception()
            log.debug("Run data was %s", request.data)
            return error("Invalid or missing run data")

        # If the run doesn't exist, take the whole thing as if it were
        # a POST.

        try:
            cursor = dbcursor_query(
                "SELECT EXISTS (SELECT * FROM run WHERE uuid = %s)",
                [run], onerow=True)
        except Exception as ex:
            log.exception()
            return error(str(ex))

        if not cursor.fetchone()[0]:

            log.debug("Record does not exist; full PUT.")

            try:
                start_time = \
                    pscheduler.iso8601_as_datetime(run_data['start-time'])
            except KeyError:
                return bad_request("Missing start time")
            except ValueError:
                return bad_request("Invalid start time")

            passed, diags = __evaluate_limits(task, start_time)

            try:
                cursor = dbcursor_query("SELECT api_run_post(%s, %s, %s)",
                               [task, start_time, run], onerow=True)
                log.debug("Full put of %s, got back %s", run, cursor.fetchone()[0])
            except Exception as ex:
                log.exception()
                return error(str(ex))

            return ok()

        # For anything else, only one thing can be updated at a time,
        # and even that is a select subset.

        log.debug("Record exists; partial PUT.")

        if 'part-data-full' in run_data:

            log.debug("Updating part-data-full from %s", run_data)

            try:
                part_data_full = \
                    pscheduler.json_dump(run_data['part-data-full'])
            except KeyError:
                return bad_request("Missing part-data-full")
            except ValueError:
                return bad_request("Invalid part-data-full")

            log.debug("Full data is: %s", part_data_full)

            try:
                cursor = dbcursor_query("""
                              UPDATE
                                  run
                              SET
                                  part_data_full = %s
                              WHERE
                                  uuid = %s
                                  AND EXISTS (SELECT * FROM task WHERE UUID = %s)
                              """,
                           [ part_data_full, run, task])
            except Exception as ex:
                log.exception()
                return error(str(ex))
            if cursor.rowcount != 1:
                return not_found()

            log.debug("Full data updated")

            return ok()

        elif 'result-full' in run_data:

            log.debug("Updating result-full from %s", run_data)

            try:
                result_full = \
                    pscheduler.json_dump(run_data['result-full'])
            except KeyError:
                return bad_request("Missing result-full")
            except ValueError:
                return bad_request("Invalid result-full")

            try:
                succeeded = bool(run_data['succeeded'])
            except KeyError:
                return bad_request("Missing success value")
            except ValueError:
                return bad_request("Invalid success value")

            log.debug("Updating result-full: JSON %s", result_full)
            log.debug("Updating result-full: Run  %s", run)
            log.debug("Updating result-full: Task %s", task)
            try:
                cursor = dbcursor_query("""
                              UPDATE
                                  run
                              SET
                                  result_full = %s,
                                  state = CASE %s
                                      WHEN TRUE THEN run_state_finished()
                                      ELSE run_state_failed()
                                      END
                              WHERE
                                  uuid = %s
                                  AND EXISTS (SELECT * FROM task WHERE UUID = %s)
                              """,
                               [ result_full, succeeded, run, task ])
            except Exception as ex:
                log.exception()
                return error(str(ex))

            if cursor.rowcount != 1:
                return not_found()

            return ok()



    elif request.method == 'DELETE':

        # TODO: If this is the lead, the run's counterparts on the
        # other participating nodes need to be removed as well.

        try:
            cursor = dbcursor_query("""
            DELETE FROM run
            WHERE
                task in (SELECT id FROM task WHERE uuid = %s)
                AND uuid = %s 
            """, [task, run])
        except Exception as ex:
            log.exception()
            return error(str(ex))

        return ok() if cursor.rowcount == 1 else not_found()

    else:

        return not_allowed()