def schedule():
    """
    Return a JSON listing of every run whose scheduled time range
    overlaps the [start, end) window given in the request arguments.
    """

    # Both bounds come from the query string; either being malformed
    # is a client error, not a server one.
    try:
        window_start = arg_datetime('start')
        window_end = arg_datetime('end')
    except ValueError:
        return bad_request('Invalid start or end time')

    try:
        cursor = dbcursor_query("""
            SELECT lower(times), upper(times), task, run,
                   state_enum, state_display, task_json, task_cli
            FROM schedule
            WHERE times && tstzrange(%s, %s, '[)');
            """, [window_start, window_end])
    except Exception as ex:
        log.exception()
        return error(str(ex))

    result = []

    for (start, end, task_uuid, run_uuid,
         state, state_display, task_json, task_cli) in cursor:

        task_url = pscheduler.api_url(path="tasks/%s" % task_uuid)
        run_url = "%s/runs/%s" % (task_url, run_uuid)

        entry = {
            "start-time": pscheduler.datetime_as_iso8601(start),
            "end-time": pscheduler.datetime_as_iso8601(end),
            "href": run_url,
            "result-href": "%s/result" % run_url,
            "state": state,
            "state-display": state_display,
            "task": task_json,
            "cli": task_cli
        }

        # The task JSON gets a link back to its own resource.
        entry["task"]["href"] = task_url

        result.append(entry)

    return ok_json(result)
def monitor():
    """
    Return a JSON listing of the next 'window' runs on the schedule,
    intended for the schedule-monitoring display.
    """

    try:
        window_size = arg_cardinal('window')
    except ValueError as ex:
        return bad_request(str(ex))

    try:
        cursor = dbcursor_query("""SELECT ppf, lower(times), upper(times), task, run,
                                   state_enum, state_display, task_json, task_cli
                                   FROM schedule_monitor(%s)""", [window_size])
    except Exception as ex:
        log.exception()
        return error(str(ex))

    result = []
    for row in cursor:

        # Row columns: 0=ppf, 1=lower(times), 2=upper(times), 3=task,
        # 4=run, 5=state_enum, 6=state_display, 7=task_json, 8=task_cli.
        #
        # BUG FIX: the task and run UUIDs are columns 3 and 4; the
        # previous code used 2 and 3 (copied from schedule(), which has
        # no leading ppf column), producing hrefs built from a timestamp
        # and the task UUID respectively.
        task_href = pscheduler.api_url(path="tasks/%s" % row[3])
        run_href = "%s/runs/%s" % (task_href, row[4])

        run = {
            "ppf": row[0],
            "start-time": pscheduler.datetime_as_iso8601(row[1]),
            "end-time": pscheduler.datetime_as_iso8601(row[2]),
            "href": run_href,
            "result-href": "%s/result" % run_href,
            "state": row[5],
            "state-display": row[6],
            "task": row[7],
            "cli": row[8]
        }

        # Link the embedded task JSON back to its own resource.
        run["task"]["href"] = task_href

        result.append(run)

    return ok_json(result)
def tasks():
    """
    GET:  Return a list of task URLs (or full task JSON if expanded).
    POST: Validate, tool-select, limit-check and create a new task,
          posting it to all participants and returning its URL.
    """

    if request.method == 'GET':

        expanded = is_expanded()

        query = """
            SELECT json, uuid
            FROM task
            """
        args = []

        try:
            json_query = arg_json("json")
        except ValueError as ex:
            return bad_request(str(ex))

        # Optional filter: only tasks whose JSON contains the given JSON.
        if json_query is not None:
            query += "WHERE json @> %s"
            args.append(request.args.get("json"))

        query += " ORDER BY added"

        try:
            cursor = dbcursor_query(query, args)
        except Exception as ex:
            return error(str(ex))

        result = []
        for row in cursor:
            url = base_url(row[1])
            if not expanded:
                result.append(url)
                continue
            # Expanded: return the task JSON with its href filled in.
            row[0]['href'] = url
            result.append(row[0])
        return json_response(result)

    elif request.method == 'POST':

        try:
            task = pscheduler.json_load(request.data)
        except ValueError:
            # BUG FIX: request.data is bytes under Python 3; direct
            # concatenation with a str raises TypeError.  Also add the
            # missing space after the colon.
            return bad_request("Invalid JSON: %s" % (request.data,))

        # TODO: Validate the JSON against a TaskSpecification

        # See if the task spec is valid

        try:
            returncode, stdout, stderr = pscheduler.run_program(
                [ "pscheduler", "internal", "invoke", "test",
                  task['test']['type'], "spec-is-valid" ],
                stdin = pscheduler.json_dump(task['test']['spec'])
                )

            if returncode != 0:
                return error("Invalid test specification: " + stderr)
        except Exception as ex:
            return error("Unable to validate test spec: " + str(ex))

        log.debug("Validated test: %s", pscheduler.json_dump(task['test']))

        # Find the participants

        try:
            returncode, stdout, stderr = pscheduler.run_program(
                [ "pscheduler", "internal", "invoke", "test",
                  task['test']['type'], "participants" ],
                stdin = pscheduler.json_dump(task['test']['spec'])
                )

            if returncode != 0:
                return error("Unable to determine participants: " + stderr)

            # A null participant means "this host."
            participants = [ host if host is not None
                             else pscheduler.api_this_host()
                             for host in pscheduler.json_load(stdout)["participants"] ]
        except Exception as ex:
            return error("Unable to determine participants: " + str(ex))
        nparticipants = len(participants)

        # TODO: The participants must be unique.  This should be
        # verified by fetching the host name from each one.

        #
        # TOOL SELECTION
        #

        # TODO: Need to provide for tool being specified by the task
        # package.

        tools = []

        # Ask every participant which tools it offers for this test.
        for participant in participants:
            try:
                # TODO: This will fail with a very large test spec.
                status, result = pscheduler.url_get(
                    pscheduler.api_url(participant, "tools"),
                    params={ 'test': pscheduler.json_dump(task['test']) }
                    )
                if status != 200:
                    raise Exception("%d: %s" % (status, result))
                tools.append(result)
            except Exception as ex:
                return error("Error getting tools from %s: %s" \
                             % (participant, str(ex)))
            log.debug("Participant %s offers tools %s", participant, tools)

        if len(tools) != nparticipants:
            return error("Didn't get a full set of tool responses")

        if "tools" in task:
            tool = pick_tool(tools, pick_from=task['tools'])
        else:
            tool = pick_tool(tools)

        if tool is None:
            # TODO: This could stand some additional diagnostics.
            return no_can_do("Couldn't find a tool in common among the participants.")

        task['tool'] = tool

        #
        # TASK CREATION
        #

        task_data = pscheduler.json_dump(task)
        log.debug("Task data: %s", task_data)

        # Remote tasks successfully posted so far, for rollback on failure.
        tasks_posted = []

        # Evaluate the task against the limits and reject the request
        # if it doesn't pass.

        log.debug("Checking limits on %s", task["test"])

        (processor, whynot) = limitprocessor()
        if processor is None:
            log.debug("Limit processor is not initialized. %s", whynot)
            return no_can_do("Limit processor is not initialized: %s" % whynot)

        # TODO: This is cooked up in two places.  Make a function of it.
        hints = {
            "ip": request.remote_addr
            }
        hints_data = pscheduler.json_dump(hints)

        log.debug("Processor = %s", processor)
        passed, diags = processor.process(task["test"], hints)

        if not passed:
            return forbidden("Task forbidden by limits:\n" + diags)

        # Post the lead with the local database, which also assigns
        # its UUID.  Make it disabled so the scheduler doesn't try to
        # do anything with it until the task has been submitted to all
        # of the other participants.

        try:
            cursor = dbcursor_query(
                "SELECT * FROM api_task_post(%s, %s, 0, NULL, FALSE)",
                [task_data, hints_data], onerow=True)
        except Exception as ex:
            return error(str(ex.diag.message_primary))

        if cursor.rowcount == 0:
            return error("Task post failed; poster returned nothing.")
        task_uuid = cursor.fetchone()[0]

        log.debug("Tasked lead, UUID %s", task_uuid)

        # Other participants get the UUID forced upon them.

        for participant in range(1, nparticipants):
            part_name = participants[participant]
            try:
                log.debug("Tasking %d@%s: %s", participant, part_name, task_data)
                post_url = pscheduler.api_url(part_name, 'tasks/' + task_uuid)
                log.debug("Posting task to %s", post_url)
                status, result = pscheduler.url_post(
                    post_url,
                    params={ 'participant': participant },
                    data=task_data,
                    json=False,
                    throw=False)
                log.debug("Remote returned %d: %s", status, result)
                if status != 200:
                    raise Exception("Unable to post task to %s: %s"
                                    % (part_name, result))
                tasks_posted.append(result)
            except Exception as ex:
                log.exception()
                # Best-effort rollback: delete everything posted so far.
                for url in tasks_posted:
                    # TODO: Handle failure?
                    # BUG FIX: requests.delete() returns a Response
                    # object, not a (status, result) tuple; the old
                    # tuple unpacking raised during cleanup.
                    requests.delete(url)
                try:
                    dbcursor_query("SELECT api_task_delete(%s)", [task_uuid])
                except Exception as ex:
                    log.exception()
                return error("Error while tasking %d@%s: %s"
                             % (participant, part_name, ex))

        # Enable the task so the scheduler will schedule it.
        try:
            dbcursor_query("SELECT api_task_enable(%s)", [task_uuid])
        except Exception as ex:
            log.exception()
            return error("Failed to enable task %s.  See system logs." % task_uuid)
        log.debug("Task enabled for scheduling.")

        return ok_json("%s/%s" % (request.base_url, task_uuid))

    else:

        return not_allowed()
def __init__(self, test, nparticipants, a, z, debug=False):
    """
    Construct a task runner.

    Fills the test template's __A__/__Z__ placeholders, posts the
    resulting task to the pScheduler on 'a', waits for the first run
    to be scheduled and then starts a worker thread to follow it.
    Failures are recorded via self.__diag() and abort construction
    early (the instance is left with diagnostics in self.results).
    """
    self.debug = debug
    self.results = {
        "hosts": {
            "a": a,
            "z": z
        },
        "nparticipants": nparticipants,
        "diags": []
    }
    self.diags = self.results["diags"]

    # Make sure we have sufficient pSchedulers to cover the participants
    if (nparticipants == 2) and ("pscheduler" not in z):
        # TODO: Assert that Z has a host?
        self.__diag("No pScheduler for or on %s." % (z["host"]))
        return

    # Fill in the test's blanks and construct a task spec.  Deep-copy
    # first so the caller's template is not mutated.
    test = copy.deepcopy(test)
    test = pscheduler.json_substitute(test, "__A__", a["pscheduler"])
    # Single-participant tests target Z's plain host; otherwise prefer
    # Z's pScheduler address, falling back to its host.
    z_end = z["host"] if nparticipants == 1 else z.get(
        "pscheduler", z["host"])
    test = pscheduler.json_substitute(test, "__Z__", z_end)

    task = {
        "schema": 1,
        "test": test,
        # This is required; empty is fine.
        "schedule": {
            # TODO: Don't hard-wire this.
            "slip": "PT10M"
        }
    }

    # Post the task

    task_post = pscheduler.api_url(host=a["pscheduler"], path="/tasks")
    status, task_href = pscheduler.url_post(
        task_post, data=pscheduler.json_dump(task), throw=False)
    if status != 200:
        self.__diag("Unable to post task: %s" % (task_href))
        return
    self.__debug("Posted task %s" % (task_href))
    self.task_href = task_href

    # Get the task from the server with full details

    status, task_data = pscheduler.url_get(task_href,
                                           params={"detail": True},
                                           throw=False)
    if status != 200:
        self.__diag("Unable to get detailed task data: %s" % (task_data))
        return

    # Wait for the first run to be scheduled

    first_run_url = task_data["detail"]["first-run-href"]

    status, run_data = pscheduler.url_get(first_run_url, throw=False)

    if status == 404:
        self.__diag("The server never scheduled a run for the task.")
        return
    if status != 200:
        self.__diag("Error %d: %s" % (status, run_data))
        return

    # Without these, the worker can't track the run.
    for key in ["start-time", "end-time", "result-href"]:
        if key not in run_data:
            self.__diag("Server did not return %s with run data" % (key))
            return

    self.results["href"] = run_data["href"]
    self.run_data = run_data

    self.__debug("Run times: %s to %s" \
                 % (run_data["start-time"], run_data["end-time"]))

    # FIX: pass the bound method directly instead of wrapping it in a
    # lambda, and set the daemon flag via the attribute rather than the
    # deprecated setDaemon() method.
    self.worker = threading.Thread(target=self.run)
    self.worker.daemon = True
    self.worker.start()