def __init__(self,
             transform  # JSON JQTransformSpecification, assumed validated
             ):
    """Build a priority-evaluating JQ filter from a transform spec.

    Prepends pScheduler's helper functions (classifiers, note, set,
    adjust, ...) to the user-supplied script so limit configurations
    can read classifiers and manipulate the task's priority.

    Raises whatever pscheduler.JQFilter raises on a bad script.
    """

    # We modify this, so work on a copy.
    transform = copy.deepcopy(transform)

    if isinstance(transform["script"], list):
        script = "\n".join(transform["script"])
    else:
        script = transform["script"]

    # Insert our function definition(s)

    script_lines = [

        "def classifiers:",
        " ." + self.PRIVATE_KEY + ".classifiers",
        ";",

        "def classifiers_has($value):",
        " ." + self.PRIVATE_KEY + ".classifiers",
        " | contains([$value])",
        ";",

        "def default: %d;" % (self.DEFAULT_PRIORITY),

        # This will be embedded in the task.
        "def requested:",
        " .priority",
        ";",

        "def note($message):",
        " if $message != null",
        " then",
        # BUG FIX: the next two elements and " else" were adjacent
        # string literals with no separating commas, so Python's
        # implicit concatenation glued them (and the "else") into a
        # single list element.  Separated here; jq treats newlines as
        # plain whitespace, so the generated program is unchanged.
        " ." + self.PRIVATE_KEY + ".diags += ",
        " [ \"\\($message | tostring)\" ]",
        " else",
        " .",
        " end",
        ";",

        "def priority:",
        " ." + self.PRIVATE_KEY + ".priority",
        ";",

        "def set($value; $message):",
        " # TODO: Must be an integer",
        " ." + self.PRIVATE_KEY + ".priority = $value",
        " | note(\"\\($message) (Set to \\($value))\")",
        ";",

        "def adjust($value; $message):",
        " # TODO: Must be an integer",
        " ." + self.PRIVATE_KEY + ".priority += $value",
        # jq's length on a number yields its absolute value, which is
        # why the sign is rendered separately above it.
        " | note(\"\\($message) (\\(if $value > 0 then \"+\" else \"-\" end)\\($value | length))\")",
        ";",

        script
    ]

    transform["script"] = script_lines

    self.transform = pscheduler.JQFilter(filter_spec=transform,
                                         args=transform.get("args", {}),
                                         groom=True)
def __init__(self,
             data  # Data suitable for this class
             ):
    """Validate *data* and construct the JQ filter it describes.

    Raises ValueError if the data fails validation.
    """
    is_ok, why = data_is_valid(data)
    if not is_ok:
        raise ValueError("Invalid data: %s" % why)

    # Honor the optional "output-raw" flag; default is JSON output.
    self.jqfilter = pscheduler.JQFilter(
        data["script"],
        args=data.get("args", {}),
        output_raw=data.get("output-raw", False))
def __init__(self,
             data  # Data suitable for this class
             ):
    """Validate *data* and build its JQ filter.

    Raises ValueError if the data fails validation.
    """
    is_ok, why = jq_data_is_valid(data)
    if not is_ok:
        raise ValueError("Invalid data: %s" % why)

    # Raw output is irrelevant for this use, so the default is kept.
    self.jqfilter = pscheduler.JQFilter(data["script"],
                                        args=data.get("args", {}))
def __init__(self,
             transform  # JSON JQTransformSpecification, assumed validated
             ):
    """Build a task-rewriting JQ filter from a transform spec.

    Hoists jq import/include directives to the top of the generated
    script (jq requires them there), then prepends pScheduler's helper
    functions before the user's code.
    """

    # We modify this, so work on a copy.
    transform = copy.deepcopy(transform)

    if isinstance(transform["script"], list):
        script = "\n".join(transform["script"])
    else:
        script = transform["script"]

    # One quirk of jq is that imports have to be at the top of the
    # file.  Pull any in the script out and move them to the top
    # before we define our function or any user code.

    import_include = r"((import|include) [^;]+;)"

    # Put imports and includes at the top.
    # BUG FIX: this was "lines = map(...)"; in Python 3 map() returns
    # an iterator, which has no .extend(), so the call below raised
    # AttributeError.  Materialize a real list instead.
    lines = [found[0] for found in re.findall(import_include, script)]

    # Insert our function definition(s)
    lines.extend([
        "def classifiers:",
        " ." + self.PRIVATE_KEY + ".classifiers",
        ";",
        "def change($message):",
        " ." + self.PRIVATE_KEY + ".changed = true",
        " | if $message != null",
        " then",
        " ." + self.PRIVATE_KEY + ".diags += [ $message | tostring ]",
        " else",
        " .",
        " end",
        ";",
        "def reject($message):",
        " error(\"Task rejected: \" + ($message | tostring))",
        ";",
    ])

    # Add the rest of the script without the imports and includes
    lines.append(re.sub(import_include, "", script))

    transform["script"] = lines

    self.transform = pscheduler.JQFilter(
        filter_spec=transform,
        args=transform.get("args", {}),
    )
def _jq_filter(transform):
    """Return a JQFilter built from *transform*, or None if it is None.

    The generated script gets a hint($name) helper prepended, which
    reads values out of the task's private hints object.
    """
    if transform is None:
        return None

    raw = transform["script"]
    body = "\n".join(raw) if isinstance(raw, list) else raw

    prologue = "def hint($name): ." + PRIVATE_KEY + ".hints[$name];"

    # Raw output doesn't matter here, so the default is left alone.
    return pscheduler.JQFilter(prologue + body,
                               args=transform.get("args", {}))
def __init__(self,
             data  # Data suitable for this class
             ):
    """Set up a periodically-refreshed source of CIDRs.

    Validates *data*, captures the source URL and refresh/retry
    timings, compiles the optional result transform and exclusion
    list, then does an initial fetch of the CIDR list.

    Raises ValueError on invalid data or a bad exclusion CIDR.
    """

    valid, message = data_is_valid(data)
    if not valid:
        raise ValueError("Invalid data: %s" % message)

    # Where the list comes from and (optionally) what address to bind
    # when fetching it.
    self.source = data['source']
    self.bind = data.get('bind', None)

    # How often to refresh and how long to wait after a failure.
    self.update = pscheduler.iso8601_as_timedelta(data['update'])
    self.retry = pscheduler.iso8601_as_timedelta(data['retry'])

    # What to report while the list has never loaded successfully.
    self.fail_state = data.get('fail-state', False)

    try:
        # This will raise a ValueError if it's wrong.
        transform = data["transform"]
        self.transform = pscheduler.JQFilter(transform["script"],
                                             transform.get("args", {}),
                                             output_raw=True)
    except KeyError:
        # No transform specified; fetched data is used as-is.
        self.transform = None

    # Addresses/CIDRs to leave out of the fetched list.
    self.exclusions = radix.Radix()
    if 'exclude' in data:
        try:
            for excl in data['exclude']:
                self.exclusions.add(excl)
        except ValueError:
            # Re-raise with the offending entry named.
            raise ValueError("Invalid IP or CIDR '%s'" % excl)

    # Guards cidrs/length against concurrent refreshes.
    self.data_lock = threading.Lock()
    self.updating = False

    # TODO: Would be nice to support a timeout so the system
    # doesn't sit for too long.

    # The current set of CIDRs and how many entries it holds.
    self.cidrs = radix.Radix()
    self.length = 0

    # Prime the timer with the epoch and do a first load of the list
    self.next_attempt = datetime.datetime.utcfromtimestamp(0)
    self.__populate_cidrs__()
def tasks():
    """REST endpoint for /tasks.

    GET  - Return the (optionally JSON-filtered) list of tasks.
    POST - Validate, limit-check and create a new task, posting it to
           all participants and enabling it for scheduling.
    """

    if request.method == 'GET':

        where_clause = "TRUE"
        args = []

        try:
            # Parsed only to validate it; the raw argument is what
            # gets bound into the query below.
            json_query = arg_json("json")
        except ValueError as ex:
            return bad_request(str(ex))

        if json_query is not None:
            where_clause += " AND task.json_detail @> %s"
            args.append(request.args.get("json"))

        where_clause += " ORDER BY added"

        tasks = __tasks_get_filtered(request.base_url,
                                     where_clause=where_clause,
                                     args=args,
                                     expanded=is_expanded(),
                                     detail=arg_boolean("detail"),
                                     single=False)

        return ok_json(tasks)

    elif request.method == 'POST':

        data = request.data.decode("ascii")

        try:
            task = pscheduler.json_load(data, max_schema=3)
        except ValueError as ex:
            return bad_request("Invalid task specification: %s" % (str(ex)))

        # Validate the JSON against a TaskSpecification
        # TODO: Figure out how to do this without the intermediate object

        valid, message = pscheduler.json_validate({"": task}, {
            "type": "object",
            "properties": {
                "": {"$ref": "#/pScheduler/TaskSpecification"}
            },
            "required": [""]
        })

        if not valid:
            return bad_request("Invalid task specification: %s" % (message))

        # See if the test spec is valid

        try:
            returncode, stdout, stderr = pscheduler.plugin_invoke(
                "test", task['test']['type'], "spec-is-valid",
                stdin=pscheduler.json_dump(task['test']['spec']))

            if returncode != 0:
                return error("Unable to validate test spec: %s" % (stderr))

            validate_json = pscheduler.json_load(stdout, max_schema=1)

            if not validate_json["valid"]:
                return bad_request(
                    "Invalid test specification: %s"
                    % (validate_json.get("error", "Unspecified error")))
        except Exception as ex:
            return error("Unable to validate test spec: " + str(ex))

        log.debug("Validated test: %s", pscheduler.json_dump(task['test']))

        # Validate the schedule

        try:
            # Constructed only for validation; the value is unused.
            crontab.CronTab(task["schedule"]["repeat-cron"])
        except (AttributeError, ValueError):
            return error("Cron repeat specification is invalid.")
        except KeyError:
            pass  # No repeat-cron, no problem.

        # Validate the archives

        for archive in task.get("archives", []):

            # Data

            try:
                returncode, stdout, stderr = pscheduler.plugin_invoke(
                    "archiver", archive["archiver"], "data-is-valid",
                    stdin=pscheduler.json_dump(archive["data"]),
                )
                if returncode != 0:
                    return error("Unable to validate archive spec: %s"
                                 % (stderr))
            except Exception as ex:
                # BUG FIX: message previously said "test spec",
                # copy-pasted from the block above.
                return error("Unable to validate archive spec: " + str(ex))

            try:
                returned_json = pscheduler.json_load(stdout)
                if not returned_json["valid"]:
                    return bad_request("Invalid archiver data: %s"
                                       % (returned_json["error"]))
            except Exception as ex:
                # BUG FIX: "probelm" -> "problem"
                return error("Internal problem validating archiver data: %s"
                             % (str(ex)))

            # Transform, if there was one.

            if "transform" in archive:
                transform = archive["transform"]
                try:
                    _ = pscheduler.JQFilter(filter_spec=transform["script"],
                                            args=transform.get("args", {}))
                except ValueError as ex:
                    return error("Invalid transform: %s" % (str(ex)))

        # Validate the lead binding if there was one.

        lead_bind = task.get("lead-bind", None)
        if lead_bind is not None \
                and (pscheduler.address_interface(lead_bind) is None):
            return bad_request("Lead bind '%s' is not on this host"
                               % (lead_bind))

        # Evaluate the task against the limits and reject the request
        # if it doesn't pass.  We do this early so anything else in
        # the process gets any rewrites.

        log.debug("Checking limits on %s", task)

        (processor, whynot) = limitprocessor()
        if processor is None:
            log.debug("Limit processor is not initialized. %s", whynot)
            return no_can_do("Limit processor is not initialized: %s"
                             % whynot)

        hints, error_response = request_hints()
        if hints is None:
            log.debug("Can't come up with valid hints for lead task limits.")
            return error_response

        hints_data = pscheduler.json_dump(hints)

        log.debug("Processor = %s" % processor)
        passed, limits_passed, diags, new_task, _priority \
            = processor.process(task, hints)

        if not passed:
            return forbidden("Task forbidden by limits:\n" + diags)

        # If the limits rewrote the task, re-validate the new test spec.
        if new_task is not None:
            try:
                task = new_task
                returncode, stdout, stderr = pscheduler.plugin_invoke(
                    "test", task['test']['type'], "spec-is-valid",
                    stdin=pscheduler.json_dump(task["test"]["spec"]))

                if returncode != 0:
                    return error(
                        "Failed to validate rewritten test specification: %s"
                        % (stderr))
                validate_json = pscheduler.json_load(stdout, max_schema=1)
                if not validate_json["valid"]:
                    return bad_request(
                        "Rewritten test specification is invalid: %s"
                        % (validate_json.get("error", "Unspecified error")))
            except Exception as ex:
                return error(
                    "Unable to validate rewritten test specification: "
                    + str(ex))

        # Find the participants

        try:
            returncode, stdout, stderr = pscheduler.plugin_invoke(
                "test", task['test']['type'], "participants",
                stdin=pscheduler.json_dump(task['test']['spec']),
                timeout=5)
            if returncode != 0:
                return error("Unable to determine participants: " + stderr)
            # A null participant means "this host".
            participants = [
                host if host is not None else server_netloc()
                for host in pscheduler.json_load(stdout,
                                                 max_schema=1)["participants"]
            ]
        except Exception as ex:
            return error("Exception while determining participants: "
                         + str(ex))
        nparticipants = len(participants)

        # TODO: The participants must be unique.  This should be
        # verified by fetching the host name from each one.

        #
        # TOOL SELECTION
        #

        # TODO: Need to provide for tool being specified by the task
        # package.

        tools = []

        tool_params = {"test": pscheduler.json_dump(task["test"])}

        tool_offers = {}

        for participant_no in range(0, len(participants)):

            participant = participants[participant_no]

            try:

                # Make sure the other participants are running pScheduler

                participant_api = pscheduler.api_url_hostport(participant)

                log.debug("Pinging %s" % (participant))
                status, result = pscheduler.url_get(participant_api,
                                                    throw=False,
                                                    timeout=10,
                                                    bind=lead_bind)

                if status == 400:
                    raise TaskPostingException(result)
                # Any success-but-not-200, redirect or client error
                # means the far end isn't a pScheduler API server.
                # BUG FIX: the second 205 in this list was a typo for
                # 305 (Use Proxy); 205 appeared twice and 305 never
                # matched.
                elif status in [202, 204, 205, 206, 207, 208, 226,
                                300, 301, 302, 303, 304, 305, 306,
                                307, 308] \
                        or ((status >= 400) and (status <= 499)):
                    raise TaskPostingException(
                        "Host is not running pScheduler")
                elif status != 200:
                    raise TaskPostingException("returned status %d: %s"
                                               % (status, result))

                # TODO: This will fail with a very large test spec.
                status, result = pscheduler.url_get(
                    "%s/tools" % (participant_api),
                    params=tool_params,
                    throw=False,
                    bind=lead_bind)
                if status != 200:
                    raise TaskPostingException("%d: %s" % (status, result))
                tools.append(result)
            except TaskPostingException as ex:
                return error("Error getting tools from %s: %s"
                             % (participant, str(ex)))
            log.debug("Participant %s offers tools %s", participant, result)
            tool_offers[participant] = result

        if len(tools) != nparticipants:
            return error("Didn't get a full set of tool responses")

        if "tools" in task:
            tool = pick_tool(tools, pick_from=task['tools'])
        else:
            tool = pick_tool(tools)

        # Complain if no usable tool was found

        if tool is None:
            offers = []
            for participant in participants:
                participant_offers = tool_offers.get(participant,
                                                     [{"name": "nothing"}])
                if participant_offers is not None:
                    offer_set = [offer["name"]
                                 for offer in participant_offers]
                else:
                    offer_set = ["nothing"]
                offers.append("%s offered %s"
                              % (participant, ", ".join(offer_set)))
            return no_can_do("No tool in common among the participants: %s."
                             % ("; ".join(offers)))

        task['tool'] = tool

        #
        # TASK CREATION
        #

        tasks_posted = []

        # Post the lead with the local database, which also assigns
        # its UUID.  Make it disabled so the scheduler doesn't try to
        # do anything with it until the task has been submitted to all
        # of the other participants.

        cursor = dbcursor_query(
            "SELECT * FROM api_task_post(%s, %s, %s, %s, 0, %s, NULL, FALSE, %s)",
            [pscheduler.json_dump(task), participants, hints_data,
             pscheduler.json_dump(limits_passed),
             task.get("priority", None),
             diags], onerow=True)

        if cursor.rowcount == 0:
            return error("Task post failed; poster returned nothing.")

        task_uuid = cursor.fetchone()[0]

        log.debug("Tasked lead, UUID %s", task_uuid)

        # Other participants get the UUID and participant list forced
        # upon them.

        task["participants"] = participants

        task_params = {"key": task["_key"]} if "_key" in task else {}

        for participant in range(1, nparticipants):

            part_name = participants[participant]
            log.debug("Tasking participant %s", part_name)
            try:

                # Post the task

                log.debug("Tasking %d@%s: %s", participant, part_name, task)
                post_url = pscheduler.api_url_hostport(
                    part_name, 'tasks/' + task_uuid)

                task_params["participant"] = participant

                log.debug("Posting task to %s", post_url)

                status, result = pscheduler.url_post(post_url,
                                                     params=task_params,
                                                     data=task,
                                                     bind=lead_bind,
                                                     json=False,
                                                     throw=False)

                log.debug("Remote returned %d: %s", status, result)

                if status != 200:
                    raise TaskPostingException(
                        "Unable to post task to %s: %s"
                        % (part_name, result))

                tasks_posted.append(result)

                # Fetch the task's details and add the list of limits
                # passed to our own.

                status, result = pscheduler.url_get(post_url,
                                                    params={"detail": True},
                                                    bind=lead_bind,
                                                    throw=False)
                if status != 200:
                    raise TaskPostingException(
                        "Unable to fetch posted task from %s: %s"
                        % (part_name, result))
                log.debug("Fetched %s", result)
                try:
                    details = result["detail"]["spec-limits-passed"]
                    log.debug("Details from %s: %s", post_url, details)
                    limits_passed.extend(details)
                except KeyError:
                    pass  # Remote didn't provide details; carry on.

            except TaskPostingException as ex:

                # Disable the task locally and let it get rid of the
                # other participants.

                posted_to = "%s/%s" % (request.url, task_uuid)
                parsed = list(urllib.parse.urlsplit(posted_to))
                # Turn the netloc into a substitution template.
                parsed[1] = "%s"
                template = urllib.parse.urlunsplit(parsed)

                try:
                    dbcursor_query("SELECT api_task_disable(%s, %s)",
                                   [task_uuid, template])
                except Exception:
                    # NOTE(review): log.exception() is called with no
                    # message; stdlib logging requires one — presumably
                    # this log object provides a default.  Confirm.
                    log.exception()

                return error("Error while tasking %s: %s" % (part_name, ex))

        # Update the list of limits passed in the local database
        # TODO: How do the other participants know about this?

        log.debug("Limits passed: %s", limits_passed)
        cursor = dbcursor_query(
            "UPDATE task SET limits_passed = %s::JSON WHERE uuid = %s",
            [pscheduler.json_dump(limits_passed), task_uuid])

        # Enable the task so the scheduler will schedule it.

        try:
            dbcursor_query("SELECT api_task_enable(%s)", [task_uuid])
        except Exception:
            log.exception()
            return error("Failed to enable task %s. See system logs."
                         % task_uuid)
        log.debug("Task enabled for scheduling.")

        task_url = "%s/%s" % (request.base_url, task_uuid)

        # Non-expanded gets just the URL

        if not arg_boolean("expanded"):
            return ok_json(task_url)

        # Expanded gets a redirect to GET+expanded

        params = []
        for arg in ["detail", "pretty"]:
            if arg_boolean(arg):
                params.append(arg)
        if params:
            task_url += "?%s" % ("&".join(params))

        return see_other(task_url)

    else:

        return not_allowed()