def validate_requirements(self, tool, field):
    # Validate each requirement or hint against the schema. Problems with hints
    # are only logged as warnings; problems with requirements are fatal.
    for r in tool.get(field, []):
        try:
            if self.names.get_name(r["class"], "") is None:
                raise validate.ValidationException("Unknown requirement %s" % (r["class"]))
            validate.validate_ex(self.names.get_name(r["class"], ""), r)
            if "requirements" in r:
                self.validate_requirements(r, "requirements")
            if "hints" in r:
                self.validate_requirements(r, "hints")
        except validate.ValidationException as v:
            err = "While validating %s %s\n%s" % (field, r["class"], validate.indent(str(v)))
            if field == "hints":
                _logger.warn(err)
            else:
                raise validate.ValidationException(err)
def __init__(self, toolpath_object, **kwargs):
    try:
        makeTool = kwargs.get("makeTool")
        self.embedded_tool = makeTool(toolpath_object["run"], **kwargs)
    except validate.ValidationException as v:
        raise WorkflowException(
            "Tool definition %s failed validation:\n%s" %
            (toolpath_object["run"]["id"], validate.indent(str(v))))

    if "id" in toolpath_object:
        self.id = toolpath_object["id"]
    else:
        self.id = "#step_" + str(random.randint(1, 1000000000))

    for field in ("inputs", "outputs"):
        for i in toolpath_object[field]:
            inputid = i["id"]
            (_, d) = urlparse.urldefrag(inputid)
            frag = d.split(".")[-1]
            p = urlparse.urljoin(toolpath_object["run"].get("id", self.id), "#" + frag)
            found = False
            for a in self.embedded_tool.tool[field]:
                if a["id"] == p:
                    i.update(a)
                    found = True
            if not found:
                raise WorkflowException(
                    "Did not find %s parameter '%s' in workflow step" % (field, p))
            i["id"] = inputid

    super(WorkflowStep, self).__init__(toolpath_object, "Process", do_validate=False, **kwargs)

    if self.embedded_tool.tool["class"] == "Workflow":
        (feature, _) = self.get_requirement("SubworkflowFeatureRequirement")
        if not feature:
            raise WorkflowException(
                "Workflow contains embedded workflow but SubworkflowFeatureRequirement not declared")
def __init__(self, toolpath_object, **kwargs):
    try:
        makeTool = kwargs.get("makeTool")
        self.embedded_tool = makeTool(toolpath_object["run"], **kwargs)
    except validate.ValidationException as v:
        raise WorkflowException(
            "Tool definition %s failed validation:\n%s" %
            (toolpath_object["run"]["id"], validate.indent(str(v))))

    if "id" in toolpath_object:
        self.id = toolpath_object["id"]
    else:
        self.id = "#step_" + str(random.randint(1, 1000000000))

    for field in ("inputs", "outputs"):
        for i in toolpath_object[field]:
            inputid = i["id"]
            (_, d) = urlparse.urldefrag(inputid)
            frag = d.split(".")[-1]
            p = urlparse.urljoin(toolpath_object["run"].get("id", self.id), "#" + frag)
            found = False
            for a in self.embedded_tool.tool[field]:
                if a["id"] == p:
                    i.update(a)
                    found = True
            if not found:
                raise WorkflowException(
                    "Did not find %s parameter '%s' in workflow step" % (field, p))
            i["id"] = inputid

    super(WorkflowStep, self).__init__(toolpath_object, "Process", do_validate=False, **kwargs)

    if self.embedded_tool.tool["class"] == "Workflow":
        (feature, _) = self.get_requirement("SubworkflowFeatureRequirement")
        if not feature:
            raise WorkflowException(
                "Workflow contains embedded workflow but SubworkflowFeatureRequirement not declared")
def __init__(self, toolpath_object, pos, **kwargs):
    try:
        makeTool = kwargs.get("makeTool")
        self.embedded_tool = makeTool(toolpath_object["run"], **kwargs)
    except validate.ValidationException as v:
        raise WorkflowException(
            "Tool definition %s failed validation:\n%s" %
            (toolpath_object["run"]["id"], validate.indent(str(v))))

    if "id" in toolpath_object:
        self.id = toolpath_object["id"]
    else:
        self.id = "#step" + str(pos)

    for field in ("inputs", "outputs"):
        for i in toolpath_object[field]:
            inputid = i["id"]
            (_, d) = urlparse.urldefrag(inputid)
            frag = d.split(".")[-1]
            p = urlparse.urljoin(toolpath_object["run"].get("id", self.id), "#" + frag)
            found = False
            for a in self.embedded_tool.tool[field]:
                if a["id"] == p:
                    i.update(a)
                    found = True
            if not found:
                raise WorkflowException(
                    "Did not find %s parameter '%s' in workflow step" % (field, p))
            i["id"] = inputid

    super(WorkflowStep, self).__init__(toolpath_object, "Process", do_validate=False, **kwargs)

    if self.embedded_tool.tool["class"] == "Workflow":
        (feature, _) = self.get_requirement("SubworkflowFeatureRequirement")
        if not feature:
            raise WorkflowException(
                "Workflow contains embedded workflow but SubworkflowFeatureRequirement not declared")

    if "scatter" in self.tool:
        (feature, _) = self.get_requirement("ScatterFeatureRequirement")
        if not feature:
            raise WorkflowException(
                "Workflow contains scatter but ScatterFeatureRequirement not declared")

        inputparms = copy.deepcopy(self.tool["inputs"])
        outputparms = copy.deepcopy(self.tool["outputs"])
        scatter = aslist(self.tool["scatter"])

        method = self.tool.get("scatterMethod")
        if method is None and len(scatter) != 1:
            raise WorkflowException(
                "Must specify scatterMethod when scattering over multiple inputs")

        inp_map = {i["id"]: i for i in inputparms}
        for s in scatter:
            if s not in inp_map:
                raise WorkflowException("Invalid Scatter parameter '%s'" % s)
            # A scattered input accepts an array of its original type.
            inp_map[s]["type"] = {"type": "array", "items": inp_map[s]["type"]}

        if self.tool.get("scatterMethod") == "nested_crossproduct":
            nesting = len(scatter)
        else:
            nesting = 1

        # Outputs gain one level of array nesting per scatter dimension.
        for r in xrange(0, nesting):
            for i in outputparms:
                i["type"] = {"type": "array", "items": i["type"]}

        self.tool["inputs"] = inputparms
        self.tool["outputs"] = outputparms
def __init__(self, toolpath_object, docpath):
    try:
        self.embedded_tool = makeTool(toolpath_object["run"], docpath)
    except validate.ValidationException as v:
        raise WorkflowException(
            "Tool definition %s failed validation:\n%s" %
            (os.path.join(docpath, toolpath_object["run"]["id"]), validate.indent(str(v))))

    if "id" in toolpath_object:
        self.id = toolpath_object["id"]
    else:
        self.id = "#step_" + str(random.randint(1, 1000000000))

    for i in toolpath_object["inputs"]:
        p = i["param"] if 'param' in i else self.id
        (_, d) = urlparse.urldefrag(p)
        toolid = i.get("id", self.id + "." + d)
        found = False
        for a in self.embedded_tool.tool["inputs"]:
            if a["id"] == p:
                i.update(a)
                found = True
        if not found:
            raise WorkflowException("Did not find input parameter '%s' in workflow step" % (p))
        i["id"] = toolid

    for i in toolpath_object["outputs"]:
        p = i["param"] if 'param' in i else i['id']
        toolid = i["id"]
        found = False
        for a in self.embedded_tool.tool["outputs"]:
            if a["id"] == p:
                i.update(a)
                found = True
        if not found:
            raise WorkflowException("Did not find output parameter '%s' in workflow step" % (p))
        i["id"] = toolid

    super(External, self).__init__(toolpath_object, "WorkflowStep", docpath)
def __init__(self, toolpath_object, validateAs, do_validate=True, **kwargs): (_, self.names) = get_schema() self.tool = toolpath_object if do_validate: try: # Validate tool documument validate.validate_ex(self.names.get_name(validateAs, ""), self.tool, strict=kwargs.get("strict")) except validate.ValidationException as v: raise validate.ValidationException("Could not validate %s as %s:\n%s" % (self.tool.get("id"), validateAs, validate.indent(str(v)))) self.requirements = kwargs.get("requirements", []) + self.tool.get("requirements", []) self.hints = kwargs.get("hints", []) + self.tool.get("hints", []) self.validate_hints(self.tool.get("hints", []), strict=kwargs.get("strict")) self.schemaDefs = {} sd, _ = self.get_requirement("SchemaDefRequirement") if sd: for i in sd["types"]: avro.schema.make_avsc_object(i, self.names) self.schemaDefs[i["name"]] = i # Build record schema from inputs self.inputs_record_schema = {"name": "input_record_schema", "type": "record", "fields": []} for i in self.tool["inputs"]: c = copy.copy(i) doc_url, fragment = urlparse.urldefrag(c['id']) c["name"] = fragment del c["id"] if "type" not in c: raise validate.ValidationException("Missing `type` in parameter `%s`" % c["name"]) if "default" in c: c["type"] = ["null"] + aslist(c["type"]) else: c["type"] = c["type"] self.inputs_record_schema["fields"].append(c) avro.schema.make_avsc_object(self.inputs_record_schema, self.names) self.outputs_record_schema = {"name": "outputs_record_schema", "type": "record", "fields": []} for i in self.tool["outputs"]: c = copy.copy(i) doc_url, fragment = urlparse.urldefrag(c['id']) c["name"] = fragment del c["id"] if "type" not in c: raise validate.ValidationException("Missing `type` in parameter `%s`" % c["name"]) if "default" in c: c["type"] = ["null"] + aslist(c["type"]) else: c["type"] = c["type"] self.outputs_record_schema["fields"].append(c) avro.schema.make_avsc_object(self.outputs_record_schema, self.names)
def __init__(self, toolpath_object, validateAs, do_validate=True, **kwargs): (_, self.names) = get_schema() self.tool = toolpath_object if do_validate: try: # Validate tool documument validate.validate_ex(self.names.get_name(validateAs, ""), self.tool, strict=kwargs.get("strict")) except validate.ValidationException as v: raise validate.ValidationException( "Could not validate %s as %s:\n%s" % (self.tool.get("id"), validateAs, validate.indent(str(v)))) self.requirements = kwargs.get("requirements", []) + self.tool.get( "requirements", []) self.hints = kwargs.get("hints", []) + self.tool.get("hints", []) self.validate_hints(self.tool.get("hints", []), strict=kwargs.get("strict")) self.schemaDefs = {} sd, _ = self.get_requirement("SchemaDefRequirement") if sd: for i in sd["types"]: avro.schema.make_avsc_object(i, self.names) self.schemaDefs[i["name"]] = i # Build record schema from inputs self.inputs_record_schema = { "name": "input_record_schema", "type": "record", "fields": [] } for i in self.tool["inputs"]: c = copy.copy(i) doc_url, fragment = urlparse.urldefrag(c['id']) c["name"] = fragment del c["id"] if "type" not in c: raise validate.ValidationException( "Missing `type` in parameter `%s`" % c["name"]) if "default" in c: c["type"] = ["null"] + aslist(c["type"]) else: c["type"] = c["type"] self.inputs_record_schema["fields"].append(c) avro.schema.make_avsc_object(self.inputs_record_schema, self.names) self.outputs_record_schema = { "name": "outputs_record_schema", "type": "record", "fields": [] } for i in self.tool["outputs"]: c = copy.copy(i) doc_url, fragment = urlparse.urldefrag(c['id']) c["name"] = fragment del c["id"] if "type" not in c: raise validate.ValidationException( "Missing `type` in parameter `%s`" % c["name"]) if "default" in c: c["type"] = ["null"] + aslist(c["type"]) else: c["type"] = c["type"] self.outputs_record_schema["fields"].append(c) avro.schema.make_avsc_object(self.outputs_record_schema, self.names)
def __init__(self, toolpath_object, pos, **kwargs):
    try:
        makeTool = kwargs.get("makeTool")
        self.embedded_tool = makeTool(toolpath_object["run"], **kwargs)
    except validate.ValidationException as v:
        raise WorkflowException(
            "Tool definition %s failed validation:\n%s" %
            (toolpath_object["run"]["id"], validate.indent(str(v))))

    if "id" in toolpath_object:
        self.id = toolpath_object["id"]
    else:
        self.id = "#step" + str(pos)

    for field in ("inputs", "outputs"):
        for i in toolpath_object[field]:
            inputid = i["id"]
            (_, d) = urlparse.urldefrag(inputid)
            frag = d.split(".")[-1]
            p = urlparse.urljoin(toolpath_object["run"].get("id", self.id), "#" + frag)
            found = False
            for a in self.embedded_tool.tool[field]:
                if a["id"] == p:
                    i.update(a)
                    found = True
            if not found:
                raise WorkflowException(
                    "Did not find %s parameter '%s' in workflow step" % (field, p))
            i["id"] = inputid

    super(WorkflowStep, self).__init__(toolpath_object, "Process", do_validate=False, **kwargs)

    if self.embedded_tool.tool["class"] == "Workflow":
        (feature, _) = self.get_requirement("SubworkflowFeatureRequirement")
        if not feature:
            raise WorkflowException(
                "Workflow contains embedded workflow but SubworkflowFeatureRequirement not declared")

    if "scatter" in self.tool:
        (feature, _) = self.get_requirement("ScatterFeatureRequirement")
        if not feature:
            raise WorkflowException(
                "Workflow contains scatter but ScatterFeatureRequirement not declared")

        inputparms = copy.deepcopy(self.tool["inputs"])
        outputparms = copy.deepcopy(self.tool["outputs"])
        scatter = aslist(self.tool["scatter"])

        method = self.tool.get("scatterMethod")
        if method is None and len(scatter) != 1:
            raise WorkflowException(
                "Must specify scatterMethod when scattering over multiple inputs")

        inp_map = {i["id"]: i for i in inputparms}
        for s in scatter:
            if s not in inp_map:
                raise WorkflowException("Invalid Scatter parameter '%s'" % s)
            inp_map[s]["type"] = {"type": "array", "items": inp_map[s]["type"]}

        if self.tool.get("scatterMethod") == "nested_crossproduct":
            nesting = len(scatter)
        else:
            nesting = 1

        for r in xrange(0, nesting):
            for i in outputparms:
                i["type"] = {"type": "array", "items": i["type"]}

        self.tool["inputs"] = inputparms
        self.tool["outputs"] = outputparms
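# A minimal, self-contained sketch (not part of the original module) of the scatter
# type rewriting performed in the two constructors above: each scattered input
# parameter is wrapped in an array schema, and outputs gain one level of array
# nesting per scatter dimension when scatterMethod is "nested_crossproduct".
# The _aslist helper is a local stand-in for the aslist the real code imports;
# the ids and types in the example are made up for illustration.

def _aslist(x):
    # Wrap a scalar value in a list, mirroring the module's aslist helper.
    return x if isinstance(x, list) else [x]

def _rewrite_scatter_types(inputs, outputs, scatter, scatter_method=None):
    inp_map = {i["id"]: i for i in inputs}
    for s in _aslist(scatter):
        # A scattered input now accepts an array of its original type.
        inp_map[s]["type"] = {"type": "array", "items": inp_map[s]["type"]}
    nesting = len(_aslist(scatter)) if scatter_method == "nested_crossproduct" else 1
    for _ in range(nesting):
        for o in outputs:
            o["type"] = {"type": "array", "items": o["type"]}
    return inputs, outputs

# Usage (hypothetical ids):
#   ins, outs = _rewrite_scatter_types([{"id": "#step1.seq", "type": "File"}],
#                                      [{"id": "#step1.hits", "type": "File"}],
#                                      "#step1.seq")
#   ins[0]["type"]  -> {"type": "array", "items": "File"}
#   outs[0]["type"] -> {"type": "array", "items": "File"}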
def __init__(self, toolpath_object, validateAs, docpath, **kwargs):
    (_, self.names) = get_schema()
    self.docpath = docpath
    self.tool = toolpath_object

    try:
        # Validate tool document
        validate.validate_ex(self.names.get_name(validateAs, ""), self.tool, **kwargs)
    except validate.ValidationException as v:
        raise validate.ValidationException(
            "Could not validate %s:\n%s" % (self.tool.get("id"), validate.indent(str(v))))

    self.validate_requirements(self.tool, "requirements")
    self.validate_requirements(self.tool, "hints")

    for t in self.tool.get("requirements", []):
        t["_docpath"] = docpath

    for t in self.tool.get("hints", []):
        t["_docpath"] = docpath

    avro.schema.make_avsc_object({"name": "Any", "type": "enum", "symbols": ["Any"]}, self.names)

    self.schemaDefs = {}
    sd, _ = get_feature("SchemaDefRequirement", requirements=self.tool.get("requirements"), hints=self.tool.get("hints"))
    if sd:
        for i in sd["types"]:
            avro.schema.make_avsc_object(i, self.names)
            self.schemaDefs[i["name"]] = i

    # Build record schema from inputs
    self.inputs_record_schema = {"name": "input_record_schema", "type": "record", "fields": []}
    for i in self.tool["inputs"]:
        c = copy.copy(i)
        doc_url, fragment = urlparse.urldefrag(c['id'])
        c["name"] = fragment
        del c["id"]

        if "type" not in c:
            raise validate.ValidationException("Missing `type` in parameter `%s`" % c["name"])

        if "default" in c:
            c["type"] = ["null"] + aslist(c["type"])
        else:
            c["type"] = c["type"]

        self.inputs_record_schema["fields"].append(c)
    avro.schema.make_avsc_object(self.inputs_record_schema, self.names)

    self.outputs_record_schema = {"name": "outputs_record_schema", "type": "record", "fields": []}
    for i in self.tool["outputs"]:
        c = copy.copy(i)
        doc_url, fragment = urlparse.urldefrag(c['id'])
        c["name"] = fragment
        del c["id"]

        if "type" not in c:
            raise validate.ValidationException("Missing `type` in parameter `%s`" % c["name"])

        if "default" in c:
            c["type"] = ["null"] + aslist(c["type"])
        else:
            c["type"] = c["type"]

        self.outputs_record_schema["fields"].append(c)
    avro.schema.make_avsc_object(self.outputs_record_schema, self.names)
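# A minimal, self-contained sketch (not part of the original module) of how the
# record-schema loops above turn one parameter entry into an Avro-style field:
# the fragment of the parameter id becomes the field name, and a declared
# "default" makes the type nullable. _aslist is again a local stand-in for the
# module's aslist helper; the example id and type are hypothetical.

import copy
try:
    from urllib.parse import urldefrag  # Python 3
except ImportError:
    from urlparse import urldefrag      # Python 2, as used in the code above

def _aslist(x):
    return x if isinstance(x, list) else [x]

def _param_to_field(param):
    c = copy.copy(param)
    _, fragment = urldefrag(c["id"])
    c["name"] = fragment
    del c["id"]
    if "type" not in c:
        raise ValueError("Missing `type` in parameter `%s`" % c["name"])
    if "default" in c:
        # A default value means the field may be omitted, so allow null.
        c["type"] = ["null"] + _aslist(c["type"])
    return c

# Usage (hypothetical parameter):
#   _param_to_field({"id": "#threads", "type": "int", "default": 4})
#   -> {"type": ["null", "int"], "default": 4, "name": "threads"}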