def __init__(self, toolpath_object: MutableMapping[str, Any], loadingContext: LoadingContext) -> None:
    """Build a Process object from the provided dictionary.

    Collects requirements and hints (context-level, tool-level, and
    overrides), constructs the Avro record schemas for the tool's
    inputs and outputs, optionally validates embedded JavaScript
    expressions, and sanity-checks DockerRequirement usage.

    :param toolpath_object: preprocessed CWL document for this process.
    :param loadingContext: shared loading state (loader, schema names,
        default requirements/hints, strictness flags, ...).
    :raises validate.ValidationException: if a parameter lacks a 'type'
        or the derived input/output schemas are invalid.
    """
    super(Process, self).__init__()
    self.metadata = getdefault(loadingContext.metadata, {})  # type: Dict[str,Any]
    self.provenance_object = None  # type: Optional[ProvenanceProfile]
    self.parent_wf = None  # type: Optional[ProvenanceProfile]
    global SCHEMA_FILE, SCHEMA_DIR, SCHEMA_ANY  # pylint: disable=global-statement
    # Lazily populate the module-level CWL v1.0 base type schemas once
    # per interpreter; subsequent Process instances reuse them.
    if SCHEMA_FILE is None or SCHEMA_ANY is None or SCHEMA_DIR is None:
        get_schema("v1.0")
        SCHEMA_ANY = cast(
            Dict[str, Any],
            SCHEMA_CACHE["v1.0"][3].idx["https://w3id.org/cwl/salad#Any"],
        )
        SCHEMA_FILE = cast(
            Dict[str, Any],
            SCHEMA_CACHE["v1.0"][3].idx["https://w3id.org/cwl/cwl#File"],
        )
        SCHEMA_DIR = cast(
            Dict[str, Any],
            SCHEMA_CACHE["v1.0"][3].idx["https://w3id.org/cwl/cwl#Directory"],
        )

    self.names = schema.make_avro_schema(
        [SCHEMA_FILE, SCHEMA_DIR, SCHEMA_ANY], Loader({}))
    self.tool = toolpath_object
    # Requirements accumulate in precedence order: loading context,
    # then the tool document itself, then any user-supplied overrides.
    self.requirements = copy.deepcopy(
        getdefault(loadingContext.requirements, []))
    self.requirements.extend(self.tool.get("requirements", []))
    if "id" not in self.tool:
        self.tool["id"] = "_:" + str(uuid.uuid4())
    self.requirements.extend(
        get_overrides(getdefault(loadingContext.overrides_list, []),
                      self.tool["id"]).get("requirements", []))
    self.hints = copy.deepcopy(getdefault(loadingContext.hints, []))
    self.hints.extend(self.tool.get("hints", []))
    # Versions of requirements and hints which aren't mutated.
    self.original_requirements = copy.deepcopy(self.requirements)
    self.original_hints = copy.deepcopy(self.hints)
    self.doc_loader = loadingContext.loader
    self.doc_schema = loadingContext.avsc_names
    self.formatgraph = None  # type: Optional[Graph]
    if self.doc_loader is not None:
        self.formatgraph = self.doc_loader.graph

    checkRequirements(self.tool, supportedProcessRequirements)
    self.validate_hints(
        loadingContext.avsc_names,
        self.tool.get("hints", []),
        strict=getdefault(loadingContext.strict, False),
    )

    self.schemaDefs = {}  # type: Dict[str,Dict[str, Any]]
    sd, _ = self.get_requirement("SchemaDefRequirement")
    if sd is not None:
        sdtypes = avroize_type(sd["types"])
        av = schema.make_valid_avro(
            sdtypes, {t["name"]: t for t in sdtypes}, set())
        for i in av:
            self.schemaDefs[i["name"]] = i  # type: ignore
        schema.make_avsc_object(schema.convert_to_dict(av), self.names)

    # Build record schema from inputs
    self.inputs_record_schema = {
        "name": "input_record_schema",
        "type": "record",
        "fields": [],
    }  # type: Dict[str, Any]
    self.outputs_record_schema = {
        "name": "outputs_record_schema",
        "type": "record",
        "fields": [],
    }  # type: Dict[str, Any]
    for key in ("inputs", "outputs"):
        for i in self.tool[key]:
            c = copy.deepcopy(i)
            c["name"] = shortname(c["id"])
            del c["id"]
            if "type" not in c:
                raise validate.ValidationException(
                    "Missing 'type' in parameter '{}'".format(c["name"]))
            # A parameter with a default must also accept null so the
            # default can stand in for an absent value.
            # NOTE: a former no-op ``else: c["type"] = c["type"]`` branch
            # was removed here.
            if "default" in c and "null" not in aslist(c["type"]):
                nullable = ["null"]
                nullable.extend(aslist(c["type"]))
                c["type"] = nullable
            c["type"] = avroize_type(c["type"], c["name"])
            if key == "inputs":
                self.inputs_record_schema["fields"].append(c)
            elif key == "outputs":
                self.outputs_record_schema["fields"].append(c)

    with SourceLine(toolpath_object, "inputs", validate.ValidationException):
        self.inputs_record_schema = cast(
            Dict[str, Any],
            schema.make_valid_avro(self.inputs_record_schema, {}, set()),
        )
        schema.make_avsc_object(
            schema.convert_to_dict(self.inputs_record_schema), self.names)
    with SourceLine(toolpath_object, "outputs", validate.ValidationException):
        self.outputs_record_schema = cast(
            Dict[str, Any],
            schema.make_valid_avro(self.outputs_record_schema, {}, set()),
        )
        schema.make_avsc_object(
            schema.convert_to_dict(self.outputs_record_schema), self.names)

    if toolpath_object.get("class") is not None and not getdefault(
            loadingContext.disable_js_validation, False):
        if loadingContext.js_hint_options_file is not None:
            try:
                with open(loadingContext.js_hint_options_file) as options_file:
                    validate_js_options = json.load(options_file)
            # NOTE: the unused ``as err`` binding was dropped; the
            # handler logs and re-raises the original exception.
            except (OSError, ValueError):
                _logger.error(
                    "Failed to read options file %s",
                    loadingContext.js_hint_options_file,
                )
                raise
        else:
            validate_js_options = None
        if self.doc_schema is not None:
            validate_js_expressions(
                cast(CommentedMap, toolpath_object),
                self.doc_schema.names[toolpath_object["class"]],
                validate_js_options,
            )

    dockerReq, is_req = self.get_requirement("DockerRequirement")
    # Warn when dockerOutputDirectory appears only as a hint (is_req is
    # False); it is only enforceable as a requirement.
    if (dockerReq is not None and "dockerOutputDirectory" in dockerReq
            and is_req is not None and not is_req):
        _logger.warning(
            SourceLine(item=dockerReq, raise_type=str).makeError(
                "When 'dockerOutputDirectory' is declared, DockerRequirement "
                "should go in the 'requirements' section, not 'hints'."))

    if (dockerReq is not None and is_req is not None
            and dockerReq.get("dockerOutputDirectory") == "/var/spool/cwl"):
        if is_req:
            # In this specific case, it is legal to have /var/spool/cwl,
            # so skip the check.
            pass
        else:
            # Must be a requirement
            var_spool_cwl_detector(self.tool)
    else:
        var_spool_cwl_detector(self.tool)
def __init__(self,
             toolpath_object,  # type: MutableMapping[Text, Any]
             loadingContext    # type: LoadingContext
            ):  # type: (...) -> None
    """Build a Process object from the provided dictionary.

    Legacy (Text / type-comment) variant of the Process initializer.
    Collects requirements and hints (context-level, tool-level, and
    overrides), constructs the Avro record schemas for the tool's
    inputs and outputs, optionally validates embedded JavaScript
    expressions, and sanity-checks DockerRequirement usage.

    :param toolpath_object: preprocessed CWL document for this process.
    :param loadingContext: shared loading state (loader, schema names,
        default requirements/hints, strictness flags, ...).
    :raises validate.ValidationException: if a parameter lacks a 'type'
        or the derived input/output schemas are invalid.
    """
    # NOTE(review): unlike the str-annotated twin of this initializer,
    # this variant never calls super().__init__() — confirm intentional.
    self.metadata = getdefault(loadingContext.metadata, {})  # type: Dict[Text,Any]
    self.provenance_object = None  # type: Optional[ProvenanceProfile]
    self.parent_wf = None  # type: Optional[ProvenanceProfile]
    global SCHEMA_FILE, SCHEMA_DIR, SCHEMA_ANY  # pylint: disable=global-statement
    # Lazily populate the module-level CWL v1.0 base type schemas once
    # per interpreter; subsequent Process instances reuse them.
    if SCHEMA_FILE is None or SCHEMA_ANY is None or SCHEMA_DIR is None:
        get_schema("v1.0")
        SCHEMA_ANY = cast(
            Dict[Text, Any],
            SCHEMA_CACHE["v1.0"][3].idx["https://w3id.org/cwl/salad#Any"])
        SCHEMA_FILE = cast(
            Dict[Text, Any],
            SCHEMA_CACHE["v1.0"][3].idx["https://w3id.org/cwl/cwl#File"])
        SCHEMA_DIR = cast(
            Dict[Text, Any],
            SCHEMA_CACHE["v1.0"][3].idx["https://w3id.org/cwl/cwl#Directory"])

    self.names = schema.make_avro_schema(
        [SCHEMA_FILE, SCHEMA_DIR, SCHEMA_ANY], Loader({}))
    self.tool = toolpath_object
    # Requirements accumulate in precedence order: loading context,
    # then the tool document itself, then any user-supplied overrides.
    self.requirements = copy.deepcopy(
        getdefault(loadingContext.requirements, []))
    self.requirements.extend(self.tool.get("requirements", []))
    if "id" not in self.tool:
        self.tool["id"] = "_:" + Text(uuid.uuid4())
    self.requirements.extend(
        get_overrides(getdefault(loadingContext.overrides_list, []),
                      self.tool["id"]).get("requirements", []))
    self.hints = copy.deepcopy(getdefault(loadingContext.hints, []))
    self.hints.extend(self.tool.get("hints", []))
    # Versions of requirements and hints which aren't mutated.
    self.original_requirements = copy.deepcopy(self.requirements)
    self.original_hints = copy.deepcopy(self.hints)
    self.doc_loader = loadingContext.loader
    self.doc_schema = loadingContext.avsc_names
    self.formatgraph = None  # type: Optional[Graph]
    if self.doc_loader is not None:
        self.formatgraph = self.doc_loader.graph

    checkRequirements(self.tool, supportedProcessRequirements)
    self.validate_hints(
        loadingContext.avsc_names,
        self.tool.get("hints", []),
        strict=getdefault(loadingContext.strict, False))

    self.schemaDefs = {}  # type: Dict[Text,Dict[Text, Any]]
    sd, _ = self.get_requirement("SchemaDefRequirement")
    if sd is not None:
        sdtypes = avroize_type(sd["types"])
        av = schema.make_valid_avro(
            sdtypes, {t["name"]: t for t in sdtypes}, set())
        for i in av:
            self.schemaDefs[i["name"]] = i  # type: ignore
        schema.make_avsc_object(schema.convert_to_dict(av), self.names)

    # Build record schema from inputs
    self.inputs_record_schema = {
        "name": "input_record_schema",
        "type": "record",
        "fields": []}  # type: Dict[Text, Any]
    self.outputs_record_schema = {
        "name": "outputs_record_schema",
        "type": "record",
        "fields": []}  # type: Dict[Text, Any]
    for key in ("inputs", "outputs"):
        for i in self.tool[key]:
            c = copy.deepcopy(i)
            c["name"] = shortname(c["id"])
            del c["id"]
            if "type" not in c:
                raise validate.ValidationException(
                    u"Missing 'type' in parameter '{}'".format(c["name"]))
            # A parameter with a default must also accept null so the
            # default can stand in for an absent value.
            # NOTE: a former no-op ``else: c["type"] = c["type"]`` branch
            # was removed here.
            if "default" in c and "null" not in aslist(c["type"]):
                nullable = ["null"]
                nullable.extend(aslist(c["type"]))
                c["type"] = nullable
            c["type"] = avroize_type(c["type"], c["name"])
            if key == "inputs":
                self.inputs_record_schema["fields"].append(c)
            elif key == "outputs":
                self.outputs_record_schema["fields"].append(c)

    with SourceLine(toolpath_object, "inputs", validate.ValidationException):
        self.inputs_record_schema = cast(
            Dict[Text, Any],
            schema.make_valid_avro(self.inputs_record_schema, {}, set()))
        schema.make_avsc_object(
            schema.convert_to_dict(self.inputs_record_schema), self.names)
    with SourceLine(toolpath_object, "outputs", validate.ValidationException):
        self.outputs_record_schema = cast(
            Dict[Text, Any],
            schema.make_valid_avro(self.outputs_record_schema, {}, set()))
        schema.make_avsc_object(
            schema.convert_to_dict(self.outputs_record_schema), self.names)

    if toolpath_object.get("class") is not None \
            and not getdefault(loadingContext.disable_js_validation, False):
        if loadingContext.js_hint_options_file is not None:
            try:
                with open(loadingContext.js_hint_options_file) as options_file:
                    validate_js_options = json.load(options_file)
            # NOTE: ``raise err`` was replaced with a bare ``raise`` to
            # re-raise with the original traceback; the unused ``as err``
            # binding was dropped.
            except (OSError, ValueError):
                _logger.error(
                    "Failed to read options file %s",
                    loadingContext.js_hint_options_file)
                raise
        else:
            validate_js_options = None
        if self.doc_schema is not None:
            validate_js_expressions(
                cast(CommentedMap, toolpath_object),
                self.doc_schema.names[toolpath_object["class"]],
                validate_js_options)

    dockerReq, is_req = self.get_requirement("DockerRequirement")
    # Warn when dockerOutputDirectory appears only as a hint (is_req is
    # False); it is only enforceable as a requirement.
    if dockerReq is not None and "dockerOutputDirectory" in dockerReq \
            and is_req is not None and not is_req:
        _logger.warning(
            SourceLine(item=dockerReq, raise_type=Text).makeError(
                "When 'dockerOutputDirectory' is declared, DockerRequirement "
                "should go in the 'requirements' section, not 'hints'."))

    if dockerReq is not None and is_req is not None \
            and dockerReq.get("dockerOutputDirectory") == "/var/spool/cwl":
        if is_req:
            # In this specific case, it is legal to have /var/spool/cwl,
            # so skip the check.
            pass
        else:
            # Must be a requirement
            var_spool_cwl_detector(self.tool)
    else:
        var_spool_cwl_detector(self.tool)