def input(
    self,
    identifier: str,
    datatype: ParseableType,
    # BUG FIX: annotations previously used the builtin `any` (a function),
    # not `typing.Any`.
    default: Any = None,
    value: Any = None,
    doc: Union[str, InputDocumentation] = None,
):
    """
    Create an input node on a workflow.

    :param identifier: Unique identifier for the input within this workflow.
    :param datatype: The data type of the input; parsed through get_instantiated_type.
    :param default: Default value for the input. Providing one marks the type optional.
    :param value: A value to bind directly to the input node.
    :param doc: Documentation for the input, either a string or an InputDocumentation.
    :return: The created InputNode (also registered in self.nodes / self.input_nodes).
    """
    self.verify_identifier(identifier, repr(datatype))

    datatype = get_instantiated_type(datatype)
    # A default means the caller may omit this input, so the type becomes optional.
    if default is not None:
        datatype.optional = True

    doc = doc if isinstance(doc, InputDocumentation) else InputDocumentation(doc=doc)

    inp = InputNode(
        self,
        identifier=identifier,
        datatype=datatype,
        default=default,
        doc=doc,
        value=value,
    )

    self.nodes[identifier] = inp
    self.input_nodes[identifier] = inp
    return inp
def inputs(self):
    """
    Derive this PythonTool's inputs from the signature of ``self.code_block``.

    Inspects each parameter (skipping ``inspect_ignore_keys``), determines its
    type from the annotation (falling back to the type of its default), and
    pairs it with any docstring ``:param:`` text. The result is cached on
    ``self._cached_input_signature``.

    :return: list of TInput describing the code block's parameters
    :raises Exception: if a parameter lacks a type annotation, or an
        annotation cannot be mapped to a janis DataType
    """
    if self._cached_input_signature is None:
        import inspect

        argspec = inspect.signature(self.code_block)
        docstrings = parse_docstring(self.code_block.__doc__)
        paramlist = docstrings.get("params", [])
        # BUG FIX: a param entry may have no "doc" (or doc=None); calling
        # .strip() on it raised AttributeError. Coalesce to "" first.
        paramdocs = {
            p["name"]: (p.get("doc") or "").strip() for p in paramlist if "name" in p
        }

        missing_annotations = set()
        unsupported_types = {}

        ins = []
        for inp in argspec.parameters.values():
            if inp.name in inspect_ignore_keys:
                continue
            fdefault = inp.default
            # A parameter is optional when it declares any default (including None).
            optional = (fdefault is not inspect.Parameter.empty) or fdefault is None
            default = fdefault if optional else None

            # Fall back to the type of the default when no annotation was given.
            defaulttype = type(fdefault) if fdefault is not None else None
            annotation = (
                defaulttype
                if inp.annotation is inspect.Parameter.empty
                else inp.annotation
            )

            if not annotation:
                missing_annotations.add(inp.name)
                continue

            dt_type: Optional[DataType] = get_instantiated_type(
                annotation, optional=optional
            )
            if not dt_type:
                unsupported_types[inp.name] = annotation
                continue

            ins.append(
                TInput(
                    tag=inp.name,
                    intype=dt_type,
                    default=default,
                    doc=InputDocumentation(paramdocs.get(inp.name)),
                )
            )

        if missing_annotations:
            raise Exception(
                f"The following types on the PythonTool '{self.id()}' were missing type annotations (REQUIRED): "
                + ", ".join(missing_annotations)
            )

        if unsupported_types:
            raise Exception(
                "Unsupported types for inputs: "
                + ", ".join(f"{k}: {v}" for k, v in unsupported_types.items())
            )
        self._cached_input_signature = ins

    return self._cached_input_signature
def __init__(
    self,
    value: Any,
    prefix: Optional[str] = None,
    position: Optional[int] = 0,
    separate_value_from_prefix=None,
    doc: Optional[Union[str, DocumentationMeta]] = None,
    shell_quote: bool = None,
):
    """
    A ``ToolArgument`` is a CLI parameter that cannot be override (at runtime).
    The value can

    :param value:
    :type value: ``str`` | ``janis.InputSelector`` | ``janis.StringFormatter``
    :param position: The position of the input to be applied. (Default = 0, after the base_command).
    :param prefix: The prefix to be appended before the element. (By default, a space will also be
        applied, see ``separate_value_from_prefix`` for more information)
    :param separate_value_from_prefix: (Default: True) Add a space between the prefix and value when ``True``.
    :param doc: Documentation string for the argument, this is used to generate the tool documentation and provide
    :param shell_quote: Stops shell quotes from being applied in all circumstances, useful when joining multiple commands together.
    """
    self.prefix: Optional[str] = prefix
    self.value = value
    self.position: Optional[int] = position

    # An argument counts as an "expression" when it is a Selector, or a string
    # matching expr_pattern; falsy values are recorded as None (undetermined).
    if self.value:
        self.is_expression = isinstance(self.value, Selector) or (
            re.match(self.expr_pattern, self.value) is not None
        )
    else:
        self.is_expression = None

    self.separate_value_from_prefix = separate_value_from_prefix

    # Normalise doc into an InputDocumentation unless one was supplied already.
    if isinstance(doc, InputDocumentation):
        self.doc: DocumentationMeta = doc
    else:
        self.doc = InputDocumentation(doc)

    self.shell_quote = shell_quote

    # Warn when separation is explicitly disabled but the prefix lacks a
    # trailing '=' — the prefix and value would run together on the CLI.
    prefix_runs_into_value = (
        self.prefix
        and self.separate_value_from_prefix is not None
        and not self.separate_value_from_prefix
        and not self.prefix.endswith("=")
    )
    if prefix_runs_into_value:
        Logger.warn(
            f"Argument ({self.prefix}{self.value}) is not separating and did not end with ='"
        )
def test_translate_input(self):
    """An InputNode with a default and docstring translates to the expected CWL input."""
    inp = InputNode(
        None,
        identifier="testIdentifier",
        datatype=String(),
        default="defaultValue",
        doc=InputDocumentation("docstring"),
        value=None,
    )
    tinp = cwl.translate_input(inp)

    self.assertEqual("testIdentifier", tinp.id)
    self.assertIsNone(tinp.label)
    self.assertIsNone(tinp.secondaryFiles)
    self.assertEqual("docstring", tinp.doc)
    # BUG FIX: was `assertIsNone(None, tinp.inputBinding)`, which always passed —
    # it asserted None is None and used tinp.inputBinding as the failure message.
    self.assertIsNone(tinp.inputBinding)
    self.assertEqual("string", tinp.type)
    self.assertEqual("defaultValue", tinp.default)
def __init__(
    self,
    tag: str,
    input_type: ParseableType,
    position: Optional[int] = None,
    prefix: Optional[str] = None,
    separate_value_from_prefix: bool = None,
    prefix_applies_to_all_elements: bool = None,
    presents_as: str = None,
    secondaries_present_as: Dict[str, str] = None,
    separator: str = None,
    shell_quote: bool = None,
    localise_file: bool = None,
    default: Any = None,
    doc: Optional[Union[str, InputDocumentation]] = None,
):
    """
    A ``ToolInput`` represents an input to a tool, with parameters that allow it to be bound on the command line.
    The ToolInput must have either a position or prefix set to be bound onto the command line.

    :param tag: The identifier of the input (unique to inputs and outputs of a tool)
    :param input_type: The data type that this input accepts
    :type input_type: ``janis.ParseableType``
    :param position: The position of the input to be applied. (Default = 0, after the base_command).
    :param prefix: The prefix to be appended before the element. (By default, a space will also be
        applied, see ``separate_value_from_prefix`` for more information)
    :param separate_value_from_prefix: (Default: True) Add a space between the prefix and value when ``True``.
    :param prefix_applies_to_all_elements: Applies the prefix to each element of the array (Array inputs only)
    :param shell_quote: Stops shell quotes from being applied in all circumstances, useful when joining multiple commands together.
    :param separator: The separator between each element of an array (defaults to ' ')
    :param localise_file: Ensures that the file(s) are localised into the execution directory.
    :param default: The default value to be applied if the input is not defined.
    :param doc: Documentation string for the ToolInput, this is used to generate the tool documentation and provide
        hints to the user.
    """
    # A ToolInput carries no inline value; binding comes from the runtime input.
    super().__init__(
        value=None,
        prefix=prefix,
        position=position,
        separate_value_from_prefix=separate_value_from_prefix,
        doc=None,
        shell_quote=shell_quote,
    )
    self.doc: InputDocumentation = (
        doc if isinstance(doc, DocumentationMeta) else InputDocumentation(doc=doc)
    )

    if not Validators.validate_identifier(tag):
        raise Exception(
            f"The identifier '{tag}' was not validated because {Validators.reason_for_failure(tag)}"
        )

    self.tag: str = tag
    self.input_type: ParseableType = get_instantiated_type(input_type)
    self.default = default
    self.prefix_applies_to_all_elements = prefix_applies_to_all_elements
    self.separator = separator
    self.localise_file = localise_file
    self.presents_as = presents_as
    self.secondaries_present_as = secondaries_present_as

    # secondaries_present_as remaps secondary-file extensions, so the type
    # must actually declare secondary files and every remapped key must exist.
    if self.secondaries_present_as:
        if not self.input_type.secondary_files():
            # BUG FIX: message previously said "ToolOutput" and was missing "does"
            # ("... type X not have any secondary files") — copy-paste from ToolOutput.
            raise Exception(
                f"The ToolInput '{self.id()}' requested a rewrite of secondary file extension through "
                f"'secondaries_present_as', but the type {self.input_type.id()} does not have any secondary files."
            )
        secs = set(self.input_type.secondary_files())
        to_remap = set(self.secondaries_present_as.keys())
        invalid = to_remap - secs
        if len(invalid) > 0:
            # BUG FIX: message previously said "output" twice for this input type.
            raise Exception(
                f"Error when constructing input '{self.id()}', the secondaries_present_as contained secondary "
                f"files ({', '.join(invalid)}) that were not found in the input "
                f"type '{self.input_type.id()}' ({', '.join(secs)})"
            )
def step(
    self,
    identifier: str,
    tool: Tool,
    scatter: Union[str, List[str], ScatterDescription] = None,
    ignore_missing=False,
    doc: str = None,
):
    """
    Construct a step on this workflow.

    Validates the identifier, normalises / verifies any scatter description,
    checks the tool's connections against its input map, promotes primitive
    connection values to workflow inputs, and wires edges into the graph.

    :param identifier: The identifier of the step, unique within the workflow.
    :param tool: The tool that should run for this step.
    :param scatter: Indicate whether a scatter should occur, on what, and how.
    :type scatter: Union[str, ScatterDescription]
    :param ignore_missing: Don't throw an error if required params are missing from this function
    :param doc: Documentation for the step, a string or DocumentationMeta.
    :return: the created StepNode (also registered in self.nodes / self.step_nodes)
    """

    self.verify_identifier(identifier, tool.id())

    # Normalise a str / list-of-str scatter into a ScatterDescription (dot method).
    if scatter is not None and not isinstance(scatter, ScatterDescription):

        fields = None
        if isinstance(scatter, str):
            fields = [scatter]
        elif isinstance(scatter, list):
            fields = scatter
        else:
            raise Exception(
                f"Couldn't scatter with field '{scatter}' ({type(scatter)}"
            )

        scatter = ScatterDescription(fields, method=ScatterMethods.dot)

    # verify scatter: every scattered field must be an input of the tool
    if scatter:
        ins = set(tool.inputs_map().keys())
        fields = set(scatter.fields)
        if any(f not in ins for f in fields):
            # if there is a field not in the input map, we have a problem
            extra_keys = ", ".join(f"'{f}'" for f in (fields - ins))
            raise Exception(
                f"Couldn't scatter the field(s) {extra_keys} for step '{identifier}' "
                f"as they are not inputs to the tool '{tool.id()}'"
            )

    tool.workflow = self
    inputs = tool.inputs_map()

    connections = tool.connections

    provided_keys = set(connections.keys())
    all_keys = set(inputs.keys())
    required_keys = set(
        # The input is optional if it's optional or has default)
        i
        for i, v in inputs.items()
        if not (v.intype.optional or v.default is not None)
    )

    # Reject connections that don't correspond to any tool input.
    if not provided_keys.issubset(all_keys):
        unrecparams = ", ".join(provided_keys - all_keys)

        tags = ", ".join([f"in.{i}" for i in all_keys])

        raise Exception(
            f"Unrecognised parameters {unrecparams} when creating '{identifier}' ({tool.id()}). "
            f"Expected types: {tags}"
        )

    # Unless suppressed, every required (non-optional, no-default) input must be connected.
    if not ignore_missing and not required_keys.issubset(provided_keys):
        missing = ", ".join(f"'{i}'" for i in (required_keys - provided_keys))
        raise Exception(
            f"Missing the parameters {missing} when creating '{identifier}' ({tool.id()})"
        )

    d = doc if isinstance(doc, DocumentationMeta) else DocumentationMeta(doc=doc)
    stp = StepNode(self, identifier=identifier, tool=tool, scatter=scatter, doc=d)

    added_edges = []
    for (k, v) in connections.items():

        isfilename = isinstance(v, Filename)
        if is_python_primitive(v) or isfilename:
            # Primitive (or Filename) connection values are promoted to a new
            # workflow input named '<step>_<input>' with the value as default.
            inp_identifier = f"{identifier}_{k}"
            referencedtype = copy.copy(inputs[k].intype) if not isfilename else v

            parsed_type = get_instantiated_type(v)

            if parsed_type and not referencedtype.can_receive_from(parsed_type):
                raise TypeError(
                    f"The type {parsed_type.id()} inferred from the value '{v}' is not "
                    f"compatible with the '{identifier}.{k}' type: {referencedtype.id()}"
                )

            # The promoted input has a default, so its type becomes optional.
            referencedtype.optional = True

            # NOTE(review): this mutates the tool's input doc object in place
            # (no copy) — presumably intentional, but verify callers don't share it.
            indoc = inputs[k].doc
            indoc.quality = InputQualityType.configuration

            v = self.input(
                inp_identifier,
                referencedtype,
                default=v.generated_filename() if isfilename else v,
                doc=indoc,
            )
        if v is None:
            # Explicit None connection: still create a workflow input (default None)
            # so the parameter is surfaced as configuration.
            inp_identifier = f"{identifier}_{k}"
            v = self.input(
                inp_identifier,
                inputs[k].intype,
                default=v,
                doc=InputDocumentation(
                    doc=None, quality=InputQualityType.configuration
                ),
            )

        # Resolve the connection to concrete source node(s) and add graph edges.
        verifiedsource = verify_or_try_get_source(v)
        if isinstance(verifiedsource, list):
            for vv in verifiedsource:
                added_edges.append(stp._add_edge(k, vv))
        else:
            added_edges.append(stp._add_edge(k, verifiedsource))

    # Track workflow-level flags derived from the edges just added.
    for e in added_edges:

        si = e.finish.sources[e.ftag] if e.ftag else first_value(e.finish.sources)
        self.has_multiple_inputs = self.has_multiple_inputs or si.multiple_inputs

    self.has_scatter = self.has_scatter or scatter is not None
    self.has_subworkflow = self.has_subworkflow or isinstance(tool, Workflow)
    self.nodes[identifier] = stp
    self.step_nodes[identifier] = stp

    return stp