def get_command_token(self, inputs: Any) -> CWLCommandToken:
    """Select the command token matching the given input value.

    Scans `self.value` and returns the first command token accepted by
    `_check_command_token`; when no alternative list is defined, delegates
    to the parent class implementation.

    :param inputs: the input value used to discriminate between alternatives
    :return: the matching CWLCommandToken
    :raises WorkflowDefinitionException: when alternatives exist but none
        matches `inputs`
    """
    if self.value is not None:
        for command_token in self.value:
            if _check_command_token(command_token, inputs):
                return command_token
        raise WorkflowDefinitionException(
            "No suitable command token for input value " + str(inputs))
    else:
        # Fixed: the original `return super()` returned a bare super-proxy
        # object instead of a token; explicitly delegate to the parent method.
        return super().get_command_token(inputs)
def _evaulate_condition(self, context: MutableMapping[Text, Any]) -> bool:
    """Evaluate this step's conditional `when` expression.

    :param context: the evaluation context passed to the expression engine
    :return: the boolean outcome of the expression, or True when no
        `when` expression is configured (the step always runs)
    :raises WorkflowDefinitionException: when the expression does not
        evaluate to a boolean
    """
    if self.when_expression is None:
        # No conditional clause configured: the step is unconditionally enabled.
        return True
    outcome = utils.eval_expression(
        expression=self.when_expression,
        context=context,
        full_js=self.full_js,
        expression_lib=self.expression_lib)
    # `bool` cannot be subclassed, so this is equivalent to checking
    # `outcome is True or outcome is False`.
    if not isinstance(outcome, bool):
        raise WorkflowDefinitionException(
            "Conditional 'when' must evaluate to 'true' or 'false'")
    return outcome
def infer_type_from_token(token_value: Any) -> Text:
    """Infer the CWL type name of a runtime token value.

    :param token_value: the value to classify; must not be None
    :return: the mapping's `class` field, or one of 'string', 'long',
        'double', 'boolean', or 'Any' when no type can be inferred
    :raises WorkflowDefinitionException: when `token_value` is None
    """
    if token_value is None:
        raise WorkflowDefinitionException(
            'Inputs of type `Any` cannot be null')
    if isinstance(token_value, MutableMapping):
        if 'class' in token_value:
            return token_value['class']
        # Fixed: a mapping without a `class` field previously fell off the end
        # of the function and returned None, violating the declared contract.
        return 'Any'
    # Fixed: bool must be tested before int, since bool is a subclass of int —
    # the original order classified True/False as 'long' and made the boolean
    # branch unreachable.
    elif isinstance(token_value, bool):
        return 'boolean'
    elif isinstance(token_value, Text):
        return 'string'
    elif isinstance(token_value, int):
        return 'long'
    elif isinstance(token_value, float):
        return 'double'
    else:
        # Could not infer token type: mark as Any
        return 'Any'
async def get(self) -> Any:
    """Return the next token for this scatter port.

    Pops from an internal queue, refilling it from the dependee port
    whenever it runs empty. Iterable token values are exploded into one
    queued token per element, so downstream consumers see scalar tokens.
    """
    while not self.queue:
        token = await self.dependee.get(
            posixpath.join(self.step.name, self.name))
        if isinstance(token, TerminationToken) or token.value is None:
            # Termination (or valueless) tokens are forwarded untouched,
            # only renamed to this port's name.
            self.queue = [token.rename(self.name)]
        elif isinstance(token.job, MutableSequence):
            # token.job being a list presumably means the token aggregates
            # outputs of multiple jobs — TODO confirm; its value elements are
            # already tokens and only need renaming.
            self.queue = [t.rename(self.name) for t in token.value]
        elif isinstance(token.value, MutableSequence):
            # Plain iterable value: build one scatter token per element,
            # concurrently, preserving element order via the index `i`.
            self.queue = await asyncio.gather(*[
                asyncio.create_task(
                    self._build_token(cast(Text, token.job), t, i))
                for i, t in enumerate(token.value)])
        else:
            # Scalar, non-iterable values cannot be scattered.
            raise WorkflowDefinitionException(
                "Scatter ports require iterable inputs")
    return self.queue.pop(0)
def _check_list(self, value: Any):
    """Validate that `value` is a list-like object.

    :param value: the token value to validate
    :raises WorkflowDefinitionException: when `value` is not a MutableSequence
    """
    if isinstance(value, MutableSequence):
        return
    raise WorkflowDefinitionException(
        "A {this} object can only be used to process list values".format(
            this=self.__class__.__name__))
def get_processor(self, token_value: Any) -> TokenProcessor:
    """Return the first registered processor able to handle `token_value`.

    Each candidate is validated through the type-specific predicate stored
    in `self.check_processor`, keyed by the candidate's concrete type.

    :param token_value: the value the selected processor must accept
    :raises WorkflowDefinitionException: when no registered processor matches
    """
    for candidate in self.processors:
        accepts = self.check_processor[type(candidate)]
        if accepts(candidate, token_value):
            return candidate
    raise WorkflowDefinitionException(
        "No suitable processors for token value " + str(token_value))
async def _get_value_from_command(self, job: Job, command_output: CWLCommandOutput):
    """Build this output port's token value from a completed command.

    Resolution order (first match wins):
      1. a `cwl.output.json` file found in the job's output directory
         (locally or on any allocated resource);
      2. this port's entry inside a mapping-shaped command output;
      3. the `glob` / `outputEval` directives of the output binding;
      4. the raw command output value (or the port's default value).

    :param job: the job whose command has completed
    :param command_output: the command's result (value and exit code)
    :return: the processed token value for this port
    :raises WorkflowDefinitionException: when a glob resolves to a path
        outside the job's output folder
    """
    context = utils.build_context(job)
    path_processor = get_path_processor(self.port.step)
    connector = job.step.get_connector()
    # `[None]` stands in for the local location when the job has no resources
    resources = job.get_resources() or [None]
    token_value = command_output.value if command_output.value is not None else self.default_value
    # Check if file `cwl.output.json` exists either locally or on at least one resource
    cwl_output_path = path_processor.join(job.output_directory, 'cwl.output.json')
    for resource in resources:
        if await remotepath.exists(connector, resource, cwl_output_path):
            # If file exists, use its contents as token value
            token_value = json.loads(await remotepath.read(
                connector, resource, cwl_output_path))
            break
    # If `token_value` is a dictionary, directly extract the token value from it
    if isinstance(token_value, MutableMapping) and self.port.name in token_value:
        token = token_value[self.port.name]
        return await self._build_token_value(job, token)
    # Otherwise, generate the output object as described in `outputs` field
    if self.glob is not None:
        # Adjust glob path: evaluate it only when it contains CWL expressions
        if '$(' in self.glob or '${' in self.glob:
            globpath = utils.eval_expression(
                expression=self.glob,
                context=context,
                full_js=self.full_js,
                expression_lib=self.expression_lib)
        else:
            globpath = self.glob
        # Resolve glob on every resource, anchoring relative paths to the
        # job's output directory
        resolve_tasks = []
        for resource in resources:
            if isinstance(globpath, MutableSequence):
                for path in globpath:
                    if not path_processor.isabs(path):
                        path = path_processor.join(job.output_directory, path)
                    resolve_tasks.append(
                        _expand_glob(connector, resource, path))
            else:
                if not path_processor.isabs(globpath):
                    globpath = path_processor.join(job.output_directory, globpath)
                resolve_tasks.append(
                    _expand_glob(connector, resource, globpath))
        paths = flatten_list(await asyncio.gather(*resolve_tasks))
        # Cannot glob outside the job output folder
        for path in paths:
            if not path.startswith(job.output_directory):
                # Fixed: the message literal was broken across a raw newline
                raise WorkflowDefinitionException(
                    "Globs outside the job's output folder are not allowed")
        # Get token class from paths
        class_tasks = [
            asyncio.create_task(_get_class_from_path(p, job))
            for p in paths]
        paths = [
            {'path': p, 'class': c}
            for p, c in zip(paths, await asyncio.gather(*class_tasks))]
        # If evaluation is not needed, simply return paths as token value
        if self.output_eval is None:
            token_list = await self._build_token_value(job, paths)
            # Unwrap single-element lists; empty lists become None
            return token_list if len(token_list) > 1 else token_list[0] if len(token_list) == 1 else None
        # Otherwise, fill context['self'] with glob data and proceed
        else:
            context['self'] = await self._build_token_value(job, paths)
    if self.output_eval is not None:
        # Fill context with exit code
        context['runtime']['exitCode'] = command_output.exit_code
        # Evaluate output
        token = utils.eval_expression(
            expression=self.output_eval,
            context=context,
            full_js=self.full_js,
            expression_lib=self.expression_lib)
        # Build token
        if isinstance(token, MutableSequence):
            paths = [{'path': el['path'], 'class': el['class']} for el in token]
            return await self._build_token_value(job, paths)
        else:
            return await self._build_token_value(job, token)
    # As the default value (no return path is met in previous code), simply process the command output
    return await self._build_token_value(job, token_value)