async def compute_token(self, job: Job, command_output: CommandOutput) -> Token:
    # List values: recursively build one token per element and wrap the values
    if isinstance(command_output.value, MutableSequence):
        token_value = [t.value for t in await asyncio.gather(*[
            asyncio.create_task(
                self.compute_token(job, command_output.update(cv)))
            for cv in command_output.value])]
        return Token(
            name=self.port.name,
            value=token_value,
            job=job.name,
            tag=get_tag(job.inputs))
    # Dict values: either recurse on this port's own entry, or dispatch each
    # matching key to its registered processor
    if isinstance(command_output.value, MutableMapping):
        if self.port.name in command_output.value:
            return await self.compute_token(
                job, command_output.update(command_output.value[self.port.name]))
        else:
            token_tasks = {}
            for key, processor in self.processors.items():
                if key in command_output.value:
                    partial_command = command_output.update(command_output.value[key])
                    token_tasks[key] = asyncio.create_task(
                        processor.compute_token(job, partial_command))
            token_value = dict(zip(
                token_tasks.keys(),
                [t.value for t in await asyncio.gather(*token_tasks.values())]))
            return Token(
                name=self.port.name,
                value=token_value,
                job=job.name,
                tag=get_tag(job.inputs))
    else:
        # Scalar values: pass the whole command output to every processor
        token_tasks = {}
        for key, processor in self.processors.items():
            token_tasks[key] = asyncio.create_task(
                processor.compute_token(job, command_output))
        token_value = dict(zip(
            token_tasks.keys(),
            [t.value for t in await asyncio.gather(*token_tasks.values())]))
        return Token(
            name=self.port.name,
            value=token_value,
            job=job.name,
            tag=get_tag(job.inputs))
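
# Illustrative sketch, not part of the source: the task-per-key pattern used
# above, where one asyncio task is created per dictionary key and dict(zip(...))
# pairs the keys back with the gathered results. asyncio.gather() returns
# results in the order the awaitables were passed, so the pairing is safe.
# All names below (fake_processor, build_value_dict) are hypothetical.
import asyncio

async def fake_processor(value: int) -> int:
    # Stand-in for processor.compute_token(job, partial_command)
    await asyncio.sleep(0)
    return value * 2

async def build_value_dict(data: dict) -> dict:
    tasks = {key: asyncio.create_task(fake_processor(value))
             for key, value in data.items()}
    results = await asyncio.gather(*tasks.values())
    return dict(zip(tasks.keys(), results))

# asyncio.run(build_value_dict({"a": 1, "b": 2}))  ->  {"a": 2, "b": 4}
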
async def compute_token(self, job: Job, command_output: JupyterCommandOutput) -> Token:
    path_processor = utils.get_path_processor(self.port.step)
    if self.value is not None:
        # Explicit value: treat it as a path pattern (made absolute against the
        # job's output directory when needed) and resolve it on every resource
        connector = job.step.get_connector() if job is not None else None
        resources = job.get_resources() or [None]
        if job.output_directory and not path_processor.isabs(self.value):
            pattern = path_processor.join(job.output_directory, self.value)
        else:
            pattern = self.value
        token_value = utils.flatten_list(await asyncio.gather(*[
            asyncio.create_task(
                remotepath.resolve(
                    connector=connector,
                    target=resource,
                    pattern=pattern))
            for resource in resources]))
        if len(token_value) == 1:
            token_value = token_value[0]
    else:
        # No explicit value: read it from the user namespace via value_from and
        # make relative paths absolute against the job's output directory
        token_value = command_output.user_ns.get(self.value_from)
        if job.output_directory:
            if isinstance(token_value, MutableSequence):
                token_value = [
                    path_processor.join(job.output_directory, t)
                    if not path_processor.isabs(t) else t
                    for t in token_value]
            else:
                if not path_processor.isabs(token_value):
                    token_value = path_processor.join(job.output_directory, token_value)
    return Token(
        name=self.port.name,
        value=token_value,
        job=job.name,
        tag=utils.get_tag(job.inputs))
async def compute_token(self, job: Job, command_output: CWLCommandOutput) -> Any:
    if command_output.status == Status.SKIPPED:
        return Token(
            name=self.port.name,
            value=None,
            job=job.name,
            tag=get_tag(job.inputs))
    else:
        return None
async def _build_token(self, job_name: Text, token_value: Any, count: int) -> Token:
    job = self.step.context.scheduler.get_job(job_name)
    weight = await self.token_processor.weight_token(job, token_value)
    return Token(
        name=self.name,
        value=token_value,
        job=job_name,
        tag=posixpath.join(
            get_tag(job.inputs if job is not None else []),
            str(count)),
        weight=weight)
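
# Illustrative note, not part of the source: the tag above is built by joining
# the parent tag (from get_tag) with the element index via posixpath.join. The
# parent tag value below is a made-up example; only the join behaviour is shown.
import posixpath

parent_tag = "0.1"                       # hypothetical result of get_tag(...)
tags = [posixpath.join(parent_tag, str(count)) for count in range(3)]
# tags == ["0.1/0", "0.1/1", "0.1/2"]
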
async def compute_token(self, job: Job, command_output: CWLCommandOutput) -> Any:
    if command_output.status == Status.SKIPPED:
        return None
    else:
        token_value = await self._get_value_from_command(job, command_output)
        self._register_data(job, token_value)
        weight = await self.weight_token(job, token_value)
        return Token(
            name=self.port.name,
            value=token_value,
            job=job.name,
            tag=get_tag(job.inputs),
            weight=weight)
def put(self, token: Token):
    if isinstance(token, TerminationToken):
        token_list = self.token
        if token_list:
            self.token = [Token(
                name=self.name,
                job=[t.job for t in token_list],
                tag=_get_tag(token_list),
                value=self.merge_strategy(token_list)
                if self.merge_strategy else token_list)]
            self.token.append(token)
        else:
            self.token = [token]
        self.fireable.set()
    else:
        self.token.append(token)
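
# Illustrative sketch, not part of the source: the buffering behaviour of put()
# above, reduced to plain lists. FakeToken and FakeTerminationToken are minimal
# stand-ins, not the real Token classes, and no merge strategy is applied.
from dataclasses import dataclass
from typing import Any, List

@dataclass
class FakeToken:
    value: Any

class FakeTerminationToken(FakeToken):
    pass

def put(buffer: List[FakeToken], token: FakeToken) -> List[FakeToken]:
    if isinstance(token, FakeTerminationToken):
        if buffer:
            # Collapse everything buffered so far into one list-valued token,
            # then keep the termination marker right after it
            return [FakeToken(value=[t.value for t in buffer]), token]
        return [token]
    buffer.append(token)
    return buffer

buf: List[FakeToken] = []
buf = put(buf, FakeToken(1))
buf = put(buf, FakeToken(2))
buf = put(buf, FakeTerminationToken(None))
# buf == [FakeToken(value=[1, 2]), FakeTerminationToken(value=None)]
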
async def get(self, consumer: Text) -> Token:
    outputs = await self._retrieve(consumer)
    # Check for termination
    if utils.check_termination(outputs):
        return TerminationToken(self.name)
    # Return token
    outputs = flatten_list(outputs)
    if self.merge_strategy is not None:
        outputs = self._merge(outputs)
    if isinstance(outputs, MutableSequence):
        return Token(
            name=self.name,
            job=[t.job for t in outputs],
            value=outputs,
            tag=get_tag(outputs),
            weight=sum([t.weight for t in outputs]))
    else:
        return outputs
async def compute_token(self, job: Job, command_output: JupyterCommandOutput) -> Token:
    value = executor.predump(
        compiler=self.compiler,
        name=self.port.name,
        value=(self.value if self.value is not None
               else command_output.user_ns[self.value_from]
               if self.value_from in command_output.user_ns
               else builtins.__dict__.get(self.value_from)),
        serializer=self.serializer)
    token_value = ([dill.dumps(v, recurse=True) for v in value]
                   if isinstance(value, MutableSequence)
                   else dill.dumps(value, recurse=True))
    return Token(
        name=self.port.name,
        value=token_value,
        job=job.name,
        tag=utils.get_tag(job.inputs))
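
# Illustrative sketch, not part of the source: the dill serialisation used
# above, together with the matching deserialisation step that the snippet does
# not show. Assumes the dill package is installed; the closure example is made up.
import dill

def make_closure():
    captured = 41
    def closure():
        return captured + 1
    return closure

payload = dill.dumps(make_closure(), recurse=True)   # bytes, like token_value above
restored = dill.loads(payload)
assert restored() == 42
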
async def compute_token(self, job: Job, command_output: CommandOutput) -> Token:
    if isinstance(command_output.value, MutableSequence):
        token_list = await asyncio.gather(*[
            asyncio.create_task(
                self.processor.compute_token(job, command_output.update(value)))
            for value in command_output.value])
        token = Token(
            name=self.port.name,
            value=[t.value for t in token_list],
            job=job.name,
            tag=get_tag(job.inputs))
    else:
        token = await self.processor.compute_token(job, command_output)
    token.value = (
        [] if token.value is None
        else [token.value] if not isinstance(token.value, MutableSequence)
        else token.value)
    return token
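
# Illustrative sketch, not part of the source: the value normalisation done at
# the end of the method above, shown on plain Python values. The normalise
# helper name is hypothetical.
from collections.abc import MutableSequence

def normalise(value):
    return ([] if value is None
            else [value] if not isinstance(value, MutableSequence)
            else value)

assert normalise(None) == []
assert normalise("file.txt") == ["file.txt"]
assert normalise(["a", "b"]) == ["a", "b"]
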
async def compute_token(self, job: Job, command_output: CommandOutput) -> Token:
    return Token(
        name=self.port.name,
        value=command_output.value,
        job=job.name,
        tag=get_tag(job.inputs))
async def compute_token(self, job: Job, command_output: JupyterCommandOutput) -> Token:
    return Token(
        name=self.port.name,
        value=None,
        tag=utils.get_tag(job.inputs),
        job=job.name)