def __call__(self, *args, **kwargs):
    """Handle the call to a Bash app.

    Args:
        - Arbitrary positional arguments, forwarded to the app function.

    Kwargs:
        - Arbitrary keyword arguments; calltime values take precedence
          over the defaults captured at decoration time.

    Returns:
        App_fut
    """
    # Merge decoration-time defaults with calltime overrides into a
    # fresh dict so neither source mapping is mutated.
    call_kwargs = {**self.kwargs, **kwargs}

    # Fall back to the globally loaded DFK when none was bound to this app.
    dfk = DataFlowKernelLoader.dfk() if self.data_flow_kernel is None else self.data_flow_kernel

    # update_wrapper lets remote_side_bash_executor masquerade as
    # self.func; wrap_error adds remote-side error capture.
    remote_callable = wrap_error(update_wrapper(remote_side_bash_executor, self.func))

    return dfk.submit(remote_callable,
                      app_args=(self.func, *args),
                      executors=self.executors,
                      fn_hash=self.func_hash,
                      cache=self.cache,
                      app_kwargs=call_kwargs)
def __init__(self, func, data_flow_kernel=None, cache=False, executors='all', ignore_for_cache=None):
    """Construct a bash app wrapper around *func*.

    Parameter defaults are captured eagerly and a serializable remote
    callable is prepared once, at decoration time.
    """
    super().__init__(func,
                     data_flow_kernel=data_flow_kernel,
                     executors=executors,
                     cache=cache,
                     ignore_for_cache=ignore_for_cache)

    # We duplicate the extraction of parameter defaults
    # to self.kwargs to ensure availability at point of
    # command string format. Refer: #349
    self.kwargs = {
        name: param.default
        for name, param in signature(func).parameters.items()
        if param.default is not Parameter.empty
    }

    # update_wrapper allows remote_side_bash_executor to masquerade as
    # self.func; partial binds self.func as its first argument so that
    # no function type travels through the args, which parsl.serializer
    # doesn't support.
    remote_fn = partial(update_wrapper(remote_side_bash_executor, self.func), self.func)
    remote_fn.__name__ = self.func.__name__
    self.wrapped_remote_function = wrap_error(remote_fn)
def __call__(self, *args, **kwargs):
    """Handle the call to a Bash app.

    Args:
        - Arbitrary

    Kwargs:
        - Arbitrary

    Returns:
        If outputs=[...] was a kwarg then:
           App_fut, [Data_Futures...]
        else:
           App_fut
    """
    # Merge calltime kwargs over the decoration-time defaults into a
    # FRESH dict. The previous in-place self.kwargs.update(kwargs)
    # leaked one invocation's arguments into every later call of the
    # same app object.
    invocation_kwargs = {}
    invocation_kwargs.update(self.kwargs)
    invocation_kwargs.update(kwargs)

    # Fall back to the globally loaded DFK when none was bound explicitly.
    if self.data_flow_kernel is None:
        dfk = DataFlowKernelLoader.dfk()
    else:
        dfk = self.data_flow_kernel

    app_fut = dfk.submit(wrap_error(remote_side_bash_executor), self.func, *args,
                         executors=self.executors,
                         fn_hash=self.func_hash,
                         cache=self.cache,
                         **invocation_kwargs)

    # One DataFuture per declared output file, all parented on the app future.
    out_futs = [DataFuture(app_fut, o, parent=app_fut, tid=app_fut.tid)
                for o in kwargs.get('outputs', [])]
    app_fut._outputs = out_futs

    return app_fut
def __call__(self, *args, **kwargs):
    """Handle the call to a Bash app.

    Args:
        - Arbitrary

    Kwargs:
        - Arbitrary

    Returns:
        App_fut
    """
    # Build a per-call kwargs dict instead of updating self.kwargs in
    # place: the in-place update persisted calltime arguments on the
    # app object, leaking them into every subsequent invocation.
    invocation_kwargs = {}
    invocation_kwargs.update(self.kwargs)
    invocation_kwargs.update(kwargs)

    # Fall back to the globally loaded DFK when none was bound explicitly.
    if self.data_flow_kernel is None:
        dfk = DataFlowKernelLoader.dfk()
    else:
        dfk = self.data_flow_kernel

    # update_wrapper lets remote_side_bash_executor masquerade as self.func.
    app_fut = dfk.submit(wrap_error(update_wrapper(remote_side_bash_executor, self.func)),
                         self.func, *args,
                         executors=self.executors,
                         fn_hash=self.func_hash,
                         cache=self.cache,
                         **invocation_kwargs)

    return app_fut
def __init__(self, func, data_flow_kernel=None, walltime=60, cache=False, executors='all'):
    """Wrap *func* with error capture and delegate setup to the base app."""
    guarded_func = wrap_error(func)
    super().__init__(guarded_func,
                     data_flow_kernel=data_flow_kernel,
                     walltime=walltime,
                     executors=executors,
                     cache=cache)
def __init__(self, func, data_flow_kernel=None, cache=False, executors='all', ignore_for_cache=None):
    """Wrap *func* with error capture and delegate setup to the base app.

    ignore_for_cache defaults to None rather than [] — a mutable default
    list would be a single object shared by every app constructed
    without an explicit value (classic mutable-default pitfall).
    """
    if ignore_for_cache is None:
        ignore_for_cache = []
    super().__init__(
        wrap_error(func),
        data_flow_kernel=data_flow_kernel,
        executors=executors,
        cache=cache,
        ignore_for_cache=ignore_for_cache
    )
def __init__(self, func, executor=None, walltime=60, cache=False, sites='all', fn_hash=None):
    """Initialize the super.

    This bit is the same for both bash & python apps.
    """
    guarded_func = wrap_error(func)
    super().__init__(guarded_func,
                     executor=executor,
                     walltime=walltime,
                     sites=sites,
                     exec_type="python")
    # Caching metadata is stored on the app object itself.
    self.cache = cache
    self.fn_hash = fn_hash