def multinode_f(*args, **kwargs):
    if not self.scope:
        raise NotInPyCOMPSsException(not_in_pycompss("MultiNode"))

    if __debug__:
        logger.debug("Executing multinode_f wrapper.")

    if (context.in_master() or context.is_nesting_enabled()) \
            and not self.core_element_configured:
        # master code - or worker with nesting enabled
        self.__configure_core_element__(kwargs, user_function)

    if context.in_worker():
        old_slurm_env = set_slurm_environment()

    # Set the computing_nodes variable in kwargs for its usage
    # in @task decorator
    kwargs['computing_nodes'] = self.kwargs['computing_nodes']

    with keep_arguments(args, kwargs, prepend_strings=True):
        # Call the method
        ret = user_function(*args, **kwargs)

    if context.in_worker():
        reset_slurm_environment(old_slurm_env)

    return ret
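# Usage sketch for the @multinode wrapper above: a minimal, assumed example
# of the usual decorator stacking with @task. The function body and the
# computing_nodes value are illustrative, not taken from the source.
from pycompss.api.multinode import multinode
from pycompss.api.task import task

@multinode(computing_nodes="2")
@task(returns=1)
def multi_node_job():
    # Runs once per invocation; on the worker, the SLURM environment has
    # been reshaped to expose only the nodes reserved for this task.
    return 0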
def multinode_f(*args, **kwargs):
    if not self.scope:
        raise Exception(not_in_pycompss("MultiNode"))

    if __debug__:
        logger.debug("Executing multinode_f wrapper.")

    if context.in_master():
        # master code
        if not self.core_element_configured:
            self.__configure_core_element__(kwargs)
    else:
        # worker code
        set_slurm_environment()

    # Set the computing_nodes variable in kwargs for its usage
    # in @task decorator
    kwargs['computing_nodes'] = self.kwargs['computing_nodes']

    with keep_arguments(args, kwargs, prepend_strings=True):
        # Call the method
        ret = func(*args, **kwargs)

    if context.in_worker():
        reset_slurm_environment()

    return ret
def on_failure_f(*args, **kwargs):
    # type: (*typing.Any, **typing.Any) -> typing.Any
    if not self.scope:
        from pycompss.api.dummy.on_failure import on_failure \
            as dummy_on_failure
        d_c = dummy_on_failure(self.args, self.kwargs)
        return d_c.__call__(user_function)(*args, **kwargs)

    if __debug__:
        logger.debug("Executing on_failure_f wrapper.")

    if (context.in_master() or context.is_nesting_enabled()) \
            and not self.core_element_configured:
        # master code - or worker with nesting enabled
        self.__configure_core_element__(kwargs)

    # Set the on failure management action and default variables in
    # kwargs for its usage in @task decorator
    kwargs["on_failure"] = self.on_failure_action
    kwargs["defaults"] = self.defaults

    with keep_arguments(args, kwargs, prepend_strings=True):
        # Call the method
        ret = user_function(*args, **kwargs)

    return ret
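# Usage sketch for the @on_failure wrapper above (assumed API): management
# selects the failure policy, and extra keyword arguments provide default
# values for the task outputs when the failure is ignored.
from pycompss.api.on_failure import on_failure
from pycompss.api.task import task

@on_failure(management="IGNORE", returns=0)
@task(returns=int)
def may_fail(value):
    if value < 0:
        raise ValueError("negative input")  # ignored; default 0 is returned
    return value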
def __decorator_body__(self, user_function, args, kwargs):
    # type: (typing.Callable, tuple, dict) -> typing.Any
    if not self.scope:
        # Execute the mpi as with PyCOMPSs so that sequential
        # execution performs as parallel.
        # To disable: raise Exception(not_in_pycompss("mpi"))
        # TODO: Intercept @task parameters to get stream redirection
        if "binary" in self.kwargs:
            return self.__run_mpi__(args, kwargs)
        else:
            print("WARN: Python MPI as dummy is not fully supported. "
                  "Executing decorated function.")
            return user_function(*args, **kwargs)

    if __debug__:
        logger.debug("Executing mpi_f wrapper.")

    if (context.in_master() or context.is_nesting_enabled()) \
            and not self.core_element_configured:
        # master code - or worker with nesting enabled
        self.__configure_core_element__(kwargs)

    # The processes parameter will have to go down until the execution
    # is invoked. To this end, set the computing_nodes variable in kwargs
    # for its usage in @task decorator.
    # WARNING: processes can be an int, an env string, or a str with a
    # dynamic variable name.
    if "processes" in self.kwargs:
        kwargs["computing_nodes"] = self.kwargs["processes"]
    else:
        # If processes is not defined, check computing_units or set default
        process_computing_nodes(self.decorator_name, self.kwargs)
        kwargs["computing_nodes"] = self.kwargs["computing_nodes"]
    if "processes_per_node" in self.kwargs:
        kwargs["processes_per_node"] = self.kwargs["processes_per_node"]
    else:
        kwargs["processes_per_node"] = 1

    if __debug__:
        logger.debug("This MPI task will have "
                     + str(kwargs["computing_nodes"]) + " processes and "
                     + str(kwargs["processes_per_node"])
                     + " processes per node.")

    if self.task_type == IMPL_PYTHON_MPI:
        prepend_strings = True
    else:
        prepend_strings = False

    with keep_arguments(args, kwargs, prepend_strings=prepend_strings):
        # Call the method
        ret = user_function(*args, **kwargs)

    return ret
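# Usage sketch for the MPI decorator body above (assumed values): an MPI
# binary task whose processes and processes_per_node flow down to @task as
# computing_nodes, as the wrapper shows. The binary "hostname" is an
# illustrative choice, not taken from the source.
from pycompss.api.mpi import mpi
from pycompss.api.task import task

@mpi(runner="mpirun", binary="hostname", processes=4, processes_per_node=2)
@task(returns=int)
def mpi_hostname():
    pass  # body is empty: the binary is what actually runs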
def binary_f(*args, **kwargs):
    if not self.scope:
        # Execute the binary as with PyCOMPSs so that sequential
        # execution performs as parallel.
        # To disable: raise Exception(not_in_pycompss("binary"))
        # TODO: Intercept the @task parameters to get stream redirection
        cmd = [self.kwargs['binary']]
        if args:
            args = [str(a) for a in args]
            cmd += args
        my_env = os.environ.copy()
        if "working_dir" in self.kwargs:
            my_env["PATH"] = self.kwargs["working_dir"] + my_env["PATH"]
        elif "workingDir" in self.kwargs:
            my_env["PATH"] = self.kwargs["workingDir"] + my_env["PATH"]
        proc = subprocess.Popen(cmd,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                env=my_env)
        out, err = proc.communicate()
        if sys.version_info[0] < 3:
            out_message = out.strip()
            err_message = err.strip()
        else:
            out_message = out.decode().strip()
            err_message = err.decode().strip()
        if out_message:
            print(out_message)
        if err_message:
            sys.stderr.write(err_message + '\n')
        return proc.returncode

    if __debug__:
        logger.debug("Executing binary_f wrapper.")

    if context.in_master():
        # master code
        if not self.core_element_configured:
            self.__configure_core_element__(kwargs, user_function)
    else:
        # worker code
        pass

    with keep_arguments(args, kwargs, prepend_strings=False):
        # Call the method
        ret = user_function(*args, **kwargs)

    return ret
def reduce_f(*args, **kwargs):
    if not self.scope:
        raise PyCOMPSsException(not_in_pycompss("reduction"))

    if __debug__:
        logger.debug("Executing reduce_f wrapper.")

    # Set the chunk size and is_reduce variables in kwargs for their
    # usage in @task decorator
    kwargs['chunk_size'] = self.kwargs['chunk_size']
    kwargs['is_reduce'] = self.kwargs['is_reduce']

    with keep_arguments(args, kwargs, prepend_strings=False):
        # Call the method
        ret = func(*args, **kwargs)

    return ret
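# Usage sketch for the @reduction wrapper above (assumed example): chunk_size
# is forwarded to @task through kwargs, as the wrapper shows; the collection
# parameter name and chunk size value are illustrative.
from pycompss.api.reduction import reduction
from pycompss.api.task import task
from pycompss.api.parameter import COLLECTION_IN

@reduction(chunk_size="2")
@task(returns=int, partials=COLLECTION_IN)
def sum_partials(partials):
    return sum(partials)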
def container_f(*args, **kwargs):
    if not self.scope:
        raise NotInPyCOMPSsException(not_in_pycompss("container"))

    if __debug__:
        logger.debug("Executing container_f wrapper.")

    if (context.in_master() or context.is_nesting_enabled()) \
            and not self.core_element_configured:
        # master code - or worker with nesting enabled
        self.__configure_core_element__(kwargs, user_function)

    with keep_arguments(args, kwargs, prepend_strings=False):
        # Call the method
        ret = user_function(*args, **kwargs)

    return ret
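# Usage sketch for the @container wrapper above; the engine and image values
# are assumptions for illustration.
from pycompss.api.container import container
from pycompss.api.task import task

@container(engine="DOCKER", image="ubuntu:20.04")
@task(returns=str)
def containerized_job():
    return "done"  # executed inside the container on the worker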
def implement_f(*args, **kwargs):
    # type: (*typing.Any, **typing.Any) -> typing.Any
    # This is executed only when called.
    if not self.scope:
        raise NotInPyCOMPSsException(not_in_pycompss("implement"))

    if __debug__:
        logger.debug("Executing implement_f wrapper.")

    if (context.in_master() or context.is_nesting_enabled()) \
            and not self.core_element_configured:
        # master code - or worker with nesting enabled
        self.__configure_core_element__(kwargs)

    with keep_arguments(args, kwargs, prepend_strings=True):
        # Call the method
        ret = user_function(*args, **kwargs)

    return ret
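# Usage sketch for the @implement wrapper above (assumed module and method
# names): fast_version is registered as an alternative implementation of
# my_module.slow_version, typically guarded by a @constraint.
from pycompss.api.implement import implement
from pycompss.api.constraint import constraint
from pycompss.api.task import task

@implement(source_class="my_module", method="slow_version")
@constraint(computing_units="4")
@task(returns=list)
def fast_version(data):
    return [d * 2 for d in data]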
def container_f(*args, **kwargs):
    if not self.scope:
        raise Exception(not_in_pycompss("container"))

    if __debug__:
        logger.debug("Executing container_f wrapper.")

    if context.in_master():
        # master code
        if not self.core_element_configured:
            self.__configure_core_element__(kwargs, user_function)
    else:
        # worker code
        pass

    with keep_arguments(args, kwargs, prepend_strings=False):
        # Call the method
        ret = user_function(*args, **kwargs)

    return ret
def constrained_f(*args, **kwargs):
    if not self.scope:
        from pycompss.api.dummy.constraint import constraint \
            as dummy_constraint
        d_c = dummy_constraint(self.args, self.kwargs)
        return d_c.__call__(user_function)(*args, **kwargs)

    if __debug__:
        logger.debug("Executing constrained_f wrapper.")

    if (context.in_master() or context.is_nesting_enabled()) \
            and not self.core_element_configured:
        # master code - or worker with nesting enabled
        self.__configure_core_element__(kwargs, user_function)

    with keep_arguments(args, kwargs, prepend_strings=True):
        # Call the method
        ret = user_function(*args, **kwargs)

    return ret
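# Usage sketch for the @constraint wrapper above; the constraint value is an
# illustrative choice.
from pycompss.api.constraint import constraint
from pycompss.api.task import task

@constraint(computing_units="4")
@task(returns=int)
def heavy_task(n):
    return n * n  # scheduled only on resources meeting the constraint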
def __decorator_body__(self, user_function, args, kwargs):
    # type: (typing.Callable, tuple, dict) -> typing.Any
    if not self.scope:
        raise NotImplementedError

    if __debug__:
        logger.debug("Executing mpmd_mpi_f wrapper.")

    if (context.in_master() or context.is_nesting_enabled()) \
            and not self.core_element_configured:
        # master code - or worker with nesting enabled
        self.__configure_core_element__(kwargs)

    kwargs[PROCESSES_PER_NODE] = self.kwargs.get(PROCESSES_PER_NODE, 1)
    kwargs[COMPUTING_NODES] = self.processes

    with keep_arguments(args, kwargs, prepend_strings=False):
        # Call the method
        ret = user_function(*args, **kwargs)

    return ret
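# Usage sketch for the MPMD MPI decorator body above: programs is assumed to
# be a list of dicts carrying a per-program binary and process count; the
# binaries and counts below are illustrative.
from pycompss.api.mpmd_mpi import mpmd_mpi
from pycompss.api.task import task

@mpmd_mpi(runner="mpirun",
          programs=[{"binary": "app_a", "processes": 2},
                    {"binary": "app_b", "processes": 2}])
@task(returns=int)
def coupled_run():
    pass  # the MPMD command line is built and executed by the runtime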
def __decorator_body__(self, user_function, args, kwargs):
    # type: (typing.Callable, tuple, dict) -> typing.Any
    # force to serialize with JSON
    serializer.FORCED_SERIALIZER = 4
    if not self.scope:
        # run http
        return self.__run_http__(args, kwargs)

    if __debug__:
        logger.debug("Executing http_f wrapper.")

    if (context.in_master() or context.is_nesting_enabled()) \
            and not self.core_element_configured:
        # master code - or worker with nesting enabled
        self.__configure_core_element__(kwargs)

    with keep_arguments(args, kwargs):
        # Call the method
        ret = user_function(*args, **kwargs)

    return ret
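# Usage sketch for the HTTP decorator body above (assumed parameters):
# service_name is expected to match a service declared in the resources
# configuration; the resource template below is illustrative.
from pycompss.api.http import http
from pycompss.api.task import task

@http(service_name="my_service", request="GET",
      resource="get_length/{message}")
@task(returns=int)
def get_length(message):
    pass  # the request is performed by the runtime; JSON is forced above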
def binary_f(*args, **kwargs):
    if not self.scope:
        # Execute the binary as with PyCOMPSs so that sequential
        # execution performs as parallel.
        # To disable: raise Exception(not_in_pycompss("binary"))
        # TODO: Intercept @task parameters to get stream redirection
        return self.__run_binary__(args, kwargs)

    if __debug__:
        logger.debug("Executing binary_f wrapper.")

    if (context.in_master() or context.is_nesting_enabled()) \
            and not self.core_element_configured:
        # master code - or worker with nesting enabled
        self.__configure_core_element__(kwargs, user_function)

    with keep_arguments(args, kwargs, prepend_strings=False):
        # Call the method
        ret = user_function(*args, **kwargs)

    return ret
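# Usage sketch for the @binary wrapper above: outside the PyCOMPSs scope the
# wrapper runs the command itself via __run_binary__; under PyCOMPSs it
# becomes a task. The binary and working_dir values are illustrative.
from pycompss.api.binary import binary
from pycompss.api.task import task

@binary(binary="date", working_dir="/tmp")
@task(returns=int)
def print_date():
    pass  # the exit code of the binary is returned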
def implement_f(*args, **kwargs):
    # This is executed only when called.
    if not self.scope:
        raise Exception(not_in_pycompss("implement"))

    if __debug__:
        logger.debug("Executing implement_f wrapper.")

    if context.in_master():
        # master code
        if not self.core_element_configured:
            self.__configure_core_element__(kwargs)
    else:
        # worker code
        pass

    with keep_arguments(args, kwargs, prepend_strings=True):
        # Call the method
        ret = func(*args, **kwargs)

    return ret
def decaf_f(*args, **kwargs):
    # type: (*typing.Any, **typing.Any) -> typing.Any
    if not self.scope:
        raise NotInPyCOMPSsException(not_in_pycompss("decaf"))

    if __debug__:
        logger.debug("Executing decaf_f wrapper.")

    if (context.in_master() or context.is_nesting_enabled()) \
            and not self.core_element_configured:
        # master code - or worker with nesting enabled
        self.__configure_core_element__(kwargs)

    # Set the computing_nodes variable in kwargs for its usage
    # in @task decorator
    kwargs[COMPUTING_NODES] = self.kwargs[COMPUTING_NODES]

    with keep_arguments(args, kwargs, prepend_strings=False):
        # Call the method
        ret = user_function(*args, **kwargs)

    return ret
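# Usage sketch for the @decaf wrapper above (assumed parameters): df_script
# points to the Decaf dataflow description; the path and node count below
# are illustrative.
from pycompss.api.decaf import decaf
from pycompss.api.task import task

@decaf(df_script="$PWD/dataflow.py", runner="mpirun", computing_nodes=2)
@task(returns=int)
def decaf_flow():
    pass  # the Decaf dataflow is launched by the runtime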
def constrained_f(*args, **kwargs):
    if not self.scope:
        from pycompss.api.dummy.constraint import constraint \
            as dummy_constraint
        d_c = dummy_constraint(self.args, self.kwargs)
        return d_c.__call__(func)(*args, **kwargs)

    if __debug__:
        logger.debug("Executing constrained_f wrapper.")

    if context.in_master():
        # master code
        if not self.core_element_configured:
            self.__configure_core_element__(kwargs)
    else:
        # worker code
        pass

    with keep_arguments(args, kwargs, prepend_strings=True):
        # Call the method
        ret = func(*args, **kwargs)

    return ret
def mpi_f(*args, **kwargs):
    if not self.scope:
        # Execute the mpi as with PyCOMPSs so that sequential
        # execution performs as parallel.
        # To disable: raise Exception(not_in_pycompss("mpi"))
        # TODO: Intercept the @task parameters to get stream redirection
        cmd = [self.kwargs['runner']]
        if 'processes' in self.kwargs:
            cmd += ['-np', self.kwargs['processes']]
        elif 'computing_nodes' in self.kwargs:
            cmd += ['-np', self.kwargs['computing_nodes']]
        elif 'computingNodes' in self.kwargs:
            cmd += ['-np', self.kwargs['computingNodes']]
        else:
            pass
        if 'flags' in self.kwargs:
            cmd += self.kwargs['flags'].split()
        cmd += [self.kwargs['binary']]
        if args:
            args = [str(a) for a in args]
            cmd += args
        my_env = os.environ.copy()
        if "working_dir" in self.kwargs:
            my_env["PATH"] = self.kwargs["working_dir"] + my_env["PATH"]
        elif "workingDir" in self.kwargs:
            my_env["PATH"] = self.kwargs["workingDir"] + my_env["PATH"]
        proc = subprocess.Popen(cmd,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                env=my_env)
        out, err = proc.communicate()
        if sys.version_info[0] < 3:
            out_message = out.strip()
            err_message = err.strip()
        else:
            out_message = out.decode().strip()
            err_message = err.decode().strip()
        if out_message:
            print(out_message)
        if err_message:
            sys.stderr.write(err_message + '\n')
        return proc.returncode

    if __debug__:
        logger.debug("Executing mpi_f wrapper.")

    if context.in_master():
        # master code
        if not self.core_element_configured:
            self.__configure_core_element__(kwargs)
    else:
        # worker code
        pass

    # Set the computing_nodes variable in kwargs for its usage
    # in @task decorator
    kwargs['computing_nodes'] = self.kwargs['processes']

    if self.task_type == "PYTHON_MPI":
        prepend_strings = True
    else:
        prepend_strings = False

    with keep_arguments(args, kwargs, prepend_strings=prepend_strings):
        # Call the method
        ret = func(*args, **kwargs)

    return ret