def make_persistent(obj, *args):  # noqa
    # type: (typing.Any, dict) -> None
    """ Persist the given object.

    :param obj: object to persist.
    :param args: Extra arguments.
    :return: None
    """
    if obj.id is not None:
        # The obj is already persistent: nothing to do.
        return
    if len(args) > 1:
        raise ValueError('Too many arguments when calling makePersistent.')
    # Take the id indicated by the user when present, otherwise generate one.
    uid = args[0] if args else uuid.uuid4()
    obj.id = str(uid)
    # Write ID file
    id_path = STORAGE_PATH + str(uid) + '.ID'
    print("MAKE PERSISTENT: Creating ID file " + id_path)
    with open(id_path, 'w') as id_fd:
        id_fd.write(obj.id)
    # Write PSCO file
    psco_path = STORAGE_PATH + str(uid) + '.PSCO'
    print("MAKE PERSISTENT: Serializing object to file " + psco_path)
    serialize_to_file(obj, psco_path)
def makePersistent(obj, *args):
    # Legacy camelCase entry point: persist obj unless already persistent.
    if obj.id is not None:
        return
    if len(args) > 1:
        raise ValueError('Too many arguments when calling makePersistent.')
    # User-provided id when given, otherwise a fresh uuid4.
    uid = args[0] if args else uuid.uuid4()
    obj.id = str(uid)
    # Write ID file
    id_file = storage_path + str(uid) + '.ID'
    print("MAKE PERSISTENT: Creating ID file " + id_file)
    with open(id_file, 'w') as handle:
        handle.write(obj.id)
    # Write PSCO file
    psco_file = storage_path + str(uid) + '.PSCO'
    print("MAKE PERSISTENT: Serializing object to file " + psco_file)
    serialize_to_file(obj, psco_file)
def testObjectArray(self):
    """ Tests de-/serialization of object np.arrays"""
    # Round-trip an object-dtype matrix through a temporary file.
    data = np.random.random((50, 50))
    obj_arr = np.array(data, dtype=object)
    tmp = tempfile.NamedTemporaryFile()
    serialize_to_file(obj_arr, tmp.name)
    deserialize_from_file(tmp.name)
def test_serialize_deserialize_obj_to_file():
    # Uses serialize to handler underneath.
    from pycompss.util.serialization.serializer import serialize_to_file
    from pycompss.util.serialization.serializer import deserialize_from_file
    file_name = "target.pkl"
    original = [1, 3, 2, "hello", "world"]
    # Round-trip the list through disk and compare.
    serialize_to_file(original, file_name)
    restored = deserialize_from_file(file_name)
    os.remove(file_name)
    assert original == restored, "ERROR: Object serialization and deserialization retrieved wrong object."  # noqa: E501
def test_serialize_deserialize_obj_to_file_no_gc():
    """Round-trip a list through disk with the serializer GC disabled."""
    # Uses serialize to handler underneath.
    import pycompss.util.serialization.serializer as serializer
    from pycompss.util.serialization.serializer import serialize_to_file
    from pycompss.util.serialization.serializer import deserialize_from_file
    serializer.DISABLE_GC = True
    target_file = "target.pkl"
    obj = [1, 3, 2, "hello", "world"]
    try:
        serialize_to_file(obj, target_file)
        result = deserialize_from_file(target_file)
    finally:
        # Fix: restore the flag so later tests run with the garbage
        # collector enabled again (previously it leaked into other tests).
        serializer.DISABLE_GC = False
    os.remove(target_file)
    assert obj == result, "ERROR: Object serialization and deserialization (without garbage collector) retrieved wrong object."  # noqa: E501
def test_serialize_deserialize_np_to_file():
    # Uses serialize to handler underneath.
    from pycompss.util.serialization.serializer import serialize_to_file
    from pycompss.util.serialization.serializer import deserialize_from_file
    out_path = "target_np.pkl"
    matrix = np.random.rand(4, 4)
    # Round-trip the numpy array through disk and compare element-wise.
    serialize_to_file(matrix, out_path)
    recovered = deserialize_from_file(out_path)
    os.remove(out_path)
    assert np.array_equal(
        matrix, recovered
    ), "ERROR: Numpy object serialization and deserialization retrieved wrong object."  # noqa: E501
def test_serialize_deserialize_np_to_file_no_gc():
    """Round-trip a numpy array through disk with the serializer GC disabled."""
    # Uses serialize to handler underneath.
    import pycompss.util.serialization.serializer as serializer
    from pycompss.util.serialization.serializer import serialize_to_file
    from pycompss.util.serialization.serializer import deserialize_from_file
    serializer.DISABLE_GC = True
    target_file_np = "target_np.pkl"
    obj_np = np.random.rand(4, 4)
    try:
        serialize_to_file(obj_np, target_file_np)
        result_np = deserialize_from_file(target_file_np)
    finally:
        # Fix: restore the flag so later tests run with the garbage
        # collector enabled again (previously it leaked into other tests).
        serializer.DISABLE_GC = False
    os.remove(target_file_np)
    assert np.array_equal(
        obj_np, result_np
    ), "ERROR: Numpy object serialization and deserialization (without garbage collector) retrieved wrong object."  # noqa: E501
def updatePersistent(obj, *args):
    """ Update the PSCO file of an already persisted object.

    :param obj: Object to update.
    :param args: Extra arguments (unused).
    :return: None
    """
    if obj.id is not None:
        # The psco is already persistent
        # Update PSCO file
        file_name = str(obj.id) + '.PSCO'
        file_path = storage_path + file_name
        # Remove old file
        if os.path.exists(file_path):
            os.remove(file_path)
        else:
            # Fix: previous message text was ungrammatical
            # ("Can not delete the file %s doesn't exists").
            raise Exception("Can not delete the file %s because it does not exist" % str(file_path))  # noqa: E501
        # Create a new one
        serialize_to_file(obj, file_path)
    else:
        # The obj is not persistent
        pass
def manage_returns(num_returns, user_returns, ret_params, python_mpi):
    # type: (int, list, list, bool) -> list
    """ Manage task returns.

    :param num_returns: Number of returns.
    :param user_returns: User returns.
    :param ret_params: Return parameters.
    :param python_mpi: Boolean if is python mpi code.
    :return: User returns.
    """
    if __debug__:
        logger.debug("Dealing with returns: " + str(num_returns))
    if num_returns > 0:
        if num_returns == 1:
            # Wrap the single return so the loop below handles both the
            # single and multi return cases uniformly.
            user_returns = [user_returns]
        elif num_returns > 1 and python_mpi:
            user_returns = [user_returns]
            ret_params = __get_ret_rank__(ret_params)
        # Implicit assumption: len(user_returns) == len(ret_params).
        for obj, param in zip(user_returns, ret_params):
            # PSCOs are never serialized to file.
            if param.content_type == parameter.TYPE.EXTERNAL_PSCO or is_psco(obj):  # noqa: E501
                continue
            # Returns are always passed as files (no command line
            # optimization) due to the asymmetry in worker-master
            # communications and for uniform handling.
            f_name = __get_file_name__(param.file_name)
            if __debug__:
                logger.debug("Serializing return: " + str(f_name))
            if python_mpi:
                # With multiple MPI returns each rank writes its own result.
                rank_zero_reduce = num_returns == 1
                serialize_to_file_mpienv(obj, f_name, rank_zero_reduce)
            else:
                serialize_to_file(obj, f_name)
    return user_returns
def update_persistent(obj, *args):  # noqa
    # type: (typing.Any, dict) -> None
    """ Update the given object.

    :param obj: object to update.
    :param args: Extra arguments.
    :return: None
    """
    if obj.id is None:
        # The obj is not persistent: nothing to update.
        return
    # The psco is already persistent: refresh its PSCO file.
    psco_path = STORAGE_PATH + str(obj.id) + '.PSCO'
    # Remove old file
    os.remove(psco_path)
    # Create a new one
    serialize_to_file(obj, psco_path)
def execute_task(process_name, storage_conf, params, tracing, logger, python_mpi=False):  # noqa: E501
    """ ExecuteTask main method.

    :param process_name: Process name
    :param storage_conf: Storage configuration file path
    :param params: List of parameters
    :param tracing: Tracing flag
    :param logger: Logger to use
    :param python_mpi: If it is a MPI task
    :return: exit code, new types and new values
    """
    if __debug__:
        logger.debug("Begin task execution in %s" % process_name)

    persistent_storage = False
    if storage_conf != 'null':
        persistent_storage = True

    # Retrieve the parameters from the params argument
    path = params[0]
    method_name = params[1]
    num_slaves = int(params[3])
    time_out = int(params[2])
    slaves = []
    for i in range(3, 3 + num_slaves):
        slaves.append(params[i])
    arg_position = 4 + num_slaves

    args = params[arg_position:]
    cus = args[0]
    args = args[1:]
    has_target = args[0]
    return_type = args[1]  # noqa
    return_length = int(args[2])
    num_params = int(args[3])
    args = args[4:]

    # COMPSs keywords for tasks (ie: tracing, process name...)
    # compss_key is included to be checked in the @task decorator, so that
    # the task knows if it has been called from the worker or from the
    # user code (reason: ignore @task decorator if called from another task).
    compss_kwargs = {
        'compss_key': True,
        'compss_tracing': tracing,
        'compss_process_name': process_name,
        'compss_storage_conf': storage_conf,
        'compss_return_length': return_length,
        'python_MPI': python_mpi
    }

    if __debug__:
        logger.debug("Storage conf: %s" % str(storage_conf))
        logger.debug("Params: %s" % str(params))
        logger.debug("Path: %s" % str(path))
        logger.debug("Method name: %s" % str(method_name))
        logger.debug("Num slaves: %s" % str(num_slaves))
        logger.debug("Slaves: %s" % str(slaves))
        logger.debug("Cus: %s" % str(cus))
        logger.debug("Has target: %s" % str(has_target))
        logger.debug("Num Params: %s" % str(num_params))
        logger.debug("Return Length: %s" % str(return_length))
        logger.debug("Args: %r" % args)

    # Get all parameter values
    if __debug__:
        logger.debug("Processing parameters:")
    values = get_input_params(num_params, logger, args)
    types = [x.type for x in values]

    if __debug__:
        logger.debug("RUN TASK with arguments:")
        logger.debug("\t- Path: %s" % path)
        logger.debug("\t- Method/function name: %s" % method_name)
        logger.debug("\t- Has target: %s" % str(has_target))
        logger.debug("\t- # parameters: %s" % str(num_params))
        logger.debug("\t- Values:")
        for v in values:
            logger.debug("\t\t %r" % v)
        logger.debug("\t- COMPSs types:")
        for t in types:
            logger.debug("\t\t %s" % str(t))

    import_error = False
    new_types = []
    new_values = []
    timed_out = False

    try:
        # Try to import the module (for functions)
        if __debug__:
            logger.debug("Trying to import the user module: %s" % path)
        py_version = sys.version_info
        if py_version >= (2, 7):
            import importlib
            module = importlib.import_module(path)  # Python 2.7
            if path.startswith('InteractiveMode_'):
                # Force reload in interactive mode. The user may have
                # overwritten a function or task.
                if py_version < (3, 0):
                    reload(module)  # noqa
                elif py_version < (3, 4):
                    import imp
                    imp.reload(module)
                else:
                    importlib.reload(module)
            if __debug__:
                msg = "Module successfully loaded (Python version >= 2.7)"
                logger.debug(msg)
        else:
            module = __import__(path, globals(), locals(), [path], -1)
            if __debug__:
                # Fix: closing parenthesis was missing from this message.
                msg = "Module successfully loaded (Python version < 2.7)"
                logger.debug(msg)
    except ImportError:
        if __debug__:
            msg = "Could not import the module. Reason: Method in class."
            logger.debug(msg)
        import_error = True

    if not import_error:
        # Module method declared as task
        result = task_execution(logger, process_name, module, method_name,
                                time_out, types, values, compss_kwargs,
                                persistent_storage, storage_conf)
        exit_code = result[0]
        new_types = result[1]
        new_values = result[2]
        target_direction = result[3]  # noqa
        timed_out = result[4]
        except_msg = result[5]

        if exit_code != 0:
            return exit_code, new_types, new_values, timed_out, except_msg
    else:
        # Method declared as task in class
        # Not the path of a module, it ends with a class name
        class_name = path.split('.')[-1]
        # Fix: removed the redundant unconditional module_name assignment
        # that preceded this if/else (it computed the same join twice).
        if '.' in path:
            module_name = '.'.join(path.split('.')[0:-1])
        else:
            module_name = path
        try:
            module = __import__(module_name, fromlist=[class_name])
            klass = getattr(module, class_name)
        except Exception:
            exc_type, exc_value, exc_traceback = sys.exc_info()
            lines = traceback.format_exception(exc_type, exc_value,
                                               exc_traceback)
            logger.exception("EXCEPTION IMPORTING MODULE IN %s"
                             % process_name)
            logger.exception(''.join(line for line in lines))
            return 1, [], [], False, None

        if __debug__:
            logger.debug("Method in class %s of module %s"
                         % (class_name, module_name))
            logger.debug("Has target: %s" % str(has_target))

        if has_target == 'true':
            # Instance method
            # The self object needs to be an object in order to call the
            # function. So, it can not be done in the @task decorator.
            # Since the args structure is parameters + self + returns we pop
            # the corresponding considering the return_length notified by the
            # runtime (-1 due to index starts from 0).
            self_index = num_params - return_length - 1
            self_elem = values.pop(self_index)
            self_type = types.pop(self_index)
            if self_type == parameter.TYPE.EXTERNAL_PSCO:
                if __debug__:
                    logger.debug("Last element (self) is a PSCO with id: %s"
                                 % str(self_elem.key))
                obj = get_by_id(self_elem.key)
            else:
                obj = None
                file_name = None
                if self_elem.key is None:
                    file_name = self_elem.file_name.split(':')[-1]
                    if __debug__:
                        logger.debug("Deserialize self from file.")
                    try:
                        obj = deserialize_from_file(file_name)
                    except Exception:
                        exc_type, exc_value, exc_traceback = sys.exc_info()
                        lines = traceback.format_exception(
                            exc_type, exc_value, exc_traceback)
                        logger.exception("EXCEPTION DESERIALIZING SELF IN %s"
                                         % process_name)
                        logger.exception(''.join(line for line in lines))
                        return 1, [], [], False, None
                    if __debug__:
                        logger.debug('Deserialized self object is: %s'
                                     % self_elem.content)
                        logger.debug(
                            "Processing callee, a hidden object of %s in file %s" %  # noqa: E501
                            (file_name, type(self_elem.content)))
            values.insert(0, obj)
            if not self_type == parameter.TYPE.EXTERNAL_PSCO:
                types.insert(0, parameter.TYPE.OBJECT)
            else:
                types.insert(0, parameter.TYPE.EXTERNAL_PSCO)

            result = task_execution(logger, process_name, klass, method_name,
                                    time_out, types, values, compss_kwargs,
                                    persistent_storage, storage_conf)
            exit_code = result[0]
            new_types = result[1]
            new_values = result[2]
            target_direction = result[3]
            timed_out = result[4]
            except_msg = result[5]

            if exit_code != 0:
                return exit_code, new_types, new_values, timed_out, except_msg

            # Depending on the target_direction option, it is necessary to
            # serialize again self or not. Since this option is only visible
            # within the task decorator, the task_execution returns the value
            # of target_direction in order to know here if self has to be
            # serialized. This solution avoids to use inspect.
            if target_direction.direction == parameter.DIRECTION.INOUT or \
                    target_direction.direction == parameter.DIRECTION.COMMUTATIVE:  # noqa: E501
                if is_psco(obj):
                    # There is no explicit update if self is a PSCO.
                    # Consequently, the changes on the PSCO must have been
                    # pushed into the storage automatically on each PSCO
                    # modification.
                    if __debug__:
                        logger.debug("The changes on the PSCO must have been" +
                                     " automatically updated by the storage.")
                else:
                    if __debug__:
                        logger.debug("Serializing self to file: %s"
                                     % file_name)
                    try:
                        serialize_to_file(obj, file_name)
                    except Exception:
                        # Fix: was a bare except, which also swallowed
                        # SystemExit/KeyboardInterrupt.
                        exc_type, exc_value, exc_traceback = sys.exc_info()
                        lines = traceback.format_exception(
                            exc_type, exc_value, exc_traceback)
                        logger.exception("EXCEPTION SERIALIZING SELF IN %s"
                                         % process_name)
                        logger.exception(''.join(line for line in lines))
                        return 1, new_types, new_values, timed_out, except_msg
                    if __debug__:
                        logger.debug("Obj: %r" % obj)
        else:
            # Class method - class is not included in values (e.g. values=[7])
            types.append(None)  # class must be first type
            result = task_execution(logger, process_name, klass, method_name,
                                    time_out, types, values, compss_kwargs,
                                    persistent_storage, storage_conf)
            exit_code = result[0]
            new_types = result[1]
            new_values = result[2]
            target_direction = result[3]  # noqa
            timed_out = result[4]
            except_msg = result[5]

            if exit_code != 0:
                return exit_code, new_types, new_values, timed_out, except_msg

    # EVERYTHING OK
    if __debug__:
        logger.debug("End task execution. Status: Ok")

    return exit_code, new_types, new_values, timed_out, except_msg
def execute_task(process_name,              # type: str
                 storage_conf,              # type: str
                 params,                    # type: list
                 tracing,                   # type: bool
                 logger,                    # type: typing.Any
                 logger_cfg,                # type: str
                 log_files,                 # type: tuple
                 python_mpi=False,          # type: bool
                 collections_layouts=None,  # type: dict
                 cache_queue=None,          # type: typing.Any
                 cache_ids=None,            # type: typing.Any
                 cache_profiler=False,      # type: bool
                 ):
    # type: (...) -> typing.Tuple[int, list, list, typing.Optional[bool], str]
    """ ExecuteTask main method.

    :param process_name: Process name.
    :param storage_conf: Storage configuration file path.
    :param params: List of parameters.
    :param tracing: Tracing flag.
    :param logger: Logger to use.
    :param logger_cfg: Logger configuration file
    :param log_files: Tuple with (out filename, err filename). None to avoid
                      stdout and sdterr fd redirection.
    :param python_mpi: If it is a MPI task.
    :param collections_layouts: collections layouts for python MPI tasks
    :param cache_queue: Cache tracker communication queue
    :param cache_ids: Cache proxy dictionary (read-only)
    :param cache_profiler: Cache profiler
    :return: updated_args, exit_code, new_types, new_values, timed_out
             and except_msg
    """
    if __debug__:
        logger.debug("BEGIN TASK execution in %s" % process_name)

    # Persistent storage is enabled for any configuration other than 'null'.
    persistent_storage = False
    if storage_conf != 'null':
        persistent_storage = True

    # Retrieve the parameters from the params argument
    path = params[0]
    method_name = params[1]
    num_slaves = int(params[3])
    time_out = int(params[2])
    slaves = []
    for i in range(3, 3 + num_slaves):
        slaves.append(params[i])
    arg_position = 4 + num_slaves

    args = params[arg_position:]
    cus = args[0]  # noqa
    args = args[1:]
    has_target = args[0]
    # Next parameter: return_type = args[1]
    return_length = int(args[2])
    num_params = int(args[3])
    args = args[4:]

    # COMPSs keywords for tasks (ie: tracing, process name...)
    # compss_key is included to be checked in the @task decorator, so that
    # the task knows if it has been called from the worker or from the
    # user code (reason: ignore @task decorator if called from another task
    # or decide if submit to runtime if nesting is enabled).
    compss_kwargs = {
        'compss_key': True,
        'compss_tracing': tracing,
        'compss_process_name': process_name,
        'compss_storage_conf': storage_conf,
        'compss_return_length': return_length,
        'compss_logger': logger,
        'compss_log_cfg': logger_cfg,
        'compss_log_files': log_files,
        'compss_python_MPI': python_mpi,
        'compss_collections_layouts': collections_layouts,
        'cache_queue': cache_queue,
        'cache_ids': cache_ids,
        'cache_profiler': cache_profiler,
    }

    if __debug__:
        logger.debug("COMPSs parameters:")
        logger.debug("\t- Storage conf: %s" % str(storage_conf))
        logger.debug("\t- Logger cfg: %s" % str(logger_cfg))
        if log_files:
            logger.debug("\t- Log out file: %s" % str(log_files[0]))
            logger.debug("\t- Log err file: %s" % str(log_files[1]))
        else:
            logger.debug("\t- Log out and err not redirected")
        logger.debug("\t- Params: %s" % str(params))
        logger.debug("\t- Path: %s" % str(path))
        logger.debug("\t- Method name: %s" % str(method_name))
        logger.debug("\t- Num slaves: %s" % str(num_slaves))
        logger.debug("\t- Slaves: %s" % str(slaves))
        logger.debug("\t- Cus: %s" % str(cus))
        logger.debug("\t- Has target: %s" % str(has_target))
        logger.debug("\t- Num Params: %s" % str(num_params))
        logger.debug("\t- Return Length: %s" % str(return_length))
        logger.debug("\t- Args: %r" % args)
        logger.debug("\t- COMPSs kwargs:")
        for k, v in compss_kwargs.items():
            logger.debug("\t\t- %s: %s" % (str(k), str(v)))

    # Get all parameter values
    if __debug__:
        logger.debug("Processing parameters:")
        # logger.debug(args)
    values = get_task_params(num_params, logger, args)
    types = [x.content_type for x in values]

    if __debug__:
        logger.debug("RUN TASK with arguments:")
        logger.debug("\t- Path: %s" % path)
        logger.debug("\t- Method/function name: %s" % method_name)
        logger.debug("\t- Has target: %s" % str(has_target))
        logger.debug("\t- # parameters: %s" % str(num_params))
        # Next parameters are the values:
        # logger.debug("\t- Values:")
        # for v in values:
        #     logger.debug("\t\t %r" % v)
        # logger.debug("\t- COMPSs types:")
        # for t in types:
        #     logger.debug("\t\t %s" % str(t))

    import_error = False
    if __debug__:
        logger.debug("LOAD TASK:")
    try:
        # Try to import the module (for functions)
        if __debug__:
            logger.debug("\t- Trying to import the user module: %s" % path)
        module = import_user_module(path, logger)
    except ImportError:
        # An ImportError here means path names a class, not a module:
        # fall through to the class-method branch below.
        if __debug__:
            msg = "\t- Could not import the module. Reason: Method in class."
            logger.debug(msg)
        import_error = True

    if __debug__:
        logger.debug("EXECUTE TASK:")

    if not import_error:
        # Module method declared as task
        result = task_execution(logger, process_name, module, method_name,
                                time_out, types, values, compss_kwargs,
                                persistent_storage, storage_conf)
        exit_code = result[0]
        new_types = result[1]
        new_values = result[2]
        # Next result: target_direction = result[3]
        timed_out = result[4]
        except_msg = result[5]
    else:
        # Method declared as task in class
        # Not the path of a module, it ends with a class name
        class_name = path.split('.')[-1]
        if '.' in path:
            module_name = '.'.join(path.split('.')[0:-1])
        else:
            module_name = path
        try:
            module = __import__(module_name, fromlist=[class_name])
            klass = getattr(module, class_name)
        except Exception:  # noqa
            exc_type, exc_value, exc_traceback = sys.exc_info()
            lines = traceback.format_exception(exc_type, exc_value,
                                               exc_traceback)
            exception_message = "EXCEPTION IMPORTING MODULE IN %s\n" % process_name  # noqa: E501
            exception_message += ''.join(line for line in lines)
            logger.exception(exception_message)
            return 1, [], [], None, exception_message

        if __debug__:
            logger.debug("Method in class %s of module %s"
                         % (class_name, module_name))
            logger.debug("Has target: %s" % str(has_target))

        if has_target == 'true':
            # Instance method
            # The self object needs to be an object in order to call the
            # function. So, it can not be done in the @task decorator.
            # Since the args structure is parameters + self + returns we pop
            # the corresponding considering the return_length notified by the
            # runtime (-1 due to index starts from 0).
            self_index = num_params - return_length - 1
            self_elem = values.pop(self_index)
            self_type = types.pop(self_index)
            if self_type == parameter.TYPE.EXTERNAL_PSCO:
                # self lives in the persistent storage: fetch it by id.
                if __debug__:
                    logger.debug("Last element (self) is a PSCO with id: %s"
                                 % str(self_elem.content))
                obj = get_by_id(self_elem.content)
            else:
                obj = None
                file_name = "None"
                if self_elem.content == "":
                    # self was shipped as a file: deserialize it.
                    file_name = self_elem.file_name.original_path
                    if __debug__:
                        logger.debug("\t- Deserialize self from file.")
                    try:
                        obj = deserialize_from_file(file_name)
                    except Exception:  # noqa
                        exc_type, exc_value, exc_traceback = sys.exc_info()
                        lines = traceback.format_exception(exc_type,
                                                           exc_value,
                                                           exc_traceback)
                        exception_message = "EXCEPTION DESERIALIZING SELF IN %s\n" % process_name  # noqa: E501
                        exception_message += ''.join(line for line in lines)
                        logger.exception(exception_message)
                        return 1, [], [], None, exception_message
                    if __debug__:
                        logger.debug("Deserialized self object is: %s"
                                     % self_elem.content)
                        logger.debug("Processing callee, a hidden object of %s in file %s" %  # noqa: E501
                                     (file_name, type(self_elem.content)))
            # Re-insert self as the first value/type for the invocation.
            values.insert(0, obj)  # noqa
            if not self_type == parameter.TYPE.EXTERNAL_PSCO:
                types.insert(0, parameter.TYPE.OBJECT)
            else:
                types.insert(0, parameter.TYPE.EXTERNAL_PSCO)

            result = task_execution(logger, process_name, klass, method_name,
                                    time_out, types, values, compss_kwargs,
                                    persistent_storage, storage_conf)
            exit_code = result[0]
            new_types = result[1]
            new_values = result[2]
            target_direction = result[3]
            timed_out = result[4]
            except_msg = result[5]

            # Depending on the target_direction option, it is necessary to
            # serialize again self or not. Since this option is only visible
            # within the task decorator, the task_execution returns the value
            # of target_direction in order to know here if self has to be
            # serialized. This solution avoids to use inspect.
            if target_direction is not None and \
                    (target_direction.direction == parameter.DIRECTION.INOUT or
                     target_direction.direction == parameter.DIRECTION.COMMUTATIVE):  # noqa: E501
                if is_psco(obj):
                    # There is no explicit update if self is a PSCO.
                    # Consequently, the changes on the PSCO must have been
                    # pushed into the storage automatically on each PSCO
                    # modification.
                    if __debug__:
                        logger.debug("The changes on the PSCO must have been" +
                                     " automatically updated by the storage.")
                else:
                    if __debug__:
                        logger.debug("Serializing self (%r) to file: %s"
                                     % (obj, file_name))
                    try:
                        serialize_to_file(obj, file_name)
                    except Exception:  # noqa
                        # Catch any serialization exception
                        exc_type, exc_value, exc_traceback = sys.exc_info()
                        lines = traceback.format_exception(exc_type,
                                                           exc_value,
                                                           exc_traceback)
                        logger.exception("EXCEPTION SERIALIZING SELF IN %s" % process_name)  # noqa: E501
                        logger.exception(''.join(line for line in lines))
                        exit_code = 1
                    if __debug__:
                        logger.debug("Serialized successfully")
        else:
            # Class method - class is not included in values (e.g. values=[7])
            types.append(None)  # class must be first type
            result = task_execution(logger, process_name, klass, method_name,
                                    time_out, types, values, compss_kwargs,
                                    persistent_storage, storage_conf)
            exit_code = result[0]
            new_types = result[1]
            new_values = result[2]
            # Next return: target_direction = result[3]
            timed_out = result[4]
            except_msg = result[5]

    if __debug__:
        if exit_code != 0:
            logger.debug("EXECUTE TASK FAILED: Exit code: %s"
                         % str(exit_code))
        else:
            logger.debug("END TASK execution. Status: Ok")

    return int(exit_code), new_types, new_values, timed_out, except_msg
def update_file(obj):
    """Re-serialize an already-persisted object to its PSCO file."""
    if obj.getID() is None:
        return
    storage_path = '/tmp/PSCO/' + str(socket.gethostname()) + '/'  # NOSONAR
    serialize_to_file(obj, storage_path + obj.getID() + ".PSCO")
def updateFile(obj):
    """Legacy camelCase variant: refresh the PSCO file of a persisted object."""
    if obj.getID() is None:
        return
    import socket
    storage_path = '/tmp/PSCO/' + str(socket.gethostname()) + '/'
    from pycompss.util.serialization.serializer import serialize_to_file
    serialize_to_file(obj, storage_path + obj.getID() + ".PSCO")
def manage_inouts(self, args, python_mpi):
    # type: (tuple, bool) -> None
    """ Deal with INOUTS. Serializes the result of INOUT parameters.

    :param args: Argument list.
    :param python_mpi: Boolean if python mpi.
    :return: None
    """
    if __debug__:
        logger.debug("Dealing with INOUTs and OUTS")
        if python_mpi:
            logger.debug("\t - Managing with MPI policy")

    # Manage all the possible outputs of the task and build the return new
    # types and values
    for arg in args:
        # Handle only task parameters that are objects
        # Skip files and non-task-parameters
        if not isinstance(arg, Parameter) or \
                not self.is_parameter_an_object(arg.name):
            continue

        # File collections are objects, but must be skipped as well
        if self.is_parameter_file_collection(arg.name):
            continue

        # Skip psco: since param.content_type has the old type, we can
        # not use: param.content_type != parameter.TYPE.EXTERNAL_PSCO
        _is_psco_true = (arg.content_type == parameter.TYPE.EXTERNAL_PSCO or
                         is_psco(arg.content))
        if _is_psco_true:
            continue

        # Look up the decorator-declared direction for this parameter,
        # falling back to the task's default direction.
        original_name = get_name_from_kwarg(arg.name)
        param = self.decorator_arguments.get(
            original_name, self.get_default_direction(original_name))

        # skip non-inouts or non-col_outs
        _is_col_out = (arg.content_type == parameter.TYPE.COLLECTION and
                       param.direction == parameter.DIRECTION.OUT)
        _is_inout = (param.direction == parameter.DIRECTION.INOUT or
                     param.direction == parameter.DIRECTION.COMMUTATIVE)
        if not (_is_inout or _is_col_out):
            continue

        # Now it's 'INOUT' or 'COLLLECTION_OUT' object param, serialize
        # to a file.
        if arg.content_type == parameter.TYPE.COLLECTION:
            if __debug__:
                logger.debug("Serializing collection: " + str(arg.name))
            # handle collections recursively
            for (content, elem) in __get_collection_objects__(arg.content, arg):  # noqa: E501
                if elem.file_name:
                    f_name = __get_file_name__(elem.file_name)
                    if __debug__:
                        logger.debug("\t - Serializing element: " +
                                     str(arg.name) + " to " + str(f_name))
                    if python_mpi:
                        serialize_to_file_mpienv(content, f_name, False)
                    else:
                        serialize_to_file(content, f_name)
                else:
                    # It is None --> PSCO
                    pass
        else:
            f_name = __get_file_name__(arg.file_name)
            if __debug__:
                logger.debug("Serializing object: " + str(arg.name) +
                             " to " + str(f_name))
            if python_mpi:
                serialize_to_file_mpienv(arg.content, f_name, False)
            else:
                serialize_to_file(arg.content, f_name)