Example #1
def build_collection_types_values(_content, _arg, direction):
    """ Retrieve collection types and values recursively. """
    coll = []
    for (_cont, _elem) in zip(_arg.content, _arg.collection_content):
        if isinstance(_elem, str):
            coll.append((parameter.TYPE.FILE, 'null'))
        else:
            if _elem.content_type == parameter.TYPE.COLLECTION:
                coll.append(build_collection_types_values(_cont, _elem, direction))  # noqa
            elif _elem.content_type == parameter.TYPE.EXTERNAL_PSCO and \
                    is_psco(_cont) and direction != parameter.DIRECTION.IN:  # noqa
                coll.append((_elem.content_type, _cont.getID()))
            elif _elem.content_type == parameter.TYPE.FILE and \
                    is_psco(_cont) and direction != parameter.DIRECTION.IN:  # noqa
                coll.append((parameter.TYPE.EXTERNAL_PSCO, _cont.getID()))   # noqa
            else:
                coll.append((_elem.content_type, 'null'))
    return coll
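
The function in Example #1 walks a collection parameter recursively and emits one (type, value) pair per element: plain strings become FILE/'null' placeholders, nested collections are recursed into, and persistent objects (PSCOs) report their storage id when the direction allows it. The following minimal, self-contained sketch shows the same traversal pattern; the constants and the dict-based PSCO stand-in are purely illustrative and not part of the PyCOMPSs API.

# Simplified stand-ins for the PyCOMPSs type constants (illustrative only).
TYPE_FILE = "FILE"
TYPE_EXTERNAL_PSCO = "EXTERNAL_PSCO"


def build_types_values(node):
    """Walk a nested structure and emit one (type, value) pair per element."""
    pairs = []
    for elem in node:
        if isinstance(elem, list):
            # Nested collection: recurse and keep the nested list of pairs.
            pairs.append(build_types_values(elem))
        elif isinstance(elem, dict) and "psco_id" in elem:
            # Persistent object stand-in: report its type and its id.
            pairs.append((TYPE_EXTERNAL_PSCO, elem["psco_id"]))
        else:
            # Any other leaf: type only, no value needs to be reported.
            pairs.append((TYPE_FILE, "null"))
    return pairs


if __name__ == "__main__":
    data = [1, [2, {"psco_id": "abc-123"}], "x"]
    print(build_types_values(data))
    # [('FILE', 'null'), [('FILE', 'null'), ('EXTERNAL_PSCO', 'abc-123')], ('FILE', 'null')]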
Example #2
    def manage_returns(num_returns, user_returns, ret_params, python_mpi):
        # type: (int, list, list, bool) -> list
        """ Manage task returns.

        :param num_returns: Number of returns.
        :param user_returns: User returns.
        :param ret_params: Return parameters.
        :param python_mpi: Boolean indicating whether it is Python MPI code.
        :return: User returns.
        """
        if __debug__:
            logger.debug("Dealing with returns: " + str(num_returns))
        if num_returns > 0:
            if num_returns == 1:
                # Generalize the return case to multi-return to simplify the
                # code
                user_returns = [user_returns]
            elif num_returns > 1 and python_mpi:
                user_returns = [user_returns]
                ret_params = __get_ret_rank__(ret_params)
            # Note that we are implicitly assuming that the length of the user
            # returns matches the number of return parameters
            for (obj, param) in zip(user_returns, ret_params):
                # If the object is a PSCO, do not serialize to file
                if param.content_type == parameter.TYPE.EXTERNAL_PSCO or is_psco(obj):
                    continue
                # Serialize the object
                # Note that there is no "command line optimization" in the
                # returns, as we always pass them as files.
                # This is due to the asymmetry in worker-master communications
                # and because it also makes it easier for us to deal with
                # returns in that format
                f_name = __get_file_name__(param.file_name)
                if __debug__:
                    logger.debug("Serializing return: " + str(f_name))
                if python_mpi:
                    # Reduce to rank zero only for the single-return case.
                    rank_zero_reduce = num_returns == 1
                    serialize_to_file_mpienv(obj, f_name, rank_zero_reduce)
                else:
                    serialize_to_file(obj, f_name)
        return user_returns
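
manage_returns in Example #2 first normalizes the single-return case into a list so that one zip over (object, return parameter) pairs covers both cases, and then serializes every non-PSCO return to the file assigned by the runtime. A rough standalone sketch of that normalize-then-serialize pattern follows, using pickle in place of the PyCOMPSs serializer; ReturnParam and the file names are made up for illustration.

import pickle
from collections import namedtuple

# Hypothetical stand-in for a return parameter descriptor.
ReturnParam = namedtuple("ReturnParam", ["file_name"])


def manage_returns_sketch(num_returns, user_returns, ret_params):
    """Normalize returns to a list and serialize each one to its file."""
    if num_returns == 0:
        return user_returns
    if num_returns == 1:
        # Generalize the single-return case to the multi-return one.
        user_returns = [user_returns]
    # Assumes len(user_returns) == len(ret_params), as the original does.
    for obj, param in zip(user_returns, ret_params):
        with open(param.file_name, "wb") as fd:
            pickle.dump(obj, fd)
    return user_returns


if __name__ == "__main__":
    params = [ReturnParam("ret_0.pkl"), ReturnParam("ret_1.pkl")]
    manage_returns_sketch(2, ("a", [1, 2, 3]), params)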
Example #3
def execute_task(process_name,
                 storage_conf,
                 params,
                 tracing,
                 logger,
                 python_mpi=False):
    """
    ExecuteTask main method.

    :param process_name: Process name
    :param storage_conf: Storage configuration file path
    :param params: List of parameters
    :param tracing: Tracing flag
    :param logger: Logger to use
    :param python_mpi: If it is an MPI task
    :return: exit code, new types and new values
    """
    if __debug__:
        logger.debug("Begin task execution in %s" % process_name)

    persistent_storage = False
    if storage_conf != 'null':
        persistent_storage = True

    # Retrieve the parameters from the params argument
    path = params[0]
    method_name = params[1]
    num_slaves = int(params[3])
    time_out = int(params[2])
    slaves = []
    for i in range(3, 3 + num_slaves):
        slaves.append(params[i])
    arg_position = 4 + num_slaves

    args = params[arg_position:]
    cus = args[0]
    args = args[1:]
    has_target = args[0]
    return_type = args[1]
    return_length = int(args[2])
    num_params = int(args[3])

    args = args[4:]

    # COMPSs keywords for tasks (i.e.: tracing, process name...)
    # compss_key is included to be checked in the @task decorator, so that
    # the task knows if it has been called from the worker or from the
    # user code (reason: ignore @task decorator if called from another task).
    compss_kwargs = {
        'compss_key': True,
        'compss_tracing': tracing,
        'compss_process_name': process_name,
        'compss_storage_conf': storage_conf,
        'compss_return_length': return_length,
        'python_MPI': python_mpi
    }

    if __debug__:
        logger.debug("Storage conf: %s" % str(storage_conf))
        logger.debug("Params: %s" % str(params))
        logger.debug("Path: %s" % str(path))
        logger.debug("Method name: %s" % str(method_name))
        logger.debug("Num slaves: %s" % str(num_slaves))
        logger.debug("Slaves: %s" % str(slaves))
        logger.debug("Cus: %s" % str(cus))
        logger.debug("Has target: %s" % str(has_target))
        logger.debug("Num Params: %s" % str(num_params))
        logger.debug("Return Length: %s" % str(return_length))
        logger.debug("Args: %r" % args)

    # Get all parameter values
    if __debug__:
        logger.debug("Processing parameters:")
    values = get_input_params(num_params, logger, args)
    types = [x.type for x in values]

    if __debug__:
        logger.debug("RUN TASK with arguments:")
        logger.debug("\t- Path: %s" % path)
        logger.debug("\t- Method/function name: %s" % method_name)
        logger.debug("\t- Has target: %s" % str(has_target))
        logger.debug("\t- # parameters: %s" % str(num_params))
        logger.debug("\t- Values:")
        for v in values:
            logger.debug("\t\t %r" % v)
        logger.debug("\t- COMPSs types:")
        for t in types:
            logger.debug("\t\t %s" % str(t))

    import_error = False

    new_types = []
    new_values = []
    timed_out = False

    try:
        # Try to import the module (for functions)
        if __debug__:
            logger.debug("Trying to import the user module: %s" % path)
        py_version = sys.version_info
        if py_version >= (2, 7):
            import importlib
            module = importlib.import_module(path)  # Python 2.7
            if path.startswith('InteractiveMode_'):
                # Force reload in interactive mode. The user may have
                # overwritten a function or task.
                if py_version < (3, 0):
                    reload(module)
                elif py_version < (3, 4):
                    import imp
                    imp.reload(module)
                else:
                    importlib.reload(module)
            if __debug__:
                msg = "Module successfully loaded (Python version >= 2.7)"
                logger.debug(msg)
        else:
            module = __import__(path, globals(), locals(), [path], -1)
            if __debug__:
                msg = "Module successfully loaded (Python version < 2.7"
                logger.debug(msg)
    except ImportError:
        if __debug__:
            msg = "Could not import the module. Reason: Method in class."
            logger.debug(msg)
        import_error = True

    if not import_error:
        # Module method declared as task
        result = task_execution(logger, process_name, module, method_name,
                                time_out, types, values, compss_kwargs,
                                persistent_storage, storage_conf)
        exit_code = result[0]
        new_types = result[1]
        new_values = result[2]
        target_direction = result[3]
        timed_out = result[4]
        except_msg = result[5]

        if exit_code != 0:
            return exit_code, new_types, new_values, timed_out, except_msg

    else:
        # Method declared as task in class
        # Not the path of a module, it ends with a class name
        class_name = path.split('.')[-1]

        if '.' in path:
            module_name = '.'.join(path.split('.')[0:-1])
        else:
            module_name = path
        try:
            module = __import__(module_name, fromlist=[class_name])
            klass = getattr(module, class_name)
        except Exception:
            exc_type, exc_value, exc_traceback = sys.exc_info()
            lines = traceback.format_exception(exc_type, exc_value,
                                               exc_traceback)
            logger.exception("EXCEPTION IMPORTING MODULE IN %s" % process_name)
            logger.exception(''.join(line for line in lines))
            return 1, [], [], False, None

        if __debug__:
            logger.debug("Method in class %s of module %s" %
                         (class_name, module_name))
            logger.debug("Has target: %s" % str(has_target))

        if has_target == 'true':
            # Instance method.
            # The self object needs to be an actual object in order to call
            # the function, so it cannot be handled in the @task decorator.
            # Since the args structure is parameters + self + returns, we pop
            # the corresponding element taking into account the return_length
            # notified by the runtime (-1 because indexing starts at 0).
            self_index = num_params - return_length - 1
            self_elem = values.pop(self_index)
            self_type = types.pop(self_index)
            if self_type == parameter.TYPE.EXTERNAL_PSCO:
                if __debug__:
                    logger.debug("Last element (self) is a PSCO with id: %s" %
                                 str(self_elem.key))
                obj = get_by_id(self_elem.key)
            else:
                obj = None
                file_name = None
                if self_elem.key is None:
                    file_name = self_elem.file_name.split(':')[-1]
                    if __debug__:
                        logger.debug("Deserialize self from file.")
                    try:
                        obj = deserialize_from_file(file_name)
                    except Exception:
                        exc_type, exc_value, exc_traceback = sys.exc_info()
                        lines = traceback.format_exception(
                            exc_type, exc_value, exc_traceback)
                        logger.exception("EXCEPTION DESERIALIZING SELF IN %s" %
                                         process_name)
                        logger.exception(''.join(line for line in lines))
                        return 1, [], [], False, None
                    if __debug__:
                        logger.debug('Deserialized self object is: %s' %
                                     self_elem.content)
                        logger.debug(
                            "Processing callee, a hidden object of %s in file %s"
                            %  # noqa: E501
                            (type(self_elem.content), file_name))
            values.insert(0, obj)

            if self_type != parameter.TYPE.EXTERNAL_PSCO:
                types.insert(0, parameter.TYPE.OBJECT)
            else:
                types.insert(0, parameter.TYPE.EXTERNAL_PSCO)

            result = task_execution(logger, process_name, klass, method_name,
                                    time_out, types, values, compss_kwargs,
                                    persistent_storage, storage_conf)
            exit_code = result[0]
            new_types = result[1]
            new_values = result[2]
            target_direction = result[3]
            timed_out = result[4]
            except_msg = result[5]

            if exit_code != 0:
                return exit_code, new_types, new_values, timed_out, except_msg

            # Depending on the target_direction option, self may or may not
            # need to be serialized again. Since this option is only visible
            # within the task decorator, task_execution returns the value
            # of target_direction so that we know here whether self has to be
            # serialized. This solution avoids using inspect.
            if target_direction.direction == parameter.DIRECTION.INOUT or \
                    target_direction.direction == parameter.DIRECTION.COMMUTATIVE:  # noqa: E501
                if is_psco(obj):
                    # There is no explicit update if self is a PSCO.
                    # Consequently, the changes on the PSCO must have been
                    # pushed into the storage automatically on each PSCO
                    # modification.
                    if __debug__:
                        logger.debug("The changes on the PSCO must have been" +
                                     " automatically updated by the storage.")
                    pass
                else:
                    if __debug__:
                        logger.debug("Serializing self to file: %s" %
                                     file_name)
                    try:
                        serialize_to_file(obj, file_name)
                    except Exception:
                        exc_type, exc_value, exc_traceback = sys.exc_info()
                        lines = traceback.format_exception(
                            exc_type, exc_value, exc_traceback)
                        logger.exception("EXCEPTION SERIALIZING SELF IN %s" %
                                         process_name)
                        logger.exception(''.join(line for line in lines))
                        return 1, new_types, new_values, timed_out, except_msg
                    if __debug__:
                        logger.debug("Obj: %r" % obj)
        else:
            # Class method - class is not included in values (e.g. values=[7])
            types.append(None)  # class must be first type

            result = task_execution(logger, process_name, klass, method_name,
                                    time_out, types, values, compss_kwargs,
                                    persistent_storage, storage_conf)
            exit_code = result[0]
            new_types = result[1]
            new_values = result[2]
            target_direction = result[3]
            timed_out = result[4]
            except_msg = result[5]

            if exit_code != 0:
                return exit_code, new_types, new_values, timed_out, except_msg

    # EVERYTHING OK
    if __debug__:
        logger.debug("End task execution. Status: Ok")

    return exit_code, new_types, new_values, timed_out, except_msg
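
Before anything runs, execute_task in Example #3 unpacks the flat params list following a fixed layout: path, method name, timeout, number of slaves and the slave names, then computing units, has_target, return type, return length and the number of parameters, followed by the raw per-parameter arguments. The sketch below mirrors exactly the indices read in the example and packs them into a dictionary; it illustrates the layout only and is not a function of the real worker.

def parse_task_header(params):
    """Unpack the fixed header fields, mirroring the indices used above."""
    path = params[0]
    method_name = params[1]
    time_out = int(params[2])
    num_slaves = int(params[3])
    # Same slice as the example's range(3, 3 + num_slaves) loop.
    slaves = params[3:3 + num_slaves]
    rest = params[4 + num_slaves:]
    return {
        "path": path,
        "method_name": method_name,
        "time_out": time_out,
        "num_slaves": num_slaves,
        "slaves": slaves,
        "cus": rest[0],
        "has_target": rest[1],
        "return_type": rest[2],
        "return_length": int(rest[3]),
        "num_params": int(rest[4]),
        "args": rest[5:],
    }


if __name__ == "__main__":
    # Made-up list that only exercises the indices above.
    demo = ["my.module", "my_task", "0", "0",
            "1", "true", "null", "1", "2", "p1", "p2"]
    print(parse_task_header(demo))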
Example #4
def execute_task(process_name,              # type: str
                 storage_conf,              # type: str
                 params,                    # type: list
                 tracing,                   # type: bool
                 logger,                    # type: typing.Any
                 logger_cfg,                # type: str
                 log_files,                 # type: tuple
                 python_mpi=False,          # type: bool
                 collections_layouts=None,  # type: dict
                 cache_queue=None,          # type: typing.Any
                 cache_ids=None,            # type: typing.Any
                 cache_profiler=False,      # type: bool
                 ):
    # type: (...) -> typing.Tuple[int, list, list, typing.Optional[bool], str]
    """ ExecuteTask main method.

    :param process_name: Process name.
    :param storage_conf: Storage configuration file path.
    :param params: List of parameters.
    :param tracing: Tracing flag.
    :param logger: Logger to use.
    :param logger_cfg: Logger configuration file.
    :param log_files: Tuple with (out filename, err filename).
                      None to avoid stdout and stderr fd redirection.
    :param python_mpi: If it is an MPI task.
    :param collections_layouts: Collections layouts for Python MPI tasks.
    :param cache_queue: Cache tracker communication queue.
    :param cache_ids: Cache proxy dictionary (read-only).
    :param cache_profiler: Cache profiler.
    :return: exit_code, new_types, new_values, timed_out and except_msg.
    """
    if __debug__:
        logger.debug("BEGIN TASK execution in %s" % process_name)

    persistent_storage = False
    if storage_conf != 'null':
        persistent_storage = True

    # Retrieve the parameters from the params argument
    path = params[0]
    method_name = params[1]
    num_slaves = int(params[3])
    time_out = int(params[2])
    slaves = []
    for i in range(3, 3 + num_slaves):
        slaves.append(params[i])
    arg_position = 4 + num_slaves

    args = params[arg_position:]
    cus = args[0]  # noqa
    args = args[1:]
    has_target = args[0]
    # Next parameter: return_type = args[1]
    return_length = int(args[2])
    num_params = int(args[3])

    args = args[4:]

    # COMPSs keywords for tasks (i.e.: tracing, process name...)
    # compss_key is included to be checked in the @task decorator, so that
    # the task knows if it has been called from the worker or from the
    # user code (reason: ignore the @task decorator if called from another
    # task, or decide whether to submit to the runtime if nesting is enabled).
    compss_kwargs = {
        'compss_key': True,
        'compss_tracing': tracing,
        'compss_process_name': process_name,
        'compss_storage_conf': storage_conf,
        'compss_return_length': return_length,
        'compss_logger': logger,
        'compss_log_cfg': logger_cfg,
        'compss_log_files': log_files,
        'compss_python_MPI': python_mpi,
        'compss_collections_layouts': collections_layouts,
        'cache_queue': cache_queue,
        'cache_ids': cache_ids,
        'cache_profiler': cache_profiler,
    }

    if __debug__:
        logger.debug("COMPSs parameters:")
        logger.debug("\t- Storage conf: %s" % str(storage_conf))
        logger.debug("\t- Logger cfg: %s" % str(logger_cfg))
        if log_files:
            logger.debug("\t- Log out file: %s" % str(log_files[0]))
            logger.debug("\t- Log err file: %s" % str(log_files[1]))
        else:
            logger.debug("\t- Log out and err not redirected")
        logger.debug("\t- Params: %s" % str(params))
        logger.debug("\t- Path: %s" % str(path))
        logger.debug("\t- Method name: %s" % str(method_name))
        logger.debug("\t- Num slaves: %s" % str(num_slaves))
        logger.debug("\t- Slaves: %s" % str(slaves))
        logger.debug("\t- Cus: %s" % str(cus))
        logger.debug("\t- Has target: %s" % str(has_target))
        logger.debug("\t- Num Params: %s" % str(num_params))
        logger.debug("\t- Return Length: %s" % str(return_length))
        logger.debug("\t- Args: %r" % args)
        logger.debug("\t- COMPSs kwargs:")
        for k, v in compss_kwargs.items():
            logger.debug("\t\t- %s: %s" % (str(k), str(v)))

    # Get all parameter values
    if __debug__:
        logger.debug("Processing parameters:")
        # logger.debug(args)
    values = get_task_params(num_params, logger, args)
    types = [x.content_type for x in values]

    if __debug__:
        logger.debug("RUN TASK with arguments:")
        logger.debug("\t- Path: %s" % path)
        logger.debug("\t- Method/function name: %s" % method_name)
        logger.debug("\t- Has target: %s" % str(has_target))
        logger.debug("\t- # parameters: %s" % str(num_params))
        # Next parameters are the values:
        # logger.debug("\t- Values:")
        # for v in values:
        #     logger.debug("\t\t %r" % v)
        # logger.debug("\t- COMPSs types:")
        # for t in types:
        #     logger.debug("\t\t %s" % str(t))

    import_error = False
    if __debug__:
        logger.debug("LOAD TASK:")
    try:
        # Try to import the module (for functions)
        if __debug__:
            logger.debug("\t- Trying to import the user module: %s" % path)
        module = import_user_module(path, logger)
    except ImportError:
        if __debug__:
            msg = "\t- Could not import the module. Reason: Method in class."
            logger.debug(msg)
        import_error = True

    if __debug__:
        logger.debug("EXECUTE TASK:")
    if not import_error:
        # Module method declared as task
        result = task_execution(logger,
                                process_name,
                                module,
                                method_name,
                                time_out,
                                types,
                                values,
                                compss_kwargs,
                                persistent_storage,
                                storage_conf)
        exit_code = result[0]
        new_types = result[1]
        new_values = result[2]
        # Next result: target_direction = result[3]
        timed_out = result[4]
        except_msg = result[5]
    else:
        # Method declared as task in class
        # Not the path of a module, it ends with a class name
        class_name = path.split('.')[-1]

        if '.' in path:
            module_name = '.'.join(path.split('.')[0:-1])
        else:
            module_name = path
        try:
            module = __import__(module_name, fromlist=[class_name])
            klass = getattr(module, class_name)
        except Exception:  # noqa
            exc_type, exc_value, exc_traceback = sys.exc_info()
            lines = traceback.format_exception(exc_type,
                                               exc_value,
                                               exc_traceback)
            exception_message = "EXCEPTION IMPORTING MODULE IN %s\n" % process_name
            exception_message += ''.join(line for line in lines)
            logger.exception(exception_message)
            return 1, [], [], None, exception_message

        if __debug__:
            logger.debug("Method in class %s of module %s" % (class_name,
                                                              module_name))
            logger.debug("Has target: %s" % str(has_target))

        if has_target == 'true':
            # Instance method.
            # The self object needs to be an actual object in order to call
            # the function, so it cannot be handled in the @task decorator.
            # Since the args structure is parameters + self + returns, we pop
            # the corresponding element taking into account the return_length
            # notified by the runtime (-1 because indexing starts at 0).
            self_index = num_params - return_length - 1
            self_elem = values.pop(self_index)
            self_type = types.pop(self_index)
            if self_type == parameter.TYPE.EXTERNAL_PSCO:
                if __debug__:
                    logger.debug("Last element (self) is a PSCO with id: %s" %
                                 str(self_elem.content))
                obj = get_by_id(self_elem.content)
            else:
                obj = None
                file_name = "None"
                if self_elem.content == "":
                    file_name = self_elem.file_name.original_path
                    if __debug__:
                        logger.debug("\t- Deserialize self from file.")
                    try:
                        obj = deserialize_from_file(file_name)
                    except Exception:  # noqa
                        exc_type, exc_value, exc_traceback = sys.exc_info()
                        lines = traceback.format_exception(exc_type,
                                                           exc_value,
                                                           exc_traceback)
                        exception_message = "EXCEPTION DESERIALIZING SELF IN %s\n" % process_name
                        exception_message += ''.join(line for line in lines)
                        logger.exception(exception_message)
                        return 1, [], [], None, exception_message
                    if __debug__:
                        logger.debug("Deserialized self object is: %s" %
                                     self_elem.content)
                        logger.debug("Processing callee, a hidden object of %s in file %s" %  # noqa: E501
                                     (file_name, type(self_elem.content)))
            values.insert(0, obj)  # noqa

            if self_type != parameter.TYPE.EXTERNAL_PSCO:
                types.insert(0, parameter.TYPE.OBJECT)
            else:
                types.insert(0, parameter.TYPE.EXTERNAL_PSCO)

            result = task_execution(logger,
                                    process_name,
                                    klass,
                                    method_name,
                                    time_out,
                                    types,
                                    values,
                                    compss_kwargs,
                                    persistent_storage,
                                    storage_conf)
            exit_code = result[0]
            new_types = result[1]
            new_values = result[2]
            target_direction = result[3]
            timed_out = result[4]
            except_msg = result[5]

            # Depending on the target_direction option, self may or may not
            # need to be serialized again. Since this option is only visible
            # within the task decorator, task_execution returns the value
            # of target_direction so that we know here whether self has to be
            # serialized. This solution avoids using inspect.
            if target_direction is not None and \
                    (target_direction.direction == parameter.DIRECTION.INOUT or
                     target_direction.direction == parameter.DIRECTION.COMMUTATIVE):  # noqa: E501
                if is_psco(obj):
                    # There is no explicit update if self is a PSCO.
                    # Consequently, the changes on the PSCO must have been
                    # pushed into the storage automatically on each PSCO
                    # modification.
                    if __debug__:
                        logger.debug("The changes on the PSCO must have been" +
                                     " automatically updated by the storage.")
                else:
                    if __debug__:
                        logger.debug("Serializing self (%r) to file: %s" %
                                     (obj, file_name))
                    try:
                        serialize_to_file(obj, file_name)
                    except Exception:  # noqa
                        # Catch any serialization exception
                        exc_type, exc_value, exc_traceback = sys.exc_info()
                        lines = traceback.format_exception(exc_type,
                                                           exc_value,
                                                           exc_traceback)
                        logger.exception("EXCEPTION SERIALIZING SELF IN %s" % process_name)  # noqa: E501
                        logger.exception(''.join(line for line in lines))
                        exit_code = 1
                    if __debug__:
                        logger.debug("Serialized successfully")
        else:
            # Class method - class is not included in values (e.g. values=[7])
            types.append(None)  # class must be first type

            result = task_execution(logger,
                                    process_name,
                                    klass,
                                    method_name,
                                    time_out,
                                    types,
                                    values,
                                    compss_kwargs,
                                    persistent_storage,
                                    storage_conf)
            exit_code = result[0]
            new_types = result[1]
            new_values = result[2]
            # Next return: target_direction = result[3]
            timed_out = result[4]
            except_msg = result[5]

    if __debug__:
        if exit_code != 0:
            logger.debug("EXECUTE TASK FAILED: Exit code: %s" % str(exit_code))
        else:
            logger.debug("END TASK execution. Status: Ok")

    return int(exit_code), new_types, new_values, timed_out, except_msg
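
Compared to Example #3, the version-dependent import logic in Example #4 has been moved behind an import_user_module helper. The sketch below shows one way such a helper could look on Python 3 only, keeping the force-reload behaviour for interactively defined modules that Example #3 makes explicit; this body is an assumption, not the real implementation.

import importlib


def import_user_module(path, logger=None):
    """Import the user module by its dotted path, reloading interactive ones.

    Sketch for Python 3 only: importlib.import_module caches modules, so
    modules created in interactive mode are reloaded to pick up tasks the
    user may have redefined since the first import.
    """
    module = importlib.import_module(path)
    if path.startswith("InteractiveMode_"):
        # Force reload in interactive mode, as Example #3 does.
        module = importlib.reload(module)
    if logger is not None:
        logger.debug("User module %s loaded", path)
    return module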
Example #5
def _synchronize(obj, mode):
    # type: (object, int) -> object
    """ Synchronization function.

    This method retrieves the value of a future object.
    Calls the runtime in order to wait for the value and returns it when
    received.

    :param obj: Object to synchronize.
    :param mode: Direction of the object to synchronize.
    :return: The value of the object requested.
    """
    # TODO: Add a boolean to differentiate between files and objects on the
    # COMPSs.open_file call. This change aims to obtain better traces.
    # It must be implemented first in the Runtime, then in the bindings'
    # common C API, and finally the boolean can be added here.
    app_id = 0
    if is_psco(obj):
        obj_id = get_id(obj)
        if not OT_is_pending_to_synchronize(obj_id):
            return obj
        else:
            # file_path is of the form storage://pscoId or
            # file://sys_path_to_file
            file_path = COMPSs.open_file(app_id,
                                         "".join(("storage://", str(obj_id))),
                                         mode)
            # TODO: Add switch on protocol (first parameter returned currently ignored)
            _, file_name = file_path.split("://")
            new_obj = get_by_id(file_name)
            OT_stop_tracking(obj)
            return new_obj

    obj_id = OT_is_tracked(obj)
    if obj_id is None:  # Not being tracked
        return obj
    if not OT_is_pending_to_synchronize(obj_id):
        return obj

    if __debug__:
        logger.debug("Synchronizing object %s with mode %s" % (obj_id, mode))

    file_name = OT_get_file_name(obj_id)
    compss_file = COMPSs.open_file(app_id, file_name, mode)

    # Runtime can return a path or a PSCOId
    if compss_file.startswith('/'):
        # If the real filename is null, then return None. The task that
        # produces the output file may have been ignored or cancelled, so its
        # result does not exist.
        real_file_name = compss_file.split('/')[-1]
        if real_file_name == "null":
            print("WARNING: Could not retrieve the object " + str(file_name) +
                  " since the task that produces it may have been IGNORED or CANCELLED. Please, check the logs. Returning None.")  # noqa: E501
            return None
        new_obj = deserialize_from_file(compss_file)
        COMPSs.close_file(app_id, file_name, mode)
    else:
        new_obj = get_by_id(compss_file)

    if mode == 'r':
        OT_update_mapping(obj_id, new_obj)

    if mode != 'r':
        COMPSs.delete_file(app_id, OT_get_file_name(obj_id), False)
        OT_stop_tracking(obj)

    return new_obj
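
In Example #5 the runtime hands back either a "protocol://identifier" string (storage:// for PSCOs, file:// for regular files) or a plain filesystem path, which the code detects with startswith('/'). The tiny helper below makes that dispatch explicit; it is not part of the binding, only an illustration of the convention.

def split_compss_path(path):
    """Split 'protocol://identifier' into its two parts.

    Plain filesystem paths (no '://') are reported with the 'file'
    protocol so callers can treat both forms uniformly.
    """
    if "://" in path:
        protocol, identifier = path.split("://", 1)
        return protocol, identifier
    return "file", path


if __name__ == "__main__":
    print(split_compss_path("storage://0a1b2c"))    # ('storage', '0a1b2c')
    print(split_compss_path("file:///tmp/obj_1"))   # ('file', '/tmp/obj_1')
    print(split_compss_path("/tmp/obj_1"))          # ('file', '/tmp/obj_1')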
Example #6
    def manage_new_types_values(self,
                                num_returns,   # type: int
                                user_returns,  # type: list
                                args,          # type: tuple
                                has_self,      # type: bool
                                target_label,  # type: str
                                self_type,     # type: str
                                self_value     # type: object
                                ):
        # type: (...) -> (list, list)
        """ Manage new types and values.

        We must notify COMPSs when types are updated.
        Potential update candidates are returns and INOUTs,
        but the whole types and values list must be returned:
        new_types and new_values correspond to "parameters self returns".

        :param num_returns: Number of returns.
        :param user_returns: User returns.
        :param args: Arguments.
        :param has_self: If has self.
        :param target_label: Target label (self, cls, etc.).
        :param self_type: Self type.
        :param self_value: Self value.
        :return: List new types, List new values.
        """
        new_types, new_values = [], []
        if __debug__:
            logger.debug("Building types update")

        def build_collection_types_values(_content, _arg, direction):
            """ Retrieve collection type-value recursively"""
            coll = []
            for (_cont, _elem) in zip(_arg.content, _arg.collection_content):
                if isinstance(_elem, str):
                    coll.append((parameter.TYPE.FILE, 'null'))
                else:
                    if _elem.content_type == parameter.TYPE.COLLECTION:
                        coll.append(build_collection_types_values(_cont, _elem, direction))  # noqa
                    elif _elem.content_type == parameter.TYPE.EXTERNAL_PSCO and \
                            is_psco(_cont) and direction != parameter.DIRECTION.IN:  # noqa
                        coll.append((_elem.content_type, _cont.getID()))
                    elif _elem.content_type == parameter.TYPE.FILE and \
                            is_psco(_cont) and direction != parameter.DIRECTION.IN:  # noqa
                        coll.append((parameter.TYPE.EXTERNAL_PSCO, _cont.getID()))   # noqa
                    else:
                        coll.append((_elem.content_type, 'null'))
            return coll

        # Add parameter types and value
        params_start = 1 if has_self else 0
        params_end = len(args) - num_returns + 1
        # Update new_types and new_values with the args list
        # The results parameter is a boolean to distinguish the error message.
        for arg in args[params_start:params_end - 1]:
            # Loop through the arguments and update new_types and new_values
            if not isinstance(arg, Parameter):
                raise Exception("ERROR: A task parameter arrived as an"
                                " object instead as a TaskParameter"
                                " when building the task result message.")
            else:
                original_name = get_name_from_kwarg(arg.name)
                param = self.decorator_arguments.get(original_name,
                                                     self.get_default_direction(original_name))  # noqa: E501
                if arg.content_type == parameter.TYPE.EXTERNAL_PSCO or \
                        arg.content_type == parameter.TYPE.FILE:
                    # It was originally a persistent object
                    if is_psco(arg.content):
                        new_types.append(parameter.TYPE.EXTERNAL_PSCO)
                        new_values.append(arg.content.getID())
                    else:
                        new_types.append(arg.content_type)
                        new_values.append('null')
                elif arg.content_type == parameter.TYPE.COLLECTION:
                    # There is a collection that can contain persistent objects
                    collection_new_values = \
                        build_collection_types_values(arg.content,
                                                      arg,
                                                      param.direction)
                    new_types.append(parameter.TYPE.COLLECTION)
                    new_values.append(collection_new_values)
                else:
                    # Any other return object: same type and null value
                    new_types.append(arg.content_type)
                    new_values.append('null')

        # Add self type and value if exist
        if has_self:
            if self.decorator_arguments[target_label].direction == parameter.DIRECTION.INOUT:  # noqa: E501
                # Check if self is a PSCO that has been persisted inside the
                # task and target_direction.
                # Update self type and value
                self_type = get_compss_type(args[0])
                if self_type == parameter.TYPE.EXTERNAL_PSCO:
                    self_value = args[0].getID()
                else:
                    # Self can only be of type FILE, so avoid the last update
                    # of self_type
                    if is_psco(args[0]):
                        self_type = parameter.TYPE.EXTERNAL_PSCO
                        self_value = args[0].getID()
                    else:
                        self_type = parameter.TYPE.FILE
                        self_value = 'null'
            new_types.append(self_type)
            new_values.append(self_value)

        # Add return types and values
        # Loop through the rest of the arguments and update new_types and
        #  new_values.
        # assert len(args[params_end - 1:]) == len(user_returns)
        # add_parameter_new_types_and_values(args[params_end - 1:], True)
        if num_returns > 0:
            for ret in user_returns:
                ret_type = get_compss_type(ret)
                if ret_type == parameter.TYPE.EXTERNAL_PSCO:
                    ret_value = ret.getID()
                elif ret_type == parameter.TYPE.COLLECTION:
                    collection_ret_values = []
                    for elem in ret:
                        if elem.type == parameter.TYPE.EXTERNAL_PSCO or \
                                elem.type == parameter.TYPE.FILE:
                            if is_psco(elem.content):
                                collection_ret_values.append(elem.key)
                            else:
                                collection_ret_values.append('null')
                        else:
                            collection_ret_values.append('null')
                    new_types.append(parameter.TYPE.COLLECTION)
                    new_values.append(collection_ret_values)
                else:
                    # Returns can only be of type FILE, so avoid the last
                    # update of ret_type
                    ret_type = parameter.TYPE.FILE
                    ret_value = 'null'
                new_types.append(ret_type)
                new_values.append(ret_value)

        return new_types, new_values
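
The index arithmetic at the top of the parameter loop in Example #6 encodes the layout of args as [self?] + parameters + returns: params_end - 1 equals len(args) - num_returns, so the parameter slice stops exactly where the return entries begin. The small illustration below performs the same split on plain strings to make the slicing easier to follow; it is a simplified sketch, not code from the task decorator.

def split_task_args(args, has_self, num_returns):
    """Split a flat args tuple laid out as [self?] + parameters + returns."""
    params_start = 1 if has_self else 0
    params_end = len(args) - num_returns  # == (params_end - 1) above
    self_arg = args[0] if has_self else None
    parameters = args[params_start:params_end]
    returns = args[params_end:] if num_returns > 0 else ()
    return self_arg, parameters, returns


if __name__ == "__main__":
    demo = ("self", "p1", "p2", "r1")
    print(split_task_args(demo, has_self=True, num_returns=1))
    # ('self', ('p1', 'p2'), ('r1',))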
Example #7
    def manage_inouts(self, args, python_mpi):
        # type: (tuple, bool) -> None
        """ Deal with INOUTS. Serializes the result of INOUT parameters.

        :param args: Argument list.
        :param python_mpi: Boolean if python mpi.
        :return: None
        """
        if __debug__:
            logger.debug("Dealing with INOUTs and OUTS")
            if python_mpi:
                logger.debug("\t - Managing with MPI policy")

        # Manage all the possible outputs of the task and build the return new
        # types and values
        for arg in args:
            # Handle only task parameters that are objects

            # Skip files and non-task-parameters
            if not isinstance(arg, Parameter) or \
                    not self.is_parameter_an_object(arg.name):
                continue

            # File collections are objects, but must be skipped as well
            if self.is_parameter_file_collection(arg.name):
                continue

            # Skip psco: since param.content_type has the old type, we cannot
            # use:  param.content_type != parameter.TYPE.EXTERNAL_PSCO
            _is_psco_true = (arg.content_type == parameter.TYPE.EXTERNAL_PSCO or
                             is_psco(arg.content))
            if _is_psco_true:
                continue

            original_name = get_name_from_kwarg(arg.name)
            param = self.decorator_arguments.get(
                original_name, self.get_default_direction(original_name))

            # skip non-inouts or non-col_outs
            _is_col_out = (arg.content_type == parameter.TYPE.COLLECTION and
                           param.direction == parameter.DIRECTION.OUT)

            _is_inout = (param.direction == parameter.DIRECTION.INOUT or
                         param.direction == parameter.DIRECTION.COMMUTATIVE)

            if not (_is_inout or _is_col_out):
                continue

            # Now it is an 'INOUT' or 'COLLECTION_OUT' object parameter:
            # serialize it to a file.
            if arg.content_type == parameter.TYPE.COLLECTION:
                if __debug__:
                    logger.debug("Serializing collection: " + str(arg.name))
                # handle collections recursively
                for (content, elem) in __get_collection_objects__(arg.content, arg):  # noqa: E501
                    if elem.file_name:
                        f_name = __get_file_name__(elem.file_name)
                        if __debug__:
                            logger.debug("\t - Serializing element: " +
                                         str(arg.name) + " to " + str(f_name))
                        if python_mpi:
                            serialize_to_file_mpienv(content, f_name, False)
                        else:
                            serialize_to_file(content, f_name)
                    else:
                        # It is None --> PSCO
                        pass
            else:
                f_name = __get_file_name__(arg.file_name)
                if __debug__:
                    logger.debug("Serializing object: " +
                                 str(arg.name) + " to " + str(f_name))
                if python_mpi:
                    serialize_to_file_mpienv(arg.content, f_name, False)
                else:
                    serialize_to_file(arg.content, f_name)
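
Stripped of the logging and the collection handling, manage_inouts in Example #7 is a filter followed by a write-back: only non-PSCO object parameters whose direction is INOUT or COMMUTATIVE (or collection OUT) get serialized back to their file. Below is a condensed standalone sketch of that filter-then-serialize loop, with a dataclass and pickle standing in for the real Parameter objects and serializer; every name in it is illustrative.

import pickle
from dataclasses import dataclass

# Illustrative direction constants (stand-ins for parameter.DIRECTION.*).
IN, OUT, INOUT, COMMUTATIVE = range(4)


@dataclass
class FakeParam:
    name: str
    content: object
    direction: int
    file_name: str
    is_psco: bool = False


def write_back_inouts(params):
    """Serialize every non-PSCO object parameter declared INOUT/COMMUTATIVE."""
    for p in params:
        if p.is_psco:
            # Persistent objects are updated by the storage backend itself.
            continue
        if p.direction not in (INOUT, COMMUTATIVE):
            continue
        with open(p.file_name, "wb") as fd:
            pickle.dump(p.content, fd)


if __name__ == "__main__":
    ps = [FakeParam("a", [1, 2, 3], INOUT, "a.pkl"),
          FakeParam("b", "read only", IN, "b.pkl")]
    write_back_inouts(ps)  # only a.pkl is written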