Example #1
def getByID(id):
    """
    This function retrieves an object from the dummy external storage.
    It deserializes the object from the <storage_path>/<id>.PSCO file and
    re-attaches the given identifier to it.
    @param id: identifier (uuid4 format) of the object to be retrieved.
    @return: the retrieved object.
    """
    #print "-----------------------------------------------------"
    #print "| WARNING!!! - YOU ARE USING THE DUMMY STORAGE API. |"
    #print "| Call to: getByID                                  |"
    #print "|   *********************************************   |"
    #print "|   *** Check that you really want to use the ***   |"
    #print "|   ************* dummy storage api *************   |"
    #print "|   *********************************************   |"
    #print "-----------------------------------------------------"
    if id is not None:
        try:
            file_name = id + '.PSCO'
            file_path = storage_path + file_name
            obj = deserialize_from_file(file_path)
            obj.setID(id)
            return obj
        except ValueError:
            # The id does not comply with the uuid4 format --> raise an exception
            print(
                "Error: the ID for getByID does not comply with the uuid4 format."
            )
            raise ValueError(
                'Using the dummy storage API getByID with wrong id.')
    else:
        # Using a None id --> raise an exception
        print("Error: the ID for getByID is None.")
        raise ValueError('Using the dummy storage API getByID with None id.')
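
A minimal usage sketch (not part of the dummy storage API above): assuming an
object has already been persisted under <storage_path>/<id>.PSCO, getByID
deserializes it back; a None id or one that fails deserialization raises
ValueError. The uuid value and messages below are illustrative.

import uuid

obj_id = str(uuid.uuid4())        # ids are expected to follow the uuid4 format
try:
    obj = getByID(obj_id)         # deserializes <storage_path>/<obj_id>.PSCO
    print(obj.getID())            # the identifier is re-attached to the object
except ValueError:
    # Raised when the id is None or the stored object cannot be deserialized
    print("Could not retrieve object %s from the dummy storage" % obj_id)
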
Example #2
    def __init__(self, tf_depth, tf_base_sleep_time, tf_sleep_random_range,
                 file_elements):
        """
        Initializes the command line arguments instance with the given values.
        """
        from pycompss.util.serializer import deserialize_from_file
        self.elements = []
        for fe in file_elements:
            elem = deserialize_from_file(fe)
            self.elements.append(elem)

        self.tf_depth = int(tf_depth)
        self.tf_base_sleep_time = int(tf_base_sleep_time)
        self.tf_sleep_random_range = int(tf_sleep_random_range)
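
A minimal construction sketch. Assumptions: the __init__ above belongs to a
class called TestArguments here (a hypothetical name), and
pycompss.util.serializer also provides serialize_to_file with the
(object, file_name) signature used by the worker examples below.

from pycompss.util.serializer import serialize_to_file

# Persist two plain Python objects to files that will act as file_elements.
serialize_to_file([1, 2, 3], '/tmp/elem0.dat')
serialize_to_file({'key': 'value'}, '/tmp/elem1.dat')

args = TestArguments(tf_depth='2',
                     tf_base_sleep_time='100',
                     tf_sleep_random_range='50',
                     file_elements=['/tmp/elem0.dat', '/tmp/elem1.dat'])
print(args.elements)              # [[1, 2, 3], {'key': 'value'}]
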
Example #3
def execute_task(process_name, storage_conf, params):
    """
    ExecuteTask main method
    """
    logger = logging.getLogger('pycompss.worker.worker')

    if __debug__:
        logger.debug("[PYTHON WORKER %s] Begin task execution" % process_name)

    persistent_storage = False
    if storage_conf != 'null':
        persistent_storage = True
        from pycompss.util.persistent_storage import storage_task_context

    # COMPSs keywords for tasks (i.e. tracing, process name, ...)
    compss_kwargs = {
        'compss_tracing': tracing,
        'compss_process_name': process_name,
        'compss_storage_conf': storage_conf
    }

    # Retrieve the parameters from the params argument
    path = params[0]
    method_name = params[1]
    numSlaves = int(params[2])
    slaves = []
    for i in range(3, 3 + numSlaves):  # slaves occupy params[3:3 + numSlaves]
        slaves.append(params[i])
    argPosition = 3 + numSlaves

    args = params[argPosition:]
    cus = args[0]

    args = args[1:]
    has_target = args[0]
    return_type = args[1]
    num_params = int(args[2])

    args = args[3:]

    if __debug__:
        logger.debug("[PYTHON WORKER %s] Storage conf: %s" % (str(process_name), str(storage_conf)))
        logger.debug("[PYTHON WORKER %s] Params: %s" % (str(process_name), str(params)))
        logger.debug("[PYTHON WORKER %s] Path: %s" % (str(process_name), str(path)))
        logger.debug("[PYTHON WORKER %s] Method name: %s" % (str(process_name), str(method_name)))
        logger.debug("[PYTHON WORKER %s] Num slaves: %s" % (str(process_name), str(numSlaves)))
        logger.debug("[PYTHON WORKER %s] Slaves: %s" % (str(process_name), str(slaves)))
        logger.debug("[PYTHON WORKER %s] Cus: %s" % (str(process_name), str(cus)))
        logger.debug("[PYTHON WORKER %s] Has target: %s" % (str(process_name), str(has_target)))
        logger.debug("[PYTHON WORKER %s] Num Params: %s" % (str(process_name), str(num_params)))
        logger.debug("[PYTHON WORKER %s] Args: %r" % (str(process_name), args))

    # if tracing:
    #     pyextrae.event(TASK_EVENTS, 0)
    #     pyextrae.event(TASK_EVENTS, PARAMETER_PROCESSING)

    # Get all parameter values
    if __debug__:
        logger.debug("[PYTHON WORKER %s] Processing parameters:" % process_name)
    values, types, streams, prefixes = get_input_params(num_params, logger, args, process_name, persistent_storage)

    # if tracing:
    #     pyextrae.event(TASK_EVENTS, 0)
    #     pyextrae.event(TASK_EVENTS, LOGGING)

    if __debug__:
        logger.debug("[PYTHON WORKER %s] RUN TASK with arguments: " % process_name)
        logger.debug("[PYTHON WORKER %s] \t- Path: %s" % (process_name, path))
        logger.debug("[PYTHON WORKER %s] \t- Method/function name: %s" % (process_name, method_name))
        logger.debug("[PYTHON WORKER %s] \t- Has target: %s" % (process_name, str(has_target)))
        logger.debug("[PYTHON WORKER %s] \t- # parameters: %s" % (process_name, str(num_params)))
        logger.debug("[PYTHON WORKER %s] \t- Values:" % process_name)
        for v in values:
            logger.debug("[PYTHON WORKER %s] \t\t %r" % (process_name, v))
        logger.debug("[PYTHON WORKER %s] \t- COMPSs types:" % process_name)
        for t in types:
            logger.debug("[PYTHON WORKER %s] \t\t %s" % (process_name, str(t)))

    # if tracing:
    #     pyextrae.event(TASK_EVENTS, 0)
    #     pyextrae.event(TASK_EVENTS, MODULES_IMPORT)

    import_error = False

    newTypes = []
    newValues = []

    try:
        # Try to import the module (for functions)
        if __debug__:
            logger.debug("[PYTHON WORKER %s] Trying to import the user module: %s" % (process_name, path))
        if sys.version_info >= (2, 7):
            import importlib
            module = importlib.import_module(path)  # Python 2.7
            if __debug__:
                logger.debug("[PYTHON WORKER %s] Module successfully loaded (Python version >= 2.7)" % process_name)
        else:
            module = __import__(path, globals(), locals(), [path], -1)
            if __debug__:
                logger.debug("[PYTHON WORKER %s] Module successfully loaded (Python version < 2.7" % process_name)

        def task_execution_1():
            return task_execution(logger, process_name, module, method_name, types, values, compss_kwargs)

        if persistent_storage:
            with storage_task_context(logger, values, config_file_path=storage_conf):
                newTypes, newValues = task_execution_1()
        else:
            newTypes, newValues = task_execution_1()

    # ==========================================================================
    except AttributeError:
        # Appears with functions that have not been well defined.
        exc_type, exc_value, exc_traceback = sys.exc_info()
        lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
        logger.exception("[PYTHON WORKER %s] WORKER EXCEPTION - Attribute Error Exception" % process_name)
        logger.exception(''.join(line for line in lines))
        logger.exception("[PYTHON WORKER %s] Check that all parameters have been defined with an absolute import path (even if in the same file)" % process_name)
        # If exception is raised during the task execution, newTypes and
        # newValues are empty
        return 1, newTypes, newValues
    # ==========================================================================
    except ImportError:
        import_error = True
    # ==========================================================================
    except Exception:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
        logger.exception("[PYTHON WORKER %s] WORKER EXCEPTION" % process_name)
        logger.exception(''.join(line for line in lines))
        # If exception is raised during the task execution, newTypes and
        # newValues are empty
        return 1, newTypes, newValues

    if import_error:
        if __debug__:
            logger.debug("[PYTHON WORKER %s] Could not import the module. Reason: Method in class." % process_name)

        # Not the path of a module; it ends with a class name
        class_name = path.split('.')[-1]

        if '.' in path:
            module_name = '.'.join(path.split('.')[0:-1])
        else:
            module_name = path
        module = __import__(module_name, fromlist=[class_name])
        klass = getattr(module, class_name)

        if __debug__:
            logger.debug("[PYTHON WORKER %s] Method in class %s of module %s" % (process_name, class_name, module_name))

        if has_target == 'true':
            # Instance method
            last_elem = values.pop()
            if is_PSCO(last_elem):
                obj = last_elem
            else:
                file_name = last_elem.split(':')[-1]
                if __debug__:
                    logger.debug("[PYTHON WORKER %s] Deserialize self from file." % process_name)
                obj = deserialize_from_file(file_name)
                if __debug__:
                    logger.debug("[PYTHON WORKER %s] Processing callee, a hidden object of %s in file %s" % (process_name, file_name, type(obj)))
            values.insert(0, obj)
            types.pop()
            types.insert(0, TYPE.OBJECT if not is_PSCO(last_elem) else TYPE.EXTERNAL_PSCO)

            def task_execution_2():
                return task_execution(logger, process_name, klass, method_name, types, values, compss_kwargs)

            if persistent_storage:
                with storage_task_context(logger, values, config_file_path=storage_conf):
                    newTypes, newValues = task_execution_2()
            else:
                newTypes, newValues = task_execution_2()

            if is_PSCO(last_elem):
                # The storage API does not provide an update operation for PSCOs. Consequently, any
                # changes to the PSCO must have been pushed into the storage automatically on each modification.
                if __debug__:
                    logger.debug("[PYTHON WORKER %s] The changes on the PSCO must have been automatically updated by the storage." % process_name)
                pass
            else:
                if __debug__:
                    logger.debug("[PYTHON WORKER %s] Serializing self to file." % process_name)
                serialize_to_file(obj, file_name)
            if __debug__:
                logger.debug("[PYTHON WORKER %s] Obj: %r" % (process_name, obj))
        else:
            # Class method - class is not included in values (e.g. values = [7])
            types.insert(0, None)  # class must be first type

            def task_execution_3():
                return task_execution(logger, process_name, klass, method_name, types, values, compss_kwargs)

            if persistent_storage:
                with storage_task_context(logger, values, config_file_path=storage_conf):
                    newTypes, newValues = task_execution_3()
            else:
                newTypes, newValues = task_execution_3()

    # EVERYTHING OK
    if __debug__:
        logger.debug("[PYTHON WORKER %s] End task execution. Status: Ok" % process_name)

    # if tracing:
    #     pyextrae.eventandcounters(TASK_EVENTS, 0)

    return 0, newTypes, newValues   # Exit code, updated params
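
To make the argument slicing at the top of execute_task easier to follow, here
is an illustrative params list (all values are made up, and the numeric type
and stream codes are placeholders rather than the real TYPE constants):

params = [
    'my_module',    # params[0]                -> path (module to import)
    'my_task',      # params[1]                -> method_name
    '1',            # params[2]                -> numSlaves
    'worker02',     # params[3:3 + numSlaves]  -> slave names
    '4',            # cus: computing units assigned to the task
    'false',        # has_target: 'true' only for instance methods
    'null',         # return_type
    '1',            # num_params: number of task parameters
    '4', '3', 'null', '7',   # one parameter entry: type, stream, prefix, value
]
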
Example #4
def compss_worker(persistent_storage):
    """
    Worker main method (invoked from __main__).

    :param persistent_storage: Persistent storage boolean
    :return: None
    """

    logger = logging.getLogger('pycompss.worker.worker')

    logger.debug("Starting Worker")

    # Set the binding in worker mode
    set_pycompss_context('WORKER')

    args = sys.argv[6:]
    path = args[0]
    method_name = args[1]

    num_slaves = int(args[2])
    slaves = []
    for i in range(3, 3 + num_slaves):  # slaves occupy args[3:3 + num_slaves]
        slaves.append(args[i])
    arg_position = 3 + num_slaves

    args = args[arg_position:]
    cus = args[0]

    args = args[1:]
    has_target = args[0]
    return_type = args[1]
    num_returns = int(args[2])
    num_params = int(args[3])

    args = args[4:]

    # COMPSs keywords for tasks (i.e. tracing, process name, ...)
    compss_kwargs = {
        'compss_tracing': tracing,
        'compss_process_name': "GAT",
        'compss_storage_conf': storage_conf,
        'compss_return_length': num_returns
    }

    values = []
    types = []
    streams = []
    prefixes = []

    if tracing:
        pyextrae.event(TASK_EVENTS, 0)
        pyextrae.event(TASK_EVENTS, PARAMETER_PROCESSING)

    if persistent_storage:
        from pycompss.util.persistent_storage import storage_task_context

    # Get all parameter values
    logger.debug("Processing parameters:")
    pos = 0
    for i in range(0, num_params):
        p_type = int(args[pos])
        p_stream = int(args[pos + 1])
        p_prefix = args[pos + 2]
        p_value = args[pos + 3]

        logger.debug("Parameter : " + str(i))
        logger.debug("\t * Type : " + str(p_type))
        logger.debug("\t * Stream : " + str(p_stream))
        logger.debug("\t * Prefix : " + str(p_prefix))
        logger.debug("\t * Value: " + str(p_value))

        types.append(p_type)
        streams.append(p_stream)
        prefixes.append(p_prefix)

        if p_type == TYPE.FILE:
            values.append(p_value)
        elif p_type == TYPE.EXTERNAL_PSCO:
            po = get_by_id(p_value)
            values.append(po)
            pos += 1  # Skip info about direction (R, W)
        elif p_type == TYPE.STRING:
            num_substrings = int(p_value)
            aux = ''
            first_substring = True
            for j in range(4, num_substrings + 4):
                if not first_substring:
                    aux += ' '
                first_substring = False
                aux += args[pos + j]
            # Decode the string received
            aux = base64.b64decode(aux.encode())
            if aux.decode() == EMPTY_STRING_KEY:
                # Then it is an empty string
                aux = ""
            else:
                #######
                # Check if the string is really an object
                # Required in order to recover objects passed as parameters.
                # - Option object_conversion
                real_value = aux
                try:
                    # try to recover the real object
                    if IS_PYTHON3:
                        # decode removes double backslash, and encode returns as binary
                        aux = deserialize_from_string(aux.decode(STR_ESCAPE).encode())
                    else:
                        # decode removes double backslash, and str casts the output
                        aux = deserialize_from_string(str(aux.decode(STR_ESCAPE)))
                except (SerializerException, ValueError, EOFError):
                    # was not an object
                    aux = str(real_value.decode())
                #######
            values.append(aux)
            logger.debug("\t * Final Value: " + str(aux))
            pos += num_substrings
        elif p_type == TYPE.INT:
            values.append(int(p_value))
        elif p_type == TYPE.LONG:
            my_l = long(p_value)
            if my_l > JAVA_MAX_INT or my_l < JAVA_MIN_INT:
                # A Python int was converted to a Java long to prevent overflow
                # We are sure we will not overflow Python int, otherwise this
                # would have been passed as a serialized object.
                my_l = int(my_l)
            values.append(my_l)
        elif p_type == TYPE.DOUBLE:
            values.append(float(p_value))
        elif p_type == TYPE.BOOLEAN:
            if p_value == 'true':
                values.append(True)
            else:
                values.append(False)
        # elif (p_type == TYPE.OBJECT):
        #    pass
        else:
            logger.fatal("Invalid type (%d) for parameter %d" % (p_type, i))
            exit(1)
        pos += 4

    if tracing:
        pyextrae.event(TASK_EVENTS, 0)
        pyextrae.event(TASK_EVENTS, LOGGING)

    if logger.isEnabledFor(logging.DEBUG):
        values_str = ''
        types_str = ''
        for v in values:
            values_str += "\t\t" + str(v) + "\n"
        for t in types:
            types_str += str(t) + " "
        logger.debug("RUN TASK with arguments\n" +
                     "\t- Path: " + path + "\n" +
                     "\t- Method/function name: " + method_name + "\n" +
                     "\t- Has target: " + has_target + "\n" +
                     "\t- # parameters: " + str(num_params) + "\n" +
                     "\t- Values:\n" + values_str +
                     "\t- COMPSs types: " + types_str)

    if tracing:
        pyextrae.event(TASK_EVENTS, 0)
        pyextrae.event(TASK_EVENTS, MODULES_IMPORT)

    try:
        # Try to import the module (for functions)
        logger.debug("Trying to import the user module.")
        if sys.version_info >= (2, 7):
            module = importlib.import_module(path)  # Python 2.7
            logger.debug("Module successfully loaded (Python version >= 2.7)")
        else:
            module = __import__(path, globals(), locals(), [path], -1)
            logger.debug("Module successfully loaded (Python version < 2.7")

        if persistent_storage:
            with storage_task_context(logger, values, config_file_path=storage_conf):
                getattr(module, method_name)(*values, compss_types=types, **compss_kwargs)
                if tracing:
                    pyextrae.eventandcounters(TASK_EVENTS, 0)
                    pyextrae.eventandcounters(TASK_EVENTS, WORKER_END)
        else:
            getattr(module, method_name)(*values, compss_types=types, **compss_kwargs)
            if tracing:
                pyextrae.eventandcounters(TASK_EVENTS, 0)
                pyextrae.eventandcounters(TASK_EVENTS, WORKER_END)
    # ==========================================================================
    except AttributeError:
        # Appears with functions that have not been well defined.
        exc_type, exc_value, exc_traceback = sys.exc_info()
        lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
        logger.exception("WORKER EXCEPTION - Attribute Error Exception")
        logger.exception(''.join(line for line in lines))
        logger.exception("Check that all parameters have been defined with " +
                         "an absolute import path (even if in the same file)")
        exit(1)
    # ==========================================================================
    except ImportError:
        logger.debug("Could not import the module. Reason: Method in class.")
        # Not the path of a module; it ends with a class name
        class_name = path.split('.')[-1]

        if '.' in path:
            module_name = '.'.join(path.split('.')[0:-1])
        else:
            module_name = path
        module = __import__(module_name, fromlist=[class_name])
        klass = getattr(module, class_name)
        logger.debug("Method in class %s of module %s" % (class_name, module_name))

        if has_target == 'true':
            # Instance method
            file_name = values.pop()
            logger.debug("Deserialize self from file.")
            obj = deserialize_from_file(file_name)

            logger.debug("Processing callee, a hidden object of %s in file %s" % (file_name, type(obj)))
            values.insert(0, obj)
            types.pop()
            types.insert(0, TYPE.OBJECT)

            if persistent_storage:
                with storage_task_context(logger, values, config_file_path=storage_conf):
                    getattr(klass, method_name)(*values, compss_types=types, **compss_kwargs)
                    if tracing:
                        pyextrae.eventandcounters(TASK_EVENTS, 0)
                        pyextrae.eventandcounters(TASK_EVENTS, WORKER_END)
            else:
                getattr(klass, method_name)(*values, compss_types=types, **compss_kwargs)
                if tracing:
                    pyextrae.eventandcounters(TASK_EVENTS, 0)
                    pyextrae.eventandcounters(TASK_EVENTS, WORKER_END)

            logger.debug("Serializing self to file")
            logger.debug("Obj: " + str(obj))
            serialize_to_file(obj, file_name)
        else:
            # Class method - class is not included in values (e.g. values = [7])
            types.insert(0, None)  # class must be first type

            if persistent_storage:
                with storage_task_context(logger, values, config_file_path=storage_conf):
                    getattr(klass, method_name)(*values, compss_types=types, **compss_kwargs)
                    if tracing:
                        pyextrae.eventandcounters(TASK_EVENTS, 0)
                        pyextrae.eventandcounters(TASK_EVENTS, WORKER_END)
            else:
                getattr(klass, method_name)(*values, compss_types=types, **compss_kwargs)
                if tracing:
                    pyextrae.eventandcounters(TASK_EVENTS, 0)
                    pyextrae.eventandcounters(TASK_EVENTS, WORKER_END)
    # ==========================================================================
    except Exception:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
        logger.exception("WORKER EXCEPTION")
        logger.exception(''.join(line for line in lines))
        exit(1)
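
The TYPE.STRING branch above reassembles the received chunks and base64-decodes
them. As a minimal sketch (the encoding side is an assumption, since only the
decoding half appears in compss_worker), the round trip looks like this:

import base64

sent = base64.b64encode('some task argument'.encode()).decode()
received = base64.b64decode(sent.encode()).decode()
assert received == 'some task argument'
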
Example #5
def reveal_objects(values, spec_args, deco_kwargs, compss_types, process_name,
                   has_return, is_multi_return, num_return):
    """
    Goes through all parameters in order to find and open the files,
    deserializing hidden objects from files where needed.
    :param values: <List> - The value of each parameter.
    :param spec_args: <List> - Specific arguments.
    :param deco_kwargs: <Dictionary> - The decorator keyword arguments (Parameter objects).
    :param compss_types: <List> - The types of the values.
    :param process_name: <String> - Process name (id).
    :param has_return: <Boolean> - If the function returns a value.
    :param is_multi_return: <Boolean> - If the return is a multireturn.
    :param num_return: <Int> - Number of returns.
    :return: a tuple with the list of real values and the list of
             (object, file name) pairs that must be serialized back.
    """
    from pycompss.api.parameter import Parameter
    from pycompss.api.parameter import TYPE
    from pycompss.api.parameter import DIRECTION

    num_pars = len(spec_args)
    real_values = []
    to_serialize = []

    if has_return:
        num_pars -= num_return  # return value must not be passed to the function call

    # print('Spec args: %s'%(str(spec_args)))
    # print('COMPSs Types: %s'%(str(compss_types)))
    # print('Values: %s'%(str(values)))

    for i in range(num_pars):
        spec_arg = spec_args[i]
        compss_type = compss_types[i]
        value = values[i]
        if i == 0:
            if spec_arg == 'self':  # callee object
                if deco_kwargs['isModifier']:
                    d = DIRECTION.INOUT
                else:
                    d = DIRECTION.IN
                deco_kwargs[spec_arg] = Parameter(p_type=TYPE.OBJECT,
                                                  p_direction=d)
            elif inspect.isclass(value):  # class (it's a class method)
                real_values.append(value)
                continue

        p = deco_kwargs.get(spec_arg)
        if p is None:  # decoration not present, using default
            p = deco_kwargs['varargsType'] if spec_arg.startswith(
                '*args') else Parameter()

        if compss_type == TYPE.FILE and p.type != TYPE.FILE:
            # Getting ids and file names from passed files and objects. The
            # pattern is: "originalDataID:destinationDataID:flagToPreserveOriginalData:flagToWrite:PathToFile"
            complete_fname = value.split(':')
            if len(complete_fname) > 1:
                # In NIO we get more information
                forig = complete_fname[0]
                fdest = complete_fname[1]
                preserve = complete_fname[2]
                write_final = complete_fname[3]
                fname = complete_fname[4]
                preserve, write_final = list(
                    map(lambda x: x == "true", [preserve, write_final]))
                suffix_name = forig
            else:
                # In GAT we only get the name
                fname = complete_fname[0]

            value = fname
            # For COMPSs it is a file, but it is actually a Python object
            logger.debug("Processing a hidden object in parameter %d", i)
            obj = deserialize_from_file(value)
            real_values.append(obj)
            if p.direction != DIRECTION.IN:
                to_serialize.append((obj, value))
        else:
            # print('compss_type' + str(compss_type) + ' type' + str(p.type))
            if compss_type == TYPE.FILE:
                complete_fname = value.split(':')
                if len(complete_fname) > 1:
                    # In NIO we get more information
                    forig = complete_fname[0]
                    fdest = complete_fname[1]
                    preserve = complete_fname[2]
                    write_final = complete_fname[3]
                    fname = complete_fname[4]
                else:
                    # In GAT we only get the name
                    fname = complete_fname[0]
                value = fname
            real_values.append(value)
    return real_values, to_serialize
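
As an illustration of the NIO pattern that reveal_objects splits, here is a
made-up value (the data ids and the path are invented for the example):

value = 'd1v2:d1v3:true:false:/tmp/sandbox/d1v2_1.IT'
forig, fdest, preserve, write_final, fname = value.split(':')
# forig = 'd1v2', fdest = 'd1v3', preserve = 'true', write_final = 'false',
# fname = '/tmp/sandbox/d1v2_1.IT'
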
Example #6
def compss_worker(persistent_storage):
    """
    Worker main method (invoked from __main__).
    """
    logger = logging.getLogger('pycompss.worker.worker')

    logger.debug("Starting Worker")

    args = sys.argv[6:]
    path = args[0]
    method_name = args[1]

    numSlaves = int(args[2])
    slaves = []
    for i in range(3, 3 + numSlaves):  # slaves occupy args[3:3 + numSlaves]
        slaves.append(args[i])
    argPosition = 3 + numSlaves

    args = args[argPosition:]
    cus = args[0]

    args = args[1:]
    has_target = args[0]
    return_type = args[1]
    num_params = int(args[2])

    args = args[3:]
    pos = 0

    values = []
    types = []
    streams = []
    prefixes = []

    if tracing:
        pyextrae.event(TASK_EVENTS, 0)
        pyextrae.event(TASK_EVENTS, PARAMETER_PROCESSING)

    if persistent_storage:
        from storage.api import getByID
        from storage.api import TaskContext

    # Get all parameter values
    logger.debug("Processing parameters:")
    for i in range(0, num_params):
        pType = int(args[pos])
        pStream = int(args[pos + 1])
        pPrefix = args[pos + 2]
        pValue = args[pos + 3]

        logger.debug("Parameter : " + str(i))
        logger.debug("\t * Type : " + str(pType))
        logger.debug("\t * Stream : " + str(pStream))
        logger.debug("\t * Prefix : " + str(pPrefix))
        logger.debug("\t * Value: " + str(pValue))

        types.append(pType)
        streams.append(pStream)
        prefixes.append(pPrefix)

        if pType == TYPE.FILE:
            '''
            # check if it is a persistent object
            # TODO: identifying PSCOs this way makes little sense.
            # Why don't we simply check whether the object is an instance of a
            # storage_object subclass?
            if 'getID' in dir(pValue) and pValue.getID() is not None:
                po = getByID(pValue.getID())
                values.append(po)
            else:
                values.append(pValue)
            '''
            values.append(pValue)
        elif pType == TYPE.EXTERNAL_PSCO:
            po = getByID(pValue)
            values.append(po)
            pos += 1  # Skip info about direction (R, W)
        elif pType == TYPE.STRING:
            num_substrings = int(pValue)
            aux = ''
            first_substring = True
            for j in range(4, num_substrings + 4):
                if not first_substring:
                    aux += ' '
                first_substring = False
                aux += args[pos + j]
            # Decode the string received
            aux = base64.b64decode(aux)
            #######
            # Check if the string is really an object
            # Required in order to recover objects passed as parameters.
            # - Option object_conversion
            real_value = aux
            try:
                # try to recover the real object
                aux = deserialize_from_string(aux.decode('string_escape'))
            except (SerializerException, ValueError, EOFError):
                # was not an object
                aux = real_value
            #######
            values.append(aux)
            logger.debug("\t * Final Value: " + str(aux))
            pos += num_substrings
        elif pType == TYPE.INT:
            values.append(int(pValue))
        elif pType == TYPE.LONG:
            my_l = long(pValue)
            if my_l > JAVA_MAX_INT or my_l < JAVA_MIN_INT:
                # A Python int was converted to a Java long to prevent overflow
                # We are sure we will not overflow Python int, otherwise this
                # would have been passed as a serialized object.
                my_l = int(my_l)
            values.append(my_l)
        elif pType == TYPE.DOUBLE:
            values.append(float(pValue))
        elif pType == TYPE.BOOLEAN:
            if pValue == 'true':
                values.append(True)
            else:
                values.append(False)
        # elif (pType == TYPE.OBJECT):
        #    pass
        else:
            logger.fatal("Invalid type (%d) for parameter %d" % (pType, i))
            exit(1)
        pos += 4

    if tracing:
        pyextrae.event(TASK_EVENTS, 0)
        pyextrae.event(TASK_EVENTS, LOGGING)

    if logger.isEnabledFor(logging.DEBUG):
        values_str = ''
        types_str = ''
        for v in values:
            values_str += "\t\t" + str(v) + "\n"
        for t in types:
            types_str += str(t) + " "
        logger.debug("RUN TASK with arguments\n" +
                     "\t- Path: " + path + "\n" +
                     "\t- Method/function name: " + method_name + "\n" +
                     "\t- Has target: " + has_target + "\n" +
                     "\t- # parameters: " + str(num_params) + "\n" +
                     "\t- Values:\n" + values_str +
                     "\t- COMPSs types: " + types_str)

    if tracing:
        pyextrae.event(TASK_EVENTS, 0)
        pyextrae.event(TASK_EVENTS, MODULES_IMPORT)

    try:
        # Try to import the module (for functions)
        logger.debug("Trying to import the user module.")
        if sys.version_info >= (2, 7):
            module = importlib.import_module(path)  # Python 2.7
            logger.debug("Module successfully loaded (Python version >= 2.7)")
        else:
            module = __import__(path, globals(), locals(), [path], -1)
            logger.debug("Module successfully loaded (Python version < 2.7")

        if persistent_storage:
            with TaskContext(logger, values, config_file_path=storage_conf):
                getattr(module, method_name)(*values, compss_types=types, compss_tracing=tracing)
                if tracing:
                    pyextrae.eventandcounters(TASK_EVENTS, 0)
                    pyextrae.eventandcounters(TASK_EVENTS, WORKER_END)
        else:
            getattr(module, method_name)(*values, compss_types=types, compss_tracing=tracing)
            if tracing:
                pyextrae.eventandcounters(TASK_EVENTS, 0)
                pyextrae.eventandcounters(TASK_EVENTS, WORKER_END)
    # ==========================================================================
    except AttributeError:
        # Appears with functions that have not been well defined.
        exc_type, exc_value, exc_traceback = sys.exc_info()
        lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
        logger.exception("WORKER EXCEPTION - Attribute Error Exception")
        logger.exception(''.join(line for line in lines))
        logger.exception("Check that all parameters have been defined with " +
                         "an absolute import path (even if in the same file)")
        exit(1)
    # ==========================================================================
    except ImportError:
        logger.debug("Could not import the module. Reason: Method in class.")
        # Not the path of a module; it ends with a class name
        class_name = path.split('.')[-1]

        if '.' in path:
            module_name = '.'.join(path.split('.')[0:-1])
        else:
            module_name = path
        module = __import__(module_name, fromlist=[class_name])
        klass = getattr(module, class_name)
        logger.debug("Method in class %s of module %s" % (class_name, module_name))

        if has_target == 'true':
            # Instance method
            file_name = values.pop()
            logger.debug("Deserialize self from file.")
            obj = deserialize_from_file(file_name)

            logger.debug("Processing callee, a hidden object of %s in file %s" % (file_name, type(obj)))
            values.insert(0, obj)
            types.pop()
            types.insert(0, TYPE.OBJECT)

            if persistent_storage:
                with TaskContext(logger, values, config_file_path=storage_conf):
                    getattr(klass, method_name)(*values, compss_types=types, compss_tracing=tracing)
                    if tracing:
                        pyextrae.eventandcounters(TASK_EVENTS, 0)
                        pyextrae.eventandcounters(TASK_EVENTS, WORKER_END)
            else:
                getattr(klass, method_name)(*values, compss_types=types, compss_tracing=tracing)
                if tracing:
                    pyextrae.eventandcounters(TASK_EVENTS, 0)
                    pyextrae.eventandcounters(TASK_EVENTS, WORKER_END)

            logger.debug("Serializing self to file")
            logger.debug("Obj: " + str(obj))
            serialize_to_file(obj, file_name)
        else:
            # Class method - class is not included in values (e.g. values = [7])
            types.insert(0, None)    # class must be first type

            if persistent_storage:
                with TaskContext(logger, values, config_file_path=storage_conf):
                    getattr(klass, method_name)(*values, compss_types=types, compss_tracing=tracing)
                    if tracing:
                        pyextrae.eventandcounters(TASK_EVENTS, 0)
                        pyextrae.eventandcounters(TASK_EVENTS, WORKER_END)
            else:
                getattr(klass, method_name)(*values, compss_types=types, compss_tracing=tracing)
                if tracing:
                    pyextrae.eventandcounters(TASK_EVENTS, 0)
                    pyextrae.eventandcounters(TASK_EVENTS, WORKER_END)
    # ==========================================================================
    except Exception:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
        logger.exception("WORKER EXCEPTION")
        logger.exception(''.join(line for line in lines))
        exit(1)