Example No. 1
def start_runtime(log_level='off', tracing=0, interactive=False):
    # type: (str, int, bool) -> None
    """ Starts the COMPSs runtime.

    Starts the runtime by calling the external python library that calls
    the bindings-common.

    :param log_level: Log level [ 'trace' | 'debug' | 'info' | 'api' | 'off' ].
    :param tracing: Tracing level [0 (deactivated) | 1 (basic) | 2 (advanced)].
    :param interactive: True if running interactively (IPython or Jupyter).
    :return: None
    """
    if __debug__:
        logger.info("Starting COMPSs...")

    if tracing > 0 and not interactive:
        # Enabled only if not interactive - extrae issues within jupyter.
        enable_trace_master()

    with event(START_RUNTIME_EVENT, master=True):
        if interactive and context.in_master():
            COMPSs.load_runtime(external_process=True)
        else:
            COMPSs.load_runtime(external_process=False)

        if log_level == 'trace':
            # Could also be 'debug' or True, but we only show the C extension
            # debug at the maximum log level.
            COMPSs.set_debug(True)
            OT_enable_report()

        COMPSs.start_runtime()

    if __debug__:
        logger.info("COMPSs started")
Example No. 2
@contextmanager  # from contextlib: needed so callers can do "with event(...)"
def event(event_id,
          master=False,
          inside=False,
          cpu_affinity=False,
          gpu_affinity=False):
    # type: (int or str, bool, bool, bool, bool) -> None
    """ Emits an event wrapping the desired code.

    Does nothing if tracing is disabled.

    :param event_id: Event identifier to emit.
    :param master: If the event is emitted as master.
    :param inside: If the event is produced inside the worker.
    :param cpu_affinity: If the event is produced inside the worker for
                         cpu affinity.
    :param gpu_affinity: If the event is produced inside the worker for
                         gpu affinity.
    :return: None
    """
    emit = False
    if TRACING and in_master() and master:
        emit = True
    if TRACING and in_worker() and not master:
        emit = True
    if emit:
        event_group, event_id = __get_proper_type_event__(
            event_id, master, inside, cpu_affinity, gpu_affinity)
        PYEXTRAE.eventandcounters(event_group, event_id)  # noqa
    yield  # here the code runs
    if emit:
        PYEXTRAE.eventandcounters(event_group, 0)  # noqa
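The wrapper above depends on Extrae (PYEXTRAE) and the project's event groups. Below is a self-contained sketch of the same open/yield/close pattern with the emitter stubbed out; unlike the original, it uses try/finally so the closing marker is emitted even if the wrapped code raises.

from contextlib import contextmanager

def _emit(group, value):
    # Stand-in for PYEXTRAE.eventandcounters(group, value).
    print("emit group=%s value=%s" % (group, value))

@contextmanager
def traced(event_id, enabled=True):
    # Emit an opening marker, run the wrapped block, then emit the closing marker.
    if enabled:
        _emit("USER_EVENTS", event_id)
    try:
        yield
    finally:
        if enabled:
            _emit("USER_EVENTS", 0)

with traced(123):
    total = sum(range(10))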
Example No. 3
        def multinode_f(*args, **kwargs):
            if not self.scope:
                raise Exception(not_in_pycompss("MultiNode"))

            if __debug__:
                logger.debug("Executing multinode_f wrapper.")

            if context.in_master():
                # master code
                if not self.core_element_configured:
                    self.__configure_core_element__(kwargs)
            else:
                # worker code
                set_slurm_environment()

            # Set the computing_nodes variable in kwargs for its usage
            # in @task decorator
            kwargs['computing_nodes'] = self.kwargs['computing_nodes']

            with keep_arguments(args, kwargs, prepend_strings=True):
                # Call the method
                ret = func(*args, **kwargs)

            if context.in_worker():
                reset_slurm_environment()

            return ret
Example No. 4
        def on_failure_f(*args, **kwargs):
            # type: (*typing.Any, **typing.Any) -> typing.Any
            if not self.scope:
                from pycompss.api.dummy.on_failure import on_failure \
                    as dummy_on_failure
                d_c = dummy_on_failure(self.args, self.kwargs)
                return d_c.__call__(user_function)(*args, **kwargs)

            if __debug__:
                logger.debug("Executing on_failure_f wrapper.")

            if (context.in_master() or context.is_nesting_enabled()) \
                    and not self.core_element_configured:
                # master code - or worker with nesting enabled
                self.__configure_core_element__(kwargs)

            # Set the on failure management action and default variables in
            # kwargs for its usage in @task decorator
            kwargs["on_failure"] = self.on_failure_action
            kwargs["defaults"] = self.defaults

            with keep_arguments(args, kwargs, prepend_strings=True):
                # Call the method
                ret = user_function(*args, **kwargs)

            return ret
Example No. 5
def test_in_master_context():
    from pycompss.util.context import in_master
    from pycompss.util.context import set_pycompss_context
    set_pycompss_context(MASTER)
    master_context = in_master()
    assert master_context is True, MASTER_CONTEXT_ERROR
    set_pycompss_context(OUT_OF_SCOPE)
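A hypothetical companion test for the worker side could mirror the same pattern; WORKER and WORKER_CONTEXT_ERROR are assumed module-level names analogous to the MASTER and MASTER_CONTEXT_ERROR used above.

def test_in_worker_context():
    from pycompss.util.context import in_worker
    from pycompss.util.context import set_pycompss_context
    set_pycompss_context(WORKER)  # assumed constant, like MASTER above
    worker_context = in_worker()
    assert worker_context is True, WORKER_CONTEXT_ERROR  # assumed message constant
    set_pycompss_context(OUT_OF_SCOPE)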
Example No. 6
        def multinode_f(*args, **kwargs):
            if not self.scope:
                raise NotInPyCOMPSsException(not_in_pycompss("MultiNode"))

            if __debug__:
                logger.debug("Executing multinode_f wrapper.")

            if (context.in_master() or context.is_nesting_enabled()) \
                    and not self.core_element_configured:
                # master code - or worker with nesting enabled
                self.__configure_core_element__(kwargs, user_function)

            if context.in_worker():
                old_slurm_env = set_slurm_environment()

            # Set the computing_nodes variable in kwargs for its usage
            # in @task decorator
            kwargs['computing_nodes'] = self.kwargs['computing_nodes']

            with keep_arguments(args, kwargs, prepend_strings=True):
                # Call the method
                ret = user_function(*args, **kwargs)

            if context.in_worker():
                reset_slurm_environment(old_slurm_env)

            return ret
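For reference, an illustrative (hypothetical) call site for the decorator whose wrapper is shown above; @multinode forwards computing_nodes down to the @task decorator.

from pycompss.api.multinode import multinode
from pycompss.api.task import task

@multinode(computing_nodes=2)
@task(returns=1)
def multi_node_job(data):
    # Executed as a single task that reserves two nodes; SLURM-related
    # environment variables are adjusted on the worker side (see wrapper above).
    return len(data)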
Example No. 7
    def __decorator_body__(self, user_function, args, kwargs):
        # type: (typing.Callable, tuple, dict) -> typing.Any
        if not self.scope:
            # Execute the mpi as with PyCOMPSs so that sequential
            # execution performs as parallel.
            # To disable: raise Exception(not_in_pycompss("mpi"))
            # TODO: Intercept @task parameters to get stream redirection
            if "binary" in self.kwargs:
                return self.__run_mpi__(args, kwargs)
            else:
                print("WARN: Python MPI as dummy is not fully supported. "
                      "Executing decorated function.")
                return user_function(*args, **kwargs)

        if __debug__:
            logger.debug("Executing mpi_f wrapper.")

        if (context.in_master() or context.is_nesting_enabled()) \
                and not self.core_element_configured:
            # master code - or worker with nesting enabled
            self.__configure_core_element__(kwargs)

        # The processes parameter will have to go down until the execution
        # is invoked. To this end, set the computing_nodes variable in kwargs
        # for its usage in @task decorator
        # WARNING: processes can be an int, an env string, or a str holding
        #          a dynamic variable name.
        if "processes" in self.kwargs:
            kwargs["computing_nodes"] = self.kwargs["processes"]
        else:
            # If processes not defined, check computing_units or set default
            process_computing_nodes(self.decorator_name, self.kwargs)
            kwargs["computing_nodes"] = self.kwargs["computing_nodes"]
        if "processes_per_node" in self.kwargs:
            kwargs["processes_per_node"] = self.kwargs["processes_per_node"]
        else:
            kwargs["processes_per_node"] = 1
        if __debug__:
            logger.debug("This MPI task will have " +
                         str(kwargs["computing_nodes"]) + " processes and " +
                         str(kwargs["processes_per_node"]) +
                         " processes per node.")

        if self.task_type == IMPL_PYTHON_MPI:
            prepend_strings = True
        else:
            prepend_strings = False

        with keep_arguments(args, kwargs, prepend_strings=prepend_strings):
            # Call the method
            ret = user_function(*args, **kwargs)

        return ret
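An illustrative (hypothetical) call site matching this wrapper: processes is forwarded to the task as computing_nodes, and processes_per_node defaults to 1 when omitted.

from pycompss.api.mpi import mpi
from pycompss.api.task import task

@mpi(runner="mpirun", binary="my_mpi_app", processes=4)
@task(returns=int)
def run_mpi_app(input_flag):
    # Placeholder body: for binary MPI tasks the runtime launches the
    # binary itself rather than executing this function.
    pass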
Example No. 8
        def binary_f(*args, **kwargs):
            if not self.scope:
                # Execute the binary as with PyCOMPSs so that sequential
                # execution performs as parallel.
                # To disable: raise Exception(not_in_pycompss("binary"))
                # TODO: Intercept the @task parameters to get stream redirection
                cmd = [self.kwargs['binary']]
                if args:
                    args = [str(a) for a in args]
                    cmd += args
                my_env = os.environ.copy()
                if "working_dir" in self.kwargs:
                    my_env[
                        "PATH"] = self.kwargs["working_dir"] + my_env["PATH"]
                elif "workingDir" in self.kwargs:
                    my_env["PATH"] = self.kwargs["workingDir"] + my_env["PATH"]
                proc = subprocess.Popen(cmd,
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.PIPE,
                                        env=my_env)  # noqa: E501
                out, err = proc.communicate()
                if sys.version_info[0] < 3:
                    out_message = out.strip()
                    err_message = err.strip()
                else:
                    out_message = out.decode().strip()
                    err_message = err.decode().strip()
                if out_message:
                    print(out_message)
                if err_message:
                    sys.stderr.write(err_message + '\n')
                return proc.returncode

            if __debug__:
                logger.debug("Executing binary_f wrapper.")

            if context.in_master():
                # master code
                if not self.core_element_configured:
                    self.__configure_core_element__(kwargs, user_function)
            else:
                # worker code
                pass

            with keep_arguments(args, kwargs, prepend_strings=False):
                # Call the method
                ret = user_function(*args, **kwargs)

            return ret
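An illustrative (hypothetical) call site for @binary corresponding to the wrapper above; outside a PyCOMPSs scope the same binary is simply run with subprocess, as in the "not self.scope" branch.

from pycompss.api.binary import binary
from pycompss.api.task import task

@binary(binary="grep", working_dir="/tmp")
@task(returns=int)
def grep_task(pattern, file_name):
    # Placeholder body: the runtime runs "grep <pattern> <file_name>".
    pass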
Example No. 9
        def container_f(*args, **kwargs):
            if not self.scope:
                raise NotInPyCOMPSsException(not_in_pycompss("container"))

            if __debug__:
                logger.debug("Executing container_f wrapper.")

            if (context.in_master() or context.is_nesting_enabled()) \
                    and not self.core_element_configured:
                # master code - or worker with nesting enabled
                self.__configure_core_element__(kwargs, user_function)

            with keep_arguments(args, kwargs, prepend_strings=False):
                # Call the method
                ret = user_function(*args, **kwargs)

            return ret
Example No. 10
        def software_f(*args, **kwargs):
            # type: (*typing.Any, **typing.Any) -> typing.Any
            if not self.scope or not context.in_master():
                # Execute the software as with PyCOMPSs so that sequential
                # execution performs as parallel.
                # To disable: raise Exception(not_in_pycompss(BINARY))
                return user_function(*args, **kwargs)

            if __debug__:
                logger.debug("Executing software_f wrapper.")

            if self.constraints is not None:
                core_element = CE()
                core_element.set_impl_constraints(self.constraints)
                kwargs[CORE_ELEMENT_KEY] = core_element

            if self.container is not None:
                _func = str(user_function.__name__)
                impl_type = IMPL_CONTAINER
                impl_signature = '.'.join((impl_type, _func))

                ce = kwargs.get(CORE_ELEMENT_KEY, CE())
                impl_args = [self.container[ENGINE],  # engine
                             self.container[IMAGE],  # image
                             UNASSIGNED,  # internal_type
                             UNASSIGNED,  # internal_binary
                             UNASSIGNED,  # internal_func
                             UNASSIGNED,  # working_dir
                             UNASSIGNED]  # fail_by_ev
                ce.set_impl_type(impl_type)
                ce.set_impl_signature(impl_signature)
                ce.set_impl_type_args(impl_args)
                kwargs[CORE_ELEMENT_KEY] = ce

            if self.decor:
                decorator = self.decor

                def decor_f():
                    def f():
                        ret = decorator(**self.config_args)
                        return ret(user_function)(*args, **kwargs)
                    return f()
                return decor_f()
            else:
                # It's a PyCOMPSs task with only @task and @software decorators
                return user_function(*args, **kwargs)
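A sketch of how @software might be used with the class above. The decorator only receives a config_file; the JSON shown is a hypothetical illustration of the kind of description it parses (an execution type, its properties, and optional constraints), not a definitive schema.

from pycompss.api.software import software
from pycompss.api.task import task

# Hypothetical my_software.json:
# {
#     "type": "mpi",
#     "properties": {"runner": "mpirun", "binary": "my_app", "processes": 2},
#     "constraints": {"computing_units": 4}
# }

@software(config_file="/path/to/my_software.json")
@task(returns=int)
def run_my_app(argument):
    pass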
Example No. 11
        def implement_f(*args, **kwargs):
            # type: (*typing.Any, **typing.Any) -> typing.Any
            # This is executed only when called.
            if not self.scope:
                raise NotInPyCOMPSsException(not_in_pycompss("implement"))

            if __debug__:
                logger.debug("Executing implement_f wrapper.")

            if (context.in_master() or context.is_nesting_enabled()) \
                    and not self.core_element_configured:
                # master code - or worker with nesting enabled
                self.__configure_core_element__(kwargs)

            with keep_arguments(args, kwargs, prepend_strings=True):
                # Call the method
                ret = user_function(*args, **kwargs)

            return ret
Example No. 12
        def constrained_f(*args, **kwargs):
            if not self.scope:
                from pycompss.api.dummy.constraint import constraint \
                    as dummy_constraint
                d_c = dummy_constraint(self.args, self.kwargs)
                return d_c.__call__(user_function)(*args, **kwargs)

            if __debug__:
                logger.debug("Executing constrained_f wrapper.")

            if (context.in_master() or context.is_nesting_enabled()) \
                    and not self.core_element_configured:
                # master code - or worker with nesting enabled
                self.__configure_core_element__(kwargs, user_function)

            with keep_arguments(args, kwargs, prepend_strings=True):
                # Call the method
                ret = user_function(*args, **kwargs)

            return ret
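An illustrative call site for @constraint matching the wrapper above; the constraints end up attached to the task's core element.

from pycompss.api.constraint import constraint
from pycompss.api.task import task

@constraint(computing_units="4")
@task(returns=list)
def heavy_kernel(block):
    # Scheduled only on resources providing at least 4 computing units.
    return [x * x for x in block]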
Example No. 13
        def container_f(*args, **kwargs):
            if not self.scope:
                raise Exception(not_in_pycompss("container"))

            if __debug__:
                logger.debug("Executing container_f wrapper.")

            if context.in_master():
                # master code
                if not self.core_element_configured:
                    self.__configure_core_element__(kwargs, user_function)
            else:
                # worker code
                pass

            with keep_arguments(args, kwargs, prepend_strings=False):
                # Call the method
                ret = user_function(*args, **kwargs)

            return ret
Example No. 14
        def binary_f(*args, **kwargs):
            if not self.scope:
                # Execute the binary as with PyCOMPSs so that sequential
                # execution performs as parallel.
                # To disable: raise Exception(not_in_pycompss("binary"))
                # TODO: Intercept @task parameters to get stream redirection
                return self.__run_binary__(args, kwargs)

            if __debug__:
                logger.debug("Executing binary_f wrapper.")

            if (context.in_master() or context.is_nesting_enabled()) \
                    and not self.core_element_configured:
                # master code - or worker with nesting enabled
                self.__configure_core_element__(kwargs, user_function)

            with keep_arguments(args, kwargs, prepend_strings=False):
                # Call the method
                ret = user_function(*args, **kwargs)

            return ret
Example No. 15
    def __decorator_body__(self, user_function, args, kwargs):
        # type: (typing.Callable, tuple, dict) -> typing.Any
        # force to serialize with JSON
        serializer.FORCED_SERIALIZER = 4
        if not self.scope:
            # Out of PyCOMPSs scope: perform the http call directly.
            return self.__run_http__(args, kwargs)

        if __debug__:
            logger.debug("Executing http_f wrapper.")

        if (context.in_master() or context.is_nesting_enabled()) \
                and not self.core_element_configured:
            # master code - or worker with nesting enabled
            self.__configure_core_element__(kwargs)

        with keep_arguments(args, kwargs):
            # Call the method
            ret = user_function(*args, **kwargs)

        return ret
Example No. 16
        def implement_f(*args, **kwargs):
            # This is executed only when called.
            if not self.scope:
                raise Exception(not_in_pycompss("implement"))

            if __debug__:
                logger.debug("Executing implement_f wrapper.")

            if context.in_master():
                # master code
                if not self.core_element_configured:
                    self.__configure_core_element__(kwargs)
            else:
                # worker code
                pass

            with keep_arguments(args, kwargs, prepend_strings=True):
                # Call the method
                ret = func(*args, **kwargs)

            return ret
Example No. 17
    def __decorator_body__(self, user_function, args, kwargs):
        # type: (typing.Callable, tuple, dict) -> typing.Any
        if not self.scope:
            raise NotImplementedError

        if __debug__:
            logger.debug("Executing mpmd_mpi_f wrapper.")

        if (context.in_master() or context.is_nesting_enabled()) \
                and not self.core_element_configured:
            # master code - or worker with nesting enabled
            self.__configure_core_element__(kwargs)

        kwargs[PROCESSES_PER_NODE] = self.kwargs.get(PROCESSES_PER_NODE, 1)
        kwargs[COMPUTING_NODES] = self.processes

        with keep_arguments(args, kwargs, prepend_strings=False):
            # Call the method
            ret = user_function(*args, **kwargs)

        return ret
Example No. 18
        def decaf_f(*args, **kwargs):
            # type: (*typing.Any, **typing.Any) -> typing.Any
            if not self.scope:
                raise NotInPyCOMPSsException(not_in_pycompss("decaf"))

            if __debug__:
                logger.debug("Executing decaf_f wrapper.")

            if (context.in_master() or context.is_nesting_enabled()) \
                    and not self.core_element_configured:
                # master code - or worker with nesting enabled
                self.__configure_core_element__(kwargs)

            # Set the computing_nodes variable in kwargs for its usage
            # in @task decorator
            kwargs[COMPUTING_NODES] = self.kwargs[COMPUTING_NODES]

            with keep_arguments(args, kwargs, prepend_strings=False):
                # Call the method
                ret = user_function(*args, **kwargs)

            return ret
Example No. 19
    def __call__(self, func):
        """ Parse and set the implement parameters within the task core element.

        :param func: Function to decorate.
        :return: Decorated function.
        """
        @wraps(func)
        def implement_f(*args, **kwargs):
            # This is executed only when called.
            if not self.scope:
                raise Exception(not_in_pycompss("implement"))

            if __debug__:
                logger.debug("Executing implement_f wrapper.")

            if context.in_master():
                # master code
                if not self.core_element_configured:
                    self.__configure_core_element__(kwargs)
            else:
                # worker code
                pass

            with keep_arguments(args, kwargs, prepend_strings=True):
                # Call the method
                ret = func(*args, **kwargs)

            return ret

        implement_f.__doc__ = func.__doc__

        if context.in_master() and not self.first_register:
            import pycompss.api.task as t
            self.first_register = True
            t.REGISTER_ONLY = True
            self.__call__(func)(self)
            t.REGISTER_ONLY = False

        return implement_f
Example No. 20
    def __call__(self, user_function):
        # type: (typing.Callable) -> typing.Callable
        """ Parse and set the implement parameters within the task core element.

        :param user_function: Function to decorate.
        :return: Decorated function.
        """
        @wraps(user_function)
        def implement_f(*args, **kwargs):
            # type: (*typing.Any, **typing.Any) -> typing.Any
            # This is executed only when called.
            if not self.scope:
                raise NotInPyCOMPSsException(not_in_pycompss("implement"))

            if __debug__:
                logger.debug("Executing implement_f wrapper.")

            if (context.in_master() or context.is_nesting_enabled()) \
                    and not self.core_element_configured:
                # master code - or worker with nesting enabled
                self.__configure_core_element__(kwargs)

            with keep_arguments(args, kwargs, prepend_strings=True):
                # Call the method
                ret = user_function(*args, **kwargs)

            return ret

        implement_f.__doc__ = user_function.__doc__

        if context.in_master() and not self.first_register:
            import pycompss.api.task as t
            self.first_register = True
            t.REGISTER_ONLY = True
            self.__call__(user_function)(self)
            t.REGISTER_ONLY = False

        return implement_f
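An illustrative (hypothetical) call site for @implement corresponding to the __call__ above: it registers an alternative implementation of an already defined task, identified by source_class and method.

from pycompss.api.implement import implement
from pycompss.api.constraint import constraint
from pycompss.api.task import task

@task(returns=int)
def addition(a, b):
    return a + b

@implement(source_class="my_module", method="addition")
@constraint(computing_units="4")
@task(returns=int)
def addition_fast(a, b):
    # Alternative version the runtime may choose when the constraint holds.
    return a + b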
Example No. 21
        def constrained_f(*args, **kwargs):
            if not self.scope:
                from pycompss.api.dummy.constraint import constraint \
                    as dummy_constraint
                d_c = dummy_constraint(self.args, self.kwargs)
                return d_c.__call__(func)(*args, **kwargs)

            if __debug__:
                logger.debug("Executing constrained_f wrapper.")

            if context.in_master():
                # master code
                if not self.core_element_configured:
                    self.__configure_core_element__(kwargs)
            else:
                # worker code
                pass

            with keep_arguments(args, kwargs, prepend_strings=True):
                # Call the method
                ret = func(*args, **kwargs)

            return ret
Example No. 22
    def __init__(self, *args, **kwargs):
        # type: (*typing.Any, **typing.Any) -> None
        """ Parse the config file and store the arguments that will be used
        later to wrap the "real" decorator.

        self = itself.
        args = not used.
        kwargs = dictionary with the given @software parameter (config_file).

        :param args: Arguments
        :param kwargs: Keyword arguments
        """
        decorator_name = "".join(('@', Software.__name__.lower()))
        # super(Software, self).__init__(decorator_name, *args, **kwargs)
        self.task_type = None  # type: typing.Any
        self.config_args = None  # type: typing.Any
        self.decor = None  # type: typing.Any
        self.constraints = None  # type: typing.Any
        self.container = None  # type: typing.Any

        self.decorator_name = decorator_name
        self.args = args
        self.kwargs = kwargs
        self.scope = context.in_pycompss()
        self.core_element = None  # type: typing.Any
        self.core_element_configured = False

        if self.scope and context.in_master():
            if __debug__:
                logger.debug("Init @software decorator..")
            # Check the arguments
            check_arguments(MANDATORY_ARGUMENTS,
                            DEPRECATED_ARGUMENTS,
                            SUPPORTED_ARGUMENTS | DEPRECATED_ARGUMENTS,
                            list(kwargs.keys()),
                            decorator_name)
            self.parse_config_file()
Example No. 23
        def ompss_f(*args, **kwargs):
            if not self.scope:
                # from pycompss.api.dummy.ompss import ompss as dummy_ompss
                # d_o = dummy_ompss(self.args, self.kwargs)
                # return d_o.__call__(func)
                raise Exception(not_in_pycompss("ompss"))

            if context.in_master():
                # master code
                mod = inspect.getmodule(func)
                self.module = mod.__name__  # not func.__module__

                if (self.module == '__main__' or
                        self.module == 'pycompss.runtime.launch'):
                    # The module where the function is defined was run as
                    # __main__, so we need to find out the real module name.

                    # path = mod.__file__
                    # dirs = mod.__file__.split(os.sep)
                    # file_name = os.path.splitext(
                    #                 os.path.basename(mod.__file__))[0]

                    # Get the real module name from our launch.py variable
                    path = getattr(mod, "APP_PATH")

                    dirs = path.split(os.path.sep)
                    file_name = os.path.splitext(os.path.basename(path))[0]
                    mod_name = file_name

                    i = len(dirs) - 1
                    while i > 0:
                        new_l = len(path) - (len(dirs[i]) + 1)
                        path = path[0:new_l]
                        if "__init__.py" in os.listdir(path):
                            # directory is a package
                            i -= 1
                            mod_name = dirs[i] + '.' + mod_name
                        else:
                            break
                    self.module = mod_name

                # Include the registering info related to @ompss

                # Retrieve the base core_element established at @task decorator
                from pycompss.api.task import current_core_element as cce
                if not self.registered:
                    self.registered = True
                    # Update the core element information with the @ompss
                    # information
                    cce.set_impl_type("OMPSS")
                    binary = self.kwargs['binary']
                    if 'working_dir' in self.kwargs:
                        working_dir = self.kwargs['working_dir']
                    elif 'workingDir' in self.kwargs:
                        working_dir = self.kwargs['workingDir']
                    else:
                        working_dir = '[unassigned]'  # Empty or '[unassigned]'
                    impl_signature = 'OMPSS.' + binary
                    cce.set_impl_signature(impl_signature)
                    impl_args = [binary, working_dir]
                    cce.set_impl_type_args(impl_args)
            else:
                # worker code
                pass

            # This is executed only when called.
            if __debug__:
                logger.debug("Executing ompss_f wrapper.")

            # Set the computing_nodes variable in kwargs for its usage
            # in @task decorator
            kwargs['computing_nodes'] = self.kwargs['computing_nodes']

            if len(args) > 0:
                # The 'self' for a method function is passed as args[0]
                slf = args[0]

                # Replace and store the attributes
                saved = {}
                for k, v in self.kwargs.items():
                    if hasattr(slf, k):
                        saved[k] = getattr(slf, k)
                        setattr(slf, k, v)

            # Call the method
            import pycompss.api.task as t
            t.prepend_strings = False
            ret = func(*args, **kwargs)
            t.prepend_strings = True

            if len(args) > 0:
                # Put things back
                for k, v in saved.items():
                    setattr(slf, k, v)

            return ret
Example No. 24
 def __decorator_body__(self, user_function, args, kwargs):
     # Determine the context and decide what to do
     if context.in_master():
         # @task being executed in the master
         # Each task will have a TaskMaster, so its content will
         # not be shared.
         self.__check_core_element__(kwargs, user_function)
         with event(TASK_INSTANTIATION, master=True):
             master = TaskMaster(self.decorator_arguments,
                                 self.user_function, self.core_element,
                                 self.registered, self.signature,
                                 self.interactive, self.module,
                                 self.function_arguments,
                                 self.function_name, self.module_name,
                                 self.function_type, self.class_name,
                                 self.hints, self.on_failure, self.defaults)
         result = master.call(*args, **kwargs)
         fo, self.core_element, self.registered, self.signature, self.interactive, self.module, self.function_arguments, self.function_name, self.module_name, self.function_type, self.class_name, self.hints = result  # noqa: E501
         del master
         return fo
     elif context.in_worker():
         if "compss_key" in kwargs.keys():
             if context.is_nesting_enabled():
                 # Update the whole logger since it will be in job out/err
                 update_logger_handlers(kwargs["compss_log_cfg"],
                                        kwargs["compss_log_files"][0],
                                        kwargs["compss_log_files"][1])
             # @task being executed in the worker
             with event(WORKER_TASK_INSTANTIATION,
                        master=False,
                        inside=True):
                 worker = TaskWorker(self.decorator_arguments,
                                     self.user_function, self.on_failure,
                                     self.defaults)
             result = worker.call(*args, **kwargs)
             # Force flush stdout and stderr
             sys.stdout.flush()
             sys.stderr.flush()
             # Remove worker
             del worker
             if context.is_nesting_enabled():
                 # Wait for all nested tasks to finish
                 from pycompss.runtime.binding import nested_barrier
                 nested_barrier()
                 # Reestablish logger handlers
                 update_logger_handlers(kwargs["compss_log_cfg"])
             return result
         else:
             if context.is_nesting_enabled():
                 # Each task will have a TaskMaster, so its content will
                 # not be shared.
                 with event(TASK_INSTANTIATION, master=True):
                     master = TaskMaster(
                         self.decorator_arguments, self.user_function,
                         self.core_element, self.registered, self.signature,
                         self.interactive, self.module,
                         self.function_arguments, self.function_name,
                         self.module_name, self.function_type,
                         self.class_name, self.hints, self.on_failure,
                         self.defaults)
                 result = master.call(*args, **kwargs)
                 fo, self.core_element, self.registered, self.signature, self.interactive, self.module, self.function_arguments, self.function_name, self.module_name, self.function_type, self.class_name, self.hints = result  # noqa: E501
                 del master
                 return fo
             else:
                 # Called from another task within the worker
                 # Ignore the @task decorator and run it sequentially
                 message = "".join((
                     "WARNING: Calling task: ",
                     str(user_function.__name__),
                     " from this task.\n",
                     "         It will be executed sequentially ",  # noqa: E501
                     "within the caller task."))
                 print(message, file=sys.stderr)
                 return self._sequential_call(*args, **kwargs)
     # We are neither in master nor in the worker, or the user has
     # stopped the interactive session.
     # Therefore, the user code is being executed with no
     # launch_compss/enqueue_compss/runcompss/interactive session
     return self._sequential_call(*args, **kwargs)
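From the application side, the master branch above is what turns a decorated call into a future-like object; a minimal sketch of that interaction:

from pycompss.api.task import task
from pycompss.api.api import compss_wait_on

@task(returns=int)
def increment(value):
    return value + 1

partial = increment(1)            # dispatched through TaskMaster, returns a future
result = compss_wait_on(partial)  # synchronization point: result == 2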
Example No. 25
File: mpi.py Project: xyuan/compss
        def mpi_f(*args, **kwargs):
            if not self.scope:
                # from pycompss.api.dummy.mpi import mpi as dummy_mpi
                # d_m = dummy_mpi(self.args, self.kwargs)
                # return d_m.__call__(func)
                raise Exception(not_in_pycompss("mpi"))

            if context.in_master():
                # master code
                mod = inspect.getmodule(func)
                self.module = mod.__name__  # not func.__module__

                if self.module == '__main__' or \
                        self.module == 'pycompss.runtime.launch':
                    # The module where the function is defined was run as
                    # __main__, so we need to find out the real module name.

                    # path = mod.__file__
                    # dirs = mod.__file__.split(os.sep)
                    # file_name = os.path.splitext(
                    #                 os.path.basename(mod.__file__))[0]

                    # Get the real module name from our launch.py variable
                    path = getattr(mod, "APP_PATH")

                    dirs = path.split(os.path.sep)
                    file_name = os.path.splitext(os.path.basename(path))[0]
                    mod_name = file_name

                    i = len(dirs) - 1
                    while i > 0:
                        new_l = len(path) - (len(dirs[i]) + 1)
                        path = path[0:new_l]
                        if "__init__.py" in os.listdir(path):
                            # directory is a package
                            i -= 1
                            mod_name = dirs[i] + '.' + mod_name
                        else:
                            break
                    self.module = mod_name

                # Include the registering info related to @mpi

                # Retrieve the base core_element established at @task decorator
                from pycompss.api.task import current_core_element as cce
                if not self.registered:
                    self.registered = True

                    # Update the core element information with the @mpi
                    # information
                    if "binary" in self.kwargs:
                        binary = self.kwargs['binary']
                        cce.set_impl_type("MPI")
                    else:
                        binary = "[unassigned]"
                        cce.set_impl_type("PYTHON_MPI")
                        self.task_type = "PYTHON_MPI"

                    if 'working_dir' in self.kwargs:
                        working_dir = self.kwargs['working_dir']
                    else:
                        working_dir = '[unassigned]'  # Empty or '[unassigned]'

                    runner = self.kwargs['runner']
                    if 'flags' in self.kwargs:
                        flags = self.kwargs['flags']
                    else:
                        flags = '[unassigned]'  # Empty or '[unassigned]'
                    if 'scale_by_cu' in self.kwargs:
                        scale_by_cu = self.kwargs['scale_by_cu']
                        if isinstance(scale_by_cu, bool):
                            if scale_by_cu:
                                scale_by_cu_str = 'true'
                            else:
                                scale_by_cu_str = 'false'
                        elif isinstance(scale_by_cu, str):
                            scale_by_cu_str = scale_by_cu
                        else:
                            raise Exception("Incorrect format for scale_by_cu property. " +      # noqa: E501
                                            "It should be boolean or an environment variable")   # noqa: E501
                    else:
                        scale_by_cu_str = 'false'

                    if 'fail_by_exit_value' in self.kwargs:
                        fail_by_ev = self.kwargs['fail_by_exit_value']
                        if isinstance(fail_by_ev, bool):
                            if fail_by_ev:
                                fail_by_ev_str = 'true'
                            else:
                                fail_by_ev_str = 'false'
                        elif isinstance(fail_by_ev, str):
                            fail_by_ev_str = fail_by_ev
                        else:
                            raise Exception("Incorrect format for fail_by_exit_value property. " +  # noqa: E501
                                            "It should be boolean or an environment variable")      # noqa: E501
                    else:
                        fail_by_ev_str = 'false'

                    if binary == "[unassigned]":
                        impl_signature = "MPI."
                    else:
                        impl_signature = 'MPI.' + \
                                         str(self.kwargs['processes']) + \
                                         "." + binary

                    # Add information to CoreElement
                    cce.set_impl_signature(impl_signature)
                    impl_args = [binary,
                                 working_dir,
                                 runner,
                                 flags,
                                 scale_by_cu_str,
                                 fail_by_ev_str]
                    cce.set_impl_type_args(impl_args)
            else:
                # worker code
                pass

            # This is executed only when called.
            if __debug__:
                logger.debug("Executing mpi_f wrapper.")

            # Set the computing_nodes variable in kwargs for its usage
            # in @task decorator
            kwargs['computing_nodes'] = self.kwargs['processes']

            if len(args) > 0:
                # The 'self' for a method function is passed as args[0]
                slf = args[0]

                # Replace and store the attributes
                saved = {}
                for k, v in self.kwargs.items():
                    if hasattr(slf, k):
                        saved[k] = getattr(slf, k)
                        setattr(slf, k, v)

            # Call the method
            import pycompss.api.task as t
            if self.task_type == "PYTHON_MPI":
                t.prepend_strings = True
            else:
                t.prepend_strings = False
            ret = func(*args, **kwargs)
            t.prepend_strings = True

            if len(args) > 0:
                # Put things back
                for k, v in saved.items():
                    setattr(slf, k, v)

            return ret
Example No. 26
        def constrained_f(*args, **kwargs):
            if not self.scope:
                from pycompss.api.dummy.constraint import constraint as dummy_constraint  # noqa
                d_c = dummy_constraint(self.args, self.kwargs)
                return d_c.__call__(func)(*args, **kwargs)

            if context.in_master():
                # master code
                mod = inspect.getmodule(func)
                self.module = mod.__name__  # not func.__module__

                if (self.module == '__main__'
                        or self.module == 'pycompss.runtime.launch'):
                    # The module where the function is defined was run as
                    # __main__, so we need to find out the real module name.

                    # path = mod.__file__
                    # dirs = mod.__file__.split(os.sep)
                    # file_name = os.path.splitext(
                    #                 os.path.basename(mod.__file__))[0]

                    # Get the real module name from our launch.py variable
                    path = getattr(mod, "APP_PATH")

                    dirs = path.split(os.path.sep)
                    file_name = os.path.splitext(os.path.basename(path))[0]
                    mod_name = file_name

                    i = len(dirs) - 1
                    while i > 0:
                        new_l = len(path) - (len(dirs[i]) + 1)
                        path = path[0:new_l]
                        if "__init__.py" in os.listdir(path):
                            # directory is a package
                            i -= 1
                            mod_name = dirs[i] + '.' + mod_name
                        else:
                            break
                    self.module = mod_name

                # Include the registering info related to @constraint
                if not self.registered:
                    self.registered = True
                    # Retrieve the base core_element established at @task
                    # decorator
                    from pycompss.api.task import current_core_element as cce
                    # Update the core element information with the constraints
                    cce.set_impl_constraints(self.kwargs)
            else:
                # worker code
                pass
            # This is executed only when called.
            if __debug__:
                logger.debug("Executing constrained_f wrapper.")

            if len(args) > 0:
                # The 'self' for a method function is passed as args[0]
                slf = args[0]

                # Replace and store the attributes
                saved = {}
                for k, v in self.kwargs.items():
                    if hasattr(slf, k):
                        saved[k] = getattr(slf, k)
                        setattr(slf, k, v)

            # Call the method
            ret = func(*args, **kwargs)

            if len(args) > 0:
                # Put things back
                for k, v in saved.items():
                    setattr(slf, k, v)

            return ret
Example No. 27
        def implement_f(*args, **kwargs):
            # This is executed only when called.
            if not self.scope:
                # from pycompss.api.dummy.implement import implement as dummy_implement  # noqa
                # d_i = dummy_implement(self.args, self.kwargs)
                # return d_i.__call__(func)
                raise Exception(not_in_pycompss("implement"))

            if context.in_master():
                # master code
                mod = inspect.getmodule(func)
                self.module = mod.__name__  # not func.__module__

                if (self.module == '__main__'
                        or self.module == 'pycompss.runtime.launch'):
                    # The module where the function is defined was run as
                    # __main__, so we need to find out the real module name.

                    # path = mod.__file__
                    # dirs = mod.__file__.split(os.sep)
                    # file_name = os.path.splitext(
                    #                 os.path.basename(mod.__file__))[0]

                    # Get the real module name from our launch.py variable
                    path = getattr(mod, "APP_PATH")

                    dirs = path.split(os.path.sep)
                    file_name = os.path.splitext(os.path.basename(path))[0]
                    mod_name = file_name

                    i = len(dirs) - 1
                    while i > 0:
                        new_l = len(path) - (len(dirs[i]) + 1)
                        path = path[0:new_l]
                        if "__init__.py" in os.listdir(path):
                            # directory is a package
                            i -= 1
                            mod_name = dirs[i] + '.' + mod_name
                        else:
                            break
                    self.module = mod_name

                # Include the registering info related to @implement

                # Retrieve the base core_element established at @task decorator
                if not self.registered:
                    self.registered = True
                    from pycompss.api.task import current_core_element as cce
                    # Update the core element information with the @implement
                    # information
                    if 'sourceClass' in self.kwargs:
                        another_class = self.kwargs['sourceClass']
                    else:
                        another_class = self.kwargs['source_class']
                    another_method = self.kwargs['method']
                    ce_signature = another_class + '.' + another_method
                    cce.set_ce_signature(ce_signature)
                    # This is not needed since the arguments are already set
                    # by the task decorator.
                    # implArgs = [another_class, another_method]
                    # cce.set_implTypeArgs(implArgs)
                    cce.set_impl_type("METHOD")
            else:
                # worker code
                pass

            if __debug__:
                logger.debug("Executing implement_f wrapper.")

            # The 'self' for a method function is passed as args[0]
            slf = args[0]

            # Replace and store the attributes
            saved = {}
            for k, v in self.kwargs.items():
                if hasattr(slf, k):
                    saved[k] = getattr(slf, k)
                    setattr(slf, k, v)

            # Call the method
            ret = func(*args, **kwargs)

            # Put things back
            for k, v in saved.items():
                setattr(slf, k, v)

            return ret
Example No. 28
 def __init__(self, event_id):
     # type: (int) -> None
     self.emitted = False
     if TRACING and in_master():
         PYEXTRAE.eventandcounters(BINDING_MASTER_TYPE, event_id)
         self.emitted = True
Example No. 29
        def mpi_f(*args, **kwargs):
            if not self.scope:
                # Execute the mpi as with PyCOMPSs so that sequential
                # execution performs as parallel.
                # To disable: raise Exception(not_in_pycompss("mpi"))
                # TODO: Intercept the @task parameters to get stream redirection
                cmd = [self.kwargs['runner']]
                if 'processes' in self.kwargs:
                    # str() cast: Popen arguments must be strings.
                    cmd += ['-np', str(self.kwargs['processes'])]
                elif 'computing_nodes' in self.kwargs:
                    cmd += ['-np', str(self.kwargs['computing_nodes'])]
                elif 'computingNodes' in self.kwargs:
                    cmd += ['-np', str(self.kwargs['computingNodes'])]
                else:
                    pass
                if 'flags' in self.kwargs:
                    cmd += self.kwargs['flags'].split()
                cmd += [self.kwargs['binary']]
                if args:
                    args = [str(a) for a in args]
                    cmd += args
                my_env = os.environ.copy()
                if "working_dir" in self.kwargs:
                    my_env[
                        "PATH"] = self.kwargs["working_dir"] + my_env["PATH"]
                elif "workingDir" in self.kwargs:
                    my_env["PATH"] = self.kwargs["workingDir"] + my_env["PATH"]
                proc = subprocess.Popen(cmd,
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.PIPE,
                                        env=my_env)  # noqa: E501
                out, err = proc.communicate()
                if sys.version_info[0] < 3:
                    out_message = out.strip()
                    err_message = err.strip()
                else:
                    out_message = out.decode().strip()
                    err_message = err.decode().strip()
                if out_message:
                    print(out_message)
                if err_message:
                    sys.stderr.write(err_message + '\n')
                return proc.returncode

            if __debug__:
                logger.debug("Executing mpi_f wrapper.")

            if context.in_master():
                # master code
                if not self.core_element_configured:
                    self.__configure_core_element__(kwargs)
            else:
                # worker code
                pass

            # Set the computing_nodes variable in kwargs for its usage
            # in @task decorator
            kwargs['computing_nodes'] = self.kwargs['processes']

            if self.task_type == "PYTHON_MPI":
                prepend_strings = True
            else:
                prepend_strings = False

            with keep_arguments(args, kwargs, prepend_strings=prepend_strings):
                # Call the method
                ret = func(*args, **kwargs)

            return ret
Example No. 30
 def mpi_f(*args, **kwargs):
     ret = self.__decorator_body__(user_function, args, kwargs)
     if context.in_master() and int(self.kwargs["processes"]) == 1:
         return [ret]
     else:
         return ret