Пример #1
0
def load_loggers(debug, persistent_storage, tracing):
    # type: (bool, str, bool) -> (..., ...)
    """ Set up every logging facility used by the worker.

    :param debug: is Debug enabled.
    :param persistent_storage: is persistent storage enabled.
    :param tracing: if tracing is enabled.
    :return: main logger of the application and a list of loggers for the
             persistent data framework.
    """
    # Resolve the logging configuration directory relative to this file.
    worker_path = os.path.dirname(os.path.realpath(__file__))
    log_cfg_path = "".join((worker_path, '/../../../../log'))
    if not os.path.isdir(log_cfg_path):
        # Directory missing: running from the sources (unit-testing layout),
        # which nests one level deeper.
        log_cfg_path = "".join((worker_path, '/../../../../../log'))
    # Pick the debug or the default (quiet) configuration file.
    cfg_file = 'logging_worker_debug.json' if debug else 'logging_worker_off.json'
    log_json = "/".join((log_cfg_path, cfg_file))
    init_logging_worker(log_json, tracing)

    # Main application logger.
    logger = logging.getLogger('pycompss.worker.piper.piper_worker')
    # Loggers for the supported persistent-storage frameworks.
    storage_loggers = []
    if persistent_storage:
        for framework in ('dataclay', 'hecuba', 'redis', 'storage'):
            storage_loggers.append(logging.getLogger(framework))
    return logger, storage_loggers
Пример #2
0
def executor(process_name, command):
    # type: (str, str) -> None
    """ Execution main method.

    Iterates over the input pipe in order to receive tasks (with their
    parameters) and process them.

    :param process_name: Process name (MPI Process-X, where X is the MPI rank).
    :param command: Command to execute.
    :return: None
    """
    # Replace Python Worker's SIGTERM handler.
    signal.signal(signal.SIGTERM, shutdown_handler)

    fields = command.split()
    log_level = fields[6]
    tracing = fields[7] == "true"

    # Select the logging configuration file matching the requested level.
    worker_path = os.path.dirname(os.path.realpath(__file__))
    if log_level in ('true', 'debug'):
        # Debug
        cfg_name = "/../../../log/logging_mpi_worker_debug.json"
    elif log_level in ('info', 'off'):
        cfg_name = "/../../../log/logging_mpi_worker_off.json"
    else:
        # Default
        cfg_name = "/../../../log/logging_mpi_worker.json"
    log_json = "".join((worker_path, cfg_name))
    init_logging_worker(log_json, tracing)

    logger = logging.getLogger("pycompss.worker.external.mpi_worker")
    logger_handlers = copy.copy(logger.handlers)
    logger_level = logger.getEffectiveLevel()
    # Rebuild the formatter from the first handler; the logger may have no
    # handlers at all, in which case no format is preserved.
    try:
        logger_formatter = logging.Formatter(
            logger_handlers[0].formatter._fmt)  # noqa
    except IndexError:
        logger_formatter = None

    if __debug__:
        logger.debug("[PYTHON EXECUTOR] [%s] Starting process" %
                     str(process_name))

    sig, msg = process_task(command, process_name, logger, log_json,
                            logger_handlers, logger_level, logger_formatter)
    # Signal expected management:
    # if sig == FAILURE_SIG:
    #     raise Exception("Task execution failed!", msg)
    # elif sig == UNEXPECTED_SIG:
    #     raise Exception("Unexpected message!", msg)

    sys.stdout.flush()
    sys.stderr.flush()
    if __debug__:
        logger.debug("[PYTHON EXECUTOR] [%s] Exiting process " %
                     str(process_name))
    if sig != 0:
        sys.exit(sig)
Пример #3
0
def load_loggers(debug, persistent_storage):
    """
    Loads all the loggers.

    :param debug: is Debug enabled
    :param persistent_storage: is persistent storage enabled
    :return logger: main logger of the application
    :return storage_loggers: loggers for the persistent data engine
    """
    # Initialise logging with the debug or default configuration file,
    # located relative to this module.
    worker_path = os.path.dirname(os.path.realpath(__file__))
    cfg_file = 'logging_debug.json' if debug else 'logging_off.json'
    init_logging_worker(worker_path + '/../../../log/' + cfg_file)

    # Main application logger.
    logger = logging.getLogger('pycompss.worker.piper.piper_worker')
    # Loggers for the supported persistent-storage engines.
    storage_loggers = []
    if persistent_storage:
        for engine in ('dataclay', 'hecuba', 'redis', 'storage'):
            storage_loggers.append(logging.getLogger(engine))
    return logger, storage_loggers
Пример #4
0
def executor(process_name, command):
    """
    Execution main method.
    Iterates over the input pipe in order to receive tasks (with their
    parameters) and process them.

    :param process_name: Process name (MPI Process-X, where X is the MPI rank).
    :param command: Command to execute
    :return: None
    """
    # Replace Python Worker's SIGTERM handler.
    signal.signal(signal.SIGTERM, shutdown_handler)

    # Positions 6 and 7 of the command carry the debug and tracing flags.
    debug = command.split()[6] == "true"
    tracing = command.split()[7] == "true"

    # Load log level configuration file
    worker_path = os.path.dirname(os.path.realpath(__file__))
    if debug:
        # Debug
        init_logging_worker(worker_path +
                            '/../../../log/logging_worker_debug.json',
                            tracing)
    else:
        # Default
        init_logging_worker(worker_path +
                            '/../../../log/logging_worker_off.json',
                            tracing)

    logger = logging.getLogger('pycompss.worker.external.mpi_worker')
    logger_handlers = copy.copy(logger.handlers)
    logger_level = logger.getEffectiveLevel()
    # FIX: the logger may have no handlers (depends on the loaded logging
    # configuration); indexing [0] unguarded raised IndexError.  Guard it the
    # same way the newer MPI executor does and fall back to no formatter.
    try:
        logger_formatter = logging.Formatter(
            logger_handlers[0].formatter._fmt)  # noqa
    except IndexError:
        logger_formatter = None

    if __debug__:
        logger.debug("[PYTHON EXECUTOR] [%s] Starting process" %
                     str(process_name))

    sig, msg = process_task(command,
                            process_name,
                            logger,
                            logger_handlers,
                            logger_level,
                            logger_formatter)

    sys.stdout.flush()
    sys.stderr.flush()
    if __debug__:
        logger.debug("[PYTHON EXECUTOR] [%s] Exiting process " %
                     str(process_name))
    if sig != 0:
        sys.exit(sig)
Пример #5
0
def main():
    """ GAT worker main code.

    Parses sys.argv, sets up streaming/storage as requested, runs the task
    inside the tracing context and tears everything down afterwards.
    """
    # Emit sync event if tracing is enabled
    tracing = sys.argv[1] == 'true'
    task_id = int(sys.argv[2])
    log_level = sys.argv[3]
    storage_conf = sys.argv[4]
    stream_backend = sys.argv[5]
    stream_master_name = sys.argv[6]
    stream_master_port = sys.argv[7]
    # method_type = sys.argv[8]
    params = sys.argv[9:]
    # Remaining positional layout (kept for reference):
    # class_name = sys.argv[9]
    # method_name = sys.argv[10]
    # num_slaves = sys.argv[11]
    # i = 11 + num_slaves
    # slaves = sys.argv[11..i]
    # numCus = sys.argv[i+1]
    # has_target = sys.argv[i+2] == 'true'
    # num_params = int(sys.argv[i+3])
    # params = sys.argv[i+4..]

    print("tracing = " + str(tracing))
    print("task_id = " + str(task_id))
    print("log_level = " + str(log_level))
    print("storage_conf = " + str(storage_conf))

    # Feature flags derived from the arguments.
    persistent_storage = storage_conf != 'null'
    streaming = stream_backend not in [None, 'null', 'NONE']

    with trace_multiprocessing_worker() if tracing else dummy_context():

        if streaming:
            # Start streaming
            DistroStreamClientHandler.init_and_start(
                master_ip=stream_master_name, master_port=stream_master_port)

        # Select the logging configuration file for the requested level.
        worker_path = os.path.dirname(os.path.realpath(__file__))
        if log_level in ('true', 'debug'):
            # Debug
            cfg_file = 'logging_gat_worker_debug.json'
        elif log_level in ('info', 'off'):
            # Info or no debug
            cfg_file = 'logging_gat_worker_off.json'
        else:
            # Default
            cfg_file = 'logging_gat_worker.json'
        init_logging_worker(worker_path + '/../../../log/' + cfg_file, tracing)

        if persistent_storage:
            # Initialize storage
            with event(INIT_STORAGE_AT_WORKER_EVENT):
                from storage.api import initWorker as initStorageAtWorker
                initStorageAtWorker(config_file_path=storage_conf)

        # Init worker
        exit_code = compss_worker(tracing, str(task_id), storage_conf, params)

        if streaming:
            # Finish streaming
            DistroStreamClientHandler.set_stop()

        if persistent_storage:
            # Finish storage
            with event(FINISH_STORAGE_AT_WORKER_EVENT):
                from storage.api import finishWorker as finishStorageAtWorker
                finishStorageAtWorker()

    if exit_code == 1:
        exit(1)
Пример #6
0
def main():
    """ Legacy GAT worker main code.

    Parses the command-line arguments, optionally starts tracing, streaming
    and persistent storage, executes the task and shuts everything down in
    the reverse order.

    :return: None
    """
    # Emit sync event if tracing is enabled
    tracing = sys.argv[1] == 'true'
    task_id = int(sys.argv[2])
    log_level = sys.argv[3]
    storage_conf = sys.argv[4]
    stream_backend = sys.argv[5]
    stream_master_name = sys.argv[6]
    stream_master_port = sys.argv[7]
    # method_type = sys.argv[8]
    params = sys.argv[9:]
    # class_name = sys.argv[9]
    # method_name = sys.argv[10]
    # num_slaves = sys.argv[11]
    # i = 11 + num_slaves
    # slaves = sys.argv[11..i]
    # numCus = sys.argv[i+1]
    # has_target = sys.argv[i+2] == 'true'
    # num_params = int(sys.argv[i+3])
    # params = sys.argv[i+4..]

    print("tracing = " + str(tracing))
    print("task_id = " + str(task_id))
    print("log_level = " + str(log_level))
    print("storage_conf = " + str(storage_conf))

    persistent_storage = False
    if storage_conf != 'null':
        persistent_storage = True
        # Storage API is imported lazily, only when a backend is configured.
        from storage.api import initWorker as initStorageAtWorker
        from storage.api import finishWorker as finishStorageAtWorker

    streaming = False
    if stream_backend not in [None, 'null', 'NONE']:
        streaming = True

    if tracing:
        # Start tracing.  NOTE: pyextrae is imported here and reused by the
        # matching "Finish tracing" block below, which runs under the same
        # condition.
        import pyextrae.multiprocessing as pyextrae
        pyextrae.eventandcounters(SYNC_EVENTS, task_id)
        # pyextrae.eventandcounters(TASK_EVENTS, 0)
        pyextrae.eventandcounters(TASK_EVENTS, WORKER_RUNNING_EVENT)

    if streaming:
        # Start streaming
        DistroStreamClientHandler.init_and_start(
            master_ip=stream_master_name, master_port=stream_master_port)

    # Load log level configuration file
    worker_path = os.path.dirname(os.path.realpath(__file__))
    if log_level == 'true' or log_level == "debug":
        # Debug
        init_logging_worker(worker_path + '/../../../log/logging_debug.json')
    elif log_level == "info" or log_level == "off":
        # Info or no debug
        init_logging_worker(worker_path + '/../../../log/logging_off.json')
    else:
        # Default
        init_logging_worker(worker_path + '/../../../log/logging.json')

    if persistent_storage:
        # Initialize storage
        initStorageAtWorker(config_file_path=storage_conf)

    # Init worker
    exit_code = compss_worker(tracing, str(task_id), storage_conf, params)

    if tracing:
        # Finish tracing
        pyextrae.eventandcounters(TASK_EVENTS, 0)
        # pyextrae.eventandcounters(TASK_EVENTS, PROCESS_DESTRUCTION)
        pyextrae.eventandcounters(SYNC_EVENTS, task_id)

    if streaming:
        # Finish streaming
        DistroStreamClientHandler.set_stop()

    if persistent_storage:
        # Finish storage
        finishStorageAtWorker()

    # Propagate the worker failure to the caller process.
    if exit_code == 1:
        exit(1)
Пример #7
0
def main():
    # type: () -> None
    """ GAT worker main code.

    Executes the task provided by parameters.

    :return: None
    """
    # Emit sync event if tracing is enabled
    tracing = sys.argv[1] == 'true'
    task_id = int(sys.argv[2])
    log_level = sys.argv[3]
    storage_conf = sys.argv[4]
    stream_backend = sys.argv[5]
    stream_master_name = sys.argv[6]
    stream_master_port = sys.argv[7]
    # Next: method_type = sys.argv[8]
    params = sys.argv[9:]
    # Next parameters:
    # class_name = sys.argv[10]
    # method_name = sys.argv[11]
    # num_slaves = sys.argv[12]
    # i = 13 + num_slaves
    # slaves = sys.argv[12..i]
    # numCus = sys.argv[i+1]
    # has_target = sys.argv[i+2] == 'true'
    # num_params = int(sys.argv[i+3])
    # params = sys.argv[i+4..]

    # Echo the configuration only in debug mode.
    if log_level == "true" or log_level == "debug":
        print("Tracing = " + str(tracing))
        print("Task id = " + str(task_id))
        print("Log level = " + str(log_level))
        print("Storage conf = " + str(storage_conf))

    persistent_storage = False
    if storage_conf != "null":
        persistent_storage = True

    streaming = False
    if stream_backend not in [None, "null", "NONE"]:
        streaming = True

    # Everything below runs inside the tracing context (or a no-op context
    # when tracing is disabled) so that setup/teardown events are captured.
    with trace_multiprocessing_worker() if tracing else dummy_context():

        if streaming:
            # Start streaming
            DistroStreamClientHandler.init_and_start(
                master_ip=stream_master_name, master_port=stream_master_port)

        # Load log level configuration file
        worker_path = os.path.dirname(os.path.realpath(__file__))
        if log_level == "true" or log_level == "debug":
            # Debug
            log_json = "".join(
                (worker_path, "/../../../log/logging_gat_worker_debug.json"))
        elif log_level == "info" or log_level == "off":
            # Info or no debug
            log_json = "".join(
                (worker_path, "/../../../log/logging_gat_worker_off.json"))
        else:
            # Default
            log_json = "".join(
                (worker_path, "/../../../log/logging_gat_worker.json"))
        init_logging_worker(log_json, tracing)

        if persistent_storage:
            # Initialize storage.  The storage API is imported lazily, only
            # when a backend is configured.
            with event_worker(INIT_STORAGE_AT_WORKER_EVENT):
                from storage.api import initWorker as initStorageAtWorker  # noqa
                initStorageAtWorker(config_file_path=storage_conf)

        # Init worker
        exit_code = compss_worker(tracing, str(task_id), storage_conf, params,
                                  log_json)

        if streaming:
            # Finish streaming
            DistroStreamClientHandler.set_stop()

        if persistent_storage:
            # Finish storage
            with event_worker(FINISH_STORAGE_AT_WORKER_EVENT):
                from storage.api import finishWorker as finishStorageAtWorker  # noqa
                finishStorageAtWorker()

    # Propagate the worker failure to the caller process.
    if exit_code == 1:
        exit(1)
Пример #8
0
def main():
    # type: () -> int
    """ Main method to process the task execution.

    Parses the invocation arguments, initialises logging, runs the task with
    execute_task and reports the outcome.

    :return: Exit value
    """

    # Parse arguments
    # TODO: Enhance the received parameters from ContainerInvoker.java
    func_file_path = str(sys.argv[1])
    func_name = str(sys.argv[2])
    num_slaves = 0
    timeout = 0
    cus = 1
    log_level = sys.argv[3]
    tracing = sys.argv[4] == 'true'
    has_target = str(sys.argv[5]).lower() == "true"
    return_type = str(sys.argv[6])
    return_length = int(sys.argv[7])
    num_params = int(sys.argv[8])
    func_params = sys.argv[9:]

    # Log initialisation
    # Load log level configuration file
    worker_path = os.path.dirname(os.path.realpath(__file__))
    if log_level == "true" or log_level == "debug":
        # Debug
        log_json = "".join((worker_path,
                            "/log/logging_container_worker_debug.json"))
    elif log_level == "info" or log_level == "off":
        # Info or no debug
        log_json = "".join((worker_path,
                            "/log/logging_container_worker_off.json"))
    else:
        # Default
        log_json = "".join((worker_path,
                            "/log/logging_container_worker.json"))
    init_logging_worker(log_json, tracing)
    # FIX: the logger must exist unconditionally -- it is passed to
    # execute_task and used to report the final result below.  Previously it
    # was created only under "if __debug__", which raised NameError when the
    # interpreter runs with -O.
    logger = logging.getLogger('pycompss.worker.container.container_worker')  # noqa: E501
    if __debug__:
        logger.debug("Initialising Python worker inside the container...")

    task_params = [func_file_path, func_name, num_slaves,
                   timeout, cus, has_target, return_type,
                   return_length, num_params]  # type: typing.List[typing.Any]
    execute_task_params = task_params + func_params

    if __debug__:
        logger.debug("- File: " + str(func_file_path))
        logger.debug("- Function: " + str(func_name))
        logger.debug("- HasTarget: " + str(has_target))
        logger.debug("- ReturnType: " + str(return_type))
        logger.debug("- Num Returns: " + str(return_length))
        logger.debug("- Num Parameters: " + str(num_params))
        logger.debug("- Parameters: " + str(func_params))
        logger.debug("DONE Parsing Python function and arguments")

    # Process task
    if __debug__:
        logger.debug("Processing task...")

    process_name = "ContainerInvoker"
    storage_conf = "null"
    # Tracing is disabled for the actual execution regardless of the flag
    # used for logging initialisation above (pre-existing behaviour).
    tracing = False
    log_files = ()
    python_mpi = False
    collections_layouts = None  # type: typing.Optional[dict]
    context.set_pycompss_context(context.WORKER)
    result = execute_task(process_name,
                          storage_conf,
                          execute_task_params,
                          tracing,
                          logger,
                          "None",
                          log_files,           # noqa
                          python_mpi,
                          collections_layouts  # noqa
                          )
    # The ignored result is time out
    exit_value, new_types, new_values, _, except_msg = result

    if __debug__:
        logger.debug("DONE Processing task")

    # Process results
    if __debug__:
        logger.debug("Processing results...")
        logger.debug("Task exit value = " + str(exit_value))

    if exit_value == 0:
        # Task has finished without exceptions
        if __debug__:
            logger.debug("Building return parameters...")
            logger.debug("New Types: " + str(new_types))
            logger.debug("New Values: " + str(new_values))
        build_return_params_message(new_types, new_values)
        if __debug__:
            logger.debug("DONE Building return parameters")
    elif exit_value == 2:
        # Task has finished with a COMPSs Exception
        if __debug__:
            except_msg = except_msg.replace(" ", "_")
            logger.debug("Registered COMPSs Exception: %s" %
                         str(except_msg))
    else:
        # An exception has been raised in task
        if __debug__:
            except_msg = except_msg.replace(" ", "_")
            logger.debug("Registered Exception in task execution %s" %
                         str(except_msg))

    # Return
    if exit_value != 0:
        logger.debug("ERROR: Task execution finished with non-zero exit value (%s != 0)" % str(exit_value))  # noqa: E501
    else:
        logger.debug("Task execution finished SUCCESSFULLY!")
    return exit_value