Code example #1
    def update_params(self, argv):
        # type: (list) -> None
        """ Constructs a configuration description for the piper worker using
        the arguments.

        :param argv: arguments from the command line.
        :return: None
        """
        set_temporary_directory(argv[1], create_tmpdir=False)
        if argv[2] == 'true':
            context.enable_nesting()
            self.nesting = True
        self.debug = argv[3] == 'true'
        self.tracing = argv[4] == '1'
        self.storage_conf = argv[5]
        self.stream_backend = argv[6]
        self.stream_master_name = argv[7]
        self.stream_master_port = argv[8]
        self.tasks_x_node = int(argv[9])
        in_pipes = argv[10:10 + self.tasks_x_node]
        out_pipes = argv[10 + self.tasks_x_node:-2]
        if self.debug:
            assert self.tasks_x_node == len(in_pipes)
            assert self.tasks_x_node == len(out_pipes)
        self.pipes = []
        for i in range(0, self.tasks_x_node):
            self.pipes.append(Pipe(in_pipes[i], out_pipes[i]))
        self.control_pipe = Pipe(argv[-2], argv[-1])
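
The method above assumes a fixed positional layout for argv. A minimal sketch of that layout, assuming two tasks per node (all values below are illustrative, not taken from a real invocation):

sample_argv = [
    "piper_worker.py",        # argv[0]: worker script name (not used here)
    "/tmp/COMPSsWorker",      # argv[1]: temporary directory
    "false",                  # argv[2]: enable nesting
    "true",                   # argv[3]: debug
    "0",                      # argv[4]: tracing ('1' enables it)
    "null",                   # argv[5]: storage configuration
    "null",                   # argv[6]: streaming backend
    "null",                   # argv[7]: streaming master name
    "null",                   # argv[8]: streaming master port
    "2",                      # argv[9]: tasks_x_node
    "inPipe0", "inPipe1",     # argv[10:12]: one input pipe per task
    "outPipe0", "outPipe1",   # argv[12:-2]: one output pipe per task
    "ctrlIn", "ctrlOut",      # argv[-2:]: control pipe pair
]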
Code example #2
File: launch.py  Project: curiousTauseef/compss
def launch_pycompss_application(
        app,
        func,
        log_level='off',  # type: str
        o_c=False,  # type: bool
        debug=False,  # type: bool
        graph=False,  # type: bool
        trace=False,  # type: bool
        monitor=None,  # type: int
        project_xml=None,  # type: str
        resources_xml=None,  # type: str
        summary=False,  # type: bool
        task_execution='compss',  # type: str
        storage_impl=None,  # type: str
        storage_conf=None,  # type: str
        streaming_backend=None,  # type: str
        streaming_master_name=None,  # type: str
        streaming_master_port=None,  # type: str
        task_count=50,  # type: int
        app_name=None,  # type: str
        uuid=None,  # type: str
        base_log_dir=None,  # type: str
        specific_log_dir=None,  # type: str
        extrae_cfg=None,  # type: str
        comm='NIO',  # type: str
        conn='es.bsc.compss.connectors.DefaultSSHConnector',  # noqa: E501
        master_name='',  # type: str
        master_port='',  # type: str
        scheduler='es.bsc.compss.scheduler.loadbalancing.LoadBalancingScheduler',  # noqa: E501
        jvm_workers='-Xms1024m,-Xmx1024m,-Xmn400m',
        cpu_affinity='automatic',  # type: str
        gpu_affinity='automatic',  # type: str
        fpga_affinity='automatic',  # type: str
        fpga_reprogram='',  # type: str
        profile_input='',  # type: str
        profile_output='',  # type: str
        scheduler_config='',  # type: str
        external_adaptation=False,  # type: bool
        propagate_virtual_environment=True,  # type: bool
        mpi_worker=False,  # type: bool
        *args,
        **kwargs):
    # type: (...) -> typing.Any
    """ Launch PyCOMPSs application from function.

    :param app: Application path
    :param func: Function
    :param log_level: Logging level [ 'trace'|'debug'|'info'|'api'|'off' ]
                      (default: 'off')
    :param o_c: Objects to string conversion [ True | False ] (default: False)
    :param debug: Debug mode [ True | False ] (default: False)
                  (overrides log_level)
    :param graph: Generate graph [ True | False ] (default: False)
    :param trace: Generate trace
                  [ True | False | 'scorep' | 'arm-map' | 'arm-ddt']
                  (default: False)
    :param monitor: Monitor refresh rate (default: None)
    :param project_xml: Project xml file path
    :param resources_xml: Resources xml file path
    :param summary: Execution summary [ True | False ] (default: False)
    :param task_execution: Task execution (default: 'compss')
    :param storage_impl: Storage implementation path
    :param storage_conf: Storage configuration file path
    :param streaming_backend: Streaming backend (default: None)
    :param streaming_master_name: Streaming master name (default: None)
    :param streaming_master_port: Streaming master port (default: None)
    :param task_count: Task count (default: 50)
    :param app_name: Application name (default: Interactive_date)
    :param uuid: UUID
    :param base_log_dir: Base logging directory
    :param specific_log_dir: Specific logging directory
    :param extrae_cfg: Extrae configuration file path
    :param comm: Communication library (default: NIO)
    :param conn: Connector (default: DefaultSSHConnector)
    :param master_name: Master Name (default: '')
    :param master_port: Master port (default: '')
    :param scheduler: Scheduler (default:
                  es.bsc.compss.scheduler.loadbalancing.LoadBalancingScheduler)
    :param jvm_workers: Java VM parameters
                        (default: '-Xms1024m,-Xmx1024m,-Xmn400m')
    :param cpu_affinity: CPU Core affinity (default: 'automatic')
    :param gpu_affinity: GPU Core affinity (default: 'automatic')
    :param fpga_affinity: FPGA Core affinity (default: 'automatic')
    :param fpga_reprogram: FPGA reprogram command (default: '')
    :param profile_input: Input profile  (default: '')
    :param profile_output: Output profile  (default: '')
    :param scheduler_config: Scheduler configuration  (default: '')
    :param external_adaptation: External adaptation [ True | False ]
                                (default: False)
    :param propagate_virtual_environment: Propagate virtual environment
                                          [ True | False ] (default: True)
    :param mpi_worker: Use the MPI worker [ True | False ] (default: False)
    :param args: Positional arguments
    :param kwargs: Named arguments
    :return: Execution result
    """
    # Let the Python binding know we are at master
    context.set_pycompss_context(context.MASTER)
    # Then we can import the appropriate start and stop functions from the API
    from pycompss.api.api import compss_start, compss_stop

    ##############################################################
    # INITIALIZATION
    ##############################################################

    # Initial dictionary with the user defined parameters
    all_vars = {
        'log_level': log_level,
        'debug': debug,
        'o_c': o_c,
        'graph': graph,
        'trace': trace,
        'monitor': monitor,
        'project_xml': project_xml,
        'resources_xml': resources_xml,
        'summary': summary,
        'task_execution': task_execution,
        'storage_impl': storage_impl,
        'storage_conf': storage_conf,
        'streaming_backend': streaming_backend,
        'streaming_master_name': streaming_master_name,
        'streaming_master_port': streaming_master_port,
        'task_count': task_count,
        'app_name': app_name,
        'uuid': uuid,
        'base_log_dir': base_log_dir,
        'specific_log_dir': specific_log_dir,
        'extrae_cfg': extrae_cfg,
        'comm': comm,
        'conn': conn,
        'master_name': master_name,
        'master_port': master_port,
        'scheduler': scheduler,
        'jvm_workers': jvm_workers,
        'cpu_affinity': cpu_affinity,
        'gpu_affinity': gpu_affinity,
        'fpga_affinity': fpga_affinity,
        'fpga_reprogram': fpga_reprogram,
        'profile_input': profile_input,
        'profile_output': profile_output,
        'scheduler_config': scheduler_config,
        'external_adaptation': external_adaptation,
        'propagate_virtual_environment': propagate_virtual_environment,
        'mpi_worker': mpi_worker
    }

    # Check the provided flags
    flags, issues = check_flags(all_vars)
    if not flags:
        print_flag_issues(issues)
        return None

    # Prepare the environment
    env_vars = prepare_environment(False, o_c, storage_impl, app, debug, trace,
                                   mpi_worker)
    all_vars.update(env_vars)

    monitoring_vars = prepare_loglevel_graph_for_monitoring(
        monitor, graph, debug, log_level)
    all_vars.update(monitoring_vars)

    if RUNNING_IN_SUPERCOMPUTER:
        updated_vars = updated_variables_in_sc()
        all_vars.update(updated_vars)

    to_update = prepare_tracing_environment(all_vars['trace'],
                                            all_vars['extrae_lib'],
                                            all_vars['ld_library_path'])
    all_vars['trace'], all_vars['ld_library_path'] = to_update

    inf_vars = check_infrastructure_variables(all_vars['project_xml'],
                                              all_vars['resources_xml'],
                                              all_vars['compss_home'],
                                              all_vars['app_name'],
                                              all_vars['file_name'],
                                              all_vars['external_adaptation'])
    all_vars.update(inf_vars)

    create_init_config_file(**all_vars)

    ##############################################################
    # RUNTIME START
    ##############################################################

    # Runtime start
    compss_start(log_level, all_vars['trace'], True)

    # Setup logging
    binding_log_path = get_log_path()
    log_path = os.path.join(all_vars['compss_home'], 'Bindings', 'python',
                            str(all_vars['major_version']), 'log')
    set_temporary_directory(binding_log_path)
    logging_cfg_file = get_logging_cfg_file(log_level)
    init_logging(os.path.join(log_path, logging_cfg_file), binding_log_path)
    logger = logging.getLogger("pycompss.runtime.launch")

    logger.debug('--- START ---')
    logger.debug('PyCOMPSs Log path: %s' % log_path)

    logger.debug("Starting storage")
    persistent_storage = master_init_storage(all_vars['storage_conf'], logger)

    logger.debug("Starting streaming")
    streaming = init_streaming(all_vars['streaming_backend'],
                               all_vars['streaming_master_name'],
                               all_vars['streaming_master_port'])

    saved_argv = sys.argv
    sys.argv = args
    # Execution:
    with event(APPLICATION_RUNNING_EVENT, master=True):
        if func is None or func == '__main__':
            if IS_PYTHON3:
                exec(open(app).read())
            else:
                execfile(app)  # noqa
            result = None
        else:
            if IS_PYTHON3:
                import importlib.util
                spec = importlib.util.spec_from_file_location(
                    all_vars['file_name'], app)  # noqa: E501
                imported_module = importlib.util.module_from_spec(spec)
                spec.loader.exec_module(imported_module)  # noqa
            else:
                import imp  # noqa
                imported_module = imp.load_source(all_vars['file_name'],
                                                  app)  # noqa
            method_to_call = getattr(imported_module, func)
            result = method_to_call(*args, **kwargs)
    # Recover the system arguments
    sys.argv = saved_argv

    # Stop streaming
    if streaming:
        stop_streaming()

    # Stop persistent storage
    if persistent_storage:
        master_stop_storage(logger)

    logger.debug('--- END ---')

    ##############################################################
    # RUNTIME STOP
    ##############################################################

    # Stop runtime
    compss_stop()

    return result
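
A hedged usage sketch for the function above (the application path, function name and flag values are hypothetical; the import path follows the file shown, pycompss/runtime/launch.py):

from pycompss.runtime.launch import launch_pycompss_application

# Runs the function "main" defined in my_app.py on the COMPSs runtime and
# returns its result.
result = launch_pycompss_application(
    "/home/user/my_app.py",   # app: path to the application file
    "main",                   # func: function inside the file to execute
    log_level="debug",
    graph=True)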
Code example #3
File: launch.py  Project: curiousTauseef/compss
def compss_main():
    # type: () -> None
    """ PyCOMPSs main function.

    General call:
    python $PYCOMPSS_HOME/pycompss/runtime/launch.py $log_level
           $PyObject_serialize $storage_conf $streaming_backend
           $streaming_master_name $streaming_master_port
           $fullAppPath $application_args

    :return: None
    """
    global APP_PATH

    # Let the Python binding know we are at master
    context.set_pycompss_context(context.MASTER)
    # Then we can import the appropriate start and stop functions from the API
    from pycompss.api.api import compss_start, compss_stop

    # See parse_arguments, defined above
    # In order to avoid parsing user arguments, we are going to remove user
    # args from sys.argv
    user_sys_argv = sys.argv[9:]
    sys.argv = sys.argv[:9]
    args = parse_arguments()
    # We are done, now sys.argv must contain user args only
    sys.argv = [args.app_path] + user_sys_argv

    # Get log_level
    log_level = args.log_level

    # Setup tracing
    tracing = int(args.tracing)

    # Start the runtime
    compss_start(log_level, tracing, False)

    # Get object_conversion boolean
    set_object_conversion(args.object_conversion == 'true')

    # Get storage configuration at master
    storage_conf = args.storage_configuration

    # Get application execution path
    APP_PATH = args.app_path

    # Setup logging
    binding_log_path = get_log_path()
    log_path = os.path.join(os.getenv('COMPSS_HOME'), 'Bindings', 'python',
                            str(_PYTHON_VERSION), 'log')
    set_temporary_directory(binding_log_path)
    logging_cfg_file = get_logging_cfg_file(log_level)
    init_logging(os.path.join(log_path, logging_cfg_file), binding_log_path)
    logger = logging.getLogger("pycompss.runtime.launch")

    # Get JVM options
    # jvm_opts = os.environ['JVM_OPTIONS_FILE']
    # from pycompss.util.jvm.parser import convert_to_dict
    # opts = convert_to_dict(jvm_opts)
    # storage_conf = opts.get('-Dcompss.storage.conf')

    exit_code = 0
    try:
        if __debug__:
            logger.debug('--- START ---')
            logger.debug('PyCOMPSs Log path: %s' % binding_log_path)

        # Start persistent storage
        persistent_storage = master_init_storage(storage_conf, logger)

        # Start streaming
        streaming = init_streaming(args.streaming_backend,
                                   args.streaming_master_name,
                                   args.streaming_master_port)

        # Show module warnings
        if __debug__:
            show_optional_module_warnings()

        # MAIN EXECUTION
        with event(APPLICATION_RUNNING_EVENT, master=True):
            if IS_PYTHON3:
                with open(APP_PATH) as f:
                    exec(compile(f.read(), APP_PATH, 'exec'), globals())
            else:
                execfile(APP_PATH, globals())  # MAIN EXECUTION

        # Stop streaming
        if streaming:
            stop_streaming()

        # Stop persistent storage
        if persistent_storage:
            master_stop_storage(logger)

        # End
        if __debug__:
            logger.debug('--- END ---')
    except SystemExit as e:
        if e.code != 0:
            print('[ ERROR ]: User program ended with exitcode %s.' % e.code)
            print('\t\tShutting down runtime...')
            exit_code = e.code
    except SerializerException:
        exit_code = 1
        # An object that cannot be serialized has been used as a parameter.
        print("[ ERROR ]: Serialization exception")
        exc_type, exc_value, exc_traceback = sys.exc_info()
        lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
        for line in lines:
            if APP_PATH in line:
                print('[ ERROR ]: In: %s' % line)
    except COMPSsException as e:
        # A COMPSs exception occurred
        print("[ ERROR ]: A COMPSs exception occurred: " + str(e))
        traceback.print_exc()
        exit_code = 0  # COMPSs exception is not considered an error
    except Exception as e:
        # Any other exception occurred
        print("[ ERROR ]: An exception occurred: " + str(e))
        traceback.print_exc()
        exit_code = 1
    finally:
        # Stop runtime
        compss_stop(exit_code)
        sys.stdout.flush()
        sys.stderr.flush()
        sys.exit(exit_code)
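
A simplified illustration of the sys.argv handover above (all values are hypothetical; the docstring's general call lists the binding flags, and parse_arguments() also reads a tracing flag, so nine entries precede the user arguments):

full_argv = ["launch.py"] + ["<binding_flag_%d>" % i for i in range(1, 8)] \
            + ["/home/user/my_app.py", "input.dat", "42"]

user_args = full_argv[9:]      # ["input.dat", "42"]: kept aside, never parsed
runtime_argv = full_argv[:9]   # script name, binding flags and the app path
# After parse_arguments(), the user application only sees its own arguments:
app_argv = [runtime_argv[-1]] + user_args
assert app_argv == ["/home/user/my_app.py", "input.dat", "42"]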
Code example #4
File: interactive.py  Project: curiousTauseef/compss
def start(
        log_level='off',  # type: str
        debug=False,  # type: bool
        o_c=False,  # type: bool
        graph=False,  # type: bool
        trace=False,  # type: bool
        monitor=None,  # type: int
        project_xml=None,  # type: str
        resources_xml=None,  # type: str
        summary=False,  # type: bool
        task_execution='compss',  # type: str
        storage_impl=None,  # type: str
        storage_conf=None,  # type: str
        streaming_backend=None,  # type: str
        streaming_master_name=None,  # type: str
        streaming_master_port=None,  # type: str
        task_count=50,  # type: int
        app_name=INTERACTIVE_FILE_NAME,  # type: str
        uuid=None,  # type: str
        base_log_dir=None,  # type: str
        specific_log_dir=None,  # type: str
        extrae_cfg=None,  # type: str
        comm='NIO',  # type: str
        conn='es.bsc.compss.connectors.DefaultSSHConnector',  # type: str
        master_name='',  # type: str
        master_port='',  # type: str
        scheduler='es.bsc.compss.scheduler.loadbalancing.LoadBalancingScheduler',  # type: str  # noqa: E501
        jvm_workers='-Xms1024m,-Xmx1024m,-Xmn400m',  # type: str
        cpu_affinity='automatic',  # type: str
        gpu_affinity='automatic',  # type: str
        fpga_affinity='automatic',  # type: str
        fpga_reprogram='',  # type: str
        profile_input='',  # type: str
        profile_output='',  # type: str
        scheduler_config='',  # type: str
        external_adaptation=False,  # type: bool
        propagate_virtual_environment=True,  # type: bool
        mpi_worker=False,  # type: bool
        verbose=False  # type: bool
):
    # type: (...) -> None
    """ Start the runtime in interactive mode.

    :param log_level: Logging level [ 'trace'|'debug'|'info'|'api'|'off' ]
                      (default: 'off')
    :param debug: Debug mode [ True | False ]
                  (default: False) (overrides log-level)
    :param o_c: Objects to string conversion [ True|False ]
                (default: False)
    :param graph: Generate graph [ True|False ]
                  (default: False)
    :param trace: Generate trace [ True|False|'scorep'|'arm-map'|'arm-ddt' ]
                  (default: False)
    :param monitor: Monitor refresh rate
                    (default: None)
    :param project_xml: Project xml file path
                        (default: None)
    :param resources_xml: Resources xml file path
                          (default: None)
    :param summary: Execution summary [ True | False ]
                    (default: False)
    :param task_execution: Task execution
                           (default: 'compss')
    :param storage_impl: Storage implementation path
                         (default: None)
    :param storage_conf: Storage configuration file path
                         (default: None)
    :param streaming_backend: Streaming backend
                              (default: None)
    :param streaming_master_name: Streaming master name
                                  (default: None)
    :param streaming_master_port: Streaming master port
                                  (default: None)
    :param task_count: Task count
                       (default: 50)
    :param app_name: Application name
                     (default: INTERACTIVE_FILE_NAME)
    :param uuid: UUID
                 (default: None)
    :param base_log_dir: Base logging directory
                         (default: None)
    :param specific_log_dir: Specific logging directory
                             (default: None)
    :param extrae_cfg: Extrae configuration file path
                       (default: None)
    :param comm: Communication library
                 (default: NIO)
    :param conn: Connector
                 (default: DefaultSSHConnector)
    :param master_name: Master Name
                        (default: '')
    :param master_port: Master port
                        (default: '')
    :param scheduler: Scheduler (see runcompss)
                      (default: es.bsc.compss.scheduler.loadbalancing.LoadBalancingScheduler)  # noqa
    :param jvm_workers: Java VM parameters
                        (default: '-Xms1024m,-Xmx1024m,-Xmn400m')
    :param cpu_affinity: CPU Core affinity
                         (default: 'automatic')
    :param gpu_affinity: GPU affinity
                         (default: 'automatic')
    :param fpga_affinity: FPGA affinity
                          (default: 'automatic')
    :param fpga_reprogram: FPGA reprogram command
                           (default: '')
    :param profile_input: Input profile
                          (default: '')
    :param profile_output: Output profile
                           (default: '')
    :param scheduler_config: Scheduler configuration
                             (default: '')
    :param external_adaptation: External adaptation [ True|False ]
                                (default: False)
    :param propagate_virtual_environment: Propagate virtual environment [ True|False ]  # noqa
                                          (default: True)
    :param mpi_worker: Use the MPI worker [ True|False ]
                       (default: False)
    :param verbose: Verbose mode [ True|False ]
                    (default: False)
    :return: None
    """
    # Export global variables
    global GRAPHING
    GRAPHING = graph
    __export_globals__()

    interactive_helpers.DEBUG = debug

    __show_flower__()

    # Let the Python binding know we are at master
    context.set_pycompss_context(context.MASTER)
    # Then we can import the appropriate start and stop functions from the API
    from pycompss.api.api import compss_start

    ##############################################################
    # INITIALIZATION
    ##############################################################

    # Initial dictionary with the user defined parameters
    all_vars = {
        'log_level': log_level,
        'debug': debug,
        'o_c': o_c,
        'graph': graph,
        'trace': trace,
        'monitor': monitor,
        'project_xml': project_xml,
        'resources_xml': resources_xml,
        'summary': summary,
        'task_execution': task_execution,
        'storage_impl': storage_impl,
        'storage_conf': storage_conf,
        'streaming_backend': streaming_backend,
        'streaming_master_name': streaming_master_name,
        'streaming_master_port': streaming_master_port,
        'task_count': task_count,
        'app_name': app_name,
        'uuid': uuid,
        'base_log_dir': base_log_dir,
        'specific_log_dir': specific_log_dir,
        'extrae_cfg': extrae_cfg,
        'comm': comm,
        'conn': conn,
        'master_name': master_name,
        'master_port': master_port,
        'scheduler': scheduler,
        'jvm_workers': jvm_workers,
        'cpu_affinity': cpu_affinity,
        'gpu_affinity': gpu_affinity,
        'fpga_affinity': fpga_affinity,
        'fpga_reprogram': fpga_reprogram,
        'profile_input': profile_input,
        'profile_output': profile_output,
        'scheduler_config': scheduler_config,
        'external_adaptation': external_adaptation,
        'propagate_virtual_environment': propagate_virtual_environment,
        'mpi_worker': mpi_worker
    }

    # Check the provided flags
    flags, issues = check_flags(all_vars)
    if not flags:
        print_flag_issues(issues)
        return None

    # Prepare the environment
    env_vars = prepare_environment(True, o_c, storage_impl, 'undefined', debug,
                                   trace, mpi_worker)
    all_vars.update(env_vars)

    # Update the log level and graph values if monitoring is enabled
    monitoring_vars = prepare_loglevel_graph_for_monitoring(
        monitor, graph, debug, log_level)
    all_vars.update(monitoring_vars)

    # Check if running in supercomputer and update the variables accordingly
    # with the defined in the launcher and exported in environment variables.
    if RUNNING_IN_SUPERCOMPUTER:
        updated_vars = updated_variables_in_sc()
        if verbose:
            print("- Overridden project xml with: %s" %
                  updated_vars['project_xml'])
            print("- Overridden resources xml with: %s" %
                  updated_vars['resources_xml'])
            print("- Overridden master name with: %s" %
                  updated_vars['master_name'])
            print("- Overridden master port with: %s" %
                  updated_vars['master_port'])
            print("- Overridden uuid with: %s" % updated_vars['uuid'])
            print("- Overridden base log dir with: %s" %
                  updated_vars['base_log_dir'])
            print("- Overridden specific log dir with: %s" %
                  updated_vars['specific_log_dir'])
            print("- Overridden storage conf with: %s" %
                  updated_vars['storage_conf'])
            print("- Overridden log level with: %s" %
                  str(updated_vars['log_level']))
            print("- Overridden debug with: %s" % str(updated_vars['debug']))
            print("- Overridden trace with: %s" % str(updated_vars['trace']))
        all_vars.update(updated_vars)

    # Update the tracing environment if set and set the appropriate trace
    # integer value
    tracing_vars = prepare_tracing_environment(all_vars['trace'],
                                               all_vars['extrae_lib'],
                                               all_vars['ld_library_path'])
    all_vars['trace'], all_vars['ld_library_path'] = tracing_vars

    # Update the infrastructure variables if necessary
    inf_vars = check_infrastructure_variables(all_vars['project_xml'],
                                              all_vars['resources_xml'],
                                              all_vars['compss_home'],
                                              all_vars['app_name'],
                                              all_vars['file_name'],
                                              all_vars['external_adaptation'])
    all_vars.update(inf_vars)

    # With all this information, create the configuration file for the
    # runtime start
    create_init_config_file(**all_vars)

    ##############################################################
    # RUNTIME START
    ##############################################################

    print("* - Starting COMPSs runtime...                       *")
    sys.stdout.flush()  # Force flush
    compss_start(log_level, all_vars['trace'], True)

    global LOG_PATH
    LOG_PATH = get_log_path()
    set_temporary_directory(LOG_PATH)
    print("* - Log path : " + LOG_PATH)

    # Setup logging
    binding_log_path = get_log_path()
    log_path = os.path.join(all_vars['compss_home'], 'Bindings', 'python',
                            str(all_vars['major_version']), 'log')
    set_temporary_directory(binding_log_path)
    logging_cfg_file = get_logging_cfg_file(log_level)
    init_logging(os.path.join(log_path, logging_cfg_file), binding_log_path)
    logger = logging.getLogger("pycompss.runtime.launch")

    __print_setup__(verbose, all_vars)

    logger.debug("--- START ---")
    logger.debug("PyCOMPSs Log path: %s" % LOG_PATH)

    logger.debug("Starting storage")
    global PERSISTENT_STORAGE
    PERSISTENT_STORAGE = master_init_storage(all_vars['storage_conf'], logger)

    logger.debug("Starting streaming")
    global STREAMING
    STREAMING = init_streaming(all_vars['streaming_backend'],
                               all_vars['streaming_master_name'],
                               all_vars['streaming_master_port'])

    # MAIN EXECUTION
    # let the user write an interactive application
    print("* - PyCOMPSs Runtime started... Have fun!            *")
    print("******************************************************")

    # Emit the application start event (the 0 is in the stop function)
    emit_manual_event(APPLICATION_RUNNING_EVENT)
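
A hedged notebook usage sketch for the interactive start() above (flag values are illustrative and assume the standard pycompss.interactive import path):

import pycompss.interactive as ipycompss

# Starts the COMPSs runtime from a Jupyter/IPython session; tasks defined
# afterwards run on the started runtime.
ipycompss.start(graph=True,     # generate the task dependency graph
                debug=True,     # enable debug output for the interactive helpers
                monitor=1000)   # monitor refresh rate (unit is an assumption: ms)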
Code example #5
File: interactive.py  Project: bsc-wdc/compss
def start(
        log_level="off",  # type: str
        debug=False,  # type: bool
        o_c=False,  # type: bool
        graph=False,  # type: bool
        trace=False,  # type: bool
        monitor=-1,  # type: int
        project_xml="",  # type: str
        resources_xml="",  # type: str
        summary=False,  # type: bool
        task_execution="compss",  # type: str
        storage_impl="",  # type: str
        storage_conf="",  # type: str
        streaming_backend="",  # type: str
        streaming_master_name="",  # type: str
        streaming_master_port="",  # type: str
        task_count=50,  # type: int
        app_name=INTERACTIVE_FILE_NAME,  # type: str
        uuid="",  # type: str
        base_log_dir="",  # type: str
        specific_log_dir="",  # type: str
        extrae_cfg="",  # type: str
        comm="NIO",  # type: str
        conn=DEFAULT_CONN,  # type: str
        master_name="",  # type: str
        master_port="",  # type: str
        scheduler=DEFAULT_SCHED,  # type: str
        jvm_workers=DEFAULT_JVM_WORKERS,  # type: str
        cpu_affinity="automatic",  # type: str
        gpu_affinity="automatic",  # type: str
        fpga_affinity="automatic",  # type: str
        fpga_reprogram="",  # type: str
        profile_input="",  # type: str
        profile_output="",  # type: str
        scheduler_config="",  # type: str
        external_adaptation=False,  # type: bool
        propagate_virtual_environment=True,  # type: bool
        mpi_worker=False,  # type: bool
        worker_cache=False,  # type: typing.Union[bool, str]
        shutdown_in_node_failure=False,  # type: bool
        io_executors=0,  # type: int
        env_script="",  # type: str
        reuse_on_block=True,  # type: bool
        nested_enabled=False,  # type: bool
        tracing_task_dependencies=False,  # type: bool
        trace_label="",  # type: str
        extrae_cfg_python="",  # type: str
        wcl=0,  # type: int
        cache_profiler=False,  # type: bool
        verbose=False  # type: bool
):  # type: (...) -> None
    """ Start the runtime in interactive mode.

    :param log_level: Logging level [ "trace"|"debug"|"info"|"api"|"off" ]
                      (default: "off")
    :param debug: Debug mode [ True | False ]
                  (default: False) (overrides log-level)
    :param o_c: Objects to string conversion [ True|False ]
                (default: False)
    :param graph: Generate graph [ True|False ]
                  (default: False)
    :param trace: Generate trace [ True|False|"scorep"|"arm-map"|"arm-ddt" ]
                  (default: False)
    :param monitor: Monitor refresh rate
                    (default: -1)
    :param project_xml: Project xml file path
                        (default: None)
    :param resources_xml: Resources xml file path
                          (default: None)
    :param summary: Execution summary [ True | False ]
                    (default: False)
    :param task_execution: Task execution
                           (default: "compss")
    :param storage_impl: Storage implementation path
                         (default: None)
    :param storage_conf: Storage configuration file path
                         (default: None)
    :param streaming_backend: Streaming backend
                              (default: None)
    :param streaming_master_name: Streaming master name
                                  (default: None)
    :param streaming_master_port: Streaming master port
                                  (default: None)
    :param task_count: Task count
                       (default: 50)
    :param app_name: Application name
                     (default: INTERACTIVE_FILE_NAME)
    :param uuid: UUID
                 (default: None)
    :param base_log_dir: Base logging directory
                         (default: None)
    :param specific_log_dir: Specific logging directory
                             (default: None)
    :param extrae_cfg: Extrae configuration file path
                       (default: None)
    :param comm: Communication library
                 (default: NIO)
    :param conn: Connector
                 (default: DefaultSSHConnector)
    :param master_name: Master Name
                        (default: "")
    :param master_port: Master port
                        (default: "")
    :param scheduler: Scheduler (see runcompss)
                      (default: es.bsc.compss.scheduler.loadbalancing.LoadBalancingScheduler)  # noqa: E501
    :param jvm_workers: Java VM parameters
                        (default: "-Xms1024m,-Xmx1024m,-Xmn400m")
    :param cpu_affinity: CPU Core affinity
                         (default: "automatic")
    :param gpu_affinity: GPU affinity
                         (default: "automatic")
    :param fpga_affinity: FPGA affinity
                          (default: "automatic")
    :param fpga_reprogram: FPGA reprogram command
                           (default: "")
    :param profile_input: Input profile
                          (default: "")
    :param profile_output: Output profile
                           (default: "")
    :param scheduler_config: Scheduler configuration
                             (default: "")
    :param external_adaptation: External adaptation [ True|False ]
                                (default: False)
    :param propagate_virtual_environment: Propagate virtual environment [ True|False ]  # noqa: E501
                                          (default: True)
    :param mpi_worker: Use the MPI worker [ True|False ]
                       (default: False)
    :param worker_cache: Use the worker cache [ True | int(size) | False]
                         (default: False)
    :param shutdown_in_node_failure: Shutdown in node failure [ True | False]
                                     (default: False)
    :param io_executors: <Integer> Number of IO executors
    :param env_script: <String> Environment script to be sourced in workers
    :param reuse_on_block: Reuse on block [ True | False]
                           (default: True)
    :param nested_enabled: Nested enabled [ True | False]
                           (default: False)
    :param tracing_task_dependencies: Include task dependencies in trace
                                      [ True | False] (default: False)
    :param trace_label: <String> Add trace label
    :param extrae_cfg_python: <String> Extrae configuration file for the
                              workers
    :param wcl: <Integer> Wall clock limit. Stops the runtime if reached.
                0 means forever.
    :param cache_profiler: Use the cache profiler [ True | False]
                         (default: False)
    :param verbose: Verbose mode [ True|False ]
                    (default: False)
    :return: None
    """
    # Export global variables
    global GRAPHING

    if context.in_pycompss():
        print("The runtime is already running")
        return None

    GRAPHING = graph
    __export_globals__()

    interactive_helpers.DEBUG = debug
    if debug:
        log_level = "debug"

    __show_flower__()

    # Let the Python binding know we are at master
    context.set_pycompss_context(context.MASTER)
    # Then we can import the appropriate start and stop functions from the API
    from pycompss.api.api import compss_start

    ##############################################################
    # INITIALIZATION
    ##############################################################

    # Initial dictionary with the user defined parameters
    all_vars = parameters_to_dict(
        log_level, debug, o_c, graph, trace, monitor, project_xml,
        resources_xml, summary, task_execution, storage_impl, storage_conf,
        streaming_backend, streaming_master_name, streaming_master_port,
        task_count, app_name, uuid, base_log_dir, specific_log_dir, extrae_cfg,
        comm, conn, master_name, master_port, scheduler, jvm_workers,
        cpu_affinity, gpu_affinity, fpga_affinity, fpga_reprogram,
        profile_input, profile_output, scheduler_config, external_adaptation,
        propagate_virtual_environment, mpi_worker, worker_cache,
        shutdown_in_node_failure, io_executors, env_script, reuse_on_block,
        nested_enabled, tracing_task_dependencies, trace_label,
        extrae_cfg_python, wcl, cache_profiler)
    # Save all vars in global current flags so that events.py can restart
    # the notebook with the same flags
    export_current_flags(all_vars)

    # Check the provided flags
    flags, issues = check_flags(all_vars)
    if not flags:
        print_flag_issues(issues)
        return None

    # Prepare the environment
    env_vars = prepare_environment(True, o_c, storage_impl, "undefined", debug,
                                   trace, mpi_worker)
    all_vars.update(env_vars)

    # Update the log level and graph values if monitoring is enabled
    monitoring_vars = prepare_loglevel_graph_for_monitoring(
        monitor, graph, debug, log_level)
    all_vars.update(monitoring_vars)

    # Check if running in supercomputer and update the variables accordingly
    # with the defined in the launcher and exported in environment variables.
    if RUNNING_IN_SUPERCOMPUTER:
        updated_vars = updated_variables_in_sc()
        if verbose:
            print("- Overridden project xml with: %s" %
                  updated_vars["project_xml"])
            print("- Overridden resources xml with: %s" %
                  updated_vars["resources_xml"])
            print("- Overridden master name with: %s" %
                  updated_vars["master_name"])
            print("- Overridden master port with: %s" %
                  updated_vars["master_port"])
            print("- Overridden uuid with: %s" % updated_vars["uuid"])
            print("- Overridden base log dir with: %s" %
                  updated_vars["base_log_dir"])
            print("- Overridden specific log dir with: %s" %
                  updated_vars["specific_log_dir"])
            print("- Overridden storage conf with: %s" %
                  updated_vars["storage_conf"])
            print("- Overridden log level with: %s" %
                  str(updated_vars["log_level"]))
            print("- Overridden debug with: %s" % str(updated_vars["debug"]))
            print("- Overridden trace with: %s" % str(updated_vars["trace"]))
        all_vars.update(updated_vars)

    # Update the tracing environment if set and set the appropriate trace
    # integer value
    tracing_vars = prepare_tracing_environment(all_vars["trace"],
                                               all_vars["extrae_lib"],
                                               all_vars["ld_library_path"])
    all_vars["trace"], all_vars["ld_library_path"] = tracing_vars

    # Update the infrastructure variables if necessary
    inf_vars = check_infrastructure_variables(all_vars["project_xml"],
                                              all_vars["resources_xml"],
                                              all_vars["compss_home"],
                                              all_vars["app_name"],
                                              all_vars["file_name"],
                                              all_vars["external_adaptation"])
    all_vars.update(inf_vars)

    # With all this information, create the configuration file for the
    # runtime start
    create_init_config_file(**all_vars)

    # Start the event manager (ipython hooks)
    ipython = globals()["__builtins__"]["get_ipython"]()
    setup_event_manager(ipython)

    ##############################################################
    # RUNTIME START
    ##############################################################

    print("* - Starting COMPSs runtime...                         *")
    sys.stdout.flush()  # Force flush
    compss_start(log_level, all_vars["trace"], True)

    global LOG_PATH
    LOG_PATH = get_log_path()
    set_temporary_directory(LOG_PATH)
    print("* - Log path : " + LOG_PATH)

    # Setup logging
    binding_log_path = get_log_path()
    log_path = os.path.join(all_vars["compss_home"], "Bindings", "python",
                            str(all_vars["major_version"]), "log")
    set_temporary_directory(binding_log_path)
    logging_cfg_file = get_logging_cfg_file(log_level)
    init_logging(os.path.join(log_path, logging_cfg_file), binding_log_path)
    logger = logging.getLogger("pycompss.runtime.launch")

    __print_setup__(verbose, all_vars)

    logger.debug("--- START ---")
    logger.debug("PyCOMPSs Log path: %s" % LOG_PATH)

    logger.debug("Starting storage")
    global PERSISTENT_STORAGE
    PERSISTENT_STORAGE = master_init_storage(all_vars["storage_conf"], logger)

    logger.debug("Starting streaming")
    global STREAMING
    STREAMING = init_streaming(all_vars["streaming_backend"],
                               all_vars["streaming_master_name"],
                               all_vars["streaming_master_port"])

    # Start monitoring the stdout and stderr
    STDW.start_watching()

    # MAIN EXECUTION
    # let the user write an interactive application
    print("* - PyCOMPSs Runtime started... Have fun!              *")
    print(LINE_SEPARATOR)

    # Emit the application start event (the 0 is in the stop function)
    emit_manual_event(APPLICATION_RUNNING_EVENT)
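
A similar hedged sketch for the newer signature above, showing some of the added flags (values are illustrative; the matching stop function mentioned in the final comment is not reproduced here):

import pycompss.interactive as ipycompss

ipycompss.start(graph=True,
                debug=True,          # also forces log_level="debug" (see above)
                worker_cache=True,   # enable the worker cache
                wcl=3600)            # wall clock limit: stop the runtime after one hour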
Code example #6
def executor(queue, process_name, pipe, conf):
    # type: (typing.Union[None, Queue], str, Pipe, typing.Any) -> None
    """Thread main body - Overrides Threading run method.

    Iterates over the input pipe in order to receive tasks (with their
    parameters) and process them.
    Notifies the runtime when each task has finished with the
    corresponding output value.
    Finishes when the "quit" message is received.

    :param queue: Queue where to put exception messages.
    :param process_name: Process name (Thread-X, where X is the thread id).
    :param pipe: Pipe to receive and send messages from/to the runtime.
    :param conf: configuration of the executor.
    :return: None
    """
    try:
        # Replace Python Worker's SIGTERM handler.
        signal.signal(signal.SIGTERM, shutdown_handler)

        if len(conf.logger.handlers) == 0:
            # Logger has not been inherited correctly. This happens on macOS.
            set_temporary_directory(conf.tmp_dir, create_tmpdir=False)
            # Reload logger
            conf.logger, conf.logger_cfg, conf.storage_loggers, _ = \
                load_loggers(conf.debug, conf.persistent_storage)
            # Set the binding in worker mode too
            context.set_pycompss_context(context.WORKER)
        logger = conf.logger

        tracing = conf.tracing
        storage_conf = conf.storage_conf
        storage_loggers = conf.storage_loggers

        # Get a copy of the necessary information from the logger to
        # re-establish after each task
        logger_handlers = copy.copy(logger.handlers)
        logger_level = logger.getEffectiveLevel()
        logger_formatter = logging.Formatter(
            logger_handlers[0].formatter._fmt)  # noqa
        storage_loggers_handlers = []
        for storage_logger in storage_loggers:
            storage_loggers_handlers.append(copy.copy(storage_logger.handlers))

        # Establish link with the binding-commons to enable task nesting
        if __debug__:
            logger.debug(HEADER +
                         "Establishing link with runtime in process " +
                         str(process_name))  # noqa: E501
        COMPSs.load_runtime(external_process=False, _logger=logger)
        COMPSs.set_pipes(pipe.output_pipe, pipe.input_pipe)

        if storage_conf != "null":
            try:
                from storage.api import initWorkerPostFork  # noqa
                with event_worker(INIT_WORKER_POSTFORK_EVENT):
                    initWorkerPostFork()
            except (ImportError, AttributeError):
                if __debug__:
                    logger.info(
                        HEADER +
                        "[%s] Could not find initWorkerPostFork storage call. Ignoring it."
                        %  # noqa: E501
                        str(process_name))

        # Start the streaming backend if necessary
        streaming = False
        if conf.stream_backend not in [None, "null", "NONE"]:
            streaming = True

        if streaming:
            # Initialize streaming
            logger.debug(HEADER + "Starting streaming for process " +
                         str(process_name))
            try:
                DistroStreamClientHandler.init_and_start(
                    master_ip=conf.stream_master_ip,
                    master_port=conf.stream_master_port)
            except Exception as e:
                logger.error(e)
                raise e

        # Connect to Shared memory manager
        if conf.cache_queue:
            load_shared_memory_manager()

        # Process properties
        alive = True

        if __debug__:
            logger.debug(HEADER + "[%s] Starting process" % str(process_name))

        # MAIN EXECUTOR LOOP
        while alive:
            # Runtime -> pipe - Read command from pipe
            command = COMPSs.read_pipes()
            if command != "":
                if __debug__:
                    logger.debug(HEADER + "[%s] Received command %s" %
                                 (str(process_name), str(command)))
                # Process the command
                alive = process_message(
                    command, process_name, pipe, queue, tracing, logger,
                    conf.logger_cfg, logger_handlers, logger_level,
                    logger_formatter, storage_conf, storage_loggers,
                    storage_loggers_handlers, conf.cache_queue, conf.cache_ids,
                    conf.cache_profiler)
        # Stop storage
        if storage_conf != "null":
            try:
                from storage.api import finishWorkerPostFork  # noqa
                with event_worker(FINISH_WORKER_POSTFORK_EVENT):
                    finishWorkerPostFork()
            except (ImportError, AttributeError):
                if __debug__:
                    logger.info(
                        HEADER +
                        "[%s] Could not find finishWorkerPostFork storage call. Ignoring it."
                        %  # noqa: E501
                        str(process_name))

        # Stop streaming
        if streaming:
            logger.debug(HEADER + "Stopping streaming for process " +
                         str(process_name))
            DistroStreamClientHandler.set_stop()

        sys.stdout.flush()
        sys.stderr.flush()
        if __debug__:
            logger.debug(HEADER + "[%s] Exiting process " % str(process_name))
        pipe.write(QUIT_TAG)
        pipe.close()
    except Exception as e:
        logger.error(e)
        raise e
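
A condensed sketch of the main loop pattern used by the executor above (function names here are placeholders, not the binding's API): the process blocks on the pipe, dispatches every non-empty command, and leaves the loop when the handler reports that it should stop.

def command_loop(read_command, handle_command):
    # read_command: blocking read from the runtime pipe (e.g. COMPSs.read_pipes)
    # handle_command: processes one command, returns False on the quit message
    alive = True
    while alive:
        command = read_command()
        if command != "":
            alive = handle_command(command)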
Code example #7
def launch_pycompss_application(
        app,  # type: str
        func,  # type: typing.Optional[str]
        log_level="off",  # type: str
        o_c=False,  # type: bool
        debug=False,  # type: bool
        graph=False,  # type: bool
        trace=False,  # type: bool
        monitor=-1,  # type: int
        project_xml="",  # type: str
        resources_xml="",  # type: str
        summary=False,  # type: bool
        task_execution="compss",  # type: str
        storage_impl="",  # type: str
        storage_conf="",  # type: str
        streaming_backend="",  # type: str
        streaming_master_name="",  # type: str
        streaming_master_port="",  # type: str
        task_count=50,  # type: int
        app_name="",  # type: str
        uuid="",  # type: str
        base_log_dir="",  # type: str
        specific_log_dir="",  # type: str
        extrae_cfg="",  # type: str
        comm="NIO",  # type: str
        conn=DEFAULT_CONN,  # type: str
        master_name="",  # type: str
        master_port="",  # type: str
        scheduler=DEFAULT_SCHED,  # type: str
        jvm_workers=DEFAULT_JVM_WORKERS,  # type: str
        cpu_affinity="automatic",  # type: str
        gpu_affinity="automatic",  # type: str
        fpga_affinity="automatic",  # type: str
        fpga_reprogram="",  # type: str
        profile_input="",  # type: str
        profile_output="",  # type: str
        scheduler_config="",  # type: str
        external_adaptation=False,  # type: bool
        propagate_virtual_environment=True,  # type: bool
        mpi_worker=False,  # type: bool
        worker_cache=False,  # type: typing.Union[bool, str]
        shutdown_in_node_failure=False,  # type: bool
        io_executors=0,  # type: int
        env_script="",  # type: str
        reuse_on_block=True,  # type: bool
        nested_enabled=False,  # type: bool
        tracing_task_dependencies=False,  # type: bool
        trace_label="",  # type: str
        extrae_cfg_python="",  # type: str
        wcl=0,  # type: int
        cache_profiler=False,  # type: bool
        *args,
        **kwargs):  # NOSONAR
    # type: (...) -> typing.Any
    """ Launch PyCOMPSs application from function.

    :param app: Application path
    :param func: Function
    :param log_level: Logging level [ "trace"|"debug"|"info"|"api"|"off" ]
                      (default: "off")
    :param o_c: Objects to string conversion [ True | False ] (default: False)
    :param debug: Debug mode [ True | False ] (default: False)
                  (overrides log_level)
    :param graph: Generate graph [ True | False ] (default: False)
    :param trace: Generate trace
                  [ True | False | "scorep" | "arm-map" | "arm-ddt"]
                  (default: False)
    :param monitor: Monitor refresh rate (default: -1)
    :param project_xml: Project xml file path
    :param resources_xml: Resources xml file path
    :param summary: Execution summary [ True | False ] (default: False)
    :param task_execution: Task execution (default: "compss")
    :param storage_impl: Storage implementation path
    :param storage_conf: Storage configuration file path
    :param streaming_backend: Streaming backend (default: None)
    :param streaming_master_name: Streaming master name (default: None)
    :param streaming_master_port: Streaming master port (default: None)
    :param task_count: Task count (default: 50)
    :param app_name: Application name (default: Interactive_date)
    :param uuid: UUID
    :param base_log_dir: Base logging directory
    :param specific_log_dir: Specific logging directory
    :param extrae_cfg: Extrae configuration file path
    :param comm: Communication library (default: NIO)
    :param conn: Connector (default: DefaultSSHConnector)
    :param master_name: Master Name (default: "")
    :param master_port: Master port (default: "")
    :param scheduler: Scheduler (default:
                  es.bsc.compss.scheduler.loadbalancing.LoadBalancingScheduler)
    :param jvm_workers: Java VM parameters
                        (default: "-Xms1024m,-Xmx1024m,-Xmn400m")
    :param cpu_affinity: CPU Core affinity (default: "automatic")
    :param gpu_affinity: GPU Core affinity (default: "automatic")
    :param fpga_affinity: FPGA Core affinity (default: "automatic")
    :param fpga_reprogram: FPGA reprogram command (default: "")
    :param profile_input: Input profile  (default: "")
    :param profile_output: Output profile  (default: "")
    :param scheduler_config: Scheduler configuration  (default: "")
    :param external_adaptation: External adaptation [ True | False ]
                                (default: False)
    :param propagate_virtual_environment: Propagate virtual environment
                                          [ True | False ] (default: True)
    :param mpi_worker: Use the MPI worker [ True | False ] (default: False)
    :param worker_cache: Use the worker cache [ True | int(size) | False]
                         (default: False)
    :param shutdown_in_node_failure: Shutdown in node failure [ True | False]
                                     (default: False)
    :param io_executors: <Integer> Number of IO executors
    :param env_script: <String> Environment script to be sourced in workers
    :param reuse_on_block: Reuse on block [ True | False] (default: True)
    :param nested_enabled: Nested enabled [ True | False] (default: False)
    :param tracing_task_dependencies: Include task dependencies in trace
                                      [ True | False] (default: False)
    :param trace_label: <String> Add trace label
    :param extrae_cfg_python: <String> Extrae configuration file for the
                              workers
    :param wcl: <Integer> Wallclock limit. Stops the runtime if reached.
                0 means forever.
    :param cache_profiler: Use the cache profiler [ True | False]
                         (default: False)
    :param args: Positional arguments
    :param kwargs: Named arguments
    :return: Execution result
    """
    # Check that COMPSs is available
    if "COMPSS_HOME" not in os.environ:
        # Do not allow to continue if COMPSS_HOME is not defined
        raise PyCOMPSsException(
            "ERROR: COMPSS_HOME is not defined in the environment")

    # Let the Python binding know we are at master
    context.set_pycompss_context(context.MASTER)
    # Then we can import the appropriate start and stop functions from the API
    from pycompss.api.api import compss_start, compss_stop

    ##############################################################
    # INITIALIZATION
    ##############################################################

    if debug:
        log_level = "debug"

    # Initial dictionary with the user defined parameters
    all_vars = parameters_to_dict(
        log_level, debug, o_c, graph, trace, monitor, project_xml,
        resources_xml, summary, task_execution, storage_impl, storage_conf,
        streaming_backend, streaming_master_name, streaming_master_port,
        task_count, app_name, uuid, base_log_dir, specific_log_dir, extrae_cfg,
        comm, conn, master_name, master_port, scheduler, jvm_workers,
        cpu_affinity, gpu_affinity, fpga_affinity, fpga_reprogram,
        profile_input, profile_output, scheduler_config, external_adaptation,
        propagate_virtual_environment, mpi_worker, worker_cache,
        shutdown_in_node_failure, io_executors, env_script, reuse_on_block,
        nested_enabled, tracing_task_dependencies, trace_label,
        extrae_cfg_python, wcl, cache_profiler)
    # Save all vars in global current flags so that events.py can restart
    # the notebook with the same flags
    export_current_flags(all_vars)

    # Check the provided flags
    flags, issues = check_flags(all_vars)
    if not flags:
        print_flag_issues(issues)
        return None

    # Prepare the environment
    env_vars = prepare_environment(False, o_c, storage_impl, app, debug, trace,
                                   mpi_worker)
    all_vars.update(env_vars)

    monitoring_vars = prepare_loglevel_graph_for_monitoring(
        monitor, graph, debug, log_level)
    all_vars.update(monitoring_vars)

    if RUNNING_IN_SUPERCOMPUTER:
        updated_vars = updated_variables_in_sc()
        all_vars.update(updated_vars)

    to_update = prepare_tracing_environment(all_vars["trace"],
                                            all_vars["extrae_lib"],
                                            all_vars["ld_library_path"])
    all_vars["trace"], all_vars["ld_library_path"] = to_update

    inf_vars = check_infrastructure_variables(all_vars["project_xml"],
                                              all_vars["resources_xml"],
                                              all_vars["compss_home"],
                                              all_vars["app_name"],
                                              all_vars["file_name"],
                                              all_vars["external_adaptation"])
    all_vars.update(inf_vars)

    create_init_config_file(**all_vars)

    ##############################################################
    # RUNTIME START
    ##############################################################

    # Runtime start
    compss_start(log_level, all_vars["trace"], True)

    # Setup logging
    binding_log_path = get_log_path()
    log_path = os.path.join(all_vars["compss_home"], "Bindings", "python",
                            str(all_vars["major_version"]), "log")
    set_temporary_directory(binding_log_path)
    logging_cfg_file = get_logging_cfg_file(log_level)
    init_logging(os.path.join(log_path, logging_cfg_file), binding_log_path)
    logger = logging.getLogger("pycompss.runtime.launch")

    logger.debug("--- START ---")
    logger.debug("PyCOMPSs Log path: %s" % log_path)

    if storage_impl and storage_conf:
        logger.debug("Starting storage")
        persistent_storage = master_init_storage(all_vars["storage_conf"],
                                                 logger)
    else:
        persistent_storage = False

    logger.debug("Starting streaming")
    streaming = init_streaming(all_vars["streaming_backend"],
                               all_vars["streaming_master_name"],
                               all_vars["streaming_master_port"])

    saved_argv = sys.argv
    sys.argv = list(args)
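    # Temporarily replace sys.argv so that the user code only sees its own
    # arguments; the original value is restored right after the execution.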
    # Execution:
    with event_master(APPLICATION_RUNNING_EVENT):
        if func is None or func == "__main__":
            if IS_PYTHON3:
                exec(open(app).read())
            else:
                execfile(app)  # type: ignore
            result = None
        else:
            if IS_PYTHON3:
                from importlib.machinery import SourceFileLoader  # noqa
                imported_module = SourceFileLoader(
                    all_vars["file_name"], app).load_module()  # type: ignore
            else:
                import imp  # noqa
                imported_module = imp.load_source(all_vars["file_name"],
                                                  app)  # noqa
            method_to_call = getattr(imported_module, func)
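            # Call the requested function with the user-provided arguments;
            # if its signature does not accept them, fall back to calling it
            # with no arguments.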
            try:
                result = method_to_call(*args, **kwargs)
            except TypeError:
                result = method_to_call()
    # Recover the system arguments
    sys.argv = saved_argv

    # Stop streaming
    if streaming:
        stop_streaming()

    # Stop persistent storage
    if persistent_storage:
        master_stop_storage(logger)

    logger.debug("--- END ---")

    ##############################################################
    # RUNTIME STOP
    ##############################################################

    # Stop runtime
    compss_stop()
    clean_log_configs()

    return result
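A minimal usage sketch of the launcher above (assuming launch_pycompss_application is importable from pycompss.runtime.launch, as the logger name in the code suggests; the application path, function name and chosen flags are hypothetical):

# Hypothetical example: run the function "main" defined in /home/user/my_app.py
# through the PyCOMPSs runtime, keeping every other option at its default.
from pycompss.runtime.launch import launch_pycompss_application

result = launch_pycompss_application(
    "/home/user/my_app.py",  # application path (hypothetical)
    "main",                  # function defined inside that module
    log_level="debug",
    graph=True)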
Code example #8
def compss_main():
    # type: () -> None
    """ PyCOMPSs main function.

    General call:
    python $PYCOMPSS_HOME/pycompss/runtime/launch.py $wall_clock $log_level
           $PyObject_serialize $storage_conf $streaming_backend
           $streaming_master_name $streaming_master_port
           $fullAppPath $application_args

    :return: None
    """
    global APP_PATH
    global STREAMING
    global PERSISTENT_STORAGE
    global LOGGER
    # Let the Python binding know we are at master
    context.set_pycompss_context(context.MASTER)
    # Then we can import the appropriate start and stop functions from the API
    from pycompss.api.api import compss_start  # noqa
    from pycompss.api.api import compss_stop  # noqa
    from pycompss.api.api import compss_set_wall_clock  # noqa

    # See parse_arguments, defined above
    # In order to avoid parsing user arguments, we are going to remove user
    # args from sys.argv
    user_sys_argv = sys.argv[10:]
    sys.argv = sys.argv[:10]
    args = parse_arguments()
    # We are done, now sys.argv must contain user args only
    sys.argv = [args.app_path] + user_sys_argv
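    # At this point sys.argv mimics a regular invocation of the user script:
    # entry 0 is the application path and the remaining entries are the
    # user-provided arguments.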

    # Get log_level
    log_level = args.log_level

    # Setup tracing
    tracing = int(args.tracing)

    # Get storage configuration at master
    storage_conf = args.storage_configuration

    # Load the user imports before starting the runtime (this can be skipped
    # if ENVIRONMENT_VARIABLE_LOAD -- defined in configuration.py -- is set
    # to false).
    # Reason: some cases, such as autoparallel, may need to skip this preload.
    # It is also disabled when using persistent storage (with dataClay the
    # preload cannot be done).
    if preload_user_code() and not use_storage(storage_conf):
        with context.loading_context():
            __load_user_module__(args.app_path, log_level)

    # Start the runtime
    compss_start(log_level, tracing, False)

    # Register @implements core elements (they cannot be registered in
    # __load_user_module__).
    __register_implementation_core_elements__()

    # Get application wall clock limit
    wall_clock = int(args.wall_clock)
    if wall_clock > 0:
        compss_set_wall_clock(wall_clock)

    # Get object_conversion boolean
    set_object_conversion(args.object_conversion == "true")

    # Get application execution path
    APP_PATH = args.app_path

    # Setup logging
    binding_log_path = get_log_path()
    log_path = os.path.join(str(os.getenv("COMPSS_HOME")), "Bindings",
                            "python", str(PYTHON_VERSION), "log")
    set_temporary_directory(binding_log_path)
    logging_cfg_file = get_logging_cfg_file(log_level)
    init_logging(os.path.join(log_path, logging_cfg_file), binding_log_path)
    LOGGER = logging.getLogger("pycompss.runtime.launch")

    # Get JVM options
    # jvm_opts = os.environ["JVM_OPTIONS_FILE"]
    # from pycompss.util.jvm.parser import convert_to_dict
    # opts = convert_to_dict(jvm_opts)
    # storage_conf = opts.get("-Dcompss.storage.conf")

    exit_code = 0
    try:
        if __debug__:
            LOGGER.debug("--- START ---")
            LOGGER.debug("PyCOMPSs Log path: %s" % binding_log_path)

        # Start persistent storage
        PERSISTENT_STORAGE = master_init_storage(storage_conf, LOGGER)

        # Start STREAMING
        STREAMING = init_streaming(args.streaming_backend,
                                   args.streaming_master_name,
                                   args.streaming_master_port)

        # Show module warnings
        if __debug__:
            show_optional_module_warnings()

        # MAIN EXECUTION
        with event_master(APPLICATION_RUNNING_EVENT):
            if IS_PYTHON3:
                with open(APP_PATH) as f:
                    exec(compile(f.read(), APP_PATH, "exec"), globals())
            else:
                execfile(APP_PATH, globals())  # type: ignore

        # End
        if __debug__:
            LOGGER.debug("--- END ---")
    except SystemExit as e:  # NOSONAR - re-raising would not allow the runtime to stop gracefully.
        if e.code != 0:
            print("[ ERROR ]: User program ended with exitcode %s." % e.code)
            print("\t\tShutting down runtime...")
            exit_code = e.code
    except SerializerException:
        # An object that cannot be serialized has been used as a parameter.
        exit_code = 1
        print("[ ERROR ]: Serialization exception")
        exc_type, exc_value, exc_traceback = sys.exc_info()
        lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
        for line in lines:
            if APP_PATH in line:
                print("[ ERROR ]: In: %s" % line)
    except COMPSsException as e:
        # A COMPSs exception was raised by the application
        print("[ ERROR ]: A COMPSs exception occurred: " + str(e))
        traceback.print_exc()
        exit_code = 0  # COMPSs exception is not considered an error
    except Exception as e:
        # Any other exception occurred
        print("[ ERROR ]: An exception occurred: " + str(e))
        traceback.print_exc()
        exit_code = 1
    finally:
        # Stop runtime
        stop_all(exit_code)
        clean_log_configs()