def c_extension_link(in_queue, out_queue, redirect_std,
                     out_file_name, err_file_name):
    # type: (Queue, Queue, bool, str, str) -> None
    """ Main C extension process.

    :param in_queue: Queue to receive messages.
    :param out_queue: Queue to send messages.
    :param redirect_std: Decide whether to store the stdout and stderr into
                         files or not.
    :param out_file_name: File where to store the stdout (only required if
                          redirect_std is True).
    :param err_file_name: File where to store the stderr (only required if
                          redirect_std is True).
    :return: None
    """
    # Import C extension within the external process
    import compss

    with ipython_std_redirector(out_file_name, err_file_name) \
            if redirect_std else not_std_redirector():
        alive = True
        while alive:
            message = in_queue.get()
            command = message[0]
            parameters = []  # type: list
            if len(message) > 0:
                parameters = list(message[1:])
            if command == START:
                compss.start_runtime()
            elif command == SET_DEBUG:
                compss.set_debug(*parameters)
            elif command == STOP:
                compss.stop_runtime(*parameters)
                alive = False
            elif command == CANCEL_TASKS:
                compss.cancel_application_tasks(*parameters)
            elif command == ACCESSED_FILE:
                accessed = compss.accessed_file(*parameters)
                out_queue.put(accessed)
            elif command == OPEN_FILE:
                compss_name = compss.open_file(*parameters)
                out_queue.put(compss_name)
            elif command == CLOSE_FILE:
                compss.close_file(*parameters)
            elif command == DELETE_FILE:
                result = compss.delete_file(*parameters)
                out_queue.put(result)
            elif command == GET_FILE:
                compss.get_file(*parameters)
            elif command == GET_DIRECTORY:
                compss.get_directory(*parameters)
            elif command == BARRIER:
                compss.barrier(*parameters)
            elif command == BARRIER_GROUP:
                exception_message = compss.barrier_group(*parameters)
                out_queue.put(exception_message)
            elif command == OPEN_TASK_GROUP:
                compss.open_task_group(*parameters)
            elif command == CLOSE_TASK_GROUP:
                compss.close_task_group(*parameters)
            elif command == GET_LOGGING_PATH:
                log_path = compss.get_logging_path()
                out_queue.put(log_path)
            elif command == GET_NUMBER_OF_RESOURCES:
                num_resources = compss.get_number_of_resources(*parameters)
                out_queue.put(num_resources)
            elif command == REQUEST_RESOURCES:
                compss.request_resources(*parameters)
            elif command == FREE_RESOURCES:
                compss.free_resources(*parameters)
            elif command == REGISTER_CORE_ELEMENT:
                compss.register_core_element(*parameters)
            elif command == PROCESS_TASK:
                compss.process_task(*parameters)
            elif command == PROCESS_HTTP_TASK:
                compss.process_http_task(*parameters)
            elif command == SET_PIPES:
                compss.set_pipes(*parameters)
            elif command == READ_PIPES:
                compss.read_pipes(*parameters)
            elif command == SET_WALL_CLOCK:
                compss.set_wall_clock(*parameters)
            else:
                raise PyCOMPSsException("Unknown link command")
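# Usage sketch (illustrative): one way the link process above could be driven
# from a parent process. It assumes the command constants (START,
# GET_LOGGING_PATH, STOP) defined alongside this function; the multiprocessing
# wiring and the stop code are assumptions for illustration only, not the
# actual PyCOMPSs start-up code.
def _example_link_usage():
    from multiprocessing import Process, Queue

    in_queue = Queue()   # parent -> link process (commands)
    out_queue = Queue()  # link process -> parent (results)

    # Spawn the link process without stdout/stderr redirection.
    link = Process(target=c_extension_link,
                   args=(in_queue, out_queue, False, "", ""))
    link.start()

    in_queue.put([START])             # start the runtime
    in_queue.put([GET_LOGGING_PATH])  # commands with a result reply on out_queue
    log_path = out_queue.get()

    in_queue.put([STOP, 0])           # stop code 0 assumed; ends the command loop
    link.join()
    return log_path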
def process_task(f, module_name, class_name, ftype, f_parameters, f_returns,
                 task_kwargs, num_nodes, replicated, distributed):
    """ Function that submits a task to the runtime.

    :param f: Function or method
    :param module_name: Name of the module containing the function/method
                        (including packages, if any)
    :param class_name: Name of the class (if method)
    :param ftype: Function type
    :param f_parameters: Function parameters (dictionary {'param1': Parameter()})
    :param f_returns: Function returns (dictionary {'compss_retvalueX': Parameter()})
    :param task_kwargs: Decorator arguments
    :param num_nodes: Number of nodes that the task must use
    :param replicated: Boolean indicating if the task must be replicated or not
    :param distributed: Boolean indicating if the task must be distributed or not
    :return: The future object related to the task return
    """
    if __debug__:
        logger.debug("TASK: %s of type %s, in module %s, in class %s" %
                     (f.__name__, ftype, module_name, class_name))

    app_id = 0

    # Check if the function is an instance method or a class method.
    if ftype == FunctionType.INSTANCE_METHOD:
        has_target = True
    else:
        has_target = False

    # Build the return objects (futures) and count the returns.
    fo = None
    if f_returns:
        fo = _build_return_objects(f_returns)
    num_returns = len(f_returns)

    # Get path
    if class_name == '':
        path = module_name
    else:
        path = module_name + '.' + class_name

    # Infer COMPSs types from real types, except for files
    _serialize_objects(f_parameters)

    # Build values and COMPSs types and directions
    values, compss_types, compss_directions, compss_streams, compss_prefixes = \
        _build_values_types_directions(ftype, f_parameters, f_returns,
                                       f.__code_strings__)

    # Get priority
    has_priority = task_kwargs['priority']

    # Signature and other parameters:
    signature = '.'.join([path, f.__name__])
    # num_nodes = 1        # default when there is no MPI decorator
    # replicated = False   # default when there is no replicated tag
    # distributed = False  # default when there is no distributed tag

    if __debug__:
        # Log the task submission values for debugging purposes.
        if logger.isEnabledFor(logging.DEBUG):
            values_str = ''
            types_str = ''
            direct_str = ''
            streams_str = ''
            prefixes_str = ''
            for v in values:
                values_str += str(v) + ' '
            for t in compss_types:
                types_str += str(t) + ' '
            for d in compss_directions:
                direct_str += str(d) + ' '
            for s in compss_streams:
                streams_str += str(s) + ' '
            for p in compss_prefixes:
                prefixes_str += str(p) + ' '
            logger.debug("Processing task:")
            logger.debug("\t- App id: " + str(app_id))
            logger.debug("\t- Path: " + path)
            logger.debug("\t- Function name: " + f.__name__)
            logger.debug("\t- Signature: " + signature)
            logger.debug("\t- Priority: " + str(has_priority))
            logger.debug("\t- Has target: " + str(has_target))
            logger.debug("\t- Num nodes: " + str(num_nodes))
            logger.debug("\t- Replicated: " + str(replicated))
            logger.debug("\t- Distributed: " + str(distributed))
            logger.debug("\t- Values: " + values_str)
            logger.debug("\t- COMPSs types: " + types_str)
            logger.debug("\t- COMPSs directions: " + direct_str)
            logger.debug("\t- COMPSs streams: " + streams_str)
            logger.debug("\t- COMPSs prefixes: " + prefixes_str)

    # Check that there are as many values as types, directions, streams and
    # prefixes.
    assert (len(values) == len(compss_types) == len(compss_directions) ==
            len(compss_streams) == len(compss_prefixes))

    '''
    Submit task to the runtime (call to the C extension):
    Parameters:
        0 - <Integer>   - application id (by default always 0 since it is not
                          currently needed for the signature)
        1 - <String>    - path of the module where the task is
        2 - <String>    - function name of the task (to be called from the
                          worker)
        3 - <String>    - priority flag (true|false)
        4 - <String>    - has target (true|false). Whether the task belongs to
                          an object or not.
        5 - [<String>]  - task parameters (basic types or file paths for
                          objects)
        6 - [<Integer>] - parameter types (number corresponding to the type of
                          each parameter)
        7 - [<Integer>] - parameter directions (number corresponding to the
                          direction of each parameter)
        8 - [<Integer>] - parameter streams (number corresponding to the
                          stream of each parameter)
        9 - [<String>]  - parameter prefixes (string corresponding to the
                          prefix of each parameter)
    '''
    compss.process_task(app_id,
                        signature,
                        has_priority,
                        num_nodes,
                        replicated,
                        distributed,
                        has_target,
                        num_returns,
                        values,
                        compss_types,
                        compss_directions,
                        compss_streams,
                        compss_prefixes)

    # Return the future object/s corresponding to the task.
    # This object will substitute the user-expected return of the task and will
    # be used later for synchronization or as a task parameter (in which case
    # the runtime will take care of the dependency).
    return fo
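# Illustrative sketch (assumed values): roughly what the arguments assembled by
# process_task() above could look like for a hypothetical module-level task
# my_app.increment(value) with one IN object parameter and one return. The
# file paths, the numeric direction/stream codes and the 'null' prefixes are
# placeholders; the real lists come from _build_values_types_directions().
def _example_process_task_call():
    app_id = 0
    signature = 'my_app.increment'   # path + '.' + function name
    has_priority = False
    num_nodes = 1
    replicated = False
    distributed = False
    has_target = False               # plain function, not an instance method
    num_returns = 1
    # Objects are serialized into files, so their values are file paths and
    # their COMPSs type is FILE.
    values = ['/tmp/pycompss/param_obj_0', '/tmp/pycompss/ret_obj_1']
    compss_types = [Type.FILE, Type.FILE]
    compss_directions = [0, 1]       # assumed codes: 0 = IN, 1 = OUT
    compss_streams = [3, 3]          # assumed code: 3 = unspecified stream
    compss_prefixes = ['null', 'null']
    compss.process_task(app_id, signature, has_priority, num_nodes,
                        replicated, distributed, has_target, num_returns,
                        values, compss_types, compss_directions,
                        compss_streams, compss_prefixes)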
def c_extension_link(in_queue, out_queue):
    # type: (..., ...) -> None
    """ Main C extension process.

    :param in_queue: Queue to receive messages.
    :param out_queue: Queue to send messages.
    :return: None
    """
    import compss

    alive = True
    while alive:
        message = in_queue.get()
        command = message[0]
        parameters = []
        if len(message) > 0:
            parameters = message[1:]
        if command == START:
            compss.start_runtime()
        elif command == SET_DEBUG:
            compss.set_debug(*parameters)
        elif command == STOP:
            compss.stop_runtime(*parameters)
            alive = False
        elif command == CANCEL_TASKS:
            compss.cancel_application_tasks(*parameters)
        elif command == ACCESSED_FILE:
            accessed = compss.accessed_file(*parameters)
            out_queue.put(accessed)
        elif command == OPEN_FILE:
            compss_name = compss.open_file(*parameters)
            out_queue.put(compss_name)
        elif command == CLOSE_FILE:
            compss.close_file(*parameters)
        elif command == DELETE_FILE:
            result = compss.delete_file(*parameters)
            out_queue.put(result)
        elif command == GET_FILE:
            compss.get_file(*parameters)
        elif command == GET_DIRECTORY:
            compss.get_directory(*parameters)
        elif command == BARRIER:
            compss.barrier(*parameters)
        elif command == BARRIER_GROUP:
            exception_message = compss.barrier_group(*parameters)
            out_queue.put(exception_message)
        elif command == OPEN_TASK_GROUP:
            compss.open_task_group(*parameters)
        elif command == CLOSE_TASK_GROUP:
            compss.close_task_group(*parameters)
        elif command == GET_LOGGING_PATH:
            log_path = compss.get_logging_path()
            out_queue.put(log_path)
        elif command == GET_NUMBER_OF_RESOURCES:
            num_resources = compss.get_number_of_resources(*parameters)
            out_queue.put(num_resources)
        elif command == REQUEST_RESOURCES:
            compss.request_resources(*parameters)
        elif command == FREE_RESOURCES:
            compss.free_resources(*parameters)
        elif command == REGISTER_CORE_ELEMENT:
            compss.register_core_element(*parameters)
        elif command == PROCESS_TASK:
            compss.process_task(*parameters)
        elif command == SET_PIPES:
            compss.set_pipes(*parameters)
        else:
            raise Exception("Unknown link command")
def process_task(f,             # Function or method
                 ftype,         # Function type
                 spec_args,     # Names of the task arguments
                 class_name,    # Name of the class (if method)
                 module_name,   # Name of the module containing the function/method (including packages, if any)
                 task_args,     # Unnamed arguments
                 task_kwargs,   # Named arguments
                 deco_kwargs):  # Decorator arguments
    logger.debug("TASK: %s of type %s, in module %s, in class %s" %
                 (f.__name__, ftype, module_name, class_name))

    first_par = 0
    if ftype == Function_Type.INSTANCE_METHOD:
        has_target = True
    else:
        has_target = False
        if ftype == Function_Type.CLASS_METHOD:
            first_par = 1  # skip class parameter

    ret_type = deco_kwargs['returns']
    if ret_type:
        # Create future for return value
        if ret_type in python_to_compss:  # primitives, string, dic, list, tuple
            fu = Future()
        elif inspect.isclass(ret_type):
            # For objects, the type of the future has to be specified to allow
            # o = func(); o.method()
            try:
                fu = ret_type()
            except TypeError:
                logger.warning("Type %s does not have an empty constructor, "
                               "building generic future object" % ret_type)
                fu = Future()
        else:  # modules, functions, methods
            fu = Future()
        logger.debug("Setting object %d of %s as a future" % (id(fu), type(fu)))
        obj_id = id(fu)
        ret_filename = temp_dir + temp_obj_prefix + str(obj_id)
        objid_to_filename[obj_id] = ret_filename
        task_objects[obj_id] = fu
        task_kwargs['compss_retvalue'] = ret_filename
    else:
        fu = None

    app_id = 0

    if class_name == '':
        path = module_name
    else:
        path = module_name + '.' + class_name

    # Infer COMPSs types from real types, except for files
    num_pars = len(spec_args)
    is_future = {}
    for i in range(first_par, num_pars):
        spec_arg = spec_args[i]
        p = deco_kwargs.get(spec_arg)
        if p is None:
            logger.debug("Adding default decoration for parameter %s" % spec_arg)
            p = Parameter()
            deco_kwargs[spec_arg] = p
        if i < len(task_args):
            p.value = task_args[i]
        else:
            p.value = task_kwargs[spec_arg]
        val_type = type(p.value)
        is_future[i] = (val_type == Future)
        logger.debug("Parameter " + spec_arg + "\n" +
                     "\t- Value type: " + str(val_type) + "\n" +
                     "\t- User-defined type: " + str(p.type))

        # Infer type if necessary
        if p.type is None:
            p.type = python_to_compss.get(val_type)
            if p.type is None:
                p.type = Type.OBJECT
            logger.debug("\n\t- Inferred type: %d" % p.type)

        # Serialize objects into files
        if p.type == Type.OBJECT or is_future.get(i):
            # 2nd condition: real type can be primitive, but now it's acting
            # as a future (object)
            turn_into_file(p)
        elif p.type == Type.INT:
            if p.value > JAVA_MAX_INT or p.value < JAVA_MIN_INT:
                # Must go through Java as a long to prevent overflow with Java int
                p.type = Type.LONG
        elif p.type == Type.LONG:
            if p.value > JAVA_MAX_LONG or p.value < JAVA_MIN_LONG:
                # Must be serialized to prevent overflow with Java long
                p.type = Type.OBJECT
                turn_into_file(p)

        logger.debug("Final type for parameter %s: %d" % (spec_arg, p.type))

    # Build values and COMPSs types and directions
    values = []
    compss_types = []
    compss_directions = []
    if ftype == Function_Type.INSTANCE_METHOD:
        ra = list(range(1, num_pars))
        ra.append(0)  # callee is the last
    else:
        ra = range(first_par, num_pars)
    for i in ra:
        spec_arg = spec_args[i]
        p = deco_kwargs[spec_arg]
        values.append(p.value)
        if p.type == Type.OBJECT or is_future.get(i):
            compss_types.append(Type.FILE)
        else:
            compss_types.append(p.type)
        compss_directions.append(p.direction)

    # Priority
    has_priority = deco_kwargs['priority']

    if logger.isEnabledFor(logging.DEBUG):
        values_str = ''
        types_str = ''
        direct_str = ''
        for v in values:
            values_str += str(v) + " "
        for t in compss_types:
            types_str += str(t) + " "
        for d in compss_directions:
            direct_str += str(d) + " "
        logger.debug("Processing task:\n" +
                     "\t- App id: " + str(app_id) + "\n" +
                     "\t- Path: " + path + "\n" +
                     "\t- Function name: " + f.__name__ + "\n" +
                     "\t- Priority: " + str(has_priority) + "\n" +
                     "\t- Has target: " + str(has_target) + "\n" +
                     "\t- Num params: " + str(num_pars) + "\n" +
                     "\t- Values: " + values_str + "\n" +
                     "\t- COMPSs types: " + types_str + "\n" +
                     "\t- COMPSs directions: " + direct_str)

    compss.process_task(app_id,
                        path,
                        f.__name__,
                        has_priority,
                        has_target,
                        values,
                        compss_types,
                        compss_directions)

    return fu
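# Illustrative note: from the application's point of view, the future returned
# by either process_task version above simply stands in for the real task
# result until it is synchronized. A hypothetical usage, assuming a
# @task-decorated function my_app.increment() and the PyCOMPSs
# synchronization API:
def _example_future_usage():
    from pycompss.api.api import compss_wait_on
    from my_app import increment      # hypothetical @task-decorated function

    partial = increment(1)             # returns a future; the task runs asynchronously
    result = compss_wait_on(partial)   # synchronization swaps the future for the value
    return result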