def parse_config_file(self):
    # type: () -> None
    """ Parse the config file and set self's task_type, decor, and config args.

    :return: None
    :raises PyCOMPSsException: If the executor type is unsupported or if
                               mandatory arguments are missing.
    """
    file_path = self.kwargs[CONFIG_FILE]
    # Use a context manager so the config file descriptor is always closed
    # (the previous json.load(open(...)) leaked the file handle).
    with open(file_path, "r") as config_fd:
        config = json.load(config_fd)
    properties = config.get(PROPERTIES, {})
    exec_type = config.get(TYPE, None)
    if exec_type is None:
        print("Execution type not provided for @software task")
    elif exec_type.lower() not in SUPPORTED_DECORATORS:
        msg = "Error: Executor Type {} is not supported for software task."\
            .format(exec_type)
        raise PyCOMPSsException(msg)
    else:
        exec_type = exec_type.lower()
        self.task_type, self.decor = SUPPORTED_DECORATORS[exec_type]
        mand_args = self.task_type.MANDATORY_ARGUMENTS  # type: ignore
        # Every mandatory argument of the resolved task type must appear
        # in the "properties" section of the config file.
        if not all(arg in properties for arg in mand_args):
            msg = "Error: Missing arguments for '{}'.".format(self.task_type)
            raise PyCOMPSsException(msg)
    self.config_args = properties
    self.constraints = config.get("constraints", None)
    self.container = config.get("container", None)
def __parse_chunk_size__(chunk_size):
    # type: (str) -> int
    """ Parses chunk size as string and returns its value as integer.

    :param chunk_size: Chunk size as string (may be "$VAR" or "${VAR}").
    :return: Chunk size as integer.
    :raises PyCOMPSsException: Can not cast string to int error.
    """
    stripped = chunk_size.strip()
    # Check if it is an environment variable to be loaded
    if stripped.startswith("$"):
        # Chunk size is an ENV variable, load it
        env_var = stripped[1:]  # Remove $
        if env_var.startswith("{"):
            env_var = env_var[1:-1]  # remove brackets
        try:
            parsed_chunk_size = int(os.environ[env_var])
        except (KeyError, ValueError):
            # KeyError is caught too: an undefined environment variable must
            # report the same PyCOMPSs error as a non-numeric one (previously
            # the KeyError escaped as-is).
            raise PyCOMPSsException(cast_env_to_int_error(CHUNK_SIZE))
    else:
        # ChunkSize is in string form, cast it
        try:
            parsed_chunk_size = int(chunk_size)
        except ValueError:
            raise PyCOMPSsException(cast_string_to_int_error(CHUNK_SIZE))
    return parsed_chunk_size
def __get_programs_params__(self):
    # type: () -> list
    """ Build the flat parameter list for the programs of an MPMD MPI task.

    Also accumulates the total number of processes into self.processes.

    :return: list(programs_length, binary, params, processes, ...)
    :raises PyCOMPSsException: If a program is not a dict or lacks a binary.
    """
    programs = self.kwargs[PROGRAMS]
    programs_params = [str(len(programs))]
    for program in programs:
        if not isinstance(program, dict):
            raise PyCOMPSsException(
                "Incorrect 'program' param in MPMD MPI")
        binary = program.get(BINARY, None)
        if not binary:
            raise PyCOMPSsException("No binary file provided for MPMD MPI")
        params = program.get(PARAMS, "[unassigned]")
        # Look up the process count once and reuse it both for the string
        # parameter and for the accumulated total (avoids a second lookup).
        procs = program.get(PROCESSES, 1)
        programs_params.extend([binary, params, str(procs)])
        # increase total # of processes for this mpmd task
        self.processes += procs
    return programs_params
def __init__(self, logger, values, config_file_path=None):
    """ Dummy storage task context constructor: must never be called.

    Records the given attributes, logs the error and raises immediately.

    :param logger: Logger to report the error through.
    :param values: Task context values.
    :param config_file_path: Optional configuration file path.
    :raises PyCOMPSsException: Always.
    """
    err_msg = "Unexpected call to dummy storage task context."
    self.logger = logger
    self.values = values
    self.config_file_path = config_file_path
    # Log before failing so the misuse is visible in the runtime logs.
    self.logger.error(err_msg)
    raise PyCOMPSsException(err_msg)
def get_parameter_from_dictionary(d):
    # type: (dict) -> Parameter
    """ Convert a dictionary to Parameter.

    Given a dictionary with fields like Type, Direction, etc. returns an
    actual Parameter object.

    :param d: Parameter description as dictionary.
    :return: an actual Parameter object.
    :raises PyCOMPSsException: If d is not a dictionary.
    """
    if not isinstance(d, dict):
        raise PyCOMPSsException("Unexpected type for parameter.")
    # No explicit Type means the default (IN) parameter.
    if Type in d:
        parameter = get_new_parameter(d[Type].key)
    else:
        parameter = Parameter()
    # Optional modifier keys mapped to the Parameter attribute they set.
    modifiers = (
        (Direction, "direction"),
        (StdIOStream, "stream"),
        (Prefix, "prefix"),
        (Depth, "depth"),
        (Weight, "weight"),
        (Keep_rename, "keep_rename"),
        (Cache, "cache"),
    )
    for key, attribute in modifiers:
        if key in d:
            setattr(parameter, attribute, d[key])
    return parameter
def __check_deprecated_arguments__(
        deprecated_arguments,  # type: typing.Set[str]
        argument_names,        # type: typing.List[str]
        where                  # type: str
):  # type: (...) -> None
    """ Look for deprecated arguments and display a warning when found.

    :param deprecated_arguments: Set of deprecated arguments
    :param argument_names: List of arguments to check
    :param where: Location of the argument
    :return: None
    :raise PyCOMPSsException: With the unsupported argument
    """
    for argument in argument_names:
        # "isModifier" is no longer supported at all: fail fast with a hint.
        if argument == "isModifier":
            message = "ERROR: Unsupported argument: isModifier Found in %s.\n" \
                      "       Please, use: target_direction" % str(where)
            print(message, file=sys.stderr)  # also show the warn in stderr
            raise PyCOMPSsException("Unsupported argument: " + str(argument))
        if argument in deprecated_arguments:
            # Derive the snake_case replacement from the camelCase name.
            current_argument = re.sub("([A-Z]+)", r"_\1", argument).lower()
            message = "WARNING: Deprecated argument: %s Found in %s.\n" \
                      "         Please, use: %s" % (str(argument),
                                                    str(where),
                                                    current_argument)
            # The print through stdout is disabled to prevent the message to
            # appear twice in the console. So the warning message will only
            # appear in STDERR.
            # print(message)  # show the warn through stdout
            print(message, file=sys.stderr)  # also show the warn in stderr
def __init__(self, *args, **kwargs):
    # type: (*typing.Any, **typing.Any) -> None
    """ Store arguments passed to the decorator.

    self = itself.
    args = not used.
    kwargs = dictionary with the given on_failure.

    :param args: Arguments.
    :param kwargs: Keyword arguments.
    :raises PyCOMPSsException: If the on-failure action is unsupported.
    """
    decorator_name = "".join(("@", OnFailure.__name__.lower()))
    # super(OnFailure, self).__init__(decorator_name, *args, **kwargs)
    self.decorator_name = decorator_name
    self.args = args
    self.kwargs = kwargs
    self.scope = context.in_pycompss()
    self.core_element = None  # type: typing.Any
    self.core_element_configured = False
    if not self.scope:
        # Outside the PyCOMPSs scope nothing else needs to be resolved.
        return
    # Check the arguments
    check_mandatory_arguments(MANDATORY_ARGUMENTS,
                              list(kwargs.keys()),
                              decorator_name)
    # Save the parameters into self so that they can be accessed when
    # the task fails and the action needs to be taken
    self.on_failure_action = kwargs.pop(MANAGEMENT)
    # Check supported management values
    if self.on_failure_action not in SUPPORTED_MANAGEMENT:
        raise PyCOMPSsException(
            "ERROR: Unsupported on failure action: %s" %
            self.on_failure_action)
    # Keep all defaults in a dictionary
    self.defaults = kwargs
def check_task(job_out, job_err):
    """ Validate the stdout/stderr files of a finished task.

    :param job_out: Path to the task stdout file.
    :param job_err: Path to the task stderr file.
    :raises PyCOMPSsException: If the task reported an error, an exception,
                               or did not finish with an Ok status.
    """
    # A non-empty stderr file means the task failed outright.
    if os.path.exists(job_err) and os.path.getsize(job_err) > 0:  # noqa
        raise PyCOMPSsException(
            "An error happened in the task. Please check " + job_err)
    with open(job_out, "r") as out_fd:
        content = out_fd.read()
    if "ERROR" in content:
        raise PyCOMPSsException(
            "An error happened in the task. Please check " + job_out)
    if "EXCEPTION" in content or "Exception" in content:
        raise PyCOMPSsException(
            "An exception happened in the task. Please check " + job_out)
    if "END TASK execution. Status: Ok" not in content:
        raise PyCOMPSsException(
            "The task was supposed to be OK. Please check " + job_out)
def __process_reduction_params__(self):
    # type: () -> None
    """ Normalize the chunk size and is_reduce flags from the decorator.

    Stores the resolved values back into self.kwargs for @task to consume.

    :return: None
    :raises PyCOMPSsException: If chunk_size has an unsupported type.
    """
    # Resolve @reduce specific parameters (default chunk size: 0).
    raw_chunk_size = self.kwargs.get(CHUNK_SIZE, 0)
    if isinstance(raw_chunk_size, int):
        chunk_size = raw_chunk_size
    elif isinstance(raw_chunk_size, str):
        # Convert string to int
        chunk_size = self.__parse_chunk_size__(raw_chunk_size)
    else:
        raise PyCOMPSsException(
            "ERROR: Wrong chunk_size value at @reduction decorator.")
    # The reduction flag defaults to True when not given.
    is_reduce = self.kwargs.get(IS_REDUCE, True)
    if __debug__:
        logger.debug("The task is_reduce flag is set to: %s" %
                     str(is_reduce))
        logger.debug("This Reduction task will have %s sized chunks" %
                     str(chunk_size))
    # Set the chunk_size variable in kwargs for its usage in @task
    self.kwargs[CHUNK_SIZE] = chunk_size
    self.kwargs[IS_REDUCE] = is_reduce
def __resolve_collection_layout_params__(self):
    # type: () -> list
    """ Resolve the collection layout, such as blocks, strides, etc.

    :return: list(num_layouts, param_name, block_count, block_length, stride)
    :raises PyCOMPSsException: If a collection layout defines block_length
                               or stride without block_count.
    """
    num_layouts = 0
    layout_params = []
    for key, collection_layout in self.kwargs.items():
        if "_layout" not in key:
            continue
        num_layouts += 1
        param_name = key.split("_layout")[0]
        block_count = self.__get_block_count__(collection_layout)
        block_length = self.__get_block_length__(collection_layout)
        stride = self.__get_stride__(collection_layout)
        # block_length / stride are meaningless without a block_count.
        if block_count == -1 and (block_length != -1 or stride != -1):
            msg = "Error: collection_layout must contain block_count!"
            raise PyCOMPSsException(msg)
        layout_params.extend(
            [param_name, str(block_count), str(block_length), str(stride)])
    # The number of layouts is prepended to the flat parameter list.
    layout_params.insert(0, str(num_layouts))
    return layout_params
def __init__(self, *args, **kwargs):
    """ Store arguments passed to the decorator.

    self = itself.
    args = not used.
    kwargs = dictionary with the given on_failure.

    :param args: Arguments.
    :param kwargs: Keyword arguments.
    :raises PyCOMPSsException: If the on-failure action is unsupported.
    """
    decorator_name = "".join(('@', OnFailure.__name__.lower()))
    super(OnFailure, self).__init__(decorator_name, *args, **kwargs)
    if not self.scope:
        # Nothing else to resolve outside the PyCOMPSs scope.
        return
    # Check the arguments
    check_mandatory_arguments(MANDATORY_ARGUMENTS,
                              list(kwargs.keys()),
                              decorator_name)
    # Save the parameters into self so that they can be accessed when
    # the task fails and the action needs to be taken
    self.on_failure_action = kwargs.pop("management")
    # Check supported management values
    if self.on_failure_action not in SUPPORTED_MANAGEMENT:
        raise PyCOMPSsException(
            "ERROR: Unsupported on failure action: " + self.on_failure_action)  # noqa: E501
    # Keep all defaults in a dictionary
    self.defaults = kwargs
def __resolve_fail_by_exit_value__(self):
    # type: () -> None
    """ Resolve the fail by exit value.

    Updates self.kwargs:
        - Updates fail_by_exit_value if necessary (defaults to 'false').

    :return: None
    :raises PyCOMPSsException: If fail_by_exit_value has an unsupported type.
    """
    if 'fail_by_exit_value' in self.kwargs:
        fail_by_ev = self.kwargs['fail_by_exit_value']
        if isinstance(fail_by_ev, str):
            # Accepted argument as-is (may be an environment variable).
            pass
        elif isinstance(fail_by_ev, (bool, int)):
            # bool and int were handled by two identical branches; since
            # both are normalized with str(), a single branch covers them
            # (bool is a subclass of int anyway).
            self.kwargs['fail_by_exit_value'] = str(fail_by_ev)
        else:
            raise PyCOMPSsException(
                "Incorrect format for fail_by_exit_value property. "  # noqa: E501
                "It should be boolean or an environment variable"
            )  # noqa: E501
    else:
        # Default behaviour: do not fail on non-zero exit values.
        self.kwargs['fail_by_exit_value'] = 'false'
def __check_deprecated_arguments(deprecated_arguments, arguments, where):
    """ Look for deprecated arguments and display a warning when found.

    :param deprecated_arguments: Set of deprecated arguments
    :param arguments: List of arguments to check
    :param where: Location of the argument
    :return: None
    :raise PyCOMPSsException: With the unsupported argument
    """
    for argument in arguments:
        # "isModifier" is no longer supported at all: fail fast with a hint.
        if argument == 'isModifier':
            message = "ERROR: Unsupported argument: isModifier Found in " + \
                      str(where) + ".\n" + \
                      "       Please, use: target_direction"
            print(message, file=sys.stderr)  # also show the warn in stderr
            raise PyCOMPSsException("Unsupported argument: " + str(argument))
        if argument in deprecated_arguments:
            # Derive the snake_case replacement from the camelCase name.
            current_argument = re.sub('([A-Z]+)', r'_\1', argument).lower()
            message = "WARNING: Deprecated argument: " + str(argument) + \
                      " Found in " + str(where) + ".\n" + \
                      "         Please, use: " + current_argument
            # The print through stdout is disabled to prevent the message to
            # appear twice in the console. So the warning message will only
            # appear in STDERR.
            # print(message)  # show the warn through stdout
            print(message, file=sys.stderr)  # also show the warn in stderr
def retrieve_object_from_cache(logger, cache_ids, cache_queue, identifier,
                               parameter_name, user_function, cache_profiler):  # noqa
    # type: (typing.Any, typing.Any, Queue, str, str, typing.Callable, bool) -> typing.Any
    """ Retrieve an object from the given cache proxy dict.

    :param logger: Logger where to push messages.
    :param cache_ids: Cache proxy dictionary.
    :param cache_queue: Cache notification queue.
    :param identifier: Object identifier.
    :param parameter_name: Parameter name.
    :param user_function: Function name.
    :param cache_profiler: If cache profiling is enabled.
    :return: Tuple of (object from cache, shared-memory handle backing it).
             The handle must stay referenced while the object is in use,
             since the ndarray case is a zero-copy view over its buffer.
    :raises PyCOMPSsException: If the cached entry has an unknown shared type.
    """
    with event_inside_worker(RETRIEVE_OBJECT_FROM_CACHE_EVENT):
        # Emit size 0 first; the real deserialization size is emitted once known.
        emit_manual_event_explicit(BINDING_DESERIALIZATION_CACHE_SIZE_TYPE, 0)
        identifier = __get_file_name__(identifier)
        if __debug__:
            logger.debug(HEADER + "Retrieving: " + str(identifier))
        # Cache entry layout: id, shape, dtype, _, hit count, shared type tag.
        obj_id, obj_shape, obj_d_type, _, obj_hits, shared_type = cache_ids[identifier]  # noqa: E501
        output = None  # type: typing.Any
        existing_shm = None  # type: typing.Any
        object_size = 0
        if shared_type == SHARED_MEMORY_TAG:
            # ndarray built directly over the shared-memory buffer (no copy).
            existing_shm = SharedMemory(name=obj_id)
            output = np.ndarray(obj_shape, dtype=obj_d_type, buffer=existing_shm.buf)  # noqa: E501
            object_size = len(existing_shm.buf)
        elif shared_type == SHAREABLE_LIST_TAG:
            existing_shm = ShareableList(name=obj_id)
            output = list(existing_shm)
            object_size = len(existing_shm.shm.buf)
        elif shared_type == SHAREABLE_TUPLE_TAG:
            existing_shm = ShareableList(name=obj_id)
            output = tuple(existing_shm)
            object_size = len(existing_shm.shm.buf)
        # Currently unsupported since conversion requires lists of lists.
        # elif shared_type == SHAREABLE_DICT_TAG:
        #     existing_shm = ShareableList(name=obj_id)
        #     output = dict(existing_shm)
        else:
            raise PyCOMPSsException("Unknown cacheable type.")
        if __debug__:
            logger.debug(HEADER + "Retrieved: " + str(identifier))
        # Now emit the actual deserialized object size.
        emit_manual_event_explicit(BINDING_DESERIALIZATION_CACHE_SIZE_TYPE,
                                   object_size)
        # Profiling
        filename = filename_cleaned(identifier)
        function_name = function_cleaned(user_function)
        if cache_profiler:
            cache_queue.put(("GET", (filename, parameter_name, function_name)))
        # Add hit
        cache_ids[identifier][4] = obj_hits + 1
        return output, existing_shm
def check_output(stdout, stderr, error_expected=False):
    """ Validate the stdout/stderr files and remove them on success.

    :param stdout: Path to the stdout file (removed when no error is found).
    :param stderr: Path to the stderr file (removed when no error is found).
    :param error_expected: If a non-empty stderr is acceptable.
    :raises PyCOMPSsException: If stderr is unexpectedly non-empty.
    """
    unexpected_error = (os.path.exists(stderr)
                        and os.path.getsize(stderr) > 0
                        and not error_expected)
    if unexpected_error:
        # Non empty file exists
        raise PyCOMPSsException("An error happened. Please check " + stderr)
    # Clean up both redirection files on success.
    os.remove(stdout)
    os.remove(stderr)
def get_redirection_file_names():
    """ Retrieves the stdout and stderr file names.

    :return: The stdout and stderr file names.
    :raises PyCOMPSsException: If redirection is not active.
    """
    if not is_redirected():
        raise PyCOMPSsException("The runtime stdout and stderr are not being redirected.")  # noqa: E501
    return _STDOUT, _STDERR
def shutdown_handler(signal, frame):  # noqa
    """ MPI exception signal handler

    Do not remove the parameters: the signal module requires this exact
    (signal, frame) handler signature.

    :param signal: shutdown signal
    :param frame: Frame
    :return: None
    :raises PyCOMPSsException: Always — converts the signal into an exception.
    """
    raise PyCOMPSsException("Received SIGTERM")
def is_redirected():
    """ Check if the stdout and stderr are being redirected.

    Both module-level file names must agree: either both set or both unset.

    :return: If stdout/stderr are being redirected.
    :raises PyCOMPSsException: If only one of the two is set.
    """
    stdout_set = _STDOUT is not None
    stderr_set = _STDERR is not None
    if stdout_set and stderr_set:
        return True
    if not stdout_set and not stderr_set:
        return False
    raise PyCOMPSsException("Inconsistent status of _STDOUT and _STDERR")
def shutdown_handler(signal, frame):  # noqa
    """ Shutdown handler

    Do not remove the parameters: the signal module requires this exact
    (signal, frame) handler signature.

    :param signal: shutdown signal.
    :param frame: Frame.
    :return: None
    :raises PyCOMPSsException: Received signal.
    """
    raise PyCOMPSsException("Received SIGTERM")
def get_redirection_file_names():
    # type: () -> typing.Tuple[str, str]
    """ Retrieves the stdout and stderr file names.

    :return: The stdout and stderr file names.
    :raises PyCOMPSsException: If redirection is not active.
    """
    if not is_redirected():
        message = "The runtime stdout and stderr are not being redirected."
        raise PyCOMPSsException(message)
    return _STDOUT, _STDERR
def __error_mandatory_argument(argument, decorator):
    """ Raises an exception when the argument is mandatory in the decorator

    :param argument: Argument name
    :param decorator: Decorator name
    :return: None
    :raise PyCOMPSsException: With the decorator and argument that produced
                              the error
    """
    message = "The argument " + str(argument) + \
              " is mandatory in the " + str(decorator) + " decorator."
    raise PyCOMPSsException(message)
def _wall_clock_exceed(signum, frame):
    # type: (int, typing.Any) -> None
    """ Task wall clock exceeded action: raise PyCOMPSs exception.

    Do not remove the parameters: the signal module requires this exact
    (signum, frame) handler signature.

    :param signum: Signal number.
    :param frame: Frame.
    :return: None
    :raises: PyCOMPSsException exception.
    """
    raise PyCOMPSsException("Application has reached its wall clock limit")
def is_redirected():
    # type: () -> bool
    """ Check if the stdout and stderr are being redirected.

    Both module-level file names must agree: either both set or both empty.

    :return: If stdout/stderr are being redirected.
    :raises PyCOMPSsException: If only one of the two is set.
    """
    stdout_set = _STDOUT != ""
    stderr_set = _STDERR != ""
    if stdout_set and stderr_set:
        return True
    if not stdout_set and not stderr_set:
        return False
    raise PyCOMPSsException("Inconsistent status of _STDOUT and _STDERR")
def get_compss_state_xml(log_path):
    # type: (str) -> str
    """ Check if there is any missing package and return the status xml full path.

    :param log_path: Absolute path of the log folder.
    :return: The compss state full path.
    :raises PyCOMPSsException: If a required package is missing.
    """
    if MISSING_DEPENDENCY:
        raise PyCOMPSsException("Missing %s package." % MISSING_DEPENDENCY)
    # The monitor writes the state file under <log_path>/monitor/.
    return os.path.join(log_path, "monitor", "COMPSs_state.xml")
def test_pycompss_exception():
    # Raising a PyCOMPSsException must preserve the given message verbatim,
    # and the exception must actually be raised.
    try:
        raise PyCOMPSsException(GENERIC_MESSAGE)
    except Exception as e:  # NOSONAR
        is_ok = True
        # The stringified exception must equal the original message.
        assert (
            str(e) == GENERIC_MESSAGE
        ), GENERIC_MESSAGE_ERROR
    else:
        # Reached only if the raise above did not throw — flag as failure.
        is_ok = False
    assert (
        is_ok
    ), "ERROR: The PyCOMPSsException has not been correctly raised"
def __error_mandatory_argument__(decorator, argument):
    # type: (str, str) -> None
    """ Raises an exception when the argument is mandatory in the decorator

    :param argument: Argument name
    :param decorator: Decorator name
    :return: None
    :raise PyCOMPSsException: With the decorator and argument that produced
                              the error
    """
    message = "The argument %s is mandatory in the %s decorator." % (
        str(argument), str(decorator))
    raise PyCOMPSsException(message)
def start_watching(self):
    # type: () -> None
    """ Start a thread that monitors the redirected stdout and stderr files.

    :return: None
    :raises PyCOMPSsException: If redirection is not active.
    """
    if not is_redirected():
        raise PyCOMPSsException("Can not find the stdout and stderr.")
    self.running = True
    out_file_name, err_file_name = get_redirection_file_names()
    # The follower thread tails both files until self.running is cleared.
    follower = threading.Thread(target=self.__std_follower__,
                                args=(out_file_name, err_file_name))
    follower.start()
def reduce_f(*args, **kwargs):
    """ Wrapper that injects the reduction parameters and runs the task.

    :param args: Task positional arguments.
    :param kwargs: Task keyword arguments.
    :return: Whatever the wrapped function returns.
    :raises PyCOMPSsException: If called outside the PyCOMPSs scope.
    """
    if not self.scope:
        raise PyCOMPSsException(not_in_pycompss("reduction"))
    if __debug__:
        logger.debug("Executing reduce_f wrapper.")
    # Forward the chunk size and is_reduce flags through kwargs so that
    # the @task decorator can consume them.
    for key in ('chunk_size', 'is_reduce'):
        kwargs[key] = self.kwargs[key]
    with keep_arguments(args, kwargs, prepend_strings=False):
        # Call the method
        result = func(*args, **kwargs)
    return result
def _register_object(self, obj, assign_new_key=False, force_insertion=False):
    # type: (typing.Any, bool, bool) -> str
    """ Registers an object into the object tracker.

    If not found or we are forced to, we create a new identifier for this
    object, deleting the old one if necessary. We can also query for some
    object without adding it in case of failure.

    Identifiers are of the form _runtime_id-_current_id in order to avoid
    having two objects from different applications with the same identifier
    (and thus file name).
    This function updates the internal self.current_id to guarantee that
    each time returns a new identifier.

    :param obj: Object to analyse.
    :param assign_new_key: Assign new key.
    :param force_insertion: force insertion.
    :return: Object id.
    """
    # Force_insertion implies assign_new_key
    assert not force_insertion or assign_new_key

    identifier = self.is_tracked(obj)
    if identifier != "":
        if force_insertion:
            # Drop both mappings for the stale entry; a fresh identifier
            # is assigned in the assign_new_key branch below.
            self.obj_id_to_obj.pop(identifier)
            address = self._get_object_address(obj)
            self.address_to_obj_id.pop(address)
        else:
            # Already tracked and no forced re-insertion: reuse the id.
            return identifier

    if assign_new_key:
        # This object was not in our object database or we were forced to
        # remove it, lets assign it an identifier and store it.
        # Generate a new identifier
        new_id = "%s-%d" % (self.runtime_id, self.current_id)
        self.current_id += 1
        # Register both directions: id -> object and address -> id.
        self.obj_id_to_obj[new_id] = obj
        address = self._get_object_address(obj)
        self.address_to_obj_id[address] = new_id
        return new_id

    # Untracked object queried without assign_new_key: nothing sensible
    # to return, so fail loudly.
    raise PyCOMPSsException("Reached unexpected object registry case.")
def get_logging_cfg_file(log_level):
    # type: (str) -> str
    """ Retrieves the logging configuration file.

    :param log_level: Log level [ 'trace'|'debug'|'info'|'api'|'off' ].
    :return: Logging configuration file.
    :raise PyCOMPSsException: Unsupported log level.
    """
    cfg_files = {
        'trace': 'logging_debug.json',  # trace level == debug level
        'debug': 'logging_debug.json',
        'info': 'logging_info.json',
        'api': 'logging_off.json',      # api level == off level
        'off': 'logging_off.json',
    }
    try:
        return cfg_files[log_level]
    except KeyError:
        raise PyCOMPSsException("Unsupported logging level.")