def initialize_environment_core(core_config=None, libraries=None, delete=False):
    """Initialize the RAFCON core environment for a test run.

    :param core_config: either a dict of config key/value overrides or a
        (config_file, config_path) tuple pointing to a config file to load
    :param libraries: dict mapping library names to paths; if None, taken from
        core_config's 'LIBRARY_PATHS' entry when present
    :param bool delete: not used in this function body -- presumably consumed by a
        caller or kept for signature compatibility; TODO confirm
    :raises EnvironmentError: if the StateMachineManager still holds state machines
    """
    from rafcon.core.config import global_config
    import rafcon.core.singleton
    # the test environment must start from a clean slate
    if rafcon.core.singleton.state_machine_manager.state_machines:
        raise EnvironmentError("The environment has to have an empty StateMachineManager but here the following "
                               "state machines are still existing: \n{0}"
                               "".format(rafcon.core.singleton.state_machine_manager.state_machines))

    # serialize test runs that share the global singletons
    test_multithreading_lock.acquire()

    # preserve LIBRARY_PATHS if handed with dict -> can be already be the dict of the global_config object
    if libraries is None and core_config is not None and 'LIBRARY_PATHS' in core_config:
        libraries = copy.deepcopy(core_config['LIBRARY_PATHS'])

    # initialize global core config
    if isinstance(core_config, tuple) and exists(join(core_config[1], core_config[0])):
        # core_config is (file_name, path): load that config file
        global_config.load(core_config[0], core_config[1])
        if global_config.get_config_value('LIBRARY_PATHS') is not None:
            libraries = copy.deepcopy(global_config.get_config_value('LIBRARY_PATHS'))
    else:
        # fall back to a config in the temporary config folder and apply dict overrides
        global_config.load(path=RAFCON_TEMP_PATH_CONFIGS)
        if isinstance(core_config, dict):
            for key, value in core_config.items():
                global_config.set_config_value(key, value)

    rewind_and_set_libraries(libraries=libraries)
def from_dict(cls, dictionary):
    """Build a state instance from its dictionary representation.

    :param dict dictionary: serialized state attributes
    :return: the freshly constructed state
    """
    safe_init = global_config.get_config_value("LOAD_SM_WITH_CHECKS", True)
    # 'income' was introduced later; older state machine versions don't have this set
    state = cls(dictionary['name'],
                dictionary['state_id'],
                dictionary['input_data_ports'],
                dictionary['output_data_ports'],
                dictionary.get('income', None),
                dictionary['outcomes'],
                safe_init=safe_init)
    try:
        state.description = dictionary['description']
    except (TypeError, KeyError):
        # (very) old state machines do not have a description field at all
        import traceback
        last_line = traceback.format_exc().splitlines()[-1]
        logger.warning("Erroneous description for state '{1}': {0}".format(last_line, dictionary['name']))
    return state
def unregister(self):
    """Flush pending entries and close the shelve file."""
    open_permissions = global_config.get_config_value(
        "EXECUTION_LOG_SET_READ_AND_WRITABLE_FOR_ALL", False)
    self._flush()
    self._close(open_permissions)
def parent(self, parent):
    """Setter for the parent state of the state element

    :param rafcon.core.states.state.State parent: Parent state or None
    """
    if parent is None:
        self._parent = None
    else:
        from rafcon.core.states.state import State
        assert isinstance(parent, State)

        # remember the old parent so it can be restored if validation fails
        old_parent = self.parent
        self._parent = ref(parent)

        valid, message = self._check_validity()
        if not valid:
            # roll back to the previous parent (or None) before raising
            if not old_parent:
                self._parent = None
            else:
                self._parent = ref(old_parent)

            class_name = self.__class__.__name__

            if global_config.get_config_value("LIBRARY_RECOVERY_MODE") is True:
                do_delete_item = True
                # In case of just the data type is wrong raise an Exception but keep the data flow
                if "not have matching data types" in message:
                    do_delete_item = False
                    # keep the new parent in this case despite the failed check
                    self._parent = ref(parent)
                raise RecoveryModeException("{0} invalid within state \"{1}\" (id {2}): {3}".format(
                    class_name, parent.name, parent.state_id, message), do_delete_item=do_delete_item)
            else:
                raise ValueError("{0} invalid within state \"{1}\" (id {2}): {3} {4}".format(
                    class_name, parent.name, parent.state_id, message, self))
def _add_new_execution_history(self):
    """Create a fresh ExecutionHistory, optionally backed by a shelve log file.

    :return: the newly appended ExecutionHistory instance
    """
    history = ExecutionHistory()
    if global_config.get_config_value("EXECUTION_LOG_ENABLE", False):
        base_dir = global_config.get_config_value("EXECUTION_LOG_PATH",
                                                  "%RAFCON_TEMP_PATH_BASE/execution_logs")
        # expand the %RAFCON_TEMP_PATH_BASE placeholder used in config files
        if base_dir.startswith('%RAFCON_TEMP_PATH_BASE'):
            base_dir = base_dir.replace('%RAFCON_TEMP_PATH_BASE', RAFCON_TEMP_PATH_BASE)
        if not os.path.exists(base_dir):
            os.makedirs(base_dir)
        timestamp = time.strftime('%Y-%m-%d-%H:%M:%S', time.localtime())
        file_name = '%s_rafcon_execution_log_%s.shelve' % (timestamp,
                                                           self.root_state.name.replace(' ', '-'))
        storage = ExecutionHistoryStorage(os.path.join(base_dir, file_name))
        history.set_execution_history_storage(storage)
    self._execution_histories.append(history)
    return history
def from_dict(cls, dictionary):
    """Build a container state instance from its dictionary representation.

    :param dict dictionary: serialized container state attributes
    :return: the state, or a (state, transitions, data_flows) triple when child
        states are absent and the caller must wire them up later
    """
    states = dictionary.get('states', None)
    transitions = dictionary['transitions']
    data_flows = dictionary['data_flows']
    safe_init = global_config.get_config_value("LOAD_SM_WITH_CHECKS", True)
    # without child states, transitions/data flows cannot be validated yet,
    # so they are withheld from the constructor and returned to the caller
    state = cls(name=dictionary['name'],
                state_id=dictionary['state_id'],
                input_data_ports=dictionary['input_data_ports'],
                output_data_ports=dictionary['output_data_ports'],
                outcomes=dictionary['outcomes'],
                states=None,
                transitions=transitions if states else None,
                data_flows=data_flows if states else None,
                scoped_variables=dictionary['scoped_variables'],
                load_from_storage=True,
                safe_init=safe_init)
    try:
        state.description = dictionary['description']
    except (TypeError, KeyError):
        # (very) old state machines do not have a description field
        import traceback
        last_line = traceback.format_exc().splitlines()[-1]
        logger.warning("Erroneous description for state '{1}': {0}".format(last_line, dictionary['name']))
    if states:
        return state
    return state, dictionary['transitions'], dictionary['data_flows']
def execute(self, state, inputs=None, outputs=None, backward_execution=False):
    """Run the user 'execute' (or 'backward_execute') function of the script.

    :param ExecutionState state: the state belonging to the execute function, refers to 'self'
    :param dict inputs: the input data of the script
    :param dict outputs: the output data of the script
    :param bool backward_execution: Flag whether to run the script in backwards mode
    :return: Return value of the execute script
    :rtype: str | int
    """
    if not self.compiled_module or global_config.get_config_value(
            "SCRIPT_RECOMPILATION_ON_STATE_EXECUTION", True):
        self.compile_module()

    inputs = inputs or {}
    outputs = outputs or {}

    gvm = rafcon.core.singleton.global_variable_manager
    if not backward_execution:
        return self._compiled_module.execute(state, inputs, outputs, gvm)
    if hasattr(self._compiled_module, "backward_execute"):
        return self._compiled_module.backward_execute(state, inputs, outputs, gvm)
    logger.debug("No backward execution method found for state %s" % state.name)
    return None
def __init__(self, name=None, data_type=None, default_value=None, data_port_id=None, parent=None,
             force_type=False, init_without_default_value_type_exceptions=False, safe_init=True):
    """Initialize a data port.

    :param name: name of the port
    :param data_type: type the port's values must have
    :param default_value: default value of the port
    :param data_port_id: unique id of the port; generated (with a warning) if None
    :param parent: the parent state of the port
    :param bool force_type: allow direct instantiation of the DataPort base class
    :param bool init_without_default_value_type_exceptions: suppress type errors
        for the default value during initialization
    :param bool safe_init: run the validity-checked initialization path
    """
    # DataPort itself is effectively abstract unless force_type explicitly allows it
    if type(self) == DataPort and not force_type:
        raise NotImplementedError
    super(DataPort, self).__init__(safe_init=safe_init)
    self._no_type_error_exceptions = True if init_without_default_value_type_exceptions else False
    # in library recovery mode, type errors must never abort loading
    if global_config.get_config_value("LIBRARY_RECOVERY_MODE") is True:
        self._no_type_error_exceptions = True
    self._was_forced_type = force_type
    if data_port_id is None:
        self._data_port_id = generate_data_port_id([])
        logger.warning("Look out: Instantiation of a data port without specifying its id is not recommended! The "
                       "add_data_port* functions of the State/ContainerState class should be used!")
    else:
        self._data_port_id = data_port_id
    if safe_init:
        DataPort._safe_init(self, name, data_type, default_value, parent)
    else:
        DataPort._unsafe_init(self, name, data_type, default_value, parent)
def remove_all_libraries(init_library_manager=True):
    """Remove all library paths from the global configuration.

    :param bool init_library_manager: whether to re-initialize the library manager
        after the paths have been removed
    """
    from rafcon.core.config import global_config
    library_paths = global_config.get_config_value("LIBRARY_PATHS")
    # clear() empties the dict in place (the very object stored in the config),
    # replacing the previous copy-keys-then-delete loop
    library_paths.clear()
    if init_library_manager:
        rafcon.core.singleton.library_manager.initialize()
def from_dict(cls, dictionary):
    """Create a data flow from its dictionary representation.

    :param dict dictionary: serialized data flow attributes
    """
    safe_init = global_config.get_config_value("LOAD_SM_WITH_CHECKS", True)
    return cls(dictionary['from_state'], dictionary['from_key'],
               dictionary['to_state'], dictionary['to_key'],
               dictionary['data_flow_id'], safe_init=safe_init)
def from_dict(cls, dictionary):
    """Create a transition from its dictionary representation.

    :param dict dictionary: serialized transition attributes
    """
    safe_init = global_config.get_config_value("LOAD_SM_WITH_CHECKS", True)
    return cls(dictionary['from_state'], dictionary['from_outcome'],
               dictionary['to_state'], dictionary['to_outcome'],
               dictionary['transition_id'], safe_init=safe_init)
def from_dict(cls, dictionary):
    """Create a scoped data instance from its dictionary representation.

    :param dict dictionary: serialized scoped data attributes
    """
    safe_init = global_config.get_config_value("LOAD_SM_WITH_CHECKS", True)
    # use cls instead of the hard-coded ScopedData so subclasses deserialize to
    # their own type, consistent with the other from_dict classmethods
    return cls(dictionary['name'], dictionary['value'], dictionary['value_type'],
               dictionary['from_state'], dictionary['data_port_type'], safe_init=safe_init)
def start(self, state_machine_id=None, start_state_path=None):
    """ Start state machine

    If no state machine is running start a specific state machine.
    If no state machine is provided the currently active state machine is started.
    If there is already a state machine running, just resume it without taking the passed
    state_machine_id argument into account.

    :param state_machine_id: The id if the state machine to be started
    :param start_state_path: The path of the state in the state machine, from which the execution will start
    :return:
    """
    if not self.finished_or_stopped():
        # resume path: a state machine is already active
        logger.debug("Resume execution engine ...")
        self.run_to_states = []
        if self.state_machine_manager.get_active_state_machine() is not None:
            self.state_machine_manager.get_active_state_machine().root_state.recursively_resume_states()
        # warn if the caller asked to start a different machine than the one being resumed
        if isinstance(state_machine_id, int) and \
                state_machine_id != self.state_machine_manager.get_active_state_machine().state_machine_id:
            logger.info("Resumed state machine with id {0} but start of state machine id {1} was requested."
                        "".format(self.state_machine_manager.get_active_state_machine().state_machine_id,
                                  state_machine_id))
        self.set_execution_mode(StateMachineExecutionStatus.STARTED)
    else:
        # do not start another state machine before the old one did not finish its execution
        if self.state_machine_running:
            logger.warning("An old state machine is still running! Make sure that it terminates,"
                           " before you can start another state machine! \n{0}".format(self))
            return
        logger.debug("Start execution engine ...")
        if state_machine_id is not None:
            self.state_machine_manager.active_state_machine_id = state_machine_id
        if not self.state_machine_manager.active_state_machine_id:
            logger.error("There exists no active state machine!")
            return
        # if recompilation-on-execution is disabled, scripts must be compiled up front
        if not global_config.get_config_value("SCRIPT_RECOMPILATION_ON_STATE_EXECUTION", True):
            self.recompile_execution_scripts_recursively()
        self.set_execution_mode(StateMachineExecutionStatus.STARTED)
        self.start_state_paths = []
        if start_state_path:
            # record every prefix of the start state path, e.g. "a", "a/b", "a/b/c"
            path_list = start_state_path.split("/")
            cur_path = ""
            for path in path_list:
                if cur_path == "":
                    cur_path = path
                else:
                    cur_path = cur_path + "/" + path
                self.start_state_paths.append(cur_path)
        self._run_active_state_machine()
def get_storage_id_for_state(state):
    """ Calculates the storage id of a state. This ID can be used for generating the file path for a state.

    :param rafcon.core.states.state.State state: state the storage_id should is composed for
    """
    if not global_config.get_config_value('STORAGE_PATH_WITH_STATE_NAME'):
        return state.state_id

    suffix = ID_NAME_DELIMITER + state.state_id
    # keep the whole folder name within the common 255-character limit
    name_budget = 255 - len(suffix)
    max_length = global_config.get_config_value('MAX_LENGTH_FOR_STATE_NAME_IN_STORAGE_PATH')
    # TODO: should we allow "None" in config file?
    if max_length is None or max_length == "None" or max_length > name_budget:
        if name_budget < len(state.name):
            logger.info("The storage folder name is forced to be maximal 255 characters in length.")
        max_length = name_budget
    return limit_text_to_be_path_element(state.name, max_length) + suffix
def join(self):
    """Wait for root state to finish execution"""
    self._root_state.join()

    # execution finished, close execution history log file (if present)
    if self._execution_histories:
        last_history = self._execution_histories[-1]
        if last_history.execution_history_storage is not None:
            open_permissions = global_config.get_config_value(
                "EXECUTION_LOG_SET_READ_AND_WRITABLE_FOR_ALL", False)
            last_history.execution_history_storage.close(open_permissions)

    from rafcon.core.states.state import StateExecutionStatus
    self._root_state.state_execution_status = StateExecutionStatus.INACTIVE
def from_dict(cls, dictionary):
    """Create a port instance from its dictionary representation.

    :param dict dictionary: serialized port attributes
    """
    # 'scoped_variable_id' is the legacy key name; kept for backwards compatibility
    if 'scoped_variable_id' in dictionary:
        port_id = dictionary['scoped_variable_id']
    else:
        port_id = dictionary['data_port_id']
    safe_init = global_config.get_config_value("LOAD_SM_WITH_CHECKS", True)
    return cls(dictionary['name'], dictionary['data_type'], dictionary['default_value'],
               port_id, safe_init=safe_init)
def backward_step(self):
    """Take a backward step for all active states in the state machine"""
    logger.debug("Executing backward step ...")
    # backward stepping relies on the in-memory execution history
    history_enabled = global_config.get_config_value("IN_MEMORY_EXECUTION_HISTORY_ENABLE", True)
    if not history_enabled:
        logger.error("Backward stepping is not allowed if the execution histories are disabled")
        return
    self.run_to_states = []
    self.set_execution_mode(StateMachineExecutionStatus.BACKWARD)
def __init__(self, library_path=None, library_name=None, version=None,  # library state specific attributes
             # the following are the container state specific attributes
             name=None, state_id=None, income=None, outcomes=None,
             input_data_port_runtime_values=None, use_runtime_value_input_data_ports=None,
             output_data_port_runtime_values=None, use_runtime_value_output_data_ports=None,
             allow_user_interaction=True, safe_init=True, skip_runtime_data_initialization=False):
    """Initialize a library state by resolving and copying the referenced library.

    :param library_path: path of the library within the library tree
    :param library_name: name of the library
    :param version: expected version of the library; mismatch raises AttributeError
    :param allow_user_interaction: whether the library manager may ask the user to
        locate a missing library (forced off when RAISE_ERROR_ON_MISSING_LIBRARY_STATES is set)
    :param safe_init: run the validity-checked initialization path
    :param skip_runtime_data_initialization: take the runtime values as given
        instead of deriving them via _handle_runtime_values
    :raises AttributeError: if the loaded library's version does not match
    """
    # this variable is set to true if the state initialization is finished! after initialization no change to the
    # library state is allowed any more
    self.initialized = False
    State.__init__(self, name, state_id, None, None, income, outcomes, safe_init=safe_init)

    self.library_path = library_path
    self.library_name = library_name
    self.version = version

    if global_config.get_config_value("RAISE_ERROR_ON_MISSING_LIBRARY_STATES", False):
        allow_user_interaction = False
    lib_os_path, new_library_path, new_library_name = \
        library_manager.get_os_path_to_library(library_path, library_name, allow_user_interaction)
    self.lib_os_path = lib_os_path

    # the library manager may have relocated the library (e.g. after user interaction)
    if library_path != new_library_path or library_name != new_library_name:
        self.library_name = new_library_name
        self.library_path = new_library_path
        # TODO this should trigger the marked_dirty of the state machine to become true
        logger.info("Changing information about location of library")
        logger.info("Old library name '{0}' was located at {1}".format(library_name, library_path))
        logger.info("New library name '{0}' is located at {1}".format(new_library_name, new_library_path))

    lib_version, state_copy = library_manager.get_library_state_copy_instance(self.lib_os_path)
    if not str(lib_version) == version and not str(lib_version) == "None":
        raise AttributeError("Library does not have the correct version!")

    self.state_copy = state_copy
    if safe_init:
        LibraryState._safe_init(self, name)
    else:
        LibraryState._unsafe_init(self, name)

    if not skip_runtime_data_initialization:
        # load_library_root_state_timer.stop(key)
        self._handle_runtime_values(input_data_port_runtime_values, use_runtime_value_input_data_ports,
                                    output_data_port_runtime_values, use_runtime_value_output_data_ports)
    else:
        # caller supplied ready-made runtime data; store it verbatim
        self._input_data_port_runtime_values = input_data_port_runtime_values
        self._use_runtime_value_input_data_ports = use_runtime_value_input_data_ports
        self._output_data_port_runtime_values = output_data_port_runtime_values
        self._use_runtime_value_output_data_ports = use_runtime_value_output_data_ports

    self.initialized = True
def _get_storage_path_on_file_system(root_state_name):
    """ Get the shelve file of a specific state machine

    :param root_state_name: the root name
    """
    base_dir = global_config.get_config_value("EXECUTION_LOG_PATH", "%RAFCON_TEMP_PATH_BASE/execution_logs")
    # expand the %RAFCON_TEMP_PATH_BASE placeholder used in config files
    if base_dir.startswith('%RAFCON_TEMP_PATH_BASE'):
        base_dir = base_dir.replace('%RAFCON_TEMP_PATH_BASE', RAFCON_TEMP_PATH_BASE)
    if not os.path.exists(base_dir):
        os.makedirs(base_dir)
    file_name = '%s_rafcon_execution_log_%s.shelve' % (str(datetime.datetime.now()),
                                                       root_state_name.replace(' ', '-'))
    return os.path.join(base_dir, file_name)
def __init__(self, root_state_name): self.consumers = dict() # Queue with infinite space self.execution_history_item_queue = Queue() self.condition = threading.Condition() self.interrupt = False self._consumers_exist = False self._file_system_consumer_exists = False if global_config.get_config_value("FILE_SYSTEM_EXECUTION_HISTORY_ENABLE", False): self.register_consumer(self.FILE_SYSTEM_CONSUMER_NAME, FileSystemConsumer(root_state_name)) self._file_system_consumer_exists = True plugins.run_hook("register_execution_history_consumer", self) # Only have one thread here that will call the notify function of each consumer # The advantage it that the consumer authors don't have to care about threading # and don't have to care about when an item is popped from the queue self.worker_thread = threading.Thread(target=self._feed_consumers) self.worker_thread.start()
def join(self):
    """Wait for root state to finish execution"""
    from rafcon.core.states.concurrency_state import ConcurrencyState
    self._root_state.join()
    # Without an in-memory execution history, walk the state tree (breadth-first)
    # and destroy leftover concurrency history items so they do not linger
    if not global_config.get_config_value("IN_MEMORY_EXECUTION_HISTORY_ENABLE", False):
        queue = [self.root_state]
        while len(queue) > 0:
            state = queue.pop(0)
            if isinstance(state, ConcurrencyState):
                if state.concurrency_history_item is not None:
                    state.concurrency_history_item.destroy()
                    state.concurrency_history_item = None
            elif hasattr(state, 'states'):
                # container state: traverse its children as well
                queue.extend(state.states.values())
    # shut down the history of this run (closes consumers / log files)
    if len(self.execution_histories) > 0:
        self.execution_histories[-1].shutdown()
    from rafcon.core.states.state import StateExecutionStatus
    self._root_state.state_execution_status = StateExecutionStatus.INACTIVE
def get_execution_history(initial_prev=None, root_state_name="", consumer_manager=None):
    """ Create an instance of a InMemoryExecutionHistory or BaseExecutionHistory

    :param initial_prev: the initial previous history item
    :param root_state_name: the root state name
    :param consumer_manager: the consumer manager
    :return: an instance of BaseExecutionHistory or InMemoryExecutionHistory
    """
    in_memory = global_config.get_config_value("IN_MEMORY_EXECUTION_HISTORY_ENABLE", True)
    history_class = InMemoryExecutionHistory if in_memory else BaseExecutionHistory
    return history_class(initial_prev=initial_prev,
                         root_state_name=root_state_name,
                         consumer_manager=consumer_manager)
def from_dict(cls, dictionary):
    """Instantiate a data port (or subclass) from its dictionary representation.

    :param dict dictionary: serialized data port attributes
    """
    safe_init = global_config.get_config_value("LOAD_SM_WITH_CHECKS", True)
    # force_type=True also allows creation of the DataPort base class itself,
    # which is needed when loading from YAML files; cls is the appropriate
    # constructor in every case (e.g. InputDataPort for input data ports)
    return cls(dictionary['name'], dictionary['data_type'], dictionary['default_value'],
               dictionary['data_port_id'], force_type=True,
               init_without_default_value_type_exceptions=True, safe_init=safe_init)
def remove_all_libraries(init_library_manager=True):
    """Drop every configured library path and optionally re-init the library manager.

    :param bool init_library_manager: whether to re-initialize the library manager afterwards
    """
    from rafcon.core.config import global_config
    # empty the dict in place: the config keeps referencing the same object
    global_config.get_config_value("LIBRARY_PATHS").clear()
    if init_library_manager:
        rafcon.core.singleton.library_manager.initialize()
def load_state_recursively(parent, state_path=None, dirty_states=[]):
    """Recursively loads the state

    It calls this method on each sub-state of a container state.

    :param parent: the root state of the last load call to which the loaded state will be added
    :param state_path: the path on the filesystem where to find the meta file for the state
    :param dirty_states: a dict of states which changed during loading
    :return:
    """
    from rafcon.core.states.execution_state import ExecutionState
    from rafcon.core.states.container_state import ContainerState
    from rafcon.core.states.hierarchy_state import HierarchyState
    from rafcon.core.singleton import library_manager

    path_core_data = get_core_data_path(state_path)
    path_meta_data = get_meta_data_path(state_path)

    logger.debug("Load state recursively: {0}".format(str(state_path)))

    try:
        state_info = load_data_file(path_core_data)
    except ValueError as e:
        logger.exception("Error while loading state data: {0}".format(e))
        return
    except LibraryNotFoundException as e:
        if global_config.get_config_value("RAISE_ERROR_ON_MISSING_LIBRARY_STATES", False) or \
                not library_manager.show_dialog:
            raise
        logger.error("Library could not be loaded: {0}\n"
                     "Skipping library and continuing loading the state machine".format(e))
        # build a dummy hierarchy state as placeholder for the missing library
        state_info = storage_utils.load_objects_from_json(path_core_data, as_dict=True)
        missing_library_meta_data = None
        if os.path.exists(path_meta_data):
            missing_library_meta_data = Vividict(storage_utils.load_objects_from_json(path_meta_data))
        state_id = state_info["state_id"]
        outcomes = {outcome['outcome_id']: Outcome(outcome['outcome_id'], outcome['name'])
                    for outcome in state_info["outcomes"].values()}
        dummy_state = HierarchyState(LIBRARY_NOT_FOUND_DUMMY_STATE_NAME, state_id=state_id, outcomes=outcomes,
                                     is_dummy=True, missing_library_meta_data=missing_library_meta_data)
        library_name = state_info['library_name']
        path_parts = os.path.join(state_info['library_path'], library_name).split(os.sep)
        dummy_state.description = 'The Missing Library Path: %s\nThe Missing Library Name: %s\n\n' % (
            state_info['library_path'], library_name)
        from rafcon.core.singleton import library_manager
        if path_parts[0] in library_manager.library_root_paths:
            dummy_state.description += 'The Missing Library OS Path: %s' % os.path.join(
                library_manager.library_root_paths[path_parts[0]], *path_parts[1:])
        else:
            dummy_state.description += 'The missing library was located in the missing library root "%s"' % path_parts[0]
        # set parent of dummy state
        if isinstance(parent, ContainerState):
            parent.add_state(dummy_state, storage_load=True)
        else:
            dummy_state.parent = parent
        return dummy_state
    except LibraryNotFoundSkipException:
        return None

    # Transitions and data flows are not added when loading a state, as also states are not added.
    # We have to wait until the child states are loaded, before adding transitions and data flows, as otherwise the
    # validity checks for transitions and data flows would fail
    if not isinstance(state_info, tuple):
        state = state_info
    else:
        state = state_info[0]
        transitions = state_info[1]
        data_flows = state_info[2]

    # set parent of state
    if parent is not None and isinstance(parent, ContainerState):
        parent.add_state(state, storage_load=True)
    else:
        state.parent = parent

    # read script file if state is an ExecutionState
    if isinstance(state, ExecutionState):
        script_text = read_file(state_path, state.script.filename)
        state.script.set_script_without_compilation(script_text)

    # load semantic data
    try:
        semantic_data = load_data_file(os.path.join(state_path, SEMANTIC_DATA_FILE))
        state.semantic_data = semantic_data
    except Exception as e:
        # semantic data file does not have to be there
        pass

    # load child states
    for p in os.listdir(state_path):
        child_state_path = os.path.join(state_path, p)
        if os.path.isdir(child_state_path):
            if not os.path.exists(os.path.join(child_state_path, FILE_NAME_CORE_DATA)):
                # this means that child_state_path is a folder, not containing a valid state
                # this also happens when pip creates __pycache__ folders for the script.py files upon installing rafcon
                continue
            child_state = load_state_recursively(state, child_state_path, dirty_states)
            if not child_state:
                return None

    # Now we can add transitions and data flows, as all child states were added
    if isinstance(state_info, tuple):
        safe_init = global_config.get_config_value("LOAD_SM_WITH_CHECKS", True)
        if safe_init:
            # this will trigger all validity checks the state machine
            state.transitions = transitions
        else:
            # NOTE(review): unchecked path -- parents are patched in manually below;
            # the safe path above relies on the property setter instead
            state._transitions = transitions
            state._data_flows = data_flows
            for _, transition in state.transitions.items():
                transition._parent = ref(state)
        state._data_flows = data_flows
        for _, data_flow in state.data_flows.items():
            data_flow._parent = ref(state)

    state.file_system_path = state_path

    if state.marked_dirty:
        dirty_states.append(state)

    return state
def load_state_recursively(parent, state_path=None, dirty_states=[]):
    """Recursively loads the state

    It calls this method on each sub-state of a container state.

    :param parent: the root state of the last load call to which the loaded state will be added
    :param state_path: the path on the filesystem where to find the meta file for the state
    :param dirty_states: a dict of states which changed during loading
    :return:
    """
    from rafcon.core.states.execution_state import ExecutionState
    from rafcon.core.states.container_state import ContainerState
    from rafcon.core.states.hierarchy_state import HierarchyState

    path_core_data = os.path.join(state_path, FILE_NAME_CORE_DATA)

    logger.debug("Load state recursively: {0}".format(str(state_path)))

    # TODO: Should be removed with next minor release
    if not os.path.exists(path_core_data):
        path_core_data = os.path.join(state_path, FILE_NAME_CORE_DATA_OLD)

    try:
        state_info = load_data_file(path_core_data)
    except ValueError as e:
        logger.exception("Error while loading state data: {0}".format(e))
        return
    except LibraryNotFoundException as e:
        logger.error("Library could not be loaded: {0}\n"
                     "Skipping library and continuing loading the state machine".format(e))
        # insert a dummy hierarchy state as placeholder for the missing library
        state_info = storage_utils.load_objects_from_json(path_core_data, as_dict=True)
        state_id = state_info["state_id"]
        dummy_state = HierarchyState(LIBRARY_NOT_FOUND_DUMMY_STATE_NAME, state_id=state_id)
        # set parent of dummy state
        if isinstance(parent, ContainerState):
            parent.add_state(dummy_state, storage_load=True)
        else:
            dummy_state.parent = parent
        return dummy_state

    # Transitions and data flows are not added when loading a state, as also states are not added.
    # We have to wait until the child states are loaded, before adding transitions and data flows, as otherwise the
    # validity checks for transitions and data flows would fail
    if not isinstance(state_info, tuple):
        state = state_info
    else:
        state = state_info[0]
        transitions = state_info[1]
        data_flows = state_info[2]

    # set parent of state
    if parent is not None and isinstance(parent, ContainerState):
        parent.add_state(state, storage_load=True)
    else:
        state.parent = parent

    # read script file if state is an ExecutionState
    if isinstance(state, ExecutionState):
        script_text = read_file(state_path, state.script.filename)
        if not global_config.get_config_value("SCRIPT_COMPILE_ON_FILESYSTEM_LOAD", True):
            state.script.set_script_without_compilation(script_text)
        else:
            try:
                state.script.script = script_text
            except ImportError as e:
                logger.info("The script of the state '{}' (id {}) uses a module that is not available: {}".format(
                    state.name, state.state_id, str(e)))
            except Exception as e:
                logger.warning("The script of the state '{}' (id {}) contains a {}: {}".format(
                    state.name, state.state_id, e.__class__.__name__, str(e)))

    # load semantic data
    try:
        semantic_data = load_data_file(os.path.join(state_path, SEMANTIC_DATA_FILE))
        state.semantic_data = semantic_data
    except Exception as e:
        # semantic data file does not have to be there
        pass

    one_of_my_child_states_not_found = False

    # load child states
    for p in os.listdir(state_path):
        child_state_path = os.path.join(state_path, p)
        if os.path.isdir(child_state_path):
            if not os.path.exists(os.path.join(child_state_path, FILE_NAME_CORE_DATA)):
                # this means that child_state_path is a folder, not containing a valid state
                # this also happens when pip creates __pycache__ folders for the script.py files upon installing rafcon
                continue
            child_state = load_state_recursively(state, child_state_path, dirty_states)
            if not child_state:
                return None
            # bug fix: compare strings by equality, not identity -- the previous
            # 'is' comparison only worked when both strings were interned
            if child_state.name == LIBRARY_NOT_FOUND_DUMMY_STATE_NAME:
                one_of_my_child_states_not_found = True

    if one_of_my_child_states_not_found:
        # omit adding transitions and data flows in this case
        pass
    else:
        # Now we can add transitions and data flows, as all child states were added
        if isinstance(state_info, tuple):
            # safe version
            # state.transitions = transitions
            # state.data_flows = data_flows
            state._transitions = transitions
            for _, transition in state.transitions.items():
                transition._parent = ref(state)
            state._data_flows = data_flows
            for _, data_flow in state.data_flows.items():
                data_flow._parent = ref(state)

    state.file_system_path = state_path

    if state.marked_dirty:
        dirty_states.append(state)

    return state
def from_dict(cls, dictionary):
    """Create an outcome from its dictionary representation.

    :param dict dictionary: holds the 'outcome_id' and 'name' entries
    """
    safe_init = global_config.get_config_value("LOAD_SM_WITH_CHECKS", True)
    # use cls rather than the hard-coded Outcome so subclasses deserialize to their own type
    return cls(dictionary['outcome_id'], dictionary['name'], safe_init=safe_init)
def from_dict(cls, dictionary):
    """Create an income from its dictionary representation.

    :param dict dictionary: unused; an income carries no serialized attributes here
    """
    safe_init = global_config.get_config_value("LOAD_SM_WITH_CHECKS", True)
    # use cls rather than the hard-coded Income so subclasses deserialize to their own type
    return cls(safe_init=safe_init)