def __init__(self, state_machine, meta=None, load_meta_data=True):
    """Constructor

    Builds the model (including the child model for the root state) for the given core state machine.

    :param StateMachine state_machine: the core state machine to be modeled
    :param Vividict meta: optional meta data; adopted directly if it already is a Vividict
    :param bool load_meta_data: whether meta data is read from the file system while building the models
    """
    MetaModel.__init__(self)  # pass columns as separate parameters
    assert isinstance(state_machine, StateMachine)
    self.state_machine = state_machine
    self.state_machine_id = state_machine.state_machine_id
    root_state = self.state_machine.root_state
    # pick the model class matching the root state's type
    if isinstance(root_state, ContainerState):
        self.root_state = ContainerStateModel(root_state, parent=self, load_meta_data=load_meta_data)
    else:
        self.root_state = StateModel(root_state, parent=self, load_meta_data=load_meta_data)
    if isinstance(meta, Vividict):
        self.meta = meta
    else:
        self.meta = Vividict()
    # ongoing_complex_actions is updated by ComplexActionObserver -> secure encapsulated observation
    # and made observable by state machine model here
    self.ongoing_complex_actions = {}
    self.complex_action_observer = ComplexActionObserver(self)
    # signals used to publish meta data changes, actions, selection changes and model destruction
    self.meta_signal = Signal()
    self.state_meta_signal = Signal()
    self.action_signal = Signal()
    self.state_action_signal = Signal()
    self.sm_selection_changed_signal = Signal()
    self.destruction_signal = Signal()
    self.temp = Vividict()
    if load_meta_data:
        # only this model's own meta data; the child models loaded theirs in their constructors
        self.load_meta_data(recursively=False)
    self.selection = Selection(self.sm_selection_changed_signal)
    self.storage_lock = threading.Lock()  # lock can not be substituted by the state machine lock -> maybe because it is a RLock
    # modification history is optional; imported lazily (presumably to avoid an import cycle — TODO confirm)
    self.history = None
    if global_gui_config.get_config_value('HISTORY_ENABLED'):
        from rafcon.gui.models.modification_history import ModificationsHistoryModel
        self.history = ModificationsHistoryModel(self)
    else:
        logger.info("The modification history is disabled")
    # auto backup is optional as well and also imported lazily
    self.auto_backup = None
    if global_gui_config.get_config_value('AUTO_BACKUP_ENABLED'):
        from rafcon.gui.models.auto_backup import AutoBackupModel
        self.auto_backup = AutoBackupModel(self)
    # observe the root state model and the model itself (self-observation of own properties)
    self.root_state.register_observer(self)
    self.register_observer(self)
def load_meta_data(self, path=None):
    """Load meta data of state model from the file system

    The meta data of the state model is loaded from the file system and stored in the meta property of the model.
    Existing meta data is removed. Also the meta data of all state elements (data ports, outcomes, etc) are loaded,
    as those stored in the same file as the meta data of the state.

    This is either called on the __init__ of a new state model or if a state model for a container state is created,
    which then calls load_meta_data for all its children.

    :param str path: Optional file system path to the meta data file. If not given, the path will be derived from
        the state's path on the filesystem
    :return: if meta data file was loaded True otherwise False
    :rtype: bool
    """
    # TODO: for an Execution state this method is called for each hierarchy level again and again, still?? check it!
    # print("1AbstractState_load_meta_data: ", path, not path)
    if not path:
        path = self.state.file_system_path
    # print("2AbstractState_load_meta_data: ", path)
    if path is None:
        # the state was never stored -> no meta data to load; reset to an empty Vividict
        self.meta = Vividict({})
        return False
    path_meta_data = os.path.join(path, storage.FILE_NAME_META_DATA)
    # TODO: Should be removed with next minor release
    # fall back to the legacy meta data file name if the current one does not exist
    if not os.path.exists(path_meta_data):
        logger.debug("Because meta data was not found in {0} use backup option {1}"
                     "".format(path_meta_data, os.path.join(path, storage.FILE_NAME_META_DATA_OLD)))
        path_meta_data = os.path.join(path, storage.FILE_NAME_META_DATA_OLD)
    # TODO use the following logger message to debug meta data load process and to avoid maybe repetitive loads
    # if not os.path.exists(path_meta_data):
    #     logger.info("path not found {0}".format(path_meta_data))
    try:
        # print("try to load meta data from {0} for state {1}".format(path_meta_data, self.state))
        tmp_meta = storage.load_data_file(path_meta_data)
    except ValueError as e:
        # if no element which is newly generated log a warning
        # if os.path.exists(os.path.dirname(path)):
        #     logger.debug("Because '{1}' meta data of {0} was not loaded properly.".format(self, e))
        # only log when the state is not in temporary storage and its parent directory is gone
        if not path.startswith(constants.RAFCON_TEMP_PATH_STORAGE) and not os.path.exists(os.path.dirname(path)):
            logger.debug("Because '{1}' meta data of {0} was not loaded properly.".format(self, e))
        tmp_meta = {}
    # JSON returns a dict, which must be converted to a Vividict
    tmp_meta = Vividict(tmp_meta)
    if tmp_meta:
        # distribute element meta data (ports, outcomes, ...) to the child element models
        self._parse_for_element_meta_data(tmp_meta)
        # assign the meta data to the state
        self.meta = tmp_meta
        self.meta_signal.emit(MetaSignalMsg("load_meta_data", "all", True))
        return True
    else:
        # print("nothing to parse", tmp_meta)
        return False
def __init__(self, meta=None):
    """Set up the base model with meta/temp data containers and a meta signal.

    :param dict meta: Optional meta data; wrapped into a Vividict when given
    """
    ModelMT.__init__(self)
    # a plain dict is adopted by wrapping it; anything else starts empty
    self.meta = Vividict(meta) if isinstance(meta, dict) else Vividict()
    self.temp = Vividict()
    self.meta_signal = Signal()
def load_meta_data(self, path=None, recursively=True):
    """Load meta data of state machine model from the file system

    The meta data of the state machine model is loaded from the file system and stored in the meta property of the
    model. Existing meta data is removed. Also the meta data of root state and children is loaded.

    :param str path: Optional path to the meta data file. If not given, the path will be derived from the state
        machine's path on the filesystem
    :param bool recursively: Also load the meta data of the root state (and thereby its children)
    """
    base_path = self.state_machine.file_system_path if path is None else path
    loaded = {}
    if base_path:
        meta_file = os.path.join(base_path, storage.FILE_NAME_META_DATA)
        try:
            loaded = storage.load_data_file(meta_file)
        except ValueError:
            loaded = {}
    # JSON returns a dict, which must be converted to a Vividict
    loaded = Vividict(loaded)
    if recursively:
        # NOTE: the child path is derived from the *explicit* path argument only,
        # not from the state machine's own file system path
        child_path = os.path.join(path, self.root_state.state.state_id) if path else None
        self.root_state.load_meta_data(child_path)
    if loaded:
        # assign the meta data to the state machine model
        self.meta = loaded
        self.meta_signal.emit(MetaSignalMsg("load_meta_data", "all", True))
def __init__(self, state_machine_manager, meta=None):
    """Constructor

    Wraps the core state machine manager and creates one StateMachineModel per registered state machine.

    :param StateMachineManager state_machine_manager: the core manager to be modeled
    :param Vividict meta: optional meta data; adopted directly if it already is a Vividict
    """
    ModelMT.__init__(self)  # pass columns as separate parameters
    self.register_observer(self)
    assert isinstance(state_machine_manager, StateMachineManager)
    self.state_machine_manager = state_machine_manager
    # mirror the core state machines with model wrappers, keyed by state machine id
    self.state_machines = {}
    for sm_id, sm in state_machine_manager.state_machines.items():
        self.state_machines[sm_id] = StateMachineModel(sm)
    self._selected_state_machine_id = None
    # pre-select the first state machine, if any exist
    if len(self.state_machines) > 0:
        self.selected_state_machine_id = list(self.state_machines.keys())[0]
    if isinstance(meta, Vividict):
        self.meta = meta
    else:
        self.meta = Vividict()
    # check if the sm_manager_model exists several times
    # (this model is meant to be a singleton; a second instantiation hard-exits the process)
    self.__class__.__sm_manager_creation_counter += 1
    if self.__class__.__sm_manager_creation_counter == 2:
        logger.error("Sm_manager_model exists several times!")
        os._exit(0)
def __init__(self, meta=None):
    """Initialize the model, adopting *meta* when it already is a Vividict.

    :param Vividict meta: Optional meta data container
    """
    ModelMT.__init__(self)
    # a ready-made Vividict is used as-is; everything else starts empty
    self.meta = meta if isinstance(meta, Vividict) else Vividict()
def __init__(self, state_machine_model):
    """Constructor

    Sets up the auto-backup bookkeeping (lock files, timing configuration, meta data for tab restore)
    for the given state machine model and starts the initial backup/check cycle.

    :param StateMachineModel state_machine_model: the state machine model to be backed up automatically
    """
    ModelMT.__init__(self)
    assert isinstance(state_machine_model, StateMachineModel)
    self.state_machine_model = state_machine_model
    # variables used for lock files
    # TODO reduce those variables
    self.__destroyed = False
    self.AUTO_RECOVERY_LOCK_ENABLED = False
    # lock-file based recovery only when the global lock folder exists AND the feature is enabled in the config
    if os.path.exists(os.path.join(RAFCON_TEMP_PATH_BASE, 'lock')) and \
            global_gui_config.get_config_value('AUTO_RECOVERY_LOCK_ENABLED'):
        self.AUTO_RECOVERY_LOCK_ENABLED = True
    self.lock_file_lock = threading.Lock()
    self.lock_file = None
    self.last_lock_file_name = None
    # general auto-backup variable
    self.timed_temp_storage_enabled = global_gui_config.get_config_value('AUTO_BACKUP_ENABLED')
    self.only_fix_interval = global_gui_config.get_config_value('AUTO_BACKUP_ONLY_FIX_FORCED_INTERVAL')
    self.force_temp_storage_interval = global_gui_config.get_config_value('AUTO_BACKUP_FORCED_STORAGE_INTERVAL')
    self.timed_temp_storage_interval = global_gui_config.get_config_value('AUTO_BACKUP_DYNAMIC_STORAGE_INTERVAL')
    self.last_backup_time = time.time()  # used as 'last-backup' and 'last-modification-not-backup-ed' time
    self.marked_dirty = False
    self.__perform_storage = False
    self._timer_request_time = None
    self.timer_request_lock = threading.Lock()
    self.tmp_timed_storage_thread = None
    self.meta = Vividict()
    if state_machine_model.state_machine.file_system_path is not None:
        # logger.info("store meta data of {0} to {1}".format(self, meta_data_path))
        # data used for restore tabs -> (having the information to load state machines without loading them)
        self.meta['last_saved']['time'] = state_machine_model.state_machine.last_update
        self.meta['last_saved']['file_system_path'] = state_machine_model.state_machine.file_system_path
    logger.debug("The auto-backup for state-machine {2} is {0} and set to '{1}'"
                 "".format('ENABLED' if self.timed_temp_storage_enabled else 'DISABLED',
                           'fix interval mode' if self.only_fix_interval else 'dynamic interval mode',
                           self.state_machine_model.state_machine.state_machine_id))
    # register observer before initializing check loop
    self.observe_model(self.state_machine_model)
    # initializing check loop to fully initialize the model
    if not self.only_fix_interval:
        self.perform_temp_storage()
    else:
        self.check_for_auto_backup(force=True)
def load_meta_data(self, path=None):
    """Load meta data of state model from the file system

    Reads the meta data file next to the state's core data and replaces the model's current meta data with it.
    Element meta data (data ports, outcomes, etc), stored in the same file, is distributed to the element models.

    This is either called on the __init__ of a new state model or if a state model for a container state is
    created, which then calls load_meta_data for all its children.

    :param str path: Optional file system path to the meta data file. If not given, the path will be derived
        from the state's path on the filesystem
    :return: if meta data file was loaded True otherwise False
    :rtype: bool
    """
    state_path = path or self.state.file_system_path
    if state_path is None:
        # the state was never stored -> nothing to load; reset to an empty container
        self.meta = Vividict({})
        return False

    meta_file = os.path.join(state_path, storage.FILE_NAME_META_DATA)
    try:
        raw_meta = storage.load_data_file(meta_file)
    except ValueError as e:
        raw_meta = {}
        # only complain when the state is neither in temp storage nor newly generated
        in_temp_storage = state_path.startswith(constants.RAFCON_TEMP_PATH_STORAGE)
        if not in_temp_storage and not os.path.exists(os.path.dirname(state_path)):
            logger.debug("Because '{1}' meta data of {0} was not loaded properly.".format(self, e))

    # JSON returns a dict, which must be converted to a Vividict
    meta = Vividict(raw_meta)
    if not meta:
        return False
    self._parse_for_element_meta_data(meta)
    # assign the meta data to the state
    self.meta = meta
    self.meta_signal.emit(MetaSignalMsg("load_meta_data", "all", True))
    return True
def __init__(self, execution_engine, meta=None):
    """Initialize the model wrapping the core execution engine.

    :param ExecutionEngine execution_engine: the core execution engine to be modeled
    :param Vividict meta: Optional meta data container
    """
    ModelMT.__init__(self)
    # this model observes its own properties
    self.register_observer(self)
    assert isinstance(execution_engine, ExecutionEngine)
    self.execution_engine = execution_engine
    self.meta = meta if isinstance(meta, Vividict) else Vividict()
def __init__(self, library_manager, meta=None):
    """Initialize the model wrapping the core library manager.

    :param LibraryManager library_manager: the core library manager to be modeled
    :param Vividict meta: Optional meta data container
    """
    ModelMT.__init__(self)
    assert isinstance(library_manager, LibraryManager)
    self.library_manager = library_manager
    self.meta = meta if isinstance(meta, Vividict) else Vividict()
    # this class is an observer of its own properties:
    self.register_observer(self)
def create_new_state_from_state_with_type(source_state, target_state_class):
    """The function duplicates/transforms a state to a new state type.

    If the source state type and the new state type both are ContainerStates the new state will have not transitions
    to force the user to explicitly re-order the logical flow according the paradigm of the new state type.

    :param source_state: previous/original state that is to transform into a new state type (target_state_class)
    :param target_state_class: the final state class type
    :return: the new state instance of type target_state_class
    """
    current_state_is_container = isinstance(source_state, ContainerState)
    new_state_is_container = issubclass(target_state_class, ContainerState)

    if current_state_is_container and new_state_is_container:
        # TRANSFORM from CONTAINER- TO CONTAINER-STATE
        # by default all transitions are left out if the new and original state are container states
        # -> because switch from Barrier, Preemptive or Hierarchy has always different rules
        state_transitions = {}
        state_start_state_id = None
        logger.info("Type change from %s to %s" % (type(source_state).__name__, target_state_class.__name__))

        # decider state is removed because it is unique for BarrierConcurrencyState
        if isinstance(source_state, BarrierConcurrencyState):
            source_state.remove_state(UNIQUE_DECIDER_STATE_ID, force=True)
            assert UNIQUE_DECIDER_STATE_ID not in source_state.states

        # separate state-elements from source state
        # NOTE: elements are copied into plain dicts and then detached from the source state, so they can be
        # handed to the new state's constructor without being owned by two states at once
        data_flows = dict(source_state.data_flows)
        source_state.data_flows = {}
        input_data_ports = dict(source_state.input_data_ports)
        output_data_ports = dict(source_state.output_data_ports)
        scoped_variables = dict(source_state.scoped_variables)
        income = source_state.income
        outcomes = dict(source_state.outcomes)
        source_state.input_data_ports = {}
        source_state.output_data_ports = {}
        source_state.scoped_variables = {}
        source_state.transitions = {}  # before remove of outcomes related transitions should be gone
        source_state.income = Income()
        source_state.outcomes = {}
        states = dict(source_state.states)

        # TODO check why next line can not be performed
        # source_state.states = {}
        new_state = target_state_class(name=source_state.name, state_id=source_state.state_id,
                                       input_data_ports=input_data_ports,
                                       output_data_ports=output_data_ports,
                                       scoped_variables=scoped_variables,
                                       income=income,
                                       outcomes=outcomes,
                                       transitions=state_transitions,
                                       data_flows=data_flows,
                                       states=states,
                                       start_state_id=state_start_state_id)
    else:
        # TRANSFORM from EXECUTION- TO CONTAINER-STATE or FROM CONTAINER- TO EXECUTION-STATE

        # in case the new state is an execution state remove of child states (for observable notifications)
        if current_state_is_container and issubclass(target_state_class, ExecutionState):
            if isinstance(source_state, BarrierConcurrencyState):
                source_state.remove_state(UNIQUE_DECIDER_STATE_ID, force=True)
                assert UNIQUE_DECIDER_STATE_ID not in source_state.states
            # iterate over a key snapshot because remove_state mutates source_state.states
            for state_id in list(source_state.states.keys()):
                source_state.remove_state(state_id)

        # separate state-elements from source state
        input_data_ports = dict(source_state.input_data_ports)
        output_data_ports = dict(source_state.output_data_ports)
        income = source_state.income
        outcomes = dict(source_state.outcomes)
        source_state.input_data_ports = {}
        source_state.output_data_ports = {}
        source_state.income = Income()
        source_state.outcomes = {}

        new_state = target_state_class(name=source_state.name, state_id=source_state.state_id,
                                       input_data_ports=input_data_ports,
                                       output_data_ports=output_data_ports,
                                       income=income,
                                       outcomes=outcomes)

    # carry over description and semantic data of the original state
    if source_state.description is not None and len(source_state.description) > 0:
        new_state.description = source_state.description
    new_state.semantic_data = Vividict(source_state.semantic_data)

    return new_state
def load_state_recursively(parent, state_path=None, dirty_states=None):
    """Recursively loads the state

    It calls this method on each sub-state of a container state.

    :param parent: the root state of the last load call to which the loaded state will be added
    :param state_path: the path on the filesystem where to find the meta file for the state
    :param dirty_states: a list collecting states which changed during loading (shared across the recursion)
    :return: the loaded state, a dummy state for a missing library, or None on failure/skip
    """
    from rafcon.core.states.execution_state import ExecutionState
    from rafcon.core.states.container_state import ContainerState
    from rafcon.core.states.hierarchy_state import HierarchyState
    from rafcon.core.singleton import library_manager

    # FIX: avoid the mutable default argument ([]) — it would be shared across all top-level calls,
    # so dirty states from one load would leak into the next. Create a fresh list per call instead.
    if dirty_states is None:
        dirty_states = []

    path_core_data = get_core_data_path(state_path)
    path_meta_data = get_meta_data_path(state_path)

    logger.debug("Load state recursively: {0}".format(str(state_path)))

    try:
        state_info = load_data_file(path_core_data)
    except ValueError as e:
        logger.exception("Error while loading state data: {0}".format(e))
        return
    except LibraryNotFoundException as e:
        if global_config.get_config_value("RAISE_ERROR_ON_MISSING_LIBRARY_STATES", False) or \
                not library_manager.show_dialog:
            raise
        logger.error("Library could not be loaded: {0}\n"
                     "Skipping library and continuing loading the state machine".format(e))
        # build a visible dummy state so the user can see (and later fix) the missing library
        state_info = storage_utils.load_objects_from_json(path_core_data, as_dict=True)
        missing_library_meta_data = None
        if os.path.exists(path_meta_data):
            missing_library_meta_data = Vividict(storage_utils.load_objects_from_json(path_meta_data))
        state_id = state_info["state_id"]
        outcomes = {outcome['outcome_id']: Outcome(outcome['outcome_id'], outcome['name'])
                    for outcome in state_info["outcomes"].values()}
        dummy_state = HierarchyState(LIBRARY_NOT_FOUND_DUMMY_STATE_NAME, state_id=state_id, outcomes=outcomes,
                                     is_dummy=True, missing_library_meta_data=missing_library_meta_data)
        library_name = state_info['library_name']
        path_parts = os.path.join(state_info['library_path'], library_name).split(os.sep)
        dummy_state.description = 'The Missing Library Path: %s\nThe Missing Library Name: %s\n\n' % \
                                  (state_info['library_path'], library_name)
        from rafcon.core.singleton import library_manager
        if path_parts[0] in library_manager.library_root_paths:
            dummy_state.description += 'The Missing Library OS Path: %s' % os.path.join(
                library_manager.library_root_paths[path_parts[0]], *path_parts[1:])
        else:
            dummy_state.description += 'The missing library was located in the missing library root "%s"' \
                                       % path_parts[0]
        # set parent of dummy state
        if isinstance(parent, ContainerState):
            parent.add_state(dummy_state, storage_load=True)
        else:
            dummy_state.parent = parent
        return dummy_state
    except LibraryNotFoundSkipException:
        return None

    # Transitions and data flows are not added when loading a state, as also states are not added.
    # We have to wait until the child states are loaded, before adding transitions and data flows, as otherwise the
    # validity checks for transitions and data flows would fail
    if not isinstance(state_info, tuple):
        state = state_info
    else:
        state = state_info[0]
        transitions = state_info[1]
        data_flows = state_info[2]

    # set parent of state
    if parent is not None and isinstance(parent, ContainerState):
        parent.add_state(state, storage_load=True)
    else:
        state.parent = parent

    # read script file if state is an ExecutionState
    if isinstance(state, ExecutionState):
        script_text = read_file(state_path, state.script.filename)
        state.script.set_script_without_compilation(script_text)

    # load semantic data
    try:
        semantic_data = load_data_file(os.path.join(state_path, SEMANTIC_DATA_FILE))
        state.semantic_data = semantic_data
    except Exception:
        # semantic data file does not have to be there
        pass

    # load child states
    for p in os.listdir(state_path):
        child_state_path = os.path.join(state_path, p)
        if os.path.isdir(child_state_path):
            if not os.path.exists(os.path.join(child_state_path, FILE_NAME_CORE_DATA)):
                # this means that child_state_path is a folder, not containing a valid state
                # this also happens when pip creates __pycache__ folders for the script.py files
                # upon installing rafcon
                continue
            child_state = load_state_recursively(state, child_state_path, dirty_states)
            if not child_state:
                return None

    # Now we can add transitions and data flows, as all child states were added
    if isinstance(state_info, tuple):
        safe_init = global_config.get_config_value("LOAD_SM_WITH_CHECKS", True)
        if safe_init:
            # this will trigger all validity checks the state machine
            state.transitions = transitions
        else:
            # short cut: assign private attributes directly and patch the parents by hand
            state._transitions = transitions
            state._data_flows = data_flows
            for _, transition in state.transitions.items():
                transition._parent = ref(state)
        state._data_flows = data_flows
        for _, data_flow in state.data_flows.items():
            data_flow._parent = ref(state)

    state.file_system_path = state_path

    if state.marked_dirty:
        dirty_states.append(state)

    return state