def cut(self, selection, smart_selection_adaption=False):
    """Cuts all selected items and copy them to the clipboard using smart selection adaptation by default

    The selected elements are copied into the clipboard first and afterwards their originals are
    removed (destroyed) from the state machine. Observers are notified via a pair of 'cut'
    action signals (``after=False`` before the removal, ``after=True`` when it is done).

    :param selection: the current selection
    :param bool smart_selection_adaption: flag to enable smart selection adaptation mode
    :return:
    """
    assert isinstance(selection, Selection)
    # local import — presumably to avoid a circular import at module load time (TODO confirm)
    import rafcon.gui.helpers.state_machine as gui_helper_state_machine
    # elements inside a library state must not be modified, so the cut is refused
    if gui_helper_state_machine.is_selection_inside_of_library_state(selected_elements=selection.get_all()):
        logger.warning("Cut is not performed because elements inside of a library state are selected.")
        return
    # copy core elements and models into the clipboard before the originals are removed
    selection_dict_of_copied_models, parent_m = self.__create_core_and_model_object_copies(
        selection, smart_selection_adaption)
    non_empty_lists_dict, action_parent_m = self.get_action_arguments(parent_m if parent_m else None)
    # announce the pending removal (after=False) so observers can prepare for the change
    action_parent_m.action_signal.emit(ActionSignalMsg(action='cut', origin='clipboard',
                                                       action_parent_m=action_parent_m,
                                                       affected_models=[], after=False,
                                                       kwargs={'remove': non_empty_lists_dict}))
    # remove the originals of the copied elements from the state machine
    for models in selection_dict_of_copied_models.values():
        gui_helper_state_machine.delete_core_elements_of_models(models, destroy=True, recursive=True, force=False)
    affected_models = [model for models in non_empty_lists_dict.values() for model in models]
    # announce the finished removal (after=True) together with all models that were affected
    action_parent_m.action_signal.emit(ActionSignalMsg(action='cut', origin='clipboard',
                                                       action_parent_m=action_parent_m,
                                                       affected_models=affected_models, after=True))
def ungroup_state(state_m):
    """Ungroup a state: dissolve it and move its children into its parent state

    The child models (states, scoped variables, enclosed transitions/data flows) of the state to be
    ungrouped are collected before the core operation, registered as "expected future models" so the
    parent model re-uses them, and their meta data is re-mapped to the new element ids afterwards.
    Intermediate data is stashed on the unbound ``ungroup_state`` function object of the parent model
    so it survives the core-triggered model notifications.

    :param state_m: model of the (container) state to be ungrouped
    :return: the (destroyed) model of the former state
    """
    action_parent_m = state_m.parent
    state_id = state_m.state.state_id
    old_state_m = state_m

    # BEFORE MODEL
    # collect all child models whose core elements will be moved into the parent
    tmp_models_dict = {'transitions': {}, 'data_flows': {}, 'states': {}, 'scoped_variables': {}, 'state': None,
                       'input_data_ports': {}, 'output_data_ports': {}}

    related_transitions, related_data_flows = action_parent_m.state.get_connections_for_state(state_id)
    tmp_models_dict['state'] = action_parent_m.states[state_id]
    for s_id, s_m in action_parent_m.states[state_id].states.items():
        tmp_models_dict['states'][s_id] = s_m
    for sv_m in action_parent_m.states[state_id].scoped_variables:
        tmp_models_dict['scoped_variables'][sv_m.scoped_variable.data_port_id] = sv_m
    for t in related_transitions['internal']['enclosed']:
        tmp_models_dict['transitions'][t.transition_id] = action_parent_m.states[state_id].get_transition_m(
            t.transition_id)
    for df in related_data_flows['internal']['enclosed']:
        tmp_models_dict['data_flows'][df.data_flow_id] = action_parent_m.states[state_id].get_data_flow_m(
            df.data_flow_id)
    affected_models = [action_parent_m.states[state_id], ]
    # announce the pending ungroup (after=False) on the model of the state to be dissolved
    old_state_m.action_signal.emit(ActionSignalMsg(action='ungroup_state', origin='model',
                                                   action_parent_m=action_parent_m,
                                                   affected_models=affected_models, after=False,
                                                   kwargs={'state_id': state_id}))
    # stash intermediate data on the unbound function object so it survives the core notifications
    action_parent_m.ungroup_state.__func__.tmp_models_storage = tmp_models_dict
    action_parent_m.ungroup_state.__func__.affected_models = affected_models
    error_msg = "Un-Group action has not started with empty expected future models list."
    check_expected_future_model_list_is_empty(action_parent_m, msg=error_msg)
    # register child state models for re-use by the parent model; the decider state of a concurrency
    # state is excluded — it is not moved into the parent
    for key in ['states']:  # , 'scoped_variables', 'transitions', 'data_flows']:
        for m in tmp_models_dict[key].values():
            if not m.state.state_id == UNIQUE_DECIDER_STATE_ID:
                action_parent_m.expected_future_models.add(m)

    # CORE
    e = None
    try:
        state_m.parent.state.ungroup_state(state_m.state.state_id)
    except Exception as e2:
        e = e2
        logger.exception("State ungroup failed")

    error_msg = "Un-Group action has not re-used all models of grouped elements."
    check_expected_future_model_list_is_empty(action_parent_m, msg=error_msg)

    # AFTER MODEL
    if e is None:
        tmp_models_dict = action_parent_m.ungroup_state.__func__.tmp_models_storage
        # TODO re-organize and use partly the expected_models pattern the next lines
        # TODO -> when transitions/data flows only hold references onto respective logical/data ports
        if not gui_helper_meta_data.offset_rel_pos_of_models_meta_data_according_parent_state(tmp_models_dict):
            logger.error("Meta data adaptation for group states failed.")
        else:
            # reduce tmp models by not applied state meta data
            tmp_models_dict.pop('state')
            # correct state element ids with new state element ids to set meta data on right state element
            # (the core operation generated new ids; the mapping dicts are stored on the core function object)
            tmp_models_dict['states'] = \
                {new_state_id: tmp_models_dict['states'][old_state_id]
                 for old_state_id, new_state_id in
                 action_parent_m.state.ungroup_state.__func__.state_id_dict.items()}
            tmp_models_dict['scoped_variables'] = \
                {new_sv_id: tmp_models_dict['scoped_variables'][old_sv_id]
                 for old_sv_id, new_sv_id in
                 action_parent_m.state.ungroup_state.__func__.sv_id_dict.items()}
            tmp_models_dict['transitions'] = \
                {new_t_id: tmp_models_dict['transitions'][old_t_id]
                 for old_t_id, new_t_id in
                 action_parent_m.state.ungroup_state.__func__.enclosed_t_id_dict.items()}
            tmp_models_dict['data_flows'] = \
                {new_df_id: tmp_models_dict['data_flows'][old_df_id]
                 for old_df_id, new_df_id in
                 action_parent_m.state.ungroup_state.__func__.enclosed_df_id_dict.items()}
            action_parent_m.insert_meta_data_from_models_dict(tmp_models_dict, logger.info)

        affected_models = action_parent_m.ungroup_state.__func__.affected_models
        for elemets_dict in tmp_models_dict.values():
            affected_models.extend(elemets_dict.values())

    # announce the finished ungroup (after=True); ``result`` carries a possible core exception
    old_state_m.action_signal.emit(ActionSignalMsg(action='ungroup_state', origin='model',
                                                   action_parent_m=action_parent_m,
                                                   affected_models=affected_models, after=True,
                                                   result=e))
    old_state_m.prepare_destruction(recursive=True)
    del action_parent_m.ungroup_state.__func__.tmp_models_storage
    del action_parent_m.ungroup_state.__func__.affected_models
    return old_state_m
def group_states_and_scoped_variables(state_m_list, sv_m_list):
    """Group states and scoped variables into a new hierarchy state

    The models of the elements to be grouped (and their enclosed transitions/data flows) are
    collected beforehand, registered as "expected future models" so they are re-used by the model
    layer, and the meta data is scaled onto the newly created state afterwards. Intermediate data is
    stashed on the unbound ``group_states`` function object of the parent model so it survives the
    core-triggered notifications.

    :param state_m_list: models of the states to be grouped
    :param sv_m_list: models of the scoped variables to be grouped
    :return: the new core state created by the core group operation, or None on failure
    """
    state_ids = [state_m.state.state_id for state_m in state_m_list]
    sv_ids = [sv.scoped_variable.data_port_id for sv in sv_m_list]
    # both lists may be empty independently; the parent is taken from whichever list has elements
    action_parent_m = state_m_list[0].parent if state_m_list else sv_m_list[0].parent
    assert isinstance(action_parent_m, ContainerStateModel)

    # BEFORE MODEL
    tmp_models_dict = {'transitions': {}, 'data_flows': {}, 'states': {}, 'scoped_variables': {}, 'state': None,
                       'input_data_ports': {}, 'output_data_ports': {}}

    related_transitions, related_data_flows = \
        action_parent_m.state.get_connections_for_state_and_scoped_variables(state_ids, sv_ids)
    for state_id in state_ids:
        tmp_models_dict['states'][state_id] = action_parent_m.states[state_id]
    for sv_id in sv_ids:
        tmp_models_dict['scoped_variables'][sv_id] = action_parent_m.get_scoped_variable_m(sv_id)
    # only fully enclosed connections move into the new state
    for t in related_transitions['enclosed']:
        tmp_models_dict['transitions'][t.transition_id] = action_parent_m.get_transition_m(t.transition_id)
    for df in related_data_flows['enclosed']:
        tmp_models_dict['data_flows'][df.data_flow_id] = action_parent_m.get_data_flow_m(df.data_flow_id)

    affected_models = []
    for elements_dict in tmp_models_dict.values():
        if isinstance(elements_dict, dict):
            affected_models.extend(elements_dict.values())
        elif isinstance(elements_dict, AbstractStateModel):
            # NOTE(review): at this point 'state' is still None so this branch never triggers;
            # ``extend`` on a single model looks like it should be ``append`` — confirm before relying on it
            affected_models.extend(elements_dict)

    # announce the pending group operation (after=False)
    action_parent_m.action_signal.emit(ActionSignalMsg(action='group_states', origin='model',
                                                       action_parent_m=action_parent_m,
                                                       affected_models=affected_models, after=False,
                                                       kwargs={'state_ids': state_ids,
                                                               'scoped_variables': sv_ids}))
    # stash intermediate data on the unbound function object so it survives the core notifications
    action_parent_m.group_states.__func__.tmp_models_storage = tmp_models_dict
    action_parent_m.group_states.__func__.affected_models = affected_models
    error_msg = "Group action has not started with empty expected future models list."
    check_expected_future_model_list_is_empty(action_parent_m, msg=error_msg)
    # register the collected models so the model layer re-uses them instead of creating new ones
    for key in ['states', 'scoped_variables', 'transitions', 'data_flows']:
        for model in tmp_models_dict[key].values():
            action_parent_m.expected_future_models.add(model)

    # CORE
    new_state = e = None
    try:
        assert isinstance(action_parent_m.state, ContainerState)
        new_state = action_parent_m.state.group_states(state_ids, sv_ids)
    except Exception as e2:
        e = e2
        logger.exception("State group failed")

    # AFTER MODEL
    if new_state:
        tmp_models_dict = action_parent_m.group_states.__func__.tmp_models_storage
        grouped_state_m = action_parent_m.states[new_state.state_id]
        tmp_models_dict['state'] = grouped_state_m
        # if models are left over check if the model remove methods have eaten your models because destroy flag was True
        error_msg = "Group action has not re-used all models of grouped elements."
        check_expected_future_model_list_is_empty(action_parent_m, msg=error_msg)
        if not gui_helper_meta_data.scale_meta_data_according_states(tmp_models_dict):
            logger.error("Meta data adaptation for group states failed.")
        else:
            # at the moment this is only used to check and generate error logger messages in case
            grouped_state_m.insert_meta_data_from_models_dict(tmp_models_dict, logger.error)

        affected_models = action_parent_m.group_states.__func__.affected_models
        affected_models.append(grouped_state_m)

    # announce the finished group operation (after=True); ``result`` carries a possible core exception
    action_parent_m.action_signal.emit(ActionSignalMsg(action='group_states', origin='model',
                                                       action_parent_m=action_parent_m,
                                                       affected_models=affected_models, after=True,
                                                       result=e))
    del action_parent_m.group_states.__func__.tmp_models_storage
    del action_parent_m.group_states.__func__.affected_models
    return new_state
def substitute_state(target_state_m, state_m_to_insert, as_template=False):
    """Substitutes the target state

    Both, the state to be replaced (the target state) and the state to be inserted (the new state)
    are passed via parameters. The new state adapts the size and position of the target state.
    State elements of the new state are resized but keep their proportion. The meta data of the
    external transitions/data flows of the old state is preserved and re-applied to the respective
    re-created connections.

    :param rafcon.gui.models.container_state.AbstractStateModel target_state_m: State Model of state to be substituted
    :param rafcon.gui.models.container_state.StateModel state_m_to_insert: State Model of state to be inserted
    :param bool as_template: flag whether the state is inserted as a template
    :return:
    """
    state_to_insert = state_m_to_insert.state
    action_parent_m = target_state_m.parent
    old_state_m = target_state_m
    old_state = old_state_m.state
    state_id = old_state.state_id

    # BEFORE MODEL
    # preserve the meta data of the old state and of its external connections
    tmp_meta_data = {'transitions': {}, 'data_flows': {}, 'state': None}
    old_state_m = action_parent_m.states[state_id]
    # announce the pending substitution (after=False) on the model of the state to be replaced
    old_state_m.action_signal.emit(ActionSignalMsg(action='substitute_state', origin='model',
                                                   action_parent_m=action_parent_m,
                                                   affected_models=[old_state_m, ], after=False,
                                                   kwargs={'state_id': state_id,
                                                           'state': state_to_insert}))
    related_transitions, related_data_flows = action_parent_m.state.get_connections_for_state(state_id)
    tmp_meta_data['state'] = old_state_m.meta
    external_t = related_transitions['external']
    for t in external_t['ingoing'] + external_t['outgoing'] + external_t['self']:
        tmp_meta_data['transitions'][t.transition_id] = action_parent_m.get_transition_m(t.transition_id).meta
    external_df = related_data_flows['external']
    for df in external_df['ingoing'] + external_df['outgoing'] + external_df['self']:
        tmp_meta_data['data_flows'][df.data_flow_id] = action_parent_m.get_data_flow_m(df.data_flow_id).meta
    # stash intermediate data on the unbound function object so it survives the core notifications
    action_parent_m.substitute_state.__func__.tmp_meta_data_storage = tmp_meta_data
    action_parent_m.substitute_state.__func__.old_state_m = old_state_m

    # put old state size and rel_pos onto new state
    previous_state_size = state_m_to_insert.get_meta_data_editor()['size']
    state_m_to_insert.set_meta_data_editor('size', old_state_m.get_meta_data_editor()['size'])
    state_m_to_insert.set_meta_data_editor('rel_pos', old_state_m.get_meta_data_editor()['rel_pos'])
    # scale the meta data according new size
    prepare_state_m_for_insert_as(state_m_to_insert, previous_state_size)

    # CORE
    new_state = e = None
    try:
        # if as_template:  # TODO remove this work around if the models are loaded correctly
        #     # the following enforce the creation of a new model (in needed depth) and transfer of meta data
        #     import rafcon.gui.action
        #     meta_dict = rafcon.gui.action.get_state_element_meta(state_m_to_insert)
        #     new_state = action_parent_m.state.substitute_state(state_id, state_to_insert)
        #     sm_m = action_parent_m.get_state_machine_m()
        #     rafcon.gui.action.insert_state_meta_data(meta_dict, sm_m.get_state_model_by_path(new_state.get_path()))
        # else:
        action_parent_m.expected_future_models.add(state_m_to_insert)
        new_state = action_parent_m.state.substitute_state(state_id, state_to_insert)
        # assert new_state.state_id is state_id
        assert new_state is state_to_insert
    except Exception as e2:
        # NOTE: do not use ``except ... as e`` here — Python 3 unbinds the target name when the
        # except block finishes, so the later ``result=e`` would raise NameError (e.g. when the
        # assertion above fails after new_state was already assigned)
        e = e2
        logger.exception("State substitution failed")

    if new_state:
        # AFTER MODEL
        new_state_m = action_parent_m.states[new_state.state_id]
        update_models_recursively(state_m=new_state_m)
        tmp_meta_data = action_parent_m.substitute_state.__func__.tmp_meta_data_storage
        old_state_m = action_parent_m.substitute_state.__func__.old_state_m
        changed_models = []
        # re-apply the preserved meta data of the old state and its external connections
        new_state_m.meta = tmp_meta_data['state']
        changed_models.append(new_state_m)
        for t_id, t_meta in tmp_meta_data['transitions'].items():
            if action_parent_m.get_transition_m(t_id) is not None:
                action_parent_m.get_transition_m(t_id).meta = t_meta
                changed_models.append(action_parent_m.get_transition_m(t_id))
            elif t_id in action_parent_m.state.substitute_state.__func__.re_create_io_going_t_ids:
                logger.warning("Transition model with id {0} to set meta data could not be found."
                               .format(t_id))
        for df_id, df_meta in tmp_meta_data['data_flows'].items():
            if action_parent_m.get_data_flow_m(df_id) is not None:
                action_parent_m.get_data_flow_m(df_id).meta = df_meta
                changed_models.append(action_parent_m.get_data_flow_m(df_id))
            elif df_id in action_parent_m.state.substitute_state.__func__.re_create_io_going_df_ids:
                logger.warning("Data flow model with id {0} to set meta data could not be found."
                               .format(df_id))
        # announce the finished substitution (after=True); ``result`` carries a possible core exception
        msg = ActionSignalMsg(action='substitute_state', origin='model',
                              action_parent_m=action_parent_m,
                              affected_models=changed_models, after=True, result=e)
        old_state_m.action_signal.emit(msg)

    # always clean up the stashed data, also on failure
    del action_parent_m.substitute_state.__func__.tmp_meta_data_storage
    del action_parent_m.substitute_state.__func__.old_state_m
def change_state_type(state_m, target_class):
    """Change the type of a state (e.g. ExecutionState -> HierarchyState)

    Extracts the re-usable child models of the old state model before the core type change, performs
    the core operation, builds a new state model from the extracted children and the old meta data,
    and hooks the new model into the state machine / parent model. Works both for the root state and
    for child states.

    :param state_m: model of the state whose type is changed
    :param target_class: core state class to change to
    :return: the new state model, or None on failure
    """
    old_state = state_m.state
    old_state_m = state_m
    state_id = old_state.state_id
    is_root_state = old_state.is_root_state
    state_machine_m = gui_singletons.state_machine_manager_model.get_state_machine_model(old_state_m)

    # Before the state type is actually changed, we extract the information from the old state model, to apply it
    # later on to the new state model
    required_child_models, obsolete_child_models = extract_child_models_of_state(old_state_m, target_class)
    old_state_meta = old_state_m.meta

    # By convention, the first element within the affected models list is the root model that has been affected
    affected_models = [old_state_m]
    state_element_models = []
    obsolete_state_element_models = []
    for state_elements in required_child_models.values():
        affected_models.extend(state_elements)
        state_element_models.extend(state_elements)
    for state_elements in obsolete_child_models.values():
        affected_models.extend(state_elements)
        obsolete_state_element_models.extend(state_elements)

    # TODO ??? maybe separate again into state machine function and state function in respective helper module
    if is_root_state:
        assert isinstance(state_machine_m, StateMachineModel)
        assert state_machine_m.root_state is old_state_m
        old_state_m.action_signal.emit(ActionSignalMsg(action='change_root_state_type', origin='model',
                                                       action_parent_m=state_machine_m,
                                                       affected_models=affected_models, after=False,
                                                       kwargs={'target_class': target_class}))
        old_state_m.unregister_observer(state_machine_m)
        state_machine_m.suppress_new_root_state_model_one_time = True
    else:
        parent_state_m = old_state_m.parent
        assert isinstance(parent_state_m, ContainerStateModel)
        old_state_m.action_signal.emit(ActionSignalMsg(action='change_state_type', origin='model',
                                                       action_parent_m=parent_state_m,
                                                       affected_models=affected_models, after=False,
                                                       kwargs={'state': old_state,
                                                               'target_class': target_class}))
    old_state_m.unregister_observer(old_state_m)

    # CORE
    new_state = new_state_m = e = None
    try:
        if is_root_state:
            new_state = state_machine_m.state_machine.change_root_state_type(target_class)
        else:
            new_state = old_state_m.parent.state.change_state_type(old_state, target_class)
    except Exception as e2:
        # NOTE: do not use ``except ... as e`` here — Python 3 unbinds the target name when the
        # except block finishes, so the unconditional ``result=e`` emit below would raise NameError
        # on the error path
        e = e2
        logger.exception("Root state type change failed" if is_root_state
                         else "Container state type change failed")

    # AFTER MODEL
    # After the state has been changed in the core, we create a new model for it with all information extracted
    # from the old state model
    if new_state:
        if old_state.__class__.__name__ in new_state.name:
            new_state.name = old_state.name.replace(old_state.__class__.__name__,
                                                    new_state.__class__.__name__)
        # Create a new state model based on the new state and apply the extracted child models
        new_state_m = create_state_model_for_state(new_state, old_state_meta, state_element_models)
        # By convention, tha last model within the affected model list, is the newly created model
        affected_models.append(new_state_m)

    if is_root_state:
        action_type = 'change_root_state_type'
        action_parent_m = state_machine_m
        affected_models = [new_state_m, ]
        if new_state_m:
            new_state_m.register_observer(state_machine_m)
            state_machine_m.root_state = new_state_m
    else:
        action_type = 'change_state_type'
        action_parent_m = parent_state_m
        if new_state_m:
            new_state_m.parent = parent_state_m
            # Access states dict without causing a notifications. The dict is wrapped in a ObsMapWrapper object.
            parent_state_m.states[state_id] = new_state_m
            parent_state_m.update_child_is_start()

    # Destroy all states and state elements (core and models) that are no longer required
    old_state.destroy(recursive=False)
    # Temporarily re-register to prevent KeyError: prepare_destruction calls unregister_observer
    old_state_m.register_observer(old_state_m)
    old_state_m.prepare_destruction(recursive=False)
    for state_element_m in obsolete_state_element_models:
        if isinstance(state_element_m, AbstractStateModel):
            if state_element_m.core_element:
                state_element_m.core_element.destroy(recursive=True)
            else:
                logger.verbose("Multiple calls of destroy {0}".format(state_element_m))
        state_element_m.prepare_destruction()

    # announce the finished type change (after=True); ``result`` carries a possible core exception
    old_state_m.action_signal.emit(ActionSignalMsg(action=action_type, origin='model',
                                                   action_parent_m=action_parent_m,
                                                   affected_models=affected_models, after=True,
                                                   result=e))
    if is_root_state:
        # flush the root state notification that was suppressed while the model was swapped
        suppressed_notification_parameters = \
            state_machine_m.change_root_state_type.__func__.suppressed_notification_parameters
        state_machine_m.change_root_state_type.__func__.suppressed_notification_parameters = None
        state_machine_m._send_root_state_notification(*suppressed_notification_parameters)
    return new_state_m
def paste(self, target_state_m, cursor_position=None, limited=None, convert=False):
    """Paste objects to target state

    The method checks whether the target state is a execution state or a container state and inserts
    respective elements and notifies the user if the parts can not be insert to the target state.
    - for ExecutionStates outcomes, input- and output-data ports can be inserted
    - for ContainerState additional states, scoped variables and data flows and/or transitions (if related)
      can be inserted

    Related data flows and transitions are determined by origin and target keys and respective objects which has to
    be in the state machine selection, too. Thus, transitions or data flows without the related objects are not
    copied.

    :param target_state_m: state in which the copied/cut elements should be insert
    :param cursor_position: cursor position used to adapt meta data positioning of elements e.g states and via points
    :param limited: optional list of state element attribute names to restrict the paste to
    :param bool convert: flag to convert between port kinds when a single port list is pasted
    :return: dict of lists of pairs (new model, original copied model) per state element attribute
    """
    if all([not elems for elems in self.model_copies.values()]):
        logger.warning("Paste is not performed because the clipboard is empty. "
                       "Select one or multiple elements and Copy or Cut those before performing Paste.")
        return
    if not isinstance(target_state_m, StateModel):
        logger.warning("Paste is not performed because target state indication has to be a StateModel not {0}"
                       "".format(target_state_m.__class__.__name__))
        return
    # elements inside a library state must not be modified
    if target_state_m.state.get_next_upper_library_root_state() is not None:
        logger.warning("Paste is not performed because selected target state is inside of a library state.")
        return

    self.reset_clipboard_mapping_dicts()
    element_m_copy_lists = self.model_copies
    self.prepare_new_copy()  # threaded in future -> important that the copy is prepared here!!!

    # use non empty list dict to create arguments for action signal msg and logger messages
    dict_of_non_empty_lists_of_model_copies, action_parent_m = self.get_action_arguments(target_state_m)
    # announce the pending paste (after=False)
    action_parent_m.action_signal.emit(ActionSignalMsg(action='paste', origin='clipboard',
                                                       action_parent_m=action_parent_m,
                                                       affected_models=[], after=False,
                                                       kwargs={'insert': dict_of_non_empty_lists_of_model_copies,
                                                               'convert': convert,
                                                               'limited': limited}))

    self.state_id_mapping_dict[self.copy_parent_state_id] = target_state_m.state.state_id

    # prepare list of lists to copy for limited or converted paste of objects
    # work on a copy so a possibly shared attribute list of the core state is not mutated
    target_state_element_attrs = list(target_state_m.state.state_element_attrs)
    if "income" in target_state_element_attrs:
        target_state_element_attrs.remove("income")
    if limited and all([state_element_attr in target_state_element_attrs
                        for state_element_attr in limited]):
        if len(limited) == 1 and \
                limited[0] in ['input_data_ports', 'output_data_ports', 'scoped_variables'] and convert:
            # convert-paste of a single port kind: offer the combined ports under every port attribute
            combined_list = element_m_copy_lists['input_data_ports'] + element_m_copy_lists['output_data_ports'] + \
                            element_m_copy_lists['scoped_variables']
            for state_element_attr in ['input_data_ports', 'output_data_ports', 'scoped_variables']:
                element_m_copy_lists[state_element_attr] = combined_list
        # copy to avoid mutating the caller-supplied ``limited`` list in the re-ordering below
        state_element_attrs_to_insert = list(limited)
    else:
        state_element_attrs_to_insert = target_state_element_attrs

    # check list order and put transitions and data flows to the end
    # (they reference other elements, which therefore have to be inserted first)
    for state_element_attr in ['transitions', 'data_flows']:
        if state_element_attr in state_element_attrs_to_insert:
            state_element_attrs_to_insert.remove(state_element_attr)
            state_element_attrs_to_insert.append(state_element_attr)

    def insert_elements_from_model_copies_list(model_list, state_element_name):
        """ Insert/add all core elements of model_list into the target_state_m

        The function returns a list of pairs of models (new and original models) because the
        target_state_m for some insert operations still generates a new model.

        :param list model_list: list of models
        :param str state_element_name: appendix string to "_insert_*" to get the attribute of respective
                                       methods in clipboard-class.
        :return: list of pairs of models (new and original models)
        :rtype: list[tuple]
        """
        new_and_copied_models = []
        for orig_element_m_copy in model_list:
            try:
                # hold orig_element_m_copy related to newly generated model for debugging reasons
                # (its doubt that ids are fully correct, meta data is considered to be alright now)
                insert_function = getattr(self, '_insert_{0}'.format(state_element_name))  # e.g. self._insert_state
                new_element_m = insert_function(target_state_m, orig_element_m_copy)
                new_and_copied_models.append((new_element_m, orig_element_m_copy))
            except (ValueError, AttributeError, TypeError) as e:
                logger.warning("While inserting a {0} a failure was detected, exception: {1}."
                               "".format(state_element_name, e))
        return new_and_copied_models

    # insert all lists and their elements into target state
    # insert_dict hold lists of pairs of models -> new (maybe generated by parent model) and original copy
    insert_dict = dict()
    for state_element_attr in state_element_attrs_to_insert:
        state_element_name = singular_form(state_element_attr)  # e.g. "states" => "state", "outcomes" => "outcome"
        insert_dict[state_element_attr] = \
            insert_elements_from_model_copies_list(element_m_copy_lists[state_element_attr],
                                                   state_element_name)

    # move meta data from original copied model to newly insert models and resize them to fit into target_state_m
    models_dict = {'state': target_state_m}
    for state_element_attr, state_elements in insert_dict.items():
        models_dict[state_element_attr] = {}
        for new_state_element_m, copied_state_element_m in state_elements:
            new_core_element_id = new_state_element_m.core_element.core_element_id
            models_dict[state_element_attr][new_core_element_id] = new_state_element_m

    affected_models = []
    for key, state_elements in insert_dict.items():
        if key == 'state':
            continue
        for new_state_element_m, copied_state_element_m in state_elements:
            affected_models.append(new_state_element_m)

    # commented parts are here for later use to detect empty meta data fields and debug those
    if all([all([not gui_helpers_meta_data.model_has_empty_meta(state_element_m)
                 for state_element_m, _ in elems_list]) if isinstance(elems_list, list)
            else gui_helpers_meta_data.model_has_empty_meta(elems_list)
            for elems_list in insert_dict.values()]) or \
            len(dict_of_non_empty_lists_of_model_copies) == 1 and 'states' in dict_of_non_empty_lists_of_model_copies:
        try:
            gui_helpers_meta_data.scale_meta_data_according_state(models_dict)
        except Exception:
            # keep the paste best-effort, but do not use a bare ``except:`` — that would also
            # swallow KeyboardInterrupt/SystemExit
            logger.exception("Scale of pasted content {0} cause a problems.".format(models_dict))
    else:
        # TODO this should become a warning in the future or the meta module has to handle the empty data fields
        logger.info("Paste miss meta to scale. {0}".format(affected_models))

    if not affected_models:
        logger.warning("Paste with no effect. No elements pasted from {0}"
                       "".format(dict_of_non_empty_lists_of_model_copies))
    # announce the finished paste (after=True) with all newly inserted models
    action_parent_m.action_signal.emit(ActionSignalMsg(action='paste', origin='clipboard',
                                                       action_parent_m=action_parent_m,
                                                       affected_models=affected_models, after=True))
    return insert_dict
def substitute_state(target_state_m, state_m_to_insert):
    """ Substitute the target state with a the handed state

    Both states are handed by there state models. The insert state adapts the size and position of
    the target state. State elements of the state handed to be insert became resize by keeping there
    proportion.

    :param rafcon.gui.models.container_state.AbstractStateModel target_state_m: State Model of state to be substituted
    :param rafcon.gui.models.container_state.StateModel state_m_to_insert: State Model of state to be insert instate
    :return:
    """
    gaphas_editor = bool(gui_singletons.global_gui_config.get_config_value('GAPHAS_EDITOR'))
    state_to_insert = state_m_to_insert.state
    action_parent_m = target_state_m.parent
    old_state_m = target_state_m
    old_state = old_state_m.state
    state_id = old_state.state_id

    # BEFORE MODEL
    # preserve the meta data of the old state and of its external connections
    tmp_meta_data = {'transitions': {}, 'data_flows': {}, 'state': None}
    old_state_m = action_parent_m.states[state_id]
    # announce the pending substitution (after=False) on the model of the state to be replaced
    old_state_m.action_signal.emit(ActionSignalMsg(action='substitute_state', origin='model',
                                                   action_parent_m=action_parent_m,
                                                   affected_models=[old_state_m, ], after=False,
                                                   kwargs={'state_id': state_id,
                                                           'state': state_to_insert}))
    related_transitions, related_data_flows = action_parent_m.state.related_linkage_state(state_id)
    tmp_meta_data['state'] = old_state_m.meta
    for t in related_transitions['external']['ingoing'] + related_transitions['external']['outgoing']:
        tmp_meta_data['transitions'][t.transition_id] = action_parent_m.get_transition_m(t.transition_id).meta
    for df in related_data_flows['external']['ingoing'] + related_data_flows['external']['outgoing']:
        tmp_meta_data['data_flows'][df.data_flow_id] = action_parent_m.get_data_flow_m(df.data_flow_id).meta
    # stash intermediate data on the unbound function object so it survives the core notifications
    action_parent_m.substitute_state.__func__.tmp_meta_data_storage = tmp_meta_data
    action_parent_m.substitute_state.__func__.old_state_m = old_state_m

    # put old state size and rel_pos onto new state
    previous_state_size = state_m_to_insert.get_meta_data_editor(gaphas_editor)['size']
    state_m_to_insert.set_meta_data_editor('size', old_state_m.get_meta_data_editor(gaphas_editor)['size'],
                                           gaphas_editor)
    state_m_to_insert.set_meta_data_editor('rel_pos', old_state_m.get_meta_data_editor(gaphas_editor)['rel_pos'],
                                           gaphas_editor)
    # scale the meta data according new size
    prepare_state_m_for_insert_as(state_m_to_insert, previous_state_size)

    # CORE
    new_state = e = None
    try:
        action_parent_m.expected_future_models.add(state_m_to_insert)
        new_state = action_parent_m.state.substitute_state(state_id, state_to_insert)
        # assert new_state.state_id is state_id
        assert new_state is state_to_insert
    except Exception as e2:
        # NOTE: do not use ``except ... as e`` here — Python 3 unbinds the target name when the
        # except block finishes, so the later ``result=e`` would raise NameError (e.g. when the
        # assertion above fails after new_state was already assigned)
        e = e2
        logger.exception("State substitution failed")

    if new_state:
        # AFTER MODEL
        new_state_m = action_parent_m.states[new_state.state_id]
        tmp_meta_data = action_parent_m.substitute_state.__func__.tmp_meta_data_storage
        old_state_m = action_parent_m.substitute_state.__func__.old_state_m
        changed_models = []
        # re-apply the preserved meta data of the old state and its external connections
        new_state_m.meta = tmp_meta_data['state']
        changed_models.append(new_state_m)
        # use items() instead of the Python-2-only iteritems() for Python 3 compatibility,
        # consistent with the rest of the module
        for t_id, t_meta in tmp_meta_data['transitions'].items():
            if action_parent_m.get_transition_m(t_id) is not None:
                action_parent_m.get_transition_m(t_id).meta = t_meta
                changed_models.append(action_parent_m.get_transition_m(t_id))
            elif t_id in action_parent_m.state.substitute_state.__func__.re_create_io_going_t_ids:
                logger.warning("Transition model with id {0} to set meta data could not be found."
                               .format(t_id))
        for df_id, df_meta in tmp_meta_data['data_flows'].items():
            if action_parent_m.get_data_flow_m(df_id) is not None:
                action_parent_m.get_data_flow_m(df_id).meta = df_meta
                changed_models.append(action_parent_m.get_data_flow_m(df_id))
            elif df_id in action_parent_m.state.substitute_state.__func__.re_create_io_going_df_ids:
                logger.warning("Data flow model with id {0} to set meta data could not be found."
                               .format(df_id))
        # announce the finished substitution (after=True); ``result`` carries a possible core exception
        msg = ActionSignalMsg(action='substitute_state', origin='model',
                              action_parent_m=action_parent_m,
                              affected_models=changed_models, after=True, result=e)
        old_state_m.action_signal.emit(msg)

    # always clean up the stashed data, also on failure
    del action_parent_m.substitute_state.__func__.tmp_meta_data_storage
    del action_parent_m.substitute_state.__func__.old_state_m