def configure_colors(self):
    """Parse the GTK-2 theme colors and the color definition file.

    Fills ``self.colors`` (NAME -> hex string) and ``self.gtk_colors``
    (NAME -> gtk.gdk.Color) from the theme's gtkrc ``gtk_color_scheme``
    entries and from ``colors.json``.

    :raises ValueError: if the GTK theme or the color definition file is missing
    """
    # Get colors from GTKrc file
    if not resource_exists(__name__, self.get_assets_path("gtk-2.0", "gtkrc")):
        raise ValueError("GTK theme does not exist")
    gtkrc_file_path = resource_filename(__name__, self.get_assets_path("gtk-2.0", "gtkrc"))
    with open(gtkrc_file_path) as f:
        lines = f.readlines()
    for line in lines:
        # raw string avoids the invalid "\s" escape sequence in the regex
        if re.match(r"\s*gtk_color_scheme", line):
            # entry looks like: gtk_color_scheme = "name:#rrggbb"
            name_and_code = re.findall(r'"(.*?)"', line)[0].split(':')
            color_name = name_and_code[0].upper()
            color_code = name_and_code[1]
            self.colors[color_name] = color_code
            self.gtk_colors[color_name] = gtk.gdk.Color(color_code)

    # Get color definitions
    color_file_path = resource_filename(__name__, self.get_assets_path(filename="colors.json"))
    try:
        colors = storage_utils.load_objects_from_json(color_file_path)
    except IOError:
        raise ValueError("No color definitions found")
    # replace unicode strings with str strings; items() works on Python 2 and 3
    # (iteritems() was removed in Python 3)
    colors = {str(key): str(value) for key, value in colors.items()}
    gtk_colors = {key: gtk.gdk.Color(value) for key, value in colors.items()}
    self.gtk_colors.update(gtk_colors)
    self.colors.update(colors)
def configure_colors(self):
    """Parse the GTK-3 CSS theme colors and the color definition file.

    Fills ``self.colors`` (NAME -> hex string) and ``self.gtk_colors``
    (NAME -> Gdk.Color) from the ``@define-color`` statements of the GTK-3
    CSS theme and from the colors JSON file, whose values may either be hex
    codes or aliases referencing CSS color names.

    :raises ValueError: if the GTK theme or the color definition file is missing
    """
    from gi.repository import Gdk
    dark_theme = self.get_config_value('THEME_DARK_VARIANT', True)
    css_filename = "gtk-dark.css" if dark_theme else "gtk.css"

    # Get colors from GTK CSS file
    if not resource_exists(__name__, self.get_assets_path("gtk-3.0", css_filename)):
        raise ValueError("GTK theme does not exist")

    # Provide fallback colors (white hex / black Gdk color) if a color is
    # not found, instead of crashing with a KeyError
    self.colors = defaultdict(lambda: "#FFFFFF")
    self.gtk_colors = defaultdict(lambda: Gdk.RGBA(0, 0, 0).to_color())

    gtkrc_file_path = resource_filename(__name__, self.get_assets_path("gtk-3.0", css_filename))
    with open(gtkrc_file_path) as f:
        lines = f.readlines()
    for line in lines:
        # raw string regex: "@define-color <name> <#hexcode>"
        match = re.match(r"\s*@define-color (\w*) (#[\w]{3,6})", line)
        if match:
            color_name = match.group(1).upper()
            color_code = match.group(2)
            self.colors[color_name] = color_code
            gtk_color = Gdk.RGBA()
            if gtk_color.parse(color_code):
                self.gtk_colors[color_name] = gtk_color.to_color()
            else:
                self.logger.warning(
                    "Could not parse color with name '{}' and code '{}'".
                    format(color_name, color_code))

    # Get color definitions
    colors_filename = "colors-dark.json" if dark_theme else "colors.json"
    color_file_path = resource_filename(__name__, self.get_assets_path(filename=colors_filename))
    try:
        colors = storage_utils.load_objects_from_json(color_file_path)
    except IOError:
        raise ValueError("No color definitions found")

    for color_name, color_code in colors.items():
        # replace unicode strings with str strings
        color_name = str(color_name)
        color_code = str(color_code)
        if color_code.startswith("#"):
            color = Gdk.Color.parse(color_code)[1]
        elif color_code in self.colors:
            color = self.gtk_colors[color_code]
            # BUGFIX: resolve the alias to its hex string so self.colors keeps
            # mapping names to hex codes (the original assigned the Gdk.Color
            # object from self.gtk_colors here, mixing value types)
            color_code = self.colors[color_code]
        else:
            self.logger.warning(
                "Undefined color alias '{}' for color name '{}'".format(
                    color_code, color_name))
            continue
        self.gtk_colors[color_name] = color
        self.colors[color_name] = color_code
def load_data_file(path_of_file):
    """Load the content of a data file by using json.load.

    :param path_of_file: the path of the file to load
    :return: the parsed file content (Python objects, not a raw string)
    :raises ValueError: if the file was not found
    """
    if os.path.exists(path_of_file):
        return storage_utils.load_objects_from_json(path_of_file)
    raise ValueError("Data file not found: {0}".format(path_of_file))
def load_state_recursively(parent, state_path=None, dirty_states=None):
    """Recursively loads the state

    It calls this method on each sub-state of a container state.

    :param parent: the root state of the last load call to which the loaded state will be added
    :param state_path: the path on the filesystem where to find the meta file for the state
    :param dirty_states: a list of states which changed during loading
    :return: the loaded state, a dummy state if the library was not found, or None on error
    """
    from rafcon.core.states.execution_state import ExecutionState
    from rafcon.core.states.container_state import ContainerState
    from rafcon.core.states.hierarchy_state import HierarchyState

    # BUGFIX: avoid the shared-mutable-default pitfall (dirty_states=[] was
    # shared across all calls that relied on the default)
    if dirty_states is None:
        dirty_states = []

    path_core_data = os.path.join(state_path, FILE_NAME_CORE_DATA)

    logger.debug("Load state recursively: {0}".format(str(state_path)))

    # TODO: Should be removed with next minor release
    if not os.path.exists(path_core_data):
        path_core_data = os.path.join(state_path, FILE_NAME_CORE_DATA_OLD)

    try:
        state_info = load_data_file(path_core_data)
    except ValueError as e:
        logger.exception("Error while loading state data: {0}".format(e))
        return
    except LibraryNotFoundException as e:
        logger.error("Library could not be loaded: {0}\n"
                     "Skipping library and continuing loading the state machine".format(e))
        state_info = storage_utils.load_objects_from_json(path_core_data, as_dict=True)
        state_id = state_info["state_id"]
        dummy_state = HierarchyState(LIBRARY_NOT_FOUND_DUMMY_STATE_NAME, state_id=state_id)
        # set parent of dummy state
        if isinstance(parent, ContainerState):
            parent.add_state(dummy_state, storage_load=True)
        else:
            dummy_state.parent = parent
        return dummy_state

    # Transitions and data flows are not added when loading a state, as also states are not added.
    # We have to wait until the child states are loaded, before adding transitions and data flows, as otherwise the
    # validity checks for transitions and data flows would fail
    if not isinstance(state_info, tuple):
        state = state_info
    else:
        state = state_info[0]
        transitions = state_info[1]
        data_flows = state_info[2]

    # set parent of state
    if parent is not None and isinstance(parent, ContainerState):
        parent.add_state(state, storage_load=True)
    else:
        state.parent = parent

    # read script file if state is an ExecutionState
    if isinstance(state, ExecutionState):
        script_text = read_file(state_path, state.script.filename)
        state.script.set_script_without_compilation(script_text)

    # load semantic data
    try:
        semantic_data = load_data_file(os.path.join(state_path, SEMANTIC_DATA_FILE))
        state.semantic_data = semantic_data
    except Exception:
        # semantic data file does not have to be there
        pass

    one_of_my_child_states_not_found = False

    # load child states
    for p in os.listdir(state_path):
        child_state_path = os.path.join(state_path, p)
        if os.path.isdir(child_state_path):
            if not os.path.exists(os.path.join(child_state_path, FILE_NAME_CORE_DATA)):
                # this means that child_state_path is a folder, not containing a valid state
                # this also happens when pip creates __pycache__ folders for the script.py files upon installing rafcon
                continue
            child_state = load_state_recursively(state, child_state_path, dirty_states)
            if not child_state:
                return None
            # BUGFIX: compare strings with ==, not "is" (identity only worked
            # by accident via CPython string interning)
            if child_state.name == LIBRARY_NOT_FOUND_DUMMY_STATE_NAME:
                one_of_my_child_states_not_found = True

    if one_of_my_child_states_not_found:
        # omit adding transitions and data flows in this case
        pass
    else:
        # Now we can add transitions and data flows, as all child states were added
        if isinstance(state_info, tuple):
            # safe version
            # state.transitions = transitions
            # state.data_flows = data_flows
            state._transitions = transitions
            for transition in state.transitions.values():
                transition._parent = ref(state)
            state._data_flows = data_flows
            for data_flow in state.data_flows.values():
                data_flow._parent = ref(state)

    state.file_system_path = state_path

    if state.marked_dirty:
        dirty_states.append(state)
    return state
def load_state_machine_from_path(base_path, state_machine_id=None):
    """Loads a state machine from the given path

    :param base_path: An optional base path for the state machine.
    :param state_machine_id: An optional id for the loaded state machine
    :return: the loaded StateMachine instance, or None if loading the root state failed
    :raises ValueError: if the provided path does not contain a valid state machine
    """
    logger.debug("Loading state machine from path {0}...".format(base_path))

    state_machine_file_path = os.path.join(base_path, STATEMACHINE_FILE)
    state_machine_file_path_old = os.path.join(base_path, STATEMACHINE_FILE_OLD)
    # was the root state specified as state machine base_path to load from?
    if not os.path.exists(state_machine_file_path) and not os.path.exists(state_machine_file_path_old):
        # catch the case that a state machine root file is handed
        if os.path.exists(base_path) and os.path.isfile(base_path):
            base_path = os.path.dirname(base_path)
            state_machine_file_path = os.path.join(base_path, STATEMACHINE_FILE)
            state_machine_file_path_old = os.path.join(base_path, STATEMACHINE_FILE_OLD)
        if not os.path.exists(state_machine_file_path) and not os.path.exists(state_machine_file_path_old):
            raise ValueError("Provided path doesn't contain a valid state machine: {0}".format(base_path))

    # BUGFIX: fall back to the legacy file name if only the old file exists;
    # the original always loaded the new path and would fail here
    if not os.path.exists(state_machine_file_path):
        state_machine_file_path = state_machine_file_path_old

    state_machine_dict = storage_utils.load_objects_from_json(state_machine_file_path)
    if 'used_rafcon_version' in state_machine_dict:
        previously_used_rafcon_version = StrictVersion(state_machine_dict['used_rafcon_version']).version
        active_rafcon_version = StrictVersion(rafcon.__version__).version

        rafcon_newer_than_sm_version = "You are trying to load a state machine that was stored with an older " \
                                       "version of RAFCON ({0}) than the one you are using ({1}).".format(
                                           state_machine_dict['used_rafcon_version'], rafcon.__version__)
        rafcon_older_than_sm_version = "You are trying to load a state machine that was stored with an newer " \
                                       "version of RAFCON ({0}) than the one you are using ({1}).".format(
                                           state_machine_dict['used_rafcon_version'], rafcon.__version__)
        note_about_possible_incompatibility = "The state machine will be loaded with no guarantee of success."

        if active_rafcon_version[0] > previously_used_rafcon_version[0]:
            # this is the default case
            # for a list of breaking changes please see: doc/breaking_changes.rst
            # logger.warning(rafcon_newer_than_sm_version)
            # logger.warning(note_about_possible_incompatibility)
            pass
        elif active_rafcon_version[0] == previously_used_rafcon_version[0]:
            if active_rafcon_version[1] > previously_used_rafcon_version[1]:
                # this is the default case
                # for a list of breaking changes please see: doc/breaking_changes.rst
                # logger.info(rafcon_newer_than_sm_version)
                # logger.info(note_about_possible_incompatibility)
                pass
            elif active_rafcon_version[1] == previously_used_rafcon_version[1]:
                # Major and minor version of RAFCON and the state machine match
                # It should be safe to load the state machine, as the patch level does not change the format
                pass
            else:
                logger.warning(rafcon_older_than_sm_version)
                logger.warning(note_about_possible_incompatibility)
        else:
            # BUGFIX: the state machine was stored with a newer *major* RAFCON
            # version; the original fell through both ifs silently here
            logger.warning(rafcon_older_than_sm_version)
            logger.warning(note_about_possible_incompatibility)

    state_machine = StateMachine.from_dict(state_machine_dict, state_machine_id)

    if "root_state_storage_id" not in state_machine_dict:
        # legacy format: the storage folder is named after the state id
        root_state_storage_id = state_machine_dict['root_state_id']
        state_machine.supports_saving_state_names = False
    else:
        root_state_storage_id = state_machine_dict['root_state_storage_id']

    root_state_path = os.path.join(base_path, root_state_storage_id)
    state_machine.file_system_path = base_path

    dirty_states = []
    state_machine.root_state = load_state_recursively(parent=state_machine, state_path=root_state_path,
                                                      dirty_states=dirty_states)
    if state_machine.root_state is None:
        return  # a corresponding exception has been handled with a proper error log in load_state_recursively

    state_machine.marked_dirty = len(dirty_states) > 0

    hierarchy_level = 0
    number_of_states, hierarchy_level = state_machine.root_state.get_states_statistics(hierarchy_level)
    logger.debug("Loaded state machine ({1}) has {0} states. (Max hierarchy level {2})".format(
        number_of_states, base_path, hierarchy_level))
    logger.debug("Loaded state machine ({1}) has {0} transitions.".format(
        state_machine.root_state.get_number_of_transitions(), base_path))
    logger.debug("Loaded state machine ({1}) has {0} data flows.".format(
        state_machine.root_state.get_number_of_data_flows(), base_path))

    return state_machine
def configure_colors(self):
    """Parse the theme's GTK-3 CSS colors and the color definition file.

    Fills ``self.colors`` (NAME -> hex string) and ``self.gtk_colors``
    (NAME -> Gdk.Color) from the ``@define-color`` statements of the theme
    CSS file and from the colors JSON file, whose values may be hex codes or
    aliases referencing CSS color names. If ``gi`` is not available, only
    fallback colors are provided.

    :raises ValueError: if the GTK theme or the color definition file is missing
    """
    # Provide fallback colors if a color (or the whole theme) is not found
    # instead of crashing
    self.colors = defaultdict(lambda: "#FFFFFF")
    try:
        from gi.repository import Gdk
        self.gtk_colors = defaultdict(lambda: Gdk.RGBA(0, 0, 0).to_color())
    except ImportError:
        # headless / no-GTK environment: no Gdk colors available
        self.gtk_colors = defaultdict(lambda: None)
        return

    dark_theme = self.get_config_value('THEME_DARK_VARIANT', True)
    css_filename = "gtk-dark.css" if dark_theme else "gtk.css"

    # Get colors from the GTK CSS file
    theme_path = self._get_theme_path()
    if is_custom_design_enabled():
        theme_path = self._get_custom_theme_path()
    css_file_path = os.path.join(theme_path, "gtk-3.0", css_filename)
    if not os.path.isfile(css_file_path):
        raise ValueError("GTK theme does not exist: {}".format(str(css_file_path)))
    with open(css_file_path) as f:
        lines = f.readlines()
    for line in lines:
        # raw string regex: "@define-color <name> <#hexcode>"
        match = re.match(r"\s*@define-color (\w*) (#[\w]{3,6})", line)
        if match:
            # css colors are mapped to capital-case color names
            # these colors can then be used in the colors definition file for the gaphas colors (e.g., colors.json)
            color_name = match.group(1).upper()
            color_code = match.group(2)
            self.colors[color_name] = color_code
            gtk_color = Gdk.RGBA()
            if gtk_color.parse(color_code):
                self.gtk_colors[color_name] = gtk_color.to_color()
            else:
                self.logger.warning("Could not parse color with name '{}' and code '{}'".format(
                    color_name, color_code))

    # Get color definitions
    colors_filename = "colors-dark.json" if dark_theme else "colors.json"
    color_file_path = os.path.join(theme_path, colors_filename)
    try:
        colors = storage_utils.load_objects_from_json(color_file_path)
    except IOError:
        raise ValueError("No color definitions found")

    for color_name, color_code in colors.items():
        # replace unicode strings with str strings
        color_name = str(color_name)
        color_code = str(color_code)
        gtk_color = Gdk.RGBA()
        if color_code.startswith("#"):
            if gtk_color.parse(color_code):
                color = gtk_color.to_color()
            else:
                self.logger.warning("Could not parse color with name '{}' and code '{}'".format(
                    color_name, color_code))
                continue
        elif color_code in self.colors:
            color = self.gtk_colors[color_code]
            # BUGFIX: resolve the alias to its hex string so self.colors keeps
            # mapping names to hex codes (the original assigned the Gdk.Color
            # object from self.gtk_colors here, mixing value types)
            color_code = self.colors[color_code]
        else:
            self.logger.warning("Undefined color alias '{}' for color name '{}'".format(color_code, color_name))
            continue
        self.gtk_colors[color_name] = color
        self.colors[color_name] = color_code
# TODO: Should be removed with next minor release if not os.path.exists(path_core_data): path_core_data = os.path.join(state_path, FILE_NAME_CORE_DATA_OLD) try: state_info = load_data_file(path_core_data) except ValueError, e: logger.exception("Error while loading state data: {0}".format(e)) return except LibraryNotFoundException, e: logger.error( "Library could not be loaded: {0}\n" "Skipping library and continuing loading the state machine".format( str(e.message))) state_info = storage_utils.load_objects_from_json(path_core_data, as_dict=True) state_id = state_info["state_id"] dummy_state = HierarchyState(LIBRARY_NOT_FOUND_DUMMY_STATE_NAME, state_id=state_id) # set parent of dummy state if isinstance(parent, ContainerState): parent.add_state(dummy_state, storage_load=True) else: dummy_state.parent = parent return dummy_state # Transitions and data flows are not added when loading a state, as also states are not added. # We have to wait until the child states are loaded, before adding transitions and data flows, as otherwise the # validity checks for transitions and data flows would fail if not isinstance(state_info, tuple): state = state_info
def load_state_recursively(parent, state_path=None, dirty_states=None):
    """Recursively loads the state

    It calls this method on each sub-state of a container state.

    :param parent: the root state of the last load call to which the loaded state will be added
    :param state_path: the path on the filesystem where to find the meta file for the state
    :param dirty_states: a list of states which changed during loading
    :return: the loaded state, a dummy state for a missing library, or None on error/skip
    """
    from rafcon.core.states.execution_state import ExecutionState
    from rafcon.core.states.container_state import ContainerState
    from rafcon.core.states.hierarchy_state import HierarchyState
    from rafcon.core.singleton import library_manager

    # BUGFIX: avoid the shared-mutable-default pitfall (dirty_states=[] was
    # shared across all calls that relied on the default)
    if dirty_states is None:
        dirty_states = []

    path_core_data = get_core_data_path(state_path)
    path_meta_data = get_meta_data_path(state_path)

    logger.debug("Load state recursively: {0}".format(str(state_path)))

    try:
        state_info = load_data_file(path_core_data)
    except ValueError as e:
        logger.exception("Error while loading state data: {0}".format(e))
        return
    except LibraryNotFoundException as e:
        if global_config.get_config_value("RAISE_ERROR_ON_MISSING_LIBRARY_STATES", False) \
                or not library_manager.show_dialog:
            raise
        logger.error("Library could not be loaded: {0}\n"
                     "Skipping library and continuing loading the state machine".format(e))
        state_info = storage_utils.load_objects_from_json(path_core_data, as_dict=True)
        # keep the meta data of the missing library, if available, so the dummy
        # state can be rendered at the original position
        missing_library_meta_data = None
        if os.path.exists(path_meta_data):
            missing_library_meta_data = Vividict(storage_utils.load_objects_from_json(path_meta_data))
        state_id = state_info["state_id"]
        outcomes = {outcome['outcome_id']: Outcome(outcome['outcome_id'], outcome['name'])
                    for outcome in state_info["outcomes"].values()}
        dummy_state = HierarchyState(LIBRARY_NOT_FOUND_DUMMY_STATE_NAME, state_id=state_id, outcomes=outcomes,
                                     is_dummy=True, missing_library_meta_data=missing_library_meta_data)
        library_name = state_info['library_name']
        path_parts = os.path.join(state_info['library_path'], library_name).split(os.sep)
        dummy_state.description = 'The Missing Library Path: %s\nThe Missing Library Name: %s\n\n' % (
            state_info['library_path'], library_name)
        # NOTE: library_manager is already imported at the top of this function;
        # the original re-imported it here redundantly
        if path_parts[0] in library_manager.library_root_paths:
            dummy_state.description += 'The Missing Library OS Path: %s' % os.path.join(
                library_manager.library_root_paths[path_parts[0]], *path_parts[1:])
        else:
            dummy_state.description += 'The missing library was located in the missing library root "%s"' \
                                       % path_parts[0]
        # set parent of dummy state
        if isinstance(parent, ContainerState):
            parent.add_state(dummy_state, storage_load=True)
        else:
            dummy_state.parent = parent
        return dummy_state
    except LibraryNotFoundSkipException:
        return None

    # Transitions and data flows are not added when loading a state, as also states are not added.
    # We have to wait until the child states are loaded, before adding transitions and data flows, as otherwise the
    # validity checks for transitions and data flows would fail
    if not isinstance(state_info, tuple):
        state = state_info
    else:
        state = state_info[0]
        transitions = state_info[1]
        data_flows = state_info[2]

    # set parent of state
    if parent is not None and isinstance(parent, ContainerState):
        parent.add_state(state, storage_load=True)
    else:
        state.parent = parent

    # read script file if state is an ExecutionState
    if isinstance(state, ExecutionState):
        script_text = read_file(state_path, state.script.filename)
        state.script.set_script_without_compilation(script_text)

    # load semantic data
    try:
        semantic_data = load_data_file(os.path.join(state_path, SEMANTIC_DATA_FILE))
        state.semantic_data = semantic_data
    except Exception:
        # semantic data file does not have to be there
        pass

    # load child states
    for p in os.listdir(state_path):
        child_state_path = os.path.join(state_path, p)
        if os.path.isdir(child_state_path):
            if not os.path.exists(os.path.join(child_state_path, FILE_NAME_CORE_DATA)):
                # this means that child_state_path is a folder, not containing a valid state
                # this also happens when pip creates __pycache__ folders for the script.py files upon installing rafcon
                continue
            child_state = load_state_recursively(state, child_state_path, dirty_states)
            if not child_state:
                return None

    # Now we can add transitions and data flows, as all child states were added
    if isinstance(state_info, tuple):
        safe_init = global_config.get_config_value("LOAD_SM_WITH_CHECKS", True)
        if safe_init:
            # assigning via the property triggers all validity checks of the state machine
            state.transitions = transitions
        else:
            # fast path: bypass the checks and wire the parent references manually
            state._transitions = transitions
            for transition in state.transitions.values():
                transition._parent = ref(state)
        # data flows are assigned without checks in both modes
        # (the original additionally assigned state._data_flows twice in the
        # unsafe branch; the redundant assignment was removed)
        state._data_flows = data_flows
        for data_flow in state.data_flows.values():
            data_flow._parent = ref(state)

    state.file_system_path = state_path

    if state.marked_dirty:
        dirty_states.append(state)
    return state