def __init__(self, global_conf=CoreConfig()): """ Initializes a DataStore object """ # dictionary {backend_name_string: Backend instance} self.backends = {} self.treefactory = TreeFactory() self._tasks = self.treefactory.get_tasks_tree() self.requester = requester.Requester(self, global_conf) self.tagfile = None self._tagstore = self.treefactory.get_tags_tree(self.requester) self.load_tag_tree() self._backend_signals = BackendSignals() # Flag when turned to true, all pending operation should be # completed and then GTG should quit self.please_quit = False # The default backend must be loaded first. This flag turns to True # when the default backend loading has finished. self.is_default_backend_loaded = False self._backend_signals.connect('default-backend-loaded', self._activate_non_default_backends) self.filtered_datastore = FilteredDataStore(self) self._backend_mutex = threading.Lock()
def load_tag_tree(self):
    """ Loads the tag tree from a xml file """
    xml_path = os.path.join(CoreConfig().get_data_dir(), TAG_XMLFILE)
    doc, xmlstore = cleanxml.openxmlfile(xml_path, TAG_XMLROOT)
    for node in xmlstore.childNodes:
        tagname = node.getAttribute("name")
        parent = node.getAttribute("parent")
        # Collect every attribute except the structural ones
        extra_attributes = {}
        attributes = node.attributes
        for index in range(attributes.length):
            key = attributes.item(index).name
            if key in ["name", "parent"]:
                continue
            extra_attributes[key] = node.getAttribute(key)
        # A tag parented under the special search tag is a saved search
        if parent == CoreConfig.SEARCH_TAG:
            query = node.getAttribute("query")
            tag = self.new_search_tag(tagname, query, extra_attributes)
        else:
            tag = self.new_tag(tagname, extra_attributes)
        if parent:
            tag.set_parent(parent)
    self.tagfile = xml_path
def core_main_init(options=None, args=None):
    '''
    Part of the main function prior to the UI initialization.
    '''
    # Debugging subsystem initialization
    Log.setLevel(logging.DEBUG if options.debug else logging.INFO)
    if options.debug:
        Log.debug("Debug output enabled.")
    Log.set_debugging_mode(options.debug)
    config = CoreConfig()
    check_instance(config.get_data_dir(), args)
    saved_backends = BackendFactory().get_saved_backends_list()
    # Load data store
    datastore = DataStore(config)
    # Register backends
    for backend_dic in saved_backends:
        datastore.register_backend(backend_dic)
    # save the backends directly to be sure projects.xml is written
    datastore.save(quit=False)
    # Launch task browser
    return datastore, datastore.get_requester()
def _configure_icon_theme(self):
    '''
    Inform gtk on the location of the backends icons (which is in the
    GTG directory tree, and not in the default location for icons
    '''
    theme = Gtk.IconTheme.get_default()
    self.icon_theme = theme
    for icons_dir in CoreConfig().get_icons_directories():
        theme.prepend_search_path(icons_dir)
def get_path(self):
    """
    Return the current path to XML.

    A bare file name (no path separator) is treated as relative to the
    data directory that holds projects.xml.
    """
    configured = self._parameters["path"]
    if os.sep in configured:
        # Already a real path: just normalize it
        return os.path.abspath(configured)
    # Local path: resolve it inside the GTG data directory
    data_dir = CoreConfig().get_data_dir()
    return os.path.abspath(os.path.join(data_dir, configured))
def _store_pickled_file(self, path, data):
    '''
    A helper function to save some object in a file.

    @param path: a relative path. A good choice is
                 "backend_name/object_name"
    @param data: the object
    '''
    path = os.path.join(CoreConfig().get_data_dir(), path)
    # mkdir -p: an already existing directory is fine, any other
    # failure is re-raised (fixed from the py2-only "except E, e" form)
    try:
        os.makedirs(os.path.dirname(path))
    except OSError as exception:
        if exception.errno != errno.EEXIST:
            raise
    # Actually persist the object: the previous version stopped after
    # creating the directory and never wrote `data` at all. Binary
    # mode matches the 'rb' the pickle loader expects.
    with open(path, 'wb') as stream:
        pickle.dump(data, stream)
def _read_backend_configuration_file(self):
    '''
    Reads the file describing the current backend configuration
    (project.xml) and returns a list of dictionaries, each containing:
     - the xml object defining the backend characteristics under
       "xmlobject"
     - the name of the backend under "module"
    '''
    # Read configuration file, if it does not exist, create one
    config_path = os.path.join(CoreConfig().get_data_dir(),
                               CoreConfig.DATA_FILE)
    doc, configxml = cleanxml.openxmlfile(config_path, "config")
    # collect configured backends
    backends = []
    for element in doc.getElementsByTagName("backend"):
        backends.append({"xmlobject": element,
                         "module": element.getAttribute("module")})
    return backends
def save(self, quit=False):
    """ Saves the backends parameters.

    Serializes every backend's parameters into the config XML file
    (projects.xml) and saves the tag store.

    @param quit: If quit is true, backends are shut down
    """
    # Wait for the task-fetching thread before serializing.
    # NOTE(review): the broad except looks like a deliberate best-effort
    # (the thread may not exist yet) — confirm before narrowing it.
    try:
        self.start_get_tasks_thread.join()
    except Exception:
        pass
    doc, xmlconfig = cleanxml.emptydoc("config")
    # we ask all the backends to quit first.
    if quit:
        # we quit backends in parallel
        threads_dic = {}
        for b in self.get_all_backends():
            thread = threading.Thread(target=b.quit)
            threads_dic[b.get_id()] = thread
            thread.start()
        for backend_id, thread in threads_dic.iteritems():
            # after 20 seconds, we give up
            thread.join(20)
            if thread.isAlive():
                Log.error("The %s backend stalled while quitting",
                          backend_id)
    # we save the parameters (disabled backends included, so their
    # configuration survives a restart)
    for b in self.get_all_backends(disabled=True):
        t_xml = doc.createElement("backend")
        for key, value in b.get_parameters().iteritems():
            if key in ["backend", "xmlobject"]:
                # We don't want parameters, backend, xmlobject:
                # we'll create them at next startup
                continue
            # Parameters are stored as strings; cast through the
            # backend's declared parameter type
            param_type = b.get_parameter_type(key)
            value = b.cast_param_type_to_string(param_type, value)
            t_xml.setAttribute(str(key), value)
        # Saving all the projects at close
        xmlconfig.appendChild(t_xml)
    datadir = CoreConfig().get_data_dir()
    datafile = os.path.join(datadir, CoreConfig.DATA_FILE)
    # backup=True keeps the previous projects.xml around
    cleanxml.savexml(datafile, doc, backup=True)
    # Saving the tagstore
    self.save_tagtree()
def _load_pickled_file(self, path, default_value=None):
    '''
    A helper function to load some object from a file.

    @param path: the relative path of the file
    @param default_value: the value to return if the file is missing or
                          corrupt
    @returns object: the needed object, or default_value
    '''
    path = os.path.join(CoreConfig().get_data_dir(), path)
    if not os.path.exists(path):
        return default_value

    # Pickle data is binary: open with 'rb' to match the 'wb' used by
    # _store_pickled_file (text mode corrupts the stream on platforms
    # that translate newlines)
    with open(path, 'rb') as file:
        try:
            return pickle.load(file)
        except Exception:
            Log.error("Pickle file for backend '%s' is damaged" %
                      self.get_name())

    # Loading file failed, trying backups
    for i in range(1, PICKLE_BACKUP_NBR + 1):
        backup_file = "%s.bak.%d" % (path, i)
        if os.path.exists(backup_file):
            with open(backup_file, 'rb') as file:
                try:
                    data = pickle.load(file)
                    Log.info("Successfully restored backup #%d for '%s'" %
                             (i, self.get_name()))
                    return data
                except Exception:
                    Log.error("Backup #%d for '%s' is damaged as well" %
                              (i, self.get_name()))

    # Data could not be loaded, degrade to default data
    Log.error("There is no suitable backup for '%s', "
              "loading default data" % self.get_name())
    return default_value
def _store_pickled_file(self, path, data):
    '''
    A helper function to save some object in a file.

    @param path: a relative path. A good choice is
                 "backend_name/object_name"
    @param data: the object
    '''
    path = os.path.join(CoreConfig().get_data_dir(), path)
    # mkdir -p: tolerate an already existing directory only
    try:
        os.makedirs(os.path.dirname(path))
    except OSError as error:
        if error.errno != errno.EEXIST:
            raise
    # Rotate backups oldest-first: .bak.(k-1) becomes .bak.k
    for index in range(PICKLE_BACKUP_NBR, 1, -1):
        older = "%s.bak.%d" % (path, index)
        newer = "%s.bak.%d" % (path, index - 1)
        if os.path.exists(older):
            os.unlink(older)
        if os.path.exists(newer):
            os.rename(newer, older)
    # The current file becomes backup #1
    if PICKLE_BACKUP_NBR > 0 and os.path.exists(path):
        os.rename(path, "%s.bak.1" % path)
    # Finally write the fresh pickle (binary mode, matching the loader)
    with open(path, 'wb') as stream:
        pickle.dump(data, stream)
def SetUp(self):
    # Point both the data and configuration directories at a local
    # fixture directory so the tests never touch the user's real GTG
    # data.
    # NOTE(review): unittest only auto-invokes a method spelled "setUp"
    # (lower-case s) — confirm this camel-case variant is called
    # explicitly somewhere, otherwise it never runs.
    CoreConfig().set_data_dir("./test_data")
    CoreConfig().set_conf_dir("./test_data")