def restore_backend_from_xml(self, dic):
    '''
    Function restoring a backend from its xml description.
    dic should be a dictionary containing at least the key
        - "module", with the module name
        - "xmlobject", with its xml description.
    Every other key is passed as-is to the backend, as parameter.

    Returns the backend instance, or None if something goes wrong
    '''
    if not "module" in dic or not "xmlobject" in dic:
        Log.debug("Malformed backend configuration found! %s" % dic)
        # BUGFIX: the old code fell through here with mandatory keys
        # missing and crashed with a KeyError below; abort explicitly.
        return None
    module = self.get_backend(dic["module"])
    if module is None:
        Log.debug("could not load module for backend %s" % dic["module"])
        return None
    # we pop the xml object, as it will be redundant when the parameters
    # are set directly in the dict
    xp = dic.pop("xmlobject")
    # Building the dictionary
    parameters_specs = module.Backend.get_static_parameters()
    dic["pid"] = str(xp.getAttribute("pid"))
    for param_name, param_dic in parameters_specs.iteritems():
        if xp.hasAttribute(param_name):
            # we need to convert the parameter to the right format.
            # we fetch the format from the static_parameters
            param_type = param_dic[GenericBackend.PARAM_TYPE]
            param_value = GenericBackend.cast_param_type_from_string(
                xp.getAttribute(param_name), param_type)
            dic[param_name] = param_value
    # We put the backend itself in the dict
    dic["backend"] = module.Backend(dic)
    return dic["backend"]
def openxmlfile(zefile,root ): tmpfile = zefile+'__' # print "opening %s file" %zefile try: if os.path.exists(zefile): f = open(zefile, "r") elif os.path.exists(tmpfile): Log.debug("Something happened to the tags file. Using backup") os.rename(tmpfile, zefile) f = open(zefile, "r") else: # Creating empty file doc,xmlproject = emptydoc(root) newfile = savexml(zefile, doc) # use our function to save file if not newfile: sys.exit(1) return openxmlfile(zefile, root) # recursive call stringed = f.read() stringed = cleanString(stringed,tab,enter) doc = xml.dom.minidom.parseString(stringed) # doc = xml.dom.minidom.parse(f) # print "cleaning %s" %zefile cleanDoc(doc,tab,enter) xmlproject = doc.getElementsByTagName(root)[0] f.close() return doc,xmlproject except IOError, msg: print msg sys.exit(1)
def __init__(self): """ Creates a dictionary of the currently available backend modules """ super(BackendFactory, self).__init__() if hasattr(self, "backend_modules"): # This object has already been constructed return self.backend_modules = {} # Look for backends in the GTG/backends dir this_dir = os.path.dirname(__file__) backend_files = filter( lambda f: f.endswith(".py") and f.startswith(self.BACKEND_PREFIX), os.listdir(this_dir)) # Create module names module_names = map(lambda f: f.replace(".py", ""), backend_files) Log.debug("Backends found: " + str(module_names)) # Load backend modules for module_name in module_names: extended_module_name = "GTG.backends." + module_name try: __import__(extended_module_name) except ImportError, exception: # Something is wrong with this backend, skipping Log.warning("Backend %s could not be loaded: %s" % (module_name, str(exception))) continue except Exception, exception: # Other exception log as errors Log.error("Malformated backend %s: %s" % (module_name, str(exception))) continue
def __init__(self):
    """Build the mapping of available backend modules (shared state)."""
    super().__init__()
    if hasattr(self, "backend_modules"):
        # Shared state already initialized by a previous instance.
        return
    self.backend_modules = {}
    discovered = self._find_backend_files()
    # Strip the ".py" suffix to obtain importable module names
    module_names = [name.replace(".py", "") for name in discovered]
    Log.debug("Backends found: " + str(module_names))
    for module_name in module_names:
        extended_module_name = "GTG.backends." + module_name
        try:
            __import__(extended_module_name)
        except ImportError as exception:
            # Something is wrong with this backend, skipping
            Log.warning("Backend %s could not be loaded: %s" %
                        (module_name, str(exception)))
            continue
        except Exception as exception:
            # Other exception log as errors
            Log.error("Malformated backend %s: %s" %
                      (module_name, str(exception)))
            continue
        # Remember the imported module for later lookup by name
        self.backend_modules[module_name] = sys.modules[extended_module_name]
def core_main_init(options=None, args=None):
    '''
    Part of the main function prior to the UI initialization.

    @param options: parsed command-line options (may be None)
    @param args: remaining command-line arguments
    @returns: (datastore, requester) tuple
    '''
    # Debugging subsystem initialization
    # BUGFIX: `options` defaults to None but the old code dereferenced
    # options.debug unconditionally, crashing when it was omitted.
    debug = options is not None and bool(options.debug)
    if debug:
        Log.setLevel(logging.DEBUG)
        Log.debug("Debug output enabled.")
    else:
        Log.setLevel(logging.INFO)
    Log.set_debugging_mode(debug)
    config = CoreConfig()
    check_instance(config.get_data_dir(), args)
    backends_list = BackendFactory().get_saved_backends_list()
    # Load data store
    ds = DataStore(config)
    # Register backends
    for backend_dic in backends_list:
        ds.register_backend(backend_dic)
    # save the backends directly to be sure projects.xml is written
    ds.save(quit=False)
    # Launch task browser
    req = ds.get_requester()
    return ds, req
def __init__(self): """ Creates a dictionary of the currently available backend modules """ super(BackendFactory, self).__init__() if hasattr(self, "backend_modules"): # This object has already been constructed return self.backend_modules = {} # Look for backends in the GTG/backends dir this_dir = os.path.dirname(__file__) backend_files = filter(lambda f: f.endswith(".py") and f.startswith(self.BACKEND_PREFIX), os.listdir(this_dir)) # Create module names module_names = map(lambda f: f.replace(".py", ""), backend_files) Log.debug("Backends found: " + str(module_names)) # Load backend modules for module_name in module_names: extended_module_name = "GTG.backends." + module_name try: __import__(extended_module_name) except ImportError, exception: # Something is wrong with this backend, skipping Log.warning("Backend %s could not be loaded: %s" % (module_name, str(exception))) continue except Exception, exception: # Other exception log as errors Log.error("Malformated backend %s: %s" % (module_name, str(exception))) continue
def __init__(self):
    """Build the mapping of available backend modules (Borg pattern)."""
    Borg.__init__(self)
    if hasattr(self, "backend_modules"):
        # The Borg shared state was already populated: nothing to do.
        return
    self.backend_modules = {}
    found_files = self._find_backend_files()
    # Strip the ".py" suffix to obtain importable module names
    module_names = [fname.replace(".py", "") for fname in found_files]
    Log.debug("Backends found: " + str(module_names))
    for module_name in module_names:
        extended_module_name = "GTG.backends." + module_name
        try:
            __import__(extended_module_name)
        except ImportError as exception:
            # Something is wrong with this backend, skipping
            Log.warning("Backend %s could not be loaded: %s" %
                        (module_name, str(exception)))
            continue
        except Exception as exception:
            # Other exception log as errors
            Log.error("Malformated backend %s: %s" %
                      (module_name, str(exception)))
            continue
        # Keep a handle on the imported module for later lookup
        self.backend_modules[module_name] = sys.modules[extended_module_name]
def set_task(self, task):
    """
    See GenericBackend for an explanation of this function.

    Pushes one GTG task to Tomboy: the sync engine decides whether the
    matching note must be created, updated or removed, and this method
    carries the decision out over DBus (each DBus call is wrapped in a
    watchdog so a hung Tomboy cannot block us forever).
    """
    self.cancellation_point()
    is_syncable = self._gtg_task_is_syncable_per_attached_tags(task)
    tid = task.get_id()
    with self.datastore.get_backend_mutex():
        with self.TomboyConnection(self, *self.BUS_ADDRESS) as tomboy:
            has_task = self.datastore.has_task
            has_note = tomboy.NoteExists
            can_sync = is_syncable
            with self.DbusWatchdog(self):
                action, note = self.sync_engine.analyze_local_id(
                    tid, has_task, has_note, can_sync)
            Log.debug("processing gtg (%s, %d)" % (action, is_syncable))
            if action == SyncEngine.ADD:
                # GTG allows multiple tasks with the same name,
                # Tomboy doesn't. we need to handle the renaming
                # manually
                title = task.get_title()
                duplicate_counter = 1
                with self.DbusWatchdog(self):
                    note = tomboy.CreateNamedNote(title)
                    while note == "":
                        # empty result: the name is taken, retry with a
                        # numeric suffix
                        duplicate_counter += 1
                        note = tomboy.CreateNamedNote(
                            title + "(%d)" % duplicate_counter)
                if duplicate_counter != 1:
                    # if we needed to rename, we have to rename also
                    # the gtg task
                    task.set_title(title + " (%d)" % duplicate_counter)
                self._populate_note(note, task)
                self.record_relationship(
                    local_id=tid,
                    remote_id=note,
                    meme=SyncMeme(task.get_modified(),
                                  self.get_modified_for_note(note),
                                  "GTG"),
                )
            elif action == SyncEngine.UPDATE:
                # both sides exist: push only if the GTG copy is newer
                meme = self.sync_engine.get_meme_from_local_id(
                    task.get_id())
                modified_for = self.get_modified_for_note(note)
                newest = meme.which_is_newest(task.get_modified(),
                                              modified_for)
                if newest == "local":
                    self._populate_note(note, task)
                    meme.set_local_last_modified(task.get_modified())
                    meme.set_remote_last_modified(
                        self.get_modified_for_note(note))
                    self.save_state()
            elif action == SyncEngine.REMOVE:
                # the note is gone: drop the GTG task too
                self.datastore.request_task_deletion(tid)
                try:
                    self.sync_engine.break_relationship(local_id=tid)
                    self.save_state()
                except KeyError:
                    pass
            elif action == SyncEngine.LOST_SYNCABILITY:
                self._exec_lost_syncability(tid, note)
def _process_rtm_task(self, rtm_task_id):
    '''
    Takes a rtm task id and carries out the necessary operations to
    refresh the sync state

    @param rtm_task_id: id of the Remember The Milk task to examine
    '''
    self.cancellation_point()
    if not self.rtm_proxy.is_authenticated():
        # cannot touch RTM without credentials; bail out silently
        return
    rtm_task = self.rtm_proxy.get_rtm_tasks_dict()[rtm_task_id]
    is_syncable = self._rtm_task_is_syncable_per_attached_tags(rtm_task)
    # ask the sync engine what to do with this remote task
    action, tid = self.sync_engine.analyze_remote_id(
        rtm_task_id,
        self.datastore.has_task,
        self.rtm_proxy.has_rtm_task,
        is_syncable)
    Log.debug("GTG<-RTM set task (%s, %s)" % (action, is_syncable))
    if action is None:
        return
    if action == SyncEngine.ADD:
        if rtm_task.get_status() != Task.STA_ACTIVE:
            # OPTIMIZATION:
            # we don't sync tasks that have already been closed before we
            # even saw them
            return
        # new remote task: mirror it as a GTG task
        tid = str(uuid.uuid4())
        task = self.datastore.task_factory(tid)
        self._populate_task(task, rtm_task)
        meme = SyncMeme(task.get_modified(),
                        rtm_task.get_modified(),
                        "RTM")
        self.sync_engine.record_relationship(local_id=tid,
                                             remote_id=rtm_task_id,
                                             meme=meme)
        self.datastore.push_task(task)
    elif action == SyncEngine.UPDATE:
        # both sides exist: copy over only if the remote copy is newer
        task = self.datastore.get_task(tid)
        with self.datastore.get_backend_mutex():
            meme = self.sync_engine.get_meme_from_remote_id(rtm_task_id)
            newest = meme.which_is_newest(task.get_modified(),
                                          rtm_task.get_modified())
            if newest == "remote":
                self._populate_task(task, rtm_task)
                meme.set_remote_last_modified(rtm_task.get_modified())
                meme.set_local_last_modified(task.get_modified())
            else:
                # we skip saving the state
                return
    elif action == SyncEngine.REMOVE:
        # the local task is gone: remove the remote one too
        try:
            rtm_task.delete()
            self.sync_engine.break_relationship(remote_id=rtm_task_id)
        except KeyError:
            pass
    elif action == SyncEngine.LOST_SYNCABILITY:
        self._exec_lost_syncability(tid, rtm_task)
    self.save_state()
def on_task_modified(self, task_id, path):
    """ Stop task if it is tracked and it is Done/Dismissed """
    Log.debug('Hamster: task modified %s', task_id)
    requester = self.plugin_api.get_requester()
    task = requester.get_task(task_id)
    # ignore tasks that no longer exist; stop tracking closed ones
    if task and task.get_status() in (Task.STA_DISMISSED, Task.STA_DONE):
        self.stop_task(task_id)
def adding_tag(tname, tag):
    # NOTE(review): this function reads `self` without receiving it as a
    # parameter — it can only work as a closure defined inside a method of
    # the owning object; confirm the enclosing scope provides `self`.
    #
    # Register a new tag node: add a per-tag filter to the task tree,
    # store the tag node, hook its save callback and drop it from the
    # pending `added_tag` map.
    if not self.__tagstore.has_node(tname):
        # 'transparent' is passed to the tag filter — presumably meaning
        # the filter does not hide other nodes; verify against tag_filter
        p = {'tag': tname, 'transparent': True}
        self.__tasks.add_filter(tname, self.treefactory.tag_filter,
                                parameters=p)
        self.__tagstore.add_node(tag)
        tag.set_save_callback(self.save)
        # the tag is no longer pending
        self.added_tag.pop(tname)
        Log.debug("********* tag added %s *******" % tname)
    else:
        print "Warning: Trying to add tag %s multiple times" % tname
def open_browser(self):
    """Lazily create the task browser, warning the user when the
    localfile backend had to fall back on its backup file."""
    if not self.browser:
        self.browser = TaskBrowser(self.req, self)
    # notify user if backup was used
    for backend in self.req.get_all_backends():
        used_backup = (backend.get_name() == "backend_localfile"
                       and backend.used_backup())
        if used_backup:
            backend.notify_user_about_backup()
    Log.debug("Browser is open")
def get_backend(self, backend_name):
    '''
    Returns the backend module registered under *backend_name*, or None
    (after logging a debug message) when no such backend is known.
    '''
    try:
        return self.backend_modules[backend_name]
    except KeyError:
        Log.debug("Trying to load backend %s, but failed!" % backend_name)
        return None
def _process_tomboy_note(self, note):
    '''
    Given a tomboy note, finds out if it must be synced to a GTG note and,
    if so, it carries out the synchronization (by creating or updating a
    GTG task, or deleting itself if the related task has been deleted)

    @param note: a Tomboy note id
    '''
    with self.datastore.get_backend_mutex():
        self.cancellation_point()
        is_syncable = self._tomboy_note_is_syncable(note)
        has_task = self.datastore.has_task
        note_exists = self._tomboy_note_exists
        # the DBus call is watched so a hung Tomboy cannot block forever
        with self.DbusWatchdog(self):
            action, tid = self.sync_engine.analyze_remote_id(note,
                                                             has_task,
                                                             note_exists,
                                                             is_syncable)
        Log.debug("processing tomboy (%s, %s)" % (action, is_syncable))
        if action == SyncEngine.ADD:
            # new remote note: create a matching GTG task
            tid = str(uuid.uuid4())
            task = self.datastore.task_factory(tid)
            self._populate_task(task, note)
            modified_for = self.get_modified_for_note(note)
            self.record_relationship(local_id=tid,
                                     remote_id=note,
                                     meme=SyncMeme(task.get_modified(),
                                                   modified_for,
                                                   self.get_id()))
            self.datastore.push_task(task)
        elif action == SyncEngine.UPDATE:
            # both sides exist: pull only if the note is newer
            task = self.datastore.get_task(tid)
            meme = self.sync_engine.get_meme_from_remote_id(note)
            newest = meme.which_is_newest(task.get_modified(),
                                          self.get_modified_for_note(note))
            if newest == "remote":
                self._populate_task(task, note)
                meme.set_local_last_modified(task.get_modified())
                meme.set_remote_last_modified(
                    self.get_modified_for_note(note))
                self.save_state()
        elif action == SyncEngine.REMOVE:
            # the GTG task is gone: delete the note as well
            with self.TomboyConnection(self, *self.BUS_ADDRESS) as tomboy:
                with self.DbusWatchdog(self):
                    tomboy.DeleteNote(note)
                try:
                    self.sync_engine.break_relationship(remote_id=note)
                except KeyError:
                    pass
        elif action == SyncEngine.LOST_SYNCABILITY:
            self._exec_lost_syncability(tid, note)
def set_task(self, task):
    '''
    See GenericBackend for an explanation of this function.

    Pushes a single GTG task to Evolution: the sync engine decides
    whether the matching ECalComponent must be created, updated or
    removed, and this method carries the decision out.
    '''
    tid = task.get_id()
    is_syncable = self._gtg_task_is_syncable_per_attached_tags(task)
    action, evo_task_id = self.sync_engine.analyze_local_id(
        tid, self.datastore.has_task, self._evo_has_task, is_syncable)
    Log.debug('GTG->Evo set task (%s, %s)' % (action, is_syncable))
    if action is None:
        return
    if action == SyncEngine.ADD:
        # new local task: create a matching Evolution TODO component
        evo_task = evolution.ecal.ECalComponent(
            ical=evolution.ecal.CAL_COMPONENT_TODO)
        with self.datastore.get_backend_mutex():
            self._evolution_tasks.add_object(evo_task)
            self._populate_evo_task(task, evo_task)
            meme = SyncMeme(task.get_modified(),
                            self._evo_get_modified(evo_task),
                            "GTG")
            self.sync_engine.record_relationship(
                local_id=tid, remote_id=evo_task.get_uid(), meme=meme)
    elif action == SyncEngine.UPDATE:
        # both sides exist: push only if the GTG copy is newer
        with self.datastore.get_backend_mutex():
            evo_task = self._evo_get_task(evo_task_id)
            meme = self.sync_engine.get_meme_from_local_id(task.get_id())
            newest = meme.which_is_newest(task.get_modified(),
                                          self._evo_get_modified(evo_task))
            if newest == "local":
                self._populate_evo_task(task, evo_task)
                meme.set_remote_last_modified(
                    self._evo_get_modified(evo_task))
                meme.set_local_last_modified(task.get_modified())
            else:
                # we skip saving the state
                return
    elif action == SyncEngine.REMOVE:
        # the remote task is gone: drop the GTG task and the link
        self.datastore.request_task_deletion(tid)
        try:
            self.sync_engine.break_relationship(local_id=tid)
        except KeyError:
            pass
    elif action == SyncEngine.LOST_SYNCABILITY:
        evo_task = self._evo_get_task(evo_task_id)
        self._exec_lost_syncability(tid, evo_task)
    self.save_state()
def delete_task(self, tid, recursive=True):
    """Delete the task 'tid' and, by default, delete recursively
    all the childrens.

    Note: this modifies the datastore.

    @param tid: The id of the task to be deleted.
    @param recursive: also remove every child when True (default).
    """
    # send the signal before actually deleting the task !
    message = "deleting task %s" % tid
    Log.debug(message)
    tree = self.__basetree
    return tree.del_node(tid, recursive=recursive)
def remove_widget_from_taskeditor(self, widg_id):
    """Remove a widget from the bottom of the task editor dialog

    @param widg_id: identifier of the Gtk.Widget that is going to be
        removed
    """
    # guard clause: nothing to do outside the editor or without an id
    if not (self.is_editor() and widg_id):
        return
    try:
        container = self.__builder.get_object("vbox4")
        if container and widg_id in self.taskwidget_widg:
            container.remove(self.taskwidget_widg.pop(widg_id))
    except Exception as e:
        Log.debug("Error removing the toolbar item in the TaskEditor:"
                  "%s" % e)
def new_relationship(self, par, chi):
    """Create a parent/child relationship, queueing it when no tree
    exists yet.

    Raises Exception when thread protection is on and the caller is not
    the owning thread.

    @param par: parent node id
    @param chi: child node id
    @returns: the tree's result, or False when the relationship is only
        queued as pending
    """
    if self.thread_protection:
        current = threading.current_thread()
        if current != self.thread:
            raise Exception('! could not new_relationship from thread %s'
                            % current)
    if not self.tree:
        self.pending_relationship.append([par, chi])
        # it's pending, we return False
        Log.debug("** There's still no tree, relationship is pending")
        return False
    return self.tree.new_relationship(par, chi)
def _process_evo_task(self, evo_task_id):
    '''
    Takes an evolution task id and carries out the necessary operations to
    refresh the sync state

    @param evo_task_id: id of the Evolution task to examine
    '''
    self.cancellation_point()
    evo_task = self._evo_get_task(evo_task_id)
    is_syncable = self._evo_task_is_syncable(evo_task)
    action, tid = self.sync_engine.analyze_remote_id(
        evo_task_id,
        self.datastore.has_task,
        self._evo_has_task,
        is_syncable)
    Log.debug('GTG<-Evo set task (%s, %s)' % (action, is_syncable))
    if action == SyncEngine.ADD:
        # new remote task: mirror it as a GTG task
        with self.datastore.get_backend_mutex():
            tid = str(uuid.uuid4())
            task = self.datastore.task_factory(tid)
            self._populate_task(task, evo_task)
            meme = SyncMeme(task.get_modified(),
                            self._evo_get_modified(evo_task),
                            "GTG")
            self.sync_engine.record_relationship(local_id=tid,
                                                 remote_id=evo_task_id,
                                                 meme=meme)
            self.datastore.push_task(task)
    elif action == SyncEngine.UPDATE:
        # both sides exist: pull only if the remote copy is newer
        with self.datastore.get_backend_mutex():
            task = self.datastore.get_task(tid)
            meme = self.sync_engine.get_meme_from_remote_id(evo_task_id)
            newest = meme.which_is_newest(task.get_modified(),
                                          self._evo_get_modified(evo_task))
            if newest == "remote":
                self._populate_task(task, evo_task)
                meme.set_remote_last_modified(
                    self._evo_get_modified(evo_task))
                meme.set_local_last_modified(task.get_modified())
    elif action == SyncEngine.REMOVE:
        # BUGFIX: a stray `return` at the top of this branch made the
        # deletion code below unreachable; it has been removed.
        try:
            evo_task = self._evo_get_task(evo_task_id)
            self._delete_evolution_task(evo_task)
            # BUGFIX: break_relationship expects the remote *id*, not the
            # task object (was: remote_id=evo_task)
            self.sync_engine.break_relationship(remote_id=evo_task_id)
        except KeyError:
            pass
    elif action == SyncEngine.LOST_SYNCABILITY:
        self._exec_lost_syncability(tid, evo_task)
    self.save_state()
def __script_worker(self):
    """Run the template's post-processing script on the exported document.

    The script receives the current document path appended to its command
    line and prints the resulting path (or "ERROR") on stdout, which then
    replaces self.document_path. Runs in a worker thread and signals
    completion via the `document_ready` event.
    """
    try:
        # NOTE(review): the document path is spliced into a shell command
        # string, so paths containing spaces or shell metacharacters will
        # break (or be interpreted by) /bin/sh — consider building the
        # argv list directly instead of going through `sh -c`.
        self.document_path = subprocess.Popen(
            args=["/bin/sh", "-c",
                  self.template.get_script_path() + " " +
                  self.document_path],
            shell=False,
            stdout=subprocess.PIPE,
        ).communicate()[0]
    except Exception:
        # BUGFIX: was a bare `except:`, which also swallowed SystemExit
        # and KeyboardInterrupt; keep the best-effort behavior but only
        # for ordinary exceptions.
        pass
    if self.document_path == "ERROR":
        Log.debug("Document creation failed")
        self.document_path = None
    # wake up whoever is waiting on the result
    document_ready.set()
def remove_widget_from_taskeditor(self, widg_id):
    """Remove a widget from the bottom of the task editor dialog

    @param widg_id: identifier of the gtk.Widget that is going to be
        removed
    """
    if self.is_editor() and widg_id:
        try:
            # 'vbox4' is the container holding plugin widgets in the
            # editor glade/builder file
            wi = self.__builder.get_object('vbox4')
            if wi and widg_id in self.taskwidget_widg:
                wi.remove(self.taskwidget_widg.pop(widg_id))
        except Exception, e:
            Log.debug("Error removing the toolbar item in the TaskEditor:"
                      "%s" % e)
def add_child(self, id):
    """Append *id* to this node's children and record the relationship.

    Raises Exception when thread protection is enabled and the caller is
    not the owning thread.

    @param id: the id of the child node
    @returns: the result of new_relationship() for a new child, False if
        *id* was already a child
    """
    if self.thread_protection:
        t = threading.current_thread()
        if t != self.thread:
            # BUGFIX: the two message halves used to be concatenated
            # without separation ("thread Xshould be Y")
            raise Exception('! could not add_child from thread %s, '
                            'should be %s' % (t, self.thread))
    if id not in self.children:
        self.children.append(id)
        toreturn = self.new_relationship(self.get_id(), id)
    else:
        Log.debug("%s was already in children of %s" % (id, self.get_id()))
        toreturn = False
    return toreturn
def __init__(self, req):
    """Build the GTG UI manager: configuration, clipboard, timer, the
    (initially hidden) task browser, plugin engine, dialogs and the DBus
    wrapper.

    @param req: the GTG requester giving access to configuration and data
    """
    GObject.GObject.__init__(self)
    self.req = req
    self.config_obj = self.req.get_global_config()
    self.browser_config = self.config_obj.get_subconfig("browser")
    self.plugins_config = self.config_obj.get_subconfig("plugins")
    self.task_config = self.config_obj.get_taskconfig()
    # Editors
    # This is the list of tasks that are already opened in an editor
    # of course it's empty right now
    self.opened_task = {}
    self.browser = None
    self.__start_browser_hidden = False
    self.gtk_terminate = False  # if true, the gtk main is not started
    # if true, closing the last window doesn't quit GTG
    # (GTG lives somewhere else without GUI, e.g. notification area)
    self.daemon_mode = False
    # Shared clipboard
    self.clipboard = clipboard.TaskClipboard(self.req)
    # Initialize Timer
    self.config = self.req.get_config('browser')
    self.timer = Timer(self.config)
    # Browser (still hidden)
    self.browser = TaskBrowser(self.req, self)
    self.__init_plugin_engine()
    if not self.__start_browser_hidden:
        self.show_browser()
    # Deletion UI
    self.delete_dialog = None
    # Preferences and Backends windows
    # Initialize dialogs
    self.preferences = PreferencesDialog(self.req, self)
    self.plugins = PluginsDialog(self.config_obj)
    self.edit_backends_dialog = None
    # Tag Editor
    self.tag_editor_dialog = None
    # DBus
    DBusTaskWrapper(self.req, self)
    Log.debug("Manager initialization finished")
def __init__(self, req):
    """Construct the UI manager.

    Wires up configuration, the shared clipboard, the timer, the task
    browser (created hidden, shown unless configured otherwise), the
    plugin engine, the various dialogs and the DBus task wrapper.

    @param req: the GTG requester giving access to configuration and data
    """
    GObject.GObject.__init__(self)
    self.req = req
    self.config_obj = self.req.get_global_config()
    self.browser_config = self.config_obj.get_subconfig("browser")
    self.plugins_config = self.config_obj.get_subconfig("plugins")
    self.task_config = self.config_obj.get_taskconfig()
    # Editors
    # This is the list of tasks that are already opened in an editor
    # of course it's empty right now
    self.opened_task = {}
    self.browser = None
    self.__start_browser_hidden = False
    self.gtk_terminate = False  # if true, the gtk main is not started
    # if true, closing the last window doesn't quit GTG
    # (GTG lives somewhere else without GUI, e.g. notification area)
    self.daemon_mode = False
    # Shared clipboard
    self.clipboard = clipboard.TaskClipboard(self.req)
    # Initialize Timer
    self.config = self.req.get_config('browser')
    self.timer = Timer(self.config)
    # Browser (still hidden)
    self.browser = TaskBrowser(self.req, self)
    self.__init_plugin_engine()
    if not self.__start_browser_hidden:
        self.show_browser()
    # Deletion UI
    self.delete_dialog = None
    # Preferences and Backends windows
    # Initialize dialogs
    self.preferences = PreferencesDialog(self.req, self)
    self.plugins = PluginsDialog(self.config_obj)
    self.edit_backends_dialog = None
    # Tag Editor
    self.tag_editor_dialog = None
    # DBus
    DBusTaskWrapper(self.req, self)
    Log.debug("Manager initialization finished")
def delete_task(self, tid, recursive=True):
    """Delete the task 'tid' and, by default, delete recursively
    all the childrens.

    Note: this modifies the datastore.

    @param tid: The id of the task to be deleted.
    @param recursive: also remove every child when True (default).
    """
    # send the signal before actually deleting the task !
    Log.debug("deleting task %s" % tid)
    doomed = self.get_task(tid)
    if doomed:
        # tell listeners that every tag attached to the task changed
        for tag in doomed.get_tags():
            self.emit("tag-modified", tag.get_name())
    return self.__basetree.del_node(tid, recursive=recursive)
def _activate_non_default_backends(self, sender=None):
    """
    Non-default backends have to wait until the default loads before
    being activated. This function is called after the first default
    backend has loaded all its tasks.

    @param sender: not used, just here for signal compatibility
    """
    if self.is_default_backend_loaded:
        Log.debug("spurious call")
        return
    self.is_default_backend_loaded = True
    # start every enabled, non-default backend
    pending = [b for b in self.backends.values()
               if b.is_enabled() and not b.is_default()]
    for backend in pending:
        self._backend_startup(backend)
def refresh_rtm_tasks_dict(self):
    '''
    Builds a list of RTMTasks fetched from RTM
    '''
    if not self.is_authenticated():
        self.start_authentication()
        self.wait_for_authentication()
    if not self.is_not_refreshing.isSet():
        # if we're already refreshing, we just wait for that to happen and
        # then we immediately return
        self.is_not_refreshing.wait()
        return
    # event cleared == a refresh is in progress
    self.is_not_refreshing.clear()
    Log.debug('refreshing rtm')
    # To understand what this function does, here's a sample output of the
    # plain getLists() from RTM api:
    # http://www.rememberthemilk.com/services/api/tasks.rtm
    # our purpose is to fill this with "tasks_id: RTMTask" items
    rtm_tasks_dict = {}
    rtm_lists_list = self.__get_rtm_lists()
    # for each rtm list, we retrieve all the tasks in it
    for rtm_list in rtm_lists_list:
        if rtm_list.archived != '0' or rtm_list.smart != '0':
            # we skip archived and smart lists
            continue
        rtm_taskseries_list = self.__get_rtm_taskseries_in_list(
            rtm_list.id)
        for rtm_taskseries in rtm_taskseries_list:
            # we drill down to actual tasks
            rtm_tasks_list = self.__getattr_the_rtm_way(
                rtm_taskseries, 'task')
            for rtm_task in rtm_tasks_list:
                rtm_tasks_dict[rtm_task.id] = RTMTask(rtm_task,
                                                      rtm_taskseries,
                                                      rtm_list,
                                                      self.rtm,
                                                      self.timeline)
    # we're done: we store the dict in this class and we annotate the time
    # we got it
    self._rtm_task_dict = rtm_tasks_dict
    self.__rtm_task_dict_timestamp = datetime.datetime.now()
    self.is_not_refreshing.set()
def _process_launchpad_bug(self, bug):
    '''
    Given a bug object, finds out if it must be synced to a GTG note and,
    if so, it carries out the synchronization (by creating or updating a
    GTG task, or deleting itself if the related task has been deleted)

    @param bug: a launchpad bug
    '''
    has_task = self.datastore.has_task
    # the lambda makes every bug "syncable": bug deletion on the
    # launchpad side is not considered here
    action, tid = self.sync_engine.analyze_remote_id(bug.self_link,
                                                     has_task,
                                                     lambda b: True)
    Log.debug("processing launchpad (%s)" % (action))
    if action is None:
        return
    bug_dic = self._prefetch_bug_data(bug)
    # for the rest of the function, no access to bug must be made, so
    # that the time of blocking inside the with statements is short.
    # To be sure of that, set bug to None
    bug = None
    with self.datastore.get_backend_mutex():
        if action == SyncEngine.ADD:
            # new remote bug: mirror it as a GTG task
            tid = str(uuid.uuid4())
            task = self.datastore.task_factory(tid)
            self._populate_task(task, bug_dic)
            self.sync_engine.record_relationship(
                local_id=tid,
                remote_id=str(bug_dic['self_link']),
                meme=SyncMeme(task.get_modified(),
                              bug_dic['modified'],
                              self.get_id()))
            self.datastore.push_task(task)
        elif action == SyncEngine.UPDATE:
            # both sides exist: refresh the task and the sync metadata
            task = self.datastore.get_task(tid)
            self._populate_task(task, bug_dic)
            meme = self.sync_engine.get_meme_from_remote_id(
                bug_dic['self_link'])
            meme.set_local_last_modified(task.get_modified())
            meme.set_remote_last_modified(bug_dic['modified'])
    self.save_state()
def add_child(self, tid):
    """Add a subtask to this task

    @param tid: the id of the added (child) task
    @returns: True
    """
    Log.debug("adding child %s to task %s" % (tid, self.get_id()))
    # having children means this task can no longer be silently discarded
    self.can_be_deleted = False
    # the core of the method is in the TreeNode object
    TreeNode.add_child(self, tid)
    # now we set inherited attributes only if it's a new task
    child = self.req.get_task(tid)
    if self.is_loaded() and child and child.can_be_deleted:
        # a brand-new child inherits dates and tags from its parent
        child.set_start_date(self.get_start_date())
        child.set_due_date(self.get_due_date())
        for t in self.get_tags():
            child.add_tag(t.get_name())
    self.sync()
    return True
def add_child(self, tid):
    """Add a subtask to this task

    @param tid: the id of the added (child) task
    @returns: True on success, False when the underlying TreeNode could
        not (yet) record the relationship
    """
    Log.debug("adding child %s to task %s" % (tid, self.get_id()))
    # having children means this task can no longer be silently discarded
    self.can_be_deleted = False
    # the core of the method is in the TreeNode object
    if TreeNode.add_child(self, tid):
        # now we set inherited attributes only if it's a new task
        child = self.req.get_task(tid)
        if child.can_be_deleted:
            # a brand-new child inherits the start date and tags
            child.set_start_date(self.get_start_date())
            for t in self.get_tags():
                child.tag_added(t.get_name())
        self.sync()
        return True
    else:
        Log.debug("child addition failed (or still pending)")
        return False
def _process_tweet(self, tweet):
    '''
    Given a tweet, checks if a task representing it must be created
    in GTG and, if so, it creates it.

    @param tweet: a tweet.
    '''
    self.cancellation_point()
    tweet_id = str(tweet.id)
    is_syncable = self._is_tweet_syncable(tweet)
    # the "lambda" is because we don't consider tweets deletion (to be
    # faster)
    action, tid = self.sync_engine.analyze_remote_id(
        tweet_id,
        self.datastore.has_task,
        lambda tweet_id: True,
        is_syncable)
    Log.debug("processing tweet (%s, %s)" % (action, is_syncable))
    self.cancellation_point()
    if action == None or action == SyncEngine.UPDATE:
        # tweets are immutable, so there is nothing to update
        return
    elif action == SyncEngine.ADD:
        tid = str(uuid.uuid4())
        task = self.datastore.task_factory(tid)
        self._populate_task(task, tweet)
        # we care only to add tweets and if the list of tags which must be
        # imported changes (lost-syncability can happen). Thus, we don't
        # care about SyncMeme(s)
        self.sync_engine.record_relationship(local_id=tid,
                                             remote_id=tweet_id,
                                             meme=None)
        self.datastore.push_task(task)
    elif action == SyncEngine.LOST_SYNCABILITY:
        self.sync_engine.break_relationship(remote_id=tweet_id)
        self.datastore.request_task_deletion(tid)
    self.save_state()
def remove_task(self, tid):
    """
    See GenericBackend for an explanation of this function.

    Deletes the RTM task linked to the GTG task *tid* (if any) and breaks
    the stored relationship.
    """
    if not self.rtm_proxy.is_authenticated():
        return
    self.cancellation_point()
    try:
        rtm_task_id = self.sync_engine.get_remote_id(tid)
        if rtm_task_id not in self.rtm_proxy.get_rtm_tasks_dict():
            # we might need to refresh our task cache
            self.rtm_proxy.refresh_rtm_tasks_dict()
        rtm_task = self.rtm_proxy.get_rtm_tasks_dict()[rtm_task_id]
        rtm_task.delete()
        Log.debug("removing task %s from RTM" % rtm_task_id)
    except KeyError:
        # no relationship recorded, or the task vanished remotely:
        # nothing to remove
        pass
    try:
        self.sync_engine.break_relationship(local_id=tid)
        self.save_state()
    except Exception:
        # BUGFIX: was a bare `except:` (which also caught SystemExit and
        # KeyboardInterrupt); keep the deliberate best-effort semantics
        # for ordinary errors only.
        pass
def _process_mantis_issue(self, issue):
    """
    Given a issue object, finds out if it must be synced to a GTG note
    and, if so, it carries out the synchronization (by creating or
    updating a GTG task, or deleting itself if the related task has been
    deleted)

    @param issue: a mantis issue
    """
    has_task = self.datastore.has_task
    # the lambda makes every issue "syncable": issue deletion on the
    # mantis side is not considered here
    action, tid = self.sync_engine.analyze_remote_id(str(issue["id"]),
                                                     has_task,
                                                     lambda b: True)
    Log.debug("processing mantis (%s)" % (action))
    if action is None:
        return
    issue_dic = self._prefetch_issue_data(issue)
    # for the rest of the function, no access to issue must be made, so
    # that the time of blocking inside the with statements is short.
    # To be sure of that, set issue to None
    issue = None
    with self.datastore.get_backend_mutex():
        if action == SyncEngine.ADD:
            # new remote issue: mirror it as a GTG task
            tid = str(uuid.uuid4())
            task = self.datastore.task_factory(tid)
            self._populate_task(task, issue_dic)
            meme = SyncMeme(task.get_modified(),
                            issue_dic["modified"],
                            self.get_id())
            self.sync_engine.record_relationship(
                local_id=tid,
                remote_id=str(issue_dic["number"]),
                meme=meme)
            self.datastore.push_task(task)
        elif action == SyncEngine.UPDATE:
            # both sides exist: refresh the task and the sync metadata
            task = self.datastore.get_task(tid)
            self._populate_task(task, issue_dic)
            meme = self.sync_engine.get_meme_from_remote_id(
                issue_dic["number"])
            meme.set_local_last_modified(task.get_modified())
            meme.set_remote_last_modified(issue_dic["modified"])
    self.save_state()
def __init__(self, req):
    """Build the (older, gtk2-era) UI manager: configuration, clipboard,
    the task browser, plugin engine, dialogs and the DBus wrapper.

    @param req: the GTG requester giving access to configuration and data
    """
    self.req = req
    self.config_obj = self.req.get_global_config()
    self.config = self.config_obj.conf_dict
    self.task_config = self.config_obj.task_conf_dict
    # Editors
    # This is the list of tasks that are already opened in an editor
    # of course it's empty right now
    self.opened_task = {}
    self.browser = None
    self.__start_browser_hidden = False
    self.gtk_terminate = False  # if true, the gtk main is not started
    # Shared clipboard
    self.clipboard = clipboard.TaskClipboard(self.req)
    # Browser (still hidden)
    self.browser = TaskBrowser(self.req, self)
    self.__init_plugin_engine()
    if not self.__start_browser_hidden:
        self.show_browser()
    # Deletion UI
    self.delete_dialog = None
    # Preferences and Backends windows
    # Initialize dialogs
    self.preferences_dialog = None
    self.edit_backends_dialog = None
    # DBus
    DBusTaskWrapper(self.req, self)
    Log.debug("Manager initialization finished")
def _process_rtm_task(self, rtm_task_id):
    '''
    Takes a rtm task id and carries out the necessary operations to
    refresh the sync state

    @param rtm_task_id: id of the Remember The Milk task to examine
    '''
    self.cancellation_point()
    if not self.rtm_proxy.is_authenticated():
        # cannot touch RTM without credentials; bail out silently
        return
    rtm_task = self.rtm_proxy.get_rtm_tasks_dict()[rtm_task_id]
    is_syncable = self._rtm_task_is_syncable_per_attached_tags(rtm_task)
    # ask the sync engine what to do with this remote task
    action, tid = self.sync_engine.analyze_remote_id(
        rtm_task_id,
        self.datastore.has_task,
        self.rtm_proxy.has_rtm_task,
        is_syncable)
    Log.debug("GTG<-RTM set task (%s, %s)" % (action, is_syncable))
    if action is None:
        return
    if action == SyncEngine.ADD:
        if rtm_task.get_status() != Task.STA_ACTIVE:
            # OPTIMIZATION:
            # we don't sync tasks that have already been closed before we
            # even saw them
            return
        # new remote task: mirror it as a GTG task
        tid = str(uuid.uuid4())
        task = self.datastore.task_factory(tid)
        self._populate_task(task, rtm_task)
        meme = SyncMeme(task.get_modified(),
                        rtm_task.get_modified(),
                        "RTM")
        self.sync_engine.record_relationship(
            local_id=tid, remote_id=rtm_task_id, meme=meme)
        self.datastore.push_task(task)
    elif action == SyncEngine.UPDATE:
        # both sides exist: copy over only if the remote copy is newer
        task = self.datastore.get_task(tid)
        with self.datastore.get_backend_mutex():
            meme = self.sync_engine.get_meme_from_remote_id(rtm_task_id)
            newest = meme.which_is_newest(task.get_modified(),
                                          rtm_task.get_modified())
            if newest == "remote":
                self._populate_task(task, rtm_task)
                meme.set_remote_last_modified(rtm_task.get_modified())
                meme.set_local_last_modified(task.get_modified())
            else:
                # we skip saving the state
                return
    elif action == SyncEngine.REMOVE:
        # the local task is gone: remove the remote one too
        try:
            rtm_task.delete()
            self.sync_engine.break_relationship(remote_id=rtm_task_id)
        except KeyError:
            pass
    elif action == SyncEngine.LOST_SYNCABILITY:
        self._exec_lost_syncability(tid, rtm_task)
    self.save_state()
def __log(self, message):
    """Emit *message* on the debug log channel."""
    Log.debug(message)
def set_task(self, task):
    '''
    See GenericBackend for an explanation of this function.

    Pushes the state of a GTG task to Tomboy: depending on the sync
    engine's verdict, the matching note is created, updated, or the
    local task is removed / unlinked.

    @param task: the GTG task to synchronize
    '''
    self.cancellation_point()
    is_syncable = self._gtg_task_is_syncable_per_attached_tags(task)
    tid = task.get_id()
    with self.datastore.get_backend_mutex():
        with self.TomboyConnection(self, *self.BUS_ADDRESS) as tomboy:
            has_task = self.datastore.has_task
            has_note = tomboy.NoteExists
            can_sync = is_syncable
            with self.DbusWatchdog(self):
                action, note = self.sync_engine.analyze_local_id(
                    tid, has_task, has_note, can_sync)
            Log.debug("processing gtg (%s, %d)" % (action, is_syncable))
            if action == SyncEngine.ADD:
                # GTG allows multiple tasks with the same name,
                # Tomboy doesn't. we need to handle the renaming
                # manually
                title = task.get_title()
                duplicate_counter = 1
                with self.DbusWatchdog(self):
                    note = tomboy.CreateNamedNote(title)
                    while note == "":
                        # an empty return value means the title is
                        # taken: append a counter until a free one works
                        duplicate_counter += 1
                        note = tomboy.CreateNamedNote(title + "(%d)"
                                                      % duplicate_counter)
                if duplicate_counter != 1:
                    # if we needed to rename, we have to rename also
                    # the gtg task
                    # NOTE(review): the note name uses "(%d)" but the
                    # task title " (%d)" (extra space) — confirm whether
                    # the mismatch is intentional
                    task.set_title(title + " (%d)" % duplicate_counter)
                self._populate_note(note, task)
                # FIX: record the new relationship on the sync engine,
                # as every other sync path does; this was previously
                # called as self.record_relationship, a method this
                # backend does not define
                self.sync_engine.record_relationship(
                    local_id=tid,
                    remote_id=note,
                    meme=SyncMeme(task.get_modified(),
                                  self.get_modified_for_note(note),
                                  "GTG"))
            elif action == SyncEngine.UPDATE:
                meme = self.sync_engine.get_meme_from_local_id(
                    task.get_id())
                modified_for = self.get_modified_for_note(note)
                newest = meme.which_is_newest(task.get_modified(),
                                              modified_for)
                if newest == "local":
                    # local wins: copy task content into the note, then
                    # record the post-update timestamps on both sides
                    self._populate_note(note, task)
                    meme.set_local_last_modified(task.get_modified())
                    meme.set_remote_last_modified(
                        self.get_modified_for_note(note))
                    self.save_state()
            elif action == SyncEngine.REMOVE:
                self.datastore.request_task_deletion(tid)
                try:
                    self.sync_engine.break_relationship(local_id=tid)
                    self.save_state()
                except KeyError:
                    # relationship already gone; nothing left to do
                    pass
            elif action == SyncEngine.LOST_SYNCABILITY:
                self._exec_lost_syncability(tid, note)
def open_browser(self):
    """Make sure the task browser exists, instantiating it lazily
    on the first call, and log that it is open."""
    if not self.browser:
        self.browser = TaskBrowser(self.req, self)
    Log.debug("Browser is open")
def set_task(self, task):
    """
    See GenericBackend for an explanation of this function.

    Pushes the state of a GTG task to Remember The Milk: depending on
    the sync engine's verdict the matching remote task is created,
    updated, deleted, or unlinked.

    @param task: the GTG task to synchronize
    """
    # Without authentication we cannot talk to the RTM service at all
    if not self.rtm_proxy.is_authenticated():
        return
    self.cancellation_point()
    tid = task.get_id()
    is_syncable = self._gtg_task_is_syncable_per_attached_tags(task)
    action, rtm_task_id = self.sync_engine.analyze_local_id(
        tid,
        self.datastore.has_task,
        self.rtm_proxy.has_rtm_task,
        is_syncable)
    Log.debug("GTG->RTM set task (%s, %s)" % (action, is_syncable))
    if action is None:
        return
    if action == SyncEngine.ADD:
        if task.get_status() != Task.STA_ACTIVE:
            # OPTIMIZATION:
            # we don't sync tasks that have already been closed before we
            # even synced them once
            return
        # FIX: create the remote task *before* entering the try block.
        # Previously the creation was inside the try, so a failure in
        # create_new_rtm_task itself would reach the except clause with
        # rtm_task unbound, raising NameError and masking the real error.
        rtm_task = self.rtm_proxy.create_new_rtm_task(task.get_title())
        try:
            self._populate_rtm_task(task, rtm_task)
        except:
            # roll back the half-created remote task, then propagate
            rtm_task.delete()
            raise
        meme = SyncMeme(task.get_modified(),
                        rtm_task.get_modified(),
                        "GTG")
        self.sync_engine.record_relationship(
            local_id=tid, remote_id=rtm_task.get_id(), meme=meme)
    elif action == SyncEngine.UPDATE:
        rtm_task = self._get_cached_rtm_task(rtm_task_id)
        with self.datastore.get_backend_mutex():
            meme = self.sync_engine.get_meme_from_local_id(task.get_id())
            newest = meme.which_is_newest(task.get_modified(),
                                          rtm_task.get_modified())
            if newest == "local":
                # local wins: push content remotely, tracking each RTM
                # transaction so a failure can be rolled back
                transaction_ids = []
                try:
                    self._populate_rtm_task(task, rtm_task,
                                            transaction_ids)
                except:
                    self.rtm_proxy.unroll_changes(transaction_ids)
                    raise
                meme.set_remote_last_modified(rtm_task.get_modified())
                meme.set_local_last_modified(task.get_modified())
            else:
                # we skip saving the state
                return
    elif action == SyncEngine.REMOVE:
        self.datastore.request_task_deletion(tid)
        try:
            self.sync_engine.break_relationship(local_id=tid)
        except KeyError:
            # relationship already gone; nothing left to do
            pass
    elif action == SyncEngine.LOST_SYNCABILITY:
        rtm_task = self._get_cached_rtm_task(rtm_task_id)
        self._exec_lost_syncability(tid, rtm_task)
    # Persist the sync engine state after any action that reached here
    self.save_state()

def _get_cached_rtm_task(self, rtm_task_id):
    """
    Return the RTM task object for rtm_task_id from the local cache,
    refreshing the cache once from the RTM website if the id is not
    yet known. (Shared by the UPDATE and LOST_SYNCABILITY branches of
    set_task, which previously duplicated this logic.)
    """
    try:
        return self.rtm_proxy.get_rtm_tasks_dict()[rtm_task_id]
    except KeyError:
        # in this case, we don't have yet the task in our local cache
        # of what's on the rtm website
        self.rtm_proxy.refresh_rtm_tasks_dict()
        return self.rtm_proxy.get_rtm_tasks_dict()[rtm_task_id]