def _process_rtm_task(self, rtm_task_id):
    """
    Takes an RTM task id and carries out the necessary operations to
    refresh the sync state
    """
    self.cancellation_point()
    if not self.rtm_proxy.is_authenticated():
        return
    rtm_task = self.rtm_proxy.get_rtm_tasks_dict()[rtm_task_id]
    is_syncable = self._rtm_task_is_syncable_per_attached_tags(rtm_task)
    action, tid = self.sync_engine.analyze_remote_id(
        rtm_task_id,
        self.datastore.has_task,
        self.rtm_proxy.has_rtm_task,
        is_syncable)
    log.debug(f"GTG<-RTM set task ({action}, {is_syncable})")

    if action is None:
        return

    if action == SyncEngine.ADD:
        if rtm_task.get_status() != Task.STA_ACTIVE:
            # OPTIMIZATION:
            # we don't sync tasks that have already been closed before we
            # even saw them
            return
        tid = str(uuid.uuid4())
        task = self.datastore.task_factory(tid)
        self._populate_task(task, rtm_task)
        meme = SyncMeme(task.get_modified(),
                        rtm_task.get_modified(),
                        "RTM")
        self.sync_engine.record_relationship(
            local_id=tid, remote_id=rtm_task_id, meme=meme)
        self.datastore.push_task(task)

    elif action == SyncEngine.UPDATE:
        task = self.datastore.get_task(tid)
        with self.datastore.get_backend_mutex():
            meme = self.sync_engine.get_meme_from_remote_id(rtm_task_id)
            newest = meme.which_is_newest(task.get_modified(),
                                          rtm_task.get_modified())
            if newest == "remote":
                self._populate_task(task, rtm_task)
                meme.set_remote_last_modified(rtm_task.get_modified())
                meme.set_local_last_modified(task.get_modified())
            else:
                # we skip saving the state
                return

    elif action == SyncEngine.REMOVE:
        try:
            rtm_task.delete()
            self.sync_engine.break_relationship(remote_id=rtm_task_id)
        except KeyError:
            pass

    elif action == SyncEngine.LOST_SYNCABILITY:
        self._exec_lost_syncability(tid, rtm_task)

    self.save_state()
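# The UPDATE branches throughout these backends rely on
# SyncMeme.which_is_newest() to decide which side overwrites the other.
# The real class is defined elsewhere in GTG; the minimal sketch below only
# illustrates the last-write-wins comparison these functions assume (the
# tie-handling detail is an assumption, not a copy of the shipped code).
class SyncMemeSketch:

    def __init__(self, local_modified, remote_modified, origin):
        # timestamps recorded at the last successful sync, plus which side
        # ("GTG" or the remote service name) originated the relationship
        self.local_last_modified = local_modified
        self.remote_last_modified = remote_modified
        self.origin = origin

    def set_local_last_modified(self, modified):
        self.local_last_modified = modified

    def set_remote_last_modified(self, modified):
        self.remote_last_modified = modified

    def which_is_newest(self, local_modified, remote_modified):
        # Returns "local", "remote", or None when neither side has changed
        # since the timestamps stored at the last sync.
        if local_modified <= self.local_last_modified and \
                remote_modified <= self.remote_last_modified:
            return None
        if local_modified > remote_modified:
            return "local"
        return "remote"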
def do_periodic_import(self):
    """
    See PeriodicImportBackend for an explanation of this function.
    """
    stored_evolution_task_ids = set(self.sync_engine.get_all_remote())
    all_tasks = self._evolution_tasks.get_all_objects()
    current_evolution_task_ids = set(
        task.get_uid() for task in all_tasks)

    # If it's the very first time the backend is run, it's possible that
    # the user already synced their tasks in some way (but we don't know
    # that). Therefore, we attempt to infer those task relationships by
    # matching the titles.
    if self._parameters["is-first-run"]:
        gtg_titles_dic = {}
        for tid in self.datastore.get_all_tasks():
            gtg_task = self.datastore.get_task(tid)
            if not self._gtg_task_is_syncable_per_attached_tags(gtg_task):
                continue
            gtg_title = gtg_task.get_title()
            if gtg_title in gtg_titles_dic:
                gtg_titles_dic[gtg_title].append(tid)
            else:
                gtg_titles_dic[gtg_title] = [tid]
        for evo_task_id in current_evolution_task_ids:
            evo_task = self._evo_get_task(evo_task_id)
            try:
                tids = gtg_titles_dic[evo_task.get_summary()]
                # we remove the tid, so that it can't be linked to two
                # different evolution tasks
                tid = tids.pop()
                gtg_task = self.datastore.get_task(tid)
                meme = SyncMeme(gtg_task.get_modified(),
                                self._evo_get_modified(evo_task),
                                "GTG")
                self.sync_engine.record_relationship(
                    local_id=tid,
                    remote_id=evo_task.get_uid(),
                    meme=meme)
            except KeyError:
                pass
        # a first run has been completed successfully
        self._parameters["is-first-run"] = False

    for evo_task_id in current_evolution_task_ids:
        # Adding and updating
        self.cancellation_point()
        self._process_evo_task(evo_task_id)

    for evo_task_id in stored_evolution_task_ids.difference(
            current_evolution_task_ids):
        # Removing the old ones
        self.cancellation_point()
        tid = self.sync_engine.get_local_id(evo_task_id)
        self.datastore.request_task_deletion(tid)
        try:
            self.sync_engine.break_relationship(remote_id=evo_task_id)
        except KeyError:
            pass
def _process_tomboy_note(self, note):
    """
    Given a tomboy note, finds out if it must be synced to a GTG task
    and, if so, it carries out the synchronization (by creating or
    updating a GTG task, or deleting itself if the related task has
    been deleted)

    @param note: a Tomboy note id
    """
    with self.datastore.get_backend_mutex():
        self.cancellation_point()
        is_syncable = self._tomboy_note_is_syncable(note)
        has_task = self.datastore.has_task
        note_exists = self._tomboy_note_exists
        with self.DbusWatchdog(self):
            action, tid = self.sync_engine.analyze_remote_id(
                note, has_task, note_exists, is_syncable)
        log.debug(f"processing tomboy ({action}, {is_syncable})")

        if action == SyncEngine.ADD:
            tid = str(uuid.uuid4())
            task = self.datastore.task_factory(tid)
            self._populate_task(task, note)
            modified_for = self.get_modified_for_note(note)
            self.record_relationship(local_id=tid,
                                     remote_id=note,
                                     meme=SyncMeme(task.get_modified(),
                                                   modified_for,
                                                   self.get_id()))
            self.datastore.push_task(task)

        elif action == SyncEngine.UPDATE:
            task = self.datastore.get_task(tid)
            meme = self.sync_engine.get_meme_from_remote_id(note)
            newest = meme.which_is_newest(
                task.get_modified(), self.get_modified_for_note(note))
            if newest == "remote":
                self._populate_task(task, note)
                meme.set_local_last_modified(task.get_modified())
                meme.set_remote_last_modified(
                    self.get_modified_for_note(note))
                self.save_state()

        elif action == SyncEngine.REMOVE:
            with self.TomboyConnection(self, *self.BUS_ADDRESS) as tomboy:
                with self.DbusWatchdog(self):
                    tomboy.DeleteNote(note)
                try:
                    self.sync_engine.break_relationship(remote_id=note)
                except KeyError:
                    pass

        elif action == SyncEngine.LOST_SYNCABILITY:
            self._exec_lost_syncability(tid, note)
def set_task(self, task):
    """
    See GenericBackend for an explanation of this function.
    """
    tid = task.get_id()
    is_syncable = self._gtg_task_is_syncable_per_attached_tags(task)
    action, evo_task_id = self.sync_engine.analyze_local_id(
        tid, self.datastore.has_task, self._evo_has_task, is_syncable)
    log.debug('GTG->Evo set task (%s, %s)', action, is_syncable)

    if action is None:
        return

    if action == SyncEngine.ADD:
        evo_task = evolution.ecal.ECalComponent(
            ical=evolution.ecal.CAL_COMPONENT_TODO)
        with self.datastore.get_backend_mutex():
            self._evolution_tasks.add_object(evo_task)
            self._populate_evo_task(task, evo_task)
            meme = SyncMeme(task.get_modified(),
                            self._evo_get_modified(evo_task),
                            "GTG")
            self.sync_engine.record_relationship(
                local_id=tid,
                remote_id=evo_task.get_uid(),
                meme=meme)

    elif action == SyncEngine.UPDATE:
        with self.datastore.get_backend_mutex():
            evo_task = self._evo_get_task(evo_task_id)
            meme = self.sync_engine.get_meme_from_local_id(task.get_id())
            newest = meme.which_is_newest(task.get_modified(),
                                          self._evo_get_modified(evo_task))
            if newest == "local":
                self._populate_evo_task(task, evo_task)
                meme.set_remote_last_modified(
                    self._evo_get_modified(evo_task))
                meme.set_local_last_modified(task.get_modified())
            else:
                # we skip saving the state
                return

    elif action == SyncEngine.REMOVE:
        self.datastore.request_task_deletion(tid)
        try:
            self.sync_engine.break_relationship(local_id=tid)
        except KeyError:
            pass

    elif action == SyncEngine.LOST_SYNCABILITY:
        evo_task = self._evo_get_task(evo_task_id)
        self._exec_lost_syncability(tid, evo_task)

    self.save_state()
def _process_evo_task(self, evo_task_id):
    """
    Takes an evolution task id and carries out the necessary operations
    to refresh the sync state
    """
    self.cancellation_point()
    evo_task = self._evo_get_task(evo_task_id)
    is_syncable = self._evo_task_is_syncable(evo_task)
    action, tid = self.sync_engine.analyze_remote_id(
        evo_task_id, self.datastore.has_task, self._evo_has_task,
        is_syncable)
    log.debug('GTG<-Evo set task (%s, %s)', action, is_syncable)

    if action == SyncEngine.ADD:
        with self.datastore.get_backend_mutex():
            tid = str(uuid.uuid4())
            task = self.datastore.task_factory(tid)
            self._populate_task(task, evo_task)
            meme = SyncMeme(task.get_modified(),
                            self._evo_get_modified(evo_task),
                            "GTG")
            self.sync_engine.record_relationship(local_id=tid,
                                                 remote_id=evo_task_id,
                                                 meme=meme)
            self.datastore.push_task(task)

    elif action == SyncEngine.UPDATE:
        with self.datastore.get_backend_mutex():
            task = self.datastore.get_task(tid)
            meme = self.sync_engine.get_meme_from_remote_id(evo_task_id)
            newest = meme.which_is_newest(task.get_modified(),
                                          self._evo_get_modified(evo_task))
            if newest == "remote":
                self._populate_task(task, evo_task)
                meme.set_remote_last_modified(
                    self._evo_get_modified(evo_task))
                meme.set_local_last_modified(task.get_modified())

    elif action == SyncEngine.REMOVE:
        try:
            evo_task = self._evo_get_task(evo_task_id)
            self._delete_evolution_task(evo_task)
            self.sync_engine.break_relationship(remote_id=evo_task_id)
        except KeyError:
            pass

    elif action == SyncEngine.LOST_SYNCABILITY:
        self._exec_lost_syncability(tid, evo_task)

    self.save_state()
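# The LOST_SYNCABILITY branches above delegate to _exec_lost_syncability(),
# which each backend defines elsewhere. A minimal sketch of the assumed
# behavior follows: drop the stored relationship, then delete whichever side
# is the copy, keeping the side recorded as the origin. The get_origin()
# accessor and the exact deletion calls are assumptions inferred from the
# surrounding code, not a verbatim copy of the shipped helper.
def _exec_lost_syncability_sketch(self, tid, evo_task):
    self.cancellation_point()
    meme = self.sync_engine.get_meme_from_remote_id(evo_task.get_uid())
    # the relationship is gone either way
    self.sync_engine.break_relationship(remote_id=evo_task.get_uid())
    if meme.get_origin() == "GTG":
        # the GTG task is the original: remove the remote copy
        self._delete_evolution_task(evo_task)
    else:
        # the remote task is the original: remove the local copy
        self.datastore.request_task_deletion(tid)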
def _process_launchpad_bug(self, bug):
    """
    Given a bug object, finds out if it must be synced to a GTG task
    and, if so, it carries out the synchronization (by creating or
    updating a GTG task, or deleting itself if the related task has
    been deleted)

    @param bug: a launchpad bug
    """
    has_task = self.datastore.has_task
    action, tid = self.sync_engine.analyze_remote_id(bug.self_link,
                                                     has_task,
                                                     lambda b: True)
    log.debug(f"processing launchpad ({action})")

    if action is None:
        return

    bug_dic = self._prefetch_bug_data(bug)
    # for the rest of the function, no access to bug must be made, so
    # that the time of blocking inside the with statements is short.
    # To be sure of that, set bug to None
    bug = None

    with self.datastore.get_backend_mutex():
        if action == SyncEngine.ADD:
            tid = str(uuid.uuid4())
            task = self.datastore.task_factory(tid)
            self._populate_task(task, bug_dic)
            meme = SyncMeme(task.get_modified(),
                            bug_dic['modified'],
                            self.get_id())
            self.sync_engine.record_relationship(
                local_id=tid,
                remote_id=str(bug_dic['self_link']),
                meme=meme,
            )
            self.datastore.push_task(task)

        elif action == SyncEngine.UPDATE:
            task = self.datastore.get_task(tid)
            self._populate_task(task, bug_dic)
            meme = self.sync_engine.get_meme_from_remote_id(
                bug_dic['self_link'])
            meme.set_local_last_modified(task.get_modified())
            meme.set_remote_last_modified(bug_dic['modified'])

    self.save_state()
def _process_mantis_issue(self, issue):
    """
    Given an issue object, finds out if it must be synced to a GTG task
    and, if so, it carries out the synchronization (by creating or
    updating a GTG task, or deleting itself if the related task has
    been deleted)

    @param issue: a mantis issue
    """
    has_task = self.datastore.has_task
    action, tid = self.sync_engine.analyze_remote_id(
        str(issue['id']), has_task, lambda b: True)
    log.debug("processing mantis (%s)", action)

    if action is None:
        return

    issue_dic = self._prefetch_issue_data(issue)
    # for the rest of the function, no access to issue must be made, so
    # that the time of blocking inside the with statements is short.
    # To be sure of that, set issue to None
    issue = None

    with self.datastore.get_backend_mutex():
        if action == SyncEngine.ADD:
            tid = str(uuid.uuid4())
            task = self.datastore.task_factory(tid)
            self._populate_task(task, issue_dic)
            meme = SyncMeme(task.get_modified(),
                            issue_dic['modified'],
                            self.get_id())
            self.sync_engine.record_relationship(
                local_id=tid,
                remote_id=str(issue_dic['number']),
                meme=meme)
            self.datastore.push_task(task)

        elif action == SyncEngine.UPDATE:
            task = self.datastore.get_task(tid)
            self._populate_task(task, issue_dic)
            meme = self.sync_engine.get_meme_from_remote_id(
                issue_dic['number'])
            meme.set_local_last_modified(task.get_modified())
            meme.set_remote_last_modified(issue_dic['modified'])

    self.save_state()
def set_task(self, task):
    """
    See GenericBackend for an explanation of this function.
    """
    if not self.rtm_proxy.is_authenticated():
        return
    self.cancellation_point()
    tid = task.get_id()
    is_syncable = self._gtg_task_is_syncable_per_attached_tags(task)
    action, rtm_task_id = self.sync_engine.analyze_local_id(
        tid,
        self.datastore.has_task,
        self.rtm_proxy.has_rtm_task,
        is_syncable)
    log.debug(f"GTG->RTM set task ({action}, {is_syncable})")

    if action is None:
        return

    if action == SyncEngine.ADD:
        if task.get_status() != Task.STA_ACTIVE:
            # OPTIMIZATION:
            # we don't sync tasks that have already been closed before we
            # even synced them once
            return
        rtm_task = self.rtm_proxy.create_new_rtm_task(task.get_title())
        try:
            self._populate_rtm_task(task, rtm_task)
        except Exception:
            # populating the freshly created remote task failed: clean it
            # up before propagating the error
            rtm_task.delete()
            raise
        meme = SyncMeme(task.get_modified(),
                        rtm_task.get_modified(),
                        "GTG")
        self.sync_engine.record_relationship(
            local_id=tid, remote_id=rtm_task.get_id(), meme=meme)

    elif action == SyncEngine.UPDATE:
        try:
            rtm_task = self.rtm_proxy.get_rtm_tasks_dict()[rtm_task_id]
        except KeyError:
            # in this case, we don't yet have the task in our local cache
            # of what's on the rtm website
            self.rtm_proxy.refresh_rtm_tasks_dict()
            rtm_task = self.rtm_proxy.get_rtm_tasks_dict()[rtm_task_id]
        with self.datastore.get_backend_mutex():
            meme = self.sync_engine.get_meme_from_local_id(task.get_id())
            newest = meme.which_is_newest(task.get_modified(),
                                          rtm_task.get_modified())
            if newest == "local":
                transaction_ids = []
                try:
                    self._populate_rtm_task(task, rtm_task,
                                            transaction_ids)
                except Exception:
                    self.rtm_proxy.unroll_changes(transaction_ids)
                    raise
                meme.set_remote_last_modified(rtm_task.get_modified())
                meme.set_local_last_modified(task.get_modified())
            else:
                # we skip saving the state
                return

    elif action == SyncEngine.REMOVE:
        self.datastore.request_task_deletion(tid)
        try:
            self.sync_engine.break_relationship(local_id=tid)
        except KeyError:
            pass

    elif action == SyncEngine.LOST_SYNCABILITY:
        try:
            rtm_task = self.rtm_proxy.get_rtm_tasks_dict()[rtm_task_id]
        except KeyError:
            # in this case, we don't yet have the task in our local cache
            # of what's on the rtm website
            self.rtm_proxy.refresh_rtm_tasks_dict()
            rtm_task = self.rtm_proxy.get_rtm_tasks_dict()[rtm_task_id]
        self._exec_lost_syncability(tid, rtm_task)

    self.save_state()
def do_periodic_import(self):
    """
    See PeriodicImportBackend for an explanation of this function.
    """
    # we get the old list of synced tasks, and compare it with the new
    # task set
    stored_rtm_task_ids = self.sync_engine.get_all_remote()
    current_rtm_task_ids = list(self.rtm_proxy.get_rtm_tasks_dict().keys())

    # If it's the very first time the backend is run, it's possible that
    # the user already synced their tasks in some way (but we don't know
    # that). Therefore, we attempt to infer those task relationships by
    # matching the titles.
    if self._parameters["is-first-run"]:
        gtg_titles_dic = {}
        for tid in self.datastore.get_all_tasks():
            gtg_task = self.datastore.get_task(tid)
            if not self._gtg_task_is_syncable_per_attached_tags(gtg_task):
                continue
            gtg_title = gtg_task.get_title()
            if gtg_title in gtg_titles_dic:
                gtg_titles_dic[gtg_title].append(tid)
            else:
                gtg_titles_dic[gtg_title] = [tid]
        for rtm_task_id in current_rtm_task_ids:
            rtm_task = self.rtm_proxy.get_rtm_tasks_dict()[rtm_task_id]
            try:
                tids = gtg_titles_dic[rtm_task.get_title()]
                # we remove the tid, so that it can't be linked to two
                # different rtm tasks
                tid = tids.pop()
                gtg_task = self.datastore.get_task(tid)
                meme = SyncMeme(gtg_task.get_modified(),
                                rtm_task.get_modified(),
                                "GTG")
                self.sync_engine.record_relationship(
                    local_id=tid, remote_id=rtm_task.get_id(), meme=meme)
            except KeyError:
                pass
        # a first run has been completed successfully
        self._parameters["is-first-run"] = False

    for rtm_task_id in current_rtm_task_ids:
        self.cancellation_point()
        # Adding and updating
        self._process_rtm_task(rtm_task_id)

    for rtm_task_id in set(stored_rtm_task_ids).difference(
            set(current_rtm_task_ids)):
        self.cancellation_point()
        # Removing the old ones
        if not self.please_quit:
            tid = self.sync_engine.get_local_id(rtm_task_id)
            self.datastore.request_task_deletion(tid)
            try:
                self.sync_engine.break_relationship(
                    remote_id=rtm_task_id)
                self.save_state()
            except KeyError:
                pass
def set_task(self, task):
    """
    See GenericBackend for an explanation of this function.
    """
    self.cancellation_point()
    is_syncable = self._gtg_task_is_syncable_per_attached_tags(task)
    tid = task.get_id()
    with self.datastore.get_backend_mutex():
        with self.TomboyConnection(self, *self.BUS_ADDRESS) as tomboy:
            has_task = self.datastore.has_task
            has_note = tomboy.NoteExists
            can_sync = is_syncable
            with self.DbusWatchdog(self):
                action, note = self.sync_engine.analyze_local_id(
                    tid, has_task, has_note, can_sync)
            log.debug(f"processing gtg ({action}, {is_syncable:d})")

            if action == SyncEngine.ADD:
                # GTG allows multiple tasks with the same name,
                # Tomboy doesn't. We need to handle the renaming
                # manually
                title = task.get_title()
                duplicate_counter = 1
                with self.DbusWatchdog(self):
                    note = tomboy.CreateNamedNote(title)
                    while note == "":
                        duplicate_counter += 1
                        note = tomboy.CreateNamedNote(
                            title + "(%d)" % duplicate_counter)
                if duplicate_counter != 1:
                    # if we needed to rename, we have to rename also
                    # the gtg task
                    task.set_title(title + f" ({duplicate_counter:d})")
                self._populate_note(note, task)
                self.record_relationship(
                    local_id=tid,
                    remote_id=note,
                    meme=SyncMeme(task.get_modified(),
                                  self.get_modified_for_note(note),
                                  "GTG"))

            elif action == SyncEngine.UPDATE:
                meme = self.sync_engine.get_meme_from_local_id(
                    task.get_id())
                modified_for = self.get_modified_for_note(note)
                newest = meme.which_is_newest(task.get_modified(),
                                              modified_for)
                if newest == "local":
                    self._populate_note(note, task)
                    meme.set_local_last_modified(task.get_modified())
                    meme.set_remote_last_modified(
                        self.get_modified_for_note(note))
                    self.save_state()

            elif action == SyncEngine.REMOVE:
                self.datastore.request_task_deletion(tid)
                try:
                    self.sync_engine.break_relationship(local_id=tid)
                    self.save_state()
                except KeyError:
                    pass

            elif action == SyncEngine.LOST_SYNCABILITY:
                self._exec_lost_syncability(tid, note)
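# Both Tomboy functions above wrap every D-Bus call in
# "with self.DbusWatchdog(self):". The sketch below illustrates the assumed
# idea: a context manager that arms a timer before the call and disarms it on
# exit, so a hung D-Bus request shuts the backend down instead of blocking
# the sync thread forever. The timeout value and the quit() call are
# assumptions for illustration, not the shipped implementation.
import threading


class DbusWatchdogSketch:

    TIMEOUT_SECONDS = 10

    def __init__(self, backend):
        self.backend = backend
        self._timer = None

    def __enter__(self):
        # arm: if the wrapped D-Bus call does not return in time, give up
        self._timer = threading.Timer(self.TIMEOUT_SECONDS,
                                      self._on_timeout)
        self._timer.start()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # disarm: the call returned (or raised) before the deadline
        self._timer.cancel()
        return False

    def _on_timeout(self):
        # the D-Bus peer is unresponsive; ask the backend to stop syncing
        self.backend.quit(disable=True)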