def do_activate(self):
    """Callback when launched from the desktop: show the main window."""
    # Shared state must exist before the browser can be shown.
    self.init_shared()
    self.browser.present()
    log.debug("Application activation finished")
def set_debug_flag(self, debug):
    """Set whenever it should activate debug stuff like logging or not"""
    if not debug:
        log.setLevel(logging.INFO)
        return
    log.setLevel(logging.DEBUG)
    log.debug("Debug output enabled.")
def __init__(self):
    """
    Creates a dictionary of the currently available backend modules
    """
    super().__init__()
    # Borg-style guard: a previous construction already filled the dict.
    if hasattr(self, "backend_modules"):
        return
    self.backend_modules = {}
    # Strip the ".py" suffix to obtain importable module names.
    module_names = [backend_file.replace(".py", "")
                    for backend_file in self._find_backend_files()]
    log.debug("Backends found: " + str(module_names))
    # Import each backend module; skip the ones that fail to load.
    for module_name in module_names:
        extended_module_name = "GTG.backends." + module_name
        try:
            __import__(extended_module_name)
        except ImportError as exception:
            # Something is wrong with this backend, skipping
            log.warning("Backend %s could not be loaded: %s" %
                        (module_name, str(exception)))
        except Exception as exception:
            # Other exception log as errors
            log.error(f"Malformated backend {module_name}: {str(exception)}")
        else:
            self.backend_modules[module_name] = \
                sys.modules[extended_module_name]
def restore_backend_from_xml(self, dic):
    """
    Function restoring a backend from its xml description.
    dic should be a dictionary containing at least the key
        - "module", with the module name
        - "xmlobject", with its xml description.
    Every other key is passed as-is to the backend, as parameter.

    Returns the backend instance, or None is something goes wrong
    """
    if "module" not in dic or "xmlobject" not in dic:
        log.debug(f"Malformed backend configuration found! {dic}")
        # BUGFIX: previously this fell through, so dic["module"] below
        # raised KeyError instead of returning None as documented.
        return None
    module = self.get_backend(dic["module"])
    if module is None:
        log.debug(f"could not load module for backend {dic['module']}")
        return None
    # we pop the xml object, as it will be redundant when the parameters
    # are set directly in the dict
    xp = dic.pop("xmlobject")
    # Building the dictionary
    parameters_specs = module.Backend.get_static_parameters()
    dic["pid"] = str(xp.getAttribute("pid"))
    for param_name, param_dic in parameters_specs.items():
        if xp.hasAttribute(param_name):
            # we need to convert the parameter to the right format.
            # we fetch the format from the static_parameters
            param_type = param_dic[GenericBackend.PARAM_TYPE]
            param_value = GenericBackend.cast_param_type_from_string(
                xp.getAttribute(param_name), param_type)
            dic[param_name] = param_value
    # We put the backend itself in the dict
    dic["backend"] = module.Backend(dic)
    return dic["backend"]
def __init__(self, app_id, debug):
    """Setup Application.

    @param app_id: the application id passed to GtkApplication
    @param debug: when True, switch logging to DEBUG level
    """
    super().__init__(application_id=app_id)
    if debug:
        log.setLevel(logging.DEBUG)
        log.debug("Debug output enabled.")
    else:
        log.setLevel(logging.INFO)
    # Register backends
    datastore = DataStore()
    # Plain loop instead of a list comprehension used only for its
    # side effects.
    for backend_dic in BackendFactory().get_saved_backends_list():
        datastore.register_backend(backend_dic)
    # Save the backends directly to be sure projects.xml is written
    datastore.save(quit=False)
    self.req = datastore.get_requester()
    self.config = self.req.get_config("browser")
    self.config_plugins = self.req.get_config("plugins")
    self.clipboard = clipboard.TaskClipboard(self.req)
    # Periodic timer drives the automatic cleanup of old closed tasks.
    self.timer = Timer(self.config)
    self.timer.connect('refresh', self.autoclean)
    self.preferences_dialog = Preferences(self.req, self)
    self.plugins_dialog = PluginsDialog(self.req)
    self.init_style()
def _process_rtm_task(self, rtm_task_id):
    """
    Takes a rtm task id and carries out the necessary operations to
    refresh the sync state

    Depending on the sync engine's verdict the remote task is imported
    as a new GTG task, merged into an existing one, deleted remotely,
    or unlinked from its local counterpart.

    @param rtm_task_id: id of a task on the RTM website
    """
    self.cancellation_point()
    # No remote access without authentication.
    if not self.rtm_proxy.is_authenticated():
        return
    rtm_task = self.rtm_proxy.get_rtm_tasks_dict()[rtm_task_id]
    is_syncable = self._rtm_task_is_syncable_per_attached_tags(rtm_task)
    # Ask the sync engine what to do with this remote id.
    action, tid = self.sync_engine.analyze_remote_id(
        rtm_task_id,
        self.datastore.has_task,
        self.rtm_proxy.has_rtm_task,
        is_syncable)
    log.debug(f"GTG<-RTM set task ({action}, {is_syncable})")
    if action is None:
        return

    if action == SyncEngine.ADD:
        if rtm_task.get_status() != Task.STA_ACTIVE:
            # OPTIMIZATION:
            # we don't sync tasks that have already been closed before we
            # even saw them
            return
        tid = str(uuid.uuid4())
        task = self.datastore.task_factory(tid)
        self._populate_task(task, rtm_task)
        # Record both sides' modification times for later conflict
        # resolution.
        meme = SyncMeme(task.get_modified(),
                        rtm_task.get_modified(),
                        "RTM")
        self.sync_engine.record_relationship(local_id=tid,
                                             remote_id=rtm_task_id,
                                             meme=meme)
        self.datastore.push_task(task)

    elif action == SyncEngine.UPDATE:
        task = self.datastore.get_task(tid)
        with self.datastore.get_backend_mutex():
            meme = self.sync_engine.get_meme_from_remote_id(rtm_task_id)
            newest = meme.which_is_newest(task.get_modified(),
                                          rtm_task.get_modified())
            if newest == "remote":
                # Remote copy is newer: overwrite the local task.
                self._populate_task(task, rtm_task)
                meme.set_remote_last_modified(rtm_task.get_modified())
                meme.set_local_last_modified(task.get_modified())
            else:
                # we skip saving the state
                return

    elif action == SyncEngine.REMOVE:
        try:
            rtm_task.delete()
            self.sync_engine.break_relationship(remote_id=rtm_task_id)
        except KeyError:
            pass

    elif action == SyncEngine.LOST_SYNCABILITY:
        self._exec_lost_syncability(tid, rtm_task)

    self.save_state()
def on_task_modified(self, task_id, path):
    """ Stop task if it is tracked and it is Done/Dismissed """
    log.debug('Hamster: task modified %s', task_id)
    task = self.plugin_api.get_requester().get_task(task_id)
    # A vanished task, or one still active, needs no action.
    if task and task.get_status() in (Task.STA_DISMISSED, Task.STA_DONE):
        self.stop_task(task_id)
def on_prepare_for_sleep(self, connection, sender_name, object_path,
                         interface_name, signal_name, parameters,
                         user_data):
    """Handle dbus prepare for sleep signal."""
    log.debug(f"Received dbus signal {signal_name}{parameters} for Timer")
    sleeping = parameters[0]
    # Only emit the signal if we are resuming from suspend,
    # not preparing for it.
    if not sleeping:
        self.emit_refresh()
def get_backend(self, backend_name):
    """
    Returns the backend module for the backend matching
    backend_name. Else, returns none
    """
    # EAFP: a single dict lookup instead of membership test + lookup.
    try:
        return self.backend_modules[backend_name]
    except KeyError:
        log.debug(f"Trying to load backend {backend_name}, but failed!")
        return None
def delete_task(self, tid, recursive=True):
    """Delete the task 'tid' and, by default, delete recursively
    all the childrens.

    Note: this modifies the datastore.

    @param tid: The id of the task to be deleted.
    @param recursive: also delete every child task when True (default)
    """
    # send the signal before actually deleting the task !
    log.debug(f"deleting task {tid}")
    return self.__basetree.del_node(tid, recursive=recursive)
def _process_tomboy_note(self, note):
    """
    Given a tomboy note, finds out if it must be synced to a GTG note and,
    if so, it carries out the synchronization (by creating or updating a
    GTG task, or deleting itself if the related task has been deleted)

    @param note: a Tomboy note id
    """
    # The whole analysis/apply sequence runs under the backend mutex so
    # no other backend operation can interleave.
    with self.datastore.get_backend_mutex():
        self.cancellation_point()
        is_syncable = self._tomboy_note_is_syncable(note)
        has_task = self.datastore.has_task
        note_exists = self._tomboy_note_exists
        # Dbus calls are wrapped in a watchdog in case Tomboy hangs.
        with self.DbusWatchdog(self):
            action, tid = self.sync_engine.analyze_remote_id(
                note, has_task, note_exists, is_syncable)
        log.debug(f"processing tomboy ({action}, {is_syncable})")

        if action == SyncEngine.ADD:
            tid = str(uuid.uuid4())
            task = self.datastore.task_factory(tid)
            self._populate_task(task, note)
            modified_for = self.get_modified_for_note(note)
            # NOTE(review): other backends call
            # self.sync_engine.record_relationship here; confirm
            # self.record_relationship is a deliberate wrapper.
            self.record_relationship(local_id=tid,
                                     remote_id=note,
                                     meme=SyncMeme(task.get_modified(),
                                                   modified_for,
                                                   self.get_id()))
            self.datastore.push_task(task)

        elif action == SyncEngine.UPDATE:
            task = self.datastore.get_task(tid)
            meme = self.sync_engine.get_meme_from_remote_id(note)
            newest = meme.which_is_newest(
                task.get_modified(),
                self.get_modified_for_note(note))
            if newest == "remote":
                # Remote note is newer: overwrite the local task.
                self._populate_task(task, note)
                meme.set_local_last_modified(task.get_modified())
                meme.set_remote_last_modified(
                    self.get_modified_for_note(note))
                self.save_state()

        elif action == SyncEngine.REMOVE:
            with self.TomboyConnection(self, *self.BUS_ADDRESS) as tomboy:
                with self.DbusWatchdog(self):
                    tomboy.DeleteNote(note)
                try:
                    self.sync_engine.break_relationship(remote_id=note)
                except KeyError:
                    pass

        elif action == SyncEngine.LOST_SYNCABILITY:
            self._exec_lost_syncability(tid, note)
def remove_widget_from_taskeditor(self, widg_id):
    """Remove a widget from the bottom of the task editor dialog

    @param widget: The Gtk.Widget that is going to be removed
    """
    # Nothing to do outside the editor or without an id.
    if not (self.is_editor() and widg_id):
        return
    try:
        container = self.__builder.get_object('vbox4')
        if container and widg_id in self.taskwidget_widg:
            container.remove(self.taskwidget_widg.pop(widg_id))
    except Exception as e:
        log.debug(f"Error removing the toolbar item in the TaskEditor:{e}")
def duplicate(self):
    """Duplicates a task with a new ID

    The copy inherits the recurrence term, title, content and tags;
    its due date is set to the next occurrence of the recurrence.

    @return: the newly created copy of this task
    """
    copy = self.req.ds.new_task()
    # Inherit the recurrency
    copy.set_recurring(True, self.recurring_term)
    nextdate = self.get_next_occurrence()
    copy.set_due_date(nextdate)
    copy.set_title(self.title)
    copy.content = self.content
    copy.tags = self.tags
    # BUGFIX: log message typo "Duppicating" corrected.
    log.debug(f"Duplicating task {self.get_id()} as task {copy.get_id()}")
    return copy
def set_task(self, task):
    """ See GenericBackend for an explanation of this function.

    Pushes a local GTG task to Evolution: depending on the sync
    engine's verdict the task is created remotely, merged with its
    remote counterpart, deleted locally, or unlinked.
    """
    tid = task.get_id()
    is_syncable = self._gtg_task_is_syncable_per_attached_tags(task)
    # Ask the sync engine what to do with this local id.
    action, evo_task_id = self.sync_engine.analyze_local_id(
        tid, self.datastore.has_task, self._evo_has_task, is_syncable)
    log.debug(f'GTG->Evo set task ({action}, {is_syncable})')
    if action is None:
        return

    if action == SyncEngine.ADD:
        evo_task = evolution.ecal.ECalComponent(
            ical=evolution.ecal.CAL_COMPONENT_TODO)
        with self.datastore.get_backend_mutex():
            self._evolution_tasks.add_object(evo_task)
            self._populate_evo_task(task, evo_task)
            # Record both sides' modification times for later conflict
            # resolution.
            meme = SyncMeme(task.get_modified(),
                            self._evo_get_modified(evo_task),
                            "GTG")
            self.sync_engine.record_relationship(
                local_id=tid, remote_id=evo_task.get_uid(), meme=meme)

    elif action == SyncEngine.UPDATE:
        with self.datastore.get_backend_mutex():
            evo_task = self._evo_get_task(evo_task_id)
            meme = self.sync_engine.get_meme_from_local_id(task.get_id())
            newest = meme.which_is_newest(task.get_modified(),
                                          self._evo_get_modified(evo_task))
            if newest == "local":
                # Local copy is newer: overwrite the Evolution task.
                self._populate_evo_task(task, evo_task)
                meme.set_remote_last_modified(
                    self._evo_get_modified(evo_task))
                meme.set_local_last_modified(task.get_modified())
            else:
                # we skip saving the state
                return

    elif action == SyncEngine.REMOVE:
        self.datastore.request_task_deletion(tid)
        try:
            self.sync_engine.break_relationship(local_id=tid)
        except KeyError:
            pass

    elif action == SyncEngine.LOST_SYNCABILITY:
        evo_task = self._evo_get_task(evo_task_id)
        self._exec_lost_syncability(tid, evo_task)

    self.save_state()
def _process_evo_task(self, evo_task_id):
    """
    Takes an evolution task id and carries out the necessary operations to
    refresh the sync state
    """
    self.cancellation_point()
    evo_task = self._evo_get_task(evo_task_id)
    is_syncable = self._evo_task_is_syncable(evo_task)
    # Ask the sync engine what to do with this remote id.
    action, tid = self.sync_engine.analyze_remote_id(
        evo_task_id, self.datastore.has_task, self._evo_has_task,
        is_syncable)
    log.debug(f'GTG<-Evo set task ({action}, {is_syncable})')

    if action == SyncEngine.ADD:
        with self.datastore.get_backend_mutex():
            tid = str(uuid.uuid4())
            task = self.datastore.task_factory(tid)
            self._populate_task(task, evo_task)
            # Record both sides' modification times for later conflict
            # resolution.
            meme = SyncMeme(task.get_modified(),
                            self._evo_get_modified(evo_task),
                            "GTG")
            self.sync_engine.record_relationship(local_id=tid,
                                                 remote_id=evo_task_id,
                                                 meme=meme)
            self.datastore.push_task(task)

    elif action == SyncEngine.UPDATE:
        with self.datastore.get_backend_mutex():
            task = self.datastore.get_task(tid)
            meme = self.sync_engine.get_meme_from_remote_id(evo_task_id)
            newest = meme.which_is_newest(task.get_modified(),
                                          self._evo_get_modified(evo_task))
            if newest == "remote":
                # Remote copy is newer: overwrite the local task.
                self._populate_task(task, evo_task)
                meme.set_remote_last_modified(
                    self._evo_get_modified(evo_task))
                meme.set_local_last_modified(task.get_modified())

    elif action == SyncEngine.REMOVE:
        # NOTE(review): this bare `return` makes the deletion code below
        # unreachable (remote removal is effectively disabled) and also
        # skips save_state(). Confirm whether this is a deliberate
        # safety switch before re-enabling.
        return
        try:
            evo_task = self._evo_get_task(evo_task_id)
            self._delete_evolution_task(evo_task)
            # NOTE(review): other call sites pass evo_task_id here, not
            # the task object — looks like a bug if ever re-enabled.
            self.sync_engine.break_relationship(remote_id=evo_task)
        except KeyError:
            pass

    elif action == SyncEngine.LOST_SYNCABILITY:
        self._exec_lost_syncability(tid, evo_task)

    self.save_state()
def open_uri_list(self):
    """Open the Editor windows of the tasks associated with the uris given.

    Uris are of the form gtg://<taskid>
    """
    log.debug(f'Received {len(self.uri_list)} Task URIs')
    prefix = 'gtg://'
    for uri in self.uri_list:
        if uri.startswith(prefix):
            task_id = uri[len(prefix):]
            log.debug(f'Opening task {task_id}')
            self.open_task(task_id)
    # if no window was opened, we just quit
    if not (self.browser.is_visible() or self.open_tasks):
        self.quit()
def do_activate(self):
    """Callback when launched from the desktop."""
    if not self.browser:
        # First activation: build the (still hidden) main window.
        self.browser = MainWindow(self.req, self)
        # Visually mark development builds when debug logging is on.
        if log.isEnabledFor(logging.DEBUG):
            self.browser.get_style_context().add_class('devel')
        self.init_actions()
        self.init_plugin_engine()
    self.browser.present()
    self.open_uri_list()
    log.debug("Application activation finished")
def close_task(self, tid):
    """Close a task editor window."""
    try:
        self.open_tasks[tid].close()
        opened = self.config.get("opened_tasks")
        if tid in opened:
            opened.remove(tid)
            self.config.set("opened_tasks", opened)
    except KeyError:
        log.debug(f'Tried to close tid {tid} but it is not open')
def do_activate(self):
    """Callback when launched from the desktop."""
    if not self.browser:
        # First activation: build the (still hidden) main window.
        self.browser = MainWindow(self.req, self)
        # Visually mark the development flavour of the app.
        if self.props.application_id == 'org.gnome.GTGDevel':
            self.browser.get_style_context().add_class('devel')
        self.init_actions()
        self.init_plugin_engine()
    self.browser.present()
    self.open_uri_list()
    log.debug("Application activation finished")
def _activate_non_default_backends(self, sender=None): """ Non-default backends have to wait until the default loads before being activated. This function is called after the first default backend has loaded all its tasks. @param sender: not used, just here for signal compatibility """ if self.is_default_backend_loaded: log.debug("spurious call") return self.is_default_backend_loaded = True for backend in self.backends.values(): if backend.is_enabled() and not backend.is_default(): self._backend_startup(backend)
def _process_launchpad_bug(self, bug):
    """
    Given a bug object, finds out if it must be synced to a GTG note and,
    if so, it carries out the synchronization (by creating or updating a
    GTG task, or deleting itself if the related task has been deleted)

    @param note: a launchpad bug
    """
    has_task = self.datastore.has_task
    # Launchpad is read-only here: everything is considered syncable
    # (lambda b: True), so only ADD/UPDATE can come back.
    action, tid = self.sync_engine.analyze_remote_id(bug.self_link,
                                                     has_task,
                                                     lambda b: True)
    log.debug(f"processing launchpad ({action})")
    if action is None:
        return
    bug_dic = self._prefetch_bug_data(bug)
    # for the rest of the function, no access to bug must be made, so
    # that the time of blocking inside the with statements is short.
    # To be sure of that, set bug to None
    bug = None

    with self.datastore.get_backend_mutex():
        if action == SyncEngine.ADD:
            tid = str(uuid.uuid4())
            task = self.datastore.task_factory(tid)
            self._populate_task(task, bug_dic)
            # Record both sides' modification times for later conflict
            # resolution.
            meme = SyncMeme(
                task.get_modified(), bug_dic['modified'], self.get_id())
            self.sync_engine.record_relationship(
                local_id=tid,
                remote_id=str(bug_dic['self_link']),
                meme=meme,
            )
            self.datastore.push_task(task)

        elif action == SyncEngine.UPDATE:
            task = self.datastore.get_task(tid)
            self._populate_task(task, bug_dic)
            meme = self.sync_engine.get_meme_from_remote_id(
                bug_dic['self_link'])
            meme.set_local_last_modified(task.get_modified())
            meme.set_remote_last_modified(bug_dic['modified'])

    self.save_state()
def refresh_rtm_tasks_dict(self):
    """
    Builds a list of RTMTasks fetched from RTM

    Refreshes self._rtm_task_dict (mapping task id -> RTMTask) and
    records the time of the refresh. If another thread is already
    refreshing, this call just waits for that refresh and returns.
    """
    if not self.is_authenticated():
        self.start_authentication()
        self.wait_for_authentication()

    # FIX: is_set() replaces the camelCase isSet() alias, which has
    # been deprecated since Python 3.10.
    if not self.is_not_refreshing.is_set():
        # if we're already refreshing, we just wait for that to happen and
        # then we immediately return
        self.is_not_refreshing.wait()
        return
    self.is_not_refreshing.clear()
    log.debug('refreshing rtm')

    # To understand what this function does, here's a sample output of the
    # plain getLists() from RTM api:
    #      http://www.rememberthemilk.com/services/api/tasks.rtm

    # our purpose is to fill this with "tasks_id: RTMTask" items
    rtm_tasks_dict = {}
    rtm_lists_list = self.__get_rtm_lists()
    # for each rtm list, we retrieve all the tasks in it
    for rtm_list in rtm_lists_list:
        if rtm_list.archived != '0' or rtm_list.smart != '0':
            # we skip archived and smart lists
            continue
        rtm_taskseries_list = self.__get_rtm_taskseries_in_list(
            rtm_list.id)
        for rtm_taskseries in rtm_taskseries_list:
            # we drill down to actual tasks
            rtm_tasks_list = self.__getattr_the_rtm_way(
                rtm_taskseries, 'task')
            for rtm_task in rtm_tasks_list:
                rtm_tasks_dict[rtm_task.id] = RTMTask(
                    rtm_task, rtm_taskseries, rtm_list, self.rtm,
                    self.timeline)

    # we're done: we store the dict in this class and we annotate the time
    # we got it
    self._rtm_task_dict = rtm_tasks_dict
    self.__rtm_task_dict_timestamp = datetime.datetime.now()
    self.is_not_refreshing.set()
def insert_existing_subtask(self, tid: str, line: int = None) -> None: """Insert an existing subtask in the buffer.""" # Check if the task exists first if not self.req.has_task(tid): log.debug(f'Task {tid} not found') return if line is not None: start = self.buffer.get_iter_at_line(line) else: start = self.buffer.get_end_iter() self.buffer.insert(start, '\n') start.forward_line() line = start.get_line() # Add subtask name task = self.req.get_task(tid) self.buffer.insert(start, task.get_title()) # Reset iterator start = self.buffer.get_iter_at_line(line) # Add checkbox self.add_checkbox(tid, start) # Apply link to subtask text end = start.copy() end.forward_to_line_end() link_tag = InternalLinkTag(tid, task.get_status()) self.table.add(link_tag) self.buffer.apply_tag(link_tag, start, end) self.tags_applied.append(link_tag) # Apply subtask tag to everything start.backward_char() subtask_tag = SubTaskTag(tid) self.table.add(subtask_tag) self.buffer.apply_tag(subtask_tag, start, end) self.subtasks['tags'].append(tid) # Make sure subtasks can be deleted when removed in the text editor task.can_be_deleted = True
def add_child(self, tid):
    """Add a subtask to this task

    @param child: the added task
    """
    log.debug(f"adding child {tid} to task {self.get_id()}")
    self.can_be_deleted = False
    # the core of the method is in the TreeNode object
    TreeNode.add_child(self, tid)
    # now we set inherited attributes only if it's a new task
    child = self.req.get_task(tid)
    if self.is_loaded() and child and child.can_be_deleted:
        child.set_start_date(self.get_start_date())
        child.set_due_date(self.get_due_date())
        for tag in self.get_tags():
            child.add_tag(tag.get_name())
    self.sync()
    return True
def purge_old_tasks(self, widget=None):
    """Remove closed tasks older than N days.

    N is read from the 'autoclean_days' configuration key.

    @param widget: unused, present for signal-handler compatibility
    """
    log.debug("Deleting old tasks")

    today = Date.today()
    max_days = self.config.get('autoclean_days')
    closed_tree = self.req.get_tasks_tree(name='inactive')

    closed_tasks = [self.req.get_task(tid) for tid in
                    closed_tree.get_all_nodes()]

    to_remove = [t for t in closed_tasks
                 if (today - t.get_closed_date()).days > max_days]

    # Plain loop instead of a list comprehension used only for its
    # side effects.
    for task in to_remove:
        if self.req.has_task(task.get_id()):
            self.req.delete_task(task.get_id())
def destruction(self, _=None):
    """Callback when destroying the window.

    Saves or discards the task depending on whether it was ever filled
    in, and unregisters the editor from the application.
    """
    # Save should be also called when buffer is modified
    self.pengine.onTaskClose(self.plugin_api)
    self.pengine.remove_api(self.plugin_api)

    tid = self.task.get_id()

    if self.task.is_new():
        # A task that was never filled in is discarded on close.
        self.req.delete_task(tid)
    else:
        self.save()
        # Plain loop instead of a list comprehension used only for its
        # side effects.
        for sub in self.task.get_subtasks():
            if sub:
                sub.set_to_keep()

    try:
        del self.app.open_tasks[tid]
    except KeyError:
        log.debug(f'Task {tid} was already removed from the open list')
def remove_task(self, tid):
    """ See GenericBackend for an explanation of this function.

    Deletes the RTM task linked to the local task ``tid`` (best
    effort), then forgets the local/remote relationship.
    """
    if not self.rtm_proxy.is_authenticated():
        return
    self.cancellation_point()
    try:
        rtm_task_id = self.sync_engine.get_remote_id(tid)
        if rtm_task_id not in self.rtm_proxy.get_rtm_tasks_dict():
            # we might need to refresh our task cache
            self.rtm_proxy.refresh_rtm_tasks_dict()
        rtm_task = self.rtm_proxy.get_rtm_tasks_dict()[rtm_task_id]
        rtm_task.delete()
        log.debug(f"removing task {rtm_task_id} from RTM")
    except KeyError:
        # No relationship or no such remote task: nothing to delete.
        pass
    try:
        self.sync_engine.break_relationship(local_id=tid)
        self.save_state()
    except Exception:
        # FIX: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit. Still best-effort cleanup.
        pass
def do_open(self, files, n_files, hint):
    """Callback when opening files/tasks"""
    self.init_shared()
    log.debug(f'Received {len(files)} Task URIs')
    if len(files) != n_files:
        log.warning(
            f"Length of files {len(files)} != supposed length {n_files}")

    for file in files:
        if file.get_uri_scheme() != 'gtg':
            log.info(f"Unknown task to open: {file.get_uri()}")
            continue
        uri = file.get_uri()
        if uri[4:6] != '//':
            log.info(f"Malformed URI, needs gtg://: {uri}")
            continue
        # gtg://<taskid> — the task id sits in the netloc component.
        task_id = urllib.parse.urlparse(uri).netloc
        log.debug(f'Opening task {task_id}')
        self.open_task(task_id)

    log.debug("Application opening finished")
def set_task(self, task):
    """ See GenericBackend for an explanation of this function.

    Pushes a local GTG task to RTM: depending on the sync engine's
    verdict the task is created remotely, merged with its remote
    counterpart, deleted locally, or unlinked.
    """
    if not self.rtm_proxy.is_authenticated():
        return
    self.cancellation_point()
    tid = task.get_id()
    is_syncable = self._gtg_task_is_syncable_per_attached_tags(task)
    # Ask the sync engine what to do with this local id.
    action, rtm_task_id = self.sync_engine.analyze_local_id(
        tid,
        self.datastore.has_task,
        self.rtm_proxy.has_rtm_task,
        is_syncable)
    log.debug(f"GTG->RTM set task ({action}, {is_syncable})")
    if action is None:
        return

    if action == SyncEngine.ADD:
        if task.get_status() != Task.STA_ACTIVE:
            # OPTIMIZATION:
            # we don't sync tasks that have already been closed before we
            # even synced them once
            return
        try:
            rtm_task = self.rtm_proxy.create_new_rtm_task(task.get_title())
            self._populate_rtm_task(task, rtm_task)
        except:
            # NOTE(review): bare except re-raises after cleanup, but also
            # catches KeyboardInterrupt/SystemExit; and if
            # create_new_rtm_task itself raised, rtm_task is unbound here.
            rtm_task.delete()
            raise
        # Record both sides' modification times for conflict resolution.
        meme = SyncMeme(task.get_modified(),
                        rtm_task.get_modified(),
                        "GTG")
        self.sync_engine.record_relationship(
            local_id=tid, remote_id=rtm_task.get_id(), meme=meme)

    elif action == SyncEngine.UPDATE:
        try:
            rtm_task = self.rtm_proxy.get_rtm_tasks_dict()[rtm_task_id]
        except KeyError:
            # in this case, we don't have yet the task in our local cache
            # of what's on the rtm website
            self.rtm_proxy.refresh_rtm_tasks_dict()
            rtm_task = self.rtm_proxy.get_rtm_tasks_dict()[rtm_task_id]
        with self.datastore.get_backend_mutex():
            meme = self.sync_engine.get_meme_from_local_id(task.get_id())
            newest = meme.which_is_newest(task.get_modified(),
                                          rtm_task.get_modified())
            if newest == "local":
                # Track per-change transaction ids so a failed update can
                # be rolled back on the RTM side.
                transaction_ids = []
                try:
                    self._populate_rtm_task(task, rtm_task,
                                            transaction_ids)
                except:
                    # NOTE(review): bare except re-raises after rollback,
                    # but also catches KeyboardInterrupt/SystemExit.
                    self.rtm_proxy.unroll_changes(transaction_ids)
                    raise
                meme.set_remote_last_modified(rtm_task.get_modified())
                meme.set_local_last_modified(task.get_modified())
            else:
                # we skip saving the state
                return

    elif action == SyncEngine.REMOVE:
        self.datastore.request_task_deletion(tid)
        try:
            self.sync_engine.break_relationship(local_id=tid)
        except KeyError:
            pass

    elif action == SyncEngine.LOST_SYNCABILITY:
        try:
            rtm_task = self.rtm_proxy.get_rtm_tasks_dict()[rtm_task_id]
        except KeyError:
            # in this case, we don't have yet the task in our local cache
            # of what's on the rtm website
            self.rtm_proxy.refresh_rtm_tasks_dict()
            rtm_task = self.rtm_proxy.get_rtm_tasks_dict()[rtm_task_id]
        self._exec_lost_syncability(tid, rtm_task)

    self.save_state()
# FIX: `subprocess` and `GLib` are used below but were never imported,
# which would raise NameError as soon as this module is imported.
import subprocess

from gi.repository import GLib, GObject, Gtk, GdkPixbuf

from gettext import gettext as _

from GTG.core.logger import log
from GTG.plugins.export.task_str import get_task_wrappers
from GTG.plugins.export.templates import Template, get_templates_paths


# Enforce external dependencies: refuse to load the plugin when any of
# the required commands is not installed.
for dependence in "pdflatex", "pdftk", "pdfjam":
    retval = subprocess.call(
        ["which", dependence],
        stdout=subprocess.PIPE,
        stderr=subprocess.DEVNULL,
    )
    if retval != 0:
        log.debug(f'Missing command "{dependence}"')
        raise ImportError(f'Missing command "{dependence}"')


def get_desktop_dir():
    """ Returns path to desktop dir. """
    return GLib.get_user_special_dir(GLib.UserDirectory.DIRECTORY_DESKTOP)


class ExportPlugin():
    """ Export plugin - handle UI and trigger exporting tasks """

    # Allow initilization outside __init__() and don't complain
    # about too many attributes
    PLUGIN_NAME = "export"