def _handle_delete_link(self, kwargs):
    """Refresh link decorations after a link has been deleted."""
    if kwargs['filename'] != self.filename:
        return

    deleted_id = kwargs['id_']
    has_backlinks = len(links_api.find_back_links(self.filename,
                                                        deleted_id)) > 0

    # The item itself may not exist anymore
    if core_api.is_item(self.filename, deleted_id):
        self._update_item(deleted_id, 3 if has_backlinks else 0)

    former_target = kwargs['oldtarget']

    # The former target may not exist anymore either
    if core_api.is_item(self.filename, former_target):
        self._reset_item(former_target)
def _handle_upsert_link(self, kwargs):
    """Refresh link decorations after a link has been created/updated."""
    if kwargs['filename'] != self.filename:
        return

    item = kwargs['id_']
    target = kwargs['target']
    former_target = kwargs['oldtarget']

    has_backlinks = len(links_api.find_back_links(self.filename, item)) > 0
    chained_target = links_api.find_link_target(self.filename, target)

    if target is None:
        # Broken link
        self._update_item(item, 5 if has_backlinks else 2)
    else:
        self._update_item(item, 4 if has_backlinks else 1)

        # Decorate the target depending on whether it is itself a link
        # (chained_target is False: not a link; None: broken link;
        # otherwise a valid link)
        if chained_target is False:
            self._update_item(target, 3)
        elif chained_target is None:
            self._update_item(target, 5)
        else:
            self._update_item(target, 4)

    # former_target may not exist anymore (False means there was none)
    if former_target is not False and core_api.is_item(self.filename,
                                                        former_target):
        self._reset_item(former_target)
def _append(self, filename, id_, alarmid, start, end, alarm):
    """Create and queue an alarm window for the given occurrence."""
    key = self.make_alarmid(filename, alarmid)

    # This method is scheduled with wx.CallAfter in _handle_alarm, i.e.
    # from another thread, so between scheduling and execution the
    # database may have been closed (e.g. while closing all the
    # databases in rapid succession, the remaining alarms are searched
    # and signalled again after each close) or the item may have been
    # deleted (e.g. fast undo/redo with CTRL+Z/Y); without these checks,
    # later lookups (e.g. core_api.get_item_text) would raise
    if core_api.is_database_open(filename) and \
            core_api.is_item(filename, id_) and \
            key not in self.alarms:
        self.alarms[key] = Alarm(self, filename, id_, alarmid, start,
                                                            end, alarm)

        if len(self.alarms) <= self.LIMIT:
            self.alarms[key].show()
        else:
            self.hiddenalarms.add(key)

        # Defer the layout through a timer: besides being much slower,
        # calling Layout and the other functions at every append would
        # raise an exception for excessive recursions when too many
        # alarms are signalled at once
        self.timer.Stop()
        self.timer = wx.CallLater(self.DELAY, self._display_append)
def _handle_history(self, kwargs):
    """Redraw the tree after a history (undo/redo) action on this database.

    Only root items are re-added eagerly; collapsed children are re-added
    lazily when their parents are expanded again.
    """
    # Yes, this is a very aggressive way of handling history actions, this
    # is redrawing the whole tree whenever an item is
    # inserted/moved/deleted, however trying to handle each case separately
    # is very complicated and thus causes numerous bugs, because each query
    # in the history group can leave the database in an unstable state
    # (e.g. the queries that update the previous id to the next/previous
    # items when moving an item)
    # It should be quite efficient anyway because self.data is not
    # recalculated except for the items that explicitly requested it, and
    # only the root items and their children are re-added, the rest of the
    # items will be re-added on request (when their parents are expanded
    # again)
    if kwargs['filename'] == self.filename:
        if self.history_tree_reset_request:
            self.dvmodel.Cleared()

            for id_ in core_api.get_root_items(self.filename):
                item = self.get_tree_item(id_)
                # For some reason ItemDeleted must be called too first...
                # (wx.dataview quirk — do not reorder these two calls)
                self.dvmodel.ItemDeleted(self._get_root(), item)
                self.dvmodel.ItemAdded(self._get_root(), item)
                self._reset_children(id_, item)

        for id_ in self.history_item_update_requests:
            # id_ may have been deleted by an action in the history group
            if core_api.is_item(self.filename, id_):
                self.update_tree_item(id_)

        # Reset the pending-request state for the next history action
        del self.history_item_update_requests[:]
        self.history_tree_reset_request = False
def _append(self, filename, id_, alarmid, start, end, alarm):
    """Create and queue an alarm window for the given occurrence."""
    key = self.make_alarmid(filename, alarmid)

    # Scheduled with wx.CallAfter in _handle_alarm from another thread:
    # the database may have been closed meanwhile (e.g. when closing all
    # the databases, the remaining alarms are searched and signalled
    # again after each close) or the item deleted (e.g. rapid undo/redo
    # with CTRL+Z/Y); skip the alarm in those cases, otherwise later item
    # lookups (e.g. core_api.get_item_text) would raise
    if core_api.is_database_open(filename) and \
            core_api.is_item(filename, id_) and \
            key not in self.alarms:
        self.alarms[key] = Alarm(self, filename, id_, alarmid, start,
                                                            end, alarm)

        if len(self.alarms) <= self.LIMIT:
            self.alarms[key].show()
        else:
            self.hiddenalarms.add(key)

        # Batch the layout behind a timer: calling Layout and related
        # functions at every append would be much slower and would raise
        # an excessive-recursion exception when many alarms are
        # signalled at once
        self.timer.Stop()
        self.timer = wx.CallLater(self.DELAY, self._display_append)
def edit_items(self):
    """Open an editor for every selected search result that still exists."""
    found_any = False
    missing_any = False
    row = self.listview.GetFirstSelected()

    while row > -1:
        filename, id_ = self.itemdatamap[self.listview.GetItemData(row)]

        # The search results are retrieved in a separate thread and are
        # not updated together with the database, so re-check that the
        # database is still open and the item still exists
        if core_api.is_database_open(filename) and \
                core_api.is_item(filename, id_):
            wxgui_api.open_editor(filename, id_)
            found_any = True
        else:
            missing_any = True

        row = self.listview.GetNextSelected(row)

    if missing_any:
        if found_any:
            msgboxes.some_items_not_found().ShowModal()
        else:
            msgboxes.all_items_not_found().ShowModal()
def _handle_upsert_link(self, kwargs):
    """Refresh link decorations after a link upsert on this database."""
    if kwargs['filename'] != self.filename:
        return

    source = kwargs['id_']
    target = kwargs['target']
    previous_target = kwargs['oldtarget']

    linked_back = len(links_api.find_back_links(self.filename, source)) > 0
    onward = links_api.find_link_target(self.filename, target)

    if target is None:
        # The upserted link is broken
        self._update_item(source, 5 if linked_back else 2)
    else:
        self._update_item(source, 4 if linked_back else 1)

        # onward is False when the target is not itself a link, None
        # when it is a broken link, and an id otherwise
        if onward is False:
            self._update_item(target, 3)
        elif onward is None:
            self._update_item(target, 5)
        else:
            self._update_item(target, 4)

    # previous_target may not exist anymore (False means none existed)
    if previous_target is not False and core_api.is_item(
                                    self.filename, previous_target):
        self._reset_item(previous_target)
def _activate_alarms_all(self, occsd):
    """Activate every occurrence in occsd whose item still exists."""
    for item_id, occurrences in occsd.items():
        # Due to race conditions the item could have been deleted
        # meanwhile (e.g. if the modal dialog for deleting the item was
        # open in the interface)
        if core_api.is_item(self.filename, item_id):
            for occurrence in occurrences:
                self._activate_alarm(occurrence)
def activate_alarms(self, time, occsd):
    """Activate the occurrences in occsd whose alarm fires exactly at time."""
    for item_id, occurrences in occsd.items():
        # Due to race conditions the item could have been deleted
        # meanwhile (e.g. if the modal dialog for deleting the item was
        # open in the interface)
        if not core_api.is_item(self.filename, item_id):
            continue

        for occurrence in occurrences:
            # An occurrence's start or end may also equal time, so only
            # compare the alarm field
            if occurrence['alarm'] == time:
                self._activate_alarm(occurrence)
def _handle_break_links(self, kwargs):
    """Refresh link decorations after a group of links has been broken."""
    if kwargs['filename'] != self.filename:
        return

    for broken_id in kwargs['ids']:
        linked_back = len(links_api.find_back_links(self.filename,
                                                        broken_id)) > 0
        self._update_item(broken_id, 5 if linked_back else 2)

    former_target = kwargs['oldtarget']

    # former_target may not exist anymore
    if core_api.is_item(self.filename, former_target):
        self._reset_item(former_target)
def _activate_alarms_unique(self, occsd):
    """Activate only the latest-alarm occurrence for each item in occsd."""
    for item_id in occsd:
        # Due to race conditions the item could have been deleted
        # meanwhile (e.g. if the modal dialog for deleting the item was
        # open in the interface)
        if not core_api.is_item(self.filename, item_id):
            continue

        try:
            latest = max(occsd[item_id], key=lambda occ: occ['alarm'])
        except ValueError:
            # occsd[item_id] may have been emptied in
            # self.activate_alarms_range
            continue

        self._activate_alarm(latest)
def find_in_tree(self):
    """Select in the database tree the items logged in this view."""
    selection = self.get_selections()

    if len(selection) == 0:
        return

    wxgui_api.unselect_all_items(self.filename)

    # Iterate the snapshot returned by get_selections on purpose:
    # looping directly on view.GetSelections(), e.g.
    #   for s in view.GetSelections():
    # doesn't work as expected!
    for tree_item in selection:
        item_id = self.get_item_id(tree_item)

        # The logged item may not exist anymore
        if core_api.is_item(self.filename, item_id):
            wxgui_api.add_item_to_selection(self.filename, item_id)
def find_in_tree(self):
    """Select the chosen search results in their database trees.

    Also switches to the tab of the first selected result's database, and
    warns if some (or all) of the selected results no longer exist.
    """
    sel = self.listview.GetFirstSelected()

    if sel > -1:
        # Clear the tree selection of every open database first
        for filename in core_api.get_open_databases():
            wxgui_api.unselect_all_items(filename)

        seldb = None
        warning = False

        # [1]: line repeated in the loop because of
        # wxgui_api.select_database_tab
        filename, id_ = self.itemdatamap[self.listview.GetItemData(sel)]

        while True:
            # It's necessary to repeat this line (see [1]) because
            # wxgui_api.select_database_tab must be executed only once
            # for the first selected item
            filename, id_ = self.itemdatamap[self.listview.GetItemData(
                                                                    sel)]

            # Check whether the database is still open and the item
            # still exists because the search results are retrieved in
            # a separate thread and are not updated together with the
            # database
            if core_api.is_database_open(filename) and \
                    core_api.is_item(filename, id_):
                wxgui_api.add_item_to_selection(filename, id_)

                # Remember the first result's database so its tab can be
                # focused after the loop
                if seldb is None:
                    seldb = filename
            else:
                warning = True

            sel = self.listview.GetNextSelected(sel)

            if sel < 0:
                break

        if seldb:
            wxgui_api.select_database_tab(seldb)

            # At least one item was found, but some were missing
            if warning:
                msgboxes.some_items_not_found().ShowModal()
        elif warning:
            # No item was found at all
            msgboxes.all_items_not_found().ShowModal()
def upsert_link(filename, id_, target, group, description='Insert link'):
    """Insert or update the (single) link of item id_ to point to target.

    target may be None (creating a broken link) or a no-longer existing
    item, in which case it is forced to None. When the target exists, the
    item's text is synchronized with the target's and any rules are
    dropped. Records the action in the history and signals
    upsert_link_event.

    Raises exceptions.CircularLinksError if linking id_ to target would
    create a circular chain of links (including a link to self).
    """
    # target could be None (creating a broken link) or could be a no-longer
    # existing item
    if core_api.is_item(filename, target):
        # Forbid circular links (including links to self), as it could
        # generate unexpected infinite recursions (e.g. with
        # synchronize_links_text)
        if id_ in find_links_chain(filename, target):
            raise exceptions.CircularLinksError()
        else:
            # Sync text
            tgttext = core_api.get_item_text(filename, target)
            core_api.update_item_text(filename, id_, tgttext, group=group,
                                                description=description)

            # Drop any rules
            if organism_api and filename in \
                            organism_api.get_supported_open_databases():
                organism_api.update_item_rules(filename, id_, [],
                                    group=group, description=description)
    else:
        # Force target = None if the given target no longer exists
        target = None

        # Drop any rules
        if organism_api and filename in \
                        organism_api.get_supported_open_databases():
            organism_api.update_item_rules(filename, id_, [], group=group,
                                                description=description)

    # Note that exceptions.CircularLinksError could be raised before getting
    # here
    qconn = core_api.get_connection(filename)
    cursor = qconn.cursor()
    cursor.execute(queries.links_select_id, (id_, ))
    res = cursor.fetchone()

    # Do not allow creating more than one link per item
    if res:
        oldtarget = res['L_target']
        do_update_link(filename, cursor, target, id_)
        core_api.give_connection(filename, qconn)

        # BUG FIX: this argument was `oldtarget if str(oldtarget) is not
        # None else None` — str() never returns None, so the condition
        # was always true: a None oldtarget was passed through
        # unconverted and a real one was passed as its raw type instead
        # of a string, unlike the target argument beside it
        core_api.insert_history(
                filename, group, id_, 'link_update', description,
                str(target) if target is not None else None,
                str(oldtarget) if oldtarget is not None else None)
    else:
        oldtarget = False
        # 'target' can be None, thus allowing the creation of a broken link
        do_insert_link(filename, cursor, id_, target)
        core_api.give_connection(filename, qconn)

        core_api.insert_history(filename, group, id_, 'link_insert',
                description, str(target) if target is not None else None,
                None)

    upsert_link_event.signal(filename=filename, id_=id_, target=target,
                                                    oldtarget=oldtarget)
def upsert_link(filename, id_, target, group, description='Insert link'):
    """Insert or update the (single) link of item id_ to point to target.

    target may be None (creating a broken link) or a no-longer existing
    item, in which case it is forced to None. When the target exists, the
    item's text is synchronized with the target's and any rules are
    dropped. Records the action in the history and signals
    upsert_link_event.

    Raises exceptions.CircularLinksError if linking id_ to target would
    create a circular chain of links (including a link to self).
    """
    # target could be None (creating a broken link) or could be a no-longer
    # existing item
    if core_api.is_item(filename, target):
        # Forbid circular links (including links to self), as it could
        # generate unexpected infinite recursions (e.g. with
        # synchronize_links_text)
        if id_ in find_links_chain(filename, target):
            raise exceptions.CircularLinksError()
        else:
            # Sync text
            tgttext = core_api.get_item_text(filename, target)
            core_api.update_item_text(filename, id_, tgttext, group=group,
                                                description=description)

            # Drop any rules
            if organism_api and filename in \
                            organism_api.get_supported_open_databases():
                organism_api.update_item_rules(filename, id_, [],
                                    group=group, description=description)
    else:
        # Force target = None if the given target no longer exists
        target = None

        # Drop any rules
        if organism_api and filename in \
                        organism_api.get_supported_open_databases():
            organism_api.update_item_rules(filename, id_, [], group=group,
                                                description=description)

    # Note that exceptions.CircularLinksError could be raised before getting
    # here
    qconn = core_api.get_connection(filename)
    cursor = qconn.cursor()
    cursor.execute(queries.links_select_id, (id_, ))
    res = cursor.fetchone()

    # Do not allow creating more than one link per item
    if res:
        oldtarget = res['L_target']
        do_update_link(filename, cursor, target, id_)
        core_api.give_connection(filename, qconn)

        # BUG FIX: this argument was `oldtarget if str(oldtarget) is not
        # None else None` — str() never returns None, so the condition
        # was always true: a None oldtarget was passed through
        # unconverted and a real one was passed as its raw type instead
        # of a string, unlike the target argument beside it
        core_api.insert_history(filename, group, id_, 'link_update',
                description, str(target) if target is not None else None,
                str(oldtarget) if oldtarget is not None else None)
    else:
        oldtarget = False
        # 'target' can be None, thus allowing the creation of a broken link
        do_insert_link(filename, cursor, id_, target)
        core_api.give_connection(filename, qconn)

        core_api.insert_history(filename, group, id_, 'link_insert',
                description, str(target) if target is not None else None,
                None)

    upsert_link_event.signal(filename=filename, id_=id_, target=target,
                                                    oldtarget=oldtarget)