def get_number_of_active_alarms(self):
    conn = core_api.get_connection(self.filename)
    cur = conn.cursor()
    cur.execute(queries.alarms_select_count)
    core_api.give_connection(self.filename, conn)

    row = cur.fetchone()
    return row['A_active_alarms']
def get_last_search(self):
    conn = core_api.get_connection(self.filename)
    cur = conn.cursor()
    cur.execute(queries.timerproperties_select_search)
    core_api.give_connection(self.filename, conn)

    return cur.fetchone()['TP_last_search']
def _activate_alarm(self, alarm):
    # If one of the loops that call this method lasts long enough (and
    # they're not run on the main thread), the database may be closed
    # meanwhile; however this function seems to terminate safely without
    # the need of further tests here
    if 'alarmid' not in alarm:
        alarmid = self._insert_alarm(id_=alarm['id_'],
                                     start=alarm['start'],
                                     end=alarm['end'],
                                     origalarm=alarm['alarm'],
                                     # Note that here passing None is correct
                                     # (do not pass False)
                                     snooze=None)
    else:
        alarmid = alarm['alarmid']

        # Occurrence dictionaries store active alarms with False, not None
        if alarm['alarm']:
            filename = alarm['filename']
            qconn = core_api.get_connection(filename)
            cursor = qconn.cursor()
            # Note that here using None is correct (do not use False)
            cursor.execute(queries.alarms_update_id, (None, alarmid))
            core_api.give_connection(filename, qconn)

    alarm_event.signal(filename=alarm['filename'],
                       id_=alarm['id_'],
                       alarmid=alarmid,
                       start=alarm['start'],
                       end=alarm['end'],
                       alarm=alarm['alarm'])
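# A minimal illustrative sketch, not part of the original module: it only
# restates the convention noted in the comments above, where the database's
# A_snooze column stores an active (non-snoozed) alarm as None, while
# occurrence dictionaries store the same state as False so that generic
# boolean tests behave consistently. The helper name is hypothetical.
def _snooze_db_to_occurrence(db_snooze):
    # None in the database means "active, not snoozed"; occurrence
    # dictionaries represent that state with False
    return False if db_snooze is None else db_snooze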
def set_last_search(self, tstamp):
    conn = core_api.get_connection(self.filename)
    cur = conn.cursor()
    # Use a UTC timestamp, so that even if the local time zone is changed
    # on the system, the timer behaves properly
    cur.execute(queries.timerproperties_update, (tstamp, ))
    core_api.give_connection(self.filename, conn)
def get_all_valid_item_rules(self):
    qconn = core_api.get_connection(self.filename)
    cursor = qconn.cursor()
    cursor.execute(queries.rules_select_all, (self.rules_to_string([]), ))
    core_api.give_connection(self.filename, qconn)
    return cursor
def handle_history_delete(filename, action, jparams, hid, type_, itemid):
    qconn = core_api.get_connection(filename)
    cursor = qconn.cursor()
    do_delete_link(cursor, itemid)
    core_api.give_connection(filename, qconn)

    history_delete_event.signal(filename=filename, id_=itemid)
def select_links(filename):
    qconn = core_api.get_connection(filename)
    cursor = qconn.cursor()
    cursor.execute(queries.links_select)
    core_api.give_connection(filename, qconn)
    return cursor.fetchall()
def get_alarms(self, mint, maxt, occs):
    conn = core_api.get_connection(self.filename)
    cur = conn.cursor()
    cur.execute(queries.alarms_select)
    core_api.give_connection(self.filename, conn)

    for row in cur:
        origalarm = row['A_alarm']

        # Do not assign None here so that it's possible to distinguish
        # between occurrences without an alarm and occurrences with an
        # active alarm when they're mixed together
        # Storing False ensures consistent behaviour with None when doing
        # generic boolean tests
        snooze = False if row['A_snooze'] is None else row['A_snooze']

        alarmd = {'filename': self.filename,
                  'id_': row['A_item'],
                  'alarmid': row['A_id'],
                  'start': row['A_start'],
                  'end': row['A_end'],
                  'alarm': snooze}

        # If the alarm is not added to occs, add it to the active
        # dictionary if it's active (but not snoozed)
        # Note that if the alarm is active but its time values fall
        # between mint and maxt, the alarm is added to the main dictionary,
        # not the active one
        # Also note that the second argument must be origalarm, not snooze:
        # it's used to *update* the occurrence (if present) with the new
        # snooze time stored in alarmd
        if not occs.update(alarmd, origalarm) and snooze is False:
            occs.move_active(alarmd, origalarm)
def reset_modified_state(self):
    conn = core_api.get_connection(self.filename)
    cur = conn.cursor()
    self.changes = [row for row in cur.execute(queries.alarms_select)]
    core_api.give_connection(self.filename, conn)

    self.modified_state = False
def find_back_links(filename, id_):
    qconn = core_api.get_connection(filename)
    cursor = qconn.cursor()
    cursor.execute(queries.links_select_target, (id_, ))
    core_api.give_connection(filename, qconn)
    return [row['L_id'] for row in cursor.fetchall()]
def get_all_item_rules(self):
    qconn = core_api.get_connection(self.filename)
    cursor = qconn.cursor()
    cursor.execute(queries.rules_select)
    core_api.give_connection(self.filename, qconn)
    return cursor
def select_alarms_log(self):
    qconn = core_api.get_connection(self.filename)
    cursor = qconn.cursor()
    cursor.execute(queries.alarmsofflog_select_order)
    core_api.give_connection(self.filename, qconn)
    return cursor
def copy_item_rules(self, id_):
    conn = core_api.get_connection(self.filename)
    cur = conn.cursor()
    cur.execute(queries.rules_select_id, (id_, ))
    core_api.give_connection(self.filename, conn)
    return cur.fetchone()
def handle_history_update(filename, action, jparams, hid, type_, itemid):
    qconn = core_api.get_connection(filename)
    cursor = qconn.cursor()
    do_update_link(filename, cursor,
                   int(jparams) if jparams is not None else None, itemid)
    core_api.give_connection(filename, qconn)

    history_update_event.signal(filename=filename, id_=itemid)
def _insert_alarm_log(self, id_, reason, text):
    qconn = core_api.get_connection(self.filename)
    cursor = qconn.cursor()
    # Also store the text, otherwise it won't be possible to retrieve it if
    # the item has been deleted meanwhile
    cursor.execute(queries.alarmsofflog_insert, (id_, reason, text))
    cursor.execute(queries.alarmsofflog_delete_clean, self.log_limits)
    core_api.give_connection(self.filename, qconn)
def _insert_alarm(self, id_, start, end, origalarm, snooze):
    conn = core_api.get_connection(self.filename)
    cur = conn.cursor()
    cur.execute(queries.alarms_insert, (id_, start, end, origalarm, snooze))
    core_api.give_connection(self.filename, conn)

    aid = cur.lastrowid
    return aid
def update_alarm_log_soft_limit(self, limit):
    qconn = core_api.get_connection(self.filename)
    cursor = qconn.cursor()
    cursor.execute(queries.alarmsproperties_update, (limit, ))
    core_api.give_connection(self.filename, qconn)

    self.modified_state = True
    self.log_limits[0] = limit
def _handle_history_update(self, filename, action, jparams, hid, type_,
                           itemid):
    qconn = core_api.get_connection(filename)
    cursor = qconn.cursor()
    cursor.execute(queries.rules_update_id, (jparams, itemid))
    core_api.give_connection(filename, qconn)

    history_update_event.signal(filename=filename, id_=itemid,
                                rules=self.string_to_rules(jparams))
def check_pending_changes(self):
    conn = core_api.get_connection(self.filename)
    cur = conn.cursor()
    change_state = self.changes != [row for row in
                                    cur.execute(queries.alarms_select)]
    core_api.give_connection(self.filename, conn)

    if change_state or self.modified_state:
        core_api.set_modified(self.filename)
def get_item_rules(self, id_):
    qconn = core_api.get_connection(self.filename)
    cursor = qconn.cursor()
    cursor.execute(queries.rules_select_id, (id_, ))
    row = cursor.fetchone()
    core_api.give_connection(self.filename, qconn)

    # The query should always return a result, so row should never be None
    return self.string_to_rules(row['R_rules'])
def insert_item(self, id_, group, description='Insert item'):
    srules = self.rules_to_string([])

    qconn = core_api.get_connection(self.filename)
    cursor = qconn.cursor()
    cursor.execute(queries.rules_insert, (id_, srules, ))
    core_api.give_connection(self.filename, qconn)

    core_api.insert_history(self.filename, group, id_, 'rules_insert',
                            description, srules, None)
def find_first_broken_link(filename):
    qconn = core_api.get_connection(filename)
    cursor = qconn.cursor()
    cursor.execute(queries.links_select_target_broken)
    core_api.give_connection(filename, qconn)

    row = cursor.fetchone()

    # If there are no broken links, fetchone returns None and subscripting
    # it raises TypeError
    try:
        return row['L_id']
    except TypeError:
        return None
def post_init(self):
    conf = coreaux_api.get_extension_configuration('organism_alarms')

    qconn = core_api.get_connection(self.filename)
    cursor = qconn.cursor()
    cursor.execute(queries.alarmsproperties_select_history)
    core_api.give_connection(self.filename, qconn)

    self.log_limits = [cursor.fetchone()[0],
                       conf.get_int('log_time_limit'),
                       conf.get_int('log_hard_limit')]
def find_link_target(filename, id_):
    qconn = core_api.get_connection(filename)
    cursor = qconn.cursor()
    cursor.execute(queries.links_select_id, (id_, ))
    core_api.give_connection(filename, qconn)

    row = cursor.fetchone()

    # If it's a valid link, return its target id; if it's a broken link,
    # return None; if it's not a link, return False
    try:
        return row['L_target']
    except TypeError:
        return False
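# Hypothetical usage sketch, not part of the original module: it only shows
# how a caller might distinguish the three possible return values of
# find_link_target described in the comment above (a target id for a valid
# link, None for a broken link, False for a non-link item). The function
# name is an assumption for illustration.
def describe_link(filename, id_):
    target = find_link_target(filename, id_)

    if target is False:
        return 'item {} is not a link'.format(id_)
    elif target is None:
        return 'item {} is a broken link'.format(id_)
    else:
        return 'item {} links to item {}'.format(id_, target)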
def break_links(filename, id_, group, description='Break links'):
    # Break any links that point to the item
    # Don't just delete those links, as it would leave their associated
    # items in an unexpected state for the user (back to normal,
    # undistinguished items); also, this further action should be handled
    # properly by the interface somehow
    # Don't try to delete the links and their associated items, because
    # silently deleting items that were not selected would be confusing;
    # furthermore, theoretically link items are allowed (at least in the
    # back-end) to have their own children, which should be deleted too
    qconn = core_api.get_connection(filename)
    cursor = qconn.cursor()
    cursor.execute(queries.links_select_target, (id_, ))

    ids = set()
    rows = cursor.fetchall()

    if rows:
        for row in rows:
            linkid = row['L_id']
            ids.add(linkid)

            do_update_link(filename, cursor, None, linkid)
            core_api.give_connection(filename, qconn)

            core_api.insert_history(filename, group, linkid, 'link_update',
                                    description, None, str(id_))

            qconn = core_api.get_connection(filename)
            cursor = qconn.cursor()

        core_api.give_connection(filename, qconn)

        break_link_event.signal(filename=filename, ids=ids, oldtarget=id_)
    else:
        core_api.give_connection(filename, qconn)
def delete_alarms(self, id_, text):
    qconn = core_api.get_connection(self.filename)
    cursor = qconn.cursor()
    cursor.execute(queries.alarms_delete_item, (id_, ))

    if cursor.rowcount > 0:
        core_api.give_connection(self.filename, qconn)

        self._insert_alarm_log(id_, 2, text.partition('\n')[0])

        # Signal the event after updating the database, so, for example,
        # the tasklist can be correctly updated
        alarm_off_event.signal(filename=self.filename, id_=id_)
    else:
        core_api.give_connection(self.filename, qconn)
def get_snoozed_alarms(self, last_search, occs):
    conn = core_api.get_connection(self.filename)
    cur = conn.cursor()
    cur.execute(queries.alarms_select)
    core_api.give_connection(self.filename, conn)

    for row in cur:
        itemid = row['A_item']
        start = row['A_start']
        end = row['A_end']

        # Do not assign None here so that it's possible to distinguish
        # between occurrences without an alarm and occurrences with an
        # active alarm when they're mixed together
        # Storing False ensures consistent behaviour with None when doing
        # generic boolean tests
        snooze = False if row['A_snooze'] is None else row['A_snooze']

        # Check whether the snoozed alarm has a duplicate among the
        # alarms found using the alarm rules, and in that case delete
        # the latter; the creation of duplicates is possible especially
        # when alarm searches are performed in rapid succession, for
        # example when launching outspline with multiple databases
        # automatically opened and many new alarms to be immediately
        # activated
        occs.try_delete_one(self.filename, itemid, start, end,
                            row['A_alarm'])

        alarmd = {'filename': self.filename,
                  'id_': itemid,
                  'alarmid': row['A_id'],
                  'start': start,
                  'end': end,
                  'alarm': snooze}

        # For safety, also check that there aren't any alarms left with
        # snooze <= last_search (for example this may happen if an alarm
        # is temporarily undone together with its item, and then it's
        # restored with a redo)
        # Note that whatever the value of last_search is, it can't really
        # prevent the activation of a snoozed alarm, be it immediate or
        # later (last_search can't be set to a future time)
        if snooze and snooze > last_search:
            occs.add_safe(last_search, alarmd)
        else:
            occs.add_old(alarmd)
def snooze_alarms(self, alarmsd, stime, newalarm):
    for id_ in alarmsd:
        for alarmid in alarmsd[id_]:
            text = core_api.get_item_text(self.filename, id_)

            qconn = core_api.get_connection(self.filename)
            cursor = qconn.cursor()
            cursor.execute(queries.alarms_update_id, (newalarm, alarmid))
            core_api.give_connection(self.filename, qconn)

            self._insert_alarm_log(id_, 0, text.partition('\n')[0])

            # Signal the event after updating the database, so, for
            # example, the tasklist can be correctly updated
            alarm_off_event.signal(filename=self.filename, id_=id_,
                                   alarmid=alarmid)
def set_last_search_safe(self, tstamp):
    conn = core_api.get_connection(self.filename)
    cur = conn.cursor()
    cur.execute(queries.timerproperties_select_search)
    last_search = cur.fetchone()['TP_last_search']

    # It's possible that the database has last_search > tstamp, for example
    # when a database has just been opened while others were already open:
    # it would have a lower last_search than the other databases, and when
    # the next occurrences are searched, all the databases' last_search
    # values would be updated to the lower value, thus possibly
    # reactivating old alarms
    if tstamp > last_search:
        # Use a UTC timestamp, so that even if the local time zone is
        # changed on the system, the timer behaves properly
        cur.execute(queries.timerproperties_update, (tstamp, ))

    core_api.give_connection(self.filename, conn)
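# Illustrative sketch with assumed timestamps, not part of the original
# module: it only restates the forward-only guard used above, i.e. the stored
# last_search is replaced only when the new timestamp is greater, so a freshly
# opened database with a lower value can't drag the others backwards and
# reactivate old alarms. The helper name is hypothetical.
def _forward_only_last_search(stored, candidate):
    # Mirrors the "if tstamp > last_search" check in set_last_search_safe
    return candidate if candidate > stored else stored

assert _forward_only_last_search(1500000600, 1500000000) == 1500000600
assert _forward_only_last_search(1500000000, 1500000600) == 1500000600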
def delete_item_rules(self, id_, text, group,
                      description='Delete item rules'):
    qconn = core_api.get_connection(self.filename)
    cursor = qconn.cursor()
    cursor.execute(queries.rules_select_id, (id_, ))
    sel = cursor.fetchone()

    # The query should always return a result, so sel should never be None
    current_rules = sel['R_rules']

    cursor.execute(queries.rules_delete_id, (id_, ))
    core_api.give_connection(self.filename, qconn)

    core_api.insert_history(self.filename, group, id_, 'rules_delete',
                            description, None, current_rules)

    delete_item_rules_event.signal(filename=self.filename, id_=id_,
                                   text=text)