def deleterows(self):
    # delete the selected rows from the session, the cache, and the model
    archive = (self.mode == "archive")  # was two separate ifs; this also covers any other mode
    account = self.active
    model = self.getmodel()
    # get selected entry ids and their source-model row numbers
    entry_ids, source_rows = self.selectedids()
    if len(entry_ids) > 0:
        # retrieve cache data for the current account and mode
        cache = self.cache["index"][account]["inbox"][self.mode]
        # use entry_ids to obtain cache row numbers
        df = cache["data"]
        cache_rows = [int(df[df["entry_id"] == entry_id].index[0])
                      for entry_id in entry_ids]
        # execute delete function in session
        bridge.sess().delete_many(account, entry_ids, archive=archive)
        # remove rows from cache and restore a contiguous index
        cache["data"].drop(cache_rows, axis=0, inplace=True)
        cache["data"].reset_index(drop=True, inplace=True)
        # remove rows from model
        model.removeRows(cache["data"])
        # refresh proxy model in view
        self.filterModel.setFilterFixedString("")
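# NOTE: a hedged sketch, not part of the original source. selectedids() is not
# shown in this section; it is assumed to translate the view's proxy-row
# selection into (entry_ids, source_rows), roughly like the commented code
# below. mainView and filterModel appear elsewhere in this class; the exact
# body is an assumption.
#
#     def selectedids(self):
#         indexes = self.mainView.selectionModel().selectedRows()
#         source_rows = [self.filterModel.mapToSource(ix).row() for ix in indexes]
#         df = self.cache["index"][self.active]["inbox"][self.mode]["data"]
#         entry_ids = [df.loc[r, "entry_id"] for r in source_rows]
#         return entry_ids, source_rows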
def update(self):
    # pull new rows from the session and extend each account's live cache
    extension = bridge.sess().update(gui=True)
    for account in extension:
        print(account)
        test_model = self.cache["index"][account]["inbox"]["live"]["object"]
        print("...[PRE] model row count: {0}".format(test_model.rowCount(None)))
        if extension[account] is not None:
            live_ext = extension[account]
            # new rows are not archived yet
            live_ext["archive_index"] = [False] * live_ext.shape[0]
            live_cache = self.cache["index"][account]["inbox"]["live"]
            # DataFrame.append is deprecated; concat the extension instead
            live_cache["data"] = pandas.concat([live_cache["data"], live_ext],
                                               ignore_index=True)
            live_model = live_cache["object"]
            live_model.addRows(live_cache["data"])
            print("...[POST] model row count: {0}".format(live_model.rowCount(None)))
    # refresh proxy model in view
    self.filterModel.setFilterFixedString("")
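# NOTE: a hedged sketch, not part of the original source. addRows (and the
# removeRows call in deleterows) are assumed to be methods on the cached
# "object" models, QAbstractTableModel subclasses that swap in the updated
# DataFrame and notify attached views. A minimal version might be:
#
#     def addRows(self, data):
#         self.beginResetModel()  # tell views the data is about to change
#         self._data = data       # '_data' is an assumed attribute name
#         self.endResetModel()    # views re-read row counts and contents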
def archiverows(self, entry_ids, source_rows, category):
    if self.mode != "live":  # archiving is only allowed from live mode
        return  # do not continue
    account = self.active
    model = self.getmodel()
    # retrieve cache data
    live_cache = self.cache["index"][account]["inbox"]["live"]
    arch_cache = self.cache["index"][account]["inbox"]["archive"]
    # use entry_ids to obtain live cache row numbers
    df = live_cache["data"]
    cache_rows = [int(df[df["entry_id"] == entry_id].index[0])
                  for entry_id in entry_ids]
    # remove already-archived entry_ids from entry_ids & row lists
    archived_ids = set(arch_cache["data"]["entry_id"])
    for i in range(len(entry_ids) - 1, -1, -1):
        if entry_ids[i] in archived_ids:
            entry_ids.pop(i)
            cache_rows.pop(i)
            source_rows.pop(i)
    if len(entry_ids) > 0:
        # execute archive function in session
        bridge.sess().archive_many(account, entry_ids, category)
        # update live cache's archive index
        for n in cache_rows:
            live_cache["data"].loc[n, "archive_index"] = category
        # add archived rows to archive cache;
        # the archive_index column becomes the category column there
        arch_ext = pandas.DataFrame(
            [live_cache["data"].loc[n].copy() for n in cache_rows])
        arch_ext.rename(columns={"archive_index": "category"}, inplace=True)
        # DataFrame.append is deprecated; concat the extension instead
        arch_cache["data"] = pandas.concat([arch_cache["data"], arch_ext],
                                           ignore_index=True)
        # update live model archive index
        model.addToArchive(live_cache["data"], source_rows)
        # update archive model
        archive_model = arch_cache["object"]
        archive_model.addRows(arch_cache["data"])
        # refresh proxy model in view
        self.filterModel.setFilterFixedString("")
        # clear view selections
        self.mainView.clearSelection()
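# NOTE: a hedged sketch, not part of the original source. deleterows and
# archiverows both map entry_ids to positional row numbers in the cached
# DataFrame; a shared helper (hypothetical name: _cacherows) could centralize
# that lookup. It assumes the cache frame keeps an 'entry_id' column and a
# default RangeIndex, as the code above does.
def _cacherows(self, df, entry_ids):
    # positional index of the first cache row matching each entry_id
    return [int(df[df["entry_id"] == entry_id].index[0])
            for entry_id in entry_ids]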
def makemaster(self):
    # delegate master-list creation to the session
    bridge.sess().makemaster()
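# NOTE: a hedged usage sketch, not part of the original source; the action
# names below are assumptions. These controller methods would typically be
# wired to the GUI along these lines:
#
#     self.deleteAction.triggered.connect(self.deleterows)
#     self.refreshAction.triggered.connect(self.update)
#     # archiving needs the current selection plus a category:
#     entry_ids, source_rows = self.selectedids()
#     self.archiverows(entry_ids, source_rows, category="done")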