def regenerate_and_cache_search_results(self):
    """Rebuild this spreadsheet's rendered search-result row and cache it.

    Renders the row synchronously, then hands the cache writes off to
    celery (`put_in_cache_forever`, `update_proxy_results_db_cache`) and
    refreshes the cached member count.

    Returns:
        The freshly rendered result row, so callers can use it immediately.
    """
    ss = self.render_result_row()
    # Best-effort commit before dispatching, so celery workers see current
    # DB state — mirrors the sibling regenerate method on the group proxy,
    # which was committing here while this variant was not.
    try:
        transaction.commit()
    except Exception:
        # commit() raises when we're not inside a managed transaction;
        # that's fine — there is simply nothing pending to flush.
        pass
    # Popping over to celery for the actual cache writes.
    put_in_cache_forever.delay(self.cache_name, ss)
    update_proxy_results_db_cache.delay(SpreadsheetSearchProxy, self, ss)
    cache.set(self.cached_count_key, self.members.count())
    return ss
def regenerate_and_cache_search_results(self):
    """Rebuild this group's rendered search-result row and cache it.

    Renders the row synchronously, then hands the cache writes off to
    celery (`put_in_cache_forever`, `update_proxy_results_db_cache`) and
    refreshes the cached member count.

    Returns:
        The freshly rendered result row, so callers can use it immediately.
    """
    ss = self.render_result_row()
    # Best-effort commit before dispatching, so celery workers see current
    # DB state. Was a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt — narrowed to Exception, same best-effort intent.
    try:
        transaction.commit()
    except Exception:
        # commit() raises when we're not inside a managed transaction;
        # that's fine — there is simply nothing pending to flush.
        pass
    # Popping over to celery for the actual cache writes.
    put_in_cache_forever.delay(self.cache_name, ss)
    update_proxy_results_db_cache.delay(GroupSearchProxy, self, ss)
    cache.set(self.cached_count_key, self.members.count())
    return ss
def search_result_row(self):
    """Return the rendered search-result row, cheapest source first.

    Lookup order:
      1. the fast cache (``cache.get``),
      2. the DB-cached copy (``self.cached_search_result``), re-warming
         the fast cache via celery,
      3. a full re-render, pushing the result to both caches via celery.

    Returns:
        The rendered result row from whichever source answered first.
    """
    # Single cache round trip — the original called cache.get() twice
    # (once to test, once to return), which is both slower and race-prone.
    cached = cache.get(self.cache_name)
    if cached:
        return cached
    if self.cached_search_result:
        # Re-warm the fast cache from the DB copy. Commit is best-effort;
        # the bare `except:` is narrowed to Exception (it also caught
        # SystemExit/KeyboardInterrupt before).
        try:
            transaction.commit()
        except Exception:
            pass
        put_in_cache_forever.delay(self.cache_name, self.cached_search_result)
        return self.cached_search_result
    # Nothing cached anywhere — render now, then pop the writes to celery.
    ss = self.render_result_row()
    try:
        transaction.commit()
    except Exception:
        pass
    put_in_cache_forever.delay(self.cache_name, ss)
    update_proxy_results_db_cache.delay(self, ss)
    return ss