import logging

import redis

# DBSession is assumed to be the project's SQLAlchemy session factory,
# defined elsewhere in the codebase.


def top_column_task():
    """Collect the slugs of the 1000 columns with the most articles into Redis."""
    logging.info('Start collecting top 1000 column slugs into Redis...')
    session = DBSession()
    try:
        r = redis.StrictRedis(host='localhost', port=6379, db=1)
        # Rank columns by article count and keep the top 1000 slugs.
        sr = session.execute("""
            SELECT slug FROM (
                SELECT id, slug FROM core_zhcolumn
            ) AS l1
            JOIN (
                SELECT count(core_zharticle.id) AS c1, belong_id
                FROM core_zharticle
                GROUP BY belong_id
            ) AS l2 ON l2.belong_id = l1.id
            ORDER BY l2.c1 DESC
            LIMIT 1000;""")
        for itm in sr:
            r.sadd('top_column_slug', itm[0])
        # Expire the set shortly before the next daily rebuild.
        r.expire('top_column_slug', 60 * 60 * 23)
        logging.info('Successfully collected top column slugs into Redis!')
        return True
    except Exception as e:
        logging.exception('ERROR while collecting top columns: {0}'.format(e))
        return False
    finally:
        session.close()
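A minimal sketch of how the cached set might be consumed elsewhere in the application, assuming the same Redis host, port and database that top_column_task writes to; the is_top_column helper name is hypothetical and not part of the original code.

import redis


def is_top_column(slug):
    """Hypothetical helper: check whether a column slug is in the cached top-1000 set."""
    r = redis.StrictRedis(host='localhost', port=6379, db=1)
    return r.sismember('top_column_slug', slug)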
from hashlib import md5

# RedisCachePaginator, DBSession and cache (a Django-style cache backend)
# are assumed to be defined or imported elsewhere in the project.


class SearchPaginator(RedisCachePaginator):
    def __init__(self, object_list, per_page, orphans=0,
                 allow_empty_first_page=True, *args, **kwargs):
        self.keyword = kwargs.get('keyword', '')
        self.session = DBSession()
        super(SearchPaginator, self).__init__(
            object_list, per_page, orphans, allow_empty_first_page)

    def _get_count(self):
        """Return the total number of matching articles, across all pages."""
        # Hash the keyword so arbitrary search strings make safe cache keys.
        key = '{0}_article_count'.format(md5(self.keyword.encode('utf-8')).hexdigest())
        if self._count is None:
            count = cache.get(key)
            # Compare against None so a cached count of 0 is still honoured.
            if count is not None:
                self._count = count
                return self._count
            # Bind the keyword as a query parameter instead of interpolating it
            # into the SQL string, which would be open to injection.
            self._count = self.session.execute(
                'select count(title) from core_zharticle where title ~ :keyword;',
                {'keyword': self.keyword}).first()[0]
            cache.set(key, self._count, 60 * 5)
        return self._count

    count = property(_get_count)
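For context, a hypothetical usage sketch assuming a Django-style view, since the constructor signature mirrors Django's Paginator; the search_articles helper, the template name and the page size of 20 are illustrative assumptions, not part of the original code.

from django.shortcuts import render


def search_view(request):
    # Hypothetical view: search_articles() is assumed to return the articles
    # matching the keyword; 20 per page is an arbitrary illustrative choice.
    keyword = request.GET.get('q', '')
    object_list = search_articles(keyword)
    paginator = SearchPaginator(object_list, 20, keyword=keyword)
    page = paginator.page(int(request.GET.get('page', 1)))
    return render(request, 'search.html', {'page': page, 'total': paginator.count})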