def fetch_packages(self):
    """Return a Feed query of popular feeds the user is not yet subscribed to.

    Popularity is the per-feed subscriber count.  The top 500 candidate
    feed ids are computed (optionally shuffled), truncated to
    ``self.packages_count``, and cached per user under
    ``popular_packages.<user_id>``.  A failing cache backend is logged and
    caching is disabled for this instance instead of propagating the error.

    Returns:
        A SQLAlchemy query over Feed rows matching the selected ids.
    """
    package_ids = None
    key = 'popular_packages.' + str(self.user.id)
    if self.use_cache:
        try:
            package_ids = cache.get(key)
        except Exception as e:
            app.logger.exception(e)
            app.logger.info("An exception catched, disable cache")
            # Cache backend is broken; skip both read and write below.
            self.use_cache = False
    if package_ids is None:
        # Subquery: feed ids this user already follows (excluded from results).
        already_subscribed = db.session.query(Subscribe.feed_id).filter(
            (Subscribe.user_id == self.user.id) & (Subscribe.feed_id.isnot(None))
        ).group_by(Subscribe.feed_id)
        # Count subscriptions per feed, most popular first, capped at 500.
        packages = db.session.query(
            Subscribe.feed_id, db.func.count(Subscribe.id).label('total')) \
            .filter(Subscribe.type == Subscribe.TYPE_FEED) \
            .filter(Subscribe.feed_id.notin_(already_subscribed)) \
            .group_by(Subscribe.feed_id) \
            .order_by(db.desc('total')) \
            .limit(500)
        # Comprehension instead of the original append loop (same order).
        package_ids = [feed_id for (feed_id, _count) in packages]
        if self.random_order:
            import random
            random.shuffle(package_ids)
        package_ids = package_ids[:self.packages_count]
        if self.use_cache:
            cache.set(key, package_ids, timeout=self.timeout)
    feeds = Feed.query.filter(Feed.id.in_(package_ids))
    return feeds
def fetch_news(self):
    """Return this user's article ids, served from cache when possible.

    On a cache miss the ids are produced by ``self.generate()`` and written
    back under ``<cache_key>.<user_id>``.  A failing cache backend is
    logged and caching is disabled for this instance rather than raising.
    """
    key = self._get_cache_key() + '.' + str(self.user.id)
    article_ids = None
    if self.use_cache:
        try:
            article_ids = cache.get(key)
        except Exception as e:
            app.logger.exception(e)
            app.logger.info("An exception catched, disable cache")
            self.use_cache = False
    # Guard clause: cache hit short-circuits generation entirely.
    if article_ids is not None:
        return article_ids
    article_ids = self.generate()
    if self.use_cache:
        cache.set(key, article_ids, timeout=self.timeout)
    return article_ids
def fetch_feed_articles(feed_id):
    """Synchronize articles for one feed, guarded by a best-effort cache lock.

    Acquires a per-feed lock key in the cache before syncing so concurrent
    workers skip feeds that are already being fetched.  The lock expires on
    its own after ARTICLE_FETCH_TIMEOUT.

    Args:
        feed_id: primary key of the Feed to synchronize.

    Returns:
        Tuple ``(feed_id, sync_count)`` where sync_count is the number of
        articles synced — 0 when the lock was already held or no such feed
        exists.
    """
    from rdr.modules.feeds.articles.sync import ArticlesSynchronizer
    from rdr.modules.feeds.models import Feed
    from rdr.application.cache import cache

    sync_count = 0
    cache_key = 'tasks.fetch_feed_articles.lock.' + str(feed_id)
    # NOTE(review): get-then-set is not atomic, so two workers can still
    # race past this check; the lock is best-effort only.
    if not cache.get(cache_key):
        cache.set(cache_key, True, timeout=ARTICLE_FETCH_TIMEOUT)
        try:
            feed = Feed.query.filter(Feed.id == feed_id).first()
            if feed:
                synchronizer = ArticlesSynchronizer(feed)
                articles = synchronizer.sync()
                sync_count = len(articles)
        finally:
            # Fix: always release the lock, even when sync raises.
            # Previously an exception left the lock held until the
            # ARTICLE_FETCH_TIMEOUT expired, blocking all retries.
            cache.delete(cache_key)
    return feed_id, sync_count