def setUpClass(cls):
    """ Create the Feed fixtures shared by every test of this class.

    Each created feed is saved to MongoDB and appended to ``cls.feeds``
    so tests (and ``tearDownClass``) can reach them.
    """

    cls.feeds = []

    # NOTE(review): previously-disabled extra feeds (w3sh.com, feedburner
    # bashfr) were removed as dead commented-out code; re-add entries to
    # this tuple if more fixtures are needed.
    for feed_name, feed_url in ((u'My Cool Feed',
                                 u'http://blog.1flow.io/rss'), ):

        feed = Feed(name=feed_name, url=feed_url).save()

        cls.feeds.append(feed)
def feed_distribution_by_last_fetch():
    """ Compute and group feeds by ``last_fetch`` delta from now.

    Buckets feeds into time ranges (fractions/multiples of
    ``config.FEED_FETCH_DEFAULT_INTERVAL``, then whole days, then
    "older than 10 days").

    :returns: a dict mapping bucket index to
        ``[queryset, count, percent, lower_value, upper_value,
        average_fetch_interval]``, plus a ``'meta'`` key holding
        ``fetched_feeds``, ``open_feeds_count``, ``duration`` (seconds)
        and ``loop_count``.
    """

    start_time = pytime.time()

    # Open feeds are those not explicitly closed (covers the case where
    # the `closed` attribute does not exist on the document).
    # open_feeds = Feed.objects(Q(closed=False) | Q(closed__exists=False))
    open_feeds_count = Feed.objects.filter(closed__ne=True).count()

    lower_value   = None
    loop_count    = 0
    fetched_feeds = 0
    delta_lengths = (
        timedelta(seconds=config.FEED_FETCH_DEFAULT_INTERVAL / 6),
        timedelta(seconds=config.FEED_FETCH_DEFAULT_INTERVAL / 2),
        timedelta(seconds=config.FEED_FETCH_DEFAULT_INTERVAL),
        timedelta(seconds=config.FEED_FETCH_DEFAULT_INTERVAL * 2),
        timedelta(seconds=config.FEED_FETCH_DEFAULT_INTERVAL * 6),
        timedelta(seconds=config.FEED_FETCH_DEFAULT_INTERVAL * 12),
        timedelta(days=1),
        timedelta(days=2),
        timedelta(days=3),
        timedelta(days=4),
        timedelta(days=5),
        timedelta(days=6),
        timedelta(days=7),
        timedelta(days=10),

        # `None` is the catch-all "older than everything above" bucket.
        None
    )

    results = {}

    for delta in delta_lengths:

        upper_value = (now() - delta) if delta else None

        # First bucket has no lower bound, last bucket has no upper bound.
        if lower_value is None:
            kwargs = {'last_fetch__gt': upper_value}

        elif upper_value is None:
            kwargs = {'last_fetch__lte': lower_value}

        else:
            kwargs = {'last_fetch__lte': lower_value,
                      'last_fetch__gt': upper_value}

        feeds = Feed.objects(**kwargs)
        count = feeds.count()

        # BUGFIX: guard both divisions against zero — an empty database
        # previously raised ZeroDivisionError on the `percent` line while
        # `avg_fi` was already guarded with `(count or 1.0)`.
        percent = float(count * 100.0 / (open_feeds_count or 1))
        avg_fi  = sum(f.fetch_interval for f in feeds) * 1.0 / (count or 1.0)

        results[loop_count] = [
            feeds,
            count,
            percent,
            lower_value,
            upper_value,
            avg_fi,
        ]

        fetched_feeds += count

        # Current upper bound becomes the next bucket's lower bound.
        lower_value = upper_value
        loop_count += 1

    results['meta'] = {'fetched_feeds': fetched_feeds,
                       'open_feeds_count': open_feeds_count,
                       'duration': pytime.time() - start_time,
                       'loop_count': loop_count}

    return results
def tearDownClass(cls):
    """ Drop every MongoDB collection this test class touched. """

    for document_class in (Article, Feed):
        document_class.drop_collection()
# Concrete Django user model and module-level logger.
DjangoUser = get_user_model()
LOGGER = logging.getLogger(__file__)

# Use the test database not to pollute the production/development one.
RedisStatsCounter.REDIS = TEST_REDIS

TEST_REDIS.flushdb()

connect_mongodb_testsuite()

# Empty the database before starting in case an old test failed to tearDown().
Article.drop_collection()
Read.drop_collection()
User.drop_collection()
Feed.drop_collection()
Tag.drop_collection()
WebSite.drop_collection()
Author.drop_collection()


# Run Celery tasks eagerly with an in-memory broker so tests stay
# synchronous and self-contained; skip static-files packaging.
@override_settings(STATICFILES_STORAGE=
                   'pipeline.storage.NonPackagingPipelineStorage',
                   CELERY_EAGER_PROPAGATES_EXCEPTIONS=True,
                   CELERY_ALWAYS_EAGER=True,
                   BROKER_BACKEND='memory',)
class HomeAndPreferencesViewTest(TestCase):
    # NOTE(review): from this view only setUp is visible; presumably the
    # actual view tests follow elsewhere in the file.

    def setUp(self):
        # Fresh HTTP test client per test method.
        self.client = Client()
def feed_distribution_by_last_fetch():
    """ Compute and group feeds by ``last_fetch`` delta from now.

    Feeds are partitioned into consecutive age buckets built from
    ``config.FEED_FETCH_DEFAULT_INTERVAL`` fractions/multiples, then
    whole days up to 10, then an open-ended "older" bucket.

    :returns: dict of ``bucket_index -> [queryset, count, percent,
        lower_value, upper_value, average_fetch_interval]`` plus a
        ``'meta'`` entry with ``fetched_feeds``, ``open_feeds_count``,
        ``duration`` (seconds) and ``loop_count``.
    """

    start_time = pytime.time()

    # Feeds whose `closed` attribute is absent count as open too.
    # open_feeds = Feed.objects(Q(closed=False) | Q(closed__exists=False))
    open_feeds_count = Feed.objects.filter(closed__ne=True).count()

    lower_value = None
    loop_count = 0
    fetched_feeds = 0
    delta_lengths = (timedelta(seconds=config.FEED_FETCH_DEFAULT_INTERVAL / 6),
                     timedelta(seconds=config.FEED_FETCH_DEFAULT_INTERVAL / 2),
                     timedelta(seconds=config.FEED_FETCH_DEFAULT_INTERVAL),
                     timedelta(seconds=config.FEED_FETCH_DEFAULT_INTERVAL * 2),
                     timedelta(seconds=config.FEED_FETCH_DEFAULT_INTERVAL * 6),
                     timedelta(seconds=config.FEED_FETCH_DEFAULT_INTERVAL * 12),
                     timedelta(days=1),
                     timedelta(days=2),
                     timedelta(days=3),
                     timedelta(days=4),
                     timedelta(days=5),
                     timedelta(days=6),
                     timedelta(days=7),
                     timedelta(days=10),
                     # None = open-ended "older than 10 days" bucket.
                     None)

    results = {}

    for delta in delta_lengths:

        upper_value = (now() - delta) if delta else None

        # Boundary buckets are half-open; inner ones bounded both sides.
        if lower_value is None:
            kwargs = {'last_fetch__gt': upper_value}

        elif upper_value is None:
            kwargs = {'last_fetch__lte': lower_value}

        else:
            kwargs = {
                'last_fetch__lte': lower_value,
                'last_fetch__gt': upper_value
            }

        feeds = Feed.objects(**kwargs)
        count = feeds.count()

        # BUGFIX: `(open_feeds_count or 1)` avoids ZeroDivisionError when
        # the database holds no open feeds (the avg_fi line below was
        # already guarded the same way with `count`).
        percent = float(count * 100.0 / (open_feeds_count or 1))
        avg_fi = sum(f.fetch_interval for f in feeds) * 1.0 / (count or 1.0)

        results[loop_count] = [
            feeds,
            count,
            percent,
            lower_value,
            upper_value,
            avg_fi,
        ]

        fetched_feeds += count

        # Slide the window: this bucket's upper bound is the next's lower.
        lower_value = upper_value
        loop_count += 1

    results['meta'] = {
        'fetched_feeds': fetched_feeds,
        'open_feeds_count': open_feeds_count,
        'duration': pytime.time() - start_time,
        'loop_count': loop_count
    }

    return results