def TaskBrokenFeeds():
    """Re-task feeds that look broken: active feeds never fetched, feeds stuck
    in the tasked set, and feeds whose scheduled update is long overdue."""
    from apps.rss_feeds.models import Feed

    settings.LOG_TO_STREAM = True
    now = datetime.datetime.utcnow()
    start = time.time()
    r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
    logging.debug(" ---> ~SN~FBQueuing broken feeds...")

    # Active, subscribed feeds that have never completed a fetch get forced.
    refresh_feeds = Feed.objects.filter(
        active=True,
        fetched_once=False,
        active_subscribers__gte=1,
    ).order_by('?')[:100]
    refresh_count = refresh_feeds.count()
    cp1 = time.time()
    logging.debug(" ---> ~SN~FBFound %s active, unfetched broken feeds" % refresh_count)

    # Feeds tasked over 10 minutes ago that never finished: assume dropped,
    # bump their error score, and put them back on the schedule.
    # NOTE(review): strftime('%s') is a glibc extension and reads the naive
    # utcnow() in local time -- assumes the server clock is UTC; confirm.
    stale_cutoff = (now - datetime.timedelta(minutes=10)).strftime('%s')
    dropped_ids = r.zrangebyscore('tasked_feeds', 0, stale_cutoff)
    inactive_count = len(dropped_ids)
    if inactive_count:
        r.zremrangebyscore('tasked_feeds', 0, stale_cutoff)
        # r.sadd('queued_feeds', *dropped_ids)
        for feed_id in dropped_ids:
            r.zincrby('error_feeds', 1, feed_id)
            Feed.get_by_id(feed_id).set_next_scheduled_update()
    logging.debug(
        " ---> ~SN~FBRe-queuing ~SB%s~SN dropped/broken feeds (~SB%s/%s~SN queued/tasked)"
        % (inactive_count, r.scard('queued_feeds'), r.zcard('tasked_feeds')))
    cp2 = time.time()

    # Subscribed feeds whose scheduled update is more than a day overdue.
    overdue = now - datetime.timedelta(days=1)
    old_feeds = Feed.objects.filter(
        next_scheduled_update__lte=overdue,
        active_subscribers__gte=1,
    ).order_by('?')[:500]
    old_count = old_feeds.count()
    cp3 = time.time()

    logging.debug(
        " ---> ~SN~FBTasking ~SBrefresh:~FC%s~FB inactive:~FC%s~FB old:~FC%s~SN~FB broken feeds... (%.4s/%.4s/%.4s)" % (
            refresh_count, inactive_count, old_count,
            cp1 - start, cp2 - cp1, cp3 - cp2,
        ))
    Feed.task_feeds(refresh_feeds, verbose=False)
    Feed.task_feeds(old_feeds, verbose=False)
    logging.debug(
        " ---> ~SN~FBTasking broken feeds took ~SB%s~SN seconds (~SB%s~SN/~FG%s~FB~SN/%s tasked/queued/scheduled)"
        % (int((time.time() - start)), r.zcard('tasked_feeds'),
           r.scard('queued_feeds'), r.zcard('scheduled_updates')))
def handle(self, *args, **options):
    """Task every feed due for an update, then re-task feeds that look
    mistakenly inactive (week-stale with subscribers)."""
    from apps.rss_feeds.models import Feed

    settings.LOG_TO_STREAM = True
    now = datetime.datetime.utcnow()

    # Feeds whose scheduled update time has arrived and that have subscribers.
    due_feeds = (Feed.objects
                 .filter(next_scheduled_update__lte=now, active=True)
                 .exclude(active_subscribers=0)
                 .order_by('?'))
    Feed.task_feeds(due_feeds)

    # Subscribed feeds with no update in a week and nothing queued in a day
    # are presumed stuck -- re-task them.
    week = now - datetime.timedelta(days=7)
    day = now - datetime.timedelta(days=1)
    stale_feeds = Feed.objects.filter(
        last_update__lte=week,
        queued_date__lte=day,
        active_subscribers__gte=1,
    ).order_by('?')
    if stale_feeds:
        Feed.task_feeds(stale_feeds)
def run(self, **kwargs):
    """Move due feeds from 'scheduled_updates' into 'queued_feeds', then
    dispatch a batch unless too many fetches are already in flight."""
    from apps.rss_feeds.models import Feed

    settings.LOG_TO_STREAM = True
    now = datetime.datetime.utcnow()
    start = time.time()
    r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
    tasked_feeds_size = r.zcard('tasked_feeds')

    # Expire the rolling one-hour fetch counter.
    # NOTE(review): strftime('%s') is a glibc extension and reads the naive
    # utcnow() in local time -- assumes the server clock is UTC; confirm.
    hour_ago = now - datetime.timedelta(hours=1)
    r.zremrangebyscore('fetched_feeds_last_hour', 0, int(hour_ago.strftime('%s')))

    # Everything scheduled at or before now is due.
    now_timestamp = int(now.strftime("%s"))
    due_feed_ids = r.zrangebyscore('scheduled_updates', 0, now_timestamp)
    r.zremrangebyscore('scheduled_updates', 0, now_timestamp)
    if not due_feed_ids:
        logging.debug(" ---> ~SN~FB~BMNo feeds to queue! Exiting...")
        return

    r.sadd('queued_feeds', *due_feed_ids)
    logging.debug(
        " ---> ~SN~FBQueuing ~SB%s~SN stale feeds (~SB%s~SN/~FG%s~FB~SN/%s tasked/queued/scheduled)"
        % (len(due_feed_ids), r.zcard('tasked_feeds'),
           r.scard('queued_feeds'), r.zcard('scheduled_updates')))

    # Regular feeds: only dispatch when the in-flight set is under the cap.
    if tasked_feeds_size >= FEED_TASKING_MAX:
        logging.debug(
            " ---> ~SN~FBToo many tasked feeds. ~SB%s~SN tasked." % tasked_feeds_size)
        active_count = 0
    else:
        batch = r.srandmember('queued_feeds', FEED_TASKING_MAX)
        Feed.task_feeds(batch, verbose=True)
        active_count = len(batch)

    logging.debug(
        " ---> ~SN~FBTasking %s feeds took ~SB%s~SN seconds (~SB%s~SN/~FG%s~FB~SN/%s tasked/queued/scheduled)"
        % (active_count, int((time.time() - start)), r.zcard('tasked_feeds'),
           r.scard('queued_feeds'), r.zcard('scheduled_updates')))
def run(self, **kwargs):
    """Task due feeds, then two capped batches of feeds that appear stuck."""
    from apps.rss_feeds.models import Feed

    settings.LOG_TO_STREAM = True
    now = datetime.datetime.utcnow()

    # Due feeds that still have subscribers.
    due = (Feed.objects
           .filter(next_scheduled_update__lte=now, active=True)
           .exclude(active_subscribers=0)
           .order_by('?'))
    Feed.task_feeds(due)

    # Fast-decay feeds (cadence <= daily) with nothing fetched or queued in
    # the last day -- presumed mistakenly inactive.
    day = now - datetime.timedelta(days=1)
    quiet = Feed.objects.filter(
        last_update__lte=day,
        queued_date__lte=day,
        min_to_decay__lte=60 * 24,
        active_subscribers__gte=1,
    ).order_by('?')[:20]
    if quiet:
        Feed.task_feeds(quiet)

    # Subscribed feeds untouched for a whole week.
    week = now - datetime.timedelta(days=7)
    stuck = Feed.objects.filter(
        last_update__lte=week,
        queued_date__lte=day,
        active_subscribers__gte=1,
    ).order_by('?')[:20]
    if stuck:
        Feed.task_feeds(stuck)
def run(self, **kwargs):
    """Task four batches: popular premium feeds, regular premium feeds,
    recently-quiet fast-decay feeds, and week-old stuck feeds."""
    from apps.rss_feeds.models import Feed

    settings.LOG_TO_STREAM = True
    now = datetime.datetime.utcnow()
    day = now - datetime.timedelta(days=1)
    week = now - datetime.timedelta(days=7)

    # Due feeds with 10+ active premium subscribers get first priority.
    popular_feeds = Feed.objects.filter(
        next_scheduled_update__lte=now,
        active=True,
        active_premium_subscribers__gte=10,
    ).order_by('?')[:1000]
    popular_count = popular_feeds.count()

    # Due feeds with at least one active premium subscriber.
    feeds = Feed.objects.filter(
        next_scheduled_update__lte=now,
        active=True,
        active_premium_subscribers__gte=1,
    ).order_by('?')[:500]
    active_count = feeds.count()

    # Fast-decay feeds quiet for a day -- presumed mistakenly inactive.
    inactive_feeds = Feed.objects.filter(
        last_update__lte=day,
        queued_date__lte=day,
        min_to_decay__lte=60 * 24,
        active_subscribers__gte=1,
    ).order_by('?')[:20]
    inactive_count = inactive_feeds.count()

    # Subscribed feeds untouched for a week.
    old_feeds = Feed.objects.filter(
        last_update__lte=week,
        queued_date__lte=day,
        active_subscribers__gte=1,
    ).order_by('?')[:20]
    old_count = old_feeds.count()

    logging.debug(" ---> ~FBTasking ~SB~FC%s~SN~FB/~FC%s~FB/~FC%s~FB/~FC%s~SN~FB feeds..." % (
        popular_count,
        active_count,
        inactive_count,
        old_count,
    ))
    Feed.task_feeds(popular_feeds, verbose=False)
    Feed.task_feeds(feeds, verbose=False)
    if inactive_feeds:
        Feed.task_feeds(inactive_feeds, verbose=False)
    if old_feeds:
        Feed.task_feeds(old_feeds, verbose=False)
def run(self, **kwargs):
    """Promote due feeds from the schedule into the queue, then task a
    random batch capped by FEED_TASKING_MAX."""
    from apps.rss_feeds.models import Feed

    settings.LOG_TO_STREAM = True
    now = datetime.datetime.utcnow()
    start = time.time()
    conn = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
    in_flight = conn.zcard('tasked_feeds')

    # Drop fetch-counter entries older than one hour.
    hour_ago = now - datetime.timedelta(hours=1)
    conn.zremrangebyscore('fetched_feeds_last_hour', 0, int(hour_ago.strftime('%s')))

    # Everything scheduled at or before now is due; pull and clear it.
    now_timestamp = int(now.strftime("%s"))
    stale_ids = conn.zrangebyscore('scheduled_updates', 0, now_timestamp)
    conn.zremrangebyscore('scheduled_updates', 0, now_timestamp)
    if not stale_ids:
        logging.debug(" ---> ~SN~FB~BMNo feeds to queue! Exiting...")
        return

    conn.sadd('queued_feeds', *stale_ids)
    logging.debug(" ---> ~SN~FBQueuing ~SB%s~SN stale feeds (~SB%s~SN/~FG%s~FB~SN/%s tasked/queued/scheduled)" % (
        len(stale_ids), conn.zcard('tasked_feeds'),
        conn.scard('queued_feeds'), conn.zcard('scheduled_updates')))

    # Regular feeds: dispatch a random batch unless too many are in flight.
    active_count = 0
    if in_flight < FEED_TASKING_MAX:
        batch = conn.srandmember('queued_feeds', FEED_TASKING_MAX)
        Feed.task_feeds(batch, verbose=True)
        active_count = len(batch)
    else:
        logging.debug(" ---> ~SN~FBToo many tasked feeds. ~SB%s~SN tasked." % in_flight)

    logging.debug(" ---> ~SN~FBTasking %s feeds took ~SB%s~SN seconds (~SB%s~SN/~FG%s~FB~SN/%s tasked/queued/scheduled)" % (
        active_count, int((time.time() - start)), conn.zcard('tasked_feeds'),
        conn.scard('queued_feeds'), conn.zcard('scheduled_updates')))
def handle(self, *args, **options):
    """Task due feeds (or every feed when --all is passed), then several
    classes of feeds that appear stuck or mistakenly inactive."""
    from apps.rss_feeds.models import Feed

    settings.LOG_TO_STREAM = True
    now = datetime.datetime.utcnow()
    day = now - datetime.timedelta(days=1)
    week = now - datetime.timedelta(days=7)

    # With --all, re-task everything; otherwise just the feeds that are due.
    if options['all']:
        due = Feed.objects.all()
    else:
        due = (Feed.objects
               .filter(next_scheduled_update__lte=now, active=True)
               .exclude(active_subscribers=0)
               .order_by('?'))
    Feed.task_feeds(due)

    # Fast-decay active feeds quiet for a day.
    quiet = Feed.objects.filter(
        last_update__lte=day,
        queued_date__lte=day,
        min_to_decay__lte=60 * 24,
        active_subscribers__gte=1,
        active=True,
    ).order_by('?')
    if quiet:
        Feed.task_feeds(quiet)

    # Active subscribed feeds untouched for a week.
    stuck = Feed.objects.filter(
        last_update__lte=week,
        queued_date__lte=day,
        active_subscribers__gte=1,
        active=True,
    ).order_by('?')
    if stuck:
        Feed.task_feeds(stuck)

    # Known-good feeds that were deactivated but still have subscribers.
    deactivated = Feed.objects.filter(
        last_update__lte=day,
        active_subscribers__gte=1,
        active=False,
        known_good=True,
    ).order_by('?')
    if deactivated:
        Feed.task_feeds(deactivated)
def run(self, **kwargs):
    """Task regular due feeds plus never-fetched, quiet, and week-old stuck
    feeds, timing each queryset phase for the debug log."""
    from apps.rss_feeds.models import Feed

    settings.LOG_TO_STREAM = True
    now = datetime.datetime.utcnow()
    start = time.time()

    # Regular feeds that are due and have subscribers.
    feeds = Feed.objects.filter(
        next_scheduled_update__lte=now,
        active=True,
        active_subscribers__gte=1,
    ).order_by('?')[:1250]
    active_count = feeds.count()
    cp1 = time.time()

    # Active, subscribed feeds that have never completed a fetch.
    refresh_feeds = Feed.objects.filter(
        next_scheduled_update__lte=now,
        active=True,
        fetched_once=False,
        active_subscribers__gte=1,
    ).order_by('?')[:100]
    refresh_count = refresh_feeds.count()
    cp2 = time.time()

    # Fast-decay feeds quiet for a day -- presumed mistakenly inactive.
    day = now - datetime.timedelta(days=1)
    inactive_feeds = Feed.objects.filter(
        last_update__lte=day,
        queued_date__lte=day,
        min_to_decay__lte=60 * 24,
        active_subscribers__gte=1,
    ).order_by('?')[:100]
    inactive_count = inactive_feeds.count()
    cp3 = time.time()

    # Subscribed feeds untouched for a week.
    week = now - datetime.timedelta(days=7)
    old_feeds = Feed.objects.filter(
        last_update__lte=week,
        queued_date__lte=day,
        active_subscribers__gte=1,
    ).order_by('?')[:500]
    old_count = old_feeds.count()
    cp4 = time.time()

    logging.debug(
        " ---> ~FBTasking ~SB~FC%s~SN~FB/~FC%s~FB (~FC%s~FB/~FC%s~SN~FB) feeds... (%.4s/%.4s/%.4s/%.4s)"
        % (active_count, refresh_count, inactive_count, old_count,
           cp1 - start, cp2 - cp1, cp3 - cp2, cp4 - cp3))
    Feed.task_feeds(feeds, verbose=False)
    Feed.task_feeds(refresh_feeds, verbose=False)
    Feed.task_feeds(inactive_feeds, verbose=False)
    Feed.task_feeds(old_feeds, verbose=False)
    logging.debug(" ---> ~SN~FBTasking took ~SB%s~SN seconds" % int((time.time() - start)))
def run(self, **kwargs):
    """Queue due feeds from 'scheduled_updates', task a batch, then re-task
    never-fetched, dropped, and overdue feeds.

    Fix: guard the SADD on 'queued_feeds' -- redis raises a protocol error
    when SADD is called with zero members, which happened whenever no feeds
    were due (the sibling versions of this task guard the same call).
    """
    from apps.rss_feeds.models import Feed
    settings.LOG_TO_STREAM = True
    now = datetime.datetime.utcnow()
    start = time.time()
    r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
    tasked_feeds_size = r.zcard('tasked_feeds')
    # Expire the rolling one-hour fetch counter.
    # NOTE(review): strftime('%s') is a glibc extension and reads the naive
    # utcnow() in local time -- assumes the server clock is UTC; confirm.
    hour_ago = now - datetime.timedelta(hours=1)
    r.zremrangebyscore('fetched_feeds_last_hour', 0, int(hour_ago.strftime('%s')))
    # Everything scheduled at or before now is due; pull and clear it.
    now_timestamp = int(now.strftime("%s"))
    queued_feeds = r.zrangebyscore('scheduled_updates', 0, now_timestamp)
    r.zremrangebyscore('scheduled_updates', 0, now_timestamp)
    if queued_feeds:
        # SADD with no members is an error, so only add when something is due.
        r.sadd('queued_feeds', *queued_feeds)
    logging.debug(" ---> ~SN~FBQueuing ~SB%s~SN stale feeds (~SB%s~SN/~FG%s~FB~SN/%s tasked/queued/scheduled)" % (
        len(queued_feeds), r.zcard('tasked_feeds'),
        r.scard('queued_feeds'), r.zcard('scheduled_updates')))
    # Regular feeds: dispatch a random batch unless too many are in flight.
    if tasked_feeds_size < 5000:
        feeds = r.srandmember('queued_feeds', 5000)
        Feed.task_feeds(feeds, verbose=True)
        active_count = len(feeds)
    else:
        logging.debug(" ---> ~SN~FBToo many tasked feeds. ~SB%s~SN tasked." % tasked_feeds_size)
        active_count = 0
    cp1 = time.time()
    # Force refresh feeds: active, subscribed, never successfully fetched.
    refresh_feeds = Feed.objects.filter(
        active=True,
        fetched_once=False,
        active_subscribers__gte=1
    ).order_by('?')[:100]
    refresh_count = refresh_feeds.count()
    cp2 = time.time()
    # Mistakenly inactive feeds: tasked over 10 minutes ago and never
    # finished -- assume dropped, bump the error score, and reschedule.
    hours_ago = (now - datetime.timedelta(minutes=10)).strftime('%s')
    old_tasked_feeds = r.zrangebyscore('tasked_feeds', 0, hours_ago)
    inactive_count = len(old_tasked_feeds)
    if inactive_count:
        r.zremrangebyscore('tasked_feeds', 0, hours_ago)
        # r.sadd('queued_feeds', *old_tasked_feeds)
        for feed_id in old_tasked_feeds:
            r.zincrby('error_feeds', feed_id, 1)
            feed = Feed.get_by_id(feed_id)
            feed.set_next_scheduled_update()
    logging.debug(" ---> ~SN~FBRe-queuing ~SB%s~SN dropped feeds (~SB%s/%s~SN queued/tasked)" % (
        inactive_count, r.scard('queued_feeds'), r.zcard('tasked_feeds')))
    cp3 = time.time()
    # Subscribed feeds whose scheduled update is more than a day overdue.
    old = now - datetime.timedelta(days=1)
    old_feeds = Feed.objects.filter(
        next_scheduled_update__lte=old,
        active_subscribers__gte=1
    ).order_by('?')[:500]
    old_count = old_feeds.count()
    cp4 = time.time()
    logging.debug(" ---> ~FBTasking ~SB~FC%s~SN~FB/~FC%s~FB (~FC%s~FB/~FC%s~SN~FB) feeds... (%.4s/%.4s/%.4s/%.4s)" % (
        active_count, refresh_count, inactive_count, old_count,
        cp1 - start, cp2 - cp1, cp3 - cp2, cp4 - cp3
    ))
    Feed.task_feeds(refresh_feeds, verbose=False)
    Feed.task_feeds(old_feeds, verbose=False)
    logging.debug(" ---> ~SN~FBTasking took ~SB%s~SN seconds (~SB%s~SN/~FG%s~FB~SN/%s tasked/queued/scheduled)" % (
        int((time.time() - start)), r.zcard('tasked_feeds'),
        r.scard('queued_feeds'), r.zcard('scheduled_updates')))
def run(self, **kwargs):
    """Queue due feeds from 'scheduled_updates', task a batch, then re-task
    never-fetched, dropped, and overdue feeds.

    Fix: guard the SADD on 'queued_feeds' -- redis raises a protocol error
    when SADD is called with zero members, which happened whenever no feeds
    were due (the sibling versions of this task guard the same call).
    """
    from apps.rss_feeds.models import Feed
    settings.LOG_TO_STREAM = True
    now = datetime.datetime.utcnow()
    start = time.time()
    r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
    tasked_feeds_size = r.zcard('tasked_feeds')
    # Expire the rolling one-hour fetch counter.
    # NOTE(review): strftime('%s') is a glibc extension and reads the naive
    # utcnow() in local time -- assumes the server clock is UTC; confirm.
    hour_ago = now - datetime.timedelta(hours=1)
    r.zremrangebyscore('fetched_feeds_last_hour', 0, int(hour_ago.strftime('%s')))
    # Everything scheduled at or before now is due; pull and clear it.
    now_timestamp = int(now.strftime("%s"))
    queued_feeds = r.zrangebyscore('scheduled_updates', 0, now_timestamp)
    r.zremrangebyscore('scheduled_updates', 0, now_timestamp)
    if queued_feeds:
        # SADD with no members is an error, so only add when something is due.
        r.sadd('queued_feeds', *queued_feeds)
    logging.debug(
        " ---> ~SN~FBQueuing ~SB%s~SN stale feeds (~SB%s~SN/~FG%s~FB~SN/%s tasked/queued/scheduled)"
        % (len(queued_feeds), r.zcard('tasked_feeds'),
           r.scard('queued_feeds'), r.zcard('scheduled_updates')))
    # Regular feeds: dispatch a random batch unless too many are in flight.
    if tasked_feeds_size < 5000:
        feeds = r.srandmember('queued_feeds', 5000)
        Feed.task_feeds(feeds, verbose=True)
        active_count = len(feeds)
    else:
        logging.debug(
            " ---> ~SN~FBToo many tasked feeds. ~SB%s~SN tasked." % tasked_feeds_size)
        active_count = 0
    cp1 = time.time()
    # Force refresh feeds: active, subscribed, never successfully fetched.
    refresh_feeds = Feed.objects.filter(
        active=True, fetched_once=False,
        active_subscribers__gte=1).order_by('?')[:100]
    refresh_count = refresh_feeds.count()
    cp2 = time.time()
    # Mistakenly inactive feeds: tasked over an hour ago and never finished
    # -- assume dropped, bump the error score, and reschedule.
    hours_ago = (now - datetime.timedelta(hours=1)).strftime('%s')
    old_tasked_feeds = r.zrangebyscore('tasked_feeds', 0, hours_ago)
    inactive_count = len(old_tasked_feeds)
    if inactive_count:
        r.zremrangebyscore('tasked_feeds', 0, hours_ago)
        # r.sadd('queued_feeds', *old_tasked_feeds)
        for feed_id in old_tasked_feeds:
            r.zincrby('error_feeds', feed_id, 1)
            feed = Feed.get_by_id(feed_id)
            feed.set_next_scheduled_update()
    logging.debug(
        " ---> ~SN~FBRe-queuing ~SB%s~SN dropped feeds (~SB%s/%s~SN queued/tasked)"
        % (inactive_count, r.scard('queued_feeds'), r.zcard('tasked_feeds')))
    cp3 = time.time()
    # Subscribed feeds whose scheduled update is more than a day overdue.
    old = now - datetime.timedelta(days=1)
    old_feeds = Feed.objects.filter(
        next_scheduled_update__lte=old,
        active_subscribers__gte=1).order_by('?')[:500]
    old_count = old_feeds.count()
    cp4 = time.time()
    logging.debug(
        " ---> ~FBTasking ~SB~FC%s~SN~FB/~FC%s~FB (~FC%s~FB/~FC%s~SN~FB) feeds... (%.4s/%.4s/%.4s/%.4s)"
        % (active_count, refresh_count, inactive_count, old_count,
           cp1 - start, cp2 - cp1, cp3 - cp2, cp4 - cp3))
    Feed.task_feeds(refresh_feeds, verbose=False)
    Feed.task_feeds(old_feeds, verbose=False)
    logging.debug(
        " ---> ~SN~FBTasking took ~SB%s~SN seconds (~SB%s~SN/~FG%s~FB~SN/%s tasked/queued/scheduled)"
        % (int((time.time() - start)), r.zcard('tasked_feeds'),
           r.scard('queued_feeds'), r.zcard('scheduled_updates')))
def run(self, **kwargs):
    """Re-task broken feeds: active feeds never fetched, feeds stuck in the
    tasked set, and feeds whose scheduled update is long overdue."""
    from apps.rss_feeds.models import Feed

    settings.LOG_TO_STREAM = True
    now = datetime.datetime.utcnow()
    start = time.time()
    r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
    logging.debug(" ---> ~SN~FBQueuing broken feeds...")

    # Active, subscribed feeds that have never completed a fetch get forced.
    refresh_feeds = Feed.objects.filter(
        active=True,
        fetched_once=False,
        active_subscribers__gte=1,
    ).order_by('?')[:100]
    refresh_count = refresh_feeds.count()
    cp1 = time.time()
    logging.debug(" ---> ~SN~FBFound %s active, unfetched broken feeds" % refresh_count)

    # Feeds tasked over 10 minutes ago that never finished: assume dropped,
    # bump their error score, and put them back on the schedule.
    # NOTE(review): strftime('%s') is a glibc extension and reads the naive
    # utcnow() in local time -- assumes the server clock is UTC; confirm.
    stale_cutoff = (now - datetime.timedelta(minutes=10)).strftime('%s')
    dropped_ids = r.zrangebyscore('tasked_feeds', 0, stale_cutoff)
    inactive_count = len(dropped_ids)
    if inactive_count:
        r.zremrangebyscore('tasked_feeds', 0, stale_cutoff)
        # r.sadd('queued_feeds', *dropped_ids)
        for feed_id in dropped_ids:
            r.zincrby('error_feeds', feed_id, 1)
            Feed.get_by_id(feed_id).set_next_scheduled_update()
    logging.debug(" ---> ~SN~FBRe-queuing ~SB%s~SN dropped/broken feeds (~SB%s/%s~SN queued/tasked)" % (
        inactive_count, r.scard('queued_feeds'), r.zcard('tasked_feeds')))
    cp2 = time.time()

    # Subscribed feeds whose scheduled update is more than a day overdue.
    overdue = now - datetime.timedelta(days=1)
    old_feeds = Feed.objects.filter(
        next_scheduled_update__lte=overdue,
        active_subscribers__gte=1,
    ).order_by('?')[:500]
    old_count = old_feeds.count()
    cp3 = time.time()

    logging.debug(" ---> ~SN~FBTasking ~SBrefresh:~FC%s~FB inactive:~FC%s~FB old:~FC%s~SN~FB broken feeds... (%.4s/%.4s/%.4s)" % (
        refresh_count, inactive_count, old_count,
        cp1 - start, cp2 - cp1, cp3 - cp2,
    ))
    Feed.task_feeds(refresh_feeds, verbose=False)
    Feed.task_feeds(old_feeds, verbose=False)
    logging.debug(" ---> ~SN~FBTasking broken feeds took ~SB%s~SN seconds (~SB%s~SN/~FG%s~FB~SN/%s tasked/queued/scheduled)" % (
        int((time.time() - start)), r.zcard('tasked_feeds'),
        r.scard('queued_feeds'), r.zcard('scheduled_updates')))
def run(self, **kwargs):
    """Queue due feeds and task batches, never letting an exception escape
    the periodic task.

    Fix: `except Exception, e:` is Python 2-only syntax; replaced with
    `except Exception as e:`, which is valid on Python 2.6+ and Python 3.
    """
    try:
        from apps.rss_feeds.models import Feed
        # settings.LOG_TO_STREAM = True
        now = datetime.datetime.utcnow()
        start = time.time()
        r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
        # Size of the in-flight fetch-task set.
        tasked_feeds_size = r.zcard('tasked_feeds')
        # Expire the rolling one-hour fetch counter.
        # NOTE(review): strftime('%s') is a glibc extension and reads the
        # naive utcnow() in local time -- assumes the server is on UTC.
        hour_ago = now - datetime.timedelta(hours=1)
        r.zremrangebyscore('fetched_feeds_last_hour', 0, int(hour_ago.strftime('%s')))
        # Pull the feed_ids whose scheduled update time has passed, and
        # remove them from the schedule.
        now_timestamp = int(now.strftime("%s"))
        queued_feeds = r.zrangebyscore('scheduled_updates', 0, now_timestamp)
        r.zremrangebyscore('scheduled_updates', 0, now_timestamp)
        # NOTE(review): this message is logged twice -- before and after the
        # SADD -- presumably to show the queued_feeds cardinality change;
        # confirm intent or drop one of the two.
        logging.debug(" ---> ~SN~FBQueuing ~SB%s~SN stale feeds (~SB%s~SN/~FG%s~FB~SN/%s tasked/queued/scheduled)" % (
            len(queued_feeds), r.zcard('tasked_feeds'),
            r.scard('queued_feeds'), r.zcard('scheduled_updates')))
        # Add the due feed_ids to the queued set; SADD with zero members
        # would raise, hence the guard.
        if len(queued_feeds) > 0:
            r.sadd('queued_feeds', *queued_feeds)
        logging.debug(" ---> ~SN~FBQueuing ~SB%s~SN stale feeds (~SB%s~SN/~FG%s~FB~SN/%s tasked/queued/scheduled)" % (
            len(queued_feeds), r.zcard('tasked_feeds'),
            r.scard('queued_feeds'), r.zcard('scheduled_updates')))
        # If fewer than 5000 fetches are in flight, task up to 5000 more.
        if tasked_feeds_size < 5000:
            feeds = r.srandmember('queued_feeds', 5000)
            # task_feeds removes these ids from queued_feeds, adds them to
            # tasked_feeds, and dispatches fetch tasks; an id only leaves
            # tasked_feeds after its fetch finishes. The method itself is
            # sync, but task.apply_async is not.
            Feed.task_feeds(feeds, verbose=True)
            active_count = len(feeds)
        else:
            logging.debug(" ---> ~SN~FBToo many tasked feeds. ~SB%s~SN tasked." % tasked_feeds_size)
            active_count = 0
        cp1 = time.time()
        # Force feeds that have never been fetched (new feeds, or a cold
        # start with an empty schedule) to refresh. order_by('?') = random.
        refresh_feeds = Feed.objects.filter(
            fetched_once=False,
        ).order_by('?')[:100]
        refresh_count = refresh_feeds.count()
        cp2 = time.time()
        # Mistakenly inactive feeds: tasked over 10 minutes ago and never
        # finished -- assume the fetch was dropped or errored.
        hours_ago = (now - datetime.timedelta(minutes=10)).strftime('%s')
        old_tasked_feeds = r.zrangebyscore('tasked_feeds', 0, hours_ago)
        inactive_count = len(old_tasked_feeds)
        if inactive_count:
            r.zremrangebyscore('tasked_feeds', 0, hours_ago)
            for feed_id in old_tasked_feeds:
                # Record the error and put the feed back on the schedule.
                r.zincrby('error_feeds', feed_id, 1)
                feed = Feed.get_by_id(feed_id)
                feed.set_next_scheduled_update()
        logging.debug(" ---> ~SN~FBRe-queuing ~SB%s~SN dropped feeds (~SB%s/%s~SN queued/tasked)" % (
            inactive_count, r.scard('queued_feeds'), r.zcard('tasked_feeds')))
        cp3 = time.time()
        # After downtime, next_scheduled_update may be far in the past;
        # anything more than a day overdue gets re-tasked now.
        old = now - datetime.timedelta(days=1)
        old_feeds = Feed.objects.filter(
            next_scheduled_update__lte=old,
        ).order_by('?')[:500]
        old_count = old_feeds.count()
        cp4 = time.time()
        logging.debug(" ---> ~FBTasking ~SB~FC%s~SN~FB/~FC%s~FB (~FC%s~FB/~FC%s~SN~FB) feeds... (%.4s/%.4s/%.4s/%.4s)" % (
            active_count, refresh_count, inactive_count, old_count,
            cp1 - start, cp2 - cp1, cp3 - cp2, cp4 - cp3))
        Feed.task_feeds(refresh_feeds, verbose=True)
        Feed.task_feeds(old_feeds, verbose=True)
        logging.debug(" ---> ~SN~FBTasking took ~SB%s~SN seconds (~SB%s~SN/~FG%s~FB~SN/%s tasked/queued/scheduled)" % (
            int((time.time() - start)), r.zcard('tasked_feeds'),
            r.scard('queued_feeds'), r.zcard('scheduled_updates')))
    except Exception as e:
        import traceback
        traceback.print_exc()
        logging.error(str(e))