예제 #1
0
 def calculate_metrics(self):
     """Report the hourly feed load-time metrics tracked in MStatistics."""
     from apps.statistics.models import MStatistics

     avg_time_taken = MStatistics.get('latest_avg_time_taken')
     sites_loaded = MStatistics.get('latest_sites_loaded')
     return {
         'feed_loadtimes_avg_hour': avg_time_taken,
         'feeds_loaded_hour': sites_loaded,
     }
예제 #2
0
File: tasks.py  Project: bodegard/NewsBlur
    def run(self, feed_pks, **kwargs):
        """Task entry point: fetch and update one or more feeds by primary key.

        ``feed_pks`` may be a single pk or a list of pks. Ids that no longer
        resolve to a matching Feed are removed from the Redis 'tasked_feeds'
        sorted set.
        """
        from apps.rss_feeds.models import Feed
        from apps.statistics.models import MStatistics
        r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)

        # Skip expensive score computation while MongoDB replication lags.
        mongodb_replication_lag = int(MStatistics.get('mongodb_replication_lag', 0))
        compute_scores = bool(mongodb_replication_lag < 10)

        # Optionally capture DB/Mongo/Redis query dumps for this task run.
        profiler = DBProfilerMiddleware()
        profiler_activated = profiler.process_celery()
        if profiler_activated:
            mongo_middleware = MongoDumpMiddleware()
            mongo_middleware.process_celery(profiler)
            redis_middleware = RedisDumpMiddleware()
            redis_middleware.process_celery(profiler)

        options = {
            'quick': float(MStatistics.get('quick_fetch', 0)),
            'updates_off': MStatistics.get('updates_off', False),
            'compute_scores': compute_scores,
            'mongodb_replication_lag': mongodb_replication_lag,
        }

        if not isinstance(feed_pks, list):
            feed_pks = [feed_pks]

        for feed_pk in feed_pks:
            feed = Feed.get_by_id(feed_pk)
            # get_by_id may return None or a different (merged duplicate)
            # feed; drop the stale id from the tasked_feeds queue then.
            if not feed or feed.pk != int(feed_pk):
                logging.info(" ---> ~FRRemoving feed_id %s from tasked_feeds queue, points to %s..." % (feed_pk, feed and feed.pk))
                r.zrem('tasked_feeds', feed_pk)
            if feed:
                feed.update(**options)
                if profiler_activated: profiler.process_celery_finished()
예제 #3
0
    def run(self, feed_pks, **kwargs):
        """Fetch and update the feeds named by feed_pks (a pk or list of pks)."""
        from apps.rss_feeds.models import Feed
        from apps.statistics.models import MStatistics

        r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)

        replication_lag = int(MStatistics.get('mongodb_replication_lag', 0))
        options = {
            'quick': float(MStatistics.get('quick_fetch', 0)),
            'compute_scores': bool(replication_lag < 10),
            'mongodb_replication_lag': replication_lag,
        }

        pks = feed_pks if isinstance(feed_pks, list) else [feed_pks]
        for pk in pks:
            feed = Feed.get_by_id(pk)
            if not feed or feed.pk != int(pk):
                # Id no longer points at a live, matching feed: evict it
                # from the Redis tasked_feeds queue.
                logging.info(
                    " ---> ~FRRemoving feed_id %s from tasked_feeds queue, points to %s..."
                    % (pk, feed and feed.pk))
                r.zrem('tasked_feeds', pk)
            if feed:
                feed.update(**options)
예제 #4
0
File: tasks.py  Project: 0077cc/NewsBlur
 def run(self, feed_pks, **kwargs):
     """Update each requested feed, logging any pk with no matching Feed."""
     from apps.rss_feeds.models import Feed
     from apps.statistics.models import MStatistics

     lag = int(MStatistics.get('mongodb_replication_lag', 0))
     options = {
         'fake': bool(MStatistics.get('fake_fetch')),
         'quick': float(MStatistics.get('quick_fetch', 0)),
         'compute_scores': bool(lag < 10),
         'mongodb_replication_lag': lag,
     }

     if not isinstance(feed_pks, list):
         feed_pks = [feed_pks]

     for pk in feed_pks:
         try:
             feed = Feed.get_by_id(pk)
             if not feed:
                 # Normalize "no such feed" into the exception handled below.
                 raise Feed.DoesNotExist
             feed.update(**options)
         except Feed.DoesNotExist:
             logging.info(" ---> Feed doesn't exist: [%s]" % pk)
예제 #5
0
File: tasks.py  Project: Alexpcole/NewsBlur
    def run(self, feed_pks, **kwargs):
        """Fetch/update the given feed pk(s); prune stale tasked_feeds ids."""
        from apps.rss_feeds.models import Feed
        from apps.statistics.models import MStatistics

        r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)

        lag = int(MStatistics.get('mongodb_replication_lag', 0))
        options = {
            'quick': float(MStatistics.get('quick_fetch', 0)),
            'updates_off': MStatistics.get('updates_off', False),
            'compute_scores': bool(lag < 10),
            'mongodb_replication_lag': lag,
        }

        pks = feed_pks if isinstance(feed_pks, list) else [feed_pks]
        for pk in pks:
            feed = Feed.get_by_id(pk)
            if not feed or feed.pk != int(pk):
                # Stale or merged id: drop it from the Redis queue.
                logging.info(" ---> ~FRRemoving feed_id %s from tasked_feeds queue, points to %s..." % (pk, feed and feed.pk))
                r.zrem('tasked_feeds', pk)
            if feed:
                feed.update(**options)
    def handle(self, *args, **options):
        """Management command: select due feeds and dispatch fetches.

        Feeds are chosen by --force/--username/--feed, or by due
        next_scheduled_update, then fanned out across worker threads.
        """
        if options['daemonize']:
            daemonize()

        settings.LOG_TO_STREAM = True
        now = datetime.datetime.utcnow()

        # --skip: don't fetch low-volume feeds, just push their schedule out.
        if options['skip']:
            feeds = Feed.objects.filter(
                next_scheduled_update__lte=now,
                average_stories_per_month__lt=options['skip'],
                active=True)
            print " ---> Skipping %s feeds" % feeds.count()
            for feed in feeds:
                feed.set_next_scheduled_update()
                print '.',
            return

        socket.setdefaulttimeout(options['timeout'])
        if options['force']:
            feeds = Feed.objects.all()
        elif options['username']:
            feeds = Feed.objects.filter(subscribers__user=User.objects.get(
                username=options['username']))
        elif options['feed']:
            feeds = Feed.objects.filter(pk=options['feed'])
        else:
            feeds = Feed.objects.filter(next_scheduled_update__lte=now,
                                        active=True)

        # Randomize order so repeated runs don't always hit the same feeds first.
        feeds = feeds.order_by('?')

        for f in feeds:
            f.set_next_scheduled_update()

        num_workers = min(len(feeds), options['workerthreads'])
        if options['single_threaded']:
            num_workers = 1

        options['compute_scores'] = True
        options['quick'] = float(MStatistics.get('quick_fetch', 0))
        options['updates_off'] = MStatistics.get('updates_off', False)

        disp = feed_fetcher.Dispatcher(options, num_workers)

        # Round-robin the feed pks across one queue per worker.
        feeds_queue = []
        for _ in range(num_workers):
            feeds_queue.append([])

        i = 0
        for feed in feeds:
            feeds_queue[i % num_workers].append(feed.pk)
            i += 1
        disp.add_jobs(feeds_queue, i)

        django.db.connection.close()

        print " ---> Fetching %s feeds..." % feeds.count()
        disp.run_jobs()
예제 #7
0
    def calculate_metrics(self):
        """Return the hourly page-load metrics cached in MStatistics."""
        from apps.statistics.models import MStatistics

        avg_time = MStatistics.get('latest_avg_time_taken')
        sites_loaded = MStatistics.get('latest_sites_loaded')
        return {
            'feed_loadtimes_avg_hour': avg_time,
            'feeds_loaded_hour': sites_loaded,
        }
예제 #8
0
    def calculate_metrics(self):
        """Return the latest average query times for SQL, Mongo and Redis."""
        from apps.statistics.models import MStatistics

        sql_avg = MStatistics.get('latest_sql_avg')
        mongo_avg = MStatistics.get('latest_mongo_avg')
        redis_avg = MStatistics.get('latest_redis_avg')
        return {'sql_avg': sql_avg, 'mongo_avg': mongo_avg, 'redis_avg': redis_avg}
예제 #9
0
 def calculate_metrics(self):
     """Report the latest average backend query times from MStatistics."""
     from apps.statistics.models import MStatistics

     metrics = {}
     metrics['sql_avg'] = MStatistics.get('latest_sql_avg')
     metrics['mongo_avg'] = MStatistics.get('latest_mongo_avg')
     metrics['redis_avg'] = MStatistics.get('latest_redis_avg')
     return metrics
예제 #10
0
 def handle(self, *args, **options):
     """Management command: fetch due feeds across a pool of worker threads."""
     if options['daemonize']:
         daemonize()
     
     settings.LOG_TO_STREAM = True
     now = datetime.datetime.utcnow()
     
     # --skip: push low-volume feeds' schedules forward instead of fetching.
     if options['skip']:
         feeds = Feed.objects.filter(next_scheduled_update__lte=now,
                                     average_stories_per_month__lt=options['skip'],
                                     active=True)
         print " ---> Skipping %s feeds" % feeds.count()
         for feed in feeds:
             feed.set_next_scheduled_update()
             print '.',
         return
         
     socket.setdefaulttimeout(options['timeout'])
     if options['force']:
         feeds = Feed.objects.all()
     elif options['username']:
         feeds = Feed.objects.filter(subscribers__user=User.objects.get(username=options['username']))
     elif options['feed']:
         feeds = Feed.objects.filter(pk=options['feed'])
     else:
         feeds = Feed.objects.filter(next_scheduled_update__lte=now, active=True)
     
     # Shuffle so successive runs don't always start with the same feeds.
     feeds = feeds.order_by('?')
     
     for f in feeds:
         f.set_next_scheduled_update()
     
     num_workers = min(len(feeds), options['workerthreads'])
     if options['single_threaded']:
         num_workers = 1
     
     options['compute_scores'] = True
     options['quick'] = float(MStatistics.get('quick_fetch', 0))
     options['updates_off'] = MStatistics.get('updates_off', False)
     
     disp = feed_fetcher.Dispatcher(options, num_workers)        
     
     # Round-robin feed pks into one queue per worker.
     feeds_queue = []
     for _ in range(num_workers):
         feeds_queue.append([])
     
     i = 0
     for feed in feeds:
         feeds_queue[i%num_workers].append(feed.pk)
         i += 1
     disp.add_jobs(feeds_queue, i)
     
     django.db.connection.close()
     
     print " ---> Fetching %s feeds..." % feeds.count()
     disp.run_jobs()
예제 #11
0
    def run(self, feed_pks, **kwargs):
        """Update the given feeds, honoring the fake/quick fetch flags."""
        from apps.rss_feeds.models import Feed
        from apps.statistics.models import MStatistics

        options = {
            "fake": bool(MStatistics.get("fake_fetch")),
            "quick": float(MStatistics.get("quick_fetch", 0)),
        }

        pks = feed_pks if isinstance(feed_pks, list) else [feed_pks]
        for pk in pks:
            try:
                Feed.objects.get(pk=pk).update(options=options)
            except Feed.DoesNotExist:
                logging.info(" ---> Feed doesn't exist: [%s]" % pk)
예제 #12
0
    def calculate_metrics(self):
        """Expose the feeds_fetched counter from the cached statistics."""
        from apps.statistics.models import MStatistics

        stats = MStatistics.all()
        return {'feed_success': stats['feeds_fetched']}
예제 #13
0
File: views.py  Project: Chorior/NewsBlur
def dashboard_graphs(request):
    """Render the statistics-graphs fragment with all cached statistics."""
    stats = MStatistics.all()
    context = {"statistics": stats}
    return render_to_response(
        "statistics/render_statistics_graphs.xhtml",
        context,
        context_instance=RequestContext(request),
    )
예제 #14
0
 def calculate_metrics(self):
     """Report how many feeds were fetched, from the cached statistics."""
     from apps.statistics.models import MStatistics

     stats = MStatistics.all()
     return {'feed_success': stats['feeds_fetched']}
예제 #15
0
File: tasks.py  Project: zino974/NewsBlur
def UpdateFeeds(feed_pks):
    """Task: fetch/update the given feed pk(s), pruning stale queue entries.

    ``feed_pks`` may be a single pk or a list. Stale ids are removed from
    the Redis 'tasked_feeds' sorted set; a per-feed soft time limit is
    recorded in the feed's fetch history instead of aborting the batch.
    """
    from apps.rss_feeds.models import Feed
    from apps.statistics.models import MStatistics
    r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)

    # Skip expensive score computation while MongoDB replication lags.
    mongodb_replication_lag = int(MStatistics.get('mongodb_replication_lag',
                                                  0))
    compute_scores = bool(mongodb_replication_lag < 10)

    # Optionally capture DB/Mongo/Redis query dumps for this task run.
    profiler = DBProfilerMiddleware()
    profiler_activated = profiler.process_celery()
    if profiler_activated:
        mongo_middleware = MongoDumpMiddleware()
        mongo_middleware.process_celery(profiler)
        redis_middleware = RedisDumpMiddleware()
        redis_middleware.process_celery(profiler)

    options = {
        'quick': float(MStatistics.get('quick_fetch', 0)),
        'updates_off': MStatistics.get('updates_off', False),
        'compute_scores': compute_scores,
        'mongodb_replication_lag': mongodb_replication_lag,
    }

    if not isinstance(feed_pks, list):
        feed_pks = [feed_pks]

    for feed_pk in feed_pks:
        feed = Feed.get_by_id(feed_pk)
        # get_by_id may return None or a merged duplicate; drop the stale
        # id from the tasked_feeds queue in that case.
        if not feed or feed.pk != int(feed_pk):
            logging.info(
                " ---> ~FRRemoving feed_id %s from tasked_feeds queue, points to %s..."
                % (feed_pk, feed and feed.pk))
            r.zrem('tasked_feeds', feed_pk)
        if not feed:
            continue
        try:
            feed.update(**options)
        except SoftTimeLimitExceeded, e:
            # Celery soft time limit: record the timeout and move on.
            feed.save_feed_history(505, 'Timeout', e)
            logging.info(
                " ---> [%-30s] ~BR~FWTime limit hit!~SB~FR Moving on to next feed..."
                % feed)
        if profiler_activated: profiler.process_celery_finished()
예제 #16
0
def index(request):
    """Render the feeds index page with auth forms, counts, and statistics."""
    # Bind login/signup forms; on POST only the submitted form gets the data.
    if request.method == "POST":
        if request.POST['submit'] == 'login':
            login_form = LoginForm(request.POST, prefix='login')
            signup_form = SignupForm(prefix='signup')
        else:
            login_form = LoginForm(prefix='login')
            signup_form = SignupForm(request.POST, prefix='signup')
    else:
        login_form = LoginForm(prefix='login')
        signup_form = SignupForm(prefix='signup')

    user = get_user(request)
    authed = request.user.is_authenticated()
    features = Feature.objects.all()[:3]
    # Only staff get the form for posting new feature announcements.
    feature_form = FeatureForm() if request.user.is_staff else None
    feed_count = UserSubscription.objects.filter(
        user=request.user).count() if authed else 0
    active_count = UserSubscription.objects.filter(
        user=request.user, active=True).count() if authed else 0
    train_count = UserSubscription.objects.filter(
        user=request.user,
        active=True,
        is_trained=False,
        feed__stories_last_month__gte=1).count() if authed else 0
    recommended_feeds = RecommendedFeed.objects.filter(
        is_public=True,
        approved_date__lte=datetime.datetime.now()).select_related('feed')[:2]
    unmoderated_feeds = RecommendedFeed.objects.filter(
        is_public=False, declined_date__isnull=True).select_related('feed')[:2]
    statistics = MStatistics.all()
    feedbacks = MFeedback.all()
    # One-shot session flag set by the Google Reader importer; consume it here.
    start_import_from_google_reader = request.session.get(
        'import_from_google_reader', False)
    if start_import_from_google_reader:
        del request.session['import_from_google_reader']

    return render_to_response(
        'reader/feeds.xhtml', {
            'user_profile': hasattr(user, 'profile') and user.profile,
            'login_form': login_form,
            'signup_form': signup_form,
            'feature_form': feature_form,
            'features': features,
            'feed_count': feed_count,
            'active_count': active_count,
            'train_count': active_count - train_count,
            'account_images': range(1, 4),
            'recommended_feeds': recommended_feeds,
            'unmoderated_feeds': unmoderated_feeds,
            'statistics': statistics,
            'feedbacks': feedbacks,
            'start_import_from_google_reader': start_import_from_google_reader,
        },
        context_instance=RequestContext(request))
예제 #17
0
    def get(self, request):
        """Render hourly load-time metrics in Prometheus exposition format."""
        from apps.statistics.models import MStatistics

        chart_name = "load_times"
        chart_type = "counter"
        data = {
            'feed_loadtimes_avg_hour': MStatistics.get('latest_avg_time_taken'),
            'feeds_loaded_hour': MStatistics.get('latest_sites_loaded'),
        }

        formatted_data = {
            key: f'{chart_name}{{category="{key}"}} {value}'
            for key, value in data.items()
        }

        context = {
            "data": formatted_data,
            "chart_name": chart_name,
            "chart_type": chart_type,
        }
        return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain")
예제 #18
0
 def run(self, feed_id, xml, **kwargs):
     """Update a single feed from pushed XML content."""
     from apps.rss_feeds.models import Feed
     from apps.statistics.models import MStatistics

     lag = int(MStatistics.get('mongodb_replication_lag', 0))
     options = {
         'feed_xml': xml,
         'compute_scores': bool(lag < 60),
         'mongodb_replication_lag': lag,
     }
     feed = Feed.get_by_id(feed_id)
     feed.update(options=options)
예제 #19
0
 def run(self, feed_pks, **kwargs):
     """Update the given feed pk(s), logging pks with no matching Feed."""
     from apps.rss_feeds.models import Feed
     from apps.statistics.models import MStatistics

     lag = int(MStatistics.get('mongodb_replication_lag', 0))
     options = {
         'fake': bool(MStatistics.get('fake_fetch')),
         'quick': float(MStatistics.get('quick_fetch', 0)),
         'compute_scores': bool(lag < 250),
         'mongodb_replication_lag': lag,
     }

     pks = feed_pks if isinstance(feed_pks, list) else [feed_pks]
     for pk in pks:
         try:
             Feed.objects.get(pk=pk).update(**options)
         except Feed.DoesNotExist:
             logging.info(" ---> Feed doesn't exist: [%s]" % pk)
예제 #20
0
 def run(self, feed_id, xml, **kwargs):
     """Update one feed from pushed XML content."""
     from apps.rss_feeds.models import Feed
     from apps.statistics.models import MStatistics

     lag = int(MStatistics.get('mongodb_replication_lag', 0))
     options = {
         'feed_xml': xml,
         'compute_scores': bool(lag < 250),
         'mongodb_replication_lag': lag,
     }
     feed = Feed.objects.get(pk=feed_id)
     feed.update(options=options)
예제 #21
0
    def run(self, feed_pks, **kwargs):
        """Update each requested feed; log ids that have no matching Feed.

        ``feed_pks`` may be a single pk or a list of pks.
        """
        from apps.rss_feeds.models import Feed
        from apps.statistics.models import MStatistics

        mongodb_replication_lag = int(MStatistics.get("mongodb_replication_lag", 0))
        compute_scores = bool(mongodb_replication_lag < 10)

        options = {
            "fake": bool(MStatistics.get("fake_fetch")),
            "quick": float(MStatistics.get("quick_fetch", 0)),
            "compute_scores": compute_scores,
            "mongodb_replication_lag": mongodb_replication_lag,
        }

        if not isinstance(feed_pks, list):
            feed_pks = [feed_pks]

        for feed_pk in feed_pks:
            try:
                feed = Feed.get_by_id(feed_pk)
                if not feed:
                    # Fix: Feed.get_by_id returns None for unknown ids rather
                    # than raising, so feed.update would hit an AttributeError.
                    # Normalize to DoesNotExist so the missing pk is logged.
                    raise Feed.DoesNotExist
                feed.update(**options)
            except Feed.DoesNotExist:
                logging.info(" ---> Feed doesn't exist: [%s]" % feed_pk)
예제 #22
0
    def run(self, feed_pks, **kwargs):
        """Task entry point: fetch/update one or more feeds by primary key.

        ``feed_pks`` may be a single pk or a list of pks. Stale ids are
        removed from the Redis 'tasked_feeds' sorted set.
        """
        from apps.rss_feeds.models import Feed
        from apps.statistics.models import MStatistics
        r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)

        # Skip expensive score computation while MongoDB replication lags.
        mongodb_replication_lag = int(
            MStatistics.get('mongodb_replication_lag', 0))
        compute_scores = bool(mongodb_replication_lag < 10)

        # Optionally capture DB/Mongo/Redis query dumps for this task run.
        profiler = DBProfilerMiddleware()
        profiler_activated = profiler.process_celery()
        if profiler_activated:
            mongo_middleware = MongoDumpMiddleware()
            mongo_middleware.process_celery(profiler)
            redis_middleware = RedisDumpMiddleware()
            redis_middleware.process_celery(profiler)

        options = {
            'quick': float(MStatistics.get('quick_fetch', 0)),
            'updates_off': MStatistics.get('updates_off', False),
            'compute_scores': compute_scores,
            'mongodb_replication_lag': mongodb_replication_lag,
        }

        if not isinstance(feed_pks, list):
            feed_pks = [feed_pks]

        for feed_pk in feed_pks:
            feed = Feed.get_by_id(feed_pk)
            # get_by_id may return None or a merged duplicate; drop the
            # stale id from the tasked_feeds queue in that case.
            if not feed or feed.pk != int(feed_pk):
                logging.info(
                    " ---> ~FRRemoving feed_id %s from tasked_feeds queue, points to %s..."
                    % (feed_pk, feed and feed.pk))
                r.zrem('tasked_feeds', feed_pk)
            if feed:
                feed.update(**options)
                if profiler_activated: profiler.process_celery_finished()
예제 #23
0
File: tasks.py  Project: atiw003/NewsBlur
 def run(self, feed_pks, **kwargs):
     """Update each requested feed, optionally in fake-fetch mode."""
     from apps.rss_feeds.models import Feed
     from apps.statistics.models import MStatistics

     fake = bool(MStatistics.get('fake_fetch'))

     pks = feed_pks if isinstance(feed_pks, list) else [feed_pks]
     for pk in pks:
         try:
             Feed.objects.get(pk=pk).update(fake=fake)
         except Feed.DoesNotExist:
             logging.info(" ---> Feed doesn't exist: [%s]" % pk)
예제 #24
0
def index(request):
    """Render the feeds index page: auth forms, subscription counts, stats."""
    # Bind login/signup forms; on POST only the submitted form gets the data.
    if request.method == "POST":
        if request.POST['submit'] == 'login':
            login_form = LoginForm(request.POST, prefix='login')
            signup_form = SignupForm(prefix='signup')
        else:
            login_form = LoginForm(prefix='login')
            signup_form = SignupForm(request.POST, prefix='signup')
    else:
        login_form = LoginForm(prefix='login')
        signup_form = SignupForm(prefix='signup')
    
    user = get_user(request)
    features = Feature.objects.all()[:3]
    # Only staff get the form for posting new feature announcements.
    feature_form = None
    if request.user.is_staff:
        feature_form = FeatureForm()

    # Subscription counts only apply to logged-in users.
    feed_count = 0
    active_count = 0
    train_count = 0
    if request.user.is_authenticated():
        feed_count = UserSubscription.objects.filter(user=request.user).count()
        active_count = UserSubscription.objects.filter(user=request.user, active=True).count()
        train_count = UserSubscription.objects.filter(user=request.user, active=True, is_trained=False, feed__stories_last_month__gte=1).count()
    
    now = datetime.datetime.now()
    recommended_feeds = RecommendedFeed.objects.filter(is_public=True, approved_date__lte=now).select_related('feed')[:2]
    # recommended_feed_feedback = RecommendedFeedUserFeedback.objects.filter(recommendation=recommended_feed)

    statistics = MStatistics.all()
    howitworks_page = 0 # random.randint(0, 5)
    return render_to_response('reader/feeds.xhtml', {
        'user_profile': user.profile,
        'login_form': login_form,
        'signup_form': signup_form,
        'feature_form': feature_form,
        'features': features,
        'start_import_from_google_reader': request.session.get('import_from_google_reader', False),
        'howitworks_page': howitworks_page,
        'feed_count': feed_count,
        'active_count': active_count,
        'train_count': active_count - train_count,
        'account_images': range(1, 4),
        'recommended_feeds': recommended_feeds,
        'statistics': statistics,
        # 'recommended_feed_feedback': recommended_feed_feedback,
    }, context_instance=RequestContext(request))
예제 #25
0
File: tasks.py  Project: zino974/NewsBlur
def PushFeeds(feed_id, xml):
    """Update a single feed from pushed XML, if the feed still exists."""
    from apps.rss_feeds.models import Feed
    from apps.statistics.models import MStatistics

    lag = int(MStatistics.get('mongodb_replication_lag', 0))
    options = {
        'feed_xml': xml,
        'compute_scores': bool(lag < 60),
        'mongodb_replication_lag': lag,
    }

    feed = Feed.get_by_id(feed_id)
    if feed:
        feed.update(options=options)
예제 #26
0
File: views.py  Project: buyongji/NewsBlur
def welcome(request, **kwargs):
    """Build the context for the logged-out welcome page.

    Returns a ``(context, template)`` pair. Fix: the forms were previously
    only bound in the GET branch (the POST branch was ``pass``), so any POST
    request raised NameError when the context dict referenced them. The
    forms are now initialized unconditionally.
    """
    user = get_user(request)
    statistics = MStatistics.all()
    social_profile = MSocialProfile.get_user(user.pk)

    # Always bind fresh forms; POST handling happens elsewhere, and the
    # template still needs renderable forms on a POST round-trip.
    login_form = LoginForm(prefix='login')
    signup_form = SignupForm(prefix='signup')

    return {
        'user_profile': hasattr(user, 'profile') and user.profile,
        'login_form': login_form,
        'signup_form': signup_form,
        'statistics': statistics,
        'social_profile': social_profile,
        'post_request': request.method == 'POST'
    }, "reader/welcome.xhtml"
예제 #27
0
File: views.py  Project: mrcrabby/NewsBlur
def index(request):
    """Render the feeds index: auth forms, subscription counts, recommendations."""
    # Bind login/signup forms; on POST only the submitted form gets the data.
    if request.method == "POST":
        if request.POST['submit'] == 'login':
            login_form  = LoginForm(request.POST, prefix='login')
            signup_form = SignupForm(prefix='signup')
        else:
            login_form  = LoginForm(prefix='login')
            signup_form = SignupForm(request.POST, prefix='signup')
    else:
        login_form  = LoginForm(prefix='login')
        signup_form = SignupForm(prefix='signup')
    
    user         = get_user(request)
    authed       = request.user.is_authenticated()
    features     = Feature.objects.all()[:3]
    # Only staff get the form for posting new feature announcements.
    feature_form = FeatureForm() if request.user.is_staff else None
    feed_count   = UserSubscription.objects.filter(user=request.user).count() if authed else 0
    active_count = UserSubscription.objects.filter(user=request.user, active=True).count() if authed else 0
    train_count  = UserSubscription.objects.filter(user=request.user, active=True, is_trained=False, feed__stories_last_month__gte=1).count() if authed else 0
    recommended_feeds = RecommendedFeed.objects.filter(is_public=True, approved_date__lte=datetime.datetime.now()).select_related('feed')[:2]
    unmoderated_feeds = RecommendedFeed.objects.filter(is_public=False, declined_date__isnull=True).select_related('feed')[:2]
    statistics   = MStatistics.all()
    feedbacks    = MFeedback.all()
    # One-shot session flag set by the Google Reader importer; consume it here.
    start_import_from_google_reader = request.session.get('import_from_google_reader', False)
    if start_import_from_google_reader:
        del request.session['import_from_google_reader']

    return render_to_response('reader/feeds.xhtml', {
        'user_profile'      : hasattr(user, 'profile') and user.profile,
        'login_form'        : login_form,
        'signup_form'       : signup_form,
        'feature_form'      : feature_form,
        'features'          : features,
        'feed_count'        : feed_count,
        'active_count'      : active_count,
        'train_count'       : active_count - train_count,
        'account_images'    : range(1, 4),
        'recommended_feeds' : recommended_feeds,
        'unmoderated_feeds' : unmoderated_feeds,
        'statistics'        : statistics,
        'feedbacks'         : feedbacks,
        'start_import_from_google_reader': start_import_from_google_reader,
    }, context_instance=RequestContext(request))
예제 #28
0
    def get(self, request):
        """Render the feed_success counter in Prometheus text format."""
        statistics = MStatistics.all()
        chart_name = "errors"
        chart_type = "counter"

        data = {
            'feed_success': statistics['feeds_fetched'],
        }
        formatted_data = {key: f'feed_success {value}' for key, value in data.items()}

        context = {
            "data": formatted_data,
            "chart_name": chart_name,
            "chart_type": chart_type,
        }
        return render(request,
                      'monitor/prometheus_data.html',
                      context,
                      content_type="text/plain")
예제 #29
0
File: views.py  Project: buyongji/NewsBlur
def dashboard(request, **kwargs):
    """Build the context for the logged-in dashboard page.

    Returns a ``(context, template)`` pair; inactive users are redirected
    to the payment form instead.
    """
    user = request.user
    feed_count = UserSubscription.objects.filter(user=user).count()
    recommended_feeds = RecommendedFeed.objects.filter(is_public=True,
                                                       approved_date__lte=datetime.datetime.now()
                                                       ).select_related('feed')[:2]
    # Only staff review the queue of not-yet-approved recommendations.
    unmoderated_feeds = []
    if user.is_staff:
        unmoderated_feeds = RecommendedFeed.objects.filter(is_public=False,
                                                           declined_date__isnull=True
                                                           ).select_related('feed')[:2]
    statistics = MStatistics.all()
    social_profile = MSocialProfile.get_user(user.pk)

    # One-shot session flag set by the Google Reader importer; consume it here.
    start_import_from_google_reader = request.session.get('import_from_google_reader', False)
    if start_import_from_google_reader:
        del request.session['import_from_google_reader']

    # Inactive accounts are bounced to the payment form.
    if not user.is_active:
        url = "https://%s%s" % (Site.objects.get_current().domain,
                                 reverse('stripe-form'))
        return HttpResponseRedirect(url)

    logging.user(request, "~FBLoading dashboard")

    return {
        'user_profile': user.profile,
        'feed_count': feed_count,
        'account_images': range(1,4),
        'recommended_feeds': recommended_feeds,
        'unmoderated_feeds': unmoderated_feeds,
        'statistics': statistics,
        'social_profile': social_profile,
        'start_import_from_google_reader': start_import_from_google_reader,
        'debug': settings.DEBUG
    }, "reader/dashboard.xhtml"
예제 #30
0
    def get(self, request):
        """Render feed/subscription/profile counts in Prometheus text format.

        The expensive table counts are cached in MStatistics for 12 hours.
        """
        feeds_count = MStatistics.get('munin:feeds_count')
        if not feeds_count:
            # Cache miss (or zero): recount and store for 12 hours.
            feeds_count = Feed.objects.all().count()
            MStatistics.set('munin:feeds_count', feeds_count, 60 * 60 * 12)

        subscriptions_count = MStatistics.get('munin:subscriptions_count')
        if not subscriptions_count:
            subscriptions_count = UserSubscription.objects.all().count()
            MStatistics.set('munin:subscriptions_count', subscriptions_count,
                            60 * 60 * 12)

        data = {
            'feeds': feeds_count,
            'subscriptions': subscriptions_count,
            'profiles': MSocialProfile.objects._collection.count(),
            'social_subscriptions': MSocialSubscription.objects._collection.count(),
        }
        chart_name = "feeds"
        chart_type = "counter"
        formatted_data = {
            key: f'{chart_name}{{category="{key}"}} {value}'
            for key, value in data.items()
        }

        context = {
            "data": formatted_data,
            "chart_name": chart_name,
            "chart_type": chart_type,
        }

        return render(request,
                      'monitor/prometheus_data.html',
                      context,
                      content_type="text/plain")
예제 #31
0
    def calculate_metrics(self):
        """Count feeds, subscriptions and social profiles/subscriptions.

        Feed and subscription counts are cached in MStatistics for 12 hours.
        """
        from apps.rss_feeds.models import Feed
        from apps.reader.models import UserSubscription
        from apps.social.models import MSocialProfile, MSocialSubscription
        from apps.statistics.models import MStatistics

        twelve_hours = 60 * 60 * 12

        feeds_count = MStatistics.get('munin:feeds_count')
        if not feeds_count:
            feeds_count = Feed.objects.all().count()
            MStatistics.set('munin:feeds_count', feeds_count, twelve_hours)

        subscriptions_count = MStatistics.get('munin:subscriptions_count')
        if not subscriptions_count:
            subscriptions_count = UserSubscription.objects.all().count()
            MStatistics.set('munin:subscriptions_count', subscriptions_count, twelve_hours)

        return {
            'feeds': feeds_count,
            'subscriptions': subscriptions_count,
            'profiles': MSocialProfile.objects.count(),
            'social_subscriptions': MSocialSubscription.objects.count(),
        }
예제 #32
0
    def calculate_metrics(self):
        """Return counts of feeds, subscriptions, and social profiles."""
        from apps.rss_feeds.models import Feed
        from apps.reader.models import UserSubscription
        from apps.social.models import MSocialProfile, MSocialSubscription
        from apps.statistics.models import MStatistics

        cache_ttl = 60 * 60 * 12  # refresh the expensive counts twice a day

        feeds_count = MStatistics.get('munin:feeds_count')
        if not feeds_count:
            # Cache miss (or zero): recount and store.
            feeds_count = Feed.objects.all().count()
            MStatistics.set('munin:feeds_count', feeds_count, cache_ttl)

        subs_count = MStatistics.get('munin:subscriptions_count')
        if not subs_count:
            subs_count = UserSubscription.objects.all().count()
            MStatistics.set('munin:subscriptions_count', subs_count, cache_ttl)

        return {
            'feeds': feeds_count,
            'subscriptions': subs_count,
            'profiles': MSocialProfile.objects.count(),
            'social_subscriptions': MSocialSubscription.objects.count(),
        }
예제 #33
0
    def get(self, request):
        """Render total and archive-user feed storage sizes for Prometheus.

        Aggregated byte totals are cached in MStatistics for 12 hours.
        """
        def cached_size(cache_key, queryset):
            # Sum fs_size_bytes over the queryset, caching the result.
            total = MStatistics.get(cache_key)
            if not total:
                total = queryset.aggregate(Sum('fs_size_bytes'))['fs_size_bytes__sum']
                MStatistics.set(cache_key, total, 60 * 60 * 12)
            return total

        fs_size_bytes = cached_size('munin:fs_size_bytes', Feed.objects)
        archive_users_size_bytes = cached_size(
            'munin:archive_users_size_bytes',
            Feed.objects.filter(archive_subscribers__gte=1))

        chart_name = "feed_sizes"
        chart_type = "counter"
        data = {
            'fs_size_bytes': fs_size_bytes,
            'archive_users_size_bytes': archive_users_size_bytes,
        }
        formatted_data = {
            key: f'{chart_name}{{category="{key}"}} {value}'
            for key, value in data.items()
        }

        context = {
            "data": formatted_data,
            "chart_name": chart_name,
            "chart_type": chart_type,
        }
        return render(request,
                      'monitor/prometheus_data.html',
                      context,
                      content_type="text/plain")
예제 #34
0
 def handle(self, *args, **options):
     """Management command entry point: collect the site-wide statistics."""
     MStatistics.collect_statistics()
예제 #35
0
File: tasks.py  Project: seekdoor/NewsBlur
def CollectStats():
    """Task entry point: collect site-wide statistics into MStatistics."""
    logging.debug(" ---> ~FBCollecting stats...")
    MStatistics.collect_statistics()
예제 #36
0
def index(request):
    """Landing page: login/signup forms, features, subscription counts,
    and recommended feeds, rendered into reader/feeds.xhtml.

    On POST, only the submitted form (login or signup) is bound to the
    request data; the other stays unbound.
    """
    login_form = LoginForm(prefix='login')
    signup_form = SignupForm(prefix='signup')
    if request.method == "POST":
        if request.POST['submit'] == 'login':
            login_form = LoginForm(request.POST, prefix='login')
        else:
            signup_form = SignupForm(request.POST, prefix='signup')

    user = get_user(request)
    features = Feature.objects.all()[:3]
    # Staff see the form for adding new feature announcements.
    feature_form = FeatureForm() if request.user.is_staff else None

    feed_count = 0
    active_count = 0
    train_count = 0
    if request.user.is_authenticated():
        subs = UserSubscription.objects.filter(user=request.user)
        feed_count = subs.count()
        active_count = subs.filter(active=True).count()
        train_count = subs.filter(
            active=True,
            is_trained=False,
            feed__stories_last_month__gte=1).count()

    now = datetime.datetime.now()
    recommended_feeds = RecommendedFeed.objects.filter(
        is_public=True, approved_date__lte=now).select_related('feed')[:2]

    statistics = MStatistics.all()
    howitworks_page = 0  # random.randint(0, 5)
    context = {
        'user_profile': user.profile,
        'login_form': login_form,
        'signup_form': signup_form,
        'feature_form': feature_form,
        'features': features,
        'start_import_from_google_reader':
            request.session.get('import_from_google_reader', False),
        'howitworks_page': howitworks_page,
        'feed_count': feed_count,
        'active_count': active_count,
        # NOTE: template receives the count of trained subscriptions.
        'train_count': active_count - train_count,
        'account_images': range(1, 4),
        'recommended_feeds': recommended_feeds,
        'statistics': statistics,
    }
    return render_to_response('reader/feeds.xhtml',
                              context,
                              context_instance=RequestContext(request))
예제 #37
0
파일: tasks.py 프로젝트: melody40/monorepo
 def run(self, **kwargs):
     """Celery task entry point: collect site-wide statistics."""
     # logging.debug(" ---> ~FBCollecting stats...")
     MStatistics.collect_statistics()
예제 #38
0
파일: views.py 프로젝트: MilenkoM/NewsBlur
def dashboard_graphs(request):
    """Render the statistics-graphs fragment shown on the dashboard."""
    context = {
        'statistics': MStatistics.all(),
    }
    return render_to_response('statistics/render_statistics_graphs.xhtml',
                              context,
                              context_instance=RequestContext(request))
예제 #39
0
    def calculate_metrics(self):
        """Collect feed-health counters for monitoring.

        Returns a dict of gauges: scheduled, exception (feed/page),
        duplicate, active, and push feed counts. The expensive DB counts
        are cached in MStatistics for 12 hours; the Redis zset size is
        always read live.
        """
        from apps.rss_feeds.models import Feed, DuplicateFeed
        from apps.push.models import PushSubscription
        from django.conf import settings
        from apps.statistics.models import MStatistics

        def cached_count(key, compute):
            # Read-through cache. NOTE: falsy cached values (0/None) count
            # as misses and are recomputed, matching the prior behavior.
            value = MStatistics.get(key)
            if not value:
                value = compute()
                MStatistics.set(key, value, 60 * 60 * 12)
            return value

        exception_feeds = cached_count(
            'munin:exception_feeds',
            lambda: Feed.objects.filter(has_feed_exception=True).count())
        exception_pages = cached_count(
            'munin:exception_pages',
            lambda: Feed.objects.filter(has_page_exception=True).count())
        duplicate_feeds = cached_count(
            'munin:duplicate_feeds',
            lambda: DuplicateFeed.objects.count())
        active_feeds = cached_count(
            'munin:active_feeds',
            lambda: Feed.objects.filter(active_subscribers__gt=0).count())
        push_feeds = cached_count(
            'munin:push_feeds',
            lambda: PushSubscription.objects.filter(verified=True).count())

        r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)

        return {
            'scheduled_feeds': r.zcard('scheduled_updates'),
            'exception_feeds': exception_feeds,
            'exception_pages': exception_pages,
            'duplicate_feeds': duplicate_feeds,
            'active_feeds': active_feeds,
            'push_feeds': push_feeds,
        }
예제 #40
0
    def get(self, request):
        """Prometheus endpoint: feed-health counters.

        DB-backed counts are cached in MStatistics for 12 hours; the
        Redis scheduled-updates zset size is read live on every request.
        """
        counter_specs = (
            ('exception_feeds', 'munin:exception_feeds',
             lambda: Feed.objects.filter(has_feed_exception=True).count()),
            ('exception_pages', 'munin:exception_pages',
             lambda: Feed.objects.filter(has_page_exception=True).count()),
            ('duplicate_feeds', 'munin:duplicate_feeds',
             lambda: DuplicateFeed.objects.count()),
            ('active_feeds', 'munin:active_feeds',
             lambda: Feed.objects.filter(active_subscribers__gt=0).count()),
            ('push_feeds', 'munin:push_feeds',
             lambda: PushSubscription.objects.filter(verified=True).count()),
        )

        cached = {}
        for name, stat_key, compute in counter_specs:
            # Falsy cached values (0/None) count as misses and recompute.
            value = MStatistics.get(stat_key)
            if not value:
                value = compute()
                MStatistics.set(stat_key, value, 60 * 60 * 12)
            cached[name] = value

        r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)

        data = {'scheduled_feeds': r.zcard('scheduled_updates')}
        data.update(cached)

        chart_name = "feed_counts"
        chart_type = "counter"
        formatted_data = {
            key: f'{chart_name}{{category="{key}"}} {value}'
            for key, value in data.items()
        }
        context = {
            "data": formatted_data,
            "chart_name": chart_name,
            "chart_type": chart_type,
        }
        return render(request,
                      'monitor/prometheus_data.html',
                      context,
                      content_type="text/plain")
 def handle(self, *args, **options):
     """Management-command entry point: refresh cached site statistics."""
     # Delegates entirely to MStatistics.collect_statistics().
     MStatistics.collect_statistics()
    def calculate_metrics(self):
        """Return feed-health counters for monitoring.

        DB-backed counts are cached in MStatistics for 12 hours; the
        Redis scheduled-updates zset size is read live each call.
        """
        from apps.rss_feeds.models import Feed, DuplicateFeed
        from apps.push.models import PushSubscription
        from django.conf import settings
        from apps.statistics.models import MStatistics

        twelve_hours = 60 * 60 * 12
        metrics = {}
        for name, stat_key, compute in (
                ('exception_feeds', 'munin:exception_feeds',
                 lambda: Feed.objects.filter(has_feed_exception=True).count()),
                ('exception_pages', 'munin:exception_pages',
                 lambda: Feed.objects.filter(has_page_exception=True).count()),
                ('duplicate_feeds', 'munin:duplicate_feeds',
                 lambda: DuplicateFeed.objects.count()),
                ('active_feeds', 'munin:active_feeds',
                 lambda: Feed.objects.filter(active_subscribers__gt=0).count()),
                ('push_feeds', 'munin:push_feeds',
                 lambda: PushSubscription.objects.filter(verified=True).count()),
        ):
            # Falsy cached values (0/None) count as misses and recompute.
            value = MStatistics.get(stat_key)
            if not value:
                value = compute()
                MStatistics.set(stat_key, value, twelve_hours)
            metrics[name] = value

        r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)

        result = {'scheduled_feeds': r.zcard('scheduled_updates')}
        result.update(metrics)
        return result
예제 #43
0
    def handle(self, *args, **options):
        """Collect fresh site statistics, then prune stale entries."""
        MStatistics.collect_statistics()

        # Drop old stats after collecting so the freshest data is kept.
        MStatistics.delete_old_stats()
예제 #44
0
파일: views.py 프로젝트: melody40/monorepo
def dashboard_graphs(request):
    """Render the statistics-graphs fragment for the dashboard."""
    stats = MStatistics.all()
    return render_to_response('statistics/render_statistics_graphs.xhtml',
                              {'statistics': stats},
                              context_instance=RequestContext(request))
예제 #45
0
    def get(self, request):
        """Prometheus endpoint: user-population counters.

        Each counter is read via MStatistics.get with a factory callable
        and set_default=True, so misses are computed and cached for one
        hour.
        """
        last_year = datetime.datetime.utcnow() - datetime.timedelta(days=365)
        last_month = datetime.datetime.utcnow() - datetime.timedelta(days=30)
        last_day = datetime.datetime.utcnow() - datetime.timedelta(minutes=60 *
                                                                   24)
        expiration_sec = 60 * 60  # 1 hour

        specs = (
            ('all', 'munin:users_count',
             lambda: User.objects.count()),
            ('yearly', 'munin:users_yearly',
             lambda: Profile.objects.filter(last_seen_on__gte=last_year).count()),
            ('monthly', 'munin:users_monthly',
             lambda: Profile.objects.filter(last_seen_on__gte=last_month).count()),
            ('daily', 'munin:users_daily',
             lambda: Profile.objects.filter(last_seen_on__gte=last_day).count()),
            ('premium', 'munin:users_premium',
             lambda: Profile.objects.filter(is_premium=True).count()),
            ('archive', 'munin:users_archive',
             lambda: Profile.objects.filter(is_archive=True).count()),
            ('pro', 'munin:users_pro',
             lambda: Profile.objects.filter(is_pro=True).count()),
            ('queued', 'munin:users_queued',
             lambda: RNewUserQueue.user_count()),
        )
        data = {}
        for label, stat_key, factory in specs:
            data[label] = MStatistics.get(stat_key,
                                          factory,
                                          set_default=True,
                                          expiration_sec=expiration_sec)

        chart_name = "users"
        chart_type = "counter"
        formatted_data = {
            label: f'{chart_name}{{category="{label}"}} {value}'
            for label, value in data.items()
        }
        context = {
            "data": formatted_data,
            "chart_name": chart_name,
            "chart_type": chart_type,
        }
        return render(request,
                      'monitor/prometheus_data.html',
                      context,
                      content_type="text/plain")
예제 #46
0
    def get(self, request):
        """Prometheus endpoint: latest average DB timings per backend.

        Reads the pre-collected 'latest_*' values from MStatistics; no
        computation happens here.
        """
        backends = (
            'sql_avg', 'mongo_avg', 'redis_avg', 'redis_user_avg',
            'redis_story_avg', 'redis_session_avg', 'redis_pubsub_avg',
            'task_sql_avg', 'task_mongo_avg', 'task_redis_avg',
            'task_redis_user_avg', 'task_redis_story_avg',
            'task_redis_session_avg', 'task_redis_pubsub_avg',
        )
        # Stats are stored under a 'latest_' prefix by the collector.
        data = {name: MStatistics.get(f'latest_{name}') for name in backends}

        chart_name = "db_times"
        chart_type = "counter"
        formatted_data = {
            name: f'{chart_name}{{db="{name}"}} {value}'
            for name, value in data.items()
        }
        context = {
            "data": formatted_data,
            "chart_name": chart_name,
            "chart_type": chart_type,
        }
        return render(request,
                      'monitor/prometheus_data.html',
                      context,
                      content_type="text/plain")
예제 #47
0
파일: views.py 프로젝트: zino974/NewsBlur
def dashboard_graphs(request):
    """Render the statistics-graphs fragment for the dashboard."""
    context = {'statistics': MStatistics.all()}
    return render(request, 'statistics/render_statistics_graphs.xhtml',
                  context)
예제 #48
0
 def run(self, **kwargs):
     """Celery task: collect fresh statistics, then prune stale entries."""
     logging.debug(" ---> Collecting stats...")
     MStatistics.collect_statistics()
     # Drop old stats after collecting so the freshest data is kept.
     MStatistics.delete_old_stats()
예제 #49
0
class ProcessFeed:
    """Processes one fetched feed (a feedparser result) and merges its
    stories into the database.

    Handles HTTP status codes, feed-metadata updates (etag, last-modified,
    title, tagline, link), story dedup/merge, PuSH (re)subscription, and
    push-notification queueing. Python 2 code (old-style except clause,
    `unicode`).
    """
    def __init__(self, feed_id, fpf, options, raw_feed=None):
        # fpf: the feedparser-parsed feed object for this fetch.
        # raw_feed: raw response body; only saved when this feed is being
        # debugged via the 'raw_feed' MStatistics flag (see process()).
        self.feed_id = feed_id
        self.options = options
        self.fpf = fpf
        self.raw_feed = raw_feed

    def refresh_feed(self):
        """Reload the Feed from the DB, following any feed merge.

        Feed.get_by_id can return a different feed than requested when
        the original was merged into a duplicate; track the new pk.
        """
        self.feed = Feed.get_by_id(self.feed_id)
        if self.feed_id != self.feed.pk:
            logging.debug(" ***> Feed has changed: from %s to %s" %
                          (self.feed_id, self.feed.pk))
            self.feed_id = self.feed.pk

    def process(self):
        """Process the parsed feed: handle the HTTP status, update feed
        metadata, and add/update stories.

        Returns a (status_code, ret_values) tuple where status_code is a
        FEED_* constant and ret_values counts new/updated/same/error
        stories.
        """
        start = time.time()
        self.refresh_feed()

        ret_values = dict(new=0, updated=0, same=0, error=0)

        if hasattr(self.fpf, 'status'):
            if self.options['verbose']:
                if self.fpf.bozo and self.fpf.status != 304:
                    logging.debug(
                        u'   ---> [%-30s] ~FRBOZO exception: %s ~SB(%s entries)'
                        % (self.feed.log_title[:30], self.fpf.bozo_exception,
                           len(self.fpf.entries)))

            # 304: nothing changed since the last fetch.
            if self.fpf.status == 304:
                self.feed = self.feed.save()
                self.feed.save_feed_history(304, "Not modified")
                return FEED_SAME, ret_values

            # 302 and 307: Temporary redirect: ignore
            # 301 and 308: Permanent redirect: save it (after 10 tries)
            if self.fpf.status == 301 or self.fpf.status == 308:
                if self.fpf.href.endswith('feedburner.com/atom.xml'):
                    return FEED_ERRHTTP, ret_values
                redirects, non_redirects = self.feed.count_redirects_in_history(
                    'feed')
                self.feed.save_feed_history(
                    self.fpf.status,
                    "HTTP Redirect (%d to go)" % (10 - len(redirects)))
                # Only adopt the new address once redirects dominate history.
                if len(redirects) >= 10 or len(non_redirects) == 0:
                    address = self.fpf.href
                    if self.options['force'] and address:
                        address = qurl(address, remove=['_'])
                    self.feed.feed_address = address
                if not self.feed.known_good:
                    self.feed.fetched_once = True
                    logging.debug(
                        "   ---> [%-30s] ~SB~SK~FRFeed is %s'ing. Refetching..."
                        % (self.feed.log_title[:30], self.fpf.status))
                    self.feed = self.feed.schedule_feed_fetch_immediately()
                if not self.fpf.entries:
                    self.feed = self.feed.save()
                    self.feed.save_feed_history(self.fpf.status,
                                                "HTTP Redirect")
                    return FEED_ERRHTTP, ret_values
            if self.fpf.status >= 400:
                # HTTP error: try to discover a working feed address from
                # the feed's page before recording the failure.
                logging.debug(
                    "   ---> [%-30s] ~SB~FRHTTP Status code: %s. Checking address..."
                    % (self.feed.log_title[:30], self.fpf.status))
                fixed_feed = None
                if not self.feed.known_good:
                    fixed_feed, feed = self.feed.check_feed_link_for_feed_address(
                    )
                if not fixed_feed:
                    self.feed.save_feed_history(self.fpf.status, "HTTP Error")
                else:
                    self.feed = feed
                self.feed = self.feed.save()
                return FEED_ERRHTTP, ret_values

        if not self.fpf:
            logging.debug(
                "   ---> [%-30s] ~SB~FRFeed is Non-XML. No feedparser feed either!"
                % (self.feed.log_title[:30]))
            self.feed.save_feed_history(551, "Broken feed")
            return FEED_ERRHTTP, ret_values

        if self.fpf and not self.fpf.entries:
            # Empty feed: distinguish non-XML content from XML parse errors.
            if self.fpf.bozo and isinstance(self.fpf.bozo_exception,
                                            feedparser.NonXMLContentType):
                logging.debug(
                    "   ---> [%-30s] ~SB~FRFeed is Non-XML. %s entries. Checking address..."
                    % (self.feed.log_title[:30], len(self.fpf.entries)))
                fixed_feed = None
                if not self.feed.known_good:
                    fixed_feed, feed = self.feed.check_feed_link_for_feed_address(
                    )
                if not fixed_feed:
                    self.feed.save_feed_history(552, 'Non-xml feed',
                                                self.fpf.bozo_exception)
                else:
                    self.feed = feed
                self.feed = self.feed.save()
                return FEED_ERRPARSE, ret_values
            elif self.fpf.bozo and isinstance(
                    self.fpf.bozo_exception, xml.sax._exceptions.SAXException):
                logging.debug(
                    "   ---> [%-30s] ~SB~FRFeed has SAX/XML parsing issues. %s entries. Checking address..."
                    % (self.feed.log_title[:30], len(self.fpf.entries)))
                fixed_feed = None
                if not self.feed.known_good:
                    fixed_feed, feed = self.feed.check_feed_link_for_feed_address(
                    )
                if not fixed_feed:
                    self.feed.save_feed_history(553, 'Not an RSS feed',
                                                self.fpf.bozo_exception)
                else:
                    self.feed = feed
                self.feed = self.feed.save()
                return FEED_ERRPARSE, ret_values

        # the feed has changed (or it is the first time we parse it)
        # saving the etag and last_modified fields
        original_etag = self.feed.etag
        self.feed.etag = self.fpf.get('etag')
        if self.feed.etag:
            self.feed.etag = self.feed.etag[:255]
        # some times this is None (it never should) *sigh*
        if self.feed.etag is None:
            self.feed.etag = ''
        if self.feed.etag != original_etag:
            self.feed.save(update_fields=['etag'])

        original_last_modified = self.feed.last_modified
        if hasattr(self.fpf, 'modified') and self.fpf.modified:
            try:
                self.feed.last_modified = datetime.datetime.strptime(
                    self.fpf.modified, '%a, %d %b %Y %H:%M:%S %Z')
            except Exception, e:
                self.feed.last_modified = None
                logging.debug("Broken mtime %s: %s" %
                              (self.feed.last_modified, e))
                pass
        if self.feed.last_modified != original_last_modified:
            self.feed.save(update_fields=['last_modified'])

        # Only the first 100 entries are processed per fetch.
        self.fpf.entries = self.fpf.entries[:100]

        original_title = self.feed.feed_title
        if self.fpf.feed.get('title'):
            self.feed.feed_title = strip_tags(self.fpf.feed.get('title'))
        if self.feed.feed_title != original_title:
            self.feed.save(update_fields=['feed_title'])

        tagline = self.fpf.feed.get('tagline', self.feed.data.feed_tagline)
        if tagline:
            original_tagline = self.feed.data.feed_tagline
            self.feed.data.feed_tagline = smart_unicode(tagline)
            if self.feed.data.feed_tagline != original_tagline:
                self.feed.data.save(update_fields=['feed_tagline'])

        if not self.feed.feed_link_locked:
            new_feed_link = self.fpf.feed.get('link') or self.fpf.feed.get(
                'id') or self.feed.feed_link
            if self.options['force'] and new_feed_link:
                new_feed_link = qurl(new_feed_link, remove=['_'])
            if new_feed_link != self.feed.feed_link:
                logging.debug(
                    "   ---> [%-30s] ~SB~FRFeed's page is different: %s to %s"
                    % (self.feed.log_title[:30], self.feed.feed_link,
                       new_feed_link))
                redirects, non_redirects = self.feed.count_redirects_in_history(
                    'page')
                self.feed.save_page_history(
                    301, "HTTP Redirect (%s to go)" % (10 - len(redirects)))
                if len(redirects) >= 10 or len(non_redirects) == 0:
                    self.feed.feed_link = new_feed_link
                    self.feed.save(update_fields=['feed_link'])

        # Determine if stories aren't valid and replace broken guids
        # (a feed where every story shares one guid or one permalink
        # needs synthetic guids to keep stories distinct).
        guids_seen = set()
        permalinks_seen = set()
        for entry in self.fpf.entries:
            guids_seen.add(entry.get('guid'))
            permalinks_seen.add(Feed.get_permalink(entry))
        guid_difference = len(guids_seen) != len(self.fpf.entries)
        single_guid = len(guids_seen) == 1
        replace_guids = single_guid and guid_difference
        permalink_difference = len(permalinks_seen) != len(self.fpf.entries)
        single_permalink = len(permalinks_seen) == 1
        replace_permalinks = single_permalink and permalink_difference

        # Compare new stories to existing stories, adding and updating
        start_date = datetime.datetime.utcnow()
        story_hashes = []
        stories = []
        for entry in self.fpf.entries:
            story = pre_process_story(entry, self.fpf.encoding)
            if not story['title'] and not story['story_content']: continue
            if story.get('published') < start_date:
                start_date = story.get('published')
            if replace_guids:
                if replace_permalinks:
                    # Both guid and permalink are shared: fall back to the
                    # publish timestamp as the guid.
                    new_story_guid = unicode(story.get('published'))
                    if self.options['verbose']:
                        logging.debug(
                            u'   ---> [%-30s] ~FBReplacing guid (%s) with timestamp: %s'
                            % (self.feed.log_title[:30], story.get('guid'),
                               new_story_guid))
                    story['guid'] = new_story_guid
                else:
                    new_story_guid = Feed.get_permalink(story)
                    if self.options['verbose']:
                        logging.debug(
                            u'   ---> [%-30s] ~FBReplacing guid (%s) with permalink: %s'
                            % (self.feed.log_title[:30], story.get('guid'),
                               new_story_guid))
                    story['guid'] = new_story_guid
            story['story_hash'] = MStory.feed_guid_hash_unsaved(
                self.feed.pk, story.get('guid'))
            stories.append(story)
            story_hashes.append(story.get('story_hash'))

        # Also look up recent unread-cutoff hashes so updates to older
        # stories are matched rather than duplicated.
        original_story_hash_count = len(story_hashes)
        story_hashes_in_unread_cutoff = self.feed.story_hashes_in_unread_cutoff[:
                                                                                original_story_hash_count]
        story_hashes.extend(story_hashes_in_unread_cutoff)
        story_hashes = list(set(story_hashes))
        if self.options['verbose'] or settings.DEBUG:
            logging.debug(
                u'   ---> [%-30s] ~FBFound ~SB%s~SN guids, adding ~SB%s~SN/%s guids from db'
                % (self.feed.log_title[:30], original_story_hash_count,
                   len(story_hashes) - original_story_hash_count,
                   len(story_hashes_in_unread_cutoff)))

        existing_stories = dict((s.story_hash, s) for s in MStory.objects(
            story_hash__in=story_hashes,
            # story_date__gte=start_date,
            # story_feed_id=self.feed.pk
        ))
        # if len(existing_stories) == 0:
        #     existing_stories = dict((s.story_hash, s) for s in MStory.objects(
        #         story_date__gte=start_date,
        #         story_feed_id=self.feed.pk
        #     ))

        ret_values = self.feed.add_update_stories(
            stories,
            existing_stories,
            verbose=self.options['verbose'],
            updates_off=self.options['updates_off'])

        # PubSubHubbub
        if (hasattr(self.fpf, 'feed') and hasattr(self.fpf.feed, 'links')
                and self.fpf.feed.links):
            hub_url = None
            self_url = self.feed.feed_address
            for link in self.fpf.feed.links:
                if link['rel'] == 'hub' and not hub_url:
                    hub_url = link['href']
                elif link['rel'] == 'self':
                    self_url = link['href']
            push_expired = False
            if self.feed.is_push:
                try:
                    push_expired = self.feed.push.lease_expires < datetime.datetime.now(
                    )
                except PushSubscription.DoesNotExist:
                    self.feed.is_push = False
            # (Re)subscribe when a hub is advertised, the lease expired or
            # the feed isn't subscribed yet, and there are active readers.
            if (hub_url and self_url and not settings.DEBUG
                    and self.feed.active_subscribers > 0
                    and (push_expired or not self.feed.is_push
                         or self.options.get('force'))):
                logging.debug(
                    u'   ---> [%-30s] ~BB~FW%sSubscribing to PuSH hub: %s' %
                    (self.feed.log_title[:30],
                     "~SKRe-~SN" if push_expired else "", hub_url))
                try:
                    PushSubscription.objects.subscribe(self_url,
                                                       feed=self.feed,
                                                       hub=hub_url)
                except TimeoutError:
                    logging.debug(
                        u'   ---> [%-30s] ~BB~FW~FRTimed out~FW subscribing to PuSH hub: %s'
                        % (self.feed.log_title[:30], hub_url))
            elif (self.feed.is_push
                  and (self.feed.active_subscribers <= 0 or not hub_url)):
                logging.debug(
                    u'   ---> [%-30s] ~BB~FWTurning off PuSH, no hub found' %
                    (self.feed.log_title[:30]))
                self.feed.is_push = False
                self.feed = self.feed.save()

        # Push notifications
        if ret_values['new'] > 0 and MUserFeedNotification.feed_has_users(
                self.feed.pk) > 0:
            QueueNotifications.delay(self.feed.pk, ret_values['new'])

        # All Done
        logging.debug(
            u'   ---> [%-30s] ~FYParsed Feed: %snew=%s~SN~FY %sup=%s~SN same=%s%s~SN %serr=%s~SN~FY total=~SB%s'
            % (self.feed.log_title[:30], '~FG~SB' if ret_values['new'] else '',
               ret_values['new'], '~FY~SB' if ret_values['updated'] else '',
               ret_values['updated'], '~SB' if ret_values['same'] else '',
               ret_values['same'], '~FR~SB' if ret_values['error'] else '',
               ret_values['error'], len(self.fpf.entries)))
        self.feed.update_all_statistics(has_new_stories=bool(
            ret_values['new']),
                                        force=self.options['force'])
        fetch_date = datetime.datetime.now()
        if ret_values['new']:
            if not getattr(settings, 'TEST_DEBUG', False):
                self.feed.trim_feed()
                self.feed.expire_redis()
            # Save the raw response only for the feed currently flagged
            # for debugging via the 'raw_feed' stat.
            if MStatistics.get('raw_feed', None) == self.feed.pk:
                self.feed.save_raw_feed(self.raw_feed, fetch_date)
        self.feed.save_feed_history(200, "OK", date=fetch_date)

        if self.options['verbose']:
            logging.debug(u'   ---> [%-30s] ~FBTIME: feed parse in ~FM%.4ss' %
                          (self.feed.log_title[:30], time.time() - start))

        return FEED_OK, ret_values