Example 1 (score: 0)
def load_feed_statistics(request):
    """Collect display statistics for the feed named by the ``feed_id`` GET param.

    Returns a dict covering update timing, monthly story volume, and the
    subscriber count. Raises Http404 when the feed does not exist.
    """
    feed = get_object_or_404(Feed, pk=request.GET['feed_id'])
    feed.save_feed_story_history_statistics()

    # Minutes between updates (the random jitter factor is not reported)
    interval, _jitter = feed.get_next_scheduled_update()

    history_json = feed.story_count_history
    stats = {
        # Dates of last and next update
        'last_update': relative_timesince(feed.last_update),
        'next_update': relative_timeuntil(feed.next_scheduled_update),
        'update_interval_minutes': interval,
        # Stories per month - average and month-by-month breakout
        'average_stories_per_month': feed.average_stories_per_month,
        'story_count_history': history_json and json.decode(history_json),
        # Subscribers
        'subscriber_count': feed.num_subscribers,
    }

    logging.info(" ---> [%s] Statistics: %s" % (request.user, feed))

    return stats
Example 2 (score: 0)
def load_feed_statistics(request, feed_id):
    """Collect statistics for one feed: update scheduling (real and a
    hypothetical premium-subscriber interval), story history, subscriber
    counts, classifier counts, and fetch/push histories in the requesting
    user's timezone.
    """
    user = get_user(request)
    feed = get_object_or_404(Feed, pk=feed_id)

    # Refresh the cached/derived numbers before reporting them.
    feed.count_subscribers()
    feed.set_next_scheduled_update(verbose=True, skip_scheduling=True)
    feed.save_feed_story_history_statistics()
    feed.save_classifier_counts()

    stats = {}

    # Dates of last and next update ("real-time..." for push feeds)
    stats['active'] = feed.active
    stats['last_update'] = relative_timesince(feed.last_update)
    stats['next_update'] = ("real-time..." if feed.is_push else
                            relative_timeuntil(feed.next_scheduled_update))

    # Minutes between updates (push feeds update instantly)
    interval, _ = feed.get_next_scheduled_update(force=True, verbose=False)
    stats['update_interval_minutes'] = 0 if feed.is_push else interval

    # Recompute the interval as though one more premium subscriber existed,
    # then put the real counts back.
    saved_active_premium = feed.active_premium_subscribers
    saved_premium = feed.premium_subscribers
    feed.active_premium_subscribers = max(saved_active_premium + 1, 1)
    feed.premium_subscribers = saved_premium + 1
    premium_interval, _ = feed.get_next_scheduled_update(force=True, verbose=False)
    feed.active_premium_subscribers = saved_active_premium
    feed.premium_subscribers = saved_premium
    stats['premium_update_interval_minutes'] = (0 if feed.is_push
                                                else premium_interval)

    # Stories per month - average and month-by-month breakout
    history = feed.data.story_count_history
    stats['average_stories_per_month'] = feed.average_stories_per_month
    stats['story_count_history'] = history and json.decode(history)

    # Subscribers
    stats['subscriber_count'] = feed.num_subscribers
    stats['stories_last_month'] = feed.stories_last_month
    stats['last_load_time'] = feed.last_load_time
    stats['premium_subscribers'] = feed.premium_subscribers
    stats['active_subscribers'] = feed.active_subscribers
    stats['active_premium_subscribers'] = feed.active_premium_subscribers

    # Classifier counts
    stats['classifier_counts'] = json.decode(feed.data.feed_classifier_counts)

    # Fetch histories, localized to the requesting user's timezone
    tz = user.profile.timezone
    stats['feed_fetch_history'] = MFeedFetchHistory.feed_history(feed_id, timezone=tz)
    stats['page_fetch_history'] = MPageFetchHistory.feed_history(feed_id, timezone=tz)
    stats['feed_push_history'] = MFeedPushHistory.feed_history(feed_id, timezone=tz)

    logging.user(request, "~FBStatistics: ~SB%s ~FG(%s/%s/%s subs)" % (feed, feed.num_subscribers, feed.active_subscribers, feed.premium_subscribers,))

    return stats
Example 3 (score: 0)
def load_feed_statistics(request):
    """Build a statistics dict for the feed named by the ``feed_id`` GET param.

    Covers update timing, monthly story volume, subscriber counts, and
    feed/page fetch histories. Raises Http404 for unknown feeds.
    """
    feed_id = request.GET['feed_id']
    feed = get_object_or_404(Feed, pk=feed_id)
    feed.save_feed_story_history_statistics()

    # Minutes between updates (ignore the random scheduling jitter)
    interval, _jitter = feed.get_next_scheduled_update()

    history = feed.data.story_count_history
    stats = {
        # Dates of last and next update
        'last_update': relative_timesince(feed.last_update),
        'next_update': relative_timeuntil(feed.next_scheduled_update),
        'update_interval_minutes': interval,
        # Stories per month - average and month-by-month breakout
        'average_stories_per_month': feed.average_stories_per_month,
        'story_count_history': history and json.decode(history),
        # Subscribers
        'subscriber_count': feed.num_subscribers,
        'stories_last_month': feed.stories_last_month,
        'last_load_time': feed.last_load_time,
        'premium_subscribers': feed.premium_subscribers,
        'active_subscribers': feed.active_subscribers,
        # Fetch histories
        'feed_fetch_history': MFeedFetchHistory.feed_history(feed_id),
        'page_fetch_history': MPageFetchHistory.feed_history(feed_id),
    }

    logging.user(request.user, "~FBStatistics: ~SB%s ~FG(%s/%s/%s subs)" % (feed, feed.num_subscribers, feed.active_subscribers, feed.premium_subscribers,))

    return stats
Example 4 (score: 0)
def load_feed_statistics(request, feed_id):
    """Assemble the statistics payload for a single feed.

    Refreshes the feed's cached statistics, then reports update scheduling
    (including a hypothetical premium-subscriber interval), story history,
    subscriber counts, classifier counts, and fetch histories localized to
    the requesting user's timezone.
    """
    user = get_user(request)
    feed = get_object_or_404(Feed, pk=feed_id)

    # Refresh derived numbers before reading them back.
    feed.update_all_statistics()
    feed.set_next_scheduled_update(verbose=True, skip_scheduling=True)
    feed.save_feed_story_history_statistics()
    feed.save_classifier_counts()

    stats = {}

    # Dates of last and next update
    stats["active"] = feed.active
    stats["last_update"] = relative_timesince(feed.last_update)
    stats["next_update"] = relative_timeuntil(feed.next_scheduled_update)
    stats["push"] = feed.is_push

    # Minutes between updates
    stats["update_interval_minutes"] = feed.get_next_scheduled_update(
        force=True, verbose=False)

    # Pretend one extra premium subscriber exists to compute the premium
    # refresh interval, then restore the real counts.
    saved_active_premium = feed.active_premium_subscribers
    saved_premium = feed.premium_subscribers
    feed.active_premium_subscribers = max(saved_active_premium + 1, 1)
    feed.premium_subscribers = saved_premium + 1
    stats["premium_update_interval_minutes"] = feed.get_next_scheduled_update(
        force=True, verbose=False)
    feed.active_premium_subscribers = saved_active_premium
    feed.premium_subscribers = saved_premium
    stats["errors_since_good"] = feed.errors_since_good

    # Stories per month - average and month-by-month breakout
    history = feed.data.story_count_history
    stats["average_stories_per_month"] = feed.average_stories_per_month
    stats["story_count_history"] = history and json.decode(history)

    # Subscribers
    stats["subscriber_count"] = feed.num_subscribers
    stats["stories_last_month"] = feed.stories_last_month
    stats["last_load_time"] = feed.last_load_time
    stats["premium_subscribers"] = feed.premium_subscribers
    stats["active_subscribers"] = feed.active_subscribers
    stats["active_premium_subscribers"] = feed.active_premium_subscribers

    # Classifier counts
    stats["classifier_counts"] = json.decode(feed.data.feed_classifier_counts)

    # Fetch histories, in the user's timezone
    fetch_history = MFetchHistory.feed(feed_id, timezone=user.profile.timezone)
    stats["feed_fetch_history"] = fetch_history["feed_fetch_history"]
    stats["page_fetch_history"] = fetch_history["page_fetch_history"]
    stats["feed_push_history"] = fetch_history["push_history"]

    logging.user(request, "~FBStatistics: ~SB%s" % (feed))

    return stats
Example 5 (score: 0)
def load_feed_statistics(request, feed_id):
    """Return a dict of display statistics for one feed.

    Refreshes the feed's cached statistics, then reports update scheduling
    (real and hypothetical-premium intervals), push-subscription status,
    story-count breakdowns (months/days/hours), subscriber counts,
    classifier counts, and fetch histories in the user's timezone.
    Raises Http404 when the feed does not exist.
    """
    user = get_user(request)
    timezone = user.profile.timezone
    stats = dict()
    feed = get_object_or_404(Feed, pk=feed_id)
    feed.update_all_statistics()
    feed.set_next_scheduled_update(verbose=True, skip_scheduling=True)
    feed.save_feed_story_history_statistics()
    feed.save_classifier_counts()
    
    # Dates of last and next update
    stats['active'] = feed.active
    stats['last_update'] = relative_timesince(feed.last_update)
    stats['next_update'] = relative_timeuntil(feed.next_scheduled_update)
    stats['push'] = feed.is_push
    if feed.is_push:
        try:
            stats['push_expires'] = localtime_for_timezone(feed.push.lease_expires, 
                                                           timezone).strftime("%Y-%m-%d %H:%M:%S")
        except PushSubscription.DoesNotExist:
            # Stale push flag: report it and drop the feed back to polling.
            stats['push_expires'] = 'Missing push'
            feed.is_push = False
            feed.save()

    # Minutes between updates
    update_interval_minutes = feed.get_next_scheduled_update(force=True, verbose=False)
    stats['update_interval_minutes'] = update_interval_minutes
    # Temporarily pretend one more premium subscriber exists to compute the
    # premium-speed interval, then restore the real counts.
    original_active_premium_subscribers = feed.active_premium_subscribers
    original_premium_subscribers = feed.premium_subscribers
    feed.active_premium_subscribers = max(feed.active_premium_subscribers+1, 1)
    feed.premium_subscribers += 1
    premium_update_interval_minutes = feed.get_next_scheduled_update(force=True, verbose=False,
                                                                     premium_speed=True)
    feed.active_premium_subscribers = original_active_premium_subscribers
    feed.premium_subscribers = original_premium_subscribers
    stats['premium_update_interval_minutes'] = premium_update_interval_minutes
    stats['errors_since_good'] = feed.errors_since_good
    
    # Stories per month - average and month-by-month breakout
    average_stories_per_month, story_count_history = feed.average_stories_per_month, feed.data.story_count_history
    stats['average_stories_per_month'] = average_stories_per_month
    story_count_history = story_count_history and json.decode(story_count_history)
    if story_count_history and isinstance(story_count_history, dict):
        stats['story_count_history'] = story_count_history['months']
        stats['story_days_history'] = story_count_history['days']
        stats['story_hours_history'] = story_count_history['hours']
    else:
        stats['story_count_history'] = story_count_history
    
    # Rotate hours to match user's timezone offset. Guarded: the hours
    # breakdown only exists when the stored history used the dict format
    # above; without the guard this raised KeyError for legacy or empty
    # histories.
    if 'story_hours_history' in stats:
        localoffset = timezone.utcoffset(datetime.datetime.utcnow())
        hours_offset = int(localoffset.total_seconds() / 3600)
        rotated_hours = {}
        for hour, value in stats['story_hours_history'].items():
            # NOTE(review): keys can leave the 0-23 range for non-UTC users;
            # confirm whether the client expects a % 24 wrap here.
            rotated_hours[str(int(hour)+hours_offset)] = value
        stats['story_hours_history'] = rotated_hours
    
    # Subscribers
    stats['subscriber_count'] = feed.num_subscribers
    stats['num_subscribers'] = feed.num_subscribers
    stats['stories_last_month'] = feed.stories_last_month
    stats['last_load_time'] = feed.last_load_time
    stats['premium_subscribers'] = feed.premium_subscribers
    stats['active_subscribers'] = feed.active_subscribers
    stats['active_premium_subscribers'] = feed.active_premium_subscribers

    # Classifier counts
    stats['classifier_counts'] = json.decode(feed.data.feed_classifier_counts)
    
    # Fetch histories
    fetch_history = MFetchHistory.feed(feed_id, timezone=timezone)
    stats['feed_fetch_history'] = fetch_history['feed_fetch_history']
    stats['page_fetch_history'] = fetch_history['page_fetch_history']
    stats['feed_push_history'] = fetch_history['push_history']
    
    logging.user(request, "~FBStatistics: ~SB%s" % (feed))

    return stats
Example 6 (score: 0)
def assemble_statistics(user, feed_id):
    """Build the statistics dict for one feed on behalf of *user*.

    Refreshes the feed's cached statistics, then reports update scheduling
    (real and hypothetical-premium intervals), push-subscription status,
    story-count breakdowns (months/days/hours), subscriber counts,
    classifier counts, and fetch histories in the user's timezone.
    Raises Http404 when the feed does not exist.
    """
    timezone = user.profile.timezone
    stats = dict()
    feed = get_object_or_404(Feed, pk=feed_id)
    feed.update_all_statistics()
    feed.set_next_scheduled_update(verbose=True, skip_scheduling=True)
    feed.save_feed_story_history_statistics()
    feed.save_classifier_counts()

    # Dates of last and next update
    stats['active'] = feed.active
    stats['last_update'] = relative_timesince(feed.last_update)
    stats['next_update'] = relative_timeuntil(feed.next_scheduled_update)
    stats['push'] = feed.is_push
    if feed.is_push:
        try:
            stats['push_expires'] = localtime_for_timezone(
                feed.push.lease_expires,
                timezone).strftime("%Y-%m-%d %H:%M:%S")
        except PushSubscription.DoesNotExist:
            # Stale push flag: report it and drop the feed back to polling.
            stats['push_expires'] = 'Missing push'
            feed.is_push = False
            feed.save()

    # Minutes between updates
    update_interval_minutes = feed.get_next_scheduled_update(force=True,
                                                             verbose=False)
    stats['update_interval_minutes'] = update_interval_minutes
    # Temporarily pretend one more premium subscriber exists to compute the
    # premium-speed interval, then restore the real counts.
    original_active_premium_subscribers = feed.active_premium_subscribers
    original_premium_subscribers = feed.premium_subscribers
    feed.active_premium_subscribers = max(feed.active_premium_subscribers + 1,
                                          1)
    feed.premium_subscribers += 1
    premium_update_interval_minutes = feed.get_next_scheduled_update(
        force=True, verbose=False, premium_speed=True)
    feed.active_premium_subscribers = original_active_premium_subscribers
    feed.premium_subscribers = original_premium_subscribers
    stats['premium_update_interval_minutes'] = premium_update_interval_minutes
    stats['errors_since_good'] = feed.errors_since_good

    # Stories per month - average and month-by-month breakout
    average_stories_per_month, story_count_history = feed.average_stories_per_month, feed.data.story_count_history
    stats['average_stories_per_month'] = average_stories_per_month
    story_count_history = story_count_history and json.decode(
        story_count_history)
    if story_count_history and isinstance(story_count_history, dict):
        stats['story_count_history'] = story_count_history['months']
        stats['story_days_history'] = story_count_history['days']
        stats['story_hours_history'] = story_count_history['hours']
    else:
        stats['story_count_history'] = story_count_history

    # Rotate hours to match user's timezone offset. Guarded: the hours
    # breakdown only exists when the stored history used the dict format
    # above; without the guard this raised KeyError for legacy or empty
    # histories.
    if 'story_hours_history' in stats:
        localoffset = timezone.utcoffset(datetime.datetime.utcnow())
        hours_offset = int(localoffset.total_seconds() / 3600)
        rotated_hours = {}
        for hour, value in stats['story_hours_history'].items():
            # NOTE(review): keys can leave the 0-23 range for non-UTC users;
            # confirm whether the client expects a % 24 wrap here.
            rotated_hours[str(int(hour) + hours_offset)] = value
        stats['story_hours_history'] = rotated_hours

    # Subscribers
    stats['subscriber_count'] = feed.num_subscribers
    stats['num_subscribers'] = feed.num_subscribers
    stats['stories_last_month'] = feed.stories_last_month
    stats['last_load_time'] = feed.last_load_time
    stats['premium_subscribers'] = feed.premium_subscribers
    stats['active_subscribers'] = feed.active_subscribers
    stats['active_premium_subscribers'] = feed.active_premium_subscribers

    # Classifier counts
    stats['classifier_counts'] = json.decode(feed.data.feed_classifier_counts)

    # Fetch histories
    fetch_history = MFetchHistory.feed(feed_id, timezone=timezone)
    stats['feed_fetch_history'] = fetch_history['feed_fetch_history']
    stats['page_fetch_history'] = fetch_history['page_fetch_history']
    stats['feed_push_history'] = fetch_history['push_history']

    return stats
Example 7 (score: 0)
def load_feed_statistics(request, feed_id):
    """Collect statistics for one feed: update scheduling (with a
    hypothetical premium-subscriber interval), story history, subscriber
    counts, classifier counts, and fetch/push histories.
    """
    feed = get_object_or_404(Feed, pk=feed_id)
    feed.save_feed_story_history_statistics()
    feed.save_classifier_counts()

    stats = {}

    # Dates of last and next update ("real-time..." for push feeds)
    stats['active'] = feed.active
    stats['last_update'] = relative_timesince(feed.last_update)
    stats['next_update'] = ("real-time..." if feed.is_push else
                            relative_timeuntil(feed.next_scheduled_update))

    # Minutes between updates (push feeds update instantly)
    interval, _ = feed.get_next_scheduled_update(force=True)
    stats['update_interval_minutes'] = 0 if feed.is_push else interval

    # Recompute the interval as though one more premium subscriber existed,
    # then put the real counts back.
    saved_active_premium = feed.active_premium_subscribers
    saved_premium = feed.premium_subscribers
    feed.active_premium_subscribers = max(saved_active_premium + 1, 1)
    feed.premium_subscribers = saved_premium + 1
    premium_interval, _ = feed.get_next_scheduled_update(force=True)
    feed.active_premium_subscribers = saved_active_premium
    feed.premium_subscribers = saved_premium
    stats['premium_update_interval_minutes'] = (0 if feed.is_push
                                                else premium_interval)

    # Stories per month - average and month-by-month breakout
    history = feed.data.story_count_history
    stats['average_stories_per_month'] = feed.average_stories_per_month
    stats['story_count_history'] = history and json.decode(history)

    # Subscribers
    stats['subscriber_count'] = feed.num_subscribers
    stats['stories_last_month'] = feed.stories_last_month
    stats['last_load_time'] = feed.last_load_time
    stats['premium_subscribers'] = feed.premium_subscribers
    stats['active_subscribers'] = feed.active_subscribers
    stats['active_premium_subscribers'] = feed.active_premium_subscribers

    # Classifier counts
    stats['classifier_counts'] = json.decode(feed.data.feed_classifier_counts)

    # Fetch histories
    stats['feed_fetch_history'] = MFeedFetchHistory.feed_history(feed_id)
    stats['page_fetch_history'] = MPageFetchHistory.feed_history(feed_id)
    stats['feed_push_history'] = MFeedPushHistory.feed_history(feed_id)

    logging.user(
        request, "~FBStatistics: ~SB%s ~FG(%s/%s/%s subs)" % (
            feed,
            feed.num_subscribers,
            feed.active_subscribers,
            feed.premium_subscribers,
        ))

    return stats
Example 8 (score: 0)
def load_feed_statistics(request, feed_id):
    """Return a statistics dict for one feed: scheduling, push status,
    story history, subscriber counts, classifier counts, and fetch
    histories localized to the requesting user's timezone.
    """
    user = get_user(request)
    tz = user.profile.timezone
    feed = get_object_or_404(Feed, pk=feed_id)

    # Refresh cached/derived numbers before reading them back.
    feed.update_all_statistics()
    feed.set_next_scheduled_update(verbose=True, skip_scheduling=True)
    feed.save_feed_story_history_statistics()
    feed.save_classifier_counts()

    stats = {
        # Dates of last and next update
        'active': feed.active,
        'last_update': relative_timesince(feed.last_update),
        'next_update': relative_timeuntil(feed.next_scheduled_update),
        'push': feed.is_push,
    }
    if feed.is_push:
        try:
            expires = localtime_for_timezone(feed.push.lease_expires, tz)
            stats['push_expires'] = expires.strftime("%Y-%m-%d %H:%M:%S")
        except PushSubscription.DoesNotExist:
            # Stale push flag: report it and drop back to polling.
            stats['push_expires'] = 'Missing push'
            feed.is_push = False
            feed.save()

    # Minutes between updates
    stats['update_interval_minutes'] = feed.get_next_scheduled_update(
        force=True, verbose=False)

    # Compute the premium-speed interval by pretending one extra premium
    # subscriber exists, then restoring the real counts.
    saved_active_premium = feed.active_premium_subscribers
    saved_premium = feed.premium_subscribers
    feed.active_premium_subscribers = max(saved_active_premium + 1, 1)
    feed.premium_subscribers = saved_premium + 1
    stats['premium_update_interval_minutes'] = feed.get_next_scheduled_update(
        force=True, verbose=False, premium_speed=True)
    feed.active_premium_subscribers = saved_active_premium
    feed.premium_subscribers = saved_premium
    stats['errors_since_good'] = feed.errors_since_good

    # Stories per month - average and month-by-month breakout
    history = feed.data.story_count_history
    stats['average_stories_per_month'] = feed.average_stories_per_month
    stats['story_count_history'] = history and json.decode(history)

    # Subscribers
    stats['subscriber_count'] = feed.num_subscribers
    stats['num_subscribers'] = feed.num_subscribers
    stats['stories_last_month'] = feed.stories_last_month
    stats['last_load_time'] = feed.last_load_time
    stats['premium_subscribers'] = feed.premium_subscribers
    stats['active_subscribers'] = feed.active_subscribers
    stats['active_premium_subscribers'] = feed.active_premium_subscribers

    # Classifier counts
    stats['classifier_counts'] = json.decode(feed.data.feed_classifier_counts)

    # Fetch histories
    fetch_history = MFetchHistory.feed(feed_id, timezone=tz)
    stats['feed_fetch_history'] = fetch_history['feed_fetch_history']
    stats['page_fetch_history'] = fetch_history['page_fetch_history']
    stats['feed_push_history'] = fetch_history['push_history']

    logging.user(request, "~FBStatistics: ~SB%s" % (feed))

    return stats