Example #1
def load_feed_statistics(request):
    stats = dict()
    feed_id = request.GET['feed_id']
    feed = get_object_or_404(Feed, pk=feed_id)
    feed.save_feed_story_history_statistics()
    
    # Dates of last and next update
    stats['last_update'] = relative_timesince(feed.last_update)
    stats['next_update'] = relative_timeuntil(feed.next_scheduled_update)
    
    # Minutes between updates
    update_interval_minutes, random_factor = feed.get_next_scheduled_update()
    stats['update_interval_minutes'] = update_interval_minutes
    
    # Stories per month - average and month-by-month breakout
    average_stories_per_month, story_count_history = feed.average_stories_per_month, feed.story_count_history
    stats['average_stories_per_month'] = average_stories_per_month
    stats['story_count_history'] = story_count_history and json.decode(story_count_history)
    
    # Subscribers
    stats['subscriber_count'] = feed.num_subscribers
    
    logging.info(" ---> [%s] Statistics: %s" % (request.user, feed))
    
    return stats
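
Note: the examples in this listing lean on a few project helpers that are not shown. A hedged stand-in for how they read (assumptions, not the project's actual implementations): json.decode/json.encode behave like the stdlib loads/dumps, and relative_timesince/relative_timeuntil format a datetime with Django's timesince utilities.

import json as stdlib_json

from django.utils.timesince import timesince, timeuntil


def decode(data):
    # json.decode in the examples reads as "parse JSON text"
    return stdlib_json.loads(data)

def encode(data):
    # json.encode reads as "serialize to JSON text"
    return stdlib_json.dumps(data)

def relative_timesince(value):
    # "3 hours"-style label for a past datetime (e.g. the last fetch)
    return timesince(value) if value else ''

def relative_timeuntil(value):
    # Same idea for a future datetime (e.g. the next scheduled fetch)
    return timeuntil(value) if value else ''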
Example #2
def load_feed_statistics(request):
    stats = dict()
    feed_id = request.GET['feed_id']
    feed = get_object_or_404(Feed, pk=feed_id)
    feed.save_feed_story_history_statistics()
    
    # Dates of last and next update
    stats['last_update'] = relative_timesince(feed.last_update)
    stats['next_update'] = relative_timeuntil(feed.next_scheduled_update)
    
    # Minutes between updates
    update_interval_minutes, random_factor = feed.get_next_scheduled_update()
    stats['update_interval_minutes'] = update_interval_minutes
    
    # Stories per month - average and month-by-month breakout
    average_stories_per_month, story_count_history = feed.average_stories_per_month, feed.data.story_count_history
    stats['average_stories_per_month'] = average_stories_per_month
    stats['story_count_history'] = story_count_history and json.decode(story_count_history)
    
    # Subscribers
    stats['subscriber_count'] = feed.num_subscribers
    stats['stories_last_month'] = feed.stories_last_month
    stats['last_load_time'] = feed.last_load_time
    stats['premium_subscribers'] = feed.premium_subscribers
    stats['active_subscribers'] = feed.active_subscribers
    
    # Fetch histories
    stats['feed_fetch_history'] = MFeedFetchHistory.feed_history(feed_id)
    stats['page_fetch_history'] = MPageFetchHistory.feed_history(feed_id)
    
    logging.user(request.user, "~FBStatistics: ~SB%s ~FG(%s/%s/%s subs)" % (feed, feed.num_subscribers, feed.active_subscribers, feed.premium_subscribers,))

    return stats
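
These views return plain dicts rather than HttpResponse objects, which implies a JSON-serializing decorator or middleware in the surrounding project. A minimal, hypothetical wiring (route name and module layout are assumptions):

# urls.py sketch, not from the listing
from django.urls import path

from . import views

urlpatterns = [
    path('feed_statistics/', views.load_feed_statistics),
]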
Example #3
    def canonical(self, full=False):
        feed = {
            'id': self.pk,
            'feed_title': self.feed_title,
            'feed_address': self.feed_address,
            'feed_link': self.feed_link,
            'updated': relative_timesince(self.last_update),
            'subs': self.num_subscribers,
            'favicon': self.icon.data,
            'favicon_color': self.icon.color,
            'favicon_fetching': bool(not (self.icon.not_found or self.icon.data))
        }
        
        if not self.fetched_once:
            feed['not_yet_fetched'] = True
        if self.has_page_exception or self.has_feed_exception:
            feed['has_exception'] = True
            feed['exception_type'] = 'feed' if self.has_feed_exception else 'page'
            feed['exception_code'] = self.exception_code
        elif full:
            feed['has_exception'] = False
            feed['exception_type'] = None
            feed['exception_code'] = self.exception_code
        
        if full:
            feed['feed_tags'] = json.decode(self.data.popular_tags) if self.data.popular_tags else []
            feed['feed_authors'] = json.decode(self.data.popular_authors) if self.data.popular_authors else []

            
        return feed
Example #4
    def canonical(self, full=False):
        feed = {
            'id': self.feed.pk,
            'feed_title': self.user_title or self.feed.feed_title,
            'feed_address': self.feed.feed_address,
            'feed_link': self.feed.feed_link,
            'ps': self.unread_count_positive,
            'nt': self.unread_count_neutral,
            'ng': self.unread_count_negative, 
            'updated': relative_timesince(self.feed.last_update),
            'subs': self.feed.num_subscribers,
            'active': self.active,
            'favicon': self.feed.icon.data,
            'favicon_color': self.feed.icon.color,
            'favicon_fetching': bool(not (self.feed.icon.not_found or self.feed.icon.data))
        }
        
        if not self.feed.fetched_once:
            feed['not_yet_fetched'] = True
        if self.feed.has_page_exception or self.feed.has_feed_exception:
            feed['has_exception'] = True
            feed['exception_type'] = 'feed' if self.feed.has_feed_exception else 'page'
            feed['exception_code'] = self.feed.exception_code
        elif full:
            feed['has_exception'] = False
            feed['exception_type'] = None
            feed['exception_code'] = self.feed.exception_code

        return feed
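
Examples #3 and #4 are two canonical() serializers: #3 lives on the feed model itself, while #4 belongs to a per-user subscription and layers the user's custom title and unread counts over the same feed fields. Reading the short keys against the field names in Example #4:

# Annotated reading of Example #4's unread-count keys (a sketch, not project code)
payload = {
    'ps': 4,   # unread_count_positive: focused stories
    'nt': 12,  # unread_count_neutral: unfiltered stories
    'ng': 2,   # unread_count_negative: hidden stories
}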
Example #5
def load_feed_statistics(request, feed_id):
    user = get_user(request)
    stats = dict()
    feed = get_object_or_404(Feed, pk=feed_id)
    feed.count_subscribers()
    feed.set_next_scheduled_update(verbose=True, skip_scheduling=True)
    feed.save_feed_story_history_statistics()
    feed.save_classifier_counts()
    
    # Dates of last and next update
    stats['active'] = feed.active
    stats['last_update'] = relative_timesince(feed.last_update)
    if feed.is_push:
        stats['next_update'] = "real-time..."
    else:
        stats['next_update'] = relative_timeuntil(feed.next_scheduled_update)

    # Minutes between updates
    update_interval_minutes, _ = feed.get_next_scheduled_update(force=True, verbose=False)
    if feed.is_push:
        stats['update_interval_minutes'] = 0
    else:
        stats['update_interval_minutes'] = update_interval_minutes
    original_active_premium_subscribers = feed.active_premium_subscribers
    original_premium_subscribers = feed.premium_subscribers
    feed.active_premium_subscribers = max(feed.active_premium_subscribers+1, 1)
    feed.premium_subscribers += 1
    premium_update_interval_minutes, _ = feed.get_next_scheduled_update(force=True, verbose=False)
    feed.active_premium_subscribers = original_active_premium_subscribers
    feed.premium_subscribers = original_premium_subscribers
    if feed.is_push:
        stats['premium_update_interval_minutes'] = 0
    else:
        stats['premium_update_interval_minutes'] = premium_update_interval_minutes
    
    # Stories per month - average and month-by-month breakout
    average_stories_per_month, story_count_history = feed.average_stories_per_month, feed.data.story_count_history
    stats['average_stories_per_month'] = average_stories_per_month
    stats['story_count_history'] = story_count_history and json.decode(story_count_history)
    
    # Subscribers
    stats['subscriber_count'] = feed.num_subscribers
    stats['stories_last_month'] = feed.stories_last_month
    stats['last_load_time'] = feed.last_load_time
    stats['premium_subscribers'] = feed.premium_subscribers
    stats['active_subscribers'] = feed.active_subscribers
    stats['active_premium_subscribers'] = feed.active_premium_subscribers
    
    # Classifier counts
    stats['classifier_counts'] = json.decode(feed.data.feed_classifier_counts)
    
    # Fetch histories
    timezone = user.profile.timezone
    stats['feed_fetch_history'] = MFeedFetchHistory.feed_history(feed_id, timezone=timezone)
    stats['page_fetch_history'] = MPageFetchHistory.feed_history(feed_id, timezone=timezone)
    stats['feed_push_history'] = MFeedPushHistory.feed_history(feed_id, timezone=timezone)
    
    logging.user(request, "~FBStatistics: ~SB%s ~FG(%s/%s/%s subs)" % (feed, feed.num_subscribers, feed.active_subscribers, feed.premium_subscribers,))

    return stats
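
Example #5 measures the premium fetch interval by temporarily bumping the premium subscriber counters, recomputing the schedule, and restoring the originals by hand. The same save-and-restore pattern can be packaged as a context manager; a hypothetical sketch, not from the project:

from contextlib import contextmanager


@contextmanager
def temporarily(obj, **overrides):
    # Swap attribute values in, yield, then restore the originals
    # even if the body raises.
    originals = {name: getattr(obj, name) for name in overrides}
    for name, value in overrides.items():
        setattr(obj, name, value)
    try:
        yield obj
    finally:
        for name, value in originals.items():
            setattr(obj, name, value)

# Usage against the pattern above (attribute names taken from Example #5):
# with temporarily(feed,
#                  premium_subscribers=feed.premium_subscribers + 1,
#                  active_premium_subscribers=max(feed.active_premium_subscribers + 1, 1)):
#     premium_update_interval_minutes, _ = feed.get_next_scheduled_update(force=True, verbose=False)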
Example #6
    def canonical(self, full=False, include_favicon=True):
        feed = {
            'id': self.pk,
            'feed_title': self.feed_title,
            'feed_address': self.feed_address,
            'feed_link': self.feed_link,
            'num_subscribers': self.num_subscribers,
            'updated': relative_timesince(self.last_update),
            'updated_seconds_ago': seconds_timesince(self.last_update),
            'last_story_date': self.last_story_date,
            'last_story_seconds_ago': seconds_timesince(self.last_story_date),
            'stories_last_month': self.stories_last_month,
            'average_stories_per_month': self.average_stories_per_month,
            'min_to_decay': self.min_to_decay,
            'subs': self.num_subscribers,
            'is_push': self.is_push,
            'fetched_once': self.fetched_once,
            'search_indexed': self.search_indexed,
            'not_yet_fetched': not self.fetched_once, # Legacy. Doh.
            'favicon_color': self.favicon_color,
            'favicon_fade': self.favicon_fade(),
            'favicon_border': self.favicon_border(),
            'favicon_text_color': self.favicon_text_color(),
            'favicon_fetching': self.favicon_fetching,
            'favicon_url': self.favicon_url,
            's3_page': self.s3_page,
            's3_icon': self.s3_icon,
        }
Example #7
    def canonical(self, full=False):
        feed = {
            'id': self.pk,
            'feed_title': self.feed_title,
            'feed_address': self.feed_address,
            'feed_link': self.feed_link,
            'updated': relative_timesince(self.last_update),
            'subs': self.num_subscribers,
            'favicon': self.icon.data,
            'favicon_color': self.icon.color,
            'favicon_fetching': bool(not (self.icon.not_found or self.icon.data))
        }

        if not self.fetched_once:
            feed['not_yet_fetched'] = True
        if self.has_page_exception or self.has_feed_exception:
            feed['has_exception'] = True
            feed['exception_type'] = 'feed' if self.has_feed_exception else 'page'
            feed['exception_code'] = self.exception_code
        elif full:
            feed['has_exception'] = False
            feed['exception_type'] = None
            feed['exception_code'] = self.exception_code

        if full:
            feed['feed_tags'] = json.decode(self.data.popular_tags) if self.data.popular_tags else []
            feed['feed_authors'] = json.decode(self.data.popular_authors) if self.data.popular_authors else []

        return feed
Example #8
def load_feeds(request):
    user            = get_user(request)
    feeds           = {}
    not_yet_fetched = False
    
    try:
        folders = UserSubscriptionFolders.objects.get(user=user)
    except UserSubscriptionFolders.DoesNotExist:
        data = dict(feeds=[], folders=[])
        return data
    except UserSubscriptionFolders.MultipleObjectsReturned:
        # A sliced queryset can't be delete()d in Django; drop duplicates row by row
        for dupe in list(UserSubscriptionFolders.objects.filter(user=user))[1:]:
            dupe.delete()
        folders = UserSubscriptionFolders.objects.get(user=user)
        
    user_subs = UserSubscription.objects.select_related('feed', 'feed__feed_icon').filter(user=user)
    
    for sub in user_subs:
        feeds[sub.feed.pk] = {
            'id': sub.feed.pk,
            'feed_title': sub.user_title or sub.feed.feed_title,
            'feed_address': sub.feed.feed_address,
            'feed_link': sub.feed.feed_link,
            'ps': sub.unread_count_positive,
            'nt': sub.unread_count_neutral,
            'ng': sub.unread_count_negative, 
            'updated': relative_timesince(sub.feed.last_update),
            'subs': sub.feed.num_subscribers,
            'active': sub.active,
            'favicon': sub.feed.icon.data,
            'favicon_color': sub.feed.icon.color,
            'favicon_fetching': bool(not (sub.feed.icon.not_found or sub.feed.icon.data))
        }
        
        if not sub.feed.fetched_once:
            not_yet_fetched = True
            feeds[sub.feed.pk]['not_yet_fetched'] = True
        if sub.feed.has_page_exception or sub.feed.has_feed_exception:
            feeds[sub.feed.pk]['has_exception'] = True
            feeds[sub.feed.pk]['exception_type'] = 'feed' if sub.feed.has_feed_exception else 'page'
            feeds[sub.feed.pk]['feed_address'] = sub.feed.feed_address
            feeds[sub.feed.pk]['exception_code'] = sub.feed.exception_code
        if not sub.feed.active and not sub.feed.has_feed_exception and not sub.feed.has_page_exception:
            sub.feed.count_subscribers()
            sub.feed.schedule_feed_fetch_immediately()
            
    if not_yet_fetched:
        for f in feeds:
            if 'not_yet_fetched' not in feeds[f]:
                feeds[f]['not_yet_fetched'] = False

    starred_count = MStarredStory.objects(user_id=user.pk).count()

    data = {
        'feeds': feeds,
        'folders': json.decode(folders.folders),
        'starred_count': starred_count,
    }
    return data
Example #9
def load_feed_statistics(request, feed_id):
    user = get_user(request)
    stats = dict()
    feed = get_object_or_404(Feed, pk=feed_id)
    feed.update_all_statistics()
    feed.set_next_scheduled_update(verbose=True, skip_scheduling=True)
    feed.save_feed_story_history_statistics()
    feed.save_classifier_counts()

    # Dates of last and next update
    stats['active'] = feed.active
    stats['last_update'] = relative_timesince(feed.last_update)
    stats['next_update'] = relative_timeuntil(feed.next_scheduled_update)
    stats['push'] = feed.is_push

    # Minutes between updates
    update_interval_minutes = feed.get_next_scheduled_update(force=True,
                                                             verbose=False)
    stats['update_interval_minutes'] = update_interval_minutes
    original_active_premium_subscribers = feed.active_premium_subscribers
    original_premium_subscribers = feed.premium_subscribers
    feed.active_premium_subscribers = max(feed.active_premium_subscribers + 1,
                                          1)
    feed.premium_subscribers += 1
    premium_update_interval_minutes = feed.get_next_scheduled_update(
        force=True, verbose=False)
    feed.active_premium_subscribers = original_active_premium_subscribers
    feed.premium_subscribers = original_premium_subscribers
    stats['premium_update_interval_minutes'] = premium_update_interval_minutes
    stats['errors_since_good'] = feed.errors_since_good

    # Stories per month - average and month-by-month breakout
    average_stories_per_month, story_count_history = feed.average_stories_per_month, feed.data.story_count_history
    stats['average_stories_per_month'] = average_stories_per_month
    stats['story_count_history'] = story_count_history and json.decode(
        story_count_history)

    # Subscribers
    stats['subscriber_count'] = feed.num_subscribers
    stats['stories_last_month'] = feed.stories_last_month
    stats['last_load_time'] = feed.last_load_time
    stats['premium_subscribers'] = feed.premium_subscribers
    stats['active_subscribers'] = feed.active_subscribers
    stats['active_premium_subscribers'] = feed.active_premium_subscribers

    # Classifier counts
    stats['classifier_counts'] = json.decode(feed.data.feed_classifier_counts)

    # Fetch histories
    timezone = user.profile.timezone
    fetch_history = MFetchHistory.feed(feed_id, timezone=timezone)
    stats['feed_fetch_history'] = fetch_history['feed_fetch_history']
    stats['page_fetch_history'] = fetch_history['page_fetch_history']
    stats['feed_push_history'] = fetch_history['push_history']

    logging.user(request, "~FBStatistics: ~SB%s" % (feed))

    return stats
Example #10
def load_feed_statistics(request, feed_id):
    user = get_user(request)
    stats = dict()
    feed = get_object_or_404(Feed, pk=feed_id)
    feed.update_all_statistics()
    feed.set_next_scheduled_update(verbose=True, skip_scheduling=True)
    feed.save_feed_story_history_statistics()
    feed.save_classifier_counts()

    # Dates of last and next update
    stats["active"] = feed.active
    stats["last_update"] = relative_timesince(feed.last_update)
    stats["next_update"] = relative_timeuntil(feed.next_scheduled_update)
    stats["push"] = feed.is_push

    # Minutes between updates
    update_interval_minutes = feed.get_next_scheduled_update(force=True, verbose=False)
    stats["update_interval_minutes"] = update_interval_minutes
    original_active_premium_subscribers = feed.active_premium_subscribers
    original_premium_subscribers = feed.premium_subscribers
    feed.active_premium_subscribers = max(feed.active_premium_subscribers + 1, 1)
    feed.premium_subscribers += 1
    premium_update_interval_minutes = feed.get_next_scheduled_update(force=True, verbose=False)
    feed.active_premium_subscribers = original_active_premium_subscribers
    feed.premium_subscribers = original_premium_subscribers
    stats["premium_update_interval_minutes"] = premium_update_interval_minutes
    stats["errors_since_good"] = feed.errors_since_good

    # Stories per month - average and month-by-month breakout
    average_stories_per_month, story_count_history = feed.average_stories_per_month, feed.data.story_count_history
    stats["average_stories_per_month"] = average_stories_per_month
    stats["story_count_history"] = story_count_history and json.decode(story_count_history)

    # Subscribers
    stats["subscriber_count"] = feed.num_subscribers
    stats["stories_last_month"] = feed.stories_last_month
    stats["last_load_time"] = feed.last_load_time
    stats["premium_subscribers"] = feed.premium_subscribers
    stats["active_subscribers"] = feed.active_subscribers
    stats["active_premium_subscribers"] = feed.active_premium_subscribers

    # Classifier counts
    stats["classifier_counts"] = json.decode(feed.data.feed_classifier_counts)

    # Fetch histories
    timezone = user.profile.timezone
    fetch_history = MFetchHistory.feed(feed_id, timezone=timezone)
    stats["feed_fetch_history"] = fetch_history["feed_fetch_history"]
    stats["page_fetch_history"] = fetch_history["page_fetch_history"]
    stats["feed_push_history"] = fetch_history["push_history"]

    logging.user(request, "~FBStatistics: ~SB%s" % (feed))

    return stats
Example #11
def load_feeds(request):
    user = get_user(request)
    feeds = {}
    not_yet_fetched = False

    try:
        folders = UserSubscriptionFolders.objects.get(user=user)
    except UserSubscriptionFolders.DoesNotExist:
        data = dict(feeds=[], folders=[])
        return data
    except UserSubscriptionFolders.MultipleObjectsReturned:
        # A sliced queryset can't be delete()d in Django; drop duplicates row by row
        for dupe in list(UserSubscriptionFolders.objects.filter(user=user))[1:]:
            dupe.delete()
        folders = UserSubscriptionFolders.objects.get(user=user)

    user_subs = UserSubscription.objects.select_related("feed").filter(user=user)

    for sub in user_subs:
        feeds[sub.feed.pk] = {
            "id": sub.feed.pk,
            "feed_title": sub.user_title or sub.feed.feed_title,
            "feed_address": sub.feed.feed_address,
            "feed_link": sub.feed.feed_link,
            "ps": sub.unread_count_positive,
            "nt": sub.unread_count_neutral,
            "ng": sub.unread_count_negative,
            "updated": relative_timesince(sub.feed.last_update),
            "subs": sub.feed.num_subscribers,
            "active": sub.active,
        }

        if not sub.feed.fetched_once:
            not_yet_fetched = True
            feeds[sub.feed.pk]["not_yet_fetched"] = True
        if sub.feed.has_page_exception or sub.feed.has_feed_exception:
            feeds[sub.feed.pk]["has_exception"] = True
            feeds[sub.feed.pk]["exception_type"] = "feed" if sub.feed.has_feed_exception else "page"
            feeds[sub.feed.pk]["feed_address"] = sub.feed.feed_address
            feeds[sub.feed.pk]["exception_code"] = sub.feed.exception_code
        if not sub.feed.active and not sub.feed.has_feed_exception and not sub.feed.has_page_exception:
            sub.feed.count_subscribers()
            sub.feed.schedule_feed_fetch_immediately()

    if not_yet_fetched:
        for f in feeds:
            if "not_yet_fetched" not in feeds[f]:
                feeds[f]["not_yet_fetched"] = False

    starred_count = MStarredStory.objects(user_id=user.pk).count()

    data = {"feeds": feeds, "folders": json.decode(folders.folders), "starred_count": starred_count}
    return data
Example #12
def load_feeds(request):
    user            = get_user(request)
    feeds           = {}
    not_yet_fetched = False
    
    try:
        folders = UserSubscriptionFolders.objects.get(user=user)
    except UserSubscriptionFolders.DoesNotExist:
        data = dict(feeds=[], folders=[])
        return data
        
    user_subs = UserSubscription.objects.select_related('feed').filter(user=user)
    
    for sub in user_subs:
        feeds[sub.feed.pk] = {
            'id': sub.feed.pk,
            'feed_title': sub.feed.feed_title,
            'feed_address': sub.feed.feed_address,
            'feed_link': sub.feed.feed_link,
            'ps': sub.unread_count_positive,
            'nt': sub.unread_count_neutral,
            'ng': sub.unread_count_negative, 
            'updated': relative_timesince(sub.feed.last_update),
            'subs': sub.feed.num_subscribers,
            'active': sub.active
        }
        
        if not sub.feed.fetched_once:
            not_yet_fetched = True
            feeds[sub.feed.pk]['not_yet_fetched'] = True
        if sub.feed.has_page_exception or sub.feed.has_feed_exception:
            feeds[sub.feed.pk]['has_exception'] = True
            feeds[sub.feed.pk]['exception_type'] = 'feed' if sub.feed.has_feed_exception else 'page'
            feeds[sub.feed.pk]['feed_address'] = sub.feed.feed_address
            feeds[sub.feed.pk]['exception_code'] = sub.feed.exception_code
        if not sub.feed.active and not sub.feed.has_feed_exception and not sub.feed.has_page_exception:
            sub.feed.count_subscribers()
            sub.feed.schedule_feed_fetch_immediately()
            
    if not_yet_fetched:
        for f in feeds:
            if 'not_yet_fetched' not in feeds[f]:
                feeds[f]['not_yet_fetched'] = False
                
    data = dict(feeds=feeds, folders=json.decode(folders.folders))
    return data
Example #13
def load_feed_statistics(request, feed_id):
    stats = dict()
    feed = get_object_or_404(Feed, pk=feed_id)
    feed.save_feed_story_history_statistics()
    feed.save_classifier_counts()

    # Dates of last and next update
    stats['last_update'] = relative_timesince(feed.last_update)
    stats['next_update'] = relative_timeuntil(feed.next_scheduled_update)

    # Minutes between updates
    update_interval_minutes, random_factor = feed.get_next_scheduled_update(
        force=True)
    stats['update_interval_minutes'] = update_interval_minutes

    # Stories per month - average and month-by-month breakout
    average_stories_per_month, story_count_history = feed.average_stories_per_month, feed.data.story_count_history
    stats['average_stories_per_month'] = average_stories_per_month
    stats['story_count_history'] = story_count_history and json.decode(
        story_count_history)

    # Subscribers
    stats['subscriber_count'] = feed.num_subscribers
    stats['stories_last_month'] = feed.stories_last_month
    stats['last_load_time'] = feed.last_load_time
    stats['premium_subscribers'] = feed.premium_subscribers
    stats['active_subscribers'] = feed.active_subscribers
    stats['active_premium_subscribers'] = feed.active_premium_subscribers

    # Classifier counts
    stats['classifier_counts'] = json.decode(feed.data.feed_classifier_counts)

    # Fetch histories
    stats['feed_fetch_history'] = MFeedFetchHistory.feed_history(feed_id)
    stats['page_fetch_history'] = MPageFetchHistory.feed_history(feed_id)

    logging.user(
        request, "~FBStatistics: ~SB%s ~FG(%s/%s/%s subs)" % (
            feed,
            feed.num_subscribers,
            feed.active_subscribers,
            feed.premium_subscribers,
        ))

    return stats
Example #14
def load_feed_statistics(request):
    stats = dict()
    feed_id = request.GET['feed_id']
    feed = get_object_or_404(Feed, pk=feed_id)
    feed.save_feed_story_history_statistics()

    # Dates of last and next update
    stats['last_update'] = relative_timesince(feed.last_update)
    stats['next_update'] = relative_timeuntil(feed.next_scheduled_update)

    # Minutes between updates
    update_interval_minutes, random_factor = feed.get_next_scheduled_update()
    stats['update_interval_minutes'] = update_interval_minutes

    # Stories per month - average and month-by-month breakout
    average_stories_per_month, story_count_history = feed.average_stories_per_month, feed.data.story_count_history
    stats['average_stories_per_month'] = average_stories_per_month
    stats['story_count_history'] = story_count_history and json.decode(
        story_count_history)

    # Subscribers
    stats['subscriber_count'] = feed.num_subscribers
    stats['stories_last_month'] = feed.stories_last_month
    stats['last_load_time'] = feed.last_load_time
    stats['premium_subscribers'] = feed.premium_subscribers
    stats['active_subscribers'] = feed.active_subscribers

    logging.info(" ---> [%s] ~FBStatistics: ~SB%s ~FG(%s/%s/%s subs)" % (
        request.user,
        feed,
        feed.num_subscribers,
        feed.active_subscribers,
        feed.premium_subscribers,
    ))

    return stats
Example #15
def check_share_on_site(request, token):
    code       = 0
    story_url  = request.GET['story_url']
    rss_url    = request.GET.get('rss_url')
    callback   = request.GET['callback']
    other_stories = None
    same_stories = None
    usersub    = None
    message    = None
    user       = None
    
    if not story_url:
        code = -1
    else:
        try:
            user_profile = Profile.objects.get(secret_token=token)
            user = user_profile.user
        except Profile.DoesNotExist:
            code = -1
    
    logging.user(request.user, "~FBFinding feed (check_share_on_site): %s" % rss_url)
    feed = Feed.get_feed_from_url(rss_url, create=False, fetch=False)
    if not feed:
        logging.user(request.user, "~FBFinding feed (check_share_on_site): %s" % story_url)
        feed = Feed.get_feed_from_url(story_url, create=False, fetch=False)
    if not feed:
        parsed_url = urlparse.urlparse(story_url)
        base_url = "%s://%s%s" % (parsed_url.scheme, parsed_url.hostname, parsed_url.path)
        logging.user(request.user, "~FBFinding feed (check_share_on_site): %s" % base_url)
        feed = Feed.get_feed_from_url(base_url, create=False, fetch=False)
    if not feed:
        logging.user(request.user, "~FBFinding feed (check_share_on_site): %s" % (base_url + '/'))
        feed = Feed.get_feed_from_url(base_url+'/', create=False, fetch=False)
    
    if feed and user:
        try:
            usersub = UserSubscription.objects.filter(user=user, feed=feed)
        except UserSubscription.DoesNotExist:
            usersub = None
    feed_id = feed and feed.pk
    your_story, same_stories, other_stories = MSharedStory.get_shared_stories_from_site(feed_id,
                                              user_id=user_profile.user.pk, story_url=story_url)
    previous_stories = MSharedStory.objects.filter(user_id=user_profile.user.pk).order_by('-shared_date').limit(3)
    previous_stories = [{
        "user_id": story.user_id,
        "story_title": story.story_title,
        "comments": story.comments,
        "shared_date": story.shared_date,
        "relative_date": relative_timesince(story.shared_date),
        "blurblog_permalink": story.blurblog_permalink(),
    } for story in previous_stories]
    
    user_ids = set([user_profile.user.pk])
    for story in same_stories:
        user_ids.add(story['user_id'])
    for story in other_stories:
        user_ids.add(story['user_id'])
    
    users = {}
    profiles = MSocialProfile.profiles(user_ids)
    for profile in profiles:
        users[profile.user_id] = {
            "username": profile.username,
            "photo_url": profile.photo_url,
        }
        
    logging.user(user_profile.user, "~BM~FCChecking share from site: ~SB%s" % (story_url),
                 request=request)
    
    response = HttpResponse(callback + '(' + json.encode({
        'code'              : code,
        'message'           : message,
        'feed'              : feed,
        'subscribed'        : bool(usersub),
        'your_story'        : your_story,
        'same_stories'      : same_stories,
        'other_stories'     : other_stories,
        'previous_stories'  : previous_stories,
        'users'             : users,
    }) + ')', content_type='text/plain')
    response['Access-Control-Allow-Origin'] = '*'
    response['Access-Control-Allow-Methods'] = 'GET'
    
    return response
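
Examples #15, #19, and #20 respond with JSONP: the encoded payload is wrapped in the caller-supplied callback so a third-party page can consume it from a <script> tag, with permissive CORS headers set as well. The response shape, sketched with the stdlib json instead of the project's wrapper:

import json

from django.http import HttpResponse


def jsonp(callback, payload):
    # callback(...) wrapping is what makes the response loadable cross-origin
    body = '%s(%s)' % (callback, json.dumps(payload))
    response = HttpResponse(body, content_type='text/plain')
    response['Access-Control-Allow-Origin'] = '*'
    response['Access-Control-Allow-Methods'] = 'GET'
    return response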
Example #16
def load_feed_statistics(request, feed_id):
    user = get_user(request)
    timezone = user.profile.timezone
    stats = dict()
    feed = get_object_or_404(Feed, pk=feed_id)
    feed.update_all_statistics()
    feed.set_next_scheduled_update(verbose=True, skip_scheduling=True)
    feed.save_feed_story_history_statistics()
    feed.save_classifier_counts()
    
    # Dates of last and next update
    stats['active'] = feed.active
    stats['last_update'] = relative_timesince(feed.last_update)
    stats['next_update'] = relative_timeuntil(feed.next_scheduled_update)
    stats['push'] = feed.is_push
    if feed.is_push:
        try:
            stats['push_expires'] = localtime_for_timezone(feed.push.lease_expires, 
                                                           timezone).strftime("%Y-%m-%d %H:%M:%S")
        except PushSubscription.DoesNotExist:
            stats['push_expires'] = 'Missing push'
            feed.is_push = False
            feed.save()

    # Minutes between updates
    update_interval_minutes = feed.get_next_scheduled_update(force=True, verbose=False)
    stats['update_interval_minutes'] = update_interval_minutes
    original_active_premium_subscribers = feed.active_premium_subscribers
    original_premium_subscribers = feed.premium_subscribers
    feed.active_premium_subscribers = max(feed.active_premium_subscribers+1, 1)
    feed.premium_subscribers += 1
    premium_update_interval_minutes = feed.get_next_scheduled_update(force=True, verbose=False,
                                                                     premium_speed=True)
    feed.active_premium_subscribers = original_active_premium_subscribers
    feed.premium_subscribers = original_premium_subscribers
    stats['premium_update_interval_minutes'] = premium_update_interval_minutes
    stats['errors_since_good'] = feed.errors_since_good
    
    # Stories per month - average and month-by-month breakout
    average_stories_per_month, story_count_history = feed.average_stories_per_month, feed.data.story_count_history
    stats['average_stories_per_month'] = average_stories_per_month
    story_count_history = story_count_history and json.decode(story_count_history)
    if story_count_history and isinstance(story_count_history, dict):
        stats['story_count_history'] = story_count_history['months']
        stats['story_days_history'] = story_count_history['days']
        stats['story_hours_history'] = story_count_history['hours']
    else:
        stats['story_count_history'] = story_count_history
    
    # Rotate hours to match the user's timezone offset. Guarded, because the
    # else branch above leaves 'story_hours_history' unset.
    if stats.get('story_hours_history'):
        localoffset = timezone.utcoffset(datetime.datetime.utcnow())
        hours_offset = int(localoffset.total_seconds() / 3600)
        rotated_hours = {}
        for hour, value in stats['story_hours_history'].items():
            rotated_hours[str(int(hour)+hours_offset)] = value
        stats['story_hours_history'] = rotated_hours
    
    # Subscribers
    stats['subscriber_count'] = feed.num_subscribers
    stats['num_subscribers'] = feed.num_subscribers
    stats['stories_last_month'] = feed.stories_last_month
    stats['last_load_time'] = feed.last_load_time
    stats['premium_subscribers'] = feed.premium_subscribers
    stats['active_subscribers'] = feed.active_subscribers
    stats['active_premium_subscribers'] = feed.active_premium_subscribers

    # Classifier counts
    stats['classifier_counts'] = json.decode(feed.data.feed_classifier_counts)
    
    # Fetch histories
    fetch_history = MFetchHistory.feed(feed_id, timezone=timezone)
    stats['feed_fetch_history'] = fetch_history['feed_fetch_history']
    stats['page_fetch_history'] = fetch_history['page_fetch_history']
    stats['feed_push_history'] = fetch_history['push_history']
    
    logging.user(request, "~FBStatistics: ~SB%s" % (feed))

    return stats
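
Example #16 shifts the hour-of-day histogram into the user's timezone by rewriting each bucket key. As written, the shift can produce keys outside 0-23 (e.g. "-3" for a UTC-5 user); a wrapped variant would take the result modulo 24. A sketch of that assumption, not the project's code:

def rotate_hours(hours_history, hours_offset):
    # hours_history: {'0': count, ..., '23': count}, keyed by UTC hour
    return {
        str((int(hour) + hours_offset) % 24): value
        for hour, value in hours_history.items()
    }

assert rotate_hours({'2': 7}, -5) == {'21': 7}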
Example #17
def load_single_feed(request, feed_id):
    start        = time.time()
    user         = get_user(request)
    offset       = int(request.REQUEST.get('offset', 0))
    limit        = int(request.REQUEST.get('limit', 12))
    page         = int(request.REQUEST.get('page', 1))
    dupe_feed_id = None
    userstories_db = None
    
    if page: offset = limit * (page-1)
    if not feed_id: raise Http404
        
    try:
        feed = Feed.objects.get(id=feed_id)
    except Feed.DoesNotExist:
        feed_address = request.REQUEST.get('feed_address')
        dupe_feed = DuplicateFeed.objects.filter(duplicate_address=feed_address)
        if dupe_feed:
            feed = dupe_feed[0].feed
            dupe_feed_id = feed_id
        else:
            raise Http404
        
    stories = feed.get_stories(offset, limit) 
        
    # Get intelligence classifier for user
    classifier_feeds   = list(MClassifierFeed.objects(user_id=user.pk, feed_id=feed_id))
    classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk, feed_id=feed_id))
    classifier_titles  = list(MClassifierTitle.objects(user_id=user.pk, feed_id=feed_id))
    classifier_tags    = list(MClassifierTag.objects(user_id=user.pk, feed_id=feed_id))
    
    checkpoint1 = time.time()
    
    usersub = UserSubscription.objects.get(user=user, feed=feed)
    userstories = []
    if usersub and stories:
        story_ids = [story['id'] for story in stories]
        userstories_db = MUserStory.objects(user_id=user.pk,
                                            feed_id=feed.pk,
                                            story_id__in=story_ids).only('story_id')
        starred_stories = MStarredStory.objects(user_id=user.pk, 
                                                story_feed_id=feed_id, 
                                                story_guid__in=story_ids).only('story_guid', 'starred_date')
        starred_stories = dict([(story.story_guid, story.starred_date) for story in starred_stories])
        userstories = set(us.story_id for us in userstories_db)
            
    checkpoint2 = time.time()
    
    for story in stories:
        story_date = localtime_for_timezone(story['story_date'], user.profile.timezone)
        now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone)
        story['short_parsed_date'] = format_story_link_date__short(story_date, now)
        story['long_parsed_date'] = format_story_link_date__long(story_date, now)
        if usersub:
            if story['id'] in userstories:
                story['read_status'] = 1
            elif not story.get('read_status') and story['story_date'] < usersub.mark_read_date:
                story['read_status'] = 1
            elif not story.get('read_status') and story['story_date'] > usersub.last_read_date:
                story['read_status'] = 0
            if story['id'] in starred_stories:
                story['starred'] = True
                starred_date = localtime_for_timezone(starred_stories[story['id']], user.profile.timezone)
                story['starred_date'] = format_story_link_date__long(starred_date, now)
        else:
            story['read_status'] = 1
        story['intelligence'] = {
            'feed': apply_classifier_feeds(classifier_feeds, feed),
            'author': apply_classifier_authors(classifier_authors, story),
            'tags': apply_classifier_tags(classifier_tags, story),
            'title': apply_classifier_titles(classifier_titles, story),
        }

    checkpoint3 = time.time()
    
    # Intelligence
    feed_tags = json.decode(feed.data.popular_tags) if feed.data.popular_tags else []
    feed_authors = json.decode(feed.data.popular_authors) if feed.data.popular_authors else []
    classifiers = get_classifiers_for_user(user, feed_id, classifier_feeds, 
                                           classifier_authors, classifier_titles, classifier_tags)
    
    if usersub:
        usersub.feed_opens += 1
        usersub.save()
    diff1 = checkpoint1-start
    diff2 = checkpoint2-start
    diff3 = checkpoint3-start
    timediff = time.time()-start
    last_update = relative_timesince(feed.last_update)
    logging.user(request, "~FYLoading feed: ~SB%s%s ~SN(%.4s seconds, ~SB%.4s/%.4s(%s)/%.4s~SN)" % (
        feed.feed_title[:32], ('~SN/p%s' % page) if page > 1 else '', timediff,
        diff1, diff2, userstories_db and userstories_db.count() or '~SN0~SB', diff3))
    FeedLoadtime.objects.create(feed=feed, loadtime=timediff)
    
    data = dict(stories=stories, 
                feed_tags=feed_tags, 
                feed_authors=feed_authors, 
                classifiers=classifiers,
                last_update=last_update,
                feed_id=feed.pk)
    
    if dupe_feed_id: data['dupe_feed_id'] = dupe_feed_id
    if not usersub:
        data.update(feed.canonical())
        
    return data
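
Examples #17, #18, and #21 page through stories with a one-based page parameter, so the offset works out to limit * (page - 1). A quick worked check (helper name is hypothetical):

def page_offset(page, limit=12):
    # One-based pages: page 1 -> offset 0, page 2 -> offset 12, ...
    return limit * (page - 1) if page else 0

assert page_offset(1) == 0
assert page_offset(2) == 12
assert page_offset(2, limit=30) == 30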
Example #18
def load_single_feed(request, feed_id):
    start        = time.time()
    user         = get_user(request)
    offset       = int(request.REQUEST.get('offset', 0))
    limit        = int(request.REQUEST.get('limit', 12))
    page         = int(request.REQUEST.get('page', 1))
    dupe_feed_id = None
    userstories_db = None
    
    if page: offset = limit * (page-1)
    if not feed_id: raise Http404
        
    try:
        feed = Feed.objects.get(id=feed_id)
    except Feed.DoesNotExist:
        feed_address = request.REQUEST.get('feed_address')
        dupe_feed = DuplicateFeed.objects.filter(duplicate_address=feed_address)
        if dupe_feed:
            feed = dupe_feed[0].feed
            dupe_feed_id = feed_id
        else:
            raise Http404
        
    stories = feed.get_stories(offset, limit) 
        
    # Get intelligence classifier for user
    classifier_feeds   = list(MClassifierFeed.objects(user_id=user.pk, feed_id=feed_id))
    classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk, feed_id=feed_id))
    classifier_titles  = list(MClassifierTitle.objects(user_id=user.pk, feed_id=feed_id))
    classifier_tags    = list(MClassifierTag.objects(user_id=user.pk, feed_id=feed_id))
    
    checkpoint1 = time.time()
    
    usersub = UserSubscription.objects.get(user=user, feed=feed)
    userstories = []
    if usersub:
        userstories_db = MUserStory.objects(user_id=user.pk,
                                            feed_id=feed.pk,
                                            read_date__gte=usersub.mark_read_date)
        starred_stories = MStarredStory.objects(user_id=user.pk, story_feed_id=feed_id).only('story_guid', 'starred_date')
        starred_stories = dict([(story.story_guid, story.starred_date) for story in starred_stories])

        for us in userstories_db:
            if hasattr(us.story, 'story_guid') and isinstance(us.story.story_guid, unicode):
                userstories.append(us.story.story_guid)
            elif hasattr(us.story, 'id') and isinstance(us.story.id, unicode):
                userstories.append(us.story.id) # TODO: Remove me after migration from story.id->guid
            
    checkpoint2 = time.time()
    
    for story in stories:
        story_date = localtime_for_timezone(story['story_date'], user.profile.timezone)
        now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone)
        story['short_parsed_date'] = format_story_link_date__short(story_date, now)
        story['long_parsed_date'] = format_story_link_date__long(story_date, now)
        if usersub:
            if story['id'] in userstories:
                story['read_status'] = 1
            elif not story.get('read_status') and story['story_date'] < usersub.mark_read_date:
                story['read_status'] = 1
            elif not story.get('read_status') and story['story_date'] > usersub.last_read_date:
                story['read_status'] = 0
            if story['id'] in starred_stories:
                story['starred'] = True
                starred_date = localtime_for_timezone(starred_stories[story['id']], user.profile.timezone)
                story['starred_date'] = format_story_link_date__long(starred_date, now)
        else:
            story['read_status'] = 1
        story['intelligence'] = {
            'feed': apply_classifier_feeds(classifier_feeds, feed),
            'author': apply_classifier_authors(classifier_authors, story),
            'tags': apply_classifier_tags(classifier_tags, story),
            'title': apply_classifier_titles(classifier_titles, story),
        }

    checkpoint3 = time.time()
    
    # Intelligence
    feed_tags = json.decode(feed.data.popular_tags) if feed.data.popular_tags else []
    feed_authors = json.decode(feed.data.popular_authors) if feed.data.popular_authors else []
    classifiers = get_classifiers_for_user(user, feed_id, classifier_feeds, 
                                           classifier_authors, classifier_titles, classifier_tags)
    
    if usersub:
        usersub.feed_opens += 1
        usersub.save()
    timediff = time.time()-start
    last_update = relative_timesince(feed.last_update)
    logging.user(request.user, "~FYLoading feed: ~SB%s%s ~SN(%.4s seconds)" % (
        feed, ('~SN/p%s' % page) if page > 1 else '', timediff))
    FeedLoadtime.objects.create(feed=feed, loadtime=timediff)
    
    if timediff >= 1:
        diff1 = checkpoint1-start
        diff2 = checkpoint2-start
        diff3 = checkpoint3-start
        logging.user(request.user, "~FYSlow feed load: ~SB%.4s/%.4s(%s)/%.4s" % (
            diff1, diff2, userstories_db and userstories_db.count(), diff3))
    
    data = dict(stories=stories, 
                feed_tags=feed_tags, 
                feed_authors=feed_authors, 
                classifiers=classifiers,
                last_update=last_update,
                feed_id=feed.pk)
    
    if dupe_feed_id: data['dupe_feed_id'] = dupe_feed_id
    if not usersub:
        data.update(feed.canonical())
        
    return data
Example #19
def check_share_on_site(request, token):
    code       = 0
    story_url  = request.GET['story_url']
    rss_url    = request.GET.get('rss_url')
    callback   = request.GET['callback']
    other_stories = None
    same_stories = None
    usersub    = None
    message    = None
    user       = None
    
    if not story_url:
        code = -1
    else:
        try:
            user_profile = Profile.objects.get(secret_token=token)
            user = user_profile.user
        except Profile.DoesNotExist:
            code = -1
    
    feed = Feed.get_feed_from_url(rss_url, create=False, fetch=False)
    if not feed:
        feed = Feed.get_feed_from_url(story_url, create=False, fetch=False)
    if not feed:
        parsed_url = urlparse.urlparse(story_url)
        base_url = "%s://%s%s" % (parsed_url.scheme, parsed_url.hostname, parsed_url.path)
        feed = Feed.get_feed_from_url(base_url, create=False, fetch=False)
    if not feed:
        feed = Feed.get_feed_from_url(base_url+'/', create=False, fetch=False)
    
    if feed and user:
        try:
            usersub = UserSubscription.objects.filter(user=user, feed=feed)
        except UserSubscription.DoesNotExist:
            usersub = None
    feed_id = feed and feed.pk
    your_story, same_stories, other_stories = MSharedStory.get_shared_stories_from_site(feed_id,
                                              user_id=user_profile.user.pk, story_url=story_url)
    previous_stories = MSharedStory.objects.filter(user_id=user_profile.user.pk).order_by('-shared_date').limit(3)
    previous_stories = [{
        "user_id": story.user_id,
        "story_title": story.story_title,
        "comments": story.comments,
        "shared_date": story.shared_date,
        "relative_date": relative_timesince(story.shared_date),
        "blurblog_permalink": story.blurblog_permalink(),
    } for story in previous_stories]
    
    user_ids = set([user_profile.user.pk])
    for story in same_stories:
        user_ids.add(story['user_id'])
    for story in other_stories:
        user_ids.add(story['user_id'])
    
    users = {}
    profiles = MSocialProfile.profiles(user_ids)
    for profile in profiles:
        users[profile.user_id] = {
            "username": profile.username,
            "photo_url": profile.photo_url,
        }
        
    logging.user(user_profile.user, "~BM~FCChecking share from site: ~SB%s" % (story_url),
                 request=request)
    
    response = HttpResponse(callback + '(' + json.encode({
        'code'              : code,
        'message'           : message,
        'feed'              : feed,
        'subscribed'        : bool(usersub),
        'your_story'        : your_story,
        'same_stories'      : same_stories,
        'other_stories'     : other_stories,
        'previous_stories'  : previous_stories,
        'users'             : users,
    }) + ')', mimetype='text/plain')
    response['Access-Control-Allow-Origin'] = '*'
    response['Access-Control-Allow-Methods'] = 'GET'
    
    return response
Example #20
def check_share_on_site(request, token):
    code = 0
    story_url = request.GET["story_url"]
    rss_url = request.GET.get("rss_url")
    callback = request.GET["callback"]
    other_stories = None
    same_stories = None
    usersub = None
    message = None
    user = None

    if not story_url:
        code = -1
    else:
        try:
            user_profile = Profile.objects.get(secret_token=token)
            user = user_profile.user
        except Profile.DoesNotExist:
            code = -1

    feed = Feed.get_feed_from_url(rss_url, create=False, fetch=False)
    if not feed:
        feed = Feed.get_feed_from_url(story_url, create=False, fetch=False)
    if not feed:
        parsed_url = urlparse.urlparse(story_url)
        base_url = "%s://%s%s" % (parsed_url.scheme, parsed_url.hostname, parsed_url.path)
        feed = Feed.get_feed_from_url(base_url, create=False, fetch=False)
    if not feed:
        feed = Feed.get_feed_from_url(base_url + "/", create=False, fetch=False)

    if feed and user:
        try:
            usersub = UserSubscription.objects.filter(user=user, feed=feed)
        except UserSubscription.DoesNotExist:
            usersub = None
    feed_id = feed and feed.pk
    your_story, same_stories, other_stories = MSharedStory.get_shared_stories_from_site(
        feed_id, user_id=user_profile.user.pk, story_url=story_url
    )
    previous_stories = MSharedStory.objects.filter(user_id=user_profile.user.pk).order_by("-shared_date").limit(3)
    previous_stories = [
        {
            "user_id": story.user_id,
            "story_title": story.story_title,
            "comments": story.comments,
            "shared_date": story.shared_date,
            "relative_date": relative_timesince(story.shared_date),
            "blurblog_permalink": story.blurblog_permalink(),
        }
        for story in previous_stories
    ]

    user_ids = set([user_profile.user.pk])
    for story in same_stories:
        user_ids.add(story["user_id"])
    for story in other_stories:
        user_ids.add(story["user_id"])

    users = {}
    profiles = MSocialProfile.profiles(user_ids)
    for profile in profiles:
        users[profile.user_id] = {"username": profile.username, "photo_url": profile.photo_url}

    logging.user(user_profile.user, "~BM~FCChecking share from site: ~SB%s" % (story_url), request=request)

    response = HttpResponse(
        callback
        + "("
        + json.encode(
            {
                "code": code,
                "message": message,
                "feed": feed,
                "subscribed": bool(usersub),
                "your_story": your_story,
                "same_stories": same_stories,
                "other_stories": other_stories,
                "previous_stories": previous_stories,
                "users": users,
            }
        )
        + ")",
        mimetype="text/plain",
    )
    response["Access-Control-Allow-Origin"] = "*"
    response["Access-Control-Allow-Methods"] = "GET"

    return response
Example #21
def load_single_feed(request, feed_id):
    start = datetime.datetime.utcnow()
    user = get_user(request)
    offset = int(request.REQUEST.get('offset', 0))
    limit = int(request.REQUEST.get('limit', 12))
    page = int(request.REQUEST.get('page', 1))
    if page:
        offset = limit * (page-1)
    dupe_feed_id = None
    if not feed_id:
        raise Http404
        
    try:
        feed = Feed.objects.get(id=feed_id)
    except Feed.DoesNotExist:
        feed_address = request.REQUEST.get('feed_address')
        dupe_feed = DuplicateFeed.objects.filter(duplicate_address=feed_address)
        if dupe_feed:
            feed = dupe_feed[0].feed
            dupe_feed_id = feed_id
        else:
            raise Http404
        
    stories = feed.get_stories(offset, limit) 
        
    # Get intelligence classifier for user
    classifier_feeds   = MClassifierFeed.objects(user_id=user.pk, feed_id=feed_id)
    classifier_authors = MClassifierAuthor.objects(user_id=user.pk, feed_id=feed_id)
    classifier_titles  = MClassifierTitle.objects(user_id=user.pk, feed_id=feed_id)
    classifier_tags    = MClassifierTag.objects(user_id=user.pk, feed_id=feed_id)
    
    usersub = UserSubscription.objects.get(user=user, feed=feed)
    userstories = []
    if usersub:
        userstories_db = MUserStory.objects(user_id=user.pk,
                                            feed_id=feed.pk,
                                            read_date__gte=usersub.mark_read_date)
        starred_stories = MStarredStory.objects(user_id=user.pk, story_feed_id=feed_id).only('story_guid', 'starred_date')
        starred_stories = dict([(story.story_guid, story.starred_date) for story in starred_stories])

        for us in userstories_db:
            if hasattr(us.story, 'story_guid') and isinstance(us.story.story_guid, unicode):
                userstories.append(us.story.story_guid)
            elif hasattr(us.story, 'id') and isinstance(us.story.id, unicode):
                userstories.append(us.story.id) # TODO: Remove me after migration from story.id->guid
            
    for story in stories:
        [x.rewind() for x in [classifier_feeds, classifier_authors, classifier_tags, classifier_titles]]
        story_date = localtime_for_timezone(story['story_date'], user.profile.timezone)
        now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone)
        story['short_parsed_date'] = format_story_link_date__short(story_date, now)
        story['long_parsed_date'] = format_story_link_date__long(story_date, now)
        if usersub:
            if story['id'] in userstories:
                story['read_status'] = 1
            elif not story.get('read_status') and story['story_date'] < usersub.mark_read_date:
                story['read_status'] = 1
            elif not story.get('read_status') and story['story_date'] > usersub.last_read_date:
                story['read_status'] = 0
            if story['id'] in starred_stories:
                story['starred'] = True
                starred_date = localtime_for_timezone(starred_stories[story['id']], user.profile.timezone)
                story['starred_date'] = format_story_link_date__long(starred_date, now)
        else:
            story['read_status'] = 1
        story['intelligence'] = {
            'feed': apply_classifier_feeds(classifier_feeds, feed),
            'author': apply_classifier_authors(classifier_authors, story),
            'tags': apply_classifier_tags(classifier_tags, story),
            'title': apply_classifier_titles(classifier_titles, story),
        }
    
    # Intelligence
    feed_tags = json.decode(feed.data.popular_tags) if feed.data.popular_tags else []
    feed_authors = json.decode(feed.data.popular_authors) if feed.data.popular_authors else []
    classifiers = get_classifiers_for_user(user, feed_id, classifier_feeds, 
                                           classifier_authors, classifier_titles, classifier_tags)
    
    if usersub:
        usersub.feed_opens += 1
        usersub.save()
    
    diff = datetime.datetime.utcnow()-start
    timediff = float("%s.%.2s" % (diff.seconds, (diff.microseconds / 1000)))
    last_update = relative_timesince(feed.last_update)
    logging.user(request.user, "~FYLoading feed: ~SB%s%s ~SN(%s seconds)" % (
        feed, ('~SN/p%s' % page) if page > 1 else '', timediff))
    FeedLoadtime.objects.create(feed=feed, loadtime=timediff)
    
    data = dict(stories=stories, 
                feed_tags=feed_tags, 
                feed_authors=feed_authors, 
                classifiers=classifiers,
                last_update=last_update,
                feed_id=feed.pk)
    
    if dupe_feed_id: data['dupe_feed_id'] = dupe_feed_id
    if not usersub:
        data.update(feed.canonical())
        
    return data
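
Examples #21 and #22 call rewind() on the classifier querysets inside the story loop because a MongoEngine queryset is a one-shot cursor: once the apply_classifier_* helpers iterate it, it is exhausted. Examples #17 and #18 avoid the repeated rewinds by materializing the cursors up front, which is the simpler form:

# Materialize once; plain lists can be re-iterated for every story.
classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk, feed_id=feed_id))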
Example #22
def load_single_feed(request):
    user = get_user(request)
    offset = int(request.REQUEST.get("offset", 0))
    limit = int(request.REQUEST.get("limit", 30))
    page = int(request.REQUEST.get("page", 0))
    if page:
        offset = limit * page
    feed_id = int(request.REQUEST.get("feed_id", 0))
    if feed_id == 0:
        raise Http404

    try:
        feed = Feed.objects.get(id=feed_id)
    except Feed.DoesNotExist:
        feed_address = request.REQUEST.get("feed_address")
        dupe_feed = DuplicateFeed.objects.filter(duplicate_address=feed_address)
        if dupe_feed:
            feed = dupe_feed[0].feed
        else:
            raise Http404

    force_update = request.GET.get("force_update", False)

    now = datetime.datetime.utcnow()
    stories = feed.get_stories(offset, limit)

    if force_update:
        feed.update(force_update)

    # Get intelligence classifier for user
    classifier_feeds = MClassifierFeed.objects(user_id=user.pk, feed_id=feed_id)
    classifier_authors = MClassifierAuthor.objects(user_id=user.pk, feed_id=feed_id)
    classifier_titles = MClassifierTitle.objects(user_id=user.pk, feed_id=feed_id)
    classifier_tags = MClassifierTag.objects(user_id=user.pk, feed_id=feed_id)

    usersub = UserSubscription.objects.get(user=user, feed=feed)
    userstories = []
    userstories_db = MUserStory.objects(user_id=user.pk, feed_id=feed.pk, read_date__gte=usersub.mark_read_date)
    starred_stories = MStarredStory.objects(user_id=user.pk, story_feed_id=feed_id).only("story_guid", "starred_date")
    starred_stories = dict([(story.story_guid, story.starred_date) for story in starred_stories])

    for us in userstories_db:
        if hasattr(us.story, "story_guid") and isinstance(us.story.story_guid, unicode):
            userstories.append(us.story.story_guid)
        elif hasattr(us.story, "id") and isinstance(us.story.id, unicode):
            userstories.append(us.story.id)  # TODO: Remove me after migration from story.id->guid

    for story in stories:
        classifier_feeds.rewind()
        classifier_authors.rewind()
        classifier_tags.rewind()
        classifier_titles.rewind()
        story_date = localtime_for_timezone(story["story_date"], user.profile.timezone)
        story["short_parsed_date"] = format_story_link_date__short(story_date)
        story["long_parsed_date"] = format_story_link_date__long(story_date)
        if story["id"] in userstories:
            story["read_status"] = 1
        elif not story.get("read_status") and story["story_date"] < usersub.mark_read_date:
            story["read_status"] = 1
        elif not story.get("read_status") and story["story_date"] > usersub.last_read_date:
            story["read_status"] = 0
        if story["id"] in starred_stories:
            story["starred"] = True
            starred_date = localtime_for_timezone(starred_stories[story["id"]], user.profile.timezone)
            story["starred_date"] = format_story_link_date__long(starred_date)
        story["intelligence"] = {
            "feed": apply_classifier_feeds(classifier_feeds, feed),
            "author": apply_classifier_authors(classifier_authors, story),
            "tags": apply_classifier_tags(classifier_tags, story),
            "title": apply_classifier_titles(classifier_titles, story),
        }

    # Intelligence
    feed_tags = json.decode(feed.popular_tags) if feed.popular_tags else []
    feed_authors = json.decode(feed.popular_authors) if feed.popular_authors else []
    classifiers = get_classifiers_for_user(
        user, feed_id, classifier_feeds, classifier_authors, classifier_titles, classifier_tags
    )

    usersub.feed_opens += 1
    usersub.save()

    diff = datetime.datetime.utcnow() - now
    timediff = diff.total_seconds()  # fractional seconds; avoids mis-stitching microseconds
    last_update = relative_timesince(feed.last_update)
    logging.info(" ---> [%s] ~FYLoading feed: ~SB%s ~SN(%s seconds)" % (request.user, feed, timediff))
    FeedLoadtime.objects.create(feed=feed, loadtime=timediff)

    data = dict(
        stories=stories,
        feed_tags=feed_tags,
        feed_authors=feed_authors,
        classifiers=classifiers,
        last_update=last_update,
        feed_id=feed.pk,
    )
    return data
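Note the four rewind() calls at the top of the story loop: MongoEngine cursors are exhausted after one pass, so each classifier queryset must be rewound before every story. A hedged alternative (an assumption, not the upstream code; it presumes the apply_classifier_* helpers only iterate over the collection they are given) is to materialize each queryset once, reusing the view's imports and names:

# Sketch: build plain lists once so the per-story loop can traverse the
# classifiers repeatedly without cursor rewinds or extra queries.
classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk, feed_id=feed_id))
classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk, feed_id=feed_id))
classifier_titles = list(MClassifierTitle.objects(user_id=user.pk, feed_id=feed_id))
classifier_tags = list(MClassifierTag.objects(user_id=user.pk, feed_id=feed_id))

for story in stories:
    # No rewind() needed: lists can be iterated any number of times.
    story["intelligence"] = {
        "feed": apply_classifier_feeds(classifier_feeds, feed),
        "author": apply_classifier_authors(classifier_authors, story),
        "tags": apply_classifier_tags(classifier_tags, story),
        "title": apply_classifier_titles(classifier_titles, story),
    }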
Example no. 23
def load_feed_statistics(request, feed_id):
    user = get_user(request)
    timezone = user.profile.timezone
    stats = dict()
    feed = get_object_or_404(Feed, pk=feed_id)
    feed.update_all_statistics()
    feed.set_next_scheduled_update(verbose=True, skip_scheduling=True)
    feed.save_feed_story_history_statistics()
    feed.save_classifier_counts()

    # Dates of last and next update
    stats['active'] = feed.active
    stats['last_update'] = relative_timesince(feed.last_update)
    stats['next_update'] = relative_timeuntil(feed.next_scheduled_update)
    stats['push'] = feed.is_push
    if feed.is_push:
        try:
            stats['push_expires'] = localtime_for_timezone(
                feed.push.lease_expires,
                timezone).strftime("%Y-%m-%d %H:%M:%S")
        except PushSubscription.DoesNotExist:
            stats['push_expires'] = 'Missing push'
            feed.is_push = False
            feed.save()

    # Minutes between updates
    update_interval_minutes = feed.get_next_scheduled_update(force=True,
                                                             verbose=False)
    stats['update_interval_minutes'] = update_interval_minutes
    original_active_premium_subscribers = feed.active_premium_subscribers
    original_premium_subscribers = feed.premium_subscribers
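    # Temporarily pretend one more (active) premium subscriber exists so the
    # scheduler reports the faster premium interval, then restore the real counts.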
    feed.active_premium_subscribers = max(feed.active_premium_subscribers + 1,
                                          1)
    feed.premium_subscribers += 1
    premium_update_interval_minutes = feed.get_next_scheduled_update(
        force=True, verbose=False, premium_speed=True)
    feed.active_premium_subscribers = original_active_premium_subscribers
    feed.premium_subscribers = original_premium_subscribers
    stats['premium_update_interval_minutes'] = premium_update_interval_minutes
    stats['errors_since_good'] = feed.errors_since_good

    # Stories per month - average and month-by-month breakout
    average_stories_per_month, story_count_history = feed.average_stories_per_month, feed.data.story_count_history
    stats['average_stories_per_month'] = average_stories_per_month
    story_count_history = story_count_history and json.decode(
        story_count_history)
    if story_count_history and isinstance(story_count_history, dict):
        stats['story_count_history'] = story_count_history['months']
        stats['story_days_history'] = story_count_history['days']
        stats['story_hours_history'] = story_count_history['hours']
    else:
        stats['story_count_history'] = story_count_history

    # Rotate hours to match user's timezone offset
    localoffset = timezone.utcoffset(datetime.datetime.utcnow())
    hours_offset = int(localoffset.total_seconds() / 3600)
    rotated_hours = {}
    # Guard: feeds without an hours breakdown never set this key, and the
    # shifted hour must wrap around into the 0-23 range.
    for hour, value in stats.get('story_hours_history', {}).items():
        rotated_hours[str((int(hour) + hours_offset) % 24)] = value
    if rotated_hours:
        stats['story_hours_history'] = rotated_hours

    # Subscribers
    stats['subscriber_count'] = feed.num_subscribers
    stats['num_subscribers'] = feed.num_subscribers
    stats['stories_last_month'] = feed.stories_last_month
    stats['last_load_time'] = feed.last_load_time
    stats['premium_subscribers'] = feed.premium_subscribers
    stats['active_subscribers'] = feed.active_subscribers
    stats['active_premium_subscribers'] = feed.active_premium_subscribers

    # Classifier counts
    stats['classifier_counts'] = json.decode(feed.data.feed_classifier_counts)

    # Fetch histories
    fetch_history = MFetchHistory.feed(feed_id, timezone=timezone)
    stats['feed_fetch_history'] = fetch_history['feed_fetch_history']
    stats['page_fetch_history'] = fetch_history['page_fetch_history']
    stats['feed_push_history'] = fetch_history['push_history']

    logging.user(request, "~FBStatistics: ~SB%s" % (feed))

    return stats
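The hour rotation above, pulled out as a standalone helper (hypothetical name, not part of the view), makes the wrap-around behavior easy to test:

def rotate_story_hours(hours_history, hours_offset):
    # Shift UTC hour buckets into the user's local time; the modulo keeps
    # every shifted hour inside 0-23.
    return dict((str((int(hour) + hours_offset) % 24), count)
                for hour, count in hours_history.items())

# e.g. rotate_story_hours({'23': 5, '0': 2}, -5) == {'18': 5, '19': 2}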
Example no. 24
def load_single_feed(request):
    user = get_user(request)
    offset = int(request.REQUEST.get('offset', 0))
    limit = int(request.REQUEST.get('limit', 30))
    page = int(request.REQUEST.get('page', 0))
    if page:
        offset = limit * page
    feed_id = int(request.REQUEST.get('feed_id', 0))
    if feed_id == 0:
        raise Http404
        
    try:
        feed = Feed.objects.get(id=feed_id)
    except Feed.DoesNotExist:
        feed_address = request.REQUEST.get('feed_address')
        dupe_feed = DuplicateFeed.objects.filter(duplicate_address=feed_address)
        if dupe_feed:
            feed = dupe_feed[0].feed
        else:
            raise Http404
        
    force_update = request.GET.get('force_update', False)
    
    now = datetime.datetime.utcnow()
    stories = feed.get_stories(offset, limit) 
        
    if force_update:
        feed.update(force_update)
    
    # Get intelligence classifier for user
    classifier_feeds = MClassifierFeed.objects(user_id=user.pk, feed_id=feed_id)
    classifier_authors = MClassifierAuthor.objects(user_id=user.pk, feed_id=feed_id)
    classifier_titles = MClassifierTitle.objects(user_id=user.pk, feed_id=feed_id)
    classifier_tags = MClassifierTag.objects(user_id=user.pk, feed_id=feed_id)
    
    try:
        usersub = UserSubscription.objects.get(user=user, feed=feed)
    except UserSubscription.DoesNotExist:
        # FIXME: Why is this happening for `conesus` when logged into another account?!
        logging.info(" ***> [%s] UserSub DNE, creating: %s" % (user, feed))
        usersub = UserSubscription.objects.create(user=user, feed=feed)
            
    userstories = []
    userstories_db = MUserStory.objects(user_id=user.pk, 
                                        feed_id=feed.pk,
                                        read_date__gte=usersub.mark_read_date)
    for us in userstories_db:
        if hasattr(us.story, 'story_guid') and isinstance(us.story.story_guid, unicode):
            userstories.append(us.story.story_guid)
        elif hasattr(us.story, 'id') and isinstance(us.story.id, unicode):
            userstories.append(us.story.id) # TODO: Remove me after migration from story.id->guid
            
    for story in stories:
        classifier_feeds.rewind()
        classifier_authors.rewind()
        classifier_tags.rewind()
        classifier_titles.rewind()
        if story['id'] in userstories:
            story['read_status'] = 1
        elif not story.get('read_status') and story['story_date'] < usersub.mark_read_date:
            story['read_status'] = 1
        elif not story.get('read_status') and story['story_date'] > usersub.last_read_date:
            story['read_status'] = 0
        story['intelligence'] = {
            'feed': apply_classifier_feeds(classifier_feeds, feed),
            'author': apply_classifier_authors(classifier_authors, story),
            'tags': apply_classifier_tags(classifier_tags, story),
            'title': apply_classifier_titles(classifier_titles, story),
        }
    
    # Intelligence
    feed_tags = json.decode(feed.popular_tags) if feed.popular_tags else []
    feed_authors = json.decode(feed.popular_authors) if feed.popular_authors else []
    classifiers = get_classifiers_for_user(user, feed_id, classifier_feeds, 
                                           classifier_authors, classifier_titles, classifier_tags)
    
    usersub.feed_opens += 1
    usersub.save()
    
    diff = datetime.datetime.utcnow() - now
    timediff = diff.total_seconds()  # fractional seconds; avoids mis-stitching microseconds
    logging.info(" ---> [%s] Loading feed: %s (%s seconds)" % (request.user, feed, timediff))
    FeedLoadtime.objects.create(feed=feed, loadtime=timediff)
    
    last_update = relative_timesince(feed.last_update)
    data = dict(stories=stories, 
                feed_tags=feed_tags, 
                feed_authors=feed_authors, 
                classifiers=classifiers,
                last_update=last_update,
                feed_id=feed.pk)
    return data
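This variant replaces the bare UserSubscription.objects.get() of Example no. 22 with a try/except that creates the missing subscription. A hedged equivalent (an assumption about intent, not the upstream code) is Django's get_or_create, which performs the lookup and the fallback creation in one call:

# Sketch: fetch the subscription, creating it if the user somehow lacks one;
# `created` tells us whether the FIXME path above was taken.
usersub, created = UserSubscription.objects.get_or_create(user=user, feed=feed)
if created:
    logging.info(" ***> [%s] UserSub DNE, creating: %s" % (user, feed))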