def test_load_feeds__gothamist(self):
    """Fetching the Gothamist fixture twice must not duplicate stories."""
    self.client.login(username='******', password='******')

    management.call_command('loaddata', 'gothamist_aug_2009_1.json', verbosity=0)
    feed = Feed.objects.get(feed_link__contains='gothamist')
    self.assertEquals(MStory.objects(story_feed_id=feed.pk).count(), 0)

    management.call_command('refresh_feed', force=1, feed=4, single_threaded=True, daemonize=False)
    self.assertEquals(MStory.objects(story_feed_id=feed.pk).count(), 42)

    response = self.client.post('/reader/load_single_feed', {"feed_id": 4})
    payload = json.decode(response.content)
    self.assertEquals(len(payload['stories']), 30)

    # Second fixture changes one character in a title; the refetch must
    # update in place rather than add new stories.
    management.call_command('loaddata', 'gothamist_aug_2009_2.json', verbosity=0)
    management.call_command('refresh_feed', force=1, feed=4, single_threaded=True, daemonize=False)
    self.assertEquals(MStory.objects(story_feed_id=feed.pk).count(), 42)

    response = self.client.get('/reader/load_single_feed', {"feed_id": 4})
    payload = json.decode(response.content)
    self.assertEquals(len(payload['stories']), 30)
def get_view_setting(request):
    """Return the saved view setting for one feed from the user's profile."""
    feed_id = request.POST['feed_id']
    view_settings = json.decode(request.user.profile.view_settings)
    return dict(code=1, payload=view_settings.get(feed_id))
def get_preference(request):
    """Look up a single named preference stored on the user's profile."""
    name = request.POST['preference']
    preferences = json.decode(request.user.profile.preferences)
    return dict(code=1, payload=preferences.get(name))
def delete_feed(request):
    """Drop a feed subscription and scrub the feed from the folder tree."""
    feed_id = int(request.POST['feed_id'])
    user_sub = get_object_or_404(UserSubscription, user=request.user, feed=feed_id)
    user_sub.delete()
    MUserStory.objects(user_id=request.user.pk, feed_id=feed_id).delete()

    def _prune(old_folders):
        # Rebuild the structure, omitting every occurrence of feed_id.
        pruned = []
        for index, item in enumerate(old_folders):
            if isinstance(item, int):
                if item == feed_id:
                    logging.info(" ---> [%s] Delete folder: %s'th item: %s folders/feeds" % (
                        request.user, index, len(old_folders)
                    ))
                else:
                    pruned.append(item)
            elif isinstance(item, dict):
                for name, contents in item.items():
                    pruned.append({name: _prune(contents)})
        return pruned

    usf = UserSubscriptionFolders.objects.get(user=request.user)
    new_folders = _prune(json.decode(usf.folders))
    usf.folders = json.encode(new_folders)
    usf.save()
    return dict(code=1)
def delete_feed(self, feed_id, in_folder):
    """Remove feed_id from the folder named in_folder.

    The subscription itself is only deleted when the feed does not also
    appear somewhere else in the tree (a "multiple").
    """
    def _walk(old_folders, folder_name='', multiples_found=False, deleted=False):
        rebuilt = []
        for index, item in enumerate(old_folders):
            if isinstance(item, int):
                if (item == feed_id and (
                    (folder_name != in_folder) or
                    (folder_name == in_folder and deleted))):
                    # Same feed seen outside the target folder (or again after
                    # the deletion) -- the subscription must survive.
                    multiples_found = True
                    logging.info(" ---> [%s] Deleting feed, and a multiple has been found in '%s'" % (self.user, folder_name))
                if item == feed_id and folder_name == in_folder and not deleted:
                    logging.info(" ---> [%s] Delete feed: %s'th item: %s folders/feeds" % (
                        self.user, index, len(old_folders)
                    ))
                    deleted = True
                else:
                    rebuilt.append(item)
            elif isinstance(item, dict):
                for name, contents in item.items():
                    sub_tree, multiples_found, deleted = _walk(
                        contents, name, multiples_found, deleted)
                    rebuilt.append({name: sub_tree})
        return rebuilt, multiples_found, deleted

    folders = json.decode(self.folders)
    folders, multiples_found, deleted = _walk(folders)
    self.folders = json.encode(folders)
    self.save()

    if not multiples_found and deleted:
        user_sub = UserSubscription.objects.get(user=self.user, feed=feed_id)
        user_sub.delete()
        MUserStory.objects(user_id=self.user.pk, feed_id=feed_id).delete()
def bootstrap_stories():
    """Copy every relational Story row into Mongo as MStory documents.

    Best-effort migration: stories whose tags cannot be decoded, or whose
    save fails (e.g. duplicates), are skipped instead of aborting the run.
    """
    print("Mongo DB stories: %s" % MStory.objects().count())
    # db.stories.drop()
    print("Dropped! Mongo DB stories: %s" % MStory.objects().count())
    print("Stories: %s" % Story.objects.all().count())
    pprint(db.stories.index_information())

    feeds = Feed.objects.all().order_by('-average_stories_per_month')
    feed_count = feeds.count()
    for i, feed in enumerate(feeds, 1):
        print("%s/%s: %s (%s stories)" % (i, feed_count, feed, Story.objects.filter(story_feed=feed).count()))
        sys.stdout.flush()

        stories = Story.objects.filter(story_feed=feed).values()
        for story in stories:
            try:
                story['story_tags'] = json.decode(story['story_tags'])
            except Exception:
                # Bad/legacy tag blob -- skip this story. (Was a bare
                # except, which also swallowed KeyboardInterrupt.)
                continue
            # Strip relational-only columns before constructing the document.
            del story['id']
            del story['story_author_id']
            try:
                MStory(**story).save()
            except Exception:
                # Likely a duplicate or validation error; keep migrating.
                continue

    print("\nMongo DB stories: %s" % MStory.objects().count())
def get_feeds_trainer(request):
    """Build classifier payloads for every untrained, active subscription."""
    classifiers = []
    usersubs = (UserSubscription.objects
                                .filter(user=request.user)
                                .select_related('feed')
                                .order_by('-feed__stories_last_month'))
    for us in usersubs:
        # Only offer training for feeds that are untrained and still alive.
        if us.is_trained or not (us.feed.stories_last_month > 0):
            continue
        feed = us.feed
        classifiers.append({
            'classifiers': get_classifiers_for_user(request.user, feed.pk),
            'feed_id': feed.pk,
            'stories_last_month': feed.stories_last_month,
            'feed_tags': json.decode(feed.popular_tags) if feed.popular_tags else [],
            'feed_authors': json.decode(feed.popular_authors) if feed.popular_authors else [],
        })
    logging.info(" ---> [%s] Loading Trainer: %s feeds" % (request.user, len(classifiers)))
    return classifiers
def set_preference(request):
    """Persist a single named preference on the user's profile."""
    name = request.POST['preference']
    value = request.POST['value']
    prefs = json.decode(request.user.profile.preferences)
    prefs[name] = value
    request.user.profile.preferences = json.encode(prefs)
    request.user.profile.save()
    return dict(code=1)
def set_view_setting(request):
    """Persist the per-feed view setting on the user's profile."""
    feed_id = request.POST['feed_id']
    setting = request.POST['feed_view_setting']
    settings_map = json.decode(request.user.profile.view_settings)
    settings_map[feed_id] = setting
    request.user.profile.view_settings = json.encode(settings_map)
    request.user.profile.save()
    return dict(code=1)
def save_feed_order(request):
    """Replace the user's folder layout with the client-supplied ordering."""
    folders = request.POST.get('folders')
    if folders:
        # Test that folders can be JSON decoded
        folders_list = json.decode(folders)
        assert folders_list is not None
        logging.info(" ---> [%s] Feed re-ordering: %s folders/feeds" % (request.user, len(folders_list)))

        usf = UserSubscriptionFolders.objects.get(user=request.user)
        # Store the raw JSON string exactly as the client sent it.
        usf.folders = folders
        usf.save()
    return {}
def test_load_feeds__brokelyn__invalid_xml(self):
    """A feed whose XML is invalid should still yield parsed stories."""
    self.client.login(username='******', password='******')

    management.call_command('loaddata', 'brokelyn.json', verbosity=0)
    management.call_command('refresh_feed', force=1, feed=6, single_threaded=True, daemonize=False)

    response = self.client.post('/reader/load_single_feed', {"feed_id": 6})
    payload = json.decode(response.content)
    self.assertEquals(len(payload['stories']), 10)
def add_url(request):
    """Subscribe the user to a feed by URL, filing it under ``folder``.

    Returns a dict with ``code`` (1 success, -1 not a feed, -2 fetch error)
    and a human-readable ``message``.
    """
    code = 0
    message = ""  # always bound, even on unexpected flow
    url = request.POST['url']
    folder = request.POST['folder']
    feed = None
    logging.info(" ---> [%s] Adding URL: %s (in %s)" % (request.user, url, folder))

    if url:
        url = urlnorm.normalize(url)
        # See if it exists as a duplicate first
        duplicate_feed = DuplicateFeed.objects.filter(duplicate_address=url)
        if duplicate_feed:
            feed = [duplicate_feed[0].feed]
        else:
            feed = Feed.objects.filter(feed_address=url)
        if feed:
            feed = feed[0]
        else:
            try:
                feed = fetch_address_from_page(url)
            except Exception:
                code = -2
                message = "This feed has been added, but something went wrong"\
                    " when downloading it. Maybe the server's busy."

    if not feed:
        # Don't clobber the more specific -2 fetch-failure code/message
        # (the original unconditionally overwrote it with -1 here).
        if code != -2:
            code = -1
            message = "That URL does not point to an RSS feed or a website that has an RSS feed."
    else:
        us, _ = UserSubscription.objects.get_or_create(
            feed=feed,
            user=request.user,
            defaults={'needs_unread_recalc': True}
        )
        code = 1
        message = ""
        user_sub_folders_object, created = UserSubscriptionFolders.objects.get_or_create(
            user=request.user,
            defaults={'folders': '[]'}
        )
        if created:
            user_sub_folders = []
        else:
            user_sub_folders = json.decode(user_sub_folders_object.folders)
        user_sub_folders = _add_object_to_folder(feed.pk, folder, user_sub_folders)
        user_sub_folders_object.folders = json.encode(user_sub_folders)
        user_sub_folders_object.save()

    return dict(code=code, message=message)
def load_feeds_iphone(request):
    """Flatten the user's folder tree into {folder path: [feed dicts]}.

    Top-level feeds are filed under a single-space pseudo-folder name.
    """
    user = get_user(request)
    feeds = {}
    try:
        folders = UserSubscriptionFolders.objects.get(user=user)
    except UserSubscriptionFolders.DoesNotExist:
        return dict(folders=[])

    user_subs = UserSubscription.objects.select_related('feed').filter(user=user)
    for sub in user_subs:
        if sub.needs_unread_recalc:
            sub.calculate_feed_scores()
        feeds[sub.feed.pk] = {
            'id': sub.feed.pk,
            'feed_title': sub.feed.feed_title,
            'feed_link': sub.feed.feed_link,
            'ps': sub.unread_count_positive,
            'nt': sub.unread_count_neutral,
            'ng': sub.unread_count_negative,
        }

    folders = json.decode(folders.folders)
    flat_folders = {}

    def make_feeds_folder(items, parent_folder="", depth=0):
        for item in items:
            if isinstance(item, int):
                feed = feeds[item]
                # Use a separate key variable instead of rebinding
                # parent_folder: the original set parent_folder = ' ' here,
                # which polluted the computed names of all later sibling
                # folders at the same level (" - Name" instead of "Name").
                folder_key = parent_folder if parent_folder else ' '
                if folder_key in flat_folders:
                    flat_folders[folder_key].append(feed)
                else:
                    flat_folders[folder_key] = [feed]
            elif isinstance(item, dict):
                for folder_name in item:
                    flat_folder_name = "%s%s%s" % (
                        parent_folder,
                        " - " if parent_folder else "",
                        folder_name
                    )
                    make_feeds_folder(item[folder_name], flat_folder_name, depth+1)

    make_feeds_folder(folders)
    return dict(flat_folders=flat_folders)
def test_google_reader_import(self):
    """Importing a Google Reader export creates all subs and folders."""
    self.client.login(username='******', password='******')
    user = User.objects.get(username='******')

    with open(os.path.join(os.path.dirname(__file__), 'fixtures/google_reader.xml')) as f:
        xml = f.read()
    reader_importer = GoogleReaderImporter(xml, user)
    reader_importer.process()

    subs = UserSubscription.objects.filter(user=user)
    self.assertEquals(subs.count(), 66)

    usf = UserSubscriptionFolders.objects.get(user=user)
    self.assertEquals(json.decode(usf.folders), [
        {u'Blogs \u2014 The Bloglets': [6, 16, 22, 35, 51, 56]},
        {u'Blogs': [1, 3, 25, 29, 30, 39, 40, 41, 50, 55, 57, 58, 59, 60, 66]},
        {u'Cooking': [11, 15, 42, 43, 46]},
        {u'New York': [7, 8, 17, 18, 19, 36, 45, 47, 52, 61]},
        {u'Tech': [2, 4, 9, 10, 12, 13, 14, 20, 23, 24, 26, 27, 28, 31, 32, 33, 34, 48, 49, 62, 64]},
        {u'Blogs \u2014 Tumblrs': [5, 21, 37, 38, 53, 54, 63, 65]},
        44,
    ])
def test_load_single_feed(self):
    """Loading a single feed should record DB queries (DEBUG required)."""
    from django.conf import settings
    from django.db import connection

    settings.DEBUG = True  # connection.queries is only populated under DEBUG
    connection.queries = []
    try:
        self.client.login(username="******", password="******")
        response = self.client.get(reverse("load-single-feed"), {"feed_id": 56})
        feed = json.decode(response.content)
        pprint(connection.queries)
        self.assert_(connection.queries)
    finally:
        # Restore DEBUG even when an assertion above fails; the original
        # leaked DEBUG=True into every subsequent test on failure.
        settings.DEBUG = False
def exception_retry(request):
    """Clear a feed's exception flags and force an immediate refetch."""
    feed_id = request.POST['feed_id']
    reset_fetch = json.decode(request.POST['reset_fetch'])
    feed = get_object_or_404(Feed, pk=feed_id)

    feed.next_scheduled_update = datetime.datetime.now()
    feed.has_page_exception = False
    feed.has_feed_exception = False

    if reset_fetch:
        logging.info(' ---> [%s] Refreshing exception feed: %s' % (request.user, feed))
        # Treat the feed as never fetched so the fetcher starts from scratch.
        feed.fetched_once = False
    else:
        logging.info(' ---> [%s] Forcing refreshing feed: %s' % (request.user, feed))

    feed.save()
    feed.update(force=True)
    return {'code': 1}
def load_feeds(request):
    """Return all subscribed feeds (with unread counts) plus the folder tree."""
    user = get_user(request)
    feeds = {}
    not_yet_fetched = False

    try:
        folders = UserSubscriptionFolders.objects.get(user=user)
    except UserSubscriptionFolders.DoesNotExist:
        return dict(feeds=[], folders=[])

    user_subs = UserSubscription.objects.select_related('feed').filter(user=user)
    for sub in user_subs:
        feed = sub.feed
        info = {
            'id': feed.pk,
            'feed_title': feed.feed_title,
            'feed_link': feed.feed_link,
            'ps': sub.unread_count_positive,
            'nt': sub.unread_count_neutral,
            'ng': sub.unread_count_negative,
            'updated': format_relative_date(feed.last_update),
        }
        if not feed.fetched_once:
            not_yet_fetched = True
            info['not_yet_fetched'] = True
        if feed.has_page_exception or feed.has_feed_exception:
            info['has_exception'] = True
            info['exception_type'] = 'feed' if feed.has_feed_exception else 'page'
            info['feed_address'] = feed.feed_address
            info['exception_code'] = feed.exception_code
        feeds[feed.pk] = info

    if not_yet_fetched:
        # If any feed is pending its first fetch, mark the rest explicitly
        # so the client can distinguish all states.
        for pk in feeds:
            if 'not_yet_fetched' not in feeds[pk]:
                feeds[pk]['not_yet_fetched'] = False

    return dict(feeds=feeds, folders=json.decode(folders.folders))
def add_folder(request):
    """Create a new, empty folder inside parent_folder."""
    folder = request.POST['folder']
    parent_folder = request.POST['parent_folder']
    logging.info(" ---> [%s] Adding Folder: %s (in %s)" % (request.user, folder, parent_folder))

    if not folder:
        return dict(code=-1, message="Gotta write in a folder name.")

    usf, _ = UserSubscriptionFolders.objects.get_or_create(user=request.user)
    existing = json.decode(usf.folders) if usf.folders else []
    existing = _add_object_to_folder({folder: []}, parent_folder, existing)
    usf.folders = json.encode(existing)
    usf.save()
    return dict(code=1, message="")
def merge_feeds(original_feed_id, duplicate_feed_id): from apps.reader.models import UserSubscription, UserSubscriptionFolders, MUserStory from apps.analyzer.models import MClassifierTitle, MClassifierAuthor, MClassifierFeed, MClassifierTag try: original_feed = Feed.objects.get(pk=original_feed_id) duplicate_feed = Feed.objects.get(pk=duplicate_feed_id) except Feed.DoesNotExist: logging.info(" ***> Already deleted feed: %s" % duplicate_feed_id) return logging.info(" ---> Feed: [%s - %s] %s - %s" % (original_feed_id, duplicate_feed_id, original_feed, original_feed.feed_link)) logging.info(" --> %s" % original_feed.feed_address) logging.info(" --> %s" % duplicate_feed.feed_address) user_subs = UserSubscription.objects.filter(feed=duplicate_feed) for user_sub in user_subs: # Rewrite feed in subscription folders try: user_sub_folders = UserSubscriptionFolders.objects.get(user=user_sub.user) except Exception, e: logging.info(" *** ---> UserSubscriptionFolders error: %s" % e) continue # Switch to original feed for the user subscription logging.info(" ===> %s " % user_sub.user) user_sub.feed = original_feed user_sub.needs_unread_recalc = True try: user_sub.save() folders = json.decode(user_sub_folders.folders) folders = rewrite_folders(folders, original_feed, duplicate_feed) user_sub_folders.folders = json.encode(folders) user_sub_folders.save() except (IntegrityError, OperationError): logging.info(" !!!!> %s already subscribed" % user_sub.user) user_sub.delete()
def test_load_feeds__gawker(self):
    """Refetching Gawker with a slightly-changed fixture must not duplicate."""
    self.client.login(username='******', password='******')

    management.call_command('loaddata', 'gawker1.json', verbosity=0)
    feed = Feed.objects.get(feed_link__contains='gawker')
    self.assertEquals(MStory.objects(story_feed_id=feed.pk).count(), 0)

    management.call_command('refresh_feed', force=1, feed=1, single_threaded=True, daemonize=False)
    self.assertEquals(MStory.objects(story_feed_id=feed.pk).count(), 38)

    # Second fixture changes one character of content; count must hold.
    management.call_command('loaddata', 'gawker2.json', verbosity=0)
    management.call_command('refresh_feed', force=1, feed=1, single_threaded=True, daemonize=False)
    self.assertEquals(MStory.objects(story_feed_id=feed.pk).count(), 38)

    response = self.client.post('/reader/load_single_feed', {"feed_id": 1})
    payload = json.decode(response.content)
    self.assertEquals(len(payload['stories']), 30)
def load_feed_statistics(request):
    """Assemble update-timing, story-count, and subscriber stats for a feed."""
    feed = get_object_or_404(Feed, pk=request.GET['feed_id'])
    stats = dict()

    # Dates of last and next update
    stats['last_update'] = relative_timesince(feed.last_update)
    stats['next_update'] = relative_timeuntil(feed.next_scheduled_update)

    # Minutes between updates (the random jitter factor is unused here)
    update_interval_minutes, _ = feed.get_next_scheduled_update()
    stats['update_interval_minutes'] = update_interval_minutes

    # Stories per month - average and month-by-month breakout
    stats['average_stories_per_month'] = feed.average_stories_per_month
    stats['story_count_history'] = feed.story_count_history and json.decode(feed.story_count_history)

    # Subscribers
    stats['subscriber_count'] = feed.num_subscribers

    logging.info(" ---> [%s] Statistics: %s" % (request.user, feed))
    return stats
def delete_folder(self, folder_to_delete, in_folder, feed_ids_in_folder):
    """Remove the named folder (located inside in_folder) from the tree.

    Any of feed_ids_in_folder that also appear elsewhere in the tree are
    spared; the rest are unsubscribed.
    """
    def _walk(old_folders, folder_name, feeds_to_delete):
        rebuilt = []
        for item in old_folders:
            if isinstance(item, int):
                rebuilt.append(item)
                if item in feeds_to_delete:
                    # Feed lives outside the doomed folder too; spare it.
                    feeds_to_delete.remove(item)
            elif isinstance(item, dict):
                for name, contents in item.items():
                    if name == folder_to_delete and folder_name == in_folder:
                        # Drop this subtree entirely (do not recurse into it,
                        # so its feeds stay in feeds_to_delete).
                        logging.info(" ---> [%s] Deleting folder '%s' in '%s': %s" % (self.user, name, folder_name, item))
                    else:
                        sub_tree, feeds_to_delete = _walk(contents, name, feeds_to_delete)
                        rebuilt.append({name: sub_tree})
        return rebuilt, feeds_to_delete

    folders = json.decode(self.folders)
    folders, feeds_to_delete = _walk(folders, '', feed_ids_in_folder)
    self.folders = json.encode(folders)
    self.save()

    UserSubscription.objects.filter(user=self.user, feed__in=feeds_to_delete).delete()
def save_feed_story_history_statistics(self, lock=None, current_counts=None):
    """
    Fills in missing months between earlier occurances and now.

    Save format: [('YYYY-MM, #), ...]
    Example output: [(2010-12, 123), (2011-01, 146)]
    """
    now = datetime.datetime.now()
    min_year = now.year
    total = 0
    month_count = 0

    if not current_counts:
        current_counts = self.story_count_history and json.decode(self.story_count_history)
    if not current_counts:
        current_counts = []

    # Count stories, aggregate by year and month. Map Reduce!
    map_f = """
        function() {
            var date = (this.story_date.getFullYear()) + "-" + (this.story_date.getMonth()+1);
            emit(date, 1);
        }
    """
    reduce_f = """
        function(key, values) {
            var total = 0;
            for (var i=0; i < values.length; i++) {
                total += values[i];
            }
            return total;
        }
    """
    month_totals = {}
    res = MStory.objects(story_feed_id=self.pk).map_reduce(map_f, reduce_f)
    for r in res:
        month_totals[r.key] = r.value

    # Add on to existing months, always amending up, never down. (Current month
    # is guaranteed to be accurate, since trim_feeds won't delete it until after
    # a month. Hacker News can have 1,000+ and still be counted.)
    for current_month, current_count in current_counts:
        if current_month not in month_totals or month_totals[current_month] < current_count:
            month_totals[current_month] = current_count
        year = int(re.findall(r"(\d{4})-\d{1,2}", current_month)[0])
        if year < min_year:
            min_year = year

    # Assemble a list with 0's filled in for missing months,
    # trimming left and right 0's.
    months = []
    started = False
    for year in range(min_year, now.year + 1):
        for month in range(1, 12 + 1):
            if datetime.datetime(year, month, 1) < now:
                key = u'%s-%s' % (year, month)
                if month_totals.get(key) or started:
                    started = True
                    months.append((key, month_totals.get(key, 0)))
                    total += month_totals.get(key, 0)
                    month_count += 1

    self.story_count_history = json.encode(months)
    if not total:
        self.average_stories_per_month = 0
    else:
        # NOTE: Python 2 integer division -- average is truncated on purpose.
        self.average_stories_per_month = total / month_count
    self.save(lock)
def test_delete_feed(self):
    """Deleting feeds one at a time prunes them from the folder tree."""
    self.client.login(username="******", password="******")

    response = self.client.get(reverse("load-feeds"))
    feeds = json.decode(response.content)
    self.assertEquals(len(feeds["folders"]), 5)
    self.assertTrue(1 in feeds["folders"])
    self.assertEquals(feeds["folders"],
                      [1, {u"Tech": [4, 5, {u"Deep Tech": [6, 7]}]}, 2, 3, {u"Blogs": [8, 9]}])

    def delete_and_verify(feed_id, expected_folders):
        # Delete one feed, then confirm the reloaded tree matches exactly.
        response = self.client.post(reverse("delete-feed"), {"feed_id": feed_id})
        self.assertEquals(json.decode(response.content)["code"], 1)
        response = self.client.get(reverse("load-feeds"))
        feeds = json.decode(response.content)
        self.assertEquals(len(feeds["folders"]), 4)
        self.assertTrue(1 not in feeds["folders"])
        self.assertEquals(feeds["folders"], expected_folders)

    delete_and_verify(1, [{u"Tech": [4, 5, {u"Deep Tech": [6, 7]}]}, 2, 3, {u"Blogs": [8, 9]}])
    delete_and_verify(9, [{u"Tech": [4, 5, {u"Deep Tech": [6, 7]}]}, 2, 3, {u"Blogs": [8]}])
    delete_and_verify(5, [{u"Tech": [4, {u"Deep Tech": [6, 7]}]}, 2, 3, {u"Blogs": [8]}])
    delete_and_verify(4, [{u"Tech": [{u"Deep Tech": [6, 7]}]}, 2, 3, {u"Blogs": [8]}])
    delete_and_verify(8, [{u"Tech": [{u"Deep Tech": [6, 7]}]}, 2, 3, {u"Blogs": []}])
def load_single_feed(request):
    """Return one feed's stories plus per-user read state and intelligence."""
    user = get_user(request)
    offset = int(request.REQUEST.get('offset', 0))
    limit = int(request.REQUEST.get('limit', 30))
    page = int(request.REQUEST.get('page', 0))
    if page:
        offset = limit * page
    feed_id = int(request.REQUEST['feed_id'])
    feed = Feed.objects.get(id=feed_id)
    force_update = request.GET.get('force_update', False)

    now = datetime.datetime.now()
    stories = feed.get_stories(offset, limit)

    if force_update:
        feed.update(force_update)

    # Get intelligence classifier for user
    classifier_feeds = MClassifierFeed.objects(user_id=user.pk, feed_id=feed_id)
    classifier_authors = MClassifierAuthor.objects(user_id=user.pk, feed_id=feed_id)
    classifier_titles = MClassifierTitle.objects(user_id=user.pk, feed_id=feed_id)
    classifier_tags = MClassifierTag.objects(user_id=user.pk, feed_id=feed_id)

    try:
        usersub = UserSubscription.objects.get(user=user, feed=feed)
    except UserSubscription.DoesNotExist:
        # FIXME: Why is this happening for `conesus` when logged into another account?!
        logging.info(" ***> [%s] UserSub DNE, creating: %s" % (user, feed))
        usersub = UserSubscription.objects.create(user=user, feed=feed)

    userstories = MUserStory.objects(user_id=user.pk, feed_id=feed.pk,
                                     read_date__gte=usersub.mark_read_date)
    userstories = [us.story.id for us in userstories]

    for story in stories:
        # Classifier cursors are shared across stories; rewind each pass.
        classifier_feeds.rewind()
        classifier_authors.rewind()
        classifier_tags.rewind()
        classifier_titles.rewind()
        if story.get('id') in userstories:
            story['read_status'] = 1
        elif not story.get('read_status') and story['story_date'] < usersub.mark_read_date:
            story['read_status'] = 1
        elif not story.get('read_status') and story['story_date'] > usersub.last_read_date:
            story['read_status'] = 0
        story['intelligence'] = {
            'feed': apply_classifier_feeds(classifier_feeds, feed),
            'author': apply_classifier_authors(classifier_authors, story),
            'tags': apply_classifier_tags(classifier_tags, story),
            'title': apply_classifier_titles(classifier_titles, story),
        }

    # Intelligence
    feed_tags = json.decode(feed.popular_tags) if feed.popular_tags else []
    feed_authors = json.decode(feed.popular_authors) if feed.popular_authors else []
    classifiers = get_classifiers_for_user(user, feed_id, classifier_feeds,
                                           classifier_authors, classifier_titles,
                                           classifier_tags)

    usersub.feed_opens += 1
    usersub.save()

    diff = datetime.datetime.now() - now
    logging.info(" ---> [%s] Loading feed: %s (%s.%s seconds)" % (
        request.user, feed, diff.seconds, diff.microseconds / 1000))

    return dict(stories=stories, feed_tags=feed_tags, feed_authors=feed_authors,
                classifiers=classifiers)