def canonical(self, full=False):
    """Serialize this Feed into the dict the client expects.

    With ``full=True``, also includes popular tags/authors and explicit
    no-exception fields. Exception info is only reported once the feed has
    been fetched at least once or an exception flag is set.
    """
    feed = {
        'id': self.pk,
        'feed_title': self.feed_title,
        'feed_address': self.feed_address,
        'feed_link': self.feed_link,
        'updated': relative_timesince(self.last_update),
        'subs': self.num_subscribers,
        'favicon': self.icon.data,
        'favicon_color': self.icon.color,
        # Still fetching if the icon has neither been found nor loaded yet.
        'favicon_fetching': bool(not (self.icon.not_found or self.icon.data))
    }
    if not self.fetched_once:
        feed['not_yet_fetched'] = True
    if self.has_page_exception or self.has_feed_exception:
        feed['has_exception'] = True
        # Feed exceptions take precedence over page exceptions.
        feed['exception_type'] = 'feed' if self.has_feed_exception else 'page'
        feed['exception_code'] = self.exception_code
    elif full:
        feed['has_exception'] = False
        feed['exception_type'] = None
        feed['exception_code'] = self.exception_code
    if full:
        # popular_tags/popular_authors are stored as JSON strings; may be empty.
        feed['feed_tags'] = json.decode(self.data.popular_tags) if self.data.popular_tags else []
        feed['feed_authors'] = json.decode(self.data.popular_authors) if self.data.popular_authors else []
    return feed
def load_feed_statistics(request, feed_id):
    """Build the statistics payload for a single feed.

    Refreshes subscriber counts, scheduling, story history and classifier
    counts, then reports update cadence (including a simulated premium
    cadence), story history, subscriber totals, and fetch histories.
    """
    user = get_user(request)
    stats = dict()
    feed = get_object_or_404(Feed, pk=feed_id)
    # Recompute cached statistics before reporting them.
    feed.count_subscribers()
    feed.set_next_scheduled_update(verbose=True, skip_scheduling=True)
    feed.save_feed_story_history_statistics()
    feed.save_classifier_counts()

    # Dates of last and next update
    stats['active'] = feed.active
    stats['last_update'] = relative_timesince(feed.last_update)
    if feed.is_push:
        stats['next_update'] = "real-time..."
    else:
        stats['next_update'] = relative_timeuntil(feed.next_scheduled_update)

    # Minutes between updates (0 for push feeds, which update in real time)
    update_interval_minutes, _ = feed.get_next_scheduled_update(force=True, verbose=False)
    if feed.is_push:
        stats['update_interval_minutes'] = 0
    else:
        stats['update_interval_minutes'] = update_interval_minutes
    # Temporarily bump premium counts to compute the cadence a premium
    # subscriber would see, then restore the real values (never saved).
    original_active_premium_subscribers = feed.active_premium_subscribers
    original_premium_subscribers = feed.premium_subscribers
    feed.active_premium_subscribers = max(feed.active_premium_subscribers+1, 1)
    feed.premium_subscribers += 1
    premium_update_interval_minutes, _ = feed.get_next_scheduled_update(force=True, verbose=False)
    feed.active_premium_subscribers = original_active_premium_subscribers
    feed.premium_subscribers = original_premium_subscribers
    if feed.is_push:
        stats['premium_update_interval_minutes'] = 0
    else:
        stats['premium_update_interval_minutes'] = premium_update_interval_minutes

    # Stories per month - average and month-by-month breakout
    average_stories_per_month, story_count_history = feed.average_stories_per_month, feed.data.story_count_history
    stats['average_stories_per_month'] = average_stories_per_month
    stats['story_count_history'] = story_count_history and json.decode(story_count_history)

    # Subscribers
    stats['subscriber_count'] = feed.num_subscribers
    stats['stories_last_month'] = feed.stories_last_month
    stats['last_load_time'] = feed.last_load_time
    stats['premium_subscribers'] = feed.premium_subscribers
    stats['active_subscribers'] = feed.active_subscribers
    stats['active_premium_subscribers'] = feed.active_premium_subscribers

    # Classifier counts
    stats['classifier_counts'] = json.decode(feed.data.feed_classifier_counts)

    # Fetch histories, localized to the requesting user's timezone
    timezone = user.profile.timezone
    stats['feed_fetch_history'] = MFeedFetchHistory.feed_history(feed_id, timezone=timezone)
    stats['page_fetch_history'] = MPageFetchHistory.feed_history(feed_id, timezone=timezone)
    stats['feed_push_history'] = MFeedPushHistory.feed_history(feed_id, timezone=timezone)

    logging.user(request, "~FBStatistics: ~SB%s ~FG(%s/%s/%s subs)" % (feed, feed.num_subscribers, feed.active_subscribers, feed.premium_subscribers,))

    return stats
def test_google_reader_import(self):
    """Import the Google Reader fixture and verify subscription count and
    the resulting folder structure.

    Fix: removed a leftover Python-2 debug ``print json.decode(usf.folders)``
    statement (stray test output, and a SyntaxError under Python 3), and use
    a context manager so the fixture file is closed even on failure.
    """
    self.client.login(username="******", password="******")
    user = User.objects.get(username="******")
    # Load the raw OPML/XML fixture exported from Google Reader.
    with open(os.path.join(os.path.dirname(__file__), "fixtures/google_reader.xml")) as f:
        xml = f.read()
    reader_importer = GoogleReaderImporter(user, xml=xml)
    reader_importer.import_feeds()
    subs = UserSubscription.objects.filter(user=user)
    self.assertEquals(subs.count(), 66)
    usf = UserSubscriptionFolders.objects.get(user=user)
    self.assertEquals(
        json.decode(usf.folders),
        [
            {u"Tech": [4, 5, 2, 9, 10, 12, 13, 14, 20, 23, 24, 26, 27, 28, 31, 32, 33, 34, 48, 49, 62, 64]},
            1, 2, 3, 6,
            {u"Blogs": [1, 3, 25, 29, 30, 39, 40, 41, 50, 55, 57, 58, 59, 60, 66]},
            {u"Blogs \u2014 Tumblrs": [5, 21, 37, 38, 53, 54, 63, 65]},
            {u"Blogs \u2014 The Bloglets": [6, 16, 22, 35, 51, 56]},
            {u"New York": [7, 8, 17, 18, 19, 36, 45, 47, 52, 61]},
            {u"Cooking": [11, 15, 42, 43, 46]},
            44,
        ],
    )
def test_load_feeds__gothamist(self):
    """Refresh the gothamist fixture feed twice and verify dedupe keeps the
    story count stable at 42 with 30 stories in the reader payload."""
    self.client.login(username='******', password='******')
    management.call_command('loaddata', 'gothamist_aug_2009_1.json', verbosity=0)
    feed = Feed.objects.get(feed_link__contains='gothamist')
    stories = MStory.objects(story_feed_id=feed.pk)
    self.assertEquals(stories.count(), 0)
    management.call_command('refresh_feed', force=1, feed=4, single_threaded=True, daemonize=False)
    stories = MStory.objects(story_feed_id=feed.pk)
    self.assertEquals(stories.count(), 42)
    response = self.client.post('/reader/feed', { "feed_id": 4 })
    content = json.decode(response.content)
    self.assertEquals(len(content['stories']), 30)
    # Second fixture contains near-duplicate stories; refreshing must not
    # inflate the story count.
    management.call_command('loaddata', 'gothamist_aug_2009_2.json', verbosity=0)
    management.call_command('refresh_feed', force=1, feed=4, single_threaded=True, daemonize=False)
    stories = MStory.objects(story_feed_id=feed.pk)
    self.assertEquals(stories.count(), 42)
    response = self.client.get('/reader/feed', { "feed_id": 4 })
    # print [c['story_title'] for c in json.decode(response.content)]
    content = json.decode(response.content)
    # Test: 1 changed char in title
    self.assertEquals(len(content['stories']), 30)
def test_load_feeds__gothamist(self):
    """Variant of the gothamist dedupe test using feed.update() and the
    named 'load-single-feed' URL; expects a 6-story page payload."""
    self.client.login(username='******', password='******')
    management.call_command('loaddata', 'gothamist_aug_2009_1.json', verbosity=0)
    feed = Feed.objects.get(feed_link__contains='gothamist')
    stories = MStory.objects(story_feed_id=feed.pk)
    self.assertEquals(stories.count(), 0)
    feed.update(force=True)
    stories = MStory.objects(story_feed_id=feed.pk)
    self.assertEquals(stories.count(), 42)
    url = reverse('load-single-feed', kwargs=dict(feed_id=4))
    response = self.client.get(url)
    content = json.decode(response.content)
    self.assertEquals(len(content['stories']), 6)
    # Second fixture contains near-duplicate stories; updating again must
    # not inflate the story count.
    management.call_command('loaddata', 'gothamist_aug_2009_2.json', verbosity=0)
    feed.update(force=True)
    stories = MStory.objects(story_feed_id=feed.pk)
    self.assertEquals(stories.count(), 42)
    url = reverse('load-single-feed', kwargs=dict(feed_id=4))
    response = self.client.get(url)
    # print [c['story_title'] for c in json.decode(response.content)]
    content = json.decode(response.content)
    # Test: 1 changed char in title
    self.assertEquals(len(content['stories']), 6)
def move_feed_to_folder(self, feed_id, in_folder=None, to_folder=None):
    """Move a feed out of ``in_folder`` and into ``to_folder``.

    Returns ``self`` so calls can be chained.

    Fix: the original decoded ``self.folders`` before ``delete_feed`` and
    immediately discarded the result (a dead store) — the structure must be
    re-read *after* ``delete_feed`` rewrites ``self.folders``.
    """
    # Remove the feed from its current folder; rewrites self.folders
    # without deleting the underlying subscription.
    self.delete_feed(feed_id, in_folder, commit_delete=False)
    # Re-read the post-deletion structure and insert into the destination.
    user_sub_folders = json.decode(self.folders)
    user_sub_folders = add_object_to_folder(int(feed_id), to_folder, user_sub_folders)
    self.folders = json.encode(user_sub_folders)
    self.save()
    return self
def move_folder_to_folder(self, folder_name, in_folder=None, to_folder=None):
    """Move a whole folder (and its contents) from ``in_folder`` into
    ``to_folder``. Returns ``self`` for chaining.

    Fix: the original decoded ``self.folders`` before ``delete_folder`` and
    immediately discarded the result (a dead store) — the structure must be
    re-read *after* ``delete_folder`` rewrites ``self.folders``.
    """
    # Detach the folder (keeping its subscriptions); rewrites self.folders.
    deleted_folder = self.delete_folder(folder_name, in_folder, [], commit_delete=False)
    # Re-read the post-deletion structure and re-attach the folder dict.
    user_sub_folders = json.decode(self.folders)
    user_sub_folders = add_object_to_folder(deleted_folder, to_folder, user_sub_folders)
    self.folders = json.encode(user_sub_folders)
    self.save()
    return self
def move_folder_to_folder(self, folder_name, in_folder=None, to_folder=None):
    """Move a whole folder (and its contents) from ``in_folder`` into
    ``to_folder``, logging the move. Returns ``self`` for chaining.

    Fix: the original decoded ``self.folders`` before ``delete_folder`` and
    immediately discarded the result (a dead store) — the structure must be
    re-read *after* ``delete_folder`` rewrites ``self.folders``.
    """
    logging.user(self.user, "~FBMoving folder '~SB%s~SN' in '%s' to: ~SB%s" % (folder_name, in_folder, to_folder))
    # Detach the folder (keeping its subscriptions); rewrites self.folders.
    deleted_folder = self.delete_folder(folder_name, in_folder, [], commit_delete=False)
    # Re-read the post-deletion structure and re-attach the folder dict.
    user_sub_folders = json.decode(self.folders)
    user_sub_folders = add_object_to_folder(deleted_folder, to_folder, user_sub_folders)
    self.folders = json.encode(user_sub_folders)
    self.save()
    return self
def move_feed_to_folder(self, feed_id, in_folder=None, to_folder=None):
    """Move a feed out of ``in_folder`` and into ``to_folder``, logging
    the move. Returns ``self`` for chaining.

    Fix: the original decoded ``self.folders`` before ``delete_feed`` and
    immediately discarded the result (a dead store) — the structure must be
    re-read *after* ``delete_feed`` rewrites ``self.folders``.
    """
    logging.user(self.user, "~FBMoving feed '~SB%s~SN' in '%s' to: ~SB%s" % (feed_id, in_folder, to_folder))
    # Remove the feed from its current folder; rewrites self.folders
    # without deleting the underlying subscription.
    self.delete_feed(feed_id, in_folder, commit_delete=False)
    # Re-read the post-deletion structure and insert into the destination.
    user_sub_folders = json.decode(self.folders)
    user_sub_folders = add_object_to_folder(int(feed_id), to_folder, user_sub_folders)
    self.folders = json.encode(user_sub_folders)
    self.save()
    return self
def load_feed_statistics(request, feed_id):
    """Build the statistics payload for a single feed (newer variant using
    MFetchHistory and update_all_statistics).

    Reports update cadence (including a simulated premium cadence), story
    history, subscriber totals, classifier counts, and fetch histories.
    """
    user = get_user(request)
    stats = dict()
    feed = get_object_or_404(Feed, pk=feed_id)
    # Recompute cached statistics before reporting them.
    feed.update_all_statistics()
    feed.set_next_scheduled_update(verbose=True, skip_scheduling=True)
    feed.save_feed_story_history_statistics()
    feed.save_classifier_counts()

    # Dates of last and next update
    stats["active"] = feed.active
    stats["last_update"] = relative_timesince(feed.last_update)
    stats["next_update"] = relative_timeuntil(feed.next_scheduled_update)
    stats["push"] = feed.is_push

    # Minutes between updates
    update_interval_minutes = feed.get_next_scheduled_update(force=True, verbose=False)
    stats["update_interval_minutes"] = update_interval_minutes
    # Temporarily bump premium counts to compute the cadence a premium
    # subscriber would see, then restore the real values (never saved).
    original_active_premium_subscribers = feed.active_premium_subscribers
    original_premium_subscribers = feed.premium_subscribers
    feed.active_premium_subscribers = max(feed.active_premium_subscribers + 1, 1)
    feed.premium_subscribers += 1
    premium_update_interval_minutes = feed.get_next_scheduled_update(force=True, verbose=False)
    feed.active_premium_subscribers = original_active_premium_subscribers
    feed.premium_subscribers = original_premium_subscribers
    stats["premium_update_interval_minutes"] = premium_update_interval_minutes
    stats["errors_since_good"] = feed.errors_since_good

    # Stories per month - average and month-by-month breakout
    average_stories_per_month, story_count_history = feed.average_stories_per_month, feed.data.story_count_history
    stats["average_stories_per_month"] = average_stories_per_month
    stats["story_count_history"] = story_count_history and json.decode(story_count_history)

    # Subscribers
    stats["subscriber_count"] = feed.num_subscribers
    stats["stories_last_month"] = feed.stories_last_month
    stats["last_load_time"] = feed.last_load_time
    stats["premium_subscribers"] = feed.premium_subscribers
    stats["active_subscribers"] = feed.active_subscribers
    stats["active_premium_subscribers"] = feed.active_premium_subscribers

    # Classifier counts
    stats["classifier_counts"] = json.decode(feed.data.feed_classifier_counts)

    # Fetch histories, localized to the requesting user's timezone
    timezone = user.profile.timezone
    fetch_history = MFetchHistory.feed(feed_id, timezone=timezone)
    stats["feed_fetch_history"] = fetch_history["feed_fetch_history"]
    stats["page_fetch_history"] = fetch_history["page_fetch_history"]
    stats["feed_push_history"] = fetch_history["push_history"]

    logging.user(request, "~FBStatistics: ~SB%s" % (feed))

    return stats
def test_move_feeds_by_folder(self):
    """Move two feeds out of 'Humor' into 'Brainiacs & Opinion' via the
    move-feeds-by-folder endpoint and verify the folder layout before and
    after."""
    self.client.login(username='******', password='******')

    # Baseline folder structure from the fixtures.
    response = self.client.get(reverse('load-feeds'))
    feeds = json.decode(response.content)
    self.assertEquals(feeds['folders'], [5299728, 644144, 1187026, {"Brainiacs & Opinion": [569, 38, 3581, 183139, 1186180, 15]}, {"Science & Technology": [731503, 140145, 1272495, 76, 161, 39, {"Hacker": [5985150, 3323431]}]}, {"Humor": [212379, 3530, 5994357]}, {"Videos": [3240, 5168]}])

    # Move feeds by folder; the payload is a JSON list of [feed_id, folder]
    # pairs as the client would send it.
    response = self.client.post(reverse('move-feeds-by-folder-to-folder'), {u'feeds_by_folder': u'[\n [\n "5994357",\n "Humor"\n ],\n [\n "3530",\n "Humor"\n ]\n]', u'to_folder': u'Brainiacs & Opinion'})
    response = json.decode(response.content)
    self.assertEquals(response['code'], 1)

    # Both feeds should now live in 'Brainiacs & Opinion', leaving one in 'Humor'.
    response = self.client.get(reverse('load-feeds'))
    feeds = json.decode(response.content)
    self.assertEquals(feeds['folders'], [5299728, 644144, 1187026, {"Brainiacs & Opinion": [569, 38, 3581, 183139, 1186180, 15, 5994357, 3530]}, {"Science & Technology": [731503, 140145, 1272495, 76, 161, 39, {"Hacker": [5985150, 3323431]}]}, {"Humor": [212379]}, {"Videos": [3240, 5168]}])
def test_delete_feed__multiple_folders(self):
    """Delete a feed that appears in several folders and verify every
    top-level occurrence is removed while folder copies remain."""
    self.client.login(username='******', password='******')

    # Baseline: feed 1 appears at top level and inside 'Tech'.
    response = self.client.get(reverse('load-feeds'))
    feeds = json.decode(response.content)
    self.assertEquals(feeds['folders'], [{'Tech': [1, 4, 5, {'Deep Tech': [6, 7]}]}, 2, 3, 8, 9, {'Blogs': [8, 9]}, 1])

    # Delete feed
    response = self.client.post(reverse('delete-feed'), {'feed_id': 1})
    response = json.decode(response.content)
    self.assertEquals(response['code'], 1)

    # Top-level copy of feed 1 is gone; the copy inside 'Tech' remains.
    response = self.client.get(reverse('load-feeds'))
    feeds = json.decode(response.content)
    self.assertEquals(feeds['folders'], [2, 3, 8, 9, {'Tech': [1, 4, 5, {'Deep Tech': [6, 7]}]}, {'Blogs': [8, 9]}])
def delete_folder(self, folder_to_delete, in_folder, feed_ids_in_folder, commit_delete=True):
    """Remove ``folder_to_delete`` (located inside ``in_folder``) from the
    user's folder tree.

    ``feed_ids_in_folder`` lists feed ids expected inside the folder; top-level
    occurrences of those feeds found while walking are also pruned from the
    deletion list. When ``commit_delete`` is True, the matching
    UserSubscriptions are deleted as well. Returns the removed folder dict
    (or None if not found), e.g. so callers can re-attach it elsewhere.
    """
    def _find_folder_in_folders(old_folders, folder_name, feeds_to_delete, deleted_folder=None):
        # Recursively rebuild the folder list, skipping the folder being
        # deleted. ``folder_name`` is the name of the folder currently being
        # walked ('' at the top level).
        new_folders = []
        for k, folder in enumerate(old_folders):
            if isinstance(folder, int):
                # Feed id: always kept here; deduplicate the deletion list.
                new_folders.append(folder)
                if folder in feeds_to_delete:
                    feeds_to_delete.remove(folder)
            elif isinstance(folder, dict):
                for f_k, f_v in folder.items():
                    if f_k == folder_to_delete and folder_name == in_folder:
                        # Match: drop this folder (do not append) and remember it.
                        logging.user(self.user, "~FBDeleting folder '~SB%s~SN' in '%s': %s" % (f_k, folder_name, folder))
                        deleted_folder = folder
                    else:
                        # Recurse into the sub-folder and keep the rebuilt copy.
                        nf, feeds_to_delete, deleted_folder = _find_folder_in_folders(f_v, f_k, feeds_to_delete, deleted_folder)
                        new_folders.append({f_k: nf})
        return new_folders, feeds_to_delete, deleted_folder

    user_sub_folders = json.decode(self.folders)
    user_sub_folders, feeds_to_delete, deleted_folder = _find_folder_in_folders(user_sub_folders, '', feed_ids_in_folder)
    self.folders = json.encode(user_sub_folders)
    self.save()

    if commit_delete:
        # Also drop the subscriptions for feeds that lived only in this folder.
        UserSubscription.objects.filter(user=self.user, feed__in=feeds_to_delete).delete()
    return deleted_folder
def test_google_reader_import(self):
    """Older variant of the Google Reader import test using
    GoogleReaderImporter(xml, user).process(); verifies subscription count
    and folder structure."""
    self.client.login(username='******', password='******')
    user = User.objects.get(username='******')

    # Read the raw Google Reader export fixture.
    f = open(
        os.path.join(os.path.dirname(__file__), 'fixtures/google_reader.xml'))
    xml = f.read()
    f.close()

    reader_importer = GoogleReaderImporter(xml, user)
    reader_importer.process()

    subs = UserSubscription.objects.filter(user=user)
    self.assertEquals(subs.count(), 66)
    usf = UserSubscriptionFolders.objects.get(user=user)
    self.assertEquals(json.decode(usf.folders), [{
        u'Blogs \u2014 The Bloglets': [6, 16, 22, 35, 51, 56]
    }, {
        u'Blogs': [1, 3, 25, 29, 30, 39, 40, 41, 50, 55, 57, 58, 59, 60, 66]
    }, {
        u'Cooking': [11, 15, 42, 43, 46]
    }, {
        u'New York': [7, 8, 17, 18, 19, 36, 45, 47, 52, 61]
    }, {
        u'Tech': [
            2, 4, 9, 10, 12, 13, 14, 20, 23, 24, 26, 27, 28, 31, 32, 33, 34,
            48, 49, 62, 64
        ]
    }, {
        u'Blogs \u2014 Tumblrs': [5, 21, 37, 38, 53, 54, 63, 65]
    }, 44])
def bootstrap_stories():
    # One-off Python 2 migration script: copy every relational Story row into
    # MongoDB as MStory documents, feed by feed, ordered by busiest feed first.
    # Best-effort by design — rows that fail to decode or save are skipped.
    print "Mongo DB stories: %s" % MStory.objects().count()
    # db.stories.drop()
    print "Dropped! Mongo DB stories: %s" % MStory.objects().count()
    print "Stories: %s" % Story.objects.all().count()
    pprint(db.stories.index_information())

    feeds = Feed.objects.all().order_by('-average_stories_per_month')
    feed_count = feeds.count()
    i = 0
    for feed in feeds:
        i += 1
        print "%s/%s: %s (%s stories)" % (
            i, feed_count, feed, Story.objects.filter(story_feed=feed).count())
        sys.stdout.flush()

        stories = Story.objects.filter(story_feed=feed).values()
        for story in stories:
            # story['story_tags'] = [tag.name for tag in Tag.objects.filter(story=story['id'])]
            try:
                # Tags are stored as a JSON string in the relational schema.
                story['story_tags'] = json.decode(story['story_tags'])
            except:
                continue
            # Drop relational-only columns before constructing the document.
            del story['id']
            del story['story_author_id']
            try:
                MStory(**story).save()
            except:
                continue

    print "\nMongo DB stories: %s" % MStory.objects().count()
def set_preference(request):
    """Persist posted user preferences.

    Single-field prefs are set directly on the profile; 'old_password' (a
    special pref) triggers a password change; everything else is merged into
    the JSON preferences blob (with "true"/"false" coerced to booleans).
    Returns a dict with code (-1 on bad password), message, and the posted prefs.
    """
    code = 1
    message = ''
    new_preferences = request.POST

    preferences = json.decode(request.user.profile.preferences)
    for preference_name, preference_value in new_preferences.items():
        if preference_name in SINGLE_FIELD_PREFS:
            setattr(request.user.profile, preference_name, preference_value)
        elif preference_name in SPECIAL_PREFERENCES:
            if (preference_name == 'old_password' and (new_preferences['old_password'] or new_preferences['new_password'])):
                code = change_password(request.user, new_preferences['old_password'], new_preferences['new_password'])
                if code == -1:
                    message = "Your old password is incorrect."
        else:
            # Form values arrive as strings; normalize booleans.
            if preference_value in ["true", "false"]:
                preference_value = True if preference_value == "true" else False
            preferences[preference_name] = preference_value

    request.user.profile.preferences = json.encode(preferences)
    request.user.profile.save()

    response = dict(code=code, message=message, new_preferences=new_preferences)
    return response
def set_view_setting(request):
    """Persist per-feed view/order/read-filter/layout settings into the
    user's JSON view_settings blob.

    Legacy entries that stored a bare string (just the view) are upgraded to
    the dict form {'v': ...} before merging.

    Fix: removed a leftover debug statement
    (``print 'KHong profile_views set_view_setting()'``) that spammed stdout
    on every request.
    """
    code = 1
    feed_id = request.POST['feed_id']
    feed_view_setting = request.POST.get('feed_view_setting')
    feed_order_setting = request.POST.get('feed_order_setting')
    feed_read_filter_setting = request.POST.get('feed_read_filter_setting')
    feed_layout_setting = request.POST.get('feed_layout_setting')
    view_settings = json.decode(request.user.profile.view_settings)

    setting = view_settings.get(feed_id, {})
    # Upgrade legacy string-valued entries to the dict form.
    if isinstance(setting, basestring):
        setting = {'v': setting}
    if feed_view_setting:
        setting['v'] = feed_view_setting
    if feed_order_setting:
        setting['o'] = feed_order_setting
    if feed_read_filter_setting:
        setting['r'] = feed_read_filter_setting
    if feed_layout_setting:
        setting['l'] = feed_layout_setting

    view_settings[feed_id] = setting
    request.user.profile.view_settings = json.encode(view_settings)
    request.user.profile.save()

    logging.user(request, "~FMView settings: %s/%s/%s/%s" % (feed_view_setting, feed_order_setting, feed_read_filter_setting, feed_layout_setting))

    response = dict(code=code)
    return response
def add_subscription(cls, user, feed_address, folder=None, bookmarklet=False, auto_active=True, skip_fetch=False):
    """Subscribe ``user`` to the feed at ``feed_address``, filing it under
    ``folder``.

    Returns (code, message, us): code is -1 with a human-readable message when
    no feed can be resolved from the URL, otherwise 1 with the (possibly newly
    created) UserSubscription. Side effects: creates/updates the user's folder
    structure, may trigger a feed fetch, records an MActivity, and promotes
    the feed's premium scheduling.
    """
    feed = None
    us = None

    logging.user(
        user, "~FRAdding URL: ~SB%s (in %s) %s" % (feed_address, folder, "~FCAUTO-ADD" if not auto_active else ""))

    feed = Feed.get_feed_from_url(feed_address)

    if not feed:
        code = -1
        if bookmarklet:
            message = "This site does not have an RSS feed. Nothing is linked to from this page."
        else:
            message = "This address does not point to an RSS feed or a website with an RSS feed."
    else:
        us, subscription_created = cls.objects.get_or_create(
            feed=feed, user=user, defaults={
                'needs_unread_recalc': True,
                'active': auto_active,
            })
        code = 1
        message = ""

    if us:
        # File the feed into the user's folder structure (creating an empty
        # structure for first-time subscribers).
        user_sub_folders_object, created = UserSubscriptionFolders.objects.get_or_create(
            user=user, defaults={'folders': '[]'})
        if created:
            user_sub_folders = []
        else:
            user_sub_folders = json.decode(user_sub_folders_object.folders)
        user_sub_folders = add_object_to_folder(feed.pk, folder, user_sub_folders)
        user_sub_folders_object.folders = json.encode(user_sub_folders)
        user_sub_folders_object.save()

        if auto_active or user.profile.is_premium:
            us.active = True
            us.save()

        # Refresh stale feeds (older than a day) unless the caller opts out.
        if not skip_fetch and feed.last_update < datetime.datetime.utcnow() - datetime.timedelta(days=1):
            feed = feed.update()

        from apps.social.models import MActivity
        MActivity.new_feed_subscription(user_id=user.pk, feed_id=feed.pk, feed_title=feed.title)

        feed.setup_feed_for_premium_subscribers()

    return code, message, us
def bootstrap_stories():
    # One-off Python 2 migration script (duplicate of the variant above with
    # different line wrapping): copy relational Story rows into MongoDB as
    # MStory documents. Best-effort — failing rows are skipped.
    print "Mongo DB stories: %s" % MStory.objects().count()
    # db.stories.drop()
    print "Dropped! Mongo DB stories: %s" % MStory.objects().count()
    print "Stories: %s" % Story.objects.all().count()
    pprint(db.stories.index_information())

    feeds = Feed.objects.all().order_by('-average_stories_per_month')
    feed_count = feeds.count()
    i = 0
    for feed in feeds:
        i += 1
        print "%s/%s: %s (%s stories)" % (i, feed_count, feed, Story.objects.filter(story_feed=feed).count())
        sys.stdout.flush()

        stories = Story.objects.filter(story_feed=feed).values()
        for story in stories:
            # story['story_tags'] = [tag.name for tag in Tag.objects.filter(story=story['id'])]
            try:
                # Tags are stored as a JSON string in the relational schema.
                story['story_tags'] = json.decode(story['story_tags'])
            except:
                continue
            # Drop relational-only columns before constructing the document.
            del story['id']
            del story['story_author_id']
            try:
                MStory(**story).save()
            except:
                continue

    print "\nMongo DB stories: %s" % MStory.objects().count()
def set_view_setting(request):
    """Persist per-feed view/order/read-filter/dashboard-count/layout
    settings into the user's JSON view_settings blob.

    Legacy entries that stored a bare string (just the view) are upgraded to
    the dict form {'v': ...} before merging.
    """
    code = 1
    feed_id = request.POST['feed_id']
    feed_view_setting = request.POST.get('feed_view_setting')
    feed_order_setting = request.POST.get('feed_order_setting')
    feed_read_filter_setting = request.POST.get('feed_read_filter_setting')
    feed_layout_setting = request.POST.get('feed_layout_setting')
    feed_dashboard_count_setting = request.POST.get(
        'feed_dashboard_count_setting')
    view_settings = json.decode(request.user.profile.view_settings)

    setting = view_settings.get(feed_id, {})
    # Upgrade legacy string-valued entries to the dict form.
    if isinstance(setting, str):
        setting = {'v': setting}
    if feed_view_setting:
        setting['v'] = feed_view_setting
    if feed_order_setting:
        setting['o'] = feed_order_setting
    if feed_read_filter_setting:
        setting['r'] = feed_read_filter_setting
    if feed_dashboard_count_setting:
        setting['d'] = feed_dashboard_count_setting
    if feed_layout_setting:
        setting['l'] = feed_layout_setting

    view_settings[feed_id] = setting
    request.user.profile.view_settings = json.encode(view_settings)
    request.user.profile.save()

    # NOTE(review): the log line omits feed_dashboard_count_setting even
    # though it is persisted above — confirm whether that's intentional.
    logging.user(
        request, "~FMView settings: %s/%s/%s/%s" %
        (feed_view_setting, feed_order_setting, feed_read_filter_setting,
         feed_layout_setting))

    response = dict(code=code)
    return response
def get_view_setting(request):
    """Return the stored view setting payload for a single feed."""
    requested_feed = request.POST['feed_id']
    settings_by_feed = json.decode(request.user.profile.view_settings)
    return {'code': 1, 'payload': settings_by_feed.get(requested_feed)}
def get_preference(request):
    """Return a single preference value from the user's preferences blob."""
    requested_pref = request.POST['preference']
    prefs = json.decode(request.user.profile.preferences)
    return {'code': 1, 'payload': prefs.get(requested_pref)}
def collect_feedback(cls):
    """Refresh the cached forum-feedback entries from the Discourse forum.

    Fetches the latest posts, deduplicates by topic, and stores up to 4
    entries (replacing all existing ones). Best-effort: any network failure
    is logged and the cache is left untouched.

    Fix: the original caught ``(urllib.error.HTTPError,
    requests.exceptions.ConnectTimeout)`` — ``requests`` never raises
    ``urllib.error.HTTPError``, and read timeouts / connection errors escaped
    the intended best-effort guard. ``RequestException`` is the common base
    of all requests failures.
    """
    seen_posts = set()
    try:
        data = requests.get('https://forum.newsblur.com/posts.json', timeout=3).content
    except requests.exceptions.RequestException as e:
        logging.debug(" ***> Failed to collect feedback: %s" % e)
        return
    data = json.decode(data).get('latest_posts', "")

    if not len(data):
        print("No data!")
        return

    # Replace the whole cache with the freshest posts.
    cls.objects.delete()
    post_count = 0
    for post in data:
        # Only keep the first post per topic.
        if post['topic_id'] in seen_posts:
            continue
        seen_posts.add(post['topic_id'])
        feedback = {}
        feedback['order'] = post_count
        post_count += 1
        feedback['date'] = dateutil.parser.parse(post['created_at']).replace(tzinfo=None)
        feedback['date_short'] = relative_date(feedback['date'])
        feedback['subject'] = post['topic_title']
        feedback['url'] = "https://forum.newsblur.com/t/%s/%s/%s" % (post['topic_slug'], post['topic_id'], post['post_number'])
        feedback['style'] = cls.CATEGORIES[post['category_id']]
        cls.objects.create(**feedback)
        # if settings.DEBUG:
        #     print("%s: %s (%s)" % (feedback['style'], feedback['subject'], feedback['date_short']))
        if post_count >= 4:
            break
def test_load_feeds__slashdot(self):
    """Refresh the slashdot fixture feed twice and verify dedupe keeps the
    story count stable at 38 with 30 stories in the reader payload."""
    self.client.login(username='******', password='******')
    management.call_command('loaddata', 'slashdot1.json', verbosity=0)
    feed = Feed.objects.get(feed_link__contains='slashdot')
    stories = MStory.objects(story_feed_id=feed.pk)
    self.assertEquals(stories.count(), 0)
    management.call_command('refresh_feed', force=1, feed=5, single_threaded=True, daemonize=False)
    stories = MStory.objects(story_feed_id=feed.pk)
    self.assertEquals(stories.count(), 38)
    # Second fixture contains near-duplicate stories; refreshing must not
    # inflate the story count.
    management.call_command('loaddata', 'slashdot2.json', verbosity=0)
    management.call_command('refresh_feed', force=1, feed=5, single_threaded=True, daemonize=False)
    stories = MStory.objects(story_feed_id=feed.pk)
    self.assertEquals(stories.count(), 38)
    response = self.client.post('/reader/load_single_feed', { "feed_id": 5 })
    # pprint([c['story_title'] for c in json.decode(response.content)])
    feed = json.decode(response.content)
    # Test: 1 changed char in title
    self.assertEquals(len(feed['stories']), 30)
def exception_retry(request):
    """Clear a feed's exception flags and force an immediate re-fetch.

    ``reset_fetch`` (JSON bool in POST) controls whether the feed is treated
    as never-fetched (full refresh) or as previously fetched (forced update).
    Returns the re-scored subscription in canonical form.
    """
    feed_id = request.POST['feed_id']
    reset_fetch = json.decode(request.POST['reset_fetch'])
    feed = get_object_or_404(Feed, pk=feed_id)

    # Clear exception state and schedule the feed for an immediate update.
    feed.next_scheduled_update = datetime.datetime.utcnow()
    feed.has_page_exception = False
    feed.has_feed_exception = False
    feed.active = True
    if reset_fetch:
        logging.user(request.user, "~FRRefreshing exception feed: ~SB%s" % (feed))
        feed.fetched_once = False
    else:
        logging.user(request.user, "~FRForcing refreshing feed: ~SB%s" % (feed))
        feed.fetched_once = True
    feed.save()

    # Fetch now; scores are recalculated separately below.
    feed = feed.update(force=True, compute_scores=False)
    usersub = UserSubscription.objects.get(user=request.user, feed=feed)
    usersub.calculate_feed_scores(silent=False)

    feeds = {feed.pk: usersub.canonical(full=True)}
    return {'code': 1, 'feeds': feeds}
def clear_view_setting(request):
    """Strip one class of per-feed view settings ('layout', 'view', or
    'order') from every feed in the user's view_settings blob.

    Returns the pre-clear settings plus how many keys were removed.
    """
    code = 1
    view_setting_type = request.POST.get('view_setting_type')
    view_settings = json.decode(request.user.profile.view_settings)
    new_view_settings = {}
    removed = 0
    for feed_id, view_setting in list(view_settings.items()):
        if view_setting_type == 'layout' and 'l' in view_setting:
            del view_setting['l']
            removed += 1
        if view_setting_type == 'view' and 'v' in view_setting:
            del view_setting['v']
            removed += 1
        if view_setting_type == 'order' and 'o' in view_setting:
            del view_setting['o']
            removed += 1
        # NOTE(review): this condition repeats 'order' but removes the read
        # filter key 'r' — looks like it should be a separate
        # 'read_filter' type (or is intentionally bundled with order).
        # Confirm against the client before changing.
        if view_setting_type == 'order' and 'r' in view_setting:
            del view_setting['r']
            removed += 1
        new_view_settings[feed_id] = view_setting

    request.user.profile.view_settings = json.encode(new_view_settings)
    request.user.profile.save()

    logging.user(
        request, "~FMClearing view settings: %s (found %s)" %
        (view_setting_type, removed))

    response = dict(code=code, view_settings=view_settings, removed=removed)
    return response
def collect_orphan_feeds(cls, user):
    """Re-attach subscribed feeds that are missing from the user's folder
    structure by appending them at the top level.

    No-op when the user has no folder structure or no orphans.
    """
    us = cls.objects.filter(user=user)
    try:
        usf = UserSubscriptionFolders.objects.get(user=user)
    except UserSubscriptionFolders.DoesNotExist:
        return
    us_feed_ids = set([sub.feed_id for sub in us])
    folders = json.decode(usf.folders)

    def collect_ids(folders, found_ids):
        # Walk the nested folder structure accumulating every feed id seen.
        # found_ids is mutated in place and also returned for the recursion.
        for item in folders:
            # print ' --> %s' % item
            if isinstance(item, int):
                # print ' --> Adding feed: %s' % item
                found_ids.add(item)
            elif isinstance(item, dict):
                # print ' --> Descending folder dict: %s' % item.values()
                found_ids.update(collect_ids(item.values(), found_ids))
            elif isinstance(item, list):
                # print ' --> Descending folder list: %s' % len(item)
                found_ids.update(collect_ids(item, found_ids))
        # print ' --> Returning: %s' % found_ids
        return found_ids

    found_ids = collect_ids(folders, set())
    diff = len(us_feed_ids) - len(found_ids)
    if diff > 0:
        # Orphans exist: subscriptions not represented anywhere in folders.
        logging.info(
            " ---> Collecting orphans on %s. %s feeds with %s orphans" %
            (user.username, len(us_feed_ids), diff))

        orphan_ids = us_feed_ids - found_ids
        folders.extend(list(orphan_ids))
        usf.folders = json.encode(folders)
        usf.save()
def set_preference(request):
    """Persist posted user preferences (Python 3 variant).

    Single-field prefs go directly on the profile; special prefs handle
    autofollow (via MSocialServices.get_user) and dashboard_date; everything
    else is merged into the JSON preferences blob with "true"/"false"
    coerced to booleans.
    """
    code = 1
    message = ''
    new_preferences = request.POST

    preferences = json.decode(request.user.profile.preferences)
    for preference_name, preference_value in list(new_preferences.items()):
        # Form values arrive as strings; normalize booleans up front.
        if preference_value in ['true','false']:
            preference_value = True if preference_value == 'true' else False
        if preference_name in SINGLE_FIELD_PREFS:
            setattr(request.user.profile, preference_name, preference_value)
        elif preference_name in SPECIAL_PREFERENCES:
            if preference_name == 'autofollow_friends':
                social_services = MSocialServices.get_user(request.user.pk)
                social_services.autofollow = preference_value
                social_services.save()
            elif preference_name == 'dashboard_date':
                request.user.profile.dashboard_date = datetime.datetime.utcnow()
        else:
            if preference_value in ["true", "false"]:
                preference_value = True if preference_value == "true" else False
            preferences[preference_name] = preference_value
            if preference_name == 'intro_page':
                logging.user(request, "~FBAdvancing intro to page ~FM~SB%s" % preference_value)

    request.user.profile.preferences = json.encode(preferences)
    request.user.profile.save()

    logging.user(request, "~FMSaving preference: %s" % new_preferences)
    response = dict(code=code, message=message, new_preferences=new_preferences)
    return response
def delete_folder(self, folder_to_delete, in_folder, feed_ids_in_folder, commit_delete=True):
    """Remove ``folder_to_delete`` from the user's folder tree.

    Variant that matches the folder in any parent when ``in_folder`` is None.
    ``feed_ids_in_folder`` lists feed ids expected inside the folder; feeds
    found elsewhere while walking are pruned from the deletion list. When
    ``commit_delete`` is True, the matching UserSubscriptions are deleted.
    Returns the removed folder dict (or None if not found).
    """
    def _find_folder_in_folders(old_folders, folder_name, feeds_to_delete, deleted_folder=None):
        # Recursively rebuild the folder list, skipping the folder being
        # deleted. ``folder_name`` is the folder currently being walked
        # ('' at the top level).
        new_folders = []
        for k, folder in enumerate(old_folders):
            if isinstance(folder, int):
                # Feed id: always kept here; deduplicate the deletion list.
                new_folders.append(folder)
                if folder in feeds_to_delete:
                    feeds_to_delete.remove(folder)
            elif isinstance(folder, dict):
                for f_k, f_v in folder.items():
                    if f_k == folder_to_delete and (folder_name == in_folder or in_folder is None):
                        # Match: drop this folder (do not append) and remember it.
                        logging.user(self.user, "~FBDeleting folder '~SB%s~SN' in '%s': %s" % (f_k, folder_name, folder))
                        deleted_folder = folder
                    else:
                        # Recurse into the sub-folder and keep the rebuilt copy.
                        nf, feeds_to_delete, deleted_folder = _find_folder_in_folders(f_v, f_k, feeds_to_delete, deleted_folder)
                        new_folders.append({f_k: nf})
        return new_folders, feeds_to_delete, deleted_folder

    user_sub_folders = json.decode(self.folders)
    user_sub_folders, feeds_to_delete, deleted_folder = _find_folder_in_folders(user_sub_folders, '', feed_ids_in_folder)
    self.folders = json.encode(user_sub_folders)
    self.save()

    if commit_delete:
        # Also drop the subscriptions for feeds that lived only in this folder.
        UserSubscription.objects.filter(user=self.user, feed__in=feeds_to_delete).delete()
    return deleted_folder
def test_load_feeds__gawker(self):
    """Refresh the gawker fixture feed twice and verify dedupe keeps the
    story count stable at 38 with a 6-story page payload."""
    self.client.login(username='******', password='******')
    management.call_command('loaddata', 'gawker1.json', verbosity=0)
    feed = Feed.objects.get(feed_link__contains='gawker')
    stories = MStory.objects(story_feed_id=feed.pk)
    self.assertEquals(stories.count(), 0)
    feed.update(force=True)
    stories = MStory.objects(story_feed_id=feed.pk)
    self.assertEquals(stories.count(), 38)
    # Second fixture contains near-duplicate stories; updating again must
    # not inflate the story count.
    management.call_command('loaddata', 'gawker2.json', verbosity=0)
    feed.update(force=True)
    # Test: 1 changed char in content
    stories = MStory.objects(story_feed_id=feed.pk)
    self.assertEquals(stories.count(), 38)
    url = reverse('load-single-feed', kwargs=dict(feed_id=1))
    response = self.client.get(url)
    feed = json.decode(response.content)
    self.assertEquals(len(feed['stories']), 6)
def set_preference(request):
    """Persist posted user preferences (variant using
    MSocialServices.objects.get_or_create for autofollow).

    Single-field prefs go directly on the profile; special prefs handle
    autofollow and dashboard_date; everything else is merged into the JSON
    preferences blob with "true"/"false" coerced to booleans.
    """
    code = 1
    message = ''
    new_preferences = request.POST

    preferences = json.decode(request.user.profile.preferences)
    for preference_name, preference_value in new_preferences.items():
        # Form values arrive as strings; normalize booleans up front.
        if preference_value in ['true','false']:
            preference_value = True if preference_value == 'true' else False
        if preference_name in SINGLE_FIELD_PREFS:
            setattr(request.user.profile, preference_name, preference_value)
        elif preference_name in SPECIAL_PREFERENCES:
            if preference_name == 'autofollow_friends':
                social_services, _ = MSocialServices.objects.get_or_create(user_id=request.user.pk)
                social_services.autofollow = preference_value
                social_services.save()
            elif preference_name == 'dashboard_date':
                request.user.profile.dashboard_date = datetime.datetime.utcnow()
        else:
            if preference_value in ["true", "false"]:
                preference_value = True if preference_value == "true" else False
            preferences[preference_name] = preference_value
            if preference_name == 'intro_page':
                logging.user(request, "~FBAdvancing intro to page ~FM~SB%s" % preference_value)

    request.user.profile.preferences = json.encode(preferences)
    request.user.profile.save()

    logging.user(request, "~FMSaving preference: %s" % new_preferences)
    response = dict(code=code, message=message, new_preferences=new_preferences)
    return response
def import_starred_items(self, count=10):
    """Import the user's starred stories from Google Reader.

    Pages through the starred-items stream using the API's continuation
    token. Note the loop only continues past the first page when
    ``count >= 1000`` — small counts deliberately fetch a single page.
    Returns the user's total starred-story count after import.
    """
    continuation = ""
    while True:
        # Build the stream URL; include the continuation token after the
        # first page.
        if continuation:
            sub_url = "%s/0/stream/contents/user/-/state/com.google/starred?n=%s&c=%s" % (
                self.scope,
                count,
                continuation,
            )
        else:
            sub_url = "%s/0/stream/contents/user/-/state/com.google/starred?n=%s" % (self.scope, count)
        stories_str = self.send_request(sub_url)
        try:
            stories = json.decode(stories_str)
            continuation = stories.get("continuation")
        except:
            # Bad/empty response: treat as "no stories" and keep going with
            # whatever continuation state we had.
            logging.user(self.user, "~BB~FW~SBGoogle Reader starred stories: ~BT~FWNo stories")
            stories = None
        if stories:
            logging.user(
                self.user, "~BB~FW~SBGoogle Reader starred stories: ~BT~FW%s stories" % (len(stories["items"]))
            )
            self.process_starred_items(stories["items"])
        if not continuation or count < 1000:
            break

    starred_count = MStarredStory.objects.filter(user_id=self.user.pk).count()

    return starred_count
def add_missing_feeds(self):
    """Repair both directions of folder/subscription drift.

    Feeds present in the folder structure but lacking a UserSubscription get
    one created (flagged for unread recalc); subscriptions missing from the
    folder structure are appended at the top level.
    """
    all_feeds = self.flat()
    subs = [us.feed_id for us in UserSubscription.objects.filter(user=self.user).only('feed')]

    # Folder entries with no matching subscription.
    missing_subs = set(all_feeds) - set(subs)
    if missing_subs:
        logging.debug(" ---> %s is missing %s subs. Adding %s..." % (
            self.user, len(missing_subs), missing_subs))
        for feed_id in missing_subs:
            feed = Feed.get_by_id(feed_id)
            if feed:
                us, _ = UserSubscription.objects.get_or_create(user=self.user, feed=feed, defaults={
                    'needs_unread_recalc': True
                })
                if not us.needs_unread_recalc:
                    us.needs_unread_recalc = True
                    us.save()

    # Subscriptions with no matching folder entry.
    missing_folder_feeds = set(subs) - set(all_feeds)
    if missing_folder_feeds:
        user_sub_folders = json.decode(self.folders)
        logging.debug(" ---> %s is missing %s folder feeds. Adding %s..." % (
            self.user, len(missing_folder_feeds), missing_folder_feeds))
        for feed_id in missing_folder_feeds:
            feed = Feed.get_by_id(feed_id)
            # Skip duplicate feeds that resolve to a different canonical pk.
            if feed and feed.pk == feed_id:
                user_sub_folders = add_object_to_folder(feed_id, "", user_sub_folders)
        self.folders = json.encode(user_sub_folders)
        self.save()
def set_view_setting(request):
    """Persist per-feed view/order/read-filter/layout settings for the user."""
    code = 1
    feed_id = request.POST["feed_id"]
    feed_view_setting = request.POST.get("feed_view_setting")
    feed_order_setting = request.POST.get("feed_order_setting")
    feed_read_filter_setting = request.POST.get("feed_read_filter_setting")
    feed_layout_setting = request.POST.get("feed_layout_setting")

    view_settings = json.decode(request.user.profile.view_settings)
    setting = view_settings.get(feed_id, {})
    # Legacy format stored a bare view string; upgrade it to a dict.
    if isinstance(setting, basestring):
        setting = {"v": setting}

    # Only overwrite the keys that were actually supplied in the POST.
    for key, value in (("v", feed_view_setting),
                       ("o", feed_order_setting),
                       ("r", feed_read_filter_setting),
                       ("l", feed_layout_setting)):
        if value:
            setting[key] = value

    view_settings[feed_id] = setting
    request.user.profile.view_settings = json.encode(view_settings)
    request.user.profile.save()

    logging.user(
        request,
        "~FMView settings: %s/%s/%s/%s"
        % (feed_view_setting, feed_order_setting, feed_read_filter_setting, feed_layout_setting),
    )
    response = dict(code=code)
    return response
def api_share_new_story(request):
    """IFTTT endpoint: share a story to the user's blurblog.

    Creates the feed and shared story if needed, recalculates follower
    unread counts, and marks the story read for the sharer.
    """
    user = request.user
    body = json.decode(request.body)
    fields = body.get('actionFields')
    story_url = fields['story_url']
    content = fields.get('story_content', "")
    story_title = fields.get('story_title', "[Untitled]")
    story_author = fields.get('story_author', "")
    comments = fields.get('comments', None)

    # Resolve (or create and fetch) the originating feed from the story URL.
    feed = Feed.get_feed_from_url(story_url, create=True, fetch=True)

    # Rewrite relative links in the shared content against the story URL.
    content = lxml.html.fromstring(content)
    content.make_links_absolute(story_url)
    content = lxml.html.tostring(content)

    shared_story = MSharedStory.objects.filter(user_id=user.pk,
                                               story_feed_id=feed and feed.pk or 0,
                                               story_guid=story_url).limit(1).first()
    if not shared_story:
        story_db = {
            "story_guid": story_url,
            "story_permalink": story_url,
            "story_title": story_title,
            "story_feed_id": feed and feed.pk or 0,
            "story_content": content,
            "story_author": story_author,
            "story_date": datetime.datetime.now(),
            "user_id": user.pk,
            "comments": comments,
            "has_comments": bool(comments),
        }
        shared_story = MSharedStory.objects.create(**story_db)
        # Followers need their unread counts recalculated for the new share.
        socialsubs = MSocialSubscription.objects.filter(subscription_user_id=user.pk)
        for socialsub in socialsubs:
            socialsub.needs_unread_recalc = True
            socialsub.save()
        logging.user(request, "~BM~FYSharing story from ~SB~FCIFTTT~FY: ~SB%s: %s" % (story_url, comments))
    else:
        logging.user(request, "~BM~FY~SBAlready~SN shared story from ~SB~FCIFTTT~FY: ~SB%s: %s" % (story_url, comments))

    # Mark the story read on the user's own blurblog subscription, if any.
    try:
        socialsub = MSocialSubscription.objects.get(user_id=user.pk,
                                                    subscription_user_id=user.pk)
    except MSocialSubscription.DoesNotExist:
        socialsub = None

    if socialsub:
        socialsub.mark_story_ids_as_read([shared_story.story_hash],
                                         shared_story.story_feed_id,
                                         request=request)
    else:
        RUserStory.mark_read(user.pk, shared_story.story_feed_id, shared_story.story_hash)

    shared_story.publish_update_to_subscribers()

    return {"data": [{
        "id": shared_story and shared_story.story_guid,
        "url": shared_story and shared_story.blurblog_permalink()
    }]}
def load_feed_statistics(request):
    """Return update-timing, story-volume, subscriber, and fetch-history
    statistics for the feed given by ?feed_id=."""
    stats = dict()
    feed_id = request.GET['feed_id']
    feed = get_object_or_404(Feed, pk=feed_id)
    # Recompute the month-by-month story history before reporting it.
    feed.save_feed_story_history_statistics()

    # Dates of last and next update
    stats['last_update'] = relative_timesince(feed.last_update)
    stats['next_update'] = relative_timeuntil(feed.next_scheduled_update)

    # Minutes between updates
    update_interval_minutes, random_factor = feed.get_next_scheduled_update()
    stats['update_interval_minutes'] = update_interval_minutes

    # Stories per month - average and month-by-month breakout
    average_stories_per_month, story_count_history = feed.average_stories_per_month, feed.data.story_count_history
    # story_count_history is a JSON string; may be empty/None.
    stats['average_stories_per_month'] = average_stories_per_month
    stats['story_count_history'] = story_count_history and json.decode(story_count_history)

    # Subscribers
    stats['subscriber_count'] = feed.num_subscribers
    stats['stories_last_month'] = feed.stories_last_month
    stats['last_load_time'] = feed.last_load_time
    stats['premium_subscribers'] = feed.premium_subscribers
    stats['active_subscribers'] = feed.active_subscribers

    # Fetch histories
    stats['feed_fetch_history'] = MFeedFetchHistory.feed_history(feed_id)
    stats['page_fetch_history'] = MPageFetchHistory.feed_history(feed_id)

    logging.user(request.user, "~FBStatistics: ~SB%s ~FG(%s/%s/%s subs)" % (feed, feed.num_subscribers, feed.active_subscribers, feed.premium_subscribers,))

    return stats
def collect_orphan_feeds(cls, user):
    """Find subscriptions missing from the user's folder tree and append
    them to the top level."""
    us = cls.objects.filter(user=user)
    try:
        usf = UserSubscriptionFolders.objects.get(user=user)
    except UserSubscriptionFolders.DoesNotExist:
        # No folder tree to reconcile against.
        return

    us_feed_ids = set([sub.feed_id for sub in us])
    folders = json.decode(usf.folders)

    def collect_ids(folders, found_ids):
        # Recursively gather every feed id in the nested list/dict tree.
        for item in folders:
            # print ' --> %s' % item
            if isinstance(item, int):
                # print ' --> Adding feed: %s' % item
                found_ids.add(item)
            elif isinstance(item, dict):
                # print ' --> Descending folder dict: %s' % item.values()
                found_ids.update(collect_ids(item.values(), found_ids))
            elif isinstance(item, list):
                # print ' --> Descending folder list: %s' % len(item)
                found_ids.update(collect_ids(item, found_ids))
        # print ' --> Returning: %s' % found_ids
        return found_ids

    found_ids = collect_ids(folders, set())
    diff = len(us_feed_ids) - len(found_ids)
    if diff > 0:
        # Orphans exist: subscriptions not represented anywhere in folders.
        logging.info(" ---> Collecting orphans on %s. %s feeds with %s orphans" % (user.username, len(us_feed_ids), diff))
        orphan_ids = us_feed_ids - found_ids
        folders.extend(list(orphan_ids))
        usf.folders = json.encode(folders)
        usf.save()
def load_feed_statistics(request):
    """Return update-timing, story-volume, and subscriber stats for a feed."""
    stats = dict()
    feed_id = request.GET['feed_id']
    feed = get_object_or_404(Feed, pk=feed_id)
    # Refresh the month-by-month story history before reporting it.
    feed.save_feed_story_history_statistics()

    # Dates of last and next update
    stats['last_update'] = relative_timesince(feed.last_update)
    stats['next_update'] = relative_timeuntil(feed.next_scheduled_update)

    # Minutes between updates
    interval_minutes, _random_factor = feed.get_next_scheduled_update()
    stats['update_interval_minutes'] = interval_minutes

    # Stories per month - average and month-by-month breakout
    stats['average_stories_per_month'] = feed.average_stories_per_month
    history_json = feed.story_count_history
    stats['story_count_history'] = history_json and json.decode(history_json)

    # Subscribers
    stats['subscriber_count'] = feed.num_subscribers

    logging.info(" ---> [%s] Statistics: %s" % (request.user, feed))

    return stats
def api_save_new_subscription(request):
    """IFTTT endpoint: subscribe the user to a feed URL inside a folder."""
    user = request.user
    payload = json.decode(request.body)
    action_fields = payload.get('actionFields')
    url = action_fields['url']
    folder = action_fields['folder']
    # IFTTT sends "Top Level" for the root folder; internally that's " ".
    if folder == "Top Level":
        folder = " "

    code, message, subscription = UserSubscription.add_subscription(
        user=user,
        feed_address=url,
        folder=folder,
        bookmarklet=True
    )

    logging.user(request, "~FRAdding URL from ~FC~SBIFTTT~SN~FR: ~SB%s (in %s)" % (url, folder))

    # Prefer the canonical feed address once the subscription resolves.
    if subscription and subscription.feed:
        url = subscription.feed.feed_address

    return {"data": [{
        "id": subscription and subscription.feed_id,
        "url": url,
    }]}
def clear_view_setting(request):
    """Strip one class of per-feed view settings from the user's profile."""
    code = 1
    view_setting_type = request.POST.get('view_setting_type')
    view_settings = json.decode(request.user.profile.view_settings)

    # Map a setting type to the single-letter keys it clears. Clearing
    # 'order' also drops the read filter ('r'), preserving the original
    # behavior where both are reset together.
    keys_by_type = {
        'layout': ('l',),
        'view': ('v',),
        'order': ('o', 'r'),
    }
    keys_to_clear = keys_by_type.get(view_setting_type, ())

    removed = 0
    new_view_settings = {}
    for feed_id, view_setting in view_settings.items():
        for key in keys_to_clear:
            if key in view_setting:
                del view_setting[key]
                removed += 1
        new_view_settings[feed_id] = view_setting

    request.user.profile.view_settings = json.encode(new_view_settings)
    request.user.profile.save()
    logging.user(request, "~FMClearing view settings: %s (found %s)" % (view_setting_type, removed))

    response = dict(code=code, view_settings=view_settings, removed=removed)
    return response
def test_load_feeds__google(self):
    """End-to-end: load a Google blog fixture, refresh it, mark a story
    read, reload, and verify unread counts and the story diff."""
    # Freezegun the date to 2017-04-30
    self.client.login(username='******', password='******')
    old_story_guid = "blog.google:443/topics/inside-google/google-earths-incredible-3d-imagery-explained/"
    management.call_command('loaddata', 'google1.json', verbosity=1, skip_checks=False)
    print Feed.objects.all()
    feed = Feed.objects.get(pk=766)
    print " Testing test_load_feeds__google: %s" % feed
    stories = MStory.objects(story_feed_id=feed.pk)
    self.assertEquals(stories.count(), 0)

    management.call_command('refresh_feed', force=False, feed=766, single_threaded=True, daemonize=False, skip_checks=False)

    stories = MStory.objects(story_feed_id=feed.pk)
    self.assertEquals(stories.count(), 20)

    response = self.client.get(reverse('load-feeds')+"?update_counts=true")
    content = json.decode(response.content)
    self.assertEquals(content['feeds']['766']['nt'], 20)

    # Marking one story read should drop the unread count to 19.
    old_story = MStory.objects.get(story_feed_id=feed.pk, story_guid__contains=old_story_guid)
    self.client.post(reverse('mark-story-hashes-as-read'), {'story_hash': old_story.story_hash})

    response = self.client.get(reverse('refresh-feeds'))
    content = json.decode(response.content)
    self.assertEquals(content['feeds']['766']['nt'], 19)

    # A second fixture + refresh must not resurrect the read story.
    management.call_command('loaddata', 'google2.json', verbosity=1, skip_checks=False)
    management.call_command('refresh_feed', force=False, feed=766, single_threaded=True, daemonize=False, skip_checks=False)

    stories = MStory.objects(story_feed_id=feed.pk)
    self.assertEquals(stories.count(), 20)

    url = reverse('load-single-feed', kwargs=dict(feed_id=766))
    response = self.client.get(url)
    # pprint([c['story_title'] for c in json.decode(response.content)])
    feed = json.decode(response.content)

    # Test: 1 changed char in title
    self.assertEquals(len(feed['stories']), 6)

    response = self.client.get(reverse('refresh-feeds'))
    content = json.decode(response.content)
    self.assertEquals(content['feeds']['766']['nt'], 19)
def appdotnet_connect(request):
    """OAuth handshake with App.net.

    Three paths: user denied access (error), callback with a code
    (exchange for an access token and link the account), or no code yet
    (return the URL to begin the OAuth flow).
    """
    domain = Site.objects.get_current().domain
    args = {
        "client_id": settings.APPDOTNET_CLIENTID,
        "client_secret": settings.APPDOTNET_SECRET,
        "redirect_uri": "http://" + domain + reverse('appdotnet-connect'),
        "scope": ["email", "write_post", "follow"],
    }

    oauth_code = request.REQUEST.get('code')
    denied = request.REQUEST.get('denied')
    if denied:
        logging.user(request, "~BB~FRDenied App.net connect")
        return {'error': 'Denied! Try connecting again.'}
    elif oauth_code:
        try:
            # Exchange the OAuth code for an access token + App.net user id.
            adn_auth = appdotnet.Appdotnet(**args)
            response = adn_auth.getAuthResponse(oauth_code)
            adn_resp = json.decode(response)
            access_token = adn_resp['access_token']
            adn_userid = adn_resp['user_id']
        except (IOError):
            logging.user(request, "~BB~FRFailed App.net connect")
            return dict(error="App.net has returned an error. Try connecting again.")

        # Be sure that two people aren't using the same Twitter account.
        existing_user = MSocialServices.objects.filter(appdotnet_uid=unicode(adn_userid))
        if existing_user and existing_user[0].user_id != request.user.pk:
            try:
                user = User.objects.get(pk=existing_user[0].user_id)
                logging.user(request, "~BB~FRFailed App.net connect, another user: %s" % user.username)
                return dict(error=("Another user (%s, %s) has "
                                   "already connected with those App.net credentials."
                                   % (user.username, user.email or "no email")))
            except User.DoesNotExist:
                # Stale record pointing at a deleted user; safe to drop.
                existing_user.delete()

        social_services, _ = MSocialServices.objects.get_or_create(user_id=request.user.pk)
        social_services.appdotnet_uid = unicode(adn_userid)
        social_services.appdotnet_access_token = access_token
        social_services.syncing_appdotnet = True
        social_services.save()

        # Friend syncing happens asynchronously in a celery task.
        SyncAppdotnetFriends.delay(user_id=request.user.pk)

        logging.user(request, "~BB~FRFinishing App.net connect")
        return {}
    else:
        # Start the OAuth process
        adn_auth = appdotnet.Appdotnet(**args)
        auth_url = adn_auth.generateAuthUrl()
        logging.user(request, "~BB~FRStarting App.net connect")
        return {'next': auth_url}
class MFeedback(mongo.Document):
    """Cached copy of recent forum posts, displayed as feedback entries.

    Refreshed wholesale by collect_feedback(), which wipes the collection
    and re-inserts the latest unique-topic posts.
    """
    date = mongo.DateTimeField()      # full timestamp of the post
    date_short = mongo.StringField()  # human-friendly relative date
    subject = mongo.StringField()     # topic title
    url = mongo.StringField()         # deep link to the forum post
    style = mongo.StringField()       # category name: idea/problem/praise/question
    order = mongo.IntField()          # display order (0 = newest)

    meta = {
        'collection': 'feedback',
        'allow_inheritance': False,
        'indexes': ['style'],
        'ordering': ['order'],
    }

    # Forum category id -> display style.
    CATEGORIES = {
        5: 'idea',
        6: 'problem',
        7: 'praise',
        8: 'question',
    }

    def __unicode__(self):
        return "%s: (%s) %s" % (self.style, self.date, self.subject)

    @classmethod
    def collect_feedback(cls):
        """Fetch the latest forum posts and rebuild the feedback collection.

        Best-effort: HTTP failures just log and return. Keeps at most 5
        posts, one per topic.
        """
        seen_posts = set()
        try:
            data = urllib2.urlopen('https://forum.newsblur.com/posts.json').read()
        except (urllib2.HTTPError), e:
            logging.debug(" ***> Failed to collect feedback: %s" % e)
            return
        data = json.decode(data).get('latest_posts', "")

        if not len(data):
            print "No data!"
            return

        # Full rebuild: drop everything, then insert the fresh posts.
        cls.objects.delete()
        post_count = 0
        for post in data:
            if post['topic_id'] in seen_posts:
                # Only one entry per topic.
                continue
            seen_posts.add(post['topic_id'])
            feedback = {}
            feedback['order'] = post_count
            post_count += 1
            feedback['date'] = dateutil.parser.parse(post['created_at']).replace(tzinfo=None)
            feedback['date_short'] = relative_date(feedback['date'])
            feedback['subject'] = post['topic_title']
            feedback['url'] = "https://forum.newsblur.com/t/%s/%s/%s" % (
                post['topic_slug'], post['topic_id'], post['post_number'])
            feedback['style'] = cls.CATEGORIES[post['category_id']]
            cls.objects.create(**feedback)
            print "%s: %s (%s)" % (feedback['style'], feedback['subject'], feedback['date_short'])
            if post_count >= 4:
                break
def test_load_feeds__slashdot(self):
    """End-to-end: load slashdot fixtures, refresh, mark a story read, and
    verify unread counts survive a second fixture refresh."""
    self.client.login(username='******', password='******')
    old_story_guid = "{'original-id': u'http://yro.slashdot.org/story/09/09/05/0112254/Court-Allows-Microsoft-To-Sell-Word-During-Appeal?from=rss', 'gr:original-id': u'http://yro.slashdot.org/story/09/09/05/0112254/Court-Allows-Microsoft-To-Sell-Word-During-Appeal?from=rss'}"
    # NOTE(review): new_story_guid is assigned but never used below.
    new_story_guid = "{'original-id': u'http://yro.slashdot.org/story/09/09/05/0112254/Court-Allows-Microsoft-To-Sell-Word-During-Appeal?from=rss!!', 'gr:original-id': u'http://yro.slashdot.org/story/09/09/05/0112254/Court-Allows-Microsoft-To-Sell-Word-During-Appeal?from=rss!!'}"
    management.call_command('loaddata', 'slashdot1.json', verbosity=0)

    feed = Feed.objects.get(feed_link__contains='slashdot')
    stories = MStory.objects(story_feed_id=feed.pk)
    self.assertEquals(stories.count(), 0)

    management.call_command('refresh_feed', force=1, feed=5, single_threaded=True, daemonize=False)

    stories = MStory.objects(story_feed_id=feed.pk)
    self.assertEquals(stories.count(), 38)

    response = self.client.get(reverse('load-feeds'))
    content = json.decode(response.content)
    self.assertEquals(content['feeds']['5']['nt'], 38)

    # Marking one story read should drop the unread count to 37.
    self.client.post(reverse('mark-story-as-read'), {'story_id': old_story_guid, 'feed_id': 5})

    response = self.client.get(reverse('refresh-feeds'))
    content = json.decode(response.content)
    self.assertEquals(content['feeds']['5']['nt'], 37)

    # Second fixture + refresh: same story count, read state preserved.
    management.call_command('loaddata', 'slashdot2.json', verbosity=0)
    management.call_command('refresh_feed', force=1, feed=5, single_threaded=True, daemonize=False)

    stories = MStory.objects(story_feed_id=feed.pk)
    self.assertEquals(stories.count(), 38)

    url = reverse('load-single-feed', kwargs=dict(feed_id=5))
    response = self.client.get(url)
    # pprint([c['story_title'] for c in json.decode(response.content)])
    feed = json.decode(response.content)

    # Test: 1 changed char in title
    self.assertEquals(len(feed['stories']), 12)

    response = self.client.get(reverse('refresh-feeds'))
    content = json.decode(response.content)
    self.assertEquals(content['feeds']['5']['nt'], 37)
def load_feed_statistics(request, feed_id):
    """Return update-timing, story-volume, subscriber, classifier, and
    fetch-history statistics for a feed."""
    stats = dict()
    feed = get_object_or_404(Feed, pk=feed_id)
    # Recompute derived stats before reporting them.
    feed.save_feed_story_history_statistics()
    feed.save_classifier_counts()

    # Dates of last and next update
    stats['last_update'] = relative_timesince(feed.last_update)
    stats['next_update'] = relative_timeuntil(feed.next_scheduled_update)

    # Minutes between updates
    update_interval_minutes, random_factor = feed.get_next_scheduled_update(force=True)
    stats['update_interval_minutes'] = update_interval_minutes

    # Stories per month - average and month-by-month breakout
    average_stories_per_month, story_count_history = feed.average_stories_per_month, feed.data.story_count_history
    stats['average_stories_per_month'] = average_stories_per_month
    # story_count_history is a JSON string; may be empty/None.
    stats['story_count_history'] = story_count_history and json.decode(story_count_history)

    # Subscribers
    stats['subscriber_count'] = feed.num_subscribers
    stats['stories_last_month'] = feed.stories_last_month
    stats['last_load_time'] = feed.last_load_time
    stats['premium_subscribers'] = feed.premium_subscribers
    stats['active_subscribers'] = feed.active_subscribers
    stats['active_premium_subscribers'] = feed.active_premium_subscribers

    # Classifier counts
    stats['classifier_counts'] = json.decode(feed.data.feed_classifier_counts)

    # Fetch histories
    stats['feed_fetch_history'] = MFeedFetchHistory.feed_history(feed_id)
    stats['page_fetch_history'] = MPageFetchHistory.feed_history(feed_id)

    logging.user(request, "~FBStatistics: ~SB%s ~FG(%s/%s/%s subs)" % (
        feed, feed.num_subscribers, feed.active_subscribers, feed.premium_subscribers,
    ))

    return stats
def exception_retry(request):
    """Clear a feed's exception flags and force an immediate refetch,
    switching the user's subscription if the feed was merged away."""
    user = get_user(request)
    feed_id = get_argument_or_404(request, 'feed_id')
    reset_fetch = json.decode(request.POST['reset_fetch'])
    feed = Feed.get_by_id(feed_id)
    original_feed = feed

    if not feed:
        raise Http404

    feed.schedule_feed_fetch_immediately()
    # Clear exception/inactive flags, saving only when something changed.
    changed = False
    if feed.has_page_exception:
        changed = True
        feed.has_page_exception = False
    if feed.has_feed_exception:
        changed = True
        feed.has_feed_exception = False
    if not feed.active:
        changed = True
        feed.active = True
    if changed:
        feed.save(update_fields=['has_page_exception', 'has_feed_exception', 'active'])

    # reset_fetch pretends the feed was never fetched, forcing a from-scratch
    # fetch; otherwise just force a refresh of an already-fetched feed.
    original_fetched_once = feed.fetched_once
    if reset_fetch:
        logging.user(request, "~FRRefreshing exception feed: ~SB%s" % (feed))
        feed.fetched_once = False
    else:
        logging.user(request, "~FRForcing refreshing feed: ~SB%s" % (feed))
        feed.fetched_once = True

    if feed.fetched_once != original_fetched_once:
        feed.save(update_fields=['fetched_once'])

    feed = feed.update(force=True, compute_scores=False, verbose=True)
    feed = Feed.get_by_id(feed.pk)

    try:
        usersub = UserSubscription.objects.get(user=user, feed=feed)
    except UserSubscription.DoesNotExist:
        # update() may have merged the feed into another one; move the
        # user's subscription from the original feed to the new one.
        usersubs = UserSubscription.objects.filter(user=user, feed=original_feed)
        if usersubs:
            usersub = usersubs[0]
            usersub.switch_feed(feed, original_feed)
        else:
            return {'code': -1}

    usersub.calculate_feed_scores(silent=False)

    # Keyed under both the (possibly new) feed pk and the requested id so
    # the client can find the feed either way.
    feeds = {
        feed.pk: usersub and usersub.canonical(full=True),
        feed_id: usersub.canonical(full=True)
    }
    return {'code': 1, 'feeds': feeds}
def ios_subscription_status(request):
    """Receive an App Store subscription notification and email it to admins."""
    logging.debug(" ---> iOS Subscription Status: %s" % request.body)
    data = json.decode(request.body)

    notification_type = data.get('notification_type', "[missing]")
    subject = "iOS Subscription Status: %s" % notification_type
    message = """%s""" % (request.body)
    mail_admins(subject, message)

    return {"code": 1}
def test_api_feeds(self):
    """load-feeds returns all 10 subscriptions plus the folder tree."""
    self.client.login(username='******', password='******')

    response = self.client.get(reverse('load-feeds'))
    content = json.decode(response.content)

    self.assertEquals(len(content['feeds']), 10)
    self.assertEquals(content['feeds']['1']['feed_title'], 'Gawker')
    expected_folders = [{'Tech': [1, 4, 5, {'Deep Tech': [6, 7]}]}, 2, 3, 8, 9, {'Blogs': [8, 9]}, 1]
    self.assertEquals(content['folders'], expected_folders)
def add_folder(self, parent_folder, folder):
    """Create an empty folder inside parent_folder and persist the tree."""
    user_sub_folders = json.decode(self.folders) if self.folders else []
    new_folder = {folder: []}
    user_sub_folders = add_object_to_folder(new_folder, parent_folder, user_sub_folders)
    self.folders = json.encode(user_sub_folders)
    self.save()
def move_folder_to_folder(self, folder_name, in_folder=None, to_folder=None):
    """Relocate folder_name from in_folder into to_folder.

    Returns self so callers can read the updated folders afterwards.
    """
    logging.user(
        self.user,
        "~FBMoving folder '~SB%s~SN' in '%s' to: ~SB%s" % (folder_name, in_folder, to_folder))
    # delete_folder(commit_delete=False) detaches the subtree, saves the
    # pruned tree to self.folders, and returns the detached subtree — so
    # self.folders must be decoded AFTER the call. (The original also
    # decoded it before the call; that value was dead and has been removed.)
    deleted_folder = self.delete_folder(folder_name, in_folder, [], commit_delete=False)
    user_sub_folders = json.decode(self.folders)
    user_sub_folders = add_object_to_folder(deleted_folder, to_folder, user_sub_folders)
    self.folders = json.encode(user_sub_folders)
    self.save()
    return self
def move_folder_to_folder(request):
    """Move a folder between parents and return the updated folder tree."""
    folder_name = request.POST['folder_name']
    in_folder = request.POST.get('in_folder', '')
    to_folder = request.POST.get('to_folder', '')

    user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user)
    user_sub_folders = user_sub_folders.move_folder_to_folder(
        folder_name, in_folder=in_folder, to_folder=to_folder)

    return dict(code=1, folders=json.decode(user_sub_folders.folders))
def test_load_feeds__slashdot(self):
    """End-to-end: load slashdot fixtures, refresh, mark a story read, and
    verify unread counts survive a second fixture refresh."""
    self.client.login(username='******', password='******')
    old_story_guid = "tag:google.com,2005:reader/item/4528442633bc7b2b"
    management.call_command('loaddata', 'slashdot1.json', verbosity=0, commit=False, skip_checks=False)

    feed = Feed.objects.get(feed_link__contains='slashdot')
    stories = MStory.objects(story_feed_id=feed.pk)
    self.assertEquals(stories.count(), 0)

    management.call_command('refresh_feed', force=1, feed=5, single_threaded=True, daemonize=False, skip_checks=False)

    stories = MStory.objects(story_feed_id=feed.pk)
    self.assertEquals(stories.count(), 38)

    response = self.client.get(reverse('load-feeds'))
    content = json.decode(response.content)
    self.assertEquals(content['feeds']['5']['nt'], 38)

    # Marking one story read should drop the unread count to 37.
    self.client.post(reverse('mark-story-as-read'), {'story_id': old_story_guid, 'feed_id': 5})

    response = self.client.get(reverse('refresh-feeds'))
    content = json.decode(response.content)
    self.assertEquals(content['feeds']['5']['nt'], 37)

    # Second fixture + refresh: same story count, read state preserved.
    management.call_command('loaddata', 'slashdot2.json', verbosity=0, commit=False, skip_checks=False)
    management.call_command('refresh_feed', force=1, feed=5, single_threaded=True, daemonize=False, skip_checks=False)

    stories = MStory.objects(story_feed_id=feed.pk)
    self.assertEquals(stories.count(), 38)

    url = reverse('load-single-feed', kwargs=dict(feed_id=5))
    response = self.client.get(url)
    # pprint([c['story_title'] for c in json.decode(response.content)])
    feed = json.decode(response.content)

    # Test: 1 changed char in title
    self.assertEquals(len(feed['stories']), 6)

    response = self.client.get(reverse('refresh-feeds'))
    content = json.decode(response.content)
    self.assertEquals(content['feeds']['5']['nt'], 37)
def delete_feed(self, feed_id, in_folder, commit_delete=True):
    """Remove feed_id from in_folder in the user's folder tree.

    If the feed also appears in other folders, only the in_folder copy is
    removed and the subscription survives; otherwise (when commit_delete)
    the UserSubscription and the user's read-story records are deleted too.
    """
    def _find_feed_in_folders(old_folders, folder_name='', multiples_found=False, deleted=False):
        # Recursively rebuild the nested tree without the targeted feed,
        # tracking whether other copies of the feed exist elsewhere.
        new_folders = []
        for k, folder in enumerate(old_folders):
            if isinstance(folder, int):
                if (folder == feed_id and (
                    (folder_name != in_folder) or
                    (folder_name == in_folder and deleted))):
                    # Same feed seen in another folder (or a second copy in
                    # the same folder): keep the subscription alive.
                    multiples_found = True
                    logging.user(self.user, "~FB~SBDeleting feed, and a multiple has been found in '%s'" % (folder_name))
                if folder == feed_id and (folder_name == in_folder) and not deleted:
                    logging.user(self.user, "~FBDelete feed: %s'th item: %s folders/feeds" % (
                        k, len(old_folders)
                    ))
                    deleted = True
                else:
                    new_folders.append(folder)
            elif isinstance(folder, dict):
                for f_k, f_v in folder.items():
                    nf, multiples_found, deleted = _find_feed_in_folders(f_v, f_k, multiples_found, deleted)
                    new_folders.append({f_k: nf})
        return new_folders, multiples_found, deleted

    user_sub_folders = json.decode(self.folders)
    user_sub_folders, multiples_found, deleted = _find_feed_in_folders(user_sub_folders)
    self.folders = json.encode(user_sub_folders)
    self.save()

    if not multiples_found and deleted and commit_delete:
        try:
            user_sub = UserSubscription.objects.get(user=self.user, feed=feed_id)
        # NOTE(review): this catches Feed.DoesNotExist, but .get() on
        # UserSubscription raises UserSubscription.DoesNotExist — confirm
        # which exception is intended here.
        except Feed.DoesNotExist:
            # The feed may have been merged into a duplicate; look it up.
            duplicate_feed = DuplicateFeed.objects.filter(duplicate_feed_id=feed_id)
            if duplicate_feed:
                try:
                    user_sub = UserSubscription.objects.get(user=self.user,
                                                            feed=duplicate_feed[0].feed)
                except Feed.DoesNotExist:
                    return
        # NOTE(review): user_sub can be unbound if the first lookup raised
        # and no duplicate feed was found — would NameError here; verify.
        if user_sub:
            user_sub.delete()
        MUserStory.objects(user_id=self.user_id, feed_id=feed_id).delete()
def test_load_feeds__motherjones(self):
    """End-to-end: load Mother Jones fixtures, refresh, mark a story read,
    and verify unread counts survive a second fixture refresh."""
    self.client.login(username='******', password='******')
    management.call_command('loaddata', 'motherjones1.json', verbosity=0, commit=False, skip_checks=False)

    feed = Feed.objects.get(feed_link__contains='motherjones')
    stories = MStory.objects(story_feed_id=feed.pk)
    self.assertEquals(stories.count(), 0)

    management.call_command('refresh_feed', force=1, feed=feed.pk, single_threaded=True, daemonize=False, skip_checks=False)

    stories = MStory.objects(story_feed_id=feed.pk)
    self.assertEquals(stories.count(), 10)

    response = self.client.get(reverse('load-feeds'))
    content = json.decode(response.content)
    self.assertEquals(content['feeds'][str(feed.pk)]['nt'], 10)

    # Marking one story read should drop the unread count to 9.
    self.client.post(reverse('mark-story-as-read'), {'story_id': stories[0].story_guid, 'feed_id': feed.pk})

    response = self.client.get(reverse('refresh-feeds'))
    content = json.decode(response.content)
    self.assertEquals(content['feeds'][str(feed.pk)]['nt'], 9)

    # Second fixture + refresh: same story count, read state preserved.
    management.call_command('loaddata', 'motherjones2.json', verbosity=0, commit=False, skip_checks=False)
    management.call_command('refresh_feed', force=1, feed=feed.pk, single_threaded=True, daemonize=False, skip_checks=False)

    stories = MStory.objects(story_feed_id=feed.pk)
    self.assertEquals(stories.count(), 10)

    url = reverse('load-single-feed', kwargs=dict(feed_id=feed.pk))
    response = self.client.get(url)
    # pprint([c['story_title'] for c in json.decode(response.content)])
    feed = json.decode(response.content)

    # Test: 1 changed char in title
    self.assertEquals(len(feed['stories']), 6)

    response = self.client.get(reverse('refresh-feeds'))
    content = json.decode(response.content)
    self.assertEquals(content['feeds'][str(feed['feed_id'])]['nt'], 9)
def forwards(self, orm):
    """Migrate feed_pane_size out of the preferences JSON blob into its
    own Profile column."""
    for profile in orm.Profile.objects.all():
        # Cheap substring test on the raw JSON avoids decoding every profile.
        if 'feed_pane_size' in profile.preferences:
            preferences = json.decode(profile.preferences)
            feed_pane_size = int(preferences.get('feed_pane_size', 240))
            del preferences['feed_pane_size']
            print " --> User %s: %s" % (profile.user.username, feed_pane_size)
            profile.feed_pane_size = feed_pane_size
            profile.preferences = json.encode(preferences)
            profile.save()
def canonical(self):
    """Return the profile as a plain dict suitable for JSON serialization."""
    profile = {
        'is_premium': self.is_premium,
        'preferences': json.decode(self.preferences),
        'tutorial_finished': self.tutorial_finished,
        'hide_getting_started': self.hide_getting_started,
        'has_setup_feeds': self.has_setup_feeds,
        'has_found_friends': self.has_found_friends,
        'has_trained_intelligence': self.has_trained_intelligence,
        'dashboard_date': self.dashboard_date,
    }
    return profile