def run(self, user_id):
    """Periodic per-user maintenance: trim read stories, verify feed
    scheduling, and occasionally refresh the user's Twitter photo.

    Fix: guard the MSocialServices lookup — users without a social
    services record raised DoesNotExist and aborted the task (the same
    guard the other variants of this task already apply).
    """
    UserSubscription.trim_user_read_stories(user_id)
    UserSubscription.verify_feeds_scheduled(user_id)
    # Only ~1% of runs sync the photo, spreading Twitter API calls out.
    if random.random() < 0.01:
        try:
            ss = MSocialServices.objects.get(user_id=user_id)
        except MSocialServices.DoesNotExist:
            logging.debug(" ---> ~FRCleaning up user, can't find social_services for user_id: ~SB%s" % user_id)
            return
        ss.sync_twitter_photo()
def opml_upload(request):
    """Handle an OPML file upload.

    Persists the raw OPML, imports its feeds into the user's
    subscriptions, and returns a JSON payload of the resulting
    folders/feeds. Codes: 1 = success, 2 = deferred to a task, -1 = error.
    """
    xml_opml = None
    message = "OK"
    code = 1
    payload = {}
    if request.method == 'POST':
        if 'file' in request.FILES:
            logging.user(request, "~FR~SBOPML upload starting...")
            file = request.FILES['file']
            xml_opml = file.read()
            try:
                UploadedOPML.objects.create(user_id=request.user.pk, opml_file=xml_opml)
            except (UnicodeDecodeError, ValidationError, InvalidStringData):
                # NOTE(review): processing still continues below even after
                # this failure — presumably intentional best-effort; confirm.
                folders = None
                code = -1
                message = "There was a Unicode decode error when reading your OPML file. Ensure it's a text file with a .opml or .xml extension. Is it a zip file?"
            opml_importer = OPMLImporter(xml_opml, request.user)
            try:
                folders = opml_importer.try_processing()
            except TimeoutError:
                # Too big to process inline: hand off to a background task
                # and tell the client it's delayed (code 2).
                folders = None
                ProcessOPML.delay(request.user.pk)
                feed_count = opml_importer.count_feeds_in_opml()
                logging.user(
                    request,
                    "~FR~SBOPML upload took too long, found %s feeds. Tasking..." % feed_count)
                payload = dict(folders=folders, delayed=True, feed_count=feed_count)
                code = 2
                message = ""
            except AttributeError:
                code = -1
                message = "OPML import failed. Couldn't parse XML file."
                folders = None
            if folders:
                code = 1
                feeds = UserSubscription.objects.filter(user=request.user).values()
                payload = dict(folders=folders, feeds=feeds)
                logging.user(
                    request,
                    "~FR~SBOPML Upload: ~SK%s~SN~SB~FR feeds" % (len(feeds)))
                UserSubscription.queue_new_feeds(request.user)
                UserSubscription.refresh_stale_feeds(request.user, exclude_new=True)
    else:
        message = "Attach an .opml file."
        code = -1
    return HttpResponse(json.encode(
        dict(message=message, code=code, payload=payload)),
        content_type='text/html')
def run(self, user_id):
    """Per-user maintenance: trim read stories, make sure the user's feeds
    are scheduled, then refresh their Twitter profile photo."""
    UserSubscription.trim_user_read_stories(user_id)
    UserSubscription.verify_feeds_scheduled(user_id)
    try:
        social_services = MSocialServices.objects.get(user_id=user_id)
    except MSocialServices.DoesNotExist:
        # No social-services record for this user; nothing left to sync.
        logging.debug(" ---> ~FRCleaning up user, can't find social_services for user_id: ~SB%s" % user_id)
        return
    social_services.sync_twitter_photo()
def run(self, user_id):
    """Clean up this user's read stories, verify feed scheduling, and sync
    the Twitter photo when a social-services record exists."""
    UserSubscription.trim_user_read_stories(user_id)
    UserSubscription.verify_feeds_scheduled(user_id)
    try:
        profile_services = MSocialServices.objects.get(user_id=user_id)
    except MSocialServices.DoesNotExist:
        logging.debug(
            " ---> ~FRCleaning up user, can't find social_services for user_id: ~SB%s" % user_id)
        return
    else:
        profile_services.sync_twitter_photo()
def run(self, user_id):
    """Full per-user cleanup pass: trim read stories and activity logs,
    recount feed subscribers, and refresh the Twitter photo."""
    UserSubscription.trim_user_read_stories(user_id)
    UserSubscription.verify_feeds_scheduled(user_id)
    Profile.count_all_feed_subscribers_for_user(user_id)
    # Trim both activity collections for this user.
    for trimmer in (MInteraction.trim, MActivity.trim):
        trimmer(user_id)
    # UserSubscription.refresh_stale_feeds(user_id)
    try:
        services = MSocialServices.objects.get(user_id=user_id)
    except MSocialServices.DoesNotExist:
        logging.debug(" ---> ~FRCleaning up user, can't find social_services for user_id: ~SB%s" % user_id)
        return
    services.sync_twitter_photo()
def ProcessOPML(user_id):
    """Deferred OPML import task: process the user's most recently uploaded
    OPML file, email them the result, and refresh the new subscriptions."""
    user = User.objects.get(pk=user_id)
    logging.user(user, "~FR~SBOPML upload (task) starting...")
    uploaded = UploadedOPML.objects.filter(user_id=user_id).first()
    importer = OPMLImporter(uploaded.opml_file.encode('utf-8'), user)
    importer.process()
    subscription_count = UserSubscription.objects.filter(user=user).count()
    user.profile.send_upload_opml_finished_email(subscription_count)
    logging.user(
        user, "~FR~SBOPML upload (task): ~SK%s~SN~SB~FR feeds" % (subscription_count))
    UserSubscription.queue_new_feeds(user)
    UserSubscription.refresh_stale_feeds(user, exclude_new=True)
def add_site(request, token):
    """Bookmarklet endpoint: subscribe the user identified by secret
    `token` to `url`, optionally creating `new_folder` first; responds
    with a JSONP callback payload.

    Fix: `message` and `us` were unbound when `url` was empty or the token
    didn't match a Profile, raising a NameError while building the JSON
    response. They are now initialized up front.
    """
    code = 0
    message = ""
    us = None
    url = request.GET["url"]
    folder = request.GET["folder"]
    new_folder = request.GET.get("new_folder")
    callback = request.GET["callback"]
    if not url:
        code = -1
    else:
        try:
            profile = Profile.objects.get(secret_token=token)
            if new_folder:
                usf, _ = UserSubscriptionFolders.objects.get_or_create(user=profile.user)
                usf.add_folder(folder, new_folder)
                folder = new_folder
            code, message, us = UserSubscription.add_subscription(
                user=profile.user, feed_address=url, folder=folder, bookmarklet=True
            )
        except Profile.DoesNotExist:
            code = -1
    if code > 0:
        message = "OK"
        logging.user(profile.user, "~FRAdding URL from site: ~SB%s (in %s)" % (url, folder),
                     request=request)
    return HttpResponse(
        callback + "(" + json.encode({"code": code, "message": message, "usersub": us and us.feed_id}) + ")",
        mimetype="text/plain",
    )
def api_save_new_subscription(request):
    """IFTTT action: subscribe the authenticated user to a feed URL, filed
    under the requested folder ("Top Level" maps to the root folder)."""
    body = request.body_json
    action_fields = body.get('actionFields')
    url = urlnorm.normalize(action_fields['url'])
    folder = action_fields['folder']
    if folder == "Top Level":
        folder = " "
    code, message, us = UserSubscription.add_subscription(
        user=request.user,
        feed_address=url,
        folder=folder,
        bookmarklet=True
    )
    logging.user(request, "~FRAdding URL from ~FC~SBIFTTT~SN~FR: ~SB%s (in %s)" % (url, folder))
    if us and us.feed:
        # Report the canonical feed address back to IFTTT.
        url = us.feed.feed_address
    return {"data": [{
        "id": us and us.feed_id,
        "url": url,
    }]}
def add_site(request, token):
    """Bookmarklet endpoint: subscribe the user identified by secret
    `token` to `url`; responds with a JSONP callback payload.

    Fix: `message` and `us` were unbound when `url` was empty or the token
    didn't match a Profile, raising a NameError in the response builder.
    """
    code = 0
    message = ''
    us = None
    url = request.GET['url']
    folder = request.GET['folder']
    callback = request.GET['callback']
    if not url:
        code = -1
    else:
        try:
            profile = Profile.objects.get(secret_token=token)
            code, message, us = UserSubscription.add_subscription(
                user=profile.user, feed_address=url, folder=folder, bookmarklet=True
            )
        except Profile.DoesNotExist:
            code = -1
    if code > 0:
        message = 'OK'
    return HttpResponse(callback + '(' + json.encode({
        'code': code,
        'message': message,
        'usersub': us and us.feed.pk,
    }) + ')', mimetype='text/plain')
def add_site(request, token):
    """Bookmarklet endpoint: subscribe the user identified by secret
    `token` to `url`, optionally creating `new_folder` first; responds
    with a JSONP callback payload.

    Fix: `message` and `us` were unbound when `url` was empty or the token
    didn't match a Profile, raising a NameError in the response builder.
    """
    code = 0
    message = ''
    us = None
    url = request.GET['url']
    folder = request.GET['folder']
    new_folder = request.GET.get('new_folder')
    callback = request.GET['callback']
    if not url:
        code = -1
    else:
        try:
            profile = Profile.objects.get(secret_token=token)
            if new_folder:
                usf, _ = UserSubscriptionFolders.objects.get_or_create(user=profile.user)
                usf.add_folder(folder, new_folder)
                folder = new_folder
            code, message, us = UserSubscription.add_subscription(
                user=profile.user, feed_address=url, folder=folder, bookmarklet=True
            )
        except Profile.DoesNotExist:
            code = -1
    if code > 0:
        message = 'OK'
        logging.user(profile.user, "~FRAdding URL from site: ~SB%s (in %s)" % (url, folder))
    return HttpResponse(callback + '(' + json.encode({
        'code': code,
        'message': message,
        'usersub': us and us.feed.pk,
    }) + ')', mimetype='text/plain')
def add_site_authed(request):
    """Bookmarklet endpoint for logged-in users: subscribe to `url`,
    optionally creating `new_folder` first; responds via JSONP callback.

    Fix: `message` and `us` were unbound when `url` was empty, raising a
    NameError while building the JSON response; now initialized up front.
    """
    code = 0
    message = ''
    us = None
    url = request.GET['url']
    folder = request.GET['folder']
    new_folder = request.GET.get('new_folder')
    callback = request.GET['callback']
    user = get_user(request)
    if not url:
        code = -1
    else:
        if new_folder:
            usf, _ = UserSubscriptionFolders.objects.get_or_create(user=user)
            usf.add_folder(folder, new_folder)
            folder = new_folder
        code, message, us = UserSubscription.add_subscription(
            user=user, feed_address=url, folder=folder, bookmarklet=True
        )
    if code > 0:
        message = 'OK'
    logging.user(user, "~FRAdding authed URL from site: ~SB%s (in %s)" % (url, folder),
                 request=request)
    return HttpResponse(callback + '(' + json.encode({
        'code': code,
        'message': message,
        'usersub': us and us.feed_id,
    }) + ')', mimetype='text/plain')
def add_site_authed(request):
    """Bookmarklet endpoint for logged-in users: subscribe to `url`,
    optionally creating `new_folder` first; responds via JSONP callback.

    Fix: `message` and `us` were unbound when `url` was empty, raising a
    NameError while building the JSON response; now initialized up front.
    """
    code = 0
    message = ''
    us = None
    url = request.GET['url']
    folder = request.GET['folder']
    new_folder = request.GET.get('new_folder')
    callback = request.GET['callback']
    user = get_user(request)
    if not url:
        code = -1
    else:
        if new_folder:
            usf, _ = UserSubscriptionFolders.objects.get_or_create(user=user)
            usf.add_folder(folder, new_folder)
            folder = new_folder
        code, message, us = UserSubscription.add_subscription(
            user=user, feed_address=url, folder=folder, bookmarklet=True
        )
    if code > 0:
        message = 'OK'
    logging.user(user, "~FRAdding authed URL from site: ~SB%s (in %s)" % (url, folder),
                 request=request)
    return HttpResponse(callback + '(' + json.encode({
        'code': code,
        'message': message,
        'usersub': us and us.feed_id,
    }) + ')', content_type='text/plain')
def add_url(request):
    """Subscribe the logged-in user to the feed at POSTed `url`.

    Fixes (matching the sibling implementation of this view): an empty
    `url` previously fell straight through to add_subscription and now
    returns an error message instead; a missing `folder` key no longer
    raises KeyError (defaults to the root folder).
    """
    code = 0
    url = request.POST['url']
    if not url:
        code = -1
        message = 'Enter in the website address or the feed URL.'
    else:
        folder = request.POST.get('folder', '')
        code, message, _ = UserSubscription.add_subscription(user=request.user,
                                                            feed_address=url,
                                                            folder=folder)
    return dict(code=code, message=message)
def receive_newsletter(self, params):
    """Ingest an inbound newsletter email (`params` from the email
    webhook) and store it as a story on the user's per-sender
    newsletter feed. Returns the story, or None if no user matches."""
    user = self.user_from_email(params['recipient'])
    if not user:
        return
    sender_name, sender_username, sender_domain = self.split_sender(params['from'])
    feed_address = self.feed_address(user, "%s@%s" % (sender_username, sender_domain))
    usf = UserSubscriptionFolders.objects.get(user=user)
    usf.add_folder('', 'Newsletters')
    try:
        feed = Feed.objects.get(feed_address=feed_address)
    except Feed.DoesNotExist:
        # First newsletter from this sender: create a dedicated feed and
        # tell the client to reload it.
        feed = Feed.objects.create(feed_address=feed_address,
                                   feed_link='http://' + sender_domain,
                                   feed_title=sender_name,
                                   fetched_once=True,
                                   known_good=True)
        feed.update()
        logging.user(user, "~FCCreating newsletter feed: ~SB%s" % (feed))
        r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
        r.publish(user.username, 'reload:%s' % feed.pk)
    try:
        usersub = UserSubscription.objects.get(user=user, feed=feed)
    except UserSubscription.DoesNotExist:
        _, _, usersub = UserSubscription.add_subscription(
            user=user,
            feed_address=feed_address,
            folder='Newsletters'
        )
    story_hash = MStory.ensure_story_hash(params['signature'], feed.pk)
    story_params = {
        "story_feed_id": feed.pk,
        "story_date": datetime.datetime.fromtimestamp(int(params['timestamp'])),
        "story_title": params['subject'],
        "story_content": self.get_content(params),
        "story_author_name": escape(params['from']),
        "story_permalink": reverse('newsletter-story', kwargs={'story_hash': story_hash}),
        "story_guid": params['signature'],
    }
    try:
        # The email signature doubles as the story's dedupe key.
        story = MStory.objects.get(story_hash=story_hash)
    except MStory.DoesNotExist:
        story = MStory(**story_params)
        story.save()
    usersub.needs_unread_recalc = True
    usersub.save()
    self.publish_to_subscribers(feed)
    MFetchHistory.add(feed_id=feed.pk, fetch_type='push')
    logging.user(user, "~FCNewsletter feed story: ~SB%s~SN / ~SB%s" % (story.story_title, feed))
    return story
def set_preference(request):
    """Persist user preferences POSTed as name/value pairs.

    Known single-value prefs are stored directly on the profile; integer
    prefs are coerced; "special" prefs update related models; everything
    else lands in the JSON-encoded `preferences` blob on the profile.
    """
    code = 1
    message = ''
    new_preferences = request.POST
    preferences = json.decode(request.user.profile.preferences)
    for preference_name, preference_value in list(new_preferences.items()):
        # Normalize stringified booleans from the client.
        if preference_value in ['true', 'false']:
            preference_value = True if preference_value == 'true' else False
        if preference_name in SINGLE_FIELD_PREFS:
            setattr(request.user.profile, preference_name, preference_value)
        elif preference_name in INTEGER_FIELD_PREFS:
            # Changing days_of_unread shifts the unread horizon, so every
            # subscription's unread counts must be recalculated.
            if preference_name == "days_of_unread" and int(preference_value) != request.user.profile.days_of_unread:
                UserSubscription.all_subs_needs_unread_recalc(request.user.pk)
            setattr(request.user.profile, preference_name, int(preference_value))
            # Field prefs live on the model, not in the JSON blob.
            if preference_name in preferences:
                del preferences[preference_name]
        elif preference_name in SPECIAL_PREFERENCES:
            if preference_name == 'autofollow_friends':
                social_services = MSocialServices.get_user(request.user.pk)
                social_services.autofollow = preference_value
                social_services.save()
            elif preference_name == 'dashboard_date':
                request.user.profile.dashboard_date = datetime.datetime.utcnow()
        else:
            if preference_value in ["true", "false"]:
                preference_value = True if preference_value == "true" else False
            preferences[preference_name] = preference_value
            if preference_name == 'intro_page':
                logging.user(
                    request,
                    "~FBAdvancing intro to page ~FM~SB%s" % preference_value)
    request.user.profile.preferences = json.encode(preferences)
    request.user.profile.save()
    logging.user(request, "~FMSaving preference: %s" % new_preferences)
    response = dict(code=code, message=message, new_preferences=new_preferences)
    return response
def FinishFetchArchiveFeeds(results, user_id, start_time, starting_story_count):
    """Chord callback: finalize a user's archive fetch and email them the
    resulting story counts. `results` (the per-chunk task results) is
    required by the task signature but unused here."""
    ending_story_count, pre_archive_count = UserSubscription.finish_fetch_archive_feeds(
        user_id, start_time, starting_story_count
    )
    profile = Profile.objects.get(user__pk=user_id)
    profile.send_new_premium_archive_email(ending_story_count, pre_archive_count)
def add_url(request):
    """Subscribe the logged-in user to the feed at POSTed `url`, filed in
    the optional POSTed `folder` (root folder by default)."""
    code, message, _ = UserSubscription.add_subscription(
        user=request.user,
        feed_address=request.POST['url'],
        folder=request.POST.get('folder', ''),
    )
    return {'code': code, 'message': message}
def add_url(request):
    """Subscribe the logged-in user to the feed at POSTed `url`, filed in
    the optional POSTed `folder`; rejects an empty URL."""
    url = request.POST['url']
    if url:
        folder = request.POST.get('folder', '')
        code, message, _ = UserSubscription.add_subscription(
            user=request.user, feed_address=url, folder=folder)
    else:
        code = -1
        message = 'Enter in the website address or the feed URL.'
    return dict(code=code, message=message)
def refresh_feeds(request):
    """Return updated unread counts (and, when requested, favicons and
    fetch status) for the given feed ids of the logged-in user."""
    user = get_user(request)
    feed_ids = request.REQUEST.getlist('feed_id')
    check_fetch_status = request.REQUEST.get('check_fetch_status')
    favicons_fetching = request.REQUEST.getlist('favicons_fetching')
    start = datetime.datetime.utcnow()
    feeds = UserSubscription.feeds_with_updated_counts(user, feed_ids=feed_ids,
                                                       check_fetch_status=check_fetch_status)
    favicons_fetching = [int(f) for f in favicons_fetching if f]
    feed_icons = dict([(i.feed_id, i) for i in MFeedIcon.objects(feed_id__in=favicons_fetching)])
    # Attach favicon data for feeds whose icons the client is waiting on.
    for feed_id, feed in feeds.items():
        if feed_id in favicons_fetching and feed_id in feed_icons:
            feeds[feed_id]['favicon'] = feed_icons[feed_id].data
            feeds[feed_id]['favicon_color'] = feed_icons[feed_id].color
            feeds[feed_id]['favicon_fetching'] = feed.get('favicon_fetching')
    user_subs = UserSubscription.objects.select_related('feed').filter(user=user, active=True)
    sub_feed_ids = [s.feed_id for s in user_subs]
    if favicons_fetching:
        # Requested feeds the user isn't subscribed to may have been merged
        # into a duplicate feed; alias their entries accordingly.
        moved_feed_ids = [f for f in favicons_fetching if f not in sub_feed_ids]
        for moved_feed_id in moved_feed_ids:
            duplicate_feeds = DuplicateFeed.objects.filter(duplicate_feed_id=moved_feed_id)
            if duplicate_feeds and duplicate_feeds[0].feed.pk in feeds:
                feeds[moved_feed_id] = feeds[duplicate_feeds[0].feed.pk]
                feeds[moved_feed_id]['dupe_feed_id'] = duplicate_feeds[0].feed.pk
    if check_fetch_status:
        # Point the client at the canonical feed id for any requested feed
        # that resolved to a duplicate.
        missing_feed_ids = list(set(feed_ids) - set(sub_feed_ids))
        if missing_feed_ids:
            duplicate_feeds = DuplicateFeed.objects.filter(duplicate_feed_id__in=missing_feed_ids)
            for duplicate_feed in duplicate_feeds:
                feeds[duplicate_feed.duplicate_feed_id] = {'id': duplicate_feed.feed.pk}
    if settings.DEBUG or check_fetch_status:
        diff = datetime.datetime.utcnow()-start
        timediff = float("%s.%.2s" % (diff.seconds, (diff.microseconds / 1000)))
        logging.user(request, "~FBRefreshing %s feeds (%s seconds) (%s/%s)" % (
            len(feeds.keys()), timediff, check_fetch_status, len(favicons_fetching)))
    return {'feeds': feeds}
def add_site(request, token):
    """Bookmarklet endpoint: subscribe the user identified by secret
    `token` to `url` (read from GET or POST alike), optionally creating
    `new_folder` first; responds with a JSONP callback payload.

    Fix: `message` and `us` were unbound when `url` was missing or the
    token didn't match a Profile, raising a NameError while building the
    JSON response; they are now initialized up front.
    """
    code = 0
    message = ''
    us = None
    get_post = getattr(request, request.method)
    url = get_post.get('url')
    folder = get_post.get('folder')
    new_folder = get_post.get('new_folder')
    callback = get_post.get('callback', '')
    if not url:
        code = -1
    else:
        try:
            profile = Profile.objects.get(secret_token=token)
            if new_folder:
                usf, _ = UserSubscriptionFolders.objects.get_or_create(
                    user=profile.user)
                usf.add_folder(folder, new_folder)
                folder = new_folder
            code, message, us = UserSubscription.add_subscription(
                user=profile.user, feed_address=url, folder=folder,
                bookmarklet=True)
        except Profile.DoesNotExist:
            code = -1
    if code > 0:
        message = 'OK'
        logging.user(profile.user, "~FRAdding URL from site: ~SB%s (in %s)" % (url, folder),
                     request=request)
    return HttpResponse(callback + '(' + json.encode({
        'code': code,
        'message': message,
        'usersub': us and us.feed_id,
    }) + ')', content_type='text/plain')
def run(self, user_id):
    """Task entry point: delegate trimming of this user's read-story
    backlog to UserSubscription."""
    UserSubscription.trim_user_read_stories(user_id)
def api_unread_story(request, trigger_slug=None):
    """IFTTT trigger endpoint: return unread (or unread focus) stories for
    the feed id or folder name in `triggerFields.feed_or_folder`.

    Fix: looking up a feed id the user is no longer subscribed to raised
    UserSubscription.DoesNotExist (a 500); it now returns an empty data
    set, matching the sibling implementation of this view.
    """
    user = request.user
    body = request.body_json
    after = body.get('after', None)
    before = body.get('before', None)
    limit = body.get('limit', 50)
    fields = body.get('triggerFields')
    feed_or_folder = fields['feed_or_folder']
    entries = []

    if isinstance(feed_or_folder, int) or feed_or_folder.isdigit():
        # Single feed by id.
        feed_id = int(feed_or_folder)
        try:
            usersub = UserSubscription.objects.get(user=user, feed_id=feed_id)
        except UserSubscription.DoesNotExist:
            return dict(data=[])
        found_feed_ids = [feed_id]
        found_trained_feed_ids = [feed_id] if usersub.is_trained else []
        stories = usersub.get_stories(order="newest", read_filter="unread",
                                      offset=0, limit=limit,
                                      default_cutoff_date=user.profile.unread_cutoff)
    else:
        # Folder by title ("Top Level" and "all" are special-cased).
        folder_title = feed_or_folder
        if folder_title == "Top Level":
            folder_title = " "
        usf = UserSubscriptionFolders.objects.get(user=user)
        flat_folders = usf.flatten_folders()
        feed_ids = None
        if folder_title != "all":
            feed_ids = flat_folders.get(folder_title)
        usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=feed_ids,
                                                   read_filter="unread")
        feed_ids = [sub.feed_id for sub in usersubs]
        params = {
            "user_id": user.pk,
            "feed_ids": feed_ids,
            "offset": 0,
            "limit": limit,
            "order": "newest",
            "read_filter": "unread",
            "usersubs": usersubs,
            "cutoff_date": user.profile.unread_cutoff,
        }
        story_hashes, unread_feed_story_hashes = UserSubscription.feed_stories(**params)
        mstories = MStory.objects(story_hash__in=story_hashes).order_by('-story_date')
        stories = Feed.format_stories(mstories)
        found_feed_ids = list(set([story['story_feed_id'] for story in stories]))
        trained_feed_ids = [sub.feed_id for sub in usersubs if sub.is_trained]
        found_trained_feed_ids = list(set(trained_feed_ids) & set(found_feed_ids))

    # Load classifiers only for feeds the user has trained.
    if found_trained_feed_ids:
        classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk,
                                                        feed_id__in=found_trained_feed_ids))
        classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk,
                                                            feed_id__in=found_trained_feed_ids))
        classifier_titles = list(MClassifierTitle.objects(user_id=user.pk,
                                                          feed_id__in=found_trained_feed_ids))
        classifier_tags = list(MClassifierTag.objects(user_id=user.pk,
                                                      feed_id__in=found_trained_feed_ids))

    feeds = dict([(f.pk, {
        "title": f.feed_title,
        "website": f.feed_link,
        "address": f.feed_address,
    }) for f in Feed.objects.filter(pk__in=found_feed_ids)])

    for story in stories:
        # Honor the IFTTT pagination window.
        if before and int(story['story_date'].strftime("%s")) > before:
            continue
        if after and int(story['story_date'].strftime("%s")) < after:
            continue
        score = 0
        if found_trained_feed_ids and story['story_feed_id'] in found_trained_feed_ids:
            score = compute_story_score(story, classifier_titles=classifier_titles,
                                        classifier_authors=classifier_authors,
                                        classifier_tags=classifier_tags,
                                        classifier_feeds=classifier_feeds)
            if score < 0:
                continue
            if trigger_slug == "new-unread-focus-story" and score < 1:
                continue
        feed = feeds.get(story['story_feed_id'], None)
        entries.append({
            "StoryTitle": story['story_title'],
            "StoryContent": story['story_content'],
            "StoryURL": story['story_permalink'],
            "StoryAuthor": story['story_authors'],
            "PublishedAt": story['story_date'].strftime("%Y-%m-%dT%H:%M:%SZ"),
            "StoryScore": score,
            "Site": feed and feed['title'],
            "SiteURL": feed and feed['website'],
            "SiteRSS": feed and feed['address'],
            "ifttt": {
                "id": story['story_hash'],
                "timestamp": int(story['story_date'].strftime("%s"))
            },
        })

    if after:
        entries = sorted(entries, key=lambda s: s['ifttt']['timestamp'])

    logging.user(request,
                 "~FYChecking unread%s stories with ~SB~FCIFTTT~SN~FY: ~SB%s~SN - ~SB%s~SN stories" % (
                     " ~SBfocus~SN" if trigger_slug == "new-unread-focus-story" else "",
                     feed_or_folder, len(entries)))
    return {"data": entries[:limit]}
def run(self, user_id):
    """Trim the user's read stories and occasionally refresh their
    Twitter photo.

    Fix: guard the MSocialServices lookup — users without a social
    services record raised DoesNotExist and aborted the task (the same
    guard the later variants of this task apply).
    """
    UserSubscription.trim_user_read_stories(user_id)
    # Only ~1% of runs sync the photo, spreading Twitter API calls out.
    if random.random() < 0.01:
        try:
            ss = MSocialServices.objects.get(user_id=user_id)
        except MSocialServices.DoesNotExist:
            logging.debug(" ---> ~FRCleaning up user, can't find social_services for user_id: ~SB%s" % user_id)
            return
        ss.sync_twitter_photo()
def receive_newsletter(self, params):
    """Ingest an inbound newsletter email (`params` from the email
    webhook) and store it as a story on the user's per-sender
    newsletter feed. Returns the story, or None if no user matches."""
    user = self.user_from_email(params["recipient"])
    if not user:
        return
    sender_name, sender_username, sender_domain = self.split_sender(params["from"])
    feed_address = self.feed_address(user, "%s@%s" % (sender_username, sender_domain))
    usf = UserSubscriptionFolders.objects.get(user=user)
    usf.add_folder("", "Newsletters")
    try:
        feed = Feed.objects.get(feed_address=feed_address)
    except Feed.DoesNotExist:
        # First newsletter from this sender: create a dedicated feed and
        # tell the client to reload it.
        feed = Feed.objects.create(
            feed_address=feed_address,
            feed_link="http://" + sender_domain,
            feed_title=sender_name,
            fetched_once=True,
            known_good=True,
        )
        feed.update()
        logging.user(user, "~FCCreating newsletter feed: ~SB%s" % (feed))
        r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
        r.publish(user.username, "reload:%s" % feed.pk)
    # Track sender display-name changes on the feed title.
    if feed.feed_title != sender_name:
        feed.feed_title = sender_name
        feed.save()
    try:
        usersub = UserSubscription.objects.get(user=user, feed=feed)
    except UserSubscription.DoesNotExist:
        _, _, usersub = UserSubscription.add_subscription(
            user=user, feed_address=feed_address, folder="Newsletters"
        )
    story_hash = MStory.ensure_story_hash(params["signature"], feed.pk)
    story_content = self.get_content(params)
    story_content = self.clean_content(story_content)
    story_params = {
        "story_feed_id": feed.pk,
        "story_date": datetime.datetime.fromtimestamp(int(params["timestamp"])),
        "story_title": params["subject"],
        "story_content": story_content,
        "story_author_name": params["from"],
        "story_permalink": "https://%s%s" % (Site.objects.get_current().domain,
                                             reverse("newsletter-story",
                                                     kwargs={"story_hash": story_hash})),
        "story_guid": params["signature"],
    }
    try:
        # The email signature doubles as the story's dedupe key.
        story = MStory.objects.get(story_hash=story_hash)
    except MStory.DoesNotExist:
        story = MStory(**story_params)
        story.save()
    usersub.needs_unread_recalc = True
    usersub.save()
    self.publish_to_subscribers(feed)
    MFetchHistory.add(feed_id=feed.pk, fetch_type="push")
    logging.user(user, "~FCNewsletter feed story: ~SB%s~SN / ~SB%s" % (story.story_title, feed))
    return story
def process_outline(self, outline, folders, in_folder=''):
    """Recursively walk an OPML <outline> tree, creating/locating feeds
    and nested folder structure for self.user.

    Returns the updated `folders` structure. Nodes without an xmlUrl are
    treated as folders; nodes with one are treated as feeds.
    """
    for item in outline:
        if (not hasattr(item, 'xmlUrl') and
                (hasattr(item, 'text') or hasattr(item, 'title'))):
            # Folder node: recurse into its children under a new title.
            folder = item
            title = getattr(item, 'text', None) or getattr(item, 'title', None)
            # if hasattr(folder, 'text'):
            #     logging.info(' ---> [%s] ~FRNew Folder: %s' % (self.user, folder.text))
            obj = {title: []}
            folders = add_object_to_folder(obj, in_folder, folders)
            folders = self.process_outline(folder, folders, title)
        elif hasattr(item, 'xmlUrl'):
            # Feed node.
            feed = item
            if not hasattr(feed, 'htmlUrl'):
                setattr(feed, 'htmlUrl', None)
            # If feed title matches what's in the DB, don't override it on subscription.
            feed_title = getattr(feed, 'title', None) or getattr(feed, 'text', None)
            if not feed_title:
                setattr(feed, 'title', feed.htmlUrl or feed.xmlUrl)
                user_feed_title = None
            else:
                setattr(feed, 'title', feed_title)
                user_feed_title = feed.title
            feed_address = urlnorm.normalize(feed.xmlUrl)
            feed_link = urlnorm.normalize(feed.htmlUrl)
            # Skip feeds whose URLs won't fit in the DB columns.
            if len(feed_address) > Feed._meta.get_field('feed_address').max_length:
                continue
            if feed_link and len(feed_link) > Feed._meta.get_field('feed_link').max_length:
                continue
            # logging.info(' ---> \t~FR%s - %s - %s' % (feed.title, feed_link, feed_address,))
            feed_data = dict(feed_address=feed_address, feed_link=feed_link,
                             feed_title=feed.title)
            # feeds.append(feed_data)
            # See if it exists as a duplicate first
            duplicate_feed = DuplicateFeed.objects.filter(duplicate_address=feed_address)
            if duplicate_feed:
                feed_db = duplicate_feed[0].feed
            else:
                feed_data['active_subscribers'] = 1
                feed_data['num_subscribers'] = 1
                feed_db, _ = Feed.find_or_create(feed_address=feed_address,
                                                 feed_link=feed_link,
                                                 defaults=dict(**feed_data))
            if user_feed_title == feed_db.feed_title:
                user_feed_title = None
            try:
                us = UserSubscription.objects.get(feed=feed_db, user=self.user)
            except UserSubscription.DoesNotExist:
                us = None
            if not us:
                # New subscription: mark everything before yesterday read.
                us = UserSubscription(
                    feed=feed_db,
                    user=self.user,
                    needs_unread_recalc=True,
                    mark_read_date=datetime.datetime.utcnow() - datetime.timedelta(days=1),
                    active=self.user.profile.is_premium,
                    user_title=user_feed_title)
                us.save()
            if self.user.profile.is_premium and not us.active:
                us.active = True
                us.save()
            if not us.needs_unread_recalc:
                us.needs_unread_recalc = True
                us.save()
            folders = add_object_to_folder(feed_db.pk, in_folder, folders)
    return folders
def receive_newsletter(self, params):
    """Ingest an inbound newsletter email and store it as a story on the
    user's per-sender newsletter feed. Returns the story, or None if no
    user matches the recipient address.

    Fix: removed a leftover `print story_params` debug statement (a
    Python 2 print statement — a syntax error under Python 3, and noisy
    stdout output otherwise).
    """
    user = self._user_from_email(params['recipient'])
    if not user:
        return
    sender_name, sender_username, sender_domain = self._split_sender(params['from'])
    feed_address = self._feed_address(
        user, "%s@%s" % (sender_username, sender_domain))
    usf = UserSubscriptionFolders.objects.get(user=user)
    usf.add_folder('', 'Newsletters')
    try:
        feed = Feed.objects.get(feed_address=feed_address)
    except Feed.DoesNotExist:
        # First newsletter from this sender: create a dedicated feed.
        feed = Feed.objects.create(feed_address=feed_address,
                                   feed_link='http://' + sender_domain,
                                   feed_title=sender_name,
                                   fetched_once=True,
                                   known_good=True)
        feed.update()
        logging.user(user, "~FCCreating newsletter feed: ~SB%s" % (feed))
        r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
        r.publish(user.username, 'reload:%s' % feed.pk)
        self._check_if_first_newsletter(user)
    # Track sender display-name changes on the feed title.
    if feed.feed_title != sender_name:
        feed.feed_title = sender_name
        feed.save()
    try:
        usersub = UserSubscription.objects.get(user=user, feed=feed)
    except UserSubscription.DoesNotExist:
        _, _, usersub = UserSubscription.add_subscription(
            user=user,
            feed_address=feed_address,
            folder='Newsletters')
        r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
        r.publish(user.username, 'reload:feeds')
    story_hash = MStory.ensure_story_hash(params['signature'], feed.pk)
    story_content = self._get_content(params)
    # Prefer the plain-text part when it carries more content than HTML.
    plain_story_content = self._get_content(params, force_plain=True)
    if len(plain_story_content) > len(story_content):
        story_content = plain_story_content
    story_content = self._clean_content(story_content)
    story_params = {
        "story_feed_id": feed.pk,
        "story_date": datetime.datetime.fromtimestamp(int(params['timestamp'])),
        "story_title": params['subject'],
        "story_content": story_content,
        "story_author_name": params['from'],
        "story_permalink": "https://%s%s" % (
            Site.objects.get_current().domain,
            reverse('newsletter-story', kwargs={'story_hash': story_hash})),
        "story_guid": params['signature'],
    }
    try:
        # The email signature doubles as the story's dedupe key.
        story = MStory.objects.get(story_hash=story_hash)
    except MStory.DoesNotExist:
        story = MStory(**story_params)
        story.save()
    usersub.needs_unread_recalc = True
    usersub.save()
    self._publish_to_subscribers(feed)
    MFetchHistory.add(feed_id=feed.pk, fetch_type='push')
    logging.user(
        user, "~FCNewsletter feed story: ~SB%s~SN / ~SB%s" %
        (story.story_title, feed))
    return story
def receive_newsletter(self, params):
    """Ingest an inbound newsletter email and store it as a story on the
    user's per-sender newsletter feed. Returns the story, or None if no
    user matches or the user has no subscription folders."""
    user = self._user_from_email(params['recipient'])
    if not user:
        return
    sender_name, sender_username, sender_domain = self._split_sender(params['from'])
    feed_address = self._feed_address(user, "%s@%s" % (sender_username, sender_domain))
    try:
        usf = UserSubscriptionFolders.objects.get(user=user)
    except UserSubscriptionFolders.DoesNotExist:
        logging.user(user, "~FRUser does not have a USF, ignoring newsletter.")
        return
    usf.add_folder('', 'Newsletters')
    try:
        feed = Feed.objects.get(feed_address=feed_address)
    except Feed.DoesNotExist:
        # First newsletter from this sender: create a dedicated feed.
        feed = Feed.objects.create(feed_address=feed_address,
                                   feed_link='http://' + sender_domain,
                                   feed_title=sender_name,
                                   fetched_once=True,
                                   known_good=True)
        feed.update()
        logging.user(user, "~FCCreating newsletter feed: ~SB%s" % (feed))
        r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
        r.publish(user.username, 'reload:%s' % feed.pk)
        self._check_if_first_newsletter(user)
    # Keep the feed looking fresh so it isn't treated as stale.
    feed.last_update = datetime.datetime.now()
    feed.last_story_date = datetime.datetime.now()
    feed.save()
    # Track sender display-name changes on the feed title.
    if feed.feed_title != sender_name:
        feed.feed_title = sender_name
        feed.save()
    try:
        usersub = UserSubscription.objects.get(user=user, feed=feed)
    except UserSubscription.DoesNotExist:
        _, _, usersub = UserSubscription.add_subscription(
            user=user,
            feed_address=feed_address,
            folder='Newsletters'
        )
        r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
        r.publish(user.username, 'reload:feeds')
    story_hash = MStory.ensure_story_hash(params['signature'], feed.pk)
    story_content = self._get_content(params)
    # Prefer the plain-text part when it carries more content than HTML.
    plain_story_content = self._get_content(params, force_plain=True)
    if len(plain_story_content) > len(story_content):
        story_content = plain_story_content
    story_content = self._clean_content(story_content)
    story_params = {
        "story_feed_id": feed.pk,
        "story_date": datetime.datetime.fromtimestamp(int(params['timestamp'])),
        "story_title": params['subject'],
        "story_content": story_content,
        "story_author_name": params['from'],
        "story_permalink": "https://%s%s" % (
            Site.objects.get_current().domain,
            reverse('newsletter-story', kwargs={'story_hash': story_hash})),
        "story_guid": params['signature'],
    }
    try:
        # The email signature doubles as the story's dedupe key.
        story = MStory.objects.get(story_hash=story_hash)
    except MStory.DoesNotExist:
        story = MStory(**story_params)
        story.save()
    usersub.needs_unread_recalc = True
    usersub.save()
    self._publish_to_subscribers(feed, story.story_hash)
    MFetchHistory.add(feed_id=feed.pk, fetch_type='push')
    logging.user(user, "~FCNewsletter feed story: ~SB%s~SN / ~SB%s" % (story.story_title, feed))
    return story
def receive_newsletter(self, params):
    """Ingest an inbound newsletter email and store it as a story on the
    user's per-sender newsletter feed. Returns the story, or None if no
    user matches or the user has no subscription folders.

    Fix: `feed` is now initialized to None before the lookup. Previously,
    if Feed.MultipleObjectsReturned was raised but the follow-up filter
    returned no rows (e.g. a concurrent delete), `feed` was unbound and
    the later `if not feed:` raised a NameError.
    """
    user = self._user_from_email(params['recipient'])
    if not user:
        return
    sender_name, sender_username, sender_domain = self._split_sender(
        params['from'])
    feed_address = self._feed_address(
        user, "%s@%s" % (sender_username, sender_domain))
    try:
        usf = UserSubscriptionFolders.objects.get(user=user)
    except UserSubscriptionFolders.DoesNotExist:
        logging.user(user, "~FRUser does not have a USF, ignoring newsletter.")
        return
    usf.add_folder('', 'Newsletters')

    # First look for the email address
    feed = None
    try:
        feed = Feed.objects.get(feed_address=feed_address)
    except Feed.MultipleObjectsReturned:
        feeds = Feed.objects.filter(feed_address=feed_address)[:1]
        if feeds.count():
            feed = feeds[0]
    except Feed.DoesNotExist:
        feed = None

    # If not found, check among titles user has subscribed to
    if not feed:
        newsletter_subs = UserSubscription.objects.filter(
            user=user, feed__feed_address__contains="newsletter:").only('feed')
        newsletter_feed_ids = [us.feed.pk for us in newsletter_subs]
        feeds = Feed.objects.filter(feed_title__iexact=sender_name,
                                    pk__in=newsletter_feed_ids)
        if feeds.count():
            feed = feeds[0]

    # Create a new feed if it doesn't exist by sender name or email
    if not feed:
        feed = Feed.objects.create(feed_address=feed_address,
                                   feed_link='http://' + sender_domain,
                                   feed_title=sender_name,
                                   fetched_once=True,
                                   known_good=True)
        feed.update()
        logging.user(user, "~FCCreating newsletter feed: ~SB%s" % (feed))
        r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
        r.publish(user.username, 'reload:%s' % feed.pk)
        self._check_if_first_newsletter(user)

    # Keep the feed looking fresh so it isn't treated as stale.
    feed.last_update = datetime.datetime.now()
    feed.last_story_date = datetime.datetime.now()
    feed.save()
    # Track sender display-name changes on the feed title.
    if feed.feed_title != sender_name:
        feed.feed_title = sender_name
        feed.save()
    try:
        usersub = UserSubscription.objects.get(user=user, feed=feed)
    except UserSubscription.DoesNotExist:
        _, _, usersub = UserSubscription.add_subscription(
            user=user,
            feed_address=feed_address,
            folder='Newsletters')
        r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
        r.publish(user.username, 'reload:feeds')
    story_hash = MStory.ensure_story_hash(params['signature'], feed.pk)
    story_content = self._get_content(params)
    # Prefer the plain-text part when it carries more content than HTML.
    plain_story_content = self._get_content(params, force_plain=True)
    if len(plain_story_content) > len(story_content):
        story_content = plain_story_content
    story_content = self._clean_content(story_content)
    story_params = {
        "story_feed_id": feed.pk,
        "story_date": datetime.datetime.fromtimestamp(int(params['timestamp'])),
        "story_title": params['subject'],
        "story_content": story_content,
        "story_author_name": params['from'],
        "story_permalink": "https://%s%s" % (
            Site.objects.get_current().domain,
            reverse('newsletter-story', kwargs={'story_hash': story_hash})),
        "story_guid": params['signature'],
    }
    try:
        # The email signature doubles as the story's dedupe key.
        story = MStory.objects.get(story_hash=story_hash)
    except MStory.DoesNotExist:
        story = MStory(**story_params)
        story.save()
    usersub.needs_unread_recalc = True
    usersub.save()
    self._publish_to_subscribers(feed, story.story_hash)
    MFetchHistory.add(feed_id=feed.pk, fetch_type='push')
    logging.user(
        user, "~FCNewsletter feed story: ~SB%s~SN / ~SB%s" %
        (story.story_title, feed))
    return story
def run(self, user_id):
    """Per-user maintenance: trim read stories, verify feed scheduling,
    and sync the Twitter profile photo.

    Fix: guard the MSocialServices lookup — users without a social
    services record raised DoesNotExist and aborted the task (the same
    guard the other variants of this task already apply).
    """
    UserSubscription.trim_user_read_stories(user_id)
    UserSubscription.verify_feeds_scheduled(user_id)
    try:
        ss = MSocialServices.objects.get(user_id=user_id)
    except MSocialServices.DoesNotExist:
        logging.debug(" ---> ~FRCleaning up user, can't find social_services for user_id: ~SB%s" % user_id)
        return
    ss.sync_twitter_photo()
def FetchArchiveFeedsForUser(user_id):
    """Task entry point: kick off archive-feed fetching for every
    subscription belonging to `user_id`."""
    UserSubscription.fetch_archive_feeds_for_user(user_id)
def api_unread_story(request, trigger_slug=None):
    """IFTTT trigger endpoint: return unread (or, for the
    "new-unread-focus-story" slug, unread focus) stories for the feed id
    or folder name given in `triggerFields.feed_or_folder`."""
    user = request.user
    body = request.body_json
    after = body.get('after', None)
    before = body.get('before', None)
    limit = body.get('limit', 50)
    fields = body.get('triggerFields')
    feed_or_folder = fields['feed_or_folder']
    entries = []
    if isinstance(feed_or_folder, int) or feed_or_folder.isdigit():
        # Single feed by id.
        feed_id = int(feed_or_folder)
        try:
            usersub = UserSubscription.objects.get(user=user, feed_id=feed_id)
        except UserSubscription.DoesNotExist:
            # Not subscribed (anymore): nothing to report.
            return dict(data=[])
        found_feed_ids = [feed_id]
        found_trained_feed_ids = [feed_id] if usersub.is_trained else []
        stories = usersub.get_stories(order="newest", read_filter="unread",
                                      offset=0, limit=limit,
                                      default_cutoff_date=user.profile.unread_cutoff)
    else:
        # Folder by title ("Top Level" and "all" are special-cased).
        folder_title = feed_or_folder
        if folder_title == "Top Level":
            folder_title = " "
        usf = UserSubscriptionFolders.objects.get(user=user)
        flat_folders = usf.flatten_folders()
        feed_ids = None
        if folder_title != "all":
            feed_ids = flat_folders.get(folder_title)
        usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=feed_ids,
                                                   read_filter="unread")
        feed_ids = [sub.feed_id for sub in usersubs]
        params = {
            "user_id": user.pk,
            "feed_ids": feed_ids,
            "offset": 0,
            "limit": limit,
            "order": "newest",
            "read_filter": "unread",
            "usersubs": usersubs,
            "cutoff_date": user.profile.unread_cutoff,
        }
        story_hashes, unread_feed_story_hashes = UserSubscription.feed_stories(**params)
        mstories = MStory.objects(story_hash__in=story_hashes).order_by('-story_date')
        stories = Feed.format_stories(mstories)
        found_feed_ids = list(set([story['story_feed_id'] for story in stories]))
        trained_feed_ids = [sub.feed_id for sub in usersubs if sub.is_trained]
        found_trained_feed_ids = list(set(trained_feed_ids) & set(found_feed_ids))
    # Load classifiers only for feeds the user has trained.
    if found_trained_feed_ids:
        classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk,
                                                        feed_id__in=found_trained_feed_ids))
        classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk,
                                                            feed_id__in=found_trained_feed_ids))
        classifier_titles = list(MClassifierTitle.objects(user_id=user.pk,
                                                          feed_id__in=found_trained_feed_ids))
        classifier_tags = list(MClassifierTag.objects(user_id=user.pk,
                                                      feed_id__in=found_trained_feed_ids))
    feeds = dict([(f.pk, {
        "title": f.feed_title,
        "website": f.feed_link,
        "address": f.feed_address,
    }) for f in Feed.objects.filter(pk__in=found_feed_ids)])
    for story in stories:
        # Honor the IFTTT pagination window.
        if before and int(story['story_date'].strftime("%s")) > before:
            continue
        if after and int(story['story_date'].strftime("%s")) < after:
            continue
        score = 0
        if found_trained_feed_ids and story['story_feed_id'] in found_trained_feed_ids:
            score = compute_story_score(story, classifier_titles=classifier_titles,
                                        classifier_authors=classifier_authors,
                                        classifier_tags=classifier_tags,
                                        classifier_feeds=classifier_feeds)
            if score < 0:
                continue
            if trigger_slug == "new-unread-focus-story" and score < 1:
                continue
        feed = feeds.get(story['story_feed_id'], None)
        entries.append({
            "StoryTitle": story['story_title'],
            "StoryContent": story['story_content'],
            "StoryURL": story['story_permalink'],
            "StoryAuthor": story['story_authors'],
            "PublishedAt": story['story_date'].strftime("%Y-%m-%dT%H:%M:%SZ"),
            "StoryScore": score,
            "Site": feed and feed['title'],
            "SiteURL": feed and feed['website'],
            "SiteRSS": feed and feed['address'],
            "meta": {
                "id": story['story_hash'],
                "timestamp": int(story['story_date'].strftime("%s"))
            },
        })
    if after:
        entries = sorted(entries, key=lambda s: s['meta']['timestamp'])
    logging.user(request,
                 "~FYChecking unread%s stories with ~SB~FCIFTTT~SN~FY: ~SB%s~SN - ~SB%s~SN stories" % (
                     " ~SBfocus~SN" if trigger_slug == "new-unread-focus-story" else "",
                     feed_or_folder, len(entries)))
    return {"data": entries[:limit]}
def FetchArchiveFeedsChunk(user_id, feed_ids):
    """Worker task: fetch archive stories for one chunk of `feed_ids`
    belonging to `user_id`."""
    UserSubscription.fetch_archive_feeds_chunk(user_id, feed_ids)