def import_from_google_reader(request):
    """Import the user's Google Reader feeds.

    If the synchronous import times out, the work is handed off to a
    background task and code 2 is returned; otherwise the code is whatever
    try_import_feeds reports.  Returns a dict of code, delayed flag, and
    feed/starred counts (starred_count is always 0 here).
    """
    code = 0
    feed_count = 0
    starred_count = 0
    delayed = False
    if request.user.is_authenticated():
        reader_importer = GoogleReaderImporter(request.user)
        auto_active = bool(request.REQUEST.get('auto_active') or False)
        try:
            code = reader_importer.try_import_feeds(auto_active=auto_active)
        except TimeoutError:
            # Too slow to finish inline: queue a background import and tell
            # the client it was delayed (code 2).
            ProcessReaderImport.delay(request.user.pk, auto_active=auto_active)
            feed_count = UserSubscription.objects.filter(user=request.user).count()
            logging.user(request, "~FR~SBGoogle Reader import took too long, found %s feeds. Tasking..." % feed_count)
            delayed = True
            code = 2
        # Clear the session flag that routed the user into this import flow.
        if 'import_from_google_reader' in request.session:
            del request.session['import_from_google_reader']
        feed_count = UserSubscription.objects.filter(user=request.user).count()
    return dict(code=code, delayed=delayed, feed_count=feed_count, starred_count=starred_count)
def send_new_premium_email(self, force=False):
    """Notify admins of a new premium account, then send the user a
    one-time thank-you email (re-sent only when force=True).

    NOTE: the templates are rendered with locals(), so the local variable
    names in this method ('user', etc.) are part of the template contract.
    """
    subs = UserSubscription.objects.filter(user=self.user)
    message = """Woohoo! User: %(user)s Feeds: %(feeds)s Sincerely, NewsBlur""" % {'user': self.user.username, 'feeds': subs.count()}
    mail_admins('New premium account', message, fail_silently=True)
    # No address, or user opted out of emails: admin notice only.
    if not self.user.email or not self.send_emails:
        return
    # Dedupe: only one 'new_premium' email per user unless forced.
    sent_email, created = MSentEmail.objects.get_or_create(receiver_user_id=self.user.pk,
                                                           email_type='new_premium')
    if not created and not force:
        return
    user = self.user
    text = render_to_string('mail/email_new_premium.txt', locals())
    html = render_to_string('mail/email_new_premium.xhtml', locals())
    subject = "Thanks for going premium on NewsBlur!"
    msg = EmailMultiAlternatives(subject, text,
                                 from_email='NewsBlur <%s>' % settings.HELLO_EMAIL,
                                 to=['%s <%s>' % (user, user.email)])
    msg.attach_alternative(html, "text/html")
    msg.send(fail_silently=True)
    logging.user(self.user, "~BB~FM~SBSending email for new premium: %s" % self.user.email)
def mark_story_as_unread(request):
    """Mark a single story unread, rewinding the subscription's
    mark_read_date when the story is older than the current unread window."""
    story_id = request.POST['story_id']
    feed_id = int(request.POST['feed_id'])
    usersub = UserSubscription.objects.select_related('feed').get(user=request.user, feed=feed_id)
    # Unread counts must be recomputed after this change.
    if not usersub.needs_unread_recalc:
        usersub.needs_unread_recalc = True
        usersub.save()
    data = dict(code=0, payload=dict(story_id=story_id))
    logging.user(request, "~FY~SBUnread~SN story in feed: %s" % (usersub.feed))
    story = MStory.objects(story_feed_id=feed_id, story_guid=story_id)[0]
    if story.story_date < usersub.mark_read_date:
        # Story is outside the mark as read range, so invert all stories before.
        newer_stories = MStory.objects(story_feed_id=story.story_feed_id,
                                       story_date__gte=story.story_date,
                                       story_date__lte=usersub.mark_read_date
                                       ).only('story_guid')
        newer_stories = [s.story_guid for s in newer_stories]
        usersub.mark_read_date = story.story_date - datetime.timedelta(minutes=1)
        usersub.needs_unread_recalc = True
        usersub.save()
        # Mark stories as read only after the mark_read_date has been moved, otherwise
        # these would be ignored.
        data = usersub.mark_story_ids_as_read(newer_stories, request=request)
    # Drop the per-user read record so the story shows as unread again.
    m = MUserStory.objects(story_id=story_id, user_id=request.user.pk, feed_id=feed_id)
    m.delete()
    return data
def trim_user_read_stories(self, user_id):
    """Recompute a user's read-stories set ("RS:<user_id>") from the
    per-feed read sets of their current subscriptions, and report how many
    old entries would be trimmed.

    NOTE(review): the sunionstore/sadd lines that would actually rewrite
    the user's set are commented out, so this currently only takes a
    backup and logs the counts — confirm that is intentional (dry run).
    """
    r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
    subs = UserSubscription.objects.filter(user_id=user_id).only('feed')
    if not subs:
        return
    feeds = [f.feed_id for f in subs]
    old_rs = r.smembers("RS:%s" % user_id)
    old_count = len(old_rs)
    # new_rs = r.sunionstore("RS:%s" % user_id, *["RS:%s:%s" % (user_id, f) for f in feeds])
    new_rs = r.sunion(*["RS:%s:%s" % (user_id, f) for f in feeds])
    if not old_count:
        return
    # Keep a 24-hour backup of the current set before any rewrite.
    r.sunionstore("RS:%s:backup" % user_id, "RS:%s" % user_id)
    r.expire("RS:%s:backup" % user_id, 60*60*24)
    missing_rs = []
    # Hashes look like "<feed_id>:<story>"; extract the feed id prefix.
    feed_re = re.compile(r'(\d+):.*?')
    for rs in old_rs:
        found = feed_re.search(rs)
        if not found:
            print " ---> Not found: %s" % rs
            continue
        rs_feed_id = found.groups()[0]
        # Entries from feeds no longer subscribed would be lost by the
        # rebuild; collect them so they can be counted (and re-added if
        # the sadd below were enabled).
        if int(rs_feed_id) not in feeds:
            missing_rs.append(rs)
    # r.sadd("RS:%s" % user_id, *missing_rs)
    new_count = len(new_rs)
    missing_count = len(missing_rs)
    new_total = new_count + missing_count
    user = User.objects.get(pk=user_id)
    logging.user(user, "~FBTrimming ~FR%s~FB/%s (~SB%s~SN+~SB%s~SN saved) user read stories..." %
                 (old_count - new_total, old_count, new_count, missing_count))
def load_starred_stories(request):
    """Return one page of the user's starred stories, formatted with
    localized dates and client-side display fields."""
    user = get_user(request)
    offset = int(request.REQUEST.get('offset', 0))
    limit = int(request.REQUEST.get('limit', 10))
    page = int(request.REQUEST.get('page', 0))
    # Page numbers win over a raw offset when supplied.
    if page:
        offset = limit * (page - 1)

    mstories = MStarredStory.objects(user_id=user.pk).order_by('-starred_date')[offset:offset+limit]
    stories = Feed.format_stories(mstories)
    tz = user.profile.timezone
    for story in stories:
        localized_date = localtime_for_timezone(story['story_date'], tz)
        now = localtime_for_timezone(datetime.datetime.now(), tz)
        story['short_parsed_date'] = format_story_link_date__short(localized_date, now)
        story['long_parsed_date'] = format_story_link_date__long(localized_date, now)
        localized_starred = localtime_for_timezone(story['starred_date'], tz)
        story['starred_date'] = format_story_link_date__long(localized_starred, now)
        # Starred stories are always shown as read, with neutral intelligence.
        story['read_status'] = 1
        story['starred'] = True
        story['intelligence'] = {'feed': 0, 'author': 0, 'tags': 0, 'title': 0}

    logging.user(request, "~FCLoading starred stories: ~SB%s stories" % (len(stories)))
    return dict(stories=stories)
def mark_story_ids_as_read(self, story_ids, request=None):
    """Mark the given story guids as read for this subscription.

    Creates an MUserStory per story (idempotent via get_or_create) and
    flags the subscription for an unread-count recalculation.
    Returns a dict echoing the story ids.
    """
    data = dict(code=0, payload=story_ids)
    if not request:
        # Fall back to the user object for log attribution.
        request = self.user
    if not self.needs_unread_recalc:
        self.needs_unread_recalc = True
        self.save()
    if len(story_ids) > 1:
        logging.user(request, "~FYRead %s stories in feed: %s" % (len(story_ids), self.feed))
    else:
        logging.user(request, "~FYRead story in feed: %s" % (self.feed))
    for story_id in set(story_ids):
        try:
            story = MStory.objects.get(story_feed_id=self.feed_id, story_guid=story_id)
        except MStory.DoesNotExist:
            # Story has been deleted, probably by feed_fetcher.
            continue
        except MStory.MultipleObjectsReturned:
            # Duplicate guids in the feed: take the first match.
            story = MStory.objects.filter(story_feed_id=self.feed_id, story_guid=story_id)[0]
        now = datetime.datetime.utcnow()
        date = now if now > story.story_date else story.story_date  # For handling future stories
        m, _ = MUserStory.objects.get_or_create(story_id=story_id,
                                                user_id=self.user_id,
                                                feed_id=self.feed_id,
                                                defaults={
                                                    'read_date': date,
                                                    'story': story,
                                                    'story_date': story.story_date,
                                                })
    return data
def mark_feed_as_read(request):
    """Mark all stories read in each requested feed.

    Returns a dict with code 1 when the last processed feed succeeded,
    -1 on an IntegrityError, 0 when nothing was processed.
    """
    requested_ids = [int(raw_id) for raw_id in request.REQUEST.getlist('feed_id') if raw_id]
    total = len(requested_ids)
    several = total > 1
    code = 0
    for requested_id in requested_ids:
        try:
            feed = Feed.objects.get(id=requested_id)
        except Feed.DoesNotExist:
            # Unknown feed ids are silently skipped.
            continue
        code = 0
        subscription = UserSubscription.objects.get(feed=feed, user=request.user)
        try:
            subscription.mark_feed_read()
        except IntegrityError:
            code = -1
        else:
            code = 1
        if not several:
            logging.user(request, "~FMMarking feed as read: ~SB%s" % (feed,))
    if several:
        logging.user(request, "~FMMarking ~SB%s~SN feeds as read" % (total,))
    return dict(code=code)
def import_starred_items(self, count=10):
    """Page through the Google Reader starred-items API, importing each page.

    count: page size passed to the API.  Pagination only continues while
    count >= 1000; smaller imports stop after the first page.
    Returns the total number of starred stories stored for this user.
    """
    continuation = ""
    while True:
        if continuation:
            sub_url = "%s/0/stream/contents/user/-/state/com.google/starred?n=%s&c=%s" % (
                self.scope,
                count,
                continuation,
            )
        else:
            sub_url = "%s/0/stream/contents/user/-/state/com.google/starred?n=%s" % (self.scope, count)
        stories_str = self.send_request(sub_url)
        try:
            stories = json.decode(stories_str)
            continuation = stories.get("continuation")
        except Exception:
            # Narrowed from a bare except.  Also clear the now-stale
            # continuation token: previously a persistent parse failure
            # left the old token set and could loop forever.
            logging.user(self.user, "~BB~FW~SBGoogle Reader starred stories: ~BT~FWNo stories")
            stories = None
            continuation = None
        if stories:
            logging.user(
                self.user,
                "~BB~FW~SBGoogle Reader starred stories: ~BT~FW%s stories" % (len(stories["items"]))
            )
            self.process_starred_items(stories["items"])
        if not continuation or count < 1000:
            break
    starred_count = MStarredStory.objects.filter(user_id=self.user.pk).count()
    return starred_count
def add_site(request, token):
    """Bookmarklet endpoint: subscribe the token's user to the feed at
    ?url=, optionally creating a new folder first.

    Responds with a JSONP payload: {code, message, usersub}.
    """
    code = 0
    url = request.GET["url"]
    folder = request.GET["folder"]
    new_folder = request.GET.get("new_folder")
    callback = request.GET["callback"]
    # Initialize everything referenced after the try/except: previously
    # profile/message/us could be unbound (NameError) on a bad token or
    # empty URL.
    profile = None
    message = None
    us = None
    if not url:
        code = -1
        message = "Missing URL."
    else:
        try:
            profile = Profile.objects.get(secret_token=token)
            if new_folder:
                usf, _ = UserSubscriptionFolders.objects.get_or_create(user=profile.user)
                usf.add_folder(folder, new_folder)
                folder = new_folder
            code, message, us = UserSubscription.add_subscription(
                user=profile.user, feed_address=url, folder=folder, bookmarklet=True
            )
        except Profile.DoesNotExist:
            code = -1
            message = "User not found."
    if code > 0:
        message = "OK"
    if profile:
        logging.user(profile.user, "~FRAdding URL from site: ~SB%s (in %s)" % (url, folder), request=request)
    return HttpResponse(
        callback + "(" + json.encode({"code": code, "message": message, "usersub": us and us.feed_id}) + ")",
        mimetype="text/plain",
    )
def save_feed_chooser(request):
    """Standard-account feed chooser: activate the (up to 64) approved
    feeds and deactivate every other subscription."""
    raw_ids = request.POST.getlist('approved_feeds')
    approved = set([int(feed_id) for feed_id in raw_ids if feed_id][:64])
    activated = 0
    subscriptions = UserSubscription.objects.filter(user=request.user)
    for subscription in subscriptions:
        try:
            if subscription.feed.pk in approved:
                subscription.active = True
                activated += 1
                subscription.save()
                subscription.feed.count_subscribers()
            elif subscription.active:
                subscription.active = False
                subscription.save()
        except Feed.DoesNotExist:
            # Feed row vanished out from under the subscription; skip it.
            pass
    logging.user(request, "~BB~FW~SBActivated standard account: ~FC%s~SN/~SB%s" % (
        activated,
        subscriptions.count()
    ))
    request.user.profile.queue_new_feeds()
    request.user.profile.refresh_stale_feeds(exclude_new=True)
    return {'activated': activated}
def fetch_request(self): try: r = requests.get(self.story.story_permalink, headers=self.headers, verify=False) except (AttributeError, SocketError, requests.ConnectionError, requests.models.MissingSchema, requests.sessions.InvalidSchema), e: logging.user(self.request, "~SN~FRFailed~FY to fetch ~FGoriginal text~FY: %s" % e) return
def set_view_setting(request):
    """Persist a feed's view/order/read-filter/layout settings into the
    profile's JSON view_settings blob."""
    code = 1
    feed_id = request.POST["feed_id"]
    # (settings-dict key, submitted value) pairs; only non-empty values
    # overwrite the stored setting.
    overrides = (
        ("v", request.POST.get("feed_view_setting")),
        ("o", request.POST.get("feed_order_setting")),
        ("r", request.POST.get("feed_read_filter_setting")),
        ("l", request.POST.get("feed_layout_setting")),
    )
    view_settings = json.decode(request.user.profile.view_settings)
    feed_settings = view_settings.get(feed_id, {})
    if isinstance(feed_settings, basestring):
        # Legacy format stored just the view name as a bare string.
        feed_settings = {"v": feed_settings}
    for key, value in overrides:
        if value:
            feed_settings[key] = value
    view_settings[feed_id] = feed_settings
    request.user.profile.view_settings = json.encode(view_settings)
    request.user.profile.save()
    logging.user(
        request,
        "~FMView settings: %s/%s/%s/%s"
        % (overrides[0][1], overrides[1][1], overrides[2][1], overrides[3][1]),
    )
    response = dict(code=code)
    return response
def send_opml_export_email(self):
    """Email the user an OPML backup of their subscriptions.

    NOTE(review): the get_or_create result is ignored, so this variant
    records the send but never dedupes — it re-sends every time it is
    called.  Compare with the reason/force variant of this method.
    """
    if not self.user.email:
        return
    # Record that an opml_export email exists for this user.
    MSentEmail.objects.get_or_create(receiver_user_id=self.user.pk,
                                     email_type='opml_export')
    exporter = OPMLExporter(self.user)
    opml = exporter.process()
    params = {
        'feed_count': UserSubscription.objects.filter(user=self.user).count(),
    }
    user = self.user
    text = render_to_string('mail/email_opml_export.txt', params)
    html = render_to_string('mail/email_opml_export.xhtml', params)
    subject = "Backup OPML file of your NewsBlur sites"
    filename= 'NewsBlur Subscriptions - %s.xml' % datetime.datetime.now().strftime('%Y-%m-%d')
    msg = EmailMultiAlternatives(subject, text,
                                 from_email='NewsBlur <%s>' % settings.HELLO_EMAIL,
                                 to=['%s <%s>' % (user, user.email)])
    msg.attach_alternative(html, "text/html")
    msg.attach(filename, opml, 'text/xml')
    msg.send(fail_silently=True)
    logging.user(self.user, "~BB~FM~SBSending OPML backup email to: %s" % self.user.email)
def send_first_share_to_blurblog_email(self, force=False):
    """One-time email pointing the user at their blurblog after their
    first shared story (re-sent only when force=True)."""
    from apps.social.models import MSocialProfile, MSharedStory

    if not self.user.email:
        return
    # Dedupe on the 'first_share' email type.
    record, created = MSentEmail.objects.get_or_create(receiver_user_id=self.user.pk,
                                                       email_type='first_share')
    if not created and not force:
        return

    social_profile = MSocialProfile.objects.get(user_id=self.user.pk)
    params = {
        'shared_stories': MSharedStory.objects.filter(user_id=self.user.pk).count(),
        'blurblog_url': social_profile.blurblog_url,
        'blurblog_rss': social_profile.blurblog_rss
    }
    user = self.user
    text = render_to_string('mail/email_first_share_to_blurblog.txt', params)
    html = render_to_string('mail/email_first_share_to_blurblog.xhtml', params)
    subject = "Your shared stories on NewsBlur are available on your Blurblog"
    email = EmailMultiAlternatives(subject, text,
                                   from_email='NewsBlur <%s>' % settings.HELLO_EMAIL,
                                   to=['%s <%s>' % (user, user.email)])
    email.attach_alternative(html, "text/html")
    email.send(fail_silently=True)
    logging.user(self.user, "~BB~FM~SBSending first share to blurblog email to: %s" % self.user.email)
def process_response(self, request, response):
    """Middleware hook: track the user's last-seen time and IP.

    Only fires on main page / reader-feed loads for authenticated users;
    queues a CleanupUser task when the user was last seen over an hour ago.
    """
    if (
        (
            request.path == "/"
            or request.path.startswith("/reader/refresh_feeds")
            or request.path.startswith("/reader/load_feeds")
            or request.path.startswith("/reader/feeds")
        )
        and hasattr(request, "user")
        and request.user.is_authenticated()
    ):
        hour_ago = datetime.datetime.utcnow() - datetime.timedelta(minutes=60)
        # Honor the proxy header when present; fall back to the socket address.
        ip = request.META.get("HTTP_X_FORWARDED_FOR", None) or request.META["REMOTE_ADDR"]
        # SUBSCRIBER_EXPIRE = datetime.datetime.utcnow() - datetime.timedelta(days=settings.SUBSCRIBER_EXPIRE)
        if request.user.profile.last_seen_on < hour_ago:
            logging.user(request, "~FG~BBRepeat visitor: ~SB%s (%s)" % (request.user.profile.last_seen_on, ip))
            CleanupUser.delay(user_id=request.user.pk)
        elif settings.DEBUG:
            logging.user(
                request, "~FG~BBRepeat visitor (ignored): ~SB%s (%s)" % (request.user.profile.last_seen_on, ip)
            )
        request.user.profile.last_seen_on = datetime.datetime.utcnow()
        # Keep at most the trailing 15 characters (field-length limit, presumably).
        request.user.profile.last_seen_ip = ip[-15:]
        request.user.profile.save()
    return response
def original_text(request):
    """Fetch (and possibly cache) the original article text for a story,
    located by story_hash or by story_id + feed_id."""
    story_id = request.REQUEST.get('story_id')
    feed_id = request.REQUEST.get('feed_id')
    story_hash = request.REQUEST.get('story_hash', None)
    force = request.REQUEST.get('force', False)
    debug = request.REQUEST.get('debug', False)

    # A story hash is sufficient on its own; otherwise fall back to the
    # (guid, feed) pair.
    if story_hash:
        story, _ = MStory.find_story(story_hash=story_hash)
    else:
        story, _ = MStory.find_story(story_id=story_id, story_feed_id=feed_id)

    if not story:
        logging.user(request, "~FYFetching ~FGoriginal~FY story text: ~FRstory not found")
        return {'code': -1, 'message': 'Story not found.', 'original_text': None, 'failed': True}

    original_text = story.fetch_original_text(force=force, request=request, debug=debug)
    # Treat very short extractions as failures.
    failed = not original_text or len(original_text) < 100
    return {
        'feed_id': story.story_feed_id,
        'story_hash': story.story_hash,
        'story_id': story.story_guid,
        'image_urls': story.image_urls,
        'secure_image_urls': Feed.secure_image_urls(story.image_urls),
        'original_text': original_text,
        'failed': failed,
    }
def send_opml_export_email(self, reason=None, force=False):
    """Email the user an OPML backup of their subscriptions, at most once
    per day unless force=True.

    reason: optional string passed through to the email templates.
    """
    if not self.user.email:
        return
    # Rate limit: skip if any opml_export email went out within 24 hours.
    emails_sent = MSentEmail.objects.filter(receiver_user_id=self.user.pk,
                                            email_type='opml_export')
    day_ago = datetime.datetime.now() - datetime.timedelta(days=1)
    for email in emails_sent:
        if email.date_sent > day_ago and not force:
            logging.user(self.user, "~SN~FMNot sending opml export email, already sent today.")
            return
    MSentEmail.record(receiver_user_id=self.user.pk, email_type='opml_export')
    exporter = OPMLExporter(self.user)
    opml = exporter.process()
    params = {
        'feed_count': UserSubscription.objects.filter(user=self.user).count(),
        'reason': reason,
    }
    user = self.user
    text = render_to_string('mail/email_opml_export.txt', params)
    html = render_to_string('mail/email_opml_export.xhtml', params)
    subject = "Backup OPML file of your NewsBlur sites"
    filename= 'NewsBlur Subscriptions - %s.xml' % datetime.datetime.now().strftime('%Y-%m-%d')
    msg = EmailMultiAlternatives(subject, text,
                                 from_email='NewsBlur <%s>' % settings.HELLO_EMAIL,
                                 to=['%s <%s>' % (user, user.email)])
    msg.attach_alternative(html, "text/html")
    msg.attach(filename, opml, 'text/xml')
    msg.send(fail_silently=True)
    logging.user(self.user, "~BB~FM~SBSending OPML backup email to: %s" % self.user.email)
def paypal_form(request):
    """Render the PayPal subscribe button for a premium subscription."""
    domain = Site.objects.get_current().domain
    subscription_params = {
        "cmd": "_xclick-subscriptions",
        "business": "*****@*****.**",
        "a3": "12.00",  # price
        "p3": 1,  # duration of each unit (depends on unit)
        "t3": "Y",  # duration unit ("M for Month")
        "src": "1",  # make payments recur
        "sra": "1",  # reattempt payment on payment error
        "no_note": "1",  # remove extra notes (optional)
        "item_name": "NewsBlur Premium Account",
        "notify_url": "http://%s%s" % (domain, reverse('paypal-ipn')),
        "return_url": "http://%s%s" % (domain, reverse('paypal-return')),
        "cancel_return": "http://%s%s" % (domain, reverse('index')),
        # Ties the IPN callback back to this user.
        "custom": request.user.username,
    }
    form = PayPalPaymentsForm(initial=subscription_params, button_type="subscribe")
    logging.user(request, "~FBLoading paypal/feedchooser")
    return HttpResponse(form.render(), mimetype='text/html')
def refund_premium(self):
    """Refund the user's premium payment (Stripe when linked, otherwise
    PayPal) and cancel the premium subscription.

    Returns the refunded dollar amount, or False if nothing was refunded.
    """
    refunded = False
    if self.stripe_id:
        stripe.api_key = settings.STRIPE_SECRET
        stripe_customer = stripe.Customer.retrieve(self.stripe_id)
        stripe_payments = stripe.Charge.all(customer=stripe_customer.id).data
        # Refund the first listed charge; Stripe amounts are in cents.
        stripe_payments[0].refund()
        refunded = stripe_payments[0].amount/100
        logging.user(self.user, "~FRRefunding stripe payment: $%s" % refunded)
        self.cancel_premium()
    else:
        paypal_opts = {
            'API_ENVIRONMENT': 'PRODUCTION',
            'API_USERNAME': settings.PAYPAL_API_USERNAME,
            'API_PASSWORD': settings.PAYPAL_API_PASSWORD,
            'API_SIGNATURE': settings.PAYPAL_API_SIGNATURE,
        }
        paypal = PayPalInterface(**paypal_opts)
        # First matching subscription payment recorded by the IPN listener
        # (no explicit ordering on this queryset).
        transaction = PayPalIPN.objects.filter(custom=self.user.username,
                                               txn_type='subscr_payment')[0]
        refund = paypal.refund_transaction(transaction.txn_id)
        refunded = int(float(refund['raw']['TOTALREFUNDEDAMOUNT'][0]))
        logging.user(self.user, "~FRRefunding paypal payment: $%s" % refunded)
        self.cancel_premium()
    return refunded
def _check_if_first_newsletter(self, user, force=False):
    """Send a one-time email when the user receives their first email
    newsletter subscription (re-sent only when force=True)."""
    if not user.email:
        return

    subscriptions = UserSubscription.objects.filter(user=user)
    has_newsletter = any(sub.feed.is_newsletter for sub in subscriptions)
    if not has_newsletter and not force:
        return

    params = dict(receiver_user_id=user.pk, email_type='first_newsletter')
    try:
        sent_email = MSentEmail.objects.get(**params)
        if not force:
            # Already notified this user once; nothing to do.
            return
    except MSentEmail.DoesNotExist:
        sent_email = MSentEmail.objects.create(**params)

    text = render_to_string('mail/email_first_newsletter.txt', {})
    html = render_to_string('mail/email_first_newsletter.xhtml', {})
    subject = "Your email newsletters are now being sent to NewsBlur"
    email = EmailMultiAlternatives(subject, text,
                                   from_email='NewsBlur <%s>' % settings.HELLO_EMAIL,
                                   to=['%s <%s>' % (user, user.email)])
    email.attach_alternative(html, "text/html")
    email.send(fail_silently=True)
    logging.user(user, "~BB~FM~SBSending first newsletter email to: %s" % user.email)
def activate_premium(self):
    """Flip this profile to premium: activate the user and all their
    subscriptions, schedule premium feed setup, and queue the welcome
    email.  Returns True."""
    from apps.profile.tasks import EmailNewPremium
    EmailNewPremium.delay(user_id=self.user.pk)
    self.is_premium = True
    self.save()
    self.user.is_active = True
    self.user.save()
    subs = UserSubscription.objects.filter(user=self.user)
    for sub in subs:
        if sub.active:
            continue
        sub.active = True
        try:
            sub.save()
        except (IntegrityError, Feed.DoesNotExist):
            # Dead or duplicate feed rows: leave that subscription inactive.
            pass
    try:
        scheduled_feeds = [sub.feed.pk for sub in subs]
    except Feed.DoesNotExist:
        scheduled_feeds = []
    logging.user(self.user, "~SN~FMTasking the scheduling immediate premium setup of ~SB%s~SN feeds..." %
                 len(scheduled_feeds))
    SchedulePremiumSetup.apply_async(kwargs=dict(feed_ids=scheduled_feeds))
    self.queue_new_feeds()
    self.setup_premium_history()
    logging.user(self.user, "~BY~SK~FW~SBNEW PREMIUM ACCOUNT! WOOHOO!!! ~FR%s subscriptions~SN!" % (subs.count()))
    return True
def fetch_request(self, use_mercury=True): headers = self.headers url = self.story_url if self.story and not url: url = self.story.story_permalink if use_mercury: mercury_api_key = getattr(settings, 'MERCURY_PARSER_API_KEY', 'abc123') headers["content-type"] = "application/json" headers["x-api-key"] = mercury_api_key if settings.DEBUG: url = "http://nb.local.com:4040/rss_feeds/original_text_fetcher?url=%s" % url else: url = "https://www.newsblur.com/rss_feeds/original_text_fetcher?url=%s" % url try: r = requests.get(url, headers=headers, verify=False) r.connection.close() except (AttributeError, SocketError, requests.ConnectionError, requests.models.MissingSchema, requests.sessions.InvalidSchema, requests.sessions.TooManyRedirects, requests.models.InvalidURL, requests.models.ChunkedEncodingError, requests.models.ContentDecodingError, urllib3.exceptions.LocationValueError, LocationParseError, OpenSSLError, PyAsn1Error), e: logging.user(self.request, "~SN~FRFailed~FY to fetch ~FGoriginal text~FY: %s" % e) return
def load_recommended_feed(request):
    """Render one recommended feed (two are fetched so the template knows
    whether a next page exists)."""
    user = get_user(request)
    page = int(request.REQUEST.get('page', 0))
    usersub = None
    refresh = request.REQUEST.get('refresh')
    # Bug fix: datetime.datetime.now was passed without calling it, so
    # approved_date was compared against a function object rather than
    # the current time.
    now = datetime.datetime.now()
    unmoderated = request.REQUEST.get('unmoderated', False) == 'true'

    if unmoderated:
        recommended_feeds = RecommendedFeed.objects.filter(is_public=False, declined_date__isnull=True)[page:page+2]
    else:
        recommended_feeds = RecommendedFeed.objects.filter(is_public=True, approved_date__lte=now)[page:page+2]

    if recommended_feeds and request.user.is_authenticated():
        usersub = UserSubscription.objects.filter(user=user, feed=recommended_feeds[0].feed)
    if refresh != 'true' and page > 0:
        logging.user(request, "~FBBrowse recommended feed: ~SBPage #%s" % (page+1))

    recommended_feed = recommended_feeds and recommended_feeds[0]
    if recommended_feed:
        # Bug fix: the icon lookup used to run before this guard and blew
        # up (AttributeError) when no recommended feed was found.
        feed_icon = MFeedIcon.objects(feed_id=recommended_feed.feed.pk)
        return render_to_response('recommendations/render_recommended_feed.xhtml', {
            'recommended_feed'  : recommended_feed,
            'description'       : recommended_feed.description or recommended_feed.feed.data.feed_tagline,
            'usersub'           : usersub,
            'feed_icon'         : feed_icon and feed_icon[0],
            'has_next_page'     : len(recommended_feeds) > 1,
            'has_previous_page' : page != 0,
            'unmoderated'       : unmoderated,
            'today'             : datetime.datetime.now(),
        }, context_instance=RequestContext(request))
    else:
        return HttpResponse("")
def mark_story_as_unread(request):
    """Mark a story as unread, resolving the subscription through a
    duplicate-feed record when the feed id has been merged."""
    story_id = request.POST['story_id']
    feed_id = int(request.POST['feed_id'])
    usersub = None
    try:
        usersub = UserSubscription.objects.select_related('feed').get(user=request.user, feed=feed_id)
    except UserSubscription.DoesNotExist:
        # Bug fix: the .get() above raises UserSubscription.DoesNotExist,
        # but Feed.DoesNotExist was being caught, so the duplicate-feed
        # fallback never ran.
        duplicate_feed = DuplicateFeed.objects.filter(duplicate_feed_id=feed_id)
        if duplicate_feed:
            try:
                usersub = UserSubscription.objects.get(user=request.user,
                                                       feed=duplicate_feed[0].feed)
            except UserSubscription.DoesNotExist:
                return dict(code=-1)
    if not usersub:
        # No subscription via the original or a duplicate feed id.
        # (Previously usersub could be unbound here, raising NameError.)
        return dict(code=-1)
    if not usersub.needs_unread_recalc:
        usersub.needs_unread_recalc = True
        usersub.save()
    data = dict(code=0, payload=dict(story_id=story_id))
    logging.user(request, "~FY~SBUnread~SN story in feed: %s" % (usersub.feed))
    story = MStory.objects(story_feed_id=feed_id, story_guid=story_id)[0]
    # Drop the per-user read record so the story shows as unread again.
    m = MUserStory.objects(story=story, user_id=request.user.pk, feed_id=feed_id)
    m.delete()
    return data
def opml_upload(request):
    """Import subscriptions from an uploaded OPML file and report the
    resulting folders and feeds."""
    xml_opml = None
    message = "OK"
    code = 1
    payload = {}
    if request.method == 'POST':
        if 'file' not in request.FILES:
            message = "Attach an .opml file."
            code = -1
        else:
            logging.user(request, "~FR~SBOPML upload starting...")
            upload = request.FILES['file']
            xml_opml = upload.read()
            opml_importer = OPMLImporter(xml_opml, request.user)
            folders = opml_importer.process()
            feeds = UserSubscription.objects.filter(user=request.user).values()
            payload = dict(folders=folders, feeds=feeds)
            logging.user(request, "~FR~SBOPML Upload: ~SK%s~SN~SB~FR feeds" % (len(feeds)))
            # An OPML upload supersedes any pending Google Reader import.
            request.session['import_from_google_reader'] = False
    data = json.encode(dict(message=message, code=code, payload=payload))
    return HttpResponse(data, mimetype='text/plain')
def api_save_new_subscription(request):
    """IFTTT action endpoint: subscribe the authenticated user to a feed URL."""
    body = request.body_json
    fields = body.get('actionFields')
    url = urlnorm.normalize(fields['url'])
    folder = fields['folder']
    # IFTTT's "Top Level" maps to NewsBlur's root folder (a single space).
    if folder == "Top Level":
        folder = " "
    code, message, us = UserSubscription.add_subscription(
        user=request.user, feed_address=url, folder=folder, bookmarklet=True
    )
    logging.user(request, "~FRAdding URL from ~FC~SBIFTTT~SN~FR: ~SB%s (in %s)" % (url, folder))
    # Prefer the canonical feed address once the subscription exists.
    if us and us.feed:
        url = us.feed.feed_address
    return {"data": [{
        "id": us and us.feed_id,
        "url": url,
    }]}
def import_signup(request):
    """Signup during a Google Reader import: create the account, then try
    to reattach the OAuth token captured earlier via (in order) the
    browser's uuid cookie, the anonymous session key, or the remote IP."""
    if request.method == "POST":
        signup_form = SignupForm(prefix='signup', data=request.POST)
        if signup_form.is_valid():
            new_user = signup_form.save()
            user_token = None
            if not user_token:
                # 1) Token previously stored against the browser's uuid cookie.
                user_uuid = request.COOKIES.get('newsblur_reader_uuid')
                if user_uuid:
                    user_token = OAuthToken.objects.filter(uuid=user_uuid).order_by('-created_date')
            if not user_token:
                # 2) Token recorded against the anonymous session.
                if request.session.session_key:
                    user_token = OAuthToken.objects.filter(session_id=request.session.session_key).order_by('-created_date')
            if not user_token:
                # 3) Last resort: token recorded against the client IP.
                user_token = OAuthToken.objects.filter(remote_ip=request.META['REMOTE_ADDR']).order_by('-created_date')
            if user_token:
                # Attach the most recent matching token to the new account.
                user_token = user_token[0]
                user_token.session_id = request.session.session_key
                user_token.user = new_user
                user_token.save()
                login_user(request, new_user)
                return HttpResponseRedirect(reverse('index'))
            else:
                logging.user(request, "~BR~FW ***> Can't find user token during import/signup. Re-authenticating...")
                return HttpResponseRedirect(reverse('google-reader-authorize'))
    else:
        signup_form = SignupForm(prefix='signup')
    return render_to_response('import/signup.xhtml', {
        'signup_form': signup_form,
    }, context_instance=RequestContext(request))
def ifttt_status(request):
    """IFTTT channel health check: report OK with the current UTC time."""
    logging.user(request, "~FCChecking ~SBIFTTT~SN status")
    timestamp = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
    return {"data": {
        "status": "OK",
        "time": timestamp,
    }}
def set_preference(request):
    """Save user preferences from POST.

    Single-field prefs are set directly on the profile, special prefs get
    bespoke handling, and everything else is folded into the profile's
    JSON preferences blob.
    """
    code = 1
    message = ''
    new_preferences = request.POST
    preferences = json.decode(request.user.profile.preferences)
    for preference_name, preference_value in new_preferences.items():
        # Normalize stringly-typed booleans once, up front.  (An identical
        # second conversion in the fallback branch was dead code and has
        # been removed.)
        if preference_value in ['true', 'false']:
            preference_value = True if preference_value == 'true' else False
        if preference_name in SINGLE_FIELD_PREFS:
            setattr(request.user.profile, preference_name, preference_value)
        elif preference_name in SPECIAL_PREFERENCES:
            if preference_name == 'autofollow_friends':
                social_services, _ = MSocialServices.objects.get_or_create(user_id=request.user.pk)
                social_services.autofollow = preference_value
                social_services.save()
            elif preference_name == 'dashboard_date':
                request.user.profile.dashboard_date = datetime.datetime.utcnow()
        else:
            preferences[preference_name] = preference_value
            if preference_name == 'intro_page':
                logging.user(request, "~FBAdvancing intro to page ~FM~SB%s" % preference_value)
    request.user.profile.preferences = json.encode(preferences)
    request.user.profile.save()
    logging.user(request, "~FMSaving preference: %s" % new_preferences)
    response = dict(code=code, message=message, new_preferences=new_preferences)
    return response
def add_site(request, token):
    """Bookmarklet endpoint: subscribe the token's user to the feed at
    ?url=, optionally creating a new folder first.

    Responds with a JSONP payload: {code, message, usersub}.
    """
    code = 0
    url = request.GET['url']
    folder = request.GET['folder']
    new_folder = request.GET.get('new_folder')
    callback = request.GET['callback']
    # Initialize everything referenced after the try/except: previously
    # profile/message/us could be unbound (NameError) on a bad token or
    # empty URL.
    profile = None
    message = None
    us = None
    if not url:
        code = -1
        message = 'Missing URL.'
    else:
        try:
            profile = Profile.objects.get(secret_token=token)
            if new_folder:
                usf, _ = UserSubscriptionFolders.objects.get_or_create(user=profile.user)
                usf.add_folder(folder, new_folder)
                folder = new_folder
            code, message, us = UserSubscription.add_subscription(
                user=profile.user, feed_address=url, folder=folder, bookmarklet=True
            )
        except Profile.DoesNotExist:
            code = -1
            message = 'User not found.'
    if code > 0:
        message = 'OK'
    if profile:
        logging.user(profile.user, "~FRAdding URL from site: ~SB%s (in %s)" % (url, folder))
    return HttpResponse(callback + '(' + json.encode({
        'code': code,
        'message': message,
        'usersub': us and us.feed.pk,
    }) + ')', mimetype='text/plain')
def send_launch_social_email(self, force=False):
    """One-time announcement email for the social launch.

    Skipped when the user has no email address / opted out, or when the
    email was already sent (unless force=True).
    """
    if not self.user.email or not self.send_emails:
        logging.user(
            self.user,
            "~FM~SB~FRNot~FM sending launch social email for user, %s: %s" %
            (self.user.email and 'opt-out: ' or 'blank', self.user.email))
        return
    # Dedupe on the 'launch_social' email type.
    sent_email, created = MSentEmail.objects.get_or_create(
        receiver_user_id=self.user.pk, email_type='launch_social')
    if not created and not force:
        logging.user(
            self.user,
            "~FM~SB~FRNot~FM sending launch social email for user, sent already: %s" % self.user.email)
        return
    # Rough account-inactivity age in months, used by the email copy.
    delta = datetime.datetime.now() - self.last_seen_on
    months_ago = delta.days / 30
    user = self.user
    data = dict(user=user, months_ago=months_ago)
    text = render_to_string('mail/email_launch_social.txt', data)
    html = render_to_string('mail/email_launch_social.xhtml', data)
    subject = "NewsBlur is now a social news reader"
    msg = EmailMultiAlternatives(subject, text,
                                 from_email='NewsBlur <%s>' % settings.HELLO_EMAIL,
                                 to=['%s <%s>' % (user, user.email)])
    msg.attach_alternative(html, "text/html")
    msg.send(fail_silently=True)
    logging.user(
        self.user,
        "~BB~FM~SBSending launch social email for user: %s months, %s" % (months_ago, self.user.email))
def fetch_mercury(self, skip_save=False, return_document=False):
    """Fetch parsed article text via the Mercury parser endpoint and hand
    the extracted fields to process_content.

    Returns process_content's result, or None on any fetch/parse failure.
    """
    try:
        resp = self.fetch_request(use_mercury=True)
    except TimeoutError:
        logging.user(
            self.request,
            "~SN~FRFailed~FY to fetch ~FGoriginal text~FY: timed out")
        resp = None
    except requests.exceptions.TooManyRedirects:
        logging.user(
            self.request,
            "~SN~FRFailed~FY to fetch ~FGoriginal text~FY: too many redirects"
        )
        resp = None
    if not resp:
        return

    doc = resp.json()
    if doc.get('error', False):
        # Typo fix in the fallback message: "merucry" -> "mercury".
        logging.user(
            self.request,
            "~SN~FRFailed~FY to fetch ~FGoriginal text~FY: %s" %
            doc.get('messages', "[unknown mercury error]"))
        return

    text = doc['content']
    title = doc['title']
    url = doc['url']
    image = doc['lead_image_url']
    return self.process_content(text, title, url, image,
                                skip_save=skip_save, return_document=return_document)
def fetch(self, skip_save=False):
    """Fetch the story's page and extract the readable article body.

    Compresses the result onto story.original_text_z unless skip_save.
    Returns the extracted HTML fragment, or None on failure.

    NOTE(review): share_story elsewhere calls fetch(skip_save=True,
    return_document=True), but this signature has no return_document —
    confirm which version of this method is current.
    """
    try:
        resp = self.fetch_request()
    except TimeoutError:
        logging.user(self.request, "~SN~FRFailed~FY to fetch ~FGoriginal text~FY: timed out")
        resp = None
    if not resp:
        return
    try:
        text = resp.text
    except (LookupError, TypeError):
        # Unknown/invalid encoding declared by the server: fall back to raw bytes.
        text = resp.content
    # Python 2 quirk: re-encode when the response declared a non-utf-8 charset.
    if resp.encoding and resp.encoding != 'utf-8':
        try:
            text = text.encode(resp.encoding)
        except LookupError:
            pass
    original_text_doc = readability.Document(text, url=resp.url, debug=settings.DEBUG)
    content = original_text_doc.summary(html_partial=True)
    if content:
        if not skip_save:
            self.story.original_text_z = zlib.compress(content)
            self.story.save()
        logging.user(self.request, ("~SN~FYFetched ~FGoriginal text~FY: now ~SB%s bytes~SN vs. was ~SB%s bytes" % (
            len(unicode(content)),
            self.story.story_content_z and len(zlib.decompress(self.story.story_content_z))
        )), warn_color=False)
    else:
        logging.user(self.request, ("~SN~FRFailed~FY to fetch ~FGoriginal text~FY: was ~SB%s bytes" % (
            self.story.story_content_z and len(zlib.decompress(self.story.story_content_z))
        )), warn_color=False)
    return content
def popularity_query(request):
    """Collect a popularity-query request (email + query) and queue the
    report email; GET just renders the form."""
    if request.method != 'POST':
        logging.user(request.user, "~BC~FRPopularity query form loading")
        blank_form = PopularityQueryForm(initial={
            'query': request.COOKIES.get('newsblur_popularity_query', "")
        })
        return render(request, 'analyzer/popularity_query.xhtml', {
            'popularity_query_form': blank_form,
        })

    form = PopularityQueryForm(request.POST)
    if form.is_valid():
        logging.user(
            request.user,
            "~BC~FRPopularity query: ~SB%s~SN requests \"~SB~FM%s~SN~FR\"" % (
                request.POST['email'], request.POST['query']))
        query = MPopularityQuery.objects.create(email=request.POST['email'],
                                                query=request.POST['query'])
        query.queue_email()
        response = render(request, 'analyzer/popularity_query.xhtml', {
            'success': True,
            'popularity_query_form': form,
        })
        # Remember the query so the form is pre-filled next visit.
        response.set_cookie('newsblur_popularity_query', request.POST['query'])
        return response

    logging.user(
        request.user,
        "~BC~FRFailed popularity query: ~SB%s~SN requests \"~SB~FM%s~SN~FR\"" % (
            request.POST['email'], request.POST['query']))
    return render(request, 'analyzer/popularity_query.xhtml', {
        'popularity_query_form': form,
    })
def delete_account(request):
    """Confirm-and-delete flow for the current user's account."""
    if request.method != 'POST':
        logging.user(
            request.user,
            "~BC~FRAttempting to delete ~SB%s~SN's account." % request.user.username)
        return {
            'delete_form': DeleteAccountForm(user=request.user),
        }

    form = DeleteAccountForm(request.POST, user=request.user)
    if form.is_valid():
        logging.user(
            request.user,
            "~SK~BC~FRDeleting ~SB%s~SN's account." % request.user.username)
        request.user.profile.delete_user(confirm=True)
        logout_user(request)
        return HttpResponseRedirect(reverse('index'))

    logging.user(
        request.user,
        "~BC~FRFailed attempt to delete ~SB%s~SN's account." % request.user.username)
    return {
        'delete_form': form,
    }
def share_story(request, token=None):
    """Share a story to the user's blurblog from a bookmarklet/extension POST.

    Authenticates either by session or by ``token`` (secret_token lookup).
    Resolves or creates the source feed, extracts/normalizes the story
    content, then creates or updates the MSharedStory, marks it read for
    the sharer, and returns a JSON payload with code/message/story.

    Fix: ``request.user.is_authenticated`` was referenced without calling
    it. On this Django version it is a method, so the bare attribute is
    always truthy — the token-auth path could never run and anonymous
    requests crashed on ``request.user.profile``. Sibling views (e.g.
    ``save_story``) call ``is_authenticated()``.
    """
    code = 0
    story_url = request.POST['story_url']
    comments = request.POST['comments']
    title = request.POST['title']
    content = request.POST.get('content', None)
    rss_url = request.POST.get('rss_url', None)
    feed_id = request.POST.get('feed_id', None) or 0
    feed = None
    message = None
    profile = None

    # Session auth first; otherwise try the secret token from the URL.
    if request.user.is_authenticated():
        profile = request.user.profile
    else:
        try:
            profile = Profile.objects.get(secret_token=token)
        except Profile.DoesNotExist:
            code = -1
            if token:
                message = "Not authenticated, couldn't find user by token."
            else:
                message = "Not authenticated, no token supplied and not authenticated."

    if not profile:
        return HttpResponse(json.encode({
            'code': code,
            'message': message,
            'story': None,
        }), content_type='text/plain')

    # Resolve the feed: explicit id wins, then the page's RSS URL, then the
    # story URL itself (both may create + fetch a new feed).
    if feed_id:
        feed = Feed.get_by_id(feed_id)
    else:
        if rss_url:
            logging.user(request.user, "~FBFinding feed (share_story): %s" % rss_url)
            feed = Feed.get_feed_from_url(rss_url, create=True, fetch=True)
        if not feed:
            logging.user(request.user, "~FBFinding feed (share_story): %s" % story_url)
            feed = Feed.get_feed_from_url(story_url, create=True, fetch=True)
        if feed:
            feed_id = feed.pk

    if content:
        # Client-supplied content: absolutize relative links against the story URL.
        content = lxml.html.fromstring(content)
        content.make_links_absolute(story_url)
        content = lxml.html.tostring(content)
    else:
        # No content given: fetch and extract the original text ourselves.
        importer = TextImporter(story=None, story_url=story_url, request=request,
                                debug=settings.DEBUG)
        document = importer.fetch(skip_save=True, return_document=True)
        content = document['content']
        if not title:
            title = document['title']

    shared_story = MSharedStory.objects.filter(user_id=profile.user.pk,
                                               story_feed_id=feed_id,
                                               story_guid=story_url).limit(1).first()
    if not shared_story:
        story_db = {
            "story_guid": story_url,
            "story_permalink": story_url,
            "story_title": title,
            "story_feed_id": feed_id,
            "story_content": content,
            "story_date": datetime.datetime.now(),
            "user_id": profile.user.pk,
            "comments": comments,
            "has_comments": bool(comments),
        }
        shared_story = MSharedStory.objects.create(**story_db)
        # A new share changes followers' unread counts; flag them for recalc.
        socialsubs = MSocialSubscription.objects.filter(subscription_user_id=profile.user.pk)
        for socialsub in socialsubs:
            socialsub.needs_unread_recalc = True
            socialsub.save()
        logging.user(profile.user, "~BM~FYSharing story from site: ~SB%s: %s" % (story_url, comments))
        message = "Sharing story from site: %s: %s" % (story_url, comments)
    else:
        # Re-share of an existing story: update its fields in place.
        shared_story.story_content = content
        shared_story.story_title = title
        shared_story.comments = comments
        shared_story.story_permalink = story_url
        shared_story.story_guid = story_url
        shared_story.has_comments = bool(comments)
        shared_story.story_feed_id = feed_id
        shared_story.save()
        logging.user(profile.user, "~BM~FY~SBUpdating~SN shared story from site: ~SB%s: %s" % (story_url, comments))
        message = "Updating shared story from site: %s: %s" % (story_url, comments)

    # Mark the shared story as read for the sharer, preferring their own
    # social subscription to themselves when it exists.
    try:
        socialsub = MSocialSubscription.objects.get(user_id=profile.user.pk,
                                                    subscription_user_id=profile.user.pk)
    except MSocialSubscription.DoesNotExist:
        socialsub = None

    if socialsub:
        socialsub.mark_story_ids_as_read([shared_story.story_hash],
                                         shared_story.story_feed_id,
                                         request=request)
    else:
        RUserStory.mark_read(profile.user.pk, shared_story.story_feed_id,
                             shared_story.story_hash)

    shared_story.publish_update_to_subscribers()

    response = HttpResponse(json.encode({
        'code': code,
        'message': message,
        'story': shared_story,
    }), content_type='text/plain')
    # CORS headers so the bookmarklet can POST cross-origin.
    response['Access-Control-Allow-Origin'] = '*'
    response['Access-Control-Allow-Methods'] = 'POST'
    return response
def check_share_on_site(request, token):
    """JSONP endpoint: report whether/how a story URL was shared on this site.

    Looks up the user by secret ``token``, finds the feed behind
    ``rss_url``/``story_url`` (without creating one), and returns the
    caller's own share, other users' shares of the same story, recent
    shares, and profile snippets, wrapped in the supplied JSONP callback.
    """
    code = 0
    story_url = request.GET['story_url']
    rss_url = request.GET.get('rss_url')
    callback = request.GET['callback']
    other_stories = None
    same_stories = None
    usersub = None
    message = None
    user = None

    if not story_url:
        code = -1
        # NOTE(review): when story_url is empty, `user_profile` and
        # `your_story` below are never assigned, so the trailing logging/
        # response code would raise NameError — confirm callers always
        # supply story_url.
    else:
        try:
            user_profile = Profile.objects.get(secret_token=token)
            user = user_profile.user
        except Profile.DoesNotExist:
            code = -1
            # NOTE(review): execution continues with `user_profile` unbound;
            # the get_shared_stories_from_site call below would NameError on
            # an invalid token — verify token is always valid here.

        # Feed resolution cascade: rss_url, then story_url, then the story
        # URL stripped to scheme://host/path, then the same with a trailing
        # slash. Never creates or fetches a feed.
        logging.user(request.user, "~FBFinding feed (check_share_on_site): %s" % rss_url)
        feed = Feed.get_feed_from_url(rss_url, create=False, fetch=False)
        if not feed:
            logging.user(request.user, "~FBFinding feed (check_share_on_site): %s" % story_url)
            feed = Feed.get_feed_from_url(story_url, create=False, fetch=False)
        if not feed:
            parsed_url = urlparse.urlparse(story_url)
            base_url = "%s://%s%s" % (parsed_url.scheme, parsed_url.hostname, parsed_url.path)
            logging.user(request.user, "~FBFinding feed (check_share_on_site): %s" % base_url)
            feed = Feed.get_feed_from_url(base_url, create=False, fetch=False)
        if not feed:
            logging.user(request.user, "~FBFinding feed (check_share_on_site): %s" % (base_url + '/'))
            feed = Feed.get_feed_from_url(base_url+'/', create=False, fetch=False)

        if feed and user:
            # NOTE(review): .filter() returns a queryset and never raises
            # DoesNotExist; `usersub` ends up as a (possibly empty) queryset,
            # which `bool(usersub)` below handles either way.
            try:
                usersub = UserSubscription.objects.filter(user=user, feed=feed)
            except UserSubscription.DoesNotExist:
                usersub = None
        feed_id = feed and feed.pk
        your_story, same_stories, other_stories = MSharedStory.get_shared_stories_from_site(
            feed_id, user_id=user_profile.user.pk, story_url=story_url)
        # Last 3 shares by this user, serialized for the JSONP payload.
        previous_stories = MSharedStory.objects.filter(
            user_id=user_profile.user.pk).order_by('-shared_date').limit(3)
        previous_stories = [{
            "user_id": story.user_id,
            "story_title": story.story_title,
            "comments": story.comments,
            "shared_date": story.shared_date,
            "relative_date": relative_timesince(story.shared_date),
            "blurblog_permalink": story.blurblog_permalink(),
        } for story in previous_stories]

        # Collect every user id that appears in the payload, then attach a
        # username + photo for each.
        user_ids = set([user_profile.user.pk])
        for story in same_stories:
            user_ids.add(story['user_id'])
        for story in other_stories:
            user_ids.add(story['user_id'])

        users = {}
        profiles = MSocialProfile.profiles(user_ids)
        for profile in profiles:
            users[profile.user_id] = {
                "username": profile.username,
                "photo_url": profile.photo_url,
            }

    logging.user(user_profile.user, "~BM~FCChecking share from site: ~SB%s" % (story_url),
                 request=request)

    # JSONP: wrap the JSON body in the caller-supplied callback.
    response = HttpResponse(callback + '(' + json.encode({
        'code'              : code,
        'message'           : message,
        'feed'              : feed,
        'subscribed'        : bool(usersub),
        'your_story'        : your_story,
        'same_stories'      : same_stories,
        'other_stories'     : other_stories,
        'previous_stories'  : previous_stories,
        'users'             : users,
    }) + ')', content_type='text/plain')
    response['Access-Control-Allow-Origin'] = '*'
    response['Access-Control-Allow-Methods'] = 'GET'
    return response
def stripe_form(request):
    """Render and process the Stripe payment form.

    On POST with a valid form: updates the user's email, then either
    attaches a new card to the existing Stripe customer or creates a new
    customer+plan, activates premium, and reconciles the subscription to
    the "newsblur-premium-36" plan (cancelling any other plan and, for
    current premium users, anchoring billing to their existing expiration
    so they are not double-charged). On success, renders the return page;
    otherwise renders the form with queue/charge context.
    """
    user = request.user
    success_updating = False
    stripe.api_key = settings.STRIPE_SECRET
    plan = PLANS[0][0]
    renew = is_true(request.GET.get('renew', False))
    error = None

    if request.method == 'POST':
        zebra_form = StripePlusPaymentForm(request.POST, email=user.email)
        if zebra_form.is_valid():
            user.email = zebra_form.cleaned_data['email']
            user.save()
            customer = None
            # True when the user already has unexpired premium; used below to
            # anchor the new subscription's billing to the old expiry.
            current_premium = (
                user.profile.is_premium and
                user.profile.premium_expire and
                user.profile.premium_expire > datetime.datetime.now())

            # Are they changing their existing card?
            if user.profile.stripe_id:
                customer = stripe.Customer.retrieve(user.profile.stripe_id)
                try:
                    card = customer.sources.create(
                        source=zebra_form.cleaned_data['stripe_token'])
                except stripe.error.CardError:
                    error = "This card was declined."
                else:
                    customer.default_card = card.id
                    customer.save()
                    # `strip_4_digits` (sic) is the profile model's field name.
                    user.profile.strip_4_digits = zebra_form.cleaned_data[
                        'last_4_digits']
                    user.profile.save()
                    user.profile.activate_premium(
                    )  # TODO: Remove, because webhooks are slow
                    success_updating = True
            else:
                # First-time payer: create the Stripe customer with the chosen plan.
                try:
                    customer = stripe.Customer.create(
                        **{
                            'source': zebra_form.cleaned_data['stripe_token'],
                            'plan': zebra_form.cleaned_data['plan'],
                            'email': user.email,
                            'description': user.username,
                        })
                except stripe.error.CardError:
                    error = "This card was declined."
                else:
                    user.profile.strip_4_digits = zebra_form.cleaned_data[
                        'last_4_digits']
                    user.profile.stripe_id = customer.id
                    user.profile.save()
                    user.profile.activate_premium(
                    )  # TODO: Remove, because webhooks are slow
                    success_updating = True

            # Check subscription to ensure latest plan, otherwise cancel it and subscribe
            if success_updating and customer and customer.subscriptions.total_count == 1:
                subscription = customer.subscriptions.data[0]
                if subscription['plan']['id'] != "newsblur-premium-36":
                    for sub in customer.subscriptions:
                        sub.delete()
                    # Re-fetch so total_count reflects the cancellation below.
                    customer = stripe.Customer.retrieve(user.profile.stripe_id)

            if success_updating and customer and customer.subscriptions.total_count == 0:
                params = dict(customer=customer.id,
                              items=[
                                  {
                                      "plan": "newsblur-premium-36",
                                  },
                              ])
                premium_expire = user.profile.premium_expire
                if current_premium and premium_expire:
                    # Existing premium with < 1 year left: don't charge until
                    # the current term runs out (trial until old expiry).
                    if premium_expire < (datetime.datetime.now() +
                                         datetime.timedelta(days=365)):
                        params[
                            'billing_cycle_anchor'] = premium_expire.strftime(
                                '%s')
                        params['trial_end'] = premium_expire.strftime('%s')
                stripe.Subscription.create(**params)
    else:
        zebra_form = StripePlusPaymentForm(email=user.email, plan=plan)

    if success_updating:
        return render(request, 'reader/paypal_return.xhtml')

    # Free-account queue position, shown on the form.
    new_user_queue_count = RNewUserQueue.user_count()
    new_user_queue_position = RNewUserQueue.user_position(request.user.pk)
    new_user_queue_behind = 0
    if new_user_queue_position >= 0:
        new_user_queue_behind = new_user_queue_count - new_user_queue_position
        new_user_queue_position -= 1

    # Renewing while premium is still active defers the charge (see anchor above).
    immediate_charge = True
    if user.profile.premium_expire and user.profile.premium_expire > datetime.datetime.now(
    ):
        immediate_charge = False

    logging.user(request, "~BM~FBLoading Stripe form")

    return render(
        request, 'profile/stripe_form.xhtml', {
            'zebra_form': zebra_form,
            'publishable': settings.STRIPE_PUBLISHABLE,
            'success_updating': success_updating,
            'new_user_queue_count': new_user_queue_count - 1,
            'new_user_queue_position': new_user_queue_position,
            'new_user_queue_behind': new_user_queue_behind,
            'renew': renew,
            'immediate_charge': immediate_charge,
            'error': error,
        })
def appdotnet_connect(request):
    """Handle the App.net OAuth dance for connecting an account.

    Three entry states, distinguished by query params:
    - ``denied``: user refused; return an error dict.
    - ``code``: OAuth callback; exchange the code for a token, guard
      against the same App.net account being linked to two users, save
      the credentials on MSocialServices, and sync friends.
    - neither: start the flow by returning the App.net auth URL.
    """
    domain = Site.objects.get_current().domain
    args = {
        "client_id": settings.APPDOTNET_CLIENTID,
        "client_secret": settings.APPDOTNET_SECRET,
        "redirect_uri": "http://" + domain + reverse('appdotnet-connect'),
        "scope": ["email", "write_post", "follow"],
    }

    oauth_code = request.REQUEST.get('code')
    denied = request.REQUEST.get('denied')
    if denied:
        logging.user(request, "~BB~FRDenied App.net connect")
        return {'error': 'Denied! Try connecting again.'}
    elif oauth_code:
        # Exchange the one-time OAuth code for an access token + user id.
        try:
            adn_auth = appdotnet.Appdotnet(**args)
            response = adn_auth.getAuthResponse(oauth_code)
            adn_resp = json.decode(response)
            access_token = adn_resp['access_token']
            adn_userid = adn_resp['user_id']
        except (IOError):
            logging.user(request, "~BB~FRFailed App.net connect")
            return dict(
                error="App.net has returned an error. Try connecting again.")

        # Be sure that two people aren't using the same App.net account.
        existing_user = MSocialServices.objects.filter(
            appdotnet_uid=unicode(adn_userid))
        if existing_user and existing_user[0].user_id != request.user.pk:
            try:
                user = User.objects.get(pk=existing_user[0].user_id)
                logging.user(
                    request,
                    "~BB~FRFailed App.net connect, another user: %s" % user.username)
                return dict(error=(
                    "Another user (%s, %s) has "
                    "already connected with those App.net credentials." %
                    (user.username, user.email or "no email")))
            except User.DoesNotExist:
                # Stale link to a deleted user: clear it and carry on.
                existing_user.delete()

        social_services, _ = MSocialServices.objects.get_or_create(
            user_id=request.user.pk)
        social_services.appdotnet_uid = unicode(adn_userid)
        social_services.appdotnet_access_token = access_token
        social_services.syncing_appdotnet = True
        social_services.save()

        # SyncAppdotnetFriends.delay(user_id=request.user.pk)
        # XXX TODO: Remove below and uncomment above. Only for www->dev.
        social_services.sync_appdotnet_friends()

        logging.user(request, "~BB~FRFinishing App.net connect")
        return {}
    else:
        # Start the OAuth process
        adn_auth = appdotnet.Appdotnet(**args)
        auth_url = adn_auth.generateAuthUrl()
        logging.user(request, "~BB~FRStarting App.net connect")
        return {'next': auth_url}
def setup_premium_history(self, alt_email=None):
    """Rebuild this user's PaymentHistory from Paypal and Stripe records.

    Deletes any existing paypal/stripe history rows, re-imports payments
    from PayPalIPN (matched by username, then primary email, then
    ``alt_email``) and from the Stripe API, then recomputes
    ``premium_expire`` as one year per payment made in the last year,
    counted from the oldest such payment.
    """
    paypal_payments = []
    stripe_payments = []
    existing_history = PaymentHistory.objects.filter(
        user=self.user, payment_provider__in=['paypal', 'stripe'])
    if existing_history.count():
        logging.user(
            self.user, "~BY~SN~FRDeleting~FW existing history: ~SB%s payments" %
            existing_history.count())
        existing_history.delete()

    # Record Paypal payments — try username first, then fall back to emails.
    paypal_payments = PayPalIPN.objects.filter(custom=self.user.username,
                                               payment_status='Completed',
                                               txn_type='subscr_payment')
    if not paypal_payments.count():
        paypal_payments = PayPalIPN.objects.filter(
            payer_email=self.user.email,
            payment_status='Completed',
            txn_type='subscr_payment')
    if alt_email and not paypal_payments.count():
        paypal_payments = PayPalIPN.objects.filter(
            payer_email=alt_email,
            payment_status='Completed',
            txn_type='subscr_payment')
        if paypal_payments.count():
            # Make sure this doesn't happen again, so let's use Paypal's email.
            self.user.email = alt_email
            self.user.save()
    for payment in paypal_payments:
        PaymentHistory.objects.create(user=self.user,
                                      payment_date=payment.payment_date,
                                      payment_amount=payment.payment_gross,
                                      payment_provider='paypal')

    # Record Stripe payments
    if self.stripe_id:
        stripe.api_key = settings.STRIPE_SECRET
        stripe_customer = stripe.Customer.retrieve(self.stripe_id)
        stripe_payments = stripe.Charge.all(
            customer=stripe_customer.id).data
        for payment in stripe_payments:
            created = datetime.datetime.fromtimestamp(payment.created)
            # Stripe amounts are in cents; history stores dollars.
            PaymentHistory.objects.create(user=self.user,
                                          payment_date=created,
                                          payment_amount=payment.amount / 100.0,
                                          payment_provider='stripe')

    # Calculate payments in last year, then add together
    payment_history = PaymentHistory.objects.filter(user=self.user)
    last_year = datetime.datetime.now() - datetime.timedelta(days=364)
    recent_payments_count = 0
    oldest_recent_payment_date = None
    for payment in payment_history:
        if payment.payment_date > last_year:
            recent_payments_count += 1
            if not oldest_recent_payment_date or payment.payment_date < oldest_recent_payment_date:
                oldest_recent_payment_date = payment.payment_date

    if oldest_recent_payment_date:
        # One year of premium per recent payment, starting at the oldest one.
        self.premium_expire = (
            oldest_recent_payment_date +
            datetime.timedelta(days=365 * recent_payments_count))
        self.save()

    logging.user(
        self.user,
        "~BY~SN~FWFound ~SB%s paypal~SN and ~SB%s stripe~SN payments (~SB%s payments expire: ~SN~FB%s~FW)"
        % (len(paypal_payments), len(stripe_payments), len(payment_history),
           self.premium_expire))
social_services.twitter_uid = unicode(twitter_user.id) social_services.twitter_access_key = auth.access_token social_services.twitter_access_secret = auth.access_token_secret social_services.syncing_twitter = True social_services.save() SyncTwitterFriends.delay(user_id=request.user.pk) logging.user(request, "~BB~FRFinishing Twitter connect") return {} else: # Start the OAuth process auth = tweepy.OAuthHandler(twitter_consumer_key, twitter_consumer_secret) auth_url = auth.get_authorization_url() request.session['twitter_request_token'] = auth.request_token logging.user(request, "~BB~FRStarting Twitter connect: %s" % auth.request_token) return {'next': auth_url} @login_required @render_to('social/social_connect.xhtml') def facebook_connect(request): facebook_app_id = settings.FACEBOOK_APP_ID facebook_secret = settings.FACEBOOK_SECRET args = { "client_id": facebook_app_id, "redirect_uri": "https://" + Site.objects.get_current().domain + '/oauth/facebook_connect', "scope": "user_friends", "display": "popup", }
def logout(request):
    """API logout endpoint: end the session and report success (code=1)."""
    logging.user(request.user, "~FG~BBAPI Logout~FW")
    logout_user(request)
    return {'code': 1}
original_story_content = "" if content and len(content) > len(original_story_content): if self.story and not skip_save: self.story.original_text_z = zlib.compress(smart_str(content)) try: self.story.save() except NotUniqueError, e: logging.user(self.request, ("~SN~FYFetched ~FGoriginal text~FY: %s" % (e)), warn_color=False) pass logging.user(self.request, ("~SN~FYFetched ~FGoriginal text~FY: now ~SB%s bytes~SN vs. was ~SB%s bytes" % ( len(content), len(original_story_content) )), warn_color=False) else: logging.user(self.request, ("~SN~FRFailed~FY to fetch ~FGoriginal text~FY: was ~SB%s bytes" % ( len(original_story_content) )), warn_color=False) return if return_document: return dict(content=content, title=title, url=url, doc=original_text_doc, image=image) return content def rewrite_content(self, content): soup = BeautifulSoup(content) for noscript in soup.findAll('noscript'): if len(noscript.contents) > 0: noscript.replaceWith(noscript.contents[0])
def load_single_feed(request, feed_id):
    """Load one page of a feed's stories with per-user read/starred state
    and intelligence classifier scores.

    Fix: the original called ``UserSubscription.objects.get(...)``
    unguarded, which raises ``DoesNotExist`` for users not subscribed to
    the feed — crashing the view and making the later
    ``if not usersub: data.update(feed.canonical())`` branch unreachable.
    Non-subscribers now get the feed's stories all marked read, plus the
    feed's canonical data, as that branch intends.
    """
    start = time.time()
    user = get_user(request)
    offset = int(request.REQUEST.get('offset', 0))
    limit = int(request.REQUEST.get('limit', 12))
    page = int(request.REQUEST.get('page', 1))
    dupe_feed_id = None
    userstories_db = None
    if page:
        offset = limit * (page-1)
    if not feed_id:
        raise Http404

    try:
        feed = Feed.objects.get(id=feed_id)
    except Feed.DoesNotExist:
        # The feed may have been merged into another; follow the duplicate
        # record by address and remember the original id for the client.
        feed_address = request.REQUEST.get('feed_address')
        dupe_feed = DuplicateFeed.objects.filter(duplicate_address=feed_address)
        if dupe_feed:
            feed = dupe_feed[0].feed
            dupe_feed_id = feed_id
        else:
            raise Http404

    stories = feed.get_stories(offset, limit)

    # Get intelligence classifier for user
    classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk, feed_id=feed_id))
    classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk, feed_id=feed_id))
    classifier_titles = list(MClassifierTitle.objects(user_id=user.pk, feed_id=feed_id))
    classifier_tags = list(MClassifierTag.objects(user_id=user.pk, feed_id=feed_id))

    checkpoint1 = time.time()

    # Non-subscribers legitimately load feeds (e.g. previews); treat a
    # missing subscription as usersub=None rather than crashing.
    try:
        usersub = UserSubscription.objects.get(user=user, feed=feed)
    except UserSubscription.DoesNotExist:
        usersub = None

    userstories = []
    if usersub:
        # Stories this user has read since their mark_read_date.
        userstories_db = MUserStory.objects(user_id=user.pk,
                                            feed_id=feed.pk,
                                            read_date__gte=usersub.mark_read_date)
        starred_stories = MStarredStory.objects(user_id=user.pk,
                                                story_feed_id=feed_id).only('story_guid', 'starred_date')
        starred_stories = dict([(story.story_guid, story.starred_date)
                                for story in starred_stories])

        for us in userstories_db:
            if hasattr(us.story, 'story_guid') and isinstance(us.story.story_guid, unicode):
                userstories.append(us.story.story_guid)
            elif hasattr(us.story, 'id') and isinstance(us.story.id, unicode):
                userstories.append(us.story.id)  # TODO: Remove me after migration from story.id->guid

    checkpoint2 = time.time()

    for story in stories:
        story_date = localtime_for_timezone(story['story_date'], user.profile.timezone)
        now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone)
        story['short_parsed_date'] = format_story_link_date__short(story_date, now)
        story['long_parsed_date'] = format_story_link_date__long(story_date, now)
        if usersub:
            # Read if explicitly read, or older than the mark-read cutoff;
            # unread if newer than the last time the user read this feed.
            if story['id'] in userstories:
                story['read_status'] = 1
            elif not story.get('read_status') and story['story_date'] < usersub.mark_read_date:
                story['read_status'] = 1
            elif not story.get('read_status') and story['story_date'] > usersub.last_read_date:
                story['read_status'] = 0
            if story['id'] in starred_stories:
                story['starred'] = True
                starred_date = localtime_for_timezone(starred_stories[story['id']],
                                                      user.profile.timezone)
                story['starred_date'] = format_story_link_date__long(starred_date, now)
        else:
            # No subscription: everything shows as read.
            story['read_status'] = 1
        story['intelligence'] = {
            'feed': apply_classifier_feeds(classifier_feeds, feed),
            'author': apply_classifier_authors(classifier_authors, story),
            'tags': apply_classifier_tags(classifier_tags, story),
            'title': apply_classifier_titles(classifier_titles, story),
        }

    checkpoint3 = time.time()

    # Intelligence
    feed_tags = json.decode(feed.data.popular_tags) if feed.data.popular_tags else []
    feed_authors = json.decode(feed.data.popular_authors) if feed.data.popular_authors else []
    classifiers = get_classifiers_for_user(user, feed_id, classifier_feeds,
                                           classifier_authors, classifier_titles,
                                           classifier_tags)

    if usersub:
        usersub.feed_opens += 1
        usersub.save()

    timediff = time.time()-start
    last_update = relative_timesince(feed.last_update)
    logging.user(request.user, "~FYLoading feed: ~SB%s%s ~SN(%.4s seconds)" % (
        feed, ('~SN/p%s' % page) if page > 1 else '', timediff))
    FeedLoadtime.objects.create(feed=feed, loadtime=timediff)

    # Log a phase breakdown when the whole load took a second or more.
    if timediff >= 1:
        diff1 = checkpoint1-start
        diff2 = checkpoint2-start
        diff3 = checkpoint3-start
        logging.user(request.user, "~FYSlow feed load: ~SB%.4s/%.4s(%s)/%.4s" % (
            diff1, diff2, userstories_db and userstories_db.count(), diff3))

    data = dict(stories=stories,
                feed_tags=feed_tags,
                feed_authors=feed_authors,
                classifiers=classifiers,
                last_update=last_update,
                feed_id=feed.pk)
    if dupe_feed_id:
        data['dupe_feed_id'] = dupe_feed_id
    if not usersub:
        data.update(feed.canonical())

    return data
def load_river_stories(request):
    """Load a page of unread stories across many feeds ("river of news").

    Excludes read stories and anything older than each feed's
    mark_read_date (via a Mongo map/reduce with the per-feed cutoffs
    passed in scope), sorts by intelligence score, and formats the
    resulting page with dates, starred state, and classifier scores.
    """
    limit = 18
    offset = 0
    start = datetime.datetime.utcnow()
    user = get_user(request)
    feed_ids = [int(feed_id) for feed_id in request.REQUEST.getlist('feeds') if feed_id]
    original_feed_ids = list(feed_ids)
    page = int(request.REQUEST.get('page', 0))+1
    read_stories_count = int(request.REQUEST.get('read_stories_count', 0))
    bottom_delta = datetime.timedelta(days=settings.DAYS_OF_UNREAD)

    if not feed_ids:
        logging.user(request.user, "~FCLoading empty river stories: page %s" % (page))
        return dict(stories=[])

    # Fetch all stories at and before the page number.
    # Not a single page, because reading stories can move them up in the unread order.
    # `read_stories_count` is an optimization, works best when all 25 stories before have been read.
    limit = limit * page - read_stories_count

    # Read stories to exclude
    read_stories = MUserStory.objects(user_id=user.pk, feed_id__in=feed_ids).only('story')
    read_stories = [rs.story.id for rs in read_stories]

    # Determine mark_as_read dates for all feeds to ignore all stories before this date.
    # max_feed_count = 0
    feed_counts = {}
    feed_last_reads = {}
    for feed_id in feed_ids:
        try:
            usersub = UserSubscription.objects.get(feed__pk=feed_id, user=user)
        except UserSubscription.DoesNotExist:
            continue
        if not usersub: continue
        # Weighted unread count: positives count 20x, neutrals 10x, negatives 1x.
        feed_counts[feed_id] = (usersub.unread_count_negative * 1 +
                                usersub.unread_count_neutral * 10 +
                                usersub.unread_count_positive * 20)
        # if feed_counts[feed_id] > max_feed_count:
        #     max_feed_count = feed_counts[feed_id]
        feed_last_reads[feed_id] = int(time.mktime(usersub.mark_read_date.timetuple()))
    # Keep only the 50 feeds with the lowest weighted unread counts.
    feed_counts = sorted(feed_counts.items(), key=itemgetter(1))[:50]
    feed_ids = [f[0] for f in feed_counts]
    # String keys: these are looked up from JavaScript inside map/reduce.
    feed_last_reads = dict([(str(feed_id), feed_last_reads[feed_id])
                            for feed_id in feed_ids])
    feed_counts = dict(feed_counts)

    # After excluding read stories, all that's left are stories
    # past the mark_read_date. Everything returned is guaranteed to be unread.
    mstories = MStory.objects(
        id__nin=read_stories,
        story_feed_id__in=feed_ids,
        story_date__gte=start - bottom_delta
    ).map_reduce("""function() {
            var d = feed_last_reads[this[~story_feed_id]];
            if (this[~story_date].getTime()/1000 > d) {
                emit(this[~id], this);
            }
        }""",
        """function(key, values) {
            return values[0];
        }""",
        output='inline',
        scope={
            'feed_last_reads': feed_last_reads
        }
    )
    mstories = [story.value for story in mstories]

    # Highest intelligence score first.
    mstories = sorted(mstories, cmp=lambda x, y: cmp(story_score(y, bottom_delta),
                                                     story_score(x, bottom_delta)))

    # story_feed_counts = defaultdict(int)
    # mstories_pruned = []
    # for story in mstories:
    #     print story['story_title'], story_feed_counts[story['story_feed_id']]
    #     if story_feed_counts[story['story_feed_id']] >= 3: continue
    #     mstories_pruned.append(story)
    #     story_feed_counts[story['story_feed_id']] += 1

    stories = []
    for i, story in enumerate(mstories):
        if i < offset: continue
        if i >= offset + limit: break
        stories.append(bunch(story))
    stories = Feed.format_stories(stories)
    found_feed_ids = list(set([story['story_feed_id'] for story in stories]))

    # Find starred stories
    starred_stories = MStarredStory.objects(
        user_id=user.pk,
        story_feed_id__in=found_feed_ids
    ).only('story_guid', 'starred_date')
    starred_stories = dict([(story.story_guid, story.starred_date)
                            for story in starred_stories])

    # Intelligence classifiers for all feeds involved
    def sort_by_feed(classifiers):
        # Group classifier documents by their feed_id for per-story lookup.
        feed_classifiers = defaultdict(list)
        for classifier in classifiers:
            feed_classifiers[classifier.feed_id].append(classifier)
        return feed_classifiers
    classifier_feeds = sort_by_feed(MClassifierFeed.objects(user_id=user.pk, feed_id__in=found_feed_ids))
    classifier_authors = sort_by_feed(MClassifierAuthor.objects(user_id=user.pk, feed_id__in=found_feed_ids))
    classifier_titles = sort_by_feed(MClassifierTitle.objects(user_id=user.pk, feed_id__in=found_feed_ids))
    classifier_tags = sort_by_feed(MClassifierTag.objects(user_id=user.pk, feed_id__in=found_feed_ids))

    # Just need to format stories
    for story in stories:
        story_date = localtime_for_timezone(story['story_date'], user.profile.timezone)
        now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone)
        story['short_parsed_date'] = format_story_link_date__short(story_date, now)
        story['long_parsed_date'] = format_story_link_date__long(story_date, now)
        # Everything in the river is unread by construction (see map/reduce above).
        story['read_status'] = 0
        if story['id'] in starred_stories:
            story['starred'] = True
            starred_date = localtime_for_timezone(starred_stories[story['id']],
                                                  user.profile.timezone)
            story['starred_date'] = format_story_link_date__long(starred_date, now)
        story['intelligence'] = {
            'feed': apply_classifier_feeds(classifier_feeds[story['story_feed_id']],
                                           story['story_feed_id']),
            'author': apply_classifier_authors(classifier_authors[story['story_feed_id']],
                                               story),
            'tags': apply_classifier_tags(classifier_tags[story['story_feed_id']],
                                          story),
            'title': apply_classifier_titles(classifier_titles[story['story_feed_id']],
                                             story),
        }

    diff = datetime.datetime.utcnow() - start
    timediff = float("%s.%.2s" % (diff.seconds, (diff.microseconds / 1000)))
    logging.user(request.user, "~FCLoading river stories: page %s - ~SB%s/%s "
                               "stories ~SN(%s/%s/%s feeds) ~FB(%s seconds)" %
                               (page, len(stories), len(mstories), len(found_feed_ids),
                                len(feed_ids), len(original_feed_ids), timediff))

    return dict(stories=stories)
def iframe_buster(request):
    """Answer iframe-buster pings with an empty 204 No Content response."""
    logging.user(request.user, "~FB~SBiFrame bust!")
    response = HttpResponse(status=204)
    return response
def exception_change_feed_link(request):
    """Change a feed's page link, either to repair a broken feed or to
    branch a working one.

    If the feed has a page/feed exception, re-discovers the feed address
    from the new link and reactivates it (following any duplicate-merge).
    Otherwise, branches a new Feed sharing the address but with the new
    link. Then refetches, migrates the user's subscription to the
    resulting feed, recalculates scores, and returns the updated feed
    payload plus fetch history.
    """
    feed_id = request.POST['feed_id']
    feed = get_object_or_404(Feed, pk=feed_id)
    original_feed = feed
    feed_link = request.POST['feed_link']
    timezone = request.user.profile.timezone
    code = -1

    if feed.has_page_exception or feed.has_feed_exception:
        # Fix broken feed
        logging.user(request, "~FRFixing feed exception by link: ~SB%s~SN to ~SB%s" %
                     (feed.feed_link, feed_link))
        feed_address = feedfinder.feed(feed_link)
        if feed_address:
            code = 1
            feed.has_page_exception = False
            feed.active = True
            feed.fetched_once = False
            feed.feed_link = feed_link
            feed.feed_address = feed_address
            duplicate_feed = feed.schedule_feed_fetch_immediately()
            if duplicate_feed:
                # This feed was merged into another; switch to the survivor.
                new_feed = Feed.objects.get(pk=duplicate_feed.pk)
                feed = new_feed
                new_feed.schedule_feed_fetch_immediately()
                new_feed.has_page_exception = False
                new_feed.active = True
                new_feed.save()
    else:
        # Branch good feed
        logging.user(request, "~FRBranching feed by link: ~SB%s~SN to ~SB%s" %
                     (feed.feed_link, feed_link))
        feed, _ = Feed.objects.get_or_create(feed_address=feed.feed_address,
                                             feed_link=feed_link)
        code = 1
        if feed.pk != original_feed.pk:
            # Preserve the root of the branch chain if one already exists;
            # the except guards against a dangling branch_from_feed reference.
            try:
                feed.branch_from_feed = original_feed.branch_from_feed or original_feed
            except Feed.DoesNotExist:
                feed.branch_from_feed = original_feed
            feed.feed_link_locked = True
            feed.save()

    feed = feed.update()
    feed = Feed.get_by_id(feed.pk)

    try:
        usersub = UserSubscription.objects.get(user=request.user, feed=feed)
    except UserSubscription.DoesNotExist:
        # Subscription still points at the pre-branch/pre-merge feed; move it.
        usersubs = UserSubscription.objects.filter(user=request.user,
                                                   feed=original_feed)
        if usersubs:
            usersub = usersubs[0]
            usersub.switch_feed(feed, original_feed)
        else:
            # No subscription at all: report failure with fetch history only.
            fetch_history = MFetchHistory.feed(feed_id, timezone=timezone)
            return {
                'code': -1,
                'feed_fetch_history': fetch_history['feed_fetch_history'],
                'page_fetch_history': fetch_history['page_fetch_history'],
                'push_history': fetch_history['push_history'],
            }

    usersub.calculate_feed_scores(silent=False)
    feed.update_all_statistics()
    classifiers = get_classifiers_for_user(usersub.user, feed_id=usersub.feed_id)

    # The refetch may itself have raised a feed exception; report failure.
    if feed and feed.has_feed_exception:
        code = -1

    feeds = {
        original_feed.pk: usersub.canonical(full=True, classifiers=classifiers),
    }
    fetch_history = MFetchHistory.feed(feed_id, timezone=timezone)
    return {
        'code': code,
        'feeds': feeds,
        'new_feed_id': usersub.feed_id,
        'feed_fetch_history': fetch_history['feed_fetch_history'],
        'page_fetch_history': fetch_history['page_fetch_history'],
        'push_history': fetch_history['push_history'],
    }
def ios_ipa(request):
    """Serve the bundled iOS client binary (NewsBlur.ipa) for download.

    Fixes: the original opened the file in text mode and never closed the
    handle (leaked file descriptor). Now reads the .ipa as binary inside
    a ``with`` block; the misleading ``manifest`` name is also corrected,
    since the payload is the app archive itself, not a manifest plist.
    """
    filename = os.path.join(settings.NEWSBLUR_DIR, 'clients/ios/NewsBlur.ipa')
    with open(filename, 'rb') as ipa_file:
        ipa_data = ipa_file.read()
    logging.user(request, "~SK~FR~BBDownloading NewsBlur.ipa...")
    return HttpResponse(ipa_data, content_type='application/octet-stream')
def delete_user(self, confirm=False, fast=False):
    """Permanently delete this user and all of their social data.

    Requires ``confirm=True`` as a safety latch. Cancels premium, unwinds
    follows in both directions, deletes shared stories (re-syncing each
    original story's redis state unless ``fast``), social subscriptions,
    interactions, activities, starred stories, and finally the Django
    user row itself.
    """
    if not confirm:
        print " ---> You must pass confirm=True to delete this user."
        return

    # Best-effort: a failed premium cancellation must not block deletion.
    try:
        self.cancel_premium()
    except:
        logging.user(
            self.user, "~BR~SK~FWError cancelling premium renewal for: %s" %
            self.user.username)

    # Imported here (not at module level), presumably to avoid a circular
    # import between profile and social models.
    from apps.social.models import MSocialProfile, MSharedStory, MSocialSubscription
    from apps.social.models import MActivity, MInteraction
    try:
        social_profile = MSocialProfile.objects.get(user_id=self.user.pk)
        logging.user(
            self.user, "Unfollowing %s followings and %s followers" %
            (social_profile.following_count, social_profile.follower_count))
        # Sever the follow graph in both directions before deleting the profile.
        for follow in social_profile.following_user_ids:
            social_profile.unfollow_user(follow)
        for follower in social_profile.follower_user_ids:
            follower_profile = MSocialProfile.objects.get(user_id=follower)
            follower_profile.unfollow_user(self.user.pk)
        social_profile.delete()
    except MSocialProfile.DoesNotExist:
        logging.user(self.user, " ***> No social profile found. S'ok, moving on.")
        pass

    shared_stories = MSharedStory.objects.filter(user_id=self.user.pk)
    logging.user(self.user, "Deleting %s shared stories" % shared_stories.count())
    for story in shared_stories:
        try:
            # Re-sync the original story's redis state now that this share
            # is going away; skipped in fast mode.
            if not fast:
                original_story = MStory.objects.get(
                    story_hash=story.story_hash)
                original_story.sync_redis()
        except MStory.DoesNotExist:
            pass
        story.delete()

    subscriptions = MSocialSubscription.objects.filter(
        subscription_user_id=self.user.pk)
    logging.user(
        self.user,
        "Deleting %s social subscriptions" % subscriptions.count())
    subscriptions.delete()

    # Interactions and activities are removed in both directions:
    # those the user generated, and those that reference the user.
    interactions = MInteraction.objects.filter(user_id=self.user.pk)
    logging.user(
        self.user,
        "Deleting %s interactions for user." % interactions.count())
    interactions.delete()

    interactions = MInteraction.objects.filter(with_user_id=self.user.pk)
    logging.user(
        self.user,
        "Deleting %s interactions with user." % interactions.count())
    interactions.delete()

    activities = MActivity.objects.filter(user_id=self.user.pk)
    logging.user(self.user, "Deleting %s activities for user."
                 % activities.count())
    activities.delete()

    activities = MActivity.objects.filter(with_user_id=self.user.pk)
    logging.user(self.user, "Deleting %s activities with user."
                 % activities.count())
    activities.delete()

    starred_stories = MStarredStory.objects.filter(user_id=self.user.pk)
    logging.user(self.user, "Deleting %s starred stories."
                 % starred_stories.count())
    starred_stories.delete()

    logging.user(self.user, "Deleting user: %s" % self.user)
    self.user.delete()
def save_story(request, token=None):
    """Star (save) a story by URL, authenticated by session or secret token.

    Accepts POSTed story_url/title and optional content, rss_url, feed_id,
    user_tags, user_notes. Resolves or creates the source feed, fetches and
    absolutizes content when missing, then creates or updates the user's
    MStarredStory. Returns a JSON-ish text/plain response with CORS headers
    so bookmarklets on third-party origins can call it.
    """
    code = 0
    story_url = request.POST['story_url']
    user_tags = request.POST.getlist('user_tags') or request.POST.getlist('user_tags[]') or []
    add_user_tag = request.POST.get('add_user_tag', None)
    title = request.POST['title']
    content = request.POST.get('content', None)
    rss_url = request.POST.get('rss_url', None)
    user_notes = request.POST.get('user_notes', None)
    feed_id = request.POST.get('feed_id', None) or 0
    feed = None
    message = None
    profile = None

    if request.user.is_authenticated():
        profile = request.user.profile
    else:
        # Bookmarklet path: identify the user by their secret token.
        try:
            profile = Profile.objects.get(secret_token=token)
        except Profile.DoesNotExist:
            code = -1
            if token:
                message = "Not authenticated, couldn't find user by token."
            else:
                message = "Not authenticated, no token supplied and not authenticated."

    if not profile:
        return HttpResponse(json.encode({
            'code': code,
            'message': message,
            'story': None,
        }), content_type='text/plain')

    if feed_id:
        feed = Feed.get_by_id(feed_id)
    else:
        # No feed given: discover it from the RSS URL, then the story URL.
        if rss_url:
            logging.user(request.user, "~FBFinding feed (save_story): %s" % rss_url)
            feed = Feed.get_feed_from_url(rss_url, create=True, fetch=True)
        if not feed:
            logging.user(request.user, "~FBFinding feed (save_story): %s" % story_url)
            feed = Feed.get_feed_from_url(story_url, create=True, fetch=True)
        if feed:
            feed_id = feed.pk

    if content:
        # Rewrite relative links against the story's URL.
        content = lxml.html.fromstring(content)
        content.make_links_absolute(story_url)
        content = lxml.html.tostring(content)
    else:
        # No content supplied: fetch the original page text.
        importer = TextImporter(story=None, story_url=story_url, request=request, debug=settings.DEBUG)
        document = importer.fetch(skip_save=True, return_document=True)
        content = document['content']
        if not title:
            title = document['title']

    if add_user_tag:
        user_tags = user_tags + [tag for tag in add_user_tag.split(',')]

    starred_story = MStarredStory.objects.filter(user_id=profile.user.pk,
                                                 story_feed_id=feed_id,
                                                 story_guid=story_url).limit(1).first()
    if not starred_story:
        story_db = {
            "story_guid": story_url,
            "story_permalink": story_url,
            "story_title": title,
            "story_feed_id": feed_id,
            "story_content": content,
            "story_date": datetime.datetime.now(),
            "starred_date": datetime.datetime.now(),
            "user_id": profile.user.pk,
            "user_tags": user_tags,
            "user_notes": user_notes,
        }
        starred_story = MStarredStory.objects.create(**story_db)
        logging.user(profile.user, "~BM~FCStarring story from site: ~SB%s: %s" % (story_url, user_tags))
        message = "Saving story from site: %s: %s" % (story_url, user_tags)
    else:
        starred_story.story_content = content
        starred_story.story_title = title
        starred_story.user_tags = user_tags
        starred_story.story_permalink = story_url
        starred_story.story_guid = story_url
        starred_story.story_feed_id = feed_id
        starred_story.user_notes = user_notes
        starred_story.save()
        logging.user(profile.user, "~BM~FC~SBUpdating~SN starred story from site: ~SB%s: %s" % (story_url, user_tags))
        message = "Updating saved story from site: %s: %s" % (story_url, user_tags)

    # Bug fix: schedule tag counts for the profile's user, not request.user.
    # When the caller authenticated via secret token, request.user is the
    # anonymous user (pk is None), while the starred story belongs to
    # profile.user.
    MStarredStoryCounts.schedule_count_tags_for_user(profile.user.pk)

    response = HttpResponse(json.encode({
        'code': code,
        'message': message,
        'story': starred_story,
    }), content_type='text/plain')
    response['Access-Control-Allow-Origin'] = '*'
    response['Access-Control-Allow-Methods'] = 'POST'
    return response
def exception_change_feed_link(request): feed_id = request.POST['feed_id'] feed = get_object_or_404(Feed, pk=feed_id) original_feed = feed feed_link = request.POST['feed_link'] code = -1 if feed.has_page_exception or feed.has_feed_exception: # Fix broken feed logging.user( request, "~FRFixing feed exception by link: ~SB%s~SN to ~SB%s" % (feed.feed_link, feed_link)) feed_address = feedfinder.feed(feed_link) if feed_address: code = 1 feed.has_page_exception = False feed.active = True feed.fetched_once = False feed.feed_link = feed_link feed.feed_address = feed_address feed.next_scheduled_update = datetime.datetime.utcnow() duplicate_feed = feed.save() if duplicate_feed: new_feed = Feed.objects.get(pk=duplicate_feed.pk) feed = new_feed new_feed.next_scheduled_update = datetime.datetime.utcnow() new_feed.has_page_exception = False new_feed.active = True new_feed.save() else: # Branch good feed logging.user( request, "~FRBranching feed by link: ~SB%s~SN to ~SB%s" % (feed.feed_link, feed_link)) feed, _ = Feed.objects.get_or_create(feed_address=feed.feed_address, feed_link=feed_link) if feed.pk != original_feed.pk: try: feed.branch_from_feed = original_feed.branch_from_feed or original_feed except Feed.DoesNotExist: feed.branch_from_feed = original_feed feed.feed_link_locked = True feed.save() code = 1 feed = feed.update() feed = Feed.objects.get(pk=feed.pk) usersub = UserSubscription.objects.get(user=request.user, feed=original_feed) if usersub: usersub.switch_feed(feed, original_feed) usersub = UserSubscription.objects.get(user=request.user, feed=feed) usersub.calculate_feed_scores(silent=False) feed.update_all_statistics() classifiers = get_classifiers_for_user(usersub.user, usersub.feed.pk) feeds = { original_feed.pk: usersub.canonical(full=True, classifiers=classifiers), } return { 'code': code, 'feeds': feeds, 'new_feed_id': usersub.feed.pk, }
def facebook_connect(request):
    """OAuth endpoint for connecting a NewsBlur account to Facebook.

    Three entry states, driven by the querystring:
      - 'code' present: exchange the verification code for an access token,
        fetch the Facebook profile, and attach it to this account;
      - 'error' present: Facebook declined; surface the error;
      - neither: hand back the URL of Facebook's OAuth dialog to start.
    """
    facebook_app_id = settings.FACEBOOK_APP_ID
    facebook_secret = settings.FACEBOOK_SECRET

    args = {
        "client_id": facebook_app_id,
        "redirect_uri": "https://" + Site.objects.get_current().domain + '/oauth/facebook_connect',
        "scope": "user_friends",
        "display": "popup",
    }

    verification_code = request.GET.get('code')
    if verification_code:
        # Exchange the one-time code for an access token.
        args["client_secret"] = facebook_secret
        args["code"] = verification_code
        uri = "https://graph.facebook.com/oauth/access_token?" + \
              urllib.urlencode(args)
        response_text = urllib.urlopen(uri).read()
        response = json.decode(response_text)
        if "access_token" not in response:
            logging.user(request, "~BB~FRFailed Facebook connect, no access_token. (%s): %s" % (args, response))
            return dict(error="Facebook has returned an error. Try connecting again.")
        access_token = response["access_token"]

        # Get the user's profile.
        graph = facebook.GraphAPI(access_token)
        profile = graph.get_object("me")
        uid = profile["id"]

        # Be sure that two people aren't using the same Facebook account.
        existing_user = MSocialServices.objects.filter(facebook_uid=uid)
        if existing_user and existing_user[0].user_id != request.user.pk:
            try:
                user = User.objects.get(pk=existing_user[0].user_id)
                logging.user(request, "~BB~FRFailed FB connect, another user: %s" % user.username)
                return dict(error=("Another user (%s, %s) has "
                                   "already connected with those Facebook credentials."
                                   % (user.username, user.email or "no email")))
            except User.DoesNotExist:
                # Stale record pointing at a deleted user; safe to discard.
                existing_user.delete()

        social_services = MSocialServices.get_user(request.user.pk)
        social_services.facebook_uid = uid
        social_services.facebook_access_token = access_token
        social_services.syncing_facebook = True
        social_services.save()

        # Friend sync happens asynchronously.
        SyncFacebookFriends.delay(user_id=request.user.pk)

        logging.user(request, "~BB~FRFinishing Facebook connect")
        return {}
    elif request.GET.get('error'):
        logging.user(request, "~BB~FRFailed Facebook connect, error: %s" % request.GET.get('error'))
        return {'error': '%s... Try connecting again.' % request.GET.get('error')}
    else:
        # Start the OAuth process
        logging.user(request, "~BB~FRStarting Facebook connect")
        url = "https://www.facebook.com/dialog/oauth?" + urllib.urlencode(args)
        return {'next': url}
def facebook_disconnect(request): logging.user(request, "~BB~FRDisconnecting Facebook") social_services = MSocialServices.objects.get(user_id=request.user.pk) social_services.disconnect_facebook() return HttpResponseRedirect(reverse('load-user-friends'))
def api_share_new_story(request):
    """IFTTT action endpoint: share a story (by URL) to the user's blurblog.

    Enforces the free-tier per-feed quota and a global per-day quota,
    fetches the original page to fill in missing title/content, creates
    the MSharedStory (idempotently), and marks it read for the sharer.
    """
    user = request.user
    body = request.body_json
    fields = body.get('actionFields')
    story_url = urlnorm.normalize(fields['story_url'])
    story_content = fields.get('story_content', "")
    story_title = fields.get('story_title', "")
    story_author = fields.get('story_author', "")
    comments = fields.get('comments', None)

    logging.user(request.user, "~FBFinding feed (api_share_new_story): %s" % story_url)
    original_feed = Feed.get_feed_from_url(story_url, create=True, fetch=True)
    story_hash = MStory.guid_hash_unsaved(story_url)
    feed_id = (original_feed and original_feed.pk or 0)
    # Free users are limited to one share per site per day.
    if not user.profile.is_premium and MSharedStory.feed_quota(user.pk, story_hash, feed_id=feed_id):
        return {"errors": [{
            'message': 'Only premium users can share multiple stories per day from the same site.'
        }]}

    # Everyone is limited to `quota` shares per day overall.
    quota = 3
    if MSharedStory.feed_quota(user.pk, story_hash, quota=quota):
        logging.user(request, "~BM~FRNOT ~FYSharing story from ~SB~FCIFTTT~FY, over quota: ~SB%s: %s" % (story_url, comments))
        return {"errors": [{
            'message': 'You can only share %s stories per day.' % quota
        }]}

    if not story_content or not story_title:
        # Fetch the original page to fill in whatever IFTTT didn't supply.
        ti = TextImporter(feed=original_feed, story_url=story_url, request=request)
        original_story = ti.fetch(return_document=True)
        if original_story:
            story_url = original_story['url']
            if not story_content:
                story_content = original_story['content']
            if not story_title:
                story_title = original_story['title']

    if story_content:
        # Rewrite relative links against the story's URL.
        story_content = lxml.html.fromstring(story_content)
        story_content.make_links_absolute(story_url)
        story_content = lxml.html.tostring(story_content)

    shared_story = MSharedStory.objects.filter(user_id=user.pk,
                                               story_feed_id=original_feed and original_feed.pk or 0,
                                               story_guid=story_url).limit(1).first()
    if not shared_story:
        title_max = MSharedStory._fields['story_title'].max_length
        story_db = {
            "story_guid": story_url,
            "story_permalink": story_url,
            "story_title": story_title and story_title[:title_max] or "[Untitled]",
            "story_feed_id": original_feed and original_feed.pk or 0,
            "story_content": story_content,
            "story_author_name": story_author,
            "story_date": datetime.datetime.now(),
            "user_id": user.pk,
            "comments": comments,
            "has_comments": bool(comments),
        }
        try:
            shared_story = MSharedStory.objects.create(**story_db)
            # Followers' unread counts are now stale.
            socialsubs = MSocialSubscription.objects.filter(subscription_user_id=user.pk)
            for socialsub in socialsubs:
                socialsub.needs_unread_recalc = True
                socialsub.save()
            logging.user(request, "~BM~FYSharing story from ~SB~FCIFTTT~FY: ~SB%s: %s" % (story_url, comments))
        except NotUniqueError:
            # Raced with another share of the same story; treat as already shared.
            logging.user(request, "~BM~FY~SBAlready~SN shared story from ~SB~FCIFTTT~FY: ~SB%s: %s" % (story_url, comments))
    else:
        logging.user(request, "~BM~FY~SBAlready~SN shared story from ~SB~FCIFTTT~FY: ~SB%s: %s" % (story_url, comments))

    # Mark the shared story read on the user's own blurblog subscription,
    # falling back to the redis read-state when they don't subscribe to
    # themselves.
    try:
        socialsub = MSocialSubscription.objects.get(user_id=user.pk,
                                                    subscription_user_id=user.pk)
    except MSocialSubscription.DoesNotExist:
        socialsub = None

    if socialsub and shared_story:
        socialsub.mark_story_ids_as_read([shared_story.story_hash],
                                         shared_story.story_feed_id,
                                         request=request)
    elif shared_story:
        RUserStory.mark_read(user.pk, shared_story.story_feed_id, shared_story.story_hash)

    if shared_story:
        shared_story.publish_update_to_subscribers()

    return {"data": [{
        "id": shared_story and shared_story.story_guid,
        "url": shared_story and shared_story.blurblog_permalink()
    }]}
def receive_newsletter(self, params):
    """Ingest an inbound newsletter email as a story on a per-sender feed.

    params is the parsed email payload (recipient, from, subject, timestamp,
    signature, content). Creates the sender's feed and the user's
    subscription on first contact, stores the email as an MStory (keyed on
    the email signature), and publishes the update to subscribers.
    Returns the MStory, or None when no user matches the recipient.
    """
    user = self._user_from_email(params['recipient'])
    if not user:
        return

    sender_name, sender_username, sender_domain = self._split_sender(params['from'])
    feed_address = self._feed_address(user, "%s@%s" % (sender_username, sender_domain))

    usf = UserSubscriptionFolders.objects.get(user=user)
    usf.add_folder('', 'Newsletters')

    try:
        feed = Feed.objects.get(feed_address=feed_address)
    except Feed.DoesNotExist:
        # First email from this sender: create its dedicated feed and tell
        # the user's open clients to reload it.
        feed = Feed.objects.create(feed_address=feed_address,
                                   feed_link='http://' + sender_domain,
                                   feed_title=sender_name,
                                   fetched_once=True,
                                   known_good=True)
        feed.update()
        logging.user(user, "~FCCreating newsletter feed: ~SB%s" % (feed))
        r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
        r.publish(user.username, 'reload:%s' % feed.pk)
        self._check_if_first_newsletter(user)

    # Keep the feed title tracking the sender's display name.
    if feed.feed_title != sender_name:
        feed.feed_title = sender_name
        feed.save()

    try:
        usersub = UserSubscription.objects.get(user=user, feed=feed)
    except UserSubscription.DoesNotExist:
        _, _, usersub = UserSubscription.add_subscription(
            user=user,
            feed_address=feed_address,
            folder='Newsletters')
        r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
        r.publish(user.username, 'reload:feeds')

    story_hash = MStory.ensure_story_hash(params['signature'], feed.pk)
    story_content = self._get_content(params)
    plain_story_content = self._get_content(params, force_plain=True)
    # Prefer whichever rendering carries more of the email's content.
    if len(plain_story_content) > len(story_content):
        story_content = plain_story_content
    story_content = self._clean_content(story_content)
    story_params = {
        "story_feed_id": feed.pk,
        "story_date": datetime.datetime.fromtimestamp(int(params['timestamp'])),
        "story_title": params['subject'],
        "story_content": story_content,
        "story_author_name": params['from'],
        "story_permalink": "https://%s%s" % (
            Site.objects.get_current().domain,
            reverse('newsletter-story', kwargs={'story_hash': story_hash})),
        "story_guid": params['signature'],
    }
    # (Removed a stray debug `print story_params` that leaked email contents
    # to stdout on every delivery.)
    try:
        story = MStory.objects.get(story_hash=story_hash)
    except MStory.DoesNotExist:
        story = MStory(**story_params)
        story.save()

    usersub.needs_unread_recalc = True
    usersub.save()

    self._publish_to_subscribers(feed)

    MFetchHistory.add(feed_id=feed.pk, fetch_type='push')
    logging.user(user, "~FCNewsletter feed story: ~SB%s~SN / ~SB%s" % (story.story_title, feed))

    return story
def api_shared_story(request):
    """IFTTT trigger endpoint: list recently shared blurblog stories.

    triggerFields.blurblog_user is a specific social user id or "all" for
    every blurblog the user follows. Optional after/before epoch bounds and
    a limit filter the results; the user's classifiers drop
    negatively-scored stories.
    """
    user = request.user
    body = request.body_json
    after = body.get('after', None)
    before = body.get('before', None)
    limit = body.get('limit', 50)
    fields = body.get('triggerFields')
    blurblog_user = fields['blurblog_user']
    entries = []

    if isinstance(blurblog_user, int) or blurblog_user.isdigit():
        social_user_ids = [int(blurblog_user)]
    elif blurblog_user == "all":
        socialsubs = MSocialSubscription.objects.filter(user_id=user.pk)
        social_user_ids = [ss.subscription_user_id for ss in socialsubs]
    else:
        # Bug fix: an unrecognized blurblog_user previously left
        # social_user_ids unbound, raising a NameError (HTTP 500) below.
        # Treat it as "no blurblogs" and return an empty result set.
        social_user_ids = []

    mstories = MSharedStory.objects(
        user_id__in=social_user_ids
    ).order_by('-shared_date')[:limit]
    stories = Feed.format_stories(mstories)

    found_feed_ids = list(set([story['story_feed_id'] for story in stories]))
    share_user_ids = list(set([story['user_id'] for story in stories]))
    users = dict([(u.pk, u.username)
                  for u in User.objects.filter(pk__in=share_user_ids).only('pk', 'username')])
    feeds = dict([(f.pk, {
        "title": f.feed_title,
        "website": f.feed_link,
        "address": f.feed_address,
    }) for f in Feed.objects.filter(pk__in=found_feed_ids)])

    # Blurblog-level classifiers...
    classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk,
                                                    social_user_id__in=social_user_ids))
    classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk,
                                                        social_user_id__in=social_user_ids))
    classifier_titles = list(MClassifierTitle.objects(user_id=user.pk,
                                                      social_user_id__in=social_user_ids))
    classifier_tags = list(MClassifierTag.objects(user_id=user.pk,
                                                  social_user_id__in=social_user_ids))
    # Merge with feed specific classifiers
    classifier_feeds = classifier_feeds + list(MClassifierFeed.objects(user_id=user.pk,
                                                                       feed_id__in=found_feed_ids))
    classifier_authors = classifier_authors + list(MClassifierAuthor.objects(user_id=user.pk,
                                                                             feed_id__in=found_feed_ids))
    classifier_titles = classifier_titles + list(MClassifierTitle.objects(user_id=user.pk,
                                                                          feed_id__in=found_feed_ids))
    classifier_tags = classifier_tags + list(MClassifierTag.objects(user_id=user.pk,
                                                                    feed_id__in=found_feed_ids))

    for story in stories:
        # Epoch-second window filters supplied by IFTTT.
        if before and int(story['shared_date'].strftime("%s")) > before:
            continue
        if after and int(story['shared_date'].strftime("%s")) < after:
            continue
        score = compute_story_score(story, classifier_titles=classifier_titles,
                                    classifier_authors=classifier_authors,
                                    classifier_tags=classifier_tags,
                                    classifier_feeds=classifier_feeds)
        if score < 0:
            continue
        feed = feeds.get(story['story_feed_id'], None)
        entries.append({
            "StoryTitle": story['story_title'],
            "StoryContent": story['story_content'],
            "StoryURL": story['story_permalink'],
            "StoryAuthor": story['story_authors'],
            "PublishedAt": story['story_date'].strftime("%Y-%m-%dT%H:%M:%SZ"),
            "StoryScore": score,
            "Comments": story['comments'],
            "Username": users.get(story['user_id']),
            "SharedAt": story['shared_date'].strftime("%Y-%m-%dT%H:%M:%SZ"),
            "Site": feed and feed['title'],
            "SiteURL": feed and feed['website'],
            "SiteRSS": feed and feed['address'],
            "meta": {
                "id": story['story_hash'],
                "timestamp": int(story['shared_date'].strftime("%s"))
            },
        })

    if after:
        # IFTTT expects oldest-first when polling forward from a checkpoint.
        entries = sorted(entries, key=lambda s: s['meta']['timestamp'])

    logging.user(request, "~FMChecking shared stories from ~SB~FCIFTTT~SN~FM: ~SB~FM%s~FM~SN - ~SB%s~SN stories" % (blurblog_user, len(entries)))

    return {"data": entries}
def appdotnet_disconnect(request): logging.user(request, "~BB~FRDisconnecting App.net") social_services = MSocialServices.objects.get(user_id=request.user.pk) social_services.disconnect_appdotnet() return HttpResponseRedirect(reverse('load-user-friends'))
def api_unread_story(request, trigger_slug=None):
    """IFTTT trigger endpoint: list unread stories for a feed or folder.

    triggerFields.feed_or_folder is a numeric feed id, a folder title,
    "Top Level", or "all". When trigger_slug is "new-unread-focus-story",
    only positively-scored (focus) stories from trained feeds are returned.
    Optional after/before epoch bounds and a limit filter the results.
    """
    user = request.user
    body = request.body_json
    after = body.get('after', None)
    before = body.get('before', None)
    limit = body.get('limit', 50)
    fields = body.get('triggerFields')
    feed_or_folder = fields['feed_or_folder']
    entries = []

    if isinstance(feed_or_folder, int) or feed_or_folder.isdigit():
        # Single-feed trigger.
        feed_id = int(feed_or_folder)
        try:
            usersub = UserSubscription.objects.get(user=user, feed_id=feed_id)
        except UserSubscription.DoesNotExist:
            return dict(data=[])
        found_feed_ids = [feed_id]
        found_trained_feed_ids = [feed_id] if usersub.is_trained else []
        stories = usersub.get_stories(order="newest", read_filter="unread",
                                      offset=0, limit=limit,
                                      default_cutoff_date=user.profile.unread_cutoff)
    else:
        # Folder trigger: resolve the folder title to its feed ids.
        folder_title = feed_or_folder
        if folder_title == "Top Level":
            folder_title = " "
        usf = UserSubscriptionFolders.objects.get(user=user)
        flat_folders = usf.flatten_folders()
        feed_ids = None
        if folder_title != "all":
            feed_ids = flat_folders.get(folder_title)
        usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=feed_ids,
                                                   read_filter="unread")
        feed_ids = [sub.feed_id for sub in usersubs]
        params = {
            "user_id": user.pk,
            "feed_ids": feed_ids,
            "offset": 0,
            "limit": limit,
            "order": "newest",
            "read_filter": "unread",
            "usersubs": usersubs,
            "cutoff_date": user.profile.unread_cutoff,
        }
        story_hashes, unread_feed_story_hashes = UserSubscription.feed_stories(**params)
        mstories = MStory.objects(story_hash__in=story_hashes).order_by('-story_date')
        stories = Feed.format_stories(mstories)
        found_feed_ids = list(set([story['story_feed_id'] for story in stories]))
        trained_feed_ids = [sub.feed_id for sub in usersubs if sub.is_trained]
        found_trained_feed_ids = list(set(trained_feed_ids) & set(found_feed_ids))

    # Classifiers are only loaded (and only defined) when a trained feed
    # actually appears in the results; the scoring below guards on the
    # same condition.
    if found_trained_feed_ids:
        classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk,
                                                        feed_id__in=found_trained_feed_ids))
        classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk,
                                                            feed_id__in=found_trained_feed_ids))
        classifier_titles = list(MClassifierTitle.objects(user_id=user.pk,
                                                          feed_id__in=found_trained_feed_ids))
        classifier_tags = list(MClassifierTag.objects(user_id=user.pk,
                                                      feed_id__in=found_trained_feed_ids))

    feeds = dict([(f.pk, {
        "title": f.feed_title,
        "website": f.feed_link,
        "address": f.feed_address,
    }) for f in Feed.objects.filter(pk__in=found_feed_ids)])

    for story in stories:
        # Epoch-second window filters supplied by IFTTT.
        if before and int(story['story_date'].strftime("%s")) > before:
            continue
        if after and int(story['story_date'].strftime("%s")) < after:
            continue
        score = 0
        if found_trained_feed_ids and story['story_feed_id'] in found_trained_feed_ids:
            score = compute_story_score(story, classifier_titles=classifier_titles,
                                        classifier_authors=classifier_authors,
                                        classifier_tags=classifier_tags,
                                        classifier_feeds=classifier_feeds)
            if score < 0:
                continue
            if trigger_slug == "new-unread-focus-story" and score < 1:
                continue
        feed = feeds.get(story['story_feed_id'], None)
        entries.append({
            "StoryTitle": story['story_title'],
            "StoryContent": story['story_content'],
            "StoryURL": story['story_permalink'],
            "StoryAuthor": story['story_authors'],
            "PublishedAt": story['story_date'].strftime("%Y-%m-%dT%H:%M:%SZ"),
            "StoryScore": score,
            "Site": feed and feed['title'],
            "SiteURL": feed and feed['website'],
            "SiteRSS": feed and feed['address'],
            "meta": {
                "id": story['story_hash'],
                "timestamp": int(story['story_date'].strftime("%s"))
            },
        })

    if after:
        # IFTTT expects oldest-first when polling forward from a checkpoint.
        entries = sorted(entries, key=lambda s: s['meta']['timestamp'])

    logging.user(request, "~FYChecking unread%s stories with ~SB~FCIFTTT~SN~FY: ~SB%s~SN - ~SB%s~SN stories" % (" ~SBfocus~SN" if trigger_slug == "new-unread-focus-story" else "", feed_or_folder, len(entries)))

    return {"data": entries[:limit]}
def stripe_form(request):
    """Render and process the Stripe payment form for premium upgrades.

    GET renders the form (plan preselected via ?plan=N). POST either swaps
    the card on an existing premium customer or creates a new Stripe
    customer and activates premium immediately.
    """
    user = request.user
    success_updating = False
    stripe.api_key = settings.STRIPE_SECRET
    plan = int(request.GET.get('plan', 2))
    plan = PLANS[plan-1][0]
    error = None

    if request.method == 'POST':
        zebra_form = StripePlusPaymentForm(request.POST, email=user.email)
        if zebra_form.is_valid():
            user.email = zebra_form.cleaned_data['email']
            user.save()
            # Premium only counts as current if it hasn't expired yet.
            current_premium = (user.profile.is_premium and
                               user.profile.premium_expire and
                               user.profile.premium_expire > datetime.datetime.now())

            # Are they changing their existing card?
            if user.profile.stripe_id and current_premium:
                customer = stripe.Customer.retrieve(user.profile.stripe_id)
                try:
                    card = customer.cards.create(card=zebra_form.cleaned_data['stripe_token'])
                except stripe.CardError:
                    error = "This card was declined."
                else:
                    customer.default_card = card.id
                    customer.save()
                    success_updating = True
            else:
                try:
                    customer = stripe.Customer.create(**{
                        'card': zebra_form.cleaned_data['stripe_token'],
                        'plan': zebra_form.cleaned_data['plan'],
                        'email': user.email,
                        'description': user.username,
                    })
                except stripe.CardError:
                    error = "This card was declined."
                else:
                    # NOTE(review): 'strip_4_digits' looks like a typo for
                    # 'stripe_4_digits' -- confirm against the Profile model's
                    # actual field name before renaming.
                    user.profile.strip_4_digits = zebra_form.cleaned_data['last_4_digits']
                    user.profile.stripe_id = customer.id
                    user.profile.save()
                    user.profile.activate_premium()  # TODO: Remove, because webhooks are slow
                    success_updating = True
    else:
        zebra_form = StripePlusPaymentForm(email=user.email, plan=plan)

    if success_updating:
        return render_to_response('reader/paypal_return.xhtml',
                                  {}, context_instance=RequestContext(request))

    # Show the user where they sit in the new-user activation queue.
    new_user_queue_count = RNewUserQueue.user_count()
    new_user_queue_position = RNewUserQueue.user_position(request.user.pk)
    new_user_queue_behind = 0
    if new_user_queue_position >= 0:
        new_user_queue_behind = new_user_queue_count - new_user_queue_position
        new_user_queue_position -= 1

    logging.user(request, "~BM~FBLoading Stripe form")
    return render_to_response('profile/stripe_form.xhtml',
        {
          'zebra_form': zebra_form,
          'publishable': settings.STRIPE_PUBLISHABLE,
          'success_updating': success_updating,
          'new_user_queue_count': new_user_queue_count - 1,
          'new_user_queue_position': new_user_queue_position,
          'new_user_queue_behind': new_user_queue_behind,
          'error': error,
        },
        context_instance=RequestContext(request)
    )
def trigger_error(request): logging.user(request.user, "~BR~FW~SBTriggering divison by zero") division_by_zero = 1 / 0 return HttpResponseRedirect(reverse('index'))