def bump(pk=None):
    from goodquestions.apps.posts.models import Post
    from goodquestions.apps.users.models import User
    from goodquestions.const import now

    if not pk:
        # Pick a random open question; half of the time prefer recent unanswered questions.
        query = Post.objects.filter(type=Post.QUESTION, status=Post.OPEN)
        value = random.random()
        if value > 0.5:
            since = now() - timedelta(weeks=10)
            query = query.filter(reply_count=0, creation_date__gt=since)
        query = query.values_list("id")
        ids = [p[0] for p in query]
        pk = random.choice(ids)

    # The edit is attributed to the community user (pk=1).
    community = User.objects.get(pk=1)
    post = Post.objects.get(pk=pk)
    logger.info(post.title)

    if not post.is_toplevel:
        logger.warning("post is not at toplevel")

    post.lastedit_date = now()
    post.lastedit_user = community
    post.save()
def save(self, *args, **kwargs):
    if not self.id:
        # Set the date to current time if missing.
        self.date = self.date or const.now()
    super(Subscription, self).save(*args, **kwargs)
def get_traffic(minutes=60):
    "Obtains the number of distinct IP addresses seen in the last interval"
    global TRAFFIC_KEY
    traffic = cache.get(TRAFFIC_KEY)
    if not traffic:
        recent = const.now() - timedelta(minutes=minutes)
        try:
            traffic = PostView.objects.filter(date__gt=recent).distinct('ip').count()
        except NotImplementedError, exc:
            # Fall back to deduplicating in Python for backends without distinct-on support.
            traffic = PostView.objects.filter(date__gt=recent).values_list('ip')
            traffic = [t[0] for t in traffic]
            traffic = len(set(traffic))
        cache.set(TRAFFIC_KEY, traffic, CACHE_TIMEOUT)
    return traffic
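# Sketch (standalone): the fallback above counts distinct IPs in Python when the
# database backend (e.g. SQLite) raises NotImplementedError for .distinct('ip').
# The rows mimic the shape of values_list('ip'); the addresses are made up.
rows = [("93.184.216.34",), ("93.184.216.34",), ("10.0.0.5",)]
traffic = len(set(t[0] for t in rows))    # -> 2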
def apply_sort(request, query):
    # Note: the naming here needs to match that in the server_tag.py template tags.

    # Apply the sort order.
    sort = request.GET.get('sort', const.POST_SORT_DEFAULT)
    field = const.POST_SORT_MAP.get(sort, "-lastedit_date")
    query = query.order_by(field)

    # Apply the time limit.
    limit = request.GET.get('limit', const.POST_LIMIT_DEFAULT)
    days = const.POST_LIMIT_MAP.get(limit, 0)
    if days:
        delta = const.now() - timedelta(days=days)
        query = query.filter(lastedit_date__gt=delta)

    return query
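# Sketch (assumed): stand-ins for const.POST_SORT_MAP and const.POST_LIMIT_MAP
# showing the lookup-with-fallback pattern above; the real keys and values live
# in goodquestions.const and may differ.
POST_SORT_MAP = {"update": "-lastedit_date", "views": "-view_count", "votes": "-vote_count"}
POST_LIMIT_MAP = {"all time": 0, "today": 1, "this week": 7, "this month": 30}

field = POST_SORT_MAP.get("views", "-lastedit_date")    # -> "-view_count"
days = POST_LIMIT_MAP.get("bogus", 0)                   # unknown limit -> 0, no date filter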
def save(self, *args, **kwargs):
    # Clean the info fields.
    self.info = bleach.clean(self.info, tags=ALLOWED_TAGS,
                             attributes=ALLOWED_ATTRIBUTES, styles=ALLOWED_STYLES)

    # Strip whitespace from the location string.
    self.location = self.location.strip()

    if not self.id:
        # This runs only once, upon object creation.
        self.uuid = util.make_uuid()
        self.date_joined = self.date_joined or now()
        self.last_login = self.date_joined

    super(Profile, self).save(*args, **kwargs)
def user_create_messages(sender, instance, created, *args, **kwargs):
    "The actions to undertake when creating a new user"
    from goodquestions.apps.messages.models import Message, MessageBody
    from goodquestions.apps.util import html
    from goodquestions.const import now

    user = instance
    if created:
        # Create a welcome message for the new user.
        # Fall back to the user itself so that tests pass when no admin user exists.
        authors = User.objects.filter(is_admin=True) or [user]
        author = authors[0]

        title = "Welcome!"
        content = html.render(name=NEW_USER_WELCOME_TEMPLATE, user=user)
        body = MessageBody.objects.create(author=author, subject=title,
                                          text=content, sent_at=now())
        message = Message(user=user, body=body, sent_at=body.sent_at)
        message.save()
def get_users(self, sort, limit, q, user):
    sort = const.USER_SORT_MAP.get(sort, None)
    days = const.POST_LIMIT_MAP.get(limit, 0)

    if q:
        query = self.filter(name__icontains=q)
    else:
        query = self

    if days:
        delta = const.now() - timedelta(days=days)
        query = self.filter(profile__last_login__gt=delta)

    if user.is_authenticated() and user.is_moderator:
        query = query.select_related("profile").order_by(sort)
    else:
        query = query.exclude(status=User.BANNED).select_related("profile").order_by(sort)

    return query
def update_post_views(post, request, minutes=settings.POST_VIEW_MINUTES):
    "Views are updated per user session"
    # Extract the IP number from the request.
    ip1 = request.META.get("REMOTE_ADDR", "")
    ip2 = request.META.get("HTTP_X_FORWARDED_FOR", "").split(",")[0].strip()

    # 'localhost' is not a valid IP address.
    ip1 = "" if ip1.lower() == "localhost" else ip1
    ip2 = "" if ip2.lower() == "localhost" else ip2
    ip = ip1 or ip2 or "0.0.0.0"

    now = const.now()
    since = now - datetime.timedelta(minutes=minutes)

    # One view per time interval from each IP address.
    if not PostView.objects.filter(ip=ip, post=post, date__gt=since):
        PostView.objects.create(ip=ip, post=post, date=now)
        Post.objects.filter(id=post.id).update(view_count=F("view_count") + 1)

    return post
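# Sketch (illustrative): how the IP selection above resolves a few request.META
# combinations; the addresses are made up.
#
#   {"REMOTE_ADDR": "93.184.216.34"}                      -> "93.184.216.34"
#   {"REMOTE_ADDR": "localhost",
#    "HTTP_X_FORWARDED_FOR": "10.0.0.5, 192.168.1.1"}     -> "10.0.0.5"
#   {}                                                    -> "0.0.0.0"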
def get_counts(request, weeks=settings.COUNT_INTERVAL_WEEKS):
    "Returns the number of counts for each post type in the interval that has passed"
    user = request.user
    now = const.now()

    # Authenticated users get counts since their last login.
    if user.is_authenticated():
        since = user.profile.last_login
    else:
        since = now - timedelta(weeks=weeks)

    # This fetches the posts created since the cutoff.
    posts = Post.objects.filter(type__in=Post.TOP_LEVEL, status=Post.OPEN,
                                creation_date__gt=since).order_by('-id').only("id").prefetch_related("tag_set")
    posts = posts[:200]

    counts = defaultdict(int)

    # How many new posts.
    counts['latest'] = len(posts)

    # Produce counts per tag.
    for post in posts:
        for tag in post.tag_set.all():
            counts[tag.name] += 1

    # Fill in the unanswered counts.
    counts['open'] = Post.objects.filter(type=Post.QUESTION, reply_count=0, status=Post.OPEN,
                                         creation_date__gt=since).count()

    # How many new planet posts.
    counts['planet'] = BlogPost.objects.filter(insert_date__gt=since).count()

    # Compute a few more counts for the user.
    if user.is_authenticated():
        # These are the new messages since the last login.
        counts['messages'] = Message.objects.filter(user=user, unread=True, sent_at__gt=since).count()

        # These are the new votes since the last login.
        counts['votes'] = Vote.objects.filter(post__author=user, date__gt=since).count()

    return counts
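# Sketch (illustrative): shape of the dict returned above; tag names become keys
# alongside the fixed ones, and 'messages'/'votes' appear only for authenticated
# users. All numbers are made up.
#
#   {'latest': 42, 'open': 7, 'planet': 3, '<tag name>': 11, 'messages': 2, 'votes': 4}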
def time_ago(date):
    # Rare bug. TODO: Need to investigate why this can happen.
    if not date:
        return ''

    delta = const.now() - date

    if delta < timedelta(minutes=1):
        return 'just now'
    elif delta < timedelta(hours=1):
        unit = pluralize(delta.seconds // 60, "minute")
    elif delta < timedelta(days=1):
        unit = pluralize(delta.seconds // 3600, "hour")
    elif delta < timedelta(days=30):
        unit = pluralize(delta.days, "day")
    elif delta < timedelta(days=90):
        unit = pluralize(int(delta.days / 7), "week")
    elif delta < timedelta(days=730):
        unit = pluralize(int(delta.days / 30), "month")
    else:
        diff = delta.days / 365.0
        unit = '%0.1f years' % diff

    return "%s ago" % unit
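# Sketch (illustrative): how the thresholds above bucket a delta.
#
#   < 1 minute     -> "just now"
#   < 1 hour       -> minutes, e.g. "45 minutes ago"
#   < 1 day        -> hours
#   < 30 days      -> days
#   < 90 days      -> weeks  (days // 7), e.g. 60 days -> "8 weeks ago"
#   < 730 days     -> months (days // 30)
#   otherwise      -> years to one decimal, e.g. 1000 days -> "2.7 years ago"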
def render_digest(days, text_tmpl, html_tmpl, send, options, limit=10, verbosity=1):
    from_email = settings.DEFAULT_FROM_EMAIL
    from goodquestions.apps.posts.models import Post
    from goodquestions.apps.users.models import User

    site = Site.objects.get_current()
    start = (now() - timedelta(days=days))

    # Posts created since the start date.
    top_posts = Post.objects.filter(status=Post.OPEN, type__in=Post.TOP_LEVEL,
                                    creation_date__gt=start).select_related('author')
    top_posts = top_posts.order_by('-view_count')[:limit]

    # Updated posts created before the start date.
    upd_posts = Post.objects.filter(status=Post.OPEN, lastedit_date__gt=start)
    upd_posts = upd_posts.exclude(creation_date__gt=start, type__in=Post.TOP_LEVEL).select_related("author")
    upd_posts = upd_posts.order_by('-vote_count')[:limit]

    # Blog posts created since the start date.
    blogs = Post.objects.filter(status=Post.OPEN, type=Post.BLOG,
                                creation_date__gt=start).select_related('author')
    blogs = blogs[:limit]

    # Total post count.
    total_post_count = Post.objects.filter(status=Post.OPEN).count()

    # Total user count.
    total_user_count = User.objects.filter().count()

    # Users ranked by the number of open posts updated in the interval.
    hard_worker = User.objects.filter(post__status=Post.OPEN, post__lastedit_date__gt=start) \
        .annotate(total=Count("post")).order_by('-total').select_related("profile")
    hard_worker = hard_worker[:limit]

    params = dict(
        site=site,
        top_posts=top_posts,
        upd_posts=upd_posts,
        blogs=blogs,
        total_post_count=total_post_count,
        total_user_count=total_user_count,
        start=start.strftime("%b %d, %Y"),
        hard_worker=hard_worker,
        days=days,
    )

    text_body = html_body = ''

    if text_tmpl:
        text_body = html.render(text_tmpl, **params)

    if html_tmpl:
        html_body = html.render(html_tmpl, **params)

    if verbosity > 0:
        extras = dict(
            digest_manage=reverse("digest_manage"),
            digest_unsubscribe=reverse("digest_unsubscribe", kwargs=dict(uuid=1))
        )
        print text_body % extras
        print html_body % extras

    if send:
        logger.info('sending emails')
        emails = map(string.strip, open(send))

        def chunks(data, size):
            "Break the data into chunks of the given size"
            for i in xrange(0, len(data), size):
                yield data[i:i + size]

        for chunk in chunks(emails, 100):
            users = User.objects.filter(email__in=chunk).select_related('profile')
            for user in users:
                try:
                    extras = dict(
                        digest_manage=reverse("digest_manage"),
                        digest_unsubscribe=reverse("digest_unsubscribe", kwargs=dict(uuid=user.profile.uuid))
                    )
                    text_content = text_body % extras
                    html_content = html_body % extras
                    subject = options['subject']
                    msg = EmailMultiAlternatives(subject, text_content, from_email, [user.email])
                    msg.attach_alternative(html_content, "text/html")
                    msg.send()
                    time.sleep(0.3)  # Throttle on Amazon.
                except Exception, exc:
                    logger.error('error %s sending email to %s' % (exc, user.email))
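# Sketch (assumed): the rendered digest templates are expected to contain
# old-style placeholders such as %(digest_manage)s and %(digest_unsubscribe)s,
# which the `text_body % extras` substitution above fills in per recipient.
# The URLs below are made up for illustration.
text_body = "Manage your digest: %(digest_manage)s\nUnsubscribe: %(digest_unsubscribe)s"
extras = dict(digest_manage="/digest/manage/", digest_unsubscribe="/digest/unsubscribe/1a2b3c/")
print text_body % extras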
def process_request(self, request, weeks=settings.COUNT_INTERVAL_WEEKS):
    global SESSION_KEY, ANON_USER

    user, session = request.user, request.session

    # Suspended users are logged out immediately.
    if user.is_authenticated() and user.is_suspended:
        logout(request)
        messages.error(request, 'Sorry, this account has been suspended. Please contact the administrators.')

    # Add attributes to anonymous users.
    if not user.is_authenticated():

        # This attribute is required inside templates.
        user.is_moderator = user.is_admin = False

        # Check external logins.
        if settings.EXTERNAL_AUTH and valid_external_login(request):
            messages.success(request, "Login completed")

        # We do this to detect when an anonymous session turns into a logged in one.
        if ANON_USER not in session:
            session[ANON_USER] = True

    # User attributes that refresh at given intervals.
    if user.is_authenticated():

        # Time elapsed since the last recorded login.
        elapsed = (const.now() - user.profile.last_login).seconds

        # The user has an anonymous session already.
        # Update the user login data now.
        if ANON_USER in session:
            del session[ANON_USER]
            elapsed = settings.SESSION_UPDATE_SECONDS + 1

        # The user session will be updated.
        if elapsed > settings.SESSION_UPDATE_SECONDS:

            # Set the last login time.
            Profile.objects.filter(user_id=user.id).update(last_login=const.now())

            # Compute the counts.
            counts = get_counts(request)

            # Store the counts in the session for later use.
            session[SESSION_KEY] = counts

            # Create user awards if possible.
            create_user_award.delay(user=user.id)

            # Check the user and fill in details.
            check_user_profile.delay(ip=get_ip(request), user=user.id)

    # Get the counts from the session or the cache.
    counts = session.get(SESSION_KEY) or cache.get(SESSION_KEY)

    # No counts found, set them into the session.
    if not counts:
        # Compute the counts.
        counts = get_counts(request)

        # Put them into the session.
        session[SESSION_KEY] = counts

        # Store them in the cache for the next anonymous user.
        cache.set(SESSION_KEY, counts, settings.SESSION_UPDATE_SECONDS)
def email_handler(request):
    key = request.POST.get("key")

    if key != settings.EMAIL_REPLY_SECRET_KEY:
        data = dict(status="error", msg="key does not match")
    else:
        body = request.POST.get("body")
        body = smart_text(body, errors="ignore")

        # This is for debugging only.
        #fname = "%s/email-debug.txt" % settings.LIVE_DIR
        #fp = file(fname, "wt")
        #fp.write(body.encode("utf-8"))
        #fp.close()

        try:
            # Parse the incoming email.
            # Emails can be malformed, in which case we force utf8 on them before parsing.
            try:
                msg = pyzmail.PyzMessage.factory(body)
            except Exception, exc:
                body = body.encode('utf8', errors='ignore')
                msg = pyzmail.PyzMessage.factory(body)

            # Extract the address from the address tuples.
            address = msg.get_addresses('to')[0][1]

            # Parse the token from the address.
            start, token, rest = address.split('+')

            # Verify that the token exists.
            token = ReplyToken.objects.get(token=token)

            # Find the post that the reply targets and the author of the reply.
            post, author = token.post, token.user

            # Extract the body of the email.
            part = msg.text_part or msg.html_part
            text = part.get_payload()

            # Remove the quoted reply content.
            if settings.EMAIL_REPLY_REMOVE_QUOTED_TEXT:
                text = EmailReplyParser.parse_reply(text)
            else:
                text = text.decode("utf8", errors='replace')
                text = u"<div class='preformatted'>%s</div>" % text

            # Apply server specific formatting.
            text = html.parse_html(text)

            # Apply the markdown on the text.
            text = markdown.markdown(text)

            # Rate-limit sanity check; guards against a potential runaway process.
            since = const.now() - timedelta(days=1)
            if Post.objects.filter(author=author, creation_date__gt=since).count() > settings.MAX_POSTS_TRUSTED_USER:
                raise Exception("too many posts created %s" % author.id)

            # Create the new post.
            post_type = Post.ANSWER if post.is_toplevel else Post.COMMENT
            obj = Post.objects.create(type=post_type, parent=post, content=text, author=author)

            # Deleting the token is disabled for now.
            # Old tokens should be deleted during data pruning.
            #token.delete()

            # Form the return message.
            data = dict(status="ok", id=obj.id)

        except Exception, exc:
            output = StringIO.StringIO()
            traceback.print_exc(file=output)
            data = dict(status="error", msg=str(output.getvalue()))
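# Sketch (assumed address format): the reply-to address is expected to contain
# two '+' separators with the reply token in the middle; the prefix, token and
# host below are made up.
address = "reply+1a2b3c+post@example.org"
start, token, rest = address.split('+')
# start -> "reply", token -> "1a2b3c", rest -> "post@example.org"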
def age_in_days(self):
    delta = const.now() - self.creation_date
    return delta.days
def items(self):
    # Delay posts by two hours.
    delay_time = const.now() - timedelta(hours=2)
    posts = Post.objects.filter(type__in=Post.TOP_LEVEL, status=Post.OPEN,
                                creation_date__lt=delay_time).exclude(type=Post.BLOG).order_by('-creation_date')
    return posts[:FEED_COUNT]