import logging
import urllib
from collections import defaultdict

from django import http
from django.core.cache import cache
from django.core.urlresolvers import reverse
from django.shortcuts import redirect, render

# Project-specific names used below (BlogItem, BlogComment, ONE_WEEK, ONE_MONTH,
# redis_increment, utc_now, parse_ocs_to_categories, make_categories_q,
# get_related_posts) are assumed to come from the blog app's own modules and
# are not shown here.


def _blog_post_key_prefixer(request):
    if request.method != 'GET':
        return None
    if request.user.is_authenticated():
        return None
    prefix = urllib.urlencode(request.GET)
    oid = request.path.split('/')[-1]
    cache_key = 'latest_comment_add_date:%s' % oid
    latest_date = cache.get(cache_key)
    if latest_date is None:
        try:
            blogitem = BlogItem.objects.get(oid=oid)
        except BlogItem.DoesNotExist:
            # don't bother, something's really wrong
            return None
        latest_date = blogitem.modify_date
        for c in (BlogComment.objects
                  .filter(blogitem=blogitem,
                          add_date__gt=latest_date)
                  .order_by('-add_date')[:1]):
            latest_date = c.add_date
        # only the microsecond component, but enough to vary the prefix
        # whenever the relevant date changes
        latest_date = latest_date.strftime('%f')
        cache.set(cache_key, latest_date, ONE_WEEK)
    prefix += str(latest_date)
    try:
        redis_increment('plog:hits', request)
    except Exception:
        logging.error('Unable to redis.zincrby', exc_info=True)
    return prefix

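# A minimal sketch (an assumption, not the project's actual wiring, which may
# use a library such as django-fancy-cache) of how a key-prefixer callable like
# the one above is typically attached to a view: returning None means "skip the
# cache for this request", and any other value becomes part of the cache key,
# so a changed comment date produces a fresh page.
from functools import wraps


def cached_view(timeout, key_prefixer):
    def decorator(view):
        @wraps(view)
        def wrapper(request, *args, **kwargs):
            prefix = key_prefixer(request)
            if prefix is None:
                # non-GET requests, authenticated users, unknown posts, ...
                return view(request, *args, **kwargs)
            cache_key = 'view:%s:%s' % (request.path, prefix)
            response = cache.get(cache_key)
            if response is None:
                response = view(request, *args, **kwargs)
                cache.set(cache_key, response, timeout)
            return response
        return wrapper
    return decorator

# Usage would then look roughly like:
#   @cached_view(ONE_WEEK, _blog_post_key_prefixer)
#   def blog_post(request, oid):
#       ...
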
def home(request, oc=None):
    data = {}
    qs = BlogItem.objects.filter(pub_date__lt=utc_now())
    if oc:
        categories = parse_ocs_to_categories(oc)
        cat_q = make_categories_q(categories)
        qs = qs.filter(cat_q)
        data['categories'] = categories

    ## Reasons for not being here
    if request.method == 'HEAD':
        return http.HttpResponse('')

    try:
        redis_increment('homepage:misses', request)
    except Exception:
        logging.error('Unable to redis.zincrby', exc_info=True)

    BATCH_SIZE = 10
    try:
        page = max(1, int(request.GET.get('page', 1))) - 1
    except ValueError:
        raise http.Http404('invalid page value')
    n, m = page * BATCH_SIZE, (page + 1) * BATCH_SIZE
    max_count = qs.count()
    if (page + 1) * BATCH_SIZE < max_count:
        data['next_page'] = page + 2
    data['previous_page'] = page

    data['blogitems'] = (
        qs
        .prefetch_related('categories')
        .order_by('-pub_date')
    )[n:m]

    return render(request, 'homepage/home.html', data)

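# Worked example of the pagination arithmetic above (an illustration, not part
# of the original code), with BATCH_SIZE = 10:
#   ?page=1 (or no page) -> page = 0 -> qs[0:10],  previous_page = 0, next_page = 2 if > 10 posts
#   ?page=3              -> page = 2 -> qs[20:30], previous_page = 2, next_page = 4 if > 30 posts
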
def home(request, oc=None):
    data = {}
    qs = BlogItem.objects.filter(pub_date__lt=utc_now())
    if oc:
        categories = parse_ocs_to_categories(oc)
        cat_q = make_categories_q(categories)
        qs = qs.filter(cat_q)
        data['categories'] = categories

    ## Reasons for not being here
    if request.method == 'HEAD':
        return http.HttpResponse('')

    try:
        redis_increment('homepage:misses', request)
    except Exception:
        logging.error('Unable to redis.zincrby', exc_info=True)

    BATCH_SIZE = 10
    try:
        page = max(1, int(request.GET.get('page', 1))) - 1
    except ValueError:
        raise http.Http404('invalid page value')
    n, m = page * BATCH_SIZE, (page + 1) * BATCH_SIZE
    max_count = qs.count()
    first_post, = qs.order_by('-pub_date')[:1]
    data['first_post_url'] = request.build_absolute_uri(
        reverse('blog_post', args=[first_post.oid])
    )
    if (page + 1) * BATCH_SIZE < max_count:
        data['next_page'] = page + 2
    data['previous_page'] = page

    data['blogitems'] = (
        qs.prefetch_related('categories').order_by('-pub_date')
    )[n:m]

    return render(request, 'homepage/home.html', data)

def blog_post(request, oid):
    if oid.endswith('/'):
        oid = oid[:-1]
    try:
        post = BlogItem.objects.get(oid=oid)
    except BlogItem.DoesNotExist:
        try:
            post = BlogItem.objects.get(oid__iexact=oid)
        except BlogItem.DoesNotExist:
            if oid == 'add':
                return redirect(reverse('add_post'))
            raise http.Http404(oid)

    ## Reasons for not being here
    if request.method == 'HEAD':
        return http.HttpResponse('')
    elif (request.method == 'GET' and
          (request.GET.get('replypath') or request.GET.get('show-comments'))):
        return http.HttpResponsePermanentRedirect(request.path)

    try:
        redis_increment('plog:misses', request)
    except Exception:
        logging.error('Unable to redis.zincrby', exc_info=True)

    data = {
        'post': post,
    }
    try:
        data['previous_post'] = post.get_previous_by_pub_date()
    except BlogItem.DoesNotExist:
        data['previous_post'] = None
    try:
        data['next_post'] = post.get_next_by_pub_date(pub_date__lt=utc_now())
    except BlogItem.DoesNotExist:
        data['next_post'] = None

    comments = (
        BlogComment.objects
        .filter(blogitem=post)
        .order_by('add_date')
    )
    if not request.user.is_staff:
        comments = comments.filter(approved=True)

    all_comments = defaultdict(list)
    for comment in comments:
        all_comments[comment.parent_id].append(comment)
    data['all_comments'] = all_comments
    data['related'] = get_related_posts(post)
    data['show_buttons'] = True
    data['home_url'] = request.build_absolute_uri('/')
    return render(request, 'plog/post.html', data)

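# Sketch (an assumption, not part of the original code): one way the
# parent_id -> [child comments] mapping built above can be walked to produce
# the threaded ordering a template would render. Top-level comments live under
# the None key; each child is yielded right after its parent, one level deeper.
def _flatten_comment_tree(all_comments, parent_id=None, depth=0):
    for comment in all_comments[parent_id]:
        yield depth, comment
        for nested in _flatten_comment_tree(all_comments, comment.id, depth + 1):
            yield nested
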
def _home_key_prefixer(request):
    if request.method != 'GET':
        return None
    prefix = urllib.urlencode(request.GET)
    cache_key = 'latest_comment_add_date'
    latest_date = cache.get(cache_key)
    if latest_date is None:
        latest, = (BlogItem.objects
                   .order_by('-modify_date')
                   .values('modify_date')[:1])
        latest_date = latest['modify_date'].strftime('%f')
        cache.set(cache_key, latest_date, 60 * 60)
    prefix += str(latest_date)
    try:
        redis_increment('homepage:hits', request)
    except Exception:
        logging.error('Unable to redis.zincrby', exc_info=True)
    return prefix

def _blog_post_key_prefixer(request):
    if request.method != 'GET':
        return None
    if request.user.is_authenticated():
        return None
    prefix = urllib.urlencode(request.GET)
    if request.path.endswith('/'):
        oid = request.path.split('/')[-2]
    else:
        oid = request.path.split('/')[-1]
    cache_key = 'latest_comment_add_date:%s' % oid
    latest_date = cache.get(cache_key)
    if latest_date is None:
        try:
            blogitem = (
                BlogItem.objects.filter(oid=oid)
                .values('pk', 'modify_date')[0]
            )
        except IndexError:
            # don't bother, something's really wrong
            return None
        latest_date = blogitem['modify_date']
        blogitem_pk = blogitem['pk']
        for c in (BlogComment.objects
                  .filter(blogitem=blogitem_pk,
                          add_date__gt=latest_date)
                  .values('add_date')
                  .order_by('-add_date')[:1]):
            latest_date = c['add_date']
        latest_date = latest_date.strftime('%f')
        cache.set(cache_key, latest_date, ONE_MONTH)
    prefix += str(latest_date)
    try:
        redis_increment('plog:hits', request)
    except Exception:
        logging.error('Unable to redis.zincrby', exc_info=True)
    return prefix

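# Sketch (an assumption, not shown in the code above): the
# 'latest_comment_add_date:<oid>' value is cached for up to ONE_MONTH, so a new
# comment would not change the prefix on its own. One plausible way to force a
# refresh is to delete that key whenever a BlogComment is saved, which makes
# the prefixer recompute the date and thereby changes the page's cache key.
from django.db.models.signals import post_save
from django.dispatch import receiver


@receiver(post_save, sender=BlogComment)
def _invalidate_latest_comment_date(sender, instance, **kwargs):
    cache.delete('latest_comment_add_date:%s' % instance.blogitem.oid)
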
def blog_post(request, oid):
    if oid.endswith('/'):
        oid = oid[:-1]
    try:
        post = BlogItem.objects.get(oid=oid)
    except BlogItem.DoesNotExist:
        try:
            post = BlogItem.objects.get(oid__iexact=oid)
        except BlogItem.DoesNotExist:
            if oid == 'add':
                return redirect(reverse('add_post'))
            raise http.Http404(oid)

    ## Reasons for not being here
    if request.method == 'HEAD':
        return http.HttpResponse('')
    elif (request.method == 'GET' and
          (request.GET.get('replypath') or request.GET.get('show-comments'))):
        return http.HttpResponsePermanentRedirect(request.path)

    try:
        redis_increment('plog:misses', request)
    except Exception:
        logging.error('Unable to redis.zincrby', exc_info=True)

    data = {
        'post': post,
    }
    try:
        data['previous_post'] = post.get_previous_by_pub_date()
    except BlogItem.DoesNotExist:
        data['previous_post'] = None
    try:
        data['next_post'] = post.get_next_by_pub_date(pub_date__lt=utc_now())
    except BlogItem.DoesNotExist:
        data['next_post'] = None
    data['related'] = get_related_posts(post)
    data['show_buttons'] = True
    return render(request, 'plog/post.html', data)

def blog_post(request, oid):
    if oid.endswith('/'):
        oid = oid[:-1]
    try:
        post = BlogItem.objects.get(oid=oid)
    except BlogItem.DoesNotExist:
        try:
            post = BlogItem.objects.get(oid__iexact=oid)
        except BlogItem.DoesNotExist:
            if oid == 'add':
                return redirect(reverse('add_post'))
            raise http.Http404(oid)

    ## Reasons for not being here
    if request.method == 'HEAD':
        return http.HttpResponse('')
    elif (request.method == 'GET' and
          (request.GET.get('replypath') or request.GET.get('show-comments'))):
        return http.HttpResponsePermanentRedirect(request.path)

    try:
        redis_increment('plog:misses', request)
    except Exception:
        logging.error('Unable to redis.zincrby', exc_info=True)

    data = {
        'post': post,
    }
    try:
        data['previous_post'] = post.get_previous_by_pub_date()
    except BlogItem.DoesNotExist:
        data['previous_post'] = None
    try:
        data['next_post'] = post.get_next_by_pub_date(pub_date__lt=utc_now())
    except BlogItem.DoesNotExist:
        data['next_post'] = None
    data['related'] = get_related_posts(post)
    data['show_buttons'] = True
    data['home_url'] = request.build_absolute_uri('/')
    return render(request, 'plog/post.html', data)