Example #1
def blog_post(request, oid):
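    """Render a single blog post page with prev/next links and its comment thread."""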
    if oid.endswith('/'):
        oid = oid[:-1]
    try:
        post = BlogItem.objects.get(oid=oid)
    except BlogItem.DoesNotExist:
        try:
            post = BlogItem.objects.get(oid__iexact=oid)
        except BlogItem.DoesNotExist:
            if oid == 'add':
                return redirect(reverse('add_post'))
            raise http.Http404(oid)

    ## Reasons for not being here
    if request.method == 'HEAD':
        return http.HttpResponse('')
    elif (
        request.method == 'GET' and
        (request.GET.get('replypath') or request.GET.get('show-comments'))
    ):
        return http.HttpResponsePermanentRedirect(request.path)

    try:
        redis_increment('plog:misses', request)
    except Exception:
        logging.error('Unable to redis.zincrby', exc_info=True)

    data = {
        'post': post,
    }
    try:
        data['previous_post'] = post.get_previous_by_pub_date()
    except BlogItem.DoesNotExist:
        data['previous_post'] = None
    try:
        data['next_post'] = post.get_next_by_pub_date(pub_date__lt=utc_now())
    except BlogItem.DoesNotExist:
        data['next_post'] = None

    comments = (
        BlogComment.objects
        .filter(blogitem=post)
        .order_by('add_date')
    )
    if not request.user.is_staff:
        comments = comments.filter(approved=True)

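    # Unless ?comments=all is passed, only the first 100 comments are shown
    # and the truncation is flagged for the template.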
    comments_truncated = False
    if request.GET.get('comments') != 'all':
        comments = comments[:100]
        if post.count_comments() > 100:
            comments_truncated = 100

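    # Group comments by parent_id so the template can render them as a thread.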
    all_comments = defaultdict(list)
    for comment in comments:
        all_comments[comment.parent_id].append(comment)
    data['comments_truncated'] = comments_truncated
    data['all_comments'] = all_comments
    data['related'] = get_related_posts(post)
    data['show_buttons'] = True
    data['home_url'] = request.build_absolute_uri('/')
    return render(request, 'plog/post.html', data)
Example #2
def _home_key_prefixer(request):
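    """Build the cache-key prefix for the homepage, or return None to skip caching.

    The prefix combines the query string with a token derived from the most
    recently modified matching BlogItem (the token itself is cached for 12 hours).
    """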
    if request.method != 'GET':
        return None
    prefix = make_prefix(request.GET)
    cache_key = 'latest_comment_add_date'
    if request.path_info.startswith('/oc-'):
        categories = parse_ocs_to_categories(request.path_info[len('/oc-'):])
        cache_key += ''.join(str(x.pk) for x in categories)
    else:
        categories = None

    latest_date = cache.get(cache_key)
    if latest_date is None:
        qs = BlogItem.objects.all()
        if categories:
            cat_q = make_categories_q(categories)
            qs = qs.filter(cat_q)
        latest, = qs.order_by('-modify_date').values('modify_date')[:1]
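        # strftime('%f') keeps just the microseconds of the newest modify_date,
        # which serves as a cheap change token in the cache key.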
        latest_date = latest['modify_date'].strftime('%f')
        cache.set(cache_key, latest_date, 60 * 60 * 12)
    prefix += str(latest_date)

    try:
        redis_increment('homepage:hits', request)
    except Exception:
        logger.error('Unable to redis.zincrby', exc_info=True)

    return prefix
Example #3
def blog_post(request, oid):
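    """Render a single blog post page; also attaches an absolute URL to the post."""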
    if oid.endswith("/"):
        oid = oid[:-1]
    try:
        post = BlogItem.objects.get(oid=oid)
    except BlogItem.DoesNotExist:
        try:
            post = BlogItem.objects.get(oid__iexact=oid)
        except BlogItem.DoesNotExist:
            if oid == "add":
                return redirect(reverse("add_post"))
            raise http.Http404(oid)

    ## Reasons for not being here
    if request.method == "HEAD":
        return http.HttpResponse("")
    elif request.method == "GET" and (request.GET.get("replypath") or request.GET.get("show-comments")):
        return http.HttpResponsePermanentRedirect(request.path)

    try:
        redis_increment("plog:misses", request)
    except Exception:
        logging.error("Unable to redis.zincrby", exc_info=True)

    # attach a field called `_absolute_url` which depends on the request
    base_url = "https://" if request.is_secure() else "http://"
    base_url += RequestSite(request).domain
    post._absolute_url = base_url + reverse("blog_post", args=(post.oid,))

    data = {"post": post}
    try:
        data["previous_post"] = post.get_previous_by_pub_date()
    except BlogItem.DoesNotExist:
        data["previous_post"] = None
    try:
        data["next_post"] = post.get_next_by_pub_date(pub_date__lt=utc_now())
    except BlogItem.DoesNotExist:
        data["next_post"] = None

    comments = BlogComment.objects.filter(blogitem=post).order_by("add_date")
    if not request.user.is_staff:
        comments = comments.filter(approved=True)

    comments_truncated = False
    if request.GET.get("comments") != "all":
        comments = comments[:100]
        if post.count_comments() > 100:
            comments_truncated = 100

    all_comments = defaultdict(list)
    for comment in comments:
        all_comments[comment.parent_id].append(comment)
    data["comments_truncated"] = comments_truncated
    data["all_comments"] = all_comments
    data["related"] = get_related_posts(post)
    data["show_buttons"] = not settings.DEBUG
    data["show_fusion_ad"] = not settings.DEBUG
    data["home_url"] = request.build_absolute_uri("/")
    return render(request, "plog/post.html", data)
Example #4
def _blog_post_key_prefixer(request):
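    """Build the cache-key prefix for a blog post page, or return None to skip caching.

    Caching is skipped for non-GET requests and authenticated users. The prefix
    combines the query string with a token from the post's modify_date or, if
    newer, the latest comment's add_date.
    """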
    if request.method != 'GET':
        return None
    if request.user.is_authenticated():
        return None
    prefix = utils.make_prefix(request.GET)
    if request.path.endswith('/'):
        oid = request.path.split('/')[-2]
    else:
        oid = request.path.split('/')[-1]

    cache_key = 'latest_comment_add_date:%s' % oid
    latest_date = cache.get(cache_key)
    if latest_date is None:
        try:
            blogitem = (
                BlogItem.objects.filter(oid=oid)
                .values('pk', 'modify_date')[0]
            )
        except IndexError:
            # don't bother, something's really wrong
            return None
        latest_date = blogitem['modify_date']
        blogitem_pk = blogitem['pk']
        for c in (BlogComment.objects
                  .filter(blogitem=blogitem_pk,
                          add_date__gt=latest_date)
                  .values('add_date')
                  .order_by('-add_date')[:1]):
            latest_date = c['add_date']
        latest_date = latest_date.strftime('%f')
        cache.set(cache_key, latest_date, ONE_MONTH)
    prefix += str(latest_date)

    try:
        redis_increment('plog:hits', request)
    except Exception:
        logging.error('Unable to redis.zincrby', exc_info=True)

    # temporary solution because I can't get Google Analytics API to work
    ua = request.META.get('HTTP_USER_AGENT', '')
    if 'bot' not in ua:
        # it's not important exactly how many hits each post gets, just that
        # some posts are more popular than others, so we don't need to record
        # every single hit.
        if datetime.datetime.utcnow().strftime('%A') == 'Tuesday':
            # so we only do this once a week
            hits, __ = BlogItemHits.objects.get_or_create(oid=oid)
            hits.hits += 1
            hits.save()

    return prefix
Example #5
def _blog_post_key_prefixer(request):
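    """Build the cache-key prefix for a blog post page, including /all-comments URLs.

    Caching is skipped for non-GET requests and authenticated users; the hit
    counter is only incremented for the regular post page.
    """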
    if request.method != 'GET':
        return None
    if request.user.is_authenticated():
        return None
    prefix = utils.make_prefix(request.GET)

    all_comments = False
    if request.path.endswith('/all-comments'):
        oid = request.path.split('/')[-2]
        all_comments = True
    elif request.path.endswith('/'):
        oid = request.path.split('/')[-2]
    else:
        oid = request.path.split('/')[-1]

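    # md5 the oid so the cache key stays short and free of unsafe characters.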
    cache_key = 'latest_comment_add_date:%s' % hashlib.md5(oid).hexdigest()
    latest_date = cache.get(cache_key)
    if latest_date is None:
        try:
            blogitem = (
                BlogItem.objects.filter(oid=oid)
                .values('pk', 'modify_date')[0]
            )
        except IndexError:
            # don't bother, something's really wrong
            return None
        latest_date = blogitem['modify_date']
        blogitem_pk = blogitem['pk']
        for c in (BlogComment.objects
                  .filter(blogitem=blogitem_pk,
                          add_date__gt=latest_date)
                  .values('add_date')
                  .order_by('-add_date')[:1]):
            latest_date = c['add_date']
        latest_date = latest_date.strftime('%f')
        cache.set(cache_key, latest_date, ONE_MONTH)
    prefix += str(latest_date)

    if not all_comments:
        try:
            redis_increment('plog:hits', request)
        except Exception:
            logging.error('Unable to redis.zincrby', exc_info=True)

        # temporary solution because I can't get Google Analytics API to work
        ua = request.META.get('HTTP_USER_AGENT', '')
        if 'bot' not in ua.lower():
            tasks.increment_blogitem_hit.delay(oid)

    return prefix
Example #6
def home(request, oc=None):
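    """Render the homepage: the latest published posts, optionally category-filtered."""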
    data = {}
    qs = BlogItem.objects.filter(pub_date__lt=utc_now())
    if oc:
        categories = parse_ocs_to_categories(oc)
        cat_q = make_categories_q(categories)
        qs = qs.filter(cat_q)
        data['categories'] = categories

    ## Reasons for not being here
    if request.method == 'HEAD':
        return http.HttpResponse('')

    try:
        redis_increment('homepage:misses', request)
    except Exception:
        logging.error('Unable to redis.zincrby', exc_info=True)

    BATCH_SIZE = 10
    try:
        page = max(1, int(request.GET.get('page', 1))) - 1
    except ValueError:
        raise http.Http404('invalid page value')
    n, m = page * BATCH_SIZE, (page + 1) * BATCH_SIZE
    max_count = qs.count()
    first_post, = qs.order_by('-pub_date')[:1]
    data['first_post_url'] = request.build_absolute_uri(
        reverse('blog_post', args=[first_post.oid])
    )
    if (page + 1) * BATCH_SIZE < max_count:
        data['next_page'] = page + 2
    data['previous_page'] = page

    if n == 0 and not oc:
        # On the first page with no category filtering, load only the first
        # two posts and tell the template to render the remaining ones later.
        data['rest'] = {'from_index': 2, 'to_index': m}
        m = 2
    else:
        data['rest'] = None
    data['blogitems'] = (
        qs
        .prefetch_related('categories')
        .order_by('-pub_date')
    )[n:m]

    return render(request, 'homepage/home.html', data)
Example #7
def home(request, oc=None):
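    """Render the homepage with pagination and optional category (oc) filtering."""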
    context = {}
    qs = BlogItem.objects.filter(pub_date__lt=utc_now())
    if oc is not None:
        if not oc:  # empty string
            return redirect('/', permanent=True)
        categories = parse_ocs_to_categories(oc)
        cat_q = make_categories_q(categories)
        qs = qs.filter(cat_q)
        context['categories'] = categories

    # Reasons for not being here
    if request.method == 'HEAD':
        return http.HttpResponse('')

    try:
        redis_increment('homepage:misses', request)
    except Exception:
        logger.error('Unable to redis.zincrby', exc_info=True)

    BATCH_SIZE = 10
    try:
        page = max(1, int(request.GET.get('page', 1))) - 1
    except ValueError:
        raise http.Http404('invalid page value')
    n, m = page * BATCH_SIZE, (page + 1) * BATCH_SIZE
    max_count = qs.count()
    first_post, = qs.order_by('-pub_date')[:1]
    context['first_post_url'] = request.build_absolute_uri(
        reverse('blog_post', args=[first_post.oid])
    )
    if (page + 1) * BATCH_SIZE < max_count:
        context['next_page'] = page + 2
    context['previous_page'] = page

    context['blogitems'] = (
        qs
        .prefetch_related('categories')
        .order_by('-pub_date')
    )[n:m]

    if page > 0:  # page starts on 0
        context['page_title'] = 'Page {}'.format(page + 1)

    return render(request, 'homepage/home.html', context)
Example #8
def _blog_post_key_prefixer(request):
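    """Build the cache-key prefix for a blog post page (None means no caching)."""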
    if request.method != 'GET':
        return None
    if request.user.is_authenticated():
        return None
    prefix = utils.make_prefix(request.GET)
    if request.path.endswith('/'):
        oid = request.path.split('/')[-2]
    else:
        oid = request.path.split('/')[-1]

    cache_key = 'latest_comment_add_date:%s' % oid
    latest_date = cache.get(cache_key)
    if latest_date is None:
        try:
            blogitem = (
                BlogItem.objects.filter(oid=oid)
                .values('pk', 'modify_date')[0]
            )
        except IndexError:
            # don't bother, something's really wrong
            return None
        latest_date = blogitem['modify_date']
        blogitem_pk = blogitem['pk']
        for c in (BlogComment.objects
                  .filter(blogitem=blogitem_pk,
                          add_date__gt=latest_date)
                  .values('add_date')
                  .order_by('-add_date')[:1]):
            latest_date = c['add_date']
        latest_date = latest_date.strftime('%f')
        cache.set(cache_key, latest_date, ONE_MONTH)
    prefix += str(latest_date)

    try:
        redis_increment('plog:hits', request)
    except Exception:
        logging.error('Unable to redis.zincrby', exc_info=True)
    return prefix
Example #9
def blog_post(request, oid):
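    """Render a single blog post, redirecting legacy ?comments=all URLs to /all-comments."""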

    # temporary debugging
    if request.method == 'GET':
        print "blog_post.MISS (%r, %r, %s)" % (
            request.path,
            request.META.get('QUERY_STRING'),
            timezone.now().isoformat()
        )

    # legacy fix
    if request.GET.get('comments') == 'all':
        if '/all-comments' in request.path:
            return http.HttpResponseBadRequest('invalid URL')
        return redirect(request.path + '/all-comments', permanent=True)

    if oid.endswith('/'):
        oid = oid[:-1]
    try:
        post = BlogItem.objects.get(oid=oid)
    except BlogItem.DoesNotExist:
        try:
            post = BlogItem.objects.get(oid__iexact=oid)
        except BlogItem.DoesNotExist:
            if oid == 'add':
                return redirect(reverse('add_post'))
            raise http.Http404(oid)

    # Reasons for not being here
    if request.method == 'HEAD':
        return http.HttpResponse('')
    elif (
        request.method == 'GET' and
        (request.GET.get('replypath') or request.GET.get('show-comments'))
    ):
        return http.HttpResponsePermanentRedirect(request.path)

    try:
        redis_increment('plog:misses', request)
    except Exception:
        logging.error('Unable to redis.zincrby', exc_info=True)

    # attach a field called `_absolute_url` which depends on the request
    base_url = 'https://' if request.is_secure() else 'http://'
    base_url += RequestSite(request).domain
    post._absolute_url = base_url + reverse('blog_post', args=(post.oid,))

    data = {
        'post': post,
    }
    try:
        data['previous_post'] = post.get_previous_by_pub_date()
    except BlogItem.DoesNotExist:
        data['previous_post'] = None
    try:
        data['next_post'] = post.get_next_by_pub_date(pub_date__lt=utc_now())
    except BlogItem.DoesNotExist:
        data['next_post'] = None

    comments = (
        BlogComment.objects
        .filter(blogitem=post)
        .order_by('add_date')
    )
    if not request.user.is_staff:
        comments = comments.filter(approved=True)

    comments_truncated = False
    if request.GET.get('comments') != 'all':
        comments = comments[:100]
        if post.count_comments() > 100:
            comments_truncated = 100

    all_comments = defaultdict(list)
    for comment in comments:
        all_comments[comment.parent_id].append(comment)
    data['comments_truncated'] = comments_truncated
    data['all_comments'] = all_comments
    data['related'] = get_related_posts(post)
    data['show_buttons'] = not settings.DEBUG
    data['show_fusion_ad'] = not settings.DEBUG
    data['home_url'] = request.build_absolute_uri('/')
    data['page_title'] = post.title
    return render(request, 'plog/post.html', data)