# Imports reconstructed for these listings (standard Django, feedgen, mistune).
# The Post/Upvote/Blog models and the clean_text helper are app-local and are
# not defined in this file.
import mistune
from django.contrib.sites.models import Site
from django.db.models import Count, ExpressionWrapper, F, FloatField
from django.db.models.functions import Now
from django.http import Http404, HttpResponse
from django.shortcuts import get_object_or_404, render
from django.utils import timezone
from feedgen.feed import FeedGenerator


def feed(request):
    fg = FeedGenerator()
    fg.id("bearblog")
    fg.author({"name": "Bear Blog", "email": "*****@*****.**"})

    newest = request.GET.get("newest")
    if newest:
        fg.title("Bear Blog Most Recent Posts")
        fg.subtitle("Most recent posts on Bear Blog")
        fg.link(href="https://bearblog.dev/discover/?newest=True", rel="alternate")
        all_posts = (Post.objects.annotate(
            upvote_count=Count("upvote"),
        ).filter(
            publish=True,
            blog__reviewed=True,
            blog__blocked=False,
            show_in_feed=True,
            published_date__lte=timezone.now(),
        ).order_by("-published_date").select_related("blog")[0:posts_per_page])
    else:
        fg.title("Bear Blog Trending Posts")
        fg.subtitle("Trending posts on Bear Blog")
        fg.link(href="https://bearblog.dev/discover/", rel="alternate")
        all_posts = (Post.objects.annotate(
            upvote_count=Count("upvote"),
            # Hacker News style ranking: upvotes decayed by post age in seconds.
            score=ExpressionWrapper(
                ((Count("upvote") - 1) /
                 ((Seconds(Now() - F("published_date"))) + 4)**gravity) * 100000,
                output_field=FloatField(),
            ),
        ).filter(
            publish=True,
            blog__reviewed=True,
            blog__blocked=False,
            show_in_feed=True,
            published_date__lte=timezone.now(),
        ).order_by("-score", "-published_date").select_related(
            "blog").prefetch_related("upvote_set")[0:posts_per_page])

    for post in all_posts:
        fe = fg.add_entry()
        fe.id(f"{post.blog.useful_domain()}/{post.slug}/")
        fe.title(post.title)
        fe.author({"name": post.blog.subdomain, "email": "hidden"})
        fe.link(href=f"{post.blog.useful_domain()}/{post.slug}/")
        fe.content(clean_text(mistune.html(post.content)), type="html")
        fe.published(post.published_date)
        fe.updated(post.published_date)

    # NOTE: the self links below reuse `post` leaked from the loop above, so
    # this branch raises a NameError when there are no posts.
    if request.GET.get("type") == "rss":
        fg.link(href=f"{post.blog.useful_domain()}/feed/?type=rss", rel="self")
        rssfeed = fg.rss_str(pretty=True)
        return HttpResponse(rssfeed, content_type="application/rss+xml")
    else:
        fg.link(href=f"{post.blog.useful_domain()}/feed/", rel="self")
        atomfeed = fg.atom_str(pretty=True)
        return HttpResponse(atomfeed, content_type="application/atom+xml")
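# The listings here also reference names that are never defined in this file:
# `Seconds`, plus module-level `posts_per_page` and `gravity` in the feed view
# and the final discover view. A minimal sketch of what they might look like,
# assuming PostgreSQL; these exact definitions are assumptions, not part of
# the original source.

from django.db.models import Func, IntegerField

posts_per_page = 20  # assumed: the page size the earlier versions hard-code
gravity = 1.8        # assumed: decay exponent for the trending score


class Seconds(Func):
    # Cast an interval expression to whole seconds via EXTRACT(EPOCH FROM ...).
    function = "EXTRACT"
    template = "%(function)s(EPOCH FROM %(expressions)s)"
    output_field = IntegerField()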
def discover(request):
    # Only serve the discover page from the root domain.
    http_host = request.META['HTTP_HOST']
    if not (http_host == 'bearblog.dev' or http_host == 'localhost:8000'):
        raise Http404("No Post matches the given query.")

    if request.method == "POST":
        pk = request.POST.get("pk", "")
        post = get_object_or_404(Post, pk=pk)
        ip_address = client_ip(request)
        # One upvote per IP address per post.
        posts_upvote_dupe = post.upvote_set.filter(ip_address=ip_address)
        if len(posts_upvote_dupe) == 0:
            upvote = Upvote(post=post, ip_address=ip_address)
            upvote.save()

    posts_per_page = 20
    page = 0
    gravity = 1.8
    if request.GET.get('page'):
        page = int(request.GET.get('page'))
    posts_from = page * posts_per_page
    posts_to = (page * posts_per_page) + posts_per_page

    newest = request.GET.get('newest')
    if newest:
        posts = Post.objects.annotate(
            upvote_count=Count('upvote'),
        ).filter(
            publish=True,
            show_in_feed=True,
            published_date__lte=timezone.now(),
        ).order_by('-published_date').select_related('blog')[posts_from:posts_to]
    else:
        posts = Post.objects.annotate(
            upvote_count=Count('upvote'),
            score=ExpressionWrapper(
                ((Count('upvote')) /
                 ((Seconds(Now() - F('published_date'))) + 2)**gravity) * 100000,
                output_field=FloatField()),
        ).filter(
            publish=True,
            show_in_feed=True,
            published_date__lte=timezone.now(),
        ).order_by('-score', '-published_date').select_related(
            'blog')[posts_from:posts_to]

    return render(
        request, 'discover.html', {
            'posts': posts,
            'next_page': page + 1,
            'posts_from': posts_from,
            'gravity': gravity,
            'newest': newest,
        })
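# `client_ip()` is not defined in this listing. A common implementation, and
# an assumption here rather than the original helper, reads X-Forwarded-For
# when the app runs behind a proxy and falls back to REMOTE_ADDR:

def client_ip(request):
    forwarded_for = request.META.get("HTTP_X_FORWARDED_FOR")
    if forwarded_for:
        # The first entry is the originating client; later entries are proxies.
        return forwarded_for.split(",")[0].strip()
    return request.META.get("REMOTE_ADDR")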
def discover(request):
    # 404 unless the host is registered with the sites framework.
    http_host = request.META['HTTP_HOST']
    get_object_or_404(Site, domain=http_host)
    ip_address = client_ip(request)

    if request.method == "POST":
        pk = request.POST.get("pk", "")
        post = get_object_or_404(Post, pk=pk)
        posts_upvote_dupe = post.upvote_set.filter(ip_address=ip_address)
        if len(posts_upvote_dupe) == 0:
            upvote = Upvote(post=post, ip_address=ip_address)
            upvote.save()

    posts_per_page = 20
    page = 0
    gravity = 1.2
    if request.GET.get('page'):
        page = int(request.GET.get('page'))
    posts_from = page * posts_per_page
    posts_to = (page * posts_per_page) + posts_per_page

    newest = request.GET.get('newest')
    if newest:
        posts = Post.objects.annotate(
            upvote_count=Count('upvote'),
        ).filter(
            publish=True,
            show_in_feed=True,
            published_date__lte=timezone.now(),
        ).order_by('-published_date').select_related('blog')[posts_from:posts_to]
    else:
        posts = Post.objects.annotate(
            upvote_count=Count('upvote'),
            score=ExpressionWrapper(
                ((Count('upvote')) /
                 ((Seconds(Now() - F('published_date'))) + 2)**gravity) * 100000,
                output_field=FloatField()),
        ).filter(
            publish=True,
            show_in_feed=True,
            published_date__lte=timezone.now(),
        ).order_by('-score', '-published_date').select_related(
            'blog').prefetch_related('upvote_set')[posts_from:posts_to]

    # Mark the posts this IP address has already upvoted.
    upvoted_posts = []
    for post in posts:
        for upvote in post.upvote_set.all():
            if upvote.ip_address == ip_address:
                upvoted_posts.append(post.pk)

    return render(
        request, 'discover.html', {
            'site': Site.objects.get_current(),
            'posts': posts,
            'next_page': page + 1,
            'posts_from': posts_from,
            'gravity': gravity,
            'newest': newest,
            'upvoted_posts': upvoted_posts,
        })
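# The upvote bookkeeping above implies a model roughly like this; the field
# types are inferred from the call sites and are assumptions:

from django.db import models


class Upvote(models.Model):
    post = models.ForeignKey("Post", on_delete=models.CASCADE)  # gives post.upvote_set
    ip_address = models.GenericIPAddressField()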
def discover(request):
    ip_address = client_ip(request)

    if request.method == "POST":
        pk = sanitise_int(request.POST.get("pk", ""), 7)
        post = get_object_or_404(Post, pk=pk)
        posts_upvote_dupe = post.upvote_set.filter(ip_address=ip_address)
        if len(posts_upvote_dupe) == 0:
            upvote = Upvote(post=post, ip_address=ip_address)
            upvote.save()

    page = 0
    if request.GET.get("page", 0):
        page = sanitise_int(request.GET.get("page"), 7)
    posts_from = page * posts_per_page
    posts_to = (page * posts_per_page) + posts_per_page

    newest = request.GET.get("newest")
    if newest:
        posts = (Post.objects.annotate(
            upvote_count=Count("upvote"),
        ).filter(
            publish=True,
            blog__reviewed=True,
            blog__blocked=False,
            show_in_feed=True,
            published_date__lte=timezone.now(),
        ).order_by("-published_date").select_related("blog")
            [posts_from:posts_to])
    else:
        posts = (Post.objects.annotate(
            upvote_count=Count("upvote"),
            score=ExpressionWrapper(
                ((Count("upvote") - 1) /
                 ((Seconds(Now() - F("published_date"))) + 4)**gravity) * 100000,
                output_field=FloatField(),
            ),
        ).filter(
            publish=True,
            blog__reviewed=True,
            blog__blocked=False,
            show_in_feed=True,
            published_date__lte=timezone.now(),
        ).order_by("-score", "-published_date").select_related(
            "blog").prefetch_related("upvote_set")[posts_from:posts_to])

    upvoted_posts = []
    for post in posts:
        for upvote in post.upvote_set.all():
            if upvote.ip_address == ip_address:
                upvoted_posts.append(post.pk)

    return render(
        request,
        "discover.html",
        {
            "site": Site.objects.get_current(),
            "posts": posts,
            "previous_page": page - 1,
            "next_page": page + 1,
            "posts_from": posts_from,
            "gravity": gravity,
            "newest": newest,
            "upvoted_posts": upvoted_posts,
        },
    )
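# `sanitise_int()` is referenced but not defined here. Judging by the call
# sites, it parses untrusted input as an integer with a capped number of
# digits (7 above). A plausible sketch; raising Http404 on bad input is an
# assumption:

from django.http import Http404


def sanitise_int(value, max_digits):
    value = str(value).strip()
    if not value.isdigit() or len(value) > max_digits:
        raise Http404("Invalid number.")
    return int(value)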
import copy

import dateutil.parser
import pytz
from django.db.models import Case, F, Func, IntegerField, Q, Sum, When
from django.db.models.functions import Extract


def resource_hours2(resource_email, sDate, eDate):
    total_meetings = 0
    local_tz = pytz.timezone('Asia/Kolkata')

    # Non-recurring events inside the requested window, matched either on
    # their datetime range or on their all-day date range.
    queryset = Resources.objects.get(
        resourceEmail=resource_email).events.exclude(
            recurr__isnull=False).filter(
                Q(start_dateTime__gte=sDate, end_dateTime__lte=eDate) |
                Q(start_date__gte=dateutil.parser.parse(sDate).astimezone(
                      local_tz).date(),
                  end_date__lte=dateutil.parser.parse(eDate).astimezone(
                      local_tz).date()))

    # Timed events contribute their real duration; all-day events contribute
    # 8 working hours per day.
    total_time = queryset.aggregate(time=Sum(
        Case(
            When(start_date__isnull=True,
                 then=Seconds(F('end_dateTime') - F('start_dateTime'))),
            When(start_date__isnull=False,
                 then=Extract(
                     Func(F('end_date'), F('start_date'), function='age'),
                     'day') * 8 * 3600),
            output_field=IntegerField()),
    )).get('time', 0)
    if total_time is None:
        total_time = 0
    total_meetings = total_meetings + queryset.count()

    total_time2 = 0
    filtered_recurring = Resources.objects.get(
        resourceEmail=resource_email).events.exclude(recurr__isnull=True)
    # .filter(
    #     Q(start_dateTime__hour__gt=dateutil.parser.parse(sDate).hour) | (
    #         Q(start_dateTime__hour=dateutil.parser.parse(sDate).hour) & Q(
    #             start_dateTime__minute__gte=dateutil.parser.parse(sDate).minute)))

    for meeting in filtered_recurring:
        # Expand the recurrence rule over the requested window.
        recurrences = meeting.recurr.between(
            dateutil.parser.parse(sDate).astimezone(local_tz).replace(
                hour=0, minute=0, second=0, microsecond=0, tzinfo=None),
            dateutil.parser.parse(eDate).astimezone(local_tz).replace(
                hour=0, minute=0, second=0, microsecond=0, tzinfo=None),
            dtstart=meeting.start_dateTime.astimezone(local_tz).replace(
                hour=0, minute=0, second=0, microsecond=0, tzinfo=None),
            inc=True)

        # Drop occurrences that were rescheduled or fall outside the window.
        temp_recurrences = copy.deepcopy(recurrences)
        for element in temp_recurrences:
            if element.date() in meeting.changed_dates:
                recurrences.remove(element)
            if element < dateutil.parser.parse(sDate).replace(tzinfo=None):
                try:
                    recurrences.remove(element)
                except ValueError:
                    pass  # already removed above
            if element.replace(
                    hour=meeting.start_dateTime.hour,
                    minute=meeting.start_dateTime.minute,
                    second=meeting.start_dateTime.second,
                    tzinfo=None) >= dateutil.parser.parse(eDate).replace(
                        tzinfo=None):
                try:
                    recurrences.remove(element)
                except ValueError:
                    pass  # already removed above

        # Each remaining occurrence contributes the meeting's full duration.
        diff = meeting.end_dateTime - meeting.start_dateTime
        net_time = diff.days * 24 * 3600 + diff.seconds
        total_time2 = total_time2 + len(recurrences) * net_time
        total_meetings = total_meetings + len(recurrences)

    total_time = total_time + total_time2
    total_time = round(total_time / 3600, 2)  # seconds -> hours
    return round(total_time, 2), total_meetings
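# Sanity check: the manual duration arithmetic in resource_hours2() matches
# timedelta.total_seconds() for non-negative intervals, so the two forms are
# interchangeable here.

from datetime import timedelta

diff = timedelta(days=1, hours=2, minutes=30)
assert diff.days * 24 * 3600 + diff.seconds == int(diff.total_seconds()) == 95400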