def get_context_data(self, **kwargs):
    """Build the template context for a commented detail page.

    Adds the comment queryset (with author, revision count and, for
    authenticated users, the viewer's own vote), plus lock state and the
    score threshold below which comments are hidden.
    """
    context = super(CommentedDetailView, self).get_context_data(**kwargs)
    queryset = Comment.objects.filter(page=self.get_comment_page())
    context['has_comments'] = queryset.exists()
    context['comment_lock'] = self.is_comment_locked()
    # One JOIN for the author, skip the large profile text column, and
    # count stored versions as the number of revisions per comment.
    queryset = queryset.select_related('author__user').defer('author__about').annotate(revisions=Count('versions'))
    if self.request.user.is_authenticated:
        # Viewer's vote on each comment; 0 when no vote row exists.
        queryset = queryset.annotate(vote_score=Coalesce(RawSQLColumn(CommentVote, 'score'), Value(0)))
        profile = self.request.user.profile
        # Mutates the queryset in place to LEFT JOIN the viewer's vote row.
        unique_together_left_join(queryset, CommentVote, 'comment', 'voter', profile.id)
        # "New user": non-staff who has never fully solved a problem.
        context['is_new_user'] = (not self.request.user.is_staff and
                                  not profile.submission_set.filter(points=F('problem__points')).exists())
    context['comment_list'] = queryset
    context['vote_hide_threshold'] = getattr(settings, 'COMMENT_VOTE_HIDE_THRESHOLD', -5)
    return context
def _add_search_rank(self, qs):
    """Annotates query with search rank value.

    Search rank is calculated as result of `ts_rank` PostgreSQL function,
    which ranks vectors based on the frequency of their matching lexemes.
    Search rank is normalized by dividing it by itself + 1:
    """
    keywords = self.filters.get('keywords')
    if keywords:
        # Rank matches of the user's query against the stored tsvector.
        rank_expression = Func(
            F('search_vector'),
            psql_search.SearchQuery(keywords),
            RANK_NORMALIZATION,
            function=RANK_FUNCTION,
            output_field=db_fields.FloatField(),
        )
    else:
        # No keywords: every row ranks equally at zero.
        rank_expression = Value(0.0, output_field=db_fields.FloatField())
    return qs.annotate(search_rank=rank_expression)
def get(cls, public_key: str, next_nonce: int = None, include_user: bool = True) -> Optional[BaseToken]:
    """Fetch an unexpired token by its public key, optionally recording a nonce.

    Returns None when the key is empty/unknown/expired, the owning user is
    inactive, the nonce is out of range, or the nonce would be a replay
    (already seen, or older than the sliding window).
    """
    if not public_key:
        return None
    try:
        if include_user:
            queryset = cls.objects.select_related('user')
        else:
            queryset = cls.objects
        token: cls = queryset.get(
            public_key=public_key,
            # Still valid: last modified within the token's own duration.
            last_modified__gt=timezone.now() - F('duration'),
            user__is_active=True,
        )
        if next_nonce is None:
            # Touch the row (refreshes auto-now timestamps) without
            # writing any data columns.
            token.save(update_fields=[])
        else:
            if not 0 <= next_nonce < base_settings.NONCE_MAX:
                return None
            if not token.nonce:
                token.nonce = str(next_nonce)
            else:
                nonce_list = [int(n) for n in token.nonce.split(';')]
                if next_nonce in nonce_list:
                    # Replay: this nonce was already consumed.
                    return None
                if next_nonce < nonce_list[0]:
                    # Older than the sliding window's oldest entry —
                    # treated as a replay as well.
                    return None
                nonce_list.append(next_nonce)
                # Keep only the newest NONCE_WINDOW_SIZE nonces.
                nonce_list = sorted(
                    nonce_list)[-base_settings.NONCE_WINDOW_SIZE:]
                token.nonce = ';'.join(str(n) for n in nonce_list)
            token.save(update_fields=['nonce'])
        return token
    except cls.DoesNotExist:
        return None
def get_queryset(self):
    """Public compendium results; with ?latest_version, only the newest
    version per (primary_organism, quant_sf_only) group."""
    public_results = CompendiumResult.objects.filter(result__is_public=True)
    wants_latest = self.request.query_params.get("latest_version", False)
    if not wants_latest:
        return public_results
    # Highest compendium_version among results sharing the same organism
    # and quant-sf flag as the outer row.
    newest = (
        public_results
        .filter(Q(
            primary_organism=OuterRef("primary_organism"),
            quant_sf_only=OuterRef("quant_sf_only"),
        ))
        .order_by("-compendium_version")
        .values("compendium_version")
    )
    return (
        public_results
        .annotate(latest_version=Subquery(newest[:1]))
        .filter(compendium_version=F("latest_version"))
    )
class VersionDailyListView(_DailyVersionStatsListView):
    """View for listing VersionDaily instances."""

    # Surface the parent version's fingerprint on each daily row.
    queryset = (
        VersionDaily.objects.annotate(
            build_fingerprint=F("version__build_fingerprint")
        )
        .all()
        .order_by("date")
    )
    filter_class = VersionDailyFilter
    # Filterable via the related version's fingerprint and release flags.
    filter_fields = (
        "version__build_fingerprint",
        "version__is_official_release",
        "version__is_beta_release",
    )
    serializer_class = VersionDailySerializer
def test_filter_inter_attribute(self):
    # We can filter on attribute relationships on same model obj, e.g.
    # find companies where the number of employees is greater
    # than the number of chairs.
    expected = [
        {"num_chairs": 5, "name": "Example Inc.", "num_employees": 2300},
        {"num_chairs": 1, "name": "Test GmbH", "num_employees": 32},
    ]
    crowded = self.company_query.filter(num_employees__gt=F("num_chairs"))
    self.assertQuerysetEqual(crowded, expected, lambda o: o)
class RadioVersionDailyListView(_DailyVersionStatsListView):
    """View for listing RadioVersionDaily instances."""

    # Surface the parent version's radio version string on each daily row.
    queryset = (
        RadioVersionDaily.objects.annotate(
            radio_version=F("version__radio_version")
        )
        .all()
        .order_by("date")
    )
    filter_class = RadioVersionDailyFilter
    # Filterable via the related version's radio string and release flags.
    filter_fields = (
        "version__radio_version",
        "version__is_official_release",
        "version__is_beta_release",
    )
    serializer_class = RadioVersionDailySerializer
def test_update(self):
    # We can set one field to have the value of another field
    # Make sure we have enough chairs
    self.company_query.update(num_chairs=F("num_employees"))
    expected = [
        {"num_chairs": 2300, "name": "Example Inc.", "num_employees": 2300},
        {"num_chairs": 3, "name": "Foobar Ltd.", "num_employees": 3},
        {"num_chairs": 32, "name": "Test GmbH", "num_employees": 32},
    ]
    self.assertQuerysetEqual(self.company_query, expected, lambda o: o)
def add_goods_view(request):
    """Render the add-goods page; on POST, deduct each raw material's
    inventory for the produced quantity of the selected good."""
    all_goods = Goods.objects.all().filter(user=request.user)
    if request.method == 'POST':
        name = request.POST.get('good')
        quantity = request.POST.get('quantity')
        # NOTE(review): if either field is missing, float(quantity) below
        # raises TypeError — confirm the form guarantees both are posted.
        par_good = Goods.objects.get(good_name=name, user=request.user)
        b = par_good.raw_material.all()
        for product in b:
            reporter = Product.objects.get(name=product, user=request.user)
            amount = Amount.objects.get(goods=par_good, raw_mate=product)
            realq = amount.required_amount
            # SQL-side decrement via F() so concurrent requests don't lose
            # updates; reporter.total_inventory is stale in memory after save.
            reporter.total_inventory = F('total_inventory') - (float(realq) * float(quantity))
            reporter.save()
        # NOTE(review): POST falls through to a plain render — a redirect
        # here would prevent double-submission on browser refresh.
    return render(request, 'products/add.html', context={'all_goods': all_goods})
def test_time_subtraction(self):
    # Subtracting a time Value from a TimeField yields a duration; the
    # fixture precision depends on backend microsecond support.
    if connection.features.supports_microsecond_precision:
        stored = datetime.time(12, 30, 15, 2345)
        expected = datetime.timedelta(
            hours=1, minutes=15, seconds=15, microseconds=2345)
    else:
        stored = datetime.time(12, 30, 15)
        expected = datetime.timedelta(hours=1, minutes=15, seconds=15)
    Time.objects.create(time=stored)
    reference = Value(datetime.time(11, 15, 0),
                      output_field=models.TimeField())
    annotated = Time.objects.annotate(difference=ExpressionWrapper(
        F('time') - reference,
        output_field=models.DurationField(),
    ))
    self.assertEqual(annotated.get().difference, expected)
def search(self):
    """Filter the IP pool by the search phrase and expose owner columns."""
    phrase = self.get_search_phrase()
    qs = IPPool.objects.all()
    if phrase:
        qs = qs.filter(ip__icontains=phrase)
    # NOTE(review): is_used is sourced from is_deleted — confirm that
    # mapping is intentional.
    link = 'fk_user_ip_static_ip__'
    return qs.annotate(
        username=F(link + 'user__username'),
        user_id=F(link + 'user__pk'),
        first_name=F(link + 'user__first_name'),
        expire_date=F(link + 'expire_date'),
        is_free=F(link + 'is_free'),
        is_used=F(link + 'is_deleted'),
    )
def test_durationfield_add(self):
    # F('start') + F('estimated_time') arithmetic works inside filters.
    experiments = Experiment.objects
    zeros = [
        e.name
        for e in experiments.filter(start=F('start') + F('estimated_time'))
    ]
    self.assertEqual(zeros, ['e0'])

    end_less = [
        e.name
        for e in experiments.filter(end__lt=F('start') + F('estimated_time'))
    ]
    self.assertEqual(end_less, ['e2'])

    # Mixing F() arithmetic with a plain timedelta also works.
    shifted = F('start') + F('estimated_time') + datetime.timedelta(hours=1)
    delta_math = [e.name for e in experiments.filter(end__gte=shifted)]
    self.assertEqual(delta_math, ['e4'])
def form_valid(self, form):
    """Persist the comment, bump the blog's comment counter atomically,
    and return the rendered comment fragment as JSON.

    Fixes: the previous unqualified ``save()`` wrote every field of the
    blog (racing with concurrent writers), and left the F() expression on
    the in-memory instance after saving.
    """
    comment = form.save(self.blog, self.get_client_ip(self.request),
                        get_current_site(self.request))
    # SQL-side increment avoids lost updates; write only this column.
    self.blog.comment_count = F('comment_count') + 1
    self.blog.save(update_fields=['comment_count'])
    # Replace the lingering F() expression with the real value so any
    # later reads of comment_count on this instance are sane.
    self.blog.refresh_from_db(fields=['comment_count'])
    self.template_name = self.get_template_name(comment.related)
    # Replies render with a different context key than top-level comments.
    context = {'reply': comment} if comment.related else {'comment': comment}
    html = self.render_to_response(context)
    html.render()
    return JsonResponse(status=1, data={'html': html.content})
def vote(request, question_id):
    """Record a vote for one of the question's choices."""
    question = get_object_or_404(Question, pk=question_id)
    try:
        chosen = question.choice_set.get(pk=request.POST['choice'])
    except (KeyError, Choice.DoesNotExist):
        # Redisplay the question voting form.
        return render(request, 'polls/detail.html', {
            'question': question,
            'error_message': "You didn't select a choice.",
        })
    # SQL-side increment so concurrent votes are not lost.
    chosen.votes = F('votes') + 1
    chosen.save()
    # Always return an HttpResponseRedirect after successfully dealing
    # with POST data. This prevents data from being posted twice if a
    # user hits the Back button.
    return HttpResponseRedirect(
        reverse('polls:results', args=(question.id, )))
def calendar(request):
    """Calendar page; AJAX requests return appointments overlapping the
    requested start/end range that still have capacity.

    Fixes: the content type was misspelled ('applacation/json'), and an
    invalid form only printed its errors before reading incomplete
    cleaned_data — it now returns the errors to the client instead.
    """
    template_name = "calendar.html"
    if request.is_ajax():
        form = RangeForm(data=request.GET)
        if not form.is_valid():
            # Report validation failures instead of proceeding with
            # partially-populated cleaned_data.
            return HttpResponse(
                content=form.errors.as_json(),
                status=400,
                content_type='application/json')
        appointments = Appointment.objects.filter(
            # Overlap test: appointment window intersects [start, end].
            available_end__gte=form.cleaned_data.get('start'),
            available_start__lte=form.cleaned_data.get('end'),
            num_people__lt=F('max_people'),  # still has free seats
        )
        return HttpResponse(
            content=dumps(appointments, cls=EventEncoder),
            content_type='application/json')
    return render(request, template_name)
def test_cte_queryset_with_model_result(self):
    # A CTE built from a model queryset still yields model instances,
    # with the CTE's extra annotation available on each one.
    cte = With(
        Order.objects.annotate(region_parent=F("region__parent_id")),
    )
    orders = cte.queryset().with_cte(cte).order_by("region_id", "amount")
    print(orders.query)

    observed = [
        (order.region_id, order.amount, order.region_parent)
        for order in orders
    ][:5]
    expected = [
        ("earth", 30, "sun"),
        ("earth", 31, "sun"),
        ("earth", 32, "sun"),
        ("earth", 33, "sun"),
        ("mars", 40, "sun"),
    ]
    self.assertEqual(observed, expected)
    self.assertTrue(
        all(isinstance(order, Order) for order in orders),
        repr(list(orders)),
    )
def get(self, request, key='', **kwargs):
    """Show available reservation slots for the reservee encoded in the
    signed key, for the date given in the query string."""
    date = request.GET.get('date')
    obj = check_signature(key)
    if not obj:
        # User-facing message (Chinese): "signature invalid or expired,
        # please make a new reservation".
        messages.error(request, '签名错误或已过期,请重新预约')
        return redirect(reverse('reserve:user_auth'))
    # The person being reserved, decoded from the signed payload.
    reservee_id = obj.get('reservee')
    reservee = User.objects.get(id=reservee_id)
    # Enabled slots on the requested date that are not over capacity.
    # NOTE(review): `ed__lte=F('max')` also admits slots where the booked
    # count equals capacity — confirm `__lt` was not intended.
    rts = ReserveTime.objects.filter(reservee=reservee, date=date, enabled=True, ed__lte=F('max'))
    self.extra_context = {'reservee': reservee, 'rts': rts}
    return super().get(request, key=key, **kwargs)
def test_increment_value(self):
    """
    We can increment a value of all objects in a query set.
    """
    updated = Number.objects.filter(integer__gt=0).update(
        integer=F('integer') + 1)
    self.assertEqual(updated, 2)
    expected = [
        '<Number: -1, -1.000>',
        '<Number: 43, 42.000>',
        '<Number: 1338, 1337.000>',
    ]
    self.assertQuerysetEqual(Number.objects.all(), expected, ordered=False)
def calculate_people(self, use_clickhouse=is_clickhouse_enabled()):
    """Recompute this cohort's membership in Postgres.

    Builds the member query (Postgres or ClickHouse flavour), then runs a
    delete + insert of cohort rows in one transaction. Bookkeeping flags
    (is_calculating / errors_calculating / last_calculation) are only
    maintained on the Postgres path.
    """
    # Static cohorts have a fixed member list; nothing to compute.
    if self.is_static:
        return
    try:
        if not use_clickhouse:
            # Mark the cohort busy so the UI / other workers can tell.
            self.is_calculating = True
            self.save()
            persons_query = self._postgres_persons_query()
        else:
            persons_query = self._clickhouse_persons_query()

        try:
            sql, params = persons_query.distinct("pk").only(
                "pk").query.sql_with_params()
        except EmptyResultSet:
            # No one matches: just clear existing membership rows.
            query = DELETE_QUERY.format(cohort_id=self.pk)
            params = {}
        else:
            # Clear then repopulate membership. The f-string splices the
            # two templates; .format() then fills their placeholders.
            # The replace() injects the cohort id as an extra SELECT
            # column so the INSERT gets (person_id, cohort_id) pairs.
            query = f"""
            {DELETE_QUERY};
            {UPDATE_QUERY};
            """.format(
                cohort_id=self.pk,
                values_query=sql.replace(
                    'FROM "posthog_person"',
                    ', {} FROM "posthog_person"'.format(self.pk),
                    1,
                ),
            )

        cursor = connection.cursor()
        with transaction.atomic():
            cursor.execute(query, params)

        if not use_clickhouse:
            self.last_calculation = timezone.now()
            self.errors_calculating = 0
    except Exception as err:
        if not use_clickhouse:
            # SQL-side increment so concurrent failures all count.
            self.errors_calculating = F("errors_calculating") + 1
        raise err
    finally:
        if not use_clickhouse:
            # Always clear the busy flag, success or failure.
            self.is_calculating = False
            self.save()
def action_report(request, test_id, action_id):
    """Render per-test box-plot statistics (quartiles, IQR, whiskers) for
    one action across all tests that recorded it.

    Fixes: removed a leftover debug ``print``, renamed locals that
    shadowed the ``min``/``max`` builtins, and replaced an unprofessional
    comment.
    """
    action_aggregate_data = list(
        TestActionAggregateData.objects.annotate(
            test_name=F('test__display_name')).filter(
            action_id=action_id).values(
            'test_name', 'data').order_by('-test__start_time'))
    action_data = []
    for entry in action_aggregate_data:
        data = entry['data']
        q3 = data['75%']
        q2 = data['50%']
        if '25%' in data:
            q1 = data['25%']
        else:
            # Older aggregates lack the 25th percentile; mirror the upper
            # quartile around the median so the box stays symmetric.
            q1 = q2 - (q3 - q2)
        iqr = q3 - q1
        lower_whisker = q1 - 1.5 * iqr
        if lower_whisker < 0:
            # Response times cannot be negative; clamp for the chart.
            lower_whisker = 0.1
        upper_whisker = q3 + 1.5 * iqr
        action_data.append({
            "q1": q1,
            "q2": q2,
            "q3": q3,
            "IQR": iqr,
            "LW": lower_whisker,
            "UW": upper_whisker,
            "mean": data['mean'],
            "min": data['min'],
            "max": data['max'],
            "test_name": entry['test_name'],
        })
    return render(
        request, 'url_report.html', {
            'test_id': test_id,
            'action': Action.objects.get(id=action_id),
            'action_data': action_data
        })
def insert_users_by_list(self, items: List[str]) -> None:
    """
    Items can be distinct_id or email
    Important! Does not insert into clickhouse
    """
    # Process the id list in chunks to bound query size.
    batchsize = 1000
    use_clickhouse = is_clickhouse_enabled()
    if use_clickhouse:
        from ee.clickhouse.models.cohort import insert_static_cohort
    try:
        cursor = connection.cursor()
        for i in range(0, len(items), batchsize):
            batch = items[i:i + batchsize]
            # Persons on this team matching the batch's distinct ids,
            # excluding anyone already in this cohort.
            persons_query = (Person.objects.filter(
                team_id=self.team_id).filter(
                    Q(persondistinctid__team_id=self.team_id,
                      persondistinctid__distinct_id__in=batch)).exclude(
                          cohort__id=self.id))
            if use_clickhouse:
                # Mirror the static membership into ClickHouse by uuid.
                insert_static_cohort([
                    p for p in persons_query.values_list("uuid", flat=True)
                ], self.pk, self.team)
            sql, params = persons_query.distinct("pk").only(
                "pk").query.sql_with_params()
            # Inject the cohort id as an extra SELECT column so the
            # INSERT receives (person_id, cohort_id) pairs.
            query = UPDATE_QUERY.format(
                cohort_id=self.pk,
                values_query=sql.replace(
                    'FROM "posthog_person"',
                    ', {} FROM "posthog_person"'.format(self.pk),
                    1,
                ),
            )
            cursor.execute(query, params)
        # Success bookkeeping.
        self.is_calculating = False
        self.last_calculation = timezone.now()
        self.errors_calculating = 0
        self.save()
    except Exception as err:
        if settings.DEBUG:
            raise err
        # In production: record the failure and report it, don't crash.
        self.is_calculating = False
        self.errors_calculating = F("errors_calculating") + 1
        self.save()
        capture_exception(err)
def products_as_excel(self, request, Slug=None):
    """Export the shop's products as an Excel file with Persian column
    headers (the annotation names become the spreadsheet columns)."""
    shop = self.get_object()
    # The Persian identifiers are deliberate: ExcelResponse uses the
    # values() keys as column headers. Mapping (annotation -> field):
    # title, barcode, price, old price, inventory, net weight,
    # packed weight, category, publish status.
    shop_products = shop.ShopProduct.annotate(
        عنوان=F('Title'),
        بارکد=F('barcode'),
        قیمت=F('Price'),
        قیمت_قبل=F('OldPrice'),
        موجودی=F('Inventory'),
        وزن=F('Net_Weight'),
        وزن_با_بسته_بندی=F('Weight_With_Packing'),
        دسته_بندی=F('category_id'),
        وضعیت_انتشار=F('Publish'),
    ).values(
        'عنوان',
        'بارکد',
        'قیمت',
        'قیمت_قبل',
        'وزن',
        'وزن_با_بسته_بندی',
        'موجودی',
        'دسته_بندی',
        'وضعیت_انتشار',
    )
    return ExcelResponse(data=shop_products)
def _expand_money_kwargs(model, kwargs):
    """
    Augments kwargs so that they contain _currency lookups.
    """
    to_append = {}
    for name, value in kwargs.items():
        if isinstance(value, Money):
            # Split a Money literal into its amount plus a matching
            # currency-field lookup.
            clean_name = _get_clean_name(name)
            to_append[name] = value.amount
            to_append[get_currency_field_name(clean_name)] = smart_unicode(
                value.currency)
        if isinstance(value, BaseExpression):
            field = _get_field(model, name)
            if isinstance(field, MoneyField):
                # Comparing one MoneyField to another: also constrain the
                # currencies to match.
                clean_name = _get_clean_name(name)
                # NOTE(review): this branch builds the key by hand while
                # the Money branch uses get_currency_field_name() — confirm
                # both produce the same name for custom currency fields.
                to_append['_'.join([clean_name, 'currency'])] = F(get_currency_field_name(value.name))
    kwargs.update(to_append)
    return kwargs
def search(self, query):
    """Full-text search over content; a blank query matches everything."""
    if not query:
        return self
    tsquery = Func(Value('russian'), Value(query), function='plainto_tsquery')
    # Number of lexemes in the parsed query; 0 means it was all stop-words.
    query_size = Func(tsquery, function='numnode',
                      output_field=models.IntegerField())
    content_vector = Func(Value('russian'), F('content'),
                          function='post_content_to_tsvector')
    # True when the content vector matches the query.
    matched = Separator('@@', content_vector, tsquery,
                        output_field=models.BooleanField())
    annotated = self.annotate(query_size_annotation=query_size,
                              found_annotation=matched)
    # Keep rows that match, or everything when the query parsed to nothing.
    return annotated.filter(
        Q(query_size_annotation=0) | Q(found_annotation=True))
def test_array_agg_charfield_ordering(self):
    # ordering accepts F expressions (with .asc()/.desc()), field-name
    # strings (with '-' prefix), and lists/tuples of either.
    cases = (
        (F('char_field').desc(), ['Foo4', 'Foo3', 'Foo2', 'Foo1']),
        (F('char_field').asc(), ['Foo1', 'Foo2', 'Foo3', 'Foo4']),
        (F('char_field'), ['Foo1', 'Foo2', 'Foo3', 'Foo4']),
        ([F('boolean_field'), F('char_field').desc()],
         ['Foo4', 'Foo2', 'Foo3', 'Foo1']),
        ((F('boolean_field'), F('char_field').desc()),
         ['Foo4', 'Foo2', 'Foo3', 'Foo1']),
        ('char_field', ['Foo1', 'Foo2', 'Foo3', 'Foo4']),
        ('-char_field', ['Foo4', 'Foo3', 'Foo2', 'Foo1']),
    )
    for ordering, expected_output in cases:
        with self.subTest(ordering=ordering,
                          expected_output=expected_output):
            result = AggregateTestModel.objects.aggregate(
                arrayagg=ArrayAgg('char_field', ordering=ordering))
            self.assertEqual(result, {'arrayagg': expected_output})
def increment_completed_steps(self, steps=1):
    """
    Increase the value of :py:attr:`completed_steps` by the given number and save,
    then check for cancellation.

    If cancellation of the task has been requested, a TaskCanceledException will
    be raised to abort execution.  If any special cleanup is required, this
    exception should be caught and handled appropriately.

    This method should be called often enough to provide a useful indication of
    progress, but not so often as to cause undue burden on the database.
    """
    # SQL-side increment via F() so concurrent updates are not lost.
    UserTaskStatus.objects.filter(pk=self.id).update(
        completed_steps=F('completed_steps') + steps, modified=now())
    # Pull back the real values (including any state change made elsewhere).
    self.refresh_from_db(fields={'completed_steps', 'modified', 'state'})
    if self.parent:
        # Container tasks track the sum of their children's progress.
        self.parent.increment_completed_steps(steps)  # pylint: disable=no-member
    # Was a cancellation command recently sent?
    if self.state == self.CANCELED and not self.is_container:
        raise TaskCanceledException
def daily_posted_ratings(date=None):
    """Per-day notification counts split by rating and bookmark status.

    With ``date`` given, restricts the result to that single day.
    """
    notifications = RssNotification.objects.annotate(
        day_date=Trunc("published_parsed", "day", output_field=DateField()),
        plain_field=F("published_parsed"),
    )
    if date:
        notifications = notifications.filter(day_date=date)
    # Group by day, skipping rows with no publish timestamp.
    per_day = (notifications
               .values("day_date")
               .distinct()
               .filter(plain_field__isnull=False)
               .order_by("-day_date"))
    return (per_day
            .annotate(total=Count("id"))
            .annotate(upvoted=Count("id", filter=Q(rating=Ratings.UPVOTED)))
            .annotate(downvoted=Count("id",
                                      filter=Q(rating=Ratings.DOWNVOTED)))
            .annotate(plain=Count("id", filter=Q(rating=Ratings.DEFAULT)))
            .annotate(bookmarked=Count("id", filter=Q(is_bookmarked=True))))
def lead_browser_totals(user: User, date_from: date, date_to: date, projects: list = None, label_type=None, label_values=None, os_groups=None, browser_groups=None, traffic_channels=None):
    """Lead counts per browser-family group for the user's projects within
    the given date range, after the shared lead filters are applied."""
    qs = Lead.objects.filter(pixel__project__user=user)
    qs = _apply_lead_common_filters(qs, date_from, date_to, projects,
                                    label_type, label_values, os_groups,
                                    browser_groups, traffic_channels)
    # Leads whose browser has no group fall into a translated "Unknown".
    return (qs
            .annotate(group_name=Coalesce(F('browser__family__group__name'),
                                          Value(_('Unknown'))))
            .values('group_name')
            .annotate(leads_count=Count('id'))
            .order_by('group_name'))
def project_history(request, project_id):
    '''
    Return whole list of tests with avg and median response times for project_id
    '''
    source = 'default'
    # Per test: count-weighted averages of the JSON metrics, i.e.
    # sum(metric * count) / sum(count) across that test's data rows.
    data = TestData.objects. \
        filter(test__project_id=project_id, test__show=True,
               source=source, data_resolution_id=1).\
        annotate(test_name=F('test__display_name')). \
        values('test_name'). \
        annotate(mean=Sum(RawSQL("((data->>%s)::numeric)", ('avg',)) *
                          RawSQL("((data->>%s)::numeric)", ('count',))) /
                 Sum(RawSQL("((data->>%s)::numeric)", ('count',)))). \
        annotate(median=Sum(RawSQL("((data->>%s)::numeric)", ('median',)) *
                            RawSQL("((data->>%s)::numeric)", ('count',))) /
                 Sum(RawSQL("((data->>%s)::numeric)", ('count',)))). \
        order_by('test__start_time')
    return JsonResponse(list(data), safe=False)
def get_interval_annotation(key: str) -> Dict[str, Any]:
    """Map an interval name to a Trunc annotation over ``timestamp``.

    Returns ``{key: truncation_expression}``; unknown keys fall back to
    ``{"day": ...}``.  Fixes: the local dict was named ``map``, shadowing
    the builtin.
    """
    truncators: Dict[str, Any] = {
        "minute": functions.TruncMinute("timestamp"),
        "hour": functions.TruncHour("timestamp"),
        "day": functions.TruncDay("timestamp"),
        # Shifts timestamps forward one day before truncating — presumably
        # to move the week boundary off Django's Monday default; confirm.
        "week": functions.TruncWeek(
            ExpressionWrapper(F("timestamp") + datetime.timedelta(days=1),
                              output_field=DateTimeField())),
        "month": functions.TruncMonth("timestamp"),
    }
    func = truncators.get(key)
    if func is None:
        return {"day": truncators["day"]}  # default to daily buckets
    return {key: func}