def posts(request):
    """Render the authenticated user's posts with view/comment counts.

    Annotates each post with its number of views and comments via
    correlated subqueries, renders the Markdown body, and humanizes
    large view counts (e.g. 1500 -> '2k').
    """
    if request.method == 'GET':
        md = Markdown()
        posts = Post.objects \
            .filter(creator=request.user) \
            .annotate(views_count=Subquery(
                Post_views.objects
                .filter(post=OuterRef('pk'))
                .values('post')
                .annotate(count=Count('pk'))
                .values('count')
            )) \
            .annotate(comments_count=Subquery(
                Comment.objects
                .filter(post=OuterRef('pk'))
                .values('post')
                .annotate(count=Count('pk'))
                .values('count')
            )) \
            .order_by('deleted', '-created_at')
        # BUG FIX: the original chained .order_by('-created_at').order_by('deleted');
        # in Django each order_by() call REPLACES any previous ordering, so the
        # date ordering was silently dropped. Both keys now live in one call:
        # non-deleted posts first, newest first within each group.
        for post in posts:
            post.body = md.convert(post.body)
            # Subquery yields NULL when a post has no view rows; normalize to 0.
            if post.views_count is None:
                post.views_count = 0
            # Humanize large counts, e.g. 1500 -> '2k'.
            if post.views_count > 1000:
                post.views_count = f'{round(post.views_count / 1000)}k'
        return render(request, 'account/pages/posts.html', {'posts': posts})
def in_expr(self, expr: Expression):
    """Translate a polar ``In`` expression into Django ORM filter constraints.

    Handles three LHS shapes: the special ``_this`` variable, a bound
    ``Variable`` (which becomes a correlated subquery), and a plain value.
    Mutates ``self.filter`` (or a subquery's filter) in place.
    """
    assert expr.operator == "In"
    (left, right) = expr.args
    right_path = dot_path(right)
    if left == "_this" and right_path:
        if right_path[1:]:
            # _this in _this.foo.bar
            # _this in _some_var.foo.bar
            path = self.translate_path_to_field(right_path)
            # path = "__".join(right_path[1:])
            self.filter &= COMPARISONS["Unify"]("pk", path)
        else:
            # _this in _this
            # _this in _some_var
            raise UnsupportedError(
                f"Unsupported partial expression: {expr}")
    elif isinstance(left, Variable) and right_path:
        if right_path[1:]:
            # var in _this.foo.bar
            # var in other_var.foo.bar
            # get the base query for the RHS of the `in`
            # Walk to the root builder: variables are registered on the root.
            root = self
            while root.parent:
                root = root.parent
            base_query = root.get_query_from_var(right_path[0]) or root
            # Left is a variable => apply constraints to the subquery.
            if left not in base_query.variables:
                subquery_path = right_path[1:]
                model = get_model_by_path(base_query.model, subquery_path)
                base_query.variables[left] = FilterBuilder(
                    model, parent=base_query, name=left)
            else:
                # This means we have two paths for the same variable
                # the subquery will handle the intersection
                pass
            # Get the model for the subfield
            subquery = base_query.variables[left]
            # <var> in <partial>
            # => set up <var> as a new filtered query over the model
            # filtered to the entries of right_path
            path = base_query.translate_path_to_field(right_path)
            # F expressions carry the field name; plain strings pass through.
            field = OuterRef(path.name) if isinstance(
                path, F) else OuterRef(path)
            subquery.filter &= COMPARISONS["Unify"]("pk", field)
            # Maybe redundant, but want to be sure
            base_query.variables[left] = subquery
        else:
            # var in _this
            # var in other_var
            raise UnsupportedError(
                f"Unsupported partial expression: {expr}")
    else:
        # <value> in <partial>
        self.filter &= COMPARISONS["Unify"]("__".join(right_path[1:]), left)
def get_queryset(self):
    """Annotate each order with the aggregated price and work duration of its options."""
    # Shared correlated base: options belonging to the outer order row.
    option_rows = OrderOption.objects.filter(order=OuterRef('pk'))

    total_price_sq = Subquery(
        option_rows.values('order')
        .order_by('order')
        .annotate(sum=Coalesce(Sum('price'), 0, output_field=models.DecimalField()))
        .values('sum')
    )
    duration_sq = Subquery(
        option_rows.values('order')
        .order_by('order')
        .annotate(sum=Sum('work_duration'))
        .values('sum')
    )

    base = super().get_queryset()
    return (
        base.select_related('author', 'client', 'branch')
        .prefetch_related('options')
        .annotate(
            price=Coalesce(total_price_sq, 0, output_field=models.DecimalField()),
            work_duration=duration_sq,
        )
    )
def test_order_by_exists(self):
    """Ordering on an Exists() expression sorts authors by whether they have posts."""
    lonely_author = Author.objects.create(name="other author")
    # Descending: authors WITH posts sort first.
    by_posts_desc = Author.objects.order_by(
        Exists(Post.objects.filter(author=OuterRef('pk'))).desc())
    self.assertSequenceEqual(by_posts_desc, [self.author, lonely_author])
    # Ascending: authors WITHOUT posts sort first.
    by_posts_asc = Author.objects.order_by(
        Exists(Post.objects.filter(author=OuterRef('pk'))).asc())
    self.assertSequenceEqual(by_posts_asc, [lonely_author, self.author])
def effective_permissions(self) -> "QuerySet[Permission]":
    """Return (and cache) the permissions this user effectively holds.

    Superusers get the full permission set; everyone else gets the union
    of their directly assigned permissions and those granted through
    group membership, computed with EXISTS subqueries against the
    m2m through tables.
    """
    if self._effective_permissions is None:
        # Start from all permissions; narrow below unless superuser.
        self._effective_permissions = get_permissions()
        if not self.is_superuser:
            UserPermission = User.user_permissions.through
            user_permission_queryset = UserPermission.objects.filter(
                user_id=self.pk).values("permission_id")
            UserGroup = User.groups.through
            GroupPermission = Group.permissions.through
            user_group_queryset = UserGroup.objects.filter(
                user_id=self.pk).values("group_id")
            # Permissions granted to any group the user belongs to.
            group_permission_queryset = GroupPermission.objects.filter(
                Exists(
                    user_group_queryset.filter(group_id=OuterRef(
                        "group_id")))).values("permission_id")
            # Keep permissions held directly OR via a group.
            self._effective_permissions = self._effective_permissions.filter(
                Q(
                    Exists(
                        user_permission_queryset.filter(
                            permission_id=OuterRef("pk")))) | Q(
                    Exists(
                        group_permission_queryset.filter(
                            permission_id=OuterRef("pk")))))
    return self._effective_permissions
def annotate_is_admin(self, user):
    """Annotate each project with whether or not the given user is an admin for it

    Also annotates ``is_editor`` (admin or edit access) for authenticated
    users; a matching collaboration pk casts to a truthy boolean, no match
    (NULL) casts to false.
    """
    if user.is_authenticated:
        # NOTE(review): these Subqueries are not sliced with [:1]; this
        # presumably relies on (user, project) collaborations being unique —
        # confirm, otherwise a multi-row subquery would error at runtime.
        return self.annotate(
            is_admin=Cast(
                Subquery(
                    user.collaboration_set.filter(
                        project_id=OuterRef("pk"), access=CollaboratorAccess.admin
                    )
                    .values("pk")
                    .order_by()
                ),
                output_field=BooleanField(),
            ),
            is_editor=Cast(
                Subquery(
                    user.collaboration_set.filter(
                        project_id=OuterRef("pk"),
                        access__in=[
                            CollaboratorAccess.admin,
                            CollaboratorAccess.edit,
                        ],
                    )
                    .values("pk")
                    .order_by()
                ),
                output_field=BooleanField(),
            ),
        )
    else:
        # NOTE(review): the anonymous branch annotates only is_admin, not
        # is_editor — callers reading is_editor on this branch would fail;
        # verify whether that is intentional.
        return self.annotate(is_admin=Value(False, output_field=BooleanField()))
def get_queryset(self):
    """Build the article queryset; detail views gain prev/next ids and active comments."""
    qs = super().get_queryset()
    if self.action == 'retrieve':
        # Correlated subqueries: the nearest article on either side by date.
        older = Article.objects.filter(
            created_at__lt=OuterRef('created_at')
        ).order_by('-created_at').values('id')[:1]
        newer = Article.objects.filter(
            created_at__gt=OuterRef('created_at')
        ).order_by('created_at').values('id')[:1]
        qs = qs.annotate(
            previous_article_id=Subquery(queryset=older),
            next_article_id=Subquery(queryset=newer),
        ).prefetch_related(
            Prefetch('article_comments',
                     queryset=Comment.objects.filter(active=True))
        )
    # Admin has access to unpublished articles to visually
    # inspect them after frontend rendering.
    if not self.request.user.is_staff:
        qs = qs.filter(status=Article.StatusChoices.PUBLISHED)
    return qs
def _get_assignment_queryset(start, end, *fields): """Формирует queryset к Assignment. Args: start - начальная дата end - конечная дата *fields - поля запроса Returns: queryset """ # Сумма часов по проекту для должности (employment) project_hours = Subquery( ProjectAssignment.objects.filter( project=OuterRef('projectassignments__project'), assignment__employment=OuterRef('employment'), assignment__start__gte=start, assignment__end__lte=end, ).values('project').order_by('project').annotate( sum=Coalesce(Sum('hours'), 0)).values('sum'), output_field=PositiveSmallIntegerField(), ) # Общая сумма часов для должности (employment) employment_hours = Subquery( ProjectAssignment.objects.filter( assignment__employment=OuterRef('employment'), assignment__start__gte=start, assignment__end__lte=end, ).values('assignment__employment').order_by( 'assignment__employment').annotate( sum=Coalesce(Sum('hours'), 0)).values('sum'), output_field=PositiveSmallIntegerField(), ) # Выбираем записи полностью попадающие в интервал queryset = Assignment.objects.filter( projects__isnull=False, start__gte=start, end__lte=end, ).annotate( employee=Concat( F('employment__employee__last_name'), V(' '), F('employment__employee__first_name'), V(' '), F('employment__employee__middle_name'), ), number=F('employment__number'), department=F('employment__staffing__department__name'), position=F('employment__staffing__position__name'), staff_units=F('employment__count'), employment_hours=Coalesce(employment_hours, 0), project=F('projectassignments__project__name'), project_hours=Coalesce(project_hours, 0), ).order_by('employee').values(*fields).distinct() return queryset
def getRentalsForSummary(current_date=None):
    """Summarize rentals ending on ``current_date``.

    Returns a dict with:
      - 'count': totals of rentals ending today and how many are paid,
      - 'total_profit': gross amount over all rental detail lines,
      - 'students': students owning those rentals, annotated with their
        total rent amount.
    """
    result_dict = {}
    # Removed unused locals from the original: created_today_filter and
    # unpaid_filter were built but never applied to any queryset.
    ends_today_filter = Q(end_date__date=current_date)
    paid_filter = Q(paid=True)
    rentals = Rental.objects.filter(
        ends_today_filter).annotate(
        # Duration in hours: microseconds * 1e-6 / 3600.
        # NOTE(review): Value('3600', IntegerField()) passes the string
        # '3600' as an integer field — presumably coerced by the database;
        # confirm, an int literal would be safer.
        rent_duration=ExpressionWrapper(
            (
                (F('end_date') - F('start_date')) * 0.000001 \
                / Value('3600', IntegerField()
                        )),
            output_field=DecimalField(decimal_places=1)
        )
    )
    rent_subquery = rentals.filter(rentaldetail=OuterRef('id'))
    rentals_detail = RentalDetail.objects.filter(
        rental__in=rentals).select_related('rental').annotate(
        rent_duration=Subquery(rent_subquery.values('rent_duration'))
    ).annotate(item_rent_gross_amt=ExpressionWrapper(
        F('rent_duration') * F('price') * F('quantity'),
        output_field=FloatField()))
    rent_detail_subquery = rentals_detail.filter(
        rental_id=OuterRef('id')).values('item_rent_gross_amt')
    # Per-rental gross total across its detail lines.
    test = rent_detail_subquery.values('rental_id').annotate(
        total=Sum('item_rent_gross_amt')).values('total')
    rentals = rentals.prefetch_related(
        Prefetch('rentaldetail_set', rentals_detail)).annotate(
        total_rent_amt=Subquery(test))
    students_with_rentals = Student.objects.filter(
        Q(rental__in=rentals)).prefetch_related(
        Prefetch(
            'rental_set', rentals)).annotate(student_total_rent_amt=Subquery(
                rentals.values('student_id').annotate(
                    total_amt=Sum('total_rent_amt')).values(
                    'total_amt'))).order_by('name').distinct()
    # Aggregates used for overall summary
    rentals_counts = rentals.aggregate(
        ends_today=Count('*'),
        paid=Count('paid', filter=paid_filter),
    )
    total_rentals_value = rentals_detail.aggregate(
        sum=Sum('item_rent_gross_amt'))
    result_dict['count'] = rentals_counts
    result_dict['total_profit'] = total_rentals_value
    result_dict['students'] = students_with_rentals
    return result_dict
def test_nested_outerref_lhs(self):
    """A doubly-nested OuterRef on the LHS resolves against the outermost query."""
    tag = Tag.objects.create(name=self.au1.alias)
    tag.articles.add(self.a1)
    # Innermost query refers two levels up, to the Tag's name.
    matching_authors = Author.objects.filter(alias=OuterRef(OuterRef('name')))
    articles_with_author = Article.objects.annotate(
        author_exists=Exists(matching_authors),
    ).filter(author_exists=True)
    tags = Tag.objects.annotate(
        has_author_alias_match=Exists(articles_with_author),
    )
    self.assertEqual(tags.get(has_author_alias_match=True), tag)
def get_gift_card_lines(line_pks: Iterable[int]):
    """Return the order lines among ``line_pks`` whose variant belongs to a
    gift-card product type.

    Builds a chain of EXISTS subqueries: product type -> product -> variant.
    """
    gift_card_types = ProductType.objects.filter(kind=ProductTypeKind.GIFT_CARD)
    gift_card_products = Product.objects.filter(
        Exists(gift_card_types.filter(pk=OuterRef("product_type_id")))
    )
    gift_card_variants = ProductVariant.objects.filter(
        Exists(gift_card_products.filter(pk=OuterRef("product_id")))
    )
    return OrderLine.objects.filter(id__in=line_pks).filter(
        Exists(gift_card_variants.filter(pk=OuterRef("variant_id")))
    )
def _retrieve_people(self, filter: RetentionFilter, team: Team):
    """Return serialized people belonging to the selected retention cohort.

    Filters the team's events by entity/date/property conditions, keeps the
    people for whom a correlated inner-event query matches, then serializes
    a 100-person page starting at ``filter.offset``.
    """
    period = filter.period
    trunc, fields = self._get_trunc_func("timestamp", period)
    is_first_time_retention = filter.retention_type == RETENTION_FIRST_TIME
    entity_condition, _ = self.get_entity_condition(
        filter.target_entity, "events")
    returning_condition, _ = self.get_entity_condition(
        filter.returning_entity, "first_event_date")
    # When a later interval is selected, match on the returning entity instead
    # of the target entity.
    _entity_condition = returning_condition if filter.selected_interval > 0 else entity_condition
    events = Event.objects.filter(team_id=team.pk).add_person_id(team.pk)
    filtered_events = events.filter(
        filter.recurring_date_filter_Q()).filter(
        properties_to_Q(filter.properties, team_id=team.pk))
    # Correlated per-person subquery. First-time retention additionally
    # requires the person's FIRST matching event date to fall in the
    # reference window; otherwise any event in the window qualifies.
    inner_events = (Event.objects.filter(team_id=team.pk).filter(
        properties_to_Q(filter.properties, team_id=team.pk)).add_person_id(
        team.pk).filter(**{
            "person_id": OuterRef("id")
        }).filter(entity_condition).values("person_id").annotate(
        first_date=Min(trunc)).filter(
        filter.reference_date_filter_Q("first_date")).distinct(
        ) if is_first_time_retention else Event.objects.filter(
        team_id=team.pk).filter(
        filter.reference_date_filter_Q()).filter(
        properties_to_Q(
            filter.properties, team_id=team.pk)).add_person_id(
        team.pk).filter(
        **{
            "person_id": OuterRef("id")
        }).filter(entity_condition))
    # Distinct person ids whose events satisfy the entity condition AND whose
    # person row passes the inner-events EXISTS check.
    filtered_events = (filtered_events.filter(_entity_condition).filter(
        Exists(
            Person.objects.filter(**{
                "id": OuterRef("person_id"),
            }).filter(Exists(inner_events)).only("id"))).values(
        "person_id").distinct()).all()
    # Page of 100 people starting at the requested offset.
    people = Person.objects.filter(
        team=team,
        id__in=[
            p["person_id"]
            for p in filtered_events[filter.offset:filter.offset + 100]
        ],
    )
    people = people.prefetch_related(
        Prefetch("persondistinctid_set", to_attr="distinct_ids_cache"))
    # Imported here (not at module level) — presumably to avoid a circular
    # import with posthog.api; confirm before moving to the top of the file.
    from posthog.api.person import PersonSerializer
    return PersonSerializer(people, many=True).data
def test_prepare_xls_content(self):
    """prepare_xls_content() returns one row per learning unit year with the
    expected column values, including subquery-resolved entity acronyms."""
    # Correlated subqueries: acronym of the entity version current at the
    # academic year's start date.
    entity_requirement = EntityVersion.objects.filter(entity=OuterRef(
        'learning_container_year__requirement_entity'), ).current(
        OuterRef('academic_year__start_date')).values('acronym')[:1]
    entity_allocation = EntityVersion.objects.filter(entity=OuterRef(
        'learning_container_year__allocation_entity'), ).current(
        OuterRef('academic_year__start_date')).values('acronym')[:1]
    qs = LearningUnitYear.objects.filter(
        pk=self.learning_unit_yr_1.pk).annotate(
        entity_requirement=Subquery(entity_requirement),
        entity_allocation=Subquery(entity_allocation),
    )
    result = prepare_xls_content(qs, with_grp=True, with_attributions=True)
    self.assertEqual(len(result), 1)
    luy = annotate_qs(qs).get()
    # Expected spreadsheet row, column by column.
    self.assertListEqual(
        result[0],
        [
            luy.acronym,
            luy.academic_year.__str__(),
            luy.complete_title,
            luy.get_container_type_display(),
            luy.get_subtype_display(),
            luy.entity_requirement,
            '',  # Proposal
            '',  # Proposal state
            luy.credits,
            luy.entity_allocation,
            luy.complete_title_english,
            '',
            luy.get_periodicity_display(),
            yesno(luy.status),
            _get_significant_volume(luy.pm_vol_tot or 0),
            _get_significant_volume(luy.pm_vol_q1 or 0),
            _get_significant_volume(luy.pm_vol_q2 or 0),
            luy.pm_classes or 0,
            _get_significant_volume(luy.pp_vol_tot or 0),
            _get_significant_volume(luy.pp_vol_q1 or 0),
            _get_significant_volume(luy.pp_vol_q2 or 0),
            luy.pp_classes or 0,
            luy.get_quadrimester_display() or '',
            luy.get_session_display() or '',
            luy.language or "",
            "{} ({}) - {} - {}\n".format(
                self.an_education_group_parent.partial_acronym,
                "{0:.2f}".format(
                    luy.credits),
                self.an_education_group_parent.acronym,
                self.an_education_group_parent.title)
        ])
def index(request):
    """Render the home page: all non-deleted posts plus the top 5 recommended posts.

    Both querysets are annotated with view and comment counts via
    correlated subqueries; large view counts are humanized (1500 -> '2k').
    """
    posts = Post.objects \
        .annotate(views_count=Subquery(
            Post_views.objects
            .filter(post=OuterRef('pk'))
            .values('post')
            .annotate(count=Count('pk'))
            .values('count')
        )) \
        .annotate(comments_count=Subquery(
            Comment.objects
            .filter(post=OuterRef('pk'))
            .values('post')
            .annotate(count=Count('pk'))
            .values('count')
        )) \
        .filter(deleted=False) \
        .order_by('-created_at')
    for post in posts:
        post.body = md.convert(post.body)
        # Subquery yields NULL when a post has no view rows; normalize to 0.
        if post.views_count is None:
            post.views_count = 0
        if post.views_count > 1000:
            post.views_count = f'{round(post.views_count / 1000)}k'
    recommend_posts = Post.objects \
        .annotate(views_count=Subquery(
            Post_views.objects
            .filter(post=OuterRef('pk'))
            .values('post')
            .annotate(count=Count('pk'))
            .values('count')
        )) \
        .annotate(comments_count=Subquery(
            Comment.objects
            .filter(post=OuterRef('pk'))
            .values('post')
            .annotate(count=Count('pk'))
            .values('count')
        )) \
        .filter(deleted=False) \
        .order_by('-views_count', '-comments_count')[:5]
    # BUG FIX: the original chained .order_by('-comments_count')
    # .order_by('-views_count'); each order_by() call REPLACES the previous
    # one in Django, so the comment-count ordering was silently discarded.
    # Both keys now live in one call: views first, comments as tiebreaker.
    for recommend_post in recommend_posts:
        if recommend_post.views_count is None:
            recommend_post.views_count = 0
        if recommend_post.views_count > 1000:
            recommend_post.views_count = f'{round(recommend_post.views_count / 1000)}k'
    return render(request, 'main/pages/index.html', {
        'posts': posts,
        'recommend_posts': recommend_posts
    })
def annotate_student_queryset_with_scores(
        queryset: QuerySet[Student]) -> QuerySet[Student]:
    """Helper function for constructing large lists of students

    Selects all important information to prevent a bunch of SQL queries"""
    # Total market score per (user, semester), restricted to markets that
    # have already ended.
    guess_subquery = Guess.objects.filter(
        user=OuterRef('user'),
        market__semester=OuterRef('semester'),
        market__end_date__lt=timezone.now(),
    ).order_by().values(
        'user',
        'market__semester').annotate(total=Sum('score')).values('total')
    return queryset.select_related(
        'user', 'user__profile', 'assistant', 'semester').annotate(
        # Problem-set counts and club/heart sums, all restricted to
        # approved + eligible submissions.
        num_psets=SubqueryCount('user__student__pset',
                                filter=Q(approved=True, eligible=True)),
        clubs_any=SubquerySum('user__student__pset__clubs',
                              filter=Q(approved=True, eligible=True)),
        clubs_D=SubquerySum('user__student__pset__clubs',
                            filter=Q(approved=True, eligible=True,
                                     unit__code__startswith='D')),
        clubs_Z=SubquerySum('user__student__pset__clubs',
                            filter=Q(approved=True, eligible=True,
                                     unit__code__startswith='Z')),
        hearts=SubquerySum('user__student__pset__hours',
                           filter=Q(approved=True, eligible=True)),
        diamonds=SubquerySum(
            'user__achievementunlock__achievement__diamonds'),
        # Eligible pset counts per unit-code prefix.
        pset_B_count=SubqueryCount('pset__pk',
                                   filter=Q(eligible=True,
                                            unit__code__startswith='B')),
        pset_D_count=SubqueryCount('pset__pk',
                                   filter=Q(eligible=True,
                                            unit__code__startswith='D')),
        pset_Z_count=SubqueryCount('pset__pk',
                                   filter=Q(eligible=True,
                                            unit__code__startswith='Z')),
        # Spade sources: quizzes, quests, markets, mocks, suggestions.
        spades_quizzes=SubquerySum('user__student__examattempt__score'),
        spades_quests=SubquerySum('user__student__questcomplete__spades'),
        spades_markets=Subquery(guess_subquery),  # type: ignore
        spades_count_mocks=SubqueryCount('mockcompleted'),
        spades_suggestions=SubqueryCount(
            'user__problemsuggestion__unit__pk',
            filter=Q(resolved=True, eligible=True),
        ),
        # hints definitely not handled here
    )
def safes_with_number_of_transactions_executed_and_master_copy(self):
    """Extend the executed-transactions queryset with each safe's current
    master copy (taken from the SafeStatus row with the highest nonce)."""
    latest_status = SafeStatus.objects.filter(
        address=OuterRef('safe')
    ).order_by('-nonce').values('master_copy')
    annotated = self.safes_with_number_of_transactions_executed().annotate(
        master_copy=Subquery(latest_status[:1])
    )
    return annotated.order_by('-transactions')
def annotate_documentations(klass: Union[Event, LessonPeriod, ExtraLesson],
                            wanted_week: CalendarWeek, pks: List[int]) -> QuerySet:
    """Return an annotated queryset of all provided register objects.

    ``klass`` is the register model *class*; the result is its objects for
    ``pks`` with documentations prefetched and a ``has_documentation`` flag
    (a non-empty LessonDocumentation exists for the wanted week).
    """
    # BUG FIX: ``klass`` is a model class, not an instance, so the original
    # ``isinstance(klass, LessonPeriod)`` was always False and the week-scoped
    # prefetch never applied (note the later code already compares with
    # ``klass == LessonPeriod``). Compare classes directly instead.
    if klass is LessonPeriod:
        prefetch = Prefetch(
            "documentations",
            queryset=LessonDocumentation.objects.filter(week=wanted_week.week,
                                                        year=wanted_week.year),
        )
    else:
        prefetch = Prefetch("documentations")
    instances = klass.objects.prefetch_related(prefetch).filter(pk__in=pks)
    if klass == LessonPeriod:
        instances = instances.annotate_week(wanted_week)
    elif klass in (LessonPeriod, ExtraLesson):
        instances = instances.order_by("period__weekday", "period__period")
    else:
        instances = instances.order_by("period_from__weekday", "period_from__period")
    # Flag objects that have a non-empty documentation for the wanted week.
    instances = instances.annotate(has_documentation=Exists(
        LessonDocumentation.objects.filter(
            ~Q(topic__exact=""),
            week=wanted_week.week,
            year=wanted_week.year,
        ).filter(**{klass.label_: OuterRef("pk")})))
    return instances
class CompanyChartsViewSet(GenericViewSet, ListModelMixin):
    """List chart data points (weighted session averages over time) for the
    requesting user's company."""

    # NOTE(review): the annotation alias ``__session_average`` starts with a
    # double underscore inside a class body, so Python private-name mangling
    # applies to the keyword argument — verify the alias actually matches the
    # ``Sum("__session_average")`` string reference below.
    queryset = Company.objects.annotate(__session_average=Subquery(
        Session.objects.filter(company=OuterRef("id")).annotate(
            value=ExpressionWrapper(Avg("answered_questions__value") *
                                    F("set__weight"),
                                    output_field=DecimalField(
                                        max_digits=3,
                                        decimal_places=2,
                                    ))).values("value"))).annotate(
        # Use the session's end date when it is in the past, else "now".
        date=Case(
            When(sessions__until__lte=Now(), then=F("sessions__until")),
            default=Now()),
        data=ExpressionWrapper(
            Sum("__session_average") * F("sessions__theme__weight"),
            output_field=DecimalField(
                decimal_places=2,
                max_digits=3))).values(
        "data", "date")
    serializer_class = CompanyChartSerializer

    def filter_queryset(self, queryset):
        # Restrict chart data to the company of the authenticated member.
        return queryset.filter(id=self.request.user.member.company_id)
def test_outerref_in_cte_query(self):
    # This query is meant to return the difference between min and max
    # order of each region, through a subquery
    min_and_max = With(
        Order.objects.filter(region=OuterRef("pk")).values(
            'region')  # This is to force group by region_id
        .annotate(
            amount_min=Min("amount"),
            amount_max=Max("amount"),
        ).values('amount_min', 'amount_max'))
    # The CTE is attached inside the Subquery so the OuterRef("pk") above
    # resolves against the outer Region queryset.
    regions = (Region.objects.annotate(
        difference=Subquery(min_and_max.queryset().with_cte(
            min_and_max).annotate(difference=ExpressionWrapper(
                F('amount_max') - F('amount_min'),
                output_field=int_field,
            ), ).values('difference')[:1],
            output_field=IntegerField())).order_by("name"))
    print(regions.query)
    data = [(r.name, r.difference) for r in regions]
    # Regions without orders get a NULL (None) difference.
    self.assertEqual(data, [("bernard's star", None), ('deimos', None),
                            ('earth', 3), ('mars', 2), ('mercury', 2),
                            ('moon', 2), ('phobos', None),
                            ('proxima centauri', 0),
                            ('proxima centauri b', 2), ('sun', 0),
                            ('venus', 3)])
def setUpTestData(cls):
    """Create the shared learning-unit fixture: base data, a teaching
    material, FR/EN achievements, and the requirement-entity subquery."""
    cls._create_luy()
    cls._set_entities()
    cls._create_cms_data()
    cls.teaching_material = TeachingMaterialFactory(
        learning_unit_year=cls.l_unit_yr_1, title='Teaching material title')
    cls.learning_unit_achievement_fr = LearningAchievementFactory(
        learning_unit_year=cls.l_unit_yr_1, language=LanguageFactory(code='FR'))
    cls.learning_unit_achievement_en = LearningAchievementFactory(
        learning_unit_year=cls.l_unit_yr_1, language=LanguageFactory(code='EN'))
    # Acronym of the requirement entity's version current at the academic
    # year's start date (correlated subquery, first match only).
    cls.entity_requirement = EntityVersion.objects.filter(entity=OuterRef(
        'learning_container_year__requirement_entity'), ).current(
        OuterRef('academic_year__start_date')).values('acronym')[:1]
def test_string_agg_array_agg_filter_in_subquery(self):
    """Filtered ArrayAgg/StringAgg aggregates work inside a Subquery
    annotation; rows with no matching related rows aggregate to None."""
    StatTestModel.objects.bulk_create([
        StatTestModel(related_field=self.agg1, int1=0, int2=5),
        StatTestModel(related_field=self.agg1, int1=1, int2=4),
        StatTestModel(related_field=self.agg1, int1=2, int2=3),
    ])
    for aggregate, expected_result in (
        (
            ArrayAgg('stattestmodel__int1',
                     filter=Q(stattestmodel__int2__gt=3)),
            [('Foo1', [0, 1]), ('Foo2', None)],
        ),
        (
            StringAgg(
                Cast('stattestmodel__int2', CharField()),
                delimiter=';',
                filter=Q(stattestmodel__int1__lt=2),
            ),
            [('Foo1', '5;4'), ('Foo2', None)],
        ),
    ):
        with self.subTest(aggregate=aggregate.__class__.__name__):
            # Correlated subquery computing the aggregate for the outer row.
            subquery = AggregateTestModel.objects.filter(
                pk=OuterRef('pk'),
            ).annotate(agg=aggregate).values('agg')
            values = AggregateTestModel.objects.annotate(
                agg=Subquery(subquery),
            ).filter(char_field__in=[
                'Foo1', 'Foo2'
            ], ).order_by('char_field').values_list(
                'char_field', 'agg')
            self.assertEqual(list(values), expected_result)
def test_string_agg_array_agg_ordering_in_subquery(self):
    """Ordered ArrayAgg/StringAgg aggregates keep their ordering when used
    inside a Subquery annotation."""
    # Two related rows per aggregate object; int2 values are chosen so that
    # '-int2' ordering yields ascending int1 values.
    stats = []
    for i, agg in enumerate(
            AggregateTestModel.objects.order_by('char_field')):
        stats.append(StatTestModel(related_field=agg, int1=i, int2=i + 1))
        stats.append(StatTestModel(related_field=agg, int1=i + 1, int2=i))
    StatTestModel.objects.bulk_create(stats)
    for aggregate, expected_result in (
        (
            ArrayAgg('stattestmodel__int1',
                     ordering='-stattestmodel__int2'),
            [('Foo1', [0, 1]), ('Foo2', [1, 2]), ('Foo3', [2, 3]),
             ('Foo4', [3, 4])],
        ),
        (
            StringAgg(
                Cast('stattestmodel__int1', CharField()),
                delimiter=';',
                ordering='-stattestmodel__int2',
            ),
            [('Foo1', '0;1'), ('Foo2', '1;2'), ('Foo3', '2;3'),
             ('Foo4', '3;4')],
        ),
    ):
        with self.subTest(aggregate=aggregate.__class__.__name__):
            # Correlated subquery computing the aggregate for the outer row.
            subquery = AggregateTestModel.objects.filter(
                pk=OuterRef('pk'),
            ).annotate(agg=aggregate).values('agg')
            values = AggregateTestModel.objects.annotate(agg=Subquery(
                subquery), ).order_by('char_field').values_list(
                'char_field', 'agg')
            self.assertEqual(list(values), expected_result)
def can_be_decoded(self): """ Every InternalTx can be decoded if: - Has data - Parent InternalTx is not errored - InternalTx is not errored - EthereumTx is successful (not reverted or out of gas) - CallType is a DELEGATE_CALL (to the master copy contract) - Not already decoded :return: Txs that can be decoded """ # Get errored parents for every InternalTx. We check every InternalTx whose trace_address starts the same as # our InternalTx and is errored parent_errored_query = InternalTx.objects.annotate( child_trace_address=RawSQL('"history_internaltx"."trace_address"', tuple()) # Django bug, so we use RawSQL instead of: child_trace_address=OuterRef('trace_address') ).filter( child_trace_address__startswith=F('trace_address'), ethereum_tx=OuterRef('ethereum_tx'), ).exclude( error=None ) return self.exclude(data=None).annotate( parent_errored=Subquery(parent_errored_query.values('pk')[:1]) ).filter( call_type=EthereumTxCallType.DELEGATE_CALL.value, error=None, ethereum_tx__status=1, decoded_tx=None, parent_errored=None, )
def filterPosts(request):
    """accepts a search form through the request and returns posts that match
    the data from the search form

    On a valid form: renders the matching posts (excluding the requester's
    own) plus the form as a JSON payload. On an invalid form: returns the
    re-rendered form with HTTP 400.
    """
    search_form = BookSearchForm(request.POST)
    if search_form.is_valid():
        posts = search_form.filter().exclude(seller=request.user)
        # NOTE(review): .values() is called AFTER slicing with [:1] here —
        # verify this is supported by the Django version in use; the usual
        # order is .values(...)[:1].
        bookmarks = Bookmark.objects.filter(
            user=request.user, post__id=OuterRef('id'))[:1].values('user__id')
        # bookmarked is truthy for posts the requester has bookmarked.
        posts = posts.annotate(bookmarked=Subquery(bookmarks))
        bookmarked = []
        if hasattr(request.user, 'bookmark'):
            bookmarked = request.user.bookmark.posts.all()
        if_empty = {
            'main': "Sorry! It seems we don't have anybooks that match your search",
            'small': 'Try slightly tweaking or removing some filters to see that works better'
        }
        html = render_to_string(
            'tradeboard/postpopulate.html', {
                'posts': posts,
                'bookmarked': bookmarked,
                'tab': 'Tradeboard',
                'if_empty': if_empty
            }, request)
        form = render_to_string('tradeboard/searchForm.html',
                                {'search_form': search_form}, request)
        return HttpResponse(json.dumps({
            'searchResults': html,
            'form': form
        }), content_type="application/json")
    else:
        # Invalid form: send it back with the validation errors rendered.
        form = render_to_string('tradeboard/searchForm.html',
                                {'search_form': search_form}, request)
        return HttpResponse(form, status=400)
def test_attname_should_not_mask_col_name(self):
    """A recursive CTE column named like a model attname must still resolve
    to the CTE column when referenced via OuterRef in an Exists subquery."""
    def make_regions_cte(cte):
        # Anchor: the 'moon' row; recursive part: each region joined to its
        # parent through the CTE.
        return Region.objects.filter(
            name="moon"
        ).values(
            "name",
            "parent_id",
        ).union(
            cte.join(Region, name=cte.col.parent_id).values(
                "name",
                "parent_id",
            ),
            all=True,
        )
    cte = With.recursive(make_regions_cte)
    regions = (
        Region.objects.all()
        .with_cte(cte)
        .annotate(_ex=Exists(
            cte.queryset()
            .values(value=Value("1", output_field=int_field))
            .filter(name=OuterRef("name"))
        ))
        .filter(_ex=True)
        .order_by("name")
    )
    print(regions.query)
    data = [r.name for r in regions]
    # moon plus its ancestors: earth and sun.
    self.assertEqual(data, ['earth', 'moon', 'sun'])
def annotate_estimated_price(self, content_type):
    """Annotate every Project with the ``total`` of its first matching
    PriceEstimate for the given content type (as ``value``)."""
    estimate_total = PriceEstimate.objects.filter(
        content_type=content_type,
        object_id=OuterRef('pk'),
    ).values('total')[:1]
    return Project.objects.annotate(value=Subquery(estimate_total))
def recipes_in_bookmarks(user, recipes):
    """Map each recipe pk in ``recipes`` to whether ``user`` bookmarked it."""
    user_bookmarks = Bookmark.objects.filter(user__pk=user.pk,
                                             recipe=OuterRef("pk"))
    annotated = recipes.annotate(in_bookmarks=Exists(user_bookmarks))
    pk_to_flag = dict(annotated.values_list("pk", "in_bookmarks"))
    return {"bookmarks": pk_to_flag}
def test_alias_as_subquery(self):
    # This test covers CTEColumnRef.relabeled_clone
    def make_regions_cte(cte):
        # Anchor: children of "level 1"; recursive part: their descendants.
        return KeyPair.objects.filter(
            parent__key="level 1",
        ).annotate(
            rank=F('value'),
        ).union(
            cte.join(
                KeyPair.objects.all().order_by(),
                parent_id=cte.col.id,
            ).annotate(
                rank=F('value'),
            ),
            all=True,
        )
    cte = With.recursive(make_regions_cte)
    children = cte.queryset().with_cte(cte)
    # Second, named CTE holding (id, rank) pairs for direct children only.
    xdups = With(cte.queryset().filter(
        parent__key="level 1",
    ).annotate(
        rank=F('value')
    ).values('id', 'rank'), name='xdups')
    children = children.annotate(
        _exclude=Exists(
            (
                xdups.queryset().filter(
                    id=OuterRef("id"),
                    rank=OuterRef("rank"),
                )
            )
        )
    ).filter(_exclude=True).with_cte(xdups)
    print(children.query)
    query = KeyPair.objects.filter(parent__in=children)
    print(query.query)
    print(children.query)
    self.assertEqual(query.get().key, 'level 3')
    # Tests the case in which children's query was modified since it was
    # used in a subquery to define `query` above.
    self.assertEqual(
        list(c.key for c in children),
        ['level 2', 'level 2']
    )
def test_exact_exists(self):
    """An Exists() expression works both as an annotation and as the RHS of
    an exact filter against that annotation."""
    correlated_articles = Article.objects.filter(pk=OuterRef('pk'))
    matched = Season.objects.annotate(
        pk_exists=Exists(correlated_articles),
    ).filter(
        pk_exists=Exists(correlated_articles),
    )
    self.assertCountEqual(matched, Season.objects.all())
def annotate_quotas(self, quota_name, content_type):
    """Annotate every Project with the ``usage`` of its first matching
    Quota named ``quota_name`` for the given content type (as ``value``)."""
    quota_usage = Quota.objects.filter(
        name=quota_name,
        content_type=content_type,
        object_id=OuterRef('pk'),
    ).values('usage')[:1]
    return Project.objects.annotate(value=Subquery(quota_usage))