def delete_errored_internal_txs(apps, schema_editor):
    """
    Previously all traces were stored, even the errored ones. This method will
    delete old errored traces still present on the database.

    Deletes every InternalTx that has an errored ancestor trace (same
    ethereum_tx, trace_address is a prefix of the child's trace_address).

    :param apps: historical app registry provided by the migration framework
    :param schema_editor: unused, required by the data-migration signature
    :return: None
    """
    # Use the historical model, not the live one, as migrations require.
    InternalTx = apps.get_model("history", "InternalTx")
    # Correlated queryset: for an outer InternalTx row, find rows whose
    # trace_address prefixes it (i.e. its ancestors) within the same tx
    # and that carry an error.
    parent_errored_query = (
        InternalTx.objects.annotate(
            child_trace_address=RawSQL('"history_internaltx"."trace_address"', tuple())
            # Django bug, so we use RawSQL instead of: child_trace_address=OuterRef('trace_address')
        ).filter(
            child_trace_address__startswith=F("trace_address"),
            ethereum_tx=OuterRef("ethereum_tx"),
        ).exclude(error=None))
    # Delete rows where at least one errored ancestor exists (parent_errored
    # is non-null when the subquery returns a pk).
    InternalTx.objects.annotate(parent_errored=Subquery(
        parent_errored_query.values("pk")[:1])).exclude(
        parent_errored=None,
    ).delete()
def test_aggregation_subquery_annotation_values(self):
    """
    Subquery annotations and external aliases are excluded from the GROUP BY
    if they are not selected.
    """
    # Correlated subquery: the first (youngest-ordered) author id whose age
    # matches the outer book's contact-friend age; only used for filtering,
    # never selected, so it must not leak into GROUP BY.
    books_qs = Book.objects.annotate(
        first_author_the_same_age=Subquery(
            Author.objects.filter(
                age=OuterRef('contact__friends__age'),
            ).order_by('age').values('id')[:1],
        )
    ).filter(
        publisher=self.p1,
        first_author_the_same_age__isnull=False,
    ).annotate(
        min_age=Min('contact__friends__age'),
    ).values('name', 'min_age').order_by('name')
    self.assertEqual(list(books_qs), [
        {'name': 'Practical Django Projects', 'min_age': 34},
        {
            'name': 'The Definitive Guide to Django: Web Development Done Right',
            'min_age': 29,
        },
    ])
def delete_old_export_files():
    """Delete stale export files: their stored content and DB rows.

    An ExportFile qualifies when it has no related events at all, or when it
    has at least one event older than ``settings.EXPORT_FILES_TIMEDELTA``.
    """
    now = timezone.now()
    # Events older than the configured retention window.
    events = ExportEvent.objects.filter(
        date__lte=now - settings.EXPORT_FILES_TIMEDELTA,
    ).values("export_file_id")
    export_files = ExportFile.objects.filter(
        Q(events__isnull=True)
        | Exists(events.filter(export_file_id=OuterRef("id"))))
    # FIX: use .exists() instead of truthiness — bool(queryset) fetches and
    # caches every row just to check emptiness; EXISTS is a cheap query.
    if not export_files.exists():
        return
    paths_to_delete = list(export_files.values_list("content_file", flat=True))
    counter = 0
    for path in paths_to_delete:
        # Guard against empty paths and files already gone from storage.
        if path and default_storage.exists(path):
            default_storage.delete(path)
            counter += 1
    export_files.delete()
    task_logger.debug("Delete %s export files.", counter)
def test_obj_subquery_lookup(self):
    """Key transforms (``value__a``) work on an HStore Subquery annotation."""
    # Self-correlated subquery: annotate each row with its own hstore field,
    # then filter on a key of the annotated value.
    qs = HStoreModel.objects.annotate(value=Subquery(
        HStoreModel.objects.filter(
            pk=OuterRef('pk')).values('field')),
    ).filter(value__a='b')
    self.assertSequenceEqual(qs, self.objs[:2])
def get_queryset(self):
    """Build the partner-relation queryset for the public API.

    Filters relations for the current API academic year, prefetches the
    partnership graph (medias, entities, years, agreements) and annotates
    validity/funding data via correlated subqueries.
    """
    config = PartnershipConfiguration.get_configuration()
    academic_year = config.get_current_academic_year_for_api()
    self.academic_year = academic_year
    # Renders an academic year as "YYYY-YY", e.g. 2020-21 (Right(...) keeps
    # the last two digits of year + 1).
    academic_year_repr = Concat(
        Cast(F('academic_year__year'), models.CharField()),
        Value('-'),
        Right(
            Cast(F('academic_year__year') + 1,
                 output_field=models.CharField()), 2),
    )
    return (PartnershipPartnerRelation.objects.filter_for_api(
        academic_year
    ).annotate_partner_address(
        'country__continent__name',
        'country__iso_code',
        'country__name',
        'country_id',
        'city',
        'location',
    ).select_related(
        'entity__partnerentity',
        'entity__organization',
    ).prefetch_related(
        # Partnership with its contacts, missions and partner entities.
        Prefetch(
            'partnership',
            queryset=Partnership.objects.add_acronyms().select_related(
                'subtype',
                'supervisor',
            ).prefetch_related(
                'contacts',
                'missions',
                Prefetch(
                    'partner_entities',
                    queryset=EntityProxy.objects.with_partner_info(),
                ))),
        # Only medias visible on the portal.
        Prefetch(
            'partnership__medias',
            queryset=Media.objects.select_related('type').filter(
                is_visible_in_portal=True),
        ),
        Prefetch(
            'entity__organization__partner',
            queryset=(Partner.objects.annotate_address(
                'country__iso_code',
                'country__name',
                'city',
            ).annotate_website(
            ).select_related('organization').prefetch_related(
                Prefetch(
                    'medias',
                    queryset=Media.objects.filter(
                        is_visible_in_portal=True).select_related('type')),
            ).annotate_partnerships_count()),
            to_attr='partner_prefetched',
        ),
        Prefetch('partnership__ucl_entity',
                 queryset=EntityProxy.objects.select_related(
                     'uclmanagement_entity__academic_responsible',
                     'uclmanagement_entity__administrative_responsible',
                     'uclmanagement_entity__contact_out_person',
                     'uclmanagement_entity__contact_in_person',
                 ).with_title().with_acronym()),
        # Only the year matching the current API academic year.
        Prefetch(
            'partnership__years',
            queryset=(PartnershipYear.objects.select_related(
                'academic_year',
                'funding_source',
                'funding_program',
                'funding_type',
            ).prefetch_related(
                Prefetch('entities',
                         queryset=EntityProxy.objects.with_title(
                         ).with_acronym()),
                'education_fields',
                'education_levels',
                'offers',
            ).filter(academic_year=academic_year)),
            to_attr='current_year_for_api',
        ),
        # Validated agreements covering the current academic year.
        Prefetch(
            'partnership__agreements',
            queryset=(PartnershipAgreement.objects.select_related(
                'media', 'end_academic_year').filter(
                    status=AgreementStatus.VALIDATED.name).filter(
                        start_academic_year__year__lte=academic_year.year,
                        end_academic_year__year__gte=academic_year.year,
                    )),
            to_attr='valid_current_agreements',
        ),
    ).annotate(
        # Latest year covered by a validated agreement of this partnership.
        validity_end_year=Subquery(
            AcademicYear.objects.filter(
                partnership_agreements_end__partnership=OuterRef(
                    'partnership_id'),
                partnership_agreements_end__status=AgreementStatus.
                VALIDATED.name).order_by('-end_date').values('year')[:1]),
        # Start date of the agreement currently in force.
        agreement_start=Subquery(
            PartnershipAgreement.objects.filter(
                partnership=OuterRef('partnership_id'),
                start_date__lte=Now(),
                end_date__gte=Now(),
            ).order_by('-end_date').values('start_date')[:1]),
        # First and last partnership years rendered as "YYYY-YY".
        start_year=Subquery(
            PartnershipYear.objects.filter(
                partnership=OuterRef('partnership_id'),
            ).annotate(
                name=academic_year_repr).order_by(
                'academic_year').values('name')[:1]),
        end_year=Subquery(
            PartnershipYear.objects.filter(
                partnership=OuterRef('partnership_id'),
            ).annotate(
                name=academic_year_repr).order_by(
                '-academic_year').values('name')[:1]),
        agreement_end=Subquery(
            PartnershipAgreement.objects.filter(
                partnership=OuterRef('partnership_id'),
                start_date__lte=Now(),
                end_date__gte=Now(),
            ).order_by('-end_date').values('end_date')[:1]),
    ).annotate(
        # "current_year-<validity_end_year + 1>" as a display string.
        validity_years=Concat(Value(academic_year.year), Value('-'),
                              F('validity_end_year') + 1,
                              output_field=models.CharField()),
        agreement_status=Subquery(
            PartnershipAgreement.objects.filter(
                partnership=OuterRef('partnership_id'),
                start_academic_year__year__lte=academic_year.year,
                end_academic_year__year__gte=academic_year.year,
            ).order_by('-end_academic_year__year').values('status')[:1]),
        # Funding is resolved per country for the current academic year.
        funding_name=Subquery(
            Financing.objects.filter(
                academic_year=academic_year,
                countries=OuterRef('country_id'),
            ).values('type__name')[:1]),
        funding_url=Subquery(
            Financing.objects.filter(
                academic_year=academic_year,
                countries=OuterRef('country_id'),
            ).values('type__url')[:1]),
    ).distinct('pk').order_by('pk'))
def pset_subquery(student: Student) -> Exists:
    """Return an EXISTS clause: does *student* have a PSet for the outer unit?"""
    matching_psets = PSet.objects.filter(student=student, unit=OuterRef('pk'))
    return Exists(matching_psets)
def post(request, post_id):
    """Handle a single blog post.

    GET renders the post (with view/comment counts, markdown-converted body
    and comments) and records one view per session. DELETE soft-deletes the
    post; PUT updates its ``deleted`` flag from a JSON body. DELETE/PUT
    require an authenticated user (403 otherwise).
    """
    if request.method == 'GET':
        post = None
        try:
            post = Post.objects \
                .annotate(views_count=Subquery(
                    Post_views.objects
                    .filter(post=OuterRef('pk'))
                    .values('post')
                    .annotate(count=Count('pk'))
                    .values('count')
                )) \
                .annotate(comments_count=Subquery(
                    Comment.objects
                    .filter(post=OuterRef('pk'))
                    .values('post')
                    .annotate(count=Count('pk'))
                    .values('count')
                )) \
                .get(pk=post_id)
        except ObjectDoesNotExist:
            # FIX: Http404 is an exception, not a response — returning it
            # produced a broken 200; it must be raised so Django renders 404.
            raise Http404()
        post.body = md.convert(post.body)
        session_key = request.session.session_key
        if post.views_count is None:
            post.views_count = 0
        # Display large view counts as "Nk".
        if post.views_count > 1000:
            post.views_count = f'{round(post.views_count / 1000)}k'
        comments = Comment.objects.filter(post_id=post_id)
        if comments is not None and len(comments) > 0:
            for comment in comments:
                comment.text = md.convert(comment.text)
            setattr(post, 'comments', comments)
        else:
            setattr(post, 'comments', [])
        # Record at most one view per session.
        if session_key is not None:
            existsView = None
            try:
                existsView = Post_views.objects.get(session_id=session_key,
                                                    post_id=post_id)
            except ObjectDoesNotExist:
                existsView = None
            if existsView is None or existsView.id is None:
                Post_views.objects.create(
                    session=Session.objects.get(session_key=session_key),
                    post=post)
        return render(request, 'main/pages/post.html', {'post': post})
    user = request.user
    if user is None or user.id is None:
        return HttpResponseForbidden()
    if request.method == 'DELETE':
        try:
            post = Post.objects.get(pk=post_id)
            if post.id is None:
                return HttpResponseNotFound()
            post.deleted = True
            post.save()
            return HttpResponse(status=200)
        except ObjectDoesNotExist:
            return HttpResponseNotFound()
    if request.method == 'PUT':
        try:
            post = Post.objects.get(pk=post_id)
            if post.id is None:
                return HttpResponseNotFound()
            body = json.loads(request.body)
            post.deleted = body.get('deleted', post.deleted)
            post.save()
            return HttpResponse(status=200)
        except (ObjectDoesNotExist, ValueError):
            # FIX: narrowed from a bare ``except``; only a missing post or
            # malformed JSON (JSONDecodeError is a ValueError) means 404 —
            # other failures should surface instead of being swallowed.
            return HttpResponseNotFound()
def test_annotation(self):
    """Test whether the soft deleted model can be found in annotation subqueries."""
    # Exists() correlates QuerySetModel rows to the outer pk.
    queryset = OtherModel.objects.annotate(has_related=Exists(
        QuerySetModel.objects.filter(other_id=OuterRef('pk'))))
    self.assertEqual(queryset.count(), 1)
    # False here — presumably the related row is soft-deleted and excluded
    # by the default manager (per the docstring); confirm against fixtures.
    self.assertFalse(queryset[0].has_related)
def test_exact_exists(self):
    """``Exists`` works both as an annotation and as an exact filter value."""
    qs = Article.objects.filter(pk=OuterRef('pk'))
    # The same Exists expression is used on both sides of the filter.
    seasons = Season.objects.annotate(pk_exists=Exists(qs), ).filter(
        pk_exists=Exists(qs),
    )
    self.assertCountEqual(seasons, Season.objects.all())
def annotate_liked(self, user):
    """Annotate each row with ``liked``: whether *user* has liked that post."""
    likes_by_user = Like.objects.filter(
        user=user.id,
        post_id=OuterRef("id"),
    ).only("id")
    return self.annotate(liked=Exists(likes_by_user))
def test_annotate_subquery(self):
    # NOTE(review): this only builds the correlated queryset and asserts
    # nothing — looks truncated compared to the sibling test that wraps it
    # in Subquery() and checks caching; TODO confirm intent.
    tests = Test.objects.filter(owner=OuterRef('pk')).values('name')
def setUpTestData(cls):
    """Create the shared fixture graph for learning-unit tests.

    Builds an academic year with learning units, proposals, an education
    group hierarchy, tutors with attribution charges, and two correlated
    EntityVersion subqueries (requirement/allocation acronyms) reused by
    the tests.
    """
    cls.academic_year = AcademicYearFactory(year=2017)
    cls.learning_container_luy1 = LearningContainerYearFactory(
        academic_year=cls.academic_year)
    cls.learning_unit_yr_1 = LearningUnitYearFactory(
        academic_year=cls.academic_year,
        learning_container_year=cls.learning_container_luy1,
        credits=50)
    cls.learning_unit_yr_2 = LearningUnitYearFactory()
    cls.proposal_creation_1 = ProposalLearningUnitFactory(
        state=proposal_state.ProposalState.ACCEPTED.name,
        type=proposal_type.ProposalType.CREATION.name,
    )
    cls.proposal_creation_2 = ProposalLearningUnitFactory(
        state=proposal_state.ProposalState.ACCEPTED.name,
        type=proposal_type.ProposalType.CREATION.name,
    )
    # Education group hierarchy: parent training -> child group -> unit 1.
    direct_parent_type = EducationGroupTypeFactory(
        name='Bachelor', category=education_group_categories.TRAINING)
    cls.an_education_group_parent = EducationGroupYearFactory(
        academic_year=cls.academic_year,
        education_group_type=direct_parent_type,
        acronym=ROOT_ACRONYM)
    cls.group_element_child = GroupElementYearFactory(
        parent=cls.an_education_group_parent,
        child_branch=None,
        child_leaf=cls.learning_unit_yr_1)
    cls.an_education_group = EducationGroupYearFactory(
        academic_year=cls.academic_year,
        acronym=PARENT_ACRONYM,
        title=PARENT_TITLE,
        partial_acronym=PARENT_PARTIAL_ACRONYM)
    cls.group_element_child2 = GroupElementYearFactory(
        parent=cls.an_education_group,
        child_branch=cls.group_element_child.parent,
    )
    cls.old_academic_year = AcademicYearFactory(
        year=datetime.date.today().year - 2)
    cls.current_academic_year = AcademicYearFactory(
        year=datetime.date.today().year)
    generatorContainer = GenerateContainer(cls.old_academic_year,
                                           cls.current_academic_year)
    cls.learning_unit_year_with_entities = generatorContainer.generated_container_years[
        0].learning_unit_year_full
    # Four faculty entity versions; [0]/[1] serve as requirement/allocation.
    entities = [
        EntityVersionFactory(
            start_date=datetime.datetime(1900, 1, 1),
            end_date=None,
            entity_type=entity_type.FACULTY,
            entity__organization__type=organization_type.MAIN)
        for _ in range(4)
    ]
    cls.learning_unit_year_with_entities.entity_requirement = entities[0]
    cls.learning_unit_year_with_entities.entity_allocation = entities[1]
    cls.proposal_creation_3 = ProposalLearningUnitFactory(
        learning_unit_year=cls.learning_unit_year_with_entities,
        state=proposal_state.ProposalState.ACCEPTED.name,
        type=proposal_type.ProposalType.CREATION.name,
    )
    cls.learning_container_luy = LearningContainerYearFactory(
        academic_year=cls.academic_year)
    cls.luy_with_attribution = LearningUnitYearFactory(
        academic_year=cls.academic_year,
        learning_container_year=cls.learning_container_luy,
        periodicity=learning_unit_year_periodicity.ANNUAL,
        status=True,
        language=None,
    )
    cls.luy_with_attribution.entity_requirement = entities[0]
    cls.luy_with_attribution.entity_allocation = entities[1]
    # Lecturing + practical components, each 15h total over 1 class.
    cls.component_lecturing = LearningComponentYearFactory(
        learning_unit_year=cls.luy_with_attribution,
        type=learning_component_year_type.LECTURING,
        hourly_volume_total_annual=15,
        hourly_volume_partial_q1=10,
        hourly_volume_partial_q2=5,
        planned_classes=1)
    cls.component_practical = LearningComponentYearFactory(
        learning_unit_year=cls.luy_with_attribution,
        type=learning_component_year_type.PRACTICAL_EXERCISES,
        hourly_volume_total_annual=15,
        hourly_volume_partial_q1=10,
        hourly_volume_partial_q2=5,
        planned_classes=1)
    # Tutor 1 (coordinator) charged on both components.
    a_person_tutor_1 = PersonFactory(last_name='Dupuis',
                                     first_name='Tom',
                                     email="*****@*****.**")
    cls.a_tutor_1 = TutorFactory(person=a_person_tutor_1)
    cls.an_attribution_1 = AttributionNewFactory(tutor=cls.a_tutor_1,
                                                 start_year=2017,
                                                 function=COORDINATOR)
    cls.attribution_charge_new_lecturing_1 = AttributionChargeNewFactory(
        learning_component_year=cls.component_lecturing,
        attribution=cls.an_attribution_1,
        allocation_charge=15.0)
    cls.attribution_charge_new_practical_1 = AttributionChargeNewFactory(
        learning_component_year=cls.component_practical,
        attribution=cls.an_attribution_1,
        allocation_charge=5.0)
    # Tutor 2 with the same charge layout but no coordinator function.
    cls.a_tutor_2 = TutorFactory(
        person=PersonFactory(last_name='Maréchal', first_name='Didier'))
    cls.an_attribution_2 = AttributionNewFactory(tutor=cls.a_tutor_2,
                                                 start_year=2017)
    cls.attribution_charge_new_lecturing_2 = AttributionChargeNewFactory(
        learning_component_year=cls.component_lecturing,
        attribution=cls.an_attribution_2,
        allocation_charge=15.0)
    cls.attribution_charge_new_practical_2 = AttributionChargeNewFactory(
        learning_component_year=cls.component_practical,
        attribution=cls.an_attribution_2,
        allocation_charge=5.0)
    # Correlated subqueries resolving entity acronyms as of the outer
    # row's academic-year start date.
    cls.entity_requirement = EntityVersion.objects.filter(entity=OuterRef(
        'learning_container_year__requirement_entity'), ).current(
        OuterRef('academic_year__start_date')).values('acronym')[:1]
    cls.entity_allocation = EntityVersion.objects.filter(entity=OuterRef(
        'learning_container_year__allocation_entity'), ).current(
        OuterRef('academic_year__start_date')).values('acronym')[:1]
def get(self, request, format=None):
    """List songs with optional filters, subquery annotations and paging.

    Query params: albumId, name, genres (comma-separated), yearSince,
    yearTo, mark/mark_filter (lte|gte|exact|gt|lt), favourite, offset,
    sortMode. Returns a page of 20 serialized songs.
    """
    songs = Song.objects.all()
    # NOTE(review): albumId is overridden if name is also supplied — each
    # of these two branches rebuilds the base queryset from scratch.
    if 'albumId' in request.query_params:
        songs = Album.objects.get(
            id=request.query_params.get('albumId')).songs.all()
    if 'name' in request.query_params:
        songs = Song.objects.filter(
            Q(title__contains=request.query_params.get('name'))
            | Q(performer__contains=request.query_params.get('name')))
    if 'genres' in request.query_params:
        songs = songs.filter(
            genre__in=request.query_params.get('genres').split(','))
    if 'yearSince' in request.query_params:
        songs = songs.filter(
            year__gte=request.query_params.get('yearSince'))
    if 'yearTo' in request.query_params:
        songs = songs.filter(year__lte=request.query_params.get('yearTo'))
    # Average mark per song via a correlated single-row subquery (the
    # constant 'dummy' group collapses marks to one aggregate row).
    marks_subquery = SongMark.objects.filter(song_id=OuterRef('id'))
    marks_subquery = marks_subquery.annotate(
        dummy=Value(1)).values('dummy').annotate(
        marks_avg=Avg('mark')).values_list('marks_avg')
    songs = songs.annotate(marks_avg=marks_subquery)
    # Comment count per song, same single-row-group technique.
    comments_subquery = SongComment.objects.filter(song_id=OuterRef('id'))
    comments_subquery = comments_subquery.annotate(
        dummy=Value(1)).values('dummy').annotate(
        count=Count('*')).values_list('count')
    songs = songs.annotate(comments_count=comments_subquery)
    if 'mark' in request.query_params:
        mark_filter = request.query_params.get('mark_filter')
        if mark_filter == 'lte':
            songs = songs.filter(
                marks_avg__lte=request.query_params.get('mark'))
        elif mark_filter == 'gte':
            songs = songs.filter(
                marks_avg__gte=request.query_params.get('mark'))
        elif mark_filter == 'exact':
            songs = songs.filter(
                marks_avg=request.query_params.get('mark'))
        elif mark_filter == 'gt':
            songs = songs.filter(
                marks_avg__gt=request.query_params.get('mark'))
        elif mark_filter == 'lt':
            songs = songs.filter(
                marks_avg__lt=request.query_params.get('mark'))
    # Favourite flag only exists for authenticated users; the 'favourite'
    # filter therefore only applies inside this branch.
    if request.user.id is not None:
        favourite_subquery = FavouriteSong.objects.filter(
            author_id=request.user.id,
            song_id=OuterRef('id')).values('id')
        songs = songs.annotate(favourite=favourite_subquery)
        if 'favourite' in request.query_params:
            if request.query_params.get(
                    'favourite'
            ) and request.query_params.get('favourite') != 'false':
                songs = songs.filter(favourite__isnull=False)
    if 'offset' in request.query_params:
        offset = int(request.query_params.get('offset'))
    else:
        offset = 0
    if 'sortMode' in request.query_params:
        songs = songs.order_by(request.query_params.get('sortMode'))
    # Fixed page size of 20.
    songs = songs[offset:offset + 20]
    serializer = SongSerializer(songs, many=True)
    return Response(serializer.data)
COMPLETE: "Complete", } class ProgressAnnotation(NamedTuple): name: str expression: Expression LOC_PROGRESS = ProgressAnnotation( "loc_progress", Case( When(letter_request__isnull=False, then=Value(COMPLETE)), When( Q(onboarding_info__signup_intent=SIGNUP_INTENT_CHOICES.LOC) | Q(Exists(AccessDate.objects.filter(user=OuterRef("pk")))), then=Value(IN_PROGRESS), ), default=Value(NOT_STARTED), ), ) EHP_PROGRESS = ProgressAnnotation( "ehp_progress", Case( When( Exists( DocusignEnvelope.objects.filter( docs__user=OuterRef("pk"), status=HP_DOCUSIGN_STATUS_CHOICES.SIGNED)), then=Value(COMPLETE),
def _cte_get_descendants(self, node, include_self=False):
    """Query node descendants

    :param node: A model instance or a QuerySet or Q object querying
        the adjacency list model. If a QuerySet, it should query a single
        value with something like `.values('id')`. If Q the `include_self`
        argument will be ignored.
    :param include_self: If True, the starting node(s) are part of the
        result (ignored when ``node`` is a Q object).
    :returns: A `QuerySet` instance.
    """
    ordering_col = self.model.ordering_col_attr
    discard_dups = False
    # Build the anchor condition of the recursive CTE. Q and QuerySet
    # inputs may match several rows, so duplicates must be discarded later.
    if isinstance(node, Q):
        where = node
        discard_dups = True
    elif include_self:
        if isinstance(node, QuerySet):
            if _is_empty(node):
                return self.none()
            where = Q(id__in=node.order_by())
            discard_dups = True
        else:
            where = Q(id=node.id)
    elif isinstance(node, QuerySet):
        if _is_empty(node):
            return self.none()
        where = Q(parent_id__in=node.order_by())
        discard_dups = True
    else:
        where = Q(parent_id=node.id)

    def make_cte_query(cte):
        # Anchor: matching rows with a one-element ordering path; recursive
        # part: children of CTE rows, path extended with their ordering col.
        return self.filter(where).order_by().annotate(
            _cte_ordering=str_array(ordering_col),
        ).union(
            cte.join(
                self.all().order_by(),
                parent_id=cte.col.id,
            ).annotate(_cte_ordering=array_append(
                cte.col._cte_ordering,
                F(ordering_col),
            )),
            all=True,
        )

    cte = With.recursive(make_cte_query)
    query = cte.queryset().with_cte(cte)
    if discard_dups:
        # Remove duplicates when the supplied Queryset or Q object
        # may contain/match both parents and children. For a given
        # id, retain the row with the longest path. TODO remove this
        # and ensure duplicates do not matter or the criteria never
        # matches both parents and children in all calling code.
        # NOTE(review): `field` is not defined in this method — presumably
        # a module-level array output field; verify it exists at import.
        xdups = With(cte.queryset().annotate(max_len=array_length(
            F("_cte_ordering"), output_field=field), ).distinct("id").order_by(
            "id",
            "-max_len",
        ).values(
            "id",
            "_cte_ordering",
        ), name="xdups")
        query = query.annotate(
            _exclude_dups=Exists(xdups.queryset().filter(
                id=OuterRef("id"),
                _cte_ordering=OuterRef("_cte_ordering"),
            ))).filter(_exclude_dups=True).with_cte(xdups)
    # Depth-first order by the accumulated ordering path.
    return query.order_by(cte.col._cte_ordering)
def get_queryset(self):
    """Limit the base queryset to objects actively shared in the caller's workspace."""
    active_shares = Sharing.objects.filter(
        object_id=OuterRef('pk'),
        workspace=self.request.claim.workspace_id,
        deleted_at__isnull=True,
    )
    base_qs = super().get_queryset()
    return base_qs.filter(Exists(active_shares)).order_by('id')
def _retrieve_people(self, filter: RetentionFilter, team: Team):
    """Return serialized people in the selected retention interval.

    Temporarily mutates ``filter._date_from``/``_date_to`` to scope events
    to the selected interval, then restores the reference window before
    building the (first-time-aware) inner event subquery. Results are
    paginated by ``filter.offset`` in pages of 100.
    """
    period = filter.period
    trunc, fields = self._get_trunc_func("timestamp", period)
    is_first_time_retention = filter.retention_type == RETENTION_FIRST_TIME
    entity_condition, _ = self.get_entity_condition(
        filter.target_entity, "events")
    returning_condition, _ = self.get_entity_condition(
        filter.returning_entity, "first_event_date")
    # Interval 0 is the cohort itself; later intervals use the returning entity.
    _entity_condition = returning_condition if filter.selected_interval > 0 else entity_condition
    events = Event.objects.filter(team_id=team.pk).add_person_id(team.pk)
    reference_date_from = filter.date_from
    reference_date_to = filter.date_from + filter.period_increment
    # Window of the selected interval relative to the cohort start.
    date_from = filter.date_from + filter.selected_interval * filter.period_increment
    date_to = date_from + filter.period_increment
    filter._date_from = date_from.isoformat()
    filter._date_to = date_to.isoformat()
    filtered_events = events.filter(filter.date_filter_Q).filter(
        filter.properties_to_Q(team_id=team.pk))
    # Restore the reference window for the inner (cohort) subquery.
    filter._date_from = reference_date_from.isoformat()
    filter._date_to = reference_date_to.isoformat()
    # First-time retention keys on each person's first matching event date;
    # otherwise any matching event in the reference window qualifies.
    inner_events = (Event.objects.filter(team_id=team.pk).filter(
        filter.properties_to_Q(team_id=team.pk)).add_person_id(
        team.pk).filter(**{
            "person_id": OuterRef("id")
        }).filter(entity_condition).values("person_id").annotate(
        first_date=Min(trunc)).filter(
        filter.custom_date_filter_Q("first_date")).distinct()
        if is_first_time_retention else Event.objects.filter(
            team_id=team.pk).filter(
            filter.date_filter_Q).filter(
            filter.properties_to_Q(
                team_id=team.pk)).add_person_id(
            team.pk).filter(
            **{
                "person_id": OuterRef("id")
            }).filter(entity_condition))
    filtered_events = (filtered_events.filter(_entity_condition).filter(
        Exists(
            Person.objects.filter(**{
                "id": OuterRef("person_id"),
            }).filter(Exists(inner_events)).only("id"))).values(
        "person_id").distinct()).all()
    people = Person.objects.filter(
        team=team,
        id__in=[
            p["person_id"]
            for p in filtered_events[filter.offset:filter.offset + 100]
        ],
    )
    people = people.prefetch_related(
        Prefetch("persondistinctid_set", to_attr="distinct_ids_cache"))
    # Local import — presumably avoids a circular import; confirm.
    from posthog.api.person import PersonSerializer
    return PersonSerializer(people, many=True).data
def test_custom_subquery(self):
    """Subquery annotations register both tables and are cached."""
    # Correlated queryset: Test names whose permission is the outer pk.
    tests = Test.objects.filter(permission=OuterRef('pk')).values('name')
    qs = Permission.objects.annotate(first_permission=Subquery(tests[:1]))
    self.assert_tables(qs, Permission, Test)
    self.assert_query_cached(qs, list(Permission.objects.all()))
def get_status_reviu(self,
                     subsektor=None,
                     cari=None,
                     inspektorat=None,
                     tahun=None,
                     kegiatan=None):
    """Build the review-status report queryset.

    Filters active assignments (optionally by activity type ``kegiatan``)
    and annotates, via correlated count subqueries, how many review
    artifacts exist per assignment: PKR, CHR (+attachments), IHR
    (+attachments), LHR and LHR-with-nost rows.
    """
    # Active auditees only, and only assignments with an ST letter.
    queryset = super().filter(
        assignment_auditi__assign_auditee_id_auditee__auditee_del_st=1,
        assignment_st__isnull=False)
    if kegiatan is None or kegiatan == '':
        queryset = queryset.filter(assign_jenis_kegiatan__gt=1)
    else:
        queryset = queryset.filter(assign_jenis_kegiatan=kegiatan)
    # Models resolved lazily through the app registry.
    pkr_model = apps.get_model(app_label='siau', model_name='ProgramReviu')
    chr_model = apps.get_model(app_label='siau', model_name='ReviuChr')
    chr_attach_model = apps.get_model(app_label='siau',
                                      model_name='ReviuChrAttach')
    ihr_model = apps.get_model(app_label='siau', model_name='ReviuIhr')
    ihr_attach_model = apps.get_model(app_label='siau',
                                      model_name='ReviuIhrAttach')
    lhr_model = apps.get_model(app_label='siau', model_name='ReviuLhr')
    lhr_nost_model = apps.get_model(app_label='siau',
                                    model_name='ReviuLhrNost')
    # Each *_qs counts artifact rows correlated to the outer assignment
    # (and auditee where applicable); group-then-values yields one value.
    pkr_qs = pkr_model.objects\
        .filter(previu_id_assign=OuterRef('assign_id'),
                previu_id_auditee=OuterRef('assignment_auditi__assign_auditee_id_auditee'))\
        .values('previu_id_assign').annotate(jml=Count('previu_id')).order_by('previu_id_assign').values('jml')
    chr_qs = chr_model.objects\
        .filter(chr_assign_id=OuterRef('assign_id'),
                chr_auditee_id=OuterRef('assignment_auditi__assign_auditee_id_auditee'))\
        .values('chr_assign_id').annotate(jml=Count('chr_id')).order_by('chr_assign_id').values('jml')
    chr_rkmbn_qs = chr_attach_model.objects\
        .filter(chr_attach_id_assign=OuterRef('assign_id'),
                chr_attach_id_auditee=OuterRef('assignment_auditi__assign_auditee_id_auditee'))\
        .values('chr_attach_id_assign').annotate(jml=Count('chr_attach_id')).order_by('chr_attach_id_assign').values('jml')
    ihr_qs = ihr_model.objects\
        .filter(ihr_assign=OuterRef('assign_id'),
                ihr_auditee=OuterRef('assignment_auditi__assign_auditee_id_auditee'))\
        .values('ihr_assign').annotate(jml=Count('ihr_id')).order_by('ihr_assign').values('jml')
    ihr_rkbmn_dipa_qs = ihr_attach_model.objects\
        .filter(ihr_attach_id_assign=OuterRef('assign_id'),
                ihr_attach_id_auditee=OuterRef('assignment_auditi__assign_auditee_id_auditee'))\
        .values('ihr_attach_id_assign').annotate(jml=Count('ihr_attach_id')).order_by('ihr_attach_id_assign').values('jml')
    lhr_qs = lhr_model.objects\
        .filter(lhr_assign=OuterRef('assign_id'))\
        .values('lhr_assign').annotate(jml=Count('lhr_id')).order_by('lhr_assign').values('jml')
    # LHR rows that have at least one related "nost" row.
    nost_qs = lhr_nost_model.objects.filter(
        lhr_nost_lhr=OuterRef('lhr_id'))
    lhr_rka_qs = lhr_model.objects\
        .filter(Exists(nost_qs))\
        .filter(lhr_assign=OuterRef('assign_id'))\
        .values('lhr_assign').annotate(jml=Count('lhr_id')).order_by('lhr_assign').values('jml')
    # Apply the caller's visibility filters (subsector, search, etc.).
    queryset = self.filter_user(queryset=queryset,
                                subsektor=subsektor,
                                cari=cari,
                                inspektorat=inspektorat,
                                tahun=tahun)
    qs = queryset\
        .values(jenis_kegiatan=F('assign_jenis_kegiatan'),
                inspektorat=F('assignment_auditi__assign_auditee_id_auditee__auditee_inspektorat__inspektorat_name'),
                no_st=F('assignment_st__assign_surat_no'),
                tgl_st=F('assignment_st__assign_surat_tgl'),
                akhir_penugasan=F('assign_end_date'),
                auditi=F('assignment_auditi__assign_auditee_id_auditee__auditee_name'),)\
        .annotate(pkr=Subquery(pkr_qs),
                  chr=Subquery(chr_qs),
                  chr_rkbmn=Subquery(chr_rkmbn_qs),
                  ihr=Subquery(ihr_qs),
                  ihr_rkbmn_dipa=Subquery(ihr_rkbmn_dipa_qs),
                  lhr=Subquery(lhr_qs),
                  lhr_rka=Subquery(lhr_rka_qs),
                  )\
        .order_by('-assignment_st__assign_surat_tgl').distinct()
    return qs
def test_custom_subquery_exists(self):
    """Exists() annotations register both tables and are cached."""
    tests = Test.objects.filter(permission=OuterRef('pk'))
    qs = Permission.objects.annotate(has_tests=Exists(tests))
    self.assert_tables(qs, Permission, Test)
    self.assert_query_cached(qs, list(Permission.objects.all()))
def test_with_count(self):
    """count() over a queryset filtered on a Subquery annotation executes."""
    newest = Comment.objects.filter(
        post=OuterRef('pk')).order_by('-created_at')
    # NOTE(review): comparing the text subquery result to True looks odd —
    # presumably a regression test that the query merely runs; confirm.
    Post.objects.annotate(
        post_exists=Subquery(newest.values('text')[:1])).filter(
        post_exists=True).count()
def test_annotate_subquery(self):
    """Subquery annotations on User register both tables and are cached."""
    # Correlated queryset: Test names owned by the outer User pk.
    tests = Test.objects.filter(owner=OuterRef('pk')).values('name')
    qs = User.objects.annotate(first_test=Subquery(tests[:1]))
    self.assert_tables(qs, User, Test)
    self.assert_query_cached(qs, [self.user, self.admin])
def customers(self):
    """Users counted as customers: non-staff, or staff with at least one order."""
    order_user_ids = Order.objects.values("user_id")
    has_order = Exists(order_user_ids.filter(user_id=OuterRef("pk")))
    non_staff = Q(is_staff=False)
    ordering_staff = Q(is_staff=True) & has_order
    return self.get_queryset().filter(non_staff | ordering_staff)
def project_analyze(self, user, filters, limits):
    """Aggregate reported hours grouped by the requested filter dimensions.

    Non-privileged users are restricted to projects they lead; hours are
    spread across a report's projects via a correlated count subquery.
    Returns the hierarchical totals storage, or None with no filters.
    """
    if len(filters) < 1:
        return None
    # Filter-name -> ORM field path for grouping/filtering.
    mapping = {
        'worker': 'worker_id',
        'project': 'projects__id',
        'process': 'process_id',
        'subProcess': 'sub_process_id',
        'funcRole': 'func_role_id',
        'place': 'place_id',
        'projectState': 'projectstate__state_id',
        'date': ''  # handled separately (via `limits`)
    }
    value_fields = []
    user_projects_ids = [
        d['id'] for d in Project.objects.filter(gip=user).values('id')
    ]
    filtered = self.filter(deleted=False, checked_by__isnull=False)
    filtered = limits.get_range(filtered, 'report_date')
    if not user.has_perm(
            'projects.global_analysis') and not user_projects_ids:
        raise SngyException(
            'Отсутствуют проекты, в которых вы являетесь ГИПом!')
    # Without the global permission, clamp any project filter to the
    # user's own projects (or apply them wholesale when none was given).
    # NOTE(review): filter items are read as f['name'] here but f.name /
    # f.filter below — presumably a custom object supporting both; verify.
    if not user.has_perm('projects.global_analysis'):
        project_filter = [f for f in filters if f['name'] == 'project']
        if project_filter:
            project_filter = project_filter[0]
            project_filter.filter = [
                id for id in project_filter.filter
                if id in user_projects_ids
            ]
            if not project_filter.filter:
                project_filter.filter = user_projects_ids
        else:
            filtered = filtered.filter(projects__id__in=user_projects_ids)
    for f in filters:
        if len(f.filter) > 0:
            field_name = mapping[f.name] + '__in'
            filtered = filtered.filter(models.Q(**{field_name: f.filter}))
            if mapping[f.name]:
                value_fields.append(mapping[f.name])
    # To compute the cost of hours we still need to group by worker
    worker_mapping = mapping['worker']
    if worker_mapping not in value_fields:
        value_fields.append(worker_mapping)
    # Per-report project count, used to split time evenly across projects.
    sub_query = self.filter(id=OuterRef('id')).annotate(
        count=models.Count('projects__number')).values('count')
    hour_sum = models.Sum(
        Extract('time_spent', 'EPOCH') / Subquery(sub_query),
        output_field=models.FloatField()) / 3600
    values = filtered.values(*value_fields)
    result = values.annotate(
        hours=hour_sum,
        mnth=TruncMonth('report_date')).order_by(*value_fields)
    root_class = get_class_hierarchy([(f.name, mapping[f.name])
                                      for f in filters])
    root_class.append(result)
    root_class.get_totals()
    return root_class.storage