def test_empty_result_set(self):
    """Each aggregate yields its documented empty-set value, both on the
    no-query ``.none()`` shortcut and on a real database round trip."""
    AggregateTestModel.objects.all().delete()
    cases = [
        (ArrayAgg("char_field"), []),
        (ArrayAgg("integer_field"), []),
        (ArrayAgg("boolean_field"), []),
        (BitAnd("integer_field"), None),
        (BitOr("integer_field"), None),
        (BoolAnd("boolean_field"), None),
        (BoolOr("boolean_field"), None),
        (JSONBAgg("integer_field"), []),
        (StringAgg("char_field", delimiter=";"), ""),
    ]
    if connection.features.has_bit_xor:
        cases.append((BitXor("integer_field"), None))
    for aggregation, expected_result in cases:
        with self.subTest(aggregation=aggregation):
            # The .none() queryset is resolved without issuing SQL.
            with self.assertNumQueries(0):
                values = AggregateTestModel.objects.none().aggregate(
                    aggregation=aggregation,
                )
                self.assertEqual(values, {"aggregation": expected_result})
            # An executed query over the emptied table must agree.
            with self.assertNumQueries(1):
                values = AggregateTestModel.objects.aggregate(
                    aggregation=aggregation,
                )
                self.assertEqual(values, {"aggregation": expected_result})
def test_default_argument(self):
    """The ``default`` argument replaces each aggregate's empty-set value."""
    AggregateTestModel.objects.all().delete()
    cases = [
        (ArrayAgg("char_field", default=["<empty>"]), ["<empty>"]),
        (ArrayAgg("integer_field", default=[0]), [0]),
        (ArrayAgg("boolean_field", default=[False]), [False]),
        (BitAnd("integer_field", default=0), 0),
        (BitOr("integer_field", default=0), 0),
        (BoolAnd("boolean_field", default=False), False),
        (BoolOr("boolean_field", default=False), False),
        (JSONBAgg("integer_field", default=Value('["<empty>"]')), ["<empty>"]),
        (
            StringAgg("char_field", delimiter=";", default=Value("<empty>")),
            "<empty>",
        ),
    ]
    if connection.features.has_bit_xor:
        cases.append((BitXor("integer_field", default=0), 0))
    for aggregation, expected_result in cases:
        with self.subTest(aggregation=aggregation):
            # The .none() queryset is resolved without issuing SQL.
            with self.assertNumQueries(0):
                values = AggregateTestModel.objects.none().aggregate(
                    aggregation=aggregation,
                )
                self.assertEqual(values, {"aggregation": expected_result})
            # The executed query must produce the same default.
            with self.assertNumQueries(1):
                values = AggregateTestModel.objects.aggregate(
                    aggregation=aggregation,
                )
                self.assertEqual(values, {"aggregation": expected_result})
def test_empty_result_set(self):
    """With no rows present, each aggregate falls back to its empty value."""
    AggregateTestModel.objects.all().delete()
    expectations = [
        (ArrayAgg('char_field'), []),
        (ArrayAgg('integer_field'), []),
        (ArrayAgg('boolean_field'), []),
        (BitAnd('integer_field'), None),
        (BitOr('integer_field'), None),
        (BoolAnd('boolean_field'), None),
        (BoolOr('boolean_field'), None),
        (JSONBAgg('integer_field'), []),
        (StringAgg('char_field', delimiter=';'), ''),
    ]
    for aggregation, expected in expectations:
        with self.subTest(aggregation=aggregation):
            # .none() short-circuits: no SQL should be issued at all.
            with self.assertNumQueries(0):
                result = AggregateTestModel.objects.none().aggregate(
                    aggregation=aggregation,
                )
                self.assertEqual(result, {'aggregation': expected})
            # A real query over the emptied table agrees with the shortcut.
            with self.assertNumQueries(1):
                result = AggregateTestModel.objects.aggregate(
                    aggregation=aggregation,
                )
                self.assertEqual(result, {'aggregation': expected})
def test_default_argument(self):
    """``default`` substitutes for each aggregate's empty-set result."""
    AggregateTestModel.objects.all().delete()
    expectations = [
        (ArrayAgg('char_field', default=['<empty>']), ['<empty>']),
        (ArrayAgg('integer_field', default=[0]), [0]),
        (ArrayAgg('boolean_field', default=[False]), [False]),
        (BitAnd('integer_field', default=0), 0),
        (BitOr('integer_field', default=0), 0),
        (BoolAnd('boolean_field', default=False), False),
        (BoolOr('boolean_field', default=False), False),
        (JSONBAgg('integer_field', default=Value('["<empty>"]')), ['<empty>']),
        (
            StringAgg('char_field', delimiter=';', default=Value('<empty>')),
            '<empty>',
        ),
    ]
    for aggregation, expected in expectations:
        with self.subTest(aggregation=aggregation):
            # .none() short-circuits: no SQL should be issued at all.
            with self.assertNumQueries(0):
                result = AggregateTestModel.objects.none().aggregate(
                    aggregation=aggregation,
                )
                self.assertEqual(result, {'aggregation': expected})
            # The executed query must yield the same default.
            with self.assertNumQueries(1):
                result = AggregateTestModel.objects.aggregate(
                    aggregation=aggregation,
                )
                self.assertEqual(result, {'aggregation': expected})
def get_queryset(self):
    """Annotate each harbor with aggregate statistics over its piers:
    maximum dimensions, place counts, and OR-combined service flags."""
    piers = Pier.objects.filter(harbor=OuterRef("pk")).values("harbor__pk")

    def _max_of(field):
        # Correlated subquery: maximum of ``field`` among the harbor's piers.
        return Subquery(
            piers.annotate(max=Max(field)).values("max"),
            output_field=DecimalField(),
        )

    def _sum_of(field):
        # Correlated subquery: total of ``field`` among the harbor's piers.
        return Subquery(
            piers.annotate(count=Sum(field)).values("count"),
            output_field=SmallIntegerField(),
        )

    return (
        super()
        .get_queryset()
        .annotate(
            max_width=_max_of("max_width"),
            max_length=_max_of("max_length"),
            max_depth=_max_of("max_depth"),
            number_of_free_places=_sum_of("number_of_free_places"),
            number_of_inactive_places=_sum_of("number_of_inactive_places"),
            number_of_places=_sum_of("number_of_places"),
            electricity=BoolOr("piers__electricity"),
            water=BoolOr("piers__water"),
            gate=BoolOr("piers__gate"),
            mooring=BoolOr("piers__mooring"),
            waste_collection=BoolOr("piers__waste_collection"),
            lighting=BoolOr("piers__lighting"),
        )
    )
def requires_update(self, requirement=True, include_archived=False):
    """
    A ``TransactionCheck`` requires an update if it or any check on a
    transaction before it in order is stale or no longer current.

    If a ``TransactionCheck`` on an earlier transaction is stale, it means
    that transaction has been modified since the check was done, which
    could also invalidate any checks of any subsequent transactions.

    By default transactions in ARCHIVED workbaskets are ignored, since
    these workbaskets exist outside of the normal workflow.

    :param requirement: keep only checks whose cascaded ``requires_update``
        flag equals this value.
    :param include_archived: when ``True``, checks on ARCHIVED-workbasket
        transactions participate as well.
    :return: a queryset of checks annotated with ``requires_update`` and
        filtered to ``requirement``.
    """
    if include_archived:
        ignore_filter = {}
    else:
        ignore_filter = {"transaction__workbasket__status": "ARCHIVED"}

    # First filtering out any objects we should ignore, work out for each
    # check whether it alone requires an update, by seeing whether it is
    # stale or not current.
    # NOTE(review): ``With`` looks like a SQL common-table-expression
    # helper (e.g. django-cte) — confirm against the module's imports.
    basic_info = With(
        self.model.objects.exclude(**ignore_filter).annotate(
            **self.freshness_annotations).annotate(
                requires_update=self.requires_update_annotation,
            ),
        name="basic_info",
    )

    # Now cascade that result down to any subsequent transactions: if a
    # transaction in the same workbasket comes later, then it will also
    # require an update. TODO: do stale transactions pollute the update
    # check for ever?
    # The window ORs ``requires_update`` over the workbasket partition in
    # transaction order, so one stale check flags every later check too.
    sequence_info = With(
        basic_info.join(
            self.model.objects.all(), pk=basic_info.col.pk).annotate(
                requires_update=expressions.Window(
                    expression=BoolOr(basic_info.col.requires_update),
                    partition_by=models.F("transaction__workbasket"),
                    order_by=[
                        models.F("transaction__order").asc(),
                        models.F("pk").desc(),
                    ],
                ),
            ),
        name="sequence_info",
    )

    # Now filter for only the type that we want: checks that either do or
    # do not require an update. Both CTEs must be attached via with_cte()
    # for the final SQL to reference them.
    return (sequence_info.join(self, pk=sequence_info.col.pk).with_cte(
        basic_info).with_cte(sequence_info).annotate(
            requires_update=sequence_info.col.requires_update).filter(
                requires_update=requirement))
def get(self, request, user_id, *args, **kwargs):
    """Render a user's profile page or serve one of its JSON sub-endpoints.

    The ``request_type`` GET parameter dispatches to ``get_extra_posts``
    (paginated posts), ``get_comments`` (comments of one post) or
    ``are_friends`` (friendship flag); otherwise the profile template is
    rendered with the ten newest posts.

    :param user_id: primary key of the profile being viewed.
    :return: ``JsonResponse`` for AJAX request types, a redirect to the
        login page for an unknown ``user_id``, or the rendered template.
    """
    try:
        user = User.objects.filter(id=user_id).select_related('profile')[0]
    except IndexError:
        # Unknown profile id: send the visitor to the login page.
        return redirect('userprofile:login')
    request_user = User.objects.filter(
        id=request.user.id).select_related('profile')[0]

    # BUG FIX: the original used request.GET['request_type'], which raises
    # KeyError (HTTP 500) whenever the query string is non-empty but lacks
    # that key. .get() returns None and falls through to the page render.
    request_type = request.GET.get('request_type')

    if request_type == 'get_extra_posts':
        posts = ProfilePost.objects.filter(author=user.profile).annotate(
            like_count=Count('like', distinct=True),
            comment_count=Count('comments', distinct=True),
            # True as soon as any of the post's likes belongs to the viewer.
            is_liked_by_user=BoolOr(Case(
                When(Q(like__user_id=request_user.id), then=Value(True)),
                default=Value(False),
                output_field=BooleanField(),
            )),
        ).values(
            'id', 'post_text', 'publication_date', 'is_liked_by_user',
            'like_count', 'comment_count',
        ).order_by('-publication_date')[
            int(request.GET['posts_from']):int(request.GET['posts_to'])
        ]
        return JsonResponse(data={'posts': list(posts)}, status=200)

    if request_type == 'get_comments':
        comments = PostComment.objects.filter(
            post_id=request.GET['post_id'],
        ).values(
            'owner_id',
            'text',
            owner_first_name=F('owner__user__first_name'),
            owner_last_name=F('owner__user__last_name'),
        ).order_by('publication_date')
        return JsonResponse(data={'comments': list(comments)}, status=200)

    is_my_friend = Friendship.are_friends(user=request_user.id, friend=user.id)
    if request_type == 'are_friends':
        return JsonResponse(data={'are_friends': is_my_friend}, status=200)

    posts = ProfilePost.objects.filter(
        author=user.profile,
    ).prefetch_related('like').annotate(
        like_count=Count('like', distinct=True),
        comment_count=Count('comments', distinct=True),
    ).order_by('-publication_date')

    # Only the ten newest posts are rendered; the client fetches the rest
    # through the get_extra_posts endpoint.
    too_many_posts = False
    if posts.count() > 10:
        too_many_posts = True
        posts = posts[:10]
    list(posts)  # force evaluation so the template reuses the cached rows

    form = self.form_class(request.POST)
    args = {
        'greenLeafUser': user,
        'userProfile': user.profile,
        'posts': posts,
        'form': form,
        'are_friends': is_my_friend,
        'too_many_posts': too_many_posts,
    }
    return render(request, self.template_name, args)
def get_queryset(self):
    """Annotate each object with OR-aggregated service flags of its
    related sections: a flag is True if any section provides it."""
    flag_fields = (
        "electricity",
        "water",
        "gate",
        "summer_storage_for_docking_equipment",
        "summer_storage_for_trailers",
        "summer_storage_for_boats",
    )
    # Build the annotate() kwargs in one pass; dicts keep insertion order.
    section_flags = {
        name: BoolOr("sections__" + name) for name in flag_fields
    }
    return super().get_queryset().annotate(**section_flags)
def test_bool_or_empty_result(self):
    """BoolOr over an empty table aggregates to None."""
    AggregateTestModel.objects.all().delete()
    result = AggregateTestModel.objects.aggregate(
        boolor=BoolOr('boolean_field'),
    )
    self.assertEqual(result, {'boolor': None})
def test_bool_or_general(self):
    """BoolOr is True when at least one row's boolean_field is True."""
    result = AggregateTestModel.objects.aggregate(
        boolor=BoolOr('boolean_field'),
    )
    self.assertEqual(result, {'boolor': True})
def test_bool_or_q_object(self):
    """BoolOr accepts a Q-object condition; no fixture row satisfies it."""
    result = AggregateTestModel.objects.aggregate(
        boolor=BoolOr(Q(integer_field__gt=2)),
    )
    self.assertEqual(result, {"boolor": False})
def handle(self, *args, **options):
    """Recompute each child's adoption-flagging status and record changes.

    With ``--all`` every active child is reviewed; otherwise only children
    whose own record, CCI shelter relation or family visit changed since
    yesterday. A Config row serves as a crude run lock so scheduled runs
    do not overlap. A new ChildFlaggedHistory row is written only when the
    status or reason actually changed; the superseded row is deactivated.
    """
    import logging
    logger = logging.getLogger(__name__)

    review_all_children = options['all']
    today = datetime.date.today()
    yesterday = today - datetime.timedelta(days=1)
    cfd_value = Config.objects.get(code='cfd')
    flagging_history_script_status = Config.objects.get(
        code='flagging_history_script_status')

    # BUG FIX: the original compared the Config *instance* to True
    # (``flagging_history_script_status == True``), which is always False,
    # so the overlap lock never engaged. Compare the stored value instead;
    # str() covers both a boolean field and a text field holding "True".
    if not review_all_children and str(
            flagging_history_script_status.value) == 'True':
        logger.info(
            "Flagging history script is running. Multiple simulatenous runs not allowed."
        )
        return

    # get the date which is x days from today.
    # x is configured in the config table with the key "cfd"
    try:
        flagging_history_script_status.value = True
        flagging_history_script_status.save()
        x_days_ago = today - datetime.timedelta(days=int(cfd_value.value))
        logger.debug("x_days_ago:" + str(x_days_ago))

        if review_all_children:
            # Review all active children: the flag depends on time since
            # admission / last visit, so it can change without any update
            # to the child record itself.
            review_child_list = Child.objects.filter(active=2).values(
                'id', 'case_number', 'first_name',
                'cwc_order_number').annotate(flagstatus=BoolOr(
                    'child_classification__use_for_flagging'))
        else:
            # Review children whose record, shelter relation or family
            # visit was modified yesterday or today.
            children_info_shelter_updated = ChildShelterHomeRelation.objects.filter(
                modified_on__date__gte=yesterday,
                active=2).values_list('child_id', flat=True)
            children_info_visit_updated = FamilyVisit.objects.filter(
                modified_on__date__gte=yesterday,
                active=2).values_list('child_id', flat=True)
            children_updated = list(
                set(children_info_shelter_updated)
                | set(children_info_visit_updated))
            review_child_list = Child.objects.filter(
                (Q(modified_on__date__gte=yesterday)
                 | Q(id__in=children_updated)) & Q(active=2)).values(
                     'id', 'case_number', 'first_name',
                     'cwc_order_number').annotate(flagstatus=BoolOr(
                         'child_classification__use_for_flagging'))

        logger.info("review_child_list_QUERY:" + str(review_child_list.query))
        if review_child_list:
            logger.info("review_child_list Count:" +
                        str(review_child_list.count()))

        for data in review_child_list:
            if data["cwc_order_number"]:
                logger.debug("1.order_number:" + data["cwc_order_number"])
                flagging_status = 2  # Legally free for adoption
                reason = "LFA order issued by the CWC"
            else:
                # Earliest admission and latest family visit drive the rules.
                child_shelter = ChildShelterHomeRelation.objects.filter(
                    child=data['id'],
                    active=2).order_by('date_of_admission', 'id').first()
                family_visit = FamilyVisit.objects.filter(
                    child=data['id'],
                    active=2).order_by('-date_of_visit', '-id').first()
                if (child_shelter
                        and child_shelter.date_of_admission is not None
                        and child_shelter.date_of_admission <= x_days_ago
                        and family_visit is None):
                    logger.debug("2.date_of_admission:" +
                                 str(child_shelter.date_of_admission))
                    flagging_status = 1  # Recommended for adoption enquiry
                    reason = "No family visit"
                elif (child_shelter
                      and child_shelter.date_of_admission is not None
                      and child_shelter.date_of_admission <= x_days_ago
                      and family_visit
                      and family_visit.date_of_visit <= x_days_ago):
                    logger.debug("3.date_of_admission:" +
                                 str(child_shelter.date_of_admission) +
                                 " date_of_visit:" +
                                 str(family_visit.date_of_visit))
                    flagging_status = 1  # Recommended for adoption enquiry
                    reason = "Last family visit more than 180 days ago"
                elif data["flagstatus"] == True:
                    logger.debug("4.flagstatus:" + str(data["flagstatus"]))
                    flagging_status = 1  # Recommended for adoption enquiry
                    reason = "Child has no guardian or child's guardian is unfit/unintersted to raise the child"
                elif child_shelter is None:
                    logger.debug("5.child_shelter: None")
                    flagging_status = 3  # Not applicable
                    reason = "Child's CCI information is not available"
                elif (child_shelter
                      and child_shelter.date_of_admission is not None
                      and child_shelter.date_of_admission > x_days_ago):
                    logger.debug("6.date_of_admission: " +
                                 str(child_shelter.date_of_admission))
                    flagging_status = 3  # Not applicable
                    reason = "Child's stay in CCI less than 180 days and child's guardian's fitness needs to be evaluated"
                elif family_visit and family_visit.date_of_visit > x_days_ago:
                    logger.debug("7.date_of_visit: " +
                                 str(family_visit.date_of_visit))
                    flagging_status = 3  # Not applicable
                    reason = "Family visited in the last 180 days and child's guardian's fitness needs to be evaluated"
                else:
                    logger.debug("8.else")
                    flagging_status = 3  # Not applicable
                    reason = "Not Applicable"

            # The flagged_date <= today filter lets a manually entered,
            # future-dated flagging record survive this script's rewrite.
            previous_status = ChildFlaggedHistory.objects.filter(
                child=data['id'], active=2).filter(
                    flagged_date__lte=today).order_by('-id').first()
            if previous_status:
                logger.debug("previous_status.flagged_status: " +
                             str(previous_status.flagged_status) +
                             " previous_status.reason_for_flagging:" +
                             previous_status.reason_for_flagging)
            logger.debug("0.child_details:" + data["case_number"] + "::::" +
                         str(data["id"]) + "::::flagging_status:" +
                         str(flagging_status) + " reason:" + reason)

            # Record only genuine status/reason changes, then retire the
            # superseded history row.
            if (previous_status is None
                    or previous_status.flagged_status != flagging_status
                    or previous_status.reason_for_flagging != reason):
                ChildFlaggedHistory.objects.create(
                    child=Child.objects.get(id=data['id']),
                    flagged_date=today,
                    reason_for_flagging=reason,
                    flagged_status=flagging_status,
                )
                if previous_status:
                    previous_status.active = 0
                    previous_status.save()
    except Exception:
        # Log the full traceback; the finally block still releases the lock.
        exc_type, exc_value, exc_traceback = sys.exc_info()
        error_stack = repr(
            traceback.format_exception(exc_type, exc_value, exc_traceback))
        logger.error(error_stack)
    finally:
        flagging_history_script_status.value = False
        flagging_history_script_status.save()