def get_order_amount_total(inward_order):
    """Return the summed ``total_amount`` over all invoices of *inward_order*.

    Returns ``None`` (SQL ``SUM`` over an empty set) when the order has no
    invoices — callers must handle that case.
    """
    invoices = Invoice.objects.filter(inward_order=inward_order.pk)
    totals = invoices.aggregate(Sum('total_amount'))
    return totals['total_amount__sum']
def context_general(user):
    """Build the per-user dashboard context: internal tasks, KPI bonuses,
    productivity, news, events and recent activity log.
    """
    user_tasks = IntTask.objects.filter(executor__user=user)
    open_tasks = user_tasks.exclude(exec_status=IntTask.Done)

    inttasks = open_tasks.order_by('exec_status')
    # Tasks finished this month still count as "active" for the ratio below.
    hd_inttasks_count = user_tasks.filter(
        exec_status=IntTask.Done,
        actual_finish__month=TODAY.month,
        actual_finish__year=TODAY.year).count()
    active_inttasks_count = open_tasks.count() + hd_inttasks_count
    overdue_inttasks_count = open_tasks.exclude(
        planned_finish__gte=TODAY).count()
    if active_inttasks_count > 0:
        overdue_inttasks_div = int(
            overdue_inttasks_count / active_inttasks_count * 100)
    else:
        overdue_inttasks_div = 0

    # KPI records for the current month only.
    month_kpis = Kpi.objects.filter(employee__user=user,
                                    period__month=TODAY.month,
                                    period__year=TODAY.year)
    bonus = month_kpis.filter(
        name__in=[Kpi.BonusItel, Kpi.BonusGKP, Kpi.BonusSIA]).aggregate(
            Sum('value'))['value__sum'] or 0
    try:
        inttask_bonus = month_kpis.get(name=Kpi.Tasks).value
    except Kpi.DoesNotExist:
        inttask_bonus = 0
    total_bonus = bonus + inttask_bonus
    try:
        productivity = month_kpis.get(name=Kpi.Productivity).value
    except Kpi.DoesNotExist:
        productivity = 0

    # News currently in their "actual" window; upcoming events; last 50 log rows.
    news = News.objects.exclude(actual_from__gt=TODAY).exclude(
        actual_to__lte=TODAY).order_by('-created')
    events = Event.objects.filter(
        next_date__isnull=False).order_by('next_date')
    activities = Log.objects.filter(user=user)[:50]

    return {
        'employee_id': Employee.objects.get(user=user).pk,
        'inttasks': inttasks,
        'active_inttasks_count': active_inttasks_count,
        'overdue_inttasks_count': overdue_inttasks_count,
        'overdue_inttasks_div': overdue_inttasks_div,
        'bonus': bonus,
        'inttask_bonus': inttask_bonus,
        'total_bonus': total_bonus,
        'productivity': int(productivity),
        'news': news,
        'events': events,
        'activities': activities,
    }
def get(self, request, *args, **kwargs):
    """Render the CRM dashboard.

    Clients see their own transaction/withdrawal/deposit totals; any other
    person is treated as an employee and sees aggregates over the accounts
    of the clients they created. Weather data is best-effort.
    """
    city = request.user.person.city

    # ------------- Weather Data -------------
    # BUGFIX: city_weather was unbound (NameError at render time) whenever
    # the weather call failed; initialize it before the try block.
    city_weather = None
    try:
        r = getWeatherData(city)
        city_weather = {
            "city": r["name"],
            "country": r["sys"]["country"],
            "humidity": r["main"]["humidity"],
            "temperature": r["main"]["temp"],
            "description": r["weather"][0]["description"],
            "icon": r["weather"][0]["icon"],
            # Timestamps are shifted by one hour — presumably the site's
            # fixed UTC offset; TODO confirm and use a proper timezone.
            "sunrise": (timedelta(hours=1) + datetime.utcfromtimestamp(
                int(r["sys"]["sunrise"]))).strftime("%H:%M:%S"),
            "sunset": (timedelta(hours=1) + datetime.utcfromtimestamp(
                int(r["sys"]["sunset"]))).strftime("%H:%M:%S"),
            "today": datetime.now()
        }
    except Exception:
        pass  # best-effort: dashboard renders without weather

    context = dict()
    try:
        client = Client.objects.get(person_id=request.user.person.id)
    except Client.DoesNotExist:
        client = None

    if client is not None:
        # --- Client dashboard: totals over the client's own accounts ---
        client_accounts = Account.objects.filter(client_id=client.id)
        transactions = Transaction.objects.filter(
            sender_account__in=client_accounts)
        transactions_amount = transactions.aggregate(
            Sum("amount"))["amount__sum"]
        context["transactions_amount"] = round(
            transactions_amount if transactions_amount is not None else 0, 2)
        context["transactions_count"] = transactions.count()

        withdrawals = Withdrawal.objects.filter(account__in=client_accounts)
        withdrawals_amount = withdrawals.aggregate(
            Sum("amount"))["amount__sum"]

        # ----------- statistics -------------
        statistics = getUserStatistics(request)
        context["transactions_statistics"] = statistics[0]
        context["withdrawals_statistics"] = statistics[1]
        context["deposits_statistics"] = statistics[2]
        context["received_transaction_count"] = statistics[3]
        # BUGFIX: was `datetime.year` (the class attribute descriptor, not
        # an int); use the current year.
        context["today_year"] = datetime.now().year

        context["withdrawals_amount"] = round(
            withdrawals_amount if withdrawals_amount is not None else 0, 2)
        context["withdrawals_count"] = withdrawals.count()

        deposits = Deposit.objects.filter(account__in=client_accounts)
        deposits_amount = deposits.aggregate(Sum("amount"))["amount__sum"]
        context["deposits_amount"] = round(
            deposits_amount if deposits_amount is not None else 0, 2)
        context["deposits_count"] = deposits.count()
    else:
        # --- Employee dashboard: aggregates over created clients' accounts ---
        total_balance = nb_transactions = nb_deposits = nb_withdrawals = 0
        transactions_balance = deposits_balance = withdrawals_balance = 0
        employee = Employee.objects.filter(
            person_id=request.user.person.id)[0]
        for account in Account.objects.filter(
                client__in=Client.objects.filter(creator=employee)):
            total_balance += account.balance

            temp_transactions = Transaction.objects.filter(
                Q(sender_account=account) | Q(receiver_account=account))
            nb_transactions += temp_transactions.count()
            t_b = temp_transactions.aggregate(Sum("amount"))["amount__sum"]
            transactions_balance += t_b if t_b is not None else 0

            temp_deposits = Deposit.objects.filter(account=account)
            nb_deposits += temp_deposits.count()
            d_b = temp_deposits.aggregate(Sum("amount"))["amount__sum"]
            # BUGFIX: the None-check re-ran the aggregate query; test the
            # already-fetched value instead (same result, one query fewer).
            deposits_balance += d_b if d_b is not None else 0

            temp_withdrawals = Withdrawal.objects.filter(account=account)
            nb_withdrawals += temp_withdrawals.count()
            w_b = temp_withdrawals.aggregate(Sum("amount"))["amount__sum"]
            withdrawals_balance += w_b if w_b is not None else 0

        # ----------- statistics -------------
        statistics = getEmployeeStatistics(request)
        context["transactions_statistics"] = statistics[0]
        context["withdrawals_statistics"] = statistics[1]
        context["deposits_statistics"] = statistics[2]
        context["clients_transactions_count"] = statistics[3]
        # BUGFIX: was `datetime.year` (descriptor, not an int).
        context["today_year"] = datetime.now().year
        context["nb_clients"] = Client.objects.filter(
            creator=employee).count()
        context["total_balance"] = total_balance
        context["transactions_balance"] = transactions_balance
        context["deposits_balance"] = deposits_balance
        context["withdrawals_balance"] = withdrawals_balance
        context["nb_transactions"] = nb_transactions
        context["nb_deposits"] = nb_deposits
        context["nb_withdrawals"] = nb_withdrawals

    context["city_weather"] = city_weather
    return render(request, "CRM/index.html", context)
def get_context_data(self, **kwargs):
    """Summarize sent distribution plans for the dashboard.

    Returns counts of all/completed plans plus total planned and delivered
    supply quantities.
    """
    plans = DistributionPlan.objects.filter(sent=True)
    # BUGFIX: the original used `aggregate(...).values()[0]`, which raises
    # on Python 3 (dict_values is not subscriptable) and depended on dict
    # ordering; use named aggregate keys instead (one query, not two).
    totals = plans.aggregate(supplies_planned=Sum('quantity'),
                             supplies_delivered=Sum('delivered'))
    return {
        'distributions': plans.count(),
        'completed': plans.filter(quantity=F('delivered')).count(),
        'supplies_planned': totals['supplies_planned'],
        'supplies_delivered': totals['supplies_delivered'],
    }
def cash_transfers_amounts(self):
    """Build the "cash transfers by amount band" table.

    Returns a list of rows: a header row, then one row per YTD cash-transfer
    band. Each band row is ``[label, total per risk rating (5 ratings),
    partner count]``. Refactored from five copy-pasted 6-cell blocks into a
    single row builder; bands, labels and query semantics are unchanged.
    """
    FIRST_LEVEL = Decimal(50000.00)
    SECOND_LEVEL = Decimal(100000.00)
    THIRD_LEVEL = Decimal(350000.00)
    FOURTH_LEVEL = Decimal(500000.00)

    def band_row(label, band_qs):
        # [label, sum-per-rating x5, number of partners in the band]
        ratings = (
            PartnerOrganization.RATING_NOT_REQUIRED,
            PartnerOrganization.RATING_LOW,
            PartnerOrganization.RATING_MEDIUM,
            PartnerOrganization.RATING_SIGNIFICANT,
            PartnerOrganization.RATING_HIGH,
        )
        row = [label]
        for rating in ratings:
            row.append(band_qs.filter(rating=rating).aggregate(
                total=Coalesce(Sum('total_ct_ytd'), 0))['total'])
        row.append(band_qs.aggregate(count=Count('total_ct_ytd'))['count'])
        return row

    qs = self.get_queryset()
    bands = [
        ('$0-50,000', qs.filter(total_ct_ytd__lte=FIRST_LEVEL)),
        ('$50,001-100,000', qs.filter(total_ct_ytd__gt=FIRST_LEVEL,
                                      total_ct_ytd__lte=SECOND_LEVEL)),
        ('$100,001-350,000', qs.filter(total_ct_ytd__gt=SECOND_LEVEL,
                                       total_ct_ytd__lte=THIRD_LEVEL)),
        ('$350,001-500,000', qs.filter(total_ct_ytd__gt=THIRD_LEVEL,
                                       total_ct_ytd__lte=FOURTH_LEVEL)),
        ('>$500,000', qs.filter(total_ct_ytd__gt=FOURTH_LEVEL)),
    ]
    header = [
        'Risk Rating',
        'Not Required',
        'Low',
        'Medium',
        'Significant',
        'High',
        'Number of IPs',
    ]
    return [header] + [band_row(label, band_qs) for label, band_qs in bands]
def get_context_data(self, **kwargs):
    """Build the party-list context for one of several "info" rankings.

    ``info`` (GET parameter, default ``'seats'``) selects which statistic is
    computed into ``p.extra`` on every coalition/opposition party. Each
    branch also tracks an extreme value ``m`` (max or min, depending on the
    statistic) and derives ``norm_factor``/``baseline`` from it — presumably
    used by the template to scale bars; TODO confirm.
    """
    context = super(PartyListView, self).get_context_data(**kwargs)
    qs = context['object_list']
    info = self.request.GET.get('info', 'seats')
    context['coalition'] = qs.filter(is_coalition=True)
    context['opposition'] = qs.filter(is_coalition=False)
    # [url, label, is_current] triples; the matching branch below flips
    # its is_current flag to True.
    context['friend_pages'] = [
        ['.', _('By Number of seats'), False],
        ['.?info=votes-per-seat', _('By votes per seat'), False],
        ['.?info=discipline', _('By factional discipline'), False],
        [
            '.?info=coalition-discipline',
            _('By coalition/opposition discipline'), False
        ],
        [
            '.?info=residence-centrality',
            _('By residence centrality'), False
        ],
        ['.?info=residence-economy', _('By residence economy'), False],
        ['.?info=bills-proposed', _('By bills proposed'), False],
        ['.?info=bills-pre', _('By bills passed pre vote'), False],
        ['.?info=bills-first', _('By bills passed first vote'), False],
        ['.?info=bills-approved', _('By bills approved'), False],
        [
            '.?info=presence',
            _('By average weekly hours of presence'), False
        ],
        [
            '.?info=committees',
            _('By average monthly committee meetings'), False
        ],
    ]
    if info:
        # Rank by seat count (default view).
        if info == 'seats':
            context['coalition'] = context['coalition'].annotate(
                extra=Sum('number_of_seats')).order_by('-extra')
            context['opposition'] = context['opposition'].annotate(
                extra=Sum('number_of_seats')).order_by('-extra')
            context['friend_pages'][0][2] = True
            context['norm_factor'] = 1
            context['baseline'] = 0
            context['title'] = "%s" % (_('Parties'))
        # Votes per seat; m tracks the MAX across all parties.
        if info == 'votes-per-seat':
            m = 0
            for p in context['coalition']:
                p.extra = p.voting_statistics.votes_per_seat()
                if p.extra > m:
                    m = p.extra
            for p in context['opposition']:
                p.extra = p.voting_statistics.votes_per_seat()
                if p.extra > m:
                    m = p.extra
            context['friend_pages'][1][2] = True
            context['norm_factor'] = m / 20
            context['baseline'] = 0
            context['title'] = "%s" % (_('Parties'))
        # Factional discipline (percentage); m tracks the MIN.
        if info == 'discipline':
            m = 100
            for p in context['coalition']:
                p.extra = p.voting_statistics.discipline()
                if p.extra < m:
                    m = p.extra
            for p in context['opposition']:
                p.extra = p.voting_statistics.discipline()
                if p.extra < m:
                    m = p.extra
            context['friend_pages'][2][2] = True
            context['norm_factor'] = (100.0 - m) / 15
            context['baseline'] = m - 2
            context['title'] = "%s" % (_('Parties'))
        # Coalition/opposition discipline; same shape as 'discipline'.
        if info == 'coalition-discipline':
            m = 100
            for p in context['coalition']:
                p.extra = p.voting_statistics.coalition_discipline()
                if p.extra < m:
                    m = p.extra
            for p in context['opposition']:
                p.extra = p.voting_statistics.coalition_discipline()
                if p.extra < m:
                    m = p.extra
            context['friend_pages'][3][2] = True
            context['norm_factor'] = (100.0 - m) / 15
            context['baseline'] = m - 2
            context['title'] = "%s" % (_('Parties'))
        # Mean residence centrality of members (falsy values excluded).
        if info == 'residence-centrality':
            m = 10
            for p in context['coalition']:
                rc = [
                    member.residence_centrality
                    for member in p.members.all()
                    if member.residence_centrality
                ]
                if rc:
                    p.extra = round(float(sum(rc)) / len(rc), 1)
                else:
                    p.extra = 0
                if p.extra < m:
                    m = p.extra
            for p in context['opposition']:
                rc = [
                    member.residence_centrality
                    for member in p.members.all()
                    if member.residence_centrality
                ]
                if rc:
                    p.extra = round(float(sum(rc)) / len(rc), 1)
                else:
                    p.extra = 0
                if p.extra < m:
                    m = p.extra
            context['friend_pages'][4][2] = True
            context['norm_factor'] = (10.0 - m) / 15
            context['baseline'] = m - 1
            context['title'] = "%s" % (
                _('Parties by residence centrality'))
        # Mean residence economy of members; same shape as centrality.
        if info == 'residence-economy':
            m = 10
            for p in context['coalition']:
                rc = [
                    member.residence_economy
                    for member in p.members.all()
                    if member.residence_economy
                ]
                if rc:
                    p.extra = round(float(sum(rc)) / len(rc), 1)
                else:
                    p.extra = 0
                if p.extra < m:
                    m = p.extra
            for p in context['opposition']:
                rc = [
                    member.residence_economy
                    for member in p.members.all()
                    if member.residence_economy
                ]
                if rc:
                    p.extra = round(float(sum(rc)) / len(rc), 1)
                else:
                    p.extra = 0
                if p.extra < m:
                    m = p.extra
            context['friend_pages'][5][2] = True
            context['norm_factor'] = (10.0 - m) / 15
            context['baseline'] = m - 1
            context['title'] = "%s" % (_('Parties by residence economy'))
        # Distinct bills proposed per seat; m starts high, tracks the MIN.
        if info == 'bills-proposed':
            m = 9999
            for p in context['coalition']:
                p.extra = len(
                    set(
                        Bill.objects.filter(
                            proposers__current_party=p).values_list(
                                'id', flat=True))) / p.number_of_seats
                if p.extra < m:
                    m = p.extra
            for p in context['opposition']:
                p.extra = len(
                    set(
                        Bill.objects.filter(
                            proposers__current_party=p).values_list(
                                'id', flat=True))) / p.number_of_seats
                if p.extra < m:
                    m = p.extra
            context['friend_pages'][6][2] = True
            context['norm_factor'] = m / 2
            context['baseline'] = 0
            context['title'] = "%s" % (
                _('Parties by number of bills proposed per seat'))
        # Bills past the pre-vote stage (stages 2-6) per seat.
        if info == 'bills-pre':
            m = 9999
            for p in context['coalition']:
                p.extra = round(
                    float(
                        len(
                            set(
                                Bill.objects.filter(
                                    Q(proposers__current_party=p),
                                    Q(stage='2') | Q(stage='3')
                                    | Q(stage='4') | Q(stage='5')
                                    | Q(stage='6')).values_list(
                                        'id', flat=True)))) /
                    p.number_of_seats, 1)
                if p.extra < m:
                    m = p.extra
            for p in context['opposition']:
                p.extra = round(
                    float(
                        len(
                            set(
                                Bill.objects.filter(
                                    Q(proposers__current_party=p),
                                    Q(stage='2') | Q(stage='3')
                                    | Q(stage='4') | Q(stage='5')
                                    | Q(stage='6')).values_list(
                                        'id', flat=True)))) /
                    p.number_of_seats, 1)
                if p.extra < m:
                    m = p.extra
            context['friend_pages'][7][2] = True
            context['norm_factor'] = m / 2
            context['baseline'] = 0
            context['title'] = "%s" % (
                _('Parties by number of bills passed pre vote per seat'))
        # Bills past the first vote (stages 4-6) per seat.
        if info == 'bills-first':
            m = 9999
            for p in context['coalition']:
                p.extra = round(
                    float(
                        len(
                            set(
                                Bill.objects.filter(
                                    Q(proposers__current_party=p),
                                    Q(stage='4') | Q(stage='5')
                                    | Q(stage='6')).values_list(
                                        'id', flat=True)))) /
                    p.number_of_seats, 1)
                if p.extra < m:
                    m = p.extra
            for p in context['opposition']:
                p.extra = round(
                    float(
                        len(
                            set(
                                Bill.objects.filter(
                                    Q(proposers__current_party=p),
                                    Q(stage='4') | Q(stage='5')
                                    | Q(stage='6')).values_list(
                                        'id', flat=True)))) /
                    p.number_of_seats, 1)
                if p.extra < m:
                    m = p.extra
            context['friend_pages'][8][2] = True
            context['norm_factor'] = m / 2
            context['baseline'] = 0
            context['title'] = "%s" % (
                _('Parties by number of bills passed first vote per seat'))
        # Fully approved bills (stage 6) per seat.
        if info == 'bills-approved':
            m = 9999
            for p in context['coalition']:
                p.extra = round(
                    float(
                        len(
                            set(
                                Bill.objects.filter(
                                    proposers__current_party=p,
                                    stage='6').values_list(
                                        'id', flat=True)))) /
                    p.number_of_seats, 1)
                if p.extra < m:
                    m = p.extra
            for p in context['opposition']:
                p.extra = round(
                    float(
                        len(
                            set(
                                Bill.objects.filter(
                                    proposers__current_party=p,
                                    stage='6').values_list(
                                        'id', flat=True)))) /
                    p.number_of_seats, 1)
                if p.extra < m:
                    m = p.extra
            context['friend_pages'][9][2] = True
            context['norm_factor'] = m / 2
            context['baseline'] = 0
            context['title'] = "%s" % (
                _('Parties by number of bills passed approved per seat'))
        # Mean weekly presence hours of members (falsy values excluded).
        if info == 'presence':
            m = 9999
            for p in context['coalition']:
                awp = [
                    member.average_weekly_presence()
                    for member in p.members.all()
                    if member.average_weekly_presence()
                ]
                if awp:
                    p.extra = round(float(sum(awp)) / len(awp), 1)
                else:
                    p.extra = 0
                if p.extra < m:
                    m = p.extra
            for p in context['opposition']:
                awp = [
                    member.average_weekly_presence()
                    for member in p.members.all()
                    if member.average_weekly_presence()
                ]
                if awp:
                    p.extra = round(float(sum(awp)) / len(awp), 1)
                else:
                    p.extra = 0
                if p.extra < m:
                    m = p.extra
            context['friend_pages'][10][2] = True
            context['norm_factor'] = m / 2
            context['baseline'] = 0
            context['title'] = "%s" % (
                _('Parties by average weekly hours of presence'))
        # Mean monthly committee meetings of members.
        if info == 'committees':
            m = 9999
            for p in context['coalition']:
                cmpm = [
                    member.committee_meetings_per_month()
                    for member in p.members.all()
                    if member.committee_meetings_per_month()
                ]
                if cmpm:
                    p.extra = round(float(sum(cmpm)) / len(cmpm), 1)
                else:
                    p.extra = 0
                if p.extra < m:
                    m = p.extra
            for p in context['opposition']:
                cmpm = [
                    member.committee_meetings_per_month()
                    for member in p.members.all()
                    if member.committee_meetings_per_month()
                ]
                if cmpm:
                    p.extra = round(float(sum(cmpm)) / len(cmpm), 1)
                else:
                    p.extra = 0
                if p.extra < m:
                    m = p.extra
            context['friend_pages'][11][2] = True
            context['norm_factor'] = m / 2
            context['baseline'] = 0
            context['title'] = "%s" % (
                _('Parties by monthly committee meetings'))
    return context
def _get_total_limit(self, projects):
    """Sum the ``limit`` of price estimates scoped to *projects*.

    Estimates with ``limit == -1`` (sentinel — presumably "no limit",
    confirm against the model) are excluded. Returns 0 when *projects*
    is empty or nothing matches.
    """
    if not projects.exists():
        return 0
    limited_estimates = (
        models.PriceEstimate.objects
        .filter(scope__in=projects)
        .exclude(limit=-1)
    )
    total = limited_estimates.aggregate(Sum('limit'))['limit__sum']
    return total or 0
def ajax_calculate_category_view(request):
    # AJAX endpoint: per-category sales breakdown rendered to an HTML
    # fragment and returned as JSON under the 'result' key.
    orders = Order.filter_data(request, Order.objects.all())
    order_items = OrderItem.objects.filter(order__in=orders)
    # One row per category title: total quantity sold and total income.
    category_analysis = order_items.values_list('product__category__title').annotate(qty=Sum('qty'),
                                             total_incomes=Sum('total_price')
                                             )
    data = dict()
    # Presumably read by the template (via locals()) to switch to category
    # mode — verify against result_container.html.
    category = True
    # NOTE(review): context=locals() exposes EVERY local name to the
    # template, so renaming any variable in this function changes template
    # behavior. Prefer an explicit context dict.
    data['result'] = render_to_string(template_name='include/result_container.html',
                                      request=request,
                                      context=locals()
                                      )
    return JsonResponse(data)
def test_aggregate_over_annotation(self):
    """Aggregating an annotated alias equals aggregating the field itself."""
    via_annotation = Author.objects.annotate(
        other_age=F('age')).aggregate(otherage_sum=Sum('other_age'))
    direct = Author.objects.aggregate(age_sum=Sum('age'))
    self.assertEqual(via_annotation['otherage_sum'], direct['age_sum'])
def analysis(queryset):
    """Return ``[grand_total, per_category]`` for *queryset*.

    ``grand_total`` is the sum of ``amount`` (0 when the queryset is
    empty); ``per_category`` is an annotated queryset of per-category
    amount totals.
    """
    if queryset:
        grand_total = queryset.aggregate(Sum('amount'))['amount__sum']
    else:
        grand_total = 0
    per_category = queryset.values('category__title').annotate(
        amount=Sum('amount'))
    return [grand_total, per_category]
def create(self, validated_data):
    """Register a transfer between an external account and an operation
    account as journal postings, then — when the operation is over-financed —
    trigger the payment-request / collection flow.

    Side effects: creates Journal/Posting rows, upserts the DWH balance for
    the source account, and may create PaymentRequests and a payer record.
    """
    # TODO: move to an independent service responsible for registering
    # the transactions
    # Get the data needed for processing
    journal_transaction = JournalTransactionType.objects.get(
        id=validated_data['transaction_type'])
    from_account = Account.objects.get(
        external_account_id=validated_data['from_account']
        ['external_account_id'],
        external_account_type_id=validated_data['from_account']
        ['external_account_type'])
    # NOTE(review): external_account_type_id=3 is a magic constant for
    # operation accounts — confirm and name it.
    to_account = OperationAccount.objects.get(
        external_account_id=validated_data['to_operation_account'],
        external_account_type_id=3)
    asset_type = AssetType.objects.get(id=validated_data['asset_type'])
    # Current posted total (and rows) against the operation account,
    # captured BEFORE the new postings below are created.
    account_posting_operation_amount = Posting.objects.filter(
        account=to_account).aggregate(Sum('amount'))
    posting_operation_amount = Posting.objects.filter(account=to_account)
    # Create the journal and the balanced pair of postings
    journal = Journal.objects.create(
        batch=None, gloss="", journal_transaction=journal_transaction)
    posting_from = Posting.objects.create(
        account=from_account,
        asset_type=asset_type,
        journal=journal,
        amount=(Decimal(validated_data['amount']) * -1))
    posting_to = Posting.objects.create(account=to_account,
                                        asset_type=asset_type,
                                        journal=journal,
                                        amount=Decimal(
                                            validated_data['amount']))
    # Refresh the data-warehouse balance for the source account.
    dwh_balance_account = Posting.objects.filter(
        account=from_account).aggregate(Sum('amount'))
    DWHBalanceAccount.objects.update_or_create(
        account=from_account,
        defaults={
            'balance_account_amount': dwh_balance_account['amount__sum']
        })
    # TODO: validate that financing comes from Motor and that the operation
    # is not over-financed
    if to_account.financing_amount is not None and account_posting_operation_amount[
            'amount__sum'] is not None and to_account.financing_amount < account_posting_operation_amount[
                'amount__sum']:
        list_post = []
        for posting_operation in posting_operation_amount:
            list_post.append(model_to_dict(posting_operation))
        # TODO: publish on the Amazon messaging system
        # TODO: this is the act of registering the operation's debt with the
        # payer; register the guarantee document: collection date, collection
        # amount, document payment date, document id (may repeat), unique
        # document identifier, collection status (may be set at creation).
        try:
            # TODO: obtain the credit requester from API-Financing.
            # Generate the payment order: it must contain the operation
            # (from_account) and the requester from API-Financing
            # (to_account).
            ########################################
            # PENDING PAYMENT TO REQUESTER    ######
            ########################################
            # NOTE(review): `journal` is rebound here, so the final
            # `return model_to_dict(journal)` below refers to the LAST
            # journal created — confirm this is intended.
            journal = Journal.objects.create(
                gloss="Pago pendiente a solicitante",
                batch=None,
                journal_transaction_id=1)
            # Payment request to the requester for the financed operation
            # (negative leg)...
            payment_request = PaymentRequest.objects.create(
                journal=journal,
                amount=(to_account.financing_amount * -1),
                account_payer=from_account,
                account=to_account)
            # ...and the matching positive leg. NOTE(review): this rebinds
            # `payment_request`; only the second request is returned.
            payment_request = PaymentRequest.objects.create(
                journal=journal,
                amount=to_account.financing_amount,
                account_payer=from_account,
                account=to_account)
            ###################################################
            # DATA SENT FOR COLLECTION MANAGEMENT        ######
            ###################################################
            # COLLECTION MODULE
            payer = PayerRecordService.execute({
                'payer_name': from_account.name,
                'external_payer_id': from_account.external_account_id,
                'contact_data': '*****@*****.**'
            })
            print(payer.external_id)
            print("Servicio crear pagador terminado")
            # recordServices = CreateCollectingRecordService.execute({
            #     'collecting_amount': to_account.financing_amount,
            #     'pay_date': '2001-01-01',
            #     'document_external_id': '222123123fds1',
            #     'document_type': 1,
            #     'operation_external_id': to_account.external_account_id,
            #     'external_payer_id': from_account.external_account_id,
            #     'document_description': '234234qqweqweqqq1',
            # })
            ########################################
            # PAYMENT MADE TO REQUESTER       ######
            ########################################
            # TODO: this is the act of PAYING the requester — should be a
            # separate service.
            journal = Journal.objects.create(batch=None,
                                             gloss="Pago a solicitante",
                                             journal_transaction_id=1)
            posting_from = Posting.objects.create(
                journal=journal,
                account=to_account,
                amount=(to_account.financing_amount * -1))
            posting_to = Posting.objects.create(
                journal=journal,
                account=from_account,
                amount=to_account.financing_amount)
            return {
                "payer": payer.external_id,
                "operation_account": model_to_dict(to_account),
                "postings": list_post,
                "payment_request": model_to_dict(payment_request),
                "posting_payment": {
                    "from": model_to_dict(posting_from),
                    "to": model_to_dict(posting_to)
                }
            }
        except Exception as e:
            raise e
    return model_to_dict(journal)
def generate_excel(heading_title,
                   date_from,
                   date_to,
                   columns,
                   model,
                   select_related,
                   values_list,
                   aggregate_fields=None,
                   total_label_position=None,
                   employee_id=None,
                   is_month_filter=False,
                   consultant_id=None,
                   job_status_pk='',
                   user=None,
                   filter_expression=None,
                   empty_if_no_filter=False,
                   is_datetime=False,
                   queryset=None,
                   date_filter_expression=None):
    """Build an xlwt Workbook report for *model* (or *queryset*).

    Writes a heading block, column headers, one row per record restricted
    to the date range / contact / job status, and optionally a TOTAL row
    from *aggregate_fields* (pairs of ``(column_index, field_name)``).
    Returns the ``xlwt.Workbook`` (caller saves/streams it).
    """
    wb = xlwt.Workbook(encoding='utf-8')
    ws = wb.add_sheet(heading_title)

    # Sheet header, first rows (bold).
    row_num = 0
    font_style = xlwt.XFStyle()
    font_style.font.bold = True

    heading = [
        heading_title,
        Setting.objects.first().company_name,
        'For the period {} to {}'.format(date_from, date_to)
    ]
    contact_type = None
    contact_id = None
    if employee_id:
        contact_type = 'Employee'
        contact_id = employee_id
    if consultant_id:
        contact_type = 'Consultant'
        contact_id = consultant_id
    if contact_type:
        # NOTE(review): consultants are also looked up in Employee here —
        # confirm that is intended.
        contact = Employee.objects.filter(id=contact_id)
        if contact.exists():
            contact = contact.first()
            heading.insert(2, contact.name)
        else:
            heading.insert(2, '')

    # Write the heading block, then one blank row.
    for head in heading:
        ws.write(row_num, 0, head, font_style)
        row_num += 1
    row_num += 1

    for col_num in range(len(columns)):
        ws.write(row_num, col_num, columns[col_num], font_style)

    # Sheet body, remaining rows (plain style).
    font_style = xlwt.XFStyle()

    if queryset:
        rows = queryset.select_related(*select_related)
    else:
        rows = model.objects.all().select_related(*select_related)

    # Restrict rows to the requesting user's employee scope.
    if user:
        rows = filter_queryset_by_employee(rows, user, model,
                                           filter_expression,
                                           empty_if_no_filter)

    # BUGFIX: `data` was unbound (NameError at the `if data:` check below)
    # whenever date_from/date_to were falsy but aggregate_fields was given.
    data = None
    if date_from and date_to:
        try:
            # filter job status
            if job_status_pk:
                rows = rows.filter(status_id=job_status_pk)
            data = rows
            if date_from == 'ALL' and date_to == 'ALL':
                pass  # no date restriction
            else:
                if is_month_filter:
                    # date_from carries 'YYYY-MM' in month-filter mode.
                    year, month = date_from.split('-')
                    if not date_filter_expression:
                        date_filter_expression = Q(date__month=month,
                                                   date__year=year)
                    data = rows.filter(date_filter_expression)
                else:
                    # Two explicit dates.
                    if is_datetime:
                        data = rows.filter(date_time__gte=date_from,
                                           date_time__lte=date_to)
                    else:
                        if not date_filter_expression:
                            date_filter_expression = Q(date__gte=date_from,
                                                       date__lte=date_to)
                        data = rows.filter(date_filter_expression)
            # filter contact
            if contact_id:
                if contact_type == 'Employee':
                    data = data.filter(employee_id=contact_id)
                else:  # consultant
                    data = data.filter(
                        job_candidate__consultant_id=contact_id)
            rows = data.values_list(*values_list)  # final value of rows
        except Exception:
            # Best-effort: a bad filter yields an empty report rather than
            # a 500. NOTE(review): consider logging the exception.
            data = None
            rows = model.objects.none()

    for row in rows:
        row_num += 1
        for col_num in range(len(row)):
            val = row[col_num]
            if isinstance(val, uuid.UUID):
                val = str(val)
            if isinstance(val, datetime.datetime):
                # xlwt cannot serialize tz-aware datetimes.
                val = val.replace(tzinfo=None)
            ws.write(row_num, col_num, val, font_style)

    # Optional TOTAL row from the aggregate fields.
    if aggregate_fields:
        row_num += 1
        if data:
            sum_aggregate_fields = []
            text_aggregate_fields = []
            for i, field in aggregate_fields:
                sum_aggregate_fields.append(Sum(field))
                text_aggregate_fields.append((i, '{}__sum'.format(field)))
            sums = data.aggregate(*sum_aggregate_fields)
            ws.write(row_num, total_label_position, 'TOTAL', font_style)
            for i, field in text_aggregate_fields:
                ws.write(row_num, i, sums[field], font_style)
    return wb
def get_order_amount_pending(inward_order):
    """Return the summed ``pending_amount`` over all invoices of
    *inward_order*, or ``None`` when the order has no invoices.
    """
    invoices = Invoice.objects.filter(inward_order=inward_order.pk)
    totals = invoices.aggregate(Sum('pending_amount'))
    return totals['pending_amount__sum']
def get_order_amount_received(inward_order):
    """Sum of ``received_amount`` over the order's invoices (None if it has none)."""
    totals = Invoice.objects.filter(
        inward_order=inward_order.pk).aggregate(Sum('received_amount'))
    return totals['received_amount__sum']
def fecha_estimada(self):
    """Estimated date: order start plus the summed estimated days of all
    activities up to (and including) this activity's position."""
    preceding = Actividad.objects.filter(
        orden=self.orden, poscicion__lte=self.poscicion)
    dias = preceding.aggregate(dias=Sum('dias_estimados'))['dias'] or 0
    return self.orden.fecha + timedelta(days=dias)
def test_filter_agg_with_double_f(self):
    """Filtering an annotation against F() of itself must behave as a no-op
    filter: every surviving row satisfies sum_rating == rating."""
    annotated = Book.objects.annotate(sum_rating=Sum('rating'))
    for book in annotated.filter(sum_rating=F('sum_rating')):
        self.assertEqual(book.sum_rating, book.rating)
def post(self, request, *args, **kwargs):
    """Dispatch trading-record form submissions keyed by POST 'form_type'.

    Branches:
      * 'new_game' / 'delete_game' — flash a message and fall through to the
        final redirect to 'record'.
      * 'save_entry', 'delete_entry', 'new_exit', 'delete_exit',
        'game_complete' — AJAX-style; each returns a JsonResponse.
    Unknown form types fall through to the redirect.
    """
    formtype = request.POST.get('form_type')
    if formtype == 'new_game':
        form = GameForm(request.POST)
        if form.is_valid():
            pname = form.cleaned_data['name']
            if not Product.objects.filter(name=pname).exists():
                # A game may only reference an already-registered product.
                msg = pname + " is Not registered in DB"
                messages.add_message(request, messages.WARNING, msg)
            else:
                game = form.save(commit=False)
                game.product = Product.objects.get(name=pname)
                game.save()
        else:
            msg = "Form is NOT valid"
            messages.add_message(request, messages.WARNING, msg)
    elif formtype == 'delete_game':
        if 'id' in request.POST:
            try:
                game = Game.objects.get(pk=request.POST.get('id'))
                game.delete()
            except ObjectDoesNotExist:
                print("Game doesn't exist")
                msg = "Game doesn't exist"
                messages.add_message(request, messages.WARNING, msg)
        else:
            msg = "Form is NOT valid ('ID' attribute not in the form)"
            messages.add_message(request, messages.WARNING, msg)
    elif formtype == 'save_entry':
        form = EntryForm(request.POST)
        if form.is_valid():
            game = Game.objects.get(pk=request.POST.get('game_id'))
            if form.cleaned_data['entry_price'] < 0 or form.cleaned_data['loss_cut'] < 0:
                msg = "Price must be positive number"
            elif (form.cleaned_data['entry_price']-form.cleaned_data['loss_cut'])*game.position <= 0:
                # Loss cut must lie on the loss side of the entry price.
                # NOTE(review): game.position presumably encodes long/short
                # direction by sign — confirm against the Game model.
                pos = "smaller" if game.position else "greater"
                msg = "Loss cut must be %s than entry price"%pos
            #elif game.entry_set.count() and \
            #    form.cleaned_data['entry_date'] < game.entry_set.order_by('-entry_date')[0].entry_date:
            #    msg = "Entry date must be newest"
            else:
                entry = form.save(commit=False)
                entry.game = Game.objects.get(pk=request.POST.get('game_id'))
                entry.code = Code.objects.get(code=request.POST.get('code'))
                entry.save()
                data = self.get_game_detail_context(entry.game.id)
                return JsonResponse({'succeed': True, 'data': data})
        else:
            msg = 'Form is not valid'
        # Reached only on validation failure (success returned above).
        return JsonResponse({'succeed': False, 'msg': msg})
    elif formtype == 'delete_entry':
        if 'id' in request.POST:
            try:
                entry = Entry.objects.get(pk=request.POST.get('id'))
                entry.delete()
                # Re-save the parent game after the mutation — presumably
                # Game.save() refreshes derived fields; TODO confirm.
                entry.game.save()
                succeed = True
                data = self.get_game_detail_context(entry.game.id)
            except ObjectDoesNotExist:
                print("Game doesn't exist")
                succeed = False
                data = 'No mathcing entry found'
        else:
            succeed = False
            data = "Form is NOT valid ('ID' attribute not in the form)"
        return JsonResponse({'succeed': succeed, 'data': data})
    elif formtype == 'new_exit':
        form = ExitForm(request.POST)
        if form.is_valid():
            game = Game.objects.get(pk=request.POST.get('game_id'))
            entry = game.entry_set.get(pk=request.POST.get('entry_id'))
            # Total number of contracts already exited for this entry.
            cons = entry.exit_set.aggregate(sum=Sum('contracts')).get('sum')
            cons = cons if cons else 0
            if request.POST.get('exit_id'):
                # Editing an existing exit: exclude its own contracts from
                # the already-exited tally.
                exit = Exit.objects.get(pk=request.POST.get('exit_id'))
                cons = cons - exit.contracts
            if form.cleaned_data['exit_price'] < 0:
                succeed = False
                data = "Price must be positive number"
            elif form.cleaned_data['contracts'] + cons > entry.contracts:
                succeed = False
                data = "Total contracts can NOT be greater than %s (current: %s)"%(entry.contracts, cons)
            elif 'exit' in locals():
                # 'exit' is bound only when an exit_id was posted above, so
                # this is the update path (fragile locals() idiom).
                exit.exit_price = form.cleaned_data['exit_price']
                exit.contracts = form.cleaned_data['contracts']
                exit.exit_date = form.cleaned_data['exit_date']
                exit.save()
                exit.game.save()
                succeed = True
                data = self.get_game_detail_context(game.id)
            else:
                # Create path: brand-new exit for this entry.
                exit = form.save(commit=False)
                exit.game = Game.objects.get(pk=request.POST.get('game_id'))
                exit.entry = entry
                exit.save()
                exit.game.save()
                succeed = True
                data = self.get_game_detail_context(game.id)
        else:
            succeed = False
            data = 'Form is not valid'
        return JsonResponse({'succeed': succeed, 'data': data})
    elif formtype == 'delete_exit':
        if 'id' in request.POST:
            try:
                exit = Exit.objects.get(pk=request.POST.get('id'))
                exit.delete()
                exit.game.save()
                succeed = True
                data = self.get_game_detail_context(exit.game.id)
            except ObjectDoesNotExist:
                print("Game doesn't exist")
                succeed = False
                data = 'No mathcing entry found'
        else:
            succeed = False
            data = "Form is NOT valid ('ID' attribute not in the form)"
        return JsonResponse({'succeed': succeed, 'data': data})
    elif formtype == 'game_complete':
        if 'id' in request.POST:
            try:
                game = Game.objects.get(pk=request.POST.get('id'))
                flag = True if request.POST.get('is_completed') == 'true' else False
                game.is_completed = flag
                game.save()
                succeed = True
                data = "game completion state changed"
            except ObjectDoesNotExist:
                print("Game doesn't exist")
                succeed = False
                data = 'No mathcing entry found'
        else:
            succeed = False
            data = "Form is NOT valid ('ID' attribute not in the form)"
        return JsonResponse({'succeed': succeed, 'data': data})
    # Non-AJAX branches (and unknown form types) end here.
    return redirect('record')
def test_filter_wrong_annotation(self):
    """Referencing a nonexistent name via F() inside filter() must raise
    FieldError with a 'Cannot resolve keyword' message."""
    with six.assertRaisesRegex(self, FieldError, "Cannot resolve keyword .*"):
        annotated = Book.objects.annotate(sum_rating=Sum('rating'))
        list(annotated.filter(sum_rating=F('nope')))
def handle(self, **options):
    """Compute site-wide statistics (sounds, downloads, users, active users,
    donations, tags, totals) and cache each group for 24 hours."""
    logger.info("Started computing stats")
    time_span = datetime.datetime.now() - datetime.timedelta(weeks=2)
    # Stats related to sounds: per-day counts of moderated/processed sounds
    # created in the last two weeks.
    new_sounds_mod = sounds.models.Sound.objects\
        .filter(created__gt=time_span, moderation_date__isnull=False)\
        .extra(select={'day': 'date(moderation_date)'}).values('day')\
        .order_by().annotate(Count('id'))
    new_sounds = sounds.models.Sound.objects\
        .filter(created__gt=time_span, processing_date__isnull=False)\
        .extra(select={'day': 'date(processing_date)'}).values('day')\
        .order_by().annotate(Count('id'))
    sounds_stats = {
        "new_sounds_mod": list(new_sounds_mod),
        "new_sounds": list(new_sounds)
    }
    cache.set("sounds_stats", sounds_stats, 60 * 60 * 24)
    # Stats related to downloads: per-day sound downloads; pack downloads
    # are weighted by the pack's number of sounds (hence the Sum, but kept
    # under the 'id__count' key so both series share a schema).
    new_downloads_sound = sounds.models.Download.objects\
        .filter(created__gt=time_span)\
        .extra({'day': 'date(created)'}).values('day').order_by()\
        .annotate(Count('id'))
    new_downloads_pack = sounds.models.PackDownload.objects\
        .filter(created__gt=time_span)\
        .extra({'day': 'date("sounds_packdownload".created)'}).values('day').order_by()\
        .annotate(id__count=Sum('pack__num_sounds'))
    downloads_stats = {
        'new_downloads_sound': list(new_downloads_sound),
        'new_downloads_pack': list(new_downloads_pack),
    }
    cache.set("downloads_stats", downloads_stats, 60 * 60 * 24)
    # Stats related to users: per-day sign-ups split by is_active.
    new_users = User.objects.filter(date_joined__gt=time_span)\
        .extra(select={'day': 'date(date_joined)'})\
        .values('day', 'is_active').order_by().annotate(Count('id'))
    cache.set("users_stats", {"new_users": list(new_users)}, 60 * 60 * 24)
    # Active users per ISO week over the last year, for several activity
    # kinds. Each entry's queryset+attr pair is replaced in place by the
    # converted per-week list.
    time_span = datetime.datetime.now() - datetime.timedelta(days=365)
    active_users = {
        'sounds': {
            'obj': sounds.models.Sound.objects,
            'attr': 'user_id'
        },
        'comments': {
            'obj': comments.models.Comment.objects,
            'attr': 'user_id'
        },
        'posts': {
            'obj': forum.models.Post.objects,
            'attr': 'author_id'
        },
        'sound_downloads': {
            'obj': sounds.models.Download.objects,
            'attr': 'user_id'
        },
        'pack_downloads': {
            'obj': sounds.models.PackDownload.objects,
            'attr': 'user_id'
        },
        'rate': {
            'obj': ratings.models.SoundRating.objects,
            'attr': 'user_id'
        },
    }
    for i in active_users.keys():
        # Distinct users per 'WW-IYYY' week label, then convert the label
        # back to a concrete date string for plotting.
        qq = active_users[i]['obj'].filter(created__gt=time_span)\
            .extra({'week': "to_char(created, 'WW-IYYY')"})\
            .values('week').order_by()\
            .annotate(Count(active_users[i]['attr'], distinct=True))
        converted_weeks = [{
            'week': str(
                datetime.datetime.strptime(d['week'] + '-0',
                                           "%W-%Y-%w").date()),
            'amount__sum': d[active_users[i]['attr'] + '__count']
        } for d in qq]
        active_users[i] = converted_weeks
    cache.set("active_users_stats", active_users, 60 * 60 * 24)
    # Stats related to donations (NOTE: still using the 365-day time_span).
    query_donations = donations.models.Donation.objects\
        .filter(created__gt=time_span)\
        .extra({'day': 'date(created)'}).values('day').order_by()\
        .annotate(Sum('amount'))
    cache.set('donations_stats', {'new_donations': list(query_donations)},
              60 * 60 * 24)
    # Stats related to tags: most-used recently, most-used overall, and
    # most-downloaded (raw SQL join against sounds_download).
    time_span = datetime.datetime.now() - datetime.timedelta(weeks=2)
    tags_stats = TaggedItem.objects.values('tag_id')\
        .filter(created__gt=time_span).annotate(num=Count('tag_id'))\
        .values('num', 'tag__name').order_by('-num')[:300]
    # Most used tags for tags cloud
    all_tags = TaggedItem.objects.values('tag_id')\
        .annotate(num=Count('tag_id'))\
        .values('num', 'tag__name').order_by('-num')[:300]
    with connection.cursor() as cursor:
        cursor.execute(\
            """SELECT count(*) as num_c, t.name, ti.tag_id as id FROM tags_taggeditem ti, tags_tag t, sounds_download d WHERE d.sound_id = ti.object_id AND t.id = ti.tag_id AND d.created > current_date - interval '14 days' GROUP BY ti.tag_id, t.name ORDER BY num_c DESC limit 300""")
        downloads_tags = cursor.fetchall()
    tags_stats = {
        "tags_stats": list(tags_stats),
        "all_tags": list(all_tags),
        "downloads_tags": list(downloads_tags)
    }
    cache.set('tags_stats', tags_stats, 60 * 60 * 24)
    # Totals table.
    users = User.objects.filter(is_active=True)
    users_num = users.count()
    users_with_sounds = users.filter(profile__num_sounds__gt=0).count()
    num_donations = donations.models.Donation.objects\
        .aggregate(Sum('amount'))['amount__sum']
    time_span = datetime.datetime.now() - datetime.timedelta(30)
    sum_donations_month = donations.models.Donation.objects\
        .filter(created__gt=time_span).aggregate(Sum('amount'))['amount__sum']
    num_sounds = sounds.models.Sound.objects.filter(
        processing_state="OK", moderation_state="OK").count()
    packs = sounds.models.Pack.objects.all().count()
    downloads_sounds = sounds.models.Download.objects.count()
    downloads_packs = sounds.models.PackDownload.objects.all().count()
    downloads = downloads_sounds + downloads_packs
    num_comments = comments.models.Comment.objects.all().count()
    num_ratings = ratings.models.SoundRating.objects.all().count()
    tags = Tag.objects.all().count()
    tags_used = TaggedItem.objects.all().count()
    posts = forum.models.Post.objects.all().count()
    threads = forum.models.Thread.objects.all().count()
    totals_stats = {
        "total_users": users_num,
        "users_with_sounds": users_with_sounds,
        "total_donations": num_donations,
        "donations_last_month": sum_donations_month,
        "sounds": num_sounds,
        "packs": packs,
        "downloads": downloads,
        "comments": num_comments,
        "ratings": num_ratings,
        "tags": tags,
        "tags_used": tags_used,
        "posts": posts,
        "threads": threads,
    }
    logger.info("Finished computing stats")
    cache.set('totals_stats', totals_stats, 60 * 60 * 24)
def get_centre_students_data(from_date, to_date):
    """Build chart labels and datasets of waiting vs placed students per
    centre for the period [from_date, to_date].

    Returns (labels, datasets) where labels are centre names and each
    dataset is a chart.js-style dict.
    """
    # Take the first centres by id. NOTE(review): the original comment said
    # "at most 3 centres" but the slice keeps up to 4 — confirm intent.
    centres = Centre.objects.all().order_by('id')[:4].values('id', 'name')
    number_days = (to_date - from_date).days
    number_weeks = int(number_days / 7)
    sum_seats = ClassRoom.objects.all().values('centre_id')\
        .annotate(sum_seats=Sum('size')).values('centre_id', 'sum_seats')
    for i in range(len(sum_seats)):
        # Capacity heuristic: seats * 4 * 6 * weeks — presumably shifts/day
        # and days/week; TODO confirm the multipliers. The len() call above
        # evaluates and caches the queryset, which is what makes this
        # in-place mutation of its result dicts stick.
        sum_seats[i][
            'max_students'] = sum_seats[i]['sum_seats'] * 4 * 6 * number_weeks
    # Per centre: count students waiting (state=1) and placed (state=2)
    # among ClassesStudents created in the period.
    number_cls_students = ClassesStudents.objects.filter(
        created_date_time__gte=from_date, created_date_time__lte=to_date) \
        .annotate(centre_id=F('classes__centre__id'))\
        .values('centre_id').annotate(
            count_waiting_students=Count(Case(
                When(state=1, then=1),
                output_field=IntegerField(),
            ))).annotate(
            count_cls_student=Count(Case(
                When(state=2, then=1),
                output_field=IntegerField(),
            ))).values('centre_id', 'count_waiting_students',
                       'count_cls_student')
    # attendance_info = StudyShiftStudent.objects.all() \
    #     .annotate(centre_id=F('student__user__centre__id'))\
    #     .values('centre_id').annotate(
    #         count_attendance=Count(Case(
    #             When(attendance=True, then=1),
    #             output_field=IntegerField(),
    #         ))).values('centre_id', 'count_attendance')
    datasets = []
    data_waiting_s = []
    data_cls_s = []
    data_attendance_s = []
    data_max_s = []
    for centre in centres:
        waiting_s = 0
        cls_s = 0
        attendance_s = 0
        max_s = 0
        # Align the aggregate rows with this centre (missing rows -> 0).
        for sum_s in sum_seats:
            if sum_s['centre_id'] == centre['id']:
                max_s = sum_s['max_students']
        data_max_s.append(max_s)
        for ncs in number_cls_students:
            if ncs['centre_id'] == centre['id']:
                waiting_s = ncs['count_waiting_students']
                cls_s = ncs['count_cls_student']
        data_waiting_s.append(waiting_s)
        data_cls_s.append(cls_s)
        # for ai in attendance_info:
        #     if ai['centre_id'] == centre['id']:
        #         attendance_s = ai['count_attendance']
        data_attendance_s.append(attendance_s)
    # Dataset labels are Vietnamese UI strings: "students waiting for a
    # class" and "students placed in a class".
    datasets.append({
        'label': 'Số SV chờ lớp',
        'backgroundColor': "#6495ED",
        'data': data_waiting_s
    })
    datasets.append({
        'label': 'Số SV được xếp lớp',
        'backgroundColor': "#9FE2BF",
        'data': data_cls_s
    })
    # datasets.append({'label': 'Số SV đi học', 'backgroundColor': "#40E0D0", 'data': data_attendance_s})
    # datasets.append({'label': 'Tổng số SV tối đa', 'backgroundColor': "#CCCCFF", 'data': data_max_s})
    labels = list(centres.values_list('name', flat=True))
    return labels, datasets
def get_sum_total(self):
    """Sum over this competencia's conductas of each conducta's highest
    ``conductavalor.valor`` (None when there are no conductas)."""
    per_conducta = (Conducta.objects.filter(competencia=self)
                    .values('id')
                    .annotate(v=Max('conductavalor__valor')))
    return per_conducta.aggregate(Sum('v'))['v__sum']
def assigned(self):
    """Total amount assigned across income rows; 0 when there are none."""
    from django.db.models import Sum
    total = self.incomerow_set.aggregate(sum=Sum('amount'))['sum']
    return 0 if total is None else total
def get_financial_findings():
    """Assemble the financial-findings dashboard rows.

    Every figure aggregates FINAL-status Audit engagements whose draft
    report went to UNICEF in the current calendar year (one block repeats
    the computation for the prior year). Sums are wrapped in Coalesce so an
    empty queryset yields 0, not None.

    Returns a list of {'name', 'value', 'highlighted'} dicts in display
    order.
    """
    refunds = Audit.objects.filter(
        amount_refunded__isnull=False,
        status=Engagement.FINAL,
        date_of_draft_report_to_unicef__year=datetime.now().year
    ).aggregate(total=Coalesce(Sum('amount_refunded'), 0))['total']
    additional_supporting_document_provided = Audit.objects.filter(
        date_of_draft_report_to_unicef__year=datetime.now().year,
        additional_supporting_documentation_provided__isnull=False,
        status=Engagement.FINAL).aggregate(total=Coalesce(
            Sum('additional_supporting_documentation_provided'),
            0))['total']
    justification_provided_and_accepted = Audit.objects.filter(
        date_of_draft_report_to_unicef__year=datetime.now().year,
        status=Engagement.FINAL,
        justification_provided_and_accepted__isnull=False).aggregate(
            total=Coalesce(Sum('justification_provided_and_accepted'),
                           0))['total']
    impairment = Audit.objects.filter(
        status=Engagement.FINAL,
        write_off_required__isnull=False,
        date_of_draft_report_to_unicef__year=datetime.now().year
    ).aggregate(total=Coalesce(Sum('write_off_required'), 0))['total']
    # pending_unsupported_amount property
    # Outstanding = findings minus refunds, extra documentation, and
    # write-offs, mirroring the model's per-audit property at the
    # aggregate level.
    outstanding_audits = Audit.objects.filter(
        status=Engagement.FINAL,
        date_of_draft_report_to_unicef__year=datetime.now().year)
    _ff = outstanding_audits.filter(
        financial_findings__isnull=False).aggregate(
            total=Coalesce(Sum('financial_findings'), 0))['total']
    _ar = outstanding_audits.filter(
        amount_refunded__isnull=False).aggregate(
            total=Coalesce(Sum('amount_refunded'), 0))['total']
    _asdp = outstanding_audits.filter(
        additional_supporting_documentation_provided__isnull=False
    ).aggregate(total=Coalesce(
        Sum('additional_supporting_documentation_provided'), 0))['total']
    _wor = outstanding_audits.filter(
        write_off_required__isnull=False).aggregate(
            total=Coalesce(Sum('write_off_required'), 0))['total']
    outstanding = _ff - _ar - _asdp - _wor
    # Same computation for the prior year.
    outstanding_audits_y1 = Audit.objects.filter(
        status=Engagement.FINAL,
        date_of_draft_report_to_unicef__year=datetime.now().year - 1)
    _ff_y1 = outstanding_audits_y1.filter(
        financial_findings__isnull=False).aggregate(
            total=Coalesce(Sum('financial_findings'), 0))['total']
    _ar_y1 = outstanding_audits_y1.filter(
        amount_refunded__isnull=False).aggregate(
            total=Coalesce(Sum('amount_refunded'), 0))['total']
    _asdp_y1 = outstanding_audits_y1.filter(
        additional_supporting_documentation_provided__isnull=False
    ).aggregate(total=Coalesce(
        Sum('additional_supporting_documentation_provided'), 0))['total']
    _wor_y1 = outstanding_audits_y1.filter(
        write_off_required__isnull=False).aggregate(
            total=Coalesce(Sum('write_off_required'), 0))['total']
    outstanding_y1 = _ff_y1 - _ar_y1 - _asdp_y1 - _wor_y1
    total_financial_findings = Audit.objects.filter(
        date_of_draft_report_to_unicef__year=datetime.now().year,
        financial_findings__isnull=False,
        status=Engagement.FINAL).aggregate(
            total=Coalesce(Sum('financial_findings'), 0))['total']
    total_audited_expenditure = Audit.objects.filter(
        date_of_draft_report_to_unicef__year=datetime.now().year,
        audited_expenditure__isnull=False,
        status=Engagement.FINAL).aggregate(
            total=Coalesce(Sum('audited_expenditure'), 0))['total']
    return [{
        'name': 'Total Audited Expenditure',
        'value': total_audited_expenditure,
        'highlighted': False,
    }, {
        'name': 'Total Financial Findings',
        'value': total_financial_findings,
        'highlighted': True,
    }, {
        'name': 'Refunds',
        'value': refunds,
        'highlighted': False,
    }, {
        'name': 'Additional Supporting Documentation Received',
        'value': additional_supporting_document_provided,
        'highlighted': False,
    }, {
        'name': 'Justification Provided and Accepted',
        'value': justification_provided_and_accepted,
        'highlighted': False,
    }, {
        'name': 'Impairment',
        'value': impairment,
        'highlighted': False,
    }, {
        'name': 'Outstanding current year (Requires Follow-up)',
        'value': outstanding,
        'highlighted': True,
    }, {
        'name': 'Outstanding prior year',
        'value': outstanding_y1,
        'highlighted': True,
    }]
def paid(self):
    """Total paid across this object's income rows, coalesced to 0."""
    total = self.incomerow_set.all().aggregate(paid=Sum('amount'))['paid']
    return total or 0
def stokAkhir(self):
    """Closing stock for this item: sum of ``jumlah`` across its
    transaction detail rows.

    Returns 0 instead of None when the item has no detail rows, matching
    the coalescing convention of the other aggregate helpers in this file
    (and keeping callers' arithmetic safe).
    """
    total = DetailTransaksi.objects.filter(
        barang=self).aggregate(Sum('jumlah'))['jumlah__sum']
    return total or 0
def statistics():
    """Gather statistics about users and their inboxes.

    Aggregates user, inbox, and email counts into three dicts, computes a
    monotonic running total of emails relative to the previous Statistic
    row, and persists everything as a new models.Statistic record.
    """
    try:
        last_stat = models.Statistic.objects.latest("date")
    except models.Statistic.DoesNotExist:
        last_stat = None
    # the keys of these dictionaries have awful names for historical reasons
    # don't change them unless you want to do a data migration
    one_day_ago = timezone.now() - timedelta(days=1)
    user_aggregate = {
        "count": Count("id", distinct=True),
        # Users who joined within the last day.
        "new": Coalesce(
            Count(
                Case(When(date_joined__gte=one_day_ago, then=F("id")), ),
                distinct=True,
            ), 0),
        "oldest_user_joined": Min("date_joined"),
        # Users owning at least one inbox.
        "with_inboxes": Coalesce(
            Count(
                Case(When(inbox__isnull=False, then=F("id")), ),
                distinct=True,
            ), 0),
    }
    inbox_aggregate = {
        "inbox_count__avg": Coalesce(Avg("inbox_count"), 0),
        "inbox_count__max": Coalesce(Max("inbox_count"), 0),
        "inbox_count__min": Coalesce(Min("inbox_count"), 0),
        "inbox_count__stddev": Coalesce(StdDev("inbox_count"), 0),
        "inbox_count__sum": Coalesce(Sum("inbox_count"), 0),
    }
    email_aggregate = {
        "email_count__avg": Coalesce(Avg("email_count"), 0),
        "email_count__max": Coalesce(Max("email_count"), 0),
        "email_count__min": Coalesce(Min("email_count"), 0),
        "email_count__stddev": Coalesce(StdDev("email_count"), 0),
        "email_count__sum": Coalesce(Sum("email_count"), 0),
    }
    # collect user and inbox stats
    users = get_user_model().objects.aggregate(**user_aggregate)
    inboxes = get_user_model().objects.annotate(
        inbox_count=Count("inbox__id")).aggregate(**inbox_aggregate)
    # Theoretical address space: choices^length per available domain.
    domain_count = models.Domain.objects.available(None).count()
    inboxes_possible = len(settings.INBOX_CHOICES)**settings.INBOX_LENGTH
    inboxes["total_possible"] = inboxes_possible * domain_count
    # collect email state (deleted inboxes excluded)
    inbox_qs = models.Inbox.objects.exclude(
        flags=models.Inbox.flags.deleted).annotate(
            email_count=Count("email__id"))
    emails = inbox_qs.aggregate(**email_aggregate)
    inboxes["with_emails"] = inbox_qs.exclude(email_count=0).count()
    inboxes["disowned"] = models.Inbox.objects.filter(
        user__isnull=True).count()
    emails["emails_read"] = models.Email.objects.filter(
        flags=models.Email.flags.read).count()
    if last_stat:
        # Running total only ever grows: negative diffs (e.g. mass
        # deletions) are clamped to 0.
        email_diff = (emails["email_count__sum"] or 0) - (
            last_stat.emails["email_count__sum"] or 0)
        emails["running_total"] = last_stat.emails["running_total"] + max(
            email_diff, 0)
    else:
        emails["running_total"] = emails["email_count__sum"] or 0
    stat = models.Statistic(
        users=users,
        emails=emails,
        inboxes=inboxes,
    )
    stat.save()
    log.info("Saved statistics (%s)", stat.date)
def despesa_sum(request):
    """Context processor exposing the grand total of all Despesa values.

    NOTE(review): 'soma_despesa' holds the raw aggregate dict
    ({'des_valordespesa__sum': ...}), not the bare number — templates must
    index into it; confirm that is intentional.
    """
    totals = Despesa.objects.all().aggregate(Sum('des_valordespesa'))
    return {'soma_despesa': totals}
def recommended_users_to_follow_according_to_questionaire(user):
    """Recommend users to follow based on the questionnaire answers.

    Question 1 (answer ids 1-5) sets the minimum minutes of activity a
    recommended user must have; question 2 (answer ids 6-11) maps to lists
    of channel ids the user is interested in. A candidate qualifies when
    their total yap + reyap length across those channels reaches the
    minimum. Falls back to the full active-Recommended list whenever the
    questionnaire is incomplete or no candidate qualifies.
    """
    def _active_recommended_users():
        # All users currently flagged as recommended.
        return [r.user for r in Recommended.objects.filter(is_active=True)]

    # Incomplete questionnaire -> nothing to score against; return everyone.
    if not Answer.objects.filter(user=user, is_active=True).exists():
        return _active_recommended_users()
    if not Answer.objects.filter(user=user, question__question_id=1,
                                 is_active=True).exists():
        return _active_recommended_users()
    if not Answer.objects.filter(user=user, question__question_id=2,
                                 is_active=True).exists():
        return _active_recommended_users()

    answer_to_question_1 = Answer.objects.get(
        user=user, question__question_id=1, is_active=True)
    answers_to_question_2 = Answer.objects.filter(
        user=user, question__question_id=2, is_active=True)

    # Q1 answer id -> minimum minutes of activity required.
    minimum_by_answer = {1: 15, 5: 15, 2: 30, 3: 60, 4: 120}
    minimum_time_yapped = minimum_by_answer.get(
        answer_to_question_1.chosen_answer.question_possible_answer_id)

    # Q2 answer id -> channel ids of interest (unknown answers contribute
    # nothing, as in the original).
    channels_by_answer = {
        6: [2, 8],
        7: [4, 5, 23, 13, 14],
        8: [10, 16],
        9: [11],
        10: [3, 15, 18],
        11: [20],
    }
    channels_interested_in = []
    for answer in answers_to_question_2:
        channels_interested_in.extend(channels_by_answer.get(
            answer.chosen_answer.question_possible_answer_id, []))

    users_that_are_recommended = _active_recommended_users()
    users_to_recommend = []
    for candidate in users_that_are_recommended:
        # Candidates with no active yaps/reyaps at all can never reach the
        # minimum (always >= 15), so skip them up front.
        if (Yap.objects.filter(is_active=True, user=candidate).count()
                + Reyap.objects.filter(is_active=True,
                                       user=candidate).count()) == 0:
            continue
        time_yapped_in_channels_interested = 0
        for channel_id in channels_interested_in:
            channel = Channel.objects.get(pk=channel_id)
            # Coalesce each Sum to 0: a candidate with no activity in this
            # particular channel previously caused a None + int TypeError.
            yap_time = Yap.objects.filter(
                is_active=True, user=candidate,
                channel=channel).aggregate(
                    Sum('length'))['length__sum'] or 0
            # BUGFIX: the original aggregated Sum('length') on Reyap but
            # read 'yap__length__sum', a key that aggregate never produces
            # (KeyError). Aggregate the related yap's length, matching the
            # key the code always intended to read.
            reyap_time = Reyap.objects.filter(
                is_active=True, user=candidate,
                yap__channel=channel).aggregate(
                    Sum('yap__length'))['yap__length__sum'] or 0
            time_yapped_in_channels_interested += yap_time + reyap_time
        if time_yapped_in_channels_interested >= minimum_time_yapped:
            users_to_recommend.append(candidate)

    # Nobody qualified: fall back to the full recommended list.
    if not users_to_recommend:
        users_to_recommend = users_that_are_recommended
    return users_to_recommend
Sum('stoim'), Sum('netto')) for i in range(len(format_dates_list) - 1)] aggregate_data_exp = [ selected_item_data.filter(period__range=[format_dates_list[i], format_dates_list[i + 1]], napr='ЭК', ).aggregate( Sum('stoim'), Sum('netto')) for i in range(len(format_dates_list) - 1)] country_data['imp']['cost'] = [int(agg_item) if (agg_item := i['stoim__sum']) else 0 for i in aggregate_data_imp] country_data['imp']['weight'] = [int(agg_item) if (agg_item := i['netto__sum']) else 0 for i in aggregate_data_imp] country_data['exp']['cost'] = [int(agg_item) if (agg_item := i['stoim__sum']) else 0 for i in aggregate_data_exp] country_data['exp']['weight'] = [int(agg_item) if (agg_item := i['netto__sum']) else 0 for i in aggregate_data_exp] format_dates_list.pop(0) extend_data = [] for item in q_objects.children: aggregate_data_imp = StatisticData.objects.filter( napr='ИМ', period__range=[format_dates_list[0], format_dates_list[-1]], **{item[0]: item[1], **filter_dict}).aggregate(Sum('stoim'), Sum('netto')) aggregate_data_exp = StatisticData.objects.filter( napr='ИМ', period__range=[format_dates_list[0], format_dates_list[-1]], **{item[0]: item[1], **filter_dict}).aggregate(Sum('stoim'), Sum('netto')) extend_data.append({ 'item': item[1], 'imp': { 'cost': int(agg_item) if (agg_item := aggregate_data_imp['stoim__sum']) else 0, 'weight': int(agg_item) if (agg_item := aggregate_data_imp['netto__sum']) else 0, }, 'exp': { 'cost': int(agg_item) if (agg_item := aggregate_data_exp['stoim__sum']) else 0, 'weight': int(agg_item) if (agg_item := aggregate_data_exp['netto__sum']) else 0, }, })
def total_queryset(self):
    """Queryset of per-TAS disaster-spending totals.

    Filters FinancialAccountsByProgramActivityObjectClass rows to the
    provided DEF codes / non-zero spending / closed submissions (mixin
    predicates), requires a treasury account with a federal account, and
    annotates obligation, outlay, and total budgetary resources per
    treasury account.
    """
    filters = [
        self.is_in_provided_def_codes,
        self.is_non_zero_total_spending,
        self.all_closed_defc_submissions,
        Q(treasury_account__isnull=False),
        Q(treasury_account__federal_account__isnull=False),
    ]
    annotations = {
        "fa_code": F("treasury_account__federal_account__federal_account_code"),
        "description": F("treasury_account__account_title"),
        "code": F("treasury_account__tas_rendering_label"),
        "id": F("treasury_account__treasury_account_identifier"),
        # Placeholder so this endpoint's row schema matches the award
        # endpoints, which do report a count.
        "award_count": Value(None, output_field=IntegerField()),
        "fa_description": F("treasury_account__federal_account__account_title"),
        "fa_id": F("treasury_account__federal_account_id"),
        # Only rows from each period's final submission count toward
        # obligation/outlay; other rows contribute 0.
        "obligation": Coalesce(
            Sum(
                Case(
                    When(
                        self.final_period_submission_query_filters,
                        then=F("obligations_incurred_by_program_object_class_cpe"),
                    ),
                    default=Value(0),
                )),
            0,
        ),
        "outlay": Coalesce(
            Sum(
                Case(
                    When(
                        self.final_period_submission_query_filters,
                        then=F("gross_outlay_amount_by_program_object_class_cpe"),
                    ),
                    default=Value(0),
                )),
            0,
        ),
        # Budgetary resources come from GTAS SF-133 balances for the same
        # treasury account in the latest reporting period, summed via a
        # correlated subquery.
        "total_budgetary_resources": Coalesce(
            Subquery(
                GTASSF133Balances.objects.filter(
                    disaster_emergency_fund_code__in=self.def_codes,
                    fiscal_period=self.latest_reporting_period["submission_fiscal_month"],
                    fiscal_year=self.latest_reporting_period["submission_fiscal_year"],
                    treasury_account_identifier=OuterRef("treasury_account"),
                ).annotate(amount=Func("total_budgetary_resources_cpe",
                                       function="Sum")).values("amount"),
                output_field=DecimalField(),
            ),
            0,
        ),
    }
    # Assuming it is more performant to fetch all rows once rather than
    # run a count query and fetch only a page's worth of results
    return (FinancialAccountsByProgramActivityObjectClass.objects.filter(
        *filters).values(
            "treasury_account__federal_account__id",
            "treasury_account__federal_account__federal_account_code",
            "treasury_account__federal_account__account_title",
        ).annotate(**annotations).values(*annotations.keys()))