def get_authorized_jira_projects(permission, user=None):
    """Return the queryset of JIRA_Projects `user` may access under `permission`.

    Falls back to the current request user when `user` is None; returns an
    empty queryset when there is no user at all.  Superusers and users with
    the matching global permission see every JIRA_Project.
    """
    if user is None:
        user = get_current_user()
    if user is None:
        # No authenticated user -> nothing is visible.
        return JIRA_Project.objects.none()
    jira_projects = JIRA_Project.objects.all()
    if user.is_superuser:
        return jira_projects
    if user_has_global_permission(user, permission):
        return jira_projects
    # NOTE(review): unlike get_authorized_jira_issues in this file, there is
    # no `user.is_staff and settings.AUTHORIZATION_STAFF_OVERRIDE` shortcut
    # here — confirm that difference is intentional.
    roles = get_roles_for_permission(permission)
    # A JIRA_Project can be linked via an engagement or directly via a
    # product, so build Exists() subqueries for both paths, covering direct
    # user memberships and group memberships at both the product and the
    # product-type level.
    engagement_authorized_product_type_roles = Product_Type_Member.objects.filter(
        product_type=OuterRef('engagement__product__prod_type_id'),
        user=user,
        role__in=roles)
    engagement_authorized_product_roles = Product_Member.objects.filter(
        product=OuterRef('engagement__product_id'),
        user=user,
        role__in=roles)
    engagement_authorized_product_type_groups = Product_Type_Group.objects.filter(
        product_type=OuterRef('engagement__product__prod_type_id'),
        group__users=user,
        role__in=roles)
    engagement_authorized_product_groups = Product_Group.objects.filter(
        product=OuterRef('engagement__product_id'),
        group__users=user,
        role__in=roles)
    product_authorized_product_type_roles = Product_Type_Member.objects.filter(
        product_type=OuterRef('product__prod_type_id'),
        user=user,
        role__in=roles)
    product_authorized_product_roles = Product_Member.objects.filter(
        product=OuterRef('product_id'),
        user=user,
        role__in=roles)
    product_authorized_product_type_groups = Product_Type_Group.objects.filter(
        product_type=OuterRef('product__prod_type_id'),
        group__users=user,
        role__in=roles)
    product_authorized_product_groups = Product_Group.objects.filter(
        product=OuterRef('product_id'),
        group__users=user,
        role__in=roles)
    # Annotate one boolean per authorization path, then keep rows where any
    # path grants access.
    jira_projects = jira_projects.annotate(
        engagement__product__prod_type__member=Exists(
            engagement_authorized_product_type_roles),
        engagement__product__member=Exists(
            engagement_authorized_product_roles),
        engagement__product__prod_type__authorized_group=Exists(
            engagement_authorized_product_type_groups),
        engagement__product__authorized_group=Exists(
            engagement_authorized_product_groups),
        product__prod_type__member=Exists(
            product_authorized_product_type_roles),
        product__member=Exists(product_authorized_product_roles),
        product__prod_type__authorized_group=Exists(
            product_authorized_product_type_groups),
        product__authorized_group=Exists(product_authorized_product_groups))
    jira_projects = jira_projects.filter(
        Q(engagement__product__prod_type__member=True)
        | Q(engagement__product__member=True)
        | Q(engagement__product__prod_type__authorized_group=True)
        | Q(engagement__product__authorized_group=True)
        | Q(product__prod_type__member=True)
        | Q(product__member=True)
        | Q(product__prod_type__authorized_group=True)
        | Q(product__authorized_group=True))
    return jira_projects
def get_queryset(self):
    """Build the filtered, ordered Itinerary queryset from request GET params.

    Supports both `key[]` and `key` parameter spellings for list params,
    optional slug-based pre-filtering (country / park / tour operator),
    price, date-month, focus and activity-level filters, and several
    ordering modes.  Also attaches the parsed filter inputs onto the
    returned queryset for later use in page titles.
    """
    params = self.request.GET
    # Soft-deleted itineraries are excluded everywhere.
    queryset = Itinerary.objects.filter(date_deleted__isnull=True)
    parks = params.getlist('parks[]', []) + params.getlist('parks', [])
    countries = params.getlist('countries[]', []) + params.getlist(
        'countries', [])
    days = params.get('days', None)
    min_price = params.get('min_price', None)
    max_price = params.get('max_price', None)
    from_date = params.get('from_date', None)
    to_date = params.get('to_date', None)
    travelers = params.get('adult_travelers', None)
    main_focus = params.get('main_focus', None)
    secondary_focus = params.getlist(
        'secondary_focus[]', []) + params.getlist('secondary_focus', [])
    safary_preference = params.get('safary_preference', None)
    activity_levels = params.getlist(
        'activity_levels[]', []) + params.getlist('activity_levels', [])
    ordering = params.get('ordering', '-id')
    operators = []
    # Slug arrives as JSON when the page is loaded via the API ...
    slug = json.loads(params.get('slug', 'null'))
    # ... and as an attribute when the page is rendered server-side.
    if hasattr(self, 'slug') and self.slug:
        slug = self.slug
    if slug:
        # A slug narrows the search to one country, park or tour operator.
        if slug['model'] == 'CountryIndex':
            countries = [slug['id']] + countries
        if slug['model'] == 'Park':
            parks = [slug['id']] + parks
        if slug['model'] == 'TourOperator':
            operators.append(slug['id'])
    if len(parks) or len(countries):
        queryset = queryset.filter(
            Q(country_indexes__in=countries) | Q(parks__in=parks))
    if (len(operators)):
        queryset = queryset.filter(tour_operator__in=operators)
    if days:
        queryset = queryset.filter(days=days)
    price_query = Q()
    if min_price:
        price_query &= Q(search_price__gte=min_price)
    if max_price:
        price_query &= Q(search_price__lte=max_price)
    if (min_price or max_price):
        queryset = queryset.filter(price_query)
    # Date filter logic: collect all months between the two dates and require
    # the package to contain every one of them.  Uses chained filters on the
    # same m2m ("Spanning multi-valued relationships"):
    # https://docs.djangoproject.com/en/dev/topics/db/queries/
    months = []
    if from_date:
        from_date = datetime.strptime(from_date, '%m/%d/%Y')
    if to_date:
        to_date = datetime.strptime(to_date, '%m/%d/%Y')
    if from_date and to_date:
        months = self.months_between(from_date, to_date)
    elif from_date:
        months.append(from_date.month)
    elif to_date:
        months.append(to_date.month)
    if len(months):
        # Each chained filter() adds another must-match month.
        for month in months:
            queryset = queryset.filter(months=month)
    # TODO: traveler-count filtering is not implemented yet.
    # if travelers:
    #     queryset.filter()
    if len(secondary_focus):
        queryset = queryset.filter(
            secondary_focus_activity__in=secondary_focus)
    if safary_preference:
        queryset = queryset.filter(itinerary_type_id=safary_preference)
    if main_focus:
        queryset = queryset.filter(safari_focus_activity_id=main_focus)
    if len(activity_levels):
        ACTIVITIES = {
            'Easy': 'Easy',
            'Moderate': 'Moderate',
            'Strenuous': 'Strenuous',
        }
        activity_levels_query = Q()
        # An empty activity_level_name is treated as 'Easy'.
        if 'Easy' in activity_levels:
            activity_levels_query |= Q((Q(
                activity_level_name=ACTIVITIES['Easy']))
                | (Q(activity_level_name='')))
        if 'Moderate' in activity_levels:
            activity_levels_query |= Q(
                activity_level_name=ACTIVITIES['Moderate'])
        if 'Strenuous' in activity_levels:
            activity_levels_query |= Q(
                activity_level_name=ACTIVITIES['Strenuous'])
        queryset = queryset.filter(activity_levels_query)
    # Default yas_score comes straight from the tour operator.
    queryset = queryset.annotate(yas_score=F('tour_operator__yas_score'))
    if ordering:
        # yas score ordering
        if ordering == '-yas_score':
            # When countries/parks are selected, replace the operator-wide
            # yas_score with the average score restricted to those countries.
            if len(parks) or len(countries):
                parks_countries = Park.objects.filter(
                    id__in=parks).values_list('country_indexes__id',
                                              flat=True)
                all_ids = countries + list(parks_countries)
                yas_score_subquery = YASScore.objects \
                    .filter(tour_operator_id=OuterRef('tour_operator_id')) \
                    .filter(country_index_id__in=all_ids) \
                    .order_by().values('tour_operator_id')
                packages_yas_score_avg = yas_score_subquery \
                    .annotate(avg=Avg('yas_score')) \
                    .values('avg')
                queryset = queryset.annotate(
                    yas_score=Subquery(packages_yas_score_avg,
                                       output_field=IntegerField())) \
                    .order_by('-yas_score')
            else:
                # No country selected: the default annotation is fine.
                pass
        queryset = queryset.order_by(ordering)
    else:
        # Only reachable when ?ordering= is explicitly empty.
        queryset = queryset.order_by(
            F('safari_focus_activity__priority').desc(nulls_last=True))
    queryset = queryset.distinct()
    # Attach the parsed inputs for page title and description in
    # ItineraryView.page_head.
    queryset.parks = parks
    queryset.countries = countries
    queryset.main_focus = main_focus
    queryset.secondary_focus = secondary_focus
    return queryset
def get_download_counts_by_area(
    region_filter: Dict[str, Any],
    users: Optional[Union[QuerySet, List[User]]] = None,
    count: Optional[int] = None,
    start_date: Optional[Union[date, datetime]] = None,
) -> Dict[str, int]:
    """Map region name -> number of downloads whose job geometry intersects it.

    :param region_filter: Django filter kwargs applied to Region; may be falsy.
    :param users: optionally restrict to downloads by these users.
    :param count: optionally return only the top `count` regions.
    :param start_date: optionally ignore downloads before this date.
    """
    region_filter = region_filter or dict()
    query = dict()
    if users:
        query["user__in"] = users
    if start_date:
        query["downloaded_at__gte"] = start_date
    # Correlate each download's job geometry with the outer Region geometry.
    query[
        "downloadable__export_task__export_provider_task__run__job__the_geom__intersects"] = OuterRef(
        "the_geom")
    download_subquery = UserDownload.objects.filter(**query).values("uid")
    regions = (Region.objects.filter(**region_filter).annotate(downloads=Count(
        Subquery(download_subquery))).order_by("-downloads")[:count])
    return {region.name: region.downloads for region in regions}
def test_obj_subquery_lookup(self):
    """Key transforms must work on a JSON value re-selected via Subquery."""
    inner = NullableJSONModel.objects.filter(pk=OuterRef('pk')).values('value')
    annotated = NullableJSONModel.objects.annotate(field=Subquery(inner))
    self.assertSequenceEqual(
        annotated.filter(field__a='b'),
        [self.objs[3], self.objs[4]],
    )
def iterate_positions(self, form_data: dict):
    """Yield export rows for order positions: header first, then one row
    per position, in chunks of 1000 to bound memory usage.

    Yields `self.ProgressSetTotal` after the header so callers can report
    progress.
    """
    # Collect every payment provider used on the position's order as one
    # comma-separated string (correlated subquery per order).
    p_providers = OrderPayment.objects.filter(
        order=OuterRef('order'),
        state__in=(OrderPayment.PAYMENT_STATE_CONFIRMED,
                   OrderPayment.PAYMENT_STATE_REFUNDED,
                   OrderPayment.PAYMENT_STATE_PENDING,
                   OrderPayment.PAYMENT_STATE_CREATED),
    ).values('order').annotate(
        m=GroupConcat('provider', delimiter=',')).values('m').order_by()
    base_qs = OrderPosition.objects.filter(order__event__in=self.events, )
    qs = base_qs.annotate(payment_providers=Subquery(
        p_providers, output_field=CharField()), ).select_related(
        'order', 'order__invoice_address', 'item', 'variation', 'voucher',
        'tax_rule').prefetch_related('answers', 'answers__question',
                                     'answers__options')
    if form_data['paid_only']:
        qs = qs.filter(order__status=Order.STATUS_PAID)
    has_subevents = self.events.filter(has_subevents=True).exists()
    # ---- header row -----------------------------------------------------
    headers = [
        _('Event slug'),
        _('Order code'),
        _('Position ID'),
        _('Status'),
        _('Email'),
        _('Order date'),
    ]
    if has_subevents:
        headers.append(pgettext('subevent', 'Date'))
        headers.append(_('Start date'))
        headers.append(_('End date'))
    headers += [
        _('Product'),
        _('Variation'),
        _('Price'),
        _('Tax rate'),
        _('Tax rule'),
        _('Tax value'),
        _('Attendee name'),
    ]
    # Multi-part name columns only make sense for a single event's scheme.
    name_scheme = PERSON_NAME_SCHEMES[
        self.event.settings.
        name_scheme] if not self.is_multievent else None
    if name_scheme and len(name_scheme['fields']) > 1:
        for k, label, w in name_scheme['fields']:
            headers.append(_('Attendee name') + ': ' + str(label))
    headers += [
        _('Attendee email'),
        _('Company'),
        _('Address'),
        _('ZIP code'),
        _('City'),
        _('Country'),
        pgettext('address', 'State'),
        _('Voucher'),
        _('Pseudonymization ID'),
    ]
    # One column per question; multiple-choice questions get one Yes/No
    # column per option.
    questions = list(Question.objects.filter(event__in=self.events))
    options = {}
    for q in questions:
        if q.type == Question.TYPE_CHOICE_MULTIPLE:
            options[q.pk] = []
            for o in q.options.all():
                headers.append(str(q.question) + ' – ' + str(o.answer))
                options[q.pk].append(o)
        else:
            headers.append(str(q.question))
    headers += [
        _('Company'),
        _('Invoice address name'),
    ]
    if name_scheme and len(name_scheme['fields']) > 1:
        for k, label, w in name_scheme['fields']:
            headers.append(_('Invoice address name') + ': ' + str(label))
    headers += [
        _('Address'),
        _('ZIP code'),
        _('City'),
        _('Country'),
        pgettext('address', 'State'),
        _('VAT ID'),
    ]
    headers += [
        _('Sales channel'),
        _('Order locale'),
        _('Payment providers'),
    ]
    yield headers
    # ---- data rows ------------------------------------------------------
    all_ids = list(
        base_qs.order_by('order__datetime',
                         'positionid').values_list('pk', flat=True))
    yield self.ProgressSetTotal(total=len(all_ids))
    for ids in chunked_iterable(all_ids, 1000):
        # Restore the global ordering within the chunk.
        ops = sorted(qs.filter(id__in=ids), key=lambda k: ids.index(k.pk))
        for op in ops:
            order = op.order
            tz = pytz.timezone(
                self.event_object_cache[order.event_id].settings.timezone)
            row = [
                self.event_object_cache[order.event_id].slug,
                order.code,
                op.positionid,
                order.get_status_display(),
                order.email,
                order.datetime.astimezone(tz).strftime('%Y-%m-%d'),
            ]
            if has_subevents:
                if op.subevent:
                    row.append(op.subevent.name)
                    row.append(
                        op.subevent.date_from.astimezone(
                            self.event_object_cache[order.event_id].
                            timezone).strftime('%Y-%m-%d %H:%M:%S'))
                    if op.subevent.date_to:
                        row.append(
                            op.subevent.date_to.astimezone(
                                self.event_object_cache[order.event_id].
                                timezone).strftime('%Y-%m-%d %H:%M:%S'))
                    else:
                        row.append('')
                else:
                    row.append('')
                    row.append('')
                    row.append('')
            row += [
                str(op.item),
                str(op.variation) if op.variation else '',
                op.price,
                op.tax_rate,
                str(op.tax_rule) if op.tax_rule else '',
                op.tax_value,
                op.attendee_name,
            ]
            if name_scheme and len(name_scheme['fields']) > 1:
                for k, label, w in name_scheme['fields']:
                    row.append(op.attendee_name_parts.get(k, ''))
            row += [
                op.attendee_email,
                op.company or '',
                op.street or '',
                op.zipcode or '',
                op.city or '',
                op.country if op.country else '',
                op.state or '',
                op.voucher.code if op.voucher else '',
                op.pseudonymization_id,
            ]
            acache = {}
            for a in op.answers.all():
                # We do not want to localize Date, Time and Datetime question answers, as those can lead
                # to difficulties parsing the data (for example 2019-02-01 may become Février, 2019 01 in French).
                if a.question.type == Question.TYPE_CHOICE_MULTIPLE:
                    acache[a.question_id] = set(o.pk for o in a.options.all())
                elif a.question.type in Question.UNLOCALIZED_TYPES:
                    acache[a.question_id] = a.answer
                else:
                    acache[a.question_id] = str(a)
            for q in questions:
                if q.type == Question.TYPE_CHOICE_MULTIPLE:
                    for o in options[q.pk]:
                        row.append(
                            _('Yes') if o.pk in
                            acache.get(q.pk, set()) else _('No'))
                else:
                    row.append(acache.get(q.pk, ''))
            try:
                row += [
                    order.invoice_address.company,
                    order.invoice_address.name,
                ]
                if name_scheme and len(name_scheme['fields']) > 1:
                    for k, label, w in name_scheme['fields']:
                        row.append(
                            order.invoice_address.name_parts.get(k, ''))
                row += [
                    order.invoice_address.street,
                    order.invoice_address.zipcode,
                    order.invoice_address.city,
                    order.invoice_address.country
                    if order.invoice_address.country else
                    order.invoice_address.country_old,
                    order.invoice_address.state,
                    order.invoice_address.vat_id,
                ]
            except InvoiceAddress.DoesNotExist:
                # Pad with as many empty cells as the invoice block would
                # have produced so columns stay aligned.
                row += [''] * (8 + (len(name_scheme['fields'])
                                    if name_scheme
                                    and len(name_scheme['fields']) > 1 else 0))
            row += [order.sales_channel, order.locale]
            # Human-readable provider names, deduplicated, without 'free'.
            row.append(', '.join([
                str(self.providers.get(p, p)) for p in sorted(
                    set((op.payment_providers or '').split(',')))
                if p and p != 'free'
            ]))
            yield row
def annotate_completed_at(queryset):
    """Annotate each task with `completed_at`: the creation time of the
    newest annotation on that task, considering only labeled tasks."""
    from tasks.models import Annotation

    latest_annotation = (
        Annotation.objects
        .filter(task=OuterRef("pk"), task__is_labeled=True)
        .distinct()
        .order_by("-created_at")
        .values("created_at")[:1]
    )
    return queryset.annotate(completed_at=Subquery(latest_annotation))
def iterate_orders(self, form_data):
    """Yield export rows: header, one row per order with per-tax-rate
    gross/tax columns, and a final totals row.

    Row layout: 9 fixed columns, then [gross, tax] pairs for each tax rate
    in sorted order — hence the `9 + 2 * i` / `10 + 2 * i` index math below.
    """
    tz = self.event.timezone
    # All tax rates that occur on fees or on positions of matching orders.
    tax_rates = set(a for a in OrderFee.objects.filter(
        order__event=self.event).values_list(
        'tax_rate', flat=True).distinct().order_by())
    tax_rates |= set(a for a in OrderPosition.objects.filter(
        order__event=self.event).filter(
        order__status__in=form_data['status']).values_list(
        'tax_rate', flat=True).distinct().order_by())
    tax_rates = sorted(tax_rates)
    headers = [
        _('Order code'),
        _('Order date'),
        _('Company'),
        _('Name'),
        _('Country'),
        _('VAT ID'),
        _('Status'),
        _('Payment date'),
        _('Order total'),
    ] + sum(([str(t) + ' % ' + _('Gross'),
              str(t) + ' % ' + _('Tax')] for t in tax_rates), [])
    yield headers
    # Latest confirmed/refunded payment date per order (correlated subquery).
    op_date = OrderPayment.objects.filter(
        order=OuterRef('order'),
        state__in=(OrderPayment.PAYMENT_STATE_CONFIRMED,
                   OrderPayment.PAYMENT_STATE_REFUNDED),
        payment_date__isnull=False).values('order').annotate(
        m=Max('payment_date')).values('m').order_by()
    # One aggregated row per (order, tax_rate) combination.
    qs = self.filter_qs(OrderPosition.objects, form_data).filter(
        order__status__in=form_data['status'],
        order__event=self.event,
    ).annotate(payment_date=Subquery(
        op_date, output_field=models.DateTimeField())).values(
        'order__code', 'order__datetime', 'payment_date', 'order__total',
        'tax_rate', 'order__status', 'order__id',
        'order__invoice_address__name_cached',
        'order__invoice_address__company',
        'order__invoice_address__country',
        'order__invoice_address__vat_id').annotate(
        prices=Sum('price'), tax_values=Sum('tax_value')).order_by(
        'order__datetime'
        if form_data['sort'] == 'datetime' else 'payment_date',
        'order__datetime', 'order__code')
    # Pre-aggregate fee sums per (order, tax_rate) in one query.
    fee_sum_cache = {
        (o['order__id'], o['tax_rate']): o
        for o in OrderFee.objects.values('tax_rate', 'order__id').order_by(
        ).annotate(taxsum=Sum('tax_value'), grosssum=Sum('value'))
    }
    last_order_code = None
    tax_sums = defaultdict(Decimal)
    price_sums = defaultdict(Decimal)
    status_labels = dict(Order.STATUS_CHOICE)
    row = None
    for op in qs:
        if op['order__code'] != last_order_code:
            # New order: flush the previous row, start a fresh one with the
            # fixed columns plus zeroed [gross, tax] pairs.
            if row:
                yield row
                row = None
            row = [
                op['order__code'],
                date_format(op['order__datetime'].astimezone(tz),
                            "SHORT_DATE_FORMAT"),
                op['order__invoice_address__company'],
                op['order__invoice_address__name_cached'],
                op['order__invoice_address__country'],
                op['order__invoice_address__vat_id'],
                status_labels[op['order__status']],
                date_format(op['payment_date'], "SHORT_DATE_FORMAT")
                if op['payment_date'] else '',
                round_decimal(op['order__total'], self.event.currency),
            ] + sum(([Decimal('0.00'), Decimal('0.00')]
                     for t in tax_rates), [])
            last_order_code = op['order__code']
            # Seed the per-rate columns with this order's fee sums.
            for i, rate in enumerate(tax_rates):
                odata = fee_sum_cache.get((op['order__id'], rate))
                if odata:
                    row[9 + 2 * i] = odata['grosssum'] or 0
                    row[10 + 2 * i] = odata['taxsum'] or 0
                    tax_sums[rate] += odata['taxsum'] or 0
                    price_sums[rate] += odata['grosssum'] or 0
        # Add this (order, tax_rate) group's position sums into its columns.
        i = tax_rates.index(op['tax_rate'])
        row[9 + 2 * i] = round_decimal(row[9 + 2 * i] + op['prices'],
                                       self.event.currency)
        row[10 + 2 * i] = round_decimal(row[10 + 2 * i] + op['tax_values'],
                                        self.event.currency)
        tax_sums[op['tax_rate']] += op['tax_values']
        price_sums[op['tax_rate']] += op['prices']
    if row:
        yield row
    # Totals row mirrors the column layout.
    yield [_('Total'), '', '', '', '', '', '', '', ''] + sum(([
        round_decimal(
            price_sums.get(t) or Decimal('0.00'), self.event.currency),
        round_decimal(
            tax_sums.get(t) or Decimal('0.00'), self.event.currency)
    ] for t in tax_rates), [])
def iterate_list(self, form_data):
    """Yield gift-card export rows (header first), valued as of
    `form_data['date']` and filtered by test mode and card state."""
    # Card balance at the reference date: sum of all transactions up to it.
    s = GiftCardTransaction.objects.filter(
        card=OuterRef('pk'),
        datetime__lte=form_data['date']
    ).order_by().values('card').annotate(s=Sum('value')).values('s')
    # NOTE(review): `organizer` is not defined in this function — presumably
    # it should be `self.organizer` or a module-level name; confirm.
    qs = organizer.issued_gift_cards.filter(
        issuance__lte=form_data['date']
    ).annotate(
        cached_value=Coalesce(Subquery(s), Decimal('0.00')),
    ).order_by('issuance').prefetch_related(
        'transactions', 'transactions__order',
        'transactions__order__event', 'transactions__order__invoices'
    )
    if form_data.get('testmode') == 'yes':
        qs = qs.filter(testmode=True)
    elif form_data.get('testmode') == 'no':
        qs = qs.filter(testmode=False)
    # State filters are based on the balance at the reference date.
    if form_data.get('state') == 'empty':
        qs = qs.filter(cached_value=0)
    elif form_data.get('state') == 'valid_value':
        qs = qs.exclude(cached_value=0).filter(
            Q(expires__isnull=True) | Q(expires__gte=form_data['date']))
    elif form_data.get('state') == 'expired_value':
        qs = qs.exclude(cached_value=0).filter(
            expires__lt=form_data['date'])
    elif form_data.get('state') == 'expired':
        qs = qs.filter(expires__lt=form_data['date'])
    headers = [
        _('Gift card code'),
        _('Test mode card'),
        _('Creation date'),
        _('Expiry date'),
        _('Special terms and conditions'),
        _('Currency'),
        _('Current value'),
        _('Created in order'),
        _('Last invoice number of order'),
        _('Last invoice date of order'),
    ]
    yield headers
    tz = get_current_timezone()
    for obj in qs:
        o = None
        i = None
        # The card's first transaction links it to the order it was sold in;
        # take that order's most recent invoice, if any.
        trans = list(obj.transactions.all())
        if trans:
            o = trans[0].order
            if o:
                invs = list(o.invoices.all())
                if invs:
                    i = invs[-1]
        row = [
            obj.secret,
            _('Yes') if obj.testmode else _('No'),
            obj.issuance.astimezone(tz).date().strftime('%Y-%m-%d'),
            obj.expires.astimezone(tz).date().strftime('%Y-%m-%d')
            if obj.expires else '',
            obj.conditions or '',
            obj.currency,
            obj.cached_value,
            o.full_code if o else '',
            i.number if i else '',
            i.date.strftime('%Y-%m-%d') if i else '',
        ]
        yield row
def get_context_data(self, **kwargs):
    """Assemble event statistics for the template: orders by day, orders by
    product, cumulative revenue by day (all cached as JSON per subevent),
    plus seat-occupancy data when a seating plan exists.
    """
    ctx = super().get_context_data(**kwargs)
    tz = timezone.get_current_timezone()
    if 'latest' in self.request.GET:
        # Explicit refresh requested: drop the cached statistics.
        clear_cache(self.request.event)
    subevent = None
    if self.request.GET.get("subevent", "") != "" and self.request.event.has_subevents:
        i = self.request.GET.get("subevent", "")
        try:
            subevent = self.request.event.subevents.get(pk=i)
        except SubEvent.DoesNotExist:
            pass
    cache = self.request.event.cache
    ckey = str(subevent.pk) if subevent else 'all'
    # Latest confirmed/refunded payment date, correlated per Order pk ...
    p_date = OrderPayment.objects.filter(
        order=OuterRef('pk'),
        state__in=(OrderPayment.PAYMENT_STATE_CONFIRMED,
                   OrderPayment.PAYMENT_STATE_REFUNDED),
        payment_date__isnull=False
    ).values('order').annotate(
        m=Max('payment_date')
    ).values(
        'm'
    ).order_by()
    # ... and the same, correlated via an 'order' FK (for OrderPosition).
    op_date = OrderPayment.objects.filter(
        order=OuterRef('order'),
        state__in=(OrderPayment.PAYMENT_STATE_CONFIRMED,
                   OrderPayment.PAYMENT_STATE_REFUNDED),
        payment_date__isnull=False
    ).values('order').annotate(
        m=Max('payment_date')
    ).values(
        'm'
    ).order_by()

    # Orders by day
    ctx['obd_data'] = cache.get('statistics_obd_data' + ckey)
    if not ctx['obd_data']:
        oqs = Order.objects.annotate(
            payment_date=Subquery(p_date, output_field=DateTimeField()))
        if subevent:
            oqs = oqs.filter(all_positions__subevent_id=subevent,
                             all_positions__canceled=False).distinct()
        ordered_by_day = {}
        for o in oqs.filter(event=self.request.event).values('datetime'):
            day = o['datetime'].astimezone(tz).date()
            ordered_by_day[day] = ordered_by_day.get(day, 0) + 1
        paid_by_day = {}
        for o in oqs.filter(event=self.request.event,
                            payment_date__isnull=False).values('payment_date'):
            day = o['payment_date'].astimezone(tz).date()
            paid_by_day[day] = paid_by_day.get(day, 0) + 1
        data = []
        for d in dateutil.rrule.rrule(
                dateutil.rrule.DAILY,
                dtstart=min(ordered_by_day.keys())
                if ordered_by_day else datetime.date.today(),
                until=max(
                    # NOTE(review): this guard tests paid_by_day but falls
                    # back for ordered_by_day — looks like a copy/paste bug
                    # (should probably be `if ordered_by_day`); confirm.
                    max(ordered_by_day.keys()
                        if paid_by_day else [datetime.date.today()]),
                    max(paid_by_day.keys()
                        if paid_by_day else [datetime.date(1970, 1, 1)])
                )):
            d = d.date()
            data.append({
                'date': d.strftime('%Y-%m-%d'),
                'ordered': ordered_by_day.get(d, 0),
                'paid': paid_by_day.get(d, 0)
            })
        ctx['obd_data'] = json.dumps(data)
        cache.set('statistics_obd_data' + ckey, ctx['obd_data'])

    # Orders by product
    ctx['obp_data'] = cache.get('statistics_obp_data' + ckey)
    if not ctx['obp_data']:
        opqs = OrderPosition.objects
        if subevent:
            opqs = opqs.filter(subevent=subevent)
        num_ordered = {
            p['item']: p['cnt']
            for p in (opqs
                      .filter(order__event=self.request.event)
                      .values('item')
                      .annotate(cnt=Count('id')).order_by())
        }
        num_paid = {
            p['item']: p['cnt']
            for p in (opqs
                      .filter(order__event=self.request.event,
                              order__status=Order.STATUS_PAID)
                      .values('item')
                      .annotate(cnt=Count('id')).order_by())
        }
        item_names = {
            i.id: str(i)
            for i in Item.objects.filter(event=self.request.event)
        }
        ctx['obp_data'] = json.dumps([
            {
                'item': item_names[item],
                'item_short': item_names[item] if len(item_names[item]) < 15
                else (item_names[item][:15] + "…"),
                'ordered': cnt,
                'paid': num_paid.get(item, 0)
            } for item, cnt in num_ordered.items()
        ])
        cache.set('statistics_obp_data' + ckey, ctx['obp_data'])

    # Cumulative revenue by day
    ctx['rev_data'] = cache.get('statistics_rev_data' + ckey)
    if not ctx['rev_data']:
        rev_by_day = {}
        if subevent:
            # Per-subevent revenue must be summed from positions.
            for o in OrderPosition.objects.annotate(
                    payment_date=Subquery(op_date,
                                          output_field=DateTimeField())
            ).filter(order__event=self.request.event,
                     subevent=subevent,
                     order__status=Order.STATUS_PAID,
                     payment_date__isnull=False).values('payment_date',
                                                        'price'):
                day = o['payment_date'].astimezone(tz).date()
                rev_by_day[day] = rev_by_day.get(day, 0) + o['price']
        else:
            # Whole-event revenue can use the order totals directly.
            for o in Order.objects.annotate(
                    payment_date=Subquery(p_date,
                                          output_field=DateTimeField())
            ).filter(event=self.request.event,
                     status=Order.STATUS_PAID,
                     payment_date__isnull=False).values('payment_date',
                                                        'total'):
                day = o['payment_date'].astimezone(tz).date()
                rev_by_day[day] = rev_by_day.get(day, 0) + o['total']
        data = []
        total = 0
        for d in dateutil.rrule.rrule(
                dateutil.rrule.DAILY,
                dtstart=min(rev_by_day.keys()
                            if rev_by_day else [datetime.date.today()]),
                until=max(rev_by_day.keys()
                          if rev_by_day else [datetime.date.today()])):
            d = d.date()
            total += float(rev_by_day.get(d, 0))
            data.append({
                'date': d.strftime('%Y-%m-%d'),
                'revenue': round(total, 2),
            })
        ctx['rev_data'] = json.dumps(data)
        cache.set('statistics_rev_data' + ckey, ctx['rev_data'])

    ctx['has_orders'] = self.request.event.orders.exists()

    # Seat statistics (only for a single event or a selected subevent).
    ctx['seats'] = {}
    if not self.request.event.has_subevents or (ckey != "all" and subevent):
        ev = subevent or self.request.event
        if ev.seating_plan_id is not None:
            seats_qs = ev.free_seats(sales_channel=None,
                                     include_blocked=True)
            ctx['seats']['blocked_seats'] = seats_qs.filter(
                blocked=True).count()
            ctx['seats']['free_seats'] = seats_qs.filter(
                blocked=False).count()
            # Purchased = everything that is neither free nor blocked.
            ctx['seats']['purchased_seats'] = \
                ev.seats.count() - ctx['seats']['blocked_seats'] - ctx['seats']['free_seats']
            seats_qs = seats_qs.values('product', 'blocked').annotate(count=Count('id'))\
                .order_by('product__category__position', 'product__position', 'product', 'blocked')
            ctx['seats']['products'] = {}
            ctx['seats']['stats'] = {}
            item_cache = {
                i.pk: i
                for i in self.request.event.items.annotate(
                    has_variations=Count('variations')).filter(
                    pk__in={p['product'] for p in seats_qs if p['product']}
                )
            }
            item_cache[None] = None
            for item in seats_qs:
                product = item_cache[item['product']]
                if item_cache[item['product']] not in ctx['seats']['products']:
                    # Potential revenue uses the cheapest active variation
                    # price, falling back to the product default, then 0.
                    price = None
                    if product and product.has_variations:
                        price = product.variations.filter(
                            active=True
                        ).aggregate(Min('default_price'))['default_price__min']
                    if product and not price:
                        price = product.default_price
                    if not price:
                        price = Decimal('0.00')
                    ctx['seats']['products'][product] = {
                        'free': {
                            'seats': 0,
                            'potential': Decimal('0.00'),
                        },
                        'blocked': {
                            'seats': 0,
                            'potential': Decimal('0.00'),
                        },
                        'price': price,
                    }
                data = ctx['seats']['products'][product]
                if item['blocked']:
                    data['blocked']['seats'] = item['count']
                    data['blocked']['potential'] = item['count'] * data['price']
                else:
                    data['free']['seats'] = item['count']
                    data['free']['potential'] = item['count'] * data['price']
    return ctx
def get_authorized_jira_issues(permission):
    """Return the queryset of JIRA_Issues the current user may access under
    `permission`.

    Superusers, staff (when AUTHORIZATION_STAFF_OVERRIDE is enabled) and
    users with the matching global permission see every JIRA_Issue.
    """
    user = get_current_user()
    if user is None:
        # No authenticated user -> nothing is visible.
        return JIRA_Issue.objects.none()
    jira_issues = JIRA_Issue.objects.all()
    if user.is_superuser:
        return jira_issues
    if user.is_staff and settings.AUTHORIZATION_STAFF_OVERRIDE:
        return jira_issues
    if user_has_global_permission(user, permission):
        return jira_issues
    roles = get_roles_for_permission(permission)
    # A JIRA_Issue can be linked via an engagement, a finding group or a
    # finding, so build Exists() subqueries for all three paths, covering
    # direct user memberships and group memberships at both the product and
    # the product-type level.
    engagement_authorized_product_type_roles = Product_Type_Member.objects.filter(
        product_type=OuterRef('engagement__product__prod_type_id'),
        user=user,
        role__in=roles)
    engagement_authorized_product_roles = Product_Member.objects.filter(
        product=OuterRef('engagement__product_id'),
        user=user,
        role__in=roles)
    engagement_authorized_product_type_groups = Product_Type_Group.objects.filter(
        product_type=OuterRef('engagement__product__prod_type_id'),
        group__users=user,
        role__in=roles)
    engagement_authorized_product_groups = Product_Group.objects.filter(
        product=OuterRef('engagement__product_id'),
        group__users=user,
        role__in=roles)
    finding_group_authorized_product_type_roles = Product_Type_Member.objects.filter(
        product_type=OuterRef(
            'finding_group__test__engagement__product__prod_type_id'),
        user=user,
        role__in=roles)
    finding_group_authorized_product_roles = Product_Member.objects.filter(
        product=OuterRef('finding_group__test__engagement__product_id'),
        user=user,
        role__in=roles)
    finding_group_authorized_product_type_groups = Product_Type_Group.objects.filter(
        product_type=OuterRef(
            'finding_group__test__engagement__product__prod_type_id'),
        group__users=user,
        role__in=roles)
    finding_group_authorized_product_groups = Product_Group.objects.filter(
        product=OuterRef('finding_group__test__engagement__product_id'),
        group__users=user,
        role__in=roles)
    finding_authorized_product_type_roles = Product_Type_Member.objects.filter(
        product_type=OuterRef(
            'finding__test__engagement__product__prod_type_id'),
        user=user,
        role__in=roles)
    finding_authorized_product_roles = Product_Member.objects.filter(
        product=OuterRef('finding__test__engagement__product_id'),
        user=user,
        role__in=roles)
    finding_authorized_product_type_groups = Product_Type_Group.objects.filter(
        product_type=OuterRef(
            'finding__test__engagement__product__prod_type_id'),
        group__users=user,
        role__in=roles)
    finding_authorized_product_groups = Product_Group.objects.filter(
        product=OuterRef('finding__test__engagement__product_id'),
        group__users=user,
        role__in=roles)
    # Annotate one boolean per authorization path, then keep rows where any
    # path grants access.
    jira_issues = jira_issues.annotate(
        engagement__product__prod_type__member=Exists(
            engagement_authorized_product_type_roles),
        engagement__product__member=Exists(
            engagement_authorized_product_roles),
        engagement__product__prod_type__authorized_group=Exists(
            engagement_authorized_product_type_groups),
        engagement__product__authorized_group=Exists(
            engagement_authorized_product_groups),
        finding_group__test__engagement__product__prod_type__member=Exists(
            finding_group_authorized_product_type_roles),
        finding_group__test__engagement__product__member=Exists(
            finding_group_authorized_product_roles),
        finding_group__test__engagement__product__prod_type__authorized_group=Exists(
            finding_group_authorized_product_type_groups),
        finding_group__test__engagement__product__authorized_group=Exists(
            finding_group_authorized_product_groups),
        finding__test__engagement__product__prod_type__member=Exists(
            finding_authorized_product_type_roles),
        finding__test__engagement__product__member=Exists(
            finding_authorized_product_roles),
        finding__test__engagement__product__prod_type__authorized_group=Exists(
            finding_authorized_product_type_groups),
        finding__test__engagement__product__authorized_group=Exists(
            finding_authorized_product_groups))
    jira_issues = jira_issues.filter(
        Q(engagement__product__prod_type__member=True)
        | Q(engagement__product__member=True)
        | Q(engagement__product__prod_type__authorized_group=True)
        | Q(engagement__product__authorized_group=True)
        | Q(finding_group__test__engagement__product__prod_type__member=True)
        | Q(finding_group__test__engagement__product__member=True)
        | Q(finding_group__test__engagement__product__prod_type__authorized_group=True)
        | Q(finding_group__test__engagement__product__authorized_group=True)
        | Q(finding__test__engagement__product__prod_type__member=True)
        | Q(finding__test__engagement__product__member=True)
        | Q(finding__test__engagement__product__prod_type__authorized_group=True)
        | Q(finding__test__engagement__product__authorized_group=True))
    return jira_issues
def add_person_id(self, team_id: int):
    """Annotate each row with the `person_id` mapped to its `distinct_id`
    within the given team."""
    person_lookup = (
        PersonDistinctId.objects
        .filter(team_id=team_id, distinct_id=OuterRef("distinct_id"))
        .order_by()
        .values("person_id")[:1]
    )
    return self.annotate(person_id=Subquery(person_lookup))
def get(self, request):
    """Product listing endpoint.

    Filters by pick/sale/ranking-window/category/delivery params, supports
    several ordering modes and a free-text search, and returns a JSON list
    of product summaries plus the matching count.  Parameter errors map to
    400 responses (TYPE_ERROR / KEY_ERROR / NOT_EXIST_PRODUCT / NO_RESULT).
    """
    try:
        search = request.GET.get('search', None)
        is_pick = request.GET.get('trendi-pick', None)
        ranking_by = request.GET.get('ranking', None)
        is_sale = request.GET.get('sale', None)
        is_delivery = request.GET.get('delivery', None)
        category = request.GET.get('category', None)
        sub_category = request.GET.get('sub-category', None)
        ordering = request.GET.get('ordering', None)

        # Base queryset: products that appear in completed orders
        # (orderstatus 5), annotated with total sold quantity and the
        # number of reviews (correlated subquery).
        products = (
            Product.objects
            .filter(orderlist__order__orderstatus_id=5)
            .select_related('seller', 'delivery', 'sale', 'category',
                            'sub_category')
            .prefetch_related('orderlist_set', 'review_set')
            .annotate(sum=Sum('orderlist__quantity'))
            .annotate(review_count=Subquery(
                Review.objects.filter(product=OuterRef('pk'))
                .values('product')
                .annotate(count=Count('pk')).values('count')))
        )

        q = Q()
        if is_pick:
            q.add(Q(trendi_pick=is_pick), Q.AND)
            products = products.order_by('-sum')
        if is_sale:
            q.add(Q(sale_id__gt=is_sale), Q.AND)
        if ranking_by:
            # Restrict to orders updated within the ranking window; an
            # unknown value raises KeyError -> 400 KEY_ERROR below.
            rank_filter = {'day': 1, 'week': 7, 'month': 30}
            date = datetime.today() - timedelta(
                days=rank_filter[ranking_by])
            q.add(Q(orderlist__updated_at__gt=date), Q.AND)
        if category:
            q.add(Q(category=category), Q.AND)
        if sub_category:
            q.add(Q(sub_category=sub_category), Q.AND)
        if is_delivery:
            q.add(Q(delivery__delivery_type=is_delivery), Q.AND)

        sort_type = {
            'latest': '-updated_at',
            'review': '-review_count',
            'l-price': 'price',
            'h-price': '-price',
            None: '-sum'
        }
        # Missing ordering falls back to '-sum'; an empty string raises
        # KeyError -> 400 KEY_ERROR (preserved behavior); unknown non-empty
        # values leave the current ordering untouched.
        if not ordering:
            products = products.order_by(sort_type[ordering])
        elif ordering in sort_type:
            products = products.order_by(sort_type[ordering])

        if search:
            # NOTE(review): the search branch rebuilds the queryset, which
            # drops the sum/review_count annotations and any ordering —
            # orderings like 'review' will fail at evaluation; confirm.
            products = Product.objects.select_related(
                'seller',
                'delivery',
                'sale',
            )
            q &= Q(title__icontains=search) |\
                Q(category__name__icontains=search) |\
                Q(sub_category__name__icontains=search) |\
                Q(seller__name__icontains=search)

        product_list = [{
            'is_pick': product.trendi_pick,
            'image_url': product.thumb_image_url,
            'seller_name': product.seller.name,
            'title': product.title,
            'delivery': product.delivery.delivery_type == 1,
            'sale': convert_sale(product.sale.sale_ratio),
            'discounted_price': get_discounted_price(
                product.price, product.sale.sale_ratio),
            'price': product.price,
            'updated_date': product.updated_at,
            'product_pk': product.pk,
        } for product in products.filter(q)]
        number_of_products = products.filter(q).count()
    except TypeError:
        return JsonResponse({"message": "TYPE_ERROR"}, status=400)
    except KeyError:
        return JsonResponse({"message": "KEY_ERROR"}, status=400)
    except Product.DoesNotExist:
        return JsonResponse({"message": "NOT_EXIST_PRODUCT"}, status=400)
    if not product_list:
        return JsonResponse({"message": "NO_RESULT"}, status=400)
    return JsonResponse(
        {
            "number_of_products": number_of_products,
            "product_list": product_list,
        },
        status=200)
def get(self, request: Request, team: Team) -> Response:
    """
    Returns cumulative counts of unresolved groups per day within the stats
    period time range.

    Response shape:
    {
        <project_id>: {
            <isoformat_date>: {"unresolved": <unresolved_count>},
            ...
        },
        ...
    }
    """
    # Feature-gate: team insights must be enabled for the organization.
    if not features.has("organizations:team-insights", team.organization, actor=request.user):
        return Response({"detail": "You do not have the insights feature enabled"}, status=400)
    # Team has no projects -> nothing to report.
    project_list = Project.objects.get_for_team_ids(team_ids=[team.id])
    if len(project_list) == 0:
        return Response({})
    start, end = get_date_range_from_params(request.GET)
    # Snap both bounds to midnight and shift one day forward.
    # NOTE(review): shifting *start* forward by a day (like end) looks
    # deliberate here but is worth confirming — it drops the first day of
    # the requested range from the series.
    end = end.replace(hour=0, minute=0, second=0, microsecond=0) + timedelta(days=1)
    start = start.replace(hour=0, minute=0, second=0, microsecond=0) + timedelta(days=1)

    # First we get the count of unresolved groups from before we were writing
    # `GroupHistory` records. This will be reasonably accurate but not perfect.
    # NOTE(review): if the team has no GroupHistory rows at all this aggregate
    # is None, and the comparisons/filters below would fail — confirm callers
    # guarantee at least one history row.
    oldest_history_date = GroupHistory.objects.filter_to_team(
        team).aggregate(Min("date_added"), )["date_added__min"]
    # Per-project unresolved baseline: total groups seen before history began,
    # minus those already resolved by then.
    project_unresolved = {
        r["project"]: r["unresolved"]
        for r in (Group.objects.filter_to_team(team).filter(
            first_seen__lt=oldest_history_date).values("project").annotate(
                total=Count("id"),
                resolved=Count("id", filter=Q(resolved_at__lt=oldest_history_date)),
            ).annotate(unresolved=ExpressionWrapper(
                F("total") - F("resolved"), output_field=IntegerField())))
    }
    # TODO: Could get ignored counts from `GroupSnooze` too

    # For each history row, fetch the previous meaningful status of the same
    # group (-1 when there is none) so that consecutive rows with the same
    # open/closed polarity can be de-duplicated.
    prev_status_sub_qs = Coalesce(
        Subquery(
            GroupHistory.objects.filter(
                group_id=OuterRef("group_id"),
                date_added__lt=OuterRef("date_added"),
                status__in=OPEN_STATUSES + CLOSED_STATUSES,
            ).order_by("-id").values("status")[:1]),
        -1,
    )
    # Keep only rows that represent a real open<->closed transition.
    dedupe_status_filter = Q((~Q(prev_status__in=OPEN_STATUSES) & Q(status__in=OPEN_STATUSES))
                             | (~Q(prev_status__in=CLOSED_STATUSES) & Q(status__in=CLOSED_STATUSES)))

    # Next, if there's data in the group history table before the stats period
    # then grab that and use it to help calculate the initial unresolved value.
    if oldest_history_date < start:
        # Groups newly created between history start and the stats window.
        new_for_projects = (Group.objects.filter_to_team(team).filter(
            first_seen__gte=oldest_history_date,
            first_seen__lt=start,
        ).values("project").annotate(open=Count("id")))
        # Net reopen/close transitions per project before the stats window.
        initial_project_history_counts = (
            GroupHistory.objects.filter_to_team(team).filter(
                date_added__gte=oldest_history_date,
                date_added__lt=start,
            ).annotate(prev_status=prev_status_sub_qs).filter(
                dedupe_status_filter).values("project").annotate(
                    reopened=Count("id", filter=Q(status__in=OPEN_STATUSES)),
                    closed=Count("id", filter=Q(status__in=CLOSED_STATUSES)),
                ))
        for row in new_for_projects:
            project_unresolved.setdefault(row["project"], 0)
            project_unresolved[row["project"]] += row["open"]
        for row in initial_project_history_counts:
            project_unresolved.setdefault(row["project"], 0)
            project_unresolved[row["project"]] += row["reopened"] - row["closed"]

    # Just a failsafe to make sure we haven't gone below 0
    for project in list(project_unresolved.keys()):
        project_unresolved[project] = max(0, project_unresolved[project])

    # Now we grab the rest of the data bucketed by day.
    new_issues = (Group.objects.filter_to_team(team).filter(
        first_seen__gte=start,
        first_seen__lt=end,
    ).annotate(bucket=TruncDay("first_seen")).order_by("bucket").values(
        "project", "bucket").annotate(open=Count("id")))
    # De-duplicated open/close transitions per project per day.
    bucketed_issues = (GroupHistory.objects.filter_to_team(team).filter(
        date_added__gte=start,
        date_added__lte=end,
    ).annotate(
        bucket=TruncDay("date_added"),
        prev_status=prev_status_sub_qs,
    ).filter(dedupe_status_filter).order_by("bucket").values(
        "project", "bucket").annotate(
            open=Count("id", filter=Q(status__in=OPEN_STATUSES)),
            closed=Count("id", filter=Q(status__in=CLOSED_STATUSES)),
        ))

    # Pre-build a zeroed day series for every project in the window.
    current_day, date_series_dict = start, {}
    while current_day < end:
        date_series_dict[current_day.isoformat()] = {"open": 0, "closed": 0}
        current_day += timedelta(days=1)
    agg_project_precounts = {
        project.id: copy.deepcopy(date_series_dict)
        for project in project_list
    }
    # Merge both result sets into the per-project day buckets.
    for r in chain(bucketed_issues, new_issues):
        bucket = agg_project_precounts[r["project"]][r["bucket"].isoformat()]
        bucket["open"] += r.get("open", 0)
        bucket["closed"] += r.get("closed", 0)

    # Walk each project's buckets in date order, carrying a running
    # unresolved count seeded from the pre-window baseline.
    agg_project_counts = {}
    for project, precounts in agg_project_precounts.items():
        # NOTE: `open` shadows the builtin here; it is the running unresolved count.
        open = project_unresolved.get(project, 0)
        sorted_bucket_keys = sorted(precounts.keys())
        project_counts = {}
        for bucket_key in sorted_bucket_keys:
            bucket = precounts[bucket_key]
            open = max(open + bucket["open"] - bucket["closed"], 0)
            project_counts[bucket_key] = {"unresolved": open}
        agg_project_counts[project] = project_counts

    return Response(agg_project_counts)
def get_context_data(self, **kwargs):
    """Build cached chart data for the event statistics page.

    Adds to the context (each a JSON string, cached per subevent):
      * obd_data  -- orders and payments per day
      * obp_data  -- ordered/paid counts per product
      * rev_data  -- cumulative revenue per day
      * has_orders -- whether the event has any orders at all
    `?latest` clears the cache; `?subevent=<pk>` scopes the stats.
    """
    ctx = super().get_context_data(**kwargs)
    tz = timezone.get_current_timezone()

    if 'latest' in self.request.GET:
        clear_cache(self.request.event)

    subevent = None
    if self.request.GET.get("subevent", "") != "" and self.request.event.has_subevents:
        i = self.request.GET.get("subevent", "")
        try:
            subevent = self.request.event.subevents.get(pk=i)
        except SubEvent.DoesNotExist:
            pass

    cache = self.request.event.cache
    ckey = str(subevent.pk) if subevent else 'all'

    # Latest confirmed/refunded payment date per order (keyed on Order pk,
    # or on a position's order for the OrderPosition variant below).
    p_date = OrderPayment.objects.filter(
        order=OuterRef('pk'),
        state__in=(OrderPayment.PAYMENT_STATE_CONFIRMED,
                   OrderPayment.PAYMENT_STATE_REFUNDED),
        payment_date__isnull=False).values('order').annotate(
            m=Max('payment_date')).values('m').order_by()
    op_date = OrderPayment.objects.filter(
        order=OuterRef('order'),
        state__in=(OrderPayment.PAYMENT_STATE_CONFIRMED,
                   OrderPayment.PAYMENT_STATE_REFUNDED),
        payment_date__isnull=False).values('order').annotate(
            m=Max('payment_date')).values('m').order_by()

    # Orders by day
    ctx['obd_data'] = cache.get('statistics_obd_data' + ckey)
    if not ctx['obd_data']:
        oqs = Order.objects.annotate(
            payment_date=Subquery(p_date, output_field=DateTimeField()))
        if subevent:
            oqs = oqs.filter(positions__subevent_id=subevent,
                             positions__canceled=False).distinct()

        ordered_by_day = {}
        for o in oqs.filter(event=self.request.event).values('datetime'):
            day = o['datetime'].astimezone(tz).date()
            ordered_by_day[day] = ordered_by_day.get(day, 0) + 1
        paid_by_day = {}
        for o in oqs.filter(
                event=self.request.event,
                payment_date__isnull=False).values('payment_date'):
            day = o['payment_date'].astimezone(tz).date()
            paid_by_day[day] = paid_by_day.get(day, 0) + 1

        data = []
        for d in dateutil.rrule.rrule(
                dateutil.rrule.DAILY,
                dtstart=min(ordered_by_day.keys())
                if ordered_by_day else datetime.date.today(),
                # BUG FIX: the first inner max() was guarded on paid_by_day
                # instead of ordered_by_day, which raised ValueError on an
                # empty ordered_by_day (and used today() as a bogus upper
                # bound when paid_by_day was empty).
                until=max(
                    max(ordered_by_day.keys() if ordered_by_day else [datetime.date.today()]),
                    max(paid_by_day.keys() if paid_by_day else [datetime.date(1970, 1, 1)]))):
            d = d.date()
            data.append({
                'date': d.strftime('%Y-%m-%d'),
                'ordered': ordered_by_day.get(d, 0),
                'paid': paid_by_day.get(d, 0)
            })
        ctx['obd_data'] = json.dumps(data)
        cache.set('statistics_obd_data' + ckey, ctx['obd_data'])

    # Orders by product
    ctx['obp_data'] = cache.get('statistics_obp_data' + ckey)
    if not ctx['obp_data']:
        opqs = OrderPosition.objects
        if subevent:
            opqs = opqs.filter(subevent=subevent)
        num_ordered = {
            p['item']: p['cnt']
            for p in (opqs.filter(
                order__event=self.request.event).values('item').annotate(
                    cnt=Count('id')).order_by())
        }
        num_paid = {
            p['item']: p['cnt']
            for p in (opqs.filter(order__event=self.request.event,
                                  order__status=Order.STATUS_PAID).
                      values('item').annotate(cnt=Count('id')).order_by())
        }
        item_names = {
            i.id: str(i)
            for i in Item.objects.filter(event=self.request.event)
        }
        ctx['obp_data'] = json.dumps([{
            'item': item_names[item],
            'ordered': cnt,
            'paid': num_paid.get(item, 0)
        } for item, cnt in num_ordered.items()])
        cache.set('statistics_obp_data' + ckey, ctx['obp_data'])

    # Cumulative revenue by day
    ctx['rev_data'] = cache.get('statistics_rev_data' + ckey)
    if not ctx['rev_data']:
        rev_by_day = {}
        if subevent:
            # Per-position prices when scoped to a subevent, since an order
            # can span multiple subevents.
            for o in OrderPosition.objects.annotate(payment_date=Subquery(
                    op_date, output_field=DateTimeField())).filter(
                        order__event=self.request.event,
                        subevent=subevent,
                        order__status=Order.STATUS_PAID,
                        payment_date__isnull=False).values(
                            'payment_date', 'price'):
                day = o['payment_date'].astimezone(tz).date()
                rev_by_day[day] = rev_by_day.get(day, 0) + o['price']
        else:
            for o in Order.objects.annotate(payment_date=Subquery(
                    p_date, output_field=DateTimeField())).filter(
                        event=self.request.event,
                        status=Order.STATUS_PAID,
                        payment_date__isnull=False).values(
                            'payment_date', 'total'):
                day = o['payment_date'].astimezone(tz).date()
                rev_by_day[day] = rev_by_day.get(day, 0) + o['total']

        data = []
        total = 0
        for d in dateutil.rrule.rrule(
                dateutil.rrule.DAILY,
                dtstart=min(rev_by_day.keys() if rev_by_day else [datetime.date.today()]),
                until=max(rev_by_day.keys() if rev_by_day else [datetime.date.today()])):
            d = d.date()
            total += float(rev_by_day.get(d, 0))
            data.append({
                'date': d.strftime('%Y-%m-%d'),
                'revenue': round(total, 2),
            })
        ctx['rev_data'] = json.dumps(data)
        cache.set('statistics_rev_data' + ckey, ctx['rev_data'])

    ctx['has_orders'] = self.request.event.orders.exists()
    return ctx
def qs_with_payment(queryset: QuerySet, **_kwargs) -> QuerySet:
    """Annotate each order with the charge status of its most recent payment.

    "Most recent" is the Payment row with the highest pk for the order; orders
    without payments get NULL. `Subquery` takes `output_field` directly, so the
    previous `ExpressionWrapper` indirection was redundant and is dropped.
    """
    last_status = Subquery(
        Payment.objects.filter(order_id=OuterRef("pk"))
        .order_by("-pk")
        .values_list("charge_status")[:1],
        output_field=CharField(),
    )
    return queryset.annotate(last_charge_status=last_status)
def _make_subquery(annotation: Union[Expression, F]) -> Subquery:
    """Wrap *annotation* in a correlated Subquery evaluated per TicketType row."""
    annotated = TicketType.objects.annotate(annotation_value=annotation)
    correlated = annotated.filter(pk=OuterRef("pk")).values("annotation_value")[:1]
    return Subquery(correlated)
def get_schedules_with_target_date_by_bin_and_orgs(
        self, order_by='enrollment__user__id'):
    """
    Returns Schedules with the target_date, related to Users whose id
    matches the bin_num, and filtered by org_list.

    Arguments:
        order_by -- string for field to sort the resulting Schedules by
    """
    target_day = _get_datetime_beginning_of_day(self.target_datetime)
    # Filter spelled from the User side: the user has a schedule whose
    # configured date field falls on the target day.
    schedule_day_equals_target_day_filter = {
        'courseenrollment__schedule__{}__gte'.format(self.schedule_date_field): target_day,
        'courseenrollment__schedule__{}__lt'.format(self.schedule_date_field): target_day + datetime.timedelta(days=1),  # lint-amnesty, pylint: disable=line-too-long
    }
    # Users in this task's bin (id modulo bin count) with an active
    # enrollment whose schedule hits the target day.
    users = User.objects.filter(
        courseenrollment__is_active=True,
        is_active=True,
        **schedule_day_equals_target_day_filter).annotate(
            id_mod=self.bin_num_for_user_id(F('id'))).filter(
                id_mod=self.bin_num)
    # Same day-window filter, re-spelled from the Schedule side (the dict
    # is deliberately rebuilt with different key prefixes).
    schedule_day_equals_target_day_filter = {
        '{}__gte'.format(self.schedule_date_field): target_day,
        '{}__lt'.format(self.schedule_date_field): target_day + datetime.timedelta(days=1),
    }
    # Schedules for those users, excluding ended courses and rows where
    # external updates are enabled for the user/course pair.
    schedules = Schedule.objects.select_related(
        'enrollment__user__profile',
        'enrollment__course',
        'enrollment__fbeenrollmentexclusion',
    ).filter(
        Q(enrollment__course__end__isnull=True)
        | Q(enrollment__course__end__gte=self.current_datetime),
        self.experience_filter,
        enrollment__user__in=users,
        enrollment__is_active=True,
        **schedule_day_equals_target_day_filter).annotate(
            external_updates_enabled=Exists(
                query_external_updates(
                    OuterRef('enrollment__user_id'),
                    OuterRef('enrollment__course_id'))),
        ).exclude(
            external_updates_enabled=True,
        ).order_by(order_by)

    schedules = self.filter_by_org(schedules)

    # Prefer a read replica when one is configured to keep this heavy
    # query off the primary.
    if "read_replica" in settings.DATABASES:
        schedules = schedules.using("read_replica")

    LOG.info('Query = %r', schedules.query.sql_with_params())

    with function_trace('schedule_query_set_evaluation'):
        # This will run the query and cache all of the results in memory.
        num_schedules = len(schedules)

    LOG.info('Number of schedules = %d', num_schedules)
    # This should give us a sense of the volume of data being processed by each task.
    set_custom_attribute('num_schedules', num_schedules)

    return schedules
def status(self, *args, **kwargs):
    """Return aggregate check-in statistics for the check-in list.

    The payload contains overall counts (checked-in, total positions,
    currently inside) plus per-item and per-variation breakdowns.
    """
    with language(self.request.event.settings.locale):
        clist = self.get_object()
        # Positions with at least one ENTRY check-in on this list.
        checked_in = clist.positions.annotate(checkedin=Exists(
            Checkin.objects.filter(list_id=clist.pk,
                                   position=OuterRef('pk'),
                                   type=Checkin.TYPE_ENTRY))).filter(
                                       checkedin=True,
        )
        all_positions = clist.positions
        ev = clist.subevent or clist.event
        response = {
            'event': {
                'name': str(ev.name),
            },
            'checkin_count': checked_in.count(),
            'position_count': all_positions.count(),
            'inside_count': clist.inside_count,
        }

        def _count_per(qs, field):
            # Map field value -> number of positions carrying that value.
            rows = qs.order_by().values(field).annotate(cnt=Count('id'))
            return {row[field]: row['cnt'] for row in rows}

        op_by_item = _count_per(all_positions, 'item')
        op_by_variation = _count_per(all_positions, 'variation')
        c_by_item = _count_per(checked_in, 'item')
        c_by_variation = _count_per(checked_in, 'variation')

        items = clist.event.items if clist.all_products else clist.limit_products
        response['items'] = []
        ordered_items = items.order_by('category__position', 'position',
                                       'pk').prefetch_related('variations')
        for item in ordered_items:
            entry = {
                'id': item.pk,
                'name': str(item),
                'admission': item.admission,
                'checkin_count': c_by_item.get(item.pk, 0),
                'position_count': op_by_item.get(item.pk, 0),
                'variations': [{
                    'id': var.pk,
                    'value': str(var),
                    'checkin_count': c_by_variation.get(var.pk, 0),
                    'position_count': op_by_variation.get(var.pk, 0),
                } for var in item.variations.all()],
            }
            response['items'].append(entry)
        return Response(response)
def get_story(self, doc, form_data):
    """Build the reportlab "story" for the orders-by-tax-rate PDF report.

    Produces a headline plus one table row per order, with a gross/tax
    column pair for every tax rate that occurs in the event's fees or
    positions, and a final sum row.
    """
    from reportlab.lib.units import mm
    from reportlab.platypus import Paragraph, Spacer, Table, TableStyle
    headlinestyle = self.get_style()
    headlinestyle.fontSize = 15
    headlinestyle.fontName = 'OpenSansBd'
    tz = pytz.timezone(self.event.settings.timezone)

    # All distinct tax rates from fees and (status-filtered) positions.
    tax_rates = set(a for a in OrderFee.objects.filter(
        order__event=self.event).values_list(
            'tax_rate', flat=True).distinct().order_by())
    tax_rates |= set(a for a in OrderPosition.objects.filter(
        order__event=self.event).filter(
            order__status__in=self.form_data['status']).values_list(
                'tax_rate', flat=True).distinct().order_by())
    tax_rates = sorted(tax_rates)

    # Cols: Order ID | Order date | Status | Payment Date | Total | {gross tax} for t in taxes
    colwidths = [a * doc.width for a in [0.12, 0.1, 0.10, 0.12, 0.08]]
    if tax_rates:
        # Remaining 48% of the width shared by the gross/tax column pairs.
        colwidths += [0.48 / (len(tax_rates) * 2) * doc.width
                      ] * (len(tax_rates) * 2)

    tstyledata = [
        # Alignment
        ('ALIGN', (0, 0), (3, 0), 'LEFT'),  # Headlines
        ('ALIGN', (4, 0), (-1, 0), 'CENTER'),  # Headlines
        ('ALIGN', (4, 1), (-1, -1), 'RIGHT'),  # Money
        ('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
        # Fonts
        ('FONTNAME', (0, 0), (-1, 0), 'OpenSansBd'),  # Headlines
        ('FONTNAME', (0, -1), (-1, -1), 'OpenSansBd'),  # Sums
    ]
    # Merge each rate's "<rate> %" header cell across its gross+tax pair.
    for i, rate in enumerate(tax_rates):
        tstyledata.append(('SPAN', (5 + 2 * i, 0), (6 + 2 * i, 0)))

    story = [
        Paragraph(
            _('Orders by tax rate ({currency})').format(
                currency=self.event.currency), headlinestyle),
        Spacer(1, 5 * mm)
    ]
    # Two header rows: rate labels, then Gross/Tax sub-headers.
    tdata = [
        [
            _('Order code'),
            _('Order date'),
            _('Status'),
            _('Payment date'),
            _('Order total'),
        ] + sum(([localize(t) + ' %', ''] for t in tax_rates), []),
        ['', '', '', '', ''] + sum(
            ([_('Gross'), _('Tax')] for t in tax_rates), []),
    ]

    # Latest confirmed/refunded payment date per order (correlated subquery).
    op_date = OrderPayment.objects.filter(
        order=OuterRef('order'),
        state__in=(OrderPayment.PAYMENT_STATE_CONFIRMED,
                   OrderPayment.PAYMENT_STATE_REFUNDED),
        payment_date__isnull=False).values('order').annotate(
            m=Max('payment_date')).values('m').order_by()
    # One row per (order, tax rate): summed position prices and tax values.
    qs = OrderPosition.objects.filter(
        order__status__in=self.form_data['status'],
        order__event=self.event,
    ).annotate(payment_date=Subquery(
        op_date, output_field=models.DateTimeField())).values(
            'order__code', 'order__datetime', 'payment_date', 'order__total',
            'tax_rate', 'order__status', 'order__id').annotate(
                prices=Sum('price'), tax_values=Sum('tax_value')).order_by(
                    'order__datetime' if self.form_data['sort'] == 'datetime'
                    else 'payment_date', 'order__datetime', 'order__code')

    # Fee sums keyed by (order id, tax rate), fetched once up front.
    fee_sum_cache = {
        (o['order__id'], o['tax_rate']): o
        for o in OrderFee.objects.values('tax_rate', 'order__id').order_by(
        ).annotate(taxsum=Sum('tax_value'), grosssum=Sum('value'))
    }
    last_order_code = None
    tax_sums = defaultdict(Decimal)
    price_sums = defaultdict(Decimal)
    status_labels = dict(Order.STATUS_CHOICE)
    for op in qs:
        # qs is ordered so all rows of one order are adjacent; start a new
        # table row (and add the order's fees once) on each new order code.
        if op['order__code'] != last_order_code:
            tdata.append(
                [
                    op['order__code'],
                    date_format(op['order__datetime'].astimezone(tz),
                                "SHORT_DATE_FORMAT"),
                    status_labels[op['order__status']],
                    date_format(op['payment_date'], "SHORT_DATE_FORMAT")
                    if op['payment_date'] else '',
                    op['order__total']
                ] + sum((['', ''] for t in tax_rates), []),
            )
            last_order_code = op['order__code']
            # Seed the row with this order's fee sums per tax rate.
            for i, rate in enumerate(tax_rates):
                odata = fee_sum_cache.get((op['order__id'], rate))
                if odata:
                    tdata[-1][5 + 2 * i] = odata['grosssum'] or Decimal('0.00')
                    tdata[-1][6 + 2 * i] = odata['taxsum'] or Decimal('0.00')
                    tax_sums[rate] += odata['taxsum'] or Decimal('0.00')
                    price_sums[rate] += odata['grosssum'] or Decimal('0.00')
        # Add this tax rate's position sums into the current order's row.
        i = tax_rates.index(op['tax_rate'])
        tdata[-1][5 + 2 * i] = (tdata[-1][5 + 2 * i]
                                or Decimal('0.00')) + op['prices']
        tdata[-1][6 + 2 * i] = (tdata[-1][6 + 2 * i]
                                or Decimal('0.00')) + op['tax_values']
        tax_sums[op['tax_rate']] += op['tax_values']
        price_sums[op['tax_rate']] += op['prices']

    # Final totals row across all orders.
    tdata.append(
        [_('Total'), '', '', '', ''] + sum(([
            price_sums.get(t) or Decimal('0.00'),
            tax_sums.get(t) or Decimal('0.00')
        ] for t in tax_rates), []),
    )
    # Localize every numeric cell, rounded to the event currency.
    tdata = [[
        localize(round_decimal(c, self.event.currency)) if isinstance(
            c, (Decimal, int, float)) else c for c in row
    ] for row in tdata]
    table = Table(tdata, colWidths=colwidths, repeatRows=2)
    table.setStyle(TableStyle(tstyledata))
    story.append(table)
    return story
def get_queryset(self, ignore_status=False, ignore_products=False):
    """Build the OrderPosition queryset for this check-in list endpoint.

    Annotates each position with its last check-in time on this list,
    applies subevent/status/product restrictions, and attaches the
    prefetch trees required by the serializer (a much deeper tree when
    ``?pdf_data=true`` asks for ticket-PDF rendering data).

    Arguments:
        ignore_status   -- skip the order-status filter regardless of query params
        ignore_products -- skip the limit-products filter regardless of query params
    """
    # Latest check-in datetime per position on this list (correlated subquery).
    cqs = Checkin.objects.filter(
        position_id=OuterRef('pk'),
        list_id=self.checkinlist.pk).order_by().values(
            'position_id').annotate(m=Max('datetime')).values('m')
    qs = OrderPosition.objects.filter(
        order__event=self.request.event,
    ).annotate(
        last_checked_in=Subquery(cqs)).prefetch_related(
            'order__event', 'order__event__organizer')
    if self.checkinlist.subevent:
        qs = qs.filter(subevent=self.checkinlist.subevent)
    # Restrict to paid (and optionally pending) orders unless the caller or
    # the ?ignore_status=true query param opts out.
    if self.request.query_params.get(
            'ignore_status', 'false') != 'true' and not ignore_status:
        qs = qs.filter(
            order__status__in=[Order.STATUS_PAID, Order.STATUS_PENDING]
            if self.checkinlist.include_pending else [Order.STATUS_PAID])
    if self.request.query_params.get('pdf_data', 'false') == 'true':
        # Deep prefetch tree covering everything ticket-PDF rendering touches,
        # including sibling positions of each order.
        qs = qs.prefetch_related(
            Prefetch(lookup='checkins',
                     queryset=Checkin.objects.filter(
                         list_id=self.checkinlist.pk)),
            'answers', 'answers__options', 'answers__question',
            Prefetch(
                'addons',
                OrderPosition.objects.select_related('item', 'variation')),
            Prefetch(
                'order',
                Order.objects.select_related(
                    'invoice_address').prefetch_related(
                        Prefetch(
                            'event',
                            Event.objects.select_related('organizer')),
                        Prefetch(
                            'positions',
                            OrderPosition.objects.prefetch_related(
                                Prefetch('checkins',
                                         queryset=Checkin.objects.all()),
                                'item',
                                'variation',
                                'answers',
                                'answers__options',
                                'answers__question',
                            ))))).select_related('item', 'variation',
                                                 'item__category', 'addon_to',
                                                 'order',
                                                 'order__invoice_address',
                                                 'seat')
    else:
        # Lighter prefetch tree for the plain list/detail serializer.
        # NOTE(review): 'order' appears twice in this select_related call;
        # harmless, but the duplicate could be dropped.
        qs = qs.prefetch_related(
            Prefetch(lookup='checkins',
                     queryset=Checkin.objects.filter(
                         list_id=self.checkinlist.pk)),
            'answers', 'answers__options', 'answers__question',
            Prefetch(
                'addons',
                OrderPosition.objects.select_related(
                    'item', 'variation'))).select_related(
                        'item', 'variation', 'order', 'addon_to',
                        'order__invoice_address', 'order', 'seat')
    if not self.checkinlist.all_products and not ignore_products:
        qs = qs.filter(item__in=self.checkinlist.limit_products.
                       values_list('id', flat=True))
    # Optional ?expand=... prefetches for richer serializer output.
    if 'subevent' in self.request.query_params.getlist('expand'):
        qs = qs.prefetch_related('subevent', 'subevent__event',
                                 'subevent__subeventitem_set',
                                 'subevent__subeventitemvariation_set',
                                 'subevent__seat_category_mappings',
                                 'subevent__meta_values')
    if 'item' in self.request.query_params.getlist('expand'):
        qs = qs.prefetch_related(
            'item', 'item__addons', 'item__bundles', 'item__meta_values',
            'item__variations').select_related('item__tax_rule')
    if 'variation' in self.request.query_params.getlist('expand'):
        qs = qs.prefetch_related('variation')
    # Privacy guard: without can_view_orders, listing requires a search term
    # of at least 3 characters (single-object lookups by pk are still allowed).
    if 'pk' not in self.request.resolver_match.kwargs and 'can_view_orders' not in self.request.eventpermset \
            and len(self.request.query_params.get('search', '')) < 3:
        qs = qs.none()
    return qs
def cancel_event(self, event: Event, subevent: int, auto_refund: bool,
                 keep_fee_fixed: str, keep_fee_per_ticket: str,
                 keep_fee_percentage: str, keep_fees: list = None,
                 manual_refund: bool = False, send: bool = False,
                 send_subject: dict = None, send_message: dict = None,
                 send_waitinglist: bool = False,
                 send_waitinglist_subject: dict = None,
                 send_waitinglist_message: dict = None, user: int = None,
                 refund_as_giftcard: bool = False, giftcard_expires=None,
                 giftcard_conditions=None, subevents_from: str = None,
                 subevents_to: str = None):
    """Cancel an event (or a subevent / date range of subevents).

    Cancels every affected order, optionally keeping cancellation fees,
    issuing automatic refunds and sending notification mails (including to
    waiting-list entries). Orders that span both affected and unaffected
    subevents are only partially changed instead of fully canceled.

    Returns the number of orders that could not be canceled.

    Fixes over the previous version:
    * the per-order price sum inside the "orders to change" loop shadowed
      the ``total`` progress denominator, corrupting progress reporting for
      the rest of the task — the sum now uses its own name;
    * mutable default arguments (``{}``) replaced with ``None``.
    """
    send_subject = LazyI18nString(send_subject)
    send_message = LazyI18nString(send_message)
    send_waitinglist_subject = LazyI18nString(send_waitinglist_subject or {})
    send_waitinglist_message = LazyI18nString(send_waitinglist_message or {})
    if user:
        user = User.objects.get(pk=user)

    # Orders with at least one position in a cancelable state.
    s = OrderPosition.objects.filter(
        order=OuterRef('pk')).order_by().values('order').annotate(
            k=Count('id')).values('k')
    orders_to_cancel = event.orders.annotate(
        pcnt=Subquery(s, output_field=IntegerField())).filter(
            status__in=[
                Order.STATUS_PAID, Order.STATUS_PENDING, Order.STATUS_EXPIRED
            ],
            pcnt__gt=0).all()

    if subevent or subevents_from:
        if subevent:
            subevents = event.subevents.filter(pk=subevent)
            subevent = subevents.first()
            subevent_ids = {subevent.pk}
        else:
            subevents = event.subevents.filter(date_from__gte=subevents_from,
                                               date_from__lt=subevents_to)
            subevent_ids = set(subevents.values_list('id', flat=True))

        has_subevent = OrderPosition.objects.filter(
            order_id=OuterRef('pk')).filter(subevent__in=subevents)
        has_other_subevent = OrderPosition.objects.filter(
            order_id=OuterRef('pk')).exclude(subevent__in=subevents)
        # Orders touching both affected and unaffected subevents are changed
        # (affected positions removed); orders only touching affected
        # subevents are canceled outright.
        orders_to_change = orders_to_cancel.annotate(
            has_subevent=Exists(has_subevent),
            has_other_subevent=Exists(has_other_subevent),
        ).filter(has_subevent=True, has_other_subevent=True)
        orders_to_cancel = orders_to_cancel.annotate(
            has_subevent=Exists(has_subevent),
            has_other_subevent=Exists(has_other_subevent),
        ).filter(has_subevent=True, has_other_subevent=False)

        for se in subevents:
            se.log_action('pretix.subevent.canceled', user=user)
            se.active = False
            se.save(update_fields=['active'])
            se.log_action('pretix.subevent.changed',
                          user=user,
                          data={'active': False, '_source': 'cancel_event'})
    else:
        subevents = None
        subevent_ids = set()
        orders_to_change = event.orders.none()
        event.log_action('pretix.event.canceled', user=user)
        # Deactivate all items so nothing can be sold anymore.
        for i in event.items.filter(active=True):
            i.active = False
            i.save(update_fields=['active'])
            i.log_action('pretix.event.item.changed',
                         user=user,
                         data={'active': False, '_source': 'cancel_event'})

    failed = 0
    # Progress denominator: every order (and, if mailed, waiting-list entry)
    # we will process. Must not be reassigned below.
    total = orders_to_cancel.count() + orders_to_change.count()
    qs_wl = event.waitinglistentries.filter(
        voucher__isnull=True).select_related('subevent')
    if subevents:
        qs_wl = qs_wl.filter(subevent__in=subevents)
    if send_waitinglist:
        total += qs_wl.count()
    counter = 0
    self.update_state(state='PROGRESS', meta={'value': 0})

    for o in orders_to_cancel.only('id', 'total').iterator():
        try:
            # Compute the cancellation fee the organizer keeps.
            fee = Decimal('0.00')
            fee_sum = Decimal('0.00')
            keep_fee_objects = []
            if keep_fees:
                for f in o.fees.all():
                    if f.fee_type in keep_fees:
                        fee += f.value
                        keep_fee_objects.append(f)
                    fee_sum += f.value
            if keep_fee_percentage:
                fee += Decimal(keep_fee_percentage) / Decimal('100.00') * (
                    o.total - fee_sum)
            if keep_fee_fixed:
                fee += Decimal(keep_fee_fixed)
            if keep_fee_per_ticket:
                for p in o.positions.all():
                    if p.addon_to_id is None:
                        fee += min(p.price, Decimal(keep_fee_per_ticket))
            # Never keep more than what was actually paid.
            fee = round_decimal(min(fee, o.payment_refund_sum),
                                event.currency)

            _cancel_order(o.pk,
                          user,
                          send_mail=False,
                          cancellation_fee=fee,
                          keep_fees=keep_fee_objects)
            refund_amount = o.payment_refund_sum
            try:
                if auto_refund:
                    _try_auto_refund(
                        o.pk,
                        manual_refund=manual_refund,
                        allow_partial=True,
                        source=OrderRefund.REFUND_SOURCE_ADMIN,
                        refund_as_giftcard=refund_as_giftcard,
                        giftcard_expires=giftcard_expires,
                        giftcard_conditions=giftcard_conditions,
                        comment=gettext('Event canceled'))
            finally:
                # Notify the customer even if the refund attempt failed.
                if send:
                    _send_mail(o, send_subject, send_message, subevent,
                               refund_amount, user, o.positions.all())

            counter += 1
            if not self.request.called_directly and counter % max(
                    10, total // 100) == 0:
                self.update_state(
                    state='PROGRESS',
                    meta={'value': round(counter / total * 100, 2)})
        except LockTimeoutException:
            logger.exception("Could not cancel order")
            failed += 1
        except OrderError:
            logger.exception("Could not cancel order")
            failed += 1

    for oid in orders_to_change.values_list('id', flat=True).iterator():
        with transaction.atomic():
            order = event.orders.select_for_update().get(pk=oid)
            # Sum of the prices of the canceled positions of this order
            # (renamed: previously shadowed the `total` progress counter).
            canceled_sum = Decimal('0.00')
            fee = Decimal('0.00')
            positions = []

            ocm = OrderChangeManager(order, user=user, notify=False)
            for p in order.positions.all():
                if p.subevent_id in subevent_ids:
                    canceled_sum += p.price
                    ocm.cancel(p)
                    positions.append(p)
                    if keep_fee_per_ticket:
                        if p.addon_to_id is None:
                            fee += min(p.price,
                                       Decimal(keep_fee_per_ticket))

            if keep_fee_fixed:
                fee += Decimal(keep_fee_fixed)
            if keep_fee_percentage:
                fee += Decimal(keep_fee_percentage) / Decimal(
                    '100.00') * canceled_sum
            fee = round_decimal(min(fee, order.payment_refund_sum),
                                event.currency)
            if fee:
                f = OrderFee(
                    fee_type=OrderFee.FEE_TYPE_CANCELLATION,
                    value=fee,
                    order=order,
                    tax_rule=order.event.settings.tax_rate_default,
                )
                f._calculate_tax()
                ocm.add_fee(f)
            ocm.commit()

            refund_amount = order.payment_refund_sum - order.total
            if auto_refund:
                _try_auto_refund(order.pk,
                                 manual_refund=manual_refund,
                                 allow_partial=True,
                                 source=OrderRefund.REFUND_SOURCE_ADMIN,
                                 refund_as_giftcard=refund_as_giftcard,
                                 giftcard_expires=giftcard_expires,
                                 giftcard_conditions=giftcard_conditions,
                                 comment=gettext('Event canceled'))
            if send:
                _send_mail(order, send_subject, send_message, subevent,
                           refund_amount, user, positions)

        counter += 1
        if not self.request.called_directly and counter % max(
                10, total // 100) == 0:
            self.update_state(
                state='PROGRESS',
                meta={'value': round(counter / total * 100, 2)})

    if send_waitinglist:
        for wle in qs_wl:
            _send_wle_mail(wle, send_waitinglist_subject,
                           send_waitinglist_message, wle.subevent)
            counter += 1
            if not self.request.called_directly and counter % max(
                    10, total // 100) == 0:
                self.update_state(
                    state='PROGRESS',
                    meta={'value': round(counter / total * 100, 2)})

    return failed
def filter_created_range(qs, _, value):
    """Filter orders whose creation date falls within the given range."""
    return filter_range_field(qs, "created__date", value)


def filter_order_search(qs, _, value):
    """Free-text search over orders.

    If the value looks like a payment id, short-circuit to a payment lookup.
    Otherwise match (via trigram similarity / exact lookups) against the
    customer's email or name, the order id, a payment PSP reference,
    discount names, and line SKUs, OR-ed together.
    """
    if payment_id := get_payment_id_from_query(value):
        return filter_order_by_payment(qs, payment_id)
    users = User.objects.filter(
        Q(email__trigram_similar=value)
        | Q(first_name__trigram_similar=value)
        | Q(last_name__trigram_similar=value)
    ).values("pk")
    filter_option = Q(user_email__trigram_similar=value) | Q(
        Exists(users.filter(pk=OuterRef("user_id")))
    )
    if order_id := get_order_id_from_query(value):
        filter_option |= Q(pk=order_id)

    payments = Payment.objects.filter(psp_reference=value).values("id")
    filter_option |= Q(Exists(payments.filter(order_id=OuterRef("id"))))

    discounts = OrderDiscount.objects.filter(
        Q(name__trigram_similar=value) | Q(translated_name__trigram_similar=value)
    ).values("id")
    filter_option |= Q(Exists(discounts.filter(order_id=OuterRef("id"))))

    lines = OrderLine.objects.filter(product_sku=value).values("id")
    filter_option |= Q(Exists(lines.filter(order_id=OuterRef("id"))))
    # BUG FIX: the accumulated condition was built but never applied — the
    # function fell through and returned None instead of the filtered queryset.
    return qs.filter(filter_option)
def iterate_fees(self, form_data: dict):
    """Yield CSV rows for every order fee across the selected events.

    Yields the header row first, then a ProgressSetTotal marker, then one
    row per fee with order metadata, invoice-address fields and the
    comma-joined payment providers used on the order.
    """
    # Comma-joined list of payment providers per order (correlated subquery).
    p_providers = OrderPayment.objects.filter(
        order=OuterRef('order'),
        state__in=(OrderPayment.PAYMENT_STATE_CONFIRMED,
                   OrderPayment.PAYMENT_STATE_REFUNDED,
                   OrderPayment.PAYMENT_STATE_PENDING,
                   OrderPayment.PAYMENT_STATE_CREATED),
    ).values('order').annotate(
        m=GroupConcat('provider', delimiter=',')).values('m').order_by()

    qs = OrderFee.objects.filter(order__event__in=self.events, ).annotate(
        payment_providers=Subquery(p_providers, output_field=CharField()),
    ).select_related('order', 'order__invoice_address', 'tax_rule')
    if form_data['paid_only']:
        qs = qs.filter(order__status=Order.STATUS_PAID)

    headers = [
        _('Event slug'),
        _('Order code'),
        _('Status'),
        _('Email'),
        _('Order date'),
        _('Fee type'),
        _('Description'),
        _('Price'),
        _('Tax rate'),
        _('Tax rule'),
        _('Tax value'),
        _('Company'),
        _('Invoice address name'),
    ]
    # Per-field name columns only make sense for a single event's scheme.
    name_scheme = PERSON_NAME_SCHEMES[
        self.event.settings.name_scheme] if not self.is_multievent else None
    if name_scheme and len(name_scheme['fields']) > 1:
        for k, label, w in name_scheme['fields']:
            headers.append(_('Invoice address name') + ': ' + str(label))
    headers += [
        _('Address'),
        _('ZIP code'),
        _('City'),
        _('Country'),
        pgettext('address', 'State'),
        _('VAT ID'),
    ]
    headers.append(_('Payment providers'))
    yield headers
    yield self.ProgressSetTotal(total=qs.count())

    for op in qs.order_by('order__datetime').iterator():
        order = op.order
        tz = pytz.timezone(order.event.settings.timezone)
        row = [
            self.event_object_cache[order.event_id].slug,
            order.code,
            order.get_status_display(),
            order.email,
            order.datetime.astimezone(tz).strftime('%Y-%m-%d'),
            op.get_fee_type_display(),
            op.description,
            op.value,
            op.tax_rate,
            str(op.tax_rule) if op.tax_rule else '',
            op.tax_value,
        ]
        try:
            row += [
                order.invoice_address.company,
                order.invoice_address.name,
            ]
            if name_scheme and len(name_scheme['fields']) > 1:
                for k, label, w in name_scheme['fields']:
                    row.append(order.invoice_address.name_parts.get(k, ''))
            row += [
                order.invoice_address.street,
                order.invoice_address.zipcode,
                order.invoice_address.city,
                # Fall back to the legacy free-text country field.
                order.invoice_address.country
                if order.invoice_address.country else
                order.invoice_address.country_old,
                order.invoice_address.state,
                order.invoice_address.vat_id,
            ]
        except InvoiceAddress.DoesNotExist:
            # Pad with empty cells so every row has the same width.
            row += [''] * (8 + (len(name_scheme['fields'])
                                if name_scheme
                                and len(name_scheme['fields']) > 1 else 0))
        # De-duplicated, human-readable provider names ('free' is omitted).
        row.append(', '.join([
            str(self.providers.get(p, p))
            for p in sorted(set((op.payment_providers or '').split(',')))
            if p and p != 'free'
        ]))
        yield row
def published_with_variants(self, channel_slug: str):
    """Published products in the given channel that have at least one
    variant listed in that same channel."""
    published_qs = self.published(channel_slug)
    variant_ids = ProductVariantChannelListing.objects.filter(
        variant_id=OuterRef("variants__id"),
        channel__slug=str(channel_slug),
    ).values_list("variant", flat=True)
    return published_qs.filter(variants__in=variant_ids).distinct()
def iterate_orders(self, form_data: dict):
    """Yield spreadsheet rows for every order of the selected events.

    Emits the header row, then a ``ProgressSetTotal`` marker, then one row
    per :class:`Order` (ordered by order date) with address, payment, fee
    and per-tax-rate gross/net/tax columns. If ``form_data['paid_only']``
    is set, only paid orders are included.
    """
    # Latest payment date per order, considering successful payments only.
    p_date = OrderPayment.objects.filter(
        order=OuterRef('pk'),
        state__in=(OrderPayment.PAYMENT_STATE_CONFIRMED,
                   OrderPayment.PAYMENT_STATE_REFUNDED),
        payment_date__isnull=False).values('order').annotate(
            m=Max('payment_date')).values('m').order_by()
    # All payment providers per order as one comma-separated string.
    p_providers = OrderPayment.objects.filter(
        order=OuterRef('pk'),
        state__in=(OrderPayment.PAYMENT_STATE_CONFIRMED,
                   OrderPayment.PAYMENT_STATE_REFUNDED,
                   OrderPayment.PAYMENT_STATE_PENDING,
                   OrderPayment.PAYMENT_STATE_CREATED),
    ).values('order').annotate(
        m=GroupConcat('provider', delimiter=',')).values('m').order_by()
    # All invoice numbers per order, comma-separated.
    i_numbers = Invoice.objects.filter(
        order=OuterRef('pk'), ).values('order').annotate(m=GroupConcat(
            'full_invoice_no', delimiter=', ')).values('m').order_by()
    # Position count per order.
    s = OrderPosition.objects.filter(
        order=OuterRef('pk')).order_by().values('order').annotate(
            k=Count('id')).values('k')
    qs = Order.objects.filter(event__in=self.events).annotate(
        payment_date=Subquery(p_date, output_field=DateTimeField()),
        payment_providers=Subquery(p_providers, output_field=CharField()),
        invoice_numbers=Subquery(i_numbers, output_field=CharField()),
        pcnt=Subquery(
            s, output_field=IntegerField())).select_related('invoice_address')
    if form_data['paid_only']:
        qs = qs.filter(status=Order.STATUS_PAID)
    tax_rates = self._get_all_tax_rates(qs)
    headers = [
        _('Event slug'),
        _('Order code'),
        _('Order total'),
        _('Status'),
        _('Email'),
        _('Order date'),
        _('Company'),
        _('Name'),
    ]
    # Name-part columns only make sense for single-event exports.
    name_scheme = PERSON_NAME_SCHEMES[
        self.event.settings.name_scheme] if not self.is_multievent else None
    if name_scheme and len(name_scheme['fields']) > 1:
        for k, label, w in name_scheme['fields']:
            headers.append(label)
    headers += [
        _('Address'),
        _('ZIP code'),
        _('City'),
        _('Country'),
        pgettext('address', 'State'),
        _('VAT ID'),
        _('Date of last payment'),
        _('Fees'),
        _('Order locale')
    ]
    for tr in tax_rates:
        headers += [
            _('Gross at {rate} % tax').format(rate=tr),
            _('Net at {rate} % tax').format(rate=tr),
            _('Tax value at {rate} % tax').format(rate=tr),
        ]
    headers.append(_('Invoice numbers'))
    headers.append(_('Sales channel'))
    headers.append(_('Requires special attention'))
    headers.append(_('Comment'))
    headers.append(_('Positions'))
    headers.append(_('Payment providers'))
    yield headers
    # Total fee sum per order.
    # Bug fix: the previous version grouped by ('tax_rate', 'order__id') but
    # keyed the dict only by order id, so for orders with fees at several tax
    # rates the comprehension silently kept just the last rate's partial sum.
    # Grouping by order alone yields the correct total.
    full_fee_sum_cache = {
        o['order__id']: o['grosssum']
        for o in OrderFee.objects.values('order__id').order_by().annotate(
            grosssum=Sum('value'))
    }
    # Fee gross/tax sums per (order, tax rate).
    fee_sum_cache = {
        (o['order__id'], o['tax_rate']): o
        for o in OrderFee.objects.values('tax_rate', 'order__id').order_by(
        ).annotate(taxsum=Sum('tax_value'), grosssum=Sum('value'))
    }
    # Position gross/tax sums per (order, tax rate).
    sum_cache = {
        (o['order__id'], o['tax_rate']): o
        for o in OrderPosition.objects.values('tax_rate', 'order__id').
        order_by().annotate(taxsum=Sum('tax_value'), grosssum=Sum('price'))
    }
    yield self.ProgressSetTotal(total=qs.count())
    for order in qs.order_by('datetime').iterator():
        tz = pytz.timezone(
            self.event_object_cache[order.event_id].settings.timezone)
        row = [
            self.event_object_cache[order.event_id].slug,
            order.code,
            order.total,
            order.get_status_display(),
            order.email,
            order.datetime.astimezone(tz).strftime('%Y-%m-%d'),
        ]
        try:
            row += [
                order.invoice_address.company,
                order.invoice_address.name,
            ]
            if name_scheme and len(name_scheme['fields']) > 1:
                for k, label, w in name_scheme['fields']:
                    row.append(order.invoice_address.name_parts.get(k, ''))
            row += [
                order.invoice_address.street,
                order.invoice_address.zipcode,
                order.invoice_address.city,
                order.invoice_address.country
                if order.invoice_address.country else
                order.invoice_address.country_old,
                order.invoice_address.state,
                order.invoice_address.vat_id,
            ]
        except InvoiceAddress.DoesNotExist:
            # Keep the column count stable when no invoice address exists.
            row += [''] * (8 + (len(name_scheme['fields'])
                                if name_scheme
                                and len(name_scheme['fields']) > 1 else 0))
        row += [
            order.payment_date.astimezone(tz).strftime('%Y-%m-%d')
            if order.payment_date else '',
            full_fee_sum_cache.get(order.id) or Decimal('0.00'),
            order.locale,
        ]
        # One gross/net/tax triple per distinct tax rate, combining
        # position sums and fee sums at that rate.
        for tr in tax_rates:
            taxrate_values = sum_cache.get((order.id, tr), {
                'grosssum': Decimal('0.00'),
                'taxsum': Decimal('0.00')
            })
            fee_taxrate_values = fee_sum_cache.get(
                (order.id, tr), {
                    'grosssum': Decimal('0.00'),
                    'taxsum': Decimal('0.00')
                })
            row += [
                taxrate_values['grosssum'] + fee_taxrate_values['grosssum'],
                (taxrate_values['grosssum'] - taxrate_values['taxsum'] +
                 fee_taxrate_values['grosssum'] -
                 fee_taxrate_values['taxsum']),
                taxrate_values['taxsum'] + fee_taxrate_values['taxsum'],
            ]
        row.append(order.invoice_numbers)
        row.append(order.sales_channel)
        row.append(_('Yes') if order.checkin_attention else _('No'))
        row.append(order.comment or "")
        row.append(order.pcnt)
        # 'free' is an internal pseudo-provider and is not shown.
        row.append(', '.join([
            str(self.providers.get(p, p))
            for p in sorted(set((order.payment_providers or '').split(',')))
            if p and p != 'free'
        ]))
        yield row
from contentcuration.viewsets.common import SQSum from contentcuration.viewsets.common import UUIDInFilter from contentcuration.viewsets.sync.constants import CHANNEL from contentcuration.viewsets.sync.utils import generate_update_event class CatalogListPagination(CachedListPagination): page_size = None page_size_query_param = "page_size" max_page_size = 1000 django_paginator_class = CatalogPaginator primary_token_subquery = Subquery( SecretToken.objects.filter( channels=OuterRef("id"), is_primary=True).values("token").order_by("-token")[:1]) base_channel_filter_fields = ( "keywords", "published", "languages", "licenses", "kinds", "coach", "assessments", "subtitles", "public", "id__in", "collection", "deleted",
def get_queryset(self):
    """Build the filtered, ordered TourOperator queryset for this view.

    Filters are read from the request's query string (both ``name[]`` and
    ``name`` variants are accepted for list parameters). A ``slug`` — either
    passed via the API as JSON or set on the view for server rendering —
    pre-seeds the country/park filters. The default ordering is a
    country-scoped average YAS score.
    """
    params = self.request.GET
    queryset = TourOperator.objects.all()
    # List parameters may arrive as 'name[]' (jQuery-style) or 'name'.
    parks = params.getlist('parks[]', []) + params.getlist('parks', [])
    countries = params.getlist('countries[]', []) + params.getlist(
        'countries', [])
    # NOTE(review): from_date is read but never used below — confirm intent.
    from_date = params.get('from_date', None)
    luxury_focus = params.get('luxury_focus', None)
    rating = params.get('rating', None)
    languages = params.getlist('languages[]', []) + params.getlist(
        'languages', [])
    headquarters = params.getlist('headquarters[]', []) + params.getlist(
        'headquarters', [])
    that = params.getlist('that[]', []) + params.getlist('that', [])
    ordering = params.get('ordering', '-yas_score')
    # slug. if load with api
    slug = json.loads(params.get('slug', 'null'))
    # slug. if load page and render from server
    if hasattr(self, 'slug') and self.slug:
        slug = self.slug
    # A slug prepends its id to the matching filter list.
    if slug:
        if slug['model'] == 'CountryIndex':
            countries = [slug['id']] + countries
        if slug['model'] == 'Park':
            parks = [slug['id']] + parks
    if len(parks) or len(countries):
        queryset = queryset.filter(
            Q(country_indexes__in=countries) | Q(parks__in=parks))
    if luxury_focus:
        queryset = queryset.filter(luxury_level=luxury_focus)
    if rating:
        queryset = queryset.filter(average_rating__gte=rating)
    if len(languages):
        queryset = queryset.filter(languages__in=languages)
    if len(headquarters):
        queryset = queryset.filter(headquarters__in=headquarters)
    # 'that' values map to predefined annotate/filter rule pairs.
    if len(that):
        that_choices = TourOperator.operator_that_choices()
        for t in that:
            if t in that_choices:
                rule = that_choices[t]
                # NOTE(review): the same alias 'subquery_alias' is reused for
                # every rule — presumably at most one annotating rule is
                # expected per request; verify against operator_that_choices.
                if rule['annotate']:
                    queryset = queryset.annotate(
                        subquery_alias=rule['annotate'])
                queryset = queryset.filter(rule['query'])
    queryset = queryset.distinct()
    # Default score; may be overridden below by a country-scoped average.
    queryset = queryset.annotate(new_yas_score=F('yas_score'))
    if ordering and ordering == '-yas_score':
        #yas score by country
        if len(parks) or len(countries):
            # Resolve selected parks to their countries and average the
            # operator's YAS scores across all selected countries.
            parks_countries = Park.objects.filter(
                id__in=parks).values_list('country_indexes__id', flat=True)
            all_ids = countries + list(parks_countries)
            yas_score_subquery = YASScore.objects \
                .filter(tour_operator_id=OuterRef('id')) \
                .filter(country_index_id__in=all_ids) \
                .order_by().values('tour_operator_id')
            packages_yas_score_avg = yas_score_subquery \
                .annotate(avg=Avg('yas_score')) \
                .values('avg')
            queryset = queryset.annotate(
                new_yas_score=Subquery(packages_yas_score_avg,
                                       output_field=IntegerField())) \
                .order_by('-new_yas_score')
        else:
            queryset = queryset.order_by(ordering)
    # Expose the resolved filter lists to the caller on the queryset object.
    queryset.parks = parks
    queryset.countries = countries
    return queryset
def __init__(self, *args, **kwargs): super(BaseChannelFilter, self).__init__(*args, **kwargs) self.main_tree_query = ContentNode.objects.filter( tree_id=OuterRef("main_tree__tree_id"))
def _event_subquery(self, event: str, key: str): return Event.objects.filter(pk=OuterRef(event)).values(key)[:1]
def get(self, request, course_key):
    """
        Returns a gradebook entry/entries (i.e. both course and subsection-level
        grade data)
        for all users enrolled in a course, or a single user enrolled in a course
        if a `username` parameter is provided.

        Args:
            request: A Django request object.
            course_key: The edx course opaque key of a course object.
        """
    course = get_course_by_id(course_key, depth=None)
    # We fetch the entire course structure up-front, and use this when iterating
    # over users to determine their subsection grades.  We purposely avoid fetching
    # the user-specific course structure for each user, because that is very expensive.
    course_data = CourseData(user=None, course=course)
    graded_subsections = list(
        grades_context.graded_subsections_for_course(
            course_data.collected_structure))
    if request.GET.get('username'):
        # Single-user mode: return one gradebook entry for that user.
        with self._get_user_or_raise(request, course_key) as grade_user:
            course_grade = CourseGradeFactory().read(grade_user, course)
            entry = self._gradebook_entry(grade_user, course,
                                          graded_subsections, course_grade)
            serializer = StudentGradebookEntrySerializer(entry)
            return Response(serializer.data)
    else:
        # Bulk mode: accumulate enrollment-queryset filters (q_objects) and
        # queryset annotations from the optional query parameters.
        q_objects = []
        annotations = {}
        if request.GET.get('user_contains'):
            # Match against username, program external user key, or email.
            search_term = request.GET.get('user_contains')
            q_objects.append(
                Q(user__username__icontains=search_term) |
                Q(programcourseenrollment__program_enrollment__external_user_key__icontains=search_term) |
                Q(user__email__icontains=search_term))
        if request.GET.get('username_contains'):
            q_objects.append(
                Q(user__username__icontains=request.GET.get(
                    'username_contains')))
        if request.GET.get('cohort_id'):
            cohort = cohorts.get_cohort_by_id(course_key,
                                              request.GET.get('cohort_id'))
            if cohort:
                q_objects.append(Q(user__in=cohort.users.all()))
            else:
                # Unknown cohort id: match nobody rather than everybody.
                q_objects.append(Q(user__in=[]))
        if request.GET.get('enrollment_mode'):
            q_objects.append(Q(mode=request.GET.get('enrollment_mode')))
        if request.GET.get('assignment') and (
                request.GET.get('assignment_grade_max')
                or request.GET.get('assignment_grade_min')):
            # Effective percentage prefers a grade override when present.
            subqueryset = PersistentSubsectionGrade.objects.annotate(
                effective_grade_percentage=Case(
                    When(override__isnull=False,
                         then=(F('override__earned_graded_override') /
                               F('override__possible_graded_override')) *
                         100),
                    default=(F('earned_graded') / F('possible_graded')) *
                    100))
            grade_conditions = {
                'effective_grade_percentage__range':
                (request.GET.get('assignment_grade_min', 0),
                 request.GET.get('assignment_grade_max', 100))
            }
            annotations['selected_assignment_grade_in_range'] = Exists(
                subqueryset.filter(course_id=OuterRef('course'),
                                   user_id=OuterRef('user'),
                                   usage_key=UsageKey.from_string(
                                       request.GET.get('assignment')),
                                   **grade_conditions))
            q_objects.append(Q(selected_assignment_grade_in_range=True))
        if request.GET.get('course_grade_min') or request.GET.get(
                'course_grade_max'):
            grade_conditions = {}
            q_object = Q()
            course_grade_min = request.GET.get('course_grade_min')
            if course_grade_min:
                # Percent bounds arrive as 0-100; stored grades are 0-1.
                course_grade_min = float(
                    request.GET.get('course_grade_min')) / 100
                grade_conditions['percent_grade__gte'] = course_grade_min
            if request.GET.get('course_grade_max'):
                course_grade_max = float(
                    request.GET.get('course_grade_max')) / 100
                grade_conditions['percent_grade__lte'] = course_grade_max
            if not course_grade_min or course_grade_min == 0:
                # A missing persistent grade counts as "within range" when
                # the lower bound is absent or zero.
                subquery_grade_absent = ~Exists(
                    PersistentCourseGrade.objects.filter(
                        course_id=OuterRef('course'),
                        user_id=OuterRef('user_id'),
                    ))
                annotations['course_grade_absent'] = subquery_grade_absent
                q_object |= Q(course_grade_absent=True)
            subquery_grade_in_range = Exists(
                PersistentCourseGrade.objects.filter(
                    course_id=OuterRef('course'),
                    user_id=OuterRef('user_id'),
                    **grade_conditions))
            annotations['course_grade_in_range'] = subquery_grade_in_range
            q_object |= Q(course_grade_in_range=True)
            q_objects.append(q_object)
        entries = []
        related_models = ['user']
        users = self._paginate_users(course_key, q_objects, related_models,
                                     annotations=annotations)
        users_counts = self._get_users_counts(course_key, q_objects,
                                              annotations=annotations)
        # Compute grades for the current page of users, reusing the
        # pre-collected block structure; users whose computation raised are
        # skipped.
        with bulk_gradebook_view_context(course_key, users):
            for user, course_grade, exc in CourseGradeFactory().iter(
                    users,
                    course_key=course_key,
                    collected_block_structure=course_data.
                    collected_structure):
                if not exc:
                    entry = self._gradebook_entry(user, course,
                                                  graded_subsections,
                                                  course_grade)
                    entries.append(entry)
        serializer = StudentGradebookEntrySerializer(entries, many=True)
        return self.get_paginated_response(serializer.data, **users_counts)