class CompanyChartsViewSet(GenericViewSet, ListModelMixin):
    """Read-only endpoint producing (data, date) chart points per company.

    ``data`` is the company's summed per-session average (answer average
    weighted by the question set) further weighted by the session theme;
    ``date`` is the session's end date, or "now" for sessions still running.
    """

    # BUG FIX: the annotation alias used to be ``__session_average``. Inside a
    # class body CPython name-mangles double-underscore *keyword arguments* to
    # ``_CompanyChartsViewSet__session_average``, while the string reference in
    # Sum("__session_average") is NOT mangled, so the alias could never be
    # resolved. Use one non-dunder alias in both places.
    queryset = Company.objects.annotate(
        session_average=Subquery(
            Session.objects.filter(company=OuterRef("id")).annotate(
                value=ExpressionWrapper(
                    Avg("answered_questions__value") * F("set__weight"),
                    output_field=DecimalField(
                        max_digits=3,
                        decimal_places=2,
                    ),
                )
            ).values("value")
        )
    ).annotate(
        # Use the session's end date when it is already in the past,
        # otherwise the chart point is dated "now".
        date=Case(
            When(sessions__until__lte=Now(), then=F("sessions__until")),
            default=Now()),
        data=ExpressionWrapper(
            Sum("session_average") * F("sessions__theme__weight"),
            output_field=DecimalField(
                decimal_places=2,
                max_digits=3)),
    ).values("data", "date")
    serializer_class = CompanyChartSerializer

    def filter_queryset(self, queryset):
        # Restrict chart data to the requesting user's own company.
        return queryset.filter(id=self.request.user.member.company_id)
def get_queryset(self):
    """Return active dorms annotated with their distance (km) from the user.

    The distance is computed in the database by the
    ``sohae_calculate_distance5`` stored procedure (Haversine formula), using
    the latitude and longitude captured from the URL kwargs.
    """
    # Get latitude and longitude from user
    lat1 = self.kwargs['user_latitude']
    lon1 = self.kwargs['user_longitude']
    """
    WITHOUT use of any external library, using raw PostgreSQL and Haversine Formula
    http://en.wikipedia.org/wiki/Haversine_formula
    """
    """
    STOP! Before running, add the following stored procedure!

    CREATE FUNCTION sohae_calculate_distance5(u_lat float, u_lng float, d_id integer)
    RETURNS float
    AS $$
    SELECT (6367*acos(cos(radians(u_lat))
                     *cos(radians(dorm_latitude))
                     *cos(radians(dorm_longitude)
                     -radians(u_lng))
                     +sin(radians(u_lat))*sin(radians(dorm_latitude))))
                     AS distance
            FROM sohaegeum_sohaedorm
            WHERE id = d_id
    $$ LANGUAGE SQL;
    """
    # BUG FIX: SQL query parameters must be plain Python values. The previous
    # code passed ExpressionWrapper(F('id')) as the third %s parameter, which
    # the database driver cannot adapt. Reference the table column directly in
    # the raw SQL instead, and only bind the two literal coordinates.
    return models.SohaeDorm.objects.annotate(
        # Add the distance in KM as an attribute
        distance=RawSQL(
            "SELECT sohae_calculate_distance5(%s, %s, sohaegeum_sohaedorm.id)"
            " AS distance",
            (lat1, lon1),
        )
    ).filter(is_active=True)
def make_regions_cte(cte):
    # Build the body of a recursive CTE over Region.
    #
    # Seed part: regions without a parent ("planets"), at depth 0; ``path``
    # starts as the region's own name. The recursive part joins children to
    # the CTE rows via the parent name, extends the materialized path and
    # increments the depth. NOTE: ``int_field``/``text_field`` are
    # module-level output-field instances defined elsewhere in the file.
    return Region.objects.filter(parent__isnull=True).values(
        "name",
        path=F("name"),
        depth=Value(0, output_field=int_field),
        is_planet=Value(0, output_field=int_field),
    ).union(
        cte.join(Region, parent=cte.col.name).annotate(
            # annotations for filter and CASE/WHEN conditions
            parent_name=ExpressionWrapper(
                cte.col.name,
                output_field=text_field,
            ),
            parent_depth=ExpressionWrapper(
                cte.col.depth,
                output_field=int_field,
            ),
        ).filter(
            # Prune the subtree rooted at "mars".
            ~Q(parent_name="mars"),
        ).values(
            "name",
            # Extend the path with a \x01 separator between components.
            path=Concat(
                cte.col.path,
                Value("\x01"),
                F("name"),
                output_field=text_field,
            ),
            depth=cte.col.depth + Value(1, output_field=int_field),
            # NOTE(review): in the recursive part the flag is 1 when the
            # *parent* sits at depth 0 — i.e. it marks direct children of
            # depth-0 rows; confirm the intended semantics of "is_planet".
            is_planet=Case(
                When(parent_depth=0, then=Value(1)),
                default=Value(0),
                output_field=int_field,
            ),
        ),
        all=True,
    )
def getRentalsForSummary(current_date=None):
    """Build the rentals summary for ``current_date``.

    Returns a dict with:
      - 'count': counts of rentals ending today (total, and how many paid)
      - 'total_profit': gross value across all rental detail lines
      - 'students': students owning today's rentals, annotated with totals
    """
    result_dict = {}
    # Reusable Q filters. NOTE(review): created_today_filter and
    # unpaid_filter are currently unused below — confirm whether they are
    # dead code or a pending feature.
    created_today_filter = Q(created_date__date=current_date)
    ends_today_filter = Q(end_date__date=current_date)
    paid_filter = Q(paid=True)
    unpaid_filter = Q(Q(paid=False) | Q(paid__isnull=True))
    # Rentals ending today, annotated with duration in hours:
    # (end - start) appears to be in microseconds on this backend, so
    # * 0.000001 converts to seconds and / 3600 to hours.
    # NOTE(review): Value('3600', IntegerField()) binds the divisor as a
    # string — presumably coerced by the database; confirm.
    rentals = Rental.objects.filter(
        ends_today_filter).annotate(
        rent_duration=ExpressionWrapper(
            (
                (F('end_date') - F('start_date')) * 0.000001
                / Value('3600', IntegerField())),
            output_field=DecimalField(decimal_places=1)
        )
    )
    # Correlated subquery copying each rental's duration onto its detail rows.
    rent_subquery = rentals.filter(rentaldetail=OuterRef('id'))
    rentals_detail = RentalDetail.objects.filter(
        rental__in=rentals).select_related('rental').annotate(
        rent_duration=Subquery(rent_subquery.values('rent_duration'))
    ).annotate(item_rent_gross_amt=ExpressionWrapper(
        F('rent_duration') * F('price') * F('quantity'),
        output_field=FloatField()))
    # Per-rental total of detail gross amounts (grouped by rental_id).
    rent_detail_subquery = rentals_detail.filter(
        rental_id=OuterRef('id')).values('item_rent_gross_amt')
    test = rent_detail_subquery.values('rental_id').annotate(
        total=Sum('item_rent_gross_amt')).values('total')
    rentals = rentals.prefetch_related(
        Prefetch('rentaldetail_set', rentals_detail)).annotate(
        total_rent_amt=Subquery(test))
    # Students owning today's rentals, with their per-student grand total.
    students_with_rentals = Student.objects.filter(
        Q(rental__in=rentals)).prefetch_related(
        Prefetch(
            'rental_set',
            rentals)).annotate(student_total_rent_amt=Subquery(
                rentals.values('student_id').annotate(
                    total_amt=Sum('total_rent_amt')).values(
                    'total_amt'))).order_by('name').distinct()

    # Aggregates used for overall summary
    rentals_counts = rentals.aggregate(
        ends_today=Count('*'),
        paid=Count('paid', filter=paid_filter),
    )
    total_rentals_value = rentals_detail.aggregate(
        sum=Sum('item_rent_gross_amt'))
    result_dict['count'] = rentals_counts
    result_dict['total_profit'] = total_rentals_value
    result_dict['students'] = students_with_rentals
    return result_dict
def stats(cls, events=None):
    '''
    Returns stats on the events queryset provided.

    :param events: A queryset of events, that have the fields sessions, games, players
    '''
    if events is None:
        events = cls.implicit()

    if events:
        # Tailwind's Median aggregator does not work on Durations (PostgreSQL
        # Intervals), so we have to convert them to epoch time. Extract is a
        # Django function that can extract 'epoch', which is the documented
        # way of casting a PostgreSQL interval to epoch time.
        # https://www.postgresql.org/message-id/19495.1059687790%40sss.pgh.pa.us
        # Django does not document 'epoch', alas, but it works:
        # https://docs.djangoproject.com/en/4.0/ref/models/database-functions/#extract
        # We need a Django ExpressionWrapper to cast the duration field to a
        # DurationField, as even though it is a PostgreSQL interval, Django
        # still thinks of it as a DateTimeField (from the difference of two
        # DateTimeFields, I guess — a bug/feature that fails to recast a
        # difference of DateTimeFields as a DurationField).
        epoch_duration = Extract(
            ExpressionWrapper(F('duration'), output_field=DurationField()),
            lookup_name='epoch')
        epoch_gap = Extract(
            ExpressionWrapper(F('gap_time'), output_field=DurationField()),
            lookup_name='epoch')

        result = events.aggregate(Min('sessions'),
                                  Avg('sessions'),
                                  Median('sessions'),
                                  Max('sessions'),
                                  Min('games'),
                                  Avg('games'),
                                  Median('games'),
                                  Max('games'),
                                  Min('players'),
                                  Avg('players'),
                                  Median('players'),
                                  Max('players'),
                                  duration__min=Min('duration'),
                                  duration__avg=Avg('duration'),
                                  duration__median=Median(epoch_duration),
                                  duration__max=Max('duration'),
                                  gap__min=Min('gap_time'),
                                  gap__avg=Avg('gap_time'),
                                  gap__median=Median(epoch_gap),
                                  gap__max=Max('gap_time'))

        # aggregate() is a QuerySet endpoint (i.e. it evaluates the query and
        # returns a standard dict), so we can cast the epoch medians back to
        # Durations for the consumer.
        result['duration__median'] = timedelta(seconds=result['duration__median'])
        result['gap__median'] = timedelta(seconds=result['gap__median'])
    else:
        result = None

    return result
def test_outerref_in_cte_query(self):
    # This query is meant to return the difference between min and max
    # order of each region, through a subquery
    min_and_max = With(
        Order.objects.filter(region=OuterRef("pk")).values(
            'region')  # This is to force group by region_id
        .annotate(
            amount_min=Min("amount"),
            amount_max=Max("amount"),
        ).values('amount_min', 'amount_max'))
    # Use the CTE inside a correlated scalar subquery computing
    # amount_max - amount_min for the annotated region; [:1] keeps the
    # subquery scalar.
    regions = (Region.objects.annotate(
        difference=Subquery(min_and_max.queryset().with_cte(
            min_and_max).annotate(difference=ExpressionWrapper(
                F('amount_max') - F('amount_min'),
                output_field=int_field,
            ), ).values('difference')[:1],
            output_field=IntegerField())).order_by("name"))
    print(regions.query)

    data = [(r.name, r.difference) for r in regions]
    # Regions without any orders yield no subquery row, hence None.
    self.assertEqual(data, [("bernard's star", None), ('deimos', None),
                            ('earth', 3), ('mars', 2), ('mercury', 2),
                            ('moon', 2), ('phobos', None),
                            ('proxima centauri', 0),
                            ('proxima centauri b', 2), ('sun', 0),
                            ('venus', 3)])
def get_total(self):
    """Return the total cost of this purchase: sum of price * quantity
    over its items, or None if the purchase has no items.
    """
    # BUG FIX: the previous version wrapped the per-row product in Sum() and
    # then aggregated Sum() over that annotation — an aggregate of an
    # aggregate. Annotate the plain per-row subtotal and aggregate once.
    total = PurchaseItem.objects.filter(purchase=self) \
        .annotate(
            subtotal=ExpressionWrapper(
                F('price') * F('quantity'),
                output_field=FloatField()
            )).aggregate(total=Sum('subtotal'))
    return total['total']
def _get_events_query(self) -> Tuple[str, list]:
    """Build the raw SQL for this team's events, annotated with one boolean
    column per configured entity filter.

    Returns a tuple of (sql string, list of annotation column names).
    """
    base_qs: Union[EventManager, QuerySet] = (
        Event.objects.filter(team=self._team)
        .order_by("-timestamp")
        .only("distinct_id", "timestamp")
    )

    # Widen the requested window by 12 hours on each side.
    if self._filter.date_from:
        base_qs = base_qs.filter(
            timestamp__gte=self._filter.date_from - timedelta(hours=12))
    if self._filter.date_to:
        base_qs = base_qs.filter(
            timestamp__lte=self._filter.date_to + timedelta(hours=12))

    annotation_keys = []
    entity_filters = []
    for index, entity in enumerate(self._filter.entities):
        column = f"entity_{index}"
        entity_q = entity_to_Q(entity, self._team.pk)
        entity_filters.append(entity_q)
        base_qs = base_qs.annotate(
            **{column: ExpressionWrapper(entity_q, output_field=BooleanField())})
        annotation_keys.append(column)

    # Keep only rows matching at least one entity filter.
    any_entity_q = Q()
    for entity_q in entity_filters:
        any_entity_q |= entity_q
    base_qs = base_qs.filter(any_entity_q)

    base_qs = base_qs.values_list("distinct_id", "timestamp", *annotation_keys)

    with connection.cursor() as cursor:
        event_sql = cursor.mogrify(*base_qs.query.sql_with_params()).decode("utf-8")
    return event_sql, annotation_keys
def test_expressions(self):
    """repr() of each expression type produces its documented debug form."""
    self.assertEqual(
        repr(Case(When(a=1))),
        "<Case: CASE WHEN <Q: (AND: ('a', 1))> THEN Value(None), ELSE Value(None)>"
    )
    self.assertEqual(repr(Col('alias', 'field')), "Col(alias, field)")
    self.assertEqual(repr(Date('published', 'exact')), "Date(published, exact)")
    # DateTime repr embeds the timezone object supplied at construction.
    self.assertEqual(repr(DateTime('published', 'exact', utc)),
                     "DateTime(published, exact, %s)" % utc)
    self.assertEqual(repr(F('published')), "F(published)")
    self.assertEqual(repr(F('cost') + F('tax')),
                     "<CombinedExpression: F(cost) + F(tax)>")
    # The wrapper repr intentionally omits its output_field.
    self.assertEqual(
        repr(ExpressionWrapper(
            F('cost') + F('tax'), models.IntegerField())),
        "ExpressionWrapper(F(cost) + F(tax))")
    self.assertEqual(repr(Func('published', function='TO_CHAR')),
                     "Func(F(published), function=TO_CHAR)")
    self.assertEqual(repr(OrderBy(Value(1))),
                     'OrderBy(Value(1), descending=False)')
    self.assertEqual(repr(Random()), "Random()")
    self.assertEqual(repr(RawSQL('table.col', [])), "RawSQL(table.col, [])")
    self.assertEqual(repr(Ref('sum_cost', Sum('cost'))),
                     "Ref(sum_cost, Sum(F(cost)))")
    self.assertEqual(repr(Value(1)), "Value(1)")
def query_conditions(self) -> List[List[bool]]:
    """Evaluate every feature-flag condition against the current target.

    Builds one boolean annotation per condition on either the Person or the
    Group queryset (depending on whether the flag aggregates by group) and
    returns one row of booleans per matching database row.
    """
    if self.feature_flag.aggregation_group_type_index is None:
        # Person-based flag: match on the distinct id within the team.
        target_qs: QuerySet = Person.objects.filter(
            team_id=self.feature_flag.team_id,
            persondistinctid__distinct_id=self.distinct_id,
            persondistinctid__team_id=self.feature_flag.team_id,
        )
    else:
        # Group-based flag: match on the hashed group key.
        target_qs = Group.objects.filter(
            team_id=self.feature_flag.team_id,
            group_type_index=self.feature_flag.aggregation_group_type_index,
            group_key=self.hashed_identifier,
        )

    annotation_names = []
    for index, condition in enumerate(self.feature_flag.conditions):
        name = f"condition_{index}"
        if len(condition.get("properties", {})) > 0:
            # Feature Flags don't support OR filtering yet
            condition_expr: Any = properties_to_Q(
                Filter(data=condition).property_groups.flat,
                team_id=self.feature_flag.team_id,
                is_direct_query=True)
        else:
            # A condition without properties trivially matches.
            condition_expr = RawSQL("true", [])
        target_qs = target_qs.annotate(
            **{name: ExpressionWrapper(condition_expr,
                                       output_field=BooleanField())})
        annotation_names.append(name)

    return list(target_qs.values_list(*annotation_names))
def test_date_subtraction(self):
    """Subtracting two DateTimeFields via ExpressionWrapper yields a
    DurationField that supports timedelta comparisons in filters."""
    annotated = Experiment.objects.annotate(
        completion_duration=ExpressionWrapper(
            F('completed') - F('assigned'),
            output_field=models.DurationField()))

    def names_taking_at_least(days):
        qs = annotated.filter(
            completion_duration__gte=datetime.timedelta(days=days))
        return {e.name for e in qs}

    self.assertEqual(names_taking_at_least(5), {'e3', 'e4', 'e5'})
    self.assertEqual(names_taking_at_least(120), {'e5'})

    under_5_days = {
        e.name
        for e in annotated.filter(
            completion_duration__lt=datetime.timedelta(days=5))
    }
    expected = {'e0', 'e2'}
    # e1's duration only falls under 5 days at microsecond precision.
    if connection.features.supports_microsecond_precision:
        expected.add('e1')
    self.assertEqual(under_5_days, expected)
def filter_quantity(qs, quantity_value, warehouse_ids=None):
    """Filter products queryset by product variants quantity.

    Return product queryset which contains at least one variant with
    aggregated quantity between given range. If warehouses is given, it
    aggregates quantity only from stocks which are in given warehouses.
    """
    stock_qs = Stock.objects.all()
    if warehouse_ids:
        _, warehouse_pks = resolve_global_ids_to_primary_keys(
            warehouse_ids, warehouse_types.Warehouse
        )
        stock_qs = stock_qs.filter(warehouse_id__in=warehouse_pks)
    # Correlate stocks with the outer variant and sum their quantities.
    correlated_stocks = stock_qs.values("product_variant_id").filter(
        product_variant_id=OuterRef("pk")
    )
    total_quantity_sq = Subquery(correlated_stocks.values_list(Sum("quantity")))
    annotated_variants = ProductVariant.objects.annotate(
        total_quantity=ExpressionWrapper(
            total_quantity_sq, output_field=IntegerField())
    )
    matching_product_ids = list(
        filter_range_field(
            annotated_variants, "total_quantity", quantity_value
        ).values_list("product_id", flat=True)
    )
    return qs.filter(pk__in=matching_product_ids)
def lead_device_type_totals(user: User, date_from: date, date_to: date,
                            projects: list = None, label_type=None,
                            label_values=None, os_groups=None,
                            browser_groups=None, traffic_channels=None):
    """Count the user's leads per device category (Mobile/Desktop/Unknown)."""
    qs = _apply_lead_common_filters(
        Lead.objects.filter(pixel__project__user=user),
        date_from, date_to, projects, label_type, label_values,
        os_groups, browser_groups, traffic_channels)
    return (
        qs.annotate(lead_duration=ExpressionWrapper(
            F('created') - F('session_started'),
            output_field=DurationField()))
        # Collapse phone/tablet into "Mobile"; anything else is "Unknown".
        .annotate(group_name=Case(
            When(device_model__device_type__category__in=(DeviceType.PHONE,
                                                          DeviceType.TABLET),
                 then=Value('Mobile')),
            When(device_model__device_type__category=(DeviceType.DESKTOP),
                 then=Value('Desktop')),
            default=Value(_('Unknown')),
            output_field=CharField()))
        .values('group_name')
        .annotate(leads_count=Count('id'))
        .order_by('group_name')
    )
def lead_duration_by_period(user: User, date_from: date, date_to: date,
                            groups: list = None, projects: list = None,
                            label_type=None, label_values=None,
                            os_groups=None, browser_groups=None,
                            traffic_channels=None):
    """Count leads per (duration group, period bucket) for the given user."""
    qs = _apply_lead_common_filters(
        Lead.objects.filter(pixel__project__user=user),
        date_from, date_to, projects, label_type, label_values,
        os_groups, browser_groups, traffic_channels)
    qs = qs.annotate(lead_duration=ExpressionWrapper(
        F('created') - F('session_started'),
        output_field=DurationField()))
    qs = qs.annotate(group_name=_lead_duration_groups_case())
    if groups:
        qs = qs.filter(group_name__in=groups)
    # Bucket rows by the time unit implied by the overall date range.
    scale_period = get_scale_period(date_from, date_to)
    qs = qs.annotate(created_period=_leads_period_unit_expr(scale_period))
    return (qs.values('group_name', 'created_period')
              .annotate(leads_count=Count('id'))
              .order_by('created_period', 'group_name'))
def events_query(
    self,
    team: Team,
    filter: SessionsFilter,
    date_filter: Q,
    distinct_ids: List[str],
    start_timestamp: Optional[str],
) -> QuerySet:
    """Return the team's events for the given distinct ids, newest first,
    annotated with $current_url and one boolean per action filter."""
    qs = (
        Event.objects.filter(team=team)
        .filter(date_filter)
        .filter(distinct_id__in=distinct_ids)
        .order_by("-timestamp")
        .only("distinct_id", "timestamp")
        .annotate(current_url=KeyTextTransform("$current_url", "properties"))
    )
    # Page backwards from an explicit unix-timestamp cursor, if provided.
    if start_timestamp is not None:
        qs = qs.filter(
            timestamp__lt=datetime.fromtimestamp(float(start_timestamp)))

    annotation_names = []
    for index, entity in enumerate(filter.action_filters):
        name = f"entity_{index}"
        entity_match = ExpressionWrapper(
            entity_to_Q(entity, team.pk), output_field=BooleanField())
        qs = qs.annotate(**{name: entity_match})
        annotation_names.append(name)

    return qs.values_list(
        "distinct_id", "timestamp", "current_url", *annotation_names)
def _episodes_queryset(podcast: Podcast) -> QuerySet[Podcast]:
    """
    Builds a query set that should give out episodes of a podcast according to
    the following criteria:

    if not older than 7 days:
        On any day
    else if not older than 30 days:
        On every 7th day
    else if older than 30 days:
        On every 28th day
    """
    # NOTE(review): the return annotation says QuerySet[Podcast], but the
    # queryset is built from podcast.episodes — presumably an Episode
    # queryset; confirm the annotation.
    now = local_now()
    today = local_today()
    isocalendar_today = today.isocalendar()
    # Pseudo "day of year" built from (ISO week * 7 + ISO weekday); only its
    # value modulo 28 matters for the scheduling below.
    standard_day_of_year_today = isocalendar_today[1] * 7 + isocalendar_today[2]
    # Episodes that dropped out of availability less than 5 days ago are
    # still considered.
    last_available_cutoff = now - dt.timedelta(days=5)
    return (podcast.episodes.exclude(spotify_id=None).filter(
        Q(available=True)
        | Q(last_available_date_time__gt=last_available_cutoff)
    ).annotate(
        # Determine general age
        age_td=ExpressionWrapper(
            now - F("publication_date_time"),
            output_field=DurationField(),
        ),
        # Calculate something akin to "day of year" using the ISO calendar
        # and take modulo 28 for filtering
        standard_day_of_year_modulo=ExpressionWrapper(
            Cast(
                F("publication_date_time__week") * 7
                + F("publication_date_time__iso_week_day"),
                IntegerField(),
            ) % 28,
            output_field=IntegerField(),
        ),
    ).filter(
        # New episodes every day
        Q(age_td__lte=dt.timedelta(days=7))
        # In first month every 7 days (roughly, some weekdays might be more busy)
        | (Q(age_td__lte=dt.timedelta(days=30))
           & Q(publication_date_time__iso_week_day=isocalendar_today[2]))
        # After that on every 28th day
        | (Q(age_td__gt=dt.timedelta(days=30))
           & Q(standard_day_of_year_modulo=(standard_day_of_year_today % 28))))
    )
def get_context_data(self, **kwargs):
    """Build the daily-summary context: today's lessons, per-instructor
    durations and prices, the rentals summary, and assorted counts."""
    context = super().get_context_data(**kwargs)
    if (self.kwargs):
        # URL kwargs supply the day being summarised.
        self.current_date = self.kwargs['summary_date']
    lessons_today = Lesson.objects.filter(
        start_date=self.current_date).order_by('start_time')
    # .annotate(
    #     students_detail_duration_sum=Sum('lessondetail__duration')
    # )
    lesson_detail = LessonDetail.objects.filter(
        Q(lesson__in=lessons_today))
    # Instructors with today's lessons prefetched, annotated with summed
    # detail durations, summed lesson durations and summed detail prices.
    instructors_with_lessons = self.queryset.prefetch_related(
        Prefetch('lessons', lessons_today)).annotate(
        lessondetail_duration_sum=Sum(
            'lessons__lessondetail__duration',
            filter=Q(lessons__lessondetail__in=lesson_detail))
    ).annotate(instructor_lessons_duration_sum=Sum(
        'lessons__duration', filter=Q(
            lessons__in=lessons_today))).annotate(
        lessons_price_sum=Sum(
            'lessons__lessondetail__price',
            filter=Q(lessons__lessondetail__in=lesson_detail)))
    context['rentals'] = RentalView.getRentalsForSummary(self.current_date)
    context['instructors_with_lessons'] = instructors_with_lessons
    # Profit = sum over detail rows of duration * pay_rate.
    context['profit'] = lesson_detail.values(
        'duration', 'pay_rate', 'lesson_id').annotate(
        lesson_cost=ExpressionWrapper(
            F('duration') * F('pay_rate'),
            output_field=FloatField())).aggregate(
        total_profit=Sum('lesson_cost'))
    # context['duration_sum'] = lessons_today.aggregate(sum=(Sum('duration')))
    context['duration_sum'] = lesson_detail.aggregate(
        sum=(Sum('duration')))
    context['lesson_detail'] = lesson_detail
    context['single_lessons_count'] = lessons_today.filter(
        group_lesson=False).count()
    context['group_lessons_count'] = lessons_today.filter(
        group_lesson=True).count()
    context['completed_count'] = lessons_today.filter(
        completed=True).count()
    context['lessons_count'] = lessons_today.count()
    # context['hours'] = range(7,22)
    context['current_date'] = self.current_date
    context['current_time'] = datetime.today().hour
    # Navigation dates for the previous/next day links.
    context['previous_date'] = self.current_date - timedelta(days=1)
    context['next_date'] = datetime.date(self.current_date + timedelta(days=1))
    return context
def test_q_annotation(self):
    """An ExpressionWrapper around a composite Q (RawSQL | Exists) can be
    added as a query annotation and evaluates truthily."""
    query = Query(None)
    wrapped_q = ExpressionWrapper(
        Q(RawSQL("%s IS NULL", (None, ), BooleanField()))
        | Q(Exists(Item.objects.all())),
        BooleanField(),
    )
    query.add_annotation(wrapped_q, "_check")
    row = query.get_compiler(using=DEFAULT_DB_ALIAS).execute_sql(SINGLE)
    self.assertEqual(row[0], 1)
def get_context_data(self, **kwargs):
    """Build the payroll context for active instructors over the requested
    dateFrom..dateTo range: lesson counts, durations and payroll values."""
    context = super().get_context_data(**kwargs)
    payroll_start_date = self.request.GET['dateFrom']
    payroll_end_date = self.request.GET['dateTo']
    # NOTE(review): Q(instructor=F('instructor')) compares a column with
    # itself and only excludes NULL instructors — confirm the intent.
    filtered_lessons = Lesson.objects.filter(
        Q(start_date__gte=payroll_start_date,
          start_date__lte=payroll_end_date),
        Q(instructor=F('instructor')))
    # NOTE(review): Q(lessons=F('lessons')) is likewise self-referential;
    # presumably meant to require at least one lesson — confirm.
    instructors = Instructor.objects.filter(
        Q(active=True),
        Q(lessons=F('lessons')),
        Q(lessons__start_date__gte=payroll_start_date,
          lessons__start_date__lte=payroll_end_date)).prefetch_related(
        Prefetch('lessons', filtered_lessons)).annotate(
        single_lessons_count=Count(
            'lessons', filter=Q(lessons__group_lesson=False)),
        single_lessons_duration=Sum(
            'lessons__duration', filter=Q(lessons__group_lesson=False)),
        group_lessons_count=Count(
            'lessons', filter=Q(lessons__group_lesson=True)),
        group_lessons_duration=Sum(
            'lessons__duration', filter=Q(lessons__group_lesson=True)),
        # Value = duration * pay rate; NULLs coalesced to 0.0 so instructors
        # without lessons of a kind get 0 rather than NULL.
        single_lessons_value=ExpressionWrapper(
            Coalesce(F('single_lessons_duration'), Value(0.0))
            * Coalesce(F('pay_rate_single'), Value(0.0)),
            output_field=DecimalField(decimal_places=2)),
        group_lessons_value=ExpressionWrapper(
            Coalesce(F('group_lessons_duration'), Value(0.0))
            * Coalesce(F('pay_rate_group'), Value(0.0)),
            output_field=DecimalField(
                decimal_places=2))).annotate(
        payroll_sum=F('single_lessons_value')
        + F('group_lessons_value'))
    context['instructors'] = instructors
    return context
def resolve_plants(self, info, **kwargs):
    """Return the authenticated user's plants ordered by their next watering
    time; raises GraphQLError for anonymous users."""
    try:
        if not info.context.user.is_authenticated:
            raise GraphQLError('Unauthorized')
        # Next watering = last watered + season-dependent interval + any
        # user-requested postponement.
        watering_field = get_time_between_watering_field_for_current_season(
            timezone.now())
        next_watering = ExpressionWrapper(
            (F('watered') + F(watering_field) + F('postpone_days')),
            output_field=DateTimeField())
        return (
            Plant.objects
            .filter(owner=info.context.user)
            .annotate(when_to_water=next_watering)
            .order_by("when_to_water")
            .select_related("room")
        )
    except Plant.DoesNotExist:
        return None
def get_interval_annotation(key: str) -> Dict[str, Any]:
    """Map an interval name to a {name: Trunc* expression} annotation dict.

    Unknown keys fall back to daily truncation under the "day" key.
    """
    # Renamed from ``map`` (shadowed the builtin) and the default branch now
    # does a single dict lookup instead of two.
    truncators: Dict[str, Any] = {
        "minute": functions.TruncMinute("timestamp"),
        "hour": functions.TruncHour("timestamp"),
        "day": functions.TruncDay("timestamp"),
        # Timestamp shifted by one day before weekly truncation —
        # NOTE(review): presumably to move the week boundary; confirm.
        "week": functions.TruncWeek(
            ExpressionWrapper(F("timestamp") + datetime.timedelta(days=1),
                              output_field=DateTimeField())
        ),
        "month": functions.TruncMonth("timestamp"),
    }
    func = truncators.get(key)
    if func is None:
        return {"day": truncators["day"]}  # default
    return {key: func}
def test_time_subtraction(self):
    """Subtracting a Time Value from a TimeField yields a DurationField."""
    # Microsecond expectations depend on backend precision support.
    if connection.features.supports_microsecond_precision:
        stored_time = datetime.time(12, 30, 15, 2345)
        expected_delta = datetime.timedelta(
            hours=1, minutes=15, seconds=15, microseconds=2345)
    else:
        stored_time = datetime.time(12, 30, 15)
        expected_delta = datetime.timedelta(hours=1, minutes=15, seconds=15)
    Time.objects.create(time=stored_time)
    annotated = Time.objects.annotate(
        difference=ExpressionWrapper(
            F('time') - Value(datetime.time(11, 15, 0),
                              output_field=models.TimeField()),
            output_field=models.DurationField(),
        )
    )
    self.assertEqual(annotated.get().difference, expected_delta)
def get_top_40_by_score():
    """Return the top 40 visible users ordered by total challenge points.

    A solved challenge is one whose submission matches the active flag
    (QPA vs hacktivity); using a hint halves that challenge's points.
    """
    if Config.objects.is_qpa():
        flag_field = F("userchallenge__challenge__flag_qpa")
    else:
        flag_field = F("userchallenge__challenge__flag_hacktivity")

    points = F("userchallenge__challenge__points")
    hint_penalty = Cast("userchallenge__hint_used", IntegerField())
    # Full points normally; half points when a hint was used.
    effective_points = ExpressionWrapper(
        points - (hint_penalty * points * 0.5),
        output_field=IntegerField())

    solvers = User.objects.filter(
        userchallenge__submission__value__iexact=flag_field,
        userchallenge__challenge__hidden=False,
        hidden=False,
    )
    return (solvers.values("username")
            .annotate(total_points=Coalesce(Sum(effective_points), 0))
            .order_by("-total_points")[:40])
def get_score(self):
    """Return this user's total score across visible, solved challenges.

    Using a hint halves the points for that challenge; users with no
    solves score 0 (Coalesce).
    """
    if Config.objects.is_qpa():
        flag_field = F("challenge__flag_qpa")
    else:
        flag_field = F("challenge__flag_hacktivity")

    points = F("challenge__points")
    hint_penalty = Cast("hint_used", IntegerField())
    effective_points = ExpressionWrapper(
        points - (hint_penalty * points * 0.5),
        output_field=IntegerField())

    solved = UserChallenge.objects.filter(
        user=self,
        submission__value__iexact=flag_field,
        challenge__hidden=False,
    )
    return (solved.annotate(points_with_hint=effective_points)
            .aggregate(total_points=Coalesce(Sum("points_with_hint"), 0))
            .get("total_points"))
def query_groups(self) -> List[List[bool]]:
    """Annotate the person row with one boolean per feature-flag group and
    return the resulting rows lazily as a values_list queryset."""
    person_qs: QuerySet = Person.objects.filter(
        team_id=self.feature_flag.team_id,
        persondistinctid__distinct_id=self.distinct_id,
        persondistinctid__team_id=self.feature_flag.team_id,
    )
    annotation_names = []
    for index, group in enumerate(self.feature_flag.groups):
        name = f"group_{index}"
        group_match = properties_to_Q(
            Filter(data=group).properties,
            team_id=self.feature_flag.team_id,
            is_person_query=True,
        )
        person_qs = person_qs.annotate(
            **{name: ExpressionWrapper(group_match,
                                       output_field=BooleanField())})
        annotation_names.append(name)
    return person_qs.values_list(*annotation_names)
def query_groups(self) -> List[List[bool]]:
    """Evaluate each feature-flag group against the person and return one
    row of booleans per matching person row."""
    person_qs: QuerySet = Person.objects.filter(
        team_id=self.feature_flag.team_id,
        persondistinctid__distinct_id=self.distinct_id,
        persondistinctid__team_id=self.feature_flag.team_id,
    )
    columns = []
    for index, group in enumerate(self.feature_flag.groups):
        column = f"group_{index}"
        if len(group.get("properties", {})) > 0:
            match_expr: Any = properties_to_Q(
                Filter(data=group).properties,
                team_id=self.feature_flag.team_id,
                is_person_query=True)
        else:
            # A group without properties matches everyone.
            match_expr = RawSQL("true", [])
        person_qs = person_qs.annotate(
            **{column: ExpressionWrapper(match_expr,
                                         output_field=BooleanField())})
        columns.append(column)
    return list(person_qs.values_list(*columns))
def lead_os_totals(user: User, date_from: date, date_to: date, is_mobile,
                   projects: list = None, label_type=None, label_values=None,
                   os_groups=None, browser_groups=None,
                   traffic_channels=None):
    """Count the user's leads per OS group, restricted to mobile or desktop
    operating systems according to ``is_mobile``."""
    qs = _apply_lead_common_filters(
        Lead.objects.filter(pixel__project__user=user),
        date_from, date_to, projects, label_type, label_values,
        os_groups, browser_groups, traffic_channels)
    qs = qs.filter(os_version__family__group__is_mobile=is_mobile)
    qs = qs.annotate(lead_duration=ExpressionWrapper(
        F('created') - F('session_started'),
        output_field=DurationField()))
    # Fall back to "Unknown" when the OS group is missing.
    qs = qs.annotate(group_name=Coalesce(
        F('os_version__family__group__name'), Value(_('Unknown'))))
    return (qs.values('group_name')
            .annotate(leads_count=Count('id'))
            .order_by('group_name'))
def _load_period_ipstat(fill_date: date, period: timedelta):
    """Aggregate per-IP session stats for the ``period`` ending on ``fill_date``.

    Groups leads by (ip, geo, country, postal code, provider) and annotates
    session count, total session time, count of sessions lacking a
    ``created`` timestamp, average session start time, owning user ids and
    a device count.
    """
    # Window: start of the first day of the period through the very last
    # microsecond of fill_date (inclusive end bound).
    end_time = timezone.make_aware(
        time(hour=23, minute=59, second=59, microsecond=999999))
    end_date = datetime.combine(fill_date, end_time)
    start_date = (end_date - period).replace(hour=0, minute=0, second=0,
                                             microsecond=0)
    return Lead.objects \
        .filter(session_started__range=(start_date, end_date)) \
        .filter(ip_addr__isnull=False) \
        .values('ip_addr', 'geo', 'geo__country', 'geo__postal_code',
                'provider') \
        .annotate(s_cnt=Count('id')) \
        .annotate(s_time=Sum(ExpressionWrapper(
            # Prefer 'created'; fall back to 'last_event_time' when null.
            Coalesce('created', 'last_event_time') - F('session_started'),
            output_field=DurationField()))) \
        .annotate(s0_cnt=Count(Case(
            # Counts leads whose 'created' is NULL — presumably sessions
            # that never converted; confirm the intended meaning.
            When(created__isnull=True, then=F('id')),
            default=None,
            output_field=UUIDField()))) \
        .annotate(s_beg=Cast(Avg(
            # Average time-of-day at which sessions started.
            Cast(F('session_started'), output_field=TimeField())
        ), output_field=TimeField())) \
        .annotate(user_ids=ArrayAgg('pixel__project__user__id', distinct=True)) \
        .annotate(cnt_dev=Count('device_id'))
def __init__(self, provider, report_type):
    """Constructor.

    Builds the static GCP provider query map: ``self._mapping`` holds the
    per-report-type aggregates, annotations, filters and orderings used to
    build report queries, and ``self.views`` maps group-by combinations to
    their materialized summary tables.
    """
    # group_by_annotations, filters, group_by_options, self.views
    self._mapping = [{
        "provider": Provider.PROVIDER_GCP,
        "annotations": {},  # Annotations that should always happen
        "group_by_annotations": {
            "account": {"account": "account_id"},
            "gcp_project": {"gcp_project": "project_id"},
            "service": {"service": "service_alias"},
        },
        # Annotations that should happen depending on group_by values
        "end_date": "usage_start",
        "filters": {
            "account": {"field": "account_id", "operation": "icontains"},
            "region": {"field": "region", "operation": "icontains"},
            # Entries sharing a composition_key are OR-ed together.
            "service": [
                {"field": "service_alias", "operation": "icontains",
                 "composition_key": "service_filter"},
                {"field": "service_id", "operation": "icontains",
                 "composition_key": "service_filter"},
            ],
            "gcp_project": [
                {"field": "project_name", "operation": "icontains",
                 "composition_key": "project_filter"},
                {"field": "project_id", "operation": "icontains",
                 "composition_key": "project_filter"},
            ],
            "instance_type": {"field": "instance_type", "operation": "icontains"},
        },
        "group_by_options": ["account", "region", "service", "gcp_project"],
        "tag_column": "tags",
        "report_type": {
            "costs": {
                # All sup_* values are literal zeros: GCP cost here is
                # treated entirely as infrastructure cost.
                "aggregates": {
                    "infra_total": Sum(
                        Coalesce(F("unblended_cost"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("credit_amount"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "infra_raw": Sum("unblended_cost"),
                    "infra_usage": Sum(Value(0, output_field=DecimalField())),
                    "infra_markup": Sum(
                        Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "infra_credit": Sum(
                        Coalesce(F("credit_amount"), Value(0, output_field=DecimalField()))),
                    "sup_raw": Sum(Value(0, output_field=DecimalField())),
                    "sup_usage": Sum(Value(0, output_field=DecimalField())),
                    "sup_markup": Sum(Value(0, output_field=DecimalField())),
                    "sup_total": Sum(Value(0, output_field=DecimalField())),
                    "sup_credit": Sum(Value(0, output_field=DecimalField())),
                    "cost_total": Sum(
                        Coalesce(F("unblended_cost"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("credit_amount"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "cost_raw": Sum("unblended_cost"),
                    "cost_usage": Sum(Value(0, output_field=DecimalField())),
                    "cost_markup": Sum(
                        Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "cost_credit": Sum(
                        Coalesce(F("credit_amount"), Value(0, output_field=DecimalField()))),
                },
                "aggregate_key": "unblended_cost",
                "annotations": {
                    "infra_raw": Sum("unblended_cost"),
                    "infra_usage": Value(0, output_field=DecimalField()),
                    "infra_markup": Sum(
                        Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "infra_total": Sum(
                        Coalesce(F("unblended_cost"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("credit_amount"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "infra_credit": Sum(
                        Coalesce(F("credit_amount"), Value(0, output_field=DecimalField()))),
                    "sup_raw": Value(0, output_field=DecimalField()),
                    "sup_usage": Value(0, output_field=DecimalField()),
                    "sup_markup": Value(0, output_field=DecimalField()),
                    "sup_total": Value(0, output_field=DecimalField()),
                    "sup_credit": Sum(Value(0, output_field=DecimalField())),
                    "cost_raw": Sum(
                        Coalesce(F("unblended_cost"), Value(0, output_field=DecimalField()))),
                    "cost_usage": Value(0, output_field=DecimalField()),
                    "cost_markup": Sum(
                        Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "cost_total": Sum(
                        Coalesce(F("unblended_cost"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("credit_amount"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "cost_credit": Sum(
                        Coalesce(F("credit_amount"), Value(0, output_field=DecimalField()))),
                    "cost_units": Coalesce(Max("currency"), Value("USD")),
                    "source_uuid": ArrayAgg(
                        F("source_uuid"), filter=Q(source_uuid__isnull=False), distinct=True),
                },
                "delta_key": {
                    # cost goes to cost_total
                    "cost_total": Sum(
                        ExpressionWrapper(
                            F("unblended_cost") + F("markup_cost") + Coalesce(
                                F("credit_amount"), Value(0, output_field=DecimalField())),
                            output_field=DecimalField(),
                        ))
                },
                "filter": [{}],
                "cost_units_key": "currency",
                "cost_units_fallback": "USD",
                "sum_columns": ["cost_total", "infra_total", "sup_total"],
                "default_ordering": {"cost_total": "desc"},
            },
            "instance_type": {
                "aggregates": {
                    "infra_total": Sum(
                        Coalesce(F("unblended_cost"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("credit_amount"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "infra_raw": Sum("unblended_cost"),
                    "infra_usage": Sum(Value(0, output_field=DecimalField())),
                    "infra_markup": Sum(
                        Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "infra_credit": Sum(
                        Coalesce(F("credit_amount"), Value(0, output_field=DecimalField()))),
                    "sup_raw": Sum(Value(0, output_field=DecimalField())),
                    "sup_usage": Sum(Value(0, output_field=DecimalField())),
                    "sup_markup": Sum(Value(0, output_field=DecimalField())),
                    "sup_total": Sum(Value(0, output_field=DecimalField())),
                    "sup_credit": Sum(Value(0, output_field=DecimalField())),
                    "cost_total": Sum(
                        Coalesce(F("unblended_cost"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("credit_amount"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "cost_raw": Sum("unblended_cost"),
                    "cost_usage": Sum(Value(0, output_field=DecimalField())),
                    "cost_markup": Sum(
                        Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "cost_credit": Sum(
                        Coalesce(F("credit_amount"), Value(0, output_field=DecimalField()))),
                    "usage": Sum("usage_amount"),
                },
                "aggregate_key": "usage_amount",
                "annotations": {
                    "infra_total": Sum(
                        Coalesce(F("unblended_cost"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("credit_amount"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "infra_raw": Sum("unblended_cost"),
                    "infra_usage": Value(0, output_field=DecimalField()),
                    "infra_markup": Sum(
                        Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "infra_credit": Sum(
                        Coalesce(F("credit_amount"), Value(0, output_field=DecimalField()))),
                    "sup_raw": Value(0, output_field=DecimalField()),
                    "sup_usage": Value(0, output_field=DecimalField()),
                    "sup_markup": Value(0, output_field=DecimalField()),
                    "sup_total": Value(0, output_field=DecimalField()),
                    "sup_credit": Sum(Value(0, output_field=DecimalField())),
                    "cost_total": Sum(
                        Coalesce(F("unblended_cost"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("credit_amount"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "cost_raw": Sum("unblended_cost"),
                    "cost_usage": Value(0, output_field=DecimalField()),
                    "cost_markup": Sum(
                        Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "cost_units": Coalesce(Max("currency"), Value("USD")),
                    "cost_credit": Sum(
                        Coalesce(F("credit_amount"), Value(0, output_field=DecimalField()))),
                    "usage": Sum("usage_amount"),
                    "usage_units": Coalesce(Max("unit"), Value("hour")),
                    "source_uuid": ArrayAgg(
                        F("source_uuid"), filter=Q(source_uuid__isnull=False), distinct=True),
                },
                "delta_key": {"usage": Sum("usage_amount")},
                # Restrict to compute-hour line items only.
                "filter": [
                    {"field": "instance_type", "operation": "isnull", "parameter": False},
                    {"field": "unit", "operation": "exact", "parameter": "hour"},
                    {"field": "sku_alias", "operation": "contains",
                     "parameter": "Instance Core running"},
                ],
                "group_by": ["instance_type"],
                "cost_units_key": "currency",
                "cost_units_fallback": "USD",
                "usage_units_key": "unit",
                "usage_units_fallback": "hour",
                "sum_columns": ["usage", "cost_total", "sup_total", "infra_total"],
                "default_ordering": {"usage": "desc"},
            },
            "storage": {
                "aggregates": {
                    "infra_total": Sum(
                        Coalesce(F("unblended_cost"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("credit_amount"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "infra_raw": Sum("unblended_cost"),
                    "infra_usage": Sum(Value(0, output_field=DecimalField())),
                    "infra_markup": Sum(
                        Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "infra_credit": Sum(
                        Coalesce(F("credit_amount"), Value(0, output_field=DecimalField()))),
                    "sup_raw": Sum(Value(0, output_field=DecimalField())),
                    "sup_usage": Sum(Value(0, output_field=DecimalField())),
                    "sup_markup": Sum(Value(0, output_field=DecimalField())),
                    "sup_total": Sum(Value(0, output_field=DecimalField())),
                    "sup_credit": Sum(Value(0, output_field=DecimalField())),
                    "cost_total": Sum(
                        Coalesce(F("unblended_cost"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("credit_amount"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "cost_raw": Sum("unblended_cost"),
                    "cost_usage": Sum(Value(0, output_field=DecimalField())),
                    "cost_credit": Sum(
                        Coalesce(F("credit_amount"), Value(0, output_field=DecimalField()))),
                    "cost_markup": Sum(
                        Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "usage": Sum("usage_amount"),
                },
                "aggregate_key": "usage_amount",
                "annotations": {
                    "infra_raw": Sum("unblended_cost"),
                    "infra_usage": Value(0, output_field=DecimalField()),
                    "infra_markup": Sum(
                        Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "infra_total": Sum(
                        Coalesce(F("unblended_cost"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("credit_amount"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "infra_credit": Sum(
                        Coalesce(F("credit_amount"), Value(0, output_field=DecimalField()))),
                    "sup_raw": Value(0, output_field=DecimalField()),
                    "sup_usage": Value(0, output_field=DecimalField()),
                    "sup_markup": Value(0, output_field=DecimalField()),
                    "sup_total": Value(0, output_field=DecimalField()),
                    "sup_credit": Sum(Value(0, output_field=DecimalField())),
                    "cost_raw": Sum(
                        Coalesce(F("unblended_cost"), Value(0, output_field=DecimalField()))),
                    "cost_usage": Sum(Value(0, output_field=DecimalField())),
                    "cost_markup": Sum(
                        Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "cost_total": Sum(
                        Coalesce(F("unblended_cost"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("credit_amount"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "cost_credit": Sum(
                        Coalesce(F("credit_amount"), Value(0, output_field=DecimalField()))),
                    "cost_units": Coalesce(Max("currency"), Value("USD")),
                    "usage": Sum("usage_amount"),
                    "usage_units": Coalesce(Max("unit"), Value("gibibyte month")),
                    "source_uuid": ArrayAgg(
                        F("source_uuid"), filter=Q(source_uuid__isnull=False), distinct=True),
                },
                "delta_key": {"usage": Sum("usage_amount")},
                # Most of the storage cost was gibibyte month, however one was gibibyte.
                "filter": [
                    {"field": "unit", "operation": "exact", "parameter": "gibibyte month"}
                ],
                "cost_units_key": "currency",
                "cost_units_fallback": "USD",
                "usage_units_key": "unit",
                "usage_units_fallback": "gibibyte month",
                "sum_columns": ["usage", "cost_total", "sup_total", "infra_total"],
                "default_ordering": {"usage": "desc"},
            },
            "tags": {"default_ordering": {"cost_total": "desc"}},
        },
        "start_date": "usage_start",
        "tables": {"query": GCPCostEntryLineItemDailySummary},
    }]
    # Materialized summary views keyed by the group-by column combination.
    # NOTE(review): key is "instance-type" (hyphen) here, unlike the
    # "instance_type" report-type key above — confirm this is intentional.
    self.views = {
        "costs": {
            "default": GCPCostSummaryP,
            ("account",): GCPCostSummaryByAccountP,
            ("region",): GCPCostSummaryByRegionP,
            ("account", "region"): GCPCostSummaryByRegionP,
            ("service",): GCPCostSummaryByServiceP,
            ("account", "service"): GCPCostSummaryByServiceP,
            ("gcp_project",): GCPCostSummaryByProjectP,
            ("account", "gcp_project"): GCPCostSummaryByProjectP,
        },
        "instance-type": {
            "default": GCPComputeSummaryP,
            ("account",): GCPComputeSummaryByAccountP,
        },
        "storage": {
            "default": GCPStorageSummaryP,
            ("account",): GCPStorageSummaryByAccountP,
            ("region",): GCPStorageSummaryByRegionP,
            ("account", "region"): GCPStorageSummaryByRegionP,
            ("service",): GCPStorageSummaryByServiceP,
            ("account", "service"): GCPStorageSummaryByServiceP,
            ("gcp_project",): GCPStorageSummaryByProjectP,
            ("account", "gcp_project"): GCPStorageSummaryByProjectP,
        },
        "database": {
            "default": GCPDatabaseSummaryP,
            ("service",): GCPDatabaseSummaryP,
            ("account", "service"): GCPDatabaseSummaryP,
            ("account",): GCPDatabaseSummaryP,
        },
        "network": {
            "default": GCPNetworkSummaryP,
            ("service",): GCPNetworkSummaryP,
            ("account", "service"): GCPNetworkSummaryP,
            ("account",): GCPNetworkSummaryP,
        },
    }
    # I needed a way to identify gcp in the parent class in queries.py so that
    # way we could filter off of invoice month instead of usage dates for
    # monthly time scope values.
    self.gcp_filters = True
    super().__init__(provider, report_type)
def __init__(self, provider, report_type):
    """Constructor.

    Builds the static AWS provider query map: ``self._mapping`` holds the
    per-report-type aggregates, annotations, filters and orderings used to
    build report queries, and ``self.views`` maps group-by column names to
    their summary tables.
    """
    self._mapping = [{
        "provider": Provider.PROVIDER_AWS,
        "alias": "account_alias__account_alias",
        "annotations": {
            "account": "usage_account_id",
            "service": "product_code",
            "az": "availability_zone",
        },
        "end_date": "usage_end",
        "filters": {
            # Both account entries share a composition_key so they are OR-ed.
            "account": [
                {"field": "account_alias__account_alias", "operation": "icontains",
                 "composition_key": "account_filter"},
                {"field": "usage_account_id", "operation": "icontains",
                 "composition_key": "account_filter"},
            ],
            "service": {"field": "product_code", "operation": "icontains"},
            "az": {"field": "availability_zone", "operation": "icontains"},
            "region": {"field": "region", "operation": "icontains"},
            "product_family": {"field": "product_family", "operation": "icontains"},
        },
        "group_by_options": ["service", "account", "region", "az", "product_family"],
        "tag_column": "tags",
        "report_type": {
            "costs": {
                # All sup_* values are literal zeros: AWS cost here is
                # treated entirely as infrastructure cost.
                "aggregates": {
                    "infra_total": Sum(
                        Coalesce(F("unblended_cost"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "infra_raw": Sum("unblended_cost"),
                    "infra_usage": Sum(Value(0, output_field=DecimalField())),
                    "infra_markup": Sum(
                        Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "sup_raw": Sum(Value(0, output_field=DecimalField())),
                    "sup_usage": Sum(Value(0, output_field=DecimalField())),
                    "sup_markup": Sum(Value(0, output_field=DecimalField())),
                    "sup_total": Sum(Value(0, output_field=DecimalField())),
                    "cost_total": Sum(
                        Coalesce(F("unblended_cost"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "cost_raw": Sum("unblended_cost"),
                    "cost_usage": Sum(Value(0, output_field=DecimalField())),
                    "cost_markup": Sum(
                        Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                },
                "aggregate_key": "unblended_cost",
                "annotations": {
                    "infra_raw": Sum("unblended_cost"),
                    "infra_usage": Value(0, output_field=DecimalField()),
                    "infra_markup": Sum(
                        Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "infra_total": Sum(
                        Coalesce(F("unblended_cost"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))
                        + Value(0, output_field=DecimalField())),
                    "sup_raw": Value(0, output_field=DecimalField()),
                    "sup_usage": Value(0, output_field=DecimalField()),
                    "sup_markup": Value(0, output_field=DecimalField()),
                    "sup_total": Value(0, output_field=DecimalField()),
                    "cost_raw": Sum(
                        Coalesce(F("unblended_cost"), Value(0, output_field=DecimalField()))
                        + Value(0, output_field=DecimalField())),
                    "cost_usage": Sum(
                        Value(0, output_field=DecimalField())
                        + Value(0, output_field=DecimalField())),
                    "cost_markup": Sum(
                        Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))
                        + Value(0, output_field=DecimalField())),
                    "cost_total": Sum(
                        Coalesce(F("unblended_cost"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))
                        + Value(0, output_field=DecimalField())),
                    "cost_units": Coalesce(Max("currency_code"), Value("USD")),
                },
                "delta_key": {
                    # cost goes to cost_total
                    "cost_total": Sum(
                        ExpressionWrapper(F("unblended_cost") + F("markup_cost"),
                                          output_field=DecimalField()))
                },
                "filter": [{}],
                "cost_units_key": "currency_code",
                "cost_units_fallback": "USD",
                "sum_columns": ["cost_total", "infra_total", "sup_total"],
                "default_ordering": {"cost_total": "desc"},
            },
            "instance_type": {
                "aggregates": {
                    "infra_raw": Sum("unblended_cost"),
                    "infra_usage": Sum(Value(0, output_field=DecimalField())),
                    "infra_markup": Sum(
                        Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "infra_total": Sum(
                        Coalesce(F("unblended_cost"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "sup_raw": Sum(Value(0, output_field=DecimalField())),
                    "sup_usage": Sum(Value(0, output_field=DecimalField())),
                    "sup_markup": Sum(Value(0, output_field=DecimalField())),
                    "sup_total": Sum(Value(0, output_field=DecimalField())),
                    "cost_total": Sum(
                        Coalesce(F("unblended_cost"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "cost_raw": Sum("unblended_cost"),
                    "cost_usage": Sum(Value(0, output_field=DecimalField())),
                    "cost_markup": Sum(
                        Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "count": Sum(Value(0, output_field=DecimalField())),
                    "usage": Sum("usage_amount"),
                },
                "aggregate_key": "usage_amount",
                "annotations": {
                    "infra_raw": Sum("unblended_cost"),
                    "infra_usage": Value(0, output_field=DecimalField()),
                    "infra_markup": Sum(
                        Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "infra_total": Sum(
                        Coalesce(F("unblended_cost"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))
                        + Value(0, output_field=DecimalField())),
                    "sup_raw": Value(0, output_field=DecimalField()),
                    "sup_usage": Value(0, output_field=DecimalField()),
                    "sup_markup": Value(0, output_field=DecimalField()),
                    "sup_total": Value(0, output_field=DecimalField()),
                    "cost_raw": Sum(
                        Coalesce(F("unblended_cost"), Value(0, output_field=DecimalField()))
                        + Value(0, output_field=DecimalField())),
                    "cost_usage": Value(0, output_field=DecimalField()),
                    "cost_markup": Sum(
                        Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))
                        + Value(0, output_field=DecimalField())),
                    "cost_total": Sum(
                        Coalesce(F("unblended_cost"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))
                        + Value(0, output_field=DecimalField())),
                    "cost_units": Coalesce(Max("currency_code"), Value("USD")),
                    "count": Max("resource_count"),
                    "count_units": Value("instances", output_field=CharField()),
                    "usage": Sum("usage_amount"),
                    "usage_units": Coalesce(Max("unit"), Value("Hrs")),
                },
                "delta_key": {"usage": Sum("usage_amount")},
                "filter": [
                    {"field": "instance_type", "operation": "isnull", "parameter": False}
                ],
                "group_by": ["instance_type"],
                "cost_units_key": "currency_code",
                "cost_units_fallback": "USD",
                "usage_units_key": "unit",
                "usage_units_fallback": "Hrs",
                "count_units_fallback": "instances",
                "sum_columns": [
                    "usage", "cost_total", "infra_total", "sup_total", "count"
                ],
                "default_ordering": {"usage": "desc"},
            },
            "storage": {
                "aggregates": {
                    "infra_total": Sum(
                        Coalesce(F("unblended_cost"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "infra_raw": Sum("unblended_cost"),
                    "infra_usage": Sum(Value(0, output_field=DecimalField())),
                    "infra_markup": Sum(
                        Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "sup_raw": Sum(Value(0, output_field=DecimalField())),
                    "sup_usage": Sum(Value(0, output_field=DecimalField())),
                    "sup_markup": Sum(Value(0, output_field=DecimalField())),
                    "sup_total": Sum(Value(0, output_field=DecimalField())),
                    "cost_total": Sum(
                        Coalesce(F("unblended_cost"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "cost_raw": Sum("unblended_cost"),
                    "cost_usage": Sum(Value(0, output_field=DecimalField())),
                    "cost_markup": Sum(
                        Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "usage": Sum("usage_amount"),
                },
                "aggregate_key": "usage_amount",
                "annotations": {
                    "infra_raw": Sum("unblended_cost"),
                    "infra_usage": Value(0, output_field=DecimalField()),
                    "infra_markup": Sum(
                        Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))),
                    "infra_total": Sum(
                        Coalesce(F("unblended_cost"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))
                        + Value(0, output_field=DecimalField())),
                    "sup_raw": Value(0, output_field=DecimalField()),
                    "sup_usage": Value(0, output_field=DecimalField()),
                    "sup_markup": Value(0, output_field=DecimalField()),
                    "sup_total": Value(0, output_field=DecimalField()),
                    "cost_raw": Sum(
                        Coalesce(F("unblended_cost"), Value(0, output_field=DecimalField()))
                        + Value(0, output_field=DecimalField())),
                    "cost_usage": Sum(
                        Value(0, output_field=DecimalField())
                        + Value(0, output_field=DecimalField())),
                    "cost_markup": Sum(
                        Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))
                        + Value(0, output_field=DecimalField())),
                    "cost_total": Sum(
                        Coalesce(F("unblended_cost"), Value(0, output_field=DecimalField()))
                        + Coalesce(F("markup_cost"), Value(0, output_field=DecimalField()))
                        + Value(0, output_field=DecimalField())),
                    "cost_units": Coalesce(Max("currency_code"), Value("USD")),
                    "usage": Sum("usage_amount"),
                    "usage_units": Coalesce(Max("unit"), Value("GB-Mo")),
                },
                "delta_key": {"usage": Sum("usage_amount")},
                "filter": [
                    {"field": "product_family", "operation": "icontains",
                     "parameter": "Storage"},
                    {"field": "unit", "operation": "exact", "parameter": "GB-Mo"},
                ],
                "cost_units_key": "currency_code",
                "cost_units_fallback": "USD",
                "usage_units_key": "unit",
                "usage_units_fallback": "GB-Mo",
                "sum_columns": ["usage", "cost_total", "sup_total", "infra_total"],
                "default_ordering": {"usage": "desc"},
            },
            "tags": {"default_ordering": {"cost_total": "desc"}},
        },
        "start_date": "usage_start",
        "tables": {"query": AWSCostEntryLineItemDailySummary},
    }]
    # Summary views keyed by group-by column name.
    # NOTE(review): keys here are plain strings, unlike the tuple keys used by
    # the GCP map in this file — confirm both shapes are handled by callers.
    self.views = {
        "costs": {
            "default": AWSCostSummary,
            "account": AWSCostSummaryByAccount,
            "region": AWSCostSummaryByRegion,
            "service": AWSCostSummaryByService,
            "product_family": AWSCostSummaryByService,
        },
        "instance_type": {
            "default": AWSComputeSummary,
            "account": AWSComputeSummaryByAccount,
            "region": AWSComputeSummaryByRegion,
            "service": AWSComputeSummaryByService,
            "product_family": AWSComputeSummaryByService,
            "instance_type": AWSComputeSummary,
        },
        "storage": {
            "default": AWSStorageSummary,
            "account": AWSStorageSummaryByAccount,
            "region": AWSStorageSummaryByRegion,
            "service": AWSStorageSummaryByService,
            "product_family": AWSStorageSummaryByService,
        },
        "database": {
            "default": AWSDatabaseSummary,
            "service": AWSDatabaseSummary,
        },
        "network": {
            "default": AWSNetworkSummary,
            "service": AWSNetworkSummary,
        },
    }
    super().__init__(provider, report_type)