def test_expression(self):
    """GroupConcat accepts arbitrary expressions (here ``F('id') + 1``), not just field names."""
    aggregated = self.shakes.tutees.aggregate(tids=GroupConcat(F("id") + 1))
    expected = ",".join(str(pk + 1) for pk in (self.jk.id, self.grisham.id))
    assert aggregated == {"tids": expected}
def show_order(self):
    """Build a human-readable order summary: a header line, one line per
    grouped item, a separator, and a footer with delivery details.

    :return: the assembled multi-line message string.
    """
    items = self.items.values('user__name', 'is_union_price').annotate(
        description=GroupConcat('description', separator=', '),
        total_price=Sum('total'),
        code=GroupConcat('code', separator=' '),
    )
    header = _("Order ID: %(orderid)s, Date: %(orderdate)s, Order detail:") % {
        'orderid': self.id,
        'orderdate': self.date_created,
    }
    lines = [header, '---------------']
    for entry in items:
        # Each item line keeps the original trailing space before the newline.
        lines.append(
            entry['user__name'] + ': ' + entry['code'] + ' '
            + entry['description'] + ' ' + str(entry['total_price']) + ' '
        )
    group = self.group
    footer = _("Address: %(address)s, Phone: %(phone)s, Remark: %(remark)s, Total: %(total)s") % {
        'address': group.address,
        'phone': group.phone,
        'remark': group.remarks,
        'total': str(self.items.aggregate(Sum('total'))['total__sum']),
    }
    return '\n'.join(lines) + '\n' + footer
def test_separator_big(self):
    """A multi-character separator is inserted verbatim between values."""
    aggregated = self.shakes.tutees.aggregate(
        tids=GroupConcat('id', separator='BIG'))
    assert aggregated == {'tids': 'BIG'.join(self.str_tutee_ids)}
def test_basic_annotate_ids(self):
    """Annotating across the tutees relation concatenates related ids with commas.

    Fix: the original ended with ``assert shakey.tids, concatted_ids`` — the
    comma makes ``concatted_ids`` the assert *message*, so only truthiness of
    ``shakey.tids`` was checked and the test could never fail on a wrong value.
    """
    concat = GroupConcat('tutees__id')
    shakey = Author.objects.annotate(tids=concat).get(id=self.shakes.id)
    concatted_ids = ",".join(self.str_tutee_ids)
    assert shakey.tids == concatted_ids
def test_basic_aggregate_ids(self):
    """By default GroupConcat joins values with a comma."""
    expected = ",".join(self.str_tutee_ids)
    result = self.shakes.tutees.aggregate(tids=GroupConcat('id'))
    assert result == {'tids': expected}
def test_ordering_desc(self):
    """ordering='desc' reverses the concatenation order."""
    expected = ",".join(reversed(self.str_tutee_ids))
    result = self.shakes.tutees.aggregate(
        tids=GroupConcat('id', ordering='desc'))
    assert result == {'tids': expected}
def test_ordering_invalid(self):
    """An unrecognized ordering value raises ValueError with a helpful message."""
    with pytest.raises(ValueError) as err:
        # 'asceding' is a deliberate typo — it must be rejected.
        self.shakes.tutees.aggregate(
            tids=GroupConcat('id', ordering='asceding'))
    assert "'ordering' must be one of" in str(err.value)
def test_separator_ansi_mode(self):
    """A separator of punctuation characters ('>>') is handled correctly."""
    expected = '>>'.join(self.str_tutee_ids)
    result = self.shakes.tutees.aggregate(
        tids=GroupConcat('id', separator='>>'))
    assert result == {'tids': expected}
def test_application_order(self):
    """With distinct=True the repeated tutor id collapses to a single value."""
    queryset = Author.objects.exclude(id=self.shakes.id)
    result = queryset.aggregate(tids=GroupConcat('tutor_id', distinct=True))
    assert result == {'tids': str(self.shakes.id)}
def get_queryset(self):
    """Group dictionary rows by (typekbn, grpname), concatenating each
    group's distwords and recording the most recent update timestamp."""
    return (
        Dictionary.objects.get_queryset()
        .values('typekbn', 'grpname')
        .annotate(distword_list=GroupConcat('distword'))
        .annotate(last_updated_at=Max('updated_at'))
    )
def test_separator_big(self):
    """Multi-character separators are used verbatim between values."""
    out = self.shakes.tutees.aggregate(
        tids=GroupConcat("id", separator="BIG"))
    expected = "BIG".join(self.str_tutee_ids)
    assert out == {"tids": expected}
def test_separator_ordering(self):
    """separator and ordering can be combined in a single GroupConcat."""
    expected = ":".join(self.str_tutee_ids)
    out = self.shakes.tutees.aggregate(
        tids=GroupConcat("id", separator=":", ordering="asc"))
    assert out == {"tids": expected}
def test_ordering_desc(self):
    """Descending ordering yields the tutee ids reversed."""
    aggregated = self.shakes.tutees.aggregate(
        tids=GroupConcat("id", ordering="desc"))
    expected = ",".join(reversed(self.str_tutee_ids))
    assert aggregated == {"tids": expected}
def test_basic_aggregate_ids_output_field(self):
    """An explicit output_field=TextField() does not change the result."""
    aggregated = self.shakes.tutees.aggregate(
        tids=GroupConcat("id", output_field=TextField()))
    expected = ",".join(self.str_tutee_ids)
    assert aggregated == {"tids": expected}
def filter_data(request, *args, **kwargs):
    """
    Filter model data according to the given GET parameters.

    Query params:
        model:     model name (or, when isDefault=false, the saved Config name)
        isDefault: "true" -> use the built-in model/filter tables,
                   "false" -> use the authenticated user's saved Config
        filters:   optional list of category filters to apply

    :param request: contains model name and filters
    :return: Response with the filtered data on success; 400 for bad
             parameters, 404 for an unknown model/config.

    Fixes vs. the original:
      * removed the duplicated ``is_default``/``user`` assignments;
      * iterate the three parallel filter tables with ``zip`` instead of a
        hard-coded ``num_models = 3`` index loop;
      * ``default_filter`` can no longer be unbound (NameError -> 500) when a
        saved config has an empty filter dict — it becomes None and fails the
        validation below with a clean 400;
      * reuse the Config fetched by ``get_object_or_404`` instead of querying
        it a second time;
      * iterate ``list(item)`` so the per-row fix-up no longer mutates the
        dict while iterating it;
      * renamed the loop variable that shadowed the ``filter`` builtin.
    """
    model_name = request.GET.get('model', None)
    is_default = request.GET.get('isDefault')
    user = request.user

    # Normalize the isDefault flag; anonymous users may only use defaults.
    if is_default == "true":
        is_default = True
    elif is_default == "false" and not user.is_anonymous:
        is_default = False
    else:
        return Response(
            status=HTTP_400_BAD_REQUEST,
            data="Invalid isDefault"
        )
    if model_name is None:
        return Response(
            status=HTTP_400_BAD_REQUEST,
            data="Model name not passed in params"
        )

    config = None
    if is_default:
        # Find the model's secondary filters and its first primary filter.
        subtypes = None
        for cls_name, secondary, primary in zip(
                CLASSES, SECONDARY_FILTERS, PRIMARY_FILTERS):
            if model_name == cls_name:
                subtypes = secondary
                default_filter = primary[0]
                break
        if subtypes is None:
            return Response(
                status=HTTP_404_NOT_FOUND,
                data="Model with given name does not exist"
            )
        model = apps.get_model(app_label=APP_NAME, model_name=model_name)
    else:
        config = get_object_or_404(Config, name=model_name, user=user)
        subtypes = json.loads(config.filters)
        # First key of the saved filters is the default; None when the dict
        # is empty, which then fails the validation below with a 400.
        default_filter = next(iter(subtypes), None)

    filters = request.GET.getlist('filters', [default_filter])
    if not all(f in subtypes.keys() for f in filters):
        return Response(
            status=HTTP_400_BAD_REQUEST,
            data="Filters specified do not exist for the given model"
        )

    # Aggregate conditions with "OR" operations.
    conditions = Q()
    for category in filters:
        conditions = conditions | Q(category=category)

    # Apply conditions to filter.
    # NOTE(review): the saved-config branch ignores `conditions`;
    # presumably the stored Data rows are pre-filtered — confirm.
    if is_default:
        data = model.objects.filter(conditions)
    else:
        data = Data.objects.filter(name=config)

    # Get Earliest and Latest timestamp in dataset (to be used as range for slider)
    # mdate = data.aggregate(earliestTime = Min(Time), latestTime = Max(Time))

    # annotate() creates per-object attributes ("date", "concatenated_filters");
    # values().annotate() groups by (Lat, Lng, 'date') and aggregates each
    # group into one dictionary.
    data = list(data.annotate(
        date=TruncMonth(Time)  # truncate the timestamp to month granularity
    ).annotate(
        concatenated_filters=Concat(V('"'), Category, V('":'), Entity)
    ).values(
        Lat, Lng, 'date'
    ).annotate(
        filter=Concat(V('{'), GroupConcat('concatenated_filters'), V('}'))
    ))

    # Fix up the datatype of the values in each aggregated row.  Iterate a
    # snapshot of the keys because the body inserts/removes keys.
    for item in data:
        for key in list(item):
            if key == "date":
                # Rename 'date' to the canonical Time key.
                item[Time] = item['date']
                item.pop('date')
                key = Time
            if key == "filter":
                # Parse the assembled JSON string, merging duplicate keys,
                # then aggregate each key's collected values.
                item[key] = json.loads(item[key], object_pairs_hook=multidict)
                for k, v in item[key].items():
                    item[key][k] = aggregator(v)
            else:
                # Stringify Decimals (Lat/Lng) and datetime.date values so
                # the payload serializes cleanly.
                item[key] = str(item[key])
    return Response(
        status=HTTP_200_OK,
        data={"primaryFilters": filters, "data": data}
    )
def test_ordering_asc(self):
    """Explicit ascending ordering matches the natural id order."""
    aggregated = self.shakes.tutees.aggregate(
        tids=GroupConcat('id', ordering='asc'))
    expected = ",".join(self.str_tutee_ids)
    assert aggregated == {'tids': expected}