def test_key_in(self): tests = [ ("value__c__in", [14], self.objs[3:5]), ("value__c__in", [14, 15], self.objs[3:5]), ("value__0__in", [1], [self.objs[5]]), ("value__0__in", [1, 3], [self.objs[5]]), ("value__foo__in", ["bar"], [self.objs[7]]), ( "value__foo__in", [KeyTransform("foo", KeyTransform("bax", "value"))], [self.objs[7]], ), ("value__foo__in", [F("value__bax__foo")], [self.objs[7]]), ( "value__foo__in", [KeyTransform("foo", KeyTransform("bax", "value")), "baz"], [self.objs[7]], ), ("value__foo__in", [F("value__bax__foo"), "baz"], [self.objs[7]]), ("value__foo__in", ["bar", "baz"], [self.objs[7]]), ("value__bar__in", [["foo", "bar"]], [self.objs[7]]), ("value__bar__in", [["foo", "bar"], ["a"]], [self.objs[7]]), ("value__bax__in", [{"foo": "bar"}, {"a": "b"}], [self.objs[7]]), ("value__h__in", [True, "foo"], [self.objs[4]]), ("value__i__in", [False, "foo"], [self.objs[4]]), ] for lookup, value, expected in tests: with self.subTest(lookup=lookup, value=value): self.assertSequenceEqual( NullableJSONModel.objects.filter(**{lookup: value}), expected, )
def test_lookups_with_key_transform(self):
    """Key transforms work as values for contains/has_key-family lookups."""
    cases = [
        ('value__d__contains', 'e'),
        ('value__baz__has_key', 'c'),
        ('value__baz__has_keys', ['a', 'c']),
        ('value__baz__has_any_keys', ['a', 'x']),
        ('value__contains', KeyTransform('bax', 'value')),
        ('value__has_key', KeyTextTransform('foo', 'value')),
    ]
    if connection.vendor != 'oracle':
        # contained_by lookup is not supported on Oracle.
        cases.extend([
            ('value__baz__contained_by', {'a': 'b', 'c': 'd', 'e': 'f'}),
            (
                'value__contained_by',
                KeyTransform(
                    'x',
                    RawSQL(self.raw_sql, ['{"x": {"a": "b", "c": 1, "d": "e"}}']),
                ),
            ),
        ])
    for lookup, value in cases:
        with self.subTest(lookup=lookup):
            exists = NullableJSONModel.objects.filter(**{lookup: value}).exists()
            self.assertIs(exists, True)
def test_contains_contained_by_with_key_transform(self):
    """contains/contained_by accept key-transformed and nested JSON values."""
    cases = [
        ('value__d__contains', 'e'),
        ('value__d__contains', [{'f': 'g'}]),
        ('value__contains', KeyTransform('bax', 'value')),
        ('value__baz__contains', {'a': 'b'}),
        ('value__baz__contained_by', {'a': 'b', 'c': 'd', 'e': 'f'}),
        (
            'value__contained_by',
            KeyTransform(
                'x',
                RawSQL(self.raw_sql, ['{"x": {"a": "b", "c": 1, "d": "e"}}']),
            ),
        ),
    ]
    # For databases where {'f': 'g'} (without surrounding []) matches
    # [{'f': 'g'}].
    if not connection.features.json_key_contains_list_matching_requires_list:
        cases.append(('value__d__contains', {'f': 'g'}))
    for lookup, value in cases:
        with self.subTest(lookup=lookup, value=value):
            exists = NullableJSONModel.objects.filter(**{lookup: value}).exists()
            self.assertIs(exists, True)
def test_contains_contained_by_with_key_transform(self):
    """contains/contained_by accept key-transformed and nested JSON values."""
    cases = [
        ('value__d__contains', 'e'),
        ('value__d__contains', [{'f': 'g'}]),
        ('value__contains', KeyTransform('bax', 'value')),
        ('value__baz__contains', {'a': 'b'}),
        ('value__baz__contained_by', {'a': 'b', 'c': 'd', 'e': 'f'}),
        (
            'value__contained_by',
            KeyTransform(
                'x',
                RawSQL(self.raw_sql, ['{"x": {"a": "b", "c": 1, "d": "e"}}']),
            ),
        ),
    ]
    # PostgreSQL requires a layer of nesting.
    if connection.vendor != 'postgresql':
        cases.append(('value__d__contains', {'f': 'g'}))
    for lookup, value in cases:
        with self.subTest(lookup=lookup, value=value):
            exists = NullableJSONModel.objects.filter(**{lookup: value}).exists()
            self.assertIs(exists, True)
def test_key_transform(self):
    """Window functions can partition and order by JSON key transforms."""
    Detail.objects.bulk_create([
        Detail(value={'department': 'IT', 'name': 'Smith', 'salary': 37000}),
        Detail(value={'department': 'IT', 'name': 'Nowak', 'salary': 32000}),
        Detail(value={'department': 'HR', 'name': 'Brown', 'salary': 50000}),
        Detail(value={'department': 'HR', 'name': 'Smith', 'salary': 55000}),
        Detail(value={'department': 'PR', 'name': 'Moore', 'salary': 90000}),
    ])
    # Running salary total per department, accumulated in name order.
    salary_sum = Sum(
        Cast(KeyTextTransform('salary', 'value'), output_field=IntegerField())
    )
    running_total = Window(
        expression=salary_sum,
        partition_by=[KeyTransform('department', 'value')],
        order_by=[KeyTransform('name', 'value')],
    )
    qs = (
        Detail.objects
        .annotate(department_sum=running_total)
        .order_by('value__department', 'department_sum')
    )
    self.assertQuerysetEqual(
        qs,
        [
            ('Brown', 'HR', 50000, 50000),
            ('Smith', 'HR', 55000, 105000),
            ('Nowak', 'IT', 32000, 32000),
            ('Smith', 'IT', 37000, 69000),
            ('Moore', 'PR', 90000, 90000),
        ],
        lambda entry: (
            entry.value['name'],
            entry.value['department'],
            entry.value['salary'],
            entry.department_sum,
        ),
    )
def test_nested_key_transform_raw_expression(self):
    """Nested KeyTransforms can wrap a RawSQL JSON expression."""
    raw = RawSQL(self.raw_sql, ['{"x": {"y": "bar"}}'])
    nested = KeyTransform('y', KeyTransform('x', raw))
    self.assertSequenceEqual(
        NullableJSONModel.objects.filter(value__foo=nested),
        [self.objs[7]],
    )
def test_key_in(self):
    """``__in`` on key transforms matches scalar, list, dict, and expression members."""
    cases = [
        ('value__c__in', [14], self.objs[3:5]),
        ('value__c__in', [14, 15], self.objs[3:5]),
        ('value__0__in', [1], [self.objs[5]]),
        ('value__0__in', [1, 3], [self.objs[5]]),
        ('value__foo__in', ['bar'], [self.objs[7]]),
        (
            'value__foo__in',
            [KeyTransform('foo', KeyTransform('bax', 'value'))],
            [self.objs[7]],
        ),
        ('value__foo__in', [F('value__bax__foo')], [self.objs[7]]),
        (
            'value__foo__in',
            [KeyTransform('foo', KeyTransform('bax', 'value')), 'baz'],
            [self.objs[7]],
        ),
        ('value__foo__in', [F('value__bax__foo'), 'baz'], [self.objs[7]]),
        ('value__foo__in', ['bar', 'baz'], [self.objs[7]]),
        ('value__bar__in', [['foo', 'bar']], [self.objs[7]]),
        ('value__bar__in', [['foo', 'bar'], ['a']], [self.objs[7]]),
        ('value__bax__in', [{'foo': 'bar'}, {'a': 'b'}], [self.objs[7]]),
    ]
    for lookup, value, expected in cases:
        with self.subTest(lookup=lookup, value=value):
            matched = NullableJSONModel.objects.filter(**{lookup: value})
            self.assertSequenceEqual(matched, expected)
def test_jsonb_agg_jsonfield_ordering(self):
    """JSONBAgg over a key transform honours ``filter`` and ``ordering``."""
    aggregate = JSONBAgg(
        KeyTransform('lang', 'json_field'),
        filter=Q(json_field__lang__isnull=False),
        ordering=KeyTransform('lang', 'json_field'),
    )
    result = AggregateTestModel.objects.aggregate(jsonbagg=aggregate)
    self.assertEqual(result, {'jsonbagg': ['en', 'pl']})
def test_contains_contained_by_with_key_transform(self): tests = [ ("value__d__contains", "e"), ("value__d__contains", [{"f": "g"}]), ("value__contains", KeyTransform("bax", "value")), ("value__contains", F("value__bax")), ("value__baz__contains", {"a": "b"}), ("value__baz__contained_by", {"a": "b", "c": "d", "e": "f"}), ( "value__contained_by", KeyTransform( "x", RawSQL( self.raw_sql, ['{"x": {"a": "b", "c": 1, "d": "e"}}'], ), ), ), ] # For databases where {'f': 'g'} (without surrounding []) matches # [{'f': 'g'}]. if not connection.features.json_key_contains_list_matching_requires_list: tests.append(("value__d__contains", {"f": "g"})) for lookup, value in tests: with self.subTest(lookup=lookup, value=value): self.assertIs( NullableJSONModel.objects.filter( **{lookup: value}, ).exists(), True, )
def test_jsonb_agg_jsonfield_ordering(self):
    """JSONBAgg over a key transform honours ``filter`` and ``ordering``."""
    aggregate = JSONBAgg(
        KeyTransform("lang", "json_field"),
        filter=Q(json_field__lang__isnull=False),
        ordering=KeyTransform("lang", "json_field"),
    )
    result = AggregateTestModel.objects.aggregate(jsonbagg=aggregate)
    self.assertEqual(result, {"jsonbagg": ["en", "pl"]})
def test_nested_key_transform_expression(self):
    """Chained transforms on an annotated key match those on its JSON cast."""
    annotated = (
        NullableJSONModel.objects
        .filter(value__d__0__isnull=False)
        .annotate(
            key=KeyTransform('d', 'value'),
            chain=KeyTransform('f', KeyTransform('1', 'key')),
            expr=KeyTransform(
                'f', KeyTransform('1', Cast('key', models.JSONField()))
            ),
        )
    )
    self.assertSequenceEqual(annotated.filter(chain=F('expr')), [self.objs[4]])
def test_key_transform_expression(self):
    """A transform on an annotated key matches the same transform on its cast."""
    annotated = (
        NullableJSONModel.objects
        .filter(value__d__0__isnull=False)
        .annotate(
            key=KeyTransform("d", "value"),
            chain=KeyTransform("0", "key"),
            expr=KeyTransform("0", Cast("key", models.JSONField())),
        )
    )
    self.assertSequenceEqual(annotated.filter(chain=F("expr")), [self.objs[4]])
def test_nested_key_transform_on_subquery(self):
    """Key transforms can dig into a value annotated via Subquery."""
    same_row_value = Subquery(
        NullableJSONModel.objects.filter(pk=OuterRef('pk')).values('value')
    )
    qs = (
        NullableJSONModel.objects
        .filter(value__d__0__isnull=False)
        .annotate(
            subquery_value=same_row_value,
            key=KeyTransform('d', 'subquery_value'),
            chain=KeyTransform('f', KeyTransform('1', 'key')),
        )
        .filter(chain='g')
    )
    self.assertSequenceEqual(qs, [self.objs[4]])
def test_nested_key_transform_on_subquery(self):
    """Key transforms can dig into a value annotated via Subquery."""
    same_row_value = Subquery(
        NullableJSONModel.objects.filter(pk=OuterRef("pk")).values("value")
    )
    qs = (
        NullableJSONModel.objects
        .filter(value__d__0__isnull=False)
        .annotate(
            subquery_value=same_row_value,
            key=KeyTransform("d", "subquery_value"),
            chain=KeyTransform("f", KeyTransform("1", "key")),
        )
        .filter(chain="g")
    )
    self.assertSequenceEqual(qs, [self.objs[4]])
def test_has_key_list(self):
    """has_key works through numeric index transforms on a JSON array."""
    obj = NullableJSONModel.objects.create(value=[{'a': 1}, {'b': 'x'}])
    conditions = [
        Q(value__1__has_key='b'),
        # Integer and string indices are both accepted for array elements.
        Q(value__has_key=KeyTransform('b', KeyTransform(1, 'value'))),
        Q(value__has_key=KeyTransform('b', KeyTransform('1', 'value'))),
    ]
    for condition in conditions:
        with self.subTest(condition=condition):
            self.assertSequenceEqual(
                NullableJSONModel.objects.filter(condition),
                [obj],
            )
def test_has_key_list(self):
    """has_key works through numeric index transforms and F() on a JSON array."""
    obj = NullableJSONModel.objects.create(value=[{"a": 1}, {"b": "x"}])
    conditions = [
        Q(value__1__has_key="b"),
        # Integer and string indices are both accepted for array elements.
        Q(value__has_key=KeyTransform("b", KeyTransform(1, "value"))),
        Q(value__has_key=KeyTransform("b", KeyTransform("1", "value"))),
        Q(value__has_key=F("value__1__b")),
    ]
    for condition in conditions:
        with self.subTest(condition=condition):
            self.assertSequenceEqual(
                NullableJSONModel.objects.filter(condition),
                [obj],
            )
def test_ordering_grouping_by_key_transform(self):
    """Key transforms are usable in order_by, values(), and aggregation."""
    base_qs = NullableJSONModel.objects.filter(value__d__0__isnull=False)
    by_lookup = base_qs.order_by('value__d__0')
    by_annotation = base_qs.annotate(
        key=KeyTransform('0', KeyTransform('d', 'value')),
    ).order_by('key')
    for qs in (by_lookup, by_annotation):
        self.assertSequenceEqual(qs, [self.objs[4]])
    qs = NullableJSONModel.objects.filter(value__isnull=False)
    grouped = (
        qs.filter(value__isnull=False)
        .annotate(
            key=KeyTextTransform('f', KeyTransform('1', KeyTransform('d', 'value'))),
        )
        .values('key')
        .annotate(count=Count('key'))
        .order_by('count')
    )
    self.assertQuerysetEqual(
        grouped,
        [(None, 0), ('g', 1)],
        operator.itemgetter('key', 'count'),
    )
def apply_ordering(queryset, ordering):
    """Order ``queryset`` by the first entry of ``ordering``.

    Entries look like ``"tasks:<field>"`` or ``"tasks:-<field>"`` (a leading
    ``-`` means descending). ``data.<key>`` fields are JSON keys: they are
    annotated via ``KeyTransform`` so float/int/bool values sort naturally.
    When ``ordering`` is empty, falls back to ordering by ``id``.

    :param queryset: Task queryset to order.
    :param ordering: sequence of ordering strings; only the first is used.
    :return: the ordered queryset.
    """
    if not ordering:
        return queryset.order_by("id")

    field_name = ordering[0].replace("tasks:", "")
    # Detect direction; startswith() is safe even for an empty field name.
    ascending = not field_name.startswith('-')
    if not ascending:
        field_name = field_name[1:]  # remove direction prefix
    if "data." in field_name:
        field_name = field_name.replace(".", "__", 1)
        only_undefined_field = (
            queryset.exists() and queryset.first().project.only_undefined_field
        )
        if only_undefined_field:
            # Raw string: '\w' in a non-raw literal is an invalid escape sequence.
            field_name = re.sub(
                r'data__\w+', f'data__{settings.DATA_UNDEFINED_NAME}', field_name
            )
        # Annotate task with the data key for float/int/bool ordering support.
        json_field = field_name.replace('data__', '')
        queryset = queryset.annotate(
            ordering_field=KeyTransform(json_field, 'data')
        )
        order_expr = F('ordering_field')
    else:
        order_expr = F(field_name)
    f = (
        order_expr.asc(nulls_last=True)
        if ascending
        else order_expr.desc(nulls_last=True)
    )
    return queryset.order_by(f)
def test_array_agg_jsonfield(self):
    """ArrayAgg can aggregate a JSON key transform with a filter."""
    aggregate = ArrayAgg(
        KeyTransform('lang', 'json_field'),
        filter=Q(json_field__lang__isnull=False),
    )
    result = AggregateTestModel.objects.aggregate(arrayagg=aggregate)
    self.assertEqual(result, {'arrayagg': ['pl', 'en']})
def test_contains_contained_by_with_key_transform(self):
    """contains/contained_by accept KeyTransform expressions as values."""
    cases = (
        ('value__contains', KeyTransform('bax', 'value')),
        ('value__baz__contained_by', {'a': 'b', 'c': 'd', 'e': 'f'}),
        (
            'value__contained_by',
            KeyTransform(
                'x',
                RawSQL(self.raw_sql, ['{"x": {"a": "b", "c": 1, "d": "e"}}']),
            ),
        ),
    )
    for lookup, value in cases:
        with self.subTest(lookup=lookup):
            exists = NullableJSONModel.objects.filter(**{lookup: value}).exists()
            self.assertIs(exists, True)
def test_ordering_grouping_by_key_transform(self):
    """Key transforms are usable in order_by, values(), and aggregation."""
    base_qs = NullableJSONModel.objects.filter(value__d__0__isnull=False)
    by_lookup = base_qs.order_by("value__d__0")
    by_annotation = base_qs.annotate(
        key=KeyTransform("0", KeyTransform("d", "value")),
    ).order_by("key")
    for qs in (by_lookup, by_annotation):
        self.assertSequenceEqual(qs, [self.objs[4]])
    qs = NullableJSONModel.objects.filter(value__isnull=False)
    grouped = (
        qs.filter(value__isnull=False)
        .annotate(
            key=KeyTextTransform("f", KeyTransform("1", KeyTransform("d", "value"))),
        )
        .values("key")
        .annotate(count=Count("key"))
        .order_by("count")
    )
    self.assertQuerysetEqual(
        grouped,
        [(None, 0), ("g", 1)],
        operator.itemgetter("key", "count"),
    )
def test_array_agg_jsonfield(self):
    """ArrayAgg can aggregate a JSON key transform with a filter."""
    aggregate = ArrayAgg(
        KeyTransform("lang", "json_field"),
        filter=Q(json_field__lang__isnull=False),
    )
    result = AggregateTestModel.objects.aggregate(arrayagg=aggregate)
    self.assertEqual(result, {"arrayagg": ["pl", "en"]})
def test_expression_wrapper_key_transform(self):
    """KeyTransform works inside ExpressionWrapper with an explicit output_field."""
    wrapped = ExpressionWrapper(
        KeyTransform('c', 'value'),
        output_field=IntegerField(),
    )
    qs = NullableJSONModel.objects.annotate(expr=wrapped).filter(expr__isnull=False)
    self.assertSequenceEqual(qs, self.objs[3:5])
def test_has_key_deep(self):
    """has_key matches keys at arbitrary depth via transforms or F()."""
    cases = [
        (Q(value__baz__has_key="a"), self.objs[7]),
        (
            Q(value__has_key=KeyTransform("a", KeyTransform("baz", "value"))),
            self.objs[7],
        ),
        (Q(value__has_key=F("value__baz__a")), self.objs[7]),
        (
            Q(value__has_key=KeyTransform("c", KeyTransform("baz", "value"))),
            self.objs[7],
        ),
        (Q(value__has_key=F("value__baz__c")), self.objs[7]),
        (Q(value__d__1__has_key="f"), self.objs[4]),
        (
            Q(value__has_key=KeyTransform(
                "f", KeyTransform("1", KeyTransform("d", "value"))
            )),
            self.objs[4],
        ),
        (Q(value__has_key=F("value__d__1__f")), self.objs[4]),
    ]
    for condition, expected in cases:
        with self.subTest(condition=condition):
            self.assertSequenceEqual(
                NullableJSONModel.objects.filter(condition),
                [expected],
            )
def test_annotation_with_json(self, qualifications_consequence, qualifications):
    """Cast a JSON key to int and resolve the related title via a subquery."""
    as_int = Cast(KeyTransform("qualification_id", "data"), IntegerField())
    title_lookup = Subquery(
        Qualification.objects.filter(
            pk=OuterRef("qualification_id")
        ).values("title")[:1]
    )
    qs = (
        Consequence.objects
        .filter(state=Consequence.States.NEEDS_CONFIRMATION)
        .annotate(qualification_id=as_int)
        .annotate(qualification_title=title_lookup)
    )
    assert len(qs) == 1
    assert qs[0].qualification_id == qualifications.nfs.id
    assert qs[0].qualification_title == qualifications.nfs.title
def filter_queryset(cls, qs, user: UserProfile):
    """Restrict ``qs`` to consequences the given user is allowed to handle."""
    qs = qs.annotate(
        qualification_id=Cast(KeyTransform("qualification_id", "data"), IntegerField()),
        event_id=Cast(KeyTransform("event_id", "data"), IntegerField()),
    ).annotate(
        qualification_title=Subquery(
            Qualification.objects.filter(
                id=OuterRef("qualification_id")
            ).values("title")[:1]
        ),
        event_title=Subquery(
            Event.objects.filter(id=OuterRef("event_id")).values("title")[:1]
        ),
    )
    # Qualifications can be granted by people who...
    responsible_for_event = Q(
        # are responsible for the event the consequence originated from, if applicable
        event_id__in=get_objects_for_user(user, perms="change_event", klass=Event),
    )
    may_edit_user = Q(
        # can edit the affected user anyway
        user__in=get_objects_for_user(
            user, perms="change_userprofile", klass=get_user_model()
        ),
    )
    return qs.filter(~Q(slug=cls.slug) | responsible_for_event | may_edit_user)
def test_has_key_deep(self):
    """has_key matches keys nested inside objects and arrays via transforms."""
    cases = [
        (Q(value__baz__has_key='a'), self.objs[7]),
        (
            Q(value__has_key=KeyTransform('a', KeyTransform('baz', 'value'))),
            self.objs[7],
        ),
        (
            Q(value__has_key=KeyTransform('c', KeyTransform('baz', 'value'))),
            self.objs[7],
        ),
        (Q(value__d__1__has_key='f'), self.objs[4]),
        (
            Q(value__has_key=KeyTransform(
                'f', KeyTransform('1', KeyTransform('d', 'value'))
            )),
            self.objs[4],
        ),
    ]
    for condition, expected in cases:
        with self.subTest(condition=condition):
            self.assertSequenceEqual(
                NullableJSONModel.objects.filter(condition),
                [expected],
            )
def apply_filters(queryset, filters):
    """Apply the user-defined ``filters`` spec to a task ``queryset``.

    Builds a single Q expression from ``filters.items`` (combined with the
    conjunction declared on ``filters``) and filters the queryset with it.
    ``data__<key>`` fields of type Number are annotated with a float cast so
    numeric comparisons work on JSON values. Note: entries mutate ``_filter``
    in place (operator/value rewrites) as they are processed.
    """
    if not filters:
        return queryset
    # convert conjunction to orm statement
    filter_expression = Q()
    if filters.conjunction == ConjunctionEnum.OR:
        conjunction = Q.OR
    else:
        conjunction = Q.AND
    # Project flag: all task data lives under a single undefined key.
    only_undefined_field = queryset.exists() and queryset.first(
    ).project.only_undefined_field
    for _filter in filters.items:
        # we can also have annotations filters
        if not _filter.filter.startswith("filter:tasks:") or not _filter.value:
            continue
        # django orm loop expression attached to column name
        field_name = preprocess_field_name(_filter.filter, only_undefined_field)
        # annotate with cast to number if need
        if _filter.type == 'Number' and field_name.startswith('data__'):
            json_field = field_name.replace('data__', '')
            queryset = queryset.annotate(
                **{
                    f'filter_{json_field.replace("$undefined$", "undefined")}':
                    Cast(KeyTransform(json_field, 'data'), output_field=FloatField())
                })
            clean_field_name = f'filter_{json_field.replace("$undefined$", "undefined")}'
        else:
            clean_field_name = field_name
        # special case: predictions, annotations, cancelled --- for them 0 is equal to is_empty=True
        if clean_field_name in ('total_predictions', 'total_annotations', 'cancelled_annotations') and \
                _filter.operator == 'empty':
            _filter.operator = 'equal' if cast_bool_from_str(
                _filter.value) else 'not_equal'
            _filter.value = 0
        # append operator suffix (e.g. __icontains) to the cleaned field name
        field_name = f"{clean_field_name}{operators.get(_filter.operator, '')}"
        # in: value is a min/max range, expressed as gte+lte
        if _filter.operator == "in":
            cast_value(_filter)
            filter_expression.add(
                Q(
                    **{
                        f"{field_name}__gte": _filter.value.min,
                        f"{field_name}__lte": _filter.value.max,
                    }),
                conjunction,
            )
        # not in: negated min/max range
        elif _filter.operator == "not_in":
            cast_value(_filter)
            filter_expression.add(
                ~Q(
                    **{
                        f"{field_name}__gte": _filter.value.min,
                        f"{field_name}__lte": _filter.value.max,
                    }),
                conjunction,
            )
        # empty: the value decides whether we keep empty or non-empty rows
        elif _filter.operator == 'empty':
            if cast_bool_from_str(_filter.value):
                filter_expression.add(Q(**{field_name: True}), conjunction)
            else:
                filter_expression.add(~Q(**{field_name: True}), conjunction)
        # starting from not_: negated single-value comparison
        elif _filter.operator.startswith("not_"):
            cast_value(_filter)
            filter_expression.add(~Q(**{field_name: _filter.value}), conjunction)
        # all others: plain single-value comparison
        else:
            cast_value(_filter)
            filter_expression.add(Q(**{field_name: _filter.value}), conjunction)
    logger.debug(f'Apply filter: {filter_expression}')
    queryset = queryset.filter(filter_expression)
    return queryset