def test_row_created(mock_broadcast_to_channel_group, data_fixture):
    """
    Creating a row must broadcast a `row_created` payload to the table's
    channel group, containing the serialized row and the `before_row_id`.
    """

    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    field = data_fixture.create_text_field(table=table)
    row = RowHandler().create_row(user=user, table=table, values={
        f'field_{field.id}': 'Test'
    })

    mock_broadcast_to_channel_group.delay.assert_called_once()
    args = mock_broadcast_to_channel_group.delay.call_args
    assert args[0][0] == f'table-{table.id}'
    assert args[0][1]['type'] == 'row_created'
    assert args[0][1]['table_id'] == table.id
    assert args[0][1]['row']['id'] == row.id
    # No `before` was given, so the row is appended and before_row_id is None.
    assert args[0][1]['before_row_id'] is None
    assert args[0][1]['row'][f'field_{field.id}'] == 'Test'

    row_2 = RowHandler().create_row(user=user, table=table, before=row, values={
        f'field_{field.id}': 'Test2'
    })

    args = mock_broadcast_to_channel_group.delay.call_args
    assert args[0][0] == f'table-{table.id}'
    assert args[0][1]['type'] == 'row_created'
    assert args[0][1]['table_id'] == table.id
    assert args[0][1]['row']['id'] == row_2.id
    # Inserted before `row`, so that row's id is broadcast as before_row_id.
    assert args[0][1]['before_row_id'] == row.id
    assert args[0][1]['row'][f'field_{field.id}'] == 'Test2'
def test_row_updated(mock_broadcast_to_channel_group, data_fixture):
    """
    Updating a row must broadcast a `row_updated` payload containing the
    fully serialized row, including fields that were not part of the update.
    """

    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    field = data_fixture.create_text_field(table=table)
    field_2 = data_fixture.create_text_field(table=table)
    row = table.get_model().objects.create()
    RowHandler().update_row(user=user, table=table, row_id=row.id, values={
        f'field_{field.id}': 'Test'
    })

    mock_broadcast_to_channel_group.delay.assert_called_once()
    args = mock_broadcast_to_channel_group.delay.call_args
    assert args[0][0] == f'table-{table.id}'
    assert args[0][1]['type'] == 'row_updated'
    assert args[0][1]['table_id'] == table.id
    assert args[0][1]['row']['id'] == row.id
    assert args[0][1]['row'][f'field_{field.id}'] == 'Test'
    # The untouched second field is still present in the payload.
    assert args[0][1]['row'][f'field_{field_2.id}'] is None

    row.refresh_from_db()
    setattr(row, f'field_{field_2.id}', 'Second')
    row.save()

    RowHandler().update_row(user=user, table=table, row_id=row.id, values={
        f'field_{field.id}': 'First'
    })

    args = mock_broadcast_to_channel_group.delay.call_args
    assert args[0][0] == f'table-{table.id}'
    assert args[0][1]['type'] == 'row_updated'
    assert args[0][1]['table_id'] == table.id
    assert args[0][1]['row']['id'] == row.id
    assert args[0][1]['row'][f'field_{field.id}'] == 'First'
    # Even though only `field` was updated, the broadcast also contains the
    # value that was set directly on the model instance.
    assert args[0][1]['row'][f'field_{field_2.id}'] == 'Second'
def patch(self, request, table_id, row_id):
    """
    Updates the row with the given row_id for the table with the given
    table_id. Also the post data is validated according to the tables field
    types.
    """

    table = TableHandler().get_table(request.user, table_id)
    TokenHandler().check_table_permissions(request, 'update', table, False)

    row_handler = RowHandler()

    # Building the model for just the updated fields means the response will
    # not contain the untouched fields, but it avoids fetching the specific
    # version of every field object and is therefore much faster.
    updated_field_ids = row_handler.extract_field_ids_from_dict(request.data)
    model = table.get_model(field_ids=updated_field_ids)
    validated = validate_data(get_row_serializer_class(model), request.data)
    row = row_handler.update_row(request.user, table, row_id, validated, model)

    response_serializer_class = get_row_serializer_class(
        model, RowSerializer, is_response=True
    )
    return Response(response_serializer_class(row).data)
def post(self, request, table_id):
    """
    Creates a new row for the given table_id. Also the post data is validated
    according to the tables field types.
    """

    table = TableHandler().get_table(table_id)
    TokenHandler().check_table_permissions(request, 'create', table, False)

    model = table.get_model()
    validated = validate_data(get_row_serializer_class(model), request.data)

    row_handler = RowHandler()

    # The new row is optionally positioned before an existing row.
    before_id = request.GET.get('before')
    before = None
    if before_id:
        before = row_handler.get_row(request.user, table, before_id, model)

    row = row_handler.create_row(request.user, table, validated, model,
                                 before=before)

    response_serializer_class = get_row_serializer_class(
        model, RowSerializer, is_response=True
    )
    return Response(response_serializer_class(row).data)
def test_row_created(mock_broadcast_to_channel_group, data_fixture):
    """
    Creating a row must broadcast a `row_created` payload to the table's
    channel group, containing the serialized row and the `before_row_id`.
    """

    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    field = data_fixture.create_text_field(table=table)
    row = RowHandler().create_row(
        user=user, table=table, values={f"field_{field.id}": "Test"})

    mock_broadcast_to_channel_group.delay.assert_called_once()
    args = mock_broadcast_to_channel_group.delay.call_args
    assert args[0][0] == f"table-{table.id}"
    assert args[0][1]["type"] == "row_created"
    assert args[0][1]["table_id"] == table.id
    assert args[0][1]["row"]["id"] == row.id
    # No `before` was given, so the row is appended and before_row_id is None.
    assert args[0][1]["before_row_id"] is None
    assert args[0][1]["row"][f"field_{field.id}"] == "Test"

    row_2 = RowHandler().create_row(
        user=user, table=table, before=row,
        values={f"field_{field.id}": "Test2"})

    args = mock_broadcast_to_channel_group.delay.call_args
    assert args[0][0] == f"table-{table.id}"
    assert args[0][1]["type"] == "row_created"
    assert args[0][1]["table_id"] == table.id
    assert args[0][1]["row"]["id"] == row_2.id
    # Inserted before `row`, so that row's id is broadcast as before_row_id.
    assert args[0][1]["before_row_id"] == row.id
    assert args[0][1]["row"][f"field_{field.id}"] == "Test2"
def test_if_duplicate_field_names_json_export(storage_mock, data_fixture):
    """
    Duplicate field names must be deduplicated in the JSON export by
    suffixing a counter ("name 2", "name 3"), and quotes inside names must
    be escaped.
    """

    user = data_fixture.create_user()
    database = data_fixture.create_database_application(user=user)
    table = data_fixture.create_database_table(database=database)
    data_fixture.create_text_field(table=table, name="name", order=1)
    data_fixture.create_text_field(table=table, name="name", order=2)
    data_fixture.create_text_field(table=table, name="name", order=3)
    data_fixture.create_text_field(table=table, name='Another"name', order=4)
    data_fixture.create_text_field(table=table, name='Another"name', order=5)
    row_handler = RowHandler()
    row_handler.create_row(user=user, table=table)
    job, contents = run_export_job_with_mock_storage(
        table, None, storage_mock, user, {"exporter_type": "json"}
    )
    # NOTE(review): the expected string's exact line layout was reconstructed
    # from a whitespace-mangled source — confirm it matches the exporter's
    # actual indentation before relying on this comparison.
    assert (
        contents
        == """[
{
    "id": 1,
    "name": "",
    "name 2": "",
    "name 3": "",
    "Another\\"name": "",
    "Another\\"name 2": ""
}
]
"""
    )
def test_extract_manytomany_values(data_fixture):
    """extract_manytomany_values must split plain values from m2m values."""

    handler = RowHandler()

    class TemporaryModel1(models.Model):
        class Meta:
            app_label = 'test'

    class TemporaryModel2(models.Model):
        field_1 = models.CharField()
        field_2 = models.ManyToManyField(TemporaryModel1)

        class Meta:
            app_label = 'test'

    provided = {
        'field_1': 'Value 1',
        'field_2': ['Value 2']
    }
    plain, m2m = handler.extract_manytomany_values(provided, TemporaryModel2)

    # The char field stays in the plain dict, the m2m field is moved out.
    assert len(plain) == 1
    assert 'field_1' in plain
    assert len(m2m) == 1
    assert 'field_2' in m2m
def test_get_field_ids_from_dict():
    """Only int keys, numeric strings and 'field_<id>' keys yield ids."""

    handler = RowHandler()
    payload = {
        1: 'Included',
        'field_2': 'Included',
        '3': 'Included',
        'abc': 'Not included',
        'fieldd_3': 'Not included',
    }
    assert handler.extract_field_ids_from_dict(payload) == [1, 2, 3]
def test_get_field_ids_from_dict():
    """Only int keys, numeric strings and 'field_<id>' keys yield ids."""

    handler = RowHandler()
    extracted = handler.extract_field_ids_from_dict({
        1: "Included",
        "field_2": "Included",
        "3": "Included",
        "abc": "Not included",
        "fieldd_3": "Not included",
    })
    assert extracted == [1, 2, 3]
def test_link_row_enhance_queryset(data_fixture, django_assert_num_queries):
    """
    enhance_by_fields must prefetch link row relations so that iterating the
    related rows afterwards executes no additional queries.
    """

    user = data_fixture.create_user()
    database = data_fixture.create_database_application(user=user, name="Placeholder")
    example_table = data_fixture.create_database_table(
        name="Example", database=database
    )
    customers_table = data_fixture.create_database_table(
        name="Customers", database=database
    )
    field_handler = FieldHandler()
    row_handler = RowHandler()
    link_row_field = field_handler.create_field(
        user=user,
        table=example_table,
        type_name="link_row",
        link_row_table=customers_table,
    )
    customers_row_1 = row_handler.create_row(user=user, table=customers_table)
    customers_row_2 = row_handler.create_row(user=user, table=customers_table)
    customers_row_3 = row_handler.create_row(user=user, table=customers_table)
    row_handler.create_row(
        user=user,
        table=example_table,
        values={
            f"field_{link_row_field.id}": [customers_row_1.id, customers_row_2.id],
        },
    )
    row_handler.create_row(
        user=user,
        table=example_table,
        values={
            f"field_{link_row_field.id}": [customers_row_1.id],
        },
    )
    row_handler.create_row(
        user=user,
        table=example_table,
        values={
            f"field_{link_row_field.id}": [customers_row_3.id],
        },
    )
    model = example_table.get_model()
    rows = list(model.objects.all().enhance_by_fields())

    # The relations were prefetched above, so touching them now must not hit
    # the database at all.
    with django_assert_num_queries(0):
        for row in rows:
            list(getattr(row, f"field_{link_row_field.id}").all())
def handle(self, *args, **options):
    """
    Inserts `limit` rows with random values into the table with id
    `table_id`, appended after the existing rows. When the `add_columns`
    option is set, a field of every known type is first added to the table.
    Exits with status 1 when the table does not exist.
    """

    table_id = options["table_id"]
    limit = options["limit"]
    fake = Faker()
    row_handler = RowHandler()
    cache = {}

    try:
        table = Table.objects.get(pk=table_id)
    except Table.DoesNotExist:
        self.stdout.write(
            self.style.ERROR(f"The table with id {table_id} was not "
                             f"found."))
        sys.exit(1)

    # `options.get` covers both a missing and a falsy `add_columns` option
    # in one idiomatic check (the original tested membership and truth
    # separately).
    if options.get("add_columns"):
        self.create_a_column_for_every_type(table)

    model = table.get_model()

    # Find out what the highest order is because we want to append the new rows.
    order = ceil(
        model.objects.aggregate(max=Max("order")).get("max") or Decimal("0"))

    for _ in range(limit):
        # Based on the random_value function we have for each type we can
        # build a dict with a random value for each field.
        values = {
            f"field_{field_id}": field_object["type"].random_value(
                field_object["field"], fake, cache)
            for field_id, field_object in model._field_objects.items()
        }
        values, manytomany_values = row_handler.extract_manytomany_values(
            values, model)
        order += Decimal("1")
        values["order"] = order

        # Insert the row with the randomly created values.
        instance = model.objects.create(**values)

        # Many-to-many relations can only be set after the row exists.
        for field_name, value in manytomany_values.items():
            if value:
                getattr(instance, field_name).set(value)

    self.stdout.write(
        self.style.SUCCESS(f"{limit} rows have been inserted."))
def test_get_row(data_fixture):
    """
    get_row returns the requested row with all field values, raising for
    users outside the group or for unknown row ids.
    """

    user = data_fixture.create_user()
    user_2 = data_fixture.create_user()
    table = data_fixture.create_database_table(name='Car', user=user)
    name_field = data_fixture.create_text_field(
        table=table, name='Name', text_default='Test'
    )
    speed_field = data_fixture.create_number_field(
        table=table, name='Max speed', number_negative=True
    )
    price_field = data_fixture.create_number_field(
        table=table, name='Price', number_type='DECIMAL',
        number_decimal_places=2, number_negative=False
    )

    handler = RowHandler()
    row = handler.create_row(user=user, table=table, values={
        f'field_{name_field.id}': 'Tesla',
        f'field_{speed_field.id}': 240,
        f'field_{price_field.id}': Decimal('59999.99')
    })

    # user_2 is not a member of the table's group.
    with pytest.raises(UserNotInGroupError):
        handler.get_row(user=user_2, table=table, row_id=row.id)

    with pytest.raises(RowDoesNotExist):
        handler.get_row(user=user, table=table, row_id=99999)

    row_tmp = handler.get_row(user=user, table=table, row_id=row.id)

    assert row_tmp.id == row.id
    assert getattr(row_tmp, f'field_{name_field.id}') == 'Tesla'
    assert getattr(row_tmp, f'field_{speed_field.id}') == 240
    assert getattr(row_tmp, f'field_{price_field.id}') == Decimal('59999.99')
def test_extract_field_ids_from_string():
    """Ids are extracted from 'field_<id>'/'<prefix>_<id>'/plain-int tokens."""

    handler = RowHandler()
    cases = [
        (None, []),
        ('not,something', []),
        ('field_1,field_2', [1, 2]),
        ('field_22,test_8,999', [22, 8, 999]),
        ('is,1,one', [1]),
    ]
    for raw, expected in cases:
        assert handler.extract_field_ids_from_string(raw) == expected
def delete(self, request, table_id, row_id):
    """
    Deletes an existing row with the given row_id for table with the given
    table_id.
    """

    # get_table also checks that the user has access to the table's group.
    table = TableHandler().get_table(request.user, table_id)
    RowHandler().delete_row(request.user, table, row_id)
    return Response(status=204)
def test_single_select_field_type_get_order(data_fixture):
    """
    Sorting by a single select field must order rows by the select option
    value (not by option id), with empty values first when ascending.
    """

    user = data_fixture.create_user()
    database = data_fixture.create_database_application(user=user, name='Placeholder')
    table = data_fixture.create_database_table(name='Example', database=database)
    field = data_fixture.create_single_select_field(table=table)
    # Option ids and values are deliberately out of step so the test can tell
    # value-based ordering apart from id-based ordering.
    option_c = data_fixture.create_select_option(field=field, value='C', color='blue')
    option_a = data_fixture.create_select_option(field=field, value='A', color='blue')
    option_b = data_fixture.create_select_option(field=field, value='B', color='blue')
    grid_view = data_fixture.create_grid_view(table=table)

    view_handler = ViewHandler()
    row_handler = RowHandler()

    row_1 = row_handler.create_row(user=user, table=table,
                                   values={f'field_{field.id}': option_b.id})
    row_2 = row_handler.create_row(user=user, table=table,
                                   values={f'field_{field.id}': option_a.id})
    row_3 = row_handler.create_row(user=user, table=table,
                                   values={f'field_{field.id}': option_c.id})
    row_4 = row_handler.create_row(user=user, table=table,
                                   values={f'field_{field.id}': option_b.id})
    row_5 = row_handler.create_row(user=user, table=table,
                                   values={f'field_{field.id}': None})

    sort = data_fixture.create_view_sort(view=grid_view, field=field, order='ASC')
    model = table.get_model()
    rows = view_handler.apply_sorting(grid_view, model.objects.all())
    row_ids = [row.id for row in rows]
    # Ascending: empty value first, then A, B, B, C (ties keep row order).
    assert row_ids == [row_5.id, row_2.id, row_1.id, row_4.id, row_3.id]

    sort.order = 'DESC'
    sort.save()
    rows = view_handler.apply_sorting(grid_view, model.objects.all())
    row_ids = [row.id for row in rows]
    assert row_ids == [row_3.id, row_1.id, row_4.id, row_2.id, row_5.id]

    # Renaming an option's value must change the sort position of its rows.
    option_a.value = 'Z'
    option_a.save()
    sort.order = 'ASC'
    sort.save()
    model = table.get_model()
    rows = view_handler.apply_sorting(grid_view, model.objects.all())
    row_ids = [row.id for row in rows]
    assert row_ids == [row_5.id, row_1.id, row_4.id, row_3.id, row_2.id]
def test_delete_row(data_fixture):
    """Deleting a row removes it; a wrong user or unknown id raises."""

    user = data_fixture.create_user()
    outsider = data_fixture.create_user()
    table = data_fixture.create_database_table(name='Car', user=user)
    data_fixture.create_text_field(table=table, name='Name', text_default='Test')

    handler = RowHandler()
    model = table.get_model()
    row = handler.create_row(user=user, table=table)
    handler.create_row(user=user, table=table)

    with pytest.raises(UserNotInGroupError):
        handler.delete_row(user=outsider, table=table, row_id=row.id)

    with pytest.raises(RowDoesNotExist):
        handler.delete_row(user=user, table=table, row_id=99999)

    handler.delete_row(user=user, table=table, row_id=row.id)

    # Only the second, untouched row remains.
    assert model.objects.all().count() == 1
def test_get_include_exclude_fields(data_fixture):
    """
    get_include_exclude_fields must return only fields belonging to the given
    table, honouring the comma separated include and exclude name lists.
    """

    table = data_fixture.create_database_table()
    table_2 = data_fixture.create_database_table()
    field_1 = data_fixture.create_text_field(table=table, order=1)
    field_2 = data_fixture.create_text_field(table=table, order=2)
    field_3 = data_fixture.create_text_field(table=table_2, order=3)

    row_handler = RowHandler()

    # No include/exclude at all results in None, meaning "all fields".
    assert (row_handler.get_include_exclude_fields(
        table, include=None, exclude=None) is None)
    assert row_handler.get_include_exclude_fields(
        table, include="", exclude="") is None

    fields = row_handler.get_include_exclude_fields(table, f"field_{field_1.id}")
    assert len(fields) == 1
    assert fields[0].id == field_1.id

    # Unknown field names in the include list are silently ignored.
    fields = row_handler.get_include_exclude_fields(
        table, f"field_{field_1.id},field_9999,field_{field_2.id}")
    assert len(fields) == 2
    assert fields[0].id == field_1.id
    assert fields[1].id == field_2.id

    fields = row_handler.get_include_exclude_fields(
        table, None, f"field_{field_1.id},field_9999")
    assert len(fields) == 1
    assert fields[0].id == field_2.id

    # Exclude wins over include for the same field. Fix: the original
    # interpolated the field object itself (f"field_{field_2}") instead of
    # its id, so field_2 was never actually part of the include list.
    fields = row_handler.get_include_exclude_fields(
        table, f"field_{field_1.id},field_{field_2.id}", f"field_{field_1.id}")
    assert len(fields) == 1
    assert fields[0].id == field_2.id

    # Fields belonging to another table are never included...
    fields = row_handler.get_include_exclude_fields(table, f"field_{field_3.id}")
    assert len(fields) == 0

    # ...and excluding a foreign field leaves this table's fields untouched.
    fields = row_handler.get_include_exclude_fields(table, None, f"field_{field_3.id}")
    assert len(fields) == 2
def test_extract_field_ids_from_string():
    """Ids are extracted from 'field_<id>'/'<prefix>_<id>'/plain-int tokens."""

    handler = RowHandler()
    expectations = {
        None: [],
        "not,something": [],
        "field_1,field_2": [1, 2],
        "field_22,test_8,999": [22, 8, 999],
        "is,1,one": [1],
    }
    for raw, expected in expectations.items():
        assert handler.extract_field_ids_from_string(raw) == expected
def delete(self, request, table_id, row_id):
    """
    Deletes an existing row with the given row_id for table with the given
    table_id.
    """

    table = TableHandler().get_table(table_id)
    # Token based access must explicitly allow deletes on this table.
    TokenHandler().check_table_permissions(request, 'delete', table, False)

    RowHandler().delete_row(request.user, table, row_id)
    return Response(status=204)
def test_create_row(data_fixture):
    """
    Creating a row applies field defaults, coerces numeric values, ignores
    unknown field keys and enforces the field constraints.
    """

    user = data_fixture.create_user()
    user_2 = data_fixture.create_user()
    table = data_fixture.create_database_table(name='Car', user=user)
    name_field = data_fixture.create_text_field(
        table=table, name='Name', text_default='Test')
    speed_field = data_fixture.create_number_field(
        table=table, name='Max speed', number_negative=True)
    price_field = data_fixture.create_number_field(
        table=table, name='Price', number_type='DECIMAL',
        number_decimal_places=2, number_negative=False)

    handler = RowHandler()

    # user_2 is not a member of the table's group.
    with pytest.raises(UserNotInGroupError):
        handler.create_row(user=user_2, table=table)

    # Keys may be ints or 'field_<id>' strings; unknown ids must be ignored.
    row = handler.create_row(user=user, table=table, values={
        name_field.id: 'Tesla',
        speed_field.id: 240,
        f'field_{price_field.id}': 59999.99,
        9999: 'Must not be added'
    })
    assert getattr(row, f'field_{name_field.id}') == 'Tesla'
    assert getattr(row, f'field_{speed_field.id}') == 240
    assert getattr(row, f'field_{price_field.id}') == 59999.99
    # Fix: the original used f-strings without placeholders ('field_9999').
    assert not getattr(row, 'field_9999', None)

    row.refresh_from_db()
    assert getattr(row, f'field_{name_field.id}') == 'Tesla'
    assert getattr(row, f'field_{speed_field.id}') == 240
    # After the database round trip the DECIMAL field comes back as Decimal.
    assert getattr(row, f'field_{price_field.id}') == Decimal('59999.99')
    assert not getattr(row, 'field_9999', None)

    # Without values the text field default must be applied.
    row = handler.create_row(user=user, table=table)
    assert getattr(row, f'field_{name_field.id}') == 'Test'
    assert not getattr(row, f'field_{speed_field.id}')
    assert not getattr(row, f'field_{price_field.id}')

    # The price field does not allow negative numbers.
    with pytest.raises(ValidationError):
        handler.create_row(user=user, table=table, values={
            price_field.id: -10.22
        })

    model = table.get_model()
    assert model.objects.all().count() == 2
def handle(self, *args, **options):
    """Fills the table identified by `table_id` with `limit` random rows."""

    table_id = options['table_id']
    limit = options['limit']
    fake = Faker()
    row_handler = RowHandler()
    cache = {}

    try:
        table = Table.objects.get(pk=table_id)
    except Table.DoesNotExist:
        self.stdout.write(
            self.style.ERROR(f"The table with id {table_id} was not "
                             f"found."))
        sys.exit(1)

    model = table.get_model()

    for i in range(0, limit):
        # Every field type can produce a random value, which lets us build a
        # complete random row dict.
        random_row = {}
        for field_id, field_object in model._field_objects.items():
            random_row[f'field_{field_id}'] = field_object['type'].random_value(
                field_object['field'], fake, cache)

        random_row, manytomany_values = row_handler.extract_manytomany_values(
            random_row, model)

        instance = model.objects.create(**random_row)

        # Many-to-many relations can only be set after the row exists.
        for field_name, value in manytomany_values.items():
            if value and len(value) > 0:
                getattr(instance, field_name).set(value)

    self.stdout.write(
        self.style.SUCCESS(f"{limit} rows have been inserted."))
def patch(self, request, table_id, row_id):
    """
    Updates the row with the given row_id for the table with the given
    table_id. Also the post data is validated according to the tables field
    types.
    """

    table = TableHandler().get_table(table_id)
    TokenHandler().check_table_permissions(request, "update", table, False)

    row_handler = RowHandler()
    updated_field_ids = row_handler.extract_field_ids_from_dict(request.data)
    model = table.get_model()

    # Only the fields present in the request body are validated.
    validated = validate_data(
        get_row_serializer_class(model, field_ids=updated_field_ids),
        request.data,
    )
    row = row_handler.update_row(request.user, table, row_id, validated, model)

    response_class = get_row_serializer_class(model, RowSerializer, is_response=True)
    return Response(response_class(row).data)
def patch(self, request, table_id, row_id):
    """Moves the row to another position."""

    table = TableHandler().get_table(table_id)
    TokenHandler().check_table_permissions(request, "update", table, False)
    model = table.get_model()

    row_handler = RowHandler()

    # The target position is expressed as "before this existing row"; when
    # omitted, move_row decides the position itself.
    before_id = request.GET.get("before_id")
    before = None
    if before_id:
        before = row_handler.get_row(request.user, table, before_id, model)

    row = row_handler.move_row(
        request.user, table, row_id, before=before, model=model
    )

    serializer_class = get_row_serializer_class(model, RowSerializer,
                                                is_response=True)
    return Response(serializer_class(row).data)
def test_row_deleted(mock_broadcast_to_channel_group, data_fixture):
    """Deleting a row broadcasts a `row_deleted` event to the table group."""

    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    row = table.get_model().objects.create()
    deleted_id = row.id

    RowHandler().delete_row(user=user, table=table, row_id=deleted_id)

    mock_broadcast_to_channel_group.delay.assert_called_once()
    positional = mock_broadcast_to_channel_group.delay.call_args[0]
    channel, payload = positional[0], positional[1]
    assert channel == f'table-{table.id}'
    assert payload['type'] == 'row_deleted'
    assert payload['row_id'] == deleted_id
    assert payload['table_id'] == table.id
def test_row_updated(mock_broadcast_to_channel_group, data_fixture):
    """
    Updating a row must broadcast a `row_updated` payload containing both the
    state before the update (`row_before_update`) and the new full row.
    """

    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    field = data_fixture.create_text_field(table=table)
    field_2 = data_fixture.create_text_field(table=table)
    row = table.get_model().objects.create()
    RowHandler().update_row(user=user, table=table, row_id=row.id,
                            values={f"field_{field.id}": "Test"})

    mock_broadcast_to_channel_group.delay.assert_called_once()
    args = mock_broadcast_to_channel_group.delay.call_args
    assert args[0][0] == f"table-{table.id}"
    assert args[0][1]["type"] == "row_updated"
    assert args[0][1]["table_id"] == table.id
    # The pre-update snapshot still has both fields empty.
    assert args[0][1]["row_before_update"]["id"] == row.id
    assert args[0][1]["row_before_update"][f"field_{field.id}"] is None
    assert args[0][1]["row_before_update"][f"field_{field_2.id}"] is None
    assert args[0][1]["row"]["id"] == row.id
    assert args[0][1]["row"][f"field_{field.id}"] == "Test"
    assert args[0][1]["row"][f"field_{field_2.id}"] is None

    row.refresh_from_db()
    setattr(row, f"field_{field_2.id}", "Second")
    row.save()

    RowHandler().update_row(user=user, table=table, row_id=row.id,
                            values={f"field_{field.id}": "First"})

    args = mock_broadcast_to_channel_group.delay.call_args
    assert args[0][0] == f"table-{table.id}"
    assert args[0][1]["type"] == "row_updated"
    assert args[0][1]["table_id"] == table.id
    assert args[0][1]["row"]["id"] == row.id
    assert args[0][1]["row"][f"field_{field.id}"] == "First"
    # Even though only `field` was updated, the broadcast also contains the
    # value that was set directly on the model instance.
    assert args[0][1]["row"][f"field_{field_2.id}"] == "Second"
def test_extract_manytomany_values(data_fixture):
    """extract_manytomany_values must split plain values from m2m values."""

    row_handler = RowHandler()

    class TemporaryModel1(models.Model):
        class Meta:
            app_label = "test"

    class TemporaryModel2(models.Model):
        field_1 = models.CharField()
        field_2 = models.ManyToManyField(TemporaryModel1)

        class Meta:
            app_label = "test"

    raw_values = {"field_1": "Value 1", "field_2": ["Value 2"]}
    plain_values, m2m_values = row_handler.extract_manytomany_values(
        raw_values, TemporaryModel2
    )

    # The char field stays in the plain dict, the m2m field is moved out.
    assert len(plain_values.keys()) == 1
    assert "field_1" in plain_values
    assert len(m2m_values.keys()) == 1
    assert "field_2" in m2m_values
def test_if_xml_duplicate_name_and_value_are_escaped(storage_mock, data_fixture):
    """
    XML export must escape cell values, replace characters that are invalid
    in tag names, deduplicate duplicate names with a counter suffix and fall
    back to `field-<id>` tags for names that cannot be used as tags at all.
    """

    user = data_fixture.create_user()
    database = data_fixture.create_database_application(user=user)
    table = data_fixture.create_database_table(database=database)
    text = data_fixture.create_text_field(table=table, name="<name>", order=0)
    data_fixture.create_text_field(table=table, name="name", order=1)
    data_fixture.create_text_field(table=table, name="Another name", order=2)
    data_fixture.create_text_field(table=table, name="Another@name", order=3)
    # Names that produce no usable tag characters must use the field id.
    empty_1 = data_fixture.create_text_field(table=table, name="@", order=4)
    empty_2 = data_fixture.create_text_field(table=table, name="", order=5)
    data_fixture.create_text_field(table=table, name="1", order=6)
    row_handler = RowHandler()
    row_handler.create_row(
        user=user,
        table=table,
        values={f"field_{text.id}": "<value>"},
    )
    job, contents = run_export_job_with_mock_storage(
        table, None, storage_mock, user, {"exporter_type": "xml"}
    )
    # The comparison is whitespace-insensitive, so only the escaping and tag
    # names matter here, not the indentation of the literal below.
    assert strip_indents_and_newlines(contents) == strip_indents_and_newlines(
        f"""
        <?xml version="1.0" encoding="utf-8" ?>
        <rows>
            <row>
                <id>1</id>
                <name>&lt;value&gt;</name>
                <name-2/>
                <Another-name/>
                <Another-name-2/>
                <field-{empty_1.id}/>
                <field-{empty_2.id}/>
                <field-1/>
            </row>
        </rows>
        """
    )
def post(self, request, table_id):
    """
    Creates a new row for the given table_id. Also the post data is validated
    according to the tables field types.
    """

    table = TableHandler().get_table(request.user, table_id)
    model = table.get_model()

    validated = validate_data(get_row_serializer_class(model), request.data)
    row = RowHandler().create_row(request.user, table, validated, model)

    serializer_class = get_row_serializer_class(model, RowSerializer)
    return Response(serializer_class(row).data)
def get(self, request, table_id, row_id):
    """
    Responds with a serializer version of the row related to the provided
    row_id and table_id.
    """

    table = TableHandler().get_table(table_id)
    TokenHandler().check_table_permissions(request, 'read', table, False)

    model = table.get_model()
    row = RowHandler().get_row(request.user, table, row_id, model)

    response_class = get_row_serializer_class(model, RowSerializer,
                                              is_response=True)
    return Response(response_class(row).data)
def get(self, request, table_id):
    """
    Lists all the rows of the given table id paginated. It is also possible to
    provide a search query.
    """

    table = TableHandler().get_table(table_id)
    table.database.group.has_user(request.user, raise_error=True)
    TokenHandler().check_table_permissions(request, 'read', table, False)

    search = request.GET.get('search')
    order_by = request.GET.get('order_by')
    include = request.GET.get('include')
    exclude = request.GET.get('exclude')
    fields = RowHandler().get_include_exclude_fields(
        table, include, exclude)

    # When an include/exclude list was given, build the model for only those
    # fields; otherwise field_ids=None includes every field.
    model = table.get_model(fields=fields, field_ids=[] if fields else None)
    queryset = model.objects.all().enhance_by_fields()

    if search:
        queryset = queryset.search_all_fields(search)

    if order_by:
        queryset = queryset.order_by_fields_string(order_by)

    # Anything other than an explicit 'OR' (case-insensitive) means AND.
    filter_type = (FILTER_TYPE_OR
                   if str(request.GET.get('filter_type')).upper() == 'OR'
                   else FILTER_TYPE_AND)
    # All GET parameters are forwarded; filter_by_fields_object presumably
    # picks out the filter-related keys itself — TODO confirm.
    filter_object = {
        key: request.GET.getlist(key) for key in request.GET.keys()
    }
    queryset = queryset.filter_by_fields_object(filter_object, filter_type)

    paginator = PageNumberPagination(
        limit_page_size=settings.ROW_PAGE_SIZE_LIMIT)
    page = paginator.paginate_queryset(queryset, request, self)
    serializer_class = get_row_serializer_class(model, RowSerializer,
                                                is_response=True)
    serializer = serializer_class(page, many=True)

    return paginator.get_paginated_response(serializer.data)