def test_import_export_single_select_field(data_fixture):
    """
    Round-trips a single select field through export/import and verifies that
    the select option is copied onto the imported field with a fresh id but
    identical value, color and order.
    """
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    handler = FieldHandler()
    field = handler.create_field(
        user=user,
        table=table,
        type_name='single_select',
        name='Single select',
        select_options=[{'value': 'Option 1', 'color': 'blue'}],
    )
    original_option = field.select_options.all().first()

    field_type = field_type_registry.get_by_model(field)
    serialized = field_type.export_serialized(field)
    mapping = {}
    imported_field = field_type.import_serialized(table, serialized, mapping)

    imported_options = imported_field.select_options.all()
    assert imported_options.count() == 1
    imported_option = imported_options.first()
    # A new option row must have been created rather than reused.
    assert imported_option.id != original_option.id
    assert imported_option.value == original_option.value
    assert imported_option.color == original_option.color
    assert imported_option.order == original_option.order
def create_filter(self, user, view, field, type_name, value):
    """
    Creates a new view filter. The rows that are visible in a view should
    always be filtered by the related view filters.

    :param user: The user on whose behalf the view filter is created.
    :type user: User
    :param view: The view for which the filter needs to be created.
    :type: View
    :param field: The field that the filter should compare the value with.
    :type field: Field
    :param type_name: The filter type, allowed values are the types in the
        view_filter_type_registry `equal`, `not_equal` etc.
    :type type_name: str
    :param value: The value that the filter must apply to.
    :type value: str
    :raises UserNotInGroupError: When the user does not belong to the related
        group.
    :raises ViewFilterNotSupported: When the provided view does not support
        filtering.
    :raises ViewFilterTypeNotAllowedForField: When the field does not support
        the filter type.
    :raises FieldNotInTable: When the provided field does not belong to the
        provided view's table.
    :return: The created view filter instance.
    :rtype: ViewFilter
    """
    group = view.table.database.group

    # Only members of the group that owns the view's database may add filters.
    if not group.has_user(user):
        raise UserNotInGroupError(user, group)

    # Check if view supports filtering.
    view_type = view_type_registry.get_by_model(view.specific_class)
    if not view_type.can_filter:
        raise ViewFilterNotSupported(
            f'Filtering is not supported for {view_type.type} views.'
        )

    view_filter_type = view_filter_type_registry.get(type_name)
    field_type = field_type_registry.get_by_model(field.specific_class)

    # Check if the field is allowed for this filter type.
    if field_type.type not in view_filter_type.compatible_field_types:
        raise ViewFilterTypeNotAllowedForField(
            f'The view filter type {type_name} is not supported for field type '
            f'{field_type.type}.'
        )

    # Check if field belongs to the grid views table.
    if not view.table.field_set.filter(id=field.pk).exists():
        raise FieldNotInTable(f'The field {field.pk} does not belong to table '
                              f'{view.table.id}.')

    return ViewFilter.objects.create(
        view=view,
        field=field,
        type=view_filter_type.type,
        value=value
    )
def export_serialized(self, database):
    """
    Exports the database application type to a serialized format that can
    later be imported via the `import_serialized`.
    """
    # Prefetch all related objects so the serialization below does not issue
    # one query per table/view/filter/sort.
    tables = database.table_set.all().prefetch_related(
        'field_set',
        'view_set',
        'view_set__viewfilter_set',
        'view_set__viewsort_set'
    )
    serialized_tables = []
    for table in tables:
        fields = table.field_set.all()
        serialized_fields = []
        for f in fields:
            # Each field is serialized by its specific field type.
            field = f.specific
            field_type = field_type_registry.get_by_model(field)
            serialized_fields.append(field_type.export_serialized(field))

        serialized_views = []
        for v in table.view_set.all():
            # Each view is serialized by its specific view type.
            view = v.specific
            view_type = view_type_registry.get_by_model(view)
            serialized_views.append(view_type.export_serialized(view))

        model = table.get_model(fields=fields)
        serialized_rows = []
        # Shared cache the field types can use across rows of the same table.
        table_cache = {}
        for row in model.objects.all():
            serialized_row = {
                'id': row.id,
                # `order` is a decimal; stringify it for a stable serial form.
                'order': str(row.order)
            }
            for field_object in model._field_objects.values():
                field_name = field_object['name']
                field_type = field_object['type']
                serialized_row[field_name] = field_type.get_export_serialized_value(
                    row,
                    field_name,
                    table_cache
                )
            serialized_rows.append(serialized_row)

        serialized_tables.append({
            'id': table.id,
            'name': table.name,
            'order': table.order,
            'fields': serialized_fields,
            'views': serialized_views,
            'rows': serialized_rows,
        })

    serialized = super().export_serialized(database)
    serialized['tables'] = serialized_tables
    return serialized
def update_sort(self, user, view_sort, **kwargs):
    """
    Updates the field and/or order of an existing view sort.

    :param user: The user on whose behalf the view sort is updated.
    :type user: User
    :param view_sort: The view sort that needs to be updated.
    :type view_sort: ViewSort
    :param kwargs: The values that need to be updated, allowed values are
        `field` and `order`.
    :type kwargs: dict
    :raises FieldNotInTable: When the field does not support sorting.
    :return: The updated view sort instance.
    :rtype: ViewSort
    """
    group = view_sort.view.table.database.group
    group.has_user(user, raise_error=True)

    new_field = kwargs.get("field", view_sort.field)
    new_order = kwargs.get("order", view_sort.order)
    field_changed = new_field.id != view_sort.field_id
    table = view_sort.view.table

    # A changed field must still belong to the view's table.
    if field_changed and not table.field_set.filter(id=new_field.pk).exists():
        raise FieldNotInTable(
            f"The field {new_field.pk} does not belong to table "
            f"{table.id}."
        )

    # A changed field must have a type that supports sorting.
    field_type = field_type_registry.get_by_model(new_field.specific_class)
    if field_changed and not field_type.can_order_by:
        raise ViewSortFieldNotSupported(
            f"The field {new_field.pk} does not support sorting."
        )

    # A changed field may not already be used by another sort on this view.
    already_used = view_sort.view.viewsort_set.filter(
        field_id=new_field.pk
    ).exists()
    if field_changed and already_used:
        raise ViewSortFieldAlreadyExist(
            f"A sort with the field {new_field.pk} already exists."
        )

    view_sort.field = new_field
    view_sort.order = new_order
    view_sort.save()

    view_sort_updated.send(self, view_sort=view_sort, user=user)

    return view_sort
def get_type(self, instance):
    """
    Returns the type name of the provided field instance. A field type that
    was already resolved and stored in the serializer context is preferred;
    otherwise it is looked up via the instance's specific class.
    """
    resolved_type = self.context.get("instance_type") or (
        field_type_registry.get_by_model(instance.specific_class)
    )
    return resolved_type.type
def test_import_export_date_field(data_fixture):
    """
    A date field's configuration (format, include-time flag, time format) must
    survive an export/import round trip.

    Fix: the local variables were previously named `number_serialized` and
    `number_field_imported` — copy-paste residue from the number field test —
    which misrepresented what this test exercises.
    """
    date_field = data_fixture.create_date_field()
    date_field_type = field_type_registry.get_by_model(date_field)
    date_serialized = date_field_type.export_serialized(date_field)
    date_field_imported = date_field_type.import_serialized(
        date_field.table, date_serialized, {})
    assert date_field.date_format == date_field_imported.date_format
    assert date_field.date_include_time == date_field_imported.date_include_time
    assert date_field.date_time_format == date_field_imported.date_time_format
def create_sort(self, user, view, field, order):
    """
    Creates a new view sort.

    :param user: The user on whose behalf the view sort is created.
    :type user: User
    :param view: The view for which the sort needs to be created.
    :type: View
    :param field: The field that needs to be sorted.
    :type field: Field
    :param order: The desired order, can either be ascending (A to Z) or
        descending (Z to A).
    :type order: str
    :raises UserNotInGroupError: When the user does not belong to the related
        group.
    :raises ViewSortNotSupported: When the provided view does not support
        sorting.
    :raises FieldNotInTable: When the provided field does not belong to the
        provided view's table.
    :return: The created view sort instance.
    :rtype: ViewSort
    """
    group = view.table.database.group
    if not group.has_user(user):
        raise UserNotInGroupError(user, group)

    # The view's type decides whether its rows can be sorted at all.
    view_type = view_type_registry.get_by_model(view.specific_class)
    if not view_type.can_sort:
        raise ViewSortNotSupported(
            f'Sorting is not supported for {view_type.type} views.'
        )

    # The field's type decides whether it can be used for ordering.
    field_type = field_type_registry.get_by_model(field.specific_class)
    if not field_type.can_order_by:
        raise ViewSortFieldNotSupported(
            f'The field {field.pk} does not support sorting.'
        )

    # The field must be part of the table the view belongs to.
    if not view.table.field_set.filter(id=field.pk).exists():
        raise FieldNotInTable(
            f'The field {field.pk} does not belong to table {view.table.id}.'
        )

    # A field may only be sorted on once per view.
    if view.viewsort_set.filter(field_id=field.pk).exists():
        raise ViewSortFieldAlreadyExist(
            f'A sort with the field {field.pk} already exists.'
        )

    view_sort = ViewSort.objects.create(view=view, field=field, order=order)
    view_sort_created.send(self, view_sort=view_sort, user=user)

    return view_sort
def update_sort(self, user, view_sort, **kwargs):
    """
    Updates the values of an existing view sort.

    :param user: The user on whose behalf the view sort is updated.
    :type user: User
    :param view_sort: The view sort that needs to be updated.
    :type view_sort: ViewSort
    :param kwargs: The values that need to be updated, allowed values are
        `field` and `order`.
    :type kwargs: dict
    :raises UserNotInGroupError: When the user does not belong to the related
        group.
    :raises FieldNotInTable: When the field does not support sorting.
    :return: The updated view sort instance.
    :rtype: ViewSort
    """
    group = view_sort.view.table.database.group
    if not group.has_user(user):
        raise UserNotInGroupError(user, group)

    # Fall back to the current values when a kwarg is not provided.
    field = kwargs.get('field', view_sort.field)
    order = kwargs.get('order', view_sort.order)

    # If the field has changed we need to check if the field belongs to the
    # table.
    if (
        field.id != view_sort.field_id and
        not view_sort.view.table.field_set.filter(id=field.pk).exists()
    ):
        raise FieldNotInTable(f'The field {field.pk} does not belong to table '
                              f'{view_sort.view.table.id}.')

    # If the field has changed we need to check if the new field type supports
    # sorting.
    field_type = field_type_registry.get_by_model(field.specific_class)
    if (
        field.id != view_sort.field_id and
        not field_type.can_order_by
    ):
        raise ViewSortFieldNotSupported(f'The field {field.pk} does not support '
                                        f'sorting.')

    # If the field has changed we need to check if the new field doesn't
    # already exist as sort.
    if (
        field.id != view_sort.field_id and
        view_sort.view.viewsort_set.filter(field_id=field.pk).exists()
    ):
        raise ViewSortFieldAlreadyExist(f'A sort with the field {field.pk} '
                                        f'already exists.')

    view_sort.field = field
    view_sort.order = order
    view_sort.save()

    return view_sort
def test_get_set_export_serialized_value_date_field(data_fixture):
    """
    A date and a datetime cell value must be unchanged after exporting the
    serialized value and importing it back, for both empty and populated rows.
    """
    table = data_fixture.create_database_table()
    date_field = data_fixture.create_date_field(table=table)
    datetime_field = data_fixture.create_date_field(
        table=table, date_include_time=True
    )
    date_field_name = f'field_{date_field.id}'
    datetime_field_name = f'field_{datetime_field.id}'
    date_field_type = field_type_registry.get_by_model(date_field)
    model = table.get_model()

    row_1 = model.objects.create()
    row_2 = model.objects.create(**{
        date_field_name: '2010-02-03',
        datetime_field_name: make_aware(datetime(2010, 2, 3, 12, 30, 0), utc),
    })
    row_1.refresh_from_db()
    row_2.refresh_from_db()

    old_row_1_date = getattr(row_1, date_field_name)
    old_row_1_datetime = getattr(row_1, datetime_field_name)
    old_row_2_date = getattr(row_2, date_field_name)
    old_row_2_datetime = getattr(row_2, datetime_field_name)

    def roundtrip(row, name):
        # Export the current cell value and immediately import it back.
        exported = date_field_type.get_export_serialized_value(row, name, {})
        date_field_type.set_import_serialized_value(row, name, exported, {})

    for row in (row_1, row_2):
        for name in (date_field_name, datetime_field_name):
            roundtrip(row, name)

    row_1.refresh_from_db()
    row_2.refresh_from_db()

    assert old_row_1_date == getattr(row_1, date_field_name)
    assert old_row_1_datetime == getattr(row_1, datetime_field_name)
    assert old_row_2_date == getattr(row_2, date_field_name)
    assert old_row_2_datetime == getattr(row_2, datetime_field_name)
def export_serialized(self, database, files_zip, storage):
    """
    Exports the database application type to a serialized format that can
    later be imported via the `import_serialized`.
    """
    # Prefetch all related objects so serialization does not issue a query per
    # table/view/filter/sort.
    tables = database.table_set.all().prefetch_related(
        "field_set",
        "view_set",
        "view_set__viewfilter_set",
        "view_set__viewsort_set",
    )
    serialized_tables = []
    for table in tables:
        fields = table.field_set.all()
        serialized_fields = []
        for f in fields:
            # Each field is serialized by its specific field type.
            field = f.specific
            field_type = field_type_registry.get_by_model(field)
            serialized_fields.append(field_type.export_serialized(field))

        serialized_views = []
        for v in table.view_set.all():
            # Each view is serialized by its specific view type.
            view = v.specific
            view_type = view_type_registry.get_by_model(view)
            serialized_views.append(view_type.export_serialized(view))

        model = table.get_model(fields=fields)
        serialized_rows = []
        # Shared cache the field types can use across rows of the same table.
        table_cache = {}
        for row in model.objects.all():
            # `order` is a decimal; stringify it for a stable serial form.
            serialized_row = {"id": row.id, "order": str(row.order)}
            for field_object in model._field_objects.values():
                field_name = field_object["name"]
                field_type = field_object["type"]
                # `files_zip`/`storage` let file-based fields copy their
                # user files into the export archive.
                serialized_row[
                    field_name] = field_type.get_export_serialized_value(
                        row, field_name, table_cache, files_zip, storage)
            serialized_rows.append(serialized_row)

        serialized_tables.append({
            "id": table.id,
            "name": table.name,
            "order": table.order,
            "fields": serialized_fields,
            "views": serialized_views,
            "rows": serialized_rows,
        })

    serialized = super().export_serialized(database, files_zip, storage)
    serialized["tables"] = serialized_tables
    return serialized
def test_import_export_number_field(data_fixture):
    """
    A number field's configuration (type, negative flag, decimal places) must
    survive an export/import round trip.
    """
    number_field = data_fixture.create_number_field(
        name='Number field',
        number_type='DECIMAL',
        number_negative=True,
        number_decimal_places=2,
    )
    field_type = field_type_registry.get_by_model(number_field)
    serialized = field_type.export_serialized(number_field)
    imported_field = field_type.import_serialized(
        number_field.table, serialized, {}
    )
    for attribute in (
        'number_type',
        'number_negative',
        'number_decimal_places',
    ):
        assert getattr(number_field, attribute) == (
            getattr(imported_field, attribute)
        )
def update_filter(self, user, view_filter, **kwargs):
    """
    Updates the values of an existing view filter.

    :param user: The user on whose behalf the view filter is updated.
    :type user: User
    :param view_filter: The view filter that needs to be updated.
    :type view_filter: ViewFilter
    :param kwargs: The values that need to be updated, allowed values are
        `field`, `value` and `type_name`.
    :type kwargs: dict
    :raises UserNotInGroupError: When the user does not belong to the related
        group.
    :raises ViewFilterTypeNotAllowedForField: When the field does not supports
        the filter type.
    :raises FieldNotInTable: When the provided field does not belong to the
        view's table.
    :return: The updated view filter instance.
    :rtype: ViewFilter
    """
    group = view_filter.view.table.database.group
    if not group.has_user(user):
        raise UserNotInGroupError(user, group)

    # Fall back to the current values when a kwarg is not provided.
    type_name = kwargs.get('type_name', view_filter.type)
    field = kwargs.get('field', view_filter.field)
    value = kwargs.get('value', view_filter.value)
    view_filter_type = view_filter_type_registry.get(type_name)
    field_type = field_type_registry.get_by_model(field.specific_class)

    # Check if the field is allowed for this filter type.
    if field_type.type not in view_filter_type.compatible_field_types:
        raise ViewFilterTypeNotAllowedForField(
            f'The view filter type {type_name} is not supported for field type '
            f'{field_type.type}.'
        )

    # If the field has changed we need to check if the field belongs to the
    # table.
    if (
        field.id != view_filter.field_id and
        not view_filter.view.table.field_set.filter(id=field.pk).exists()
    ):
        raise FieldNotInTable(f'The field {field.pk} does not belong to table '
                              f'{view_filter.view.table.id}.')

    view_filter.field = field
    view_filter.value = value
    view_filter.type = type_name
    view_filter.save()

    return view_filter
def test_get_set_export_serialized_value_boolean_field(data_fixture):
    """
    A boolean cell value must be unchanged after exporting the serialized
    value and importing it back, for the empty, True and False cases.
    """
    table = data_fixture.create_database_table()
    boolean_field = data_fixture.create_boolean_field(table=table)
    boolean_field_name = f'field_{boolean_field.id}'
    boolean_field_type = field_type_registry.get_by_model(boolean_field)
    model = table.get_model()

    row_1 = model.objects.create()
    row_2 = model.objects.create(**{boolean_field_name: True})
    row_3 = model.objects.create(**{boolean_field_name: False})
    rows = (row_1, row_2, row_3)
    for row in rows:
        row.refresh_from_db()

    old_values = [getattr(row, boolean_field_name) for row in rows]

    for row in rows:
        # Export the stored value and import it straight back into the row.
        exported = boolean_field_type.get_export_serialized_value(
            row, boolean_field_name, {}
        )
        boolean_field_type.set_import_serialized_value(
            row, boolean_field_name, exported, {}
        )

    for row in rows:
        row.save()
    for row in rows:
        row.refresh_from_db()

    for old_value, row in zip(old_values, rows):
        assert old_value == getattr(row, boolean_field_name)
def test_import_export_text_field(data_fixture):
    """
    A text field must round trip through export/import: a new field with a new
    id is created, its configuration is copied, and the id mapping records the
    relation between the old and the new field id.
    """
    id_mapping = {}
    text_field = data_fixture.create_text_field(
        name="Text name", text_default="Text default"
    )
    text_field_type = field_type_registry.get_by_model(text_field)
    serialized = text_field_type.export_serialized(text_field)
    imported_field = text_field_type.import_serialized(
        text_field.table, serialized, id_mapping
    )

    # A new field row must have been created rather than reused.
    assert text_field.id != imported_field.id
    for attribute in ('name', 'order', 'primary', 'text_default'):
        assert getattr(text_field, attribute) == (
            getattr(imported_field, attribute)
        )
    assert id_mapping["database_fields"][text_field.id] == imported_field.id
def field_type_changed(self, field):
    """
    This method is called by the FieldHandler when the field type of a field
    has changed. It could be that the field has filters or sortings that are
    not compatible anymore. If that is the case then those need to be removed.

    Fix: the loop variable was named `filter`, shadowing the builtin of the
    same name; it is renamed to `view_filter`.

    :param field: The new field object.
    :type field: Field
    """
    field_type = field_type_registry.get_by_model(field.specific_class)

    # If the new field type does not support sorting then all sortings will be
    # removed.
    if not field_type.can_order_by:
        field.viewsort_set.all().delete()

    # Check which filters are not compatible anymore and remove those.
    for view_filter in field.viewfilter_set.all():
        filter_type = view_filter_type_registry.get(view_filter.type)

        if field_type.type not in filter_type.compatible_field_types:
            view_filter.delete()
def get_model(self, fields=None, field_ids=None, attribute_names=False):
    """
    Generates a django model based on available fields that belong to this
    table.

    :param fields: Extra table field instances that need to be added the
        model.
    :type fields: list
    :param field_ids: If provided only the fields with the ids in the list
        will be added to the model. This can be done to improve speed if for
        example only a single field needs to be mutated.
    :type field_ids: None or list
    :param attribute_names: If True, the model attributes will be based on the
        field name instead of the field id.
    :type attribute_names: bool
    :return: The generated model.
    :rtype: Model
    """
    if not fields:
        fields = []

    app_label = f'{DatabaseConfig.name}_tables'
    meta = type(
        'Meta', (), {
            'managed': False,
            'db_table': f'database_table_{self.id}',
            'app_label': app_label
        })
    attrs = {
        'Meta': meta,
        '__module__': 'database.models',
        # An indication that the model is a generated table model.
        '_generated_table_model': True,
        # An object containing the table fields, field types and the chosen
        # names with the table field id as key.
        '_field_objects': {}
    }

    # Construct a query to fetch all the fields of that table.
    fields_query = self.field_set.all()

    # If the field ids are provided we must only fetch the fields of which the
    # ids are in that list.
    if isinstance(field_ids, list):
        if len(field_ids) == 0:
            fields_query = []
        else:
            fields_query = fields_query.filter(pk__in=field_ids)

    # Create a combined list of fields that must be added and belong to the
    # this table.
    fields = fields + [field for field in fields_query]

    # If there are duplicate field names we have to store them in a list so we
    # know later which ones are duplicate.
    duplicate_field_names = []

    # We will have to add each field to with the correct field name and model
    # field to the attribute list in order for the model to work.
    for field in fields:
        field = field.specific
        field_type = field_type_registry.get_by_model(field)
        field_name = field.db_column

        # If attribute_names is True we will not use 'field_{id}' as attribute
        # name, but we will rather use a name the user provided.
        if attribute_names:
            field_name = field.model_attribute_name

            # If the field name already exists we will append '_field_{id}' to
            # each entry that is a duplicate.
            if field_name in attrs:
                duplicate_field_names.append(field_name)
                replaced_field_name = (
                    f'{field_name}_{attrs[field_name].db_column}'
                )
                attrs[replaced_field_name] = attrs.pop(field_name)
            if field_name in duplicate_field_names:
                field_name = f'{field_name}_{field.db_column}'

        # Add the generated objects and information to the dict that
        # optionally can be returned.
        attrs['_field_objects'][field.id] = {
            'field': field,
            'type': field_type,
            'name': field_name
        }

        # Add the field to the attribute dict that is used to generate the
        # model. All the kwargs that are passed to the `get_model_field`
        # method are going to be passed along to the model field.
        attrs[field_name] = field_type.get_model_field(
            field, db_column=field.db_column, verbose_name=field.name)

    # Create the model class.
    model = type(
        str(f'{self.model_class_name}TableModel'), (models.Model, ), attrs)

    # Immediately remove the model from the cache because it is used only
    # once.
    model_name = model._meta.model_name
    all_models = model._meta.apps.all_models
    del all_models[app_label][model_name]

    return model
def get_filter(self, field_name, value, model_field, field) -> OptionallyAnnotatedQ:
    """
    Builds the filter query by delegating to the `contains_query` of the
    field type that matches the provided field.
    """
    matching_field_type = field_type_registry.get_by_model(field)
    return matching_field_type.contains_query(
        field_name, value, model_field, field
    )
def get_model(self, fields=None, field_ids=None, attribute_names=False,
              manytomany_models=None):
    """
    Generates a temporary Django model based on available fields that belong
    to this table. Note that the model will not be registered with the apps
    because of the `DatabaseConfig.prevent_generated_model_for_registering`
    hack. We do not want the model cached because models with the same name
    can differ.

    :param fields: Extra table field instances that need to be added the
        model.
    :type fields: list
    :param field_ids: If provided only the fields with the ids in the list
        will be added to the model. This can be done to improve speed if for
        example only a single field needs to be mutated.
    :type field_ids: None or list
    :param attribute_names: If True, the model attributes will be based on the
        field name instead of the field id.
    :type attribute_names: bool
    :param manytomany_models: In some cases with related fields a model has to
        be generated in order to generate that model. In order to prevent a
        recursion loop we cache the generated models and pass those along.
    :type manytomany_models: dict
    :return: The generated model.
    :rtype: Model
    """
    if not fields:
        fields = []

    if not manytomany_models:
        manytomany_models = {}

    app_label = "database_table"
    meta = type(
        "Meta",
        (),
        {
            "managed": False,
            "db_table": f"database_table_{self.id}",
            "app_label": app_label,
            "ordering": ["order", "id"],
        },
    )
    attrs = {
        "Meta": meta,
        "__module__": "database.models",
        # An indication that the model is a generated table model.
        "_generated_table_model": True,
        "_table_id": self.id,
        # An object containing the table fields, field types and the chosen
        # names with the table field id as key.
        "_field_objects": {},
        # We are using our own table model manager to implement some queryset
        # helpers.
        "objects": TableModelManager(),
        # Indicates which position the row has.
        "order": models.DecimalField(
            max_digits=40,
            decimal_places=20,
            editable=False,
            db_index=True,
            default=1,
        ),
    }

    # Construct a query to fetch all the fields of that table.
    fields_query = self.field_set.all()

    # If the field ids are provided we must only fetch the fields of which the
    # ids are in that list.
    if isinstance(field_ids, list):
        if len(field_ids) == 0:
            fields_query = []
        else:
            fields_query = fields_query.filter(pk__in=field_ids)

    # Create a combined list of fields that must be added and belong to the
    # this table.
    fields = list(fields) + [field for field in fields_query]

    # If there are duplicate field names we have to store them in a list so we
    # know later which ones are duplicate.
    duplicate_field_names = []

    # We will have to add each field to with the correct field name and model
    # field to the attribute list in order for the model to work.
    for field in fields:
        field = field.specific
        field_type = field_type_registry.get_by_model(field)
        field_name = field.db_column

        # If attribute_names is True we will not use 'field_{id}' as attribute
        # name, but we will rather use a name the user provided.
        if attribute_names:
            field_name = field.model_attribute_name

            # If the field name already exists we will append '_field_{id}' to
            # each entry that is a duplicate.
            if field_name in attrs:
                duplicate_field_names.append(field_name)
                replaced_field_name = (
                    f"{field_name}_{attrs[field_name].db_column}"
                )
                attrs[replaced_field_name] = attrs.pop(field_name)
            if field_name in duplicate_field_names:
                field_name = f"{field_name}_{field.db_column}"

        # Add the generated objects and information to the dict that
        # optionally can be returned.
        attrs["_field_objects"][field.id] = {
            "field": field,
            "type": field_type,
            "name": field_name,
        }

        # Add the field to the attribute dict that is used to generate the
        # model. All the kwargs that are passed to the `get_model_field`
        # method are going to be passed along to the model field.
        attrs[field_name] = field_type.get_model_field(
            field, db_column=field.db_column, verbose_name=field.name)

    # Create the model class.
    model = type(
        str(f"Table{self.pk}Model"),
        (
            CreatedAndUpdatedOnMixin,
            models.Model,
        ),
        attrs,
    )

    # In some situations the field can only be added once the model class has
    # been generated. So for each field we will call the
    # after_model_generation with the generated model as argument in order to
    # do this. This is for example used by the link row field. It can also be
    # used to make other changes to the class.
    for field_id, field_object in attrs["_field_objects"].items():
        field_object["type"].after_model_generation(
            field_object["field"], model, field_object["name"],
            manytomany_models)

    return model
def get_model(self, fields=None, field_ids=None, attribute_names=False,
              manytomany_models=None):
    """
    Generates a temporary Django model based on available fields that belong
    to this table. Note that the model will not be registered with the apps
    because of the `DatabaseConfig.prevent_generated_model_for_registering`
    hack. We do not want the model cached because models with the same name
    can differ.

    :param fields: Extra table field instances that need to be added the
        model.
    :type fields: list
    :param field_ids: If provided only the fields with the ids in the list
        will be added to the model. This can be done to improve speed if for
        example only a single field needs to be mutated.
    :type field_ids: None or list
    :param attribute_names: If True, the model attributes will be based on the
        field name instead of the field id.
    :type attribute_names: bool
    :param manytomany_models: In some cases with related fields a model has to
        be generated in order to generate that model. In order to prevent a
        recursion loop we cache the generated models and pass those along.
    :type manytomany_models: dict
    :return: The generated model.
    :rtype: Model
    """
    if not fields:
        fields = []

    if not manytomany_models:
        manytomany_models = {}

    app_label = 'database_table'
    meta = type('Meta', (), {
        'managed': False,
        'db_table': f'database_table_{self.id}',
        'app_label': app_label,
        'ordering': ['id']
    })
    attrs = {
        'Meta': meta,
        '__module__': 'database.models',
        # An indication that the model is a generated table model.
        '_generated_table_model': True,
        '_table_id': self.id,
        # An object containing the table fields, field types and the chosen
        # names with the table field id as key.
        '_field_objects': {},
        # We are using our own table model manager to implement some queryset
        # helpers.
        'objects': TableModelManager()
    }

    # Construct a query to fetch all the fields of that table.
    fields_query = self.field_set.all()

    # If the field ids are provided we must only fetch the fields of which the
    # ids are in that list.
    if isinstance(field_ids, list):
        if len(field_ids) == 0:
            fields_query = []
        else:
            fields_query = fields_query.filter(pk__in=field_ids)

    # Create a combined list of fields that must be added and belong to the
    # this table.
    fields = fields + [field for field in fields_query]

    # If there are duplicate field names we have to store them in a list so we
    # know later which ones are duplicate.
    duplicate_field_names = []

    # We will have to add each field to with the correct field name and model
    # field to the attribute list in order for the model to work.
    for field in fields:
        field = field.specific
        field_type = field_type_registry.get_by_model(field)
        field_name = field.db_column

        # If attribute_names is True we will not use 'field_{id}' as attribute
        # name, but we will rather use a name the user provided.
        if attribute_names:
            field_name = field.model_attribute_name

            # If the field name already exists we will append '_field_{id}' to
            # each entry that is a duplicate.
            if field_name in attrs:
                duplicate_field_names.append(field_name)
                replaced_field_name = (
                    f'{field_name}_{attrs[field_name].db_column}'
                )
                attrs[replaced_field_name] = attrs.pop(field_name)
            if field_name in duplicate_field_names:
                field_name = f'{field_name}_{field.db_column}'

        # Add the generated objects and information to the dict that
        # optionally can be returned.
        attrs['_field_objects'][field.id] = {
            'field': field,
            'type': field_type,
            'name': field_name
        }

        # Add the field to the attribute dict that is used to generate the
        # model. All the kwargs that are passed to the `get_model_field`
        # method are going to be passed along to the model field.
        attrs[field_name] = field_type.get_model_field(
            field, db_column=field.db_column, verbose_name=field.name
        )

    # Create the model class.
    model = type(
        str(f'Table{self.pk}Model'),
        (models.Model,),
        attrs
    )

    # In some situations the field can only be added once the model class has
    # been generated. So for each field we will call the
    # after_model_generation with the generated model as argument in order to
    # do this. This is for example used by the link row field. It can also be
    # used to make other changes to the class.
    for field_id, field_object in attrs['_field_objects'].items():
        field_object['type'].after_model_generation(
            field_object['field'],
            model,
            field_object['name'],
            manytomany_models
        )

    return model