def method_save_new(instance, *args, **kwargs):
    """Replacement ``save`` used to monkey-patch models with random primary keys.

    Rows that already have a primary key are delegated to the original save;
    new rows get a random primary key assigned before an INSERT-only save.

    NOTE(review): ``self`` below is a free variable -- this function must be
    defined in a scope where ``self`` (the object exposing
    ``method_save_original``) is available via closure. Confirm at the patch
    site.
    """
    if instance.pk:
        # Already persisted: use the unpatched save implementation.
        return self.method_save_original(instance, *args, **kwargs)
    else:
        # Set meta.auto_created to True to have the original save_base
        # not send the pre_save signal which would normally send
        # the instance without a primary key. Since we assign a random
        # primary key any pre_save signal handler that relies on an
        # empty primary key will fail.
        # The meta.auto_created and manual pre_save sending emulates
        # the original behavior. Since meta.auto_created also disables
        # the post_save signal we must also send it ourselves.
        # This hack works with Django 1.11 .save_base() but can break
        # in future versions if that method is updated.
        pre_save.send(
            sender=instance.__class__, instance=instance, raw=False,
            update_fields=None,
        )
        instance._meta.auto_created = True
        instance.pk = RandomPrimaryKeyModelMonkeyPatchMixin.get_unique_primary_key(
            model=instance._meta.model
        )
        instance.id = instance.pk
        # force_insert guarantees an INSERT even though pk is pre-assigned.
        result = instance.save_base(force_insert=True)
        instance._meta.auto_created = False
        post_save.send(
            sender=instance.__class__, instance=instance, created=True,
            update_fields=None, raw=False
        )
        return result
def test_role_mapping_changes(self):
    """Role-mapping listeners must react to delete, update and create."""
    # Per-test setup: put each django user into its starting group.
    self.django_moderator.groups.add(self.moderator_group)
    self.django_admin.groups.add(self.admin_group)
    self.assertEqual(self.django_moderator.groups.all().count(), 1)
    self.assertEqual(self.django_admin.groups.all().count(), 1)

    # Deleting a mapping removes the group membership.
    self.admin_mapping.delete()
    self.assertEqual(self.django_admin.groups.all().count(), 0)

    # Updating a mapping swaps the membership over to the new group.
    self.moderator_mapping.group = self.admin_group
    self.moderator_mapping.save()
    self.assertEqual(self.django_moderator.groups.all().count(), 1)
    self.assertTrue(self.admin_group in self.django_moderator.groups.all())

    # Creating a mapping adds the membership.
    created_mapping = RoleMapping.objects.create(role=self.admin_role, group=self.moderator_group)
    self.assertEqual(self.django_admin.groups.all().count(), 1)
    self.assertTrue(self.moderator_group in self.django_admin.groups.all())

    # Raw sends (as happens during fixture loading) must be ignored.
    pre_save.send(RoleMapping, instance=created_mapping, raw=True)
    self.assertEqual(self.django_admin.groups.all().count(), 1)
    self.assertTrue(self.moderator_group in self.django_admin.groups.all())
def bulk_create_with_signal(cls: models.Model, items, **kwargs):
    """Run ``bulk_create`` while still emitting pre/post_save per item.

    ``bulk_create`` skips model signals; this wrapper sends them manually.
    """
    for item in items:
        pre_save.send(sender=cls, instance=item)
    created = cls.objects.bulk_create(items, **kwargs)
    for item in items:
        post_save.send(sender=cls, instance=item, created=True)
    return created
def on_model_pre_create(self, model, instance):
    """Emit Django's pre_save for the Django twin of ``instance``."""
    model_cls, django_obj = self.get_django_instance(model, instance)
    pre_save.send(
        model_cls,
        instance=django_obj,
        raw=True,
        using=self,
        update_fields=[],
    )
def fake_save(self, instance):
    """Emulate a model save without touching the database.

    Sends pre/post_save, marks every FileField value as committed, and
    returns the file names currently stored under 'avatars'.
    """
    model_cls = instance.__class__
    pre_save.send(sender=model_cls, instance=instance)
    for field in model_cls._meta.fields:
        if not isinstance(field, FileField):
            continue
        getattr(instance, field.name)._committed = True
    post_save.send(sender=model_cls, instance=instance)
    return self.storage.listdir('avatars')[1]
def test_pre_save_with_fields_unchanged(self):
    """Watching only 'a_key': changing a different field must not fire."""
    with must_be_called(False) as handler:
        pre_save_changed.connect(handler, sender=FakeModel, fields=('a_key',))
        instance = FakeModel()
        post_init.send(instance=instance, sender=FakeModel)
        instance.another = 'dont care about this field'
        pre_save.send(instance=instance, sender=FakeModel)
def test_pre_save_with_fields_changed(self):
    """Watching only 'a_key': changing that field must fire the signal."""
    with must_be_called(True) as handler:
        pre_save_changed.connect(handler, sender=FakeModel, fields=('a_key',))
        instance = FakeModel()
        post_init.send(instance=instance, sender=FakeModel)
        instance.a_key = 'change a field that we care about'
        pre_save.send(instance=instance, sender=FakeModel)
def test_pre_save_unchanged(self):
    """Nothing was modified, so pre_save_changed must not fire."""
    with must_be_called(False) as handler:
        pre_save_changed.connect(handler, sender=FakeModel)
        instance = FakeModel()
        # post_init records the list of initial field values.
        post_init.send(instance=instance, sender=FakeModel)
        # No attribute changed in between, hence no pre_save_changed.
        pre_save.send(instance=instance, sender=FakeModel)
def test_pre_save_with_fields_unchanged(self):
    """Only 'a_key' is watched; touching another field must stay silent."""
    with must_be_called(False) as callback:
        pre_save_changed.connect(callback, sender=FakeModel, fields=('a_key',))
        model = FakeModel()
        post_init.send(instance=model, sender=FakeModel)
        model.another = 'dont care about this field'
        pre_save.send(instance=model, sender=FakeModel)
def test_pre_save_with_fields_changed(self):
    """Only 'a_key' is watched; changing it must trigger the callback."""
    with must_be_called(True) as callback:
        pre_save_changed.connect(callback, sender=FakeModel, fields=('a_key',))
        model = FakeModel()
        post_init.send(instance=model, sender=FakeModel)
        model.a_key = 'change a field that we care about'
        pre_save.send(instance=model, sender=FakeModel)
def test_pre_save_changed(self):
    """Any field change (no field filter) must fire pre_save_changed."""
    with must_be_called(True) as handler:
        pre_save_changed.connect(handler, sender=FakeModel)
        instance = FakeModel()
        # post_init records the list of initial field values.
        post_init.send(instance=instance, sender=FakeModel)
        instance.a_key = 'another value'
        pre_save.send(instance=instance, sender=FakeModel)
def save_dirty(self, raw=False, using=None):
    """
    An alternative to save(): instead of writing every field again, only
    update the dirty fields via QuerySet.update().

    Returns True when exactly one row was written, False when there was
    nothing to update.
    """
    if not self.pk:
        # New instance: fall back to a regular save (INSERT).
        self.save(using=using)
        updated = 1
    else:
        # a lot copied from django/db/models/base.py
        using = using or router.db_for_write(self.__class__, instance=self)

        changed_values = self.get_changed_values()
        if not changed_values:
            return False

        pre_save.send(sender=self.__class__, instance=self, raw=raw, using=using)

        # Detect if updating relationship field_ids directly.
        # If the related field object itself has changed then the field_id
        # also changes, in which case we detect and ignore the field_id
        # change, otherwise we'll reload the object again later unnecessarily.
        rel_fields = {f.column: f for f in self._meta.fields if f.rel}
        updated_rel_ids = []
        # Iterate over a snapshot: changed_values is mutated inside the loop,
        # and mutating a dict while iterating its keys() view raises
        # RuntimeError on Python 3 (the original relied on Python 2's
        # list-returning keys()).
        for field_name in list(changed_values.keys()):
            if field_name in rel_fields:
                rel_field = rel_fields[field_name]
                value = changed_values[rel_field.column]
                obj_value = getattr(self, rel_field.name).pk
                del changed_values[rel_field.column]
                changed_values[rel_field.name] = value
                if value != obj_value:
                    updated_rel_ids.append(rel_field.column)

        # Maps db column names back to field names if they differ.
        field_map = {f.column: f.name for f in self._meta.fields if f.db_column}
        # items() replaces the Python 2-only iteritems(); snapshot again
        # because entries are re-keyed below.
        for field_from, field_to in list(field_map.items()):
            if field_from in changed_values:
                changed_values[field_to] = changed_values[field_from]
                del changed_values[field_from]

        updated = self.__class__.objects.filter(pk=self.pk).update(**changed_values)

        # Reload updated relationships so in-memory objects match the new ids.
        for field_name in updated_rel_ids:
            field = rel_fields[field_name]
            field_pk = getattr(self, field_name)
            rel_obj = field.related.parent_model.objects.get(pk=field_pk)
            setattr(self, field.name, rel_obj)

        self._reset_state()
        post_save.send(sender=self.__class__, instance=self, created=False, raw=raw, using=using)
    return updated == 1
def preview(request):
    """A simple preview for Posts."""
    # TODO: Save the preview as draft somewhere
    from almparse.parser import transform

    raw_body = request.POST.get('data')
    post = PostData(body=transform(raw_body))
    # Trigger pre_save listeners without persisting the post.
    pre_save.send(sender=PostData, request=request, instance=post)
    context_instance = RequestContext(request)
    return render_to_response('board/post_preview.html', {'post': post}, context_instance=context_instance)
def on_model_pre_create(self, model, instance):
    """Best-effort pre_save emission; failures are logged, never raised."""
    try:
        model_cls, django_obj = self.get_django_instance(model, instance)
        pre_save.send(model_cls, raw=True, using=self, instance=django_obj, update_fields=[])
    except Exception as exc:
        logger.warning('[!] on_model_pre_create signal failed: {}'.format(str(exc)))
def preview(request):
    """A simple preview for Posts."""
    # TODO: Save the preview as draft somewhere
    from almparse.parser import transform

    body_source = request.POST.get('data')
    preview_post = PostData(body=transform(body_source))
    # Let pre_save listeners see the preview without saving it.
    pre_save.send(sender=PostData, request=request, instance=preview_post)
    context_instance = RequestContext(request)
    return render_to_response('board/post_preview.html', {'post': preview_post}, context_instance=context_instance)
def post(self, request, *args, **kwargs):
    """Bulk-update the interval (every/period) of the tasks in the queryset.

    ``status`` is 1 on success, 2 on failure; ``locals()`` feeds the template
    context, so local names are deliberately kept as-is.
    """
    post_data = request.POST
    interval_data = {
        "every": post_data.get("every"),
        "period": post_data.get("period")
    }
    try:
        # NOTE(review): sends the model class TimedTask as ``instance`` --
        # presumably to poke listeners once; confirm receivers tolerate it.
        pre_save.send(sender=PeriodicTask, instance=TimedTask)
        self.get_queryset().update(**interval_data)
        status = 1
    except Exception:
        # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
        # are no longer swallowed.
        status = 2
    return my_render(request, "task/interval_edit.html", locals())
def save_base(
    self,
    raw=False,
    force_insert=False,
    force_update=False,
    using=None,
    update_fields=None,
):
    """
    Copied from base class for a minor change. This is an ugly overwriting
    but since Django's ``save_base`` method does not differ between versions
    1.8 and 1.10, that way of implementing wouldn't harm the flow.

    The "minor change": pre_save/post_save are only sent when their names
    are absent from ``self.signals_to_disable``, which is reset at the end.
    """
    using = using or router.db_for_write(self.__class__, instance=self)
    assert not (force_insert and (force_update or update_fields))
    assert update_fields is None or len(update_fields) > 0
    cls = origin = self.__class__
    # Proxy models save through their concrete model.
    if cls._meta.proxy:
        cls = cls._meta.concrete_model
    meta = cls._meta
    if not meta.auto_created and "pre_save" not in self.signals_to_disable:
        pre_save.send(
            sender=origin,
            instance=self,
            raw=raw,
            using=using,
            update_fields=update_fields,
        )
    with transaction.atomic(using=using, savepoint=False):
        if not raw:
            self._save_parents(cls, using, update_fields)
        updated = self._save_table(raw, cls, force_insert, force_update, using, update_fields)
    # Record which database the instance now lives on.
    self._state.db = using
    self._state.adding = False
    if not meta.auto_created and "post_save" not in self.signals_to_disable:
        post_save.send(
            sender=origin,
            instance=self,
            created=(not updated),
            update_fields=update_fields,
            raw=raw,
            using=using,
        )
    # Empty the signals in case it might be used somewhere else in future
    self.signals_to_disable = []
def save_dirty(self):
    '''
    An alternative to save: instead of writing every field again, only
    update the dirty fields via QuerySet.update.

    Returns True when exactly one row was written, False when nothing
    changed.
    '''
    if not self.pk:
        # New instance: plain save (INSERT).
        self.save()
        updated = 1
    else:
        changed_values = self.get_changed_values(unpickle=False)
        if not changed_values:
            return False

        pre_save.send(sender=self.__class__, instance=self)

        # Detect if updating relationship field_ids directly.
        # If the related field object itself has changed then the field_id
        # also changes, in which case we detect and ignore the field_id
        # change, otherwise we'll reload the object again later unnecessarily.
        rel_fields = {f.column: f for f in self._meta.fields if f.rel}
        updated_rel_ids = []
        # Snapshot the keys: the dict is mutated inside the loop, which
        # raises RuntimeError on Python 3 when iterating the live view
        # (the original relied on Python 2's list-returning keys()).
        for field_name in list(changed_values.keys()):
            if field_name in rel_fields:
                rel_field = rel_fields[field_name]
                value = changed_values[rel_field.column]
                obj_value = getattr(self, rel_field.name).pk
                del changed_values[rel_field.column]
                changed_values[rel_field.name] = value
                if value != obj_value:
                    updated_rel_ids.append(rel_field.column)

        # Maps db column names back to field names if they differ.
        field_map = {f.column: f.name for f in self._meta.fields if f.db_column}
        # items() replaces the Python 2-only iteritems(); snapshot again
        # because entries are re-keyed below.
        for field_from, field_to in list(field_map.items()):
            if field_from in changed_values:
                changed_values[field_to] = changed_values[field_from]
                del changed_values[field_from]

        updated = self.__class__.objects.filter(pk=self.pk).update(**changed_values)

        # Reload updated relationships so in-memory objects match the ids.
        for field_name in updated_rel_ids:
            field = rel_fields[field_name]
            field_pk = getattr(self, field_name)
            rel_obj = field.related.parent_model.objects.get(pk=field_pk)
            setattr(self, field.name, rel_obj)

        self._reset_state()
        post_save.send(sender=self.__class__, instance=self, created=False)
    return updated == 1
def update(self, **kwargs):
    """Bulk update that also emits pre_save/post_save for every row.

    QuerySet.update() normally sends no model signals; this wrapper sends
    them manually around the SQL UPDATE.
    """
    for instance in self:
        pre_save.send(sender=instance.__class__, instance=instance, raw=False, using=self.db, update_fields=kwargs.keys())
    # NOTE(review): passing ``self.all()`` (a fresh clone) as the second
    # argument to super() is unusual but valid -- the clone is an instance
    # of this class. Presumably done to avoid consuming this queryset's
    # result cache; confirm intent.
    result = super(UpdateSignalsQuerySet, self.all()).update(**kwargs)
    # The second iteration reuses the result cache filled above, so these
    # are the pre-update instances; the setattr below patches them to look
    # like the post-update rows.
    for instance in self:
        for key, value in kwargs.items():
            # Fake setting off values from kwargs
            setattr(instance, key, value)
        post_save.send(sender=instance.__class__, instance=instance, created=False, raw=False, using=self.db, update_fields=kwargs.keys())
    return result
def post(self, request, *args, **kwargs):
    """Validate the submitted job form and bulk-update the queryset.

    ``status`` is 1 on success, 2 on any validation/update failure;
    ``locals()`` feeds the template context, so local names are kept as-is.
    """
    post_data = request.POST
    enabled_value = str(post_data.get("enabled")) == str(True)
    is_send_email_value = str(post_data.get("is_send_email")) == str(True)
    job_data = {
        "nice_name": post_data.get("nice_name", ''),
        "host": post_data.get("host", ''),
        "name": post_data.get("name", ''),
        "interval_id": post_data.get("interval", ''),
        "crontab_id": post_data.get("crontab", ''),
        "args": post_data.get("args", ''),
        "kwargs": post_data.get("kwargs", ''),
        "queue": post_data.get("queue", ''),
        "enabled": enabled_value,
        "run_status": True if enabled_value else False,
        "exchange": post_data.get("exchange", ''),
        "routing_key": post_data.get("routing_key", ''),
        "expires": None,
        "description": post_data.get("description", ''),
        "date_changed": datetime.now(),
        "email": post_data.get("email"),
        "is_send_email": is_send_email_value
    }
    # interval and crontab schedules are mutually exclusive.
    if job_data['interval_id'] and job_data['crontab_id']:
        status = 2
        return my_render(request, "task/job_edit.html", locals())
    task_value = post_data.get("regtask")
    if task_value:
        job_data["task"] = task_value
    kwargs_vaule = job_data["kwargs"]
    args_value = job_data["args"]
    # kwargs must be valid JSON.
    try:
        json.loads(kwargs_vaule)
    except Exception:
        # Narrowed from a bare ``except:``.
        status = 2
        return my_render(request, "task/job_edit.html", locals())
    # args, when provided, must be valid JSON too.
    if args_value:
        try:
            json.loads(args_value)
        except Exception:
            status = 2
            return my_render(request, "task/job_edit.html", locals())
    try:
        pre_save.send(sender=PeriodicTask, instance=TimedTask)
        self.get_queryset().update(**job_data)
        status = 1
    except Exception as e:
        # print() call: the original Python 2 ``print e`` is a SyntaxError
        # on Python 3.
        print(e)
        status = 2
    return my_render(request, "task/job_edit.html", locals())
def test_pre_save_with_fields_changed(self):
    """Changing the watched field fires the signal with old/new values."""
    with must_be_called(True) as handler:
        pre_save_changed.connect(handler, sender=FakeModel, fields=("a_key",))
        instance = FakeModel()
        post_init.send(instance=instance, sender=FakeModel)
        instance.a_key = "change a field that we care about"
        pre_save.send(instance=instance, sender=FakeModel)
        expected = {"a_key": ("a value", "change a field that we care about")}
        assert handler.kwargs["changed_fields"] == expected
def save(self, objects, connection):
    """
    Saves the objects to the given connection

    :param objects: the objects to save
    :param connection: the connection which should be used for saving the object
    :return: nothing, the objects are updated with the "id" property
    """
    def _serialize_object(object_to_serialize):
        # Refuse to serialize search results that lack full metadata.
        if object_to_serialize.is_incomplete:
            # Fix: the original wrote ``"... %s ..." + "..." % obj`` -- the
            # ``%`` bound only to the second literal (which has no
            # placeholder), so raising crashed with TypeError instead of
            # producing this message. Implicit concatenation applies the
            # formatting to the whole string.
            raise AttributeError(
                "Object %s is incomplete, probably obtained from search and can not be saved. "
                "To get metadata-complete record, call .update() on the object" % object_to_serialize
            )
        return {
            "metadata": object_to_serialize.metadata,
            "bitstream": object_to_serialize.get_local_bitstream(),
            "slug": object_to_serialize.slug,
        }

    objects_to_update = []
    objects_to_create = []
    if connection is None:
        connection = self.connection

    # Partition into updates (already stored on this connection) vs creates,
    # emitting pre_save for every object either way.
    for o in objects:
        if o.objects_fedora_connection == connection and o.id:
            objects_to_update.append(o)
            pre_save.send(sender=o.__class__, instance=o, raw=False, using="repository", update_fields=None)
        else:
            objects_to_create.append(o)
            pre_save.send(sender=o.__class__, instance=o, raw=False, using="repository", update_fields=None)

    if objects_to_update:
        metadata = connection.update_objects([_serialize_object(o) for o in objects_to_update])
        for md, obj in zip(metadata, objects_to_update):
            obj.metadata = md
    if objects_to_create:
        metadata = connection.create_objects([_serialize_object(o) for o in objects_to_create])
        for md, obj in zip(metadata, objects_to_create):
            obj.metadata = md

    for o in objects:
        # NOTE(review): created=None (neither True nor False) -- receivers
        # that branch on ``created`` should be checked.
        post_save.send(
            sender=o.__class__, instance=o, created=None, raw=False, using="repository", update_fields=None
        )
def _move_item(self, request, startorder, endorder):
    """Move one row from ``startorder`` to ``endorder``, shifting the rows
    in between by one and emitting pre/post_save for each shifted row
    (qs.update() itself sends no signals).
    """
    # Pick the up/down offsets depending on the requested sort direction.
    if self._get_order_direction(request) != '-1':
        order_up, order_down = self.default_order_directions[0]
    else:
        order_up, order_down = self.default_order_directions[1]
    if startorder < endorder - order_up:
        # Moving down: rows in (start, final] shift up by one.
        finalorder = endorder - order_up
        move_filter = {
            '{0}__gte'.format(self.default_order_field): startorder,
            '{0}__lte'.format(self.default_order_field): finalorder,
        }
        order_by = self.default_order_field
        move_update = {self.default_order_field: F(self.default_order_field) - 1}
    elif startorder > endorder + order_down:
        # Moving up: rows in [final, start) shift down by one.
        finalorder = endorder + order_down
        move_filter = {
            '{0}__gte'.format(self.default_order_field): finalorder,
            '{0}__lte'.format(self.default_order_field): startorder,
        }
        order_by = '-{0}'.format(self.default_order_field)
        move_update = {self.default_order_field: F(self.default_order_field) + 1}
    else:
        # No effective move requested.
        return self.model.objects.none()

    with transaction.atomic():
        extra_model_filters = self.get_extra_model_filters(request)
        filters = {self.default_order_field: startorder}
        filters.update(extra_model_filters)
        move_filter.update(extra_model_filters)
        obj = self.model.objects.get(**filters)
        obj_qs = self.model.objects.filter(pk=obj.pk)
        move_qs = self.model.objects.filter(**move_filter).order_by(order_by)
        for instance in move_qs:
            pre_save.send(
                self.model,
                instance=instance,
                update_fields=[self.default_order_field],
                raw=False,
                using=None or router.db_for_write(
                    self.model, instance=instance),
            )
        # using qs.update avoid multi [pre|post]_save signal on obj.save()
        # Park the moved row beyond the max order first so the shift below
        # cannot collide with it, then drop it into its final slot.
        obj_qs.update(**{self.default_order_field: self.get_max_order(request, obj) + 1})
        move_qs.update(**move_update)
        obj_qs.update(**{self.default_order_field: finalorder})
        for instance in move_qs:
            post_save.send(self.model, instance=instance, update_fields=[self.default_order_field])

    # Report the new (pk, order) pairs of the shifted range.
    query_set = self.model.objects.filter(**move_filter).order_by(self.default_order_field).values_list('pk', self.default_order_field)
    return [dict(pk=pk, order=order) for pk, order in query_set]
def update_model_fields(obj, update_fields=None, trigger_signals=False):
    """Persist the given fields of ``obj`` via a queryset UPDATE.

    Only attributes named in ``update_fields`` that exist on ``obj`` are
    written. When ``trigger_signals`` is true, pre_save/post_save are sent
    manually (queryset update sends none).

    Fix: ``update_fields`` previously defaulted to a shared mutable list.

    :param obj: model instance whose row is updated (matched by ``pk``).
    :param update_fields: iterable of attribute names to write.
    :param trigger_signals: send pre_save/post_save around the update.
    :return: number of rows updated.
    """
    if update_fields is None:
        update_fields = []
    field_entry = {}
    for k in update_fields:
        if hasattr(obj, k):
            field_entry[k] = getattr(obj, k)
    if trigger_signals:
        pre_save.send(sender=obj.__class__, instance=obj)
    rows = obj.__class__.objects.filter(pk=obj.pk).update(**field_entry)
    if trigger_signals:
        post_save.send(sender=obj.__class__, instance=obj)
    return rows
def test_pre_save_changed(self):
    """Any change fires the signal and reports the old/new value pair."""
    with must_be_called(True) as handler:
        pre_save_changed.connect(handler, sender=FakeModel)
        instance = FakeModel()
        # post_init records the list of initial field values.
        post_init.send(instance=instance, sender=FakeModel)
        instance.a_key = "another value"
        pre_save.send(instance=instance, sender=FakeModel)
        expected = {"a_key": ("a value", "another value")}
        assert handler.kwargs["changed_fields"] == expected
def post(self, request, id):
    """Update one CrontabSchedule row from the submitted form fields.

    ``status`` is 1 on success, 2 on failure; ``locals()`` feeds the
    template context, so local names are kept as-is (including ``id``,
    which shadows the builtin but is part of the URL-conf interface).
    """
    crontab_data = {
        "minute": request.POST.get("minute", ''),
        "hour": request.POST.get("hour", ''),
        "day_of_week": request.POST.get("day_of_week", ''),
        "day_of_month": request.POST.get("day_of_month", ''),
        "month_of_year": request.POST.get("month_of_year", '')
    }
    try:
        # NOTE(review): sends the model class TimedTask as ``instance`` --
        # confirm receivers tolerate a class rather than an instance.
        pre_save.send(sender=PeriodicTask, instance=TimedTask)
        CrontabSchedule.objects.filter(pk=id).update(**crontab_data)
        status = 1
    except Exception:
        # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
        # are no longer swallowed.
        status = 2
    return my_render(request, "task/crontab_edit.html", locals())
def test_compare_after_to_python(self):
    """
    Field values (e.g. datetimes) are equal even if set via string.

    Ensures that to_python() is called prior to comparison between
    old & new values.
    """
    with must_be_called(False) as handler:
        pre_save_changed.connect(handler, sender=FakeModel, fields=('a_datetime',))
        instance = FakeModel()
        instance.a_datetime = '2017-01-01T00:00:00.000000Z'
        post_init.send(instance=instance, sender=FakeModel)
        # Same instant as the string above even though the type differs,
        # so the signal must not fire.
        instance.a_datetime = datetime.datetime(2017, 1, 1, 0, 0, 0, 0, utc)
        pre_save.send(instance=instance, sender=FakeModel)
def update(self, **values):
    """Bulk update that also emits save signals and clears the DRF cache."""
    # NOTE(review): ``sender=self.__class__`` is the queryset class, not the
    # model, and pre_save receivers normally get no ``created`` kwarg --
    # confirm all connected receivers accept this non-standard signature.
    pre_save.send(sender=self.__class__, instance=self, created=False)
    result = super().update(**values)
    post_save.send(
        sender=self.__class__,
        instance=self,
        created=False,
        raw=False,
        update_fields=self.get_changed_columns(),
    )
    # We also clean the DRF cache
    # NOTE(review): QuerySet.update() returns the number of rows matched;
    # verify clear_for_instance() really expects that integer rather than
    # a model instance.
    clear_for_instance(result)
    return result
def test_clearable(self):
    """
    A ClearableFileInput will set field value to False before pre_save
    """
    profile = models.Profile(avatar='avatars/test.jpg')
    model_cls = profile.__class__
    # The widget sets the field to False right before pre_save fires.
    profile.avatar = False
    pre_save.send(sender=model_cls, instance=profile)
    # Saving will then properly clear the field.
    profile.avatar = ''
    post_save.send(sender=model_cls, instance=profile)
    # The FileField is cleared but never explicitly deleted: the file stays.
    remaining = self.storage.listdir('avatars')[1]
    self.assertEqual(len(remaining), 1)
def test_compare_after_to_python(self):
    """
    Field values (e.g. datetimes) are equal even if set via string.

    Ensures that to_python() is called prior to comparison between
    old & new values.
    """
    with must_be_called(False) as callback:
        pre_save_changed.connect(callback, sender=FakeModel, fields=('a_datetime',))
        model = FakeModel()
        model.a_datetime = '2017-01-01T00:00:00.000000Z'
        post_init.send(instance=model, sender=FakeModel)
        # Identical moment in time, different type -- no signal expected.
        model.a_datetime = datetime.datetime(2017, 1, 1, 0, 0, 0, 0, utc)
        pre_save.send(instance=model, sender=FakeModel)
def update_model_change_fields(obj, update_params=None, trigger_signals=False):
    """Update only the attributes whose values actually changed on ``obj``.

    Each key/value in ``update_params`` is compared against the current
    attribute; unchanged or unknown keys are skipped, changed ones are set
    on ``obj`` and written via a queryset UPDATE.

    Fixes: shared mutable default ``update_params={}``; Python 2-only
    ``iteritems()``.

    :param obj: model instance whose row is updated (matched by ``pk``).
    :param update_params: mapping of attribute name -> new value.
    :param trigger_signals: send pre_save/post_save around the update.
    :return: number of rows updated (0 when nothing changed).
    """
    if update_params is None:
        update_params = {}
    field_entry = {}
    for k, v in update_params.items():
        if hasattr(obj, k) and getattr(obj, k) != v:
            field_entry[k] = v
            setattr(obj, k, v)
    if not field_entry:
        # Nothing changed: skip the UPDATE (and the signals) entirely.
        return 0
    if trigger_signals:
        pre_save.send(sender=obj.__class__, instance=obj)
    rows = obj.__class__.objects.filter(pk=obj.pk).update(**field_entry)
    if trigger_signals:
        post_save.send(sender=obj.__class__, instance=obj)
    return rows
def run_command(self, name: str, command: str):
    """Create one Action per worker, bulk-insert them while emitting the
    save signals that bulk_create skips, then schedule each terminal run.
    """
    from pentest_project.models import Action
    from pentest_project.tasks import run_action_terminal

    actions = [
        Action(name=name, command=command, worker=worker)
        for worker in self.all()
    ]
    db_alias = Action.objects.db
    for action in actions:
        pre_save.send(sender=Action, instance=action, raw=False, using=db_alias)
    saved_instances = Action.objects.bulk_create(actions)
    for action in saved_instances:
        post_save.send(sender=Action, instance=action, created=True, raw=False, using=db_alias)
        run_action_terminal.apply_async((action.pk,), countdown=TASK_COUNTDOWN)
    return actions
def get_current_user(self):
    """Return the "current" user as determined by a pre_save listener,
    without permanently modifying this instance.

    NOTE(review): this assumes a connected pre_save receiver overwrites
    ``created_by`` with the request's current user -- confirm that receiver
    exists and that raw=True is the path it handles.
    """
    # Remember the stored creator before the signal can clobber it.
    _user = self.created_by
    pre_save.send(sender=self.__class__, instance=self, raw=True, using="default")
    # Whatever the listener wrote is the "current" user; restore the
    # original creator so the instance is left untouched.
    _current = self.created_by
    self.created_by = _user
    return _current
def merge_into(self, other, callback=lambda x: x, using='default'):
    """
    Collects objects related to ``self`` and updates their foreign keys to
    point to ``other``.

    If ``callback`` is specified, it will be executed on each collected chunk
    before any changes are made, and should return a modified list of results
    that still need updated.

    NOTE: Duplicates (unique constraints) which exist and are bound to
    ``other`` are preserved, and relations on ``self`` are discarded.

    Fixes: Python 2-only ``iteritems()`` calls; ``transaction.atomic()``
    now runs on the same ``using`` alias as the UPDATE it wraps.

    NOTE(review): ``callback`` is documented but never invoked in this body
    -- confirm whether that is intentional before removing the parameter.
    """
    # TODO: proper support for database routing
    s_model = type(self)

    # Find all the objects than need to be deleted.
    collector = EverythingCollector(using=using)
    collector.collect([self])
    for model, objects in collector.data.items():
        # find all potential keys which match our type
        fields = {
            f.name for f in model._meta.fields
            if isinstance(f, ForeignKey) and f.rel.to == s_model
            if f.rel.to
        }
        if not fields:
            # the collector pulls in the self reference, so if it's our model
            # we actually assume it's probably not related to itself, and its
            # perfectly ok
            if model == s_model:
                continue
            raise TypeError('Unable to determine related keys on %r' % model)
        for obj in objects:
            send_signals = not model._meta.auto_created
            # find fields which need changed
            update_kwargs = {}
            for f_name in fields:
                if getattr(obj, f_name) == self:
                    update_kwargs[f_name] = other
            if not update_kwargs:
                # as before, if we're referencing ourself, this is ok
                if obj == self:
                    continue
                raise ValueError('Mismatched row present in related results')
            signal_kwargs = {
                'sender': model,
                'instance': obj,
                'using': using,
                'migrated': True,
            }
            if send_signals:
                pre_delete.send(**signal_kwargs)
                post_delete.send(**signal_kwargs)
            for k, v in update_kwargs.items():
                setattr(obj, k, v)
            if send_signals:
                # NOTE(review): pre_save normally carries no ``created``
                # kwarg; receivers must tolerate the extra argument.
                pre_save.send(created=True, **signal_kwargs)
            try:
                # Savepoint on the *target* database so the IntegrityError
                # fallback below can still write on that alias.
                with transaction.atomic(using=using):
                    model.objects.using(using).filter(pk=obj.pk).update(**update_kwargs)
            except IntegrityError:
                # duplicate key exists, destroy the relations
                model.objects.using(using).filter(pk=obj.pk).delete()
            if send_signals:
                post_save.send(created=True, **signal_kwargs)
def merge_into(self, other, callback=lambda x: x, using='default'):
    """
    Collects objects related to ``self`` and updates their foreign keys to
    point to ``other``.

    If ``callback`` is specified, it will be executed on each collected chunk
    before any changes are made, and should return a modified list of results
    that still need updated.

    NOTE: Duplicates (unique constraints) which exist and are bound to
    ``other`` are preserved, and relations on ``self`` are discarded.

    NOTE(review): ``callback`` is documented but never invoked in this body
    -- confirm whether that is intentional.
    """
    # TODO: proper support for database routing
    s_model = type(self)

    # Find all the objects than need to be deleted.
    collector = EverythingCollector(using=using)
    collector.collect([self])
    for model, objects in six.iteritems(collector.data):
        # find all potential keys which match our type
        fields = set(f.name for f in model._meta.fields
                     if isinstance(f, ForeignKey)
                     and f.rel.to == s_model
                     if f.rel.to)
        if not fields:
            # the collector pulls in the self reference, so if it's our model
            # we actually assume it's probably not related to itself, and its
            # perfectly ok
            if model == s_model:
                continue
            raise TypeError('Unable to determine related keys on %r' % model)
        for obj in objects:
            # Auto-created (through) models get no signals.
            send_signals = not model._meta.auto_created
            # find fields which need changed
            update_kwargs = {}
            for f_name in fields:
                if getattr(obj, f_name) == self:
                    update_kwargs[f_name] = other
            if not update_kwargs:
                # as before, if we're referencing ourself, this is ok
                if obj == self:
                    continue
                raise ValueError('Mismatched row present in related results')
            signal_kwargs = {
                'sender': model,
                'instance': obj,
                'using': using,
                'migrated': True,
            }
            if send_signals:
                pre_delete.send(**signal_kwargs)
                post_delete.send(**signal_kwargs)
            for k, v in six.iteritems(update_kwargs):
                setattr(obj, k, v)
            if send_signals:
                # NOTE(review): pre_save normally carries no ``created``
                # kwarg; connected receivers must tolerate the extra arg.
                pre_save.send(created=True, **signal_kwargs)
            try:
                # Savepoint so the IntegrityError fallback can still write.
                with transaction.atomic(using=using):
                    model.objects.using(using).filter(pk=obj.pk).update(
                        **update_kwargs)
            except IntegrityError:
                # duplicate key exists, destroy the relations
                model.objects.using(using).filter(pk=obj.pk).delete()
            if send_signals:
                post_save.send(created=True, **signal_kwargs)
def move_item(self, startorder, endorder, extra_model_filters=None):
    """Move one row from ``startorder`` to ``endorder``, shifting every row
    in between by one, and emit pre/post_save for each shifted row (the bulk
    ``move_qs.update`` sends no signals itself).

    Returns a list of ``{'pk', 'order'}`` dicts for all rows touched, or an
    empty queryset when start == end.
    """
    model = self.model
    rank_field = self.default_order_field
    if endorder < startorder:  # Drag up
        move_filter = {
            '{0}__gte'.format(rank_field): endorder,
            '{0}__lte'.format(rank_field): startorder - 1,
        }
        move_delta = +1
        order_by = '-{0}'.format(rank_field)
    elif endorder > startorder:  # Drag down
        move_filter = {
            '{0}__gte'.format(rank_field): startorder + 1,
            '{0}__lte'.format(rank_field): endorder,
        }
        move_delta = -1
        order_by = rank_field
    else:
        # No effective move.
        return model.objects.none()
    obj_filters = {rank_field: startorder}
    if extra_model_filters is not None:
        obj_filters.update(extra_model_filters)
        move_filter.update(extra_model_filters)
    with transaction.atomic():
        try:
            obj = model.objects.get(**obj_filters)
        except model.MultipleObjectsReturned:
            # Duplicate rank values detected.
            # NOTE(review): ``AUTO_ORDER == True`` only matches the exact
            # singleton True, not other truthy settings values -- confirm
            # that is intended.
            if AUTO_ORDER == True:
                # Run order script for model... Quite nice...
                orderfield = model._meta.ordering[0]
                if orderfield[0] == '-':
                    orderfield = orderfield[1:]
                # Renumber every row sequentially to repair the ordering.
                for order, obj in enumerate(model.objects.iterator(), start=1):
                    setattr(obj, orderfield, order)
                    obj.save()
                # After order should be ok, get obj
                obj = model.objects.get(**obj_filters)
            else:
                msg = "Detected non-unique values in field '{0}' used for sorting this model.\nConsider to run \n"\
                    " python manage.py reorder {1}\n"\
                    "to adjust this inconsistency."
                # noinspection PyProtectedMember
                raise model.MultipleObjectsReturned(
                    msg.format(rank_field, model._meta.label))
        move_qs = model.objects.filter(**move_filter).order_by(order_by)
        # Materialize now: the in-memory copies get the shifted rank so the
        # signal receivers (and the return value) see the new values.
        move_objs = list(move_qs)
        for instance in move_objs:
            setattr(instance, rank_field, getattr(instance, rank_field) + move_delta)
            # Do not run `instance.save()`, because it will be updated later in bulk by `move_qs.update`.
            pre_save.send(
                model,
                instance=instance,
                update_fields=[rank_field],
                raw=False,
                using=router.db_for_write(model, instance=instance),
            )
        move_qs.update(**{rank_field: F(rank_field) + move_delta})
        for instance in move_objs:
            post_save.send(
                model,
                instance=instance,
                update_fields=[rank_field],
                raw=False,
                using=router.db_for_write(model, instance=instance),
                created=False,
            )
        # Finally drop the dragged row into its target slot.
        setattr(obj, rank_field, endorder)
        obj.save(update_fields=[rank_field])
    return [{
        'pk': instance.pk,
        'order': getattr(instance, rank_field)
    } for instance in chain(move_objs, [obj])]
def _move_item(self, request, startorder, endorder):
    """Move one row from ``startorder`` to ``endorder`` inside a
    transaction, shifting the rows in between and emitting pre/post_save
    for each shifted row (qs.update() itself sends no signals).
    """
    # Offsets depend on the requested sort direction.
    if self._get_order_direction(request) != '-1':
        order_up, order_down = 0, 1
    else:
        order_up, order_down = 1, 0
    if startorder < endorder - order_up:
        # Moving down: rows in (start, final] shift up by one.
        finalorder = endorder - order_up
        move_filter = {
            '{0}__gte'.format(self.default_order_field): startorder,
            '{0}__lte'.format(self.default_order_field): finalorder,
        }
        order_by = self.default_order_field
        move_update = {
            self.default_order_field: F(self.default_order_field) - 1
        }
    elif startorder > endorder + order_down:
        # Moving up: rows in [final, start) shift down by one.
        finalorder = endorder + order_down
        move_filter = {
            '{0}__gte'.format(self.default_order_field): finalorder,
            '{0}__lte'.format(self.default_order_field): startorder,
        }
        order_by = '-{0}'.format(self.default_order_field)
        move_update = {
            self.default_order_field: F(self.default_order_field) + 1
        }
    else:
        # No effective move requested.
        return self.model.objects.none()

    with transaction.atomic():
        extra_model_filters = self.get_extra_model_filters(request)
        filters = {self.default_order_field: startorder}
        filters.update(extra_model_filters)
        move_filter.update(extra_model_filters)
        try:
            obj = self.model.objects.get(**filters)
        # NOTE(review): ``exc`` is bound but unused, and the re-raise below
        # does not chain with ``from exc``.
        except self.model.MultipleObjectsReturned as exc:
            msg = "Detected non-unique values in field '{}' used for sorting this model.\nConsider to run \n"\
                " python manage.py reorder {}\n"\
                "to adjust this inconsistency."
            raise self.model.MultipleObjectsReturned(
                msg.format(self.default_order_field, self.model._meta.label))
        obj_qs = self.model.objects.filter(pk=obj.pk)
        move_qs = self.model.objects.filter(
            **move_filter).order_by(order_by)
        for instance in move_qs:
            pre_save.send(
                self.model,
                instance=instance,
                update_fields=[self.default_order_field],
                raw=False,
                using=None or router.db_for_write(self.model,
                                                  instance=instance),
            )
        # using qs.update avoid multi [pre|post]_save signal on obj.save()
        # Park the moved row beyond max order so the shift cannot collide,
        # then drop it into its final slot.
        obj_qs.update(**{
            self.default_order_field: self.get_max_order(request, obj) + 1
        })
        move_qs.update(**move_update)
        obj_qs.update(**{self.default_order_field: finalorder})
        for instance in move_qs:
            post_save.send(self.model,
                           instance=instance,
                           update_fields=[self.default_order_field],
                           raw=False,
                           using=router.db_for_write(self.model,
                                                     instance=instance),
                           created=False)

    # Report the new (pk, order) pairs of the shifted range.
    query_set = self.model.objects.filter(**move_filter).order_by(
        self.default_order_field).values_list('pk',
                                              self.default_order_field)
    return [dict(pk=pk, order=order) for pk, order in query_set]
def bulk_create(self, objs, **kwargs):
    """bulk_create that additionally emits pre_save for every object."""
    from django.db.models.signals import pre_save

    for obj in objs:
        pre_save.send(obj.__class__, instance=obj)
    return super().bulk_create(objs, **kwargs)
def _move_item(self, request, startorder, endorder):
    """Move the row ordered at *startorder* to *endorder*, shifting the rows
    in between by one position.

    Returns a list of ``{'pk': ..., 'order': ...}`` dicts for every row whose
    ordering value changed, or an empty queryset when no move is needed.
    """
    if self._get_order_direction(request) != '-1':
        order_up, order_down = self.default_order_directions[0]
    else:
        order_up, order_down = self.default_order_directions[1]
    if startorder < endorder - order_up:
        # Moving the item down: rows between the two positions shift up.
        finalorder = endorder - order_up
        move_filter = {
            '{0}__gte'.format(self.default_order_field): startorder,
            '{0}__lte'.format(self.default_order_field): finalorder,
        }
        order_by = self.default_order_field
        move_update = {
            self.default_order_field: F(self.default_order_field) - 1
        }
    elif startorder > endorder + order_down:
        # Moving the item up: rows between the two positions shift down.
        finalorder = endorder + order_down
        move_filter = {
            '{0}__gte'.format(self.default_order_field): finalorder,
            '{0}__lte'.format(self.default_order_field): startorder,
        }
        order_by = '-{0}'.format(self.default_order_field)
        move_update = {
            self.default_order_field: F(self.default_order_field) + 1
        }
    else:
        # Start and end coincide; nothing to move.
        return self.model.objects.none()

    with transaction.atomic():
        extra_model_filters = self.get_extra_model_filters(request)
        filters = {self.default_order_field: startorder}
        filters.update(extra_model_filters)
        move_filter.update(extra_model_filters)
        obj = self.model.objects.get(**filters)
        obj_qs = self.model.objects.filter(pk=obj.pk)
        move_qs = self.model.objects.filter(
            **move_filter).order_by(order_by)
        for instance in move_qs:
            pre_save.send(
                self.model,
                instance=instance,
                update_fields=[self.default_order_field],
                raw=False,
                # NOTE: the original wrote `None or router.db_for_write(...)`,
                # which is identical to calling the router directly.
                using=router.db_for_write(self.model, instance=instance),
            )
        # Using qs.update() avoids the duplicate [pre|post]_save signals
        # that obj.save() would emit.
        # Park the moved row above the current maximum so the block shift
        # below cannot collide with its ordering value.
        obj_qs.update(**{
            self.default_order_field: self.get_max_order(request, obj) + 1
        })
        move_qs.update(**move_update)
        obj_qs.update(**{self.default_order_field: finalorder})
        for instance in move_qs:
            post_save.send(
                self.model,
                instance=instance,
                update_fields=[self.default_order_field],
                raw=False,
                using=router.db_for_write(self.model, instance=instance),
                created=False,
            )
    query_set = self.model.objects.filter(**move_filter).order_by(
        self.default_order_field).values_list('pk', self.default_order_field)
    return [dict(pk=pk, order=order) for pk, order in query_set]
def _move_item(self, request, startorder, endorder):
    """Move the row ordered at *startorder* to *endorder*, shifting the rows
    in between by one position.

    Returns a list of ``{'pk': ..., 'order': ...}`` dicts for every row whose
    ordering value changed, or an empty queryset when no move is needed.

    Raises ``self.model.MultipleObjectsReturned`` (chained to the original
    error) when the ordering field holds duplicate values.
    """
    if self._get_order_direction(request) != '-1':
        order_up, order_down = 0, 1
    else:
        order_up, order_down = 1, 0
    if startorder < endorder - order_up:
        # Moving the item down: rows between the two positions shift up.
        finalorder = endorder - order_up
        move_filter = {
            '{0}__gte'.format(self.default_order_field): startorder,
            '{0}__lte'.format(self.default_order_field): finalorder,
        }
        order_by = self.default_order_field
        move_update = {self.default_order_field: F(self.default_order_field) - 1}
    elif startorder > endorder + order_down:
        # Moving the item up: rows between the two positions shift down.
        finalorder = endorder + order_down
        move_filter = {
            '{0}__gte'.format(self.default_order_field): finalorder,
            '{0}__lte'.format(self.default_order_field): startorder,
        }
        order_by = '-{0}'.format(self.default_order_field)
        move_update = {self.default_order_field: F(self.default_order_field) + 1}
    else:
        # Start and end coincide; nothing to move.
        return self.model.objects.none()

    with transaction.atomic():
        extra_model_filters = self.get_extra_model_filters(request)
        filters = {self.default_order_field: startorder}
        filters.update(extra_model_filters)
        move_filter.update(extra_model_filters)
        try:
            obj = self.model.objects.get(**filters)
        except self.model.MultipleObjectsReturned as exc:
            msg = "Detected non-unique values in field '{}' used for sorting this model.\nConsider to run \n"\
                  " python manage.py reorder {}\n"\
                  "to adjust this inconsistency."
            # Chain the original exception so the root cause stays visible
            # in the traceback (the original dropped it).
            raise self.model.MultipleObjectsReturned(
                msg.format(self.default_order_field, self.model._meta.label)
            ) from exc
        obj_qs = self.model.objects.filter(pk=obj.pk)
        move_qs = self.model.objects.filter(**move_filter).order_by(order_by)
        for instance in move_qs:
            pre_save.send(
                self.model,
                instance=instance,
                update_fields=[self.default_order_field],
                raw=False,
                # NOTE: the original wrote `None or router.db_for_write(...)`,
                # which is identical to calling the router directly.
                using=router.db_for_write(self.model, instance=instance),
            )
        # Using qs.update() avoids the duplicate [pre|post]_save signals
        # that obj.save() would emit.
        # Park the moved row above the current maximum so the block shift
        # below cannot collide with its ordering value.
        obj_qs.update(**{self.default_order_field: self.get_max_order(request, obj) + 1})
        move_qs.update(**move_update)
        obj_qs.update(**{self.default_order_field: finalorder})
        for instance in move_qs:
            post_save.send(
                self.model,
                instance=instance,
                update_fields=[self.default_order_field],
                raw=False,
                using=router.db_for_write(self.model, instance=instance),
                created=False
            )
    query_set = self.model.objects.filter(**move_filter).order_by(
        self.default_order_field).values_list('pk', self.default_order_field)
    return [dict(pk=pk, order=order) for pk, order in query_set]
def redirect_pre_save(sender, signal=None, *args, **kwargs):
    """Re-dispatch a ``pre_save`` event with ``BaseUser`` as the sender.

    The original *sender* and *signal* arguments are intentionally
    discarded; all remaining arguments are forwarded unchanged.
    """
    pre_save.send(BaseUser, *args, **kwargs)
def create_items(sender, instance, **kwargs):
    """Re-emit ``pre_save`` for *instance* using the sender's parent class.

    The new sender is ``sender.__bases__[0]`` — presumably so handlers
    registered on the base model also fire; verify against the callers.
    """
    parent = sender.__bases__[0]
    pre_save.send(sender=parent, instance=instance)
def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
    """Emit ``pre_save`` for this instance without persisting anything.

    Only dispatches the signal with ``MockedWithFieldsPreSave`` as the
    sender; there is no ``super().save()`` call, so no database write
    happens (this looks like a test double). The ``force_insert`` and
    ``force_update`` arguments are accepted but ignored.
    """
    pre_save.send(
        sender=MockedWithFieldsPreSave,
        instance=self,
        raw=False,
        using=using,
        update_fields=update_fields,
    )