def delete(self):
    """
    Execute the collected deletion plan against the ``self.using`` database.

    Order of operations: sort instances and models, fire ``pre_delete``
    signals, apply collected field updates, delete batched related rows,
    delete the collected instances, fire ``post_delete`` signals, and
    finally sync the in-memory instances (apply field updates, clear PKs).

    NOTE(review): unlike later Django versions, nothing here wraps the work
    in a transaction -- presumably the caller provides one; confirm before
    reusing this standalone.
    """
    # sort instance collections
    for model, instances in self.data.items():
        self.data[model] = sorted(instances, key=attrgetter("pk"))
    # if possible, bring the models in an order suitable for databases that
    # don't support transactions or cannot defer constraint checks until the
    # end of a transaction.
    self.sort()
    # send pre_delete signals (skipped for auto-created models, e.g. M2M
    # through tables)
    for model, obj in self.instances_with_model():
        if not model._meta.auto_created:
            signals.pre_delete.send(sender=model, instance=obj,
                                    using=self.using)
    # update fields collected for update (e.g. SET_NULL handling)
    for model, instances_for_fieldvalues in six.iteritems(
            self.field_updates):
        query = sql.UpdateQuery(model)
        for (field, value), instances in six.iteritems(instances_for_fieldvalues):
            query.update_batch([obj.pk for obj in instances],
                               {field.name: value}, self.using)
    # reverse instance collections so rows are removed in the opposite
    # order from collection
    for instances in six.itervalues(self.data):
        instances.reverse()
    # delete batches of related rows, keyed by the referencing field
    for model, batches in six.iteritems(self.batches):
        query = sql.DeleteQuery(model)
        for field, instances in six.iteritems(batches):
            query.delete_batch([obj.pk for obj in instances], self.using,
                               field)
    # delete the collected instances themselves
    for model, instances in six.iteritems(self.data):
        query = sql.DeleteQuery(model)
        pk_list = [obj.pk for obj in instances]
        query.delete_batch(pk_list, self.using)
    # send post_delete signals
    for model, obj in self.instances_with_model():
        if not model._meta.auto_created:
            signals.post_delete.send(sender=model, instance=obj,
                                     using=self.using)
    # update collected instances in memory to mirror the database state
    for model, instances_for_fieldvalues in six.iteritems(
            self.field_updates):
        for (field, value), instances in six.iteritems(instances_for_fieldvalues):
            for obj in instances:
                setattr(obj, field.attname, value)
    # deleted rows no longer have a primary key
    for model, instances in six.iteritems(self.data):
        for instance in instances:
            setattr(instance, model._meta.pk.attname, None)
def delete_objects(seen_objs):
    """
    Iterate through a list of seen classes, and remove any instances that are
    referred to.

    NOTE(review): this is Python 2-only code -- ``seen_objs[cls].items()``
    must return a list for the in-place ``items.sort()`` to work, and the
    bare ``filter(...)`` in the boolean expression relies on Py2's
    list-returning filter for truthiness. ``connection`` and ``transaction``
    are presumably module-level globals; confirm in the enclosing module.
    """
    try:
        ordered_classes = seen_objs.keys()
    except CyclicDependency:
        # If there is a cyclic dependency, we cannot in general delete the
        # objects. However, if an appropriate transaction is set up, or if the
        # database is lax enough, it will succeed. So for now, we go ahead and
        # try anyway.
        ordered_classes = seen_objs.unordered_keys()
    obj_pairs = {}
    for cls in ordered_classes:
        # (pk, instance) pairs, sorted by pk (Py2 tuple ordering)
        items = seen_objs[cls].items()
        items.sort()
        obj_pairs[cls] = items
        # Pre-notify all instances to be deleted.
        for pk_val, instance in items:
            signals.pre_delete.send(sender=cls, instance=instance)
        pk_list = [pk for pk, instance in items]
        del_query = sql.DeleteQuery(cls, connection)
        del_query.delete_batch_related(pk_list)
        update_query = sql.UpdateQuery(cls, connection)
        # Null out nullable FKs that point at objects being deleted.
        for field, model in cls._meta.get_fields_with_model():
            if (field.rel and field.null and field.rel.to in seen_objs and
                    filter(lambda f: f.column == field.column,
                           field.rel.to._meta.fields)):
                if model:
                    # field lives on a parent model in MTI inheritance
                    sql.UpdateQuery(model, connection).clear_related(field,
                                                                    pk_list)
                else:
                    update_query.clear_related(field, pk_list)
    # Now delete the actual data.
    for cls in ordered_classes:
        items = obj_pairs[cls]
        items.reverse()
        pk_list = [pk for pk, instance in items]
        del_query = sql.DeleteQuery(cls, connection)
        del_query.delete_batch(pk_list)
        # Last cleanup; set NULLs where there once was a reference to the
        # object, NULL the primary key of the found objects, and perform
        # post-notification.
        for pk_val, instance in items:
            for field in cls._meta.fields:
                if field.rel and field.null and field.rel.to in seen_objs:
                    setattr(instance, field.attname, None)
            signals.post_delete.send(sender=cls, instance=instance)
            setattr(instance, cls._meta.pk.attname, None)
    transaction.commit_unless_managed()
def delete(self): self.sort() # collect pk_list before deletion (once things start to delete # queries might not be able to retreive pk list) del_dict = OrderedDict() for model, instances in self.data.items(): del_dict.setdefault(model, []) for inst in instances: del_dict[model] += list(inst.values_list('pk', flat=True)) deleted_counter = Counter() with transaction.atomic(using=self.using, savepoint=False): # update fields for model, instances_for_fieldvalues in self.field_updates.items(): for (field, value), instances in instances_for_fieldvalues.items(): for inst in instances: query = sql.UpdateQuery(model) query.update_batch(inst.values_list('pk', flat=True), {field.name: value}, self.using) # fast deletes for qs in self.fast_deletes: count = qs._raw_delete(using=self.using) deleted_counter[qs.model._meta.label] += count # delete instances for model, pk_list in del_dict.items(): query = sql.DeleteQuery(model) count = query.delete_batch(pk_list, self.using) deleted_counter[model._meta.label] += count return sum(deleted_counter.values()), dict(deleted_counter)
def delete(self):
    """
    Delete everything this collector gathered, in a single transaction.

    Mirrors Django's ``Collector.delete()`` bookkeeping: fast deletes first,
    then collected field updates, then batched row deletion per model.

    Returns:
        ``(total_deleted, {model_label: count})`` -- previously the counter
        was computed but silently discarded; returning it matches the
        Django 1.9+ ``delete()`` contract and is backward compatible
        (existing callers ignored the old ``None`` return).

    NOTE(review): ``self.data`` / ``instances_for_fieldvalues`` values are
    treated as querysets here (``values_list`` is called on them) -- confirm
    against the collecting code.
    """
    self.sort()
    # number of objects deleted for each model label
    deleted_counter = Counter()
    with transaction.atomic(using=self.using, savepoint=False):
        # fast deletes: querysets deletable with one query, no signals
        for qs in self.fast_deletes:
            count = qs._raw_delete(using=self.using)
            deleted_counter[qs.model._meta.label] += count
        # update fields (collected SET_NULL-style updates)
        for model, instances_for_fieldvalues in self.field_updates.items():
            for (field, value), instances in instances_for_fieldvalues.items():
                query = sql.UpdateQuery(model)
                query.update_batch(instances.values_list('pk', flat=True),
                                   {field.name: value}, self.using)
        # delete instances
        # (debug print removed -- it leaked noisy output to stdout on
        # every delete)
        for model, instances in self.data.items():
            query = sql.DeleteQuery(model)
            pk_list = instances.values_list('pk', flat=True)
            count = query.delete_batch(pk_list, self.using)
            deleted_counter[model._meta.label] += count
    return sum(deleted_counter.values()), dict(deleted_counter)
def delete(self):
    """
    Delete all collected objects and related updates in one transaction.

    This is the Django 1.9-style ``Collector.delete()``: sort, signal,
    fast-delete, field updates, row deletion (with per-instance
    ``post_delete``), then sync in-memory state.

    Returns ``(total_deleted, {model_label: count})``.
    """
    # sort instance collections
    for model, instances in self.data.items():
        self.data[model] = sorted(instances, key=attrgetter("pk"))
    # if possible, bring the models in an order suitable for databases that
    # don't support transactions or cannot defer constraint checks until the
    # end of a transaction.
    self.sort()
    # number of objects deleted for each model label
    deleted_counter = Counter()
    with transaction.atomic(using=self.using, savepoint=False):
        # send pre_delete signals (auto-created models, e.g. M2M through
        # tables, don't emit signals)
        for model, obj in self.instances_with_model():
            if not model._meta.auto_created:
                signals.pre_delete.send(sender=model, instance=obj,
                                        using=self.using)
        # fast deletes: querysets deletable with a single query, no signals
        for qs in self.fast_deletes:
            count = qs._raw_delete(using=self.using)
            deleted_counter[qs.model._meta.label] += count
        # update fields (collected SET_NULL-style updates)
        for model, instances_for_fieldvalues in self.field_updates.items():
            for (field, value), instances in instances_for_fieldvalues.items():
                query = sql.UpdateQuery(model)
                query.update_batch([obj.pk for obj in instances],
                                   {field.name: value}, self.using)
        # reverse instance collections so rows go out in the opposite order
        # from collection
        for instances in self.data.values():
            instances.reverse()
        # delete instances
        for model, instances in self.data.items():
            query = sql.DeleteQuery(model)
            pk_list = [obj.pk for obj in instances]
            count = query.delete_batch(pk_list, self.using)
            deleted_counter[model._meta.label] += count
            if not model._meta.auto_created:
                for obj in instances:
                    signals.post_delete.send(sender=model, instance=obj,
                                             using=self.using)
    # update collected instances in memory to mirror the database state
    for instances_for_fieldvalues in self.field_updates.values():
        for (field, value), instances in instances_for_fieldvalues.items():
            for obj in instances:
                setattr(obj, field.attname, value)
    # deleted rows no longer have a primary key
    for model, instances in self.data.items():
        for instance in instances:
            setattr(instance, model._meta.pk.attname, None)
    return sum(deleted_counter.values()), dict(deleted_counter)
def as_sql(self): """ Generate SQL queries that perform related deletion """ # List of (sql, params) tuples to perform deletion query_list = [] for model, instances in self.data.items(): self.data[model] = sorted(instances, key=attrgetter("pk")) self.sort() # Do not send pre_delete signals as in .delete() # Fast deletes for qs in self.fast_deletes: # TODO Check for any potential caveats from complex queries - assume none are generated by Collector # Clone queryset into DeleteQuery to use .as_sql() query_list.append( qs.query.clone(klass=sql.DeleteQuery).get_compiler( self.using).as_sql()) # update fields for model, instances_for_fieldvalues in six.iteritems( self.field_updates): query = sql.UpdateQuery(model) for (field, value), instances in six.iteritems(instances_for_fieldvalues): query.add_update_values({field.name: value}) query.add_q(models.Q(pk__in=[obj.pk for obj in instances])) query_list.append( query.get_compiler(using=self.using).as_sql()) # reverse instance collections for instances in six.itervalues(self.data): instances.reverse() # delete instances for model, instances in six.iteritems(self.data): query = sql.DeleteQuery(model) pk_list = [obj.pk for obj in instances] query.where = query.where_class() query.add_q(models.Q(pk__in=pk_list)) query_list.append(query.get_compiler(using=self.using).as_sql()) # Do not update instances as in .delete() return query_list
def delete(self):
    '''
    Replaced to simple '_raw_delete' method to prevent extra 'SELECT' on
    delete if there are listeners of 'post_delete' signal. Send custom
    'pure_post_delete' signal to avoid conflicts.

    Returns the number of rows removed.
    '''
    # Guard clauses: slicing and value-projection are incompatible with
    # a bulk delete.
    assert self.query.can_filter(), \
        "Cannot use 'limit' or 'offset' with delete."
    if self._fields is not None:
        raise TypeError(
            "Cannot call delete() after .values() or .values_list()")
    deleted = sql.DeleteQuery(self.model).delete_qs(self, self.db)
    # Only notify listeners when something was actually removed and the
    # behavior hook is configured.
    notify = deleted > 0 and self.cp_behavior is not None
    if notify:
        pure_post_delete.send(sender=self.model, queryset=self,
                              count=deleted)
    # The cached rows are stale now; force a re-fetch on next evaluation.
    self._result_cache = None
    return deleted
def sql_hard_delete(self, model, instances):
    """Issue a batched hard DELETE for *instances* of *model* on ``self.using``."""
    pk_list = [instance.pk for instance in instances]
    sql.DeleteQuery(model).delete_batch(pk_list, self.using)
def delete(self):
    """
    Patched the BaseCollector.delete with soft delete support for
    PermanentModel

    Subclasses of ``PermanentModel`` are not removed: instead the field
    named by ``settings.FIELD`` is stamped with the current time, and their
    in-memory primary keys are kept intact. All other models follow the
    normal Django deletion path.
    """
    from .models import PermanentModel
    # single timestamp so every soft-deleted row in this pass matches
    time = now()
    # sort instance collections
    for model, instances in self.data.items():
        self.data[model] = sorted(instances, key=attrgetter("pk"))
    # if possible, bring the models in an order suitable for databases that
    # don't support transactions or cannot defer constraint checks until the
    # end of a transaction.
    self.sort()
    with transaction.atomic(using=self.using, savepoint=False):
        # send pre_delete signals
        for model, obj in self.instances_with_model():
            if not model._meta.auto_created:
                signals.pre_delete.send(sender=model, instance=obj,
                                        using=self.using)
        # fast deletes
        for qs in self.fast_deletes:
            if issubclass(qs.model, PermanentModel):
                # Update PermanentModel instance: soft delete via timestamp
                # instead of removing the rows
                pk_list = [obj.pk for obj in qs]
                qs = sql.UpdateQuery(qs.model)
                qs.update_batch(pk_list, {settings.FIELD: time}, self.using)
            else:
                qs._raw_delete(using=self.using)
        # update fields (collected SET_NULL-style updates)
        for model, instances_for_fieldvalues in six.iteritems(
                self.field_updates):
            query = sql.UpdateQuery(model)
            for (field, value), instances in six.iteritems(instances_for_fieldvalues):
                query.update_batch([obj.pk for obj in instances],
                                   {field.name: value}, self.using)
        # reverse instance collections, matching stock Django ordering
        for instances in six.itervalues(self.data):
            instances.reverse()
        # delete instances -- soft (UPDATE) for PermanentModel, hard
        # (DELETE) otherwise; post_delete fires either way
        for model, instances in six.iteritems(self.data):
            pk_list = [obj.pk for obj in instances]
            if issubclass(model, PermanentModel):
                query = sql.UpdateQuery(model)
                query.update_batch(pk_list, {settings.FIELD: time},
                                   self.using)
            else:
                query = sql.DeleteQuery(model)
                query.delete_batch(pk_list, self.using)
            if not model._meta.auto_created:
                for obj in instances:
                    signals.post_delete.send(sender=model, instance=obj,
                                             using=self.using)
    # update collected instances in memory to mirror the database state
    for model, instances_for_fieldvalues in six.iteritems(
            self.field_updates):
        for (field, value), instances in six.iteritems(instances_for_fieldvalues):
            for obj in instances:
                setattr(obj, field.attname, value)
    # clear PKs only for hard-deleted rows; soft-deleted PermanentModel
    # instances keep their primary key
    for model, instances in six.iteritems(self.data):
        for instance in instances:
            if issubclass(model, PermanentModel):
                continue
            setattr(instance, model._meta.pk.attname, None)
def delete(self, using=None, keep_parents=False, **kwargs):
    """
    Mark this instance as deleted and call `delete()` on all related objects.
    Don’t call `super`!

    ``self`` is soft-deleted (``deleted = True`` + save); related objects
    are collected exactly as Django's ``Model.delete()`` would, except that
    ``self`` is excluded from the plan and ``DorsaleBaseModel`` subclasses
    are deleted one-by-one through their own ``delete()``.

    Returns ``(total_deleted, {model_label: count})``.
    """
    logger.info('DELETE %s' % self)
    self.deleted = True
    self.save(**kwargs)
    # the following is copied from django.db.models.base.Model
    using = using or router.db_for_write(self.__class__, instance=self)
    assert self._get_pk_val() is not None, (
        "%s object can't be deleted because its %s attribute is set to None."
        % (self._meta.object_name, self._meta.pk.attname))
    # Find all the related objects that need to be deleted.
    collector = Collector(using=using)
    collector.collect([self], keep_parents=keep_parents)
    #return collector.delete()
    # remove self from deletion collection
    # BUG FIX: the original code did
    #     myself_and_friends = collector.data[type(self)].remove(self)
    # but remove() mutates in place and returns None, so the `else` branch
    # always ran and dropped EVERY collected instance of this model from
    # the plan, not just `self`.
    myself_and_friends = collector.data[type(self)]
    myself_and_friends.remove(self)
    if not myself_and_friends:
        del collector.data[type(self)]
    # Problem: collector.delete() doesn’t call object’s delete method, but deletes!
    # The following is copied from collector.delete() (db.models.deletion)
    # sort instance collections
    for model, instances in collector.data.items():
        collector.data[model] = sorted(instances, key=attrgetter("pk"))
    # if possible, bring the models in an order suitable for databases that
    # don't support transactions or cannot defer constraint checks until the
    # end of a transaction.
    collector.sort()
    # number of objects deleted for each model label
    deleted_counter = Counter()
    with transaction.atomic(using=collector.using, savepoint=False):
        # send pre_delete signals
        for model, obj in collector.instances_with_model():
            if not model._meta.auto_created:
                signals.pre_delete.send(sender=model, instance=obj,
                                        using=collector.using)
        ## fast deletes
        # deliberately disabled: fast deletes would bypass the per-instance
        # delete() needed for DorsaleBaseModel below
        #for qs in collector.fast_deletes:
        #    count = qs._raw_delete(using=collector.using)
        #    deleted_counter[qs.model._meta.label] += count
        # update fields (collected SET_NULL-style updates)
        for model, instances_for_fieldvalues in six.iteritems(
                collector.field_updates):
            query = sql.UpdateQuery(model)
            for (field, value), instances in six.iteritems(instances_for_fieldvalues):
                query.update_batch([obj.pk for obj in instances],
                                   {field.name: value}, collector.using)
        # reverse instance collections
        for instances in six.itervalues(collector.data):
            instances.reverse()
        # delete instances
        for model, instances in six.iteritems(collector.data):
            if not issubclass(model, DorsaleBaseModel):
                # handle non-cerebrale models as usual
                query = sql.DeleteQuery(model)
                pk_list = [obj.pk for obj in instances]
                count = query.delete_batch(pk_list, collector.using)
                deleted_counter[model._meta.label] += count
            else:
                for inst in instances:
                    inst.delete()  # expensive operation!
            if not model._meta.auto_created:
                for obj in instances:
                    signals.post_delete.send(sender=model, instance=obj,
                                             using=collector.using)
    # update collected instances in memory to mirror the database state
    for model, instances_for_fieldvalues in six.iteritems(
            collector.field_updates):
        for (field, value), instances in six.iteritems(instances_for_fieldvalues):
            for obj in instances:
                setattr(obj, field.attname, value)
    for model, instances in six.iteritems(collector.data):
        for instance in instances:
            setattr(instance, model._meta.pk.attname, None)
    return sum(deleted_counter.values()), dict(deleted_counter)
def delete(self, force=False):
    """
    Patched the BaseCollector.delete with soft delete support for
    PermanentModel

    ``PermanentModel`` subclasses are stamped with the current time in the
    ``FIELD`` column instead of being removed, unless ``force`` is true, in
    which case everything is hard-deleted. Supports multiple Django
    versions via ``DJANGO_VERSION`` branches.

    Returns ``(total, {label: count})`` on Django >= 1.9, ``None`` before.
    """
    from .models import PermanentModel
    # single timestamp so every soft-deleted row in this pass matches
    time = now()
    deleted_counter = Counter()
    # sort instance collections
    for model, instances in self.data.items():
        self.data[model] = sorted(instances, key=attrgetter("pk"))
    # if possible, bring the models in an order suitable for databases that
    # don't support transactions or cannot defer constraint checks until the
    # end of a transaction.
    self.sort()
    # number of objects deleted for each model label
    # NOTE(review): redundant re-initialization -- deleted_counter was
    # already created above
    deleted_counter = Counter()
    # pre-1.8 Django lacks transaction.atomic's savepoint handling here
    if DJANGO_VERSION < (1, 8, 0):
        transaction_handling = partial(
            transaction.commit_on_success_unless_managed, using=self.using)
    else:
        transaction_handling = partial(transaction.atomic, using=self.using,
                                       savepoint=False)
    with transaction_handling():
        # send pre_delete signals
        for model, obj in self.instances_with_model():
            if not model._meta.auto_created:
                signals.pre_delete.send(sender=model, instance=obj,
                                        using=self.using)
        # fast deletes
        for qs in self.fast_deletes:
            if (issubclass(qs.model, PermanentModel) and not force):
                # Update PermanentModel instance: soft delete via timestamp
                pk_list = [obj.pk for obj in qs]
                qs = sql.UpdateQuery(qs.model)
                qs.update_batch(pk_list, {FIELD: time}, self.using)
                count = len(pk_list)
            else:
                count = qs._raw_delete(using=self.using)
            # model._meta.label only exists (and counts are only returned)
            # on Django >= 1.9
            if DJANGO_VERSION >= (1, 9, 0):
                deleted_counter[qs.model._meta.label] += count
        # update fields (collected SET_NULL-style updates)
        for model, instances_for_fieldvalues in six.iteritems(
                self.field_updates):
            query = sql.UpdateQuery(model)
            for (field, value), instances in six.iteritems(instances_for_fieldvalues):
                query.update_batch([obj.pk for obj in instances],
                                   {field.name: value}, self.using)
        # reverse instance collections
        for instances in six.itervalues(self.data):
            instances.reverse()
        # delete instances -- soft (UPDATE + in-memory stamp) for
        # PermanentModel unless forced, hard (DELETE) otherwise
        for model, instances in six.iteritems(self.data):
            pk_list = [obj.pk for obj in instances]
            if issubclass(model, PermanentModel) and not force:
                query = sql.UpdateQuery(model)
                query.update_batch(pk_list, {FIELD: time}, self.using)
                for instance in instances:
                    setattr(instance, FIELD, time)
                count = len(pk_list)
            else:
                query = sql.DeleteQuery(model)
                count = query.delete_batch(pk_list, self.using)
            if DJANGO_VERSION >= (1, 9, 0):
                deleted_counter[model._meta.label] += count
            if not model._meta.auto_created:
                for obj in instances:
                    signals.post_delete.send(sender=model, instance=obj,
                                             using=self.using)
    # update collected instances in memory to mirror the database state
    for model, instances_for_fieldvalues in six.iteritems(self.field_updates):
        for (field, value), instances in six.iteritems(instances_for_fieldvalues):
            for obj in instances:
                setattr(obj, field.attname, value)
    # keep PKs on soft-deleted instances; clear them on hard-deleted ones
    for model, instances in six.iteritems(self.data):
        for instance in instances:
            if issubclass(model, PermanentModel) and not force:
                continue
            setattr(instance, model._meta.pk.attname, None)
    if DJANGO_VERSION >= (1, 9, 0):
        return sum(deleted_counter.values()), dict(deleted_counter)
def delete(self, using=None, *args, **kwargs):
    """
    Mark this instance as deleted and call `delete()` on all related objects.
    Don’t call `super`!

    ``self`` is soft-deleted (``deleted = True`` + save); related objects
    are collected exactly as Django's ``Model.delete()`` would, except that
    ``self`` is excluded from the plan and ``CerebraleBaseModel``
    subclasses are deleted one-by-one through their own ``delete()``.

    NOTE(review): Python 2-only (``dict.iteritems`` / ``itervalues``).
    """
    logger.info('DELETE %s' % self)
    self.deleted = True
    self.save(*args, **kwargs)
    # the following is copied from django.db.models.base.Model
    using = using or router.db_for_write(self.__class__, instance=self)
    assert self._get_pk_val(
    ) is not None, "%s object can't be deleted because its %s attribute is set to None." % (
        self._meta.object_name, self._meta.pk.attname)
    # Find all the related objects that need to be deleted.
    collector = Collector(using=using)
    collector.collect([self])
    # remove self from deletion collection
    # BUG FIX: the original code did
    #     myself_and_friends = collector.data[type(self)].remove(self)
    # but remove() mutates in place and returns None, so the `else` branch
    # always ran and dropped EVERY collected instance of this model from
    # the plan, not just `self`.
    myself_and_friends = collector.data[type(self)]
    myself_and_friends.remove(self)
    if not myself_and_friends:
        del collector.data[type(self)]
    # Problem: collector.delete() doesn’t call object’s delete method, but deletes!
    # The following is copied from collector.delete()
    # sort instance collections
    for model, instances in collector.data.items():
        collector.data[model] = sorted(instances, key=attrgetter("pk"))
    # if possible, bring the models in an order suitable for databases that
    # don't support transactions or cannot defer constraint checks until the
    # end of a transaction.
    collector.sort()
    # send pre_delete signals
    for model, obj in collector.instances_with_model():
        if not model._meta.auto_created:
            signals.pre_delete.send(sender=model, instance=obj,
                                    using=collector.using)
    # update fields (collected SET_NULL-style updates)
    for model, instances_for_fieldvalues in collector.field_updates.iteritems():
        query = sql.UpdateQuery(model)
        for (field, value), instances in instances_for_fieldvalues.iteritems():
            query.update_batch([obj.pk for obj in instances],
                               {field.name: value}, collector.using)
    # reverse instance collections
    for instances in collector.data.itervalues():
        instances.reverse()
    # delete batches
    for model, batches in collector.batches.iteritems():
        if not issubclass(model, CerebraleBaseModel):
            # handle non-cerebrale models as usual
            query = sql.DeleteQuery(model)
            for field, instances in batches.iteritems():
                query.delete_batch([obj.pk for obj in instances],
                                   collector.using, field)
        else:
            # don’t know what to do
            pass
    # delete instances
    for model, instances in collector.data.iteritems():
        if not issubclass(model, CerebraleBaseModel):
            # handle non-cerebrale models as usual
            query = sql.DeleteQuery(model)
            pk_list = [obj.pk for obj in instances]
            query.delete_batch(pk_list, collector.using)
        else:
            for inst in instances:
                inst.delete()  # expensive operation!
    # send post_delete signals
    for model, obj in collector.instances_with_model():
        if not model._meta.auto_created:
            signals.post_delete.send(sender=model, instance=obj,
                                     using=collector.using)
    # update collected instances in memory to mirror the database state
    for model, instances_for_fieldvalues in collector.field_updates.iteritems():
        for (field, value), instances in instances_for_fieldvalues.iteritems():
            for obj in instances:
                setattr(obj, field.attname, value)
    for model, instances in collector.data.iteritems():
        for instance in instances:
            setattr(instance, model._meta.pk.attname, None)
def Collector__delete(self):
    """
    Django ``Collector.delete()`` with one addition: large related
    querysets registered in ``self.chunk_deletes`` are removed in chunks of
    ``DELETION_MAX_CHUNK`` before the normal plan runs, to bound query size
    and memory.

    Returns ``(total_deleted, {model_label: count})``.
    """
    # NOTE: Original Django code. Only changes are marked as ADDED.
    # sort instance collections
    for model, instances in self.data.items():
        self.data[model] = sorted(instances, key=attrgetter("pk"))
    # if possible, bring the models in an order suitable for databases that
    # don't support transactions or cannot defer constraint checks until the
    # end of a transaction.
    self.sort()
    # number of objects deleted for each model label
    deleted_counter = Counter()
    with transaction.atomic(using=self.using, savepoint=False):
        # ADDED: Chunk deletion of larger amounts of related objects (+6 lines)
        for obj_queryset in self.chunk_deletes:
            exists = True
            while exists:
                # delete up to DELETION_MAX_CHUNK rows at a time until the
                # queryset is drained; NOTE(review) chunked rows are not
                # reflected in deleted_counter
                values_list = obj_queryset.values_list('pk', flat=True)[:DELETION_MAX_CHUNK]
                obj_queryset.model.objects.filter(pk__in=values_list).delete()
                exists = obj_queryset.exists()
        # send pre_delete signals
        for model, obj in self.instances_with_model():
            if not model._meta.auto_created:
                signals.pre_delete.send(
                    sender=model, instance=obj, using=self.using
                )
        # fast deletes
        for qs in self.fast_deletes:
            count = qs._raw_delete(using=self.using)
            deleted_counter[qs.model._meta.label] += count
        # update fields (collected SET_NULL-style updates)
        for model, instances_for_fieldvalues in self.field_updates.items():
            for (field, value), instances in instances_for_fieldvalues.items():
                query = sql.UpdateQuery(model)
                query.update_batch([obj.pk for obj in instances],
                                   {field.name: value}, self.using)
        # reverse instance collections
        for instances in self.data.values():
            instances.reverse()
        # delete instances
        for model, instances in self.data.items():
            query = sql.DeleteQuery(model)
            pk_list = [obj.pk for obj in instances]
            count = query.delete_batch(pk_list, self.using)
            deleted_counter[model._meta.label] += count
            if not model._meta.auto_created:
                for obj in instances:
                    signals.post_delete.send(
                        sender=model, instance=obj, using=self.using
                    )
    # update collected instances in memory to mirror the database state
    for model, instances_for_fieldvalues in self.field_updates.items():
        for (field, value), instances in instances_for_fieldvalues.items():
            for obj in instances:
                setattr(obj, field.attname, value)
    # deleted rows no longer have a primary key
    for model, instances in self.data.items():
        for instance in instances:
            setattr(instance, model._meta.pk.attname, None)
    return sum(deleted_counter.values()), dict(deleted_counter)
def delete(self, schema):
    """
    Schema-aware ``Collector.delete()``: every signal and DELETE carries an
    explicit ``schema`` keyword.

    Args:
        schema: mandatory target schema; ``None`` raises with a formatted
            call stack so the offending caller can be located.

    Raises:
        Exception: when ``schema`` is None.
    """
    if schema is None:  # FIX: identity comparison for None, was `== None`
        # add schema
        import inspect
        fx = inspect.stack()
        error_detail = ""
        for x in fx:
            # BUG FIX: the original formatted fx[1]/fx[2] -- the same frame
            # on every iteration -- instead of each frame's filename (x[1])
            # and line number (x[2]).
            error_detail += "\n\t {0}, line {1}".format(x[1], x[2])
        raise (Exception(
            "can not call ''{1}'' without schema in '{0}'.\nDetail:\n{2}".
            format(__file__, "Collector.delete", error_detail)))
    # sort instance collections
    for model, instances in self.data.items():
        self.data[model] = sorted(instances, key=attrgetter("pk"))
    # if possible, bring the models in an order suitable for databases that
    # don't support transactions or cannot defer constraint checks until the
    # end of a transaction.
    self.sort()
    with transaction.commit_on_success_unless_managed(using=self.using):
        # send pre_delete signals, forwarding the target schema
        for model, obj in self.instances_with_model():
            if not model._meta.auto_created:
                signals.pre_delete.send(sender=model, instance=obj,
                                        using=self.using, schema=schema)
        # fast deletes
        for qs in self.fast_deletes:
            qs._raw_delete(using=self.using)
        # update fields (collected SET_NULL-style updates)
        for model, instances_for_fieldvalues in six.iteritems(
                self.field_updates):
            query = sql.UpdateQuery(model)
            for (field, value), instances in six.iteritems(instances_for_fieldvalues):
                query.update_batch([obj.pk for obj in instances],
                                   {field.name: value}, self.using)
        # reverse instance collections
        for instances in six.itervalues(self.data):
            instances.reverse()
        # delete instances in the requested schema
        for model, instances in six.iteritems(self.data):
            query = sql.DeleteQuery(model)
            pk_list = [obj.pk for obj in instances]
            query.delete_batch(pk_list, self.using, schema=schema)
            if not model._meta.auto_created:
                for obj in instances:
                    signals.post_delete.send(sender=model, instance=obj,
                                             using=self.using, schema=schema)
    # update collected instances in memory to mirror the database state
    for model, instances_for_fieldvalues in six.iteritems(
            self.field_updates):
        for (field, value), instances in six.iteritems(instances_for_fieldvalues):
            for obj in instances:
                setattr(obj, field.attname, value)
    for model, instances in six.iteritems(self.data):
        for instance in instances:
            setattr(instance, model._meta.pk.attname, None)