def delete_without_cascade(self, using=None, keep_parents=False):
    """
    Modified version of django's default delete() method.

    This method is added to enable safe deletion of the child models
    without removing objects related to it through the parent. As of
    Feb 2017, no models are directly related to the
    OAuth2DataRequestProject or OnSiteDataRequestProject child models.
    """
    # Only the two data-request child models may use this shortcut.
    allowed_models = [
        "private_sharing.onsitedatarequestproject",
        "private_sharing.oauth2datarequestproject",
    ]
    if self._meta.label_lower not in allowed_models:
        raise Exception("'delete_without_cascade' only for child models!")

    if using is None:
        using = router.db_for_write(self.__class__, instance=self)
    assert self._get_pk_val() is not None, (
        "%s object can't be deleted because its %s attribute is set to None."
        % (self._meta.object_name, self._meta.pk.attname))

    # collect_related=False is what keeps parent-linked objects intact.
    collector = Collector(using=using)
    collector.collect([self], keep_parents=keep_parents,
                      collect_related=False)
    return collector.delete()
def delete(self, using=None):
    """Collect this instance's delete cascade and remove it from the db."""
    db_alias = using or router.db_for_write(self.__class__, instance=self)
    assert self._get_pk_val() is not None, (
        "%s object can't be deleted because its %s attribute is set to None."
        % (self._meta.object_name, self._meta.pk.attname)
    )
    cascade = Collector(using=db_alias)
    cascade.collect([self])
    cascade.delete()
def delete(self, force=False):
    """
    Deletes the records in the current QuerySet.
    """
    assert self.query.can_filter(), \
        "Cannot use 'limit' or 'offset' with delete."

    # Deletion is two queries - discover related objects, then delete.
    # Both must run against the same (write) database.
    qs = self._clone()
    qs._for_write = True
    # Options that deletion does not support.
    qs.query.select_for_update = False
    qs.query.select_related = False
    qs.query.clear_ordering(force_empty=True)

    collector = Collector(using=qs.db)
    collector.collect(qs)
    collector.delete(force=force)

    # Drop the cached results so a reused QuerySet re-evaluates.
    self._result_cache = None
def skip(self):
    """
    Determine whether or not this object should be skipped.

    If this model instance is a parent of a single subclassed instance,
    skip it. The subclassed instance will create this parent instance
    for us.

    TODO: Allow the user to force its creation?
    """
    if self.skip_me is not None:
        return self.skip_me

    cls = self.instance.__class__
    collector = Collector(
        using=router.db_for_write(cls, instance=self.instance))
    collector.collect([self.instance], collect_related=False)

    # Flatten the collected objects and count how many list self.model
    # among their parent models.
    collected = [o for group in collector.data.values() for o in group]
    parent_hits = [self.model in o._meta.parents for o in collected]

    if parent_hits.count(True) == 1:
        # since this instance isn't explicitly created, it's variable name
        # can't be referenced in the script, so record None in context dict
        pk_name = self.instance._meta.pk.name
        key = '%s_%s' % (self.model.__name__,
                         getattr(self.instance, pk_name))
        self.context[key] = None
        self.skip_me = True
    else:
        self.skip_me = False
    return self.skip_me
def hard_delete(self):
    """Delete the records in the current QuerySet."""
    self._not_support_combined_queries('delete')
    assert not self.query.is_sliced, \
        "Cannot use 'limit' or 'offset' with delete."
    if self._fields is not None:
        raise TypeError(
            "Cannot call delete() after .values() or .values_list()")

    # Related-object discovery and the delete itself must share a db.
    qs = self._chain()
    qs._for_write = True
    # Options that deletion does not support.
    qs.query.select_for_update = False
    qs.query.select_related = False
    qs.query.clear_ordering(force_empty=True)

    collector = Collector(using=qs.db)
    collector.collect(qs)
    result = collector.delete()

    # Clear the result cache, in case this QuerySet gets reused.
    self._result_cache = None
    return result
def revert(self, delete=False):
    """
    Roll the database content back to this revision.

    Versions are grouped by the database their serialized model was
    saved under, and each database is reverted inside its own atomic
    transaction. When ``delete`` is True, objects reachable from the
    revision's objects that are not themselves part of the revision are
    deleted first.
    """
    # Group the models by the database of the serialized model.
    versions_by_db = defaultdict(list)
    for version in self.version_set.iterator():
        versions_by_db[version.db].append(version)
    # For each db, perform a separate atomic revert.
    for version_db, versions in versions_by_db.items():
        with transaction.atomic(using=version_db):
            # Optionally delete objects no longer in the current revision.
            if delete:
                # Get a set of all objects in this revision.
                old_revision = set()
                for version in versions:
                    model = version._model
                    try:
                        # Load the model instance from the same DB as it was saved under.
                        old_revision.add(model._default_manager.using(version.db).get(pk=version.object_id))
                    except model.DoesNotExist:
                        # Object no longer exists in the db; nothing to track.
                        pass
                # Calculate the set of all objects that are in the revision now.
                current_revision = chain.from_iterable(
                    _follow_relations_recursive(obj)
                    for obj in old_revision
                )
                # Delete objects that are no longer in the current revision.
                collector = Collector(using=version_db)
                new_objs = [item for item in current_revision if item not in old_revision]
                # NOTE(review): groupby only merges *adjacent* items and
                # new_objs is not sorted by type here, so one model may be
                # collected in several batches — apparently harmless for
                # Collector, but worth confirming.
                for model, group in groupby(new_objs, type):
                    collector.collect(list(group))
                collector.delete()
            # Attempt to revert all revisions.
            _safe_revert(versions)
def get(self, request, **kwargs):
    """
    Return a preview of everything that would be deleted along with the
    target object, grouped and counted per model.
    """
    obj = self.get_object()
    collector = Collector(using='default')
    collector.collect([obj])
    collector.sort()
    ret = {'items': []}
    for model, instances in collector.data.items():
        # rel_model maps a model name to a callable choosing which object
        # to display; the default keeps the related instance itself.
        # (rel_model is defined outside this block — presumably a module
        # level dict; TODO confirm.)
        display_obj = rel_model.get(model._meta.model_name, lambda original, related: related)
        instances = [display_obj(obj, instance) for instance in instances]
        # Only objects exposing a public URL are shown in the preview.
        instances = filter(
            lambda instance: hasattr(instance, 'get_absolute_url'),
            instances)
        # Count duplicates so each (instance, count) renders one row.
        counter = Counter(instances)
        ret['items'] += [{
            'name': force_text(instance),
            'preview_url': request._request.build_absolute_uri(
                instance.get_absolute_url()),
            'type': model._meta.verbose_name,
            'count': count
        } for instance, count in counter.items()]
    # Sort entries for the target object's own model first
    # (False sorts before True).
    ret['items'].sort(
        key=lambda item: item['type'] != obj._meta.verbose_name)
    return Response(ret)
def test_fast_delete_instance_set_pk_none(self):
    """Fast-deleting a user must leave its pk reset to None."""
    user = User.objects.create()
    # User can be fast-deleted.
    self.assertTrue(Collector(using='default').can_fast_delete(user))
    user.delete()
    self.assertIsNone(user.pk)
def delete(self, *args, **kwargs):
    """
    Delete this instance, writing a Log entry (with a field snapshot)
    for every object in the delete cascade when log metadata is enabled
    for the model and a user is attached to the instance.
    """
    log_data = get_metadata(self.__class__, 'log', False)
    log_index = get_metadata(self.__class__, 'logging', ())
    if (log_data or log_index) and self._user:
        # Imported lazily to avoid an import cycle with the admin app.
        from djangoplus.admin.models import Log
        collector = Collector(using='default')
        collector.collect([self], keep_parents=False)
        for cls, objs in collector.data.items():
            content_type = ContentType.objects.get_for_model(cls)
            for obj in objs:
                log = Log()
                log.operation = Log.DELETE
                log.user = self._user
                log.content_type = content_type
                log.object_id = obj.pk
                # NOTE(review): `unicode` makes this Python-2-only code.
                log.object_description = unicode(obj)
                diff = []
                # Snapshot every non-file field's display value.
                for field in get_metadata(obj.__class__, 'fields'):
                    if not isinstance(field, models.FileField):
                        o1 = getattr(obj, field.name)
                        v1 = unicode(o1)
                        diff.append((field.verbose_name, v1))
                log.content = json.dumps(diff)
                log.save()
                log.create_indexes(obj)
    super(Model, self).delete(*args, **kwargs)
    self.check_role(False)
def delete(self, using=None):
    """
    Override of delete() that removes the source file (and its parent
    directory, once empty) before deleting the database object.
    """
    using = using or router.db_for_write(self.__class__, instance=self)
    assert self._get_pk_val() is not None, "%s object can't be deleted because its %s attribute is set to None." % (
        self._meta.object_name,
        self._meta.pk.attname,
    )
    if self.source_file:
        # Split the stored path to recover the source file's directory.
        tab_path = self.source_file.name.split("/")
        # Rebuild the on-disk path of the source file's directory.
        rep_path = os.path.join(
            os.path.join(settings.VIDEO_ROOT, tab_path[1]).replace("\\", "/")
        )
        self.source_file.delete()  # remove the source file itself
        if len(os.listdir(rep_path)) == 0:  # directory is now empty
            try:
                os.rmdir(rep_path)  # remove the empty directory
            except OSError:
                # Best effort only. Fix: the original bare `except:` also
                # swallowed SystemExit/KeyboardInterrupt; only filesystem
                # errors should be ignored here.
                pass
    collector = Collector(using=using)
    collector.collect([self])
    collector.delete()
def delete(self):
    """
    Deletes the records in the current QuerySet.
    """
    assert self.query.can_filter(), \
        "Cannot use 'limit' or 'offset' with delete."

    # NB: this line is patched to work with SkinnyQuerySet
    # see https://gist.github.com/550438#file-gistfile2-txt
    qs = self._clone(klass=QuerySet)
    # Related-object discovery and the delete itself must both hit the
    # write database.
    qs._for_write = True
    # Options that deletion does not support.
    qs.query.select_for_update = False
    qs.query.select_related = False
    qs.query.clear_ordering(force_empty=True)

    collector = Collector(using=qs.db)
    collector.collect(qs)
    collector.delete()

    # Clear the result cache, in case this QuerySet gets reused.
    self._result_cache = None
def get(self, request, *args, **kwargs):
    """ Catch protected relations and show to user. """
    self.object = self.get_object()

    protected_objects = []
    collector_message = None
    can_delete = True

    collector = Collector(using="default")
    try:
        collector.collect([self.object])
    except ProtectedError as e:
        # A PROTECT relation blocks deletion; surface that to the user.
        collector_message = ("Cannot delete %s because it has relations "
                             "that depends on it." % self.object)
        protected_objects = e.protected_objects
        can_delete = False

    if can_delete and self.redirect:
        messages.success(request, self.get_success_message(self.object))
        self.delete(request, *args, **kwargs)
        return redirect(self.get_success_url())

    context = self.get_context_data(
        object=self.object,
        can_delete=can_delete,
        collector_message=collector_message,
        protected_objects=protected_objects,
    )
    return self.render_to_response(context)
def get_queryset(self):
    """
    Build a reverse-chronological list of dicts describing every
    BaseAction related to the requested subject (admin change URL,
    type, timestamp and selected field values).
    """
    subject = Subject.objects.get(pk=self.kwargs['subject_id'])
    collector = Collector(using="default")
    collector.collect([subject])

    entries = []
    for model, instance in collector.instances_with_model():
        # Skip the raw-data app and anything that isn't an action.
        if model._meta.app_label == 'data':
            continue
        if not isinstance(instance, BaseAction):
            continue
        clsname = instance.__class__.__name__
        entry = {
            'url': reverse(
                'admin:%s_%s_change' % (instance._meta.app_label,
                                        instance._meta.model_name),
                args=[instance.id]),
            'name': model.__name__,
            'type': getattr(
                instance, self.CLASS_TYPE_FIELD.get(clsname, ''), None),
            'date_time': instance.start_time,
        }
        # Attach the configured extra fields as arg0, arg1, ...
        arg_index = 0
        for field_name in self.CLASS_FIELDS.get(clsname, ()):
            value = getattr(instance, field_name, None)
            if value is None:
                continue
            entry['arg%d' % arg_index] = '%s: %s' % (field_name, value)
            arg_index += 1
        entries.append(entry)
    return sorted(entries, key=itemgetter('date_time'), reverse=True)
def delete(self, using=None):
    """
    Delete this instance, handling MongoModel subclasses manually.

    Django's Collector deletes via raw SQL and bypasses the QuerySet
    machinery, which does not work for Mongo-backed models; those are
    popped out of the collector and deleted one-by-one through their
    default managers (with pre/post_delete signals), and the remaining
    objects are cascaded normally.
    """
    using = using or router.db_for_write(self.__class__, instance=self)
    assert self._get_pk_val() is not None, "%s object can't be deleted because its %s attribute is set to None." % (
        self._meta.object_name,
        self._meta.pk.attname,
    )
    # Find all the objects than need to be deleted.
    collector = Collector(using=using)
    collector.collect([self])

    # hack to prevent ORM delete object via sql
    # it does not use QuerySet - directly sql module :(
    to_delete = {}
    # Fix: the original `collector.data.keys()[:]` only works on
    # Python 2 (where keys() returns a list); on Python 3 dict views
    # are not sliceable. list() makes the snapshot we need so we can
    # pop from the dict while iterating.
    for k in list(collector.data.keys()):
        if issubclass(k, MongoModel):
            to_delete.update({k: collector.data.pop(k)})
    for key, object_set in to_delete.items():
        for obj in object_set:
            cls = obj.__class__
            # Mirror Django's signal behaviour around the manual delete.
            if not cls._meta.auto_created:
                models.signals.pre_delete.send(sender=cls, instance=obj)
            cls._default_manager.filter(pk=obj.pk).delete()
            if not cls._meta.auto_created:
                models.signals.post_delete.send(sender=cls, instance=obj)
    # Delete other objects.
    collector.delete()
def delete(self, using=None):
    """
    Delete this Translation.

    If other rows share the same string id, only this row is removed
    (without cascade collection); otherwise fall back to the default
    delete so related FKs get set to NULL.
    """
    # FIXME: if the Translation is the one used as default/fallback,
    # then deleting it will mean the corresponding field on the related
    # model will stay empty even if there are translations in other
    # languages!
    cls = self.__class__
    using = using or router.db_for_write(cls, instance=self)
    # Look for all translations for the same string (id=self.id) except the
    # current one (autoid=self.autoid).
    qs = cls.objects.filter(id=self.id).exclude(autoid=self.autoid)
    if qs.using(using).exists():
        # If other Translations for the same id exist, we just need to
        # delete this one and *only* this one, without letting Django
        # collect dependencies (it'd remove the others, which we want to
        # keep).
        assert self._get_pk_val() is not None
        collector = Collector(using=using)
        collector.collect([self], collect_related=False)
        # In addition, because we have FK pointing to a non-unique column,
        # we need to force MySQL to ignore constraints because it's dumb
        # and would otherwise complain even if there are remaining rows
        # that matches the FK.
        with connections[using].constraint_checks_disabled():
            collector.delete()
    else:
        # If no other Translations with that id exist, then we should let
        # django behave normally. It should find the related model and set
        # the FKs to NULL.
        return super(Translation, self).delete(using=using)
def delete(self, *args, **kwargs): ''' Overides the standard model delete method; sets "effective_to" as the current date and time and then calls save() instead. ''' # see django.db.models.deletion.Collection.delete using = kwargs.get('using', router.db_for_write(self.__class__, instance=self)) cannot_be_deleted_assert = ("""%s object can't be deleted because its %s attribute is set to None.""" % (self._meta.object_name, self._meta.pk.attname)) assert self._get_pk_val() is not None, cannot_be_deleted_assert collector = Collector(using=using) collector.collect([self]) collector.sort() # send pre_delete signals def delete(collector): for model, obj in collector.instances_with_model(): if not model._meta.auto_created: signals.pre_delete.send( sender=model, instance=obj, using=using ) # be compatible with django 1.4.x if hasattr(collector, 'fast_deletes'): # fast deletes for qs in collector.fast_deletes: for instance in qs: self._delete(instance) # delete batches # be compatible with django>=1.6 if hasattr(collector, 'batches'): for model, batches in six.iteritems(collector.batches): for field, instances in six.iteritems(batches): for instance in instances: self._delete(instance) # "delete" instances for model, instances in six.iteritems(collector.data): for instance in instances: self._delete(instance) # send post_delete signals for model, obj in collector.instances_with_model(): if not model._meta.auto_created: signals.post_delete.send( sender=model, instance=obj, using=using ) # another django>=1.6 thing try: from django.db.transaction import commit_on_success_unless_managed except ImportError: delete(collector) else: commit_on_success_unless_managed(using=using)(delete(collector))
def copy_dataset(dataset, owner, versions=None, copy_files=False):
    """
    Copy selected versions of `dataset` (and its investigations/links)
    to `owner`, reusing an existing "<name> (copy)" data set owned by
    that user when one exists. Returns the copy.
    """
    logger.info("logging from copy_dataset")
    if versions is None:  # fix: identity comparison, not `== None`
        versions = [dataset.get_version()]
    # count total number of objects to copy
    items_to_copy = 1
    collector = Collector("default")
    for version in versions:
        collector.collect([dataset.get_investigation(version)])
    for key in collector.data.keys():
        items_to_copy = items_to_copy + len(collector.data[key])
    # check to see if dataset already exists for provided user
    dataset_copy = None
    data_sets = DataSet.objects.filter(name="%s (copy)" % dataset.name)
    for data_set in data_sets:
        # Fix: replaced a leftover Python-2 `print data_set` debug
        # statement (a syntax error on Python 3) with proper logging.
        logger.debug("copy_dataset: existing copy candidate %s", data_set)
        if data_set.get_owner() == owner:
            dataset_copy = data_set
    # if after checking all datasets there one with this name owned by
    # the given user, create new dataset
    if dataset_copy is None:
        # create new dataset with copied information
        dataset_copy = DataSet.objects.create(
            name="%s (copy)" % dataset.name,
            summary=dataset.summary,
            description=dataset.description,
            slug=dataset.slug
        )
        # set the owner to the provided user
        dataset_copy.set_owner(owner)
        dataset_copy.save()
        logger.info("copy_dataset: Created data set %s" % dataset_copy.name)
    # make copies of investigations and their links and assign to newly
    # created dataset
    for version in versions:
        inv = dataset.get_investigation(version)
        node_collection = copy_object(inv, copy_files=copy_files)
        node_collection.save()
        # find the corresponding Investigation object
        try:
            inv = Investigation.objects.get(uuid=node_collection.uuid)
        except Investigation.DoesNotExist:
            inv = Study.objects.get(uuid=node_collection.uuid).investigation
        # use the Investigation object to grab its InvestigationLink object
        il = InvestigationLink.objects.get(investigation=inv)
        il.data_set = dataset_copy
        il.save()
        # annotate the investigation
        annotate_nodes(inv.uuid)
    # calculate total number of files and total number of bytes
    dataset_copy.file_size = dataset.get_file_size()
    dataset_copy.file_count = dataset.get_file_count()
    dataset_copy.save()
    return dataset_copy
def copy_dataset(dataset, owner, versions=None, copy_files=False):
    """
    Duplicate `dataset` for `owner`: copies the requested versions'
    investigations, rewires their InvestigationLinks to a new (or
    reused) "<name> (copy)" DataSet, and returns that data set.
    """
    logger.info("logging from copy_dataset")
    if versions is None:  # fix: identity comparison, not `== None`
        versions = [dataset.get_version()]
    # count total number of objects to copy
    items_to_copy = 1
    collector = Collector("default")
    for version in versions:
        collector.collect([dataset.get_investigation(version)])
    for key in collector.data.keys():
        items_to_copy = items_to_copy + len(collector.data[key])
    # check to see if dataset already exists for provided user
    dataset_copy = None
    data_sets = DataSet.objects.filter(name="%s (copy)" % dataset.name)
    for data_set in data_sets:
        # Fix: replaced a Python-2 `print data_set` debug statement
        # (syntax error on Python 3) with proper logging.
        logger.debug("copy_dataset: existing copy candidate %s", data_set)
        if data_set.get_owner() == owner:
            dataset_copy = data_set
    # if after checking all datasets there one with this name owned by
    # the given user, create new dataset
    if dataset_copy is None:
        # create new dataset with copied information
        dataset_copy = DataSet.objects.create(
            name="%s (copy)" % dataset.name,
            summary=dataset.summary,
            description=dataset.description,
            slug=dataset.slug)
        # set the owner to the provided user
        dataset_copy.set_owner(owner)
        dataset_copy.save()
        logger.info("copy_dataset: Created data set %s" % dataset_copy.name)
    # make copies of investigations and their links and assign to newly
    # created dataset
    for version in versions:
        inv = dataset.get_investigation(version)
        node_collection = copy_object(inv, copy_files=copy_files)
        node_collection.save()
        # find the corresponding Investigation object
        try:
            inv = Investigation.objects.get(uuid=node_collection.uuid)
        except Investigation.DoesNotExist:
            inv = Study.objects.get(uuid=node_collection.uuid).investigation
        # use the Investigation object to grab its InvestigationLink object
        il = InvestigationLink.objects.get(investigation=inv)
        il.data_set = dataset_copy
        il.save()
        # annotate the investigation
        annotate_nodes(inv.uuid)
    # calculate total number of files and total number of bytes
    dataset_copy.file_size = dataset.get_file_size()
    dataset_copy.file_count = dataset.get_file_count()
    dataset_copy.save()
    return dataset_copy
def delete_blockers(request, model, id):
    """Return a JSON list of "Model.field" names that block deleting the object."""
    obj = api.get_object_or_404(model, id=int(id))
    collector = Collector(
        using=router.db_for_write(obj.__class__, instance=obj))
    # Record blocking relations instead of raising during collect().
    collector.delete_blockers = []
    collector.collect([obj])
    result = [
        "%s.%s" % (sub_objs[0].__class__.__name__, field.name)
        for field, sub_objs in collector.delete_blockers
    ]
    return http.HttpResponse(api.toJson(result),
                             content_type='application/json')
def duplicate(obj, value=None, field=None, duplicate_order=None):  # pylint: disable=R0914
    """
    Duplicate all related objects of obj setting field to value. If one
    of the duplicate objects has an FK to another duplicate object
    update that as well. Return the duplicate copy of obj.

    duplicate_order is a list of models which specify how the duplicate
    objects are saved. For complex objects this can matter. Check to
    save if objects are being saved correctly and if not just pass in
    related objects in the order that they should be saved.
    """
    collector = Collector({})
    collector.collect([obj])
    collector.sort()
    related_models = collector.data.keys()
    # Snapshot pk -> instance per collected model, so FKs can be
    # re-pointed at the duplicated rows below.
    data_snapshot = {}
    for key in collector.data.keys():
        data_snapshot.update({key: dict(zip([item.pk for item in collector.data[key]],
                                            [item for item in collector.data[key]]))})
    root_obj = None

    # Sometimes it's good enough just to save in reverse deletion order.
    if duplicate_order is None:
        duplicate_order = reversed(related_models)

    for model in duplicate_order:
        # Find all FKs on model that point to a related_model.
        fks = []
        for f in model._meta.fields:
            if isinstance(f, ForeignKey) and f.rel.to in related_models:
                fks.append(f)
        # Replace each `sub_obj` with a duplicate.
        if model not in collector.data:
            continue
        # Fix: loop variable renamed from `obj`, which shadowed the
        # function parameter of the same name.
        for sub_obj in collector.data[model]:
            for fk in fks:
                fk_value = getattr(sub_obj, "%s_id" % fk.name)
                # If this FK has been duplicated then point to the duplicate.
                fk_rel_to = data_snapshot[fk.rel.to]
                if fk_value in fk_rel_to:
                    dupe_obj = fk_rel_to[fk_value]
                    setattr(sub_obj, fk.name, dupe_obj)
            # Duplicate the object and save it.
            sub_obj.id = None
            # Fix: `field is None or field != 'slug'` simplified — when
            # field is None, `field != 'slug'` is already True.
            if field != 'slug':
                sub_obj.slug = sub_obj.slug + '-copy'
            if field is not None:
                setattr(sub_obj, field, value)
            sub_obj.save()
            if root_obj is None:
                root_obj = sub_obj
    return root_obj
def _get_deleted_objects(self):
    """Collect and return the delete cascade for self.object (or self.objects)."""
    # NOTE(review): db_for_write usually takes a model class; here the
    # instance itself is passed — confirm the router tolerates this.
    collector = Collector(using=router.db_for_write(self.object))
    targets = [self.object]
    if hasattr(self, 'ids'):
        # Bulk mode: start empty, fill from self.objects below.
        targets = list()
    if hasattr(self, 'objects'):
        for candidate in self.objects:
            targets.append(candidate)
    collector.collect(targets)
    return collector.data
def can_delete(self):
    """ Selects which fields of the base model can be deleted """
    # Unsaved instances have nothing to check.
    if not self._get_pk_val():
        return
    collector = Collector(
        router.db_for_write(self.__class__, instance=self))
    collector.collect([self])
    # More than one entry in the cascade means related objects would be
    # deleted along with this one.
    if len(collector.data) > 1:
        raise ValidationError("Sorry, cannot be deleted.")
def related_objs_ids_cascade_deleted(self):
    """
    Return the instances a cascade delete of this record would remove.
    (Despite the name, full objects are returned rather than just ids.)
    """
    from django.db import router
    from django.db.models.deletion import Collector
    collector = Collector(
        using=router.db_for_write(self.__class__, instance=self))
    collector.collect([self], keep_parents=False)
    return [instance
            for instances in collector.data.values()
            for instance in instances]
def delete_complete(self, using=None, keep_parents=False):
    """Full cascading delete, mirroring Django's stock Model.delete()."""
    if using is None:
        using = router.db_for_write(self.__class__, instance=self)
    assert self._get_pk_val() is not None, (
        "%s object can't be deleted because its %s attribute is set to None." %
        (self._meta.object_name, self._meta.pk.attname)
    )
    cascade = Collector(using=using)
    cascade.collect([self], keep_parents=keep_parents)
    return cascade.delete()
def get_cascades(self, user):
    """Return the model classes (other than User) that hold data for `user`."""
    cascade = Collector(user._state.db)
    cascade.collect([user])
    # figure out which models this user has data in
    found = set(cascade.data.keys())
    found.update(qs.model for qs in cascade.fast_deletes if qs.exists())
    # but don't mention they have a User, that's obvious:
    found.discard(User)
    return found
def _collect_related(self, using=None, keep_parents=False):
    """
    Collect the delete cascade for this instance.

    Returns a dict with the collector's ``instances_with_model()``
    iterator, the non-empty fast-delete querysets, and the raw
    ``collector.data`` mapping.
    """
    collector = Collector(using=using)
    collector.collect([self], keep_parents=keep_parents)
    # Fix: .exists() issues a cheap LIMIT-1 query instead of the full
    # COUNT aggregate the original `count() > 0` ran per queryset.
    fast_deletes = [qs for qs in collector.fast_deletes if qs.exists()]
    return dict(instances_with_model=collector.instances_with_model(),
                fast_deletes=fast_deletes,
                data=collector.data)
def delete_child_instances(instance):
    """
    Collect `instance` with its related objects, translating a PROTECT
    violation into a ValidationError suitable for forms/APIs.

    Raises:
        ValidationError: when a protected relation prevents deletion.
    """
    try:
        collector = Collector(using='default')
        collector.collect(objs=[instance], collect_related=True)
    except ProtectedError as error:
        # Fix: corrected the user-facing grammar ("cannot deleted" ->
        # "cannot delete") and chained the original exception so the
        # protected objects remain inspectable via __cause__.
        raise ValidationError({
            "Error": [
                "cannot delete the record since there are"
                " other records that depend on it"
            ]
        }) from error
def handle_eighth_sponsor_deletion(in_obj, eighth_sponsor):
    """Delete sponsor instances in the cascade; reassign everything else to a placeholder staff user."""
    teststaff, _ = get_user_model().objects.get_or_create(id=7011)
    collector = Collector(using="default")
    collector.collect([in_obj])
    for _model, instance in collector.instances_with_model():
        if isinstance(instance, eighth_sponsor):
            instance.delete()
        else:
            instance.user = teststaff
            instance.save()
def get_context_data(self, **kwargs):
    """Expose the delete cascade as (model name, instance) pairs for the template."""
    context = super(DynamicDeleteView, self).get_context_data(**kwargs)
    collector = Collector(using='default')  # or specific database
    collector.collect([self.object])
    context['to_delete_list'] = [
        (model.__name__, instance)
        for model, instance in collector.instances_with_model()
    ]
    return context
def delete(self, using=None):
    """Delete this instance; refuses for materialized or intermediate views."""
    meta = self.__class__._meta
    if meta.materialized_view or (meta.intermediate and not meta.concrete):
        # NOTE(review): the two adjacent literals concatenate without a
        # space ("...can't besaved..."); preserved as-is here.
        raise NonPersistantModel(
            'Materialized views and intermediate views can\'t be'
            'saved or deleted')
    db_alias = using or router.db_for_write(self.__class__, instance=self)
    assert self._get_pk_val() is not None, "%s object can't be deleted because its %s attribute is set to None." % (self._meta.object_name, self._meta.pk.attname)
    cascade = Collector(using=db_alias)
    cascade.collect([self])
    cascade.delete()
def handle_eighth_sponsor_deletion(in_obj, eighth_sponsor):
    """Delete actual sponsor objects in the cascade; point everything else at a test staff user."""
    teststaff = User.get_user(id=7011)
    collector = Collector(using="default")
    collector.collect([in_obj])
    for _model, instance in collector.instances_with_model():
        if isinstance(instance, eighth_sponsor):
            instance.delete()
        else:
            instance.user = teststaff
            instance.save()
def related_classes(instance):
    """
    Return the model classes Django's cascade machinery would delete
    along with *instance*.
    """
    cascade = Collector(using=instance._state.db)
    cascade.collect([instance])
    # Stash the collected instances on the object for later referencing.
    instance._collected_objects = cascade.data
    return cascade.data.keys()
def export_delete_tree(modeladmin, request, queryset):
    """
    Export as fixture selected queryset and all the records that belong to.
    That mean that dump what will be deleted if the queryset was deleted
    """
    # Permission gate: only users allowed to export may run the action.
    if not request.user.has_perm('adminactions.export'):
        messages.error(request, _('Sorry you do not have rights to execute this action'))
        return
    # Defaults used to pre-populate the options form on first display.
    initial = {'_selected_action': request.POST.getlist(helpers.ACTION_CHECKBOX_NAME),
               'select_across': request.POST.get('select_across') == '1',
               'action': request.POST.get('action'),
               'serializer': 'json',
               'indent': 4}
    if 'apply' in request.POST:
        # Second round-trip: the user submitted the options form.
        form = FixtureOptions(request.POST)
        if form.is_valid():
            try:
                collect_related = form.cleaned_data.get('add_foreign_keys')
                using = router.db_for_write(modeladmin.model)
                # Collect the full delete cascade of the selected rows...
                c = Collector(using)
                c.collect(queryset, collect_related=collect_related)
                data = []
                for model, instances in c.data.items():
                    data.extend(instances)
                # ...and serialize it with the options chosen in the form.
                return _dump_qs(form, queryset, data)
            except AttributeError as e:
                messages.error(request, str(e))
                return HttpResponseRedirect(request.path)
    else:
        # First display: render the form with defaults.
        form = FixtureOptions(initial=initial)
    adminForm = helpers.AdminForm(form, modeladmin.get_fieldsets(request), {}, model_admin=modeladmin)
    media = modeladmin.media + adminForm.media
    tpl = 'adminactions/export_fixture.html'
    ctx = {'adminform': adminForm,
           'change': True,
           'title': _('Export Delete Tree'),
           'is_popup': False,
           'save_as': False,
           'has_delete_permission': False,
           'has_add_permission': False,
           'has_change_permission': True,
           'queryset': queryset,
           'opts': queryset.model._meta,
           'app_label': queryset.model._meta.app_label,
           'media': mark_safe(media)}
    return render_to_response(tpl, RequestContext(request, ctx))
def delete(self, *args, **kwargs):
    '''
    Replacement for super(BaseModel, self).delete().

    Cause: when deleting relationships in cascade, the default path has
    no User attribute available for the Log.
    '''
    db_alias = router.db_for_write(self.__class__, instance=self)
    assert self._get_pk_val() is not None, "%s object can't be deleted because its %s attribute is set to None." % (
        self._meta.object_name, self._meta.pk.attname)
    cascade = Collector(using=db_alias)
    cascade.collect([self])
    cascade.delete()
def get_related_models(queryset):
    """
    For each object in `queryset`, map its id to a dict of
    {model name: [related instances]} for everything the delete
    cascade would touch.
    """
    related_objects = defaultdict(dict)
    for main_object in queryset:
        collector = Collector(using='default')  # or specific database
        collector.collect([main_object])
        main_key = main_object.id
        for model, instance in collector.instances_with_model():
            # Fix: setdefault replaces the original
            # try-append/except-KeyError-create dance — one idiomatic
            # lookup, identical result.
            related_objects[main_key].setdefault(
                model.__name__, []).append(instance)
    return related_objects
def related_classes(instance):
    """
    Return all classes which would be deleted if the passed instance
    were deleted too by employing the cascade machinery of Django
    itself. Does **not** return instances, only classes.
    """
    cascade = Collector(using=instance._state.db)
    cascade.collect([instance])
    # Save collected objects for later referencing (well yes, it does
    # return instances but we don't have to tell anybody :-)
    instance._collected_objects = cascade.data
    return cascade.data.keys()
def modal_delete(request, s, *args, **kwargs):
    """Render the delete-confirmation modal with a per-model cascade breakdown."""
    server = Server.objects.get(id=s)
    collector = Collector(using='default')
    collector.collect([server])

    breakdown = {}
    total = 0
    for model, instances in collector.data.items():
        count = len(instances)
        total += count
        # Singular label for exactly one instance, plural otherwise.
        label = (model._meta.verbose_name if count == 1
                 else model._meta.verbose_name_plural)
        breakdown[label] = count

    return render(request, 'components/servers/detailed/modals/delete.pug',
                  {'estimate': total, 'breakdown': breakdown})
def get_context_data(self, **kwargs):
    """Add the full delete cascade, as (model name, instance) pairs, to the context."""
    context = super(DynamicDeleteView, self).get_context_data(**kwargs)
    collector = Collector(using='default')  # or specific database
    collector.collect([self.object])
    pairs = []
    for model, instance in collector.instances_with_model():
        pairs.append((model.__name__, instance))
    context['to_delete_list'] = pairs
    return context
def collect_and_remove(objs, using):
    """Soft-delete `objs` and every SoftDeletableModel row in their cascade by stamping `removed`."""
    collector = Collector(using=using)
    with transaction.atomic():
        now = timezone.now()
        collector.collect(objs)
        for qs in collector.fast_deletes:
            if issubclass(qs.model, SoftDeletableModel):
                # only touch related objects which aren't already softdeleted
                # to prevent clobbering the removed date on already removed objects
                qs.filter(removed__isnull=True).update(removed=now)
        for model, instances in collector.data.items():
            if not issubclass(model, SoftDeletableModel):
                continue
            pks = [obj.pk for obj in instances]
            base_qs = QuerySet(model=model, using=collector.using)
            base_qs.filter(pk__in=pks).filter(
                removed__isnull=True).update(removed=now)
def delete(self, using=None, keep_parents=False, hard_delete=False, *args, **kwargs):
    """
    Delete this instance; when ``hard_delete`` is set, first flag every
    collected SyncableModel / MorangoMPTTModel instance as hard-deleted.
    """
    if using is None:
        using = router.db_for_write(self.__class__, instance=self)
    assert self._get_pk_val() is not None, (
        "%s object can't be deleted because its %s attribute is set to None." %
        (self._meta.object_name, self._meta.pk.attname)
    )
    collector = Collector(using=using)
    collector.collect([self], keep_parents=keep_parents)
    with transaction.atomic():
        if hard_delete:
            # set hard deletion for all related models
            for model, instances in six.iteritems(collector.data):
                if issubclass(model, (SyncableModel, MorangoMPTTModel)):
                    for obj in instances:
                        obj._update_hard_deleted_models()
        return collector.delete()
def soft_delete(self, *args, **kwargs):
    """
    Soft-delete the collected data: SoftDeletableModel instances are
    marked inactive and saved; anything else is hard-deleted through a
    fresh Collector. When nothing was collected normally, fast-delete
    querysets are bulk-flagged inactive instead.
    """
    if self.data:  # fix: truthiness instead of `len(...) > 0`
        # normal deletes
        # Fix: .items() replaces the Python-2-only .iteritems(), which
        # raises AttributeError on Python 3.
        for model, instances in self.data.items():
            if issubclass(model, SoftDeletableModel):
                for instance in instances:
                    instance.is_active = False
                    instance.save()
            else:
                collector = Collector(using=None)  # use the default db
                collector.collect(instances)
                collector.delete()
    else:
        # fast deletes
        for qs in self.fast_deletes:
            qs.update(is_active=False)
def get_related_objects(self, using=None):
    """
    Return, per dependent model class, the records that depend on this
    one, as the items of the collector's data mapping.

    The historical docstring described the result as
    ``[(ModelClass, {pk: instance, ...}), ...]``; NOTE(review): modern
    Collector.data values are sets of instances rather than pk-keyed
    dicts — confirm against the Django version in use.

    .. deprecated:: pulls a lot of data; can hang the server!
    """
    if using is None:
        using = router.db_for_write(self.__class__, instance=self)
    cascade = Collector(using=using)
    cascade.collect([self])
    return cascade.data.items()
def clean_upper_level(self):
    """
    Check upper level active locations with no reference
    - delete if is leaf
    - deactivate if all children are inactive (doesn't exist an active child)
    """
    # Fix: use the module logger consistently — the original mixed the
    # root logger (`logging.info`) with the named `logger` used below.
    logger.info('Clean upper level')
    qs = get_location_model().objects.filter(
        admin_level=self.carto.admin_level - 1,
        is_active=False)
    for location in qs:
        collector = Collector(using='default')
        collector.collect([location])
        if not collector.dependencies:
            # NOTE(review): only the leaf-delete branch is implemented;
            # the "deactivate" case from the docstring is absent here.
            if location.is_leaf_node():
                location.delete()
                logger.info(f'Deleting parent {location}')
def dump(qs):
    """
    Walk `qs` and every auto-created reverse relation reachable from it.

    Returns (class_names, instances): the distinct class names seen and
    the de-duplicated instances, skipping Log and RegistroDiferenca.
    """
    collected = []
    class_names = []
    collector = Collector('default')

    def visit(instance):
        # Depth-first walk over auto-created one-to-many/one-to-one links.
        if instance.__class__.__name__ in ('Log', 'RegistroDiferenca'):
            return
        if instance in collected:
            return
        collected.append(instance)
        if instance.__class__.__name__ not in class_names:
            class_names.append(instance.__class__.__name__)
        related_fields = [
            f for f in instance.__class__._meta.get_fields(include_hidden=True)
            if (f.one_to_many or f.one_to_one) and f.auto_created and not f.concrete
        ]
        source_qs = instance.__class__.objects.filter(pk=instance.pk)
        for related_field in related_fields:
            for obj in collector.related_objects(related_field, source_qs):
                if obj not in collected:
                    visit(obj)

    for instance in qs:
        print('Loading', instance.pk, instance)
        visit(instance)
    return class_names, collected
def delete(self, using=None, keep_parents=False):
    """Delete this instance, feeding the collector a queryset instead of
    a bare instance list.

    MEMO: Collector exposes no hook to change its base query, so rather
    than ``collector.collect([self], ...)`` we hand it a queryset built
    by the model's default manager (a CPkQuerySet), which the collector
    then uses for cascade discovery.
    """
    db_alias = using or router.db_for_write(self.__class__, instance=self)
    assert self.pk is not None, (
        "%s object can't be deleted because its %s attribute is set to None."
        % (self._meta.object_name, self._meta.pk.attname))
    collector = Collector(using=db_alias)
    base_qs = self._meta.model.objects.filter(pk=self.pk)
    collector.collect(base_qs, keep_parents=keep_parents)
    return collector.delete()
def collect_related_instanses(entity, exclude_models=None):
    """Gather every instance Django would cascade-delete with *entity*.

    Fast deletion is disabled so the collector enumerates each related
    object individually. Returns ``(entities, models)``: the flat list
    of instances and the set of model classes involved, excluding any
    class listed in *exclude_models*.
    """
    exclude_models = exclude_models or []
    db_alias = router.db_for_write(entity.__class__, instance=entity)
    collector = Collector(using=db_alias)
    # Force full collection -- fast deletes would hide related rows.
    collector.can_fast_delete = lambda *args, **kw: False
    collector.collect([entity])
    found_instances = []
    found_models = set()
    for model, instances in collector.data.items():
        if model in exclude_models:
            continue
        found_instances.extend(instances)
        found_models.add(model)
    return found_instances, found_models
def restore(self):
    """Undo a soft delete of this object and of everything that was
    soft-deleted together with it.

    Only SoftDeletableModel rows whose ``removed`` timestamp is at or
    after this object's are restored, so related objects deleted
    earlier stay deleted. Runs inside one transaction.
    """
    if not self.removed:
        return
    db_alias = router.db_for_write(self.__class__, instance=self)
    with transaction.atomic():
        collector = Collector(using=db_alias)
        collector.collect([self])
        # Fast-deletable querysets are restored with one bulk update.
        for fast_qs in collector.fast_deletes:
            if issubclass(fast_qs.model, SoftDeletableModel):
                fast_qs.filter(removed__gte=self.removed).update(removed=None)
        # Individually collected instances are restored per model.
        for model, instances in collector.data.items():
            if not issubclass(model, SoftDeletableModel):
                continue
            pk_list = [obj.pk for obj in instances]
            (QuerySet(model=model, using=collector.using)
                .filter(pk__in=pk_list)
                .filter(removed__gte=self.removed)
                .update(removed=None))
        self.refresh_from_db(fields=['removed'])
def delete(self, force=False):
    """
    Deletes the records in the current QuerySet.

    Returns the ``(total, per_model_counts)`` tuple on Django >= 1.9 and
    ``None`` on older versions, mirroring upstream ``QuerySet.delete``.
    """
    assert self.query.can_filter(), \
        "Cannot use 'limit' or 'offset' with delete."

    del_query = self._clone()

    # The delete is actually 2 queries - one to find related objects,
    # and one to delete. Make sure that the discovery of related
    # objects is performed on the same database as the deletion.
    del_query._for_write = True

    # Disable non-supported fields.
    del_query.query.select_for_update = False
    del_query.query.select_related = False
    del_query.query.clear_ordering(force_empty=True)

    collector = Collector(using=del_query.db)
    collector.collect(del_query)
    # On Django < 1.9 Collector.delete() returns None; a single
    # assignment replaces the original's duplicated version branches.
    result = collector.delete(force=force)

    # Clear the result cache, in case this QuerySet gets reused.
    self._result_cache = None
    if django.VERSION >= (1, 9, 0):
        return result
def get_deleted_objects(root):
    """Collect the objects that deleting *root* would cascade to, mapping
    each one to its detail-page URL (``None`` when no route exists)."""
    collector = Collector(using=router.db_for_write(root))
    collector.collect([root])

    def to_detail_url(obj):
        meta = obj._meta
        url_name = '{}:{}_{}_details'.format(
            meta.app_label, meta.app_label, meta.model_name)
        try:
            return reverse(url_name, args=[obj._get_pk_val()])
        except NoReverseMatch:
            return None

    return collector.nested(to_detail_url)
def handle_obsolete_locations(self, to_deactivate):
    """
    Handle obsolete locations:
    - deactivate referenced locations (renamed with today's date suffix)
    - delete non-referenced, childless locations
    """
    # Fixes: typo 'Obsolate' -> 'Obsolete' in the log message, and use
    # the module-level ``logger`` consistently instead of ``logging``.
    logger.info('Clean Obsolete Locations')
    for location in get_location_model().objects.filter(
            p_code__in=to_deactivate):
        collector = Collector(using='default')
        collector.collect([location])
        if collector.dependencies or location.get_children():
            # Still referenced somewhere: keep the row but mark it.
            location.name = f"{location.name} [{datetime.today().strftime('%Y-%m-%d')}]"
            location.is_active = False
            location.save()
            logger.info(f'Deactivating {location}')
        else:
            location.delete()
            logger.info(f'Deleting {location}')
def delete(self, using=None):
    """Delete the source directory on disk, then the object itself
    (override of ``Model.delete``).

    The directory removal is best-effort: a missing or locked directory
    must not block deletion of the database row.
    """
    using = using or router.db_for_write(self.__class__, instance=self)
    assert self._get_pk_val() is not None, "%s object can't be deleted because its %s attribute is set to None." % (
        self._meta.object_name,
        self._meta.pk.attname,
    )
    if self.outputdir:
        try:
            # Remove the output directory (normalize Windows separators).
            shutil.rmtree(
                os.path.join(settings.VIDEO_ROOT, self.outputdir).replace("\\", "/")
            )
        except OSError:
            # Narrowed from a bare ``except:``; filesystem errors are the
            # expected failure here and are deliberately ignored.
            pass
    collector = Collector(using=using)
    collector.collect([self])
    collector.delete()
def related_classes(instance):
    """
    Return all classes which would be deleted if the passed instance
    were deleted too, by employing Django's own cascade machinery.

    Does **not** return instances, only classes.

    Note! When using Django 1.5, autogenerated models (many to many
    through models) are returned too.
    """
    collector = Collector(using=instance._state.db)

    # Fast deletion would skip enumerating related objects, and we
    # absolutely need to know whether any exist.
    def _never_fast(*args, **kwargs):
        return False

    collector.can_fast_delete = _never_fast
    collector.collect([instance])
    # Stash the collected instances for later referencing (yes, it does
    # gather instances -- we just don't advertise it).
    instance._collected_objects = collector.data
    return collector.data.keys()
def delete(self, hard=False, using=None, *args, **kwargs):
    """Delete the record, either for real or by moving it to the trash.

    - already in trash: purged from the database without sending
      pre_delete/post_delete signals
    - ``hard=True``: purged with the delete signals temporarily allowed
    - otherwise: soft-deleted (moved to trash) while "virtual"
      pre_delete/post_delete signals are sent for every object the
      cascade would have touched.

    BUG FIX: the original first condition was
    ``self.version_in_trash or hard``, which made the ``elif hard``
    branch (the one that enables the delete signals) unreachable.
    """
    if self.version_in_trash:
        # Already trashed: remove without pre_delete/post_delete signals.
        self._delete(using=None, *args, **kwargs)
    elif hard:
        # Hard delete with the delete signals allowed around it.
        signal_allow(self, models.signals.pre_delete)
        signal_allow(self, models.signals.post_delete)
        self._delete(using=None, *args, **kwargs)
        signal_allow(self, models.signals.pre_delete, count=0)
        signal_allow(self, models.signals.post_delete, count=0)
    else:
        # Soft delete with "virtual" signals.
        using = using or router.db_for_write(self.__class__, instance=self)
        assert self._get_pk_val() is not None, \
            "%s object can't be deleted because its %s attribute is set to None." % \
            (self._meta.object_name, self._meta.pk.attname)
        collector = Collector(using=using)
        collector.collect([self])

        # send pre_delete signals
        for model, obj in collector.instances_with_model():
            if not model._meta.auto_created:
                models.signals.pre_delete.send(
                    sender=model, instance=obj, using=using
                )

        self.version_in_trash = True
        self.version_date = _get_current_date()
        self._save()

        # send post_delete signals
        for model, obj in collector.instances_with_model():
            if not model._meta.auto_created:
                models.signals.post_delete.send(
                    sender=model, instance=obj, using=using
                )
def get_deleted_objects(root):
    """Collect linked objects required to be deleted, mapping each one
    to its `detail` page link (``None`` when the URL cannot be
    reversed)."""
    using = router.db_for_write(root)
    collector = Collector(using=using)
    collector.collect([root])

    def format_callback(obj):
        opts = obj._meta
        pattern = '{0}:{0}_{1}_details'.format(opts.app_label,
                                               opts.model_name)
        try:
            return reverse(pattern, args=[obj._get_pk_val()])
        except NoReverseMatch:
            return None

    return collector.nested(format_callback)
def duplicate(objs, using_src, using_dest, special_handlers):
    """Copy *objs* and everything cascade-related from one database to
    another, saving foreign-key targets before their dependents.

    Adapted from http://stackoverflow.com/a/6064096/768104

    Fixes over the original:
    - Python 2 ``print`` statement replaced with the function form.
    - ``rel_obj`` is None-checked *before* its class/id are read (the
      original dereferenced it first).
    - ``reversed()`` is applied to a list so it works on dict views too.
    - unused ``as e`` binding dropped.
    """
    collector = Collector(using_src)
    collector.collect(objs)
    collector.sort()
    related_models = collector.data.keys()
    # Save leaf models first so FK targets exist before their dependents.
    duplicate_order = reversed(list(related_models))
    extracted = {}
    for model in duplicate_order:
        # Find all FKs on model that point outside the related set.
        fks = [
            f for f in model._meta.fields
            if isinstance(f, ForeignKey) and f.rel.to not in related_models
        ]
        if model not in collector.data:
            continue
        for obj in collector.data[model]:
            # Save each not-yet-extracted FK target first.
            for fk in fks:
                rel_obj = getattr(obj, fk.name)
                if rel_obj is None:
                    continue
                rel_cls = rel_obj.__class__
                seen = extracted.setdefault(rel_cls, {})
                if rel_obj.id not in seen:
                    seen[rel_obj.id] = True
                    rel_obj.save(using=using_dest)
            # Now ready to insert obj itself.
            seen = extracted.setdefault(model, {})
            if obj is not None and obj.id not in seen:
                seen[obj.id] = True
                try:
                    obj.save(using=using_dest)
                except IntegrityError:
                    # NOTE(review): ``pending_inserts`` is presumably a
                    # module-level list consumed by do_pending_inserts()
                    # -- confirm it exists at module scope.
                    pending_inserts.append(obj)
        print("%s done TOTAL objects written: %s " % (
            model.__name__,
            sum(len(extracted[i]) for i in extracted)))
    do_pending_inserts(using_dest)