def delete(self, using=None):
    """Delete this instance and everything that cascades from it.

    Mirrors Django's ``Model.delete()``: resolves the write database via
    the router when ``using`` is not supplied, collects the cascade, and
    deletes it.
    """
    db_alias = using or router.db_for_write(self.__class__, instance=self)
    assert self._get_pk_val() is not None, (
        "%s object can't be deleted because its %s attribute is set to None."
        % (self._meta.object_name, self._meta.pk.attname)
    )
    cascade = Collector(using=db_alias)
    cascade.collect([self])
    cascade.delete()
def delete_blockers(request, model, id):
    """Return, as a JSON list, the "Model.field" names that block deletion.

    Collects the object's cascade with a Collector onto which a
    ``delete_blockers`` list has been grafted, then reports one entry per
    blocking (field, sub-objects) pair.
    """
    obj = api.get_object_or_404(model, id=int(id))
    db_alias = router.db_for_write(obj.__class__, instance=obj)
    collector = Collector(using=db_alias)
    collector.delete_blockers = []
    collector.collect([obj])
    blockers = []
    for field, sub_objs in collector.delete_blockers:
        blockers.append("%s.%s" % (sub_objs[0].__class__.__name__, field.name))
    return http.HttpResponse(api.toJson(blockers),
                             content_type='application/json')
def duplicate(obj, value=None, field=None, duplicate_order=None):  # pylint: disable=R0914
    """
    Duplicate all related objects of obj setting field to value. If one of
    the duplicate objects has an FK to another duplicate object update that
    as well. Return the duplicate copy of obj.

    duplicate_order is a list of models which specify how the duplicate
    objects are saved. For complex objects this can matter. Check to save
    if objects are being saved correctly and if not just pass in related
    objects in the order that they should be saved.
    """
    # NOTE(review): a dict is passed where Collector expects a `using`
    # alias — presumably tolerated by this Django version; confirm.
    collector = Collector({})
    collector.collect([obj])
    collector.sort()
    related_models = collector.data.keys()
    data_snapshot = {}
    # Snapshot pk -> instance for every collected model so FKs can be
    # re-pointed at the duplicated rows in the loop below.
    for key in collector.data.keys():
        data_snapshot.update({key: dict(zip([item.pk for item in collector.data[key]],
                                            [item for item in collector.data[key]]))})
    root_obj = None
    # Sometimes it's good enough just to save in reverse deletion order.
    if duplicate_order is None:
        duplicate_order = reversed(related_models)
    for model in duplicate_order:
        # Find all FKs on model that point to a related_model.
        fks = []
        for f in model._meta.fields:
            # f.rel.to is the pre-Django-2.0 spelling of f.remote_field.model.
            if isinstance(f, ForeignKey) and f.rel.to in related_models:
                fks.append(f)
        # Replace each `sub_obj` with a duplicate.
        if model not in collector.data:
            continue
        sub_objects = collector.data[model]
        # NOTE(review): this loop variable shadows the `obj` parameter.
        for obj in sub_objects:
            for fk in fks:
                fk_value = getattr(obj, "%s_id" % fk.name)
                # If this FK has been duplicated then point to the duplicate.
                fk_rel_to = data_snapshot[fk.rel.to]
                if fk_value in fk_rel_to:
                    dupe_obj = fk_rel_to[fk_value]
                    setattr(obj, fk.name, dupe_obj)
            # Duplicate the object and save it: clearing the pk makes
            # save() perform an INSERT.
            obj.id = None
            if field is None or field != 'slug':
                # Assumes every duplicated model has a `slug` field —
                # TODO(review): confirm; otherwise this raises AttributeError.
                slug = obj.slug
                obj.slug = slug + '-copy'
            if field is not None:
                setattr(obj, field, value)
            obj.save()
            if root_obj is None:
                root_obj = obj
    return root_obj
def copy_dataset(dataset, owner, versions=None, copy_files=False):
    """Copy `dataset` (named "<name> (copy)") and assign it to `owner`.

    Duplicates the investigations for the given `versions`, re-links their
    InvestigationLink rows to the copy, and recomputes file totals.
    Returns the copied DataSet; if `owner` already has a "<name> (copy)"
    data set, that existing copy is returned unchanged.
    """
    logger.info("logging from copy_dataset")
    if versions is None:
        versions = [dataset.get_version()]
    # count total number of objects to copy
    # NOTE(review): items_to_copy is computed but never used afterwards —
    # presumably left over from a progress-reporting feature; confirm.
    items_to_copy = 1
    collector = Collector("default")
    for version in versions:
        collector.collect([dataset.get_investigation(version)])
    for key in collector.data.keys():
        items_to_copy += len(collector.data[key])
    # check to see if dataset already exists for provided user
    dataset_copy = None
    data_sets = DataSet.objects.filter(name="%s (copy)" % dataset.name)
    for data_set in data_sets:
        logger.debug("DataSet: %s", data_set)
        if data_set.get_owner() == owner:
            dataset_copy = data_set
    # if after checking all datasets there one with this name owned by the
    # given user, create new dataset
    if dataset_copy is None:
        # create new dataset with copied information
        dataset_copy = DataSet.objects.create(name="%s (copy)" % dataset.name,
                                              summary=dataset.summary,
                                              description=dataset.description,
                                              slug=dataset.slug)
        # set the owner to the provided user
        dataset_copy.set_owner(owner)
        dataset_copy.save()
        logger.info("copy_dataset: Created data set %s", dataset_copy.name)
        # make copies of investigations and their links and assign to newly
        # created dataset
        for version in versions:
            inv = dataset.get_investigation(version)
            node_collection = copy_object(inv, copy_files=copy_files)
            node_collection.save()
            # find the corresponding Investigation object
            try:
                inv = Investigation.objects.get(uuid=node_collection.uuid)
            except Investigation.DoesNotExist:
                inv = Study.objects.get(uuid=node_collection.uuid).investigation
            # use the Investigation object to grab its InvestigationLink object
            il = InvestigationLink.objects.get(investigation=inv)
            il.data_set = dataset_copy
            il.save()
            # annotate the investigation
            annotate_nodes(inv.uuid)
        # calculate total number of files and total number of bytes
        dataset_copy.file_size = dataset.get_file_size()
        dataset_copy.file_count = dataset.get_file_count()
        dataset_copy.save()
    return dataset_copy
def can_delete(self):
    """Selects which fields of the base model can be deleted.

    Raises ValidationError when deleting this saved instance would cascade
    into more than one model.
    """
    if not self._get_pk_val():
        return
    collector = Collector(
        router.db_for_write(self.__class__, instance=self))
    collector.collect([self])
    if len(collector.data) > 1:
        raise ValidationError("Sorry, cannot be deleted.")
def get_cascades(self, user):
    """Return the set of model classes in which `user` has cascading data.

    Includes models reached via fast deletes that actually contain rows;
    User itself is excluded since the user obviously has a User row.
    """
    cascade = Collector(user._state.db)
    cascade.collect([user])
    models_with_data = set(cascade.data.keys())
    for queryset in cascade.fast_deletes:
        if queryset.exists():
            models_with_data.add(queryset.model)
    models_with_data.discard(User)
    return models_with_data
def _get_deleted_objects(self):
    """Collect and return everything that would be deleted.

    Targets are `self.object` by default; when the view carries `ids`
    the single object is dropped, and any `self.objects` are appended.
    """
    collector = Collector(using=router.db_for_write(self.object))
    targets = [self.object]
    if hasattr(self, 'ids'):
        targets = []
    if hasattr(self, 'objects'):
        targets.extend(self.objects)
    collector.collect(targets)
    return collector.data
def related_objs_ids_cascade_deleted(self):
    """Return the related objects that deleting self would cascade-delete.

    NOTE(review): despite the name, this returns model *instances* taken
    from collector.data, not primary keys — confirm whether callers expect
    ids (obj.pk) instead.
    """
    from django.db import router
    from django.db.models.deletion import Collector
    using = router.db_for_write(self.__class__, instance=self)
    collector = Collector(using=using)
    collector.collect([self], keep_parents=False)
    # Flatten the model -> instances mapping into a single list.
    deleted_obj_ids = [obj for v in collector.data.values() for obj in v]
    return deleted_obj_ids
def delete_child_instances(instance):
    """Validate that `instance` can be cascade-deleted.

    Collects the instance and its related objects; if any related object is
    protected (``on_delete=PROTECT``), re-raise as a ``ValidationError`` so
    the API returns a clean error instead of a database-layer exception.

    Raises:
        ValidationError: when protected records depend on `instance`.
    """
    try:
        collector = Collector(using='default')
        collector.collect(objs=[instance], collect_related=True)
    except ProtectedError as error:
        # Fix: the original message read "cannot deleted the record";
        # also chain the ProtectedError so the blocking objects remain
        # visible when debugging.
        raise ValidationError({
            "Error": [
                "cannot delete the record since there are"
                " other records that depend on it"
            ]
        }) from error
def _collect_related(self, using=None, keep_parents=False):
    """Collect everything that deleting self would touch.

    Returns a dict with the collector's ``instances_with_model`` iterator,
    the fast-delete querysets that actually contain rows, and the collected
    data mapping.
    """
    collector = Collector(using=using)
    collector.collect([self], keep_parents=keep_parents)
    non_empty = [qs for qs in collector.fast_deletes if qs.count() > 0]
    return {
        'instances_with_model': collector.instances_with_model(),
        'fast_deletes': non_empty,
        'data': collector.data,
    }
def delete(self, **kwargs):
    """Deletes fields from base model.

    Collects the cascade, runs the `can_delete` validation hook, then
    performs the deletion.
    """
    assert self._get_pk_val() is not None, (
        "Object %s cannot be deleted because %s is null."
        % (self._meta.object_name, self._meta.pk.attname)
    )
    collector = Collector(
        router.db_for_write(self.__class__, instance=self))
    collector.collect([self])
    self.can_delete()
    collector.delete()
def handle_eighth_sponsor_deletion(in_obj, eighth_sponsor):
    """Re-point cascading objects at a placeholder staff user.

    Every object that would cascade from deleting `in_obj` gets its `user`
    replaced with the placeholder (id 7011), except instances of
    `eighth_sponsor`, which are deleted outright.
    """
    teststaff, _ = get_user_model().objects.get_or_create(id=7011)
    collector = Collector(using="default")
    collector.collect([in_obj])
    for _model, instance in collector.instances_with_model():
        if isinstance(instance, eighth_sponsor):
            instance.delete()
        else:
            instance.user = teststaff
            instance.save()
def handle(self, *args, **options):
    """Report people with duplicate post-election memberships and, with
    --fix, delete duplicates that have no other dependencies.

    NOTE(review): ``collector.dependencies.items()[0]`` subscripts the
    result of dict.items() — that only works on Python 2, where items()
    returns a list; confirm the target runtime before reuse.
    """
    qs = Person.objects.all()
    # Annotate each person with how many memberships share the same
    # post_election; keep only those with more than one.
    qs = qs.values('pk', 'memberships__extra__post_election').annotate(
        pees=Count('memberships__extra__post_election')).filter(pees__gt=1)
    for row in qs:
        print("{} has {} duplicate post elections attached".format(
            row['pk'], row['pees']))
        if options['fix']:
            with transaction.atomic():
                duplicate_pee_pk = row['memberships__extra__post_election']
                membership = Membership.objects.filter(
                    extra__post_election=duplicate_pee_pk,
                    person_id=row['pk'],
                ).first()
                membership_collector = Collector(using='default')
                membership_extra_collector = Collector(using='default')
                membership_collector.collect([membership])
                membership_extra_collector.collect([membership.extra])
                # Dependencies other than the MembershipExtra row itself
                # (which we intend to delete) count as blockers.
                clean_membership_dependencies = [
                    o for o in membership_collector.dependencies.items()[0]
                    [1] if not issubclass(o, MembershipExtra)
                ]
                dependencies = any((
                    clean_membership_dependencies,
                    membership_extra_collector.dependencies,
                ))
                if not dependencies:
                    membership.extra.delete()
                    membership.delete()
                    print("Deleted a duplicate membership for {}".format(
                        row['pk']))
                else:
                    print("Not deleting because of dependencies:")
                    print(membership_collector.dependencies)
                    print(membership_extra_collector.dependencies)
def related_classes(instance):
    """
    Return all classes which would be deleted if the passed instance were
    deleted too by employing the cascade machinery of Django itself.
    """
    cascade = Collector(using=instance._state.db)
    cascade.collect([instance])
    # Stash the collected objects on the instance for later referencing.
    instance._collected_objects = cascade.data
    return cascade.data.keys()
def delete(self, *args, **kwargs):
    """Delete this instance through an explicit Collector.

    Replaces ``super(BaseModel, self).delete()`` because the default
    cascade deletion path lacks the User attribute needed for logging.
    """
    db_alias = router.db_for_write(self.__class__, instance=self)
    assert self._get_pk_val() is not None, "%s object can't be deleted because its %s attribute is set to None." % (
        self._meta.object_name, self._meta.pk.attname)
    cascade = Collector(using=db_alias)
    cascade.collect([self])
    cascade.delete()
def get_related_models(queryset):
    """Group each object's cascade by model name.

    Args:
        queryset: iterable of model instances to inspect.

    Returns:
        defaultdict: {main_object.id: {ModelName: [instances, ...]}} —
        every instance that would be collected when deleting the object,
        keyed by the object's id and then by model class name.
    """
    related_objects = defaultdict(dict)
    for main_object in queryset:
        collector = Collector(using='default')  # or specific database
        collector.collect([main_object])
        grouped = related_objects[main_object.id]
        for model, instance in collector.instances_with_model():
            # setdefault replaces the original try/append-except-KeyError
            # dance: one lookup, no exception-driven control flow.
            grouped.setdefault(model.__name__, []).append(instance)
    return related_objects
def delete(self, using=None, keep_parents=False):
    """Delete the event together with its unsent reminders and pizza event.

    Unsent registration/start reminders and a linked pizza event are added
    to the collector so they are removed in the same pass.
    """
    db_alias = using or router.db_for_write(self.__class__, instance=self)
    collector = Collector(using=db_alias)
    collector.collect([self], keep_parents=keep_parents)
    reminder = self.registration_reminder
    if reminder is not None and not reminder.sent:
        collector.add([reminder])
    if self.start_reminder is not None and not self.start_reminder.sent:
        collector.add([self.start_reminder])
    if self.is_pizza_event():
        collector.add([self.pizzaevent])
    return collector.delete()
def handle(self, *args, **options):
    """Purge NotifySend/TaskReport rows older than --days, then optionally
    VACUUM their tables.

    Refuses to delete less than 7 days of history without --force and less
    than 1 day unconditionally.
    """
    verbosity = int(options.get("verbosity", "1"))
    levels = (logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG)
    if "verbosity" in options:
        logging.basicConfig(format='%(message)s', level=levels[verbosity])
    if options['days'] < 7 and not options['force']:
        logger.error('Delete less than one week not allowed, try using --force')
        return
    if options['days'] < 1:
        logger.error('Delete less than one day not allowed at all')
        return
    start_date = timezone.now() - timedelta(days=options['days'])
    logger.info("Purge old records since {}".format(start_date))
    for model in (NotifySend, TaskReport):
        qs = model.objects.filter(created__lt=start_date)
        logger.info("Delete {} records in {}".format(qs.count(),
                                                     model._meta.db_table))
        # BUG FIX: build a fresh Collector per model. The original reused
        # one collector across iterations, so the second delete() call
        # re-processed the rows already collected and deleted in the
        # first pass (Collector.data is never cleared by delete()).
        collector = Collector(using=DEFAULT_DB_ALIAS)
        collector.collect(qs)
        collector.delete()
    logger.info("Done purging old records.")
    if not options['vacuum']:
        return
    sql = "VACUUM"
    if options['full']:
        sql += " FULL"
    # VACUUM cannot run inside a transaction, so drop to isolation level 0
    # on the raw psycopg2 connection; force_proxy ensures the connection
    # is established.
    force_proxy = connection.cursor()
    realconn = connection.connection
    old_isolation_level = realconn.isolation_level
    realconn.set_isolation_level(0)
    cursor = realconn.cursor()
    for model in (NotifySend, TaskReport):
        sql_command = "{} {}".format(sql, model._meta.db_table)
        logger.info("Run {}".format(sql_command))
        cursor.execute(sql_command)
    realconn.set_isolation_level(old_isolation_level)
    del force_proxy
def collect_and_remove(objs, using):
    """Soft-delete `objs` and every SoftDeletableModel in their cascade.

    Stamps ``removed`` with the current time on all soft-deletable rows in
    the cascade, never overwriting rows that are already soft-deleted (so
    their original removal date is preserved). Non-soft-deletable models
    are left untouched.
    """
    collector = Collector(using=using)
    with transaction.atomic():
        now = timezone.now()
        collector.collect(objs)
        for qs in collector.fast_deletes:
            if issubclass(qs.model, SoftDeletableModel):
                # Only touch rows not already soft-deleted.
                qs.filter(removed__isnull=True).update(removed=now)
        for model, instances in collector.data.items():
            if not issubclass(model, SoftDeletableModel):
                continue
            pks = [instance.pk for instance in instances]
            base_qs = QuerySet(model=model, using=collector.using)
            base_qs.filter(pk__in=pks).filter(
                removed__isnull=True).update(removed=now)
def modal_delete(request, s, *args, **kwargs):
    """Render the delete-confirmation modal for a server.

    Provides a total count (`estimate`) and a per-model `breakdown` of
    everything that would cascade from deleting the server.
    """
    server = Server.objects.get(id=s)
    collector = Collector(using='default')
    collector.collect([server])
    breakdown = {}
    estimate = 0
    for model, instances in collector.data.items():
        count = len(instances)
        estimate += count
        # Use the singular verbose name when exactly one object is hit.
        if count == 1:
            label = model._meta.verbose_name
        else:
            label = model._meta.verbose_name_plural
        breakdown[label] = count
    return render(request, 'components/servers/detailed/modals/delete.pug',
                  {'estimate': estimate, 'breakdown': breakdown})
def get_context_data(self, **kwargs):
    """Add a (model name, instance) list of cascading deletions to context."""
    context = super(DynamicDeleteView, self).get_context_data(**kwargs)
    collector = Collector(using='default')  # or specific database
    collector.collect([self.object])
    context['to_delete_list'] = [
        (model.__name__, instance)
        for model, instance in collector.instances_with_model()
    ]
    return context
def clean_upper_level(self):
    """
    Check upper level active locations with no reference
    - delete if is leaf
    - deactivate if all children are inactive (doesn't exist an active child)

    NOTE(review): the deactivate step described above is not implemented
    in this body, and the queryset filters on is_active=False (inactive
    locations) — confirm the docstring against the intended behavior.
    """
    # Fix: use the module logger consistently — the original called the
    # root logger (logging.info) here while using `logger` below,
    # bypassing this module's logging configuration.
    logger.info('Clean upper level')
    qs = get_location_model().objects.filter(
        admin_level=self.carto.admin_level - 1,
        is_active=False)
    for location in qs:
        collector = Collector(using='default')
        collector.collect([location])
        # Empty dependencies -> nothing references this location.
        if not collector.dependencies:
            if location.is_leaf_node():
                location.delete()
                logger.info(f'Deleting parent {location}')
def delete(self, using=None, keep_parents=False, hard_delete=False, *args, **kwargs):
    """Delete with optional hard-delete propagation.

    When `hard_delete` is set, every SyncableModel/MorangoMPTTModel
    instance in the cascade is flagged as hard-deleted before the actual
    deletion runs, all inside one transaction.
    """
    db_alias = using or router.db_for_write(self.__class__, instance=self)
    assert self._get_pk_val() is not None, (
        "%s object can't be deleted because its %s attribute is set to None."
        % (self._meta.object_name, self._meta.pk.attname)
    )
    collector = Collector(using=db_alias)
    collector.collect([self], keep_parents=keep_parents)
    with transaction.atomic():
        if hard_delete:
            # set hard deletion for all related syncable models
            for model, instances in six.iteritems(collector.data):
                if issubclass(model, (SyncableModel, MorangoMPTTModel)):
                    for instance in instances:
                        instance._update_hard_deleted_models()
        return collector.delete()
def compare_collector(qs):
    """Collect `qs` with both AWXCollector and Django's stock Collector
    and return their data mappings for comparison.

    Returns:
        tuple: (old_del_dict, awx_del_dict) — Django's model->instances
        mapping, and the AWX collector's entries flattened into sets.
    """
    ac = AWXCollector('default')
    oc = Collector('default')
    ac.collect(qs)
    oc.collect(qs)
    ac.sort()
    oc.sort()
    old_del_dict = oc.data
    awx_del_dict = OrderedDict()
    for model, instances in ac.data.items():
        awx_del_dict.setdefault(model, set())
        for inst in instances:
            # set.update(inst) iterates `inst` — presumably each `inst`
            # here is itself iterable (e.g. a queryset in AWXCollector's
            # data); TODO(review): confirm, otherwise this should be
            # .add(inst).
            awx_del_dict[model].update(inst)
    return old_del_dict, awx_del_dict
def get_related_objects(self, using=None):
    """
    Return a structure describing the model classes, primary keys and
    record instances that depend on the current record.

    The returned structure has the form:
    [(ModelClass1, {id1: ModelClass1InstanceWithId1,
                    id2: ModelClass1InstanceWithId2, ...}),
     (ModelClass2, {id1: ModelClass2InstanceWithId1,
                    id2: ModelClass2InstanceWithId2, ...}),
     ...]

    NOTE(review): Collector.data maps model -> *set of instances*, not an
    id-keyed dict — the shape documented above (inherited from the
    original Russian docstring) may not match; confirm against callers.

    @deprecated: Pulls a lot of data. The server may hang!
    """
    using = using or router.db_for_write(self.__class__, instance=self)
    collector = Collector(using=using)
    collector.collect([self])
    return collector.data.items()
def soft_delete(self, *args, **kwargs):
    """Soft-delete the collected objects.

    Flips ``is_active`` on SoftDeletableModel instances and hard-deletes
    everything else. Runs on a collector-like object exposing ``data``
    and ``fast_deletes``.

    NOTE(review): Python 2 only (dict.iteritems); Collector(using=None)
    relies on the machinery tolerating a null alias — confirm both.
    """
    if len(self.data) > 0:
        # normal deletes
        for model, instances in self.data.iteritems():
            if issubclass(model, SoftDeletableModel):
                for instance in instances:
                    instance.is_active = False
                    instance.save()
            else:
                # Non-soft-deletable instances are hard-deleted with a
                # fresh collector (which re-collects their own cascades).
                collector = Collector(using=None)  # use the default db
                collector.collect(instances)
                collector.delete()
    else:
        # fast deletes: flip is_active in bulk, no per-row save()
        for qs in self.fast_deletes:
            qs.update(is_active=False)
def delete_child(obj, child_model, ar=None, using=None): """ Delete the `child_model` instance related to `obj` without deleting the parent `obj` itself. """ # logger.info(u"delete_child %s from %s",child_model.__name__,obj) using = using or router.db_for_write(obj.__class__, instance=obj) child = get_child(obj, child_model) if child is None: raise Exception("%s has no child in %s" % (obj, child_model.__name__)) # msg = child.disable_delete(ar) ignore_models = set() # for m in models_by_base(obj.__class__): # ignore_models.remove(child_model) msg = child._lino_ddh.disable_delete_on_object(obj, ignore_models) if msg: raise ValidationError(msg) # logger.debug(u"Delete child %s from %s",child_model.__name__,obj) # 20160720 TODO: Django has added the keep_parents argument, and # we should use this before Django 1.10 but this still seems to # delete objects that are related to parents. So we sill cannot # use it. if True: collector = ChildCollector(using=using) collector.collect([child]) # raise Exception(repr(collector.data)) # model = obj.__class__ # remove the collected MTI parents so they are not deleted # (this idea didnt work: yes the parents were saved, but not # their related objects). # concrete_model = child_model._meta.concrete_model # for ptr in six.itervalues(concrete_model._meta.parents): # if ptr: # # raise Exception(repr(ptr.rel.model)) # del collector.data[ptr.rel.model] else: collector = Collector(using=using) collector.collect([child], source=obj.__class__, nullable=True, keep_parents=True) collector.delete()
def change_spanish_home_pages_to_home_pages(apps, schema_editor):
    """Convert every SpanishHomePage into a plain HomePage.

    Deletes each page's child-table row while keeping the MTI parent rows,
    then re-points the wagtailcore Page row (and its revisions) at the
    HomePage content type.
    """
    SpanishHomePage = apps.get_model('v1', 'SpanishHomePage')
    spanish_home_pages = SpanishHomePage.objects.all()
    # If there are no SpanishHomePages, nothing to do.
    if not spanish_home_pages.exists():
        return
    ContentType = apps.get_model('contenttypes', 'ContentType')
    Page = apps.get_model('wagtailcore', 'Page')
    PageRevision = apps.get_model('wagtailcore', 'PageRevision')
    # When running migrations against an empty database, there won't already
    # be a ContentType for the HomePage model type. But we need one in order
    # to convert SpanishHomePages to that type. So use get_or_create here to
    # either use the existing ContentType or create a new one.
    homepage_content_type, _ = ContentType.objects.get_or_create(
        app_label='v1', model='homepage')
    # This is by no means a general solution to the problem of changing a
    # Wagtail Page's type, but seems to work for this specific situation,
    # where we meet these requirements:
    #
    # - Changing a child page into its immediate parent's page type
    # - There are no foreign keys from the child page type to other models
    # - There are no foreign keys from other models to the child page type
    collector = Collector(using=schema_editor.connection.alias)
    for page in spanish_home_pages:
        # This deletes the page's row in the v1_spanishhomepage table without
        # touching the parent v1_homepage, v1_cfgovpage, or wagtailcore_page
        # tables.
        collector.collect([page], keep_parents=True, collect_related=False)
        collector.delete()
        # This updates the wagtailcore_page table to set the page's new type.
        page.content_type = homepage_content_type
        Page.save(page, update_fields=['content_type'])
        # Finally fixup existing page revisions so they also have the right
        # content type. This doesn't affect anything, but is good for
        # consistency.
        for revision in PageRevision.objects.filter(page_id=page.pk):
            content = json.loads(revision.content_json)
            content['content_type'] = homepage_content_type.pk
            revision.content_json = json.dumps(content)
            revision.save(update_fields=['content_json'])
def restore(self):
    """Undo a soft delete.

    Clears ``removed`` on this object and on every SoftDeletableModel in
    its cascade whose removal happened at or after this object's, so
    related objects deleted earlier stay deleted.
    """
    if not self.removed:
        return
    with transaction.atomic():
        collector = Collector(
            using=router.db_for_write(self.__class__, instance=self))
        collector.collect([self])
        for qs in collector.fast_deletes:
            if issubclass(qs.model, SoftDeletableModel):
                # Skip related rows removed before this object was.
                qs.filter(removed__gte=self.removed).update(removed=None)
        for model, instances in collector.data.items():
            if not issubclass(model, SoftDeletableModel):
                continue
            pks = [instance.pk for instance in instances]
            restore_qs = QuerySet(model=model, using=collector.using)
            restore_qs.filter(pk__in=pks).filter(
                removed__gte=self.removed).update(removed=None)
        self.refresh_from_db(fields=['removed'])
def delete(self, using=None, keep_parents=False):
    """Delete via a queryset-based collect.

    Collector has no hook for swapping its base query, so instead of
    passing ``[self]`` we feed it a queryset (a pk filter through the
    model's manager, i.e. a CPkQuerySet) so the collection runs off the
    custom queryset class.
    """
    db_alias = using or router.db_for_write(self.__class__, instance=self)
    assert self.pk is not None, (
        "%s object can't be deleted because its %s attribute is set to None."
        % (self._meta.object_name, self._meta.pk.attname))
    collector = Collector(using=db_alias)
    queryset = self._meta.model.objects.filter(pk=self.pk)
    collector.collect(queryset, keep_parents=keep_parents)
    return collector.delete()