def ready(self):
    """Invalidate the usersettings cache whenever the model is saved or deleted."""
    from .models import clear_usersettings_cache
    from .shortcuts import get_usersettings_model

    model_cls = get_usersettings_model()
    # Both write paths must flush the cache before the row changes.
    for signal in (pre_save, pre_delete):
        signal.connect(clear_usersettings_cache, sender=model_cls)
def ready(self):
    """App initialisation: register projector elements, signals and REST routes."""
    # Load projector elements.
    # Do this by just importing all from these files.
    from . import projector  # noqa

    # Import all required stuff.
    from django.db.models.signals import pre_delete, post_save
    from openslides.core.signals import config_signal
    from openslides.utils.rest_api import router
    from .signals import (
        setup_agenda_config,
        listen_to_related_object_post_delete,
        listen_to_related_object_post_save)
    from .views import ItemViewSet

    # Connect signals.
    config_signal.connect(setup_agenda_config, dispatch_uid='setup_agenda_config')
    # No sender is given, so these receivers fire for saves/deletes of every model.
    post_save.connect(
        listen_to_related_object_post_save,
        dispatch_uid='listen_to_related_object_post_save')
    pre_delete.connect(
        listen_to_related_object_post_delete,
        dispatch_uid='listen_to_related_object_post_delete')

    # Register viewsets.
    router.register('agenda/item', ItemViewSet)
def register_live_index(model_cls):
    """Register a model and index for auto indexing.

    Returns the class unchanged so this can be used as a class decorator.
    """
    dispatch_uid = '{0}live_indexing'.format(model_cls)
    # Index on save, de-index on delete — same handler, same uid.
    for signal in (post_save, pre_delete):
        signal.connect(_live_index_handler, model_cls, dispatch_uid=dispatch_uid)
    # Enable this to be used as decorator.
    return model_cls
def ready(self):
    """Connect cache-eviction handlers for documents and index nodes."""
    super(MirroringApp, self).ready()

    Document = apps.get_model(app_label='documents', model_name='Document')
    IndexInstanceNode = apps.get_model(
        app_label='document_indexing', model_name='IndexInstanceNode'
    )

    # Drop cached data before a row is deleted ...
    pre_delete.connect(
        handler_document_cache_delete,
        dispatch_uid='mirroring_handler_document_cache_delete',
        sender=Document
    )
    pre_delete.connect(
        handler_node_cache_delete,
        dispatch_uid='mirroring_handler_node_cache_delete',
        sender=IndexInstanceNode
    )
    # ... and before a save overwrites it.  Reusing the same dispatch_uid is
    # fine: uniqueness is only required per signal, not across signals.
    pre_save.connect(
        handler_document_cache_delete,
        dispatch_uid='mirroring_handler_document_cache_delete',
        sender=Document
    )
    pre_save.connect(
        handler_node_cache_delete,
        dispatch_uid='mirroring_handler_node_cache_delete',
        sender=IndexInstanceNode
    )
def manage_bookmarks(cls, enabled=True):
    """Connect or disconnect handlers for bookmarks management.

    When *enabled* is True, a related bookmark list is created automatically
    for each new instance of the given model class, e.g.::

        >> manage_bookmarks(User)

    Every new ``User`` then gets an associated bookmark list.  To detach the
    handlers again::

        >> manage_bookmarks(User, False)
    """
    cls = get_model(cls)
    lowered = cls.__name__.lower()
    create_uid = "create_%s_bookmarks" % lowered
    delete_uid = "delete_%s_bookmarks" % lowered
    if enabled:
        post_save.connect(_create_bookmarks, cls, dispatch_uid=create_uid)
        pre_delete.connect(_delete_bookmarks, cls, dispatch_uid=delete_uid)
    else:
        post_save.disconnect(_create_bookmarks, cls, dispatch_uid=create_uid)
        pre_delete.disconnect(_delete_bookmarks, cls, dispatch_uid=delete_uid)
def register(self, model_class, fields=None, follow=(), format=DEFAULT_SERIALIZATION_FORMAT):
    """Registers a model with this revision manager.

    FIX: the original used the Python-2-only ``raise E, "msg"`` statement and
    indexed ``dict.keys()`` directly; both fail on Python 3.  The call forms
    below behave identically on Python 2 and 3.
    """
    # Prevent multiple registration.
    if self.is_registered(model_class):
        raise RegistrationError("%r has already been registered with Reversion." % model_class)
    # Ensure the parent model of proxy models is registered.
    if model_class._meta.proxy and not self.is_registered(list(model_class._meta.parents.keys())[0]):
        raise RegistrationError("%r is a proxy model, and its parent has not been registered with Reversion." % model_class)
    # Calculate serializable model fields.
    opts = model_class._meta
    local_fields = opts.local_fields + opts.local_many_to_many
    if fields is None:
        fields = [field.name for field in local_fields]
    fields = tuple(fields)
    # Calculate serializable model file fields, wrapping their storage so
    # revisions keep the historical file contents.
    file_fields = []
    for field in local_fields:
        if isinstance(field, models.FileField) and field.name in fields:
            field.storage = VersionFileStorageWrapper(field.storage)
            file_fields.append(field)
    file_fields = tuple(file_fields)
    # Register the generated registration information.
    follow = tuple(follow)
    registration_info = RegistrationInfo(fields, file_fields, follow, format)
    self._registry[model_class] = registration_info
    # Connect to the post save signal of the model.
    post_save.connect(self.post_save_receiver, model_class)
    pre_delete.connect(self.pre_delete_receiver, model_class)
def register(cls, model, descriptor="vacuous"):
    """Attach lifecycle signal handlers for *model* and track it.

    If *descriptor* is not None, an AdapterDescriptor is installed on the
    model under that attribute name.
    """
    for signal, receiver in (
        (post_save, cls.post_save),
        (pre_delete, cls.pre_delete),
        (post_init, cls.post_init),
    ):
        signal.connect(receiver, sender=model)
    cls.models.add(model)
    if descriptor is not None:
        setattr(model, descriptor, AdapterDescriptor(cls))
def register(self, model, index_cls=AlgoliaIndex, auto_indexing=None):
    """
    Registers the given model with Algolia engine.

    If the given model is already registered with Algolia engine, a
    RegistrationError will be raised.
    """
    # Guard: refuse double registration.
    if self.is_registered(model):
        raise RegistrationError(
            '{} is already registered with Algolia engine'.format(model))

    # Guard: the index class must extend AlgoliaIndex.
    if not issubclass(index_cls, AlgoliaIndex):
        raise RegistrationError(
            '{} should be a subclass of AlgoliaIndex'.format(index_cls))

    self.__registered_models[model] = index_cls(model, self.client, self.__settings)

    # A per-model auto_indexing=True forces indexing; otherwise the engine
    # default applies (note: auto_indexing=False does NOT override a True
    # engine default — this mirrors the original behaviour exactly).
    if (isinstance(auto_indexing, bool) and auto_indexing) or self.__auto_indexing:
        post_save.connect(self.__post_save_receiver, model)
        pre_delete.connect(self.__pre_delete_receiver, model)

    logger.info('REGISTER %s', model)
def __new__(cls, name, bases, attrs):
    """Metaclass hook: build the model class and, for concrete models, attach
    handlers that create/delete the per-user data row alongside the User.

    FIX: the bare ``except:`` (which also swallowed KeyboardInterrupt and
    SystemExit) is narrowed to ``except Exception``, and the Python-2-only
    ``print`` statement is replaced with a form valid on both Python 2 and 3.
    """
    model = super(UserDataBase, cls).__new__(cls, name, bases, attrs)
    if model._meta.abstract:
        return model

    def on_create(sender, instance, created, *args, **kwargs):
        # Only react to genuine row creation, not updates.
        if not created:
            return
        try:
            data = {'user': instance}
            data.update(model.get_defaults(instance))
            obj = model.objects.create(**data)
            obj.setup()
        except Exception:
            print("Error when creating PerUserData %r" % model)
            raise

    def on_delete(sender, instance, *args, **kwargs):
        model.objects.filter(pk=instance).delete()

    # weak=False: the receivers are inline closures and would otherwise be
    # garbage collected immediately.
    post_save.connect(on_create, sender=User, weak=False)
    pre_delete.connect(on_delete, sender=User, weak=False)

    INSTANCES.append(model)
    return model
def connect_signals():
    """Hook product (all subtypes) and customer-group lifecycle signals."""
    # The base Product class is wired just like each of its subtypes.
    senders = list(Product.SUBTYPES)
    senders.append(Product)
    for sender in senders:
        post_save.connect(on_product_post_save, sender=sender)
        pre_delete.connect(on_product_pre_delete, sender=sender)

    post_save.connect(on_customer_group_post_save, sender=CustomerGroup)
    pre_delete.connect(on_customer_group_pre_delete, sender=CustomerGroup)
def register(model, fields=None, exclude=None):
    """Registers a model with the specified fields to be automatically
    versioned on save and delete operations.
    """
    if model in registry:
        raise AlreadyRegistered(u'The model {0} is already registered'.format(
            model.__name__))

    # Fall back to every model field; an empty exclude list means "nothing".
    fields = fields or utils.get_model_fields(model)
    exclude = exclude or ()
    utils.validate_fields(model, fields, exclude)

    versioned = tuple(set(fields) - set(exclude))
    if not versioned:
        raise ValueError(u'No fields defined for versioning.')

    uid = '{0}_revision'.format(model.__name__)
    pre_delete.connect(pre_delete_revision, weak=False, sender=model,
                       dispatch_uid=uid)
    post_save.connect(post_save_revision, weak=False, sender=model,
                      dispatch_uid=uid)
    registry[model] = {
        'fields': versioned,
        'dispatch_uid': uid,
    }
def finalize(self, model):
    """Bind all collected inline settings to the final settings *model*.

    Each pending entry is rebuilt as a dynamic subclass of its related model
    (relabelled into the 'settings' app) carrying a relation field back to
    *model*, plus a matching admin class wrapping that subclass.
    """
    inline_settings = self._inline_settings
    self._inline_settings = {}
    for setting, (related_model, one, admin) in six.iteritems(inline_settings):
        # One-to-one for single-value settings, FK for repeatable ones.
        field = models.OneToOneField if one else models.ForeignKey
        related_model = type(
            related_model.__name__,
            (related_model, ),
            {
                # Fresh Meta so the generated model lives in the 'settings' app.
                'Meta': type(
                    'Meta',
                    (related_model.Meta, object),
                    {
                        'app_label': 'settings'
                    }
                ),
                '__module__': related_model.__module__,
                # Relation back to the settings model, reachable via *setting*.
                'settings': field(
                    model,
                    related_name=setting
                )
            }
        )
        admin = type(
            related_model.__name__,
            (admin, ),
            {
                'model': related_model
            }
        )
        self._inline_settings[setting] = (related_model, one, admin)
    self._model = model
    # Any change to the settings model invalidates this registry's state.
    pre_save.connect(self._on_model_changed, sender=model)
    pre_delete.connect(self._on_model_changed, sender=model)
def register(self, model_or_iterable, model_metatags_class=None, **options):
    """Register one model (or an iterable of models) for metatag handling.

    FIX: when ``options`` were supplied, the original reassigned the
    ``model_metatags_class`` loop-shared variable, so each subsequent model's
    generated class subclassed the *previous model's* generated class instead
    of the caller-supplied base.  The base is now kept in its own name.
    """
    base_metatags_class = model_metatags_class or ModelMetatags
    if isinstance(model_or_iterable, ModelBase):
        model_or_iterable = [model_or_iterable]
    for model in model_or_iterable:
        if model in self._registry:
            raise AlreadyRegistered(
                u'The model %s already registered' % model.__name__)
        self._append_inline_instance(model)
        metatags_class = base_metatags_class
        if options:
            options['__module__'] = __name__
            # Each model gets its own subclass of the (unchanged) base.
            metatags_class = type(
                '%sMetatags' % model.__name__, (base_metatags_class,), options)
        model_metatags = metatags_class()
        self._registry[model] = model_metatags
        pre_delete.connect(self.delete_metatag, sender=model)
        post_save.connect(self.check_metatag_url_path, sender=model)
        post_save.connect(self.check_metatag_language, sender=model)
        sites_field_class = model_metatags.sites_field_class(model)
        if sites_field_class is ManyToManyField:
            # M2M site membership changes arrive via the through model.
            through_model = getattr(
                model, model_metatags.sites_field_name).through
            m2m_changed.connect(
                self.check_metatag_sites, sender=through_model)
        else:
            post_save.connect(self.check_metatag_site, sender=model)
def ready(self):
    """App startup: declare the message exchange and wire harvest scheduling,
    export receivers and the user-email cron job per settings flags."""
    RabbitWorker().declare_exchange()
    from models import Collection, Export
    from sched import start_sched, schedule_harvest_receiver, unschedule_harvest_receiver
    from export import export_receiver, export_m2m_receiver
    from notifications import send_user_harvest_emails

    if settings.SCHEDULE_HARVESTS:
        log.debug("Setting receivers for collections.")
        post_save.connect(schedule_harvest_receiver, sender=Collection)
        pre_delete.connect(unschedule_harvest_receiver, sender=Collection)

    # Export
    if settings.PERFORM_EXPORTS:
        log.debug("Setting receiver for exports.")
        post_save.connect(export_receiver, sender=Export)
        # Seed membership changes also trigger an export.
        m2m_changed.connect(export_m2m_receiver, sender=Export.seeds.through)

    # Add 5 minute interval
    if settings.FIVE_MINUTE_SCHEDULE:
        log.debug("Adding 5 minute timer")
        Collection.SCHEDULE_CHOICES.append((5, "Every 5 minutes"))

    if settings.RUN_SCHEDULER:
        log.debug("Running scheduler")
        sched = start_sched()
        if settings.PERFORM_USER_HARVEST_EMAILS:
            # Re-register the cron job so settings changes apply on restart.
            if sched.get_job('user_harvest_emails') is not None:
                sched.remove_job('user_harvest_emails')
            sched.add_job(send_user_harvest_emails, 'cron',
                          hour=settings.USER_HARVEST_EMAILS_HOUR,
                          minute=settings.USER_HARVEST_EMAILS_MINUTE,
                          id='user_harvest_emails')
    else:
        log.debug("Not running scheduler")
def register_poster(self, model, stream_field, *actions):
    """Register *model* as a poster for the stream model reached through
    *stream_field* (a ForeignKey or ManyToManyField).

    Raises NotRegistered if the stream model is unknown, and
    ImproperlyConfigured for an unsupported field type.

    FIX: ``if not stream in`` replaced with the idiomatic ``not in``, and
    keyword arguments normalised to PEP 8 spacing.
    """
    field = getattr(model, stream_field).field
    # NOTE(review): ``field.rel.to`` was removed in Django 2.0; newer code
    # would use ``field.remote_field.model`` — left as-is for compatibility
    # with the Django version this file targets.
    stream = field.rel.to
    if isinstance(stream, str):
        stream = get_model(*stream.split('.'))
    if stream not in self._registry:
        raise NotRegistered('Model %s not registered.' % stream)
    self._models[model] = stream_field
    if isinstance(field, ForeignKey):
        self.logger.debug(
            'Registered poster model %s for stream model %s' % (
                model._meta.verbose_name.capitalize(),
                stream._meta.verbose_name.capitalize()
            )
        )
    elif isinstance(field, ManyToManyField):
        m2m_changed.connect(receivers.m2m_changed)
        self.logger.debug(
            'Registered poster model %s for many-to-many stream model %s' % (
                model._meta.verbose_name.capitalize(),
                stream._meta.verbose_name.capitalize()
            )
        )
    else:
        raise ImproperlyConfigured('Connecting field must be a ForeignKey or ManyToMany field')
    pre_save.connect(receivers.pre_save, sender=model)
    pre_delete.connect(receivers.pre_delete, sender=model)
    post_save.connect(receivers.post_save, sender=model)
def register(self, model, adapter_cls=SearchAdapter, **field_overrides):
    """
    Registers the given model with this search engine.

    If the given model is already registered with this search engine, a
    RegistrationError will be raised.
    """
    # A QuerySet may be passed instead of a model class; it then becomes the
    # live filter for that model.
    if isinstance(model, QuerySet):
        live_queryset = model
        model = model.model
        field_overrides["get_live_queryset"] = lambda self_: live_queryset.all()

    # Guard against double registration.
    if self.is_registered(model):
        raise RegistrationError("{model!r} is already registered with this search engine".format(
            model=model,
        ))

    # Bake any overrides into a one-off adapter subclass.  str() because
    # Python 2 refuses unicode class names.
    if field_overrides:
        adapter_cls = type(str("Custom") + adapter_cls.__name__, (adapter_cls,), field_overrides)

    self._registered_models[model] = adapter_cls(model)

    # Connect to the signalling framework.
    post_save.connect(self._post_save_receiver, model)
    pre_delete.connect(self._pre_delete_receiver, model)
def ready(self):
    """App initialisation: projector elements, config variables, signals and
    REST routes for the agenda app."""
    # Load projector elements.
    # Do this by just importing all from these files.
    from . import projector  # noqa

    # Import all required stuff.
    from django.db.models.signals import pre_delete, post_save
    from openslides.core.config import config
    from openslides.utils.rest_api import router
    from .config_variables import get_config_variables
    from .signals import (
        listen_to_related_object_post_delete,
        listen_to_related_object_post_save)
    from .views import ItemViewSet

    # Define config variables
    config.update_config_variables(get_config_variables())

    # Connect signals.  No sender: the receivers fire for every model.
    post_save.connect(
        listen_to_related_object_post_save,
        dispatch_uid='listen_to_related_object_post_save')
    pre_delete.connect(
        listen_to_related_object_post_delete,
        dispatch_uid='listen_to_related_object_post_delete')

    # Register viewsets.
    router.register(self.get_model('Item').get_collection_string(), ItemViewSet)
def __new__(mcs, name, bases, attrs):
    """Build the model class; for concrete, auto-enabled models also install
    signal receivers that create/destroy the companion row with its parent."""
    model = super(Base, mcs).__new__(mcs, name, bases, attrs)
    if model._meta.abstract:
        return model
    # Avoid virtual models (for, for instance, deferred fields)
    if model._meta.concrete_model is not model:
        return model
    if not auto:
        return model

    # Setup the signals that will automatically create and destroy
    # instances.
    #
    # We use weak=False or our (inline) receivers will be garbage
    # collected.
    def create_companion(sender, instance, created, *args, **kwargs):
        if created:
            model.objects.create(**{attr: instance})

    def drop_companion(sender, instance, *args, **kwargs):
        model.objects.filter(pk=instance).delete()

    post_save.connect(create_companion, sender=parent, weak=False)
    pre_delete.connect(drop_companion, sender=parent, weak=False)

    return model
def ready(self):
    """Keep Attachment files and derived images in sync with the model rows."""
    Attachment = self.get_model('Attachment')
    # Remove the stored file and its generated image before the row goes away.
    pre_delete.connect(self.delete_file, sender=Attachment)
    pre_delete.connect(self.delete_attachmentimage, sender=Attachment)
    # On update: drop the stale image first, then rebuild it after the save.
    pre_save.connect(self.delete_attachmentimage, sender=Attachment)
    post_save.connect(self.create_attachmentimage, sender=Attachment)
def ready(self):
    """Wire counting/session handlers and reset app-level cached state."""
    # No sender given: the count handlers run for every model save/delete.
    pre_save.connect(self.update_count_pre_save)
    pre_delete.connect(self.update_count_pre_save)
    post_save.connect(self.update_count_post_save)
    user_logged_out.connect(self.remove_auth_token)
    self.set_registration_active()
    self.clear_last_objects_cache()
def cache_purge_hook(model, func=None):
    """Attach cache-purge handlers around every save and delete of *model*.

    *func* is forwarded to each hook via functools.partial.  weak=False keeps
    the partial objects alive; the shared dispatch_uid means at most one hook
    per signal per model.
    """
    signal_hooks = (
        (pre_save, __pre_save_hook),
        (post_save, __post_save_hook),
        (pre_delete, __pre_delete_hook),
        (post_delete, __post_delete_hook),
    )
    for signal, hook in signal_hooks:
        signal.connect(
            partial(hook, model, func),
            dispatch_uid="cache_purge_hook",
            sender=model,
            weak=False,
        )
def ready(self):
    """App startup: config variables, projector elements, signals and REST
    routes for the agenda app."""
    # Import all required stuff.
    from django.db.models.signals import pre_delete, post_save
    from ..core.config import config
    from ..core.signals import permission_change, user_data_required
    from ..utils.rest_api import router
    from .config_variables import get_config_variables
    from .projector import get_projector_elements
    from .signals import (
        get_permission_change_data,
        listen_to_related_object_post_delete,
        listen_to_related_object_post_save,
        required_users)
    from .views import ItemViewSet

    # Define config variables and projector elements.
    config.update_config_variables(get_config_variables())
    # NOTE(review): register_projector_elements is not imported in this
    # method — it presumably comes from module scope; verify it exists there.
    register_projector_elements(get_projector_elements())

    # Connect signals.  The save/delete receivers have no sender and fire
    # for every model.
    post_save.connect(
        listen_to_related_object_post_save,
        dispatch_uid='listen_to_related_object_post_save')
    pre_delete.connect(
        listen_to_related_object_post_delete,
        dispatch_uid='listen_to_related_object_post_delete')
    permission_change.connect(
        get_permission_change_data,
        dispatch_uid='agenda_get_permission_change_data')
    user_data_required.connect(
        required_users,
        dispatch_uid='agenda_required_users')

    # Register viewsets.
    router.register(self.get_model('Item').get_collection_string(), ItemViewSet)
def finalize():
    """Connect tax invalidation handlers: m2m through models plus every
    registered tax subtype's save/delete signals."""
    tax_cls = modules.tax.Tax
    for relation in tax_cls.m2m_invalidations:
        # Membership changes surface on the relation's through model.
        m2m_changed.connect(
            on_tax_m2m_changed, sender=getattr(tax_cls, relation).through)

    for subtype in modules.tax.subtypes:
        pre_save.connect(on_tax_pre_save, sender=subtype)
        post_save.connect(on_tax_post_save, sender=subtype)
        pre_delete.connect(on_tax_pre_delete, sender=subtype)
def ready(self):
    """Remove the stored file when a MediaFile row is deleted."""
    from .signals import mediafile_delete
    MediaFile = self.get_model('MediaFile')
    pre_delete.connect(
        mediafile_delete,
        sender=MediaFile,
        dispatch_uid='mediafile_delete')
def ready(self):
    """Mirror varlet Page lifecycle changes into menu items."""
    from django.db.models.signals import post_save, pre_delete
    from menuhin.listeners import create_menu_url, unpublish_on_delete
    from varlet.models import Page

    # Receiver passed positionally; behaviour is identical to receiver=...
    post_save.connect(create_menu_url, sender=Page,
                      dispatch_uid="create_menuitem_from_page")
    pre_delete.connect(unpublish_on_delete, sender=Page,
                       dispatch_uid="unpublish_menuitem_from_page_delete")
def setup(self):
    """Connect Aristotle status/concept model signals to this processor's
    handlers, then run the base class setup."""
    from aristotle_mdr.models import Status, _concept
    # Status rows changing (created, updated or deleted) both route to the
    # same handler.
    post_save.connect(self.handle_status_change, sender=Status)
    post_delete.connect(self.handle_status_change, sender=Status)
    post_save.connect(self.handle_concept_save, sender=_concept)
    pre_delete.connect(self.handle_concept_delete, sender=_concept)
    super(AristotleSignalProcessor,self).setup()
def _connect_signal_handlers(self):
    """Listen for saves/deletes of the model; for a many-to-many sites field,
    also treat membership changes on the through model as updates."""
    post_save.connect(self.handle_object_update, sender=self.model)
    pre_delete.connect(self.handle_object_deletion, sender=self.model)
    if self.sites_field_class() is ManyToManyField:
        self._through = getattr(self.model, self.sites_field_name).through
        m2m_changed.connect(self.handle_object_update, self._through)
def ready(self):
    """Monkey-patch Django QuerySet internals and hook reverse-relation
    cleanup on delete."""
    # Replace QuerySet methods with the local implementations.
    query.QuerySet.format = format
    query.QuerySet.update = update
    query.QuerySet._update = _update
    if getattr(settings, 'ENABLE_ARRAY_M2M', False):
        # Also patch join SQL generation and prefetching for array-backed m2m.
        datastructures.Join.as_sql = as_sql
        query.prefetch_one_level = prefetch_one_level
    # No sender: runs for deletes of any model.
    pre_delete.connect(delete_reverse_related)
def register(mymodel):
    """Register *mymodel* for view tracking via save/delete signal handlers.

    FIX: the guard was an ``assert``, which is stripped under ``python -O``;
    it is now an explicit raise of the same exception type.  The debug log
    call uses lazy %-args so the message is only formatted when emitted.
    """
    if issubclass(mymodel, ViewTracker):
        raise AssertionError(
            "ViewTrackers cannot have ViewTrackers... you fool. Model: %s" % mymodel
        )
    post_save.connect(post_save_handler, sender=mymodel)
    pre_delete.connect(pre_delete_handler, sender=mymodel)
    logging.debug("ViewTracker registered for model '%s'", mymodel)
def connect():
    """Attach all translation/Lotte signal handlers and cron jobs."""
    pre_submit_translation.connect(pre_handler, sender=Resource)
    post_submit_translation.connect(post_handler, sender=Resource)
    lotte_init.connect(lotte_init_handler)
    lotte_done.connect(lotte_done_handler)
    # Hourly maintenance tasks.
    cron_hourly.connect(db_cleanup)
    cron_hourly.connect(expiration_notification)
    # Any change to a Lock invalidates the cached state.
    post_save.connect(invalidate_cache, sender=Lock)
    pre_delete.connect(invalidate_cache, sender=Lock)
from django.db.models.signals import pre_delete

from core.signals import file_field_delete
from news.models import News

# Remove files referenced by FileFields before a News row is deleted.
# ``sender`` is passed by keyword for clarity (previously positional; the
# second positional argument of Signal.connect is sender, so behaviour is
# unchanged).
pre_delete.connect(file_field_delete, sender=News)
def wiki_revision_delete_handler(sender, instance, **kwargs):
    """Re-sync the latest-revision record when a revision row is deleted."""
    # If a revision is somehow deleted, but the article still has a current
    # revision update LatestArtcileRevision to match that one
    if instance.article and instance.article.current_revision:
        LatestArticleRevision.copy_article_revision(
            instance.article.current_revision
        )


def user_followers_delete_handler(sender, instance, **kwargs):
    """
    Make all users unfollow the user being deleted.

    N.B. Because django-activity-stream is using a GFK, these do not
    cascade delete.
    """
    # Import act_models here. If imported at the top, this interferes with
    # appconf imports and breaks compressor configuration.
    # See https://github.com/jezdez/django_compressor/issues/333
    from actstream import models as act_models
    followers = act_models.followers(instance)
    for follower in followers:
        unfollow(follower, instance)


post_save.connect(wiki_article_handler, sender=Article)
post_save.connect(wiki_revision_handler, sender=ArticleRevision)
pre_delete.connect(wiki_article_delete_handler, sender=Article)
# BUG FIX: ArticleRevision deletes were previously connected to
# wiki_article_delete_handler (an apparent copy-paste slip), leaving
# wiki_revision_delete_handler defined but never connected.  Revisions now
# route to the revision handler. TODO(review): confirm against git history.
pre_delete.connect(wiki_revision_delete_handler, sender=ArticleRevision)
pre_delete.connect(user_followers_delete_handler, sender=User)
null=True, related_name='follower_friendship_set') created_at = models.DateTimeField(auto_now_add=True) class Meta: index_together = ( # 获取我关注的所有人,按照关注时间排序 ('from_user', 'created_at'), # 获得关注我的所有人,按照关注时间排序 ('to_user', 'created_at'), ) unique_together = (('from_user', 'to_user'), ) ordering = ('-created_at', ) def __str__(self): return f'{self.from_user} followed {self.to_user}' @property def cached_from_user(self): return MemcachedHelper.get_object_through_cache( User, self.from_user_id) @property def cached_to_user(self): return MemcachedHelper.get_object_through_cache(User, self.to_user_id) # hook up with listeners to invalidate cache pre_delete.connect(invalidate_following_cache, sender=Friendship) post_save.connect(invalidate_following_cache, sender=Friendship)
class Meta: db_table = 'cmsplugin_cascade_page' verbose_name = verbose_name_plural = _("Cascade Page Settings") @classmethod def assure_relation(cls, cms_page): """ Assure that we have a foreign key relation, pointing from CascadePage onto CMSPage. """ try: cms_page.cascadepage except cls.DoesNotExist: cls.objects.create(extended_object=cms_page) extension_pool.register(CascadePage) def delete_cascade_element(sender, instance=None, **kwargs): if isinstance(instance, CascadeModelBase): try: instance.page.cascadepage.glossary['element_ids'].pop( str(instance.pk)) instance.page.cascadepage.save() except (AttributeError, KeyError): pass pre_delete.connect(delete_cascade_element)
unique_together = ('event', 'memberID', 'clubID', 'user', 'customer') ordering = ('-id',) def left_event_member(sender, instance, **kwargs): try: if instance.user_id and NOTIFY_JOIN_LEFT_EVENT == 1: url = "http://{0}:{1}/myapi/notify-room/".format(XMPP_HOST, XMPP_PORT) data = {'event_id': str(instance.event.id), 'message':'{} declined to join this event'.format(instance.user.user_profile.display_name or instance.user.username)} req = Request(url, json.dumps(data).encode('utf8')) response = urlopen(req) except: pass pre_delete.connect(left_event_member, sender=EventMember) # Connect create profile function to post_save signal of MemberEvent model # post_save.connect(log_join_event, sender=EventMember) def create_owner_in_event_member(sender, instance, created, **kwargs): if created and instance.event_type == 'PE': EventMember.objects.create(user=instance.user, event=instance, status='H') def push_invited_player_to_subcribe(sender, instance, created, **kwargs): if created and instance.user_id and NOTIFY_JOIN_LEFT_EVENT == 1: ctype = ContentType.objects.get_for_model(GolfCourseEvent) try: user_subcribe = UserSubcribe.objects.get(content_type=ctype, object_id=instance.event_id) subcribe_list = eval(user_subcribe.user) if instance.user_id not in subcribe_list: subcribe_list.append(instance.user_id)
def connect_model_signals(self, model, handler):
    """
    Connect the *pre_delete* signal sent by given *model* to the
    *handler* receiver (its ``deleting_target_object`` method).
    """
    pre_delete_signal.connect(handler.deleting_target_object, sender=model)
#============================================================================== def invalidate_simple_page_after_save(sender, instance, **kwargs): """ Clear simple page buffer RUS: Очищает буфер simple_page_buffer после сохранения страницы. """ clear_simple_page_buffer() def invalidate_simple_page_before_delete(sender, instance, **kwargs): """ RUS: Очищает буфер simple_page_buffer перед удалением страницы. """ invalidate_simple_page_after_save(sender, instance, **kwargs) Model = SimplePage # отправляет сигналы обработчику SimplePage после сохранения и перед удалением страницы. post_save.connect(invalidate_simple_page_after_save, sender=Model, dispatch_uid=make_dispatch_uid( post_save, invalidate_simple_page_after_save, Model )) pre_delete.connect(invalidate_simple_page_before_delete, sender=Model, dispatch_uid=make_dispatch_uid( pre_delete, invalidate_simple_page_before_delete, Model ))
def _listen():
    """Connect TestPlan lifecycle signals to the plan-watcher handlers."""
    # Sender is passed positionally (second argument of Signal.connect).
    post_save.connect(plan_watchers.on_plan_save, TestPlan)
    pre_delete.connect(plan_watchers.on_plan_delete, TestPlan)
    pre_save.connect(plan_watchers.pre_save_clean, TestPlan)
# This Python file uses the following encoding: utf-8 from aula.apps.avaluacioQualitativa.abstract_models import AbstractItemQualitativa,\ AbstractRespostaAvaluacioQualitativa, AbstractAvaluacioQualitativa from aula.apps.avaluacioQualitativa.business_rules.respostaavaluacioqualitativa import respostaAvaluacioQualitativa_pre_delete,\ respostaAvaluacioQualitativa_pre_save, respostaAvaluacioQualitativa_clean class ItemQualitativa(AbstractItemQualitativa): pass class AvaluacioQualitativa(AbstractAvaluacioQualitativa): pass class RespostaAvaluacioQualitativa(AbstractRespostaAvaluacioQualitativa): def clean(self): respostaAvaluacioQualitativa_clean( self ) # ----------------------------- B U S I N E S S R U L E S ------------------------------------ # from django.db.models.signals import post_save, pre_save, pre_delete pre_delete.connect(respostaAvaluacioQualitativa_pre_delete, sender= RespostaAvaluacioQualitativa) pre_save.connect(respostaAvaluacioQualitativa_pre_save, sender = RespostaAvaluacioQualitativa )
'date_modified') for note in note_qs: note['date_created'] = \ note['date_created'].strftime(MONGO_STRFTIME) note['date_modified'] = \ note['date_modified'].strftime(MONGO_STRFTIME) notes.append(note) return notes def _remove_from_mongo(sender, **kwargs): instance_id = kwargs.get('instance').instance.id xform_instances.remove(instance_id) pre_delete.connect(_remove_from_mongo, sender=ParsedInstance) def rest_service_form_submission(sender, **kwargs): parsed_instance = kwargs.get('instance') created = kwargs.get('created') if created: call_service(parsed_instance) post_save.connect(rest_service_form_submission, sender=ParsedInstance) def submission_count(sender, **kwargs): parsed_instance = kwargs.get('instance') created = kwargs.get('created')
instance._old_state = instance.state # Create the message data = { "hostname": instance.hostname, "health": instance.get_health_display(), "state": instance.get_state_display(), } # Send the event send_event(".worker", "lavaserver", data) pre_delete.connect( testjob_pre_delete_handler, sender=TestJob, weak=False, dispatch_uid="testjob_pre_delete_handler", ) # This handler is used for the notification and the events post_init.connect( testjob_init_handler, sender=TestJob, weak=False, dispatch_uid="testjob_init_handler", ) pre_save.connect( testjob_notifications, sender=TestJob, weak=False, dispatch_uid="testjob_notifications", )
created_at = models.DateTimeField(auto_now_add=True) updated_at = models.DateTimeField(auto_now=True) likes_count = models.IntegerField(default=0, null=True) class Meta: index_together = (('tweet', 'created_at'), ) def __str__(self): return '{} - {} comments {} on tweet {}'.format( self.created_at, self.user, self.content, self.tweet_id, ) @property def like_set(self): return Like.objects.filter( content_type=ContentType.objects.get_for_model(Comment), object_id=self.id, ).order_by('-created_at') @property def cached_user(self): return MemcachedHelper.get_object_through_cache(User, self.user_id) post_save.connect(incr_comments_count, sender=Comment) pre_delete.connect(decr_comments_count, sender=Comment)
# if no other link is using this app+version+profile, delete the file from blobdb if not (HostedCCZ.objects.filter( app_id=self.app_id, version=self.version, profile_id=self.profile_id).exclude(link=self.link).exists()): self.utility.remove_file_from_blobdb() def delete(self, *args, **kwargs): self.delete_ccz() super(HostedCCZ, self).delete(*args, **kwargs) def update_status(self, new_status): assert new_status in self.STATUSES HostedCCZ.objects.filter(id=self.pk).update(status=new_status) class HostedCCZCustomSupportingFile(models.Model): link = models.ForeignKey(HostedCCZLink, on_delete=models.PROTECT) file = models.ForeignKey(HostedCCZSupportingFile, on_delete=models.CASCADE) def __str__(self): return str(self.file) + "@" + str(self.link) def delete_ccz_for_link(sender, instance, **kwargs): for hosted_ccz in HostedCCZ.objects.filter(link=instance): hosted_ccz.delete_ccz() pre_delete.connect(delete_ccz_for_link, sender=HostedCCZLink)
#delete ratings, comments, and taggit tags: ct = ContentType.objects.get_for_model(layer) OverallRating.objects.filter(content_type=ct, object_id=layer.id).delete() Comment.objects.filter(content_type=ct, object_id=layer.id).delete() layer.keywords.clear() layer.delete() output['stats']['deleted'] += 1 status = "delete_succeeded" except Exception, e: status = "delete_failed" finally: from .signals import geoserver_pre_delete pre_delete.connect(geoserver_pre_delete, sender=Layer) msg = "[%s] Layer %s (%d/%d)" % (status, layer.name, i + 1, number_deleted) info = {'name': layer.name, 'status': status} if status == "delete_failed": exception_type, error, traceback = sys.exc_info() info['traceback'] = traceback info['exception_type'] = exception_type info['error'] = error output['deleted_layers'].append(info) if verbosity > 0: print >> console, msg finish = datetime.datetime.now() td = finish - start
account = Accounts.objects.get(accountName=charged_account.accountName) account.amount -= kwargs['instance'].total_price account.save() def delete_expense_handler(sender, **kwargs): account = kwargs['instance'].charged_account account.amount += kwargs['instance'].total_price account.save() def add_income_handler(sender, **kwargs): if kwargs['created']: topped_up_account = kwargs['instance'].topped_up_account account = Accounts.objects.get( accountName=topped_up_account.accountName) account.amount += kwargs['instance'].income account.save() def delete_income_handler(sender, **kwargs): account = kwargs['instance'].topped_up_account account.amount -= kwargs['instance'].income account.save() post_save.connect(add_expense_handler, sender=Expenses) post_save.connect(add_income_handler, sender=Incomes) pre_delete.connect(delete_expense_handler, sender=Expenses) pre_delete.connect(delete_income_handler, sender=Incomes)