def _attach_signals(self):
    """Attach all signals for eav.

    Connects the registry's attribute attacher on instance init, and the
    configured entity class's pre-/post-save handlers for the model class.
    """
    # Direct attribute access instead of getattr() with a constant string —
    # same lookup, clearer intent, and an AttributeError (if any) surfaces
    # at the access site rather than inside getattr.
    entity_cls = self.config_cls.entity_class
    post_init.connect(Registry.attach_eav_attr, sender=self.model_cls)
    pre_save.connect(entity_cls.pre_save_handler, sender=self.model_cls)
    post_save.connect(entity_cls.post_save_handler, sender=self.model_cls)
def make_temporal(cls):
    # NOTE(review): `fields`, `temporal_schema` and `activity_model` are free
    # variables here — presumably closed over by an enclosing decorator
    # factory that is not visible in this chunk; confirm against the file.
    assert issubclass(
        cls, Clocked), 'add temporal_django.Clocked to %s' % cls.__name__
    # Every requested temporal field must be an actual field on the model.
    model_fields = set([f.name for f in cls._meta.fields])
    for field in fields:
        assert field in model_fields, '%s is not a field on %s' % (
            field, cls.__name__)
    # One history model per tracked field, plus one clock model per entity.
    history_models = {
        f: _build_field_history_model(cls, f, temporal_schema)
        for f in fields
    }
    clock_model = _build_entity_clock_model(cls, temporal_schema, activity_model)
    cls.temporal_options = InternalClockedOption(
        cls,
        temporal_fields=fields,
        history_models=history_models,
        clock_model=clock_model,
        activity_model=activity_model,
    )
    # Snapshot initial field state whenever an instance is constructed.
    post_init.connect(_save_initial_state_post_init, sender=cls)
    # bulk_create would bypass the signal machinery above, so disable it.
    _disable_bulk_create(cls)
    return cls
def contribute_to_class(self, cls, name):
    """Install the spec-field descriptor, ImageKitMeta, and receivers on *cls*.

    Args:
        cls (type): The model class being contributed to.
        name (str): The name of this field on the model.
    """
    setattr(cls, name, ImageSpecFileDescriptor(self, name))
    try:
        # Make sure we don't modify an inherited ImageKitMeta instance:
        # only reuse one defined directly on this class.
        # BUG FIX: was cls.__dict__['ik'], but the attribute is stored under
        # '_ik' below, so the lookup could never succeed and the meta was
        # rebuilt every time.
        ik = cls.__dict__['_ik']
    except KeyError:
        try:
            base = getattr(cls, '_ik')
        except AttributeError:
            ik = ImageKitMeta()
        else:
            # Inherit all the spec fields from the base class's meta.
            ik = ImageKitMeta(base.spec_fields)
        setattr(cls, '_ik', ik)
    ik.spec_fields.append(name)
    # Connect to the signals only once for this class.
    uid = '%s.%s' % (cls.__module__, cls.__name__)
    post_init.connect(ImageSpecField._post_init_receiver, sender=cls,
                      dispatch_uid=uid)
    post_save.connect(ImageSpecField._post_save_receiver, sender=cls,
                      dispatch_uid=uid)
    post_delete.connect(ImageSpecField._post_delete_receiver, sender=cls,
                        dispatch_uid=uid)
    # Register the field with the image_cache_backend, if it supports it.
    try:
        self.image_cache_backend.register_field(cls, self, name)
    except AttributeError:
        pass
def __init__(self):
    """Set up source-group storage and connect the instance receivers."""
    self._source_groups = []
    # NEW HOOK -- tweak uid
    receiver_uid = 'instance_ik_spec_field_receivers'
    post_init.connect(self.post_init_receiver, dispatch_uid=receiver_uid)
    post_save.connect(self.post_save_receiver, dispatch_uid=receiver_uid)
def on_change(cls, get_state, fn):
    """
    Call a function whenever a model instance changes.

    :param cls: The model we are interested in. (This is a class.)
    :param get_state: A function that gets state from a model instance.
        A change is a change in state.
    :param fn: The function to call when there is a change.
    """

    def _remember_state(sender, **kwargs):
        obj = kwargs.get('instance')
        obj._prev_state = get_state(obj)

    def _compare_state(sender, **kwargs):
        obj = kwargs.get('instance')
        freshly_created = kwargs.get('created')
        current = get_state(obj)
        # We do not emit a signal when the object has just been created.
        if not freshly_created and current != obj._prev_state:
            fn(obj)
        # The item could still be changed again, so refresh the snapshot.
        obj._prev_state = current

    # weak=False prevents garbage collection of these local handlers.
    post_save.connect(_compare_state, sender=cls, weak=False)
    post_init.connect(_remember_state, sender=cls, weak=False)
def register_signals_timelog(self):
    """Register timelog signals for a fixed allow-list of models."""
    # Imports must stay local to this method. This app is only initialised
    # when ready() runs; touching django earlier would fail.
    from django.db.models.signals import post_init, post_save
    from application.signals import timelog_post_init, timelog_post_save

    tracked_model_names = (
        'ApplicantName',
        'ApplicantPersonalDetails',
        'ApplicantHomeAddress',
        'AdultInHome',
        'ChildInHome',
        'ChildcareType',
        'CriminalRecordCheck',
        'EYFS',
        'FirstAidTraining',
        'HealthDeclarationBooklet',
        'Reference',
        'UserDetails',
    )
    for model_name in tracked_model_names:
        sender = self.get_model(model_name)
        post_init.connect(timelog_post_init, sender=sender,
                          dispatch_uid="timelog_post_init")
        post_save.connect(timelog_post_save, sender=sender,
                          dispatch_uid="timelog_post_save")
def contribute_to_class(self, cls, name):
    """Attach JSON accessors and the init listener to the owning model.

    Adds :samp:`get_{fieldname}_json()` and :samp:`set_{fieldname}_json()`
    to the model for reading/writing the serialized data directly, and
    listens for instance initialization so the field holds deserialized
    JSON data.

    Args:
        cls (type): The :py:class:`~django.db.models.Model` class to
            contribute to.
        name (unicode): The name of this field on the model.
    """
    def get_json(model_instance):
        # Serialized form of the field's current value.
        return self.value_to_string(model_instance)

    def set_json(model_instance, json):
        # Replace the field's value with the deserialized payload.
        setattr(model_instance, self.attname, self.loads(json))

    super(JSONField, self).contribute_to_class(cls, name)

    setattr(cls, 'get_%s_json' % self.name, get_json)
    setattr(cls, 'set_%s_json' % self.name, set_json)

    post_init.connect(self.post_init, sender=cls)
def _attach_signals(self):
    '''Attach all signals for eav'''
    sender = self.model_cls
    post_init.connect(Registry.attach_eav_attr, sender=sender)
    pre_save.connect(Entity.pre_save_handler, sender=sender)
    post_save.connect(Entity.post_save_handler, sender=sender)
def connect_signals():
    """Connect cleanup receivers for every model that owns a FileField.

    Idempotent: a non-empty FIELDS registry means this already ran.
    """
    if len(FIELDS) > 0:
        return

    def find_models_with_filefield():
        # Yield each model at most once, after scanning all of its fields.
        # (Yielding per FileField would connect the same receivers
        # repeatedly; dispatch_uid masks that, but it is wasted work.)
        for model in get_models():
            opts = ensure_get_fields(model._meta)
            name = get_model_name(model)
            for field in opts.get_fields():
                if isinstance(field, models.FileField):
                    FIELDS[name].append(field.name)
            if name in FIELDS:
                yield model

    for model in find_models_with_filefield():
        # '{{}}' survives the first .format() as a literal '{}' placeholder,
        # filled per-signal below.
        key = '{{}}_django_cleanup_{}'.format(get_model_name(model))
        post_init.connect(cache_original_post_init, sender=model,
                          dispatch_uid=key.format('post_init'))
        post_save.connect(delete_old_post_save, sender=model,
                          dispatch_uid=key.format('post_save'))
        post_delete.connect(delete_all_post_delete, sender=model,
                            dispatch_uid=key.format('post_delete'))
def __init_subclass__(cls, **kwargs):
    """Hook every subclass up to the audit-log signal receivers."""
    super().__init_subclass__(**kwargs)
    for signal, receiver in (
        (post_init, post_init_audit_log),
        (post_save, post_save_audit_log),
        (post_delete, post_delete_audit_log),
    ):
        signal.connect(receiver, cls)
    logger.debug(f"Audit logging signals connected for {cls}.")
def register(cls, model, descriptor="vacuous"):
    """Wire adapter lifecycle handlers to *model* and expose a descriptor."""
    for signal, handler in ((post_save, cls.post_save),
                            (pre_delete, cls.pre_delete),
                            (post_init, cls.post_init)):
        signal.connect(handler, sender=model)
    cls.models.add(model)
    # Passing descriptor=None suppresses attaching the AdapterDescriptor.
    if descriptor is not None:
        setattr(model, descriptor, AdapterDescriptor(cls))
def connect_signals():
    """Connect the treenode receivers; dispatch_uids keep this idempotent."""
    wiring = (
        (post_init, post_init_treenode, 'post_init_treenode'),
        (post_migrate, post_migrate_treenode, 'post_migrate_treenode'),
        (post_save, post_save_treenode, 'post_save_treenode'),
        (post_delete, post_delete_treenode, 'post_delete_treenode'),
    )
    for signal, receiver, uid in wiring:
        signal.connect(receiver, dispatch_uid=uid)
def test_model_signal(self):
    """signals.E001 is reported for lazy references to uninstalled senders."""
    unresolved_references = post_init.unresolved_references.copy()
    post_init.connect(on_post_init, sender='missing-app.Model')
    post_init.connect(OnPostInit(), sender='missing-app.Model')
    try:
        errors = run_checks()
        expected = [
            Error(
                "The 'on_post_init' function was connected to the 'post_init' "
                "signal with a lazy reference to the 'missing-app.Model' "
                "sender, which has not been installed.",
                obj='model_validation.tests',
                id='signals.E001',
            ),
            Error(
                "An instance of the 'OnPostInit' class was connected to "
                "the 'post_init' signal with a lazy reference to the "
                "'missing-app.Model' sender, which has not been installed.",
                obj='model_validation.tests',
                id='signals.E001',
            )
        ]
        self.assertEqual(errors, expected)
    finally:
        # Restore signal state even when the assertions fail, so this test
        # cannot leak unresolved references into the rest of the suite.
        post_init.unresolved_references = unresolved_references
def test_model_signal(self):
    """signals.E001 (with hint=None) is reported for missing lazy senders."""
    unresolved_references = post_init.unresolved_references.copy()
    post_init.connect(on_post_init, sender='missing-app.Model')
    post_init.connect(OnPostInit(), sender='missing-app.Model')
    try:
        errors = run_checks()
        expected = [
            Error(
                "The 'on_post_init' function was connected to the 'post_init' "
                "signal with a lazy reference to the 'missing-app.Model' "
                "sender, which has not been installed.",
                hint=None,
                obj='model_validation.tests',
                id='signals.E001',
            ),
            Error(
                "An instance of the 'OnPostInit' class was connected to "
                "the 'post_init' signal with a lazy reference to the "
                "'missing-app.Model' sender, which has not been installed.",
                hint=None,
                obj='model_validation.tests',
                id='signals.E001',
            )
        ]
        self.assertEqual(errors, expected)
    finally:
        # Restore in a finally so a failing assertion does not leak the
        # extra connections into other tests.
        post_init.unresolved_references = unresolved_references
def connect(cls):
    """Wire dirty-field tracking onto cls.model and add helper methods."""
    cls._prepare_m2m_fields()

    def _instance_post_init(sender, instance, **kwargs):
        cls._instance_post_init(instance, **kwargs)
    # weak=False: these local closures would otherwise be garbage collected.
    post_init.connect(_instance_post_init, sender=cls.model, weak=False)

    def _instance_post_save(sender, instance, **kwargs):
        cls._instance_post_save(instance, **kwargs)
    post_save.connect(_instance_post_save, sender=cls.model, weak=False)

    def field_has_changed(self, field):
        "Returns `True` if `field` has changed since initialization."
        # The UNSAVED sentinel means post_init never ran — no baseline.
        if getattr(self, "_dirty_fields", UNSAVED) is UNSAVED:
            return False
        return self._dirty_fields.get(field) != getattr(self, field)
    cls.model.field_has_changed = field_has_changed

    def field_old_value(self, field):
        "Returns the previous value of `field`"
        return getattr(self, "_dirty_fields", UNSAVED).get(field)
    cls.model.field_old_value = field_old_value

    def changed_fields(self):
        "Returns a list of changed attributes."
        if getattr(self, "_dirty_fields", UNSAVED) is UNSAVED:
            return {}
        # NOTE(review): .iteritems() is Python 2 only — confirm the file
        # still targets Python 2 before porting.
        return dict([(k, v) for k, v in self._dirty_fields.iteritems()
                     if v != getattr(self, k)])
    cls.model.changed_fields = changed_fields
def register(cls, model, descriptor='vacuous'):
    """Register *model* with this adapter and hook its lifecycle signals."""
    post_init.connect(cls.post_init, sender=model)
    post_save.connect(cls.post_save, sender=model)
    pre_delete.connect(cls.pre_delete, sender=model)
    cls.models.add(model)
    if descriptor is None:
        # Caller opted out of the descriptor attribute.
        return
    setattr(model, descriptor, AdapterDescriptor(cls))
def ready(self):
    """Best-effort hookup: connect the blog handler only if puput exists."""
    try:
        from puput.models import BlogPage
    except ImportError:
        # puput is an optional dependency — skip quietly when absent.
        # (Narrowed from a bare `except: pass`, which also hid real errors
        # raised by the connect call itself.)
        return
    post_init.connect(handle_blog_model, sender=BlogPage)
def process_request(self, request):
    """Expose the current request's user to the audit post_init receivers."""
    # Old-style Django: is_authenticated is still a callable method here.
    if hasattr(request, 'user') and request.user.is_authenticated():
        user = request.user
    else:
        user = None
    user_passer = curry(pass_audit_user, user)
    # NOTE(review): a new strong (weak=False) receiver is connected per
    # request with a per-request dispatch_uid, and nothing visible here
    # ever disconnects it — this looks like an unbounded receiver leak.
    # Confirm that a matching disconnect exists elsewhere (e.g. in
    # process_response/process_exception).
    post_init.connect(user_passer, weak=False,
                      dispatch_uid=('audit_post_init_middleware', request))
def encrypt_fields(klass, field_names):
    """Enable transparent encryption of *field_names* on *klass*."""
    if not getattr(settings, 'CRYPTO_KEY', None):  # pragma: no cover
        raise ImproperlyConfigured('CRYPTO_KEY is not set in settings')
    encryptor = Encryptor(field_names)
    # Keep a module-level reference so the receivers aren't collected.
    __global_reference.append(encryptor)
    pre_save.connect(encryptor.encrypt, klass)
    post_save.connect(encryptor.decrypt, klass)
    post_init.connect(encryptor.decrypt, klass)
def __class_prepared(self, sender, **kwargs):
    """
    Given the cache is configured, connects the required signals
    for invalidation. No-op when no cache fields are configured.
    """
    if not self.cache_fields:
        return
    connect_kwargs = dict(sender=sender, weak=False)
    post_init.connect(self.__post_init, **connect_kwargs)
    post_save.connect(self.__post_save, **connect_kwargs)
    post_delete.connect(self.__post_delete, **connect_kwargs)
def setup_signals(self, cls):
    """Connect all signals needed to maintain this augmented relation on *cls*.

    NOTE(review): the original indentation was lost; the nesting below is a
    reconstruction — in particular whether the setattr() call sits inside
    the ``descriptor_field.null`` branch should be confirmed.
    """
    # Abstract models have no concrete table; nothing to wire.
    if cls._meta.abstract:
        return
    pre_save.connect(self.augmented_model_pre_save, cls)
    descriptor = getattr(cls, self.relation_name)
    rel_model = this_django.get_descriptor_rel_model(descriptor)
    # Classify the relation descriptor: m2m, reverse FK, or generic reverse.
    is_m2md = isinstance(descriptor, this_django.ManyToManyDescriptor)
    is_frod = isinstance(descriptor,
                         this_django.ForeignRelatedObjectsDescriptor)
    is_grod = isinstance(
        descriptor, this_django.ReverseGenericRelatedObjectsDescriptor)
    descriptor_field = this_django.get_descriptor_remote_field(descriptor)
    if is_m2md:
        post_init.connect(self.patch_instance_prepare_database_save,
                          sender=rel_model)
        m2m_changed.connect(self.m2m_changed, sender=descriptor.through)
    elif is_frod:
        # patch nullable FK and One2One fields
        #: see AbnormForeignRelatedObjectsDescriptor comments
        if descriptor_field.null:
            this_django.add_foreign_related_object_descriptor_slave_field(
                descriptor, self)
            setattr(cls, self.relation_name, descriptor)
    # post_update signal machinery below
    if is_grod:
        # get GenericForeignKey field name, which is customizable
        generic_fk_fields = filter(
            partial(this_django._is_matching_generic_foreign_key,
                    descriptor_field),
            this_django.get_model_fields(rel_model))
        related_field = next(iter(generic_fk_fields), None)
        related_field_name = getattr(related_field, 'name', None)
    else:
        related_field_name = descriptor_field.name
    if related_field_name and related_field_name != '+':
        # if backwards relation name is not disabled, sign current
        # instance to update *current* field parent, see
        # https://docs.djangoproject.com/en/stable/ref/models/fields/
        # #django.db.models.ForeignKey.related_name
        # for details
        receiver = self.get_post_update_receiver(related_field_name)
        # local functions are garbage collected, so disable weak refs
        post_update.connect(receiver, sender=rel_model, weak=False)
    if django.VERSION < (1, 9) or (is_frod or is_m2md):
        # required for all descriptor types with django 1.6-1.8 for some
        # reason, and only for (is_frod or is_m2md) case with 1.9+
        self.connect_related_model_signals(rel_model)
    state.signals_connected.append((get_model_name(self.model), self.name))
def register(obj_def):
    """Attach snailtracker hooks to *obj_def* exactly once per table."""
    if not snailtracker_enabled():
        return
    table = obj_def._meta.db_table
    if table in snailtracker_site.registry:
        logger.debug('%s already registered' % table)
        return
    logger.debug('Registering %s' % table)
    post_init.connect(snailtracker_post_init_hook, sender=obj_def,)
    post_save.connect(snailtracker_post_save_hook, sender=obj_def,)
    post_delete.connect(snailtracker_post_delete_hook, sender=obj_def,)
    snailtracker_site.registry[table] = True
def test_lazy(self):
    """Iterating a cached queryset must still fire post_init per row."""
    counter = _make_inc()
    from django.db.models.signals import post_init
    post_init.connect(counter, sender=Category)
    cached_qs = Category.objects.cache()
    # Pull a single object off the iterator, then stop.
    for _obj in cached_qs.iterator():
        break
    self.assertEqual(counter.get(), 1)
def _attach_signals(self):
    """
    Attach pre- and post- save signals from model class to Entity helper.

    This lets an Entity instance prepare and clean up before and after
    creation / update of the user's model class instance.
    """
    sender = self.model_cls
    post_init.connect(Registry.attach_eav_attr, sender=sender)
    pre_save.connect(Entity.pre_save_handler, sender=sender)
    post_save.connect(Entity.post_save_handler, sender=sender)
def configure_receivers():
    """Connect the ImageKit receivers to the model signals.

    We have to listen to *every* model (not just those with IK fields) and
    filter inside the receivers, because of a Django issue with abstract
    base models. Related:
      https://github.com/jdriscoll/django-imagekit/issues/126
      https://code.djangoproject.com/ticket/9318
    """
    dispatch_uid = 'ik_spec_field_receivers'
    for signal, receiver in ((post_init, post_init_receiver),
                             (post_save, post_save_receiver),
                             (post_delete, post_delete_receiver)):
        signal.connect(receiver, dispatch_uid=dispatch_uid)
def inner(cls):
    """Decorator body: adds change tracking (and optional kafka logging)."""
    # contains a local copy of the previous values of attributes
    cls.__data = {}

    def has_changed(self, field):
        "Returns ``True`` if ``field`` has changed since initialization."
        # UNSAVED sentinel: no snapshot has been stored yet.
        if self.__data is UNSAVED:
            return False
        return self.__data.get(field) != getattr(self, field)
    cls.has_changed = has_changed

    def old_value(self, field):
        "Returns the previous value of ``field``"
        return self.__data.get(field)
    cls.old_value = old_value

    def get_tracked_changes(self):
        "Returns a list of changed attributes."
        changed = {}
        if self.__data is UNSAVED:
            return changed
        for k, v in six.iteritems(self.__data):
            if v != getattr(self, k):
                changed[k] = {'old': v, 'new': getattr(self, k)}
        return changed
    cls.get_tracked_changes = get_tracked_changes

    # Ensure we are updating local attributes on model init
    def _post_init(sender, instance, **kwargs):
        _store(instance)
    post_init.connect(_post_init, sender=cls, weak=False)

    # Send changes to kafka (only when a producer is configured)
    if TRACK_CHANGES_KAFKA_PRODUCER:
        def _post_save(sender, instance, created, **kwargs):
            producer = get_producer(TRACK_CHANGES_KAFKA_PRODUCER)
            producer.log_changes(instance, 'Created' if created else 'Updated')
        post_save.connect(_post_save, sender=cls, weak=False)

    # Ensure we are updating local attributes on model save by wrapping
    # the original save(); the original is stashed on the wrapper itself.
    def save(self, *args, **kwargs):
        save._original(self, *args, **kwargs)
        _store(self)
    save._original = cls.save
    cls.save = save

    return cls
def connect(app):
    """Connect audit receivers for every model that should be audited."""
    for name, fields, model in get_models_to_audit():
        # weak=False: the factory-made closures must outlive this scope.
        post_init.connect(save_current_data_factory(model, fields),
                          sender=model, weak=False)
        post_save.connect(add_audit_log_record_factory(model, fields),
                          sender=model, weak=False)
        post_delete.connect(add_last_audit_log_record_factory(model, fields),
                            sender=model, weak=False)
def register(translated_model):
    """Build and register a Translated<Model> shadow model for *translated_model*.

    Returns the dynamically created translation model class, or None when the
    original model is already registered.
    """
    registry = get_registry()
    original_model = translated_model.model
    if original_model in registry:
        # Already registered; nothing to do.
        return None
    registry.add(original_model)
    # Attributes for the dynamically-built translation model.
    fields = {
        '__module__': 'transtrans.models',
        'language': models.CharField(max_length=5),
        'object': models.ForeignKey(original_model, related_name='translations'),
        '__unicode__': lambda self: self.language
    }
    unique_fields = []
    for field in translated_model.fields:
        new_field = copy(original_model._meta.get_field_by_name(field)[0])
        # Remove uniqueness from field and add it as unique_together
        # within language later
        if new_field.unique:
            unique_fields.append(new_field.name)
            new_field._unique = False
        original_model.add_to_class(field, TranslatedField(field))
        fields[field] = new_field
    model_name = 'Translated{0}'.format(original_model._meta.object_name)
    model_class = type(model_name, (models.Model, ), fields)
    # Restore unique index as unique_together within language
    for uniq_field in unique_fields:
        validation_tuple = (('language', uniq_field))
        model_class._meta.unique_together.append(validation_tuple)
    TransManager().contribute_to_class(original_model, 'objects')
    original_model.add_to_class('translated_fields', translated_model.fields)
    original_model.add_to_class('propagate_current_language',
                                propagate_current_language)
    original_model.add_to_class('_new_translation', _new_translation)
    original_model.add_to_class('_translations_all', _translations_all)
    original_model.add_to_class('__repr__', __repr__)
    original_model.add_to_class('_initialized', _initialized)
    # Override validate_unique method
    original_model.add_to_class('validate_unique', validate_unique)
    # Magic! Don't ask... (rebuilds Django's internal relation caches so the
    # newly-added fields/relations are visible).
    original_model._meta._fill_related_objects_cache()
    original_model._meta.init_name_map()
    post_init.connect(model_post_init, original_model)
    pre_save.connect(model_pre_save, original_model)
    post_save.connect(model_post_save, original_model)
    return model_class
def add_handler(handler):
    """Register a URL handler and hook its model's URL-maintenance signals."""
    assert handler.name not in handlers, (
        'An URL handler with the name "%s" already exists!') % handler.name
    handlers[handler.name] = handler
    model = handler.model
    dispatch_uid = 'urlrouter:%s' % id(model)
    post_init.connect(backup_urls, sender=model, dispatch_uid=dispatch_uid)
    pre_save.connect(reclaim_urls, sender=model, dispatch_uid=dispatch_uid)
    post_save.connect(add_new_urls, sender=model, dispatch_uid=dispatch_uid)
def connect():
    '''Connect signals to the cleanup models'''
    for model in cache.cleanup_models():
        # '{{}}' survives the first .format() as '{}', filled per-signal.
        uid_template = '{{}}_django_cleanup_{}'.format(
            cache.get_model_name(model))
        post_init.connect(cache_original_post_init, sender=model,
                          dispatch_uid=uid_template.format('post_init'))
        pre_save.connect(fallback_pre_save, sender=model,
                         dispatch_uid=uid_template.format('pre_save'))
        post_save.connect(delete_old_post_save, sender=model,
                          dispatch_uid=uid_template.format('post_save'))
        post_delete.connect(delete_all_post_delete, sender=model,
                            dispatch_uid=uid_template.format('post_delete'))
def contribute_to_class(self, cls, name):
    """Install this virtual field on *cls*.

    Modeled on django's GenericForeignKey.contribute_to_class.
    """
    self.name = name
    self.model = cls
    self.instance_counter_attr = "_%s_counter" % name
    cls._meta.add_virtual_field(self)
    # For some reason (not fully understood) weakrefs break here,
    # so connect the receiver strongly.
    post_init.connect(self.instance_post_init, sender=cls, weak=False)
    # Install myself as the descriptor for this field.
    setattr(cls, name, self)
def enable():
    """Connects the tracking signals, and so enables tracking."""
    for signal, receiver in (
        (post_init, post_init_receiver),
        (post_save, post_save_receiver),
        (m2m_changed, m2m_changed_receiver),
        (post_delete, post_delete_receiver),
    ):
        signal.connect(receiver, dispatch_uid=_dispatch_uid(receiver))
def contribute_to_class(self, cls, name, private_only=False, **kwargs):
    """Hook denormalization tracking when this field is added to *cls*.

    Snapshots the previous version on init, refreshes the value and the
    snapshot on save, and refreshes the value on delete.
    """
    super().contribute_to_class(cls, name, private_only, **kwargs)
    post_init.connect(self._track_previous_version, sender=cls,
                      dispatch_uid='denormalized_track_previous')
    post_save.connect(self._track_changes, sender=cls,
                      dispatch_uid='denormalized_update_value_on_save')
    post_save.connect(self._track_previous_version, sender=cls,
                      dispatch_uid='update_denormalized_previous')
    post_delete.connect(self._track_changes, sender=cls,
                        dispatch_uid='denormalized_update_value_on_delete')
    # Tell every tracker which foreign key it hangs off.
    for tracker in self.trackers:
        tracker.foreign_key = self.name
def inner(cls):
    """Decorator body: adds change tracking (and optional kafka logging)."""
    # contains a local copy of the previous values of attributes
    cls.__data = {}

    def has_changed(self, field):
        "Returns ``True`` if ``field`` has changed since initialization."
        # UNSAVED sentinel: no snapshot has been stored yet.
        if self.__data is UNSAVED:
            return False
        return self.__data.get(field) != getattr(self, field)
    cls.has_changed = has_changed

    def old_value(self, field):
        "Returns the previous value of ``field``"
        return self.__data.get(field)
    cls.old_value = old_value

    def get_tracked_changes(self):
        "Returns a list of changed attributes."
        changed = {}
        if self.__data is UNSAVED:
            return changed
        # NOTE(review): .iteritems() is Python 2 only.
        for k, v in self.__data.iteritems():
            if v != getattr(self, k):
                changed[k] = {'old': v, 'new': getattr(self, k)}
        return changed
    cls.get_tracked_changes = get_tracked_changes

    # Ensure we are updating local attributes on model init
    def _post_init(sender, instance, **kwargs):
        _store(instance)
    post_init.connect(_post_init, sender=cls, weak=False)

    # Send changes to kafka (only when a producer is configured)
    if TRACK_CHANGES_KAFKA_PRODUCER:
        def _post_save(sender, instance, created, **kwargs):
            producer = get_producer(TRACK_CHANGES_KAFKA_PRODUCER)
            producer.log_changes(instance, 'Created' if created else 'Updated')
        post_save.connect(_post_save, sender=cls, weak=False)

    # Ensure we are updating local attributes on model save by wrapping
    # the original save(); the original is stashed on the wrapper itself.
    def save(self, *args, **kwargs):
        save._original(self, *args, **kwargs)
        _store(self)
    save._original = cls.save
    cls.save = save

    return cls
def __init__(self):
    """Validate SITE_ID, resolve the cache, and hook Site signals."""
    if not hasattr(settings.SITE_ID, "set"):
        raise TypeError("Invalid type for settings.SITE_ID: %s"
                        % type(settings.SITE_ID).__name__)

    alias = getattr(settings, "CACHE_MULTISITE_ALIAS", "default")
    # Fall back to the configured cache's own KEY_PREFIX.
    default_prefix = settings.CACHES[alias].get("KEY_PREFIX", "")
    self.cache_alias = alias
    self.key_prefix = getattr(settings, "CACHE_MULTISITE_KEY_PREFIX",
                              default_prefix)
    self.cache = get_cache(alias, KEY_PREFIX=self.key_prefix)

    post_init.connect(self.site_domain_cache_hook, sender=Site,
                      dispatch_uid="multisite_post_init")
    pre_save.connect(self.site_domain_changed_hook, sender=Site)
    post_delete.connect(self.site_deleted_hook, sender=Site)
def contribute_to_class(self, cls, name):
    """Add get_<name>_json / set_<name>_json helpers and the init hook."""
    def get_json(model_instance):
        # Dump whatever is currently stored under the attribute (or None).
        return self.dumps(getattr(model_instance, self.attname, None))

    def set_json(model_instance, json):
        # Store the deserialized payload back on the instance.
        setattr(model_instance, self.attname, self.loads(json))

    super(JSONField, self).contribute_to_class(cls, name)

    setattr(cls, "get_%s_json" % self.name, get_json)
    setattr(cls, "set_%s_json" % self.name, set_json)

    post_init.connect(self.post_init, sender=cls)
def __init__(self):
    '''Validate settings.SITE_ID and connect Site cache maintenance hooks.'''
    site_id = settings.SITE_ID
    if not hasattr(site_id, 'set'):
        raise TypeError('Invalid type for settings.SITE_ID: %s'
                        % type(site_id).__name__)
    self.cache_alias = getattr(settings, 'CACHE_MULTISITE_ALIAS', 'default')
    self.key_prefix = getattr(settings, 'CACHE_MULTISITE_KEY_PREFIX', '')
    self.cache = get_cache(self.cache_alias, KEY_PREFIX=self.key_prefix)
    post_init.connect(self.site_domain_cache_hook, sender=Site,
                      dispatch_uid='multisite_post_init')
    pre_save.connect(self.site_domain_changed_hook, sender=Site)
    post_delete.connect(self.site_deleted_hook, sender=Site)
def inner(cls): """Inner callback to return""" # contains a local copy of the previous values of attributes cls.data = {} def has_changed(self, field): """Returns `True` if `field` has changed since initialization.""" if self.data is not_saved: return False return self.data.get(field) != getattr(self, field) cls.has_changed = has_changed def old_value(self, field): """Returns the previous value of `field`""" return self.data.get(field) cls.old_value = old_value def whats_changed(self): """Returns a list of changed attributes.""" changed = {} if self.data is not_saved: return changed for key, value in self.data.items(): if value != getattr(self, key): changed[key] = value return changed cls.whats_changed = whats_changed # Ensure we are updating local attributes on model init # noinspection PyUnusedLocal def _post_init(instance, **kwargs): _store(instance) post_init.connect(_post_init, sender=cls, weak=False) # Ensure we are updating local attributes on model save def save(self, *args, **kwargs): """Intercepts save() in post_init signal""" save.original(self, *args, **kwargs) _store(self) save.original = cls.save cls.save = save return cls
def contribute_to_class(self, cls, name):
    """Perform operations when added to a class.

    Listens for instance construction so initial work can be performed.

    Args:
        cls (type): The model class.
        name (str): The name of the field.
    """
    super(SignatureField, self).contribute_to_class(cls, name)
    # Run the field's initial work whenever an instance is constructed.
    post_init.connect(self._post_init, sender=cls)
def models_register():
    """Resolve MODELS_FOR_LOGGING entries and connect logging receivers."""
    if not MODELS_FOR_LOGGING:
        return
    registered_models = []
    for app in MODELS_FOR_LOGGING:
        item = app.split('.')
        if apps.all_models.get(item[-1]):
            # Entry names an app: take all of its models minus exclusions.
            for v in apps.get_app_config(item[-1]).models.values():
                if '%s.%s' % (app, v.__name__) not in MODELS_FOR_EXCLUDE:
                    registered_models.append(v)
        else:
            # Entry names a single model: "<app_label>.<ModelName>".
            registered_models.append(
                apps.get_registered_model(item[-2], item[-1]))
    for model in registered_models:
        post_init.connect(init_model_attrs, sender=model)
        post_save.connect(save_model, sender=model)
        post_delete.connect(delete_model, sender=model)
def _connect(self):
    """Connect the signal listing."""
    uid = id(self)
    # post_init is used to capture the current state for the model.
    post_init.connect(self.post_init_receiver, sender=self.model_cls,
                      dispatch_uid=uid)
    post_save.connect(self.post_save_receiver, sender=self.model_cls,
                      dispatch_uid=uid)
    post_delete.connect(self.post_delete_receiver, sender=self.model_cls,
                        dispatch_uid=uid)
def test_model_signal(self):
    """Legacy validation API reports lazy references to missing senders."""
    unresolved_references = post_init.unresolved_references.copy()
    post_init.connect(on_post_init, sender='missing-app.Model')
    post_init.connect(OnPostInit(), sender='missing-app.Model')
    try:
        e = ModelErrorCollection(six.StringIO())
        validate_model_signals(e)
        self.assertSetEqual(
            set(e.errors),
            {('model_validation.tests',
              "The `on_post_init` function was connected to the `post_init` "
              "signal with a lazy reference to the 'missing-app.Model' "
              "sender, which has not been installed."),
             ('model_validation.tests',
              "An instance of the `OnPostInit` class was connected to "
              "the `post_init` signal with a lazy reference to the "
              "'missing-app.Model' sender, which has not been installed.")})
    finally:
        # Restore state even when assertions fail so the extra connections
        # don't leak into other tests.
        post_init.unresolved_references = unresolved_references
def register(self, model_class, fields=None, follow=(), format=DEFAULT_SERIALIZATION_FORMAT, exclude_fields=()): """Registers a model with this revision manager.""" # Prevent multiple registration. if self.is_registered(model_class): raise RegistrationError, "%r has already been registered with Reversion." % model_class # Ensure the parent model of proxy models is registered. if model_class._meta.proxy and not \ self.is_registered(model_class._meta.parents.keys()[0]): raise RegistrationError, "%r is a proxy model, and its parent has "\ "not been registered with Reversion." % model_class # Calculate serializable model fields. opts = model_class._meta # Reasoning: # In case of hierarchical models the we need all fields to be registered # This way when we serialize the parent it will show all needed fields. #local_fields = opts.local_fields + opts.local_many_to_many local_fields = opts.fields + opts.many_to_many if fields is None: fields = [f.name for f in local_fields] fields = tuple(fields) # Calculate serializable model file fields. file_fields = [] for field in local_fields: if isinstance(field, models.FileField) and field.name in fields: field.storage = VersionFileStorageWrapper(field.storage) file_fields.append(field) file_fields = tuple(file_fields) # Register the generated registration information. if isinstance(follow, basestring): follow = (follow,) else: follow = tuple(follow) tmp_fields = [f for f in fields if f not in exclude_fields] fields = tuple(tmp_fields) registration_info = RegistrationInfo(fields, file_fields, follow, format) self._registry[model_class] = registration_info # Connect to the post save signal of the model. post_save.connect(self.post_save_receiver, model_class) pre_delete.connect(self.pre_delete_receiver, model_class) pre_save.connect(self.pre_save_receiver, model_class) post_init.connect(self.post_init_receiver, model_class)
def _add_signals_to_cls(cls):
    # repr(cls) as the dispatch_uid guarantees each callback is bound
    # only once per class.
    uid = repr(cls)
    post_init.connect(tracking_init, sender=cls, dispatch_uid=uid)
    post_save.connect(tracking_save, sender=cls, dispatch_uid=uid)
    pre_delete.connect(tracking_delete, sender=cls, dispatch_uid=uid)
def contribute_to_class(self, cls, name):
    """Add counter-manipulation methods to the model class.

    Installs ``increment_<name>``, ``decrement_<name>``, ``reload_<name>``
    and ``reinit_<name>`` on the model, plus the machinery that keeps
    CounterFields out of normal saves.

    Args:
        cls (type): The model class.
        name (str): The name of the field on the model.
    """
    def _increment(model_instance, *args, **kwargs):
        self._increment(model_instance, *args, **kwargs)

    def _decrement(model_instance, *args, **kwargs):
        self._decrement(model_instance, *args, **kwargs)

    def _reload(model_instance):
        self._reload(model_instance)

    def _reinit(model_instance):
        self._reinit(model_instance)

    super(CounterField, self).contribute_to_class(cls, name)

    for prefix, helper in (('increment', _increment),
                           ('decrement', _decrement),
                           ('reload', _reload),
                           ('reinit', _reinit)):
        setattr(cls, '%s_%s' % (prefix, self.name), helper)
    setattr(cls, self.attname, self)

    # CounterFields (and subclasses) must never be written by a default
    # save. Overriding save()/save_base() isn't safe here, and touching
    # update_fields would disturb the save process, so we go deeper and
    # override _do_update() — the method that actually receives the values
    # and field instances being saved — and filter CounterFields out there.
    setattr(cls, '_do_update', type(self)._model_do_update)

    post_init.connect(self._post_init, sender=cls)
def __init__(self, *args, **kwargs):
    """Validate SITE_ID, resolve the multisite cache, and hook Site signals."""
    super(DynamicSiteMiddleware, self).__init__(*args, **kwargs)
    if not hasattr(settings.SITE_ID, 'set'):
        raise TypeError('Invalid type for settings.SITE_ID: %s'
                        % type(settings.SITE_ID).__name__)
    alias = getattr(settings, 'CACHE_MULTISITE_ALIAS', 'default')
    self.cache_alias = alias
    # Fall back to the configured cache's own KEY_PREFIX.
    self.key_prefix = getattr(
        settings, 'CACHE_MULTISITE_KEY_PREFIX',
        settings.CACHES[alias].get('KEY_PREFIX', ''))
    self.cache = caches[alias]
    post_init.connect(self.site_domain_cache_hook, sender=Site,
                      dispatch_uid='multisite_post_init')
    pre_save.connect(self.site_domain_changed_hook, sender=Site)
    post_delete.connect(self.site_deleted_hook, sender=Site)
def test_model_signal(self):
    """Legacy validation collects errors for missing lazy senders."""
    unresolved_references = post_init.unresolved_references.copy()
    post_init.connect(on_post_init, sender='missing-app.Model')
    post_init.connect(OnPostInit(), sender='missing-app.Model')
    try:
        e = ModelErrorCollection(six.StringIO())
        validate_model_signals(e)
        self.assertSetEqual(set(e.errors), {
            ('model_validation.tests',
                "The `on_post_init` function was connected to the `post_init` "
                "signal with a lazy reference to the 'missing-app.Model' "
                "sender, which has not been installed."
             ),
            ('model_validation.tests',
                "An instance of the `OnPostInit` class was connected to "
                "the `post_init` signal with a lazy reference to the "
                "'missing-app.Model' sender, which has not been installed."
             )
        })
    finally:
        # Restore in a finally block so a failing assertion doesn't leak
        # the connections into the rest of the suite.
        post_init.unresolved_references = unresolved_references
def register(self, model, opts_class=None, **options):
    """Register *model* for translation, patching its fields and signals."""
    # Ensure that a base is not registered after a subclass (_registry
    # is closed with respect to taking bases, so we can just check if
    # we've seen the model).
    if model in self._registry:
        if self._registry[model].registered:
            raise AlreadyRegistered(
                'Model "%s" is already registered for translation' % model.__name__)
        else:
            descendants = [d.__name__ for d in self._registry.keys()
                           if issubclass(d, model) and d != model]
            raise DescendantRegistered(
                'Model "%s" cannot be registered after its subclass'
                ' "%s"' % (model.__name__, descendants[0]))
    opts = self._get_options_for_model(model, opts_class, **options)
    opts.registered = True
    # Delete all fields cache for related model (parent and children)
    for related_obj in model._meta.get_all_related_objects():
        delete_cache_fields(related_obj.model)
    # Connect signal for model
    post_init.connect(delete_mt_init, sender=model)
    patch_clean_fields(model)
    patch_metaclass(model)
    # Replace each translatable field with a pair of descriptors: the
    # gettext-aware one under the original name, and the untranslated
    # original under a "ref_<name>" alias.
    for field_name in opts.local_fields.keys():
        ref_field_name = "ref_{field}".format(field=field_name)
        field = model._meta.get_field(field_name)
        ref_descriptor = OriginalFieldDescriptor(field)
        descriptor = GettextFieldDescriptor(field)
        setattr(model, field_name, descriptor)
        setattr(model, ref_field_name, ref_descriptor)
        opts.add_translation_field(field_name, descriptor)
        opts.add_translation_field(field_name, ref_descriptor)
def contribute_to_class(self, cls, name):
    """Install counter helper methods and the post_init hook on *cls*."""
    def _increment(model_instance, *args, **kwargs):
        self._increment(model_instance, *args, **kwargs)

    def _decrement(model_instance, *args, **kwargs):
        self._decrement(model_instance, *args, **kwargs)

    def _reload(model_instance):
        self._reload(model_instance)

    def _reinit(model_instance):
        self._reinit(model_instance)

    super(CounterField, self).contribute_to_class(cls, name)

    helper_methods = {
        'increment_%s' % self.name: _increment,
        'decrement_%s' % self.name: _decrement,
        'reload_%s' % self.name: _reload,
        'reinit_%s' % self.name: _reinit,
    }
    for attr_name, func in helper_methods.items():
        setattr(cls, attr_name, func)
    setattr(cls, self.attname, self)

    post_init.connect(self._post_init, sender=cls)
def inner(cls):
    """Decorator body: adds dirty-attribute tracking to *cls*."""
    # contains a local copy of the previous values of attributes
    cls.__data = {}

    def has_changed(self, field):
        "Returns ``True`` if ``field`` has changed since initialization."
        # UNSAVED sentinel: no snapshot has been stored yet.
        if self.__data is UNSAVED:
            return False
        return self.__data.get(field) != getattr(self, field)
    cls.has_changed = has_changed

    def old_value(self, field):
        "Returns the previous value of ``field``"
        return self.__data.get(field)
    cls.old_value = old_value

    def whats_changed(self):
        "Returns a list of changed attributes."
        changed = {}
        if self.__data is UNSAVED:
            return changed
        # NOTE(review): .iteritems() is Python 2 only.
        for k, v in self.__data.iteritems():
            if v != getattr(self, k):
                changed[k] = v
        return changed
    cls.whats_changed = whats_changed

    # Ensure we are updating local attributes on model init
    def _post_init(sender, instance, **kwargs):
        _store(instance)
    post_init.connect(_post_init, sender=cls, weak=False)

    # Ensure we are updating local attributes on model save by wrapping
    # the original save(); the original is stashed on the wrapper itself.
    def save(self, *args, **kwargs):
        save._original(self, *args, **kwargs)
        _store(self)
    save._original = cls.save
    cls.save = save

    return cls