def _disconnect_signals():
    """Detach every TestPlan watcher signal (used in testing)."""
    # (signal, receiver) pairs; all were connected with TestPlan as the
    # sender, and disconnect order does not matter.
    watchers = (
        (pre_save, plan_watchers.pre_save_clean),
        (post_save, plan_watchers.notify_on_plan_is_updated),
        (pre_delete, plan_watchers.load_email_settings_for_later_deletion),
        (post_delete, plan_watchers.notify_deletion_of_plan),
    )
    for signal, receiver in watchers:
        signal.disconnect(receiver, TestPlan)
def handle(self, *args, **options):
    """Detach the sapl/cmj indexing signal handlers, then run the command."""
    for uid in ('sapl_post_delete_signal', 'cmj_post_delete_signal'):
        post_delete.disconnect(dispatch_uid=uid)
    for uid in ('sapl_post_save_signal', 'cmj_post_save_signal'):
        post_save.disconnect(dispatch_uid=uid)
    self.run()
def disconnect_signals():
    """Detach all treenode maintenance handlers from their signals."""
    # Table of (signal, receiver, dispatch_uid) registered at connect time.
    registrations = (
        (post_init, post_init_treenode, 'post_init_treenode'),
        (post_migrate, post_migrate_treenode, 'post_migrate_treenode'),
        (post_save, post_save_treenode, 'post_save_treenode'),
        (post_delete, post_delete_treenode, 'post_delete_treenode'),
    )
    for signal, receiver, uid in registrations:
        signal.disconnect(receiver, dispatch_uid=uid)
def setUp(self):
    """Detach the Community group-management handlers for these tests."""
    for signal, receiver, uid in (
            (post_save, manage_community_groups, "manage_groups"),
            (post_delete, remove_community_groups, "remove_groups")):
        signal.disconnect(receiver, sender=Community, dispatch_uid=uid)
def cleanup(rollback=False):
    """Detach the logging handlers, then finish the open transaction.

    Commits by default; rolls back instead when *rollback* is true.
    """
    post_save.disconnect(log_save)
    post_delete.disconnect(log_delete)
    if rollback:
        transaction.rollback()
        return
    transaction.commit()
def handle(self, *args, **options):
    """Back up PostgreSQL (outside DEBUG) and sync files to S3.

    Detaches the sapl/cmj indexing signals first so the sync does not
    trigger reindexing side effects.
    """
    post_delete.disconnect(dispatch_uid='sapl_post_delete_signal')
    post_save.disconnect(dispatch_uid='sapl_post_save_signal')
    post_delete.disconnect(dispatch_uid='cmj_post_delete_signal')
    post_save.disconnect(dispatch_uid='cmj_post_save_signal')
    self.logger = logging.getLogger(__name__)
    # Deliberately disabled alternative entry points kept for manual runs:
    # self.close_files()
    # self.calcular_validacao()
    # return
    # self.distribuir_validacao()
    # return
    # self.count_registers(full=False)
    # return
    self.s3_connect()
    # self.__clear_bucket('cmjatai_postgresql')
    # return
    # Skip the database backup during local development.
    if not settings.DEBUG:
        self.update_backup_postgresql()
    self.start_time = timezone.localtime()
    self.s3_sync()
    # Report start/end times and how many records were processed.
    print(self.start_time)
    print(timezone.localtime())
    print(self.count_registros)
def unregister(self, model):
    """Detach the autocompleter maintenance handlers for *model*.

    The dispatch_uid strings must match the ones used at connect time.
    """
    post_save.disconnect(add_obj_to_autocompleter, sender=model,
        dispatch_uid='autocompleter.%s.add' % (model))
    # NOTE(review): 'remoe_old' looks like a typo for 'remove_old', but the
    # uid must match the connect() side exactly — confirm before renaming.
    pre_save.disconnect(remove_old_obj_from_autocompleter, sender=model,
        dispatch_uid='autocompleter.%s.remoe_old' % (model))
    post_delete.disconnect(remove_obj_from_autocompleter, sender=model,
        dispatch_uid='autocompleter.%s.remove' % (model))
def _disconnect_signals():
    """Disconnect the TestPlan watcher receivers (testing helper)."""
    # Disconnect order is irrelevant; grouped by pre-/post- phase.
    pre_save.disconnect(plan_watchers.pre_save_clean, TestPlan)
    pre_delete.disconnect(
        plan_watchers.load_email_settings_for_later_deletion, TestPlan)
    post_save.disconnect(plan_watchers.notify_on_plan_is_updated, TestPlan)
    post_delete.disconnect(plan_watchers.notify_deletion_of_plan, TestPlan)
def unregister(self, name):
    """Remove an achievement from the registry and detach its signals.

    *name* may be an achievement name or an ``Achievement`` subclass
    (its ``name`` attribute is used). Any persisted ``AchievementModel``
    row is deleted best-effort.

    Raises:
        NotRegistered: if no achievement with that name is registered.
    """
    import inspect
    # Imported locally (as in the original) to avoid import cycles.
    from registration import Achievement

    if inspect.isclass(name) and issubclass(name, Achievement):
        name = name.name
    if name not in self._registry:
        raise NotRegistered
    achievement_class = self._registry.pop(name)
    assert issubclass(achievement_class, Achievement)
    try:
        AchievementModel.objects.get(name=achievement_class.name).delete()
    except AchievementModel.DoesNotExist:
        # Fixed: was a bare `except:` that swallowed every error (including
        # DB failures and KeyboardInterrupt). Only "row absent" is expected.
        pass
    post_save.disconnect(
        sender=achievement_class.get_observed_model(),
        dispatch_uid='django_achievements.%s_save' % achievement_class.name
    )
    post_delete.disconnect(
        sender=achievement_class.get_observed_model(),
        dispatch_uid='django_achievements.%s_delete' % achievement_class.name
    )
def handle(self, *args, **options):
    """Detach the sapl/cmj indexing signals and set up the command logger."""
    uid_map = (
        (post_delete, ('sapl_post_delete_signal', 'cmj_post_delete_signal')),
        (post_save, ('sapl_post_save_signal', 'cmj_post_save_signal')),
    )
    for signal, uids in uid_map:
        for uid in uids:
            signal.disconnect(dispatch_uid=uid)
    self.logger = logging.getLogger(__name__)
def turn_off_syncing():
    """Disable all entity-sync signal handlers."""
    for signal, receiver, uid in (
            (post_delete, delete_entity_signal_handler,
             'delete_entity_signal_handler'),
            (post_save, save_entity_signal_handler,
             'save_entity_signal_handler'),
            (post_bulk_operation, bulk_operation_signal_handler,
             'bulk_operation_signal_handler')):
        signal.disconnect(receiver, dispatch_uid=uid)
def handle(self, *args, **options):
    """Purge unsaved threads older than each board's retention window.

    For every board with ``store_threads_for`` > 0, deletes non-saved
    threads whose last reply predates that many hours. The per-post
    delete handler is muted during the bulk delete and always restored.
    Prints per-board statistics.
    """
    boards = Board.objects.filter(store_threads_for__gt=0)
    for board in boards:
        processing_start = datetime.datetime.now()
        # Get the posts older than the amount specified in the board settings.
        time_threshold = datetime.datetime.now().replace(
            tzinfo=utc) - datetime.timedelta(hours=board.store_threads_for)
        queryset = Thread.objects.filter(board=board,
                                         last_reply__lt=time_threshold,
                                         saved=False)
        # Count for stats and delete.
        amount = queryset.count()
        post_delete.disconnect(receiver=post_post_delete, sender=Post)
        try:
            queryset.delete()
        except Exception as e:
            # BUG FIX: was '%s\m' — a literal backslash-m, not a newline.
            sys.stderr.write('%s\n' % (e))
        finally:
            post_delete.connect(post_post_delete, sender=Post)
        processing_time = datetime.datetime.now() - processing_start
        print('%s Board: %s Deleted threads: %s Time passed: %s sec' % (
            datetime.datetime.now(), board, amount, processing_time.seconds))
def connect_signals(cls, connected=True):
    """Attach or detach the model's signal handlers.

    When *connected* is true, connects ``post_save`` (plus ``m2m_changed``
    for each many-to-many field) and ``post_delete`` handlers declared on
    the class; when false, disconnects the same receivers.

    NOTE(review): reconstructed from a collapsed source line — the nesting
    of the m2m loop inside the post_save guard is the most plausible
    reading; confirm against the original file.
    """
    # Only wire post_save if the class opted in and defines the handler.
    if (cls.model and getattr(cls, 'post_save_connect', False)
            and hasattr(cls, 'post_save')):
        if connected:
            post_save.connect(cls.post_save, sender=cls.model)
        else:
            post_save.disconnect(cls.post_save, sender=cls.model)
        # Track m2m mutations through each relation's intermediate model.
        for field in cls.model._meta.get_fields():
            if isinstance(field, ManyToManyField):
                if connected:
                    m2m_changed.connect(cls.m2m_changed, sender=getattr(
                        cls.model, field.name).through)
                else:
                    m2m_changed.disconnect(cls.m2m_changed, sender=getattr(
                        cls.model, field.name).through)
    # post_delete is wired independently of post_save.
    if (cls.model and getattr(cls, 'post_delete_connect', False)
            and hasattr(cls, 'post_delete')):
        if connected:
            post_delete.connect(cls.post_delete, sender=cls.model)
        else:
            post_delete.disconnect(cls.post_delete, sender=cls.model)
def handle(self, *args, **options):
    """Delete unsaved, expired threads per board and print statistics.

    Boards with ``store_threads_for`` > 0 have their non-saved threads
    purged once the last reply is older than that many hours; the
    per-post delete signal is disconnected during the bulk delete and
    reconnected afterwards.
    """
    boards = Board.objects.filter(store_threads_for__gt=0)
    for board in boards:
        processing_start = datetime.datetime.now()
        # Get the posts older than the amount specified in the board settings.
        time_threshold = datetime.datetime.now().replace(tzinfo=utc) \
            - datetime.timedelta(hours=board.store_threads_for)
        queryset = Thread.objects.filter(board=board,
                                         last_reply__lt=time_threshold,
                                         saved=False)
        # Count for stats and delete.
        amount = queryset.count()
        post_delete.disconnect(receiver=post_post_delete, sender=Post)
        try:
            queryset.delete()
        except Exception as e:
            # BUG FIX: '%s\m' printed a literal "\m"; '\n' is the newline.
            sys.stderr.write('%s\n' % (e))
        finally:
            post_delete.connect(post_post_delete, sender=Post)
        processing_time = datetime.datetime.now() - processing_start
        print('%s Board: %s Deleted threads: %s Time passed: %s sec' % (
            datetime.datetime.now(), board, amount, processing_time.seconds))
def handle(self, *args, **options):
    """Disable auto_now handling, then detach the sapl/cmj index signals."""
    m = Manutencao()
    # m.desativa_signals()
    m.desativa_auto_now()
    for uid in ('sapl_post_delete_signal', 'cmj_post_delete_signal'):
        post_delete.disconnect(dispatch_uid=uid)
    for uid in ('sapl_post_save_signal', 'cmj_post_save_signal'):
        post_save.disconnect(dispatch_uid=uid)
def _unregister_model_from_signals(model):
    """
    Unregisters the model from post_save and post_delete Django signals.

    :param model: Django Model object or import path as str.
    """
    for signal, receiver in ((post_save, generic_post_save),
                             (post_delete, generic_post_delete)):
        signal.disconnect(receiver, sender=model)
def disconnect__models(self):
    """Detach the save/delete handlers — per model when models are set,
    otherwise globally (``sender=None`` is the disconnect default)."""
    senders = self.models if self.models is not None else (None,)
    for sender in senders:
        post_save.disconnect(self.handle_post_save, sender=sender)
        post_delete.disconnect(self.handle_post_delete, sender=sender)
def teardown(self):  # pragma: no cover
    """Detach the Aristotle status/concept handlers, then defer to the base."""
    from aristotle_mdr.models import Status, _concept
    hooks = (
        (post_save, self.handle_status_change, Status),
        (post_delete, self.handle_status_change, Status),
        (post_save, self.handle_concept_save, _concept),
        (pre_delete, self.handle_concept_delete, _concept),
    )
    for signal, receiver, sender in hooks:
        signal.disconnect(receiver, sender=sender)
    super(AristotleSignalProcessor, self).teardown()
def listen_off():
    """
    Turns signal listening off so account balances are not updated.

    Use this before saving/deleting many transactions in a short window
    to avoid repeated balance recomputation.
    """
    for signal, receiver in ((post_save, Transaction.on_save),
                             (post_delete, Transaction.on_delete)):
        signal.disconnect(receiver, sender=Transaction)
def deregister_signal_handlers():
    """Disconnect the product/category handlers for bulk importers."""
    registrations = (
        (post_save, post_product_category_save_signal_handler,
         Product.categories.through),
        (post_save, product_post_save_signal_handler, Product),
        (post_delete, product_post_delete_signal_handler, Product),
        (post_save, category_change_handler, Category),
        (post_delete, category_change_handler, Category),
    )
    for signal, receiver, sender in registrations:
        signal.disconnect(receiver, sender=sender)
def disable():
    """
    Disconnects the tracking signals, and so disables tracking.
    """
    for signal, receiver in ((post_init, post_init_receiver),
                             (post_save, post_save_receiver),
                             (m2m_changed, m2m_changed_receiver),
                             (post_delete, post_delete_receiver)):
        signal.disconnect(dispatch_uid=_dispatch_uid(receiver))
def unregister(self, model):
    """Detach the autocompleter add/remove handlers for *model*."""
    # uids must match the ones used when the handlers were connected.
    post_delete.disconnect(remove_obj_from_autocompleter, sender=model,
                           dispatch_uid='autocompleter.%s.remove' % (model))
    post_save.disconnect(add_obj_to_autocompleter, sender=model,
                         dispatch_uid='autocompleter.%s.add' % (model))
def teardown(self):
    """
    Disconnect for Events and Calendars.
    """
    for sender in (Event, Calendar):
        post_save.disconnect(self.handle_save, sender=sender)
        post_delete.disconnect(self.handle_delete, sender=sender)
def handle(self, *args, **options): post_delete.disconnect(dispatch_uid='sapl_post_delete_signal') post_save.disconnect(dispatch_uid='sapl_post_save_signal') post_delete.disconnect(dispatch_uid='cmj_post_delete_signal') post_save.disconnect(dispatch_uid='cmj_post_save_signal') self.logger = logging.getLogger(__name__) """url = 'https://www.jatai.go.leg.br/noticias/morre-o-ex-vereador-eudes-assis-carvalho'
def setUp(self):
    # Disabling signals to simplify these tests and because create doesn't
    # call the signals anyway.
    version_changed_signal.disconnect(version_changed,
                                      dispatch_uid='version_changed')
    for signal in (post_save, post_delete):
        signal.disconnect(update_status, sender=Version,
                          dispatch_uid='version_update_status')
def setUpClass(cls):
    """Mute the Zapier delete hook, then build the shared domain fixture."""
    post_delete.disconnect(zapier_subscription_post_delete,
                           sender=ZapierSubscription)
    super(BaseDumpLoadTest, cls).setUpClass()
    name = uuid.uuid4().hex
    cls.domain_name = name
    cls.domain = Domain(name=name)
    cls.domain.save()
    cls.default_objects_counts = Counter({DomainMigrationProgress: 1})
def callback_delete(sender, **kwargs):
    """post_delete receiver that records a deletion in ModelsOperation.

    Disconnects itself while writing the audit row so the write cannot
    re-enter this handler; logging is best-effort and never raises.
    """
    post_delete.disconnect(callback_delete)
    try:
        mo = ModelsOperation()
        mo.model_class = sender.__name__
        mo.operation = 'Deletion'
        mo.save()
    except Exception:
        # Fixed: was a bare `except:`. Audit logging stays best-effort,
        # but system-exiting exceptions are no longer swallowed.
        pass
    finally:
        # Fixed: reconnect now guaranteed even on unexpected exits.
        post_delete.connect(callback_delete)
def handle(self, *args, **options):
    """ Run the data pipeline """
    hooks = ((post_save, serie_create_update), (post_delete, serie_delete))
    # Mute the Serie sync hooks while regenerating everything.
    for signal, receiver in hooks:
        signal.disconnect(receiver, sender=Serie)
    generate(Serie.objects.all())
    for signal, receiver in hooks:
        signal.connect(receiver, sender=Serie)
def delete(self, *args, **kwargs):
    """Delete this Oplog and its entries without tripping the entry signal.

    Entries are deleted one by one (the original comment notes this
    avoids recursive errors) with the per-entry post_delete receiver
    disconnected; the receiver is always reconnected afterwards.
    """
    # Disconnect the Signal for entries
    post_delete.disconnect(delete_oplog_entry, sender=OplogEntry)
    try:
        # Recursively delete instances of :model:`OplogEntry` to avoid
        # recursive errors
        for entry in OplogEntry.objects.filter(oplog_id=self.id):
            entry.delete()
    finally:
        # Fixed: reconnect in `finally` so a failing child delete can no
        # longer leave the signal permanently disconnected.
        post_delete.connect(delete_oplog_entry, sender=OplogEntry)
    super(Oplog, self).delete(*args, **kwargs)
def setUp(self):
    """Mute the Community group handlers, then build user/community fixtures."""
    for signal, receiver, uid in (
            (post_save, manage_community_groups, "manage_groups"),
            (post_delete, remove_community_groups, "remove_groups")):
        signal.disconnect(receiver, sender=Community, dispatch_uid=uid)
    User.objects.create(username='******', password='******')
    self.systers_user = SystersUser.objects.get()
    self.community = Community.objects.create(name="Foo", slug="foo",
                                              order=1,
                                              admin=self.systers_user)
def handle(self, *args, **options):
    """Detach the sapl/cmj index signals, then run the import pipeline."""
    for uid in ('sapl_post_delete_signal', 'cmj_post_delete_signal'):
        post_delete.disconnect(dispatch_uid=uid)
    for uid in ('sapl_post_save_signal', 'cmj_post_save_signal'):
        post_save.disconnect(dispatch_uid=uid)
    # self.clear()
    self.run()
    self.reset_sequences()
    self.migrar_documentos()
def assertNoActionOnSynchronize(self, sender, save=True, delete=True):
    """Assert synchronize() triggers no save/delete signal for *sender*."""
    def fail(**kwargs):
        self.fail('Signal caught - action performed.')
    # Hook the watched signals; firing either one fails the test.
    for signal, watched in ((post_save, save), (post_delete, delete)):
        if watched:
            signal.connect(fail, sender=sender)
    self.synchronize()
    # Disconnecting an unconnected receiver is a harmless no-op.
    for signal in (post_save, post_delete):
        signal.disconnect(fail, sender=sender)
def set_up(self):
    """Context helper: run the body with Article sync signals muted,
    restoring them afterwards no matter what."""
    post_delete.disconnect(post_delete_signal_handler, sender=Article)
    post_save.disconnect(post_save_signal_handler, sender=Article)
    self.clear()
    try:
        yield
    finally:
        # self.clear()
        post_save.connect(post_save_signal_handler, sender=Article)
        post_delete.connect(post_delete_signal_handler, sender=Article)
def handle(self, *args, **options):
    """Detach the sapl/cmj index signals, then run the BI job."""
    for signal, uids in (
            (post_delete, ('sapl_post_delete_signal',
                           'cmj_post_delete_signal')),
            (post_save, ('sapl_post_save_signal',
                         'cmj_post_save_signal'))):
        for uid in uids:
            signal.disconnect(dispatch_uid=uid)
    self.logger = logging.getLogger(__name__)
    # self.run_busca_desordem_de_dispositivos()
    self.run_bi()
def handle(self, *args, **options):
    """Detach the sapl/cmj index signals, then run the PLOE routines."""
    for uid in ('sapl_post_delete_signal', 'cmj_post_delete_signal'):
        post_delete.disconnect(dispatch_uid=uid)
    for uid in ('sapl_post_save_signal', 'cmj_post_save_signal'):
        post_save.disconnect(dispatch_uid=uid)
    self.logger = logging.getLogger(__name__)
    self.run_ploe43_2019()
    self.run_emmod_ao_ploe09()
def tearDown(self):
    """Detach all redis listing/publish handlers and flush the test DB."""
    redis.client = None
    for signal, receiver in (
            (pre_save, redis.listing_pre_save),
            (post_save, redis.listing_post_save),
            (pre_delete, redis.listing_pre_delete),
            (post_delete, redis.listing_post_delete)):
        signal.disconnect(receiver, sender=Listing)
    content_published.disconnect(redis.publishable_published)
    content_unpublished.disconnect(redis.publishable_unpublished)
    super(TestRedisListings, self).tearDown()
    self.redis.flushdb()
def unregister_signal_handlers():
    """Detach the cache-purging handlers for pages, images and documents."""
    Image = get_image_model()
    # Page publish/unpublish hooks, one pair per registered page model.
    for model in PAGE_MODEL_CLASSES:
        page_published.disconnect(purge_page_from_cache, sender=model)
        page_unpublished.disconnect(purge_page_from_cache, sender=model)
    # Image and document save/delete hooks.
    for receiver, sender in ((purge_image_from_cache, Image),
                             (purge_document_from_cache, Document)):
        post_save.disconnect(receiver, sender=sender)
        post_delete.disconnect(receiver, sender=sender)
def unregister(self, model_class):
    """Removes a model from version control.

    Raises RegistrationError if the model was never registered.
    (Python 2 module: note the old-style raise syntax below.)
    """
    try:
        registration_info = self._registry.pop(model_class)
    except KeyError:
        raise RegistrationError, "%r has not been registered with Reversion." % model_class
    else:
        # Unwrap the storage wrappers installed at registration time.
        for field in registration_info.file_fields:
            field.storage = field.storage.wrapped_storage
        post_save.disconnect(self.post_save_receiver, model_class)
        post_delete.disconnect(self.post_delete_receiver, model_class)
def disconnect_entry_signals():
    """
    Disconnect all the signals on Entry model.
    """
    for signal, uid in ((post_save, ENTRY_PS_PING_DIRECTORIES),
                        (post_save, ENTRY_PS_PING_EXTERNAL_URLS),
                        (post_save, ENTRY_PS_FLUSH_SIMILAR_CACHE),
                        (post_delete, ENTRY_PD_FLUSH_SIMILAR_CACHE)):
        signal.disconnect(sender=Entry, dispatch_uid=uid)
def unregister(self, model):
    """Removes a model from version control.

    Accepts either a model class or an "app_label.ModelName" string.
    Raises RegistrationError if the model was never registered.
    (Python 2 module: note the `unicode` check and `get_model` helper.)
    """
    if isinstance(model, (str, unicode)):
        model = get_model(*model.split("."))
    if not self.is_registered(model):
        raise RegistrationError("{model} has not been registered with django-reversion".format(model=model))
    del self._registered_models[model]
    # Detach every receiver attached at registration time.
    post_save.disconnect(self._post_save_receiver, model)
    pre_delete.disconnect(self._pre_delete_receiver, model)
    pre_save.disconnect(self.pre_save_smart_handler, model)
    post_delete.disconnect(self.post_delete_smart_handler, model)
def tearDown(self):
    """Detach the redis listing and publish handlers, then flush redis."""
    redis.client = None
    listing_hooks = (
        (pre_save, redis.listing_pre_save),
        (post_save, redis.listing_post_save),
        (pre_delete, redis.listing_pre_delete),
        (post_delete, redis.listing_post_delete),
    )
    for signal, receiver in listing_hooks:
        signal.disconnect(receiver, sender=Listing)
    content_published.disconnect(redis.publishable_published)
    content_unpublished.disconnect(redis.publishable_unpublished)
    super(TestAuthorLH, self).tearDown()
    self.redis.flushdb()
def _stop_listening(self):
    """
    Stops listening. Disconnects both handlers by dispatch_uid,
    regardless of whether they are currently connected.
    """
    model = self.model_reference
    if not model:
        return
    for signal, uid in ((post_save, self.post_save_uid),
                        (post_delete, self.post_delete_uid)):
        signal.disconnect(None, sender=model, dispatch_uid=uid)
def handle(self, *args, **options):
    """Detach the sapl/cmj index signals, then run the full migration."""
    post_delete.disconnect(dispatch_uid='sapl_post_delete_signal')
    post_save.disconnect(dispatch_uid='sapl_post_save_signal')
    post_delete.disconnect(dispatch_uid='cmj_post_delete_signal')
    post_save.disconnect(dispatch_uid='cmj_post_save_signal')
    # Older receiver-based disconnects kept for reference:
    # post_delete.disconnect(sapl_post_delete_signal)
    # post_save.disconnect(sapl_post_save_signal)
    # post_delete.disconnect(cmj_post_delete_signal)
    # post_save.disconnect(cmj_post_save_signal)
    self.sync = options['sync']  # command-line flag read by run()
    self.run()
    self.reset_sequences()
    self.migrar_documentos()
def turn_off_syncing(for_post_save=True, for_post_delete=True,
                     for_m2m_changed=True, for_post_bulk_operation=True):
    """
    Disables all of the signals for syncing entities. By default, everything
    is turned off. If the user wants to turn off everything but one signal,
    for example the post_save signal, they would do:

        turn_off_sync(for_post_save=False)
    """
    toggles = (
        (for_post_save, post_save, save_entity_signal_handler,
         'save_entity_signal_handler'),
        (for_post_delete, post_delete, delete_entity_signal_handler,
         'delete_entity_signal_handler'),
        (for_m2m_changed, m2m_changed, m2m_changed_entity_signal_handler,
         'm2m_changed_entity_signal_handler'),
        (for_post_bulk_operation, post_bulk_operation,
         bulk_operation_signal_handler, 'bulk_operation_signal_handler'),
    )
    for enabled, signal, receiver, uid in toggles:
        if enabled:
            signal.disconnect(receiver, dispatch_uid=uid)
def setUp(self, distribute_mock):
    """Build users, bases, apys and clients with storage sync muted.

    *distribute_mock* is injected by a patch decorator on the test class
    (not visible here) and is forced to report success.
    """
    # Mute every storage-synchronization hook so fixtures save locally only.
    post_save.disconnect(synchronize_to_storage, sender=Apy)
    ready_to_sync.disconnect(synchronize_to_storage, sender=Apy)
    post_save.disconnect(initialize_on_storage, sender=Base)
    post_delete.disconnect(synchronize_to_storage_on_delete, sender=Apy)
    distribute_mock.return_value = True
    self.user1 = User.objects.create_user('user1', '*****@*****.**', 'pass')
    self.user1.save()
    auth, created = AuthProfile.objects.get_or_create(user=self.user1)
    auth.user = self.user1
    auth.save()
    self.user2 = User.objects.create_user('user2', '*****@*****.**', 'pass')
    self.user2.save()
    auth, created = AuthProfile.objects.get_or_create(user=self.user2)
    auth.user = self.user2
    auth.save()
    self.base1 = Base.objects.create(name="base1", user=self.user1)
    self.base1_apy1 = Apy.objects.create(name="base1_apy1", base=self.base1)
    self.base1_apy1.save()
    self.base1_apy1_not_everyone = Apy.objects.create(
        name="base1_apy1_not_everyone", base=self.base1)
    self.base1_apy1_not_everyone.save()
    self.base1_apy1_everyone = Apy.objects.create(
        name="base1_apy1_everyone", base=self.base1, everyone=True)
    self.base1_apy1_everyone.save()
    self.base1_apy_xml = Apy.objects.create(name="base1_apy_xml",
                                            base=self.base1)
    # NOTE(review): implicit concatenation yields
    # "def func(self): return 'bla'" with NO newline, and the module
    # assignment is never save()d — confirm both are intentional.
    self.base1_apy_xml.module = "def func(self):"\
        " return 'bla'"
    self.base1_apy_public = Apy.objects.create(name="base1_apy_public",
                                               base=self.base1)
    self.base1_apy_public.public = True
    self.base1_apy_public.save()
    # add setting to base1
    setting = Setting(base=self.base1)
    setting.key = "setting1_key"
    setting.value = "setting2_value"
    setting.save()
    self.client1 = Client()  # logged in with objects
    self.client2 = Client()  # logged in without objects
    self.client3 = Client()  # not logged in
    self.client_csrf = Client(enforce_csrf_checks=True)  # not logged in
def unregister(self, model_or_iterable):
    """
    Unregisters the given model(s).

    If a model isn't already registered, this will raise NotRegistered.
    """
    if isinstance(model_or_iterable, ModelBase):
        model_or_iterable = [model_or_iterable]
    for model in model_or_iterable:
        if model not in self._registry:
            raise NotRegistered("The model %s is not registered" % model.__name__)
        # Detach the signals, then drop the registry entry.
        entry = self._registry.pop(model)
        post_save.disconnect(entry.post_save_callback, sender=model)
        post_delete.disconnect(entry.post_delete_callback, sender=model)
def disconnect_entry_signals():
    """
    Disconnect all the signals on Entry model.
    """
    save_uids = (ENTRY_PS_PING_DIRECTORIES,
                 ENTRY_PS_PING_EXTERNAL_URLS,
                 ENTRY_PS_FLUSH_SIMILAR_CACHE)
    for uid in save_uids:
        post_save.disconnect(sender=Entry, dispatch_uid=uid)
    post_delete.disconnect(sender=Entry,
                           dispatch_uid=ENTRY_PD_FLUSH_SIMILAR_CACHE)
def disconnect_discussion_signals():
    """
    Disconnect all the signals on Comment model provided by Zinnia.
    """
    for signal, uid in (
            (post_save, COMMENT_PS_COUNT_DISCUSSIONS),
            (post_delete, COMMENT_PD_COUNT_DISCUSSIONS),
            (comment_was_flagged, COMMENT_WF_COUNT_DISCUSSIONS),
            (comment_was_posted, COMMENT_WP_COUNT_COMMENTS),
            (pingback_was_posted, PINGBACK_WF_COUNT_PINGBACKS),
            (trackback_was_posted, TRACKBACK_WF_COUNT_TRACKBACKS)):
        signal.disconnect(sender=comment_model, dispatch_uid=uid)
def disconnect_signals():
    """Detach the search-index callbacks for records, tags and collections."""
    # Record uses distinct save/delete callbacks.
    post_save.disconnect(post_record_save_callback, sender=Record)
    post_delete.disconnect(post_record_delete_callback, sender=Record)
    # TaggedItem and CollectionItem share one callback for both signals.
    for callback, sender in ((post_taggeditem_callback, TaggedItem),
                             (post_collectionitem_callback, CollectionItem)):
        post_save.disconnect(callback, sender=sender)
        post_delete.disconnect(callback, sender=sender)
def deletefile(sender, instance=None, **kwargs):
    """One-shot receiver that clears this document field after a delete.

    Closure over ``self``, ``uid``, ``commit_msg``, ``username`` and
    ``save`` from the enclosing scope; presumably connected to
    ``post_delete`` with ``dispatch_uid=uid`` (it disconnects that uid
    when done) — confirm against the enclosing function.
    """
    # check that the instance is the right one
    fieldfile = getattr(instance, self.fieldname)
    try:
        saved_uid = fieldfile.name
    except AttributeError:
        # an instance of another class
        return
    if saved_uid != uid:
        return
    # Clear the field and remove the backing document from the backend.
    fieldfile.name = None
    setattr(instance, fieldfile.field.name, fieldfile.name)
    self.backend.del_document(instance, commit_msg, username)
    # Delete the filesize cache
    if hasattr(fieldfile, '_size'):
        del fieldfile._size
    fieldfile._committed = False
    if save:
        instance.save()
    # remove signal
    post_delete.disconnect(dispatch_uid=uid)
def disconnect(self):
    """disconnect Django signals"""
    model_name = self.model.__name__
    post_save.disconnect(
        self.save_receiver, self.model,
        dispatch_uid="observe_post_save_model_{}".format(model_name))
    post_delete.disconnect(
        self.delete_receiver, self.model,
        dispatch_uid="observe_post_delete_model_{}".format(model_name))
def stop_listening():
    """Detach the completion listeners by their dispatch uids."""
    for signal, uid in ((post_save, 'completion.listeners.update_obj'),
                        (post_delete, 'completion.listeners.remove_obj')):
        signal.disconnect(dispatch_uid=uid)
from django.conf import settings
from django.db.models.signals import post_save, post_delete
from piston.models import consumer_post_save, consumer_post_delete, Consumer
from piston import utils
from amo.decorators import json_response

# These send emails to the user about the creation of the OAuth tokens
# within the site. We don't want that.
post_save.disconnect(consumer_post_save, sender=Consumer)
post_delete.disconnect(consumer_post_delete, sender=Consumer)


# Monkey patch the rc handler in django-piston so that all the rc responses
# that piston will return are JSON. It's nicer to write an API if we are
# consistent.
class json_factory(utils.rc_factory):
    # Non-2xx canned responses get wrapped in a JSON error body;
    # successful ones pass through unchanged.
    def __getattr__(self, attr):
        response = super(json_factory, self).__getattr__(attr)
        if response.status_code >= 300:
            return json_response({'error': response.content},
                                 status_code=response.status_code)
        return response

# Only install the patch for the Marketplace deployment.
if settings.MARKETPLACE:
    utils.rc = json_factory()
def disconnect():
    """Detach the Solr product listeners when lfs_solr is installed."""
    if 'lfs_solr' not in settings.INSTALLED_APPS:
        return
    post_save.disconnect(product_saved_listener, sender=Product)
    post_delete.disconnect(product_deleted_listener, sender=Product)
def synchro_disconnect():
    """Detach every synchro signal handler by its dispatch uid."""
    for signal, uid in ((post_save, 'synchro_add_chg'),
                        (post_delete, 'synchro_del'),
                        (m2m_changed, 'synchro_m2m')):
        signal.disconnect(dispatch_uid=uid)