def update_image_sizes(sender, **kwargs):
    """post_save handler: shrink an oversized main image in place and
    write a thumbnail copy beside it.

    ``sender``/``kwargs`` follow Django's post_save signature; the saved
    ImageRecord arrives as ``kwargs['instance']``.  Re-saves the record
    with this handler disconnected to avoid signal recursion.
    """
    img_rec = kwargs.get('instance', None)
    if img_rec is None:
        return

    # (1) Resize the main image in place if either side exceeds the cap.
    # The bounding box is square, so the width constant bounds both axes.
    if (img_rec.main_image.width > MAX_MAIN_IMAGE_WIDTH
            or img_rec.main_image.height > MAX_MAIN_IMAGE_WIDTH):
        im = Image.open(img_rec.main_image.file.name)
        # Image.LANCZOS is the same filter the old Image.ANTIALIAS alias
        # pointed at; ANTIALIAS was removed in Pillow 10.
        im.thumbnail((MAX_MAIN_IMAGE_WIDTH, MAX_MAIN_IMAGE_WIDTH), Image.LANCZOS)
        im.save(img_rec.main_image.file.name, quality=90)

    # (2) Build the thumbnail from the (possibly resized) main image.
    thumb = Image.open(img_rec.main_image.file.name)
    thumb.thumbnail((MAX_THUMB_IMAGE_WIDTH, MAX_THUMB_IMAGE_WIDTH), Image.LANCZOS)
    thumb_fullpath = os.path.join(
        settings.MEDIA_ROOT,
        img_rec.get_image_upload_directory_thumb(
            os.path.basename(img_rec.main_image.path)))

    # Create the thumbnail directory on first use.
    if not os.path.isdir(os.path.dirname(thumb_fullpath)):
        os.makedirs(os.path.dirname(thumb_fullpath))
    thumb.save(thumb_fullpath, quality=100)

    # Disconnect the post_save hook so the save() below does not recurse,
    # persist the thumbnail path on the record, then reconnect.
    post_save.disconnect(ImageRecord.update_image_sizes, sender=ImageRecord)
    img_rec.thumb_image.name = img_rec.get_image_upload_directory_thumb(
        os.path.basename(thumb_fullpath))
    img_rec.save()
    post_save.connect(ImageRecord.update_image_sizes, sender=ImageRecord)
Example #2
0
def fill_out_background_color(sender, **kwargs):
    """post_save handler: derive a carton background color from the
    left/right border pixels of the carton image.

    Averages the RGB values of the first and last pixel columns and stores
    the result as a ``#rrggbb`` hex string on the instance.  Without an
    image, Episodes fall back to "" and Shows to "transparent".
    """
    if sender == Episode or sender == Show:
        instance = kwargs.get('instance', None)
        raw = kwargs.get('raw', False)
        # Skip fixture loading (raw=True) and spurious signals.
        if not raw and instance is not None:
            if instance.carton_image and instance.carton_image.path:
                im = Image.open(instance.carton_image.path)
                pixels = im.load()
                width, height = im.size
                border_pixels = []

                # Sample the leftmost and rightmost pixel of every row.
                for y in range(height):
                    border_pixels.append(pixels[0, y])
                    border_pixels.append(pixels[width - 1, y])

                # Integer division: the hex formatting below needs ints
                # (plain "/" yields floats on Python 3, which would raise).
                red = sum(pixel[0] for pixel in border_pixels) // len(border_pixels)
                green = sum(pixel[1] for pixel in border_pixels) // len(border_pixels)
                blue = sum(pixel[2] for pixel in border_pixels) // len(border_pixels)

                # '{:02x}' is equivalent to hex(x)[2:].zfill(2) for 0-255.
                instance.carton_background_color = '#{:02x}{:02x}{:02x}'.format(red, green, blue)
            else:
                if sender == Episode:
                    instance.carton_background_color = ""
                else:
                    instance.carton_background_color = "transparent"

            # Disconnect while saving so this handler does not recurse;
            # finally guarantees reconnection even if save() raises.
            post_save.disconnect(fill_out_background_color)
            try:
                instance.save()
            finally:
                post_save.connect(fill_out_background_color)
def priority_user_import(sender, **kwargs):
    # post_save handler (Python 2 syntax): when a user is flagged for
    # immediate import, build a single-user CSV Import and run it now.
    user = kwargs['instance']
    if user.priority == PRIORITY_IMMEDIATE:
        # Pause this handler so the nested user.save() calls do not recurse.
        post_save.disconnect(priority_user_import, sender=User)

        try:
            # Queue record tracking this immediate import.
            imp = Import(priority=user.priority, csv_type='user')
            imp.save()

            # Demote the user and attach it to the queue entry.
            user.priority = PRIORITY_DEFAULT
            user.queue_id = imp.pk
            user.save()

            # Best-effort CSV build; failures are recorded on the Import
            # instead of aborting (the bare except is deliberate here).
            try:
                imp.csv_path = UserBuilder([user]).build()
            except:
                imp.csv_errors = traceback.format_exc()

            if imp.csv_path:
                imp.import_csv()
            else:
                # Build failed: detach from the queue, raise priority so
                # the regular pipeline retries, and drop the queue entry.
                user.queue_id = None
                user.priority = PRIORITY_HIGH
                user.save()
                imp.delete()

        except Exception, err:
            log.error('Immediate user provision failed: %s' % (err))
            user.priority = PRIORITY_HIGH
            user.save()

        # Reconnect regardless of outcome (both paths fall through here).
        post_save.connect(priority_user_import, sender=User)
Example #4
0
    def _post_save_received(self, sender, instance, created, **kwargs):
        """post_save hook for one link in a parent/child chain.

        On creation, attaches the configured parent to the saved instance;
        on a later save (a "move"), cascades the selection through the
        chain from the appropriate side.
        """
        # when an object in this chain gets saved, make sure
        # its parent is set if it was just created, or
        # cascade from this object to select correct objects
        # in the case of a move
        # Ignore saves of any instance other than the one we track.
        if not instance == self.instance:
            return

        # set parents on newly saved instances
        if created and self._parent_link:
            # make sure we don't get stuck in a signal loop
            post_save.disconnect(self._post_save_received, self._meta.model)

            # on m2m relations we need to add parents, not set them
            if self._parent_relation_is_m2m:
                related_set = getattr(self.instance, self._parent_relation)
                related_set.add(self._parent_link.instance)
            else:
                setattr(self.instance, self._parent_relation, 
                    self._parent_link.instance)

            # save changes and reset the signal listener
            self.instance.save()
            post_save.connect(self._post_save_received, 
                    sender=self._meta.model)
        
        # allow moving by updating the chain after a save - we only
        # do this when an object wasn't created to allow a person
        # to save multiple new objects on different levels of the chain.
        if not created:
            if self._parent_link:
                self._parent_link._cascade_from_child()
            else:
                self._child_link._cascade_from_parent()
Example #5
0
    def handle(self, *args, **options):
        """Regenerate user slugs (all users, or only the slugs given as
        args) and propagate the change to each linked CKAN user."""
        verbose = int(options['verbosity']) > 1
        if HAS_CKAN:  # Prevent user to be automatically created
            post_save.disconnect(sync_ckan_on_save, sender=User, dispatch_uid="youckan.ckan.sync_user")

        users = User.objects.filter(slug__in=args) if args else User.objects.all()
        if verbose:
            self.stdout.write('{0} user(s) to process'.format(users.count()))
        for user in users:
            if verbose:
                self.stdout.write('-> Processing user "{0}"'.format(user.full_name))
            if HAS_CKAN:  # Store CKAN id before slug change
                try:
                    response = client.action('user_show', {'id': user.slug})
                # Narrowed from a bare "except:" so SystemExit and
                # KeyboardInterrupt still abort the command.
                except Exception:
                    self.stderr.write('Unable to fetch CKAN user for "{0}"'.format(user.full_name))
                    continue
                if not response.get('success'):
                    self.stderr.write('Unable to fetch CKAN user for "{0}"'.format(user.full_name))
                    continue
                ckan_user_id = response['result']['id']

            # Trigger slug update
            user.slug = None
            user.save()

            if HAS_CKAN:
                # Push the refreshed profile to CKAN under the stored id.
                response = client.action('user_update', {
                    'id': ckan_user_id,
                    'name': user.slug,
                    'email': user.email,
                    'fullname': user.full_name,
                    'about': user.profile.about,
                    'sysadmin': user.is_superuser,
                })
Example #6
0
def registerInstagramListener(**kwargs):
	'''
	Trigger that is invoked when a hashtag is registered or updated using the model save method

	Creates an Instagram real-time subscription for the saved Subscription
	instance (geography, user, or other object types), stores the remote
	subscription id on the instance, then backfills recent media.
	'''
	instance = kwargs['instance']
	object_type = instance.object_type
	object_value = instance.object_value
	# Instagram will POST updates to this per-subscription URL.
	callback_url = "{0}/instagramPush/{1}".format(CALLBACK_HOST, instance.id)
	res = None
	if object_type == "geography":
		# Geography subscriptions are keyed by lat/lng/radius, not object_id.
		res = api.create_subscription( object=object_type, aspect='media', lat=instance.lat, lng=instance.lng, radius=instance.radius, callback_url=callback_url )
	elif object_type == "user":
		print "Handling user"
		# Resolve the username to a numeric id via search.
		# NOTE(review): raises IndexError when no exact username match is
		# found - confirm upstream validation guarantees one.
		user_res = api.user_search(q=object_value, count=1)
		user_id = [ i.id for i in user_res if i.username == object_value][0]

		object_value = user_id

		res = api.create_subscription( object=object_type, object_id=user_id, aspect='media', callback_url=callback_url )
	else:
		res = api.create_subscription( object=object_type, object_id=object_value, aspect='media', callback_url=callback_url )


	#Disconnect the handler so we don't fall into endless loop
	post_save.disconnect(registerInstagramListener, sender=Subscription)
	instance.remote_id = res['data']['id']
	instance.save()
	post_save.connect(registerInstagramListener, sender=Subscription)

	#Populate the subscription with recent media
	update = {'object':object_type,'object_id':object_value, 'subscription_id': instance.remote_id }
	processInstagramUpdate(update)
Example #7
0
File: utils.py Project: u2rafi/ITSY
def _sync_issue_field_name(instance, attr, prev_attr):
    """Record the instance's *attr* value in IssueFieldName and retire the
    previous value.

    Shared helper for update_field_name (the name/title branches were
    duplicates).  Saves the new unique name, deletes the stale entry when
    the value changed, updates the *prev_attr* tracker, and re-saves the
    instance with the post_save signal disconnected to avoid recursion.
    """
    value = getattr(instance, attr)
    prev_value = getattr(instance, prev_attr)
    ifn = IssueFieldName(content_object=instance, name=value)
    ifn.save()
    # prevent recursive save() call: signal disconnected and reconnected
    post_save.disconnect(update_field_name, sender=instance.__class__)
    if prev_value and value != prev_value:
        IssueFieldName.objects.filter(name=prev_value).delete()
    setattr(instance, prev_attr, value)
    instance.save()
    post_save.connect(update_field_name, sender=instance.__class__)


def update_field_name(sender, instance, **kwargs):
    """
    This function is connected to Fields post_save signal,
    to ensure that all field names are unique it saves names of fields into
    IssueFieldName model
    """
    if hasattr(instance, "name") and hasattr(instance, "_prev_name"):
        # prevent integrity error for unique name field.
        if not IssueFieldName.objects.filter(name=instance.name).exists():
            _sync_issue_field_name(instance, "name", "_prev_name")
    elif hasattr(instance, "title") and hasattr(instance, "_prev_title"):
        # prevent integrity error for unique name field.
        if not IssueFieldName.objects.filter(name=instance.title).exists():
            _sync_issue_field_name(instance, "title", "_prev_title")
Example #8
0
def postsave_entity(sender, instance, created, **kwargs):
    """post_save handler for entity: push the entity's package/time profile
    changes down to its machines, then remember the new values in the
    ``old_*`` fields (signal paused to avoid recursion).

    The package-profile and time-profile propagation were duplicated
    verbatim; both now run through one loop.
    """
    # For each profile kind: update every machine when forced, otherwise
    # only machines still carrying the entity's previous profile value.
    for profile_attr, force_flag, old_value in (
            ('packageprofile', instance.force_packageprofile,
             instance.old_packageprofile),
            ('timeprofile', instance.force_timeprofile,
             instance.old_timeprofile)):
        if force_flag == 'yes':
            mm = machine.objects.filter(entity=instance)
        else:
            mm = machine.objects.filter(entity=instance,
                                        **{profile_attr: old_value})
        for m in mm:
            setattr(m, profile_attr, getattr(instance, profile_attr))
            m.save()

    # Persist the new "old" values without re-triggering this handler.
    post_save.disconnect(receiver=postsave_entity, sender=entity)
    instance.old_packageprofile = instance.packageprofile
    instance.old_timeprofile = instance.timeprofile
    instance.save()
    post_save.connect(receiver=postsave_entity, sender=entity)
Example #9
0
def turn_off_syncing():
    """
    Disables all of the signals for syncing entities.
    """
    # Each (signal, receiver, uid) triple mirrors the connect-side wiring.
    for signal, receiver, uid in (
            (post_delete, delete_entity_signal_handler, 'delete_entity_signal_handler'),
            (post_save, save_entity_signal_handler, 'save_entity_signal_handler'),
            (post_bulk_operation, bulk_operation_signal_handler, 'bulk_operation_signal_handler')):
        signal.disconnect(receiver, dispatch_uid=uid)
Example #10
0
 def update_have_analogues(sender, instance, **kwargs):
     """post_save handler: refresh the cached have_analogues flag on the
     saved product and on each of its analogues, pausing the signal while
     the analogues save so the handler does not recurse."""
     instance.have_analogues = instance.analogues.count() > 0
     post_save.disconnect(Product.update_have_analogues, sender=Product)
     for related in instance.analogues.all():
         related.have_analogues = related.analogues.count() > 0
         related.save()
     post_save.connect(Product.update_have_analogues, sender=Product)
Example #11
0
def manage_bookmarks(cls, enabled=True):
    """Wires (or unwires) automatic bookmark-list management for a model.

    When *enabled* is True, every newly created instance of *cls* gets an
    associated bookmark list, and deletion cleans it up again:

    >> manage_bookmarks(User)

    Pass ``enabled=False`` to remove both handlers:

    >> manage_bookmarks(User, False)
    """
    cls = get_model(cls)
    model_label = cls.__name__.lower()
    # dispatch_uids keep connect/disconnect idempotent per model.
    create_dispatch_uid = "create_%s_bookmarks" % model_label
    delete_dispatch_uid = "delete_%s_bookmarks" % model_label

    if not enabled:
        post_save.disconnect(_create_bookmarks, cls, dispatch_uid=create_dispatch_uid)
        pre_delete.disconnect(_delete_bookmarks, cls, dispatch_uid=delete_dispatch_uid)
        return

    post_save.connect(_create_bookmarks, cls, dispatch_uid=create_dispatch_uid)
    pre_delete.connect(_delete_bookmarks, cls, dispatch_uid=delete_dispatch_uid)
Example #12
0
	def setUp(self, distribute_mock):
		"""Build two users with auth profiles, a base holding two apys and a
		counter, plus four differently-privileged test clients; storage-sync
		signals are detached first so nothing remote is touched."""
		post_save.disconnect(synchronize_to_storage, sender=Apy)
		post_save.disconnect(initialize_on_storage, sender=Base)
		distribute_mock.return_value = True

		self.user1 = User.objects.create_user('user1', '*****@*****.**', 'pass')
		self.user1.save()

		auth, created = AuthProfile.objects.get_or_create(user=self.user1)
		auth.user = self.user1
		auth.save()
		
		self.user2 = User.objects.create_user('user2', '*****@*****.**', 'pass')
		self.user2.save()
		auth, created = AuthProfile.objects.get_or_create(user=self.user2)
		auth.user = self.user2
		auth.save()	

		self.base1 = Base.objects.create(name="base1", user=self.user1)
		self.base1_apy1 = Apy.objects.create(name="base1_apy1", base=self.base1)
		self.base1_apy1.save()

		self.base1_apy_xml = Apy.objects.create(name="base1_apy_xml", base=self.base1)
		# NOTE(review): adjacent string literals concatenate WITHOUT a newline,
		# producing the one-liner "def func(self):<tab>return 'bla'", which is
		# valid Python - confirm a multi-line module body was not intended.
		self.base1_apy_xml.module = "def func(self):"\
		"	return 'bla'"

		# counter is done in disconnected signal
		counter = Counter(apy=self.base1_apy1)
		counter.save()

		self.client1 = Client()  # logged in with objects
		self.client2 = Client()  # logged in without objects
		self.client3 = Client()  # not logged in 
		self.client_csrf = Client(enforce_csrf_checks=True)  # not logged in 
Example #13
0
    def handle(self, *args, **options):
        """Run the phpBB -> DjangoBB migration: users, forums, topics and
        posts, with stat-maintaining signals disabled for speed and the
        stats recomputed at the end."""
        self.check_attachment_path()

        # disable DjangoBB signals for speedup
        post_save.disconnect(djangobb_signals.post_saved, sender=Post, dispatch_uid='djangobb_post_save')
        post_save.disconnect(djangobb_signals.topic_saved, sender=Topic, dispatch_uid='djangobb_topic_save')

        #disable_auto_fields(Forum)
        disable_auto_fields(Topic)
        disable_auto_fields(Post)

        cleanup_users = int(options.get("cleanup_users"))
        # Members of these phpBB groups become DjangoBB moderators.
        moderator_groups = phpbb_Group.objects.filter(
            name__in=["ADMINISTRATORS", "GLOBAL_MODERATORS"]
        )
        user_dict, moderators = self.migrate_users(cleanup_users, moderator_groups)

        forum_dict = self.migrate_forums(moderators)

        # Topics need the user/forum id maps; posts need users and topics.
        topic_dict = self.migrate_topic(user_dict, forum_dict)
        self.migrate_posts(user_dict, topic_dict)

        # needed if signals disabled, see above
        self.update_topic_stats()
        self.update_forum_stats()

        self.stdout.write("\nmigration done.\n")
Example #14
0
def migrate(con):
    """Migrate a legacy forum database (reached through *con*) into
    DjangoBB: groups, users, categories, forums, permissions, topics and
    posts, with stat-maintaining signals disabled for speed."""
    #check_attachment_path()

    # disable DjangoBB signals for speedup
    post_save.disconnect(djangobb_signals.post_saved, sender=Post, dispatch_uid='djangobb_post_save')
    post_save.disconnect(djangobb_signals.topic_saved, sender=Topic, dispatch_uid='djangobb_topic_save')

    #disable_auto_fields(Forum)
    disable_auto_fields(Topic)
    disable_auto_fields(Post)

    # Each step feeds its id maps to the later ones.
    groups = migrate_groups(con)
    users, moderators = migrate_users(con)
    
    migrate_user_groups(con,users,groups)
    categories = migrate_categories(con)
    forums = migrate_forums(con,categories)
    migrate_auth(con,forums,groups)

    topics = migrate_topics(con,users,forums)
    migrate_posts(con,users,topics,forums)
    
    # needed if signals disabled, see above
    update_topic_stats()
    update_forum_stats()
Example #15
0
    def setUp(self):
        """Create and log in an API test user, with the diary post_save
        signal silenced and Celery forced to run tasks eagerly."""

        # Disable Signals
        post_save.disconnect(post_save_diary, sender=Diary)

        # Run celery task synchronous
        app.conf.update(CELERY_ALWAYS_EAGER=True)

        self.test_username = TEST_USERNAME
        self.test_password = TEST_PASSWORD
        self.test_email = TEST_EMAIL

        # Create a user
        self.user = get_user_model().objects.create_user(
            username=self.test_username,
            password=self.test_password,
            email=self.test_email,
        )

        # Login
        self.client = APIClient()
        self.client.login(
            username=self.test_username,
            password=self.test_password,
        )
Example #16
0
def register_post_save_functions():
    """Attach generate_thumbnail to Layer and Map saves, dropping any
    previously registered copy first so calling this twice is safe."""
    logger.debug('Thumbnail: Registering post_save functions.')
    for model in (Layer, Map):
        post_save.disconnect(generate_thumbnail, sender=model)
        post_save.connect(generate_thumbnail, sender=model, weak=False)
 def setUpClass(cls):
     """Silence the CredentialsAuthorization post_save hooks - they talk
     to external resources - before the credential tests run."""
     external_hook_uids = (
         'django_openstack.CredentialsAuthorization.post_save',
         'django_openstack.User.post_save',
     )
     for uid in external_hook_uids:
         post_save.disconnect(sender=nova_models.CredentialsAuthorization,
                              dispatch_uid=uid)
Example #18
0
File: user.py Project: day9tv/ahgl
def create_user(backend, details, response, uid, username, user=None, *args,
                **kwargs):
    """Create user. Depends on get_username pipeline."""
    # Nothing to do when an earlier pipeline step already found the user.
    if user:
        return {'user': user}
    if not username:
        return None

    user_email = details.get('email')
    display_name = response.get('name')
    if display_name:
        # Reuse a pre-created profile (parked on the master account) when
        # one matches the social account's display name.
        try:
            owner = User.objects.get(username='******')
            existing_profile = Profile.objects.get(name=display_name, user=owner)
        except Profile.DoesNotExist:
            pass
        else:
            logger.info('Using existing profile for social-auth with name {0}'.format(existing_profile.name))
            # Pause automatic profile creation while making the user so the
            # pre-created profile can be attached instead of a fresh one.
            post_save.disconnect(create_profile, sender=User)
            user = User.objects.create_user(username=username, email=user_email)
            post_save.connect(create_profile, sender=User)
            # in case we already associated by email to an existing account and profile...
            if not Profile.objects.filter(user=user).count():
                existing_profile.user = user
                existing_profile.save()

    if not user:
        user = User.objects.create_user(username=username, email=user_email)
    return {
        'user': user,
        'is_new': True
    }
Example #19
0
 def setUp(self):
     """Create a baseline Member fixture and silence the inscription
     confirmation signal so tests can create Inscriptions freely."""
     # The previous `usr1 = ...` binding was never read; only the side
     # effect of creating the row matters.
     Member.objects.create(name='user1',
                           family_name='family_name1',
                           email='*****@*****.**',
                           phone='1111',)
     # NOTE(review): disconnect happens after the Member is created; the
     # receiver is keyed to Inscription saves, so the order looks harmless
     # - confirm Member creation does not trigger an Inscription save.
     post_save.disconnect(receiver=inscription_confirmation,
                          sender=Inscription)
Example #20
0
 def down():
     # BBB: Django 1.8 compatibility - the remote-model attribute moved
     # from rel.to to rel.model in Django 1.9.
     if django.VERSION < (1, 9):
         rel.to = old_model
     else:
         rel.model = old_model
     # Detach whichever receiver was connected under this dispatch_uid.
     post_save.disconnect(dispatch_uid=dispatch_uid)
 def test_api_create_attendee(self):
     """POSTing an attendee should create the record, send the feedback
     SMS through the (mocked) Vumi endpoint exactly once, and flag
     survey_sent on the stored Attendee."""
     # Setup
     # restore post_save hook for this test
     post_save.connect(send_feedback_sms, sender=Attendee)
     event = self.create_event()
     participant = self.create_participant()
     # prepare event data
     post_data = {
         "event": "/api/v1/events/%s/" % event.id,
         "participant": "/api/v1/participants/%s/" % participant.id
     }
     # add response
     # Mock the outbound Vumi message endpoint so no real HTTP happens.
     responses.add(responses.PUT,
                   "https://go.vumi.org/api/v1/go/http_api_nostream/conv-key/messages.json",  # flake8: noqa
                   body=json.dumps(post_data), status=201,
                   content_type='application/json')
     # Execute
     response = self.client.post('/api/v1/attendees/',
                                 json.dumps(post_data),
                                 content_type='application/json')
     # Check
     self.assertEqual(response.status_code, status.HTTP_201_CREATED)
     d = Attendee.objects.last()
     self.assertEqual(d.survey_sent, True)
     self.assertEqual(d.event.id, event.id)
     self.assertEqual(d.participant.id, participant.id)
     # Exactly one outbound SMS request should have been made.
     self.assertEqual(len(responses.calls), 1)
     # disconnect post_save hook again
     post_save.disconnect(send_feedback_sms, sender=Attendee)
Example #22
0
 def unregister(self, model):
     """Removes a model from version control.

     Raises RegistrationError (Python 2 raise syntax) when *model* was
     never registered; otherwise drops its registry entry and detaches
     both version-tracking receivers.
     """
     if not self.is_registered(model):
         raise RegistrationError, "%r has not been registered with django-reversion" % model
     del self._registered_models[model]
     post_save.disconnect(self._post_save_receiver, model)
     pre_delete.disconnect(self._pre_delete_receiver, model)
Example #23
0
def save_task_and_worker(sender, **kwargs):
    """post_save handler for Task: once a task is done ('D'), link it to
    the Worker named in its JSON answer (creating the Worker on first
    sight), stamp the completion time and re-save with the signal paused.
    """
    logger.info('signal received')
    t = kwargs['instance']  # the Task that was just saved
    if t.status == 'D':
        # Lazy %-style logging args defer string building until the record
        # is actually emitted; rendered output is unchanged.
        logger.info('answer for task %s was recieved', t.id)
        t_answer = json.loads(t.answer)
        logger.info('worker%s did the work', t_answer['worker'])
        post_save.disconnect(save_task_and_worker, sender=Task)
        try:  # check whether the worker is already known
            w = Worker.objects.get(worker_uri=t_answer['worker'])  #TODO get worker URI
            t.worker_id = w.id  # set Task-Worker relation
            logger.info("worker is already known: id = %s", w.id)
            logger.info("worker %s did task %s", w.id, t.id)
        except Worker.DoesNotExist:
            # create new worker
            newworker = Worker(worker_uri=t_answer['worker'])
            # save before assigning to t, because newworker has no id yet
            newworker.save()
            t.worker_id = newworker.id
            logger.info("new worker created: id = %s", newworker.id)
            logger.info("worker %s did task %s", newworker.id, t.id)
        t.com_date = timezone.now()
        t.save()
        post_save.connect(save_task_and_worker, sender=Task)

        process_task_answers()
Example #24
0
def _disconnect_signals():
    """ used in testing """
    # Mirror of the module's plan-watcher wiring, torn down in one place.
    watcher_hooks = (
        (post_save, plan_watchers.notify_on_plan_is_updated),
        (pre_delete, plan_watchers.load_email_settings_for_later_deletion),
        (post_delete, plan_watchers.notify_deletion_of_plan),
        (pre_save, plan_watchers.pre_save_clean),
    )
    for signal, receiver in watcher_hooks:
        signal.disconnect(receiver, TestPlan)
Example #25
0
 def _detach_signals(self):
     '''
     Detach every EAV signal handler from this registry's model class.
     '''
     model = self.model_cls
     post_init.disconnect(Registry.attach_eav_attr, sender=model)
     pre_save.disconnect(Entity.pre_save_handler, sender=model)
     post_save.disconnect(Entity.post_save_handler, sender=model)
Example #26
0
 def cleanup(rollback=False):
     """Detach the audit-log receivers, then roll back or commit the
     open transaction depending on *rollback*."""
     post_save.disconnect(log_save)
     post_delete.disconnect(log_delete)
     finish = transaction.rollback if rollback else transaction.commit
     finish()
Example #27
0
def need_initial_data(sender, **kwargs):
    """post_save handler that lazily initializes non-DB asset storage and
    detaches itself once the storage reports no more work is needed."""
    from django.conf import settings
    # NOTE(review): settings.DATABASE_NAME is the pre-Django-1.2 single-DB
    # setting; ':memory:' presumably indicates the SQLite test database.
    if settings.DATABASE_NAME != ':memory:':
        # We are not testing, create the non-DB asset storage now
        need_more = singleton_send(initialize_asset_storage, None, asset=kwargs['instance'])
        if not need_more:
            # Storage fully initialized - stop listening.
            post_save.disconnect(need_initial_data)
Example #28
0
 def _create(cls, target_class, *args, **kwargs):
     """Build a user via the parent factory with the create_library
     post_save hook paused, then set a known password and persist."""
     post_save.disconnect(create_library, User)
     new_user = super(UserFactory, cls)._create(target_class, *args, **kwargs)
     new_user.set_password('password')
     new_user.save()
     post_save.connect(create_library, User)
     return new_user
 def unregister(self, model):
     """Disconnect the three autocompleter receivers for *model*."""
     post_save.disconnect(add_obj_to_autocompleter,
         sender=model, dispatch_uid='autocompleter.%s.add' % (model))
     # NOTE(review): 'remoe_old' looks misspelled, but the dispatch_uid must
     # match the one used at connect time - do not "fix" this side only.
     pre_save.disconnect(remove_old_obj_from_autocompleter, sender=model,
         dispatch_uid='autocompleter.%s.remoe_old' % (model))
     post_delete.disconnect(remove_obj_from_autocompleter,
         sender=model, dispatch_uid='autocompleter.%s.remove' % (model))
Example #30
0
    def _on_first_save(self, expected_instance_id, instance, dispatch_uid,
                       created=False, **kwargs):
        """Handler for the first save on a newly created instance.

        This will disconnect the signal and store the state on the instance.

        Args:
            expected_instance_id: ``id()`` of the instance this handler was
                registered for; saves of other instances are ignored.
            instance: The model instance that was saved.
            dispatch_uid: The uid this handler was connected under, used to
                disconnect exactly this registration.
            created: Whether the save created the row (asserted True here).
        """
        if id(instance) == expected_instance_id:
            assert created

            # Stop listening immediately for any new signals here.
            # The Signal stuff deals with thread locks, so we shouldn't
            # have to worry about reaching any of this twice.
            post_save.disconnect(sender=instance.__class__,
                                 dispatch_uid=dispatch_uid)

            cls = self.__class__

            # This is a new row in the database (that is, the model instance
            # has been saved for the very first time), we need to flush any
            # existing state.
            #
            # The reason is that we may be running in a unit test situation, or
            # are dealing with code that deleted an entry and then saved a new
            # one with the old entry's PK explicitly assigned. Using the old
            # state will just cause problems.
            cls._reset_state(instance)

            # Now we can register each RelationCounterField on here.
            for field in instance.__class__._meta.local_fields:
                if isinstance(field, cls):
                    cls._store_state(instance, field)
Example #31
0
 def handle(self, *args, **options):
     """
     Initialize timeseries app

     Detaches the Serie sync signals, creates 200 random Series, builds
     the timeseries dashboard with its views, regenerates the dashboard
     data, then reconnects the signals.
     """
     post_save.disconnect(serie_create_update, sender=Serie)
     post_delete.disconnect(serie_delete, sender=Serie)
     ds.start("Creating timeseries")
     dates = list(pd.date_range(start="08/01/2018", periods=100))
     vals = np.arange(10, 150, 10)
     # for/range replaces the original manual while/i += 1 counter loop.
     for i in range(200):
         s = Serie.objects.create(date=random.choice(dates),
                                  value=random.choice(vals))
         print(str(i), s)
     ds.end("Timeseries created")
     print("Creating timeseries dashboard")
     dash, _ = Dashboard.objects.get_or_create(
         slug="timeseries",
         title="Timeseries",
         public=True,
     )
     DashboardView.objects.create(dashboard=dash,
                                  slug="rawdata",
                                  title="Raw data")
     DashboardView.objects.create(dashboard=dash,
                                  slug="day",
                                  title="Day",
                                  active=True)
     DashboardView.objects.create(dashboard=dash, slug="week", title="Week")
     DashboardView.objects.create(dashboard=dash,
                                  slug="month",
                                  title="Month")
     query = Serie.objects.all()
     generate(query)
     post_save.connect(serie_create_update, sender=Serie)
     post_delete.connect(serie_delete, sender=Serie)
     ds.ok("Finished")
Example #32
0
def set_api(sender, instance, **kwargs):
    """post_save handler for Bot: rebuild the Telegram API client, (re)set
    the webhook, and on first save fill in the bot's own user record."""
    #  Always reset the _bot instance after save, in case the token changes.
    instance._bot = BotAPI(instance.token)

    # set webhook
    url = None
    cert = None
    if instance.enabled:
        webhook = reverse('telegrambot:webhook', kwargs={'token': instance.token})
        domain = settings.TELEGRAM_BOT_SITE_DOMAIN
        if instance.https_port is None:
            url = 'https://' + domain + webhook
        else:
            url = 'https://' + domain + ':' + str(instance.https_port) + webhook
    # NOTE(review): the certificate is attached even when the bot is
    # disabled (url stays None, which unsets the webhook) - confirm intended.
    if instance.ssl_certificate:
        instance.ssl_certificate.open()
        cert = instance.ssl_certificate

    instance._bot.setWebhook(webhook_url=url,
                             certificate=cert)
    logger.info("Success: Webhook url %s for bot %s set" % (url, str(instance)))

    #  complete  Bot instance with api data
    if not instance.user_api:
        bot_api = instance._bot.getMe()

        # Copy only the getMe() fields that exist on the local User model.
        botdict = bot_api.to_dict()
        modelfields = [f.name for f in User._meta.get_fields()]
        params = {k: botdict[k] for k in botdict.keys() if k in modelfields}
        user_api, _ = User.objects.get_or_create(**params)
        instance.user_api = user_api

        # Prevent signal recursion, and save.
        post_save.disconnect(set_api, sender=sender)
        instance.save()
        post_save.connect(set_api, sender=sender)

        logger.info("Success: Bot api info for bot %s set" % str(instance))
async def test_model_subscription_with_variables_succeeds():
    """A GraphQL subscription filtered by id should receive an update
    payload when the matching SomeModel instance is saved."""
    post_save.connect(post_save_subscription,
                      sender=SomeModel,
                      dispatch_uid="some_model_post_delete")

    communicator = WebsocketCommunicator(GraphqlSubscriptionConsumer,
                                         "/graphql/")
    connected, subprotocol = await communicator.connect()
    assert connected

    # ORM access must be wrapped for the async test context.
    s = await sync_to_async(SomeModel.objects.create)(name="test name")

    subscription = """
        subscription SomeModelUpdated($id: ID){
            someModelUpdated(id: $id) {
                name
            }
        }
    """

    # Subscribe with the created row's pk as the $id variable.
    await query(subscription, communicator, {"id": s.pk})

    # This save fires post_save_subscription, which pushes the update.
    await sync_to_async(s.save)()

    response = await communicator.receive_json_from()

    assert response["payload"] == {
        "data": {
            "someModelUpdated": {
                "name": s.name
            }
        },
        "errors": None,
    }

    post_save.disconnect(post_save_subscription,
                         sender=SomeModel,
                         dispatch_uid="some_model_post_delete")
Example #34
0
def create_user(backend,
                details,
                response,
                uid,
                username,
                user=None,
                *args,
                **kwargs):
    """Create user. Depends on get_username pipeline."""
    # Nothing to do when an earlier pipeline step already found the user.
    if user:
        return {'user': user}
    if not username:
        return None

    email = details.get('email')
    name = response.get('name')
    if name:
        # if they already have a profile created for them, use that profile
        try:
            master_user = User.objects.get(username='******')
            profile = Profile.objects.get(name=name, user=master_user)
        except Profile.DoesNotExist:
            pass
        else:
            logger.info(
                'Using existing profile for social-auth with name {0}'.format(
                    profile.name))
            # Pause automatic profile creation while making the user so the
            # pre-created profile can be attached instead of a fresh one.
            post_save.disconnect(create_profile, sender=User)
            user = User.objects.create_user(username=username, email=email)
            post_save.connect(create_profile, sender=User)
            # in case we already associated by email to an existing account and profile...
            if not Profile.objects.filter(user=user).count():
                profile.user = user
                profile.save()

    if not user:
        user = User.objects.create_user(username=username, email=email)
    return {'user': user, 'is_new': True}
Example #35
0
 def test_clean_identities_job(self):
     """
     Test clean_identities_job

     Expired identities created here must be removed by the job, and the
     permission granted to their users must stop being effective.
     """
     # Detach the permission-management hook so Identity rows can be
     # created without the signal's side effects.
     post_save.disconnect(manage_citizen_perm, sender=Identity)
     nb_identity = Identity.objects.count()
     for user in self.identity_users:
         user.is_superuser = False
         user.save()
         user.user_permissions.add(self.permission)
         # Re-fetch to clear the cached permission set.
         user = get_user_model().objects.get(pk=user.pk)
         self.assertTrue(user.has_perm(self.permission_name))
         # Identity is created already expired (valid_until in the past).
         Identity.objects.create(user=user,
                                 valid_until=self.non_valid_identity_date)
         user = get_user_model().objects.get(pk=user.pk)
         self.assertTrue(user.has_perm(self.permission_name))
     self.assertEqual(Identity.objects.count(),
                      nb_identity + self.identity_users.count())
     # Run the cleanup task synchronously.
     clean_identities_job.apply()
     self.assertEqual(Identity.objects.count(), nb_identity)
     for user in self.identity_users:
         user = get_user_model().objects.get(pk=user.pk)
         self.assertFalse(user.has_perm(self.permission_name))
Example #36
0
def change_logging(request):
    """
    Enable change logging by connecting the appropriate signals to their receivers before code is run, and
    disconnecting them afterward.

    :param request: WSGIRequest object with a unique `id` set
    """
    # functools.partial replaces django.utils.functional.curry, which was
    # removed in Django 3.0; both bind `request` as the first argument.
    from functools import partial
    handle_changed_object = partial(_handle_changed_object, request)
    handle_deleted_object = partial(_handle_deleted_object, request)

    # Connect our receivers to the post_save and post_delete signals.
    post_save.connect(handle_changed_object, dispatch_uid="handle_changed_object")
    m2m_changed.connect(handle_changed_object, dispatch_uid="handle_changed_object")
    pre_delete.connect(handle_deleted_object, dispatch_uid="handle_deleted_object")

    try:
        yield
    finally:
        # Disconnect change logging signals even if the wrapped code raised.
        # This is necessary to avoid recording any errant changes during
        # test cleanup.
        post_save.disconnect(handle_changed_object, dispatch_uid="handle_changed_object")
        m2m_changed.disconnect(handle_changed_object, dispatch_uid="handle_changed_object")
        pre_delete.disconnect(handle_deleted_object, dispatch_uid="handle_deleted_object")
Example #37
0
def schedule_update_notifications(sender, instance, **kwargs):
    """
    post_save receiver for AssignedTask: create a Notification telling the
    affected user that their work schedule was updated.

    :param sender: model class that fired the signal (AssignedTask)
    :param instance: the AssignedTask that was saved
    """
    notification = Notification()
    # NOTE(review): naive local time — confirm the project's timezone policy.
    notification.time_sent = datetime.now()
    notification.user = instance.user
    notification.title = 'Your Work Schedules have been Updated'
    notification.message = 'Schedule: ' + str(instance.schedule.name) + \
        '     Task assigned: ' + str(instance.task.task_name) + \
        '     Day scheduled: ' + str(instance.date)

    # Disconnect before saving; disconnect() returns True only when the
    # receiver was actually attached, which guards against double handling.
    was_connected = post_save.disconnect(
        schedule_update_notifications, sender=AssignedTask,
        dispatch_uid='schedule_notification')
    if was_connected:
        notification.save()
    # Signal.connect() always returns None, so its result is not captured
    # (the original bound it to an unused `is_reconnected` variable).
    post_save.connect(
        schedule_update_notifications, sender=AssignedTask,
        dispatch_uid='schedule_notification')
Example #38
0
    def test_previous_status(self):
        """A post_save receiver can compare the cached and current status."""
        from django.db.models.signals import post_save

        def on_saved(sender, instance, **kwargs):
            # Record whether the status changed since the instance loaded.
            on_saved.is_different = instance._status != instance.status

        on_saved.is_different = None

        post_save.connect(on_saved)

        svc = Service(name="service", description="test", status=0)
        svc.save()
        self.assertFalse(on_saved.is_different)

        # Re-saving with the same status is not a change.
        svc.status = 0
        svc.save()
        self.assertFalse(on_saved.is_different)

        # An actual transition is detected.
        svc.status = 1
        svc.save()
        self.assertIsNotNone(on_saved.is_different)
        self.assertTrue(on_saved.is_different)

        post_save.disconnect(on_saved)
Example #39
0
    def test_empty_update_fields(self):
        """save(update_fields=[]) is a no-op: no queries, no signals."""
        person = Person.objects.create(name='Sara', gender='F')

        seen_pre = []
        seen_post = []

        def capture_pre(**kwargs):
            seen_pre.append(kwargs['update_fields'])

        def capture_post(**kwargs):
            seen_post.append(kwargs['update_fields'])

        pre_save.connect(capture_pre)
        post_save.connect(capture_post)

        # An empty update_fields list skips the save entirely.
        with self.assertNumQueries(0):
            person.save(update_fields=[])

        # ...and therefore neither signal fired.
        self.assertEqual(len(seen_pre), 0)
        self.assertEqual(len(seen_post), 0)

        pre_save.disconnect(capture_pre)
        post_save.disconnect(capture_post)
Example #40
0
    def handle(self, *args, **kwargs):
        """
        Management-command entry point: generate a fixed set of featured
        add-ons, themes and collections for development/testing.
        """
        # Disconnect reindexing for every save, we'll reindex
        # once all addons were generated
        post_save.disconnect(update_search_index,
                             sender=Addon,
                             dispatch_uid='addons.search.index')

        # Run Celery tasks eagerly so everything happens synchronously.
        with override_settings(CELERY_TASK_ALWAYS_EAGER=True):
            translation.activate('en-US')
            serializer = GenerateAddonsSerializer()
            serializer.create_generic_featured_addons()
            serializer.create_featured_addon_with_version()
            serializer.create_featured_theme()
            serializer.create_featured_themes()
            # NOTE(review): base_collections/hero_addons come from module
            # scope; hero_addons is iterated twice — once for collections,
            # once for authored add-ons — which appears deliberate.
            for addon in base_collections:
                serializer.create_a_named_collection_and_addon(
                    addon, author='mozilla')
            for addon in hero_addons:
                serializer.create_a_named_collection_and_addon(
                    addon, author='mozilla')
            for addon in hero_addons:
                serializer.create_named_addon_with_author(addon)
            serializer.create_installable_addon()
Example #41
0
    def process_exception(self, request: HttpRequest, exception: Exception):
        """Disconnect handlers in case of exception"""
        # Tear down the per-request audit receivers keyed by this request's
        # id so they do not leak past a failed request.
        post_save.disconnect(dispatch_uid=LOCAL.authentik["request_id"])
        pre_delete.disconnect(dispatch_uid=LOCAL.authentik["request_id"])

        # In DEBUG the exception surfaces to the developer; skip event logging.
        if settings.DEBUG:
            return
        # Special case for SuspiciousOperation, we have a special event action for that
        if isinstance(exception, SuspiciousOperation):
            # NOTE(review): .run() executes on the current thread (not
            # .start()) — presumably intentional; confirm.
            thread = EventNewThread(
                EventAction.SUSPICIOUS_REQUEST,
                request,
                message=str(exception),
            )
            thread.run()
        # before_send acts as a filter: a non-None return means the
        # exception should be recorded as a system-exception event.
        elif before_send({}, {"exc_info":
                              (None, exception, None)}) is not None:
            thread = EventNewThread(
                EventAction.SYSTEM_EXCEPTION,
                request,
                message=exception_to_string(exception),
            )
            thread.run()
Example #42
0
    def handle(self, *args, **options):
        """
        Management-command entry point: run the 'autografo' imports with
        the global audit/index signal receivers detached.
        """
        # Detach receivers by dispatch_uid so the bulk work below does not
        # fire them once per saved record.
        post_delete.disconnect(dispatch_uid='sapl_post_delete_signal')
        post_save.disconnect(dispatch_uid='sapl_post_save_signal')
        post_delete.disconnect(dispatch_uid='cmj_post_delete_signal')
        post_save.disconnect(dispatch_uid='cmj_post_save_signal')

        self.logger = logging.getLogger(__name__)
        # The bare string below is previously-run imports kept for
        # reference (commented-out via a string literal, not executed).
        """self.run_autografo(
            arquivos=self.get_parametros_711(),
            epigrafe='AUTÓGRAFO Nº 711, DE 04 DE DEZEMBRO DE 2020.',
            partes_ou_completo='C'
        )

        self.run_autografo(
            arquivos=self.get_parametros_712(),
            epigrafe='AUTÓGRAFO Nº 712, DE 04 DE DEZEMBRO DE 2020.',
            partes_ou_completo='C'
        )"""

        self.run_autografo(
            arquivos=self.get_parametros_713(),
            epigrafe='AUTÓGRAFO Nº 713, DE 04 DE DEZEMBRO DE 2020.',
            partes_ou_completo='C')
Example #43
0
def disconnect_signals():
    """Detach the Record, TaggedItem and CollectionItem receivers from
    both the post_delete and post_save signals."""
    for signal, receiver, model in (
        (post_delete, post_record_delete_callback, Record),
        (post_save, post_record_save_callback, Record),
        (post_delete, post_taggeditem_callback, TaggedItem),
        (post_save, post_taggeditem_callback, TaggedItem),
        (post_delete, post_collectionitem_callback, CollectionItem),
        (post_save, post_collectionitem_callback, CollectionItem),
    ):
        signal.disconnect(receiver, sender=model)
Example #44
0
def set_prefix(sender, update_fields=['prefix_id'], *args, **kwargs):
    """
    Method to generate prefix id and default timezone using signals
    Generate default currency using signals
    """
    # NOTE(review): the mutable default for `update_fields` is never
    # mutated here, and post_save dispatch passes `update_fields` as a
    # kwarg that shadows it — confirm the default is ever actually used.
    outlet = kwargs['instance']
    default_timezone = Timezone.objects.get(pk="285461788")
    # get_or_create returns an (object, created) tuple; [0] is used below.
    default_currency = Currency.objects.get_or_create(
        pk="4jatbw5sm",
        name="Nigerian Naira",
        symbol="₦",
        symbol_native="₦",
        decimal_digits=2,
        rounding=0,
        code="NGN",
        name_plural="Nigerian nairas")
    # NOTE(review): `pk=id_gen` passes the object itself rather than a
    # generated value — this looks like it should be `id_gen()`; confirm.
    default_vat = Vat.objects.get_or_create(pk=id_gen, rate=00.00)

    outlet_preference = OutletPreference()
    outlet_preference.outlet = outlet
    # vat
    outlet_preference.vat_rate = default_vat[0]
    # get the set default currency
    outlet_preference.outlet_currency = default_currency[0]
    outlet_preference.outlet_timezone = default_timezone
    # Only persist preferences the first time this outlet is seen.
    outlet_preference_exist = OutletPreference.objects.filter(
        outlet_id=outlet.id).exists()
    if not outlet_preference_exist:
        outlet_preference.save()

    # Prefix format: first 2 letters of business + zero-padded id + first
    # 3 letters of outlet name, all uppercased.
    outlet_name = outlet.name[slice(3)].upper()
    business_name = outlet.business.trading_name[slice(2)].upper()
    outlet_id = (str(outlet.id)).zfill(3)
    outlet.prefix_id = '{}{}-{}'.format(business_name, outlet_id, outlet_name)
    # Disconnect so the save below does not re-enter this receiver.
    post_save.disconnect(set_prefix, sender=Outlet)
    outlet.save()
    post_save.connect(set_prefix, sender=Outlet)
Example #45
0
def update_areas(sender, instance, **kwargs):
    """
    post_save receiver for Bobine: recompute the per-estado area totals of
    the parent Bobinagem from all of its bobines and persist them.

    :param sender: model class that fired the signal
    :param instance: the Bobine that was saved
    """
    # Disconnect so the saves below cannot re-enter this receiver; the
    # finally block guarantees reconnection even if a query fails (the
    # original left the signal permanently disconnected on error).
    post_save.disconnect(update_areas, sender=sender)
    try:
        bobine = Bobine.objects.get(pk=instance.pk)
        bobinagem = Bobinagem.objects.get(pk=bobine.bobinagem.pk)

        # Accumulate area per estado code in a single pass.
        totals = {'G': 0, 'R': 0, 'DM': 0, 'IND': 0, 'BA': 0}
        for b in Bobine.objects.filter(bobinagem=bobinagem):
            if b.estado in totals:
                totals[b.estado] += b.area

        bobinagem.area_g = totals['G']
        bobinagem.area_r = totals['R']
        bobinagem.area_dm = totals['DM']
        bobinagem.area_ind = totals['IND']
        bobinagem.area_ba = totals['BA']
        bobinagem.save()
    finally:
        post_save.connect(update_areas, sender=sender)
Example #46
0
    def process_response(self, request, response):
        """
        Middleware hook: back-fill the actor on the ActivityStream entries
        recorded during this request, then detach the per-request receiver.
        """
        # The DRF-wrapped request (if any) carries the authenticated user.
        drf_request = getattr(request, 'drf_request', None)
        drf_user = getattr(drf_request, 'user', None)
        # Detach the receiver registered earlier under this middleware's
        # dispatch_uid, if one was registered for this request.
        if self.disp_uid is not None:
            post_save.disconnect(dispatch_uid=self.disp_uid)

        # self.instance_ids holds the ActivityStream rows created while the
        # request was being processed.
        for instance in ActivityStream.objects.filter(
                id__in=self.instance_ids):
            if drf_user and drf_user.id:
                # Local import — presumably avoids a circular import at
                # module load; confirm.
                from awx.api.serializers import ActivityStreamSerializer
                summary_fields = ActivityStreamSerializer(
                    instance).get_summary_fields(instance)
                instance.actor = drf_user
                try:
                    instance.save(update_fields=['actor'])
                    analytics_logger.info(
                        'Activity Stream update entry for %s' %
                        str(instance.object1),
                        extra=dict(
                            changes=instance.changes,
                            relationship=instance.object_relationship_type,
                            actor=drf_user.username,
                            operation=instance.operation,
                            object1=instance.object1,
                            object2=instance.object2,
                            summary_fields=summary_fields))
                except IntegrityError:
                    # Entry may have been deleted concurrently; log and move on.
                    logger.debug(
                        "Integrity Error saving Activity Stream instance for id : "
                        + str(instance.id))
            # else:
            #     obj1_type_actual = instance.object1_type.split(".")[-1]
            #     if obj1_type_actual in ("InventoryUpdate", "ProjectUpdate", "Job") and instance.id is not None:
            #         instance.delete()

        # Reset the per-request buffer.
        self.instance_ids = []
        return response
def calculate_length_and_upload_to_s3(sender, instance, **kwargs):
    """
    post_save receiver: probe the instance's audio file for its duration,
    store it on ``instance.length``, re-save, and push the file to S3.

    :param sender: model class that fired the signal
    :param instance: the saved model instance carrying ``audiofile``
    """
    # Disconnect so the save() below cannot re-enter this handler; the
    # finally block guarantees reconnection even if probing or the upload
    # fails (the original left the signal disconnected on error, and also
    # carried debug print()s and a large block of dead commented-out code).
    post_save.disconnect(calculate_length_and_upload_to_s3, sender=sender)
    try:
        probe = FFProbe(str(instance.audiofile.file.name))
        # FFProbe reports None when the duration cannot be determined;
        # fall back to 0 in that case.
        instance.length = probe.duration if probe.duration is not None else 0
        instance.save()
        upload_to_s3(instance)
    finally:
        post_save.connect(calculate_length_and_upload_to_s3, sender=sender)
Example #48
0
 def handle(self, *app_labels, **options):
     """
     Load the initial fixture set, skipping when data already exists.
     """
     if Project.objects.exists():
         print("We already have data. Skip load_initial_data.")
         return
     # NOTE(review): `assert` statements are stripped under `python -O`,
     # and Signal.disconnect returns False when nothing was attached —
     # consider raising explicitly instead of asserting.
     assert post_save.disconnect(sender=DiscussionComment,
                                 dispatch_uid='discussioncomment_was_saved')
     # Fixtures are loaded in this fixed order.
     FIXTURES = [
         'pages',
         'emails',
         'articlecategory',
         'patterncategory',
         'sample_data',
     ]
     for fixture in FIXTURES:
         call_command('loaddata', fixture, app_label='tn2app')
Example #49
0
def update_visible_and_approved(sender, instance, **kwargs):
    """
    one non approved event sets the shows visibility and approved to False.
    one visible event sets the shows visibility to True.
    all approved events sets the shows approved to True.
    """
    # Disconnect so instance.save() below cannot re-trigger this receiver;
    # the finally block guarantees reconnection even if a query fails
    # (the original left the signal disconnected on error).
    post_save.disconnect(update_visible_and_approved,
                         sender=Show,
                         dispatch_uid="update_visibility")
    try:
        approved = True
        visible = False
        for event in instance.events.all():
            # A single unapproved event hides and un-approves the show.
            if not event.approved:
                approved = False
                visible = False
                break
            # Any visible event makes the show visible.
            if event.visible:
                visible = True
        instance.visible = visible
        instance.approved = approved
        instance.save()
    finally:
        post_save.connect(update_visible_and_approved,
                          sender=Show,
                          dispatch_uid="update_visibility")
Example #50
0
    def test_update_fields_signals(self):
        """Both save signals report exactly the fields passed to save()."""
        person = Person.objects.create(name='Sara', gender='F')

        captured_pre = []

        def pre_save_listener(**kwargs):
            captured_pre.append(kwargs['update_fields'])

        pre_save.connect(pre_save_listener)

        captured_post = []

        def post_save_listener(**kwargs):
            captured_post.append(kwargs['update_fields'])

        post_save.connect(post_save_listener)

        person.save(update_fields=['name'])

        # Each signal fired exactly once, carrying only 'name'.
        self.assertEqual(len(captured_pre), 1)
        self.assertEqual(len(captured_pre[0]), 1)
        self.assertTrue('name' in captured_pre[0])
        self.assertEqual(len(captured_post), 1)
        self.assertEqual(len(captured_post[0]), 1)
        self.assertTrue('name' in captured_post[0])

        pre_save.disconnect(pre_save_listener)
        post_save.disconnect(post_save_listener)
    def save_host_maintenance(self, ):
        """
        Create a HostMaintenance row for every host id listed in
        ``self.hostsid`` (comma-separated) and update this maintenance's
        status to WAITING on success or REJECTED on failure.

        :returns: True when all hosts were registered, False otherwise.
        """
        save_host_ok = True
        total_hosts = 0

        try:
            hostsid_list = self.hostsid.split(',')
            hosts = Host.objects.filter(pk__in=hostsid_list)
            for host in hosts:
                hm = HostMaintenance()
                hm.host = host
                hm.maintenance = self
                hm.hostname = host.hostname
                hm.save()
                total_hosts += 1

        except Exception as e:
            # NOTE(review): e.args[1] raises IndexError for exceptions with
            # fewer than two args — confirm only DB-API errors are expected.
            error = e.args[1]
            LOG.warn("Error: {}".format(error))
            self.status = self.REJECTED
            save_host_ok = False

        else:
            self.affected_hosts = total_hosts
            self.status = self.WAITING

        finally:
            # post_save signal has to be disconnected in order to avoid
            # recursive signal call
            post_save.disconnect(maintenance_post_save, sender=Maintenance)

            self.save()

            # connecting signal again
            post_save.connect(maintenance_post_save, sender=Maintenance)

        return save_host_ok
Example #52
0
def turn_off_syncing(for_post_save=True,
                     for_post_delete=True,
                     for_m2m_changed=True,
                     for_post_bulk_operation=True):
    """
    Disables all of the signals for syncing entities. By default, everything is turned off. If the user wants
    to turn off everything but one signal, for example the post_save signal, they would do:

    turn_off_sync(for_post_save=False)
    """
    # (flag, signal, handler, dispatch_uid) — disconnect each enabled one.
    targets = (
        (for_post_save, post_save,
         save_entity_signal_handler, 'save_entity_signal_handler'),
        (for_post_delete, post_delete,
         delete_entity_signal_handler, 'delete_entity_signal_handler'),
        (for_m2m_changed, m2m_changed,
         m2m_changed_entity_signal_handler,
         'm2m_changed_entity_signal_handler'),
        (for_post_bulk_operation, post_bulk_operation,
         bulk_operation_signal_handler, 'bulk_operation_signal_handler'),
    )
    for enabled, signal, handler, uid in targets:
        if enabled:
            signal.disconnect(handler, dispatch_uid=uid)
Example #53
0
    def test_update_fields_signals(self):
        """pre_save/post_save receive the exact update_fields from save()."""
        subject = Person.objects.create(name="Sara", gender="F")

        pre_seen = []
        post_seen = []

        def observe_pre(**kwargs):
            pre_seen.append(kwargs["update_fields"])

        def observe_post(**kwargs):
            post_seen.append(kwargs["update_fields"])

        pre_save.connect(observe_pre)
        post_save.connect(observe_post)

        subject.save(update_fields=["name"])

        # Each signal fired once, carrying only the 'name' field.
        for seen in (pre_seen, post_seen):
            self.assertEqual(len(seen), 1)
            self.assertEqual(len(seen[0]), 1)
            self.assertIn("name", seen[0])

        pre_save.disconnect(observe_pre)
        post_save.disconnect(observe_post)
async def test_model_created_subscription_succeeds():
    """
    End-to-end check: with the post_save subscription receiver attached,
    creating a SomeModel pushes a someModelCreated payload over the
    GraphQL websocket.
    """
    post_save.connect(post_save_subscription,
                      sender=SomeModel,
                      dispatch_uid="some_model_post_save")

    communicator = WebsocketCommunicator(GraphqlSubscriptionConsumer,
                                         "/graphql/")
    connected, subprotocol = await communicator.connect()
    assert connected

    subscription = """
        subscription {
            someModelCreated {
                name
            }
        }
    """

    await query(subscription, communicator)

    # ORM access must be wrapped for the async context.
    s = await sync_to_async(SomeModel.objects.create)(name="test name")

    response = await communicator.receive_json_from()

    assert response["payload"] == {
        "data": {
            "someModelCreated": {
                "name": s.name
            }
        },
        "errors": None,
    }

    # Clean up so the receiver does not leak into other tests.
    post_save.disconnect(post_save_subscription,
                         sender=SomeModel,
                         dispatch_uid="some_model_post_save")
Example #55
0
def generate_addons(num, owner, app_name, addon_type=ADDON_EXTENSION):
    """Generate `num` addons for the given `owner` and `app_name`."""
    # Disconnect this signal given that we issue a reindex at the end.
    post_save.disconnect(update_search_index, sender=Addon,
                         dispatch_uid='addons.search.index')

    # Tracks how many add-ons were featured per category (capped below).
    featured_categories = collections.defaultdict(int)
    user = generate_user(owner)
    app = APPS[app_name]
    # Candidate icons are the bundled defaults under 'icon/'.
    default_icons = [x[0] for x in icons() if x[0].startswith('icon/')]
    for name, category in _yield_name_and_cat(
            num, app=app, type=addon_type):
        # Use one of the default icons at random.
        icon_type = random.choice(default_icons)
        addon = create_addon(name=name, icon_type=icon_type,
                             application=app, type=addon_type)
        generate_addon_user_and_category(addon, user, category)
        generate_addon_preview(addon)
        generate_translations(addon)
        # Only feature 5 addons per category at max.
        if featured_categories[category] < 5:
            generate_collection(addon, app)
            featured_categories[category] += 1
        generate_ratings(addon, 5)
Example #56
0
    def __hash(self):
        """
        Create a MD5 cryptographic hash of the image, store it in the database, and rename the file.
        """
        hasher = hashlib.md5()
        filename = os.path.join(settings.MEDIA_ROOT, self.image.name)

        with open(filename, 'rb') as image:
            # Feed the file to the hasher in fixed-size chunks.
            for buffer in iter(partial(image.read, self.__block_size), b''):
                hasher.update(buffer)

            # Make changes if stored hash does not exist
            # NOTE(review): self._hash is assigned hasher.digest() (bytes)
            # below, but compared against hasher.hexdigest().lower() (str)
            # here — the two can never be equal, so this branch re-runs on
            # every call once a hash exists. Confirm which representation
            # the _hash field is meant to hold.
            if not self._hash or self._hash != hasher.hexdigest().lower():
                # Update hash and image name attributes
                self._hash = hasher.digest()
                self.image.name = os.path.join(
                    self.__relative_path,
                    hasher.hexdigest().lower())

                # Save with both post_save receivers detached so this save
                # cannot re-trigger hashing.
                from ..models import Icon
                post_save.disconnect(
                    Image.post_save, sender=Icon, dispatch_uid='0')
                post_save.disconnect(
                    Icon.post_save, sender=Icon, dispatch_uid='1')

                self.save()

                post_save.connect(
                    Image.post_save, sender=Icon, dispatch_uid='0')
                post_save.connect(Icon.post_save, sender=Icon, dispatch_uid='1')

                # Update filesystem
                new_filename = os.path.join(
                    settings.MEDIA_ROOT, self.image.name)
                os.rename(filename, new_filename)
Example #57
0
def create_iati_file(sender, **kwargs):
    """
    Create an IATI XML file when an entry in the iati_export table is saved, with projects.

    Status codes written here: 2 while generating, 3 on success, 4 on
    failure (presumably matching IatiExport's status choices — confirm).

    :param sender: IatiExport model
    """
    iati_export = kwargs.get("instance", None)
    # Guard before dereferencing: the original accessed
    # iati_export.projects before checking the instance for None.
    if not iati_export:
        return
    projects = iati_export.projects.all()
    if not projects:
        return

    # Disconnect so the saves below cannot re-enter this receiver; the
    # finally block guarantees reconnection on every exit path.
    post_save.disconnect(create_iati_file, sender=sender)
    try:
        iati_export.status = 2
        iati_export.save()
        iati_xml = IatiXML(projects)
        iati_file = iati_xml.save_file(
            str(iati_export.reporting_organisation.pk),
            datetime.utcnow().strftime("%Y%m%d-%H%M%S") + '.xml')
        iati_export.iati_file = iati_file
        iati_export.status = 3
        iati_export.save()
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed; any generation error marks the export
        # as failed.
        iati_export.status = 4
        iati_export.save()
    finally:
        post_save.connect(create_iati_file, sender=sender)
Example #58
0
def disconnect_entry_signals():
    """
    Disconnect all the signals on Entry model.
    """
    # Three post_save receivers are keyed by dispatch_uid...
    for uid in (ENTRY_PS_PING_DIRECTORIES,
                ENTRY_PS_PING_EXTERNAL_URLS,
                ENTRY_PS_FLUSH_SIMILAR_CACHE):
        post_save.disconnect(sender=Entry, dispatch_uid=uid)
    # ...plus one post_delete receiver.
    post_delete.disconnect(sender=Entry,
                           dispatch_uid=ENTRY_PD_FLUSH_SIMILAR_CACHE)
Example #59
0
def disable_computed_fields():
    """
    Generator used as a context manager: detach the inventory-update
    receivers for every relevant sender, yield to the caller, then
    restore them via connect_computed_field_signals().
    """
    for model in (Host, Group, InventorySource, Job):
        post_save.disconnect(emit_update_inventory_on_created_or_deleted,
                             sender=model)
        post_delete.disconnect(emit_update_inventory_on_created_or_deleted,
                               sender=model)
    yield
    connect_computed_field_signals()
Example #60
0
def deregister_signal_handlers():
    # Disconnects the signal handlers for easy access in importers
    m2m_changed.disconnect(product_category_m2m_changed_signal_handler,
                           sender=Product.categories.through)
    post_save.disconnect(product_post_save_signal_handler, sender=Product)
    post_delete.disconnect(product_post_delete_signal_handler, sender=Product)
    post_save.disconnect(category_change_handler, sender=Category)
    post_delete.disconnect(category_change_handler, sender=Category)
    # Stock-record receivers are detached only when the setting is on —
    # presumably mirroring the condition under which they were attached;
    # confirm against the registration side.
    if settings.HANDLE_STOCKRECORD_CHANGES:
        post_save.disconnect(stockrecord_change_handler, sender=StockRecord)
        post_delete.disconnect(stockrecord_post_delete_handler,
                               sender=StockRecord)
    request_finished.disconnect(update_index.synchronize_searchindex)