Example #1
    def _update_or_create_shard(self, step):
        """ Find or create a random shard and alter its `count` by the given step. """
        shard_index = random.randint(0, self.field.shard_count - 1)
        # Converting the set to a list introduces some randomness in the ordering, but that's fine
        shard_pks = list(self.field.value_from_object(self.instance))  # needs to be indexable
        try:
            shard_pk = shard_pks[shard_index]
        except IndexError:
            # We don't have this many shards yet, so create a new one
            with transaction.atomic(xg=True):
                # We must re-fetch the instance to ensure that we do this atomically, but we must
                # also update self.instance so that the calling code which is referencing
                # self.instance also gets the updated list of shard PKs
                new_shard = self._create_shard(count=step)
                new_instance = self.instance.__class__._default_manager.get(pk=self.instance.pk)
                new_instance_shard_pks = getattr(new_instance, self.field.attname, set())
                new_instance_shard_pks.add(new_shard.pk)
                setattr(self.instance, self.field.attname, new_instance_shard_pks)
                models.Model.save(new_instance)  # avoid custom save method, which might do DB lookups
        else:
            with transaction.atomic():
                from djangae.models import CounterShard
                shard = CounterShard.objects.get(pk=shard_pk)
                shard.count += step
                shard.save()
Example #2
    def _update_or_create_shard(self, step, is_reset=False):
        """ Find or create a random shard and alter its `count` by the given step. """
        shard_index = random.randint(0, self.field.shard_count - 1)
        # Converting the set to a list introduces some randomness in the ordering, but that's fine
        shard_pks = list(self.field.value_from_object(self.instance)) # needs to be indexable
        try:
            shard_pk = shard_pks[shard_index]
        except IndexError:
            # We don't have this many shards yet, so create a new one
            with transaction.atomic(xg=True):
                # We must re-fetch the instance to ensure that we do this atomically, but we must
                # also update self.instance so that the calling code which is referencing
                # self.instance also gets the updated list of shard PKs
                new_shard = self._create_shard(count=step)
                new_instance = self.instance.__class__._default_manager.get(pk=self.instance.pk)
                new_instance_shard_pks = getattr(new_instance, self.field.attname, set())
                new_instance_shard_pks.add(new_shard.pk)
                setattr(self.instance, self.field.attname, new_instance_shard_pks)
                models.Model.save(new_instance) # avoid custom save method, which might do DB lookups
                shard = new_shard
        else:
            with transaction.atomic():
                from djangae.models import CounterShard
                shard = CounterShard.objects.get(pk=shard_pk)
                shard.count += step
                shard.save()

        # if the ShardedCounter has on_change callback, run it now
        if self.field.on_change:
            self.field.on_change(self.instance, step, is_reset=is_reset)
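Example #2 threads an on_change hook through the shard update (`self.field.on_change(self.instance, step, is_reset=is_reset)`). A minimal sketch of how a field consumer might register such a callback is shown below; the ShardedCounterField class name, its import path, and the Post model are illustrative assumptions based only on the attributes used above (shard_count, on_change), not a verified djangae API.

    # Hedged sketch: field class, import path and model are assumptions for illustration.
    import logging

    from django.db import models
    from djangae.fields import ShardedCounterField  # import path may differ between djangae versions

    def log_counter_change(instance, step, is_reset=False):
        # Mirrors the callback signature used in Example #2: (instance, step, is_reset)
        action = "reset" if is_reset else "changed"
        logging.info("Counter on %s %s by %s", instance.pk, action, step)

    class Post(models.Model):
        like_count = ShardedCounterField(shard_count=24, on_change=log_counter_change)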
Example #3
    def test_independent_transaction_applies_to_outer_context(self):
        """
            When a transaction commits successfully, we can apply its cache to the outer stack. This
            alters the behaviour of transactions a little but in a positive way. Things that change are:

            1. If you run an independent transaction inside another transaction, a subsequent Get for an entity
               updated there will return the updated instance from the cache. Due to serialization of transactions
               it's possible that this would have happened anyway (the outer transaction wouldn't start until the independent
               one had finished). It makes this behaviour consistent even when serialization isn't possible.
            2. Due to the fact the context cache is hit within a transaction, you can now Put, then Get an entity and it
               will return its current state (as the transaction would see it), rather than the state at the beginning of the
               transaction. This behaviour is nicer than the default.
        """

        entity_data = {
            "field1": "Apple",
            "comb1": 1,
            "comb2": "Cherry"
        }

        original = CachingTestModel.objects.create(**entity_data)
        with transaction.atomic():
            with transaction.atomic(independent=True):
                inner = CachingTestModel.objects.get(pk=original.pk)
                inner.field1 = "Banana"
                inner.save()

            outer = CachingTestModel.objects.get(pk=original.pk)
            self.assertEqual("Banana", outer.field1)

            outer.field1 = "Apple"
            outer.save()

        original = CachingTestModel.objects.get(pk=original.pk)
        self.assertEqual("Apple", original.field1)
Example #4
    def test_independent_transaction_applies_to_outer_context(self):
        """
            When a transaction commits successfully, we can apply its cache to the outer stack. This
            alters the behaviour of transactions a little but in a positive way. Things that change are:

            1. If you run an independent transaction inside another transaction, a subsequent Get for an entity
               updated there will return the updated instance from the cache. Due to serialization of transactions
               it's possible that this would have happened anyway (the outer transaction wouldn't start until the independent
               one had finished). It makes this behaviour consistent even when serialization isn't possible.
            2. Due to the fact the context cache is hit within a transaction, you can now Put, then Get an entity and it
               will return its current state (as the transaction would see it), rather than the state at the beginning of the
               transaction. This behaviour is nicer than the default.
        """

        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        original = CachingTestModel.objects.create(**entity_data)
        with transaction.atomic():
            with transaction.atomic(independent=True):
                inner = CachingTestModel.objects.get(pk=original.pk)
                inner.field1 = "Banana"
                inner.save()

            outer = CachingTestModel.objects.get(pk=original.pk)
            self.assertEqual("Banana", outer.field1)

            outer.field1 = "Apple"
            outer.save()

        original = CachingTestModel.objects.get(pk=original.pk)
        self.assertEqual("Apple", original.field1)
Example #5
    def test_repeated_usage_in_a_loop(self):
        pk = TestUser.objects.create(username="******").pk
        for i in xrange(4):
            with transaction.atomic(xg=True):
                TestUser.objects.get(pk=pk)
                continue

        with transaction.atomic(xg=True):
            TestUser.objects.get(pk=pk)
Example #6
    def test_repeated_usage_in_a_loop(self):
        pk = TestUser.objects.create(username="******").pk
        for i in xrange(4):
            with transaction.atomic(xg=True):
                TestUser.objects.get(pk=pk)
                continue

        with transaction.atomic(xg=True):
            TestUser.objects.get(pk=pk)
Example #7
    def test_non_atomic_context_manager(self):
        from .test_connector import TestUser
        existing = TestUser.objects.create(username="******",
                                           field2="exists")

        with transaction.atomic():
            self.assertTrue(transaction.in_atomic_block())

            user = TestUser.objects.create(username="******", field2="bar")

            with transaction.non_atomic():
                # We're outside the transaction, so the user should not exist
                self.assertRaises(TestUser.DoesNotExist,
                                  TestUser.objects.get,
                                  pk=user.pk)
                self.assertFalse(transaction.in_atomic_block())

                with sleuth.watch(
                        "google.appengine.api.datastore.Get") as datastore_get:
                    TestUser.objects.get(
                        pk=existing.pk
                    )  #Should hit the cache, not the datastore

                self.assertFalse(datastore_get.called)

            with transaction.atomic(independent=True):
                user2 = TestUser.objects.create(username="******", field2="bar2")
                self.assertTrue(transaction.in_atomic_block())

                with transaction.non_atomic():
                    self.assertFalse(transaction.in_atomic_block())
                    self.assertRaises(TestUser.DoesNotExist,
                                      TestUser.objects.get,
                                      pk=user2.pk)

                    with transaction.non_atomic():
                        self.assertFalse(transaction.in_atomic_block())
                        self.assertRaises(TestUser.DoesNotExist,
                                          TestUser.objects.get,
                                          pk=user2.pk)

                        with sleuth.watch("google.appengine.api.datastore.Get"
                                          ) as datastore_get:
                            TestUser.objects.get(
                                pk=existing.pk
                            )  #Should hit the cache, not the datastore

                    self.assertFalse(transaction.in_atomic_block())
                    self.assertRaises(TestUser.DoesNotExist,
                                      TestUser.objects.get,
                                      pk=user2.pk)

                self.assertTrue(TestUser.objects.filter(pk=user2.pk).exists())
                self.assertTrue(transaction.in_atomic_block())
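Examples #7 and #56 exercise djangae's transaction.non_atomic() context manager, which temporarily escapes the enclosing transaction so that reads see only committed data (and may be served from the context cache). A minimal usage sketch follows; the Order model is hypothetical and the import path for the transaction module is an assumption based on how it is used in these tests.

    # Hedged sketch: Order is a hypothetical model; djangae.db.transaction is assumed to be
    # the transaction module used throughout these examples.
    from django.db import models
    from djangae.db import transaction

    class Order(models.Model):
        total = models.IntegerField(default=0)

    def orders_visible_outside_transaction():
        with transaction.atomic():
            Order.objects.create(total=100)
            with transaction.non_atomic():
                # This query runs outside the transaction, so the Order created above
                # (not yet committed) is not included in the count.
                return Order.objects.count()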
Example #8
    def test_nested_transaction_doesnt_apply_to_outer_context(self):
        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        original = CachingTestModel.objects.create(**entity_data)
        with transaction.atomic():
            with transaction.atomic(independent=True):
                inner = CachingTestModel.objects.get(pk=original.pk)
                inner.field1 = "Banana"
                inner.save()

            outer = CachingTestModel.objects.get(pk=original.pk)
            self.assertEqual("Apple", outer.field1)

        original = CachingTestModel.objects.get(pk=original.pk)
        self.assertEqual("Banana", original.field1)
Example #9
File: __init__.py (Project: stucox/djangae)
    def increment(self):
        idx = random.randint(0, len(self) - 1)

        with transaction.atomic():
            shard = CounterShard.objects.get(pk=self[idx])
            shard.count += 1
            shard.save()
Example #10
    def test_consistent_read_updates_cache_outside_transaction(self):
        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        original = CachingTestModel.objects.create(**entity_data)

        clear_context_cache()

        CachingTestModel.objects.get(pk=original.pk)  # Should update the cache

        with sleuth.watch(
                "google.appengine.api.datastore.Get") as datastore_get:
            CachingTestModel.objects.get(pk=original.pk)

        self.assertFalse(datastore_get.called)

        clear_context_cache()

        with transaction.atomic():
            with sleuth.watch(
                    "google.appengine.api.datastore.Get") as datastore_get:
                CachingTestModel.objects.get(
                    pk=original.pk)  # Should *not* update the cache
                self.assertTrue(datastore_get.called)

        with sleuth.watch(
                "google.appengine.api.datastore.Get") as datastore_get:
            CachingTestModel.objects.get(pk=original.pk)

        self.assertTrue(datastore_get.called)
Example #11
    def test_consistent_read_updates_cache_outside_transaction(self):
        """
            A read inside a transaction shouldn't update the context cache outside that
            transaction
        """
        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        original = CachingTestModel.objects.create(**entity_data)

        caching.get_context().reset(keep_disabled_flags=True)

        CachingTestModel.objects.get(pk=original.pk)  # Should update the cache

        with sleuth.watch(
                "google.appengine.api.datastore.Get") as datastore_get:
            CachingTestModel.objects.get(pk=original.pk)

        self.assertFalse(datastore_get.called)

        caching.get_context().reset(keep_disabled_flags=True)

        with transaction.atomic():
            with sleuth.watch(
                    "google.appengine.api.datastore.Get") as datastore_get:
                CachingTestModel.objects.get(
                    pk=original.pk)  # Should *not* update the cache
                self.assertTrue(datastore_get.called)

        with sleuth.watch(
                "google.appengine.api.datastore.Get") as datastore_get:
            CachingTestModel.objects.get(pk=original.pk)

        self.assertTrue(datastore_get.called)
Example #12
    def refresh_tokens(self):
        """
        Refreshes self.access_token and updates self.token_expiry.
        Returns True if refresh was successful, False otherwise.
        """
        lock_id = "refresh_tokens:%s" % self.pk
        with lock(lock_id, steal_after_ms=6000):
            self.refresh_from_db()
            if self._oauth_tokens_are_valid():  # pragma: no cover
                return
            credentials = OAuth2Credentials(
                self.access_token, settings.GOOGLE_OAUTH2_CLIENT_ID,
                settings.GOOGLE_OAUTH2_CLIENT_SECRET, self.refresh_token,
                self.token_expiry,
                'https://accounts.google.com/o/oauth2/token',
                'YouJudge Client')

            # Do a refresh - this should update access_token and token_expiry
            try:
                credentials.refresh(httplib2.Http(proxy_info=None, timeout=5))
            except AccessTokenRefreshError as e:
                logging.error("Couldn't refresh oauth tokens for user {}. "
                              "Error was {}".format(self, e))
                return
            with transaction.atomic():
                self.refresh_from_db()
                if credentials.access_token != self.access_token:
                    self.access_token = credentials.access_token
                    self.token_expiry = credentials.token_expiry.replace(
                        tzinfo=pytz.UTC)
                    self.save()
Example #13
File: fields.py (Project: benvand/djangae)
    def increment(self):
        idx = random.randint(0, len(self) - 1)

        with transaction.atomic():
            shard = CounterShard.objects.get(pk=self[idx])
            shard.count += 1
            shard.save()
Example #14
File: models.py (Project: mrfuxi/fluent-2.0)
    def save(self, *args, **kwargs):
        assert self.text
        assert self.language_code

        # Generate the appropriate key on creation
        if self._state.adding:
            self.pk = MasterTranslation.generate_key(
                self.text, self.hint, self.language_code
            )

        # If there was no plural text specified, just use the default text
        if not self.plural_text:
            self.plural_text = self.text

        # If we are adding for the first time, then create a counterpart
        # translation for the master language
        if self._state.adding:
            with transaction.atomic(xg=True):
                new_trans = Translation.objects.create(
                    master_translation_id=self.pk,
                    language_code=self.language_code,
                    text=self.text,
                    denorm_master_text=self.text,
                    denorm_master_hint=self.hint
                )
                self.translations_by_language_code[self.language_code] = new_trans.pk
                self.translations.add(new_trans)

                return super(MasterTranslation, self).save(*args, **kwargs)
        else:
            # Otherwise just do a normal save
            return super(MasterTranslation, self).save(*args, **kwargs)
Example #15
    def test_save_inside_transaction_evicts_cache(self):
        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        identifiers = caching._apply_namespace(
            unique_utils.unique_identifiers_from_entity(
                CachingTestModel, FakeEntity(entity_data, id=222)),
            DEFAULT_NAMESPACE,
        )

        instance = CachingTestModel.objects.create(id=222, **entity_data)
        instance.refresh_from_db()  # Adds to memcache (consistent Get)

        for identifier in identifiers:
            self.assertEqual(entity_data, cache.get(identifier))

        with transaction.atomic():
            instance.field1 = "Banana"
            instance.save()

        # Make sure that altering inside the transaction evicted the item from the cache
        # and that a get then hits the datastore (which then in turn caches)
        with sleuth.watch(
                "google.appengine.api.datastore.Get") as datastore_get:
            for identifier in identifiers:
                self.assertIsNone(cache.get(identifier))

            self.assertEqual(
                "Banana",
                CachingTestModel.objects.get(pk=instance.pk).field1)
            self.assertTrue(datastore_get.called)
Example #16
    def test_save_caches_outside_transaction_only(self):
        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        identifiers = caching._apply_namespace(
            unique_utils.unique_identifiers_from_entity(
                CachingTestModel, FakeEntity(entity_data, id=222)),
            DEFAULT_NAMESPACE,
        )

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))

        instance = CachingTestModel.objects.create(id=222, **entity_data)
        instance.refresh_from_db()

        for identifier in identifiers:
            self.assertEqual(entity_data, cache.get(identifier))

        instance.delete()

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))

        with transaction.atomic():
            instance = CachingTestModel.objects.create(**entity_data)

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))
Example #17
def unindex_instance(instance):
    indexes = Index.objects.filter(instance_db_table=instance._meta.db_table, instance_pk=instance.pk).all()
    for index in indexes:
        try:
            while True:
                try:
                    with transaction.atomic(xg=True):
                        try:
                            index = Index.objects.get(pk=index.pk)
                        except Index.DoesNotExist:
                            return

                        count = GlobalOccuranceCount.objects.get(pk=index.iexact)
                        count.count -= index.occurances
                        count.save()
                        index.delete()

                        if count.count < 0:
                            logging.error("The GOC of {} was negative ({}) while unindexing {}", count.pk, count.count, index.pk)
                        break

                except transaction.TransactionFailedError:
                    logging.warning("Transaction collision, retrying!")
                    time.sleep(1)
                    continue
        except GlobalOccuranceCount.DoesNotExist:
            logging.warning("A GlobalOccuranceCount for Index: %s does not exist, ignoring", index.pk)
            continue
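Example #17 retries the whole unit of work in a loop whenever the datastore raises transaction.TransactionFailedError (a collision/contention error), sleeping briefly between attempts. That retry idiom can be factored into a small helper; the sketch below is illustrative only and assumes the same transaction module and exception class used in the example.

    # Hedged sketch of the retry-on-collision idiom from Example #17.
    import logging
    import time

    from djangae.db import transaction

    def run_with_retries(txn_body, attempts=5, xg=False, backoff_seconds=1):
        """ Run txn_body() inside transaction.atomic(), retrying on datastore contention. """
        for attempt in range(attempts):
            try:
                with transaction.atomic(xg=xg):
                    return txn_body()
            except transaction.TransactionFailedError:
                logging.warning("Transaction collision, retrying (attempt %d of %d)", attempt + 1, attempts)
                time.sleep(backoff_seconds)
        raise transaction.TransactionFailedError()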
Example #18
    def populate(self):
        """ Create all the CounterShard objects which will be used by this field. Useful to prevent
            additional saves being caused when you call increment() or decrement() due to having to
            update the list of shard PKs on the instance.
        """
        total_to_create = self.field.shard_count - len(self)
        while total_to_create:
            with transaction.atomic(xg=True):
                # We must re-fetch the instance to ensure that we do this atomically, but we must
                # also update self.instance so that the calling code which is referencing
                # self.instance also gets the updated list of shard PKs
                new_instance = self.instance.__class__._default_manager.get(
                    pk=self.instance.pk)
                new_instance_shard_pks = getattr(new_instance,
                                                 self.field.attname, set())
                # Re-check / update the number to create based on the refreshed instance from the DB
                total_to_create = self.field.shard_count - len(
                    new_instance_shard_pks)
                num_to_create = min(total_to_create,
                                    MAX_SHARDS_PER_TRANSACTION)

                new_shard_pks = set()
                for x in xrange(num_to_create):
                    new_shard_pks.add(self._create_shard(count=0).pk)

                new_instance_shard_pks.update(new_shard_pks)
                setattr(self.instance, self.field.attname,
                        new_instance_shard_pks)
                models.Model.save(
                    new_instance
                )  # avoid custom save method, which might do DB lookups
                total_to_create -= num_to_create
Example #19
    def test_save_caches_outside_transaction_only(self):
        entity_data = {
            "field1": "Apple",
            "comb1": 1,
            "comb2": "Cherry"
        }

        identifiers = unique_utils.unique_identifiers_from_entity(CachingTestModel, FakeEntity(entity_data, id=222))

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))

        instance = CachingTestModel.objects.create(id=222, **entity_data)

        for identifier in identifiers:
            self.assertEqual(entity_data, cache.get(identifier))

        instance.delete()

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))

        with transaction.atomic():
            instance = CachingTestModel.objects.create(**entity_data)


        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))
Example #20
    def test_save_inside_transaction_evicts_cache(self):
        entity_data = {
            "field1": "Apple",
            "comb1": 1,
            "comb2": "Cherry"
        }

        identifiers = caching._apply_namespace(
            unique_utils.unique_identifiers_from_entity(CachingTestModel, FakeEntity(entity_data, id=222)),
            DEFAULT_NAMESPACE,
        )

        instance = CachingTestModel.objects.create(id=222, **entity_data)
        for identifier in identifiers:
            self.assertEqual(entity_data, cache.get(identifier))

        with transaction.atomic():
            instance.field1 = "Banana"
            instance.save()

        # Make sure that altering inside the transaction evicted the item from the cache
        # and that a get then hits the datastore (which then in turn caches)
        with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
            for identifier in identifiers:
                self.assertIsNone(cache.get(identifier))

            self.assertEqual("Banana", CachingTestModel.objects.get(pk=instance.pk).field1)
            self.assertTrue(datastore_get.called)
Example #21
    def save(self, *args, **kwargs):
        if not isinstance(self.amount_paid, Decimal):
            self.amount_paid = Decimal(self.amount_paid or "0")
        if not isinstance(self.amount_left, Decimal):
            self.amount_left = Decimal(self.amount_left or "0")
        with transaction.atomic(xg=True):
            delta = 0
            if self.pk:
                old_state = Person.objects.get(pk=self.pk)
                delta = self.amount_paid - Decimal(old_state.amount_paid
                                                   or "0")
            else:
                delta = self.amount_paid
            super(Person, self).save(*args, **kwargs)

        if delta > 0:
            Receipt.objects.create(user=self.user, person=self, amount=delta)

        try:
            dc = DentalChart.objects.get(person_id=self.pk)
        except DentalChart.DoesNotExist:
            dc = DentalChart(person_id=self.pk)
        dc.dental_chart_type = self.dental_chart_type
        dc.dental_chart = self.dental_chart
        dc.save()
Example #22
    def test_save_wipes_entity_from_cache_inside_transaction(self):
        entity_data = {
            "field1": "Apple",
            "comb1": 1,
            "comb2": "Cherry"
        }

        identifiers = caching._apply_namespace(
            unique_utils.unique_identifiers_from_entity(CachingTestModel, FakeEntity(entity_data, id=222)),
            DEFAULT_NAMESPACE,
        )

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))

        instance = CachingTestModel.objects.create(id=222, **entity_data)

        for identifier in identifiers:
            self.assertEqual(entity_data, cache.get(identifier))

        with transaction.atomic():
            instance.save()

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))
Example #23
    def test_consistent_read_updates_cache_outside_transaction(self):
        """
            A read inside a transaction shouldn't update the context cache outside that
            transaction
        """
        entity_data = {
            "field1": "Apple",
            "comb1": 1,
            "comb2": "Cherry"
        }

        original = CachingTestModel.objects.create(**entity_data)

        clear_context_cache()

        CachingTestModel.objects.get(pk=original.pk) # Should update the cache

        with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
            CachingTestModel.objects.get(pk=original.pk)

        self.assertFalse(datastore_get.called)

        clear_context_cache()

        with transaction.atomic():
            with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
                CachingTestModel.objects.get(pk=original.pk) # Should *not* update the cache
                self.assertTrue(datastore_get.called)

        with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
            CachingTestModel.objects.get(pk=original.pk)

        self.assertTrue(datastore_get.called)
Example #24
    def test_transactional_save_wipes_the_cache_only_after_its_result_is_consistently_available(
            self):
        entity_data = {
            "field1": "old",
        }

        identifiers = caching._apply_namespace(
            unique_utils.unique_identifiers_from_entity(
                CachingTestModel, FakeEntity(entity_data, id=222)),
            DEFAULT_NAMESPACE,
        )

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))

        instance = CachingTestModel.objects.create(id=222, **entity_data)
        instance.refresh_from_db()  # Add to memcache (consistent Get)

        for identifier in identifiers:
            self.assertEqual("old", cache.get(identifier)["field1"])

        @non_transactional
        def non_transactional_read(instance_pk):
            CachingTestModel.objects.get(pk=instance_pk)

        with transaction.atomic():
            instance.field1 = "new"
            instance.save()
            non_transactional_read(
                instance.pk)  # could potentially recache the old object

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))
Example #25
    def test_transactional_save_wipes_the_cache_only_after_its_result_is_consistently_available(self):
        entity_data = {
            "field1": "old",
        }

        identifiers = caching._apply_namespace(
            unique_utils.unique_identifiers_from_entity(
                CachingTestModel, FakeEntity(entity_data, id=222)
            ),
            DEFAULT_NAMESPACE,
        )

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))

        instance = CachingTestModel.objects.create(id=222, **entity_data)

        for identifier in identifiers:
            self.assertEqual("old", cache.get(identifier)["field1"])

        @non_transactional
        def non_transactional_read(instance_pk):
            CachingTestModel.objects.get(pk=instance_pk)

        with transaction.atomic():
            instance.field1 = "new"
            instance.save()
            non_transactional_read(instance.pk)  # could potentially recache the old object

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))
Example #26
    def populate(self):
        """ Create all the CounterShard objects which will be used by this field. Useful to prevent
            additional saves being caused when you call increment() or decrement() due to having to
            update the list of shard PKs on the instance.
        """
        total_to_create = self.field.shard_count - len(self)
        while total_to_create:
            with transaction.atomic(xg=True):
                # We must re-fetch the instance to ensure that we do this atomically, but we must
                # also update self.instance so that the calling code which is referencing
                # self.instance also gets the updated list of shard PKs
                new_instance = self.instance.__class__._default_manager.get(pk=self.instance.pk)
                new_instance_shard_pks = getattr(new_instance, self.field.attname, set())
                # Re-check / update the number to create based on the refreshed instance from the DB
                total_to_create = self.field.shard_count - len(new_instance_shard_pks)
                num_to_create = min(total_to_create, MAX_SHARDS_PER_TRANSACTION)

                new_shard_pks = set()
                for x in range(num_to_create):
                    new_shard_pks.add(self._create_shard(count=0).pk)

                new_instance_shard_pks.update(new_shard_pks)
                setattr(self.instance, self.field.attname, new_instance_shard_pks)
                models.Model.save(new_instance) # avoid custom save method, which might do DB lookups
                total_to_create -= num_to_create
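Both versions of populate() (Examples #18 and #26) exist so that all of the shards can be created up front, avoiding the extra instance save that _update_or_create_shard() performs whenever it has to grow the shard-PK list. A minimal usage sketch, reusing the hypothetical Post model from the sketch after Example #2 (the accessor methods shown are the ones used elsewhere in these examples, but the exact field API is not verified against a specific djangae version):

    # Hedged sketch, reusing the hypothetical Post model defined after Example #2.
    post = Post.objects.create()
    post.like_count.populate()       # pre-create all shard_count CounterShard rows up front
    for _ in range(10):
        post.like_count.increment()  # no further saves of `post` are needed to grow the shard list
    print(post.like_count.value())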
Example #27
    def callback(instance, text, term):
        with transaction.atomic(xg=True):
            term_count = text.lower().count(term)
            index_pk = InstanceIndex.calc_id(term, instance)
            try:
                index = InstanceIndex.objects.get(pk=index_pk)

                if index.count == term_count:
                    return
                else:
                    # OK, something weird happened and we indexed twice
                    # we just need to figure out what difference we need to add to
                    # the global count!
                    term_count = term_count - index.count
            except InstanceIndex.DoesNotExist:
                index = InstanceIndex.objects.create(
                    pk=index_pk,
                    **{
                        "count": term_count,
                        "iexact": term,
                        "instance_db_table": instance._meta.db_table,
                        "instance_pk": instance.pk,
                    })

            counter, created = TermCount.objects.get_or_create(pk=term)
            counter.count += term_count
            counter.save()
Example #28
    def callback(instance, text, term):
        with transaction.atomic(xg=True):
            term_count = text.lower().count(term)
            index_pk = InstanceIndex.calc_id(term, instance)
            try:
                index = InstanceIndex.objects.get(pk=index_pk)

                if index.count == term_count:
                    return
                else:
                    # OK, something weird happened and we indexed twice
                    # we just need to figure out what difference we need to add to
                    # the global count!
                    term_count = term_count - index.count
            except InstanceIndex.DoesNotExist:
                index = InstanceIndex.objects.create(
                    pk=index_pk,
                    **{
                        "count": term_count,
                        "iexact": term,
                        "instance_db_table": instance._meta.db_table,
                        "instance_pk": instance.pk,
                    }
                )

            counter, created = TermCount.objects.get_or_create(
                pk=term
            )
            counter.count += term_count
            counter.save()
Example #29
    def test_nested_transactions_dont_get_their_own_context(self):
        """
            The datastore doesn't support nested transactions, so when there is a nested
            atomic block which isn't marked as independent, the atomic is a no-op. Therefore
            we shouldn't push a context here, and we shouldn't pop it at the end either.
        """
        context = caching.get_context()
        self.assertEqual(1, context.stack.size)
        with transaction.atomic():
            self.assertEqual(2, context.stack.size)
            with transaction.atomic():
                self.assertEqual(2, context.stack.size)
                with transaction.atomic():
                    self.assertEqual(2, context.stack.size)
                self.assertEqual(2, context.stack.size)
            self.assertEqual(2, context.stack.size)
        self.assertEqual(1, context.stack.size)
Example #30
File: counting.py (Project: d0ugal/djangae)
    def reset(self):
        """ Reset the counter to 0. """
        with transaction.atomic(xg=True):
            value = self.value()
            if value > 0:
                self.decrement(value)
            elif value < 0:
                self.increment(value)
Example #31
    def reset(self):
        """ Reset the counter to 0. """
        with transaction.atomic(xg=True):
            value = self.value()
            if value > 0:
                self.decrement(value)
            elif value < 0:
                self.increment(value)
Example #32
    def delete(self, *args, **kwargs):
        """ Ensure that the count field on the Speech is decremented when this is deleted. """
        with transaction.atomic(xg=True):
            speech = Speech.objects.get(pk=self.speech_id)  # reload inside transaction
            value = getattr(speech, self.speech_model_count_field)
            setattr(speech, self.speech_model_count_field, value - 1)
            speech.save()
            return super(FlagBaseModel, self).delete(*args, **kwargs)
Example #33
    def test_interaction_with_datastore_txn(self):
        from google.appengine.ext import db
        from google.appengine.datastore.datastore_rpc import TransactionOptions
        from .test_connector import TestUser

        @db.transactional(propagation=TransactionOptions.INDEPENDENT)
        def some_indie_txn(_username):
            TestUser.objects.create(username=_username)

        @db.transactional()
        def some_non_indie_txn(_username):
            TestUser.objects.create(username=_username)

        @db.transactional()
        def double_nested_transactional():
            @db.transactional(propagation=TransactionOptions.INDEPENDENT)
            def do_stuff():
                TestUser.objects.create(username="******")
                raise ValueError()

            try:
                return do_stuff()
            except Exception:
                return

        with transaction.atomic():
            double_nested_transactional()


        @db.transactional()
        def something_containing_atomic():
            with transaction.atomic():
                TestUser.objects.create(username="******")

        something_containing_atomic()

        with transaction.atomic():
            with transaction.atomic():
                some_non_indie_txn("Bob1")
                some_indie_txn("Bob2")
                some_indie_txn("Bob3")

        with transaction.atomic(independent=True):
            some_non_indie_txn("Fred1")
            some_indie_txn("Fred2")
            some_indie_txn("Fred3")
Example #34
    def test_transactions_get_their_own_context(self):
        with sleuth.watch(
                "djangae.db.backends.appengine.context.ContextStack.push"
        ) as context_push:
            with transaction.atomic():
                pass

            self.assertTrue(context_push.called)
Example #35
    def test_nested_transactions_dont_get_their_own_context(self):
        """
            The datastore doesn't support nested transactions, so when there is a nested
            atomic block which isn't marked as independent, the atomic is a no-op. Therefore
            we shouldn't push a context here, and we shouldn't pop it at the end either.
        """

        self.assertEqual(1, caching._context.stack.size)
        with transaction.atomic():
            self.assertEqual(2, caching._context.stack.size)
            with transaction.atomic():
                self.assertEqual(2, caching._context.stack.size)
                with transaction.atomic():
                    self.assertEqual(2, caching._context.stack.size)
                self.assertEqual(2, caching._context.stack.size)
            self.assertEqual(2, caching._context.stack.size)
        self.assertEqual(1, caching._context.stack.size)
Example #36
File: tests.py (Project: potatohel/djangae)
    def test_atomic_context_manager(self):

        with self.assertRaises(ValueError):
            with transaction.atomic():
                TestUser.objects.create(username="******", field2="bar")
                raise ValueError()

        self.assertEqual(0, TestUser.objects.count())
Example #37
    def test_atomic_context_manager(self):

        with self.assertRaises(ValueError):
            with transaction.atomic():
                TestUser.objects.create(username="******", field2="bar")
                raise ValueError()

        self.assertEqual(0, TestUser.objects.count())
Example #38
    def test_interaction_with_datastore_txn(self):
        from google.appengine.ext import db
        from google.appengine.datastore.datastore_rpc import TransactionOptions
        from .test_connector import TestUser

        @db.transactional(propagation=TransactionOptions.INDEPENDENT)
        def some_indie_txn(_username):
            TestUser.objects.create(username=_username)

        @db.transactional()
        def some_non_indie_txn(_username):
            TestUser.objects.create(username=_username)

        @db.transactional()
        def double_nested_transactional():
            @db.transactional(propagation=TransactionOptions.INDEPENDENT)
            def do_stuff():
                TestUser.objects.create(username="******")
                raise ValueError()

            try:
                return do_stuff()
            except Exception:
                return

        with transaction.atomic():
            double_nested_transactional()

        @db.transactional()
        def something_containing_atomic():
            with transaction.atomic():
                TestUser.objects.create(username="******")

        something_containing_atomic()

        with transaction.atomic():
            with transaction.atomic():
                some_non_indie_txn("Bob1")
                some_indie_txn("Bob2")
                some_indie_txn("Bob3")

        with transaction.atomic(independent=True):
            some_non_indie_txn("Fred1")
            some_indie_txn("Fred2")
            some_indie_txn("Fred3")
Example #39
File: tests.py (Project: potatohel/djangae)
    def test_transactions_clear_the_context_cache(self):
        UniqueModel.objects.create(unique_field="test") #Create an instance

        with transaction.atomic():
            self.assertFalse(caching.context.cache)
            UniqueModel.objects.create(unique_field="test2", unique_combo_one=1) #Create an instance
            self.assertTrue(caching.context.cache)

        self.assertFalse(caching.context.cache)
Example #40
def begin_scan(marshall):
    """ Trigger tasks to scan template files and python files for translatable strings and to
        create corresponding MasterTranslation objects for them.
    """
    try:
        marshall.refresh_from_db()
    except ScanMarshall.DoesNotExist:
        logger.warn("Not starting scan as scanmarshall was missing")
        return

    scan_id = uuid.uuid4()

    files_to_scan = []

    def walk_dir(root, dirs, files):
        for f in files:
            filename = os.path.normpath(os.path.join(root, f))
            if os.path.splitext(filename)[1] not in (".py", ".html"):
                continue

            files_to_scan.append(filename)

    for app in settings.INSTALLED_APPS:
        module_path = os.path.dirname(
            apps.get_app_config(app.split(".")[-1]).module.__file__)

        for root, dirs, files in os.walk(module_path, followlinks=True):
            walk_dir(root, dirs, files)

    # Scan the django directory
    for root, dirs, files in os.walk(os.path.dirname(django.__file__),
                                     followlinks=True):
        walk_dir(root, dirs, files)

    # Update the ScanMarshall object with the total number of files to scan.  We must do this
    # before we defer any of the tasks, because if we go for a gradual increment(), defer(),
    # increment(), defer() approach, then the first task could finish and decrement
    # `files_left_to_process` back to 0 (thereby marking the scan as done) before we've deferred
    # the second task.
    # We can't defer the tasks inside the transaction because App Engine only lets us defer a max
    # of 5 tasks inside a single transaction.
    with transaction.atomic(xg=True):
        marshall.refresh_from_db()
        marshall.files_left_to_process += len(files_to_scan)
        marshall.save()

    for offset in xrange(0, len(files_to_scan), 100):
        files = files_to_scan[offset:offset + 100]
        # Defer with a random delay of between 0 and 10 seconds, just to avoid transaction
        # collisions caused by the tasks all finishing and updating the marshall at the same time.
        defer(_scan_list,
              marshall,
              scan_id,
              files,
              _countdown=random.randint(0, 10))

    logger.info("Deferred tasks to scan %d files", len(files_to_scan))
Example #41
    def delete(self, *args, **kwargs):
        """ Ensure that the count field on the Speech is decremented when this is deleted. """
        with transaction.atomic(xg=True):
            speech = Speech.objects.get(
                pk=self.speech_id)  # reload inside transaction
            value = getattr(speech, self.speech_model_count_field)
            setattr(speech, self.speech_model_count_field, value - 1)
            speech.save()
            return super(FlagBaseModel, self).delete(*args, **kwargs)
Example #42
def _scan_list(marshall, scan_id, filenames):
    # FIXME: Need to clean up the translations which aren't in use anymore

    for filename in filenames:
        # Redeploying to a new version can cause this
        if not os.path.exists(filename):
            continue

        with open(filename) as f:
            content = f.read()

        results = parse_file(content, os.path.splitext(filename)[-1])

        for text, plural, hint, group in results:
            if not text:
                logger.warn("Empty translation discovered: '{}', '{}', '{}', '{}'".format(text, plural, hint, group))
                continue

            with transaction.atomic(xg=True):
                key = MasterTranslation.generate_key(text, hint, settings.LANGUAGE_CODE)

                try:
                    mt = MasterTranslation.objects.get(pk=key)
                except MasterTranslation.DoesNotExist:
                    mt = MasterTranslation(
                        pk=key, text=text, hint=hint, language_code=settings.LANGUAGE_CODE
                    )

                # By the very act of getting here, this is true
                mt.used_in_code_or_templates = True

                # If we last updated during this scan, then append, otherwise replace
                if mt.last_updated_by_scan_uuid == scan_id:
                    mt.used_by_groups_in_code_or_templates.add(group)
                else:
                    mt.used_by_groups_in_code_or_templates = { group }

                mt.last_updated_by_scan_uuid = scan_id
                mt.save()

    with transaction.atomic():
        marshall.refresh_from_db()
        marshall.files_left_to_process -= len(filenames)
        marshall.save()
Example #43
def _scan_list(marshall, scan_id, filenames):
    # FIXME: Need to clean up the translations which aren't in use anymore

    for filename in filenames:
        # Redeploying to a new version can cause this
        if not os.path.exists(filename):
            continue

        with open(filename) as f:
            content = unicode(f.read(), settings.DEFAULT_CHARSET)

        results = parse_file(content, os.path.splitext(filename)[-1])

        for text, plural, hint, group in results:
            if not text:
                logger.warn("Empty translation discovered: '{}', '{}', '{}', '{}'".format(text, plural, hint, group))
                continue

            with transaction.atomic(xg=True):
                key = MasterTranslation.generate_key(text, hint, settings.LANGUAGE_CODE)

                try:
                    mt = MasterTranslation.objects.get(pk=key)
                except MasterTranslation.DoesNotExist:
                    mt = MasterTranslation(
                        pk=key, text=text, hint=hint, language_code=settings.LANGUAGE_CODE
                    )

                # By the very act of getting here, this is true
                mt.used_in_code_or_templates = True

                # If we last updated during this scan, then append, otherwise replace
                if mt.last_updated_by_scan_uuid == scan_id:
                    mt.used_by_groups_in_code_or_templates.add(group)
                else:
                    mt.used_by_groups_in_code_or_templates = { group }

                mt.last_updated_by_scan_uuid = scan_id
                mt.save()

    with transaction.atomic():
        marshall.refresh_from_db()
        marshall.files_left_to_process -= len(filenames)
        marshall.save()
Example #44
    def test_nested_rollback_doesnt_apply_on_outer_commit(self):
        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        original = CachingTestModel.objects.create(**entity_data)
        with transaction.atomic():
            try:
                with transaction.atomic(independent=True):
                    inner = CachingTestModel.objects.get(pk=original.pk)
                    inner.field1 = "Banana"
                    inner.save()
                    raise ValueError()  # Will rollback the transaction
            except ValueError:
                pass

            outer = CachingTestModel.objects.get(pk=original.pk)
            self.assertEqual("Apple", outer.field1)

        original = CachingTestModel.objects.get(pk=original.pk)
        self.assertEqual("Apple", original.field1)  # Shouldn't have changed
Example #45
    def append_file(filename, files_to_scan):
        files_to_scan.append(filename)
        if len(files_to_scan) == 100:
            with transaction.atomic(xg=True):
                marshall.refresh_from_db()
                marshall.files_left_to_process += 100
                marshall.save()
                defer(_scan_list, marshall, scan_id, files_to_scan, _transactional=True)

            files_to_scan = []
Example #46
    def append_file(filename, files_to_scan):
        files_to_scan.append(filename)
        if len(files_to_scan) == 100:
            with transaction.atomic(xg=True):
                marshall.refresh_from_db()
                marshall.files_left_to_process += 100
                marshall.save()
                defer(_scan_list, marshall, scan_id, files_to_scan, _transactional=True)

            files_to_scan = []
Example #47
File: caching.py (Project: Wombatpm/djangae)
    def test_nested_transaction_doesnt_apply_to_outer_context(self):
        entity_data = {
            "field1": "Apple",
            "comb1": 1,
            "comb2": "Cherry"
        }

        original = CachingTestModel.objects.create(**entity_data)
        with transaction.atomic():
            with transaction.atomic(independent=True):
                inner = CachingTestModel.objects.get(pk=original.pk)
                inner.field1 = "Banana"
                inner.save()

            outer = CachingTestModel.objects.get(pk=original.pk)
            self.assertEqual("Apple", outer.field1)

        original = CachingTestModel.objects.get(pk=original.pk)
        self.assertEqual("Banana", original.field1)
Example #48
    def test_outermost_transaction_applies_all_contexts_on_commit(self):
        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        with transaction.atomic():
            with transaction.atomic(independent=True):
                instance = CachingTestModel.objects.create(**entity_data)

            # At this point the instance should be unretrievable, even though we just created it
            try:
                CachingTestModel.objects.get(pk=instance.pk)
                self.fail("Unexpectedly was able to retrieve instance")
            except CachingTestModel.DoesNotExist:
                pass

        # Should now exist in the cache
        with sleuth.watch(
                "google.appengine.api.datastore.Get") as datastore_get:
            CachingTestModel.objects.get(pk=instance.pk)
            self.assertFalse(datastore_get.called)
Example #49
    def test_nested_rollback_doesnt_apply_on_outer_commit(self):
        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        original = CachingTestModel.objects.create(**entity_data)
        with transaction.atomic():
            try:
                with transaction.atomic(independent=True):
                    inner = CachingTestModel.objects.get(pk=original.pk)
                    inner.field1 = "Banana"
                    inner.save()
                    raise ValueError()  # Will rollback the transaction
            except ValueError:
                pass

            outer = CachingTestModel.objects.get(pk=original.pk)
            self.assertEqual("Apple", outer.field1)

        original = CachingTestModel.objects.get(pk=original.pk)
        self.assertEqual("Apple", original.field1)  # Shouldn't have changed
Example #50
    def test_refresh_if_unread_for_created_objects(self):
        """ refresh_if_unread should not refresh objects which have been *created* within the
            transaction, as at the DB level they will not exist.
        """
        from .test_connector import TestFruit

        # With caching
        with transaction.atomic() as txn:
            apple = TestFruit.objects.create(name="Apple", color="Red")
            apple.color = "Pink"  # Deliberately don't save
            txn.refresh_if_unread(apple)
            self.assertEqual(apple.color, "Pink")

        # Without caching
        with DisableCache():
            with transaction.atomic() as txn:
                apple = TestFruit.objects.create(name="Radish", color="Red")
                apple.color = "Pink"  # Deliberately don't save
                txn.refresh_if_unread(apple)
                self.assertEqual(apple.color, "Pink")
Example #51
    def test_get_by_key_hits_datastore_inside_transaction(self):
        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        original = CachingTestModel.objects.create(**entity_data)

        with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
            with transaction.atomic():
                instance = CachingTestModel.objects.get(pk=original.pk)
            self.assertEqual(original, instance)

        self.assertTrue(datastore_get.called)
Example #52
    def test_refresh_if_unread_for_created_objects(self):
        """ refresh_if_unread should not refresh objects which have been *created* within the
            transaction, as at the DB level they will not exist.
        """
        from .test_connector import TestFruit

        # With caching
        with transaction.atomic() as txn:
            apple = TestFruit.objects.create(name="Apple", color="Red")
            apple.color = "Pink"  # Deliberately don't save
            txn.refresh_if_unread(apple)
            self.assertEqual(apple.color, "Pink")

        # Without caching
        with DisableCache():
            with transaction.atomic() as txn:
                apple = TestFruit.objects.create(name="Radish", color="Red")
                apple.color = "Pink"  # Deliberately don't save
                txn.refresh_if_unread(apple)
                self.assertEqual(apple.color, "Pink")
Example #53
    def test_get_by_key_hits_datastore_inside_transaction(self):
        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        original = CachingTestModel.objects.create(**entity_data)

        with sleuth.watch(
                "google.appengine.api.datastore.Get") as datastore_get:
            with transaction.atomic():
                instance = CachingTestModel.objects.get(pk=original.pk)
            self.assertEqual(original, instance)

        self.assertTrue(datastore_get.called)
Example #54
    def test_prevent_read(self):
        from .test_connector import TestFruit

        apple = TestFruit.objects.create(name="Apple", color="Red")

        # Don't allow reading apple within the transaction
        with transaction.atomic() as txn:
            txn.prevent_read(TestFruit, apple.pk)

            self.assertRaises(transaction.PreventedReadError,
                              TestFruit.objects.get,
                              pk=apple.pk)
Example #55
    def callback(instance, text, term):
        try:
            with transaction.atomic(xg=True):
                index = InstanceIndex.objects.get(pk=InstanceIndex.calc_id(term, instance))

                counter = TermCount.objects.get(pk=term)
                counter.count -= index.count
                counter.save()

                index.delete()
        except (InstanceIndex.DoesNotExist, TermCount.DoesNotExist):
            pass
Example #56
    def test_non_atomic_context_manager(self):
        from .test_connector import TestUser
        existing = TestUser.objects.create(username="******", field2="exists")

        with transaction.atomic():
            self.assertTrue(transaction.in_atomic_block())

            user = TestUser.objects.create(username="******", field2="bar")

            with transaction.non_atomic():
                # We're outside the transaction, so the user should not exist
                self.assertRaises(TestUser.DoesNotExist, TestUser.objects.get, pk=user.pk)
                self.assertFalse(transaction.in_atomic_block())

                with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
                    TestUser.objects.get(pk=existing.pk)  # Should hit the cache, not the datastore

                self.assertFalse(datastore_get.called)

            with transaction.atomic(independent=True):
                user2 = TestUser.objects.create(username="******", field2="bar2")
                self.assertTrue(transaction.in_atomic_block())

                with transaction.non_atomic():
                    self.assertFalse(transaction.in_atomic_block())
                    self.assertRaises(TestUser.DoesNotExist, TestUser.objects.get, pk=user2.pk)

                    with transaction.non_atomic():
                        self.assertFalse(transaction.in_atomic_block())
                        self.assertRaises(TestUser.DoesNotExist, TestUser.objects.get, pk=user2.pk)

                        with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
                            # Should hit the cache, not the Datastore
                            TestUser.objects.get(pk=existing.pk)

                    self.assertFalse(transaction.in_atomic_block())
                    self.assertRaises(TestUser.DoesNotExist, TestUser.objects.get, pk=user2.pk)

                self.assertTrue(TestUser.objects.filter(pk=user2.pk).exists())
                self.assertTrue(transaction.in_atomic_block())
Example #57
File: caching.py (Project: Wombatpm/djangae)
    def test_outermost_transaction_applies_all_contexts_on_commit(self):
        entity_data = {
            "field1": "Apple",
            "comb1": 1,
            "comb2": "Cherry"
        }

        with transaction.atomic():
            with transaction.atomic(independent=True):
                instance = CachingTestModel.objects.create(**entity_data)

            # At this point the instance should be unretrievable, even though we just created it
            try:
                CachingTestModel.objects.get(pk=instance.pk)
                self.fail("Unexpectedly was able to retrieve instance")
            except CachingTestModel.DoesNotExist:
                pass

        # Should now exist in the cache
        with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
            CachingTestModel.objects.get(pk=instance.pk)
            self.assertFalse(datastore_get.called)
Example #58
    def test_refresh_if_unread_for_resaved_objects(self):
        """ refresh_if_unread should not refresh objects which have been re-saved within the
            transaction.
        """
        from .test_connector import TestFruit

        # With caching
        apple = TestFruit.objects.create(name="Apple", color="Red")
        with transaction.atomic() as txn:
            apple.save()
            apple.color = "Pink"  # Deliberately don't save
            txn.refresh_if_unread(apple)
            self.assertEqual(apple.color, "Pink")

        # Without caching
        radish = TestFruit.objects.create(name="Radish", color="Red")
        with DisableCache():
            with transaction.atomic() as txn:
                radish.save()
                radish.color = "Pink"  # Deliberately don't save
                txn.refresh_if_unread(radish)
                self.assertEqual(radish.color, "Pink")
Example #59
File: fields.py (Project: benvand/djangae)
    def decrement(self):
        # Find a non-empty shard and decrement it
        shards = self[:]
        random.shuffle(shards)
        for shard_id in shards:
            with transaction.atomic():
                shard = CounterShard.objects.get(pk=shard_id)
                if not shard.count:
                    continue
                else:
                    shard.count -= 1
                    shard.save()
                    break
Example #60
    def callback(instance, text, term):
        try:
            with transaction.atomic(xg=True):
                index = InstanceIndex.objects.get(
                    pk=InstanceIndex.calc_id(term, instance))

                counter = TermCount.objects.get(pk=term)
                counter.count -= index.count
                counter.save()

                index.delete()
        except (InstanceIndex.DoesNotExist, TermCount.DoesNotExist):
            pass