Example 1
    def test_bulk_cache(self):
        with sleuth.watch("djangae.db.backends.appengine.caching.KeyPrefixedClient.set_multi_async") as set_many_1:
            instance = CachingTestModel.objects.create(field1="Apple", comb1=1, comb2="Cherry")
            instance.refresh_from_db()

        self.assertEqual(set_many_1.call_count, 1)
        self.assertEqual(len(set_many_1.calls[0].args[1]), 3)

        CachingTestModel.objects.bulk_create(
            [
                CachingTestModel(field1="Banana", comb1=2, comb2="Cherry"),
                CachingTestModel(field1="Orange", comb1=3, comb2="Cherry"),
            ]
        )

        pks = list(CachingTestModel.objects.values_list("pk", flat=True))
        with sleuth.watch("djangae.db.backends.appengine.caching.KeyPrefixedClient.set_multi_async") as set_many_3:
            list(CachingTestModel.objects.filter(pk__in=pks).all())
        self.assertEqual(set_many_3.call_count, 1)
        self.assertEqual(len(set_many_3.calls[0].args[1]), 3 * len(pks))

        with sleuth.watch("djangae.db.backends.appengine.caching.KeyPrefixedClient.get_multi") as get_many:
            with sleuth.watch(
                "djangae.db.backends.appengine.caching.KeyPrefixedClient.delete_multi_async"
            ) as delete_many:
                CachingTestModel.objects.all().delete()

        self.assertEqual(get_many.call_count, 1)
        self.assertEqual(delete_many.call_count, 1)
        self.assertEqual(len(get_many.calls[0].args[1]), 3)  # Get by pk from cache
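The assertions in these examples lean on the call-recording attributes that a sleuth.watch context manager exposes: called, call_count, and the calls list, whose entries carry the positional args and keyword kwargs of each recorded call. A minimal sketch of that pattern, against a hypothetical myapp.utils.fetch function (not part of djangae), might look like this:

import unittest

import sleuth

from myapp import utils  # hypothetical module, used only for illustration


class WatchSketchTest(unittest.TestCase):

    def test_fetch_is_called_once_with_the_key(self):
        # watch() patches the dotted path but still runs the real function,
        # recording every call made while the block is active
        with sleuth.watch("myapp.utils.fetch") as fetch_watch:
            utils.fetch("some-key")

        self.assertTrue(fetch_watch.called)                         # at least one call happened
        self.assertEqual(fetch_watch.call_count, 1)                 # exact number of calls
        self.assertEqual(fetch_watch.calls[0].args[0], "some-key")  # positional args of the first call
        self.assertEqual(fetch_watch.calls[0].kwargs, {})           # keyword args of the first call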
Example 2
    def test_consistent_read_updates_cache_outside_transaction(self):
        """
            A read inside a transaction shouldn't update the context cache outside that
            transaction
        """
        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        original = CachingTestModel.objects.create(**entity_data)

        caching.get_context().reset(keep_disabled_flags=True)

        CachingTestModel.objects.get(pk=original.pk)  # Should update the cache

        with sleuth.watch(
                "google.appengine.api.datastore.Get") as datastore_get:
            CachingTestModel.objects.get(pk=original.pk)

        self.assertFalse(datastore_get.called)

        caching.get_context().reset(keep_disabled_flags=True)

        with transaction.atomic():
            with sleuth.watch(
                    "google.appengine.api.datastore.Get") as datastore_get:
                CachingTestModel.objects.get(
                    pk=original.pk)  # Should *not* update the cache
                self.assertTrue(datastore_get.called)

        with sleuth.watch(
                "google.appengine.api.datastore.Get") as datastore_get:
            CachingTestModel.objects.get(pk=original.pk)

        self.assertTrue(datastore_get.called)
Example 3
    def test_consistent_read_updates_cache_outside_transaction(self):
        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        original = CachingTestModel.objects.create(**entity_data)

        clear_context_cache()

        CachingTestModel.objects.get(pk=original.pk)  # Should update the cache

        with sleuth.watch(
                "google.appengine.api.datastore.Get") as datastore_get:
            CachingTestModel.objects.get(pk=original.pk)

        self.assertFalse(datastore_get.called)

        clear_context_cache()

        with transaction.atomic():
            with sleuth.watch(
                    "google.appengine.api.datastore.Get") as datastore_get:
                CachingTestModel.objects.get(
                    pk=original.pk)  # Should *not* update the cache
                self.assertTrue(datastore_get.called)

        with sleuth.watch(
                "google.appengine.api.datastore.Get") as datastore_get:
            CachingTestModel.objects.get(pk=original.pk)

        self.assertTrue(datastore_get.called)
Example 4
    def test_running_finished_operation_does_not_trigger_new_task(self):
        """ If we re-trigger an operation which has already been run and finished, it should simply
            return without starting a new task or updating the task marker.
        """
        TestModel.objects.create()

        operation = operations.AddFieldData(
            "testmodel", "new_field", models.CharField(max_length=100, default="squirrel")
        )
        # Run the operation and check that it finishes
        with sleuth.watch("djangae.db.migrations.operations.AddFieldData._start_task") as start:
            self.start_operation(operation)
            self.assertTrue(start.called)
        task_marker = datastore.Get(
            ShardedTaskMarker.get_key(operation.identifier, operation.namespace)
        )
        self.assertFalse(task_marker["is_finished"])
        self.assertNumTasksEquals(1)
        self.process_task_queues()
        task_marker = datastore.Get(
            ShardedTaskMarker.get_key(operation.identifier, operation.namespace)
        )
        self.assertTrue(task_marker["is_finished"])

        # Run the operation again.  It should see that it's finished and just return immediately.
        self.assertNumTasksEquals(0)
        with sleuth.watch("djangae.db.migrations.operations.AddFieldData._start_task") as start:
            self.start_operation(operation, detonate=False)
            self.assertFalse(start.called)
        self.assertNumTasksEquals(0)
        task_marker = datastore.Get(
            ShardedTaskMarker.get_key(operation.identifier, operation.namespace)
        )
        self.assertTrue(task_marker["is_finished"])
Example 5
    def test_running_finished_operation_does_not_trigger_new_task(self):
        """ If we re-trigger an operation which has already been run and finished, it should simply
            return without starting a new task or updating the task marker.
        """
        TestModel.objects.create()

        operation = operations.AddFieldData(
            "testmodel", "new_field", models.CharField(max_length=100, default="squirrel")
        )
        # Run the operation and check that it finishes
        with sleuth.watch("djangae.db.migrations.operations.AddFieldData._start_task") as start:
            self.start_operation(operation)
            self.assertTrue(start.called)
        task_marker = datastore.Get(
            ShardedTaskMarker.get_key(operation.identifier, operation.namespace)
        )
        self.assertFalse(task_marker["is_finished"])
        self.assertNumTasksEquals(1)
        self.process_task_queues()
        task_marker = datastore.Get(
            ShardedTaskMarker.get_key(operation.identifier, operation.namespace)
        )
        self.assertTrue(task_marker["is_finished"])

        # Run the operation again.  It should see that it's finished and just return immediately.
        self.assertNumTasksEquals(0)
        with sleuth.watch("djangae.db.migrations.operations.AddFieldData._start_task") as start:
            self.start_operation(operation, detonate=False)
            self.assertFalse(start.called)
        self.assertNumTasksEquals(0)
        task_marker = datastore.Get(
            ShardedTaskMarker.get_key(operation.identifier, operation.namespace)
        )
        self.assertTrue(task_marker["is_finished"])
Example 6
    def test_memcache_invalidates_when_the_request_ends(self):
        translation.activate("de")
        gettext("Hello World!") # Generates the cache

        # Wait for any background threads to finish
        while translations_loading():
            pass

        # Set the invalidation key
        key = _language_invalidation_key("de")
        cache.set(key, datetime.datetime.utcnow())

        # This shouldn't make a query, the invalidation hasn't applied yet
        with sleuth.watch("google.appengine.api.datastore.Query.Run") as query:
            trans = gettext("Goodbye World!")
            self.assertEqual(trans, "Auf Wiedersehen Welt!")
            self.assertFalse(query.called)

        # Run the finished signal
        invalidate_caches_if_necessary(None)

        # This should now cause a query
        with sleuth.watch("google.appengine.api.datastore.Query.Run") as query:
            trans = gettext("Goodbye World!")
            self.assertEqual(trans, "Auf Wiedersehen Welt!")
            self.assertTrue(query.called)
Example 7
    def test_memcache_invalidates_when_the_request_ends(self):
        translation.activate("de")
        gettext("Hello World!")  # Generates the cache

        # Wait for any background threads to finish
        while translations_loading():
            pass

        # Set the invalidation key
        key = _language_invalidation_key("de")
        cache.set(key, datetime.datetime.utcnow())

        # This shouldn't make a query, the invalidation hasn't applied yet
        with sleuth.watch("google.appengine.api.datastore.Query.Run") as query:
            trans = gettext("Goodbye World!")
            self.assertEqual(trans, "Auf Wiedersehen Welt!")
            self.assertFalse(query.called)

        # Run the finished signal
        invalidate_caches_if_necessary(None)

        # This should now cause a query
        with sleuth.watch("google.appengine.api.datastore.Query.Run") as query:
            trans = gettext("Goodbye World!")
            self.assertEqual(trans, "Auf Wiedersehen Welt!")
            self.assertTrue(query.called)
Example 8
    def test_bulk_cache(self):
        with sleuth.watch("django.core.cache.cache.set_many") as set_many_1:
            CachingTestModel.objects.create(field1="Apple", comb1=1, comb2="Cherry")
        self.assertEqual(set_many_1.call_count, 1)
        self.assertEqual(len(set_many_1.calls[0].args[0]), 3)

        with sleuth.watch("django.core.cache.cache.set_many") as set_many_2:
            CachingTestModel.objects.bulk_create([
                CachingTestModel(field1="Banana", comb1=2, comb2="Cherry"),
                CachingTestModel(field1="Orange", comb1=3, comb2="Cherry"),
            ])
        self.assertEqual(set_many_2.call_count, 1)
        self.assertEqual(len(set_many_2.calls[0].args[0]), 3*2)

        pks = list(CachingTestModel.objects.values_list('pk', flat=True))
        with sleuth.watch("django.core.cache.cache.set_many") as set_many_3:
            list(CachingTestModel.objects.filter(pk__in=pks).all())
        self.assertEqual(set_many_3.call_count, 1)
        self.assertEqual(len(set_many_3.calls[0].args[0]), 3*len(pks))

        with sleuth.watch("django.core.cache.cache.get_many") as get_many:
            with sleuth.watch("django.core.cache.cache.delete_many") as delete_many:
                CachingTestModel.objects.all().delete()

        self.assertEqual(get_many.call_count, 1)
        self.assertEqual(delete_many.call_count, 1)
        self.assertEqual(len(get_many.calls[0].args[0]), 3)  # Get by pk from cache
Example 9
    def test_consistent_read_updates_cache_outside_transaction(self):
        """
            A read inside a transaction shouldn't update the context cache outside that
            transaction
        """
        entity_data = {
            "field1": "Apple",
            "comb1": 1,
            "comb2": "Cherry"
        }

        original = CachingTestModel.objects.create(**entity_data)

        clear_context_cache()

        CachingTestModel.objects.get(pk=original.pk) # Should update the cache

        with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
            CachingTestModel.objects.get(pk=original.pk)

        self.assertFalse(datastore_get.called)

        clear_context_cache()

        with transaction.atomic():
            with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
                CachingTestModel.objects.get(pk=original.pk) # Should *not* update the cache
                self.assertTrue(datastore_get.called)

        with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
            CachingTestModel.objects.get(pk=original.pk)

        self.assertTrue(datastore_get.called)
Example 10
    def test_defer_uses_an_entity_group(self):
        with sleuth.watch('google.appengine.api.datastore.Put') as Put:
            defer(test_task)
            self.assertTrue(Put.called)

        with sleuth.watch('google.appengine.api.datastore.Put') as Put:
            defer(test_task, _small_task=True)
            self.assertFalse(Put.called)
Example 11
    def test_defer_uses_an_entity_group(self):
        with sleuth.watch('google.appengine.api.datastore.Put') as Put:
            defer(test_task)
            self.assertTrue(Put.called)

        with sleuth.watch('google.appengine.api.datastore.Put') as Put:
            defer(test_task, _small_task=True)
            self.assertFalse(Put.called)
Example 12
    def test_pk_queries_hit_the_context_cache(self):
        instance = UniqueModel.objects.create(unique_field="test") #Create an instance

        #With the context cache enabled, make sure we don't hit the DB
        with sleuth.watch("google.appengine.api.datastore.Query.Run") as rpc_run:
            with sleuth.watch("djangae.db.backends.appengine.caching.get_from_cache") as cache_hit:
                UniqueModel.objects.get(pk=instance.pk)
                self.assertTrue(cache_hit.called)
                self.assertFalse(rpc_run.called)
Example 13
    def test_non_atomic_context_manager(self):
        from .test_connector import TestUser
        existing = TestUser.objects.create(username="******",
                                           field2="exists")

        with transaction.atomic():
            self.assertTrue(transaction.in_atomic_block())

            user = TestUser.objects.create(username="******", field2="bar")

            with transaction.non_atomic():
                # We're outside the transaction, so the user should not exist
                self.assertRaises(TestUser.DoesNotExist,
                                  TestUser.objects.get,
                                  pk=user.pk)
                self.assertFalse(transaction.in_atomic_block())

                with sleuth.watch(
                        "google.appengine.api.datastore.Get") as datastore_get:
                    TestUser.objects.get(
                        pk=existing.pk
                    )  #Should hit the cache, not the datastore

                self.assertFalse(datastore_get.called)

            with transaction.atomic(independent=True):
                user2 = TestUser.objects.create(username="******", field2="bar2")
                self.assertTrue(transaction.in_atomic_block())

                with transaction.non_atomic():
                    self.assertFalse(transaction.in_atomic_block())
                    self.assertRaises(TestUser.DoesNotExist,
                                      TestUser.objects.get,
                                      pk=user2.pk)

                    with transaction.non_atomic():
                        self.assertFalse(transaction.in_atomic_block())
                        self.assertRaises(TestUser.DoesNotExist,
                                          TestUser.objects.get,
                                          pk=user2.pk)

                        with sleuth.watch("google.appengine.api.datastore.Get"
                                          ) as datastore_get:
                            TestUser.objects.get(
                                pk=existing.pk
                            )  #Should hit the cache, not the datastore

                    self.assertFalse(transaction.in_atomic_block())
                    self.assertRaises(TestUser.DoesNotExist,
                                      TestUser.objects.get,
                                      pk=user2.pk)

                self.assertTrue(TestUser.objects.filter(pk=user2.pk).exists())
                self.assertTrue(transaction.in_atomic_block())
Example 14
    def test_save_caches(self):
        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        original = CachingTestModel.objects.create(**entity_data)

        with sleuth.watch(
                "google.appengine.api.datastore.Get") as datastore_get:
            with sleuth.watch("django.core.cache.cache.get") as memcache_get:
                original = CachingTestModel.objects.get(pk=original.pk)

        self.assertFalse(datastore_get.called)
        self.assertFalse(memcache_get.called)
Example 15
    def test_valid_credentials_log_user(self):
        live_server_domain = self.live_server_url.split('://')[-1]
        session = self.client.session
        session['oauth-state'] = 'somestate'
        session[REDIRECT_FIELD_NAME] = '/next_url'
        session.save()

        fake_token = {
            'access_token': '9999',
            'token_type': 'Bearer',
            'expires_in': '30',
            'scope': ['some.scope another.scope'],
            'id_token': 'someencryptedstuff'
        }

        fake_profile = {
            'id': '1',
            'email': '*****@*****.**'
        }

        idinfo = {
            'sub': '1',
            'iss': 'accounts.google.com',
            'email': '*****@*****.**'
        }

        with sleuth.fake('djangae.contrib.googleauth.views.OAuth2Session.fetch_token', return_value=fake_token), \
                sleuth.fake('djangae.contrib.googleauth.views.OAuth2Session.authorized', return_value=True), \
                sleuth.fake('djangae.contrib.googleauth.views.OAuth2Session.get', return_value=fake_profile), \
                sleuth.watch('djangae.contrib.googleauth.backends.oauth2.OAuthBackend.authenticate') as mocked_auth, \
                sleuth.watch('django.contrib.auth.login') as mocked_login, \
                sleuth.fake('google.oauth2.id_token.verify_token', idinfo):

            state = json.dumps({
                "hostname": "app.com",
            })
            response = self.client.get(
                reverse("googleauth_oauth2callback"),
                data={"state": state},
                HTTP_HOST=live_server_domain
            )

        # check authenticate and login function are called
        self.assertTrue(mocked_auth.called)
        self.assertTrue(mocked_login.called)

        self.assertEqual(response.status_code, 302)
        self.assertTrue(session[REDIRECT_FIELD_NAME] in response.url)

        user = User.objects.get(email="*****@*****.**")
        self.assertFalse(user.has_usable_password())
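Example 15 mixes sleuth.fake, which replaces the target for the duration of the block and hands back a canned value, with sleuth.watch, which leaves the target's behaviour untouched and only records calls. A minimal sketch of the same combination, against a hypothetical myapp.payments module (not part of djangae or googleauth), might look like this:

import unittest

import sleuth

from myapp import payments  # hypothetical module, used only for illustration


class FakeAndWatchSketchTest(unittest.TestCase):

    def test_gateway_is_stubbed_but_logging_still_runs(self):
        # fake() swaps gateway_charge out entirely, so the canned value is
        # returned and the real code never runs; watch() on log_charge keeps
        # the real function in place but records each call it receives
        with sleuth.fake("myapp.payments.gateway_charge", return_value={"status": "ok"}), \
                sleuth.watch("myapp.payments.log_charge") as log_watch:
            self.assertEqual(payments.gateway_charge(10), {"status": "ok"})
            payments.log_charge(10)

        self.assertTrue(log_watch.called)
        self.assertEqual(log_watch.calls[0].args[0], 10)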
Example 16
 def test_queryset_instantiation_does_not_trigger_queries(self):
     """ The `contains` behaviour should only trigger DB calls when the queryset is evaluated,
         not when the queryset is created.
     """
     ContainsModel.objects.create(field1="Adam")
     with sleuth.watch("google.appengine.api.datastore.Query.Run") as datastore_query:
         with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
             queryset = ContainsModel.objects.filter(field1__contains="Ad")
             self.assertFalse(datastore_query.called)
             self.assertFalse(datastore_get.called)
             logging.debug('datastore_query.calls count: %d', len(datastore_query.calls))
             list(queryset)
             self.assertTrue(datastore_query.called)
             logging.debug('datastore_query.called count: %d', datastore_query.called)
             self.assertTrue(datastore_get.called)
Example 17
    def test_save_caches(self):
        entity_data = {
            "field1": "Apple",
            "comb1": 1,
            "comb2": "Cherry"
        }

        original = CachingTestModel.objects.create(**entity_data)

        with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
            with sleuth.watch("django.core.cache.cache.get") as memcache_get:
                original = CachingTestModel.objects.get(pk=original.pk)

        self.assertFalse(datastore_get.called)
        self.assertFalse(memcache_get.called)
Example 18
 def test_queryset_instantiation_does_not_trigger_queries(self):
     """ The `contains` behaviour should only trigger DB calls when the queryset is evaluated,
         not when the queryset is created.
     """
     ContainsModel.objects.create(field1="Adam")
     with sleuth.watch("google.appengine.api.datastore.Query.Run") as datastore_query:
         with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
             queryset = ContainsModel.objects.filter(field1__contains="Ad")
             self.assertFalse(datastore_query.called)
             self.assertFalse(datastore_get.called)
             print len(datastore_query.calls)
             list(queryset)
             self.assertTrue(datastore_query.called)
             print len(datastore_query.calls)
             self.assertTrue(datastore_get.called)
Example 19
    def test_save_caches(self):
        """ Test that after saving something, it exists in the context cache and therefore when we
            fetch it we don't hit memcache or the Datastore.
        """
        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        original = CachingTestModel.objects.create(**entity_data)

        with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
            with sleuth.watch("google.appengine.api.memcache.get") as memcache_get:
                with sleuth.watch("google.appengine.api.memcache.get_multi") as memcache_get_multi:
                    original = CachingTestModel.objects.get(pk=original.pk)

        self.assertFalse(datastore_get.called)
        self.assertFalse(memcache_get.called)
        self.assertFalse(memcache_get_multi.called)
Example 20
    def test_flag_prevents_backup(self):
        request = RequestFactory().get('/')

        with sleuth.watch(
                'djangae.contrib.backup.views.backup_datastore') as backup_fn:
            create_datastore_backup(request)
            self.assertFalse(backup_fn.called)
Example 21
    def test_save_inside_transaction_evicts_cache(self):
        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        identifiers = caching._apply_namespace(
            unique_utils.unique_identifiers_from_entity(
                CachingTestModel, FakeEntity(entity_data, id=222)),
            DEFAULT_NAMESPACE,
        )

        instance = CachingTestModel.objects.create(id=222, **entity_data)
        instance.refresh_from_db()  # Adds to memcache (consistent Get)

        for identifier in identifiers:
            self.assertEqual(entity_data, cache.get(identifier))

        with transaction.atomic():
            instance.field1 = "Banana"
            instance.save()

        # Make sure that altering inside the transaction evicted the item from the cache
        # and that a get then hits the datastore (which then in turn caches)
        with sleuth.watch(
                "google.appengine.api.datastore.Get") as datastore_get:
            for identifier in identifiers:
                self.assertIsNone(cache.get(identifier))

            self.assertEqual(
                "Banana",
                CachingTestModel.objects.get(pk=instance.pk).field1)
            self.assertTrue(datastore_get.called)
Example 22
    def test_save_inside_transaction_evicts_cache(self):
        entity_data = {
            "field1": "Apple",
            "comb1": 1,
            "comb2": "Cherry"
        }

        identifiers = caching._apply_namespace(
            unique_utils.unique_identifiers_from_entity(CachingTestModel, FakeEntity(entity_data, id=222)),
            DEFAULT_NAMESPACE,
        )

        instance = CachingTestModel.objects.create(id=222, **entity_data)
        for identifier in identifiers:
            self.assertEqual(entity_data, cache.get(identifier))

        with transaction.atomic():
            instance.field1 = "Banana"
            instance.save()

        # Make sure that altering inside the transaction evicted the item from the cache
        # and that a get then hits the datastore (which then in turn caches)
        with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
            for identifier in identifiers:
                self.assertIsNone(cache.get(identifier))

            self.assertEqual("Banana", CachingTestModel.objects.get(pk=instance.pk).field1)
            self.assertTrue(datastore_get.called)
Example 23
 def test_send_email(self):
     """ Test that sending an email using Django results in the email being sent through App
         Engine.
     """
     with sleuth.watch('djangae.mail.aeemail.EmailMessage.send') as gae_send:
         send_mail("Subject", "Hello", self._get_valid_sender_address(), ["*****@*****.**"])
         self.assertTrue(gae_send.called)
Example 24
    def test_transactions_get_their_own_context(self):
        with sleuth.watch(
                "djangae.db.backends.appengine.context.ContextStack.push"
        ) as context_push:
            with transaction.atomic():
                pass

            self.assertTrue(context_push.called)
Example 25
    def test_languages_cached(self):
        # This should make a query, because the translations were invalidated
        translation.activate("de")
        with sleuth.watch("google.appengine.api.datastore.Query.Run") as query:
            trans = gettext("Hello World!")
            self.assertEqual(trans, "Hallo Welt!")
            self.assertTrue(query.called)

        # Wait for any background threads to finish
        while translations_loading():
            pass

        # This shouldn't make a query
        with sleuth.watch("google.appengine.api.datastore.Query.Run") as query:
            trans = gettext("Goodbye World!")
            self.assertEqual(trans, "Auf Wiedersehen Welt!")
            self.assertFalse(query.called)
Example 26
    def test_languages_cached(self):
        # This should make a query, because the translations were invalidated
        translation.activate("de")
        with sleuth.watch("google.appengine.api.datastore.Query.Run") as query:
            trans = gettext("Hello World!")
            self.assertEqual(trans, "Hallo Welt!")
            self.assertTrue(query.called)

        # Wait for any background threads to finish
        while translations_loading():
            pass

        # This shouldn't make a query
        with sleuth.watch("google.appengine.api.datastore.Query.Run") as query:
            trans = gettext("Goodbye World!")
            self.assertEqual(trans, "Auf Wiedersehen Welt!")
            self.assertFalse(query.called)
Example 27
    def test_limit_correctly_applied_per_branch(self):
        MetaQueryTestModel.objects.create(field1="test")
        MetaQueryTestModel.objects.create(field1="test2")

        with sleuth.watch('google.appengine.api.datastore.Query.Run') as run_calls:

            list(MetaQueryTestModel.objects.filter(field1__in=["test", "test2"])[:1])

            self.assertEqual(1, run_calls.calls[0].kwargs['limit'])
            self.assertEqual(1, run_calls.calls[1].kwargs['limit'])

        with sleuth.watch('google.appengine.api.datastore.Query.Run') as run_calls:

            list(MetaQueryTestModel.objects.filter(field1__in=["test", "test2"])[1:2])

            self.assertEqual(2, run_calls.calls[0].kwargs['limit'])
            self.assertEqual(2, run_calls.calls[1].kwargs['limit'])
Example 28
    def test_unique_get_hits_cache(self):
        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        CachingTestModel.objects.create(**entity_data)

        with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
            CachingTestModel.objects.get(field1="Apple")

        self.assertFalse(datastore_get.called)
Example 29
 def test_count_issues_deprecation_warning(self):
     """ The RelatedShardManager should warn you when using the deprecated `count()` method
         because its purpose is unclear.
     """
     instance = ModelWithCounter.objects.create()
     self.assertEqual(instance.counter.value(), 0)
     with sleuth.watch("djangae.fields.counting.warnings.warn") as warn:
         instance.counter.count()
         self.assertTrue(warn.called)
Example 30
 def test_image_serving_url_is_secure(self):
     """ When we get a serving URL for an image, it should be https:// not http:// """
     instance = ModelWithImage(image=ContentFile('content', name='my_file'))
     instance.save()
     # Because we're not on production, get_serving_url() actually just returns a relative URL,
     # so we can't check the result; instead we check the call to get_serving_url
     with sleuth.watch("djangae.storage.get_serving_url") as watcher:
         instance.image.url  # access the URL to trigger the call to get_serving_url
     self.assertTrue(watcher.calls[0].kwargs['secure_url'])
Example 31
    def test_that_transactions_dont_inherit_context_cache(self):
        """
            It's fine to hit the context cache inside an independent transaction,
            providing that the cache doesn't inherit the outer cache! Otherwise we have
            a situation where the transaction never hits the database when reloading an entity
        """
        entity_data = {"field1": u"Apple", "comb1": 1, "comb2": u"Cherry"}

        instance = CachingTestModel.objects.create(**entity_data)

        with transaction.atomic():
            with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
                instance = CachingTestModel.objects.get(pk=instance.pk)
                self.assertEqual(1, datastore_get.call_count)  # Shouldn't hit the cache!
                instance.save()

            with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
                self.assertEqual(0, datastore_get.call_count)  # Should hit the cache
Example 32
 def test_count_issues_deprecation_warning(self):
     """ The RelatedShardManager should warn you when using the deprecated `count()` method
         because its purpose is unclear.
     """
     instance = ModelWithCounter.objects.create()
     self.assertEqual(instance.counter.value(), 0)
     with sleuth.watch("djangae.fields.counting.warnings.warn") as warn:
         instance.counter.count()
         self.assertTrue(warn.called)
Example 33
    def test_unique_filter_applies_all_filters(self):
        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        original = CachingTestModel.objects.create(**entity_data)

        with sleuth.watch("google.appengine.api.datastore.Query.Run") as datastore_query:
            # Expect no matches
            num_instances = CachingTestModel.objects.filter(field1="Apple", comb1=0).count()
            self.assertEqual(num_instances, 0)
Example 34
    def test_login_called_when_necessary(self, verify_token_mock):
        """
            login should only be called if:

             - There is no user ID stored in request.session
             - The user ID in request.session does not match the authenticated user
        """

        user = '******'
        user_email = '*****@*****.**'
        verify_token_mock.return_value = {
            'sub': f'auth.example.com:{user}',
            'email': user_email,
        }

        headers = {
            'HTTP_X_GOOG_AUTHENTICATED_USER_ID': f'auth.example.com:{user}',
            'HTTP_X_GOOG_AUTHENTICATED_USER_EMAIL':
            f'auth.example.com:{user_email}',
            'HTTP_X_GOOG_IAP_JWT_ASSERTION': 'JWT',
        }

        # First authentication, login should be called
        with sleuth.watch(
                "djangae.contrib.googleauth.middleware.login") as login:
            self.client.get("/", **headers)
            self.assertTrue(User.objects.exists())
            self.assertTrue(login.called)

        # Already logged-in, login shouldn't be called
        with sleuth.watch(
                "djangae.contrib.googleauth.middleware.login") as login:
            self.client.get("/", **headers)
            self.assertFalse(login.called)

        session = self.client.session
        session[SESSION_KEY] = 1
        session.save()

        # Mismatched user ID, login should be called again
        with sleuth.watch(
                "djangae.contrib.googleauth.middleware.login") as login:
            self.client.get("/", **headers)
            self.assertTrue(login.called)
Example 35
    def test_insert_then_unique_query_returns_from_cache(self):
        UniqueModel.objects.create(unique_field="test")  # Create an instance

        # With the context cache enabled, make sure we don't hit the DB
        with sleuth.watch("google.appengine.api.datastore.Query.Run") as rpc_wrapper:
            with sleuth.watch("djangae.db.backends.appengine.caching.get_from_cache") as cache_hit:
                instance_from_cache = UniqueModel.objects.get(unique_field="test")
                self.assertTrue(cache_hit.called)
                self.assertFalse(rpc_wrapper.called)

        # Disable the context cache, make sure that we hit the database
        with caching.disable_context_cache():
            with sleuth.watch("google.appengine.api.datastore.Query.Run") as rpc_wrapper:
                with sleuth.watch("djangae.db.backends.appengine.caching.get_from_cache") as cache_hit:
                    instance_from_database = UniqueModel.objects.get(unique_field="test")
                    self.assertTrue(cache_hit.called)
                    self.assertTrue(rpc_wrapper.called)

        self.assertEqual(instance_from_cache, instance_from_database)
Example 36
    def test_new_objects_with_an_explicit_acl(self):
        storage = CloudStorage(google_acl="public-read")
        filename = "example.txt"
        fileobj = ContentFile("content", name=filename)

        with sleuth.watch("cloudstorage.open") as open_func:
            storage.save(filename, fileobj)

        self.assertTrue(storage.exists(filename))
        self.assertEqual(open_func.calls[0].kwargs["options"], {"x-goog-acl": "public-read"})
Example 37
    def test_that_marker_is_read(self):
        paginator = Paginator(TestUser.objects.all().order_by("first_name"), 1, readahead=1)
        paginator.page(2)

        with sleuth.watch("djangae.contrib.pagination.paginator._get_marker") as get_marker:
            paginator.page(4)

            self.assertTrue(get_marker.called)
            self.assertIsNotNone(get_marker.call_returns[0][0])
            self.assertEqual(1, get_marker.call_returns[0][1])
Example 38
    def test_really_long_string(self):
        long_string = "".join(["A"] * 1501)
        instance1 = CCollationModel.objects.create(field2=long_string)

        with sleuth.watch("djangae.fields.language.logger.warn") as warn:
            try:
                instance1.save()
                self.assertTrue(warn.called)
            except datastore_errors.BadRequestError:
                self.fail("Threw bad request when saving collation key")
Example 39
    def test_unique_filter_hits_cache(self):
        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        CachingTestModel.objects.create(**entity_data)

        with sleuth.watch(
                "djangae.db.backends.appengine.rpc.Get") as datastore_get:
            list(CachingTestModel.objects.filter(field1="Apple"))

        self.assertFalse(datastore_get.called)
Example 40
    def test_get_by_key_hits_cache(self):
        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        original = CachingTestModel.objects.create(**entity_data)

        with sleuth.watch(
                "djangae.db.backends.appengine.rpc.Get") as datastore_get:
            CachingTestModel.objects.get(pk=original.pk)

        self.assertFalse(datastore_get.called)
Example 41
    def test_no_previous_on_first_page(self):
        with sleuth.watch('djangae.db.backends.appengine.commands.datastore.Query.Run') as query:
            paginator = Paginator(SimplePaginatedModel.objects.all(), 2)

            self.assertFalse(query.called)
            page = paginator.page(1)
            self.assertFalse(page.has_previous())

            page = paginator.page(2)
            self.assertTrue(page.has_previous())
Example 42
    def test_non_unique_filter_hits_datastore(self):
        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        original = CachingTestModel.objects.create(**entity_data)

        with sleuth.watch("google.appengine.api.datastore.Query.Run") as datastore_query:
            instance = CachingTestModel.objects.filter(comb1=1).all()[0]
            self.assertEqual(original, instance)

        self.assertTrue(datastore_query.called)
Example 43
    def test_really_long_string(self):
        long_string = "".join(["A"] * 1501)
        instance1 = CCollationModel.objects.create(field2=long_string)

        with sleuth.watch("djangae.fields.language.logger.warn") as warn:
            try:
                instance1.save()
                self.assertTrue(warn.called)
            except datastore_errors.BadRequestError:
                self.fail("Threw bad request when saving collation key")
Example 44
    def test_unique_get_hits_cache(self):
        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        CachingTestModel.objects.create(**entity_data)

        with sleuth.watch(
                "google.appengine.api.datastore.Get") as datastore_get:
            CachingTestModel.objects.get(field1="Apple")

        self.assertFalse(datastore_get.called)
Example 45
 def test_queryset_instantiation_does_not_trigger_queries(self):
     """ The `contains` behaviour should only trigger DB calls when the queryset is evaluated,
         not when the queryset is created.
     """
     ContainsModel.objects.create(field1="Adam")
     with sleuth.watch("djangae.db.backends.appengine.rpc.Query.Run"
                       ) as datastore_query:
         with sleuth.watch(
                 "djangae.db.backends.appengine.rpc.Get") as datastore_get:
             queryset = ContainsModel.objects.filter(field1__contains="Ad")
             self.assertFalse(datastore_query.called)
             self.assertFalse(datastore_get.called)
             logging.debug('datastore_query.calls count: %d',
                           len(datastore_query.calls))
             list(queryset)
             self.assertTrue(datastore_query.called)
             logging.debug('datastore_query.called count: %d',
                           datastore_query.called)
             self.assertTrue(datastore_get.called)
Example 46
    def test_that_marker_is_read(self):
        paginator = Paginator(TestUser.objects.all().order_by("first_name"), 1, readahead=1)
        paginator.page(2)

        with sleuth.watch("djangae.contrib.pagination.paginator._get_marker") as get_marker:
            paginator.page(4)

            self.assertTrue(get_marker.called)
            self.assertIsNotNone(get_marker.call_returns[0][0])
            self.assertEqual(1, get_marker.call_returns[0][1])
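Example 46 also relies on a watch recording return values: call_returns holds one entry per recorded call, in call order, containing whatever the watched function returned. A minimal sketch against a hypothetical myapp.maths.add helper (not part of djangae) might look like this:

import unittest

import sleuth

from myapp import maths  # hypothetical module with an add(a, b) helper


class CallReturnsSketchTest(unittest.TestCase):

    def test_call_returns_holds_each_result(self):
        with sleuth.watch("myapp.maths.add") as add_watch:
            maths.add(1, 2)
            maths.add(3, 4)

        # one entry per call, in call order, holding the real return value
        self.assertEqual(add_watch.call_returns[0], 3)
        self.assertEqual(add_watch.call_returns[1], 7)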
Example 47
    def test_save_caches(self):
        """ Test that after saving something, it exists in the context cache and therefore when we
            fetch it we don't hit memcache or the Datastore.
        """
        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        original = CachingTestModel.objects.create(**entity_data)

        with sleuth.watch(
                "google.appengine.api.datastore.Get") as datastore_get:
            with sleuth.watch(
                    "google.appengine.api.memcache.get") as memcache_get:
                with sleuth.watch("google.appengine.api.memcache.get_multi"
                                  ) as memcache_get_multi:
                    original = CachingTestModel.objects.get(pk=original.pk)

        self.assertFalse(datastore_get.called)
        self.assertFalse(memcache_get.called)
        self.assertFalse(memcache_get_multi.called)
Example 48
 def test_get_params_propogate(self):
     request = RequestFactory().get('/?kind=django_admin_log&bucket=foobar')
     with sleuth.watch(
             'djangae.contrib.backup.views.backup_datastore') as backup_fn:
         create_datastore_backup(request)
         self.assertTrue(backup_fn.called)
         self.assertEqual(backup_fn.calls[0][1], {
             'bucket': 'foobar',
             'kinds': [u'django_admin_log']
         })
Example 49
    def test_unique_filter_applies_all_filters(self):
        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        original = CachingTestModel.objects.create(**entity_data)

        with sleuth.watch(
                "google.appengine.api.datastore.Query.Run") as datastore_query:
            # Expect no matches
            num_instances = CachingTestModel.objects.filter(field1="Apple",
                                                            comb1=0).count()
            self.assertEqual(num_instances, 0)
Example 50
    def test_get_by_key_hits_datastore_inside_transaction(self):
        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        original = CachingTestModel.objects.create(**entity_data)

        with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
            with transaction.atomic():
                instance = CachingTestModel.objects.get(pk=original.pk)
            self.assertEqual(original, instance)

        self.assertTrue(datastore_get.called)
Example 51
    def test_unique_get_hits_memcache(self):
        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        original = CachingTestModel.objects.create(**entity_data)
        original.refresh_from_db()

        with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
            instance = CachingTestModel.objects.get(field1="Apple")
            self.assertEqual(original, instance)

        self.assertFalse(datastore_get.called)
Example 52
    def test_non_unique_filter_hits_datastore(self):
        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        original = CachingTestModel.objects.create(**entity_data)

        with sleuth.watch(
                "google.appengine.api.datastore.Query.Run") as datastore_query:
            instance = CachingTestModel.objects.filter(comb1=1).all()[0]
            self.assertEqual(original, instance)

        self.assertTrue(datastore_query.called)
Example 53
    def test_get_by_key_hits_memcache(self):
        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        original = CachingTestModel.objects.create(**entity_data)

        with sleuth.watch(
                "google.appengine.api.datastore.Get") as datastore_get:
            instance = CachingTestModel.objects.get(pk=original.pk)
            self.assertEqual(original, instance)

        self.assertFalse(datastore_get.called)
Example 54
 def test_image_serving_url_is_secure(self):
     """ When we get a serving URL for an image, it should be https:// not http:// """
     storage = BlobstoreStorage()
     # Save a new file
     f = ContentFile('content', name='my_file')
     filename = storage.save('tmp', f)
     # Because we're not on production, get_serving_url() actually just returns a relative URL,
     # so we can't check the result; instead we check the call to get_serving_url
     with sleuth.watch("djangae.storage.get_serving_url") as watcher:
         storage.url(filename)  # access the URL to trigger the call to get_serving_url
     self.assertTrue(watcher.calls[0].kwargs['secure_url'])
Example 55
    def test_new_objects_get_the_default_acl(self):
        storage = CloudStorage()
        filename = 'example.txt'
        fileobj = ContentFile('content', name=filename)

        with sleuth.watch('cloudstorage.open') as open_func:
            storage.save(filename, fileobj)

        self.assertTrue(storage.exists(filename))
        # There's no x-goog-acl argument, so default perms are applied.
        self.assertEqual(open_func.calls[0].kwargs['options'], {})
Example 56
    def test_new_objects_get_the_default_acl(self):
        storage = CloudStorage()
        filename = "example.txt"
        fileobj = ContentFile("content", name=filename)

        with sleuth.watch("cloudstorage.open") as open_func:
            storage.save(filename, fileobj)

        self.assertTrue(storage.exists(filename))
        # There's no x-goog-acl argument, so default perms are applied.
        self.assertEqual(open_func.calls[0].kwargs["options"], {})
Example 57
    def test_non_atomic_context_manager(self):
        from .test_connector import TestUser
        existing = TestUser.objects.create(username="******", field2="exists")

        with transaction.atomic():
            self.assertTrue(transaction.in_atomic_block())

            user = TestUser.objects.create(username="******", field2="bar")

            with transaction.non_atomic():
                # We're outside the transaction, so the user should not exist
                self.assertRaises(TestUser.DoesNotExist, TestUser.objects.get, pk=user.pk)
                self.assertFalse(transaction.in_atomic_block())

                with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
                    TestUser.objects.get(pk=existing.pk)  # Should hit the cache, not the datastore

                self.assertFalse(datastore_get.called)

            with transaction.atomic(independent=True):
                user2 = TestUser.objects.create(username="******", field2="bar2")
                self.assertTrue(transaction.in_atomic_block())

                with transaction.non_atomic():
                    self.assertFalse(transaction.in_atomic_block())
                    self.assertRaises(TestUser.DoesNotExist, TestUser.objects.get, pk=user2.pk)

                    with transaction.non_atomic():
                        self.assertFalse(transaction.in_atomic_block())
                        self.assertRaises(TestUser.DoesNotExist, TestUser.objects.get, pk=user2.pk)

                        with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
                            # Should hit the cache, not the Datastore
                            TestUser.objects.get(pk=existing.pk)

                    self.assertFalse(transaction.in_atomic_block())
                    self.assertRaises(TestUser.DoesNotExist, TestUser.objects.get, pk=user2.pk)

                self.assertTrue(TestUser.objects.filter(pk=user2.pk).exists())
                self.assertTrue(transaction.in_atomic_block())
Example 58
    def test_new_objects_with_an_explicit_acl(self):
        storage = CloudStorage(google_acl='public-read')
        filename = 'example.txt'
        fileobj = ContentFile('content', name=filename)

        with sleuth.watch('cloudstorage.open') as open_func:
            storage.save(filename, fileobj)

        self.assertTrue(storage.exists(filename))
        self.assertEqual(
            open_func.calls[0].kwargs['options'],
            {'x-goog-acl': 'public-read'},
        )