def test_repeated_usage_in_a_loop(self):
        pk = TestUser.objects.create(username="******").pk
        for i in range(4):
            with transaction.atomic(xg=True):
                TestUser.objects.get(pk=pk)
                continue

        with transaction.atomic(xg=True):
            TestUser.objects.get(pk=pk)
    def test_atomic_context_manager(self):
        """An exception raised inside an atomic block rolls back its writes."""
        with self.assertRaises(ValueError), transaction.atomic():
            TestUser.objects.create(username="******", field2="bar")
            raise ValueError()

        # The create above must have been undone by the rollback.
        self.assertEqual(TestUser.objects.count(), 0)
    def test_non_atomic_context_manager(self):
        # Verifies that transaction.non_atomic() temporarily escapes the
        # enclosing transaction (uncommitted writes become invisible) while
        # still serving already-cached entities from the context cache.
        existing = TestUser.objects.create(username="******", field2="exists", first_name="one", second_name="one")

        with transaction.atomic():
            self.assertTrue(transaction.in_atomic_block())

            user = TestUser.objects.create(username="******", field2="bar", first_name="two", second_name="two")

            with transaction.non_atomic():
                # We're outside the transaction, so the user should not exist
                self.assertRaises(TestUser.DoesNotExist, TestUser.objects.get, pk=user.pk)
                self.assertFalse(transaction.in_atomic_block())

                with sleuth.watch("google.cloud.datastore.client.Client.get") as datastore_get:
                    TestUser.objects.get(pk=existing.pk)  # Should hit the cache, not the datastore

                self.assertFalse(datastore_get.called)

            # An independent transaction gets its own isolated state,
            # separate from the outer atomic block.
            with transaction.atomic(independent=True):
                user2 = TestUser.objects.create(username="******", field2="bar2", first_name="three", second_name="three")
                self.assertTrue(transaction.in_atomic_block())

                with transaction.non_atomic():
                    self.assertFalse(transaction.in_atomic_block())
                    self.assertRaises(TestUser.DoesNotExist, TestUser.objects.get, pk=user2.pk)

                    # Nesting non_atomic inside non_atomic behaves the same way.
                    with transaction.non_atomic():
                        self.assertFalse(transaction.in_atomic_block())
                        self.assertRaises(TestUser.DoesNotExist, TestUser.objects.get, pk=user2.pk)

                        with sleuth.watch("google.cloud.datastore.client.Client.get") as datastore_get:
                            # Should hit the cache, not the Datastore
                            TestUser.objects.get(pk=existing.pk)
                            self.assertFalse(datastore_get.called)

                    # Still non-atomic after the inner block exits.
                    self.assertFalse(transaction.in_atomic_block())
                    self.assertRaises(TestUser.DoesNotExist, TestUser.objects.get, pk=user2.pk)

                # Should hit the cache
                self.assertTrue(TestUser.objects.filter(pk=user2.pk).exists())
                self.assertTrue(transaction.in_atomic_block())

        # All blocks have exited; we're no longer in a transaction.
        self.assertFalse(transaction.in_atomic_block())
Example #4
0
    def add(self, document_or_documents):
        """
            Add a document, or documents to the index.

            Returns the IDs of *new* documents that have been
            added. If document_or_documents was a list, the result
            will also be a list (a scalar id, or 0 if nothing new
            was added, otherwise).
        """

        added_document_ids = []

        # Normalise the input to a list, remembering whether the caller
        # passed a single Document so we can unwrap the result again.
        if isinstance(document_or_documents, Document):
            was_list = False
            documents = [document_or_documents]
        else:
            was_list = True
            documents = document_or_documents[:]  # Copy so we don't mutate the caller's list

        # First-pass validation
        self._validate_documents(documents)

        with transaction.atomic(independent=True):
            for document in documents:
                # We go through the document fields, pull out the values that have been set
                # then we index them.
                field_data = {
                    f: getattr(document,
                               document.get_field(f).attname)
                    for f in document.get_fields() if f != "id"
                }

                # Generate a database representation of this Document use
                # the passed ID if there is one.
                # (The previous `record = document._record` assignment was dead
                # code: `record` was unconditionally reassigned here.)
                record, created = DocumentRecord.objects.update_or_create(
                    pk=document.id,
                    defaults={
                        "index_stats": self.index,
                        "data": field_data
                    })
                document.id = record.id
                document._record = record

                if created:
                    index_document(self.name, document)
                    added_document_ids.append(record.id)
                else:
                    # This wipes out any existing document, bumps the revision
                    # and then indexes this one
                    reindex_document(document)

        # Single-document calls get a scalar back; 0 means "nothing new added".
        return added_document_ids if was_list else (
            added_document_ids[0] if added_document_ids else 0)
Example #5
0
    def test_transactional_defer(self):
        """A _transactional defer is only enqueued when its transaction commits."""
        with self.assertRaises(ValueError):
            with transaction.atomic():
                defer(create_defer_model_b, 1, _transactional=True)
                raise ValueError()  # Rollback the transaction

        self.process_task_queues()

        # The rolled-back transaction must not have enqueued the task.
        self.assertEqual(DeferModelB.objects.count(), 0)

        with transaction.atomic():
            defer(create_defer_model_b, 1, _transactional=True)
            self.process_task_queues()
            # Nothing yet: the task isn't enqueued until commit.
            self.assertEqual(DeferModelB.objects.count(), 0)

        # After commit the deferred task runs and creates the object.
        self.process_task_queues()
        self.assertEqual(DeferModelB.objects.count(), 1)
    def test_has_already_read(self):
        """has_already_been_read() tracks which instances each transaction has read."""
        apple = TestFruit.objects.create(name="Apple", color="Red")
        pear = TestFruit.objects.create(name="Pear", color="Green")

        with transaction.atomic(xg=True) as outer_txn:
            # Nothing has been read yet in this transaction.
            self.assertFalse(outer_txn.has_already_been_read(apple))
            self.assertFalse(outer_txn.has_already_been_read(pear))

            apple.refresh_from_db()  # Counts as a read of `apple`

            self.assertTrue(outer_txn.has_already_been_read(apple))
            self.assertFalse(outer_txn.has_already_been_read(pear))

            with transaction.atomic(xg=True) as nested_txn:
                # A nested (non-independent) atomic shares the read state.
                self.assertTrue(nested_txn.has_already_been_read(apple))
                self.assertFalse(nested_txn.has_already_been_read(pear))
                pear.refresh_from_db()
                self.assertTrue(nested_txn.has_already_been_read(pear))

                with transaction.atomic(independent=True) as independent_txn:
                    # An independent transaction starts with a clean slate.
                    self.assertFalse(independent_txn.has_already_been_read(apple))
                    self.assertFalse(independent_txn.has_already_been_read(pear))
    def test_enable_cache_argument(self):
        """atomic(enable_cache=False) makes reads bypass the context cache."""
        created = TestUser.objects.create(username="******", first_name="Randy")

        with sleuth.watch('gcloudc.db.backends.datastore.context.CacheDict.get') as cache_get:
            TestUser.objects.get(username="******")
            # The first fetch goes through the context cache.
            self.assertEqual(1, cache_get.call_count)

            with transaction.atomic(enable_cache=False):
                created.first_name = "Łukasz"
                created.save()

                fetched = TestUser.objects.get(username="******")
                # The read bypassed the cache (call count unchanged), so the
                # in-transaction save above is not reflected in the result.
                self.assertEqual("Randy", fetched.first_name)
                self.assertEqual(1, cache_get.call_count)
    def test_refresh_if_unread(self):
        """refresh_if_unread() reloads an instance only on its first read in a transaction."""
        apple = TestFruit.objects.create(name="Apple", color="Red")

        with transaction.atomic() as txn:
            apple.color = "Pink"

            # Unread so far in this transaction, so this reloads from the database.
            txn.refresh_if_unread(apple)

            self.assertEqual(apple.name, "Apple")
            # The refresh must have discarded the local "Pink" edit.
            # (Previously only `name` was asserted, which never changed,
            # so the refresh itself went unverified.)
            self.assertEqual(apple.color, "Red")

            apple.color = "Pink"

            # Already been read this transaction, don't read it again!
            txn.refresh_if_unread(apple)

            self.assertEqual(apple.color, "Pink")
Example #9
0
def index_document(index_name, document):
    """
        Index every indexed field of a saved document under index_name.

        Each indexed field value is normalized, tokenized and de-duplicated;
        one TokenFieldIndex entry is created per cleaned token, each inside
        its own independent transaction.
    """
    assert (document.id)  # This should be a thing by now

    for field_name, field in document.get_fields().items():
        if field_name == "id":
            continue

        if not field.index:
            # Some fields are just stored, not indexed
            continue

        # Get the field value, use the default if it's not set
        value = getattr(document, field.attname, None)
        value = field.default if value is None else value
        value = field.normalize_value(value)

        # Tokenize the value, this will effectively mean lower-casing
        # removing punctuation etc. and returning a list of things
        # to index
        tokens = field.tokenize_value(value)

        if tokens is None:
            # Nothing to index
            continue

        tokens = set(tokens)  # Remove duplicates

        for token in tokens:
            token = field.clean_token(token)
            if token is None or token == '':
                # Cleaning rejected the token entirely
                continue

            with transaction.atomic(independent=True):
                # FIXME: Update occurrances
                key = TokenFieldIndex.generate_key(index_name, token,
                                                   field.attname, document.id,
                                                   document.revision)

                # get_or_create keyed on the deterministic pk makes re-indexing
                # idempotent; the returned instance isn't needed, so the
                # previously-unused `obj, _` binding has been dropped.
                TokenFieldIndex.objects.get_or_create(
                    pk=key,
                    defaults=dict(record_id=document.id,
                                  revision=document.revision,
                                  token=token,
                                  index_stats_id=index_name,
                                  field_name=field.attname))
Example #10
0
    def add(self, document_or_documents):
        """
            Add a document, or documents to the index.

            Returns the IDs of *new* documents that have been
            added. If document_or_documents was a list, the result
            will also be a list (a scalar id, or 0 if nothing new
            was added, otherwise).
        """

        from .models import (  # Prevent import too early
            DocumentRecord, TokenFieldIndex,
        )

        added_document_ids = []

        # Normalise the input to a list, remembering whether the caller
        # passed a single Document so we can unwrap the result again.
        if isinstance(document_or_documents, Document):
            was_list = False
            documents = [document_or_documents]
        else:
            was_list = True
            documents = document_or_documents[:]  # Copy so we don't mutate the caller's list

        # First-pass validation
        self._validate_documents(documents)

        with transaction.atomic(independent=True):
            for document in documents:
                # We go through the document fields, pull out the values that have been set
                # then we index them.
                field_data = {
                    f: getattr(document,
                               document.get_field(f).attname)
                    for f in document.get_fields() if f != "id"
                }

                record = document._record

                created = False
                if record is None:
                    # Generate a database representation of this Document use
                    # the passed ID if there is one
                    record, created = DocumentRecord.objects.get_or_create(
                        pk=document.id,
                        defaults={
                            "index_stats": self.index,
                            "data": field_data
                        })
                    document.id = record.id
                    document._record = record

                if created:
                    added_document_ids.append(record.id)
                else:
                    # Existing record: refresh its stored field data
                    record.data = field_data

                assert (document.id)  # This should be a thing by now

                for field_name, field in document.get_fields().items():
                    if field_name == "id":
                        continue

                    if not field.index:
                        # Some fields are just stored, not indexed
                        continue

                    # Get the field value, use the default if it's not set
                    value = getattr(document, field.attname, None)
                    value = field.default if value is None else value
                    value = field.normalize_value(value)

                    # Tokenize the value, this will effectively mean lower-casing
                    # removing punctuation etc. and returning a list of things
                    # to index
                    tokens = field.tokenize_value(value)

                    if tokens is None:
                        # Nothing to index
                        continue

                    tokens = set(tokens)  # Remove duplicates

                    for token in tokens:
                        token = field.clean_token(token)
                        if token is None:
                            continue

                        assert (token.strip())

                        # FIXME: Update occurrances
                        try:
                            obj = TokenFieldIndex.objects.get(
                                record_id=document.id,
                                token=token,
                                index_stats=self.index,
                                field_name=field.attname)
                        except TokenFieldIndex.DoesNotExist:
                            obj = TokenFieldIndex.objects.create(
                                record_id=document.id,
                                index_stats=self.index,
                                token=token,
                                field_name=field.attname)
                        record.token_field_indexes.add(obj)
                record.save()

        # Single-document calls get a scalar back; 0 means "nothing new added".
        # Guarding the [0] access fixes an IndexError when a single *existing*
        # document was re-added (added_document_ids is empty in that case),
        # matching the behaviour of the sibling implementation.
        return added_document_ids if was_list else (
            added_document_ids[0] if added_document_ids else 0)