Example #1
def add_entity_to_context_cache(model, entity):
    identifiers = unique_identifiers_from_entity(model, entity)

    for identifier in identifiers:
        context.cache[identifier] = entity

    context.reverse_cache[entity.key()] = identifiers
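
For context, a self-contained sketch of the two-way mapping the snippet above maintains (unique identifier -> entity, and entity key -> identifiers); SimpleContext is a stand-in invented here for illustration, not a djangae class:

class SimpleContext(object):
    """Toy stand-in for the per-request context cache used above."""
    def __init__(self):
        self.cache = {}          # unique identifier -> entity
        self.reverse_cache = {}  # entity key -> list of identifiers

    def add(self, identifiers, entity, key):
        for identifier in identifiers:
            self.cache[identifier] = entity
        self.reverse_cache[key] = list(identifiers)

    def remove_by_key(self, key):
        # Drop every identifier that maps back to this key.
        for identifier in self.reverse_cache.pop(key, []):
            self.cache.pop(identifier, None)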
Example #2
    def test_eventual_read_doesnt_update_memcache(self):
        entity_data = {
            "field1": "Apple",
            "comb1": 1,
            "comb2": "Cherry"
        }

        identifiers = caching._apply_namespace(
            unique_utils.unique_identifiers_from_entity(CachingTestModel, FakeEntity(entity_data, id=222)),
            DEFAULT_NAMESPACE,
        )

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))

        CachingTestModel.objects.create(id=222, **entity_data)

        for identifier in identifiers:
            self.assertEqual(entity_data, cache.get(identifier))

        cache.clear()

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))

        CachingTestModel.objects.all()[0] # Inconsistent read

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))
Example #3
    def test_consistent_read_updates_memcache_outside_transaction(self):
        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        identifiers = caching._apply_namespace(
            unique_utils.unique_identifiers_from_entity(
                CachingTestModel, FakeEntity(entity_data, id=222)),
            DEFAULT_NAMESPACE,
        )

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))

        instance = CachingTestModel.objects.create(id=222, **entity_data)
        instance.refresh_from_db()  # Add to memcache (consistent Get)

        for identifier in identifiers:
            self.assertEqual(entity_data, cache.get(identifier))

        cache.clear()

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))

        CachingTestModel.objects.get(id=222)  # Consistent read

        for identifier in identifiers:
            self.assertEqual(entity_data, cache.get(identifier))
Example #4
    def test_consistent_read_updates_memcache_outside_transaction(self):
        entity_data = {
            "field1": "Apple",
            "comb1": 1,
            "comb2": "Cherry"
        }

        identifiers = unique_utils.unique_identifiers_from_entity(CachingTestModel, FakeEntity(entity_data, id=222))

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))

        CachingTestModel.objects.create(id=222, **entity_data)

        for identifier in identifiers:
            self.assertEqual(entity_data, cache.get(identifier))

        cache.clear()

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))

        CachingTestModel.objects.get(id=222) # Consistent read

        for identifier in identifiers:
            self.assertEqual(entity_data, cache.get(identifier))
Example #5
def add_entity_to_cache(model, entity, situation, skip_memcache=False):
    ensure_context()

    identifiers = unique_identifiers_from_entity(model, entity)

    # Don't cache on Get if we are inside a transaction, even in the context
    # This is because transactions don't see the current state of the datastore
    # We can still cache in the context on Put() but not in memcache
    if situation == CachingSituation.DATASTORE_GET and datastore.IsInTransaction():
        return

    if (
        situation in (CachingSituation.DATASTORE_PUT, CachingSituation.DATASTORE_GET_PUT)
        and datastore.IsInTransaction()
    ):
        # We have to wipe the entity from memcache
        if entity.key():
            _remove_entity_from_memcache_by_key(entity.key())

    get_context().stack.top.cache_entity(identifiers, entity, situation)

    # Only cache in memcache if we are doing a GET (outside a transaction) or a PUT (outside a transaction).
    # The exception is GET_PUT, which we do in our own transaction, so the transaction check is ignored for it.
    if (
        not datastore.IsInTransaction()
        and situation in (CachingSituation.DATASTORE_GET, CachingSituation.DATASTORE_PUT)
    ) or situation == CachingSituation.DATASTORE_GET_PUT:

        if not skip_memcache:
            _add_entity_to_memcache(model, entity, identifiers)
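
The comments in add_entity_to_cache above encode a small decision rule for when memcache is updated; here is a standalone restatement as a sketch (ignoring skip_memcache), using plain string constants in place of djangae's CachingSituation values:

DATASTORE_GET = "get"
DATASTORE_PUT = "put"
DATASTORE_GET_PUT = "get_put"

def should_cache_in_memcache(situation, in_transaction):
    # GET and PUT update memcache only outside a transaction; GET_PUT always
    # does, because it runs inside its own transaction (see the comments above).
    if situation in (DATASTORE_GET, DATASTORE_PUT):
        return not in_transaction
    return situation == DATASTORE_GET_PUT

assert should_cache_in_memcache(DATASTORE_GET, in_transaction=False)
assert not should_cache_in_memcache(DATASTORE_PUT, in_transaction=True)
assert should_cache_in_memcache(DATASTORE_GET_PUT, in_transaction=True)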
Example #6
    def test_transactional_save_wipes_the_cache_only_after_its_result_is_consistently_available(
            self):
        entity_data = {
            "field1": "old",
        }
        identifiers = unique_utils.unique_identifiers_from_entity(
            CachingTestModel, FakeEntity(entity_data, id=222))

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))

        instance = CachingTestModel.objects.create(id=222, **entity_data)

        for identifier in identifiers:
            self.assertEqual("old", cache.get(identifier)["field1"])

        @non_transactional
        def non_transactional_read(instance_pk):
            CachingTestModel.objects.get(pk=instance_pk)

        with transaction.atomic():
            instance.field1 = "new"
            instance.save()
            non_transactional_read(
                instance.pk)  # could potentially recache the old object

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))
Example #7
    def test_save_caches_outside_transaction_only(self):
        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        identifiers = caching._apply_namespace(
            unique_utils.unique_identifiers_from_entity(
                CachingTestModel, FakeEntity(entity_data, id=222)),
            DEFAULT_NAMESPACE,
        )

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))

        instance = CachingTestModel.objects.create(id=222, **entity_data)
        instance.refresh_from_db()

        for identifier in identifiers:
            self.assertEqual(entity_data, cache.get(identifier))

        instance.delete()

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))

        with transaction.atomic():
            instance = CachingTestModel.objects.create(**entity_data)

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))
Example #8
    def test_save_inside_transaction_evicts_cache(self):
        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        identifiers = unique_utils.unique_identifiers_from_entity(
            CachingTestModel, FakeEntity(entity_data, id=222))

        instance = CachingTestModel.objects.create(id=222, **entity_data)
        for identifier in identifiers:
            self.assertEqual(entity_data, cache.get(identifier))

        with transaction.atomic():
            instance.field1 = "Banana"
            instance.save()

        # Make sure that altering inside the transaction evicted the item from the cache
        # and that a get then hits the datastore (which then in turn caches)
        with sleuth.watch(
                "google.appengine.api.datastore.Get") as datastore_get:
            for identifier in identifiers:
                self.assertIsNone(cache.get(identifier))

            self.assertEqual(
                "Banana",
                CachingTestModel.objects.get(pk=instance.pk).field1)
            self.assertTrue(datastore_get.called)
Example #9
File: models.py Project: mrfuxi/djangae
    def map(entity, model, *args, **kwargs):
        """ The Clean mapper maps over all UniqueMarker instances. """

        alias = kwargs.get("db", "default")
        namespace = settings.DATABASES.get(alias, {}).get("NAMESPACE")

        model = decode_model(model)
        if not entity.key().id_or_name().startswith(model._meta.db_table + "|"):
            # Only include markers which are for this model
            return

        assert namespace == entity.namespace()
        with disable_cache():
            # At this point, the entity is a unique marker linked to an instance of 'model'; check whether that instance still exists.
            instance_id = entity["instance"].id_or_name()
            try:
                instance = model.objects.using(alias).get(pk=instance_id)
            except model.DoesNotExist:
                logging.info("Deleting unique marker {} because the associated instance no longer exists".format(entity.key().id_or_name()))
                datastore.Delete(entity)
                return

            # Get the possible unique markers for the entity, if this one doesn't exist in that list then delete it
            instance_entity = django_instance_to_entity(connections[alias], model, instance._meta.fields, raw=True, instance=instance, check_null=False)
            identifiers = unique_identifiers_from_entity(model, instance_entity, ignore_pk=True)
            identifier_keys = [datastore.Key.from_path(UniqueMarker.kind(), i, namespace=entity["instance"].namespace()) for i in identifiers]
            if entity.key() not in identifier_keys:
                logging.info("Deleting unique marker {} because the it no longer represents the associated instance state".format(entity.key().id_or_name()))
                datastore.Delete(entity)
Example #10
def add_entities_to_cache(model, entities, situation, skip_memcache=False):
    ensure_context()

    # Don't cache on Get if we are inside a transaction, even in the context
    # This is because transactions don't see the current state of the datastore
    # We can still cache in the context on Put() but not in memcache
    if situation == CachingSituation.DATASTORE_GET and datastore.IsInTransaction():
        return

    if situation in (CachingSituation.DATASTORE_PUT, CachingSituation.DATASTORE_GET_PUT) and datastore.IsInTransaction():
        # We have to wipe the entity from memcache
        _remove_entities_from_memcache_by_key([entity.key() for entity in entities if entity.key()])

    identifiers = [
        unique_identifiers_from_entity(model, entity) for entity in entities
    ]

    for ent_identifiers, entity in zip(identifiers, entities):
        get_context().stack.top.cache_entity(ent_identifiers, entity, situation)

    # Only cache in memcache if we are doing a GET (outside a transaction) or a PUT (outside a transaction).
    # The exception is GET_PUT, which we do in our own transaction, so the transaction check is ignored for it.
    if (not datastore.IsInTransaction() and situation in (CachingSituation.DATASTORE_GET, CachingSituation.DATASTORE_PUT)) or \
            situation == CachingSituation.DATASTORE_GET_PUT:

        if not skip_memcache:

            mc_key_entity_map = {}
            for ent_identifiers, entity in zip(identifiers, entities):
                mc_key_entity_map.update({
                    identifier: entity for identifier in ent_identifiers
                })
            _add_entity_to_memcache(model, mc_key_entity_map)
Example #11
    def test_save_wipes_entity_from_cache_inside_transaction(self):
        entity_data = {
            "field1": "Apple",
            "comb1": 1,
            "comb2": "Cherry"
        }

        identifiers = caching._apply_namespace(
            unique_utils.unique_identifiers_from_entity(CachingTestModel, FakeEntity(entity_data, id=222)),
            DEFAULT_NAMESPACE,
        )

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))

        instance = CachingTestModel.objects.create(id=222, **entity_data)

        for identifier in identifiers:
            self.assertEqual(entity_data, cache.get(identifier))

        with transaction.atomic():
            instance.save()

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))
Example #12
    def test_save_inside_transaction_evicts_cache(self):
        entity_data = {
            "field1": "Apple",
            "comb1": 1,
            "comb2": "Cherry"
        }

        identifiers = caching._apply_namespace(
            unique_utils.unique_identifiers_from_entity(CachingTestModel, FakeEntity(entity_data, id=222)),
            DEFAULT_NAMESPACE,
        )

        instance = CachingTestModel.objects.create(id=222, **entity_data)
        for identifier in identifiers:
            self.assertEqual(entity_data, cache.get(identifier))

        with transaction.atomic():
            instance.field1 = "Banana"
            instance.save()

        # Make sure that altering inside the transaction evicted the item from the cache
        # and that a get then hits the datastore (which then in turn caches)
        with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
            for identifier in identifiers:
                self.assertIsNone(cache.get(identifier))

            self.assertEqual("Banana", CachingTestModel.objects.get(pk=instance.pk).field1)
            self.assertTrue(datastore_get.called)
Example #13
def add_entity_to_context_cache(model, entity):
    identifiers = unique_identifiers_from_entity(model, entity)

    for identifier in identifiers:
        context.cache[identifier] = entity

    context.reverse_cache[entity.key()] = identifiers
Example #14
    def test_eventual_read_doesnt_update_memcache(self):
        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        identifiers = caching._apply_namespace(
            unique_utils.unique_identifiers_from_entity(
                CachingTestModel, FakeEntity(entity_data, id=222)),
            DEFAULT_NAMESPACE,
        )

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))

        CachingTestModel.objects.create(id=222, **entity_data)

        for identifier in identifiers:
            self.assertEqual(entity_data, cache.get(identifier))

        cache.clear()

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))

        CachingTestModel.objects.all()[0]  # Inconsistent read

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))
Example #15
    def test_transactional_save_wipes_the_cache_only_after_its_result_is_consistently_available(self):
        entity_data = {
            "field1": "old",
        }

        identifiers = caching._apply_namespace(
            unique_utils.unique_identifiers_from_entity(
                CachingTestModel, FakeEntity(entity_data, id=222)
            ),
            DEFAULT_NAMESPACE,
        )

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))

        instance = CachingTestModel.objects.create(id=222, **entity_data)

        for identifier in identifiers:
            self.assertEqual("old", cache.get(identifier)["field1"])

        @non_transactional
        def non_transactional_read(instance_pk):
            CachingTestModel.objects.get(pk=instance_pk)

        with transaction.atomic():
            instance.field1 = "new"
            instance.save()
            non_transactional_read(instance.pk)  # could potentially recache the old object

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))
Example #16
File: caching.py Project: stucox/djangae
def add_entity_to_cache(model, entity, situation):
    ensure_context()

    identifiers = unique_identifiers_from_entity(model, entity)

    # Don't cache on Get if we are inside a transaction, even in the context
    # This is because transactions don't see the current state of the datastore
    # We can still cache in the context on Put() but not in memcache
    if situation == CachingSituation.DATASTORE_GET and datastore.IsInTransaction():
        return

    if situation in (CachingSituation.DATASTORE_PUT,
                     CachingSituation.DATASTORE_GET_PUT) and datastore.IsInTransaction():
        # We have to wipe the entity from memcache
        if entity.key():
            _remove_entity_from_memcache_by_key(entity.key())

    _context.stack.top.cache_entity(identifiers, entity, situation)

    # Only cache in memcache if we are doing a GET (outside a transaction) or a PUT (outside a transaction).
    # The exception is GET_PUT, which we do in our own transaction, so the transaction check is ignored for it.
    if (not datastore.IsInTransaction() and situation in (CachingSituation.DATASTORE_GET, CachingSituation.DATASTORE_PUT)) or \
            situation == CachingSituation.DATASTORE_GET_PUT:

        _add_entity_to_memcache(model, entity, identifiers)
Example #17
    def test_consistent_read_updates_memcache_outside_transaction(self):
        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        identifiers = caching._apply_namespace(
            unique_utils.unique_identifiers_from_entity(CachingTestModel, FakeEntity(entity_data, id=222)),
            DEFAULT_NAMESPACE,
        )

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))

        instance = CachingTestModel.objects.create(id=222, **entity_data)
        instance.refresh_from_db()  # Add to memcache (consistent Get)

        for identifier in identifiers:
            self.assertEqual(entity_data, cache.get(identifier))

        cache.clear()

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))

        CachingTestModel.objects.get(id=222)  # Consistent read

        for identifier in identifiers:
            self.assertEqual(entity_data, cache.get(identifier))
Example #18
    def test_save_inside_transaction_evicts_cache(self):
        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        identifiers = caching._apply_namespace(
            unique_utils.unique_identifiers_from_entity(
                CachingTestModel, FakeEntity(entity_data, id=222)),
            DEFAULT_NAMESPACE,
        )

        instance = CachingTestModel.objects.create(id=222, **entity_data)
        instance.refresh_from_db()  # Adds to memcache (consistent Get)

        for identifier in identifiers:
            self.assertEqual(entity_data, cache.get(identifier))

        with transaction.atomic():
            instance.field1 = "Banana"
            instance.save()

        # Make sure that altering inside the transaction evicted the item from the cache
        # and that a get then hits the datastore (which then in turn caches)
        with sleuth.watch(
                "djangae.db.backends.appengine.rpc.Get") as datastore_get:
            for identifier in identifiers:
                self.assertIsNone(cache.get(identifier))

            self.assertEqual(
                "Banana",
                CachingTestModel.objects.get(pk=instance.pk).field1)
            self.assertTrue(datastore_get.called)
Example #19
    def test_save_caches_outside_transaction_only(self):
        entity_data = {
            "field1": "Apple",
            "comb1": 1,
            "comb2": "Cherry"
        }

        identifiers = unique_utils.unique_identifiers_from_entity(CachingTestModel, FakeEntity(entity_data, id=222))

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))

        instance = CachingTestModel.objects.create(id=222, **entity_data)

        for identifier in identifiers:
            self.assertEqual(entity_data, cache.get(identifier))

        instance.delete()

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))

        with transaction.atomic():
            instance = CachingTestModel.objects.create(**entity_data)

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))
Example #20
File: models.py Project: stucox/djangae
    def map(entity, model, *args, **kwargs):
        """ The Clean mapper maps over all UniqueMarker instances. """

        model = decode_model(model)

        if not entity.key().id_or_name().startswith(model._meta.db_table + "|"):
            # Only include markers which are for this model
            return

        with disable_cache():
            # At this point, the entity is a unique marker linked to an instance of 'model'; check whether that instance still exists.
            instance_id = entity["instance"].id_or_name()
            try:
                instance = model.objects.get(pk=instance_id)
            except model.DoesNotExist:
                logging.info("Deleting unique marker {} because the associated instance no longer exists".format(entity.key().id_or_name()))
                datastore.Delete(entity)
                return

            # Get the possible unique markers for the entity, if this one doesn't exist in that list then delete it
            instance = django_instance_to_entity(connection, model, instance._meta.fields, raw=True, instance=instance, check_null=False)
            identifiers = unique_identifiers_from_entity(model, instance, ignore_pk=True)
            identifier_keys = [datastore.Key.from_path(UniqueMarker.kind(), i) for i in identifiers]
            if entity.key() not in identifier_keys:
                logging.info("Deleting unique marker {} because the it no longer represents the associated instance state".format(entity.key().id_or_name()))
                datastore.Delete(entity)
Example #21
File: models.py Project: mrfuxi/djangae
    def map(instance, *args, **kwargs):
        """ Figure out what markers the instance should use and verify they're attached to
        this instance. Log any weirdness and in repair mode - recreate missing markers. """
        action_id = kwargs.get("action_pk")
        repair = kwargs.get("repair")

        alias = kwargs.get("db", "default")
        namespace = settings.DATABASES.get(alias, {}).get("NAMESPACE")
        assert alias == (instance._state.db or "default")
        entity = django_instance_to_entity(connections[alias], type(instance), instance._meta.fields, raw=True, instance=instance, check_null=False)
        identifiers = unique_identifiers_from_entity(type(instance), entity, ignore_pk=True)
        identifier_keys = [datastore.Key.from_path(UniqueMarker.kind(), i, namespace=namespace) for i in identifiers]

        markers = datastore.Get(identifier_keys)
        instance_key = str(entity.key())

        markers_to_save = []

        for i, m in zip(identifier_keys, markers):
            marker_key = str(i)
            if m is None:
                # Missing marker
                if repair:
                    new_marker = datastore.Entity(UniqueMarker.kind(), name=i.name(), namespace=namespace)
                    new_marker['instance'] = entity.key()
                    new_marker['created'] = datetime.datetime.now()
                    markers_to_save.append(new_marker)
                else:
                    log(action_id, "missing_marker", instance_key, marker_key)

            elif 'instance' not in m or not m['instance']:
                # Marker with a missing instance attribute
                if repair:
                    m['instance'] = entity.key()
                    markers_to_save.append(m)
                else:
                    log(action_id, "missing_instance", instance_key, marker_key)

            elif m['instance'] != entity.key():

                if isinstance(m['instance'], basestring):
                    m['instance'] = datastore.Key(m['instance'])

                    if repair:
                        markers_to_save.append(m)
                    else:
                        log(action_id, "old_instance_key", instance_key, marker_key)

                if m['instance'] != entity.key():
                    # Marker already assigned to a different instance
                    log(action_id, "already_assigned", instance_key, marker_key)
                    # Also log in repair mode, as repairing would break the other instance.

        if markers_to_save:
            datastore.Put(markers_to_save)
Example #22
    def map(instance, *args, **kwargs):
        """ Figure out what markers the instance should use and verify they're attached to
        this instance. Log any weirdness and in repair mode - recreate missing markers. """
        action_id = kwargs.get("action_pk")
        repair = kwargs.get("repair")

        alias = kwargs.get("db", "default")
        namespace = settings.DATABASES.get(alias, {}).get("NAMESPACE")
        assert alias == (instance._state.db or "default")
        entity, _ = django_instance_to_entities(connections[alias], instance._meta.fields, raw=True, instance=instance, check_null=False)
        identifiers = unique_identifiers_from_entity(type(instance), entity, ignore_pk=True)
        identifier_keys = [datastore.Key.from_path(UniqueMarker.kind(), i, namespace=namespace) for i in identifiers]

        markers = datastore.Get(identifier_keys)
        instance_key = str(entity.key())

        markers_to_save = []

        for i, m in zip(identifier_keys, markers):
            marker_key = str(i)
            if m is None:
                # Missing marker
                if repair:
                    new_marker = datastore.Entity(UniqueMarker.kind(), name=i.name(), namespace=namespace)
                    new_marker['instance'] = entity.key()
                    new_marker['created'] = datetime.datetime.now()
                    markers_to_save.append(new_marker)
                else:
                    log(action_id, "missing_marker", instance_key, marker_key)

            elif 'instance' not in m or not m['instance']:
                # Marker with a missing instance attribute
                if repair:
                    m['instance'] = entity.key()
                    markers_to_save.append(m)
                else:
                    log(action_id, "missing_instance", instance_key, marker_key)

            elif m['instance'] != entity.key():

                if isinstance(m['instance'], basestring):
                    m['instance'] = datastore.Key(m['instance'])

                    if repair:
                        markers_to_save.append(m)
                    else:
                        log(action_id, "old_instance_key", instance_key, marker_key)

                if m['instance'] != entity.key():
                    # Marker already assigned to a different instance
                    log(action_id, "already_assigned", instance_key, marker_key)
                    # Also log in repair mode, as repairing would break the other instance.

        if markers_to_save:
            datastore.Put(markers_to_save)
Example #23
def _remove_entity_from_memcache_by_key(key):
    """
        Note, if the key of the entity got evicted from the cache, it's possible that stale cache
        entries would be left behind. Remember if you need pure atomicity then use disable_cache() or a
        transaction.
    """

    cache_key, model = _get_cache_key_and_model_from_datastore_key(key)
    entity = cache.get(cache_key)

    if entity:
        identifiers = unique_identifiers_from_entity(model, entity)
        cache.delete_many(identifiers)
Example #24
def _remove_entity_from_memcache_by_key(key):
    """
        Note, if the key of the entity got evicted from the cache, it's possible that stale cache
        entries would be left behind. Remember if you need pure atomicity then use disable_cache() or a
        transaction.
    """

    cache_key, model = _get_cache_key_and_model_from_datastore_key(key)
    entity = cache.get(cache_key)

    if entity:
        identifiers = unique_identifiers_from_entity(model, entity)
        cache.delete_many(identifiers)
Example #25
File: caching.py Project: vzts/djangae
def add_entities_to_cache(model,
                          entities,
                          situation,
                          namespace,
                          skip_memcache=False):
    if not CACHE_ENABLED:
        return None

    context = get_context()

    if not (context.context_enabled or context.memcache_enabled):
        # Don't cache anything if caching is disabled
        return

    # Don't cache on Get if we are inside a transaction, even in the context
    # This is because transactions don't see the current state of the datastore
    # We can still cache in the context on Put() but not in memcache
    if situation == CachingSituation.DATASTORE_GET and rpc.IsInTransaction():
        return

    if situation in (
            CachingSituation.DATASTORE_PUT,
            CachingSituation.DATASTORE_GET_PUT) and rpc.IsInTransaction():
        # We have to wipe the entity from memcache
        _remove_entities_from_memcache_by_key(
            [entity.key() for entity in entities if entity.key()], namespace)

    identifiers = [
        unique_identifiers_from_entity(model, entity) for entity in entities
    ]

    for ent_identifiers, entity in zip(identifiers, entities):
        get_context().stack.top.cache_entity(
            _apply_namespace(ent_identifiers, namespace), entity, situation)

    # Only cache in memcache if we are doing a GET (outside a transaction) or a PUT (outside a transaction).
    # The exception is GET_PUT, which we do in our own transaction, so the transaction check is ignored for it.
    if ((not rpc.IsInTransaction() and situation
         in (CachingSituation.DATASTORE_GET, CachingSituation.DATASTORE_PUT))
            or situation == CachingSituation.DATASTORE_GET_PUT):

        if not skip_memcache:

            mc_key_entity_map = {}
            for ent_identifiers, entity in zip(identifiers, entities):
                mc_key_entity_map.update(
                    {identifier: entity
                     for identifier in ent_identifiers})
            _add_entity_to_memcache(model, mc_key_entity_map, namespace)
Example #26
def add_entities_to_cache(model, entities, situation, namespace, skip_memcache=False):
    if not CACHE_ENABLED:
        return None

    context = get_context()

    if not (context.context_enabled or context.memcache_enabled):
        # Don't cache anything if caching is disabled
        return

    # Don't cache on Get if we are inside a transaction, even in the context
    # This is because transactions don't see the current state of the datastore
    # We can still cache in the context on Put() but not in memcache
    if situation == CachingSituation.DATASTORE_GET and rpc.IsInTransaction():
        return

    if situation in (CachingSituation.DATASTORE_PUT, CachingSituation.DATASTORE_GET_PUT) and rpc.IsInTransaction():
        # We have to wipe the entity from memcache
        _remove_entities_from_memcache_by_key([entity.key() for entity in entities if entity.key()], namespace)

    identifiers = [
        unique_identifiers_from_entity(model, entity) for entity in entities
    ]

    for ent_identifiers, entity in zip(identifiers, entities):
        get_context().stack.top.cache_entity(_apply_namespace(ent_identifiers, namespace), entity, situation)

    # Only cache in memcache if we are doing a GET (outside a transaction) or a PUT (outside a transaction).
    # The exception is GET_PUT, which we do in our own transaction, so the transaction check is ignored for it.
    if (
        (
            not rpc.IsInTransaction()
            and situation in (CachingSituation.DATASTORE_GET, CachingSituation.DATASTORE_PUT)
        )
        or situation == CachingSituation.DATASTORE_GET_PUT
    ):

        if not skip_memcache:

            mc_key_entity_map = {}
            for ent_identifiers, entity in zip(identifiers, entities):
                mc_key_entity_map.update({
                    identifier: entity for identifier in ent_identifiers
                })
            _add_entity_to_memcache(model, mc_key_entity_map, namespace)
Example #27
def _remove_entities_from_memcache_by_key(keys):
    """
        Note, if the key of the entity got evicted from the cache, it's possible that stale cache
        entries would be left behind. Remember if you need pure atomicity then use disable_cache() or a
        transaction.
    """

    # Key -> model
    cache_keys = dict(
        _get_cache_key_and_model_from_datastore_key(key) for key in keys
    )
    entities = cache.get_many(cache_keys.keys())

    if entities:
        identifiers = [
            unique_identifiers_from_entity(cache_keys[key], entity)
            for key, entity in entities.items()
        ]
        cache.delete_many(itertools.chain(*identifiers))
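
As a side note, the itertools.chain(*identifiers) call above flattens the per-entity identifier lists into a single iterable before the bulk delete; a minimal standalone illustration with made-up identifier strings:

import itertools

identifiers = [["kind|field1:Apple", "kind|comb2:Cherry"], ["kind|field1:Pear"]]
flat = list(itertools.chain(*identifiers))
assert flat == ["kind|field1:Apple", "kind|comb2:Cherry", "kind|field1:Pear"]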
Example #28
    def test_save_wipes_entity_from_cache_inside_transaction(self):
        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        identifiers = unique_utils.unique_identifiers_from_entity(
            CachingTestModel, FakeEntity(entity_data, id=222))

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))

        instance = CachingTestModel.objects.create(id=222, **entity_data)

        for identifier in identifiers:
            self.assertEqual(entity_data, cache.get(identifier))

        with transaction.atomic():
            instance.save()

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))
Example #29
File: caching.py Project: mrfuxi/djangae
def _remove_entities_from_memcache_by_key(keys, namespace):
    """
        Given an iterable of datastore.Key objects, remove the corresponding entities from memcache.
        Note, if the key of the entity got evicted from the cache, it's possible that stale cache
        entries would be left behind. Remember if you need pure atomicity then use disable_cache() or a
        transaction.
        In theory the keys should all have the same namespace as `namespace`.
    """
    # Key -> model
    cache_keys = dict(
        _get_cache_key_and_model_from_datastore_key(key) for key in keys
    )
    entities = _strip_namespace(cache.get_many(_apply_namespace(cache_keys.keys(), namespace)))

    if entities:
        identifiers = [
            unique_identifiers_from_entity(cache_keys[key], entity)
            for key, entity in entities.items()
        ]
        cache.delete_many(_apply_namespace(itertools.chain(*identifiers), namespace))
Example #30
def _remove_entities_from_memcache_by_key(keys, namespace):
    """
        Given an iterable of datastore.Key objects, remove the corresponding entities from memcache.
        Note, if the key of the entity got evicted from the cache, it's possible that stale cache
        entries would be left behind. Remember if you need pure atomicity then use disable_cache() or a
        transaction.
        In theory the keys should all have the same namespace as `namespace`.
    """
    # Key -> model
    cache_keys = dict(
        _get_cache_key_and_model_from_datastore_key(key) for key in keys)
    entities = _strip_namespace(get_memcache_client().get_multi(
        _apply_namespace(cache_keys.keys(), namespace)))

    if entities:
        identifiers = [
            unique_identifiers_from_entity(cache_keys[key], entity)
            for key, entity in entities.items()
        ]
        get_memcache_client().delete_multi_async(
            _apply_namespace(itertools.chain(*identifiers), namespace))
Example #31
    def test_consistent_read_updates_memcache_outside_transaction(self):
        entity_data = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

        identifiers = unique_utils.unique_identifiers_from_entity(
            CachingTestModel, FakeEntity(entity_data, id=222))

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))

        CachingTestModel.objects.create(id=222, **entity_data)

        for identifier in identifiers:
            self.assertEqual(entity_data, cache.get(identifier))

        cache.clear()

        for identifier in identifiers:
            self.assertIsNone(cache.get(identifier))

        CachingTestModel.objects.get(id=222)  # Consistent read

        for identifier in identifiers:
            self.assertEqual(entity_data, cache.get(identifier))
Example #32
def cache_entity(model, entity):
    identifiers = unique_identifiers_from_entity(model, entity)
    logger.debug("Caching entity with key %s and identifiers %s", entity.key(), identifiers)
    cache.set_many({x: entity for x in identifiers})
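
A usage sketch, assuming Django's low-level cache API and made-up identifier strings (real ones come from unique_identifiers_from_entity): once an entity has been written under all of its unique identifiers, any single identifier retrieves it.

from django.core.cache import cache

# Hypothetical identifiers for illustration only.
identifiers = ["cachingtestmodel|id:222", "cachingtestmodel|field1:Apple"]
entity = {"field1": "Apple", "comb1": 1, "comb2": "Cherry"}

cache.set_many({identifier: entity for identifier in identifiers})
assert cache.get("cachingtestmodel|field1:Apple") == entity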
Example #33
def cache_entity(model, entity):
    identifiers = unique_identifiers_from_entity(model, entity)
    logger.debug("Caching entity with key %s and identifiers %s", entity.key(),
                 identifiers)
    cache.set_many({x: entity for x in identifiers})
Example #34
def uncache_entity(model, entity):
    identifiers = unique_identifiers_from_entity(model, entity)
    logger.debug("Uncaching entity with key %s and identifiers %s",
                 entity.key(), identifiers)
    for identifier in identifiers:
        cache.delete(identifier)
Example #35
def uncache_entity(model, entity):
    identifiers = unique_identifiers_from_entity(model, entity)
    logger.debug("Uncaching entity with key %s and identifiers %s", entity.key(), identifiers)
    for identifier in identifiers:
        cache.delete(identifier)
Example #36
    def map(instance, *args, **kwargs):
        """ Figure out what markers the instance should use and verify they're attached to
        this instance. Log any weirdness and in repair mode - recreate missing markers. """
        action_id = kwargs.get("action_pk")
        repair = kwargs.get("repair")

        entity = django_instance_to_entity(connection,
                                           type(instance),
                                           instance._meta.fields,
                                           raw=False,
                                           instance=instance)
        identifiers = unique_identifiers_from_entity(type(instance),
                                                     entity,
                                                     ignore_pk=True)
        identifier_keys = [
            datastore.Key.from_path(UniqueMarker.kind(), i)
            for i in identifiers
        ]

        markers = datastore.Get(identifier_keys)
        instance_key = str(entity.key())

        markers_to_save = []

        for i, m in zip(identifier_keys, markers):
            marker_key = str(i)
            if m is None:
                # Missing marker
                if repair:
                    new_marker = datastore.Entity(UniqueMarker.kind(),
                                                  name=i.name())
                    new_marker['instance'] = entity.key()
                    markers_to_save.append(new_marker)
                else:
                    log(action_id, "missing_marker", instance_key, marker_key)

            elif 'instance' not in m or not m['instance']:
                # Marker with a missing instance attribute
                if repair:
                    m['instance'] = entity.key()
                    markers_to_save.append(m)
                else:
                    log(action_id, "missing_instance", instance_key,
                        marker_key)

            elif m['instance'] != entity.key():

                if isinstance(m['instance'], basestring):
                    m['instance'] = datastore.Key(m['instance'])

                    if repair:
                        markers_to_save.append(m)
                    else:
                        log(action_id, "old_instance_key", instance_key,
                            marker_key)

                if m['instance'] != entity.key():
                    # Marker already assigned to a different instance
                    log(action_id, "already_assigned", instance_key,
                        marker_key)
                    # Also log in repair mode, as repairing would break the other instance.

        if markers_to_save:
            datastore.Put(markers_to_save)

        yield ('_', [instance.pk])