Example #1
0
    def Run(self, limit, offset):
        """Run this unique query, serving from the cache when possible.

        Keys-only and projection queries bypass the cache entirely and
        are delegated straight to the underlying datastore query, since
        the cache only stores complete entities.
        """
        options = self._gae_query._Query__query_options
        if options.keys_only or options.projection:
            return self._gae_query.Run(limit=limit, offset=offset)

        cached = caching.get_from_cache(self._identifier, self._namespace)
        if cached is not None and utils.entity_matches_query(cached, self._gae_query):
            # Cache hit which still satisfies the query filters
            return iter([cached])

        # Cache miss (or stale hit): run a fast keys-only query first...
        key_query = Query(self._gae_query._Query__kind, keys_only=True, namespace=self._namespace)
        key_query.update(self._gae_query)
        result_keys = key_query.Run(limit=limit, offset=offset)

        # ...then do a consistent Get so we never cache stale data, and
        # re-check each entity against the query filters
        entities = [
            entity for entity in datastore.Get(result_keys)
            if entity and utils.entity_matches_query(entity, self._gae_query)
        ]
        if len(entities) == 1:
            caching.add_entities_to_cache(
                self._model,
                [entities[0]],
                caching.CachingSituation.DATASTORE_GET,
                self._namespace,
            )
        return iter(entities)
Example #2
0
    def Run(self, limit, offset):
        """Execute the unique query, consulting the entity cache first.

        Keys-only/projection queries can't be answered from the cache
        (it holds full entities), so they go straight to the datastore.
        """
        query_options = self._gae_query._Query__query_options
        if query_options.keys_only or query_options.projection:
            return self._gae_query.Run(limit=limit, offset=offset)

        hit = caching.get_from_cache(self._identifier, self._namespace)
        if hit is not None and not utils.entity_matches_query(
                hit, self._gae_query):
            hit = None  # Stale cache entry: treat as a miss

        if hit is not None:
            return iter([hit])

        # Fast keys-only query to locate the (at most one) matching key
        lookup = Query(self._gae_query._Query__kind,
                       keys_only=True,
                       namespace=self._namespace)
        lookup.update(self._gae_query)
        found_keys = lookup.Run(limit=limit, offset=offset)

        # Consistent Get so we don't cache stale data; re-verify each
        # entity really matches the query before returning it
        matches = []
        for entity in datastore.Get(found_keys):
            if entity and utils.entity_matches_query(entity, self._gae_query):
                matches.append(entity)

        if len(matches) == 1:
            caching.add_entities_to_cache(
                self._model,
                [matches[0]],
                caching.CachingSituation.DATASTORE_GET,
                self._namespace,
            )
        return iter(matches)
Example #3
0
        def iter_results(results):
            """Yield entities from `results` in Django ordering, applying
            offset/limit slicing manually.

            NOTE(review): `cache`, `offset`, `limit` and `opts` are closure
            variables from the enclosing function, which isn't visible in
            this chunk — confirm their semantics against the caller.
            """
            # `returned` counts entities consumed (both offset-skipped and
            # yielded), hence the `(offset or 0) + limit` stop condition.
            returned = 0
            # This is safe, because Django is fetching all results any way :(
            sorted_results = sorted(results, cmp=partial(utils.django_ordering_comparison, self.ordering))
            sorted_results = [result for result in sorted_results if result is not None]
            if cache and sorted_results:
                caching.add_entities_to_cache(self.model, sorted_results, caching.CachingSituation.DATASTORE_GET)

            for result in sorted_results:

                # Skip entities that no longer match any of the queries that
                # produced their key (eventual consistency)
                if not any([ utils.entity_matches_query(result, qry) for qry in self.queries_by_key[result.key()]]):
                    continue

                if offset and returned < offset:
                    # Skip entities based on offset
                    returned += 1
                    continue
                else:

                    yield _convert_entity_based_on_query_options(result, opts)

                    returned += 1

                    # If there is a limit, we might be done!
                    if limit is not None and returned == (offset or 0) + limit:
                        break
Example #4
0
    def Run(self, limit=None, offset=None):
        """Fetch the entities for this key-based query.

        Single-key lookups are served from the cache when possible;
        everything else goes through a batch datastore Get(). Results
        are ordered, filtered against the query, then sliced by
        offset/limit in Python.
        """
        assert not self.query._Query__ancestor_pb  # FIXME: We don't handle this yet

        query_options = self.query._Query__query_options

        entities = None

        # A single-key lookup can be answered straight from the cache
        if len(self.keys) == 1:
            cached = caching.get_from_cache_by_key(self.keys[0])
            if cached is not None:
                entities = [cached]

        # Cache miss, or more than one key: batch Get() and sort using
        # the Django ordering of the original queryset
        if entities is None:
            fetched = (x for x in datastore.Get(self.keys) if x is not None)
            entities = sorted(fetched, cmp=partial(utils.django_ordering_comparison, self.ordering))

        # Drop anything that no longer matches the query (eventual
        # consistency) and apply any projection/keys-only conversion
        entities = [
            _convert_entity_based_on_query_options(entity, query_options)
            for entity in entities
            if utils.entity_matches_query(entity, self.query)
        ]

        # Slice in Python; the datastore wasn't given limit/offset here
        if offset:
            entities = entities[offset:]
        if limit is not None:
            entities = entities[:limit]

        return iter(entities)
Example #5
0
    def Run(self, limit=None, offset=None):
        """Return an iterator of entities matching this query-by-keys.

        Tries the cache for single-key lookups, falls back to a batch
        datastore Get(), filters out non-matching entities and applies
        offset/limit slicing in Python.
        """
        # FIXME: We don't handle ancestor queries yet
        assert not self.query._Query__ancestor_pb

        opts = self.query._Query__query_options

        hits = None

        # Single key? The cache may already hold the entity
        if len(self.keys) == 1:
            entity = caching.get_from_cache_by_key(self.keys[0])
            if entity is not None:
                hits = [entity]

        if hits is None:
            # Nothing cached (or multiple keys) - do a batch Get() and
            # order the results the way Django asked for
            order = partial(utils.django_ordering_comparison, self.ordering)
            hits = sorted((e for e in datastore.Get(self.keys) if e is not None), cmp=order)

        # Re-check each entity against the query (eventual consistency)
        # and convert per the query options (projection/keys-only)
        hits = [
            _convert_entity_based_on_query_options(e, opts)
            for e in hits if utils.entity_matches_query(e, self.query)
        ]

        if offset:
            hits = hits[offset:]

        if limit is not None:
            hits = hits[:limit]

        return iter(hits)
Example #6
0
    def Count(self, limit, offset):
        """Return the number of results this unique query would yield.

        A unique query matches at most one entity, so a valid cache hit
        lets us answer without a datastore round-trip — but we must
        still honour `limit` and `offset` (the previous implementation
        returned 1 unconditionally on a cache hit, ignoring both).
        """
        ret = caching.get_from_cache(self._identifier)
        if ret is not None and not utils.entity_matches_query(ret, self._gae_query):
            # Stale cache entry - fall through to a real count
            ret = None

        if ret is None:
            return self._gae_query.Count(limit=limit, offset=offset)

        # Exactly one matching entity exists: an offset skips it and a
        # zero/negative limit excludes it, otherwise the count is 1
        if offset:
            return 0
        if limit is not None and limit < 1:
            return 0
        return 1
Example #7
0
    def _update_entity(self, key):
        """Re-fetch the entity for `key`, apply the new field values and
        save it, keeping the cache and unique-constraint markers in sync.

        Returns True on success, False if the entity has vanished or no
        longer matches the query that selected it (eventual consistency).
        """
        # Evict any cached copy first so nothing reads the stale entity
        caching.remove_entity_from_cache_by_key(key)

        try:
            result = datastore.Get(key)
        except datastore_errors.EntityNotFoundError:
            # Return false to indicate update failure
            return False

        if (
            isinstance(self.select.gae_query, (Query, UniqueQuery)) # ignore QueryByKeys and NoOpQuery
            and not utils.entity_matches_query(result, self.select.gae_query)
        ):
            # Due to eventual consistency the query may have returned an entity which no longer
            # matches the query
            return False

        # Keep a pristine copy so constraint markers can be diffed below
        original = copy.deepcopy(result)

        instance_kwargs = {field.attname:value for field, param, value in self.values}

        # Note: If you replace MockInstance with self.model, you'll find that some delete
        # tests fail in the test app. This is because any unspecified fields would then call
        # get_default (even though we aren't going to use them) which may run a query which
        # fails inside this transaction. Given as we are just using MockInstance so that we can
        # call django_instance_to_entity it on it with the subset of fields we pass in,
        # what we have is fine.
        instance = MockInstance(**instance_kwargs)

        # Update the entity we read above with the new values
        result.update(django_instance_to_entity(
            self.connection, self.model,
            [ x[0] for x in self.values],  # Pass in the fields that were updated
            True, instance)
        )

        if not constraints.constraint_checks_enabled(self.model):
            # The fast path, no constraint checking
            datastore.Put(result)
            caching.add_entity_to_cache(self.model, result, caching.CachingSituation.DATASTORE_PUT)
        else:
            to_acquire, to_release = constraints.get_markers_for_update(self.model, original, result)

            # Acquire first, because if that fails then we don't want to alter what's already there
            constraints.acquire_identifiers(to_acquire, result.key())
            try:
                datastore.Put(result)
                caching.add_entity_to_cache(self.model, result, caching.CachingSituation.DATASTORE_PUT)
            except:
                # Put (or caching) failed: undo the marker acquisition
                constraints.release_identifiers(to_acquire)
                raise
            else:
                # Now we release the ones we don't want anymore
                constraints.release_identifiers(to_release)

        # Return true to indicate update success
        return True
Example #8
0
    def Count(self, limit, offset):
        """Count the results of this unique query, preferring the cache.

        At most one entity can match a unique query, so a valid cache
        hit means we can answer without hitting the datastore. The
        previous implementation returned 1 unconditionally in that
        case, which ignored `limit`/`offset`; they are honoured here.
        """
        ret = caching.get_from_cache(self._identifier)
        if ret is not None and not utils.entity_matches_query(
                ret, self._gae_query):
            ret = None

        if ret is None:
            return self._gae_query.Count(limit=limit, offset=offset)

        # One matching entity: offset skips it, limit < 1 excludes it
        if offset:
            return 0
        if limit is not None and limit < 1:
            return 0
        return 1
Example #9
0
    def Run(self, limit, offset):
        """Fetch, order, filter and slice the entities for self.keys.

        Bug fix: entities were previously matched against the query
        *after* the offset/limit slicing, so non-matching entities
        consumed slice positions and matching ones could be dropped,
        under-returning results. We now filter first (as the datastore
        itself would) and then slice the matching results — consistent
        with the other key-based Run() implementations in this file.
        """
        # Batch Get(), dropping missing entities, ordered per Django
        results = sorted((x for x in datastore.Get(self.keys) if x), cmp=partial(utils.django_ordering_comparison, self.ordering))

        # Only entities that still match the query count towards the
        # offset/limit (eventual consistency may return stale ones)
        results = [x for x in results if utils.entity_matches_query(x, self.query)]

        if offset:
            results = results[offset:]

        if limit is not None:
            results = results[:limit]

        return iter(results)
Example #10
0
    def test_entity_matches_query(self):
        """Exercise entity_matches_query against each filter operator."""
        ent = datastore.Entity("test_model")
        ent["name"] = "Charlie"
        ent["age"] = 22

        # Equality filter the entity satisfies
        qry = datastore.Query("test_model")
        qry["name ="] = "Charlie"
        self.assertTrue(entity_matches_query(ent, qry))

        # >= matches when the property exceeds the bound
        qry["age >="] = 5
        self.assertTrue(entity_matches_query(ent, qry))
        del qry["age >="]

        # < is strict, so age 22 fails "age < 22"
        qry["age <"] = 22
        self.assertFalse(entity_matches_query(ent, qry))
        del qry["age <"]

        # <= is inclusive, so age 22 passes "age <= 22"
        qry["age <="] = 22
        self.assertTrue(entity_matches_query(ent, qry))
        del qry["age <="]

        # Equality filter the entity does not satisfy
        qry["name ="] = "Fred"
        self.assertFalse(entity_matches_query(ent, qry))

        # If the entity has a list field, then if any of them match the
        # query then it's a match
        ent["name"] = [ "Bob", "Fred", "Dave" ]
        self.assertTrue(entity_matches_query(ent, qry))  # ListField test
Example #11
0
    def Run(self, limit=None, offset=None):
        """Execute this query-by-keys.

        Serves single-key lookups from the cache when possible,
        otherwise batch Get()s, caches what exists, filters each entity
        against the queries that produced its key, orders per Django,
        and finally applies offset/limit slicing in Python.
        """
        assert not self.queries[
            0]._Query__ancestor_pb  #FIXME: We don't handle this yet

        # FIXME: What if the query options differ?
        opts = self.queries[0]._Query__query_options

        results = None

        # If we have a single key lookup going on, just hit the cache
        if len(self.queries_by_key) == 1:
            keys = self.queries_by_key.keys()
            ret = caching.get_from_cache_by_key(keys[0])
            if ret is not None:
                results = [ret]

        # If there was nothing in the cache, or we had more than one key, then use Get()
        if results is None:
            keys = self.queries_by_key.keys()
            results = datastore.Get(keys)
            # Cache everything that exists before filtering/ordering
            for result in results:
                if result is None:
                    continue
                caching.add_entity_to_cache(
                    self.model, result, caching.CachingSituation.DATASTORE_GET)
            results = sorted((x for x in results if x is not None),
                             cmp=partial(utils.django_ordering_comparison,
                                         self.ordering))

        # Keep only entities that still match at least one query that
        # produced their key (eventual consistency), converting per the
        # query options (projection/keys-only)
        results = [
            _convert_entity_based_on_query_options(x, opts) for x in results
            if any([
                utils.entity_matches_query(x, qry)
                for qry in self.queries_by_key[x.key()]
            ])
        ]

        if offset:
            results = results[offset:]

        if limit is not None:
            results = results[:limit]

        return iter(results)
Example #12
0
        def iter_results(results):
            """Yield entities from `results` in Django ordering, applying
            offset/limit slicing manually.

            NOTE(review): `cache_results`, `max_cache_count`,
            `is_projection`, `offset`, `limit` and `opts` are closure
            variables from the enclosing function, which isn't visible in
            this chunk — confirm their semantics against the caller.
            """
            # `returned` counts entities consumed (both offset-skipped and
            # yielded), hence the `(offset or 0) + limit` stop condition.
            returned = 0
            # This is safe, because Django is fetching all results any way :(
            sorted_results = sorted(results,
                                    cmp=partial(
                                        utils.django_ordering_comparison,
                                        self.ordering))
            sorted_results = [
                result for result in sorted_results if result is not None
            ]
            if cache_results and sorted_results:
                caching.add_entities_to_cache(
                    self.model,
                    sorted_results[:max_cache_count],
                    caching.CachingSituation.DATASTORE_GET,
                    self.namespace,
                )

            for result in sorted_results:
                # Projection results are assumed to match, presumably
                # because they lack the full property set needed for
                # re-matching — TODO confirm against entity_matches_query
                if is_projection:
                    entity_matches_query = True
                else:
                    entity_matches_query = any(
                        utils.entity_matches_query(result, qry)
                        for qry in self.queries_by_key[result.key()])

                if not entity_matches_query:
                    continue

                if offset and returned < offset:
                    # Skip entities based on offset
                    returned += 1
                    continue
                else:

                    yield _convert_entity_based_on_query_options(result, opts)

                    returned += 1

                    # If there is a limit, we might be done!
                    if limit is not None and returned == (offset or 0) + limit:
                        break
Example #13
0
    def Run(self, limit=None, offset=None):
        """Run this multi-query by fetching its keys directly.

        Serves single-key lookups from the cache when possible, batch
        Gets the rest (caching whatever comes back), then filters,
        orders and slices the results in Python.
        """
        assert not self.queries[0]._Query__ancestor_pb  # FIXME: We don't handle this yet

        # FIXME: What if the query options differ?
        opts = self.queries[0]._Query__query_options

        entities = None

        # Single key lookup? The cache may save us a datastore hit
        if len(self.queries_by_key) == 1:
            key_list = self.queries_by_key.keys()
            cached = caching.get_from_cache_by_key(key_list[0])
            if cached is not None:
                entities = [cached]

        if entities is None:
            # Cache miss (or several keys): batch Get(), cache every
            # entity that exists, then order them per the queryset
            key_list = self.queries_by_key.keys()
            entities = datastore.Get(key_list)
            for entity in entities:
                if entity is None:
                    continue
                caching.add_entity_to_cache(self.model, entity, caching.CachingSituation.DATASTORE_GET)
            comparator = partial(utils.django_ordering_comparison, self.ordering)
            entities = sorted((e for e in entities if e is not None), cmp=comparator)

        # Keep only entities that match at least one of the queries
        # that produced their key, converting per the query options
        entities = [
            _convert_entity_based_on_query_options(e, opts)
            for e in entities
            if any([utils.entity_matches_query(e, qry) for qry in self.queries_by_key[e.key()]])
        ]

        if offset:
            entities = entities[offset:]

        if limit is not None:
            entities = entities[:limit]

        return iter(entities)
Example #14
0
        def txn():
            """Transactionally apply the update to the entity at `key`.

            Returns True on success, False if the entity has gone or no
            longer matches the selecting query (eventual consistency).

            NOTE(review): `key`, `markers_to_acquire`, `markers_to_release`
            and `rollback_markers` are closure variables from the enclosing
            function, which isn't visible in this chunk. The marker lists
            are mutated in place (slice assignment) so the caller can roll
            them back if the transaction fails to apply.
            """
            # Evict the cached copy first so nothing reads stale data
            caching.remove_entities_from_cache_by_key([key], self.namespace)

            try:
                result = datastore.Get(key)
            except datastore_errors.EntityNotFoundError:
                # Return false to indicate update failure
                return False

            if (isinstance(
                    self.select.gae_query,
                (Query, UniqueQuery))  # ignore QueryByKeys and NoOpQuery
                    and not utils.entity_matches_query(result,
                                                       self.select.gae_query)):
                # Due to eventual consistency the query may have returned an entity which no longer
                # matches the query
                return False

            # Pristine copy used to diff unique-constraint markers below
            original = copy.deepcopy(result)

            instance_kwargs = {
                field.attname: value
                for field, param, value in self.values
            }

            # Note: If you replace MockInstance with self.model, you'll find that some delete
            # tests fail in the test app. This is because any unspecified fields would then call
            # get_default (even though we aren't going to use them) which may run a query which
            # fails inside this transaction. Given as we are just using MockInstance so that we can
            # call django_instance_to_entity it on it with the subset of fields we pass in,
            # what we have is fine.
            meta = self.model._meta
            instance = MockInstance(_original=MockInstance(_meta=meta,
                                                           **result),
                                    _meta=meta,
                                    **instance_kwargs)

            # We need to add to the class attribute, rather than replace it!
            original_class = result.get(POLYMODEL_CLASS_ATTRIBUTE, [])

            # Update the entity we read above with the new values
            result.update(
                django_instance_to_entity(
                    self.connection,
                    self.model,
                    [x[0] for x in self.values
                     ],  # Pass in the fields that were updated
                    True,
                    instance))

            # Make sure we keep all classes in the inheritance tree!
            if original_class:
                if result[POLYMODEL_CLASS_ATTRIBUTE] is not None:
                    result[POLYMODEL_CLASS_ATTRIBUTE].extend(original_class)
                    # Make sure we don't add duplicates
                else:
                    result[POLYMODEL_CLASS_ATTRIBUTE] = original_class

            # De-duplicate the polymodel class list
            if POLYMODEL_CLASS_ATTRIBUTE in result:
                result[POLYMODEL_CLASS_ATTRIBUTE] = list(
                    set(result[POLYMODEL_CLASS_ATTRIBUTE]))

            if not constraints.has_active_unique_constraints(self.model):
                # The fast path, no constraint checking
                datastore.Put(result)
                caching.add_entities_to_cache(
                    self.model,
                    [result],
                    caching.CachingSituation.DATASTORE_PUT,
                    self.namespace,
                    skip_memcache=True,
                )
            else:
                markers_to_acquire[:], markers_to_release[:] = constraints.get_markers_for_update(
                    self.model, original, result)
                datastore.Put(result)

                constraints.update_identifiers(markers_to_acquire,
                                               markers_to_release,
                                               result.key())

                # If the datastore.Put() fails then the exception will only be raised when the
                # transaction applies, which means that we will still get to here and will still have
                # applied the marker changes (because they're in a nested, independent transaction).
                # Hence we set this flag to tell us that we got this far and that we should roll them back.
                rollback_markers[0] = True
                # If something dies between here and the `return` statement then we'll have stale unique markers

                try:
                    # Update the cache before dealing with unique markers, as CachingSituation.DATASTORE_PUT
                    # will only update the context cache
                    caching.add_entities_to_cache(
                        self.model,
                        [result],
                        caching.CachingSituation.DATASTORE_PUT,
                        self.namespace,
                        skip_memcache=True,
                    )
                except:
                    # We ignore the exception because raising will rollback the transaction causing
                    # an inconsistent state
                    logging.exception("Unable to update the context cache")
                    pass

            # Return true to indicate update success
            return True
Example #15
0
        def txn():
            """Transactionally apply the update to the entity at `key`.

            Returns True on success, False if the entity has gone or no
            longer matches the selecting query (eventual consistency).

            NOTE(review): `key`, `markers_to_acquire`, `markers_to_release`
            and `rollback_markers` are closure variables from the enclosing
            function, which isn't visible in this chunk. The marker lists
            are mutated in place (slice assignment) so the caller can roll
            them back if the transaction fails to apply.
            """
            # Evict the cached copy first so nothing reads stale data
            caching.remove_entities_from_cache_by_key([key], self.namespace)

            try:
                result = datastore.Get(key)
            except datastore_errors.EntityNotFoundError:
                # Return false to indicate update failure
                return False

            if (
                isinstance(self.select.gae_query, (Query, UniqueQuery)) # ignore QueryByKeys and NoOpQuery
                and not utils.entity_matches_query(result, self.select.gae_query)
            ):
                # Due to eventual consistency the query may have returned an entity which no longer
                # matches the query
                return False

            # Pristine copy used to diff unique-constraint markers below
            original = copy.deepcopy(result)

            instance_kwargs = {field.attname:value for field, param, value in self.values}

            # Note: If you replace MockInstance with self.model, you'll find that some delete
            # tests fail in the test app. This is because any unspecified fields would then call
            # get_default (even though we aren't going to use them) which may run a query which
            # fails inside this transaction. Given as we are just using MockInstance so that we can
            # call django_instance_to_entity it on it with the subset of fields we pass in,
            # what we have is fine.
            meta = self.model._meta
            instance = MockInstance(
                _original=MockInstance(_meta=meta, **result),
                _meta=meta,
                **instance_kwargs
            )

            # We need to add to the class attribute, rather than replace it!
            original_class = result.get(POLYMODEL_CLASS_ATTRIBUTE, [])

            # Update the entity we read above with the new values
            result.update(django_instance_to_entity(
                self.connection, self.model,
                [ x[0] for x in self.values],  # Pass in the fields that were updated
                True, instance)
            )

            # Make sure we keep all classes in the inheritance tree!
            if original_class:
                if result[POLYMODEL_CLASS_ATTRIBUTE] is not None:
                    result[POLYMODEL_CLASS_ATTRIBUTE].extend(original_class)
                    # Make sure we don't add duplicates
                else:
                    result[POLYMODEL_CLASS_ATTRIBUTE] = original_class

            # De-duplicate the polymodel class list
            if POLYMODEL_CLASS_ATTRIBUTE in result:
                result[POLYMODEL_CLASS_ATTRIBUTE] = list(set(result[POLYMODEL_CLASS_ATTRIBUTE]))

            if not constraints.has_active_unique_constraints(self.model):
                # The fast path, no constraint checking
                datastore.Put(result)
                caching.add_entities_to_cache(
                    self.model,
                    [result],
                    caching.CachingSituation.DATASTORE_PUT,
                    self.namespace,
                    skip_memcache=True,
                )
            else:
                markers_to_acquire[:], markers_to_release[:] = constraints.get_markers_for_update(
                    self.model, original, result
                )
                datastore.Put(result)

                constraints.update_identifiers(markers_to_acquire, markers_to_release, result.key())

                # If the datastore.Put() fails then the exception will only be raised when the
                # transaction applies, which means that we will still get to here and will still have
                # applied the marker changes (because they're in a nested, independent transaction).
                # Hence we set this flag to tell us that we got this far and that we should roll them back.
                rollback_markers[0] = True
                # If something dies between here and the `return` statement then we'll have stale unique markers

                try:
                    # Update the cache before dealing with unique markers, as CachingSituation.DATASTORE_PUT
                    # will only update the context cache
                    caching.add_entities_to_cache(
                        self.model,
                        [result],
                        caching.CachingSituation.DATASTORE_PUT,
                        self.namespace,
                        skip_memcache=True,
                    )
                except:
                    # We ignore the exception because raising will rollback the transaction causing
                    # an inconsistent state
                    logger.exception("Unable to update the context cache")
                    pass

            # Return true to indicate update success
            return True
Example #16
0
        def txn():
            """Transactionally apply the update to the entity at `key`,
            including any descendent entities produced by
            django_instance_to_entities.

            Returns True on success, False if the entity has gone or no
            longer matches the selecting query (eventual consistency).

            NOTE(review): `key`, `markers_to_acquire`, `markers_to_release`
            and `rollback_markers` are closure variables from the enclosing
            function, which isn't visible in this chunk. The marker lists
            are mutated in place (slice assignment) so the caller can roll
            them back if the transaction fails to apply.
            """
            # Evict the cached copy first so nothing reads stale data
            caching.remove_entities_from_cache_by_key([key], self.namespace)

            try:
                result = rpc.Get(key)
            except datastore_errors.EntityNotFoundError:
                # Return false to indicate update failure
                return False

            if (
                isinstance(self.select.gae_query, (Query, meta_queries.UniqueQuery)) # ignore QueryByKeys and NoOpQuery
                and not utils.entity_matches_query(result, self.select.gae_query)
            ):
                # Due to eventual consistency the query may have returned an entity which no longer
                # matches the query
                return False

            # Pristine copy used to diff unique-constraint markers below
            original = copy.deepcopy(result)

            instance_kwargs = {field.attname: value for field, param, value in self.values}

            # Note: If you replace MockInstance with self.model, you'll find that some delete
            # tests fail in the test app. This is because any unspecified fields would then call
            # get_default (even though we aren't going to use them) which may run a query which
            # fails inside this transaction. Given as we are just using MockInstance so that we can
            # call django_instance_to_entities it on it with the subset of fields we pass in,
            # what we have is fine.
            meta = self.model._meta
            instance = MockInstance(
                _original=MockInstance(_meta=meta, **result),
                _meta=meta,
                **instance_kwargs
            )

            # Convert the instance to an entity
            primary, descendents = django_instance_to_entities(
                self.connection,
                [x[0] for x in self.values],  # Pass in the fields that were updated
                True, instance,
                model=self.model
            )

            # Update the entity we read above with the new values
            result.update(primary)

            # Remove fields which have been marked to be unindexed
            for col in getattr(primary, "_properties_to_remove", []):
                if col in result:
                    del result[col]

            # Make sure that any polymodel classes which were in the original entity are kept,
            # as django_instance_to_entities may have wiped them as well as added them.
            polymodel_classes = list(set(
                original.get(POLYMODEL_CLASS_ATTRIBUTE, []) + result.get(POLYMODEL_CLASS_ATTRIBUTE, [])
            ))
            if polymodel_classes:
                result[POLYMODEL_CLASS_ATTRIBUTE] = polymodel_classes

            def perform_insert():
                """
                    Inserts result, and any descendents with their ancestor
                    value set
                """
                inserted_key = rpc.Put(result)
                if descendents:
                    # Rebuild each descendent so its key has the freshly
                    # inserted entity as parent, then batch Put them
                    for i, descendent in enumerate(descendents):
                        descendents[i] = Entity(
                            descendent.kind(),
                            parent=inserted_key,
                            namespace=inserted_key.namespace(),
                            id=descendent.key().id() or None,
                            name=descendent.key().name() or None
                        )
                        descendents[i].update(descendent)
                    rpc.Put(descendents)

            if not constraints.has_active_unique_constraints(self.model):
                # The fast path, no constraint checking
                perform_insert()

                caching.add_entities_to_cache(
                    self.model,
                    [result],
                    caching.CachingSituation.DATASTORE_PUT,
                    self.namespace,
                    skip_memcache=True,
                )
            else:
                markers_to_acquire[:], markers_to_release[:] = constraints.get_markers_for_update(
                    self.model, original, result
                )

                perform_insert()

                constraints.update_identifiers(markers_to_acquire, markers_to_release, result.key())

                # If the rpc.Put() fails then the exception will only be raised when the
                # transaction applies, which means that we will still get to here and will still have
                # applied the marker changes (because they're in a nested, independent transaction).
                # Hence we set this flag to tell us that we got this far and that we should roll them back.
                rollback_markers[0] = True
                # If something dies between here and the `return` statement then we'll have stale unique markers

                try:
                    # Update the cache before dealing with unique markers, as CachingSituation.DATASTORE_PUT
                    # will only update the context cache
                    caching.add_entities_to_cache(
                        self.model,
                        [result],
                        caching.CachingSituation.DATASTORE_PUT,
                        self.namespace,
                        skip_memcache=True,
                    )
                except:
                    # We ignore the exception because raising will rollback the transaction causing
                    # an inconsistent state
                    logger.exception("Unable to update the context cache")
                    pass

            # Return true to indicate update success
            return True
Example #17
0
    def _update_entity(self, key):
        """Re-fetch the entity for `key`, apply the new field values and
        save it, keeping the cache, polymodel class list and
        unique-constraint markers in sync.

        Returns True on success, False if the entity has vanished or no
        longer matches the query that selected it (eventual consistency).
        """
        # Evict any cached copy first so nothing reads the stale entity
        caching.remove_entity_from_cache_by_key(key)

        try:
            result = datastore.Get(key)
        except datastore_errors.EntityNotFoundError:
            # Return false to indicate update failure
            return False

        if (isinstance(
                self.select.gae_query,
            (Query, UniqueQuery))  # ignore QueryByKeys and NoOpQuery
                and
                not utils.entity_matches_query(result, self.select.gae_query)):
            # Due to eventual consistency the query may have returned an entity which no longer
            # matches the query
            return False

        # Pristine copy used to diff unique-constraint markers below
        original = copy.deepcopy(result)

        instance_kwargs = {
            field.attname: value
            for field, param, value in self.values
        }

        # Note: If you replace MockInstance with self.model, you'll find that some delete
        # tests fail in the test app. This is because any unspecified fields would then call
        # get_default (even though we aren't going to use them) which may run a query which
        # fails inside this transaction. Given as we are just using MockInstance so that we can
        # call django_instance_to_entity it on it with the subset of fields we pass in,
        # what we have is fine.
        instance = MockInstance(**instance_kwargs)

        # We need to add to the class attribute, rather than replace it!
        original_class = result.get(POLYMODEL_CLASS_ATTRIBUTE, [])

        # Update the entity we read above with the new values
        result.update(
            django_instance_to_entity(
                self.connection,
                self.model,
                [x[0]
                 for x in self.values],  # Pass in the fields that were updated
                True,
                instance))

        # Make sure we keep all classes in the inheritance tree!
        if original_class:
            if result[POLYMODEL_CLASS_ATTRIBUTE] is not None:
                result[POLYMODEL_CLASS_ATTRIBUTE].extend(original_class)
                # Make sure we don't add duplicates
            else:
                result[POLYMODEL_CLASS_ATTRIBUTE] = original_class

        # De-duplicate the polymodel class list
        if POLYMODEL_CLASS_ATTRIBUTE in result:
            result[POLYMODEL_CLASS_ATTRIBUTE] = list(
                set(result[POLYMODEL_CLASS_ATTRIBUTE]))

        if not constraints.constraint_checks_enabled(self.model):
            # The fast path, no constraint checking
            datastore.Put(result)
            caching.add_entity_to_cache(self.model, result,
                                        caching.CachingSituation.DATASTORE_PUT)
        else:
            to_acquire, to_release = constraints.get_markers_for_update(
                self.model, original, result)

            # Acquire first, because if that fails then we don't want to alter what's already there
            constraints.acquire_identifiers(to_acquire, result.key())
            try:
                datastore.Put(result)
                caching.add_entity_to_cache(
                    self.model, result, caching.CachingSituation.DATASTORE_PUT)
            except:
                # Put (or caching) failed: undo the marker acquisition
                constraints.release_identifiers(to_acquire)
                raise
            else:
                # Now we release the ones we don't want anymore
                constraints.release_identifiers(to_release)

        # Return true to indicate update success
        return True