Example No. 1
    def __init__(self, connection, model, objs, fields, raw):
        self.has_pk = any([x.primary_key for x in fields])
        self.entities = []
        self.included_keys = []
        self.model = model

        for obj in objs:
            if self.has_pk:
                # We must convert the PK value here, even though this normally happens in django_instance_to_entity;
                # otherwise custom PK fields don't work properly
                value = model._meta.pk.get_db_prep_save(
                    model._meta.pk.pre_save(obj, True), connection)
                self.included_keys.append(
                    get_datastore_key(model, value) if value else None)
                if not self.model._meta.pk.blank and self.included_keys[-1] is None:
                    raise IntegrityError(
                        "You must specify a primary key value for {} instances"
                        .format(model))
            else:
                # We zip() self.entities and self.included_keys in execute(), so they should be the same length
                self.included_keys.append(None)

            self.entities.append(
                django_instance_to_entity(connection, model, fields, raw, obj))
Example No. 2
    def map(entity, model, *args, **kwargs):
        """ The Clean mapper maps over all UniqueMarker instances. """

        model = decode_model(model)

        if not entity.key().id_or_name().startswith(model._meta.db_table + "|"):
            # Only include markers which are for this model
            return

        with disable_cache():
            # At this point, the entity is a unique marker that is linked to an instance of 'model'; now we should see if that instance exists!
            instance_id = entity["instance"].id_or_name()
            try:
                instance = model.objects.get(pk=instance_id)
            except model.DoesNotExist:
                logging.info("Deleting unique marker {} because the associated instance no longer exists".format(entity.key().id_or_name()))
                datastore.Delete(entity)
                return

            # Get the possible unique markers for the entity, if this one doesn't exist in that list then delete it
            instance = django_instance_to_entity(connection, model, instance._meta.fields, raw=True, instance=instance, check_null=False)
            identifiers = unique_identifiers_from_entity(model, instance, ignore_pk=True)
            identifier_keys = [datastore.Key.from_path(UniqueMarker.kind(), i) for i in identifiers]
            if entity.key() not in identifier_keys:
                logging.info("Deleting unique marker {} because it no longer represents the associated instance state".format(entity.key().id_or_name()))
                datastore.Delete(entity)
Example No. 3
    def map(entity, model, *args, **kwargs):
        """ The Clean mapper maps over all UniqueMarker instances. """

        alias = kwargs.get("db", "default")
        namespace = settings.DATABASES.get(alias, {}).get("NAMESPACE")

        model = decode_model(model)
        if not entity.key().id_or_name().startswith(model._meta.db_table + "|"):
            # Only include markers which are for this model
            return

        assert namespace == entity.namespace()
        with disable_cache():
            # At this point, the entity is a unique marker that is linked to an instance of 'model'; now we should see if that instance exists!
            instance_id = entity["instance"].id_or_name()
            try:
                instance = model.objects.using(alias).get(pk=instance_id)
            except model.DoesNotExist:
                logging.info("Deleting unique marker {} because the associated instance no longer exists".format(entity.key().id_or_name()))
                datastore.Delete(entity)
                return

            # Get the possible unique markers for the entity, if this one doesn't exist in that list then delete it
            instance_entity = django_instance_to_entity(connections[alias], model, instance._meta.fields, raw=True, instance=instance, check_null=False)
            identifiers = unique_identifiers_from_entity(model, instance_entity, ignore_pk=True)
            identifier_keys = [datastore.Key.from_path(UniqueMarker.kind(), i, namespace=entity["instance"].namespace()) for i in identifiers]
            if entity.key() not in identifier_keys:
                logging.info("Deleting unique marker {} because it no longer represents the associated instance state".format(entity.key().id_or_name()))
                datastore.Delete(entity)
Example No. 4
    def _update_entity(self, key):
        caching.remove_entity_from_cache_by_key(key)

        try:
            result = datastore.Get(key)
        except datastore_errors.EntityNotFoundError:
            # Return false to indicate update failure
            return False

        original = copy.deepcopy(result)

        instance_kwargs = {
            field.attname: value
            for field, param, value in self.values
        }

        # Note: If you replace MockInstance with self.model, you'll find that some delete
        # tests fail in the test app. This is because any unspecified fields would then call
        # get_default (even though we aren't going to use them) which may run a query which
        # fails inside this transaction. Given that we are just using MockInstance so that we can
        # call django_instance_to_entity on it with the subset of fields we pass in,
        # what we have is fine.
        instance = MockInstance(**instance_kwargs)

        # Update the entity we read above with the new values
        result.update(
            django_instance_to_entity(
                self.connection,
                self.model,
                [x[0] for x in self.values],  # Pass in the fields that were updated
                True,
                instance))

        if not constraints.constraint_checks_enabled(self.model):
            # The fast path, no constraint checking
            datastore.Put(result)
            caching.add_entity_to_cache(self.model, result,
                                        caching.CachingSituation.DATASTORE_PUT)
        else:
            to_acquire, to_release = constraints.get_markers_for_update(
                self.model, original, result)

            # Acquire first, because if that fails then we don't want to alter what's already there
            constraints.acquire_identifiers(to_acquire, result.key())
            try:
                datastore.Put(result)
                caching.add_entity_to_cache(
                    self.model, result, caching.CachingSituation.DATASTORE_PUT)
            except:
                constraints.release_identifiers(to_acquire)
                raise
            else:
                # Now we release the ones we don't want anymore
                constraints.release_identifiers(to_release)

        # Return true to indicate update success
        return True
Example No. 5
    def _update_entity(self, key):
        caching.remove_entity_from_cache_by_key(key)

        try:
            result = datastore.Get(key)
        except datastore_errors.EntityNotFoundError:
            # Return false to indicate update failure
            return False

        if (
            isinstance(self.select.gae_query, (Query, UniqueQuery)) # ignore QueryByKeys and NoOpQuery
            and not utils.entity_matches_query(result, self.select.gae_query)
        ):
            # Due to eventual consistency the query may have returned an entity which no longer
            # matches the query
            return False

        original = copy.deepcopy(result)

        instance_kwargs = {field.attname:value for field, param, value in self.values}

        # Note: If you replace MockInstance with self.model, you'll find that some delete
        # tests fail in the test app. This is because any unspecified fields would then call
        # get_default (even though we aren't going to use them) which may run a query which
        # fails inside this transaction. Given that we are just using MockInstance so that we can
        # call django_instance_to_entity on it with the subset of fields we pass in,
        # what we have is fine.
        instance = MockInstance(**instance_kwargs)

        # Update the entity we read above with the new values
        result.update(django_instance_to_entity(
            self.connection, self.model,
            [x[0] for x in self.values],  # Pass in the fields that were updated
            True, instance)
        )

        if not constraints.constraint_checks_enabled(self.model):
            # The fast path, no constraint checking
            datastore.Put(result)
            caching.add_entity_to_cache(self.model, result, caching.CachingSituation.DATASTORE_PUT)
        else:
            to_acquire, to_release = constraints.get_markers_for_update(self.model, original, result)

            # Acquire first, because if that fails then we don't want to alter what's already there
            constraints.acquire_identifiers(to_acquire, result.key())
            try:
                datastore.Put(result)
                caching.add_entity_to_cache(self.model, result, caching.CachingSituation.DATASTORE_PUT)
            except:
                constraints.release_identifiers(to_acquire)
                raise
            else:
                # Now we release the ones we don't want anymore
                constraints.release_identifiers(to_release)

        # Return true to indicate update success
        return True
Example No. 6
    def map(instance, *args, **kwargs):
        """ Figure out what markers the instance should use and verify they're attached to
        this instance. Log any weirdness and, in repair mode, recreate missing markers. """
        action_id = kwargs.get("action_pk")
        repair = kwargs.get("repair")

        alias = kwargs.get("db", "default")
        namespace = settings.DATABASES.get(alias, {}).get("NAMESPACE")
        assert alias == (instance._state.db or "default")
        entity = django_instance_to_entity(connections[alias], type(instance), instance._meta.fields, raw=True, instance=instance, check_null=False)
        identifiers = unique_identifiers_from_entity(type(instance), entity, ignore_pk=True)
        identifier_keys = [datastore.Key.from_path(UniqueMarker.kind(), i, namespace=namespace) for i in identifiers]

        markers = datastore.Get(identifier_keys)
        instance_key = str(entity.key())

        markers_to_save = []

        for i, m in zip(identifier_keys, markers):
            marker_key = str(i)
            if m is None:
                # Missing marker
                if repair:
                    new_marker = datastore.Entity(UniqueMarker.kind(), name=i.name(), namespace=namespace)
                    new_marker['instance'] = entity.key()
                    new_marker['created'] = datetime.datetime.now()
                    markers_to_save.append(new_marker)
                else:
                    log(action_id, "missing_marker", instance_key, marker_key)

            elif 'instance' not in m or not m['instance']:
                # Marker with a missing instance attribute
                if repair:
                    m['instance'] = entity.key()
                    markers_to_save.append(m)
                else:
                    log(action_id, "missing_instance", instance_key, marker_key)

            elif m['instance'] != entity.key():

                if isinstance(m['instance'], basestring):
                    m['instance'] = datastore.Key(m['instance'])

                    if repair:
                        markers_to_save.append(m)
                    else:
                        log(action_id, "old_instance_key", instance_key, marker_key)

                if m['instance'] != entity.key():
                    # Marker already assigned to a different instance
                    log(action_id, "already_assigned", instance_key, marker_key)
                    # Also log in repair mode, as repairing would break the other instance.

        if markers_to_save:
            datastore.Put(markers_to_save)
Example No. 7
    def map(instance, *args, **kwargs):
        """ Figure out what markers the instance should use and verify they're attached to
        this instance. Log any weirdness and, in repair mode, recreate missing markers. """
        action_id = kwargs.get("action_pk")
        repair = kwargs.get("repair")

        alias = kwargs.get("db", "default")
        namespace = settings.DATABASES.get(alias, {}).get("NAMESPACE")
        assert alias == (instance._state.db or "default")
        entity = django_instance_to_entity(connections[alias], type(instance), instance._meta.fields, raw=True, instance=instance, check_null=False)
        identifiers = unique_identifiers_from_entity(type(instance), entity, ignore_pk=True)
        identifier_keys = [datastore.Key.from_path(UniqueMarker.kind(), i, namespace=namespace) for i in identifiers]

        markers = datastore.Get(identifier_keys)
        instance_key = str(entity.key())

        markers_to_save = []

        for i, m in zip(identifier_keys, markers):
            marker_key = str(i)
            if m is None:
                # Missing marker
                if repair:
                    new_marker = datastore.Entity(UniqueMarker.kind(), name=i.name(), namespace=namespace)
                    new_marker['instance'] = entity.key()
                    new_marker['created'] = datetime.datetime.now()
                    markers_to_save.append(new_marker)
                else:
                    log(action_id, "missing_marker", instance_key, marker_key)

            elif 'instance' not in m or not m['instance']:
                # Marker with a missing instance attribute
                if repair:
                    m['instance'] = entity.key()
                    markers_to_save.append(m)
                else:
                    log(action_id, "missing_instance", instance_key, marker_key)

            elif m['instance'] != entity.key():

                if isinstance(m['instance'], basestring):
                    m['instance'] = datastore.Key(m['instance'])

                    if repair:
                        markers_to_save.append(m)
                    else:
                        log(action_id, "old_instance_key", instance_key, marker_key)

                if m['instance'] != entity.key():
                    # Marker already assigned to a different instance
                    log(action_id, "already_assigned", instance_key, marker_key)
                    # Also log in repair mode, as repairing would break the other instance.

        if markers_to_save:
            datastore.Put(markers_to_save)
Example No. 8
    def __init__(self, connection, model, objs, fields, raw):
        self.has_pk = any(x.primary_key for x in fields)
        self.model = model
        self.objs = objs
        self.connection = connection
        self.namespace = connection.ops.connection.settings_dict.get(
            "NAMESPACE")
        self.raw = raw
        self.fields = fields

        self.entities = []
        self.included_keys = []

        for obj in self.objs:
            if self.has_pk:
                # We must convert the PK value here, even though this normally happens in django_instance_to_entity;
                # custom PK fields don't work properly
                value = self.model._meta.pk.get_db_prep_save(
                    self.model._meta.pk.pre_save(obj, True), self.connection)
                self.included_keys.append(
                    get_datastore_key(self.model, value, self.namespace)
                    if value else None
                )

                if value == 0:
                    raise IntegrityError(
                        "The datastore doesn't support 0 as a key value")

                if not self.model._meta.pk.blank and self.included_keys[-1] is None:
                    raise IntegrityError(
                        "You must specify a primary key value for {} instances"
                        .format(self.model))
            else:
                # We zip() self.entities and self.included_keys in execute(), so they should be the same length
                self.included_keys.append(None)

            # We don't use the values returned, but this does make sure we're
            # doing the same validation as Django. See issue #493 for an
            # example of how not doing this can mess things up
            for field in fields:
                field.get_db_prep_save(
                    getattr(obj, field.attname) if raw else field.pre_save(
                        obj, True),
                    connection=connection,
                )

            self.entities.append(
                django_instance_to_entity(self.connection, self.model,
                                          self.fields, self.raw, obj))
Example No. 9
    def __init__(self, connection, model, objs, fields, raw):
        self.has_pk = any([x.primary_key for x in fields])
        self.entities = []
        self.included_keys = []
        self.model = model

        for obj in objs:
            if self.has_pk:
                self.included_keys.append(get_datastore_key(model, obj.pk))
            else:
                # We zip() self.entities and self.included_keys in execute(), so they should be the same length
                self.included_keys.append(None)

            self.entities.append(
                django_instance_to_entity(connection, model, fields, raw, obj)
            )
Example No. 10
    def __init__(self, connection, model, objs, fields, raw):
        self.has_pk = any(x.primary_key for x in fields)
        self.model = model
        self.objs = objs
        self.connection = connection
        self.namespace = connection.ops.connection.settings_dict.get("NAMESPACE")
        self.raw = raw
        self.fields = fields

        self.entities = []
        self.included_keys = []

        for obj in self.objs:
            if self.has_pk:
                # We must convert the PK value here, even though this normally happens in django_instance_to_entity;
                # custom PK fields don't work properly
                value = self.model._meta.pk.get_db_prep_save(
                    self.model._meta.pk.pre_save(obj, True),
                    self.connection
                )
                self.included_keys.append(
                    get_datastore_key(self.model, value, self.namespace)
                    if value else None
                )

                if value == 0:
                    raise IntegrityError("The datastore doesn't support 0 as a key value")

                if not self.model._meta.pk.blank and self.included_keys[-1] is None:
                    raise IntegrityError("You must specify a primary key value for {} instances".format(self.model))
            else:
                # We zip() self.entities and self.included_keys in execute(), so they should be the same length
                self.included_keys.append(None)

            # We don't use the values returned, but this does make sure we're
            # doing the same validation as Django. See issue #493 for an
            # example of how not doing this can mess things up
            for field in fields:
                field.get_db_prep_save(
                    getattr(obj, field.attname) if raw else field.pre_save(obj, True),
                    connection=connection,
                )

            self.entities.append(
                django_instance_to_entity(self.connection, self.model, self.fields, self.raw, obj)
            )
Example No. 11
    def __init__(self, connection, model, objs, fields, raw):
        self.has_pk = any([x.primary_key for x in fields])
        self.entities = []
        self.included_keys = []
        self.model = model

        for obj in objs:
            if self.has_pk:
                # FIXME: Apparently, if the PK is required and obj.pk is None here, we need to raise an IntegrityError
                self.included_keys.append(get_datastore_key(model, obj.pk) if obj.pk else None)
            else:
                # We zip() self.entities and self.included_keys in execute(), so they should be the same length
                self.included_keys.append(None)

            self.entities.append(
                django_instance_to_entity(connection, model, fields, raw, obj)
            )
Example No. 12
    def __init__(self, connection, model, objs, fields, raw):
        self.has_pk = any([x.primary_key for x in fields])
        self.entities = []
        self.included_keys = []
        self.model = model

        for obj in objs:
            if self.has_pk:
                # We must convert the PK value here, even though this normally happens in django_instance_to_entity;
                # custom PK fields don't work properly
                value = model._meta.pk.get_db_prep_save(model._meta.pk.pre_save(obj, True), connection)
                self.included_keys.append(get_datastore_key(model, value) if value else None)
                if not self.model._meta.pk.blank and self.included_keys[-1] is None:
                    raise IntegrityError("You must specify a primary key value for {} instances".format(model))
            else:
                # We zip() self.entities and self.included_keys in execute(), so they should be the same length
                self.included_keys.append(None)

            self.entities.append(django_instance_to_entity(connection, model, fields, raw, obj))
Example No. 13
    def _update_entity(self, key):
        caching.remove_entity_from_cache_by_key(key)

        try:
            result = datastore.Get(key)
        except datastore_errors.EntityNotFoundError:
            # Return false to indicate update failure
            return False

        if (
            isinstance(self.select.gae_query, (Query, UniqueQuery))  # ignore QueryByKeys and NoOpQuery
            and not utils.entity_matches_query(result, self.select.gae_query)
        ):
            # Due to eventual consistency the query may have returned an entity which no longer
            # matches the query
            return False

        original = copy.deepcopy(result)

        instance_kwargs = {
            field.attname: value
            for field, param, value in self.values
        }

        # Note: If you replace MockInstance with self.model, you'll find that some delete
        # tests fail in the test app. This is because any unspecified fields would then call
        # get_default (even though we aren't going to use them) which may run a query which
        # fails inside this transaction. Given that we are just using MockInstance so that we can
        # call django_instance_to_entity on it with the subset of fields we pass in,
        # what we have is fine.
        instance = MockInstance(**instance_kwargs)

        # We need to add to the class attribute, rather than replace it!
        original_class = result.get(POLYMODEL_CLASS_ATTRIBUTE, [])

        # Update the entity we read above with the new values
        result.update(
            django_instance_to_entity(
                self.connection,
                self.model,
                [x[0] for x in self.values],  # Pass in the fields that were updated
                True,
                instance))

        # Make sure we keep all classes in the inheritance tree!
        if original_class:
            if result[POLYMODEL_CLASS_ATTRIBUTE] is not None:
                result[POLYMODEL_CLASS_ATTRIBUTE].extend(original_class)
                # Make sure we don't add duplicates
            else:
                result[POLYMODEL_CLASS_ATTRIBUTE] = original_class

        if POLYMODEL_CLASS_ATTRIBUTE in result:
            result[POLYMODEL_CLASS_ATTRIBUTE] = list(
                set(result[POLYMODEL_CLASS_ATTRIBUTE]))

        if not constraints.constraint_checks_enabled(self.model):
            # The fast path, no constraint checking
            datastore.Put(result)
            caching.add_entity_to_cache(self.model, result,
                                        caching.CachingSituation.DATASTORE_PUT)
        else:
            to_acquire, to_release = constraints.get_markers_for_update(
                self.model, original, result)

            # Acquire first, because if that fails then we don't want to alter what's already there
            constraints.acquire_identifiers(to_acquire, result.key())
            try:
                datastore.Put(result)
                caching.add_entity_to_cache(
                    self.model, result, caching.CachingSituation.DATASTORE_PUT)
            except:
                constraints.release_identifiers(to_acquire)
                raise
            else:
                # Now we release the ones we don't want anymore
                constraints.release_identifiers(to_release)

        # Return true to indicate update success
        return True
Example No. 14
        def txn():
            caching.remove_entities_from_cache_by_key([key], self.namespace)

            try:
                result = datastore.Get(key)
            except datastore_errors.EntityNotFoundError:
                # Return false to indicate update failure
                return False

            if (
                isinstance(self.select.gae_query, (Query, UniqueQuery)) # ignore QueryByKeys and NoOpQuery
                and not utils.entity_matches_query(result, self.select.gae_query)
            ):
                # Due to eventual consistency the query may have returned an entity which no longer
                # matches the query
                return False

            original = copy.deepcopy(result)

            instance_kwargs = {field.attname:value for field, param, value in self.values}

            # Note: If you replace MockInstance with self.model, you'll find that some delete
            # tests fail in the test app. This is because any unspecified fields would then call
            # get_default (even though we aren't going to use them) which may run a query which
            # fails inside this transaction. Given that we are just using MockInstance so that we can
            # call django_instance_to_entity on it with the subset of fields we pass in,
            # what we have is fine.
            meta = self.model._meta
            instance = MockInstance(
                _original=MockInstance(_meta=meta, **result),
                _meta=meta,
                **instance_kwargs
            )

            # We need to add to the class attribute, rather than replace it!
            original_class = result.get(POLYMODEL_CLASS_ATTRIBUTE, [])

            # Update the entity we read above with the new values
            result.update(django_instance_to_entity(
                self.connection, self.model,
                [x[0] for x in self.values],  # Pass in the fields that were updated
                True, instance)
            )

            # Make sure we keep all classes in the inheritance tree!
            if original_class:
                if result[POLYMODEL_CLASS_ATTRIBUTE] is not None:
                    result[POLYMODEL_CLASS_ATTRIBUTE].extend(original_class)
                    # Make sure we don't add duplicates
                else:
                    result[POLYMODEL_CLASS_ATTRIBUTE] = original_class

            if POLYMODEL_CLASS_ATTRIBUTE in result:
                result[POLYMODEL_CLASS_ATTRIBUTE] = list(set(result[POLYMODEL_CLASS_ATTRIBUTE]))

            if not constraints.has_active_unique_constraints(self.model):
                # The fast path, no constraint checking
                datastore.Put(result)
                caching.add_entities_to_cache(
                    self.model,
                    [result],
                    caching.CachingSituation.DATASTORE_PUT,
                    self.namespace,
                    skip_memcache=True,
                )
            else:
                markers_to_acquire[:], markers_to_release[:] = constraints.get_markers_for_update(
                    self.model, original, result
                )
                datastore.Put(result)

                constraints.update_identifiers(markers_to_acquire, markers_to_release, result.key())

                # If the datastore.Put() fails then the exception will only be raised when the
                # transaction applies, which means that we will still get to here and will still have
                # applied the marker changes (because they're in a nested, independent transaction).
                # Hence we set this flag to tell us that we got this far and that we should roll them back.
                rollback_markers[0] = True
                # If something dies between here and the `return` statement then we'll have stale unique markers

                try:
                    # Update the cache before dealing with unique markers, as CachingSituation.DATASTORE_PUT
                    # will only update the context cache
                    caching.add_entities_to_cache(
                        self.model,
                        [result],
                        caching.CachingSituation.DATASTORE_PUT,
                        self.namespace,
                        skip_memcache=True,
                    )
                except:
                    # We ignore the exception because raising will rollback the transaction causing
                    # an inconsistent state
                    logger.exception("Unable to update the context cache")
                    pass

            # Return true to indicate update success
            return True
Example No. 15
        def txn():
            caching.remove_entities_from_cache_by_key([key], self.namespace)

            try:
                result = datastore.Get(key)
            except datastore_errors.EntityNotFoundError:
                # Return false to indicate update failure
                return False

            if (
                isinstance(self.select.gae_query, (Query, UniqueQuery))  # ignore QueryByKeys and NoOpQuery
                and not utils.entity_matches_query(result, self.select.gae_query)
            ):
                # Due to eventual consistency the query may have returned an entity which no longer
                # matches the query
                return False

            original = copy.deepcopy(result)

            instance_kwargs = {
                field.attname: value
                for field, param, value in self.values
            }

            # Note: If you replace MockInstance with self.model, you'll find that some delete
            # tests fail in the test app. This is because any unspecified fields would then call
            # get_default (even though we aren't going to use them) which may run a query which
            # fails inside this transaction. Given that we are just using MockInstance so that we can
            # call django_instance_to_entity on it with the subset of fields we pass in,
            # what we have is fine.
            meta = self.model._meta
            instance = MockInstance(
                _original=MockInstance(_meta=meta, **result),
                _meta=meta,
                **instance_kwargs
            )

            # We need to add to the class attribute, rather than replace it!
            original_class = result.get(POLYMODEL_CLASS_ATTRIBUTE, [])

            # Update the entity we read above with the new values
            result.update(
                django_instance_to_entity(
                    self.connection,
                    self.model,
                    [x[0] for x in self.values],  # Pass in the fields that were updated
                    True,
                    instance))

            # Make sure we keep all classes in the inheritance tree!
            if original_class:
                if result[POLYMODEL_CLASS_ATTRIBUTE] is not None:
                    result[POLYMODEL_CLASS_ATTRIBUTE].extend(original_class)
                    # Make sure we don't add duplicates
                else:
                    result[POLYMODEL_CLASS_ATTRIBUTE] = original_class

            if POLYMODEL_CLASS_ATTRIBUTE in result:
                result[POLYMODEL_CLASS_ATTRIBUTE] = list(
                    set(result[POLYMODEL_CLASS_ATTRIBUTE]))

            if not constraints.has_active_unique_constraints(self.model):
                # The fast path, no constraint checking
                datastore.Put(result)
                caching.add_entities_to_cache(
                    self.model,
                    [result],
                    caching.CachingSituation.DATASTORE_PUT,
                    self.namespace,
                    skip_memcache=True,
                )
            else:
                markers_to_acquire[:], markers_to_release[:] = constraints.get_markers_for_update(
                    self.model, original, result)
                datastore.Put(result)

                constraints.update_identifiers(markers_to_acquire,
                                               markers_to_release,
                                               result.key())

                # If the datastore.Put() fails then the exception will only be raised when the
                # transaction applies, which means that we will still get to here and will still have
                # applied the marker changes (because they're in a nested, independent transaction).
                # Hence we set this flag to tell us that we got this far and that we should roll them back.
                rollback_markers[0] = True
                # If something dies between here and the `return` statement then we'll have stale unique markers

                try:
                    # Update the cache before dealing with unique markers, as CachingSituation.DATASTORE_PUT
                    # will only update the context cache
                    caching.add_entities_to_cache(
                        self.model,
                        [result],
                        caching.CachingSituation.DATASTORE_PUT,
                        self.namespace,
                        skip_memcache=True,
                    )
                except:
                    # We ignore the exception because raising will rollback the transaction causing
                    # an inconsistent state
                    logging.exception("Unable to update the context cache")
                    pass

            # Return true to indicate update success
            return True