示例#1
0
    def _wrapped_map_entity(self, entity):
        """ Wrapper around self._map_entity that evicts the entity from Djangae's
        cache both before and after the mapping operation runs.

        Raises:
            DeadlineExceededError: always re-raised so the mapper library can
                split/retry the task.
            Exception: re-raised from self._map_entity unless self.skip_errors
                is set, in which case it is logged and swallowed.
        """

        # TODO: Note that other threads (from the general application running) could also be
        # modifying the entity, and that we're not using Djangae's transaction managers for our
        # stuff here.

        remove_entities_from_cache_by_key([entity.key()], self.namespace)
        try:
            retry(self._map_entity, entity)
        except DeadlineExceededError:
            # Almost certainly the task processed too many entities rather than a
            # problem with this particular one; re-raise so the mapper library can
            # deal with it either way.
            raise
        except Exception:
            if not self.skip_errors:
                raise
            logging.exception(
                "Error processing operation %s for entity %s.  Skipping.",
                self.identifier, entity.key()
            )
        if entity.key():
            # The mapping may have deleted the entity and/or wiped its key, in
            # which case there is nothing left to evict.
            remove_entities_from_cache_by_key([entity.key()], self.namespace)
示例#2
0
    def execute(self):
        """ Run the wrapped select, then delete every matching entity, releasing
            unique-constraint markers and evicting the cache along the way.
        """
        self.select.execute()

        # Slightly less efficient than a keys_only query piped straight into
        # Delete, but that's the sacrifice we make for the unique caching layer.
        def build_key_query(kind, key):
            # TODO: is the namespace necessary if we're passing the key?
            key_query = Query(kind, namespace=key.namespace() or None)
            key_query["__key__ ="] = key
            return key_query

        key_queries = [
            build_key_query(result.key().kind(), result.key())
            for result in self.select.results
        ]
        if not key_queries:
            return

        doomed_keys = []
        for entity in QueryByKeys(self.model, key_queries, [], self.namespace).Run():
            doomed_keys.append(entity.key())

            # Release unique markers if constraint checking is enabled
            if constraints.constraint_checks_enabled(self.model):
                constraints.release(self.model, entity)

        caching.remove_entities_from_cache_by_key(doomed_keys, self.namespace)
        datastore.Delete(doomed_keys)
示例#3
0
        def delete_batch(key_slice):
            """ Fetch the entities for one slice of keys, strip this model's
                polymodel class from each, then delete those with no class values
                left and re-save the rest.

                Returns the number of entities touched (deleted or updated).
            """
            entities = datastore.Get(key_slice)

            # FIXME: We need to make sure the entity still matches the query!
            # entities = (x for x in entities if utils.entity_matches_query(x, self.select.gae_query))

            to_delete, to_update, updated_keys = [], [], []

            for entity in entities:
                if entity is None:
                    # Key no longer resolves to an entity; nothing to do
                    continue

                wipe_polymodel_from_entity(entity, self.table_to_delete)
                if entity.get('class'):
                    # Other class values remain, so the entity stays around
                    to_update.append(entity)
                else:
                    to_delete.append(entity)
                    constraints.release(self.model, entity)
                updated_keys.append(entity.key())

            datastore.DeleteAsync([doomed.key() for doomed in to_delete])
            datastore.PutAsync(to_update)

            # Evict everything we touched from Djangae's cache
            caching.remove_entities_from_cache_by_key(
                updated_keys, self.namespace
            )

            return len(updated_keys)
示例#4
0
        def delete_batch(key_slice):
            """ Process one batch of keys: fetch the entities, strip this model's
                polymodel class from each, then delete those with no 'class'
                values left and re-save the rest.

                Returns the number of entities processed (deleted or updated).
            """
            entities = datastore.Get(key_slice)

            #FIXME: We need to make sure the entity still matches the query!
            #            entities = (x for x in entities if utils.entity_matches_query(x, self.select.gae_query))

            to_delete = []
            to_update = []
            updated_keys = []

            # Go through the entities
            for entity in entities:
                if entity is None:
                    # datastore.Get yields None for keys that no longer exist
                    continue

                wipe_polymodel_from_entity(entity, self.table_to_delete)
                if not entity.get('class'):
                    # No 'class' values remain (presumably no other polymodel
                    # classes use this entity), so delete it entirely
                    to_delete.append(entity)
                    constraints.release(self.model, entity)
                else:
                    # Still referenced by other class values; keep the entity
                    to_update.append(entity)
                updated_keys.append(entity.key())

            datastore.DeleteAsync([x.key() for x in to_delete])
            datastore.PutAsync(to_update)

            # Evict everything we touched from Djangae's cache
            caching.remove_entities_from_cache_by_key(updated_keys,
                                                      self.namespace)

            return len(updated_keys)
    def execute(self):
        """ Execute the underlying select, then delete every resulting entity,
            releasing unique-constraint markers and evicting the cache as we go.
        """
        self.select.execute()

        # Slightly less efficient than a keys_only query piped straight into
        # Delete, but that's the sacrifice we make for the unique caching layer.
        def build_key_query(kind, key):
            key_query = Query(kind)
            key_query["__key__ ="] = key
            return key_query

        key_queries = [
            build_key_query(result.key().kind(), result.key())
            for result in self.select.results
        ]
        if not key_queries:
            return

        doomed_keys = []
        for entity in QueryByKeys(self.model, key_queries, []).Run():
            doomed_keys.append(entity.key())

            # Release unique markers if constraint checking is enabled
            if constraints.constraint_checks_enabled(self.model):
                constraints.release(self.model, entity)

        caching.remove_entities_from_cache_by_key(doomed_keys)
        datastore.Delete(doomed_keys)
示例#6
0
    def _wrapped_map_entity(self, entity):
        """ Wrapper for self._map_entity which removes the entity from Djangae's
        cache before and after the mapping operation runs.

        Args:
            entity: the datastore entity to process.

        Raises:
            DeadlineExceededError: always re-raised so the mapper library can
                deal with it.
            Exception: re-raised from self._map_entity unless self.skip_errors
                is set, in which case the error is logged and swallowed.
        """

        # TODO: Note that other threads (from the general application running) could also be
        # modifying the entity, and that we're not using Djangae's transaction managers for our
        # stuff here.

        # Evict first so the mapping operation doesn't see a stale cached copy
        remove_entities_from_cache_by_key([entity.key()], self.namespace)
        try:
            retry(self._map_entity, entity)
        except DeadlineExceededError:
            # This is (probably) not an error with the individual entity, but more likey that the
            # task has tried to process too many entities. Either way, we always re-raise it so
            # that the mapper library can deal with it
            raise
        except Exception:
            if self.skip_errors:
                logger.exception(
                    "Error processing operation %s for entity %s.  Skipping.",
                    self.identifier, entity.key())
            else:
                raise
        if entity.key():
            # Assuming the entity hasn't been deleted and/or it's key been wiped...
            remove_entities_from_cache_by_key([entity.key()], self.namespace)
示例#7
0
File: commands.py  Project: vzts/djangae
        def delete_batch(key_slice):
            """ Process one batch of keys: fetch the entities, strip this model's
                polymodel class from each, then delete those with no 'class'
                values left (cleaning up special indexes) and re-save the rest.

                Relies on `constraints_enabled` captured from the enclosing
                scope to decide whether unique markers need releasing.

                Returns the number of entities processed (deleted or updated).
            """
            entities = rpc.Get(key_slice)

            # FIXME: We need to make sure the entity still matches the query!
#            entities = (x for x in entities if utils.entity_matches_query(x, self.select.gae_query))

            to_delete = []
            to_update = []
            updated_keys = []

            # Go through the entities
            for entity in entities:
                if entity is None:
                    # rpc.Get yields None for keys that no longer exist
                    continue

                wipe_polymodel_from_entity(entity, self.table_to_delete)
                if not entity.get('class'):
                    # Note: unlike the update branch this collects keys, not entities
                    to_delete.append(entity.key())
                    if constraints_enabled:
                        constraints.release(self.model, entity)
                else:
                    to_update.append(entity)
                updated_keys.append(entity.key())

            rpc.DeleteAsync(to_delete)
            rpc.PutAsync(to_update)

            # Clean up any special index things that need to be cleaned
            for indexer in indexers_for_model(self.model):
                for key in to_delete:
                    indexer.cleanup(key)

            # Evict everything we touched from Djangae's cache
            caching.remove_entities_from_cache_by_key(
                updated_keys, self.namespace
            )

            return len(updated_keys)
示例#8
0
    def _update_entity(self, key):
        """ Re-fetch the entity for `key`, apply self.values to it and save it
            back, keeping the cache and any unique-constraint markers in sync.

            Returns:
                True on successful update; False if the entity no longer exists
                or no longer matches the originating query (eventual consistency).
        """
        # Evict first so nothing reads a stale copy while we update
        caching.remove_entities_from_cache_by_key([key], self.namespace)

        try:
            result = datastore.Get(key)
        except datastore_errors.EntityNotFoundError:
            # Return false to indicate update failure
            return False

        if (
            isinstance(self.select.gae_query, (Query, UniqueQuery)) # ignore QueryByKeys and NoOpQuery
            and not utils.entity_matches_query(result, self.select.gae_query)
        ):
            # Due to eventual consistency they query may have returned an entity which no longer
            # matches the query
            return False

        # Keep a pristine copy so we can diff for constraint markers below
        original = copy.deepcopy(result)

        instance_kwargs = {field.attname:value for field, param, value in self.values}

        # Note: If you replace MockInstance with self.model, you'll find that some delete
        # tests fail in the test app. This is because any unspecified fields would then call
        # get_default (even though we aren't going to use them) which may run a query which
        # fails inside this transaction. Given as we are just using MockInstance so that we can
        # call django_instance_to_entity it on it with the subset of fields we pass in,
        # what we have is fine.
        meta = self.model._meta
        instance = MockInstance(
            _original=MockInstance(_meta=meta, **result),
            _meta=meta,
            **instance_kwargs
        )

        # We need to add to the class attribute, rather than replace it!
        original_class = result.get(POLYMODEL_CLASS_ATTRIBUTE, [])

        # Update the entity we read above with the new values
        result.update(django_instance_to_entity(
            self.connection, self.model,
            [ x[0] for x in self.values],  # Pass in the fields that were updated
            True, instance)
        )

        # Make sure we keep all classes in the inheritence tree!
        if original_class:
            if result[POLYMODEL_CLASS_ATTRIBUTE] is not None:
                result[POLYMODEL_CLASS_ATTRIBUTE].extend(original_class)
                # Make sure we don't add duplicates
            else:
                result[POLYMODEL_CLASS_ATTRIBUTE] = original_class

        # De-duplicate the class list (extend above may have added repeats)
        if POLYMODEL_CLASS_ATTRIBUTE in result:
            result[POLYMODEL_CLASS_ATTRIBUTE] = list(set(result[POLYMODEL_CLASS_ATTRIBUTE]))

        if not constraints.constraint_checks_enabled(self.model):
            # The fast path, no constraint checking
            datastore.Put(result)
            caching.add_entities_to_cache(
                self.model,
                [result],
                caching.CachingSituation.DATASTORE_PUT,
                self.namespace,
            )
        else:
            to_acquire, to_release = constraints.get_markers_for_update(self.model, original, result)

            # Acquire first, because if that fails then we don't want to alter what's already there
            constraints.acquire_identifiers(to_acquire, result.key())
            try:
                datastore.Put(result)
                caching.add_entities_to_cache(
                    self.model,
                    [result],
                    caching.CachingSituation.DATASTORE_PUT,
                    self.namespace,
                )
            except:
                # Roll back the markers we just acquired, then propagate
                constraints.release_identifiers(to_acquire, namespace=self.namespace)
                raise
            else:
                # Now we release the ones we don't want anymore
                constraints.release_identifiers(to_release, self.namespace)

        # Return true to indicate update success
        return True
示例#9
0
        def txn():
            """ Transactional body of the update: re-fetch the entity, apply
                self.values and save it back, maintaining the cache and unique
                markers.

                Communicates with the enclosing scope through the closure lists
                `markers_to_acquire` / `markers_to_release` (mutated in place)
                and the `rollback_markers` flag, so the caller can undo marker
                changes if the transaction ultimately fails to apply.

                Returns True on success, False if the entity is gone or no
                longer matches the query (eventual consistency).
            """
            caching.remove_entities_from_cache_by_key([key], self.namespace)

            try:
                result = datastore.Get(key)
            except datastore_errors.EntityNotFoundError:
                # Return false to indicate update failure
                return False

            if (
                isinstance(self.select.gae_query, (Query, UniqueQuery)) # ignore QueryByKeys and NoOpQuery
                and not utils.entity_matches_query(result, self.select.gae_query)
            ):
                # Due to eventual consistency they query may have returned an entity which no longer
                # matches the query
                return False

            # Keep a pristine copy so we can diff for constraint markers below
            original = copy.deepcopy(result)

            instance_kwargs = {field.attname:value for field, param, value in self.values}

            # Note: If you replace MockInstance with self.model, you'll find that some delete
            # tests fail in the test app. This is because any unspecified fields would then call
            # get_default (even though we aren't going to use them) which may run a query which
            # fails inside this transaction. Given as we are just using MockInstance so that we can
            # call django_instance_to_entity it on it with the subset of fields we pass in,
            # what we have is fine.
            meta = self.model._meta
            instance = MockInstance(
                _original=MockInstance(_meta=meta, **result),
                _meta=meta,
                **instance_kwargs
            )

            # We need to add to the class attribute, rather than replace it!
            original_class = result.get(POLYMODEL_CLASS_ATTRIBUTE, [])

            # Update the entity we read above with the new values
            result.update(django_instance_to_entity(
                self.connection, self.model,
                [ x[0] for x in self.values],  # Pass in the fields that were updated
                True, instance)
            )

            # Make sure we keep all classes in the inheritence tree!
            if original_class:
                if result[POLYMODEL_CLASS_ATTRIBUTE] is not None:
                    result[POLYMODEL_CLASS_ATTRIBUTE].extend(original_class)
                    # Make sure we don't add duplicates
                else:
                    result[POLYMODEL_CLASS_ATTRIBUTE] = original_class

            # De-duplicate the class list (extend above may have added repeats)
            if POLYMODEL_CLASS_ATTRIBUTE in result:
                result[POLYMODEL_CLASS_ATTRIBUTE] = list(set(result[POLYMODEL_CLASS_ATTRIBUTE]))

            if not constraints.has_active_unique_constraints(self.model):
                # The fast path, no constraint checking
                datastore.Put(result)
                caching.add_entities_to_cache(
                    self.model,
                    [result],
                    caching.CachingSituation.DATASTORE_PUT,
                    self.namespace,
                    skip_memcache=True,
                )
            else:
                # Slice-assign so the enclosing scope sees the marker lists
                markers_to_acquire[:], markers_to_release[:] = constraints.get_markers_for_update(
                    self.model, original, result
                )
                datastore.Put(result)

                constraints.update_identifiers(markers_to_acquire, markers_to_release, result.key())

                # If the datastore.Put() fails then the exception will only be raised when the
                # transaction applies, which means that we will still get to here and will still have
                # applied the marker changes (because they're in a nested, independent transaction).
                # Hence we set this flag to tell us that we got this far and that we should roll them back.
                rollback_markers[0] = True
                # If something dies between here and the `return` statement then we'll have stale unique markers

                try:
                    # Update the cache before dealing with unique markers, as CachingSituation.DATASTORE_PUT
                    # will only update the context cache
                    caching.add_entities_to_cache(
                        self.model,
                        [result],
                        caching.CachingSituation.DATASTORE_PUT,
                        self.namespace,
                        skip_memcache=True,
                    )
                except:
                    # We ignore the exception because raising will rollback the transaction causing
                    # an inconsistent state
                    logger.exception("Unable to update the context cache")
                    pass

            # Return true to indicate update success
            return True
示例#10
0
File: commands.py  Project: vzts/djangae
        def txn():
            """ Transactional body of the update: re-fetch the entity, convert
                the updated instance back to entities (primary plus any
                descendent index entities) and save everything, maintaining the
                cache and unique markers.

                Communicates with the enclosing scope through the closure lists
                `markers_to_acquire` / `markers_to_release` (mutated in place)
                and the `rollback_markers` flag, so the caller can undo marker
                changes if the transaction ultimately fails to apply.

                Returns True on success, False if the entity is gone or no
                longer matches the query (eventual consistency).
            """
            caching.remove_entities_from_cache_by_key([key], self.namespace)

            try:
                result = rpc.Get(key)
            except datastore_errors.EntityNotFoundError:
                # Return false to indicate update failure
                return False

            if (
                isinstance(self.select.gae_query, (Query, meta_queries.UniqueQuery)) # ignore QueryByKeys and NoOpQuery
                and not utils.entity_matches_query(result, self.select.gae_query)
            ):
                # Due to eventual consistency they query may have returned an entity which no longer
                # matches the query
                return False

            # Keep a pristine copy so we can diff for constraint markers below
            original = copy.deepcopy(result)

            instance_kwargs = {field.attname: value for field, param, value in self.values}

            # Note: If you replace MockInstance with self.model, you'll find that some delete
            # tests fail in the test app. This is because any unspecified fields would then call
            # get_default (even though we aren't going to use them) which may run a query which
            # fails inside this transaction. Given as we are just using MockInstance so that we can
            # call django_instance_to_entities it on it with the subset of fields we pass in,
            # what we have is fine.
            meta = self.model._meta
            instance = MockInstance(
                _original=MockInstance(_meta=meta, **result),
                _meta=meta,
                **instance_kwargs
            )

            # Convert the instance to an entity
            primary, descendents = django_instance_to_entities(
                self.connection,
                [x[0] for x in self.values],  # Pass in the fields that were updated
                True, instance,
                model=self.model
            )

            # Update the entity we read above with the new values
            result.update(primary)

            # Remove fields which have been marked to be unindexed
            for col in getattr(primary, "_properties_to_remove", []):
                if col in result:
                    del result[col]

            # Make sure that any polymodel classes which were in the original entity are kept,
            # as django_instance_to_entities may have wiped them as well as added them.
            polymodel_classes = list(set(
                original.get(POLYMODEL_CLASS_ATTRIBUTE, []) + result.get(POLYMODEL_CLASS_ATTRIBUTE, [])
            ))
            if polymodel_classes:
                result[POLYMODEL_CLASS_ATTRIBUTE] = polymodel_classes

            def perform_insert():
                """
                    Inserts result, and any descendents with their ancestor
                    value set
                """
                inserted_key = rpc.Put(result)
                if descendents:
                    # Rebuild each descendent so its parent points at the key we
                    # just wrote, preserving any existing id/name
                    for i, descendent in enumerate(descendents):
                        descendents[i] = Entity(
                            descendent.kind(),
                            parent=inserted_key,
                            namespace=inserted_key.namespace(),
                            id=descendent.key().id() or None,
                            name=descendent.key().name() or None
                        )
                        descendents[i].update(descendent)
                    rpc.Put(descendents)

            if not constraints.has_active_unique_constraints(self.model):
                # The fast path, no constraint checking
                perform_insert()

                caching.add_entities_to_cache(
                    self.model,
                    [result],
                    caching.CachingSituation.DATASTORE_PUT,
                    self.namespace,
                    skip_memcache=True,
                )
            else:
                # Slice-assign so the enclosing scope sees the marker lists
                markers_to_acquire[:], markers_to_release[:] = constraints.get_markers_for_update(
                    self.model, original, result
                )

                perform_insert()

                constraints.update_identifiers(markers_to_acquire, markers_to_release, result.key())

                # If the rpc.Put() fails then the exception will only be raised when the
                # transaction applies, which means that we will still get to here and will still have
                # applied the marker changes (because they're in a nested, independent transaction).
                # Hence we set this flag to tell us that we got this far and that we should roll them back.
                rollback_markers[0] = True
                # If something dies between here and the `return` statement then we'll have stale unique markers

                try:
                    # Update the cache before dealing with unique markers, as CachingSituation.DATASTORE_PUT
                    # will only update the context cache
                    caching.add_entities_to_cache(
                        self.model,
                        [result],
                        caching.CachingSituation.DATASTORE_PUT,
                        self.namespace,
                        skip_memcache=True,
                    )
                except:
                    # We ignore the exception because raising will rollback the transaction causing
                    # an inconsistent state
                    logger.exception("Unable to update the context cache")
                    pass

            # Return true to indicate update success
            return True
示例#11
0
        def txn():
            """ Transactional body of the update: re-fetch the entity, apply
                self.values and save it back, maintaining the cache and unique
                markers.

                Communicates with the enclosing scope through the closure lists
                `markers_to_acquire` / `markers_to_release` (mutated in place)
                and the `rollback_markers` flag, so the caller can undo marker
                changes if the transaction ultimately fails to apply.

                Returns True on success, False if the entity is gone or no
                longer matches the query (eventual consistency).
            """
            caching.remove_entities_from_cache_by_key([key], self.namespace)

            try:
                result = datastore.Get(key)
            except datastore_errors.EntityNotFoundError:
                # Return false to indicate update failure
                return False

            if (isinstance(
                    self.select.gae_query,
                (Query, UniqueQuery))  # ignore QueryByKeys and NoOpQuery
                    and not utils.entity_matches_query(result,
                                                       self.select.gae_query)):
                # Due to eventual consistency they query may have returned an entity which no longer
                # matches the query
                return False

            # Keep a pristine copy so we can diff for constraint markers below
            original = copy.deepcopy(result)

            instance_kwargs = {
                field.attname: value
                for field, param, value in self.values
            }

            # Note: If you replace MockInstance with self.model, you'll find that some delete
            # tests fail in the test app. This is because any unspecified fields would then call
            # get_default (even though we aren't going to use them) which may run a query which
            # fails inside this transaction. Given as we are just using MockInstance so that we can
            # call django_instance_to_entity it on it with the subset of fields we pass in,
            # what we have is fine.
            meta = self.model._meta
            instance = MockInstance(_original=MockInstance(_meta=meta,
                                                           **result),
                                    _meta=meta,
                                    **instance_kwargs)

            # We need to add to the class attribute, rather than replace it!
            original_class = result.get(POLYMODEL_CLASS_ATTRIBUTE, [])

            # Update the entity we read above with the new values
            result.update(
                django_instance_to_entity(
                    self.connection,
                    self.model,
                    [x[0] for x in self.values
                     ],  # Pass in the fields that were updated
                    True,
                    instance))

            # Make sure we keep all classes in the inheritence tree!
            if original_class:
                if result[POLYMODEL_CLASS_ATTRIBUTE] is not None:
                    result[POLYMODEL_CLASS_ATTRIBUTE].extend(original_class)
                    # Make sure we don't add duplicates
                else:
                    result[POLYMODEL_CLASS_ATTRIBUTE] = original_class

            # De-duplicate the class list (extend above may have added repeats)
            if POLYMODEL_CLASS_ATTRIBUTE in result:
                result[POLYMODEL_CLASS_ATTRIBUTE] = list(
                    set(result[POLYMODEL_CLASS_ATTRIBUTE]))

            if not constraints.has_active_unique_constraints(self.model):
                # The fast path, no constraint checking
                datastore.Put(result)
                caching.add_entities_to_cache(
                    self.model,
                    [result],
                    caching.CachingSituation.DATASTORE_PUT,
                    self.namespace,
                    skip_memcache=True,
                )
            else:
                # Slice-assign so the enclosing scope sees the marker lists
                markers_to_acquire[:], markers_to_release[:] = constraints.get_markers_for_update(
                    self.model, original, result)
                datastore.Put(result)

                constraints.update_identifiers(markers_to_acquire,
                                               markers_to_release,
                                               result.key())

                # If the datastore.Put() fails then the exception will only be raised when the
                # transaction applies, which means that we will still get to here and will still have
                # applied the marker changes (because they're in a nested, independent transaction).
                # Hence we set this flag to tell us that we got this far and that we should roll them back.
                rollback_markers[0] = True
                # If something dies between here and the `return` statement then we'll have stale unique markers

                try:
                    # Update the cache before dealing with unique markers, as CachingSituation.DATASTORE_PUT
                    # will only update the context cache
                    caching.add_entities_to_cache(
                        self.model,
                        [result],
                        caching.CachingSituation.DATASTORE_PUT,
                        self.namespace,
                        skip_memcache=True,
                    )
                except:
                    # We ignore the exception because raising will rollback the transaction causing
                    # an inconsistent state
                    logging.exception("Unable to update the context cache")
                    pass

            # Return true to indicate update success
            return True
    def _update_entity(self, key):
        """ Re-fetch the entity for `key`, apply self.values to it and save it
            back, keeping the cache and any unique-constraint markers in sync.

            Returns:
                True on successful update; False if the entity no longer exists
                or no longer matches the originating query (eventual consistency).
        """
        # Evict first so nothing reads a stale copy while we update
        caching.remove_entities_from_cache_by_key([key])

        try:
            result = datastore.Get(key)
        except datastore_errors.EntityNotFoundError:
            # Return false to indicate update failure
            return False

        if (isinstance(
                self.select.gae_query,
            (Query, UniqueQuery))  # ignore QueryByKeys and NoOpQuery
                and
                not utils.entity_matches_query(result, self.select.gae_query)):
            # Due to eventual consistency they query may have returned an entity which no longer
            # matches the query
            return False

        # Keep a pristine copy so we can diff for constraint markers below
        original = copy.deepcopy(result)

        # Note: If you replace MockInstance with self.model, you'll find that some delete
        # tests fail in the test app. This is because any unspecified fields would then call
        # get_default (even though we aren't going to use them) which may run a query which
        # fails inside this transaction. Given as we are just using MockInstance so that we can
        # call django_instance_to_entity it on it with the subset of fields we pass in,
        # what we have is fine.
        meta = self.model._meta
        instance_kwargs = {
            field.attname: value
            for field, param, value in self.values
        }
        instance = MockInstance(_original=MockInstance(_meta=meta, **result),
                                _meta=meta,
                                **instance_kwargs)

        # We need to add to the class attribute, rather than replace it!
        original_class = result.get(POLYMODEL_CLASS_ATTRIBUTE, [])

        # Update the entity we read above with the new values
        result.update(
            django_instance_to_entity(
                self.connection,
                self.model,
                [x[0]
                 for x in self.values],  # Pass in the fields that were updated
                True,
                instance))

        # Make sure we keep all classes in the inheritence tree!
        if original_class:
            if result[POLYMODEL_CLASS_ATTRIBUTE] is not None:
                result[POLYMODEL_CLASS_ATTRIBUTE].extend(original_class)
                # Make sure we don't add duplicates
            else:
                result[POLYMODEL_CLASS_ATTRIBUTE] = original_class

        # De-duplicate the class list (extend above may have added repeats)
        if POLYMODEL_CLASS_ATTRIBUTE in result:
            result[POLYMODEL_CLASS_ATTRIBUTE] = list(
                set(result[POLYMODEL_CLASS_ATTRIBUTE]))

        if not constraints.constraint_checks_enabled(self.model):
            # The fast path, no constraint checking
            datastore.Put(result)
            caching.add_entities_to_cache(
                self.model, [result], caching.CachingSituation.DATASTORE_PUT)
        else:
            to_acquire, to_release = constraints.get_markers_for_update(
                self.model, original, result)

            # Acquire first, because if that fails then we don't want to alter what's already there
            constraints.acquire_identifiers(to_acquire, result.key())
            try:
                datastore.Put(result)
                caching.add_entities_to_cache(
                    self.model, [result],
                    caching.CachingSituation.DATASTORE_PUT)
            except:
                # Roll back the markers we just acquired, then propagate
                constraints.release_identifiers(to_acquire)
                raise
            else:
                # Now we release the ones we don't want anymore
                constraints.release_identifiers(to_release)

        # Return true to indicate update success
        return True