Beispiel #1
0
def _assign_entity_to_mutation(mutation_pb, entity, auto_id_entities):
    """Copy ``entity`` into the proper slot of ``mutation_pb``.

    Entities with partial keys are routed to ``insert_auto_id`` and also
    collected in ``auto_id_entities`` so their keys can be patched up
    after ``commit``.

    Helper method for ``Batch.put``.

    :type mutation_pb: :class:`gcloud.datastore._datastore_v1_pb2.Mutation`
    :param mutation_pb: The Mutation protobuf for the batch / transaction.

    :type entity: :class:`gcloud.datastore.entity.Entity`
    :param entity: The entity being updated within the batch / transaction.

    :type auto_id_entities: list of :class:`gcloud.datastore.entity.Entity`
    :param auto_id_entities: Entities with partial keys, to be fixed up
                             during commit.
    """
    key_pb = helpers._prepare_key_for_request(entity.key.to_protobuf())

    if entity.key.is_partial:
        # Partial key: the backend assigns an ID; remember the entity so
        # its key can be completed after commit.
        target = mutation_pb.insert_auto_id.add()
        auto_id_entities.append(entity)
    else:
        # We use ``upsert`` for entities with completed keys, rather than
        # ``insert`` or ``update``, in order not to create race conditions
        # based on prior existence / removal of the entity.
        target = mutation_pb.upsert.add()

    target.key.CopyFrom(key_pb)

    for name, value in entity.items():
        is_list = isinstance(value, list)
        # Skip empty list values.
        if is_list and not value:
            continue

        prop = target.property.add()
        prop.name = name
        helpers._set_protobuf_value(prop.value, value)

        if name in entity.exclude_from_indexes:
            if not is_list:
                prop.value.indexed = False

            for item in prop.value.list_value:
                item.indexed = False
Beispiel #2
0
def _assign_entity_to_mutation(mutation_pb, entity, auto_id_entities):
    """Copy ``entity`` into appropriate slot of ``mutation_pb``.

    If ``entity.key`` is incomplete, append ``entity`` to ``auto_id_entities``
    for later fixup during ``commit``.

    Helper method for ``Batch.put``.

    :type mutation_pb: :class:`gcloud.datastore._datastore_v1_pb2.Mutation`
    :param mutation_pb: The Mutation protobuf for the batch / transaction.

    :type entity: :class:`gcloud.datastore.entity.Entity`
    :param entity: The entity being updated within the batch / transaction.

    :type auto_id_entities: list of :class:`gcloud.datastore.entity.Entity`
    :param auto_id_entities: Entities with partial keys, to be fixed up
                             during commit.
    """
    auto_id = entity.key.is_partial

    key_pb = entity.key.to_protobuf()
    key_pb = helpers._prepare_key_for_request(key_pb)

    if auto_id:
        insert = mutation_pb.insert_auto_id.add()
        auto_id_entities.append(entity)
    else:
        # We use ``upsert`` for entities with completed keys, rather than
        # ``insert`` or ``update``, in order not to create race conditions
        # based on prior existence / removal of the entity.
        insert = mutation_pb.upsert.add()

    insert.key.CopyFrom(key_pb)

    for name, value in entity.items():

        value_is_list = isinstance(value, list)
        # Skip empty list values.
        if value_is_list and len(value) == 0:
            continue

        prop = insert.property.add()
        # Set the name of the property.
        prop.name = name

        # Set the appropriate value.
        helpers._set_protobuf_value(prop.value, value)

        if name in entity.exclude_from_indexes:
            if not value_is_list:
                prop.value.indexed = False

            for sub_value in prop.value.list_value:
                sub_value.indexed = False
Beispiel #3
0
 def test_ancestor(self):
     """Ancestor key becomes a ``__key__ HAS_ANCESTOR`` composite filter."""
     from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
     from gcloud.datastore.key import Key
     from gcloud.datastore.helpers import _prepare_key_for_request

     parent_key = Key('Ancestor', 123, dataset_id='DATASET')
     query_pb = self._callFUT(_Query(ancestor=parent_key))

     composite = query_pb.filter.composite_filter
     self.assertEqual(composite.operator, datastore_pb.CompositeFilter.AND)
     self.assertEqual(len(composite.filter), 1)

     key_filter = composite.filter[0].property_filter
     self.assertEqual(key_filter.property.name, '__key__')
     expected_pb = _prepare_key_for_request(parent_key.to_protobuf())
     self.assertEqual(key_filter.value.key_value, expected_pb)
Beispiel #4
0
 def test_ancestor(self):
     """A query with an ancestor yields a single HAS_ANCESTOR filter."""
     from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
     from gcloud.datastore.key import Key
     from gcloud.datastore.helpers import _prepare_key_for_request

     root = Key('Ancestor', 123, dataset_id='DATASET')
     protobuf = self._callFUT(_Query(ancestor=root))
     cf = protobuf.filter.composite_filter
     self.assertEqual(cf.operator, datastore_pb.CompositeFilter.AND)
     self.assertEqual(len(cf.filter), 1)
     pf = cf.filter[0].property_filter
     self.assertEqual(pf.property.name, '__key__')
     self.assertEqual(pf.value.key_value,
                      _prepare_key_for_request(root.to_protobuf()))
Beispiel #5
0
    def test_ancestor(self):
        """Ancestor key is expressed as a ``__key__`` composite filter."""
        from gcloud.datastore.key import Key
        from gcloud.datastore.helpers import _prepare_key_for_request
        from gcloud.datastore._generated import query_pb2

        parent = Key('Ancestor', 123, project='PROJECT')
        result_pb = self._callFUT(_Query(ancestor=parent))

        composite = result_pb.filter.composite_filter
        self.assertEqual(composite.operator, query_pb2.CompositeFilter.AND)
        self.assertEqual(len(composite.filter), 1)

        ancestor_filter = composite.filter[0].property_filter
        self.assertEqual(ancestor_filter.property.name, '__key__')
        expected = _prepare_key_for_request(parent.to_protobuf())
        self.assertEqual(ancestor_filter.value.key_value, expected)
Beispiel #6
0
    def test_ancestor(self):
        """The ancestor shows up as exactly one filter on ``__key__``."""
        from gcloud.datastore.key import Key
        from gcloud.datastore.helpers import _prepare_key_for_request
        from gcloud.datastore._generated import query_pb2

        key = Key('Ancestor', 123, project='PROJECT')
        pb = self._callFUT(_Query(ancestor=key))
        outer = pb.filter.composite_filter
        self.assertEqual(outer.operator, query_pb2.CompositeFilter.AND)
        self.assertEqual(len(outer.filter), 1)
        inner = outer.filter[0].property_filter
        self.assertEqual(inner.property.name, '__key__')
        self.assertEqual(inner.value.key_value,
                         _prepare_key_for_request(key.to_protobuf()))
Beispiel #7
0
def _assign_entity_to_pb(entity_pb, entity):
    """Copy ``entity`` into ``entity_pb``.

    Helper method for ``Batch.put``.

    :type entity_pb: :class:`gcloud.datastore._generated.entity_pb2.Entity`
    :param entity_pb: The entity owned by a mutation.

    :type entity: :class:`gcloud.datastore.entity.Entity`
    :param entity: The entity being updated within the batch / transaction.
    """
    # Serialize the entity, run its key through the request-preparation
    # helper, then copy the whole protobuf into the mutation's slot.
    source_pb = helpers.entity_to_protobuf(entity)
    source_pb.key.CopyFrom(
        helpers._prepare_key_for_request(source_pb.key))
    entity_pb.CopyFrom(source_pb)
Beispiel #8
0
def _assign_entity_to_pb(entity_pb, entity):
    """Copy ``entity`` into ``entity_pb``.

    Helper method for ``Batch.put``.

    :type entity_pb: :class:`gcloud.datastore._generated.entity_pb2.Entity`
    :param entity_pb: The entity owned by a mutation.

    :type entity: :class:`gcloud.datastore.entity.Entity`
    :param entity: The entity being updated within the batch / transaction.
    """
    converted = helpers.entity_to_protobuf(entity)
    prepared_key = helpers._prepare_key_for_request(converted.key)
    converted.key.CopyFrom(prepared_key)
    entity_pb.CopyFrom(converted)
Beispiel #9
0
 def test_filter_key(self):
     """Filtering on ``__key__`` stores the key in ``value.key_value``."""
     from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
     from gcloud.datastore.key import Key
     from gcloud.datastore.helpers import _prepare_key_for_request

     target_key = Key('Kind', 123, dataset_id='DATASET')
     query = _Query(filters=[('__key__', '=', target_key)])
     query.OPERATORS = {
         '=': datastore_pb.PropertyFilter.EQUAL,
     }
     result_pb = self._callFUT(query)

     composite = result_pb.filter.composite_filter
     self.assertEqual(composite.operator, datastore_pb.CompositeFilter.AND)
     self.assertEqual(len(composite.filter), 1)

     key_filter = composite.filter[0].property_filter
     self.assertEqual(key_filter.property.name, '__key__')
     expected = _prepare_key_for_request(target_key.to_protobuf())
     self.assertEqual(key_filter.value.key_value, expected)
Beispiel #10
0
 def test_filter_key(self):
     """A ``__key__ =`` filter round-trips the key protobuf."""
     from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
     from gcloud.datastore.key import Key
     from gcloud.datastore.helpers import _prepare_key_for_request

     key = Key('Kind', 123, dataset_id='DATASET')
     query = _Query(filters=[('__key__', '=', key)])
     query.OPERATORS = {'=': datastore_pb.PropertyFilter.EQUAL}
     pb = self._callFUT(query)
     cf = pb.filter.composite_filter
     self.assertEqual(cf.operator, datastore_pb.CompositeFilter.AND)
     self.assertEqual(len(cf.filter), 1)
     pf = cf.filter[0].property_filter
     self.assertEqual(pf.property.name, '__key__')
     self.assertEqual(pf.value.key_value,
                      _prepare_key_for_request(key.to_protobuf()))
Beispiel #11
0
    def delete(self, key):
        """Remember a key to be deleted during ``commit``.

        :type key: :class:`gcloud.datastore.key.Key`
        :param key: the key to be deleted.

        :raises: ValueError if key is not complete, or if the key's
                 ``dataset_id`` does not match ours.
        """
        if key.is_partial:
            raise ValueError("Key must be complete")

        if not _dataset_ids_equal(self.dataset_id, key.dataset_id):
            raise ValueError("Key must be from same dataset as batch")

        key_pb = helpers._prepare_key_for_request(key.to_protobuf())
        self.mutation.delete.add().CopyFrom(key_pb)
Beispiel #12
0
    def delete(self, key):
        """Remember a key to be deleted during ``commit``.

        :type key: :class:`gcloud.datastore.key.Key`
        :param key: the key to be deleted.

        :raises: ValueError if key is not complete, or if the key's
                 ``dataset_id`` does not match ours.
        """
        # Validate before touching the mutation.
        if key.is_partial:
            raise ValueError("Key must be complete")
        if not _dataset_ids_equal(self.dataset_id, key.dataset_id):
            raise ValueError("Key must be from same dataset as batch")

        delete_pb = self.mutation.delete.add()
        delete_pb.CopyFrom(
            helpers._prepare_key_for_request(key.to_protobuf()))
Beispiel #13
0
    def delete(self, key):
        """Remember a key to be deleted during :meth:`commit`.

        :type key: :class:`gcloud.datastore.key.Key`
        :param key: the key to be deleted.

        :raises: ValueError if key is not complete, or if the key's
                 ``project`` does not match ours.
        """
        # Reject keys we cannot delete up front.
        if key.is_partial:
            raise ValueError("Key must be complete")
        if not _projects_equal(self.project, key.project):
            raise ValueError("Key must be from same project as batch")

        prepared = helpers._prepare_key_for_request(key.to_protobuf())
        self._add_delete_key_pb().CopyFrom(prepared)
Beispiel #14
0
    def delete(self, key):
        """Queue ``key`` for deletion when :meth:`commit` runs.

        :type key: :class:`gcloud.datastore.key.Key`
        :param key: the key to be deleted.

        :raises: ValueError if key is not complete, or if the key's
                 ``project`` does not match ours.
        """
        if key.is_partial:
            raise ValueError("Key must be complete")

        if not _projects_equal(self.project, key.project):
            raise ValueError("Key must be from same project as batch")

        target = self._add_delete_key_pb()
        target.CopyFrom(helpers._prepare_key_for_request(key.to_protobuf()))
Beispiel #15
0
    def test_filter_key(self):
        """Filtering on ``__key__`` stores the key in ``value.key_value``."""
        from gcloud.datastore.key import Key
        from gcloud.datastore.helpers import _prepare_key_for_request
        from gcloud.datastore._generated import query_pb2

        target = Key('Kind', 123, project='PROJECT')
        query = _Query(filters=[('__key__', '=', target)])
        query.OPERATORS = {'=': query_pb2.PropertyFilter.EQUAL}
        result_pb = self._callFUT(query)

        composite = result_pb.filter.composite_filter
        self.assertEqual(composite.operator, query_pb2.CompositeFilter.AND)
        self.assertEqual(len(composite.filter), 1)

        key_filter = composite.filter[0].property_filter
        self.assertEqual(key_filter.property.name, '__key__')
        expected = _prepare_key_for_request(target.to_protobuf())
        self.assertEqual(key_filter.value.key_value, expected)
Beispiel #16
0
    def test_filter_key(self):
        """An equality filter on ``__key__`` emits a key-valued filter."""
        from gcloud.datastore.key import Key
        from gcloud.datastore.helpers import _prepare_key_for_request
        from gcloud.datastore._generated import query_pb2

        key = Key('Kind', 123, project='PROJECT')
        query = _Query(filters=[('__key__', '=', key)])
        query.OPERATORS = {
            '=': query_pb2.PropertyFilter.EQUAL,
        }
        pb = self._callFUT(query)
        cf = pb.filter.composite_filter
        self.assertEqual(cf.operator, query_pb2.CompositeFilter.AND)
        self.assertEqual(len(cf.filter), 1)
        pf = cf.filter[0].property_filter
        self.assertEqual(pf.property.name, '__key__')
        self.assertEqual(pf.value.key_value,
                         _prepare_key_for_request(key.to_protobuf()))
Beispiel #17
0
def _pb_from_query(query):
    """Convert a Query instance to the corresponding protobuf.

    :type query: :class:`Query`
    :param query: The source query.

    :rtype: :class:`gcloud.datastore._generated.query_pb2.Query`
    :returns: A protobuf that can be sent to the protobuf API.  N.b. that
              it does not contain "in-flight" fields for ongoing query
              executions (cursors, offset, limit).
    """
    pb = _query_pb2.Query()

    # Projection and kind are simple repeated name fields.
    for projected_name in query.projection:
        pb.projection.add().property.name = projected_name

    if query.kind:
        pb.kind.add().name = query.kind

    filter_pb = pb.filter.composite_filter
    filter_pb.operator = _query_pb2.CompositeFilter.AND

    if query.ancestor:
        # The ancestor is expressed as a __key__ HAS_ANCESTOR filter.
        ancestor_pb = helpers._prepare_key_for_request(
            query.ancestor.to_protobuf())
        has_ancestor = filter_pb.filter.add().property_filter
        has_ancestor.property.name = '__key__'
        has_ancestor.operator = _query_pb2.PropertyFilter.HAS_ANCESTOR
        has_ancestor.value.key_value.CopyFrom(ancestor_pb)

    for prop_name, op, value in query.filters:
        new_filter = filter_pb.filter.add().property_filter
        new_filter.property.name = prop_name
        new_filter.operator = query.OPERATORS.get(op)

        # __key__ comparisons carry a prepared key protobuf; every other
        # value goes through the generic value setter.
        if prop_name == '__key__':
            new_filter.value.key_value.CopyFrom(
                helpers._prepare_key_for_request(value.to_protobuf()))
        else:
            helpers._set_protobuf_value(new_filter.value, value)

    if not filter_pb.filter:
        # No filters were added; remove the empty composite filter.
        pb.ClearField('filter')

    for order_name in query.order:
        order_pb = pb.order.add()
        # A leading '-' requests descending order on the named property.
        if order_name.startswith('-'):
            order_pb.property.name = order_name[1:]
            order_pb.direction = order_pb.DESCENDING
        else:
            order_pb.property.name = order_name
            order_pb.direction = order_pb.ASCENDING

    for group_by_name in query.group_by:
        pb.group_by.add().name = group_by_name

    return pb
Beispiel #18
0
def _pb_from_query(query):
    """Convert a Query instance to the corresponding protobuf.

    :type query: :class:`Query`
    :param query: The source query.

    :rtype: :class:`gcloud.datastore._datastore_v1_pb2.Query`
    :returns: A protobuf that can be sent to the protobuf API.  N.b. that
              it does not contain "in-flight" fields for ongoing query
              executions (cursors, offset, limit).
    """
    query_pb = datastore_pb.Query()

    for projected in query.projection:
        query_pb.projection.add().property.name = projected

    if query.kind:
        query_pb.kind.add().name = query.kind

    composite = query_pb.filter.composite_filter
    composite.operator = datastore_pb.CompositeFilter.AND

    if query.ancestor:
        # Express the ancestor as a __key__ HAS_ANCESTOR property filter.
        prepared = helpers._prepare_key_for_request(
            query.ancestor.to_protobuf())
        ancestor_filter = composite.filter.add().property_filter
        ancestor_filter.property.name = '__key__'
        ancestor_filter.operator = datastore_pb.PropertyFilter.HAS_ANCESTOR
        ancestor_filter.value.key_value.CopyFrom(prepared)

    for name, op, value in query.filters:
        prop_filter = composite.filter.add().property_filter
        prop_filter.property.name = name
        prop_filter.operator = query.OPERATORS.get(op)

        # __key__ comparisons carry a prepared key protobuf; everything
        # else goes through the generic value setter.
        if name == '__key__':
            prop_filter.value.key_value.CopyFrom(
                helpers._prepare_key_for_request(value.to_protobuf()))
        else:
            helpers._set_protobuf_value(prop_filter.value, value)

    if not composite.filter:
        # Nothing was added to the composite filter; drop the field.
        query_pb.ClearField('filter')

    for order_spec in query.order:
        order_pb = query_pb.order.add()
        # A leading '-' requests descending order on the named property.
        descending = order_spec.startswith('-')
        order_pb.property.name = order_spec[1:] if descending else order_spec
        order_pb.direction = (
            order_pb.DESCENDING if descending else order_pb.ASCENDING)

    for group_by_name in query.group_by:
        query_pb.group_by.add().name = group_by_name

    return query_pb
Beispiel #19
0
    def _callFUT(self, key_pb):
        """Delegate to the function under test with ``key_pb``."""
        from gcloud.datastore.helpers import _prepare_key_for_request

        result = _prepare_key_for_request(key_pb)
        return result