Example No. 1
    def test__next_page_w_skipped_lt_offset(self):
        from google.api_core import page_iterator
        from google.cloud.datastore_v1.types import datastore as datastore_pb2
        from google.cloud.datastore_v1.types import entity as entity_pb2
        from google.cloud.datastore_v1.types import query as query_pb2
        from google.cloud.datastore.query import Query

        project = "prujekt"
        skipped_1 = 100
        skipped_cursor_1 = b"DEADBEEF"
        skipped_2 = 50
        skipped_cursor_2 = b"FACEDACE"

        more_enum = query_pb2.QueryResultBatch.MoreResultsType.NOT_FINISHED

        result_1 = _make_query_response([], b"", more_enum, skipped_1)
        result_1.batch.skipped_cursor = skipped_cursor_1
        result_2 = _make_query_response([], b"", more_enum, skipped_2)
        result_2.batch.skipped_cursor = skipped_cursor_2

        ds_api = _make_datastore_api(result_1, result_2)
        client = _Client(project, datastore_api=ds_api)

        query = Query(client)
        offset = 150
        iterator = self._make_one(query, client, offset=offset)

        page = iterator._next_page()

        self.assertIsInstance(page, page_iterator.Page)
        self.assertIs(page._parent, iterator)

        partition_id = entity_pb2.PartitionId(project_id=project)
        read_options = datastore_pb2.ReadOptions()

        query_1 = query_pb2.Query(offset=offset)
        query_2 = query_pb2.Query(start_cursor=skipped_cursor_1,
                                  offset=(offset - skipped_1))
        expected_calls = [
            mock.call(
                request={
                    "project_id": project,
                    "partition_id": partition_id,
                    "read_options": read_options,
                    "query": query,
                }) for query in [query_1, query_2]
        ]
        self.assertEqual(ds_api.run_query.call_args_list, expected_calls)
Example No. 2
    def test__build_protobuf_all_values_except_offset(self):
        # this test and the following (all_values_except_start_and_end_cursor)
        # test mutually exclusive states; the offset is ignored
        # if a start_cursor is supplied
        from google.cloud.datastore_v1.types import query as query_pb2
        from google.cloud.datastore.query import Query

        client = _Client(None)
        query = Query(client)
        limit = 15
        start_bytes = b"i\xb7\x1d"
        start_cursor = "abcd"
        end_bytes = b"\xc3\x1c\xb3"
        end_cursor = "wxyz"
        iterator = self._make_one(query,
                                  client,
                                  limit=limit,
                                  start_cursor=start_cursor,
                                  end_cursor=end_cursor)
        self.assertEqual(iterator.max_results, limit)
        iterator.num_results = 4
        iterator._skipped_results = 1

        pb = iterator._build_protobuf()
        expected_pb = query_pb2.Query(start_cursor=start_bytes,
                                      end_cursor=end_bytes)
        expected_pb._pb.limit.value = limit - iterator.num_results
        self.assertEqual(pb, expected_pb)
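
The expected cursor bytes in this test are just the URL-safe base64 decodings of the cursor strings handed to the iterator, and the expected limit is the requested limit minus the results already consumed. A quick standalone check of that arithmetic (illustrative only, not part of the test suite):

import base64

# The cursor strings decode to exactly the byte values the test expects.
assert base64.urlsafe_b64decode("abcd") == b"i\xb7\x1d"
assert base64.urlsafe_b64decode("wxyz") == b"\xc3\x1c\xb3"

# 15 results requested, 4 already returned -> the protobuf limit should be 11.
assert 15 - 4 == 11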
Example No. 3
    def test__build_protobuf_empty(self):
        from google.cloud.datastore_v1.types import query as query_pb2
        from google.cloud.datastore.query import Query

        client = _Client(None)
        query = Query(client)
        iterator = self._make_one(query, client)

        pb = iterator._build_protobuf()
        expected_pb = query_pb2.Query()
        self.assertEqual(pb, expected_pb)
Example No. 4
    def _next_page_helper(self, txn_id=None, retry=None, timeout=None):
        from google.api_core import page_iterator
        from google.cloud.datastore_v1.types import datastore as datastore_pb2
        from google.cloud.datastore_v1.types import entity as entity_pb2
        from google.cloud.datastore_v1.types import query as query_pb2
        from google.cloud.datastore.query import Query

        more_enum = query_pb2.QueryResultBatch.MoreResultsType.NOT_FINISHED
        result = _make_query_response([], b"", more_enum, 0)
        project = "prujekt"
        ds_api = _make_datastore_api(result)
        if txn_id is None:
            client = _Client(project, datastore_api=ds_api)
        else:
            transaction = mock.Mock(id=txn_id, spec=["id"])
            client = _Client(project,
                             datastore_api=ds_api,
                             transaction=transaction)

        query = Query(client)
        kwargs = {}

        if retry is not None:
            kwargs["retry"] = retry

        if timeout is not None:
            kwargs["timeout"] = timeout

        iterator = self._make_one(query, client, **kwargs)

        page = iterator._next_page()

        self.assertIsInstance(page, page_iterator.Page)
        self.assertIs(page._parent, iterator)

        partition_id = entity_pb2.PartitionId(project_id=project)
        if txn_id is None:
            read_options = datastore_pb2.ReadOptions()
        else:
            read_options = datastore_pb2.ReadOptions(transaction=txn_id)
        empty_query = query_pb2.Query()
        ds_api.run_query.assert_called_once_with(
            request={
                "project_id": project,
                "partition_id": partition_id,
                "read_options": read_options,
                "query": empty_query,
            },
            **kwargs,
        )
Example No. 5
    def test__build_protobuf_all_values_except_start_and_end_cursor(self):
        # this test and the previous (all_values_except_offset)
        # test mutually exclusive states; the offset is ignored
        # if a start_cursor is supplied
        from google.cloud.datastore_v1.types import query as query_pb2
        from google.cloud.datastore.query import Query

        client = _Client(None)
        query = Query(client)
        limit = 15
        offset = 9
        iterator = self._make_one(query, client, limit=limit, offset=offset)
        self.assertEqual(iterator.max_results, limit)
        iterator.num_results = 4

        pb = iterator._build_protobuf()
        expected_pb = query_pb2.Query(offset=offset -
                                      iterator._skipped_results)
        expected_pb._pb.limit.value = limit - iterator.num_results
        self.assertEqual(pb, expected_pb)
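
Taken together with Example No. 2, the expected protobuf follows one simple rule: the offset sent on the wire is the requested offset minus the rows the backend has already skipped, and the limit is the requested limit minus the rows already returned to the caller. A standalone sketch of that bookkeeping (the helper name _remaining is ours, not the library's):

def _remaining(offset, skipped, limit, returned):
    # Mirrors the arithmetic asserted in Examples No. 2 and No. 5; illustrative only.
    return offset - skipped, limit - returned

assert _remaining(offset=9, skipped=0, limit=15, returned=4) == (9, 11)
# The second request in Example No. 1 carries offset 150 - 100 = 50.
assert _remaining(offset=150, skipped=100, limit=0, returned=0) == (50, 0)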
Example No. 6
def _pb_from_query(query):
    """Convert a Query instance to the corresponding protobuf.

    :type query: :class:`Query`
    :param query: The source query.

    :rtype: :class:`.query_pb2.Query`
    :returns: A protobuf that can be sent to the protobuf API.  N.b. that
              it does not contain "in-flight" fields for ongoing query
              executions (cursors, offset, limit).
    """
    pb = query_pb2.Query()

    for projection_name in query.projection:
        projection = query_pb2.Projection()
        projection.property.name = projection_name
        pb.projection.append(projection)

    if query.kind:
        kind = query_pb2.KindExpression()
        kind.name = query.kind
        pb.kind.append(kind)

    composite_filter = pb.filter.composite_filter
    composite_filter.op = query_pb2.CompositeFilter.Operator.AND

    if query.ancestor:
        ancestor_pb = query.ancestor.to_protobuf()

        # Filter on __key__ HAS_ANCESTOR == ancestor.
        ancestor_filter = composite_filter.filters._pb.add().property_filter
        ancestor_filter.property.name = "__key__"
        ancestor_filter.op = query_pb2.PropertyFilter.Operator.HAS_ANCESTOR
        ancestor_filter.value.key_value.CopyFrom(ancestor_pb._pb)

    for property_name, operator, value in query.filters:
        pb_op_enum = query.OPERATORS.get(operator)

        # Add the specific filter
        property_filter = composite_filter.filters._pb.add().property_filter
        property_filter.property.name = property_name
        property_filter.op = pb_op_enum

        # Set the value to filter on based on the type.
        if property_name == "__key__":
            key_pb = value.to_protobuf()
            property_filter.value.key_value.CopyFrom(key_pb._pb)
        else:
            helpers._set_protobuf_value(property_filter.value, value)

    if not composite_filter.filters:
        pb._pb.ClearField("filter")

    for prop in query.order:
        property_order = query_pb2.PropertyOrder()

        if prop.startswith("-"):
            property_order.property.name = prop[1:]
            property_order.direction = property_order.Direction.DESCENDING
        else:
            property_order.property.name = prop
            property_order.direction = property_order.Direction.ASCENDING

        pb.order.append(property_order)

    for distinct_on_name in query.distinct_on:
        ref = query_pb2.PropertyReference()
        ref.name = distinct_on_name
        pb.distinct_on.append(ref)

    return pb
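
A short usage sketch of the conversion above (not taken from the library's own tests). It assumes _pb_from_query is importable from google.cloud.datastore.query in the version these examples target, and it substitutes a mock for the client, since Query only reads the client's project and namespace attributes here:

from unittest import mock

from google.cloud.datastore.query import Query, _pb_from_query
from google.cloud.datastore_v1.types import query as query_pb2

# Stand-in client; only .project and .namespace are consulted by Query.
client = mock.Mock(project="my-project", namespace=None,
                   spec=["project", "namespace"])

query = Query(client,
              kind="Person",
              filters=[("age", ">=", 18)],
              order=["-age"],
              distinct_on=["city"])
pb = _pb_from_query(query)

assert pb.kind[0].name == "Person"
assert pb.order[0].direction == query_pb2.PropertyOrder.Direction.DESCENDING
assert pb.distinct_on[0].name == "city"
# One property filter was added, so the composite AND filter is kept.
assert len(pb.filter.composite_filter.filters) == 1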
Example No. 7
    def _next_page(self):
        """Get the next page in the iterator.

        :rtype: :class:`~google.api_core.page_iterator.Page`
        :returns: The next page in the iterator (or :data:`None` if
                  there are no pages left).
        """
        if not self._more_results:
            return None

        query_pb = self._build_protobuf()
        transaction = self.client.current_transaction
        if transaction is None:
            transaction_id = None
        else:
            transaction_id = transaction.id
        read_options = helpers.get_read_options(self._eventual, transaction_id)

        partition_id = entity_pb2.PartitionId(
            project_id=self._query.project, namespace_id=self._query.namespace)

        kwargs = {}

        if self._retry is not None:
            kwargs["retry"] = self._retry

        if self._timeout is not None:
            kwargs["timeout"] = self._timeout

        response_pb = self.client._datastore_api.run_query(
            request={
                "project_id": self._query.project,
                "partition_id": partition_id,
                "read_options": read_options,
                "query": query_pb,
            },
            **kwargs,
        )

        while (response_pb.batch.more_results == _NOT_FINISHED
               and response_pb.batch.skipped_results < query_pb.offset):
            # We haven't finished processing. A likely reason is we haven't
            # skipped all of the results yet. Don't return any results.
            # Instead, rerun query, adjusting offsets. Datastore doesn't process
            # more than 1000 skipped results in a query.
            old_query_pb = query_pb
            query_pb = query_pb2.Query()
            query_pb._pb.CopyFrom(old_query_pb._pb)  # copy for testability
            query_pb.start_cursor = response_pb.batch.skipped_cursor
            query_pb.offset -= response_pb.batch.skipped_results

            response_pb = self.client._datastore_api.run_query(
                request={
                    "project_id": self._query.project,
                    "partition_id": partition_id,
                    "read_options": read_options,
                    "query": query_pb,
                },
                **kwargs,
            )

        entity_pbs = self._process_query_results(response_pb)
        return page_iterator.Page(self, entity_pbs, self.item_to_value)
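
Tying this loop back to Example No. 1: with a requested offset of 150 and a first batch that only skips 100 rows, the skipped_results < query_pb.offset condition holds once, so exactly one follow-up run_query call goes out carrying the skipped cursor and the remaining offset of 50. A plain-Python trace of that bookkeeping (no RPCs, purely illustrative):

requested_offset = 150
batch_skips = [100, 50]        # what the two fake responses in Example No. 1 report

offset, calls = requested_offset, 0
for skipped in batch_skips:
    calls += 1                 # one run_query call per batch
    if skipped >= offset:      # loop condition becomes false -> stop re-querying
        break
    offset -= skipped          # remainder carried into the next request

assert calls == 2              # matches the two expected run_query calls
assert offset == 50            # offset sent on the second request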
Example No. 8
    def _make_query_pb(kind):
        from google.cloud.datastore_v1.types import query as query_pb2

        return query_pb2.Query(kind=[query_pb2.KindExpression(name=kind)])