Example #1
    def read_row(self, row_key, filter_=None):
        """Read a single row from this table.

        :type row_key: bytes
        :param row_key: The key of the row to read from.

        :type filter_: :class:`.RowFilter`
        :param filter_: (Optional) The filter to apply to the contents of the
                        row. If unset, returns the entire row.

        :rtype: :class:`.PartialRowData`, :data:`NoneType <types.NoneType>`
        :returns: The contents of the row if any chunks were returned in
                  the response, otherwise :data:`None`.
        :raises: :class:`ValueError <exceptions.ValueError>` if a commit row
                 chunk is never encountered.
        """
        request_pb = _create_row_request(self.name, row_key=row_key,
                                         filter_=filter_)
        client = self._instance._client
        response_iterator = client._data_stub.ReadRows(request_pb,
                                                       client.timeout_seconds)
        rows_data = PartialRowsData(response_iterator)
        rows_data.consume_all()
        if rows_data.state not in (rows_data.NEW_ROW, rows_data.START):
            raise ValueError('The row remains partial / is not committed.')

        if len(rows_data.rows) == 0:
            return None

        return rows_data.rows[row_key]
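A minimal usage sketch for the method above; ``table`` is assumed to be an
already-constructed ``Table`` and the row key is hypothetical, not from the
source:

# Hypothetical usage (assumes ``table`` is a constructed Table whose
# instance's client holds an open data connection).
row = table.read_row(b'row-key-1')
if row is None:
    # No chunks were returned for this key.
    print('row not found')
else:
    # PartialRowData.cells maps column family -> column -> list of cells.
    for family, columns in row.cells.items():
        print(family, sorted(columns))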
Example #2
    def read_rows(self,
                  start_key=None,
                  end_key=None,
                  allow_row_interleaving=None,
                  limit=None,
                  filter_=None):
        """Read rows from this table.

        :type start_key: bytes
        :param start_key: (Optional) The beginning of a range of row keys to
                          read from. The range will include ``start_key``. If
                          left empty, will be interpreted as the empty string.

        :type end_key: bytes
        :param end_key: (Optional) The end of a range of row keys to read from.
                        The range will not include ``end_key``. If left empty,
                        will be interpreted as an infinite string.

        :type allow_row_interleaving: bool
        :param allow_row_interleaving: (Optional) By default, rows are read
                                       sequentially, producing results which
                                       are guaranteed to arrive in increasing
                                       row order. Setting
                                       ``allow_row_interleaving`` to
                                       :data:`True` allows multiple rows to be
                                       interleaved in the response stream,
                                       which increases throughput but breaks
                                       this guarantee, and may force the
                                       client to use more memory to buffer
                                       partially-received rows.

        :type limit: int
        :param limit: (Optional) The read will terminate after committing to N
                      rows' worth of results. The default (zero) is to return
                      all results. Note that if ``allow_row_interleaving`` is
                      set to :data:`True`, partial results may be returned for
                      more than N rows. However, only N ``commit_row`` chunks
                      will be sent.

        :type filter_: :class:`.RowFilter`
        :param filter_: (Optional) The filter to apply to the contents of the
                        specified row(s). If unset, reads every column in
                        each row.

        :rtype: :class:`.PartialRowsData`
        :returns: A :class:`.PartialRowsData` convenience wrapper for consuming
                  the streamed results.
        """
        request_pb = _create_row_request(
            self.name,
            start_key=start_key,
            end_key=end_key,
            filter_=filter_,
            allow_row_interleaving=allow_row_interleaving,
            limit=limit)
        client = self._cluster._client
        response_iterator = client._data_stub.ReadRows(request_pb,
                                                       client.timeout_seconds)
        # We expect an iterator of `data_messages_pb2.ReadRowsResponse`
        return PartialRowsData(response_iterator)
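A hedged sketch of consuming the stream this method returns; the keys and
limit are invented. With ``allow_row_interleaving`` set, chunks from
different rows may arrive mixed together, so the wrapper buffers partial
rows until their commit chunks appear:

# Hypothetical usage of the cluster-era read_rows (keys/limit invented).
rows_data = table.read_rows(start_key=b'a', end_key=b'n',
                            allow_row_interleaving=True, limit=100)
rows_data.consume_all()  # drain the gRPC stream into rows_data.rows
for row_key, row in rows_data.rows.items():
    print(row_key, len(row.cells))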
Example #3
    def test_read_rows(self):
        from gcloud._testing import _Monkey
        from gcloud.bigtable._testing import _FakeStub
        from gcloud.bigtable.row_data import PartialRowsData
        from gcloud.bigtable import table as MUT

        project_id = 'project-id'
        zone = 'zone'
        cluster_id = 'cluster-id'
        table_id = 'table-id'
        timeout_seconds = 1111
        client = _Client(timeout_seconds=timeout_seconds)
        cluster_name = ('projects/' + project_id + '/zones/' + zone +
                        '/clusters/' + cluster_id)
        cluster = _Cluster(cluster_name, client=client)
        table = self._makeOne(table_id, cluster)

        # Create request_pb
        request_pb = object()  # Returned by our mock.
        mock_created = []

        def mock_create_row_request(table_name, **kwargs):
            mock_created.append((table_name, kwargs))
            return request_pb

        # Create response_iterator
        response_iterator = object()

        # Patch the stub used by the API method.
        client._data_stub = stub = _FakeStub(response_iterator)

        # Create expected_result.
        expected_result = PartialRowsData(response_iterator)

        # Perform the method and check the result.
        start_key = b'start-key'
        end_key = b'end-key'
        filter_obj = object()
        allow_row_interleaving = True
        limit = 22
        with _Monkey(MUT, _create_row_request=mock_create_row_request):
            result = table.read_rows(
                start_key=start_key, end_key=end_key, filter_=filter_obj,
                allow_row_interleaving=allow_row_interleaving, limit=limit)

        self.assertEqual(result, expected_result)
        self.assertEqual(stub.method_calls, [(
            'ReadRows',
            (request_pb, timeout_seconds),
            {},
        )])
        created_kwargs = {
            'start_key': start_key,
            'end_key': end_key,
            'filter_': filter_obj,
            'allow_row_interleaving': allow_row_interleaving,
            'limit': limit,
        }
        self.assertEqual(mock_created, [(table.name, created_kwargs)])
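The test leans on two small helpers. A plausible minimal stand-in for
``_FakeStub`` is sketched below (the real one lives in
``gcloud.bigtable._testing``); all the assertion on ``stub.method_calls``
needs is that every call is recorded and a canned result is returned:

# Plausible minimal stand-in for gcloud.bigtable._testing._FakeStub.
class _FakeStub(object):

    def __init__(self, *results):
        self.results = results
        self.method_calls = []

    def __getattr__(self, name):
        # Any attribute access (e.g. stub.ReadRows) yields a callable
        # that logs (method, args, kwargs) and returns the next result.
        def mock_method(*args, **kwargs):
            self.method_calls.append((name, args, kwargs))
            result, self.results = self.results[0], self.results[1:]
            return result
        return mock_method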
Example #4
    def test_read_rows(self):
        from gcloud._testing import _Monkey
        from gcloud.bigtable._testing import _FakeStub
        from gcloud.bigtable.row_data import PartialRowsData
        from gcloud.bigtable import table as MUT

        client = _Client(timeout_seconds=self.TIMEOUT_SECONDS)
        instance = _Instance(self.INSTANCE_NAME, client=client)
        table = self._makeOne(self.TABLE_ID, instance)

        # Create request_pb
        request_pb = object()  # Returned by our mock.
        mock_created = []

        def mock_create_row_request(table_name, **kwargs):
            mock_created.append((table_name, kwargs))
            return request_pb

        # Create response_iterator
        response_iterator = object()

        # Patch the stub used by the API method.
        client._data_stub = stub = _FakeStub(response_iterator)

        # Create expected_result.
        expected_result = PartialRowsData(response_iterator)

        # Perform the method and check the result.
        start_key = b'start-key'
        end_key = b'end-key'
        filter_obj = object()
        limit = 22
        with _Monkey(MUT, _create_row_request=mock_create_row_request):
            result = table.read_rows(start_key=start_key,
                                     end_key=end_key,
                                     filter_=filter_obj,
                                     limit=limit)

        self.assertEqual(result, expected_result)
        self.assertEqual(stub.method_calls, [(
            'ReadRows',
            (request_pb, self.TIMEOUT_SECONDS),
            {},
        )])
        created_kwargs = {
            'start_key': start_key,
            'end_key': end_key,
            'filter_': filter_obj,
            'limit': limit,
        }
        self.assertEqual(mock_created, [(table.name, created_kwargs)])
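Both tests also rely on ``_Monkey`` from ``gcloud._testing`` to swap
``_create_row_request`` out for the mock. A plausible minimal version is a
context manager that replaces module attributes on entry and restores them
on exit:

# Plausible minimal sketch of gcloud._testing._Monkey.
class _Monkey(object):

    def __init__(self, module, **attrs):
        self.module = module
        self.attrs = attrs
        self.original = {}

    def __enter__(self):
        for name, new_value in self.attrs.items():
            self.original[name] = getattr(self.module, name)
            setattr(self.module, name, new_value)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Restore the originals even if the with-block raised.
        for name, old_value in self.original.items():
            setattr(self.module, name, old_value)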
Example #5
    def read_rows(self,
                  start_key=None,
                  end_key=None,
                  limit=None,
                  filter_=None):
        """Read rows from this table.

        :type start_key: bytes
        :param start_key: (Optional) The beginning of a range of row keys to
                          read from. The range will include ``start_key``. If
                          left empty, will be interpreted as the empty string.

        :type end_key: bytes
        :param end_key: (Optional) The end of a range of row keys to read from.
                        The range will not include ``end_key``. If left empty,
                        will be interpreted as an infinite string.

        :type limit: int
        :param limit: (Optional) The read will terminate after committing to N
                      rows' worth of results. The default (zero) is to return
                      all results.

        :type filter_: :class:`.RowFilter`
        :param filter_: (Optional) The filter to apply to the contents of the
                        specified row(s). If unset, reads every column in
                        each row.

        :rtype: :class:`.PartialRowsData`
        :returns: A :class:`.PartialRowsData` convenience wrapper for consuming
                  the streamed results.
        """
        request_pb = _create_row_request(self.name,
                                         start_key=start_key,
                                         end_key=end_key,
                                         filter_=filter_,
                                         limit=limit)
        client = self._instance._client
        response_iterator = client._data_stub.ReadRows(request_pb,
                                                       client.timeout_seconds)
        # We expect an iterator of `data_messages_v2_pb2.ReadRowsResponse`
        return PartialRowsData(response_iterator)
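Because this variant returns the wrapper without consuming anything, the
caller decides how to drain the stream. A sketch of incremental
consumption, assuming ``PartialRowsData`` also exposes ``consume_next()``
as it does in the gcloud-python versions these examples come from:

# Hypothetical incremental consumption (identifiers invented).
rows_data = table.read_rows(start_key=b'k1', end_key=b'k9', limit=10)
while True:
    try:
        rows_data.consume_next()  # pull one ReadRowsResponse off the stream
    except StopIteration:
        break  # the server closed the stream
print(len(rows_data.rows), 'rows buffered')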