Example #1
    def read_row(self, row_key, filter_=None):
        """Read a single row from this table.

        :type row_key: bytes
        :param row_key: The key of the row to read from.

        :type filter_: :class:`.RowFilter`
        :param filter_: (Optional) The filter to apply to the contents of the
                        row. If unset, returns the entire row.

        :rtype: :class:`.PartialRowData`, :data:`NoneType <types.NoneType>`
        :returns: The contents of the row if any chunks were returned in
                  the response, otherwise :data:`None`.
        :raises: :class:`ValueError <exceptions.ValueError>` if a commit row
                 chunk is never encountered.
        """
        request_pb = _create_row_request(self.name,
                                         row_key=row_key,
                                         filter_=filter_)
        client = self._cluster._client
        response_iterator = client._data_stub.ReadRows(request_pb,
                                                       client.timeout_seconds)
        # We expect an iterator of `data_messages_pb2.ReadRowsResponse`
        result = PartialRowData(row_key)
        for read_rows_response in response_iterator:
            result.update_from_read_rows(read_rows_response)

        # Make sure the result actually contains data.
        if not result._chunks_encountered:
            return None
        # Make sure the result was committed by the back-end.
        if not result.committed:
            raise ValueError('The row remains partial / is not committed.')
        return result
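Example #1 above is the Table.read_row method itself. A minimal usage sketch follows, assuming the cluster-era gcloud.bigtable client surface; the project, zone, cluster, and table names are hypothetical, not taken from the examples:

    from gcloud import bigtable

    client = bigtable.Client(project='my-project')  # hypothetical project ID
    cluster = client.cluster('us-central1-b', 'my-cluster')  # hypothetical zone / cluster ID
    table = cluster.table('my-table')  # hypothetical table ID

    row = table.read_row(b'row-key-1')
    if row is None:
        print('No chunks were returned for this row key.')
    else:
        for family_id, columns in row.cells.items():
            for qualifier, cells in columns.items():
                print(family_id, qualifier, [cell.value for cell in cells])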
Example #2
    def test_read_rows(self):
        row = self._table.row(ROW_KEY)
        row_alt = self._table.row(ROW_KEY_ALT)
        self.rows_to_delete.extend([row, row_alt])

        cell1, cell2, cell3, cell4 = self._write_to_row(row, row_alt,
                                                        row, row_alt)
        row.commit()
        row_alt.commit()

        rows_data = self._table.read_rows()
        self.assertEqual(rows_data.rows, {})
        rows_data.consume_all()

        # NOTE: We should refrain from editing protected data on instances.
        #       Instead we should make the values public or provide factories
        #       for constructing objects with them.
        row_data = PartialRowData(ROW_KEY)
        row_data._chunks_encountered = True
        row_data._committed = True
        row_data._cells = {
            COLUMN_FAMILY_ID1: {
                COL_NAME1: [cell1],
                COL_NAME2: [cell3],
            },
        }

        row_alt_data = PartialRowData(ROW_KEY_ALT)
        row_alt_data._chunks_encountered = True
        row_alt_data._committed = True
        row_alt_data._cells = {
            COLUMN_FAMILY_ID1: {
                COL_NAME1: [cell2],
            },
            COLUMN_FAMILY_ID2: {
                COL_NAME3: [cell4],
            },
        }

        expected_rows = {
            ROW_KEY: row_data,
            ROW_KEY_ALT: row_alt_data,
        }
        self.assertEqual(rows_data.rows, expected_rows)
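Example #2 also pins down an ordering detail: Table.read_rows returns a PartialRowsData whose rows mapping stays empty until the response stream is actually consumed. A sketch of that pattern, reusing the hypothetical table from the sketch above:

    rows_data = table.read_rows()
    # Nothing has been pulled off the gRPC stream yet.
    assert rows_data.rows == {}

    # consume_all() drains the response iterator, merging each
    # ReadRowsResponse into the PartialRowData keyed by its row key.
    rows_data.consume_all()
    for row_key, row_data in sorted(rows_data.rows.items()):
        print(row_key, row_data.cells)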
Example #3
    def _read_row_helper(self, chunks):
        from gcloud._testing import _Monkey
        from gcloud.bigtable._generated import (
            bigtable_service_messages_pb2 as messages_pb2)
        from gcloud.bigtable._testing import _FakeStub
        from gcloud.bigtable.row_data import PartialRowData
        from gcloud.bigtable import table as MUT

        project_id = 'project-id'
        zone = 'zone'
        cluster_id = 'cluster-id'
        table_id = 'table-id'
        timeout_seconds = 596
        client = _Client(timeout_seconds=timeout_seconds)
        cluster_name = ('projects/' + project_id + '/zones/' + zone +
                        '/clusters/' + cluster_id)
        cluster = _Cluster(cluster_name, client=client)
        table = self._makeOne(table_id, cluster)

        # Create request_pb
        request_pb = object()  # Returned by our mock.
        mock_created = []

        def mock_create_row_request(table_name, row_key, filter_):
            mock_created.append((table_name, row_key, filter_))
            return request_pb

        # Create response_iterator
        row_key = b'row-key'
        response_pb = messages_pb2.ReadRowsResponse(row_key=row_key,
                                                    chunks=chunks)
        response_iterator = [response_pb]

        # Patch the stub used by the API method.
        client._data_stub = stub = _FakeStub(response_iterator)

        # Create expected_result.
        if chunks:
            expected_result = PartialRowData(row_key)
            expected_result._committed = True
            expected_result._chunks_encountered = True
        else:
            expected_result = None

        # Perform the method and check the result.
        filter_obj = object()
        with _Monkey(MUT, _create_row_request=mock_create_row_request):
            result = table.read_row(row_key, filter_=filter_obj)

        self.assertEqual(result, expected_result)
        self.assertEqual(stub.method_calls, [(
            'ReadRows',
            (request_pb, timeout_seconds),
            {},
        )])
        self.assertEqual(mock_created, [(table.name, row_key, filter_obj)])
Example #4
    def test_consume_next(self):
        from gcloud.bigtable._generated import (
            bigtable_service_messages_pb2 as messages_pb2)
        from gcloud.bigtable.row_data import PartialRowData

        row_key = b'row-key'
        value_pb = messages_pb2.ReadRowsResponse(row_key=row_key)
        response_iterator = _MockCancellableIterator(value_pb)
        partial_rows_data = self._makeOne(response_iterator)
        self.assertEqual(partial_rows_data.rows, {})
        partial_rows_data.consume_next()
        expected_rows = {row_key: PartialRowData(row_key)}
        self.assertEqual(partial_rows_data.rows, expected_rows)
Example #5
    def test_consume_next_row_exists(self):
        from gcloud.bigtable._generated import (
            bigtable_service_messages_pb2 as messages_pb2)
        from gcloud.bigtable.row_data import PartialRowData

        row_key = b'row-key'
        chunk = messages_pb2.ReadRowsResponse.Chunk(commit_row=True)
        value_pb = messages_pb2.ReadRowsResponse(row_key=row_key,
                                                 chunks=[chunk])
        response_iterator = _MockCancellableIterator(value_pb)
        partial_rows_data = self._makeOne(response_iterator)
        existing_values = PartialRowData(row_key)
        partial_rows_data._rows[row_key] = existing_values
        self.assertFalse(existing_values.committed)
        partial_rows_data.consume_next()
        self.assertTrue(existing_values.committed)
        self.assertEqual(existing_values.cells, {})
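Examples #4 and #5 exercise the incremental alternative to consume_all: each consume_next() call merges exactly one response from the stream, and in this version of the library the underlying iterator signals exhaustion with StopIteration. A hedged sketch of one-at-a-time consumption, again reusing the hypothetical table:

    rows_data = table.read_rows()
    while True:
        try:
            rows_data.consume_next()  # merge a single ReadRowsResponse
        except StopIteration:
            break  # response stream exhausted
    print(sorted(rows_data.rows))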
Example #6
    def test_read_row_complete(self):
        from gcloud.bigtable.row_data import Cell
        from gcloud.bigtable.row_data import PartialRowData

        chunk = _ReadRowsResponseCellChunkPB(
            row_key=self.ROW_KEY,
            family_name=self.FAMILY_NAME,
            qualifier=self.QUALIFIER,
            timestamp_micros=self.TIMESTAMP_MICROS,
            value=self.VALUE,
            commit_row=True,
        )
        chunks = [chunk]
        expected_result = PartialRowData(row_key=self.ROW_KEY)
        family = expected_result._cells.setdefault(self.FAMILY_NAME, {})
        column = family.setdefault(self.QUALIFIER, [])
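        # The chunk duck-types as a cell message here: it carries the same
        # value and timestamp_micros fields that Cell.from_pb reads.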
        column.append(Cell.from_pb(chunk))
        self._read_row_helper(chunks, expected_result)