    def test_read_rows(self):
        row = self._table.row(ROW_KEY)
        row_alt = self._table.row(ROW_KEY_ALT)
        self.rows_to_delete.extend([row, row_alt])

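        # Write four cells, alternating between the two rows: cell1 and
        # cell3 land in row, cell2 and cell4 in row_alt (matching the
        # expected row data constructed below).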
        cell1, cell2, cell3, cell4 = self._write_to_row(row, row_alt,
                                                        row, row_alt)
        row.commit()
        row_alt.commit()

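        # read_rows returns lazily: no rows are available until the
        # response stream has been consumed.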
        rows_data = self._table.read_rows()
        self.assertEqual(rows_data.rows, {})
        rows_data.consume_all()

        # NOTE: We should refrain from editing protected data on instances.
        #       Instead we should make the values public or provide factories
        #       for constructing objects with them.
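        #       A hypothetical factory along those lines (not part of
        #       the library) might look like:
        #
        #           def _make_partial_row_data(row_key, cells):
        #               row_data = PartialRowData(row_key)
        #               row_data._chunks_encountered = True
        #               row_data._committed = True
        #               row_data._cells = cells
        #               return row_data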
        row_data = PartialRowData(ROW_KEY)
        row_data._chunks_encountered = True
        row_data._committed = True
        row_data._cells = {
            COLUMN_FAMILY_ID1: {
                COL_NAME1: [cell1],
                COL_NAME2: [cell3],
            },
        }

        row_alt_data = PartialRowData(ROW_KEY_ALT)
        row_alt_data._chunks_encountered = True
        row_alt_data._committed = True
        row_alt_data._cells = {
            COLUMN_FAMILY_ID1: {
                COL_NAME1: [cell2],
            },
            COLUMN_FAMILY_ID2: {
                COL_NAME3: [cell4],
            },
        }

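        # After consuming the stream, both rows written above should be
        # present, with cells grouped by column family and column.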
        expected_rows = {
            ROW_KEY: row_data,
            ROW_KEY_ALT: row_alt_data,
        }
        self.assertEqual(rows_data.rows, expected_rows)

    def _read_row_helper(self, chunks):
        from gcloud_bigtable._generated import (
            bigtable_service_messages_pb2 as messages_pb2)
        from gcloud_bigtable._grpc_mocks import StubMock
        from gcloud_bigtable._testing import _MockCalled
        from gcloud_bigtable._testing import _Monkey
        from gcloud_bigtable.row_data import PartialRowData
        from gcloud_bigtable import table as MUT

        client = _Client()
        cluster_name = ('projects/' + PROJECT_ID + '/zones/' + ZONE +
                        '/clusters/' + CLUSTER_ID)
        cluster = _Cluster(cluster_name, client=client)
        table = self._makeOne(TABLE_ID, cluster)

        # Create request_pb
        request_pb = object()  # Returned by our mock.
        mock_create_row_request = _MockCalled(request_pb)

        # Create a response_iterator simulating the streamed
        # ReadRows responses.
        row_key = b'row-key'
        response_pb = messages_pb2.ReadRowsResponse(row_key=row_key,
                                                    chunks=chunks)
        response_iterator = [response_pb]

        # Patch the stub used by the API method.
        client.data_stub = stub = StubMock(response_iterator)

        # Create expected_result: with chunks, read_row returns a
        # committed PartialRowData; with no chunks, it returns None.
        if chunks:
            expected_result = PartialRowData(row_key)
            expected_result._committed = True
            expected_result._chunks_encountered = True
        else:
            expected_result = None

        # Perform the method and check the result.
        filter_obj = object()  # Sentinel forwarded to _create_row_request.
        timeout_seconds = 596  # Arbitrary value, checked in the stub call.
        with _Monkey(MUT, _create_row_request=mock_create_row_request):
            result = table.read_row(row_key, filter_=filter_obj,
                                    timeout_seconds=timeout_seconds)

        self.assertEqual(result, expected_result)
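        # The stub should have received exactly one ReadRows call, with
        # the mocked request and the timeout passed through.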
        self.assertEqual(stub.method_calls, [(
            'ReadRows',
            (request_pb, timeout_seconds),
            {},
        )])
        mock_create_row_request.check_called(
            self, [(table.name,)],
            [{'row_key': row_key, 'filter_': filter_obj}])