def test_read_rows(self):
    from google.cloud.bigtable.row_data import PartialRowData

    row = self._table.row(ROW_KEY)
    row_alt = self._table.row(ROW_KEY_ALT)
    self.rows_to_delete.extend([row, row_alt])

    cell1, cell2, cell3, cell4 = self._write_to_row(row, row_alt, row, row_alt)
    row.commit()
    row_alt.commit()

    rows_data = self._table.read_rows()
    self.assertEqual(rows_data.rows, {})
    rows_data.consume_all()

    # NOTE: We should refrain from editing protected data on instances.
    #       Instead we should make the values public or provide factories
    #       for constructing objects with them.
    row_data = PartialRowData(ROW_KEY)
    row_data._chunks_encountered = True
    row_data._committed = True
    row_data._cells = {
        COLUMN_FAMILY_ID1: {COL_NAME1: [cell1], COL_NAME2: [cell3]}
    }

    row_alt_data = PartialRowData(ROW_KEY_ALT)
    row_alt_data._chunks_encountered = True
    row_alt_data._committed = True
    row_alt_data._cells = {
        COLUMN_FAMILY_ID1: {COL_NAME1: [cell2]},
        COLUMN_FAMILY_ID2: {COL_NAME3: [cell4]},
    }

    expected_rows = {ROW_KEY: row_data, ROW_KEY_ALT: row_alt_data}
    self.assertEqual(rows_data.rows, expected_rows)
def test_table_read_rows(data_table, rows_to_delete):
    from google.cloud.bigtable.row_data import PartialRowData

    row = data_table.direct_row(ROW_KEY)
    rows_to_delete.append(row)
    row_alt = data_table.direct_row(ROW_KEY_ALT)
    rows_to_delete.append(row_alt)

    cell1, cell2, cell3, cell4 = _write_to_row(row, row_alt, row, row_alt)
    row.commit()
    row_alt.commit()

    rows_data = data_table.read_rows()
    assert rows_data.rows == {}
    rows_data.consume_all()

    # NOTE: We should refrain from editing protected data on instances.
    #       Instead we should make the values public or provide factories
    #       for constructing objects with them.
    row_data = PartialRowData(ROW_KEY)
    row_data._chunks_encountered = True
    row_data._committed = True
    row_data._cells = {
        COLUMN_FAMILY_ID1: {COL_NAME1: [cell1], COL_NAME2: [cell3]}
    }

    row_alt_data = PartialRowData(ROW_KEY_ALT)
    row_alt_data._chunks_encountered = True
    row_alt_data._committed = True
    row_alt_data._cells = {
        COLUMN_FAMILY_ID1: {COL_NAME1: [cell2]},
        COLUMN_FAMILY_ID2: {COL_NAME3: [cell4]},
    }

    expected_rows = {ROW_KEY: row_data, ROW_KEY_ALT: row_alt_data}
    assert rows_data.rows == expected_rows
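# NOTE: The `_write_to_row` helper used by the two read_rows tests above is not
# defined in this snippet. The sketch below is a hypothetical, minimal version
# consistent with the expected `_cells` dicts in those tests: it writes one
# value per (row, column) slot with DirectRow.set_cell() and returns matching
# Cell objects. The CELL_VAL* constants are assumptions, not from the source.
def _write_to_row(row1, row2, row3, row4):
    import datetime

    from google.cloud._helpers import _datetime_from_microseconds
    from google.cloud._helpers import _microseconds_from_datetime
    from google.cloud._helpers import UTC
    from google.cloud.bigtable.row_data import Cell

    # Bigtable stores millisecond granularity, so truncate the microseconds.
    micros = _microseconds_from_datetime(
        datetime.datetime.utcnow().replace(tzinfo=UTC)
    )
    micros -= micros % 1000
    timestamp = _datetime_from_microseconds(micros)

    row1.set_cell(COLUMN_FAMILY_ID1, COL_NAME1, CELL_VAL1, timestamp=timestamp)
    row2.set_cell(COLUMN_FAMILY_ID1, COL_NAME1, CELL_VAL2, timestamp=timestamp)
    row3.set_cell(COLUMN_FAMILY_ID1, COL_NAME2, CELL_VAL3, timestamp=timestamp)
    row4.set_cell(COLUMN_FAMILY_ID2, COL_NAME3, CELL_VAL4, timestamp=timestamp)

    return tuple(
        Cell(value, micros)
        for value in (CELL_VAL1, CELL_VAL2, CELL_VAL3, CELL_VAL4)
    )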
def test_row_with_results(self):
    from google.cloud.bigtable.row_data import PartialRowData

    row_key = "row-key"
    name = "table-name"
    connection = None
    table = self._make_one(name, connection)
    table._low_level_table = _MockLowLevelTable()
    partial_row = PartialRowData(row_key)
    table._low_level_table.read_row_result = partial_row

    # Set-up mocks.
    fake_filter = object()
    mock_filters = []

    def mock_filter_chain_helper(**kwargs):
        mock_filters.append(kwargs)
        return fake_filter

    fake_pair = object()
    mock_cells = []

    def mock_cells_to_pairs(*args, **kwargs):
        mock_cells.append((args, kwargs))
        return [fake_pair]

    col_fam = u"cf1"
    qual = b"qual"
    fake_cells = object()
    partial_row._cells = {col_fam: {qual: fake_cells}}
    include_timestamp = object()

    patch = mock.patch.multiple(
        "google.cloud.happybase.table",
        _filter_chain_helper=mock_filter_chain_helper,
        _cells_to_pairs=mock_cells_to_pairs,
    )
    with patch:
        result = table.row(row_key, include_timestamp=include_timestamp)

    # The results come from _cells_to_pairs.
    expected_result = {col_fam.encode("ascii") + b":" + qual: fake_pair}
    self.assertEqual(result, expected_result)

    read_row_args = (row_key,)
    read_row_kwargs = {"filter_": fake_filter}
    self.assertEqual(
        table._low_level_table.read_row_calls,
        [(read_row_args, read_row_kwargs)],
    )

    expected_kwargs = {"filters": [], "versions": 1, "timestamp": None}
    self.assertEqual(mock_filters, [expected_kwargs])

    to_pairs_kwargs = {"include_timestamp": include_timestamp}
    self.assertEqual(mock_cells, [((fake_cells,), to_pairs_kwargs)])
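# `_MockLowLevelTable`, used by several tests here, is defined elsewhere in the
# test module. A minimal sketch of the shape the assertions rely on: it records
# read_row()/read_rows() call arguments and returns canned results. Attribute
# names follow the assertions above; everything else is an assumption.
class _MockLowLevelTable(object):
    def __init__(self, *args, **kwargs):
        self.args = args
        self.kwargs = kwargs
        self.read_row_calls = []
        self.read_row_result = None
        self.read_rows_calls = []
        self.read_rows_result = None

    def read_row(self, *args, **kwargs):
        self.read_row_calls.append((args, kwargs))
        return self.read_row_result

    def read_rows(self, *args, **kwargs):
        self.read_rows_calls.append((args, kwargs))
        return self.read_rows_result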
def test_scan_with_results(self):
    from google.cloud.bigtable.row_data import PartialRowData

    row_key1 = "row-key1"
    row1 = PartialRowData(row_key1)
    rr_result = _MockPartialRowsData(rows={row_key1: row1}, iterations=1)

    include_timestamp = object()
    expected_result = [(row_key1, {})]
    self._scan_test_helper(
        include_timestamp=include_timestamp,
        rr_result=rr_result,
        expected_result=expected_result,
    )
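# `_MockPartialRowsData` stands in for PartialRowsData in the scan and rows
# tests. A hypothetical sketch consistent with how it is constructed above (a
# `rows` mapping plus an `iterations` budget); the call-counter attributes and
# consume_next() behavior are assumptions.
class _MockPartialRowsData(object):
    def __init__(self, rows=None, iterations=0):
        self.rows = rows or {}
        self.iterations = iterations
        self.consume_all_calls = 0
        self.consume_next_calls = 0

    def consume_all(self):
        self.consume_all_calls += 1

    def consume_next(self):
        # Each call consumes one of the remaining iterations.
        self.consume_next_calls += 1
        self.iterations -= 1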
def test_without_timestamp(self):
    from google.cloud.bigtable.row_data import Cell
    from google.cloud.bigtable.row_data import PartialRowData

    row_data = PartialRowData(b"row-key")
    val1 = b"hi-im-bytes"
    val2 = b"bi-im-hytes"
    row_data._cells[u"fam1"] = {
        b"col1": [Cell(val1, None)],
        b"col2": [Cell(val2, None)],
    }

    result = self._call_fut(row_data)
    expected_result = {b"fam1:col1": val1, b"fam1:col2": val2}
    self.assertEqual(result, expected_result)
def test_read_row_complete(self):
    from google.cloud.bigtable.row_data import Cell
    from google.cloud.bigtable.row_data import PartialRowData

    chunk = _ReadRowsResponseCellChunkPB(
        row_key=self.ROW_KEY,
        family_name=self.FAMILY_NAME,
        qualifier=self.QUALIFIER,
        timestamp_micros=self.TIMESTAMP_MICROS,
        value=self.VALUE,
        commit_row=True,
    )
    chunks = [chunk]

    expected_result = PartialRowData(row_key=self.ROW_KEY)
    family = expected_result._cells.setdefault(self.FAMILY_NAME, {})
    column = family.setdefault(self.QUALIFIER, [])
    column.append(Cell.from_pb(chunk))

    self._read_row_helper(chunks, expected_result)
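# `_ReadRowsResponseCellChunkPB` is a small factory defined elsewhere in the
# unit-test module. A plausible sketch: it builds a ReadRowsResponse.CellChunk
# protobuf and sets the wrapped family_name/qualifier values explicitly. The
# import path for the generated messages depends on the installed
# google-cloud-bigtable version and is an assumption here.
def _ReadRowsResponseCellChunkPB(*args, **kw):
    from google.cloud.bigtable_v2.proto import bigtable_pb2 as messages_v2_pb2

    family_name = kw.pop("family_name")
    qualifier = kw.pop("qualifier")
    message = messages_v2_pb2.ReadRowsResponse.CellChunk(*args, **kw)
    # family_name and qualifier are wrapper types, so assign to .value.
    message.family_name.value = family_name
    message.qualifier.value = qualifier
    return message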
def test_with_timestamp(self):
    from google.cloud.bigtable.row_data import Cell
    from google.cloud.bigtable.row_data import PartialRowData

    row_data = PartialRowData(b"row-key")
    val1 = b"hi-im-bytes"
    ts1_millis = 1221934570148
    ts1 = ts1_millis * 1000
    val2 = b"bi-im-hytes"
    ts2_millis = 1331934880000
    ts2 = ts2_millis * 1000
    row_data._cells[u"fam1"] = {
        b"col1": [Cell(val1, ts1)],
        b"col2": [Cell(val2, ts2)],
    }

    result = self._call_fut(row_data, include_timestamp=True)
    expected_result = {
        b"fam1:col1": (val1, ts1_millis),
        b"fam1:col2": (val2, ts2_millis),
    }
    self.assertEqual(result, expected_result)
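# The two tests above (with and without timestamps) exercise a helper that
# flattens a PartialRowData into HappyBase-style {b"family:qualifier": value}
# pairs. The sketch below only illustrates the behavior the assertions imply,
# assuming Cell exposes its timestamp as `timestamp_micros` and that
# PartialRowData.to_dict() keys cells by b"family:qualifier"; the helper name
# is illustrative, not the library's.
def _partial_row_to_pairs(partial_row_data, include_timestamp=False):
    result = {}
    for column, cells in partial_row_data.to_dict().items():
        cell = cells[0]  # Assume a single version per cell for simplicity.
        if include_timestamp:
            # Bigtable reports microseconds; HappyBase expects milliseconds.
            result[column] = (cell.value, cell.timestamp_micros // 1000)
        else:
            result[column] = cell.value
    return result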
def test_cells_with_results(self):
    from google.cloud.bigtable.row_data import PartialRowData

    row_key = "row-key"
    name = "table-name"
    connection = None
    table = self._make_one(name, connection)
    table._low_level_table = _MockLowLevelTable()
    partial_row = PartialRowData(row_key)
    table._low_level_table.read_row_result = partial_row

    # These are all passed to mocks.
    versions = object()
    timestamp = object()
    include_timestamp = object()

    # Set-up mocks.
    fake_filter = object()
    mock_filters = []

    def mock_filter_chain_helper(**kwargs):
        mock_filters.append(kwargs)
        return fake_filter

    fake_result = object()
    mock_cells = []

    def mock_cells_to_pairs(*args, **kwargs):
        mock_cells.append((args, kwargs))
        return fake_result

    col_fam = "cf1"
    qual = "qual"
    fake_cells = object()
    partial_row._cells = {col_fam: {qual.encode("utf-8"): fake_cells}}
    column = (col_fam + ":" + qual).encode("utf-8")

    patch = mock.patch.multiple(
        "google.cloud.happybase.table",
        _filter_chain_helper=mock_filter_chain_helper,
        _cells_to_pairs=mock_cells_to_pairs,
    )
    with patch:
        result = table.cells(
            row_key,
            column,
            versions=versions,
            timestamp=timestamp,
            include_timestamp=include_timestamp,
        )

    self.assertEqual(result, fake_result)

    read_row_args = (row_key,)
    read_row_kwargs = {"filter_": fake_filter}
    self.assertEqual(
        table._low_level_table.read_row_calls,
        [(read_row_args, read_row_kwargs)],
    )

    filter_kwargs = {"column": column, "versions": versions, "timestamp": timestamp}
    self.assertEqual(mock_filters, [filter_kwargs])

    to_pairs_kwargs = {"include_timestamp": include_timestamp}
    self.assertEqual(mock_cells, [((fake_cells,), to_pairs_kwargs)])
def test_rows_with_results(self):
    from google.cloud.bigtable.row_data import PartialRowData

    row_key1 = "row-key1"
    row_key2 = "row-key2"
    rows = [row_key1, row_key2]
    name = "table-name"
    connection = None
    table = self._make_one(name, connection)
    table._low_level_table = _MockLowLevelTable()

    row1 = PartialRowData(row_key1)
    # Return row1 but not row2
    rr_result = _MockPartialRowsData(rows={row_key1: row1})
    table._low_level_table.read_rows_result = rr_result

    # Set-up mocks.
    fake_row_set = object()

    def mock_get_row_set_from_rows(*args):  # pylint: disable=unused-argument
        return fake_row_set

    fake_filter = object()
    mock_filters = []

    def mock_filter_chain_helper(**kwargs):
        mock_filters.append(kwargs)
        return fake_filter

    fake_pair = object()
    mock_cells = []

    def mock_cells_to_pairs(*args, **kwargs):
        mock_cells.append((args, kwargs))
        return [fake_pair]

    col_fam = u"cf1"
    qual = b"qual"
    fake_cells = object()
    row1._cells = {col_fam: {qual: fake_cells}}
    include_timestamp = object()

    patch = mock.patch.multiple(
        "google.cloud.happybase.table",
        _filter_chain_helper=mock_filter_chain_helper,
        _get_row_set_from_rows=mock_get_row_set_from_rows,
        _cells_to_pairs=mock_cells_to_pairs,
    )
    with patch:
        result = table.rows(rows, include_timestamp=include_timestamp)

    # read_rows_result == PartialRowsData with row_key1
    expected_result = {col_fam.encode("ascii") + b":" + qual: fake_pair}
    self.assertEqual(result, [(row_key1, expected_result)])

    read_rows_args = ()
    read_rows_kwargs = {"row_set": fake_row_set, "filter_": fake_filter}
    self.assertEqual(
        table._low_level_table.read_rows_calls,
        [(read_rows_args, read_rows_kwargs)],
    )

    expected_kwargs = {"filters": [], "versions": 1, "timestamp": None}
    self.assertEqual(mock_filters, [expected_kwargs])

    to_pairs_kwargs = {"include_timestamp": include_timestamp}
    self.assertEqual(mock_cells, [((fake_cells,), to_pairs_kwargs)])
def _make_partial_row_data(*args, **kwargs):
    from google.cloud.bigtable.row_data import PartialRowData

    return PartialRowData(*args, **kwargs)
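# Hypothetical usage of the factory above (not from the source): a freshly
# constructed PartialRowData simply records its key and starts with no cells.
def test_make_partial_row_data_defaults():
    row_data = _make_partial_row_data(b"row-key")
    assert row_data.row_key == b"row-key"
    assert row_data.cells == {}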