def test_to_pb(self):
    """An empty TimestampRange filter should serialize to its protobuf."""
    from gcloud.bigtable.row_filters import TimestampRange

    ts_range = TimestampRange()
    filter_ = self._makeOne(ts_range)
    expected = _RowFilterPB(timestamp_range_filter=_TimestampRangePB())
    self.assertEqual(filter_.to_pb(), expected)
def test_delete_cells_with_time_range(self):
    """Exercise cell deletion restricted to a timestamp range."""
    import datetime
    from gcloud._helpers import _EPOCH
    from gcloud.bigtable.row_filters import TimestampRange

    # 30871000 microseconds is an exact multiple of 1000, so the value
    # is already at millisecond granularity.
    microseconds = 30871000
    start = _EPOCH + datetime.timedelta(microseconds=microseconds)
    self._delete_cells_helper(time_range=TimestampRange(start=start))
def test_to_pb(self):
    """An empty TimestampRange filter should serialize to its protobuf."""
    from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2
    from gcloud.bigtable.row_filters import TimestampRange

    filter_ = self._makeOne(TimestampRange())
    expected = data_pb2.RowFilter(
        timestamp_range_filter=data_pb2.TimestampRange())
    self.assertEqual(filter_.to_pb(), expected)
def _convert_to_time_range(timestamp=None): """Create a timestamp range from an HBase / HappyBase timestamp. HBase uses timestamp as an argument to specify an exclusive end deadline. Cloud Bigtable also uses exclusive end times, so the behavior matches. :type timestamp: int :param timestamp: (Optional) Timestamp (in milliseconds since the epoch). Intended to be used as the end of an HBase time range, which is exclusive. :rtype: :class:`gcloud.bigtable.row.TimestampRange`, :data:`NoneType <types.NoneType>` :returns: The timestamp range corresponding to the passed in ``timestamp``. """ if timestamp is None: return None next_timestamp = _datetime_from_microseconds(1000 * timestamp) return TimestampRange(end=next_timestamp)
def __init__(self, table, timestamp=None, batch_size=None,
             transaction=False, wal=_WAL_SENTINEL):
    """Initialize batch state and validate the argument combination."""
    # ``wal`` is unsupported here; warn anyone who passes it explicitly.
    if wal is not _WAL_SENTINEL:
        _WARN(_WAL_WARNING)

    if batch_size is not None:
        if transaction:
            raise TypeError('When batch_size is set, a Batch cannot be '
                            'transactional')
        if batch_size <= 0:
            raise ValueError('batch_size must be positive')

    self._table = table
    self._batch_size = batch_size
    self._transaction = transaction

    self._timestamp = None
    self._delete_range = None
    if timestamp is not None:
        # Timestamp is in milliseconds; convert to microseconds.
        self._timestamp = _datetime_from_microseconds(1000 * timestamp)
        # For deletes, use the very next millisecond as the range end:
        # HappyBase users expect HBase deletes to go **up to** and
        # **including** the timestamp, while Cloud Bigtable Time Ranges
        # **exclude** the final timestamp.
        self._delete_range = TimestampRange(
            end=self._timestamp + _ONE_MILLISECOND)

    # Internal state for tracking mutations.
    self._row_map = {}
    self._mutation_count = 0
def test_constructor_explicit(self):
    """All explicit constructor arguments should land on the batch."""
    from gcloud._helpers import _datetime_from_microseconds
    from gcloud.bigtable.row_filters import TimestampRange

    table = object()
    timestamp = 144185290431
    batch_size = 42
    transaction = False  # Must be False when batch_size is non-null

    batch = self._makeOne(table, timestamp=timestamp,
                          batch_size=batch_size, transaction=transaction)

    self.assertEqual(batch._table, table)
    self.assertEqual(batch._batch_size, batch_size)
    self.assertEqual(batch._timestamp,
                     _datetime_from_microseconds(1000 * timestamp))
    # Deletes go up to *and including* the HBase timestamp, so the
    # delete range ends one millisecond past it.
    expected_end = _datetime_from_microseconds(1000 * (timestamp + 1))
    self.assertEqual(batch._delete_range,
                     TimestampRange(end=expected_end))
    self.assertEqual(batch._transaction, transaction)
    self.assertEqual(batch._row_map, {})
    self.assertEqual(batch._mutation_count, 0)