def test_timestamp_filter_millisecond_granularity(self):
    """Read all rows filtered to cells written within the last hour."""
    from google.cloud.bigtable import row_filters

    now = datetime.datetime.now()
    one_hour_ago = now - datetime.timedelta(minutes=60)
    # Filter to the [one_hour_ago, now) window and drain the stream.
    time_range = row_filters.TimestampRange(start=one_hour_ago, end=now)
    ts_filter = row_filters.TimestampRangeFilter(time_range)
    rows = self._table.read_rows(filter_=ts_filter)
    rows.consume_all()
def filter_limit_timestamp_range(project_id, instance_id, table_id):
    """Print each row whose cells have timestamps before 2019-05-01."""
    client = bigtable.Client(project=project_id, admin=True)
    table = client.instance(instance_id).table(table_id)

    cutoff = datetime.datetime(2019, 5, 1)
    # An end-only TimestampRange keeps everything written before the cutoff.
    ts_filter = row_filters.TimestampRangeFilter(
        row_filters.TimestampRange(end=cutoff))
    for row in table.read_rows(filter_=ts_filter):
        print_row(row)
def _TimestampToFilter(self, timestamp):
    """Translate a timestamp specification into a Bigtable row filter.

    Returns None for ALL_TIMESTAMPS (no filtering), a one-cell-per-column
    limit filter for None / NEWEST_TIMESTAMP, and a range filter for a
    (start, end) tuple. Raises data_store.Error for anything else.
    """
    if timestamp == data_store.DataStore.ALL_TIMESTAMPS:
        return None

    # Both the default (None) and NEWEST mean: keep only the latest value.
    newest_only = (
        timestamp is None
        or timestamp == data_store.DataStore.NEWEST_TIMESTAMP)
    if newest_only:
        return row_filters.CellsColumnLimitFilter(1)

    if isinstance(timestamp, tuple):
        time_range = self._TimestampRangeFromTuple(timestamp)
        return row_filters.TimestampRangeFilter(time_range)

    raise data_store.Error("Invalid timestamp specification: %s." % timestamp)