def __init__(self, keyspace=None, column_family=None, key=None, super_column=None):
    if not keyspace or not column_family:
        raise ErrorIncompleteKey("A required attribute was not set.")
    key = key or self._gen_uuid()
    self.keyspace, self.key = keyspace, key
    ColumnParent.__init__(self, column_family, super_column)
    CassandraBase.__init__(self)
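
# Usage sketch (the class name `Key` and the keyspace/column family names are
# assumptions used only for illustration): omitting `key` falls back to a
# generated UUID, while omitting `keyspace` or `column_family` raises
# ErrorIncompleteKey.
#
#     >>> Key(keyspace='Keyspace1', column_family='Standard1').key    # auto-generated UUID
#     >>> Key(keyspace='Keyspace1', column_family='Standard1', key='row1').key
#     'row1'
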
def multiget_count(self, keys, super_column=None, read_consistency_level=None,
                   columns=None, column_start="", column_finish=""):
    """
    Perform a get_count in parallel on a list of keys.

    :Parameters:
        `keys`: [str]
            The keys to count columns for
        `columns`: [str]
            Limit the columns or super_columns counted to the specified list
        `column_start`: str
            Only count when a column or super_column is >= column_start
        `column_finish`: str
            Only count when a column or super_column is <= column_finish
        `super_column`: str
            Count the columns only in this super_column
        `read_consistency_level`: :class:`pycassa.cassandra.ttypes.ConsistencyLevel`
            Affects the guaranteed replication factor before returning from
            any read operation

    :Returns:
        {'keyname': int count}
    """
    (super_column, column_start, column_finish) = self._pack_slice_cols(
        super_column, column_start, column_finish)

    packed_cols = None
    if columns is not None:
        packed_cols = []
        for col in columns:
            packed_cols.append(self._pack_name(col, is_supercol_name=self.super))

    cp = ColumnParent(column_family=self.column_family,
                      super_column=super_column)
    sp = create_SlicePredicate(packed_cols, column_start, column_finish,
                               False, self.MAX_COUNT)

    return self.client.multiget_count(keys, cp, sp,
                                      self._rcl(read_consistency_level))
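
# Usage sketch (`cf` and the key/column range values below are assumptions):
# counts columns for all of the listed keys in a single call and returns a
# dict of per-key counts, as documented above.
#
#     >>> cf.multiget_count(['row1', 'row2'], column_start='a', column_finish='z')
#     {'row1': 3, 'row2': 7}
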
def _column_parent(self, super_column=None):
    return ColumnParent(column_family=self.column_family,
                        super_column=self._pack_name(super_column,
                                                     is_supercol_name=True))
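
# Sketch of what the helper builds (illustrative; `cf` is an assumed
# instance): a thrift ColumnParent for this column family whose super_column
# field, when given, holds the packed form of the name.
#
#     >>> cp = cf._column_parent(super_column='scol')
#     >>> cp.column_family == cf.column_family
#     True
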
def get_range(self, start="", finish="", columns=None, column_start="",
              column_finish="", column_reversed=False, column_count=100,
              row_count=None, include_timestamp=False, super_column=None,
              read_consistency_level=None):
    """
    Get an iterator over keys in a specified range

    :Parameters:
        `start`: str
            Start from this key (inclusive)
        `finish`: str
            End at this key (inclusive)
        `columns`: [str]
            Limit the columns or super_columns fetched to the specified list
        `column_start`: str
            Only fetch when a column or super_column is >= column_start
        `column_finish`: str
            Only fetch when a column or super_column is <= column_finish
        `column_reversed`: bool
            Fetch the columns or super_columns in reverse order. This will do
            nothing unless you passed a dict_class to the constructor.
        `column_count`: int
            Limit the number of columns or super_columns fetched per key
        `row_count`: int
            Limit the number of rows fetched
        `include_timestamp`: bool
            If true, return a (value, timestamp) tuple for each column
        `super_column`: str
            Return columns only in this super_column
        `read_consistency_level`: :class:`pycassa.cassandra.ttypes.ConsistencyLevel`
            Affects the guaranteed replication factor before returning from
            any read operation

    :Returns:
        iterator over ('key', {'column': 'value'})
    """
    (super_column, column_start, column_finish) = self._pack_slice_cols(
        super_column, column_start, column_finish)

    packed_cols = None
    if columns is not None:
        packed_cols = []
        for col in columns:
            packed_cols.append(self._pack_name(col, is_supercol_name=self.super))

    cp = ColumnParent(column_family=self.column_family,
                      super_column=super_column)
    sp = create_SlicePredicate(packed_cols, column_start, column_finish,
                               column_reversed, column_count)

    count = 0
    i = 0
    last_key = start

    buffer_size = self.buffer_size
    if row_count is not None:
        buffer_size = min(row_count, self.buffer_size)
    while True:
        key_range = KeyRange(start_key=last_key, end_key=finish, count=buffer_size)
        key_slices = self.client.get_range_slices(
            cp, sp, key_range, self._rcl(read_consistency_level))
        # This may happen if nothing was ever inserted
        if key_slices is None:
            return
        for j, key_slice in enumerate(key_slices):
            # Ignore the first element after the first iteration
            # because it will be a duplicate.
            if j == 0 and i != 0:
                continue
            yield (key_slice.key,
                   self._convert_ColumnOrSuperColumns_to_dict_class(
                       key_slice.columns, include_timestamp))
            count += 1
            if row_count is not None and count >= row_count:
                return

        if len(key_slices) != self.buffer_size:
            return
        last_key = key_slices[-1].key
        i += 1
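
# Usage sketch (the start/finish keys and row_count are assumptions):
# get_range returns a generator, so rows are pulled from the server lazily in
# buffer_size-sized chunks as you iterate.
#
#     >>> for key, cols in cf.get_range(start='', finish='', row_count=100):
#     ...     print key, cols
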
def multiget(self, keys, columns=None, column_start="", column_finish="",
             column_reversed=False, column_count=100, include_timestamp=False,
             super_column=None, read_consistency_level=None):
    """
    Fetch multiple keys from a Cassandra server

    :Parameters:
        `keys`: [str]
            A list of keys to fetch
        `columns`: [str]
            Limit the columns or super_columns fetched to the specified list
        `column_start`: str
            Only fetch when a column or super_column is >= column_start
        `column_finish`: str
            Only fetch when a column or super_column is <= column_finish
        `column_reversed`: bool
            Fetch the columns or super_columns in reverse order. This will do
            nothing unless you passed a dict_class to the constructor.
        `column_count`: int
            Limit the number of columns or super_columns fetched per key
        `include_timestamp`: bool
            If true, return a (value, timestamp) tuple for each column
        `super_column`: str
            Return columns only in this super_column
        `read_consistency_level`: :class:`pycassa.cassandra.ttypes.ConsistencyLevel`
            Affects the guaranteed replication factor before returning from
            any read operation

    :Returns:
        if include_timestamp == True: {'key': {'column': ('value', timestamp)}}
        else: {'key': {'column': 'value'}}
    """
    (super_column, column_start, column_finish) = self._pack_slice_cols(
        super_column, column_start, column_finish)

    packed_cols = None
    if columns is not None:
        packed_cols = []
        for col in columns:
            packed_cols.append(self._pack_name(col, is_supercol_name=self.super))

    cp = ColumnParent(column_family=self.column_family,
                      super_column=super_column)
    sp = create_SlicePredicate(packed_cols, column_start, column_finish,
                               column_reversed, column_count)

    keymap = self.client.multiget_slice(keys, cp, sp,
                                        self._rcl(read_consistency_level))

    ret = self.dict_class()

    # Keep the order of keys
    for key in keys:
        ret[key] = None

    non_empty_keys = []
    for key, columns in keymap.iteritems():
        if len(columns) > 0:
            non_empty_keys.append(key)
            ret[key] = self._convert_ColumnOrSuperColumns_to_dict_class(
                columns, include_timestamp)

    for key in keys:
        if key not in non_empty_keys:
            del ret[key]

    return ret
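
# Usage sketch (keys and column names are assumptions): results preserve the
# order in which keys were passed, and keys that resolved to no columns are
# omitted from the returned dict rather than mapped to empty dicts.
#
#     >>> cf.multiget(['row1', 'row2'], columns=['name'])
#     {'row1': {'name': 'Alice'}, 'row2': {'name': 'Bob'}}
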
def get_indexed_slices(self, index_clause, columns=None, column_start="",
                       column_finish="", column_reversed=False,
                       column_count=100, include_timestamp=False,
                       super_column=None, read_consistency_level=None):
    """
    Fetches a list of KeySlices from a Cassandra server based on an index
    clause

    :Parameters:
        `index_clause`: :class:`~pycassa.cassandra.ttypes.IndexClause`
            Limits the keys that are returned based on expressions that
            compare the value of a column to a given value.  At least one of
            the expressions in the IndexClause must be on an indexed column.

            .. seealso:: :meth:`pycassa.index.create_index_clause()` and
                         :meth:`pycassa.index.create_index_expression()`

        `columns`: [str]
            Limit the columns or super_columns fetched to the specified list
        `column_start`: str
            Only fetch when a column or super_column is >= column_start
        `column_finish`: str
            Only fetch when a column or super_column is <= column_finish
        `column_reversed`: bool
            Fetch the columns or super_columns in reverse order. This will do
            nothing unless you passed a dict_class to the constructor.
        `column_count`: int
            Limit the number of columns or super_columns fetched per key
        `include_timestamp`: bool
            If true, return a (value, timestamp) tuple for each column
        `super_column`: str
            Return columns only in this super_column
        `read_consistency_level`: :class:`pycassa.cassandra.ttypes.ConsistencyLevel`
            Affects the guaranteed replication factor before returning from
            any read operation

    :Returns:
        if include_timestamp == True: {key: {column: (value, timestamp)}}
        else: {key: {column: value}}
    """
    (super_column, column_start, column_finish) = self._pack_slice_cols(
        super_column, column_start, column_finish)

    packed_cols = None
    if columns is not None:
        packed_cols = []
        for col in columns:
            packed_cols.append(self._pack_name(col, is_supercol_name=self.super))

    cp = ColumnParent(column_family=self.column_family,
                      super_column=super_column)
    sp = create_SlicePredicate(packed_cols, column_start, column_finish,
                               column_reversed, column_count)

    # Pack the values in the index clause expressions
    new_exprs = []
    for expr in index_clause.expressions:
        new_exprs.append(IndexExpression(self._pack_name(expr.column_name), expr.op,
                                         self._pack_value(expr.value, expr.column_name)))
    index_clause.expressions = new_exprs

    keyslice_list = self.client.get_indexed_slices(
        cp, index_clause, sp, self._rcl(read_consistency_level))

    if len(keyslice_list) == 0:
        raise NotFoundException()
    return self._convert_KeySlice_list_to_dict_class(
        keyslice_list, include_timestamp)
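
# Usage sketch (the indexed column 'state' and its value are assumptions):
# build the clause with the pycassa.index helpers referenced in the docstring,
# then pass it in; expression values are packed automatically before the call.
#
#     >>> from pycassa.index import create_index_clause, create_index_expression
#     >>> clause = create_index_clause([create_index_expression('state', 'utah')])
#     >>> cf.get_indexed_slices(clause)
#     {'key1': {'state': 'utah'}}
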
def get(self, key, columns=None, column_start="", column_finish="",
        column_reversed=False, column_count=100, include_timestamp=False,
        super_column=None, read_consistency_level=None):
    """
    Fetch a key from a Cassandra server

    :Parameters:
        `key`: str
            The key to fetch
        `columns`: [str]
            Limit the columns or super_columns fetched to the specified list
        `column_start`: str
            Only fetch when a column or super_column is >= column_start
        `column_finish`: str
            Only fetch when a column or super_column is <= column_finish
        `column_reversed`: bool
            Fetch the columns or super_columns in reverse order. This will do
            nothing unless you passed a ``dict_class`` to the constructor.
        `column_count`: int
            Limit the number of columns or super_columns fetched per key
        `include_timestamp`: bool
            If true, return a (value, timestamp) tuple for each column
        `super_column`: str
            Return columns only in this super_column
        `read_consistency_level`: :class:`pycassa.cassandra.ttypes.ConsistencyLevel`
            Affects the guaranteed replication factor before returning from
            any read operation

    :Returns:
        if include_timestamp == True: {'column': ('value', timestamp)}
        else: {'column': 'value'}
    """
    super_column, column_start, column_finish = self._pack_slice_cols(
        super_column, column_start, column_finish)

    packed_cols = None
    if columns is not None:
        packed_cols = []
        for col in columns:
            packed_cols.append(self._pack_name(col, is_supercol_name=self.super))

    cp = ColumnParent(column_family=self.column_family,
                      super_column=super_column)
    sp = create_SlicePredicate(packed_cols, column_start, column_finish,
                               column_reversed, column_count)

    list_col_or_super = self.client.get_slice(
        key, cp, sp, self._rcl(read_consistency_level))

    if len(list_col_or_super) == 0:
        raise NotFoundException()
    return self._convert_ColumnOrSuperColumns_to_dict_class(
        list_col_or_super, include_timestamp)
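
# Usage sketch (row and column names are assumptions; `cf` is an instance of
# this class bound to a live column family):
#
#     >>> cf.get('row1', columns=['name'])
#     {'name': 'Alice'}
#     >>> cf.get('row1', columns=['name'], include_timestamp=True)
#     {'name': ('Alice', 1290802814628291)}
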