def test_caching_pre_11(self):
    """Verify per-CF cache settings can be created and altered (pre-1.1 only)."""
    # Cassandra 1.1 (thrift API 19.30.0) replaced the per-CF cache sizes
    # with a single 'caching' policy, so skip on newer servers.
    version = tuple(int(part) for part in sys._conn.describe_version().split('.'))
    if version >= (19, 30, 0):
        raise SkipTest('CF specific caching no longer supported.')

    sys.create_column_family(TEST_KS, 'CachedCF10',
                             row_cache_size=100, key_cache_size=100,
                             row_cache_save_period_in_seconds=3,
                             key_cache_save_period_in_seconds=3)
    pool = ConnectionPool(TEST_KS)

    # A freshly loaded cfdef should reflect the creation-time settings.
    cf = ColumnFamily(pool, 'CachedCF10')
    assert_equal(cf._cfdef.row_cache_size, 100)
    assert_equal(cf._cfdef.key_cache_size, 100)
    assert_equal(cf._cfdef.row_cache_save_period_in_seconds, 3)
    assert_equal(cf._cfdef.key_cache_save_period_in_seconds, 3)

    sys.alter_column_family(TEST_KS, 'CachedCF10',
                            row_cache_size=200, key_cache_size=200,
                            row_cache_save_period_in_seconds=4,
                            key_cache_save_period_in_seconds=4)

    # A new ColumnFamily instance should pick up the altered settings.
    cf1 = ColumnFamily(pool, 'CachedCF10')
    assert_equal(cf1._cfdef.row_cache_size, 200)
    assert_equal(cf1._cfdef.key_cache_size, 200)
    assert_equal(cf1._cfdef.row_cache_save_period_in_seconds, 4)
    assert_equal(cf1._cfdef.key_cache_save_period_in_seconds, 4)
def __init__(self, client, qname, trend=5):
    """Create a Buyer, restoring cash, holdings and quote history from Cassandra.

    :param client: passed through to the base consumer class
    :param qname: unused in this constructor; the base class is keyed by a
        fresh uuid4 hex instead -- TODO confirm against the base class
    :param trend: number of recent quotes considered when computing a trend
    """
    super(Buyer, self).__init__(client, uuid.uuid4().hex)
    self.holdings = {}
    self.cash = 100000.0
    self.history = {}
    self.trend = trend
    self.pool = ConnectionPool('example_consumer_Buyer')
    self.stored_holdings = ColumnFamily(self.pool, 'Holdings')
    self.quote_history = ColumnFamily(self.pool, 'Quotes')
    self.stored_cash = ColumnFamily(self.pool, 'Cash')

    # Restore the cash balance, seeding the row on first run (EAFP).
    try:
        self.cash = self.stored_cash.get('current')['amount']
    except ttypes.NotFoundException:
        self.stored_cash.insert('current', {'amount': self.cash})

    # Rebuild the in-memory holdings from the stored rows.
    for symbol, columns in self.stored_holdings.get_range():
        self.holdings[symbol] = (columns['number_of_shares'],
                                 columns['price'], columns['cost'])

    # Replay stored quotes to rebuild price history.  The original also
    # built an index clause on 'timestamp' here but never used it; that
    # dead code has been removed.
    for _key, columns in self.quote_history.get_range():
        self.add_quote(columns['symbol'], columns['price'])
def _log_newest_columns(cf, row_key, limit=20):
    """Log a banner for *row_key* then up to *limit* of its newest columns."""
    logging.info("-" * 120)  # ------------------------------
    logging.info("Querying for key %s", row_key)
    logging.info("-" * 120)  # ------------------------------
    count = limit
    # column_reversed=True returns columns newest-first (TimeUUID comparator).
    for k, v in cf.get(row_key, column_reversed=True).iteritems():  #@UnusedVariable
        logging.info(v)
        count -= 1
        if count == 0:
            break

def query(pool):
    """Show the newest log columns from each of the four log column families.

    The original repeated the same banner/fetch/log loop four times; it is
    factored into _log_newest_columns, preserving the exact log-line order.
    """
    cf_logs = ColumnFamily(pool, CF_LOGS)
    row_key = ymd_from_epoch()
    try:
        # Zero-column get: cheap existence check for today's row.
        cf_logs.get(row_key, column_count=0)
    except NotFoundException:
        # FIXME: this is extremely inefficient!
        row_key = cf_logs.get_range().next()[0]
    _log_newest_columns(cf_logs, row_key)
    del cf_logs

    _log_newest_columns(ColumnFamily(pool, CF_LOGS_BY_APP), EXAMPLE_APPS[0])
    _log_newest_columns(ColumnFamily(pool, CF_LOGS_BY_HOST), EXAMPLE_HOSTS[0])
    _log_newest_columns(ColumnFamily(pool, CF_LOGS_BY_SEVERITY), 'WARN')
def setup_class(cls):
    """Create one standard CF per comparator type and open each for the tests."""
    sys = SystemManager()
    sys.create_column_family(TEST_KS, 'StdLong', comparator_type=LongType())
    sys.create_column_family(TEST_KS, 'StdInteger', comparator_type=IntegerType())
    sys.create_column_family(TEST_KS, 'StdBigInteger', comparator_type=IntegerType())
    sys.create_column_family(TEST_KS, 'StdTimeUUID', comparator_type=TimeUUIDType())
    sys.create_column_family(TEST_KS, 'StdLexicalUUID', comparator_type=LexicalUUIDType())
    sys.create_column_family(TEST_KS, 'StdAscii', comparator_type=AsciiType())
    sys.create_column_family(TEST_KS, 'StdUTF8', comparator_type=UTF8Type())
    sys.create_column_family(TEST_KS, 'StdBytes', comparator_type=BytesType())
    sys.create_column_family(TEST_KS, 'StdComposite',
                             comparator_type=CompositeType(LongType(), BytesType()))
    sys.close()

    cls.cf_long = ColumnFamily(pool, 'StdLong')
    cls.cf_int = ColumnFamily(pool, 'StdInteger')
    cls.cf_big_int = ColumnFamily(pool, 'StdBigInteger')
    cls.cf_time = ColumnFamily(pool, 'StdTimeUUID')
    cls.cf_lex = ColumnFamily(pool, 'StdLexicalUUID')
    cls.cf_ascii = ColumnFamily(pool, 'StdAscii')
    cls.cf_utf8 = ColumnFamily(pool, 'StdUTF8')
    cls.cf_bytes = ColumnFamily(pool, 'StdBytes')
    cls.cf_composite = ColumnFamily(pool, 'StdComposite')

    # BUG FIX: cf_big_int was created above but missing from cls.cfs, so any
    # shared iteration/teardown over cls.cfs silently skipped it.
    cls.cfs = [cls.cf_long, cls.cf_int, cls.cf_big_int, cls.cf_time,
               cls.cf_lex, cls.cf_ascii, cls.cf_utf8, cls.cf_bytes,
               cls.cf_composite]
def test_alter_column_non_bytes_type(self):
    """LongType column validators round-trip long values, both at creation
    time and when added later via alter_column."""
    sys.create_column_family(TEST_KS, 'LongCF', comparator_type=LONG_TYPE)
    sys.create_index(TEST_KS, 'LongCF', 3, LONG_TYPE)
    pool = ConnectionPool(TEST_KS)

    # Validator declared when the CF was created.
    cf = ColumnFamily(pool, 'LongCF')
    cf.insert('key', {3: 3})
    assert_equal(cf.get('key')[3], 3)

    # Validator added afterwards should behave identically.
    sys.alter_column(TEST_KS, 'LongCF', 2, LONG_TYPE)
    cf = ColumnFamily(pool, 'LongCF')
    cf.insert('key', {2: 2})
    assert_equal(cf.get('key')[2], 2)
def mass_insert(pool):
    """Insert generated log lines into the main CF and the three index CFs
    until interrupted, reporting the running insert rate."""
    cf_logs = ColumnFamily(pool, CF_LOGS)
    cf_logs_by_app = ColumnFamily(pool, CF_LOGS_BY_APP)
    cf_logs_by_host = ColumnFamily(pool, CF_LOGS_BY_HOST)
    cf_logs_by_severity = ColumnFamily(pool, CF_LOGS_BY_SEVERITY)

    rnd_inst = random.Random()
    rnd_inst.seed(1)

    start = time.time()
    count = 0
    try:
        for item in log_generator(1):
            msg, app, host, severity = item[0], item[1], item[2], item[3]

            # TimeUUID column names keep columns chronologically ordered; see:
            # http://pycassa.github.com/pycassa/assorted/time_uuid.html
            # http://www.slideshare.net/jeremiahdjordan/pycon-2012-apache-cassandra
            # http://www.slideshare.net/rbranson/how-do-i-cassandra @ slide 80
            # https://github.com/pycassa/pycassa/issues/135
            event_uuid = uuid.uuid1()
            column = {event_uuid: msg}

            # Main CF is keyed by day; the index CFs by app/host/severity.
            cf_logs.insert(str(ymd_from_uuid1(event_uuid)), column)
            cf_logs_by_app.insert(app, column)
            cf_logs_by_host.insert(host, column)
            cf_logs_by_severity.insert(severity, column)

            count += 4
            if count % 400 == 0:
                avg = float(count) / (time.time() - start)
                logging.info("Inserted %d columns, %f insert/sec", count, avg)
    except KeyboardInterrupt:
        logging.info("Stopping...")

    end = time.time()
    avg = float(count) / (end - start)
    logging.info("%d columns inserted. Avg: %f insert/sec", count, avg)
def test_alter_column_family_default_validation_class(self):
    """default_validation_class should be readable and alterable."""
    sys.create_column_family(TEST_KS, 'AlteredCF',
                             default_validation_class=LONG_TYPE)
    pool = ConnectionPool(TEST_KS)

    # Initial class as created...
    assert_equal(ColumnFamily(pool, 'AlteredCF').default_validation_class,
                 "LongType")

    # ...and the altered class after the schema change.
    sys.alter_column_family(TEST_KS, 'AlteredCF',
                            default_validation_class=UTF8_TYPE)
    assert_equal(ColumnFamily(pool, 'AlteredCF').default_validation_class,
                 "UTF8Type")
def setup_class(cls):
    """Create super CFs (Long supercolumns, varied subcomparators) for the tests."""
    sys = SystemManager()
    sys.create_column_family(TEST_KS, 'SuperLongSubLong', super=True,
                             comparator_type=LongType(), subcomparator_type=LongType())
    sys.create_column_family(TEST_KS, 'SuperLongSubInt', super=True,
                             comparator_type=LongType(), subcomparator_type=IntegerType())
    sys.create_column_family(TEST_KS, 'SuperLongSubBigInt', super=True,
                             comparator_type=LongType(), subcomparator_type=IntegerType())
    sys.create_column_family(TEST_KS, 'SuperLongSubTime', super=True,
                             comparator_type=LongType(), subcomparator_type=TimeUUIDType())
    sys.create_column_family(TEST_KS, 'SuperLongSubLex', super=True,
                             comparator_type=LongType(), subcomparator_type=LexicalUUIDType())
    sys.create_column_family(TEST_KS, 'SuperLongSubAscii', super=True,
                             comparator_type=LongType(), subcomparator_type=AsciiType())
    sys.create_column_family(TEST_KS, 'SuperLongSubUTF8', super=True,
                             comparator_type=LongType(), subcomparator_type=UTF8Type())
    sys.create_column_family(TEST_KS, 'SuperLongSubBytes', super=True,
                             comparator_type=LongType(), subcomparator_type=BytesType())
    sys.close()

    cls.cf_suplong_sublong = ColumnFamily(pool, 'SuperLongSubLong')
    cls.cf_suplong_subint = ColumnFamily(pool, 'SuperLongSubInt')
    cls.cf_suplong_subbigint = ColumnFamily(pool, 'SuperLongSubBigInt')
    cls.cf_suplong_subtime = ColumnFamily(pool, 'SuperLongSubTime')
    cls.cf_suplong_sublex = ColumnFamily(pool, 'SuperLongSubLex')
    cls.cf_suplong_subascii = ColumnFamily(pool, 'SuperLongSubAscii')
    cls.cf_suplong_subutf8 = ColumnFamily(pool, 'SuperLongSubUTF8')
    cls.cf_suplong_subbytes = ColumnFamily(pool, 'SuperLongSubBytes')

    # BUG FIX: the original listed cf_suplong_subint twice and omitted
    # cf_suplong_sublong and cf_suplong_subbigint entirely, so shared
    # iteration/teardown over cls.cfs missed two column families.
    cls.cfs = [cls.cf_suplong_sublong, cls.cf_suplong_subint,
               cls.cf_suplong_subbigint, cls.cf_suplong_subtime,
               cls.cf_suplong_sublex, cls.cf_suplong_subascii,
               cls.cf_suplong_subutf8, cls.cf_suplong_subbytes]
def test_static_composite_slicing(self):
    """Slice a static CompositeType CF using the various (value, inclusive)
    forms of column_start/column_finish."""
    cf = ColumnFamily(pool, 'StaticComposite')
    u1 = uuid.uuid1()
    u4 = uuid.uuid4()

    # Five columns differing only in their first two components.
    tail = (u1, u4, '', '', '')
    col0 = (0, 1) + tail
    col1 = (1, 1) + tail
    col2 = (1, 2) + tail
    col3 = (1, 3) + tail
    col4 = (2, 1) + tail
    cf.insert('key2', dict((c, '') for c in (col0, col1, col2, col3, col4)))

    middle = {col1: '', col2: '', col3: ''}

    # Each (start, finish) pair should select exactly the columns whose
    # first component equals 1, regardless of how inclusiveness is spelled.
    single_component_cases = [
        (((1, True),), ((1, True),)),
        ((1,), ((2, False),)),
        (((1, True),), ((2, False),)),
        ((1,), ((2, False),)),
        (((0, False),), ((2, False),)),
    ]
    for start, finish in single_component_cases:
        result = cf.get('key2', column_start=start, column_finish=finish)
        assert_equal(result, middle)

    # Two-component bounds: inclusiveness of the trailing component matters.
    assert_equal(cf.get('key2', column_start=(1, 1), column_finish=(1, 3)),
                 {col1: '', col2: ''})
    assert_equal(cf.get('key2', column_start=(1, 1),
                        column_finish=(1, (3, True))),
                 middle)
    assert_equal(cf.get('key2', column_start=(1, (1, True)),
                        column_finish=((2, False),)),
                 middle)
def __init__(self, client, qname):
    """Create a Processor bound to the 'Trend' column family.

    :param client: passed through to the base class
    :param qname: queue name, passed through to the base class
    """
    super(Processor, self).__init__(client, qname)
    self.pool = ConnectionPool('processing_llama_Processor')
    self.trends = ColumnFamily(self.pool, 'Trend')

def get_sleep_time():
    """Return the delay, in seconds, between processing passes."""
    return 60
def _update_analytics_start_time(self, start_time):
    """Record the analytics start time, via CQL when enabled, else thrift.

    On CQL INSERT failure the error is logged and the test is failed with
    an assertion.
    """
    if mockcassandra.use_cql():
        cluster = Cluster(['127.0.0.1'],
                          port=int(self.__class__.cassandra_port))
        session = cluster.connect(COLLECTOR_KEYSPACE_CQL)
        query = "INSERT INTO {0} (key, \"{1}\") VALUES ('{2}', {3})".format(
            SYSTEM_OBJECT_TABLE, SYSTEM_OBJECT_START_TIME,
            SYSTEM_OBJECT_ANALYTICS, start_time)
        try:
            session.execute(query)
        except Exception as e:
            logging.error("INSERT INTO %s: Key %s Column %s Value %d "
                          "FAILED: %s" % (SYSTEM_OBJECT_TABLE,
                          SYSTEM_OBJECT_ANALYTICS, SYSTEM_OBJECT_START_TIME,
                          start_time, str(e)))
            assert False
        finally:
            # BUG FIX: shutdown used to run only on success (try/else), so a
            # failed INSERT leaked the cluster connection. finally runs on
            # both paths; the AssertionError still propagates afterwards.
            cluster.shutdown()
    else:
        pool = ConnectionPool(
            COLLECTOR_KEYSPACE,
            ['127.0.0.1:%s' % (self.__class__.cassandra_port)])
        col_family = ColumnFamily(pool, SYSTEM_OBJECT_TABLE)
        col_family.insert(SYSTEM_OBJECT_ANALYTICS,
                          {SYSTEM_OBJECT_START_TIME: start_time})
def execute(self): ## first validate data data_ok, fault = self._validate_data() if not data_ok: return (False, fault) ## if data ok, construct InsertCommands if self.op_type == CassandraQuery.OP_DELETE: try: domain = self.data.domain row_key = self.data.get_pk() client = db_connection.get_client() cf = ColumnFamily(client, domain) ## if cascading is enabled, first delete all DBObject and collections comprised in this DBObject if self.cascade: pass ## lastly remove data for current element cf.remove(row_key) return (True, None) except Exception, ex: return (False, ex)
def _get_analytics_start_time(self):
    """Return the analytics start-time row with per-category fallbacks.

    Reads the analytics row and returns a dict containing the base start
    time plus the flow/stat/msg start times, each defaulting to the base
    start time when absent.  Returns None on read failure or when the base
    start time itself is missing.
    """
    try:
        col_family = ColumnFamily(self._pool, SYSTEM_OBJECT_TABLE)
        row = col_family.get(SYSTEM_OBJECT_ANALYTICS)
    except Exception as e:
        self._logger.error("Exception: analytics_start_time Failure %s" % e)
        return None

    # The base start time is mandatory; everything else falls back to it.
    if SYSTEM_OBJECT_START_TIME not in row:
        return None
    base_time = row[SYSTEM_OBJECT_START_TIME]

    ret_row = {SYSTEM_OBJECT_START_TIME: base_time}
    # The original spelled out three identical if/else stanzas; this loop
    # is behaviorally equivalent.
    for key in (SYSTEM_OBJECT_FLOW_START_TIME,
                SYSTEM_OBJECT_STAT_START_TIME,
                SYSTEM_OBJECT_MSG_START_TIME):
        ret_row[key] = row.get(key, base_time)
    return ret_row
def setup_class(cls):
    """Create a TimeUUID-comparator CF and open it for this class's tests."""
    sys = SystemManager()
    sys.create_column_family(TEST_KS, 'TestTimeUUIDs',
                             comparator_type=TimeUUIDType())
    sys.close()
    # Shared handle used by every test in the class.
    cls.cf_time = ColumnFamily(pool, 'TestTimeUUIDs')
def test_packing_disabled(self):
    """With autopacking off, non-string names/values must raise TypeError."""
    self.cf = ColumnFamily(pool, 'Standard1', autopack_names=False,
                           autopack_values=False)
    self.cf.insert('key', {'col': 'val'})
    # BUG FIX: assert_raises forwards extra positional arguments to the
    # callable; the original passed args=(...) as a keyword, so insert was
    # never invoked with the intended arguments and the assertions passed
    # vacuously (insert rejected the unexpected 'args' keyword instead).
    assert_raises(TypeError, self.cf.insert, 'key', {123: 'val'})
    assert_raises(TypeError, self.cf.insert, 'key', {'col': 123})
    assert_raises(TypeError, self.cf.insert, 'key', {123: 123})
    self.cf.remove('key')
def test_packing_enabled(self):
    """With default autopacking, mismatched name/value types raise TypeError."""
    self.cf = ColumnFamily(pool, 'Standard1')
    self.cf.insert('key', {'col': 'val'})
    # BUG FIX: assert_raises forwards extra positional arguments to the
    # callable; the original passed args=(...) as a keyword, so insert was
    # never invoked with the intended arguments and the assertions passed
    # vacuously (insert rejected the unexpected 'args' keyword instead).
    assert_raises(TypeError, self.cf.insert, 'key', {123: 'val'})
    assert_raises(TypeError, self.cf.insert, 'key', {'col': 123})
    assert_raises(TypeError, self.cf.insert, 'key', {123: 123})
    self.cf.remove('key')
def get_row_key_id(domain):
    """Atomically fetch-and-increment the row-key counter for *domain*.

    Returns the current counter value (the caller's new id), or None when
    the counter column holds a falsy value.
    """
    counter_column, counter_lock = domain_counter_map[domain]
    # Serialize the read-increment-write sequence across threads; 'with'
    # replaces the original explicit acquire/try/finally-release.
    with counter_lock:
        client = db_connection.get_client()
        cf = ColumnFamily(client, CONFIG_DOMAIN)
        id_key = cf.get(CONFIG_ROW, counter_column)[counter_column]
        if not id_key:
            # Matches the original's implicit None return on a falsy counter.
            return None
        # Persist the incremented counter at the strongest consistency so
        # all replicas agree before another id is handed out.
        cf.insert(CONFIG_ROW, {counter_column: id_key + 1},
                  write_consistency_level=ConsistencyLevel.ALL)
        return id_key
    # NOTE(review): a dead commented-out zero-padding block from the
    # original was removed.
def _update_analytics_start_time(self, start_time):
    """Store the analytics start time in the system object table."""
    servers = ['127.0.0.1:%s' % (self.__class__.cassandra_port)]
    conn_pool = ConnectionPool(COLLECTOR_KEYSPACE, servers)
    system_objects = ColumnFamily(conn_pool, SYSTEM_OBJECT_TABLE)
    system_objects.insert(SYSTEM_OBJECT_ANALYTICS,
                          {SYSTEM_OBJECT_START_TIME: start_time})
def _update_analytics_start_time(self, start_times):
    """Persist *start_times* into the analytics row, logging (not raising)
    on any failure."""
    try:
        system_objects = ColumnFamily(self._pool, SYSTEM_OBJECT_TABLE)
        system_objects.insert(SYSTEM_OBJECT_ANALYTICS, start_times)
    except Exception as e:
        # Best-effort write: failures are reported but never propagated.
        self._logger.error("Exception: update_analytics_start_time "
                           "Connection Failure %s" % e)
def connect_server(address, key_space, column_family): """Establish connection.""" pool = ConnectionPool(key_space, address) print "[INFO] Connection to '" + key_space + "' established." cf = ColumnFamily(pool, column_family) print "[INFO] Column family '" + column_family + "' used." return pool, cf
def mass_insert(pool):
    """Insert generated log messages keyed by application until interrupted,
    then report the average insert rate."""
    cf_logs = ColumnFamily(pool, CF_LOGS)
    rnd_inst = random.Random()
    rnd_inst.seed(1)

    start = time.time()
    count = 0
    try:
        for item in log_generator(1):
            msg, app = item[0], item[1]
            # TimeUUID column names keep a row's columns in time order; see:
            # http://pycassa.github.com/pycassa/assorted/time_uuid.html
            # http://www.slideshare.net/jeremiahdjordan/pycon-2012-apache-cassandra
            # http://www.slideshare.net/rbranson/how-do-i-cassandra @ slide 80
            # https://github.com/pycassa/pycassa/issues/135
            cf_logs.insert(app, {uuid.uuid1(): msg})
            count += 1
            if count % 100 == 0:
                logging.info("Inserted %d columns", count)
    except KeyboardInterrupt:
        logging.info("Stopping...")

    end = time.time()
    logging.info("Avg: %f insert/sec", float(count) / (end - start))
def setup_class(cls):
    """Create one CF per key_validation_class and open each for the tests.

    Skipped on Cassandra 0.7, which predates key validators.
    """
    sys = SystemManager()
    if sys._conn.version == CASSANDRA_07:
        raise SkipTest("Cassandra 0.7 does not have key validators")

    key_validators = [
        ('KeyLong', LongType()),
        ('KeyInteger', IntegerType()),
        ('KeyTimeUUID', TimeUUIDType()),
        ('KeyLexicalUUID', LexicalUUIDType()),
        ('KeyAscii', AsciiType()),
        ('KeyUTF8', UTF8Type()),
        ('KeyBytes', BytesType()),
    ]
    for cf_name, validator in key_validators:
        sys.create_column_family(TEST_KS, cf_name,
                                 key_validation_class=validator)
    sys.close()

    cls.cf_long = ColumnFamily(pool, 'KeyLong')
    cls.cf_int = ColumnFamily(pool, 'KeyInteger')
    cls.cf_time = ColumnFamily(pool, 'KeyTimeUUID')
    cls.cf_lex = ColumnFamily(pool, 'KeyLexicalUUID')
    cls.cf_ascii = ColumnFamily(pool, 'KeyAscii')
    cls.cf_utf8 = ColumnFamily(pool, 'KeyUTF8')
    cls.cf_bytes = ColumnFamily(pool, 'KeyBytes')
    cls.cfs = [cls.cf_long, cls.cf_int, cls.cf_time, cls.cf_lex,
               cls.cf_ascii, cls.cf_utf8, cls.cf_bytes]
def setup_class(cls):
    """Create one standard CF per comparator-type constant and open each."""
    sys = SystemManager()
    comparators = [
        ('StdLong', LONG_TYPE),
        ('StdInteger', INT_TYPE),
        ('StdTimeUUID', TIME_UUID_TYPE),
        ('StdLexicalUUID', LEXICAL_UUID_TYPE),
        ('StdAscii', ASCII_TYPE),
        ('StdUTF8', UTF8_TYPE),
        ('StdBytes', BYTES_TYPE),
    ]
    for cf_name, comparator in comparators:
        sys.create_column_family(TEST_KS, cf_name, comparator_type=comparator)
    sys.close()

    cls.cf_long = ColumnFamily(pool, 'StdLong')
    cls.cf_int = ColumnFamily(pool, 'StdInteger')
    cls.cf_time = ColumnFamily(pool, 'StdTimeUUID')
    cls.cf_lex = ColumnFamily(pool, 'StdLexicalUUID')
    cls.cf_ascii = ColumnFamily(pool, 'StdAscii')
    cls.cf_utf8 = ColumnFamily(pool, 'StdUTF8')
    cls.cf_bytes = ColumnFamily(pool, 'StdBytes')
    cls.cfs = [cls.cf_long, cls.cf_int, cls.cf_time, cls.cf_lex,
               cls.cf_ascii, cls.cf_utf8, cls.cf_bytes]
def test_caching_post_11(self):
    """Verify the 1.1+ single 'caching' policy can be read and altered."""
    # Only Cassandra 1.1+ (thrift API 19.30.0) has the unified caching policy.
    version = tuple(int(part) for part in sys._conn.describe_version().split('.'))
    if version < (19, 30, 0):
        raise SkipTest('CF caching policy not yet supported.')

    sys.create_column_family(TEST_KS, 'CachedCF11')
    pool = ConnectionPool(TEST_KS)

    # Default policy on a fresh CF.
    cf = ColumnFamily(pool, 'CachedCF11')
    assert_equal(cf._cfdef.caching, 'KEYS_ONLY')

    # Each alteration should round-trip (stored uppercased).
    for requested, stored in (('all', 'ALL'),
                              ('rows_only', 'ROWS_ONLY'),
                              ('none', 'NONE')):
        sys.alter_column_family(TEST_KS, 'CachedCF11', caching=requested)
        cf = ColumnFamily(pool, 'CachedCF11')
        assert_equal(cf._cfdef.caching, stored)
def _get_analytics_start_time_thrift(self):
    """Return the raw analytics start-time row via thrift, or None on failure."""
    try:
        system_objects = ColumnFamily(self._pool, SYSTEM_OBJECT_TABLE)
        return system_objects.get(SYSTEM_OBJECT_ANALYTICS)
    except Exception as e:
        self._logger.error("Exception: analytics_start_time Failure %s" % e)
        return None
def query(pool): logging.info("-" * 120) # ------------------------------ logging.info("-" * 120) # ------------------------------ cf_logs = ColumnFamily(pool, CF_LOGS) for obj in cf_logs.get_range(): #@UnusedVariable print "Key: {0}".format(obj[0]) # print dir(obj[1]) for k, v in obj[1].iteritems(): print " {0} -> {1}".format(k, v)
def _get_analytics_start_time(self):
    """Return the stored analytics start time, or -1 when unavailable."""
    try:
        system_objects = ColumnFamily(self._pool, SYSTEM_OBJECT_TABLE)
        row = system_objects.get(SYSTEM_OBJECT_ANALYTICS,
                                 columns=[SYSTEM_OBJECT_START_TIME])
    except Exception as e:
        # NOTE: sibling implementations return None on failure; this one
        # uses -1 and callers may depend on that, so it is preserved.
        self._logger.error("Exception: analytics_start_time Failure %s" % e)
        return -1
    return row[SYSTEM_OBJECT_START_TIME]
def __init__(self, collection, db):
    """Open the pageviews/meta/rank column families in keyspace *db*."""
    self.backend = 'cassandra'
    self.port = '9160'
    self.host = '127.0.0.1'
    self.column_families = ['pageviews', 'meta', 'rank']
    self.setup(db)
    self.pool = ConnectionPool(db)

    pageviews_cf, meta_cf, rank_cf = self.column_families
    self.pageviews = ColumnFamily(self.pool, pageviews_cf)
    self.meta = ColumnFamily(self.pool, meta_cf)
    # reversed=True: presumably so rank slices come back highest-first --
    # TODO confirm against read sites.
    self.rank = ColumnFamily(self.pool, rank_cf, reversed=True)
    self.pageview_collection = ColumnFamilyMap(Pageview, self.pool,
                                               'pageviews')
def verify_with_thrift(self):
    """Verify super-column contents over thrift (skipped on Cassandra 4.0+)."""
    # Thrift was removed in Cassandra 4.0, so there is nothing to check there.
    if self.cluster.version() >= '4':
        return
    pool = ConnectionPool("supcols", pool_size=1)
    super_col_fam = ColumnFamily(pool, "cols")
    # Every name should map to a single ('attr', 'name') -> name column.
    for name in NAMES:
        expected = OrderedDict([(('attr', u'name'), name)])
        self.assertEqual(expected, super_col_fam.get(name))
def ensure_cassandra_cf(self):
    """Create the keyspace and column family if missing, then bind
    self.columnfamily to the (possibly fresh) column family."""
    manager = SystemManager()
    # Idempotent setup: only create what does not already exist.
    if self.keyspace not in manager.list_keyspaces():
        manager.create_keyspace(self.keyspace, SIMPLE_STRATEGY,
                                {'replication_factor': '1'})
    existing = manager.get_keyspace_column_families(self.keyspace)
    if self.cassandra_columns_family not in existing:
        manager.create_column_family(self.keyspace,
                                     self.cassandra_columns_family)
    self.columnfamily = ColumnFamily(self.cassandra_session,
                                     self.cassandra_columns_family)