def test_add_remove_counter(self): sys = SystemManager() sys.create_column_family(TEST_KS, 'KeyLongCounter', key_validation_class=LongType(), default_validation_class='CounterColumnType') sys.close() cf_long = ColumnFamily(pool, 'KeyLongCounter') key = 1111111111111111L cf_long.add(key, 'col') assert_equal(cf_long.get(key), {'col': 1}) cf_long.remove_counter(key, 'col') time.sleep(0.1) assert_raises(NotFoundException, cf_long.get, key)
def test_add_remove_counter(self): sys = SystemManager() if sys._conn.version == CASSANDRA_07: raise SkipTest("Cassandra 0.7 does not have key validators") sys.create_column_family(TEST_KS, 'KeyLongCounter', key_validation_class=LongType(), default_validation_class=COUNTER_COLUMN_TYPE) sys.close() cf_long = ColumnFamily(pool, 'KeyLongCounter') key = 1111111111111111L cf_long.add(key, 'col') assert_equal(cf_long.get(key), {'col': 1}) cf_long.remove_counter(key, 'col') time.sleep(0.1) assert_raises(NotFoundException, cf_long.get, key)
def load_builds(self, o, builders):
    """Write every build in ``o`` to Cassandra and flush rolled-up counters.

    Builds are loaded through ``_load_build`` using batched mutations on
    the ``builds``, ``indices``, and ``simple_indices`` column families.
    Counter increments are accumulated locally and pushed to the
    ``counters`` and ``super_counters`` column families only at the end,
    because Python increments are cheaper than Cassandra increments
    (hopefully).  Returns the number of builds processed.
    """
    builds_cf = ColumnFamily(self._pool, 'builds')
    builds_batch = builds_cf.batch()
    indices_cf = ColumnFamily(self._pool, 'indices')
    indices_batch = indices_cf.batch()
    simple_indices_cf = ColumnFamily(self._pool, 'simple_indices')
    simple_batch = simple_indices_cf.batch()

    # Two flat counters plus six super-column counter groups, all
    # filled in by _load_build.
    counters = {
        'builder_number': Counter(),
        'builder_duration': Counter(),
    }
    for group in ('builder_number_by_day',
                  'builder_duration_by_day',
                  'builder_number_by_category',
                  'builder_duration_by_category',
                  'builder_number_by_day_and_category',
                  'builder_duration_by_day_and_category'):
        counters[group] = {}

    existing_filenames = set(self._connection.filenames())

    for build in o:
        self._load_build(builds_batch, indices_batch, simple_batch,
                         counters, build, builders, existing_filenames)

    builds_batch.send()
    indices_batch.send()
    simple_batch.send()

    # Flush the two flat counters, removing them so only the
    # super-column groups remain for the pass below.
    flat_cf = ColumnFamily(self._pool, 'counters')
    for row_key in ('builder_number', 'builder_duration'):
        for builder, count in counters.pop(row_key).items():
            flat_cf.add(row_key, builder, count)

    super_cf = ColumnFamily(self._pool, 'super_counters')
    for row_key, super_columns in counters.items():
        for super_column, counts in super_columns.items():
            for column, count in counts.items():
                super_cf.add(row_key, column, count, super_column)

    return len(o)
def parse_logs(self, build_ids):
    """Parse the logs for the specified build IDs into storage."""
    # TODO hook up parallel processing.
    OUR_VERSION = '1'

    mut = Mutator(self._pool)
    timelines_cf = ColumnFamily(self._pool, 'build_timelines')
    indices_cf = ColumnFamily(self._pool, 'indices')
    builds_cf = ColumnFamily(self._pool, 'builds')
    counters = ColumnFamily(self._pool, 'counters')
    super_counters = ColumnFamily(self._pool, 'super_counters')

    for build_id in build_ids:
        info = self._connection.build_from_id(build_id)
        if not info:
            continue

        # Already parsed by this (or a newer) parser version?
        existing_version = info.get('log_parsing_version')
        if existing_version and existing_version >= OUR_VERSION:
            continue

        if info['log_fetch_status'] != 'fetched':
            continue

        log = self._connection.file_data(info['log_url'])
        if not log:
            continue

        parsed = parse_build_log(log)
        category = info['builder_category']

        timeline = {}     # step start time -> step attributes
        name_index = {}   # step name -> {build_id: ''}

        for step in parsed.steps:
            begin = calendar.timegm(step.start.utctimetuple())
            finish = calendar.timegm(step.end.utctimetuple())
            elapsed = finish - begin
            step_name = step.name

            timeline[begin] = {
                'name': step_name,
                'state': step.state,
                'results': step.results,
                'start': unicode(begin),
                'end': unicode(finish),
                'elapsed': unicode(elapsed),
            }

            day = step.start.date().isoformat()
            name_index[step_name] = {build_id: ''}

            counters.add('build_step_number', step_name)
            counters.add('build_step_duration', step_name, elapsed)
            super_counters.add('build_step_number_by_category',
                               step_name, 1, category)
            super_counters.add('build_step_duration_by_category',
                               step_name, elapsed, category)
            super_counters.add('build_step_number_by_day',
                               step_name, 1, day)
            super_counters.add('build_step_duration_by_day',
                               step_name, elapsed, day)

            day_and_category = '%s.%s' % (day, category)
            super_counters.add('build_step_number_by_day_and_category',
                               step_name, 1, day_and_category)
            super_counters.add('build_step_duration_by_day_and_category',
                               step_name, elapsed, day_and_category)

        mut.insert(timelines_cf, build_id, timeline)
        mut.insert(indices_cf, 'build_step_name_to_build_ids', name_index)
        mut.insert(builds_cf, build_id,
                   {'log_parsing_version': OUR_VERSION})

        yield 'Parsed build %s into %d steps.' % (build_id,
                                                  len(parsed.steps))

    mut.send()