Code Example #1
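This test exercises DatabaseLogs.check_and_trigger_conditions: it builds three LogCondition objects with different regexes, runs them against the sample logs under input_files/LOG-0, and asserts which column families (including NO_COL_FAMILY for messages without one) each condition triggered for, along with the matched log messages.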
 def test_check_and_trigger_conditions(self):
     this_path = os.path.abspath(os.path.dirname(__file__))
     logs_path_prefix = os.path.join(this_path, 'input_files/LOG-0')
     column_families = ['default', 'col-fam-A', 'col-fam-B']
     db_logs = DatabaseLogs(logs_path_prefix, column_families)
     # matches, has 2 col_fams
     condition1 = LogCondition.create(Condition('cond-A'))
     condition1.set_parameter('regex', 'random log message')
     # matches, multiple lines message
     condition2 = LogCondition.create(Condition('cond-B'))
     condition2.set_parameter('regex', 'continuing on next line')
     # does not match
     condition3 = LogCondition.create(Condition('cond-C'))
     condition3.set_parameter('regex', 'this should match no log')
     db_logs.check_and_trigger_conditions(
         [condition1, condition2, condition3])
     cond1_trigger = condition1.get_trigger()
     self.assertEqual(2, len(cond1_trigger.keys()))
     self.assertSetEqual({'col-fam-A', NO_COL_FAMILY},
                         set(cond1_trigger.keys()))
     self.assertEqual(2, len(cond1_trigger['col-fam-A']))
     messages = [
         "[db/db_impl.cc:563] [col-fam-A] random log message for testing",
         "[db/db_impl.cc:653] [col-fam-A] another random log message"
     ]
     self.assertIn(cond1_trigger['col-fam-A'][0].get_message(), messages)
     self.assertIn(cond1_trigger['col-fam-A'][1].get_message(), messages)
     self.assertEqual(1, len(cond1_trigger[NO_COL_FAMILY]))
     self.assertEqual(
         cond1_trigger[NO_COL_FAMILY][0].get_message(),
         "[db/db_impl.cc:331] [unknown] random log message no column family"
     )
     cond2_trigger = condition2.get_trigger()
     self.assertEqual(['col-fam-B'], list(cond2_trigger.keys()))
     self.assertEqual(1, len(cond2_trigger['col-fam-B']))
     self.assertEqual(
         cond2_trigger['col-fam-B'][0].get_message(),
         "[db/db_impl.cc:234] [col-fam-B] log continuing on next line\n" +
         "remaining part of the log")
     self.assertIsNone(condition3.get_trigger())
Code Example #2
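This test first injects column-family option values via DatabaseOptions.update_options, then sets up three OptionCondition objects whose 'evaluate' expressions mix CFOptions, DBOptions, and miscellaneous options, and checks the per-column-family trigger values returned by get_trigger().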
    def test_check_and_trigger_conditions(self):
        # options only from CFOptions
        # setup the OptionCondition objects to check and trigger
        update_dict = {
            'CFOptions.level0_file_num_compaction_trigger': {
                'col_fam_A': '4'
            },
            'CFOptions.max_bytes_for_level_base': {
                'col_fam_A': '10'
            }
        }
        self.db_options.update_options(update_dict)
        cond1 = Condition('opt-cond-1')
        cond1 = OptionCondition.create(cond1)
        cond1.set_parameter('options', [
            'CFOptions.level0_file_num_compaction_trigger',
            'TableOptions.BlockBasedTable.block_restart_interval',
            'CFOptions.max_bytes_for_level_base'
        ])
        cond1.set_parameter(
            'evaluate', 'int(options[0])*int(options[1])-int(options[2])>=0')
        # only DBOptions
        cond2 = Condition('opt-cond-2')
        cond2 = OptionCondition.create(cond2)
        cond2.set_parameter('options', [
            'DBOptions.db_write_buffer_size', 'bloom_bits',
            'rate_limiter_bytes_per_sec'
        ])
        cond2.set_parameter(
            'evaluate',
            '(int(options[2]) * int(options[1]) * int(options[0]))==0')
        # mix of CFOptions and DBOptions
        cond3 = Condition('opt-cond-3')
        cond3 = OptionCondition.create(cond3)
        cond3.set_parameter(
            'options',
            [
                'DBOptions.db_write_buffer_size',  # 0
                'CFOptions.num_levels',  # 5, 7
                'bloom_bits'  # 4
            ])
        cond3.set_parameter(
            'evaluate', 'int(options[2])*int(options[0])+int(options[1])>6')
        self.db_options.check_and_trigger_conditions([cond1, cond2, cond3])

        cond1_trigger = {'col_fam_A': ['4', '16', '10']}
        self.assertDictEqual(cond1_trigger, cond1.get_trigger())
        cond2_trigger = {NO_COL_FAMILY: ['0', '4', '1024000']}
        self.assertDictEqual(cond2_trigger, cond2.get_trigger())
        cond3_trigger = {'default': ['0', '7', '4']}
        self.assertDictEqual(cond3_trigger, cond3.get_trigger())
Code Example #3
File: db_options_parser.py  Project: zhoumhh/rocksdb
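A standalone main() that drives DatabaseOptions end to end: it loads an OPTIONS file plus miscellaneous options, fetches and updates option values, writes a new options config, and then evaluates three OptionCondition objects against a second OPTIONS file; the expected triggers are shown in the trailing comments.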
def main():
    options_file = 'temp/OPTIONS_default.tmp'
    misc_options = ["misc_opt1=10", "misc_opt2=100", "misc_opt3=1000"]
    db_options = DatabaseOptions(options_file, misc_options)
    print(db_options.get_column_families())
    get_op = db_options.get_options([
        'DBOptions.db_log_dir', 'DBOptions.is_fd_close_on_exec',
        'CFOptions.memtable_prefix_bloom_size_ratio',
        'TableOptions.BlockBasedTable.verify_compression', 'misc_opt1',
        'misc_opt3'
    ])
    print(get_op)
    get_op['DBOptions.db_log_dir'][NO_COL_FAMILY] = 'some_random_path'
    get_op['CFOptions.memtable_prefix_bloom_size_ratio']['default'] = 2.31
    get_op['TableOptions.BlockBasedTable.verify_compression']['default'] = 4.4
    get_op['misc_opt2'] = {}
    get_op['misc_opt2'][NO_COL_FAMILY] = 2
    db_options.update_options(get_op)
    print('options updated in ' + db_options.generate_options_config(123))
    print('misc options ' + repr(db_options.get_misc_options()))

    options_file = 'temp/OPTIONS_123.tmp'
    db_options = DatabaseOptions(options_file, misc_options)
    # only CFOptions
    cond1 = Condition('opt-cond-1')
    cond1 = OptionCondition.create(cond1)
    cond1.set_parameter('options', [
        'CFOptions.level0_file_num_compaction_trigger',
        'CFOptions.write_buffer_size', 'CFOptions.max_bytes_for_level_base'
    ])
    cond1.set_parameter('evaluate',
                        'int(options[0])*int(options[1])-int(options[2])>=0')
    # only DBOptions
    cond2 = Condition('opt-cond-2')
    cond2 = OptionCondition.create(cond2)
    cond2.set_parameter('options', [
        'DBOptions.max_file_opening_threads',
        'DBOptions.table_cache_numshardbits', 'misc_opt2', 'misc_opt3'
    ])
    cond2_expr = ('(int(options[0])*int(options[2]))-' +
                  '((4*int(options[1])*int(options[3]))/10)==0')
    cond2.set_parameter('evaluate', cond2_expr)
    # mix of CFOptions and DBOptions
    cond3 = Condition('opt-cond-3')
    cond3 = OptionCondition.create(cond3)
    cond3.set_parameter(
        'options',
        [
            'DBOptions.max_background_jobs',  # 2
            'DBOptions.write_thread_slow_yield_usec',  # 3
            'CFOptions.num_levels',  # 7
            'misc_opt1'  # 10
        ])
    cond3_expr = ('(int(options[3])*int(options[2]))-' +
                  '(int(options[1])*int(options[0]))==64')
    cond3.set_parameter('evaluate', cond3_expr)

    db_options.check_and_trigger_conditions([cond1, cond2, cond3])
    print(cond1.get_trigger())  # {'col-fam-B': ['4', '10', '10']}
    print(cond2.get_trigger())  # {'DB_WIDE': ['16', '4']}
    # {'col-fam-B': ['2', '3', '10'], 'col-fam-A': ['2', '3', '7']}
    print(cond3.get_trigger())
Code Example #4
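A main() driver for LogStatsParser: it fetches time-series data for a list of statistics, prints several aggregations (latest, oldest, max, min, avg), and then evaluates three TimeSeriesCondition objects using the 'evaluate_expression' behavior, both with and without an aggregation_op.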
def main():
    # populating the statistics
    log_stats = LogStatsParser('temp/db_stats_fetcher_main_LOG.tmp', 20)
    print(log_stats.type)
    print(log_stats.keys_ts)
    print(log_stats.logs_file_prefix)
    print(log_stats.stats_freq_sec)
    print(log_stats.duration_sec)
    statistics = [
        'rocksdb.number.rate_limiter.drains.count',
        'rocksdb.number.block.decompressed.count', 'rocksdb.db.get.micros.p50',
        'rocksdb.manifest.file.sync.micros.p99', 'rocksdb.db.get.micros.p99'
    ]
    log_stats.fetch_timeseries(statistics)
    print()
    print(log_stats.keys_ts)
    # aggregated statistics
    print()
    print(
        log_stats.fetch_aggregated_values(
            NO_ENTITY, statistics, TimeSeriesData.AggregationOperator.latest))
    print(
        log_stats.fetch_aggregated_values(
            NO_ENTITY, statistics, TimeSeriesData.AggregationOperator.oldest))
    print(
        log_stats.fetch_aggregated_values(
            NO_ENTITY, statistics, TimeSeriesData.AggregationOperator.max))
    print(
        log_stats.fetch_aggregated_values(
            NO_ENTITY, statistics, TimeSeriesData.AggregationOperator.min))
    print(
        log_stats.fetch_aggregated_values(
            NO_ENTITY, statistics, TimeSeriesData.AggregationOperator.avg))
    # condition 'evaluate_expression' that evaluates to true
    cond1 = Condition('cond-1')
    cond1 = TimeSeriesCondition.create(cond1)
    cond1.set_parameter('keys', statistics)
    cond1.set_parameter('behavior', 'evaluate_expression')
    cond1.set_parameter('evaluate', 'keys[3]-keys[2]>=0')
    cond1.set_parameter('aggregation_op', 'avg')
    # condition 'evaluate_expression' that evaluates to false
    cond2 = Condition('cond-2')
    cond2 = TimeSeriesCondition.create(cond2)
    cond2.set_parameter('keys', statistics)
    cond2.set_parameter('behavior', 'evaluate_expression')
    cond2.set_parameter('evaluate', '((keys[1]-(2*keys[0]))/100)<3000')
    cond2.set_parameter('aggregation_op', 'latest')
    # condition 'evaluate_expression' that evaluates to true; no aggregation_op
    cond3 = Condition('cond-3')
    cond3 = TimeSeriesCondition.create(cond3)
    cond3.set_parameter('keys', [statistics[2], statistics[3]])
    cond3.set_parameter('behavior', 'evaluate_expression')
    cond3.set_parameter('evaluate', '(keys[1]/keys[0])>23')
    # check remaining methods
    conditions = [cond1, cond2, cond3]
    print()
    print(log_stats.get_keys_from_conditions(conditions))
    log_stats.check_and_trigger_conditions(conditions)
    print()
    print(cond1.get_trigger())
    print(cond2.get_trigger())
    print(cond3.get_trigger())
Code Example #5
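This test mocks fetch_timeseries() (keys_ts is already pre-populated) and checks an 'evaluate_expression' condition with no aggregation_op, so the resulting trigger maps each timestamp where the expression holds to the corresponding statistic values.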
 def test_check_and_trigger_conditions_eval(self):
     # mock fetch_timeseries() because 'keys_ts' has been pre-populated
     self.log_stats_parser.fetch_timeseries = MagicMock()
     # condition: evaluate_expression
     cond1 = Condition('cond-1')
     cond1 = TimeSeriesCondition.create(cond1)
     cond1.set_parameter('keys', 'rocksdb.db.get.micros.p50')
     cond1.set_parameter('behavior', 'evaluate_expression')
     keys = [
         'rocksdb.manifest.file.sync.micros.p99',
         'rocksdb.db.get.micros.p50'
     ]
     cond1.set_parameter('keys', keys)
     cond1.set_parameter('evaluate', 'keys[0]-(keys[1]*100)>500')
     self.log_stats_parser.check_and_trigger_conditions([cond1])
     expected_trigger = {
         NO_ENTITY: {
             1530896414: [9938.0, 16.31508],
             1530896440: [9938.0, 16.346602],
             1530896466: [9938.0, 16.284669],
             1530896492: [9938.0, 16.16005]
         }
     }
     self.assertDictEqual(expected_trigger, cond1.get_trigger())
     self.log_stats_parser.fetch_timeseries.assert_called_once()
Code Example #6
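Same setup as above, but with aggregation_op set to 'latest': the expression is evaluated once on the aggregated values, so get_trigger() is None when the expression is false and returns the aggregated values when it is true.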
 def test_check_and_trigger_conditions_eval_agg(self):
     # mock fetch_timeseries() because 'keys_ts' has been pre-populated
     self.log_stats_parser.fetch_timeseries = MagicMock()
     # condition: evaluate_expression
     cond1 = Condition('cond-1')
     cond1 = TimeSeriesCondition.create(cond1)
     cond1.set_parameter('keys', 'rocksdb.db.get.micros.p50')
     cond1.set_parameter('behavior', 'evaluate_expression')
     keys = [
         'rocksdb.manifest.file.sync.micros.p99',
         'rocksdb.db.get.micros.p50'
     ]
     cond1.set_parameter('keys', keys)
     cond1.set_parameter('aggregation_op', 'latest')
     # condition evaluates to FALSE
     cond1.set_parameter('evaluate', 'keys[0]-(keys[1]*100)>200')
     self.log_stats_parser.check_and_trigger_conditions([cond1])
     expected_cond_trigger = {NO_ENTITY: [1792.0, 15.9638]}
     self.assertIsNone(cond1.get_trigger())
     # condition evaluates to TRUE
     cond1.set_parameter('evaluate', 'keys[0]-(keys[1]*100)<200')
     self.log_stats_parser.check_and_trigger_conditions([cond1])
     expected_cond_trigger = {NO_ENTITY: [1792.0, 15.9638]}
     self.assertDictEqual(expected_cond_trigger, cond1.get_trigger())
     # ensure that fetch_timeseries() was called
     self.log_stats_parser.fetch_timeseries.assert_called()
Code Example #7
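This test checks the 'bursty' behavior: with window_sec and rate_threshold set, the trigger reports the timestamp(s) at which the statistic's observed rate over the window exceeds the threshold, mapped to that rate.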
 def test_check_and_trigger_conditions_bursty(self):
     # mock fetch_timeseries() because 'keys_ts' has been pre-populated
     self.log_stats_parser.fetch_timeseries = MagicMock()
     # condition: bursty
     cond1 = Condition('cond-1')
     cond1 = TimeSeriesCondition.create(cond1)
     cond1.set_parameter('keys', 'rocksdb.db.get.micros.p50')
     cond1.set_parameter('behavior', 'bursty')
     cond1.set_parameter('window_sec', 40)
     cond1.set_parameter('rate_threshold', 0)
     self.log_stats_parser.check_and_trigger_conditions([cond1])
     expected_cond_trigger = {NO_ENTITY: {1530896440: 0.9767546362322214}}
     self.assertDictEqual(expected_cond_trigger, cond1.get_trigger())
     # ensure that fetch_timeseries() was called once
     self.log_stats_parser.fetch_timeseries.assert_called_once()