Example #1
    def write(self, data):
        quantile_25, quantile_75, states = data
        prefix = '%s.%s' % (self.namespace, self.service)
        count = len(states)
        invalid = 0
        now = int(time())
        tuples = []
        for name, state in states.iteritems():
            if state:
                invalid += 1
            name = extract_service_name(name)
            tuples.append(TimeSeriesTuple('%s.%s' % (prefix, name), now,
                                          state))

        tuples.append(
            TimeSeriesTuple('%s.%s' % (prefix, 'quantile_25'), now,
                            quantile_25))
        tuples.append(
            TimeSeriesTuple('%s.%s' % (prefix, 'quantile_75'), now,
                            quantile_75))
        tuples.append(TimeSeriesTuple('%s.%s' % (prefix, 'count'), now, count))
        tuples.append(
            TimeSeriesTuple('%s.%s' % (prefix, 'invalid'), now, invalid))

        self.output_sink.write(tuples)
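All of the examples on this page construct TimeSeriesTuple with the fields name, timestamp and value (compare the keyword arguments in Examples #7 and #13). A minimal compatible definition, assuming a plain namedtuple as that usage implies; the project's actual definition may differ:

from collections import namedtuple

# Immutable record for a single datapoint: metric name, unix timestamp, value.
TimeSeriesTuple = namedtuple('TimeSeriesTuple', ['name', 'timestamp', 'value'])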
Example #2
    def test_process_for_valid_neg_input(self):
        data = [
            TimeSeriesTuple('service1', 100, 8.1),
            TimeSeriesTuple('service1', 110, 9.1),
            TimeSeriesTuple('service1', 120, 8)
        ]
        self.options['params']['error_params']['threshold'] = 2
        self.test_seasonal_decomposition_ensemble = seasonal_decomposition_ensemble.SeasonalDecompositionEnsemble(
            config=analyzer, logger=None, options=self.options)

        states = self.test_seasonal_decomposition_ensemble.process(data)
        exp_states = (8.0, 3.0, 6.0, 9.0, -1.0, -1.0 / 9, {
            'abs_quantile': {
                'flag': 0.0
            },
            'norm_tukey': {
                'flag': 1.0
            },
            'overall': {
                'consensus': 2.0,
                'flag': 1
            },
            'abs_tukey': {
                'flag': 1.0
            },
            'norm_quantile': {
                'flag': 0.0
            }
        })
        expect(states).to.be.equal(exp_states)
Example #3
 def test_get_closest_datapoint(self):
     expect(get_closest_datapoint(self.timeseries, 10)).to.be.equal(
         TimeSeriesTuple('_', 10, 1))
     expect(get_closest_datapoint(self.timeseries, 40)).to.be.equal(
         TimeSeriesTuple('_', 30, 1))
     expect(get_closest_datapoint(self.timeseries, 99)).to.be.equal(
         TimeSeriesTuple('_', 100, 1))
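Example #3 runs against the self.timeseries built in the setUp of Example #12 and pins down the behavior of get_closest_datapoint: an exact timestamp is returned as-is, and otherwise the nearest datapoint wins (40 maps to 30, 99 maps to 100). A minimal sketch consistent with those expectations, not the project's actual implementation:

def get_closest_datapoint(timeseries, timestamp):
    # Nearest timestamp wins; on a tie min() keeps the earlier datapoint,
    # which matches the expectation for timestamp 40 in Example #3.
    return min(timeseries, key=lambda dp: abs(dp.timestamp - timestamp))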
Example #4
    def write(self, state):
        (deviation, states) = state
        prefix = '%s.%s' % (self.namespace, self.service)
        now = int(time())
        tuples = []
        for metric, value in states.iteritems():
            tuples.append(TimeSeriesTuple('%s.%s' % (prefix, metric), now, value))

        tuples.append(TimeSeriesTuple('%s.%s' % (prefix, 'deviation'), now, deviation))
        self.output_sink.write(tuples)
Example #5
    def test_process_for_valid_pos_input(self):
        data = [TimeSeriesTuple('service1', 100, 8.1),
                TimeSeriesTuple('service1', 110, 9.1),
                TimeSeriesTuple('service1', 120, 10.1)]

        self.options['params']['error_params']['error_handling'] = 'tukey'
        self.options['params']['error_params']['error_type'] = 'abs'
        self.test_seasonal_decomposition = seasonal_decomposition.SeasonalDecomposition(
            config=analyzer, logger=None, options=self.options)

        states = self.test_seasonal_decomposition.process(data)
        exp_states = (10.1, 9.0, 3.0, 6.0, 10.1 - 9, {'flag': 1.0})
        expect(states).to.be.equal(exp_states)
Example #6
 def test_sink_write(self):
     self.my_sink.redis_pipeline.setex = self.stub_setex
     self.my_sink.redis_pipeline.execute = self.stub_execute
     data_tuple = TimeSeriesTuple('service', 60, 1.0)
     for nr_elements_to_insert in [0, 20]:
         redis_value = [RedisTimeStamped({'ttl': 60}, data_tuple)
                        ] * nr_elements_to_insert
         self.my_sink.write(redis_value)
         expected_pipeline = [
             ('service:60', 60,
              (pickle.dumps(TimeSeriesTuple('service', 60, 1.0))))
         ] * (nr_elements_to_insert % self.configuration['pipeline_size'])
         expect(self.redis_pipeline_list).to.equal(expected_pipeline)
Example #7
    def test_write(self):
        state = (10, {'flag': 0})
        exp_tuples = [
            TimeSeriesTuple(name='FlowDifference.flow_service1.flag',
                            timestamp=1000,
                            value=0),
            TimeSeriesTuple(name='FlowDifference.flow_service1.deviation',
                            timestamp=1000,
                            value=10)
        ]
        self.test_flow_difference.write(state)

        self.test_flow_difference.output_sink.write.assert_called_with(
            exp_tuples)
Example #8
    def test_sink_read_should_return_RedisTimeStamped_models(self):
        data = {}
        keys = [
            'service1', 'service2', 'service3', 'service4', 'service5',
            'service6'
        ]
        self.my_sink.connection.keys.return_value = keys

        for i in range(1, 7):
            service_name = 'service%s' % (i)
            data[service_name] = RedisTimeStamped({'ttl': 120},
                                                  TimeSeriesTuple(
                                                      service_name, 60, 1.0))

        data['service4'] = None

        def stub_get(key):
            item = data.get(key)
            if item:
                return pickle.dumps(item)

        self.my_sink.connection.get = stub_get
        count = 0
        for item in self.my_sink.read(None):
            expect(item).to.be.a(RedisTimeStamped)
            count += 1
        expect(count).to.equal(5)
Example #9
 def test_insert_missing_datapoints(self):
     default = 111
     step_size = 10
     exp = [
         TimeSeriesTuple('_', 10, 1),
         TimeSeriesTuple('_', 20, 1),
         TimeSeriesTuple('_', 30, 1),
         TimeSeriesTuple('_', 40, 111),
         TimeSeriesTuple('_', 50, 1),
         TimeSeriesTuple('_', 60, 1),
         TimeSeriesTuple('_', 70, 111),
         TimeSeriesTuple('_', 80, 111),
         TimeSeriesTuple('_', 90, 111),
         TimeSeriesTuple('_', 100, 1)
     ]
     expect(insert_missing_datapoints(self.timeseries, default,
                                      step_size)).to.be.equal(exp)
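Example #9, again using the setUp from Example #12, specifies insert_missing_datapoints: the gaps at 40, 70, 80 and 90 are filled with the default value at step_size intervals between the first and last timestamp. A sketch that reproduces that expectation, assuming a sorted, non-empty input series; it is not the project's actual implementation:

def insert_missing_datapoints(timeseries, default, step_size):
    # Walk from the first to the last timestamp in step_size increments and
    # substitute a default-valued datapoint wherever a timestamp is missing.
    existing = {dp.timestamp: dp for dp in timeseries}
    name = timeseries[0].name
    filled = []
    timestamp = timeseries[0].timestamp
    while timestamp <= timeseries[-1].timestamp:
        filled.append(existing.get(timestamp,
                                   TimeSeriesTuple(name, timestamp, default)))
        timestamp += step_size
    return filled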
Example #10
 def test_read_data(self):
     # for data points in time
     self.test_flow_difference.metric_sink.read = self.stub_read_metric_sink_valid
     data = self.test_flow_difference._read_data('')
     expect(data).to.be.equal([
         TimeSeriesTuple(name='cpu', timestamp=1000 - 2, value=10),
         TimeSeriesTuple(name='cpu', timestamp=1000 - 1, value=10),
         TimeSeriesTuple(name='cpu', timestamp=1000, value=10)
     ])
     # for stale data points
     self.test_flow_difference.metric_sink.read = self.stub_read_metric_sink_stale
     data = self.test_flow_difference._read_data('')
     expect(data).to.be.equal(None)
     # no invalid data
     self.test_flow_difference.metric_sink.read = self.stub_read_metric_sink_invalid
     data = self.test_flow_difference._read_data('')
     expect(data).to.be.equal(None)
Example #11
    def write(self, state):
        (input_value, seasonal, trend, model, error_abs, error_norm,
         states) = state
        prefix = '%s.%s' % (self.namespace, self.service)
        now = int(time())
        tuples = []
        for name, state_dict in states.iteritems():
            for metric, value in state_dict.iteritems():
                tuples.append(
                    TimeSeriesTuple('%s.%s.%s' % (prefix, name, metric), now,
                                    value))

        if not input_value:
            input_value = 0.0
        tuples.append(
            TimeSeriesTuple('%s.%s' % (prefix, 'input'), now, input_value))
        tuples.append(
            TimeSeriesTuple('%s.%s' % (prefix, 'seasonal'), now, seasonal))
        tuples.append(TimeSeriesTuple('%s.%s' % (prefix, 'trend'), now, trend))
        tuples.append(TimeSeriesTuple('%s.%s' % (prefix, 'model'), now, model))
        tuples.append(
            TimeSeriesTuple('%s.%s' % (prefix, 'error'), now, error_abs))
        tuples.append(
            TimeSeriesTuple('%s.%s' % (prefix, 'error_norm'), now, error_norm))

        self.output_sink.write(tuples)
Example #12
 def setUp(self):
     self.timeseries = [
         TimeSeriesTuple('_', 10, 1),
         TimeSeriesTuple('_', 20, 1),
         TimeSeriesTuple('_', 30, 1),
         TimeSeriesTuple('_', 50, 1),
         TimeSeriesTuple('_', 60, 1),
         TimeSeriesTuple('_', 100, 1)
     ]
Example #13
 def stub_read_data(self, metric):
     if metric == 'service1.out':
         return [TimeSeriesTuple(name='cpu', timestamp=10, value=10),
                 TimeSeriesTuple(name='cpu', timestamp=20, value=10),
                 TimeSeriesTuple(name='cpu', timestamp=30, value=10)]
     elif metric == 'service2.in':
         return [TimeSeriesTuple(name='cpu', timestamp=10, value=20),
                 TimeSeriesTuple(name='cpu', timestamp=20, value=20),
                 TimeSeriesTuple(name='cpu', timestamp=30, value=20)]
Example #14
 def test_write(self):
     state = (10.1, 9.0, 3, 7, 0.1, {'flag': 1.0})
     exp_tuples = [TimeSeriesTuple(name='SeasonalDecomposition.stl_service1.flag', timestamp=1000, value=1.0),
                   TimeSeriesTuple(name='SeasonalDecomposition.stl_service1.model', timestamp=1000, value=9.0),
                   TimeSeriesTuple(name='SeasonalDecomposition.stl_service1.input', timestamp=1000, value=10.1),
                   TimeSeriesTuple(name='SeasonalDecomposition.stl_service1.seasonal', timestamp=1000, value=3),
                   TimeSeriesTuple(name='SeasonalDecomposition.stl_service1.trend', timestamp=1000, value=7),
                   TimeSeriesTuple(name='SeasonalDecomposition.stl_service1.error', timestamp=1000, value=0.1)]
     self.test_seasonal_decomposition.write(state)
     self.test_seasonal_decomposition.output_sink.write.assert_called_with(exp_tuples)
Example #15
 def test_read_for_incomplete_data(self):
     self.test_seasonal_decomposition._read_tdigest = Mock()
     # for incomplete data points
     self.test_seasonal_decomposition.metric_sink.read = self.stub_read_metric_sink_incomplete
     data = self.test_seasonal_decomposition.read()
     expect(data).to.be.equal([TimeSeriesTuple(name='cpu', timestamp=1000 - 6, value=10),
                               TimeSeriesTuple(name='cpu', timestamp=1000 - 5, value=10),
                               TimeSeriesTuple(name='cpu', timestamp=1000 - 4, value=10),
                               TimeSeriesTuple(name='cpu', timestamp=1000 - 3, value=10),
                               TimeSeriesTuple(name='cpu', timestamp=1000 - 2, value=False),
                               TimeSeriesTuple(name='cpu', timestamp=1000 - 1, value=10)])
Example #16
 def test_read_for_valid_data(self):
     self.test_seasonal_decomposition_ensemble._read_tdigest = Mock()
     # for data points in time
     self.test_seasonal_decomposition_ensemble.metric_sink.read = self.stub_read_metric_sink_valid
     data = self.test_seasonal_decomposition_ensemble.read()
     expect(data).to.be.equal([
         TimeSeriesTuple(name='cpu', timestamp=1000 - 6, value=10),
         TimeSeriesTuple(name='cpu', timestamp=1000 - 5, value=10),
         TimeSeriesTuple(name='cpu', timestamp=1000 - 4, value=10),
         TimeSeriesTuple(name='cpu', timestamp=1000 - 3, value=10),
         TimeSeriesTuple(name='cpu', timestamp=1000 - 2, value=10),
         TimeSeriesTuple(name='cpu', timestamp=1000 - 1, value=10)
     ])
Example #17
 def stub_read_metric_sink_incomplete(self, option):
     vals = range(0, 8)
     vals.remove(2)
     shuffle(vals)
     for i in vals:
         yield TimeSeriesTuple(name='cpu', timestamp=1000 - i, value=10)
Example #18
 def test_write(self):
     state = (10.1, 3, 7, 10, 0.1, 0.01, {'overall': {'flag': -1.0}})
     exp_tuples = [
         TimeSeriesTuple(
             name='SeasonalDecompositionEnsemble.stle_service1.overall.flag',
             timestamp=1000,
             value=-1.0),
         TimeSeriesTuple(
             name='SeasonalDecompositionEnsemble.stle_service1.input',
             timestamp=1000,
             value=10.1),
         TimeSeriesTuple(
             name='SeasonalDecompositionEnsemble.stle_service1.seasonal',
             timestamp=1000,
             value=3),
         TimeSeriesTuple(
             name='SeasonalDecompositionEnsemble.stle_service1.trend',
             timestamp=1000,
             value=7),
         TimeSeriesTuple(
             name='SeasonalDecompositionEnsemble.stle_service1.model',
             timestamp=1000,
             value=10),
         TimeSeriesTuple(
             name='SeasonalDecompositionEnsemble.stle_service1.error',
             timestamp=1000,
             value=0.1),
         TimeSeriesTuple(
             name='SeasonalDecompositionEnsemble.stle_service1.error_norm',
             timestamp=1000,
             value=0.01),
     ]
     self.test_seasonal_decomposition_ensemble.write(state)
     self.test_seasonal_decomposition_ensemble.output_sink.write.assert_called_with(
         exp_tuples)
     # for invalid input value
     state = (False, 3, 7, 10, 0.1, 0.01, {'overall': {'flag': -1.0}})
     exp_tuples = [
         TimeSeriesTuple(
             name='SeasonalDecompositionEnsemble.stle_service1.overall.flag',
             timestamp=1000,
             value=-1.0),
         TimeSeriesTuple(
             name='SeasonalDecompositionEnsemble.stle_service1.input',
             timestamp=1000,
             value=0.0),
         TimeSeriesTuple(
             name='SeasonalDecompositionEnsemble.stle_service1.seasonal',
             timestamp=1000,
             value=3),
         TimeSeriesTuple(
             name='SeasonalDecompositionEnsemble.stle_service1.trend',
             timestamp=1000,
             value=7),
         TimeSeriesTuple(
             name='SeasonalDecompositionEnsemble.stle_service1.model',
             timestamp=1000,
             value=10),
         TimeSeriesTuple(
             name='SeasonalDecompositionEnsemble.stle_service1.error',
             timestamp=1000,
             value=0.1),
         TimeSeriesTuple(
             name='SeasonalDecompositionEnsemble.stle_service1.error_norm',
             timestamp=1000,
             value=0.01),
     ]
     self.test_seasonal_decomposition_ensemble.write(state)
     self.test_seasonal_decomposition_ensemble.output_sink.write.assert_called_with(
         exp_tuples)
Example #19
 def test_collector_process_ignores_whitelisted_but_blacklisted_metrics(
         self):
     collector.process(
         self.writer,
         TimeSeriesTuple('host.ip.127-0-0-1.serv1.cpu_crit.avg', 1, 1))
     self.writer.write.called.should.be.false
Example #20
 def stub_read_metric_sink_stale(self, option):
     for i in [2, 0, 1]:
         yield TimeSeriesTuple(name='cpu',
                               timestamp=1000 - 30 - i,
                               value=10)
Example #21
 def stub_read_metric_sink_missing(self, option):
     vals = range(0, 6)
     shuffle(vals)
     for i in vals:
         yield TimeSeriesTuple(name='cpu', timestamp=1000 - i, value=10)
Example #22
 def test_collector_process_accepts_whitelisted_and_not_blacklisted_metrics(
         self):
     collector.process(
         self.writer,
         TimeSeriesTuple('host.ip.127-0-0-1.serv1.cpu.avg', 1, 1))
     self.writer.write.called.should.be.true