Example #1
    def test_analysis_histogram_heatmap(self):
        self.maxDiff = None
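        # Input series: epoch-second timestamps spaced 12 hours apart, with 0/1 values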
        series = Series.from_array([
            [1628294400, 0],
            [1628337600, 0],
            [1628380800, 1],
            [1628424000, 1],
            [1628467200, 1],
            [1628510400, 0],
            [1628553600, 0],
            [1628596800, 0],
        ])
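        # SimpleThreshold detector on the 'input' series: no lower bound, upper bound of 1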
        factory = NodeFactory.detector('test_node', 'SimpleThreshold')
        factory.set_param_value('inside', False)
        factory.set_param_value('strict', False)
        factory.set_param_value('lower', None)
        factory.set_param_value('upper', 1)
        factory.add_source(InputRef('input'))
        node = factory.build()

        pipeline = Pipeline([node])
        analyzer = Analyzer(pipeline=pipeline, debug=True)
        analysis = analyzer.analyze({'input': series})
        actual_output = analysis.output_format()

        expected_file = os.path.join(os.path.dirname(__file__), 'resources/analysis/expected_histogram_heatmap.json')
        # Uncomment to fix test
        # print(json.dumps(actual_output, indent=2), file=open(expected_file, 'w'))
        expected_output = json.loads(Path(expected_file).read_text())
        self.assertEqual(expected_output, actual_output)
Example #2
    def test_integration_performance(self):
        self.maxDiff = None
        pipeline = Pipeline.from_json(self.load_json('pipeline'))
        input1 = Series.from_array(self.load_json('input1'), unit='ms')
        input2 = Series.from_array(self.load_json('input2'), unit='ms')

        analyzer = Analyzer(pipeline, True)
        result = analyzer.analyze({'1': input1, '2': input2})

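        # Optional: uncomment the cProfile lines below to profile output_format()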
        # pr = cProfile.Profile()
        # pr.enable()
        actual_output = result.output_format()

        # pr.disable()
        # pr.dump_stats('output.prof')
        # s = io.StringIO()
        # sortby = 'cumulative'
        # ps = pstats.Stats(pr, stream=s).sort_stats(sortby)
        # ps.print_stats()
        # print(s.getvalue())

        expected_file = os.path.join(os.path.dirname(__file__),
                                     'resources/perf/expected_output.json')
        # Uncomment to fix test
        # print(json.dumps(actual_output, indent=2), file=open(expected_file, 'w'))
        expected_output = json.loads(Path(expected_file).read_text())
        self.assertEqual(expected_output, actual_output)
Example #3
    def case(self, series, node, expected_series):
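        # Helper: run a single-node pipeline over `series` and compare the node's output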
        pipeline = Pipeline([node])
        analyzer = Analyzer(pipeline=pipeline, debug=True)
        inputs = {'input': series}
        analysis = analyzer.analyze(inputs)
        result = analysis.result_for_node(node.id)

        self.assertEqual(expected_series, result.output_series.as_list())
Example #4
    def case(self, node, expected_series):
        series = self.build_series()
        pipeline = Pipeline([node])
        analyzer = Analyzer(pipeline=pipeline, debug=False)
        analysis = analyzer.analyze({'input': series})
        result = analysis.result_for_node(node.id)

        self.assertEqual(expected_series, result.output_series.as_list())
Example #5
    def case(self, node, s1, s2, expected_index, expected_series):
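        # Two-input helper: feed s1/s2 as the 'lhs'/'rhs' inputs and check both
        # the output values and the resulting index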
        pipeline = Pipeline([node])
        analyzer = Analyzer(pipeline=pipeline, debug=False)
        analysis = analyzer.analyze({'lhs': s1, 'rhs': s2})
        result = analysis.result_for_node(node.id)
        output_series = result.output_series

        self.assertEqual(expected_series, output_series.as_list())
        self.assertEqual(expected_index, list(output_series.pdseries.index))
Example #6
    def case(self, node, expected_series, debug_info):
        series = self.build_triangle()
        pipeline = Pipeline([node])
        analyzer = Analyzer(pipeline=pipeline, debug=False)
        analysis = analyzer.analyze({'input': series})
        result = analysis.result_for_node(node.id)
        self.assertEqual(result.debug_info, debug_info)
        actual_series = result.output_series.as_list()
        for i in range(len(expected_series)):
            self.assertAlmostEqual(expected_series[i], actual_series[i], 2)
Example #7
    def test_parsing_loop_detection_1node(self):
        obj = {
            'nodes': [
                build_test_node('1', ['1']),
            ]
        }

        with self.assertRaises(ValueError) as context:
            Pipeline.from_json(obj)
        self.assertEqual("Found recursion in node 1, path: ['1']",
                         str(context.exception))
Example #8
    def test_parsing_loop_detection_complex(self):
        obj = {
            'nodes': [
                build_test_node('a', ['1'], True),
                build_test_node('b', ['1'], True),
                build_test_node('c', ['a', 'b']),
                build_test_node('d', ['c']),
                build_test_node('e', ['c']),
                build_test_node('f', ['d', 'e']),
            ]
        }
        # import json
        # print(json.dumps(obj, indent=2))
        pipeline = Pipeline.from_json(obj)
Example #9
    def test_analysis_without_debug(self):
        self.maxDiff = None
        series = self.build_triangle()

        factory = NodeFactory.transformer('test_node', 'RollingAggregate')
        factory.set_param_value('window', 5)
        factory.set_param_value('center', False)
        factory.set_param_value('min_periods', 0)
        factory.set_param_value('agg_method', 'max')
        factory.add_source(InputRef('input'))
        node = factory.build()

        pipeline = Pipeline([node])
        analyzer = Analyzer(pipeline=pipeline, debug=False)
        analysis = analyzer.analyze({'input': series})

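        # With debug disabled, the output contains only the anomalies and the original input series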
        expected = {
            "anomalies": [],
            "series": {
                "input": [
                    [0, 0.0],
                    [1000, 1.0],
                    [2000, 2.0],
                    [3000, 3.0],
                    [4000, 4.0],
                    [5000, 5.0],
                    [6000, 6.0],
                    [7000, 7.0],
                    [8000, 8.0],
                    [9000, 9.0],
                    [10000, 9.0],
                    [11000, 8.0],
                    [12000, 7.0],
                    [13000, 6.0],
                    [14000, 5.0],
                    [15000, 4.0],
                    [16000, 3.0],
                    [17000, 2.0],
                    [18000, 1.0],
                    [19000, 0.0],
                ]
            },
        }

        self.assertEqual(expected, analysis.output_format())
Example #10
    def case(self, node, expected_anomalies):
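        # expected_anomalies holds (from, to) pairs in seconds; the output format
        # reports 'from'/'to' in milliseconds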
        series = self.build_triangle()
        pipeline = Pipeline([node])
        analyzer = Analyzer(pipeline=pipeline, debug=False)
        analysis = analyzer.analyze({'input': series})
        anomalies = analysis.anomalies

        for i, expected_anomaly in enumerate(expected_anomalies):
            self.assertEqual(
                {
                    'source_node': node.id,
                    'id': anomalies[i].id(),
                    'from': expected_anomaly[0] * 1000,
                    'to': expected_anomaly[1] * 1000,
                    'duration': expected_anomaly[1] - expected_anomaly[0],
                    'score': 1.0,
                    'source_anomalies': [],
                }, anomalies[i].output_format())
        self.assertEqual(len(expected_anomalies), len(anomalies))
Example #11
    def test_parsing_implicit_aggregator(self):
        obj = {
            'nodes': [{
                'id': '2',
                'group': 'detector',
                'type': 'SimpleThreshold',
                'params': [
                    {'id': 'inside', 'value': False},
                    {'id': 'strict', 'value': False},
                ],
                'sources': []
            }, {
                'id': '3',
                'group': 'detector',
                'type': 'SimpleThreshold',
                'params': [
                    {'id': 'inside', 'value': True},
                    {'id': 'strict', 'value': True},
                ],
                'sources': []
            }]
        }
        pipeline = Pipeline.from_json(obj)

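        # With no explicit aggregator in the JSON, both detector nodes are attached
        # directly to the implicit _Root node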
        self.assertEqual('_Root(Node[2],Node[3])', str(pipeline.root_node))
        self.assertEqual(2, len(pipeline.root_node.sources))
        self.assertEqual('Node[2]', str(pipeline.root_node.sources[0]))
        self.assertEqual('Node[3]', str(pipeline.root_node.sources[1]))
Example #12
    def test_analysis_with_debug(self):
        self.maxDiff = None
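        # Same RollingAggregate setup as the no-debug test; with debug=True the
        # expected output is kept in a JSON resource file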
        series = self.build_triangle()

        factory = NodeFactory.transformer('test_node', 'RollingAggregate')
        factory.set_param_value('window', 5)
        factory.set_param_value('center', False)
        factory.set_param_value('min_periods', 0)
        factory.set_param_value('agg_method', 'max')
        factory.add_source(InputRef('input'))
        node = factory.build()

        pipeline = Pipeline([node])
        analyzer = Analyzer(pipeline=pipeline, debug=True)
        analysis = analyzer.analyze({'input': series})
        actual_output = analysis.output_format()

        expected_file = os.path.join(os.path.dirname(__file__), 'resources/analysis/expected_simplified.json')
        # Uncomment to fix test
        # print(json.dumps(actual_output, indent=2), file=open(expected_file, 'w'))
        expected_output = json.loads(Path(expected_file).read_text())
        self.assertEqual(expected_output, actual_output)
Example #13
    def test_parsing_explicit_aggregator(self):
        obj = {
            'nodes': [{
                'id': '1',
                'group': 'aggregator',
                'type': 'Union',
                'params': [],
                'sources': [
                    {'type': 'node', 'ref': '2'},
                    {'type': 'node', 'ref': '3'},
                ]
            }, {
                'id': '2',
                'group': 'detector',
                'type': 'SimpleThreshold',
                'params': [
                    {'id': 'inside', 'value': False},
                    {'id': 'strict', 'value': False},
                ],
                'sources': []
            }, {
                'id': '3',
                'group': 'detector',
                'type': 'SimpleThreshold',
                'params': [
                    {'id': 'inside', 'value': True},
                    {'id': 'strict', 'value': False},
                ],
                'sources': []
            }]
        }

        pipeline = Pipeline.from_json(obj)

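        # The explicit Union aggregator becomes the single child of _Root, with the
        # two SimpleThreshold detectors as its sources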
        self.assertEqual('_Root(Node[1])', str(pipeline.root_node))
        self.assertEqual(1, len(pipeline.root_node.sources))
        or_node = pipeline.resolve_node_reference(
            pipeline.root_node.sources[0].ref)
        self.assertEqual('Union(Node[2],Node[3])[1]', str(or_node))
        self.assertEqual(2, len(or_node.sources))
        self.assertEqual('Node[2]', str(or_node.sources[0]))
        self.assertEqual('Node[3]', str(or_node.sources[1]))
        or_node_source_0_resolved = pipeline.resolve_node_reference(
            or_node.sources[0].ref)
        or_node_source_1_resolved = pipeline.resolve_node_reference(
            or_node.sources[1].ref)
        self.assertEqual('SimpleThreshold(None,None,False,False)[2]',
                         str(or_node_source_0_resolved))
        self.assertEqual('SimpleThreshold(None,None,True,False)[3]',
                         str(or_node_source_1_resolved))