Code example #1
0
    def test_16_rbac(self, mock_is_role_cloud_admin,
                     mock_get_resource_list_from_uve_type):
        """Verify RBAC enforcement on the VN UVE list API.

        An admin role sees every VN UVE, a non-admin role sees only the
        VNs returned by the resource-list lookup, and an HTTP 403 raised
        by that lookup is propagated back to the API caller.
        """
        logging.info("%%% test_16_rbac %%%")
        if AnalyticsTest._check_skip_test() is True:
            return True

        analytics = self.useFixture(
            AnalyticsFixture(logging, builddir, self.__class__.cassandra_port))
        assert analytics.verify_on_setup()
        collectors = [analytics.get_collector()]
        start_time = UTCTimestampUsec() - 3600 * 1000 * 1000
        vr_gen = self.useFixture(
            GeneratorFixture("contrail-vrouter-agent", collectors, logging,
                             analytics.get_opserver_port(), start_time))
        assert vr_gen.verify_on_setup()
        logging.info("Starting intervn gen " + str(UTCTimestampUsec()))
        # Publish two VN UVEs.
        vr_gen.generate_intervn()

        # Admin role: no resource filtering, so both VN UVEs are visible.
        mock_is_role_cloud_admin.return_value = True
        mock_get_resource_list_from_uve_type.return_value = None
        assert len(analytics.get_opserver_vns()) == 2

        # Non-admin role: only the single permitted VN is visible.
        mock_is_role_cloud_admin.return_value = False
        mock_get_resource_list_from_uve_type.return_value = set(
            ["default-domain:vn0"])
        assert len(analytics.get_opserver_vns()) == 1

        # Resource lookup raising HTTP 403 must surface as a 403 response.
        mock_is_role_cloud_admin.return_value = False
        mock_get_resource_list_from_uve_type.side_effect = bottle.HTTPResponse(
            status=403)
        resp = analytics.get_opserver_vns_response()
        assert resp.status_code == 403
        return True
Code example #2
0
    def test_02_message_table_query(self):
        """Start redis, vizd, opserver and qed on the test class'
        cassandra instance, then check that the collector UVE (via
        redis) and syslog (via cassandra) can be accessed from opserver.
        """
        logging.info("*** test_02_message_table_query ***")
        if AnalyticsTest._check_skip_test() is True:
            return True

        vizd_obj = self.useFixture(
            AnalyticsFixture(logging, builddir, self.__class__.cassandra_port))
        assert vizd_obj.verify_on_setup()
        # Exercise every MessageTable query variant against the fixture.
        checks = (
            vizd_obj.verify_collector_obj_count,
            vizd_obj.verify_message_table_moduleid,
            vizd_obj.verify_message_table_select_uint_type,
            vizd_obj.verify_message_table_messagetype,
            vizd_obj.verify_message_table_where_or,
            vizd_obj.verify_message_table_where_and,
            vizd_obj.verify_message_table_filter,
            vizd_obj.verify_message_table_sort,
        )
        for check in checks:
            assert check()
        return True
Code example #3
0
 def test_18_verify_object_table_query(self):
     '''
     This test verifies the Object Table query.

     Sends sandesh object logs from a contrail-control generator and
     checks they are queryable from the ObjectBgpRouter table, then
     sends VM UVEs from a contrail-vrouter-agent generator and checks
     that ObjectId values are returned for ObjectVMTable queries.
     '''
     logging.info('%%% test_18_verify_object_table_query %%%')
     vizd_obj = self.useFixture(
         AnalyticsFixture(logging, builddir,
                          self.__class__.cassandra_port))
     assert vizd_obj.verify_on_setup()
     collectors = [vizd_obj.get_collector()]
     # Control-node generator; system_logs_rate_limit caps the log rate.
     generator_obj = self.useFixture(
         GeneratorFixture('contrail-control', collectors,
                          logging, None, node_type='Control',
                          sandesh_config={'system_logs_rate_limit':10}))
     assert generator_obj.verify_on_setup()
     # Send one object log per sandesh type and verify each type shows
     # up in the ObjectBgpRouter table for this host.
     msg_types = generator_obj.send_sandesh_types_object_logs(
                     socket.getfqdn("127.0.0.1"))
     assert vizd_obj.verify_object_table_sandesh_types('ObjectBgpRouter',
             socket.getfqdn("127.0.0.1"), msg_types)
     # Check if ObjectId's can be fetched properly for ObjectTable queries
     vm_generator_obj = self.useFixture(
         GeneratorFixture("contrail-vrouter-agent", collectors,
                          logging, vizd_obj.get_opserver_port()))
     assert vm_generator_obj.verify_on_setup()
     assert vizd_obj.verify_object_table_objectid_values('ObjectBgpRouter',
         [socket.getfqdn("127.0.0.1")])
     vm1_name = 'vm1'
     vm2_name = 'vm2'
     # Two VM UVEs -> both VM ids should appear as ObjectVMTable ObjectIds.
     vm_generator_obj.send_vm_uve(vm_id=vm1_name,
                               num_vm_ifs=2,
                               msg_count=2)
     vm_generator_obj.send_vm_uve(vm_id=vm2_name,
                               num_vm_ifs=2,
                               msg_count=2)
     assert vizd_obj.verify_object_table_objectid_values('ObjectVMTable',
         [vm1_name, vm2_name])
Code example #4
0
    def test_05_collector_ha(self):
        '''
        Verify collector high availability.

        OpServer, QueryEngine and a VRouterAgent generator must fail
        over between primary and secondary collectors as the collectors
        are stopped and restarted; redis must be flushed of stale
        generator entries on collector restart; and a UVE sent while
        both collectors are down must be synced on reconnect.
        '''
        logging.info('*** test_05_collector_ha ***')
        if AnalyticsTest._check_skip_test() is True:
            return True

        vizd_obj = self.useFixture(
            AnalyticsFixture(logging,
                             builddir,
                             self.__class__.cassandra_port,
                             collector_ha_test=True))
        assert vizd_obj.verify_on_setup()
        assert vizd_obj.verify_collector_obj_count()
        # OpServer and QueryEngine are started with collectors[0] as
        # primary and collectors[1] as secondary
        exp_genlist = ['Collector', 'OpServer', 'QueryEngine']
        assert vizd_obj.verify_generator_list(vizd_obj.collectors[0],
                                              exp_genlist)
        # start the VRouterAgent with collectors[1] as primary and
        # collectors[0] as secondary
        collectors = [
            vizd_obj.collectors[1].get_addr(),
            vizd_obj.collectors[0].get_addr()
        ]
        vr_agent = self.useFixture(
            GeneratorFixture("VRouterAgent", collectors, logging,
                             vizd_obj.get_opserver_port()))
        assert vr_agent.verify_on_setup()
        exp_genlist = ['Collector', 'VRouterAgent']
        assert vizd_obj.verify_generator_list(vizd_obj.collectors[1],
                                              exp_genlist)
        # stop collectors[0] and verify that OpServer and QE switch
        # from primary to secondary collector
        vizd_obj.collectors[0].stop()
        exp_genlist = ['Collector', 'VRouterAgent', 'OpServer', 'QueryEngine']
        assert vizd_obj.verify_generator_list(vizd_obj.collectors[1],
                                              exp_genlist)
        # start collectors[0]
        vizd_obj.collectors[0].start()
        exp_genlist = ['Collector']
        assert vizd_obj.verify_generator_list(vizd_obj.collectors[0],
                                              exp_genlist)
        # verify that the old UVEs are flushed from redis when collector restarts
        exp_genlist = [vizd_obj.collectors[0].get_generator_id()]
        assert vizd_obj.verify_generator_list_in_redis(\
                                vizd_obj.collectors[0].get_redis_uve(),
                                exp_genlist)

        # stop collectors[1] and verify that OpServer and QE switch
        # from secondary to primary and VRouterAgent from primary to
        # secondary
        vizd_obj.collectors[1].stop()
        exp_genlist = ['Collector', 'VRouterAgent', 'OpServer', 'QueryEngine']
        assert vizd_obj.verify_generator_list(vizd_obj.collectors[0],
                                              exp_genlist)
        # verify the generator list in redis
        exp_genlist = [
            vizd_obj.collectors[0].get_generator_id(),
            vr_agent.get_generator_id(),
            vizd_obj.opserver.get_generator_id(),
            vizd_obj.query_engine.get_generator_id()
        ]
        assert vizd_obj.verify_generator_list_in_redis(\
                                vizd_obj.collectors[0].get_redis_uve(),
                                exp_genlist)

        # stop Opserver and QE
        vizd_obj.opserver.stop()
        vizd_obj.query_engine.stop()
        exp_genlist = ['Collector', 'VRouterAgent']
        assert vizd_obj.verify_generator_list(vizd_obj.collectors[0],
                                              exp_genlist)

        # verify the generator list in redis
        exp_genlist = [
            vizd_obj.collectors[0].get_generator_id(),
            vr_agent.get_generator_id()
        ]
        assert vizd_obj.verify_generator_list_in_redis(\
                                vizd_obj.collectors[0].get_redis_uve(),
                                exp_genlist)

        # start Opserver and QE with collectors[1] as the primary and
        # collectors[0] as the secondary. On generator startup, verify
        # that it connects to the secondary collector, if the
        # connection to the primary fails
        vizd_obj.opserver.set_primary_collector(
            vizd_obj.collectors[1].get_addr())
        vizd_obj.opserver.set_secondary_collector(
            vizd_obj.collectors[0].get_addr())
        vizd_obj.opserver.start()
        vizd_obj.query_engine.set_primary_collector(
            vizd_obj.collectors[1].get_addr())
        vizd_obj.query_engine.set_secondary_collector(
            vizd_obj.collectors[0].get_addr())
        vizd_obj.query_engine.start()
        exp_genlist = ['Collector', 'VRouterAgent', 'OpServer', 'QueryEngine']
        assert vizd_obj.verify_generator_list(vizd_obj.collectors[0],
                                              exp_genlist)
        # stop the collectors[0] - both collectors[0] and collectors[1] are down
        # send the VM UVE and verify that the VM UVE is synced after connection
        # to the collector
        vizd_obj.collectors[0].stop()
        # Make sure the connection to the collector is torn down before
        # sending the VM UVE
        # NOTE(review): busy-wait with no sleep; relies on
        # verify_on_setup() eventually returning False once the
        # connection drops - confirm it cannot spin forever.
        while True:
            if vr_agent.verify_on_setup() is False:
                break
        vr_agent.send_vm_uve(vm_id='abcd-1234-efgh-5678',
                             num_vm_ifs=5,
                             msg_count=5)
        vizd_obj.collectors[1].start()
        exp_genlist = ['Collector', 'VRouterAgent', 'OpServer', 'QueryEngine']
        assert vizd_obj.verify_generator_list(vizd_obj.collectors[1],
                                              exp_genlist)
        assert vr_agent.verify_vm_uve(vm_id='abcd-1234-efgh-5678',
                                      num_vm_ifs=5,
                                      msg_count=5)
Code example #5
0
    def test_01_statprefix(self):
        """Start redis, vizd, opserver and qed on the test class'
        cassandra instance, push test stats to the collector, and check
        that QE can retrieve them using prefix-suffix indexes.
        """
        logging.info("*** test_01_statprefix ***")
        if StatsTest._check_skip_test() is True:
            return True

        analytics = self.useFixture(
            AnalyticsFixture(logging, builddir, self.__class__.cassandra_port))
        assert analytics.verify_on_setup()
        assert analytics.verify_collector_obj_count()

        stats_gen = self.useFixture(
            StatsFixture("VRouterAgent", [analytics.get_collector()], logging,
                         analytics.get_opserver_port()))
        assert stats_gen.verify_on_setup()

        logging.info("Starting stat gen " + str(UTCTimestampUsec()))

        # (name, l1, s1, i1, d1[, s2, i2]) samples feeding the queries below.
        samples = [
            ("t010", "lxxx", "samp1", 1, 1),
            ("t010", "lyyy", "samp1", 2, 2),
            ("t010", "lyyy", "samp3", 2, 2, "", 5),
            ("t011", "lyyy", "samp2", 1, 1.1, "", 7),
            ("t011", "lxxx", "samp2", 2, 1.2),
            ("t011", "lxxx", "samp2", 2, 1.2, "", 9),
        ]
        for sample in samples:
            stats_gen.send_test_stat(*sample)

        logging.info("Checking Stats str-str " + str(UTCTimestampUsec()))

        # Composite string-string index: name|st.s1.
        assert stats_gen.verify_test_stat(
            "StatTable.StatTestState.st", "-2m",
            select_fields=["UUID", "st.s1", "st.i1", "st.d1"],
            where_clause='name|st.s1=t010|samp1',
            num=2,
            check_rows=[
                {"st.s1": "samp1", "st.i1": 2, "st.d1": 2},
                {"st.s1": "samp1", "st.i1": 1, "st.d1": 1},
            ])

        logging.info("Checking Stats int-int " + str(UTCTimestampUsec()))

        # Composite int-int index with a range bound on the suffix.
        assert stats_gen.verify_test_stat(
            "StatTable.StatTestState.st", "-2m",
            select_fields=["UUID", "st.s1", "st.i1", "st.d1"],
            where_clause='st.i1|st.i2=2|1<6',
            num=1,
            check_rows=[{"st.s1": "samp3", "st.i1": 2, "st.d1": 2}])

        logging.info("Checking CLASS " + str(UTCTimestampUsec()))

        # CLASS(T) over all six samples; expect 4 distinct classes.
        assert stats_gen.verify_test_stat(
            "StatTable.StatTestState.st", "-2m",
            select_fields=["T", "name", "l1", "CLASS(T)"],
            where_clause='name=*',
            num=6,
            check_uniq={"CLASS(T)": 4})

        return True
Code example #6
0
    def test_00_basicsamples(self):
        """Start redis, vizd, opserver and qed on the test class'
        cassandra instance, send dynamic test stats to the collector,
        and check that they can be queried back through QE.
        """
        logging.info("%%% test_00_basicsamples %%%")
        if StatsTest._check_skip_test() is True:
            return True

        analytics = self.useFixture(
            AnalyticsFixture(logging, builddir, self.__class__.cassandra_port))
        assert analytics.verify_on_setup()
        assert analytics.verify_collector_obj_count()

        stats_gen = self.useFixture(
            StatsFixture("VRouterAgent", [analytics.get_collector()], logging,
                         analytics.get_opserver_port()))
        assert stats_gen.verify_on_setup()

        logging.info("Starting stat gen " + str(UTCTimestampUsec()))

        # Plain samples plus names/values containing '&' and '>' to make
        # sure special characters survive the round trip.
        stats_gen.send_test_stat_dynamic("t00", "samp1", 1, 1)
        stats_gen.send_test_stat_dynamic("t00", "samp2", 2, 1.1)
        stats_gen.send_test_stat_dynamic("t00", "samp3", 1, -5062)
        stats_gen.send_test_stat_dynamic("t00&t01", "samp1&samp2", 2, 1.1)
        stats_gen.send_test_stat_dynamic("t00>t01>", "samp1&samp2", 2, 1.1,
                                         "&test_s2>")

        logging.info("Checking Stats " + str(UTCTimestampUsec()))

        # All three plain "t00" samples come back.
        assert stats_gen.verify_test_stat(
            "StatTable.TestStateDynamic.ts", "-2m",
            select_fields=["UUID", "ts.s1", "ts.i1", "ts.d1"],
            where_clause='name="t00"',
            num=3,
            check_rows=[
                {"ts.s1": "samp2", "ts.i1": 2, "ts.d1": 1.1},
                {"ts.s1": "samp1", "ts.i1": 1, "ts.d1": 1},
                {"ts.s1": "samp3", "ts.i1": 1, "ts.d1": -5062},
            ])
        # Lookup by a name containing '&'.
        assert stats_gen.verify_test_stat(
            "StatTable.TestStateDynamic.ts", "-2m",
            select_fields=["UUID", "ts.s1", "ts.s2"],
            where_clause='name="t00&t01"',
            num=1,
            check_rows=[{"ts.s1": "samp1&samp2", "ts.s2": ""}])
        # Lookup by a value containing '&'; matches both senders above.
        assert stats_gen.verify_test_stat(
            "StatTable.TestStateDynamic.ts", "-2m",
            select_fields=["UUID", "name", "ts.s2"],
            where_clause='ts.s1="samp1&samp2"',
            num=2,
            check_rows=[
                {"name": "t00&t01", "ts.s2": ""},
                {"name": "t00>t01>", "ts.s2": "&test_s2>"},
            ])

        return True
Code example #7
0
    def test_06_stats_filter(self):
        '''
        This test starts redis,vizd,opserver and qed
        It uses the test class' cassandra instance
        Then it sends test stats to the collector
        and checks if all filter operations work properly.

        Four samples are inserted (l1 in {lxxx, lyyy}, st.i1 in
        {10, 20, 500, 1000}, st.d1 in {12, 12.6, 2.345, 15.789}); each
        query below applies one filter operator and checks the exact
        surviving rows.
        '''
        logging.info("%%% test_06_stats_filter %%%")
        if StatsTest._check_skip_test() is True:
            return True

        vizd_obj = self.useFixture(
            AnalyticsFixture(logging, builddir, self.__class__.cassandra_port))
        assert vizd_obj.verify_on_setup()
        assert vizd_obj.verify_collector_obj_count()
        collectors = [vizd_obj.get_collector()]

        generator_obj = self.useFixture(
            StatsFixture("VRouterAgent", collectors, logging,
                         vizd_obj.get_opserver_port()))
        assert generator_obj.verify_on_setup()

        logging.info("Starting stat gen " + str(UTCTimestampUsec()))

        generator_obj.send_test_stat("name0", "lxxx", "samp1", 10, 12)
        generator_obj.send_test_stat("name0", "lxxx", "samp2", 20, 12.6)
        generator_obj.send_test_stat("name0", "lyyy", "samp1", 500, 2.345)
        generator_obj.send_test_stat("name0", "lyyy", "samp2", 1000, 15.789)

        # verify that all the stats messages are added in the analytics db
        # before starting the filter tests
        assert generator_obj.verify_test_stat(
            "StatTable.StatTestState.st",
            "-2m",
            select_fields=["UUID", "l1", "st.s1", "st.i1", "st.d1"],
            where_clause='name=name0',
            num=4,
            check_rows=[{
                "l1": "lxxx",
                "st.s1": "samp1",
                "st.i1": 10,
                "st.d1": 12
            }, {
                "l1": "lxxx",
                "st.s1": "samp2",
                "st.i1": 20,
                "st.d1": 12.6
            }, {
                "l1": "lyyy",
                "st.s1": "samp1",
                "st.i1": 500,
                "st.d1": 2.345
            }, {
                "l1": "lyyy",
                "st.s1": "samp2",
                "st.i1": 1000,
                "st.d1": 15.789
            }])

        # String equality filter: only the two l1 == lxxx rows survive.
        assert generator_obj.verify_test_stat(
            "StatTable.StatTestState.st",
            "-2m",
            select_fields=["UUID", "l1", "st.s1", "st.i1", "st.d1"],
            where_clause='name=name0',
            num=2,
            check_rows=[{
                "l1": "lxxx",
                "st.s1": "samp1",
                "st.i1": 10,
                "st.d1": 12
            }, {
                "l1": "lxxx",
                "st.s1": "samp2",
                "st.i1": 20,
                "st.d1": 12.6
            }],
            filt="l1 = lxxx")

        # Integer equality filter: exactly the st.i1 == 500 row.
        assert generator_obj.verify_test_stat(
            "StatTable.StatTestState.st",
            "-2m",
            select_fields=["UUID", "l1", "st.s1", "st.i1", "st.d1"],
            where_clause='name=name0',
            num=1,
            check_rows=[{
                "l1": "lyyy",
                "st.s1": "samp1",
                "st.i1": 500,
                "st.d1": 2.345
            }],
            filt="st.i1 = 500")

        # Integer <= filter: keeps the st.i1 == 10 and 20 rows.
        assert generator_obj.verify_test_stat(
            "StatTable.StatTestState.st",
            "-2m",
            select_fields=["UUID", "l1", "st.s1", "st.i1", "st.d1"],
            where_clause='name=name0',
            num=2,
            check_rows=[{
                "l1": "lxxx",
                "st.s1": "samp1",
                "st.i1": 10,
                "st.d1": 12
            }, {
                "l1": "lxxx",
                "st.s1": "samp2",
                "st.i1": 20,
                "st.d1": 12.6
            }],
            filt="st.i1 <= 400")

        # Integer >= filter: keeps the st.i1 == 500 and 1000 rows.
        assert generator_obj.verify_test_stat(
            "StatTable.StatTestState.st",
            "-2m",
            select_fields=["UUID", "l1", "st.s1", "st.i1", "st.d1"],
            where_clause='name=name0',
            num=2,
            check_rows=[{
                "l1": "lyyy",
                "st.s1": "samp1",
                "st.i1": 500,
                "st.d1": 2.345
            }, {
                "l1": "lyyy",
                "st.s1": "samp2",
                "st.i1": 1000,
                "st.d1": 15.789
            }],
            filt="st.i1 >= 500")

        # Double <= filter: only st.d1 == 2.345 is <= 3.
        assert generator_obj.verify_test_stat(
            "StatTable.StatTestState.st",
            "-2m",
            select_fields=["UUID", "l1", "st.s1", "st.i1", "st.d1"],
            where_clause='name=name0',
            num=1,
            check_rows=[{
                "l1": "lyyy",
                "st.s1": "samp1",
                "st.i1": 500,
                "st.d1": 2.345
            }],
            filt="st.d1 <= 3")

        # Double >= filter: keeps the 12.6 and 15.789 rows.
        assert generator_obj.verify_test_stat(
            "StatTable.StatTestState.st",
            "-2m",
            select_fields=["UUID", "l1", "st.s1", "st.i1", "st.d1"],
            where_clause='name=name0',
            num=2,
            check_rows=[{
                "l1": "lxxx",
                "st.s1": "samp2",
                "st.i1": 20,
                "st.d1": 12.6
            }, {
                "l1": "lyyy",
                "st.s1": "samp2",
                "st.i1": 1000,
                "st.d1": 15.789
            }],
            filt="st.d1 >= 12.5")
Code example #8
0
    def test_14_verify_qe_stats_collection(self):
        '''
        This test checks if the QE is able to collect the stats
        related to DB reads correctly.

        For both MessageTable and the StatTestState:st stats table it
        snapshots the QE's read counter, issues a query, and asserts the
        counter increased.
        '''
        logging.info('%%% test_14_verify_qe_stats_collection %%%')
        analytics = self.useFixture(
            AnalyticsFixture(logging, builddir, self.__class__.cassandra_port))
        assert analytics.verify_on_setup()
        # make stat table entries also
        collectors = [analytics.get_collector()]
        generator_obj = self.useFixture(
            StatsFixture("VRouterAgent", collectors, logging,
                         analytics.get_opserver_port()))
        assert generator_obj.verify_on_setup()

        logging.info("Starting stat gen " + str(UTCTimestampUsec()))

        generator_obj.send_test_stat("t010", "lxxx", "samp1", 1, 1)
        generator_obj.send_test_stat("t010", "lyyy", "samp1", 2, 2)
        # First stats query; this populates the QE's stat-table read counter.
        assert generator_obj.verify_test_stat(
            "StatTable.StatTestState.st",
            "-2m",
            select_fields=["UUID", "st.s1", "st.i1", "st.d1"],
            where_clause='name|st.s1=t010|samp1',
            num=2,
            check_rows=[{
                "st.s1": "samp1",
                "st.i1": 2,
                "st.d1": 2
            }, {
                "st.s1": "samp1",
                "st.i1": 1,
                "st.d1": 1
            }])
        # Get the current read stats for MessageTable
        old_reads = analytics.get_db_read_stats_from_qe(
            analytics.query_engine, 'MessageTable')
        # read some data from message table and issue the query again
        assert analytics.verify_message_table_moduleid()
        new_reads = analytics.get_db_read_stats_from_qe(
            analytics.query_engine, 'MessageTable')
        # The message-table query must have bumped the read counter.
        assert (old_reads < new_reads)
        # Get the current read stats for stats table
        old_reads = analytics.get_db_read_stats_from_qe(
            analytics.query_engine, 'StatTestState:st', True)
        # Non-zero because of the stats query issued above.
        assert (old_reads > 0)
        # Re-issue the same stats query and check the counter advanced.
        assert generator_obj.verify_test_stat(
            "StatTable.StatTestState.st",
            "-2m",
            select_fields=["UUID", "st.s1", "st.i1", "st.d1"],
            where_clause='name|st.s1=t010|samp1',
            num=2,
            check_rows=[{
                "st.s1": "samp1",
                "st.i1": 2,
                "st.d1": 2
            }, {
                "st.s1": "samp1",
                "st.i1": 1,
                "st.d1": 1
            }])
        new_reads = analytics.get_db_read_stats_from_qe(
            analytics.query_engine, 'StatTestState:st', True)
        assert (new_reads > old_reads)
Code example #9
0
    def test_13_verify_sandesh_ssl(self):
        '''
        This test enables sandesh ssl on contrail-collector and all the
        analytics generators in the AnalyticsFixture and verifies that the
        secure sandesh connection is established between the Collector and all
        the Generators.
        '''
        logging.info('%%% test_13_verify_sandesh_ssl %%%')
        # Shared SSL config applied to the Collector and to generators
        # that should be able to connect to it.
        sandesh_cfg = {
            'sandesh_keyfile':
            builddir + '/opserver/test/data/ssl/server-privkey.pem',
            'sandesh_certfile':
            builddir + '/opserver/test/data/ssl/server.pem',
            'sandesh_ca_cert':
            builddir + '/opserver/test/data/ssl/ca-cert.pem',
            'sandesh_ssl_enable': 'True'
        }
        vizd_obj = self.useFixture(
            AnalyticsFixture(logging,
                             builddir,
                             self.__class__.cassandra_port,
                             sandesh_config=sandesh_cfg))
        assert vizd_obj.verify_on_setup()
        assert vizd_obj.verify_collector_obj_count()

        # With SSL on everywhere, all analytics daemons connect.
        source = socket.gethostname()
        exp_genlist = [
            source + ':Analytics:contrail-collector:0',
            source + ':Analytics:contrail-analytics-api:0',
            source + ':Analytics:contrail-query-engine:0'
        ]
        assert vizd_obj.verify_generator_list(vizd_obj.collectors, exp_genlist)

        # start a python generator without enabling sandesh ssl
        # and verify that it is not connected to the Collector.
        test_gen1 = self.useFixture(
            GeneratorFixture("contrail-test-generator1",
                             vizd_obj.get_collectors(), logging,
                             vizd_obj.get_opserver_port()))
        assert not test_gen1.verify_on_setup()

        # start a python generator with sandesh_ssl_enable = True
        # and verify that it is connected to the Collector.
        test_gen2 = self.useFixture(
            GeneratorFixture("contrail-test-generator2",
                             vizd_obj.get_collectors(),
                             logging,
                             vizd_obj.get_opserver_port(),
                             sandesh_config=sandesh_cfg))
        assert test_gen2.verify_on_setup()

        # stop QE and verify the generator list
        vizd_obj.query_engine.stop()
        exp_genlist = [
            source + ':Analytics:contrail-collector:0',
            source + ':Analytics:contrail-analytics-api:0',
            source + ':Test:contrail-test-generator2:0'
        ]
        assert vizd_obj.verify_generator_list(vizd_obj.collectors, exp_genlist)

        # Start QE with sandesh_ssl_enable = False and verify that the
        # QE is not connected to the Collector
        vizd_obj.query_engine.set_sandesh_config(None)
        vizd_obj.query_engine.start()
        assert not vizd_obj.verify_generator_collector_connection(
            vizd_obj.query_engine.http_port)
        assert vizd_obj.verify_generator_list(vizd_obj.collectors, exp_genlist)

        # Restart Collector with sandesh_ssl_enable = False and verify the
        # generator list in the Collector.
        # Now the SSL-enabled generators (test_gen2, opserver) can no
        # longer connect, while the non-SSL ones (QE, test_gen1) can.
        vizd_obj.collectors[0].stop()
        vizd_obj.collectors[0].set_sandesh_config(None)
        vizd_obj.collectors[0].start()
        assert not vizd_obj.verify_generator_collector_connection(
            test_gen2._http_port)
        assert not vizd_obj.verify_generator_collector_connection(
            vizd_obj.opserver.http_port)
        exp_genlist = [
            source + ':Analytics:contrail-collector:0',
            source + ':Analytics:contrail-query-engine:0',
            source + ':Test:contrail-test-generator1:0'
        ]
        assert vizd_obj.verify_generator_list(vizd_obj.collectors, exp_genlist)
Code example #10
0
    def test_07_container_samples(self):
        '''
        This test starts redis,vizd,opserver and qed
        It uses the test class' cassandra instance
        Then it sends test stats to the collector
        and checks if they can be accessed from QE.

        The stats carry container attributes (a map st.m1 and lists
        st.l1/st.l2); the queries exercise CONTAINS on lists, selecting
        a single map key, and wildcard matching on a map value.
        '''
        logging.info("%%% test_07_container_samples %%%")
        if StatsTest._check_skip_test() is True:
            return True

        vizd_obj = self.useFixture(
            AnalyticsFixture(logging, builddir, self.__class__.cassandra_port))
        assert vizd_obj.verify_on_setup()
        assert vizd_obj.verify_collector_obj_count()
        collectors = [vizd_obj.get_collector()]

        generator_obj = self.useFixture(
            StatsFixture("VRouterAgent", collectors, logging,
                         vizd_obj.get_opserver_port()))
        assert generator_obj.verify_on_setup()

        logging.info("Starting stat gen " + str(UTCTimestampUsec()))

        # Arguments: name, s1, map m1, list l1, list l2, i1.
        generator_obj.send_test_stat_container("t07", "samp1", {
            "me1": "val1",
            "me2": "val2"
        }, ["le1", "le2"], ["se1", "se2"], 2)
        generator_obj.send_test_stat_container("t07", "samp1", {
            "me1": "val2",
            "me3": "val3"
        }, ["le1", "le2", "le4"], ["se3", "se4"], 2)
        generator_obj.send_test_stat_container("t07", "samp2", {
            "me3": "val3",
            "me2": "val2"
        }, ["le3", "le5"], ["se3", "se7"], 2)
        generator_obj.send_test_stat_container("t07", "samp3", {
            "me1": "val1",
            "me2": "val3",
            "me3": "val3"
        }, ["le1"], ["se3", "se2"], 2)
        generator_obj.send_test_stat_container("t07", "samp4", {"me4": "val4"},
                                               ["le3", "le7"], ["se3", "se5"],
                                               2)

        logging.info("Checking Stats " + str(UTCTimestampUsec()))

        # CONTAINS on list st.l1: the two samp1 rows carry le2.
        assert generator_obj.verify_test_stat(
            "StatTable.StatTestContainerState.st",
            "-2m",
            select_fields=["UUID", "st.s1", "st.m1", "st.i1"],
            where_clause='name="t07" AND st.l1 CONTAINS le2',
            num=2,
            check_rows=[{
                "st.s1": "samp1",
                "st.m1": "{\"me1\":\"val1\", \"me2\":\"val2\"}",
                "st.i1": 2
            }, {
                "st.s1": "samp1",
                "st.m1": "{\"me1\":\"val2\", \"me3\":\"val3\"}",
                "st.i1": 2
            }])
        # CONTAINS on list st.l2, selecting a single map key (st.m1.me2).
        assert generator_obj.verify_test_stat(
            "StatTable.StatTestContainerState.st",
            "-2m",
            select_fields=["UUID", "st.s1", "st.m1.me2", "st.i1"],
            where_clause='name="t07" AND st.l2 CONTAINS se2',
            num=2,
            check_rows=[{
                "st.s1": "samp1",
                "st.m1.me2": "val2",
                "st.i1": 2
            }, {
                "st.s1": "samp3",
                "st.m1.me2": "val3",
                "st.i1": 2
            }])
        # Wildcard match on a map value: rows whose me2 starts with "val".
        assert generator_obj.verify_test_stat(
            "StatTable.StatTestContainerState.st",
            "-2m",
            select_fields=["UUID", "st.s1", "st.l1", "st.l2", "st.i1"],
            where_clause='name="t07" AND st.m1.me2=val*',
            num=3,
            check_rows=[{
                "st.s1": "samp1",
                "st.l1": "le1; le2",
                "st.l2": "se1; se2",
                "st.i1": 2
            }, {
                "st.s1": "samp2",
                "st.l1": "le3; le5",
                "st.l2": "se3; se7",
                "st.i1": 2
            }, {
                "st.s1": "samp3",
                "st.l1": "le1",
                "st.l2": "se3; se2",
                "st.i1": 2
            }])

        return True
Code example #11
0
    def test_03_aggregate_query(self):
        """Start redis, vizd, opserver and qed on the test class'
        cassandra instance, insert rows into the stat table, and verify
        aggregate functions (MAX, MIN, AVG, SUM, COUNT_DISTINCT,
        PERCENTILES) over them.
        """
        logging.info("%%% test_03_aggregate_query %%%")
        if StatsTest._check_skip_test() is True:
            return True
        analytics = self.useFixture(
            AnalyticsFixture(logging, builddir, self.__class__.cassandra_port))
        assert analytics.verify_on_setup()
        assert analytics.verify_collector_obj_count()

        stats_gen = self.useFixture(
            StatsFixture("VRouterAgent", [analytics.get_collector()], logging,
                         analytics.get_opserver_port()))
        assert stats_gen.verify_on_setup()

        logging.info("Starting stat gen " + str(UTCTimestampUsec()))

        stats_gen.send_test_stat("t04", "lxxx", "samp1", 1, 5)
        stats_gen.send_test_stat("t04", "lyyy", "samp1", 4, 3.4)
        stats_gen.send_test_stat("t04", "lyyy", "samp1", 2, 4, "", 5)
        stats_gen.send_test_stat("t04", "lyyy", "samp2", 3, 7.2)

        logging.info("Checking Stats " + str(UTCTimestampUsec()))
        # MAX/AVG over st.i1 for the three samp1 rows (1, 4, 2);
        # PERCENTILES comes back as None here.
        assert stats_gen.verify_test_stat(
            "StatTable.StatTestState.st", "-2m",
            select_fields=["MAX(st.i1)", "PERCENTILES(st.i1)", "AVG(st.i1)"],
            where_clause='name|st.s1=t04|samp1',
            num=1,
            check_rows=[{u'MAX(st.i1)': 4,
                         u'PERCENTILES(st.i1)': None,
                         u'AVG(st.i1)': 2.33333}])

        # MIN/AVG over st.d1 for the same rows (5, 3.4, 4).
        assert stats_gen.verify_test_stat(
            "StatTable.StatTestState.st", "-2m",
            select_fields=["MIN(st.d1)", "AVG(st.d1)"],
            where_clause='name|st.s1=t04|samp1',
            num=1,
            check_rows=[{u'MIN(st.d1)': 3.4,
                         u'AVG(st.d1)': 4.13333}])

        # Grouped by l1: lxxx has one sample (i1 sum 1), lyyy has three
        # samples over two distinct s1 values (i1 sum 4+2+3 = 9).
        assert stats_gen.verify_test_stat(
            "StatTable.StatTestState.st", "-2m",
            select_fields=["l1", "COUNT_DISTINCT(st.s1)", "SUM(st.i1)"],
            where_clause='name=t04',
            num=2,
            check_rows=[{'l1': 'lxxx',
                         u'COUNT_DISTINCT(st.s1)': 1,
                         u'SUM(st.i1)': 1},
                        {'l1': 'lyyy',
                         u'COUNT_DISTINCT(st.s1)': 2,
                         u'SUM(st.i1)': 9}])

        return True
コード例 #12
0
    def test_00_startup(self):
        '''
        This test loads the pre existing data into cassandra and does
        queries to opserver.
        The idea is to use this to monitor/improve qed performance:
        each query's result size and wall-clock latency are logged.
        '''
        logging.info("%%% test_00_startup %%%")
        # Truthiness check instead of '== True' comparison.
        if AnalyticsTest._check_skip_test():
            return True

        vizd_obj = self.useFixture(AnalyticsFixture(
            logging, builddir, self.__class__.cassandra_port, True))
        assert vizd_obj.verify_on_setup()

        assert AnalyticsTest._load_data_into_cassandra(
            self.__class__.cassandra_port)

        # 'SystemObjectTable' is not getting the updated timestamp when the
        # canned data is loaded, so the analytics start time is hardcoded to
        # match the timestamps in the pre-existing data set instead of being
        # read back from cassandra.
        analytics_start_time = 1387254916542720

        vizd_obj.query_engine.start(analytics_start_time)

        opserver_port = vizd_obj.get_opserver_port()

        vns = VerificationOpsSrv('127.0.0.1', opserver_port)

        def _post_and_log(a_query):
            # Serialize the query, post it to opserver, and log the result
            # length, the query JSON and the elapsed wall-clock time.
            json_qstr = json.dumps(a_query.__dict__)
            t1 = time.time()
            res = vns.post_query_json(json_qstr)
            t2 = time.time()
            logging.info("Result Length: " + str(len(res)))
            logging.info("Query: " + json_qstr)
            logging.info("Time(s): " + str(t2 - t1))

        # message table
        _post_and_log(Query(table="MessageTable",
                            start_time=analytics_start_time + 5 * 60 * 1000000,
                            end_time=analytics_start_time + 10 * 60 * 1000000,
                            select_fields=[
                                "MessageTS", "Source", "ModuleId",
                                "Messagetype", "Xmlmessage"
                            ],
                            sort_fields=["MessageTS"],
                            sort=1))

        # flow series table aggregation on a tuple
        _post_and_log(Query(table="FlowSeriesTable",
                            start_time=analytics_start_time + 40 * 60 * 1000000,
                            end_time=analytics_start_time + 100 * 60 * 1000000,
                            select_fields=[
                                "sourcevn", "sourceip", "destvn", "destip",
                                "sum(bytes)"
                            ]))

        # flow series table port distribution table: OR of three
        # (protocol AND sourcevn) terms, sorted descending on sum(bytes)
        _post_and_log(Query(
            table="FlowSeriesTable",
            start_time=analytics_start_time + 40 * 60 * 1000000,
            end_time=analytics_start_time + 100 * 60 * 1000000,
            select_fields=["dport", "protocol", "flow_count", "sum(bytes)"],
            sort=2,
            sort_fields=["sum(bytes)"],
            where=[[{
                "name": "protocol",
                "value": 1,
                "op": 1
            }, {
                "name": "sourcevn",
                "value": "default-domain:demo:vn0",
                "op": 1
            }],
                   [{
                       "name": "protocol",
                       "value": 6,
                       "op": 1
                   }, {
                       "name": "sourcevn",
                       "value": "default-domain:demo:vn0",
                       "op": 1
                   }],
                   [{
                       "name": "protocol",
                       "value": 17,
                       "op": 1
                   }, {
                       "name": "sourcevn",
                       "value": "default-domain:demo:vn0",
                       "op": 1
                   }]]))

        # flow series map
        _post_and_log(Query(table="FlowSeriesTable",
                            start_time=analytics_start_time + 40 * 60 * 1000000,
                            end_time=analytics_start_time + 100 * 60 * 1000000,
                            select_fields=[
                                "sum(bytes)", "sum(packets)", "T=7",
                                "sourcevn", "flow_count"
                            ],
                            where=[[{
                                "name": "sourcevn",
                                "value": "default-domain:demo:vn0",
                                "op": 1
                            }]]))

        return True
コード例 #13
0
    def test_07_alarm(self):
        '''
        This test starts redis, collectors, analytics-api and
        python generators that simulates alarm generator. This
        test sends alarms from alarm generators and verifies the
        retrieval of alarms from analytics-api.
        '''
        logging.info('*** test_07_alarm ***')
        # collector_ha_test flag is set to True, because we want to test
        # retrieval of alarms across multiple redis servers.
        vizd_obj = self.useFixture(
            AnalyticsFixture(logging, builddir, -1, 0, collector_ha_test=True))
        assert vizd_obj.verify_on_setup()

        # create alarm-generator and attach it to the first collector.
        collectors = [
            vizd_obj.collectors[0].get_addr(),
            vizd_obj.collectors[1].get_addr()
        ]
        alarm_gen1 = self.useFixture(
            GeneratorFixture('contrail-alarm-gen', [collectors[0]],
                             logging,
                             None,
                             hostname=socket.gethostname() + '_1'))
        alarm_gen1.verify_on_setup()

        # send process state alarm for analytics-node
        alarms = alarm_gen1.create_process_state_alarm('contrail-query-engine')
        alarms += alarm_gen1.create_process_state_alarm(
            'contrail-snmp-collector')
        alarm_gen1.send_alarm(socket.gethostname() + '_1', alarms,
                              COLLECTOR_INFO_TABLE)
        analytics_tbl = _OBJECT_TABLES[COLLECTOR_INFO_TABLE].log_query_name

        # send process state alarm for control-node; the key deliberately
        # contains XML-special characters ('<', '&', '>') to exercise
        # escaping in the alarm path.
        alarms = alarm_gen1.create_process_state_alarm('contrail-dns')
        alarm_gen1.send_alarm('<&' + socket.gethostname() + '_1>', alarms,
                              BGP_ROUTER_TABLE)
        control_tbl = _OBJECT_TABLES[BGP_ROUTER_TABLE].log_query_name

        # create another alarm-generator and attach it to the second collector.
        alarm_gen2 = self.useFixture(
            GeneratorFixture('contrail-alarm-gen', [collectors[1]],
                             logging,
                             None,
                             hostname=socket.gethostname() + '_2'))
        alarm_gen2.verify_on_setup()

        # send process state alarm for analytics-node
        alarms = alarm_gen2.create_process_state_alarm('contrail-topology')
        alarm_gen2.send_alarm(socket.gethostname() + '_2', alarms,
                              COLLECTOR_INFO_TABLE)

        # verify that alarms from both generators (attached to different
        # collectors/redis instances) are retrievable from analytics-api.
        keys = [socket.gethostname() + '_1', socket.gethostname() + '_2']
        assert (vizd_obj.verify_alarms_table(analytics_tbl, keys))
        assert (vizd_obj.verify_alarm(
            analytics_tbl, keys[0],
            obj_to_dict(
                alarm_gen1.alarms[COLLECTOR_INFO_TABLE][keys[0]].data)))
        assert (vizd_obj.verify_alarm(
            analytics_tbl, keys[1],
            obj_to_dict(
                alarm_gen2.alarms[COLLECTOR_INFO_TABLE][keys[1]].data)))

        keys = ['<&' + socket.gethostname() + '_1>']
        assert (vizd_obj.verify_alarms_table(control_tbl, keys))
        assert (vizd_obj.verify_alarm(
            control_tbl, keys[0],
            obj_to_dict(alarm_gen1.alarms[BGP_ROUTER_TABLE][keys[0]].data)))

        # delete analytics-node alarm generated by alarm_gen2
        alarm_gen2.delete_alarm(socket.gethostname() + '_2',
                                COLLECTOR_INFO_TABLE)

        # verify analytics-node alarms: only alarm_gen1's entry remains;
        # the deleted key must now resolve to an empty alarm dict.
        keys = [socket.gethostname() + '_1']
        assert (vizd_obj.verify_alarms_table(analytics_tbl, keys))
        assert (vizd_obj.verify_alarm(
            analytics_tbl, keys[0],
            obj_to_dict(
                alarm_gen1.alarms[COLLECTOR_INFO_TABLE][keys[0]].data)))
        assert (vizd_obj.verify_alarm(analytics_tbl,
                                      socket.gethostname() + '_2', {}))

        # Disconnect alarm_gen1 from Collector and verify that all
        # alarms generated by alarm_gen1 is removed by the Collector.
        alarm_gen1.disconnect_from_collector()
        assert (vizd_obj.verify_alarms_table(analytics_tbl, []))
        assert (vizd_obj.verify_alarm(analytics_tbl,
                                      socket.gethostname() + '_1', {}))
        assert (vizd_obj.verify_alarms_table(control_tbl, []))
        assert (vizd_obj.verify_alarm(control_tbl,
                                      '<&' + socket.gethostname() + '_1', {}))

        # update analytics-node alarm in disconnect state
        alarms = alarm_gen1.create_process_state_alarm(
            'contrail-snmp-collector')
        alarm_gen1.send_alarm(socket.gethostname() + '_1', alarms,
                              COLLECTOR_INFO_TABLE)

        # Connect alarm_gen1 to Collector and verify that all
        # alarms generated by alarm_gen1 is synced with Collector.
        alarm_gen1.connect_to_collector()
        keys = [socket.gethostname() + '_1']
        assert (vizd_obj.verify_alarms_table(analytics_tbl, keys))
        assert (vizd_obj.verify_alarm(
            analytics_tbl, keys[0],
            obj_to_dict(
                alarm_gen1.alarms[COLLECTOR_INFO_TABLE][keys[0]].data)))

        keys = ['<&' + socket.gethostname() + '_1>']
        assert (vizd_obj.verify_alarms_table(control_tbl, keys))
        assert (vizd_obj.verify_alarm(
            control_tbl, keys[0],
            obj_to_dict(alarm_gen1.alarms[BGP_ROUTER_TABLE][keys[0]].data)))
コード例 #14
0
    def test_03_redis_uve_restart(self):
        logging.info('*** test_03_redis_uve_restart ***')

        vizd_obj = self.useFixture(AnalyticsFixture(logging, builddir, -1, 0))
        self.verify_uve_resync(vizd_obj)