コード例 #1
0
    def _post_and_log_query(self, vns, a_query):
        '''
        Serialize a_query to JSON, POST it to the opserver through vns,
        and log the result length, the query JSON, and the elapsed
        wall-clock time.  Returns the query result.
        '''
        json_qstr = json.dumps(a_query.__dict__)
        t1 = time.time()
        res = vns.post_query_json(json_qstr)
        t2 = time.time()
        logging.info("Result Length: " + str(len(res)))
        logging.info("Query: " + json_qstr)
        logging.info("Time(s): " + str(t2 - t1))
        return res

    def test_00_startup(self):
        '''
        This test loads the pre existing data into cassandra and does
        queries to opserver
        The idea is to use this to monitor/improve qed performance
        '''
        logging.info("*** test_00_startup ***")
        if AnalyticsTest._check_skip_test():
            return True

        vizd_obj = self.useFixture(AnalyticsFixture(
            logging, builddir, self.__class__.cassandra_port, True))
        assert vizd_obj.verify_on_setup()

        assert AnalyticsTest._load_data_into_cassandra(
            self.__class__.cassandra_port)

        # 'SystemObjectTable' is not getting the updated timestamp, so
        # instead of reading SYSTEM_OBJECT_START_TIME for the
        # SYSTEM_OBJECT_ANALYTICS row out of cassandra, the analytics
        # start time is hardcoded to match the pre-loaded data set.
        analytics_start_time = 1387254916542720

        vizd_obj.query_engine.start(analytics_start_time)

        opserver_port = vizd_obj.get_opserver_port()
        vns = VerificationOpsSrv('127.0.0.1', opserver_port)

        # message table
        self._post_and_log_query(vns, Query(
            table="MessageTable",
            start_time=analytics_start_time + 5 * 60 * 1000000,
            end_time=analytics_start_time + 10 * 60 * 1000000,
            select_fields=["MessageTS", "Source", "ModuleId",
                           "Messagetype", "Xmlmessage"],
            sort_fields=["MessageTS"],
            sort=1))

        # flow series table aggregation on a tuple
        self._post_and_log_query(vns, Query(
            table="FlowSeriesTable",
            start_time=analytics_start_time + 40 * 60 * 1000000,
            end_time=analytics_start_time + 100 * 60 * 1000000,
            select_fields=["sourcevn", "sourceip", "destvn", "destip",
                           "sum(bytes)"]))

        # flow series table port distribution table:
        # one OR-term per protocol (ICMP=1, TCP=6, UDP=17 — TODO confirm),
        # each AND-ed with the source virtual network
        self._post_and_log_query(vns, Query(
            table="FlowSeriesTable",
            start_time=analytics_start_time + 40 * 60 * 1000000,
            end_time=analytics_start_time + 100 * 60 * 1000000,
            select_fields=["dport", "protocol", "flow_count", "sum(bytes)"],
            sort=2,
            sort_fields=["sum(bytes)"],
            where=[[{"name": "protocol", "value": proto, "op": 1},
                    {"name": "sourcevn",
                     "value": "default-domain:demo:vn0", "op": 1}]
                   for proto in (1, 6, 17)]))

        # flow series map
        self._post_and_log_query(vns, Query(
            table="FlowSeriesTable",
            start_time=analytics_start_time + 40 * 60 * 1000000,
            end_time=analytics_start_time + 100 * 60 * 1000000,
            select_fields=["sum(bytes)", "sum(packets)", "T=7", "sourcevn",
                           "flow_count"],
            where=[[{"name": "sourcevn",
                     "value": "default-domain:demo:vn0", "op": 1}]]))

        return True
コード例 #2
0
    def test_00_startup(self):
        '''
        This test loads the pre existing data into cassandra and does
        queries to opserver
        The idea is to use this to monitor/improve qed performance
        '''
        logging.info("%%% test_00_startup %%%")
        if AnalyticsTest._check_skip_test() == True:
            return True

        vizd_obj = self.useFixture(
            AnalyticsFixture(logging, builddir,
                             self.__class__.cassandra_port, True))
        assert vizd_obj.verify_on_setup()

        assert AnalyticsTest._load_data_into_cassandra(
            self.__class__.cassandra_port)

        # 'SystemObjectTable' does not carry an updated timestamp, so the
        # analytics start time is hardcoded to match the pre-loaded data
        # instead of being read back out of cassandra.
        analytics_start_time = 1387254916542720

        vizd_obj.query_engine.start(analytics_start_time)

        vns = VerificationOpsSrv('127.0.0.1', vizd_obj.get_opserver_port())

        # one minute expressed in microseconds
        minute = 60 * 1000000

        def run_query(query):
            # POST the query to the opserver, logging result size,
            # query JSON and elapsed time.
            qstr = json.dumps(query.__dict__)
            begin = time.time()
            result = vns.post_query_json(qstr)
            finish = time.time()
            logging.info("Result Length: " + str(len(result)))
            logging.info("Query: " + qstr)
            logging.info("Time(s): " + str(finish - begin))

        # message table
        run_query(Query(table="MessageTable",
                        start_time=analytics_start_time + 5 * minute,
                        end_time=analytics_start_time + 10 * minute,
                        select_fields=["MessageTS", "Source", "ModuleId",
                                       "Messagetype", "Xmlmessage"],
                        sort_fields=["MessageTS"],
                        sort=1))

        # flow series table aggregation on a tuple
        run_query(Query(table="FlowSeriesTable",
                        start_time=analytics_start_time + 40 * minute,
                        end_time=analytics_start_time + 100 * minute,
                        select_fields=["sourcevn", "sourceip", "destvn",
                                       "destip", "sum(bytes)"]))

        # flow series table port distribution table:
        # one OR-term per protocol value, each AND-ed with the source VN
        vn0 = {"name": "sourcevn",
               "value": "default-domain:demo:vn0",
               "op": 1}
        run_query(Query(table="FlowSeriesTable",
                        start_time=analytics_start_time + 40 * minute,
                        end_time=analytics_start_time + 100 * minute,
                        select_fields=["dport", "protocol", "flow_count",
                                       "sum(bytes)"],
                        sort=2,
                        sort_fields=["sum(bytes)"],
                        where=[[{"name": "protocol",
                                 "value": proto,
                                 "op": 1}, dict(vn0)]
                               for proto in (1, 6, 17)]))

        # flow series map
        run_query(Query(table="FlowSeriesTable",
                        start_time=analytics_start_time + 40 * minute,
                        end_time=analytics_start_time + 100 * minute,
                        select_fields=["sum(bytes)", "sum(packets)", "T=7",
                                       "sourcevn", "flow_count"],
                        where=[[dict(vn0)]]))

        return True