    def test_view_api(self):
        """ Test some basic properties of the view api
        """
        job = setup_capture_job(self.shark)
        clip = create_trace_clip(self.shark, job)
        columns, filters = setup_defaults()
        with self.shark.create_view(clip,
                                    columns,
                                    filters,
                                    name='test_view_on_api') as v:
            legend = v.get_legend()
            for col in legend:
                # make sure we have name and description attributes
                self.assertTrue(col['name'])
                self.assertTrue(col['description'])

            for row in v.get_data():
                self.assertTrue(isinstance(row['t'], datetime.datetime))
                for val in row['vals']:
                    self.assertEqual(len(val), len(legend))

            # do an aggregated get_data()
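            # aggregation collapses the whole time range into at most one row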
            rows = v.get_data(aggregated=True)
            if len(rows) == 0:
                logger.warning('no data in view, cannot test aggregated get')
            self.assertIn(len(rows), (0, 1))
    def test_view_on_job(self):
        """ Test creating a view on a capture job """
        job = setup_capture_job(self.shark)
        columns, filters = setup_defaults()

        with self.shark.create_view(job,
                                    columns,
                                    None,
                                    name='test_view_on_job') as view:
            data = view.get_data()
            self.assertTrue(
                view.config['input_source']['path'].startswith('jobs'))

        #testing bug 111168
        #http://bugzilla.nbttech.com/show_bug.cgi?id=111168

        with self.shark.create_view(job, columns, filters,
                                    name='bug_111168') as view:
            data = view.get_data()

            self.assertTrue(
                view.config['input_source']['path'].startswith('jobs'))

        with self.shark.create_view(job,
                                    columns,
                                    [TimeFilter.parse_range('last 2 hours')],
                                    name='bug_111168_2') as view:
            data = view.get_data()

            self.assertTrue(
                view.config['input_source']['path'].startswith('jobs'))
            self.assertEqual(len(view.config['input_source']['filters']), 1)
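            # the parsed 'last 2 hours' range should be applied as a single
            # time filter spanning exactly two hours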
            time_filter = view.config['input_source']['filters'][0]
            self.assertEqual(time_filter.start + datetime.timedelta(hours=2),
                             time_filter.end)
    def test_async_view(self):
        """ Test creating an asynchronous view on a trace clip """
        job = setup_capture_job(self.shark)
        clip = create_trace_clip(self.shark, job)
        columns, filters = setup_defaults()
        with self.shark.create_view(clip, columns, filters,
                                    sync=False, name='test_async_view') as v:
            # with sync=False the view is applied asynchronously,
            # so poll until processing is complete
            while v.get_progress() < 100:
                time.sleep(1)
    def test_view_on_clip(self):
        """ Test creating a view on a trace clip """
        job = setup_capture_job(self.shark)
        clip = create_trace_clip(self.shark, job)
        columns, filters = setup_defaults()

        with self.shark.create_view(clip,
                                    columns,
                                    None,
                                    name='test_view_on_clip') as view:
            data = view.get_data()

            self.assertTrue(len(data) >= 0)
            self.assertTrue(
                view.config['input_source']['path'].startswith('clip'))
    def test_view_on_file(self):
        """ Test creating a view on a trace file """
        tracefile = create_tracefile(self.shark)
        columns, filters = setup_defaults()

        with self.shark.create_view(
                tracefile, columns,
                None,
                name='test_view_on_file') as view:
            data = view.get_data()
            try:
                self.assertTrue(len(data) > 0)
            except AssertionError:
                # this may fail on low-traffic machines
                pass
            self.assertTrue(
                view.config['input_source']['path'].startswith('fs'))
    def test_view_on_interface(self):
        """ Test creating a view on an interface """
        try:
            interface = self.shark.get_interface_by_name('mon0')
        except KeyError:
            interface = self.shark.get_interfaces()[0]
        columns, _ = setup_defaults()
        filters = None

        with self.shark.create_view(interface,
                                    columns,
                                    filters,
                                    name='test_view_interface',
                                    sync=True) as view:
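            # with sync=True create_view waits for the view to be applied,
            # so progress should already be at 100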
            progress = view.get_progress()
            data = view.get_data()
            ti = view.get_timeinfo()

            #self.assertTrue(ti['start'] > 0)
            #self.assertTrue(len(data) >= 0)
            self.assertEqual(progress, 100)
            self.assertTrue(
                view.config['input_source']['path'].startswith('interfaces'))
    def test_live_view_api(self):
        #test on live interface
        s = self.shark
        columns, filters = setup_defaults()
        interface = s.get_interfaces()[0]
        view = s.create_view(
            interface, columns, None, name='test_live_view', sync=True)

        # let the live view collect roughly 20 seconds of samples
        time.sleep(20)

        # timestamps are expressed in nanoseconds; build a 20-second
        # window starting at the view start time
        start = view.get_timeinfo()['start']
        onesec = 1000000000
        end = start + 20 * onesec

        data = view.get_data(start=start)
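        # build (sample 'p' value, timestamp, timestamp in nanoseconds) tuples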
        table = [(x['p'], x['t'],
                  timeutils.datetime_to_nanoseconds(x['t'])) for x in data]

        # XXX figure how to split these up into
        # separate tests without adding 20sec delay
        #     for each of them

        # this part needs to be redone since delta
        # is no longer accepted for aggregated calls

        # aggregate and compare against first row of data
        # print table
        # delta = table[0][2] - start + onesec
        # d = view.get_data(aggregated=True, delta=delta)
        # self.assertEqual(len(d), 1)
        # self.assertEqual(d[0]['p'], table[0][0])

        # # aggregate and compare against first two rows of data
        # # note extra onesec not needed here
        # delta = table[1][2] - start
        # d = view.get_data(aggregated=True, delta=delta)
        # self.assertEqual(len(d), 1)
        # self.assertEqual(d[0]['p'], table[0][0])

        if len(table) >= 2:
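            # an aggregated get_data() over an explicit start/end window should
            # return a single row whose 'p' value is the sum of the samples in
            # the half-open interval [start, end)
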
            # aggregate with start/end as last two samples
            #
            start = table[-2][2]
            end = table[-1][2]
            d = view.get_data(aggregated=True, start=start, end=end)
            self.assertEqual(len(d), 1)
            self.assertEqual(d[0]['p'], table[-2][0])

            # aggregate with start/end as first and last sample
            #  result is sum of samples without last one
            start = table[0][2]
            end = table[-1][2]
            d = view.get_data(aggregated=True, start=start, end=end)
            self.assertEqual(len(d), 1)
            self.assertEqual(d[0]['p'], sum(x[0] for x in table[:-1]))

        # # aggregate with start as second sample and delta to end of table
        # #
        # start = table[1][2]
        # delta = table[-1][2] - start
        # d = view.get_data(aggregated=True, start=start, delta=delta)
        # self.assertEqual(len(d), 1)
        # self.assertEqual(d[0]['p'], sum(x[0] for x in table[1:-1]))

        # # aggregate going backwards from last sample
        # #
        # end = table[-1][2]
        # delta = end - table[-3][2]
        # d = view.get_data(aggregated=True, end=end, delta=delta)
        # self.assertEqual(len(d), 1)
        # self.assertEqual(d[0]['p'], sum(x[0] for x in table[-3:-1]))

        view.close()