Example #1
 def to_dict(self):
     return {
         'type':
         'TIME',
         'value':
         str(timeutils.datetime_to_nanoseconds(self.start)) + ', ' +
         str(timeutils.datetime_to_nanoseconds(self.end))
     }
Example #2
 def to_dict(self):
     return {
         'type': 'TIME',
         'value': str(timeutils.datetime_to_nanoseconds(self.start)) +
         ', ' + str(timeutils.datetime_to_nanoseconds(self.end))
         }
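
Both formattings above serialize the same thing: a dict tagged 'TIME' whose 'value' packs the start and end timestamps as comma-separated nanosecond strings. Below is a minimal, self-contained sketch of that behavior; the TimeInterval host class and the local datetime_to_nanoseconds helper are illustrative stand-ins for the surrounding code and the timeutils API, not the real implementations.

import datetime

UTC = datetime.timezone.utc

def datetime_to_nanoseconds(dt):
    # stand-in for timeutils.datetime_to_nanoseconds: integer epoch ns
    delta = dt - datetime.datetime(1970, 1, 1, tzinfo=UTC)
    return ((delta.days * 86400 + delta.seconds) * 10**9 +
            delta.microseconds * 1000)

class TimeInterval(object):
    # hypothetical host class for the to_dict() variants shown above
    def __init__(self, start, end):
        self.start = start
        self.end = end

    def to_dict(self):
        return {
            'type': 'TIME',
            'value': str(datetime_to_nanoseconds(self.start)) + ', ' +
                     str(datetime_to_nanoseconds(self.end))
        }

iv = TimeInterval(datetime.datetime(2024, 1, 1, tzinfo=UTC),
                  datetime.datetime(2024, 1, 2, tzinfo=UTC))
print(iv.to_dict())
# {'type': 'TIME', 'value': '1704067200000000000, 1704153600000000000'}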
Example #3
    def test_live_view_api(self):
        # test on a live interface
        s = self.shark
        columns, filters = setup_defaults()
        interface = s.get_interfaces()[0]
        view = s.create_view(
            interface, columns, None, name='test_live_view', sync=True)

        time.sleep(20)
        # 20 seconds delta
        start = view.get_timeinfo()['start']
        onesec = 1000000000
        end = start + 20*onesec

        data = view.get_data(start=start)
        table = [(x['p'], x['t'],
                  timeutils.datetime_to_nanoseconds(x['t'])) for x in data]

        # XXX figure out how to split these up into separate
        # tests without adding a 20 sec delay for each of them

        # this part needs to be redone since delta
        # is no longer accepted for aggregated calls

        # aggregate and compare against first row of data
        # print table
        # delta = table[0][2] - start + onesec
        # d = view.get_data(aggregated=True, delta=delta)
        # self.assertEqual(len(d), 1)
        # self.assertEqual(d[0]['p'], table[0][0])

        # # aggregate and compare against first two rows of data
        # # note extra onesec not needed here
        # delta = table[1][2] - start
        # d = view.get_data(aggregated=True, delta=delta)
        # self.assertEqual(len(d), 1)
        # self.assertEqual(d[0]['p'], table[0][0])

        if len(table) >= 2:
            # aggregate with start/end as last two samples
            #
            start = table[-2][2]
            end = table[-1][2]
            d = view.get_data(aggregated=True, start=start, end=end)
            self.assertEqual(len(d), 1)
            self.assertEqual(d[0]['p'], table[-2][0])

            # aggregate with start/end as first and last sample;
            # result is the sum of all samples except the last
            start = table[0][2]
            end = table[-1][2]
            d = view.get_data(aggregated=True, start=start, end=end)
            self.assertEqual(len(d), 1)
            self.assertEqual(d[0]['p'], sum(x[0] for x in table[:-1]))

        # # aggregate with start as second sample and delta to end of table
        # #
        # start = table[1][2]
        # delta = table[-1][2] - start
        # d = view.get_data(aggregated=True, start=start, delta=delta)
        # self.assertEqual(len(d), 1)
        # self.assertEqual(d[0]['p'], sum(x[0] for x in table[1:-1]))

        # # aggregate going backwards from last sample
        # #
        # end = table[-1][2]
        # delta = end - table[-3][2]
        # d = view.get_data(aggregated=True, end=end, delta=delta)
        # self.assertEqual(len(d), 1)
        # self.assertEqual(d[0]['p'], sum(x[0] for x in table[-3:-1]))

        view.close()
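
The surviving assertions above pin down the aggregation semantics: aggregating with start equal to the first sample's timestamp and end equal to the last sample's yields the sum of every sample except the last, i.e. the end bound is exclusive. A pure-Python model of that half-open window follows (illustrative only, not the NetShark API):

ONESEC = 1000000000  # nanoseconds

def aggregate_packets(table, start, end):
    # table rows are (packets, time, time_ns) tuples as built in the test;
    # a sample whose timestamp equals `end` is excluded (half-open window)
    return sum(p for p, _, t_ns in table if start <= t_ns < end)

table = [(10, None, 0), (20, None, ONESEC), (30, None, 2 * ONESEC)]
# start/end as first and last sample: sum of all samples but the last
assert aggregate_packets(table, table[0][2], table[-1][2]) == 10 + 20
# start/end as last two samples: just the second-to-last sample
assert aggregate_packets(table, table[-2][2], table[-1][2]) == 20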
Example #4
    def run(self):
        """ Main execution method
        """
        criteria = self.job.criteria

        self.timeseries = False       # set when a 'time' key column is created
        self.column_names = []

        # Resolution comes in as a time_delta
        resolution = timedelta_total_seconds(criteria.resolution)

        default_delta = 1000000000                      # one second in ns
        self.delta = int(default_delta * resolution)    # sample interval in ns

        if criteria.netshark_device == '':
            logger.debug('%s: No netshark device selected' % self.table)
            self.job.mark_error("No NetShark Device Selected")
            return False

        shark = DeviceManager.get_device(criteria.netshark_device)

        logger.debug("Creating columns for NetShark table %d" % self.table.id)

        # Create Key/Value Columns
        columns = []
        for tc in self.table.get_columns(synthetic=False):
            tc_options = tc.options
            if (tc.iskey and tc.name == 'time' and
                    tc_options.extractor == 'sample_time'):
                # don't create column, use the sample time for timeseries
                self.timeseries = True
                self.column_names.append('time')
                continue
            elif tc.iskey:
                c = Key(tc_options.extractor,
                        description=tc.label,
                        default_value=tc_options.default_value)
            else:
                if tc_options.operation:
                    try:
                        operation = getattr(Operation, tc_options.operation)
                    except AttributeError:
                        operation = Operation.sum
                        print ('ERROR: Unknown operation attribute '
                               '%s for column %s.' %
                               (tc_options.operation, tc.name))
                else:
                    operation = Operation.none

                c = Value(tc_options.extractor,
                          operation,
                          description=tc.label,
                          default_value=tc_options.default_value)

            self.column_names.append(tc.name)
            columns.append(c)

        # Identify Sort Column
        sortidx = None
        if self.table.sortcols is not None:
            sortcol = Column.objects.get(table=self.table,
                                         name=self.table.sortcols[0])
            sort_name = sortcol.options.extractor
            for i, c in enumerate(columns):
                if c.field == sort_name:
                    sortidx = i
                    break

        # Initialize filters
        criteria = self.job.criteria

        filters = []

        if hasattr(criteria, 'netshark_filterexpr'):
            logger.debug('calculating netshark filter expression ...')
            filterexpr = self.job.combine_filterexprs(
                exprs=criteria.netshark_filterexpr,
                joinstr="&"
            )
            if filterexpr:
                logger.debug('applying netshark filter expression: %s'
                             % filterexpr)
                filters.append(NetSharkFilter(filterexpr))

        if hasattr(criteria, 'netshark_bpf_filterexpr'):
            # TODO evaluate how to combine multiple BPF filters
            # this will just apply one at a time
            filterexpr = criteria.netshark_bpf_filterexpr
            logger.debug('applying netshark BPF filter expression: %s'
                         % filterexpr)
            filters.append(BpfFilter(filterexpr))

        resolution = criteria.resolution
        if resolution.seconds == 1:
            sampling_time_msec = 1000
        elif resolution.microseconds == 1000:
            sampling_time_msec = 1
            if criteria.duration > parse_timedelta('1s'):
                msg = ("Cannot run a millisecond report with a duration "
                       "longer than 1 second")
                raise ValueError(msg)
        else:
            sampling_time_msec = 1000

        # Get source type from options
        logger.debug("NetShark Source: %s" %
                     self.job.criteria.netshark_source_name)

        source = path_to_class(
            shark, self.job.criteria.netshark_source_name)
        live = source.is_live()
        persistent = criteria.get('netshark_persistent', False)

        if live and not persistent:
            raise ValueError("Live views must be run with persistent set")

        view = None
        if persistent:
            # First, see if a view by this title already exists
            # Title is the table name plus a criteria hash including
            # all criteria *except* the timeframe
            h = hashlib.md5()
            h.update('.'.join([c.name for c in
                               self.table.get_columns()]))
            for k, v in criteria.iteritems():
                if criteria.is_timeframe_key(k):
                    continue
                h.update('%s:%s' % (k, v))

            title = '/'.join(['steelscript-appfwk', str(self.table.id),
                              self.table.namespace, self.table.name,
                              h.hexdigest()])
            view = NetSharkViews.find_by_name(shark, title)
            logger.debug("Persistent view title: %s" % title)
        else:
            # Only assign a title for persistent views
            title = None

        if not view:
            # Not persistent, or not yet created...

            if not live:
                # Cannot attach a time filter to a live view;
                # it will be added later at get_data() time
                tf = TimeFilter(start=criteria.starttime,
                                end=criteria.endtime)
                filters.append(tf)

                logger.info("Setting netshark table %d timeframe to %s" %
                            (self.table.id, str(tf)))

            # Create it
            with lock:
                logger.debug("%s: Creating view for table %s" %
                             (str(self), str(self.table)))
                view = shark.create_view(
                    source, columns, filters=filters, sync=False,
                    name=title, sampling_time_msec=sampling_time_msec)

            if not live:
                done = False
                logger.debug("Waiting for netshark table %d to complete" %
                             self.table.id)
                while not done:
                    time.sleep(0.5)
                    with lock:
                        s = view.get_progress()
                        self.job.mark_progress(s)
                        self.job.save()
                        done = view.is_ready()

        logger.debug("Retrieving data for timeframe: %s - %s" %
                     (datetime_to_nanoseconds(criteria.starttime),
                      datetime_to_nanoseconds(criteria.endtime)))

        # Retrieve the data
        with lock:
            getdata_kwargs = {}
            if sortidx is not None:
                getdata_kwargs['sortby'] = sortidx

            if self.table.options.aggregated:
                getdata_kwargs['aggregated'] = self.table.options.aggregated
            else:
                getdata_kwargs['delta'] = self.delta

            if live:
                # For live views, attach the time frame to the get_data() call
                getdata_kwargs['start'] = (
                    datetime_to_nanoseconds(criteria.starttime))
                getdata_kwargs['end'] = (
                    datetime_to_nanoseconds(criteria.endtime))

            self.data = view.get_data(**getdata_kwargs)

            if not persistent:
                view.close()

        if self.table.rows > 0:
            self.data = self.data[:self.table.rows]

        self.parse_data()

        logger.info("NetShark Report %s returned %s rows" %
                    (self.job, len(self.data)))

        return QueryComplete(self.data)
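
The persistent-view lookup in run() keys the view on a deterministic title: the literal 'steelscript-appfwk', the table id/namespace/name, and an md5 over the column names plus every non-timeframe criterion. A standalone Python 3 sketch of that scheme is below; the function name and the timeframe-key set are assumptions, and unlike the Python 2 original it encodes to bytes and sorts the criteria for determinism.

import hashlib

def persistent_title(table_id, namespace, name, column_names, criteria,
                     timeframe_keys=('starttime', 'endtime', 'duration')):
    h = hashlib.md5()
    h.update('.'.join(column_names).encode())
    for k, v in sorted(criteria.items()):  # sorted for determinism
        if k in timeframe_keys:            # timeframe never changes the title
            continue
        h.update(('%s:%s' % (k, v)).encode())
    return '/'.join(['steelscript-appfwk', str(table_id),
                     namespace, name, h.hexdigest()])

print(persistent_title(42, 'netshark', 'traffic', ['time', 'p'],
                       {'resolution': '1s', 'starttime': 0, 'endtime': 60}))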
Example #5
    def test_live_view_api(self):
        # test on a live interface
        s = self.shark
        columns, filters = setup_defaults()
        interface = s.get_interfaces()[0]
        view = s.create_view(interface,
                             columns,
                             None,
                             name='test_live_view',
                             sync=True)

        time.sleep(20)
        # 20 seconds delta
        start = view.get_timeinfo()['start']
        onesec = 1000000000
        end = start + 20 * onesec

        data = view.get_data(start=start)
        table = [(x['p'], x['t'], timeutils.datetime_to_nanoseconds(x['t']))
                 for x in data]

        # XXX figure out how to split these up into separate
        # tests without adding a 20 sec delay for each of them

        # this part needs to be redone since delta
        # is no longer accepted for aggregated calls

        # aggregate and compare against first row of data
        # print table
        # delta = table[0][2] - start + onesec
        # d = view.get_data(aggregated=True, delta=delta)
        # self.assertEqual(len(d), 1)
        # self.assertEqual(d[0]['p'], table[0][0])

        # # aggregate and compare against first two rows of data
        # # note extra onesec not needed here
        # delta = table[1][2] - start
        # d = view.get_data(aggregated=True, delta=delta)
        # self.assertEqual(len(d), 1)
        # self.assertEqual(d[0]['p'], table[0][0])

        if len(table) >= 2:
            # aggregate with start/end as last two samples
            #
            start = table[-2][2]
            end = table[-1][2]
            d = view.get_data(aggregated=True, start=start, end=end)
            self.assertEqual(len(d), 1)
            self.assertEqual(d[0]['p'], table[-2][0])

            # aggregate with start/end as first and last sample;
            # result is the sum of all samples except the last
            start = table[0][2]
            end = table[-1][2]
            d = view.get_data(aggregated=True, start=start, end=end)
            self.assertEqual(len(d), 1)
            self.assertEqual(d[0]['p'], sum(x[0] for x in table[:-1]))

        # # aggregate with start as second sample and delta to end of table
        # #
        # start = table[1][2]
        # delta = table[-1][2] - start
        # d = view.get_data(aggregated=True, start=start, delta=delta)
        # self.assertEqual(len(d), 1)
        # self.assertEqual(d[0]['p'], sum(x[0] for x in table[1:-1]))

        # # aggregate going backwards from last sample
        # #
        # end = table[-1][2]
        # delta = end - table[-3][2]
        # d = view.get_data(aggregated=True, end=end, delta=delta)
        # self.assertEqual(len(d), 1)
        # self.assertEqual(d[0]['p'], sum(x[0] for x in table[-3:-1]))

        view.close()
Example #6
    def run(self):
        """ Main execution method
        """
        criteria = self.job.criteria

        self.timeseries = False  # set when a 'time' key column is created
        self.column_names = []

        # Resolution comes in as a time_delta
        resolution = timedelta_total_seconds(criteria.resolution)

        default_delta = 1000000000  # one second in ns
        self.delta = int(default_delta * resolution)  # sample interval in ns

        if criteria.netshark_device == '':
            logger.debug('%s: No netshark device selected' % self.table)
            self.job.mark_error("No NetShark Device Selected")
            return False

        shark = DeviceManager.get_device(criteria.netshark_device)

        logger.debug("Creating columns for NetShark table %d" % self.table.id)

        # Create Key/Value Columns
        columns = []
        for tc in self.table.get_columns(synthetic=False):
            tc_options = tc.options
            if (tc.iskey and tc.name == 'time'
                    and tc_options.extractor == 'sample_time'):
                # don't create column, use the sample time for timeseries
                self.timeseries = True
                self.column_names.append('time')
                continue
            elif tc.iskey:
                c = Key(tc_options.extractor,
                        description=tc.label,
                        default_value=tc_options.default_value)
            else:
                if tc_options.operation:
                    try:
                        operation = getattr(Operation, tc_options.operation)
                    except AttributeError:
                        operation = Operation.sum
                        print('ERROR: Unknown operation attribute '
                              '%s for column %s.' %
                              (tc_options.operation, tc.name))
                else:
                    operation = Operation.none

                c = Value(tc_options.extractor,
                          operation,
                          description=tc.label,
                          default_value=tc_options.default_value)

            self.column_names.append(tc.name)
            columns.append(c)

        # Identify Sort Column
        sortidx = None
        if self.table.sortcols is not None:
            sortcol = Column.objects.get(table=self.table,
                                         name=self.table.sortcols[0])
            sort_name = sortcol.options.extractor
            for i, c in enumerate(columns):
                if c.field == sort_name:
                    sortidx = i
                    break

        # Initialize filters
        criteria = self.job.criteria

        filters = []

        if hasattr(criteria, 'netshark_filterexpr'):
            logger.debug('calculating netshark filter expression ...')
            filterexpr = self.job.combine_filterexprs(
                exprs=criteria.netshark_filterexpr, joinstr="&")
            if filterexpr:
                logger.debug('applying netshark filter expression: %s' %
                             filterexpr)
                filters.append(NetSharkFilter(filterexpr))

        if hasattr(criteria, 'netshark_bpf_filterexpr'):
            # TODO evaluate how to combine multiple BPF filters
            # this will just apply one at a time
            filterexpr = criteria.netshark_bpf_filterexpr
            logger.debug('applying netshark BPF filter expression: %s' %
                         filterexpr)
            filters.append(BpfFilter(filterexpr))

        resolution = criteria.resolution
        if resolution.seconds == 1:
            sampling_time_msec = 1000
        elif resolution.microseconds == 1000:
            sampling_time_msec = 1
            if criteria.duration > parse_timedelta('1s'):
                msg = ("Cannot run a millisecond report with a duration "
                       "longer than 1 second")
                raise ValueError(msg)
        else:
            sampling_time_msec = 1000

        # Get source type from options
        logger.debug("NetShark Source: %s" %
                     self.job.criteria.netshark_source_name)

        source = path_to_class(shark, self.job.criteria.netshark_source_name)
        live = source.is_live()
        persistent = criteria.get('netshark_persistent', False)

        if live and not persistent:
            raise ValueError("Live views must be run with persistent set")

        view = None
        if persistent:
            # First, see if a view by this title already exists
            # Title is the table name plus a criteria hash including
            # all criteria *except* the timeframe
            h = hashlib.md5()
            h.update('.'.join([c.name for c in self.table.get_columns()]))
            for k, v in criteria.iteritems():
                if criteria.is_timeframe_key(k):
                    continue
                h.update('%s:%s' % (k, v))

            title = '/'.join([
                'steelscript-appfwk',
                str(self.table.id), self.table.namespace, self.table.name,
                h.hexdigest()
            ])
            view = NetSharkViews.find_by_name(shark, title)
            logger.debug("Persistent view title: %s" % title)
        else:
            # Only assign a title for persistent views
            title = None

        timefilter = TimeFilter(start=criteria.starttime, end=criteria.endtime)

        if not view:
            # Not persistent, or not yet created...

            if not live:
                # Cannot attach a time filter to a live view;
                # it will be added later at get_data() time
                if criteria.starttime and criteria.endtime:
                    filters.append(timefilter)

                    logger.info("Setting netshark table %d timeframe to %s" %
                                (self.table.id, str(timefilter)))
                else:
                    # if times are set to zero, don't add a time filter;
                    # the entire timeframe of the source is processed instead
                    logger.info("Not setting netshark table %d timeframe" %
                                self.table.id)

            # Create it
            with lock:
                logger.debug("%s: Creating view for table %s" %
                             (str(self), str(self.table)))
                view = shark.create_view(source,
                                         columns,
                                         filters=filters,
                                         sync=False,
                                         name=title,
                                         sampling_time_msec=sampling_time_msec)

            if not live:
                done = False
                logger.debug("Waiting for netshark table %d to complete" %
                             self.table.id)
                while not done:
                    time.sleep(0.5)
                    with lock:
                        s = view.get_progress()
                        self.job.mark_progress(s)
                        self.job.save()
                        done = view.is_ready()

        logger.debug("Retrieving data for timeframe: %s" % timefilter)

        # Retrieve the data
        with lock:
            getdata_kwargs = {}
            if sortidx is not None:
                getdata_kwargs['sortby'] = sortidx

            if self.table.options.aggregated:
                getdata_kwargs['aggregated'] = self.table.options.aggregated
            else:
                getdata_kwargs['delta'] = self.delta

            if live:
                # For live views, attach the time frame to the get_data() call
                getdata_kwargs['start'] = (datetime_to_nanoseconds(
                    criteria.starttime))
                getdata_kwargs['end'] = (datetime_to_nanoseconds(
                    criteria.endtime))

            self.data = view.get_data(**getdata_kwargs)

            if not persistent:
                view.close()

        if self.table.rows > 0:
            self.data = self.data[:self.table.rows]

        self.parse_data()

        logger.info("NetShark Report %s returned %s rows" %
                    (self.job, len(self.data)))

        return QueryComplete(self.data)
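
The resolution handling in both run() variants reduces to a small decision: 1 ms sampling is only permitted when the report duration is at most one second; everything else falls back to 1000 ms. A compact model of that rule follows, using timedelta equality as an assumption in place of the attribute checks in the code above:

from datetime import timedelta

def pick_sampling_time_msec(resolution, duration):
    if resolution == timedelta(seconds=1):
        return 1000
    if resolution == timedelta(milliseconds=1):
        if duration > timedelta(seconds=1):
            raise ValueError("Cannot run a millisecond report with a "
                             "duration longer than 1 second")
        return 1
    return 1000  # default, as in the else branch above

assert pick_sampling_time_msec(timedelta(seconds=1), timedelta(minutes=5)) == 1000
assert pick_sampling_time_msec(timedelta(milliseconds=1), timedelta(seconds=1)) == 1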