Example #1
    def generate_traffic(self, activity, legend_keys, report_type):
        """ Generate traffic data during the time the user was logged-in.
        """
        cache = {}
        combined_activity = []
        for event in activity:
            host = event[0]
            timefilter = TimeFilter(string_to_datetime(event[1]),
                                    string_to_datetime(event[2]))

            if self.options.usecache and report_type == 'timeseries':
                # check cache - only consider a hit when whole time period is covered
                minutes = timefilter.profiler_minutes(astimestamp=True)

                if host in cache and all(t in cache[host] for t in minutes):
                    data = [cache[host][t] for t in minutes]
                else:
                    legend, data = self.traffic_report(host, timefilter, report_type)
                    # store results in cache by host->times->data
                    cache.setdefault(host, {}).update((int(x[0]), x) for x in data)
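                    # cache layout after this update (illustrative):
                    #   {host: {unix_minute: [unix_minute, col1, col2, ...], ...}}
                    # a later event for this host is served from the cache only
                    # when every minute it needs is already present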
            else:
                legend, data = self.traffic_report(host, timefilter, report_type)

            if data:
                if self.options.aggregate and report_type == 'timeseries':
                    # generate running averages over data samples received
                    # first convert empty strings to zeros, then run averages
                    columns = map(lambda c: [0 if x == '' else x for x in c],
                                  itertools.izip(*data))
                    aggmap = [x[1] for x in TCOLUMNS]
                    aggregates = [aggmap[i](x) for i, x in enumerate(columns)]
                    combined_activity.append(list(event) + aggregates)
                elif report_type == 'timeseries' or report_type == 'summary':
                    # create entry for each element in report
                    for row in data:
                        r = ['--' if x == '' else x for x in row]
                        combined_activity.append(list(event) + r)
                else:
                    raise RuntimeError('unknown report type: %s' % report_type)

            else:
                # populate result with blanks
                combined_activity.append(list(event) + ['--'] * len(legend))

        traffic_legend = [c.key for c in legend]
        
        legend = legend_keys + traffic_legend
        return legend, combined_activity
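The running-average branch above transposes the data rows into columns and reduces each column with a function taken from TCOLUMNS. The following is a minimal standalone sketch of that pattern, not the library's code; the TCOLUMNS entries and sample rows are hypothetical (key, aggregator) pairs and made-up values.

    # Hypothetical column definitions: (key, aggregator) pairs, mirroring how
    # the method pulls its aggregation functions out of TCOLUMNS.
    TCOLUMNS = [('time', min),
                ('total_bytes', sum),
                ('avg_bytes', lambda col: sum(col) / float(len(col)))]

    # Made-up report rows; an empty string stands in for a missing sample.
    data = [[1000, 200, 3.5],
            [1060, '', 4.0],
            [1120, 100, '']]

    # Transpose rows into columns, mapping missing samples to zero, then
    # reduce each column with its aggregator, the same shape as the loop above.
    columns = [[0 if x == '' else x for x in col] for col in zip(*data)]
    aggmap = [agg for _key, agg in TCOLUMNS]
    aggregates = [aggmap[i](col) for i, col in enumerate(columns)]
    # aggregates == [1000, 300, 2.5]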
Example #2
    def compare_time(self, t, resolution=60):
        """ Return True if time `t` falls in between start and end times.

            `t` may be a unix timestamp (float or string) or a datetime.datetime
            object

            `resolution` is the number of seconds to use for rounding.  Since
            Profiler stores data in one-minute increments, typically this
            should allow reasonable comparisons to report outputs.  Passing
            zero (`0`) in here will enforce strict comparisons.
        """
        # try converting to datetime object
        try:
            t = timeutils.string_to_datetime(t)
        except TypeError:
            pass

        # normalize everything to UTC
        # string_to_datetime already returns UTC, but if `t` was passed
        # as a datetime object this keeps the comparison safe
        t = timeutils.force_to_utc(t)
        start = timeutils.force_to_utc(self.start)
        end = timeutils.force_to_utc(self.end)

        # by default, this will be one minute delta
        delta = datetime.timedelta(0, resolution, 0)
        return (start <= t <= end or
                abs(start - t) < delta or
                abs(end - t) < delta)
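The comparison accepts times slightly outside the window: anything within `resolution` seconds of either endpoint still passes. Below is a standalone sketch of that tolerance rule using plain datetime objects (not a call into the library), with arbitrary example times.

    import datetime

    start = datetime.datetime(2014, 1, 1, 12, 0, 0)
    end = datetime.datetime(2014, 1, 1, 13, 0, 0)
    delta = datetime.timedelta(seconds=60)   # default one-minute tolerance

    def in_window(t):
        # same rule as compare_time: inside the range, or within delta of an endpoint
        return start <= t <= end or abs(start - t) < delta or abs(end - t) < delta

    in_window(datetime.datetime(2014, 1, 1, 12, 30, 0))   # True: inside the range
    in_window(datetime.datetime(2014, 1, 1, 13, 0, 30))   # True: 30s past end, within tolerance
    in_window(datetime.datetime(2014, 1, 1, 13, 2, 0))    # False: two minutes past end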
Example #3
    def get_iterdata(self, start=None, end=None, delta=None,
                     aggregated=False,
                     sortby=None, sorttype="descending",
                     fromentry=0, toentry=0):
        """
        Returns an iterator to the output data. This function is ideal for
        sequential parsing of the view data, because it downloads the
        dataset incrementally as it is accessed.

        `start` and `end` are `datetime.datetime` objects representing
        the earliest and latest packets that should be considered.
        If `start` and `end` are unspecified, the start/end of the
        underlying packet source are used.

        `delta` is a `datetime.timedelta` object that can be used to
        override the default data aggregation interval.  If this
        parameter is unspecified, the underlying view sample interval
        (which defaults to 1 second) is used.  If this parameter is
        specified, it must be an even multiple of the underlying
        view sample interval.

        If `aggregated` is True, the parameter `delta` is automatically
        computed to be the full extent of this request (i.e., the difference
        between the effective start and end times).  This is useful if
        you do not care about timeseries data (e.g., if the data from
        this view is to be plotted in a single chart that has no
        time component).

        The `sortby` parameter is one of the fields of the output (x1, x2, ...).

        The `sorttype` can be:
        * `ascending`: the output is sorted from smallest to largest
        * `descending`: the output is sorted largest to smallest

        The `fromentry` parameter represents the first sorted item that
        should appear in the output.  0 means start from the first one.

        The `toentry` parameter represents the last sorted item that should
        appear in the output.  0 means include all of them.
        """
        params = self._parse_output_params(start, end, delta, aggregated,
                                           sortby, sorttype,
                                           fromentry, toentry)

        res = self.view.shark.api.view.get_data(self.view.handle, self.id, **params)

        samples = res.get('samples')

        # log the request parameters for debugging
        logger.debug('get_data params: %s' % params)

        if samples is None:
            return

        for sample in samples:
            if 'vals' not in sample:
                continue

            sample['t'] = timeutils.string_to_datetime(sample['t'])

            # convert each value to its native type based on the legend entry
            def convert_one(vec):
                return [_to_native(v, self._legend[i])
                        for i, v in enumerate(vec)]

            sample['vals'] = [convert_one(v) for v in sample['vals']]
            yield sample
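A hedged usage sketch of the iterator above; the `output` object here is a hypothetical stand-in for whatever exposes `get_iterdata()` in the library, and only the sample fields the method itself shows (`t` and `vals`) are assumed.

    # `output` is a hypothetical handle to an object exposing get_iterdata().
    for sample in output.get_iterdata(aggregated=False,
                                      sortby=None,
                                      sorttype="descending"):
        # each yielded sample carries a UTC timestamp and a list of value rows
        print(sample['t'])
        for row in sample['vals']:
            print(row)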