Example #1
    def main(self):
        if self.options.jobname:
            export_name = self.options.jobname
            source = self.netshark.get_capture_job_by_name(export_name)
        elif self.options.clipname:
            export_name = self.options.clipname
            source = self.netshark.get_trace_clip_by_description(export_name)

        filename = self.options.filename
        if not filename:
            filename = "%s_export.pcap" % export_name

        if self.options.timerange:
            timefilter = TimeFilter.parse_range(self.options.timerange)
        elif self.options.start_time and self.options.end_time:
            start = string_to_datetime(float(self.options.start_time))
            end = string_to_datetime(float(self.options.end_time))
            timefilter = TimeFilter(start, end)
        else:
            self.parser.error("Select either --timerange or --start and --end times")

        if self.options.filters:
            kvs = [f.split("=") for f in self.options.filters]
            filters = [NetSharkFilter(r'%s="%s"' % (k, v)) for k, v in kvs]
        else:
            filters = None

        with self.netshark.create_export(source, timefilter, filters=filters) as e:
            print "beginning download to file %s" % filename
            e.download(filename, overwrite=self.options.overwrite)
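The option handling above reduces to a small pattern: build a TimeFilter either from a range string or from a pair of unix timestamps. A minimal standalone sketch of that step; the import paths are assumptions (adjust to the project's layout), and the range string and epoch values are illustrative:

    # assumed import locations; adjust to your project's layout
    from steelscript.common.timeutils import string_to_datetime
    from steelscript.netprofiler.core.filters import TimeFilter

    # from a human-readable range string (format per the library docs)...
    timefilter = TimeFilter.parse_range('last 1 hour')

    # ...or from unix timestamps, converted to UTC datetimes first
    start = string_to_datetime(float('1500000000'))
    end = string_to_datetime(float('1500003600'))
    timefilter = TimeFilter(start, end)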
Example #2
    def main(self):
        if self.options.jobname:
            export_name = self.options.jobname
            source = self.netshark.get_capture_job_by_name(export_name)
        elif self.options.clipname:
            export_name = self.options.clipname
            source = self.netshark.get_trace_clip_by_description(export_name)

        filename = self.options.filename
        if not filename:
            filename = '%s_export.pcap' % export_name

        if self.options.timerange:
            timefilter = TimeFilter.parse_range(self.options.timerange)
        elif self.options.start_time and self.options.end_time:
            start = string_to_datetime(float(self.options.start_time))
            end = string_to_datetime(float(self.options.end_time))
            timefilter = TimeFilter(start, end)
        else:
            self.parser.error(
                'Select either --timerange or --start and --end times')

        if self.options.filters:
            kvs = [f.split('=') for f in self.options.filters]
            filters = [NetSharkFilter(r'%s="%s"' % (k, v)) for k, v in kvs]
        else:
            filters = None

        with self.netshark.create_export(source, timefilter,
                                         filters=filters) as e:
            print('beginning download to file %s' % filename)
            e.download(filename, overwrite=self.options.overwrite)
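Examples #1 and #2 turn --filters options given as key=value strings into NetSharkFilter objects. The same step in isolation; the field names are hypothetical, NetSharkFilter is assumed to be importable as in the examples (likely under steelscript.netshark.core.filters), and passing maxsplit=1 to split is a defensive assumption so values containing '=' survive intact:

    raw_filters = ['ip.src=10.0.0.1', 'tcp.dst_port=443']   # hypothetical values

    kvs = [f.split('=', 1) for f in raw_filters]  # split on the first '=' only
    filters = [NetSharkFilter('%s="%s"' % (k, v)) for k, v in kvs]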
Example #3
 def get_property_values(self):
     return [
         self.subject(), self.fingerprint(),
         self.key(), self.issuer(),
         timeutils.string_to_datetime(self.valid_at()),
         timeutils.string_to_datetime(self.expires_at()),
         self.pem()
     ]
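Example #3 (and #6 below) feed stored timestamp values through timeutils.string_to_datetime before returning them. A quick sketch of the input forms, based on how the other examples call it: floats in Example #1, strings in Example #4, and the Example #7 comment that results come back in UTC. The import path and timestamps are assumptions:

    from steelscript.common import timeutils   # assumed import path

    dt_from_float = timeutils.string_to_datetime(1500000000.0)   # epoch as float
    dt_from_str = timeutils.string_to_datetime('1500000000')     # epoch as string
    # per the comments in Example #7, both are returned as UTC datetimes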
Example #4
    def generate_traffic(self, activity, legend_keys, report_type):
        """ Generate traffic data during the time the user was logged-in.
        """
        cache = {}
        combined_activity = []
        for event in activity:
            # handle dns names in host along with IP address
            host = event[0].split('|', 1)[0]

            timefilter = TimeFilter(string_to_datetime(event[1]),
                                    string_to_datetime(event[2]))
            # if event occurs in less than a minute, add extra minute to report
            while len(timefilter.profiler_minutes()) == 1:
                timefilter.end += datetime.timedelta(minutes=1)

            # normalize times to minute increments
            mins = timefilter.profiler_minutes()
            tf = TimeFilter(mins[0], mins[-1])

            if self.options.usecache and report_type == 'timeseries':
                # only consider a hit when whole time period is covered
                minutes = tf.profiler_minutes(astimestamp=True)

                if host in cache and all(t in cache[host] for t in minutes):
                    data = [cache[host][t] for t in minutes]
                else:
                    legend, data = self.traffic_report(host, tf, report_type)
                    # store results in cache by host->times->data
                    cache.setdefault(host, {}).update(
                        (int(x[0]), x) for x in data)
            else:
                legend, data = self.traffic_report(host, tf, report_type)

            if data:
                if self.options.aggregate and report_type == 'timeseries':
                    # generate running averages over data samples received
                    # first convert empty strings to zeros, then run averages
                    columns = [[0 if x == '' else x for x in c]
                               for c in zip(*data)]
                    aggmap = [x[1] for x in TCOLUMNS]
                    aggregates = [aggmap[i](x) for i, x in enumerate(columns)]
                    combined_activity.append(list(event) + aggregates)
                elif report_type == 'timeseries' or report_type == 'summary':
                    # create entry for each element in report
                    for row in data:
                        r = ['--' if x == '' else x for x in row]
                        combined_activity.append(list(event) + r)
                else:
                    raise RuntimeError('unknown report type: %s' % report_type)

            else:
                # populate result with blanks
                combined_activity.append(list(event) + ['--'] * len(legend))

        traffic_legend = [c.key for c in legend]

        legend = legend_keys + traffic_legend
        return legend, combined_activity
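The minute-normalization step in Example #4 is worth isolating: events shorter than one profiler minute are padded out, then both endpoints are snapped to minute boundaries so that cache keys line up across events. A sketch of just that step, with hypothetical event times and the same assumed imports as the sketches above:

    import datetime

    # a 10-second event (hypothetical timestamps)
    timefilter = TimeFilter(string_to_datetime('1500000000'),
                            string_to_datetime('1500000010'))

    # pad events that fall within a single profiler minute
    while len(timefilter.profiler_minutes()) == 1:
        timefilter.end += datetime.timedelta(minutes=1)

    # snap both endpoints to minute boundaries
    mins = timefilter.profiler_minutes()
    tf = TimeFilter(mins[0], mins[-1])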
Example #6
 def get_property_values(self):
     return [
         self.id(),
         self.name(),
         self.description(),
         timeutils.string_to_datetime(self.timestamp())
     ]
Example #7
    def compare_time(self, t, resolution=60):
        """ Return True if time `t` falls in between start and end times.

            `t` may be a unix timestamp (float or string) or a datetime.datetime
            object

            `resolution` is the number of seconds to use for rounding.  Since
            NetProfiler stores data in one-minute increments, typically this
            should allow reasonable comparisons to report outputs.  Passing
            zero (`0`) in here will enforce strict comparisons.
        """
        # try converting to datetime object
        try:
            t = timeutils.string_to_datetime(t)
        except TypeError:
            pass

        # move everything to uniform utc timezone
        # string to datetime already returns utc, but if this is a
        # datetime object, we are just being safe here
        t = timeutils.force_to_utc(t)
        start = timeutils.force_to_utc(self.start)
        end = timeutils.force_to_utc(self.end)

        # by default, this will be one minute delta
        delta = datetime.timedelta(0, resolution, 0)
        return (start <= t <= end or abs(start - t) < delta
                or abs(end - t) < delta)
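A usage sketch for compare_time, assuming it is a method on the same TimeFilter class used in the earlier examples (the timestamps are hypothetical):

    tf = TimeFilter(string_to_datetime(1500000000.0),
                    string_to_datetime(1500003600.0))    # a one-hour window

    tf.compare_time(1500001800.0)    # inside the window -> True
    tf.compare_time('1500003630')    # 30s past the end, within the default
                                     # 60-second resolution -> True
    tf.compare_time(1500003630.0, resolution=0)   # strict comparison -> False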
Example #8
    def compare_time(self, t, resolution=60):
        """ Return True if time `t` falls in between start and end times.

            `t` may be a unix timestamp (float or string) or a datetime.datetime
            object

            `resolution` is the number of seconds to use for rounding.  Since
            NetProfiler stores data in one-minute increments, typically this
            should allow reasonable comparisons to report outputs.  Passing
            zero (`0`) in here will enforce strict comparisons.
        """
        # try converting to datetime object
        try:
            t = timeutils.string_to_datetime(t)
        except TypeError:
            pass

        # move everything to uniform utc timezone
        # string to datetime already returns utc, but if this is a
        # datetime object, we are just being safe here
        t = timeutils.force_to_utc(t)
        start = timeutils.force_to_utc(self.start)
        end = timeutils.force_to_utc(self.end)

        # by default, this will be one minute delta
        delta = datetime.timedelta(0, resolution, 0)
        return (start <= t <= end or
                abs(start - t) < delta or
                abs(end - t) < delta)
Example #9
 def get_property_values(self):
     return [
         self.state(),
         self.state_description(),
         timeutils.string_to_datetime(self.last_state_time()),
         self.target_version(),
         self.get_history_details()
     ]
Example #10
 def get_history_details(self):
     # format each update record as "Time: <timestamp> Version: <version>"
     history = self.update_history()
     f_history = ""
     for entry in history:
         f_time = "\nTime: " + str(
             timeutils.string_to_datetime(entry.time))
         f_version = "Version: " + entry.version + "\n"
         f_history += f_time + " " + f_version
     return f_history
Example #11
    def main(self):
        if self.options.jobname:
            export_name = self.options.jobname
            source = self.netshark.get_capture_job_by_name(export_name)
            create_export = self.netshark.api.jobs.create_export
            status_export = self.netshark.api.jobs.get_export_details
            download_export = self.netshark.api.jobs.get_packets_from_export
            delete_export = self.netshark.api.jobs.delete_export
        elif self.options.clipname:
            export_name = self.options.clipname
            source = self.netshark.get_trace_clip_by_description(export_name)
            create_export = self.netshark.api.clips.create_export
            status_export = self.netshark.api.clips.get_export_details
            download_export = self.netshark.api.clips.get_packets_from_export
            delete_export = self.netshark.api.clips.delete_export

        filename = self.options.filename
        if not filename:
            filename = '%s_export.pcap' % export_name

        if self.options.timerange:
            timefilter = TimeFilter.parse_range(self.options.timerange)
        elif self.options.start_time and self.options.end_time:
            start = string_to_datetime(float(self.options.start_time))
            end = string_to_datetime(float(self.options.end_time))
            timefilter = TimeFilter(start, end)
        else:
            self.optparse.error('Select either --timerange or --start and --end times')

        config = {
            #'output_filename': filename,
            'output_format': 'PCAP_US',
            'start_time': datetime_to_seconds(timefilter.start),
            'end_time': datetime_to_seconds(timefilter.end),
        }

        if self.options.filters:
            filters = [f.split('=') for f in self.options.filters]
            filters = [r'%s="%s"' % (k, v) for k, v in filters]
            config['filters'] = [NetSharkFilter(filt).bind(self.netshark) for filt in filters]

        export_id = create_export(source.id, config=config)
        print('Export created with ID: %s' % export_id)

        try:
            status = status_export(source.id, export_id['id'])
            print('Current status of export is:\n%s' % status)

            if status['status']['state'] == 'RUNNING':
                print('beginning download to file %s' % filename)
                download_export(source.id, export_id['id'], filename)
        finally:
            try:
                # clean up the server-side export even if the download failed
                print('Trying to delete export ... ', end='')
                delete_export(source.id, export_id['id'])
                print('deleted.')
            except Exception:
                print('Error when trying to delete export.  Ignoring.')
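The try/finally in Example #11 is the essential pattern: an export is server-side state, so it should be deleted even when the status check or download fails. A minimal sketch wrapping the same four API calls used above, assuming netshark and source are set up as in the example; creating the export before entering the try block keeps the finally clause from referencing a name that was never bound:

    def export_job_packets(netshark, source, config, filename):
        """Create a job export, download its packets, and always clean up."""
        api = netshark.api.jobs
        export = api.create_export(source.id, config=config)
        try:
            status = api.get_export_details(source.id, export['id'])
            if status['status']['state'] == 'RUNNING':
                api.get_packets_from_export(source.id, export['id'], filename)
        finally:
            # exports consume appliance resources; delete unconditionally
            api.delete_export(source.id, export['id'])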