def main(self):
    """Run a flow-list report with the default column set and print it."""
    # Prefer an explicit range expression; otherwise use the start/end pair.
    if self.options.timerange:
        tfilter = TimeFilter.parse_range(self.options.timerange)
    else:
        tfilter = TimeFilter(self.options.time0, self.options.time1)
    texpr = TrafficFilter(self.options.trafficexpr)

    keys = self.netprofiler.columns.key
    vals = self.netprofiler.columns.value
    columns = [keys.srv_host_ip,
               keys.app_info,
               keys.start_time,
               keys.end_time,
               vals.s2c_total_bytes,
               vals.s2c_total_pkts,
               vals.response_time,
               vals.server_delay]

    report = TrafficFlowListReport(self.netprofiler)
    report.run(columns, timefilter=tfilter, trafficexpr=texpr)
    data = report.get_data()
    report.delete()

    Formatter.print_table(data, [c.key for c in columns])
def generate_traffic(self, activity, legend_keys, report_type):
    """ Generate traffic data during the time the user was logged-in.

    :param activity: iterable of login events; each event is a sequence
        whose first three items are (host-or-"host|dns", start, end).
    :param legend_keys: column-key names describing the event fields;
        prepended to the traffic legend in the returned header list.
    :param report_type: 'timeseries' or 'summary'; any other value
        raises RuntimeError when traffic data is present.
    :returns: (legend, combined_activity) where each combined row is the
        original event fields plus the traffic values ('--' for blanks).

    NOTE(review): ``itertools.izip`` is Python 2 only — this function
    will NameError under Python 3; confirm target interpreter.
    NOTE(review): ``legend`` is only bound inside branches that call
    ``self.traffic_report``; if every event is served from the cache,
    the trailing ``[c.key for c in legend]`` raises NameError — verify.
    """
    cache = {}
    combined_activity = []
    for event in activity:
        # handle dns names in host along with IP address
        host = event[0].split('|', 1)[0]
        timefilter = TimeFilter(string_to_datetime(event[1]),
                                string_to_datetime(event[2]))
        # if event occurs in less than a minute, add extra minute to report
        while len(timefilter.profiler_minutes()) == 1:
            timefilter.end += datetime.timedelta(minutes=1)
        # normalize times to minute increments
        mins = timefilter.profiler_minutes()
        tf = TimeFilter(mins[0], mins[-1])
        if self.options.usecache and report_type == 'timeseries':
            # only consider a hit when whole time period is covered
            minutes = tf.profiler_minutes(astimestamp=True)
            if host in cache and all(t in cache[host] for t in minutes):
                data = [cache[host][t] for t in minutes]
            else:
                legend, data = self.traffic_report(host, tf, report_type)
                # store results in cache by host->times->data
                cache.setdefault(host, {}).update((int(x[0]), x)
                                                  for x in data)
        else:
            legend, data = self.traffic_report(host, tf, report_type)
        if data:
            if self.options.aggregate and report_type == 'timeseries':
                # generate running averages over data samples received
                # first convert empty strings to zeros, then run averages
                columns = map(lambda c: [0 if x == '' else x for x in c],
                              itertools.izip(*data))
                # TCOLUMNS pairs each column with its aggregation function
                aggmap = [x[1] for x in TCOLUMNS]
                aggregates = [aggmap[i](x) for i, x in enumerate(columns)]
                combined_activity.append(list(event) + aggregates)
            elif report_type == 'timeseries' or report_type == 'summary':
                # create entry for each element in report
                for row in data:
                    r = ['--' if x == '' else x for x in row]
                    combined_activity.append(list(event) + r)
            else:
                raise RuntimeError('unknown report type: %s' % report_type)
        else:
            # populate result with blanks
            combined_activity.append(list(event) + ['--'] * len(legend))
    traffic_legend = [c.key for c in legend]
    legend = legend_keys + traffic_legend
    return legend, combined_activity
def main(self):
    """Validate the groupby choice, then run a traffic summary report."""
    # groupby validation would ideally live in validate_args, but no
    # NetProfiler is initialized at that stage of the initialization.
    groupbys = self.netprofiler.groupbys
    try:
        self.groupby = groupbys[self.options.groupby]
    except KeyError:
        if self.options.groupby in groupbys.values():
            self.groupby = self.options.groupby
        else:
            self.parser.error('Invalid groupby chosen.')

    self.timefilter = TimeFilter.parse_range(self.options.timefilter)
    self.trafficexpr = (TrafficFilter(self.options.trafficexpr)
                        if self.options.trafficexpr else None)

    with TrafficSummaryReport(self.netprofiler) as report:
        report.run(columns=self.options.columns.split(','),
                   groupby=self.groupby,
                   sort_col=self.options.sortby,
                   timefilter=self.timefilter,
                   trafficexpr=self.trafficexpr,
                   centricity=self.centricity,
                   resolution='auto')
        data = report.get_data()
        legend = [c.label for c in report.get_legend()]
        self.print_data(data, legend)
def main(self):
    """Run a saved report template by id and download the rendered view."""
    self.timefilter = TimeFilter.parse_range(self.options.timefilter)
    self.trafficexpr = (TrafficFilter(self.options.trafficexpr)
                        if self.options.trafficexpr else None)

    with MultiQueryReport(self.netprofiler) as report:
        report.run(template_id=int(self.options.template_id),
                   timefilter=self.timefilter,
                   trafficexpr=self.trafficexpr)
        print('Report Template {id} successfully run.'
              .format(id=self.options.template_id))

        # Fetch the server-rendered view of the finished report.
        url = ('/api/profiler/1.6/reporting/reports/{id}/view.{fmt}'
               .format(id=report.id, fmt=self.options.fmt))
        self.netprofiler.conn.download(url,
                                       path=self.options.pathname,
                                       overwrite=True)
        print('Completed Report {id} downloaded to {path}.'
              .format(id=report.id, path=self.options.pathname))
def test_traffic_overall_time_series_report(self):
    """Every row's timestamp lies inside the requested hour."""
    profiler = self.profiler
    columns = [profiler.columns.key.time,
               profiler.columns.value.avg_bytes,
               profiler.columns.value.avg_pkts]
    timerange = TimeFilter.parse_range("last 1 h")
    trafficexpr = TrafficFilter("host 10/8")
    resolution = "15min"

    report = TrafficOverallTimeSeriesReport(profiler)
    report.run(columns, timerange, trafficexpr, resolution=resolution)
    keys = [c.key for c in report.get_legend()]
    for item in report.get_data():
        d = dict(zip(keys, item))
        # resolution assumes 15-minute responses
        self.assertTrue(
            timerange.compare_time(d['time'], resolution=15 * 60))
    report.delete()
def main(self):
    """Entry point: handle the list modes, then run the percentile report."""
    opts = self.options
    if opts.listinterfacegroups:
        self.list_interface_groups(opts.host,
                                   opts.sshusername,
                                   opts.sshpassword)
        return
    if opts.listhostgroups:
        self.list_host_groups(NetProfiler(opts.host, auth=self.auth))
        return

    try:
        timefilter = TimeFilter.parse_range(opts.timefilter)
    except ValueError:
        print("Could not parse time filter expression.")
        return

    profiler = NetProfiler(opts.host, auth=self.auth)

    if not opts.clean:
        # Describe the run parameters unless clean output was requested.
        print("Reporting on the period: {0}\n"
              "Using the traffic filter: {1}\n"
              "Calculating data at percentile {2}\n"
              "Averaging based on buckets of {3} minutes"
              "".format(opts.timefilter, opts.trafficfilter,
                        opts.percentile, opts.buckettime))
        if opts.graph:
            print("Saving a graph to {}".format(opts.graph))
        print()

    trafficfilter = TrafficFilter(opts.trafficfilter)
    self.report_item(profiler, timefilter, trafficfilter,
                     opts.buckettime, opts.percentile)
def test_traffic_summary_report(self):
    """Summary by host: full and restricted legends, cached data refetch."""
    cols = self.profiler.columns
    groupby = self.profiler.groupbys.host
    columns = [cols.key.host_ip, cols.value.avg_bytes, cols.value.avg_pkts]
    sort_col = cols.value.avg_bytes
    timerange = TimeFilter.parse_range("last 1 h")
    trafficexpr = TrafficFilter("host 10/8")
    subset = [cols.key.host_ip, cols.value.avg_bytes]

    with TrafficSummaryReport(self.profiler) as rep:
        rep.run(groupby, columns, sort_col, timerange, trafficexpr)

        legend = rep.get_legend()
        self.assertEqual(len(legend), 3)

        legend = rep.get_legend(columns=subset)
        self.assertEqual(len(legend), 2)
        self.assertEqual(legend[0].key, 'host_ip')
        self.assertEqual(legend[1].key, 'avg_bytes')

        data = rep.get_data()
        if data:
            self.assertEqual(len(data[0]), 3)

        # check that data is refetched from cache
        data = rep.get_data()
        data = rep.get_data(columns=subset)
        if data:
            self.assertEqual(len(data[0]), 2)
def test_resolution(self):
    """Each supported resolution string runs against a matching duration."""
    cols = self.profiler.columns
    groupby = self.profiler.groupbys.host
    columns = [cols.key.host_ip, cols.value.avg_bytes, cols.value.avg_pkts]
    sort_col = cols.value.avg_bytes
    trafficexpr = TrafficFilter("host 10/8")
    resolutions = [("1min", "last 5 min"),
                   ("15min", "last 1 hour"),
                   ("hour", "last 4 hours"),
                   ("6hour", "last 1 day"),
                   ("day", "last 1 week"),
                   ("3600", "last 4 hours"),  # hour resolution
                   ("60", "last 5 min"),      # minute resolution
                   # ("week", "last 4 weeks"),
                   # ("month", "last 12 months"),
                   # Commented values blow up with a
                   # E RvbdHTTPException: 400 Unknown time resolution.
                   ]
    for resolution, duration in resolutions:
        timerange = TimeFilter.parse_range(duration)
        with TrafficSummaryReport(self.profiler) as rep:
            rep.run(groupby, columns, sort_col, timerange, trafficexpr,
                    resolution=resolution)
def main(self):
    """Entry point: handle the list modes, then run the percentile report.

    Fixed: the status output used Python 2 ``print`` statements, which
    are syntax errors under Python 3 (the rest of this file uses the
    print() function); converted to print() calls with identical output.
    """
    if self.options.listinterfacegroups:
        self.list_interface_groups(self.options.host,
                                   self.options.sshusername,
                                   self.options.sshpassword)
        return
    if self.options.listhostgroups:
        self.list_host_groups(NetProfiler(self.options.host, auth=self.auth))
        return

    try:
        timefilter = TimeFilter.parse_range(self.options.timefilter)
    except ValueError:
        print("Could not parse time filter expression.")
        return

    profiler = NetProfiler(self.options.host, auth=self.auth)

    if not self.options.clean:
        # Describe the run parameters unless clean output was requested.
        print("Reporting on the period: {}".format(self.options.timefilter))
        print("Using the traffic filter: {}".format(
            self.options.trafficfilter))
        print("Calculating data at percentile {}".format(
            self.options.percentile))
        print("Averaging based on buckets of {} "
              "minutes".format(self.options.buckettime))
        if self.options.graph:
            print("Saving a graph to {}".format(self.options.graph))
        print()

    trafficfilter = TrafficFilter(self.options.trafficfilter)
    self.report_item(profiler, timefilter, trafficfilter,
                     self.options.buckettime, self.options.percentile)
def main(self):
    """Run the default flow-list report and print it as a table."""
    opts = self.options
    timefilter = (TimeFilter.parse_range(opts.timerange) if opts.timerange
                  else TimeFilter(opts.time0, opts.time1))
    trafficexpr = TrafficFilter(opts.trafficexpr)

    c = self.netprofiler.columns
    columns = [c.key.srv_host_ip, c.key.app_info, c.key.start_time,
               c.key.end_time, c.value.s2c_total_bytes,
               c.value.s2c_total_pkts, c.value.response_time,
               c.value.server_delay]

    report = TrafficFlowListReport(self.netprofiler)
    report.run(columns, timefilter=timefilter, trafficexpr=trafficexpr)
    data = report.get_data()
    report.delete()

    headers = [col.key for col in columns]
    Formatter.print_table(data, headers)
def main(self):
    """Run an identity report and print login activity, optionally with
    per-user traffic data."""
    opts = self.options
    if opts.timerange:
        timefilter = TimeFilter.parse_range(opts.timerange)
    else:
        timefilter = TimeFilter(opts.time0, opts.time1)
    trafficexpr = TrafficFilter(opts.trafficexpr) if opts.trafficexpr else None

    legend_columns, all_data = self.identity_report(timefilter=timefilter,
                                                    trafficexpr=trafficexpr,
                                                    testfile=opts.testfile)
    legend, activity = self.analyze_login_data(all_data, legend_columns)

    # Choose the report flavor based on the flags and available activity.
    if activity and opts.timeseries_report:
        headers, tbl_data = self.generate_traffic(activity, legend,
                                                  'timeseries')
    elif activity and opts.summary_report:
        headers, tbl_data = self.generate_traffic(activity, legend,
                                                  'summary')
    else:
        headers = ('Host IP', 'Login Time', 'Logout Time', 'Duration')
        tbl_data = [(x[0], format_time(x[1]), format_time(x[2]), x[3])
                    for x in activity]

    # Output format selection.
    if opts.csv:
        Formatter.print_csv(tbl_data, headers)
    elif opts.tsv:
        Formatter.print_csv(tbl_data, headers, delim='\t')
    else:
        Formatter.print_table(tbl_data, headers)
def _prepare_report_args(self):
    """Collect report arguments from the job's criteria into a namespace.

    Returns an ``Args`` object carrying profiler, columns, sortcol,
    timefilter, datafilter, trafficexpr, resolution, limit and
    centricity — or ``False`` when no NetProfiler device is selected.
    NOTE(review): the mixed return type (object vs. False) must be
    handled by callers — confirm they check for False.
    """
    # Plain namespace object to hang the collected arguments off.
    class Args(object):
        pass
    args = Args()

    criteria = self.job.criteria

    if criteria.netprofiler_device == '':
        logger.debug('%s: No netprofiler device selected' % self.table)
        self.job.mark_error("No NetProfiler Device Selected")
        return False

    args.profiler = DeviceManager.get_device(criteria.netprofiler_device)

    # Only real (non-synthetic) table columns are sent to the NetProfiler.
    args.columns = [col.name
                    for col in self.table.get_columns(synthetic=False)]

    args.sortcol = None
    if self.table.sortcols is not None:
        args.sortcol = self.table.sortcols[0]

    args.timefilter = TimeFilter(start=criteria.starttime,
                                 end=criteria.endtime)

    logger.info("Running NetProfiler table %d report for timeframe %s" %
                (self.table.id, str(args.timefilter)))

    if ('datafilter' in criteria) and (criteria.datafilter is not None):
        args.datafilter = criteria.datafilter.split(',')
    else:
        args.datafilter = None

    args.trafficexpr = TrafficFilter(
        self.job.combine_filterexprs(
            exprs=criteria.netprofiler_filterexpr))

    # Incoming criteria.resolution is a timedelta
    logger.debug('NetProfiler report got criteria resolution %s (%s)' %
                 (criteria.resolution, type(criteria.resolution)))
    if criteria.resolution != 'auto':
        # Map the timedelta's seconds onto a NetProfiler resolution name.
        rsecs = int(timedelta_total_seconds(criteria.resolution))
        args.resolution = Report.RESOLUTION_MAP[rsecs]
    else:
        args.resolution = 'auto'
    logger.debug('NetProfiler report using resolution %s (%s)' %
                 (args.resolution, type(args.resolution)))

    # Row limit is optional on the table's options.
    args.limit = (self.table.options.limit
                  if hasattr(self.table.options, 'limit') else None)

    # Interface-based tables report with interface centricity, host otherwise.
    if getattr(self.table.options, 'interface', False):
        args.centricity = 'int'
    else:
        args.centricity = 'hos'

    return args
def main(self):
    """Build a host group from a traffic summary report.

    Runs a summary report of all hosts in use, converts the rows into
    host-group config entries, and writes them to the host group type
    named in ``options.group`` (``type:group``).

    Fixed: under Python 3, ``filter()`` returns an iterator, so the
    subsequent ``config.extend(...)`` raised AttributeError — replaced
    with a list comprehension. Also removed an unused ``enumerate``
    index and a ``range(len(...))`` loop.
    """
    # Create and run a traffic summary report of all hosts in use
    # and then take that data and send it to a specified host group
    report = TrafficSummaryReport(self.netprofiler)
    report.run(
        groupby=self.netprofiler.groupbys.host,
        columns=[self.netprofiler.columns.key.host_ip,
                 self.netprofiler.columns.key.group_name],
        sort_col=self.netprofiler.columns.key.group_name,
        timefilter=TimeFilter.parse_range(self.options.timefilter),
        trafficexpr=TrafficFilter(self.options.trafficexpr)
    )
    data = report.get_data()

    # Grab the type_name and group_name from options.group
    (type_name, group_name) = self.options.group.split(':', 1)

    # Put the report data in config-readable format (one /32 per host).
    new_config_entries = [{'cidr': row[0] + '/32', 'name': group_name}
                          for row in data]

    # Make sure that if there were no entries returned,
    # we don't overwrite the old data
    if not new_config_entries:
        print('ERROR: Report returned zero hosts for supplied parameters')
        return

    # Get the ID of the host type specified by name
    host_types = self.netprofiler.api.host_group_types.get_all()
    target_type_id = -1
    for host_type in host_types:
        if type_name == host_type['name']:
            target_type_id = host_type['id']
            break

    # If target_type_id is still -1, then we didn't find that host
    if target_type_id == -1:
        print('ERROR: Host Group Type: "' + type_name + '" was not found.')
        return

    # Get the current config from the target host group
    config = self.netprofiler.api.host_group_types.get_config(target_type_id)
    old_config_size = len(config)

    # Unless appending, drop existing entries for this group first.
    # (list comprehension, not filter(): filter() has no .extend in Py3)
    if self.options.append is False:
        config = [a for a in config if a['name'] != group_name]
    config.extend(new_config_entries)
    new_config_size = len(config)

    self.netprofiler.api.host_group_types.set_config(target_type_id, config)
    print("Successfully updated type: " + type_name
          + ", group: " + group_name)
    print("The old config had " + str(old_config_size)
          + " elements. It now has " + str(new_config_size) + " elements.\n")
def test_timefilter(self):
    """TimeFilter comparisons and minute-bucket expansion."""
    tfilter = TimeFilter.parse_range('9:01:36 to 10:04:39')

    inside = tfilter.start.replace(minute=33, second=59)
    self.assertTrue(tfilter.compare_time(inside))
    outside = tfilter.end.replace(minute=44)
    self.assertFalse(tfilter.compare_time(outside))

    # 9:01 through 10:04 inclusive is 64 one-minute buckets,
    # regardless of the output representation requested.
    self.assertEqual(len(tfilter.profiler_minutes()), 64)
    self.assertEqual(len(tfilter.profiler_minutes(astimestamp=True)), 64)
    self.assertEqual(len(tfilter.profiler_minutes(aslocal=True)), 64)

    # A sub-minute range still yields a single bucket.
    tfilter = TimeFilter.parse_range('9:01:36 to 9:02:33')
    self.assertEqual(len(tfilter.profiler_minutes()), 1)
def main(self):
    """Top-N server ports by average bytes, then a per-port time series."""
    netprof = self.netprofiler
    timefilter = TimeFilter.parse_range(self.options.timefilter)

    # First pass: summary of all server ports in use, sorted by avg bytes.
    summary = TrafficSummaryReport(netprof)
    summary.run(groupby=netprof.groupbys.port,
                columns=[netprof.columns.key.protoport,
                         netprof.columns.key.protocol,
                         netprof.columns.key.port,
                         netprof.columns.value.avg_bytes],
                sort_col=netprof.columns.value.avg_bytes,
                timefilter=timefilter)
    ports_data = summary.get_data()[:int(self.options.N)]
    summary.delete()

    # The format of the query_columns for 'ports' is:
    #   'ports' = [{'name': 'tcp/80'}, {'name': 'tcp/443'},
    #              {'name': 'icmp/0'}]
    # For most protocols this works straight from the report data, but for
    # icmp the data value is 'icmp/0/0' -- where the two zeros are type and
    # code. NetProfiler instead expects type and code smushed into a single
    # 16-bit number (type << 8 | code).
    query_columns = []
    for protoport, protocol, port, avgbytes in ports_data:
        if protoport.startswith('icmp'):
            protoport = 'icmp/%s' % (port)
        query_columns.append({'name': protoport})

    # Second pass: one time-series column per selected port.
    ts_report = TrafficTimeSeriesReport(netprof)
    ts_report.run(columns=[netprof.columns.key.time,
                           netprof.columns.value.avg_bytes],
                  resolution='1 min',
                  query_columns_groupby='ports',
                  query_columns=query_columns,
                  timefilter=timefilter)
    data = ts_report.get_data()

    headers = ['time'] + [q['name'] for q in query_columns]
    Formatter.print_table(data, padding=1, headers=headers)
def generate_traffic(self, activity, legend_keys, report_type):
    """ Generate traffic data during the time the user was logged-in.

    :param activity: iterable of login events; each event's first three
        items are (host-or-"host|dns", start, end).
    :param legend_keys: event-field key names, prepended to the returned
        traffic legend.
    :param report_type: 'timeseries' or 'summary'.
    :returns: (legend, combined_activity).

    Fixes:
      * ``itertools.izip`` (Python 2 only) replaced with builtin ``zip``
        (equivalent on both interpreters).
      * ``legend`` is initialized before the loop so a run served
        entirely from cache (or an empty ``activity``) no longer raises
        NameError when the legend is read afterwards.
    """
    cache = {}
    combined_activity = []
    legend = []
    for event in activity:
        # handle dns names in host along with IP address
        host = event[0].split('|', 1)[0]
        timefilter = TimeFilter(string_to_datetime(event[1]),
                                string_to_datetime(event[2]))
        # if event occurs in less than a minute, add extra minute to report
        while len(timefilter.profiler_minutes()) == 1:
            timefilter.end += datetime.timedelta(minutes=1)
        # normalize times to minute increments
        mins = timefilter.profiler_minutes()
        tf = TimeFilter(mins[0], mins[-1])

        if self.options.usecache and report_type == 'timeseries':
            # only consider a hit when whole time period is covered
            minutes = tf.profiler_minutes(astimestamp=True)
            if host in cache and all(t in cache[host] for t in minutes):
                data = [cache[host][t] for t in minutes]
            else:
                legend, data = self.traffic_report(host, tf, report_type)
                # store results in cache by host->times->data
                cache.setdefault(host, {}).update(
                    (int(x[0]), x) for x in data)
        else:
            legend, data = self.traffic_report(host, tf, report_type)

        if data:
            if self.options.aggregate and report_type == 'timeseries':
                # generate running averages over data samples received:
                # first convert empty strings to zeros, then aggregate
                columns = [[0 if x == '' else x for x in c]
                           for c in zip(*data)]
                aggmap = [x[1] for x in TCOLUMNS]
                aggregates = [aggmap[i](x) for i, x in enumerate(columns)]
                combined_activity.append(list(event) + aggregates)
            elif report_type == 'timeseries' or report_type == 'summary':
                # create entry for each element in report
                for row in data:
                    r = ['--' if x == '' else x for x in row]
                    combined_activity.append(list(event) + r)
            else:
                raise RuntimeError('unknown report type: %s' % report_type)
        else:
            # populate result with blanks
            combined_activity.append(list(event) + ['--'] * len(legend))

    traffic_legend = [c.key for c in legend]
    legend = legend_keys + traffic_legend
    return legend, combined_activity
def main(self):
    """Report the busiest server ports, then chart them over time."""
    netprof = self.netprofiler
    tf = TimeFilter.parse_range(self.options.timefilter)
    key, value = netprof.columns.key, netprof.columns.value

    report = TrafficSummaryReport(netprof)
    report.run(groupby=netprof.groupbys.port,
               columns=[key.protoport, key.protocol, key.port,
                        value.avg_bytes],
               sort_col=value.avg_bytes,
               timefilter=tf)
    top_ports = report.get_data()[:int(self.options.N)]
    report.delete()

    # query_columns for 'ports' looks like:
    #   [{'name': 'tcp/80'}, {'name': 'tcp/443'}, {'name': 'icmp/0'}]
    # Most protocols come straight from the report data; icmp arrives as
    # 'icmp/0/0' (type and code), but netprofiler expects type and code
    # smushed into a single 16-bit number (type << 8 | code).
    query_columns = []
    for protoport, protocol, port, avgbytes in top_ports:
        if protoport.startswith('icmp'):
            protoport = 'icmp/%s' % (port)
        query_columns.append({'name': protoport})

    report = TrafficTimeSeriesReport(netprof)
    report.run(columns=[key.time, value.avg_bytes],
               resolution='1 min',
               query_columns_groupby='ports',
               query_columns=query_columns,
               timefilter=tf)
    data = report.get_data()

    Formatter.print_table(
        data,
        padding=1,
        headers=['time'] + [q['name'] for q in query_columns])
def test_identity_report(self):
    """Identity report over the last 30 minutes has the expected legend."""
    timerange = TimeFilter.parse_range('last 30 m')
    with IdentityReport(self.profiler) as report:
        report.run(timefilter=timerange)
        keys = [c.key for c in report.get_legend()]
        data = report.get_data()
    self.assertTrue('time' in keys)
    self.assertTrue('username' in keys)
    if data:
        self.assertEqual(len(data[0]), 9)
def test_report_with_area(self):
    """A summary report restricted to a NetProfiler area runs cleanly."""
    p = self.profiler
    columns = [p.columns.key.host_ip,
               p.columns.value.avg_bytes,
               p.columns.value.avg_pkts]
    with TrafficSummaryReport(p) as rep:
        rep.run(p.groupbys.host,
                columns,
                p.columns.value.avg_bytes,
                TimeFilter.parse_range("last 1 h"),
                TrafficFilter("host 10/8"),
                area=p.areas.vxlan_tenant)
def main(self):
    """Run a host time-series report and print the results."""
    self.timefilter = TimeFilter.parse_range(self.options.timefilter)
    self.trafficexpr = (TrafficFilter(self.options.trafficexpr)
                        if self.options.trafficexpr else None)
    with HostTimeSeriesReport(self.netprofiler) as report:
        report.run(timefilter=self.timefilter,
                   trafficexpr=self.trafficexpr)
        data = report.get_data()
        legend = [c.label for c in report.get_legend()]
        self.print_data(data, legend)
def main(self):
    """ Setup query and run report with default column set """
    timefilter = (TimeFilter.parse_range(self.options.timerange)
                  if self.options.timerange
                  else TimeFilter(self.options.time0, self.options.time1))
    if self.options.trafficexpr:
        trafficexpr = TrafficFilter(self.options.trafficexpr)
    else:
        trafficexpr = None

    legend_columns, all_data = self.identity_report(
        timefilter=timefilter,
        trafficexpr=trafficexpr,
        testfile=self.options.testfile)
    legend, activity = self.analyze_login_data(all_data, legend_columns)

    # Pick the report flavor from the flags and available activity.
    if activity and self.options.timeseries_report:
        headers, tbl_data = self.generate_traffic(activity, legend,
                                                  'timeseries')
    elif activity and self.options.summary_report:
        headers, tbl_data = self.generate_traffic(activity, legend,
                                                  'summary')
    else:
        headers = ('Host IP', 'Login Time', 'Logout Time', 'Duration')
        tbl_data = [(row[0], format_time(row[1]), format_time(row[2]),
                     row[3]) for row in activity]

    # Output format selection.
    if self.options.csv:
        Formatter.print_csv(tbl_data, headers)
    elif self.options.tsv:
        Formatter.print_csv(tbl_data, headers, delim='\t')
    else:
        Formatter.print_table(tbl_data, headers)
def main(self):
    """Overall time-series report using the configured columns."""
    self.timefilter = TimeFilter.parse_range(self.options.timefilter)
    if self.options.trafficexpr:
        self.trafficexpr = TrafficFilter(self.options.trafficexpr)
    else:
        self.trafficexpr = None
    requested_columns = self.options.columns.split(',')
    with TrafficOverallTimeSeriesReport(self.netprofiler) as report:
        report.run(columns=requested_columns,
                   timefilter=self.timefilter,
                   trafficexpr=self.trafficexpr,
                   centricity=self.centricity)
        data = report.get_data()
        legend = [c.label for c in report.get_legend()]
        self.print_data(data, legend)
def main(self):
    """Run a flow-list report over the requested columns and print it."""
    self.timefilter = TimeFilter.parse_range(self.options.timefilter)
    self.trafficexpr = (TrafficFilter(self.options.trafficexpr)
                        if self.options.trafficexpr else None)
    with TrafficFlowListReport(self.netprofiler) as report:
        report.run(columns=self.options.columns.split(','),
                   sort_col=self.options.sortby,
                   timefilter=self.timefilter,
                   trafficexpr=self.trafficexpr)
        data = report.get_data()
        legend = [c.label for c in report.get_legend()]
        self.print_data(data, legend)
def setUp(self):
    """Create a profiler and a TimeFilter spanning 4pm-5pm yesterday."""
    self.profiler = create_profiler()
    y = datetime.datetime.now() - datetime.timedelta(days=1)
    # Both endpoints carry microsecond=1, i.e. just past the exact minute.
    start = datetime.datetime(y.year, y.month, y.day,
                              hour=16, minute=0, microsecond=1)
    end = datetime.datetime(y.year, y.month, y.day,
                            hour=17, minute=0, microsecond=1)
    self.yesterday = TimeFilter(start, end)
def main(self):
    """Print 15 minutes of overall avg bytes and network RTT for 10/8."""
    cols = self.netprofiler.columns
    report = TrafficOverallTimeSeriesReport(self.netprofiler)
    report.run(
        columns=[cols.key.time,
                 cols.value.avg_bytes,
                 cols.value.network_rtt],
        timefilter=TimeFilter.parse_range("last 15 m"),
        trafficexpr=TrafficFilter("host 10/8")
    )
    data = report.get_data()
    pprint.PrettyPrinter(2).pprint(data)
def main(self):
    """Time-series of avg bytes and network RTT for hosts in 10/8."""
    timefilter = TimeFilter.parse_range("last 15 m")
    trafficexpr = TrafficFilter("host 10/8")

    report = TrafficOverallTimeSeriesReport(self.netprofiler)
    report.run(columns=[self.netprofiler.columns.key.time,
                        self.netprofiler.columns.value.avg_bytes,
                        self.netprofiler.columns.value.network_rtt],
               timefilter=timefilter,
               trafficexpr=trafficexpr)

    printer = pprint.PrettyPrinter(2)
    printer.pprint(report.get_data())
def test_report_with_area(self):
    """TrafficSummaryReport accepts an ``area`` keyword."""
    groupby = self.profiler.groupbys.host
    sort_col = self.profiler.columns.value.avg_bytes
    columns = [self.profiler.columns.key.host_ip,
               self.profiler.columns.value.avg_bytes,
               self.profiler.columns.value.avg_pkts]
    timerange = TimeFilter.parse_range("last 1 h")
    trafficexpr = TrafficFilter("host 10/8")
    area = self.profiler.areas.vxlan_tenant
    with TrafficSummaryReport(self.profiler) as rep:
        rep.run(groupby, columns, sort_col, timerange, trafficexpr,
                area=area)
def test_unsupported_column(self): groupby = self.profiler.groupbys.port # host_ip shouldn't be included as part of 'port' groupby columns = [self.profiler.columns.key.host_ip, self.profiler.columns.value.avg_bytes, self.profiler.columns.value.avg_pkts] sort_col = self.profiler.columns.value.avg_bytes timerange = TimeFilter.parse_range("last 1 h") trafficexpr = TrafficFilter("host 10/8") report = TrafficSummaryReport(self.profiler) kwds = dict(groupby=groupby, columns=columns, sort_col=sort_col, timefilter=timerange, trafficexpr=trafficexpr) self.assertRaises(RvbdException, report.run, None, kwds)
def main(self):
    """Print the top server ports (by avg bytes) used by hosts in 10/8."""
    cols = self.netprofiler.columns
    report = TrafficSummaryReport(self.netprofiler)
    report.run(groupby=self.netprofiler.groupbys.port,
               columns=[cols.key.protoport,
                        cols.key.protoport_name,
                        cols.value.avg_bytes,
                        cols.value.network_rtt],
               sort_col=cols.value.avg_bytes,
               timefilter=TimeFilter.parse_range("last 15 m"),
               trafficexpr=TrafficFilter("host 10/8"))
    data = report.get_data()
    # Only the first 20 rows are shown.
    pprint.PrettyPrinter(2).pprint(data[:20])
def test_unsupported_column(self): groupby = self.profiler.groupbys.port # host_ip shouldn't be included as part of 'port' groupby columns = [ self.profiler.columns.key.host_ip, self.profiler.columns.value.avg_bytes, self.profiler.columns.value.avg_pkts ] sort_col = self.profiler.columns.value.avg_bytes timerange = TimeFilter.parse_range("last 1 h") trafficexpr = TrafficFilter("host 10/8") report = TrafficSummaryReport(self.profiler) kwds = dict(groupby=groupby, columns=columns, sort_col=sort_col, timefilter=timerange, trafficexpr=trafficexpr) self.assertRaises(RvbdException, report.run, None, kwds)
def main(self):
    """Summary of server ports in use by hosts in 10/8 (top 20 printed)."""
    timefilter = TimeFilter.parse_range("last 15 m")
    trafficexpr = TrafficFilter("host 10/8")

    report = TrafficSummaryReport(self.netprofiler)
    report.run(
        groupby=self.netprofiler.groupbys.port,
        columns=[self.netprofiler.columns.key.protoport,
                 self.netprofiler.columns.key.protoport_name,
                 self.netprofiler.columns.value.avg_bytes,
                 self.netprofiler.columns.value.network_rtt],
        sort_col=self.netprofiler.columns.value.avg_bytes,
        timefilter=timefilter,
        trafficexpr=trafficexpr
    )

    printer = pprint.PrettyPrinter(2)
    printer.pprint(report.get_data()[:20])
def get_interfaces(self, device_ip):
    """ Query netprofiler to attempt to automatically determine
    LAN and WAN interface ids for ``device_ip``.

    Raises RvbdException when either side cannot be identified.
    """
    cols = self.profiler.get_columns(['interface_dns', 'interface'])
    super(WANReport, self).run(
        realm='traffic_summary',
        groupby='ifc',
        columns=cols,
        timefilter=TimeFilter.parse_range('last 1 h'),
        trafficexpr=TrafficFilter('device %s' % device_ip),
        centricity='int',
        resolution='auto',
        sync=True)
    interfaces = self._get_data()

    # Interface DNS names are expected to contain 'lan' or 'wan'.
    lan = [address for name, address in interfaces if 'lan' in name]
    wan = [address for name, address in interfaces if 'wan' in name]

    if not lan or not wan:
        raise RvbdException(
            'Unable to determine LAN and WAN interfaces for device %s'
            % device_ip)
    return lan, wan
def test_traffic_flow_list_report(self):
    """Flow-list report returns all 8 requested columns."""
    k = self.profiler.columns.key
    v = self.profiler.columns.value
    columns = [k.srv_host_ip, k.app_info, k.start_time, k.end_time,
               v.s2c_total_bytes, v.s2c_total_pkts,
               v.response_time, v.server_delay]
    with TrafficFlowListReport(self.profiler) as report:
        report.run(columns,
                   timefilter=TimeFilter.parse_range("last 1 h"),
                   trafficexpr=TrafficFilter("host 10/8"))
        keys = [c.key for c in report.get_legend()]
        self.assertTrue('app_info' in keys)
        data = report.get_data()
        if data:
            self.assertEqual(len(data[0]), 8)
def test_traffic_overall_time_series_report(self):
    """Each returned row's timestamp falls inside the requested range."""
    columns = [self.profiler.columns.key.time,
               self.profiler.columns.value.avg_bytes,
               self.profiler.columns.value.avg_pkts]
    timerange = TimeFilter.parse_range("last 1 h")

    report = TrafficOverallTimeSeriesReport(self.profiler)
    report.run(columns, timerange, TrafficFilter("host 10/8"),
               resolution="15min")
    keys = [c.key for c in report.get_legend()]
    for row in report.get_data():
        record = dict(zip(keys, row))
        # resolution assumes 15-minute responses
        self.assertTrue(timerange.compare_time(record['time'],
                                               resolution=15 * 60))
    report.delete()
def _fill_criteria(self, **kwargs):
    """Normalize criteria kwargs, then delegate to the base class."""
    # When a timefilter expression is passed in (and no explicit
    # start/end pair), convert it to start_time/end_time.
    if ('timefilter' in kwargs
            and not ('start_time' in kwargs and 'end_time' in kwargs)):
        timefilter = TimeFilter.parse_range(kwargs.pop('timefilter'))
        kwargs['start_time'] = timefilter.start
        kwargs['end_time'] = timefilter.end

    # Times are sent over the wire as epoch seconds.
    for name in ('start_time', 'end_time'):
        if name in kwargs:
            kwargs[name] = datetime_to_seconds(kwargs[name])

    if kwargs.get('devices'):
        kwargs['devices'] = kwargs['devices'].split(',')
    if kwargs.get('port'):
        kwargs['port'] = int(kwargs['port'])

    super(BaseStatsReport, self)._fill_criteria(**kwargs)
def test_traffic_summary_report(self):
    """Summary by host: full and restricted legends, cached data refetch."""
    key = self.profiler.columns.key
    value = self.profiler.columns.value
    with TrafficSummaryReport(self.profiler) as rep:
        rep.run(self.profiler.groupbys.host,
                [key.host_ip, value.avg_bytes, value.avg_pkts],
                value.avg_bytes,
                TimeFilter.parse_range("last 1 h"),
                TrafficFilter("host 10/8"))

        self.assertEqual(len(rep.get_legend()), 3)

        legend = rep.get_legend(columns=[key.host_ip, value.avg_bytes])
        self.assertEqual(len(legend), 2)
        self.assertEqual(legend[0].key, 'host_ip')
        self.assertEqual(legend[1].key, 'avg_bytes')

        data = rep.get_data()
        if data:
            self.assertEqual(len(data[0]), 3)

        # check that data is refetched from cache
        data = rep.get_data()
        data = rep.get_data(columns=[key.host_ip, value.avg_bytes])
        if data:
            self.assertEqual(len(data[0]), 2)
def test_traffic_flow_list_report(self):
    """Run a flow list report and sanity-check legend keys and row width."""
    cols = self.profiler.columns
    columns = [cols.key.srv_host_ip,
               cols.key.app_info,
               cols.key.start_time,
               cols.key.end_time,
               cols.value.s2c_total_bytes,
               cols.value.s2c_total_pkts,
               cols.value.response_time,
               cols.value.server_delay]

    with TrafficFlowListReport(self.profiler) as report:
        report.run(columns,
                   timefilter=TimeFilter.parse_range("last 1 h"),
                   trafficexpr=TrafficFilter("host 10/8"))

        keys = [c.key for c in report.get_legend()]
        self.assertTrue('app_info' in keys)

        data = report.get_data()
        if data:
            self.assertEqual(len(data[0]), 8)
def main(self):
    """Resolve the target device, then run and print WAN traffic reports.

    The device IP and LAN/WAN interfaces come from (in priority order):
    explicit --wan/--lan addresses, a device-name lookup against the
    NetProfiler device list, or a plain device address option.
    Exits with status 1 when a named device cannot be found.
    """
    self.ip_address = None
    self.lan_address = None
    self.wan_address = None
    self.timefilter = TimeFilter.parse_range(self.options.timefilter)

    if self.options.wan_address and self.options.lan_address:
        # explicit interfaces: the device IP is the host part of the
        # 'ip:ifindex'-style WAN address
        self.ip_address = self.options.wan_address.split(':')[0]
        self.lan_address = [self.options.lan_address]
        self.wan_address = [self.options.wan_address]
    elif self.options.device_name:
        # case-insensitive substring match against the device list
        name = self.options.device_name
        devices = self.netprofiler.api.devices.get_all()
        for d in devices:
            if name.lower() in d['name'].lower():
                self.ip_address = d['ipaddr']
                break
        else:
            # for/else: loop finished without a match
            print('Device {0} cannot be found in NetProfiler device list\n'
                  'Try specifying the name differently or use an IP '
                  'address'.format(name))
            sys.exit(1)
    else:
        self.ip_address = self.options.device_address

    if self.options.summary:
        self.columns = ['device', 'avg_bytes', 'total_bytes']
        self.groupby = 'dev'
        ReportClass = WANSummaryReport
    else:
        # Time Series report
        self.columns = ['time', 'avg_bytes', 'total_bytes']
        self.groupby = None
        ReportClass = WANTimeSeriesReport

    with ReportClass(self.netprofiler) as report:
        if not self.lan_address:
            # query for the interfaces
            self.lan_address, self.wan_address = report.get_interfaces(
                self.ip_address)

        if self.options.out_inbound or self.options.out_combined:
            # inbound
            report.run(self.lan_address, self.wan_address, 'inbound',
                       columns=self.columns, groupby=self.groupby,
                       timefilter=self.timefilter, resolution='auto')
            inbound = report.get_data(as_list=False)

            if self.options.out_inbound:
                header = 'Inbound traffic:'
                self.print_data(inbound, header)

        if self.options.out_outbound or self.options.out_combined:
            # outbound
            report.run(self.lan_address, self.wan_address, 'outbound',
                       columns=self.columns, groupby=self.groupby,
                       timefilter=self.timefilter, resolution='auto')
            outbound = report.get_data(as_list=False)

            if self.options.out_outbound:
                header = 'Outbound traffic:'
                self.print_data(outbound, header)

        if self.options.out_combined:
            # NOTE(review): assumes get_data(as_list=False) returns
            # objects that support '+' (presumably pandas frames) — confirm
            header = 'Combined Inbound/Outbound traffic:'
            total = inbound + outbound
            self.print_data(total, header)
def setUp(self):
    """Create a one-hour TimeFilter (4pm-5pm on a fixed 2019 date)."""
    start = datetime.datetime(2019, 2, 8, hour=16, minute=0, microsecond=1)
    end = datetime.datetime(2019, 2, 8, hour=17, minute=0, microsecond=1)
    self.yesterday = TimeFilter(start, end)
#!/usr/bin/env python # Copyright (c) 2015 Riverbed Technology, Inc. # # This software is licensed under the terms and conditions of the MIT License # accompanying the software ("License"). This software is distributed "AS IS" # as set forth in the License. from steelscript.netprofiler.core.app import NetProfilerApp from steelscript.netprofiler.core.services import ServiceLocationReport from steelscript.netprofiler.core.filters import TimeFilter import pprint app = NetProfilerApp() app.run() # Create and run a traffic summary report of all server ports in use # by hosts in 10/8 report = ServiceLocationReport(app.netprofiler) # Run the report report.run(timefilter=TimeFilter.parse_range("last 1h")) # Retrieve and print data data = report.get_data() printer = pprint.PrettyPrinter(2) printer.pprint(data[:20])
# connection information -- fill these in before running the script
username = '******'
password = '******'
host = '<netprofiler.ip.address>'

# Bail out early while any placeholder value is still in place.
# BUGFIX: the guard previously compared username/password against
# '<username>'/'<password>', which never matched the '******' values
# assigned above, so those two checks were dead code.
if (username == '******' or
        password == '******' or
        host == '<netprofiler.ip.address>'):
    print("Update the username, password, and netprofiler host values "
          "before running this script.")
    sys.exit(0)

auth = UserAuth(username, password)
p = NetProfiler(host, auth=auth)

report = TrafficSummaryReport(p)

# report parameters: top hosts by average bytes, with network RTT
columns = [p.columns.key.host_ip,
           p.columns.value.avg_bytes,
           p.columns.value.network_rtt]
sort_column = p.columns.value.avg_bytes
timefilter = TimeFilter.parse_range("last 15 m")

# 'hos' is the NetProfiler groupby identifier for host
report.run('hos', columns, timefilter=timefilter, sort_col=sort_column)

data = report.get_data()
legend = report.get_legend()
report.delete()

pprint.pprint(data[:10])
def run(self, template_id, timefilter=None, resolution="auto", query=None,
        trafficexpr=None, data_filter=None, sync=True,
        custom_criteria=None):
    """Create the report and begin running the report on NetProfiler.

    If the `sync` option is True, periodically poll until the report is
    complete, otherwise return immediately.

    :param int template_id: numeric id of the template to use for the
        report
    :param timefilter: range of time to query, instance of
        :class:`TimeFilter`
    :param str resolution: data resolution, such as (1min, 15min, etc.),
        defaults to 'auto'
    :param str query: query object containing criteria
    :param trafficexpr: instance of :class:`TrafficFilter`
    :param str data_filter: deprecated filter to run against report data
    :param bool sync: if True, poll for status until the report is
        complete

    :raises ValueError: if the creation response has no report id
    """
    self.template_id = template_id

    # default to the last 5 minutes when no time range is given
    if timefilter is None:
        self.timefilter = TimeFilter.parse_range("last 5 min")
    else:
        self.timefilter = timefilter
    self.query = query
    self.trafficexpr = trafficexpr
    self.data_filter = data_filter

    self.id = None
    self.queries = list()
    self.last_status = None

    # map a non-standard resolution (e.g. '2min') onto a supported bucket
    if resolution not in ["auto", "1min", "15min", "hour",
                          "6hour", "day", "week", "month"]:
        rd = parse_timedelta(resolution)
        resolution = self.RESOLUTION_MAP[int(timedelta_total_seconds(rd))]

    self.resolution = resolution

    start = datetime_to_seconds(self.timefilter.start)
    end = datetime_to_seconds(self.timefilter.end)

    criteria = RecursiveUpdateDict(**{"time_frame": {"start": int(start),
                                                     "end": int(end)}})

    if self.query is not None:
        criteria["query"] = self.query

    if self.resolution != "auto":
        criteria["time_frame"]["resolution"] = self.resolution

    if self.data_filter:
        criteria['deprecated'] = {self.data_filter[0]: self.data_filter[1]}

    if self.trafficexpr is not None:
        criteria["traffic_expression"] = self.trafficexpr.filter

    if custom_criteria:
        # BUGFIX: use items() instead of the Python-2-only iteritems()
        # so the method works on both Python 2 and Python 3
        for k, v in custom_criteria.items():
            criteria[k] = v

    to_post = {"template_id": self.template_id,
               "criteria": criteria}

    logger.debug("Posting JSON: %s" % to_post)

    response = self.profiler.api.report.reports(data=to_post)

    try:
        self.id = int(response['id'])
    except KeyError:
        raise ValueError("failed to retrieve report id from report "
                         "creation response: %s" % response)

    logger.info("Created report %d" % self.id)

    if sync:
        self.wait_for_complete()
def run(self):
    """ Main execution method.

    Runs a NetProfiler template report for this job's criteria, waits
    for completion, updates the job's actual time range from the report,
    and stores the requested columns as a pandas DataFrame in self.data.
    Returns False (after marking the job errored) when no NetProfiler
    device is selected, True on success.
    """
    criteria = self.job.criteria

    # no device selected is a user error, not an exception
    if criteria.netprofiler_device == '':
        logger.debug('%s: No netprofiler device selected' % self.table)
        self.job.mark_error("No NetProfiler Device Selected")
        return False

    profiler = DeviceManager.get_device(criteria.netprofiler_device)
    report = steelscript.netprofiler.core.report.MultiQueryReport(profiler)

    tf = TimeFilter(start=criteria.starttime, end=criteria.endtime)

    logger.info("Running NetProfilerTemplateTable table %d report "
                "for timeframe %s" % (self.table.id, str(tf)))

    trafficexpr = TrafficFilter(
        self.job.combine_filterexprs(exprs=criteria.profiler_filterexpr))

    # Incoming criteria.resolution is a timedelta
    logger.debug('NetProfiler report got criteria resolution %s (%s)' %
                 (criteria.resolution, type(criteria.resolution)))
    if criteria.resolution != 'auto':
        # convert the timedelta to one of the named resolution buckets
        rsecs = int(timedelta_total_seconds(criteria.resolution))
        resolution = \
            steelscript.netprofiler.core.report.Report.RESOLUTION_MAP[rsecs]
    else:
        resolution = 'auto'

    logger.debug('NetProfiler report using resolution %s (%s)' %
                 (resolution, type(resolution)))

    # lock serializes access to the shared profiler connection
    with lock:
        res = report.run(template_id=self.table.options.template_id,
                         timefilter=tf,
                         trafficexpr=trafficexpr,
                         resolution=resolution)

    if res is True:
        logger.info("Report template complete.")
        self.job.safe_update(progress=100)

    # Retrieve the data
    with lock:
        query = report.get_query_by_index(0)
        data = query.get_data()
        headers = report.get_legend()

    # preserve the original timezone when rewriting the actual times
    tz = criteria.starttime.tzinfo

    # Update criteria with the time range the report actually covered
    criteria.starttime = (datetime.datetime.utcfromtimestamp(
        query.actual_t0).replace(tzinfo=tz))
    criteria.endtime = (datetime.datetime.utcfromtimestamp(
        query.actual_t1).replace(tzinfo=tz))

    self.job.safe_update(actual_criteria=criteria)

    # create dataframe with all of the default headers
    df = pandas.DataFrame(data, columns=[h.key for h in headers])

    # now filter down to the columns requested by the table
    columns = [col.name for col in self.table.get_columns(synthetic=False)]
    self.data = df[columns]

    logger.info("Report %s returned %s rows" % (self.job, len(self.data)))
    return True
def run(self, template_id, timefilter=None, resolution="auto", query=None,
        trafficexpr=None, data_filter=None, sync=True,
        custom_criteria=None):
    """Create the report and begin running the report on NetProfiler.

    If the `sync` option is True, periodically poll until the report is
    complete, otherwise return immediately.

    :param int template_id: numeric id of the template to use for the
        report
    :param timefilter: range of time to query, instance of
        :class:`TimeFilter`
    :param str resolution: data resolution, such as (1min, 15min, etc.),
        defaults to 'auto'
    :param str query: query object containing criteria
    :param trafficexpr: instance of :class:`TrafficFilter`
    :param str data_filter: deprecated filter to run against report data
    :param bool sync: if True, poll for status until the report is
        complete

    :raises ValueError: if the creation response has no report id
    """
    self.template_id = template_id

    # default to the last 5 minutes when no time range is given
    if timefilter is None:
        self.timefilter = TimeFilter.parse_range("last 5 min")
    else:
        self.timefilter = timefilter
    self.query = query
    self.trafficexpr = trafficexpr
    self.data_filter = data_filter

    self.id = None
    self.queries = list()
    self.last_status = None

    # map a non-standard resolution (e.g. '2min') onto a supported bucket
    if resolution not in ["auto", "1min", "15min", "hour",
                          "6hour", "day", "week", "month"]:
        rd = parse_timedelta(resolution)
        resolution = self.RESOLUTION_MAP[int(timedelta_total_seconds(rd))]

    self.resolution = resolution

    start = datetime_to_seconds(self.timefilter.start)
    end = datetime_to_seconds(self.timefilter.end)

    criteria = RecursiveUpdateDict(**{"time_frame": {"start": int(start),
                                                     "end": int(end)}})

    if self.query is not None:
        criteria["query"] = self.query

    if self.resolution != "auto":
        criteria["time_frame"]["resolution"] = self.resolution

    if self.data_filter:
        criteria['deprecated'] = {self.data_filter[0]: self.data_filter[1]}

    if self.trafficexpr is not None:
        criteria["traffic_expression"] = self.trafficexpr.filter

    if custom_criteria:
        # BUGFIX: use items() instead of the Python-2-only iteritems()
        # so the method works on both Python 2 and Python 3
        for k, v in custom_criteria.items():
            criteria[k] = v

    to_post = {"template_id": self.template_id,
               "criteria": criteria}

    logger.debug("Posting JSON: %s" % to_post)

    response = self.profiler.api.report.reports(data=to_post)

    try:
        self.id = int(response['id'])
    except KeyError:
        raise ValueError("failed to retrieve report id from report "
                         "creation response: %s" % response)

    logger.info("Created report %d" % self.id)

    if sync:
        self.wait_for_complete()
# Copyright (c) 2015 Riverbed Technology, Inc.
#
# This software is licensed under the terms and conditions of the MIT License
# accompanying the software ("License"). This software is distributed "AS IS"
# as set forth in the License.

"""Run a ServiceLocationReport over the last hour and pretty-print rows."""

import pprint

from steelscript.netprofiler.core.app import NetProfilerApp
from steelscript.netprofiler.core.filters import TimeFilter
from steelscript.netprofiler.core.services import ServiceLocationReport

# NetProfilerApp handles connection options/auth from the command line
app = NetProfilerApp()
app.run()

# Build and run a service location report for the last hour of data
svc_report = ServiceLocationReport(app.netprofiler)
svc_report.run(timefilter=TimeFilter.parse_range("last 1h"))

# Fetch the results and show the first twenty entries
rows = svc_report.get_data()
pprint.PrettyPrinter(indent=2).pprint(rows[:20])
def main(self):
    """Resolve the target device and print WAN traffic reports.

    Interfaces come from explicit options, a device-name lookup, or a
    plain device address; exits with status 1 when a named device is
    not found.
    """
    self.ip_address = None
    self.lan_address = None
    self.wan_address = None
    self.timefilter = TimeFilter.parse_range(self.options.timefilter)

    opts = self.options
    if opts.wan_address and opts.lan_address:
        # explicit interface pair supplied on the command line
        self.ip_address = opts.wan_address.split(':')[0]
        self.lan_address = [opts.lan_address]
        self.wan_address = [opts.wan_address]
    elif opts.device_name:
        # first device whose name contains the option (case-insensitive)
        name = opts.device_name
        match = next((d for d in self.netprofiler.api.devices.get_all()
                      if name.lower() in d['name'].lower()), None)
        if match is None:
            print('Device {0} cannot be found in NetProfiler device list\n'
                  'Try specifying the name differently or use an IP '
                  'address'.format(name))
            sys.exit(1)
        self.ip_address = match['ipaddr']
    else:
        self.ip_address = opts.device_address

    if opts.summary:
        self.columns = ['device', 'avg_bytes', 'total_bytes']
        self.groupby = 'dev'
        report_cls = WANSummaryReport
    else:
        # Time Series report
        self.columns = ['time', 'avg_bytes', 'total_bytes']
        self.groupby = None
        report_cls = WANTimeSeriesReport

    with report_cls(self.netprofiler) as report:
        if not self.lan_address:
            # query for the interfaces
            self.lan_address, self.wan_address = \
                report.get_interfaces(self.ip_address)

        def fetch(direction):
            # run one directional report and return its data
            report.run(self.lan_address, self.wan_address, direction,
                       columns=self.columns, groupby=self.groupby,
                       timefilter=self.timefilter, resolution='auto')
            return report.get_data(as_list=False)

        inbound = outbound = None

        if opts.out_inbound or opts.out_combined:
            inbound = fetch('inbound')
            if opts.out_inbound:
                self.print_data(inbound, 'Inbound traffic:')

        if opts.out_outbound or opts.out_combined:
            outbound = fetch('outbound')
            if opts.out_outbound:
                self.print_data(outbound, 'Outbound traffic:')

        if opts.out_combined:
            self.print_data(inbound + outbound,
                            'Combined Inbound/Outbound traffic:')