def main(self):
    if self.options.list_groupbys:
        header = ["GroupBy", "Id"]
        data = [(k, v) for k, v in self.netprofiler.groupbys.items()]
        data.sort()
        Formatter.print_table(data, header)
    else:
        if self.options.ids:
            columns = self.netprofiler.get_columns_by_ids(self.options.ids)
        else:
            o = self.options
            # find the groupby, looking in both keys and values
            if o.groupby in self.netprofiler.groupbys:
                groupby = self.netprofiler.groupbys[o.groupby]
            elif o.groupby in self.netprofiler.groupbys.values():
                groupby = o.groupby
            else:
                groupby = None

            args = {
                'realms': [o.realm] if o.realm else None,
                'centricities': [o.centricity] if o.centricity else None,
                'groupbys': [groupby] if groupby else None,
            }
            columns = self.netprofiler.search_columns(**args)

        columns.sort(key=lambda x: x.key)
        self.print_columns(columns)
def main(self):
    headers = ['Name', 'Groups', 'Filters Supported on Metric Columns',
               'Granularities in Seconds']

    if self.options.group:
        source_names = report_sources[self.options.group]
    else:
        source_names = report_source_to_groups.keys()

    data = []
    for name in source_names:
        s = self.appresponse.reports.sources[name]
        data.append([s['name'],
                     ', '.join(report_source_to_groups[name]),
                     str(s['filters_on_metrics']),
                     ', '.join(s['granularities'])
                     if s['granularities'] else '---'])

    Formatter.print_table(data, headers, padding=2,
                          max_width=int(self.options.table_width),
                          long_column=1,
                          wrap_columns=(not self.options.truncate))
def main(self):
    instances = self.appresponse.reports.get_instances()
    if instances:
        header = ['id', 'user_agent', 'owner', 'name',
                  'completed?', 'is_live?']
        data = []
        for i in instances:
            data.append((i.data['id'],
                         i.data['user_agent'],
                         i.data['access_rights']['owner'],
                         i.data['info']['name'],
                         i.is_complete(),
                         i.data['live']))
        Formatter.print_table(data, header)

        if not self.options.force:
            if not prompt_yn('\nDelete all these report instances?',
                             default_yes=False):
                print('Okay, exiting.')
                sys.exit(0)

        for instance in instances:
            instance.delete()
        print('Deleted.')
    else:
        print('No report instances found.')
def main(self): """ Setup query and run report with default column set """ if self.options.timerange: timefilter = TimeFilter.parse_range(self.options.timerange) else: timefilter = TimeFilter(self.options.time0, self.options.time1) trafficexpr = TrafficFilter(self.options.trafficexpr) columns = [ self.netprofiler.columns.key.srv_host_ip, self.netprofiler.columns.key.app_info, self.netprofiler.columns.key.start_time, self.netprofiler.columns.key.end_time, self.netprofiler.columns.value.s2c_total_bytes, self.netprofiler.columns.value.s2c_total_pkts, self.netprofiler.columns.value.response_time, self.netprofiler.columns.value.server_delay ] report = TrafficFlowListReport(self.netprofiler) report.run(columns, timefilter=timefilter, trafficexpr=trafficexpr) data = report.get_data() report.delete() headers = [c.key for c in columns] Formatter.print_table(data, headers)
def main(self): """ Setup query and run report with default column set """ if self.options.timerange: timefilter = TimeFilter.parse_range(self.options.timerange) else: timefilter = TimeFilter(self.options.time0, self.options.time1) trafficexpr = TrafficFilter(self.options.trafficexpr) columns = [self.netprofiler.columns.key.srv_host_ip, self.netprofiler.columns.key.app_info, self.netprofiler.columns.key.start_time, self.netprofiler.columns.key.end_time, self.netprofiler.columns.value.s2c_total_bytes, self.netprofiler.columns.value.s2c_total_pkts, self.netprofiler.columns.value.response_time, self.netprofiler.columns.value.server_delay] report = TrafficFlowListReport(self.netprofiler) report.run(columns, timefilter=timefilter, trafficexpr=trafficexpr) data = report.get_data() report.delete() headers = [c.key for c in columns] Formatter.print_table(data, headers)
def print_columns(self, paginate=None):
    """ Print out data in a nicely formatted table.

    The `paginate` option will insert a new header after that many
    rows have been printed.  Defaults to None (single header only).
    """
    if not self.data:
        print("No data found.")
        return

    if self.options.typelist:
        # two columns only
        headers = ['type_id', 'type']
        data = self.data
    elif self.options.ipaddr:
        # single dict
        headers = list(self.data.keys())
        data = [list(self.data.values())]
    else:
        # assume objects are uniform and take keys from the first one
        headers = list(self.data[0].keys())
        data = [list(d.values()) for d in self.data]

    Formatter.print_table(data, headers, paginate=paginate)
def main(self):
    if self.options.showsources:
        svcdef = self.appresponse.find_service('npm.reports')
        dr = svcdef.bind('source_names')
        source_names = dr.execute('get').data
        print('\n'.join(source_names))
        return

    source = SourceProxy(name=self.options.sourcename)

    columns = []
    headers = []
    if self.options.keycolumns:
        for col in self.options.keycolumns.split(','):
            columns.append(Key(col))
            headers.append(col)

    for col in self.options.valuecolumns.split(','):
        columns.append(Value(col))
        headers.append(col)

    topbycolumns = []
    if self.options.topbycolumns:
        for col in self.options.topbycolumns.split(','):
            topbycolumns.append(Key(col))

    data_def = DataDef(source=source,
                       columns=columns,
                       granularity=self.options.granularity,
                       resolution=self.options.resolution,
                       time_range=self.options.timerange,
                       limit=self.options.limit,
                       topbycolumns=topbycolumns)

    if self.options.filterexpr:
        data_def.add_filter(TrafficFilter(type_='steelfilter',
                                          value=self.options.filterexpr))

    report = Report(self.appresponse)
    report.add(data_def)
    report.run()

    data = report.get_data()
    headers = report.get_legend()
    report.delete()

    if self.options.csvfile:
        with open(self.options.csvfile, 'w') as f:
            for line in Formatter.get_csv(data, headers):
                f.write(line)
                f.write('\n')
    else:
        Formatter.print_csv(data, headers)
def console(self, source_type, data, headers):
    print('')
    print(source_type)
    print('-' * len(source_type))
    if data:
        Formatter.print_table(data, headers)
    else:
        print('None.')
def main(self):
    pcap = PcapFile(self.options.pcap_path)
    info = pcap.info()
    data = []
    for k, v in info.items():
        if isinstance(v, bytes):
            v = v.decode('utf-8')
        data.append((k, v))
    Formatter.print_table(data, headers=['Key', 'Value'])
def main(self):
    netprof = self.netprofiler
    timefilter = TimeFilter.parse_range(self.options.timefilter)

    # Create and run a traffic summary report of all server ports in use
    report = TrafficSummaryReport(netprof)

    # Run the report
    report.run(
        groupby=netprof.groupbys.port,
        columns=[netprof.columns.key.protoport,
                 netprof.columns.key.protocol,
                 netprof.columns.key.port,
                 netprof.columns.value.avg_bytes],
        sort_col=netprof.columns.value.avg_bytes,
        timefilter=timefilter)

    # Retrieve and print data
    ports_data = report.get_data()[:int(self.options.N)]

    report.delete()

    # Now create a new report using the ports_data
    report = TrafficTimeSeriesReport(netprof)

    # The format of query_columns for 'ports' is:
    #   'ports' = [{'name': 'tcp/80'},
    #              {'name': 'tcp/443'},
    #              {'name': 'icmp/0'}]
    # For most protocols this works as-is from the report data, but for
    # icmp the result in the data is 'icmp/0/0' -- where the two zeros
    # are type and code.  That form is not valid input to netprofiler,
    # which expects type and code smushed into a single 16-bit number
    # (type << 8 | code).
    query_columns = []
    for (protoport, protocol, port, avgbytes) in ports_data:
        if protoport.startswith('icmp'):
            protoport = 'icmp/%s' % port
        query_columns.append({'name': protoport})

    # Run the report
    report.run(columns=[netprof.columns.key.time,
                        netprof.columns.value.avg_bytes],
               resolution='1 min',
               query_columns_groupby='ports',
               query_columns=query_columns,
               timefilter=timefilter)

    # Get the data!
    data = report.get_data()

    Formatter.print_table(data,
                          padding=1,
                          headers=(['time'] +
                                   [q['name'] for q in query_columns]))
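# A quick, standalone sanity check of the icmp encoding described in the
# comment above.  The helper name `icmp_port` is hypothetical (not part of
# the script); it just evaluates the (type << 8 | code) packing that the
# comment says netprofiler expects.
def icmp_port(icmp_type, icmp_code):
    return (icmp_type << 8) | icmp_code

assert icmp_port(0, 0) == 0      # echo reply   -> 'icmp/0'
assert icmp_port(8, 0) == 2048   # echo request -> 'icmp/2048'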
def main(self):
    headers = ['ID', 'Description', 'Type']
    data = [(f.id, f.description, f.type)
            for f in self.netshark.get_extractor_fields()]
    data.sort()
    Formatter.print_table(data, headers, padding=2,
                          max_width=int(self.options.table_width),
                          long_column=1,
                          wrap_columns=(not self.options.truncate))
def console(self, source_type, data, headers):
    if self.output_file is not None:
        # Append a section header to the output file
        with open(self.output_file, "a+") as f:
            if not self.first_line:
                f.write("\n")
            f.write(source_type + "\n")
            f.write('-' * len(source_type) + "\n")
        self.first_line = False
    if data:
        Formatter.print_table(data, headers, self.output_file)
def main(self):
    if self.options.sourcetype == 'file':
        source = self.appresponse.fs.get_file_by_id(self.options.sourceid)
    elif self.options.sourcetype == 'job':
        source = self.appresponse.capture.get_job_by_name(
            self.options.sourceid)
    else:
        source = self.appresponse.clips.get_clip_by_id(
            self.options.sourceid)

    data_source = SourceProxy(source)

    columns = []
    headers = []
    if self.options.keycolumns:
        for col in self.options.keycolumns.split(','):
            columns.append(Key(col))
            headers.append(col)

    for col in self.options.valuecolumns.split(','):
        columns.append(Value(col))
        headers.append(col)

    data_def = DataDef(source=data_source,
                       columns=columns,
                       granularity=self.options.granularity,
                       resolution=self.options.resolution,
                       time_range=self.options.timerange)

    if self.options.filterexpr:
        data_def.add_filter(TrafficFilter(type_=self.options.filtertype,
                                          value=self.options.filterexpr))

    report = Report(self.appresponse)
    report.add(data_def)
    report.run()

    data = report.get_data()
    headers = report.get_legend()
    report.delete()

    if self.options.csvfile:
        with open(self.options.csvfile, 'w') as f:
            for line in Formatter.get_csv(data, headers):
                f.write(line)
                f.write('\n')
    else:
        Formatter.print_csv(data, headers)
def main(self):
    columns = self.options.columns.split(',')
    pcap = PcapFile(self.options.pcap_path)
    data = pcap.query(columns)

    if self.options.join == 'INNER':
        # drop any row with a missing field
        data = [row for row in data if None not in row]

    if not data:
        print('No rows found matching your input')
        return

    max_rows = int(self.options.max_rows)
    data_out = data[:max_rows]
    Formatter.print_table(data_out, headers=columns)
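# A standalone illustration of the 'INNER' join filter above: any row
# containing a missing (None) field is dropped.  The row values here are
# purely illustrative.
rows = [('10.0.0.1', 80), ('10.0.0.2', None)]
assert [row for row in rows if None not in row] == [('10.0.0.1', 80)]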
def handle(self, *args, **options): """ Main command handler """ self.options = options if options['table_classes']: sc = self.get_tables() Formatter.print_table([(c.__name__, c.__module__) for c in sc], ['Name', 'Package']) elif options['widget_classes']: # can't use same trick as tables since these aren't subclasses W = Widget.objects.all() s = set([(w.uiwidget, w.module) for w in W]) Formatter.print_table(sorted(s, key=lambda x: x[1]), ['Name', 'Package'])
def main(self):
    headers = ['ID', 'Description', 'Type', 'Metric', 'Key/Value']
    cols = self.appresponse.reports.sources[self.options.source]['columns']
    data = []
    for c in cols.values():
        v = 'Key' if 'grouped_by' in c and c['grouped_by'] else 'Value'
        metric = c['metric'] if 'metric' in c else '---'
        data.append((c['id'], c['description'], c['type'], metric, v))
    data.sort()
    Formatter.print_table(data, headers, padding=2,
                          max_width=int(self.options.table_width),
                          long_column=1,
                          wrap_columns=(not self.options.truncate))
def print_columns(self, columns):
    keys = []
    values = []
    for c in columns:
        if (self.options.filter and
                self.options.filter.lower() not in c.label.lower()):
            continue
        item = (c.key, c.label, c.id, c.json['type'])
        if c.iskey:
            keys.append(item)
        else:
            values.append(item)

    Formatter.print_table(keys, ['Key Columns', 'Label', 'ID', 'Type'])
    print('')
    Formatter.print_table(values, ['Value Columns', 'Label', 'ID', 'Type'])
def handle(self, *args, **options): """ Main command handler. """ if options['job_list']: # print out the id's instead of processing anything columns = ['ID', 'Master', 'Parent', 'PID', 'Table', 'Created', 'Touched', 'Status', 'Refs', 'Progress', 'Data file'] data = [] for j in Job.objects.all().order_by('id'): datafile = os.path.basename(j.datafile()) if not os.path.exists(j.datafile()): datafile += " (missing)" status = (s for s in ('NEW', 'RUNNING', 'COMPLETE', 'ERROR') if getattr(Job, s) == j.status).next() parent_id = j.parent.id if j.parent else '--' master_id = j.master.id if j.master else '--' data.append([j.id, master_id, parent_id, j.pid, j.table.name, j.created, j.touched, status, j.refcount, j.progress, datafile]) Formatter.print_table(data, columns) elif options['job_data']: job = Job.objects.get(id=options['job_data']) columns = [c.name for c in job.table.get_columns()] if job.status == job.COMPLETE: Formatter.print_table(job.values(), columns) elif options['job_age']: logger.debug('Aging all jobs.') Job.objects.age_jobs(force=True) elif options['job_flush']: logger.debug('Flushing all jobs.') while Job.objects.count(): ids = Job.objects.values_list('pk', flat=True)[:100] # Using list(ids) forces a DB hit, otherwise we may hit # a MySQL limitation Job.objects.filter(pk__in=list(ids)).delete()
def main(self): """ Setup query and run report with default column set """ if self.options.timerange: timefilter = TimeFilter.parse_range(self.options.timerange) else: timefilter = TimeFilter(self.options.time0, self.options.time1) if self.options.trafficexpr: trafficexpr = TrafficFilter(self.options.trafficexpr) else: trafficexpr = None legend_columns, all_data = self.identity_report(timefilter=timefilter, trafficexpr=trafficexpr, testfile=self.options.testfile) legend, activity = self.analyze_login_data(all_data, legend_columns) if activity and self.options.timeseries_report: headers, tbl_data = self.generate_traffic(activity, legend, 'timeseries') elif activity and self.options.summary_report: headers, tbl_data = self.generate_traffic(activity, legend, 'summary') else: headers = ('Host IP', 'Login Time', 'Logout Time', 'Duration') tbl_data = [(x[0], format_time(x[1]), format_time(x[2]), x[3]) for x in activity] if self.options.csv: Formatter.print_csv(tbl_data, headers) elif self.options.tsv: Formatter.print_csv(tbl_data, headers, delim='\t') else: Formatter.print_table(tbl_data, headers)
def handle(self, *args, **options): """ Main command handler. """ if options['list_alerts']: columns = ('ID', 'Timestamp', 'EventID', 'Level', 'Sender', 'Dest Options', 'Message') data = [] for a in Alert.objects.all().order_by('timestamp'): data.append((a.id, a.timestamp, a.event.eventid, a.level, a.sender, a.options, a.message)) Formatter.print_table(data, columns, padding=2) elif options['list_events']: columns = ('ID', 'Timestamp', 'EventID', '# Alerts', 'Context', 'Trigger Result') data = [] for e in Event.objects.all().order_by('timestamp'): alert_count = len(e.alert_set.all()) data.append((e.id, e.timestamp, e.eventid, alert_count, str(e.context), str(e.trigger_result)[:30])) Formatter.print_table(data, columns, padding=2) elif options['alert_detail']: alert = Alert.objects.get(id=options['alert_detail']) self.stdout.write(alert.get_details()) elif options['event_detail']: event = Event.objects.get(id=options['event_detail']) self.stdout.write(event.get_details()) elif options['alert_age']: self.stdout.write('Not Implemented Yet') elif options['alert_flush']: logger.debug('Deleting all alerts.') Alert.objects.all().delete() else: raise CommandError('Missing appropriate option')
def main(self):
    instances = self.appresponse.reports.get_instances()
    if instances:
        header = ['id', 'user_agent', 'owner', 'name', 'created',
                  'completed?', 'is_live?']
        data = []
        for i in instances:
            created = self.format_time(i.data['created'])
            try:
                # If the instance had an error, an exception will be
                # raised here, so just catch it
                complete = i.is_complete()
            except AppResponseException:
                complete = 'Error'
            data.append((i.data['id'],
                         i.data['user_agent'],
                         i.data['access_rights']['owner'],
                         i.data['info']['name'],
                         created,
                         complete,
                         i.data['live']))
        Formatter.print_table(data, header)

        if not self.options.force:
            if not prompt_yn('\nDelete all these report instances?',
                             default_yes=False):
                print('Okay, exiting.')
                sys.exit(0)

        for instance in instances:
            instance.delete()
        print('Deleted.')
    else:
        print('No report instances found.')
def main(self):
    if self.options.show_mifgs:
        headers = ['id', 'name', 'interfaces']
        data = []
        for mifg in self.appresponse.capture.get_mifgs():
            data.append([mifg.id, mifg.name, mifg.data.config.interfaces])
        Formatter.print_table(data, headers)
    elif self.options.show_jobs:
        headers = ['id', 'name', 'mifg_id', 'filter', 'state',
                   'start', 'end', 'size']
        data = []
        for job in self.appresponse.capture.get_jobs():
            data.append([job.id, job.name, job.data.config.mifg_id,
                         getattr(job.data.config, 'filter',
                                 dict(string=None))['string'],
                         job.status,
                         job.data.state.status.packet_start_time,
                         job.data.state.status.packet_end_time,
                         job.data.state.status.capture_size])
        Formatter.print_table(data, headers)
    else:
        config = dict(name=self.options.jobname,
                      mifg_id=int(self.options.mifg_id),
                      filter=dict(type=self.options.filter_type,
                                  string=self.options.filter))
        self.appresponse.capture.create_job(dict(config=config))
        print("Successfully created packet capture job {}"
              .format(self.options.jobname))
def main(self): """ Setup query and run report with default column set """ if self.options.timerange: timefilter = TimeFilter.parse_range(self.options.timerange) else: timefilter = TimeFilter(self.options.time0, self.options.time1) if self.options.trafficexpr: trafficexpr = TrafficFilter(self.options.trafficexpr) else: trafficexpr = None legend_columns, all_data = self.identity_report( timefilter=timefilter, trafficexpr=trafficexpr, testfile=self.options.testfile) legend, activity = self.analyze_login_data(all_data, legend_columns) if activity and self.options.timeseries_report: headers, tbl_data = self.generate_traffic(activity, legend, 'timeseries') elif activity and self.options.summary_report: headers, tbl_data = self.generate_traffic(activity, legend, 'summary') else: headers = ('Host IP', 'Login Time', 'Logout Time', 'Duration') tbl_data = [(x[0], format_time(x[1]), format_time(x[2]), x[3]) for x in activity] if self.options.csv: Formatter.print_csv(tbl_data, headers) elif self.options.tsv: Formatter.print_csv(tbl_data, headers, delim='\t') else: Formatter.print_table(tbl_data, headers)
def print_data(self, data, header):
    if self.options.as_csv:
        Formatter.print_csv(data, header)
    else:
        Formatter.print_table(data, header)
def handle(self, *args, **options): """ Main command handler. """ self.options = options if options['table_list']: # print out the id's instead of processing anything output = [] for t in Table.objects.all(): output.append([t.id, t.namespace, t.queryclassname, t.name, t]) Formatter.print_table(output, ['ID', 'Namespace', 'QueryClass', 'Name', 'Table']) elif options['table_list_by_report']: # or print them out organized by report/widget/table output = [] reports = Report.objects.all() for report in reports: for table in report.tables(): for widget in table.widget_set.all(): line = [table.id, report.title, widget.title, table] output.append(line) Formatter.print_table(output, ['ID', 'Report', 'Widget', 'Table']) elif options['criteria_list']: if 'table_id' in options and options['table_id'] is not None: table = Table.objects.get(id=options['table_id']) elif 'table_name' in options and options['table_name'] is not None: table = Table.objects.get(name=options['table_name']) else: raise ValueError("Must specify either --table-id or " "--table-name to run a table") form = self.get_form(table) # Only show criteria options that were included in report # and given a label, other ones are for internal table use. # criteria like ignore_cache can still be passed in, they # just won't be shown in this list output = [(k, v.label) for k, v in form.fields.iteritems() if v.label] Formatter.print_table(output, ['Keyword', 'Label']) else: if 'table_id' in options and options['table_id'] is not None: table = Table.objects.get(id=options['table_id']) elif 'table_name' in options and options['table_name'] is not None: table = Table.objects.get(name=options['table_name']) else: raise ValueError("Must specify either --table-id or " "--table-name to run a table") # Django gives us a nice error if we can't find the table self.console('Table %s found.' % table) # Parse criteria options criteria_options = {} if 'criteria' in options and options['criteria'] is not None: for s in options['criteria']: (k, v) = s.split(':', 1) criteria_options[k] = v form = self.get_form(table, data=criteria_options) if not form.is_valid(check_unknown=True): self.console('Invalid criteria:') logger.error('Invalid criteria: %s' % ','.join('%s:%s' % (k, v) for k, v in form.errors.iteritems())) for k, v in form.errors.iteritems(): self.console(' %s: %s' % (k, ','.join(v))) sys.exit(1) criteria = form.criteria() columns = [c.name for c in table.get_columns()] if options['only_columns']: print columns return job = Job.create(table=table, criteria=criteria, update_progress=False) job.save() self.console('Job created: %s' % job) self.console('Criteria: %s' % criteria.print_details()) start_time = datetime.datetime.now() job.start() self.console('Job running . . ', ending='') # wait for results while not job.done(): # self.console('. ', ending='') # self.stdout.flush() time.sleep(1) end_time = datetime.datetime.now() delta = end_time - start_time seconds = float(delta.microseconds + (delta.seconds + delta.days*24*3600)*10**6)/10**6 self.console('Done!! 
(elapsed time: %.2f seconds)' % seconds) self.console('') # Need to refresh the column list in case the job changed them # (ephemeral cols) columns = [c.name for c in table.get_columns()] if job.status == job.COMPLETE: if options['as_csv']: if options['output_file']: with open(options['output_file'], 'w') as f: for line in Formatter.get_csv(job.values(), columns): f.write(line) f.write('\n') else: Formatter.print_csv(job.values(), columns) else: Formatter.print_table(job.values(), columns) else: self.console("Job completed with an error:") self.console(job.message) sys.exit(1)
def main(self):
    if self.options.sourcename == 'packets':
        if self.options.sourceid is None:
            source = self.appresponse.capture.get_vifgs()[0]
        else:
            source = SourceProxy(name='packets',
                                 path=self.options.sourceid)
    else:
        source = SourceProxy(name='aggregates')

    columns = []
    headers = []
    for col in self.options.keycolumns.split(','):
        columns.append(Key(col))
        headers.append(col)

    for col in self.options.valuecolumns.split(','):
        columns.append(Value(col))
        headers.append(col)

    data_def = DataDef(source=source,
                       columns=columns,
                       granularity=self.options.granularity,
                       resolution=self.options.resolution,
                       live=True)

    if self.options.filterexpr:
        data_def.add_filter(TrafficFilter(type_='steelfilter',
                                          value=self.options.filterexpr))

    print('Running report, press Ctrl-C to exit.')
    print('')

    report = self.appresponse.create_report(data_def)
    time.sleep(1)

    try:
        while True:
            banner = '{} {}'.format(datetime.datetime.now(), '--' * 20)
            print(banner)

            try:
                data = report.get_data()['data']
                headers = report.get_legend()

                if self.options.sortby:
                    index = headers.index(self.options.sortby)
                    data.sort(key=lambda x: x[index], reverse=True)

                if self.limit:
                    total_rows = len(data)
                    limit_string = ('Showing {} out of {} rows.'
                                    .format(self.limit, total_rows))
                    data = data[:self.limit]
                else:
                    limit_string = None
            except KeyError:
                # something went wrong, print the error and exit
                print('Error accessing data:')
                print(report.get_data())
                raise KeyboardInterrupt

            if self.options.csvfile:
                with open(self.options.csvfile, 'a') as f:
                    f.write(banner)
                    f.write('\n')
                    for line in Formatter.get_csv(data, headers):
                        f.write(line)
                        f.write('\n')
                    if limit_string:
                        f.write(limit_string)
                        f.write('\n')
            else:
                Formatter.print_table(data, headers)
                if limit_string:
                    print(limit_string)

            time.sleep(self.delay)
    except KeyboardInterrupt:
        print('Exiting ...')
        report.delete()
def main(self):
    version = APIVersion(self.appresponse.versions['npm.packet_capture'])
    if version < APIVersion('2.0'):
        ifg = IFG('mifg_id',
                  lambda job: job.data.config.mifg_id,
                  self.appresponse.capture.get_mifgs)
    else:
        ifg = IFG('vifgs',
                  lambda job: job.data.config.vifgs,
                  self.appresponse.capture.get_vifgs)

    if self.options.show_ifgs:
        headers = ['id', 'name', 'filter', 'members']
        data = []
        for xifg in ifg.get_items():
            if 'filter' in xifg.data.config:
                f = xifg.data.config.filter
            else:
                f = {'value': None}
            fltr = f if f['value'] else 'None'

            if 'members' in xifg.data.config:
                members = xifg.data.config.members
            else:
                members = xifg.data.config.interfaces

            data.append([xifg.id, xifg.name, fltr, members])
        Formatter.print_table(data, headers)
    elif self.options.show_jobs:
        headers = ['id', 'name', ifg.type, 'filter', 'state',
                   'start', 'end', 'size']
        data = []
        for job in self.appresponse.capture.get_jobs():
            data.append([job.id, job.name, ifg.get_id(job),
                         getattr(job.data.config, 'filter',
                                 dict(string=None))['string'],
                         job.status,
                         job.data.state.status.packet_start_time,
                         job.data.state.status.packet_end_time,
                         job.data.state.status.capture_size])
        Formatter.print_table(data, headers)
    else:
        if version < APIVersion('2.0'):
            config = dict(name=self.options.jobname,
                          mifg_id=int(self.options.ifgs))
        else:
            ifgs = [int(v) for v in self.options.ifgs.split(',')]
            config = dict(name=self.options.jobname,
                          enabled=True,
                          vifgs=ifgs)

        if self.options.filter:
            fltr = dict(type=self.options.filter_type,
                        string=self.options.filter)
            config['filter'] = fltr

        self.appresponse.capture.create_job(config)
        print("Successfully created packet capture job {}"
              .format(self.options.jobname))
def main(self):
    if self.options.timerange is not None:
        try:
            timefilter = TimeFilter.parse_range(self.options.timerange)
        except ValueError:
            print("Could not parse time filter expression.")
            return
    elif (self.options.starttime is not None or
          self.options.endtime is not None):
        timeparser = TimeParser()
        if self.options.starttime is None:
            start_time = datetime.min
        else:
            try:
                start_time = timeparser.parse(self.options.starttime)
            except ValueError:
                print("Could not parse start timestamp")
                return
        if self.options.endtime is None:
            end_time = datetime.now()
        else:
            try:
                end_time = timeparser.parse(self.options.endtime)
            except ValueError:
                print("Could not parse end timestamp")
                return
        timefilter = TimeFilter(start_time, end_time)
    else:
        timefilter = None

    filters = [NetSharkFilter(f) for f in self.options.filters]
    if timefilter is not None:
        filters.append(timefilter)

    if self.options.file is None:
        sharks_info = [[self.options.host, self.options.username,
                        self.options.password]]
    else:
        sharks_info = self.get_csv_sharks_info(self.options.file)

    out_table = []
    for host, username, password in sharks_info:
        shark = NetShark(host, auth=UserAuth(username, password))
        # query each NetShark once and reuse the result
        jobs_bytes = self.get_jobs_bytes(shark, filters)
        if not jobs_bytes:
            print("(No data returned from NetShark {0}.)".format(host))
        else:
            for job_name, job_bytes in jobs_bytes:
                out_table.append([host, job_name, job_bytes])

    if not out_table:
        print("No data found by any NetShark.")
    else:
        out_table_sorted = sorted(out_table, reverse=True,
                                  key=operator.itemgetter(2))
        heads = ["NetShark", "Job", "Total bytes"]
        Formatter.print_table(out_table_sorted, heads)
def main(self):
    headers = ['id', 'name', 'active', 'definition']
    data = [[hg.id, hg.name, hg.data.enabled, hg.data.hosts]
            for hg in self.appresponse.classification.get_hostgroups()
            if hasattr(hg.data, 'hosts')]
    Formatter.print_table(data, headers)
def handle(self, *args, **options): """ Main command handler. """ if options['show_device']: dev = {} for opt in DEVICE_ATTRS + ['id']: if options[opt] is not None: dev[opt] = options[opt] output = [] for d in Device.objects.filter(**dev): output.append([d.id, d.name, d.module, d.host, d.port, d.username, d.enabled]) Formatter.print_table(output, ['ID', 'Name', 'Module', 'Host', 'Port', 'User Name', 'Enabled']) if not output: self.stdout.write("No device found") elif options['add_device']: dev = {} for opt in REQUIRED_DEVICE_ATTRS: if options[opt] is None: self.stdout.write("Option '%s' is required for a device" % opt) sys.exit(1) dev[opt] = options[opt] if options['port']: if options['port'].isdigit(): dev['port'] = int(options['port']) else: self.stdout.write("Option port '%s' is not a positive " "integer" % options['port']) sys.exit(1) if options['enabled'] is not None: # --enabled or --no-enabled is set dev['enabled'] = options['enabled'] dev_obj = Device(**dev) dev_obj.save() self.stdout.write('Device added.') elif options['edit_device']: if not options['id']: self.stdout.write('Option ID is required to edit a device') sys.exit(1) elif not options['id'].isdigit(): self.stdout.write("Option id '%s' is not a positive integer" % options['id']) sys.exit(1) dev_list = Device.objects.filter(id=int(options['id'])) if not dev_list: self.stdout.write("Option id '%s' does not match a device" % options['id']) sys.exit(1) if options['port']: if options['port'].isdigit(): options['port'] = int(options['port']) else: self.stdout.write("Option port '%s' is not a positive " "integer" % options['port']) sys.exit(1) dev_obj = dev_list[0] change = False for attr in DEVICE_ATTRS: if options[attr] is not None: change = True setattr(dev_obj, attr, options[attr]) if change: dev_obj.save() self.stdout.write("Device '%s' modified" % options['id']) else: self.stdout.write("Device '%s' unchanged" % options['id']) elif options['batch_file']: file_name = options['batch_file'] default_header = ['name', 'module', 'host', 'port', 'username', 'password', 'auth', 'access_code', 'tags'] with open(file_name, 'r') as f: reader = csv.reader(f) devs = [] add, update = 0, 0 for i, row in enumerate(reader): row = map(str.strip, row) if i == 0: if set(map(str.lower, row)) == set(default_header): header = map(str.lower, row) continue else: header = default_header if not row or not row[0]: continue if len(row) < len(header): msg = ('Line {0} only has {1} fields. ' '{2} fields are required.' .format(i+1, len(row), len(header))) raise CommandError(msg) kwargs = dict(zip(header, row)) if not kwargs['host']: msg = 'Host is empty string in line {0}.'.format(i+1) raise CommandError(msg) if not kwargs['port'].isdigit(): msg = ("Port should be integer instead of '{0}'" " in line {1}".format(kwargs['port'], i+1)) raise CommandError(msg) kwargs['port'] = int(kwargs['port']) kwargs['enabled'] = True kwargs['tags'] = ','.join(map(str.strip, kwargs['tags'].split(';'))) # Check for device with same host and port res = Device.objects.filter(host=kwargs['host'], port=kwargs['port']) if res: dev = res[0] for k, v in kwargs.iteritems(): setattr(dev, k, v) update += 1 else: dev = Device(**kwargs) add += 1 devs.append(dev) with transaction.atomic(): for dev in devs: dev.save() self.stdout.write('{add} devices added. ' '{update} existing devices refreshed.' .format(add=add, update=update))
def console(self, source_type, data, headers):
    print('')
    print(source_type)
    print('-' * len(source_type))
    Formatter.print_table(data, headers)
def main(self):
    source = SourceProxy(name='alert_list')

    if self.options.alert_detail:
        # detail view
        column_names = ['alert.id', 'alert.policy_id',
                        'alert.policy_name', 'alert.policy_eval_period',
                        'alert.policy_type_name', 'alert.policy_last_N',
                        'alert.policy_last_M', 'alert.severity_level',
                        'alert.severity', 'alert.start_time',
                        'alert.end_time', 'alert.duration',
                        'alert.ongoing', 'alert.low_violations_count',
                        'alert.medium_violations_count',
                        'alert.high_violations_count']
    else:
        # alert event listing view
        column_names = ['alert.id', 'alert.policy_id',
                        'alert.policy_name', 'alert.policy_type_name',
                        'alert.policy_type', 'alert.severity_level',
                        'alert.severity', 'alert.start_time',
                        'alert.end_time', 'alert.duration',
                        'alert.ongoing']

    columns = self.appresponse.get_column_objects('alert_list',
                                                  column_names)

    data_def = DataDef(source=source,
                       columns=columns,
                       granularity=60,
                       resolution=0,
                       time_range=self.options.timerange)

    if self.options.alert_detail:
        fltr = {'value': 'alert.id="{}"'.format(self.options.alert_detail)}
        data_def._filters.append(fltr)

    report = Report(self.appresponse)
    report.add(data_def)
    report.run()

    data = report.get_data()
    # Strip the 'alert.' prefix from the legend names (see note below)
    prefix = 'alert.'
    headers = [x[len(prefix):] if x.startswith(prefix) else x
               for x in report.get_legend()]

    if not data:
        print('\nNo data found.\n')
    else:
        if self.options.csvfile:
            with open(self.options.csvfile, 'w') as f:
                for line in Formatter.get_csv(data, headers):
                    f.write(line)
                    f.write('\n')
        else:
            Formatter.print_table(data, headers)
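# Why the legend prefix above is removed with a slice rather than the
# original str.lstrip call: lstrip('alert.') strips any of the characters
# {'a', 'l', 'e', 'r', 't', '.'} from the left, which mangles names that
# begin with those letters.  A standalone demonstration:
assert 'alert.end_time'.lstrip('alert.') == 'nd_time'    # wrong
assert 'alert.end_time'[len('alert.'):] == 'end_time'    # intended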
def console(self, header, data):
    print('')
    print(header)
    print('-' * len(header))
    Formatter.print_table(data, ('id', 'source', 'source_path'))
def main(self):
    enabled = not self.options.disabled
    if self.options.operation == 'show':
        headers = ['id', 'name', 'active', 'definition']
        data = [[hg.id, hg.name, hg.data.enabled, hg.data.hosts]
                for hg in self.appresponse.classification.get_hostgroups()]
        Formatter.print_table(data, headers)

    elif self.options.operation == 'add':
        hg = HostGroupConfig(name=self.options.name,
                             hosts=self.options.hosts.split(','),
                             enabled=enabled)
        ret = self.appresponse.classification.create_hostgroup(hg)
        print("Successfully created hostgroup '{}'".format(ret.data.name))

    elif self.options.operation == 'update':
        if self.options.id:
            hg = self.appresponse.classification.get_hostgroup_by_id(
                self.options.id)
        else:
            hg = self.appresponse.classification.get_hostgroup_by_name(
                self.options.name)
        hgc = HostGroupConfig(
            name=self.options.name or hg.data.name,
            hosts=(self.options.hosts.split(',')
                   if self.options.hosts else hg.data.hosts),
            enabled=enabled)
        hg.update(hgc)
        print("Successfully updated hostgroup '{}'".format(hg.name))

    elif self.options.operation == 'upload':
        with open(self.options.file) as f:
            hgs = []
            for ln in f.readlines():
                # skip blank lines (they still contain a newline)
                if not ln.strip():
                    continue
                name, hosts = ln.split()
                hgs.append(HostGroupConfig(name=name,
                                           hosts=hosts.split(','),
                                           enabled=True))
            self.appresponse.classification.create_hostgroups(hgs)
            print("Successfully uploaded {} hostgroup definitions."
                  .format(len(hgs)))

    elif self.options.operation == 'delete':
        if self.options.id:
            hg = self.appresponse.classification.get_hostgroup_by_id(
                self.options.id)
        else:
            hg = self.appresponse.classification.get_hostgroup_by_name(
                self.options.name)
        hg.delete()
        print("Successfully deleted hostgroup with ID/name {}"
              .format(self.options.id or self.options.name))

    elif self.options.operation == 'clear':
        # clear all hostgroups
        self.appresponse.classification.bulk_delete()
        print("Successfully cleared all hostgroups")
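# A minimal sketch of the upload-file format the 'upload' branch above
# expects: one group per line -- a name, whitespace, then a
# comma-separated host list.  The group names and subnets here are
# illustrative only.
for ln in ["east 10.1.0.0/16,10.2.0.0/16\n", "west 10.3.0.0/16\n"]:
    name, hosts = ln.split()
    print(name, hosts.split(','))  # east ['10.1.0.0/16', '10.2.0.0/16'] ...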