def main(app):
    """Create a 30-minute trace clip on the appliance's first capture job.

    Prints the properties of the newly created clip. Returns 0 when the
    appliance has no capture jobs to clip from.
    """
    # Get the list of capture jobs currently configured on the appliance
    jobs = app.shark.get_capture_jobs()
    if not jobs:
        print("No jobs on the appliance, you can create jobs using the "
              "'create_job.py' and start/stop them using the 'control_job.py' script.")
        return 0

    # Pick the first job
    job = jobs[0]

    # NOTE: all prints use the print() function form so the script runs
    # unchanged on both Python 2 and Python 3 (the original mixed
    # print-statement and print-function styles, which fails on Py3).
    print('creating a 30 minutes clip on job {0}'.format(job.name))

    # Set the filters
    filters = (
        # Time filter: keep the last 30 minutes
        TimeFilter.parse_range("last 30 m"),
        # IP address filter: keep only 192.168.0.1
        SharkFilter('ip.src="192.168.0.1"')
    )

    # Create the clip
    clip = job.add_clip(filters, "a_test_clip")

    print('Clip with the following properties created:')
    print('')
    print('ID: %s' % clip.id)
    print('Description: %s' % clip.description)
    print('Source Path: %s' % clip.source_path)
    print('Size: %s' % clip.size)
def test_loaded_decorator(self):
    """Exercise the @loaded decorator by touching a lazily-loaded clip attribute."""
    shark = self.shark
    time_filter = TimeFilter.parse_range("last 30 m")
    first_interface = shark.get_interfaces()[0]
    job = shark.create_job(first_interface, 'test_loaded_decorator', '300M')
    with shark.create_clip(job, [time_filter], 'test_decorator_clip') as clip:
        # Reading .size forces the @loaded decorator to fetch the clip state
        clip.size
def test_clip_export(self):
    """Create a short trace clip, download it, and verify the export exists.

    The downloaded file is always removed, even when the existence
    assertion fails, so failed runs do not leave artifacts on disk
    (the original leaked the file on assertion failure).
    """
    job = self.shark.get_capture_jobs()[0]
    fltr = TimeFilter.parse_range('last 5 minutes')
    clip = self.shark.create_clip(job, [fltr], 'test_clip')
    logger.info('created 5 min trace clip for export test')
    f = clip.download()
    # Close before checking: the file must be fully flushed to disk
    f.close()
    try:
        self.assertTrue(os.path.exists(f.name))
    finally:
        os.remove(f.name)
def setup_defaults():
    """Return a (columns, filters) pair usable for creating views."""
    # Columns: source/destination IP keys, packet count, and HTTP
    # duration aggregates (max and average).
    columns = [
        Key('ip.src'),
        Key('ip.dst'),
        Value('generic.packets'),
        Value('http.duration', Operation.max, description="Max Duration"),
        Value('http.duration', Operation.avg, description="Avg Duration"),
    ]

    # Filters: restrict to web traffic carrying images, last two hours.
    filters = [
        SharkFilter('(generic.application="Web") & (http.content_type contains "image/")'),
        TimeFilter.parse_range('last 2 hours'),
    ]
    return columns, filters
def setup_defaults():
    """Return a (columns, filters) pair usable for creating views."""
    # Columns: source/destination IP keys, packet count, and HTTP
    # duration aggregates (max and average).
    columns = [
        Key('ip.src'),
        Key('ip.dst'),
        Value('generic.packets'),
        Value('http.duration', Operation.max, description="Max Duration"),
        Value('http.duration', Operation.avg, description="Avg Duration"),
    ]

    # generic.application no longer exists in 5.0, so select web traffic
    # by TCP port 80 in either direction instead.
    filters = [
        SharkFilter('(tcp.src_port=80) | (tcp.dst_port=80)'),
        TimeFilter.parse_range('last 2 hours'),
    ]
    return columns, filters
def test_shark_interface(self):
    """End-to-end smoke test of job/clip/file listing APIs on interface mon0.

    Creates a capture job and a 10-minute clip on it, then verifies the
    various lookup/listing entry points return non-None results.
    (Removed the unused `interfaces = self.shark.get_interfaces()` local
    from the original.)
    """
    interface = self.shark.get_interface_by_name('mon0')
    try:
        # Remove any job left over from a previous run so creation succeeds
        job = self.shark.get_capture_job_by_name('test_shark_interface_job')
        job.delete()
    except ValueError:
        # Everything is all right: no stale job, we can create
        # the test_shark_interface_job job
        pass
    job = self.shark.create_job(interface, 'test_shark_interface_job', '300M')
    filters = [TimeFilter.parse_range('last 10 minutes')]
    with self.shark.create_clip(job, filters,
                                'test_shark_interface_clip') as clip:
        self.shark.get_capture_jobs()
        self.shark.get_clips()
        self.assertNotEqual(
            self.shark.get_capture_job_by_name('test_shark_interface_job'),
            None)
        self.assertNotEqual(
            self.shark.get_trace_clip_by_description('test_shark_interface_clip'),
            None)
        self.assertNotEqual(self.shark.get_file('/admin/noon.cap'), None)
        self.assertNotEqual(self.shark.get_files(), None)
        self.assertNotEqual(self.shark.get_dir('/admin/'), None)
    job.delete()
def create_trace_clip(shark, job):
    """Create and return a short (last 10 minutes) trace clip for later tests."""
    time_filter = TimeFilter.parse_range('last 10 minutes')
    clip = shark.create_clip(job, [time_filter], 'test_clip')
    logger.info('created test trace clip')
    return clip
def run(self):
    """ Main execution method """
    # Bail out early when the report criteria carry no shark device.
    criteria = self.job.criteria
    if criteria.shark_device == '':
        logger.debug('%s: No shark device selected' % self.table)
        self.job.mark_error("No Shark Device Selected")
        return False

    #self.fake_run()
    #return True

    shark = DeviceManager.get_device(criteria.shark_device)

    logger.debug("Creating columns for Shark table %d" % self.table.id)

    # Create Key/Value Columns
    # Translate each (non-synthetic) table column definition into a
    # shark Key or Value column, recording the names in self.column_names.
    columns = []
    for tc in self.table.get_columns(synthetic=False):
        tc_options = tc.options
        if (tc.iskey and tc.name == 'time'
                and tc_options.extractor == 'sample_time'):
            # don't create column, use the sample time for timeseries
            self.timeseries = True
            self.column_names.append('time')
            continue
        elif tc.iskey:
            c = Key(tc_options.extractor,
                    description=tc.label,
                    default_value=tc_options.default_value)
        else:
            if tc_options.operation:
                try:
                    operation = getattr(Operation, tc_options.operation)
                except AttributeError:
                    # Unknown aggregation name: fall back to sum and report.
                    operation = Operation.sum
                    print('ERROR: Unknown operation attribute '
                          '%s for column %s.'
                          % (tc_options.operation, tc.name))
            else:
                operation = Operation.none
            c = Value(tc_options.extractor, operation,
                      description=tc.label,
                      default_value=tc_options.default_value)
        self.column_names.append(tc.name)
        columns.append(c)

    # Identify Sort Column
    # sortidx is the index (into `columns`) of the column whose extractor
    # matches the table's configured sort column, or None when absent.
    sortidx = None
    if self.table.sortcol is not None:
        sort_name = self.table.sortcol.options.extractor
        for i, c in enumerate(columns):
            if c.field == sort_name:
                sortidx = i
                break

    # Initialize filters
    criteria = self.job.criteria
    filters = []
    filterexpr = self.job.combine_filterexprs(
        exprs=criteria.shark_filterexpr,
        joinstr="&")
    if filterexpr:
        filters.append(SharkFilter(filterexpr))

    # Always constrain the query to the criteria's time window.
    tf = TimeFilter(start=criteria.starttime, end=criteria.endtime)
    filters.append(tf)

    logger.info("Setting shark table %d timeframe to %s"
                % (self.table.id, str(tf)))

    # Get source type from options
    # NOTE(review): `except RvbdHTTPException, e` is Python-2-only syntax;
    # for Python 3 compatibility this would need `as e`. Also, `source`
    # is assigned None immediately before the re-raise, so the assignment
    # is only observable if a caller inspects state after the exception —
    # presumably a deliberate reset; confirm before changing.
    try:
        with lock:
            source = path_to_class(shark,
                                   self.job.criteria.shark_source_name)
    except RvbdHTTPException, e:
        source = None
        raise e