# Example #1
# 0
    def main(self):
        if self.options.jobname:
            export_name = self.options.jobname
            source = self.netshark.get_capture_job_by_name(export_name)
        elif self.options.clipname:
            export_name = self.options.clipname
            source = self.netshark.get_trace_clip_by_description(export_name)

        filename = self.options.filename
        if not filename:
            filename = '%s_export.pcap' % export_name

        if self.options.timerange:
            timefilter = TimeFilter.parse_range(self.options.timerange)
        elif self.options.start_time and self.options.end_time:
            start = string_to_datetime(float(self.options.start_time))
            end = string_to_datetime(float(self.options.end_time))
            timefilter = TimeFilter(start, end)
        else:
            self.parser.error(
                'Select either --timerange or --start and --end times')

        if self.options.filters:
            kvs = [f.split('=') for f in self.options.filters]
            filters = [NetSharkFilter(r'%s="%s"' % (k, v)) for k, v in kvs]
        else:
            filters = None

        with self.netshark.create_export(source, timefilter,
                                         filters=filters) as e:
            print 'beginning download to file %s' % filename
            e.download(filename, overwrite=self.options.overwrite)
    def main(self):
        if self.options.jobname:
            export_name = self.options.jobname
            source = self.netshark.get_capture_job_by_name(export_name)
        elif self.options.clipname:
            export_name = self.options.clipname
            source = self.netshark.get_trace_clip_by_description(export_name)

        filename = self.options.filename
        if not filename:
            filename = "%s_export.pcap" % export_name

        if self.options.timerange:
            timefilter = TimeFilter.parse_range(self.options.timerange)
        elif self.options.start_time and self.options.end_time:
            start = string_to_datetime(float(self.options.start_time))
            end = string_to_datetime(float(self.options.end_time))
            timefilter = TimeFilter(start, end)
        else:
            self.parser.error("Select either --timerange or --start and --end times")

        if self.options.filters:
            kvs = [f.split("=") for f in self.options.filters]
            filters = [NetSharkFilter(r'%s="%s"' % (k, v)) for k, v in kvs]
        else:
            filters = None

        with self.netshark.create_export(source, timefilter, filters=filters) as e:
            print "beginning download to file %s" % filename
            e.download(filename, overwrite=self.options.overwrite)
    def test_view_on_job(self):
        """Create views on a capture job using several filter sets."""
        job = setup_capture_job(self.shark)
        columns, filters = setup_defaults()

        # With no filters the view should still be anchored at the job.
        with self.shark.create_view(job, columns, None,
                                    name='test_view_on_job') as view:
            view.get_data()
            path = view.config['input_source']['path']
            self.assertTrue(path.startswith('jobs'))

        # Regression check for bug 111168
        # http://bugzilla.nbttech.com/show_bug.cgi?id=111168
        with self.shark.create_view(job, columns, filters,
                                    name='bug_111168') as view:
            view.get_data()
            path = view.config['input_source']['path']
            self.assertTrue(path.startswith('jobs'))

        # Same regression, this time with only a two-hour time filter.
        with self.shark.create_view(job, columns,
                                    [TimeFilter.parse_range('last 2 hours')],
                                    name='bug_111168_2') as view:
            view.get_data()
            src_cfg = view.config['input_source']
            self.assertTrue(src_cfg['path'].startswith('jobs'))
            self.assertEqual(len(src_cfg['filters']), 1)
            tf = src_cfg['filters'][0]
            self.assertEqual(tf.start + datetime.timedelta(hours=2), tf.end)
    def run(self):
        """Export a capture job to a local pcap file.

        Resolves the device and source job from the job criteria, evicts
        old pcaps until the cache is under ``settings.PCAP_SIZE_LIMIT``,
        downloads the export into ``self.filename``, and returns the
        filename wrapped in a one-row DataFrame.
        """
        criteria = self.job.criteria

        netshark = DeviceManager.get_device(criteria.netshark_device)

        # Resolve the criteria's source path into a job name for lookup.
        self.export_name = str(
            path_to_class(netshark, criteria.netshark_source_name))

        source = netshark.get_capture_job_by_name(self.export_name)

        timefilter = TimeFilter(criteria.starttime, criteria.endtime)

        # Handle uniquely identifies this table+criteria combination and
        # doubles as the pcap file name.
        handle = Job._compute_handle(self.table, criteria)

        # check if pcaps directory exists, if not make the directory
        if not os.path.exists(PCAP_DIR):
            os.mkdir(PCAP_DIR)

        # Evict oldest pcaps until the cache fits under the size limit.
        while self.all_pcap_size > settings.PCAP_SIZE_LIMIT:
            self.delete_oldest_pcap()

        self.filename = add_pcap_dir('%s.pcap' % handle)

        # An empty filter list collapses to None (export unfiltered).
        filters = ([BpfFilter(filt) for filt in self.table.options.filters]
                   or None)
        with netshark.create_export(
                source,
                timefilter,
                filters=filters,
                wait_for_data=self.table.options.wait_for_data,
                wait_duration=self.table.options.wait_duration) as e:
            self.download(e)

        return QueryComplete(pandas.DataFrame([dict(filename=self.filename)]))
def main(app):
    # Get the list of jobs
    jobs = app.shark.get_capture_jobs()
    if len(jobs) == 0:
        print ("No jobs on the appliance, you can create jobs using the "
               "'create_job.py' and start/stop them using the 'control_job.py' script.")
        return 0

    # Pick the first job
    job = jobs[0]
    print 'creating a 30 minutes clip on job {0}'.format(job.name)

    # set the filters
    filters = (
        # Time filter: keep the last 30 minutes
        TimeFilter.parse_range("last 30 m"),

        # IP address filter: keep only 192.168.0.1
        NetSharkFilter('ip.src="192.168.0.1"')
    )

    # Create the clip
    clip = job.add_clip(filters, "a_test_clip")

    print 'Clip with the following properties created:'
    print ''
    print 'ID: %s' % clip.id
    print 'Description: %s' % clip.description
    print 'Source Path: %s' % clip.source_path
    print 'Size: %s' % clip.size
# Example #6
# 0
    def main(self):
        # Get the list of jobs
        jobs = self.netshark.get_capture_jobs()
        if len(jobs) == 0:
            print ("No jobs on the appliance, you can create jobs using the "
                   "'create_job.py' and start/stop them using the "
                   "'control_job.py' script.")
            return 0

        # Pick the first job
        job = jobs[0]
        print 'creating a 30 minutes clip on job {0}'.format(job.name)

        # set the filters
        filters = (
            # Time filter: keep the last 30 minutes
            TimeFilter.parse_range("last 30 m"),

            # IP address filter: keep only 192.168.0.1
            NetSharkFilter('ip.src="192.168.0.1"')
        )

        # Create the clip
        clip = job.add_clip(filters, "a_test_clip")

        print 'Clip with the following properties created:'
        print ''
        print 'ID: %s' % clip.id
        print 'Description: %s' % clip.description
        print 'Source Path: %s' % clip.source_path
        print 'Size: %s' % clip.size
 def test_loaded_decorator(self):
     """Accessing a lazy clip attribute should exercise @loaded."""
     time_filter = TimeFilter.parse_range("last 30 m")
     first_iface = self.shark.get_interfaces()[0]
     job = self.shark.create_job(first_iface, 'test_loaded_decorator', '300MB')
     with self.shark.create_clip(job, [time_filter],
                                 'test_decorator_clip') as clip:
         # Attribute access is enough to trigger the @loaded decorator.
         clip.size
 def test_clip_export(self):
     """Export a one-minute trace clip and verify the downloaded file."""
     job = self.shark.get_capture_jobs()[0]
     fltr = TimeFilter.parse_range('last 1 minute')
     clip = self.shark.create_clip(job, [fltr], 'test_clip')
     logger.info('created 1 min trace clip for export test')
     f = clip.download()
     f.close()
     try:
         self.assertTrue(os.path.exists(f.name))
     finally:
         # Always remove the exported file so a failed assertion does not
         # leak it onto disk (the original only removed it on success).
         if os.path.exists(f.name):
             os.remove(f.name)
 def test_loaded_decorator(self):
     """Accessing a lazy clip attribute should exercise @loaded."""
     time_filter = TimeFilter.parse_range("last 30 m")
     iface = self.shark.get_interfaces()[0]
     job = self.shark.create_job(iface, 'test_loaded_decorator',
                                 '300MB')
     with self.shark.create_clip(job, [time_filter],
                                 'test_decorator_clip') as clip:
         # Attribute access is enough to trigger the @loaded decorator.
         clip.size
 def test_clip_export(self):
     """Export a one-minute trace clip and verify the downloaded file."""
     job = self.shark.get_capture_jobs()[0]
     fltr = TimeFilter.parse_range('last 1 minute')
     clip = self.shark.create_clip(job, [fltr], 'test_clip')
     logger.info('created 1 min trace clip for export test')
     f = clip.download()
     f.close()
     try:
         self.assertTrue(os.path.exists(f.name))
     finally:
         # Always remove the exported file so a failed assertion does not
         # leak it onto disk (the original only removed it on success).
         if os.path.exists(f.name):
             os.remove(f.name)
 def test_create_clip(self):
     """Create a clip directly on the shark and again via job.add_clip."""
     interface = self.shark.get_interfaces()[0]
     job = self.shark.create_job(interface, 'test_create_clip', '300M')
     # Cover the last day of traffic.
     filters = [
         TimeFilter(datetime.datetime.now() - datetime.timedelta(1),
                    datetime.datetime.now())
     ]
     clip = self.shark.create_clip(job, filters, description='test_clip')
     clip.delete()
     # The same filters can also create a clip straight from the job.
     with job.add_clip(filters, 'test_add_clip') as clip:
         pass
# Example #12
# 0
def setup_defaults():
    """Return a (columns, filters) pair usable for creating test views."""
    columns = [
        Key('ip.src'),
        Key('ip.dst'),
        Value('generic.packets'),
        Value('http.duration', Operation.max, description="Max Duration"),
        Value('http.duration', Operation.avg, description="Avg Duration"),
    ]
    # generic.application no longer exists in 5.0, so filter on port 80
    # plus a two-hour time window instead.
    filters = [
        NetSharkFilter('(tcp.src_port=80) | (tcp.dst_port=80)'),
        TimeFilter.parse_range('last 2 hours'),
    ]
    return columns, filters
# Example #13
# 0
    def create_view(self,
                    src,
                    columns,
                    filters=None,
                    start_time=None,
                    end_time=None,
                    name=None,
                    charts=None,
                    sync=True,
                    sampling_time_msec=None):
        """ Create a new view on this NetShark.

        :param src: identifies the source of packets to be analyzed.
            It may be any packet source object.

        :param columns: specifies what information is extracted from
            packets and presented in this view.  It should be a list
            of :py:class:`Key <steelscript.netshark.core.types.Key>`
            and :py:class:`Value <steelscript.netshark.core.types.Value>`
            objects

        :param filters: an optional list of filters that can be used
            to limit which packets from the packet source are
            processed by this view.  The list passed in is never
            modified.

        :param start_time: along with `end_time`, restricts the view to
            a time window; both must be given together.

        :param end_time: see `start_time`.

        :param name: optional name for the view.

        :param sync: when True, wait for the view to be created.

        :raises ValueError: if only one of `start_time`/`end_time`
            is supplied.

        :returns: :class:`View4`

        """

        if start_time is not None or end_time is not None:
            if start_time is None or end_time is None:
                raise ValueError('must specify both start and end times')
            # Work on a copy so the caller's filter list is not mutated
            # (the original appended the TimeFilter to the caller's list).
            filters = [] if filters is None else list(filters)
            filters.append(TimeFilter(start_time, end_time))

        # Bind each filter to this NetShark instance.
        filterobjs = []
        if filters is not None:
            filterobjs.extend([filt.bind(self) for filt in filters])

        view = self.classes.View._create(self,
                                         src,
                                         columns,
                                         filterobjs,
                                         name=name,
                                         sync=sync,
                                         sampling_time_msec=sampling_time_msec)
        self._add_view(view)
        return view
    def test_view_on_job(self):
        """Create views on a capture job using several filter sets."""
        job = setup_capture_job(self.shark)
        columns, filters = setup_defaults()

        # With no filters the view should still be anchored at the job.
        with self.shark.create_view(
                job,
                columns,
                None,
                name='test_view_on_job') as view:
            view.get_data()
            path = view.config['input_source']['path']
            self.assertTrue(path.startswith('jobs'))

        # Regression check for bug 111168
        # http://bugzilla.nbttech.com/show_bug.cgi?id=111168
        with self.shark.create_view(job, columns, filters,
                                    name='bug_111168') as view:
            view.get_data()
            path = view.config['input_source']['path']
            self.assertTrue(path.startswith('jobs'))

        # Same regression, this time with only a two-hour time filter.
        with self.shark.create_view(job, columns,
                                    [TimeFilter.parse_range('last 2 hours')],
                                    name='bug_111168_2') as view:
            view.get_data()
            src_cfg = view.config['input_source']
            self.assertTrue(src_cfg['path'].startswith('jobs'))
            self.assertEqual(len(src_cfg['filters']), 1)
            tf = src_cfg['filters'][0]
            self.assertEqual(tf.start + datetime.timedelta(hours=2), tf.end)
    def test_shark_interface(self):
        """Exercise job, clip, file and directory lookups on an interface."""
        interfaces = self.shark.get_interfaces()
        try:
            interface = self.shark.get_interface_by_name('mon0')
        except Exception:
            # Narrowed from a bare except, which also swallowed
            # SystemExit/KeyboardInterrupt.  Reuse the list fetched above
            # instead of a second round-trip to the appliance.
            interface = interfaces[0]
        try:
            job = self.shark.get_capture_job_by_name('test_shark_interface_job')
            job.delete()
        except ValueError:
            # no leftover job from a previous run -- nothing to clean up
            pass
        job = self.shark.create_job(interface,
                                    'test_shark_interface_job',
                                    '300M')
        filters = [TimeFilter.parse_range('last 10 minutes')]
        with self.shark.create_clip(job, filters,
                                    'test_shark_interface_clip') as clip:
            self.shark.get_capture_jobs()
            self.shark.get_clips()
            self.assertNotEqual(
                self.shark.get_capture_job_by_name(
                    'test_shark_interface_job'), None)
            self.assertNotEqual(
                self.shark.get_trace_clip_by_description(
                    'test_shark_interface_clip'), None)
            try:
                self.assertNotEqual(
                    self.shark.get_file('/admin/noon.cap'), None)
            except RvbdHTTPException as e:
                # A missing sample file (404) is acceptable.
                if e.status != 404:
                    raise
            self.assertNotEqual(self.shark.get_files(), None)
            self.assertNotEqual(self.shark.get_dir('/admin/'), None)

        job.delete()
    def test_shark_interface(self):
        """Exercise job, clip, file and directory lookups on an interface."""
        interfaces = self.shark.get_interfaces()
        try:
            interface = self.shark.get_interface_by_name('mon0')
        except Exception:
            # Narrowed from a bare except, which also swallowed
            # SystemExit/KeyboardInterrupt.  Reuse the list fetched above
            # instead of a second round-trip to the appliance.
            interface = interfaces[0]
        try:
            job = self.shark.get_capture_job_by_name(
                'test_shark_interface_job')
            job.delete()
        except ValueError:
            # no leftover job from a previous run -- nothing to clean up
            pass
        job = self.shark.create_job(interface, 'test_shark_interface_job',
                                    '300M')
        filters = [TimeFilter.parse_range('last 10 minutes')]
        with self.shark.create_clip(job, filters,
                                    'test_shark_interface_clip') as clip:
            self.shark.get_capture_jobs()
            self.shark.get_clips()
            self.assertNotEqual(
                self.shark.get_capture_job_by_name('test_shark_interface_job'),
                None)
            self.assertNotEqual(
                self.shark.get_trace_clip_by_description(
                    'test_shark_interface_clip'), None)
            try:
                self.assertNotEqual(self.shark.get_file('/admin/noon.cap'),
                                    None)
            except RvbdHTTPException as e:
                # A missing sample file (404) is acceptable.
                if e.status != 404:
                    raise
            self.assertNotEqual(self.shark.get_files(), None)
            self.assertNotEqual(self.shark.get_dir('/admin/'), None)

        job.delete()
# Example #17
# 0
def create_trace_clip(shark, job):
    """Create a short (10-minute) trace clip for use by later tests."""
    time_filter = TimeFilter.parse_range('last 10 minutes')
    clip = shark.create_clip(job, [time_filter], 'test_clip')
    logger.info('created test trace clip')
    return clip
    def main(self):
        if self.options.timerange is not None:
            try:
                timefilter = TimeFilter.parse_range(self.options.timerange)
            except ValueError:
                print "Could not parse time filter expression."
                return
        elif (self.options.starttime is not None or
              self.options.endtime is not None):
            timeparser = TimeParser()

            if self.options.starttime is None:
                start_time = datetime.min
            else:
                try:
                    start_time = timeparser.parse(self.options.starttime)
                except ValueError:
                    print "Could not parse start timestamp"
                    return

            if self.options.endtime is None:
                end_time = datetime.now()
            else:
                try:
                    end_time = timeparser.parse(self.options.endtime)
                except ValueError:
                    print "Could not parse end timestamp"
                    return
            timefilter = TimeFilter(start_time, end_time)
        else:
            timefilter = None

        filters = [NetSharkFilter(f) for f in self.options.filters]
        if timefilter is not None:
            filters.append(timefilter)

        if self.options.file is None:
            sharks_info = [[self.options.host, self.options.username,
                            self.options.password]]
        else:
            sharks_info = self.get_csv_sharks_info(self.options.file)

        out_table = []
        for host, username, password in sharks_info:
            shark = NetShark(host, auth=UserAuth(username, password))

            jobs_bytes = self.get_jobs_bytes(shark, filters)
            if not jobs_bytes:
                print "(No data returned from NetShark {0}.)".format(host)
            else:
                for job_name, job_bytes in self.get_jobs_bytes(shark, filters):
                    out_table.append([host, job_name, job_bytes])

        if not out_table:
            print "No data found by any NetShark."
        else:
            out_table_sorted = sorted(out_table, reverse=True,
                                      key=operator.itemgetter(2))

            heads = ["NetShark", "Job", "Total bytes"]
            Formatter.print_table(out_table_sorted, heads)
# Example #19
# 0
    def main(self):
        if self.options.jobname:
            export_name = self.options.jobname
            source = self.netshark.get_capture_job_by_name(export_name)
            create_export = self.netshark.api.jobs.create_export
            status_export = self.netshark.api.jobs.get_export_details
            download_export = self.netshark.api.jobs.get_packets_from_export
            delete_export = self.netshark.api.jobs.delete_export
        elif self.options.clipname:
            export_name = self.options.clipname
            source = self.netshark.get_trace_clip_by_description(export_name)
            create_export = self.netshark.api.clips.create_export
            status_export = self.netshark.api.clips.get_export_details
            download_export = self.netshark.api.clips.get_packets_from_export
            delete_export = self.netshark.api.clips.delete_export

        filename = self.options.filename
        if not filename:
            filename = '%s_export.pcap' % export_name

        if self.options.timerange:
            timefilter = TimeFilter.parse_range(self.options.timerange)
        elif self.options.start_time and self.options.end_time:
            start = string_to_datetime(float(self.options.start_time))
            end = string_to_datetime(float(self.options.end_time))
            timefilter = TimeFilter(start, end)
        else:
            self.optparse.error('Select either --timerange or --start and --end times')

        config = {
            #'output_filename': filename,
            'output_format': 'PCAP_US',
            'start_time': datetime_to_seconds(timefilter.start),
            'end_time': datetime_to_seconds(timefilter.end),
        }

        if self.options.filters:
            filters = [f.split('=') for f in self.options.filters]
            filters = [r'%s="%s"' % (k, v) for k, v in filters]
            config['filters'] = [NetSharkFilter(filt).bind(self.netshark) for filt in filters]

        try:
            export_id = create_export(source.id, config=config)

            print 'Export created with ID: %s' % export_id

            status = status_export(source.id, export_id['id'])

            print 'Current status of export is:\n%s' % status

            if status['status']['state'] == 'RUNNING':
                print 'beginning download to file %s' % filename
                download_export(source.id, export_id['id'], filename)
        finally:
            try:
                print 'Trying to delete export ... ',
                delete_export(source.id, export_id['id'])
                print 'deleted.'
            except:
                print 'Error when trying to delete export.  Ignoring.'
                pass
# Example #20
# 0
    def run(self):
        """ Main execution method

        Builds Key/Value columns and filters from the table definition
        and job criteria, creates (or reuses, for persistent views) a
        NetShark view, waits for non-live views to finish, retrieves the
        data, and returns it wrapped in a QueryComplete.
        """
        criteria = self.job.criteria

        self.timeseries = False  # if key column called 'time' is created
        self.column_names = []

        # Resolution comes in as a time_delta
        resolution = timedelta_total_seconds(criteria.resolution)

        default_delta = 1000000000  # one second
        self.delta = int(default_delta * resolution)  # sample size interval

        if criteria.netshark_device == '':
            logger.debug('%s: No netshark device selected' % self.table)
            self.job.mark_error("No NetShark Device Selected")
            return False

        shark = DeviceManager.get_device(criteria.netshark_device)

        logger.debug("Creating columns for NetShark table %d" % self.table.id)

        # Create Key/Value Columns
        columns = []
        for tc in self.table.get_columns(synthetic=False):
            tc_options = tc.options
            if (tc.iskey and tc.name == 'time'
                    and tc_options.extractor == 'sample_time'):
                # don't create column, use the sample time for timeseries
                self.timeseries = True
                self.column_names.append('time')
                continue
            elif tc.iskey:
                c = Key(tc_options.extractor,
                        description=tc.label,
                        default_value=tc_options.default_value)
            else:
                # Value columns aggregate via the configured operation,
                # falling back to sum when the name is not recognized.
                if tc_options.operation:
                    try:
                        operation = getattr(Operation, tc_options.operation)
                    except AttributeError:
                        operation = Operation.sum
                        print('ERROR: Unknown operation attribute '
                              '%s for column %s.' %
                              (tc_options.operation, tc.name))
                else:
                    operation = Operation.none

                c = Value(tc_options.extractor,
                          operation,
                          description=tc.label,
                          default_value=tc_options.default_value)

            self.column_names.append(tc.name)
            columns.append(c)

        # Identify Sort Column
        sortidx = None
        if self.table.sortcols is not None:
            sortcol = Column.objects.get(table=self.table,
                                         name=self.table.sortcols[0])
            sort_name = sortcol.options.extractor
            # Match the sort column's extractor against the built columns.
            for i, c in enumerate(columns):
                if c.field == sort_name:
                    sortidx = i
                    break

        # Initialize filters
        criteria = self.job.criteria

        filters = []

        if hasattr(criteria, 'netshark_filterexpr'):
            logger.debug('calculating netshark filter expression ...')
            filterexpr = self.job.combine_filterexprs(
                exprs=criteria.netshark_filterexpr, joinstr="&")
            if filterexpr:
                logger.debug('applying netshark filter expression: %s' %
                             filterexpr)
                filters.append(NetSharkFilter(filterexpr))

        if hasattr(criteria, 'netshark_bpf_filterexpr'):
            # TODO evaluate how to combine multiple BPF filters
            # this will just apply one at a time
            filterexpr = criteria.netshark_bpf_filterexpr
            logger.debug('applying netshark BPF filter expression: %s' %
                         filterexpr)
            filters.append(BpfFilter(filterexpr))

        # Map the requested resolution onto a sampling interval;
        # millisecond sampling is only allowed for <= 1 second reports.
        resolution = criteria.resolution
        if resolution.seconds == 1:
            sampling_time_msec = 1000
        elif resolution.microseconds == 1000:
            sampling_time_msec = 1
            if criteria.duration > parse_timedelta('1s'):
                msg = ("Cannot run a millisecond report with a duration "
                       "longer than 1 second")
                raise ValueError(msg)
        else:
            sampling_time_msec = 1000

        # Get source type from options
        logger.debug("NetShark Source: %s" %
                     self.job.criteria.netshark_source_name)

        source = path_to_class(shark, self.job.criteria.netshark_source_name)
        live = source.is_live()
        persistent = criteria.get('netshark_persistent', False)

        if live and not persistent:
            raise ValueError("Live views must be run with persistent set")

        view = None
        if persistent:
            # First, see a view by this title already exists
            # Title is the table name plus a criteria hash including
            # all criteria *except* the timeframe
            h = hashlib.md5()
            h.update('.'.join([c.name for c in self.table.get_columns()]))
            for k, v in criteria.iteritems():
                if criteria.is_timeframe_key(k):
                    continue
                h.update('%s:%s' % (k, v))

            title = '/'.join([
                'steelscript-appfwk',
                str(self.table.id), self.table.namespace, self.table.name,
                h.hexdigest()
            ])
            view = NetSharkViews.find_by_name(shark, title)
            logger.debug("Persistent view title: %s" % title)
        else:
            # Only assign a title for persistent views
            title = None

        timefilter = TimeFilter(start=criteria.starttime, end=criteria.endtime)

        if not view:
            # Not persistent, or not yet created...

            if not live:
                # Cannot attach time filter to a live view,
                # it will be added later at get_data() time
                if criteria.starttime and criteria.endtime:
                    filters.append(timefilter)

                    logger.info("Setting netshark table %d timeframe to %s" %
                                (self.table.id, str(timefilter)))
                else:
                    # if times are set to zero, don't add to filter
                    # this will process entire timeframe of source instead
                    logger.info("Not setting netshark table %d timeframe" %
                                self.table.id)

            # Create it
            with lock:
                logger.debug("%s: Creating view for table %s" %
                             (str(self), str(self.table)))
                view = shark.create_view(source,
                                         columns,
                                         filters=filters,
                                         sync=False,
                                         name=title,
                                         sampling_time_msec=sampling_time_msec)

            if not live:
                # Poll until the (async-created) view has finished,
                # reporting progress back to the job as we go.
                done = False
                logger.debug("Waiting for netshark table %d to complete" %
                             self.table.id)
                while not done:
                    time.sleep(0.5)
                    with lock:
                        s = view.get_progress()
                        self.job.mark_progress(s)
                        self.job.save()
                        done = view.is_ready()

        logger.debug("Retrieving data for timeframe: %s" % timefilter)

        # Retrieve the data
        with lock:
            getdata_kwargs = {}
            if sortidx:
                getdata_kwargs['sortby'] = sortidx

            if self.table.options.aggregated:
                getdata_kwargs['aggregated'] = self.table.options.aggregated
            else:
                getdata_kwargs['delta'] = self.delta

            if live:
                # For live views, attach the time frame to the get_data()
                getdata_kwargs['start'] = (datetime_to_nanoseconds(
                    criteria.starttime))
                getdata_kwargs['end'] = (datetime_to_nanoseconds(
                    criteria.endtime))

            self.data = view.get_data(**getdata_kwargs)

            # Persistent views stay open for reuse by later runs.
            if not persistent:
                view.close()

        # Truncate to the table's configured row limit, if any.
        if self.table.rows > 0:
            self.data = self.data[:self.table.rows]

        self.parse_data()

        logger.info("NetShark Report %s returned %s rows" %
                    (self.job, len(self.data)))

        return QueryComplete(self.data)