def test_range_days(self):
    """Check parse_range() for day-granularity range strings."""
    start, end = parse_range('last 3 days')
    # BUG FIX: assertTrue(a, b) only tests the truthiness of a -- the
    # second argument is the failure *message*.  The intent here was
    # clearly an equality assertion.
    self.assertEqual(end - start, parse_timedelta('3 days'))
    start, end = parse_range('yesterday')
    self.assertEqual(end - start, parse_timedelta('1 day'))
    self.on_boundary(end, 'day')
    self.within_unit(end, 'day')
    start, end = parse_range('today')
    self.on_boundary(start, 'day')
    self.close_to_now(end)
    self.within_unit(start, 'day')
def __init__(self, **kwargs):
    """Initialize a duration choice field.

    Extra keyword arguments (popped before delegating to the parent):
      - special_values: optional list of sentinel choices (e.g. 'auto')
        that are passed through without duration parsing
      - initial: initial selection; parsed as a timedelta unless it is
        one of the special values
      - choices: list of durations, each either a bare string or a
        (value, label) pair
    """
    self._special_values = kwargs.pop('special_values', None)
    initial = kwargs.pop('initial', None)
    if ((initial is not None) and
            (self._special_values is None or
             initial not in self._special_values)):
        # Non-special initial value: parse it now, then mark it invalid
        # until it matches a normalized choice in the loop below.
        initial_td = parse_timedelta(initial)
        initial_valid = False
    else:
        initial_td = None
        initial_valid = True

    choices = []
    # Rebuild the choices list to ensure that
    # the value is normalized using timedelta_str
    for choice in kwargs.pop('choices'):
        td = None
        if not (isinstance(choice, list) or isinstance(choice, tuple)):
            # Bare string choice: value and label are both the
            # normalized duration string (special values pass through).
            if (self._special_values is None or
                    choice not in self._special_values):
                td = parse_timedelta(choice)
                td_str = timedelta_str(td)
                value = td_str
                label = td_str
            else:
                value = choice
                label = choice
        else:
            # (value, label) pair: only the value is normalized.
            (value, label) = choice
            if (self._special_values is None or
                    value not in self._special_values):
                td = parse_timedelta(value)
                value = timedelta_str(td)
        choice = (value, label)
        choices.append(choice)
        # Accept the initial value once it matches a known choice.
        if initial_td is not None and initial_td == td:
            initial = value
            initial_valid = True

    kwargs['choices'] = choices

    if not initial_valid:
        # NOTE(review): only records an error message, does not raise --
        # presumably inspected later by the caller/form; confirm.
        self.error_msg = ('Invalid initial %s : %s. '
                          % (kwargs['label'], initial))

    super(DurationField, self).__init__(initial=initial, **kwargs)
def parse_config(self, job_config):
    """Breaks up dict from config section into job and function options.

    Returns new dict suitable for passing to add_job, plus dict of
    interval definitions.
    """
    # pull job scheduler kwargs from job_config dict
    interval = dict()
    offset = timeutils.parse_timedelta(job_config.pop('offset', '0'))

    # BUG FIX: materialize the key list before popping -- under
    # Python 3, dict.keys() is a live view and popping entries while
    # iterating it raises RuntimeError.
    keys = list(job_config.keys())
    job_kwargs = dict((k[9:], job_config.pop(k)) for k in keys
                      if k.startswith('interval_'))

    # convert time fields to floats, populate interval dict
    for v in ['weeks', 'days', 'hours', 'minutes', 'seconds']:
        if v in job_kwargs:
            val = float(job_kwargs[v])
            job_kwargs[v] = val
            interval[v] = val

    interval['delta'] = datetime.timedelta(**interval)
    interval['offset'] = offset

    # hardcode the function call - don't allow config overrides
    func_params = self.get_job_function(job_config)
    job_kwargs.update(func_params)

    # embed interval and add remaining kwargs as
    # actual kwargs for function call
    job_config['interval'] = interval
    job_kwargs['kwargs'] = job_config

    return job_kwargs, interval
def post_process_table(self, field_options):
    """Add the standard NetProfiler criteria fields to this table.

    ``field_options`` supplies defaults for resolution and duration as
    well as the selectable lists for each.
    """
    resolution = field_options['resolution']
    if resolution != 'auto':
        # Normalize resolution (int seconds, or a duration string) to
        # the label used by NetProfiler via Report.RESOLUTION_MAP.
        if isinstance(resolution, int):
            res = resolution
        else:
            res = int(timedelta_total_seconds(parse_timedelta(resolution)))
        resolution = Report.RESOLUTION_MAP[res]
        field_options['resolution'] = resolution

    fields_add_device_selection(self, keyword='netprofiler_device',
                                label='NetProfiler', module='netprofiler',
                                enabled=True)

    duration = field_options['duration']
    # Integer durations are interpreted as minutes.
    if isinstance(duration, int):
        duration = "%d min" % duration

    fields_add_time_selection(self,
                              initial_duration=duration,
                              durations=field_options['durations'])

    fields_add_resolution(self,
                          initial=field_options['resolution'],
                          resolutions=field_options['resolutions'],
                          special_values=['auto'])

    self.fields_add_filterexpr()
def render(self, name, value, attrs):
    """Render the time-picker widget plus its activation javascript.

    ``attrs['initial_time']`` may be 'now - <delta>' (relative) or a
    literal time string; it controls the JS variable ``d`` used to seed
    the picker.
    """
    initial_time = attrs.get('initial_time', None)
    if initial_time:
        m = re.match("now *- *(.+)", initial_time)
        if m:
            # 'now - <delta>' --> seconds to subtract from now in JS
            secs = timedelta_total_seconds(parse_timedelta(m.group(1)))
            initial_time = (
                "d = new Date(); d.setSeconds(d.getSeconds()-%d);" % secs)
        else:
            initial_time = "d = '%s';" % initial_time
    else:
        initial_time = "d = new Date();"

    msg = '''
    {0}
    <span id="timenow_{name}" class="icon-time" title="Set time/date to now">
    </span>
    <script type="text/javascript">
          $("#id_{name}").timepicker({{
             step: 15,
             scrollDefaultNow:true,
             timeFormat:"g:i:s a"
          }});
          $("#timenow_{name}").click(function() {{
             $("#id_{name}").timepicker("setTime", new Date());
          }});
          {initial_time}
          $("#id_{name}").timepicker("setTime", d);
    </script>
    '''
    #'$("#id_{name}").timepicker("setTime", new Date());'
    return msg.format(super(TimeWidget, self).render(name, value, attrs),
                      name=name, initial_time=initial_time)
def render(self, name, value, attrs):
    """Render the time-picker widget plus its activation javascript.

    Supports two extra widget attrs: ``initial_time`` ('now - <delta>'
    or a literal time string) and ``round_initial`` (seconds to floor
    the initial time to).
    """
    initial_time = attrs.get('initial_time', None)
    if initial_time:
        m = re.match("now *- *(.+)", initial_time)
        if m:
            # 'now - <delta>' --> seconds to subtract from now in JS
            secs = timedelta_total_seconds(parse_timedelta(m.group(1)))
            initial_time = (
                "d = new Date(); d.setSeconds(d.getSeconds()-%d);" % secs)
        else:
            initial_time = "d = new Date('{}');".format(initial_time)
    else:
        initial_time = "d = new Date();"

    round_initial = attrs.get('round_initial', None)
    if round_initial:
        # Floor the initial time to a multiple of round_initial seconds.
        js = (' p = %d*1000; '
              'd = new Date(Math.floor(d.getTime() / p) * p);'
              % round_initial)
        initial_time += js

        # only pin manually entered times if we are rounding above a minute
        if round_initial >= 60:
            # NOTE(review): under Python 3 this division yields a float
            # (e.g. 5.0) which is embedded in the JS -- confirm intended.
            step = int(round_initial) / 60
            force_round_time = 'true'
        else:
            step = 15
            force_round_time = 'false'
    else:
        step = 15
        force_round_time = 'false'

    msg = '''
    <span class="input-group-addon">
    <span id="timenow_{name}" class="glyphicon glyphicon-time"
     title="Set time/date to now">
    </span>
    </span>
    {0}
    <script type="text/javascript">
          $("#id_{name}").timepicker({{
             step: {step},
             scrollDefaultNow:true,
             forceRoundTime: {force_round_time},
             timeFormat:"g:i:s a"
          }});
          $("#timenow_{name}").click(function() {{
             $("#id_{name}").timepicker("setTime", new Date());
          }});
          {initial_time}
          // align to timezone
          var offset = ($('#tz').html()/100)*60*60*1000;
          d = rvbd.timeutil.convertDateToUTC(d);
          d.setTime(d.getTime() + offset);
          $("#id_{name}").timepicker("setTime", d);
    </script>
    '''
    return msg.format(super(TimeWidget, self).render(name, value, attrs),
                      name=name, initial_time=initial_time,
                      step=step, force_round_time=force_round_time)
def render(self, name, value, attrs):
    """Render the date-picker widget plus its activation javascript.

    ``attrs['initial_date']`` may be 'now - <delta>' (relative) or a
    literal date string.
    """
    initial_date = attrs.get('initial_date', None)
    if initial_date:
        m = re.match("now *- *(.+)", initial_date)
        if m:
            # 'now - <delta>' --> seconds to subtract from now in JS
            secs = timedelta_total_seconds(parse_timedelta(m.group(1)))
            initial_date = (
                "d = new Date(); d.setSeconds(d.getSeconds()-%d);"
                % secs)
        else:
            initial_date = "d = '%s';" % initial_date
    else:
        initial_date = "d = new Date();"

    msg = '''
    {0}
    <span id="datenow_{name}" class="icon-calendar" title="Set date to today">
    </span>
    <script type="text/javascript">
          $("#id_{name}").datepicker({{
             format: "mm/dd/YY",
             defaultDate: +2,
             autoclose: true
          }});
          {initial_date}
          $("#id_{name}").datepicker("setDate", d);
          $("#datenow_{name}").click(function() {{
             $("#id_{name}").datepicker("setDate", new Date());
          }});
    </script>
    '''
    return msg.format(super(DateWidget, self).render(name, value, attrs),
                      name=name, initial_date=initial_date)
def analyze(self, jobs):
    """Zoom into the most interesting time slice of the source job.

    Finds the column whose min/max deviates most from its mean, takes
    the row where that extreme occurs, and schedules the related
    template table over a narrow window centered on that row's time.
    """
    logger.debug('%s analyze - received jobs: %s' % (self, jobs))

    basetable = Table.from_ref(
        self.table.options['related_tables']['template']
    )
    data = jobs['source'].data()
    if data is None:
        return QueryError('No data available to analyze')

    # find column whose min/max is largest deviation from mean
    # then take row from that column where min/max occurs
    # NOTE(review): DataFrame.ix is deprecated/removed in newer pandas;
    # .loc is the modern equivalent -- confirm pandas version pin.
    if self.table.options['max']:
        idx = (data.max() / data.mean()).idxmax()
        frow = data.ix[data[idx].idxmax()]
    else:
        idx = (data.min() / data.mean()).idxmin()
        frow = data.ix[data[idx].idxmin()]

    # get time value from extracted row to calculate new start/end times
    ftime = frow['time']
    duration = parse_timedelta(self.table.options['zoom_duration'])
    resolution = parse_timedelta(self.table.options['zoom_resolution'])

    # center the zoom window on the extreme point
    stime = ftime - (duration / 2)
    etime = ftime + (duration / 2)

    criteria = self.job.criteria
    if 'resolution' in criteria:
        criteria['resolution'] = resolution
    else:
        criteria['granularity'] = resolution

    criteria['duration'] = duration
    criteria['_orig_duration'] = duration
    criteria['starttime'] = stime
    criteria['_orig_starttime'] = stime
    criteria['endtime'] = etime
    criteria['_orig_endtime'] = etime

    # BUG FIX: was logging.debug (root logger); use the module logger
    # for consistency with the rest of this method.
    logger.debug('Creating FocusedAnalysis job with updated criteria %s'
                 % criteria)

    job = Job.create(basetable, criteria, self.job.update_progress)
    return QueryContinue(self.finish, {'job': job})
def decompress(self, value):
    """Split a single duration value into a [value, label] pair for the
    underlying multi-widget.

    ``value`` may be a duration string (normalized to total seconds
    here) or already a number of seconds.
    """
    if isinstance(value, str) or isinstance(value, unicode):
        # Normalize a duration string to total seconds
        value = timedelta_total_seconds(parse_timedelta(value))
    if value:
        # Look for an exact match among the configured choices
        m = [v for v in self.choices if v[0] == value]
        if len(m) == 1:
            return m[0]
        else:
            # No unique match: synthesize a minutes label
            return [0, '%d min' % (value / 60)]
    # NOTE(review): a zero-second duration is falsy and also lands
    # here -- confirm that is intended.
    return [None, None]
def test_range_weeks(self):
    """Check parse_range() for week-granularity range strings."""
    start, end = parse_range('last 3 weeks')
    # BUG FIX: assertTrue(a, b) only tests the truthiness of a -- the
    # second argument is the failure *message*.  Use real equality.
    self.assertEqual(end - start, parse_timedelta('3 weeks'))
    start, end = parse_range('previous 3 weeks')
    self.assertEqual(end - start, parse_timedelta('3 weeks'))
    self.on_boundary(end, 'day')
    self.within_unit(end, 'week')
    self.assertEqual(end.weekday(), 6)
    start, end = parse_range('this week')
    self.on_boundary(start, 'day')
    self.close_to_now(end)
    self.within_unit(start, 'week')
    self.assertEqual(start.weekday(), 6)
    start, end = parse_range('previous week', begin_monday=True)
    self.assertEqual(start.weekday(), 0)
    start, end = parse_range('this week', begin_monday=True)
    self.assertEqual(start.weekday(), 0)
def to_python(self, value):
    """Convert a submitted form value into a timedelta.

    Returns None for empty values and passes special values through
    unchanged; anything else is parsed with parse_timedelta().

    :raises ValidationError: if the value cannot be parsed.
    """
    if value in validators.EMPTY_VALUES:
        v = None
    elif self._special_values and value in self._special_values:
        v = value
    else:
        try:
            v = parse_timedelta(value)
        # BUG FIX: a bare 'except:' also swallows SystemExit and
        # KeyboardInterrupt; catch Exception instead.
        except Exception:
            raise ValidationError('Invalid duration string: %s' % value)
    return v
def __init__(self, start=None, end=None, duration=None, time_range=None):
    """Initialize a TimeFilter object.

    :param start: integer, start time in epoch seconds
    :param end: integer, end time in epoch seconds
    :param duration: string, time duration, i.e. '1 hour'
    :param time_range: string, time range, i.e. 'last 1 hour'
        or '4/21/13 4:00 to 4/21/13 5:00'

    Exactly one of these combinations is accepted: nothing at all,
    start+end, time_range alone, or duration with exactly one of
    start/end.  Any other combination raises AppResponseException.
    """
    invalid = False
    if not start and not end and not duration and not time_range:
        # when querying file or clip, usually no time filters
        # are provided
        self.start = None
        self.end = None
    elif start and end:
        if duration or time_range:
            invalid = True
        else:
            self.start = str(start)
            self.end = str(end)
    elif time_range:
        if start or end or duration:
            invalid = True
        else:
            # Convert the range string to datetimes, store epoch seconds
            start, end = timeutils.parse_range(time_range)
            self.start = timeutils.datetime_to_seconds(start)
            self.end = timeutils.datetime_to_seconds(end)
    elif duration:
        if not start and not end:
            invalid = True
        else:
            # Derive the missing endpoint from the duration
            td = timeutils.parse_timedelta(duration).total_seconds()
            if start:
                self.start = str(start)
                self.end = str(int(start + td))
            else:
                self.start = str(int(end - td))
                self.end = str(end)
    elif start or end:
        # A lone start or end (without duration) is ambiguous
        invalid = True

    if invalid:
        msg = ('Start/end timestamps can not be derived from start "{}" '
               'end "{}" duration "{}" time_range "{}".'
               .format(start, end, duration, time_range))
        raise AppResponseException(msg)
def test_range_months(self):
    """Check parse_range() for month-granularity range strings."""
    start, end = parse_range('last 3 months')
    # BUG FIX: assertTrue(a, b) only tests the truthiness of a -- the
    # second argument is the failure *message*.  Exact equality with
    # parse_timedelta('3 months') is not guaranteed for calendar
    # months, so assert a non-empty range instead.
    self.assertTrue(end > start)
    start, end = parse_range('previous 3 months')
    # assertEquals is a deprecated alias of assertEqual
    self.assertEqual(start, end - relativedelta(months=3))
    self.on_boundary(end, 'month')
    self.within_unit(end, 'month')
    start, end = parse_range('this month')
    self.on_boundary(start, 'month')
    self.close_to_now(end)
    self.within_unit(start, 'month')
def test_range_years(self):
    """Check parse_range() for year-granularity range strings."""
    start, end = parse_range('last 3 years')
    # BUG FIX: assertTrue(a, b) only tests the truthiness of a -- the
    # second argument is the failure *message*.  Exact equality with
    # parse_timedelta('3 years') is not guaranteed for calendar years,
    # so assert a non-empty range instead.
    self.assertTrue(end > start)
    start, end = parse_range('previous 3 years')
    # assertEquals is a deprecated alias of assertEqual
    self.assertEqual(start, end - relativedelta(years=3))
    self.on_boundary(end, 'year')
    self.within_unit(end, 'year')
    start, end = parse_range('this year')
    self.on_boundary(start, 'year')
    self.close_to_now(end)
    self.within_unit(start, 'year')
def test_range_months(self):
    """Check parse_range() for month-granularity range strings."""
    start, end = parse_range('last 3 months')
    # BUG FIX: assertTrue(a, b) only tests the truthiness of a -- the
    # second argument is the failure *message*.  Exact equality with
    # parse_timedelta('3 months') is not guaranteed for calendar
    # months, so assert a non-empty range instead.
    self.assertTrue(end > start)
    start, end = parse_range('previous 3 months')
    self.assertEqual(start, end - relativedelta(months=3))
    self.on_boundary(end, 'month')
    self.within_unit(end, 'month')
    start, end = parse_range('this month')
    self.on_boundary(start, 'month')
    self.close_to_now(end)
    self.within_unit(start, 'month')
def test_range_years(self):
    """Check parse_range() for year-granularity range strings."""
    start, end = parse_range('last 3 years')
    # BUG FIX: assertTrue(a, b) only tests the truthiness of a -- the
    # second argument is the failure *message*.  Exact equality with
    # parse_timedelta('3 years') is not guaranteed for calendar years,
    # so assert a non-empty range instead.
    self.assertTrue(end > start)
    start, end = parse_range('previous 3 years')
    self.assertEqual(start, end - relativedelta(years=3))
    self.on_boundary(end, 'year')
    self.within_unit(end, 'year')
    start, end = parse_range('this year')
    self.on_boundary(start, 'year')
    self.close_to_now(end)
    self.within_unit(start, 'year')
def export(self, filename, starttime=None, endtime=None, duration=None):
    """Export (a slice of) this capture to a new PCAP file via editcap.

    :param str filename: path of the PCAP file to create
    :param str starttime: optional start-time filter
    :param str endtime: optional end-time filter
    :param str duration: optional duration filter; combined with either
        starttime or endtime to derive the missing endpoint
    """
    cmd = ['editcap']

    # Accept either datetime objects or parseable strings.
    if isinstance(starttime, basestring):
        starttime = dateutil_parse(starttime)
    if isinstance(endtime, basestring):
        endtime = dateutil_parse(endtime)

    if duration is not None:
        if isinstance(duration, basestring):
            duration = parse_timedelta(duration)
        if not starttime and not endtime:
            raise ValueError("Must specify either starttime or "
                             "endtime with duration")
        # Derive the missing endpoint from the known one.
        if starttime:
            endtime = starttime + duration
        else:
            starttime = endtime - duration

    timefmt = '%Y-%m-%d %H:%M:%S'
    if starttime is not None:
        cmd.extend(['-A', starttime.strftime(timefmt)])
    if endtime is not None:
        cmd.extend(['-B', endtime.strftime(timefmt)])

    cmd.extend([self.filename, filename])

    logger.info('subprocess: %s' % ' '.join(cmd))
    subprocess.check_output(cmd)
    return PcapFile(filename)
def resample(df, timecol, interval, how):
    """Resample the input dataframe.

    :param str timecol: the name of the column containing the row time
    :param timedelta,str interval: the new interval
    :param how: method for down or resampling (see
        pandas.Dataframe.resample)
    """
    df[timecol] = pandas.DatetimeIndex(df[timecol])
    df.set_index(timecol, inplace=True)
    if isinstance(interval, timedelta):
        # BUG FIX: interval is already a timedelta in this branch --
        # convert it directly instead of passing it back through
        # parse_timedelta(), which expects a duration string.
        interval = '%ss' % (timedelta_total_seconds(interval))

    df = df.resample(interval, how=how).reset_index()
    return df
def test_range_quarters(self):
    """Check parse_range() for quarter-granularity range strings."""
    start, end = parse_range('last 3 q')
    # BUG FIX: assertTrue(a, b) only tests the truthiness of a -- the
    # second argument is the failure *message*.  Exact equality with
    # parse_timedelta('3 q') is not guaranteed for calendar quarters,
    # so assert a non-empty range instead.
    self.assertTrue(end > start)
    start, end = parse_range('previous 3 q')
    self.assertEqual(start, end - relativedelta(months=9))
    self.on_boundary(end, 'month')
    self.within_unit(end, 'quarter')
    self.assertIn(end.month, [1, 4, 7, 10])
    start, end = parse_range('this q')
    self.on_boundary(start, 'month')
    self.close_to_now(end)
    self.within_unit(start, 'quarter')
    self.assertIn(start.month, [1, 4, 7, 10])
def test_range_quarters(self):
    """Check parse_range() for quarter-granularity range strings."""
    start, end = parse_range('last 3 q')
    # BUG FIX: assertTrue(a, b) only tests the truthiness of a -- the
    # second argument is the failure *message*.  Exact equality with
    # parse_timedelta('3 q') is not guaranteed for calendar quarters,
    # so assert a non-empty range instead.
    self.assertTrue(end > start)
    start, end = parse_range('previous 3 q')
    # assertEquals is a deprecated alias of assertEqual
    self.assertEqual(start, end - relativedelta(months=9))
    self.on_boundary(end, 'month')
    self.within_unit(end, 'quarter')
    self.assertIn(end.month, [1, 4, 7, 10])
    start, end = parse_range('this q')
    self.on_boundary(start, 'month')
    self.close_to_now(end)
    self.within_unit(start, 'quarter')
    self.assertIn(start.month, [1, 4, 7, 10])
def export(self, filename, starttime=None, endtime=None, duration=None):
    """Export (a slice of) this capture to a new PCAP file via editcap.

    :param str filename: path of the PCAP file to create
    :param str starttime: optional start-time filter
    :param str endtime: optional end-time filter
    :param str duration: optional duration filter; combined with either
        starttime or endtime to derive the missing endpoint
    """
    cmd = ['editcap']

    # Accept either datetime objects or parseable strings.
    if isinstance(starttime, str):
        starttime = dateutil_parse(starttime)
    if isinstance(endtime, str):
        endtime = dateutil_parse(endtime)

    if duration is not None:
        if isinstance(duration, str):
            duration = parse_timedelta(duration)
        if not starttime and not endtime:
            raise ValueError("Must specify either starttime or "
                             "endtime with duration")
        # Derive the missing endpoint from the known one.
        if starttime:
            endtime = starttime + duration
        else:
            starttime = endtime - duration

    timefmt = '%Y-%m-%d %H:%M:%S'
    if starttime is not None:
        cmd.extend(['-A', starttime.strftime(timefmt)])
    if endtime is not None:
        cmd.extend(['-B', endtime.strftime(timefmt)])

    cmd.extend([self.filename, filename])

    logger.info('subprocess: %s' % ' '.join(cmd))
    subprocess.check_output(cmd, env=popen_env, universal_newlines=True)
    return PcapFile(filename)
def resample(df, timecol, interval, how='sum'):
    """Resample the input dataframe.

    :param str timecol: the name of the column containing the row time
    :param timedelta,str interval: the new interval
    :param how: method for down or resampling (see
        pandas.Dataframe.resample)
    """
    df[timecol] = pandas.DatetimeIndex(df[timecol])
    df.set_index(timecol, inplace=True)
    if isinstance(interval, timedelta):
        # BUG FIX: interval is already a timedelta in this branch --
        # convert it directly instead of passing it back through
        # parse_timedelta(), which expects a duration string.
        interval = '%ss' % (timedelta_total_seconds(interval))

    # use new pandas resample API
    # http://pandas.pydata.org/pandas-docs/stable/whatsnew.html#resample-api
    r = df.resample(interval)
    df = getattr(r, how)()
    df.reset_index(inplace=True)
    return df
def render(self, name, value, attrs):
    """Render the date-picker widget plus its activation javascript.

    Supports two extra widget attrs: ``initial_date`` ('now - <delta>'
    or a literal date string) and ``round_initial`` (seconds to floor
    the initial date to).
    """
    initial_date = attrs.get('initial_date', None)
    if initial_date:
        m = re.match("now *- *(.+)", initial_date)
        if m:
            # 'now - <delta>' --> seconds to subtract from now in JS
            secs = timedelta_total_seconds(parse_timedelta(m.group(1)))
            initial_date = (
                "d = new Date(); d.setSeconds(d.getSeconds()-%d);" % secs
            )
        else:
            initial_date = "d = '%s';" % initial_date
    else:
        initial_date = "d = new Date();"

    round_initial = attrs.get('round_initial', None)
    if round_initial:
        # Floor the initial date to a multiple of round_initial seconds.
        js = (' p = %d*1000; '
              'd = new Date(Math.floor(d.getTime() / p) * p);'
              % round_initial)
        initial_date += js

    msg = '''
    <span class="input-group-addon">
    <span id="datenow_{name}" class="glyphicon glyphicon-calendar"
     title="Set date to today">
    </span>
    </span>
    {0}
    <script type="text/javascript">
          $("#id_{name}").datepicker({{
             format: "mm/dd/YY",
             defaultDate: +2,
             autoclose: true
          }});
          {initial_date}
          $("#id_{name}").datepicker("setDate", d);
          $("#datenow_{name}").click(function() {{
             $("#id_{name}").datepicker("setDate", new Date());
          }});
    </script>
    '''
    return msg.format(
        super(DateWidget, self).render(name, value, attrs),
        name=name,
        initial_date=initial_date
    )
def export(self, filename, starttime=None, endtime=None, duration=None):
    """Export (a slice of) this capture to a new PCAP file via editcap."""
    cmd = ['editcap']

    # Accept either datetime objects or parseable strings.
    if isinstance(starttime, basestring):
        starttime = dateutil_parse(starttime)
    if isinstance(endtime, basestring):
        endtime = dateutil_parse(endtime)

    if duration is not None:
        if isinstance(duration, basestring):
            duration = parse_timedelta(duration)
        if not starttime and not endtime:
            raise ValueError("Must specify either starttime or "
                             "endtime with duration")
        # Derive the missing endpoint from the known one.
        if starttime:
            endtime = starttime + duration
        else:
            starttime = endtime - duration

    timefmt = '%Y-%m-%d %H:%M:%S'
    if starttime is not None:
        cmd.extend(['-A', starttime.strftime(timefmt)])
    if endtime is not None:
        cmd.extend(['-B', endtime.strftime(timefmt)])

    cmd.extend([self.filename, filename])

    logger.info('subprocess: %s' % ' '.join(cmd))
    subprocess.check_output(cmd)
    return PcapFile(filename)
def run(self, template_id, timefilter=None, resolution="auto",
        query=None, trafficexpr=None, data_filter=None, sync=True,
        custom_criteria=None):
    """Create the report and begin running the report on NetProfiler.

    If the `sync` option is True, periodically poll until the report is
    complete, otherwise return immediately.

    :param int template_id: numeric id of the template to use for the
        report

    :param timefilter: range of time to query, instance of
        :class:`TimeFilter`

    :param str resolution: data resolution, such as (1min, 15min, etc.),
        defaults to 'auto'

    :param str query: query object containing criteria

    :param trafficexpr: instance of :class:`TrafficFilter`

    :param str data_filter: deprecated filter to run against report data

    :param bool sync: if True, poll for status until the report is
        complete
    """
    self.template_id = template_id

    # Default to the last five minutes when no timefilter is given
    if timefilter is None:
        self.timefilter = TimeFilter.parse_range("last 5 min")
    else:
        self.timefilter = timefilter
    self.query = query
    self.trafficexpr = trafficexpr
    self.data_filter = data_filter

    self.id = None
    self.queries = list()
    self.last_status = None

    if resolution not in ["auto", "1min", "15min", "hour",
                          "6hour", "day", "week", "month"]:
        # Non-standard resolution strings are normalized to one of the
        # known labels via RESOLUTION_MAP keyed by total seconds
        rd = parse_timedelta(resolution)
        resolution = self.RESOLUTION_MAP[int(timedelta_total_seconds(rd))]

    self.resolution = resolution

    start = datetime_to_seconds(self.timefilter.start)
    end = datetime_to_seconds(self.timefilter.end)

    criteria = RecursiveUpdateDict(**{"time_frame": {"start": int(start),
                                                     "end": int(end)}})

    if self.query is not None:
        criteria["query"] = self.query

    if self.resolution != "auto":
        criteria["time_frame"]["resolution"] = self.resolution

    if self.data_filter:
        # data_filter is a (key, value) pair for the deprecated API
        criteria['deprecated'] = {self.data_filter[0]: self.data_filter[1]}

    if self.trafficexpr is not None:
        criteria["traffic_expression"] = self.trafficexpr.filter

    if custom_criteria:
        # NOTE(review): iteritems() is Python 2 only
        for k, v in custom_criteria.iteritems():
            criteria[k] = v

    to_post = {"template_id": self.template_id,
               "criteria": criteria}

    logger.debug("Posting JSON: %s" % to_post)

    response = self.profiler.api.report.reports(data=to_post)

    try:
        self.id = int(response['id'])
    except KeyError:
        raise ValueError(
            "failed to retrieve report id from report creation response: %s"
            % response)

    logger.info("Created report %d" % self.id)

    if sync:
        self.wait_for_complete()
def within_unit(self, dt, unit):
    """Assert-helper: True when *dt* lies at most one *unit* in the past."""
    elapsed = datetime.now() - dt
    return elapsed <= parse_timedelta(unit)
def run(self): """ Main execution method """ criteria = self.job.criteria self.timeseries = False # if key column called 'time' is created self.column_names = [] # Resolution comes in as a time_delta resolution = timedelta_total_seconds(criteria.resolution) default_delta = 1000000000 # one second self.delta = int(default_delta * resolution) # sample size interval if criteria.netshark_device == '': logger.debug('%s: No netshark device selected' % self.table) self.job.mark_error("No NetShark Device Selected") return False shark = DeviceManager.get_device(criteria.netshark_device) logger.debug("Creating columns for NetShark table %d" % self.table.id) # Create Key/Value Columns columns = [] for tc in self.table.get_columns(synthetic=False): tc_options = tc.options if (tc.iskey and tc.name == 'time' and tc_options.extractor == 'sample_time'): # don't create column, use the sample time for timeseries self.timeseries = True self.column_names.append('time') continue elif tc.iskey: c = Key(tc_options.extractor, description=tc.label, default_value=tc_options.default_value) else: if tc_options.operation: try: operation = getattr(Operation, tc_options.operation) except AttributeError: operation = Operation.sum print('ERROR: Unknown operation attribute ' '%s for column %s.' 
% (tc_options.operation, tc.name)) else: operation = Operation.none c = Value(tc_options.extractor, operation, description=tc.label, default_value=tc_options.default_value) self.column_names.append(tc.name) columns.append(c) # Identify Sort Column sortidx = None if self.table.sortcols is not None: sortcol = Column.objects.get(table=self.table, name=self.table.sortcols[0]) sort_name = sortcol.options.extractor for i, c in enumerate(columns): if c.field == sort_name: sortidx = i break # Initialize filters criteria = self.job.criteria filters = [] if hasattr(criteria, 'netshark_filterexpr'): logger.debug('calculating netshark filter expression ...') filterexpr = self.job.combine_filterexprs( exprs=criteria.netshark_filterexpr, joinstr="&") if filterexpr: logger.debug('applying netshark filter expression: %s' % filterexpr) filters.append(NetSharkFilter(filterexpr)) if hasattr(criteria, 'netshark_bpf_filterexpr'): # TODO evaluate how to combine multiple BPF filters # this will just apply one at a time filterexpr = criteria.netshark_bpf_filterexpr logger.debug('applying netshark BPF filter expression: %s' % filterexpr) filters.append(BpfFilter(filterexpr)) resolution = criteria.resolution if resolution.seconds == 1: sampling_time_msec = 1000 elif resolution.microseconds == 1000: sampling_time_msec = 1 if criteria.duration > parse_timedelta('1s'): msg = ("Cannot run a millisecond report with a duration " "longer than 1 second") raise ValueError(msg) else: sampling_time_msec = 1000 # Get source type from options logger.debug("NetShark Source: %s" % self.job.criteria.netshark_source_name) source = path_to_class(shark, self.job.criteria.netshark_source_name) live = source.is_live() persistent = criteria.get('netshark_persistent', False) if live and not persistent: raise ValueError("Live views must be run with persistent set") view = None if persistent: # First, see a view by this title already exists # Title is the table name plus a criteria hash including # all criteria 
*except* the timeframe h = hashlib.md5() h.update('.'.join([c.name for c in self.table.get_columns()])) for k, v in criteria.iteritems(): if criteria.is_timeframe_key(k): continue h.update('%s:%s' % (k, v)) title = '/'.join([ 'steelscript-appfwk', str(self.table.id), self.table.namespace, self.table.name, h.hexdigest() ]) view = NetSharkViews.find_by_name(shark, title) logger.debug("Persistent view title: %s" % title) else: # Only assign a title for persistent views title = None timefilter = TimeFilter(start=criteria.starttime, end=criteria.endtime) if not view: # Not persistent, or not yet created... if not live: # Cannot attach time filter to a live view, # it will be added later at get_data() time if criteria.starttime and criteria.endtime: filters.append(timefilter) logger.info("Setting netshark table %d timeframe to %s" % (self.table.id, str(timefilter))) else: # if times are set to zero, don't add to filter # this will process entire timeframe of source instead logger.info("Not setting netshark table %d timeframe" % self.table.id) # Create it with lock: logger.debug("%s: Creating view for table %s" % (str(self), str(self.table))) view = shark.create_view(source, columns, filters=filters, sync=False, name=title, sampling_time_msec=sampling_time_msec) if not live: done = False logger.debug("Waiting for netshark table %d to complete" % self.table.id) while not done: time.sleep(0.5) with lock: s = view.get_progress() self.job.mark_progress(s) self.job.save() done = view.is_ready() logger.debug("Retrieving data for timeframe: %s" % timefilter) # Retrieve the data with lock: getdata_kwargs = {} if sortidx: getdata_kwargs['sortby'] = sortidx if self.table.options.aggregated: getdata_kwargs['aggregated'] = self.table.options.aggregated else: getdata_kwargs['delta'] = self.delta if live: # For live views, attach the time frame to the get_data() getdata_kwargs['start'] = (datetime_to_nanoseconds( criteria.starttime)) getdata_kwargs['end'] = (datetime_to_nanoseconds( 
criteria.endtime)) self.data = view.get_data(**getdata_kwargs) if not persistent: view.close() if self.table.rows > 0: self.data = self.data[:self.table.rows] self.parse_data() logger.info("NetShark Report %s returned %s rows" % (self.job, len(self.data))) return QueryComplete(self.data)
def get(self, request, namespace=None, report_slug=None, widget_slug=None):
    """Return the JSON definition(s) for a report's widget(s).

    With ``widget_slug`` set, only that widget's definition is
    returned; otherwise all widgets of the report are included.
    """
    try:
        report = Report.objects.get(namespace=namespace,
                                    slug=report_slug)
    except:
        # NOTE(review): bare except hides unrelated errors; presumably
        # only Report.DoesNotExist is expected here -- confirm.
        raise Http404

    logger.debug("Received GET for report %s widget definition" %
                 report_slug)

    if widget_slug:
        w = get_object_or_404(
            Widget,
            slug=widget_slug,
            section__in=Section.objects.filter(report=report)
        )
        widgets = [w]
    else:
        # Add 'id' to order_by so that stacked widgets will return
        # with the same order as created
        widgets = report.widgets().order_by('row', 'col', 'id')

    # parse time and localize to user profile timezone
    timezone = get_timezone(request)
    now = datetime.datetime.now(timezone)

    # pin the endtime to a round interval if we are set to
    # reload periodically
    minutes = report.reload_minutes
    offset = report.reload_offset
    if minutes:
        # avoid case of long duration reloads to have large reload gap
        # e.g. 24-hour report will consider 12:15 am or later a valid time
        # to roll-over the time time values, rather than waiting
        # until 12:00 pm
        trimmed = round_time(dt=now, round_to=60*minutes, trim=True)
        if now - trimmed > datetime.timedelta(seconds=offset):
            now = trimmed
        else:
            now = round_time(dt=now, round_to=60*minutes)

    widget_defs = []

    for w in widgets:
        # get default criteria values for widget
        # and set endtime to now, if applicable
        widget_fields = w.collect_fields()
        form = TableFieldForm(widget_fields, use_widgets=False)

        # create object from the tablefield keywords
        # and populate it with initial data generated by default
        keys = form._tablefields.keys()
        criteria = dict(zip(keys, [None]*len(keys)))
        criteria.update(form.data)

        # calculate time offsets
        if 'endtime' in criteria:
            criteria['endtime'] = now.isoformat()

            # only consider starttime if its paired with an endtime
            if 'starttime' in criteria:
                start = now
                field = form.fields['starttime']
                initial = field.widget.attrs.get('initial_time', None)
                if initial:
                    m = re.match("now *- *(.+)", initial)
                    if m:
                        # initial_time of the form 'now - <delta>'
                        delta = parse_timedelta(m.group(1))
                        start = now - delta
                criteria['starttime'] = start.isoformat()

        # Check for "Meta Widget" criteria items
        system_settings = SystemSettings.get_system_settings()
        if system_settings.ignore_cache:
            criteria['ignore_cache'] = system_settings.ignore_cache
        if system_settings.developer:
            criteria['debug'] = system_settings.developer

        # setup json definition object
        widget_def = w.get_definition(criteria)
        widget_defs.append(widget_def)

        # Build the primary key corresponding to static data for this
        # widget
        if report.static:
            rw_id = '-'.join([namespace, report_slug,
                              widget_def['widgetslug']])
            # Add cached widget data if available.
            try:
                data_cache = WidgetDataCache.objects.get(
                    report_widget_id=rw_id)
                widget_def['dataCache'] = data_cache.data
            except WidgetDataCache.DoesNotExist:
                msg = "No widget data cache available with id %s." % rw_id
                resp = {'message': msg,
                        'status': 'error',
                        'exception': ''}
                widget_def['dataCache'] = json.dumps(resp)

    report_def = self.report_def(widget_defs, now)

    return JsonResponse(report_def, safe=False)
def render(self, name, value, attrs):
    """Render the time-picker widget plus its activation javascript.

    Supports two extra widget attrs: ``initial_time`` ('now - <delta>'
    or a literal time string) and ``round_initial`` (seconds to floor
    the initial time to).
    """
    initial_time = attrs.get('initial_time', None)
    if initial_time:
        m = re.match("now *- *(.+)", initial_time)
        if m:
            # 'now - <delta>' --> seconds to subtract from now in JS
            secs = timedelta_total_seconds(parse_timedelta(m.group(1)))
            initial_time = (
                "d = new Date(); d.setSeconds(d.getSeconds()-%d);" % secs
            )
        else:
            initial_time = "d = new Date('{}');".format(initial_time)
    else:
        initial_time = "d = new Date();"

    round_initial = attrs.get('round_initial', None)
    if round_initial:
        # Floor the initial time to a multiple of round_initial seconds.
        js = (' p = %d*1000; '
              'd = new Date(Math.floor(d.getTime() / p) * p);'
              % round_initial)
        initial_time += js

        # only pin manually entered times if we are rounding above a minute
        if round_initial >= 60:
            # NOTE(review): under Python 3 this division yields a float
            # (e.g. 5.0) which is embedded in the JS -- confirm intended.
            step = int(round_initial) / 60
            force_round_time = 'true'
        else:
            step = 15
            force_round_time = 'false'
    else:
        step = 15
        force_round_time = 'false'

    msg = '''
    <span class="input-group-addon">
    <span id="timenow_{name}" class="glyphicon glyphicon-time"
     title="Set time/date to now">
    </span>
    </span>
    {0}
    <script type="text/javascript">
          $("#id_{name}").timepicker({{
             step: {step},
             scrollDefaultNow:true,
             forceRoundTime: {force_round_time},
             timeFormat:"g:i:s a"
          }});
          $("#timenow_{name}").click(function() {{
             $("#id_{name}").timepicker("setTime", new Date());
          }});
          {initial_time}
          // align to timezone
          var offset = ($('#tz').html()/100)*60*60*1000;
          d = rvbd.timeutil.convertDateToUTC(d);
          d.setTime(d.getTime() + offset);
          $("#id_{name}").timepicker("setTime", d);
    </script>
    '''
    return msg.format(
        super(TimeWidget, self).render(name, value, attrs),
        name=name,
        initial_time=initial_time,
        step=step,
        force_round_time=force_round_time
    )
def get(self, request, namespace=None, report_slug=None, widget_slug=None):
    """Return the JSON widget definitions for a report.

    If ``widget_slug`` is given, only that widget's definition is
    returned; otherwise all widgets in the report, ordered by position.

    :raises Http404: if the report does not exist.
    """
    try:
        report = Report.objects.get(namespace=namespace,
                                    slug=report_slug)
    except Report.DoesNotExist:
        # Fixed: was a bare `except:` which swallowed every exception
        # (including programming errors) and mapped them all to 404.
        raise Http404

    logger.debug("Received GET for report %s widget definition"
                 % report_slug)

    if widget_slug:
        w = get_object_or_404(
            Widget,
            slug=widget_slug,
            section__in=Section.objects.filter(report=report))
        widgets = [w]
    else:
        widgets = report.widgets().order_by('row', 'col')

    # parse time and localize to user profile timezone
    timezone = pytz.timezone(request.user.timezone)
    now = datetime.datetime.now(timezone)

    # pin the endtime to a round interval if we are set to
    # reload periodically
    minutes = report.reload_minutes
    if minutes:
        # avoid case of long duration reloads to have large reload gap
        # e.g. 24-hour report will consider 12:15 am or later a valid time
        # to roll-over the time values, rather than waiting
        # until 12:00 pm
        trimmed = round_time(dt=now, round_to=60 * minutes, trim=True)
        if now - trimmed > datetime.timedelta(minutes=15):
            now = trimmed
        else:
            now = round_time(dt=now, round_to=60 * minutes)

    widget_defs = []

    for w in widgets:
        # get default criteria values for widget
        # and set endtime to now, if applicable
        widget_fields = w.collect_fields()
        form = TableFieldForm(widget_fields, use_widgets=False)

        # create object from the tablefield keywords
        # and populate it with initial data generated by default
        keys = form._tablefields.keys()
        criteria = dict(zip(keys, [None] * len(keys)))
        criteria.update(form.data)

        # calculate time offsets
        if 'endtime' in criteria:
            criteria['endtime'] = now.isoformat()

            # only consider starttime if its paired with an endtime
            if 'starttime' in criteria:
                start = now
                field = form.fields['starttime']
                initial = field.widget.attrs.get('initial_time', None)
                if initial:
                    # initial_time of the form "now - <delta>" becomes
                    # a concrete starttime relative to `now`
                    m = re.match("now *- *(.+)", initial)
                    if m:
                        delta = parse_timedelta(m.group(1))
                        start = now - delta
                criteria['starttime'] = start.isoformat()

        # setup json definition object
        widget_def = w.get_definition(criteria)
        widget_defs.append(widget_def)

    report_def = self.report_def(widget_defs, now)

    return JsonResponse(report_def, safe=False)
def get(self, request, namespace=None, report_slug=None, widget_slug=None):
    """Return the JSON widget definitions for a report.

    If ``widget_slug`` is given, only that widget's definition is
    returned; otherwise all widgets in the report, ordered by position.

    :raises Http404: if the report does not exist.
    """
    try:
        report = Report.objects.get(namespace=namespace,
                                    slug=report_slug)
    except Report.DoesNotExist:
        # Fixed: was a bare `except:` which swallowed every exception
        # (including programming errors) and mapped them all to 404.
        raise Http404

    logger.debug("Received GET for report %s widget definition"
                 % report_slug)

    if widget_slug:
        w = get_object_or_404(
            Widget,
            slug=widget_slug,
            section__in=Section.objects.filter(report=report)
        )
        widgets = [w]
    else:
        widgets = report.widgets().order_by('row', 'col')

    # parse time and localize to user profile timezone
    timezone = pytz.timezone(request.user.timezone)
    now = datetime.datetime.now(timezone)

    # pin the endtime to a round interval if we are set to
    # reload periodically
    minutes = report.reload_minutes
    if minutes:
        # avoid case of long duration reloads to have large reload gap
        # e.g. 24-hour report will consider 12:15 am or later a valid time
        # to roll-over the time values, rather than waiting
        # until 12:00 pm
        trimmed = round_time(dt=now, round_to=60*minutes, trim=True)
        if now - trimmed > datetime.timedelta(minutes=15):
            now = trimmed
        else:
            now = round_time(dt=now, round_to=60*minutes)

    widget_defs = []

    for w in widgets:
        # get default criteria values for widget
        # and set endtime to now, if applicable
        widget_fields = w.collect_fields()
        form = TableFieldForm(widget_fields, use_widgets=False)

        # create object from the tablefield keywords
        # and populate it with initial data generated by default
        keys = form._tablefields.keys()
        criteria = dict(zip(keys, [None]*len(keys)))
        criteria.update(form.data)

        # calculate time offsets
        if 'endtime' in criteria:
            criteria['endtime'] = now.isoformat()

            # only consider starttime if its paired with an endtime
            if 'starttime' in criteria:
                start = now
                field = form.fields['starttime']
                initial = field.widget.attrs.get('initial_time', None)
                if initial:
                    # initial_time of the form "now - <delta>" becomes
                    # a concrete starttime relative to `now`
                    m = re.match("now *- *(.+)", initial)
                    if m:
                        delta = parse_timedelta(m.group(1))
                        start = now - delta
                criteria['starttime'] = start.isoformat()

        # setup json definition object
        widget_def = w.get_definition(criteria)
        widget_defs.append(widget_def)

    report_def = self.report_def(widget_defs, now)

    return JsonResponse(report_def, safe=False)
def run(self):
    """Main execution method.

    Builds NetShark view columns and filters from the table definition
    and job criteria, creates (or re-attaches to) a view on the device,
    waits for it to complete, and retrieves the resulting data.

    :returns: ``QueryComplete`` wrapping the retrieved rows, or ``False``
        if no NetShark device was selected.
    :raises ValueError: for millisecond reports longer than 1 second, or
        live views without the persistent flag.
    """
    criteria = self.job.criteria

    self.timeseries = False  # if key column called 'time' is created
    self.column_names = []

    # Resolution comes in as a time_delta
    resolution = timedelta_total_seconds(criteria.resolution)

    default_delta = 1000000000  # one second in nanoseconds
    self.delta = int(default_delta * resolution)  # sample size interval

    if criteria.netshark_device == '':
        logger.debug('%s: No netshark device selected' % self.table)
        self.job.mark_error("No NetShark Device Selected")
        return False

    shark = DeviceManager.get_device(criteria.netshark_device)

    logger.debug("Creating columns for NetShark table %d" % self.table.id)

    # Create Key/Value Columns
    columns = []
    for tc in self.table.get_columns(synthetic=False):
        tc_options = tc.options
        if (tc.iskey and tc.name == 'time' and
                tc_options.extractor == 'sample_time'):
            # don't create column, use the sample time for timeseries
            self.timeseries = True
            self.column_names.append('time')
            continue
        elif tc.iskey:
            c = Key(tc_options.extractor,
                    description=tc.label,
                    default_value=tc_options.default_value)
        else:
            if tc_options.operation:
                try:
                    operation = getattr(Operation, tc_options.operation)
                except AttributeError:
                    operation = Operation.sum
                    # Fixed: was a bare print(); use the module logger
                    # like the rest of this method
                    logger.error('Unknown operation attribute '
                                 '%s for column %s.'
                                 % (tc_options.operation, tc.name))
            else:
                operation = Operation.none

            c = Value(tc_options.extractor,
                      operation,
                      description=tc.label,
                      default_value=tc_options.default_value)

        self.column_names.append(tc.name)
        columns.append(c)

    # Identify Sort Column
    sortidx = None
    if self.table.sortcols is not None:
        sortcol = Column.objects.get(table=self.table,
                                     name=self.table.sortcols[0])
        sort_name = sortcol.options.extractor
        for i, c in enumerate(columns):
            if c.field == sort_name:
                sortidx = i
                break

    # Initialize filters
    criteria = self.job.criteria

    filters = []

    if hasattr(criteria, 'netshark_filterexpr'):
        logger.debug('calculating netshark filter expression ...')
        filterexpr = self.job.combine_filterexprs(
            exprs=criteria.netshark_filterexpr,
            joinstr="&"
        )
        if filterexpr:
            logger.debug('applying netshark filter expression: %s'
                         % filterexpr)
            filters.append(NetSharkFilter(filterexpr))

    if hasattr(criteria, 'netshark_bpf_filterexpr'):
        # TODO evaluate how to combine multiple BPF filters
        # this will just apply one at a time
        filterexpr = criteria.netshark_bpf_filterexpr
        logger.debug('applying netshark BPF filter expression: %s'
                     % filterexpr)
        filters.append(BpfFilter(filterexpr))

    # Millisecond-resolution reports sample every 1 ms but are capped
    # at a one-second duration; everything else samples at 1000 ms.
    resolution = criteria.resolution
    if resolution.seconds == 1:
        sampling_time_msec = 1000
    elif resolution.microseconds == 1000:
        sampling_time_msec = 1
        if criteria.duration > parse_timedelta('1s'):
            msg = ("Cannot run a millisecond report with a duration "
                   "longer than 1 second")
            raise ValueError(msg)
    else:
        sampling_time_msec = 1000

    # Get source type from options
    logger.debug("NetShark Source: %s" %
                 self.job.criteria.netshark_source_name)

    source = path_to_class(
        shark, self.job.criteria.netshark_source_name)
    live = source.is_live()
    persistent = criteria.get('netshark_persistent', False)

    if live and not persistent:
        raise ValueError("Live views must be run with persistent set")

    view = None
    if persistent:
        # First, see if a view by this title already exists.
        # Title is the table name plus a criteria hash including
        # all criteria *except* the timeframe
        h = hashlib.md5()
        h.update('.'.join([c.name for c in
                           self.table.get_columns()]))
        for k, v in criteria.iteritems():
            if criteria.is_timeframe_key(k):
                continue
            h.update('%s:%s' % (k, v))

        title = '/'.join(['steelscript-appfwk', str(self.table.id),
                          self.table.namespace, self.table.name,
                          h.hexdigest()])
        view = NetSharkViews.find_by_name(shark, title)
        logger.debug("Persistent view title: %s" % title)
    else:
        # Only assign a title for persistent views
        title = None

    if not view:
        # Not persistent, or not yet created...

        if not live:
            # Cannot attach time filter to a live view,
            # it will be added later at get_data() time
            tf = TimeFilter(start=criteria.starttime,
                            end=criteria.endtime)
            filters.append(tf)

            logger.info("Setting netshark table %d timeframe to %s" %
                        (self.table.id, str(tf)))

        # Create it
        with lock:
            logger.debug("%s: Creating view for table %s" %
                         (str(self), str(self.table)))
            view = shark.create_view(
                source, columns, filters=filters, sync=False,
                name=title, sampling_time_msec=sampling_time_msec)

        if not live:
            done = False
            logger.debug("Waiting for netshark table %d to complete" %
                         self.table.id)
            while not done:
                time.sleep(0.5)
                with lock:
                    s = view.get_progress()
                    self.job.mark_progress(s)
                    self.job.save()
                    done = view.is_ready()

    logger.debug("Retrieving data for timeframe: %s - %s" %
                 (datetime_to_nanoseconds(criteria.starttime),
                  datetime_to_nanoseconds(criteria.endtime)))

    # Retrieve the data
    with lock:
        getdata_kwargs = {}
        # Fixed: was `if sortidx:` which silently dropped the sort
        # when the sort column was the first column (index 0)
        if sortidx is not None:
            getdata_kwargs['sortby'] = sortidx

        if self.table.options.aggregated:
            getdata_kwargs['aggregated'] = self.table.options.aggregated
        else:
            getdata_kwargs['delta'] = self.delta

        if live:
            # For live views, attach the time frame to the get_data()
            getdata_kwargs['start'] = (
                datetime_to_nanoseconds(criteria.starttime))
            getdata_kwargs['end'] = (
                datetime_to_nanoseconds(criteria.endtime))

        self.data = view.get_data(**getdata_kwargs)

        if not persistent:
            view.close()

    if self.table.rows > 0:
        self.data = self.data[:self.table.rows]

    self.parse_data()

    logger.info("NetShark Report %s returned %s rows" %
                (self.job, len(self.data)))
    return QueryComplete(self.data)
def run(self, template_id, timefilter=None, resolution="auto", query=None,
        trafficexpr=None, data_filter=None, sync=True,
        custom_criteria=None):
    """Create the report and begin running the report on NetProfiler.

    If the `sync` option is True, periodically poll until the report is
    complete, otherwise return immediately.

    :param int template_id: numeric id of the template to use for the
        report
    :param timefilter: range of time to query, instance of
        :class:`TimeFilter`
    :param str resolution: data resolution, such as (1min, 15min, etc.),
        defaults to 'auto'
    :param str query: query object containing criteria
    :param trafficexpr: instance of :class:`TrafficFilter`
    :param str data_filter: deprecated filter to run against report data
    :param bool sync: if True, poll for status until the report is
        complete
    """
    self.template_id = template_id
    self.timefilter = (TimeFilter.parse_range("last 5 min")
                       if timefilter is None else timefilter)
    self.query = query
    self.trafficexpr = trafficexpr
    self.data_filter = data_filter

    self.id = None
    self.queries = []
    self.last_status = None

    known_resolutions = ("auto", "1min", "15min", "hour",
                         "6hour", "day", "week", "month")
    if resolution not in known_resolutions:
        # map an arbitrary timedelta string onto a canonical resolution
        delta = parse_timedelta(resolution)
        resolution = self.RESOLUTION_MAP[int(timedelta_total_seconds(delta))]
    self.resolution = resolution

    start_secs = datetime_to_seconds(self.timefilter.start)
    end_secs = datetime_to_seconds(self.timefilter.end)

    criteria = RecursiveUpdateDict(
        **{"time_frame": {"start": int(start_secs),
                          "end": int(end_secs)}})

    if self.query is not None:
        criteria["query"] = self.query

    if self.resolution != "auto":
        criteria["time_frame"]["resolution"] = self.resolution

    if self.data_filter:
        criteria['deprecated'] = {self.data_filter[0]: self.data_filter[1]}

    if self.trafficexpr is not None:
        criteria["traffic_expression"] = self.trafficexpr.filter

    if custom_criteria:
        for key, val in custom_criteria.iteritems():
            criteria[key] = val

    payload = {"template_id": self.template_id,
               "criteria": criteria}

    logger.debug("Posting JSON: %s" % payload)

    response = self.profiler.api.report.reports(data=payload)

    try:
        self.id = int(response['id'])
    except KeyError:
        raise ValueError(
            "failed to retrieve report id from report creation response: %s"
            % response)

    logger.info("Created report %d" % self.id)

    if sync:
        self.wait_for_complete()
def get(self, request, namespace=None, report_slug=None, widget_slug=None):
    """Return the JSON widget definitions for a report.

    If ``widget_slug`` is given, only that widget's definition is
    returned; otherwise all widgets in the report, ordered by position.
    For static reports, cached widget data is attached to each
    definition when available.

    :raises Http404: if the report does not exist.
    """
    try:
        report = Report.objects.get(namespace=namespace,
                                    slug=report_slug)
    except Report.DoesNotExist:
        # Fixed: was a bare `except:` which swallowed every exception
        # (including programming errors) and mapped them all to 404.
        raise Http404

    logger.debug("Received GET for report %s widget definition"
                 % report_slug)

    if widget_slug:
        w = get_object_or_404(
            Widget,
            slug=widget_slug,
            section__in=Section.objects.filter(report=report)
        )
        widgets = [w]
    else:
        # Add 'id' to order_by so that stacked widgets will return
        # with the same order as created
        widgets = report.widgets().order_by('row', 'col', 'id')

    # parse time and localize to user profile timezone
    timezone = get_timezone(request)
    now = datetime.datetime.now(timezone)

    # pin the endtime to a round interval if we are set to
    # reload periodically
    minutes = report.reload_minutes
    offset = report.reload_offset
    if minutes:
        # avoid case of long duration reloads to have large reload gap
        # e.g. 24-hour report will consider 12:15 am or later a valid time
        # to roll-over the time values, rather than waiting
        # until 12:00 pm
        trimmed = round_time(dt=now, round_to=60*minutes, trim=True)
        if now - trimmed > datetime.timedelta(seconds=offset):
            now = trimmed
        else:
            now = round_time(dt=now, round_to=60*minutes)

    widget_defs = []

    for w in widgets:
        # get default criteria values for widget
        # and set endtime to now, if applicable
        widget_fields = w.collect_fields()
        form = TableFieldForm(widget_fields, use_widgets=False)

        # create object from the tablefield keywords
        # and populate it with initial data generated by default
        keys = form._tablefields.keys()
        criteria = dict(zip(keys, [None]*len(keys)))
        criteria.update(form.data)

        # calculate time offsets
        if 'endtime' in criteria:
            criteria['endtime'] = now.isoformat()

            # only consider starttime if its paired with an endtime
            if 'starttime' in criteria:
                start = now
                field = form.fields['starttime']
                initial = field.widget.attrs.get('initial_time', None)
                if initial:
                    # initial_time of the form "now - <delta>" becomes
                    # a concrete starttime relative to `now`
                    m = re.match("now *- *(.+)", initial)
                    if m:
                        delta = parse_timedelta(m.group(1))
                        start = now - delta
                criteria['starttime'] = start.isoformat()

        # setup json definition object
        widget_def = w.get_definition(criteria)
        widget_defs.append(widget_def)

        # Build the primary key corresponding to static data for this
        # widget
        if report.static:
            rw_id = '-'.join([namespace, report_slug,
                              widget_def['widgetslug']])
            # Add cached widget data if available.
            try:
                data_cache = WidgetDataCache.objects.get(
                    report_widget_id=rw_id)
                widget_def['dataCache'] = data_cache.data
            except WidgetDataCache.DoesNotExist:
                msg = "No widget data cache available with id %s." % rw_id
                resp = {'message': msg,
                        'status': 'error',
                        'exception': ''}
                widget_def['dataCache'] = json.dumps(resp)

    report_def = self.report_def(widget_defs, now)

    return JsonResponse(report_def, safe=False)