def get(self, request):
    """
    Retrieve records from time series data storage.

    The URL is formatted as '/db/records?handle=**&start=**&end=**'.
    Required parameters are 'handle' and 'start'; 'end' is optional.
    Values for 'start' and 'end' should be epoch seconds.

    JSON results returned look like:
        [{"avg_bytes": 1617806.0,
          "time": "2017-03-24T18:14:00+00:00"},
          ...
        ]
    """
    query_params = request.GET.dict()

    # Both 'handle' and 'start' must be present in the URL.
    for required in ('handle', 'start'):
        if required not in query_params:
            raise InvalidRequest(
                "Missing parameter '{}' in url".format(required))

    handle = query_params['handle']
    try:
        intervals = ExistingIntervals.objects.get(table_handle=handle)
    except ObjectDoesNotExist:
        raise NotFoundError("Handle '{}' does not exist.".format(handle))

    # Build the time-range query: always bounded below, and bounded
    # above only when an end time was supplied.
    time_range = {'gte': sec_string_to_datetime(int(query_params['start']))}
    if 'end' in query_params:
        time_range['lte'] = sec_string_to_datetime(int(query_params['end']))

    # Resolve the table so we can find its time key column.
    table = Table.from_ref(dict(sourcefile=intervals.sourcefile,
                                namespace=intervals.namespace,
                                name=intervals.table))
    time_columns = [c for c in table.get_columns(iskey=True)
                    if c.datatype == Column.DATATYPE_TIME]
    time_col_name = time_columns[0].name

    col_filters = [ColumnFilter(query_type='range',
                                query={time_col_name: time_range})]

    # allow for override via url param
    index = query_params.get('index', make_index(intervals.namespace))

    records = storage.search(index=index, doc_type=handle,
                             col_filters=col_filters)
    return Response(records)
def get(self, request):
    """Retrieve records from time series data storage.

    URL form: '/db/records?handle=**&start=**&end=**'.  'handle' and
    'start' are required; 'end' is optional.  'start' and 'end' are
    epoch seconds.

    Returns JSON such as:
        [{"avg_bytes": 1617806.0, "time": "2017-03-24T18:14:00+00:00"},
         ...]
    """
    params = request.GET.dict()

    # Reject the request on the first required parameter that is absent.
    missing = [name for name in ('handle', 'start') if name not in params]
    if missing:
        raise InvalidRequest(
            "Missing parameter '{}' in url".format(missing[0]))

    handle = params['handle']
    try:
        obj = ExistingIntervals.objects.get(table_handle=handle)
    except ObjectDoesNotExist:
        raise NotFoundError("Handle '{}' does not exist.".format(handle))

    # Range filter on the time key; 'lte' only when an end was given.
    bounds = {'gte': sec_string_to_datetime(int(params['start']))}
    if 'end' in params:
        bounds['lte'] = sec_string_to_datetime(int(params['end']))

    # Look up the table to discover the name of its time key column.
    ref = dict(sourcefile=obj.sourcefile,
               namespace=obj.namespace,
               name=obj.table)
    table = Table.from_ref(ref)
    time_col_name = [c.name for c in table.get_columns(iskey=True)
                     if c.datatype == Column.DATATYPE_TIME][0]

    filters = [ColumnFilter(query_type='range',
                            query={time_col_name: bounds})]

    # allow for override via url param
    index = params.get('index', make_index(obj.namespace))

    return Response(storage.search(index=index,
                                   doc_type=handle,
                                   col_filters=filters))
def update_criteria_from_bookmark(self, report, request, fields):
    """Override criteria fields' initial values from bookmark URL params."""
    params = request.GET.dict()
    override_msg = 'Setting criteria field %s to %s.'

    # A report is not live unless explicitly requested below.
    report.live = False

    for key, raw in params.iteritems():
        if key == 'auto_run':
            report.auto_run = (raw.lower() == 'true')
            continue

        if key == 'live':
            # Live mode may only be turned on for static reports.
            if report.static and raw.lower() == 'true':
                report.live = True
            continue

        field = fields.get(key)
        if field is None:
            logger.warning("Keyword %s in bookmark does not match any "
                           "criteria field." % key)
            continue

        if self.is_field_cls(field, 'DateTimeField'):
            # Bookmarked datetimes must be epoch seconds.
            if not raw.isdigit():
                field.error_msg = ("%s '%s' is invalid." % (key, raw))
                continue

            # Compare against the ceiling of "now" so the reconstructed
            # timestamp does not drift one second past the bookmarked one.
            if int(math.ceil(time.time())) - int(raw) < 0:
                field.error_msg = ("%s %s is later than current time."
                                   % (key, raw))
                continue

            local_dt = sec_string_to_datetime(int(raw)).astimezone(
                get_timezone(request))
            logger.debug(override_msg % (key, local_dt))

            widget_attrs = field.field_kwargs['widget_attrs']
            # Date portion rendered as 'mm/dd/yyyy'.
            widget_attrs['initial_date'] = local_dt.strftime('%m/%d/%Y')
            # Time portion rendered as 'hh:mm:ss'.
            widget_attrs['initial_time'] = local_dt.strftime('%H:%M:%S')
        elif self.is_field_cls(field, 'BooleanField'):
            flag = (raw.lower() == 'true')
            logger.debug(override_msg % (key, flag))
            field.initial = flag
        else:
            logger.debug(override_msg % (key, raw))
            field.initial = raw
def update_criteria_from_bookmark(self, report, request, fields):
    """ Update fields' initial values using bookmark.

    Iterates the bookmark's URL parameters: 'auto_run' and 'live'
    update report flags, and any other key naming a criteria field
    overrides that field's initial value.  DateTime fields only
    accept epoch seconds and are rendered in the request's timezone.
    """
    request_data = request.GET.dict()
    override_msg = 'Setting criteria field %s to %s.'

    # Initialize report.live as False
    report.live = False

    for k, v in request_data.iteritems():
        if k == 'auto_run':
            report.auto_run = (v.lower() == 'true')
            continue

        if k == 'live':
            # Live mode may only be enabled on static reports.
            if report.static and v.lower() == 'true':
                report.live = True
            continue

        field = fields.get(k, None)
        if field is None:
            logger.warning("Keyword %s in bookmark does not match any "
                           "criteria field." % k)
            continue

        if self.is_field_cls(field, 'DateTimeField'):
            # Only accepts epoch seconds
            if not v.isdigit():
                field.error_msg = ("%s '%s' is invalid." % (k, v))
                continue

            # Needs to set delta derived from ceiling of current time
            # Otherwise the resulting timestamp would advance 1 sec
            # vs the original timestamp from bookmark
            delta = int(math.ceil(time.time())) - int(v)
            if delta < 0:
                field.error_msg = ("%s %s is later than current time."
                                   % (k, v))
                continue

            dt_utc = sec_string_to_datetime(int(v))
            tz = get_timezone(request)
            dt_local = dt_utc.astimezone(tz)
            logger.debug(override_msg % (k, dt_local))

            # Setting initial date as 'mm/dd/yy'
            field.field_kwargs['widget_attrs']['initial_date'] = \
                dt_local.strftime('%m/%d/%Y')

            # Setting initial time as 'hh:mm:ss'.
            # BUGFIX: previously formatted with '%m/%d/%Y %H:%M:%S',
            # which duplicated the date already stored in initial_date
            # and contradicted both this comment and the sibling
            # implementation of this method elsewhere in the project.
            field.field_kwargs['widget_attrs']['initial_time'] = \
                dt_local.strftime('%H:%M:%S')
        elif self.is_field_cls(field, 'BooleanField'):
            logger.debug(override_msg % (k, v.lower() == 'true'))
            field.initial = (v.lower() == 'true')
        else:
            logger.debug(override_msg % (k, v))
            field.initial = v
def _convert_sample_time(self, sample_timestamp):
    """Convert a raw sample timestamp into a datetime.

    The view's timestamp_format selects the units of sample_timestamp
    (seconds, milliseconds, microseconds or nanoseconds since epoch)
    and the matching timeutils converter.

    Raises:
        ValueError: if the view reports an unrecognized timestamp format.
    """
    if self.view.timestamp_format == APITimestampFormat.SECOND:
        return timeutils.sec_string_to_datetime(sample_timestamp)
    elif self.view.timestamp_format == APITimestampFormat.MILLISECOND:
        return timeutils.msec_string_to_datetime(sample_timestamp)
    elif self.view.timestamp_format == APITimestampFormat.MICROSECOND:
        return timeutils.usec_string_to_datetime(sample_timestamp)
    elif self.view.timestamp_format == APITimestampFormat.NANOSECOND:
        return timeutils.nsec_string_to_datetime(sample_timestamp)
    else:
        # BUGFIX: this branch referenced an undefined name 'view',
        # so it raised NameError instead of the intended ValueError.
        raise ValueError('invalid time format %s'
                         % str(self.view.timestamp_format))
def process(cls, widget, job, data):
    """Transform tabular job data into a chart-definition dict.

    Builds and returns a dict (chartTitle, type, stacked, dataProvider,
    seriesCollection, axes, legend, interactionType) consumed by the
    front-end timeseries chart widget.  One time/date key column from
    the job supplies the x-axis; the selected value columns must be
    numeric and become the y-series.
    """

    class ColInfo:
        # Per-column bookkeeping: the source column, its index into
        # each raw data row, the numeric axis it plots on, and whether
        # it is the time/date key column.
        def __init__(self, col, dataindex, axis,
                     istime=False, isdate=False):
            self.col = col
            self.key = cleankey(col.name)
            self.dataindex = dataindex
            self.axis = axis
            self.istime = istime
            self.isdate = isdate

    t_cols = job.get_columns()
    colinfo = {}  # map by widget key

    # columns of None is a special case, just use all
    # defined columns other than time
    if widget.options.columns is None:
        valuecolnames = [col.name for col in t_cols
                         if not col.istime() and not col.isdate()]
    else:
        valuecolnames = widget.options.columns

    # Column keys are the 'cleaned' column names
    w_keys = [cleankey(n) for n in valuecolnames]

    # Retrieve the desired value columns
    # ...and the indices for the value values
    # (as the 'data' has *all* columns)
    time_colinfo = None
    for i, c in enumerate(t_cols):
        if c.istime():
            ci = ColInfo(c, i, -1, istime=True)
            time_colinfo = ci
        elif c.isdate():
            ci = ColInfo(c, i, -1, isdate=True)
            time_colinfo = ci
        elif c.name in valuecolnames:
            if c.isnumeric():
                ci = ColInfo(c, i, -1, istime=False, isdate=False)
            else:
                raise KeyError(
                    "Cannot graph non-numeric data in timeseries widget: "
                    "column {0}".format(c.name))

        # NOTE(review): placed at loop-body level, so the time/date
        # column also lands in colinfo; the downstream loops skip
        # istime/isdate entries, which supports this placement --
        # confirm against upstream history (source indentation was
        # not preserved).
        colinfo[ci.key] = ci

    if widget.options.altaxis:
        # Columns listed in altaxis plot on the right-hand axis,
        # everything else on the left.
        altaxis = widget.options.altaxis
        axes_def = {'0': {'position': 'left',
                          'columns': [col for col in valuecolnames
                                      if col not in altaxis]},
                    '1': {'position': 'right',
                          'columns': [col for col in valuecolnames
                                      if col in altaxis]}
                    }
    else:
        axes_def = {'0': {'position': 'left', 'columns': valuecolnames}}

    w_series = []
    axes = Axes(axes_def)

    # Setup the time axis
    w_axes = {"time": {"keys": ["time"],
                       "position": "bottom",
                       "type": "time",
                       "styles": {"label": {"fontSize": "8pt",
                                            "rotation": "-45"}}}}

    # Create a better time format depending on t0/t1
    t_dataindex = time_colinfo.dataindex

    # NOTE(review): assumes data is non-empty and time-ordered;
    # data[0]/data[-1] raise IndexError on an empty result set --
    # confirm callers guarantee at least one row.
    t0 = data[0][t_dataindex]
    t1 = data[-1][t_dataindex]
    if not hasattr(t0, 'utcfromtimestamp'):
        # Not datetime-like; presumably epoch seconds -- TODO confirm.
        t0 = timeutils.sec_string_to_datetime(t0)
        t1 = timeutils.sec_string_to_datetime(t1)

    total_seconds = timeutils.timedelta_total_seconds(t1 - t0)
    # Pick a label format appropriate to the span of the data.
    if total_seconds < 2:
        w_axes['time']['formatter'] = 'formatTimeMs'
    elif total_seconds < 120:
        w_axes['time']['labelFormat'] = '%k:%M:%S'
    elif total_seconds < (24 * 60 * 60):
        w_axes['time']['labelFormat'] = '%k:%M'
    elif time_colinfo.isdate:
        w_axes['time']['formatter'] = 'formatDate'
    else:
        w_axes['time']['labelFormat'] = '%D %k:%M'

    # Setup the other axes, checking the axis for each column
    for w_key in w_keys:
        # Need to iterate the valuecolnames array to preserve order
        ci = colinfo[w_key]

        w_series.append({"xKey": "time",
                         "xDisplayName": "Time",
                         "yKey": ci.key,
                         "yDisplayName": ci.col.label,
                         "styles": {"line": {"weight": 1},
                                    "marker": {"height": 3,
                                               "width": 3}}})

        ci.axis = axes.getaxis(ci.col.name)
        axis_name = 'axis' + str(ci.axis)
        if axis_name not in w_axes:
            w_axes[axis_name] = {"type": "numeric",
                                 "position": axes.position(ci.axis),
                                 "keys": []}

        w_axes[axis_name]['keys'].append(ci.key)

    # Output row data
    rows = []

    # min/max values by axis 0/1
    minval = {}
    maxval = {}

    stacked = widget.options.stacked
    # Iterate through all rows of input data
    for rawrow in data:
        t = rawrow[t_dataindex]
        try:
            t = timeutils.datetime_to_microseconds(t) / 1000
        except AttributeError:
            # Not datetime-like; treated as epoch seconds -> ms.
            t = t * 1000

        row = {'time': t}
        rowmin = {}
        rowmax = {}
        for ci in colinfo.values():
            if ci.istime or ci.isdate:
                continue
            a = ci.axis
            val = rawrow[ci.dataindex]
            # Empty-string cells become nulls in the chart data.
            row[ci.key] = val if val != '' else None

            # If stacked and there is only one value, use that
            # value as the rowmin.  If stacked and there is more than
            # one value for the axis, use a floor of 0 to give proper
            # context.
            if a not in rowmin:
                rowmin[a] = val if val != '' else 0
                rowmax[a] = val if val != '' else 0
            else:
                rowmin[a] = (0 if stacked
                             else min(rowmin[a], val))
                # Stacked series accumulate, so the row max is the sum.
                rowmax[a] = ((rowmax[a] + val) if stacked
                             else max(rowmax[a], val))

        # Fold this row's extremes into the running per-axis extremes.
        for a in rowmin.keys():
            minval[a] = rowmin[a] if (a not in minval) else min(minval[a],
                                                                rowmin[a])
            maxval[a] = rowmax[a] if (a not in maxval) else max(maxval[a],
                                                                rowmax[a])

        rows.append(row)

    # Setup the scale values for the axes
    for ci in colinfo.values():
        if ci.istime or ci.isdate:
            continue

        axis_name = 'axis' + str(ci.axis)

        if minval and maxval:
            # Round the axis bounds/ticks to "nice" values.
            n = NiceScale(minval[ci.axis], maxval[ci.axis])

            w_axes[axis_name]['minimum'] = "%.10f" % n.nicemin
            w_axes[axis_name]['maximum'] = "%.10f" % n.nicemax
            w_axes[axis_name]['tickExponent'] = math.log10(n.tickspacing)
            w_axes[axis_name]['styles'] = {'majorUnit':
                                           {'count': n.numticks}}
        else:
            # empty data which would result in keyError above
            w_axes[axis_name]['minimum'] = "0"
            w_axes[axis_name]['maximum'] = "1"
            w_axes[axis_name]['tickExponent'] = 1
            w_axes[axis_name]['styles'] = {'majorUnit': {'count': 1}}

        if ci.col.units == ci.col.UNITS_PCT:
            w_axes[axis_name]['formatter'] = 'formatPct'
        else:
            w_axes[axis_name]['formatter'] = 'formatMetric'

    if stacked:
        charttype = "area"
    elif widget.options.bar:
        charttype = "column"
    else:
        charttype = "combo"

    # Rebinds the 'data' parameter to the final chart definition.
    data = {
        "chartTitle": widget.title.format(**job.actual_criteria),
        "type": charttype,
        "stacked": stacked,
        "dataProvider": rows,
        "seriesCollection": w_series,
        "axes": w_axes,
        "legend": {"position": "bottom",
                   "fontSize": "8pt",
                   "styles": {"gap": 0}},
        "interactionType": "planar"
    }

    # logger.debug("data:\n\n%s\n" % data)
    return data
def process(cls, widget, job, data):
    """Build the timeseries chart definition for a widget from job data.

    Produces the dict the front-end chart consumes: the x-axis comes
    from the job's single time/date key column; every selected value
    column must be numeric and is emitted as one y-series, assigned to
    a left or right numeric axis.
    """

    class ColInfo:
        # Records a column, its index into each raw row, its axis
        # assignment, and whether it is the time/date key.
        def __init__(self, col, dataindex, axis,
                     istime=False, isdate=False):
            self.col = col
            self.key = cleankey(col.name)
            self.dataindex = dataindex
            self.axis = axis
            self.istime = istime
            self.isdate = isdate

    t_cols = job.get_columns()
    colinfo = {}  # map by widget key

    # columns of None is a special case, just use all
    # defined columns other than time
    if widget.options.columns is None:
        valuecolnames = [col.name for col in t_cols
                         if not col.istime() and not col.isdate()]
    else:
        valuecolnames = widget.options.columns

    # Column keys are the 'cleaned' column names
    w_keys = [cleankey(n) for n in valuecolnames]

    # Retrieve the desired value columns
    # ...and the indices for the value values
    # (as the 'data' has *all* columns)
    time_colinfo = None
    for i, c in enumerate(t_cols):
        if c.istime():
            ci = ColInfo(c, i, -1, istime=True)
            time_colinfo = ci
        elif c.isdate():
            ci = ColInfo(c, i, -1, isdate=True)
            time_colinfo = ci
        elif c.name in valuecolnames:
            if c.isnumeric():
                ci = ColInfo(c, i, -1, istime=False, isdate=False)
            else:
                raise KeyError(
                    "Cannot graph non-numeric data in timeseries widget: "
                    "column {0}".format(c.name))

        # NOTE(review): executed each iteration, so the time/date
        # column is stored in colinfo as well; later loops filter on
        # istime/isdate, consistent with this placement.  The original
        # indentation was lost in this source -- verify upstream.
        colinfo[ci.key] = ci

    if widget.options.altaxis:
        # altaxis columns go on the right-hand axis; the rest left.
        altaxis = widget.options.altaxis
        axes_def = {'0': {'position': 'left',
                          'columns': [col for col in valuecolnames
                                      if col not in altaxis]},
                    '1': {'position': 'right',
                          'columns': [col for col in valuecolnames
                                      if col in altaxis]}
                    }
    else:
        axes_def = {'0': {'position': 'left', 'columns': valuecolnames}}

    w_series = []
    axes = Axes(axes_def)

    # Setup the time axis
    w_axes = {"time": {"keys": ["time"],
                       "position": "bottom",
                       "type": "time",
                       "styles": {"label": {"fontSize": "8pt",
                                            "rotation": "-45"}}}}

    # Create a better time format depending on t0/t1
    t_dataindex = time_colinfo.dataindex

    # NOTE(review): data[0]/data[-1] assume at least one row and
    # time-ordered rows; empty data raises IndexError here -- confirm
    # callers never pass an empty result set.
    t0 = data[0][t_dataindex]
    t1 = data[-1][t_dataindex]
    if not hasattr(t0, 'utcfromtimestamp'):
        # Non-datetime values are presumed epoch seconds -- TODO confirm.
        t0 = timeutils.sec_string_to_datetime(t0)
        t1 = timeutils.sec_string_to_datetime(t1)

    total_seconds = timeutils.timedelta_total_seconds(t1 - t0)
    # Choose a time-label format suited to the total span.
    if total_seconds < 2:
        w_axes['time']['formatter'] = 'formatTimeMs'
    elif total_seconds < 120:
        w_axes['time']['labelFormat'] = '%k:%M:%S'
    elif total_seconds < (24 * 60 * 60):
        w_axes['time']['labelFormat'] = '%k:%M'
    elif time_colinfo.isdate:
        w_axes['time']['formatter'] = 'formatDate'
    else:
        w_axes['time']['labelFormat'] = '%D %k:%M'

    # Setup the other axes, checking the axis for each column
    for w_key in w_keys:
        # Need to iterate the valuecolnames array to preserve order
        ci = colinfo[w_key]

        w_series.append({"xKey": "time",
                         "xDisplayName": "Time",
                         "yKey": ci.key,
                         "yDisplayName": ci.col.label,
                         "styles": {"line": {"weight": 1},
                                    "marker": {"height": 3,
                                               "width": 3}}})

        ci.axis = axes.getaxis(ci.col.name)
        axis_name = 'axis' + str(ci.axis)
        if axis_name not in w_axes:
            w_axes[axis_name] = {"type": "numeric",
                                 "position": axes.position(ci.axis),
                                 "keys": []
                                 }

        w_axes[axis_name]['keys'].append(ci.key)

    # Output row data
    rows = []

    # min/max values by axis 0/1
    minval = {}
    maxval = {}

    stacked = widget.options.stacked
    # Iterate through all rows of input data
    for rawrow in data:
        t = rawrow[t_dataindex]
        try:
            t = timeutils.datetime_to_microseconds(t) / 1000
        except AttributeError:
            # Non-datetime value: treated as epoch seconds -> ms.
            t = t * 1000

        row = {'time': t}
        rowmin = {}
        rowmax = {}
        for ci in colinfo.values():
            if ci.istime or ci.isdate:
                continue
            a = ci.axis
            val = rawrow[ci.dataindex]
            # Empty-string cells are emitted as nulls.
            row[ci.key] = val if val != '' else None

            # If stacked and there is only one value, use that
            # value as the rowmin.  If stacked and there is more than
            # one value for the axis, use a floor of 0 to give proper
            # context.
            if a not in rowmin:
                rowmin[a] = val if val != '' else 0
                rowmax[a] = val if val != '' else 0
            else:
                rowmin[a] = (0 if stacked
                             else min(rowmin[a], val))
                # Stacked values accumulate, so sum for the row max.
                rowmax[a] = ((rowmax[a] + val) if stacked
                             else max(rowmax[a], val))

        # Merge this row's extremes into the running per-axis extremes.
        for a in rowmin.keys():
            minval[a] = rowmin[a] if (a not in minval) else min(minval[a],
                                                                rowmin[a])
            maxval[a] = rowmax[a] if (a not in maxval) else max(maxval[a],
                                                                rowmax[a])

        rows.append(row)

    # Setup the scale values for the axes
    for ci in colinfo.values():
        if ci.istime or ci.isdate:
            continue

        axis_name = 'axis' + str(ci.axis)

        if minval and maxval:
            # Snap bounds and tick spacing to "nice" round values.
            n = NiceScale(minval[ci.axis], maxval[ci.axis])

            w_axes[axis_name]['minimum'] = "%.10f" % n.nicemin
            w_axes[axis_name]['maximum'] = "%.10f" % n.nicemax
            w_axes[axis_name]['tickExponent'] = math.log10(n.tickspacing)
            w_axes[axis_name]['styles'] = {
                'majorUnit': {'count': n.numticks}
            }
        else:
            # empty data which would result in keyError above
            w_axes[axis_name]['minimum'] = "0"
            w_axes[axis_name]['maximum'] = "1"
            w_axes[axis_name]['tickExponent'] = 1
            w_axes[axis_name]['styles'] = {'majorUnit': {'count': 1}}

        if ci.col.units == ci.col.UNITS_PCT:
            w_axes[axis_name]['formatter'] = 'formatPct'
        else:
            w_axes[axis_name]['formatter'] = 'formatMetric'

    if stacked:
        charttype = "area"
    elif widget.options.bar:
        charttype = "column"
    else:
        charttype = "combo"

    # Rebinds the 'data' parameter to the finished chart definition.
    data = {
        "chartTitle": widget.title.format(**job.actual_criteria),
        "type": charttype,
        "stacked": stacked,
        "dataProvider": rows,
        "seriesCollection": w_series,
        "axes": w_axes,
        "legend": {"position": "bottom",
                   "fontSize": "8pt",
                   "styles": {"gap": 0}},
        "interactionType": "planar"
    }

    # logger.debug("data:\n\n%s\n" % data)
    return data