Esempio n. 1
0
 def query_obj(self):
     """Build the query object consumed by the query layer.

     Reads user-supplied parameters from ``self.form_data`` and
     normalizes them: granularity (human delta -> milliseconds),
     row/series limits, and a sanity-checked [from, to] time range.

     Returns:
         dict: keyword arguments for the datasource query.
     """
     args = self.form_data
     groupby = args.getlist("groupby") or []
     metrics = args.getlist("metrics") or ["count"]
     granularity = args.get("granularity", "1 day")
     if granularity != "all":
         # Convert a human-readable delta like "1 day" to milliseconds.
         granularity = utils.parse_human_timedelta(
             granularity).total_seconds() * 1000
     limit = int(args.get("limit", config.ROW_LIMIT))
     row_limit = int(args.get("row_limit", config.ROW_LIMIT))
     since = args.get("since", "1 year ago")
     from_dttm = utils.parse_human_datetime(since)
     # Capture "now" once so the mirror computation below uses a single,
     # consistent reference point instead of three separate clock reads.
     now = datetime.now()
     if from_dttm > now:
         # A future start date is mirrored back into the past by the
         # same offset, keeping the range length meaningful.
         from_dttm = now - (from_dttm - now)
     until = args.get("until", "now")
     to_dttm = utils.parse_human_datetime(until)
     if from_dttm >= to_dttm:
         flash("The date range doesn't seem right.", "danger")
         from_dttm = to_dttm  # Making them identical to not raise
     return {
         "granularity": granularity,
         "from_dttm": from_dttm,
         "to_dttm": to_dttm,
         "groupby": groupby,
         "metrics": metrics,
         "row_limit": row_limit,
         "filter": self.query_filters(),
         "timeseries_limit": limit,
     }
Esempio n. 2
0
 def query_obj(self):
     """Assemble the dict of query parameters for this visualization.

     Pulls raw values out of ``self.form_data``, converts the
     granularity string to milliseconds (unless "all"), coerces the
     limits to integers, and validates the requested time window.

     Returns:
         dict: query arguments (granularity, time range, groupby,
         metrics, limits, filters).
     """
     args = self.form_data
     groupby = args.getlist("groupby") or []
     metrics = args.getlist("metrics") or ['count']
     granularity = args.get("granularity", "1 day")
     if granularity != "all":
         # "1 day" etc. become a duration in milliseconds.
         granularity = utils.parse_human_timedelta(
             granularity).total_seconds() * 1000
     limit = int(args.get("limit", config.ROW_LIMIT))
     row_limit = int(args.get("row_limit", config.ROW_LIMIT))
     since = args.get("since", "1 year ago")
     from_dttm = utils.parse_human_datetime(since)
     # One clock read: using several datetime.now() calls in the same
     # expression makes the mirrored date drift between evaluations.
     now = datetime.now()
     if from_dttm > now:
         # Reflect a future start date back into the past.
         from_dttm = now - (from_dttm - now)
     until = args.get("until", "now")
     to_dttm = utils.parse_human_datetime(until)
     if from_dttm >= to_dttm:
         flash("The date range doesn't seem right.", "danger")
         from_dttm = to_dttm  # Making them identical to not raise
     d = {
         'granularity': granularity,
         'from_dttm': from_dttm,
         'to_dttm': to_dttm,
         'groupby': groupby,
         'metrics': metrics,
         'row_limit': row_limit,
         'filter': self.query_filters(),
         'timeseries_limit': limit,
     }
     return d
Esempio n. 3
0
 def query_obj(self):
     """Build a Druid query dict from the submitted form data.

     Translates form parameters into Druid query syntax: a duration
     granularity in milliseconds, an ``intervals`` string
     ("<from>/<to>" in ISO format), the aggregations matching
     ``self.metrics``, and a descending ``limit_spec`` on the first
     metric.

     Returns:
         dict: the Druid query specification.
     """
     ds = self.datasource
     args = self.form_data
     groupby = args.getlist("groupby") or []
     granularity = args.get("granularity", "1 day")
     # Druid expects a duration in milliseconds.
     granularity = utils.parse_human_timedelta(
         granularity).total_seconds() * 1000
     # Only keep the datasource metrics that were actually requested.
     aggregations = {
         m.metric_name: m.json_obj
         for m in ds.metrics if m.metric_name in self.metrics
     }
     # "or" guards against a submitted limit of 0.
     limit = int(args.get("limit", config.ROW_LIMIT)) or config.ROW_LIMIT
     since = args.get("since", "1 year ago")
     from_dttm = utils.parse_human_datetime(since)
     # Single clock read keeps the mirror computation consistent.
     now = datetime.now()
     if from_dttm > now:
         # Mirror a future start date back into the past.
         from_dttm = now - (from_dttm - now)
     until = args.get("until", "now")
     to_dttm = utils.parse_human_datetime(until)
     # Validate while these are still datetime objects: comparing ISO
     # strings is fragile when their precision (microseconds) differs.
     if from_dttm >= to_dttm:
         flash("The date range doesn't seem right.", "danger")
         from_dttm = to_dttm  # Making them identical to not raise
     from_dttm = from_dttm.isoformat()
     to_dttm = to_dttm.isoformat()
     d = {
         'datasource': ds.datasource_name,
         'granularity': {
             "type": "duration",
             "duration": granularity,
         },
         'intervals': from_dttm + '/' + to_dttm,
         'dimensions': groupby,
         'aggregations': aggregations,
         'limit_spec': {
             "type": "default",
             "limit": limit,
             # NOTE(review): assumes self.metrics is non-empty — an empty
             # metric list would raise IndexError here; verify upstream.
             "columns": [{
                 "dimension": self.metrics[0],
                 "direction": "descending",
             }],
         },
     }
     filters = self.query_filters()
     if filters:
         d['filter'] = filters
     return d
Esempio n. 4
0
 def query_obj(self):
     """Construct the Druid query specification for this slice.

     Converts the human-readable form inputs into Druid terms:
     millisecond duration granularity, an ISO "<from>/<to>" intervals
     string, the requested aggregations, and a descending limit spec
     keyed on the first selected metric.

     Returns:
         dict: the Druid query dict, including ``filter`` when any
         filters are present.
     """
     ds = self.datasource
     args = self.form_data
     groupby = args.getlist("groupby") or []
     granularity = args.get("granularity", "1 day")
     # Druid duration granularity is expressed in milliseconds.
     granularity = utils.parse_human_timedelta(
         granularity).total_seconds() * 1000
     aggregations = {
         m.metric_name: m.json_obj
         for m in ds.metrics if m.metric_name in self.metrics
     }
     # Fall back to the configured default when the limit is 0/empty.
     limit = int(
         args.get("limit", config.ROW_LIMIT)) or config.ROW_LIMIT
     since = args.get("since", "1 year ago")
     from_dttm = utils.parse_human_datetime(since)
     # Read the clock once so both uses share the same reference point.
     now = datetime.now()
     if from_dttm > now:
         # Reflect a future start date into the past by the same offset.
         from_dttm = now - (from_dttm - now)
     until = args.get("until", "now")
     to_dttm = utils.parse_human_datetime(until)
     # Compare datetime objects before serializing; lexicographic
     # comparison of ISO strings breaks down across precision changes.
     if from_dttm >= to_dttm:
         flash("The date range doesn't seem right.", "danger")
         from_dttm = to_dttm  # Making them identical to not raise
     from_dttm = from_dttm.isoformat()
     to_dttm = to_dttm.isoformat()
     d = {
         'datasource': ds.datasource_name,
         'granularity': {"type": "duration", "duration": granularity},
         'intervals': from_dttm + '/' + to_dttm,
         'dimensions': groupby,
         'aggregations': aggregations,
         'limit_spec': {
             "type": "default",
             "limit": limit,
             # NOTE(review): self.metrics[0] raises IndexError when no
             # metric is selected — confirm callers guarantee at least one.
             "columns": [{
                 "dimension": self.metrics[0],
                 "direction": "descending",
             }],
         },
     }
     filters = self.query_filters()
     if filters:
         d['filter'] = filters
     return d