def test_parse_human_timedelta(mock_datetime: Mock) -> None:
    mock_datetime.now.return_value = datetime(2019, 4, 1)
    mock_datetime.side_effect = lambda *args, **kw: datetime(*args, **kw)
    assert parse_human_timedelta("now") == timedelta(0)
    assert parse_human_timedelta("1 year") == timedelta(366)
    assert parse_human_timedelta("-1 year") == timedelta(-365)
    assert parse_human_timedelta(None) == timedelta(0)
    assert parse_human_timedelta("1 month", datetime(2019, 4, 1)) == timedelta(30)
    assert parse_human_timedelta("1 month", datetime(2019, 5, 1)) == timedelta(31)
    assert parse_human_timedelta("1 month", datetime(2019, 2, 1)) == timedelta(28)
    assert parse_human_timedelta("-1 month", datetime(2019, 2, 1)) == timedelta(-31)

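# A minimal sketch of how the `mock_datetime` argument above could be supplied.
# The patch target "superset.utils.core.datetime" is an assumption about where
# `parse_human_timedelta` resolves `datetime`; point it at the actual module
# under test. Patching the module-level name is what lets "now" and relative
# deltas be computed against a fixed date instead of the wall clock.
from unittest.mock import Mock, patch

@patch("superset.utils.core.datetime")  # assumed patch target
def run_parse_human_timedelta_example(mock_datetime: Mock) -> None:
    mock_datetime.now.return_value = datetime(2019, 4, 1)
    mock_datetime.side_effect = lambda *args, **kw: datetime(*args, **kw)
    assert parse_human_timedelta("1 year") == timedelta(366)
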
def __init__(  # pylint: disable=too-many-locals
    self,
    *,
    annotation_layers: Optional[List[Dict[str, Any]]] = None,
    applied_time_extras: Optional[Dict[str, str]] = None,
    apply_fetch_values_predicate: bool = False,
    columns: Optional[List[Column]] = None,
    datasource: Optional[BaseDatasource] = None,
    extras: Optional[Dict[str, Any]] = None,
    filters: Optional[List[QueryObjectFilterClause]] = None,
    granularity: Optional[str] = None,
    is_rowcount: bool = False,
    is_timeseries: Optional[bool] = None,
    metrics: Optional[List[Metric]] = None,
    order_desc: bool = True,
    orderby: Optional[List[OrderBy]] = None,
    post_processing: Optional[List[Optional[Dict[str, Any]]]] = None,
    row_limit: int,
    row_offset: Optional[int] = None,
    series_columns: Optional[List[Column]] = None,
    series_limit: int = 0,
    series_limit_metric: Optional[Metric] = None,
    time_range: Optional[str] = None,
    time_shift: Optional[str] = None,
    **kwargs: Any,
):
    self._set_annotation_layers(annotation_layers)
    self.applied_time_extras = applied_time_extras or {}
    self.apply_fetch_values_predicate = apply_fetch_values_predicate or False
    self.columns = columns or []
    self.datasource = datasource
    self.extras = extras or {}
    self.filter = filters or []
    self.granularity = granularity
    self.is_rowcount = is_rowcount
    self._set_is_timeseries(is_timeseries)
    self._set_metrics(metrics)
    self.order_desc = order_desc
    self.orderby = orderby or []
    self._set_post_processing(post_processing)
    self.row_limit = row_limit
    self.row_offset = row_offset or 0
    self._init_series_columns(series_columns, metrics, is_timeseries)
    self.series_limit = series_limit
    self.series_limit_metric = series_limit_metric
    self.time_range = time_range
    self.time_shift = parse_human_timedelta(time_shift)
    self.from_dttm = kwargs.get("from_dttm")
    self.to_dttm = kwargs.get("to_dttm")
    self.result_type = kwargs.get("result_type")
    self.time_offsets = kwargs.get("time_offsets", [])
    self.inner_from_dttm = kwargs.get("inner_from_dttm")
    self.inner_to_dttm = kwargs.get("inner_to_dttm")
    self._rename_deprecated_fields(kwargs)
    self._move_deprecated_extra_fields(kwargs)

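# A plausible sketch of one of the private helpers the constructor above
# delegates to. The body is an assumption inferred from the older, inline
# constructors later in this section, not the actual implementation;
# DTTM_ALIAS is assumed to be the datasource's time-column alias. It must run
# after `self.columns` has been assigned, which the call order above ensures.
def _set_is_timeseries(self, is_timeseries: Optional[bool]) -> None:
    # Default to True when the time column appears among the selected columns.
    self.is_timeseries = (
        is_timeseries if is_timeseries is not None else DTTM_ALIAS in self.columns
    )
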
def test_parse_human_timedelta(self, mock_datetime):
    mock_datetime.now.return_value = datetime(2019, 4, 1)
    mock_datetime.side_effect = lambda *args, **kw: datetime(*args, **kw)
    self.assertEqual(parse_human_timedelta("now"), timedelta(0))
    self.assertEqual(parse_human_timedelta("1 year"), timedelta(366))
    self.assertEqual(parse_human_timedelta("-1 year"), timedelta(-365))
    self.assertEqual(parse_human_timedelta(None), timedelta(0))
    self.assertEqual(
        parse_human_timedelta("1 month", datetime(2019, 4, 1)),
        timedelta(30),
    )
    self.assertEqual(
        parse_human_timedelta("1 month", datetime(2019, 5, 1)),
        timedelta(31),
    )
    self.assertEqual(
        parse_human_timedelta("1 month", datetime(2019, 2, 1)),
        timedelta(28),
    )
    self.assertEqual(
        parse_human_timedelta("-1 month", datetime(2019, 2, 1)),
        timedelta(-31),
    )

def compute_time_compare(granularity, periods):
    if not granularity:
        return None
    # convert old db_engine_spec granularity to ISO duration
    if granularity in db_engine_specs_map:
        granularity = db_engine_specs_map[granularity]

    try:
        obj = isodate.parse_duration(granularity) * periods
    except isodate.isoerror.ISO8601Error:
        # if parse_human_timedelta can parse it, return it directly
        delta = "{0} {1}{2}".format(periods, granularity, "s" if periods > 1 else "")
        obj = parse_human_timedelta(delta)
        if obj:
            return delta
        raise Exception("Unable to parse: {0}".format(granularity))

    if isinstance(obj, isodate.duration.Duration):
        return isodate_duration_to_string(obj)
    elif isinstance(obj, datetime.timedelta):
        return timedelta_to_string(obj)

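# A quick illustration of the ISO 8601 parsing the function above relies on
# (assumes the `isodate` package is installed). Durations with no year or
# month component parse to a plain datetime.timedelta; otherwise isodate
# returns its own Duration type, which is why both isinstance branches exist.
import isodate

print(isodate.parse_duration("P1W") * 2)  # 14 days, 0:00:00 (datetime.timedelta)
print(isodate.parse_duration("P1M") * 3)  # isodate Duration of 3 months
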
def __init__(
    self,
    datasource: Optional[DatasourceDict] = None,
    result_type: Optional[ChartDataResultType] = None,
    annotation_layers: Optional[List[Dict[str, Any]]] = None,
    applied_time_extras: Optional[Dict[str, str]] = None,
    apply_fetch_values_predicate: bool = False,
    granularity: Optional[str] = None,
    metrics: Optional[List[Union[Dict[str, Any], str]]] = None,
    groupby: Optional[List[str]] = None,
    filters: Optional[List[Dict[str, Any]]] = None,
    time_range: Optional[str] = None,
    time_shift: Optional[str] = None,
    is_timeseries: Optional[bool] = None,
    timeseries_limit: int = 0,
    row_limit: Optional[int] = None,
    row_offset: Optional[int] = None,
    timeseries_limit_metric: Optional[Metric] = None,
    order_desc: bool = True,
    extras: Optional[Dict[str, Any]] = None,
    columns: Optional[List[str]] = None,
    orderby: Optional[List[OrderBy]] = None,
    post_processing: Optional[List[Optional[Dict[str, Any]]]] = None,
    is_rowcount: bool = False,
    **kwargs: Any,
):
    columns = columns or []
    groupby = groupby or []
    extras = extras or {}
    annotation_layers = annotation_layers or []

    self.is_rowcount = is_rowcount
    self.datasource = None
    if datasource:
        self.datasource = ConnectorRegistry.get_datasource(
            str(datasource["type"]), int(datasource["id"]), db.session
        )
    self.result_type = result_type
    self.apply_fetch_values_predicate = apply_fetch_values_predicate or False
    self.annotation_layers = [
        layer
        for layer in annotation_layers
        # formula annotations don't affect the payload, hence can be dropped
        if layer["annotationType"] != "FORMULA"
    ]
    self.applied_time_extras = applied_time_extras or {}
    self.granularity = granularity
    self.from_dttm, self.to_dttm = get_since_until(
        relative_start=extras.get(
            "relative_start", config["DEFAULT_RELATIVE_START_TIME"]
        ),
        relative_end=extras.get(
            "relative_end", config["DEFAULT_RELATIVE_END_TIME"]
        ),
        time_range=time_range,
        time_shift=time_shift,
    )
    # is_timeseries is True if time column is in either columns or groupby
    # (both are dimensions)
    self.is_timeseries = (
        is_timeseries
        if is_timeseries is not None
        else DTTM_ALIAS in columns + groupby
    )
    self.time_range = time_range
    self.time_shift = parse_human_timedelta(time_shift)
    self.post_processing = [
        post_proc for post_proc in post_processing or [] if post_proc
    ]

    # Support metric reference/definition in the format of
    #   1. 'metric_name' - name of predefined metric
    #   2. { label: 'label_name' } - legacy format for a predefined metric
    #   3. { expressionType: 'SIMPLE' | 'SQL', ... } - adhoc metric
    self.metrics = metrics and [
        x if isinstance(x, str) or is_adhoc_metric(x) else x["label"]  # type: ignore
        for x in metrics
    ]

    self.row_limit = config["ROW_LIMIT"] if row_limit is None else row_limit
    self.row_offset = row_offset or 0
    self.filter = filters or []
    self.timeseries_limit = timeseries_limit
    self.timeseries_limit_metric = timeseries_limit_metric
    self.order_desc = order_desc
    self.extras = extras

    if config["SIP_15_ENABLED"]:
        self.extras["time_range_endpoints"] = get_time_range_endpoints(
            form_data=self.extras
        )

    self.columns = columns
    self.groupby = groupby or []
    self.orderby = orderby or []

    # rename deprecated fields
    for field in DEPRECATED_FIELDS:
        if field.old_name in kwargs:
            logger.warning(
                "The field `%s` is deprecated, please use `%s` instead.",
                field.old_name,
                field.new_name,
            )
            value = kwargs[field.old_name]
            if value:
                if hasattr(self, field.new_name):
                    logger.warning(
                        "The field `%s` is already populated, "
                        "replacing value with contents from `%s`.",
                        field.new_name,
                        field.old_name,
                    )
                setattr(self, field.new_name, value)

    # move deprecated extras fields to extras
    for field in DEPRECATED_EXTRAS_FIELDS:
        if field.old_name in kwargs:
            logger.warning(
                "The field `%s` is deprecated and should "
                "be passed to `extras` via the `%s` property.",
                field.old_name,
                field.new_name,
            )
            value = kwargs[field.old_name]
            if value:
                # extras is a dict, so membership (not hasattr, which is
                # always False for dict keys) detects an existing value
                if field.new_name in self.extras:
                    logger.warning(
                        "The field `%s` is already populated in "
                        "`extras`, replacing value with contents "
                        "from `%s`.",
                        field.new_name,
                        field.old_name,
                    )
                self.extras[field.new_name] = value

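# A minimal sketch of the shape DEPRECATED_FIELDS and DEPRECATED_EXTRAS_FIELDS
# presumably have: pairs mapping an old field name to its replacement. The
# concrete entry below is an illustrative assumption, not the real definition.
from typing import NamedTuple

class DeprecatedField(NamedTuple):
    old_name: str
    new_name: str

DEPRECATED_FIELDS = (
    # hypothetical entry: the legacy SQLA granularity kwarg folds into `granularity`
    DeprecatedField(old_name="granularity_sqla", new_name="granularity"),
)
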
def __init__(
    self,
    annotation_layers: Optional[List[Dict[str, Any]]] = None,
    applied_time_extras: Optional[Dict[str, str]] = None,
    granularity: Optional[str] = None,
    metrics: Optional[List[Union[Dict[str, Any], str]]] = None,
    groupby: Optional[List[str]] = None,
    filters: Optional[List[Dict[str, Any]]] = None,
    time_range: Optional[str] = None,
    time_shift: Optional[str] = None,
    is_timeseries: Optional[bool] = None,
    timeseries_limit: int = 0,
    row_limit: Optional[int] = None,
    row_offset: Optional[int] = None,
    timeseries_limit_metric: Optional[Metric] = None,
    order_desc: bool = True,
    extras: Optional[Dict[str, Any]] = None,
    columns: Optional[List[str]] = None,
    orderby: Optional[List[List[str]]] = None,
    post_processing: Optional[List[Optional[Dict[str, Any]]]] = None,
    **kwargs: Any,
):
    annotation_layers = annotation_layers or []
    metrics = metrics or []
    extras = extras or {}
    is_sip_38 = is_feature_enabled("SIP_38_VIZ_REARCHITECTURE")

    self.annotation_layers = [
        layer
        for layer in annotation_layers
        # formula annotations don't affect the payload, hence can be dropped
        if layer["annotationType"] != "FORMULA"
    ]
    self.applied_time_extras = applied_time_extras or {}
    self.granularity = granularity
    self.from_dttm, self.to_dttm = get_since_until(
        relative_start=extras.get(
            "relative_start", config["DEFAULT_RELATIVE_START_TIME"]
        ),
        relative_end=extras.get(
            "relative_end", config["DEFAULT_RELATIVE_END_TIME"]
        ),
        time_range=time_range,
        time_shift=time_shift,
    )
    # is_timeseries is True if time column is in groupby
    self.is_timeseries = (
        is_timeseries
        if is_timeseries is not None
        else (DTTM_ALIAS in groupby if groupby else False)
    )
    self.time_range = time_range
    self.time_shift = parse_human_timedelta(time_shift)
    self.post_processing = [
        post_proc for post_proc in post_processing or [] if post_proc
    ]
    if not is_sip_38:
        self.groupby = groupby or []

    # Support metric reference/definition in the format of
    #   1. 'metric_name' - name of predefined metric
    #   2. { label: 'label_name' } - legacy format for a predefined metric
    #   3. { expressionType: 'SIMPLE' | 'SQL', ... } - adhoc metric
    self.metrics = [
        metric
        if isinstance(metric, str) or "expressionType" in metric
        else metric["label"]  # type: ignore
        for metric in metrics
    ]

    self.row_limit = row_limit or config["ROW_LIMIT"]
    self.row_offset = row_offset or 0
    self.filter = filters or []
    self.timeseries_limit = timeseries_limit
    self.timeseries_limit_metric = timeseries_limit_metric
    self.order_desc = order_desc
    self.extras = extras

    if config["SIP_15_ENABLED"] and "time_range_endpoints" not in self.extras:
        self.extras["time_range_endpoints"] = get_time_range_endpoints(form_data={})

    self.columns = columns or []
    if is_sip_38 and groupby:
        self.columns += groupby
        logger.warning(
            "The field `groupby` is deprecated. Viz plugins should "
            "pass all selectables via the `columns` field"
        )

    self.orderby = orderby or []

    # rename deprecated fields
    for field in DEPRECATED_FIELDS:
        if field.old_name in kwargs:
            logger.warning(
                "The field `%s` is deprecated, please use `%s` instead.",
                field.old_name,
                field.new_name,
            )
            value = kwargs[field.old_name]
            if value:
                if hasattr(self, field.new_name):
                    logger.warning(
                        "The field `%s` is already populated, "
                        "replacing value with contents from `%s`.",
                        field.new_name,
                        field.old_name,
                    )
                setattr(self, field.new_name, value)

    # move deprecated extras fields to extras
    for field in DEPRECATED_EXTRAS_FIELDS:
        if field.old_name in kwargs:
            logger.warning(
                "The field `%s` is deprecated and should "
                "be passed to `extras` via the `%s` property.",
                field.old_name,
                field.new_name,
            )
            value = kwargs[field.old_name]
            if value:
                # extras is a dict, so membership (not hasattr, which is
                # always False for dict keys) detects an existing value
                if field.new_name in self.extras:
                    logger.warning(
                        "The field `%s` is already populated in "
                        "`extras`, replacing value with contents "
                        "from `%s`.",
                        field.new_name,
                        field.old_name,
                    )
                self.extras[field.new_name] = value

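# A tiny illustration of the deprecated-extras handling above, assuming the
# class is named QueryObject and DEPRECATED_EXTRAS_FIELDS contains an entry
# mapping "where" to "where" (both are assumptions for this sketch). A legacy
# kwarg is absorbed via **kwargs and relocated into `extras` with a warning,
# so old callers keep working while `extras` stays the canonical home.
qobj = QueryObject(
    metrics=["count"],
    where="status = 'active'",  # hypothetical deprecated kwarg
)
assert qobj.extras["where"] == "status = 'active'"
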
def __init__(  # pylint: disable=too-many-arguments,too-many-locals
    self,
    query_context: "QueryContext",
    annotation_layers: Optional[List[Dict[str, Any]]] = None,
    applied_time_extras: Optional[Dict[str, str]] = None,
    apply_fetch_values_predicate: bool = False,
    columns: Optional[List[str]] = None,
    datasource: Optional[DatasourceDict] = None,
    extras: Optional[Dict[str, Any]] = None,
    filters: Optional[List[QueryObjectFilterClause]] = None,
    granularity: Optional[str] = None,
    is_rowcount: bool = False,
    is_timeseries: Optional[bool] = None,
    metrics: Optional[List[Metric]] = None,
    order_desc: bool = True,
    orderby: Optional[List[OrderBy]] = None,
    post_processing: Optional[List[Optional[Dict[str, Any]]]] = None,
    result_type: Optional[ChartDataResultType] = None,
    row_limit: Optional[int] = None,
    row_offset: Optional[int] = None,
    series_columns: Optional[List[str]] = None,
    series_limit: int = 0,
    series_limit_metric: Optional[Metric] = None,
    time_range: Optional[str] = None,
    time_shift: Optional[str] = None,
    **kwargs: Any,
):
    columns = columns or []
    extras = extras or {}
    annotation_layers = annotation_layers or []

    self.time_offsets = kwargs.get("time_offsets", [])
    self.inner_from_dttm = kwargs.get("inner_from_dttm")
    self.inner_to_dttm = kwargs.get("inner_to_dttm")

    if series_columns:
        self.series_columns = series_columns
    elif is_timeseries and metrics:
        self.series_columns = columns
    else:
        self.series_columns = []

    self.is_rowcount = is_rowcount
    self.datasource = None
    if datasource:
        self.datasource = ConnectorRegistry.get_datasource(
            str(datasource["type"]), int(datasource["id"]), db.session
        )
    self.result_type = result_type or query_context.result_type
    self.apply_fetch_values_predicate = apply_fetch_values_predicate or False
    self.annotation_layers = [
        layer
        for layer in annotation_layers
        # formula annotations don't affect the payload, hence can be dropped
        if layer["annotationType"] != "FORMULA"
    ]
    self.applied_time_extras = applied_time_extras or {}
    self.granularity = granularity
    self.from_dttm, self.to_dttm = get_since_until(
        relative_start=extras.get(
            "relative_start", config["DEFAULT_RELATIVE_START_TIME"]
        ),
        relative_end=extras.get(
            "relative_end", config["DEFAULT_RELATIVE_END_TIME"]
        ),
        time_range=time_range,
        time_shift=time_shift,
    )
    # is_timeseries is True if the time column is among the selected columns
    # (groupby has been folded into columns in this version)
    self.is_timeseries = (
        is_timeseries if is_timeseries is not None else DTTM_ALIAS in columns
    )
    self.time_range = time_range
    self.time_shift = parse_human_timedelta(time_shift)
    self.post_processing = [
        post_proc for post_proc in post_processing or [] if post_proc
    ]

    # Support metric reference/definition in the format of
    #   1. 'metric_name' - name of predefined metric
    #   2. { label: 'label_name' } - legacy format for a predefined metric
    #   3. { expressionType: 'SIMPLE' | 'SQL', ... } - adhoc metric
    self.metrics = metrics and [
        x if isinstance(x, str) or is_adhoc_metric(x) else x["label"]  # type: ignore
        for x in metrics
    ]

    default_row_limit = (
        config["SAMPLES_ROW_LIMIT"]
        if self.result_type == ChartDataResultType.SAMPLES
        else config["ROW_LIMIT"]
    )
    self.row_limit = apply_max_row_limit(row_limit or default_row_limit)
    self.row_offset = row_offset or 0
    self.filter = filters or []
    self.series_limit = series_limit
    self.series_limit_metric = series_limit_metric
    self.order_desc = order_desc
    self.extras = extras

    if config["SIP_15_ENABLED"]:
        self.extras["time_range_endpoints"] = get_time_range_endpoints(
            form_data=self.extras
        )

    self.columns = columns
    self.orderby = orderby or []

    self._rename_deprecated_fields(kwargs)
    self._move_deprecated_extra_fields(kwargs)

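# A hedged usage sketch for the constructor above. The class name QueryObject
# and the pre-built `query_context` are assumptions drawn from the parameter
# names; in practice instances are typically built by a factory from an API
# payload rather than constructed by hand like this.
query_obj = QueryObject(
    query_context=query_context,             # assumed existing QueryContext
    datasource={"type": "table", "id": 1},   # hypothetical datasource reference
    metrics=["count"],
    columns=["gender"],
    time_range="Last week",
    time_shift="1 week",                     # parsed into a timedelta internally
    row_limit=100,
)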