Example #1
def test_get_time_filter_status_no_temporal_col():
    dataset = get_dataset_mock()
    dataset.columns[0].is_dttm = False

    assert get_time_filter_status(
        dataset, {ExtraFiltersTimeColumnType.TIME_COL: "foobar"}) == (
            [],
            [{
                "reason": ExtraFiltersReasonType.COL_NOT_IN_DATASOURCE,
                "column": ExtraFiltersTimeColumnType.TIME_COL,
            }],
        )

    assert get_time_filter_status(
        dataset, {ExtraFiltersTimeColumnType.TIME_RANGE: "1 year ago"}) == (
            [],
            [{
                "reason": ExtraFiltersReasonType.NO_TEMPORAL_COLUMN,
                "column": ExtraFiltersTimeColumnType.TIME_RANGE,
            }],
        )

    assert get_time_filter_status(
        dataset, {ExtraFiltersTimeColumnType.TIME_GRAIN: "PT1M"}) == (
            [],
            [{
                "reason": ExtraFiltersReasonType.NO_TEMPORAL_COLUMN,
                "column": ExtraFiltersTimeColumnType.TIME_GRAIN,
            }],
        )
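
The tests above rely on a get_dataset_mock() helper from Superset's test fixtures that is not shown here. A minimal stand-in might look like the sketch below; it assumes get_time_filter_status only needs the dataset's columns list, with column_name and is_dttm on each entry (the attributes these tests themselves touch), and the name make_dataset_mock is illustrative rather than part of Superset.

from unittest.mock import Mock


def make_dataset_mock(temporal: bool = True) -> Mock:
    # Bare-bones dataset double: one column named "ds" whose is_dttm flag
    # controls whether the dataset has a temporal column, mirroring the
    # dataset.columns[0].is_dttm = False manipulation in the test above.
    dataset = Mock()
    ds_col = Mock()
    ds_col.column_name = "ds"
    ds_col.is_dttm = temporal
    dataset.columns = [ds_col]
    dataset.column_names = ["ds"]
    return dataset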
Example #2
def test_get_time_filter_status_time_range():
    dataset = get_dataset_mock()

    assert get_time_filter_status(
        dataset,
        {ExtraFiltersTimeColumnType.TIME_RANGE: NO_TIME_RANGE}) == ([], [])

    assert get_time_filter_status(
        dataset, {ExtraFiltersTimeColumnType.TIME_RANGE: "1 year ago"}) == (
            [{"column": ExtraFiltersTimeColumnType.TIME_RANGE}], [])
Example #3
def _get_full(
    query_context: "QueryContext",
    query_obj: "QueryObject",
    force_cached: Optional[bool] = False,
) -> Dict[str, Any]:
    datasource = _get_datasource(query_context, query_obj)
    result_type = query_obj.result_type or query_context.result_type
    payload = query_context.get_df_payload(query_obj,
                                           force_cached=force_cached)
    df = payload["df"]
    status = payload["status"]
    if status != QueryStatus.FAILED:
        payload["colnames"] = list(df.columns)
        payload["coltypes"] = extract_dataframe_dtypes(df)
        payload["data"] = query_context.get_data(df)
    del payload["df"]

    filters = query_obj.filter
    filter_columns = cast(List[str], [flt.get("col") for flt in filters])
    columns = set(datasource.column_names)
    applied_time_columns, rejected_time_columns = get_time_filter_status(
        datasource, query_obj.applied_time_extras)
    payload["applied_filters"] = [{
        "column": col
    } for col in filter_columns if col in columns] + applied_time_columns
    payload["rejected_filters"] = [{
        "reason": "not_in_datasource",
        "column": col
    } for col in filter_columns if col not in columns] + rejected_time_columns

    if result_type == ChartDataResultType.RESULTS and status != QueryStatus.FAILED:
        return {"data": payload.get("data")}
    return payload
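
Nothing from Superset is added below; the sketch is only a hypothetical consumer of the payload built by _get_full, showing that each entry in "rejected_filters" carries a "reason" and a "column" (as the tests in Examples #1 and #2 assert), while "applied_filters" entries carry just a "column".

from typing import Any, Dict, List


def summarize_filter_status(payload: Dict[str, Any]) -> List[str]:
    # Hypothetical helper: turn applied/rejected filter entries into
    # human-readable strings, e.g. for logging alongside the chart data.
    applied = [f"applied: {flt['column']}" for flt in payload.get("applied_filters", [])]
    rejected = [
        f"rejected: {flt['column']} ({flt.get('reason', 'unknown')})"
        for flt in payload.get("rejected_filters", [])
    ]
    return applied + rejected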
Example #4
def test_get_time_filter_status_time_grain():
    dataset = get_dataset_mock()

    assert get_time_filter_status(
        dataset, {ExtraFiltersTimeColumnType.TIME_GRAIN: "PT1M"}) == (
            [{"column": ExtraFiltersTimeColumnType.TIME_GRAIN}], [])
Example #5
def test_get_time_filter_status_time_col():
    dataset = get_dataset_mock()

    assert get_time_filter_status(
        dataset, {ExtraFiltersTimeColumnType.TIME_COL: "ds"}) == (
            [{"column": ExtraFiltersTimeColumnType.TIME_COL}], [])
Example #6
    def get_single_payload(
        self,
        query_obj: QueryObject,
        force_cached: Optional[bool] = False,
    ) -> Dict[str, Any]:
        """Return results payload for a single quey"""
        if self.result_type == utils.ChartDataResultType.QUERY:
            return {
                "query": self.datasource.get_query_str(query_obj.to_dict()),
                "language": self.datasource.query_language,
            }

        if self.result_type == utils.ChartDataResultType.SAMPLES:
            row_limit = query_obj.row_limit or math.inf
            query_obj = copy.copy(query_obj)
            query_obj.is_timeseries = False
            query_obj.orderby = []
            query_obj.groupby = []
            query_obj.metrics = []
            query_obj.post_processing = []
            query_obj.row_limit = min(row_limit, config["SAMPLES_ROW_LIMIT"])
            query_obj.row_offset = 0
            query_obj.columns = [
                o.column_name for o in self.datasource.columns
            ]

        payload = self.get_df_payload(query_obj, force_cached=force_cached)
        df = payload["df"]
        status = payload["status"]
        if status != utils.QueryStatus.FAILED:
            payload["colnames"] = list(df.columns)
            payload["coltypes"] = utils.extract_dataframe_dtypes(df)
            payload["data"] = self.get_data(df)
        del payload["df"]

        filters = query_obj.filter
        filter_columns = cast(List[str], [flt.get("col") for flt in filters])
        columns = set(self.datasource.column_names)
        applied_time_columns, rejected_time_columns = utils.get_time_filter_status(
            self.datasource, query_obj.applied_time_extras)
        payload["applied_filters"] = [{
            "column": col
        } for col in filter_columns if col in columns] + applied_time_columns
        payload["rejected_filters"] = [{
            "reason": "not_in_datasource",
            "column": col
        } for col in filter_columns if col not in columns
                                       ] + rejected_time_columns

        if (self.result_type == utils.ChartDataResultType.RESULTS
                and status != utils.QueryStatus.FAILED):
            return {"data": payload["data"]}
        return payload
Example #7
    def get_single_payload(self, query_obj: QueryObject) -> Dict[str, Any]:
        """Returns a payload of metadata and data"""
        if self.result_type == utils.ChartDataResultType.QUERY:
            return {
                "query": self.datasource.get_query_str(query_obj.to_dict()),
                "language": self.datasource.query_language,
            }
        if self.result_type == utils.ChartDataResultType.SAMPLES:
            row_limit = query_obj.row_limit or math.inf
            query_obj = copy.copy(query_obj)
            query_obj.orderby = []
            query_obj.groupby = []
            query_obj.metrics = []
            query_obj.post_processing = []
            query_obj.row_limit = min(row_limit, config["SAMPLES_ROW_LIMIT"])
            query_obj.row_offset = 0
            query_obj.columns = [
                o.column_name for o in self.datasource.columns
            ]
        payload = self.get_df_payload(query_obj)
        # TODO: implement
        payload["annotation_data"] = []
        df = payload["df"]
        status = payload["status"]
        if status != utils.QueryStatus.FAILED:
            payload["data"] = self.get_data(df)
        del payload["df"]

        filters = query_obj.filter
        filter_columns = cast(List[str], [flt.get("col") for flt in filters])
        columns = set(self.datasource.column_names)
        applied_time_columns, rejected_time_columns = utils.get_time_filter_status(
            self.datasource, query_obj.applied_time_extras)
        payload["applied_filters"] = [{
            "column": col
        } for col in filter_columns if col in columns] + applied_time_columns
        payload["rejected_filters"] = [{
            "reason": "not_in_datasource",
            "column": col
        } for col in filter_columns if col not in columns
                                       ] + rejected_time_columns

        if self.result_type == utils.ChartDataResultType.RESULTS:
            return {"data": payload["data"]}
        return payload
Example #8
def _get_full(
    query_context: QueryContext,
    query_obj: QueryObject,
    force_cached: Optional[bool] = False,
) -> Dict[str, Any]:
    datasource = _get_datasource(query_context, query_obj)
    result_type = query_obj.result_type or query_context.result_type
    payload = query_context.get_df_payload(query_obj, force_cached=force_cached)
    applied_template_filters = payload.get("applied_template_filters", [])
    df = payload["df"]
    status = payload["status"]
    if status != QueryStatus.FAILED:
        payload["colnames"] = list(df.columns)
        payload["indexnames"] = list(df.index)
        payload["coltypes"] = extract_dataframe_dtypes(df, datasource)
        payload["data"] = query_context.get_data(df)
        payload["result_format"] = query_context.result_format
    del payload["df"]

    filters = query_obj.filter
    filter_columns = cast(List[str], [flt.get("col") for flt in filters])
    columns = set(datasource.column_names)
    applied_time_columns, rejected_time_columns = get_time_filter_status(
        datasource, query_obj.applied_time_extras
    )
    payload["applied_filters"] = [
        {"column": get_column_name(col)}
        for col in filter_columns
        if is_adhoc_column(col) or col in columns or col in applied_template_filters
    ] + applied_time_columns
    payload["rejected_filters"] = [
        {"reason": ExtraFiltersReasonType.COL_NOT_IN_DATASOURCE, "column": col}
        for col in filter_columns
        if not is_adhoc_column(col)
        and col not in columns
        and col not in applied_template_filters
    ] + rejected_time_columns

    if result_type == ChartDataResultType.RESULTS and status != QueryStatus.FAILED:
        return {
            "data": payload.get("data"),
            "colnames": payload.get("colnames"),
            "coltypes": payload.get("coltypes"),
        }
    return payload
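
Relative to Example #3, this revision also treats a filter as applied when its column is an ad-hoc (SQL expression) column or when it was already applied through Jinja templating (applied_template_filters). The fragment below is illustrative only; the exact keys of an ad-hoc column dict are an assumption about the payloads Superset's frontend typically sends, not something shown in the code above.

# Two shapes that is_adhoc_column / get_column_name are asked to tell apart:
physical_column = "ds"  # plain string, matched against datasource.column_names
adhoc_column = {        # dict with a SQL expression (keys assumed, see note above)
    "label": "order_month",
    "sqlExpression": "DATE_TRUNC('month', order_date)",
}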