Example #1
def get_store_contents():
    """
    Return an ordered tuple of attributes representing the store contents.
    Useful for ensuring key properties stay the same when switching between systems.
    """
    _get_one = [
        serialized_dataframe(global_state.get_data('1')),
        global_state.get_dtypes('1'),
        global_state.get_settings('1'),
        global_state.get_metadata('1'),
        global_state.get_context_variables('1'),
        global_state.get_history('1'),
    ]
    _get_all = [
        {
            k: serialized_dataframe(v)
            for k, v in global_state.get_data().items()
        },
        global_state.get_dtypes(),
        global_state.get_settings(),
        global_state.get_metadata(),
        global_state.get_context_variables(),
        global_state.get_history(),
    ]
    _lengths = [
        len(global_state.DATA),
        len(global_state.DTYPES),
        len(global_state.SETTINGS),
        len(global_state.METADATA),
        len(global_state.CONTEXT_VARIABLES),
        len(global_state.HISTORY),
    ]
    return (_get_one, _get_all, _lengths)
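A round-trip check is the natural use for this helper: dtale documents store-swapping hooks such as dtale.global_state.use_shelve_store, and a sketch under that assumption (the wiring below is illustrative, not from the source) might be:

import dtale.global_state as global_state

before = get_store_contents()
# use_shelve_store is assumed here as the store-swapping call under test;
# the real test would swap to whatever backend it is exercising
global_state.use_shelve_store('/tmp/dtale-store')
assert get_store_contents() == before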
Example #2
def test_stack(unittest):
    from dtale.views import build_dtypes_state
    import dtale.global_state as global_state

    global_state.clear_store()
    df1 = pd.DataFrame({
        "A": ["A0", "A1"],
        "B": ["B0", "B1"],
        "C": ["C0", "C1"],
        "D": ["D0", "D1"],
    })
    df2 = pd.DataFrame({
        "A": ["A2", "A3"],
        "B": ["B3", "B3"],
        "C": ["C3", "C3"],
        "D": ["D3", "D3"],
    })

    with app.test_client() as c:
        data = {"1": df1, "2": df2}
        dtypes = {k: build_dtypes_state(v) for k, v in data.items()}
        settings = {k: {} for k in data.keys()}
        build_data_inst(data)
        build_dtypes(dtypes)
        build_settings(settings)
        datasets = [dict(dataId="1", columns=[]), dict(dataId="2", columns=[])]
        config = dict(ignoreIndex=False)
        resp = c.post(
            "/dtale/merge",
            data=dict(
                action="stack",
                config=json.dumps(config),
                datasets=json.dumps(datasets),
            ),
        )
        assert resp.status_code == 200
        final_df = global_state.get_data(resp.json["data_id"])
        unittest.assertEqual(list(final_df["A"].values),
                             ["A0", "A1", "A2", "A3"])
        unittest.assertEqual(list(final_df["index"].values), [0, 1, 0, 1])

        config["ignoreIndex"] = True
        resp = c.post(
            "/dtale/merge",
            data=dict(
                action="stack",
                config=json.dumps(config),
                datasets=json.dumps(datasets),
            ),
        )
        assert resp.status_code == 200
        final_df = global_state.get_data(resp.json["data_id"])
        assert "index" not in final_df.columns
        unittest.assertEqual(list(final_df["A"].values),
                             ["A0", "A1", "A2", "A3"])
Example #3
def test_columns(unittest):
    import dtale.global_state as global_state

    global_state.clear_store()
    data_id, duplicates_type = "1", "columns"

    data = {data_id: duplicates_data()}
    build_data_inst(data)

    builder = DuplicateCheck(data_id, duplicates_type, {"keep": "first"})
    unittest.assertEqual(builder.test(), {"Foo": ["foo"]})
    new_data_id = builder.execute()
    unittest.assertEqual(
        list(global_state.get_data(new_data_id).columns), ["Foo", "fOo", "foO", "bar"]
    )

    data = {data_id: duplicates_data()}
    build_data_inst(data)

    builder = DuplicateCheck(data_id, duplicates_type, {"keep": "last"})
    unittest.assertEqual(builder.test(), {"foo": ["Foo"]})
    new_data_id = builder.execute()
    unittest.assertEqual(
        list(global_state.get_data(new_data_id).columns), ["foo", "fOo", "foO", "bar"]
    )

    data = {data_id: duplicates_data()}
    build_data_inst(data)

    builder = DuplicateCheck(data_id, duplicates_type, {"keep": "none"})
    unittest.assertEqual(builder.test(), {"Foo": ["foo"]})
    new_data_id = builder.execute()
    unittest.assertEqual(
        list(global_state.get_data(new_data_id).columns), ["fOo", "foO", "bar"]
    )

    data = {data_id: duplicates_data().drop(["fOo", "foO", "bar"], axis=1)}
    build_data_inst(data)

    builder = DuplicateCheck(data_id, duplicates_type, {"keep": "none"})
    with pytest.raises(RemoveAllDataException):
        builder.execute()

    data = {data_id: non_duplicate_data()}
    build_data_inst(data)

    builder = DuplicateCheck(data_id, duplicates_type, {"keep": "none"})
    with pytest.raises(NoDuplicatesException):
        builder.checker.remove(data[data_id])
Example #4
    def input_toggles(_ts, inputs, pathname):
        """
        dash callback controlling the showing/hiding of chart-specific inputs (for example, the z-axis) as well as
        chart formatting inputs (sorting for bars in a bar chart, bar chart style (stacked), or y-axis ranges).
        """
        [chart_type, agg] = [inputs.get(p) for p in ["chart_type", "agg"]]
        show_input = show_input_handler(chart_type)

        y_multi_style = {"display": "block" if show_input("y", "multi") else "none"}
        y_single_style = {"display": "block" if show_input("y") else "none"}
        z_style = {"display": "block" if show_input("z") else "none"}
        group_style = {"display": "block" if show_input("group") else "none"}
        rolling_style = {"display": "inherit" if agg == "rolling" else "none"}
        cpg_style = {"display": "block" if show_chart_per_group(**inputs) else "none"}
        bar_style, barsort_style = bar_input_style(**inputs)
        yaxis_style = {"display": "block" if show_yaxis_ranges(**inputs) else "none"}

        data_id = get_data_id(pathname)
        df = global_state.get_data(data_id)
        animate_style, animate_by_style, animate_opts = animate_styles(df, **inputs)

        return (
            y_multi_style,
            y_single_style,
            z_style,
            group_style,
            rolling_style,
            cpg_style,
            bar_style,
            barsort_style,
            yaxis_style,
            animate_style,
            animate_by_style,
            animate_opts,
        )
Example #5
 def run(self):
     data = run_query(
         global_state.get_data(self.data_id),
         (global_state.get_settings(self.data_id) or {}).get("query"),
         global_state.get_context_variables(self.data_id),
     )
     return self.report.run(data)
Example #6
def get_analysis(data_id):
    df = global_state.get_data(data_id)
    valid_corr_cols, _, _ = get_col_groups(data_id, df)
    corr_matrix, _ = build_matrix(data_id, df, valid_corr_cols)
    corr_matrix = corr_matrix.abs()

    # Select upper triangle of correlation matrix
    upper = corr_matrix.where(
        np.triu(np.ones(corr_matrix.shape), k=1).astype(np.bool_))

    score = upper.max(axis=1)
    score.name = "score"
    score = score.sort_values(ascending=False)

    upper = upper.loc[score.index]
    column_name = upper.index[0]
    max_score = score.loc[column_name]
    if pd.isnull(max_score):
        max_score = "N/A"
    upper = upper.fillna(0).to_dict(orient="index")

    missing = df[valid_corr_cols].isna().sum()
    missing.name = "missing"

    analysis = pd.concat([score, missing], axis=1)
    analysis.index.name = "column"
    analysis = analysis.fillna("N/A").reset_index().to_dict(orient="records")

    return column_name, max_score, upper, analysis
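The np.triu mask is what keeps each correlated pair from being counted twice; a minimal standalone illustration of that step:

import numpy as np
import pandas as pd

corr = pd.DataFrame(
    [[1.0, 0.9, 0.2], [0.9, 1.0, 0.4], [0.2, 0.4, 1.0]],
    index=list("abc"), columns=list("abc"),
)
# k=1 keeps only the cells strictly above the diagonal
mask = np.triu(np.ones(corr.shape), k=1).astype(bool)
upper = corr.where(mask)
# per-row max of what remains; the last row is all-NaN by construction
print(upper.max(axis=1))  # a -> 0.9, b -> 0.4, c -> NaN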
Example #7
    def pareto_data_callback(
        pareto_x,
        pareto_bars,
        pareto_line,
        pareto_sort,
        pareto_dir,
        group,
        data_id,
    ):
        pareto_data = dict(
            pareto_x=pareto_x,
            pareto_bars=pareto_bars,
            pareto_line=pareto_line,
            pareto_sort=pareto_sort,
            pareto_dir=pareto_dir,
        )
        if group is not None:
            pareto_data["pareto_group"] = group
        df = global_state.get_data(data_id)
        (x_options, bars_options, line_options,
         _sort_options) = build_pareto_options(
             df,
             x=pareto_x,
             bars=pareto_bars,
             line=pareto_line,
         )

        return (
            pareto_data,
            x_options,
            bars_options,
            line_options,
        )
Example #8
 def reshape(self):
     data = run_query(
         global_state.get_data(self.data_id),
         (global_state.get_settings(self.data_id) or {}).get("query"),
         global_state.get_context_variables(self.data_id),
     )
     return self.builder.reshape(data)
Example #9
File: views.py Project: ebgaspar/dtale
    def query_input(query, pathname, curr_query):
        """
        dash callback for storing valid pandas dataframe queries.  This acts as an intermediary between values typed
        by the user and values that are applied to pandas dataframes.  Most of the time what the user has typed is not
        complete and thus not a valid pandas dataframe query.

        :param query: query input
        :type query: str
        :param pathname: URL path
        :param curr_query: current valid pandas dataframe query
        :return: tuple of (query (if valid), styling for query input (if invalid), query input title (containing
        invalid query exception information))
        :rtype: tuple of (str, dict, str)
        """
        try:
            data_id = get_data_id(pathname)
            data = global_state.get_data(data_id)
            ctxt_vars = global_state.get_context_variables(data_id)
            run_query(data, query, ctxt_vars)
            return query, {"line-height": "inherit"}, ""
        except BaseException as ex:
            return (
                curr_query,
                {
                    "line-height": "inherit",
                    "background-color": "pink"
                },
                str(ex),
            )
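run_query is only used here to validate the typed text; mechanically it mirrors pandas' DataFrame.query, where external values are referenced with @. A plain-pandas sketch of that mechanism (the dtale call above is the real integration):

import pandas as pd

df = pd.DataFrame({"a": [1, 2, 3]})
threshold = 1
# pandas resolves @threshold from the caller's scope; dtale feeds its stored
# context variables into user queries in the same spirit
print(df.query("a > @threshold"))  # rows where a is 2 or 3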
Example #10
    def group_values(chart_type, group_cols, map_group_cols, pathname, inputs,
                     prev_group_vals):
        group_cols = make_list(group_cols)
        if show_input_handler(chart_type
                              or 'line')('group') and not len(group_cols):
            return [], None
        elif chart_type == 'maps':  # all maps have a group input
            group_cols = make_list(map_group_cols)
            if not len(group_cols):
                return [], None
        data_id = get_data_id(pathname)
        group_vals = run_query(global_state.get_data(data_id),
                               inputs.get('query'),
                               global_state.get_context_variables(data_id))
        group_vals = build_group_val_options(group_vals, group_cols)
        selections = []
        available_vals = [gv['value'] for gv in group_vals]
        if prev_group_vals is not None:
            selections = [
                pgv for pgv in prev_group_vals if pgv in available_vals
            ]
        if not len(selections) and len(group_vals) <= MAX_GROUPS:
            selections = available_vals

        return group_vals, selections
Example #11
 def input_data(_ts, chart_type, x, y_multi, y_single, z, group, group_val,
                agg, window, rolling_comp, pathname, query):
     """
     dash callback for maintaining chart input state and column-based dropdown options.  This will guard against
     users selecting the same column for multiple axes.
     """
     y_val = make_list(y_single if chart_type in ZAXIS_CHARTS else y_multi)
     if group_val is not None:
         group_val = [json.loads(gv) for gv in group_val]
     inputs = dict(query=query,
                   chart_type=chart_type,
                   x=x,
                   y=y_val,
                   z=z,
                   group=group,
                   group_val=group_val,
                   agg=agg,
                   window=window,
                   rolling_comp=rolling_comp)
     data_id = get_data_id(pathname)
     options = build_input_options(global_state.get_data(data_id), **inputs)
     x_options, y_multi_options, y_single_options, z_options, group_options, barsort_options, yaxis_options = options
     show_map = chart_type == 'maps'
     map_style = {} if show_map else {'display': 'none'}
     non_map_style = {'display': 'none'} if show_map else {}
     cscale_style = colorscale_input_style(chart_type=chart_type)
     return (inputs, x_options, y_single_options, y_multi_options,
             z_options, group_options, barsort_options, yaxis_options,
             non_map_style, map_style, cscale_style)
Example #12
def get_store_contents():
    """
    Return an ordered tuple of attributes representing the store contents.
    Useful for ensuring key properties stay the same when switching between systems.
    """
    _get_one = [
        serialized_dataframe(global_state.get_data("1")),
        global_state.get_dtypes("1"),
        global_state.get_settings("1"),
        global_state.get_metadata("1"),
        global_state.get_context_variables("1"),
        global_state.get_history("1"),
    ]
    _get_all = [
        {
            int(k): serialized_dataframe(v.data)
            for k, v in global_state.items()
        },
        {int(k): v.dtypes
         for k, v in global_state.items()},
        {int(k): v.settings
         for k, v in global_state.items()},
        {int(k): v.metadata
         for k, v in global_state.items()},
        {int(k): v.context_variables
         for k, v in global_state.items()},
        {int(k): v.history
         for k, v in global_state.items()},
    ]
    _lengths = [
        global_state.size(),
    ]
    return (_get_one, _get_all, _lengths)
Example #13
File: views.py Project: redisun/dtale
 def group_values(
     chart_type,
     group_cols,
     map_group_cols,
     cs_group_cols,
     treemap_group_cols,
     pathname,
     inputs,
     prev_group_vals,
 ):
     data_id = get_data_id(pathname)
     if chart_type == "maps":
         group_cols = map_group_cols
     elif chart_type == "candlestick":
         group_cols = cs_group_cols
     elif chart_type == "treemap":
         group_cols = treemap_group_cols
     group_cols = make_list(group_cols)
     group_types = get_group_types(inputs, data_id, group_cols)
     if "groups" not in group_types:
         return [], None
     group_vals = run_query(
         global_state.get_data(data_id),
         inputs.get("query"),
         global_state.get_context_variables(data_id),
     )
     group_vals = build_group_val_options(group_vals, group_cols)
     selections = []
     available_vals = [gv["value"] for gv in group_vals]
     if prev_group_vals is not None:
         selections = [pgv for pgv in prev_group_vals if pgv in available_vals]
     if not len(selections) and len(group_vals) <= MAX_GROUPS:
         selections = available_vals
     return group_vals, selections
Example #14
def test_covid_dataset():
    import dtale.global_state as global_state

    global_state.clear_store()

    def mock_load_csv(**kwargs):
        if (
            kwargs.get("path")
            == "https://raw.githubusercontent.com/nytimes/covid-19-data/master/us-states.csv"
        ):
            return pd.DataFrame(dict(state=["a", "b"]))
        elif (
            kwargs.get("path")
            == "https://raw.githubusercontent.com/jasonong/List-of-US-States/master/states.csv"
        ):
            return pd.DataFrame(dict(State=["a"], Abbreviation=["A"]))
        return None

    with build_app(url=URL).test_client() as c:
        with ExitStack() as stack:
            stack.enter_context(
                mock.patch("dtale.cli.loaders.csv_loader.loader_func", mock_load_csv)
            )
            c.get("/dtale/datasets", query_string=dict(dataset="covid"))
            assert global_state.get_data(1).state_code.values[0] == "A"
Example #15
def test_time_dataframe_dataset():
    import dtale.global_state as global_state

    global_state.clear_store()
    with build_app(url=URL).test_client() as c:
        c.get("/dtale/datasets", query_string=dict(dataset="time_dataframe"))
        assert global_state.get_data(1)["A"].isnull().sum() == 0
Example #16
File: app.py Project: sigmakappa/dtale
def instances():
    """
    Prints all URLs for the pieces of data currently being viewed
    """
    curr_data = global_state.get_data()

    if len(curr_data):

        def _instance_msgs():
            for data_id in curr_data:
                data_obj = DtaleData(data_id,
                                     build_url(ACTIVE_PORT, ACTIVE_HOST))
                metadata = global_state.get_metadata(data_id)
                name = metadata.get("name")
                yield [
                    data_id, name or "",
                    data_obj.build_main_url(data_id=data_id)
                ]
                if name is not None:
                    yield [
                        global_state.convert_name_to_url_path(name),
                        name,
                        data_obj.build_main_url(),
                    ]

        data = pd.DataFrame(list(_instance_msgs()),
                            columns=["ID", "Name",
                                     "URL"]).to_string(index=False)
        print((
            "To gain access to an instance object simply pass the value from 'ID' to dtale.get_instance(ID)\n\n{}"
        ).format(data))
    else:
        print("currently no running instances...")
Example #17
def build_drilldown_title(data_id, all_inputs, click_point, props, val_prop):
    data = global_state.get_data(data_id)

    def _build_val(col, val):
        if classify_type(find_dtype(data[col])) == "D":
            return json_date(convert_date_val_to_date(val))
        return val

    if "text" in click_point:  # Heatmaps
        strs = []
        for dim in click_point["text"].split("<br>"):
            prop, val = dim.split(": ")
            strs.append("{} ({})".format(prop, val))
        return "{}: {}".format(text("Drilldown for"), ", ".join(strs))

    strs = []
    frame_col = all_inputs.get("animate_by")
    if frame_col:
        strs.append("{} ({})".format(frame_col, click_point.get("customdata")))
    for prop in props:
        prop = make_list(prop)
        val_key = prop[0]
        if click_point.get(val_key) is not None:
            col = make_list(all_inputs.get(prop[-1]))[0]
            strs.append("{} ({})".format(
                col, _build_val(col, click_point.get(val_key))))

    val_prop = make_list(val_prop)
    val_key = val_prop[0]
    val_col = make_list(all_inputs.get(val_prop[-1]))[0]
    agg = AGGS[all_inputs.get("agg") or "raw"]
    strs.append("{} {} ({})".format(
        agg, val_col, _build_val(val_col, click_point.get(val_key))))
    return "{}: {}".format(text("Drilldown for"), ", ".join(strs))
Example #18
    def input_toggles(_ts, inputs, pathname):
        """
        dash callback controlling the showing/hiding of chart-specific inputs (for example, the z-axis) as well as
        chart formatting inputs (sorting for bars in a bar chart, bar chart style (stacked), or y-axis ranges).
        """
        [chart_type, agg] = [inputs.get(p) for p in ['chart_type', 'agg']]
        show_input = show_input_handler(chart_type)

        y_multi_style = {
            'display': 'block' if show_input('y', 'multi') else 'none'
        }
        y_single_style = {'display': 'block' if show_input('y') else 'none'}
        z_style = {'display': 'block' if show_input('z') else 'none'}
        group_style = {'display': 'block' if show_input('group') else 'none'}
        rolling_style = {'display': 'inherit' if agg == 'rolling' else 'none'}
        cpg_style = {
            'display': 'block' if show_chart_per_group(**inputs) else 'none'
        }
        bar_style, barsort_style = bar_input_style(**inputs)
        yaxis_style = {
            'display': 'block' if show_yaxis_ranges(**inputs) else 'none'
        }

        data_id = get_data_id(pathname)
        df = global_state.get_data(data_id)
        animate_style, animate_by_style, animate_opts = animate_styles(
            df, **inputs)

        return (y_multi_style, y_single_style, z_style, group_style,
                rolling_style, cpg_style, bar_style, barsort_style,
                yaxis_style, animate_style, animate_by_style, animate_opts)
Example #19
def build_histogram(data_id, col, query, point_filter):
    data = run_query(
        global_state.get_data(data_id),
        query,
        global_state.get_context_variables(data_id),
    )
    query, _ = build_group_inputs_filter(data, [point_filter])
    data = run_query(data, query)
    s = data[~pd.isnull(data[col])][col]
    hist_data, hist_labels = np.histogram(s, bins=10)
    hist_labels = list(
        map(lambda x: json_float(x, precision=3), hist_labels[1:]))
    axes_builder = build_axes(
        None,
        "Bins",
        dict(type="single", data={}),
        dict(Frequency=0),
        dict(Frequency=max(hist_data)),
        data=pd.DataFrame(dict(Frequency=hist_data, Bins=hist_labels)),
    )
    hist_data = dict(data={"all": dict(x=hist_labels, Frequency=hist_data)})
    bars = bar_builder(
        hist_data,
        "Bins",
        ["Frequency"],
        axes_builder,
        chart_builder_passthru,
        modal=True,
    )
    bars.figure["layout"]["xaxis"]["type"] = "category"
    bars.figure["layout"]["title"][
        "text"] = "Histogram of {} ({} data points)".format(col, len(s))
    return bars
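One subtlety: np.histogram returns one more bin edge than it returns counts, which is why the code above drops the first edge (hist_labels[1:]) to get a single right-edge label per bar:

import numpy as np

counts, edges = np.histogram([1, 2, 2, 3, 9], bins=3)
print(len(counts), len(edges))  # 3 4
labels = edges[1:]  # one right-edge label per bar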
Example #20
def test_rows(unittest):
    import dtale.global_state as global_state

    global_state.clear_store()
    data_id, duplicates_type = "1", "rows"
    data = {data_id: duplicates_data()}
    build_data_inst(data)

    builder = DuplicateCheck(
        data_id, duplicates_type, {"keep": "first", "subset": "foo"}
    )
    unittest.assertEqual(builder.test(), dict(removed=0, total=5, remaining=5))
    pre_length = len(data[data_id])
    new_data_id = builder.execute()
    assert pre_length == len(global_state.get_data(new_data_id))

    data = {data_id: duplicates_data()}
    build_data_inst(data)

    builder = DuplicateCheck(
        data_id, duplicates_type, {"keep": "first", "subset": ["foO", "bar"]}
    )
    unittest.assertEqual(builder.test(), dict(removed=3, total=5, remaining=2))
    new_data_id = builder.execute()
    assert len(global_state.get_data(new_data_id)) == 2
    unittest.assertEqual(global_state.get_data(new_data_id)["Foo"].tolist(), [1, 4])

    data = {data_id: duplicates_data()}
    build_data_inst(data)

    builder = DuplicateCheck(
        data_id, duplicates_type, {"keep": "last", "subset": ["foO", "bar"]}
    )
    unittest.assertEqual(builder.test(), dict(removed=3, total=5, remaining=2))
    new_data_id = builder.execute()
    assert len(global_state.get_data(new_data_id)) == 2
    unittest.assertEqual(global_state.get_data(new_data_id)["Foo"].tolist(), [3, 5])

    data = {data_id: duplicates_data()}
    build_data_inst(data)

    builder = DuplicateCheck(
        data_id, duplicates_type, {"keep": "none", "subset": ["foO", "bar"]}
    )
    unittest.assertEqual(builder.test(), dict(removed=5, total=5, remaining=0))
    with pytest.raises(RemoveAllDataException):
        builder.execute()
Example #21
def test_transpose(custom_data, unittest):
    from dtale.views import build_dtypes_state

    global_state.clear_store()
    with app.test_client() as c:
        data = {c.port: custom_data}
        dtypes = {c.port: build_dtypes_state(custom_data)}
        settings = {c.port: {}}

        build_data_inst(data)
        build_dtypes(dtypes)
        build_settings(settings)
        reshape_cfg = dict(index=["security_id"], columns=["Col0"])
        resp = c.get(
            "/dtale/reshape/{}".format(c.port),
            query_string=dict(output="new",
                              type="transpose",
                              cfg=json.dumps(reshape_cfg)),
        )
        response_data = json.loads(resp.data)
        new_key = int(c.port) + 1
        assert "error" in response_data

        min_date = custom_data["date"].min().strftime("%Y-%m-%d")
        global_state.set_settings(c.port,
                                  dict(query="date == '{}'".format(min_date)))
        reshape_cfg = dict(index=["date", "security_id"], columns=["Col0"])
        resp = c.get(
            "/dtale/reshape/{}".format(c.port),
            query_string=dict(output="new",
                              type="transpose",
                              cfg=json.dumps(reshape_cfg)),
        )
        response_data = json.loads(resp.data)
        assert response_data["data_id"] == new_key
        assert len(global_state.keys()) == 2
        unittest.assertEqual(
            [d["name"] for d in global_state.get_dtypes(new_key)],
            [
                "index",
                "{} 00:00:00 100000".format(min_date),
                "{} 00:00:00 100001".format(min_date),
            ],
        )
        assert len(global_state.get_data(new_key)) == 1
        assert global_state.get_settings(new_key).get(
            "startup_code") is not None
        c.get("/dtale/cleanup-datasets", query_string=dict(dataIds=new_key))

        reshape_cfg = dict(index=["date", "security_id"])
        resp = c.get(
            "/dtale/reshape/{}".format(c.port),
            query_string=dict(output="override",
                              type="transpose",
                              cfg=json.dumps(reshape_cfg)),
        )
        response_data = json.loads(resp.data)
        assert response_data["data_id"] == c.port
Example #22
def instances():
    """
    Prints all URLs for the pieces of data currently being viewed
    """
    curr_data = global_state.get_data()
    if len(curr_data):
        print('\n'.join([DtaleData(data_id, build_url(ACTIVE_PORT, ACTIVE_HOST)).main_url() for data_id in curr_data]))
    else:
        print('currently no running instances...')
Example #23
def build_df(dataset, is_merge=False):
    data = global_state.get_data(dataset["dataId"])
    cols = dataset.get("columns")
    cols = list(set(cols + (dataset["index"] if is_merge else []))) if cols else None
    if cols:
        data = data[cols]
    if is_merge and dataset["index"]:
        data = data.set_index(dataset["index"])
    return data
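A hedged usage sketch, assuming a dataset spec shaped like the payloads posted to /dtale/merge in test_stack above (dataId/columns/index keys):

# illustrative spec; the keys mirror the datasets payload from test_stack
dataset = {"dataId": "1", "columns": ["A", "B"], "index": ["A"]}
df = build_df(dataset, is_merge=True)  # subset to A/B, then index on "A"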
Example #24
 def display_page(_ts, pathname, search):
     """
     dash callback which gets called on initial load of each dash page (main & popup)
     """
     dash_app.config.suppress_callback_exceptions = False
     params = chart_url_params(search)
     data_id = get_data_id(pathname)
     df = global_state.get_data(data_id)
     settings = global_state.get_settings(data_id) or {}
     return charts_layout(df, settings, **params)
Example #25
def load_filterable_data(data_id, req, query=None):
    filtered = get_bool_arg(req, "filtered")
    curr_settings = global_state.get_settings(data_id) or {}
    if filtered:
        final_query = query or build_query(data_id, curr_settings.get("query"))
        return run_query(
            handle_predefined(data_id),
            final_query,
            global_state.get_context_variables(data_id),
            ignore_empty=True,
        )
    return global_state.get_data(data_id)
Example #26
File: charts.py Project: shalevy1/dtale
def build_figure_data(data_id, chart_type=None, query=None, x=None, y=None, z=None, group=None, agg=None, window=None,
                      rolling_comp=None, **kwargs):
    """
    Builds chart figure data for loading into dash:`dash_core_components.Graph <dash-core-components/graph>` components

    :param data_id: integer string identifier for a D-Tale process's data
    :type data_id: str
    :param chart_type: type of chart (line, bar, pie, scatter...)
    :type chart_type: str
    :param query: pandas dataframe query string
    :type query: str, optional
    :param x: column to use for the X-Axis
    :type x: str
    :param y: columns to use for the Y-Axes
    :type y: list of str
    :param z: column to use for the Z-Axis
    :type z: str, optional
    :param group: column(s) to use for grouping
    :type group: list of str or str, optional
    :param agg: specific aggregation that can be applied to y or z axes.  Possible values are: count, first, last,
                mean, median, min, max, std, var, mad, prod, sum.  This is included in the label of the axis it is
                being applied to.
    :type agg: str, optional
    :param window: number of days to include in rolling aggregations
    :type window: int, optional
    :param rolling_comp: computation to use in rolling aggregations
    :type rolling_comp: str, optional
    :param kwargs: optional keyword arguments, here in case invalid arguments are passed to this function
    :type kwargs: dict
    :return: dictionary of series data, min/max ranges of columns used in chart
    :rtype: dict
    """
    code = None
    try:
        if not valid_chart(**dict(x=x, y=y, z=z, chart_type=chart_type, agg=agg, window=window,
                                  rolling_comp=rolling_comp)):
            return None, None

        data = run_query(
            global_state.get_data(data_id),
            query,
            global_state.get_context_variables(data_id)
        )
        code = build_code_export(data_id, query=query)
        chart_kwargs = dict(group_col=group, agg=agg, allow_duplicates=chart_type == 'scatter', rolling_win=window,
                            rolling_comp=rolling_comp)
        if chart_type in ZAXIS_CHARTS:
            chart_kwargs['z'] = z
            del chart_kwargs['group_col']
        data, chart_code = build_chart_data(data, x, y, **chart_kwargs)
        return data, code + chart_code
    except BaseException as e:
        return dict(error=str(e), traceback=str(traceback.format_exc())), code
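Per the docstring, a call for a simple aggregated line chart might look like the following (the data_id and column names are illustrative):

# returns (figure data, generated pandas code), or (error dict, code) on failure
data, code = build_figure_data('1', chart_type='line', x='date', y=['foo'], agg='mean')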
Example #27
def instances():
    """
    Prints all URLs for the pieces of data currently being viewed
    """
    curr_data = global_state.get_data()

    if len(curr_data):
        def _instance_msg(data_id):
            url = DtaleData(data_id, build_url(ACTIVE_PORT, ACTIVE_HOST)).main_url()
            return '{}:\t{}'.format(data_id, url)
        print('\n'.join(['ID\tURL'] + [_instance_msg(data_id) for data_id in curr_data]))
    else:
        print('currently no running instances...')
Example #28
def get_instance(data_id):
    """
    Returns a :class:`dtale.views.DtaleData` object for the data_id passed as input; returns None if the data_id
    does not exist.

    :param data_id: integer string identifier for a D-Tale process's data
    :type data_id: str
    :return: :class:`dtale.views.DtaleData`
    """
    data_id_str = str(data_id)
    if global_state.get_data(data_id_str) is not None:
        return DtaleData(data_id_str, build_url(ACTIVE_PORT, ACTIVE_HOST))
    return None
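Typical usage pairs this with dtale.show; main_url comes from the DtaleData API used elsewhere in these examples:

import pandas as pd
import dtale

dtale.show(pd.DataFrame({"a": [1, 2]}))
instance = dtale.get_instance("1")  # "1" assumes this is the first loaded instance
if instance is not None:
    print(instance.main_url())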
Example #29
def test_show_duplicates(unittest):
    import dtale.global_state as global_state

    global_state.clear_store()
    data_id, duplicates_type = "1", "show"
    data = {data_id: duplicates_data()}
    build_data_inst(data)

    builder = DuplicateCheck(data_id, duplicates_type, {"group": ["foo"]})
    unittest.assertEqual(builder.test(), {})
    with pytest.raises(NoDuplicatesToShowException):
        builder.execute()

    data = {data_id: duplicates_data()}
    build_data_inst(data)

    builder = DuplicateCheck(data_id, duplicates_type, {"group": ["foO", "bar"]})
    unittest.assertEqual(
        builder.test(),
        {
            "4, 5": dict(count=3, filter=["4", "5"]),
            "4, 6": dict(count=2, filter=["4", "6"]),
        },
    )
    new_data_id = builder.execute()
    assert new_data_id == 2
    unittest.assertEqual(
        global_state.get_data(new_data_id)["Foo"].tolist(), [1, 2, 3, 4, 5]
    )

    data = {data_id: duplicates_data()}
    build_data_inst(data)

    builder = DuplicateCheck(
        data_id, duplicates_type, {"group": ["foO", "bar"], "filter": ["4", "5"]}
    )
    new_data_id = builder.execute()
    unittest.assertEqual(global_state.get_data(new_data_id)["Foo"].tolist(), [1, 2, 3])
Example #30
File: app.py Project: krishnatray/dtale
def get_instance(data_id):
    """
    Returns a :class:`dtale.views.DtaleData` object for the data_id passed as input; returns None if the data_id
    does not exist.

    :param data_id: integer string identifier for a D-Tale process's data
    :type data_id: str
    :return: :class:`dtale.views.DtaleData`
    """
    data_id_str = str(data_id)
    if global_state.get_data(data_id_str) is not None:
        startup_url, _ = build_startup_url_and_app_root()
        return DtaleData(data_id_str, startup_url)
    return None