def test_convert():
    from dtale.views import startup
    from tests.dtale.test_replacements import replacements_data

    with app.test_client() as c:
        with ExitStack() as stack:
            data, dtypes, datasets, dataset_dim, settings = {}, {}, {}, {}, {}
            stack.enter_context(mock.patch("dtale.global_state.DATA", data))
            stack.enter_context(mock.patch("dtale.global_state.DTYPES", dtypes))
            stack.enter_context(mock.patch("dtale.global_state.DATASETS", datasets))
            stack.enter_context(mock.patch("dtale.global_state.DATASET_DIM", dataset_dim))
            stack.enter_context(mock.patch("dtale.global_state.SETTINGS", settings))
            startup(URL, data=replacements_data(), data_id=c.port)

            resp = c.get(
                "/dtale/to-xarray/{}".format(c.port),
                query_string=dict(index=json.dumps(["a"])),
            )
            assert resp.status_code == 200
            assert c.port in datasets
            assert settings[c.port]["locked"] == ["a"]
def update_dtale_data(player_data):
    curr_data = get_instance("1")
    if curr_data is not None:
        # append data to pre-existing data in D-Tale
        curr_data = curr_data.data
        curr_data = curr_data[~(curr_data["name"] == player_data["name"].values[0])]
        player_data = pd.concat([curr_data, player_data], ignore_index=True)
        cleanup("1")
    # load data to D-Tale
    startup(data_id="1", data=player_data)
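# A minimal usage sketch for the helper above, assuming `get_instance`, `cleanup`, `startup`
# and `pd` are already imported in its module and that the data registered under data_id "1"
# has a "name" column; the sample row and its columns are made up for illustration.
import dtale
import pandas as pd

new_row = pd.DataFrame([{"name": "Jane Doe", "points": 27}])  # hypothetical columns
update_dtale_data(new_row)  # drops any stale "Jane Doe" row, then re-registers data_id "1"
print(dtale.get_instance("1").data.tail())  # confirm the appended row is visible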
def test_view(unittest):
    from dtale.views import startup

    with app.test_client() as c:
        with ExitStack() as stack:
            data, dtypes, datasets, dataset_dim = {}, {}, {}, {}
            stack.enter_context(mock.patch("dtale.global_state.DATA", data))
            stack.enter_context(mock.patch("dtale.global_state.DTYPES", dtypes))
            stack.enter_context(mock.patch("dtale.global_state.DATASETS", datasets))
            stack.enter_context(
                mock.patch("dtale.global_state.DATASET_DIM", dataset_dim)
            )
            startup(URL, data=xarray_data(), data_id=c.port)
            assert c.port in datasets

            response = c.get("/dtale/main/{}".format(c.port))
            assert 'input id="xarray" value="True"' not in str(response.data)
            assert 'input id="xarray_dim" value="{}"' not in str(response.data)

            resp = c.get("/dtale/xarray-coordinates/{}".format(c.port))
            response_data = resp.json
            expected = [
                {
                    "count": 3,
                    "dtype": "str64" if PY3 else "string16",
                    "name": "location",
                },
                {"count": 731, "dtype": "datetime64[ns]", "name": "time"},
            ]
            unittest.assertEqual(
                sorted(response_data["data"], key=lambda c: c["name"]), expected
            )

            resp = c.get("/dtale/xarray-dimension-values/{}/location".format(c.port))
            response_data = resp.json
            unittest.assertEqual(
                response_data["data"],
                [{"value": "IA"}, {"value": "IN"}, {"value": "IL"}],
            )

            resp = c.get(
                "/dtale/update-xarray-selection/{}".format(c.port),
                query_string=dict(selection=json.dumps(dict(location="IA"))),
            )
            assert resp.status_code == 200
            assert list(data[c.port].location.unique()) == ["IA"]
            assert dataset_dim[c.port]["location"] == "IA"

            resp = c.get(
                "/dtale/update-xarray-selection/{}".format(c.port),
                query_string=dict(selection=json.dumps(dict())),
            )
            assert resp.status_code == 200
            assert list(data[c.port].location.unique()) == ["IA", "IN", "IL"]
def test_startup(unittest):
    import dtale.views as views

    with pytest.raises(BaseException) as error:
        views.startup(URL)
    assert 'data loaded is None!' in str(error.value)

    with pytest.raises(BaseException) as error:
        views.startup(URL, dict())
    assert 'data loaded must be one of the following types: pandas.DataFrame, pandas.Series, pandas.DatetimeIndex'\
           in str(error.value)

    test_data = pd.DataFrame([dict(date=pd.Timestamp('now'), security_id=1, foo=1.5)])
    test_data = test_data.set_index(['date', 'security_id'])
    instance = views.startup(URL, data_loader=lambda: test_data)
    pdt.assert_frame_equal(instance.data, test_data.reset_index())
    unittest.assertEqual(views.SETTINGS[instance._data_id], dict(locked=['date', 'security_id']),
                         'should lock index columns')

    test_data = test_data.reset_index()
    instance = views.startup(URL, data=test_data)
    pdt.assert_frame_equal(instance.data, test_data)
    unittest.assertEqual(views.SETTINGS[instance._data_id], dict(locked=[]), 'no index = nothing locked')

    test_data = pd.DataFrame([dict(date=pd.Timestamp('now'), security_id=1)])
    test_data = test_data.set_index('security_id').date
    instance = views.startup(URL, data_loader=lambda: test_data)
    pdt.assert_frame_equal(instance.data, test_data.reset_index())
    unittest.assertEqual(views.SETTINGS[instance._data_id], dict(locked=['security_id']),
                         'should lock index columns')

    test_data = pd.DatetimeIndex([pd.Timestamp('now')], name='date')
    instance = views.startup(URL, data_loader=lambda: test_data)
    pdt.assert_frame_equal(instance.data, test_data.to_frame(index=False))
    unittest.assertEqual(views.SETTINGS[instance._data_id], dict(locked=[]), 'should lock index columns')

    test_data = pd.MultiIndex.from_arrays([[1, 2], [3, 4]], names=('a', 'b'))
    instance = views.startup(URL, data_loader=lambda: test_data)
    pdt.assert_frame_equal(instance.data, test_data.to_frame(index=False))
    unittest.assertEqual(views.SETTINGS[instance._data_id], dict(locked=[]), 'should lock index columns')

    test_data = pd.DataFrame([
        dict(date=pd.Timestamp('now'), security_id=1, foo=1.0, bar=2.0),
        dict(date=pd.Timestamp('now'), security_id=1, foo=2.0, bar=np.inf)
    ], columns=['date', 'security_id', 'foo', 'bar'])
    instance = views.startup(URL, data_loader=lambda: test_data)
    unittest.assertEqual(
        {'name': 'bar', 'dtype': 'float64', 'index': 3},
        next((dt for dt in views.DTYPES[instance._data_id] if dt['name'] == 'bar'), None),
    )
def uploader_file():
    if request.method == 'POST':
        f = request.files['file']
        data = pd.read_csv(f)
        # data = business_discovery_new.return_business_discovery(list(username_df.values.flatten()))
        instance = startup(data_id="1", data=data, ignore_duplicate=True)
        return redirect(f"/dtale/main/{instance._data_id}", code=302)
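# A hedged wiring sketch for the route above: the redirect to /dtale/main/<data_id> only resolves
# if the route is registered on the same Flask app that serves D-Tale's routes. The /upload path,
# endpoint name, and port below are assumptions for illustration.
from dtale.app import build_app

app = build_app(reaper_on=False)  # D-Tale's own Flask app, so /dtale/main/<data_id> is already routed
app.add_url_rule("/upload", "uploader_file", uploader_file, methods=["GET", "POST"])

if __name__ == "__main__":
    app.run(host="0.0.0.0", port=8080)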
def test_convert():
    from dtale.views import startup
    from tests.dtale.test_replacements import replacements_data
    import dtale.global_state as global_state

    global_state.clear_store()
    with app.test_client() as c:
        global_state.new_data_inst(c.port)
        startup(URL, data=replacements_data(), data_id=c.port)

        resp = c.get(
            "/dtale/to-xarray/{}".format(c.port),
            query_string=dict(index=json.dumps(["a"])),
        )
        assert resp.status_code == 200
        assert global_state.get_dataset(c.port) is not None
        assert global_state.get_settings(c.port)["locked"] == ["a"]
def _show():
    selected_port = int(port or find_free_port())
    startup(data=data, data_loader=data_loader, port=selected_port)
    app = build_app()
    if debug:
        app.jinja_env.auto_reload = True
        app.config['TEMPLATES_AUTO_RELOAD'] = True
    else:
        getLogger("werkzeug").setLevel(LOG_ERROR)
    logger.info('D-Tale started at: http://{}:{}'.format(socket.gethostname(), selected_port))
    app.run(host=host, port=selected_port, debug=debug, reaper_on=reaper_on)
def load_daily_prediction(date):
    df = load_data_daily(date)
    data_id = uuid.uuid4().int
    _ = startup(data_id=data_id, data=df, name=date,
                ignore_duplicate=True, allow_cell_edits=False, inplace=True)
    return redirect(url_for("dtale.view_iframe", data_id=data_id))
def load_prediction(job_id):
    job_info = REDIS_DB.hget(JOB_INFO_KEY, job_id)
    data_id = uuid.UUID(job_id).int
    _ = startup(data_id=data_id, data=load_data(job_id), name=job_info.search_conditions,
                ignore_duplicate=True, allow_cell_edits=False, inplace=True)
    return redirect(url_for("dtale.view_iframe", data_id=data_id))
def write():
    with st.spinner("Loading other elements ..."):
        st.title("Other Streamlit elements")

        st.header("PowerBI reports")
        url1 = "https://app.powerbi.com/view?r=eyJrIjoiMDA4NGFhNmEtZDE4Mi00MWNhLTg5OTMtMWE2MzYxNTVmMTFlIiwidCI6ImI3M2IxZDZlLTIxZDUtNGUzOC1iMjM5LTgxMzRkOWQyYmY3OCIsImMiOjh9"
        st.subheader("We can embed a report built in PowerBI (800x540)")
        st.markdown('''
            <iframe width="840" height="540" src="%s" frameborder="0" style="border:0" allowfullscreen="true"></iframe>
        ''' % url1, unsafe_allow_html=True)
        st.write("code:")
        st.code("""url1="https://app.powerbi.com/view?r=eyJrIjoiMDA4NGFhNmEtZDE4Mi00MWNhLTg5OTMtMWE2MzYxNTVmMTFlIiwidCI6ImI3M2IxZDZlLTIxZDUtNGUzOC1iMjM5LTgxMzRkOWQyYmY3OCIsImMiOjh9"
st.subheader("We can show an 800x540 report")
st.markdown(''' <iframe width="840" height="540" src="%s" frameborder="0" style="border:0" allowfullscreen="true"></iframe>''' % url1, unsafe_allow_html=True)""")

        url2 = "https://app.powerbi.com/view?r=eyJrIjoiMjEwYjgzNWUtZGQ4Ni00ODMwLWI0NjgtNzk3NjkxODIwNDM4IiwidCI6IjFmZjk0MGQ4LWFkOGEtNDNkZi1iZjQxLWI2OThkMWJkODVmNiIsImMiOjh9"
        st.subheader("Or a slightly smaller report (560x360)")
        st.markdown("""
            <iframe width="560" height="360" src="%s" frameborder="0" style="border:0" allowfullscreen="true"></iframe>
        """ % url2, unsafe_allow_html=True)
        st.write("code:")
        st.code('''url2="https://app.powerbi.com/view?r=eyJrIjoiMjEwYjgzNWUtZGQ4Ni00ODMwLWI0NjgtNzk3NjkxODIwNDM4IiwidCI6IjFmZjk0MGQ4LWFkOGEtNDNkZi1iZjQxLWI2OThkMWJkODVmNiIsImMiOjh9"
st.subheader("Or something smaller at 560x360")
st.markdown(""" <iframe width="560" height="360" src="%s" frameborder="0" style="border:0" allowfullscreen="true"></iframe>""" % url2, unsafe_allow_html=True)''')

        st.header("Interactive dataframes in D-Tale")
        st.markdown("""D-Tale is an open-source tool that makes it easy to view, analyze and modify pandas data
            structures. For more information see its [github](https://github.com/man-group/dtale)""")
        df = pd.read_csv(r'dataset/indian_liver.csv')
        startup(data_id="1", data=df)
        st.markdown("""<iframe width="840" height="540" src="/dtale/main/1" />""", unsafe_allow_html=True)
        st.write("code:")
        st.code('''st.markdown("""D-Tale is an open-source tool that makes it easy to view, analyze and modify pandas data
            structures. For more information see its [github](https://github.com/man-group/dtale)""")
df = pd.read_csv(r'dataset/indian_liver.csv')
startup(data_id="1", data=df)
st.markdown("""<iframe width="840" height="540" src="/dtale/main/1" />""", unsafe_allow_html=True)''')
def execute(self):
    from dtale.views import startup

    data = global_state.get_data(self.data_id)
    try:
        df, code = self.checker.remove(data)
        instance = startup(data=df, **self.checker.startup_kwargs)
        curr_settings = global_state.get_settings(instance._data_id)
        global_state.set_settings(
            instance._data_id,
            dict_merge(curr_settings, dict(startup_code=code)),
        )
        return instance._data_id
    except NoDuplicatesException:
        return self.data_id
def offline_chart(df, chart_type=None, query=None, x=None, y=None, z=None, group=None, agg=None, window=None,
                  rolling_comp=None, barmode=None, barsort=None, yaxis=None, filepath=None, **kwargs):
    """
    Builds the HTML for a plotly chart figure to be saved to a file or output to a jupyter notebook

    :param df: dataframe to build the chart from
    :type df: :class:`pandas:pandas.DataFrame`
    :param chart_type: type of chart, possible options are line|bar|pie|scatter|3d_scatter|surface|heatmap
    :type chart_type: str
    :param query: pandas dataframe query string
    :type query: str, optional
    :param x: column to use for the X-Axis
    :type x: str
    :param y: columns to use for the Y-Axes
    :type y: list of str
    :param z: column to use for the Z-Axis
    :type z: str, optional
    :param group: column(s) to use for grouping
    :type group: list of str or str, optional
    :param agg: specific aggregation that can be applied to y or z axes. Possible values are: count, first, last,
                mean, median, min, max, std, var, mad, prod, sum. This is included in the label of the axis it is
                being applied to.
    :type agg: str, optional
    :param window: number of days to include in rolling aggregations
    :type window: int, optional
    :param rolling_comp: computation to use in rolling aggregations
    :type rolling_comp: str, optional
    :param barmode: mode to use for bar chart display. possible values are stack|group(default)|overlay|relative
    :type barmode: str, optional
    :param barsort: axis name to sort the bars in a bar chart by (default is the 'x', but other options are any of
                    the column names used in the 'y' parameter)
    :type barsort: str, optional
    :param yaxis: dictionary specifying the min/max for each y-axis in your chart
    :type yaxis: dict, optional
    :param filepath: location to save HTML output
    :type filepath: str, optional
    :param kwargs: optional keyword arguments, here in case invalid arguments are passed to this function
    :type kwargs: dict
    :return: possible outcomes are:
        - if run within a jupyter notebook and no 'filepath' is specified it will print the resulting HTML
          within a cell in your notebook
        - if 'filepath' is specified it will save the chart to the path specified
        - otherwise it will return the HTML output as a string
    """
    instance = startup(url=None, data=df, data_id=999)
    output = instance.offline_chart(chart_type=chart_type, query=query, x=x, y=y, z=z, group=group, agg=agg,
                                    window=window, rolling_comp=rolling_comp, barmode=barmode, barsort=barsort,
                                    yaxis=yaxis, filepath=filepath, **kwargs)
    global_state.cleanup()
    return output
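# A brief usage sketch for offline_chart() as defined above; the dataframe and column names
# are made up for illustration.
import pandas as pd

sample = pd.DataFrame(dict(category=list("aabb"), value=[1.0, 2.0, 3.0, 4.0]))

# with no filepath the HTML is returned (or rendered inline when run inside a notebook)
html = offline_chart(sample, chart_type="bar", x="category", y=["value"], agg="sum")

# or write the chart straight to disk
offline_chart(sample, chart_type="line", x="category", y=["value"], filepath="chart.html")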
def test_startup(unittest):
    import dtale.views as views

    views.startup()
    assert views.DATA is None

    test_data = pd.DataFrame([dict(date=pd.Timestamp('now'), security_id=1, foo=1.5)])
    test_data = test_data.set_index(['date', 'security_id'])
    views.startup(data_loader=lambda: test_data, port=80)
    pdt.assert_frame_equal(views.DATA, test_data.reset_index())
    unittest.assertEqual(views.SETTINGS['80'], dict(locked=['date', 'security_id']),
                         'should lock index columns')

    test_data = test_data.reset_index()
    views.startup(data=test_data, port=80)
    pdt.assert_frame_equal(views.DATA, test_data)
    unittest.assertEqual(views.SETTINGS['80'], dict(locked=[]), 'no index = nothing locked')
def show(data=None, host=None, port=None, name=None, debug=False, subprocess=True, data_loader=None,
         reaper_on=True, open_browser=False, notebook=False, force=False, context_vars=None,
         ignore_duplicate=False, app_root=None, **kwargs):
    """
    Entry point for kicking off D-Tale :class:`flask:flask.Flask` process from python process

    :param data: data which D-Tale will display
    :type data: :class:`pandas:pandas.DataFrame` or :class:`pandas:pandas.Series` or
                :class:`pandas:pandas.DatetimeIndex` or :class:`pandas:pandas.MultiIndex`, optional
    :param host: hostname of D-Tale, defaults to 0.0.0.0
    :type host: str, optional
    :param port: port number of D-Tale process, defaults to any open port on server
    :type port: str, optional
    :param name: optional label to assign a D-Tale process
    :type name: str, optional
    :param debug: will turn on :class:`flask:flask.Flask` debug functionality, defaults to False
    :type debug: bool, optional
    :param subprocess: run D-Tale as a subprocess of your current process, defaults to True
    :type subprocess: bool, optional
    :param data_loader: function to load your data
    :type data_loader: func, optional
    :param reaper_on: turn on subprocess which will terminate D-Tale after 1 hour of inactivity
    :type reaper_on: bool, optional
    :param open_browser: if true, this will try using the :mod:`python:webbrowser` package to automatically open
                         your default browser to your D-Tale process
    :type open_browser: bool, optional
    :param notebook: if true, this will try displaying an :class:`ipython:IPython.display.IFrame`
    :type notebook: bool, optional
    :param force: if true, this will force the D-Tale instance to run on the specified host/port by killing any
                  other process running at that location
    :type force: bool, optional
    :param context_vars: a dictionary of the variables that will be available for use in user-defined expressions,
                         such as filters
    :type context_vars: dict, optional
    :param ignore_duplicate: if true, this will not check if this data matches any other data previously loaded
                             to D-Tale
    :type ignore_duplicate: bool, optional

    :Example:

        >>> import dtale
        >>> import pandas as pd
        >>> df = pd.DataFrame([dict(a=1, b=2, c=3)])
        >>> dtale.show(df)
        D-Tale started at: http://hostname:port

        ..link displayed in logging can be copied and pasted into any browser
    """
    global ACTIVE_HOST, ACTIVE_PORT, USE_NGROK, USE_COLAB, JUPYTER_SERVER_PROXY

    try:
        logfile, log_level, verbose = map(kwargs.get, ["logfile", "log_level", "verbose"])
        setup_logging(logfile, log_level or "info", verbose)

        if USE_NGROK:
            if not PY3:
                raise Exception(
                    "In order to use ngrok you must be using Python 3 or higher!"
                )

            from flask_ngrok import _run_ngrok

            ACTIVE_HOST = _run_ngrok()
            ACTIVE_PORT = None
        else:
            initialize_process_props(host, port, force)

        app_url = build_url(ACTIVE_PORT, ACTIVE_HOST)
        startup_url, final_app_root = build_startup_url_and_app_root(app_root)
        instance = startup(
            startup_url,
            data=data,
            data_loader=data_loader,
            name=name,
            context_vars=context_vars,
            ignore_duplicate=ignore_duplicate,
        )
        is_active = not running_with_flask_debug() and is_up(app_url)
        if is_active:

            def _start():
                if open_browser:
                    instance.open_browser()

        else:
            if USE_NGROK:
                thread = Timer(1, _run_ngrok)
                thread.setDaemon(True)
                thread.start()

            def _start():
                app = build_app(
                    app_url,
                    reaper_on=reaper_on,
                    host=ACTIVE_HOST,
                    app_root=final_app_root,
                )
                if debug and not USE_NGROK:
                    app.jinja_env.auto_reload = True
                    app.config["TEMPLATES_AUTO_RELOAD"] = True
                else:
                    getLogger("werkzeug").setLevel(LOG_ERROR)

                if open_browser:
                    instance.open_browser()

                # hide banner message in production environments
                cli = sys.modules.get("flask.cli")
                if cli is not None:
                    cli.show_server_banner = lambda *x: None

                if USE_NGROK:
                    app.run(threaded=True)
                else:
                    app.run(host="0.0.0.0", port=ACTIVE_PORT, debug=debug, threaded=True)

        if subprocess:
            if is_active:
                _start()
            else:
                _thread.start_new_thread(_start, ())

            if notebook:
                instance.notebook()
        else:
            logger.info("D-Tale started at: {}".format(app_url))
            _start()

        return instance
    except DuplicateDataError as ex:
        print(
            "It looks like this data may have already been loaded to D-Tale based on shape and column names. Here is "
            "the URL of the data that seems to match it:\n\n{}\n\nIf you still want to load this data please use the "
            "following command:\n\ndtale.show(df, ignore_duplicate=True)".format(
                DtaleData(ex.data_id, build_url(ACTIVE_PORT, ACTIVE_HOST)).main_url()
            )
        )
        return None
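# A hedged usage sketch building on the docstring example above; the kwargs shown simply exercise
# parameters documented there. main_url() is the same DtaleData method used in the duplicate-data
# handler; kill() is assumed to be available on the returned instance in this version.
import dtale
import pandas as pd

df = pd.DataFrame([dict(a=1, b=2, c=3)])

# run as a background subprocess and skip the duplicate-data check
d = dtale.show(df, subprocess=True, open_browser=False, ignore_duplicate=True)
print(d.main_url())  # grid URL for this instance

d.kill()  # shut the instance down when finished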
def show(data=None, host='0.0.0.0', port=None, name=None, debug=False, subprocess=True, data_loader=None,
         reaper_on=True, open_browser=False, notebook=False, **kwargs):
    """
    Entry point for kicking off D-Tale Flask process from python process

    :param data: data which D-Tale will display
    :type data: :class:`pandas:pandas.DataFrame` or :class:`pandas:pandas.Series` or
                :class:`pandas:pandas.DatetimeIndex` or :class:`pandas:pandas.MultiIndex`, optional
    :param host: hostname of D-Tale, defaults to 0.0.0.0
    :type host: str, optional
    :param port: port number of D-Tale process, defaults to any open port on server
    :type port: str, optional
    :param name: optional label to assign a D-Tale process
    :type name: str, optional
    :param debug: will turn on Flask debug functionality, defaults to False
    :type debug: bool, optional
    :param subprocess: run D-Tale as a subprocess of your current process, defaults to True
    :type subprocess: bool, optional
    :param data_loader: function to load your data
    :type data_loader: func, optional
    :param reaper_on: turn on subprocess which will terminate D-Tale after 1 hour of inactivity
    :type reaper_on: bool, optional
    :param open_browser: if true, this will try using the :mod:`python:webbrowser` package to automatically open
                         your default browser to your D-Tale process
    :type open_browser: bool, optional
    :param notebook: if true, this will try displaying an :class:`ipython:IPython.display.IFrame`
    :type notebook: bool, optional

    :Example:

        >>> import dtale
        >>> import pandas as pd
        >>> df = pd.DataFrame([dict(a=1, b=2, c=3)])
        >>> dtale.show(df)
        D-Tale started at: http://hostname:port

        ..link displayed in logging can be copied and pasted into any browser
    """
    logfile, log_level, verbose = map(kwargs.get, ['logfile', 'log_level', 'verbose'])
    setup_logging(logfile, log_level or 'info', verbose)

    selected_port = int(port or find_free_port())
    instance = startup(data=data, data_loader=data_loader, port=selected_port, name=name)

    def _show():
        app = build_app(reaper_on=reaper_on)
        if debug:
            app.jinja_env.auto_reload = True
            app.config['TEMPLATES_AUTO_RELOAD'] = True
        else:
            getLogger("werkzeug").setLevel(LOG_ERROR)
        url = build_url(selected_port)
        logger.info('D-Tale started at: {}'.format(url))
        if open_browser:
            webbrowser.get().open(url)
        app.run(host=host, port=selected_port, debug=debug)

    if subprocess:
        _thread.start_new_thread(_show, ())
        if notebook:
            instance.notebook()
    else:
        _show()
    return instance
def show(data=None, host=None, port=None, name=None, debug=False, subprocess=True, data_loader=None,
         reaper_on=True, open_browser=False, notebook=False, force=False, **kwargs):
    """
    Entry point for kicking off D-Tale :class:`flask:flask.Flask` process from python process

    :param data: data which D-Tale will display
    :type data: :class:`pandas:pandas.DataFrame` or :class:`pandas:pandas.Series` or
                :class:`pandas:pandas.DatetimeIndex` or :class:`pandas:pandas.MultiIndex`, optional
    :param host: hostname of D-Tale, defaults to 0.0.0.0
    :type host: str, optional
    :param port: port number of D-Tale process, defaults to any open port on server
    :type port: str, optional
    :param name: optional label to assign a D-Tale process
    :type name: str, optional
    :param debug: will turn on :class:`flask:flask.Flask` debug functionality, defaults to False
    :type debug: bool, optional
    :param subprocess: run D-Tale as a subprocess of your current process, defaults to True
    :type subprocess: bool, optional
    :param data_loader: function to load your data
    :type data_loader: func, optional
    :param reaper_on: turn on subprocess which will terminate D-Tale after 1 hour of inactivity
    :type reaper_on: bool, optional
    :param open_browser: if true, this will try using the :mod:`python:webbrowser` package to automatically open
                         your default browser to your D-Tale process
    :type open_browser: bool, optional
    :param notebook: if true, this will try displaying an :class:`ipython:IPython.display.IFrame`
    :type notebook: bool, optional
    :param force: if true, this will force the D-Tale instance to run on the specified host/port by killing any
                  other process running at that location
    :type force: bool, optional

    :Example:

        >>> import dtale
        >>> import pandas as pd
        >>> df = pd.DataFrame([dict(a=1, b=2, c=3)])
        >>> dtale.show(df)
        D-Tale started at: http://hostname:port

        ..link displayed in logging can be copied and pasted into any browser
    """
    logfile, log_level, verbose = map(kwargs.get, ['logfile', 'log_level', 'verbose'])
    setup_logging(logfile, log_level or 'info', verbose)

    initialize_process_props(host, port, force)
    url = build_url(ACTIVE_PORT, ACTIVE_HOST)
    instance = startup(url, data=data, data_loader=data_loader, name=name)
    is_active = not running_with_flask_debug() and is_up(url)
    if is_active:

        def _start():
            if open_browser:
                instance.open_browser()

    else:

        def _start():
            app = build_app(url, reaper_on=reaper_on, host=ACTIVE_HOST)
            if debug:
                app.jinja_env.auto_reload = True
                app.config['TEMPLATES_AUTO_RELOAD'] = True
            else:
                getLogger("werkzeug").setLevel(LOG_ERROR)

            if open_browser:
                instance.open_browser()

            # hide banner message in production environments
            cli = sys.modules.get('flask.cli')
            if cli is not None:
                cli.show_server_banner = lambda *x: None

            app.run(host='0.0.0.0', port=ACTIVE_PORT, debug=debug, threaded=True)

    if subprocess:
        if is_active:
            _start()
        else:
            _thread.start_new_thread(_start, ())

        if notebook:
            instance.notebook()
    else:
        logger.info('D-Tale started at: {}'.format(url))
        _start()

    return instance
def create_df(request):
    df = pd.DataFrame(dict(a=[1, 2, 3], b=[4, 5, 6]))
    instance = startup("", data=df, ignore_duplicate=True)
    resp = redirect(f"/flask/dtale/main/{instance._data_id}")
    return resp
def test_view(unittest):
    from dtale.views import startup
    import dtale.global_state as global_state

    global_state.clear_store()
    with app.test_client() as c:
        global_state.new_data_inst(c.port)
        startup(URL, data=xarray_data(), data_id=c.port)
        assert global_state.get_dataset(c.port) is not None

        response = c.get("/dtale/main/{}".format(c.port))
        assert 'input id="xarray" value="True"' not in str(response.data)
        assert 'input id="xarray_dim" value="{}"' not in str(response.data)

        resp = c.get("/dtale/code-export/{}".format(c.port))
        assert resp.status_code == 200
        response_data = resp.json
        assert response_data["success"]

        resp = c.get("/dtale/xarray-coordinates/{}".format(c.port))
        response_data = resp.json
        expected = [
            {
                "count": 3,
                "dtype": "str64" if PY3 else "string16",
                "name": "location",
            },
            {"count": 731, "dtype": "datetime64[ns]", "name": "time"},
        ]
        unittest.assertEqual(
            sorted(response_data["data"], key=lambda c: c["name"]), expected
        )

        resp = c.get("/dtale/xarray-dimension-values/{}/location".format(c.port))
        response_data = resp.json
        unittest.assertEqual(
            response_data["data"],
            [{"value": "IA"}, {"value": "IN"}, {"value": "IL"}],
        )

        resp = c.get(
            "/dtale/update-xarray-selection/{}".format(c.port),
            query_string=dict(selection=json.dumps(dict(location="IA"))),
        )
        assert resp.status_code == 200
        assert list(global_state.get_data(c.port).location.unique()) == ["IA"]
        assert global_state.get_dataset_dim(c.port)["location"] == "IA"

        resp = c.get(
            "/dtale/update-xarray-selection/{}".format(c.port),
            query_string=dict(selection=json.dumps(dict())),
        )
        assert resp.status_code == 200
        assert list(global_state.get_data(c.port).location.unique()) == [
            "IA",
            "IN",
            "IL",
        ]

        resp = c.get("/dtale/code-export/{}".format(c.port))
        assert resp.status_code == 200
        response_data = resp.json
        assert response_data["success"]

    with app.test_client() as c:
        zero_dim_xarray = xarray_data().sel(location="IA", time="2000-01-01")
        startup(URL, data=zero_dim_xarray, data_id=c.port)
        assert global_state.get_dataset(c.port) is not None

        response = c.get("/dtale/main/{}".format(c.port))
        assert 'input id="xarray" value="True"' not in str(response.data)
        assert 'input id="xarray_dim" value="{}"' not in str(response.data)
def show(data=None, data_loader=None, name=None, context_vars=None, **options):
    """
    Entry point for kicking off D-Tale :class:`flask:flask.Flask` process from python process

    :param data: data which D-Tale will display
    :type data: :class:`pandas:pandas.DataFrame` or :class:`pandas:pandas.Series` or
                :class:`pandas:pandas.DatetimeIndex` or :class:`pandas:pandas.MultiIndex`, optional
    :param host: hostname of D-Tale, defaults to 0.0.0.0
    :type host: str, optional
    :param port: port number of D-Tale process, defaults to any open port on server
    :type port: str, optional
    :param name: optional label to assign a D-Tale process
    :type name: str, optional
    :param debug: will turn on :class:`flask:flask.Flask` debug functionality, defaults to False
    :type debug: bool, optional
    :param subprocess: run D-Tale as a subprocess of your current process, defaults to True
    :type subprocess: bool, optional
    :param data_loader: function to load your data
    :type data_loader: func, optional
    :param reaper_on: turn on subprocess which will terminate D-Tale after 1 hour of inactivity
    :type reaper_on: bool, optional
    :param open_browser: if true, this will try using the :mod:`python:webbrowser` package to automatically open
                         your default browser to your D-Tale process
    :type open_browser: bool, optional
    :param notebook: if true, this will try displaying an :class:`ipython:IPython.display.IFrame`
    :type notebook: bool, optional
    :param force: if true, this will force the D-Tale instance to run on the specified host/port by killing any
                  other process running at that location
    :type force: bool, optional
    :param context_vars: a dictionary of the variables that will be available for use in user-defined expressions,
                         such as filters
    :type context_vars: dict, optional
    :param ignore_duplicate: if true, this will not check if this data matches any other data previously loaded
                             to D-Tale
    :type ignore_duplicate: bool, optional
    :param app_root: Optional path to prepend to the routes of D-Tale. This is used when making use of
                     Jupyterhub server proxy
    :type app_root: str, optional
    :param allow_cell_edits: If false, this will not allow users to edit cells directly in their D-Tale grid
    :type allow_cell_edits: bool, optional
    :param inplace: If true, this will call `reset_index(inplace=True)` on the dataframe used as a way to save
                    memory. Otherwise this will create a brand new dataframe, thus doubling memory but leaving the
                    dataframe input unchanged.
    :type inplace: bool, optional
    :param drop_index: If true, this will drop any pre-existing index on the dataframe input.
    :type drop_index: bool, optional
    :param hide_shutdown: If true, this will hide the "Shutdown" button from users
    :type hide_shutdown: bool, optional
    :param github_fork: If true, this will display a "Fork me on GitHub" ribbon in the upper right-hand corner of
                        the app
    :type github_fork: bool, optional

    :Example:

        >>> import dtale
        >>> import pandas as pd
        >>> df = pd.DataFrame([dict(a=1, b=2, c=3)])
        >>> dtale.show(df)
        D-Tale started at: http://hostname:port

        ..link displayed in logging can be copied and pasted into any browser
    """
    global ACTIVE_HOST, ACTIVE_PORT, USE_NGROK

    try:
        final_options = dtale_config.build_show_options(options)
        logfile, log_level, verbose = map(
            final_options.get, ["logfile", "log_level", "verbose"]
        )
        setup_logging(logfile, log_level or "info", verbose)

        if USE_NGROK:
            if not PY3:
                raise Exception(
                    "In order to use ngrok you must be using Python 3 or higher!"
                )

            from flask_ngrok import _run_ngrok

            ACTIVE_HOST = _run_ngrok()
            ACTIVE_PORT = None
        else:
            initialize_process_props(
                final_options["host"], final_options["port"], final_options["force"]
            )

        app_url = build_url(ACTIVE_PORT, ACTIVE_HOST)
        startup_url, final_app_root = build_startup_url_and_app_root(
            final_options["app_root"]
        )
        instance = startup(
            startup_url,
            data=data,
            data_loader=data_loader,
            name=name,
            context_vars=context_vars,
            ignore_duplicate=final_options["ignore_duplicate"],
            allow_cell_edits=final_options["allow_cell_edits"],
            inplace=final_options["inplace"],
            drop_index=final_options["drop_index"],
            precision=final_options["precision"],
            show_columns=final_options["show_columns"],
            hide_columns=final_options["hide_columns"],
        )
        instance.started_with_open_browser = final_options["open_browser"]
        is_active = not running_with_flask_debug() and is_up(app_url)
        if is_active:

            def _start():
                if final_options["open_browser"]:
                    instance.open_browser()

        else:
            if USE_NGROK:
                thread = Timer(1, _run_ngrok)
                thread.setDaemon(True)
                thread.start()

            def _start():
                app = build_app(
                    app_url,
                    reaper_on=final_options["reaper_on"],
                    host=ACTIVE_HOST,
                    app_root=final_app_root,
                )
                if final_options["debug"] and not USE_NGROK:
                    app.jinja_env.auto_reload = True
                    app.config["TEMPLATES_AUTO_RELOAD"] = True
                else:
                    getLogger("werkzeug").setLevel(LOG_ERROR)

                if final_options["open_browser"]:
                    instance.open_browser()

                # hide banner message in production environments
                cli = sys.modules.get("flask.cli")
                if cli is not None:
                    cli.show_server_banner = lambda *x: None

                if USE_NGROK:
                    app.run(threaded=True)
                else:
                    app.run(
                        host="0.0.0.0",
                        port=ACTIVE_PORT,
                        debug=final_options["debug"],
                        threaded=True,
                    )

        if final_options["subprocess"]:
            if is_active:
                _start()
            else:
                _thread.start_new_thread(_start, ())

            if final_options["notebook"]:
                instance.notebook()
        else:
            logger.info("D-Tale started at: {}".format(app_url))
            _start()

        return instance
    except DuplicateDataError as ex:
        print(
            "It looks like this data may have already been loaded to D-Tale based on shape and column names. Here is "
            "the URL of the data that seems to match it:\n\n{}\n\nIf you still want to load this data please use the "
            "following command:\n\ndtale.show(df, ignore_duplicate=True)".format(
                DtaleData(ex.data_id, build_url(ACTIVE_PORT, ACTIVE_HOST)).main_url()
            )
        )
        return None
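# A hedged sketch of passing the option-style kwargs this version routes through
# dtale_config.build_show_options(); exact option support varies by dtale version, and the
# dataframe below is made up for illustration.
import dtale
import pandas as pd

df = pd.DataFrame(dict(a=[1, 2], b=[3.0, 4.5]))

d = dtale.show(
    df,
    ignore_duplicate=True,
    allow_cell_edits=False,  # forwarded to startup(allow_cell_edits=...) above
    show_columns=["a"],      # forwarded to startup(show_columns=...) above
    open_browser=False,
)
print(d.main_url())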
def test_startup(unittest):
    import dtale.views as views

    with pytest.raises(BaseException) as error:
        views.startup()
    assert 'data loaded is None!' in str(error.value)

    with pytest.raises(BaseException) as error:
        views.startup(dict())
    assert 'data loaded must be one of the following types: pandas.DataFrame, pandas.Series, pandas.DatetimeIndex'\
           in str(error.value)

    test_data = pd.DataFrame([dict(date=pd.Timestamp('now'), security_id=1, foo=1.5)])
    test_data = test_data.set_index(['date', 'security_id'])
    port = '80'
    views.startup(data_loader=lambda: test_data, port=port)
    pdt.assert_frame_equal(views.DATA[port], test_data.reset_index())
    unittest.assertEqual(views.SETTINGS[port], dict(locked=['date', 'security_id']),
                         'should lock index columns')

    test_data = test_data.reset_index()
    port = '81'
    views.startup(data=test_data, port=port)
    pdt.assert_frame_equal(views.DATA[port], test_data)
    unittest.assertEqual(views.SETTINGS[port], dict(locked=[]), 'no index = nothing locked')

    test_data = pd.DataFrame([dict(date=pd.Timestamp('now'), security_id=1)])
    test_data = test_data.set_index('security_id').date
    port = '82'
    views.startup(data_loader=lambda: test_data, port=port)
    pdt.assert_frame_equal(views.DATA[port], test_data.reset_index())
    unittest.assertEqual(views.SETTINGS[port], dict(locked=['security_id']),
                         'should lock index columns')

    test_data = pd.DatetimeIndex([pd.Timestamp('now')], name='date')
    port = '83'
    views.startup(data_loader=lambda: test_data, port=port)
    pdt.assert_frame_equal(views.DATA[port], test_data.to_frame(index=False))
    unittest.assertEqual(views.SETTINGS[port], dict(locked=[]), 'should lock index columns')

    test_data = pd.MultiIndex.from_arrays([[1, 2], [3, 4]], names=('a', 'b'))
    port = '84'
    views.startup(data_loader=lambda: test_data, port=port)
    pdt.assert_frame_equal(views.DATA[port], test_data.to_frame(index=False))
    unittest.assertEqual(views.SETTINGS[port], dict(locked=[]), 'should lock index columns')
def create_df(data):
    # cleanup("1")
    instance = startup(data_id="1", data=data, ignore_duplicate=True)
    return redirect(f"/dtale/main/{instance._data_id}", code=302)
</style>
"""

PREAMBLE = (
    "# DISCLAIMER: 'df' refers to the data you passed in when calling 'dtale.show'\n\n"
    "import pandas as pd\n\n"
    "if isinstance(df, (pd.DatetimeIndex, pd.MultiIndex)):\n"
    "\tdf = df.to_frame(index=False)\n\n"
    "# remove any pre-existing indices for ease of use in the D-Tale code, but this is not required\n"
    "df = df.reset_index().drop('index', axis=1, errors='ignore')\n"
    "df.columns = [str(c) for c in df.columns]  # update columns to strings in case they are numbers"
)

curr_instance = get_instance("1")
if curr_instance is None:
    df = pd.DataFrame(dict(a=list(range(1, 11))))
    startup(data_id="1", data=df)
    curr_instance = get_instance("1")

html = f"""
{CSS}
<iframe src="/dtale/main/1" style="height: 100%;width: 100%"/>
"""
st.markdown(html, unsafe_allow_html=True)

col1, col2 = st.beta_columns((1, 3))
reload_columns = col1.button("Reload")
columns = [c for c in curr_instance.data.columns]
if reload_columns:
    curr_instance = get_instance("1")
    columns = [c for c in curr_instance.data.columns]
selected_column = col2.radio("Column Analysis", columns)
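# Note: this snippet targets an older Streamlit release; `st.beta_columns` has since been replaced
# by `st.columns`, so on current Streamlit versions the equivalent call would be:
# col1, col2 = st.columns((1, 3))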