def get_store_contents():
    """
    Return an ordered tuple of attributes representing the store contents.
    Useful for ensuring key properties stay the same when switching between systems.
    """
    # Per-id view of data id "1", in a fixed attribute order.
    single = [
        serialized_dataframe(global_state.get_data('1')),
        global_state.get_dtypes('1'),
        global_state.get_settings('1'),
        global_state.get_metadata('1'),
        global_state.get_context_variables('1'),
        global_state.get_history('1'),
    ]
    # Full-store view of the same attributes, with dataframes serialized.
    serialized_data = {}
    for data_id, frame in global_state.get_data().items():
        serialized_data[data_id] = serialized_dataframe(frame)
    everything = [
        serialized_data,
        global_state.get_dtypes(),
        global_state.get_settings(),
        global_state.get_metadata(),
        global_state.get_context_variables(),
        global_state.get_history(),
    ]
    # Raw store sizes, same attribute order as above.
    sizes = [
        len(store)
        for store in (
            global_state.DATA,
            global_state.DTYPES,
            global_state.SETTINGS,
            global_state.METADATA,
            global_state.CONTEXT_VARIABLES,
            global_state.HISTORY,
        )
    ]
    return (single, everything, sizes)
def test_hide_columns():
    """show(hide_columns=...) should mark the named columns as not visible."""
    from dtale.app import show
    import dtale.global_state as global_state

    global_state.clear_store()
    frame = pd.DataFrame(dict(a=[1, 2], b=[2, 3]))
    with ExitStack() as stack:
        stack.enter_context(mock.patch("dtale.app.DtaleFlask", MockDtaleFlask))
        # stub out everything network/server related so show() runs inline
        for target, replacement in [
            ("dtale.app.DtaleFlask.run", mock.Mock()),
            ("dtale.app.find_free_port", mock.Mock(return_value=9999)),
            ("socket.gethostname", mock.Mock(return_value="localhost")),
            ("dtale.app.is_up", mock.Mock(return_value=False)),
            ("requests.get", mock.Mock()),
        ]:
            stack.enter_context(mock.patch(target, replacement))
        instance = show(
            data=frame, ignore_duplicate=True, hide_columns=["b"], subprocess=False
        )
        dtypes = global_state.get_dtypes(instance._data_id)
        # column "a" stays visible, column "b" was hidden
        assert dtypes[0]["visible"] is True
        assert not dtypes[1]["visible"] is True
def test_view(unittest):
    """Exercise /dtale/build-replacement: error handling for unknown types and
    duplicate column names, then value replacement into a new and an existing column.
    """
    from dtale.views import build_dtypes_state
    import dtale.global_state as global_state

    global_state.clear_store()
    df = replacements_data()
    with app.test_client() as c:
        data = {c.port: df}
        dtypes = {c.port: build_dtypes_state(df)}
        build_data_inst(data)
        build_dtypes(dtypes)
        # unknown replacement type should surface a "not implemented" error
        resp = c.get(
            "/dtale/build-replacement/{}".format(c.port),
            query_string=dict(type="not_implemented", name="test", cfg=json.dumps({})),
        )
        response_data = resp.json
        assert (
            response_data["error"]
            == "'not_implemented' replacement not implemented yet!"
        )
        # reusing an existing column name for the output should be rejected
        params = dict(
            type="value",
            col="e",
            name="a",
            cfg=json.dumps(
                {"value": [dict(value="nan", type="raw", replace="for test")]}
            ),
        )
        resp = c.get("/dtale/build-replacement/{}".format(c.port), query_string=params)
        response_data = resp.json
        assert response_data["error"] == "A column named 'a' already exists!"
        # valid request: replace NaNs in "e" into a new column "e2"
        params = dict(
            type="value",
            col="e",
            name="e2",
            cfg=json.dumps(
                {"value": [dict(value="nan", type="raw", replace="for test")]}
            ),
        )
        c.get("/dtale/build-replacement/{}".format(c.port), query_string=params)
        unittest.assertEqual(list(data[c.port]["e2"].values), ["a", "for test", "b"])
        dtypes = global_state.get_dtypes(c.port)
        assert dtypes[-1]["name"] == "e2"
        # FIX: the conditional must be parenthesized; previously this parsed as
        # `(dtype == "string") if PY3 else "mixed"`, making the assert vacuous on PY2
        # (a non-empty string is always truthy).
        assert dtypes[-1]["dtype"] == ("string" if PY3 else "mixed")
        assert not dtypes[-1]["hasMissing"]
        # without a name the replacement is applied in-place to column "e"
        del params["name"]
        c.get("/dtale/build-replacement/{}".format(c.port), query_string=params)
        dtypes = global_state.get_dtypes(c.port)
        unittest.assertEqual(list(data[c.port]["e"].values), ["a", "for test", "b"])
        e_dtype = next((d for d in dtypes if d["name"] == "e"))
        assert not e_dtype["hasMissing"]
def get_store_contents():
    """
    Return an ordered tuple of attributes representing the store contents.
    Useful for ensuring key properties stay the same when switching between systems.
    """
    # Per-id view of data id "1", in a fixed attribute order.
    single = [
        serialized_dataframe(global_state.get_data("1")),
        global_state.get_dtypes("1"),
        global_state.get_settings("1"),
        global_state.get_metadata("1"),
        global_state.get_context_variables("1"),
        global_state.get_history("1"),
    ]
    # Full-store view: one dict per attribute, keyed by int data id.
    everything = [
        {
            int(data_id): serialized_dataframe(instance.data)
            for data_id, instance in global_state.items()
        }
    ]
    for attr in ("dtypes", "settings", "metadata", "context_variables", "history"):
        everything.append(
            {
                int(data_id): getattr(instance, attr)
                for data_id, instance in global_state.items()
            }
        )
    # this store implementation has a single size metric
    sizes = [global_state.size()]
    return (single, everything, sizes)
def test_transpose(custom_data, unittest):
    """Exercise /dtale/reshape with type=transpose: an over-large transpose should
    error, a filtered one should build a new dataset, and output=override should
    write back onto the original data id.
    """
    from dtale.views import build_dtypes_state

    global_state.clear_store()
    with app.test_client() as c:
        data = {c.port: custom_data}
        dtypes = {c.port: build_dtypes_state(custom_data)}
        settings = {c.port: {}}
        build_data_inst(data)
        build_dtypes(dtypes)
        build_settings(settings)
        # transposing the full dataset produces too many columns -> error expected
        reshape_cfg = dict(index=["security_id"], columns=["Col0"])
        resp = c.get(
            "/dtale/reshape/{}".format(c.port),
            query_string=dict(output="new", type="transpose", cfg=json.dumps(reshape_cfg)),
        )
        response_data = json.loads(resp.data)
        new_key = int(c.port) + 1
        assert "error" in response_data
        # narrow to a single date via a settings query, then transpose succeeds
        min_date = custom_data["date"].min().strftime("%Y-%m-%d")
        global_state.set_settings(c.port, dict(query="date == '{}'".format(min_date)))
        reshape_cfg = dict(index=["date", "security_id"], columns=["Col0"])
        resp = c.get(
            "/dtale/reshape/{}".format(c.port),
            query_string=dict(output="new", type="transpose", cfg=json.dumps(reshape_cfg)),
        )
        response_data = json.loads(resp.data)
        assert response_data["data_id"] == new_key
        assert len(global_state.keys()) == 2
        unittest.assertEqual(
            [d["name"] for d in global_state.get_dtypes(new_key)],
            [
                "index",
                "{} 00:00:00 100000".format(min_date),
                "{} 00:00:00 100001".format(min_date),
            ],
        )
        assert len(global_state.get_data(new_key)) == 1
        assert global_state.get_settings(new_key).get(
            "startup_code") is not None
        # drop the new dataset so the next call starts from a single instance
        c.get("/dtale/cleanup-datasets", query_string=dict(dataIds=new_key))
        # output=override should transpose in place onto the original data id
        reshape_cfg = dict(index=["date", "security_id"])
        resp = c.get(
            "/dtale/reshape/{}".format(c.port),
            query_string=dict(output="override", type="transpose", cfg=json.dumps(reshape_cfg)),
        )
        response_data = json.loads(resp.data)
        assert response_data["data_id"] == c.port
def test_show_colab(unittest, builtin_pkg):
    """show() under Google Colab should derive its URL from google.colab.output's
    eval_js hook and use it for data updates, lookups and shutdown.
    """
    from dtale.app import show, get_instance, instances
    import dtale.views as views
    import dtale.global_state as global_state

    orig_import = __import__
    # fake the google.colab.output module; eval_js supplies the proxied host
    mock_eval_js = mock.Mock()
    mock_eval_js.eval_js = lambda _port: "http://colab_host"

    def import_mock(name, *args, **kwargs):
        # intercept only the colab import; everything else imports normally
        if name == "google.colab.output":
            return mock_eval_js
        return orig_import(name, *args, **kwargs)

    test_data = pd.DataFrame([dict(a=1, b=2)])
    with ExitStack() as stack:
        stack.enter_context(
            mock.patch("{}.__import__".format(builtin_pkg), side_effect=import_mock))
        stack.enter_context(mock.patch("dtale.app.USE_COLAB", True))
        stack.enter_context(mock.patch("dtale.app.ACTIVE_PORT", 40000))
        stack.enter_context(mock.patch("dtale.app.ACTIVE_HOST", "localhost"))
        mock_run = stack.enter_context(
            mock.patch("dtale.app.DtaleFlask.run", mock.Mock()))
        stack.enter_context(
            mock.patch("dtale.app.is_up", mock.Mock(return_value=False)))
        mock_requests = stack.enter_context(
            mock.patch("requests.get", mock.Mock()))
        instance = show(data=test_data, subprocess=False, name="foo",
                        ignore_duplicate=True)
        assert "http://colab_host" == instance._url
        mock_run.assert_called_once()
        pdt.assert_frame_equal(instance.data, test_data)
        # assigning .data should push new dtypes into global state
        tmp = test_data.copy()
        tmp["biz"] = 2.5
        instance.data = tmp
        unittest.assertEqual(
            global_state.get_dtypes(instance._data_id),
            views.build_dtypes_state(tmp),
            "should update app data/dtypes",
        )
        instance2 = get_instance(instance._data_id)
        assert instance2._url == instance._url
        instances()
        assert get_instance(
            20) is None  # should return None for invalid data ids
        # kill() should hit the colab-hosted shutdown endpoint
        instance.kill()
        mock_requests.assert_called_once()
        assert mock_requests.call_args[0][0] == "http://colab_host/shutdown"
def build(self):
    """Run the configured analysis and assemble its response payload,
    including the generated code export for this chart.
    """
    # code-export preamble carrying the standard charting imports
    import_lines = "\n".join([
        "import numpy as np",
        "import pandas as pd",
        "import plotly.graph_objs as go",
    ])
    base_code = build_code_export(
        self.data_id, imports="{}\n\n".format(import_lines)
    )
    return_data, analysis_code = self.analysis.build(self)
    payload = dict(
        code=build_final_chart_code(base_code + analysis_code),
        query=self.query,
        cols=global_state.get_dtypes(self.data_id),
        dtype=self.dtype,
        chart_type=self.analysis_type,
    )
    payload.update(return_data)
    return payload
def test_resample(unittest):
    """Resampling via /dtale/reshape should create a new dataset on a 17min grid."""
    from dtale.views import build_dtypes_state, format_data

    # two series on a 7-minute index spanning just over a day
    date_index = pd.date_range(
        "2000-10-01 23:30:00", "2000-10-03 00:30:00", freq="7min"
    )
    frame = pd.DataFrame(
        data={
            "col1": pd.Series(np.arange(len(date_index)) * 3, index=date_index),
            "col2": pd.Series(np.arange(len(date_index)) * 0.32, index=date_index),
        }
    )
    frame, _ = format_data(frame)
    global_state.clear_store()
    with app.test_client() as c:
        build_data_inst({c.port: frame})
        build_dtypes({c.port: build_dtypes_state(frame)})
        build_settings({c.port: {}})
        cfg = dict(index="index", columns=["col1"], freq="17min", agg="mean")
        resp = c.get(
            "/dtale/reshape/{}".format(c.port),
            query_string=dict(output="new", type="resample", cfg=json.dumps(cfg)),
        )
        result = json.loads(resp.data)
        new_key = int(c.port) + 1
        assert result["data_id"] == new_key
        assert len(global_state.keys()) == 2
        unittest.assertEqual(
            [d["name"] for d in global_state.get_dtypes(new_key)],
            ["index_17min", "col1"],
        )
        assert len(global_state.get_data(new_key)) == 90
        assert global_state.get_settings(new_key).get("startup_code") is not None
        # remove the dataset created by the reshape
        c.get("/dtale/cleanup-datasets", query_string=dict(dataIds=new_key))
def get_col_groups(data_id, data):
    """Split the columns of *data_id* into correlation-eligible groups.

    Returns a 3-tuple: (numeric column names, low-cardinality string column
    names, date column descriptors carrying a ``rolling`` flag).
    """
    numeric_cols = []
    str_cols = []
    date_cols = []
    for col_info in global_state.get_dtypes(data_id):
        name = col_info.get("name")
        classification = classify_type(col_info.get("dtype"))
        if classification in ["I", "F"]:
            numeric_cols.append(name)
        elif classification == "S":
            # only strings with limited cardinality are usable for correlations
            if col_info.get("unique_ct", 0) <= 50:
                str_cols.append(name)
        elif classification == "D":
            # even if a datetime column exists, we need to make sure that there is
            # enough data for a date to warrant a correlation,
            # https://github.com/man-group/dtale/issues/43
            counts = data[name].dropna().value_counts()
            if len(counts[counts > 1]) > 1:
                date_cols.append(dict(name=name, rolling=False))
            elif counts.eq(1).all():
                date_cols.append(dict(name=name, rolling=True))
    return numeric_cols, str_cols, date_cols
def test_pivot(custom_data, unittest):
    """Exercise /dtale/reshape with type=pivot: plain pivot, columnNameHeaders,
    and multi-value pivots, cleaning up each generated dataset.
    """
    from dtale.views import build_dtypes_state

    global_state.clear_store()
    with app.test_client() as c:
        data = {c.port: custom_data}
        dtypes = {c.port: build_dtypes_state(custom_data)}
        settings = {c.port: {}}
        build_data_inst(data)
        build_dtypes(dtypes)
        build_settings(settings)
        # basic pivot: date x security_id averaging Col0
        reshape_cfg = dict(index=["date"],
                           columns=["security_id"],
                           values=["Col0"],
                           aggfunc="mean")
        resp = c.get(
            "/dtale/reshape/{}".format(c.port),
            query_string=dict(output="new", type="pivot", cfg=json.dumps(reshape_cfg)),
        )
        response_data = json.loads(resp.data)
        new_key = int(c.port) + 1
        assert response_data["data_id"] == new_key
        assert len(global_state.keys()) == 2
        unittest.assertEqual(
            [d["name"] for d in global_state.get_dtypes(new_key)],
            ["date", "100000", "100001"],
        )
        assert len(global_state.get_data(new_key)) == 365
        assert global_state.get_settings(new_key).get(
            "startup_code") is not None
        # cleanup removes the pivoted dataset, leaving only the original
        resp = c.get("/dtale/cleanup-datasets",
                     query_string=dict(dataIds=new_key))
        assert json.loads(resp.data)["success"]
        assert len(global_state.keys()) == 1
        # columnNameHeaders prefixes output columns with the pivot column name
        reshape_cfg["columnNameHeaders"] = True
        reshape_cfg["aggfunc"] = "sum"
        resp = c.get(
            "/dtale/reshape/{}".format(c.port),
            query_string=dict(output="new", type="pivot", cfg=json.dumps(reshape_cfg)),
        )
        response_data = json.loads(resp.data)
        assert response_data["data_id"] == new_key
        assert len(global_state.keys()) == 2
        unittest.assertEqual(
            [d["name"] for d in global_state.get_dtypes(new_key)],
            ["date", "security_id-100000", "security_id-100001"],
        )
        assert len(global_state.get_data(new_key)) == 365
        assert global_state.get_settings(new_key).get(
            "startup_code") is not None
        c.get("/dtale/cleanup-datasets", query_string=dict(dataIds=new_key))
        # multiple values produce "<value> <security_id>" column names
        reshape_cfg["columnNameHeaders"] = False
        reshape_cfg["values"] = ["Col0", "Col1"]
        resp = c.get(
            "/dtale/reshape/{}".format(c.port),
            query_string=dict(output="new", type="pivot", cfg=json.dumps(reshape_cfg)),
        )
        response_data = json.loads(resp.data)
        assert response_data["data_id"] == new_key
        assert len(global_state.keys()) == 2
        unittest.assertEqual(
            [d["name"] for d in global_state.get_dtypes(new_key)],
            [
                "date", "Col0 100000", "Col0 100001", "Col1 100000",
                "Col1 100001"
            ],
        )
        assert len(global_state.get_data(new_key)) == 365
        assert global_state.get_settings(new_key).get(
            "startup_code") is not None
        c.get("/dtale/cleanup-datasets", query_string=dict(dataIds=new_key))
def test_aggregate(custom_data, unittest):
    """Exercise /dtale/reshape with type=aggregate: per-column agg lists, a single
    function over selected columns, and a function over all columns.
    """
    from dtale.views import build_dtypes_state

    global_state.clear_store()
    with app.test_client() as c:
        data = {c.port: custom_data}
        dtypes = {c.port: build_dtypes_state(custom_data)}
        settings = {c.port: {}}
        build_data_inst(data)
        build_dtypes(dtypes)
        build_settings(settings)
        # per-column aggregations: two on Col0, one on Col1
        reshape_cfg = dict(
            index="date",
            agg=dict(type="col", cols={
                "Col0": ["sum", "mean"],
                "Col1": ["count"]
            }),
        )
        resp = c.get(
            "/dtale/reshape/{}".format(c.port),
            query_string=dict(output="new", type="aggregate", cfg=json.dumps(reshape_cfg)),
        )
        response_data = json.loads(resp.data)
        new_key = int(c.port) + 1
        assert response_data["data_id"] == new_key
        assert len(global_state.keys()) == 2
        unittest.assertEqual(
            [d["name"] for d in global_state.get_dtypes(new_key)],
            ["date", "Col0 sum", "Col0 mean", "Col1 count"],
        )
        assert len(global_state.get_data(new_key)) == 365
        assert global_state.get_settings(new_key).get(
            "startup_code") is not None
        c.get("/dtale/cleanup-datasets", query_string=dict(dataIds=new_key))
        # single function applied to an explicit column list
        reshape_cfg = dict(index="date",
                           agg=dict(type="func", func="mean", cols=["Col0", "Col1"]))
        resp = c.get(
            "/dtale/reshape/{}".format(c.port),
            query_string=dict(output="new", type="aggregate", cfg=json.dumps(reshape_cfg)),
        )
        response_data = json.loads(resp.data)
        assert response_data["data_id"] == new_key
        assert len(global_state.keys()) == 2
        unittest.assertEqual(
            [d["name"] for d in global_state.get_dtypes(new_key)],
            ["date", "Col0", "Col1"],
        )
        assert len(global_state.get_data(new_key)) == 365
        assert global_state.get_settings(new_key).get(
            "startup_code") is not None
        c.get("/dtale/cleanup-datasets", query_string=dict(dataIds=new_key))
        # no column list: the function is applied to every aggregatable column
        reshape_cfg = dict(index="date", agg=dict(type="func", func="mean"))
        resp = c.get(
            "/dtale/reshape/{}".format(c.port),
            query_string=dict(output="new", type="aggregate", cfg=json.dumps(reshape_cfg)),
        )
        response_data = json.loads(resp.data)
        assert response_data["data_id"] == new_key
        assert len(global_state.keys()) == 2
        unittest.assertEqual(
            [d["name"] for d in global_state.get_dtypes(new_key)],
            [
                "date", "security_id", "int_val", "Col0", "Col1", "Col2",
                "bool_val"
            ],
        )
        assert len(global_state.get_data(new_key)) == 365
        assert global_state.get_settings(new_key).get(
            "startup_code") is not None
        c.get("/dtale/cleanup-datasets", query_string=dict(dataIds=new_key))
def test_show_jupyter_server_proxy(unittest):
    """show() under jupyter-server-proxy should build proxied URLs, both with the
    default /user/<name>/proxy root and with a custom app_root.
    """
    from dtale.app import show, get_instance, instances
    import dtale.views as views
    import dtale.global_state as global_state

    global_state.clear_store()
    test_data = pd.DataFrame([dict(a=1, b=2)])
    with ExitStack() as stack:
        stack.enter_context(mock.patch("dtale.app.JUPYTER_SERVER_PROXY", True))
        mock_run = stack.enter_context(
            mock.patch("dtale.app.DtaleFlask.run", mock.Mock()))
        stack.enter_context(
            mock.patch("dtale.app.is_up", mock.Mock(return_value=False)))
        stack.enter_context(mock.patch("dtale.app.ACTIVE_PORT", 40000))
        stack.enter_context(mock.patch("dtale.app.ACTIVE_HOST", "localhost"))
        mock_requests = stack.enter_context(
            mock.patch("requests.get", mock.Mock()))
        instance = show(data=test_data, subprocess=False, name="foo",
                        ignore_duplicate=True)
        # default proxy root is /user/<current user>/proxy/<port>
        assert "/user/{}/proxy/40000".format(
            getpass.getuser()) == instance._url
        mock_run.assert_called_once()
        pdt.assert_frame_equal(instance.data, test_data)
        # assigning .data should push new dtypes into global state
        tmp = test_data.copy()
        tmp["biz"] = 2.5
        instance.data = tmp
        unittest.assertEqual(
            global_state.get_dtypes(instance._data_id),
            views.build_dtypes_state(tmp),
            "should update app data/dtypes",
        )
        instance2 = get_instance(instance._data_id)
        assert instance2._url == instance._url
        instances()
        assert get_instance(
            20) is None  # should return None for invalid data ids
        instance.kill()
        mock_requests.assert_called_once()
        assert mock_requests.call_args[0][
            0] == "/user/{}/proxy/40000/shutdown".format(getpass.getuser())
    with ExitStack() as stack:
        stack.enter_context(mock.patch("dtale.app.JUPYTER_SERVER_PROXY", True))
        stack.enter_context(mock.patch("dtale.app.ACTIVE_PORT", 40000))
        stack.enter_context(mock.patch("dtale.app.ACTIVE_HOST", "localhost"))
        mock_run = stack.enter_context(
            mock.patch("dtale.app.DtaleFlask.run", mock.Mock()))
        stack.enter_context(
            mock.patch("dtale.app.is_up", mock.Mock(return_value=False)))
        mock_requests = stack.enter_context(
            mock.patch("requests.get", mock.Mock()))
        # custom app_root replaces the /user/... proxy prefix entirely
        instance = show(
            data=test_data,
            subprocess=False,
            ignore_duplicate=True,
            app_root="/custom_root/",
        )
        assert "/custom_root/40000" == instance._url
        mock_run.assert_called_once()
        instance2 = get_instance(instance._data_id)
        # this is a known bug where get_instance will not work if you've specified an `app_root' in show()
        assert not instance2._url == instance._url
        instances()
        instance.kill()
        mock_requests.assert_called_once()
        assert mock_requests.call_args[0][0] == "/custom_root/40000/shutdown"
def test_show_ngrok(unittest, builtin_pkg):
    """show() with USE_NGROK should derive its URL from flask_ngrok's tunnel and
    refuse to run on Python 2 (flask_ngrok requires PY3).
    """
    from dtale.app import show, get_instance, instances
    import dtale.views as views
    import dtale.global_state as global_state

    orig_import = __import__
    # fake the flask_ngrok module; _run_ngrok supplies the tunnel host
    mock_flask_ngrok = mock.Mock()
    mock_flask_ngrok._run_ngrok = lambda: "ngrok_host"

    def import_mock(name, *args, **kwargs):
        # intercept only the flask_ngrok import; everything else imports normally
        if name == "flask_ngrok":
            return mock_flask_ngrok
        return orig_import(name, *args, **kwargs)

    test_data = pd.DataFrame([dict(a=1, b=2)])
    with ExitStack() as stack:
        stack.enter_context(
            mock.patch("{}.__import__".format(builtin_pkg),
                       side_effect=import_mock))
        stack.enter_context(mock.patch("dtale.app.USE_NGROK", True))
        stack.enter_context(mock.patch("dtale.app.PY3", True))
        mock_run = stack.enter_context(
            mock.patch("dtale.app.DtaleFlask.run", mock.Mock()))
        stack.enter_context(
            mock.patch("dtale.app.is_up", mock.Mock(return_value=False)))
        mock_requests = stack.enter_context(
            mock.patch("requests.get", mock.Mock()))
        instance = show(data=test_data, subprocess=False, name="foo",
                        ignore_duplicate=True)
        assert "http://ngrok_host" == instance._url
        mock_run.assert_called_once()
        pdt.assert_frame_equal(instance.data, test_data)
        # assigning .data should push new dtypes into global state
        tmp = test_data.copy()
        tmp["biz"] = 2.5
        instance.data = tmp
        unittest.assertEqual(
            global_state.get_dtypes(instance._data_id),
            views.build_dtypes_state(tmp),
            "should update app data/dtypes",
        )
        instance2 = get_instance(instance._data_id)
        assert instance2._url == instance._url
        instances()
        assert get_instance(
            20) is None  # should return None for invalid data ids
        # kill() should hit the ngrok-hosted shutdown endpoint
        instance.kill()
        mock_requests.assert_called_once()
        assert mock_requests.call_args[0][0] == "http://ngrok_host/shutdown"
        # assert global_state.get_name(1) == "foo"
    with ExitStack() as stack:
        stack.enter_context(mock.patch("dtale.app.USE_NGROK", True))
        stack.enter_context(mock.patch("dtale.app.PY3", False))
        # ngrok support requires Python 3
        with pytest.raises(Exception):
            show(data=test_data)
def test_show(unittest):
    """End-to-end coverage of dtale.app.show(): URL construction, data updates,
    instance lookup/kill, data_loader usage, RangeIndex handling, notebook mode,
    debug templating, subprocess threading, build_app defaults and duplicate
    column rejection.
    """
    from dtale.app import show, get_instance, instances
    import dtale.views as views
    import dtale.global_state as global_state

    global_state.clear_store()
    instances()
    test_data = pd.DataFrame([dict(a=1, b=2)])
    with ExitStack() as stack:
        mock_run = stack.enter_context(
            mock.patch("dtale.app.DtaleFlask.run", mock.Mock()))
        mock_find_free_port = stack.enter_context(
            mock.patch("dtale.app.find_free_port", mock.Mock(return_value=9999)))
        stack.enter_context(
            mock.patch("socket.gethostname", mock.Mock(return_value="localhost")))
        stack.enter_context(
            mock.patch("dtale.app.is_up", mock.Mock(return_value=False)))
        mock_requests = stack.enter_context(
            mock.patch("requests.get", mock.Mock()))
        instance = show(data=test_data, subprocess=False, ignore_duplicate=True)
        assert "http://localhost:9999" == instance._url
        assert "http://localhost:9999/dtale/main/1" == instance.main_url()
        mock_run.assert_called_once()
        mock_find_free_port.assert_called_once()
        pdt.assert_frame_equal(instance.data, test_data)
        # assigning .data should push new dtypes into global state
        tmp = test_data.copy()
        tmp["biz"] = 2.5
        instance.data = tmp
        unittest.assertEqual(
            global_state.get_dtypes(instance._data_id),
            views.build_dtypes_state(tmp),
            "should update app data/dtypes",
        )
        instance2 = get_instance(instance._data_id)
        assert instance2._url == instance._url
        # removed data name test. wait for proper data name implementation.
        instances()
        assert get_instance(
            20) is None  # should return None for invalid data ids
        instance.kill()
        mock_requests.assert_called_once()
        assert mock_requests.call_args[0][
            0] == "http://localhost:9999/shutdown"
        # a second show() on the same port gets the next data id
        instance3 = show(data=test_data, subprocess=False, name="It's Here",
                         ignore_duplicate=True)
        assert instance3.main_url() == "http://localhost:9999/dtale/main/2"
        pdt.assert_frame_equal(instance3.data, test_data)
    with ExitStack() as stack:
        mock_run = stack.enter_context(
            mock.patch("dtale.app.DtaleFlask.run", mock.Mock()))
        mock_find_free_port = stack.enter_context(
            mock.patch("dtale.app.find_free_port", mock.Mock(return_value=9999)))
        stack.enter_context(
            mock.patch("socket.gethostname", mock.Mock(return_value="localhost")))
        stack.enter_context(
            mock.patch("dtale.app.is_up", mock.Mock(return_value=False)))
        # explicit port + force: data_loader supplies the frame, no port scan
        mock_data_loader = mock.Mock(return_value=test_data)
        instance = show(
            data_loader=mock_data_loader,
            subprocess=False,
            port=9999,
            force=True,
            debug=True,
            ignore_duplicate=True,
        )
        assert "http://localhost:9999" == instance._url
        mock_run.assert_called_once()
        mock_find_free_port.assert_not_called()
        mock_data_loader.assert_called_once()
        _, kwargs = mock_run.call_args
        assert "9999" in instance._url
    # RangeIndex test
    test_data = pd.DataFrame([1, 2, 3])
    with ExitStack() as stack:
        stack.enter_context(mock.patch("dtale.app.DtaleFlask", MockDtaleFlask))
        stack.enter_context(
            mock.patch("dtale.app.find_free_port", mock.Mock(return_value=9999)))
        stack.enter_context(
            mock.patch("socket.gethostname", mock.Mock(return_value="localhost")))
        stack.enter_context(
            mock.patch("dtale.app.is_up", mock.Mock(return_value=False)))
        stack.enter_context(mock.patch("dtale.app.logger", mock.Mock()))
        instance = show(data=test_data, subprocess=False, name="foo",
                        ignore_duplicate=True)
        # integer column labels are stringified on load
        assert np.array_equal(instance.data["0"].values, test_data[0].values)
    with ExitStack() as stack:
        stack.enter_context(mock.patch("dtale.app.DtaleFlask", MockDtaleFlask))
        stack.enter_context(
            mock.patch("dtale.app.find_free_port", mock.Mock(return_value=9999)))
        stack.enter_context(
            mock.patch("socket.gethostname", mock.Mock(return_value="localhost")))
        stack.enter_context(
            mock.patch("dtale.app.is_up", mock.Mock(return_value=False)))
        stack.enter_context(mock.patch("dtale.app.logger", mock.Mock()))
        stack.enter_context(
            mock.patch("dtale.views.in_ipython_frontend",
                       mock.Mock(return_value=False)))
        # health endpoint fails on the first poll then succeeds, so show()
        # exercises its retry loop before rendering the notebook iframe
        get_calls = {"ct": 0}
        getter = namedtuple("get", "ok")

        def mock_requests_get(url, verify=True):
            if url.endswith("/health"):
                is_ok = get_calls["ct"] > 0
                get_calls["ct"] += 1
                return getter(is_ok)
            return getter(True)

        stack.enter_context(mock.patch("requests.get", mock_requests_get))
        mock_display = stack.enter_context(
            mock.patch("IPython.display.display", mock.Mock()))
        mock_iframe = stack.enter_context(
            mock.patch("IPython.display.IFrame", mock.Mock()))
        instance = show(
            data=test_data,
            subprocess=True,
            name="foo1",
            notebook=True,
            ignore_duplicate=True,
        )
        mock_display.assert_called_once()
        mock_iframe.assert_called_once()
        assert mock_iframe.call_args[0][
            0] == "http://localhost:9999/dtale/iframe/{}".format(
                instance._data_id)
    assert type(instance.__str__()).__name__ == "str"
    assert type(instance.__repr__()).__name__ == "str"

    # debug=True should turn on jinja auto-reload; verified inside run()
    class MockDtaleFlaskRunTest(Flask):
        def __init__(self,
                     import_name,
                     reaper_on=True,
                     url=None,
                     app_root=None,
                     *args,
                     **kwargs):
            kwargs.pop("instance_relative_config", None)
            kwargs.pop("static_url_path", None)
            super(MockDtaleFlaskRunTest, self).__init__(import_name, *args,
                                                        **kwargs)

        def run(self, *args, **kwargs):
            assert self.jinja_env.auto_reload
            assert self.config["TEMPLATES_AUTO_RELOAD"]

    with mock.patch("dtale.app.DtaleFlask", MockDtaleFlaskRunTest):
        show(
            data=test_data,
            subprocess=False,
            port=9999,
            debug=True,
            ignore_duplicate=True,
        )
    # subprocess=True should launch the server on a background thread
    with mock.patch("dtale.app._thread.start_new_thread",
                    mock.Mock()) as mock_thread:
        show(data=test_data, subprocess=True, ignore_duplicate=True)
        mock_thread.assert_called()
    test_data = pd.DataFrame([dict(a=1, b=2)])
    with ExitStack() as stack:
        mock_build_app = stack.enter_context(
            mock.patch("dtale.app.build_app", mock.Mock()))
        stack.enter_context(
            mock.patch("dtale.app.find_free_port", mock.Mock(return_value=9999)))
        stack.enter_context(
            mock.patch("socket.gethostname", mock.Mock(return_value="localhost")))
        stack.enter_context(
            mock.patch("dtale.app.is_up", mock.Mock(return_value=False)))
        stack.enter_context(mock.patch("requests.get", mock.Mock()))
        show(data=test_data, subprocess=False, name="foo2",
             ignore_duplicate=True)
        _, kwargs = mock_build_app.call_args
        unittest.assertEqual(
            {
                "app_root": None,
                "host": "localhost",
                "reaper_on": True
            },
            kwargs,
            "build_app should be called with defaults",
        )
    # test adding duplicate column
    with ExitStack() as stack:
        stack.enter_context(mock.patch("dtale.app.DtaleFlask", MockDtaleFlask))
        stack.enter_context(
            mock.patch("dtale.app.find_free_port", mock.Mock(return_value=9999)))
        stack.enter_context(
            mock.patch("socket.gethostname", mock.Mock(return_value="localhost")))
        stack.enter_context(
            mock.patch("dtale.app.is_up", mock.Mock(return_value=False)))
        stack.enter_context(mock.patch("requests.get", mock.Mock()))
        instance = show(
            data=pd.DataFrame([dict(a=1, b=2)]),
            subprocess=False,
            name="foo3",
            ignore_duplicate=True,
        )
        with pytest.raises(Exception):
            instance.data = instance.data.rename(columns={"b": "a"})
        # a duplicate frame without ignore_duplicate shouldn't add an instance
        curr_instance_ct = global_state.size()
        show(data=pd.DataFrame([dict(a=1, b=2)]), subprocess=False, name="foo")
        assert curr_instance_ct == global_state.size()
    # cleanup
    global_state.cleanup()