def test_jinja_output():
    """The 'fork on github' banner renders only when build_app(github_fork=True).

    Checks both the main grid template and the dash charts template with the
    flag off (banner absent) and on (banner present).
    """
    import dtale.views as views

    df = pd.DataFrame([1, 2, 3])
    df, _ = views.format_data(df)
    with build_app(url=URL).test_client() as c:
        with ExitStack() as stack:
            stack.enter_context(mock.patch('dtale.views.DATA', {c.port: df}))
            stack.enter_context(
                mock.patch('dtale.views.DTYPES', {c.port: views.build_dtypes_state(df)}))
            stack.enter_context(
                mock.patch('dtale.dash_application.views.DATA', {c.port: df}))
            response = c.get('/dtale/main/{}'.format(c.port))
            assert 'span id="forkongithub"' not in str(response.data)
            response = c.get('/charts/{}'.format(c.port))
            assert 'span id="forkongithub"' not in str(response.data)
    with build_app(url=URL, github_fork=True).test_client() as c:
        with ExitStack() as stack:
            stack.enter_context(mock.patch('dtale.views.DATA', {c.port: df}))
            stack.enter_context(
                mock.patch('dtale.views.DTYPES', {c.port: views.build_dtypes_state(df)}))
            stack.enter_context(
                mock.patch('dtale.dash_application.views.DATA', {c.port: df}))
            # use the mocked store's key (c.port) rather than a hard-coded id of 1
            # so this request lines up with the patched DATA/DTYPES, matching the
            # github_fork=False section above
            response = c.get('/dtale/main/{}'.format(c.port))
            assert 'span id="forkongithub"' in str(response.data)
            response = c.get('/charts/{}'.format(c.port))
            assert 'span id="forkongithub"' in str(response.data)
def test_status_codes():
    """Auth endpoints exist only while authentication is active.

    With auth enabled, /login renders (200) and /logout redirects (302);
    with auth disabled, both endpoints 404.
    """
    active_settings = {
        "active": True,
        "username": "******",
        "password": "******"
    }
    with mock.patch(
        "dtale.auth.global_state.get_auth_settings", return_value=active_settings
    ):
        with build_app(url=URL).test_client() as client:
            assert client.get("/login").status_code == 200
            assert client.get("/logout").status_code == 302

    with mock.patch(
        "dtale.auth.global_state.get_auth_settings",
        return_value={"active": False},
    ):
        with build_app(url=URL).test_client() as client:
            assert client.get("/login").status_code == 404
            assert client.get("/logout").status_code == 404
def test_time_dataframe_dataset():
    """Loading the time_dataframe sample stores a frame with no nulls in 'A'."""
    with build_app(url=URL).test_client() as client:
        with ExitStack() as stack:
            store = {}
            stack.enter_context(mock.patch("dtale.global_state.DATA", store))
            client.get("/dtale/datasets", query_string=dict(dataset="time_dataframe"))
            assert store["1"]["A"].isnull().sum() == 0
def test_failure():
    """Gage R&R endpoint returns an error payload when the request is incomplete."""
    import dtale.views as views

    # operator/part measurement fixture: columns o (operator), p (part), m1-m3
    measurements = [
        [1, 1, 3.29, 3.41, 3.64],
        [1, 2, 2.44, 2.32, 2.42],
        [1, 3, 4.34, 4.17, 4.27],
        [2, 1, 3.08, 3.25, 3.07],
        [2, 2, 2.53, 1.78, 2.32],
        [2, 3, 4.19, 3.94, 4.34],
        [2, 4, 3.01, 4.03, 3.2],
        [2, 5, 2.44, 1.8, 1.72],
        [3, 1, 3.04, 2.89, 2.85],
        [3, 2, 1.62, 1.87, 2.04],
        [3, 3, 3.88, 4.09, 3.67],
        [3, 4, 3.14, 3.2, 3.11],
        [3, 5, 1.54, 1.93, 1.55],
    ]
    df, _ = views.format_data(
        pd.DataFrame(measurements, columns=["o", "p", "m1", "m2", "m3"]))
    with build_app(url=URL).test_client() as client:
        build_data_inst({client.port: df})
        response = client.get(
            "/dtale/gage-rnr/{}".format(client.port),
            query_string=dict(operator=json.dumps(["o"])),
        )
        assert "error" in response.json
def test_network_analysis(network_data, unittest):
    """Network-analysis endpoint summarizes the graph built from network_data."""
    import dtale.views as views

    df, _ = views.format_data(network_data)
    with build_app(url=URL).test_client() as client:
        with mock.patch("dtale.global_state.DATA", {client.port: df}):
            response = client.get(
                "/dtale/network-analysis/{}".format(client.port),
                query_string={"to": "to", "from": "from", "weight": "weight"},
            )
            expected_summary = {
                "avg_weight": 2.68,
                "edge_ct": 36,
                "leaf_ct": 3,
                "max_edge": "10 (source: h, target: j)",
                "min_edge": "1 (source: j, target: k)",
                "most_connected_node": "g (Connections: 5)",
                "node_ct": 14,
                "triangle_ct": 2,
            }
            unittest.assertEqual(response.json["data"], expected_summary)
def test_login():
    """Successful login redirects to the main grid and populates the session.

    NOTE(review): the credential strings here appear redacted ("******") while
    the session assertion expects username "foo" — confirm against the
    unredacted fixture values.
    """
    import dtale.views as views

    df, _ = views.format_data(pd.DataFrame(dict(a=[1, 2, 3, 4, 5, 6])))
    with ExitStack() as stack:
        stack.enter_context(
            mock.patch(
                "dtale.auth.global_state.get_auth_settings",
                return_value={
                    "active": True,
                    "username": "******",
                    "password": "******"
                },
            ))
        mock_session = stack.enter_context(
            mock.patch("dtale.auth.session", dict()))
        with build_app(url=URL).test_client() as client:
            build_data_inst({client.port: df})
            # static assets are served without authentication
            assert client.get("/dtale/static/css/main.css").status_code == 200
            # unauthenticated page access redirects to /login
            response = client.get("/dtale/main/{}".format(client.port))
            assert response.status_code == 302
            assert response.location == "http://localhost:{}/login".format(client.port)
            response = client.post(
                "/login", data=dict(username="******", password="******"))
            assert response.location == "http://localhost:{}/dtale/main/{}".format(
                client.port, client.port)
            assert mock_session["logged_in"]
            assert mock_session["username"] == "foo"
            # logging out clears the session and bounces back to /login
            response = client.get("/logout")
            assert response.location == "http://localhost:{}/login".format(client.port)
            assert mock_session.get("logged_in") is None
def test_upload():
    """Uploading a CSV registers a second dataset next to the existing one."""
    import dtale.views as views

    df, _ = views.format_data(pd.DataFrame([1, 2, 3]))
    with build_app(url=URL).test_client() as client:
        with ExitStack() as stack:
            store = {client.port: df}
            stack.enter_context(mock.patch("dtale.global_state.DATA", store))
            stack.enter_context(
                mock.patch("dtale.global_state.DTYPES",
                           {client.port: views.build_dtypes_state(df)}))
            # an upload without contents fails
            response = client.post("/dtale/upload")
            assert not response.get_json()["success"]
            client.post(
                "/dtale/upload",
                data={
                    "contents": build_upload_data(),
                    "filename": "test_df.csv"
                },
            )
            assert len(store) == 2
            new_key = next((key for key in store if key != client.port), None)
            assert list(store[new_key].columns) == ["a", "b", "c"]
def test_covid_dataset():
    """Covid dataset loader joins state abbreviations onto the case data."""
    import dtale.global_state as global_state

    global_state.clear_store()

    def fake_load_csv(**kwargs):
        # return the minimal frame the loader expects for each source URL
        path = kwargs.get("path")
        if path == "https://raw.githubusercontent.com/nytimes/covid-19-data/master/us-states.csv":
            return pd.DataFrame(dict(state=["a", "b"]))
        if path == "https://raw.githubusercontent.com/jasonong/List-of-US-States/master/states.csv":
            return pd.DataFrame(dict(State=["a"], Abbreviation=["A"]))
        return None

    with build_app(url=URL).test_client() as client:
        with mock.patch("dtale.cli.loaders.csv_loader.loader_func", fake_load_csv):
            client.get("/dtale/datasets", query_string=dict(dataset="covid"))
            assert global_state.get_data(1).state_code.values[0] == "A"
def test_time_dataframe_dataset():
    """time_dataframe sample loads with no null values in column 'A'."""
    import dtale.global_state as global_state

    global_state.clear_store()
    with build_app(url=URL).test_client() as client:
        client.get("/dtale/datasets", query_string=dict(dataset="time_dataframe"))
        assert global_state.get_data(1)["A"].isnull().sum() == 0
def _override_start_listening(app):
    """Mount a reaper-less D-Tale WSGI app inside the tornado app, then listen.

    Any request whose path contains 'dtale' falls through to the flask app;
    everything else is handled by the tornado application as before.
    """
    wsgi_fallback = WSGIContainer(build_app(reaper_on=False))
    app.add_handlers(
        r".*", [(".*dtale.*", FallbackHandler, dict(fallback=wsgi_fallback))]
    )
    orig_start_listening(app)
def test_matrix():
    """missingno matrix endpoint renders a PNG image."""
    import dtale.views as views

    df, _ = views.format_data(pd.DataFrame(dict(a=[1, 2, 3, 4, 5, 6])))
    with build_app(url=URL).test_client() as client:
        build_data_inst({client.port: df})
        response = client.get("/dtale/missingno/matrix/{}".format(client.port))
        assert response.content_type == "image/png"
def test_dendrogram(rolling_data):
    """missingno dendrogram endpoint renders a PNG image."""
    import dtale.views as views

    df, _ = views.format_data(rolling_data)
    with build_app(url=URL).test_client() as client:
        build_data_inst({client.port: df})
        response = client.get("/dtale/missingno/dendrogram/{}".format(client.port))
        assert response.content_type == "image/png"
def test_web_upload(unittest):
    """/dtale/web-upload dispatches to the csv/tsv/json loaders per file type."""
    with build_app(url=URL).test_client() as client:
        with ExitStack() as stack:
            store = {}
            stack.enter_context(mock.patch("dtale.global_state.DATA", store))
            load_csv = stack.enter_context(
                mock.patch(
                    "dtale.cli.loaders.csv_loader.loader_func",
                    mock.Mock(return_value=pd.DataFrame(dict(a=[1], b=[2]))),
                ))
            load_json = stack.enter_context(
                mock.patch(
                    "dtale.cli.loaders.json_loader.loader_func",
                    mock.Mock(return_value=pd.DataFrame(dict(a=[1], b=[2]))),
                ))

            # csv -> csv loader, no proxy
            client.get(
                "/dtale/web-upload",
                query_string={"type": "csv", "url": "http://test.com"},
            )
            load_csv.assert_called_once()
            unittest.assertEqual(
                load_csv.call_args.kwargs,
                {"path": "http://test.com", "proxy": None},
            )
            assert len(store) == 1

            # tsv -> csv loader with a tab delimiter
            load_csv.reset_mock()
            client.get(
                "/dtale/web-upload",
                query_string={"type": "tsv", "url": "http://test.com"},
            )
            load_csv.assert_called_once()
            unittest.assertEqual(
                load_csv.call_args.kwargs,
                {"path": "http://test.com", "proxy": None, "delimiter": "\t"},
            )
            assert len(store) == 2

            # json -> json loader, proxy forwarded
            client.get(
                "/dtale/web-upload",
                query_string={
                    "type": "json",
                    "url": "http://test.com",
                    "proxy": "http://testproxy.com",
                },
            )
            load_json.assert_called_once()
            unittest.assertEqual(
                load_json.call_args.kwargs,
                {"path": "http://test.com", "proxy": "http://testproxy.com"},
            )
            assert len(store) == 3
def test_overriden_route():
    """A user-defined '/' route takes precedence over D-Tale's default."""
    app = build_app(reaper_on=False)

    @app.route("/")
    def hello_world():
        return "hello world"

    with app.test_client() as client:
        assert client.get("/").data == b"hello world"
def test_hpfilter(custom_data, ts_analysis_data, unittest):
    """Hodrick-Prescott filter endpoint returns trend/cycle series."""
    import dtale.views as views

    df, _ = views.format_data(ts_analysis_data)
    with build_app(url=URL).test_client() as client:
        build_data_inst({client.port: df})
        # an unimplemented analysis type fails gracefully
        response = client.get(
            "/dtale/timeseries-analysis/{}".format(client.port),
            query_string=dict(type="not_implemented", cfg=json.dumps({})),
        )
        assert not response.json["success"]
        cfg = dict(index="date", col="realgdp", lamb=1600)
        response = client.get(
            "/dtale/timeseries-analysis/{}".format(client.port),
            query_string=dict(type="hpfilter", cfg=json.dumps(cfg)),
        )
        unittest.assertEqual(
            sorted(response.json["data"]["all"].keys()),
            ["cycle", "realgdp", "trend", "x"],
        )

    df, _ = views.format_data(custom_data)
    with build_app(url=URL).test_client() as client:
        build_data_inst({client.port: df})
        cfg = dict(index="date", col="Col1", lamb=1600)
        response = client.get(
            "/dtale/timeseries-analysis/{}".format(client.port),
            query_string=dict(type="hpfilter", cfg=json.dumps(cfg)),
        )
        # for this dataset the request fails until an aggregation is supplied
        assert not response.json["success"]
        cfg["agg"] = "mean"
        response = client.get(
            "/dtale/timeseries-analysis/{}".format(client.port),
            query_string=dict(type="hpfilter", cfg=json.dumps(cfg)),
        )
        unittest.assertEqual(
            sorted(response.json["data"]["all"].keys()),
            ["Col1", "cycle", "trend", "x"],
        )
def test_time_dataframe_dataset():
    """time_dataframe dataset builder stores the frame makeTimeDataFrame returns."""
    with build_app(url=URL).test_client() as client:
        with ExitStack() as stack:
            store = {}
            stack.enter_context(mock.patch("dtale.global_state.DATA", store))
            stack.enter_context(
                mock.patch(
                    "dtale.datasets.makeTimeDataFrame",
                    mock.Mock(return_value=pd.DataFrame(dict(time=[1]))),
                ))
            client.get("/dtale/datasets", query_string=dict(dataset="time_dataframe"))
            assert store["1"].time.sum() == 1
def test_seinfeld_dataset():
    """Seinfeld dataset loader stores the frame the csv loader returns."""
    def fake_load_csv(**kwargs):
        return pd.DataFrame(dict(SEID=["a"]))

    with build_app(url=URL).test_client() as client:
        with ExitStack() as stack:
            store = {}
            stack.enter_context(mock.patch("dtale.global_state.DATA", store))
            stack.enter_context(
                mock.patch("dtale.cli.loaders.csv_loader.loader_func", fake_load_csv))
            client.get("/dtale/datasets", query_string=dict(dataset="seinfeld"))
            assert store["1"].SEID.values[0] == "a"
def test_login_failed():
    """Posting bad credentials re-renders the login page with an error message."""
    auth_settings = {
        "active": True,
        "username": "******",
        "password": "******"
    }
    with mock.patch(
        "dtale.auth.global_state.get_auth_settings", return_value=auth_settings
    ):
        with build_app(url=URL).test_client() as client:
            response = client.post(
                "/login", data=dict(username="******", password="******"))
            assert "Invalid credentials!" in str(response.data)
def test_seinfeld_dataset():
    """Seinfeld dataset loader stores the frame via global_state."""
    import dtale.global_state as global_state

    global_state.clear_store()

    def fake_load_csv(**kwargs):
        return pd.DataFrame(dict(SEID=["a"]))

    with build_app(url=URL).test_client() as client:
        with mock.patch("dtale.cli.loaders.csv_loader.loader_func", fake_load_csv):
            client.get("/dtale/datasets", query_string=dict(dataset="seinfeld"))
            assert global_state.get_data(1).SEID.values[0] == "a"
def test_bkfilter(unittest, ts_analysis_data):
    """Baxter-King filter endpoint returns a cycle series for the column."""
    import dtale.views as views

    df, _ = views.format_data(ts_analysis_data)
    with build_app(url=URL).test_client() as client:
        build_data_inst({client.port: df})
        cfg = dict(index="date", col="realgdp", low=6, high=32, K=12)
        response = client.get(
            "/dtale/timeseries-analysis/{}".format(client.port),
            query_string=dict(type="bkfilter", cfg=json.dumps(cfg)),
        )
        unittest.assertEqual(
            sorted(response.json["data"]["all"].keys()), ["cycle", "realgdp", "x"]
        )
def test_seasonal_decompose(unittest, ts_analysis_data):
    """Seasonal-decompose endpoint returns trend/seasonal/resid series."""
    import dtale.views as views

    df, _ = views.format_data(ts_analysis_data)
    with build_app(url=URL).test_client() as client:
        build_data_inst({client.port: df})
        cfg = dict(index="date", col="realgdp", model="additive")
        response = client.get(
            "/dtale/timeseries-analysis/{}".format(client.port),
            query_string=dict(type="seasonal_decompose", cfg=json.dumps(cfg)),
        )
        unittest.assertEqual(
            sorted(response.json["data"]["all"].keys()),
            ["realgdp", "resid", "seasonal", "trend", "x"],
        )
def test_network_data(network_data):
    """Network-data endpoint returns the expected node and edge counts."""
    import dtale.views as views

    df, _ = views.format_data(network_data)
    with build_app(url=URL).test_client() as client:
        build_data_inst({client.port: df})
        response = client.get(
            "/dtale/network-data/{}".format(client.port),
            query_string={
                "to": "to",
                "from": "from",
                "group": "weight",
                "weight": "weight",
            },
        )
        payload = response.json
        assert len(payload["edges"]) == 19
        assert len(payload["nodes"]) == 14
def test_failed_override():
    """If route inspection raises, '/' falls back to redirecting to the upload popup."""
    with mock.patch(
        "dtale.app.contains_route",
        mock.Mock(side_effect=Exception("test error")),
    ):
        app = build_app(reaper_on=False)

        @app.route("/")
        def hello_world():
            return "hello world"

        with app.test_client() as client:
            response = client.get("/")
            assert response.status_code == 302
            assert response.location == "http://localhost:{}/dtale/popup/upload".format(
                client.port)
def test_shortest_path(network_data, unittest):
    """Shortest-path endpoint finds the weighted path between two nodes."""
    import dtale.views as views

    df, _ = views.format_data(network_data)
    with build_app(url=URL).test_client() as client:
        build_data_inst({client.port: df})
        response = client.get(
            "/dtale/shortest-path/{}".format(client.port),
            query_string={
                "to": "to",
                "from": "from",
                "weight": "weight",
                "start": "b",
                "end": "k",
            },
        )
        unittest.assertEqual(response.json["data"], ["b", "f", "j", "k"])
def test_shortest_path(network_data, unittest):
    """Shortest-path endpoint finds the weighted path between two nodes."""
    import dtale.views as views

    df, _ = views.format_data(network_data)
    with build_app(url=URL).test_client() as client:
        with mock.patch("dtale.global_state.DATA", {client.port: df}):
            response = client.get(
                "/dtale/shortest-path/{}".format(client.port),
                query_string={
                    "to": "to",
                    "from": "from",
                    "weight": "weight",
                    "start": "b",
                    "end": "k",
                },
            )
            unittest.assertEqual(response.json["data"], ["b", "f", "j", "k"])
def test_network_data(network_data):
    """Network-data endpoint returns the expected node and edge counts."""
    import dtale.views as views

    df, _ = views.format_data(network_data)
    with build_app(url=URL).test_client() as client:
        with mock.patch("dtale.global_state.DATA", {client.port: df}):
            response = client.get(
                "/dtale/network-data/{}".format(client.port),
                query_string={
                    "to": "to",
                    "from": "from",
                    "group": "weight",
                    "weight": "weight",
                },
            )
            payload = response.json
            assert len(payload["edges"]) == 19
            assert len(payload["nodes"]) == 14
import mock  # required below for mock.patch; was missing from this import block
import numpy as np
import pandas as pd
import pandas.util.testing as pdt
import pytest
from pandas.tseries.offsets import Day
from six import PY3

from dtale.app import build_app

if PY3:
    from contextlib import ExitStack
else:
    from contextlib2 import ExitStack

URL = 'http://localhost:40000'
app = build_app(url=URL)


@pytest.mark.unit
def test_head_data_id():
    """head_data_id returns the first stored data id and raises when empty."""
    import dtale.views as views

    with ExitStack() as stack:
        stack.enter_context(mock.patch('dtale.views.DATA', {'1': None, '2': None}))
        assert views.head_data_id() == '1'

    with ExitStack() as stack:
        stack.enter_context(mock.patch('dtale.views.DATA', {}))
        with pytest.raises(Exception) as error:
            views.head_data_id()
        # pytest's ExceptionInfo has no startswith; inspect the raised
        # exception's message via error.value
        assert str(error.value).startswith('No data associated with this D-Tale session')
import mock
import pandas as pd
import pandas.util.testing as pdt
import pytest
from pandas.tseries.offsets import Day
from six import PY3

from dtale.app import build_app

if PY3:
    from contextlib import ExitStack
else:
    from contextlib2 import ExitStack

app = build_app()


@pytest.mark.unit
def test_startup(unittest):
    """views.startup stores the loader's output with its index reset."""
    import dtale.views as views

    # without a loader nothing is stored
    views.startup()
    assert views.DATA is None

    test_data = pd.DataFrame(
        [dict(date=pd.Timestamp('now'), security_id=1, foo=1.5)])
    test_data = test_data.set_index(['date', 'security_id'])
    views.startup(data_loader=lambda: test_data, port=80)
    # the multi-index is flattened back into columns on storage
    pdt.assert_frame_equal(views.DATA, test_data.reset_index())
def test_upload(unittest):
    """Uploads create new datasets: CSV (preset and custom separator) and Excel.

    A multi-sheet Excel upload stores every sheet and reports them in the
    response payload.
    """
    import dtale.views as views
    import dtale.global_state as global_state

    global_state.clear_store()
    df, _ = views.format_data(pd.DataFrame([1, 2, 3]))

    # CSV upload with the preset separator
    with build_app(url=URL).test_client() as client:
        build_data_inst({client.port: df})
        global_state.set_dtypes(client.port, views.build_dtypes_state(df))
        response = client.post("/dtale/upload")
        assert not response.get_json()["success"]
        client.post(
            "/dtale/upload",
            data={
                "tests_df.csv": (build_upload_data(), "test_df.csv"),
                "separatorType": "csv",
            },
        )
        assert global_state.size() == 2
        new_key = next((k for k in global_state.keys() if k != client.port), None)
        assert list(global_state.get_data(new_key).columns) == ["a", "b", "c"]

    # CSV upload with a custom separator
    with build_app(url=URL).test_client() as client:
        global_state.clear_store()
        build_data_inst({client.port: df})
        global_state.set_dtypes(client.port, views.build_dtypes_state(df))
        response = client.post("/dtale/upload")
        assert not response.get_json()["success"]
        client.post(
            "/dtale/upload",
            data={
                "tests_df.csv": (build_upload_data(), "test_df.csv"),
                "separatorType": "custom",
                "separator": ",",
            },
        )
        assert global_state.size() == 2
        new_key = next((k for k in global_state.keys() if k != client.port), None)
        assert list(global_state.get_data(new_key).columns) == ["a", "b", "c"]

    # single-sheet Excel upload (xlsx handling requires python 3)
    with build_app(url=URL).test_client() as client:
        global_state.clear_store()
        build_data_inst({client.port: df})
        global_state.set_dtypes(client.port, views.build_dtypes_state(df))
        assert global_state.size() == 1
        if PY3:
            client.post(
                "/dtale/upload",
                data={
                    "test_df.xlsx": (
                        os.path.join(
                            os.path.dirname(__file__), "..", "data/test_df.xlsx"
                        ),
                        "test_df.xlsx",
                    )
                },
            )
            assert global_state.size() == 2
            new_key = next(
                (k for k in global_state.keys() if k != client.port), None)
            assert list(global_state.get_data(new_key).columns) == ["a", "b", "c"]

    # multi-sheet Excel upload reports every sheet back to the caller
    with build_app(url=URL).test_client() as client:
        with ExitStack() as stack:
            global_state.clear_store()
            data = {client.port: df}
            build_data_inst(data)
            global_state.set_dtypes(client.port, views.build_dtypes_state(df))
            stack.enter_context(
                mock.patch(
                    "dtale.views.pd.read_excel",
                    mock.Mock(
                        return_value={
                            "Sheet 1": pd.DataFrame(dict(a=[1], b=[2])),
                            "Sheet 2": pd.DataFrame(dict(c=[1], d=[2])),
                        }
                    ),
                )
            )
            response = client.post(
                "/dtale/upload",
                data={
                    "test_df.xlsx": (
                        os.path.join(
                            os.path.dirname(__file__), "..", "data/test_df.xlsx"
                        ),
                        "test_df.xlsx",
                    )
                },
            )
            assert global_state.size() == 3
            sheets = response.json["sheets"]
            assert len(sheets) == 2
            unittest.assertEqual(
                sorted([s["name"] for s in sheets]),
                ["Sheet 1", "Sheet 2"],
            )
def test_web_upload(unittest):
    """/dtale/web-upload dispatches to the csv/tsv/json/excel loaders per type.

    A multi-sheet Excel source reports its sheets in the response payload.
    """
    import dtale.global_state as global_state

    global_state.clear_store()
    with build_app(url=URL).test_client() as client:
        with ExitStack() as stack:
            load_csv = stack.enter_context(
                mock.patch(
                    "dtale.cli.loaders.csv_loader.loader_func",
                    mock.Mock(return_value=pd.DataFrame(dict(a=[1], b=[2]))),
                )
            )
            load_excel = stack.enter_context(
                mock.patch(
                    "dtale.cli.loaders.excel_loader.load_file",
                    mock.Mock(
                        return_value={"Sheet 1": pd.DataFrame(dict(a=[1], b=[2]))}
                    ),
                )
            )
            load_json = stack.enter_context(
                mock.patch(
                    "dtale.cli.loaders.json_loader.loader_func",
                    mock.Mock(return_value=pd.DataFrame(dict(a=[1], b=[2]))),
                )
            )

            # csv -> csv loader, no proxy
            client.get(
                "/dtale/web-upload",
                query_string={"type": "csv", "url": "http://test.com"},
            )
            load_csv.assert_called_once()
            unittest.assertEqual(
                load_csv.call_args.kwargs,
                {"path": "http://test.com", "proxy": None},
            )
            assert global_state.size() == 1

            # tsv -> csv loader with a tab delimiter
            load_csv.reset_mock()
            client.get(
                "/dtale/web-upload",
                query_string={"type": "tsv", "url": "http://test.com"},
            )
            load_csv.assert_called_once()
            unittest.assertEqual(
                load_csv.call_args.kwargs,
                {"path": "http://test.com", "proxy": None, "delimiter": "\t"},
            )
            assert global_state.size() == 2

            # json -> json loader, proxy forwarded
            client.get(
                "/dtale/web-upload",
                query_string={
                    "type": "json",
                    "url": "http://test.com",
                    "proxy": "http://testproxy.com",
                },
            )
            load_json.assert_called_once()
            unittest.assertEqual(
                load_json.call_args.kwargs,
                {"path": "http://test.com", "proxy": "http://testproxy.com"},
            )
            assert global_state.size() == 3

            # excel -> excel loader
            excel_params = {"type": "excel", "url": "http://test.com"}
            client.get("/dtale/web-upload", query_string=excel_params)
            load_excel.assert_called_once()
            unittest.assertEqual(
                load_excel.call_args.kwargs,
                {"path": "http://test.com", "proxy": None},
            )
            assert global_state.size() == 4

            # multi-sheet excel reports its sheet names
            global_state.clear_store()
            load_excel.reset_mock()
            load_excel.return_value = {
                "Sheet 1": pd.DataFrame(dict(a=[1], b=[2])),
                "Sheet 2": pd.DataFrame(dict(c=[1], d=[2])),
            }
            response = client.get("/dtale/web-upload", query_string=excel_params)
            sheets = response.json["sheets"]
            assert len(sheets) == 2
            unittest.assertEqual(
                sorted([s["name"] for s in sheets]),
                ["Sheet 1", "Sheet 2"],
            )