def test_processes(test_data, unittest):
    """Verify /dtale/processes lists dataset metadata and errors when METADATA is empty."""
    from dtale.views import build_dtypes_state

    now = pd.Timestamp('20180430 12:36:44').tz_localize('US/Eastern')
    with app.test_client() as c:
        with ExitStack() as stack:
            stack.enter_context(mock.patch('dtale.views.DATA', {c.port: test_data}))
            stack.enter_context(
                mock.patch('dtale.views.DTYPES', {c.port: build_dtypes_state(test_data)}))
            stack.enter_context(
                mock.patch('dtale.views.METADATA', {c.port: dict(start=now, name='foo')}))
            response = c.get('/dtale/processes')
            response_data = json.loads(response.data)
            unittest.assertEqual(
                [{
                    'rows': 50,
                    'name': u'foo',
                    'ts': 1525106204000,
                    'start': '2018-04-30 12:36:44',
                    'names': u'date,security_id,foo,bar,baz',
                    'data_id': c.port,
                    'columns': 5
                }],
                response_data['data']
            )

    # without metadata the endpoint should surface an error payload
    with app.test_client() as c:
        with ExitStack() as stack:
            stack.enter_context(mock.patch('dtale.views.DATA', {c.port: test_data}))
            stack.enter_context(
                mock.patch('dtale.views.DTYPES', {c.port: build_dtypes_state(test_data)}))
            stack.enter_context(mock.patch('dtale.views.METADATA', {}))
            response = c.get('/dtale/processes')
            response_data = json.loads(response.data)
            assert 'error' in response_data
def test_jinja_output():
    """Verify the "fork on github" banner renders only when github_fork is enabled.

    Fix: the github_fork=True block requested the hard-coded data id ``1``
    (``/dtale/main/1``) while every other request in this test keys off
    ``c.port`` — the id the mocked DATA/DTYPES stores are registered under.
    Using ``c.port`` keeps the request consistent with the mocked state.
    """
    import dtale.views as views

    df = pd.DataFrame([1, 2, 3])
    df, _ = views.format_data(df)
    with build_app(url=URL).test_client() as c:
        with ExitStack() as stack:
            stack.enter_context(mock.patch('dtale.views.DATA', {c.port: df}))
            stack.enter_context(
                mock.patch('dtale.views.DTYPES', {c.port: views.build_dtypes_state(df)}))
            stack.enter_context(
                mock.patch('dtale.dash_application.views.DATA', {c.port: df}))
            response = c.get('/dtale/main/{}'.format(c.port))
            assert 'span id="forkongithub"' not in str(response.data)
            response = c.get('/charts/{}'.format(c.port))
            assert 'span id="forkongithub"' not in str(response.data)

    with build_app(url=URL, github_fork=True).test_client() as c:
        with ExitStack() as stack:
            stack.enter_context(mock.patch('dtale.views.DATA', {c.port: df}))
            stack.enter_context(
                mock.patch('dtale.views.DTYPES', {c.port: views.build_dtypes_state(df)}))
            stack.enter_context(
                mock.patch('dtale.dash_application.views.DATA', {c.port: df}))
            # was '/dtale/main/1' -- use the data id the mocks are registered under
            response = c.get('/dtale/main/{}'.format(c.port))
            assert 'span id="forkongithub"' in str(response.data)
            response = c.get('/charts/{}'.format(c.port))
            assert 'span id="forkongithub"' in str(response.data)
def test_get_correlations(unittest, test_data):
    """Verify /dtale/correlations output, bad-query error handling and date detection.

    Fix: the final expected value was assigned twice (``expected = expected =
    dict(...)``) — a harmless but confusing duplication, now removed.
    """
    import dtale.views as views

    with app.test_client() as c:
        with ExitStack() as stack:
            test_data, _ = views.format_data(test_data)
            stack.enter_context(mock.patch('dtale.views.DATA', {c.port: test_data}))
            stack.enter_context(
                mock.patch('dtale.views.DTYPES', {c.port: views.build_dtypes_state(test_data)}))
            response = c.get('/dtale/correlations')
            response_data = json.loads(response.data)
            expected = dict(data=[
                dict(column='security_id', security_id=1.0, foo=None, bar=None),
                dict(column='foo', security_id=None, foo=None, bar=None),
                dict(column='bar', security_id=None, foo=None, bar=None)
            ], dates=[])
            unittest.assertEqual(response_data, expected, 'should return correlations')

    with app.test_client() as c:
        with ExitStack() as stack:
            stack.enter_context(mock.patch('dtale.views.DATA', {c.port: test_data}))
            stack.enter_context(
                mock.patch('dtale.views.DTYPES', {c.port: views.build_dtypes_state(test_data)}))
            response = c.get('/dtale/correlations',
                             query_string=dict(query="missing_col == 'blah'"))
            response_data = json.loads(response.data)
            unittest.assertEqual(response_data['error'],
                                 "name 'missing_col' is not defined",
                                 'should handle correlations exception')

    with app.test_client() as c:
        with ExitStack() as stack:
            # introduce NaNs and a second date so 'date' is detected as a date column
            test_data.loc[test_data.security_id == 1, 'bar'] = np.nan
            test_data2 = test_data.copy()
            test_data2.loc[:, 'date'] = pd.Timestamp('20000102')
            test_data = pd.concat([test_data, test_data2], ignore_index=True)
            stack.enter_context(mock.patch('dtale.views.DATA', {c.port: test_data}))
            stack.enter_context(
                mock.patch('dtale.views.DTYPES', {c.port: views.build_dtypes_state(test_data)}))
            response = c.get('/dtale/correlations')
            response_data = json.loads(response.data)
            expected = dict(data=[
                dict(column='security_id', security_id=1.0, foo=None, bar=None),
                dict(column='foo', security_id=None, foo=None, bar=None),
                dict(column='bar', security_id=None, foo=None, bar=None)
            ], dates=['date'])
            unittest.assertEqual(response_data, expected, 'should return correlations')
def test_dtypes(test_data):
    """Verify /dtale/dtypes and /dtale/describe across several dataset shapes."""
    from dtale.views import build_dtypes_state, format_data

    with app.test_client() as c:
        with ExitStack() as stack:
            stack.enter_context(mock.patch('dtale.views.DATA', {c.port: test_data}))
            stack.enter_context(
                mock.patch('dtale.views.DTYPES', {c.port: build_dtypes_state(test_data)}))
            response = c.get('/dtale/dtypes/{}'.format(c.port))
            response_data = json.loads(response.data)
            assert response_data['success']
            for col in test_data.columns:
                response = c.get('/dtale/describe/{}/{}'.format(c.port, col))
                response_data = json.loads(response.data)
                assert response_data['success']

    # describe on a high-cardinality column should still report top uniques
    lots_of_groups = pd.DataFrame([dict(a=i, b=1) for i in range(150)])
    with app.test_client() as c:
        with ExitStack() as stack:
            stack.enter_context(mock.patch('dtale.views.DATA', {c.port: lots_of_groups}))
            stack.enter_context(
                mock.patch('dtale.views.DTYPES', {c.port: build_dtypes_state(lots_of_groups)}))
            response = c.get('/dtale/dtypes/{}'.format(c.port))
            response_data = json.loads(response.data)
            assert response_data['success']
            response = c.get('/dtale/describe/{}/{}'.format(c.port, 'a'))
            response_data = json.loads(response.data)
            assert response_data['uniques']['top']
            assert response_data['success']

    # missing DTYPES state should produce error payloads
    with app.test_client() as c:
        with ExitStack() as stack:
            stack.enter_context(mock.patch('dtale.views.DTYPES', {}))
            response = c.get('/dtale/dtypes/{}'.format(c.port))
            response_data = json.loads(response.data)
            assert 'error' in response_data
            response = c.get('/dtale/describe/{}/foo'.format(c.port))
            response_data = json.loads(response.data)
            assert 'error' in response_data

    # describe should format infinite values as 'inf'
    df = pd.DataFrame([
        dict(date=pd.Timestamp('now'), security_id=1, foo=1.0, bar=2.0),
        dict(date=pd.Timestamp('now'), security_id=1, foo=2.0, bar=np.inf)
    ], columns=['date', 'security_id', 'foo', 'bar'])
    df, _ = format_data(df)
    with app.test_client() as c:
        with ExitStack() as stack:
            stack.enter_context(mock.patch('dtale.views.DATA', {c.port: df}))
            stack.enter_context(
                mock.patch('dtale.views.DTYPES', {c.port: build_dtypes_state(df)}))
            response = c.get('/dtale/describe/{}/{}'.format(c.port, 'bar'))
            response_data = json.loads(response.data)
            assert response_data['describe']['min'] == '2'
            assert response_data['describe']['max'] == 'inf'
def test_dtypes(test_data):
    """Verify the legacy (un-keyed) /dtale/dtypes and /dtale/describe endpoints."""
    from dtale.views import build_dtypes_state

    with app.test_client() as c:
        with ExitStack() as stack:
            stack.enter_context(mock.patch('dtale.views.DATA', {c.port: test_data}))
            stack.enter_context(
                mock.patch('dtale.views.DTYPES', {c.port: build_dtypes_state(test_data)}))
            response = c.get('/dtale/dtypes')
            response_data = json.loads(response.data)
            assert response_data['success']
            for col in test_data.columns:
                response = c.get('/dtale/describe/{}'.format(col))
                response_data = json.loads(response.data)
                assert response_data['success']

    # with no DTYPES state both endpoints should report errors
    with app.test_client() as c:
        with ExitStack() as stack:
            stack.enter_context(mock.patch('dtale.views.DTYPES', {}))
            response = c.get('/dtale/dtypes')
            response_data = json.loads(response.data)
            assert 'error' in response_data
            response = c.get('/dtale/describe/foo')
            response_data = json.loads(response.data)
            assert 'error' in response_data
def test_view(unittest):
    """Verify /dtale/build-replacement: unimplemented types, name clashes and value replacement."""
    from dtale.views import build_dtypes_state

    df = replacements_data()
    with app.test_client() as c:
        data = {c.port: df}
        dtypes = {c.port: build_dtypes_state(df)}
        with ExitStack() as stack:
            stack.enter_context(mock.patch("dtale.global_state.DATA", data))
            stack.enter_context(mock.patch("dtale.global_state.DTYPES", dtypes))

            resp = c.get(
                "/dtale/build-replacement/{}".format(c.port),
                query_string=dict(type="not_implemented", name="test", cfg=json.dumps({})),
            )
            response_data = resp.json
            assert (response_data["error"]
                    == "'not_implemented' replacement not implemented yet!")

            # a replacement may not reuse an existing column name
            params = dict(
                type="value",
                col="e",
                name="a",
                cfg=json.dumps({
                    "value": [dict(value="nan", type="raw", replace="for test")]
                }),
            )
            resp = c.get("/dtale/build-replacement/{}".format(c.port), query_string=params)
            response_data = resp.json
            assert response_data["error"] == "A column named 'a' already exists!"

            # build the replacement into a new column 'e2'
            params = dict(
                type="value",
                col="e",
                name="e2",
                cfg=json.dumps({
                    "value": [dict(value="nan", type="raw", replace="for test")]
                }),
            )
            c.get("/dtale/build-replacement/{}".format(c.port), query_string=params)
            unittest.assertEqual(list(data[c.port]["e2"].values), ["a", "for test", "b"])
            assert dtypes[c.port][-1]["name"] == "e2"
            assert dtypes[c.port][-1]["dtype"] == "string"
            assert not dtypes[c.port][-1]["hasMissing"]

            # without a name, the replacement is applied in place on 'e'
            del params["name"]
            c.get("/dtale/build-replacement/{}".format(c.port), query_string=params)
            unittest.assertEqual(list(data[c.port]["e"].values), ["a", "for test", "b"])
            e_dtype = next((d for d in dtypes[c.port] if d["name"] == "e"))
            assert not e_dtype["hasMissing"]
def test_upload():
    """Verify /dtale/upload rejects empty posts and registers uploaded CSV data."""
    import dtale.views as views

    df, _ = views.format_data(pd.DataFrame([1, 2, 3]))
    with build_app(url=URL).test_client() as c:
        with ExitStack() as stack:
            data = {c.port: df}
            stack.enter_context(mock.patch("dtale.global_state.DATA", data))
            stack.enter_context(
                mock.patch("dtale.global_state.DTYPES",
                           {c.port: views.build_dtypes_state(df)}))

            # no payload -> failure
            resp = c.post("/dtale/upload")
            assert not resp.get_json()["success"]

            # a valid CSV upload registers a second dataset
            c.post(
                "/dtale/upload",
                data={
                    "contents": build_upload_data(),
                    "filename": "test_df.csv"
                },
            )
            assert len(data) == 2
            new_key = next((k for k in data if k != c.port), None)
            assert list(data[new_key].columns) == ["a", "b", "c"]
def test_edit_to_nan():
    """Verify /dtale/edit-cell converts 'nan' and 'inf' strings into numeric values."""
    from dtale.views import build_dtypes_state, format_data

    df = edit_data()
    df, _ = format_data(df)
    with app.test_client() as c:
        with ExitStack() as stack:
            data = {c.port: df}
            stack.enter_context(mock.patch("dtale.global_state.DATA", data))
            settings = {c.port: {"locked": ["a"]}}
            stack.enter_context(mock.patch("dtale.global_state.SETTINGS", settings))
            dtypes = {c.port: build_dtypes_state(df)}
            stack.enter_context(mock.patch("dtale.global_state.DTYPES", dtypes))

            c.get(
                "/dtale/edit-cell/{}/a".format(c.port),
                query_string=dict(rowIndex=0, updated="nan"),
            )
            assert pd.isnull(data[c.port].a.values[0])

            c.get(
                "/dtale/edit-cell/{}/b".format(c.port),
                query_string=dict(rowIndex=0, updated="inf"),
            )
            assert np.isinf(data[c.port].b.values[0])
def test_show_ngrok(unittest, builtin_pkg):
    """Verify dtale.app.show routes through flask_ngrok when USE_NGROK is set."""
    from dtale.app import show, get_instance, instances
    import dtale.views as views
    import dtale.global_state as global_state

    orig_import = __import__
    mock_flask_ngrok = mock.Mock()
    mock_flask_ngrok._run_ngrok = lambda: "ngrok_host"

    def import_mock(name, *args, **kwargs):
        # intercept only the flask_ngrok import; defer everything else
        if name == "flask_ngrok":
            return mock_flask_ngrok
        return orig_import(name, *args, **kwargs)

    test_data = pd.DataFrame([dict(a=1, b=2)])
    with ExitStack() as stack:
        stack.enter_context(
            mock.patch("{}.__import__".format(builtin_pkg), side_effect=import_mock)
        )
        stack.enter_context(mock.patch("dtale.app.USE_NGROK", True))
        stack.enter_context(mock.patch("dtale.app.PY3", True))
        mock_run = stack.enter_context(
            mock.patch("dtale.app.DtaleFlask.run", mock.Mock())
        )
        stack.enter_context(
            mock.patch("dtale.app.is_up", mock.Mock(return_value=False))
        )
        mock_requests = stack.enter_context(mock.patch("requests.get", mock.Mock()))

        instance = show(
            data=test_data, subprocess=False, name="foo", ignore_duplicate=True
        )
        assert "http://ngrok_host" == instance._url
        mock_run.assert_called_once()
        pdt.assert_frame_equal(instance.data, test_data)

        # reassigning .data should refresh the stored dtypes state
        tmp = test_data.copy()
        tmp["biz"] = 2.5
        instance.data = tmp
        unittest.assertEqual(
            global_state.DTYPES[instance._data_id],
            views.build_dtypes_state(tmp),
            "should update app data/dtypes",
        )

        instance2 = get_instance(instance._data_id)
        assert instance2._url == instance._url
        instances()
        assert get_instance(20) is None  # should return None for invalid data ids

        instance.kill()
        mock_requests.assert_called_once()
        assert mock_requests.call_args[0][0] == "http://ngrok_host/shutdown"
        assert global_state.METADATA["1"]["name"] == "foo"

    # ngrok is only supported on python 3
    with ExitStack() as stack:
        stack.enter_context(mock.patch("dtale.app.USE_NGROK", True))
        stack.enter_context(mock.patch("dtale.app.PY3", False))
        with pytest.raises(Exception):
            show(data=test_data)
def test_view(unittest):
    """Verify /dtale/duplicates handles unimplemented checks, test runs and execution.

    Fix: ``unittest.assertEquals`` is a long-deprecated alias of
    ``assertEqual`` (emits a DeprecationWarning); replaced with the
    canonical name.
    """
    from dtale.views import build_dtypes_state

    df = duplicates_data()
    with app.test_client() as c:
        data = {c.port: df}
        dtypes = {c.port: build_dtypes_state(df)}
        with ExitStack() as stack:
            stack.enter_context(mock.patch("dtale.global_state.DATA", data))
            stack.enter_context(mock.patch("dtale.global_state.DTYPES", dtypes))

            resp = c.get(
                "/dtale/duplicates/{}".format(c.port),
                query_string=dict(
                    type="not_implemented", action="execute", cfg=json.dumps({})
                ),
            )
            response_data = resp.json
            assert (
                response_data["error"]
                == "'not_implemented' duplicate check not implemented yet!"
            )

            # 'test' action reports which columns would be dropped
            params = dict(
                type="columns",
                action="test",
                cfg=json.dumps({"keep": "first"}),
            )
            resp = c.get("/dtale/duplicates/{}".format(c.port), query_string=params)
            response_data = resp.json
            # was assertEquals (deprecated alias)
            unittest.assertEqual(response_data, {"results": {"Foo": ["foo"]}})

            # 'execute' action performs the removal and returns the data id
            params["action"] = "execute"
            resp = c.get("/dtale/duplicates/{}".format(c.port), query_string=params)
            response_data = resp.json
            assert response_data["data_id"] == c.port
def test_stack(unittest):
    """Verify /dtale/merge stacks two datasets, with and without index preservation."""
    from dtale.views import build_dtypes_state

    df1 = pd.DataFrame({
        "A": ["A0", "A1"],
        "B": ["B0", "B1"],
        "C": ["C0", "C1"],
        "D": ["D0", "D1"],
    })
    df2 = pd.DataFrame({
        "A": ["A2", "A3"],
        "B": ["B3", "B3"],
        "C": ["C3", "C3"],
        "D": ["D3", "D3"],
    })
    with app.test_client() as c:
        data = {"1": df1, "2": df2}
        dtypes = {k: build_dtypes_state(v) for k, v in data.items()}
        settings = {k: {} for k in data.keys()}
        with ExitStack() as stack:
            stack.enter_context(mock.patch("dtale.global_state.DATA", data))
            stack.enter_context(mock.patch("dtale.global_state.DTYPES", dtypes))
            stack.enter_context(mock.patch("dtale.global_state.SETTINGS", settings))

            datasets = [
                dict(dataId="1", columns=[]),
                dict(dataId="2", columns=[])
            ]
            config = dict(ignore_index=False)
            resp = c.post(
                "/dtale/merge",
                data=dict(
                    action="stack",
                    config=json.dumps(config),
                    datasets=json.dumps(datasets),
                ),
            )
            assert resp.status_code == 200
            final_df = data.get(resp.json["data_id"])
            unittest.assertEqual(list(final_df["A"].values), ["A0", "A1", "A2", "A3"])
            # original indexes are preserved as an 'index' column
            unittest.assertEqual(list(final_df["index"].values), [0, 1, 0, 1])

            config["ignoreIndex"] = True
            resp = c.post(
                "/dtale/merge",
                data=dict(
                    action="stack",
                    config=json.dumps(config),
                    datasets=json.dumps(datasets),
                ),
            )
            assert resp.status_code == 200
            final_df = data.get(resp.json["data_id"])
            assert "index" not in final_df.columns
            unittest.assertEqual(list(final_df["A"].values), ["A0", "A1", "A2", "A3"])
def test_get_correlations_ts(unittest, rolling_data):
    """Verify /dtale/correlations-ts: plain, rolling and error scenarios."""
    import dtale.views as views

    test_data = pd.DataFrame(build_ts_data(size=50),
                             columns=['date', 'security_id', 'foo', 'bar'])
    with app.test_client() as c:
        with mock.patch('dtale.views.DATA', {c.port: test_data}):
            params = dict(dateCol='date', cols=json.dumps(['foo', 'bar']))
            response = c.get('/dtale/correlations-ts/{}'.format(c.port),
                             query_string=params)
            response_data = json.loads(response.data)
            expected = {
                'data': {
                    'all': {
                        'x': [
                            '2000-01-01', '2000-01-02', '2000-01-03',
                            '2000-01-04', '2000-01-05'
                        ],
                        'corr': [1.0, 1.0, 1.0, 1.0, 1.0]
                    }
                },
                'max': {
                    'corr': 1.0,
                    'x': '2000-01-05'
                },
                'min': {
                    'corr': 1.0,
                    'x': '2000-01-01'
                },
                'success': True,
            }
            unittest.assertEqual(response_data, expected,
                                 'should return timeseries correlation')

    # rolling-window variant
    df, _ = views.format_data(rolling_data)
    with app.test_client() as c:
        with ExitStack() as stack:
            stack.enter_context(mock.patch('dtale.views.DATA', {c.port: df}))
            stack.enter_context(
                mock.patch('dtale.views.DTYPES', {c.port: views.build_dtypes_state(df)}))
            params = dict(dateCol='date', cols=json.dumps(['0', '1']),
                          rollingWindow='4')
            response = c.get('/dtale/correlations-ts/{}'.format(c.port),
                             query_string=params)
            response_data = json.loads(response.data)
            unittest.assertEqual(response_data['success'], True,
                                 'should return rolling correlation')

    # bad query should surface the underlying exception message
    with app.test_client() as c:
        with mock.patch('dtale.views.DATA', {c.port: test_data}):
            response = c.get('/dtale/correlations-ts/{}'.format(c.port),
                             query_string=dict(query="missing_col == 'blah'"))
            response_data = json.loads(response.data)
            unittest.assertEqual(response_data['error'],
                                 "name 'missing_col' is not defined",
                                 'should handle correlations exception')
def test_transpose(custom_data, unittest):
    """Verify /dtale/reshape transpose: oversized output errors, new dataset creation and override."""
    from dtale.views import build_dtypes_state

    global_state.clear_store()
    with app.test_client() as c:
        data = {c.port: custom_data}
        dtypes = {c.port: build_dtypes_state(custom_data)}
        settings = {c.port: {}}
        build_data_inst(data)
        build_dtypes(dtypes)
        build_settings(settings)

        # transposing the full dataset should fail (too many columns)
        reshape_cfg = dict(index=["security_id"], columns=["Col0"])
        resp = c.get(
            "/dtale/reshape/{}".format(c.port),
            query_string=dict(output="new", type="transpose",
                              cfg=json.dumps(reshape_cfg)),
        )
        response_data = json.loads(resp.data)
        new_key = int(c.port) + 1
        assert "error" in response_data

        # narrow the data to a single date and transpose into a new dataset
        min_date = custom_data["date"].min().strftime("%Y-%m-%d")
        global_state.set_settings(c.port, dict(query="date == '{}'".format(min_date)))
        reshape_cfg = dict(index=["date", "security_id"], columns=["Col0"])
        resp = c.get(
            "/dtale/reshape/{}".format(c.port),
            query_string=dict(output="new", type="transpose",
                              cfg=json.dumps(reshape_cfg)),
        )
        response_data = json.loads(resp.data)
        assert response_data["data_id"] == new_key
        assert len(global_state.keys()) == 2
        unittest.assertEqual(
            [d["name"] for d in global_state.get_dtypes(new_key)],
            [
                "index",
                "{} 00:00:00 100000".format(min_date),
                "{} 00:00:00 100001".format(min_date),
            ],
        )
        assert len(global_state.get_data(new_key)) == 1
        assert global_state.get_settings(new_key).get("startup_code") is not None
        c.get("/dtale/cleanup-datasets", query_string=dict(dataIds=new_key))

        # override mode replaces the existing dataset in place
        reshape_cfg = dict(index=["date", "security_id"])
        resp = c.get(
            "/dtale/reshape/{}".format(c.port),
            query_string=dict(output="override", type="transpose",
                              cfg=json.dumps(reshape_cfg)),
        )
        response_data = json.loads(resp.data)
        assert response_data["data_id"] == c.port
def test_show_colab(unittest, builtin_pkg):
    """Verify dtale.app.show builds its URL via google.colab when USE_COLAB is set."""
    from dtale.app import show, get_instance, instances
    import dtale.views as views
    import dtale.global_state as global_state

    orig_import = __import__
    mock_eval_js = mock.Mock()
    mock_eval_js.eval_js = lambda _port: "http://colab_host"

    def import_mock(name, *args, **kwargs):
        # intercept only the colab output module; defer everything else
        if name == "google.colab.output":
            return mock_eval_js
        return orig_import(name, *args, **kwargs)

    test_data = pd.DataFrame([dict(a=1, b=2)])
    with ExitStack() as stack:
        stack.enter_context(
            mock.patch("{}.__import__".format(builtin_pkg), side_effect=import_mock))
        stack.enter_context(mock.patch("dtale.app.USE_COLAB", True))
        stack.enter_context(mock.patch("dtale.app.ACTIVE_PORT", 40000))
        stack.enter_context(mock.patch("dtale.app.ACTIVE_HOST", "localhost"))
        mock_run = stack.enter_context(
            mock.patch("dtale.app.DtaleFlask.run", mock.Mock()))
        stack.enter_context(
            mock.patch("dtale.app.is_up", mock.Mock(return_value=False)))
        mock_requests = stack.enter_context(
            mock.patch("requests.get", mock.Mock()))

        instance = show(data=test_data, subprocess=False, name="foo",
                        ignore_duplicate=True)
        assert "http://colab_host" == instance._url
        mock_run.assert_called_once()
        pdt.assert_frame_equal(instance.data, test_data)

        # reassigning .data should refresh the stored dtypes state
        tmp = test_data.copy()
        tmp["biz"] = 2.5
        instance.data = tmp
        unittest.assertEqual(
            global_state.get_dtypes(instance._data_id),
            views.build_dtypes_state(tmp),
            "should update app data/dtypes",
        )

        instance2 = get_instance(instance._data_id)
        assert instance2._url == instance._url
        instances()
        assert get_instance(20) is None  # should return None for invalid data ids

        instance.kill()
        mock_requests.assert_called_once()
        assert mock_requests.call_args[0][0] == "http://colab_host/shutdown"
def test_stack(unittest):
    """Verify /dtale/merge stack action against the global_state store."""
    from dtale.views import build_dtypes_state
    import dtale.global_state as global_state

    global_state.clear_store()
    df1 = pd.DataFrame({
        "A": ["A0", "A1"],
        "B": ["B0", "B1"],
        "C": ["C0", "C1"],
        "D": ["D0", "D1"],
    })
    df2 = pd.DataFrame({
        "A": ["A2", "A3"],
        "B": ["B3", "B3"],
        "C": ["C3", "C3"],
        "D": ["D3", "D3"],
    })
    with app.test_client() as c:
        data = {"1": df1, "2": df2}
        dtypes = {k: build_dtypes_state(v) for k, v in data.items()}
        settings = {k: {} for k in data.keys()}
        build_data_inst(data)
        build_dtypes(dtypes)
        build_settings(settings)

        datasets = [dict(dataId="1", columns=[]), dict(dataId="2", columns=[])]
        config = dict(ignore_index=False)
        resp = c.post(
            "/dtale/merge",
            data=dict(
                action="stack",
                config=json.dumps(config),
                datasets=json.dumps(datasets),
            ),
        )
        assert resp.status_code == 200
        final_df = global_state.get_data(resp.json["data_id"])
        unittest.assertEqual(list(final_df["A"].values), ["A0", "A1", "A2", "A3"])
        # original indexes are preserved as an 'index' column
        unittest.assertEqual(list(final_df["index"].values), [0, 1, 0, 1])

        config["ignoreIndex"] = True
        resp = c.post(
            "/dtale/merge",
            data=dict(
                action="stack",
                config=json.dumps(config),
                datasets=json.dumps(datasets),
            ),
        )
        assert resp.status_code == 200
        final_df = global_state.get_data(resp.json["data_id"])
        assert "index" not in final_df.columns
        unittest.assertEqual(list(final_df["A"].values), ["A0", "A1", "A2", "A3"])
def test_view(unittest):
    """Verify /dtale/build-replacement against the global_state store.

    Fix: ``assert dtypes[-1]["dtype"] == "string" if PY3 else "mixed"`` parses
    as ``(dtypes[-1]["dtype"] == "string") if PY3 else "mixed"`` because the
    conditional expression binds looser than ``==`` — on PY2 the assert was
    vacuously true (asserting the truthy string ``"mixed"``). Parenthesizing
    the conditional restores the intended comparison.
    """
    from dtale.views import build_dtypes_state
    import dtale.global_state as global_state

    global_state.clear_store()
    df = replacements_data()
    with app.test_client() as c:
        data = {c.port: df}
        dtypes = {c.port: build_dtypes_state(df)}
        build_data_inst(data)
        build_dtypes(dtypes)

        resp = c.get(
            "/dtale/build-replacement/{}".format(c.port),
            query_string=dict(type="not_implemented", name="test", cfg=json.dumps({})),
        )
        response_data = resp.json
        assert (
            response_data["error"]
            == "'not_implemented' replacement not implemented yet!"
        )

        # a replacement may not reuse an existing column name
        params = dict(
            type="value",
            col="e",
            name="a",
            cfg=json.dumps(
                {"value": [dict(value="nan", type="raw", replace="for test")]}
            ),
        )
        resp = c.get("/dtale/build-replacement/{}".format(c.port), query_string=params)
        response_data = resp.json
        assert response_data["error"] == "A column named 'a' already exists!"

        # build the replacement into a new column 'e2'
        params = dict(
            type="value",
            col="e",
            name="e2",
            cfg=json.dumps(
                {"value": [dict(value="nan", type="raw", replace="for test")]}
            ),
        )
        c.get("/dtale/build-replacement/{}".format(c.port), query_string=params)
        unittest.assertEqual(list(data[c.port]["e2"].values), ["a", "for test", "b"])
        dtypes = global_state.get_dtypes(c.port)
        assert dtypes[-1]["name"] == "e2"
        # parenthesized: previously parsed as (x == "string") if PY3 else "mixed"
        assert dtypes[-1]["dtype"] == ("string" if PY3 else "mixed")
        assert not dtypes[-1]["hasMissing"]

        # without a name, the replacement is applied in place on 'e'
        del params["name"]
        c.get("/dtale/build-replacement/{}".format(c.port), query_string=params)
        dtypes = global_state.get_dtypes(c.port)
        unittest.assertEqual(list(data[c.port]["e"].values), ["a", "for test", "b"])
        e_dtype = next((d for d in dtypes if d["name"] == "e"))
        assert not e_dtype["hasMissing"]
def test_get_pps_matrix(unittest, test_data):
    """Verify /dtale/correlations?pps=true returns a PPS score matrix and code snippet."""
    import dtale.views as views

    with app.test_client() as c:
        test_data, _ = views.format_data(test_data)
        build_data_inst({c.port: test_data})
        build_dtypes({c.port: views.build_dtypes_state(test_data)})

        response = c.get("/dtale/correlations/{}?pps=true".format(c.port))
        response_data = response.json
        expected = [
            {
                "bar": 1,
                "column": "bar",
                "foo": 0,
                "security_id": 0
            },
            {
                "bar": 0,
                "column": "foo",
                "foo": 1,
                "security_id": 0
            },
            {
                "bar": 0,
                "column": "security_id",
                "foo": 0,
                "security_id": 1
            },
        ]
        unittest.assertEqual(
            response_data["data"],
            expected,
            "should return scores",
        )

        # spot-check the detailed PPS entry for (x=foo, y=security_id)
        pps_val = next(
            (p for p in response_data["pps"]
             if p["y"] == "security_id" and p["x"] == "foo"),
            None,
        )
        expected = {
            "baseline_score": 12.5,
            "case": "regression",
            "is_valid_score": "True",
            "metric": "mean absolute error",
            "model": "DecisionTreeRegressor()",
            "model_score": 12.635071,
            "ppscore": 0,
            "x": "foo",
            "y": "security_id",
        }
        unittest.assertEqual(pps_val, expected, "should return PPS information")

        assert "import ppscore" in response_data["code"]
        assert "corr_data = ppscore.matrix(corr_data)" in response_data["code"]
def test_302():
    """Verify the root/shortcut routes redirect (302) to the main view."""
    import dtale.views as views

    df = pd.DataFrame([1, 2, 3])
    df, _ = views.format_data(df)
    with app.test_client() as c:
        with ExitStack() as stack:
            stack.enter_context(mock.patch('dtale.views.DATA', {c.port: df}))
            stack.enter_context(
                mock.patch('dtale.views.DTYPES', {c.port: views.build_dtypes_state(df)}))
            redirect_paths = [
                '/', '/dtale', '/dtale/main', '/dtale/iframe',
                '/dtale/popup/test', '/favicon.ico'
            ]
            for path in redirect_paths:
                response = c.get(path)
                assert response.status_code == 302, \
                    '{} should return 302 response'.format(path)
def initialize_store(test_data):
    """Helper function that sets up a default store with some data in it"""
    global_state.cleanup()
    global_state.use_default_store()
    for data_id in ["1", "2"]:
        global_state.set_data(data_id, test_data)
        global_state.set_dtypes(data_id, build_dtypes_state(test_data))
        global_state.set_settings(data_id, dict(locked=[]))
        global_state.set_name(data_id, "test_name" + data_id)
        global_state.set_context_variables(
            data_id, dict(favorite_words=["foo", "bar", "baz"])
        )
        global_state.set_history(data_id, ["foo", "bar", "baz"])
def initialize_store(test_data):
    """Helper function that sets up a default store with some data in it"""
    global_state.cleanup()
    global_state.use_default_store()
    for data_id in ['1', '2']:
        global_state.set_data(data_id, test_data)
        global_state.set_dtypes(data_id, build_dtypes_state(test_data))
        global_state.set_settings(data_id, dict(locked=[]))
        global_state.set_metadata(data_id, dict(name='test_name'))
        global_state.set_context_variables(
            data_id, dict(favorite_words=['foo', 'bar', 'baz'])
        )
        global_state.set_history(data_id, ['foo', 'bar', 'baz'])
def test_correlation_analysis(unittest, rolling_data):
    """Verify /dtale/corr-analysis returns ranks covered by the correlation map."""
    import dtale.views as views

    with app.test_client() as c:
        df, _ = views.format_data(rolling_data)
        build_data_inst({c.port: df})
        build_dtypes({c.port: views.build_dtypes_state(df)})
        response = c.get("/dtale/corr-analysis/{}".format(c.port))
        response_data = json.loads(response.data)
        ranks = response_data["ranks"]
        corrs = response_data["corrs"]
        assert len(ranks) == 5
        assert all(r["column"] in corrs for r in ranks)
        assert all(r["missing"] == 0 for r in ranks)
def test_get_column_analysis_kde():
    """Verify histogram analysis returns a KDE series with bins + 1 points."""
    import dtale.views as views

    df = pd.DataFrame(dict(a=np.random.randn(100)))
    with app.test_client() as c:
        build_data_inst({c.port: df})
        build_dtypes({c.port: views.build_dtypes_state(df)})
        build_settings({c.port: {}})
        response = c.get(
            "/dtale/column-analysis/{}".format(c.port),
            query_string=dict(col="a", type="histogram", bins=50),
        )
        response_data = json.loads(response.data)
        assert len(response_data["kde"]) == 51
def test_get_column_analysis_qq():
    """Verify Q-Q plot analysis returns one point per observation."""
    import dtale.views as views

    df = pd.DataFrame(dict(a=np.random.normal(loc=20, scale=5, size=100)))
    with app.test_client() as c:
        build_data_inst({c.port: df})
        build_dtypes({c.port: views.build_dtypes_state(df)})
        build_settings({c.port: {}})
        response = c.get(
            "/dtale/column-analysis/{}".format(c.port),
            query_string=dict(col="a", type="qq"),
        )
        response_data = json.loads(response.data)
        assert len(response_data["data"]) == 100
def test_probability_histogram(unittest, test_data):
    """Verify column-analysis with density=true builds a probability histogram.

    Fix: the body was wrapped in ``with ExitStack():`` without a single
    ``enter_context`` call — a no-op context manager left over from
    copy-paste; removed.
    """
    import dtale.views as views

    with app.test_client() as c:
        build_data_inst({c.port: test_data})
        build_dtypes({c.port: views.build_dtypes_state(test_data)})
        build_settings({c.port: {}})
        response = c.get(
            "/dtale/column-analysis/{}".format(c.port),
            query_string=dict(col="foo", density="true"),
        )
        response_data = json.loads(response.data)
        assert response.status_code == 200
        assert "np.histogram(s['foo'], density=True)" in response_data["code"]
def test_edit_float():
    """Verify /dtale/edit-cell updates a float cell in place."""
    from dtale.views import build_dtypes_state, format_data

    df = edit_data()
    df, _ = format_data(df)
    with app.test_client() as c:
        data = {c.port: df}
        build_data_inst(data)
        settings = {c.port: {"locked": ["a"]}}
        build_settings(settings)
        dtypes = {c.port: build_dtypes_state(df)}
        build_dtypes(dtypes)

        resp = c.get(
            "/dtale/edit-cell/{}/b".format(c.port),
            query_string=dict(rowIndex=0, updated=2.5),
        )
        assert "error" not in resp.json
        assert data[c.port]["b"].values[0] == 2.5
def test_view(unittest):
    """Verify /dtale/build-replacement (mock.patch variant) including dtype bookkeeping.

    Fix: ``assert dtypes[c.port][-1]['dtype'] == 'string' if PY3 else 'mixed'``
    parses as ``(... == 'string') if PY3 else 'mixed'`` because the conditional
    expression binds looser than ``==`` — on PY2 it asserted the truthy string
    ``'mixed'`` and always passed. Parenthesizing restores the intended check.
    """
    from dtale.views import build_dtypes_state

    df = replacements_data()
    with app.test_client() as c:
        data = {c.port: df}
        dtypes = {c.port: build_dtypes_state(df)}
        with ExitStack() as stack:
            stack.enter_context(mock.patch('dtale.global_state.DATA', data))
            stack.enter_context(mock.patch('dtale.global_state.DTYPES', dtypes))

            resp = c.get(
                '/dtale/build-replacement/{}'.format(c.port),
                query_string=dict(type='not_implemented', name='test', cfg=json.dumps({}))
            )
            response_data = resp.json
            assert response_data['error'] == "'not_implemented' replacement not implemented yet!"

            # a replacement may not reuse an existing column name
            params = dict(
                type='value', col='e', name='a',
                cfg=json.dumps({'value': [dict(value='nan', type='raw', replace='for test')]})
            )
            resp = c.get('/dtale/build-replacement/{}'.format(c.port), query_string=params)
            response_data = resp.json
            assert response_data['error'] == "A column named 'a' already exists!"

            # build the replacement into a new column 'e2'
            params = dict(
                type='value', col='e', name='e2',
                cfg=json.dumps({'value': [dict(value='nan', type='raw', replace='for test')]})
            )
            c.get('/dtale/build-replacement/{}'.format(c.port), query_string=params)
            unittest.assertEqual(list(data[c.port]['e2'].values), ['a', 'for test', 'b'])
            assert dtypes[c.port][-1]['name'] == 'e2'
            # parenthesized: previously parsed as (x == 'string') if PY3 else 'mixed'
            assert dtypes[c.port][-1]['dtype'] == ('string' if PY3 else 'mixed')
            assert not dtypes[c.port][-1]['hasMissing']

            # without a name, the replacement is applied in place on 'e'
            del params['name']
            c.get('/dtale/build-replacement/{}'.format(c.port), query_string=params)
            unittest.assertEqual(list(data[c.port]['e'].values), ['a', 'for test', 'b'])
            e_dtype = next((d for d in dtypes[c.port] if d['name'] == 'e'))
            assert not e_dtype['hasMissing']
def test_edit_float():
    """Verify /dtale/edit-cell updates a float cell (mock.patch state variant)."""
    from dtale.views import build_dtypes_state, format_data

    df = edit_data()
    df, _ = format_data(df)
    with app.test_client() as c:
        with ExitStack() as stack:
            data = {c.port: df}
            stack.enter_context(mock.patch("dtale.global_state.DATA", data))
            settings = {c.port: {"locked": ["a"]}}
            stack.enter_context(mock.patch("dtale.global_state.SETTINGS", settings))
            dtypes = {c.port: build_dtypes_state(df)}
            stack.enter_context(mock.patch("dtale.global_state.DTYPES", dtypes))

            resp = c.get(
                "/dtale/edit-cell/{}/b".format(c.port),
                query_string=dict(rowIndex=0, updated=2.5),
            )
            assert "error" not in resp.json
            assert data[c.port]["b"].values[0] == 2.5
def test_resample(unittest):
    """Verify /dtale/reshape resample creates a new downsampled dataset."""
    from dtale.views import build_dtypes_state, format_data

    start, end = "2000-10-01 23:30:00", "2000-10-03 00:30:00"
    rng = pd.date_range(start, end, freq="7min")
    ts = pd.Series(np.arange(len(rng)) * 3, index=rng)
    ts2 = pd.Series(np.arange(len(rng)) * 0.32, index=rng)
    df = pd.DataFrame(data={"col1": ts, "col2": ts2})
    df, _ = format_data(df)

    global_state.clear_store()
    with app.test_client() as c:
        data = {c.port: df}
        dtypes = {c.port: build_dtypes_state(df)}
        settings = {c.port: {}}
        build_data_inst(data)
        build_dtypes(dtypes)
        build_settings(settings)

        reshape_cfg = dict(index="index", columns=["col1"], freq="17min", agg="mean")
        resp = c.get(
            "/dtale/reshape/{}".format(c.port),
            query_string=dict(output="new", type="resample",
                              cfg=json.dumps(reshape_cfg)),
        )
        response_data = json.loads(resp.data)
        new_key = int(c.port) + 1
        assert response_data["data_id"] == new_key
        assert len(global_state.keys()) == 2
        unittest.assertEqual(
            [d["name"] for d in global_state.get_dtypes(new_key)],
            ["index_17min", "col1"],
        )
        assert len(global_state.get_data(new_key)) == 90
        assert global_state.get_settings(new_key).get("startup_code") is not None
        c.get("/dtale/cleanup-datasets", query_string=dict(dataIds=new_key))
def test_edit_timedelta():
    """Verify /dtale/edit-cell parses and stores a timedelta value."""
    from dtale.views import build_dtypes_state, format_data

    df = edit_data()
    df, _ = format_data(df)
    with app.test_client() as c:
        data = {c.port: df}
        build_data_inst(data)
        settings = {c.port: {"locked": ["a"]}}
        build_settings(settings)
        dtypes = {c.port: build_dtypes_state(df)}
        build_dtypes(dtypes)

        resp = c.get(
            "/dtale/edit-cell/{}".format(c.port),
            query_string=dict(col="g", rowIndex=0, updated="0 days 00:09:20"),
        )
        assert "error" not in resp.json
        assert pd.Timedelta(data[c.port]["g"].values[0]) == pd.Timedelta(
            "0 days 00:09:20"
        )
def test_edit_timestamp():
    """Verify /dtale/edit-cell parses and stores a nanosecond-precision timestamp."""
    from dtale.views import build_dtypes_state, format_data

    df = edit_data()
    df, _ = format_data(df)
    with app.test_client() as c:
        data = {c.port: df}
        build_data_inst(data)
        settings = {c.port: {"locked": ["a"]}}
        build_settings(settings)
        dtypes = {c.port: build_dtypes_state(df)}
        build_dtypes(dtypes)

        resp = c.get(
            "/dtale/edit-cell/{}/e".format(c.port),
            query_string=dict(rowIndex=0, updated="20000101 11:58:59.999999999"),
        )
        assert "error" not in resp.json
        assert pd.Timestamp(data[c.port]["e"].values[0]) == pd.Timestamp(
            "2000-01-01 11:58:59.999999999"
        )