def test_transpose(custom_data, unittest):
    """Exercise /dtale/reshape with type=transpose.

    Covers: a failing transpose (duplicate index values), a successful
    transpose into a new data instance, and an 'override' transpose that
    reuses the original data id.
    """
    from dtale.views import build_dtypes_state

    global_state.clear_store()
    with app.test_client() as c:
        # seed the global store with the fixture data for this client's port
        data = {c.port: custom_data}
        dtypes = {c.port: build_dtypes_state(custom_data)}
        settings = {c.port: {}}
        build_data_inst(data)
        build_dtypes(dtypes)
        build_settings(settings)
        # index on security_id alone is not unique, so this transpose should error
        reshape_cfg = dict(index=["security_id"], columns=["Col0"])
        resp = c.get(
            "/dtale/reshape/{}".format(c.port),
            query_string=dict(output="new", type="transpose", cfg=json.dumps(reshape_cfg)),
        )
        response_data = json.loads(resp.data)
        # a successful "new" reshape would be stored under the next integer id
        new_key = int(c.port) + 1
        assert "error" in response_data
        # narrow the data to a single date via a settings query so the
        # (date, security_id) index becomes unique
        min_date = custom_data["date"].min().strftime("%Y-%m-%d")
        global_state.set_settings(c.port, dict(query="date == '{}'".format(min_date)))
        reshape_cfg = dict(index=["date", "security_id"], columns=["Col0"])
        resp = c.get(
            "/dtale/reshape/{}".format(c.port),
            query_string=dict(output="new", type="transpose", cfg=json.dumps(reshape_cfg)),
        )
        response_data = json.loads(resp.data)
        assert response_data["data_id"] == new_key
        assert len(global_state.keys()) == 2
        # transposed columns are built from the "<date> <security_id>" index values
        unittest.assertEqual(
            [d["name"] for d in global_state.get_dtypes(new_key)],
            [
                "index",
                "{} 00:00:00 100000".format(min_date),
                "{} 00:00:00 100001".format(min_date),
            ],
        )
        assert len(global_state.get_data(new_key)) == 1
        # reshapes record the code used to create them
        assert global_state.get_settings(new_key).get("startup_code") is not None
        c.get("/dtale/cleanup-datasets", query_string=dict(dataIds=new_key))
        # output="override" should replace the data in place, keeping the same id
        reshape_cfg = dict(index=["date", "security_id"])
        resp = c.get(
            "/dtale/reshape/{}".format(c.port),
            query_string=dict(output="override", type="transpose", cfg=json.dumps(reshape_cfg)),
        )
        response_data = json.loads(resp.data)
        assert response_data["data_id"] == c.port
def get_data_id(pathname):
    """Extract the data ID from a query path (ex: 'foo/bar/1' => '1').

    Returns None when no data is loaded; falls back to the lowest stored
    key when the parsed ID is not present in the global store.
    """
    if global_state.size() == 0:
        return None
    candidate = pathname.rsplit("/", 1)[-1]
    if global_state.contains(candidate):
        return candidate
    return sorted(global_state.keys())[0]
def _instance_msgs():
    """Yield ``[id, name, url]`` rows for every dataset in the global store.

    A named dataset produces a second row keyed by its URL-safe name path.
    """
    for curr_id in global_state.keys():
        instance = DtaleData(curr_id, build_url(ACTIVE_PORT, ACTIVE_HOST))
        curr_name = global_state.get_name(curr_id)
        yield [curr_id, curr_name or "", instance.build_main_url(data_id=curr_id)]
        if curr_name is None:
            continue
        yield [
            global_state.convert_name_to_url_path(curr_name),
            curr_name,
            instance.build_main_url(),
        ]
def _instance_msgs():
    """Yield ``[id, name, url]`` rows for every dataset in the global store.

    URLs are built through the startup URL/app-root helper so they work
    behind jupyter-server-proxy; named datasets yield an extra row keyed
    by their URL-safe name path.
    """
    for curr_id in global_state.keys():
        startup_url, final_app_root = build_startup_url_and_app_root()
        data_obj = DtaleData(
            curr_id,
            startup_url,
            is_proxy=JUPYTER_SERVER_PROXY,
            app_root=final_app_root,
        )
        curr_name = global_state.get_name(curr_id)
        yield [curr_id, curr_name or "", data_obj.build_main_url()]
        if curr_name is None:
            continue
        yield [
            global_state.convert_name_to_url_path(curr_name),
            curr_name,
            data_obj.build_main_url(),
        ]
def test_resample(unittest):
    """Exercise /dtale/reshape with type=resample on a 7-minute time series,
    resampled to 17-minute means of col1."""
    from dtale.views import build_dtypes_state, format_data

    # build a two-column frame indexed by a 7-minute date range
    start, end = "2000-10-01 23:30:00", "2000-10-03 00:30:00"
    rng = pd.date_range(start, end, freq="7min")
    ts = pd.Series(np.arange(len(rng)) * 3, index=rng)
    ts2 = pd.Series(np.arange(len(rng)) * 0.32, index=rng)
    df = pd.DataFrame(data={"col1": ts, "col2": ts2})
    df, _ = format_data(df)
    global_state.clear_store()
    with app.test_client() as c:
        # seed the global store with the frame for this client's port
        data = {c.port: df}
        dtypes = {c.port: build_dtypes_state(df)}
        settings = {c.port: {}}
        build_data_inst(data)
        build_dtypes(dtypes)
        build_settings(settings)
        reshape_cfg = dict(index="index", columns=["col1"], freq="17min", agg="mean")
        resp = c.get(
            "/dtale/reshape/{}".format(c.port),
            query_string=dict(output="new", type="resample", cfg=json.dumps(reshape_cfg)),
        )
        response_data = json.loads(resp.data)
        # "new" output lands under the next integer id
        new_key = int(c.port) + 1
        assert response_data["data_id"] == new_key
        assert len(global_state.keys()) == 2
        # the resampled index column is renamed to include the frequency
        unittest.assertEqual(
            [d["name"] for d in global_state.get_dtypes(new_key)],
            ["index_17min", "col1"],
        )
        assert len(global_state.get_data(new_key)) == 90
        # reshapes record the code used to create them
        assert global_state.get_settings(new_key).get("startup_code") is not None
        c.get("/dtale/cleanup-datasets", query_string=dict(dataIds=new_key))
def test_pivot(custom_data, unittest):
    """Exercise /dtale/reshape with type=pivot.

    Covers: a plain mean pivot, a sum pivot with columnNameHeaders
    (column names prefixed with the pivoted column), and a multi-value
    pivot producing "<value> <column>" headers.
    """
    from dtale.views import build_dtypes_state

    global_state.clear_store()
    with app.test_client() as c:
        # seed the global store with the fixture data for this client's port
        data = {c.port: custom_data}
        dtypes = {c.port: build_dtypes_state(custom_data)}
        settings = {c.port: {}}
        build_data_inst(data)
        build_dtypes(dtypes)
        build_settings(settings)
        reshape_cfg = dict(index=["date"], columns=["security_id"], values=["Col0"], aggfunc="mean")
        resp = c.get(
            "/dtale/reshape/{}".format(c.port),
            query_string=dict(output="new", type="pivot", cfg=json.dumps(reshape_cfg)),
        )
        response_data = json.loads(resp.data)
        # "new" output lands under the next integer id
        new_key = int(c.port) + 1
        assert response_data["data_id"] == new_key
        assert len(global_state.keys()) == 2
        # pivoted columns are the distinct security_id values
        unittest.assertEqual(
            [d["name"] for d in global_state.get_dtypes(new_key)],
            ["date", "100000", "100001"],
        )
        assert len(global_state.get_data(new_key)) == 365
        assert global_state.get_settings(new_key).get("startup_code") is not None
        resp = c.get("/dtale/cleanup-datasets", query_string=dict(dataIds=new_key))
        assert json.loads(resp.data)["success"]
        assert len(global_state.keys()) == 1
        # columnNameHeaders prefixes each pivoted column with its source column name
        reshape_cfg["columnNameHeaders"] = True
        reshape_cfg["aggfunc"] = "sum"
        resp = c.get(
            "/dtale/reshape/{}".format(c.port),
            query_string=dict(output="new", type="pivot", cfg=json.dumps(reshape_cfg)),
        )
        response_data = json.loads(resp.data)
        assert response_data["data_id"] == new_key
        assert len(global_state.keys()) == 2
        unittest.assertEqual(
            [d["name"] for d in global_state.get_dtypes(new_key)],
            ["date", "security_id-100000", "security_id-100001"],
        )
        assert len(global_state.get_data(new_key)) == 365
        assert global_state.get_settings(new_key).get("startup_code") is not None
        c.get("/dtale/cleanup-datasets", query_string=dict(dataIds=new_key))
        # multiple values produce "<value> <column>" composite headers
        reshape_cfg["columnNameHeaders"] = False
        reshape_cfg["values"] = ["Col0", "Col1"]
        resp = c.get(
            "/dtale/reshape/{}".format(c.port),
            query_string=dict(output="new", type="pivot", cfg=json.dumps(reshape_cfg)),
        )
        response_data = json.loads(resp.data)
        assert response_data["data_id"] == new_key
        assert len(global_state.keys()) == 2
        unittest.assertEqual(
            [d["name"] for d in global_state.get_dtypes(new_key)],
            [
                "date",
                "Col0 100000",
                "Col0 100001",
                "Col1 100000",
                "Col1 100001",
            ],
        )
        assert len(global_state.get_data(new_key)) == 365
        assert global_state.get_settings(new_key).get("startup_code") is not None
        c.get("/dtale/cleanup-datasets", query_string=dict(dataIds=new_key))
def test_aggregate(custom_data, unittest):
    """Exercise /dtale/reshape with type=aggregate.

    Covers: per-column aggregations (type="col"), a single function over
    selected columns (type="func" with cols), and a single function over
    all columns (type="func" with no cols).
    """
    from dtale.views import build_dtypes_state

    global_state.clear_store()
    with app.test_client() as c:
        # seed the global store with the fixture data for this client's port
        data = {c.port: custom_data}
        dtypes = {c.port: build_dtypes_state(custom_data)}
        settings = {c.port: {}}
        build_data_inst(data)
        build_dtypes(dtypes)
        build_settings(settings)
        # per-column aggregation list: two aggs on Col0, one on Col1
        reshape_cfg = dict(
            index="date",
            agg=dict(type="col", cols={
                "Col0": ["sum", "mean"],
                "Col1": ["count"]
            }),
        )
        resp = c.get(
            "/dtale/reshape/{}".format(c.port),
            query_string=dict(output="new", type="aggregate", cfg=json.dumps(reshape_cfg)),
        )
        response_data = json.loads(resp.data)
        # "new" output lands under the next integer id
        new_key = int(c.port) + 1
        assert response_data["data_id"] == new_key
        assert len(global_state.keys()) == 2
        # aggregated columns are named "<column> <agg>"
        unittest.assertEqual(
            [d["name"] for d in global_state.get_dtypes(new_key)],
            ["date", "Col0 sum", "Col0 mean", "Col1 count"],
        )
        assert len(global_state.get_data(new_key)) == 365
        assert global_state.get_settings(new_key).get("startup_code") is not None
        c.get("/dtale/cleanup-datasets", query_string=dict(dataIds=new_key))
        # single function applied to an explicit column subset keeps original names
        reshape_cfg = dict(index="date", agg=dict(type="func", func="mean", cols=["Col0", "Col1"]))
        resp = c.get(
            "/dtale/reshape/{}".format(c.port),
            query_string=dict(output="new", type="aggregate", cfg=json.dumps(reshape_cfg)),
        )
        response_data = json.loads(resp.data)
        assert response_data["data_id"] == new_key
        assert len(global_state.keys()) == 2
        unittest.assertEqual(
            [d["name"] for d in global_state.get_dtypes(new_key)],
            ["date", "Col0", "Col1"],
        )
        assert len(global_state.get_data(new_key)) == 365
        assert global_state.get_settings(new_key).get("startup_code") is not None
        c.get("/dtale/cleanup-datasets", query_string=dict(dataIds=new_key))
        # no cols specified: the function is applied to every column
        reshape_cfg = dict(index="date", agg=dict(type="func", func="mean"))
        resp = c.get(
            "/dtale/reshape/{}".format(c.port),
            query_string=dict(output="new", type="aggregate", cfg=json.dumps(reshape_cfg)),
        )
        response_data = json.loads(resp.data)
        assert response_data["data_id"] == new_key
        assert len(global_state.keys()) == 2
        unittest.assertEqual(
            [d["name"] for d in global_state.get_dtypes(new_key)],
            [
                "date",
                "security_id",
                "int_val",
                "Col0",
                "Col1",
                "Col2",
                "bool_val"
            ],
        )
        assert len(global_state.get_data(new_key)) == 365
        assert global_state.get_settings(new_key).get("startup_code") is not None
        c.get("/dtale/cleanup-datasets", query_string=dict(dataIds=new_key))
def test_upload(unittest):
    """Exercise /dtale/upload.

    Covers: a POST with no files (failure), CSV upload with the standard
    and custom separators, xlsx upload (PY3 only), and a multi-sheet xlsx
    upload (pd.read_excel mocked) which creates one dataset per sheet.
    """
    import dtale.views as views
    import dtale.global_state as global_state

    global_state.clear_store()
    df, _ = views.format_data(pd.DataFrame([1, 2, 3]))
    with build_app(url=URL).test_client() as c:
        # one pre-existing dataset under this client's port
        build_data_inst({c.port: df})
        global_state.set_dtypes(c.port, views.build_dtypes_state(df))
        # no file payload -> upload should fail
        resp = c.post("/dtale/upload")
        assert not resp.get_json()["success"]
        c.post(
            "/dtale/upload",
            data={
                "tests_df.csv": (build_upload_data(), "test_df.csv"),
                "separatorType": "csv",
            },
        )
        assert global_state.size() == 2
        # find the id of the newly-uploaded dataset
        new_key = next((k for k in global_state.keys() if k != c.port), None)
        assert list(global_state.get_data(new_key).columns) == ["a", "b", "c"]
    with build_app(url=URL).test_client() as c:
        global_state.clear_store()
        build_data_inst({c.port: df})
        global_state.set_dtypes(c.port, views.build_dtypes_state(df))
        resp = c.post("/dtale/upload")
        assert not resp.get_json()["success"]
        # same upload using an explicitly-specified custom separator
        c.post(
            "/dtale/upload",
            data={
                "tests_df.csv": (build_upload_data(), "test_df.csv"),
                "separatorType": "custom",
                "separator": ",",
            },
        )
        assert global_state.size() == 2
        new_key = next((k for k in global_state.keys() if k != c.port), None)
        assert list(global_state.get_data(new_key).columns) == ["a", "b", "c"]
    with build_app(url=URL).test_client() as c:
        global_state.clear_store()
        build_data_inst({c.port: df})
        global_state.set_dtypes(c.port, views.build_dtypes_state(df))
        assert global_state.size() == 1
        # xlsx parsing is only supported under python 3
        if PY3:
            c.post(
                "/dtale/upload",
                data={
                    "test_df.xlsx": (
                        os.path.join(
                            os.path.dirname(__file__), "..", "data/test_df.xlsx"
                        ),
                        "test_df.xlsx",
                    )
                },
            )
            assert global_state.size() == 2
            new_key = next((k for k in global_state.keys() if k != c.port), None)
            assert list(global_state.get_data(new_key).columns) == ["a", "b", "c"]
    with build_app(url=URL).test_client() as c:
        with ExitStack() as stack:
            global_state.clear_store()
            data = {c.port: df}
            build_data_inst(data)
            global_state.set_dtypes(c.port, views.build_dtypes_state(df))
            # mock a two-sheet workbook: each sheet should become its own dataset
            stack.enter_context(
                mock.patch(
                    "dtale.views.pd.read_excel",
                    mock.Mock(
                        return_value={
                            "Sheet 1": pd.DataFrame(dict(a=[1], b=[2])),
                            "Sheet 2": pd.DataFrame(dict(c=[1], d=[2])),
                        }
                    ),
                )
            )
            resp = c.post(
                "/dtale/upload",
                data={
                    "test_df.xlsx": (
                        os.path.join(
                            os.path.dirname(__file__), "..", "data/test_df.xlsx"
                        ),
                        "test_df.xlsx",
                    )
                },
            )
            # original dataset + one per sheet
            assert global_state.size() == 3
            sheets = resp.json["sheets"]
            assert len(sheets) == 2
            unittest.assertEqual(
                sorted([s["name"] for s in sheets]),
                ["Sheet 1", "Sheet 2"],
            )