def test_custom_cli_loaders():
    """Loaders discovered via the DTALE_CLI_LOADERS env var should drive the CLI.

    Verifies that both custom test loaders (``--testcli`` and
    ``--testcli2-prop``) are picked up and produce the expected frames.
    """
    # os.path.join is portable; joining on a literal "/../" is not.
    custom_loader_path = os.path.join(os.path.dirname(__file__), '..', 'data')
    os.environ['DTALE_CLI_LOADERS'] = custom_loader_path
    # reload so the loader registry re-scans using the env var set above
    reload(loaders)
    reload(script)
    with ExitStack() as stack:
        mock_show = stack.enter_context(
            mock.patch('dtale.cli.script.show', mock.Mock()))
        args = ['--port', '9999', '--testcli']
        script.main(args, standalone_mode=False)
        mock_show.assert_called_once()
        _, kwargs = mock_show.call_args
        assert kwargs['data_loader'] is not None
        pdt.assert_frame_equal(kwargs['data_loader'](),
                               pd.DataFrame([dict(security_id=1, foo=1.5)]))
        mock_show.reset_mock()
        args = ['--port', '9999', '--testcli2-prop', 'foo']
        script.main(args, standalone_mode=False)
        _, kwargs = mock_show.call_args
        assert kwargs['data_loader'] is not None
        pdt.assert_frame_equal(kwargs['data_loader'](),
                               pd.DataFrame([dict(security_id=1, foo='foo')]))
def test_sqlite_loader():
    """Exercise the sqlite CLI loader end-to-end against the bundled test DB."""
    prop_names = ["host", "port", "debug", "subprocess", "data_loader", "reaper_on"]
    with mock.patch("dtale.cli.script.show", mock.Mock()) as mock_show:
        db_path = os.path.join(
            os.path.dirname(__file__), "..", "data/test.sqlite3"
        )
        cli_args = [
            "--host", "test",
            "--port", "9999",
            "--sqlite-path", db_path,
            "--sqlite-table", "test_simpsons",
        ]
        script.main(cli_args, standalone_mode=False)
        mock_show.assert_called_once()
        _, kwargs = mock_show.call_args
        host, port, debug, subprocess, data_loader, reaper_on = [
            kwargs.get(name) for name in prop_names
        ]
        assert host == "test"
        assert not subprocess
        assert not debug
        assert port == 9999
        assert reaper_on
        assert data_loader is not None
        # the fixture table contains exactly five rows
        df = data_loader()
        assert len(df) == 5
def test_arctic_import_error(builtin_pkg):
    """A missing arctic package should surface a clear ImportError from the CLI."""
    real_import = __import__

    def fake_import(name, *args, **kwargs):
        # Simulate arctic not being installed at all.
        if name.startswith("arctic"):
            raise ImportError()
        return real_import(name, *args, **kwargs)

    with ExitStack() as stack:
        stack.enter_context(mock.patch("dtale.app.build_app", mock.Mock()))
        stack.enter_context(
            mock.patch("{}.__import__".format(builtin_pkg), side_effect=fake_import)
        )
        cli_args = [
            "--port", "9999",
            "--arctic-host", "arctic_host",
            "--arctic-library", "arctic_lib",
            "--arctic-node", "arctic_node",
            "--arctic-start", "20000101",
            "--arctic-end", "20000102",
        ]
        with pytest.raises(ImportError) as error:
            script.main(cli_args, standalone_mode=False)
        assert "In order to use the arctic loader you must install arctic!" in str(
            error
        )
def test_custom_cli_loaders():
    """Loaders found through DTALE_CLI_LOADERS should be wired into the CLI."""
    os.environ["DTALE_CLI_LOADERS"] = os.path.join(
        os.path.dirname(__file__), "..", "data"
    )
    # Re-import so the custom loader directory is scanned.
    reload(loaders)
    reload(script)
    with ExitStack() as stack:
        mock_show = stack.enter_context(
            mock.patch("dtale.cli.script.show", mock.Mock())
        )
        # Each custom loader is driven by its own CLI flag and must hand
        # back the expected frame.
        scenarios = [
            (
                ["--port", "9999", "--testcli"],
                pd.DataFrame([dict(security_id=1, foo=1.5)]),
            ),
            (
                ["--port", "9999", "--testcli2-prop", "foo"],
                pd.DataFrame([dict(security_id=1, foo="foo")]),
            ),
        ]
        for cli_args, expected in scenarios:
            script.main(cli_args, standalone_mode=False)
            mock_show.assert_called_once()
            _, kwargs = mock_show.call_args
            assert kwargs["data_loader"] is not None
            pdt.assert_frame_equal(kwargs["data_loader"](), expected)
            mock_show.reset_mock()
def test_arctic_version_data(builtin_pkg):
    """The arctic loader should unwrap a VersionedItem's ``data`` attribute."""
    real_import = __import__
    fake_arctic_module = mock.Mock()

    class FakeVersionedItem(object):
        # the loader identifies the wrapper via its __name__ attribute
        __name__ = 'VersionedItem'

        def __init__(self):
            self.data = 'versioned_data'

    class FakeLibrary(object):
        def __init__(self, *args, **kwargs):
            pass

        def read(self, *args, **kwargs):
            return FakeVersionedItem()

    class FakeArctic(object):
        __name__ = 'Arctic'

        def __init__(self, *args, **kwargs):
            pass

        def get_library(self, *args, **kwargs):
            return FakeLibrary()

    fake_arctic_module.Arctic = FakeArctic
    fake_versioned_item_module = mock.Mock()
    fake_versioned_item_module.VersionedItem = FakeVersionedItem

    def fake_import(name, *args, **kwargs):
        if name == 'arctic':
            return fake_arctic_module
        if name == 'arctic.store.versioned_item':
            return fake_versioned_item_module
        return real_import(name, *args, **kwargs)

    with ExitStack() as stack:
        mock_show = stack.enter_context(
            mock.patch('dtale.cli.script.show', mock.Mock()))
        stack.enter_context(
            mock.patch('{}.__import__'.format(builtin_pkg), side_effect=fake_import))
        cli_args = [
            '--port', '9999',
            '--arctic-host', 'arctic_host',
            '--arctic-library', 'arctic_lib',
            '--arctic-node', 'arctic_node',
        ]
        script.main(cli_args, standalone_mode=False)
        mock_show.assert_called_once()
        _, kwargs = mock_show.call_args
        assert kwargs['data_loader'] is not None
        # the loader must return the payload, not the wrapper itself
        assert kwargs['data_loader']() == 'versioned_data'
def test_main(builtin_pkg):
    """End-to-end checks of the CLI's csv & json loaders via ``script.main``.

    Covers explicit host/port, the auto-port + debug + no-reaper path, and
    verifies the loaders actually produce the fixture frame.
    """
    props = ['host', 'port', 'debug', 'subprocess', 'data_loader', 'reaper_on']
    with mock.patch('dtale.cli.script.show', mock.Mock()) as mock_show:
        # os.path.join is portable; joining on a literal "/../" is not.
        csv_path = os.path.join(os.path.dirname(__file__), '..', 'data/test_df.csv')
        args = ['--host', 'test', '--port', '9999', '--csv-path', csv_path]
        script.main(args, standalone_mode=False)
        mock_show.assert_called_once()
        _, kwargs = mock_show.call_args
        host, port, debug, subprocess, data_loader, reaper_on = map(
            kwargs.get, props)
        assert host == 'test'
        assert not subprocess
        assert not debug
        assert port == 9999
        assert reaper_on
        assert data_loader is not None
    with ExitStack() as stack:
        mock_show = stack.enter_context(
            mock.patch('dtale.cli.script.show', mock.Mock()))
        mock_find_free_port = stack.enter_context(
            mock.patch('dtale.cli.script.find_free_port', mock.Mock(return_value=9999)))
        csv_path = os.path.join(os.path.dirname(__file__), '..', 'data/test_df.csv')
        args = ['--csv-path', csv_path, '--debug', '--no-reaper']
        script.main(args, standalone_mode=False)
        mock_show.assert_called_once()
        mock_find_free_port.assert_called_once()
        _, kwargs = mock_show.call_args
        host, port, debug, subprocess, data_loader, reaper_on = map(
            kwargs.get, props)
        assert host is None
        assert not subprocess
        assert debug
        assert port == 9999
        assert not reaper_on
        assert data_loader is not None
        df = data_loader()
        # NOTE: the third positional arg of assert_frame_equal is check_dtype,
        # not a message; labels belong in the ``obj=`` keyword.
        pdt.assert_frame_equal(df, pd.DataFrame([dict(a=1, b=2, c=3)]),
                               obj='loader should load csv')
    with mock.patch('dtale.cli.script.show', mock.Mock()) as mock_show:
        json_path = os.path.join(
            os.path.dirname(__file__), '..', 'data/test_df.json')
        args = ['--host', 'test', '--port', '9999', '--json-path', json_path]
        script.main(args, standalone_mode=False)
        mock_show.assert_called_once()
        _, kwargs = mock_show.call_args
        host, port, debug, subprocess, data_loader, reaper_on = map(
            kwargs.get, props)
        assert host == 'test'
        assert not subprocess
        assert not debug
        assert port == 9999
        assert reaper_on
        assert data_loader is not None
        df = data_loader()
        pdt.assert_frame_equal(df, pd.DataFrame([dict(a=1, b=2, c=3)]),
                               obj='loader should load json')
def test_main(builtin_pkg):
    """End-to-end checks of the CLI's csv & json loaders via ``script.main``.

    Covers explicit host/port, the auto-port + debug + no-reaper path, and
    verifies the loaders actually produce the fixture frame.
    """
    props = ["host", "port", "debug", "subprocess", "data_loader", "reaper_on"]
    with mock.patch("dtale.cli.script.show", mock.Mock()) as mock_show:
        # os.path.join is portable; joining on a literal "/../" is not.
        csv_path = os.path.join(os.path.dirname(__file__), "..", "data/test_df.csv")
        args = ["--host", "test", "--port", "9999", "--csv-path", csv_path]
        script.main(args, standalone_mode=False)
        mock_show.assert_called_once()
        _, kwargs = mock_show.call_args
        host, port, debug, subprocess, data_loader, reaper_on = map(
            kwargs.get, props)
        assert host == "test"
        assert not subprocess
        assert not debug
        assert port == 9999
        assert reaper_on
        assert data_loader is not None
    with ExitStack() as stack:
        mock_show = stack.enter_context(
            mock.patch("dtale.cli.script.show", mock.Mock()))
        mock_find_free_port = stack.enter_context(
            mock.patch("dtale.cli.script.find_free_port", mock.Mock(return_value=9999)))
        csv_path = os.path.join(os.path.dirname(__file__), "..", "data/test_df.csv")
        args = ["--csv-path", csv_path, "--debug", "--no-reaper"]
        script.main(args, standalone_mode=False)
        mock_show.assert_called_once()
        mock_find_free_port.assert_called_once()
        _, kwargs = mock_show.call_args
        host, port, debug, subprocess, data_loader, reaper_on = map(
            kwargs.get, props)
        assert host is None
        assert not subprocess
        assert debug
        assert port == 9999
        assert not reaper_on
        assert data_loader is not None
        df = data_loader()
        # NOTE: the third positional arg of assert_frame_equal is check_dtype,
        # not a message; labels belong in the ``obj=`` keyword.
        pdt.assert_frame_equal(df, pd.DataFrame([dict(a=1, b=2, c=3)]),
                               obj="loader should load csv")
    with mock.patch("dtale.cli.script.show", mock.Mock()) as mock_show:
        json_path = os.path.join(
            os.path.dirname(__file__), "..", "data/test_df.json")
        args = ["--host", "test", "--port", "9999", "--json-path", json_path]
        script.main(args, standalone_mode=False)
        mock_show.assert_called_once()
        _, kwargs = mock_show.call_args
        host, port, debug, subprocess, data_loader, reaper_on = map(
            kwargs.get, props)
        assert host == "test"
        assert not subprocess
        assert not debug
        assert port == 9999
        assert reaper_on
        assert data_loader is not None
        df = data_loader()
        pdt.assert_frame_equal(df, pd.DataFrame([dict(a=1, b=2, c=3)]),
                               obj="loader should load json")
def test_r_loader_integration(unittest):
    """Load the bundled .rda fixture through the CLI's R loader (needs rpy2)."""
    pytest.importorskip("rpy2")
    rda_path = os.path.join(
        os.path.dirname(__file__), "..", "data", "admissions.rda"
    )
    with ExitStack() as stack:
        mock_show = stack.enter_context(
            mock.patch("dtale.cli.script.show", mock.Mock())
        )
        script.main(["--port", "9999", "--r-path", rda_path], standalone_mode=False)
        mock_show.assert_called_once()
        _, kwargs = mock_show.call_args
        assert kwargs["data_loader"] is not None
        df = kwargs["data_loader"]()
        unittest.assertEqual(
            list(df.columns), ["major", "gender", "admitted", "applicants"]
        )
        assert len(df) == 12
        # spot-check the first record of the admissions fixture
        unittest.assertEqual(
            df.to_dict(orient="records")[0],
            dict(major="A", gender="men", admitted=62, applicants=825),
        )
def test_arctic_import_error(builtin_pkg):
    """A missing arctic package should abort the CLI with a clear ImportError."""
    real_import = __import__

    def fake_import(name, *args, **kwargs):
        # Pretend every arctic module is unavailable.
        if name.startswith('arctic'):
            raise ImportError()
        return real_import(name, *args, **kwargs)

    with ExitStack() as stack:
        stack.enter_context(mock.patch('dtale.cli.script.show', mock.Mock()))
        stack.enter_context(
            mock.patch('{}.__import__'.format(builtin_pkg), side_effect=fake_import))
        cli_args = [
            '--port', '9999',
            '--arctic-host', 'arctic_host',
            '--arctic-library', 'arctic_lib',
            '--arctic-node', 'arctic_node',
            '--arctic-start', '20000101',
            '--arctic-end', '20000102',
        ]
        with pytest.raises(ImportError) as error:
            script.main(cli_args, standalone_mode=False)
        assert 'ImportError: In order to use the --arctic loader you must install arctic!' in str(
            error)
def test_main(builtin_pkg):
    """CLI loader checks: csv loading plus arctic wired in through mocked imports.

    The arctic sections assert that --arctic-start/--arctic-end translate into
    a ``chunk_range`` passed to the library's ``read``, and that omitting them
    omits ``chunk_range``.
    """
    props = ['host', 'port', 'debug', 'subprocess', 'data_loader', 'reaper_on']
    with mock.patch('dtale.cli.script.show', mock.Mock()) as mock_show:
        # os.path.join is portable; joining on a literal "/../" is not.
        csv_path = os.path.join(os.path.dirname(__file__), '..', 'data/test_df.csv')
        args = ['--host', 'test', '--port', '9999', '--csv-path', csv_path]
        script.main(args, standalone_mode=False)
        mock_show.assert_called_once()
        _, kwargs = mock_show.call_args
        host, port, debug, subprocess, data_loader, reaper_on = map(
            kwargs.get, props)
        assert host == 'test'
        assert not subprocess
        assert not debug
        assert port == 9999
        assert reaper_on
        assert data_loader is not None
    with ExitStack() as stack:
        mock_show = stack.enter_context(
            mock.patch('dtale.cli.script.show', mock.Mock()))
        mock_find_free_port = stack.enter_context(
            mock.patch('dtale.cli.script.find_free_port', mock.Mock(return_value=9999)))
        csv_path = os.path.join(os.path.dirname(__file__), '..', 'data/test_df.csv')
        args = ['--csv-path', csv_path, '--debug', '--no-reaper']
        script.main(args, standalone_mode=False)
        mock_show.assert_called_once()
        mock_find_free_port.assert_called_once()
        _, kwargs = mock_show.call_args
        host, port, debug, subprocess, data_loader, reaper_on = map(
            kwargs.get, props)
        assert host is None
        assert not subprocess
        assert debug
        assert port == 9999
        assert not reaper_on
        assert data_loader is not None
        df = data_loader()
        # NOTE: the third positional arg of assert_frame_equal is check_dtype,
        # not a message; labels belong in the ``obj=`` keyword.
        pdt.assert_frame_equal(df, pd.DataFrame([dict(a=1, b=2, c=3)]),
                               obj='loader should load csv')
    orig_import = __import__
    mock_arctic = mock.Mock()
    mock_versioned_item = mock.Mock()

    class VersionedItem(object):
        __name__ = 'VersionedItem'

        def __init__(self):
            pass

    mock_versioned_item.VersionedItem = VersionedItem

    def import_mock(name, *args, **kwargs):
        if name == 'arctic':
            return mock_arctic
        if name == 'arctic.store.versioned_item':
            return mock_versioned_item
        return orig_import(name, *args, **kwargs)

    with ExitStack() as stack:
        mock_show = stack.enter_context(
            mock.patch('dtale.cli.script.show', mock.Mock()))
        stack.enter_context(
            mock.patch('{}.__import__'.format(builtin_pkg), side_effect=import_mock))
        args = [
            '--port', '9999',
            '--arctic-host', 'arctic_host',
            '--arctic-library', 'arctic_lib',
            '--arctic-node', 'arctic_node',
            '--arctic-start', '20000101',
            '--arctic-end', '20000102',
        ]
        script.main(args, standalone_mode=False)
        mock_show.assert_called_once()
        _, kwargs = mock_show.call_args
        assert kwargs['data_loader'] is not None
        kwargs['data_loader']()
        assert mock_arctic.Arctic.call_args[0][0] == 'arctic_host'
        mock_arctic_instance = mock_arctic.Arctic.return_value
        assert mock_arctic_instance.get_library.call_args[0][0] == 'arctic_lib'
        mock_arctic_lib_instance = mock_arctic_instance.get_library.return_value
        args, kwargs = mock_arctic_lib_instance.read.call_args
        assert args[0] == 'arctic_node'
        # start/end dates must be forwarded as a chunk_range
        assert kwargs['chunk_range'].min() == pd.Timestamp('20000101')
        assert kwargs['chunk_range'].max() == pd.Timestamp('20000102')
    with ExitStack() as stack:
        mock_show = stack.enter_context(
            mock.patch('dtale.cli.script.show', mock.Mock()))
        stack.enter_context(
            mock.patch('{}.__import__'.format(builtin_pkg), side_effect=import_mock))
        args = [
            '--port', '9999',
            '--arctic-host', 'arctic_host',
            '--arctic-library', 'arctic_lib',
            '--arctic-node', 'arctic_node',
        ]
        script.main(args, standalone_mode=False)
        mock_show.assert_called_once()
        _, kwargs = mock_show.call_args
        assert kwargs['data_loader'] is not None
        kwargs['data_loader']()
        mock_arctic_instance = mock_arctic.Arctic.return_value
        mock_arctic_lib_instance = mock_arctic_instance.get_library.return_value
        args, kwargs = mock_arctic_lib_instance.read.call_args
        # without start/end there must be no chunk_range
        assert 'chunk_range' not in kwargs
def test_arctic_version_data(builtin_pkg):
    """The arctic loader should hand back a VersionedItem's ``data`` payload,
    both via the CLI and via ``dtale.show_arctic``."""
    real_import = __import__
    fake_arctic_module = mock.Mock()

    class FakeVersionedItem(object):
        # the loader identifies the wrapper via its __name__ attribute
        __name__ = "VersionedItem"

        def __init__(self):
            self.data = "versioned_data"

    class FakeLibrary(object):
        def __init__(self, *args, **kwargs):
            pass

        def read(self, *args, **kwargs):
            return FakeVersionedItem()

    class FakeArctic(object):
        __name__ = "Arctic"

        def __init__(self, *args, **kwargs):
            pass

        def get_library(self, *args, **kwargs):
            return FakeLibrary()

    fake_arctic_module.Arctic = FakeArctic
    fake_versioned_item_module = mock.Mock()
    fake_versioned_item_module.VersionedItem = FakeVersionedItem

    def fake_import(name, *args, **kwargs):
        if name == "arctic":
            return fake_arctic_module
        if name == "arctic.store.versioned_item":
            return fake_versioned_item_module
        return real_import(name, *args, **kwargs)

    with ExitStack() as stack:
        mock_show = stack.enter_context(
            mock.patch("dtale.cli.script.show", mock.Mock())
        )
        stack.enter_context(
            mock.patch("{}.__import__".format(builtin_pkg), side_effect=fake_import)
        )
        cli_args = [
            "--port", "9999",
            "--arctic-host", "arctic_host",
            "--arctic-library", "arctic_lib",
            "--arctic-node", "arctic_node",
        ]
        script.main(cli_args, standalone_mode=False)
        mock_show.assert_called_once()
        _, kwargs = mock_show.call_args
        assert kwargs["data_loader"] is not None
        # the loader must return the payload, not the wrapper itself
        assert kwargs["data_loader"]() == "versioned_data"

    with ExitStack() as stack:
        import dtale

        stack.enter_context(
            mock.patch("{}.__import__".format(builtin_pkg), side_effect=fake_import)
        )
        stack.enter_context(
            mock.patch("dtale.cli.loaders.arctic_loader.show", mock.Mock())
        )
        # the programmatic entry point should work with the same mocks
        dtale.show_arctic(host="arctic_host", library="arctic_lib", node="arctic_node")
def test_arctic_loader(mongo_host, library_name, library, chunkstore_name, chunkstore_lib):
    """Round-trip frames through a real arctic chunkstore and version store."""
    def _fixture_frame():
        # two rows indexed by (date, a), matching what the loader should return
        rows = [
            dict(date=pd.Timestamp('20000101'), a=1, b=1.0),
            dict(date=pd.Timestamp('20000102'), a=2, b=2.0),
        ]
        return pd.DataFrame(rows).set_index(['date', 'a'])

    node = _fixture_frame()
    chunkstore_lib.write('test_node', node)
    with ExitStack() as stack:
        mock_show = stack.enter_context(
            mock.patch('dtale.cli.script.show', mock.Mock()))
        cli_args = [
            '--port', '9999',
            '--arctic-host', mongo_host,
            '--arctic-library', chunkstore_name,
            '--arctic-node', 'test_node',
            '--arctic-start', '20000101',
            '--arctic-end', '20000102',
        ]
        script.main(cli_args, standalone_mode=False)
        mock_show.assert_called_once()
        _, kwargs = mock_show.call_args
        assert kwargs['data_loader'] is not None
        pdt.assert_frame_equal(kwargs['data_loader'](), node)

    node2 = _fixture_frame()
    library.write('test_node2', node2)
    with ExitStack() as stack:
        mock_show = stack.enter_context(
            mock.patch('dtale.cli.script.show', mock.Mock()))
        cli_args = [
            '--port', '9999',
            '--arctic-host', mongo_host,
            '--arctic-library', library_name,
            '--arctic-node', 'test_node2',
        ]
        script.main(cli_args, standalone_mode=False)
        mock_show.assert_called_once()
        _, kwargs = mock_show.call_args
        assert kwargs['data_loader'] is not None
        pdt.assert_frame_equal(kwargs['data_loader'](), node2)
def test_artic_loader(builtin_pkg):
    """Drive the arctic CLI loader entirely through mocked ``arctic`` imports.

    NOTE(review): "artic" in the test name looks like a typo for "arctic";
    left unchanged so selection by test name keeps working.
    """
    # Fixture frame the mocked arctic library will hand back.
    node = pd.DataFrame(
        [
            {"date": pd.Timestamp("20000101"), "a": 1, "b": 1.0},
            {"date": pd.Timestamp("20000102"), "a": 2, "b": 2.0},
        ]
    ).set_index(["date", "a"])
    orig_import = __import__
    with ExitStack() as stack:
        mock_arctic = mock.MagicMock()

        class MockVersionedItem(object):
            # loader recognizes the wrapper by this name attribute
            __name__ = "VersionedItem"

            def __init__(self):
                # loader is expected to unwrap .data
                self.data = node
                pass

        def import_mock(name, *args, **kwargs):
            # ``mock_versioned_item`` is assigned AFTER this def; the closure
            # resolves it lazily at call time, so the ordering is deliberate.
            if name == "arctic":
                return mock_arctic
            if name == "arctic.store.versioned_item":
                return mock_versioned_item
            return orig_import(name, *args, **kwargs)

        # Wire Arctic() -> instance -> get_library() -> lib -> read() -> item.
        mock_arctic_class = mock.MagicMock()
        mock_arctic.Arctic.return_value = mock_arctic_class
        mock_arctic_lib = mock.MagicMock()
        mock_arctic_lib.read = mock.Mock(return_value=MockVersionedItem())
        mock_arctic_class.get_library = mock.Mock(return_value=mock_arctic_lib)
        mock_versioned_item = mock.Mock()
        mock_versioned_item.VersionedItem = MockVersionedItem
        stack.enter_context(
            mock.patch("{}.__import__".format(builtin_pkg), side_effect=import_mock)
        )
        mock_show = stack.enter_context(
            mock.patch("dtale.cli.script.show", mock.Mock())
        )
        # First pass: date-bounded read should forward a chunk_range to read().
        args = [
            "--port",
            "9999",
            "--arctic-host",
            "test_host",
            "--arctic-library",
            "test_lib",
            "--arctic-node",
            "test_node",
            "--arctic-start",
            "20000101",
            "--arctic-end",
            "20000102",
        ]
        script.main(args, standalone_mode=False)
        mock_show.assert_called_once()
        _, kwargs = mock_show.call_args
        assert kwargs["data_loader"] is not None
        output = kwargs["data_loader"]()
        pdt.assert_frame_equal(output, node)
        mock_arctic.Arctic.assert_called_with("test_host")
        mock_arctic_class.get_library.assert_called_with("test_lib")
        read_call = mock_arctic_lib.read.mock_calls[0]
        assert read_call.args[0] == "test_node"
        assert "chunk_range" in read_call.kwargs
        # Reset all mocks before the unbounded second pass.
        mock_arctic.reset_mock()
        mock_arctic_lib.reset_mock()
        mock_arctic_class.reset_mock()
        mock_show.reset_mock()
        # Second pass: no start/end, so read() gets only the node name.
        args = [
            "--port",
            "9999",
            "--arctic-host",
            "test_host",
            "--arctic-library",
            "test_lib",
            "--arctic-node",
            "test_node2",
        ]
        script.main(args, standalone_mode=False)
        mock_show.assert_called_once()
        _, kwargs = mock_show.call_args
        assert kwargs["data_loader"] is not None
        pdt.assert_frame_equal(kwargs["data_loader"](), node)
        mock_arctic.Arctic.assert_called_with("test_host")
        mock_arctic_class.get_library.assert_called_with("test_lib")
        mock_arctic_lib.read.assert_called_with("test_node2")
def test_arctic_loader_integration(
    mongo_host, library_name, library, chunkstore_name, chunkstore_lib
):
    """Round-trip frames through a real arctic chunkstore and version store."""
    def _fixture_frame():
        # two rows indexed by (date, a), matching what the loader should return
        rows = [
            dict(date=pd.Timestamp("20000101"), a=1, b=1.0),
            dict(date=pd.Timestamp("20000102"), a=2, b=2.0),
        ]
        return pd.DataFrame(rows).set_index(["date", "a"])

    node = _fixture_frame()
    chunkstore_lib.write("test_node", node)
    with ExitStack() as stack:
        mock_show = stack.enter_context(
            mock.patch("dtale.cli.script.show", mock.Mock())
        )
        cli_args = [
            "--port", "9999",
            "--arctic-host", mongo_host,
            "--arctic-library", chunkstore_name,
            "--arctic-node", "test_node",
            "--arctic-start", "20000101",
            "--arctic-end", "20000102",
        ]
        script.main(cli_args, standalone_mode=False)
        mock_show.assert_called_once()
        _, kwargs = mock_show.call_args
        assert kwargs["data_loader"] is not None
        pdt.assert_frame_equal(kwargs["data_loader"](), node)

    node2 = _fixture_frame()
    library.write("test_node2", node2)
    with ExitStack() as stack:
        mock_show = stack.enter_context(
            mock.patch("dtale.cli.script.show", mock.Mock())
        )
        cli_args = [
            "--port", "9999",
            "--arctic-host", mongo_host,
            "--arctic-library", library_name,
            "--arctic-node", "test_node2",
        ]
        script.main(cli_args, standalone_mode=False)
        mock_show.assert_called_once()
        _, kwargs = mock_show.call_args
        assert kwargs["data_loader"] is not None
        pdt.assert_frame_equal(kwargs["data_loader"](), node2)
def test_artic_loader():
    """Patch ``arctic.Arctic`` directly and check both read paths of the loader.

    NOTE(review): "artic" in the test name looks like a typo for "arctic";
    left unchanged so selection by test name keeps working.
    """
    # Fixture frame the mocked library's read() will return.
    node = pd.DataFrame(
        [
            {"date": pd.Timestamp("20000101"), "a": 1, "b": 1.0},
            {"date": pd.Timestamp("20000102"), "a": 2, "b": 2.0},
        ]
    ).set_index(["date", "a"])
    with ExitStack() as stack:
        mock_show = stack.enter_context(
            mock.patch("dtale.cli.script.show", mock.Mock())
        )
        arctic_mock = stack.enter_context(
            mock.patch("arctic.Arctic", side_effect=mock.MagicMock())
        )
        # ``arctic_lib_mock()`` is the mock's return_value; attach a read()
        # that yields the fixture frame, then make get_library produce it.
        arctic_lib_mock = mock.MagicMock()
        arctic_lib_mock().read = mock.Mock(return_value=node)
        arctic_mock().get_library = arctic_lib_mock
        # Date-bounded read: chunk_range must be forwarded to read().
        args = [
            "--port",
            "9999",
            "--arctic-host",
            "test_host",
            "--arctic-library",
            "test_lib",
            "--arctic-node",
            "test_node",
            "--arctic-start",
            "20000101",
            "--arctic-end",
            "20000102",
        ]
        script.main(args, standalone_mode=False)
        mock_show.assert_called_once()
        _, kwargs = mock_show.call_args
        assert kwargs["data_loader"] is not None
        pdt.assert_frame_equal(kwargs["data_loader"](), node)
        arctic_mock.assert_called_with("test_host")
        arctic_lib_mock.assert_called_with("test_lib")
        read_call = arctic_lib_mock.return_value.read.mock_calls[0]
        assert read_call.args[0] == "test_node"
        assert "chunk_range" in read_call.kwargs
    # Second pass: fresh mocks, no start/end bounds.
    node2 = pd.DataFrame(
        [
            {"date": pd.Timestamp("20000101"), "a": 1, "b": 1.0},
            {"date": pd.Timestamp("20000102"), "a": 2, "b": 2.0},
        ]
    ).set_index(["date", "a"])
    with ExitStack() as stack:
        mock_show = stack.enter_context(
            mock.patch("dtale.cli.script.show", mock.Mock())
        )
        arctic_mock = stack.enter_context(
            mock.patch("arctic.Arctic", side_effect=mock.MagicMock())
        )
        arctic_lib_mock = mock.MagicMock()
        arctic_lib_mock().read = mock.Mock(return_value=node2)
        arctic_mock().get_library = arctic_lib_mock
        args = [
            "--port",
            "9999",
            "--arctic-host",
            "test_host",
            "--arctic-library",
            "test_lib",
            "--arctic-node",
            "test_node2",
        ]
        script.main(args, standalone_mode=False)
        mock_show.assert_called_once()
        _, kwargs = mock_show.call_args
        assert kwargs["data_loader"] is not None
        pdt.assert_frame_equal(kwargs["data_loader"](), node2)
        arctic_mock.assert_called_with("test_host")
        arctic_lib_mock.assert_called_with("test_lib")
        # unbounded read is called with the node name only
        arctic_lib_mock.return_value.read.assert_called_with("test_node2")
def test_main(unittest):
    """CLI loader coverage: csv (with column filters), excel, json and parquet.

    Each section patches ``dtale.cli.script.show``, runs ``script.main`` with
    loader-specific flags, and checks both the forwarded kwargs and the frame
    produced by the resulting data_loader.
    """
    props = [
        "host",
        "port",
        "debug",
        "subprocess",
        "data_loader",
        "reaper_on",
        "show_columns",
        "hide_columns",
    ]
    with mock.patch("dtale.cli.script.show", mock.Mock()) as mock_show:
        csv_path = os.path.join(os.path.dirname(__file__), "..", "data/test_df.csv")
        args = ["--host", "test", "--port", "9999", "--csv-path", csv_path]
        args += ["--show-columns", "a,b", "--hide-columns", "c"]
        script.main(args, standalone_mode=False)
        mock_show.assert_called_once()
        _, kwargs = mock_show.call_args
        (
            host,
            port,
            debug,
            subprocess,
            data_loader,
            reaper_on,
            show_columns,
            hide_columns,
        ) = map(kwargs.get, props)
        assert host == "test"
        assert not subprocess
        assert not debug
        assert port == 9999
        assert reaper_on
        assert data_loader is not None
        # comma-separated CLI values should be split into lists
        unittest.assertEqual(show_columns, ["a", "b"])
        unittest.assertEqual(hide_columns, ["c"])
    with ExitStack() as stack:
        mock_show = stack.enter_context(
            mock.patch("dtale.cli.script.show", mock.Mock())
        )
        mock_find_free_port = stack.enter_context(
            mock.patch("dtale.cli.script.find_free_port", mock.Mock(return_value=9999))
        )
        csv_path = os.path.join(os.path.dirname(__file__), "..", "data/test_df.csv")
        args = ["--csv-path", csv_path, "--debug", "--no-reaper"]
        script.main(args, standalone_mode=False)
        mock_show.assert_called_once()
        mock_find_free_port.assert_called_once()
        _, kwargs = mock_show.call_args
        (
            host,
            port,
            debug,
            subprocess,
            data_loader,
            reaper_on,
            show_columns,
            hide_columns,
        ) = map(kwargs.get, props)
        assert host is None
        assert not subprocess
        assert debug
        assert port == 9999
        assert not reaper_on
        assert data_loader is not None
        assert show_columns is None
        assert hide_columns is None
        df = data_loader()
        # NOTE: the third positional arg of assert_frame_equal is check_dtype,
        # not a message; labels belong in the ``obj=`` keyword.
        pdt.assert_frame_equal(
            df, pd.DataFrame([dict(a=1, b=2, c=3)]), obj="loader should load csv"
        )
    with ExitStack() as stack:
        mock_show = stack.enter_context(
            mock.patch("dtale.cli.script.show", mock.Mock())
        )
        mock_find_free_port = stack.enter_context(
            mock.patch("dtale.cli.script.find_free_port", mock.Mock(return_value=9999))
        )
        csv_path = os.path.join(os.path.dirname(__file__), "..", "data/test_df.xlsx")
        args = ["--excel-path", csv_path, "--debug", "--no-reaper"]
        script.main(args, standalone_mode=False)
        mock_show.assert_called_once()
        mock_find_free_port.assert_called_once()
        _, kwargs = mock_show.call_args
        (
            host,
            port,
            debug,
            subprocess,
            data_loader,
            reaper_on,
            show_columns,
            hide_columns,
        ) = map(kwargs.get, props)
        assert host is None
        assert not subprocess
        assert debug
        assert port == 9999
        assert not reaper_on
        assert data_loader is not None
        assert show_columns is None
        assert hide_columns is None
        df = data_loader()
        pdt.assert_frame_equal(
            df, pd.DataFrame([dict(a=1, b=2, c=3)]), obj="loader should load xlsx"
        )
    with mock.patch("dtale.cli.script.show", mock.Mock()) as mock_show:
        json_path = os.path.join(os.path.dirname(__file__), "..", "data/test_df.json")
        args = ["--host", "test", "--port", "9999", "--json-path", json_path]
        script.main(args, standalone_mode=False)
        mock_show.assert_called_once()
        _, kwargs = mock_show.call_args
        (
            host,
            port,
            debug,
            subprocess,
            data_loader,
            reaper_on,
            show_columns,
            hide_columns,
        ) = map(kwargs.get, props)
        assert host == "test"
        assert not subprocess
        assert not debug
        assert port == 9999
        assert reaper_on
        assert data_loader is not None
        assert show_columns is None
        assert hide_columns is None
        df = data_loader()
        pdt.assert_frame_equal(
            df, pd.DataFrame([dict(a=1, b=2, c=3)]), obj="loader should load json"
        )
    with mock.patch("dtale.cli.script.show", mock.Mock()) as mock_show:
        parquet_path = os.path.join(
            os.path.dirname(__file__), "..", "data/test_df.parquet"
        )
        args = ["--host", "test", "--port", "9999", "--parquet-path", parquet_path]
        script.main(args, standalone_mode=False)
        mock_show.assert_called_once()
        _, kwargs = mock_show.call_args
        (
            host,
            port,
            debug,
            subprocess,
            data_loader,
            reaper_on,
            show_columns,
            hide_columns,
        ) = map(kwargs.get, props)
        assert host == "test"
        assert not subprocess
        assert not debug
        assert port == 9999
        assert reaper_on
        assert data_loader is not None
        assert show_columns is None
        assert hide_columns is None
        df = data_loader()
        pdt.assert_frame_equal(
            df,
            pd.DataFrame(dict(a=[1, 2, 3], b=[4, 5, 6])),
            obj="loader should load parquet",
        )
def test_main():
    """CLI loader checks: csv plus the arctic loader patched at module level.

    The arctic sections assert that --arctic-start/--arctic-end become a
    ``chunk_range`` kwarg on the library's ``read``, and that omitting them
    omits ``chunk_range``.
    """
    props = ['host', 'port', 'debug', 'subprocess', 'data_loader', 'reaper_on']
    with mock.patch('dtale.cli.script.show', mock.Mock()) as mock_show:
        # os.path.join is portable; joining on a literal "/../" is not.
        csv_path = os.path.join(os.path.dirname(__file__), '..', 'data/test_df.csv')
        args = ['--host', 'test', '--port', '9999', '--csv-path', csv_path]
        script.main(args, standalone_mode=False)
        mock_show.assert_called_once()
        _, kwargs = mock_show.call_args
        host, port, debug, subprocess, data_loader, reaper_on = map(kwargs.get, props)
        assert host == 'test'
        assert not subprocess
        assert not debug
        assert port == 9999
        assert reaper_on
        assert data_loader is not None
    with ExitStack() as stack:
        mock_show = stack.enter_context(mock.patch('dtale.cli.script.show', mock.Mock()))
        mock_find_free_port = stack.enter_context(
            mock.patch('dtale.cli.script.find_free_port', mock.Mock(return_value=9999))
        )
        csv_path = os.path.join(os.path.dirname(__file__), '..', 'data/test_df.csv')
        args = ['--csv-path', csv_path, '--debug', '--no-reaper']
        script.main(args, standalone_mode=False)
        mock_show.assert_called_once()
        mock_find_free_port.assert_called_once()
        _, kwargs = mock_show.call_args
        host, port, debug, subprocess, data_loader, reaper_on = map(kwargs.get, props)
        assert host is None
        assert not subprocess
        assert debug
        assert port == 9999
        assert not reaper_on
        assert data_loader is not None
        df = data_loader()
        # NOTE: the third positional arg of assert_frame_equal is check_dtype,
        # not a message; labels belong in the ``obj=`` keyword.
        pdt.assert_frame_equal(df, pd.DataFrame([dict(a=1, b=2, c=3)]),
                               obj='loader should load csv')
    with ExitStack() as stack:
        mock_show = stack.enter_context(mock.patch('dtale.cli.script.show', mock.Mock()))
        mock_arctic = stack.enter_context(
            mock.patch('dtale.cli.loaders.arctic_loader.Arctic', mock.Mock()))
        stack.enter_context(mock.patch(
            'dtale.cli.loaders.arctic_loader.VersionedItem',
            namedtuple('versioned_item', 'VersionedItem')
        ))
        args = [
            '--port', '9999',
            '--arctic-host', 'arctic_host',
            '--arctic-library', 'arctic_lib',
            '--arctic-node', 'arctic_node',
            '--arctic-start', '20000101',
            '--arctic-end', '20000102',
        ]
        script.main(args, standalone_mode=False)
        mock_show.assert_called_once()
        _, kwargs = mock_show.call_args
        assert kwargs['data_loader'] is not None
        kwargs['data_loader']()
        assert mock_arctic.call_args[0][0] == 'arctic_host'
        mock_arctic_instance = mock_arctic.return_value
        assert mock_arctic_instance.get_library.call_args[0][0] == 'arctic_lib'
        mock_arctic_lib_instance = mock_arctic_instance.get_library.return_value
        args, kwargs = mock_arctic_lib_instance.read.call_args
        assert args[0] == 'arctic_node'
        # start/end dates must be forwarded as a chunk_range
        assert kwargs['chunk_range'].min() == pd.Timestamp('20000101')
        assert kwargs['chunk_range'].max() == pd.Timestamp('20000102')
    with ExitStack() as stack:
        mock_show = stack.enter_context(mock.patch('dtale.cli.script.show', mock.Mock()))
        mock_arctic = stack.enter_context(
            mock.patch('dtale.cli.loaders.arctic_loader.Arctic', mock.Mock()))
        stack.enter_context(mock.patch(
            'dtale.cli.loaders.arctic_loader.VersionedItem',
            namedtuple('versioned_item', 'VersionedItem')
        ))
        args = [
            '--port', '9999',
            '--arctic-host', 'arctic_host',
            '--arctic-library', 'arctic_lib',
            '--arctic-node', 'arctic_node',
        ]
        script.main(args, standalone_mode=False)
        mock_show.assert_called_once()
        _, kwargs = mock_show.call_args
        assert kwargs['data_loader'] is not None
        kwargs['data_loader']()
        mock_arctic_instance = mock_arctic.return_value
        mock_arctic_lib_instance = mock_arctic_instance.get_library.return_value
        args, kwargs = mock_arctic_lib_instance.read.call_args
        # without start/end there must be no chunk_range
        assert 'chunk_range' not in kwargs