def test_testframework_solve(mock_solve_all, mock_solve_all_parallel, default_parameters, nondefault_parameters):
    framework1 = t.TestFramework(**default_parameters)
    framework1.solve(use_cache=True)
    mock_solve_all.assert_called_once_with(True)
    mock_solve_all_parallel.assert_not_called()

    framework2 = t.TestFramework(**nondefault_parameters)
    framework2.solve(use_cache=False)
    mock_solve_all.assert_called_once_with(True)
    mock_solve_all_parallel.assert_called_once_with(False)
def test_testframework_load_problem_file(mock_get_all_from_file, default_parameters, nondefault_parameters):
    mock_get_all_from_file.return_value = ["problem1", "problem2"]

    framework1 = t.TestFramework(**default_parameters)
    framework1.load_problem_file("test_problem")
    assert framework1.testproblems == ["problem1", "problem2"]

    framework2 = t.TestFramework(**nondefault_parameters)
    framework2.load_problem_file("test_problem")
    assert framework2.testproblems == ["p1", "p2", "p3"] + ["problem1", "problem2"]
def test_testframework_solve_all_cache(mock_testinstance, mock_pickle_load, mock_pickle_dump, default_parameters, before_cache_pkl, after_cache_pkl):
    # Save "old" cache for testing later
    from copy import deepcopy
    old_cache = deepcopy(before_cache_pkl)
    mock_pickle_load.return_value = before_cache_pkl
    m = mock_open()

    framework2 = t.TestFramework(**default_parameters)
    framework2.testproblems = ["prob1", "prob2"]
    framework2.solverconfigs = ["config1", "config2"]
    results = [("prob1", "config1"), ("prob2", "config1"),
               ("prob1", "config2"), ("prob2", "config2")]
    with patch("cvxbenchmarks.framework.open", m):
        framework2.solve_all(use_cache=True)
    print(framework2.results)
    assert sorted([(result.problemID, result.configID)
                   for result in framework2.results]) == sorted(results)

    # Make sure results were not run if they were cached.
    for instance in framework2.instances:
        if hash(instance) in old_cache:
            instance.run.assert_not_called()
def test_testframework_load_config(mock_from_file, default_parameters):
    mock_from_file.side_effect = ["config1", "config2", None]

    framework1 = t.TestFramework(**default_parameters)
    framework1.load_config("configID1")
    assert framework1.solverconfigs == ["config1"]
    framework1.load_config("configID2")
    assert framework1.solverconfigs == ["config1", "config2"]
    # A config that fails to load (None) should not be appended.
    framework1.load_config("configID_None")
    assert framework1.solverconfigs == ["config1", "config2"]
def test_testframework_preload_all_configs(mock_load_config, mock_os_walk, default_parameters):
    mock_os_walk.return_value = [
        ("cvxbenchmarks/lib/configs", [], ["__init__.py", "config1.py", "config2.py"])
    ]

    framework1 = t.TestFramework(**default_parameters)
    framework1.preload_all_configs()
    calls = mock_load_config.mock_calls
    assert call("config1") in calls
    assert call("config2") in calls
    assert call("__init__") not in calls
def test_testframework_solve_all_no_cache(mock_testinstance, default_parameters):
    # Test without cache
    framework1 = t.TestFramework(**default_parameters)
    framework1.testproblems = ["prob1", "prob2"]
    framework1.solverconfigs = ["config1", "config2"]
    framework1.solve_all(use_cache=False)
    results = [("prob1", "config1"), ("prob2", "config1"),
               ("prob1", "config2"), ("prob2", "config2")]
    assert sorted([(result.problemID, result.configID)
                   for result in framework1.results]) == sorted(results)
def test_testframework_generate_test_instances(default_parameters):
    problems = ["prob1", "prob2"]
    configs = ["config1", "config2"]

    framework1 = t.TestFramework(**default_parameters)
    framework1.testproblems = problems
    framework1.solverconfigs = configs
    framework1.generate_test_instances()
    assert framework1.instances == [
        t.TestInstance("prob1", "config1"),
        t.TestInstance("prob1", "config2"),
        t.TestInstance("prob2", "config1"),
        t.TestInstance("prob2", "config2")
    ]
def test_testframework_preload_all_problems(mock_load_problem_file, mock_os_walk, default_parameters):
    mock_os_walk.return_value = [
        ("cvxbenchmarks/problems", [], ["__init__.py", "problem1.py", "problem2.py"])
    ]

    framework1 = t.TestFramework(**default_parameters)
    framework1.preload_all_problems()
    calls = mock_load_problem_file.mock_calls
    assert call("problem1") in calls
    assert call("problem2") in calls
    assert call("__init__") not in calls
def test_testframework_init(default_parameters, nondefault_parameters):
    framework1 = t.TestFramework(**default_parameters)
    assert framework1.problemDir == "pdir1"
    assert framework1.configDir == "cdir1"
    assert framework1.testproblems == []
    assert framework1.solverconfigs == []
    assert framework1.cacheFile == "cache.pkl"
    assert framework1.parallel is False
    assert framework1.tags == []
    assert framework1.instances == []
    assert framework1.results == []

    framework2 = t.TestFramework(**nondefault_parameters)
    assert framework2.problemDir == "pdir2"
    assert framework2.configDir == "cdir2"
    assert framework2.testproblems == ["p1", "p2", "p3"]
    assert framework2.solverconfigs == ["c1", "c2", "c3"]
    assert framework2.cacheFile == "cache2.pkl"
    assert framework2.parallel is True
    assert framework2.tags == ["SOCP", "SDP"]
    assert framework2.instances == ["inst1"]
    assert framework2.results == ["res1"]
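# Note: a minimal sketch of what the default_parameters / nondefault_parameters
# fixtures might look like, inferred from the assertions in test_testframework_init
# above. The real fixture definitions live elsewhere in the test suite (e.g. a
# conftest.py); the constructor keyword names shown here are assumptions.
#
# @pytest.fixture
# def default_parameters():
#     return {"problemDir": "pdir1", "configDir": "cdir1"}
#
# @pytest.fixture
# def nondefault_parameters():
#     return {
#         "problemDir": "pdir2",
#         "configDir": "cdir2",
#         "testproblems": ["p1", "p2", "p3"],
#         "solverconfigs": ["c1", "c2", "c3"],
#         "cacheFile": "cache2.pkl",
#         "parallel": True,
#         "tags": ["SOCP", "SDP"],
#         "instances": ["inst1"],
#         "results": ["res1"],
#     }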
def test_testframework_export_results(mock_size_metrics, default_parameters):
    mock_size_metrics._fields = ["num_scalar_variables"]
    SizeMetrics = namedtuple("SizeMetrics", ["num_scalar_variables"])

    framework1 = t.TestFramework(**default_parameters)
    result1 = t.TestResults("prob1", "config1", opt_val=0.0, solve_time=1.0,
                            size_metrics=SizeMetrics(num_scalar_variables=3))
    result2 = t.TestResults("prob2", "config2",
                            size_metrics=SizeMetrics(num_scalar_variables=5))
    result3 = t.TestResults("prob1", "config2", opt_val=0.1, solve_time=2.0,
                            size_metrics=SizeMetrics(num_scalar_variables=3))
    result4 = t.TestResults("prob2", "config1", opt_val=-0.1, solve_time=1.5,
                            size_metrics=SizeMetrics(num_scalar_variables=5))
    framework1.results = [result1, result2, result3, result4]

    results = framework1.export_results()
    assert results.loc[("prob1", "config1"), "opt_val"] == 0.0
    assert results.loc[("prob1", "config1"), "solve_time"] == 1.0
    assert results.loc[("prob1", "config1"), "num_scalar_variables"] == 3
    # result2 has no opt_val or solve_time, so those entries should be NaN.
    assert np.isnan(results.loc[("prob2", "config2"), "opt_val"])
    assert np.isnan(results.loc[("prob2", "config2"), "solve_time"])
    assert results.loc[("prob2", "config2"), "num_scalar_variables"] == 5
    assert results.loc[("prob1", "config2"), "opt_val"] == 0.1
    assert results.loc[("prob1", "config2"), "solve_time"] == 2.0
    assert results.loc[("prob1", "config2"), "num_scalar_variables"] == 3
    assert results.loc[("prob2", "config1"), "opt_val"] == -0.1
    assert results.loc[("prob2", "config1"), "solve_time"] == 1.5
    assert results.loc[("prob2", "config1"), "num_scalar_variables"] == 5