def test_iterate(sphere_pareto):
    xs, fs = sphere_pareto
    data_prob = ScalarDataProblem(xs, fs)
    method = ENautilus(data_prob)
    nadir, ideal = method.initialize(10, 8, xs, fs)
    zs, fs = method.iterate()

    fig = plt.figure()
    ax = fig.add_subplot(111, projection="3d")
    plt.title(
        "test_iterate in test_enautilus.py:\n "
        "Green dots should be dominating the red dot.\n Green "
        "dots should be spread evenly and lie between the Pareto\n "
        "front (blue dots) and the nadir (red dot).\n"
        "Close this to continue."
    )
    ax.scatter(
        method.obj_sub[0][:, 0],
        method.obj_sub[0][:, 1],
        method.obj_sub[0][:, 2],
        s=0.1,
        c="blue",
    )
    ax.scatter(
        method.zshi[0, :, 0],
        method.zshi[0, :, 1],
        method.zshi[0, :, 2],
        c="green",
    )
    ax.scatter(method.nadir[0], method.nadir[1], method.nadir[2], c="red")
    plt.show()
def test_interact_once(sphere_pareto):
    xs, fs = sphere_pareto
    data_prob = ScalarDataProblem(xs, fs)
    method = ENautilus(data_prob)
    nadir, ideal = method.initialize(10, 5, xs, fs)
    zs, fslow = method.iterate()
    method.interact(zs[0], fslow[0])

    assert method.h == 1
    assert method.ith == 9

    assert len(method.obj_sub[method.h]) <= len(method.obj_sub[method.h - 1])
    assert len(method.par_sub[method.h]) <= len(method.par_sub[method.h - 1])
    assert len(method.obj_sub[method.h]) == len(method.par_sub[method.h])

    with pytest.raises(InteractiveMethodError):
        # bad pref
        method.interact(np.array([1, 1]), np.array([1, 1, 1]))

    with pytest.raises(InteractiveMethodError):
        # bad pref
        method.interact(np.array([1, 1, 1, 1]), np.array([1, 1, 1]))

    with pytest.raises(InteractiveMethodError):
        # bad f_low
        method.interact(np.array([1, 1, 1]), np.array([1, 1]))

    with pytest.raises(InteractiveMethodError):
        # bad f_low
        method.interact(np.array([1, 1, 1]), np.array([1, 1, 1, 1]))
def test_not_enough_points(sphere_pareto):
    idxs = np.random.randint(0, len(sphere_pareto[0]), size=5)
    xs, fs = sphere_pareto[0][idxs], sphere_pareto[1][idxs]
    data_prob = ScalarDataProblem(xs, fs)
    method = ENautilus(data_prob)
    _, _ = method.initialize(10, 10)

    zs, lows = method.iterate()
    zs_is_nans = np.isnan(zs)
    lows_is_nans = np.isnan(lows)

    # missing points are marked with NaNs; both arrays should have them in
    # the same positions
    assert np.all(np.equal(zs_is_nans, lows_is_nans))

    # count the rows that contain an actual point, i.e. no NaN entries
    zs_points = 0
    for row in ~zs_is_nans:
        if np.all(row):
            zs_points += 1

    lows_points = 0
    for row in ~lows_is_nans:
        if np.all(row):
            lows_points += 1

    assert zs_points == lows_points
def test_initialization(sphere_pareto):
    xs, fs = sphere_pareto
    data_prob = ScalarDataProblem(xs, fs)
    method = ENautilus(data_prob)
    nadir, ideal = method.initialize(10, 5, xs, fs)

    assert np.all(np.isclose(method.pareto_front, xs))
    assert np.all(np.isclose(method.objective_vectors, fs))

    assert np.all(nadir >= method.objective_vectors)
    assert np.all(ideal <= method.objective_vectors)

    assert method.n_iters == 10
    assert method.n_points == 5

    assert np.all(np.isnan(method.zshi))
    assert method.zshi.shape == (10, 5, 3)
    assert method.fhilo.shape == (10, 5, 3)
    assert method.d.shape == (10, 5)

    assert method.h == 0
    assert method.ith == 10

    assert np.all(np.isclose(method.obj_sub[0], method.objective_vectors))
    assert np.all(np.isclose(method.par_sub[0], method.pareto_front))

    assert np.all(np.isclose(method.zpref, method.nadir))

    # bad nadir
    with pytest.raises(InteractiveMethodError):
        method.nadir = np.array([1, 1])
    with pytest.raises(InteractiveMethodError):
        method.nadir = np.array([1, 1, 1, 1])

    # bad ideal
    with pytest.raises(InteractiveMethodError):
        method.ideal = np.array([1, 1])
    with pytest.raises(InteractiveMethodError):
        method.ideal = np.array([1, 1, 1, 1])

    # bad iters
    with pytest.raises(InteractiveMethodError):
        method.n_iters = -1

    # bad n points
    with pytest.raises(InteractiveMethodError):
        method.n_points = -1
def test_interact_end(sphere_pareto):
    xs, fs = sphere_pareto
    data_prob = ScalarDataProblem(xs, fs)
    method = ENautilus(data_prob)

    total_iter = 10
    nadir, ideal = method.initialize(total_iter, 5)

    # iterate and interact till the penultimate iteration
    for i in range(total_iter - 1):
        zs, f_lows = method.iterate()
        r = np.random.randint(0, 5)
        method.interact(zs[r], f_lows[r])

    # the final interaction should return an objective vector contained in
    # the Pareto front
    r = np.random.randint(0, 5)
    _, res = method.interact(zs[r], f_lows[r])

    assert np.any(np.all(np.isclose(res, fs), axis=1))
def test_iterate_too_much(sphere_pareto):
    xs, fs = sphere_pareto
    xs = xs[:500]
    fs = fs[:500]
    data_prob = ScalarDataProblem(xs, fs)
    method = ENautilus(data_prob)
    _, _ = method.initialize(10, 10)

    # use up all the available iterations
    while method.ith >= 1:
        zs, lows = method.iterate()
        method.interact(zs[0], lows[0])

    last_zs, last_lows = method.iterate()
    last_x, last_f = method.interact(last_zs[0], last_lows[0])

    # iterating past the last iteration should keep returning the same points
    np.random.seed(1)
    method.iterate()
    np.random.seed(1)
    method.iterate()
    np.random.seed(1)
    much_zs, much_lows = method.iterate()

    # Compare NaN as equals since they just represent missing points.
    assert np.all(np.isclose(last_zs, much_zs, equal_nan=True))
    assert np.all(np.isclose(last_lows, much_lows, equal_nan=True))

    much_x, much_f = method.interact(much_zs[0], much_lows[0])

    assert np.all(np.isclose(last_x, much_x))
    assert np.all(np.isclose(last_f, much_f))
"Purchasing and ordering cost", "Holding cost", "Cycle service level", "Probability of product availability", "Inventory turnoever", ] variable_names = ["x{}".format(i + 1) for i in range(xs.shape[1])] is_max = [False, False, True, True, True] # scale the data scaler = MinMaxScaler((-1, 1)) scaler.fit(np.where(is_max, -fs, fs)) fs_norm = scaler.transform(np.where(is_max, -fs, fs)) # create the problem problem = ScalarDataProblem(xs, fs_norm) enautilus = ENautilus(problem) total_iters = 5 points_shown = 4 nadir, ideal = enautilus.initialize(total_iters, points_shown) plotter = Plotter(nadir, ideal, scaler, is_max) # this is bad! intermediate_points = [] intermediate_ranges = [] current_best_idx = 0 previous_best = None ### # fot the parallel axes
def simple_data_problem(simple_data):
    xs, fs = simple_data
    return ScalarDataProblem(xs, fs)
def sphere_nimbus(sphere_pareto):
    problem = ScalarDataProblem(*sphere_pareto)
    method = SNimbus(problem)
    return method
def simple_nimbus(four_dimenional_data_with_extremas):
    xs, fs, nadir, ideal = four_dimenional_data_with_extremas
    problem = ScalarDataProblem(xs, fs)
    method = SNimbus(problem)
    return method
def iterate(
    self
) -> Union[np.ndarray, Tuple[np.ndarray, np.ndarray, np.ndarray]]:
    """Iterate according to the preferences given by the DM in the
    interaction phase.

    Returns:
        Union[np.ndarray, Tuple[np.ndarray, np.ndarray, np.ndarray]]:
        Returns the current point for the first iteration. For the
        following iterations, returns the decision vectors, the
        objective vector values and the current archive of saved points.

    """
    # if first iteration, just return the starting point
    if self.first_iteration:
        self.first_iteration = False
        return self.current_point

    res_all_xs: List[np.ndarray] = []
    res_all_fs: List[np.ndarray] = []

    if self.generate_intermediate:
        # generate n points between two previous points;
        # the solver for subproblem 3 can be reused here
        if self.__sprob_3 is None:
            # create the subproblem and solver
            self.__sprob_3 = ScalarDataProblem(
                self.pareto_front, self.objective_vectors
            )
            self.__sprob_3.nadir = self.nadir
            self.__sprob_3.ideal = self.ideal

            self.__solver_3 = ASFScalarSolver(
                self.__sprob_3, DiscreteMinimizer()
            )
            self.__solver_3.asf = PointMethodASF(self.nadir, self.ideal)

        z_bars = self._create_intermediate_reference_points()

        for z in z_bars:
            res = self.__solver_3.solve(z)  # type: ignore
            res_all_xs.append(res[0])
            res_all_fs.append(res[1][0])

        # always require an explicit request from the DM to generate
        # intermediate points
        self.generate_intermediate = False

    else:
        # solve the subproblems normally
        if self.n_points >= 1:
            # solve the desired number of ASFs
            self._sort_classsifications()
            z_bar = self._create_reference_point()

            # subproblem 1
            if self.__sprob_1 is None:
                # create the subproblem and solver
                self.__sprob_1 = ScalarDataProblem(
                    self.pareto_front, self.objective_vectors
                )
                self.__sprob_1.nadir = self.nadir
                self.__sprob_1.ideal = self.ideal

                self.__solver_1 = ASFScalarSolver(
                    self.__sprob_1, DiscreteMinimizer()
                )
                self.__solver_1.asf = MaxOfTwoASF(
                    self.nadir, self.ideal, [], []
                )

            # set the constraints for the 1st subproblem
            sp1_all_cons = []
            sp1_cons1_idx = np.sort(
                self.__ind_set_lt + self.__ind_set_lte + self.__ind_set_eq
            )

            if len(sp1_cons1_idx) > 0:
                sp1_cons1_f = lambda _, fs: np.where(  # noqa
                    np.all(
                        fs[:, sp1_cons1_idx]
                        <= self.current_point[sp1_cons1_idx],
                        axis=1,
                    ),
                    np.ones(len(fs)),
                    -np.ones(len(fs)),
                )
                sp1_cons1 = ScalarConstraint(
                    "sp1_cons1",
                    self.pareto_front.shape[1],
                    self.objective_vectors.shape[1],
                    sp1_cons1_f,
                )
                sp1_all_cons.append(sp1_cons1)

            sp1_cons2_idx = self.__ind_set_gte

            if len(sp1_cons2_idx) > 0:
                sp1_cons2_f = lambda _, fs: np.where(  # noqa
                    np.all(
                        fs[:, sp1_cons2_idx] <= self.__upper_bounds,
                        axis=1,
                    ),
                    np.ones(len(fs)),
                    -np.ones(len(fs)),
                )
                sp1_cons2 = ScalarConstraint(
                    "sp1_cons2",
                    self.pareto_front.shape[1],
                    self.objective_vectors.shape[1],
                    sp1_cons2_f,
                )
                sp1_all_cons.append(sp1_cons2)

            self.__sprob_1.constraints = sp1_all_cons

            # solve the subproblem
            self.__solver_1.asf.lt_inds = self.__ind_set_lt  # type: ignore
            self.__solver_1.asf.lte_inds = (  # type: ignore
                self.__ind_set_lte
            )

            sp1_reference = np.zeros(self.objective_vectors.shape[1])
            sp1_reference[self.__ind_set_lte] = self.__aspiration_levels

            res1 = self.__solver_1.solve(sp1_reference)  # type: ignore
            res_all_xs.append(res1[0])
            res_all_fs.append(res1[1][0])

        if self.n_points >= 2:
            # subproblem 2
            if self.__sprob_2 is None:
                # create the subproblem and solver
                self.__sprob_2 = ScalarDataProblem(
                    self.pareto_front, self.objective_vectors
                )
                self.__sprob_2.nadir = self.nadir
                self.__sprob_2.ideal = self.ideal

                self.__solver_2 = ASFScalarSolver(
                    self.__sprob_2, DiscreteMinimizer()
                )
                self.__solver_2.asf = StomASF(self.ideal)

            res2 = self.__solver_2.solve(z_bar)  # type: ignore
            res_all_xs.append(res2[0])
            res_all_fs.append(res2[1][0])

        if self.n_points >= 3:
            # subproblem 3
            if self.__sprob_3 is None:
                # create the subproblem and solver
                self.__sprob_3 = ScalarDataProblem(
                    self.pareto_front, self.objective_vectors
                )
                self.__sprob_3.nadir = self.nadir
                self.__sprob_3.ideal = self.ideal

                self.__solver_3 = ASFScalarSolver(
                    self.__sprob_3, DiscreteMinimizer()
                )
                self.__solver_3.asf = PointMethodASF(self.nadir, self.ideal)

            res3 = self.__solver_3.solve(z_bar)  # type: ignore
            res_all_xs.append(res3[0])
            res_all_fs.append(res3[1][0])

        if self.n_points >= 4:
            # subproblem 4
            if self.__sprob_4 is None:
                # create the subproblem and solver
                self.__sprob_4 = ScalarDataProblem(
                    self.pareto_front, self.objective_vectors
                )
                self.__sprob_4.nadir = self.nadir
                self.__sprob_4.ideal = self.ideal

                self.__solver_4 = ASFScalarSolver(
                    self.__sprob_4, DiscreteMinimizer()
                )
                self.__solver_4.asf = AugmentedGuessASF(
                    self.nadir, self.ideal, self.__ind_set_free
                )
            else:
                # update the indices to be excluded in the existing
                # solver's asf
                self.__solver_4.asf.indx_to_exclude = (  # type: ignore
                    self.__ind_set_free
                )

            res4 = self.__solver_4.solve(z_bar)  # type: ignore
            res_all_xs.append(res4[0])
            res_all_fs.append(res4[1][0])

    # return the obtained solutions and the archive of existing solutions;
    # deepcopy, because we don't want to return a reference to the archive
    return (
        np.array(res_all_xs),
        np.array(res_all_fs),
        deepcopy(self.archive),
    )
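# A minimal sketch (not part of the class above) of how a caller might unpack
# the two return shapes of iterate() described in its docstring: the starting
# point on the first call, and (decision vectors, objective vectors, archive)
# on later calls. The name `drive_one_step` and its argument `method` (an
# already initialized SNimbus instance whose interaction phase is handled by
# the caller) are assumptions for illustration.
def drive_one_step(method):
    result = method.iterate()
    if isinstance(result, tuple):
        xs, fs, archive = result
        return xs, fs, archive
    # first iteration: only the current (starting) point is returned
    return result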
"""This is for purely testing. """ from desdeov2.methods.Nautilus import ENautilusB from desdeov2.problem.Problem import ScalarDataProblem import numpy as np data = np.loadtxt("./data/pareto_front_3d_sphere_1st_octant_surface.dat") problem = ScalarDataProblem(data[:, 0:2], data[:, 2:]) method = ENautilusB(problem) method.initialize(10, 10) limits, dist = method.iterate() method.nadir = np.array([2, 2, 2]) new_point = np.array([0, 0, 1]) method.interact(new_point) limits, dist = method.iterate() print(limits) print(dist)
def dummy_problem(dummy_data):
    xs, fs = dummy_data
    return ScalarDataProblem(xs, fs)