class HyperVolume(AbstractConvergenceMetric):
    '''Hypervolume convergence metric class

    This metric is derived from a hyper-volume measure, which describes the
    multi-dimensional volume of space contained within the pareto front. When
    computed with minimum and maximum values, it describes the ratio of
    dominated outcomes to all possible outcomes in the extent of the space.
    Getting this number to be high or low is not necessarily important, as not
    all outcomes within the min-max range will be feasible. But having the
    hypervolume remain fairly stable over multiple generations of the
    evolutionary algorithm provides an indicator of convergence.

    Parameters
    ----------
    minimum : numpy array
    maximum : numpy array

    '''

    def __init__(self, minimum, maximum):
        super(HyperVolume, self).__init__("hypervolume")
        self.hypervolume_func = Hypervolume(minimum=minimum, maximum=maximum)

    def __call__(self, optimizer):
        self.results.append(
            self.hypervolume_func.calculate(optimizer.algorithm.archive))

    @classmethod
    def from_outcomes(cls, outcomes):
        ranges = [_.expected_range() for _ in outcomes]
        return cls([_[0] for _ in ranges], [_[1] for _ in ranges])
class HyperVolume(AbstractConvergenceMetric):
    '''Hypervolume convergence metric class

    This metric is derived from a hyper-volume measure, which describes the
    multi-dimensional volume of space contained within the pareto front. When
    computed with minimum and maximum values, it describes the ratio of
    dominated outcomes to all possible outcomes in the extent of the space.
    Getting this number to be high or low is not necessarily important, as not
    all outcomes within the min-max range will be feasible. But having the
    hypervolume remain fairly stable over multiple generations of the
    evolutionary algorithm provides an indicator of convergence.

    Parameters
    ----------
    minimum : numpy array
    maximum : numpy array

    '''

    def __init__(self, minimum, maximum):
        super(HyperVolume, self).__init__("hypervolume")
        self.hypervolume_func = Hypervolume(minimum=minimum, maximum=maximum)

    def __call__(self, optimizer):
        self.results.append(self.hypervolume_func.calculate(
            optimizer.algorithm.archive))

    @classmethod
    def from_outcomes(cls, outcomes):
        ranges = [o.expected_range for o in outcomes if o.kind != o.INFO]
        minimum, maximum = np.asarray(list(zip(*ranges)))
        return cls(minimum, maximum)
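The class above appears to be the EMA Workbench convergence metric; it is normally handed to the workbench's optimizer rather than called directly. A minimal usage sketch, assuming the ema_workbench optimize API with convergence tracking (the model object, epsilon values, and nfe budget are placeholders, not part of the original code):

# Minimal sketch (assumption): track hypervolume while optimizing an
# ema_workbench model; `model`, the epsilons, and the nfe budget are placeholders.
from ema_workbench import MultiprocessingEvaluator
from ema_workbench.em_framework.optimization import HyperVolume, EpsilonProgress

convergence_metrics = [HyperVolume.from_outcomes(model.outcomes),
                       EpsilonProgress()]

with MultiprocessingEvaluator(model) as evaluator:
    results, convergence = evaluator.optimize(nfe=10000,
                                              searchover='levers',
                                              epsilons=[0.05] * len(model.outcomes),
                                              convergence=convergence_metrics)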
class HyperVolume(AbstractConvergenceMetric):
    '''Hypervolume convergence metric class

    Parameters
    ----------
    minimum : numpy array
    maximum : numpy array

    '''

    def __init__(self, minimum, maximum):
        super(HyperVolume, self).__init__("hypervolume")
        self.hypervolume_func = Hypervolume(minimum=minimum, maximum=maximum)

    def __call__(self, optimizer):
        self.results.append(
            self.hypervolume_func.calculate(optimizer.algorithm.archive))
reference_set = EpsilonBoxArchive([0.02, 0.02, 0.02])

for _ in range(1000):
    solution = Solution(problem)
    solution.variables = [random.uniform(0, 1) if i < problem.nobjs - 1 else 0.5
                          for i in range(problem.nvars)]
    solution.evaluate()
    reference_set.add(solution)

# compute the indicators
gd = GenerationalDistance(reference_set)
print("Generational Distance:", gd.calculate(algorithm.result))

igd = InvertedGenerationalDistance(reference_set)
print("Inverted Generational Distance:", igd.calculate(algorithm.result))

hyp = Hypervolume(reference_set)
print("Hypervolume:", hyp.calculate(algorithm.result))

ei = EpsilonIndicator(reference_set)
print("Epsilon Indicator:", ei.calculate(algorithm.result))

sp = Spacing()
print("Spacing:", sp.calculate(algorithm.result))

# plot the result versus the reference set
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D

fig = plt.figure()
ax = fig.add_subplot(1, 1, 1, projection='3d')
ax.scatter([s.objectives[0] for s in reference_set],
           [s.objectives[1] for s in reference_set],
           [s.objectives[2] for s in reference_set])
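The snippet above assumes a `problem` and a finished `algorithm` are already in scope; a minimal, hypothetical setup for a three-objective case using the Platypus DTLZ2 benchmark and NSGAII might look like this:

# Minimal sketch (assumption): the 3-objective DTLZ2 benchmark solved with
# NSGAII supplies the `problem` and `algorithm.result` used by the indicators.
from platypus import DTLZ2, NSGAII

problem = DTLZ2(3)           # three objectives, matching the 3D plot above
algorithm = NSGAII(problem)
algorithm.run(10000)         # 10,000 function evaluations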
    elif (opt == 5):  # operating rule (OP5 or OP3 12 demand values)
        for j in range(12):
            constraint.append(Real(0.0001, (math.pi/2 - 0.0001)))  # x1 (0-pi/2), but avoid 0 and pi (90 degree)
            constraint.append(Real(vd, vcap))                      # x2 (range from vdead to vcapacity)
            constraint.append(Real(vd, vcap))                      # x3 (range from vdead to vcapacity), check condition later
            constraint.append(Real(0.0001, (math.pi/2 - 0.0001)))  # x4 (0-pi/2), but avoid 0 and pi (90 degree)
        num_var += 48
        reservoirs[i][1] = 5

# Call optimization function
problem = Problem(num_var, totalobs)
problem.types[:] = constraint
problem.function = viccall
problem.constraints[:] = ">=0"
start = datetime.datetime.now()
hyp = Hypervolume(minimum=minvar, maximum=maxvar)
allresults = []

with ProcessPoolEvaluator(number_of_cores) as evaluator:
    algorithm = EpsNSGAII(problem, eps, population_size=population, evaluator=evaluator)
    while algorithm.nfe < number_of_functions:
        algorithm.step()
        one_step_result = hyp.calculate(algorithm.result)
        # this param stores information of hypervolume indicator, save as a file if needed
        allresults.append(one_step_result)

end = datetime.datetime.now()
nondominated_solutions = nondominated(algorithm.result)
os.chdir('../Results')

# Finish and save results to files
print("Start", start)
print("End", end)
print("Finish running simulations! See opt_objectives.txt and opt_variables.txt for results.")
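Because `allresults` collects one hypervolume value per algorithm step, plotting the trajectory is a quick way to see whether it has stabilized. A minimal sketch (the figure file name is an assumption):

# Minimal sketch (assumption): visualize the hypervolume trajectory collected
# in `allresults` to judge whether the optimization has converged.
import matplotlib.pyplot as plt

plt.plot(range(len(allresults)), allresults)
plt.xlabel("Algorithm step")
plt.ylabel("Hypervolume")
plt.title("Hypervolume convergence")
plt.savefig("hypervolume_convergence.png")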
problem.directions[:] = DIRECTIONS

# Experimenter with NSGAII, NSGAIII and SPEA2
algorithms = [(NSGAII, {"population_size": POPSIZE}),
              (NSGAIII, {"population_size": POPSIZE, "divisions_outer": 12}),
              (SPEA2, {"population_size": POPSIZE})]

# Multi-processing (4 parallel processes)
with ProcessPoolEvaluator(4) as evaluator:
    results = experiment(algorithms, problem, nfe=FEs, seeds=n_reps,
                         evaluator=evaluator, display_stats=True)

    # As of Platypus v1.0.4, NSGA-III works only with minimization objectives
    # Converting first objective (Apoptosis) back to positive
    for alg in ["NSGAII", "NSGAIII", "SPEA2"]:
        for run in range(len(results[alg]["Problem"])):
            for ind in range(len(results[alg]["Problem"][0])):
                results[alg]["Problem"][run][ind].objectives[0] = \
                    -results[alg]["Problem"][run][ind].objectives[0]
                results[alg]["Problem"][run][ind].problem.directions[0] = 1

    # Save results in pickle
    import pickle
    pickle.dump(results, open("res_3obj_" + str(n_reps) + "reps.p", "wb"))

    # Calculate Hypervolume
    hyp = Hypervolume(minimum=[-1.0, -1.0, 0], maximum=[1.0, 1.0, 16])
    hyp_result = calculate(results, hyp, evaluator=evaluator)
    display(hyp_result, ndigits=3)

    with open("hypervolume_3obj_" + str(n_reps) + "reps.txt", 'w') as f:
        print(hyp_result, file=f)
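To compare the three algorithms numerically, the per-seed hypervolume values in `hyp_result` can be averaged. A minimal sketch, assuming `calculate` mirrors the nesting of `results` (algorithm, then problem, then indicator name, then one value per seed):

# Minimal sketch (assumption): `calculate` is assumed to return a nested dict
# of algorithm -> problem -> indicator -> per-seed values; average the
# per-seed hypervolume for a quick comparison of the three algorithms.
from statistics import mean

for alg_name, problems in hyp_result.items():
    for prob_name, indicators in problems.items():
        for ind_name, values in indicators.items():
            print(alg_name, prob_name, ind_name, "mean:", round(mean(values), 3))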
plt.legend(numpoints=1)
# plt.xlim(0, 50);
plt.xlabel("Compilation Time(in ms)")
plt.ylabel("Execution Time(in ms)")
plt.title(
    "Comparison of Compilation and Execution time \n of various compiler optimization approaches"
)


# # Hypervolume

# In[ ]:


from platypus import Hypervolume, experiment, calculate, display

hyp = Hypervolume(minimum=[0, 0, 0], maximum=[1, 1, 1])
hyp_result = calculate(results, hyp)
display(hyp_result, ndigits=5)
print(hyp_result)


# # Compilation and Execution time of Optimization levels

# In[1012]:


import matplotlib.pyplot as plt
import numpy as np
import collections

fig = plt.figure()
X = np.arange(4)
minobj = []
maxobj = []
eps = []

for i in range(4):  # 4 objective functions considered
    VIC_objs[i] = int(lines[i + 19].split('\t')[0])
    if (VIC_objs[i] > 0):
        no_of_obj += 1
        minobj.append(0)  # Normalize objective functions to the range of 0-1
        maxobj.append(1)
        eps.append(0.001)

# Setup optimization parameters
problem = Problem(no_vars, no_of_obj)
problem.types[:] = VIC_types
problem.function = viccall
hyp = Hypervolume(minimum=minobj, maximum=maxobj)
x = []

# Start simulations
with ProcessPoolEvaluator(number_of_core) as evaluator:
    algorithm = EpsNSGAII(problem, eps, population_size=pop_size, evaluator=evaluator)
    while algorithm.nfe <= number_functions:
        algorithm.step()
        y = hyp.calculate(algorithm.result)
        x.append(y)

end = datetime.datetime.now()
nondominated_solutions = nondominated(algorithm.result)
os.chdir('../Results')
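The list `x` ends up holding one hypervolume value per step; a minimal sketch (the output file name is an assumption) that writes the trajectory to disk so convergence can be inspected after the run:

# Minimal sketch (assumption): persist the per-step hypervolume trajectory in
# a plain-text file for later convergence checks.
with open('hypervolume_per_step.txt', 'w') as f:
    for step, value in enumerate(x):
        f.write(f"{step}\t{value}\n")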