def test_nondominated_points_ref_1(self) -> None:
    """Test input data with non-dominated points and reference."""
    _, _, points, reference = random_2d_3d_front(1000)
    sorted_contribs = np.array(sorted(hv.contributions(points, reference)))
    parameters = UPMOParameters(points.shape[1], 1.0)
    archive = UPMOArchive(parameters, reference)
    for point in points:
        archive.insert(point, point)
    self.assertTrue(archive.size == len(points), "Size")
    output_contribs = np.array(
        sorted(map(lambda individual: individual.contribution, archive)))
    self.assertTrue(np.allclose(sorted_contribs, output_contribs),
                    "Contributions")
def test_statistics(self) -> None:
    """Test statistics."""
    parameters = UPMOParameters(10, 1.0)
    archive = UPMOArchive(parameters)
    isr = archive.get_statistics().insert_success_ratio
    self.assertTrue(isr == 0.0)
    archive.insert(np.array([1.3, 1.3]), np.array([4.0, 5.0]))
    isr = archive.get_statistics().insert_success_ratio
    self.assertTrue(isr == 1.0)
    # The second fitness is dominated by the first, so this insert is
    # rejected and the success ratio drops to 1 success out of 2 attempts.
    archive.insert(np.array([1.4, 1.4]), np.array([5.0, 6.0]))
    isr = archive.get_statistics().insert_success_ratio
    self.assertTrue(isr == 0.5)
def test_dominated_points_ref_1(self) -> None:
    """Test input data with dominated points and reference."""
    # Random points generated using one of the utility
    # functions (random_2d_3d_front).
    points = np.array([
        [1.07525383, 9.9420234],
        [9.0063025, 4.34586186],
        [1.07525383, 9.9520234],
        [5.21288155, 8.53380723],
        [4.56317607, 8.90816971],
        [8.01491032, 5.98006794],
        [3.24097153, 9.46023803],
        [8.02491032, 5.98006794],
        [4.56317607, 8.89816971],
        [8.09812306, 5.8668904],
        [9.47977929, 3.18336057],
        [8.15916972, 5.78169088],
        [9.93329032, 1.15314504],
    ])
    ranks, _ = non_dominated_sort(points)
    nadir = np.array([10.0, 10.0])
    # The contributions were generated using the reference implementation
    # by A. P. Guerreiro, available at https://github.com/apguerreiro/HVC.
    # For example: ./hvc -P 1 -f 0 -r "10. 10. 1" | sort
    # In the 2-D case, the z-component of the points is set to zero
    # and the z-component of the reference point is set to one.
    sorted_contribs = np.array([
        0.00690911080401627,
        0.0721753062322654,
        0.12556094880582,
        0.135435028337332,
        0.212503643566556,
        0.365178867638393,
        0.527207157404229,
        0.637018803519581,
        0.679831715378445,
        1.02095415166855,
    ])
    parameters = UPMOParameters(points.shape[1], 1.0)
    archive = UPMOArchive(parameters, nadir)
    for point in points:
        archive.insert(point, point)
    self.assertTrue(archive.size == np.sum(ranks == 1), "Size")
    output_contribs = np.array(
        sorted(map(lambda individual: individual.contribution, archive)))
    self.assertTrue(np.allclose(sorted_contribs, output_contribs),
                    "Contributions")
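# Hedged cross-check sketch (not part of the original suite): the helper
# below recomputes 2-D exclusive hypervolume contributions directly, under
# the minimization convention used above, instead of lifting the points to
# 3-D for the HVC tool.  The name `naive_contributions_2d` is hypothetical
# and the function assumes the module-level `import numpy as np`.
def naive_contributions_2d(front: np.ndarray,
                           reference: np.ndarray) -> np.ndarray:
    """Exclusive contributions of a mutually non-dominated 2-D front.

    Sorted by the first objective, each point exclusively dominates the
    rectangle bounded by its successor's first coordinate and its
    predecessor's second coordinate; the reference point closes the front
    at both ends.
    """
    order = np.argsort(front[:, 0])
    f = front[order]
    n = f.shape[0]
    contribs = np.empty(n)
    for i in range(n):
        width = (f[i + 1, 0] if i + 1 < n else reference[0]) - f[i, 0]
        height = (f[i - 1, 1] if i > 0 else reference[1]) - f[i, 1]
        contribs[i] = width * height
    # Undo the sorting so the result aligns with the input rows.
    out = np.empty(n)
    out[order] = contribs
    return out
# For the rank-1 points above, sorting the returned values should reproduce
# `sorted_contribs`; e.g. the point (1.07525383, 9.9420234) yields
# (3.24097153 - 1.07525383) * (10.0 - 9.9420234) ~= 0.12556, matching
# 0.12556094880582 in the list.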
def test_creation(self) -> None:
    """Test creating an empty archive."""
    parameters = UPMOParameters(10, 1.0)
    archive = UPMOArchive(parameters)
    self.assertTrue(archive.empty, "Empty")
    self.assertTrue(archive.left_extreme is None, "Left extreme")
    self.assertTrue(archive.right_extreme is None, "Right extreme")
    self.assertTrue(archive.size == 0, "Size")
class UPMOCMA(Optimizer):
    """The UP-MO-CMA-ES optimizer.

    Parameters
    ----------
    initial_points
        The search points of the initial population.
    initial_fitness
        The objective points of the initial population.
    initial_step_size: optional
        The initial step size. Ignored if `parameters` is provided.
    max_generations: optional
        Maximum number of generations; used as a stopping condition.
    max_evaluations: optional
        Maximum number of function evaluations; used as a stopping condition.
    max_nbytes: optional
        Maximum memory (in bytes) used by the population archive; used as a
        stopping condition.
    max_size: optional
        Maximum population size; used as a stopping condition.
    success_notion: optional
        The notion of success (either `individual` or `population`).
    reference: optional
        A reference point.
    parameters: optional
        The external parameters. Allows providing custom values other than
        those recommended in the literature.
    rng: optional
        A random number generator.
    cov_model: optional
        How to store the covariance information (either `full` or `cholesky`).

    Raises
    ------
    ValueError
        A parameter was provided with an invalid value.
    NotImplementedError
        A parameter value is not supported yet.

    Notes
    -----
    The implementation is based on :cite:`2016:mo-cma-es`.
    A minimal ask-tell usage sketch follows the class definition.
    """

    def __init__(
        self,
        initial_points: np.ndarray,
        initial_fitness: np.ndarray,
        initial_step_size: float = 1.0,
        max_generations: Optional[int] = None,
        max_evaluations: Optional[int] = None,
        max_nbytes: Optional[int] = None,
        max_size: Optional[int] = None,
        success_notion: str = "population",
        reference: Optional[np.ndarray] = None,
        parameters: Optional[UPMOParameters] = None,
        rng: Optional[np.random.Generator] = None,
        cov_model: str = "full",
    ):
        self._n_dimensions = initial_points.shape[1]
        self._n_objectives = initial_fitness.shape[1]
        if self._n_objectives != 2:
            raise NotImplementedError("Unsupported objective dimensionality")
        if success_notion == "individual":
            self._success_notion = SuccessNotion.IndividualBased
        elif success_notion == "population":
            self._success_notion = SuccessNotion.PopulationBased
        else:
            raise ValueError("Invalid value for success_notion.")
        if cov_model == "full":
            self._cov_model = CovModel.Full
        elif cov_model == "cholesky":
            raise NotImplementedError(
                "Support for Cholesky factors has not been implemented.")
        else:
            raise ValueError("Invalid value for cov_model.")
        if parameters is None:
            self._parameters = UPMOParameters(
                self._n_dimensions,
                initial_step_size,
            )
        else:
            self._parameters = parameters
            if self._parameters.n_dimensions != self._n_dimensions:
                raise ValueError(
                    "Invalid value for n_dimensions in provided parameters")
        self._stopping_conditions = UPMOStoppingConditions(
            max_generations=max_generations,
            max_evaluations=max_evaluations,
            max_nbytes=max_nbytes,
            max_size=max_size,
        )
        if rng is None:
            self._rng = np.random.default_rng()
        else:
            self._rng = rng
        self._population = UPMOArchive(self._parameters, reference)
        for point, fitness in zip(initial_points, initial_fitness):
            self._population.insert(point, fitness)
        self._generation_count = 0
        self._evaluation_count = 0
        self._parent = None
        self._offspring_cov = None
        self._offspring_point = None
        self._ask_called = False

    @property
    def name(self):
        """Return the name of the optimizer.

        Returns
        -------
        The optimizer's name.
        """
        return "UP-MO-CMA-ES-{}".format(str(self._success_notion))

    @property
    def qualified_name(self):
        """Return the qualified name of the optimizer.

        Returns
        -------
        The optimizer's qualified name.
""" return self.name @property def generation_count(self) -> int: """Return the number of elapsed generations.""" return self._generation_count @property def evaluation_count(self) -> int: """Return the number of function evaluations.""" return self._evaluation_count @property def parameters(self) -> UPMOParameters: """Return a read-only version of the external parameters.""" return self._parameters @property def size(self) -> int: """Return the size of the population. Returns ------- The size of the population. """ return self._population.size @property def nbytes(self) -> int: """Return the memory in bytes used by the population archive.""" return self._population.nbytes def ask(self) -> Any: """Generate a new search point. Returns ------- np.ndarray The new search point. Raises ------ RuntimeError When called before initializing the population with \ `insert_initial`. """ # Information we need to persist between calls to ask and tell: # * Reference to parent # * Offspring covariance matrix # * Offspring search point sigma_min = self._parameters.sigma_min p_extreme = self._parameters.p_extreme c_r = self._parameters.c_r c_r_h = 0.5 * c_r p = self._rng.uniform(size=2) if p[0] < p_extreme or self._population.size <= 2: self._parent = self._population.sample_extreme(p[1]) if self._parent.step_size < sigma_min: self._parent = self._population.sample_interior(p[1]) else: self._parent = self._population.sample_interior(p[1]) nearest = self._population.nearest(self._parent) self._offspring_cov = (1.0 - c_r) * self._parent.cov if nearest[0] is not None: z = (nearest[0].point - self._parent.point) / self._parent.step_size self._offspring_cov += c_r_h * np.outer(z, z) if nearest[1] is not None: z = (nearest[1].point - self._parent.point) / self._parent.step_size self._offspring_cov += c_r_h * np.outer(z, z) self._offspring_point = self._rng.multivariate_normal( self._parent.point, (self._parent.step_size * self._parent.step_size) * self._offspring_cov, ) self._ask_called = True return self._offspring_point def tell(self, fitness: np.ndarray, evaluation_count: int = 1) -> None: """ Pass fitness information to the optimizer. Parameters ---------- fitness The fitness of the search point. evaluation_count: optional Total evaluation count. Use case: noisy functions. Raises ------ RuntimeError When `tell` is called before `ask`. Notes ----- Assumes stored offspring data (i.e covariance matrix) corresponds to the search point produced by the last call to `ask`. """ if not self._ask_called: raise RuntimeError("Tell called before ask") z = (self._offspring_point - self._parent.point) / self._parent.step_size # If the offspring dominates the parent, the last will be # deleted when inserting the first if dominates(fitness, self._parent.fitness): self._parent = None # We attempt to insert the point into the archive offspring = self._population.insert(self._offspring_point, fitness) # If the offspring was not inserted it is because it was dominated # and hence unsuccessful. if offspring is not None: # With population-based notion of success if it is # inserted it is successful. 
            success_indicator = 1.0
            # With the individual-based notion of success, the offspring is
            # successful only if its contribution is greater than or equal
            # to its parent's contribution (when the parent still exists).
            if (self._success_notion == SuccessNotion.IndividualBased
                    and self._parent is not None
                    and offspring.contribution < self._parent.contribution):
                success_indicator = 0.0
            c_p = self._parameters.c_p
            c_cov = self._parameters.c_cov
            d_inv = 1.0 / self._parameters.d
            p_target_succ = self._parameters.p_target_succ
            p_target_succ_comp = 1.0 - p_target_succ
            zz = np.outer(z, z)
            # Adapt offspring
            offspring.p_succ *= 1.0 - c_p
            offspring.p_succ += c_p * success_indicator
            offspring.step_size *= math.exp(
                d_inv *
                ((offspring.p_succ - p_target_succ) / p_target_succ_comp))
            offspring.cov[:, :] = (
                1.0 - c_cov) * self._offspring_cov + c_cov * zz
            # Adapt parent if it was not deleted
            if self._parent is not None:
                self._parent.p_succ *= 1.0 - c_p
                self._parent.p_succ += c_p * success_indicator
                self._parent.step_size *= math.exp(
                    d_inv * ((self._parent.p_succ - p_target_succ) /
                             p_target_succ_comp))
                self._parent.cov[:, :] = (
                    1.0 - c_cov) * self._parent.cov + c_cov * zz
        self._offspring_cov = None
        self._offspring_point = None
        self._generation_count += 1
        self._evaluation_count += evaluation_count
        self._ask_called = False

    @property
    def stop(self) -> UPMOStoppingConditions:
        """Check the stopping conditions and return which ones triggered."""
        conditions = self._stopping_conditions
        result = UPMOStoppingConditions(is_output=True)
        if (conditions.max_generations is not None
                and self._generation_count >= conditions.max_generations):
            result.triggered = True
            result.max_generations = self._generation_count
        if (conditions.max_evaluations is not None
                and self._evaluation_count >= conditions.max_evaluations):
            result.triggered = True
            result.max_evaluations = self._evaluation_count
        if (conditions.max_size is not None
                and self._population.size >= conditions.max_size):
            result.triggered = True
            result.max_size = self._population.size
        if (conditions.max_nbytes is not None
                and self._population.nbytes >= conditions.max_nbytes):
            result.triggered = True
            result.max_nbytes = self._population.nbytes
        return result

    @property
    def best(self):
        raise NotImplementedError()

    def fmin(
        self,
        fn: OptimizableFunction,
        fn_args: Optional[Iterable[Any]],
        fn_kwargs: Optional[dict],
        **kwargs: Any,
    ) -> OptimizerResult:
        raise NotImplementedError()
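# A minimal ask-tell usage sketch for UPMOCMA, assuming the class above and
# the module's `import numpy as np`.  The bi-objective toy function
# `sphere_pair`, the seed, and the problem sizes are hypothetical; only the
# constructor arguments, `ask`, `tell`, `evaluation_count`, and `size` come
# from the class itself.
def sphere_pair(x: np.ndarray) -> np.ndarray:
    """A toy bi-objective function: two shifted sphere functions."""
    return np.array([np.sum(x ** 2), np.sum((x - 1.0) ** 2)])


def example_run(budget: int = 2000) -> UPMOCMA:
    """Run UP-MO-CMA-ES on `sphere_pair` for a fixed evaluation budget."""
    rng = np.random.default_rng(seed=7)
    initial_points = rng.uniform(-1.0, 2.0, size=(10, 3))
    initial_fitness = np.array([sphere_pair(x) for x in initial_points])
    optimizer = UPMOCMA(
        initial_points,
        initial_fitness,
        initial_step_size=0.5,
        max_evaluations=budget,
        reference=np.array([10.0, 10.0]),
    )
    while optimizer.evaluation_count < budget:
        x = optimizer.ask()
        optimizer.tell(sphere_pair(x))
    # The archive grows as new non-dominated points are found; its size can
    # be inspected through `optimizer.size`.
    return optimizer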
def test_sampling(self) -> None:
    """Test sampling extreme and interior points."""
    # m: number of samples used to determine the empirical probabilities
    m = 100000
    # n: number of points
    n = np.random.default_rng().integers(7, 11, 1, dtype=int)[0]
    n_f = float(n)
    # points = {(1, n), (2, n-1), ..., (n, 1)}
    points = np.array(list(zip(range(1, n + 1), range(n, 0, -1))),
                      dtype=float)
    np.random.shuffle(points)
    parameters = UPMOParameters(points.shape[1], 1.0)
    archive = UPMOArchive(parameters)
    for point in points:
        archive.insert(point, point)
    self.assertTrue(
        np.allclose(archive.left_extreme.fitness, np.array([1.0, n_f])),
        "Left extreme: {}".format(archive.left_extreme.fitness),
    )
    self.assertTrue(
        np.allclose(archive.right_extreme.fitness, np.array([n_f, 1.0])),
        "Right extreme {}".format(archive.right_extreme.fitness),
    )
    sorted_contributions = np.ones(n)
    sorted_contributions[-1] = np.finfo(float).max
    sorted_contributions[-2] = np.finfo(float).max
    output_contributions = np.array(
        sorted(map(lambda individual: individual.contribution, archive)))
    self.assertTrue(
        np.allclose(sorted_contributions, output_contributions),
        "Sorted contributions: Got\n{}\nExpected:{}\n".format(
            output_contributions,
            sorted_contributions,
        ),
    )
    max_acc_contributions = max(
        map(lambda individual: individual.acc_contribution, archive))
    self.assertTrue(
        max_acc_contributions == (n_f - 2.0),
        "Max. cumulative contribution, got {}, expected {}".format(
            max_acc_contributions, n_f - 2.0),
    )
    # Extreme empirical probabilities
    counts = np.zeros(n)
    for p in np.random.default_rng().uniform(size=m):
        sample = archive.sample_extreme(p)
        i = int(sample.coord(0)) - 1
        counts[i] += 1.0
    empirical_ps = np.round(counts / float(m), decimals=1)
    ps = np.zeros(n)
    ps[0] = 0.5
    ps[-1] = 0.5
    self.assertTrue(
        np.allclose(ps, empirical_ps, atol=1e-1),
        "Extreme empiricals, got: {}, expected: {}".format(empirical_ps, ps),
    )
    # Interior empirical probabilities
    counts = np.zeros(n)
    for p in np.random.default_rng().uniform(size=m):
        sample = archive.sample_interior(p)
        i = int(sample.coord(0)) - 1
        counts[i] += 1.0
    empirical_ps = np.round(counts / float(m), decimals=3)
    ps = np.full((n, ), 1.0 / (n_f - 2.0))
    ps[0] = 0.0
    ps[-1] = 0.0
    ps = np.round(ps, decimals=3)
    self.assertTrue(
        np.allclose(ps, empirical_ps, atol=1e-2),
        "Interior empiricals, got: {}, expected: {}".format(empirical_ps, ps),
    )
def test_merge(self) -> None:
    """Test merging of archives."""
    _, _, points1, _ = random_2d_3d_front(50)
    _, _, points2, _ = random_2d_3d_front(40)
    _, _, points3, _ = random_2d_3d_front(60)
    parameters = UPMOParameters(points1.shape[1], 1.0)
    archive0 = UPMOArchive(parameters)
    archive1 = UPMOArchive(parameters)
    archive2 = UPMOArchive(parameters)
    archive3 = UPMOArchive(parameters)
    for point in points1:
        archive0.insert(point, point)
        archive1.insert(point, point)
    for point in points2:
        archive0.insert(point, point)
        archive2.insert(point, point)
    for point in points3:
        archive0.insert(point, point)
        archive3.insert(point, point)
    size1 = archive1.size
    archive1.merge(archive2)
    del archive2
    gc.collect()
    self.assertTrue(archive1.size > size1)
    size1 = archive1.size
    archive1.merge(archive3)
    del archive3
    gc.collect()
    self.assertTrue(archive1.size > size1)
    for x0, x1 in zip(archive0, archive1):
        p0 = x0.fitness
        p1 = x1.fitness
        c0 = x0.contribution
        c1 = x1.contribution
        self.assertTrue(np.allclose(p0, p1), "{}, {}".format(p0, p1))
        self.assertTrue(math.isclose(c0, c1), "{}, {}".format(c0, c1))