def test_tell(self):
    """Test that the tell method runs without errors."""
    # Note: this doesn't test anything about the correctness
    # of adaptation and selection, just that they don't result in an error.
    n_parents = 3
    generator = np.random.default_rng()
    parent_points = generator.uniform(size=(n_parents, 5))
    parent_fitness = generator.uniform(size=(n_parents, 2))
    optimizer = MOCMA(
        parent_points,
        parent_fitness,
        success_notion="individual",
        max_evaluations=1000,
    )
    optimizer.ask()
    offspring_fitness = generator.uniform(size=(n_parents, 2))
    offspring_penalized_fitness = generator.uniform(size=(n_parents, 2))
    optimizer.tell(offspring_fitness, offspring_penalized_fitness)
    # We test the offspring data is copied correctly
    # which we can do since the offspring data remains untouched
    # in its buffer after selection.
    self.assertTrue(
        np.all(offspring_fitness == optimizer.population.fitness[n_parents:]),
        "fitness",
    )
    self.assertTrue(
        np.all(
            offspring_penalized_fitness
            == optimizer.population.penalized_fitness[n_parents:]
        ),
        "penalized_fitness",
    )
    self.assertTrue(optimizer.generation_count == 1, "generation_count")
    self.assertTrue(optimizer.evaluation_count == n_parents, "evaluation_count")
def test_initialization_steady(self):
    """Test initialization for the steady-state variant.

    With ``n_offspring=1`` the optimizer should report the
    (3+1)-MO-CMA-ES configuration with the population-based success
    notion as its default.
    """
    rng = np.random.default_rng(0)
    points = rng.uniform(size=(3, 5))
    fitness = rng.uniform(size=(3, 2))
    optimizer = MOCMA(
        points,
        fitness,
        max_evaluations=1000,
        n_offspring=1,
    )
    self.assertTrue(optimizer.name == "MO-CMA-ES", "name")
    self.assertTrue(optimizer.qualified_name == "(3+1)-MO-CMA-ES-P", "qualified_name")
    self.assertTrue(
        optimizer.success_notion.value == SuccessNotion.PopulationBased.value,
        "success_notion",
    )
    self.assertTrue(optimizer.generation_count == 0, "generation_count")
    self.assertTrue(optimizer.evaluation_count == 0, "evaluation_count")
    self.assertTrue(optimizer.parameters.n_dimensions == 5, "parameters.n_dimensions")
    # Fix: compare against None with `is`, not `==` (PEP 8). `==` can be
    # overridden by the operand's type and is not an identity test.
    self.assertTrue(
        optimizer.stopping_conditions.max_generations is None,
        "stopping_conditions.max_generations",
    )
    self.assertTrue(
        optimizer.stopping_conditions.max_evaluations == 1000,
        "stopping_conditions.max_evaluations",
    )
    self.assertTrue(np.all(optimizer.population.point[:3] == points), "points")
    self.assertTrue(np.all(optimizer.population.fitness[:3] == fitness), "fitness")
def test_ask_variant(self):
    """Test the ask method for the n_offspring != n_parents variant."""
    seeded_rng = np.random.default_rng(0)
    mu = 8  # number of parents
    lam = 4  # number of offspring
    parent_points = seeded_rng.uniform(size=(mu, 5))
    # In this set all points have different rank
    # Only the first element has rank 1
    parent_fitness = np.array([
        [0.01245897, 0.27127751],
        [0.02213313, 0.23395707],
        [0.0233907, 0.22994154],
        [0.0392689, 0.1886141],
        [0.04339422, 0.17990426],
        [0.16521067, 0.05107939],
        [0.17855283, 0.0440614],
        [0.28619405, 0.00950565],
    ])
    optimizer = MOCMA(
        parent_points,
        parent_fitness,
        max_evaluations=1000,
        n_offspring=lam,
    )
    # Test that the parent indices are correct: since only the first
    # parent has rank 1, every offspring should descend from it.
    got = optimizer.population.parent_index
    want = np.zeros(lam, dtype=int)
    self.assertTrue(
        np.all(got == want),
        "parent indices, got: {}, expected: {}".format(got, want),
    )
def test_ask(self):
    """Test that the ask method runs without errors."""
    seeded_rng = np.random.default_rng(0)
    n_parents = 100
    parent_points = seeded_rng.uniform(size=(n_parents, 5))
    parent_fitness = seeded_rng.uniform(size=(n_parents, 2))
    optimizer = MOCMA(
        parent_points,
        parent_fitness,
        success_notion="individual",
        max_evaluations=1000,
    )
    offspring = optimizer.ask()
    # Basic test shape
    self.assertTrue(offspring.shape == (n_parents, 5), "points shape")
    # Test that the parent indices are correct
    got = optimizer.population.parent_index
    want = np.arange(0, n_parents, dtype=int)
    self.assertTrue(
        np.all(got == want),
        "parent indices, got: {}, expected: {}".format(got, want),
    )
    # Test that no numbers are infinite or NaN
    self.assertFalse(
        np.any(np.isinf(offspring)),
        "Got infinite values: {}".format(offspring),
    )
    self.assertFalse(
        np.any(np.isnan(offspring)),
        "Got NaN values: {}".format(offspring),
    )
    # Test that the mutation works as expected:
    # offspring = parent + step_size * (cov @ z)
    got = offspring[0]
    want = (
        optimizer.population.point[0]
        + optimizer.population.step_size[0]
        * (optimizer.population.cov[0] @ optimizer.population.last_z[0])
    )
    self.assertTrue(
        np.allclose(got, want),
        "mutation: got {}, expected: {}".format(got, want),
    )
def test_best(self):
    """Test that the best method works as expected."""
    seeded_rng = np.random.default_rng(0)
    parent_points = seeded_rng.uniform(size=(3, 5))
    parent_fitness = seeded_rng.uniform(size=(3, 2))
    optimizer = MOCMA(
        parent_points,
        parent_fitness,
        success_notion="individual",
        max_evaluations=1000,
    )
    # Before any iteration, the best solutions are exactly the initial
    # parents with the default initial step size.
    best = optimizer.best
    self.assertTrue(np.all(best.point == parent_points), "points")
    self.assertTrue(np.all(best.fitness == parent_fitness), "fitness")
    expected_step_sizes = np.repeat(optimizer.parameters.initial_step_size, 3)
    self.assertTrue(
        np.all(best.step_size == expected_step_sizes),
        "step_size",
    )
def run_test_volume(self, success_notion: str) -> None:
    """Run the hypervolume regression test for each configured data row.

    For every row of ``self.data`` within the configured parent-count
    bounds, runs ``VOLUME_TEST_N_TRIALS`` independent optimizations and
    asserts that the median achieved hypervolume matches the target
    volume within ``VOLUME_TEST_RTOL``.

    Parameters
    ----------
    success_notion
        The success notion ("individual" or "population") passed to the
        optimizer.
    """
    for i, row in enumerate(self.data):
        n_parents = int(row[0])
        # Guard clause: skip rows outside the configured parent-count
        # bounds (a falsy bound means "no bound").
        if (self.max_n_parents and n_parents > self.max_n_parents) or (
            self.min_n_parents and n_parents < self.min_n_parents
        ):
            continue
        target_volume = row[1]
        max_evaluations = int(row[2])
        n_dimensions = int(row[3])
        n_objectives = int(row[4])
        reference = row[5:5 + n_objectives]
        # Bug fix: the buffer must hold one entry per trial. It was
        # previously allocated as np.empty(1), so any configuration with
        # VOLUME_TEST_N_TRIALS > 1 raised an IndexError at volumes[trial].
        volumes = np.empty(VOLUME_TEST_N_TRIALS)
        fn = self.fn_cls(rng=self.rng)
        fn.n_dimensions = n_dimensions
        fn.n_objectives = n_objectives
        indicator = HypervolumeIndicator(reference)
        for trial in range(VOLUME_TEST_N_TRIALS):
            parent_points = fn.random_points(n_parents)
            parent_fitness = fn(parent_points)
            optimizer = MOCMA(
                parent_points,
                parent_fitness,
                n_offspring=self.n_offspring,
                success_notion=success_notion,
                max_evaluations=max_evaluations,
                seed=self.rng.integers(0, 10000),
            )
            # Run the ask/tell loop until a stopping condition triggers.
            while not optimizer.stop.triggered:
                points = optimizer.ask()
                if fn.has_constraints:
                    optimizer.tell(*fn.evaluate_with_penalty(points))
                else:
                    optimizer.tell(fn(points))
            volumes[trial] = indicator(optimizer.best.fitness)
        # Use the median over trials to be robust against outlier runs.
        reference_volume = np.median(volumes)
        self.assertTrue(
            np.allclose(
                reference_volume,
                target_volume,
                rtol=VOLUME_TEST_RTOL,
            ),
            "Failed (row {}), got {}, expected {}".format(
                i, reference_volume, target_volume),
        )
def log_mocma_trial(
    log_parameters: LogParameters, trial_parameters: MOCMATrialParameters
) -> str:
    """Run an independent trial of the optimizer and log to a CSV file.

    Runs the MOCMA ask/tell loop until a stopping condition triggers,
    writing the best fitness/points/step-size snapshots to CSV files
    whenever the evaluation count hits an entry of
    ``log_parameters.log_at``.

    Parameters
    ----------
    log_parameters
        The parameters to configure the logging.
    trial_parameters
        The parameters to configure the trial run.

    Returns
    -------
    str
        A string identifying the job.
    """
    # Seed the trial RNG; an unseeded generator is used when no seed is given.
    if trial_parameters.seed is None:
        rng = np.random.default_rng()
    else:
        rng = np.random.default_rng(trial_parameters.seed)
    # The benchmark function gets its own RNG when a dedicated seed is
    # provided; otherwise it shares the trial RNG.
    if trial_parameters.fn_rng_seed is not None:
        trial_parameters.fn_kwargs["rng"] = np.random.default_rng(
            trial_parameters.fn_rng_seed
        )
    else:
        trial_parameters.fn_kwargs["rng"] = rng
    fn = trial_parameters.fn_cls(
        *trial_parameters.fn_args, **trial_parameters.fn_kwargs
    )
    # Run at least as long as the last logging checkpoint.
    # NOTE(review): max(*log_at) unpacks the sequence; it raises TypeError
    # when log_at contains exactly one element (max(scalar)) —
    # max(log_parameters.log_at) would be safer. Confirm log_at always has
    # two or more entries.
    max_evaluations = max(*log_parameters.log_at)
    if trial_parameters.max_evaluations is not None:
        max_evaluations = max(
            trial_parameters.max_evaluations,
            max_evaluations,
        )
    parent_points = fn.random_points(
        trial_parameters.n_parents,
        region_bounds=trial_parameters.region_bounds,
    )
    parent_fitness = fn(parent_points)
    # NOTE(review): generate_state/entropy/spawn_key imply seed is a
    # np.random.SeedSequence; this line raises AttributeError when seed is
    # None even though the branch above tolerates None — confirm callers
    # always pass a SeedSequence.
    optimizer = MOCMA(
        parent_points,
        parent_fitness,
        n_offspring=trial_parameters.n_offspring,
        seed=trial_parameters.seed.generate_state(1)[0],
        # rng=rng,
        success_notion=trial_parameters.success_notion,
        max_generations=trial_parameters.max_generations,
        max_evaluations=max_evaluations,
        target_indicator_value=trial_parameters.target_indicator_value,
    )
    if trial_parameters.reference is not None:
        optimizer.indicator.reference = trial_parameters.reference
    # Collect provenance info (hardware, OS, Python, versions) for the
    # CSV header so runs can be reproduced/compared later.
    cpu_info = log_parameters.cpu_info
    uname = platform.uname()
    os_info = "{} {}".format(uname.system, uname.release)
    machine_info = uname.machine
    if cpu_info is not None:
        machine_info = cpu_info
    python_info = "{}.{}.{}".format(*platform.python_version_tuple())
    # Header template: most fields are filled in now; the doubled braces
    # ({{}}) survive this .format() call so that evaluation count, elapsed
    # time, and observation name can be filled in per snapshot.
    header = (
        """Generated with {} {}, {} {}\n"""
        """Machine: {}\n"""
        """OS: {}\n"""
        """Python: {}\n"""
        """Optimizer: {}\n"""
        """Function: {}: {} -> {}\n"""
        """Initial step size: {}\n"""
        """Reference point: {}\n"""
        """Trial seed: entropy={}, spawn_key={}\n"""
        """Function-specific seed: {}\n"""
        """Trial: {}\n"""
        """Evaluations: {{}}\n"""
        """Elapsed time (wall-clock): {{:.2f}}s\n"""
        """Observation: {{}}\n""".format(
            anguilla.__name__,
            anguilla.__version__,
            np.__name__,
            np.__version__,
            machine_info,
            os_info,
            python_info,
            optimizer.qualified_name,
            fn.qualified_name,
            fn.n_dimensions,
            fn.n_objectives,
            trial_parameters.initial_step_size,
            trial_parameters.reference,
            trial_parameters.seed.entropy,
            trial_parameters.seed.spawn_key,
            trial_parameters.fn_rng_seed,
            trial_parameters.key,
        )
    )
    sw = StopWatch()

    def log_to_file():
        # Snapshot the optimizer's current best solutions to CSV files.
        # Closes over `optimizer`, `header` and `sw`, so each call reflects
        # the current evaluation count and elapsed time.
        fname_base = "{}_{}_{}_{}".format(
            fn.name,
            optimizer.qualified_name,
            trial_parameters.key,
            optimizer.evaluation_count,
        )
        if log_parameters.log_fitness:
            fname = f"{fname_base}.fitness.csv"
            np.savetxt(
                str(log_parameters.path.joinpath(fname).absolute()),
                optimizer.best.fitness,
                delimiter=",",
                header=header.format(
                    optimizer.evaluation_count,
                    sw.duration,
                    "fitness",
                ),
            )
        if log_parameters.log_points:
            fname = f"{fname_base}.points.csv"
            # NOTE(review): other code in this file uses best.point
            # (singular) — confirm a `points` attribute exists on the
            # best-solution container.
            np.savetxt(
                str(log_parameters.path.joinpath(fname).absolute()),
                optimizer.best.points,
                delimiter=",",
                header=header.format(
                    optimizer.evaluation_count,
                    sw.duration,
                    "point",
                ),
            )
        if log_parameters.log_step_sizes:
            fname = f"{fname_base}.step_sizes.csv"
            np.savetxt(
                str(log_parameters.path.joinpath(fname).absolute()),
                optimizer.best.step_size,
                delimiter=",",
                header=header.format(
                    optimizer.evaluation_count,
                    sw.duration,
                    "step_size",
                ),
            )

    # Log initial points
    log_to_file()
    sw.start()
    while not optimizer.stop.triggered:
        points = optimizer.ask()
        if fn.has_constraints:
            fitness = fn.evaluate_with_penalty(points)
            optimizer.tell(*fitness)
        else:
            fitness = fn(points)
            optimizer.tell(fitness)
        # Pause the stopwatch while logging so I/O time is not counted
        # as optimization wall-clock time.
        if optimizer.evaluation_count in log_parameters.log_at:
            sw.stop()
            log_to_file()
            sw.start()
    return "{}-{}-{}".format(
        fn.name, optimizer.qualified_name, trial_parameters.key
    )
def run_trial(parameters: TrialParameters):
    """Run one timed benchmark trial of the MOCMA optimizer.

    Selects the C++ or Python implementation based on
    ``parameters.implementation``, runs the ask/tell loop to completion
    while timing the ask, tell, and function-evaluation phases
    separately, and returns a :class:`TrialResult` summary.

    Parameters
    ----------
    parameters
        The configuration of the trial (function, seeds, population
        sizes, implementation choice, stopping conditions).
    """
    # Seed the trial RNG; unseeded when no seed is given.
    if parameters.seed is None:
        rng = np.random.default_rng()
    else:
        rng = np.random.default_rng(parameters.seed)
    # The benchmark function gets its own RNG if a dedicated seed is
    # provided; otherwise it shares the trial RNG.
    if parameters.fn_rng_seed is not None:
        parameters.fn_kwargs["rng"] = np.random.default_rng(
            parameters.fn_rng_seed)
    else:
        parameters.fn_kwargs["rng"] = rng
    fn = parameters.fn_cls(*parameters.fn_args, **parameters.fn_kwargs)
    # This benchmark is bi-objective: reject functions that cannot be
    # configured for exactly 2 objectives.
    if not fn.has_scalable_objectives and fn.n_objectives != 2:
        raise ValueError("The provided function does not support 2 objectives")
    fn.n_objectives = 2
    parent_points = fn.random_points(parameters.n_parents,
                                     region_bounds=parameters.region_bounds)
    parent_fitness = fn(parent_points)
    # Dispatch on implementation: "cxx" takes an integer seed, the Python
    # implementation takes the Generator directly.
    if parameters.implementation == "cxx":
        optimizer = MOCMA(
            parent_points,
            parent_fitness,
            n_offspring=parameters.n_offspring,
            #success_notion=parameters.success_notion,
            #max_generations=parameters.max_generations,
            max_evaluations=parameters.max_evaluations,
            #target_indicator_value=parameters.target_indicator_value,
            seed=rng.integers(0, 100000),
        )
    else:
        optimizer = MOCMAPython(
            parent_points,
            parent_fitness,
            n_offspring=parameters.n_offspring,
            #success_notion=parameters.success_notion,
            #max_generations=parameters.max_generations,
            max_evaluations=parameters.max_evaluations,
            #target_indicator_value=parameters.target_indicator_value,
            rng=rng,
        )
    # NOTE(review): setting the indicator reference is commented out, yet
    # optimizer.indicator is still called below when parameters.reference
    # is not None — confirm the indicator has a usable default reference.
    #if parameters.reference is not None:
    #    optimizer.indicator.reference = parameters.reference
    initial_fitness = optimizer.best.fitness
    # Separate stopwatches so ask, tell, and function-evaluation time can
    # be reported independently.
    ask_sw = StopWatch()
    tell_sw = StopWatch()
    eval_sw = StopWatch()
    while not optimizer.stop.triggered:
        ask_sw.start()
        points = optimizer.ask()
        ask_sw.stop()
        if fn.has_constraints:
            # Constrained functions return (fitness, penalized_fitness),
            # unpacked into tell.
            eval_sw.start()
            fitness = fn.evaluate_with_penalty(points)
            eval_sw.stop()
            tell_sw.start()
            optimizer.tell(*fitness)
            tell_sw.stop()
        else:
            eval_sw.start()
            fitness = fn(points)
            eval_sw.stop()
            tell_sw.start()
            optimizer.tell(fitness)
            tell_sw.stop()
    final_fitness = optimizer.best.fitness
    # Only compute the hypervolume when a reference point was supplied.
    volume = None
    if parameters.reference is not None:
        volume = optimizer.indicator(final_fitness)
    duration = TrialDuration(ask_sw.duration, tell_sw.duration, eval_sw.duration)
    return TrialResult(
        initial_fitness=initial_fitness,
        final_fitness=final_fitness,
        volume=volume,
        reference=parameters.reference,
        generation_count=optimizer.generation_count,
        evaluation_count=optimizer.evaluation_count,
        duration=duration,
        fn_name=fn.qualified_name,
        optimizer_name=optimizer.qualified_name,
        parameters=parameters,
    )