Example #1
    def test_tell(self):
        """Test that the tell method runs without errors."""
        # Note: this doesn't test anything about the correctness
        # of adaptation and selection, just that they don't result in an error.
        n = 3
        rng = np.random.default_rng()
        points = rng.uniform(size=(n, 5))
        fitness = rng.uniform(size=(n, 2))
        optimizer = MOCMA(
            points,
            fitness,
            success_notion="individual",
            max_evaluations=1000,
        )
        # Run a single ask/tell iteration with randomly generated
        # offspring fitness values.
        points = optimizer.ask()
        fitness = rng.uniform(size=(n, 2))
        penalized_fitness = rng.uniform(size=(n, 2))
        optimizer.tell(fitness, penalized_fitness)
        # We test that the offspring data is copied correctly,
        # which we can do since the offspring data remains untouched
        # in its buffer after selection.
        self.assertTrue(np.all(fitness == optimizer.population.fitness[n:]),
                        "fitness")
        self.assertTrue(
            np.all(penalized_fitness ==
                   optimizer.population.penalized_fitness[n:]),
            "penalized_fitness",
        )
        self.assertTrue(optimizer.generation_count == 1, "generation_count")
        self.assertTrue(optimizer.evaluation_count == n, "evaluation_count")
Example #2
    def run_test_volume(self, success_notion: str) -> None:
        for i, row in enumerate(self.data):
            n_parents = int(row[0])
            if ((not self.max_n_parents or n_parents <= self.max_n_parents)
                    and (not self.min_n_parents
                         or n_parents >= self.min_n_parents)):
                target_volume = row[1]
                max_evaluations = int(row[2])
                n_dimensions = int(row[3])
                n_objectives = int(row[4])
                reference = row[5:5 + n_objectives]
                volumes = np.empty(VOLUME_TEST_N_TRIALS)
                fn = self.fn_cls(rng=self.rng)
                fn.n_dimensions = n_dimensions
                fn.n_objectives = n_objectives

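                # The hypervolume indicator measures the quality of the attained
                # solution set relative to the row's reference point.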
                indicator = HypervolumeIndicator(reference)
                for trial in range(VOLUME_TEST_N_TRIALS):
                    parent_points = fn.random_points(n_parents)
                    parent_fitness = fn(parent_points)
                    optimizer = MOCMA(
                        parent_points,
                        parent_fitness,
                        n_offspring=self.n_offspring,
                        success_notion=success_notion,
                        max_evaluations=max_evaluations,
                        seed=self.rng.integers(0, 10000),
                    )
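                    # Standard ask/tell loop: evaluate the offspring with the
                    # benchmark function (with penalties when it has constraints)
                    # until a stopping criterion triggers.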
                    while not optimizer.stop.triggered:
                        points = optimizer.ask()
                        if fn.has_constraints:
                            optimizer.tell(*fn.evaluate_with_penalty(points))
                        else:
                            optimizer.tell(fn(points))
                    volumes[trial] = indicator(optimizer.best.fitness)
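                # Compare the median volume attained across trials against the
                # target value within a relative tolerance.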
                reference_volume = np.median(volumes)
                self.assertTrue(
                    np.allclose(
                        reference_volume,
                        target_volume,
                        rtol=VOLUME_TEST_RTOL,
                    ),
                    "Failed (row {}), got {}, expected {}".format(
                        i, reference_volume, target_volume),
                )
Example #3
def log_mocma_trial(
    log_parameters: LogParameters, trial_parameters: MOCMATrialParameters
) -> str:
    """Run an independent trial of the optimizer and log to a CSV file.

    Parameters
    ----------
    log_parameters
        The parameters to configure the logging.
    trial_parameters
        The parameters to configure the trial run.

    Returns
    -------
    str
        A string identifying the job.
    """
    if trial_parameters.seed is None:
        rng = np.random.default_rng()
    else:
        rng = np.random.default_rng(trial_parameters.seed)

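    # Use a dedicated RNG for the benchmark function when a separate seed is
    # provided, otherwise share the trial's RNG.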
    if trial_parameters.fn_rng_seed is not None:
        trial_parameters.fn_kwargs["rng"] = np.random.default_rng(
            trial_parameters.fn_rng_seed
        )
    else:
        trial_parameters.fn_kwargs["rng"] = rng

    fn = trial_parameters.fn_cls(
        *trial_parameters.fn_args, **trial_parameters.fn_kwargs
    )

    max_evaluations = max(log_parameters.log_at)
    if trial_parameters.max_evaluations is not None:
        max_evaluations = max(
            trial_parameters.max_evaluations,
            max_evaluations,
        )

    parent_points = fn.random_points(
        trial_parameters.n_parents,
        region_bounds=trial_parameters.region_bounds,
    )
    parent_fitness = fn(parent_points)

    optimizer = MOCMA(
        parent_points,
        parent_fitness,
        n_offspring=trial_parameters.n_offspring,
        seed=trial_parameters.seed.generate_state(1)[0],
        # rng=rng,
        success_notion=trial_parameters.success_notion,
        max_generations=trial_parameters.max_generations,
        max_evaluations=max_evaluations,
        target_indicator_value=trial_parameters.target_indicator_value,
    )
    if trial_parameters.reference is not None:
        optimizer.indicator.reference = trial_parameters.reference

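    # Build a provenance header (versions, machine, OS, Python, seeds, trial key)
    # that is embedded at the top of every CSV file written below.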
    cpu_info = log_parameters.cpu_info
    uname = platform.uname()
    os_info = "{} {}".format(uname.system, uname.release)
    machine_info = uname.machine
    if cpu_info is not None:
        machine_info = cpu_info
    python_info = "{}.{}.{}".format(*platform.python_version_tuple())

    header = (
        """Generated with {} {}, {} {}\n"""
        """Machine: {}\n"""
        """OS: {}\n"""
        """Python: {}\n"""
        """Optimizer: {}\n"""
        """Function: {}: {} -> {}\n"""
        """Initial step size: {}\n"""
        """Reference point: {}\n"""
        """Trial seed: entropy={}, spawn_key={}\n"""
        """Function-specific seed: {}\n"""
        """Trial: {}\n"""
        """Evaluations: {{}}\n"""
        """Elapsed time (wall-clock): {{:.2f}}s\n"""
        """Observation: {{}}\n""".format(
            anguilla.__name__,
            anguilla.__version__,
            np.__name__,
            np.__version__,
            machine_info,
            os_info,
            python_info,
            optimizer.qualified_name,
            fn.qualified_name,
            fn.n_dimensions,
            fn.n_objectives,
            trial_parameters.initial_step_size,
            trial_parameters.reference,
            trial_parameters.seed.entropy,
            trial_parameters.seed.spawn_key,
            trial_parameters.fn_rng_seed,
            trial_parameters.key,
        )
    )

    sw = StopWatch()

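    # Write the current best fitness, points and step sizes to CSV files,
    # depending on which logging flags are enabled.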
    def log_to_file():
        fname_base = "{}_{}_{}_{}".format(
            fn.name,
            optimizer.qualified_name,
            trial_parameters.key,
            optimizer.evaluation_count,
        )
        if log_parameters.log_fitness:
            fname = f"{fname_base}.fitness.csv"
            np.savetxt(
                str(log_parameters.path.joinpath(fname).absolute()),
                optimizer.best.fitness,
                delimiter=",",
                header=header.format(
                    optimizer.evaluation_count,
                    sw.duration,
                    "fitness",
                ),
            )
        if log_parameters.log_points:
            fname = f"{fname_base}.points.csv"
            np.savetxt(
                str(log_parameters.path.joinpath(fname).absolute()),
                optimizer.best.points,
                delimiter=",",
                header=header.format(
                    optimizer.evaluation_count,
                    sw.duration,
                    "point",
                ),
            )
        if log_parameters.log_step_sizes:
            fname = f"{fname_base}.step_sizes.csv"
            np.savetxt(
                str(log_parameters.path.joinpath(fname).absolute()),
                optimizer.best.step_size,
                delimiter=",",
                header=header.format(
                    optimizer.evaluation_count,
                    sw.duration,
                    "step_size",
                ),
            )

    # Log initial points
    log_to_file()

    sw.start()
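    # Main optimization loop: snapshot the logs whenever the evaluation count
    # reaches one of the configured checkpoints; the stopwatch is paused while
    # writing so that logging does not count towards the elapsed time.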
    while not optimizer.stop.triggered:
        points = optimizer.ask()
        if fn.has_constraints:
            fitness = fn.evaluate_with_penalty(points)
            optimizer.tell(*fitness)
        else:
            fitness = fn(points)
            optimizer.tell(fitness)
        if optimizer.evaluation_count in log_parameters.log_at:
            sw.stop()
            log_to_file()
            sw.start()

    return "{}-{}-{}".format(
        fn.name, optimizer.qualified_name, trial_parameters.key
    )
Example #4
def run_trial(parameters: TrialParameters):
    if parameters.seed is None:
        rng = np.random.default_rng()
    else:
        rng = np.random.default_rng(parameters.seed)
    if parameters.fn_rng_seed is not None:
        parameters.fn_kwargs["rng"] = np.random.default_rng(
            parameters.fn_rng_seed)
    else:
        parameters.fn_kwargs["rng"] = rng
    fn = parameters.fn_cls(*parameters.fn_args, **parameters.fn_kwargs)
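    # Trials are run with exactly two objectives; reject functions that cannot
    # provide them.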
    if not fn.has_scalable_objectives and fn.n_objectives != 2:
        raise ValueError("The provided function does not support 2 objectives")
    fn.n_objectives = 2
    parent_points = fn.random_points(parameters.n_parents,
                                     region_bounds=parameters.region_bounds)
    parent_fitness = fn(parent_points)
    if parameters.implementation == "cxx":
        optimizer = MOCMA(
            parent_points,
            parent_fitness,
            n_offspring=parameters.n_offspring,
            #success_notion=parameters.success_notion,
            #max_generations=parameters.max_generations,
            max_evaluations=parameters.max_evaluations,
            #target_indicator_value=parameters.target_indicator_value,
            seed=rng.integers(0, 100000),
        )
    else:
        optimizer = MOCMAPython(
            parent_points,
            parent_fitness,
            n_offspring=parameters.n_offspring,
            #success_notion=parameters.success_notion,
            #max_generations=parameters.max_generations,
            max_evaluations=parameters.max_evaluations,
            #target_indicator_value=parameters.target_indicator_value,
            rng=rng,
        )
    #if parameters.reference is not None:
    #    optimizer.indicator.reference = parameters.reference
    initial_fitness = optimizer.best.fitness
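    # Separate stopwatches measure the time spent generating offspring (ask),
    # evaluating the benchmark function, and updating the optimizer (tell).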
    ask_sw = StopWatch()
    tell_sw = StopWatch()
    eval_sw = StopWatch()
    while not optimizer.stop.triggered:
        ask_sw.start()
        points = optimizer.ask()
        ask_sw.stop()
        if fn.has_constraints:
            eval_sw.start()
            fitness = fn.evaluate_with_penalty(points)
            eval_sw.stop()
            tell_sw.start()
            optimizer.tell(*fitness)
            tell_sw.stop()
        else:
            eval_sw.start()
            fitness = fn(points)
            eval_sw.stop()
            tell_sw.start()
            optimizer.tell(fitness)
            tell_sw.stop()
    final_fitness = optimizer.best.fitness
    volume = None
    if parameters.reference is not None:
        volume = optimizer.indicator(final_fitness)
    duration = TrialDuration(ask_sw.duration, tell_sw.duration,
                             eval_sw.duration)
    return TrialResult(
        initial_fitness=initial_fitness,
        final_fitness=final_fitness,
        volume=volume,
        reference=parameters.reference,
        generation_count=optimizer.generation_count,
        evaluation_count=optimizer.evaluation_count,
        duration=duration,
        fn_name=fn.qualified_name,
        optimizer_name=optimizer.qualified_name,
        parameters=parameters,
    )