Example no. 1
0
    def evaluate(self, fnc, params=None, mode="train"):
        """Benchmark *fnc* on the tasks of the ``mode`` partition and return
        a normalized average-ranking objective (lower is better).

        Parameters
        ----------
        fnc : callable
            Optimizer to evaluate. Its ``__name__`` must match a column
            produced by ``calculate_metrics`` and is inserted as a column
            into the results CSV before ranking.
        params : optional
            If given, ``fnc`` is wrapped in ``AlgoWrapper(fnc, params)`` so
            the hyperparameters are bound before evaluation.
        mode : str
            Key into ``self.partitions`` selecting the task subset
            (e.g. ``"train"``).

        Returns
        -------
        float
            ``thisranking / len(rankings)`` — the average ranking of *fnc*
            normalized by the number of ranked algorithms.
        """
        partition = self.partitions[mode]

        # Bind fixed hyperparameters, if any, before running the benchmark.
        if params is not None:
            wrapper = AlgoWrapper(fnc, params)
        else:
            wrapper = fnc

        r = parallel_evaluate(
            solvers=[wrapper],
            task_subset=partition,  # set to None to evaluate on all tasks
            n_reps=128,  # number of repetitions
            eval_kwargs={'n_calls': 64},
            joblib_kwargs={
                'n_jobs': -1,
                'verbose': 10
            })

        p = calculate_metrics(r)  # returns pandas dataframe

        # Load the dataframe with existing results.
        # NOTE(review): `csv_path` is not defined inside this method — it is
        # presumably a module- or class-level name; confirm before reuse.
        df = pd.read_csv(csv_path)

        # get the names of the tasks that were actually used.
        # it is assumed that these tasks are present in the
        # loaded csv as well.
        names = [f.__name__ for f in partition]
        df = df.set_index('Unnamed: 0')

        # select only the rows of tasks in the partition
        df = df.loc[names]

        # insert found results as a new/overwritten column.
        # NOTE(review): assumes the metrics column is named after `fnc` even
        # when an AlgoWrapper is used — verify AlgoWrapper preserves __name__.
        df[fnc.__name__] = p[fnc.__name__]

        # drop index for proper compatibility with get_average_ranking function
        df = df.reset_index()

        rankings = get_average_ranking(df)
        thisranking = rankings[fnc.__name__]

        # want to minimize ranking. Less ranking means more performant algorithm
        obj = thisranking / len(rankings)

        print('rankings:', rankings)
        # BUG FIX: the 'objective:' label previously printed the raw ranking
        # (`thisranking`); print the actual returned objective instead.
        print('objective:', obj)

        return obj
"""
Example of running the benchmarks locally
on skopt and other software.
"""

from bbob.evaluation import parallel_evaluate, plot_results, calculate_metrics
from skopt import forest_minimize
from bbob.wrappers.gpyopt_minimize import gpyopt_minimize
from bbob.wrappers.hyperopt_minimize import hyperopt_minimize

from bbob.tracks.ampgo import Hartmann3_3_ri, Ackley_3_1_r

# Run all three solvers on the two selected AMPGO tasks in parallel.
evaluation = parallel_evaluate(
    solvers=[forest_minimize, gpyopt_minimize, hyperopt_minimize],
    # set task_subset to None to evaluate on all tasks
    task_subset=[Hartmann3_3_ri, Ackley_3_1_r],
    n_reps=2,  # number of repetitions per (solver, task) pair
    eval_kwargs={'n_calls': 10},
    joblib_kwargs={'n_jobs': -1, 'verbose': 10},
)

# Condense the raw runs into a pandas dataframe, persist and visualize.
metrics = calculate_metrics(evaluation)
metrics.to_csv('data.csv')
plot_results(evaluation)
import sys

from skopt import gp_minimize
from bbob.wrappers.gpyopt_minimize import gpyopt_minimize
from bbob.wrappers.hyperopt_minimize import hyperopt_minimize

methods = [gp_minimize, gpyopt_minimize, hyperopt_minimize]

# SMAC only supports Python 3 and Spearmint only Python 2, so add the
# extra solver that matches the running interpreter.
# BUG FIX: `sys.version[0]` compares the first *character* of the version
# string, which misidentifies any major version >= 10; compare the numeric
# `sys.version_info` major component instead.
if sys.version_info[0] == 3:
    from bbob.wrappers.smac_minimize import smac_minimize
    methods += [smac_minimize]

if sys.version_info[0] == 2:
    from bbob.wrappers.spearmint_minimize import spearmint_minimize
    methods += [spearmint_minimize]

from bbob.evaluation import parallel_evaluate, calculate_metrics
from bbob.tracks import ampgo

# Evaluate every selected method on a single AMPGO task, sequentially
# (n_jobs=1) with verbose joblib progress output.
run = parallel_evaluate(
    solvers=methods,
    task_subset=[ampgo.Ackley_3_1_r],
    n_reps=2,
    eval_kwargs={'n_calls': 10},
    joblib_kwargs={'verbose': 10, 'n_jobs': 1},
)

# plot_results(run)  # plotting disabled
summary = calculate_metrics(run)
summary.to_csv('data.csv')