Example #1
import numpy as np
from sklearn.datasets import load_breast_cancer
from sklearn.model_selection import cross_val_score
from sklearn.tree import DecisionTreeClassifier
from skopt import gp_minimize, plots
from skopt.space import Integer

def test_plots_work_without_cat():
    """Basic smoke tests to make sure plotting doesn't crash."""
    SPACE = [
        Integer(1, 20, name='max_depth'),
        Integer(2, 100, name='min_samples_split'),
        Integer(5, 30, name='min_samples_leaf'),
        Integer(1, 30, name='max_features'),
    ]

    def objective(params):
        clf = DecisionTreeClassifier(random_state=3,
                                     **{
                                         dim.name: val
                                         for dim, val in zip(SPACE, params)
                                         if dim.name != 'dummy'
                                     })
        return -np.mean(cross_val_score(clf, *load_breast_cancer(return_X_y=True)))

    res = gp_minimize(objective, SPACE, n_calls=10, random_state=3)
    plots.plot_convergence(res)
    plots.plot_evaluations(res)
    plots.plot_objective(res)
    plots.plot_objective(res, minimum='expected_minimum')
    plots.plot_objective(res,
                         sample_source='expected_minimum',
                         n_minimum_search=10)
    plots.plot_objective(res, sample_source='result')
    plots.plot_regret(res)
Example #2
    def optimize(self, param_space, max_evals=10, n_random_starts=2):
        start_time = time.time()

        @use_named_args(param_space)
        def _minimize(**params):
            self.model.set_params(**params)
            return self.evaluate_model()

        opt = gp_minimize(_minimize,
                          param_space,
                          n_calls=max_evals,
                          n_random_starts=n_random_starts,
                          random_state=2405,
                          n_jobs=-1)
        best_values = opt.x
        optimal_values = dict(
            zip([param.name for param in param_space], best_values))
        best_score = opt.fun
        self.best_score = best_score
        self.opt = opt

        print(
            'optimal_parameters: {}\noptimal score: {}\noptimization time: {}'.
            format(optimal_values, best_score,
                   time.time() - start_time))
        print('updating model with optimal values')
        self.update_model(**optimal_values)
        plot_convergence(opt)
        return optimal_values
Example #3
def print_result(days, r1):

    if isinstance(r1, OptimizeResult):
        print("the minimize cost is,", r1.fun)
        print("the best parameter is", r1.x)
        plot_convergence(r1)
        WarehouseA = get_sim_result(r1.x, days, plotdata=True)
        plot_warehouse(WarehouseA, days)
    elif isinstance(r1, BayeOptConstraint):

        plt.plot(list(range(r1.niter)),
                 [np.min(r1.y_obj[0:x]) for x in range(1, 1 + r1.niter)])
        idx = np.where(r1.y_constraint > 0)[0]
        plt.plot(idx - r1.n_restarts + 1,
                 r1.y_obj[idx],
                 "r.",
                 label="service level > 0.8")
        plt.xlabel('number of calls n')
        plt.ylabel('min f(x) after n calls', fontsize=16)
        plt.title("Convergence plot")
        plt.legend()

        WarehouseA = get_sim_result(r1.min_x, days, plotdata=True)
        plot_warehouse(WarehouseA, days)
    elif isinstance(r1, BayeOpt):
        plt.plot(list(range(r1.niter)),
                 [np.min(r1.y_obj[0:x]) for x in range(1, 1 + r1.niter)])
        plt.xlabel('number of calls n')
        plt.ylabel('min f(x) after n calls', fontsize=16)
        plt.title("Convergence plot")

        WarehouseA = get_sim_result(r1.min_x, days, plotdata=True)
        plot_warehouse(WarehouseA, days)
Example #4
def plot_convergence_(OptimizeResult, fig_savepath, format='PNG', dpi=300):
    """
    Plot one or several convergence traces.
    Parameters
    args[i] [OptimizeResult, list of OptimizeResult, or tuple]: The result(s) for which to plot the convergence trace.
    
    if OptimizeResult, then draw the corresponding single trace;
    if list of OptimizeResult, then draw the corresponding convergence traces in transparency, along with the average convergence trace;
    if tuple, then args[i][0] should be a string label and args[i][1] an OptimizeResult or a list of OptimizeResult.
    ax [Axes, optional]: The matplotlib axes on which to draw the plot, or None to create a new one.
    
    true_minimum [float, optional]: The true minimum value of the function, if known.
    
    yscale [None or string, optional]: The scale for the y-axis.

    fig_savepath [string]: The path to save a convergence figure
    
    Returns
    ax: [Axes]: The matplotlib axes.
    """
    fig = plt.figure(num=1, figsize=(12, 12))
    ax0 = plt.gca()
    plot_convergence(
        OptimizeResult,
        ax=ax0,
        true_minimum=0.0,
    )
    # ax0.set_ylabel(r"$\min MAE$ after $n$ calls")
    plt.tight_layout()
    # plt.subplots_adjust(left=0.08, bottom=0.12, right=0.98, top=0.98, hspace=0.1, wspace=0.2)
    plt.savefig(fig_savepath, format=format, dpi=dpi)
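
A minimal usage sketch for the helper above, assuming `res` is an OptimizeResult returned by gp_minimize as in the surrounding examples:

res = gp_minimize(objective, SPACE, n_calls=10, random_state=3)
plot_convergence_(res, fig_savepath='convergence.png', format='PNG', dpi=300)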
Example #5
import numpy as np
from sklearn.datasets import load_breast_cancer
from sklearn.model_selection import cross_val_score
from sklearn.tree import DecisionTreeClassifier
from skopt import gp_minimize, plots
from skopt.space import Categorical, Integer

def test_plots_work():
    """Basic smoke tests to make sure plotting doesn't crash."""
    SPACE = [
        Integer(1, 20, name='max_depth'),
        Integer(2, 100, name='min_samples_split'),
        Integer(5, 30, name='min_samples_leaf'),
        Integer(1, 30, name='max_features'),
        Categorical(['gini', 'entropy'], name='criterion'),
        Categorical(list('abcdefghij'), name='dummy'),
    ]

    def objective(params):
        clf = DecisionTreeClassifier(random_state=3,
                                     **{
                                         dim.name: val
                                         for dim, val in zip(SPACE, params)
                                         if dim.name != 'dummy'
                                     })
        return -np.mean(cross_val_score(clf, *load_breast_cancer(return_X_y=True)))

    res = gp_minimize(objective, SPACE, n_calls=10, random_state=3)
    plots.plot_convergence(res)
    plots.plot_evaluations(res)
    plots.plot_objective(res)
    plots.plot_regret(res)
Example #6
    def plot_convergence(self, filename=None, n_points=25, n_samples=250):
        # n_points and n_samples are accepted but currently unused
        # (see the commented-out arguments in the plot_convergence call below)
        # if `savefig(filename)`, don't `show()`
        if filename is not None:
            import matplotlib
            matplotlib.use('Agg')
        import matplotlib.pyplot as plt
        import skopt
        from skopt.plots import plot_convergence

        spaces = list(self.spaces.values())
        pnames = list(self.spaces.keys())
        opt = skopt.Optimizer(spaces, "ET", acq_optimizer="sampling")
        hyperpars, objective = self.trials_and_results
        # skopt minimizes.  Therefore use: acc = 1-acc
        objective = 1-objective
        hyperpars = [list(f) for f in hyperpars]
        objective = list(objective)
        opt.tell(hyperpars, objective)
        opt_result = opt.run(lambda x: 0, n_iter=0)
        plot_convergence(opt_result)#, n_points=n_points, n_samples=n_samples, dimensions=pnames)
        plt.tight_layout()
        if filename is None:
            plt.show()
        else:
            plt.savefig(filename)
Example #7
def main():
    X_train, X_test, y_train, y_test = load_data()

    def f(x):
        clf = svm.SVC(gamma=x[0], C=x[1])
        clf.fit(X_train, y_train)
        return -1 * clf.score(X_test, y_test)

    spaces = [
        (2**-15, 2**3, 'log-uniform'),
        (2**-5, 2**15, 'log-uniform'),
        # ['linear', 'poly', 'rbf']
    ]
    res = gp_minimize(f,
                      spaces,
                      acq_func="gp_hedge",
                      n_calls=20,
                      random_state=0)
    print(f'res.x: {res.x}')
    print(f'res.fun: {res.fun}')

    plot_layout(
        np.array(res.x_iters).T[0],
        np.array(res.x_iters).T[1], -1 * res.func_vals, 'skopt.png')

    fig, ax = plt.subplots()
    plot_convergence(res, ax=ax)
    fig.savefig('convergence.png')
Example #8
def skopt_search(df):
    X = df
    y = X.pop('y')

    X_train, X_test, y_train, y_test = train_test_split(X,
                                                        y,
                                                        test_size=.3,
                                                        random_state=42)

    from sklearn.pipeline import Pipeline

    pipeline = Pipeline([('classifier', LogisticRegression())])
    lr_param_space = [
        Categorical([LogisticRegression()], name='classifier'),
        Categorical(['l1', 'l2'], name='classifier__penalty'),
        Real(10**-5, 5, "log-uniform", name='classifier__C'),
        Categorical([True, False], name='classifier__fit_intercept'),
        Categorical([None, 'balanced'], name='classifier__class_weight')
    ]

    clsfr = pipeline

    @use_named_args(lr_param_space)
    def objective(**params):
        clsfr.set_params(**params)

        return -np.mean(
            cross_val_score(clsfr,
                            X_train,
                            y_train,
                            verbose=10,
                            cv=5,
                            n_jobs=-1,
                            scoring="roc_auc"))

    res_gp = gp_minimize(objective, lr_param_space, n_calls=50, random_state=0)

    # print(res_gp)
    # sys.exit()

    print('Best score: {}'.format(res_gp.fun))
    print('Best parameters:\n'
          '\t- penalty={0}\n'
          '\t- C={1}\n'
          '\t- fit_intercept={2}\n'
          '\t- class_weight={3}'.format(res_gp.x[1], res_gp.x[2],
                                        res_gp.x[3], res_gp.x[4]))
    plot_convergence(res_gp)
    plt.show()

    model = _model_pkl(
        X_train, y_train,
        clsfr.set_params(classifier__penalty=res_gp.x[1],
                         classifier__C=res_gp.x[2],
                         classifier__fit_intercept=res_gp.x[3],
                         classifier__class_weight=res_gp.x[4]))
    print(roc_auc_score(y_test, model.predict_proba(X_test)[:, 1]))
    print(model.named_steps['classifier'].coef_)
    print(model.named_steps['classifier'].intercept_)
Example #9
def find_best_hyperparameters(model, X, y, dynamic_params_space, scoring, plot, nfold, **HPO_params):
    
    # filter these warnings - they are not consistent, arise even for float features
    from warnings import filterwarnings
    # simplefilter("ignore", UserWarning)
    filterwarnings("ignore", message="The objective has been evaluated at this point before", category=UserWarning)
  
    # Get model name
    model_name = model.__class__.__name__
    
    # Get dynamic parameter names directly from the search-space dimensions
    param_names = [dim.name for dim in dynamic_params_space]
        
    # Define an objective function
    @use_named_args(dynamic_params_space)
    def objective(**dynamic_params):
        #model.set_params(**static_params)
        model.set_params(**dynamic_params) 
        cv = StratifiedKFold(n_splits=nfold, random_state=seed, shuffle=True)
        scores = cross_validate(model, X, y, cv=cv, scoring = scoring, n_jobs=-1)
        val_score = np.mean(scores['test_score'])
        return -val_score
    
    print(model_name, 'model training...')
    # Load previously trained results and get starting point (x0) as best model from previous run
    try:
        res = load(r'output/models/'+model_name)
        x0 = res.x       
    # If not trained before -> no initial point provided
    except (IOError, OSError):
        x0 = None
    
    res = forest_minimize(objective, dynamic_params_space, x0 = x0, **HPO_params)
    
    # add attribute - parameters names to the res
    res.param_names = param_names

    print('Optimized parameters:    ', res.param_names)
    print('Previous best parameters:', x0)
    print('Current  best parameters:', res.x)
    print('Best score:', -res.fun)
    
    # Saved optimization result  
    dump(res, r'output/models/'+model_name, store_objective=False)
        
    if plot:
        plt.figure(figsize=(5, 2))
        plot_convergence(res)
        try:
            # plot_objective does not work if only one parameter was searched
            plot_objective(res)
        except Exception:
            pass
        plt.show()
Example #10
def plot_conv(res):
    plots.plot_convergence(res)
    P.plot(np.arange(len(res.func_vals)) + 1,
           ndimage.gaussian_filter1d(res.func_vals, 4),
           c='C1')
    P.plot(np.arange(len(res.func_vals)) + 1,
           res.func_vals,
           ls='None',
           marker='+',
           c='C0')
Example #11
def output_results(filepaths, hyperp_of_interest_dict, hyperp_opt_result):

    ##################################
    #   Display Optimal Parameters   #
    ##################################
    print('=================================================')
    print('      Hyperparameter Optimization Complete')
    print('=================================================')
    print('Optimized Validation Loss: {}\n'.format(hyperp_opt_result.fun))
    print('Optimized Parameters:')
    hyperp_of_interest_list = list(hyperp_of_interest_dict.keys())
    for n, parameter_name in enumerate(hyperp_of_interest_list):
        print(parameter_name + ': {}'.format(hyperp_opt_result.x[n]))

    #####################################
    #   Save Optimization Information   #
    #####################################
    #=== Creating Directory for Outputs ===#
    if not os.path.exists(filepaths.directory_hyperp_opt_outputs):
        os.makedirs(filepaths.directory_hyperp_opt_outputs)

    #=== Save .pkl File ===#
    dump(hyperp_opt_result,
         filepaths.hyperp_opt_skopt_res,
         store_objective=False)

    #=== Write Optimal Set Hyperparameters ===#
    with open(filepaths.hyperp_opt_optimal_parameters, 'w') as optimal_set_txt:
        optimal_set_txt.write('Optimized Validation Loss: {}\n'.format(
            hyperp_opt_result.fun))
        optimal_set_txt.write('\n')
        optimal_set_txt.write('Optimized parameters:\n')
        for n, parameter_name in enumerate(hyperp_of_interest_list):
            optimal_set_txt.write(parameter_name +
                                  ': {}\n'.format(hyperp_opt_result.x[n]))

    #=== Write List of Scenarios Trained ===#
    with open(filepaths.hyperp_opt_scenarios_trained,
              'w') as scenarios_trained_txt:
        for scenario in hyperp_opt_result.x_iters:
            scenarios_trained_txt.write("%s\n" % scenario)

    #=== Write List of Validation Losses ===#
    validation_losses_dict = {}
    validation_losses_dict['validation_losses'] = hyperp_opt_result.func_vals
    df_validation_losses = pd.DataFrame(validation_losses_dict)
    df_validation_losses.to_csv(filepaths.hyperp_opt_validation_losses,
                                index=False)

    #=== Convergence Plot ===#
    plot_convergence(hyperp_opt_result)
    plt.savefig(filepaths.hyperp_opt_convergence)

    print('Outputs Saved')
Example #12
def save_skopt_plots(dirpath, search_result, prior_names):
    if not os.path.exists(dirpath):
        os.makedirs(dirpath)
    # ---- Evalution
    plot_evaluations(search_result, bins=20)
    plt.savefig( os.path.join(dirpath,'evaluation_plot.png') )
    # ---- Convergence (previously looked better; investigate what changed)
    plot_convergence(search_result)
    plt.savefig( os.path.join(dirpath,'convergence_plot.png') )
    # ---- Partial dependence plots are only approximations of the modelled fitness function
    # - which in turn is only an approximation of the true fitness function in fitness()
    plot_objective(result=search_result)
    plt.savefig( os.path.join(dirpath,'objective_plot.png') )
Example #13
def run_exp(model, train, user_map, item_map, validation):
    if HYPER_PARAM_SEARCH:
        checkpoint_saver = CheckpointSaver(CHECKPOINT_NAME)
        res_gp = gp_minimize(objective,
                             space,
                             n_calls=HYPER_PARAM_SEARCH_N_ITER,
                             random_state=SEED,
                             callback=[checkpoint_saver])
        skopt.dump(res_gp, HYPER_PARAM_FILE_NAME, store_objective=False)
        plot_convergence(res_gp)
    else:
        model.fit(train, user_map, item_map, validation)
Example #14
    def optimize(self, n_iters):
        for i in range(n_iters):
            pt = self.opt.ask()
            self.model.params = {
                k: v
                for k, v in zip(self.parameter_names, pt)
            }
            val = self.coeff * self.validator.score(self.model,
                                                    self.featureset)
            clear_output(True)
            plot_convergence(self.opt.tell(pt, val))
            plt.show()
Example #15
    def plot_status(self, opt):

        result = opt.run(None, 0)
        output = self.output()

        plot_convergence(result)
        output["conv"].dump(plt.gcf(), bbox_inches="tight")

        plot_objective(result)
        output["obj"].dump(plt.gcf(), bbox_inches="tight")

        plt.close()
Example #16
def run_gp_agent():
    r_dim = Real(-1, 1)
    i_dim = Integer(0, 1)
    bounds = [i_dim] * 2 + [r_dim] * 18  # 2 integer dims, then 18 real dims
    res = gp_minimize(reward, bounds, acq_func="EI", n_random_starts=10, n_calls=50, random_state=int(time.time()))
    explored_points = res.x_iters
    r_json = {"restaurants": [construct_restaurant_json(x) for x in explored_points]}
    with open('result.json', 'w') as fp:
        json.dump(r_json, fp)
    print(res)
    plot_convergence(res)
    plt.show()
Example #17
    def Baye_search(self, func, space: list):
        checkpoint_saver = CheckpointSaver(args.hyper_ckpt)
        rlt = gp_minimize(
            func,
            dimensions=space,
            n_calls=108,
            n_random_starts=3,
            # callback=[checkpoint_saver],
            random_state=42)
        logger.debug(rlt)
        plot_convergence(rlt)
        return rlt
Example #18
def hpscan():
    run_for = 20
    start = time.time()
    space = [
        #Integer(25, 25), #name ='epochs'),
        #Integer(5, 8), #name ='batch_size'),
        #Integer(8, 10), #name='latent size'),
        Real(1, 8),  #name='gen_weight'),
        Real(0.1, 10),  #name='aux_weight'),
        Real(0.1, 10),  #name='ecal_weight'),
        #Real(10**-5, 10**0, "log-uniform"), #name ='lr'),
        #Real(8, 9), #name='rho'),
        #Real(0, 0.0001), #name='decay'),
        #Categorical([True,False]), #name='dflag'),
        #Integer(4, 64), #name='df'),
        #Integer(2, 16), #name='dx'),
        #Integer(2, 16), #name='dy'),
        #Integer(2, 16), #name='dz'),
        #Real(0.01, 0.5), #name='dp'),
        #Categorical([True,False]), #name='gflag'),
        #Integer(4, 64), #name='gf'),
        #Integer(2, 16), #name='gx'),
        #Integer(2, 16), #name='gy'),
        #Integer(2, 16)] #name='gz')
    ]
    externalize = externalfunc(prog=evaluate_threaded, names=space)
    use_func = externalize

    o = Optimizer(
        n_initial_points=5,
        acq_func='gp_hedge',
        acq_optimizer='auto',
        base_estimator=GaussianProcessRegressor(
            alpha=0.0,
            copy_X_train=True,
            #kernel=1**2 * Matern(length_scale=[1, 1], nu=2.5),
            n_restarts_optimizer=2,
            noise='gaussian',
            normalize_y=True,
            optimizer='fmin_l_bfgs_b'),
        dimensions=space,
    )

    m = manager(n=4, skobj=o, iterations=run_for, func=use_func, wait=0)
    start = time.time()
    m.run()
    # recover the accumulated OptimizeResult from the Optimizer
    # (assumes the manager drives `o` via ask/tell)
    res_gp = o.get_result()
    print(
        "Best parameters:\nLoss Weights:\n_ Weight Gen loss ={}\n_ Weight Aux loss ={}\n_ Weight Ecal loss ={}"
        .format(res_gp.x[0], res_gp.x[1], res_gp.x[2]))
    print("Time taken {} seconds".format(time.time() - start))
    plot_convergence(res_gp)
    plt.savefig("result_hyp.pdf")
Example #19
    def run(self):
        result = self.input()["collection"].targets[0]["opt"].load().run(
            None, 0)
        output = self.output()

        plot_convergence(result)
        output["convergence"].dump(plt.gcf(), bbox_inches="tight")
        plt.close()
        plot_evaluations(result, bins=10)
        output["evaluations"].dump(plt.gcf(), bbox_inches="tight")
        plt.close()
        if self.plot_objective:
            plot_objective(result)
            output["objective"].dump(plt.gcf(), bbox_inches="tight")
            plt.close()
Example #20
def res_callback(res):
    fig, axs = plt.subplots(nrows=2, gridspec_kw=dict(hspace=0.3))
    # axs[0].set_xscale('log')
    sigs = [x[0] for x in res.x_iters]
    rmse = res.func_vals
    axs[0].scatter(sigs[-1], rmse[-1], s=300, color='C8')
    sigs, rmse = zip(*sorted(zip(sigs, rmse)))
    axs[0].plot(sigs, rmse, ':*')
    # axs[0].set_xscale('log')
    plot_convergence(res, ax=axs[1])

    title = f'f_RMSE: {res.fun:.5f}'
    axs[0].set_title(title)
    fig.savefig('convergence.png', dpi=200)
    plt.close('all')
Example #21
    def Baye_search_resume(self, func, path: str, space):
        assert os.path.exists(path)
        ckpt = load(path)
        checkpoint_saver = CheckpointSaver(args.hyper_ckpt)
        rlt = gp_minimize(
            func,
            dimensions=space,
            x0=ckpt.x_iters,
            y0=ckpt.func_vals,
            n_calls=20,
            n_random_starts=3,
            # callback=[checkpoint_saver],
            random_state=42)
        logger.debug(rlt)
        plot_convergence(rlt)
        return rlt
Example #22
def make_plots(results, dir):
    import matplotlib
    import matplotlib.pyplot as plt
    # https://matplotlib.org/faq/usage_faq.html#non-interactive-example
    # https://matplotlib.org/api/_as_gen/matplotlib.pyplot.savefig.html

    if not dir.exists():
        dir.mkdir(parents=True, exist_ok=True)

    # WIP: there is currently no support for plotting categorical variables...
    # You have to manually checkout this pull request:
    #   git pr 675  # install https://github.com/tj/git-extras
    #   git pull origin master
    # https://github.com/scikit-optimize/scikit-optimize/pull/675
    from skopt.plots import plot_convergence
    _ = plot_convergence(results)
    plt.savefig(dir / 'plot_convergence.png')

    from skopt.plots import plot_objective
    _ = plot_objective(results)
    plt.savefig(dir / 'plot_objective.png')

    from skopt.plots import plot_regret
    _ = plot_regret(results)
    plt.savefig(dir / 'plot_regret.png')

    from skopt.plots import plot_evaluations
    _ = plot_evaluations(results)
    plt.savefig(dir / 'plot_evaluations.png')


# compare convergence...
# https://github.com/scikit-optimize/scikit-optimize/blob/master/examples/strategy-comparison.ipynb
# for all runs...
# from skopt.plots import plot_convergence
# plot = plot_convergence(("dummy_minimize", dummy_res),
#                         ("gp_minimize", gp_res),
#                         ("forest_minimize('rf')", rf_res),
#                         ("forest_minimize('et)", et_res),
#                         true_minimum=0.397887, yscale="log")
# plot.legend(loc="best", prop={'size': 6}, numpoints=1);

# parallel optimization
# in the yaml:
# parallel: X (if you have a small dataset)
# https://github.com/scikit-optimize/scikit-optimize/blob/master/examples/parallel-optimization.ipynb
# from joblib import Parallel, delayed
# x = optimizer.ask(n_points=4)  # x is a list of n_points points
# y = Parallel()(delayed(branin)(v) for v in x)  # evaluate points in parallel
# optimizer.tell(x, y)

# checkpoints?
# https://github.com/scikit-optimize/scikit-optimize/blob/master/examples/interruptible-optimization.ipynb
# https://github.com/scikit-optimize/scikit-optimize/blob/master/examples/store-and-load-results.ipynb
# poor man's solution:
# import pickle
# with open('my-optimizer.pkl', 'wb') as f:
#     pickle.dump(opt, f)
# with open('my-optimizer.pkl', 'rb') as f:
#     opt_restored = pickle.load(f)
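
A runnable sketch of the strategy comparison described in the notes above, using the branin benchmark from skopt (bounds and the true minimum follow the linked strategy-comparison example):

import matplotlib.pyplot as plt
from skopt import dummy_minimize, gp_minimize
from skopt.benchmarks import branin
from skopt.plots import plot_convergence

bounds = [(-5.0, 10.0), (0.0, 15.0)]
dummy_res = dummy_minimize(branin, bounds, n_calls=30, random_state=0)
gp_res = gp_minimize(branin, bounds, n_calls=30, random_state=0)

# plot_convergence accepts ("label", result) tuples and overlays the traces
ax = plot_convergence(("dummy_minimize", dummy_res),
                      ("gp_minimize", gp_res),
                      true_minimum=0.397887, yscale="log")
ax.legend(loc="best", prop={'size': 6}, numpoints=1)
plt.show()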
Example #23
def plot_skopt_convergence(opt_res, path):
    """Plots the convergence plot from the skopt package.

    Args:
        opt_res (scipy.optimize.OptimizeResult): Optimization result object.
        path (str): Directory at which to save plot.
    """
    if not os.path.exists(path):
        os.makedirs(path)

    plt.figure()
    skplot.plot_convergence(opt_res, ax=None, true_minimum=None, yscale=None)
    fig = plt.gcf()
    fig.tight_layout()
    fig.savefig(os.path.join(path, "convergence"))
    fig.show()
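
A minimal usage sketch, assuming `opt_res` comes from any skopt minimizer and the output directory is hypothetical:

opt_res = gp_minimize(objective, space, n_calls=20, random_state=0)
plot_skopt_convergence(opt_res, path='output/plots/')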
Example #24
def hypertrain(modelclass,
               encoder,
               corpus,
               modelsfolder,
               n_calls=100,
               verbose=1,
               valmask=None,
               patience=20,
               maxepochs=1000,
               checkpointfile=None):
    """Performs hypertraining of a certain model architecture

    Arguments
        - modelclass: class of the model architecture to hyperoptimize
        - encoder: pretrained encoder to use
        - corpus: corpus to use in the hyperoptimization
        - modelsfolder: folder in which to save all models generated during the hyperoptimization
        - n_calls: number of hyperoptimization trials
        - verbose: verbosity level
        - valmask: binary mask for splitting the corpus in training / validation data
        - patience: maximum number of training epochs without validation improvement before stopping a trial
        - maxepochs: maximum allowed training epochs for each model
        - checkpointfile: name of the file to use for checkpointing the hyperoptimization progress. If the file
            already exists, its contents are used to warm-start the hyperoptimization

    Returns 
        - The trained model with the best parameters
    """
    # Hyperoptimization to find the best neural network parameters
    bestparams, bestloss, bestmodel, optres = findbestparams(
        modelclass,
        encoder,
        corpus,
        modelsfolder,
        n_calls,
        verbose=verbose,
        valmask=valmask,
        patience=patience,
        maxepochs=maxepochs,
        checkpointfile=checkpointfile)
    if verbose >= 1:
        print("Best parameters are", bestparams)
        print("Best validation loss is", bestloss)
        if verbose >= 3:
            plot_convergence(optres)
    return bestmodel
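
A usage sketch for hypertrain (MyModelClass, encoder, and corpus are hypothetical objects supplied by the caller):

best_model = hypertrain(MyModelClass, encoder, corpus, 'models/',
                        n_calls=20, verbose=3,
                        checkpointfile='hyperopt_checkpoint.pkl')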
Example #25
    def optimize_weights_by_skopt(self, n_iter=100):
        # fit ensemble weights by bayesian optimisation (mostly random search, though)

        res_ens = forest_minimize(
            self._loss_function,
            dimensions=[Real(0.0, 1.0)] * len(self._candidates),
            n_calls=n_iter,
            x0=self._initial_guess(),
            n_random_starts=int(n_iter * 0.5),
            base_estimator="RF",
            n_points=1000
        )

        plot_convergence(res_ens)

        self.ensemble.weights = res_ens.x

        return self.ensemble, res_ens
Example #26
def main():
    ## Search space
    spaces = [
        (14, 16),  # start
        (3, 5),  # decrease,
        (26, 28),  # move_threshold
        (50, 120)  # density_threshold
        #['linear', 'poly', 'rbf']
    ]

    res = gp_minimize(f, spaces, acq_func="EI", n_calls=30)

    print(res.fun)
    print(res.x)

    fig, ax = plt.subplots()
    plot_convergence(res, ax=ax)
    plt.savefig("convergence_plot.pdf")
Example #27
    def _format_and_plot_result(self, result):
        best_values = result.x
        best_params = self.values_to_dict(best_values)
        best_model = self.init_pipeline(best_values)

        if self.plot_graph:
            pyplot.figure()
            plot_convergence(result)
            pyplot.plot(result.func_vals)

        # return res_bo, best_params, best_model
        return SimpleNamespace(**{
            'optimizer': self,
            'report': self.best_results_summary(),
            'mutual_info_loss': self.params_mutual_info(),
            'mutual_info_time': self.params_mutual_info(self.time_taken_col),
            'result': result,
            'best_params': best_params,
            'best_model': best_model})
Example #28
    def optimization_process(self,
                             range_parameters,
                             default_parameters,
                             nb_calls,
                             nb_random_starts,
                             plot_conv=False):

        search_result = gp_minimize(
            func=self.build_and_train,
            dimensions=range_parameters,
            acq_func='EI',  # Expected Improvement.
            n_calls=nb_calls,
            n_random_starts=nb_random_starts,
            x0=default_parameters)
        if plot_conv:
            plot_convergence(search_result)
            plt.show()

        return search_result
Example #29
def fitbest():
    search_results = gp_minimize(
        func=fitness,
        dimensions=dimensions,
        #acq_func='EI', # expected improvement
        n_calls=50,
        x0=default_params)

    plot_convergence(search_results)

    #best_params = search_results.x
    #fitness(x=best_params)

    # fit the entire dataset on the best 10 fits and use an average of those
    # predictions for our final predictions.

    y_preds = np.zeros((test.shape[0], 6))

    all_fits = sorted(zip(search_results.func_vals, search_results.x_iters))
    print(all_fits)
Example #30
def plotting(results):
    fig1 = plt.figure(1)
    ax1 = plt.axes()
    plots.plot_convergence(results)
    plt.savefig(logdir + 'convergence.png', dpi=500)
    plt.close()
    fig2 = plt.figure(1)
    ax2 = plt.axes()
    plots.plot_evaluations(results)
    plt.savefig(logdir + 'eval.png', dpi=500)
    plt.close()
    # plot_objective can only be called once the parameters are being calculated rather than chosen at random
    if len(results.models) > 1:
        fig3 = plt.figure(3)
        ax3 = plt.axes()
        plots.plot_objective(results)
        plt.savefig(logdir + 'objective.png', dpi=500)
        plt.close()
    filename = logdir + 'gp_min.sav'
    pickle.dump(results, open(filename, 'wb'))
Example #32
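This is the canonical skopt quick-start snippet; it assumes an objective f. In the skopt tutorial, f is the noisy one-dimensional function below (a sketch following that example):

import numpy as np

def f(x, noise_level=0.1):
    return (np.sin(5 * x[0]) * (1 - np.tanh(x[0] ** 2))
            + np.random.randn() * noise_level)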
import matplotlib.pyplot as plt

from skopt import gp_minimize

res = gp_minimize(f,                  # the function to minimize
                  [(-2.0, 2.0)],      # the bounds on each dimension of x
                  acq_func="EI",      # the acquisition function
                  n_calls=15,         # the number of evaluations of f 
                  n_random_starts=5,  # the number of random initialization points
                  noise=0.1**2,       # the noise level (optional)
                  random_state=123)   # the random seed


# print ("x^*=%.4f, f(x^*)=%.4f" % (res.x[0], res.fun))
# print(res)

from skopt.plots import plot_convergence
plot_convergence(res)

plt.show()



from skopt.acquisition import gaussian_ei

plt.rcParams["figure.figsize"] = (8, 14)

x = np.linspace(-2, 2, 400).reshape(-1, 1)
x_gp = res.space.transform(x.tolist())
fx = np.array([f(x_i, noise_level=0.0) for x_i in x])

# Plot the 5 iterations following the 5 random points
Example #33
# test run
# _fitness(x=default_parameters)

# run model
search_result = gp_minimize(func=_fitness,
                            dimensions=dimensions,
                            acq_func='EI', # Expected Improvement.
                            n_calls=50,
                            x0=default_parameters)

# print results
print("############RESULTS############")
space = search_result.space
print("RESULTS: ", space.point_to_dict(search_result.x))
print("Fitness value: ", search_result.fun)
print("Nodes searched: ")
print(sorted(zip(search_result.func_vals, search_result.x_iters)))
print("##############################")

# plot convergence
plot_convergence(search_result)
plt.show()

# plot dependence
# dim_names = ['learning_rate', 'batch_size', 'train_steps']
dim_names = ['learning_rate', 'batch_size']
fig, ax = plot_objective(result=search_result, dimension_names=dim_names)
plt.show()
fig, ax = plot_evaluations(result=search_result, dimension_names=dim_names)
plt.show()