def hyperparameter_search(n_jobs: int,
                          params: dict,
                          name: str = 'hyperparameter_search'):
    """Run an Ax Bayesian hyperparameter search with ``n_jobs`` trials.

    Args:
        n_jobs: Number of optimization trials to run sequentially.
        params: Nested config dict; reads ``params['data']['n_input_steps']``,
            ``params['data']['n_output_steps']`` and ``params['paths']['data']``.
        name: Ax experiment name.

    Side effects: prints the best parameters found and saves the Ax client
    state to a JSON file in the current working directory.
    """
    # Build the datasets once up front; every trial reuses them.
    _, _, samples = data.pipeline(params['data']['n_input_steps'],
                                  params['data']['n_output_steps'],
                                  params['paths']['data'])
    datasets = data.get_datasets(samples, params['data']['n_input_steps'])

    # Set up Ax (imported lazily so the dependency is only needed here).
    from ax.service.ax_client import AxClient
    ax_client = AxClient(enforce_sequential_optimization=False)

    # Define hyperparameter bounds for the search space.
    ax_client.create_experiment(name=name,
                                parameters=[{
                                    "name": "num_epochs",
                                    "type": "range",
                                    "bounds": [150, 200]
                                }, {
                                    "name": "learning_rate",
                                    "type": "range",
                                    "bounds": [5e-4, 1e-3],
                                    "log_scale": True
                                }, {
                                    "name": "batch_size",
                                    "type": "range",
                                    "bounds": [64, 1024]
                                }, {
                                    "name": "variational_dropout_p",
                                    "type": "range",
                                    "bounds": [0.2, 0.5]
                                }],
                                objective_name='loss',
                                minimize=True)

    # Sequentially evaluate n_jobs trials; each trial trains a model and
    # reports its scalar loss back to Ax.
    for _ in range(n_jobs):
        parameters, trial_index = ax_client.get_next_trial()
        ax_client.complete_trial(trial_index=trial_index,
                                 raw_data=train_evaluate(
                                     parameters, datasets=datasets)['loss'])

    # BUG FIX: original message read 'after {n_jobs}:' with no noun.
    print(f'Best parameters found after {n_jobs} trials:')
    print(ax_client.get_best_parameters())
    ax_client.save_to_json_file()
# Exemple #2
# 0
# (snippet separator from the scrape this file was assembled from; commented
#  out because the bare words were a Python syntax error)
    # NOTE(review): fragment of a larger function — `parameters`, `opts`,
    # `trial_index`, `ax`, `log_record`, `log`, `cmd_line_opts`, `train` and
    # `time` all come from the enclosing (unseen) scope.

    # Copy the Ax-suggested hyperparameters onto the training options object.
    opts.max_conv_size = parameters['max_conv_size']
    opts.dense_kernel_size = parameters['dense_kernel_size']
    opts.batch_size = 64  # parameters['batch_size']
    opts.learning_rate = parameters['learning_rate']
    opts.epochs = cmd_line_opts.epochs  # max to run, we also use early stopping

    # run one training trial, recording wall-clock time and final loss
    start_time = time.time()
    # final_loss = train.train_in_subprocess(opts)
    final_loss = train.train(opts)
    log_record.append(time.time() - start_time)
    log_record.append(final_loss)

    # complete trial: None is treated as a failed/aborted training run
    if final_loss is None:
        print("ax trial", trial_index, "failed?")
        ax.log_trial_failure(trial_index=trial_index)
    else:
        # raw_data is (mean, SEM); SEM=0 means the loss is reported as exact
        ax.complete_trial(trial_index=trial_index,
                          raw_data={'final_loss': (final_loss, 0)})
    print("CURRENT_BEST", ax.get_best_parameters())

    # flush log: one tab-separated record per trial, echoed to stdout too
    log_msg = "\t".join(map(str, log_record))
    print(log_msg, file=log)
    print(log_msg)
    log.flush()

    # save ax state after every trial so a crash loses at most one trial
    ax.save_to_json_file()
                 y_train_partial[train_index], X_train_partial[val_index],
                 y_train_partial[val_index])
    }


# Repeat the 10-fold CV search 25 times; each fold evaluates one Ax trial.
# StratifiedKFold is re-seeded with random_state=7 every repeat, so each
# repeat visits the same fold splits (as in the original).
for _ in range(25):
    folds = StratifiedKFold(n_splits=10, random_state=7, shuffle=True)
    dummy_features = np.zeros(X_train_partial.shape[0])
    for tr_idx, va_idx in folds.split(dummy_features, y_train_partial):
        print("Train index: ", tr_idx, " shape: ", tr_idx.shape,
              "Validation index: ", va_idx, "shape: ", va_idx.shape)
        trial_params, trial_idx = ax_client.get_next_trial()
        ax_client.complete_trial(trial_index=trial_idx,
                                 raw_data=evaluate(trial_params))

ax_client.save_to_json_file()

# NOTE: sort_values returns a new frame; the result is discarded here,
# exactly as in the original code.
ax_client.get_trials_data_frame().sort_values('keras_cv')

best_parameters, values = ax_client.get_best_parameters()

# the best set of parameters (each printed item is a (name, value) pair).
for item in best_parameters.items():
    print(item)

print()

# the best score achieved.
means, covariances = values
print(means)
# Exemple #4
# 0
# (snippet separator from the scrape this file was assembled from; commented
#  out because the bare words were a Python syntax error)
# Example (disabled): run trials one at a time, logging failures explicitly.
# for i in range(25):
#     parameters, trial_index = ax_client.get_next_trial()
#     # Local evaluation here can be replaced with deployment to external system.
#     ax_client.complete_trial(trial_index=trial_index, raw_data=evaluate(parameters))
#     # _, trial_index = ax_client.get_next_trial()
#     ax_client.log_trial_failure(trial_index=trial_index)
#
# ax_client.get_trials_data_frame().sort_values('trial_index')
# best_parameters, values = ax_client.get_best_parameters()

from ax.utils.notebook.plotting import render, init_notebook_plotting
from ax.plot.contour import plot_contour

# Contour of the "base" metric over the first two tuned parameters,
# rendered from the fitted GP model `gpei`.
plot = plot_contour(
    model=gpei,
    param_x=opt_list[0],
    param_y=opt_list[1],
    metric_name="base",
)
render(plot)

# Attach the fitted model so AxClient's built-in plotting uses it.
ax_client.generation_strategy.model = gpei
init_notebook_plotting(offline=True)
# render(ax_client.get_contour_plot())
# BUG FIX: the original passed opt_list[0] for BOTH axes, producing a
# degenerate contour of a parameter against itself; use opt_list[1] for y,
# matching the explicit plot_contour call above.
render(ax_client.get_contour_plot(param_x=opt_list[0],
                                  param_y=opt_list[1]))  #, metric_name=base))
# render(ax_client.get_optimization_trace(objective_optimum=hartmann6.fmin))  # Objective_optimum is optional.

ax_client.save_to_json_file()  # For custom filepath, pass `filepath` argument.
restored_ax_client = AxClient.load_from_json_file(
)  # For custom filepath, pass `filepath` argument.
# Exemple #5
# 0
# (snippet separator from the scrape this file was assembled from; commented
#  out because the bare words were a Python syntax error)
        # NOTE(review): fragment of a larger function — `ax_client`, `runner`,
        # `epocs`, `batch_size`, `workload`, `awareness` and `Path` come from
        # the enclosing (unseen) scope. A dead, commented-out variant that
        # attached pre-specified trials via ax_client.attach_trial(...) was
        # removed from here.

        # Optimization loop: each epoch asks Ax for `batch_size` candidate
        # alpha values, runs them as one batch, then reports results back.
        for _ in range(epocs):
            batch_params = {}
            for _ in range(batch_size):
                parameters, trial_index = ax_client.get_next_trial()
                batch_params[trial_index] = parameters.get('alpha')

            # Run the whole batch; returns a mapping trial_index -> raw_data.
            trial_index_to_results = runner.run_exp(batch_params)

            # Report each trial we submitted (iterate the dict directly
            # rather than .keys(); same behavior, idiomatic form).
            for trial_index in batch_params:
                ax_client.complete_trial(
                    trial_index=trial_index,
                    raw_data=trial_index_to_results[trial_index])

        # Persist the Ax experiment state.
        # BUG FIX: local variable typo `filePaht` renamed to snake_case
        # `file_path` (defined and used only within this fragment).
        opti_dir = Path('.')
        file_path = opti_dir / f'{workload}_{awareness}_bo.json'
        ax_client.save_to_json_file(filepath=file_path)