Example #1
 def fitness(nodes, epochs):
     nonlocal run_count, data_store, fl, fl_store, data_store_count, data_store_name
     start_time = time.time()
     run_count += 1
     # Run k-fold CV (run_kf) for the current trial of hyperparameters and return its validation score
     hparams = create_hparams(nodes=nodes,
                              epochs=epochs,
                              loss=scoring,
                              learning_rate=0.001,
                              reg_l1=0.0005,
                              reg_l2=0,
                              verbose=0)
     if plot_dir:
         plot_name = '{}/{}_{}_run_{}'.format(plot_dir, model_mode,
                                              scoring, run_count)
     else:
         plot_name = None
     val_score, results_dict = run_kf(
         model_mode=model_mode,
         fl=fl,
         fl_store=fl_store,
         hparams=hparams,
         scoring=scoring,
         other_fl_dict=other_fl_dict,
         write_excel_dir=None,
         save_model_name=
         f'{write_dir}/models/{scoring}_{model_mode}_run{run_count}',
         plot_name=plot_name)
     results_dict['info']['opt'] = {'nodes': nodes, 'epochs': epochs}
     results_dict['info']['model_name'] = f'{write_dir}_run{run_count}'
     # Save results in batches: a new data_store list and pickle file are started every 5 trials
     if (data_store_count - 1) % 5 == 0:
         data_store = []
         data_store_name += 5
     data_store.append(results_dict)
     with open(
             '{}/data_store_{}.pkl'.format(data_store_dir,
                                           data_store_name),
             "wb") as file:
         pickle.dump(data_store, file)
     data_store_count += 1
     end_time = time.time()
     print(
         f'{"*" * 100}\n'
         f'Run Number {run_count}\n'
         f'nodes: {nodes}, epochs: {epochs}\n'
         f'Time Taken: {end_time - start_time:.2f} s\n'
         f'{"*" * 100}'
     )
     return val_score
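
The fitness closure above is the objective that a hyperparameter optimizer calls once per trial. A minimal sketch of how it might be wired up, assuming scikit-optimize's gp_minimize and illustrative integer bounds (the actual optimizer, search space and trial budget are not shown in this excerpt; the sketch would sit in the same enclosing function that defines fitness):

from skopt import gp_minimize
from skopt.space import Integer

# Hypothetical search bounds -- the real ones are not part of this excerpt.
search_space = [Integer(5, 200, name='nodes'),
                Integer(10, 500, name='epochs')]

def objective(params):
    # gp_minimize passes one list of values per trial; unpack it for fitness().
    nodes, epochs = params
    return fitness(nodes, epochs)  # assumes val_score is a loss to be minimized

result = gp_minimize(objective, search_space, n_calls=30, random_state=42)
print('Best validation score:', result.fun)
print('Best (nodes, epochs):', result.x)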
Example #2
 def fitness(depth, num_est):
     nonlocal run_count, data_store, fl, fl_store, data_store_count, data_store_name
     start_time = time.time()
     run_count += 1
     # Run k-fold CV (run_kf) for a single trial of hyperparameters and return its validation score
     hparams = create_hparams(max_depth=depth,
                              num_est=num_est,
                              chain=chain)
     val_score, results_dict = run_kf(
         model_mode=model_mode,
         fl=fl,
         fl_store=fl_store,
         hparams=hparams,
         scoring=scoring,
         other_fl_dict=other_fl_dict,
         write_excel_dir=None,
         save_model_name=
         f'{write_dir}/models/{scoring}_{model_mode}_run{run_count}',
         plot_name=None)
     results_dict['info']['opt'] = {'depth': depth, 'num_est': num_est}
     results_dict['info']['model_name'] = f'{write_dir}_run{run_count}'
     # Save results in batches
     if (data_store_count - 1) % 5 == 0:
         data_store = []
         data_store_name += 5
     data_store.append(results_dict)
     # Save the current data_store batch after every trial in case hparam_opt terminates early (e.g. a server shutdown)
     with open(
             '{}/data_store_{}.pkl'.format(data_store_dir,
                                           data_store_name),
             "wb") as file:
         pickle.dump(data_store, file)
     data_store_count += 1
     end_time = time.time()
     print(
         f'{"*" * 100}\n'
         f'Run Number {run_count}\n'
         f'Depth: {depth}, No. Estimators: {num_est}\n'
         f'Time Taken: {end_time - start_time:.2f} s\n'
         f'{"*" * 100}'
     )
     return val_score
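
Because results are pickled in rolling batches of five trials per data_store_<n>.pkl file (rewritten after every trial, so a crash loses at most the current run), reassembling the full history afterwards means concatenating the batch files. A minimal sketch, assuming the data_store_dir layout written above; load_data_store is a hypothetical helper, not part of the original code:

import glob
import pickle

def load_data_store(data_store_dir):
    # Each data_store_<n>.pkl holds up to five results_dict entries; merge them in run order.
    paths = sorted(glob.glob(f'{data_store_dir}/data_store_*.pkl'),
                   key=lambda p: int(p.rsplit('_', 1)[-1].split('.')[0]))
    all_results = []
    for path in paths:
        with open(path, 'rb') as f:
            all_results.extend(pickle.load(f))
    return all_results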
Example #3
from own_package.models import create_hparams
from own_package.train_test import run_train_test

hparams = create_hparams(lstm_units=20, hidden_layers=[20], epochs=120, batch_size=128, learning_rate=0.001)

run_train_test(model_mode='LSTM', hparams=hparams, window_size=48, loader_file='./excel/results.xlsx',
               save_model=True)

from own_package.features_labels_setup import load_data_to_fl, load_testset_to_fl
from own_package.models import create_hparams
from own_package.cross_validation import run_kf
from own_package.others import create_results_directory


def selector(case, **kwargs):
    if case == 1:
        # Run normal KF cross validation for a single hyperparameter
        hparams = create_hparams()
        model_mode = 'lr'
        k_folds = 1
        fl_dir = './excel/Data_loader_spline_full_onehot_R1_cut_CM3.xlsx'
        other_names = ['ett30']
        other_dir = ['./excel/ett30.xlsx']
        # Load main training data
        fl = load_data_to_fl(fl_dir,
                             normalise_labels=False,
                             norm_mask=[0, 1, 3, 4, 5])
        fl_store = fl.create_kf(k_folds=k_folds, shuffle=True)
        # Load other data to evaluate the model on. e.g. the separate test set
        other_fl_dict = {
            k: load_testset_to_fl(v,
                                  norm_mask=[0, 1, 3, 4, 5],
                                  scaler=fl.scaler)
            for k, v in zip(other_names, other_dir)
        }
        write_dir = create_results_directory('./results/kf/kf_results',
                                             folders=['models', 'plots'],
                                             excels=['kf_results'])
        write_excel = f'{write_dir}/kf_results.xlsx'
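
The case 1 listing above stops after creating the results directory. Based on the run_kf signature used in Examples #1 and #2, the cross-validation call that would typically follow might look roughly like this; it is a sketch, not the original continuation, and the scoring value, the save/plot paths and the use of write_excel for write_excel_dir are assumptions:

        # Sketch only: scoring and the save/plot paths below are illustrative.
        val_score, results_dict = run_kf(model_mode=model_mode,
                                         fl=fl,
                                         fl_store=fl_store,
                                         hparams=hparams,
                                         scoring='mse',
                                         other_fl_dict=other_fl_dict,
                                         write_excel_dir=write_excel,
                                         save_model_name=f'{write_dir}/models/kf',
                                         plot_name=f'{write_dir}/plots/kf')
        print(f'Validation score ({model_mode}): {val_score}')
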
from own_package.features_labels_setup import load_data_to_fl, load_testset_to_fl
from own_package.models import create_hparams
from own_package.cross_validation import run_kf
from own_package.others import create_results_directory


def selector(case, **kwargs):
    if case == 1:
        # Run normal KF cross validation for a single hyperparameter
        hparams = create_hparams(loss='mse',
                                 learning_rate=0.001,
                                 reg_l1=0.0005,
                                 reg_l2=0.,
                                 nodes=10,
                                 max_depth=5,
                                 num_est=5,
                                 activation='relu',
                                 epochs=100,
                                 batch_size=16,
                                 verbose=0)
        model_mode = 'dtr'
        k_folds = 3
        fl_dir = './excel/Data_loader_Round13.xlsx'
        other_names = ['ett30', 'ettI01']
        other_dir = ['./excel/ett30.xlsx', './excel/ett30I01.xlsx']
        data_augmentation = 'invariant'
        numel = 10
        # Load main training data
        fl = load_data_to_fl(fl_dir,
                             normalise_labels=False,
                             norm_mask=[0, 1, 3, 4, 5])