# Experiment 1: grid over the output (last-layer) activation.
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'  # silence TensorFlow C++ logging; must be set before TensorFlow is imported

import Utility.parameter_generator as Pg
import Utility.bachelor_utilities as Bu
import Utility.network_training as Tr
from tensorflow import set_random_seed
from numpy.random import seed as set_numpy_seed
from random import shuffle, randint

filename = r'../../../Logs/results.csv'  # CSV log for the cross-validation results
n_cv = 5                                 # number of cross-validation folds

# Baseline hyper-parameters; every value is fixed to its default here.
pg = Pg.ParameterGenerator()
pg.add_value('dense_layers', default_value=[139, 486, 152, 79, 61, 0, 0, 0, 0, 0])
pg.add_value('learning_rate', default_value=0.005)
pg.add_value('optimizer', default_value='adam')
pg.add_value('activation', default_value='relu')
pg.add_value('dropout', default_value=0.1)
pg.add_value('rnn_type', default_value='lstm')
pg.add_value('rnn_size', default_value=230)
pg.add_value('rnn_activation', default_value='tanh')
pg.add_value('rnn_dropout', default_value=0.1)
pg.add_value('last_activation', default_value='linear')

og_param = pg.sample(1, unique=True)[0]  # one baseline parameter set

parameters = []
for output_act in ['linear', 'relu', 'leaky_relu']:
    # The listing is truncated here; assumed loop body: one copy of the
    # baseline parameter set per candidate output activation.
    param = dict(og_param)
    param['last_activation'] = output_act
    parameters.append(param)
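# The seeding helpers imported above (set_random_seed, set_numpy_seed, shuffle,
# randint) never appear in the part of the listing shown here. A hypothetical
# sketch of how they would typically be wired in for reproducible runs follows;
# none of these four lines are from the original script.
run_seed = randint(0, 2 ** 31 - 1)  # draw one seed per run
set_numpy_seed(run_seed)            # numpy.random.seed
set_random_seed(run_seed)           # tensorflow.set_random_seed (TF 1.x API)
shuffle(parameters)                 # evaluate the activation variants in random order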

# Experiment 2: learning-rate grid search.
import Utility.parameter_generator as Pg  # import restored; the extracted listing omitted it

seed = None  # seed passed to the parameter generator below
n_cv = 5     # number of cross-validation folds

# Learning-rate candidates: c / a * 10**e for e in [-8, -1] and c in [1, a*10 - 1].
lr_grid = []
a = 7
lr_exp_range = range(-8, 0)
lr_cof_range = range(1, a * 10)
for e in lr_exp_range:
    for c in lr_cof_range:
        lr_grid.append(c / a * pow(10, e))

pg = Pg.ParameterGenerator(seed=seed)
pg.add_value('dense_layers', default_value=[139, 486, 152, 79, 61, 0, 0, 0, 0, 0])
pg.add_value('learning_rate', default_value=0)  # placeholder; swept via lr_grid
pg.add_value('optimizer', default_value='adam')
pg.add_value('activation', default_value='relu')
pg.add_value('dropout', default_value=0.1)
pg.add_value('rnn_type', default_value='lstm')
pg.add_value('rnn_size', default_value=311)
pg.add_value('rnn_activation', default_value='tanh')
pg.add_value('rnn_dropout', default_value=0.1)
pg.add_value('last_activation', default_value='linear')

og_param = pg.sample(1, unique=True)
parameters = []
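# Illustrative check only (not part of the original script): what the grid above
# actually spans. 8 exponents (-8 .. -1) times 69 coefficients (1 .. 69) give
# 552 learning-rate candidates, from 1/7 * 1e-8 (about 1.4e-9) up to
# 69/7 * 1e-1 (about 0.99).
assert len(lr_grid) == 8 * 69
print('%d learning rates from %.2e to %.2e' % (len(lr_grid), min(lr_grid), max(lr_grid)))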