# Beispiel #1 (Example #1) — stray paste artifact, kept as a comment
# Register the tunable RNN hyperparameters together with their defaults.
for _name, _default in (
    ('rnn_activation', 'tanh'),
    ('rnn_dropout', 0.1),
    ('last_activation', 'linear'),
):
    pg.add_value(_name, default_value=_default)

# Draw a single unique hyperparameter sample and fan it out into one
# variant per candidate output activation.
og_param = pg.sample(1, unique=True)[0]
parameters = [
    {**og_param, 'last_activation': output_act}
    for output_act in ('linear', 'relu', 'leaky_relu')
]

# Load the fixed training split and prepare the cross-validation folds.
dataset_name = 'FixedTraining'
x1, x2, y = Bu.load_data(dataset_name)
cvs = Bu.get_cross_validation(x1, x2, y, n_cv)

# Training callbacks: early stopping + reduce-on-plateau enabled.
cbs = Tr.get_callbacks(es=True, plat=True)

# CSV column layout: run bookkeeping, then the sampled hyperparameter
# names, then the recorded performance figures.
head = ['iteration', 'seed', *pg.get_head(), 'last_perf', 'min_perf', 'time']
print(head)

log = Bu.CSVWriter(filename, head=head)

model_path = 'current_weights.h5'

# NOTE(review): this loop has no break, so it never terminates and every
# statement after it (from the `parameters[...]` assignments onward) is
# unreachable. The file looks like two scripts pasted together — confirm
# the intended control flow before running.
while True:
    # Fresh 32-bit seed per iteration, applied to both RNG sources.
    seed = randint(0, 2**32-1)
    set_random_seed(seed)  # presumably seeds the DL framework RNG — confirm
    set_numpy_seed(seed)   # presumably seeds NumPy's RNG — confirm
# Fixed hyperparameter configuration for this training run.
# NOTE(review): earlier in this file `parameters` is built as a *list* of
# dicts; string-key assignment on a list raises TypeError. This section
# appears to belong to a different script — confirm which definition of
# `parameters` is intended before running.
parameters['activation'] = 'relu'
parameters['dropout'] = 0.015
parameters['rnn_type'] = 'lstm'
parameters['rnn_size'] = 230
parameters['rnn_activation'] = 'tanh'
parameters['rnn_dropout'] = 0.125
parameters['last_activation'] = 'relu'
# Trailing zeros: unused dense-layer slots (fixed-length spec of 10 layers).
parameters['dense_layers'] = [139, 486, 152, 79, 61, 0, 0, 0, 0, 0]

# Load the fixed training split: EULAR sequences, CRP values, DAS28 targets.
eul, crp, das28 = Bu.load_data('FixedTraining')

# Append a trailing channel axis — assumes eul is 2D (samples, timesteps),
# giving (samples, timesteps, 1) for the recurrent layers.
eul = eul.reshape(*eul.shape[:2], 1)

# Standardise CRP to zero mean / unit variance.
crp_mean = np.mean(crp)
crp_std = np.std(crp)
crp = (crp - crp_mean) / crp_std

# Callbacks with default settings (no plat/es flags, unlike the earlier call).
cbs = Tr.get_callbacks()

epochs = 30
batch_size = 32

# NOTE(review): `dir` shadows the builtin of the same name; rename (e.g.
# `model_dir`) together with its use in the os.listdir loop below.
dir = r'D:\WindowsFolders\Documents\GitHub\BachelorRetraining\Training\TrainModel\TrainModel\models'

# Symbolic Keras inputs: a (timesteps, 1) EULAR sequence and a scalar CRP.
eular_shape = (eul.shape[1], 1)
input_eular = keras.layers.Input(
    name='input_eular', dtype='float32', shape=eular_shape)
input_crp = keras.layers.Input(
    name='input_crp', dtype='float32', shape=(1, ))

# Build the optimiser from the configured hyperparameters.
# NOTE(review): 'optimizer'/'learning_rate' keys are never set in the
# visible part of this file — presumably supplied elsewhere; confirm.
opt_name = parameters['optimizer']
lr = parameters['learning_rate']
optimizer = Tr.get_optimizer(opt_name, lr)

for i, file in enumerate(os.listdir(dir)):