# Hyperparameters
batch_size = 256
initializer = tf.keras.initializers.Orthogonal()
loss = 'mse'  # alternatives: 'categorical_crossentropy', 'kl_divergence'
activation = 'mish'  # activation for hidden layers (Keras registers the name in lowercase)
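# Note: the 'mish' string activation is built into Keras only in recent TF
# releases; for older versions, a minimal fallback (a sketch, not part of the
# original code) registers it by hand:
if not hasattr(tf.keras.activations, 'mish'):
    tf.keras.utils.get_custom_objects()['mish'] = (
        lambda x: x * tf.math.tanh(tf.math.softplus(x)))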

lr = 0.01
decay = 0.999
# exponentially decay the learning rate: lr * decay**epoch (factor 0.999 per epoch)
cbks = [LearningRateScheduler(lambda epoch: lr * decay ** epoch)]
optimizer = keras.optimizers.Nadam(learning_rate=lr)  # the 'lr' keyword is deprecated
lr_metric = utils_ML.get_lr_metric(optimizer)
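# Sanity check of the schedule above: lr * decay**epoch decays slowly, e.g.
# 0.01 * 0.999**100 ~= 0.00905 and 0.01 * 0.999**1000 ~= 0.00368.
# utils_ML.get_lr_metric presumably exposes the optimizer's current learning
# rate as a Keras metric so it shows up in the training log; a typical
# implementation (an assumption, sketched here for reference only) would be:
#
#   def get_lr_metric(optimizer):
#       def lr(y_true, y_pred):
#           return optimizer.learning_rate
#       return lr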

# Recover previously saved results so they can be plotted later
BER = utils.read_ber_file(N, k, 'BER')
BER = utils.saved_results(BER, N, k)



# Pretraining
print("----------------------------------Pretraining------------------------------------------")
model_encoder = encoder_generator(N, k)
meta_model = meta_model_generator(k, model_encoder, False, pretrain_epsilon)
### Compile our models
meta_model.compile(loss=loss, optimizer=optimizer, metrics=[utils_ML.ber_metric, lr_metric])
### Fit the model
history = meta_model.fit(U_k, U_k, epochs=epoch_pretrain, verbose=verbose, shuffle=False, batch_size=batch_size, callbacks=cbks)

loss_values = history.history['loss']
metric_values = history.history['ber_metric']
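
# Optional: visualize the pretraining curves recovered above (a sketch; assumes
# matplotlib is available, plotting is not part of the original pipeline).
import matplotlib.pyplot as plt

plt.figure()
plt.plot(loss_values, label='loss (mse)')
plt.plot(metric_values, label='BER metric')
plt.xlabel('epoch')
plt.yscale('log')  # loss and BER typically span several orders of magnitude
plt.legend()
plt.show()
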
# Hyperparameters
batch_size = 256
initializer = tf.keras.initializers.HeNormal()  # note the parentheses: the initializer must be instantiated
loss = 'mse'  # alternatives: 'categorical_crossentropy', 'kl_divergence'
activation = 'relu'

lr = 0.001
decay = 0.999
# exponentially decay the learning rate: lr * decay**epoch (factor 0.999 per epoch)
cbks = [LearningRateScheduler(lambda epoch: lr * decay ** epoch)]
optimizer = keras.optimizers.Nadam(learning_rate=lr)
lr_metric = utils_ML.get_lr_metric(optimizer)

# Recover previously saved results so they can be plotted later
BER = utils.read_ber_file(N, k, 'BER')
BER = utils.saved_results(BER, N, k)
BLER = utils.read_ber_file(N, k, 'BLER')
BLER = utils.saved_results(BLER, N, k, 'BLER')



# Joint pretraining
if pretraining:
  print("----------------------------------Joint Pretraining------------------------------------------")
  model_encoder = encoder_generator(N, k)
  model_decoder = decoder_generator(N, k)
  meta_model = meta_model_generator(k, model_encoder, model_decoder, False, pretrain_epsilon)
  ### Compile our models
  meta_model.compile(loss=[utils_ML.linear_regularizer, loss, loss, loss, loss, loss], optimizer=optimizer, metrics=[lr_metric], loss_weights=loss_weights)
  ### Fit the model
  history = meta_model.fit([U_k, Interval], [aux, U_k, U_k, U_k, U_k, U_k], epochs=epoch_pretrain, verbose=verbose, shuffle=True, batch_size=batch_size)
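  # Keras minimizes the weighted sum of the six per-output losses,
  # total_loss = sum(w_i * loss_i), with the w_i taken from loss_weights.
  # Mirroring the recovery step of the single-model block above (a sketch):
  loss_values = history.history['loss']  # combined (weighted) training loss per epoch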