for i in range(len(x_test)):
    x_test_list.append(x_test[i].flatten().astype(np.float64))

# normalize dataset (note: the test set is normalized with its own
# min/max here, not with the training-set statistics)
x_train_list, mi, ma = util.normalize_dataset(x_train_list)
x_test_list, mi, ma = util.normalize_dataset(x_test_list)
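# For reference, a minimal sketch of a min-max normalizer with the same
# return signature (an assumption -- util.normalize_dataset is the project's
# own helper and its internals may differ). Returning (data, min, max) makes
# it possible to reuse the training-set statistics for the test set.
def minmax_normalize(samples, mi=None, ma=None):
    # Compute min/max over the whole list unless statistics are supplied
    stacked = np.stack(samples)
    if mi is None or ma is None:
        mi, ma = stacked.min(), stacked.max()
    return [(s - mi) / (ma - mi) for s in samples], mi, ma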

# Get time before training
t_start = datetime.datetime.now()
print("Starting timer")

# Initialize network and train
model_torch = PcTorch(NETWORK_ARCHITECTURE)
model_torch.train(x_train_list,
                  y_train_list,
                  x_test_list,
                  y_test_list,
                  batch_size=BATCH_SIZE,
                  epochs=EPOCHS,
                  max_it=INFERENCE_STEPS,
                  optimizer=OPTIMIZER,
                  activation=ACTIVATION,
                  dataset_perc=DATA_PERC,
                  learning_rate=LR)

# Get time after training
t_end = datetime.datetime.now()
elapsed_time = t_end - t_start
dt_sec = elapsed_time.total_seconds()

print(f"Training time per epoch: {dt_sec/EPOCHS}")
for i in range(len(x_train)):
    x_train_list.append(x_train[i].flatten().astype(np.float64))

for i in range(len(x_valid)):
    x_valid_list.append(x_valid[i].flatten().astype(np.float64))
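# Aside: assuming x_train and x_valid are NumPy arrays of shape (N, H, W)
# (an assumption about the loaded data), the per-image loops above could be
# replaced by a single vectorized reshape, for example:
#   x_valid_list = list(x_valid.reshape(len(x_valid), -1).astype(np.float64))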

# Get time before training
t_start = datetime.datetime.now()
print("Starting timer")

# Initialize network and train
model_torch = PcTorch(NETWORK_ARCHITECTURE)
model_torch.train(
    x_train_list,
    y_train_list,
    x_valid_list,
    y_valid_list,
    batch_size=BATCH_SIZE,
    epochs=EPOCHS,
    max_it=INFERENCE_STEPS,
    optimizer=OPTIMIZER,
    activation=ACTIVATION,
    dataset_perc=DATA_PERC
)

# Get time after training
t_end = datetime.datetime.now()
elapsed_time = t_end - t_start
dt_sec = elapsed_time.total_seconds()

print(f"Training time per epoch: {dt_sec/EPOCHS}")