def BER_NN(nb_pkts=100):
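    """Build the NN encoder's codebook, check it for repeated codewords,
    and estimate BER/BLER over a grid of channel parameters (e0, e1)."""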
    # e0 = np.logspace(-3, 0, 15)
    # e0 = np.linspace(0.001, 0.999, 11)
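    # Channel-parameter grid: one point near 0, a fine sweep over [0.01, 0.1),
    # then a coarser sweep from 0.1 up to 1.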
    e0 = np.concatenate(
        (np.array([0.001]), np.linspace(
            0.01, 0.1, 10, endpoint=False), np.linspace(0.1, 1, 15)),
        axis=0)
    e0[-1] -= 0.001  # keep the last grid point strictly below 1.0
    e1 = [t for t in e0 if t <= 0.5]  # second channel parameter: only swept up to 0.5

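    # Encode all 2**k messages (each paired with the one-hot interval
    # indicator) to obtain the codebook of the trained encoder.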
    inter_list = np.tile([0, 0, 0, 1], (2**k, 1))
    C = np.round(model_encoder.predict([np.array(u_k),
                                        inter_list])).astype('int')
    print('codebook \n', C)
    print('codebook C is Linear? ', utils.isLinear(C))
    # A usable codebook must be injective: count duplicated codewords.
    unique_codewords = {tuple(codeword) for codeword in C.tolist()}
    nb_repeated_codes = len(C) - len(unique_codewords)
    print('+++++++++++++++++++ Repeated Codes NN encoder = ',
          nb_repeated_codes)
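    # 'dist' is the density of ones in the codebook (mean normalized Hamming weight).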
    print('dist = ', sum(sum(codeword) for codeword in C) / (N * 2**k))
    print('***************************************************************')

    if nb_repeated_codes == 0:
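        # Load reference BER/BLER curves saved from earlier runs for comparison.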
        BER = test.read_ber_file(N, k, 'BER')
        BER = test.saved_results(BER, N, k)
        BLER = test.read_ber_file(N, k, 'BLER')
        BLER = test.saved_results(BLER, N, k, 'BLER')
        print("NN BER")
        t = time.time()
        BER['auto-array-inter'], BLER['auto-array-inter'] = bit_error_rate_NN(
            N, k, C, nb_pkts, e0, e1, channel)
        t = time.time() - t
        print(f"NN time = {t}s ========================")

        print("BER['auto-array-inter'] = ", BER['auto-array-inter'])
        print("BLER['auto-array-inter'] = ", BLER['auto-array-inter'])

        if MAP_test:
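            # Slow baseline: MAP decoding of the learned codebook on the same
            # grid, with a fixed 1000-packet budget instead of nb_pkts.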
            print("MAP BER")
            t = time.time()
            BER['MAP'] = utils.bit_error_rate(k, C, 1000, e0, e1)
            t = time.time() - t
            print(f"MAP time = {t}s =======================")

            print("NN BLEP")
            t = time.time()
            BLER['auto_BLEP'] = utils.block_error_probability(N, k, C, e0, e1)
            t = time.time() - t
            print(f"NN time = {t}s ========================")

        utils.plot_BSC_BAC(f'BER Coding Mechanism N={N} k={k} - NN Interval',
                           BER, k / N)
        utils.plot_BSC_BAC(f'BLER Coding Mechanism N={N} k={k} - NN Interval',
                           BLER, k / N)
    else:
        print('Bad codebook: repeated codewords')

# Pretraining
print("----------------------------------Pretraining------------------------------------------")
model_encoder = encoder_generator(N, k)
meta_model = meta_model_generator(k, model_encoder, False, pretrain_epsilon)
### Compile the model
meta_model.compile(loss=loss, optimizer=optimizer, metrics=[utils_ML.ber_metric, lr_metric])
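# Assumption: lr_metric wraps the optimizer's current learning rate as a
# Keras metric; a typical pattern (hypothetical sketch, not necessarily the
# repo's utils_ML definition) is:
#   def lr_metric(y_true, y_pred):
#       return optimizer.learning_rate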
### Fit the model
history = meta_model.fit(U_k, U_k, epochs=epoch_pretrain, verbose=verbose, shuffle=False, batch_size=batch_size, callbacks=cbks)
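# Autoencoder-style training: the model learns to reproduce its own input,
# so U_k serves as both data and labels.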

loss_values = history.history['loss']
metric_values = history.history['ber_metric']

C = np.round(model_encoder.predict(u_k)).astype('int')
print('codebook C is Linear? ', utils.isLinear(C))
BER['NN_Encoder-MAP-lambda'] = utils.bit_error_rate(k, C, nb_pkts, e0, e1, coded=True)
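# Evaluate the learned codebook with MAP decoding over the (e0, e1) grid.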

# Diagnostic: inspect individual encodings (print left commented out).
for i in range(2**k):
    u = u_k[i]
    x = model_encoder.predict([u])
    # print('******\n', u, '\n', x, '\n', utils.MAP_BAC_vector(x, k, C, 0.05, 0.0002))

########################### Plotting ###########################
# Plot the loss function values calculated during training
fig = plt.figure(figsize=(20, 10))
title = f'N={N} k={k} {length_training} - NN Array-MAP-lambda'
plt.semilogy(loss_values, alpha=0.8, color='brown', linewidth=0.15, label='Loss')
filter_size = 100
plt.semilogy(utils_ML.smooth(loss_values, filter_size)[filter_size - 1:], color='brown')
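# Assumption: utils_ML.smooth behaves like a moving average; a stand-in
# consistent with the [filter_size - 1:] slicing above (sketch, not the
# repo's actual definition):
def _smooth_sketch(x, window):
    # 'full' convolution yields len(x) + window - 1 samples; the caller
    # drops the first window - 1 partially-averaged ones.
    return np.convolve(x, np.ones(window) / window, mode='full')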
# Plot the BER metric calculated in training