Example #1
def autoencoder_NN(metric, N=8, k=4, nb_pkts=100, graph='BER', interval=False):
  print('------------------- NN-autoencoder -----------------------------', interval)
  key = 'NN_auto'
  C = utils.NN_encoder(k, N)
  # print(np.array(C))
  print('k ', k, 'N ', N)
  if graph == 'BLER':
    metric[key] = utils.block_error_probability(N, k, C, e0, e1)
    a, metric['BLER'] = utils.bit_error_rate_NN_predict(N, k, C, 10000, e0, e1, inter=interval)
  else:
    print("NN BER")
    t = time.time()
    metric[key], a = utils.bit_error_rate_NN_predict(N, k, C, nb_pkts, e0, e1, inter=interval)
    t = time.time() - t
    print(f"NN time = {t}s ========================")
    print(metric[key])

    MAP_test = False
    if MAP_test:
      print("MAP BER")
      t = time.time()
      metric['MAP_dec'],a = utils.bit_error_rate(k, C, nb_pkts, e0, e1)
      t = time.time() - t
      print(f"MAP time = {t}s =======================")
      print(metric['MAP_dec'])
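# --- Added usage sketch (not from the source) --------------------------------
# Minimal driver for autoencoder_NN above. It assumes the module-level e0/e1
# crossover grids that the other snippets in this collection build, and that
# utils is importable; the grid values below are illustrative only.
e0 = np.logspace(-3, 0, 15)
e0[-1] -= 0.001                   # keep the last point strictly below 1
e1 = [t for t in e0 if t <= 0.5]  # restricted grid, as in the snippets below

metric = {}                       # results keyed by scheme name
autoencoder_NN(metric, N=8, k=4, nb_pkts=1000, graph='BER')
print(metric['NN_auto'])          # BER results for the NN autoencoder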
Example #2
def BER_NN(nb_pkts=100):
    # e0 = np.logspace(-3, 0, 15)
    # e0 = np.linspace(0.001, 0.999, 11)
    e0 = np.concatenate(
        (np.linspace(0.001, 0.2, 10, endpoint=False), np.linspace(0.2, 1, 8)),
        axis=0)
    e0[-1] -= 0.001
    e1 = [t for t in e0 if t <= 0.5]

    one_hot = np.eye(2**k)

    C = cn
    print('codebook\n', C)

    BER = test.read_ber_file(N, k, 'BER')
    BER = test.saved_results(BER, N, k)
    BLER = test.read_ber_file(N, k, 'BLER')
    BLER = test.saved_results(BLER, N, k, 'BLER')
    print("NN BER")
    t = time.time()
    BER['decoder_cnn'], BLER['decoder_cnn'] = bit_error_rate_NN(
        N, k, C, nb_pkts, e0, e1, channel)
    t = time.time() - t
    print(f"NN time = {t}s ========================")
    print("BER['auto-NN'] = ", BER['decoder_cnn'])
    print("BLER['auto-NN'] = ", BLER['decoder_cnn'])

    if MAP_test:
        print("MAP BER")
        t = time.time()
        BER['MAP'] = utils.bit_error_rate(k, C, nb_pkts, e0, e1)
        t = time.time() - t
        print(f"MAP time = {t}s =======================")
    utils.plot_BSC_BAC(f'BER Coding Mechanism N={N} k={k} - NN', BER, k / N)
    utils.plot_BSC_BAC(f'BLER Coding Mechanism N={N} k={k} - NN', BLER, k / N)
Example #3
def polar_codes_mapping(metric, N=8, k=4, nb_pkts=100, graph='BER', channel='BSC'):
  print('-------------------Polar Codes + Mapping-----------------------------')
  cont = 2
  if channel == 'AWGN':
    design_parameter = np.linspace(0.0, 10, cont)
  else:
    design_parameter = np.linspace(0.0001, 0.1, cont)

  for key in [0.5]:
    e_design = 0.1  # fixed design epsilon; design_parameter above is currently unused in this snippet
    # print('===============Design================',key)
    G, infoBits = polar.polar_generator_matrix(64, k, channel, e_design)

    k = len(G)
    Nt = len(G[1])
    t = int(Nt / N)
    U_k = utils.symbols_generator(k)  # all possible messages
    X_m = utils.symbols_generator(t)  # all possible symbol sequences
    C = utils.matrix_codes(U_k, k, G, Nt)

    nx = 2**t * key
    # print('nx', nx, 't', t)
    x = utils.mapping2(C, X_m, t, nx)
    N = len(x[1])
    if graph == 'BLER':
      metric[f"P({e_design})+M({key})"] = utils.block_error_probability(N,k,x,e0,e1)
    else:
      metric[f"P({e_design})+M({key})"] = utils.bit_error_rate(k,x,nb_pkts,e0,e1)
Example #4
def integrated_scheme(metric,
                      N=8,
                      k=4,
                      nb_pkts=100,
                      graph='BER',
                      channel='BSC'):
    print(
        '-------------------Integrated Scheme Code-----------------------------'
    )
    for key in [0.5]:
        G, infoBits = polar.polar_generator_matrix(64, k, channel, 0.1)
        k = len(G)
        Nt = len(G[1])
        t = int(Nt / N)

        U_k = utils.symbols_generator(k)  # all possible messages
        C = utils.integrated_function(infoBits, U_k, k, Nt, -1)

        X_m = utils.symbols_generator(t)  # all possible symbol sequences
        nx = 2**t * key
        # print('nx', nx, 't', t)
        x = utils.mapping(C, X_m, t, nx)
        N = len(x[1])
        if graph == 'BLER':
            metric[f"Int_P({key})"] = utils.block_error_probability(
                N, k, C, e0, e1)
        else:
            metric[f"Int_P({key})"] = utils.bit_error_rate(
                k, x, nb_pkts, e0, e1)
Example #5
def BER_NN(nb_pkts=100):
    # e0 = np.logspace(-3, 0, 15)
    # e0 = np.linspace(0.001, 0.999, 11)
    e0 = np.concatenate(
        (np.array([0.001]), np.linspace(
            0.01, 0.1, 10, endpoint=False), np.linspace(0.1, 1, 15)),
        axis=0)
    e0[-1] -= 0.001
    e1 = [t for t in e0 if t <= 0.5]

    inter_list = np.tile([0, 0, 0, 1], (2**k, 1))  # same interval indicator for every message
    C = np.round(model_encoder.predict([np.array(u_k),
                                        inter_list])).astype('int')
    print('codebook \n', C)
    print('codebook C is Linear? ', utils.isLinear(C))
    aux = []
    for code in C.tolist():
        if code not in aux:
            aux.append(code)
    nb_repeated_codes = len(C) - len(aux)
    print('+++++++++++++++++++ Repeated Codes NN encoder = ',
          nb_repeated_codes)
    print('dist = ', sum([sum(codeword) for codeword in C]) * 1.0 / (N * 2**k))
    print('***************************************************************')

    if nb_repeated_codes == 0:
        BER = test.read_ber_file(N, k, 'BER')
        BER = test.saved_results(BER, N, k)
        BLER = test.read_ber_file(N, k, 'BLER')
        BLER = test.saved_results(BLER, N, k, 'BLER')
        print("NN BER")
        t = time.time()
        BER['auto-array-inter'], BLER['auto-array-inter'] = bit_error_rate_NN(
            N, k, C, nb_pkts, e0, e1, channel)
        t = time.time() - t
        print(f"NN time = {t}s ========================")

        print("BER['auto-array-inter'] = ", BER['auto-array-inter'])
        print("BLER['auto-array-inter'] = ", BLER['auto-array-inter'])

        if MAP_test:
            print("MAP BER")
            t = time.time()
            BER['MAP'] = utils.bit_error_rate(k, C, 1000, e0, e1)
            t = time.time() - t
            print(f"MAP time = {t}s =======================")

            print("NN BLEP")
            t = time.time()
            BLER['auto_BLEP'] = utils.block_error_probability(N, k, C, e0, e1)
            t = time.time() - t
            print(f"NN time = {t}s ========================")

        utils.plot_BSC_BAC(f'BER Coding Mechanism N={N} k={k} - NN Interval',
                           BER, k / N)
        utils.plot_BSC_BAC(f'BLER Coding Mechanism N={N} k={k} - NN Interval',
                           BLER, k / N)
    else:
        print('Bad codebook: repeated codewords')
Example #6
def uncoded(metric, k=4, nb_pkts=100, graph='BER'):
  print('-------------------Uncoded-----------------------------')
  key = 'Uncoded'
  N = k
  U_k = utils.symbols_generator(k)  # all possible messages
  if graph == 'BLER':
    metric[key] = utils.block_error_probability(N, k, U_k, e0, e1)
  else:
    metric[key] = utils.bit_error_rate(k, U_k, nb_pkts, e0, e1, False)
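# --- Added sanity-check sketch (not from the source) -------------------------
# For uncoded transmission over a BSC with crossover probability eps, the bit
# error rate is eps itself, so a quick Monte Carlo run should track the
# channel parameter directly. Self-contained; all names here are illustrative.
def uncoded_bsc_ber_mc(eps, k=4, nb_pkts=100000, seed=0):
    rng = np.random.default_rng(seed)
    u = rng.integers(0, 2, size=(nb_pkts, k))  # random messages
    flips = rng.random((nb_pkts, k)) < eps     # channel bit flips
    y = (u + flips) % 2                        # received words
    return np.mean(u != y)                     # empirical BER

# print(uncoded_bsc_ber_mc(0.1))  # ~0.1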
Example #7
def flip_codes(metric, N=8, k=4, nb_pkts=100, graph='BER', channel='BSC'):
    print('-------------------Flip Code-----------------------------')
    key = '2,4,6'
    all_C = flip.codebook_generator_k4(N)
    C = all_C[2, 4, 6]
    print('Flip codebook', np.array(C))
    if graph == 'BLER':
        metric[f"Flip({key})"] = utils.block_error_probability(N, k, C, e0, e1)
    else:
        metric[f"Flip({key})"] = utils.bit_error_rate(k, C, nb_pkts, e0, e1)
Example #8
def polar_codes(metric, N=8, k=4, nb_pkts=100, graph='BER', channel='BSC'):
  print('-------------------Polar Code-----------------------------')
  for key in [0.1]:
    G, infoBits = polar.polar_generator_matrix(N, k, channel, key)
    k = len(G)
    N = len(G[1])
    U_k = utils.symbols_generator(k)  # all possible messages
    C = utils.matrix_codes(U_k, k, G, N)
    # print('Polar codebook', np.array(C))
    if graph == 'BLER':
      metric[f"Polar({key})"] = utils.block_error_probability(N, k, C, e0, e1)
    else:
      metric[f"Polar({key})"] = utils.bit_error_rate(k, C, nb_pkts, e0, e1)
    print(metric[f"Polar({key})"])
Example #9
def BER_NN(codebook, nb_pkts=100):
  e0 = np.logspace(-3, 0, 15)
  # e0 = np.linspace(0.001, 1, 11)
  e0[-1] -= 0.001
  e1 = [t for t in e0 if t <= 0.5]
  metric = test.read_ber_file(N, k)
  BER = test.saved_results(metric, N, k)

  BER['dec'] = utils.bit_error_rate_NN(N, k, codebook, nb_pkts, e0, e1, channel)
  print("BER['dec'] = ", BER['dec'])
  if MAP_test:
    BER['MAP'] = utils.bit_error_rate(k, codebook, nb_pkts, e0, e1)

  test.plot_ber(BER, N,k,e0)
Example #10
def BER_NN(codebook, nb_pkts=100):
  # e0 = np.logspace(-3, 0, 15)
  e0 = np.concatenate((np.array([0.001]), np.linspace(0.01, 0.1, 10, endpoint=False), np.linspace(0.1, 1, 15)), axis=0)
  e0[-1] -= 0.001
  e1 = [t for t in e0 if t <= 0.5]
  metric = test.read_ber_file(N, k, 'BER')
  BER = test.saved_results(metric, N, k)

  BER['dec'], a = utils_ML.bit_error_rate_NN_decoder(N, k, codebook, nb_pkts, e0, e1, model_decoder, 'one', train_epsilon)
  print("BER['dec'] = ", BER['dec'])
  if MAP_test:
    BER['MAP'] = utils.bit_error_rate(k, codebook, nb_pkts, e0, e1)

  test.plot_ber(BER, N,k)
Example #11
def bch_codes(metric, N=8, k=4, nb_pkts=100, graph='BER'):
  print('-------------------BCH Code-----------------------------')
  G = mat_gen.matrix_codes(N, k, 'bch')
  if G != []:
    for key in [0]:
      # print('G = ', np.array(G))
      k = len(G)
      N = len(G[1])
      U_k = utils.symbols_generator(k)  # all possible messages
      C = utils.matrix_codes(U_k, k, G, N)
      print('k ', k, 'N ', N)
      if graph == 'BLER':
        metric[f"BCH({key})"] = utils.block_error_probability(N, k, C, e0, e1)
      else:
        metric[f"BCH({key})"] = utils.bit_error_rate(k, C, nb_pkts, e0, e1)
Example #12
def linear_codes(metric, N=8, k=4, nb_pkts=100, graph='BER'):
    print('-------------------Linear Code-----------------------------')
    for key in ['BKLC']:
        print(key)
        G = mat_gen.matrix_codes(N, k, key)
        if G != []:
            # print('G = ', np.array(G))
            k = len(G)
            N = len(G[1])
            U_k = utils.symbols_generator(k)  # all possible messages
            C = utils.matrix_codes(U_k, k, G, N)
            print(np.array(C))
            print('k ', k, 'N ', N)
            if graph == 'BLER':
                metric[key] = utils.block_error_probability(N, k, C, e0, e1)
            else:
                metric[key] = utils.bit_error_rate(k, C, nb_pkts, e0, e1)
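# --- Added reference sketch (not from the source) ----------------------------
# utils.matrix_codes is not shown in these snippets; for a linear code it
# presumably enumerates the codebook {u.G mod 2} over all messages u. A
# self-contained sketch of that standard construction:
from itertools import product

def matrix_codes_sketch(G):
    G = np.asarray(G)
    U = np.array(list(product([0, 1], repeat=G.shape[0])))  # all 2**k messages
    return (U @ G) % 2                                      # one codeword per row

# toy generator matrix (k=2, N=4), illustration only:
# print(matrix_codes_sketch([[1, 0, 1, 1], [0, 1, 0, 1]]))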
Example #13
def BER_NN(nb_pkts=100):
  # e0 = np.logspace(-3, 0, 15)
  # e0 = np.linspace(0.001, 0.999, 11)
  e0 = np.concatenate((np.linspace(0.001, 0.2, 10, endpoint=False), np.linspace(0.2, 1, 8)), axis=0)
  e0[-1] -= 0.001
  e1 = [t for t in e0 if t <= 0.5]

  one_hot = np.eye(2 ** k)

  C = np.round(model_encoder.predict(one_hot)).astype('int')
  print('codebook\n', C)
  aux = []
  for code in C.tolist():
    if code not in aux:
      aux.append(code)
  nb_repeated_codes = len(C) - len(aux)
  print('+++++++++++++++++++ Repeated Codes NN encoder = ', nb_repeated_codes)
  print('dist = ', sum([sum(codeword) for codeword in C]) * 1.0 / (N * 2 ** k))
  print('***************************************************************')

  if nb_repeated_codes == 0:
    BER = test.read_ber_file(N, k, 'BER')
    BER = test.saved_results(BER, N, k)
    BLER = test.read_ber_file(N, k, 'BLER')
    BLER = test.saved_results(BLER, N, k, 'BLER')
    print("NN BER")
    t = time.time()
    BER['auto_non_inter_cnn'], BLER['auto_non_inter_cnn'] = bit_error_rate_NN(N, k, C, nb_pkts, e0, e1, channel)
    t = time.time() - t
    print(f"NN time = {t}s ========================")
    print("BER['auto_non_inter_cnn'] = ", BER['auto_non_inter_cnn'])
    print("BLER['auto_non_inter_cnn'] = ", BLER['auto_non_inter_cnn'])

    if MAP_test:
      print("MAP BER")
      t = time.time()
      BER['MAP'] = utils.bit_error_rate(k, C, nb_pkts, e0, e1)
      t = time.time()-t
      print(f"MAP time = {t}s =======================")
    utils.plot_BSC_BAC(f'BER Coding Mechanism N={N} k={k} - NN', BER, k / N)
    utils.plot_BSC_BAC(f'BLER Coding Mechanism N={N} k={k} - NN', BLER, k / N)
  else:
    print('Bad codebook: repeated codewords')
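# --- Added equivalent (not from the source) ----------------------------------
# The repeated-codeword scan above is quadratic in the codebook size; an
# equivalent numpy version, assuming C is the 2**k x N integer codebook:
def count_repeated_codewords(C):
    C = np.asarray(C)
    return len(C) - len(np.unique(C, axis=0))  # total rows minus distinct rows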
Example #14
def linear_codes_mapping(metric, N=8, k=4, nb_pkts=100, graph='BER'):
  print('-------------------Linear Code + Mapping-----------------------------')
  G = mat_gen.matrix_codes(64, k, 'linear')
  if G != []:
    for key in [0.55]:
      k = len(G)
      Nt = len(G[1])
      t = int(Nt / N)
      U_k = utils.symbols_generator(k)  # all possible messages
      X_m = utils.symbols_generator(t)  # all possible symbol sequences
      C = utils.matrix_codes(U_k, k, G, Nt)
      nx = 2**t * key
      # print('nx', nx, 't', t)
      x = utils.mapping(C, X_m, t, nx)  # codebook after mapping
      N = len(x[1])

      if graph == 'BLER':
        metric[f"L+M({key})"] = utils.block_error_probability(N, k, x, e0, e1)
      else:
        metric[f"L+M({key})"] = utils.bit_error_rate(k, x, nb_pkts, e0, e1)
Example #15
# Pretraining
print("----------------------------------Pretraining------------------------------------------")
model_encoder = encoder_generator(N, k)
meta_model = meta_model_generator(k, model_encoder, False, pretrain_epsilon)
### Compile our models
meta_model.compile(loss=loss, optimizer=optimizer, metrics=[utils_ML.ber_metric, lr_metric])
### Fit the model
history = meta_model.fit(U_k, U_k, epochs=epoch_pretrain, verbose=verbose, shuffle=False, batch_size=batch_size, callbacks=cbks)

loss_values = history.history['loss']
metric_values = history.history['ber_metric']

C = np.round(model_encoder.predict(u_k)).astype('int')
print('codebook C is Linear? ', utils.isLinear(C))
BER['NN_Encoder-MAP-lambda'] = utils.bit_error_rate(k, C, nb_pkts, e0, e1, coded=True)

for i in range(2**k):
  u = u_k[i]
  x = model_encoder.predict([u])
  # print('******\n',u,'\n',x ,'\n',utils.MAP_BAC_vector(x,k,C, 0.05, 0.0002))

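# --- Added reference sketch (not from the source) ----------------------------
# With equiprobable messages, MAP decoding equals ML decoding; on a BSC with
# eps < 0.5 that reduces to minimum Hamming distance. utils.MAP_BAC_vector in
# the commented line above presumably generalizes this to the asymmetric
# channel with two crossover probabilities. Minimal sketch:
def map_decode_bsc(y, C):
    C = np.asarray(C)                           # codebook, one codeword per row
    dists = np.sum(C != np.asarray(y), axis=1)  # Hamming distance to y
    return int(np.argmin(dists))                # index of the most likely message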
########################### Plotting ###############################################################################################
# Plot the loss function values calculated during training
fig = plt.figure(figsize=(20, 10))
title = f'N={N} k={k} {length_training} - NN Array-MAP-lambda'
plt.semilogy(loss_values, alpha=0.8, color='brown', linewidth=0.15, label='Loss')
filter_size = 100
plt.semilogy(utils_ML.smooth(loss_values, filter_size)[filter_size - 1:], color='brown')
# Plot the BER metric calculated in training
plt.semilogy(metric_values, linestyle=':', alpha=0.8, color='brown', linewidth=0.45)
Example #16
    ### Compile the encoder (call head lost in extraction; loss/optimizer assumed as in the compile calls above)
    model_encoder.compile(loss=loss,
                          optimizer=optimizer,
                          metrics=[utils_ML.ber_metric, lr_metric])
    ### Fit the model
    history = model_encoder.fit(U_k,
                                c,
                                epochs=300,
                                verbose=verbose,
                                shuffle=False,
                                batch_size=batch_size)
    prediction = model_encoder.predict(U_k)
    C = np.round(prediction).astype('int')
    print('***\n Codebook final\n', prediction, '\n', C, "\n***")
    if MAP_test:
        BER['Polar-MAP'] = utils.bit_error_rate(k,
                                                cn,
                                                nb_pkts,
                                                e0,
                                                e1,
                                                coded=True)
        print(BER['Polar-MAP'])

print(
    "----------------------------------Fine tuning Encoder ------------------------------------------"
)
meta_model = meta_model_generator(k, model_encoder, False, pretrain_epsilon)
### Compile our models
meta_model.compile(loss=loss, optimizer=optimizer, metrics=[lr_metric])
### Fit the model
history = meta_model.fit(U_k,
                         One_hot,
                         epochs=epoch_pretrain,
                         verbose=verbose)  # remaining fit arguments truncated in the source
Example #17
    metric_values_4 += history.history['decoder_model_4_ber_metric']
    metric_values_5 += history.history['decoder_model_5_ber_metric']

    if a % 2 == 1 or a == iterations:
        C = np.round(model_encoder.predict(u_k)).astype('int')
        print('codebook C is Linear? ', utils.isLinear(C))
        BER[f"auto-array-array_alt-{a//2+1}"], BLER[
            f"auto-array-array_alt-{a//2+1}"] = utils_ML.bit_error_rate_NN(
                N, k, C, nb_pkts, e0, e1, model_decoder, 'array')

    lr = lr * decay**epoch_int  # useful when using the callback to reduce the learning rate

if MAP_test:
    BER['NN-MAP'] = utils.bit_error_rate(k,
                                         C,
                                         nb_pkts // 2,
                                         e0,
                                         e1,
                                         coded=True)

print("The model is ready to be used...")

# model_decoder.save(f"./autoencoder/model_decoder.h5")
# model_encoder.save(f"./autoencoder/model_encoder.h5")

#######################Plotting ###################################################################################
## Plot the loss-function values for the different epsilons, recorded during training
fig = plt.figure(figsize=(20, 10))
title = f'N={N} k={k} {length_training} - NN Array_array_fine-decoder'
## Commented out to avoid overloading the loss figure
# plt.semilogy(loss_values  , alpha=0.8 , color='brown',linewidth=0.15)
# plt.semilogy(loss_values_1, alpha=0.8, color='blue',linewidth=0.15)