def BER_NN(nb_pkts=100):
    """Evaluate BER/BLER of the NN decoder over a sweep of channel
    parameters and plot the resulting curves.

    Args:
        nb_pkts: number of packets simulated per channel-parameter point.

    Relies on module-level globals: np, time, test, utils,
    bit_error_rate_NN, N, k, cn (the codebook), channel and MAP_test.
    """
    # e0 = np.logspace(-3, 0, 15)
    # e0 = np.linspace(0.001, 0.999, 11)
    # Dense grid near 0 (where the curves change fastest), coarser up to 1.
    e0 = np.concatenate(
        (np.linspace(0.001, 0.2, 10, endpoint=False), np.linspace(0.2, 1, 8)),
        axis=0)
    e0[-1] -= 0.001  # keep the last point strictly below 1
    e1 = [t for t in e0 if t <= 0.5]  # second parameter only swept up to 0.5

    # Fix: removed dead local `one_hot = np.eye(2**k)` — the codebook is
    # taken from the module-level variable `cn`, the one-hot matrix was
    # never used.
    C = cn
    print('codebook\n', C)

    # Load previously saved reference results so new curves are plotted
    # alongside them.
    BER = test.read_ber_file(N, k, 'BER')
    BER = test.saved_results(BER, N, k)
    BLER = test.read_ber_file(N, k, 'BLER')
    BLER = test.saved_results(BLER, N, k, 'BLER')

    print("NN BER")
    t = time.time()
    BER['decoder_cnn'], BLER['decoder_cnn'] = bit_error_rate_NN(
        N, k, C, nb_pkts, e0, e1, channel)
    t = time.time() - t
    print(f"NN time = {t}s ========================")
    print("BER['auto-NN'] = ", BER['decoder_cnn'])
    print("BLER['auto-NN'] = ", BLER['decoder_cnn'])

    if MAP_test:
        # Optional MAP-decoder baseline for comparison.
        print("MAP BER")
        t = time.time()
        BER['MAP'] = utils.bit_error_rate(k, C, nb_pkts, e0, e1)
        t = time.time() - t
        print(f"MAP time = {t}s =======================")

    utils.plot_BSC_BAC(f'BER Coding Mechanism N={N} k={k} - NN', BER, k / N)
    utils.plot_BSC_BAC(f'BLER Coding Mechanism N={N} k={k} - NN', BLER, k / N)
def BER_NN(nb_pkts=100):
    """Compute and plot BER/BLER curves for the interval-trained NN
    autoencoder, provided its learned codebook contains no duplicated
    codewords.

    Args:
        nb_pkts: number of packets simulated per channel-parameter point.

    Relies on module-level globals: np, time, test, utils, model_encoder,
    u_k, bit_error_rate_NN, N, k, channel and MAP_test.
    """
    # e0 = np.logspace(-3, 0, 15)
    # e0 = np.linspace(0.001, 0.999, 11)
    e0 = np.concatenate(
        (np.array([0.001]),
         np.linspace(0.01, 0.1, 10, endpoint=False),
         np.linspace(0.1, 1, 15)),
        axis=0)
    e0[len(e0) - 1] = e0[len(e0) - 1] - 0.001  # keep last point below 1.0
    e1 = [t for t in e0 if t <= 0.5]

    # Encode every message together with the constant interval vector.
    inter_list = np.array(np.tile([0, 0, 0, 1], (2**k, 1)))
    C = np.round(model_encoder.predict([np.array(u_k), inter_list])).astype('int')
    print('codebook \n', C)
    print('codebook C is Linear? ', utils.isLinear(C))

    # Count duplicated codewords via a set of hashable row tuples.
    seen = set()
    for codeword in C.tolist():
        seen.add(tuple(codeword))
    nb_repeated_codes = len(C) - len(seen)
    print('+++++++++++++++++++ Repeated Codes NN encoder = ', nb_repeated_codes)
    print('dist = ', sum([sum(codeword) for codeword in C]) * 1.0 / (N * 2**k))
    print('***************************************************************')

    # Guard clause: a codebook with repeated codewords is unusable.
    if nb_repeated_codes != 0:
        print('Bad codebook repeated codewords')
        return

    BER = test.read_ber_file(N, k, 'BER')
    BER = test.saved_results(BER, N, k)
    BLER = test.read_ber_file(N, k, 'BLER')
    BLER = test.saved_results(BLER, N, k, 'BLER')

    print("NN BER")
    start = time.time()
    BER['auto-array-inter'], BLER['auto-array-inter'] = bit_error_rate_NN(
        N, k, C, nb_pkts, e0, e1, channel)
    elapsed = time.time() - start
    print(f"NN time = {elapsed}s ========================")
    print("BER['auto-array-inter'] = ", BER['auto-array-inter'])
    print("BLER['auto-array-inter'] = ", BLER['auto-array-inter'])

    if MAP_test:
        # MAP baseline uses a fixed 1000-packet budget.
        print("MAP BER")
        start = time.time()
        BER['MAP'] = utils.bit_error_rate(k, C, 1000, e0, e1)
        elapsed = time.time() - start
        print(f"MAP time = {elapsed}s =======================")

    # Analytic block-error probability of the learned codebook.
    print("NN BLEP")
    start = time.time()
    BLER['auto_BLEP'] = utils.block_error_probability(N, k, C, e0, e1)
    elapsed = time.time() - start
    print(f"NN time = {elapsed}s ========================")

    utils.plot_BSC_BAC(f'BER Coding Mechanism N={N} k={k} - NN Interval', BER, k / N)
    utils.plot_BSC_BAC(f'BLER Coding Mechanism N={N} k={k} - NN Interval', BLER, k / N)
def BER_NN(nb_pkts=100):
    """Compute and plot BER/BLER curves for the non-interval CNN
    autoencoder, skipping evaluation when the learned codebook has
    duplicated codewords.

    Args:
        nb_pkts: number of packets simulated per channel-parameter point.

    Relies on module-level globals: np, time, test, utils, model_encoder,
    bit_error_rate_NN, N, k, channel and MAP_test.
    """
    # e0 = np.logspace(-3, 0, 15)
    # e0 = np.linspace(0.001, 0.999, 11)
    e0 = np.concatenate(
        (np.linspace(0.001, 0.2, 10, endpoint=False), np.linspace(0.2, 1, 8)),
        axis=0)
    e0[len(e0) - 1] = e0[len(e0) - 1] - 0.001  # keep last point below 1.0
    e1 = [t for t in e0 if t <= 0.5]

    # The encoder maps one-hot message vectors to (rounded) codewords.
    one_hot = np.eye(2 ** k)
    C = np.round(model_encoder.predict(one_hot)).astype('int')
    print('codebook\n', C)

    # Collect the distinct codewords to detect duplicates.
    distinct = []
    for codeword in C.tolist():
        if codeword not in distinct:
            distinct.append(codeword)
    nb_repeated_codes = len(C) - len(distinct)
    print('+++++++++++++++++++ Repeated Codes NN encoder = ', nb_repeated_codes)
    print('dist = ', sum([sum(codeword) for codeword in C]) * 1.0 / (N * 2 ** k))
    print('***************************************************************')

    if nb_repeated_codes != 0:
        # Duplicated codewords make the codebook non-decodable.
        print('Bad codebook repeated codewords')
        return

    BER = test.read_ber_file(N, k, 'BER')
    BER = test.saved_results(BER, N, k)
    BLER = test.read_ber_file(N, k, 'BLER')
    BLER = test.saved_results(BLER, N, k, 'BLER')

    print("NN BER")
    start = time.time()
    BER['auto_non_inter_cnn'], BLER['auto_non_inter_cnn'] = bit_error_rate_NN(
        N, k, C, nb_pkts, e0, e1, channel)
    elapsed = time.time() - start
    print(f"NN time = {elapsed}s ========================")
    print("BER['auto-NN'] = ", BER['auto_non_inter_cnn'])
    print("BLER['auto-NN'] = ", BLER['auto_non_inter_cnn'])

    if MAP_test:
        print("MAP BER")
        start = time.time()
        BER['MAP'] = utils.bit_error_rate(k, C, nb_pkts, e0, e1)
        elapsed = time.time() - start
        print(f"MAP time = {elapsed}s =======================")

    utils.plot_BSC_BAC(f'BER Coding Mechanism N={N} k={k} - NN', BER, k / N)
    utils.plot_BSC_BAC(f'BLER Coding Mechanism N={N} k={k} - NN', BLER, k / N)
# Frame-error-rate sweep: for each polar-code design parameter, build the
# generator matrix and codebook, then evaluate the exact FER over a grid of
# BAC parameters (ep0, ep1). Uses module-level globals: np, utils, polar,
# bac, design_parameter, legends, N, k and channel_type.
FER = {}
for e_design in design_parameter:
    print('===============================', e_design)
    legends.append(f"p = {e_design:.2f}")
    G, infoBits = polar.polar_generator_matrix(N, k, channel_type, e_design)
    # print(G)
    # NOTE(review): k/N are re-derived from the generator matrix
    # (rows / row length) and shadow the outer values — presumably
    # intentional; verify against the caller.
    k = len(G)
    N = len(G[1])
    U_k = bac.symbols_generator(k)  # all possible messages
    Y_n = bac.symbols_generator(N)  # all possible symbol sequences
    C = bac.matrix_codes(U_k, k, G, N)
    e0 = np.linspace(0.1, 0.9, 9)
    e1 = np.linspace(0.1, 0.5, 5)
    # print("0.00", '|', ["{:.4f}".format(ep1) for ep1 in e1])
    # print('------------------------------------------------------------------')
    error_probability = {}
    for ep0 in e0:
        fer_row = []
        for ep1 in e1:
            # Only valid channel pairs: ep0 + ep1 <= 1 and ep1 <= ep0.
            if ep1 + ep0 > 1 or ep1 > ep0:
                continue
            # Evaluate only on the diagonal (BSC case) and at ep1 == 0.1.
            if not (ep1 == ep0 or ep1 == 0.1):
                continue
            p_success = bac.succes_probability(Y_n, C, U_k, ep0, ep1)
            fer_row.append(1 - p_success)
        error_probability[ep0] = fer_row
        # print("{:.2f}".format(ep0), '|', ["{:.4f}".format(a) for a in fer_row])
    FER[e_design] = error_probability
utils.plot_BSC_BAC(f'FER Polar Codes N={N} k={k}', e0, FER, legends)
# NOTE(review): this chunk is the tail of a training script
# (autoencoder_array_interval_alt-training_loop.py) that was
# whitespace-mangled onto a single line. The statement nesting cannot be
# recovered with certainty from here (e.g. whether
# `loss_dict[parameter] = loss_values` sits inside an enclosing training
# loop that starts before this view), so the code is left byte-identical.
# What it appears to do: optionally (when argv[5] == 'BER') rebuild the
# codebook from model_encoder using a fixed interval vector [0, 0, 0, 1]
# and evaluate BER/BLER via bit_error_rate_NN; record the per-parameter
# loss curve; plot losses; then plot BER/BLER and show the figures.
# TODO(review): restore the original indentation from version control
# before editing this chunk further.
### Model print summary # model_encoder.summary() # model_decoder.summary() # meta_model.summary() ### save Model # model_decoder.save(f"./autoencoder/model_decoder_{channel}_rep-{rep}_epsilon-{train_epsilon}_layerSize_{S}_epoch-{epoch}_k_{k}_N-{N}.h5") # model_encoder.save(f"./autoencoder/model_encoder_{channel}_rep-{rep}_epsilon-{train_epsilon}_layerSize_{S}_epoch-{epoch}_k_{k}_N-{N}.h5") # model_decoder.save(f"./autoencoder/model_decoder.h5") # model_encoder.save(f"./autoencoder/model_encoder.h5") if len(sys.argv) > 5: if sys.argv[5] == 'BER': nb_pkts = int(sys.argv[6]) inter_list = np.array(np.tile([0, 0, 0, 1], (2**k, 1))) C = np.round(model_encoder.predict([np.array(u_k), inter_list])).astype('int') BER[f"auto-array-inter_alt-{parameter}"], BLER[ f"auto-array-inter_alt-{parameter}"] = bit_error_rate_NN( N, k, C, nb_pkts, e0, e1, pretrain_epsilon) loss_dict[parameter] = loss_values plot_loss(loss_dict, accuracy, val_accuracy) if len(sys.argv) > 5: utils.plot_BSC_BAC(f'BER N={N} k={k} - NN Array_Interval_Alternate', BER, k / N) utils.plot_BSC_BAC(f'BLER N={N} k={k} - NN Array_Interval_Alternate', BLER, k / N) plt.show() # \Python3\python.exe autoencoder_array_interval_alt-training_loop.py BSC 16 8 10 BER 100000
def plot_ber(metric, N=8, k=4, graph='BER'):
    """Plot a BER/BLER result dictionary via utils.plot_BSC_BAC.

    Args:
        metric: dict of curves to plot, keyed by decoder/scheme name.
        N: codeword length (default 8).
        k: message length (default 4); k/N is passed as the code rate.
        graph: label used in the plot title, e.g. 'BER' or 'BLER'.
    """
    title = f'{graph} Coding Mechanism N={N} k={k}'
    utils.plot_BSC_BAC(title, metric, k / N)
# NOTE(review): fragment of a Monte-Carlo BER/SER simulation for polar
# codes over a BAC. The enclosing loops (over e_design, ep0, ep1 — which
# define B, G, C, U_k, row, ber_row, bep, ber, legends, ...) begin before
# this chunk, and the original indentation was lost when the source was
# collapsed onto one line, so the code is left byte-identical.
# Per packet: draw k random bits, FEC-encode them, pass the codeword
# through the BAC, count symbol errors against the sent codeword, MAP-decode
# and count bit errors; then normalise by the packet count B and record the
# rates before plotting. The inline French comments translate as:
# "Bits à envoyer" = bits to send, "bits encodés" = encoded bits,
# "symboles reçus" = received symbols, "Detecteur MAP" = MAP detector,
# "Calcul de ... avec MAP" = computation of ... with MAP.
# NOTE(review): ber_tmp (bit errors over k info bits) is divided by N while
# ser_tmp (symbol errors over N symbols) is divided by k — the two
# normalisation factors look swapped; verify against the definitions of
# bac.NbOfErrors before changing anything.
ber_tmp = 0 # for bit error rate ser_tmp = 0 #for symbol error rate for t in range(B): u = [rd.randint(0, 1) for i in range(k)] # Bits à envoyer x = bac.FEC_encoder(u, G) # bits encodés y_bac = bac.BAC_channel(x, ep0, ep1) # symboles reçus ser_tmp += bac.NbOfErrors(x, y_bac) u_map_bac = U_k[bac.MAP_BAC(y_bac, k, C, ep0, ep1)] # Detecteur MAP ber_tmp += bac.NbOfErrors( u, u_map_bac) # Calcul de bit error rate avec MAP ber_tmp = ber_tmp / (N * 1.0 * B ) # Calcul de bit error rate avec MAP ser_tmp = ser_tmp / (k * 1.0 * B ) # Calcul de symbol error rate avec MAP ber_row.append(ber_tmp) bep[ep0] = row ber[ep0] = ber_row # print("{:.2f}".format(ep0), '|', ["{:.4f}".format(a) for a in row]) print("{:.2f}".format(ep0), '|', ["{:.4f}".format(a) for a in ber_row]) BER[e_design] = ber utils.plot_BSC_BAC(f'BER Polar Codes - Com chain N={N} k={k}', e0, BER, legends)