Example #1
def BER_NN(nb_pkts=100):
    # e0 = np.logspace(-3, 0, 15)
    # e0 = np.linspace(0.001, 0.999, 11)
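    # e0: grid of channel error probabilities, dense near 0 and coarser toward 1;
    # e1 keeps only the values <= 0.5, presumably the second crossover probability of the BAC.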
    e0 = np.concatenate(
        (np.linspace(0.001, 0.2, 10, endpoint=False), np.linspace(0.2, 1, 8)),
        axis=0)
    e0[len(e0) - 1] = e0[len(e0) - 1] - 0.001
    e1 = [t for t in e0 if t <= 0.5]

    one_hot = np.eye(2**k)

    C = cn
    print('codebook\n', C)

    BER = test.read_ber_file(N, k, 'BER')
    BER = test.saved_results(BER, N, k)
    BLER = test.read_ber_file(N, k, 'BLER')
    BLER = test.saved_results(BLER, N, k, 'BLER')
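    # Load previously saved BER/BLER curves (presumably from earlier runs) so the new NN results can be compared on the same plot.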
    print("NN BER")
    t = time.time()
    BER['decoder_cnn'], BLER['decoder_cnn'] = bit_error_rate_NN(
        N, k, C, nb_pkts, e0, e1, channel)
    t = time.time() - t
    print(f"NN time = {t}s ========================")
    print("BER['auto-NN'] = ", BER['decoder_cnn'])
    print("BLER['auto-NN'] = ", BLER['decoder_cnn'])

    if MAP_test:
        print("MAP BER")
        t = time.time()
        BER['MAP'] = utils.bit_error_rate(k, C, nb_pkts, e0, e1)
        t = time.time() - t
        print(f"MAP time = {t}s =======================")
    utils.plot_BSC_BAC(f'BER Coding Mechanism N={N} k={k} - NN', BER, k / N)
    utils.plot_BSC_BAC(f'BLER Coding Mechanism N={N} k={k} - NN', BLER, k / N)
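The helpers called in these examples (utils.bit_error_rate, bit_error_rate_NN, test.read_ber_file, ...) are not shown on this page. As a rough illustration of the kind of quantity they estimate, here is a minimal, self-contained sketch of a Monte-Carlo BER estimate for a fixed codebook over a BSC with minimum-distance decoding; the function name estimate_ber_bsc and its exact arguments are illustrative assumptions, not the repository's API.

import numpy as np

def estimate_ber_bsc(codebook, k, epsilons, nb_pkts=100, seed=0):
    # Monte-Carlo bit-error rate of minimum-distance decoding over a BSC.
    # codebook: (2**k, N) binary array whose row i is the codeword of message i.
    # Illustrative sketch only; not the repository's utils.bit_error_rate.
    rng = np.random.default_rng(seed)
    codebook = np.asarray(codebook)
    n_msg, n = codebook.shape
    ber = {}
    for eps in epsilons:
        bit_errors = 0
        for _ in range(nb_pkts):
            msg = int(rng.integers(n_msg))        # pick a random message index
            noise = rng.random(n) < eps           # BSC flips each bit with prob. eps
            received = codebook[msg] ^ noise      # transmit the codeword through the BSC
            # minimum Hamming distance decoding (MAP for a BSC with eps < 0.5)
            decoded = int(np.argmin(np.sum(codebook != received, axis=1)))
            # count differing information bits between sent and decoded message
            bit_errors += bin(msg ^ decoded).count('1')
        ber[eps] = bit_errors / (nb_pkts * k)
    return ber

# e.g. ber = estimate_ber_bsc(C, k, e1, nb_pkts=1000)
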
Example #2
def BER_NN(nb_pkts=100):
    # e0 = np.logspace(-3, 0, 15)
    # e0 = np.linspace(0.001, 0.999, 11)
    e0 = np.concatenate(
        (np.array([0.001]), np.linspace(
            0.01, 0.1, 10, endpoint=False), np.linspace(0.1, 1, 15)),
        axis=0)
    e0[len(e0) - 1] = e0[len(e0) - 1] - 0.001
    e1 = [t for t in e0 if t <= 0.5]

    inter_list = np.array(np.tile([0, 0, 0, 1], (2**k, 1)))
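    # NN encoder evaluated on every message u_k together with the auxiliary inter_list
    # input; the outputs are rounded to {0, 1} to obtain the candidate codebook C.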
    C = np.round(model_encoder.predict([np.array(u_k),
                                        inter_list])).astype('int')
    print('codebook \n', C)
    print('codebook C is Linear? ', utils.isLinear(C))
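    # The encoder must map distinct messages to distinct codewords: count repeated rows of C.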
    aux = []
    for code in C.tolist():
        if code not in aux:
            aux.append(code)
    nb_repeated_codes = len(C) - len(aux)
    print('+++++++++++++++++++ Repeated Codes NN encoder = ',
          nb_repeated_codes)
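    # 'dist' below is the fraction of ones in the codebook (average codeword density).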
    print('dist = ', sum([sum(codeword) for codeword in C]) * 1.0 / (N * 2**k))
    print('***************************************************************')

    if nb_repeated_codes == 0:
        BER = test.read_ber_file(N, k, 'BER')
        BER = test.saved_results(BER, N, k)
        BLER = test.read_ber_file(N, k, 'BLER')
        BLER = test.saved_results(BLER, N, k, 'BLER')
        print("NN BER")
        t = time.time()
        BER['auto-array-inter'], BLER['auto-array-inter'] = bit_error_rate_NN(
            N, k, C, nb_pkts, e0, e1, channel)
        t = time.time() - t
        print(f"NN time = {t}s ========================")

        print("BER['auto-array-inter'] = ", BER['auto-array-inter'])
        print("BLER['auto-array-inter'] = ", BLER['auto-array-inter'])

        if MAP_test:
            print("MAP BER")
            t = time.time()
            BER['MAP'] = utils.bit_error_rate(k, C, 1000, e0, e1)
            t = time.time() - t
            print(f"MAP time = {t}s =======================")

            print("NN BLEP")
            t = time.time()
            BLER['auto_BLEP'] = utils.block_error_probability(N, k, C, e0, e1)
            t = time.time() - t
            print(f"NN time = {t}s ========================")

        utils.plot_BSC_BAC(f'BER Coding Mechanism N={N} k={k} - NN Interval',
                           BER, k / N)
        utils.plot_BSC_BAC(f'BLER Coding Mechanism N={N} k={k} - NN Interval',
                           BLER, k / N)
    else:
        print('Bad codebook repeated codewords')
Example #3
def BER_NN(nb_pkts=100):
  # e0 = np.logspace(-3, 0, 15)
  # e0 = np.linspace(0.001, 0.999, 11)
  e0 = np.concatenate((np.linspace(0.001, 0.2, 10, endpoint=False), np.linspace(0.2, 1, 8)), axis=0)
  e0[len(e0) - 1] = e0[len(e0) - 1] - 0.001
  e1 = [t for t in e0 if t <= 0.5]

  one_hot = np.eye(2 ** k)
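  # Same repeated-codeword check as above, but this encoder takes only the one-hot messages (no auxiliary interval input).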

  C = np.round(model_encoder.predict(one_hot)).astype('int')
  print('codebook\n', C)
  aux = []
  for code in C.tolist():
    if code not in aux:
      aux.append(code)
  nb_repeated_codes = len(C) - len(aux)
  print('+++++++++++++++++++ Repeated Codes NN encoder = ', nb_repeated_codes)
  print('dist = ', sum([sum(codeword) for codeword in C]) * 1.0 / (N * 2 ** k))
  print('***************************************************************')

  if nb_repeated_codes == 0:
    BER = test.read_ber_file(N, k, 'BER')
    BER = test.saved_results(BER, N, k)
    BLER = test.read_ber_file(N, k, 'BLER')
    BLER = test.saved_results(BLER, N, k, 'BLER')
    print("NN BER")
    t = time.time()
    BER['auto_non_inter_cnn'], BLER['auto_non_inter_cnn'] = bit_error_rate_NN(N, k, C, nb_pkts, e0, e1, channel)
    t = time.time() - t
    print(f"NN time = {t}s ========================")
    print("BER['auto_non_inter_cnn'] = ", BER['auto_non_inter_cnn'])
    print("BLER['auto_non_inter_cnn'] = ", BLER['auto_non_inter_cnn'])

    if MAP_test:
      print("MAP BER")
      t = time.time()
      BER['MAP'] = utils.bit_error_rate(k, C, nb_pkts, e0, e1)
      t = time.time() - t
      print(f"MAP time = {t}s =======================")
    utils.plot_BSC_BAC(f'BER Coding Mechanism N={N} k={k} - NN', BER, k / N)
    utils.plot_BSC_BAC(f'BLER Coding Mechanism N={N} k={k} - NN', BLER, k / N)
  else:
    print('Bad codebook repeated codewords')
Example #4
def BER_NN(codebook, nb_pkts=100):
  e0 = np.logspace(-3, 0, 15)
  # e0 = np.linspace(0.001, 1, 11)
  e0[len(e0) - 1] = e0[len(e0) - 1] - 0.001
  e1 = [t for t in e0 if t <= 0.5]
  metric = test.read_ber_file(N, k)
  BER = test.saved_results(metric, N, k)

  BER['dec'] = utils.bit_error_rate_NN(N, k, codebook, nb_pkts, e0, e1, channel)
  print("BER['dec'] = ", BER['dec'])
  if MAP_test:
    BER['MAP'] = utils.bit_error_rate(k, codebook, nb_pkts, e0, e1)

  test.plot_ber(BER, N, k, e0)
Example #5
def BER_NN(codebook, nb_pkts=100):
  # e0 = np.logspace(-3, 0, 15)
  e0 = np.concatenate((np.array([0.001]), np.linspace(0.01, 0.1, 10, endpoint=False), np.linspace(0.1, 1, 15)), axis=0)
  e0[len(e0) - 1] = e0[len(e0) - 1] - 0.001
  e1 = [t for t in e0 if t <= 0.5]
  metric = test.read_ber_file(N, k, 'BER')
  BER = test.saved_results(metric, N, k)

  # Evaluate only the NN decoder (model_decoder) on the given codebook over the (e0, e1) grid.
  BER['dec'], a = utils_ML.bit_error_rate_NN_decoder(N, k, codebook, nb_pkts, e0, e1, model_decoder, 'one', train_epsilon)
  print("BER['dec'] = ", BER['dec'])
  if MAP_test:
    BER['MAP'] = utils.bit_error_rate(k, codebook, nb_pkts, e0, e1)

  test.plot_ber(BER, N, k)
e0 = np.concatenate(
    (np.array([0.001]), np.linspace(0.01, 0.1, 10,
                                    endpoint=False), np.linspace(0.1, 1, 15)),
    axis=0)
e0[len(e0) - 1] = e0[len(e0) - 1] - 0.001
e1 = [t for t in e0 if t <= 0.5]

epoch_pretrain = 1000
epoch_encoder = 150
epoch_decoder = 300
pretrain_epsilon = 0.03
encoder_epsilon = 0.03
decoder_epsilon = 0.03
pretraining = True
fine_tunning = True

BER = test.read_ber_file(N, k, 'BER')
BER = test.saved_results(BER, N, k)
BLER = test.read_ber_file(N, k, 'BLER')
BLER = test.saved_results(BLER, N, k, 'BLER')

loss_dict = {}
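# Sweep the design epsilon shared by the pretraining, encoder, and decoder fine-tuning stages.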
for parameter in np.linspace(0.03, 0.12, 4, endpoint=True):
    # for parameter in [0.0, 0.05]:
    # pretraining
    print(
        f"****************************Parameter={parameter}********************************"
    )
    pretrain_epsilon = parameter
    encoder_epsilon = parameter
    decoder_epsilon = parameter
    if pretraining: