Example #1
import torch
from tqdm import tqdm

# NeuralTap, offline, and the config names below (est_path, eq_path,
# eval_snr, eval_size, batch_size, pream_size, eval_tap_size,
# payload_size) come from the surrounding module


def eval_e2e():
    # restore the trained estimator and equalizer weights and wire them
    # into the end-to-end model
    estimator.load_state_dict(torch.load(est_path))
    equalizer.load_state_dict(torch.load(eq_path))
    model = NeuralTap(estimator, equalizer)
    bers = []

    # the number of evaluation batches is the same at every SNR
    batches = eval_size // batch_size

    # sweep the evaluation SNR grid, averaging BER over all batches
    for snr in eval_snr:
        ber = 0
        for _ in tqdm(range(batches)):
            pream, pream_recv, payload_recv, label = offline.gen_ktap(
                batch_size, pream_size, eval_tap_size, snr, payload_size)

            # update the channel estimate from the preamble, then recover
            # symbol estimates for the received payload
            model.update_preamble(pream, pream_recv)
            payload_est = model.estimate(payload_recv)

            # demodulate to bit labels and accumulate the bit error rate
            label_est = offline.demod_qpsk(payload_est)
            ber += offline.bit_error_rate(label_est, label, 2)
        bers.append(ber / batches)

    return bers
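
For context, a minimal usage sketch of the function above; `eval_snr` is assumed to be an iterable of SNR values in dB, defined at module level as the function implies:

if __name__ == "__main__":
    bers = eval_e2e()
    for snr, ber in zip(eval_snr, bers):
        print("SNR {} dB: BER {:.3e}".format(snr, ber))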
Example #2
@param weights current lms FIR weights
@return nth-step lms output
"""


def lms(symbol, weights):
    # left as a stub in the source; the nth-step output of an LMS FIR
    # filter is the inner product of the current weights with the input
    # window (convention assumed, see the sketch after this example)
    return np.dot(weights, symbol)


if __name__ == "__main__":
    # gen received symbols using channel simulator
    # @TODO Why is tap a vector of tuples with a size corresponding to
    #       the number of symbols? Is the channel changing for each
    #       batch of symbols / packet?
    # @note pream is the true preamble, recv is the received preamble
    pream, tap, recv = offline.gen_ktap(data_size, pream_size, model_tap_size,
                                        train_snr)

    print("pream:", pream.shape)
    print("pream:", pream)
    print("tap:", tap.shape)
    print("tap:", tap)
    print("recv:", recv.shape)
    print("recv:", recv)

    # alias received preamble symbols as x
    x = recv[0]

    # alias desired preamble symbols as d
    d = pream[0]

    # convert x, d to numpy complex numbers (same layout as Example #3)
    x = x[..., 0] + 1j * x[..., 1]
    d = d[..., 0] + 1j * d[..., 1]
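
For reference, a self-contained sketch of the adaptation loop the `lms` stub above builds toward: the classic complex LMS recursion with output y[n] = w.x[n], error e[n] = d[n] - y[n], and update w <- w + mu * e[n] * conj(x[n]). The step size `mu` and tap count `order` mirror Example #3; everything else is illustrative, not the project's implementation.

import numpy as np


def lms_equalize(x, d, mu=0.09, order=5):
    """Adapt an order-tap FIR filter so its output tracks d (len(x) == len(d))."""
    w = np.zeros(order, dtype=complex)      # FIR tap weights
    y = np.zeros(len(d), dtype=complex)     # equalizer output
    for n in range(order, len(x)):
        window = x[n - order:n][::-1]       # most recent `order` inputs
        y[n] = np.dot(w, window)            # nth-step filter output
        e = d[n] - y[n]                     # instantaneous error
        w = w + mu * e * np.conj(window)    # stochastic-gradient update
    return w, y

With x and d as the complex preamble arrays built above, `w, y = lms_equalize(x, d, mu, order)` yields the converged taps and the per-step filter output.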
Example #3
train_snr = 25  # channel SNR

pream_size = 1000  # number of preamble symbols (ideally ~40)
payload_size = 2500  # number of payload symbols

# LMS parameters
mu = 0.09  # step size
order = 5  # num FIR taps

if __name__ == "__main__":
    # gen received symbols using channel simulator
    # @TODO Why is tap a vector of tuples with a size corresponding to
    #       the number of symbols? Is the channel changing for each
    #       batch of symbols / packet?
    # @note pream is the true preamble, recv is the received preamble
    pream, pream_recv, payload_recv, tx_label = offline.gen_ktap(
        data_size, pream_size, model_tap_size, train_snr, payload_size)

    print("pream:", pream.shape)
    print("pream_recv:", pream_recv.shape)

    print("label:", tx_label.shape)
    print("payload_recv:", payload_recv.shape)

    # alias received preamble symbols as x
    x = pream_recv

    # alias desired preamble symbols as d
    d = pream

    # convert x,d to numpy complex numbers
    x = x[..., 0] + 1j * x[..., 1]
    d = d[..., 0] + 1j * d[..., 1]
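
The conversion above assumes symbols are stored as real arrays with a trailing [real, imag] axis; a small round-trip demo of that layout (values are illustrative):

import numpy as np

x_ri = np.array([[1.0, -1.0], [0.5, 2.0]])        # two symbols as [re, im] pairs
x_c = x_ri[..., 0] + 1j * x_ri[..., 1]            # -> array([1.0-1.0j, 0.5+2.0j])
x_back = np.stack([x_c.real, x_c.imag], axis=-1)  # inverse mapping
assert np.allclose(x_back, x_ri)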
Example #4
import gzip
import pickle
from argparse import ArgumentParser

# project-local helpers `offline` and `inverse_tap_fft` are assumed
# importable from the surrounding package

# common parameters
# pream_size = 40
tap_size = 2
data_size = 20000
# zf_data = 'data/zf_data_snr10_pream40'

# model parameters
expand = 8192
eps = 0
snr = 10

if __name__ == "__main__":
    parser = ArgumentParser('gen-zf')
    parser.add_argument('-o', '--output', required=True)
    parser.add_argument('-O', '--order', required=True, type=int)
    parser.add_argument('-p', '--pream', type=int, default=40)
    args = parser.parse_args()

    pream, tap, pream_recv = offline.gen_ktap(data_size, args.pream, tap_size, snr)
    inverse = inverse_tap_fft(tap, expand=expand, trunc=args.order, eps=eps)
    data2save = {
        'pream': pream,
        'pream_recv': pream_recv,
        'inverse_weights': inverse,
        'gen_taps': tap,
    }

    with gzip.open(args.output, 'wb') as f:
        pickle.dump(data2save, f)
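
Reading the archive back is symmetric; a short sketch, using the default path from the commented-out `zf_data` line above:

import gzip
import pickle

with gzip.open('data/zf_data_snr10_pream40', 'rb') as f:
    data = pickle.load(f)

# keys as written by data2save above
print(data['pream'].shape, data['inverse_weights'].shape)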
Example #5
import numpy as np
import torch
import torch.nn.functional as F
from argparse import ArgumentParser

# CNNEstimator, offline, and the config names below (est_path, eval_dat,
# pream_test_sizes, eval_snrs, batch_size, model_tap_size) come from the
# surrounding project

if __name__ == "__main__":
    parser = ArgumentParser('test-cnn')
    parser.add_argument('epoch', nargs='?', type=int, default=10)
    args = parser.parse_args()

    model = CNNEstimator(model_tap_size)
    model.load_state_dict(torch.load(est_path))

    # evaluate the saved estimator across preamble sizes and SNRs
    with open(eval_dat, 'w') as f:
        for pream_test_size in pream_test_sizes:
            test_loss = [pream_test_size]

            for snr in eval_snrs:
                pream_test, tap_test, recv_test = offline.gen_ktap(
                    batch_size, pream_test_size, model_tap_size, snr)

                test_loss_epoch = []
                for i in range(args.epoch):
                    print("test on epoch {}".format(i + 1))
                    test_loss_epoch.append(
                        offline.batch_eval(model,
                                           (offline.to_torch(pream_test),
                                            offline.to_torch(recv_test)),
                                           offline.to_torch(tap_test),
                                           F.mse_loss, batch_size))
                    print("epoch {}  test loss: {}".format(
                        i + 1, test_loss_epoch[-1]))

                test_loss.append(np.mean(test_loss_epoch))
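
The snippet is truncated before the accumulated row reaches the open file; a plausible continuation at the bottom of the `pream_test_size` loop (the whitespace-separated format is an assumption, not from the source):

            # assumed continuation: persist one row per preamble size
            f.write(' '.join(str(v) for v in test_loss) + '\n')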
Example #6
def gen_train_data():
    # one-call wrapper around the channel simulator with the module-level
    # training configuration
    return offline.gen_ktap(data_size, pream_size, model_tap_size, train_snr)
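
A minimal usage sketch; the three-way unpacking follows the four-argument `gen_ktap` call in Example #2:

# pream: true preamble, tap: channel taps, recv: received preamble symbols
pream, tap, recv = gen_train_data()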