def conv_Pb_bound(R, dfree, Ck, SNRdB, hard_soft, M=2):
    """
    Coded bit error probability

    Convolutional coding bit error probability upper bound
    according to Ziemer & Peterson 7-16, p. 507

    Mark Wickert November 2014

    Parameters
    ----------
    R: Code rate
    dfree: Free distance of the code
    Ck: Weight coefficient
    SNRdB: Signal to noise ratio in dB
    hard_soft: 0 hard, 1 soft, 2 uncoded
    M: M-ary

    Examples
    --------
    >>> import numpy as np
    >>> from sk_dsp_comm import fec_conv as fec
    >>> import matplotlib.pyplot as plt
    >>> SNRdB = np.arange(2,12,.1)
    >>> Pb = fec.conv_Pb_bound(1./2,10,[36, 0, 211, 0, 1404, 0, 11633],SNRdB,2)
    >>> Pb_1_2 = fec.conv_Pb_bound(1./2,10,[36, 0, 211, 0, 1404, 0, 11633],SNRdB,1)
    >>> Pb_3_4 = fec.conv_Pb_bound(3./4,4,[164, 0, 5200, 0, 151211, 0, 3988108],SNRdB,1)
    >>> plt.semilogy(SNRdB,Pb)
    >>> plt.semilogy(SNRdB,Pb_1_2)
    >>> plt.semilogy(SNRdB,Pb_3_4)
    >>> plt.axis([2,12,1e-7,1e0])
    >>> plt.xlabel(r'$E_b/N_0$ (dB)')
    >>> plt.ylabel(r'Symbol Error Probability')
    >>> plt.legend(('Uncoded BPSK','R=1/2, K=7, Soft','R=3/4 (punc), K=7, Soft'),loc='best')
    >>> plt.grid()
    >>> plt.show()

    Notes
    -----
    The code rate R is given by :math:`R_{s} = \\frac{k}{n}`.

    Mark Wickert and Andrew Smit 2018
    """
    Pb = np.zeros_like(SNRdB)
    SNR = 10.**(SNRdB / 10.)
    for n, SNRn in enumerate(SNR):
        for k in range(dfree, len(Ck) + dfree):
            if hard_soft == 0:  # Evaluate hard decision bound
                Pb[n] += Ck[k - dfree] * hard_Pk(k, R, SNRn, M)
            elif hard_soft == 1:  # Evaluate soft decision bound
                Pb[n] += Ck[k - dfree] * soft_Pk(k, R, SNRn, M)
            else:  # Compute uncoded Pe
                if M == 2:
                    Pb[n] = q_fctn(np.sqrt(2. * SNRn))
                else:
                    # Note: np.gaussQ does not exist in NumPy; use the module's
                    # Gaussian Q-function helper q_fctn instead.
                    Pb[n] = 4. / np.log2(M) * (1 - 1 / np.sqrt(M)) * \
                            q_fctn(np.sqrt(3 * np.log2(M) / (M - 1) * SNRn))
    return Pb
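
# Usage sketch (an illustration, not part of the library source): compares the
# hard and soft decision bounds for the R=1/2, K=7, dfree=10 code whose weight
# spectrum Ck is quoted in the docstring above. Kept as comments so importing
# this module stays side-effect free; requires numpy and matplotlib.
#
#   import numpy as np
#   import matplotlib.pyplot as plt
#   from sk_dsp_comm import fec_conv as fec
#
#   SNRdB = np.arange(2, 12, .1)
#   Ck = [36, 0, 211, 0, 1404, 0, 11633]
#   Pb_hard = fec.conv_Pb_bound(1./2, 10, Ck, SNRdB, 0)  # hard decision bound
#   Pb_soft = fec.conv_Pb_bound(1./2, 10, Ck, SNRdB, 1)  # soft decision bound
#   plt.semilogy(SNRdB, Pb_hard, SNRdB, Pb_soft)
#   plt.legend(('R=1/2, K=7, Hard', 'R=1/2, K=7, Soft'), loc='best')
#   plt.show()
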
def block_single_error_Pb_bound(j, SNRdB, coded=True, M=2):
    """
    Finds the bit error probability bounds according to Ziemer and Tranter,
    page 656.

    Parameters
    ----------
    j: number of parity bits used in single error correction block code
    SNRdB: Eb/N0 values in dB
    coded: Select single error correction code (True) or uncoded (False)
    M: modulation order

    Returns
    -------
    Pb: bit error probability bound
    """
    Pb = np.zeros_like(SNRdB)
    Ps = np.zeros_like(SNRdB)
    SNR = 10.**(SNRdB / 10.)
    n = 2**j - 1
    k = n - j
    for i, SNRn in enumerate(SNR):
        if coded:  # compute Hamming code Ps
            if M == 2:
                Ps[i] = q_fctn(np.sqrt(k * 2. * SNRn / n))
            else:
                # Note: np.gaussQ does not exist in NumPy; use the module's
                # Gaussian Q-function helper q_fctn instead.
                Ps[i] = 4. / np.log2(M) * (1 - 1 / np.sqrt(M)) * \
                        q_fctn(np.sqrt(3 * np.log2(M) / (M - 1) * SNRn)) / k
        else:  # Compute uncoded Pb
            if M == 2:
                Pb[i] = q_fctn(np.sqrt(2. * SNRn))
            else:
                Pb[i] = 4. / np.log2(M) * (1 - 1 / np.sqrt(M)) * \
                        q_fctn(np.sqrt(3 * np.log2(M) / (M - 1) * SNRn))
    # Convert symbol error probability to bit error probability
    if coded:
        Pb = ser2ber(M, n, 3, 1, Ps)
    return Pb
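
# Usage sketch (an illustration, not part of the library source): bounds for a
# (7,4) Hamming code (j=3 parity bits) versus uncoded BPSK over a range of
# Eb/N0. The import path below assumes this function is exposed from the same
# sk_dsp_comm module imported as fec above; adjust if it lives elsewhere.
#
#   import numpy as np
#   import matplotlib.pyplot as plt
#   from sk_dsp_comm import fec_conv as fec
#
#   SNRdB = np.arange(0, 12, .1)
#   Pb_coded = fec.block_single_error_Pb_bound(3, SNRdB, coded=True)
#   Pb_uncoded = fec.block_single_error_Pb_bound(3, SNRdB, coded=False)
#   plt.semilogy(SNRdB, Pb_coded, SNRdB, Pb_uncoded)
#   plt.legend(('(7,4) Hamming, hard decision', 'Uncoded BPSK'), loc='best')
#   plt.show()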