# --- Simulation setup: sample the signal directory and build the Eb/N0 sweep ---
# NOTE(review): this chunk had its newlines stripped; line structure reconstructed.

# First directory level only: `path` is `directory` itself, `files` its file names.
path, dirs, files = next(os.walk(directory))  # idiomatic next() instead of .__next__()
file_count = 100  # len(files)  -- hard-coded override of the real count; TODO confirm intent
nIterations = file_count

Fs = np.intc(802e3)  # Receiver sample rate. #! Must be the same as the signals
# np.float was removed in NumPy 1.24; it was an alias for the builtin float.
T = float(6e-3)      # Pulse duration. #! Must be the same as the signals

# Generate logarithmic spread of Eb/N0 values.
# (Logarithmic variant kept below for reference; a linear 0..40 dB sweep is in use.)
# EbN0Vector = -np.subtract(-EbN0End, np.logspace(np.log10(-EbN0Start), np.log10(-EbN0End), num=31, endpoint=True, base=10.0))
EbN0Start = 40
EbN0End = 0
EbN0Vector = np.linspace(EbN0End, EbN0Start, 41)  # 41 points, 1 dB apart
# EbN0Vector = np.linspace(0, -30, 31)
# Convert Eb/N0 [dB] to S/N [dB] for 2 bits/symbol at symbol rate 1/T.
snrVector = comm.EbN0toSNRdB(EbN0Vector, 2, Fs, 1/T)

# Per-iteration, per-SNR estimate buffers (filled by the loop below / later chunks).
fCenterEstimate = np.zeros((nIterations, len(snrVector)), dtype=np.float64)
fCenterEstimate2 = np.zeros((nIterations, len(snrVector)), dtype=np.float64)
R_symbEstimate = np.zeros((nIterations, len(snrVector)), dtype=np.float64)
R_symbEstimate2 = np.zeros((nIterations, len(snrVector)), dtype=np.float64)

for i in range(nIterations):
    print("Iteration", i+1, "of", nIterations)
    # Load signal object i from its pickle file (files are named "<index>.pkl").
    filename = str(i)
    fileString = path + filename + ".pkl"
    with open(fileString, 'rb') as f:
        # NOTE(review): pickle.load on files you don't control is unsafe; assumed
        # these are locally generated job files.
        sigObj = pickle.load(f)
return packet # Configure estimators estimators = [] estimators.append(estimator('Packet Extractor', packetExtractor, Fs=Fs, T=T, threshold=util.db2pow(-10.5))) # Create analysis object m_analysis = analysis('Packet_Extraction', estimators=estimators, lossFcn='MAE') # Generate Eb/N0 range for statistics gathering. EbN0Start = 28 EbN0End = 24 EbN0Vec = np.linspace(EbN0End, EbN0Start, 41) SnrVec = comm.EbN0toSNRdB(EbN0Vec, 2, Fs, 1/T) m_analysis.axis.displayName = '$E_b/N_0$ [dB]' m_analysis.axis.displayVector = EbN0Vec m_analysis.axis.name = 'S/N [dB]' m_analysis.axis.vector = SnrVec m_analysis.analyze(iterations=nIterations, parameter='packet', packetSize=10, debug=False) # Write to binary file path = '../jobs/' filename = 'packetJob' destination = path + filename + str(m_analysis.iterations) + '.pkl' # Save job to binary file with open(destination,'wb') as f: pickle.dump(m_analysis, f)
estimator('$\sqrt{CRLB}$ [Hz]', CRLB, packetSize=packetSize, Fs=Fs)) # Create analysis object m_analysis = analysis('Center_Frequency_Estimation', estimators=estimators, lossFcn='MAE') # Generate Eb/N0 range for statistics gathering. EbN0Start = 40 EbN0End = 10 EbN0Range = np.linspace(EbN0End, EbN0Start, EbN0Start - EbN0End + 1) m_analysis.axis.displayName = '$E_b/N_0$ [dB]' m_analysis.axis.displayVector = EbN0Range m_analysis.axis.name = 'S/N [dB]' m_analysis.axis.vector = comm.EbN0toSNRdB(EbN0Range, 2, Fs, 1 / T) m_analysis.analyze(iterations=nIterations, parameter='fCenter', packetSize=packetSize, debug=Debug) """# Write to binary file path = '../jobs/' filename = 'centerFrequencyJob' destination = path + filename + str(m_analysis.iterations) + '.pkl' # Save job to binary file with open(destination,'wb') as f: pickle.dump(m_analysis, f) # Read from binary file path = '../jobs/'
fWindowWidthHertz=50e3)) # Create analysis object m_analysis = analysis('Symbol_Rate_Estimation', estimators=estimators, lossFcn='MAE') # Generate Eb/N0 range for statistics gathering. EbN0Start = 40 EbN0End = 10 m_analysis.axis.displayName = '$E_b/N_0$ [dB]' m_analysis.axis.displayVector = np.linspace(EbN0End, EbN0Start, EbN0Start - EbN0End + 1) m_analysis.axis.name = 'S/N [dB]' m_analysis.axis.vector = comm.EbN0toSNRdB(m_analysis.axis.displayVector, 2, Fs, 1 / T) m_analysis.analyze(iterations=nIterations, parameter='symbolRate', packetSize=packetSize, debug=Debug) # Write to binary file path = '../jobs/' jobname = 'SRateJob' destination = path + jobname + str(m_analysis.iterations) + '.pkl' # Save job to binary file with open(destination, 'wb') as f: pickle.dump(m_analysis, f) iterations = nIterations #! Must be same as job file """