def __getAM(self, sig):
    '''Do AM demodulation in chunks of the given signal

    Args:
        sig (:obj:`commSignal`): Input signal

    Returns:
        :obj:`commSignal`: AM demodulated signal
    '''

    logging.info('Beginning AM demodulation in chunks')

    amDemodulator = demod_am.demod_am()
    amOut = comm.commSignal(sig.sampRate)
    chunkerObj = chunker.chunker(sig, chunkSize=60000 * 4)

    for i in chunkerObj.getChunks:
        logging.info('Processing chunk %d of %d chunks',
                     chunkerObj.getChunks.index(i) + 1,
                     len(chunkerObj.getChunks))
        demodSig = amDemodulator.demod(sig.signal[i[0]:i[1]])
        amOut.extend(comm.commSignal(sig.sampRate, demodSig))

    logging.info('AM demodulation completed')

    return amOut
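For reference, the AM step for an APT-style signal is essentially envelope detection. Below is a minimal standalone sketch of that idea using scipy's Hilbert transform; it is not the demod_am implementation used above, whose internals are not shown here.

import numpy as np
from scipy.signal import hilbert

def envelope_detect(samples):
    # Envelope detection: magnitude of the analytic signal.
    # Standalone sketch only, not the library's demod_am.demod.
    analytic = hilbert(samples)
    return np.abs(analytic)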
def getAudio(self):
    '''Get the audio from data

    Returns:
        :obj:`commSignal`: An audio signal
    '''

    audioFreq = self.__audioFreq
    strictness = self.__strictness

    audioOut = comm.commSignal(audioFreq)
    bhFilter = filters.blackmanHarris(151)
    fmDemodulator = demod_fm.demod_fm()
    chunkerObj = chunker.chunker(self.__sigsrc)

    for i in chunkerObj.getChunks:
        sig = comm.commSignal(self.__sigsrc.sampFreq,
                              self.__sigsrc.read(*i), chunkerObj)\
            .offsetFreq(self.__offset)\
            .filter(bhFilter)\
            .bwLim(self.__bw, uniq="First")\
            .funcApply(fmDemodulator.demod)\
            .bwLim(audioFreq, strictness)
        audioOut.extend(sig)

    return audioOut
def __audio(self, audioFreq=constants.NOAA_AUDSAMPRATE, strictness=True):
    '''Get the audio from data at this sampling rate

    Args:
        audioFreq (:obj:`int`, optional): Target frequency of sampling of audio
        strictness (:obj:`bool`, optional): Strictness of sampling

    Returns:
        :obj:`commSignal`: An audio signal
    '''

    logging.info('Beginning FM demodulation to get audio in chunks')

    audioOut = comm.commSignal(audioFreq)
    bhFilter = filters.blackmanHarris(151)
    fmDemodulator = demod_fm.demod_fm()
    chunkerObj = chunker.chunker(self.__sigsrc)

    for i in chunkerObj.getChunks:
        logging.info('Processing chunk %d of %d chunks',
                     chunkerObj.getChunks.index(i) + 1,
                     len(chunkerObj.getChunks))
        sig = comm.commSignal(self.__sigsrc.sampFreq,
                              self.__sigsrc.read(*i), chunkerObj)\
            .offsetFreq(self.__offset)\
            .filter(bhFilter)\
            .bwLim(self.__bw, uniq="First")\
            .funcApply(fmDemodulator.demod)\
            .bwLim(audioFreq, strictness)
        audioOut.extend(sig)

    logging.info('FM demodulation successfully complete')

    self.__audOut = audioOut

    return audioOut
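Both audio paths above rely on commSignal.bwLim to bring the signal down to a target rate. As a rough illustration of what such a bandwidth-limiting step amounts to (low-pass filtering plus downsampling), here is a standalone sketch using scipy; it assumes an integer decimation factor and is not the library's bwLim implementation.

from scipy.signal import decimate

def bandwidth_limit(samples, samp_rate, target_rate):
    # Low-pass filter and downsample to roughly the target rate.
    # Fractional resampling (what a strict bwLim would need) is out of
    # scope for this sketch.
    factor = int(samp_rate // target_rate)
    if factor <= 1:
        return samples, samp_rate
    return decimate(samples, factor), samp_rate / factor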
from directdemod import source, sink, chunker, comm, constants, filters, demod_am, demod_fm
import matplotlib.pyplot as plt

## First the source of data
fileName = "../samples/SDRSharp_20170830_073907Z_145825000Hz_IQ_autogain.wav"
sigsrc = source.IQwav(fileName)

## Next create a signal object, reading data from the source

# Read all values from the source into an array
sigArray = sigsrc.read(0, sigsrc.length)

# a commSignal object basically stores the signal array and its sampling rate
# if you want the array, use sig.signal
# if you want the sampling rate, use sig.sampRate
sig = comm.commSignal(sigsrc.sampFreq, sigArray)

## Offset the frequency if required, not needed here
# sig.offsetFreq(0)

## Apply a Blackman-Harris filter to get rid of noise
bhFilter = filters.blackmanHarris(151)
sig.filter(bhFilter)

## Limit bandwidth, say to 30000 Hz
sig.bwLim(30000)

## FM demodulate
fmDemodulator = demod_fm.demod_fm()
sig.funcApply(fmDemodulator.demod)
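To listen to the result, one option (independent of directdemod's sink module, whose API is not shown here) is to normalize the demodulated samples and write them to a WAV file with scipy; the file name below is arbitrary, and sig.sampRate is assumed to reflect the rate after bwLim.

import numpy as np
from scipy.io import wavfile

# Normalize and save the FM-demodulated samples as a 32-bit float WAV file.
audio = sig.signal / np.max(np.abs(sig.signal))
wavfile.write("fm_demod_output.wav", int(sig.sampRate), audio.astype(np.float32))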
# Firstly we will have to import whatever libraries we would need
import os, sys
nb_dir = os.path.split(os.getcwd())[0]
if nb_dir not in sys.path:
    sys.path.append(nb_dir)

from directdemod import source, sink, chunker, comm, constants, filters, demod_am, demod_fm
import matplotlib.pyplot as plt

## First the source of data
fileName = "../samples/SDRSharp_20170830_073907Z_145825000Hz_IQ_autogain.wav"
sigsrc = source.IQwav(fileName)

# initialize all objects outside the loop
sigOut = comm.commSignal(sigsrc.sampFreq)
bhFilter = filters.blackmanHarris(151)
fmDemodulator = demod_fm.demod_fm()
chunkerObj = chunker.chunker(sigsrc)

# use this for loop
for i in chunkerObj.getChunks:
    # everything is the same as before, but remember to reuse the same objects,
    # so that continuity is maintained between the chunks
    sig = comm.commSignal(sigsrc.sampFreq, sigsrc.read(*i), chunkerObj)
    sig.filter(bhFilter)
    sig.bwLim(30000)
    sig.funcApply(fmDemodulator.demod)
    sigOut.extend(sig)
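A quick, purely illustrative check that the chunked processing stitched together correctly is to plot the combined output:

# The stitched-together output should look like one continuous demodulated signal.
plt.plot(sigOut.signal)
plt.title("FM demodulated signal, processed in chunks")
plt.xlabel("sample number")
plt.show()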
def getAccurateSync(self, useNormCorrelate=True):
    '''Get the sync locations: at highest sampling rate

    Args:
        useNormCorrelate (:obj:`bool`, optional): Whether to use normalized correlation or not

    Returns:
        :obj:`list`: A list of locations of sync in sample number (start of sync)
    '''

    if self.__asyncA is None or self.__asyncB is None or self.__asyncBtime is None \
            or self.__asyncAtime is None or self.__asyncBpk is None or self.__asyncApk is None \
            or not self.__useNormCorrelate == useNormCorrelate:

        self.__useNormCorrelate = useNormCorrelate

        if self.__syncA is None or self.__syncB is None:
            self.getCrudeSync()

        # calculate the width of the search window in sample numbers
        syncTime = constants.NOAA_T * len(constants.NOAA_SYNCA)
        searchTimeWidth = 3 * syncTime
        searchSampleWidth = int(searchTimeWidth * self.__sigsrc.sampFreq)

        # convert sync from samples to time
        csyncA = self.__syncA / self.__syncCrudeSampRate
        csyncB = self.__syncB / self.__syncCrudeSampRate

        # convert back to sample number
        csyncA *= self.__sigsrc.sampFreq
        csyncB *= self.__sigsrc.sampFreq

        ## Accurate syncA
        self.__asyncA = []
        self.__asyncApk = []
        self.__asyncAtime = []

        logging.info('Beginning Accurate SyncA detection')

        for i in csyncA:
            logging.info('Detecting Sync %d of %d syncs',
                         list(csyncA).index(i) + 1, len(csyncA))

            startI = int(i) - int(searchSampleWidth)
            endI = int(i) + int(searchSampleWidth)

            if startI < 0 or endI > self.__sigsrc.length:
                continue

            sig = comm.commSignal(self.__sigsrc.sampFreq,
                                  self.__sigsrc.read(startI, endI))\
                .offsetFreq(self.__offset)\
                .filter(filters.blackmanHarris(151, zeroPhase=True))\
                .funcApply(demod_fm.demod_fm().demod)\
                .funcApply(demod_am.demod_am().demod)

            syncDet, PkHeights, TimeSync = self.__correlateAndFindPeaks(
                sig, constants.NOAA_SYNCA,
                getExtraInfo=True,
                useNormCorrelate=useNormCorrelate,
                usePosNeedle=useNormCorrelate,
                useFilter=True)

            self.__asyncA.append(syncDet[0] + startI)
            self.__asyncApk.append(PkHeights[0])
            self.__asyncAtime.append(TimeSync[0])

        logging.info('Accurate SyncA detection complete')

        ## Accurate syncB
        self.__asyncB = []
        self.__asyncBpk = []
        self.__asyncBtime = []

        logging.info('Beginning Accurate SyncB detection')

        for i in csyncB:
            logging.info('Detecting Sync %d of %d syncs',
                         list(csyncB).index(i) + 1, len(csyncB))

            startI = int(i) - int(searchSampleWidth)
            endI = int(i) + int(searchSampleWidth)

            if startI < 0 or endI > self.__sigsrc.length:
                continue

            sig = comm.commSignal(self.__sigsrc.sampFreq,
                                  self.__sigsrc.read(startI, endI))\
                .offsetFreq(self.__offset)\
                .filter(filters.blackmanHarris(151, zeroPhase=True))\
                .funcApply(demod_fm.demod_fm().demod)\
                .funcApply(demod_am.demod_am().demod)

            syncDet, PkHeights, TimeSync = self.__correlateAndFindPeaks(
                sig, constants.NOAA_SYNCB,
                getExtraInfo=True,
                useNormCorrelate=useNormCorrelate,
                usePosNeedle=useNormCorrelate,
                useFilter=True)

            self.__asyncB.append(syncDet[0] + startI)
            self.__asyncBpk.append(PkHeights[0])
            self.__asyncBtime.append(TimeSync[0])

        logging.info('Accurate SyncB detection complete')

    return [
        self.__asyncA, np.diff(self.__asyncA), self.__asyncApk, self.__asyncAtime,
        self.__asyncB, np.diff(self.__asyncB), self.__asyncBpk, self.__asyncBtime
    ]
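The accurate sync search above boils down to correlating a short demodulated window against a known sync pattern and keeping the strongest peak. The sketch below illustrates that general correlate-and-find-peaks idea with scipy; the private __correlateAndFindPeaks method has additional options (filtering, positive-only needle, timing output) that are not reproduced here.

import numpy as np
from scipy.signal import correlate, find_peaks

def correlate_and_find_peaks(haystack, needle, min_distance):
    # Normalize both sequences, cross-correlate, and keep well-separated peaks.
    # Illustration of the general idea only, not the class method used above.
    h = (haystack - np.mean(haystack)) / (np.std(haystack) + 1e-12)
    n = (needle - np.mean(needle)) / (np.std(needle) + 1e-12)
    corr = correlate(h, n, mode='valid') / len(n)
    peaks, props = find_peaks(corr, height=0.5, distance=min_distance)
    return peaks, props['peak_heights']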
def getSyncs(self):
    '''Get syncs of Funcube

    Returns:
        :obj:`list`: list of detected syncs
    '''

    # create chunker object
    chunkerObj = chunker.chunker(self.__sigsrc)

    # butter filter
    bf = filters.butter(self.__sigsrc.sampFreq, self.__bw)

    # init vars for gardner
    symbolPeriod = self.__sigsrc.sampFreq / 12000
    timing = 0.00
    gardnerC, gardnerB, gardnerA = 0.00, 0.00, 0.00
    agcObj = agc()
    pllObj = costas()
    ctr = 0

    # sync word at the symbol rate and upsampled to the 2.048 MHz sample rate
    sync = np.array([int(i) for i in "101000110001000000000001010111100"])
    sync12khz = np.repeat(sync, 10)
    sync[sync == 1] = 127
    sync[sync == 0] = -128
    sync2mhz = np.repeat(sync, int(2048000 / 1200))

    maxResBuff = []
    minResBuff = []
    maxBuffRetain = -1
    maxBuffStart = 0
    minSyncs = []
    maxSyncs = []
    numCtrs = int(chunkerObj.getChunks[-1][-1] * 12000 / 2048000)
    start_time = time.time()
    lastMin = None
    ctrMain = 0

    doppCorrect_target = None
    doppCorrect_current = None
    chunk_number = 0

    for i in chunkerObj.getChunks[:]:

        # interpolate
        sig = comm.commSignal(self.__sigsrc.sampFreq, self.__sigsrc.read(*i))

        doppCorrect_freqs = self.__offset

        if self.__corrfreq:
            bandwidth = 20000  # the bandwidth must cover the signal to be able to find it
            chunk_offset = frequency_shift.correct(self.__sigsrc.memmap,
                                                   self.__sigsrc.sampFreq,
                                                   self.__center_frequency,
                                                   self.__signal_freq, bandwidth,
                                                   chunk_number,
                                                   len(chunkerObj.getChunks))
            logging.info("doppler shift is %f Hz", chunk_offset)
            chunk_number += 1
            doppCorrect_target = self.__offset + chunk_offset

            if doppCorrect_current is None:
                doppCorrect_current = doppCorrect_target

            # ramp the correction towards the target instead of jumping
            doppCorrect_bw = 2000.0 / constants.PROC_CHUNKSIZE
            if doppCorrect_target > doppCorrect_current:
                doppCorr_delta = doppCorrect_bw
                doppCorrect_freqs = np.arange(
                    doppCorrect_current,
                    doppCorrect_current + ((i[1] - i[0]) * doppCorr_delta) + (10 * doppCorr_delta),
                    doppCorr_delta)[:len(sig.signal)]
                doppCorrect_freqs[doppCorrect_freqs > doppCorrect_target] = doppCorrect_target
            else:
                doppCorr_delta = -1 * doppCorrect_bw
                doppCorrect_freqs = np.arange(
                    doppCorrect_current,
                    doppCorrect_current + ((i[1] - i[0]) * doppCorr_delta) + (10 * doppCorr_delta),
                    doppCorr_delta)[:len(sig.signal)]
                doppCorrect_freqs[doppCorrect_freqs < doppCorrect_target] = doppCorrect_target
            doppCorrect_current = doppCorrect_freqs[-1]

        sig.offsetFreq(doppCorrect_freqs)
        sig.filter(bf)

        ctrCurr = 0

        # main loop
        for i in sig.signal:

            ### MAXSYNC detection by correlation

            # start storing 2 MHz values near possible sync regions
            if lastMin is not None and (ctr > lastMin + (4.9 * 12000) - (2 * len(sync12khz)) or maxBuffRetain != -1) and ctr <= lastMin + (5.2 * 12000):
                if len(maxResBuff) == 0:
                    maxBuffStart = ctrMain
                maxResBuff.append(lim(np.real(i * pllObj.output) / 2))

                # see if correlation is to be performed
                if maxBuffRetain == -1:
                    if len(maxResBuff) > (2 * len(sync2mhz)):
                        maxBuffStart += 1
                        maxResBuff.pop(0)
                elif maxBuffRetain == 0:
                    maxBuffRetain -= 1
                    corr = np.abs(np.correlate(maxResBuff, sync2mhz, mode='same'))
                    logging.info("MAXSYNC %d", maxBuffStart + np.argmax(corr))
                    maxSyncs.append(maxBuffStart + np.argmax(corr))
                    maxResBuff = []
                else:
                    maxBuffRetain -= 1

            # Gardner's algorithm
            if timing >= symbolPeriod / 2 and timing < ((symbolPeriod / 2) + 1):
                gardnerB = agcObj.adjust(i)
            elif timing >= symbolPeriod:
                gardnerA = agcObj.adjust(i)
                timing -= symbolPeriod
                resync_error = (np.imag(gardnerA) - np.imag(gardnerC)) * np.imag(gardnerB)
                timing += (resync_error * symbolPeriod / 2000000.0)
                gardnerC = gardnerA
                gardnerA = pllObj.loop(gardnerA)
                ctr += 1

                # 12 kHz buffer
                minResBuff.append(limBin(np.real(gardnerA)))
                minResBuff = minResBuff[-1 * len(sync12khz):]

                # print periodic status
                try:
                    if ctr % 1000 == 0:
                        logging.info("[%.2f percent complete] [%.2f seconds elapsed] [%.2f seconds remain]",
                                     (ctr * 100 / numCtrs),
                                     (time.time() - start_time),
                                     (((time.time() - start_time) / (ctr / numCtrs)) - (time.time() - start_time)))
                except:
                    # progress estimate can divide by zero; never let it break the loop
                    pass

                # see if sync is present
                if len(minResBuff) == len(sync12khz) and np.abs(np.sum(np.abs(np.array(minResBuff) - sync12khz)) - (len(sync12khz) / 2)) > 120:
                    logging.info("MINSYNC: %d %f", ctr,
                                 np.abs(np.sum(np.abs(np.array(minResBuff) - sync12khz)) - (len(sync12khz) / 2)))
                    minSyncs.append(ctr)
                    lastMin = ctr
                    maxBuffRetain = 2 * len(sync2mhz)

            timing += 1
            ctrMain += 1
            ctrCurr += 1

    if len(maxSyncs) > 0:
        # check usefulness
        if np.min(np.abs(np.diff(maxSyncs) - (4.98 * 2048000))) < (0.2 * 2048000):
            self.__useful = 1
        return list(maxSyncs)[1:]
    else:
        return []
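The timing recovery in the symbol loop above is Gardner's algorithm: the error term is the difference between two successive symbol samples multiplied by the sample taken midway between them. A minimal sketch of just that detector, matching the quadrature-only form used above:

import numpy as np

def gardner_ted(prev_symbol, mid_sample, curr_symbol):
    # Gardner timing error detector: (current - previous) symbol sample,
    # multiplied by the mid-symbol sample. The loop above applies this to
    # the imaginary part of the AGC-adjusted samples
    # (gardnerC, gardnerB, gardnerA respectively).
    return (np.imag(curr_symbol) - np.imag(prev_symbol)) * np.imag(mid_sample)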
def getMsg(self):
    '''Get the message from data

    Returns:
        :obj:`str`: A string of message data
    '''

    if self.__msg is None:

        sig = comm.commSignal(self.__sigsrc.sampFreq)
        chunkerObj = chunker.chunker(self.__sigsrc)
        bhFilter = filters.blackmanHarris(151)
        fmDemodObj = demod_fm.demod_fm()

        for i in chunkerObj.getChunks:

            logging.info('Processing chunk %d of %d chunks',
                         chunkerObj.getChunks.index(i) + 1,
                         len(chunkerObj.getChunks))

            # get the signal
            chunkSig = comm.commSignal(self.__sigsrc.sampFreq,
                                       self.__sigsrc.read(*i), chunkerObj)

            ## Offset the frequency if required, not needed here
            chunkSig.offsetFreq(self.__offset)

            ## Apply a Blackman-Harris filter to get rid of noise
            chunkSig.filter(bhFilter)

            ## Limit bandwidth
            chunkSig.bwLim(self.__bw)

            # store signal
            sig.extend(chunkSig)

        ## FM demodulate
        sig.funcApply(fmDemodObj.demod)
        logging.info('FM demod complete')

        ## APRS uses two tones, 1200 Hz and 2200 Hz, hence create a Butterworth
        ## band-pass filter from 1200 - 500 Hz to 2200 + 500 Hz
        sig.filter(filters.butter(sig.sampRate, 1200 - 500, 2200 + 500,
                                  typeFlt=constants.FLT_BP))
        logging.info('Filtering complete')

        ## plot the signal
        if self.__graphs == 1:
            plt.plot(sig.signal)
            plt.show()

        buffer_size = int(np.round(self.__bw / self.__BAUDRATE))
        SAMPLE_PER_BAUD = self.__bw // self.__BAUDRATE

        # create the "correlation lists" for the comparison frequencies of the
        # digital frequency filters
        corr_mark_i = np.zeros(buffer_size)
        corr_mark_q = np.zeros(buffer_size)
        corr_space_i = np.zeros(buffer_size)
        corr_space_q = np.zeros(buffer_size)

        # fill the "correlation lists" with the sampled waveforms of the two frequencies
        for i in range(buffer_size):
            mark_angle = (i * 1.0 / self.__bw) / (1 / self.__mark_frequency) * 2 * np.pi
            corr_mark_i[i] = np.cos(mark_angle)
            corr_mark_q[i] = np.sin(mark_angle)

            space_angle = (i * 1.0 / self.__bw) / (1 / self.__space_frequency) * 2 * np.pi
            corr_space_i[i] = np.cos(space_angle)
            corr_space_q[i] = np.sin(space_angle)

        # now check the full signal for the binary states,
        # i.e. whether each sample is closer to 1200 Hz or to 2200 Hz
        binary_filter = np.zeros(len(sig.signal))
        for sample in range(len(sig.signal) - buffer_size):
            corr_mi = 0
            corr_mq = 0
            corr_si = 0
            corr_sq = 0
            for sub in range(buffer_size):
                corr_mi = corr_mi + sig.signal[sample + sub] * corr_mark_i[sub]
                corr_mq = corr_mq + sig.signal[sample + sub] * corr_mark_q[sub]
                corr_si = corr_si + sig.signal[sample + sub] * corr_space_i[sub]
                corr_sq = corr_sq + sig.signal[sample + sub] * corr_space_q[sub]
            binary_filter[sample] = (corr_mi**2 + corr_mq**2 - corr_si**2 - corr_sq**2)

        logging.info('Binary filter complete')

        if self.__graphs == 1:
            plt.plot(sig.signal / np.max(sig.signal))
            plt.plot(np.sign(binary_filter))
            plt.show()

        # now try to find the rising or falling edges of the bits:
        # generate the edge detection kernel
        kernel = np.zeros(SAMPLE_PER_BAUD)
        for i in range(len(kernel)):
            if i < SAMPLE_PER_BAUD // 2:
                kernel[i] = -1
            else:
                kernel[i] = 1

        changes = np.correlate(np.sign(binary_filter), kernel, mode="same") / SAMPLE_PER_BAUD

        if self.__graphs == 1:
            plt.plot(np.sign(binary_filter))
            plt.plot(changes)
            plt.title("bit starts")
            plt.show()

        # by using the edges of the bits to synchronize the sampling to the
        # transmitted bits, the algorithm is self-synchronizing. However, the
        # crossing regions between bits can be uncertain, so a peak detection
        # keeps only one solution in close vicinity, pinning the edges down further.
        peaks = peakdetect.peakdetect(np.abs(changes),
                                      lookahead=int(SAMPLE_PER_BAUD * 0.65))

        peaks1_x = []
        peaks1_y = []

        # positive peaks
        for i in range(len(peaks[0])):
            peaks1_x.append(peaks[0][i][0])
            peaks1_y.append(peaks[0][i][1])

        if self.__graphs == 1:
            plt.plot(peaks1_x, peaks1_y, "o")
            plt.plot(np.abs(changes))
            plt.plot(np.sign(binary_filter))
            plt.plot(sig.signal / np.max(sig.signal))
            plt.show()

        bit_repeated = np.round(np.diff(peaks1_x) / (self.__bw / self.__BAUDRATE))
        logging.info('Bit repeat complete')

        if self.__graphs == 1:
            plt.plot(np.sign(binary_filter))
            plt.plot(peaks1_x[:-1], bit_repeated, "*")
            plt.grid()
            plt.title("where frequency shifts")
            plt.show()

        # build the NRZI bit stream by averaging each bit period
        bitstream_nrzi = []
        for i in range(len(bit_repeated)):
            for repeats in range(int(bit_repeated[i])):
                bitstream_nrzi.append(np.mean(
                    binary_filter[peaks1_x[i] + repeats * SAMPLE_PER_BAUD:
                                  peaks1_x[i] + (repeats + 1) * SAMPLE_PER_BAUD]))

        # here we convert the NRZI bits to normal bits
        bitstream = decode_afsk1200.decode_nrzi(np.sign(bitstream_nrzi))
        logging.info('Decoding NRZI complete')

        if self.__graphs == 1:
            plt.plot(np.sign(bitstream_nrzi))
            plt.plot(bitstream_nrzi, "o-")
            plt.plot(bitstream, "*")
            plt.show()

        # find the AX.25 start/end flags (01111110)
        bit_startflag = []
        bit_startflag_marker = []
        for bit in range(len(bitstream) - 8):
            out = ""
            for i in range(8):
                out += str(bitstream[bit + i])
            if out == "01111110":
                bit_startflag.append(bit)
                bit_startflag_marker.append(1)

        length = np.diff(bit_startflag)

        # there are still stuffed bits inside the bit stream, so we need to find them
        bitstream_stuffed = decode_afsk1200.find_bit_stuffing(bitstream)

        if self.__graphs == 1:
            plt.plot(bitstream_nrzi)
            plt.plot(bitstream, "o-")
            plt.plot(bit_startflag[:-1], length, "o")
            plt.plot(bit_startflag, bit_startflag_marker, "o")
            plt.plot(bitstream_stuffed, "*")
            plt.title("test1")
            plt.show()

        logging.info('Stuffed bit removal complete')

        # check at each possible start flag whether the bit stream was received
        # correctly, by comparing the CRC-16 at the end of the message with the
        # CRC computed over the message body
        for flag in range(len(bit_startflag) - 1):
            # first get rid of the stuffed bits that are still inside the bit stream
            bits = decode_afsk1200.reduce_stuffed_bit(
                bitstream[bit_startflag[flag] + 8:bit_startflag[flag + 1]],
                bitstream_stuffed[bit_startflag[flag] + 8:bit_startflag[flag + 1]])
            msg = bits[:-16]

            if len(bits) % 8 == 0 and len(msg) > 16 * 8:
                out = ""
                for i in range(len(msg)):
                    out += str(msg[i])
                crc = framechecksequence.fcs_crc16(out)

                crc_received = ""
                msg_rest = bits[-16:]
                for i in range(len(msg_rest)):
                    crc_received += str(msg_rest[i])

                if crc_received == crc:
                    msg_text = decode_afsk1200.bits_to_msg(msg)
                    print("one aprs msg with correct crc is found. #", flag,
                          "starts at", bit_startflag[flag],
                          "length is", len(bits) / 8)

                    if self.__graphs == 1:
                        plt.plot(bitstream[bit_startflag[flag] + 8:
                                           bit_startflag[flag + 1] + 8], "o-")
                        plt.plot(bits, "*-")
                        plt.show()

        # there can be several messages per stream, so for now only the last is stored
        # to-do
        self.__msg = "template: space rocks!"
        self.__useful = 1

        logging.info('Message extraction complete')

    return self.__msg
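The NRZI step above follows the AX.25 convention: a transition between successive symbols encodes a 0 and no transition encodes a 1. A minimal standalone sketch of that decoding rule (not the decode_afsk1200.decode_nrzi routine itself):

import numpy as np

def nrzi_decode(symbols):
    # NRZI decoding as used by AX.25/APRS: no transition between successive
    # symbols is a logical 1, a transition is a logical 0.
    symbols = np.asarray(symbols)
    return (symbols[1:] == symbols[:-1]).astype(int)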
def getSyncs(self):
    '''Get syncs of Meteor M2

    Returns:
        :obj:`list`: list of detected syncs
    '''

    # create chunker object
    chunkerObj = chunker.chunker(self.__sigsrc)

    # butter filter
    bf = filters.butter(self.__sigsrc.sampFreq, self.__bw)

    # init vars for gardner
    symbolPeriod = self.__sigsrc.sampFreq / 72000
    timing = 0.00
    gardnerC, gardnerB, gardnerA = 0.00, 0.00, 0.00
    agcObj = agc()
    pllObj = costas()
    ctr = 0

    # soft sync word: threshold it to hard bits at the 72 kHz symbol rate
    sync = [
        int(i) for i in
        "0, 13, 13, 12, 13, 13, 13, 0, 0, 0, 13, 13, 0, 13, 13, 0, 13, 0, 0, 0, 13, 13, 13, 0, 0, 13, 0, 13, 0, 13, 0, 13, 13, 0, 0, 0, 13, 13, 0, 0, 0, 0, 13, 0, 13, 13, 0, 0, 0, 0, 0, 13, 1, 13, 0, 13, 13, 13, 13, 12, 0, 13, 0, 13, 0, 0, 13, 0, 13, 0, 13, 13, 0, 13, 13, 13, 0, 0, 0, 0, 13, 0, 13, 0, 13, 13, 13, 13, 13, 0, 13, 13, 13, 0, 0, 0, 0, 13, 13, 13, 0, 13, 0, 0, 0, 13, 0, 13, 13, 0, 13, 0, 13, 13, 0, 0, 0, 13, 13, 13"
        .split(",")
    ]
    sync = np.array(sync)
    sync[sync < 7] = 0
    sync[sync >= 7] = 1
    sync72khz = np.repeat(sync, 1)

    # variant with every odd-indexed bit inverted
    sync72khz1 = []
    for i in range(len(sync72khz)):
        if i % 2 == 0:
            sync72khz1.append(sync72khz[i])
        else:
            sync72khz1.append(1 - sync72khz[i])
    sync72khz1 = np.array(sync72khz1)

    # variant with every even-indexed bit inverted
    sync72khz2 = []
    for i in range(len(sync72khz)):
        if i % 2 == 1:
            sync72khz2.append(sync72khz[i])
        else:
            sync72khz2.append(1 - sync72khz[i])
    sync72khz2 = np.array(sync72khz2)

    # upsample each variant to the 2.048 MHz sample rate
    sync[sync == 1] = 127
    sync[sync == 0] = -128
    sync2mhz = np.repeat(sync, int(2048000 / 72000))

    sync = np.array(sync72khz1[:])
    sync[sync == 1] = 127
    sync[sync == 0] = -128
    sync2mhz1 = np.repeat(sync, int(2048000 / 72000))

    sync = np.array(sync72khz2[:])
    sync[sync == 1] = 127
    sync[sync == 0] = -128
    sync2mhz2 = np.repeat(sync, int(2048000 / 72000))

    maxResBuff = []
    minResBuff1 = []
    minResBuff2 = []
    maxBuffRetain = -1
    maxBuffStart = 0
    minSyncs = []
    maxSyncs = []
    numCtrs = int(chunkerObj.getChunks[-1][-1] * 72000 / 2048000)
    start_time = time.time()
    lastMin = None
    ctrMain = 0
    sync2mhzChosen = sync2mhz

    for i in chunkerObj.getChunks[:]:

        # interpolate
        sig = comm.commSignal(self.__sigsrc.sampFreq, self.__sigsrc.read(*i))
        sig.offsetFreq(self.__offset)
        sig.filter(bf)

        # main loop
        for i in sig.signal:

            ### MAXSYNC detection by correlation

            # start storing 2 MHz values near possible sync regions
            if lastMin is not None and (ctr > lastMin + (0.1 * 72000) - (2 * len(sync72khz)) or maxBuffRetain != -1) and ctr <= lastMin + (1 * 72000):
                if len(maxResBuff) == 0:
                    maxBuffStart = ctrMain
                corrVal = i * pllObj.output
                maxResBuff.append(lim(np.real(corrVal) / 2))
                maxResBuff.append(lim(np.imag(corrVal) / 2))

                # see if correlation is to be performed
                if maxBuffRetain == -1:
                    if len(maxResBuff) > (2 * len(sync2mhz)):
                        maxBuffStart += 1
                        maxResBuff.pop(0)
                        maxResBuff.pop(0)
                elif maxBuffRetain == 0:
                    maxBuffRetain -= 1
                    corr = np.abs(np.correlate(maxResBuff, sync2mhzChosen, mode='same'))
                    logging.info("MAXSYNC %d", maxBuffStart + (np.argmax(corr) / 2.0))
                    maxSyncs.append(maxBuffStart + (np.argmax(corr) / 2.0))
                    maxResBuff = []
                else:
                    maxBuffRetain -= 1

            # Gardner's algorithm
            if timing >= symbolPeriod / 2 and timing < ((symbolPeriod / 2) + 1):
                gardnerB = agcObj.adjust(i)
            elif timing >= symbolPeriod:
                gardnerA = agcObj.adjust(i)
                timing -= symbolPeriod
                resync_error = (np.imag(gardnerA) - np.imag(gardnerC)) * np.imag(gardnerB)
                timing += (resync_error * symbolPeriod / 2000000.0)
                gardnerC = gardnerA
                gardnerA = pllObj.loop(gardnerA)
                ctr += 1

                # print periodic status
                try:
                    if ctr % 1000 == 0:
                        logging.info("[%.2f percent complete] [%.2f seconds elapsed] [%.2f seconds remain]",
                                     (ctr * 100 / numCtrs),
                                     (time.time() - start_time),
                                     (((time.time() - start_time) / (ctr / numCtrs)) - (time.time() - start_time)))
                except:
                    # progress estimate can divide by zero; never let it break the loop
                    pass

                if lastMin is None or ctr > lastMin + 0.1 * 72000:

                    # 72 kHz buffers, with I/Q stored in both orders
                    minResBuff1.append(limBin(np.real(gardnerA)))
                    minResBuff1.append(limBin(np.imag(gardnerA)))
                    minResBuff1 = minResBuff1[-1 * len(sync72khz):]

                    minResBuff2.append(limBin(np.imag(gardnerA)))
                    minResBuff2.append(limBin(np.real(gardnerA)))
                    minResBuff2 = minResBuff2[-1 * len(sync72khz):]

                    buff1corr, buff2corr, buff3corr, buff4corr, buff5corr, buff6corr = 0, 0, 0, 0, 0, 0

                    if len(minResBuff1) == len(sync72khz):
                        buff1corr = np.abs(np.sum(np.abs(np.array(minResBuff1) - sync72khz)) - (len(sync72khz) / 2))
                    #if len(minResBuff2) == len(sync72khz):
                    #    buff2corr = np.abs(np.sum(np.abs(np.array(minResBuff2) - sync72khz)) - (len(sync72khz) / 2))
                    #if len(minResBuff1) == len(sync72khz1):
                    #    buff3corr = np.abs(np.sum(np.abs(np.array(minResBuff1) - sync72khz1)) - (len(sync72khz1) / 2))
                    if len(minResBuff2) == len(sync72khz1):
                        buff4corr = np.abs(np.sum(np.abs(np.array(minResBuff2) - sync72khz1)) - (len(sync72khz1) / 2))
                    #if len(minResBuff1) == len(sync72khz2):
                    #    buff5corr = np.abs(np.sum(np.abs(np.array(minResBuff1) - sync72khz2)) - (len(sync72khz2) / 2))
                    #if len(minResBuff2) == len(sync72khz2):
                    #    buff6corr = np.abs(np.sum(np.abs(np.array(minResBuff2) - sync72khz2)) - (len(sync72khz2) / 2))

                    # pick the 2 MHz sync variant corresponding to the match
                    if buff1corr > 30 or buff2corr > 30:
                        sync2mhzChosen = sync2mhz
                    if buff3corr > 30 or buff4corr > 30:
                        sync2mhzChosen = sync2mhz1
                    if buff4corr > 30 or buff6corr > 30:
                        sync2mhzChosen = sync2mhz2

                    # see if sync is present
                    if buff1corr > 30 or buff2corr > 30 or buff3corr > 30 or buff4corr > 30 or buff5corr > 30 or buff6corr > 30:
                        logging.info("MINSYNC: %d", ctr)
                        minSyncs.append(ctr)
                        lastMin = ctr
                        maxBuffRetain = 2 * len(sync2mhz)

            timing += 1
            ctrMain += 1

    if len(maxSyncs) > 0:
        # check usefulness
        if np.min(np.abs(np.diff(maxSyncs) - (0.11 * 2048000))) < (0.05 * 2048000):
            self.__useful = 1
        return list(maxSyncs)[1:]
    else:
        return []