def parseClockBias(statsClk: tempfile._TemporaryFileWrapper, logger: logging.Logger) -> pd.DataFrame:
    """
    parse the clock file
    """
    cFuncName = colored(os.path.basename(__file__), 'yellow') + ' - ' + colored(sys._getframe().f_code.co_name, 'green')

    logger.info('{func:s}: parsing RTKLib clock statistics {file:s}'.format(func=cFuncName, file=statsClk.name))

    # read in the clock statistics file
    dfCLKs = pd.read_csv(statsClk.name, header=None, sep=',', names=rtkc.dRTKPosStat['Clk']['colNames'], usecols=rtkc.dRTKPosStat['Clk']['useCols'])
    amc.logDataframeInfo(df=dfCLKs, dfName='dfCLKs', callerName=cFuncName, logger=logger)

    # select the clock-parameter columns (last 4 entries of useCols)
    cols = np.asarray(rtkc.dRTKPosStat['Clk']['useCols'][-4:])

    # if the value of a clk parameter is 0, replace it by NaN
    dfCLKs[cols] = dfCLKs[cols].replace({0: np.nan})

    # add DateTime
    dfCLKs['DT'] = dfCLKs.apply(lambda x: gpstime.UTCFromWT(x['WNC'], x['TOW']), axis=1)

    amc.logDataframeInfo(df=dfCLKs, dfName='dfCLKs', callerName=cFuncName, logger=logger)
    amutils.logHeadTailDataFrame(logger=logger, callerName=cFuncName, df=dfCLKs, dfName='dfCLKs')

    return dfCLKs
def parseClockBias(statsClk: tempfile._TemporaryFileWrapper, logger: logging.Logger) -> pd.DataFrame:
    """
    parse the clock file
    """
    cFuncName = colored(os.path.basename(__file__), 'yellow') + ' - ' + colored(sys._getframe().f_code.co_name, 'green')

    logger.info('{func:s}: parsing RTKLib clock statistics {file:s}'.format(func=cFuncName, file=statsClk.name))

    # read in the clock statistics file: keep fields 1..8 and name them positionally
    dfCLKs = pd.read_csv(statsClk.name, header=None, sep=',', usecols=[*range(1, 9)])
    dfCLKs.columns = rtkc.dRTKPosStat['Clk']['useCols']
    amutils.printHeadTailDataFrame(df=dfCLKs, name='dfCLKs range')

    # select the clock-parameter columns (last 4 entries of useCols)
    cols = np.asarray(rtkc.dRTKPosStat['Clk']['useCols'][-4:])

    # if the value of a clk parameter is 0, replace it by NaN
    dfCLKs[cols] = dfCLKs[cols].replace({0: np.nan})

    # add DateTime
    dfCLKs['DT'] = dfCLKs.apply(lambda x: gpstime.UTCFromWT(x['WNC'], x['TOW']), axis=1)

    amc.logDataframeInfo(df=dfCLKs, dfName='dfCLKs', callerName=cFuncName, logger=logger)
    amutils.logHeadTailDataFrame(logger=logger, callerName=cFuncName, df=dfCLKs, dfName='dfCLKs')

    return dfCLKs
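# A tiny self-contained illustration (not part of the module) of the read
# pattern used above: read_csv with header=None and usecols=[*range(1, 9)]
# skips field 0 (the record label) and returns the remaining fields in file
# order, which are then named positionally. The sample record and the column
# names below are made-up stand-ins for the real rtkc.dRTKPosStat entries.
def _sketchPositionalRead() -> pd.DataFrame:
    """illustrative only: positional CSV read followed by positional naming"""
    import io
    txt = '$CLK,2080,345600.0,1,2,0.1,0.2,0.3,0.4\n'
    df = pd.read_csv(io.StringIO(txt), header=None, usecols=[*range(1, 9)])
    df.columns = ['WNC', 'TOW', 'stat', 'rcv', 'clk1', 'clk2', 'clk3', 'clk4']  # stand-in names
    return df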
def parseSatelliteStatistics(statsSat: tempfile._TemporaryFileWrapper, logger: logging.Logger) -> pd.DataFrame:
    """
    parseSatelliteStatistics reads the SAT statistics file into a dataframe
    """
    # set current function name
    cFuncName = colored(os.path.basename(__file__), 'yellow') + ' - ' + colored(sys._getframe().f_code.co_name, 'green')

    logger.info('{func:s}: Parsing RTKLib satellites file {file:s} ({info:s})'.format(func=cFuncName, file=statsSat.name, info=colored('be patient', 'red')))

    # read in the satellite statistics file: keep fields 1..10 and name them positionally
    dfSat = pd.read_csv(statsSat.name, header=None, sep=',', usecols=[*range(1, 11)])
    dfSat.columns = rtkc.dRTKPosStat['Res']['useCols']
    amutils.printHeadTailDataFrame(df=dfSat, name='dfSat range')

    # add DT column
    dfSat['DT'] = dfSat.apply(lambda x: gpstime.UTCFromWT(x['WNC'], x['TOW']), axis=1)

    # PRres == 0.0 means only 4 SVs were used, so no residuals could be calculated: change to NaN
    dfSat.PRres.replace(0.0, np.nan, inplace=True)

    amc.logDataframeInfo(df=dfSat, dfName='dfSat', callerName=cFuncName, logger=logger)
    amutils.logHeadTailDataFrame(logger=logger, callerName=cFuncName, df=dfSat, dfName='dfSat')

    return dfSat
def TOW2UTC(WkNr, TOW):
    """
    TOW2UTC transforms a list expressed in TOW to a UTC list

    Parameters:
        WkNr: week number of TOW
        TOW: list of TOWs to transform

    Return:
        UTC: list of UTCs
    """
    # transform TOW to UTC representation
    UTC = []
    for tow in TOW:
        UTC.append(gpstime.UTCFromWT(float(WkNr), float(tow)))
    print("UTC = %s to %s" % (UTC[0], UTC[-1]))

    return UTC
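# For reference, a minimal sketch of the week/TOW-to-datetime arithmetic that
# gpstime.UTCFromWT presumably performs: GPS time counts whole weeks plus
# seconds-of-week from the GPS epoch (06 Jan 1980). The sketch ignores the
# GPS-UTC leap-second offset, so it is illustrative only, not a drop-in
# replacement for gpstime.UTCFromWT.
from datetime import datetime, timedelta

def _sketchUTCFromWT(WkNr: float, TOW: float) -> datetime:
    """illustrative only: GPS epoch + WkNr weeks + TOW seconds (no leap seconds)"""
    return datetime(1980, 1, 6) + timedelta(weeks=WkNr, seconds=TOW)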
def parseRTKLibPositionFile(logger: logging.Logger) -> pd.DataFrame:
    """
    parse the position file from RTKLIB processing into a dataframe
    """
    # set current function name
    cFuncName = colored(os.path.basename(__file__), 'yellow') + ' - ' + colored(sys._getframe().f_code.co_name, 'green')

    logger.info('{func:s}: parsing RTKLib position file {posf:s}'.format(func=cFuncName, posf=amc.dRTK['info']['rtkPosFile']))

    # find the line that ends the header so read_csv can start at the data records
    endHeaderLine = amutils.line_num_for_phrase_in_file('% GPST', amc.dRTK['info']['rtkPosFile'])
    dfPos = pd.read_csv(amc.dRTK['info']['rtkPosFile'], header=endHeaderLine, delim_whitespace=True)
    dfPos = dfPos.rename(columns={'%': 'WNC', 'GPST': 'TOW', 'latitude(deg)': 'lat', 'longitude(deg)': 'lon', 'height(m)': 'ellH', 'sdn(m)': 'sdn', 'sde(m)': 'sde', 'sdu(m)': 'sdu', 'sdne(m)': 'sdne', 'sdeu(m)': 'sdeu', 'sdun(m)': 'sdun', 'age(s)': 'age'})

    # convert the GPS time to UTC
    dfPos['DT'] = dfPos.apply(lambda x: gpstime.UTCFromWT(x['WNC'], x['TOW']), axis=1)

    # store time information (number of epochs, date, start and end times)
    dTime = {}
    dTime['epochs'] = dfPos.shape[0]
    dTime['date'] = dfPos.DT.iloc[0].strftime('%d %b %Y')
    dTime['start'] = dfPos.DT.iloc[0].strftime('%H:%M:%S')
    dTime['end'] = dfPos.DT.iloc[-1].strftime('%H:%M:%S')
    amc.dRTK['Time'] = dTime

    # add UTM coordinates
    dfPos['UTM.E'], dfPos['UTM.N'], dfPos['UTM.Z'], dfPos['UTM.L'] = utm.from_latlon(dfPos['lat'].to_numpy(), dfPos['lon'].to_numpy())
    logger.info('{func:s}: added UTM coordinates'.format(func=cFuncName))

    # inform user
    amc.logDataframeInfo(df=dfPos, dfName='dfPos', callerName=cFuncName, logger=logger)
    logger.info('{func:s}: dTime = {time!s}'.format(func=cFuncName, time=dTime))
    amutils.logHeadTailDataFrame(logger=logger, callerName=cFuncName, df=dfPos, dfName='{posf:s}'.format(posf=amc.dRTK['info']['rtkPosFile']))

    # put the info of dfPos into debug logging
    logger.debug('{func:s}: dfPos info\n{info!s}'.format(info=dfPos.info(), func=cFuncName))

    return dfPos
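# amutils.line_num_for_phrase_in_file is defined elsewhere; a plausible
# stand-in, assuming it returns the zero-based index of the first line that
# contains the phrase (read_csv then treats that line as the header row):
def _sketch_line_num_for_phrase_in_file(phrase: str, filepath: str) -> int:
    """illustrative stand-in: zero-based index of the first line containing phrase, -1 if absent"""
    with open(filepath, 'r') as f:
        for num, line in enumerate(f):
            if phrase in line:
                return num
    return -1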
        # read the MeasExtra data into a numpy array
        dataExtra = sbf2stf.readMeasExtra(sbf2stfConverted[SBF2STFOPTS.index(option)], verbose)
    else:
        print('  wrong option %s given.' % option)
        sys.exit(E_WRONG_OPTION)

# check whether the same signal types are on corresponding lines after sorting
if not sbf2stf.verifySignalTypeOrder(dataMeas['MEAS_SIGNALTYPE'], dataExtra['EXTRA_SIGNALTYPE'], dataMeas['MEAS_TOW'], verbose):
    sys.exit(E_SIGNALTYPE_MISMATCH)

# determine current week number and subsequent date from SBF data
WkNr = int(dataMeas['MEAS_WNC'][0])
dateString = gpstime.UTCFromWT(WkNr, float(dataMeas['MEAS_TOW'][0])).strftime("%d/%m/%Y")
if verbose:
    print('WkNr = %d - dateString = %s' % (WkNr, dateString))

# correct the smoothed PR code and work with the raw PR
dataMeas['MEAS_CODE'] = sbf2stf.removeSmoothing(dataMeas['MEAS_CODE'], dataExtra['EXTRA_SMOOTHINGCORR'], dataExtra['EXTRA_MPCORR'])

# find the list of SVIDs and signal types observed
SVIDs = sbf2stf.observedSatellites(dataMeas['MEAS_SVID'], verbose)
signalTypes = sbf2stf.observedSignalTypes(dataMeas['MEAS_SIGNALTYPE'], verbose)

# create the CN0 plots for all SVs and signal types
def plotLockTime(SVID, signalTypes, dataMeasSVID, lliIndices, lliTOWs, verbose):
    """
    plotLockTime creates a plot of the lock time and indicates the losses of lock

    Parameters:
        SVID: satellite ID
        signalTypes: signal types to represent
        dataMeasSVID: data from MeasEpoch_2 for one SV
        lliIndices: indices for the occurrence of a loss of lock, per signal type
        lliTOWs: TOWs for the occurrence of a loss of lock, per signal type
        verbose: display interactive plot
    """
    gnssSyst, gnssSystShort, gnssPRN = mSSN.svPRN(SVID)

    # create the plot window
    plt.style.use('ggplot')
    plt.figure(1)
    subPlot = plt.subplot(1, 1, 1)

    # titles and axis-labels
    dateString = gpstime.UTCFromWT(float(dataMeasSVID[0]['MEAS_WNC'][0]), float(dataMeasSVID[0]['MEAS_TOW'][0])).strftime("%d/%m/%Y")
    plt.title('Lock Times for %s PRN %d (%d)' % (gnssSyst, gnssPRN, SVID))
    plt.ylabel('Lock Time [s]')
    plt.xlabel('Time [hh:mm] (' + dateString + ')')

    for index, signalType in enumerate(signalTypes):
        sigTypeColor = mPlt.getSignalTypeColor(signalType)

        # plot the lock time versus UTC
        utc = []
        for count in range(0, len(dataMeasSVID[index])):
            utc.append(gpstime.UTCFromWT(float(dataMeasSVID[index]['MEAS_WNC'][count]), float(dataMeasSVID[index]['MEAS_TOW'][count])))
        plt.plot(utc, dataMeasSVID[index]['MEAS_LOCKTIME'], color=sigTypeColor, linestyle='', markersize=0.75, marker='.')

        # add a marker at each LLI
        utc2 = []
        for count2 in range(0, len(dataMeasSVID[index][lliIndices[index]])):
            utc2.append(gpstime.UTCFromWT(float(dataMeasSVID[index][lliIndices[index]]['MEAS_WNC'][count2]), float(dataMeasSVID[index][lliIndices[index]]['MEAS_TOW'][count2])))
        plt.plot(utc2, dataMeasSVID[index][lliIndices[index]]['MEAS_LOCKTIME'], color=sigTypeColor, linestyle='', markersize=7, markerfacecolor=sigTypeColor, marker=mPlt.mFilledMarkers[signalType % len(mPlt.mFilledMarkers)])

        # annotate the plot with the number of LLIs per signal type
        annotateTxt = mSSN.GNSSSignals[signalType]['name'] + str(': %d LLI' % len(lliIndices[index]))
        subPlot.text(0.02, 0.95 - index * 0.0375, annotateTxt, verticalalignment='bottom', horizontalalignment='left', transform=subPlot.transAxes, color=sigTypeColor, fontsize=12)

    # make the x-axis a hh:mm:ss representation
    ax = plt.gca()
    xfmt = md.DateFormatter('%H:%M:%S')
    ax.xaxis.set_major_formatter(xfmt)

    # adjust the ranges for the X and Y axes
    axes = plt.gca()
    axes.set_ylim(mPlt.adjustYAxisLimits(axes))
    axes.set_xlim(mPlt.adjustXAxisLimits(axes))

    plt.text(0, -0.125, r'$\copyright$ Alain Muls ([email protected])', horizontalalignment='left', verticalalignment='bottom', transform=ax.transAxes, alpha=0.5, fontsize='x-small')
    plt.text(1, -0.125, r'$\copyright$ Frederic Snyers ([email protected])', horizontalalignment='right', verticalalignment='bottom', transform=ax.transAxes, alpha=0.5, fontsize='x-small')

    fig = plt.gcf()
    fig.savefig('%s-%s%d-locktime.png' % (gnssSyst, gnssSystShort, gnssPRN), dpi=fig.dpi)

    if verbose:
        plt.show(block=False)
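# How lliIndices is derived is not shown here. One plausible sketch (an
# assumption, not necessarily how sbf2stf detects loss of lock): the receiver's
# lock-time counter resets on a loss of lock, so any epoch where MEAS_LOCKTIME
# decreases with respect to the previous epoch marks an LLI.
import numpy as np

def _sketchLossOfLockIndices(lockTimes) -> np.ndarray:
    """illustrative only: indices where the lock-time counter decreases"""
    lt = np.asarray(lockTimes, dtype=float)
    return np.where(np.diff(lt) < 0)[0] + 1  # +1 flags the epoch after the drop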
        SVID, signalTypesSVID, dataMeasSVIDSignalType, verbose)

    if verbose:
        print('common TOWs = %s (Total %d)' % (iTOW, len(iTOW)))
        print('deltaPR = %s (Total %d)' % (deltaPR, len(deltaPR)))
        print('sidePeakTOWs = %s (Total %d)' % (sidePeakTOWs, len(sidePeakTOWs)))
        print('sidePeakDeltaPRs = %s (Total %d)' % (sidePeakDeltaPRs, len(sidePeakDeltaPRs)))
        print('jumpDPRNear97Indices = %s (#%d)' % (jumpDPRNear97Indices, len(jumpDPRNear97Indices)))
        print("dataMeasSVID[0]['MEAS_WNC'] = %s" % dataMeasSVID[0]['MEAS_WNC'])

    dateString = gpstime.UTCFromWT(float(dataMeasSVID[0]['MEAS_WNC']), float(dataMeasSVID[0]['MEAS_TOW'])).strftime("%d/%m/%Y")
    print('dateString = %s' % dateString)

    plotSidePeaks.plotSidePeaks(SVID, signalTypesSVID, dataMeasSVID[0]['MEAS_WNC'], iTOW, deltaPR, sidePeakTOWs, sidePeakDeltaPRs, jumpDPRNear97Indices, jumpDPRNear1465Indices, lliTOWs, dateString, verbose)
else:
    sys.stderr.write('SV %s%d has incorrect signal types (%s)\n' % (gnssSystShort, gnssPRN, signalTypesSVID))

sys.exit(0)
def plotSidePeaks(SVID, signalTypesSVID, WkNr, iTOW, deltaPR, sidePeaksTOW, sidePeakDPR, jumpDPRNear97Indices, jumpDPRNear1465Indices, lliTOWs, strDate, verbose):
    """
    plotSidePeaks plots the difference between the code measurements on L1A (reference) and E6A and indicates where a possible side peak is noticed

    Parameters:
        SVID: SSN SVID of satellite
        signalTypesSVID: the signal types for this SVID
        WkNr: week number
        iTOW: common TOWs where both code measurements are available
        deltaPR: difference between PR on L1A and E6A
        sidePeaksTOW: list of TOWs which could be indicators of side peaks
        sidePeakDPR: delta PR at these TOWs
        jumpDPRNear97Indices: indices in sidePeaksTOW/sidePeakDPR closest to an integer multiple of 9.7 m
        jumpDPRNear1465Indices: indices in sidePeaksTOW/sidePeakDPR closest to an integer multiple of 14.65 m
        lliTOWs: TOWs that indicate a loss of lock, per signal type
        strDate: observation date
        verbose: display interactive plot
    """
    print('-' * 50)

    # get info for GNSS satellite
    gnssSyst, gnssSystShort, gnssPRN = mSSN.svPRN(SVID)
    SVIDColor = mPlt.getSVIDColor(SVID)

    # create the plot window
    plt.style.use('ggplot')
    plt.figure(2)
    subPlot = plt.subplot(1, 1, 1)

    # titles and axis-labels
    plt.title('Side Peak Indicator for %s PRN %d (%d)' % (gnssSyst, gnssPRN, SVID))
    plt.ylabel(r'$\Delta$ PR (%s - %s)' % (mSSN.GNSSSignals[16]['name'], mSSN.GNSSSignals[18]['name']))
    plt.xlabel('Time [hh:mm] (' + strDate + ')')

    print('iTOW = %s (%d)' % (iTOW, len(iTOW)))
    print('deltaPR = %s (%d)' % (deltaPR, len(deltaPR)))

    # plot the indicators for the side peaks after conversion to UTC
    utc2 = []  # all possible detections
    utc3 = []  # detections that are a multiple of 9.7 m
    utc4 = []  # detections that are a multiple of 14.65 m
    for count in range(0, len(sidePeaksTOW)):
        utc2.append(gpstime.UTCFromWT(float(WkNr), float(sidePeaksTOW[count])))
        if count in jumpDPRNear97Indices:
            utc3.append(utc2[-1])
        if count in jumpDPRNear1465Indices:
            utc4.append(utc2[-1])

    # markerfacecolor='none' keeps the markers hollow (passing None would fall back to the default fill)
    plt.plot(utc2, sidePeakDPR, color='orange', linestyle='', markersize=7, marker='o', markeredgecolor='orange', markerfacecolor='none')

    print('utc2 = %s (#%d)' % (utc2, len(utc2)))
    print('sidePeakDPR = %s (#%d)' % (sidePeakDPR, len(sidePeakDPR)))
    print('jumpDPRNear97Indices = %s (#%d)' % (jumpDPRNear97Indices, len(jumpDPRNear97Indices)))
    print('sidePeakDPR[jumpDPRNear97Indices] = %s (#%d)' % (sidePeakDPR[jumpDPRNear97Indices], len(sidePeakDPR[jumpDPRNear97Indices])))
    print('utc3 = %s (#%d)' % (utc3, len(utc3)))

    plt.plot(utc3, sidePeakDPR[jumpDPRNear97Indices], color='red', linestyle='', markersize=7, marker='o', markeredgecolor='red', markerfacecolor='none')
    plt.plot(utc4, sidePeakDPR[jumpDPRNear1465Indices], color='blue', linestyle='', markersize=7, marker='o', markeredgecolor='blue', markerfacecolor='none')

    # annotate with the number of detections at an integer multiple of 9.7 m
    annotateTxt = 'Side Peaks on E1A: %d' % len(utc3)
    subPlot.text(0.95, 0.95, annotateTxt, verticalalignment='bottom', horizontalalignment='right', transform=subPlot.transAxes, color='red', fontsize=12)

    # annotate with the number of detections at an integer multiple of 14.65 m
    annotateTxt = 'Side Peaks on E6A: %d' % len(utc4)
    subPlot.text(0.95, 0.92, annotateTxt, verticalalignment='bottom', horizontalalignment='right', transform=subPlot.transAxes, color='blue', fontsize=12)

    annotateTxt = 'Other: %d' % (len(utc2) - len(utc3) - len(utc4))
    subPlot.text(0.95, 0.89, annotateTxt, verticalalignment='bottom', horizontalalignment='right', transform=subPlot.transAxes, color='orange', fontsize=12)

    # transform WkNr, TOW to UTC time
    utc = []
    for i in range(0, len(iTOW)):
        utc.append(gpstime.UTCFromWT(float(WkNr), float(iTOW[i])))

    # plot the deltaPR vs UTC time
    plt.plot(utc, deltaPR, color=SVIDColor, linestyle='-', linewidth=0.5, marker='.', markersize=3.5)

    for i, lliTOWsST in enumerate(lliTOWs):
        print('lliTOWs[%d] = %s' % (i, lliTOWsST))
        utc2 = []
        sigTypeColor = mPlt.getSignalTypeColor(signalTypesSVID[i])

        # annotate the plot
        annotateTxt = mSSN.GNSSSignals[signalTypesSVID[i]]['name'] + ' LLI'
        subPlot.text(0.02, 0.95 - i * 0.0375, annotateTxt, verticalalignment='bottom', horizontalalignment='left', transform=subPlot.transAxes, color=sigTypeColor, fontsize=12)

        # draw a vertical line in the signal-type color for each LLI indicator
        for j, lliTOWST in enumerate(lliTOWsST):
            utc2.append(gpstime.UTCFromWT(float(WkNr), lliTOWST))
            plt.axvline(utc2[j], color=sigTypeColor)

    print('sidePeaksTOW = %s (%d)' % (sidePeaksTOW, len(sidePeaksTOW)))

    # adjust the axes to represent hh:mm:ss
    ax = plt.gca()
    xfmt = md.DateFormatter('%H:%M:%S')
    ax.xaxis.set_major_formatter(xfmt)

    # adjust the ranges for the X and Y axes
    axes = plt.gca()
    axes.set_ylim(mPlt.adjustYAxisLimits(axes))
    axes.set_xlim(mPlt.adjustXAxisLimits(axes))

    plt.text(0, -0.125, r'$\copyright$ Alain Muls ([email protected])', horizontalalignment='left', verticalalignment='bottom', transform=ax.transAxes, alpha=0.5, fontsize='x-small')
    plt.text(1, -0.125, r'$\copyright$ Frederic Snyers ([email protected])', horizontalalignment='right', verticalalignment='bottom', transform=ax.transAxes, alpha=0.5, fontsize='x-small')

    fig = plt.gcf()
    fig.savefig('%s-%s%d-sidepeak.png' % (gnssSyst, gnssSystShort, gnssPRN), dpi=fig.dpi)

    if verbose:
        plt.show()

    # close the figure
    plt.close()
    print('-' * 50)
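# The classification of delta-PR jumps into "near a multiple of 9.7 m" or
# "near a multiple of 14.65 m" happens upstream of this function. A minimal
# sketch of how such indices could be computed (the 0.5 m tolerance is an
# assumption, not the project's actual detector):
import numpy as np

def _sketchIndicesNearMultiple(deltaPRs, step: float, tol: float = 0.5) -> np.ndarray:
    """illustrative only: indices where deltaPR lies within tol of a non-zero integer multiple of step"""
    dpr = np.asarray(deltaPRs, dtype=float)
    k = np.round(dpr / step)           # nearest integer multiple
    residual = np.abs(dpr - k * step)  # distance to that multiple
    return np.where((residual <= tol) & (k != 0))[0]

# e.g. _sketchIndicesNearMultiple(sidePeakDPR, step=9.7) for the 9.7 m case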
def parsePosFile(logger: logging.Logger) -> pd.DataFrame:
    """
    parses the 'posn' file created by pyrtklib.py
    """
    # set current function name
    cFuncName = colored(os.path.basename(__file__), 'yellow') + ' - ' + colored(sys._getframe().f_code.co_name, 'green')

    posFilePath = os.path.join(amc.dRTK['posDir'], amc.dRTK['posFile'])
    logger.info('{func:s} parsing rtk-pos file {posf:s}'.format(func=cFuncName, posf=posFilePath))

    # look for the start time of the observation file
    for line in open(posFilePath):
        rec = line.strip()
        if rec.startswith('% obs start'):
            amc.dRTK['obsStart'] = datetime.strptime(rec[14:33], '%Y/%m/%d %H:%M:%S')
            break

    # look for the end time of the observation file
    for line in open(posFilePath):
        rec = line.strip()
        if rec.startswith('% obs end'):
            amc.dRTK['obsEnd'] = datetime.strptime(rec[14:33], '%Y/%m/%d %H:%M:%S')
            break

    # look for the reference position of the observation file
    foundRefPos = False
    for line in open(posFilePath):
        rec = line.strip()
        if rec.startswith('% ref pos'):
            amc.dRTK['RefPos'] = [float(x) for x in rec.split(':')[1].split()]
            amc.dRTK['RefPosUTM'] = utm.from_latlon(amc.dRTK['RefPos'][0], amc.dRTK['RefPos'][1])
            logger.info('{func:s}: reference station coordinates are LLH={llh!s} UTM={utm!s}'.format(func=cFuncName, llh=amc.dRTK['RefPos'], utm=amc.dRTK['RefPosUTM']))
            foundRefPos = True
            break

    if not foundRefPos:
        amc.dRTK['RefPos'] = [np.NaN, np.NaN, np.NaN]
        amc.dRTK['RefPosUTM'] = (np.NaN, np.NaN, np.NaN, np.NaN)
        logger.info('{func:s}: no reference station used'.format(func=cFuncName))

    # find the start of the results in the rtk file
    endHeaderLine = amutils.line_num_for_phrase_in_file('% GPST', posFilePath)
    dfPos = pd.read_csv(posFilePath, header=endHeaderLine, delim_whitespace=True)
    dfPos = dfPos.rename(columns={'%': 'WNC', 'GPST': 'TOW', 'latitude(deg)': 'lat', 'longitude(deg)': 'lon', 'height(m)': 'ellH', 'sdn(m)': 'sdn', 'sde(m)': 'sde', 'sdu(m)': 'sdu', 'sdne(m)': 'sdne', 'sdeu(m)': 'sdeu', 'sdun(m)': 'sdun', 'age(s)': 'age'})

    # check whether we have records for this mode in the data, else exit
    if dfPos.shape[0] == 0:
        logger.info('{func:s}: found no data in pos-file {pos:s}'.format(func=cFuncName, pos=amc.dRTK['posFile']))
        sys.exit(amc.E_FAILURE)

    # store the total number of observations
    amc.dRTK['#obs'] = dfPos.shape[0]
    # store the number of calculated positions for the requested rtk quality
    amc.dRTK['#obsQual'] = len(dfPos.loc[dfPos['Q'] == amc.dRTK['iQual']])

    logger.info('{func:s}: amc.dRTK = \n{drtk!s}'.format(func=cFuncName, drtk=amc.dRTK))

    # convert the GPS week/TOW to a datetime
    dfPos['DT'] = dfPos.apply(lambda x: gpstime.UTCFromWT(x['WNC'], x['TOW']), axis=1)

    # add UTM coordinates
    dfPos['UTM.E'], dfPos['UTM.N'], dfPos['UTM.Z'], dfPos['UTM.L'] = utm.from_latlon(dfPos['lat'].to_numpy(), dfPos['lon'].to_numpy())

    amutils.logHeadTailDataFrame(logger=logger, callerName=cFuncName, df=dfPos, dfName='{posf:s}'.format(posf=posFilePath))
    amc.logDataframeInfo(df=dfPos, dfName='dfPos', callerName=cFuncName, logger=logger)

    return dfPos
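# The row-wise DataFrame.apply above calls gpstime.UTCFromWT once per epoch,
# which is slow on long pos-files. Assuming UTCFromWT is plain
# "GPS epoch + weeks + seconds" arithmetic (no leap-second correction), a
# vectorized alternative would look like this sketch:
def _sketchVectorizedDT(df: pd.DataFrame) -> pd.Series:
    """illustrative only: vectorized week/TOW -> datetime, ignoring leap seconds"""
    gpsEpoch = pd.Timestamp('1980-01-06')
    return gpsEpoch + pd.to_timedelta(df['WNC'] * 7, unit='D') + pd.to_timedelta(df['TOW'], unit='s')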
def readSTFGeodetic(stfFile: str, logger: logging.Logger) -> pd.DataFrame:
    """
    read in the STF Geodetic_v2 file using included header information
    """
    cFuncName = colored(os.path.basename(__file__), 'yellow') + ' - ' + colored(sys._getframe().f_code.co_name, 'green')

    # read in the file
    logger.info('{func:s}: reading file {file:s}'.format(file=stfFile, func=cFuncName))
    dfSTF = pd.read_csv(stfFile, sep=',', skiprows=range(1, 2))
    amutils.logHeadTailDataFrame(df=dfSTF, dfName=dSTF['stf'], callerName=cFuncName, logger=logger)

    dfSTF.dropna(subset=['Latitude[rad]', 'Longitude[rad]'], inplace=True)
    amutils.logHeadTailDataFrame(df=dfSTF, dfName=dSTF['stf'], callerName=cFuncName, logger=logger)
    dfSTF.reset_index(inplace=True)

    # zone definition
    dZone = {}
    dZone['allow'] = {'lat': 50.934519, 'lon': 4.466130, 'radius': 300}
    dZone['deny'] = {'lat': 50.934877, 'lon': 4.466280, 'radius': 200}

    # convert lat/lon to UTM
    for zone in dZone:
        dZone[zone]['UTM.E'], dZone[zone]['UTM.N'], dZone[zone]['UTM.Z'], dZone[zone]['UTM.L'] = UTM.from_latlon(dZone[zone]['lat'], dZone[zone]['lon'])

    # add to dict dSTF
    dSTF['zones'] = dZone

    dfSTF['lat'] = np.degrees(dfSTF['Latitude[rad]'])
    dfSTF['lon'] = np.degrees(dfSTF['Longitude[rad]'])

    # convert the GPS time to UTC
    dfSTF['time'] = dfSTF.apply(lambda x: gpstime.UTCFromWT(x['WNc[week]'], x['TOW[s]']), axis=1)

    # add UTM coordinates
    dfSTF['UTM.E'], dfSTF['UTM.N'], dfSTF['UTM.Z'], dfSTF['UTM.L'] = UTM.from_latlon(dfSTF['lat'].to_numpy(), dfSTF['lon'].to_numpy())

    # calculate the distance to the marker (st-Niklass 51.1577189 4.1915975)
    dfSTF['dist'] = np.linalg.norm(dfSTF[['UTM.E', 'UTM.N']].sub(np.array([dSTF['marker']['UTM.E'], dSTF['marker']['UTM.N']])), axis=1)

    # add info to dSTF about time
    dTime = {}
    dTime['epochs'] = dfSTF.shape[0]
    dTime['date'] = dfSTF.time.iloc[0].strftime('%d %b %Y')
    dTime['start'] = dfSTF.time.iloc[0].strftime('%H:%M:%S')
    dTime['end'] = dfSTF.time.iloc[-1].strftime('%H:%M:%S')
    dSTF['Time'] = dTime

    # add info to dSTF about #epochs
    dSTF['#epochs'] = dfSTF.shape[0]

    # add info to dSTF about the signal types used
    dST = {}
    sigTypes = dfSTF.SignalInfo.unique()
    logger.info('{func:s}: found nav-signals {sigt!s}'.format(sigt=sigTypes, func=cFuncName))

    for i, sigType in enumerate(sigTypes):
        logger.debug('{func:s}: searching name for sig-type {st!s}'.format(st=sigType, func=cFuncName))
        sigTypeNames = []
        for k, v in ssnst.dSigType.items():
            if (sigType & (0b1 << k)) != 0:
                logger.info('{func:s}: found signal {ssnst:s}'.format(ssnst=v, func=cFuncName))
                # add name to the used signal types
                sigTypeNames.append(v)
        # add signal to the dST dict
        dST[sigType] = sigTypeNames

    dSTF['signals'] = dST
    logger.info('{func:s}: found signals {signals!s}'.format(signals=dSTF['signals'], func=cFuncName))

    # find out which PVT error codes we have
    dErrCodes = {}
    errCodes = list(set(dfSTF.Error.unique()))
    for errCode in errCodes:
        logger.debug('{func:s}: searching name for error code {errc:d}'.format(errc=errCode, func=cFuncName))
        for k, v in ssnst.dPVTErrorCode.items():
            if errCode == k:
                logger.info('{func:s}: found error code {errc:s}'.format(errc=colored(v, 'green'), func=cFuncName))
                # add error code to dErrCodes
                dErrCodes[errCode] = v

    dSTF['errCodes'] = dErrCodes
    logger.info('{func:s}: found error codes {errc!s}'.format(errc=errCodes, func=cFuncName))

    # inform user
    logger.info('{func:s}: read STF file {file:s}, added UTM coordinates and GNSS time'.format(file=stfFile, func=cFuncName))

    return dfSTF
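# The bit test above decodes SignalInfo as a bitmask: bit k set means the
# signal with index k in ssnst.dSigType was used. A self-contained sketch of
# the same decoding (the example dict is a made-up stand-in for ssnst.dSigType):
def _sketchDecodeSignalMask(sigType: int, dSigType: dict) -> list:
    """illustrative only: names whose bit is set in the sigType bitmask"""
    return [name for bit, name in dSigType.items() if sigType & (0b1 << bit)]

# e.g. with dSigType = {0: 'L1CA', 1: 'L1P', 3: 'L2C'}:
#   _sketchDecodeSignalMask(0b1001, dSigType) returns ['L1CA', 'L2C']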
def readSTFRxStatus(stfFile: str, logger: logging.Logger) -> pd.DataFrame:
    """
    read in the STF ReceiverStatus_2 file using included header information
    """
    cFuncName = colored(os.path.basename(__file__), 'yellow') + ' - ' + colored(sys._getframe().f_code.co_name, 'green')

    # read in the file
    logger.info('{func:s}: reading file {file:s}'.format(file=stfFile, func=cFuncName))
    dfSTF = pd.read_csv(stfFile, sep=',', skiprows=range(1, 2))

    # remove the columns CPULoad[%], UpTime[s], RxStatus, RxError, Antenna, SampleVar and Blanking[%]
    col2Drop = ['CPULoad[%]', 'UpTime[s]', 'RxStatus', 'RxError', 'Antenna', 'SampleVar', 'Blanking[%]']
    dfSTF.drop(columns=col2Drop, inplace=True)

    # drop rows without an entry for AGC
    idxNaN = pd.isnull(dfSTF).any(axis=1).to_numpy().nonzero()[0]
    logger.info('{func:s}: dropping NaN on indices {idx!s} (#{nbr:d})'.format(idx=idxNaN, nbr=len(idxNaN), func=cFuncName))
    dfSTF.drop(idxNaN, inplace=True, axis=0)

    # convert the GPS time to UTC
    dfSTF['time'] = dfSTF.apply(lambda x: gpstime.UTCFromWT(x['WNc[week]'], x['TOW[s]']), axis=1)

    # find extreme values in the AGC gain column
    dAGC = {}
    dAGC['min'] = dfSTF['AGCGain[dB]'].min()
    dAGC['max'] = dfSTF['AGCGain[dB]'].max()
    dSTF['AGC'] = dAGC

    # add info to dSTF about time
    dTime = {}
    dTime['epochs'] = len(dfSTF['time'].unique().tolist())
    dTime['date'] = dfSTF.time.iloc[0].strftime('%d %b %Y')
    dTime['start'] = dfSTF.time.iloc[0].strftime('%H:%M:%S')
    dTime['end'] = dfSTF.time.iloc[-1].strftime('%H:%M:%S')
    dSTF['Time'] = dTime

    # find out for which front-ends an AGC value is reported
    dFrontEnd = {}
    # get unique values for FrontEnd
    lstFrontEnds = np.sort(dfSTF['FrontEnd'].unique().tolist())
    logger.info('{func:s}: found front-ends {frend!s}'.format(frend=lstFrontEnds, func=cFuncName))
    # find the corresponding names
    for i, frontEnd in enumerate(lstFrontEnds):
        dFrontEnd[frontEnd] = {}
        dFrontEnd[frontEnd]['name'] = ssnst.dFrontEnd[frontEnd]
    dSTF['frontend'] = dFrontEnd

    # add info to dSTF about the number of rows
    dSTF['#rows'] = dfSTF.shape[0]

    logger.info('{func:s}: read STF file {file:s} and added GNSS time'.format(file=stfFile, func=cFuncName))

    return dfSTF
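# The AGC extremes above are taken over the whole file. Should per-front-end
# extremes be wanted instead, a one-line pandas groupby would do (a sketch,
# not part of the original module):
def _sketchAGCPerFrontEnd(dfSTF: pd.DataFrame) -> pd.DataFrame:
    """illustrative only: min/max AGC gain per front-end"""
    return dfSTF.groupby('FrontEnd')['AGCGain[dB]'].agg(['min', 'max'])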
def plotGEOD(dataGEODPos, dataDOP):
    plt.style.use('BEGPIOS')
    # plt.style.use('ggplot')

    plt.figure(1)
    plt.suptitle('UTM Position', fontsize=20)

    # convert week number / TOW to UTC for the x-axis
    utc = []
    for count in range(0, len(dataGEODPos)):
        utc.append(gpstime.UTCFromWT(float(dataGEODPos['GEOD_WNC'][count]), float(dataGEODPos['GEOD_TOW'][count])))

    plt.subplot(4, 1, 1)
    ax = plt.gca()
    xfmt = md.DateFormatter('%H:%M:%S')
    ax.xaxis.set_major_formatter(xfmt)
    # easting is stored in the longitude column
    plt.plot(utc, dataGEODPos['GEOD_Longitude'], label="Easting (m)", marker='.', linestyle='', markersize=1.5)
    plt.legend(shadow=True)

    plt.subplot(4, 1, 2)
    ax = plt.gca()
    xfmt = md.DateFormatter('%H:%M:%S')
    ax.xaxis.set_major_formatter(xfmt)
    y_formatter = matplotlib.ticker.ScalarFormatter(useOffset=False)
    ax.yaxis.set_major_formatter(y_formatter)
    # northing is stored in the latitude column
    plt.plot(utc, dataGEODPos['GEOD_Latitude'], label="Northing (m)", marker='.', linestyle='', markersize=1.5)
    plt.legend(shadow=True)

    plt.subplot(4, 1, 3)
    ax = plt.gca()
    xfmt = md.DateFormatter('%H:%M:%S')
    ax.xaxis.set_major_formatter(xfmt)
    plt.plot(utc, dataGEODPos['GEOD_Height'], label="Height (m)", marker='.', linestyle='', markersize=1.5)
    plt.legend(shadow=True)

    plt.subplot(4, 1, 4)
    ax = plt.gca()
    ax2 = ax.twinx()
    xfmt = md.DateFormatter('%H:%M:%S')
    ax.xaxis.set_major_formatter(xfmt)
    ax.legend(shadow=True, loc=1)
    # PDOP as a filled area on the left axis, number of SVs on the right axis
    ax.fill_between(utc, 0, dataDOP['DOP_PDOP'], label="PDOP", zorder=1)
    ax.set_ylim([0, 20])
    ax.set_ylabel('PDOP')
    ax2.plot(utc, dataGEODPos['GEOD_NrSV'], label="NrSV", marker='.', linestyle='', markersize=1.5, zorder=2)
    ax2.set_ylim([2, max(dataGEODPos['GEOD_NrSV']) + 1])
    ax2.grid(True)
    ax.grid(False)
    ax2.legend(shadow=True, loc=0)

    dateString = gpstime.UTCFromWT(float(dataGEODPos['GEOD_WNC'][0]), float(dataGEODPos['GEOD_TOW'][0])).strftime("%d/%m/%Y")
    plt.xlabel('Time [hh:mm:ss] of ' + dateString)

    plt.show()
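# Each subplot above repeats the same hh:mm:ss x-axis setup; a small helper
# (a sketch, not part of the original module) would remove the duplication:
import matplotlib.dates as md

def _sketchApplyTimeFormat(ax) -> None:
    """illustrative only: format an axes' x-axis as hh:mm:ss"""
    ax.xaxis.set_major_formatter(md.DateFormatter('%H:%M:%S'))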
for option in SBF2STFOPTS:
    # print('option = %s - %d' % (option, SBF2STFOPTS.index(option)))
    if option == 'ChannelStatus_1':
        # read the ChannelStatus data into a numpy array
        dataChanSt = sbf2stf.readChannelStatus(sbf2stfConverted[SBF2STFOPTS.index(option)], verbose)
    else:
        print('  wrong option %s given.' % option)
        sys.exit(E_WRONG_OPTION)

print('dataChanSt = %s' % dataChanSt)
print('dataChanSt[0] = %s' % dataChanSt[0])

# determine current week number and subsequent date from SBF data
WkNr = int(dataChanSt['CHST_WNC'][0])
dateString = gpstime.UTCFromWT(WkNr, float(dataChanSt['CHST_TOW'][0])).strftime("%d/%m/%Y")
if verbose:
    print('WkNr = %d - dateString = %s' % (WkNr, dateString))

# create a subset with only valid elevation angles
indexValid = sbf2stf.findValidElevation(dataChanSt['CHST_Elevation'], verbose)
dataChanStValid = dataChanSt[indexValid]

# find the list of SVIDs with valid elev/azim data
SVIDs = sbf2stf.observedSatellites(dataChanStValid['CHST_SVID'], verbose)

# extract for each satellite the elevation and azimuth angles
prnElev = []
prnAzim = []
prnHour = []
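# The fragment ends before the extraction loop is shown. A plausible sketch of
# the continuation (the field name CHST_Azimuth is an assumption), kept
# commented out because it is not the original code:
# for svid in SVIDs:
#     svMask = dataChanStValid['CHST_SVID'] == svid
#     prnElev.append(dataChanStValid['CHST_Elevation'][svMask])
#     prnAzim.append(dataChanStValid['CHST_Azimuth'][svMask])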