Example #1
def mergeSignals(csvFiles: list, logger: logging.Logger) -> pd.DataFrame:
    """
    merge the dataframes from both signals into one dataframe
    """
    # read both files into separate dataframes
    cFuncName = colored(os.path.basename(__file__),
                        'yellow') + ' - ' + colored(
                            sys._getframe().f_code.co_name, 'green')

    # dCSV is a module-level dict describing both signal files
    dfObs = []
    for i, csvFile in enumerate(csvFiles):
        dfObs.append(pd.read_csv(csvFile, sep=',', parse_dates=True))
        # get the list of SVs for each signal type
        dCSV[i]['SVs'] = dfObs[i].columns.values[1:]
        dCSV[i]['#SVs'] = len(dCSV[i]['SVs'])

        # rename the columns in each dataframe to reflect PRN-ST as column name
        colNames = dfObs[i].columns.values[1:] + '-{st:s}'.format(
            st=dCSV[i]['signal'])
        colNames = np.concatenate([['time'], colNames])
        dfObs[i].columns = colNames
        # show the dataframe
        amutils.logHeadTailDataFrame(df=dfObs[i],
                                     dfName=dCSV[i]['signal'],
                                     callerName=cFuncName,
                                     logger=logger)
        dfObs[i]['time'] = pd.to_datetime(dfObs[i]['time'],
                                          format='%Y-%m-%d %H:%M:%S')
        # print('type of time = {type!s}'.format(type=type(dfObs[i]['time'][0])))
        # sys.exit(0)

    # get the list of SVs common to both signals
    dCSV['SVs'] = np.intersect1d(dCSV[0]['SVs'], dCSV[1]['SVs'])
    dCSV['#SVs'] = len(dCSV['SVs'])

    logger.info('{func:s}: information:\n{dict!s}'.format(dict=dCSV,
                                                          func=cFuncName))

    # merge both dataframes on 'time'
    return pd.merge(dfObs[0], dfObs[1], on=['time'], how='outer')
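The essential technique in this example is the outer join on the shared 'time' column. A minimal self-contained sketch with toy data (independent of the module-level dCSV and the amutils helpers used above):

import pandas as pd

df_s1 = pd.DataFrame({'time': pd.to_datetime(['2020-01-01 00:00:00', '2020-01-01 00:00:30']),
                      'E01-S1': [45.0, 46.0]})
df_s2 = pd.DataFrame({'time': pd.to_datetime(['2020-01-01 00:00:30', '2020-01-01 00:01:00']),
                      'E01-S2': [44.0, 43.5]})

# the outer merge keeps every epoch present in either signal; missing observations become NaN
print(pd.merge(df_s1, df_s2, on=['time'], how='outer'))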
Example #2
def store_to_cvs(df: pd.DataFrame,
                 ext: str,
                 dInfo: dict,
                 logger: logging.Logger,
                 index: bool = True):
    """
    store the dataframe to a CSV file
    """
    cFuncName = colored(os.path.basename(__file__),
                        'yellow') + ' - ' + colored(
                            sys._getframe().f_code.co_name, 'green')

    csv_name = amc.dRTK['info']['rtkPosFile'] + '.' + ext
    dInfo[ext] = csv_name
    df.to_csv(csv_name, index=index, header=True)

    amutils.logHeadTailDataFrame(logger=logger,
                                 callerName=cFuncName,
                                 df=df,
                                 dfName=csv_name)
    logger.info('{func:s}: stored dataframe as csv file {csv:s}'.format(
        csv=colored(csv_name, 'green'), func=cFuncName))
Example #3
def enu_statistics(dRtk: dict, dfENU: pd.DataFrame, logger: logging.Logger) -> pd.DataFrame:
    """
    enu_statistics calculates the statistics of the ENU coordinates passed
    """
    cFuncName = colored(os.path.basename(__file__), 'yellow') + ' - ' + colored(sys._getframe().f_code.co_name, 'green')

    dfENUStats = dfENU[['dUTM.E', 'dUTM.N', 'dEllH']].describe()
    amutils.logHeadTailDataFrame(logger=logger, callerName=cFuncName, df=dfENUStats, dfName='dfENUStats')

    # add statistics for UTM coordinate differences
    dENUStats = {}
    for col in ('dUTM.E', 'dUTM.N', 'dEllH'):
        dCol = {}
        for index, row in dfENUStats.iterrows():
            dCol[index] = row[col]
        dENUStats[col] = dCol

        logger.debug('{func:s}: statistics for {col:s}\n{stat!s}'.format(col=col, stat=dENUStats[col], func=cFuncName))

    # add to global dRTK dict
    dRtk['stats'] = dENUStats

    return dfENUStats
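The nested loop above rebuilds the describe() output as a dict of dicts; pandas can produce the same result in one call. A minimal sketch with toy ENU offsets:

import pandas as pd

df = pd.DataFrame({'dUTM.E': [0.10, -0.20, 0.05],
                   'dUTM.N': [0.00, 0.30, -0.10],
                   'dEllH': [1.20, 1.10, 1.30]})

# describe() returns a DataFrame indexed by statistic name;
# to_dict() yields the same {column: {statistic: value}} mapping built by the loop
dENUStats = df.describe().to_dict()
print(dENUStats['dUTM.E']['mean'], dENUStats['dEllH']['std'])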
Example #4
def parseSatelliteStatistics(statsSat: tempfile._TemporaryFileWrapper, logger: logging.Logger) -> pd.DataFrame:
    """
    parseSatelliteStatistics reads the SAT statistics file into a dataframe
    """
    # set current function name
    cFuncName = colored(os.path.basename(__file__), 'yellow') + ' - ' + colored(sys._getframe().f_code.co_name, 'green')

    logger.info('{func:s}: Parsing RTKLib satellites file {file:s} ({info:s})'.format(func=cFuncName, file=statsSat.name, info=colored('be patient', 'red')))

    # read in the satellite status file
    dfSat = pd.read_csv(statsSat.name, header=None, sep=',', names=rtkc.dRTKPosStat['Res']['colNames'], usecols=rtkc.dRTKPosStat['Res']['useCols'])

    # add DT column
    dfSat['DT'] = dfSat.apply(lambda x: gpstime.UTCFromWT(x['WNC'], x['TOW']), axis=1)

    # if PRres == 0.0, presumably only 4 SVs were used so no residuals could be calculated; change to NaN
    dfSat.PRres.replace(0.0, np.nan, inplace=True)

    amc.logDataframeInfo(df=dfSat, dfName='dfSat', callerName=cFuncName, logger=logger)

    amutils.logHeadTailDataFrame(logger=logger, callerName=cFuncName, df=dfSat, dfName='dfSat')

    return dfSat
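gpstime.UTCFromWT is a project-specific helper; the GPS week-number/time-of-week conversion it performs can be sketched standalone (leap seconds are ignored here, so this is an approximation):

from datetime import datetime, timedelta

GPS_EPOCH = datetime(1980, 1, 6)  # start of the GPS time scale

def utc_from_wt(wnc: int, tow: float) -> datetime:
    # GPS time = epoch + full weeks + seconds into the week;
    # true UTC additionally needs the GPS-UTC leap-second offset
    return GPS_EPOCH + timedelta(weeks=wnc, seconds=tow)

print(utc_from_wt(2086, 388800.0))  # hypothetical week/TOW pair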
Example #5
def parseRTKLibPositionFile(logger: logging.Logger) -> pd.DataFrame:
    """
    parse the position file from RTKLIB processing into a dataframe
    """
    # set current function name
    cFuncName = colored(os.path.basename(__file__), 'yellow') + ' - ' + colored(sys._getframe().f_code.co_name, 'green')

    logger.info('{func:s}: parsing RTKLib position file {posf:s}'.format(func=cFuncName, posf=amc.dRTK['info']['rtkPosFile']))

    # check whether the datafile is readable
    endHeaderLine = amutils.line_num_for_phrase_in_file('%  GPST', amc.dRTK['info']['rtkPosFile'])
    dfPos = pd.read_csv(amc.dRTK['info']['rtkPosFile'], header=endHeaderLine, delim_whitespace=True)
    dfPos = dfPos.rename(columns={'%': 'WNC', 'GPST': 'TOW', 'latitude(deg)': 'lat', 'longitude(deg)': 'lon', 'height(m)': 'ellH', 'sdn(m)': 'sdn', 'sde(m)': 'sde', 'sdu(m)': 'sdu', 'sdne(m)': 'sdne', 'sdeu(m)': 'sdeu', 'sdun(m)': 'sdun', 'age(s)': 'age'})

    # convert the GPS time to UTC
    dfPos['DT'] = dfPos.apply(lambda x: gpstime.UTCFromWT(x['WNC'], x['TOW']), axis=1)

    dTime = {}
    dTime['epochs'] = dfPos.shape[0]
    dTime['date'] = dfPos.DT.iloc[0].strftime('%d %b %Y')
    dTime['start'] = dfPos.DT.iloc[0].strftime('%H:%M:%S')
    dTime['end'] = dfPos.DT.iloc[-1].strftime('%H:%M:%S')
    amc.dRTK['Time'] = dTime

    # add UTM coordinates
    dfPos['UTM.E'], dfPos['UTM.N'], dfPos['UTM.Z'], dfPos['UTM.L'] = utm.from_latlon(dfPos['lat'].to_numpy(), dfPos['lon'].to_numpy())
    logger.info('{func:s}: added UTM coordinates'.format(func=cFuncName))

    # inform user
    amc.logDataframeInfo(df=dfPos, dfName='dfPos', callerName=cFuncName, logger=logger)
    logger.info('{func:s}: dTime = {time!s}'.format(func=cFuncName, time=dTime))
    amutils.logHeadTailDataFrame(logger=logger, callerName=cFuncName, df=dfPos, dfName='{posf:s}'.format(posf=amc.dRTK['info']['rtkPosFile']))

    # put the info of dfPos into debug logging; df.info() writes to a buffer and returns None (assumes 'import io')
    buf = io.StringIO()
    dfPos.info(buf=buf)
    logger.debug('{func:s}: dfPos info\n{info:s}'.format(info=buf.getvalue(), func=cFuncName))

    return dfPos
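amutils.line_num_for_phrase_in_file is a project helper; a plausible standalone sketch (an assumption based on how its result feeds pd.read_csv's header argument above):

def line_num_for_phrase_in_file(phrase: str, filename: str) -> int:
    # return the 0-based number of the first line containing phrase, or -1 if absent
    with open(filename) as f:
        for nr, line in enumerate(f):
            if phrase in line:
                return nr
    return -1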
Example #6
def crdDiff(dMarker: dict, dfUTMh: pd.DataFrame, plotCrds: list,
            logger: logging.Logger) -> Tuple[pd.DataFrame, dict]:
    """
    calculates the differences of UTM (E,N) and ellH using the reference position or the mean position
    """
    cFuncName = colored(os.path.basename(__file__),
                        'yellow') + ' - ' + colored(
                            sys._getframe().f_code.co_name, 'green')

    # determine the difference to weighted average or marker position of UTM (N,E), ellH to plot
    dfCrd = pd.DataFrame(columns=plotCrds)

    # determine the coordinates of used reference (either mean or user determined)
    # note: comparing directly with [np.NaN] * 3 is always False since NaN != NaN, hence np.isnan
    if np.all(np.isnan([dMarker['UTM.E'], dMarker['UTM.N'],
                        dMarker['ellH']])):
        # no reference position given, so use the mean position
        originCrds = [float(amc.dRTK['WAvg'][crd]) for crd in plotCrds]
    else:
        # make difference to reference position
        originCrds = [float(amc.dRTK['marker'][crd]) for crd in plotCrds]

    # subtract origin coordinates from UTMh positions
    dfCrd = dfUTMh.sub(originCrds, axis='columns')

    amutils.logHeadTailDataFrame(logger=logger,
                                 callerName=cFuncName,
                                 df=dfCrd,
                                 dfName='dfCrd')

    crdMax = max(dfCrd.max())
    crdMin = min(dfCrd.min())
    crdMax = int(crdMax + (1 if crdMax > 0 else -1))
    crdMin = int(crdMin + (1 if crdMin > 0 else -1))

    dCrdLim = {'max': crdMax, 'min': crdMin}

    return dfCrd, dCrdLim
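The offset computation hinges on DataFrame.sub broadcasting one origin value per column. A small standalone sketch with toy coordinates:

import pandas as pd

dfUTMh = pd.DataFrame({'UTM.E': [500100.2, 500100.4],
                       'UTM.N': [5600200.1, 5600200.0],
                       'ellH': [120.3, 120.5]})
originCrds = [500100.0, 5600200.0, 120.0]  # hypothetical reference position

# subtract one origin value per column, leaving offsets to the reference
print(dfUTMh.sub(originCrds, axis='columns'))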
Example #7
def rise_set_times(prn: str, df_obstab: pd.DataFrame, nomint_multi: int,
                   logger: logging.Logger) -> Tuple[int, list, list, list]:
    """
    rise_set_times determines observed rise and set times for PRN
    """
    cFuncName = colored(os.path.basename(__file__),
                        'yellow') + ' - ' + colored(
                            sys._getframe().f_code.co_name, 'green')

    # index the rows for this PRN
    prn_loc = df_obstab.loc[df_obstab['PRN'] == prn].index
    # logger.info('{func:s}: Data for {prn:s} are at indices\n{idx!s}'.format(prn=colored(prn, 'green'), idx=prn_loc, func=cFuncName))

    # create a df_prn only using these rows (copy to avoid a SettingWithCopyWarning when adding 'gap' below)
    df_prn = df_obstab.iloc[prn_loc].copy()
    # amutils.logHeadTailDataFrame(logger=logger, callerName=cFuncName, df=df_prn, dfName='df_prn[{:s}]'.format(prn), head=30)

    # determine the datetime gaps for this PRN
    df_prn['gap'] = (df_prn['DATE_TIME'] -
                     df_prn['DATE_TIME'].shift(1)).astype('timedelta64[s]')
    amutils.logHeadTailDataFrame(logger=logger,
                                 callerName=cFuncName,
                                 df=df_prn,
                                 dfName='df_prn[{:s}]'.format(prn))

    # find the nominal time interval of observations
    nominal_interval = df_prn['gap'].median()
    # find the indices where the interval is bigger than nominal_interval
    # idx_arc_start = df_prn[(df_prn['gap'] > nomint_multi * nominal_interval) | (df_prn['gap'].isna())].index.tolist()
    idx_arc_start = [
        df_prn.index[0]
    ] + df_prn[df_prn['gap'] > nomint_multi * nominal_interval].index.tolist()
    idx_arc_end = df_prn[df_prn['gap'].shift(-1) > nomint_multi *
                         nominal_interval].index.tolist() + [df_prn.index[-1]]

    # find the number of observations for each arc
    obs_arc_count = []
    for arc_start, arc_end in zip(idx_arc_start, idx_arc_end):
        obs_arc_count.append(
            prn_loc.get_loc(arc_end) - prn_loc.get_loc(arc_start) + 1)

    # get the corresponding data time info
    df_tmp = df_prn.loc[idx_arc_start][['DATE_TIME']]
    # lst_time = [datetime.strptime(dt.strftime('%H:%M:%S'), '%H:%M:%S').time() for dt in df_tmp['DATE_TIME']]
    # df_tmp.loc[:, 'time'] = lst_time

    # dt_arc_start = pd.to_datetime(df_tmp['DATE_TIME']).tolist()

    dt_arc_start = [
        datetime.strptime(dt.strftime('%H:%M:%S'), '%H:%M:%S').time()
        for dt in df_tmp['DATE_TIME']
    ]
    logger.debug('{func:s}: dt_arc_start = {dt!s}'.format(dt=dt_arc_start, func=cFuncName))

    df_tmp = df_prn.loc[idx_arc_end][['DATE_TIME']]
    # dt_arc_end = pd.to_datetime(df_tmp['DATE_TIME']).tolist()
    dt_arc_end = [
        datetime.strptime(dt.strftime('%H:%M:%S'), '%H:%M:%S').time()
        for dt in df_tmp['DATE_TIME']
    ]
    logger.debug('{func:s}: dt_arc_end = {dt!s}'.format(dt=dt_arc_end, func=cFuncName))

    logger.info(
        '{func:s}:    nominal observation interval for {prn:s} = {tint:f}'.
        format(prn=colored(prn, 'green'),
               tint=nominal_interval,
               func=cFuncName))
    # logger.info('{func:s}:    {prn:s} rises at:\n{arcst!s}'.format(prn=colored(prn, 'green'), arcst=df_prn.loc[idx_arc_end][['DATE_TIME', 'PRN', 'gap']], func=cFuncName))
    # logger.info('{func:s}:    {prn:s} sets at:\n{arcend!s}'.format(prn=colored(prn, 'green'), arcend=df_prn.loc[idx_arc_end][['DATE_TIME', 'PRN', 'gap']], func=cFuncName))

    for i, (stdt, enddt) in enumerate(zip(dt_arc_start, dt_arc_end)):
        logger.info(
            '{func:s}:       arc[{nr:d}]: {stdt:s} -> {enddt:s}'.format(
                nr=i,
                stdt=colored(stdt.strftime('%H:%M:%S'), 'yellow'),
                enddt=colored(enddt.strftime('%H:%M:%S'), 'yellow'),
                func=cFuncName))

    # copy the gap column for this PRN into original df
    df_obstab.loc[df_prn.index, 'gap'] = df_prn['gap']

    return nominal_interval, dt_arc_start, dt_arc_end, obs_arc_count
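The arc detection boils down to differencing consecutive timestamps and flagging gaps larger than a multiple of the median interval. A self-contained sketch of that technique:

import pandas as pd

df = pd.DataFrame({'DATE_TIME': pd.to_datetime(
    ['2020-01-01 00:00:00', '2020-01-01 00:00:30', '2020-01-01 00:01:00',
     '2020-01-01 02:00:00', '2020-01-01 02:00:30'])})

# gap in seconds to the previous epoch; the first epoch has none (NaN)
df['gap'] = df['DATE_TIME'].diff().dt.total_seconds()
nominal_interval = df['gap'].median()

# an arc starts at the first row and wherever the gap exceeds a multiple of the nominal interval
idx_arc_start = [df.index[0]] + df[df['gap'] > 4 * nominal_interval].index.tolist()
print(nominal_interval, idx_arc_start)  # 30.0 [0, 3]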
Example #8
def plot_utm_ellh(dRtk: dict,
                  dfUTM: pd.DataFrame,
                  logger: logging.Logger,
                  showplot: bool = False):
    """
    plots the UTM coordinates
    """
    cFuncName = colored(os.path.basename(__file__),
                        'yellow') + ' - ' + colored(
                            sys._getframe().f_code.co_name, 'green')

    # select colors for position mode from the matplotlib tableau palette
    colors = [
        'tab:gray', 'tab:green', 'tab:olive', 'tab:orange', 'tab:cyan',
        'tab:blue', 'tab:red', 'tab:pink', 'tab:purple', 'tab:brown'
    ]

    # what to plot
    crds2Plot = ['UTM.E', 'UTM.N', 'ellH', 'age']
    stdDev2Plot = ['sde', 'sdn', 'sdu']
    stdDevWAvg = ['sdUTM.E', 'sdUTM.N', 'sdellH']

    # set up the plot
    plt.style.use('ggplot')

    # subplots
    fig, ax = plt.subplots(nrows=len(crds2Plot),
                           ncols=1,
                           sharex=True,
                           figsize=(20.0, 16.0))

    # make title for plot
    ax[0].annotate(
        '{camp:s} - {date:s} - {marker:s} ({pos:s}, quality {mode:s})'.format(
            camp=dRtk['campaign'],
            date=dRtk['obsStart'].strftime('%d %b %Y'),
            marker=dRtk['marker'],
            pos=dRtk['posFile'],
            mode=dRtk['rtkqual'].upper()),
        xy=(0.5, 1),
        xycoords='axes fraction',
        xytext=(0, 0),
        textcoords='offset pixels',
        horizontalalignment='center',
        verticalalignment='bottom',
        weight='bold',
        fontsize='large')

    # copyright notice
    ax[-1].annotate(r'$\copyright$ Alain Muls ([email protected])',
                    xy=(1, 1),
                    xycoords='axes fraction',
                    xytext=(0, 0),
                    textcoords='offset pixels',
                    horizontalalignment='right',
                    verticalalignment='bottom',
                    weight='bold',
                    fontsize='large')

    # determine the difference to weighted average or marker position of UTM (N,E), ellH to plot
    dfCrd = pd.DataFrame(columns=crds2Plot[:3])
    originCrds = [float(amc.dRTK['WAVG'][crd]) for crd in crds2Plot[:3]]
    dfCrd = dfUTM[crds2Plot[:3]].sub(originCrds, axis='columns')
    amutils.logHeadTailDataFrame(logger=logger,
                                 callerName=cFuncName,
                                 df=dfCrd,
                                 dfName='dfCrd')

    crdMax = max(dfCrd.max())
    crdMin = min(dfCrd.min())
    crdMax = int(crdMax + (1 if crdMax > 0 else -1))
    crdMin = int(crdMin + (1 if crdMin > 0 else -1))

    # plot the coordinates dN, dE, dU and ns
    for i, crd in enumerate(crds2Plot):
        if i < 3:  # subplots for coordinates display dN, dE, dU
            # plot over the different coordinate offsets using different colors

            # we plot the offset to the weighted average value
            # ax[i].plot(dfUTM['DT'], dCrd[crd], linestyle='', marker='.', markersize=2, color=colors[i], label=crd)
            # ax[i].fill_between(dfUTM['DT'], dCrd[crd]-dfUTM[stdDev2Plot[i]], dCrd[crd]+dfUTM[stdDev2Plot[i]], color=colors[i], alpha=0.15, interpolate=False)

            for key, value in rtkc.dRTKQual.items():
                # get the indices according to the position mode
                idx = dfUTM.index[dfUTM['Q'] == key]
                rgb = mpcolors.colorConverter.to_rgb(colors[key])
                rgb_new = amutils.make_rgb_transparent(rgb, (1, 1, 1), 0.3)

                # plot according to the color list if the length of the index is not 0
                if len(idx) > 0:
                    ax[i].errorbar(x=dfUTM.loc[idx]['DT'],
                                   y=dfCrd.loc[idx][crd],
                                   yerr=dfUTM.loc[idx][stdDev2Plot[i]],
                                   linestyle='None',
                                   fmt='o',
                                   ecolor=rgb_new,
                                   capthick=2,
                                   markersize=2,
                                   color=colors[key],
                                   label=value)

            # set dimensions of y-axis
            ax[i].set_ylim([crdMin, crdMax])
            ax[i].set_ylabel('{crd:s} [m]'.format(crd=crd), fontsize='large')

            # location of legend
            ax[i].legend(loc='best', markerscale=4)

            # annotate plot
            annotatetxt = r'WAvg: {crd:.3f}m $\pm$ {sdcrd:.3f}m'.format(
                crd=amc.dRTK['WAVG'][crds2Plot[i]],
                sdcrd=amc.dRTK['WAVG'][stdDevWAvg[i]])
            ax[i].annotate(annotatetxt,
                           xy=(1, 1),
                           xycoords='axes fraction',
                           xytext=(0, 0),
                           textcoords='offset pixels',
                           horizontalalignment='right',
                           verticalalignment='bottom',
                           fontweight='bold',
                           fontsize='large')

        else:  # last subplot: age of corrections & #SVs
            # plot #SVs on left axis
            # ax[i].set_ylim([0, 24])

            # plot AGE value
            ax[i].set_ylabel('Age [s]', fontsize='large', color='darkorchid')
            ax[i].set_xlabel('Time [sec]', fontsize='large')
            ax[i].plot(dfUTM['DT'],
                       dfUTM['age'],
                       linestyle='',
                       marker='x',
                       markersize=2,
                       color='darkorchid',
                       label='age')
            ax[i].set_title('#SVs & Age of correction',
                            fontsize='large',
                            fontweight='bold')

            # plot number of SV on second y-axis
            axRight = ax[i].twinx()

            axRight.set_ylim([0, 25])
            axRight.set_ylabel('#SVs [-]', fontsize='large', color='grey')

            ax[i].fill_between(dfUTM['DT'],
                               0,
                               dfUTM['ns'],
                               alpha=0.5,
                               linestyle='-',
                               linewidth=3,
                               color='grey',
                               label='#SVs',
                               interpolate=False)

            # create the ticks for the time axis
            dtFormat = plot_utils.determine_datetime_ticks(
                startDT=dfUTM['DT'].iloc[0], endDT=dfUTM['DT'].iloc[-1])

            if dtFormat['minutes']:
                if dfUTM.shape[0] > 300:
                    ax[i].xaxis.set_major_locator(
                        dates.MinuteLocator(byminute=range(1, 60, 10),
                                            interval=1))
                else:
                    ax[i].xaxis.set_major_locator(
                        dates.MinuteLocator(byminute=range(1, 60), interval=1))

            else:
                ax[i].xaxis.set_major_locator(
                    dates.HourLocator(
                        interval=dtFormat['hourInterval']))  # every 4 hours
            ax[i].xaxis.set_major_formatter(
                dates.DateFormatter('%H:%M'))  # hours and minutes

            ax[i].xaxis.set_minor_locator(
                dates.DayLocator(interval=1))  # every day
            ax[i].xaxis.set_minor_formatter(dates.DateFormatter('\n%d-%m-%Y'))

            ax[i].xaxis.set_tick_params(rotation=0)
            for tick in ax[i].xaxis.get_major_ticks():
                # tick.tick1line.set_markersize(0)
                # tick.tick2line.set_markersize(0)
                tick.label1.set_horizontalalignment('center')

    # save the plot in subdir png of GNSSSystem
    pngName = os.path.join(
        dRtk['posDir'],
        '{name:s}-ENU.png'.format(name=os.path.splitext(dRtk['posFile'])[0]))
    fig.savefig(pngName, dpi=fig.dpi)

    logger.info('{func:s}: created plot {plot:s}'.format(func=cFuncName,
                                                         plot=colored(
                                                             pngName,
                                                             'green')))

    if showplot:
        plt.show(block=True)
    else:
        plt.close(fig)

    return
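amutils.make_rgb_transparent is a project helper; judging from its arguments it performs plain alpha compositing against a background colour, which is easy to sketch standalone (an assumption, not the project's actual code):

from matplotlib import colors as mpcolors

def make_rgb_transparent(rgb, bg_rgb, alpha):
    # classic alpha compositing: result = alpha * colour + (1 - alpha) * background
    return [alpha * c + (1 - alpha) * bg for c, bg in zip(rgb, bg_rgb)]

rgb = mpcolors.colorConverter.to_rgb('tab:green')
print(make_rgb_transparent(rgb, (1, 1, 1), 0.3))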
Example #9
def plotUTMCoords(dStf: dict, dfCrd: pd.DataFrame, logger: logging.Logger):
    """
    plots the UTM coordinates and #SVs on 4 different plots as a function of time
    """
    cFuncName = colored(os.path.basename(__file__),
                        'yellow') + ' - ' + colored(
                            sys._getframe().f_code.co_name, 'green')
    crds = ['UTM.E', 'UTM.N', 'Height[m]', 'dist', 'NrSV']
    # crds = ['dist', 'NrSV']

    logger.info(
        '{func:s}: start plotting UTM coordinates'.format(func=cFuncName))

    amutils.logHeadTailDataFrame(df=dfCrd,
                                 dfName='dfCrd',
                                 callerName=cFuncName,
                                 logger=logger)

    # specify the style
    mpl.style.use('seaborn')

    fig, axes = plt.subplots(nrows=len(crds), ncols=1, sharex=True)
    fig.set_size_inches(18.5, 15)

    # get the index for 2D/3D
    idx3D = dfCrd.index[dfCrd['2D/3D'] == 0]
    idx2D = dfCrd.index[dfCrd['2D/3D'] == 1]

    # get the index for signals used for PNT AND for 3D/2D
    dIdx = {}  # dict with indices corresponding to signals & 3D/2D usage
    for st, lstSTNames in dStf['signals'].items():
        stNames = ",".join(lstSTNames)
        logger.info('{func:s}: st = {st:d}  name = {name!s}'.format(
            st=st, name=stNames, func=cFuncName))
        dIdx[st] = {}
        dIdx[st]['3D'] = dfCrd.index[(dfCrd['SignalInfo'] == st)
                                     & (dfCrd['2D/3D'] == 0)]
        dIdx[st]['2D'] = dfCrd.index[(dfCrd['SignalInfo'] == st)
                                     & (dfCrd['2D/3D'] == 1)]
        logger.info(
            '{func:s}: list of indices dIdx[{st:d}][3D] = {idx!s}'.format(
                st=st, idx=dIdx[st]['3D'], func=cFuncName))
        logger.info(
            '{func:s}: list of indices dIdx[{st:d}][2D] = {idx!s}'.format(
                st=st, idx=dIdx[st]['2D'], func=cFuncName))

    # for setting the time on time-scale
    dtFormat = plot_utils.determine_datetime_ticks(
        startDT=dfCrd['time'].iloc[0], endDT=dfCrd['time'].iloc[-1])

    for crd, ax in zip(crds, axes):
        # print in order UTM.E, UTM.N, height, and NrSV and indicate 2D/3D by alpha
        logger.info('{func:s}: plotting {crd:s}'.format(crd=crd,
                                                        func=cFuncName))

        # x-axis properties
        ax.set_xlim([dfCrd['time'].iloc[0], dfCrd['time'].iloc[-1]])
        if dtFormat['minutes']:
            ax.xaxis.set_major_locator(
                dates.MinuteLocator(byminute=[0, 15, 30, 45], interval=1))
        else:
            ax.xaxis.set_major_locator(
                dates.HourLocator(
                    interval=dtFormat['hourInterval']))  # every 4 hours
        ax.xaxis.set_major_formatter(
            dates.DateFormatter('%H:%M'))  # hours and minutes

        ax.xaxis.set_minor_locator(dates.DayLocator(interval=1))  # every day
        ax.xaxis.set_minor_formatter(dates.DateFormatter('\n%d-%m-%Y'))

        ax.xaxis.set_tick_params(rotation=0)
        for tick in ax.xaxis.get_major_ticks():
            # tick.tick1line.set_markersize(0)
            # tick.tick2line.set_markersize(0)
            tick.label1.set_horizontalalignment('center')

        # (re)set the color iterator
        colorsIter = iter(list(mcolors.TABLEAU_COLORS))

        if crd != 'NrSV':
            # plot according to signals used and 2D/3D
            for st, lstSTNames in dStf['signals'].items():
                stNames = ",".join(lstSTNames)
                for mode in '3D', '2D':
                    lblTxt = '{st:s} ({mode:s})'.format(st=stNames, mode=mode)
                    logger.debug('{func:s}: plotting {stm:s}'.format(
                        stm=lblTxt, func=cFuncName))

                    # get the index for this sigType & mode
                    idx = dIdx[st][mode]
                    ax.plot(dfCrd['time'].iloc[idx],
                            dfCrd[crd].iloc[idx],
                            color=next(colorsIter),
                            linestyle='',
                            marker='.',
                            label=lblTxt,
                            markersize=2)
        else:
            # plot when 3D posn
            ax.fill_between(dfCrd['time'], dfCrd[crd], color='grey', alpha=.2)

            # plot when 3D posn
            ax.plot(dfCrd['time'].iloc[idx3D],
                    dfCrd[crd].iloc[idx3D],
                    color='green',
                    linestyle='',
                    marker='.',
                    markersize=2,
                    label='3D')
            # plot when 2D posn
            ax.plot(dfCrd['time'].iloc[idx2D],
                    dfCrd[crd].iloc[idx2D],
                    color='red',
                    linestyle='',
                    marker='.',
                    markersize=2,
                    label='2D')

        # name y-axis
        ax.set_ylabel(crd, fontsize=14)

        # add a legend to the plot showing the 2D/3D positioning displayed
        ax.legend(loc='best', ncol=16, markerscale=5)

    # title of plot
    title = '{syst:s}: UTM Coordinates'.format(syst=dStf['gnss'])
    fig.suptitle(title, fontsize=16)

    # copyright notice
    ax.annotate(r'$\copyright$ Alain Muls ([email protected])',
                xy=(1, 0),
                xycoords='axes fraction',
                xytext=(0, -45),
                textcoords='offset pixels',
                horizontalalignment='right',
                verticalalignment='bottom',
                weight='bold',
                fontsize='medium')

    # Save the file in dir png
    pltDir = os.path.join(dStf['dir'], 'png')
    os.makedirs(pltDir, exist_ok=True)
    pltName = '{syst:s}-UTM.png'.format(syst=dStf['gnss'].replace(' ', '-'))
    pltName = os.path.join(pltDir, pltName)
    fig.savefig(pltName, dpi=100)

    logger.info('{func:s}: plot saved as {name:s}'.format(name=pltName,
                                                          func=cFuncName))

    plt.show(block=False)
Example #10
def plotUTMSuppressed(dStf: dict, dfCrd: pd.DataFrame, logger: logging.Logger):
    """
    plots the UTM E-N scatter

    PVT error code. The following values are defined:
    0: no error.
    1: not enough measurements
    2: not enough ephemerides available
    3: DOP too large (larger than 15)
    4: sum of squared residuals too large
    5: no convergence
    6: not enough measurements after outlier rejection
    7: position output prohibited due to export laws
    8: not enough differential corrections available
    9: base station coordinates unavailable
    127: valid position output actively suppressed (e.g. PRS denial)
    """
    cFuncName = colored(os.path.basename(__file__),
                        'yellow') + ' - ' + colored(
                            sys._getframe().f_code.co_name, 'green')

    logger.info('{func:s}: start plotting (un)suppressed trajectories'.format(
        func=cFuncName))

    # specify the style
    mpl.style.use('seaborn')

    # get the index for PVT suppression
    dIdx = {}  # dict with indices corresponding to PNT suppression
    for errCode in dStf['errCodes']:
        dIdx[errCode] = dfCrd.index[dfCrd['Error'] == errCode]
        logger.info(
            '{func:s}: list of indices dIdx[{errc:d}] = {idx!s}'.format(
                errc=errCode, idx=dIdx[errCode], func=cFuncName))

    # convert time column to seconds
    dsTime = dfCrd['time'] - dfCrd['time'].iloc[0]
    dfCrd['sec'] = dsTime.dt.total_seconds()
    amutils.logHeadTailDataFrame(df=dfCrd,
                                 dfName='dfCrd plot scatter',
                                 callerName=cFuncName,
                                 logger=logger)

    # get index when sec is a multiple of 5 minutes (300 s)
    idxTime = dfCrd.index[dfCrd['sec'] % 300 == 0]
    logger.debug('{func:s}: indices multiple of 300s = {idx!s}'.format(
        idx=idxTime, func=cFuncName))

    fig, ax = plt.subplots(nrows=1, ncols=1)
    fig.set_size_inches(14, 14)
    ax.axis('equal')

    # copyright notice
    ax.annotate(r'$\copyright$ Alain Muls ([email protected])',
                xy=(1, 0),
                xycoords='axes fraction',
                xytext=(0, -45),
                textcoords='offset pixels',
                horizontalalignment='right',
                verticalalignment='bottom',
                weight='bold',
                fontsize='medium')

    # (re)set the color iterator
    colorsIter = iter(list(mcolors.TABLEAU_COLORS))

    # plot the E-N coordinates according to PVT Error mode
    for errCode, errCodeName in dStf['errCodes'].items():
        logger.debug('{func:s}: plotting {errc:d}: {errtxt:s}'.format(
            errc=errCode, errtxt=errCodeName, func=cFuncName))

        # get the index for this error code
        idx = dIdx[errCode]
        ax.plot(dfCrd['UTM.E'].iloc[idx],
                dfCrd['UTM.N'].iloc[idx],
                color=next(colorsIter),
                linestyle='',
                marker='.',
                label=errCodeName,
                markersize=4)

    # ax.plot(dfCrd['UTM.E'].iloc[idx3D], dfCrd['UTM.N'].iloc[idx3D], color='blue', label='3D mode', markersize=2, linestyle='', marker='.')
    # ax.plot(dfCrd['UTM.E'].iloc[idx2D], dfCrd['UTM.N'].iloc[idx2D], color='red', label='2D mode', markersize=2, linestyle='', marker='.')

    # annotate plot with time
    annText = [dfCrd['time'][idx].strftime('%H:%M:%S') for idx in idxTime]
    logger.info('{func:s}: annotate text\n{ann!s}'.format(ann=annText,
                                                          func=cFuncName))
    for idx, text in zip(idxTime, annText):
        ax.annotate(text, (dfCrd['UTM.E'].iloc[idx], dfCrd['UTM.N'].iloc[idx]),
                    textcoords='offset points',
                    xytext=(0, 10),
                    ha='center')

    # # draw circles for zones on plot
    # for zone, zone_crd in dStf['zones'].items():
    #     E, N, R = zone_crd['UTM.E'], zone_crd['UTM.N'], zone_crd['radius']

    #     # draw marker & cricle
    #     ax.scatter(E, N, color='black', marker='^', alpha=0.4)
    #     newCircle = plt.Circle((E, N), R, color='black', fill=False, clip_on=True)
    #     ax.add_artist(newCircle)

    #     # annotate the markers
    #     ax.annotate('{zone:s}'.format(zone=zone), xy=(E + 2, N), textcoords='data', xycoords='data', clip_on=True, color='black', alpha=0.4)

    #     # break

    # name y-axis
    ax.set_xlabel('UTM.E', fontsize=14)
    ax.set_ylabel('UTM.N', fontsize=14)

    # add a legend to the plot showing the 2D/3D positioning displayed
    ax.legend(loc='best', ncol=16, markerscale=5)

    # title of plot
    title = '{syst:s}: UTM Trajectory'.format(syst=dStf['gnss'])
    fig.suptitle(title, fontsize=16)

    # Save the file in dir png
    pltDir = os.path.join(dStf['dir'], 'png')
    os.makedirs(pltDir, exist_ok=True)
    pltName = '{syst:s}-UTMsuppressed.png'.format(
        syst=dStf['gnss'].replace(' ', '-'))
    pltName = os.path.join(pltDir, pltName)
    fig.savefig(pltName, dpi=100)
    logger.info('{func:s}: plot saved as {name:s}'.format(name=pltName,
                                                          func=cFuncName))

    plt.show(block=True)
Example #11
def main(argv) -> bool:
    """
    glabplotposn plots data from gLAB (v6) OUTPUT messages

    """
    cFuncName = colored(os.path.basename(__file__),
                        'yellow') + ' - ' + colored(
                            sys._getframe().f_code.co_name, 'green')

    # store cli parameters
    amc.dRTK = {}
    cli_opt = {}
    cli_opt['glab_db'], cli_opt['gnsss'], cli_opt['prcodes'], cli_opt[
        'markers'], cli_opt['yyyy'], cli_opt['doy_begin'], cli_opt[
            'doy_last'], show_plot, log_levels = treatCmdOpts(argv)
    amc.dRTK['options'] = cli_opt

    # create logging for better debugging
    logger, log_name = amc.createLoggers(os.path.basename(__file__),
                                         dir=os.getcwd(),
                                         logLevels=log_levels)

    # check arguments
    ret_val = check_arguments(logger=logger)
    if ret_val != amc.E_SUCCESS:
        sys.exit(ret_val)

    # for crds in ['ENU', 'dENU']:
    for crds in ['ENU']:
        # parse the database file to get the GNSSs and prcodes we need
        tmp_name = glabdb_parse.db_parse_gnss_codes(
            db_name=amc.dRTK['options']['glab_db'],
            crd_types=glc.dgLab['OUTPUT'][crds],
            logger=logger)

        # read into dataframe
        logger.info(
            '{func:s}: reading selected information into dataframe'.format(
                func=cFuncName))

        colnames = ['yyyy', 'doy', 'gnss', 'marker', 'prcodes', 'crds']
        if crds == 'ENU':
            colnames += ['mean', 'std', 'max', 'min']

        logger.debug('{func:s}: colnames = {cols!s}'.format(cols=colnames, func=cFuncName))
        try:
            df_crds = pd.read_csv(tmp_name, names=colnames, header=None)

            # convert YYYY/DOY to a datetime.date field
            df_crds['DT'] = df_crds.apply(lambda x: datetime.date(
                x['yyyy'], 1, 1) + datetime.timedelta(x['doy'] - 1),
                                          axis=1)

        except FileNotFoundError as e:
            logger.critical('{func:s}: Error = {err!s}'.format(err=e,
                                                               func=cFuncName))
            sys.exit(amc.E_FILE_NOT_EXIST)

        amutils.logHeadTailDataFrame(logger=logger,
                                     callerName=cFuncName,
                                     df=df_crds,
                                     dfName='df[{crds:s}]'.format(crds=crds))

        # determine statistics
        if crds == 'ENU':
            # statistics over the coordinates ENU per prcode selected
            amc.dRTK['stats_{crd:s}'.format(
                crd=crds)] = glabdb_statistics.crd_statistics(
                    crds=crds,
                    prcodes=amc.dRTK['options']['prcodes'],
                    df_crds=df_crds,
                    logger=logger)
            # plot the mean / std values for all prcodes per ENU coordinates
            glabdb_plot_crds.plot_glabdb_position(
                crds=crds,
                prcodes=amc.dRTK['options']['prcodes'],
                df_crds=df_crds,
                logger=logger,
                showplot=show_plot)

    # report to the user
    logger.info('{func:s}: Project information =\n{json!s}'.format(
        func=cFuncName,
        json=json.dumps(amc.dRTK,
                        sort_keys=False,
                        indent=4,
                        default=amutils.DT_convertor)))

    return amc.E_SUCCESS
Example #12
def plotUTMScatter(dStf: dict, dfCrd: pd.DataFrame, logger: logging.Logger):
    """
    plots the UTM E-N scatter
    """
    cFuncName = colored(os.path.basename(__file__),
                        'yellow') + ' - ' + colored(
                            sys._getframe().f_code.co_name, 'green')

    logger.info('{func:s}: start plotting trajectories'.format(func=cFuncName))

    # specify the style
    mpl.style.use('seaborn')

    # get the index for signals used for PNT AND for 3D/2D
    dIdx = {}  # dict with indices corresponding to signals & 3D/2D usage
    for st, lstSTNames in dStf['signals'].items():
        stNames = ",".join(lstSTNames)
        logger.info('{func:s}: st = {st:d}  name = {name:s}'.format(
            st=st, name=stNames, func=cFuncName))
        dIdx[st] = {}
        dIdx[st]['3D'] = dfCrd.index[(dfCrd['SignalInfo'] == st)
                                     & (dfCrd['2D/3D'] == 0)]
        dIdx[st]['2D'] = dfCrd.index[(dfCrd['SignalInfo'] == st)
                                     & (dfCrd['2D/3D'] == 1)]
        logger.info(
            '{func:s}: list of indices dIdx[{st:d}][3D] = {idx!s}'.format(
                st=st, idx=dIdx[st]['3D'], func=cFuncName))
        logger.info(
            '{func:s}: list of indices dIdx[{st:d}][2D] = {idx!s}'.format(
                st=st, idx=dIdx[st]['2D'], func=cFuncName))

    # convert time column to seconds
    dsTime = dfCrd['time'] - dfCrd['time'].iloc[0]
    dfCrd['sec'] = dsTime.dt.total_seconds()
    amutils.logHeadTailDataFrame(df=dfCrd,
                                 dfName='dfCrd plot scatter',
                                 callerName=cFuncName,
                                 logger=logger)

    # get index when sec is a multiple of 10 minutes (600 s)
    idxTime = dfCrd.index[dfCrd['sec'] % 600 == 0]
    logger.debug('{func:s}: indices multiple of 600s = {idx!s}'.format(
        idx=idxTime, func=cFuncName))

    fig, ax = plt.subplots(nrows=1, ncols=1)
    fig.set_size_inches(14, 14)
    ax.axis('equal')

    # copyright notice
    ax.annotate(r'$\copyright$ Alain Muls ([email protected])',
                xy=(1, 0),
                xycoords='axes fraction',
                xytext=(0, -45),
                textcoords='offset pixels',
                horizontalalignment='right',
                verticalalignment='bottom',
                weight='bold',
                fontsize='medium')

    # get the index for 2D/3D
    idx3D = dfCrd.index[dfCrd['2D/3D'] == 0]
    idx2D = dfCrd.index[dfCrd['2D/3D'] == 1]

    # (re)set the color iterator
    colorsIter = iter(list(mcolors.TABLEAU_COLORS))

    # plot the E-N coordinates according to signals used and 2D/3D mode
    for st, lstSTNames in dStf['signals'].items():
        stNames = ",".join(lstSTNames)
        for mode in '3D', '2D':
            lblTxt = '{st:s} ({mode:s})'.format(st=stNames, mode=mode)
            logger.debug('{func:s}: plotting {stm:s}'.format(stm=lblTxt,
                                                             func=cFuncName))

            # get the index for this sigType & mode
            idx = dIdx[st][mode]
            ax.plot(dfCrd['UTM.E'].iloc[idx],
                    dfCrd['UTM.N'].iloc[idx],
                    color=next(colorsIter),
                    linestyle='',
                    marker='.',
                    label=lblTxt,
                    markersize=2)

    # ax.plot(dfCrd['UTM.E'].iloc[idx3D], dfCrd['UTM.N'].iloc[idx3D], color='blue', label='3D mode', markersize=2, linestyle='', marker='.')
    # ax.plot(dfCrd['UTM.E'].iloc[idx2D], dfCrd['UTM.N'].iloc[idx2D], color='red', label='2D mode', markersize=2, linestyle='', marker='.')

    # annotate plot with time
    annText = [dfCrd['time'][idx].strftime('%H:%M:%S') for idx in idxTime]
    logger.info('{func:s}: annotate text\n{ann!s}'.format(ann=annText,
                                                          func=cFuncName))
    for idx, text in zip(idxTime, annText):
        ax.annotate(text, (dfCrd['UTM.E'].iloc[idx], dfCrd['UTM.N'].iloc[idx]),
                    textcoords='offset points',
                    xytext=(0, 10),
                    ha='center')

    # annotate with position of marker
    logger.info('{func:s}: marker location = {E!s} {N!s}'.format(
        E=dStf['marker']['UTM.E'], N=dStf['marker']['UTM.N'], func=cFuncName))
    E, N = dStf['marker']['UTM.E'], dStf['marker']['UTM.N']
    ax.annotate('marker',
                xy=(E, N),
                xytext=(E - 200, N),
                xycoords='data',
                horizontalalignment='right',
                verticalalignment='center',
                color='magenta')
    ax.scatter(E, N, color='magenta', marker='^')

    # draw circles for distance evaluation on the plot
    for radius in range(5, 50, 5):
        newCircle = plt.Circle((E, N),
                               radius * 1000,
                               color='red',
                               fill=False,
                               clip_on=True,
                               alpha=0.4)
        ax.add_artist(newCircle)
        # annotate the radius
        ax.annotate('{radius:d} km'.format(radius=radius),
                    xy=(E + radius * 1000 * np.cos(np.pi * 5 / 4),
                        N + radius * 1000 * np.sin(np.pi * 5 / 4)),
                    textcoords='data',
                    xycoords='data',
                    clip_on=True,
                    color='blue',
                    alpha=0.4)
    for radius in 2.5, 7.5:
        newCircle = plt.Circle((E, N),
                               radius * 1000,
                               color='red',
                               fill=False,
                               clip_on=True,
                               alpha=0.4)
        ax.add_artist(newCircle)
        # annotate the radius
        ax.annotate('{radius:.1f} km'.format(radius=radius),
                    xy=(E + radius * 1000 * np.cos(np.pi * 5 / 4),
                        N + radius * 1000 * np.sin(np.pi * 5 / 4)),
                    textcoords='data',
                    xycoords='data',
                    clip_on=True,
                    color='blue',
                    alpha=0.4)

    # name y-axis
    ax.set_xlabel('UTM.E', fontsize=14)
    ax.set_ylabel('UTM.N', fontsize=14)

    # add a legend to the plot showing the 2D/3D positioning displayed
    ax.legend(loc='best', ncol=16, markerscale=5)

    # title of plot
    title = '{syst:s}: UTM Trajectory'.format(syst=dStf['gnss'])
    fig.suptitle(title, fontsize=16)

    # Save the file in dir png
    pltDir = os.path.join(dStf['dir'], 'png')
    os.makedirs(pltDir, exist_ok=True)
    pltName = '{syst:s}-UTMscatter.png'.format(
        syst=dStf['gnss'].replace(' ', '-'))
    pltName = os.path.join(pltDir, pltName)
    fig.savefig(pltName, dpi=100)
    logger.info('{func:s}: plot saved as {name:s}'.format(name=pltName,
                                                          func=cFuncName))

    plt.show(block=True)
Example #13
def rnxobs_dataframe(rnx_file: str, prn: str, dPRNSysObs: dict, dProgs: dict,
                     logger: logging.Logger) -> dict:
    """
    rnxobs_dataframe selects the observations for a PRN and returns observation dataframe
    """
    cFuncName = colored(os.path.basename(__file__),
                        'yellow') + ' - ' + colored(
                            sys._getframe().f_code.co_name, 'green')

    logger.info(
        '{func:s}: creating dataframe for PRN {prn:s} with observations {obs:s}'
        .format(prn=prn, obs=', '.join(dPRNSysObs), func=cFuncName))

    # create tabular output for this PRN
    tab_name = os.path.join(tempfile.gettempdir(),
                            '{tmpname:s}.tab'.format(tmpname=uuid.uuid4().hex))

    cmdGFZRNX = '{prog:s} -finp {rnx:s} -fout {out:s} -tab_obs -tab_sep "," -prn {prn:s} -obs_types={obs:s}'.format(
        prog=dProgs['gfzrnx'],
        rnx=rnx_file,
        out=tab_name,
        prn=prn,
        obs=','.join(dPRNSysObs))

    logger.info('{func:s}: Running:\n{prog:s}'.format(prog=colored(
        cmdGFZRNX, 'blue'),
                                                      func=cFuncName))

    # run the program
    # gfzrnx -finp P1710171.20O -tab_obs -fout P1710171_20O.tab -prn E09 -obs_types C1C,C5Q -tab_sep ','
    exeprogram.subProcessDisplayStdErr(cmd=cmdGFZRNX, verbose=False)

    # find the header lines that don't belong to this GNSS system and remove that line
    hdlookup = '#HD,'
    gnsslookup = '#HD,{gnss:s}'.format(gnss=prn[0])

    hd_lines = []
    gnss_lines = []
    with open(tab_name) as f:
        for num, line in enumerate(f, 1):
            if hdlookup in line:
                # print('found at line: ', num)
                hd_lines.append(num)
            if gnsslookup in line:
                # print('found at line: ', num)
                gnss_lines.append(num)

    # print('hd_lines = {!s}'.format(hd_lines))
    # print('gnss_lines = {!s}'.format(gnss_lines))

    remove_lines = [linenr for linenr in hd_lines if linenr not in gnss_lines]
    # print('remove_lines = {!s}'.format(remove_lines))

    # remove the lines that are not related to current GNSS
    amutils.delete_lines(original_file=tab_name, lst_line_number=remove_lines)

    # read the CSV created file into a panda dataframe
    dfPrn = pd.read_csv(tab_name, delimiter=',')
    # add datetime columns
    sDT = dfPrn.apply(lambda x: DT_from_DTstr(x['DATE'], x['TIME']), axis=1)
    dfPrn.insert(loc=4, column='DT', value=sDT)

    amutils.logHeadTailDataFrame(
        logger=logger,
        callerName=cFuncName,
        df=dfPrn,
        dfName='{tab:s}'.format(tab='{prn:s} with observables = {obs!s}'.
                                format(prn=prn, obs=', '.join(dPRNSysObs))))

    # remove the temporary tabular file
    os.remove(tab_name)

    return dfPrn
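amutils.delete_lines is a project helper; a plausible standalone sketch of removing a list of 1-based line numbers from a file (an assumption based on the call above, where the numbers were collected with enumerate(f, 1)):

def delete_lines(original_file: str, lst_line_number: list):
    # rewrite the file, keeping only lines whose 1-based number is not listed
    with open(original_file) as f:
        lines = f.readlines()
    with open(original_file, 'w') as f:
        f.writelines(line for nr, line in enumerate(lines, 1)
                     if nr not in lst_line_number)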
Example #14
def parse_elevation_distribution(dRtk: dict, dfSat: pd.DataFrame, logger: logging.Logger) -> Tuple[pd.DataFrame, pd.Series, pd.DataFrame, pd.Series]:
    """
    parse_elevation_distribution parses the observed residuals per constellation and per elevation bin of 15 degrees
    """
    cFuncName = colored(os.path.basename(__file__), 'yellow') + ' - ' + colored(sys._getframe().f_code.co_name, 'green')

    logger.info('{func:s}: parsing observed residuals as a function of elevation'.format(func=cFuncName))

    # define what we use for binning
    gnssLetters = ('E', 'G')
    gnssNames = ('GAL', 'GPS')
    cols = ['Elev', 'PRres', 'CN0']
    # define the bins used
    elev_bins, step = np.linspace(start=0, stop=90, num=7, endpoint=True, retstep=True, dtype=int, axis=0)
    logger.info('{func:s}: elevation bins = {bins!s}'.format(bins=elev_bins, func=cFuncName))
    CN0_bins, step = np.linspace(start=10, stop=70, num=13, endpoint=True, retstep=True, dtype=int, axis=0)
    logger.info('{func:s}: CN0 bins = {bins!s}'.format(bins=CN0_bins, func=cFuncName))
    PRres_bins, step = np.linspace(start=-5, stop=5, num=21, endpoint=True, retstep=True, dtype=float, axis=0)
    tmpArr = np.append(PRres_bins, np.inf)
    PRres_bins = np.append(-np.inf, tmpArr)
    logger.info('{func:s}: PRres bins = {bins!s}'.format(bins=PRres_bins, func=cFuncName))

    # add to dRtk the bins used

    # create dataframe for CN0 / PRres distribution
    dfCN0dist = pd.DataFrame()
    dfPRresdist = pd.DataFrame()

    # calculate the number of PRres within [-2,+2] and CN0 statistics over the elevation bins
    for gnssLetter, gnssName in zip(gnssLetters, gnssNames):
        # create a dataframe to work on
        dfGNSS = dfSat[dfSat['SV'].str.startswith(gnssLetter)][cols]

        if len(dfGNSS.index):
            # display frame for this system
            amutils.logHeadTailDataFrame(logger=logger, callerName=cFuncName, df=dfGNSS, dfName=gnssName)

            # go over the elevation bins
            for elev_min, elev_max in zip(elev_bins[:-1], elev_bins[1:]):
                dfGNSS['elevbin'] = dfGNSS.Elev.between(elev_min, elev_max, inclusive=True)

                amutils.logHeadTailDataFrame(logger=logger, callerName=cFuncName, df=dfGNSS, dfName='{syst:s} in [{min:d}..{max:d}] deg'.format(syst=gnssName, min=elev_min, max=elev_max))

                # create a distribution for PRres
                dfPRresdist['{syst:s}[{min:d}..{max:d}]'.format(syst=gnssName, min=elev_min, max=elev_max)] = pd.cut(dfGNSS.loc[dfGNSS['elevbin']]['PRres'], bins=PRres_bins).value_counts(sort=False)

                # create a distribution for CN0
                dfCN0dist['{syst:s}[{min:d}..{max:d}]'.format(syst=gnssName, min=elev_min, max=elev_max)] = pd.cut(dfGNSS.loc[dfGNSS['elevbin']]['CN0'], bins=CN0_bins).value_counts(sort=False)

            # reduce the values to percentage based on observations taken over all bins
            dsPRres_per_bin = dfPRresdist.loc[:, dfPRresdist.columns].sum()
            PRres_total = dsPRres_per_bin.sum() / 100

            dsCN0_per_bin = dfCN0dist.loc[:, dfCN0dist.columns].sum()
            CN0_total = dsCN0_per_bin.sum() / 100

            # report the distributions of CN0 and PRres to the user
            amutils.logHeadTailDataFrame(logger=logger, callerName=cFuncName, df=dfPRresdist, dfName='dfPRresdist')
            amutils.logHeadTailDataFrame(logger=logger, callerName=cFuncName, df=dfPRresdist / PRres_total, dfName='dfPRresdist in percentage')
            logger.info('{func:s}: PRres totals per elev bin = \n{bins!s}'.format(bins=dsPRres_per_bin, func=cFuncName))

            amutils.logHeadTailDataFrame(logger=logger, callerName=cFuncName, df=dfCN0dist, dfName='dfCN0dist')
            amutils.logHeadTailDataFrame(logger=logger, callerName=cFuncName, df=dfCN0dist / CN0_total, dfName='dfCN0dist in percentage')
            logger.info('{func:s}: CN0 totals per elev bin = \n{bins!s}'.format(bins=dsCN0_per_bin, func=cFuncName))

    return dfCN0dist, dsCN0_per_bin, dfPRresdist, dsPRres_per_bin
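The distributions above come from pd.cut plus value_counts; a minimal self-contained sketch of that binning technique with toy elevations:

import numpy as np
import pandas as pd

elev = pd.Series([5.0, 12.0, 17.0, 33.0, 41.0, 62.0, 78.0])
elev_bins = np.linspace(start=0, stop=90, num=7, endpoint=True, dtype=int)  # 15-degree bins

# pd.cut assigns each value to its interval; value_counts(sort=False) keeps the bins in order
print(pd.cut(elev, bins=elev_bins).value_counts(sort=False))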
Example #15
def readSTFGeodetic(stfFile: str, logger: logging.Logger) -> pd.DataFrame:
    """
    read in the STF Geodetic_v2 file using included header information
    """
    cFuncName = colored(os.path.basename(__file__), 'yellow') + ' - ' + colored(sys._getframe().f_code.co_name, 'green')

    # read in the file (dSTF is a module-level dict holding the STF metadata)
    logger.info('{func:s}: reading file {file:s}'.format(file=stfFile, func=cFuncName))
    dfSTF = pd.read_csv(stfFile, sep=',', skiprows=range(1, 2))
    amutils.logHeadTailDataFrame(df=dfSTF, dfName=dSTF['stf'], callerName=cFuncName, logger=logger)
    dfSTF.dropna(subset=['Latitude[rad]', 'Longitude[rad]'], inplace=True)
    amutils.logHeadTailDataFrame(df=dfSTF, dfName=dSTF['stf'], callerName=cFuncName, logger=logger)
    dfSTF.reset_index(inplace=True)

    # zone definition
    dZone = {}
    dZone['allow'] = {'lat': 50.934519, 'lon': 4.466130, 'radius': 300}
    dZone['deny'] = {'lat': 50.934877, 'lon': 4.466280, 'radius': 200}

    # convert lat/lon to UTM
    for zone, zone_crd in dZone.items():
        dZone[zone]['UTM.E'], dZone[zone]['UTM.N'], dZone[zone]['UTM.Z'], dZone[zone]['UTM.L'] = UTM.from_latlon(dZone[zone]['lat'], dZone[zone]['lon'])

    # add to dict dStf
    dSTF['zones'] = dZone

    dfSTF['lat'] = np.degrees(dfSTF['Latitude[rad]'])
    dfSTF['lon'] = np.degrees(dfSTF['Longitude[rad]'])
    # convert the GPS time to UTC
    dfSTF['time'] = dfSTF.apply(lambda x: gpstime.UTCFromWT(x['WNc[week]'], x['TOW[s]']), axis=1)

    # add UTM coordinates
    dfSTF['UTM.E'], dfSTF['UTM.N'], dfSTF['UTM.Z'], dfSTF['UTM.L'] = UTM.from_latlon(dfSTF['lat'].to_numpy(), dfSTF['lon'].to_numpy())

    # calculate distance to st-Niklass 51.1577189  4.1915975
    dfSTF['dist'] = np.linalg.norm(dfSTF[['UTM.E', 'UTM.N']].sub(np.array([dSTF['marker']['UTM.E'], dSTF['marker']['UTM.N']])), axis=1)
    # dfSTF['dist2'] = np.linalg.norm([dfSTF['UTM.E'].iloc[0], dfSTF['UTM.N'].iloc[0]] - [dSTF['marker']['UTM.E'], dSTF['marker']['UTM.N']])

    # add info to dSTF about time
    dTime = {}
    dTime['epochs'] = dfSTF.shape[0]
    dTime['date'] = dfSTF.time.iloc[0].strftime('%d %b %Y')
    dTime['start'] = dfSTF.time.iloc[0].strftime('%H:%M:%S')
    dTime['end'] = dfSTF.time.iloc[-1].strftime('%H:%M:%S')
    dSTF['Time'] = dTime

    # add info to dSTF about #epochs
    dSTF['#epochs'] = dfSTF.shape[0]

    # add info to dSTF about used signal types used
    dST = {}
    sigTypes = dfSTF.SignalInfo.unique()
    logger.info('{func:s}: found nav-signals {sigt!s}'.format(sigt=sigTypes, func=cFuncName))
    for i, sigType in enumerate(sigTypes):
        logger.debug('{func:s}: searching name for sig-type {st!s}'.format(st=sigType, func=cFuncName))

        sigTypeNames = []

        for k, v in ssnst.dSigType.items():
            # logger.debug('{func:s}: checking presence of signal {sig!s}'.format(sig=v, func=cFuncName))
            # logger.debug('{func:s}: bin(sigType) = {st!s}'.format(st=bin(sigType), func=cFuncName))
            # logger.debug('{func:s}: bin(0b1 << k) = {ssnst!s}'.format(ssnst=bin(0b1 << k), func=cFuncName))
            # logger.debug('{func:s}: bin(bin(sigType) & bin(0b1 << k)) = {binops!s})'.format(binops=bin(sigType & (0b1 << k)), func=cFuncName))
            # logger.debug('{func:s}: binary check sigtype = {st!s} - ssn = {ssnst!s} operator and = {opsbin!s}'.format(st=bin(sigType), ssnst=bin(0b1 << k), opsbin=bin(sigType & (0b1 << k)), func=cFuncName))
            # logger.debug('-' * 10)

            if (sigType & (0b1 << k)) != 0:
                logger.info('{func:s}: found signal {ssnst:s}'.format(ssnst=v, func=cFuncName))
                # add name to the used signal types
                sigTypeNames.append(v)

        # add signal to the dST dict
        dST[sigType] = sigTypeNames

        # nrBitsSet = ssnst.countSetBits(sigType)
        # lst1Bits = ssnst.findAllSetBits(sigType, nrBitsSet)

        # # get the name of the signals
        # stName = ssnst.dSigType[lst1Bits[0]]
        # if nrBitsSet > 1:
        #     for j in lst1Bits[1:]:
        #         stName += '+' + ssnst.dSigType[j]
        # dST[sigType] = stName

    dSTF['signals'] = dST
    logger.info('{func:s}: found signals {signals!s}'.format(signals=dSTF['signals'], func=cFuncName))

    # find out which PVT error codes we have
    dErrCodes = {}
    errCodes = list(set(dfSTF.Error.unique()))
    for errCode in errCodes:
        logger.debug('{func:s}: searching name for error codes {errc:d}'.format(errc=errCode, func=cFuncName))

        for k, v in ssnst.dPVTErrorCode.items():
            if (errCode == k):
                logger.info('{func:s}: found error code {errc:s}'.format(errc=colored(v, 'green'), func=cFuncName))
                # add error code to errCodeNames
                dErrCodes[errCode] = v

    dSTF['errCodes'] = dErrCodes

    logger.info('{func:s}: found error codes {errc!s}'.format(errc=errCodes, func=cFuncName))

    # inform user
    logger.info('{func:s}: read STF file {file:s}, added UTM coordinates and GNSS time'.format(file=stfFile, func=cFuncName))

    return dfSTF
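The SignalInfo field is a bitmask decoded with shifts against the ssnst.dSigType lookup table; a standalone sketch with a hypothetical subset of that table:

dSigType = {0: 'GPS L1-CA', 1: 'GPS L1-P', 3: 'GPS L2-P'}  # hypothetical entries

def signal_names(sig_type: int) -> list:
    # bit k set in the mask means signal k was used in the PVT solution
    return [name for k, name in dSigType.items() if sig_type & (0b1 << k)]

print(signal_names(0b1001))  # bits 0 and 3 set -> ['GPS L1-CA', 'GPS L2-P']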
Example #16
def main(argv):
    """
    pyRTKPlot adds UTM coordinates to the output of rnx2rtkp.
    If a 'stat' file is available, it calculates xDOP values and makes plots of the statistics

    """
    amc.cBaseName = colored(os.path.basename(__file__), 'yellow')
    cFuncName = colored(os.path.basename(__file__),
                        'yellow') + ' - ' + colored(
                            sys._getframe().f_code.co_name, 'green')

    # some options for display of dataframes
    pd.set_option('display.max_columns', None)  # or 1000
    pd.set_option('display.max_rows', None)  # or 1000
    pd.set_option('display.max_colwidth', -1)  # or 199
    # limit float precision
    json.encoder.FLOAT_REPR = lambda o: format(o, '.3f')

    # treat command line options
    posFile, rootDir, subDir, rtkqual, marker, campaign, excel, logLevels = treatCmdOpts(
        argv)

    # store cli parameters
    amc.dRTK = {}
    amc.dRTK['rootDir'] = rootDir
    amc.dRTK['subDir'] = subDir
    amc.dRTK['posDir'] = os.path.join(rootDir, subDir)
    amc.dRTK['posFile'] = posFile
    amc.dRTK['rtkqual'] = rtkqual
    amc.dRTK['marker'] = marker
    amc.dRTK['campaign'] = campaign
    amc.dRTK['excel'] = excel

    # get the numeric value for this quality
    amc.dRTK['iQual'] = [
        key for key, value in rtkc.dRTKQual.items()
        if value == amc.dRTK['rtkqual']
    ][0]

    if amc.dRTK['excel']:
        amc.dRTK['xlsName'] = os.path.join(
            amc.dRTK['rootDir'],
            '{pos:s}.xlsx'.format(pos=amc.dRTK['campaign']))

    # create logging for better debugging
    logger = amc.createLoggers(baseName=os.path.basename(__file__),
                               dir=amc.dRTK['posDir'],
                               logLevels=logLevels)

    # change to selected directory if exists
    if not os.path.exists(amc.dRTK['posDir']):
        logger.error('{func:s}: directory {dir:s} does not exist'.format(
            func=cFuncName, dir=colored(amc.dRTK['posDir'], 'red')))
        sys.exit(amc.E_DIR_NOT_EXIST)
    else:
        os.chdir(amc.dRTK['posDir'])
        logger.info('{func:s}: changed to dir {dir:s}'.format(
            func=cFuncName, dir=colored(amc.dRTK['posDir'], 'green')))

    # check whether the pos file is present, else exit
    if not os.access(os.path.join(amc.dRTK['posDir'], amc.dRTK['posFile']),
                     os.R_OK):
        logger.error('{func:s}: file {pos:s} is not accessible'.format(
            func=cFuncName,
            pos=os.path.join(amc.dRTK['posDir'], amc.dRTK['posFile'])))
        sys.exit(amc.E_FILE_NOT_EXIST)

    # read the position file into a dataframe and add dUTM coordinates
    dfPos = parse_rtkpos_file.parsePosFile(logger=logger)

    # get the indices according to the position mode
    idx = dfPos.index[dfPos['Q'] == amc.dRTK['iQual']]

    # amutils.logHeadTailDataFrame(logger=logger, callerName=cFuncName, df=dfPos.loc[idx], dfName='{posf:s}'.format(posf=amc.dRTK['posFile']))

    # determine statistics for the requested quality mode
    logger.info('{func:s}: stats are\n{stat!s}'.format(
        func=cFuncName,
        stat=dfPos.loc[idx][['lat', 'lon', 'ellH', 'UTM.E',
                             'UTM.N']].describe()))
    # add weighted average for the requested quality of position
    llh = ['lat', 'lon', 'ellH', 'UTM.E', 'UTM.N']
    # pair each coordinate with its stddev column: N -> sdn, E -> sde, U -> sdu
    dSDenu = ['sdn', 'sde', 'sdu', 'sde', 'sdn']

    dWAVG = {}

    for values in zip(llh, dSDenu):
        dWAVG[values[0]] = parse_rtkpos_file.wavg(group=dfPos.loc[idx],
                                                  avg_name=values[0],
                                                  weight_name=values[1])
    # calculate the stddev of the weighted average
    for crd in llh[2:]:
        # print('crd = {!s}'.format(crd))
        dWAVG['sd{crd:s}'.format(crd=crd)] = parse_rtkpos_file.stddev(
            dfPos.loc[idx][crd], dWAVG[(crd)])
        # print(dWAVG)
        # print('{crd:s} = {sd:.3f}'.format(crd=crd, sd=dWAVG['sd{crd:s}'.format(crd=crd)]))

    # get UTM coordinates/zone for weighted average
    dWAVG['UTM.E'], dWAVG['UTM.N'], dWAVG['UTM.Z'], dWAVG[
        'UTM.L'] = utm.from_latlon(dWAVG['lat'], dWAVG['lon'])
    amc.dRTK['WAVG'] = dWAVG

    logger.info('{func:s}: weighted averages: {wavg!s}'.format(func=cFuncName,
                                                               wavg=dWAVG))

    amutils.logHeadTailDataFrame(
        logger=logger,
        callerName=cFuncName,
        df=dfPos,
        dfName='{posf:s}'.format(posf=amc.dRTK['posFile']))

    # create UTM plot
    plot_utm.plot_utm_ellh(dRtk=amc.dRTK,
                           dfUTM=dfPos,
                           logger=logger,
                           showplot=True)

    # add results to campaign file
    addRTKResult(logger)

    # write to csv file
    csvName = os.path.join(amc.dRTK['posDir'],
                           '{pos:s}.csv'.format(pos=amc.dRTK['posFile']))
    dfPos.to_csv(csvName, index=None, header=True)

    # add sheet write to excel workbook
    if amc.dRTK['excel']:
        sheetName = '{pos:s}-{qual:s}'.format(pos=os.path.splitext(
            os.path.basename(amc.dRTK['posFile']))[0],
                                              qual=amc.dRTK['rtkqual'])
        df2excel.append_df_to_excel(filename=amc.dRTK['xlsName'],
                                    df=dfPos,
                                    sheet_name=sheetName,
                                    truncate_sheet=True,
                                    startrow=0,
                                    index=False,
                                    float_format="%.9f")
        logger.info(
            '{func:s}: added sheet {sheet:s} to workbook {wb:s}'.format(
                func=cFuncName, sheet=sheetName, wb=amc.dRTK['xlsName']))

    logger.info('{func:s}: amc.dRTK =\n{settings!s}'.format(func=cFuncName,
                                                            settings=amc.dRTK))
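main relies on parse_rtkpos_file.wavg to average the coordinates with weights derived from the per-epoch standard deviations. A plausible sketch of such a helper, assuming the conventional 1/sigma^2 weighting (the actual implementation may differ):

import pandas as pd


def wavg(group: pd.DataFrame, avg_name: str, weight_name: str) -> float:
    """weighted average of column avg_name, using 1/sigma^2 of column weight_name as weight"""
    d = group[avg_name]
    w = 1. / group[weight_name] ** 2
    return (d * w).sum() / w.sum()


# example with hypothetical ellipsoidal heights and their stddevs
dfDemo = pd.DataFrame({'ellH': [100.02, 100.05, 99.98], 'sdu': [0.01, 0.02, 0.01]})
print(wavg(group=dfDemo, avg_name='ellH', weight_name='sdu'))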
Exemple #17
0
def main(argv):
    """
    reads an STF Geodetic file, adds UTM coordinates and plots the trajectory
    """
    amc.cBaseName = colored(os.path.basename(__file__), 'yellow')
    cFuncName = colored(os.path.basename(__file__), 'yellow') + ' - ' + colored(sys._getframe().f_code.co_name, 'green')

    # treat command line options
    dirSTF, fileSTF, GNSSsyst, crdMarker, logLevels = treatCmdOpts(argv)

    # create logging for better debugging
    logger = amc.createLoggers(os.path.basename(__file__), dir=dirSTF, logLevels=logLevels)

    # check if arguments are accepted
    workDir = checkExistenceArgs(stfDir=dirSTF, stfFile=fileSTF, logger=logger)

    # create dictionary with the current info
    global dSTF
    dSTF = {}
    dSTF['dir'] = workDir
    dSTF['gnss'] = GNSSsyst
    dSTF['stf'] = fileSTF

    # set the reference point
    dMarker = {}
    dMarker['lat'], dMarker['lon'], dMarker['ellH'] = map(float, crdMarker)
    if [dMarker['lat'], dMarker['lon'], dMarker['ellH']] == [0, 0, 0]:
        dMarker['lat'] = dMarker['lon'] = dMarker['ellH'] = np.NaN
        dMarker['UTM.E'] = dMarker['UTM.N'] = np.NaN
        dMarker['UTM.Z'] = dMarker['UTM.L'] = ''
    else:
        dMarker['UTM.E'], dMarker['UTM.N'], dMarker['UTM.Z'], dMarker['UTM.L'] = utm.from_latlon(dMarker['lat'], dMarker['lon'])

    logger.info('{func:s}: marker coordinates = {crd!s}'.format(func=cFuncName, crd=dMarker))
    dSTF['marker'] = dMarker

    # # add jammer location coordinates
    # dMarker = {}
    # dMarker['geod'] = {}
    # dMarker['geod']['lat'] = 51.19306  # 51.193183
    # dMarker['geod']['lon'] = 4.15528  # 4.155056
    # dMarker['UTM'] = {}
    # dMarker['UTM.E'], dMarker['UTM.N'], dMarker['UTM.Z'], dMarker['UTM.L'] = utm.from_latlon(dMarker['geod']['lat'], dMarker['geod']['lon'])
    # dSTF['marker'] = dMarker

    # read in the STF file using included header information
    dfGeod = readSTFGeodetic(stfFile=fileSTF, logger=logger)
    amutils.logHeadTailDataFrame(df=dfGeod, dfName=dSTF['stf'], callerName=cFuncName, logger=logger)

    # save to csv file
    dSTF['csv'] = os.path.splitext(dSTF['stf'])[0] + '.csv'
    dfGeod.to_csv(dSTF['csv'])

    # plot trajectory
    logger.info('{func:s}: information:\n{dict!s}'.format(dict=amutils.pretty(dSTF), func=cFuncName))
    plotcoords.plotUTMSuppressed(dStf=dSTF, dfCrd=dfGeod[['time', 'UTM.E', 'UTM.N', 'Error']], logger=logger)

    # plot the UTM coordinates and #SVs
    plotcoords.plotUTMCoords(dStf=dSTF, dfCrd=dfGeod[['time', 'UTM.E', 'UTM.N', 'Height[m]', 'NrSV', 'SignalInfo', 'dist', '2D/3D']], logger=logger)
    # plot trajectory
    plotcoords.plotUTMScatter(dStf=dSTF, dfCrd=dfGeod[['time', 'UTM.E', 'UTM.N', 'SignalInfo', '2D/3D']], logger=logger)

    logger.info('{func:s}: information:\n{dict!s}'.format(dict=amutils.pretty(dSTF), func=cFuncName))
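Both this example and the previous ones convert between geodetic and UTM coordinates with the utm package; a short usage sketch with arbitrary coordinates:

import utm

# geodetic -> UTM: returns easting, northing, zone number and zone letter
E, N, Z, L = utm.from_latlon(50.8438, 4.3928)

# UTM -> geodetic for the round trip
lat, lon = utm.to_latlon(E, N, Z, L)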
Exemple #18
0
def main(argv):
    """
    merges the per-signal CSV files and plots the signal differences per PRN
    """
    amc.cBaseName = colored(os.path.basename(__file__), 'yellow')
    cFuncName = colored(os.path.basename(__file__),
                        'yellow') + ' - ' + colored(
                            sys._getframe().f_code.co_name, 'green')

    # treat command line options
    dirCSV, filesCSV, GNSSsyst, GNSSsignals, movAvg, logLevels = treatCmdOpts(
        argv)

    # create logging for better debugging
    logger = amc.createLoggers(os.path.basename(__file__),
                               dir=dirCSV,
                               logLevels=logLevels)

    # check if arguments are accepted
    workDir = checkExistenceArgs(cvsDir=dirCSV,
                                 csvFiles=filesCSV,
                                 logger=logger)

    # create dictionary with the current info
    global dCSV
    dCSV = {}
    dCSV['dir'] = workDir
    dCSV['gnss'] = GNSSsyst
    for i, (signal, csv) in enumerate(zip(GNSSsignals, filesCSV)):
        dCSV[i] = {'signal': signal, 'file': csv}
    dCSV['movavg'] = movAvg
    logger.info('{func:s}: information:\n{dict!s}'.format(dict=dCSV,
                                                          func=cFuncName))

    # read and merge into a single dataframe
    dfObsMerged = mergeSignals(csvFiles=filesCSV, logger=logger)
    # create signalwise difference
    dfObsMerged = signalDifference(dfObs=dfObsMerged, logger=logger)
    amutils.logHeadTailDataFrame(df=dfObsMerged,
                                 dfName='dfObsMerged',
                                 callerName=cFuncName,
                                 logger=logger)

    # find max/min values for signals and for difference over all PRNs
    dCSV['dMax'] = amutils.divround((dfObsMerged[dCSV['SVs']].max()).max(), 5,
                                    2.5)
    dCSV['dMin'] = amutils.divround((dfObsMerged[dCSV['SVs']].min()).min(), 5,
                                    2.5)
    for i in [0, 1]:
        stCols = dCSV[i]['SVs'] + '-{st:s}'.format(st=dCSV[i]['signal'])

        dCSV[i]['max'] = amutils.divround(dfObsMerged[stCols].max().max(), 5,
                                          2.5)
        dCSV[i]['min'] = amutils.divround(dfObsMerged[stCols].min().min(), 5,
                                          2.5)

    logger.info('{func:s}: information:\n{dict!s}'.format(dict=dCSV,
                                                          func=cFuncName))

    # create plots per prn
    signalDiffPlot.plotSignalDiff(dCsv=dCSV, dfSig=dfObsMerged, logger=logger)
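The movAvg command-line option suggests the signals are smoothed with a moving average before plotting. A hedged sketch of how such smoothing could be applied to the merged dataframe with pandas (smooth_signals is a hypothetical helper, not part of the code above):

import pandas as pd


def smooth_signals(dfSig: pd.DataFrame, movavg: int) -> pd.DataFrame:
    """apply a centred moving average of movavg samples to all signal columns"""
    dfSmooth = dfSig.copy()
    sigCols = [col for col in dfSig.columns if col != 'time']
    dfSmooth[sigCols] = dfSig[sigCols].rolling(window=movavg, center=True, min_periods=1).mean()
    return dfSmooth


# hypothetical usage: dfObsMerged = smooth_signals(dfObsMerged, dCSV['movavg'])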
Exemple #19
0
def plotAGC(dStf: dict, dfAgc: pd.DataFrame, logger: logging.Logger):
    """
    plots the AGC gain per frontend as a function of time
    """
    cFuncName = colored(os.path.basename(__file__), 'yellow') + ' - ' + colored(sys._getframe().f_code.co_name, 'green')

    logger.info('{func:s}: start plotting AGC values'.format(func=cFuncName))

    amutils.logHeadTailDataFrame(df=dfAgc, dfName='dfAgc', callerName=cFuncName, logger=logger)

    # specify the style
    mpl.style.use('seaborn')
    colors = ['tab:green', 'tab:olive', 'tab:orange', 'tab:cyan', 'tab:blue', 'tab:red', 'tab:pink', 'tab:purple', 'tab:brown', 'tab:gray']  # 'tab:gray' instead of the non-existent 'tab:white'
    # (re)set the color iterator
    # colorsIter = iter(list(mcolors.TABLEAU_COLORS))
    colorsIter = iter(list(colors))

    fig, ax = plt.subplots(nrows=1, ncols=1, sharex=True)
    fig.set_size_inches(14, 10)

    # for setting the time on time-scale
    dtFormat = plot_utils.determine_datetime_ticks(startDT=dfAgc['time'].iloc[0], endDT=dfAgc['time'].iloc[-1])

    # x-axis properties
    ax.set_xlim([dfAgc['time'].iloc[0], dfAgc['time'].iloc[-1]])
    if dtFormat['minutes']:
        ax.xaxis.set_major_locator(dates.MinuteLocator(byminute=[0, 15, 30, 45], interval=1))
    else:
        ax.xaxis.set_major_locator(dates.HourLocator(interval=dtFormat['hourInterval']))   # every hourInterval hours
    ax.xaxis.set_major_formatter(dates.DateFormatter('%H:%M'))  # hours and minutes

    ax.xaxis.set_minor_locator(dates.DayLocator(interval=1))    # every day
    ax.xaxis.set_minor_formatter(dates.DateFormatter('\n%d-%m-%Y'))

    ax.xaxis.set_tick_params(rotation=0)
    for tick in ax.xaxis.get_major_ticks():
        # tick.tick1line.set_markersize(0)
        # tick.tick2line.set_markersize(0)
        tick.label1.set_horizontalalignment('center')

    # loop over the different frontends and plot their AGC gain
    for i, fe in enumerate(dStf['frontend']):
        logger.info('{func:s}: ... plotting frontend[{nr:d}], SSNID = {ssnid:d}, name = {name:s}'.format(nr=i, ssnid=fe, name=dStf['frontend'][fe]['name'], func=cFuncName))

        idx = dfAgc.index[dfAgc['FrontEnd'] == fe]
        logger.info('{func:s}:    ... indices found {idx!s} (#{len:d})'.format(idx=idx, len=len(idx), func=cFuncName))

        # plot the AGC for this frontend
        ax.plot(dfAgc['time'].loc[idx], dfAgc['AGCGain[dB]'].loc[idx], color=next(colorsIter), linestyle='', marker='.', label=dStf['frontend'][fe]['name'], markersize=3)

    # name y-axis
    ax.set_ylabel('AGC Gain [dB]', fontsize=14)

    # add a legend to the plot identifying the frontends
    ax.legend(loc='best', ncol=16, markerscale=6)

    # title of plot
    title = '{syst:s}: AGC Gain [dB]'.format(syst=dStf['gnss'])
    fig.suptitle(title, fontsize=16)

    # copyright this
    ax.annotate(r'$\copyright$ Alain Muls ([email protected])', xy=(1, 0), xycoords='axes fraction', xytext=(0, -45), textcoords='offset pixels', horizontalalignment='right', verticalalignment='bottom', weight='strong', fontsize='medium')

    # Save the file in dir png
    pltDir = os.path.join(dStf['dir'], 'png')
    os.makedirs(pltDir, exist_ok=True)
    pltName = '{syst:s}-AGC.png'.format(syst=dStf['gnss'].replace(' ', '-'))
    pltName = os.path.join(pltDir, pltName)
    fig.savefig(pltName, dpi=100)

    logger.info('{func:s}: plot saved as {name:s}'.format(name=pltName, func=cFuncName))

    plt.show(block=True)
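A design note on the color handling in plotAGC: iter(list(colors)) raises StopIteration once the ten palette entries are consumed, so plotting more than ten frontends would crash. itertools.cycle is a drop-in alternative that restarts the palette:

import itertools

colors = ['tab:green', 'tab:olive', 'tab:orange', 'tab:cyan', 'tab:blue',
          'tab:red', 'tab:pink', 'tab:purple', 'tab:brown', 'tab:gray']
# cycle() restarts at the first colour instead of raising StopIteration
colorsIter = itertools.cycle(colors)
for fe_name in ('FE0', 'FE1', 'FE2'):
    color = next(colorsIter)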
def plot_elev_distribution(dRtk: dict,
                           df: pd.DataFrame,
                           ds: pd.Series,
                           obs_name: str,
                           logger: logging.Logger,
                           showplot: bool = False):
    """
    plot_elev_distribution plots the distribution of CN0 or PRres as function of elevation bins
    """
    cFuncName = colored(os.path.basename(__file__),
                        'yellow') + ' - ' + colored(
                            sys._getframe().f_code.co_name, 'green')

    logger.info('{func:s}: creating {obs:s} distribution plot'.format(
        obs=obs_name, func=cFuncName))

    amutils.logHeadTailDataFrame(logger=logger,
                                 callerName=cFuncName,
                                 df=df,
                                 dfName=obs_name)

    # set up the plot
    plt.style.use('ggplot')

    # possible GNSS systems in df
    gnss_names = ('GAL', 'GPS')
    colors = ('blue', 'red')
    dgnss_avail = {}
    col_width = 0.25

    for gnss_name in gnss_names:
        dgnss_avail[gnss_name] = any(col.startswith(gnss_name)
                                     for col in df.columns)

    nrCols = 3
    col_move = 0
    if all(dgnss_avail.values()):
        tmpValue = divmod(len(df.columns) / 2, nrCols)
        col_move = 0.125
    else:
        tmpValue = divmod(len(df.columns), nrCols)

    syst_names = ' + '.join([k for k, v in dgnss_avail.items() if v])

    if (tmpValue[1] == 0):
        nrRows = int(tmpValue[0])
    else:
        nrRows = int(tmpValue[0]) + 1

    # get the elevation bins used
    elev_bins = list(set([col[3:] for col in df.columns]))
    elev_bins.sort()
    logger.info('{func:s}: elevation bins (sorted) {bins!s}'.format(bins=elev_bins,
                                                                    func=cFuncName))

    fig, ax = plt.subplots(nrows=nrRows,
                           ncols=nrCols,
                           sharex=True,
                           sharey=True,
                           figsize=(20.0, 12.0))
    fig.suptitle('{syst:s} - {posf:s} - {date:s}: {obs:s} Statistics'.format(
        posf=dRtk['info']['rtkPosFile'],
        syst=syst_names,
        date=dRtk['Time']['date'],
        obs=obs_name),
                 fontsize='xx-large')

    for i, gnss, color in zip((-1, +1), dgnss_avail, colors):

        # plot if the gnss is available
        if dgnss_avail[gnss]:
            logger.info('{func:s}: plotting for system {gnss:s}'.format(
                gnss=gnss, func=cFuncName))

            # the indexes on the x-axis
            ind = np.arange(len(df.index))
            logger.info('{func:s}: ind = {ind!s}'.format(ind=ind,
                                                         func=cFuncName))

            # columns of this system
            gnss_cols = [
                '{gnss:s}{bin:s}'.format(gnss=gnss, bin=elev_bin)
                for elev_bin in elev_bins
            ]

            # calculate the total number of observations per system
            obs_per_bin = df.loc[:, gnss_cols].sum()
            logger.info('{func:s}: obs_per_bin = {nrobs!s}'.format(
                nrobs=obs_per_bin, func=cFuncName))

            if obs_name == 'PRres':
                # get index numbers for PRres between -2 and +2
                tmpValue = divmod(df.shape[0], 2)
                if tmpValue[1] == 0:
                    mid_prres = tmpValue[0] - 0.5
                else:
                    mid_prres = tmpValue[0]

            for axis, col in zip(ax.flat, gnss_cols):
                # create a filled area for domain [-1, 1] if PRres plot
                if obs_name == 'PRres':
                    axis.axvspan(mid_prres - 2,
                                 mid_prres + 2,
                                 alpha=0.1,
                                 color='green')

                # draw a bar plot
                axis.bar(ind + (i * col_move),
                         df[col] / obs_per_bin.sum() * 100,
                         alpha=0.5,
                         color=color,
                         edgecolor='none')

                # rotate the ticks on this axis
                idx = np.arange(len(df.index))
                axis.set_xticks(idx)
                axis.set_xticklabels(df.index.tolist(), rotation='vertical')

                axis.annotate('# = {:.0f} ({:.2f}%)'.format(
                    ds[col], ds[col] / ds.sum() * 100),
                              xy=(1, 1),
                              xycoords='axes fraction',
                              xytext=(0, -25),
                              textcoords='offset pixels',
                              horizontalalignment='right',
                              verticalalignment='bottom',
                              weight='strong',
                              fontsize='large')

                # set the title for sub-plot
                axis.set_title(
                    label='Elevation bin {bin:s}'.format(bin=col[3:]),
                    fontsize='x-large')

                # set the title for the Y axis
                axis.set_ylabel(
                    '{obs:s} statistics in [%]'.format(obs=obs_name))

    # save the plot in subdir png of GNSSSystem
    amutils.mkdir_p(os.path.join(dRtk['info']['dir'], 'png'))
    pngName = os.path.join(
        dRtk['info']['dir'], 'png',
        os.path.splitext(dRtk['info']['rtkPosFile'])[0] +
        '-{syst:s}-{obs:s}-dist.png'.format(syst=syst_names.replace(" ", ""),
                                            obs=obs_name))
    fig.savefig(pngName, dpi=fig.dpi)

    if showplot:
        plt.show(block=True)
    else:
        plt.close(fig)
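plot_elev_distribution consumes a dataframe whose columns are GNSS-prefixed elevation bins. A hedged sketch of how such a distribution could be derived from raw per-observation data with pd.cut (the column names and the 10-degree bin width are assumptions, not the actual parse_elevation_distribution logic):

import numpy as np
import pandas as pd

# hypothetical raw input: one row per observation with elevation and CN0
dfSat = pd.DataFrame({'Elev': np.random.uniform(5, 90, 1000),
                      'CN0': np.random.uniform(25, 55, 1000)})

# bin by elevation in 10-degree steps and count observations per bin
bins = np.arange(0, 100, 10)
dfSat['elev_bin'] = pd.cut(dfSat['Elev'], bins=bins)
print(dfSat.groupby('elev_bin')['CN0'].count())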
def calcDOPs(dfSats: pd.DataFrame, logger: logging.Logger) -> pd.DataFrame:
    """
    calculates the number of SVs used and corresponding DOP values
    """
    cFuncName = colored(os.path.basename(__file__), 'yellow') + ' - ' + colored(sys._getframe().f_code.co_name, 'green')

    logger.info('{func:s}: calculating number of SVs in PVT and DOP values'.format(func=cFuncName))

    # calculate sin/cos of elevation/azimuth
    dfSats['sinEl'] = np.sin(np.deg2rad(dfSats.Elev))
    dfSats['cosEl'] = np.cos(np.deg2rad(dfSats.Elev))
    dfSats['sinAz'] = np.sin(np.deg2rad(dfSats.Azim))
    dfSats['cosAz'] = np.cos(np.deg2rad(dfSats.Azim))

    # calculate the direction cosines for each satellite
    dfSats['alpha'] = dfSats['cosEl'] * dfSats['sinAz']
    dfSats['beta'] = dfSats['cosEl'] * dfSats['cosAz']
    dfSats['gamma'] = dfSats['sinEl']

    amc.logDataframeInfo(df=dfSats, dfName='dfSats', callerName=cFuncName, logger=logger)

    # get count of SVs
    dfSVCount = countSVs(dfSVs=dfSats, logger=logger)
    amc.logDataframeInfo(df=dfSVCount, dfName='dfSVCount', callerName=cFuncName, logger=logger)

    # calculating DOP is time consuming, so thin down the TOWs
    naTOWs4DOP = getTOWs4DOP(dfNrSVs=dfSVCount, logger=logger)
    logger.debug('{func:s} TOWs for calculating DOPs = {array!s}'.format(func=cFuncName, array=naTOWs4DOP))

    # create a dataframe for DOP values containing the DateTime column (unique values)
    dfDOPs = pd.DataFrame(naTOWs4DOP, columns=['DT'])
    amc.logDataframeInfo(df=dfDOPs, dfName='dfDOPs start', callerName=cFuncName, logger=logger)

    # select the #SVs from dfSVCount for the intervals we use for DOP calculation
    # amutils.logHeadTailDataFrame(logger=logger, callerName=cFuncName, df=dfSVCount, dfName='dfSVCount')
    dfNrSVs4DOP = dfSVCount.loc[dfSVCount['DT'].isin(naTOWs4DOP)]
    dfNrSVs4DOP.reset_index(inplace=True)
    amc.logDataframeInfo(df=dfNrSVs4DOP, dfName='dfNrSVs4DOP', callerName=cFuncName, logger=logger)

    # merge last column with #SVs into dfDops
    dfDOPs.loc[:, '#SVs'] = dfNrSVs4DOP['#SVs']

    # add NA columns for xDOP values
    dfDOPs = dfDOPs.reindex(columns=dfDOPs.columns.tolist() + ['HDOP', 'VDOP', 'PDOP', 'GDOP'])
    # amutils.logHeadTailDataFrame(logger=logger, callerName=cFuncName, df=dfDOPs, dfName='dfDOPs')

    # iterate over all unique TOWs to determine corresponding xDOP values
    logger.info('{func:s}: calculating xDOP values for {epochs:d} epochs'.format(func=cFuncName, epochs=len(naTOWs4DOP)))

    for i, DT in enumerate(naTOWs4DOP):
        # get the index for each DT we have so that we can select the direction cosines associated
        # DT = '2019-04-10 00:00:00'
        # dt = DT.strptime('%Y-%m-%d %H:%M:%S')
        # print('DT = {!s}   {!s}'.format(DT, type(DT)))
        # print('np.datetime64(DT) = {!s}   {!s}'.format(np.datetime64(DT), type(np.datetime64(DT))))
        # print('dfSats[DT].iloc[0] = {!s}   {!s}'.format(dfSats['DT'].iloc[0], type(dfSats['DT'].iloc[0])))

        towIndices = dfSats.index[dfSats['DT'] == np.datetime64(DT)].tolist()
        # print('towIndices = {!s}'.format(towIndices))

        # create matrix with the direction cosines
        dfTOW = dfSats[['alpha', 'beta', 'gamma']].iloc[towIndices]
        dfTOW['delta'] = 1.
        A = dfTOW.to_numpy()
        # print('dfTOW = {!s}'.format(dfTOW))
        # print('dfTOW = {!s}'.format(type(dfTOW)))

        # invert ATA and retain its diagonal: the diagonal elements of
        # (A^T A)^-1 are the variance factors of the solution
        ATAinvDiag = np.linalg.inv(A.transpose().dot(A)).diagonal()

        # get the index for this DT into the dfDOPs
        indexTOW = dfDOPs.index[dfDOPs['DT'] == DT].tolist()[0]

        # calculate the xDOP values (square roots of sums of diagonal
        # elements, per the standard DOP definitions) and store them in
        # dfDOPs; .loc avoids chained-indexing assignment
        dfDOPs.loc[indexTOW, 'HDOP'] = np.sqrt(ATAinvDiag[0] + ATAinvDiag[1])
        dfDOPs.loc[indexTOW, 'VDOP'] = np.sqrt(ATAinvDiag[2])
        dfDOPs.loc[indexTOW, 'PDOP'] = np.sqrt(ATAinvDiag[0] + ATAinvDiag[1] + ATAinvDiag[2])
        dfDOPs.loc[indexTOW, 'GDOP'] = np.sqrt(ATAinvDiag.sum())

        # print('dfDOPS.iloc[indexTOW] = {!s}'.format(dfDOPs.iloc[indexTOW]))

        # show progress bar
        progbar(i, len(naTOWs4DOP), 60)

    print()  # empty print statement for ending progbar
    # drop the cos/sin & direction cosines columns from dfSats
    dfSats.drop(['sinEl', 'cosEl', 'sinAz', 'cosAz', 'alpha', 'beta', 'gamma'], axis=1, inplace=True)

    amc.logDataframeInfo(df=dfDOPs, dfName='dfDOPs (end)', callerName=cFuncName, logger=logger)
    amutils.logHeadTailDataFrame(logger=logger, callerName=cFuncName, df=dfDOPs, dfName='dfDOPs')

    return dfDOPs
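The xDOP computation follows the standard definitions: with one row (alpha, beta, gamma, 1) per satellite in the geometry matrix A and Q = (A^T A)^-1, HDOP = sqrt(q11 + q22), VDOP = sqrt(q33), PDOP = sqrt(q11 + q22 + q33) and GDOP = sqrt(trace(Q)). A compact worked example for a single epoch with made-up elevations and azimuths:

import numpy as np

# elevations and azimuths (degrees) of the SVs in a single epoch
elev = np.radians([15., 35., 55., 75., 25.])
azim = np.radians([40., 130., 220., 310., 0.])

# geometry matrix A: one row (alpha, beta, gamma, 1) per satellite
A = np.column_stack((np.cos(elev) * np.sin(azim),
                     np.cos(elev) * np.cos(azim),
                     np.sin(elev),
                     np.ones_like(elev)))

# Q = (A^T A)^-1; its diagonal carries the variance factors
qDiag = np.linalg.inv(A.T @ A).diagonal()
HDOP = np.sqrt(qDiag[0] + qDiag[1])
VDOP = np.sqrt(qDiag[2])
PDOP = np.sqrt(qDiag[0] + qDiag[1] + qDiag[2])
GDOP = np.sqrt(qDiag.sum())
print(HDOP, VDOP, PDOP, GDOP)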
def parsePosFile(logger: logging.Logger) -> pd.DataFrame:
    """
    parses 'posn' file created by pyrtklib.py
    """

    # set current function name
    cFuncName = colored(os.path.basename(__file__),
                        'yellow') + ' - ' + colored(
                            sys._getframe().f_code.co_name, 'green')

    posFilePath = os.path.join(amc.dRTK['posDir'], amc.dRTK['posFile'])

    logger.info('{func:s}: parsing rtk-pos file {posf:s}'.format(
        func=cFuncName, posf=posFilePath))

    # scan the header for the obs start/end times and the reference position
    foundRefPos = False
    with open(posFilePath) as fPos:
        for line in fPos:
            rec = line.strip()
            if rec.startswith('% obs start'):
                amc.dRTK['obsStart'] = datetime.strptime(rec[14:33],
                                                         '%Y/%m/%d %H:%M:%S')
            elif rec.startswith('% obs end'):
                amc.dRTK['obsEnd'] = datetime.strptime(rec[14:33],
                                                       '%Y/%m/%d %H:%M:%S')
            elif rec.startswith('% ref pos'):
                amc.dRTK['RefPos'] = [float(x) for x in rec.split(':')[1].split()]
                amc.dRTK['RefPosUTM'] = utm.from_latlon(amc.dRTK['RefPos'][0],
                                                        amc.dRTK['RefPos'][1])
                logger.info(
                    '{func:s}: reference station coordinates are LLH={llh!s} UTM={utm!s}'
                    .format(func=cFuncName,
                            llh=amc.dRTK['RefPos'],
                            utm=amc.dRTK['RefPosUTM']))
                foundRefPos = True

    if not foundRefPos:
        amc.dRTK['RefPos'] = [np.NaN, np.NaN, np.NaN]
        amc.dRTK['RefPosUTM'] = (np.NaN, np.NaN, np.NaN, np.NaN)
        logger.info(
            '{func:s}: no reference station used'.format(func=cFuncName))

    # find start of results in rtk file
    endHeaderLine = amutils.line_num_for_phrase_in_file('%  GPST', posFilePath)
    dfPos = pd.read_csv(posFilePath,
                        header=endHeaderLine,
                        delim_whitespace=True)
    dfPos = dfPos.rename(
        columns={
            '%': 'WNC',
            'GPST': 'TOW',
            'latitude(deg)': 'lat',
            'longitude(deg)': 'lon',
            'height(m)': 'ellH',
            'sdn(m)': 'sdn',
            'sde(m)': 'sde',
            'sdu(m)': 'sdu',
            'sdne(m)': 'sdne',
            'sdeu(m)': 'sdeu',
            'sdun(m)': 'sdun',
            'age(s)': 'age'
        })

    # check if we have records for this mode in the data, else exit
    if dfPos.shape[0] == 0:
        logger.info('{func:s}: found no data in pos-file {pos:s}'.format(
            func=cFuncName, pos=amc.dRTK['posFile']))
        sys.exit(amc.E_FAILURE)

    # store total number of observations
    amc.dRTK['#obs'] = dfPos.shape[0]

    # store number of calculated positions for requested rtk quality
    amc.dRTK['#obsQual'] = len(dfPos.loc[dfPos['Q'] == amc.dRTK['iQual']])

    logger.info('{func:s}: amc.dRTK = \n{drtk!s}'.format(func=cFuncName,
                                                         drtk=amc.dRTK))

    # convert the time in seconds
    dfPos['DT'] = dfPos.apply(lambda x: gpstime.UTCFromWT(x['WNC'], x['TOW']),
                              axis=1)

    # add UTM coordinates
    dfPos['UTM.E'], dfPos['UTM.N'], dfPos['UTM.Z'], dfPos[
        'UTM.L'] = utm.from_latlon(dfPos['lat'].to_numpy(),
                                   dfPos['lon'].to_numpy())

    amutils.logHeadTailDataFrame(logger=logger,
                                 callerName=cFuncName,
                                 df=dfPos,
                                 dfName='{posf:s}'.format(posf=posFilePath))

    amc.logDataframeInfo(df=dfPos,
                         dfName='dfPos',
                         callerName=cFuncName,
                         logger=logger)

    return dfPos
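parsePosFile locates the header row starting with '%  GPST' via amutils.line_num_for_phrase_in_file before handing it to pd.read_csv(header=...). A minimal sketch of what such a helper could look like (an assumption; the actual amutils code may differ), returning the 0-based line number of the first match, or -1 when the phrase is absent:

def line_num_for_phrase_in_file(phrase: str, filename: str) -> int:
    """return the 0-based number of the first line containing phrase, else -1"""
    with open(filename, 'r') as fin:
        for nr, line in enumerate(fin):
            if phrase in line:
                return nr
    return -1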
Exemple #23
0
def main(argv):
    """
    pyRTKPlot adds UTM coordinates to output of rnx2rtkp.
    If a 'stat' file is available, calculates xDOP values and makes plots of statistics

    """
    amc.cBaseName = colored(os.path.basename(__file__), 'yellow')
    cFuncName = colored(os.path.basename(__file__),
                        'yellow') + ' - ' + colored(
                            sys._getframe().f_code.co_name, 'green')

    # some options for display of dataframes
    pd.set_option('display.max_columns', None)  # or 1000
    pd.set_option('display.max_rows', None)  # or 1000
    pd.set_option('display.max_colwidth', None)  # None instead of -1, which recent pandas rejects
    # limit float precision
    json.encoder.FLOAT_REPR = lambda o: format(o, '.3f')
    np.set_printoptions(precision=4)

    # treat command line options
    rtkPosFile, rtkDir, crdMarker, showPlots, overwrite, logLevels = treatCmdOpts(
        argv)

    # create logging for better debugging
    logger, log_name = amc.createLoggers(baseName=os.path.basename(__file__),
                                         dir=rtkDir,
                                         logLevels=logLevels)

    # change to selected directory if exists
    # print('rtkDir = %s' % rtkDir)
    if not os.path.exists(rtkDir):
        logger.error('{func:s}: directory {dir:s} does not exist'.format(
            func=cFuncName, dir=colored(rtkDir, 'red')))
        sys.exit(amc.E_DIR_NOT_EXIST)
    else:
        os.chdir(rtkDir)
        logger.info('{func:s}: changed to dir {dir:s}'.format(func=cFuncName,
                                                              dir=colored(
                                                                  rtkDir,
                                                                  'green')))

    # store information
    dInfo = {}
    dInfo['dir'] = rtkDir
    dInfo['rtkPosFile'] = rtkPosFile
    dInfo['rtkStatFile'] = dInfo['rtkPosFile'] + '.stat'
    dInfo['posn'] = dInfo['rtkPosFile'] + '.posn'
    dInfo['posnstat'] = dInfo['posn'] + '.html'
    amc.dRTK['info'] = dInfo

    # GNSS system is last part of root directory
    amc.dRTK['syst'] = 'UNKNOWN'
    for syst in ('GAL', 'GPS', 'COM'):
        if syst.lower() in amc.dRTK['info']['dir'].lower():
            amc.dRTK['syst'] = syst
    # print('amc.dRTK['syst'] = {:s}'.format(amc.dRTK['syst']))

    # info about PDOP bins and statistics
    dPDOP = {}
    dPDOP['bins'] = [0, 2, 3, 4, 5, 6, math.inf]
    amc.dRTK['PDOP'] = dPDOP

    # set the reference point
    dMarker = {}
    dMarker['lat'], dMarker['lon'], dMarker['ellH'] = map(float, crdMarker)
    logger.debug('{func:s}: crdMarker = {crd!s}'.format(func=cFuncName, crd=crdMarker))

    if [dMarker['lat'], dMarker['lon'], dMarker['ellH']] == [0, 0, 0]:
        dMarker['lat'] = dMarker['lon'] = dMarker['ellH'] = np.NaN
        dMarker['UTM.E'] = dMarker['UTM.N'] = np.NaN
        dMarker['UTM.Z'] = dMarker['UTM.L'] = ''
    else:
        dMarker['UTM.E'], dMarker['UTM.N'], dMarker['UTM.Z'], dMarker[
            'UTM.L'] = utm.from_latlon(dMarker['lat'], dMarker['lon'])

    logger.info('{func:s}: marker coordinates = {crd!s}'.format(func=cFuncName,
                                                                crd=dMarker))
    amc.dRTK['marker'] = dMarker

    # check whether pos and stat file are present, else exit
    if not os.access(os.path.join(rtkDir, amc.dRTK['info']['rtkPosFile']),
                     os.R_OK) or not os.access(
                         os.path.join(rtkDir, amc.dRTK['info']['rtkStatFile']),
                         os.R_OK):
        logger.error(
            '{func:s}: file {pos:s} or {stat:s} is not accessible'.format(
                func=cFuncName,
                pos=os.path.join(rtkDir, amc.dRTK['info']['rtkPosFile']),
                stat=os.path.join(rtkDir, amc.dRTK['info']['rtkStatFile'])))

        sys.exit(amc.E_FILE_NOT_EXIST)

    # read the position file into a dataframe and add dUTM coordinates
    logger.info('{func:s}: parsing RTKLib pos file {pos:s}'.format(
        pos=amc.dRTK['info']['rtkPosFile'], func=cFuncName))
    dfPosn = parse_rtk_files.parseRTKLibPositionFile(logger=logger)

    # calculate the weighted average of llh & enu
    amc.dRTK['WAvg'] = parse_rtk_files.weightedAverage(dfPos=dfPosn,
                                                       logger=logger)

    # find difference with reference and max/min limits for UTM plot
    logger.info(
        '{func:s}: calculating coordinate difference with reference/mean position'
        .format(func=cFuncName))
    dfCrd, dCrdLim = plot_position.crdDiff(
        dMarker=amc.dRTK['marker'],
        dfUTMh=dfPosn[['UTM.E', 'UTM.N', 'ellH']],
        plotCrds=['UTM.E', 'UTM.N', 'ellH'],
        logger=logger)
    # merge dfCrd into dfPosn
    dfPosn[['dUTM.E', 'dUTM.N', 'dEllH']] = dfCrd[['UTM.E', 'UTM.N', 'ellH']]

    # work on the statistics file
    # split it into relevant parts
    dTmpFiles = parse_rtk_files.splitStatusFile(
        amc.dRTK['info']['rtkStatFile'], logger=logger)

    # parse the satellite file (contains Az, El, PRRes, CN0)
    dfSats = parse_rtk_files.parseSatelliteStatistics(dTmpFiles['sat'],
                                                      logger=logger)
    store_to_cvs(df=dfSats, ext='sats', dInfo=amc.dRTK, logger=logger)

    # determine statistics on PR residuals for all satellites per elevation bin
    dfDistCN0, dsDistCN0, dfDistPRres, dsDistPRRes = parse_rtk_files.parse_elevation_distribution(
        dRtk=amc.dRTK, dfSat=dfSats, logger=logger)
    store_to_cvs(df=dfDistCN0, ext='CN0.dist', dInfo=amc.dRTK, logger=logger)
    store_to_cvs(df=dfDistPRres,
                 ext='PRres.dist',
                 dInfo=amc.dRTK,
                 logger=logger)

    # determine statistics of PR residuals for each satellite
    amc.dRTK['PRres'] = parse_rtk_files.parse_sv_residuals(dfSat=dfSats,
                                                           logger=logger)

    # calculate DOP values from El, Az info for each TOW
    dfDOPs = parse_rtk_files.calcDOPs(dfSats, logger=logger)
    store_to_cvs(df=dfDOPs, ext='XDOP', dInfo=amc.dRTK, logger=logger)

    # merge the PDOP column of dfDOPs into dfPosn and interpolate the PDOP column
    dfResults = pd.merge(left=dfPosn,
                         right=dfDOPs[['DT', 'PDOP', 'HDOP', 'VDOP', 'GDOP']],
                         left_on='DT',
                         right_on='DT',
                         how='left')
    dfPosn = dfResults.interpolate()
    store_to_cvs(df=dfPosn, ext='posn', dInfo=amc.dRTK, logger=logger)

    # calculate per DOP bin the statistics of PDOP
    parse_rtk_files.addPDOPStatistics(dRtk=amc.dRTK,
                                      dfPos=dfPosn,
                                      logger=logger)

    # add statistics for the E,N,U coordinate differences
    dfStatENU = enu_stat.enu_statistics(
        dRtk=amc.dRTK,
        dfENU=dfPosn[['DT', 'dUTM.E', 'dUTM.N', 'dEllH']],
        logger=logger)
    # add statistics for the E,N,U coordinate differences
    dfDistENU, dfDistXDOP = enu_stat.enupdop_distribution(dRtk=amc.dRTK,
                                                          dfENU=dfPosn[[
                                                              'DT', 'dUTM.E',
                                                              'dUTM.N',
                                                              'dEllH', 'PDOP',
                                                              'HDOP', 'VDOP',
                                                              'GDOP'
                                                          ]],
                                                          logger=logger)
    store_to_cvs(df=dfDistENU, ext='ENU.dist', dInfo=amc.dRTK, logger=logger)
    store_to_cvs(df=dfDistXDOP, ext='XDOP.dist', dInfo=amc.dRTK, logger=logger)

    logger.info('{func:s}: dRTK =\n{settings!s}'.format(func=cFuncName,
                                                        settings=json.dumps(
                                                            amc.dRTK,
                                                            sort_keys=False,
                                                            indent=4)))

    # # store statistics for dfPosn
    # logger.info('{func:s}: creating pandas profile report {ppname:s} for dfPosn, {help:s}'.format(ppname=colored(amc.dRTK['info']['posnstat'], 'green'), help=colored('be patient', 'red'), func=cFuncName))
    # dfProfile = dfPosn[['DT', 'ns', 'dUTM.E', 'dUTM.N', 'dEllH', 'sdn', 'sde', 'sdu', 'PDOP']]

    # ppTitle = 'Report on {posn:s} - {syst:s} - {date:s}'.format(posn=amc.dRTK['info']['posn'], syst=amc.dRTK['syst'], date=amc.dRTK['Time']['date'])

    # profile = pp.ProfileReport(df=dfProfile, check_correlation_pearson=False, correlations={'pearson': False, 'spearman': False, 'kendall': False, 'phi_k': False, 'cramers': False, 'recoded': False}, title=ppTitle)
    # profile.to_file(output_file=amc.dRTK['info']['posnstat'])

    # parse the clock stats
    dfCLKs = parse_rtk_files.parseClockBias(dTmpFiles['clk'], logger=logger)
    store_to_cvs(df=dfCLKs, ext='clks', dInfo=amc.dRTK, logger=logger)

    # BEGIN debug
    dfs = (dfPosn, dfSats, dfCLKs, dfCrd, dfDOPs, dfStatENU, dfDistENU,
           dfDistXDOP, dfDistPRres, dfDistCN0)
    dfsNames = ('dfPosn', 'dfSats', 'dfCLKs', 'dfCrd', 'dfDOPs', 'dfStatENU',
                'dfDistENU', 'dfDistXDOP', 'dfDistPRres', 'dfDistCN0')
    for df, dfName in zip(dfs, dfsNames):
        amutils.logHeadTailDataFrame(logger=logger,
                                     callerName=cFuncName,
                                     df=df,
                                     dfName=dfName)
        amc.logDataframeInfo(df=df,
                             dfName=dfName,
                             callerName=cFuncName,
                             logger=logger)
    # EOF debug

    # create the position plot (use DOP to color segments)
    plot_position.plotUTMOffset(dRtk=amc.dRTK,
                                dfPos=dfPosn,
                                dfCrd=dfCrd,
                                dCrdLim=dCrdLim,
                                logger=logger,
                                showplot=showPlots)

    # create the UTM N-E scatter plot
    plot_scatter.plotUTMScatter(dRtk=amc.dRTK,
                                dfPos=dfPosn,
                                dfCrd=dfCrd,
                                dCrdLim=dCrdLim,
                                logger=logger,
                                showplot=showPlots)
    plot_scatter.plotUTMScatterBin(dRtk=amc.dRTK,
                                   dfPos=dfPosn,
                                   dfCrd=dfCrd,
                                   dCrdLim=dCrdLim,
                                   logger=logger,
                                   showplot=showPlots)

    # create ENU distribution plots
    plot_distributions_crds.plot_enu_distribution(dRtk=amc.dRTK,
                                                  dfENUdist=dfDistENU,
                                                  dfENUstat=dfStatENU,
                                                  logger=logger,
                                                  showplot=showPlots)

    # create XDOP plots
    plot_distributions_crds.plot_xdop_distribution(dRtk=amc.dRTK,
                                                   dfXDOP=dfDOPs,
                                                   dfXDOPdisp=dfDistXDOP,
                                                   logger=logger,
                                                   showplot=showPlots)

    # plot pseudo-range residuals
    dPRResInfo = {
        'name': 'PRres',
        'yrange': [-6, 6],
        'title': 'PR Residuals',
        'unit': 'm',
        'linestyle': '-'
    }
    logger.info(
        '{func:s}: creating dPRRes plots based on dict {dict!s}'.format(
            func=cFuncName, dict=dPRResInfo))
    plot_sats_column.plotRTKLibSatsColumn(dCol=dPRResInfo,
                                          dRtk=amc.dRTK,
                                          dfSVs=dfSats,
                                          logger=logger,
                                          showplot=showPlots)

    # plot CN0
    dCN0Info = {
        'name': 'CN0',
        'yrange': [20, 60],
        'title': 'CN0 Ratio',
        'unit': 'dBHz',
        'linestyle': '-'
    }
    logger.info('{func:s}: creating CN0 plots based on dict {dict!s}'.format(
        func=cFuncName, dict=dCN0Info))
    plot_sats_column.plotRTKLibSatsColumn(dCol=dCN0Info,
                                          dRtk=amc.dRTK,
                                          dfSVs=dfSats,
                                          logger=logger,
                                          showplot=showPlots)

    # create plots for elevation distribution of CN0 and PRres
    plot_distributions_elev.plot_elev_distribution(dRtk=amc.dRTK,
                                                   df=dfDistCN0,
                                                   ds=dsDistCN0,
                                                   obs_name='CN0',
                                                   logger=logger,
                                                   showplot=showPlots)
    plot_distributions_elev.plot_elev_distribution(dRtk=amc.dRTK,
                                                   df=dfDistPRres,
                                                   ds=dsDistPRRes,
                                                   obs_name='PRres',
                                                   logger=logger,
                                                   showplot=showPlots)

    # plot elevation
    dElevInfo = {
        'name': 'Elev',
        'yrange': [0, 90],
        'title': 'Elevation',
        'unit': 'Deg',
        'linestyle': '-'
    }
    logger.info('{func:s}: creating Elev plots based on dict {dict!s}'.format(
        func=cFuncName, dict=dElevInfo))
    plot_sats_column.plotRTKLibSatsColumn(dCol=dElevInfo,
                                          dRtk=amc.dRTK,
                                          dfSVs=dfSats,
                                          logger=logger,
                                          showplot=showPlots)

    # plot the receiver clock
    logger.info('{func:s}: creating Clock plots'.format(func=cFuncName))
    plot_clock.plotClock(dfClk=dfCLKs,
                         dRtk=amc.dRTK,
                         logger=logger,
                         showplot=showPlots)

    logger.info('{func:s}: final amc.dRTK =\n{settings!s}'.format(
        func=cFuncName,
        settings=json.dumps(amc.dRTK, sort_keys=False, indent=4)))

    jsonName = amc.dRTK['info']['rtkPosFile'] + '.json'
    with open(jsonName, 'w') as f:
        json.dump(amc.dRTK, f, ensure_ascii=False, indent=4)

    logger.info('{func:s}: created json file {json:s}'.format(func=cFuncName,
                                                              json=colored(
                                                                  jsonName,
                                                                  'green')))

    # copy temp log file to the YYDOY directory
    copyfile(
        log_name,
        os.path.join(
            amc.dRTK['info']['dir'], '{obs:s}-{prog:s}'.format(
                obs=amc.dRTK['info']['rtkPosFile'].replace(';', '_'),
                prog='plot.log')))
    os.remove(log_name)
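The merge-and-interpolate step above densifies the sparse xDOP series onto every position epoch: a left merge keeps all position epochs and leaves NaN where no DOP was computed, and interpolate() fills those gaps linearly. A tiny self-contained illustration of the pattern with made-up values:

import pandas as pd

dfDense = pd.DataFrame({'DT': pd.date_range('2020-01-01', periods=6, freq='S'),
                        'ellH': [10.0, 10.1, 10.0, 9.9, 10.0, 10.1]})
dfSparse = pd.DataFrame({'DT': dfDense['DT'].iloc[::3].reset_index(drop=True),
                         'PDOP': [1.8, 2.1]})

# left merge keeps every position epoch; PDOP is NaN in between ...
dfMerged = pd.merge(left=dfDense, right=dfSparse, on='DT', how='left')
# ... and interpolate() fills those gaps linearly
dfMerged = dfMerged.interpolate()
print(dfMerged)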
Exemple #24
0
def addRTKResult(logger: logging.Logger):
    """
    adds the result to campaign csv file
    """
    # write formatted output of result
    cFuncName = amc.cBaseName + ': ' + colored(sys._getframe().f_code.co_name,
                                               'green')

    amc.dRTK['csvFile'] = os.path.join(
        amc.dRTK['rootDir'],
        '{campaign:s}.csv'.format(campaign=amc.dRTK['campaign']))

    # if campaign file exists, read in the data
    if os.access(amc.dRTK['csvFile'], os.R_OK):
        logger.info('{func:s}: reading campaign csv file {csv:s}'.format(
            func=cFuncName, csv=amc.dRTK['csvFile']))
        # read in the campaign data
        dfCampaign = pd.read_csv(amc.dRTK['csvFile'])
    else:
        # create an empty dataframe
        dfCampaign = pd.DataFrame(columns=[
            'campaign', 'marker', 'date', 'start', 'end', 'rtkqual',
            '#obsTotal', '#obsQual', 'lat', 'lon', 'ellH', 'sdu', 'UTM.E',
            'sde', 'UTM.N', 'sdn', 'UTM.Z', 'UTM.L', 'Ref_lat', 'Ref_lon',
            'Ref_ellH', 'Ref_UTM.E', 'Ref_UTM.N', 'Ref_UTM.Z', 'Ref_UTM.L',
            'Distance', 'DeltaH'
        ])

    # check whether the current processing has been saved to csv file
    index = dfCampaign.loc[(dfCampaign['campaign'] == amc.dRTK['campaign'])
                           & (dfCampaign['marker'] == amc.dRTK['marker']) &
                           (dfCampaign['rtkqual']
                            == amc.dRTK['rtkqual'])].index
    # if index is not empty => delete dataframe current info
    if len(index) > 0:
        dfCampaign.drop(index, inplace=True)

    # calculate the slant distance to Reference if available
    try:
        crdWAvg = (amc.dRTK['WAVG']['lat'], amc.dRTK['WAVG']['lon'])
        crdRefPt = (amc.dRTK['RefPos'][0], amc.dRTK['RefPos'][1])

        # geopy removed vincenty() in v2.0; geodesic() is its replacement
        distance = geopy.distance.geodesic(crdWAvg, crdRefPt).m
        DeltaH = amc.dRTK['WAVG']['ellH'] - amc.dRTK['RefPos'][2]

    except ValueError:
        distance = np.NaN
        DeltaH = np.NaN

    # add the new info to the csv file (DataFrame.append was removed in pandas 2.0)
    dfNewRow = pd.DataFrame([pd.Series([
        amc.dRTK['campaign'], amc.dRTK['marker'],
        amc.dRTK['obsStart'].strftime("%d/%m/%Y"),
        amc.dRTK['obsStart'].strftime('%H:%M:%S'),
        amc.dRTK['obsEnd'].strftime('%H:%M:%S'), amc.dRTK['rtkqual'],
        amc.dRTK['#obs'], amc.dRTK['#obsQual'], amc.dRTK['WAVG']['lat'],
        amc.dRTK['WAVG']['lon'], amc.dRTK['WAVG']['ellH'],
        amc.dRTK['WAVG']['sdellH'], amc.dRTK['WAVG']['UTM.E'],
        amc.dRTK['WAVG']['sdUTM.E'], amc.dRTK['WAVG']['UTM.N'],
        amc.dRTK['WAVG']['sdUTM.N'], amc.dRTK['WAVG']['UTM.Z'],
        amc.dRTK['WAVG']['UTM.L'], amc.dRTK['RefPos'][0],
        amc.dRTK['RefPos'][1], amc.dRTK['RefPos'][2], amc.dRTK['RefPosUTM'][0],
        amc.dRTK['RefPosUTM'][1], amc.dRTK['RefPosUTM'][2],
        amc.dRTK['RefPosUTM'][3], distance, DeltaH
    ], index=dfCampaign.columns)])
    dfCampaign = pd.concat([dfCampaign, dfNewRow], ignore_index=True)

    # sort the dataframe
    dfCampaign.sort_values(['campaign', 'date', 'rtkqual', 'marker'],
                           ascending=[True, True, True, True],
                           inplace=True)

    # format the output of data for importing in excel workbook
    # for col in ['ellH', 'UTM.E', 'UTM.N', 'Ref_ellH', 'Ref_UTM.N', 'Ref_UTM.E', 'Distance']:
    #     dfCampaign[col] = dfCampaign[col].map('{:.3f}'.format)
    # for col in ['lat', 'lon', 'Ref_lat', 'Ref_lon']:
    #     dfCampaign[col] = dfCampaign[col].map('{:.9f}'.format)

    # info logging for campaign
    amutils.logHeadTailDataFrame(logger=logger,
                                 callerName=cFuncName,
                                 df=dfCampaign,
                                 dfName='dfCampaign updated')

    # add campaign sheet write to excel workbook
    if amc.dRTK['excel']:
        df2excel.append_df_to_excel(filename=amc.dRTK['xlsName'],
                                    df=dfCampaign,
                                    sheet_name=amc.dRTK['campaign'],
                                    truncate_sheet=True,
                                    startrow=0,
                                    index=False,
                                    float_format="%.9f")
        logger.info(
            '{func:s}: added sheet {sheet:s} to workbook {wb:s}'.format(
                func=cFuncName,
                sheet=amc.dRTK['campaign'],
                wb=amc.dRTK['xlsName']))

    # save the dataframe by overwriting amc.dRTK['csvFile']
    dfCampaign.to_csv(amc.dRTK['csvFile'], index=None, header=True)
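A note on the slant-distance computation above: geopy removed vincenty() in version 2.0 and geodesic() is the documented replacement, as used in the fixed code. A short usage sketch with hypothetical coordinates:

import geopy.distance

crdWAvg = (50.8438, 4.3928)   # hypothetical weighted-average position
crdRefPt = (50.8440, 4.3930)  # hypothetical reference marker

# geodesic() replaces the vincenty() call removed in geopy 2.0
print('{dist:.3f} m'.format(dist=geopy.distance.geodesic(crdWAvg, crdRefPt).m))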
Exemple #25
0
def main(argv):
    """
    compares the observed rise/set times per PRN from a RINEX obs tabular file with those predicted from NORAD TLEs

    """
    amc.cBaseName = colored(os.path.basename(__file__), 'yellow')
    cFuncName = colored(os.path.basename(__file__),
                        'yellow') + ' - ' + colored(
                            sys._getframe().f_code.co_name, 'green')

    # treat command line options
    rnx_dir, gnss, cutoff, multiplier, showPlots, logLevels = treatCmdOpts(
        argv)

    # create logging for better debugging
    logger, log_name = amc.createLoggers(os.path.basename(__file__),
                                         dir=rnx_dir,
                                         logLevels=logLevels)

    logger.info('{func:s}: arguments processed: {args!s}'.format(
        args=rnx_dir, func=cFuncName))

    # check validity of passed arguments
    retCode = checkValidityArgs(dir_rnx=rnx_dir, logger=logger)
    if retCode != amc.E_SUCCESS:
        logger.error('{func:s}: Program exits with code {error:s}'.format(
            error=colored('{!s}'.format(retCode), 'red'), func=cFuncName))
        sys.exit(retCode)

    # store parameters
    amc.dRTK = {}
    # get the information from pyconvbin created json file
    read_json(dir_rnx=rnx_dir, logger=logger)

    # load the requested OBSTAB file into a pandas dataframe
    df_obs = rnxobs_tabular.read_obs_tabular(gnss=gnss, logger=logger)
    df_obs['gap'] = np.nan

    amutils.logHeadTailDataFrame(logger=logger,
                                 callerName=cFuncName,
                                 df=df_obs,
                                 dfName='df_obs')
    # get unique list of PRNs in dataframe
    prn_lst = sorted(df_obs['PRN'].unique())
    logger.info('{func:s}: observed PRNs are {prns!s} (#{total:d})'.format(
        prns=prn_lst, total=len(prn_lst), func=cFuncName))

    logger.info(
        '{func:s}: getting corresponding NORAD info'.format(func=cFuncName))

    # read the files galileo-NORAD-PRN.t and gps-ops-NORAD-PRN.t
    dfNORAD = tle_parser.read_norad2prn(logger=logger)
    amutils.logHeadTailDataFrame(logger=logger,
                                 callerName=cFuncName,
                                 df=dfNORAD,
                                 dfName='dfNORAD')

    # get the corresponding NORAD nrs for the given PRNs
    dNORADs = tle_parser.get_norad_numbers(prns=prn_lst,
                                           dfNorad=dfNORAD,
                                           logger=logger)
    logger.info('{func:s}: corresponding NORAD nrs (#{count:d}):'.format(
        count=len(dNORADs), func=cFuncName))

    # load a time scale and set RMA as Topo
    # loader = sf.Loader(dir_tle, expire=True)  # loads the needed data files into the tle dir
    ts = sf.load.timescale()
    RMA = sf.Topos('50.8438 N', '4.3928 E')
    logger.info('{func:s}: Earth station RMA @ {topo!s}'.format(
        topo=colored(RMA, 'green'), func=cFuncName))
    # get the datetime that corresponds to yydoy
    date_yydoy = datetime.strptime(amc.dRTK['rnx']['times']['DT'],
                                   '%Y-%m-%d %H:%M:%S')
    yydoy = date_yydoy.strftime('%y%j')
    logger.info(
        '{func:s}: calculating rise / set times for {date:s} ({yydoy:s})'.
        format(date=colored(date_yydoy.strftime('%d-%m-%Y'), 'green'),
               yydoy=yydoy,
               func=cFuncName))

    t0 = ts.utc(int(date_yydoy.strftime('%Y')), int(date_yydoy.strftime('%m')),
                int(date_yydoy.strftime('%d')))
    date_tomorrow = date_yydoy + timedelta(days=1)
    t1 = ts.utc(int(date_tomorrow.strftime('%Y')),
                int(date_tomorrow.strftime('%m')),
                int(date_tomorrow.strftime('%d')))

    # find corresponding TLE record for NORAD nrs
    df_tles = tle_parser.find_norad_tle_yydoy(dNorads=dNORADs,
                                              yydoy=yydoy,
                                              logger=logger)

    # list of rise / set times by observation / TLEs
    lst_obs_rise = []

    # determine from the observations and from the TLEs what the rise/set times are and the number of observations
    for prn in prn_lst:
        # find rise & set times for each SV and store them in dt_obs_rise and dt_obs_set
        nom_interval, dt_obs_rise, dt_obs_set, obs_arc_count = rnxobs_tabular.rise_set_times(
            prn=prn, df_obstab=df_obs, nomint_multi=multiplier, logger=logger)

        # find rise:set times using TLEs
        dt_tle_rise, dt_tle_set, dt_tle_cul, tle_arc_count = tle_parser.tle_rise_set_times(
            prn=prn,
            df_tle=df_tles,
            marker=RMA,
            t0=t0,
            t1=t1,
            elev_min=cutoff,
            obs_int=nom_interval,
            logger=logger)

        # add to list for creating dataframe
        lst_obs_rise.append([
            dt_obs_rise, dt_obs_set, obs_arc_count, dt_tle_rise, dt_tle_set,
            dt_tle_cul, tle_arc_count
        ])

    # collect the rise/set results into a dataframe
    df_rise_set_tmp = pd.DataFrame(lst_obs_rise,
                                   columns=[
                                       'obs_rise', 'obs_set', 'obs_arc_count',
                                       'tle_rise', 'tle_set', 'tle_cul',
                                       'tle_arc_count'
                                   ],
                                   index=prn_lst)

    # find corresponding arcs between observation and predicted TLE
    max_arcs, df_rise_set = rnxobs_tabular.intersect_arcs(
        df_rs=df_rise_set_tmp, logger=logger)

    # inform user
    amutils.logHeadTailDataFrame(logger=logger,
                                 callerName=cFuncName,
                                 df=df_rise_set,
                                 dfName='df_rise_set')
    # write to csv file
    csvName = os.path.join(amc.dRTK['gfzrnxDir'],
                           amc.dRTK['rnx']['gnss'][gnss]['marker'],
                           'rise-set-dt.csv')
    df_rise_set.to_csv(csvName, index=None, header=True)

    # create a new dataframe that has PRNs as index and the max_arcs columns with number of obs / TLEs
    df_obs_arcs = rnxobs_tabular.rearrange_arcs(nr_arcs=max_arcs,
                                                df_rs=df_rise_set,
                                                logger=logger)
    # write to csv file
    csvName = os.path.join(amc.dRTK['gfzrnxDir'],
                           amc.dRTK['rnx']['gnss'][gnss]['marker'],
                           'obs_arcs.csv')
    df_obs_arcs.to_csv(csvName, index=None, header=True)

    # plot the statistics of observed vs TLE predicted
    plot_obstab.plot_rise_set_times(gnss=gnss,
                                    df_rs=df_rise_set,
                                    logger=logger,
                                    showplot=showPlots)
    plot_obstab.plot_rise_set_stats(gnss=gnss,
                                    df_arcs=df_obs_arcs,
                                    nr_arcs=max_arcs,
                                    logger=logger,
                                    showplot=showPlots)

    # amutils.logHeadTailDataFrame(logger=logger, callerName=cFuncName, df=df_obs[(df_obs['gap'] > 1.) | (df_obs['gap'].isna())], dfName='df_obs', head=50)

    # logger.info('{func:s}: amc.dRTK =\n{json!s}'.format(json=json.dumps(amc.dRTK, sort_keys=False, indent=4, default=amutils.DT_convertor), func=cFuncName))

    # copy temp log file to the YYDOY directory
    copyfile(
        log_name,
        os.path.join(
            os.path.join(amc.dRTK['gfzrnxDir'],
                         amc.dRTK['rnx']['gnss'][gnss]['marker']),
            'pyobstab.log'))
    os.remove(log_name)
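rnxobs_tabular.rise_set_times derives rise and set epochs from gaps in the observation record: a new arc starts wherever the gap between consecutive epochs exceeds a multiple of the nominal observation interval. A hedged sketch of the idea (rise_set_sketch is illustrative only and assumes a 'time' column of datetimes; the actual helper may differ):

import pandas as pd


def rise_set_sketch(df_prn: pd.DataFrame, multiplier: float = 3.0):
    """detect observation arcs for one PRN from gaps in its 'time' column"""
    df_prn = df_prn.sort_values('time').reset_index(drop=True)
    gaps = df_prn['time'].diff().dt.total_seconds()
    nom_interval = gaps.median()
    # an arc starts at the first epoch and wherever the gap exceeds the threshold
    arc_starts = df_prn.index[gaps.isna() | (gaps > multiplier * nom_interval)]
    return nom_interval, df_prn.loc[arc_starts, 'time'].tolist()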