Beispiel #1
0
def parseClockBias(statsClk: tempfile._TemporaryFileWrapper, logger: logging.Logger) -> pd.DataFrame:
    """
    Parse the RTKLib clock-statistics file into a dataframe.

    :param statsClk: temporary file holding the RTKLib clock statistics
    :param logger: logger used for info/debug output
    :return: dataframe with the configured clock columns, zero estimates
             replaced by NaN, and an added UTC datetime column 'DT'
    """
    cFuncName = colored(os.path.basename(__file__), 'yellow') + ' - ' + colored(sys._getframe().f_code.co_name, 'green')

    logger.info('{func:s}: parsing RTKLib clock statistics {file:s}'.format(func=cFuncName, file=statsClk.name))

    # load the clock statistics using the column names / selection configured in rtkc
    dfCLKs = pd.read_csv(statsClk.name,
                         sep=',',
                         header=None,
                         names=rtkc.dRTKPosStat['Clk']['colNames'],
                         usecols=rtkc.dRTKPosStat['Clk']['useCols'])

    amc.logDataframeInfo(df=dfCLKs, dfName='dfCLKs', callerName=cFuncName, logger=logger)

    # the last 4 used columns hold the clock parameters; a value of 0 means "no estimate"
    clkCols = np.asarray(rtkc.dRTKPosStat['Clk']['useCols'][-4:])
    dfCLKs[clkCols] = dfCLKs[clkCols].replace({0: np.nan})

    # derive a UTC datetime from GPS week number (WNC) and time-of-week (TOW)
    dfCLKs['DT'] = dfCLKs.apply(lambda row: gpstime.UTCFromWT(row['WNC'], row['TOW']), axis=1)

    amc.logDataframeInfo(df=dfCLKs, dfName='dfCLKs', callerName=cFuncName, logger=logger)
    amutils.logHeadTailDataFrame(logger=logger, callerName=cFuncName, df=dfCLKs, dfName='dfCLKs')

    return dfCLKs
def countSVs(dfSVs: pd.DataFrame, logger: logging.Logger) -> pd.DataFrame:
    """
    Count the number of SVs observed per epoch and the epoch-to-epoch change.

    :param dfSVs: dataframe with one row per SV observation, containing a 'DT' column
    :param logger: logger used for debug output
    :return: dataframe with columns 'DT', '#SVs' and 'dSVs' (difference between
             consecutive SV counts)
    """
    cFuncName = colored(os.path.basename(__file__), 'yellow') + ' - ' + colored(sys._getframe().f_code.co_name, 'green')

    # one row per epoch; the value is the number of SV observations in that epoch
    dfCountSVs = dfSVs.groupby('DT').size().to_frame()

    amc.logDataframeInfo(df=dfCountSVs, dfName='dfCountSVs', callerName=cFuncName, logger=logger)

    dfCountSVs = dfCountSVs.reset_index()
    dfCountSVs.columns = ['DT', '#SVs']

    # a jump in the number of sats introduces a change in the DOP values
    dfCountSVs['dSVs'] = dfCountSVs['#SVs'].diff()

    amc.logDataframeInfo(df=dfCountSVs, dfName='dfCountSVs', callerName=cFuncName, logger=logger)

    return dfCountSVs
def parseClockBias(statsClk: tempfile._TemporaryFileWrapper,
                   logger: logging.Logger) -> pd.DataFrame:
    """
    Parse the RTKLib clock-statistics file into a dataframe.

    The file is read positionally (columns 1..8), the columns are renamed to
    the names configured in rtkc, zero clock estimates are mapped to NaN and a
    UTC datetime column 'DT' is added.

    :param statsClk: temporary file holding the RTKLib clock statistics
    :param logger: logger used for info/debug output
    :return: dataframe with the parsed clock statistics
    """
    cFuncName = colored(os.path.basename(__file__), 'yellow') + ' - ' + colored(sys._getframe().f_code.co_name, 'green')

    logger.info('{func:s}: parsing RTKLib clock statistics {file:s}'.format(func=cFuncName, file=statsClk.name))

    # read columns 1..8 of the clock statistics file and apply the configured names
    dfCLKs = pd.read_csv(statsClk.name,
                         sep=',',
                         header=None,
                         usecols=list(range(1, 9)))
    dfCLKs.columns = rtkc.dRTKPosStat['Clk']['useCols']
    amutils.printHeadTailDataFrame(df=dfCLKs, name='dfCLKs range')

    # the last 4 used columns hold the clock parameters; a value of 0 means "no estimate"
    clkCols = np.asarray(rtkc.dRTKPosStat['Clk']['useCols'][-4:])
    dfCLKs[clkCols] = dfCLKs[clkCols].replace({0: np.nan})

    # derive a UTC datetime from GPS week number (WNC) and time-of-week (TOW)
    dfCLKs['DT'] = dfCLKs.apply(lambda row: gpstime.UTCFromWT(row['WNC'], row['TOW']), axis=1)

    amc.logDataframeInfo(df=dfCLKs, dfName='dfCLKs', callerName=cFuncName, logger=logger)

    amutils.logHeadTailDataFrame(logger=logger, callerName=cFuncName, df=dfCLKs, dfName='dfCLKs')

    return dfCLKs
def parseSatelliteStatistics(statsSat: tempfile._TemporaryFileWrapper,
                             logger: logging.Logger) -> pd.DataFrame:
    """
    Read the RTKLib SAT statistics file into a dataframe.

    The file is read positionally (columns 1..10), the columns are renamed to
    the names configured in rtkc, a UTC datetime column 'DT' is added and
    zero-valued pseudo-range residuals are mapped to NaN.

    :param statsSat: temporary file holding the RTKLib satellite statistics
    :param logger: logger used for info/debug output
    :return: dataframe with the parsed satellite statistics
    """
    # set current function name
    cFuncName = colored(os.path.basename(__file__), 'yellow') + ' - ' + colored(sys._getframe().f_code.co_name, 'green')

    logger.info('{func:s}: Parsing RTKLib satellites file {file:s} ({info:s})'.format(func=cFuncName, file=statsSat.name, info=colored('be patient', 'red')))

    # read columns 1..10 of the satellite statistics file and apply the configured names
    dfSat = pd.read_csv(statsSat.name,
                        sep=',',
                        header=None,
                        usecols=list(range(1, 11)))
    dfSat.columns = rtkc.dRTKPosStat['Res']['useCols']
    amutils.printHeadTailDataFrame(df=dfSat, name='dfSat range')

    # derive a UTC datetime from GPS week number (WNC) and time-of-week (TOW)
    dfSat['DT'] = dfSat.apply(lambda row: gpstime.UTCFromWT(row['WNC'], row['TOW']), axis=1)

    # PRres == 0.0 indicates only 4 SVs were used, so no residuals could be
    # calculated; represent that as NaN
    dfSat.PRres.replace(0.0, np.nan, inplace=True)

    amc.logDataframeInfo(df=dfSat, dfName='dfSat', callerName=cFuncName, logger=logger)

    amutils.logHeadTailDataFrame(logger=logger, callerName=cFuncName, df=dfSat, dfName='dfSat')

    return dfSat
Beispiel #5
0
def parseRTKLibPositionFile(logger: logging.Logger) -> pd.DataFrame:
    """
    Parse the position file from RTKLIB processing into a dataframe.

    Reads amc.dRTK['info']['rtkPosFile'], renames the columns to short names,
    adds a UTC datetime column 'DT' and UTM coordinates, and stores the
    observation time span in amc.dRTK['Time'].

    :param logger: logger used for info/debug output
    :return: dataframe with the parsed positions
    """
    import io  # local import: used to capture DataFrame.info() output for logging

    # set current function name
    cFuncName = colored(os.path.basename(__file__), 'yellow') + ' - ' + colored(sys._getframe().f_code.co_name, 'green')

    logger.info('{func:s}: parsing RTKLib position file {posf:s}'.format(func=cFuncName, posf=amc.dRTK['info']['rtkPosFile']))

    # locate the end of the header, then read the whitespace-separated records
    endHeaderLine = amutils.line_num_for_phrase_in_file('%  GPST', amc.dRTK['info']['rtkPosFile'])
    dfPos = pd.read_csv(amc.dRTK['info']['rtkPosFile'], header=endHeaderLine, delim_whitespace=True)
    dfPos = dfPos.rename(columns={'%': 'WNC', 'GPST': 'TOW', 'latitude(deg)': 'lat', 'longitude(deg)': 'lon', 'height(m)': 'ellH', 'sdn(m)': 'sdn', 'sde(m)': 'sde', 'sdu(m)': 'sdu', 'sdne(m)': 'sdne', 'sdeu(m)': 'sdeu', 'sdun(m)': 'sdun', 'age(s)': 'age'})

    # convert the GPS time (week number + time-of-week) to UTC
    dfPos['DT'] = dfPos.apply(lambda x: gpstime.UTCFromWT(x['WNC'], x['TOW']), axis=1)

    # store the observed time span for later use
    dTime = {}
    dTime['epochs'] = dfPos.shape[0]
    dTime['date'] = dfPos.DT.iloc[0].strftime('%d %b %Y')
    dTime['start'] = dfPos.DT.iloc[0].strftime('%H:%M:%S')
    dTime['end'] = dfPos.DT.iloc[-1].strftime('%H:%M:%S')
    amc.dRTK['Time'] = dTime

    # add UTM coordinates
    dfPos['UTM.E'], dfPos['UTM.N'], dfPos['UTM.Z'], dfPos['UTM.L'] = utm.from_latlon(dfPos['lat'].to_numpy(), dfPos['lon'].to_numpy())
    logger.info('{func:s}: added UTM coordinates'.format(func=cFuncName))

    # inform user
    amc.logDataframeInfo(df=dfPos, dfName='dfPos', callerName=cFuncName, logger=logger)
    logger.info('{func:s}: dTime = {time!s}'.format(func=cFuncName, time=dTime))
    amutils.logHeadTailDataFrame(logger=logger, callerName=cFuncName, df=dfPos, dfName='{posf:s}'.format(posf=amc.dRTK['info']['rtkPosFile']))

    # DataFrame.info() writes to a buffer and returns None, so capture its
    # output explicitly instead of logging the None return value
    infoBuf = io.StringIO()
    dfPos.info(buf=infoBuf)
    logger.debug('{func:s}: dfPos info\n{info:s}'.format(info=infoBuf.getvalue(), func=cFuncName))

    return dfPos
Beispiel #6
0
def plotUTMOffset(dRtk: dict, dfPos: pd.DataFrame, dfCrd: pd.DataFrame, dCrdLim: dict, logger: logging.Logger, showplot: bool = False):
    """
    plotUTMOffset plots the offset NEU wrt to reference point, together with the
    number of used satellites and the PDOP on a last subplot, and saves the
    figure as a PNG in the 'png' subdirectory.

    :param dRtk: RTK processing info dict ('info', 'syst', 'Time' keys are read)
    :param dfPos: position dataframe (DT, ns, PDOP, sd* columns are read; a
                  'tDiff' column is added and 'ns' entries are masked at gaps)
    :param dfCrd: dataframe holding the coordinate offsets to plot
    :param dCrdLim: dict with 'min'/'max' y-axis limits for the offset subplots
    :param logger: logger used for info/debug output
    :param showplot: when True show the plot interactively, else close the figure
    """
    cFuncName = colored(os.path.basename(__file__), 'yellow') + ' - ' + colored(sys._getframe().f_code.co_name, 'green')

    # select colors for E, N, U coordinate difference
    colors = []
    colors.append([51 / 256., 204 / 256., 51 / 256.])
    colors.append([51 / 256., 51 / 256., 255 / 256.])
    colors.append([255 / 256., 51 / 256., 51 / 256.])

    # what to plot
    crds2Plot = ['UTM.E', 'UTM.N', 'ellH', 'ns']
    stdDev2Plot = ['sde', 'sdn', 'sdu']
    annotateList = ['east', 'north', 'ellh', '#SV']

    # find gaps in the data by comparing to mean value of difference in time
    dfPos['tDiff'] = dfPos['DT'].diff(1)
    dtMean = dfPos['tDiff'].mean()

    # mask the #SVs at data gaps so fill_between leaves a visible hole there
    dfPos.loc[dfPos['tDiff'] > dtMean, 'ns'] = np.nan

    amc.logDataframeInfo(df=dfPos, dfName='dfPos', callerName=cFuncName, logger=logger)

    # set up the plot
    plt.style.use('ggplot')

    # subplots
    fig, ax = plt.subplots(nrows=len(crds2Plot), ncols=1, sharex=True, figsize=(20.0, 16.0))
    fig.suptitle('{syst:s} - {posf:s} - {date:s}'.format(posf=dRtk['info']['rtkPosFile'], syst=dRtk['syst'], date=dRtk['Time']['date']))

    # make title for plot
    ax[0].annotate('{syst:s} - {date:s}'.format(syst=dRtk['syst'], date=dfPos['DT'].iloc[0].strftime('%d %b %Y')), xy=(0, 1), xycoords='axes fraction', xytext=(0, 0), textcoords='offset pixels', horizontalalignment='left', verticalalignment='bottom', weight='strong', fontsize='large')

    # copyright this
    ax[-1].annotate(r'$\copyright$ Alain Muls ([email protected])', xy=(1, 1), xycoords='axes fraction', xytext=(0, 0), textcoords='offset pixels', horizontalalignment='right', verticalalignment='bottom', weight='strong', fontsize='large')

    # subplots for coordinates display delta NEU
    for i, crd in enumerate(crds2Plot[:3]):
        axis = ax[i]

        # color for markers and alpha colors for error bars
        rgb = mpcolors.colorConverter.to_rgb(colors[i])
        rgb_new = amutils.make_rgb_transparent(rgb, (1, 1, 1), 0.3)

        # plot coordinate differences and error bars
        axis.errorbar(x=dfPos['DT'], y=dfCrd[crd], yerr=dfPos[stdDev2Plot[i]], linestyle='None', fmt='o', ecolor=rgb_new, capthick=1, markersize=1, color=colors[i])

        # set dimensions of y-axis
        axis.set_ylim([dCrdLim['min'], dCrdLim['max']])
        axis.set_ylabel('{crd:s} [m]'.format(crd=crd, fontsize='large'), color=colors[i])

        # annotate each subplot with its reference position
        annotatetxt = markerAnnotation(crd, stdDev2Plot[i])

        # put annotation text
        axis.annotate(annotatetxt, xy=(1, 1), xycoords='axes fraction', xytext=(0, 0), textcoords='offset pixels', horizontalalignment='right', verticalalignment='bottom', weight='strong', fontsize='large')

        # title of sub-plot
        axis.set_title('{crd:s} offset'.format(crd=str.capitalize(annotateList[i]), fontsize='large'))

    # last subplot: number of satellites & PDOP
    for _, crd in enumerate(crds2Plot[3:4]):
        # plot #SVs on left axis
        axis = ax[-1]
        axis.set_ylim([0, 24])
        axis.set_ylabel('#SVs [-]', fontsize='large', color='grey')

        axis.fill_between(dfPos['DT'], 0, dfPos['ns'], alpha=0.5, linestyle='-', linewidth=3, color='grey', label='#SVs', interpolate=False)
        # plot PDOP on second y-axis
        axRight = axis.twinx()

        axRight.set_ylim([0, 15])
        axRight.set_ylabel('PDOP [-]', fontsize='large', color='darkorchid')

        # plot PDOP value
        axRight.plot(dfPos['DT'], dfPos['PDOP'], linestyle='-', marker='.', markersize=1, color='darkorchid', label='PDOP')

        # set title
        axis.set_title('Visible satellites & PDOP', fontsize='large')

        # create the ticks for the time axis
        dtFormat = plot_utils.determine_datetime_ticks(startDT=dfPos['DT'].iloc[0], endDT=dfPos['DT'].iloc[-1])

        if dtFormat['minutes']:
            # BUGFIX: was range[1, 60, 5] which subscripts the range type and
            # raises TypeError; a range *call* is needed for the minute ticks
            axis.xaxis.set_major_locator(dates.MinuteLocator(byminute=range(1, 60, 5), interval=1))
        else:
            axis.xaxis.set_major_locator(dates.HourLocator(interval=dtFormat['hourInterval']))   # every 4 hours
        axis.xaxis.set_major_formatter(dates.DateFormatter('%H:%M'))  # hours and minutes

        axis.xaxis.set_minor_locator(dates.DayLocator(interval=1))    # every day
        axis.xaxis.set_minor_formatter(dates.DateFormatter('\n%d-%m-%Y'))

        axis.xaxis.set_tick_params(rotation=0)
        for tick in axis.xaxis.get_major_ticks():
            tick.label1.set_horizontalalignment('center')

    # save the plot in subdir png of GNSSSystem
    amutils.mkdir_p(os.path.join(dRtk['info']['dir'], 'png'))
    pngName = os.path.join(dRtk['info']['dir'], 'png', os.path.splitext(dRtk['info']['rtkPosFile'])[0] + '-ENU.png')
    fig.savefig(pngName, dpi=fig.dpi)

    logger.info('{func:s}: created plot {plot:s}'.format(func=cFuncName, plot=colored(pngName, 'green')))

    if showplot:
        plt.show(block=True)
    else:
        plt.close(fig)

    return
def parsePosFile(logger: logging.Logger) -> pd.DataFrame:
    """
    Parse the 'posn' file created by pyrtklib.py into a dataframe.

    Scans the file header for the observation start/end times and the
    reference position (stored into amc.dRTK), then reads the position
    records, renames the columns to short names, adds a UTC datetime column
    'DT' and UTM coordinates.

    Exits with amc.E_FAILURE when the file holds no position records.

    :param logger: logger used for info/debug output
    :return: dataframe with the parsed positions
    """

    # set current function name
    cFuncName = colored(os.path.basename(__file__),
                        'yellow') + ' - ' + colored(
                            sys._getframe().f_code.co_name, 'green')

    posFilePath = os.path.join(amc.dRTK['posDir'], amc.dRTK['posFile'])

    logger.info('{func:s} parsing rtk-pos file {posf:s}'.format(
        func=cFuncName, posf=posFilePath))

    # looking for start time of observation file
    # (use context managers so the file handles are always closed)
    with open(posFilePath) as posFile:
        for line in posFile:
            rec = line.strip()
            if rec.startswith('% obs start'):
                amc.dRTK['obsStart'] = datetime.strptime(rec[14:33],
                                                         '%Y/%m/%d %H:%M:%S')
                break
    # looking for end time of observation file
    with open(posFilePath) as posFile:
        for line in posFile:
            rec = line.strip()
            if rec.startswith('% obs end'):
                amc.dRTK['obsEnd'] = datetime.strptime(rec[14:33],
                                                       '%Y/%m/%d %H:%M:%S')
                break
    # looking for ref pos of observation file
    foundRefPos = False
    with open(posFilePath) as posFile:
        for line in posFile:
            rec = line.strip()
            if rec.startswith('% ref pos'):
                amc.dRTK['RefPos'] = [float(x) for x in rec.split(':')[1].split()]
                amc.dRTK['RefPosUTM'] = utm.from_latlon(amc.dRTK['RefPos'][0],
                                                        amc.dRTK['RefPos'][1])
                logger.info(
                    '{func:s}: reference station coordinates are LLH={llh!s} UTM={utm!s}'
                    .format(func=cFuncName,
                            llh=amc.dRTK['RefPos'],
                            utm=amc.dRTK['RefPosUTM']))
                foundRefPos = True
                break

    # no reference position found: mark it as unknown
    if not foundRefPos:
        amc.dRTK['RefPos'] = [np.NaN, np.NaN, np.NaN]
        amc.dRTK['RefPosUTM'] = (np.NaN, np.NaN, np.NaN, np.NaN)
        logger.info(
            '{func:s}: no reference station used'.format(func=cFuncName))

    # find start of results in rtk file
    endHeaderLine = amutils.line_num_for_phrase_in_file('%  GPST', posFilePath)
    dfPos = pd.read_csv(posFilePath,
                        header=endHeaderLine,
                        delim_whitespace=True)
    dfPos = dfPos.rename(
        columns={
            '%': 'WNC',
            'GPST': 'TOW',
            'latitude(deg)': 'lat',
            'longitude(deg)': 'lon',
            'height(m)': 'ellH',
            'sdn(m)': 'sdn',
            'sde(m)': 'sde',
            'sdu(m)': 'sdu',
            'sdne(m)': 'sdne',
            'sdeu(m)': 'sdeu',
            'sdun(m)': 'sdun',
            'age(s)': 'age'
        })

    # check if we have records for this mode in the data, else exit
    if dfPos.shape[0] == 0:
        logger.info('{func:s}: found no data in pos-file {pos:s}'.format(
            func=cFuncName, pos=amc.dRTK['posFile']))
        sys.exit(amc.E_FAILURE)

    # store total number of observations
    amc.dRTK['#obs'] = dfPos.shape[0]

    # store number of calculated positions for requested rtk quality
    amc.dRTK['#obsQual'] = len(dfPos.loc[dfPos['Q'] == amc.dRTK['iQual']])

    logger.info('{func:s}: amc.dRTK = \n{drtk!s}'.format(func=cFuncName,
                                                         drtk=amc.dRTK))

    # convert the GPS time (week number + time-of-week) to UTC
    dfPos['DT'] = dfPos.apply(lambda x: gpstime.UTCFromWT(x['WNC'], x['TOW']),
                              axis=1)

    # add UTM coordinates
    dfPos['UTM.E'], dfPos['UTM.N'], dfPos['UTM.Z'], dfPos[
        'UTM.L'] = utm.from_latlon(dfPos['lat'].to_numpy(),
                                   dfPos['lon'].to_numpy())

    amutils.logHeadTailDataFrame(logger=logger,
                                 callerName=cFuncName,
                                 df=dfPos,
                                 dfName='{posf:s}'.format(posf=posFilePath))

    amc.logDataframeInfo(df=dfPos,
                         dfName='dfPos',
                         callerName=cFuncName,
                         logger=logger)

    return dfPos
Beispiel #8
0
def plotRTKLibSatsColumn(dCol: dict, dRtk: dict, dfSVs: pd.DataFrame, logger: logging.Logger, showplot: bool = False):
    """
    plotRTKLibSatsColumn plots one data column (selected by dCol, e.g. 'CN0',
    'PRres' or 'Elev') from the satellite statistics dataframe, per GNSS
    system (GAL, GPS and combined), and saves each figure as a PNG.

    For each system three (or two) subplots are made: the value per SV vs
    time, a per-SV box plot of the statistics, and — for 'CN0' — the epoch
    differences > 1, or — for 'PRres' — per-SV counts/percentages of
    residuals within [-2, +2].

    :param dCol: dict describing the column to plot ('name', 'title', 'unit', 'yrange')
    :param dRtk: RTK processing info dict ('PRres', 'Time', 'info' keys are read)
    :param dfSVs: dataframe with per-SV observations ('DT', 'SV' and the dCol column)
    :param logger: logger used for info/debug output
    :param showplot: when True show the plot interactively, else close the figure
    """
    cFuncName = colored(os.path.basename(__file__), 'yellow') + ' - ' + colored(sys._getframe().f_code.co_name, 'green')

    # set up the plot
    plt.style.use('ggplot')

    # create a dataframe dfMerged with columns DT and the columns we want (PRres, CN0, Elev) selected by dCol
    # check for which GNSS we have data to display
    GNSSSysts = []
    if dRtk['PRres']['#GAL'] > 0:
        GNSSSysts.append('GAL')
    if dRtk['PRres']['#GPS'] > 0:
        GNSSSysts.append('GPS')
    # 'COM' is the combined GAL+GPS plot, only when both systems have data
    if dRtk['PRres']['#GAL'] > 0 and dRtk['PRres']['#GPS'] > 0:
        GNSSSysts.append('COM')

    logger.info('{func:s}: processing GNSS Systems = {systs!s}'.format(func=cFuncName, systs=GNSSSysts))

    for _, GNSSSyst in enumerate(GNSSSysts):
        logger.info('{func:s}: working on GNSS = {syst:s}'.format(func=cFuncName, syst=GNSSSyst))

        # start with create a dataframe containing the DTs (unique values) and per column the value of column 'col'
        dfSatsCol = pd.DataFrame(dfSVs.DT.unique(), columns=['DT'])
        logger.debug('{func:s}: dfSatsCol.columns = {cols!s}'.format(func=cFuncName, cols=dfSatsCol.columns))

        # list of SVs to process for this system ('COM' combines both lists)
        if GNSSSyst == 'COM':
            curSVsList = dRtk['PRres']['GALList'] + dRtk['PRres']['GPSList']
        else:
            curSVsList = dRtk['PRres']['%sList' % GNSSSyst]

        logger.debug('{func:s} #{line:d}: curSVsList of system {syst:s} = {list!s}   {count:d}'.format(func=cFuncName, list=curSVsList, count=len(curSVsList), syst=GNSSSyst, line=amc.lineno()))

        # add column to this dataframe for each SV and for its selected value 'col'
        for i, sv in enumerate(curSVsList):
            dfSVCol = pd.DataFrame(dfSVs[['DT', dCol['name']]][dfSVs['SV'] == sv])
            dfSVCol.rename(columns={dCol['name']: sv}, inplace=True)

            # merge together (outer join keeps epochs where this SV was not observed)
            dfMerged = pd.merge(dfSatsCol, dfSVCol, on=['DT'], how='outer')
            dfSatsCol = dfMerged

        # add a count of the number of residuals we have
        dfMerged['#{name:s}'.format(name=dCol['name'])] = dfMerged.apply(lambda x: x.count() - 1, axis=1)  # -1 else DT is counted as well

        amc.logDataframeInfo(df=dfMerged, dfName='dfMerged', callerName=cFuncName, logger=logger)

        # only processing dCol['name'] of current system, plot both vs DT and statistics
        # CN0 and PRres get a third subplot; other columns only two
        if dCol['name'] in ['CN0', 'PRres']:
            fig, axis = plt.subplots(nrows=3, ncols=1, figsize=(24.0, 20.0))
        else:
            fig, axis = plt.subplots(nrows=2, ncols=1, figsize=(24.0, 16.0))

        # determine the discrete colors for SVs
        colormap = plt.cm.nipy_spectral  # I suggest to use nipy_spectral, Set1, Paired
        colors = [colormap(i) for i in np.linspace(0, 1, len(dfMerged.columns) - 1)]

        # FIRST: plot the dCol['name'] versus DT
        ax1 = axis[0]

        # color white background between [-2 and +2]
        if dCol['name'] == 'PRres':
            ax1.fill_between(dfMerged['DT'], -2, +2, color='lightgreen', alpha=0.2)

        # plot the selected 'col' values excluding last column (the count column)
        dfMerged[dfMerged.columns[:-1]].set_index('DT').plot(ax=ax1, color=colors, marker='.', markersize=1, linestyle='', alpha=1)
        # name the ax1 and set limits
        ax1.set_ylabel('{title:s} [{unit:s}]'.format(title=dCol['title'], unit=dCol['unit']), fontsize='large')
        if not dCol['yrange'][0] is np.nan:
            ax1.set_ylim(dCol['yrange'])

        # title for plot
        ax1.set_title('{title:s} {syst:s} - {date:s}'.format(title=dCol['title'], syst=GNSSSyst, date=dRtk['Time']['date']), fontsize='large')

        # add legend
        ax1.legend(bbox_to_anchor=(0.5, 0.025), loc='lower center', ncol=min(np.size(curSVsList), 15), fontsize='small', markerscale=10)

        # create the ticks for the time axis
        dtFormat = plot_utils.determine_datetime_ticks(startDT=dfMerged['DT'].iloc[0], endDT=dfMerged['DT'].iloc[-1])

        if dtFormat['minutes']:
            ax1.xaxis.set_major_locator(dates.MinuteLocator(byminute=[0, 15, 30, 45], interval=1))
        else:
            ax1.xaxis.set_major_locator(dates.HourLocator(interval=dtFormat['hourInterval']))   # every 4 hours
        ax1.xaxis.set_major_formatter(dates.DateFormatter('%H:%M'))  # hours and minutes

        ax1.xaxis.set_minor_locator(dates.DayLocator(interval=1))    # every day
        ax1.xaxis.set_minor_formatter(dates.DateFormatter('\n%d-%m-%Y'))

        ax1.xaxis.set_tick_params(rotation=0)
        for tick in ax1.xaxis.get_major_ticks():
            tick.label1.set_horizontalalignment('center')

        ax1.annotate(r'$\copyright$ Alain Muls ([email protected])', xy=(1, 1), xycoords='axes fraction', xytext=(0, 0), textcoords='offset pixels', horizontalalignment='right', verticalalignment='bottom', weight='strong', fontsize='large')

        # SECOND: PLOT THE STATISTICS FOR DCOL['NAME'] FOR ALL SVS
        logger.info('{func:s}: {gnss:s} statistics {name:s}\n{stat!s}'.format(func=cFuncName, name=dCol['name'], gnss=GNSSSyst, stat=dfMerged.describe()))

        ax2 = axis[1]
        plotTitle = '{title:s} {gnss:s} statistics - {date:s}'.format(title=dCol['title'], gnss=GNSSSyst, date=dRtk['Time']['date'])

        # leave out the column DT and the count column (eg '#PRres')
        selectCols = [x for x in dfMerged.columns if x not in ['DT', '#{name:s}'.format(name=dCol['name'])]]
        boxPlot = dfMerged[selectCols].plot(ax=ax2, kind='box', title=plotTitle, legend=True, fontsize='large', colormap='jet', return_type='dict', ylim=dCol['yrange'], rot=90, notch=True, patch_artist=True)
        # name the ax2 and set limits
        ax2.set_ylabel('%s [%s]' % (dCol['title'], dCol['unit']), fontsize='large')
        if not dCol['yrange'][0] is np.nan:
            ax2.set_ylim(dCol['yrange'])

        # beautify the boxplots: per-system colormap colors, or blue (GAL) /
        # red (GPS) when both systems are combined
        svColors = []
        if GNSSSyst == 'GAL' or GNSSSyst == 'GPS':
            svColors = colors
        else:
            for _, SV in enumerate(curSVsList):
                if SV.startswith('E'):
                    svColors.append('blue')
                else:
                    svColors.append('red')

        for item in ['boxes', 'fliers', 'medians']:
            for patch, color in zip(boxPlot[item], svColors):
                patch.set(color=color, linewidth=2)
                if item in ['boxes', 'fliers']:
                    # make transparent background fill
                    patch.set_alpha(0.15)

        # double the colors because whiskers exist at both sides
        doubleSVColors = []
        for i, svColor in enumerate(svColors):
            doubleSVColors.append(svColor)
            doubleSVColors.append(svColor)

        # color elements that are twice available
        for item in ['whiskers', 'caps']:
            for patch, color in zip(boxPlot[item], doubleSVColors):
                patch.set(color=color, linewidth=2)

        # THIRD: FOR CN0 WE ALSO PLOT THE TIMEWISE DIFFERENCE
        dfMergedDiff = pd.DataFrame()
        if dCol['name'] == 'CN0':
            # epoch-to-epoch differences; keep only values with |diff| > 1
            dfMergedDiff = dfMerged[dfMerged.columns[1:]].diff()
            dfMergedDiff = dfMergedDiff.mask(dfMergedDiff.abs() <= 1)
            dfMergedDiff.insert(loc=0, column='DT', value=dfMerged['DT'])
            dfMergedDiff.dropna(axis=0, how='all', subset=dfMerged.columns[1:], inplace=True)
            dfMergedDiff.dropna(axis=1, how='all', inplace=True)

            logger.debug('{func:s}: SVs observed (CN0 value) dfMerged.columns = {cols!s}'.format(func=cFuncName, cols=dfMerged.columns))
            logger.debug('{func:s}: SVs with SN0 diff > 1 dfMergedDiff.columns = {cols!s}'.format(func=cFuncName, cols=dfMergedDiff.columns))

            # THIRD: create the CN0 difference plot on the 3rd axis
            ax3 = axis[2]
            ax3.set_xlim([dfMerged['DT'].iat[0], dfMerged['DT'].iat[-1]])

            # plot the selected 'col' values as stem plots, one per SV
            for sv in dfMergedDiff.columns[1:-1]:
                logger.debug('{func:s}: {syst:s} {sv:s}'.format(func=cFuncName, syst=GNSSSyst, sv=sv))
                svcolor = svColors[curSVsList.index(sv)]
                markerline, stemlines, baseline = ax3.stem(dfMergedDiff['DT'], dfMergedDiff[sv], label=sv)
                plt.setp(stemlines, color=svcolor, linewidth=2)
                plt.setp(markerline, color=svcolor, markersize=4)

            ax3.set_ylabel('Diff %s [%s]' % (dCol['title'], dCol['unit']), fontsize='large')

            # create the ticks for the time axis
            dtFormat = plot_utils.determine_datetime_ticks(startDT=dfMerged['DT'].iloc[0], endDT=dfMerged['DT'].iloc[-1])

            if dtFormat['minutes']:
                ax3.xaxis.set_major_locator(dates.MinuteLocator(byminute=[0, 15, 30, 45], interval=1))
            else:
                ax3.xaxis.set_major_locator(dates.HourLocator(interval=dtFormat['hourInterval']))   # every 4 hours
            ax3.xaxis.set_major_formatter(dates.DateFormatter('%H:%M'))  # hours and minutes

            ax3.xaxis.set_minor_locator(dates.DayLocator(interval=1))    # every day
            ax3.xaxis.set_minor_formatter(dates.DateFormatter('\n%d-%m-%Y'))

            ax3.xaxis.set_tick_params(rotation=0)
            for tick in ax3.xaxis.get_major_ticks():
                tick.label1.set_horizontalalignment('center')

        elif dCol['name'] == 'PRres':
            # THIRD: FOR PRRES WE ALSO PLOT THE PERCENTAGE OF PRRES WITHIN [-2, +2]
            ax3 = axis[2]
            ax3bis = ax3.twinx()

            # get the list of SVs for this GNSS
            if GNSSSyst == 'COM':
                curSVsList = dRtk['PRres']['GALList'] + dRtk['PRres']['GPSList']
            else:
                curSVsList = dRtk['PRres']['%sList' % GNSSSyst]

            logger.debug('{func:s} #{line:d}: curSVsList = {list!s}   {count:d}'.format(func=cFuncName, list=curSVsList, count=len(curSVsList), line=amc.lineno()))

            # create a dataframe indexed by SVID and which holds the #PR, #PRreslt2, %PRreslt2
            dfSVPR = pd.DataFrame(curSVsList, columns=['SV']).set_index('SV', drop=False)
            # add dummy columns for holding the residu info
            dfSVPR = dfSVPR.reindex(columns=['SV', '#res', '#reslt2', '#pcreslt2'])

            # per-SV residual statistics, combined over both systems for 'COM'
            if GNSSSyst == 'COM':
                curSVsPRRes = {k: v for d in (dRtk['PRres']['GALSVs'], dRtk['PRres']['GPSSVs']) for k, v in d.items()}
            else:
                curSVsPRRes = dRtk['PRres']['%sSVs' % GNSSSyst]

            logger.debug('{func:s} #{line:d}: curSVsPRRes = {list!s}   {count:d}'.format(func=cFuncName, list=curSVsPRRes, count=len(curSVsPRRes), line=amc.lineno()))

            # fill in the residual count, count within [-2,+2] and its percentage per SV
            for i, sv in enumerate(curSVsList):
                dfSVPR.loc[dfSVPR['SV'] == sv, '#res'] = curSVsPRRes[sv]['count']
                dfSVPR.loc[dfSVPR['SV'] == sv, '#reslt2'] = curSVsPRRes[sv]['PRlt2']
                dfSVPR.loc[dfSVPR['SV'] == sv, '#pcreslt2'] = curSVsPRRes[sv]['PRlt2%']

            amc.logDataframeInfo(df=dfSVPR, dfName='dfSVPR', callerName=cFuncName, logger=logger)

            # plot the bars for PRres and PRReslt2
            dfSVPR.plot(kind='bar', ax=ax3, x='SV', y=['#res', '#reslt2'], edgecolor='white', fontsize='large', alpha=0.5)
            ax3.legend(labels=[r'#PRres', r'#PRres $\leq$ 2'], fontsize='medium')

            start, end = ax3.get_xlim()

            # plot line for representing the percentage
            dfSVPR.plot(kind='line', ax=ax3bis, x='SV', y=['#pcreslt2'], fontsize='large', color='green', marker='o', markersize=5, linestyle='')
            ax3bis.legend(labels=[r'% PRres  $\leq$ 2'], fontsize='medium')
            ax3bis.set_ylim([94.5, 100.5])
            ax3bis.tick_params(axis='y', colors='green')
            ax3bis.set_xlim(left=start, right=end)

            # the first len(curSVsList) patches are the '#res' bars; remember
            # them so the percentage labels can be placed on top of them
            svRects = []
            for i, rect in enumerate(ax3.patches):
                if i < len(curSVsList):
                    svRects.append(rect)

            for i, rect in enumerate(ax3.patches):
                if i % 2:  # 2 bars per SV
                    # get_width pulls left or right; get_y pushes up or down
                    sv = curSVsList[int(i / 2)]
                    svRect = svRects[int(i / 2)]
                    ax3.text(svRect.get_x() + svRect.get_width(), svRect.get_y() + svRect.get_height(), '{:.2f}%'.format(dfSVPR.loc[sv]['#pcreslt2']), fontsize='medium', color='green', horizontalalignment='center')

            # name the y axis
            ax3.set_ylabel('# of {:s}'.format(dCol['name']), fontsize='large')
            ax3bis.set_ylabel(r'% $\in$ [-2, +2]'.format(dCol['name']), fontsize='large', color='green')

    # save the plot in subdir png of GNSSSystem
    # NOTE(review): fig/pngName come from the last loop iteration; if GNSSSysts
    # is empty this raises NameError — confirm callers guarantee data exists
    amutils.mkdir_p(os.path.join(dRtk['info']['dir'], 'png'))
    pngName = os.path.join(dRtk['info']['dir'], 'png', os.path.splitext(dRtk['info']['rtkPosFile'])[0] + '-{col:s}.png'.format(col=dCol['name']))
    fig.savefig(pngName, dpi=fig.dpi)

    logger.info('{func:s}: created plot {plot:s}'.format(func=cFuncName, plot=colored(pngName, 'green')))

    if showplot:
        plt.show(block=True)
    else:
        plt.close(fig)
Beispiel #9
0
def calcDOPs(dfSats: pd.DataFrame, logger: logging.Logger) -> pd.DataFrame:
    """
    calculates the number of SVs used and corresponding DOP values

    The design matrix A holds one row [alpha, beta, gamma, 1] of direction
    cosines per SV observed at an epoch. With Q = inv(A^T A), the xDOP values
    are square roots of (sums of) the diagonal elements of Q:
    HDOP = sqrt(q11+q22), VDOP = sqrt(q33), PDOP = sqrt(q11+q22+q33),
    GDOP = sqrt(q11+q22+q33+q44).

    :param dfSats: per-SV observations; needs columns 'DT', 'Elev', 'Azim'
                   (temporary sin/cos and direction-cosine columns are added
                   and dropped again before returning)
    :param logger: logger used for reporting progress
    :return: dataframe with columns 'DT', '#SVs', 'HDOP', 'VDOP', 'PDOP', 'GDOP'
    """
    cFuncName = colored(os.path.basename(__file__), 'yellow') + ' - ' + colored(sys._getframe().f_code.co_name, 'green')

    logger.info('{func:s}: calculating number of SVs in PVT and DOP values'.format(func=cFuncName))

    # calculate sin/cos of elevation/azimuth
    dfSats['sinEl'] = np.sin(np.deg2rad(dfSats.Elev))
    dfSats['cosEl'] = np.cos(np.deg2rad(dfSats.Elev))
    dfSats['sinAz'] = np.sin(np.deg2rad(dfSats.Azim))
    dfSats['cosAz'] = np.cos(np.deg2rad(dfSats.Azim))

    # calculate the direction cosines for each satellite
    dfSats['alpha'] = dfSats['cosEl'] * dfSats['sinAz']
    dfSats['beta'] = dfSats['cosEl'] * dfSats['cosAz']
    dfSats['gamma'] = dfSats['sinEl']

    amc.logDataframeInfo(df=dfSats, dfName='dfSats', callerName=cFuncName, logger=logger)

    # get count of SVs
    dfSVCount = countSVs(dfSVs=dfSats, logger=logger)
    amc.logDataframeInfo(df=dfSVCount, dfName='dfSVCount', callerName=cFuncName, logger=logger)

    # calculating DOP is time consuming, so thin down the TOWs
    naTOWs4DOP = getTOWs4DOP(dfNrSVs=dfSVCount, logger=logger)
    logger.debug('{func:s} TOWs for calculating DOPs = {array!s}'.format(func=cFuncName, array=naTOWs4DOP))

    # create a dataframe for DOP values containing the DateTime column (unique values)
    dfDOPs = pd.DataFrame(naTOWs4DOP, columns=['DT'])
    amc.logDataframeInfo(df=dfDOPs, dfName='dfDOPs start', callerName=cFuncName, logger=logger)

    # select the #SVs from dfSVCount for the intervals we use for DOP calculation
    # (reset_index on the fresh dataframe avoids a SettingWithCopyWarning)
    dfNrSVs4DOP = dfSVCount.loc[dfSVCount['DT'].isin(naTOWs4DOP)].reset_index()
    amc.logDataframeInfo(df=dfNrSVs4DOP, dfName='dfNrSVs4DOP', callerName=cFuncName, logger=logger)

    # merge last column with #SVs into dfDOPs
    dfDOPs.loc[:, '#SVs'] = dfNrSVs4DOP['#SVs']

    # add NA columns for xDOP values
    dfDOPs = dfDOPs.reindex(columns=dfDOPs.columns.tolist() + ['HDOP', 'VDOP', 'PDOP', 'GDOP'])

    # iterate over all unique TOWs to determine corresponding xDOP values
    logger.info('{func:s}: calculating xDOP values for {epochs:d} epochs'.format(func=cFuncName, epochs=len(naTOWs4DOP)))

    for i, DT in enumerate(naTOWs4DOP):
        # select the rows of all SVs observed at this epoch
        towIndices = dfSats.index[dfSats['DT'] == np.datetime64(DT)].tolist()

        # design matrix A: one row [alpha, beta, gamma, 1] per SV
        # (.copy() so adding 'delta' does not warn about writing to a slice)
        dfTOW = dfSats[['alpha', 'beta', 'gamma']].iloc[towIndices].copy()
        dfTOW['delta'] = 1.
        A = dfTOW.to_numpy()

        # diagonal of the cofactor matrix Q = inv(A^T A):
        # variances for East, North, Up and receiver-clock components
        qDiag = np.linalg.inv(A.transpose().dot(A)).diagonal()

        # get the index for this DT into the dfDOPs
        indexTOW = dfDOPs.index[dfDOPs['DT'] == DT].tolist()[0]

        # xDOPs are square roots of sums of the diagonal elements of Q.
        # NOTE: the previous implementation squared the diagonal elements
        # first and left VDOP without a square root, which is incorrect.
        # Use .loc to avoid chained (SettingWithCopy) assignment.
        dfDOPs.loc[indexTOW, 'HDOP'] = np.sqrt(qDiag[0] + qDiag[1])
        dfDOPs.loc[indexTOW, 'VDOP'] = np.sqrt(qDiag[2])
        dfDOPs.loc[indexTOW, 'PDOP'] = np.sqrt(qDiag[0] + qDiag[1] + qDiag[2])
        dfDOPs.loc[indexTOW, 'GDOP'] = np.sqrt(qDiag[0] + qDiag[1] + qDiag[2] + qDiag[3])

        # show progress bar
        progbar(i, len(naTOWs4DOP), 60)

    print()  # empty print statement for ending progbar

    # drop the cos/sin & direction cosines columns from dfSats
    dfSats.drop(['sinEl', 'cosEl', 'sinAz', 'cosAz', 'alpha', 'beta', 'gamma'], axis=1, inplace=True)

    amc.logDataframeInfo(df=dfDOPs, dfName='dfDOPs (end)', callerName=cFuncName, logger=logger)
    amutils.logHeadTailDataFrame(logger=logger, callerName=cFuncName, df=dfDOPs, dfName='dfDOPs')

    return dfDOPs
Beispiel #10
0
def plotClock(dfClk: pd.DataFrame,
              dRtk: dict,
              logger: logging.Logger,
              showplot: bool = False):
    """
    plotClock plots the receiver clock offset for all GNSS systems that have data

    :param dfClk: dataframe with a 'DT' column and one clock-offset column per
                  system ('GAL', 'GPS', 'OTH', 'GLO'); systems whose column is
                  all-zero/NaN are skipped
    :param dRtk: info dict; dRtk['info']['dir'] and dRtk['info']['rtkPosFile']
                 determine where the png plot is saved
    :param logger: logger for progress messages
    :param showplot: when True show the figure interactively, else close it
                     after saving
    """
    cFuncName = colored(os.path.basename(__file__),
                        'yellow') + ' - ' + colored(
                            sys._getframe().f_code.co_name, 'green')

    # set up the plot
    plt.style.use('ggplot')
    colors = ['blue', 'red', 'green', 'black']

    amc.logDataframeInfo(df=dfClk,
                         dfName='dfClk',
                         callerName=cFuncName,
                         logger=logger)

    # find out for which system we have clk offset values
    GNSSSysts = []
    for gnss in ['GAL', 'GPS', 'OTH', 'GLO']:
        if dfClk[gnss].any():
            GNSSSysts.append(gnss)
    logger.info('{func:s}: Clock available for GNSS systems {syst:s}'.format(
        func=cFuncName, syst=' '.join(GNSSSysts)))

    # create the plot area: one subplot per system with clock data
    fig, axis = plt.subplots(nrows=len(GNSSSysts),
                             ncols=1,
                             figsize=(24.0, 20.0))

    for i, GNSSsyst in enumerate(GNSSSysts):
        logger.info('{func:s}: plotting clock offset for {syst:s}'.format(
            func=cFuncName, syst=GNSSsyst))

        # get the axis to draw to (plt.subplots returns a bare Axes for nrows=1)
        if len(GNSSSysts) == 1:
            ax = axis
        else:
            ax = axis[i]

        # create the plot for this GNSS system
        dfClk.plot(ax=ax,
                   x='DT',
                   y=GNSSsyst,
                   marker='.',
                   linestyle='',
                   color=colors[i])

        # create the ticks for the time axis
        dtFormat = plot_utils.determine_datetime_ticks(
            startDT=dfClk['DT'].iloc[0], endDT=dfClk['DT'].iloc[-1])

        if dtFormat['minutes']:
            ax.xaxis.set_major_locator(
                dates.MinuteLocator(byminute=[0, 15, 30, 45], interval=1))
        else:
            ax.xaxis.set_major_locator(
                dates.HourLocator(
                    interval=dtFormat['hourInterval']))  # every 4 hours
        ax.xaxis.set_major_formatter(
            dates.DateFormatter('%H:%M'))  # hours and minutes

        ax.xaxis.set_minor_locator(dates.DayLocator(interval=1))  # every day
        ax.xaxis.set_minor_formatter(dates.DateFormatter('\n%d-%m-%Y'))

        ax.xaxis.set_tick_params(rotation=0)
        for tick in ax.xaxis.get_major_ticks():
            tick.label1.set_horizontalalignment('center')

        # name the axis
        ax.set_ylabel('{syst:s} Clock Offset [ns]'.format(syst=GNSSsyst),
                      fontsize='large',
                      color=colors[i])
        ax.set_xlabel('Time', fontsize='large')

        # title of sub-plot
        # NOTE: fontsize must be an argument of set_title, not of str.format
        # (where it was previously passed and silently ignored)
        ax.set_title('Clock offset relative to {syst:s} @ {date:s}'.format(
            syst=GNSSsyst,
            date=dfClk['DT'].iloc[0].strftime('%d %b %Y')),
            fontsize='large')

    # save the plot in subdir png of GNSSSystem
    amutils.mkdir_p(os.path.join(dRtk['info']['dir'], 'png'))
    pngName = os.path.join(
        dRtk['info']['dir'], 'png',
        os.path.splitext(dRtk['info']['rtkPosFile'])[0] + '-CLK.png')
    fig.savefig(pngName, dpi=fig.dpi)

    logger.info('{func:s}: created plot {plot:s}'.format(func=cFuncName,
                                                         plot=colored(
                                                             pngName,
                                                             'green')))

    if showplot:
        plt.show(block=True)
    else:
        plt.close(fig)
Beispiel #11
0
def main(argv):
    """
    pyRTKPlot adds UTM coordinates to output of rnx2rtkp.
    If 'stat' file is available, calculates xDOP values, and makes plots of statistics.

    :param argv: command line arguments (parsed by treatCmdOpts)
    """
    amc.cBaseName = colored(os.path.basename(__file__), 'yellow')
    cFuncName = colored(os.path.basename(__file__),
                        'yellow') + ' - ' + colored(
                            sys._getframe().f_code.co_name, 'green')

    # some options for display of dataframes
    pd.set_option('display.max_columns', None)  # or 1000
    pd.set_option('display.max_rows', None)  # or 1000
    # None means "no limit"; -1 is deprecated and rejected by pandas >= 1.0
    pd.set_option('display.max_colwidth', None)
    # limit float precision
    json.encoder.FLOAT_REPR = lambda o: format(o, '.3f')
    np.set_printoptions(precision=4)

    # treat command line options
    rtkPosFile, rtkDir, crdMarker, showPlots, overwrite, logLevels = treatCmdOpts(
        argv)

    # create logging for better debugging
    logger, log_name = amc.createLoggers(baseName=os.path.basename(__file__),
                                         dir=rtkDir,
                                         logLevels=logLevels)

    # change to selected directory if exists
    if not os.path.exists(rtkDir):
        logger.error('{func:s}: directory {dir:s} does not exists'.format(
            func=cFuncName, dir=colored(rtkDir, 'red')))
        sys.exit(amc.E_DIR_NOT_EXIST)
    else:
        os.chdir(rtkDir)
        logger.info('{func:s}: changed to dir {dir:s}'.format(func=cFuncName,
                                                              dir=colored(
                                                                  rtkDir,
                                                                  'green')))

    # store information about the files this run works on
    dInfo = {}
    dInfo['dir'] = rtkDir
    dInfo['rtkPosFile'] = rtkPosFile
    dInfo['rtkStatFile'] = dInfo['rtkPosFile'] + '.stat'
    dInfo['posn'] = dInfo['rtkPosFile'] + '.posn'
    dInfo['posnstat'] = dInfo['posn'] + '.html'
    amc.dRTK['info'] = dInfo

    # GNSS system is last part of root directory
    amc.dRTK['syst'] = 'UNKNOWN'
    for _, syst in enumerate(['GAL', 'GPS', 'COM']):
        if syst.lower() in amc.dRTK['info']['dir'].lower():
            amc.dRTK['syst'] = syst

    # info about PDOP bins and statistics
    dPDOP = {}
    dPDOP['bins'] = [0, 2, 3, 4, 5, 6, math.inf]
    amc.dRTK['PDOP'] = dPDOP

    # set the reference point
    dMarker = {}
    dMarker['lat'], dMarker['lon'], dMarker['ellH'] = map(float, crdMarker)
    print('crdMarker = {!s}'.format(crdMarker))

    if [dMarker['lat'], dMarker['lon'], dMarker['ellH']] == [0, 0, 0]:
        # no marker given: the mean position serves as reference
        # (np.nan, not np.NaN: the latter alias was removed in numpy 2.0)
        dMarker['lat'] = dMarker['lon'] = dMarker['ellH'] = np.nan
        dMarker['UTM.E'] = dMarker['UTM.N'] = np.nan
        dMarker['UTM.Z'] = dMarker['UTM.L'] = ''
    else:
        dMarker['UTM.E'], dMarker['UTM.N'], dMarker['UTM.Z'], dMarker[
            'UTM.L'] = utm.from_latlon(dMarker['lat'], dMarker['lon'])

    logger.info('{func:s}: marker coordinates = {crd!s}'.format(func=cFuncName,
                                                                crd=dMarker))
    amc.dRTK['marker'] = dMarker

    # check wether pos and stat file are present, else exit
    if not os.access(os.path.join(rtkDir, amc.dRTK['info']['rtkPosFile']),
                     os.R_OK) or not os.access(
                         os.path.join(rtkDir, amc.dRTK['info']['rtkStatFile']),
                         os.R_OK):
        logger.error(
            '{func:s}: file {pos:s} or {stat:s} is not accessible'.format(
                func=cFuncName,
                pos=os.path.join(rtkDir, amc.dRTK['info']['rtkPosFile']),
                stat=os.path.join(rtkDir, amc.dRTK['info']['rtkStatFile'])))

        sys.exit(amc.E_FILE_NOT_EXIST)

    # read the position file into a dataframe and add dUTM coordinates
    logger.info('{func:s}: parsing RTKLib pos file {pos:s}'.format(
        pos=amc.dRTK['info']['rtkPosFile'], func=cFuncName))
    dfPosn = parse_rtk_files.parseRTKLibPositionFile(logger=logger)

    # calculate the weighted average of llh & enu
    amc.dRTK['WAvg'] = parse_rtk_files.weightedAverage(dfPos=dfPosn,
                                                       logger=logger)

    # find difference with reference and max/min limits for UTM plot
    logger.info(
        '{func:s}: calculating coordinate difference with reference/mean position'
        .format(func=cFuncName))
    dfCrd, dCrdLim = plot_position.crdDiff(
        dMarker=amc.dRTK['marker'],
        dfUTMh=dfPosn[['UTM.E', 'UTM.N', 'ellH']],
        plotCrds=['UTM.E', 'UTM.N', 'ellH'],
        logger=logger)
    # merge dfCrd into dfPosn
    dfPosn[['dUTM.E', 'dUTM.N', 'dEllH']] = dfCrd[['UTM.E', 'UTM.N', 'ellH']]

    # work on the statistics file: split it in relevant parts
    dTmpFiles = parse_rtk_files.splitStatusFile(
        amc.dRTK['info']['rtkStatFile'], logger=logger)

    # parse the satellite file (contains Az, El, PRRes, CN0)
    dfSats = parse_rtk_files.parseSatelliteStatistics(dTmpFiles['sat'],
                                                      logger=logger)
    store_to_cvs(df=dfSats, ext='sats', dInfo=amc.dRTK, logger=logger)

    # determine statistics on PR residuals for all satellites per elevation bin
    dfDistCN0, dsDistCN0, dfDistPRres, dsDistPRRes = parse_rtk_files.parse_elevation_distribution(
        dRtk=amc.dRTK, dfSat=dfSats, logger=logger)
    store_to_cvs(df=dfDistCN0, ext='CN0.dist', dInfo=amc.dRTK, logger=logger)
    store_to_cvs(df=dfDistPRres,
                 ext='PRres.dist',
                 dInfo=amc.dRTK,
                 logger=logger)

    # determine statistics of PR residuals for each satellite
    amc.dRTK['PRres'] = parse_rtk_files.parse_sv_residuals(dfSat=dfSats,
                                                           logger=logger)

    # calculate DOP values from El, Az info for each TOW
    dfDOPs = parse_rtk_files.calcDOPs(dfSats, logger=logger)
    store_to_cvs(df=dfDOPs, ext='XDOP', dInfo=amc.dRTK, logger=logger)

    # merge the xDOP columns of dfDOPs into dfPosn and interpolate the gaps
    dfResults = pd.merge(left=dfPosn,
                         right=dfDOPs[['DT', 'PDOP', 'HDOP', 'VDOP', 'GDOP']],
                         left_on='DT',
                         right_on='DT',
                         how='left')
    dfPosn = dfResults.interpolate()
    store_to_cvs(df=dfPosn, ext='posn', dInfo=amc.dRTK, logger=logger)

    # calculate per DOP bin the statistics of PDOP
    parse_rtk_files.addPDOPStatistics(dRtk=amc.dRTK,
                                      dfPos=dfPosn,
                                      logger=logger)

    # add statistics for the E,N,U coordinate differences
    dfStatENU = enu_stat.enu_statistics(
        dRtk=amc.dRTK,
        dfENU=dfPosn[['DT', 'dUTM.E', 'dUTM.N', 'dEllH']],
        logger=logger)
    # add distributions for the E,N,U coordinate differences and xDOPs
    dfDistENU, dfDistXDOP = enu_stat.enupdop_distribution(dRtk=amc.dRTK,
                                                          dfENU=dfPosn[[
                                                              'DT', 'dUTM.E',
                                                              'dUTM.N',
                                                              'dEllH', 'PDOP',
                                                              'HDOP', 'VDOP',
                                                              'GDOP'
                                                          ]],
                                                          logger=logger)
    store_to_cvs(df=dfDistENU, ext='ENU.dist', dInfo=amc.dRTK, logger=logger)
    store_to_cvs(df=dfDistXDOP, ext='XDOP.dist', dInfo=amc.dRTK, logger=logger)

    logger.info('{func:s}: dRTK =\n{settings!s}'.format(func=cFuncName,
                                                        settings=json.dumps(
                                                            amc.dRTK,
                                                            sort_keys=False,
                                                            indent=4)))

    # # store statistics for dfPosn
    # logger.info('{func:s}: creating pandas profile report {ppname:s} for dfPosn, {help:s}'.format(ppname=colored(amc.dRTK['info']['posnstat'], 'green'), help=colored('be patient', 'red'), func=cFuncName))
    # dfProfile = dfPosn[['DT', 'ns', 'dUTM.E', 'dUTM.N', 'dEllH', 'sdn', 'sde', 'sdu', 'PDOP']]

    # ppTitle = 'Report on {posn:s} - {syst:s} - {date:s}'.format(posn=amc.dRTK['info']['posn'], syst=amc.dRTK['syst'], date=amc.dRTK['Time']['date'])

    # profile = pp.ProfileReport(df=dfProfile, check_correlation_pearson=False, correlations={'pearson': False, 'spearman': False, 'kendall': False, 'phi_k': False, 'cramers': False, 'recoded': False}, title=ppTitle)
    # profile.to_file(output_file=amc.dRTK['info']['posnstat'])

    # parse the clock stats
    dfCLKs = parse_rtk_files.parseClockBias(dTmpFiles['clk'], logger=logger)
    store_to_cvs(df=dfCLKs, ext='clks', dInfo=amc.dRTK, logger=logger)

    # BEGIN debug
    dfs = (dfPosn, dfSats, dfCLKs, dfCrd, dfDOPs, dfStatENU, dfDistENU,
           dfDistXDOP, dfDistPRres, dfDistCN0)
    # names kept aligned with dfs: the last two entries were missing before,
    # so dfDistPRres and dfDistCN0 were silently skipped by zip()
    dfsNames = ('dfPosn', 'dfSats', 'dfCLKs', 'dfCrd', 'dfDOPs', 'dfStatENU',
                'dfDistENU', 'dfDistXDOP', 'dfDistPRres', 'dfDistCN0')
    for df, dfName in zip(dfs, dfsNames):
        amutils.logHeadTailDataFrame(logger=logger,
                                     callerName=cFuncName,
                                     df=df,
                                     dfName=dfName)
        amc.logDataframeInfo(df=df,
                             dfName=dfName,
                             callerName=cFuncName,
                             logger=logger)
    # EOF debug

    # create the position plot (use DOP to color segments)
    plot_position.plotUTMOffset(dRtk=amc.dRTK,
                                dfPos=dfPosn,
                                dfCrd=dfCrd,
                                dCrdLim=dCrdLim,
                                logger=logger,
                                showplot=showPlots)

    # create the UTM N-E scatter plot
    plot_scatter.plotUTMScatter(dRtk=amc.dRTK,
                                dfPos=dfPosn,
                                dfCrd=dfCrd,
                                dCrdLim=dCrdLim,
                                logger=logger,
                                showplot=showPlots)
    plot_scatter.plotUTMScatterBin(dRtk=amc.dRTK,
                                   dfPos=dfPosn,
                                   dfCrd=dfCrd,
                                   dCrdLim=dCrdLim,
                                   logger=logger,
                                   showplot=showPlots)

    # create ENU distribution plots
    plot_distributions_crds.plot_enu_distribution(dRtk=amc.dRTK,
                                                  dfENUdist=dfDistENU,
                                                  dfENUstat=dfStatENU,
                                                  logger=logger,
                                                  showplot=showPlots)

    # create XDOP plots
    plot_distributions_crds.plot_xdop_distribution(dRtk=amc.dRTK,
                                                   dfXDOP=dfDOPs,
                                                   dfXDOPdisp=dfDistXDOP,
                                                   logger=logger,
                                                   showplot=showPlots)

    # plot pseudo-range residuals
    dPRResInfo = {
        'name': 'PRres',
        'yrange': [-6, 6],
        'title': 'PR Residuals',
        'unit': 'm',
        'linestyle': '-'
    }
    logger.info(
        '{func:s}: creating dPRRes plots based on dict {dict!s}'.format(
            func=cFuncName, dict=dPRResInfo))
    plot_sats_column.plotRTKLibSatsColumn(dCol=dPRResInfo,
                                          dRtk=amc.dRTK,
                                          dfSVs=dfSats,
                                          logger=logger,
                                          showplot=showPlots)

    # plot CN0
    dCN0Info = {
        'name': 'CN0',
        'yrange': [20, 60],
        'title': 'CN0 Ratio',
        'unit': 'dBHz',
        'linestyle': '-'
    }
    logger.info('{func:s}: creating CN0 plots based on dict {dict!s}'.format(
        func=cFuncName, dict=dCN0Info))
    plot_sats_column.plotRTKLibSatsColumn(dCol=dCN0Info,
                                          dRtk=amc.dRTK,
                                          dfSVs=dfSats,
                                          logger=logger,
                                          showplot=showPlots)

    # create plots for elevation distribution of CN0 and PRres
    plot_distributions_elev.plot_elev_distribution(dRtk=amc.dRTK,
                                                   df=dfDistCN0,
                                                   ds=dsDistCN0,
                                                   obs_name='CN0',
                                                   logger=logger,
                                                   showplot=showPlots)
    plot_distributions_elev.plot_elev_distribution(dRtk=amc.dRTK,
                                                   df=dfDistPRres,
                                                   ds=dsDistPRRes,
                                                   obs_name='PRres',
                                                   logger=logger,
                                                   showplot=showPlots)

    # plot elevation
    dElevInfo = {
        'name': 'Elev',
        'yrange': [0, 90],
        'title': 'Elevation',
        'unit': 'Deg',
        'linestyle': '-'
    }
    logger.info('{func:s}: creating Elev plots based on dict {dict!s}'.format(
        func=cFuncName, dict=dElevInfo))
    plot_sats_column.plotRTKLibSatsColumn(dCol=dElevInfo,
                                          dRtk=amc.dRTK,
                                          dfSVs=dfSats,
                                          logger=logger,
                                          showplot=showPlots)

    # plot the receiver clock
    logger.info('{func:s}: creating Clock plots'.format(func=cFuncName))
    plot_clock.plotClock(dfClk=dfCLKs,
                         dRtk=amc.dRTK,
                         logger=logger,
                         showplot=showPlots)

    logger.info('{func:s}: final amc.dRTK =\n{settings!s}'.format(
        func=cFuncName,
        settings=json.dumps(amc.dRTK, sort_keys=False, indent=4)))

    # dump the collected run information next to the pos file
    jsonName = amc.dRTK['info']['rtkPosFile'] + '.json'
    with open(jsonName, 'w') as f:
        json.dump(amc.dRTK, f, ensure_ascii=False, indent=4)

    logger.info('{func:s}: created json file {json:s}'.format(func=cFuncName,
                                                              json=colored(
                                                                  jsonName,
                                                                  'green')))

    # copy temp log file to the YYDOY directory
    copyfile(
        log_name,
        os.path.join(
            amc.dRTK['info']['dir'], '{obs:s}-{prog:s}'.format(
                obs=amc.dRTK['info']['rtkPosFile'].replace(';', '_'),
                prog='plot.log')))
    os.remove(log_name)