# Example 1
def main(argv):
    """
    Extract and store RINEX observation header metadata.

    Parses the command line, sets up logging, locates the gfzrnx binary,
    reads the RINEX observation header and PRN list, parses the metadata
    and stores the collected information as a JSON file next to the
    observation file.

    :param argv: command line arguments (without the program name)
    """
    amc.cBaseName = colored(os.path.basename(__file__), 'yellow')
    cFuncName = colored(os.path.basename(__file__),
                        'yellow') + ' - ' + colored(
                            sys._getframe().f_code.co_name, 'green')

    # treat command line options
    obsRnx, dirRnx, interval, logLevels = treatCmdOpts(argv)

    # store cli parameters in the global info structure
    amc.dRTK = {}
    dArgs = {}
    dArgs['rinexDir'] = dirRnx
    dArgs['obs_name'] = obsRnx
    dArgs['interval'] = interval

    amc.dRTK['args'] = dArgs

    # create logging for better debugging
    logger = amc.createLoggers(os.path.basename(__file__),
                               dir=dirRnx,
                               logLevels=logLevels)
    # locate the gfzrnx program used for execution
    dProgs = {}
    dProgs['gfzrnx'] = location.locateProg('gfzrnx', logger)
    amc.dRTK['progs'] = dProgs

    # check arguments
    checkArguments(logger=logger)

    # read the header info using gfzrnx
    amc.dRTK['header'] = rnx_obs_header.rnxobs_header_metadata(
        dArgs=amc.dRTK['args'], dProgs=amc.dRTK['progs'], logger=logger)
    # get list of PRNs in RINEX obs file
    amc.dRTK['prns'] = rnx_obs_header.rnxobs_parse_prns(
        dArgs=amc.dRTK['args'], dProgs=amc.dRTK['progs'], logger=logger)
    # extract parts of the rinex observation header
    rnx_obs_header.rnxobs_metadata_parser(dobs_hdr=amc.dRTK['header'],
                                          dPRNs=amc.dRTK['prns'],
                                          dArgs=amc.dRTK['args'],
                                          logger=logger)

    # show the information JSON structure
    logger.info('{func:s}: info dictionary = \n{prt!s}'.format(
        prt=amutils.pretty(amc.dRTK), func=cFuncName))
    # store the json structure
    jsonName = os.path.join(
        amc.dRTK['args']['rinexDir'],
        amc.dRTK['args']['obs_name'].replace('.', '-') + '.json')
    # report via the configured logger instead of a leftover debug print()
    logger.info('{func:s}: storing JSON structure in {json:s}'.format(
        func=cFuncName, json=jsonName))
    # explicit UTF-8: ensure_ascii=False may emit non-ASCII characters,
    # which would fail on platforms with a non-UTF-8 default encoding
    with open(jsonName, 'w', encoding='utf-8') as f:
        json.dump(amc.dRTK,
                  f,
                  ensure_ascii=False,
                  indent=4,
                  default=amutils.DT_convertor)
# Example 2
def main(argv):
    """
    Download RINEX navigation files via FTP for a given year/day-of-year.

    Parses the command line, sets up logging, builds the remote/local
    paths, downloads the files with ncftpget, reports the collected
    information and moves the session log into the local directory.

    :param argv: command line arguments (without the program name)
    """
    # coloured '<script> - <function>' prefix reused in every log message
    amc.cBaseName = colored(os.path.basename(__file__), 'yellow')
    cFuncName = colored(os.path.basename(__file__),
                        'yellow') + ' - ' + colored(
                            sys._getframe().f_code.co_name, 'green')

    # limit float precision
    # NOTE(review): json.encoder.FLOAT_REPR is ignored by the C encoder on
    # Python >= 3.6, so this is likely a no-op — confirm intended effect
    encoder.FLOAT_REPR = lambda o: format(o, '.3f')

    # store parameters (defaults; CLI values below override server/root)
    amc.dRTK = {}
    dFTP = {}
    dFTP['server'] = 'cddis.gsfc.nasa.gov'
    dFTP['user'] = '******'
    dFTP['passwd'] = '*****@*****.**'
    amc.dRTK['ftp'] = dFTP
    dDate = {}
    amc.dRTK['date'] = dDate
    dLocal = {}
    dLocal['root'] = '.'
    amc.dRTK['local'] = dLocal

    # treat command line options
    # NOTE(review): 'overwrite' is parsed but never used in this function
    amc.dRTK['local']['root'], amc.dRTK['ftp']['server'], dDate['year'], dDate[
        'daynr'], overwrite, logLevels = treatCmdOpts(argv)

    # create logging for better debugging
    logger, log_name = amc.createLoggers(os.path.basename(__file__),
                                         dir=amc.dRTK['local']['root'],
                                         logLevels=logLevels)

    # get the YY and DOY values as string
    # assumes 'year' is a 4-digit string and 'daynr' an int — TODO confirm
    dDate['YY'] = dDate['year'][2:]
    dDate['DOY'] = '{:03d}'.format(dDate['daynr'])

    # create the remote/local directories and filenames to download the individual/combined RINEX Navigation files from
    amc.dRTK['remote'] = createRemoteFTPInfo(logger=logger)

    # download the RINEX navigation files using ncftpget
    doNcFTPDownload(logger=logger)

    # report to the user
    logger.info('{func:s}: amc.dRTK =\n{json!s}'.format(func=cFuncName,
                                                        json=json.dumps(
                                                            amc.dRTK,
                                                            sort_keys=False,
                                                            indent=4)))

    # copy temp log file to the YYDOY directory
    # NOTE(review): amc.dRTK['local']['dir'] is never set in this function —
    # presumably added by createRemoteFTPInfo/doNcFTPDownload; verify
    copyfile(log_name, os.path.join(amc.dRTK['local']['dir'],
                                    'pyftposnav.log'))
    os.remove(log_name)
# Example 3
def main(argv):
    """
    Read an STF Rx-status file, save its AGC data as CSV and plot the AGC values.

    :param argv: command line arguments (without the program name)
    """
    amc.cBaseName = colored(os.path.basename(__file__), 'yellow')
    cFuncName = colored(os.path.basename(__file__),
                        'yellow') + ' - ' + colored(
                            sys._getframe().f_code.co_name, 'green')

    # treat command line options
    dirSTF, fileSTF, GNSSsyst, logLevels = treatCmdOpts(argv)

    # create logging for better debugging
    logger = amc.createLoggers(os.path.basename(__file__),
                               dir=dirSTF,
                               logLevels=logLevels)

    # check if arguments are accepted
    workDir = checkExistenceArgs(stfDir=dirSTF, stfFile=fileSTF, logger=logger)

    # create dictionary with the current info
    global dSTF
    dSTF = {}
    dSTF['dir'] = workDir
    dSTF['gnss'] = GNSSsyst
    dSTF['stf'] = fileSTF

    # read in the STF file using included header information
    dfAGC = readSTFRxStatus(stfFile=fileSTF, logger=logger)
    amutils.logHeadTailDataFrame(df=dfAGC,
                                 dfName=dSTF['stf'],
                                 callerName=cFuncName,
                                 logger=logger)

    # save to csv file (same basename as the STF file)
    dSTF['csv'] = os.path.splitext(dSTF['stf'])[0] + '.csv'
    dfAGC.to_csv(dSTF['csv'])
    logger.info('{func:s}: saved to csv file {csv:s}'.format(csv=dSTF['csv'],
                                                             func=cFuncName))

    # plot the AGC values
    plotagc.plotAGC(dStf=dSTF, dfAgc=dfAGC, logger=logger)

    # report the collected information (was logged twice before the plot fix)
    logger.info('{func:s}: information:\n{dict!s}'.format(dict=dSTF,
                                                          func=cFuncName))
# Example 4
def main(argv):
    """
    pyconvbin converts raw data from SBF/UBlox to RINEX

    Parses the command line options, stores them in the global info
    structure and sets up logging.
    """
    base_name = os.path.basename(__file__)
    amc.cBaseName = colored(base_name, 'yellow')
    cFuncName = colored(base_name, 'yellow') + ' - ' + colored(sys._getframe().f_code.co_name, 'green')

    # parse CLI options into named locals first, then collect them
    (rnx_dir, gnsss, prcodes, marker, cutoff, tmult,
     showPlots, logLevels) = treatCmdOpts(argv)

    # store parameters in the global info structure
    amc.dRTK = {}
    amc.dRTK['options'] = {
        'rnx_dir': rnx_dir,
        'gnsss': gnsss,
        'prcodes': prcodes,
        'marker': marker,
        'cutoff': cutoff,
        'tmult': tmult,
    }

    # create logging for better debugging
    logger, log_name = amc.createLoggers(base_name,
                                         dir=amc.dRTK['options']['rnx_dir'],
                                         logLevels=logLevels)

    logger.info('{func:s}: arguments processed: {args!s}'.format(
        args=amc.dRTK['options']['rnx_dir'], func=cFuncName))
# Example 5
def main(argv):
    """
    pyRTKPlot adds UTM coordinates to the output of rnx2rtkp.

    If a 'stat' file is available, calculates xDOP values and makes plots
    of the statistics. Results are written to CSV and optionally appended
    as a sheet to an Excel campaign workbook.

    :param argv: command line arguments (without the program name)
    """
    amc.cBaseName = colored(os.path.basename(__file__), 'yellow')
    cFuncName = colored(os.path.basename(__file__),
                        'yellow') + ' - ' + colored(
                            sys._getframe().f_code.co_name, 'green')

    # some options for display of dataframes
    pd.set_option('display.max_columns', None)  # or 1000
    pd.set_option('display.max_rows', None)  # or 1000
    # None means 'unlimited'; -1 is deprecated and rejected by recent pandas
    pd.set_option('display.max_colwidth', None)  # or 199
    # limit float precision
    # NOTE(review): json.encoder.FLOAT_REPR is ignored by the C encoder on
    # Python >= 3.6 — confirm this still has the intended effect
    json.encoder.FLOAT_REPR = lambda o: format(o, '.3f')

    # treat command line options
    posFile, rootDir, subDir, rtkqual, marker, campaign, excel, logLevels = treatCmdOpts(
        argv)

    # store cli parameters
    amc.dRTK = {}
    amc.dRTK['rootDir'] = rootDir
    amc.dRTK['subDir'] = subDir
    amc.dRTK['posDir'] = os.path.join(rootDir, subDir)
    amc.dRTK['posFile'] = posFile
    amc.dRTK['rtkqual'] = rtkqual
    amc.dRTK['marker'] = marker
    amc.dRTK['campaign'] = campaign
    amc.dRTK['excel'] = excel

    # get the numeric value for this quality (reverse lookup in dRTKQual)
    amc.dRTK['iQual'] = [
        key for key, value in rtkc.dRTKQual.items()
        if value == amc.dRTK['rtkqual']
    ][0]

    if amc.dRTK['excel']:
        amc.dRTK['xlsName'] = os.path.join(
            amc.dRTK['rootDir'],
            '{pos:s}.xlsx'.format(pos=amc.dRTK['campaign']))

    # create logging for better debugging
    logger = amc.createLoggers(baseName=os.path.basename(__file__),
                               dir=amc.dRTK['posDir'],
                               logLevels=logLevels)

    # change to selected directory if exists
    if not os.path.exists(amc.dRTK['posDir']):
        logger.error('{func:s}: directory {dir:s} does not exists'.format(
            func=cFuncName, dir=colored(amc.dRTK['posDir'], 'red')))
        sys.exit(amc.E_DIR_NOT_EXIST)
    else:
        os.chdir(amc.dRTK['posDir'])
        logger.info('{func:s}: changed to dir {dir:s}'.format(
            func=cFuncName, dir=colored(amc.dRTK['posDir'], 'green')))

    # check whether the pos file is present, else exit
    if not os.access(os.path.join(amc.dRTK['posDir'], amc.dRTK['posFile']),
                     os.R_OK):
        logger.error('{func:s}: file {pos:s} is not accessible'.format(
            func=cFuncName,
            pos=os.path.join(amc.dRTK['posDir'], amc.dRTK['posFile'])))
        sys.exit(amc.E_FILE_NOT_EXIST)

    # read the position file into a dataframe and add dUTM coordinates
    dfPos = parse_rtkpos_file.parsePosFile(logger=logger)

    # get the indices according to the position mode
    idx = dfPos.index[dfPos['Q'] == amc.dRTK['iQual']]

    # determine statistics for the requested quality mode
    logger.info('{func:s}: stats are\n{stat!s}'.format(
        func=cFuncName,
        stat=dfPos.loc[idx][['lat', 'lon', 'ellH', 'UTM.E',
                             'UTM.N']].describe()))
    # add weighted average for the requested quality of position
    llh = ['lat', 'lon', 'ellH', 'UTM.E', 'UTM.N']
    dSDenu = ['sdn', 'sde', 'sdu', 'sdn', 'sde']

    dWAVG = {}

    for values in zip(llh, dSDenu):
        dWAVG[values[0]] = parse_rtkpos_file.wavg(group=dfPos.loc[idx],
                                                  avg_name=values[0],
                                                  weight_name=values[1])
    # calculate the stddev of the weighted average (ellH, UTM.E, UTM.N only)
    for crd in llh[2:]:
        dWAVG['sd{crd:s}'.format(crd=crd)] = parse_rtkpos_file.stddev(
            dfPos.loc[idx][crd], dWAVG[(crd)])

    # get UTM coordinates/zone for the weighted average
    dWAVG['UTM.E'], dWAVG['UTM.N'], dWAVG['UTM.Z'], dWAVG[
        'UTM.L'] = utm.from_latlon(dWAVG['lat'], dWAVG['lon'])
    amc.dRTK['WAVG'] = dWAVG

    logger.info('{func:s}: weighted averages: {wavg!s}'.format(func=cFuncName,
                                                               wavg=dWAVG))

    amutils.logHeadTailDataFrame(
        logger=logger,
        callerName=cFuncName,
        df=dfPos,
        dfName='{posf:s}'.format(posf=amc.dRTK['posFile']))

    # create UTM plot
    plot_utm.plot_utm_ellh(dRtk=amc.dRTK,
                           dfUTM=dfPos,
                           logger=logger,
                           showplot=True)

    # add results to campaign file
    addRTKResult(logger)

    # write to csv file
    csvName = os.path.join(amc.dRTK['posDir'],
                           '{pos:s}.csv'.format(pos=amc.dRTK['posFile']))
    dfPos.to_csv(csvName, index=False, header=True)

    # add sheet write to excel workbook
    if amc.dRTK['excel']:
        sheetName = '{pos:s}-{qual:s}'.format(pos=os.path.splitext(
            os.path.basename(amc.dRTK['posFile']))[0],
                                              qual=amc.dRTK['rtkqual'])
        df2excel.append_df_to_excel(filename=amc.dRTK['xlsName'],
                                    df=dfPos,
                                    sheet_name=sheetName,
                                    truncate_sheet=True,
                                    startrow=0,
                                    index=False,
                                    float_format="%.9f")
        logger.info(
            '{func:s}: added sheet {sheet:s} to workbook {wb:s}'.format(
                func=cFuncName, sheet=sheetName, wb=amc.dRTK['xlsName']))

    logger.info('{func:s}: amc.dRTK =\n{settings!s}'.format(func=cFuncName,
                                                            settings=amc.dRTK))
# Example 6
def main(argv):
    """
    pyconvbin converts raw data from SBF/UBlox to RINEX.

    Converts the given binary file to RINEX (sbf2rin/convbin, post-processed
    with gfzrnx for SBF input), stores the run information as JSON in the
    RINEX directory and moves the log file there.

    :param argv: command line arguments (without the program name)
    """
    amc.cBaseName = colored(os.path.basename(__file__), 'yellow')
    cFuncName = colored(os.path.basename(__file__),
                        'yellow') + ' - ' + colored(
                            sys._getframe().f_code.co_name, 'green')

    # limit float precision
    # NOTE(review): json.encoder.FLOAT_REPR is ignored by the C encoder on
    # Python >= 3.6, so this is likely a no-op — confirm
    encoder.FLOAT_REPR = lambda o: format(o, '.3f')

    # treat command line options
    rootDir, binFile, binType, rinexDir, crd_cart, interval, logLevels = treatCmdOpts(
        argv)

    # create logging for better debugging
    logger, log_name = amc.createLoggers(os.path.basename(__file__),
                                         dir=rootDir,
                                         logLevels=logLevels)

    # store cli parameters
    amc.dRTK = {}
    amc.dRTK['rootDir'] = rootDir
    amc.dRTK['binFile'] = binFile
    amc.dRTK['binType'] = binType
    amc.dRTK['rinexDir'] = rinexDir
    amc.dRTK['ant_crds'] = crd_cart
    # CLI interval scaled by 60 (presumably minutes -> seconds — confirm)
    amc.dRTK['interval'] = interval * 60
    amc.dRTK['gfzrnxDir'] = os.path.join(rinexDir, 'gfzrnx')

    logger.info('{func:s}: arguments processed: amc.dRTK = {drtk!s}'.format(
        func=cFuncName, drtk=amc.dRTK))

    # check validity of passed arguments
    retCode = checkValidityArgs(logger=logger)
    if retCode != amc.E_SUCCESS:
        logger.error('{func:s}: Program exits with code {error:s}'.format(
            func=cFuncName, error=colored('{!s}'.format(retCode), 'red')))
        sys.exit(retCode)

    # locate the conversion programs SBF2RIN and CONVBIN
    amc.dRTK['bin'] = {}
    amc.dRTK['bin']['CONVBIN'] = location.locateProg('convbin', logger)
    amc.dRTK['bin']['SBF2RIN'] = location.locateProg('sbf2rin', logger)
    amc.dRTK['bin']['GFZRNX'] = location.locateProg('gfzrnx', logger)
    amc.dRTK['bin']['RNX2CRZ'] = location.locateProg('rnx2crz', logger)
    amc.dRTK['bin']['COMPRESS'] = location.locateProg('compress', logger)

    # convert binary file to rinex
    logger.info(
        '{func:s}: convert binary file to rinex'.format(func=cFuncName))
    # dRnxTmp collects temporary RINEX files to delete afterwards; only the
    # SBF branch creates them (fixes NameError on the UBlox branch below)
    dRnxTmp = {}
    if amc.dRTK['binType'] == 'SBF':
        dRnxTmp = sbf2rinex(logger=logger)
        gfzrnx_ops.rnxobs_header_info(dTmpRnx=dRnxTmp, logger=logger)
        gfzrnx_ops.rnxobs_statistics_file(dTmpRnx=dRnxTmp, logger=logger)
        gfzrnx_ops.gnss_rinex_creation(dTmpRnx=dRnxTmp, logger=logger)
    else:
        ubx2rinex(logger=logger)

    # report to the user
    logger.info('{func:s}: amc.dRTK =\n{json!s}'.format(
        func=cFuncName,
        json=json.dumps(amc.dRTK,
                        sort_keys=False,
                        indent=4,
                        default=amutils.DT_convertor)))
    # store the json structure
    jsonName = os.path.join(amc.dRTK['rinexDir'],
                            amc.dRTK['binFile'].replace('.', '-') + '.json')
    # explicit UTF-8 since ensure_ascii=False may emit non-ASCII characters
    with open(jsonName, 'w', encoding='utf-8') as f:
        json.dump(amc.dRTK,
                  f,
                  ensure_ascii=False,
                  indent=4,
                  default=amutils.DT_convertor)

    # remove the temporary files (empty dict for the UBlox path)
    for tmp_file in dRnxTmp.values():
        os.remove(tmp_file)

    # copy temp log file to the YYDOY directory
    copyfile(log_name, os.path.join(amc.dRTK['rinexDir'], 'pyconvbin.log'))
    os.remove(log_name)
# Example 7
def main(argv):
    """
    Analyse and plot RINEX observables per GNSS / PRN.

    Reads the RINEX observation header and PRN list with gfzrnx, determines
    the available/requested observables, then per PRN builds a dataframe,
    analyses it and plots the observables per signal type.

    :param argv: command line arguments (without the program name)
    """
    amc.cBaseName = colored(os.path.basename(__file__), 'yellow')
    cFuncName = colored(os.path.basename(__file__), 'yellow') + ' - ' + colored(sys._getframe().f_code.co_name, 'green')

    # treat command line options
    obsRnx, dirRnx, obs_types, sat_systs, frequencies, prns, interval, logLevels = treatCmdOpts(argv)

    # store cli parameters
    amc.dRTK = {}

    dArgs = {}
    dArgs['rinexDir'] = dirRnx
    dArgs['obs_name'] = obsRnx
    dArgs['gnss'] = sat_systs
    dArgs['systyp'] = obs_types
    dArgs['sysfrq'] = frequencies
    dArgs['prns'] = prns
    dArgs['interval'] = interval

    amc.dRTK['args'] = dArgs

    # create logging for better debugging
    logger = amc.createLoggers(os.path.basename(__file__), dir=dirRnx, logLevels=logLevels)
    # locate the gfzrnx program used for execution
    dProgs = {}
    dProgs['gfzrnx'] = location.locateProg('gfzrnx', logger)
    dProgs['grep'] = location.locateProg('grep', logger)
    amc.dRTK['progs'] = dProgs

    # check arguments
    checkArguments(logger=logger)

    # initialise the 'info' sub-dict before filling it (was missing: the
    # assignments below raised KeyError on amc.dRTK['info'])
    amc.dRTK['info'] = {}
    # read the header info using gfzrnx
    amc.dRTK['info']['header'] = rnx_obs_header.rnxobs_header_metadata(dArgs=amc.dRTK['args'], dProgs=amc.dRTK['progs'], logger=logger)
    # get list of PRNs in RINEX obs file
    amc.dRTK['info']['prns'] = rnx_obs_header.rnxobs_parse_prns(dArgs=amc.dRTK['args'], dProgs=amc.dRTK['progs'], logger=logger)
    # extract parts of the rinex observation header
    amc.dRTK['available'] = rnx_obs_header.rnxobs_metadata_parser(dobs_hdr=amc.dRTK['info']['header'], dPRNs=amc.dRTK['info']['prns'], dArgs=amc.dRTK['args'], logger=logger)
    amc.dRTK['analysed'] = rnx_obs_header.rnxobs_argument_parser(dobs_hdr=amc.dRTK['info']['header'], dPRNs=amc.dRTK['info']['prns'], dArgs=amc.dRTK['args'], logger=logger)

    # for each PRN selected, extract the variables of same systyp in tabular output and read in a dataframe
    for gnss in amc.dRTK['analysed']:
        logger.info('=' * 50)
        logger.info('{func:s}: start analysing GNSS {gnss:s}'.format(gnss=colored(gnss, 'green'), func=cFuncName))

        for prn in amc.dRTK['analysed'][gnss]['prns']:
            logger.info('-' * 25)
            # create a dataframe from the rinex observation file
            dfPRN = rnx_obs_analyse.rnxobs_dataframe(rnx_file=dArgs['obs_name'], prn=prn, dPRNSysObs=amc.dRTK['analysed'][gnss]['sysobs'], dProgs=amc.dRTK['progs'], logger=logger)

            # perform analysis calculations, returned is list of ALL possible observations
            prn_sigobstyps = rnx_obs_analyse.rnxobs_analyse(prn=prn, dfPrn=dfPRN, dPRNSysType=amc.dRTK['analysed'][gnss]['systyp'], logger=logger)

            for sigtyp in amc.dRTK['analysed'][gnss]['systyp']:
                # plotting is done per sigtyp
                prn_stobs = [stobs for stobs in prn_sigobstyps if stobs.startswith(sigtyp)]
                cols = ['DT'] + prn_stobs
                # plot the observables for this specific sigtyp
                rnx_obs_plot.rnx_prsobs_plot(dArgs=amc.dRTK['args'], prn=prn, stobs=prn_stobs, dfPrn=dfPRN[cols], rawobs=amc.dRTK['analysed'][gnss]['sysobs'], logger=logger, showplot=True)

    # NOTE(review): leftover debug exit — the info dump below is unreachable;
    # remove once debugging is done
    sys.exit(11)

    # show the information JSON structure
    logger.info('{func:s}: info dictionary = \n{prt!s}'.format(prt=amutils.pretty(amc.dRTK), func=cFuncName))
# Example 8
def main(argv):
    """
    Read an STF Geodetic file, add UTM coordinates relative to an optional
    marker, save the data as CSV and create trajectory/coordinate plots.

    :param argv: command line arguments (without the program name)
    """
    amc.cBaseName = colored(os.path.basename(__file__), 'yellow')
    cFuncName = colored(os.path.basename(__file__), 'yellow') + ' - ' + colored(sys._getframe().f_code.co_name, 'green')

    # treat command line options
    dirSTF, fileSTF, GNSSsyst, crdMarker, logLevels = treatCmdOpts(argv)

    # create logging for better debugging
    logger = amc.createLoggers(os.path.basename(__file__), dir=dirSTF, logLevels=logLevels)

    # check if arguments are accepted
    workDir = checkExistenceArgs(stfDir=dirSTF, stfFile=fileSTF, logger=logger)

    # create dictionary with the current info
    global dSTF
    dSTF = {}
    dSTF['dir'] = workDir
    dSTF['gnss'] = GNSSsyst
    dSTF['stf'] = fileSTF

    # set the reference point; (0, 0, 0) means no marker was given
    dMarker = {}
    dMarker['lat'], dMarker['lon'], dMarker['ellH'] = map(float, crdMarker)
    if [dMarker['lat'], dMarker['lon'], dMarker['ellH']] == [0, 0, 0]:
        # np.nan instead of np.NaN: the NaN alias was removed in NumPy 2.0
        dMarker['lat'] = dMarker['lon'] = dMarker['ellH'] = np.nan
        dMarker['UTM.E'] = dMarker['UTM.N'] = np.nan
        dMarker['UTM.Z'] = dMarker['UTM.L'] = ''
    else:
        # lowercase 'utm' module (was 'UTM', inconsistent with the rest of
        # the code and most likely a NameError)
        dMarker['UTM.E'], dMarker['UTM.N'], dMarker['UTM.Z'], dMarker['UTM.L'] = utm.from_latlon(dMarker['lat'], dMarker['lon'])

    logger.info('{func:s}: marker coordinates = {crd!s}'.format(func=cFuncName, crd=dMarker))
    dSTF['marker'] = dMarker

    # read in the STF file using included header information
    dfGeod = readSTFGeodetic(stfFile=fileSTF, logger=logger)
    amutils.logHeadTailDataFrame(df=dfGeod, dfName=dSTF['stf'], callerName=cFuncName, logger=logger)

    # save to csv file (same basename as the STF file)
    dSTF['csv'] = os.path.splitext(dSTF['stf'])[0] + '.csv'
    dfGeod.to_csv(dSTF['csv'])

    # plot trajectory with suppressed points
    logger.info('{func:s}: information:\n{dict!s}'.format(dict=amutils.pretty(dSTF), func=cFuncName))
    plotcoords.plotUTMSuppressed(dStf=dSTF, dfCrd=dfGeod[['time', 'UTM.E', 'UTM.N', 'Error']], logger=logger)

    # plot the UTM coordinates and #SVs
    plotcoords.plotUTMCoords(dStf=dSTF, dfCrd=dfGeod[['time', 'UTM.E', 'UTM.N', 'Height[m]', 'NrSV', 'SignalInfo', 'dist', '2D/3D']], logger=logger)
    # plot trajectory
    plotcoords.plotUTMScatter(dStf=dSTF, dfCrd=dfGeod[['time', 'UTM.E', 'UTM.N', 'SignalInfo', '2D/3D']], logger=logger)

    logger.info('{func:s}: information:\n{dict!s}'.format(dict=amutils.pretty(dSTF), func=cFuncName))
# Example 9
def main(argv):
    """
    pyRTKPlot adds UTM coordinates to output of rnx2rtkp.
    If 'stat' file is available, calculates xDOP values, and make plots of statictics

    """
    amc.cBaseName = colored(os.path.basename(__file__), 'yellow')
    cFuncName = colored(os.path.basename(__file__),
                        'yellow') + ' - ' + colored(
                            sys._getframe().f_code.co_name, 'green')

    # some options for diasplay of dataframes
    pd.set_option('display.max_columns', None)  # or 1000
    pd.set_option('display.max_rows', None)  # or 1000
    pd.set_option('display.max_colwidth', -1)  # or 199
    # limit float precision
    json.encoder.FLOAT_REPR = lambda o: format(o, '.3f')
    np.set_printoptions(precision=4)

    # treat command line options
    rtkPosFile, rtkDir, crdMarker, showPlots, overwrite, logLevels = treatCmdOpts(
        argv)

    # create logging for better debugging
    logger, log_name = amc.createLoggers(baseName=os.path.basename(__file__),
                                         dir=rtkDir,
                                         logLevels=logLevels)

    # change to selected directory if exists
    # print('rtkDir = %s' % rtkDir)
    if not os.path.exists(rtkDir):
        logger.error('{func:s}: directory {dir:s} does not exists'.format(
            func=cFuncName, dir=colored(rtkDir, 'red')))
        sys.exit(amc.E_DIR_NOT_EXIST)
    else:
        os.chdir(rtkDir)
        logger.info('{func:s}: changed to dir {dir:s}'.format(func=cFuncName,
                                                              dir=colored(
                                                                  rtkDir,
                                                                  'green')))

    # store information
    dInfo = {}
    dInfo['dir'] = rtkDir
    dInfo['rtkPosFile'] = rtkPosFile
    dInfo['rtkStatFile'] = dInfo['rtkPosFile'] + '.stat'
    dInfo['posn'] = dInfo['rtkPosFile'] + '.posn'
    dInfo['posnstat'] = dInfo['posn'] + '.html'
    amc.dRTK['info'] = dInfo

    # GNSS system is last part of root directory
    amc.dRTK['syst'] = 'UNKNOWN'
    for _, syst in enumerate(['GAL', 'GPS', 'COM']):
        if syst.lower() in amc.dRTK['info']['dir'].lower():
            amc.dRTK['syst'] = syst
    # print('amc.dRTK['syst'] = {:s}'.format(amc.dRTK['syst']))

    # info about PDOP bins and statistics
    dPDOP = {}
    dPDOP['bins'] = [0, 2, 3, 4, 5, 6, math.inf]
    amc.dRTK['PDOP'] = dPDOP

    # set the reference point
    dMarker = {}
    dMarker['lat'], dMarker['lon'], dMarker['ellH'] = map(float, crdMarker)
    print('crdMarker = {!s}'.format(crdMarker))

    if [dMarker['lat'], dMarker['lon'], dMarker['ellH']] == [0, 0, 0]:
        dMarker['lat'] = dMarker['lon'] = dMarker['ellH'] = np.NaN
        dMarker['UTM.E'] = dMarker['UTM.N'] = np.NaN
        dMarker['UTM.Z'] = dMarker['UTM.L'] = ''
    else:
        dMarker['UTM.E'], dMarker['UTM.N'], dMarker['UTM.Z'], dMarker[
            'UTM.L'] = utm.from_latlon(dMarker['lat'], dMarker['lon'])

    logger.info('{func:s}: marker coordinates = {crd!s}'.format(func=cFuncName,
                                                                crd=dMarker))
    amc.dRTK['marker'] = dMarker

    # check wether pos and stat file are present, else exit
    if not os.access(os.path.join(rtkDir, amc.dRTK['info']['rtkPosFile']),
                     os.R_OK) or not os.access(
                         os.path.join(rtkDir, amc.dRTK['info']['rtkStatFile']),
                         os.R_OK):
        logger.error(
            '{func:s}: file {pos:s} or {stat:s} is not accessible'.format(
                func=cFuncName,
                pos=os.path.join(rtkDir, amc.dRTK['info']['rtkPosFile']),
                stat=os.path.join(rtkDir, amc.dRTK['info']['rtkStatFile'])))

        sys.exit(amc.E_FILE_NOT_EXIST)

    # read the position file into a dataframe and add dUTM coordinates
    logger.info('{func:s}: parsing RTKLib pos file {pos:s}'.format(
        pos=amc.dRTK['info']['rtkPosFile'], func=cFuncName))
    dfPosn = parse_rtk_files.parseRTKLibPositionFile(logger=logger)

    # calculate the weighted avergae of llh & enu
    amc.dRTK['WAvg'] = parse_rtk_files.weightedAverage(dfPos=dfPosn,
                                                       logger=logger)

    # find difference with reference and ax/min limits for UTM plot
    logger.info(
        '{func:s}: calculating coordinate difference with reference/mean position'
        .format(func=cFuncName))
    dfCrd, dCrdLim = plot_position.crdDiff(
        dMarker=amc.dRTK['marker'],
        dfUTMh=dfPosn[['UTM.E', 'UTM.N', 'ellH']],
        plotCrds=['UTM.E', 'UTM.N', 'ellH'],
        logger=logger)
    # merge dfCrd into dfPosn
    dfPosn[['dUTM.E', 'dUTM.N', 'dEllH']] = dfCrd[['UTM.E', 'UTM.N', 'ellH']]

    # work on the statistics file
    # split it in relavant parts
    dTmpFiles = parse_rtk_files.splitStatusFile(
        amc.dRTK['info']['rtkStatFile'], logger=logger)

    # parse the satellite file (contains Az, El, PRRes, CN0)
    dfSats = parse_rtk_files.parseSatelliteStatistics(dTmpFiles['sat'],
                                                      logger=logger)
    store_to_cvs(df=dfSats, ext='sats', dInfo=amc.dRTK, logger=logger)

    # determine statistics on PR residuals for all satellites per elevation bin
    dfDistCN0, dsDistCN0, dfDistPRres, dsDistPRRes = parse_rtk_files.parse_elevation_distribution(
        dRtk=amc.dRTK, dfSat=dfSats, logger=logger)
    store_to_cvs(df=dfDistCN0, ext='CN0.dist', dInfo=amc.dRTK, logger=logger)
    store_to_cvs(df=dfDistPRres,
                 ext='PRres.dist',
                 dInfo=amc.dRTK,
                 logger=logger)

    # BEGIN DEBUG
    # END DEBUG

    # determine statistics of PR residuals for each satellite
    amc.dRTK['PRres'] = parse_rtk_files.parse_sv_residuals(dfSat=dfSats,
                                                           logger=logger)

    # calculate DOP values from El, Az info for each TOW
    dfDOPs = parse_rtk_files.calcDOPs(dfSats, logger=logger)
    store_to_cvs(df=dfDOPs, ext='XDOP', dInfo=amc.dRTK, logger=logger)

    # merge the PDOP column of dfDOPs into dfPosn and interpolate the PDOP column
    dfResults = pd.merge(left=dfPosn,
                         right=dfDOPs[['DT', 'PDOP', 'HDOP', 'VDOP', 'GDOP']],
                         left_on='DT',
                         right_on='DT',
                         how='left')
    dfPosn = dfResults.interpolate()
    store_to_cvs(df=dfPosn, ext='posn', dInfo=amc.dRTK, logger=logger)

    # calculate per DOP bin the statistics of PDOP
    parse_rtk_files.addPDOPStatistics(dRtk=amc.dRTK,
                                      dfPos=dfPosn,
                                      logger=logger)

    # add statistics for the E,N,U coordinate differences
    dfStatENU = enu_stat.enu_statistics(
        dRtk=amc.dRTK,
        dfENU=dfPosn[['DT', 'dUTM.E', 'dUTM.N', 'dEllH']],
        logger=logger)
    # add statistics for the E,N,U coordinate differences
    dfDistENU, dfDistXDOP = enu_stat.enupdop_distribution(dRtk=amc.dRTK,
                                                          dfENU=dfPosn[[
                                                              'DT', 'dUTM.E',
                                                              'dUTM.N',
                                                              'dEllH', 'PDOP',
                                                              'HDOP', 'VDOP',
                                                              'GDOP'
                                                          ]],
                                                          logger=logger)
    store_to_cvs(df=dfDistENU, ext='ENU.dist', dInfo=amc.dRTK, logger=logger)
    store_to_cvs(df=dfDistXDOP, ext='XDOP.dist', dInfo=amc.dRTK, logger=logger)

    logger.info('{func:s}: dRTK =\n{settings!s}'.format(func=cFuncName,
                                                        settings=json.dumps(
                                                            amc.dRTK,
                                                            sort_keys=False,
                                                            indent=4)))

    # # store statistics for dfPosn
    # logger.info('{func:s}: creating pandas profile report {ppname:s} for dfPosn, {help:s}'.format(ppname=colored(amc.dRTK['info']['posnstat'], 'green'), help=colored('be patient', 'red'), func=cFuncName))
    # dfProfile = dfPosn[['DT', 'ns', 'dUTM.E', 'dUTM.N', 'dEllH', 'sdn', 'sde', 'sdu', 'PDOP']]

    # ppTitle = 'Report on {posn:s} - {syst:s} - {date:s}'.format(posn=amc.dRTK['info']['posn'], syst=amc.dRTK['syst'], date=amc.dRTK['Time']['date'])

    # profile = pp.ProfileReport(df=dfProfile, check_correlation_pearson=False, correlations={'pearson': False, 'spearman': False, 'kendall': False, 'phi_k': False, 'cramers': False, 'recoded': False}, title=ppTitle)
    # profile.to_file(output_file=amc.dRTK['info']['posnstat'])

    # parse the clock stats
    dfCLKs = parse_rtk_files.parseClockBias(dTmpFiles['clk'], logger=logger)
    store_to_cvs(df=dfCLKs, ext='clks', dInfo=amc.dRTK, logger=logger)

    # BEGIN debug
    dfs = (dfPosn, dfSats, dfCLKs, dfCrd, dfDOPs, dfStatENU, dfDistENU,
           dfDistXDOP, dfDistPRres, dfDistCN0)
    dfsNames = ('dfPosn', 'dfSats', 'dfCLKs', 'dfCrd', 'dfDOPs', 'dfStatENU',
                'dfDistENU', 'dfDistXDOP')
    for df, dfName in zip(dfs, dfsNames):
        amutils.logHeadTailDataFrame(logger=logger,
                                     callerName=cFuncName,
                                     df=df,
                                     dfName=dfName)
        amc.logDataframeInfo(df=df,
                             dfName=dfName,
                             callerName=cFuncName,
                             logger=logger)
    # EOF debug

    # create the position plot (use DOP to color segments)
    plot_position.plotUTMOffset(dRtk=amc.dRTK,
                                dfPos=dfPosn,
                                dfCrd=dfCrd,
                                dCrdLim=dCrdLim,
                                logger=logger,
                                showplot=showPlots)

    # create the UTM N-E scatter plot
    plot_scatter.plotUTMScatter(dRtk=amc.dRTK,
                                dfPos=dfPosn,
                                dfCrd=dfCrd,
                                dCrdLim=dCrdLim,
                                logger=logger,
                                showplot=showPlots)
    plot_scatter.plotUTMScatterBin(dRtk=amc.dRTK,
                                   dfPos=dfPosn,
                                   dfCrd=dfCrd,
                                   dCrdLim=dCrdLim,
                                   logger=logger,
                                   showplot=showPlots)

    # create ENU distribution plots
    plot_distributions_crds.plot_enu_distribution(dRtk=amc.dRTK,
                                                  dfENUdist=dfDistENU,
                                                  dfENUstat=dfStatENU,
                                                  logger=logger,
                                                  showplot=showPlots)

    # create XDOP plots
    plot_distributions_crds.plot_xdop_distribution(dRtk=amc.dRTK,
                                                   dfXDOP=dfDOPs,
                                                   dfXDOPdisp=dfDistXDOP,
                                                   logger=logger,
                                                   showplot=showPlots)

    # plot pseudo-range residus
    dPRResInfo = {
        'name': 'PRres',
        'yrange': [-6, 6],
        'title': 'PR Residuals',
        'unit': 'm',
        'linestyle': '-'
    }
    logger.info(
        '{func:s}: creating dPRRes plots based on dict {dict!s}'.format(
            func=cFuncName, dict=dPRResInfo))
    plot_sats_column.plotRTKLibSatsColumn(dCol=dPRResInfo,
                                          dRtk=amc.dRTK,
                                          dfSVs=dfSats,
                                          logger=logger,
                                          showplot=showPlots)

    # plot CN0
    dCN0Info = {
        'name': 'CN0',
        'yrange': [20, 60],
        'title': 'CN0 Ratio',
        'unit': 'dBHz',
        'linestyle': '-'
    }
    logger.info('{func:s}: creating CN0 plots based on dict {dict!s}'.format(
        func=cFuncName, dict=dCN0Info))
    plot_sats_column.plotRTKLibSatsColumn(dCol=dCN0Info,
                                          dRtk=amc.dRTK,
                                          dfSVs=dfSats,
                                          logger=logger,
                                          showplot=showPlots)

    # create plots for elevation distribution of CN0 and PRres
    plot_distributions_elev.plot_elev_distribution(dRtk=amc.dRTK,
                                                   df=dfDistCN0,
                                                   ds=dsDistCN0,
                                                   obs_name='CN0',
                                                   logger=logger,
                                                   showplot=showPlots)
    plot_distributions_elev.plot_elev_distribution(dRtk=amc.dRTK,
                                                   df=dfDistPRres,
                                                   ds=dsDistPRRes,
                                                   obs_name='PRres',
                                                   logger=logger,
                                                   showplot=showPlots)

    # # plot elevation
    dElevInfo = {
        'name': 'Elev',
        'yrange': [0, 90],
        'title': 'Elevation',
        'unit': 'Deg',
        'linestyle': '-'
    }
    logger.info('{func:s}: creating Elev plots based on dict {dict!s}'.format(
        func=cFuncName, dict=dElevInfo))
    plot_sats_column.plotRTKLibSatsColumn(dCol=dElevInfo,
                                          dRtk=amc.dRTK,
                                          dfSVs=dfSats,
                                          logger=logger,
                                          showplot=showPlots)

    # # plot the receiver clock
    logger.info('{func:s}: creating Clock plots'.format(func=cFuncName))
    plot_clock.plotClock(dfClk=dfCLKs,
                         dRtk=amc.dRTK,
                         logger=logger,
                         showplot=showPlots)

    logger.info('{func:s}: final amc.dRTK =\n{settings!s}'.format(
        func=cFuncName,
        settings=json.dumps(amc.dRTK, sort_keys=False, indent=4)))

    jsonName = amc.dRTK['info']['rtkPosFile'] + '.json'
    with open(jsonName, 'w') as f:
        json.dump(amc.dRTK, f, ensure_ascii=False, indent=4)

    logger.info('{func:s}: created json file {json:s}'.format(func=cFuncName,
                                                              json=colored(
                                                                  jsonName,
                                                                  'green')))

    # copy temp log file to the YYDOY directory
    copyfile(
        log_name,
        os.path.join(
            amc.dRTK['info']['dir'], '{obs:s}-{prog:s}'.format(
                obs=amc.dRTK['info']['rtkPosFile'].replace(';', '_'),
                prog='plot.log')))
    os.remove(log_name)
Esempio n. 10
0
def main(argv) -> bool:
    """Plot position data from gLAB (v6) OUTPUT messages.

    Splits the gLAB output file into its INFO and OUTPUT sections, parses
    both, computes statistics over the OUTPUT records, produces the ENU,
    scatter, DOP and box plots, and finally stores the collected run
    information as a JSON file and archives the temporary log file.
    """
    amc.cBaseName = colored(os.path.basename(__file__), 'yellow')
    cFuncName = colored(os.path.basename(__file__),
                        'yellow') + ' - ' + colored(
                            sys._getframe().f_code.co_name, 'green')

    # parse the command line arguments
    (root_dir, glab_outfile, enu_scale, enu_center, do_show,
     levels_log) = treatCmdOpts(argv)

    # set up the loggers (file + console); keep the temp log file name
    logger, tmp_log = amc.createLoggers(os.path.basename(__file__),
                                        dir=root_dir,
                                        logLevels=levels_log)

    # keep the CLI parameters in the global info dict
    amc.dRTK = {'dir_root': root_dir, 'glab_out': glab_outfile}

    # sub dict holding the gLAB related information
    amc.dRTK['dgLABng'] = {'dir_glab': 'glabng'}

    # DOP bins used for classifying epochs when plotting
    amc.dRTK['dop_bins'] = [0, 2, 3, 4, 5, 6, math.inf]

    # verify the arguments; bail out when they are not acceptable
    ret_val = check_arguments(logger=logger)
    if ret_val != amc.E_SUCCESS:
        sys.exit(ret_val)

    # split the gLAB out file into its parts (INFO & OUTPUT messages needed)
    glab_msgs = glc.dgLab['messages'][0:2]
    tmp_glab_files = glab_split_outfile.split_glab_outfile(
        msgs=glab_msgs, glab_outfile=amc.dRTK['glab_out'], logger=logger)

    # parse the INFO messages from the INFO temp file
    amc.dRTK['INFO'] = glab_parser_info.parse_glab_info(
        glab_info=tmp_glab_files['INFO'], logger=logger)
    # parse the OUTPUT messages into a dataframe and store it as CSV
    df_output = glab_parser_output.parse_glab_output(
        glab_output=tmp_glab_files['OUTPUT'], logger=logger)
    store_to_cvs(df=df_output, ext='pos', logger=logger, index=False)

    # statistics over the gLAB OUTPUT messages
    amc.dRTK['dgLABng']['stats'] = glab_statistics.statistics_glab_outfile(
        df_outp=df_output, logger=logger)

    # plotting: position ENU and PDOP
    glab_plot_output_enu.plot_glab_position(dfCrd=df_output,
                                            scale=enu_scale,
                                            showplot=do_show,
                                            logger=logger)
    # plotting: EN scatter per DOP bin
    glab_plot_output_enu.plot_glab_scatter(dfCrd=df_output,
                                           scale=enu_scale,
                                           center=enu_center,
                                           showplot=do_show,
                                           logger=logger)
    # plotting: EN scatter per DOP bin (separate plots)
    glab_plot_output_enu.plot_glab_scatter_bin(dfCrd=df_output,
                                               scale=enu_scale,
                                               center=enu_center,
                                               showplot=do_show,
                                               logger=logger)
    # plotting: the DOP parameters
    glab_plot_output_enu.plot_glab_xdop(dfCrd=df_output,
                                        showplot=do_show,
                                        logger=logger)
    # plotting: ENU box plots per DOP bin
    glab_plot_output_stats.plot_glab_statistics(
        df_dopenu=df_output[glc.dgLab['OUTPUT']['XDOP'] +
                            glc.dgLab['OUTPUT']['dENU']],
        scale=enu_scale,
        showplot=do_show,
        logger=logger)

    # report the collected information to the user
    logger.info('{func:s}: Project information =\n{json!s}'.format(
        func=cFuncName,
        json=json.dumps(amc.dRTK,
                        sort_keys=False,
                        indent=4,
                        default=amutils.DT_convertor)))

    # persist the info dict as a JSON file next to the gLAB out file
    json_out = amc.dRTK['glab_out'].split('.')[0] + '.json'
    with open(json_out, 'w') as f:
        json.dump(amc.dRTK,
                  f,
                  ensure_ascii=False,
                  indent=4,
                  default=amutils.DT_convertor)
    logger.info('{func:s}: created json file {json:s}'.format(func=cFuncName,
                                                              json=colored(
                                                                  json_out,
                                                                  'green')))

    # move the temporary log file into the root directory
    copyfile(
        tmp_log,
        os.path.join(
            amc.dRTK['dir_root'],
            '{obs:s}-{prog:s}'.format(obs=amc.dRTK['glab_out'].split('.')[0],
                                      prog='output.log')))
    os.remove(tmp_log)

    return amc.E_SUCCESS
Esempio n. 11
0
def main(argv) -> bool:
    """
    Reads the gLAB statistics database, extracts the rows matching the
    selected GNSSs / pr-codes / markers per coordinate type, computes the
    per-prcode statistics and plots the mean/std values.

    Returns amc.E_SUCCESS on success; exits via sys.exit on argument or
    file errors.
    """
    cFuncName = colored(os.path.basename(__file__),
                        'yellow') + ' - ' + colored(
                            sys._getframe().f_code.co_name, 'green')

    # store cli parameters
    amc.dRTK = {}
    cli_opt = {}
    cli_opt['glab_db'], cli_opt['gnsss'], cli_opt['prcodes'], cli_opt[
        'markers'], cli_opt['yyyy'], cli_opt['doy_begin'], cli_opt[
            'doy_last'], show_plot, log_levels = treatCmdOpts(argv)
    amc.dRTK['options'] = cli_opt

    # create logging for better debugging
    logger, log_name = amc.createLoggers(os.path.basename(__file__),
                                         dir=os.getcwd(),
                                         logLevels=log_levels)

    # check arguments; exit when they are not acceptable
    ret_val = check_arguments(logger=logger)
    if ret_val != amc.E_SUCCESS:
        sys.exit(ret_val)

    # for crds in ['ENU', 'dENU']:
    for crds in ['ENU']:
        # parse the database file to get the GNSSs and prcodes we need
        tmp_name = glabdb_parse.db_parse_gnss_codes(
            db_name=amc.dRTK['options']['glab_db'],
            crd_types=glc.dgLab['OUTPUT'][crds],
            logger=logger)

        # read into dataframe
        logger.info(
            '{func:s}: reading selected information into dataframe'.format(
                func=cFuncName))

        # column layout of the temp file; ENU rows also carry the
        # statistic values (mean/std/max/min)
        colnames = ['yyyy', 'doy', 'gnss', 'marker', 'prcodes', 'crds']
        if crds == 'ENU':
            colnames += ['mean', 'std', 'max', 'min']

        try:
            df_crds = pd.read_csv(tmp_name, names=colnames, header=None)

            # convert YYYY/DOY to a datetime.date field
            df_crds['DT'] = df_crds.apply(lambda x: datetime.date(
                x['yyyy'], 1, 1) + datetime.timedelta(x['doy'] - 1),
                                          axis=1)

        except FileNotFoundError as e:
            logger.critical('{func:s}: Error = {err!s}'.format(err=e,
                                                               func=cFuncName))
            sys.exit(amc.E_FILE_NOT_EXIST)

        amutils.logHeadTailDataFrame(logger=logger,
                                     callerName=cFuncName,
                                     df=df_crds,
                                     dfName='df[{crds:s}]'.format(crds=crds))

        # determine statistics
        if crds == 'ENU':
            # statistics over the coordinates ENU per prcode selected
            amc.dRTK['stats_{crd:s}'.format(
                crd=crds)] = glabdb_statistics.crd_statistics(
                    crds=crds,
                    prcodes=amc.dRTK['options']['prcodes'],
                    df_crds=df_crds,
                    logger=logger)
            # plot the mean / std values for all prcodes per ENU coordinates
            glabdb_plot_crds.plot_glabdb_position(
                crds=crds,
                prcodes=amc.dRTK['options']['prcodes'],
                df_crds=df_crds,
                logger=logger,
                showplot=show_plot)

    # report to the user
    logger.info('{func:s}: Project information =\n{json!s}'.format(
        func=cFuncName,
        json=json.dumps(amc.dRTK,
                        sort_keys=False,
                        indent=4,
                        default=amutils.DT_convertor)))

    return amc.E_SUCCESS
Esempio n. 12
0
def main(argv) -> bool:
    """
    Process a gzip-compressed gLAB (v6) output file: uncompress it, parse
    the INFO / OUTPUT messages, update the statistics database, create the
    ENU / scatter / DOP plots, store the run information as JSON and
    recompress the intermediate files.

    Returns amc.E_SUCCESS on success; exits via sys.exit on argument errors.
    """
    amc.cBaseName = colored(os.path.basename(__file__), 'yellow')
    # colored "script - function" prefix used in all log messages
    cFuncName = colored(os.path.basename(__file__),
                        'yellow') + ' - ' + colored(
                            sys._getframe().f_code.co_name, 'green')

    # treat command line options
    dir_root, glab_cmp_out, scale_enu, center_enu, db_cvs, show_plot, log_levels = treatCmdOpts(
        argv)

    # create logging for better debugging (also returns the temp log file name)
    logger, log_name = amc.createLoggers(os.path.basename(__file__),
                                         dir=dir_root,
                                         logLevels=log_levels)

    # store cli parameters in the global info dict
    amc.dRTK = {}
    amc.dRTK['dir_root'] = dir_root
    amc.dRTK['glab_cmp_out'] = glab_cmp_out

    # create sub dict for gLAB related info
    dgLABng = {}
    dgLABng['dir_glab'] = 'glabng'
    dgLABng['db'] = db_cvs

    amc.dRTK['dgLABng'] = dgLABng

    # check some arguments; exit when they are not acceptable
    ret_val = check_arguments(logger=logger)
    if ret_val != amc.E_SUCCESS:
        sys.exit(ret_val)

    # open or create the database file for storing the statistics
    glab_updatedb.open_database(db_name=amc.dRTK['dgLABng']['db'],
                                logger=logger)

    # get location of the external (de)compression programs used
    amc.dRTK['progs'] = {}
    amc.dRTK['progs']['gunzip'] = location.locateProg('gunzip', logger)
    amc.dRTK['progs']['gzip'] = location.locateProg('gzip', logger)

    # uncompress the "out" file in place (-f forces overwriting)
    runGUNZIP = '{prog:s} -f -v {zip:s}'.format(
        prog=amc.dRTK['progs']['gunzip'],
        zip=os.path.join(amc.dRTK['dir_root'], amc.dRTK['glab_cmp_out']))
    logger.info('{func:s}: Uncompressing file {cmp:s}:\n{cmd:s}'.format(
        func=cFuncName,
        cmd=colored(runGUNZIP, 'green'),
        cmp=colored(amc.dRTK['glab_cmp_out'], 'green')))

    # run the program
    exeprogram.subProcessDisplayStdErr(cmd=runGUNZIP, verbose=True)

    # name of uncompressed file: compressed name minus the 3-char suffix
    # (assumes glab_cmp_out ends in '.gz' — verify in check_arguments)
    amc.dRTK['glab_out'] = amc.dRTK['glab_cmp_out'][:-3]

    # split gLABs out file in parts
    glab_msgs = glc.dgLab['messages'][0:2]  # INFO & OUTPUT messages needed
    dglab_tmpfiles = glab_split_outfile.split_glab_outfile(
        msgs=glab_msgs, glab_outfile=amc.dRTK['glab_out'], logger=logger)

    # read in the INFO messages from INFO temp file
    amc.dRTK['INFO'] = glab_parser_info.parse_glab_info(
        glab_info=dglab_tmpfiles['INFO'], logger=logger)

    # read in the OUTPUT messages from OUTPUT temp file
    df_output = glab_parser_output.parse_glab_output(
        glab_output=dglab_tmpfiles['OUTPUT'], logger=logger)
    # save df_output as CSV file; keep the created file name for compression
    amc.dRTK['dgLABng']['pos'] = store_to_cvs(df=df_output,
                                              ext='pos',
                                              logger=logger,
                                              index=False)

    # compress the stored CSV file
    runGZIP = '{prog:s} -f -v {zip:s}'.format(
        prog=amc.dRTK['progs']['gzip'],
        zip=os.path.join(amc.dRTK['dir_root'], amc.dRTK['dgLABng']['dir_glab'],
                         amc.dRTK['dgLABng']['pos']))
    logger.info('{func:s}: Compressing file {cmp:s}:\n{cmd:s}'.format(
        func=cFuncName,
        cmd=colored(runGZIP, 'green'),
        cmp=colored(amc.dRTK['dgLABng']['pos'], 'green')))
    # run the program
    exeprogram.subProcessDisplayStdErr(cmd=runGZIP, verbose=True)

    # calculate statistics for the gLAB OUTPUT messages; also yields the
    # per-coordinate lines to store in the database
    amc.dRTK['dgLABng'][
        'stats'], dDB_crds = glab_statistics.statistics_glab_outfile(
            df_outp=df_output, logger=logger)

    # update the database with one line per coordinate type
    for key, val in dDB_crds.items():
        glab_updatedb.db_update_line(
            db_name=amc.dRTK['dgLABng']['db'],
            line_id='{id:s},{crd:s}'.format(id=amc.dRTK['INFO']['db_lineID'],
                                            crd=key),
            info_line='{id:s},{val:s}'.format(id=amc.dRTK['INFO']['db_lineID'],
                                              val=val),
            logger=logger)

    # sort the glab_output_db
    glab_updatedb.db_sort(db_name=amc.dRTK['dgLABng']['db'], logger=logger)

    # plot the gLABs OUTPUT messages
    # - position ENU and PDOP plots
    glab_plot_output_enu.plot_glab_position(dfCrd=df_output,
                                            scale=scale_enu,
                                            showplot=show_plot,
                                            logger=logger)
    # - scatter plot of EN per DOP bin
    glab_plot_output_enu.plot_glab_scatter(dfCrd=df_output,
                                           scale=scale_enu,
                                           center=center_enu,
                                           showplot=show_plot,
                                           logger=logger)
    # - scatter plot of EN per DOP bin (separate plots)
    glab_plot_output_enu.plot_glab_scatter_bin(dfCrd=df_output,
                                               scale=scale_enu,
                                               center=center_enu,
                                               showplot=show_plot,
                                               logger=logger)
    # - plot the DOP parameters
    glab_plot_output_enu.plot_glab_xdop(dfCrd=df_output,
                                        showplot=show_plot,
                                        logger=logger)
    # - plot the ENU box plots per DOP bin
    glab_plot_output_stats.plot_glab_statistics(
        df_dopenu=df_output[glc.dgLab['OUTPUT']['XDOP'] +
                            glc.dgLab['OUTPUT']['dENU']],
        scale=scale_enu,
        showplot=show_plot,
        logger=logger)

    # report to the user
    logger.info('{func:s}: Project information =\n{json!s}'.format(
        func=cFuncName,
        json=json.dumps(amc.dRTK,
                        sort_keys=False,
                        indent=4,
                        default=amutils.DT_convertor)))

    # NOTE(review): db_sort was already called above after the update loop
    # and the database is not modified in between — this second call looks
    # redundant; confirm before removing.
    glab_updatedb.db_sort(db_name=amc.dRTK['dgLABng']['db'], logger=logger)

    # recompress the "out" file
    runGZIP = '{prog:s} -f -v {zip:s}'.format(prog=amc.dRTK['progs']['gzip'],
                                              zip=os.path.join(
                                                  amc.dRTK['dir_root'],
                                                  amc.dRTK['glab_out']))
    logger.info('{func:s}: Compressing file {cmp:s}:\n{cmd:s}'.format(
        func=cFuncName,
        cmd=colored(runGZIP, 'green'),
        cmp=colored(amc.dRTK['glab_out'], 'green')))
    # run the program
    exeprogram.subProcessDisplayStdErr(cmd=runGZIP, verbose=True)

    # store the json structure
    json_out = amc.dRTK['glab_out'].split('.')[0] + '.json'
    with open(json_out, 'w') as f:
        json.dump(amc.dRTK,
                  f,
                  ensure_ascii=False,
                  indent=4,
                  default=amutils.DT_convertor)
    logger.info('{func:s}: created json file {json:s}'.format(func=cFuncName,
                                                              json=colored(
                                                                  json_out,
                                                                  'green')))

    # copy the temp log file to the root directory, then remove the original
    copyfile(
        log_name,
        os.path.join(
            amc.dRTK['dir_root'],
            '{obs:s}-{prog:s}'.format(obs=amc.dRTK['glab_out'].split('.')[0],
                                      prog='output.log')))
    os.remove(log_name)

    return amc.E_SUCCESS
Esempio n. 13
0
def main(argv) -> bool:
    """Run one glabng processing session.

    Parses the command line, locates the external programs needed,
    uncompresses the RINEX input, builds the session config from the
    template, runs glabng, cleans up the decompressed files and moves
    the log file into the glab output directory.
    """
    cFuncName = colored(os.path.basename(__file__),
                        'yellow') + ' - ' + colored(
                            sys._getframe().f_code.co_name, 'green')

    # collect the command line options into an options sub dict
    opt_values = treatCmdOpts(argv)
    opt_names = ('rxtype', 'igs_root', 'marker', 'year', 'doy', 'gnss',
                 'prcodes', 'cutoff', 'template')
    cli_opt = dict(zip(opt_names, opt_values))
    log_levels = opt_values[-1]
    amc.dRTK = {'options': cli_opt}

    # create logging for better debugging
    logger, tmp_log = amc.createLoggers(os.path.basename(__file__),
                                        dir=os.getcwd(),
                                        logLevels=log_levels)

    # verify the arguments; exit when they are not acceptable
    ret_val = check_arguments(logger=logger)
    if ret_val != amc.E_SUCCESS:
        sys.exit(ret_val)

    # locate the external programs used for execution
    amc.dRTK['progs'] = {
        prog: location.locateProg(prog, logger)
        for prog in ('glabng', 'crz2rnx', 'gunzip', 'gzip')
    }

    # uncompress RINEX files
    uncompress_rnx_files(logger=logger)
    # use the template file for creation of the glab config file
    create_session_template(logger=logger)
    # run glabng using the created cfg file
    run_glabng_session(logger=logger)
    # remove the decompressed RINEX files again
    cleanup_rnx_files(logger=logger)

    # report to the user
    logger.info('{func:s}: Project information =\n{json!s}'.format(
        func=cFuncName,
        json=json.dumps(amc.dRTK,
                        sort_keys=False,
                        indent=4,
                        default=amutils.DT_convertor)))

    # move the log file to the glab directory, suffixed with GNSS & prcodes
    code_txt = ''.join('_' + code for code in amc.dRTK['proc']['codes'])
    move(
        tmp_log,
        os.path.join(
            amc.dRTK['proc']['dir_glab'],
            'glab_proc_{gnss:s}{prcodes:s}.log'.format(gnss=''.join(
                amc.dRTK['proc']['gnss']),
                                                       prcodes=code_txt)))

    return amc.E_SUCCESS
Esempio n. 14
0
def main(argv):
    """
    creates a combined (daily) SBF file from hourly or six-hourly SBF files

    Looks in the target directory for hourly (session char A-X) or
    six-hourly (session char 1-4) SBF files and concatenates them, in
    sorted order, into the daily file (session char '0').
    """
    amc.cBaseName = colored(os.path.basename(__file__), 'yellow')
    cFuncName = colored(os.path.basename(__file__),
                        'yellow') + ' - ' + colored(
                            sys._getframe().f_code.co_name, 'green')

    # treat command line options
    dirSBF, overwrite, logLevels = treatCmdOpts(argv)

    # create logging for better debugging
    logger = amc.createLoggers(os.path.basename(__file__),
                               dir=dirSBF,
                               logLevels=logLevels)

    # change to the directory dirSBF if it exists
    workDir = os.getcwd()
    if dirSBF != '.':
        workDir = os.path.normpath(os.path.join(workDir, dirSBF))
    logger.info('{func:s}: working directory is {dir:s}'.format(func=cFuncName,
                                                                dir=workDir))

    if not os.path.exists(workDir):
        logger.error('{func:s}: directory {dir:s} does not exists.'.format(
            func=cFuncName, dir=colored(workDir, 'red')))
        sys.exit(amc.E_DIR_NOT_EXIST)
    else:
        os.chdir(workDir)
        logger.info('{func:s}: changed to directory {dir:s}'.format(
            func=cFuncName, dir=workDir))

    # find the files corresponding to hourly SBF logged data, else search for 6-hourly data
    hourlySBFs = sorted(glob.glob(r"????[0-9][0-9][0-9][A-X].[0-9][0-9]_"))
    sixHourlySBFs = sorted(glob.glob(r"????[0-9][0-9][0-9][1-4].[0-9][0-9]_"))

    # combine the files to create the daily SBF file
    logger.info(
        '{func:s}: combine SBF (six-)hourly files to daily SBF file'.format(
            func=cFuncName))
    if len(hourlySBFs) > 0:
        _combine_sbf_files(hourlySBFs, overwrite, logger, cFuncName)
    elif len(sixHourlySBFs) > 0:
        _combine_sbf_files(sixHourlySBFs, overwrite, logger, cFuncName)
    else:
        logger.info(
            '{func:s}: No SBF files found with syntax STATDOYS.YY_'.format(
                func=cFuncName))


def _combine_sbf_files(partSBFs, overwrite, logger, cFuncName):
    """Concatenate the sorted partial SBF files into the daily SBF file.

    The daily name is derived from the first partial file by replacing the
    session character (position 7) with '0'. An existing daily file is
    reused unless overwrite is set.
    """
    dailySBF = partSBFs[0][:7] + '0' + partSBFs[0][8:]

    if not os.path.isfile(dailySBF) or overwrite:
        logger.info('{func:s}: creating daily SBF file {daily:s}'.format(
            func=cFuncName, daily=colored(dailySBF, 'green')))

        # with-statements guarantee the handles are closed even on error
        with open(dailySBF, 'wb') as fDaily:
            for partSBF in partSBFs:
                with open(partSBF, 'rb') as fPart:
                    # copy in 64 KiB chunks to bound memory usage
                    shutil.copyfileobj(fPart, fDaily, 65536)
    else:
        logger.info('{func:s}: reusing daily SBF file {daily:s}'.format(
            func=cFuncName, daily=colored(dailySBF, 'green')))
Esempio n. 15
0
def main(argv):
    """Convert raw binary GNSS data (SBF or u-blox) to RINEX files.

    Parses the command line, validates the arguments, locates the
    conversion programs (sbf2rin / convbin) and dispatches to the
    appropriate converter based on the binary file type.
    """
    amc.cBaseName = colored(os.path.basename(__file__), 'yellow')
    cFuncName = colored(os.path.basename(__file__),
                        'yellow') + ' - ' + colored(
                            sys._getframe().f_code.co_name, 'green')

    # limit float precision
    encoder.FLOAT_REPR = lambda o: format(o, '.3f')

    # mapping of RINEX system characters to GNSS names
    gnss_systems = {
        'G': 'GPS',
        'R': 'Glonass',
        'E': 'Galileo',
        'S': 'SBAS',
        'C': 'Beidou',
        'J': 'QZSS',
        'I': 'IRNSS'
    }

    # treat command line options
    (rootDir, binFile, binType, rinexDir, rinexVersion, gnssSyst, rinexNaming,
     overwrite, logLevels) = treatCmdOpts(argv)

    # create logging for better debugging
    logger = amc.createLoggers(os.path.basename(__file__),
                               dir=rootDir,
                               logLevels=logLevels)

    # store cli parameters in the global info dict
    amc.dRTK = {
        'rootDir': rootDir,
        'binFile': binFile,
        'binType': binType,
        'rinexDir': rinexDir,
        'rinexVersion': rinexVersion,
        'gnssSyst': gnssSyst,
        'rinexNaming': rinexNaming
    }

    logger.info('{func:s}: arguments processed: amc.dRTK = {drtk!s}'.format(
        func=cFuncName, drtk=amc.dRTK))

    # check validity of passed arguments; exit on failure
    retCode = checkValidityArgs(logger=logger)
    if retCode != amc.E_SUCCESS:
        logger.error('{func:s}: Program exits with code {error:s}'.format(
            func=cFuncName, error=colored('{!s}'.format(retCode), 'red')))
        sys.exit(retCode)

    # locate the conversion programs SBF2RIN and CONVBIN
    amc.dRTK['bin2rnx'] = {
        'CONVBIN': location.locateProg('convbin', logger),
        'SBF2RIN': location.locateProg('sbf2rin', logger)
    }

    # dispatch on the binary file type
    logger.info(
        '{func:s}: convert binary file to rinex'.format(func=cFuncName))
    converter = sbf2rinex if amc.dRTK['binType'] == 'SBF' else ublox2rinex
    converter(logger=logger, dGnssSysts=gnss_systems)

    # report to the user
    logger.info('{func:s}: amc.dRTK =\n{json!s}'.format(func=cFuncName,
                                                        json=json.dumps(
                                                            amc.dRTK,
                                                            sort_keys=False,
                                                            indent=4)))
Esempio n. 16
0
def main(argv):
    """
    Merge the per-signal CSV observation files of one GNSS system into a
    single dataframe, compute the signal-wise difference between the two
    signals, and create the signal-difference plots per PRN.

    (The previous docstring described combining SBF files; it was
    copy-pasted from another script and did not match this code.)

    :param argv: command-line arguments, parsed by treatCmdOpts
    """
    amc.cBaseName = colored(os.path.basename(__file__), 'yellow')
    # coloured "script - function" prefix used in every log message
    cFuncName = colored(os.path.basename(__file__),
                        'yellow') + ' - ' + colored(
                            sys._getframe().f_code.co_name, 'green')

    # treat command line options
    dirCSV, filesCSV, GNSSsyst, GNSSsignals, movAvg, logLevels = treatCmdOpts(
        argv)

    # create logging for better debugging
    logger = amc.createLoggers(os.path.basename(__file__),
                               dir=dirCSV,
                               logLevels=logLevels)

    # check if arguments are accepted; returns the validated working directory
    workDir = checkExistenceArgs(cvsDir=dirCSV,
                                 csvFiles=filesCSV,
                                 logger=logger)

    # create module-level dictionary with the current info;
    # integer keys 0/1 hold the (signal, csv-file) pair per processed signal
    global dCSV
    dCSV = {}
    dCSV['dir'] = workDir
    dCSV['gnss'] = GNSSsyst
    for i, (signal, csv) in enumerate(zip(GNSSsignals, filesCSV)):
        dCSV[i] = {'signal': signal, 'file': csv}
    dCSV['movavg'] = movAvg
    logger.info('{func:s}: information:\n{dict!s}'.format(dict=dCSV,
                                                          func=cFuncName))

    # read and merge into a single dataframe
    dfObsMerged = mergeSignals(csvFiles=filesCSV, logger=logger)
    # create signalwise difference
    dfObsMerged = signalDifference(dfObs=dfObsMerged, logger=logger)
    amutils.logHeadTailDataFrame(df=dfObsMerged,
                                 dfName='dfObsMerged',
                                 callerName=cFuncName,
                                 logger=logger)

    # find max/min values for signals and for difference over all PRNs
    # NOTE(review): dCSV['SVs'] / dCSV[i]['SVs'] are read here but never set
    # in this function — presumably filled in by mergeSignals; verify there.
    # amutils.divround(..., 5, 2.5) presumably snaps the extrema to a grid
    # for plot axis limits — confirm its semantics in amutils.
    dCSV['dMax'] = amutils.divround((dfObsMerged[dCSV['SVs']].max()).max(), 5,
                                    2.5)
    dCSV['dMin'] = amutils.divround((dfObsMerged[dCSV['SVs']].min()).min(), 5,
                                    2.5)
    for i in [0, 1]:
        # per-signal columns are named "<SVs>-<signal>"
        stCols = dCSV[i]['SVs'] + '-{st:s}'.format(st=dCSV[i]['signal'])

        dCSV[i]['max'] = amutils.divround(dfObsMerged[stCols].max().max(), 5,
                                          2.5)
        dCSV[i]['min'] = amutils.divround(dfObsMerged[stCols].min().min(), 5,
                                          2.5)

    logger.info('{func:s}: information:\n{dict!s}'.format(dict=dCSV,
                                                          func=cFuncName))

    # create plots per prn
    signalDiffPlot.plotSignalDiff(dCsv=dCSV, dfSig=dfObsMerged, logger=logger)
# Esempio n. 17
# 0
def main(argv):
    """
    pyRnxProc processes RINEX data using 'amrnx2rtkp'

    Creates a rnx2rtkp configuration file from a template, runs the
    'rnx2rtkp' positioning program on the rover (and optional base)
    observation files, and reports the resulting position/statistics files.

    :param argv: command-line arguments, parsed by treatCmdOpts
    """
    amc.cBaseName = colored(os.path.basename(__file__), 'yellow')
    # coloured "script - function" prefix used in every log message
    cFuncName = colored(os.path.basename(__file__),
                        'yellow') + ' - ' + colored(
                            sys._getframe().f_code.co_name, 'green')

    # pandas options
    pd.options.display.max_rows = 40
    pd.options.display.max_columns = 36
    pd.options.display.width = 2000

    # limit float precision
    # NOTE(review): json.encoder.FLOAT_REPR is ignored on modern Python 3
    # (the C encoder does not consult it) — confirm this still has an effect
    encoder.FLOAT_REPR = lambda o: format(o, '.3f')

    # treat command line options
    rootDir, roverObs, posMode, freq, cutOff, baseObs, ephemeris, gnss, typeEphem, tropo, iono, template, overwrite, logLevels = treatCmdOpts(
        argv)

    # create logging for better debugging
    logger = amc.createLoggers(baseName=os.path.basename(__file__),
                               dir=rootDir,
                               logLevels=logLevels)

    # store cli parameters in the module-global settings dict
    amc.dRTK = {}
    amc.dRTK['rootDir'] = rootDir
    amc.dRTK['roverObs'] = roverObs
    amc.dRTK['posMode'] = posMode
    # map the CLI frequency key onto its rtkc.dFreq value
    amc.dRTK['freq'] = [v for k, v in rtkc.dFreq.items() if k == freq][0]
    amc.dRTK['cutOff'] = cutOff
    amc.dRTK['baseObs'] = baseObs
    amc.dRTK['ephems'] = ephemeris
    amc.dRTK['GNSS'] = gnss
    amc.dRTK['typeEphem'] = typeEphem
    amc.dRTK['Tropo'] = tropo
    amc.dRTK['Iono'] = iono
    amc.dRTK['template'] = template

    # check validity of passed arguments; exit on any failure
    retCode = checkValidityArgs(logger=logger)
    if retCode != amc.E_SUCCESS:
        logger.error('{func:s}: Program exits with code {error:s}'.format(
            func=cFuncName, error=colored('{!s}'.format(retCode), 'red')))
        sys.exit(retCode)

    # create the configuration file for the GNSSs to process
    # NOTE(review): amc.dRTK['rtkDir'] is read here but never set in this
    # function — presumably set by checkValidityArgs; verify there
    amc.dRTK['config'] = os.path.join(
        amc.dRTK['rtkDir'], '{rover:s}-{syst:s}.conf'.format(
            rover=amc.dRTK['roverObs'].split('.')[0],
            syst=amc.dRTK['GNSS'].upper()))

    logger.info(
        '{func:s}: Creating {syst:s} configuration file {conf:s}'.format(
            func=cFuncName,
            syst=colored(gnss, 'green'),
            conf=colored(amc.dRTK['config'], 'green')))

    # create the settings used for replacing the fields in the template file
    template_rnx2rtkp.create_rnx2rtkp_settings(overwrite=overwrite,
                                               logger=logger)

    # write the actual rnx2rtkp configuration file from the template
    template_rnx2rtkp.create_rnx2rtkp_template(cfgFile=amc.dRTK['config'],
                                               overwrite=overwrite,
                                               logger=logger)

    logger.info('{func:s}: amc.dRTK = \n{json!s}'.format(func=cFuncName,
                                                         json=json.dumps(
                                                             amc.dRTK,
                                                             sort_keys=False,
                                                             indent=4)))

    # locate the rnx2rtkp program used for execution
    exeRNX2RTKP = location.locateProg('rnx2rtkp', logger)

    # build the rnx2rtkp command line
    # NOTE(review): amc.dRTK['filePos'] / amc.dRTK['fileStat'] are read below
    # but never set in this function — presumably filled in by the
    # template_rnx2rtkp helpers; verify there
    cmdRNX2RTKP = '{prog:s} -k {conf:s} -o {pos:s} {rover:s} {base:s} {nav:s}'.format(
        prog=exeRNX2RTKP,
        conf=amc.dRTK['config'],
        pos=amc.dRTK['filePos'],
        rover=amc.dRTK['roverObs'],
        base=amc.dRTK['baseObs'],
        nav=' '.join(amc.dRTK['ephems']))

    logger.info('{func:s}: Running:\n{cmd:s}'.format(func=cFuncName,
                                                     cmd=colored(
                                                         cmdRNX2RTKP,
                                                         'green')))

    # run the program; show its stderr only when logging at INFO or above
    if amc.dLogLevel[logLevels[0]] >= amc.dLogLevel['INFO']:
        exeprogram.subProcessDisplayStdErr(cmd=cmdRNX2RTKP, verbose=True)
    else:
        exeprogram.subProcessDisplayStdErr(cmd=cmdRNX2RTKP, verbose=False)

    # inform user
    logger.info('{func:s}: Created position file: {pos:s}'.format(
        func=cFuncName, pos=colored(amc.dRTK['filePos'], 'blue')))
    logger.info('{func:s}: Created statistics file: {stat:s}'.format(
        func=cFuncName, stat=colored(amc.dRTK['fileStat'], 'blue')))
# Esempio n. 18
# 0
def main(argv):
    """
    Compare observed rise/set times of GNSS satellites (from a tabular RINEX
    observation file) against rise/set times predicted from NORAD TLEs, write
    the per-arc results to CSV files and plot the statistics.

    (The previous docstring described converting SBF/UBlox raw data to RINEX;
    it was copy-pasted from pyconvbin and did not match this code.)

    :param argv: command-line arguments, parsed by treatCmdOpts
    """
    amc.cBaseName = colored(os.path.basename(__file__), 'yellow')
    # coloured "script - function" prefix used in every log message
    cFuncName = colored(os.path.basename(__file__),
                        'yellow') + ' - ' + colored(
                            sys._getframe().f_code.co_name, 'green')

    # treat command line options
    rnx_dir, gnss, cutoff, multiplier, showPlots, logLevels = treatCmdOpts(
        argv)

    # create logging for better debugging; log_name is the temp log file,
    # moved next to the results at the end of this function
    logger, log_name = amc.createLoggers(os.path.basename(__file__),
                                         dir=rnx_dir,
                                         logLevels=logLevels)

    logger.info('{func:s}: arguments processed: {args!s}'.format(
        args=rnx_dir, func=cFuncName))

    # check validity of passed arguments; exit on any failure
    retCode = checkValidityArgs(dir_rnx=rnx_dir, logger=logger)
    if retCode != amc.E_SUCCESS:
        logger.error('{func:s}: Program exits with code {error:s}'.format(
            error=colored('{!s}'.format(retCode), 'red'), func=cFuncName))
        sys.exit(retCode)

    # store parameters
    amc.dRTK = {}
    # get the information from pyconvbin created json file
    # (fills amc.dRTK — e.g. 'rnx' and 'gfzrnxDir' keys used below)
    read_json(dir_rnx=rnx_dir, logger=logger)

    # load the requested OBSTAB file into a pandas dataframe
    df_obs = rnxobs_tabular.read_obs_tabular(gnss=gnss, logger=logger)
    # 'gap' column placeholder (filled elsewhere — see commented-out report)
    df_obs['gap'] = np.nan

    amutils.logHeadTailDataFrame(logger=logger,
                                 callerName=cFuncName,
                                 df=df_obs,
                                 dfName='df_obs')
    # get unique list of PRNs in dataframe
    prn_lst = sorted(df_obs['PRN'].unique())
    logger.info('{func:s}: observed PRNs are {prns!s} (#{total:d})'.format(
        prns=prn_lst, total=len(prn_lst), func=cFuncName))

    logger.info(
        '{func:s}; getting corresponding NORAD info'.format(func=cFuncName))

    # read the files galileo-NORAD-PRN.t and gps-ops-NORAD-PRN.t
    dfNORAD = tle_parser.read_norad2prn(logger=logger)
    amutils.logHeadTailDataFrame(logger=logger,
                                 callerName=cFuncName,
                                 df=dfNORAD,
                                 dfName='dfNORAD')

    # get the corresponding NORAD nrs for the given PRNs
    dNORADs = tle_parser.get_norad_numbers(prns=prn_lst,
                                           dfNorad=dfNORAD,
                                           logger=logger)
    logger.info('{func:s}: corresponding NORAD nrs (#{count:d}):'.format(
        count=len(dNORADs), func=cFuncName))

    # load a time scale and set RMA as Topo
    # loader = sf.Loader(dir_tle, expire=True)  # loads the needed data files into the tle dir
    ts = sf.load.timescale()
    # fixed Earth-station coordinates used for all rise/set predictions
    RMA = sf.Topos('50.8438 N', '4.3928 E')
    logger.info('{func:s}: Earth station RMA @ {topo!s}'.format(
        topo=colored(RMA, 'green'), func=cFuncName))
    # get the datetime that corresponds to yydoy
    date_yydoy = datetime.strptime(amc.dRTK['rnx']['times']['DT'],
                                   '%Y-%m-%d %H:%M:%S')
    # two-digit year + day-of-year, e.g. '19123'
    yydoy = date_yydoy.strftime('%y%j')
    logger.info(
        '{func:s}: calculating rise / set times for {date:s} ({yydoy:s})'.
        format(date=colored(date_yydoy.strftime('%d-%m-%Y'), 'green'),
               yydoy=yydoy,
               func=cFuncName))

    # prediction window: [t0, t1) covers the whole observation day (UTC)
    t0 = ts.utc(int(date_yydoy.strftime('%Y')), int(date_yydoy.strftime('%m')),
                int(date_yydoy.strftime('%d')))
    date_tomorrow = date_yydoy + timedelta(days=1)
    t1 = ts.utc(int(date_tomorrow.strftime('%Y')),
                int(date_tomorrow.strftime('%m')),
                int(date_tomorrow.strftime('%d')))

    # find corresponding TLE record for NORAD nrs
    df_tles = tle_parser.find_norad_tle_yydoy(dNorads=dNORADs,
                                              yydoy=yydoy,
                                              logger=logger)

    # list of rise / set times by observation / TLEs
    lst_obs_rise = []

    # find in observations and by TLEs what the riuse/set times are and number of observations
    for prn in prn_lst:
        # find rise & set times for each SV and store into list dt_obs_rise_set and dt_obs_set
        nom_interval, dt_obs_rise, dt_obs_set, obs_arc_count = rnxobs_tabular.rise_set_times(
            prn=prn, df_obstab=df_obs, nomint_multi=multiplier, logger=logger)

        # find rise:set times using TLEs
        dt_tle_rise, dt_tle_set, dt_tle_cul, tle_arc_count = tle_parser.tle_rise_set_times(
            prn=prn,
            df_tle=df_tles,
            marker=RMA,
            t0=t0,
            t1=t1,
            elev_min=cutoff,
            obs_int=nom_interval,
            logger=logger)

        # add to list for creating dataframe
        lst_obs_rise.append([
            dt_obs_rise, dt_obs_set, obs_arc_count, dt_tle_rise, dt_tle_set,
            dt_tle_cul, tle_arc_count
        ])

    # test to import in dataframe (one row per PRN)
    df_rise_set_tmp = pd.DataFrame(lst_obs_rise,
                                   columns=[
                                       'obs_rise', 'obs_set', 'obs_arc_count',
                                       'tle_rise', 'tle_set', 'tle_cul',
                                       'tle_arc_count'
                                   ],
                                   index=prn_lst)

    # find corresponding arcs between observation and predicted TLE
    max_arcs, df_rise_set = rnxobs_tabular.intersect_arcs(
        df_rs=df_rise_set_tmp, logger=logger)

    # inform user
    amutils.logHeadTailDataFrame(logger=logger,
                                 callerName=cFuncName,
                                 df=df_rise_set,
                                 dfName='df_rise_set')
    # write to csv file
    csvName = os.path.join(amc.dRTK['gfzrnxDir'],
                           amc.dRTK['rnx']['gnss'][gnss]['marker'],
                           'rise-set-dt.csv')
    df_rise_set.to_csv(csvName, index=None, header=True)

    # create a new dataframe that has PRNs as index and the max_arcs columns with number of obs / TLEs
    df_obs_arcs = rnxobs_tabular.rearrange_arcs(nr_arcs=max_arcs,
                                                df_rs=df_rise_set,
                                                logger=logger)
    # write to csv file
    csvName = os.path.join(amc.dRTK['gfzrnxDir'],
                           amc.dRTK['rnx']['gnss'][gnss]['marker'],
                           'obs_arcs.csv')
    df_obs_arcs.to_csv(csvName, index=None, header=True)

    # plot the statistics of observed vs TLE predicted
    plot_obstab.plot_rise_set_times(gnss=gnss,
                                    df_rs=df_rise_set,
                                    logger=logger,
                                    showplot=showPlots)
    plot_obstab.plot_rise_set_stats(gnss=gnss,
                                    df_arcs=df_obs_arcs,
                                    nr_arcs=max_arcs,
                                    logger=logger,
                                    showplot=showPlots)

    # amutils.logHeadTailDataFrame(logger=logger, callerName=cFuncName, df=df_obs[(df_obs['gap'] > 1.) | (df_obs['gap'].isna())], dfName='df_obs', head=50)

    # logger.info('{func:s}: amc.dRTK =\n{json!s}'.format(json=json.dumps(amc.dRTK, sort_keys=False, indent=4, default=amutils.DT_convertor), func=cFuncName))

    # copy temp log file to the YYDOY directory, then remove the temp copy
    copyfile(
        log_name,
        os.path.join(
            os.path.join(amc.dRTK['gfzrnxDir'],
                         amc.dRTK['rnx']['gnss'][gnss]['marker']),
            'pyobstab.log'))
    os.remove(log_name)