def main():

    parser = argparse.ArgumentParser(description='GNSS time series stacker')

    parser.add_argument('project', type=str, nargs=1, metavar='{project name}',
                        help="Specify the project name used to process the GAMIT solutions in Parallel.GAMIT.")

    parser.add_argument('sinex', type=str, nargs=1, metavar='{sinex file}',
                        help="SINEX file to update.")

    parser.add_argument('-d', '--date_filter', nargs='+', metavar='date',
                        help='Date range filter. Can be specified in yyyy/mm/dd, yyyy_doy or wwww-d format')

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")
    Config = pyOptions.ReadOptions("gnss_data.cfg")  # type: pyOptions.ReadOptions

    dates = [pyDate.Date(year=1980, doy=1), pyDate.Date(year=2100, doy=1)]
    try:
        dates = process_date(args.date_filter)
    except ValueError as e:
        parser.error(str(e))

    sinex = args.sinex[0]
    project = args.project[0]

    process_sinex(cnn, project, dates, sinex)
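
# process_date() is used throughout these examples but not shown here. Below is
# a minimal, standard-library-only sketch of how the three formats named in the
# --date_filter help (yyyy/mm/dd, yyyy_doy and wwww-d, i.e. GPS week-day) could
# be parsed. The real helper returns pyDate.Date objects and handles default
# bounds, so treat this as an illustrative stand-in, not the project's code;
# parse_one_date is a hypothetical name.
from datetime import date, timedelta

GPS_EPOCH = date(1980, 1, 6)  # day 0 of GPS week 0


def parse_one_date(token):
    if '/' in token:                                  # yyyy/mm/dd
        y, m, d = (int(x) for x in token.split('/'))
        return date(y, m, d)
    elif '_' in token:                                # yyyy_doy
        y, doy = (int(x) for x in token.split('_'))
        return date(y, 1, 1) + timedelta(days=doy - 1)
    elif '-' in token:                                # wwww-d (GPS week-day)
        week, dow = (int(x) for x in token.split('-'))
        return GPS_EPOCH + timedelta(weeks=week, days=dow)
    raise ValueError('unrecognized date format: ' + token)

# e.g. parse_one_date('2010/06/15') == parse_one_date('2010_166')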
Example #2
def main():

    parser = argparse.ArgumentParser(description='GNSS time series stacker')

    parser.add_argument('project', type=str, nargs=1, metavar='{project name}',
                        help="Specify the project name used to process the GAMIT solutions in Parallel.GAMIT.")
    parser.add_argument('-d', '--date_filter', nargs='+', metavar='date',
                        help='Date range filter. Can be specified in yyyy/mm/dd, yyyy_doy or wwww-d format')

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")
    Config = pyOptions.ReadOptions("gnss_data.cfg")  # type: pyOptions.ReadOptions

    # create the execution log

    dates = [pyDate.Date(year=1980, doy=1),
             pyDate.Date(year=2100, doy=1)]
    try:
        dates = process_date(args.date_filter)
    except ValueError as e:
        parser.error(str(e))

    # create folder for plots

    if not os.path.isdir(args.project[0]):
        os.makedirs(args.project[0])

    ########################################
    # load polyhedrons

    project = dra(cnn, args.project[0], dates)
def main():

    parser = argparse.ArgumentParser(description='GNSS time series stacker')

    parser.add_argument('project', type=str, nargs=1, metavar='{project name}',
                        help="Specify the project name used to process the GAMIT solutions in Parallel.GAMIT.")
    parser.add_argument('-np', '--noparallel', action='store_true', help="Execute command without parallelization.")

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")
    Config = pyOptions.ReadOptions("gnss_data.cfg")  # type: pyOptions.ReadOptions

    if not args.noparallel:
        JobServer = pyJobServer.JobServer(Config, run_node_test=False)  # type: pyJobServer.JobServer
    else:
        JobServer = None
        Config.run_parallel = False

    # create the execution log

    # load polyhedrons
    project = Project(cnn, args.project[0])

    # plot initial state
    tqdm.write(' -- Plotting initial ETMs (unaligned)...')

    #for etm in tqdm(project.etms, ncols=160):
    #    etm.plot(pngfile=args.project[0] + '/' + etm.NetworkCode + '.' + etm.StationCode + '_0.png', residuals=True)

    project.align_stack()

    tqdm.write(' -- Plotting intermediate step ETMs (aligned)...')
def check_rinex_stn(NetworkCode, StationCode, start_date, end_date):

    # load the connection
    try:
        # try to open a connection to the database
        cnn = dbConnection.Cnn("gnss_data.cfg")
        Config = pyOptions.ReadOptions("gnss_data.cfg")
    except Exception:
        return traceback.format_exc() + ' processing: (' + NetworkCode + '.' + StationCode \
                + ') using node ' + platform.node(), None

    try:
        Archive = pyArchiveStruct.RinexStruct(cnn)

        rs = cnn.query('SELECT * FROM rinex WHERE "NetworkCode" = \'%s\' AND '
                       '"StationCode" = \'%s\' AND '
                       '"ObservationSTime" BETWEEN \'%s\' AND \'%s\' '
                       'ORDER BY "ObservationSTime"' %
                       (NetworkCode, StationCode, start_date.yyyymmdd(),
                        end_date.yyyymmdd()))

        rnxtbl = rs.dictresult()
        missing_files = []

        for rnx in rnxtbl:

            crinex_path = os.path.join(
                Config.archive_path,
                Archive.build_rinex_path(NetworkCode,
                                         StationCode,
                                         rnx['ObservationYear'],
                                         rnx['ObservationDOY'],
                                         filename=rnx['Filename']))

            if not os.path.exists(crinex_path):
                # problem with file! does not appear to be in the archive

                Archive.remove_rinex(rnx)

                event = pyEvents.Event(
                    Description=
                    'A missing RINEX file was found during RINEX integrity check: '
                    + crinex_path +
                    '. It has been removed from the database. Consider rerunning PPP for this station.',
                    NetworkCode=NetworkCode,
                    StationCode=StationCode,
                    Year=rnx['ObservationYear'],
                    DOY=rnx['ObservationDOY'])

                cnn.insert_event(event)

                missing_files += [crinex_path]

        return None, missing_files

    except Exception:
        return traceback.format_exc() + ' processing: ' + NetworkCode + '.' + \
               StationCode + ' using node ' + platform.node(), None
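
# check_rinex_stn is written to run on worker nodes: it returns an
# (error, missing_files) tuple instead of raising. A hypothetical local call
# (station igs.braz and the 2016 date range are made up for illustration,
# and pyDate is assumed to be imported as in the snippets above):
if __name__ == '__main__':
    error, missing = check_rinex_stn('igs', 'braz',
                                     pyDate.Date(year=2016, doy=1),
                                     pyDate.Date(year=2016, doy=366))
    if error:
        print(error)                    # traceback string from the worker
    else:
        print('%i missing RINEX files' % len(missing))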
Example #5
def UpdateRecord(rinex, path):

    cnn = dbConnection.Cnn('gnss_data.cfg')
    Config = pyOptions.ReadOptions('gnss_data.cfg')

    try:
        rnxobj = pyRinex.ReadRinex(rinex['NetworkCode'], rinex['StationCode'],
                                   path)

        date = pyDate.Date(year=rinex['ObservationYear'],
                           doy=rinex['ObservationDOY'])

        if not verify_rinex_date_multiday(date, rnxobj, Config):
            cnn.begin_transac()
            # propagate the deletes
            cnn.query(
                'DELETE FROM gamit_soln WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' AND "Year" = %i AND "DOY" = %i'
                % (rinex['NetworkCode'], rinex['StationCode'],
                   rinex['ObservationYear'], rinex['ObservationDOY']))
            cnn.query(
                'DELETE FROM ppp_soln WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' AND "Year" = %i AND "DOY" = %i'
                % (rinex['NetworkCode'], rinex['StationCode'],
                   rinex['ObservationYear'], rinex['ObservationDOY']))
            cnn.query(
                'DELETE FROM rinex WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' AND "ObservationYear" = %i AND "ObservationDOY" = %i'
                % (rinex['NetworkCode'], rinex['StationCode'],
                   rinex['ObservationYear'], rinex['ObservationDOY']))
            cnn.commit_transac()

            return 'Multiday rinex file moved out of the archive: ' + rinex[
                'NetworkCode'] + '.' + rinex['StationCode'] + ' ' + str(
                    rinex['ObservationYear']) + ' ' + str(
                        rinex['ObservationDOY']
                    ) + ' using node ' + platform.node()
        else:
            cnn.update('rinex', rinex, Completion=rnxobj.completion)

    except pyRinex.pyRinexExceptionBadFile:
        # empty file or problem with crinex format, move out
        archive = pyArchiveStruct.RinexStruct(cnn)
        archive.remove_rinex(
            rinex,
            os.path.join(
                Config.repository_data_reject, 'bad_rinex/%i/%03i' %
                (rinex['ObservationYear'], rinex['ObservationDOY'])))

    except Exception:
        return traceback.format_exc(
        ) + ' processing rinex: ' + rinex['NetworkCode'] + '.' + rinex[
            'StationCode'] + ' ' + str(rinex['ObservationYear']) + ' ' + str(
                rinex['ObservationDOY']) + ' using node ' + platform.node()
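
# The DELETE cascade above interpolates values directly into the SQL text,
# which is what the legacy dbConnection.Cnn API expects. With a driver that
# supports bound parameters (psycopg2 is assumed below), the same cascade can
# be written without manual quoting; this is a sketch, not project code:
def delete_solutions(cursor, net, stn, year, doy):
    # table and column names cannot be bound as parameters, so they are
    # formatted in; the values themselves are passed as bound parameters
    for table, ycol, dcol in (('gamit_soln', 'Year', 'DOY'),
                              ('ppp_soln', 'Year', 'DOY'),
                              ('rinex', 'ObservationYear', 'ObservationDOY')):
        cursor.execute('DELETE FROM %s WHERE "NetworkCode" = %%s AND '
                       '"StationCode" = %%s AND "%s" = %%s AND "%s" = %%s'
                       % (table, ycol, dcol),
                       (net, stn, year, doy))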
Example #6
def main():
    parser = argparse.ArgumentParser(description='Script to synchronize AWS with OSU\'s archive database')

    parser.add_argument('date', type=str, nargs=1, help="Check the sync state for this given date. Format can be fyear or yyyy_ddd.")
    parser.add_argument('-mark', '--mark_uploaded', nargs='+', type=str, help="Pass net.stnm to mark these files as transferred to the AWS", metavar='{net.stnm}')
    parser.add_argument('-pull', '--pull_rinex', action='store_true', help="Get all the unsynchronized RINEX files in the local dir")
    parser.add_argument('-np', '--noparallel', action='store_true', help="Execute command without parallelization.")

    args = parser.parse_args()

    Config = pyOptions.ReadOptions("gnss_data.cfg")  # type: pyOptions.ReadOptions

    cnn = dbConnection.Cnn('gnss_data.cfg')

    # before attempting anything, check aliases!!
    print(' >> Checking GAMIT aliases')
    check_aliases(cnn)

    # initialize the PP job server
    if not args.noparallel:
        JobServer = pyJobServer.JobServer(Config, 1500)  # type: pyJobServer.JobServer
    else:
        JobServer = None
        Config.run_parallel = False

    dd = args.date[0]

    if '_' in dd:
        date = pyDate.Date(year=int(dd.split('_')[0]), doy=int(dd.split('_')[1]))
    elif dd == 'all':
        # run all dates (2004 to 2018)
        ts = range(pyDate.Date(year=2004, doy=20).mjd, pyDate.Date(year=2018, doy=87).mjd, 1)
        ts = [pyDate.Date(mjd=tts) for tts in ts]
        for date in ts:
            print(' >> Processing ' + str(date))
            pull_rinex(cnn, date, Config, JobServer)

        return
    else:
        date = pyDate.Date(fyear=float(dd))

    if args.pull_rinex:
        pull_rinex(cnn, date, Config, JobServer)

    if args.mark_uploaded is not None:
        print('Processing %i for day %s' % (len(args.mark_uploaded), date.yyyyddd()))
        # mark the list of stations as transferred to the AWS
        mark_uploaded(cnn, date, args.mark_uploaded)
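
# The 'all' branch above walks dates by Modified Julian Day. For reference, the
# same iteration can be reproduced with the standard library alone (MJD 0 is
# 1858-11-17); a self-contained sketch:
from datetime import date, timedelta

MJD_EPOCH = date(1858, 11, 17)


def date_range(d1, d2):
    """Yield every calendar date from d1 up to (but not including) d2."""
    for mjd in range((d1 - MJD_EPOCH).days, (d2 - MJD_EPOCH).days):
        yield MJD_EPOCH + timedelta(days=mjd)

# e.g. list(date_range(date(2004, 1, 20), date(2004, 1, 23))) yields
#      2004-01-20, 2004-01-21 and 2004-01-22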
Example #7
def main():

    Config = pyOptions.ReadOptions('gnss_data.cfg')
    JobServer = pyJobServer.JobServer(Config)  # type: pyJobServer.JobServer

    cnn = dbConnection.Cnn('gnss_data.cfg')

    archive = pyArchiveStruct.RinexStruct(cnn)

    rinex = cnn.query_float(
        'SELECT * FROM rinex WHERE "ObservationYear" <= 1995 ORDER BY "NetworkCode", '
        '"StationCode", "ObservationYear", "ObservationDOY"',
        as_dict=True)

    pbar = tqdm(desc='%-30s' % ' >> Processing rinex files',
                total=len(rinex),
                ncols=160)

    modules = ('os', 'pyRinex')
    callback = []

    for rnx in rinex:

        filename = archive.build_rinex_path(rnx['NetworkCode'],
                                            rnx['StationCode'],
                                            rnx['ObservationYear'],
                                            rnx['ObservationDOY'],
                                            filename=rnx['Filename'])

        arguments = (rnx['NetworkCode'], rnx['StationCode'],
                     Config.archive_path, filename)

        JobServer.SubmitJob(check_rinex, arguments, (), modules, callback,
                            callback_class(pbar), 'callbackfunc')

        if JobServer.process_callback:
            # handle any output messages during this batch
            callback = output_handle(callback)
            JobServer.process_callback = False

    tqdm.write(' >> waiting for jobs to finish...')
    JobServer.job_server.wait()
    tqdm.write(' >> Done.')

    # process the errors and the new stations
    output_handle(callback)

    pbar.close()
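
# callback_class and output_handle are referenced above but defined elsewhere
# in the project. Under the pp-style JobServer convention used here (each job
# delivers its return value to a named bound method), a plausible minimal
# version of the callback object looks like this; the field names are
# assumptions, not the project's actual implementation:
class callback_class(object):
    def __init__(self, pbar):
        self.pbar = pbar
        self.errors = None

    def callbackfunc(self, args):
        # keep the worker's return value for output_handle() and advance the
        # progress bar as each job completes
        self.errors = args
        self.pbar.update(1)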
    def __init__(self, cnn):

        self.cnn = cnn

        # read the structure definition table
        self.levels = cnn.query(
            'SELECT rinex_tank_struct.*, keys.* FROM rinex_tank_struct '
            'LEFT JOIN keys ON keys."KeyCode" = rinex_tank_struct."KeyCode" '
            'ORDER BY "Level"').dictresult()

        self.keys = cnn.query('SELECT * FROM keys').dictresult()
        # read the station and network tables
        self.networks = cnn.query('SELECT * FROM networks').dictresult()
        self.stations = cnn.query('SELECT * FROM stations').dictresult()

        self.Config = pyOptions.ReadOptions('gnss_data.cfg')
Example #9
def test_node(check_gamit_tables=None, software_sync=()):
    # test node: function that makes sure that all required packages and tools are present in the nodes
    import traceback
    import platform
    import os
    import sys

    def check_tab_file(tabfile, date):
        if os.path.isfile(tabfile):
            # file exists, check contents
            with open(tabfile, 'rt', encoding='utf-8', errors='ignore') as f:
                lines = f.readlines()

            tabdate = pyDate.Date(mjd=lines[-1].split()[0])
            if tabdate < date:
                return ' -- %s: Last entry in %s is %s but processing %s' \
                       % (platform.node(), tabfile, tabdate.yyyyddd(), date.yyyyddd())
            return []
        else:
            return ' -- %s: Could not find file %s' % (platform.node(),
                                                       tabfile)

    # BEFORE ANYTHING! check the python version
    version = sys.version_info
    # if version.major > 2 or version.minor < 7 or (version.micro < 12 and version.minor <= 7):
    #     return ' -- %s: Incorrect Python version: %i.%i.%i. Recommended version >= 2.7.12' \
    #            % (platform.node(), version.major, version.minor, version.micro)
    if version.major < 3:
        return ' -- %s: Incorrect Python version: %i.%i.%i. Recommended version >= 3.0.0' \
               % (platform.node(), version.major, version.minor, version.micro)

    # start importing the modules needed
    try:
        import shutil
        import datetime
        import time
        import uuid
        import traceback
        # deps
        import numpy
        import pg
        import dirsync
        # app
        import pyRinex
        import dbConnection
        import pyStationInfo
        import pyArchiveStruct
        import pyPPP
        import pyBrdc
        import pyOptions
        import Utils
        import pyOTL
        import pySp3
        import pyETM
        import pyRunWithRetry
        import pyDate

    except:
        return ' -- %s: Problem found while importing modules:\n%s' % (
            platform.node(), traceback.format_exc())

    try:
        if len(software_sync) > 0:
            # synchronize directories listed in the src and dst arguments
            from dirsync import sync

            for source_dest in software_sync:
                if isinstance(source_dest, str) and ',' in source_dest:
                    s = source_dest.split(',')[0].strip()
                    d = source_dest.split(',')[1].strip()

                    print('    -- Synchronizing %s -> %s' % (s, d))

                    updated = sync(s, d, 'sync', purge=True, create=True)

                    for f in updated:
                        print('    -- Updated %s' % f)

    except:
        return ' -- %s: Problem found while synchronizing software:\n%s ' % (
            platform.node(), traceback.format_exc())

    # continue with a test SQL connection
    # make sure that the gnss_data.cfg is present
    try:
        cnn = dbConnection.Cnn('gnss_data.cfg')

        q = cnn.query('SELECT count(*) FROM networks')

        if int(pg.version[0]) < 5:
            return ' -- %s: Incorrect PyGreSQL version!: %s' % (
                platform.node(), pg.version)

    except:
        return ' -- %s: Problem found while connecting to postgres:\n%s ' % (
            platform.node(), traceback.format_exc())

    # make sure we can create the production folder
    try:
        test_dir = os.path.join('production', 'node_test')
        if not os.path.exists(test_dir):
            os.makedirs(test_dir)
    except:
        return ' -- %s: Could not create production folder:\n%s ' % (
            platform.node(), traceback.format_exc())

    # test
    try:
        Config = pyOptions.ReadOptions('gnss_data.cfg')

        # check that all paths exist and can be reached
        if not os.path.exists(Config.archive_path):
            return ' -- %s: Could not reach archive path %s' % (
                platform.node(), Config.archive_path)

        if not os.path.exists(Config.repository):
            return ' -- %s: Could not reach repository path %s' % (
                platform.node(), Config.repository)

        # pick a test date to replace any possible parameters in the config file
        date = pyDate.Date(year=2010, doy=1)

    except:
        return ' -- %s: Problem while reading config file and/or testing archive access:\n%s' \
               % (platform.node(), traceback.format_exc())

    try:
        brdc = pyBrdc.GetBrdcOrbits(Config.brdc_path, date, test_dir)
    except:
        return ' -- %s: Problem while testing the broadcast ephemeris archive (%s) access:\n%s' \
               % (platform.node(), Config.brdc_path, traceback.format_exc())

    try:
        sp3 = pySp3.GetSp3Orbits(Config.sp3_path, date, Config.sp3types,
                                 test_dir)
    except:
        return ' -- %s: Problem while testing the sp3 orbits archive (%s) access:\n%s' \
               % (platform.node(), Config.sp3_path, traceback.format_exc())

    # check that all executables and GAMIT bins are in the path
    for prg in ('crz2rnx', 'crx2rnx', 'rnx2crx', 'rnx2crz', 'gfzrnx_lx',
                'svdiff', 'svpos', 'tform', 'sh_rx2apr', 'doy', 'sed',
                'compress'):
        with pyRunWithRetry.command('which ' + prg) as run:
            run.run()
            if run.stdout == '':
                return ' -- %s: Could not find path to %s' % (platform.node(),
                                                              prg)

    # check grdtab and ppp from the config file
    for opt in ('grdtab', 'otlgrid', 'ppp_exe'):
        path = Config.options[opt]
        if not os.path.isfile(path):
            return ' -- %s: Could not find %s in %s' % (platform.node(), opt,
                                                        path)

    ppp_path = Config.options['ppp_path']
    for f in ('gpsppp.stc', 'gpsppp.svb_gps_yrly', 'gpsppp.flt',
              'gpsppp.met'):
        if not os.path.isfile(os.path.join(ppp_path, f)):
            return ' -- %s: Could not find %s in %s' % (platform.node(), f,
                                                        ppp_path)

    for frame in Config.options['frames']:
        if not os.path.isfile(frame['atx']):
            return ' -- %s: Could not find atx in %s' % (platform.node(),
                                                         frame['atx'])

    if check_gamit_tables is not None:
        # check the gamit tables if not none

        date = check_gamit_tables[0]
        eop = check_gamit_tables[1]

        gg = os.path.expanduser('~/gg')
        tables = os.path.expanduser('~/gg/tables')

        if not os.path.isdir(gg):
            return ' -- %s: Could not find GAMIT installation dir (gg)' % (
                platform.node())

        elif not os.path.isdir(tables):
            return ' -- %s: Could not find GAMIT tables dir (~/gg/tables)' % (
                platform.node())

        # DDG: deprecated -> GAMIT now uses a single nbody file (binary)
        # for t_name in ('luntab.' + date.yyyy() + '.J2000',
        #               'soltab.' + date.yyyy() + '.J2000',
        #               'ut1.' + eop,
        #               # leapseconds
        #               # vmf1
        #               'pole.' + eop
        #               ):
        #    result = check_tab_file(os.path.join(tables, t_name), date)
        #    if result:
        #        return result

        # fes_cmc consistency

    return ' -- %s: Test passed!' % platform.node()
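
# test_node returns a one-line diagnostic string for every failure mode, so a
# local smoke test is just a matter of printing its result. The GAMIT-tables
# tuple in the commented line (processing date plus EOP table name) is a
# hypothetical example:
if __name__ == '__main__':
    print(test_node())
    # print(test_node(check_gamit_tables=(pyDate.Date(year=2010, doy=1), 'usno')))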
Example #10
def main():

    parser = argparse.ArgumentParser(
        description='Plot ETM for stations in the database')

    parser.add_argument(
        'stnlist',
        type=str,
        nargs='+',
        help=
        "List of networks/stations to plot given in [net].[stnm] format or just [stnm] "
        "(separated by spaces; if [stnm] is not unique in the database, all stations with that "
        "name will be plotted). Use keyword 'all' to plot all stations in all networks. "
        "If [net].all is given, all stations from network [net] will be plotted"
    )
    parser.add_argument('-nop',
                        '--no_plots',
                        action='store_true',
                        help="Do not produce plots",
                        default=False)
    parser.add_argument('-nom',
                        '--no_missing_data',
                        action='store_true',
                        help="Do not show missing days",
                        default=False)
    parser.add_argument('-nm',
                        '--no_model',
                        action='store_true',
                        help="Plot time series without fitting a model")
    parser.add_argument('-r',
                        '--residuals',
                        action='store_true',
                        help="Plot time series residuals",
                        default=False)
    parser.add_argument(
        '-dir',
        '--directory',
        type=str,
        help=
        "Directory to save the resulting PNG files. If not specified, assumed to be the "
        "production directory")
    parser.add_argument(
        '-json',
        '--json',
        type=int,
        help="Export ETM adjustment to JSON. Append '1' to export time "
        "series or append '0' to just output the ETM parameters.")
    parser.add_argument(
        '-gui',
        '--interactive',
        action='store_true',
        help="Interactive mode: allows to zoom and view the plot interactively"
    )
    parser.add_argument(
        '-win',
        '--time_window',
        nargs='+',
        metavar='interval',
        help=
        'Date range to window data. Can be specified in yyyy/mm/dd, yyyy.doy or as a single '
        'integer value (N) which shall be interpreted as last epoch-N')
    parser.add_argument(
        '-gamit',
        '--gamit',
        type=str,
        nargs=2,
        metavar='{project} {type}',
        help=
        "Plot the GAMIT time series. Specify project and type = \'stack\' to plot the time "
        "series after stacking or \'gamit\' to just plot the coordinates of the polyhedron"
    )

    args = parser.parse_args()

    Config = pyOptions.ReadOptions(
        "gnss_data.cfg")  # type: pyOptions.ReadOptions

    cnn = dbConnection.Cnn('gnss_data.cfg')

    if len(args.stnlist) == 1 and os.path.isfile(args.stnlist[0]):
        print(' >> Station list read from ' + args.stnlist[0])
        stnlist = [line.strip() for line in open(args.stnlist[0], 'r')]
        stnlist = [{
            'NetworkCode': item.split('.')[0],
            'StationCode': item.split('.')[1]
        } for item in stnlist]
    else:
        stnlist = Utils.process_stnlist(cnn, args.stnlist)

    #####################################
    # date filter

    dates = None
    if args.time_window is not None:
        if len(args.time_window) == 1:
            try:
                dates = process_date(args.time_window,
                                     missing_input=None,
                                     allow_days=False)
                dates = (dates[0].fyear, )
            except ValueError:
                # an integer value
                dates = float(args.time_window[0])
        else:
            dates = process_date(args.time_window)
            dates = (dates[0].fyear, dates[1].fyear)

    if stnlist:
        # do the thing
        if args.directory:
            if not os.path.exists(args.directory):
                os.mkdir(args.directory)
        else:
            if not os.path.exists('production'):
                os.mkdir('production')
            args.directory = 'production'

        for stn in stnlist:
            try:

                if args.gamit is None:
                    etm = pyETM.PPPETM(cnn, stn['NetworkCode'],
                                       stn['StationCode'], False,
                                       args.no_model)
                else:
                    if args.gamit[1] == 'stack':
                        polyhedrons = cnn.query_float(
                            'SELECT "X", "Y", "Z", "Year", "DOY" FROM stacks '
                            'WHERE "Project" = \'%s\' AND "NetworkCode" = \'%s\' AND '
                            '"StationCode" = \'%s\' '
                            'ORDER BY "Year", "DOY", "NetworkCode", "StationCode"'
                            % (args.gamit[0], stn['NetworkCode'],
                               stn['StationCode']))

                        soln = pyETM.GamitSoln(cnn, polyhedrons,
                                               stn['NetworkCode'],
                                               stn['StationCode'],
                                               args.gamit[0])

                        etm = pyETM.GamitETM(cnn,
                                             stn['NetworkCode'],
                                             stn['StationCode'],
                                             False,
                                             args.no_model,
                                             gamit_soln=soln)

                        # print ' > %5.2f %5.2f %5.2f %i %i' % \
                        #      (etm.factor[0]*1000, etm.factor[1]*1000, etm.factor[2]*1000, etm.soln.t.shape[0],
                        #       etm.soln.t.shape[0] - np.sum(np.logical_and(np.logical_and(etm.F[0], etm.F[1]), etm.F[2])))

                        # print two largest outliers
                        if etm.A is not None:
                            lres = np.sqrt(np.sum(np.square(etm.R), axis=0))
                            slres = lres[np.argsort(-lres)]

                            print(' >> Two largest residuals:')
                            for i in [0, 1]:
                                print(' %s %6.3f %6.3f %6.3f' %
                                      (pyDate.Date(mjd=etm.soln.mjd[
                                          lres == slres[i]]).yyyyddd(),
                                       etm.R[0, lres == slres[i]],
                                       etm.R[1, lres == slres[i]],
                                       etm.R[2, lres == slres[i]]))

                    elif args.gamit[1] == 'gamit':
                        etm = pyETM.GamitETM(cnn,
                                             stn['NetworkCode'],
                                             stn['StationCode'],
                                             False,
                                             args.no_model,
                                             project=args.gamit[0])
                    else:
                        parser.error('Invalid option for -gamit switch')
                        etm = None

                if args.interactive:
                    xfile = None
                else:
                    if args.gamit is None:
                        xfile = os.path.join(
                            args.directory,
                            '%s.%s_ppp' % (etm.NetworkCode, etm.StationCode))
                    else:
                        xfile = os.path.join(
                            args.directory,
                            '%s.%s_gamit' % (etm.NetworkCode, etm.StationCode))

                # leave pngfile empty to enter interactive mode (GUI)
                if not args.no_plots:
                    etm.plot(xfile + '.png',
                             t_win=dates,
                             residuals=args.residuals,
                             plot_missing=not args.no_missing_data)

                if args.json is not None:
                    with open(xfile + '.json', 'w') as f:
                        if args.json != 0:
                            json.dump(etm.todictionary(True),
                                      f,
                                      indent=4,
                                      sort_keys=False)
                        else:
                            json.dump(etm.todictionary(False),
                                      f,
                                      indent=4,
                                      sort_keys=False)

                print('Successfully plotted ' + stn['NetworkCode'] + '.' +
                      stn['StationCode'])

            except pyETM.pyETMException as e:
                print(str(e))

            except Exception:
                print('Error during processing of ' + stn['NetworkCode'] +
                      '.' + stn['StationCode'])
                print(traceback.format_exc())
Example #11
def main():

    parser = argparse.ArgumentParser(
        description=
        'Simple PPP python wrapper. Calculate a coordinate for a RINEX file. '
        'Output one line per file with stnm epoch x y z lat lon h')

    parser.add_argument(
        'files',
        type=str,
        nargs='+',
        help=
        "List of files, directories or wildcards to process. If directories are given, searches "
        "for .Z files. Individual files or wildcards can be either .Z or ??o. "
        "Eg: LocationRinex.py ./igm10010.10d.Z ./igm1002a.10o ./cs*.Z ./rinex2process/"
    )

    parser.add_argument(
        '-otl',
        '--ocean_loading',
        action='store_true',
        help="Apply ocean loading coefficients (obtained from grdtab).")

    parser.add_argument(
        '-ns',
        '--no_split',
        action='store_true',
        help="Do not split multiday RINEX files and obtain a single coordinate."
    )

    parser.add_argument(
        '-no_met',
        '--no_met',
        action='store_true',
        help=
        "Do not apply the GPT2 model to correct tropospheric delays (use GPT)."
    )

    parser.add_argument('-dec',
                        '--decimate',
                        action='store_true',
                        help="Decimate RINEX to 30 s if interval < 15.")

    parser.add_argument(
        '-rnx',
        '--load_rinex',
        action='store_true',
        help=
        "Fix RINEX using pyRinex, create a local copy (with session number+1) and exit. "
        "Do not run PPP.")

    parser.add_argument(
        '-ins',
        '--insert_sql',
        action='store_true',
        help=
        "Produce a SQL INSERT statement for this station including OTL and coordinates."
    )

    parser.add_argument('-find',
                        '--find',
                        action='store_true',
                        help="Find the matching station in the db using the "
                        "spatial location algorithm.")

    parser.add_argument(
        '-ne',
        '--no_erase',
        action='store_true',
        help="Do not erase PPP folder structure after completion.")

    parser.add_argument('-back',
                        '--backward_substitution',
                        action='store_true',
                        default=False,
                        help="Run PPP with backward substitution.")

    parser.add_argument(
        '-fix',
        '--fix_coordinate',
        nargs='+',
        metavar='coordinate_file | x y z',
        default=None,
        help=
        'Do not solve for station coordinates, fix station position as given in [coordinate_file] '
        'or provide a list of X Y Z coordinates. File should contain the '
        'apriori coordinates as a list starting with the station name '
        'and the X Y Z coordinates. For example: OSU1  595355.1776 -4856629.7091  4077991.9857'
    )

    parser.add_argument(
        '-st',
        '--solve_troposphere',
        type=int,
        nargs=1,
        default=105,
        choices=(1, 2, 3, 4, 5, 102, 103, 104, 105),
        help=
        'Solve for the tropospheric wet delay. Possible options are 1: do not solve, 2-5: solve '
        'without gradients (the number determines the random walk in mm/hr), +100: solve gradients.'
    )

    parser.add_argument('-elv',
                        '--elevation_mask',
                        type=int,
                        nargs=1,
                        default=10,
                        help='Elevation mask (default=10).')

    parser.add_argument(
        '-c',
        '--copy_results',
        type=str,
        nargs=1,
        metavar='storage_dir',
        help=
        'Copy the output files (.ses, .sum, .res, .pos) to [storage_dir]. A folder with the '
        'station name will be created in [storage_dir].')

    parser.add_argument(
        '-nocfg',
        '--no_config_file',
        type=str,
        nargs=3,
        metavar=('sp3_directory', 'sp3_types', 'brdc_directory'),
        help=
        'Do not attempt to open gnss_data.cfg. Append [sp3_directory], [sp3_types] '
        'and [brdc_directory] to access the precise and broadcast orbit files. Use the keywords '
        '$year, $doy, $month, $day, $gpsweek, $gpswkday to dynamically replace with the '
        'appropriate values (based on the date in the RINEX file). Grdtab and otl_grid should '
        'have the standard names if -otl is invoked and ppp should be in the PATH '
        '(with executable name = ppp).')

    args = parser.parse_args()

    Config = pyOptions.ReadOptions(
        'gnss_data.cfg')  # type: pyOptions.ReadOptions
    options = Config.options
    sp3types = Config.sp3types
    sp3altrn = Config.sp3altrn
    brdc_path = Config.brdc_path

    if args.no_config_file is not None:
        # options['ppp_path'] = ''
        # options['ppp_exe']  = 'ppp'
        # options['grdtab']   = 'grdtab'
        # options['otlgrid']  = 'otl.grid'
        options['sp3'] = args.no_config_file[0]

        sp3types = args.no_config_file[1].split(',')
        sp3altrn = ['jpl', 'jp2', 'jpr']
        # brdc_path = args.no_config_file[2]

    # flag to determine whether or not to erase the folder after processing
    erase = not args.no_erase

    rinex_list = []
    for xfile in args.files:
        if os.path.isdir(xfile):
            # add all d.Z files in folder
            rinex_list += glob.glob(os.path.join(xfile, '*d.Z'))
        elif os.path.isfile(xfile):
            # a single file
            rinex_list += [xfile]
        else:
            # a wildcard: expand
            rinex_list += glob.glob(xfile)

    for rinex in rinex_list:
        # read the station name from the file
        stnm = rinex.split('/')[-1][0:4]

        try:
            with pyRinex.ReadRinex('???',
                                   stnm,
                                   rinex,
                                   allow_multiday=args.no_split) as rinexinfo:
                rnx_days = [rinexinfo]
                if rinexinfo.multiday and not args.no_split:
                    print('Provided RINEX file is a multiday file!')
                    # rinex file is a multiday file, output all the solutions
                    rnx_days = rinexinfo.multiday_rnx_list

                for rnx in rnx_days:
                    execute_ppp(rnx, args, stnm, options, sp3types, sp3altrn,
                                brdc_path, erase, not args.no_met,
                                args.decimate, args.fix_coordinate,
                                args.solve_troposphere[0], args.copy_results,
                                args.backward_substitution,
                                args.elevation_mask[0])

        except pyRinex.pyRinexException as e:
            print(str(e))
            continue
def process_crinex_file(crinez, filename, data_rejected, data_retry):

    # create a uuid temporary folder in case we cannot read the year and doy from the file (and it gets rejected)
    reject_folder = os.path.join(data_rejected, str(uuid.uuid4()))

    try:
        cnn = dbConnection.Cnn("gnss_data.cfg")
        Config = pyOptions.ReadOptions("gnss_data.cfg")
        archive = pyArchiveStruct.RinexStruct(cnn)
        # apply local configuration (path to repo) in the executing node
        crinez = os.path.join(Config.repository_data_in, crinez)

    except Exception:

        return traceback.format_exc() + ' while opening the database to process file ' + \
               crinez + ' node ' + platform.node(), None

    # assume a default networkcode
    NetworkCode = 'rnx'
    # get the station code year and doy from the filename
    fileparts = archive.parse_crinex_filename(filename)

    if fileparts:
        StationCode = fileparts[0].lower()
        doy = int(fileparts[1])
        year = int(Utils.get_norm_year_str(fileparts[3]))
    else:
        event = pyEvents.Event(
            Description='Could not read the station code, year or doy for file '
            + crinez,
            EventType='error')
        error_handle(cnn,
                     event,
                     crinez,
                     reject_folder,
                     filename,
                     no_db_log=True)
        return event['Description'], None

    # we can now make better reject and retry folders
    reject_folder = os.path.join(
        data_rejected, '%reason%/' + Utils.get_norm_year_str(year) + '/' +
        Utils.get_norm_doy_str(doy))

    retry_folder = os.path.join(
        data_retry, '%reason%/' + Utils.get_norm_year_str(year) + '/' +
        Utils.get_norm_doy_str(doy))

    try:
        # main try except block
        with pyRinex.ReadRinex(NetworkCode, StationCode,
                               crinez) as rinexinfo:  # type: pyRinex.ReadRinex

            # STOP! see if rinexinfo is a multiday rinex file
            if not verify_rinex_multiday(cnn, rinexinfo, Config):
                # it was a multiday rinex; verify_rinex_multiday took care of it
                return None, None

            # DDG: we don't use otl coefficients because we need an approximated coordinate
            # we therefore just calculate the first coordinate without otl
            # NOTICE that we have to trust the information coming in the RINEX header (receiver type, antenna type, etc)
            # we don't have station info data! Still, good enough
            # the final PPP coordinate will be calculated by pyScanArchive on a different process

            # make sure that the file has appropriate APR coordinates in the header for PPP:
            # compute them from the broadcast orbits using auto_coord (sh_rx2apr)
            brdc = pyBrdc.GetBrdcOrbits(Config.brdc_path, rinexinfo.date,
                                        rinexinfo.rootdir)

            # inflate the chi**2 limit to make sure it will pass (even if we get a crappy coordinate)
            try:
                rinexinfo.auto_coord(brdc, chi_limit=1000)

                # normalize header to add the APR coordinate
                # empty dict since nothing extra to change (other than the APR coordinate)
                rinexinfo.normalize_header(dict())
            except pyRinex.pyRinexExceptionNoAutoCoord:
                # could not determine an autonomous coordinate, try PPP anyways. 50% chance it will work
                pass

            with pyPPP.RunPPP(
                    rinexinfo,
                    '',
                    Config.options,
                    Config.sp3types,
                    Config.sp3altrn,
                    rinexinfo.antOffset,
                    strict=False,
                    apply_met=False,
                    clock_interpolation=True) as ppp:  # type: pyPPP.RunPPP

                try:
                    ppp.exec_ppp()

                except pyPPP.pyRunPPPException as ePPP:

                    # inflate the chi**2 limit to make sure it will pass (even if we get a crappy coordinate)
                    # if coordinate is TOO bad it will get kicked off by the unreasonable geodetic height
                    try:
                        auto_coords_xyz, auto_coords_lla = rinexinfo.auto_coord(
                            brdc, chi_limit=1000)

                    except pyRinex.pyRinexExceptionNoAutoCoord as e:
                        # catch pyRinexExceptionNoAutoCoord and convert it into a pyRunPPPException

                        raise pyPPP.pyRunPPPException(
                            'Both PPP and sh_rx2apr failed to obtain a coordinate for %s.\n'
                            'The file has been moved into the rejection folder. '
                            'Summary PPP file and error (if exists) follows:\n%s\n\n'
                            'ERROR section:\n%s\npyRinex.auto_coord error follows:\n%s'
                            % (crinez.replace(Config.repository_data_in, ''),
                               ppp.summary, str(ePPP).strip(), str(e).strip()))

                    # DDG: this is correct - auto_coord returns a numpy array (calculated in ecef2lla),
                    # so ppp.lat = auto_coords_lla is consistent.
                    ppp.lat = auto_coords_lla[0]
                    ppp.lon = auto_coords_lla[1]
                    ppp.h = auto_coords_lla[2]
                    ppp.x = auto_coords_xyz[0]
                    ppp.y = auto_coords_xyz[1]
                    ppp.z = auto_coords_xyz[2]

                # check for unreasonable heights
                if ppp.h[0] > 9000 or ppp.h[0] < -400:
                    raise pyRinex.pyRinexException(
                        os.path.relpath(crinez, Config.repository_data_in) +
                        ' : unreasonable geodetic height (%.3f). '
                        'RINEX file will not enter the archive.' % (ppp.h[0]))

                Result, match, _ = ppp.verify_spatial_coherence(
                    cnn, StationCode)

                if Result:
                    # insert: there is only 1 match with the same StationCode.
                    rinexinfo.rename(NetworkCode=match[0]['NetworkCode'])
                    insert_data(cnn, archive, rinexinfo)
                else:

                    if len(match) == 1:
                        error = "%s matches the coordinate of %s.%s (distance = %8.3f m) but the filename " \
                                "indicates it is %s. Please verify that this file belongs to %s.%s, rename it and " \
                                "try again. The file was moved to the retry folder. " \
                                "Rename script and pSQL sentence follows:\n" \
                                "BASH# mv %s %s\n" \
                                "PSQL# INSERT INTO stations (\"NetworkCode\", \"StationCode\", \"auto_x\", " \
                                "\"auto_y\", \"auto_z\", \"lat\", \"lon\", \"height\") VALUES " \
                                "('???','%s', %12.3f, %12.3f, %12.3f, " \
                                "%10.6f, %10.6f, %8.3f)\n" \
                                % (os.path.relpath(crinez, Config.repository_data_in), match[0]['NetworkCode'],
                                   match[0]['StationCode'], float(match[0]['distance']), StationCode,
                                   match[0]['NetworkCode'], match[0]['StationCode'],
                                   os.path.join(retry_folder, filename),
                                   os.path.join(retry_folder, filename.replace(StationCode, match[0]['StationCode'])),
                                   StationCode, ppp.x, ppp.y, ppp.z, ppp.lat[0], ppp.lon[0], ppp.h[0])

                        raise pyPPP.pyRunPPPExceptionCoordConflict(error)

                    elif len(match) > 1:
                        # a number of things could have happened:
                        # 1) wrong station code, and more than one matching stations
                        #    (that do not match the station code, of course)
                        #    see rms.lhcl 2007 113 -> matches rms.igm0: 34.293 m, rms.igm1: 40.604 m, rms.byns: 4.819 m
                        # 2) no entry in the database for this solution -> add a lock and populate the exit args

                        # no match, but we have some candidates

                        error = "Solution for RINEX in repository (%s %s) did not match a unique station location " \
                                "(and station code) within 5 km. Possible cantidate(s): %s. This file has been moved " \
                                "to data_in_retry. pSQL sentence follows:\n" \
                                "PSQL# INSERT INTO stations (\"NetworkCode\", \"StationCode\", \"auto_x\", " \
                                "\"auto_y\", \"auto_z\", \"lat\", \"lon\", \"height\") VALUES " \
                                "('???','%s', %12.3f, %12.3f, %12.3f, %10.6f, %10.6f, %8.3f)\n" \
                                % (os.path.relpath(crinez, Config.repository_data_in), rinexinfo.date.yyyyddd(),
                                   ', '.join(['%s.%s: %.3f m' %
                                              (m['NetworkCode'], m['StationCode'], m['distance']) for m in match]),
                                   StationCode, ppp.x, ppp.y, ppp.z, ppp.lat[0], ppp.lon[0], ppp.h[0])

                        raise pyPPP.pyRunPPPExceptionCoordConflict(error)

                    else:
                        # only found a station removing the distance limit (could be thousands of km away!)

                        # The user will have to add the metadata to the database before the file can be added,
                        # but in principle no problem was detected by the process. This file will stay in this folder
                        # so that it gets analyzed again but a "lock" will be added to the file that will have to be
                        # removed before the service analyzes again.
                        # if the user inserted the station by then, it will get moved to the appropriate place.
                        # we return all the relevant metadata to ease the insert of the station in the database

                        otl = pyOTL.OceanLoading(StationCode,
                                                 Config.options['grdtab'],
                                                 Config.options['otlgrid'])
                        # use the ppp coordinates to calculate the otl
                        coeff = otl.calculate_otl_coeff(x=ppp.x,
                                                        y=ppp.y,
                                                        z=ppp.z)

                        # add the file to the locks table so that it doesn't get processed over and over
                        # this will be removed by user so that the file gets reprocessed once all the metadata is ready
                        cnn.insert('locks',
                                   filename=os.path.relpath(
                                       crinez, Config.repository_data_in))

                        return None, [
                            StationCode, (ppp.x, ppp.y, ppp.z), coeff,
                            (ppp.lat[0], ppp.lon[0], ppp.h[0]), crinez
                        ]

    except (pyRinex.pyRinexExceptionBadFile, pyRinex.pyRinexExceptionSingleEpoch, pyRinex.pyRinexExceptionNoAutoCoord) \
            as e:

        reject_folder = reject_folder.replace('%reason%', 'bad_rinex')

        # add more verbose output
        e.event['Description'] = e.event['Description'] + '\n' + os.path.relpath(crinez, Config.repository_data_in) + \
                                 ': (file moved to ' + reject_folder + ')'
        e.event['StationCode'] = StationCode
        e.event['NetworkCode'] = '???'
        e.event['Year'] = year
        e.event['DOY'] = doy
        # error, move the file to rejected folder
        error_handle(cnn, e.event, crinez, reject_folder, filename)

        return None, None

    except pyRinex.pyRinexException as e:

        retry_folder = retry_folder.replace('%reason%', 'rinex_issues')

        # add more verbose output
        e.event['Description'] = e.event['Description'] + '\n' + os.path.relpath(crinez, Config.repository_data_in) + \
                                 ': (file moved to ' + retry_folder + ')'
        e.event['StationCode'] = StationCode
        e.event['NetworkCode'] = '???'
        e.event['Year'] = year
        e.event['DOY'] = doy
        # error, move the file to rejected folder
        error_handle(cnn, e.event, crinez, retry_folder, filename)

        return None, None

    except pyPPP.pyRunPPPExceptionCoordConflict as e:

        retry_folder = retry_folder.replace('%reason%', 'coord_conflicts')

        e.event['Description'] = e.event['Description'].replace(
            '%reason%', 'coord_conflicts')

        e.event['StationCode'] = StationCode
        e.event['NetworkCode'] = '???'
        e.event['Year'] = year
        e.event['DOY'] = doy

        error_handle(cnn, e.event, crinez, retry_folder, filename)

        return None, None

    except pyPPP.pyRunPPPException as e:

        reject_folder = reject_folder.replace('%reason%', 'no_ppp_solution')

        e.event['StationCode'] = StationCode
        e.event['NetworkCode'] = '???'
        e.event['Year'] = year
        e.event['DOY'] = doy

        error_handle(cnn, e.event, crinez, reject_folder, filename)

        return None, None

    except pyStationInfo.pyStationInfoException as e:

        retry_folder = retry_folder.replace('%reason%',
                                            'station_info_exception')

        e.event['Description'] = e.event['Description'] + '. The file will stay in the repository and will be ' \
                                                          'processed during the next cycle of pyArchiveService.'
        e.event['StationCode'] = StationCode
        e.event['NetworkCode'] = '???'
        e.event['Year'] = year
        e.event['DOY'] = doy

        error_handle(cnn, e.event, crinez, retry_folder, filename)

        return None, None

    except pyOTL.pyOTLException as e:

        retry_folder = retry_folder.replace('%reason%', 'otl_exception')

        e.event['Description'] = e.event['Description'] + ' while calculating OTL for %s. ' \
                                                          'The file has been moved into the retry folder.' \
                                                          % os.path.relpath(crinez, Config.repository_data_in)
        e.event['StationCode'] = StationCode
        e.event['NetworkCode'] = '???'
        e.event['Year'] = year
        e.event['DOY'] = doy

        error_handle(cnn, e.event, crinez, retry_folder, filename)

        return None, None

    except pyProducts.pyProductsExceptionUnreasonableDate as e:
        # a bad RINEX file requested an orbit for a date < 0 or > now()
        reject_folder = reject_folder.replace('%reason%', 'bad_rinex')

        e.event['Description'] = e.event['Description'] + ' during %s. The file has been moved to the rejected ' \
                                                          'folder. Most likely bad RINEX header/data.' \
                                                          % os.path.relpath(crinez, Config.repository_data_in)
        e.event['StationCode'] = StationCode
        e.event['NetworkCode'] = '???'
        e.event['Year'] = year
        e.event['DOY'] = doy

        error_handle(cnn, e.event, crinez, reject_folder, filename)

        return None, None

    except pyProducts.pyProductsException as e:

    # if PPP fails and ArchiveService tries to run sh_rx2apr and it doesn't find the orbits, send to retry
        retry_folder = retry_folder.replace('%reason%', 'sp3_exception')

        e.event['Description'] = e.event['Description'] + ': %s. Check the brdc/sp3/clk files and also check that ' \
                                                          'the RINEX data is not corrupt.' \
                                                          % os.path.relpath(crinez, Config.repository_data_in)
        e.event['StationCode'] = StationCode
        e.event['NetworkCode'] = '???'
        e.event['Year'] = year
        e.event['DOY'] = doy

        error_handle(cnn, e.event, crinez, retry_folder, filename)

        return None, None

    except dbConnection.dbErrInsert as e:

        reject_folder = reject_folder.replace('%reason%', 'duplicate_insert')

        # insert duplicate values: two parallel processes tried to insert different filenames
        # (or the same) of the same station to the db: move it to the rejected folder.
        # The user might want to retry later. Log it in events
        # this case should be very rare
        event = pyEvents.Event(
            Description='Duplicate rinex insertion attempted while processing '
            + os.path.relpath(crinez, Config.repository_data_in) +
            ' : (file moved to rejected folder)\n' + str(e),
            EventType='warn',
            StationCode=StationCode,
            NetworkCode='???',
            Year=year,
            DOY=doy)

        error_handle(cnn, event, crinez, reject_folder, filename)

        return None, None

    except Exception:

        retry_folder = retry_folder.replace('%reason%', 'general_exception')

        event = pyEvents.Event(
            Description=traceback.format_exc() + ' processing: ' +
            os.path.relpath(crinez, Config.repository_data_in) + ' in node ' +
            platform.node() + ' (file moved to retry folder)',
            EventType='error')

        error_handle(cnn,
                     event,
                     crinez,
                     retry_folder,
                     filename,
                     no_db_log=True)

        return event['Description'], None

    return None, None
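
# Every path through process_crinex_file funnels into the same return
# convention: (error_message_or_None, new_station_payload_or_None). A
# hypothetical callback on the dispatching side could therefore look like the
# sketch below (crinex_callback is an assumed name):
from tqdm import tqdm


def crinex_callback(job_result):
    error, new_station = job_result
    if error is not None:
        tqdm.write(error)
    elif new_station is not None:
        # payload: StationCode, (x, y, z), OTL coefficients, (lat, lon, h), file
        stnm, xyz, otl_coeff, llh, crinez = new_station
        tqdm.write(' -- add %s to the stations table, then remove the lock on %s'
                   % (stnm, crinez))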
Example #13
def main():

    parser = argparse.ArgumentParser(description='GNSS time series stacker')

    parser.add_argument(
        'project',
        type=str,
        nargs=1,
        metavar='{project name}',
        help=
        "Specify the project name used to process the GAMIT solutions in Parallel.GAMIT."
    )
    parser.add_argument(
        '-max',
        '--max_iters',
        nargs=1,
        type=int,
        metavar='{max_iter}',
        help="Specify maximum number of iterations. Default is 4.")
    parser.add_argument(
        '-exclude',
        '--exclude_stations',
        nargs='+',
        type=str,
        metavar='{net.stnm}',
        help="Manually specify stations to remove from the stacking process.")
    parser.add_argument(
        '-use',
        '--use_stations',
        nargs='+',
        type=str,
        metavar='{net.stnm}',
        help="Manually specify stations to use for the stacking process.")
    parser.add_argument(
        '-dir',
        '--directory',
        type=str,
        help=
        "Directory to save the resulting PNG files. If not specified, assumed to be the "
        "production directory")
    parser.add_argument('-redo',
                        '--redo_stack',
                        action='store_true',
                        help="Delete the stack and redo it from scratch")
    parser.add_argument(
        '-itrf',
        '--itrf',
        nargs='+',
        help=
        "File with the ITRF periodic space parameters as given by Zuheir Altamimi and the list "
        "of stations to inherit the periodic terms from. Example: -itrf periodic.dat igs.braz "
        "rms.autf rms.igm1 rms.sant ...")
    parser.add_argument('-np',
                        '--noparallel',
                        action='store_true',
                        help="Execute command without parallelization.")

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")

    Config = pyOptions.ReadOptions(
        "gnss_data.cfg")  # type: pyOptions.ReadOptions

    JobServer = pyJobServer.JobServer(
        Config,
        run_parallel=not args.noparallel)  # type: pyJobServer.JobServer

    if args.max_iters:
        max_iters = int(args.max_iters[0])
    else:
        max_iters = 4
        print(' >> Defaulting to 4 iterations')

    if args.exclude_stations:
        exclude_stn = args.exclude_stations
    else:
        exclude_stn = []

    if args.use_stations:
        use_stn = args.use_stations
    else:
        use_stn = []

    # create folder for plots

    if args.directory:
        if not os.path.exists(args.directory):
            os.mkdir(args.directory)
    else:
        if not os.path.exists('production'):
            os.mkdir('production')
        args.directory = 'production'

    # load the ITRF dat file with the periodic space components
    if args.itrf:
        periodic = load_periodic_space(args.itrf[0])
    else:
        periodic = None

    # create the stack object
    stack = pyStack.Stack(cnn, args.project[0], args.redo_stack)

    for i in range(max_iters):
        # create the target polyhedrons based on iteration number (i == 0: PPP)

        target = calculate_etms(cnn, stack, JobServer, i)

        qbar = tqdm(total=len(stack),
                    ncols=160,
                    desc=' >> Aligning polyhedrons (%i of %i)' %
                    (i + 1, max_iters))

        # work on each polyhedron of the stack
        for j in range(len(stack)):

            qbar.update()

            if stack[j].date != target[j].date:
                # raise an error if dates don't agree!
                raise StandardError(
                    'Error processing %s: dates don\'t agree (target date %s)'
                    % (stack[j].date.yyyyddd(), target[j].date.yyyyddd()))
            else:
                if not stack[j].aligned:
                    # should only attempt to align a polyhedron that is unaligned
                    # do not set the polyhedron as aligned unless we are in the max iteration step
                    stack[j].align(target[j], i == max_iters - 1)
                    # write info to the screen
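                    # helmert[-3:] hold the translations (metres, printed in mm);
                    # helmert[-6:-3] hold the rotations, printed as-is in 1e-9 units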
                    qbar.write(
                        ' -- %s (%3i) %2i it: wrms: %4.1f T %5.1f %5.1f %5.1f '
                        'R (%5.1f %5.1f %5.1f)*1e-9' %
                        (stack[j].date.yyyyddd(), stack[j].stations_used,
                         stack[j].iterations, stack[j].wrms * 1000,
                         stack[j].helmert[-3] * 1000,
                         stack[j].helmert[-2] * 1000,
                         stack[j].helmert[-1] * 1000, stack[j].helmert[-6],
                         stack[j].helmert[-5], stack[j].helmert[-4]))

        qbar.close()

    # before removing common modes (or inheriting periodic terms), calculate ETMs with final aligned solutions
    calculate_etms(cnn, stack, JobServer, iterations=None)

    if args.redo_stack:
        # only apply common mode removal if redoing the stack
        if args.itrf:
            stack.remove_common_modes(periodic, args.itrf[1:])
        else:
            stack.remove_common_modes()

        # here, we also align the stack in velocity and coordinate space
        # TODO: include alignment to velocity and coordinate space

    # calculate the etms again, after removing or inheriting parameters
    calculate_etms(cnn, stack, JobServer, iterations=None)

    # save polyhedrons to the database
    stack.save()

    qbar = tqdm(total=len(stack.stations), ncols=160)

    for stn in stack.stations:
        # plot the ETMs
        qbar.update()
        qbar.postfix = '%s.%s' % (stn['NetworkCode'], stn['StationCode'])
        plot_etm(cnn, stack, stn, args.directory)

    qbar.close()
Exemple #14
0
def main():

    global wrms_n, wrms_e, wrms_u, project

    parser = argparse.ArgumentParser(
        description='GNSS daily repetitivities analysis (DRA)')

    parser.add_argument(
        'project',
        type=str,
        nargs=1,
        metavar='{project name}',
        help=
        "Specify the project name used to process the GAMIT solutions in Parallel.GAMIT."
    )

    parser.add_argument(
        '-d',
        '--date_filter',
        nargs='+',
        metavar='date',
        help=
        'Date range filter. Can be specified in yyyy/mm/dd yyyy_doy  wwww-d format'
    )

    parser.add_argument(
        '-w',
        '--plot_window',
        nargs='+',
        metavar='date',
        help=
        'Date window range to plot. Can be specified in yyyy/mm/dd yyyy_doy  wwww-d format'
    )
    parser.add_argument('-hist',
                        '--histogram',
                        action='store_true',
                        help="Plot a histogram of the daily repetitivities")
    parser.add_argument(
        '-v',
        '--verbose',
        action='store_true',
        help=
        "Provide additional information during the alignment process (for debugging purposes)"
    )

    parser.add_argument('-np',
                        '--noparallel',
                        action='store_true',
                        help="Execute command without parallelization.")

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")

    project = args.project[0]

    dates = [pyDate.Date(year=1980, doy=1), pyDate.Date(year=2100, doy=1)]

    Config = pyOptions.ReadOptions(
        "gnss_data.cfg")  # type: pyOptions.ReadOptions
    JobServer = pyJobServer.JobServer(
        Config,
        run_parallel=not args.noparallel)  # type: pyJobServer.JobServer

    try:
        dates = process_date(args.date_filter)
    except ValueError as e:
        parser.error(str(e))

    pdates = None
    if args.plot_window is not None:
        if len(args.plot_window) == 1:
            try:
                pdates = process_date(args.plot_window,
                                      missing_input=None,
                                      allow_days=False)
                pdates = (pdates[0].fyear, )
            except ValueError:
                # an integer value
                pdates = float(args.plot_window[0])
        else:
            pdates = process_date(args.plot_window)
            pdates = (pdates[0].fyear, pdates[1].fyear)
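    # pdates is now a tuple of one or two fractional years, or a bare float
    # from the except branch above (its interpretation is left to the plotting
    # code downstream)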

    # create folder for plots
    path_plot = project + '_dra'
    if not os.path.isdir(path_plot):
        os.makedirs(path_plot)

    ########################################
    # load polyhedrons
    # create the DRA object
    dra = DRA(cnn, args.project[0], dates[0], dates[1], args.verbose)

    dra.stack_dra()

    dra.to_json(project + '_dra.json')

    missing_doys = []

    tqdm.write(
        ' >> Daily repetitivity analysis done. DOYs with wrms > 8 mm are shown below:'
    )
    for d in dra:
        if d.wrms is not None:
            if d.wrms > 0.008:
                tqdm.write(
                    ' -- %s (%04i) %2i it wrms: %4.1f D-W: %5.3f IQR: %4.1f' %
                    (d.date.yyyyddd(), d.stations_used, d.iterations,
                     d.wrms * 1000, d.down_frac, d.iqr * 1000))

    qbar = tqdm(total=len(dra.stations),
                desc=' >> Computing DRAs',
                ncols=160,
                disable=None)

    modules = ('pyETM', 'dbConnection', 'traceback', 'io', 'numpy')
    JobServer.create_cluster(compute_dra,
                             progress_bar=qbar,
                             callback=callback_handler,
                             modules=modules)

    # plot each DRA
    for stn in dra.stations:
        NetworkCode = stn['NetworkCode']
        StationCode = stn['StationCode']

        ts = dra.get_station(NetworkCode, StationCode)
        JobServer.submit(ts, NetworkCode, StationCode, pdates, project,
                         args.histogram)

    JobServer.wait()
    qbar.close()
    JobServer.close_cluster()

    wrms_n = np.array(wrms_n)
    wrms_e = np.array(wrms_e)
    wrms_u = np.array(wrms_u)

    # plot the WRM of the DRA stack and number of stations
    f, axis = plt.subplots(nrows=3, ncols=2,
                           figsize=(15, 10))  # type: plt.subplots

    # WRMS
    ax = axis[0][0]
    ax.plot([t['fyear'] for t in dra.transformations[0]],
            [t['wrms'] * 1000 for t in dra.transformations[0]],
            'ob',
            markersize=2)
    ax.set_ylabel('WRMS [mm]')
    ax.grid(True)
    ax.set_ylim(0, 10)

    # station count
    ax = axis[1][0]
    ax.plot([t['fyear'] for t in dra.transformations[0]],
            [t['stations_used'] for t in dra.transformations[0]],
            'ob',
            markersize=2)
    ax.set_ylabel('Station count')
    ax.grid(True)

    # d-w fraction
    ax = axis[2][0]
    ax.plot([t['fyear'] for t in dra.transformations[0]],
            [t['downweighted_fraction'] for t in dra.transformations[0]],
            'ob',
            markersize=2)
    ax.set_ylabel('DW fraction')
    ax.grid(True)

    ax = axis[0][1]
    ax.hist(wrms_n[wrms_n <= 8], 40, alpha=0.75, facecolor='blue')
    ax.grid(True)
    ax.set_xlim(0, 8)
    ax.set_ylabel('# stations')
    ax.set_xlabel('WRMS misfit N [mm]')
    ax.set_title('Daily repetitivities NEU')

    ax = axis[1][1]
    ax.hist(wrms_e[wrms_e <= 8], 40, alpha=0.75, facecolor='blue')
    ax.grid(True)
    ax.set_xlim(0, 8)
    ax.set_ylabel('# stations')
    ax.set_xlabel('WRMS misfit E [mm]')

    ax = axis[2][1]
    ax.hist(wrms_u[wrms_u <= 10], 40, alpha=0.75, facecolor='blue')
    ax.grid(True)
    ax.set_xlim(0, 10)
    ax.set_ylabel('# stations')
    ax.set_xlabel('WRMS misfit U [mm]')

    f.suptitle('Daily repetitivity analysis for project %s\n'
               'Solutions with WRMS > 10 mm are not shown' % project,
               fontsize=12,
               family='monospace')
    plt.savefig(project + '_dra.png')
    plt.close()


import pyOptions
import argparse
import dbConnection
import pyStationInfo
import pyDate
import curses
from curses import panel
import curses.ascii
from curses.textpad import Textbox, rectangle
from collections import OrderedDict
import traceback

cnn = dbConnection.Cnn('gnss_data.cfg')
Config = pyOptions.ReadOptions("gnss_data.cfg")  # type: pyOptions.ReadOptions
selection = 0
stn = None
records = []


class _Textbox(Textbox):
    """
    curses.textpad.Textbox requires users to ^g on completion, which is sort
    of annoying for an interactive chat client such as this, which typically only
    requires an Enter. This subclass fixes this problem by signalling completion
    on Enter as well as ^g. Also, map <Backspace> key to ^h.
    """
    def __init__(self, win, insert_mode=False, text=''):
        Textbox.__init__(self, win, insert_mode)
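
    # Sketch of the behavior described in the docstring (assumed implementation;
    # the original snippet is truncated here): override do_command to map Enter
    # to ^g (terminate) and <Backspace> to ^h.
    def do_command(self, ch):
        if ch == curses.ascii.NL:                             # Enter finishes editing, like ^g
            ch = curses.ascii.BEL
        elif ch in (curses.ascii.DEL, curses.KEY_BACKSPACE):  # map <Backspace> to ^h
            ch = curses.ascii.BS
        return Textbox.do_command(self, ch)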
Exemple #16
0
            ' >> There were unhandled errors during this batch. Please check errors_pyScanArchive.log for details'
        )

    # print any errors that were encountered during parallel execution
    for msg in messages:
        if msg:
            file_append(
                'errors_amend.log',
                'ON ' + datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') +
                ' an unhandled error occurred:\n' + msg + '\n' +
                'END OF ERROR =================== \n\n')
    return []


cnn = dbConnection.Cnn('gnss_data.cfg')
options = pyOptions.ReadOptions('gnss_data.cfg')

JobServer = pyJobServer.JobServer(options)
archive = pyArchiveStruct.RinexStruct(cnn)

for table in ['rinex']:

    print(" >> Processing " + table)

    tbl = cnn.query('SELECT * FROM ' + table + ' WHERE "Completion" is null')

    rnx = tbl.dictresult()

    callback = []
    pbar = tqdm(total=len(rnx), ncols=80)
def main():

    # put connection and config in global variable to use inside callback_handle
    global cnn
    global repository_data_in

    # bind to the repository directory
    parser = argparse.ArgumentParser(
        description='Archive operations Main Program')

    parser.add_argument(
        '-purge',
        '--purge_locks',
        action='store_true',
        help=
        "Delete any network starting with '?' from the stations table and purge the contents of "
        "the locks table, deleting the associated files from data_in.")

    parser.add_argument('-np',
                        '--noparallel',
                        action='store_true',
                        help="Execute command without parallelization.")

    args = parser.parse_args()

    Config = pyOptions.ReadOptions('gnss_data.cfg')

    repository_data_in = Config.repository_data_in

    if not os.path.isdir(Config.repository):
        print "the provided repository path in gnss_data.cfg is not a folder"
        exit()

    JobServer = pyJobServer.JobServer(Config,
                                      run_parallel=not args.noparallel,
                                      software_sync=[
                                          Config.options['ppp_remote_local']
                                      ])  # type: pyJobServer.JobServer

    cnn = dbConnection.Cnn('gnss_data.cfg')
    # create the execution log
    cnn.insert('executions', script='ArchiveService.py')

    # set the data_xx directories
    data_in = os.path.join(Config.repository, 'data_in')
    data_in_retry = os.path.join(Config.repository, 'data_in_retry')
    data_reject = os.path.join(Config.repository, 'data_rejected')

    # create the subdirs if they do not exist
    if not os.path.isdir(data_in):
        os.makedirs(data_in)

    if not os.path.isdir(data_in_retry):
        os.makedirs(data_in_retry)

    if not os.path.isdir(data_reject):
        os.makedirs(data_reject)

    # delete any locks with a NetworkCode != '?%'
    cnn.query('delete from locks where "NetworkCode" not like \'?%\'')
    # get the locks to avoid reprocessing files that had no metadata in the database
    locks = cnn.query('SELECT * FROM locks')
    locks = locks.dictresult()

    if args.purge_locks:
        # first, delete all associated files
        for lock in tqdm(locks,
                         ncols=160,
                         unit='crz',
                         desc='%-30s' % ' >> Purging locks',
                         disable=None):
            try:
                os.remove(
                    os.path.join(Config.repository_data_in, lock['filename']))
            except Exception:
                sys.exc_clear()

        # purge the contents of stations. This will automatically purge the locks table
        cnn.query('delete from stations where "NetworkCode" like \'?%\'')
        # purge the networks
        cnn.query('delete from networks where "NetworkCode" like \'?%\'')
        # purge the locks already taken care of (just in case)
        cnn.query('delete from locks where "NetworkCode" not like \'?%\'')
        # get the locks to avoid reprocessing files that had no metadata in the database
        locks = cnn.query('SELECT * FROM locks')
        locks = locks.dictresult()

    # look for data in the data_in_retry and move it to data_in

    archive = pyArchiveStruct.RinexStruct(cnn)

    pbar = tqdm(desc='%-30s' % ' >> Scanning data_in_retry',
                ncols=160,
                unit='crz',
                disable=None)

    rfiles, paths, _ = archive.scan_archive_struct(data_in_retry, pbar)

    pbar.close()

    pbar = tqdm(desc='%-30s' % ' -- Moving files to data_in',
                total=len(rfiles),
                ncols=160,
                unit='crz',
                disable=None)

    for rfile, path in zip(rfiles, paths):

        dest_file = os.path.join(data_in, rfile)

        # move the file into the folder
        Utils.move(path, dest_file)

        pbar.set_postfix(crinez=rfile)
        pbar.update()

        # remove folder from data_in_retry (also removes the log file)
        try:
            # remove the log file that accompanies this Z file
            os.remove(path.replace('d.Z', '.log'))
        except Exception:
            sys.exc_clear()

    pbar.close()
    tqdm.write(' -- Cleaning data_in_retry')
    remove_empty_folders(data_in_retry)

    # take a break to allow the FS to finish the task
    time.sleep(5)

    files_path = []
    files_list = []

    pbar = tqdm(desc='%-30s' % ' >> Repository crinez scan',
                ncols=160,
                disable=None)

    rpaths, _, files = archive.scan_archive_struct(data_in, pbar)

    pbar.close()

    pbar = tqdm(desc='%-30s' % ' -- Checking the locks table',
                total=len(files),
                ncols=130,
                unit='crz',
                disable=None)

    # build a set once instead of re-scanning the locks list for every file
    locked_files = set(lock['filename'] for lock in locks)

    for file, path in zip(files, rpaths):
        pbar.set_postfix(crinez=file)
        pbar.update()
        if path not in locked_files:
            files_path.append(path)
            files_list.append(file)

    pbar.close()

    tqdm.write(" -- Found %i files in the lock list..." % (len(locks)))
    tqdm.write(
        " -- Found %i files (matching format [stnm][doy][s].[yy]d.Z) to process..."
        % (len(files_list)))

    pbar = tqdm(desc='%-30s' % ' >> Processing repository',
                total=len(files_path),
                ncols=160,
                unit='crz',
                disable=None)

    # dependency functions
    depfuncs = (check_rinex_timespan_int, write_error, error_handle,
                insert_data, verify_rinex_multiday)
    # import modules
    modules = ('pyRinex', 'pyArchiveStruct', 'pyOTL', 'pyPPP', 'pyStationInfo',
               'dbConnection', 'Utils', 'os', 'uuid', 'datetime', 'pyDate',
               'numpy', 'traceback', 'platform', 'pyBrdc', 'pyProducts',
               'pyOptions', 'pyEvents')
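
    # depfuncs and modules are shipped to the worker nodes so that
    # process_crinex_file can run remotely (pyJobServer presumably wraps a
    # dispy-style cluster)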

    JobServer.create_cluster(process_crinex_file,
                             depfuncs,
                             callback_handle,
                             pbar,
                             modules=modules)

    for file_to_process, sfile in zip(files_path, files_list):

        JobServer.submit(file_to_process, sfile, data_reject, data_in_retry)

    JobServer.wait()

    pbar.close()

    JobServer.close_cluster()

    print_archive_service_summary()

    # iterate to delete empty folders
    remove_empty_folders(data_in)
Exemple #18
0
def main():

    parser = argparse.ArgumentParser(description='GNSS time series stacker')

    parser.add_argument(
        'project',
        type=str,
        nargs=1,
        metavar='{project name}',
        help=
        "Specify the project name used to process the GAMIT solutions in Parallel.GAMIT."
    )
    parser.add_argument(
        'stack_name',
        type=str,
        nargs=1,
        metavar='{stack name}',
        help=
        "Specify a name for the stack: eg. itrf2014 or posgar07b. This name should be unique "
        "and cannot be repeated for any other solution project")
    parser.add_argument(
        '-max',
        '--max_iters',
        nargs=1,
        type=int,
        metavar='{max_iter}',
        help="Specify maximum number of iterations. Default is 4.")
    parser.add_argument(
        '-exclude',
        '--exclude_stations',
        nargs='+',
        type=str,
        metavar='{net.stnm}',
        help="Manually specify stations to remove from the stacking process.")
    parser.add_argument(
        '-use',
        '--use_stations',
        nargs='+',
        type=str,
        metavar='{net.stnm}',
        help="Manually specify stations to use for the stacking process.")
    parser.add_argument(
        '-dir',
        '--directory',
        type=str,
        help=
        "Directory to save the resulting PNG files. If not specified, assumed to be the "
        "production directory")
    parser.add_argument('-redo',
                        '--redo_stack',
                        action='store_true',
                        help="Delete the stack and redo it from scratch")
    parser.add_argument('-plot',
                        '--plot_stack_etms',
                        action='store_true',
                        default=False,
                        help="Plot the stack ETMs after computation is done")
    parser.add_argument(
        '-constrains',
        '--external_constrains',
        nargs='+',
        help=
        "File with external constrains parameters (position, velocity and periodic). These may be "
        "from a parent frame such as ITRF. "
        "Inheritance will occur with stations on the list whenever a parameter exists. "
        "Example: -constrains itrf14.txt "
        "Format is: net.stn x y z epoch vx vy vz sn_1y sn_6m cn_1y cn_6m se_1y se_6m ce_1y ce_6m "
        "su_1y su_6m cu_1y cu_6m ")
    parser.add_argument(
        '-d',
        '--date_end',
        nargs=1,
        metavar='date',
        help=
        'Limit the polyhedrons to the specified date. Can be in wwww-d, yyyy_ddd, yyyy/mm/dd '
        'or fyear format')
    parser.add_argument('-np',
                        '--noparallel',
                        action='store_true',
                        help="Execute command without parallelization.")

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")

    Config = pyOptions.ReadOptions(
        "gnss_data.cfg")  # type: pyOptions.ReadOptions

    JobServer = pyJobServer.JobServer(
        Config,
        run_parallel=not args.noparallel)  # type: pyJobServer.JobServer

    if args.max_iters:
        max_iters = int(args.max_iters[0])
    else:
        max_iters = 4
        print ' >> Defaulting to 4 iterations'

    if args.exclude_stations:
        exclude_stn = args.exclude_stations
    else:
        exclude_stn = []

    if args.use_stations:
        use_stn = args.use_stations
    else:
        use_stn = []

    dates = [Date(year=1980, doy=1), Date(datetime=datetime.now())]
    if args.date_end is not None:
        try:
            dates = process_date(
                [str(Date(year=1980, doy=1).fyear), args.date_end[0]])
        except ValueError as e:
            parser.error(str(e))

    # create folder for plots

    if args.directory:
        if not os.path.exists(args.directory):
            os.mkdir(args.directory)
    else:
        if not os.path.exists('production'):
            os.mkdir('production')
        args.directory = 'production'

    # load the ITRF dat file with the periodic space components
    if args.external_constrains:
        constrains = load_constrains(args.external_constrains[0])
    else:
        constrains = None

    # create the stack object
    stack = pyStack.Stack(cnn,
                          args.project[0],
                          args.stack_name[0],
                          args.redo_stack,
                          end_date=dates[1])

    # stack.align_spaces(frame_params)
    # stack.to_json('alignment.json')
    # exit()

    for i in range(max_iters):
        # create the target polyhedrons based on iteration number (i == 0: PPP)

        target = calculate_etms(cnn, stack, JobServer, i)

        qbar = tqdm(total=len(stack),
                    ncols=160,
                    desc=' >> Aligning polyhedrons (%i of %i)' %
                    (i + 1, max_iters))

        # work on each polyhedron of the stack
        for j in range(len(stack)):

            qbar.update()

            if not stack[j].aligned:
                # do not move this if up one level: to speed up the target polyhedron loading process, the target is
                # set to an empty list when the polyhedron is already aligned
                if stack[j].date != target[j].date:
                    # raise an error if dates don't agree!
                    raise StandardError(
                        'Error processing %s: dates don\'t agree (target date %s)'
                        % (stack[j].date.yyyyddd(), target[j].date.yyyyddd()))
                else:
                    # should only attempt to align a polyhedron that is unaligned
                    # do not set the polyhedron as aligned unless we are in the max iteration step
                    stack[j].align(target[j], i == max_iters - 1)
                    # write info to the screen
                    qbar.write(
                        ' -- %s (%3i) %2i it: wrms: %4.1f T %5.1f %5.1f %5.1f '
                        'R (%5.1f %5.1f %5.1f)*1e-9' %
                        (stack[j].date.yyyyddd(), stack[j].stations_used,
                         stack[j].iterations, stack[j].wrms * 1000,
                         stack[j].helmert[-3] * 1000,
                         stack[j].helmert[-2] * 1000,
                         stack[j].helmert[-1] * 1000, stack[j].helmert[-6],
                         stack[j].helmert[-5], stack[j].helmert[-4]))

        stack.transformations.append([poly.info() for poly in stack])
        qbar.close()

    if args.redo_stack:
        # before removing common modes (or inheriting periodic terms), calculate ETMs with final aligned solutions
        calculate_etms(cnn,
                       stack,
                       JobServer,
                       iterations=None,
                       create_target=False)
        # only apply common mode removal if redoing the stack
        if args.external_constrains:
            stack.remove_common_modes(constrains)
        else:
            stack.remove_common_modes()

        # here, we also align the stack in velocity and coordinate space
        stack.align_spaces(constrains)

    # calculate the etms again, after removing or inheriting parameters
    calculate_etms(cnn, stack, JobServer, iterations=None, create_target=False)

    # save the json with the information about the alignment
    stack.to_json(args.stack_name[0] + '_alignment.json')
    # save polyhedrons to the database
    stack.save()

    if args.plot_stack_etms:
        qbar = tqdm(total=len(stack.stations), ncols=160)
        for stn in stack.stations:
            # plot the ETMs
            qbar.update()
            qbar.postfix = '%s.%s' % (stn['NetworkCode'], stn['StationCode'])
            plot_etm(cnn, stack, stn, args.directory)

        qbar.close()
Exemple #19
0
def main():
    parser = argparse.ArgumentParser(description='Archive operations Main Program')

    parser.add_argument('-date', '--date_range', nargs='+', action=required_length(1, 2), metavar='date_start|date_end',
                        help="Date range to check given as [date_start] or [date_start] and [date_end]. "
                             "Allowed formats are yyyy.doy or yyyy/mm/dd..")

    parser.add_argument('-win', '--window', nargs=1, metavar='days', type=int,
                        help="Download data from a given time window determined by today - {days}.")
    try:
        args = parser.parse_args()

        Config = pyOptions.ReadOptions('gnss_data.cfg')

        dates = []

        try:
            if args.window:
                # today - ndays
                d = pyDate.Date(year=datetime.now().year, month=datetime.now().month, day=datetime.now().day)
                dates = [d-int(args.window[0]), d]
            else:
                dates = process_date(args.date_range)
        except ValueError as e:
            parser.error(str(e))

        if dates[0] < pyDate.Date(gpsWeek=650, gpsWeekDay=0):
            dates = [pyDate.Date(gpsWeek=650, gpsWeekDay=0),
                     pyDate.Date(year=datetime.now().year, month=datetime.now().month, day=datetime.now().day)]

        # go through the dates
        drange = np.arange(dates[0].mjd, dates[1].mjd, 1)
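        # np.arange is half-open, so the end date itself is excluded here
        # (a later example uses dates[1].mjd + 1 to include it)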

        pbar = tqdm(desc='%-30s' % ' >> Synchronizing orbit files', total=len(drange), ncols=160)

        # connect to ftp
        ftp = ftplib.FTP('198.118.242.40', 'Anonymous', '*****@*****.**')

        for date in [pyDate.Date(mjd=mjd) for mjd in drange]:

            sp3_archive = get_archive_path(Config.sp3_path, date)

            if not os.path.exists(sp3_archive):
                os.makedirs(sp3_archive)

            for repro in ['', '/repro2']:
                # try both in the repro and / folders
                folder = "/pub/gps/products/" + date.wwww() + repro
                try:
                    ftp.cwd(folder)
                except Exception:
                    # folder not present, skip
                    continue

                tqdm.write(' -- Changing folder to ' + folder)
                ftp_list = ftp.nlst()

                for orbit in Config.sp3types + Config.sp3altrn:

                    for ext in ['.sp3.Z', '.clk.Z', '.erp.Z']:
                        filename = orbit + date.wwwwd() + ext

                        if not os.path.isfile(os.path.join(sp3_archive, filename)) and filename in ftp_list:
                            tqdm.write('%-31s: %s' % (' -- trying to download ' + ext.replace('.Z', '').upper(), filename))
                            try:
                                ftp.retrbinary("RETR " + filename, open(os.path.join(sp3_archive, filename), 'wb').write)
                            except Exception:
                                continue

                    # now the eop file
                    filename = orbit + date.wwww() + '7.erp.Z'
                    if not os.path.isfile(os.path.join(sp3_archive, filename)) and filename in ftp_list:
                        tqdm.write('%-31s: %s' % (' -- trying to download EOP', filename))
                        try:
                            ftp.retrbinary("RETR " + filename, open(os.path.join(sp3_archive, filename), 'wb').write)
                        except Exception:
                            continue

            ###### now the brdc files #########

            try:
                folder = "/pub/gps/data/daily/%s/%s/%sn" % (date.yyyy(), date.ddd(), date.yyyy()[2:])
                tqdm.write(' -- Changing folder to ' + folder)
                ftp.cwd(folder)
                ftp_list = ftp.nlst()
            except Exception:
                continue

            brdc_archive = get_archive_path(Config.brdc_path, date)

            if not os.path.exists(brdc_archive):
                os.makedirs(brdc_archive)

            filename = 'brdc' + str(date.doy).zfill(3) + '0.' + str(date.year)[2:4] + 'n'

            if not os.path.isfile(os.path.join(brdc_archive, filename)) and filename + '.Z' in ftp_list:
                tqdm.write('%-31s: %s' % (' -- trying to download BRDC', filename))
                try:
                    ftp.retrbinary("RETR " + filename + '.Z', open(os.path.join(brdc_archive, filename + '.Z'), 'wb').write)
                    # decompress file
                    cmd = pyRunWithRetry.RunCommand('gunzip -f ' + os.path.join(brdc_archive, filename + '.Z'), 15)
                    cmd.run_shell()
                except Exception:
                    continue

            pbar.set_postfix(gpsWeek='%i %i' % (date.gpsWeek, date.gpsWeekDay))
            pbar.update()

        pbar.close()
        ftp.quit()

    except argparse.ArgumentTypeError as e:
        parser.error(str(e))
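

# Note: required_length(1, 2) used above is defined elsewhere in this codebase.
# A minimal sketch of such an argparse action (assumed implementation; the
# ArgumentTypeError it raises is what the except clause around parse_args
# catches above):
def required_length(nmin, nmax):
    class RequiredLength(argparse.Action):
        def __call__(self, parser, namespace, values, option_string=None):
            if not nmin <= len(values) <= nmax:
                raise argparse.ArgumentTypeError(
                    'argument "%s" requires between %i and %i arguments' %
                    (self.dest, nmin, nmax))
            setattr(namespace, self.dest, values)
    return RequiredLength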
def main():

    parser = argparse.ArgumentParser(
        description=
        'Database integrity tools, metadata check and fixing tools program')

    parser.add_argument(
        'stnlist',
        type=str,
        nargs='+',
        metavar='all|net.stnm',
        help=
        "List of networks/stations to process given in [net].[stnm] format or just [stnm] "
        "(separated by spaces; if [stnm] is not unique in the database, all stations with that "
        "name will be processed). Use keyword 'all' to process all stations in the database. "
        "If [net].all is given, all stations from network [net] will be processed. "
        "Alternatevily, a file with the station list can be provided.")

    parser.add_argument(
        '-d',
        '--date_filter',
        nargs='+',
        metavar='date',
        help='Date range filter for all operations. '
        'Can be specified in wwww-d, yyyy_ddd, yyyy/mm/dd or fyear format')

    parser.add_argument(
        '-rinex',
        '--check_rinex',
        choices=['fix', 'report'],
        type=str,
        nargs=1,
        help=
        'Check the RINEX integrity of the archive-database by verifying that the RINEX files '
        'reported in the rinex table exist in the archive. If argument = "fix" and a RINEX file '
        'does not exist, remove the record; associated PPP and gamit_soln records are also '
        'deleted. If argument = "report", just list the missing files.')

    parser.add_argument(
        '-rnx_count',
        '--rinex_count',
        action='store_true',
        help='Count the total number of RINEX files (unique station-days) '
        'per day for a given time interval.')

    parser.add_argument(
        '-stnr',
        '--station_info_rinex',
        action='store_true',
        help=
        'Check that the receiver serial number in the rinex headers agrees with the station info '
        'receiver serial number.')

    parser.add_argument(
        '-stns',
        '--station_info_solutions',
        action='store_true',
        help='Check that the PPP hash values match the station info hash.')

    parser.add_argument(
        '-stnp',
        '--station_info_proposed',
        metavar='ignore_days',
        const=0,
        type=int,
        nargs='?',
        help=
        'Output a proposed station.info using the RINEX metadata. Optional: specify [ignore_days] '
        'to ignore station.info records spanning <= [ignore_days] days.')

    parser.add_argument(
        '-stnc',
        '--station_info_check',
        action='store_true',
        help=
        'Check the consistency of the station information records in the database. Date range '
        'does not apply. Also, check that the RINEX files fall within a valid station information '
        'record.')

    parser.add_argument(
        '-g',
        '--data_gaps',
        metavar='ignore_days',
        const=0,
        type=int,
        nargs='?',
        help=
        'Check the RINEX files in the database and look for gaps (missing days). '
        'Optional: [ignore_days] sets the smallest gap (in days) to display.')

    parser.add_argument('-gg',
                        '--graphical_gaps',
                        action='store_true',
                        help='Visually output RINEX gaps for stations.')

    parser.add_argument(
        '-sc',
        '--spatial_coherence',
        choices=['exclude', 'delete', 'noop'],
        type=str,
        nargs=1,
        help=
        'Check that the RINEX files correspond to the stations they are linked to using their '
        'PPP coordinate. If keyword [exclude] or [delete], add the PPP solution to the excluded '
        'table or delete the PPP solution. If [noop], then only report but do not '
        'exclude or delete.')

    parser.add_argument(
        '-print',
        '--print_stninfo',
        choices=['long', 'short'],
        type=str,
        nargs=1,
        help=
        'Output the station info to stdout. [long] outputs the full line of the station info. '
        '[short] outputs a short version (better for screen visualization).')

    parser.add_argument(
        '-r',
        '--rename',
        metavar='net.stnm',
        nargs=1,
        help=
        "Takes the data from the station list and renames (merges) it to net.stnm. "
        "It also changes the rinex filenames in the archive to match those of the new destiny "
        "station. Only a single station can be given as the origin and destiny. "
        "Limit the date range using the -d option.")

    parser.add_argument(
        '-es',
        '--exclude_solutions',
        metavar=('{start_date}', '{end_date}'),
        nargs=2,
        help=
        'Exclude PPP solutions (by adding them to the excluded table) between {start_date} '
        'and {end_date}')

    parser.add_argument(
        '-del',
        '--delete_rinex',
        metavar=('{start_date}', '{end_date}', '{completion}'),
        nargs=3,
        help='Delete RINEX files (and associated solutions, PPP and GAMIT) '
        'from the archive between {start_date} and {end_date} with completion <= {completion}. '
        'Completion ranges from 1.0 to 0.0. Use 1.0 to delete all data. '
        'Operation cannot be undone!')

    parser.add_argument('-np',
                        '--noparallel',
                        action='store_true',
                        help="Execute command without parallelization.")

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")  # type: dbConnection.Cnn

    # create the execution log
    cnn.insert('executions', script='pyIntegrityCheck.py')

    Config = pyOptions.ReadOptions(
        "gnss_data.cfg")  # type: pyOptions.ReadOptions

    stnlist = Utils.process_stnlist(cnn, args.stnlist)

    JobServer = pyJobServer.JobServer(
        Config,
        run_parallel=not args.noparallel)  # type: pyJobServer.JobServer

    #####################################
    # date filter

    dates = [pyDate.Date(year=1980, doy=1), pyDate.Date(year=2100, doy=1)]
    try:
        dates = process_date(args.date_filter)
    except ValueError as e:
        parser.error(str(e))

    #####################################

    if args.check_rinex:
        CheckRinexIntegrity(cnn, Config, stnlist, dates[0], dates[1],
                            args.check_rinex[0], JobServer)

    #####################################

    if args.rinex_count:
        RinexCount(cnn, stnlist, dates[0], dates[1])

    #####################################

    if args.station_info_rinex:
        StnInfoRinexIntegrity(cnn, stnlist, dates[0], dates[1], JobServer)

    #####################################

    if args.station_info_check:
        StnInfoCheck(cnn, stnlist, Config)

    #####################################

    if args.data_gaps is not None:
        GetStnGaps(cnn, stnlist, args.data_gaps, dates[0], dates[1])

    if args.graphical_gaps:
        VisualizeGaps(cnn, stnlist, dates[0], dates[1])

    #####################################

    if args.spatial_coherence is not None:
        CheckSpatialCoherence(cnn, stnlist, dates[0], dates[1])

    #####################################

    if args.exclude_solutions is not None:
        try:
            dates = process_date(args.exclude_solutions)
        except ValueError as e:
            parser.error(str(e))

        ExcludeSolutions(cnn, stnlist, dates[0], dates[1])

    #####################################

    if args.print_stninfo is not None:
        if args.print_stninfo[0] == 'short':
            PrintStationInfo(cnn, stnlist, True)
        elif args.print_stninfo[0] == 'long':
            PrintStationInfo(cnn, stnlist, False)
        else:
            parser.error(
                'Argument for print_stninfo has to be either long or short')

    #####################################

    if args.station_info_proposed is not None:
        for stn in stnlist:
            stninfo = pyStationInfo.StationInfo(cnn,
                                                stn['NetworkCode'],
                                                stn['StationCode'],
                                                allow_empty=True)
            sys.stdout.write(
                stninfo.rinex_based_stninfo(args.station_info_proposed))

    #####################################

    if args.delete_rinex is not None:
        try:
            dates = process_date(args.delete_rinex[0:2])
        except ValueError as e:
            parser.error(str(e))

        DeleteRinex(cnn, stnlist, dates[0], dates[1],
                    float(args.delete_rinex[2]))

    #####################################

    if args.rename:
        if len(stnlist) > 1:
            parser.error(
                'Only a single station should be given for the origin station')

        if '.' not in args.rename[0]:
            parser.error('Format for destination station should be net.stnm')
        else:
            DestNetworkCode = args.rename[0].split('.')[0]
            DestStationCode = args.rename[0].split('.')[1]

            RenameStation(cnn, stnlist[0]['NetworkCode'],
                          stnlist[0]['StationCode'], DestNetworkCode,
                          DestStationCode, dates[0], dates[1],
                          Config.archive_path)

    JobServer.close_cluster()
Exemple #21
0
def main():

    parser = argparse.ArgumentParser(
        description=
        'Simple PPP python wrapper. Calculate a coordinate for a RINEX file. '
        'Output one line per file with stnm epoch x y z lat lon h')

    parser.add_argument(
        'files',
        type=str,
        nargs='+',
        help=
        "List of files, directories or wildcards to process. If directories are given, searches "
        "for .Z files. Individual files or wildcards can be either .Z or ??o. "
        "Eg: LocationRinex.py ./igm10010.10d.Z ./igm1002a.10o ./cs*.Z ./rinex2process/"
    )

    parser.add_argument(
        '-otl',
        '--ocean_loading',
        action='store_true',
        help="Apply ocean loading coefficients (obtained from grdtab).")

    parser.add_argument(
        '-ns',
        '--no_split',
        action='store_true',
        help="Do not split multiday RINEX files and obtain a single coordinate."
    )

    parser.add_argument(
        '-no_met',
        '--no_met',
        action='store_true',
        help=
        "Do not apply the GPT2 model to correct tropospheric delays (use GPT)."
    )

    parser.add_argument('-dec',
                        '--decimate',
                        action='store_true',
                        help="Decimate RINEX to 30 s if interval < 15.")

    parser.add_argument(
        '-rnx',
        '--load_rinex',
        action='store_true',
        help=
        "Fix RINEX using pyRinex, create a local copy (with session number+1) and exit. "
        "Do not run PPP.")

    parser.add_argument(
        '-ins',
        '--insert_sql',
        action='store_true',
        help=
        "Produce a SQL INSERT statement for this station including OTL and coordinates."
    )

    parser.add_argument('-find',
                        '--find',
                        action='store_true',
                        help="Find the matching station in the db using the "
                        "spatial location algorithm.")

    parser.add_argument(
        '-ne',
        '--no_erase',
        action='store_true',
        help="Do not erase PPP folder structure after completion.")

    parser.add_argument(
        '-nocfg',
        '--no_config_file',
        type=str,
        nargs=3,
        metavar=('sp3_directory', 'sp3_types', 'brdc_directory'),
        help=
        'Do not attempt to open gnss_data.cfg. Append [sp3_directory], [sp3_types] '
        'and [brdc_directory] to access the precise and broadcast orbit files. Use the keywords '
        '$year, $doy, $month, $day, $gpsweek, $gpswkday to dynamically replace with the '
        'appropriate values (based on the date in the RINEX file). Grdtab and otl_grid should '
        'have the standard names if -otl is invoked and ppp should be in the PATH '
        '(with executable name = ppp).')

    args = parser.parse_args()

    options = {}
    if args.no_config_file is not None:
        options['ppp_path'] = ''
        options['ppp_exe'] = 'ppp'
        options['grdtab'] = 'grdtab'
        options['otlgrid'] = 'otl.grid'
        options['sp3'] = args.no_config_file[0]
        sp3types = args.no_config_file[1].split(',')
        sp3altrn = ['jpl', 'jp2', 'jpr']
        brdc_path = args.no_config_file[2]
    else:
        Config = pyOptions.ReadOptions(
            'gnss_data.cfg')  # type: pyOptions.ReadOptions
        options = Config.options
        sp3types = Config.sp3types
        sp3altrn = Config.sp3altrn
        brdc_path = Config.brdc_path

    # flag to determine whether the PPP folder should be erased after completion
    if args.no_erase:
        erase = False
    else:
        erase = True

    rinex = []
    for xfile in args.files:
        if os.path.isdir(xfile):
            # add all d.Z files in folder
            rinex = rinex + glob.glob(os.path.join(xfile, '*d.Z'))
        elif os.path.isfile(xfile):
            # a single file
            rinex = rinex + [xfile]
        else:
            # a wildcard: expand
            rinex = rinex + glob.glob(xfile)

    for rnx_file in rinex:
        # read the station name from the file
        stnm = os.path.basename(rnx_file)[0:4]

        try:
            with pyRinex.ReadRinex('???',
                                   stnm,
                                   rnx_file,
                                   allow_multiday=args.no_split) as rinexinfo:
                if rinexinfo.multiday and not args.no_split:
                    print 'Provided RINEX file is a multiday file!'
                    # rinex file is a multiday file, output all the solutions
                    for rnx in rinexinfo.multiday_rnx_list:
                        execute_ppp(rnx, args, stnm, options, sp3types,
                                    sp3altrn, brdc_path, erase,
                                    not args.no_met, args.decimate)
                else:
                    execute_ppp(rinexinfo, args, stnm, options, sp3types,
                                sp3altrn, brdc_path, erase, not args.no_met,
                                args.decimate)

        except pyRinex.pyRinexException as e:
            print str(e)
            continue
Exemple #22
0
def main():
    parser = argparse.ArgumentParser(
        description='Archive operations Main Program')

    parser.add_argument(
        'stnlist',
        type=str,
        nargs='+',
        metavar='all|net.stnm',
        help=
        "List of networks/stations to process given in [net].[stnm] format or just [stnm] "
        "(separated by spaces; if [stnm] is not unique in the database, all stations with that "
        "name will be processed). Use keyword 'all' to process all stations in the database. "
        "If [net].all is given, all stations from network [net] will be processed. "
        "Alternatevily, a file with the station list can be provided.")

    parser.add_argument(
        '-date',
        '--date_range',
        nargs='+',
        action=required_length(1, 2),
        metavar='date_start|date_end',
        help="Date range to check given as [date_start] or [date_start] "
        "and [date_end]. Allowed formats are yyyy.doy or yyyy/mm/dd..")
    parser.add_argument(
        '-win',
        '--window',
        nargs=1,
        metavar='days',
        type=int,
        help=
        "Download data from a given time window determined by today - {days}.")

    try:
        args = parser.parse_args()

        cnn = dbConnection.Cnn('gnss_data.cfg')
        Config = pyOptions.ReadOptions('gnss_data.cfg')

        stnlist = Utils.process_stnlist(cnn, args.stnlist)

        print ' >> Selected station list:'
        print_columns([
            item['NetworkCode'] + '.' + item['StationCode'] for item in stnlist
        ])

        dates = []

        try:
            if args.window:
                # today - ndays
                d = pyDate.Date(year=datetime.now().year,
                                month=datetime.now().month,
                                day=datetime.now().day)
                dates = [d - int(args.window[0]), d]
            else:
                dates = process_date(args.date_range)

        except ValueError as e:
            parser.error(str(e))

        if dates[0] < pyDate.Date(gpsWeek=650, gpsWeekDay=0):
            dates = [
                pyDate.Date(gpsWeek=650, gpsWeekDay=0),
                pyDate.Date(year=datetime.now().year,
                            month=datetime.now().month,
                            day=datetime.now().day)
            ]

        # go through the dates
        drange = np.arange(dates[0].mjd, dates[1].mjd + 1, 1)

        download_data(cnn, Config, stnlist, drange)

    except argparse.ArgumentTypeError as e:
        parser.error(str(e))
Exemple #23
0
def main():
    parser = argparse.ArgumentParser(
        description='Archive operations Main Program')

    parser.add_argument(
        '-date',
        '--date_range',
        nargs='+',
        action=required_length(1, 2),
        metavar='date_start|date_end',
        help=
        "Date range to check given as [date_start] or [date_start] and [date_end]. "
        "Allowed formats are yyyy.doy or yyyy/mm/dd..")

    parser.add_argument(
        '-win',
        '--window',
        nargs=1,
        metavar='days',
        type=int,
        help=
        "Download data from a given time window determined by today - {days}.")

    try:
        args = parser.parse_args()
        Config = pyOptions.ReadOptions('gnss_data.cfg')

        dates = ()
        now = datetime.now()
        try:
            if args.window:
                # today - ndays
                d = pyDate.Date(year=now.year, month=now.month, day=now.day)
                dates = (d - int(args.window[0]), d)
            else:
                dates = process_date(args.date_range)
        except ValueError as e:
            parser.error(str(e))

        if dates[0] < pyDate.Date(gpsWeek=650, gpsWeekDay=0):
            dates = (pyDate.Date(gpsWeek=650, gpsWeekDay=0),
                     pyDate.Date(year=now.year, month=now.month, day=now.day))

        # go through the dates
        drange = np.arange(dates[0].mjd, dates[1].mjd, 1)

        pbar = tqdm(desc='%-30s' % ' >> Synchronizing orbit files',
                    total=len(drange),
                    ncols=160)

        # connect to ftp
        ftp = ftplib.FTP_TLS(FTP_HOST, FTP_USER, FTP_PASS)

        ftp.set_pasv(True)
        ftp.prot_p()

        def downloadIfMissing(ftp_list, ftp_filename, local_filename,
                              local_dir, desc):
            mark_path = os.path.join(local_dir, local_filename)
            if not os.path.isfile(mark_path) and ftp_filename in ftp_list:
                tqdm.write('%-31s: %s' %
                           (' -- trying to download ' + desc, ftp_filename))
                down_path = os.path.join(local_dir, ftp_filename)
                with open(down_path, 'wb') as f:
                    ftp.retrbinary("RETR " + ftp_filename, f.write)
                return True

        def get_archive_path(archive, date):
            return archive.replace('$year',     str(date.year)) \
                          .replace('$doy',      str(date.doy).zfill(3)) \
                          .replace('$gpsweek',  str(date.gpsWeek).zfill(4)) \
                          .replace('$gpswkday', str(date.gpsWeekDay))
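
        # e.g. a template '/archive/$year/$doy' with 2010 doy 1 expands to
        # '/archive/2010/001' (paths here are illustrative; the actual template
        # comes from gnss_data.cfg)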

        for date in (pyDate.Date(mjd=mjd) for mjd in drange):

            sp3_archive = get_archive_path(Config.sp3_path, date)

            if not os.path.exists(sp3_archive):
                os.makedirs(sp3_archive)

            for repro in ('', '/repro2'):
                # try both in the repro and / folders
                folder = "/pub/gps/products/" + date.wwww() + repro
                try:
                    tqdm.write(' -- Changing folder to ' + folder)
                    ftp.cwd(folder)
                    ftp_list = set(ftp.nlst())
                except Exception:
                    # folder not present, skip
                    continue

                for orbit in Config.sp3types + Config.sp3altrn:
                    for ext in ('.sp3', '.clk', '.erp', '7.erp'):
                        try:
                            filename = orbit + date.wwwwd() + ext + '.Z'
                            downloadIfMissing(
                                ftp_list, filename, filename, sp3_archive,
                                'EOP' if ext == '7.erp' else ext.upper())
                        except Exception:
                            pass

            ###### now the brdc files #########

            try:
                folder = "/pub/gps/data/daily/%s/%s/%sn" % (
                    date.yyyy(), date.ddd(), date.yyyy()[2:])
                tqdm.write(' -- Changing folder to ' + folder)
                ftp.cwd(folder)
                ftp_list = set(ftp.nlst())
            except Exception:
                continue

            brdc_archive = get_archive_path(Config.brdc_path, date)

            if not os.path.exists(brdc_archive):
                os.makedirs(brdc_archive)

            try:
                filename = 'brdc%s0.%sn' % (str(
                    date.doy).zfill(3), str(date.year)[2:4])
                ftp_filename = filename + '.Z'
                if downloadIfMissing(ftp_list, ftp_filename, filename,
                                     brdc_archive, 'BRDC'):
                    # decompress file
                    pyRunWithRetry.RunCommand(
                        'gunzip -f ' + os.path.join(brdc_archive, ftp_filename),
                        15).run_shell()
            except Exception:
                continue

            pbar.set_postfix(gpsWeek='%i %i' % (date.gpsWeek, date.gpsWeekDay))
            pbar.update()

        pbar.close()
        ftp.quit()

    except argparse.ArgumentTypeError as e:
        parser.error(str(e))
Exemple #24
0
def rinex_task(NetworkCode, StationCode, date, ObservationFYear, metafile):

    from pyRunWithRetry import RunCommandWithRetryExeception

    etm_err = ''

    # local directory as destination for the CRINEZ files
    pwd_rinex = '/media/leleiona/aws-files/' + date.yyyy() + '/' + date.ddd()

    stop_no_aprs = False

    Config = pyOptions.ReadOptions(
        "gnss_data.cfg")  # type: pyOptions.ReadOptions

    cnn = dbConnection.Cnn('gnss_data.cfg')

    # create Archive object

    Archive = pyArchiveStruct.RinexStruct(
        cnn)  # type: pyArchiveStruct.RinexStruct

    ArchiveFile = Archive.build_rinex_path(NetworkCode, StationCode, date.year,
                                           date.doy)
    ArchiveFile = os.path.join(Config.archive_path, ArchiveFile)

    # check for a station alias in the alias table
    alias = cnn.query(
        'SELECT * FROM stationalias WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\''
        % (NetworkCode, StationCode))

    sa = alias.dictresult()

    if len(sa) > 0:
        StationAlias = sa[0]['StationAlias']
    else:
        StationAlias = StationCode

    # create the crinez filename
    filename = StationAlias + date.ddd() + '0.' + date.yyyy()[2:4] + 'd.Z'

    try:
        # create the ETM object
        etm = pyETM.PPPETM(cnn, NetworkCode, StationCode)

        # get APRs and sigmas (only in NEU)
        Apr, sigmas, Window, source = etm.get_xyz_s(date.year, date.doy)

        del etm

    except pyETM.pyETMException as e:
        # no PPP solutions available! MUST have aprs in the last run, try that
        stop_no_aprs = True
        Window = None
        source = ''
        etm_err = str(e)

    except Exception:

        return (None, None,
                traceback.format_exc() + ' processing ' + NetworkCode + '.' +
                StationCode + ' using node ' + platform.node() + '\n',
                metafile)

    # find this station-day in the latest global run APRs
    apr_tbl = cnn.query(
        'SELECT * FROM apr_coords WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' '
        'AND "Year" = %i AND "DOY" = %i' %
        (NetworkCode, StationCode, date.year, date.doy))
    apr = apr_tbl.dictresult()

    if len(apr) > 0:
        # APRs exist for this station-day
        # replace PPP ETM with Mike's APRs
        Apr = numpy.array(
            ([float(apr[0]['x'])], [float(apr[0]['y'])], [float(apr[0]['z'])]))
        sigmas = numpy.array(([float(apr[0]['sn'])], [float(apr[0]['se'])],
                              [float(apr[0]['su'])]))
        source = apr[0]['ReferenceFrame'] + ' APRs'

    elif len(apr) == 0 and stop_no_aprs:

        return (
            None, None,
            '%s.%s has no PPP solutions and no APRs from last global run for %s! '
            'Specific error from pyETM.PPPETM (if available) was: %s' %
            (NetworkCode, StationCode, date.yyyyddd(), etm_err), metafile)

    # convert sigmas to XYZ
    stn = cnn.query(
        'SELECT * FROM stations WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\''
        % (NetworkCode, StationCode))
    stn = stn.dictresult()
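    # sigmas_neu2xyz presumably rotates the NEU sigmas into the ECEF XYZ frame
    # using the station latitude and longitude (helper not shown in this snippet)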
    sigmas_xyz = sigmas_neu2xyz(stn[0]['lat'], stn[0]['lon'], sigmas)

    # write the station.info
    # if no station info comes back for this date, program will print a message and continue with next
    try:

        # Use the argument 'ObservationFYear' to get the exact RINEX session fyear without opening the file
        rnx_date = pyDate.Date(fyear=float(ObservationFYear))
        stninfo = pyStationInfo.StationInfo(cnn,
                                            NetworkCode,
                                            StationCode,
                                            rnx_date,
                                            h_tolerance=12)

    except pyStationInfo.pyStationInfoException:
        # if no metadata, warn user and continue
        return (
            None, None,
            '%s.%s has no metadata available for this date, but a RINEX exists!'
            % (NetworkCode, StationCode), metafile)

    # check if RINEX file needs to be synced or not.
    aws_sync = cnn.query(
        'SELECT * FROM aws_sync WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' '
        'AND "Year" = %i AND "DOY" = %i' %
        (NetworkCode, StationCode, date.year, date.doy)).dictresult()

    if len(aws_sync) == 0:

        # only copy RINEX if not synced!
        # open the RINEX file in the Archive
        try:
            with pyRinex.ReadRinex(NetworkCode, StationCode, ArchiveFile,
                                   False) as Rinex:  # type: pyRinex.ReadRinex

                Rnx = None

                if Rinex.multiday:
                    # find the rinex that corresponds to the session being processed, if multiday
                    for rinex in Rinex.multiday_rnx_list:
                        if rinex.date == date:
                            Rnx = rinex
                            break

                    if Rnx is None:
                        return (
                            None, None,
                            '%s.%s was a multiday file and date %8.3f could not be found!'
                            % (NetworkCode, StationCode, date.fyear), metafile)
                else:
                    # if Rinex is not multiday
                    Rnx = Rinex

                # normalize, window and decimate the session (this applies to
                # both the multiday and single-day cases)
                Rnx.purge_comments()
                Rnx.normalize_header(stninfo)
                Rnx.rename(filename)

                if Window is not None:
                    window_rinex(Rnx, Window)
                    # note: %m (month) in the date part, not %M (minutes)
                    source += ' windowed from/to ' + Window.datetime().strftime('%Y-%m-%d %H:%M:%S')

                # before creating the local copy, decimate the file to 30 s
                Rnx.decimate(30)
                Rnx.compress_local_copyto(pwd_rinex)

        except (pyRinex.pyRinexException, RunCommandWithRetryExeception):
            # new behavior: if error occurs while generating RINEX, then copy raw file from the archive
            try:
                shutil.copy(ArchiveFile, os.path.join(pwd_rinex, filename))

            except Exception:
                return (None, None, traceback.format_exc() + ' processing ' +
                        NetworkCode + '.' + StationCode + ' using node ' +
                        platform.node() + '\n', metafile)

        except Exception:
            return (None, None, traceback.format_exc() + ' processing ' +
                    NetworkCode + '.' + StationCode + ' using node ' +
                    platform.node() + '\n', metafile)

    # everything ok: assemble the APR record (XYZ coordinates, XYZ sigmas, ENU sigmas, source)
    APR = '%s.%s %s %12.3f %12.3f %12.3f %5.3f %5.3f %5.3f %5.3f %5.3f %5.3f %s' % (
        NetworkCode, StationCode, StationAlias, Apr[0, 0], Apr[1, 0],
        Apr[2, 0], sigmas_xyz[0, 0], sigmas_xyz[1, 0], sigmas_xyz[2, 0],
        sigmas[1, 0], sigmas[0, 0], sigmas[2, 0], source.replace(' ', '_'))

    return (APR,
            stninfo.return_stninfo().replace(StationCode.upper(), StationAlias.upper()),
            None, metafile)
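

# ---------------------------------------------------------------------------
# The helper above calls two functions that are not shown in this excerpt.
# The sketches below are minimal stand-ins, NOT the project's actual
# implementations: sigmas_neu2xyz() assumes a diagonal NEU covariance and the
# standard local-to-ECEF rotation, and window_rinex() assumes the pyRinex
# object exposes a datetime_firstObs attribute and a
# window_data(start=..., end=...) method (both assumptions).
# ---------------------------------------------------------------------------
def sigmas_neu2xyz(lat, lon, sigmas):
    # lat, lon in degrees; sigmas is a 3x1 array ordered N, E, U
    lat = numpy.deg2rad(float(lat))
    lon = numpy.deg2rad(float(lon))

    # columns of R map local N, E, U into ECEF X, Y, Z
    R = numpy.array([[-numpy.sin(lat) * numpy.cos(lon), -numpy.sin(lon), numpy.cos(lat) * numpy.cos(lon)],
                     [-numpy.sin(lat) * numpy.sin(lon),  numpy.cos(lon), numpy.cos(lat) * numpy.sin(lon)],
                     [ numpy.cos(lat),                   0.0,            numpy.sin(lat)]])

    # propagate the diagonal NEU covariance: C_xyz = R * C_neu * R^T
    C_neu = numpy.diag(numpy.square(sigmas.flatten()))
    C_xyz = numpy.dot(numpy.dot(R, C_neu), R.T)

    # return the XYZ sigmas with the same 3x1 layout as the input
    return numpy.sqrt(numpy.diag(C_xyz)).reshape((3, 1))


def window_rinex(Rnx, window):
    # keep the data on one side of the discontinuity: if the file starts
    # before the event, keep the observations up to it; otherwise keep the
    # observations after it
    if Rnx.datetime_firstObs < window.datetime():
        Rnx.window_data(end=window.datetime())
    else:
        Rnx.window_data(start=window.datetime())

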
def main():

    parser = argparse.ArgumentParser(description='GNSS time series stacker')

    parser.add_argument('project', type=str, nargs=1, metavar='{project name}',
                        help="Specify the project name used to process the GAMIT solutions in Parallel.GAMIT.")
    parser.add_argument('-max', '--max_iters', nargs=1, type=int, metavar='{max_iter}',
                        help="Specify maximum number of iterations. Default is 4.")
    parser.add_argument('-exclude', '--exclude_stations', nargs='+', type=str, metavar='{net.stnm}',
                        help="Manually specify stations to remove from the stacking process.")
    parser.add_argument('-use', '--use_stations', nargs='+', type=str, metavar='{net.stnm}',
                        help="Manually specify stations to use for the stacking process.")
    parser.add_argument('-np', '--noparallel', action='store_true',
                        help="Execute command without parallelization.")

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")
    Config = pyOptions.ReadOptions("gnss_data.cfg")  # type: pyOptions.ReadOptions

    if not args.noparallel:
        JobServer = pyJobServer.JobServer(Config, run_node_test=False)  # type: pyJobServer.JobServer
    else:
        JobServer = None
        Config.run_parallel = False

    if args.max_iters:
        max_iters = int(args.max_iters[0])
    else:
        max_iters = 4

    if args.exclude_stations:
        exclude_stn = args.exclude_stations
    else:
        exclude_stn = []

    if args.use_stations:
        use_stn = args.use_stations
    else:
        use_stn = []

    # create folder for plots

    if not os.path.isdir(args.project[0]):
        os.makedirs(args.project[0])

    ########################################
    # load polyhedrons

    project = Project(cnn, args.project[0], max_iters, use=use_stn, exclude=exclude_stn)

    #project.remove_common_modes(cnn)
    #exit()

    calculate_etms(cnn, project, JobServer)

    align_stack(cnn, project, JobServer)

    # remove common modes
    updated_poly = project.remove_common_modes(cnn)
    updated_poly.sort(key=lambda k: k['FYear'])

    # replace with new polyhedrons
    project.polyhedrons = updated_poly
    # last call to calculate ETMs
    calculate_etms(cnn, project, JobServer)

    tqdm.write(' -- Plotting final ETMs (aligned)...')

    project.plot_etms()
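

# standard entry-point guard, added for completeness since the excerpt ends at
# main() (an assumption: the original script may invoke main() differently)
if __name__ == '__main__':
    main()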