Example #1
def main():

    parser = argparse.ArgumentParser(description='GNSS time series stacker')

    parser.add_argument('project', type=str, nargs=1, metavar='{project name}',
                        help="Specify the project name used to process the GAMIT solutions in Parallel.GAMIT.")
    parser.add_argument('-d', '--date_filter', nargs='+', metavar='date',
                        help='Date range filter. Can be specified in yyyy/mm/dd, yyyy_doy or wwww-d format')

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")
    Config = pyOptions.ReadOptions("gnss_data.cfg")  # type: pyOptions.ReadOptions

    dates = [pyDate.Date(year=1980, doy=1),
             pyDate.Date(year=2100, doy=1)]
    try:
        dates = process_date(args.date_filter)
    except ValueError as e:
        parser.error(str(e))

    # create folder for plots

    if not os.path.isdir(args.project[0]):
        os.makedirs(args.project[0])

    ########################################
    # load polyhedrons

    project = dra(cnn, args.project[0], dates)
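
Note: most examples in this collection call process_date() without defining it. Below is a minimal sketch of such a helper, assuming it accepts yyyy/mm/dd, yyyy_doy and wwww-d strings and returns a [start, end] pair of pyDate.Date objects; this is a hypothetical reconstruction, not the actual Parallel.GAMIT implementation.

# Hypothetical sketch of the process_date() helper used throughout these
# examples (the real Parallel.GAMIT implementation may differ).
import pyDate


def process_date(arg, missing_input='fill', allow_days=True):
    # default window used when no filter is given
    dates = [pyDate.Date(year=1980, doy=1), pyDate.Date(year=2100, doy=1)]

    if arg is None:
        if missing_input is None:
            raise ValueError('no date given')
        return dates

    for i, token in enumerate(arg[:2]):
        if '/' in token:                   # yyyy/mm/dd
            y, m, d = (int(t) for t in token.split('/'))
            dates[i] = pyDate.Date(year=y, month=m, day=d)
        elif '_' in token:                 # yyyy_doy
            y, doy = (int(t) for t in token.split('_'))
            dates[i] = pyDate.Date(year=y, doy=doy)
        elif '-' in token:                 # wwww-d (GPS week and weekday)
            w, wd = (int(t) for t in token.split('-'))
            dates[i] = pyDate.Date(gpsWeek=w, gpsWeekDay=wd)
        elif '.' in token and allow_days:  # fractional year
            dates[i] = pyDate.Date(fyear=float(token))
        else:
            raise ValueError('invalid date format: ' + token)

    return dates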
Example #2
def main():

    parser = argparse.ArgumentParser(description='GNSS time series stacker')

    parser.add_argument('project', type=str, nargs=1, metavar='{project name}',
                        help="Specify the project name used to process the GAMIT solutions in Parallel.GAMIT.")

    parser.add_argument('sinex', type=str, nargs=1, metavar='{sinex file}',
                        help="SINEX file to update.")

    parser.add_argument('-d', '--date_filter', nargs='+', metavar='date',
                        help='Date range filter. Can be specified in yyyy/mm/dd, yyyy_doy or wwww-d format')

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")
    Config = pyOptions.ReadOptions("gnss_data.cfg")  # type: pyOptions.ReadOptions

    dates = [pyDate.Date(year=1980, doy=1), pyDate.Date(year=2100, doy=1)]
    try:
        dates = process_date(args.date_filter)
    except ValueError as e:
        parser.error(str(e))

    sinex = args.sinex[0]
    project = args.project[0]

    process_sinex(cnn, project, dates, sinex)
Example #3
def main():

    parser = argparse.ArgumentParser(description='GNSS time series stacker')

    parser.add_argument('project', type=str, nargs=1, metavar='{project name}',
                        help="Specify the project name used to process the GAMIT solutions in Parallel.GAMIT.")
    parser.add_argument('-d', '--date_filter', nargs='+', metavar='date',
                        help='Date range filter. Can be specified in yyyy/mm/dd, yyyy_doy or wwww-d format')

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")

    project = args.project[0]

    dates = [pyDate.Date(year=1980, doy=1), pyDate.Date(year=2100, doy=1)]
    try:
        dates = process_date(args.date_filter)
    except ValueError as e:
        parser.error(str(e))

    # create folder for plots

    if not os.path.isdir(project + '_dra'):
        os.makedirs(project + '_dra')

    ########################################
    # load polyhedrons

    dra = DRA(cnn, project, dates[1])

    dra.stack_dra()

    for stn in tqdm(dra.stations):
        NetworkCode = stn['NetworkCode']
        StationCode = stn['StationCode']

        # load from the db
        ts = dra.get_station(NetworkCode, StationCode)

        if ts.size:
            try:
                if ts.shape[0] > 2:
                    dts = np.append(np.diff(ts[:, 0:3], axis=0), ts[1:, -3:], axis=1)

                    dra_ts = pyETM.GamitSoln(cnn, dts, NetworkCode, StationCode, project)

                    etm = pyETM.DailyRep(cnn, NetworkCode, StationCode, False, False, dra_ts)

                    etm.plot(pngfile='%s/%s.%s_DRA.png' % (project + '_dra', NetworkCode, StationCode),
                             plot_missing=False)

            except Exception as e:
                tqdm.write(' -->' + str(e))

    dra.to_json(project + '_dra.json')
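
The core of Example #3 is the line that builds the day-to-day difference series: np.diff over the XYZ columns drops the first epoch, so the epoch columns of rows 1..n are re-attached with np.append. A toy numpy illustration of that shape bookkeeping (made-up coordinates, not real data):

# Toy illustration of the differencing step in Example #3: columns 0-2 are
# X, Y, Z; the last three columns carry the epoch information of each row.
import numpy as np

ts = np.array([[100.0, 200.0, 300.0, 2019.0, 1.0, 0.0],
               [100.1, 200.2, 300.3, 2019.0, 2.0, 0.0],
               [100.3, 200.5, 300.7, 2019.0, 3.0, 0.0]])

dxyz = np.diff(ts[:, 0:3], axis=0)          # consecutive-day differences (n-1 rows)
dts = np.append(dxyz, ts[1:, -3:], axis=1)  # epoch columns of the *later* day

# prints (up to float rounding):
# (0.1, 0.2, 0.3, 2019, 2, 0) and (0.2, 0.3, 0.4, 2019, 3, 0)
print(dts)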
Example #4
def main():
    parser = argparse.ArgumentParser(
        description='Plot ETM for stations in the database')

    parser.add_argument(
        'stnlist',
        type=str,
        nargs='+',
        help=
        "List of networks/stations to plot given in [net].[stnm] format or just [stnm] "
        "(separated by spaces; if [stnm] is not unique in the database, all stations with that "
        "name will be plotted). Use keyword 'all' to plot all stations in all networks. "
        "If [net].all is given, all stations from network [net] will be plotted"
    )
    parser.add_argument('-nop',
                        '--no_plots',
                        action='store_true',
                        help="Do not produce plots",
                        default=False)
    parser.add_argument('-nom',
                        '--no_missing_data',
                        action='store_true',
                        help="Do not show missing days",
                        default=False)
    parser.add_argument('-nm',
                        '--no_model',
                        action='store_true',
                        help="Plot time series without fitting a model")
    parser.add_argument('-r',
                        '--residuals',
                        action='store_true',
                        help="Plot time series residuals",
                        default=False)
    parser.add_argument(
        '-dir',
        '--directory',
        type=str,
        help=
        "Directory to save the resulting PNG files. If not specified, assumed to be the "
        "production directory")
    parser.add_argument(
        '-json',
        '--json',
        type=int,
        help="Export ETM adjustment to JSON. Append '0' to just output "
        "the ETM parameters, '1' to export time series without "
        "model and '2' to export both time series and model.")
    parser.add_argument(
        '-gui',
        '--interactive',
        action='store_true',
        help="Interactive mode: allows to zoom and view the plot interactively"
    )
    parser.add_argument(
        '-win',
        '--time_window',
        nargs='+',
        metavar='interval',
        help=
        'Date range to window data. Can be specified in yyyy/mm/dd, yyyy.doy or as a single '
        'integer value (N), which is interpreted as last epoch - N')
    parser.add_argument(
        '-q',
        '--query',
        nargs=2,
        metavar='{type} {date}',
        type=str,
        help=
        'Dates to query the ETM. Specify "model" or "solution" to get the ETM value or the value '
        'of the daily solution (if it exists). Output is in XYZ.')
    parser.add_argument(
        '-gamit',
        '--gamit',
        type=str,
        nargs=1,
        metavar='{stack}',
        help="Plot the GAMIT time series specifying which stack name to plot.")
    parser.add_argument(
        '-lang',
        '--language',
        type=str,
        help="Change the language of the plots. Default is English. "
        "Use ESP to select Spanish. To add more languages, "
        "include the ISO 639-1 code in pyETM.py",
        default='ENG')
    parser.add_argument('-hist',
                        '--histogram',
                        action='store_true',
                        help="Plot histogram of residuals")
    parser.add_argument(
        '-file',
        '--filename',
        type=str,
        help=
        "Obtain data from an external source (filename). Format should be specified with -format."
    )
    parser.add_argument(
        '-format',
        '--format',
        nargs='+',
        type=str,
        help=
        "To be used together with --filename. Specify order of the fields as found in the input "
        "file. Format strings are gpsWeek, gpsWeekDay, year, doy, fyear, month, day, mjd, "
        "x, y, z, na. Use 'na' to specify a field that should be ignored. If fields to be ignored "
        "are at the end of the line, then there is no need to specify those.")
    parser.add_argument('-outliers',
                        '--plot_outliers',
                        action='store_true',
                        help="Plot an additional panel with the outliers")
    parser.add_argument('-vel',
                        '--velocity',
                        action='store_true',
                        help="During query, output the velocity in XYZ.")
    parser.add_argument('-seasonal',
                        '--seasonal_terms',
                        action='store_true',
                        help="During query, output the seasonal terms in NEU.")
    parser.add_argument('-quiet',
                        '--suppress_messages',
                        action='store_true',
                        help="Quiet mode: suppress information messages")

    args = parser.parse_args()

    cnn = dbConnection.Cnn('gnss_data.cfg')

    stnlist = Utils.process_stnlist(cnn, args.stnlist)

    # define the language
    pyETM.LANG = args.language.lower()
    # set the logging level
    if not args.suppress_messages:
        pyETM.logger.setLevel(pyETM.INFO)
    #####################################
    # date filter

    dates = None
    if args.time_window is not None:
        if len(args.time_window) == 1:
            try:
                dates = process_date(args.time_window,
                                     missing_input=None,
                                     allow_days=False)
                dates = (dates[0].fyear, )
            except ValueError:
                # an integer value
                dates = float(args.time_window[0])
        else:
            dates = process_date(args.time_window)
            dates = (dates[0].fyear, dates[1].fyear)

    if stnlist:
        # create the output directory, then process each station
        if args.directory:
            if not os.path.exists(args.directory):
                os.mkdir(args.directory)
        else:
            if not os.path.exists('production'):
                os.mkdir('production')
            args.directory = 'production'

        for stn in stnlist:
            try:

                if args.gamit is None and args.filename is None:
                    etm = pyETM.PPPETM(cnn, stn['NetworkCode'],
                                       stn['StationCode'], False,
                                       args.no_model)
                elif args.filename is not None:
                    etm = from_file(args, cnn, stn)
                else:
                    polyhedrons = cnn.query_float(
                        'SELECT "X", "Y", "Z", "Year", "DOY" FROM stacks '
                        'WHERE "name" = \'%s\' AND "NetworkCode" = \'%s\' AND '
                        '"StationCode" = \'%s\' '
                        'ORDER BY "Year", "DOY", "NetworkCode", "StationCode"'
                        % (args.gamit[0], stn['NetworkCode'],
                           stn['StationCode']))

                    soln = pyETM.GamitSoln(cnn, polyhedrons,
                                           stn['NetworkCode'],
                                           stn['StationCode'], args.gamit[0])

                    etm = pyETM.GamitETM(cnn,
                                         stn['NetworkCode'],
                                         stn['StationCode'],
                                         False,
                                         args.no_model,
                                         gamit_soln=soln)

                    # print ' > %5.2f %5.2f %5.2f %i %i' % \
                    #      (etm.factor[0]*1000, etm.factor[1]*1000, etm.factor[2]*1000, etm.soln.t.shape[0],
                    #       etm.soln.t.shape[0] -
                    #       np.sum(np.logical_and(np.logical_and(etm.F[0], etm.F[1]), etm.F[2])))

                    # print two largest outliers
                    if etm.A is not None:
                        lres = np.sqrt(np.sum(np.square(etm.R), axis=0))
                        slres = lres[np.argsort(-lres)]

                        print ' >> Two largest residuals:'
                        for i in [0, 1]:
                            print(' %s %6.3f %6.3f %6.3f' %
                                  (pyDate.Date(mjd=etm.soln.mjd[
                                      lres == slres[i]]).yyyyddd(),
                                   etm.R[0, lres == slres[i]],
                                   etm.R[1, lres == slres[i]],
                                   etm.R[2, lres == slres[i]]))

                if args.interactive:
                    xfile = None
                else:
                    if args.gamit is None:
                        if args.filename is None:
                            xfile = os.path.join(
                                args.directory, '%s.%s_ppp' %
                                (etm.NetworkCode, etm.StationCode))
                        else:
                            xfile = os.path.join(
                                args.directory, '%s.%s_file' %
                                (etm.NetworkCode, etm.StationCode))
                    else:
                        xfile = os.path.join(
                            args.directory,
                            '%s.%s_gamit' % (etm.NetworkCode, etm.StationCode))

                # leave pngfile empty to enter interactive mode (GUI)
                if not args.no_plots:
                    etm.plot(xfile + '.png',
                             t_win=dates,
                             residuals=args.residuals,
                             plot_missing=not args.no_missing_data,
                             plot_outliers=args.plot_outliers)

                    if args.histogram:
                        etm.plot_hist(xfile + '_hist.png')

                if args.json is not None:
                    with open(xfile + '.json', 'w') as f:
                        if args.json == 1:
                            json.dump(etm.todictionary(time_series=True),
                                      f,
                                      indent=4,
                                      sort_keys=False)
                        elif args.json == 2:
                            json.dump(etm.todictionary(time_series=True,
                                                       model=True),
                                      f,
                                      indent=4,
                                      sort_keys=False)
                        else:
                            json.dump(etm.todictionary(False),
                                      f,
                                      indent=4,
                                      sort_keys=False)

                if args.query is not None:
                    model = (args.query[0] == 'model')
                    q_date = pyDate.Date(fyear=float(args.query[1]))

                    xyz, _, _, txt = etm.get_xyz_s(q_date.year,
                                                   q_date.doy,
                                                   force_model=model)

                    strp = ''
                    # if user requests velocity too, output it
                    if args.velocity:
                        if etm.A is not None:
                            vxyz = etm.rotate_2xyz(etm.Linear.p.params[:, 1])
                            strp = '%8.5f %8.5f %8.5f ' \
                                   % (vxyz[0, 0], vxyz[1, 0], vxyz[2, 0])

                    # also output seasonal terms, if requested
                    if args.seasonal_terms:
                        if etm.Periodic.frequency_count > 0:
                            strp += ' '.join([
                                '%8.5f' % (x * 1000) for x in
                                etm.Periodic.p.params.flatten().tolist()
                            ])

                    print ' %s.%s %14.5f %14.5f %14.5f %8.3f %s -> %s' \
                          % (etm.NetworkCode, etm.StationCode, xyz[0], xyz[1], xyz[2], q_date.fyear, strp, txt)

                print 'Successfully plotted ' + stn['NetworkCode'] + '.' + stn[
                    'StationCode']

            except pyETM.pyETMException as e:
                print str(e)

            except Exception:
                print 'Error during processing of ' + stn[
                    'NetworkCode'] + '.' + stn['StationCode']
                print traceback.format_exc()
Example #5
def main():
    parser = argparse.ArgumentParser(
        description='Archive operations Main Program')

    parser.add_argument(
        'stnlist',
        type=str,
        nargs='+',
        metavar='all|net.stnm',
        help=
        "List of networks/stations to process given in [net].[stnm] format or just [stnm] "
        "(separated by spaces; if [stnm] is not unique in the database, all stations with that "
        "name will be processed). Use keyword 'all' to process all stations in the database. "
        "If [net].all is given, all stations from network [net] will be processed. "
        "Alternatevily, a file with the station list can be provided.")

    parser.add_argument(
        '-date',
        '--date_range',
        nargs='+',
        action=required_length(1, 2),
        metavar='date_start|date_end',
        help="Date range to check given as [date_start] or [date_start] "
        "and [date_end]. Allowed formats are yyyy.doy or yyyy/mm/dd..")
    parser.add_argument(
        '-win',
        '--window',
        nargs=1,
        metavar='days',
        type=int,
        help=
        "Download data from a given time window determined by today - {days}.")

    try:
        args = parser.parse_args()

        cnn = dbConnection.Cnn('gnss_data.cfg')
        Config = pyOptions.ReadOptions('gnss_data.cfg')

        stnlist = Utils.process_stnlist(cnn, args.stnlist)

        print ' >> Selected station list:'
        print_columns([
            item['NetworkCode'] + '.' + item['StationCode'] for item in stnlist
        ])

        dates = []

        try:
            if args.window:
                # today - ndays
                d = pyDate.Date(year=datetime.now().year,
                                month=datetime.now().month,
                                day=datetime.now().day)
                dates = [d - int(args.window[0]), d]
            else:
                dates = process_date(args.date_range)

        except ValueError as e:
            parser.error(str(e))

        if dates[0] < pyDate.Date(gpsWeek=650, gpsWeekDay=0):
            dates = [
                pyDate.Date(gpsWeek=650, gpsWeekDay=0),
                pyDate.Date(year=datetime.now().year,
                            month=datetime.now().month,
                            day=datetime.now().day)
            ]

        # go through the dates
        drange = np.arange(dates[0].mjd, dates[1].mjd + 1, 1)

        download_data(cnn, Config, stnlist, drange)

    except argparse.ArgumentTypeError as e:
        parser.error(str(e))
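
Examples #5, #9 and #11 pass action=required_length(1, 2) to accept one or two values for -date, and then catch argparse.ArgumentTypeError around parse_args(). argparse has no built-in minimum/maximum for nargs, so a custom-action factory along these lines is the usual recipe; this is a sketch, and the project's actual helper may differ.

import argparse


def required_length(nmin, nmax):
    # factory for an argparse action that accepts between nmin and nmax values
    class RequiredLength(argparse.Action):
        def __call__(self, parser, namespace, values, option_string=None):
            if not nmin <= len(values) <= nmax:
                # propagates out of parse_args(), matching the
                # "except argparse.ArgumentTypeError" handlers above
                raise argparse.ArgumentTypeError(
                    'argument "%s" requires between %d and %d arguments'
                    % (self.dest, nmin, nmax))
            setattr(namespace, self.dest, values)
    return RequiredLength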
Example #6
def main():

    parser = argparse.ArgumentParser(
        description=
        'Database integrity, metadata checking and fixing tools program')

    parser.add_argument(
        'stnlist',
        type=str,
        nargs='+',
        metavar='all|net.stnm',
        help=
        "List of networks/stations to process given in [net].[stnm] format or just [stnm] "
        "(separated by spaces; if [stnm] is not unique in the database, all stations with that "
        "name will be processed). Use keyword 'all' to process all stations in the database. "
        "If [net].all is given, all stations from network [net] will be processed. "
        "Alternatevily, a file with the station list can be provided.")

    parser.add_argument(
        '-d',
        '--date_filter',
        nargs='+',
        metavar='date',
        help='Date range filter for all operations. '
        'Can be specified in wwww-d, yyyy_ddd, yyyy/mm/dd or fyear format')

    parser.add_argument(
        '-rinex',
        '--check_rinex',
        choices=['fix', 'report'],
        type=str,
        nargs=1,
        help=
        'Check the RINEX integrity of the archive-database by verifying that the RINEX files '
        'reported in the rinex table exist in the archive. If argument = "fix" and a RINEX file '
        'does not exist, remove the record; associated PPP and gamit_soln records are also '
        'deleted. If argument = "report", just list the missing files.')

    parser.add_argument(
        '-rnx_count',
        '--rinex_count',
        action='store_true',
        help='Count the total number of RINEX files (unique station-days) '
        'per day for a given time interval.')

    parser.add_argument(
        '-stnr',
        '--station_info_rinex',
        action='store_true',
        help=
        'Check that the receiver serial number in the rinex headers agrees with the station info '
        'receiver serial number.')

    parser.add_argument(
        '-stns',
        '--station_info_solutions',
        action='store_true',
        help='Check that the PPP hash values match the station info hash.')

    parser.add_argument(
        '-stnp',
        '--station_info_proposed',
        metavar='ignore_days',
        const=0,
        type=int,
        nargs='?',
        help=
        'Output a proposed station.info using the RINEX metadata. Optionally, specify '
        '[ignore_days] to ignore station.info records <= [ignore_days] days long.')

    parser.add_argument(
        '-stnc',
        '--station_info_check',
        action='store_true',
        help=
        'Check the consistency of the station information records in the database. Date range '
        'does not apply. Also, check that the RINEX files fall within a valid station information '
        'record.')

    parser.add_argument(
        '-g',
        '--data_gaps',
        metavar='ignore_days',
        const=0,
        type=int,
        nargs='?',
        help=
        'Check the RINEX files in the database and look for gaps (missing days). '
        'Optionally, specify [ignore_days] as the smallest gap to display.')

    parser.add_argument('-gg',
                        '--graphical_gaps',
                        action='store_true',
                        help='Visually output RINEX gaps for stations.')

    parser.add_argument(
        '-sc',
        '--spatial_coherence',
        choices=['exclude', 'delete', 'noop'],
        type=str,
        nargs=1,
        help=
        'Check that the RINEX files correspond to the stations they are linked to using their '
        'PPP coordinate. If keyword [exclude] or [delete], add the PPP solution to the excluded '
        'table or delete the PPP solution. If [noop], then only report but do not '
        'exclude or delete.')

    parser.add_argument(
        '-print',
        '--print_stninfo',
        choices=['long', 'short'],
        type=str,
        nargs=1,
        help=
        'Output the station info to stdout. [long] outputs the full line of the station info. '
        '[short] outputs a short version (better for screen visualization).')

    parser.add_argument(
        '-r',
        '--rename',
        metavar='net.stnm',
        nargs=1,
        help=
        "Takes the data from the station list and renames (merges) it to net.stnm. "
        "It also changes the rinex filenames in the archive to match those of the new destiny "
        "station. Only a single station can be given as the origin and destiny. "
        "Limit the date range using the -d option.")

    parser.add_argument(
        '-es',
        '--exclude_solutions',
        metavar=('{start_date}', '{end_date}'),
        nargs=2,
        help=
        'Exclude PPP solutions (by adding them to the excluded table) between {start_date} '
        'and {end_date}')

    parser.add_argument(
        '-del',
        '--delete_rinex',
        metavar=('{start_date}', '{end_date}', '{completion}'),
        nargs=3,
        help='Delete RINEX files (and associated solutions, PPP and GAMIT) '
        'from archive between {start_date} and {end_date} with completion <= {completion}. '
        'Completion ranges from 1.0 to 0.0. Use 1.0 to delete all data. '
        'Operation cannot be undone!')

    parser.add_argument('-np',
                        '--noparallel',
                        action='store_true',
                        help="Execute command without parallelization.")

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")  # type: dbConnection.Cnn

    # create the execution log
    cnn.insert('executions', script='pyIntegrityCheck.py')

    Config = pyOptions.ReadOptions(
        "gnss_data.cfg")  # type: pyOptions.ReadOptions

    stnlist = Utils.process_stnlist(cnn, args.stnlist)

    JobServer = pyJobServer.JobServer(
        Config,
        run_parallel=not args.noparallel)  # type: pyJobServer.JobServer

    #####################################
    # date filter

    dates = [pyDate.Date(year=1980, doy=1), pyDate.Date(year=2100, doy=1)]
    try:
        dates = process_date(args.date_filter)
    except ValueError as e:
        parser.error(str(e))

    #####################################

    if args.check_rinex:
        CheckRinexIntegrity(cnn, Config, stnlist, dates[0], dates[1],
                            args.check_rinex[0], JobServer)

    #####################################

    if args.rinex_count:
        RinexCount(cnn, stnlist, dates[0], dates[1])

    #####################################

    if args.station_info_rinex:
        StnInfoRinexIntegrity(cnn, stnlist, dates[0], dates[1], JobServer)

    #####################################

    if args.station_info_check:
        StnInfoCheck(cnn, stnlist, Config)

    #####################################

    if args.data_gaps is not None:
        GetStnGaps(cnn, stnlist, args.data_gaps, dates[0], dates[1])

    if args.graphical_gaps:
        VisualizeGaps(cnn, stnlist, dates[0], dates[1])

    #####################################

    if args.spatial_coherence is not None:
        CheckSpatialCoherence(cnn, stnlist, dates[0], dates[1])

    #####################################

    if args.exclude_solutions is not None:
        try:
            dates = process_date(args.exclude_solutions)
        except ValueError as e:
            parser.error(str(e))

        ExcludeSolutions(cnn, stnlist, dates[0], dates[1])

    #####################################

    if args.print_stninfo is not None:
        if args.print_stninfo[0] == 'short':
            PrintStationInfo(cnn, stnlist, True)
        elif args.print_stninfo[0] == 'long':
            PrintStationInfo(cnn, stnlist, False)
        else:
            parser.error(
                'Argument for print_stninfo has to be either long or short')

    #####################################

    if args.station_info_proposed is not None:
        for stn in stnlist:
            stninfo = pyStationInfo.StationInfo(cnn,
                                                stn['NetworkCode'],
                                                stn['StationCode'],
                                                allow_empty=True)
            sys.stdout.write(
                stninfo.rinex_based_stninfo(args.station_info_proposed))

    #####################################

    if args.delete_rinex is not None:
        try:
            dates = process_date(args.delete_rinex[0:2])
        except ValueError as e:
            parser.error(str(e))

        DeleteRinex(cnn, stnlist, dates[0], dates[1],
                    float(args.delete_rinex[2]))

    #####################################

    if args.rename:
        if len(stnlist) > 1:
            parser.error(
                'Only a single station should be given for the origin station')

        if '.' not in args.rename[0]:
            parser.error('Format for destination station should be net.stnm')
        else:
            DestNetworkCode = args.rename[0].split('.')[0]
            DestStationCode = args.rename[0].split('.')[1]

            RenameStation(cnn, stnlist[0]['NetworkCode'],
                          stnlist[0]['StationCode'], DestNetworkCode,
                          DestStationCode, dates[0], dates[1],
                          Config.archive_path)

    JobServer.close_cluster()
Example #7
def main():

    parser = argparse.ArgumentParser(description='GNSS time series stacker')

    parser.add_argument(
        'project',
        type=str,
        nargs=1,
        metavar='{project name}',
        help=
        "Specify the project name used to process the GAMIT solutions in Parallel.GAMIT."
    )
    parser.add_argument(
        'stack_name',
        type=str,
        nargs=1,
        metavar='{stack name}',
        help=
        "Specify a name for the stack: eg. itrf2014 or posgar07b. This name should be unique "
        "and cannot be repeated for any other solution project")
    parser.add_argument(
        '-max',
        '--max_iters',
        nargs=1,
        type=int,
        metavar='{max_iter}',
        help="Specify maximum number of iterations. Default is 4.")
    parser.add_argument(
        '-exclude',
        '--exclude_stations',
        nargs='+',
        type=str,
        metavar='{net.stnm}',
        help="Manually specify stations to remove from the stacking process.")
    parser.add_argument(
        '-use',
        '--use_stations',
        nargs='+',
        type=str,
        metavar='{net.stnm}',
        help="Manually specify stations to use for the stacking process.")
    parser.add_argument(
        '-dir',
        '--directory',
        type=str,
        help=
        "Directory to save the resulting PNG files. If not specified, assumed to be the "
        "production directory")
    parser.add_argument('-redo',
                        '--redo_stack',
                        action='store_true',
                        help="Delete the stack and redo it from scratch")
    parser.add_argument('-plot',
                        '--plot_stack_etms',
                        action='store_true',
                        default=False,
                        help="Plot the stack ETMs after computation is done")
    parser.add_argument(
        '-constrains',
        '--external_constrains',
        nargs='+',
        help=
        "File with external constrains parameters (position, velocity and periodic). These may be "
        "from a parent frame such as ITRF. "
        "Inheritance will occur with stations on the list whenever a parameter exists. "
        "Example: -constrains itrf14.txt "
        "Format is: net.stn x y z epoch vx vy vz sn_1y sn_6m cn_1y cn_6m se_1y se_6m ce_1y ce_6m "
        "su_1y su_6m cu_1y cu_6m ")
    parser.add_argument(
        '-d',
        '--date_end',
        nargs=1,
        metavar='date',
        help=
        'Limit the polyhedrons to the specified date. Can be in wwww-d, yyyy_ddd, yyyy/mm/dd '
        'or fyear format')
    parser.add_argument('-np',
                        '--noparallel',
                        action='store_true',
                        help="Execute command without parallelization.")

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")

    Config = pyOptions.ReadOptions(
        "gnss_data.cfg")  # type: pyOptions.ReadOptions

    JobServer = pyJobServer.JobServer(
        Config,
        run_parallel=not args.noparallel)  # type: pyJobServer.JobServer

    if args.max_iters:
        max_iters = int(args.max_iters[0])
    else:
        max_iters = 4
        print ' >> Defaulting to 4 iterations'

    if args.exclude_stations:
        exclude_stn = args.exclude_stations
    else:
        exclude_stn = []

    if args.use_stations:
        use_stn = args.use_stations
    else:
        use_stn = []

    dates = [Date(year=1980, doy=1), Date(datetime=datetime.now())]
    if args.date_end is not None:
        try:
            dates = process_date(
                [str(Date(year=1980, doy=1).fyear), args.date_end[0]])
        except ValueError as e:
            parser.error(str(e))

    # create folder for plots

    if args.directory:
        if not os.path.exists(args.directory):
            os.mkdir(args.directory)
    else:
        if not os.path.exists('production'):
            os.mkdir('production')
        args.directory = 'production'

    # load the ITRF dat file with the periodic space components
    if args.external_constrains:
        constrains = load_constrains(args.external_constrains[0])
    else:
        constrains = None

    # create the stack object
    stack = pyStack.Stack(cnn,
                          args.project[0],
                          args.stack_name[0],
                          args.redo_stack,
                          end_date=dates[1])

    # stack.align_spaces(frame_params)
    # stack.to_json('alignment.json')
    # exit()

    for i in range(max_iters):
        # create the target polyhedrons based on iteration number (i == 0: PPP)

        target = calculate_etms(cnn, stack, JobServer, i)

        qbar = tqdm(total=len(stack),
                    ncols=160,
                    desc=' >> Aligning polyhedrons (%i of %i)' %
                    (i + 1, max_iters))

        # work on each polyhedron of the stack
        for j in range(len(stack)):

            qbar.update()

            if not stack[j].aligned:
                # do not move this if up one level: to speed up the target polyhedron loading process, the target is
                # set to an empty list when the polyhedron is already aligned
                if stack[j].date != target[j].date:
                    # raise an error if dates don't agree!
                    raise StandardError(
                        'Error processing %s: dates don\'t agree (target date %s)'
                        % (stack[j].date.yyyyddd(), target[j].date.yyyyddd()))
                else:
                    # should only attempt to align a polyhedron that is unaligned
                    # do not set the polyhedron as aligned unless we are in the max iteration step
                    stack[j].align(target[j], i == max_iters - 1)
                    # write info to the screen
                    qbar.write(
                        ' -- %s (%3i) %2i it: wrms: %4.1f T %5.1f %5.1f %5.1f '
                        'R (%5.1f %5.1f %5.1f)*1e-9' %
                        (stack[j].date.yyyyddd(), stack[j].stations_used,
                         stack[j].iterations, stack[j].wrms * 1000,
                         stack[j].helmert[-3] * 1000,
                         stack[j].helmert[-2] * 1000,
                         stack[j].helmert[-1] * 1000, stack[j].helmert[-6],
                         stack[j].helmert[-5], stack[j].helmert[-4]))

        stack.transformations.append([poly.info() for poly in stack])
        qbar.close()

    if args.redo_stack:
        # before removing common modes (or inheriting periodic terms), calculate ETMs with final aligned solutions
        calculate_etms(cnn,
                       stack,
                       JobServer,
                       iterations=None,
                       create_target=False)
        # only apply common mode removal if redoing the stack
        if args.external_constrains:
            stack.remove_common_modes(constrains)
        else:
            stack.remove_common_modes()

        # here, we also align the stack in velocity and coordinate space
        stack.align_spaces(constrains)

    # calculate the etms again, after removing or inheriting parameters
    calculate_etms(cnn, stack, JobServer, iterations=None, create_target=False)

    # save the json with the information about the alignment
    stack.to_json(args.stack_name[0] + '_alignment.json')
    # save polyhedrons to the database
    stack.save()

    if args.plot_stack_etms:
        qbar = tqdm(total=len(stack.stations), ncols=160)
        for stn in stack.stations:
            # plot the ETMs
            qbar.update()
            qbar.postfix = '%s.%s' % (stn['NetworkCode'], stn['StationCode'])
            plot_etm(cnn, stack, stn, args.directory)

        qbar.close()
Example #8
def main():

    parser = argparse.ArgumentParser(
        description='Plot ETM for stations in the database')

    parser.add_argument(
        'stnlist',
        type=str,
        nargs='+',
        help=
        "List of networks/stations to plot given in [net].[stnm] format or just [stnm] "
        "(separated by spaces; if [stnm] is not unique in the database, all stations with that "
        "name will be plotted). Use keyword 'all' to plot all stations in all networks. "
        "If [net].all is given, all stations from network [net] will be plotted"
    )
    parser.add_argument('-nop',
                        '--no_plots',
                        action='store_true',
                        help="Do not produce plots",
                        default=False)
    parser.add_argument('-nom',
                        '--no_missing_data',
                        action='store_true',
                        help="Do not show missing days",
                        default=False)
    parser.add_argument('-nm',
                        '--no_model',
                        action='store_true',
                        help="Plot time series without fitting a model")
    parser.add_argument('-r',
                        '--residuals',
                        action='store_true',
                        help="Plot time series residuals",
                        default=False)
    parser.add_argument(
        '-dir',
        '--directory',
        type=str,
        help=
        "Directory to save the resulting PNG files. If not specified, assumed to be the "
        "production directory")
    parser.add_argument(
        '-json',
        '--json',
        type=int,
        help="Export ETM adjustment to JSON. Append '1' to export time "
        "series or append '0' to just output the ETM parameters.")
    parser.add_argument(
        '-gui',
        '--interactive',
        action='store_true',
        help="Interactive mode: allows to zoom and view the plot interactively"
    )
    parser.add_argument(
        '-win',
        '--time_window',
        nargs='+',
        metavar='interval',
        help=
        'Date range to window data. Can be specified in yyyy/mm/dd, yyyy.doy or as a single '
        'integer value (N), which is interpreted as last epoch - N')
    parser.add_argument(
        '-gamit',
        '--gamit',
        type=str,
        nargs=2,
        metavar='{project} {type}',
        help=
        "Plot the GAMIT time series. Specify project and type = \'stack\' to plot the time "
        "series after stacking or \'gamit\' to just plot the coordinates of the polyhedron"
    )

    args = parser.parse_args()

    Config = pyOptions.ReadOptions(
        "gnss_data.cfg")  # type: pyOptions.ReadOptions

    cnn = dbConnection.Cnn('gnss_data.cfg')

    if len(args.stnlist) == 1 and os.path.isfile(args.stnlist[0]):
        print ' >> Station list read from ' + args.stnlist[0]
        stnlist = [line.strip() for line in open(args.stnlist[0], 'r')]
        stnlist = [{
            'NetworkCode': item.split('.')[0],
            'StationCode': item.split('.')[1]
        } for item in stnlist]
    else:
        stnlist = Utils.process_stnlist(cnn, args.stnlist)

    #####################################
    # date filter

    dates = None
    if args.time_window is not None:
        if len(args.time_window) == 1:
            try:
                dates = process_date(args.time_window,
                                     missing_input=None,
                                     allow_days=False)
                dates = (dates[0].fyear, )
            except ValueError:
                # an integer value
                dates = float(args.time_window[0])
        else:
            dates = process_date(args.time_window)
            dates = (dates[0].fyear, dates[1].fyear)

    if stnlist:
        # create the output directory, then process each station
        if args.directory:
            if not os.path.exists(args.directory):
                os.mkdir(args.directory)
        else:
            if not os.path.exists('production'):
                os.mkdir('production')
            args.directory = 'production'

        for stn in stnlist:
            try:

                if args.gamit is None:
                    etm = pyETM.PPPETM(cnn, stn['NetworkCode'],
                                       stn['StationCode'], False,
                                       args.no_model)
                else:
                    if args.gamit[1] == 'stack':
                        polyhedrons = cnn.query_float(
                            'SELECT "X", "Y", "Z", "Year", "DOY" FROM stacks '
                            'WHERE "Project" = \'%s\' AND "NetworkCode" = \'%s\' AND '
                            '"StationCode" = \'%s\' '
                            'ORDER BY "Year", "DOY", "NetworkCode", "StationCode"'
                            % (args.gamit[0], stn['NetworkCode'],
                               stn['StationCode']))

                        soln = pyETM.GamitSoln(cnn, polyhedrons,
                                               stn['NetworkCode'],
                                               stn['StationCode'],
                                               args.gamit[0])

                        etm = pyETM.GamitETM(cnn,
                                             stn['NetworkCode'],
                                             stn['StationCode'],
                                             False,
                                             args.no_model,
                                             gamit_soln=soln)

                        # print ' > %5.2f %5.2f %5.2f %i %i' % \
                        #      (etm.factor[0]*1000, etm.factor[1]*1000, etm.factor[2]*1000, etm.soln.t.shape[0],
                        #       etm.soln.t.shape[0] - np.sum(np.logical_and(np.logical_and(etm.F[0], etm.F[1]), etm.F[2])))

                        # print two largest outliers
                        if etm.A is not None:
                            lres = np.sqrt(np.sum(np.square(etm.R), axis=0))
                            slres = lres[np.argsort(-lres)]

                            print ' >> Two largest residuals:'
                            for i in [0, 1]:
                                print(' %s %6.3f %6.3f %6.3f' %
                                      (pyDate.Date(mjd=etm.soln.mjd[
                                          lres == slres[i]]).yyyyddd(),
                                       etm.R[0, lres == slres[i]],
                                       etm.R[1, lres == slres[i]],
                                       etm.R[2, lres == slres[i]]))

                    elif args.gamit[1] == 'gamit':
                        etm = pyETM.GamitETM(cnn,
                                             stn['NetworkCode'],
                                             stn['StationCode'],
                                             False,
                                             args.no_model,
                                             # pass the project name, not the
                                             # type keyword in args.gamit[1]
                                             project=args.gamit[0])
                    else:
                        parser.error('Invalid option for -gamit switch')
                        etm = None

                if args.interactive:
                    xfile = None
                else:
                    if args.gamit is None:
                        xfile = os.path.join(
                            args.directory,
                            '%s.%s_ppp' % (etm.NetworkCode, etm.StationCode))
                    else:
                        xfile = os.path.join(
                            args.directory,
                            '%s.%s_gamit' % (etm.NetworkCode, etm.StationCode))

                # leave pngfile empty to enter interactive mode (GUI)
                if not args.no_plots:
                    etm.plot(xfile + '.png',
                             t_win=dates,
                             residuals=args.residuals,
                             plot_missing=not args.no_missing_data)

                if args.json is not None:
                    with open(xfile + '.json', 'w') as f:
                        if args.json != 0:
                            json.dump(etm.todictionary(True),
                                      f,
                                      indent=4,
                                      sort_keys=False)
                        else:
                            json.dump(etm.todictionary(False),
                                      f,
                                      indent=4,
                                      sort_keys=False)

                print 'Successfully plotted ' + stn['NetworkCode'] + '.' + stn[
                    'StationCode']

            except pyETM.pyETMException as e:
                print str(e)

            except Exception:
                print 'Error during processing of ' + stn[
                    'NetworkCode'] + '.' + stn['StationCode']
                print traceback.format_exc()
Example #9
def main():
    parser = argparse.ArgumentParser(
        description='Archive operations Main Program')

    parser.add_argument(
        '-date',
        '--date_range',
        nargs='+',
        action=required_length(1, 2),
        metavar='date_start|date_end',
        help=
        "Date range to check given as [date_start] or [date_start] and [date_end]. "
        "Allowed formats are yyyy.doy or yyyy/mm/dd..")

    parser.add_argument(
        '-win',
        '--window',
        nargs=1,
        metavar='days',
        type=int,
        help=
        "Download data from a given time window determined by today - {days}.")

    try:
        args = parser.parse_args()
        Config = pyOptions.ReadOptions('gnss_data.cfg')

        dates = ()
        now = datetime.now()
        try:
            if args.window:
                # today - ndays
                d = pyDate.Date(year=now.year, month=now.month, day=now.day)
                dates = (d - int(args.window[0]), d)
            else:
                dates = process_date(args.date_range)
        except ValueError as e:
            parser.error(str(e))

        if dates[0] < pyDate.Date(gpsWeek=650, gpsWeekDay=0):
            dates = (pyDate.Date(gpsWeek=650, gpsWeekDay=0),
                     pyDate.Date(year=now.year, month=now.month, day=now.day))

        # go through the dates
        drange = np.arange(dates[0].mjd, dates[1].mjd, 1)

        pbar = tqdm(desc='%-30s' % ' >> Synchronizing orbit files',
                    total=len(drange),
                    ncols=160)

        # connect to ftp
        ftp = ftplib.FTP_TLS(FTP_HOST, FTP_USER, FTP_PASS)

        ftp.set_pasv(True)
        ftp.prot_p()

        def downloadIfMissing(ftp_list, ftp_filename, local_filename,
                              local_dir, desc):
            # download ftp_filename only if no local copy exists and the file
            # is actually present in the FTP listing
            mark_path = os.path.join(local_dir, local_filename)
            if not os.path.isfile(mark_path) and ftp_filename in ftp_list:
                tqdm.write('%-31s: %s' %
                           (' -- trying to download ' + desc, ftp_filename))
                down_path = os.path.join(local_dir, ftp_filename)
                with open(down_path, 'wb') as f:
                    ftp.retrbinary("RETR " + ftp_filename, f.write)
                return True

        def get_archive_path(archive, date):
            return archive.replace('$year',     str(date.year)) \
                          .replace('$doy',      str(date.doy).zfill(3)) \
                          .replace('$gpsweek',  str(date.gpsWeek).zfill(4)) \
                          .replace('$gpswkday', str(date.gpsWeekDay))

        for date in (pyDate.Date(mjd=mjd) for mjd in drange):

            sp3_archive = get_archive_path(Config.sp3_path, date)

            if not os.path.exists(sp3_archive):
                os.makedirs(sp3_archive)

            for repro in ('', '/repro2'):
                # try both in the repro and / folders
                folder = "/pub/gps/products/" + date.wwww() + repro
                try:
                    tqdm.write(' -- Changing folder to ' + folder)
                    ftp.cwd(folder)
                    ftp_list = set(ftp.nlst())
                except Exception:
                    # folder not present, skip
                    continue

                for orbit in Config.sp3types + Config.sp3altrn:
                    for ext in ('.sp3', '.clk', '.erp', '7.erp'):
                        try:
                            if ext == '7.erp':
                                # weekly EOP file: wwww + '7.erp' (cf. Example #11)
                                filename = orbit + date.wwww() + ext + '.Z'
                            else:
                                filename = orbit + date.wwwwd() + ext + '.Z'
                            downloadIfMissing(
                                ftp_list, filename, filename, sp3_archive,
                                'EOP' if ext == '7.erp' else ext.upper())
                        except Exception:
                            pass

            ###### now the brdc files #########

            try:
                folder = "/pub/gps/data/daily/%s/%s/%sn" % (
                    date.yyyy(), date.ddd(), date.yyyy()[2:])
                tqdm.write(' -- Changing folder to ' + folder)
                ftp.cwd(folder)
                ftp_list = set(ftp.nlst())
            except Exception:
                continue

            brdc_archive = get_archive_path(Config.brdc_path, date)

            if not os.path.exists(brdc_archive):
                os.makedirs(brdc_archive)

            try:
                filename = 'brdc%s0.%sn' % (str(
                    date.doy).zfill(3), str(date.year)[2:4])
                ftp_filename = filename + '.Z'
                if downloadIfMissing(ftp_list, ftp_filename, filename,
                                     brdc_archive, 'BRDC'):
                    # decompress file
                    pyRunWithRetry.RunCommand('gunzip -f ' + ftp_filename,
                                              15).run_shell()
            except Exception:
                continue

            pbar.set_postfix(gpsWeek='%i %i' % (date.gpsWeek, date.gpsWeekDay))
            pbar.update()

        pbar.close()
        ftp.quit()

    except argparse.ArgumentTypeError as e:
        parser.error(str(e))
Example #10
    def __init__(self, configfile):

        self.options = {
            'path': None,
            'repository': None,
            'parallel': False,
            'cups': None,
            'node_list': None,
            'ip_address': None,
            'brdc': None,
            'sp3_type_1': None,
            'sp3_type_2': None,
            'sp3_type_3': None,
            'sp3_altr_1': None,
            'sp3_altr_2': None,
            'sp3_altr_3': None,
            'grdtab': None,
            'otlgrid': None,
            'otlmodel': 'FES2014b',
            'ppp_path': None,
            'institution': None,
            'info': None,
            'sp3': None,
            'frames': None,
            'atx': None,
            'height_codes': None,
            'ppp_exe': None,
            'ppp_remote_local': ()
        }

        config = ConfigParser.ConfigParser()
        config.readfp(open(configfile))

        # get the archive config
        for iconfig, val in dict(config.items('archive')).iteritems():
            self.options[iconfig] = val

        # get the otl config
        for iconfig, val in dict(config.items('otl')).iteritems():
            self.options[iconfig] = val

        # get the ppp config
        for iconfig, val in dict(config.items('ppp')).iteritems():
            self.options[iconfig] = os.path.expandvars(val).replace('//', '/')

        # frames and dates
        frames = [item.strip() for item in self.options['frames'].split(',')]
        atx = [item.strip() for item in self.options['atx'].split(',')]

        self.Frames = []

        # use a distinct loop variable so the atx list is not shadowed
        for frame, atx_file in zip(frames, atx):
            date = process_date(self.options[frame.lower()].split(','))
            self.Frames += [{
                'name': frame,
                'atx': atx_file,
                'dates': (Date(year=date[0].year,
                               doy=date[0].doy,
                               hour=0,
                               minute=0,
                               second=0),
                          Date(year=date[1].year,
                               doy=date[1].doy,
                               hour=23,
                               minute=59,
                               second=59))
            }]

        self.options['frames'] = self.Frames

        self.archive_path = self.options['path']
        self.sp3_path = self.options['sp3']
        self.brdc_path = self.options['brdc']
        self.repository = self.options['repository']

        self.repository_data_in = os.path.join(self.repository, 'data_in')
        self.repository_data_in_retry = os.path.join(self.repository,
                                                     'data_in_retry')
        self.repository_data_reject = os.path.join(self.repository,
                                                   'data_rejected')

        self.sp3types = [
            self.options['sp3_type_1'], self.options['sp3_type_2'],
            self.options['sp3_type_3']
        ]

        self.sp3types = [
            sp3type for sp3type in self.sp3types if sp3type is not None
        ]

        # alternative sp3 types
        self.sp3altrn = [
            self.options['sp3_altr_1'], self.options['sp3_altr_2'],
            self.options['sp3_altr_3']
        ]

        self.sp3altrn = [
            sp3alter for sp3alter in self.sp3altrn if sp3alter is not None
        ]

        self.run_parallel = (self.options['parallel'] == 'True')
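
ReadOptions above expects gnss_data.cfg to contain [archive], [otl] and [ppp] sections whose keys match the self.options dictionary, including one key per frame name listed under 'frames'. The snippet below writes and parses a minimal illustrative config using the same ConfigParser calls; all paths, keys beyond those read above, and values are placeholder assumptions, not a documented Parallel.GAMIT configuration.

# Illustrative only: a minimal gnss_data.cfg consistent with the keys read by
# ReadOptions above. All values are placeholder assumptions.
import ConfigParser  # Python 2, as in the examples above

SAMPLE_CFG = """
[archive]
path = /archive/gnss
repository = /archive/repository
parallel = True
brdc = /archive/brdc/$year/$doy
sp3 = /archive/sp3/$gpsweek
sp3_type_1 = igs
sp3_altr_1 = igr
frames = IGS14
igs14 = 2010_1,2100_1
atx = igs14.atx

[otl]
grdtab = /opt/gamit/gg/bin/grdtab
otlgrid = /opt/gamit/gg/tables/otl.grid
otlmodel = FES2014b

[ppp]
ppp_path = /opt/ppp
ppp_exe = /opt/ppp/source/ppp
institution = XXX
info = XXX
"""

with open('gnss_data.cfg.sample', 'w') as f:
    f.write(SAMPLE_CFG)

config = ConfigParser.ConfigParser()
config.readfp(open('gnss_data.cfg.sample'))
print(dict(config.items('archive'))['sp3'])  # -> /archive/sp3/$gpsweek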
Example #11
def main():
    parser = argparse.ArgumentParser(description='Archive operations Main Program')

    parser.add_argument('-date', '--date_range', nargs='+', action=required_length(1, 2), metavar='date_start|date_end',
                        help="Date range to check given as [date_start] or [date_start] and [date_end]. "
                             "Allowed formats are yyyy.doy or yyyy/mm/dd..")

    parser.add_argument('-win', '--window', nargs=1, metavar='days', type=int,
                        help="Download data from a given time window determined by today - {days}.")
    try:
        args = parser.parse_args()

        Config = pyOptions.ReadOptions('gnss_data.cfg')

        dates = []

        try:
            if args.window:
                # today - ndays
                d = pyDate.Date(year=datetime.now().year, month=datetime.now().month, day=datetime.now().day)
                dates = [d-int(args.window[0]), d]
            else:
                dates = process_date(args.date_range)
        except ValueError as e:
            parser.error(str(e))

        if dates[0] < pyDate.Date(gpsWeek=650, gpsWeekDay=0):
            dates = [pyDate.Date(gpsWeek=650, gpsWeekDay=0),
                     pyDate.Date(year=datetime.now().year, month=datetime.now().month, day=datetime.now().day)]

        # go through the dates
        drange = np.arange(dates[0].mjd, dates[1].mjd, 1)

        pbar = tqdm(desc='%-30s' % ' >> Synchronizing orbit files', total=len(drange), ncols=160)

        # connect to ftp
        ftp = ftplib.FTP('198.118.242.40', 'Anonymous', '*****@*****.**')

        for date in [pyDate.Date(mjd=mdj) for mdj in drange]:

            sp3_archive = get_archive_path(Config.sp3_path, date)

            if not os.path.exists(sp3_archive):
                os.makedirs(sp3_archive)

            for repro in ['', '/repro2']:
                # try both in the repro and / folders
                folder = "/pub/gps/products/" + date.wwww() + repro
                try:
                    ftp.cwd(folder)
                except Exception:
                    # folder not present, skip
                    continue

                tqdm.write(' -- Changing folder to ' + folder)
                ftp_list = ftp.nlst()

                for orbit in Config.sp3types + Config.sp3altrn:

                    for ext in ['.sp3.Z', '.clk.Z', '.erp.Z']:
                        filename = orbit + date.wwwwd() + ext

                        if not os.path.isfile(os.path.join(sp3_archive, filename)) and filename in ftp_list:
                            tqdm.write('%-31s: %s' % (' -- trying to download ' + ext.replace('.Z', '').upper(), filename))
                            try:
                                ftp.retrbinary("RETR " + filename, open(os.path.join(sp3_archive, filename), 'wb').write)
                            except Exception:
                                continue

                    # now the eop file
                    filename = orbit + date.wwww() + '7.erp.Z'
                    if not os.path.isfile(os.path.join(sp3_archive, filename)) and filename in ftp_list:
                        tqdm.write('%-31s: %s' % (' -- trying to download EOP', filename))
                        try:
                            ftp.retrbinary("RETR " + filename, open(os.path.join(sp3_archive, filename), 'wb').write)
                        except Exception:
                            continue

            ###### now the brdc files #########

            try:
                folder = "/pub/gps/data/daily/%s/%s/%sn" % (date.yyyy(), date.ddd(), date.yyyy()[2:])
                tqdm.write(' -- Changing folder to ' + folder)
                ftp.cwd(folder)
                ftp_list = ftp.nlst()
            except Exception:
                continue

            brdc_archive = get_archive_path(Config.brdc_path, date)

            if not os.path.exists(brdc_archive):
                os.makedirs(brdc_archive)

            filename = 'brdc' + str(date.doy).zfill(3) + '0.' + str(date.year)[2:4] + 'n'

            if not os.path.isfile(os.path.join(brdc_archive, filename)) and filename + '.Z' in ftp_list:
                tqdm.write('%-31s: %s' % (' -- trying to download BRDC', filename))
                try:
                    ftp.retrbinary("RETR " + filename + '.Z', open(os.path.join(brdc_archive, filename + '.Z'), 'wb').write)
                    # decompress file
                    cmd = pyRunWithRetry.RunCommand('gunzip -f ' + os.path.join(brdc_archive, filename + '.Z'), 15)
                    cmd.run_shell()
                except Exception:
                    continue

            pbar.set_postfix(gpsWeek='%i %i' % (date.gpsWeek, date.gpsWeekDay))
            pbar.update()

        pbar.close()
        ftp.quit()

    except argparse.ArgumentTypeError as e:
        parser.error(str(e))
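
Two helpers this example calls but does not define are sketched below. required_length follows the standard argparse custom-action recipe and raises argparse.ArgumentTypeError, which is what the except clause at the bottom of main() catches; get_archive_path is an assumption about how the configured sp3/brdc roots are expanded per date (the real Parallel.GAMIT layout may differ):

import argparse


def required_length(nmin, nmax):
    """Return an argparse action that accepts between nmin and nmax values."""
    class RequiredLength(argparse.Action):
        def __call__(self, parser, namespace, values, option_string=None):
            if not nmin <= len(values) <= nmax:
                raise argparse.ArgumentTypeError(
                    'argument "%s" requires between %d and %d arguments'
                    % (self.dest, nmin, nmax))
            setattr(namespace, self.dest, values)
    return RequiredLength


def get_archive_path(path_template, date):
    """Hypothetical: expand date tokens in the configured archive path, so one
    helper can serve both the by-week sp3 tree and the by-doy brdc tree."""
    return (path_template.replace('$year', str(date.year))
                         .replace('$doy', str(date.doy).zfill(3))
                         .replace('$gpsweek', date.wwww()))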
Example #12
def main():

    parser = argparse.ArgumentParser(
        description='Parallel.GAMIT main execution program')

    parser.add_argument(
        'session_cfg',
        type=str,
        nargs=1,
        metavar='session.cfg',
        help="Filename with the session configuration to run Parallel.GAMIT")

    parser.add_argument(
        '-d',
        '--date',
        type=str,
        nargs=2,
        metavar='{date}',
        help=
        "Date range to process. Can be specified in yyyy/mm/dd yyyy_doy wwww-d format"
    )

    parser.add_argument(
        '-dp',
        '--date_parser',
        type=str,
        nargs=2,
        metavar='{year} {doys}',
        help="Parse date using ranges and commas (e.g. 2018 1,3-6). "
        "Cannot cross year boundaries")

    parser.add_argument(
        '-e',
        '--exclude',
        type=str,
        nargs='+',
        metavar='{station}',
        help=
        "List of stations to exclude from this processing (e.g. -e igm1 lpgs vbca)"
    )

    parser.add_argument(
        '-c',
        '--check_mode',
        type=str,
        nargs='+',
        metavar='{station}',
        help=
        "Check station(s) mode. If station(s) are not present in the GAMIT polyhedron, "
        "(i.e. the RINEX file(s) were missing at the time of the processing) Parallel.GAMIT will "
        "add the station to the closest subnetwork(s) and reprocess them. If station(s) were "
        "present at the time of the processing but failed to process (i.e. they are in the "
        "missing stations list), these subnetworks will be reprocessed to try to obtain a "
        "solution. Station list provided in the cfg is ignored in this mode. Therefore, changes "
        "in the station list will not produce any changes in network configuration. Purge not "
        "allowed when using this mode. (Syntax: -c igm1 lpgs rms.vbca)")

    parser.add_argument(
        '-i',
        '--ignore_missing',
        action='store_true',
        help=
        "When using check mode or processing existing sessions, ignore missing stations. In other "
        "words, do not try to reprocess sessions that have missing solutions.")

    parser.add_argument(
        '-p',
        '--purge',
        action='store_true',
        default=False,
        help=
        "Purge year doys from the database and directory structure and re-run the solution."
    )

    parser.add_argument(
        '-dry',
        '--dry_run',
        action='store_true',
        help="Generate the directory structures (locally) but do not run GAMIT. "
        "Output is left in the production directory.")

    parser.add_argument(
        '-kml',
        '--create_kml',
        action='store_true',
        help="Create a KML with everything processed in this run.")

    parser.add_argument('-np',
                        '--noparallel',
                        action='store_true',
                        help="Execute command without parallelization.")

    args = parser.parse_args()

    cnn = dbConnection.Cnn('gnss_data.cfg')  # type: dbConnection.Cnn

    dates = None
    drange = None
    try:
        if args.date_parser:
            year = int(args.date_parser[0])
            doys = parseIntSet(args.date_parser[1])

            if any(doy < 1 for doy in doys):
                parser.error(
                    'DOYs cannot start with zero. Please select a DOY range '
                    'between 1-365/366')

            if 366 in doys:
                # full Gregorian leap-year test: century years must be divisible by 400
                if not (year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)):
                    parser.error(
                        'Year ' + str(year) +
                        ' is not a leap year: DOY 366 does not exist.')

            dates = [pyDate.Date(year=year, doy=i) for i in doys]
            drange = [dates[0], dates[-1]]
        else:
            drange = process_date(args.date, missing_input=None)

            if not all(drange):
                parser.error(
                    'Must specify a start and end date for the processing.')

            # get the dates to purge
            dates = [
                pyDate.Date(mjd=i)
                for i in range(drange[0].mjd, drange[1].mjd + 1)
            ]

    except ValueError as e:
        parser.error(str(e))

    print(
        ' >> Reading configuration files and creating project network, please wait...'
    )

    GamitConfig = pyGamitConfig.GamitConfiguration(
        args.session_cfg[0])  # type: pyGamitConfig.GamitConfiguration

    print(
        ' >> Checking GAMIT tables for requested config and year, please wait...'
    )

    JobServer = pyJobServer.JobServer(
        GamitConfig,
        check_gamit_tables=(pyDate.Date(year=drange[1].year,
                                        doy=drange[1].doy),
                            GamitConfig.gamitopt['eop_type']),
        run_parallel=not args.noparallel,
        software_sync=GamitConfig.gamitopt['gamit_remote_local'])

    # to exclude stations, append them to GamitConfig.NetworkConfig with a - in front
    exclude = args.exclude
    if exclude is not None:
        print(' >> User selected list of stations to exclude:')
        Utils.print_columns(exclude)
        GamitConfig.NetworkConfig['stn_list'] += ',-' + ',-'.join(exclude)

    # initialize stations in the project
    stations = station_list(cnn,
                            GamitConfig.NetworkConfig['stn_list'].split(','),
                            drange)

    check_station_list = args.check_mode
    if check_station_list is not None:
        print(' >> Check mode. List of stations to check for selected days:')
        Utils.print_columns(check_station_list)
        check_stations = station_list(cnn, check_station_list, drange)
    else:
        check_stations = StationCollection()

    # store_true defaults to False, so args.dry_run is never None
    dry_run = args.dry_run

    if not dry_run and not len(check_stations):
        # ignore if calling a dry run
        # purge solutions if requested
        purge_solutions(JobServer, args, dates, GamitConfig)
    elif args.purge:
        tqdm.write(
            ' >> Dry run or check mode activated. Cannot purge solutions in these modes.'
        )

    # run the job server
    sessions = ExecuteGamit(cnn, JobServer, GamitConfig, stations,
                            check_stations, args.ignore_missing, dates,
                            args.dry_run, args.create_kml)

    # execute globk on doys that had to be divided into subnets
    if not args.dry_run:
        ExecuteGlobk(cnn, JobServer, GamitConfig, sessions, dates)

        # parse the zenith delay outputs
        ParseZTD(GamitConfig.NetworkConfig.network_id.lower(), dates, sessions,
                 GamitConfig, JobServer)

    tqdm.write(' >> %s Successful exit from Parallel.GAMIT' % print_datetime())
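
parseIntSet, used by --date_parser above, is not shown in these examples; a minimal sketch consistent with the callers is given below. Returning a sorted list is what makes drange = [dates[0], dates[-1]] valid as the range endpoints:

def parseIntSet(inputstr):
    """Minimal sketch: parse '1,3-6' into the sorted list [1, 3, 4, 5, 6]."""
    selection = set()
    for token in inputstr.split(','):
        token = token.strip()
        if '-' in token:
            # a hyphenated token is an inclusive range of DOYs
            lo, hi = token.split('-', 1)
            selection.update(range(int(lo), int(hi) + 1))
        elif token:
            selection.add(int(token))
    return sorted(selection)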
Example #13
def main():

    global cnn

    parser = argparse.ArgumentParser(description='Parallel.GAMIT main execution program')

    parser.add_argument('session_cfg', type=str, nargs=1, metavar='session.cfg',
                        help="Filename with the session configuration to run Parallel.GAMIT")
    parser.add_argument('-d', '--date', type=str, nargs=2, metavar='{date}',
                        help="Date range to process. Can be specified in yyyy/mm/dd yyyy_doy wwww-d format")
    parser.add_argument('-dp', '--date_parser', type=str, nargs=2, metavar='{year} {doys}',
                        help="Parse date using ranges and commas (e.g. 2018 1,3-6). "
                             "Cannot cross year boundaries")
    parser.add_argument('-e', '--exclude', type=str, nargs='+', metavar='station',
                        help="List of stations to exclude from this processing (e.g. -e igm1 lpgs vbca)")
    parser.add_argument('-p', '--purge', action='store_true',
                        help="Purge year doys from the database and directory structure and re-run the solution.")
    parser.add_argument('-dry', '--dry_run', action='store_true',
                        help="Generate the directory structures (locally) but do not run GAMIT. "
                             "Output is left in the production directory.")
    parser.add_argument('-kml', '--generate_kml', action='store_true',
                        help="Generate KML and exit without running GAMIT.")

    parser.add_argument('-np', '--noparallel', action='store_true', help="Execute command without parallelization.")

    args = parser.parse_args()

    dates = None
    drange = None
    try:
        if args.date_parser:
            year = int(args.date_parser[0])
            doys = parseIntSet(args.date_parser[1])

            if any(doy < 1 for doy in doys):
                parser.error('DOYs cannot start with zero. Please select a DOY range between 1-365/366')

            if 366 in doys:
                # full Gregorian leap-year test: century years must be divisible by 400
                if not (year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)):
                    parser.error('Year ' + str(year) + ' is not a leap year: DOY 366 does not exist.')

            dates = [pyDate.Date(year=year, doy=i) for i in doys]
            drange = [dates[0], dates[-1]]
        else:
            drange = process_date(args.date, missing_input=None)

            if not all(drange):
                parser.error('Must specify a start and end date for the processing.')

            # get the dates to purge
            dates = [pyDate.Date(mjd=i) for i in range(drange[0].mjd, drange[1].mjd + 1)]

    except ValueError as e:
        parser.error(str(e))

    print(' >> Reading configuration files and creating project network, please wait...')

    GamitConfig = pyGamitConfig.GamitConfiguration(args.session_cfg[0])  # type: pyGamitConfig.GamitConfiguration

    print(' >> Checking GAMIT tables for requested config and year, please wait...')

    JobServer = pyJobServer.JobServer(GamitConfig,
                                      check_gamit_tables=(pyDate.Date(year=drange[1].year, doy=drange[1].doy),
                                                          GamitConfig.gamitopt['eop_type']),
                                      run_parallel=not args.noparallel,
                                      software_sync=GamitConfig.gamitopt['gamit_remote_local'])

    cnn = dbConnection.Cnn(GamitConfig.gamitopt['gnss_data'])  # type: dbConnection.Cnn

    # to exclude stations, append them to GamitConfig.NetworkConfig with a - in front
    exclude = args.exclude
    if exclude is not None:
        print(' >> User selected list of stations to exclude:')
        Utils.print_columns(exclude)
        GamitConfig.NetworkConfig['stn_list'] += ',-' + ',-'.join(exclude)

    # store_true defaults to False, so args.dry_run is never None
    dry_run = args.dry_run

    if not dry_run:
        # ignore if calling a dry run
        # purge solutions if requested
        purge_solutions(JobServer, args, dates, GamitConfig)

    # initialize stations in the project
    stations = station_list(cnn, GamitConfig.NetworkConfig, drange)

    tqdm.write(' >> Creating GAMIT session instances, please wait...')

    sessions = []
    archive = pyArchiveStruct.RinexStruct(cnn)  # type: pyArchiveStruct.RinexStruct

    for date in tqdm(dates, ncols=80):

        # make the dir for these sessions
        # this avoids a racing condition when starting each process
        pwd = GamitConfig.gamitopt['solutions_dir'].rstrip('/') + '/' + date.yyyy() + '/' + date.ddd()

        if not os.path.exists(pwd):
            os.makedirs(pwd)

        net_object = Network(cnn, archive, GamitConfig, stations, date)

        sessions += net_object.sessions

    if args.generate_kml:
        # generate a KML of the sessions
        generate_kml(dates, sessions, GamitConfig)
        exit()

    # print a summary of the current project (NOT VERY USEFUL AFTER ALL)
    # print_summary(stations, sessions, drange)

    # run the job server
    ExecuteGamit(sessions, JobServer, dry_run)

    # execute globk on doys that had to be divided into subnets
    if not args.dry_run:
        ExecuteGlobk(GamitConfig, sessions, dates)

        # parse the zenith delay outputs
        ParseZTD(GamitConfig.NetworkConfig.network_id, sessions, GamitConfig)

    print(' >> Done processing and parsing information. Successful exit from Parallel.GAMIT')
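
process_date, shared by all of these programs, is also not shown. The sketch below is simplified (the real helper additionally takes an allow_days flag, used in Example #14) but captures the behavior the callers above rely on: it parses yyyy/mm/dd, yyyy_doy or wwww-d strings into a [start, end] pair, fills missing endpoints with wide defaults unless missing_input=None is passed, and raises ValueError on unparsable input so callers can route it to parser.error. It assumes the pyDate module imported by every example here:

def process_date(arg, missing_input='fill'):
    """Simplified sketch: parse up to two date strings into [start, end]."""
    def parse_one(s):
        if '/' in s:                                   # yyyy/mm/dd
            y, m, d = (int(x) for x in s.split('/'))
            return pyDate.Date(year=y, month=m, day=d)
        if '_' in s:                                   # yyyy_doy
            y, doy = (int(x) for x in s.split('_'))
            return pyDate.Date(year=y, doy=doy)
        if '-' in s:                                   # wwww-d
            w, d = (int(x) for x in s.split('-'))
            return pyDate.Date(gpsWeek=w, gpsWeekDay=d)
        raise ValueError('could not parse date: ' + s)

    if missing_input == 'fill':
        # same wide-open defaults the callers above start from
        dates = [pyDate.Date(year=1980, doy=1), pyDate.Date(year=2100, doy=1)]
    else:
        dates = [missing_input, missing_input]

    for i, s in enumerate(arg[:2] if arg else []):
        dates[i] = parse_one(s)
    return dates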
Example #14
def main():

    global wrms_n, wrms_e, wrms_u, project

    parser = argparse.ArgumentParser(
        description='GNSS daily repetitivities analysis (DRA)')

    parser.add_argument(
        'project',
        type=str,
        nargs=1,
        metavar='{project name}',
        help=
        "Specify the project name used to process the GAMIT solutions in Parallel.GAMIT."
    )

    parser.add_argument(
        '-d',
        '--date_filter',
        nargs='+',
        metavar='date',
        help=
        'Date range filter. Can be specified in yyyy/mm/dd yyyy_doy  wwww-d format'
    )

    parser.add_argument(
        '-w',
        '--plot_window',
        nargs='+',
        metavar='date',
        help=
        'Date window range to plot. Can be specified in yyyy/mm/dd yyyy_doy  wwww-d format'
    )
    parser.add_argument('-hist',
                        '--histogram',
                        action='store_true',
                        help="Plot a histogram of the daily repetitivities")
    parser.add_argument(
        '-v',
        '--verbose',
        action='store_true',
        help=
        "Provide additional information during the alignment process (for debugging purposes)"
    )

    parser.add_argument('-np',
                        '--noparallel',
                        action='store_true',
                        help="Execute command without parallelization.")

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")

    project = args.project[0]

    dates = [pyDate.Date(year=1980, doy=1), pyDate.Date(year=2100, doy=1)]

    Config = pyOptions.ReadOptions(
        "gnss_data.cfg")  # type: pyOptions.ReadOptions
    JobServer = pyJobServer.JobServer(
        Config,
        run_parallel=not args.noparallel)  # type: pyJobServer.JobServer

    try:
        dates = process_date(args.date_filter)
    except ValueError as e:
        parser.error(str(e))

    pdates = None
    if args.plot_window is not None:
        if len(args.plot_window) == 1:
            try:
                pdates = process_date(args.plot_window,
                                      missing_input=None,
                                      allow_days=False)
                pdates = (pdates[0].fyear, )
            except ValueError:
                # an integer value
                pdates = float(args.plot_window[0])
        else:
            pdates = process_date(args.plot_window)
            pdates = (pdates[0].fyear, pdates[1].fyear)

    # create folder for plots
    path_plot = project + '_dra'
    if not os.path.isdir(path_plot):
        os.makedirs(path_plot)

    ########################################
    # load polyhedrons
    # create the DRA object
    dra = DRA(cnn, args.project[0], dates[0], dates[1], args.verbose)

    dra.stack_dra()

    dra.to_json(project + '_dra.json')

    missing_doys = []

    tqdm.write(
        ' >> Daily repetitivity analysis done. DOYs with wrms > 8 mm are shown below:'
    )
    for d in dra:
        if d.wrms is not None:
            if d.wrms > 0.008:
                tqdm.write(
                    ' -- %s (%04i) %2i it wrms: %4.1f D-W: %5.3f IQR: %4.1f' %
                    (d.date.yyyyddd(), d.stations_used, d.iterations,
                     d.wrms * 1000, d.down_frac, d.iqr * 1000))

    qbar = tqdm(total=len(dra.stations),
                desc=' >> Computing DRAs',
                ncols=160,
                disable=None)

    modules = ('pyETM', 'dbConnection', 'traceback', 'io', 'numpy')
    JobServer.create_cluster(compute_dra,
                             progress_bar=qbar,
                             callback=callback_handler,
                             modules=modules)

    # plot each DRA
    for stn in dra.stations:
        NetworkCode = stn['NetworkCode']
        StationCode = stn['StationCode']

        ts = dra.get_station(NetworkCode, StationCode)
        JobServer.submit(ts, NetworkCode, StationCode, pdates, project,
                         args.histogram)

    JobServer.wait()
    qbar.close()
    JobServer.close_cluster()

    wrms_n = np.array(wrms_n)
    wrms_e = np.array(wrms_e)
    wrms_u = np.array(wrms_u)

    # plot the WRMS of the DRA stack and number of stations
    f, axis = plt.subplots(nrows=3, ncols=2, figsize=(15, 10))

    # WRMS
    ax = axis[0][0]
    ax.plot([t['fyear'] for t in dra.transformations[0]],
            [t['wrms'] * 1000 for t in dra.transformations[0]],
            'ob',
            markersize=2)
    ax.set_ylabel('WRMS [mm]')
    ax.grid(True)
    ax.set_ylim(0, 10)

    # station count
    ax = axis[1][0]
    ax.plot([t['fyear'] for t in dra.transformations[0]],
            [t['stations_used'] for t in dra.transformations[0]],
            'ob',
            markersize=2)
    ax.set_ylabel('Station count')
    ax.grid(True)

    # d-w fraction
    ax = axis[2][0]
    ax.plot([t['fyear'] for t in dra.transformations[0]],
            [t['downweighted_fraction'] for t in dra.transformations[0]],
            'ob',
            markersize=2)
    ax.set_ylabel('DW fraction')
    ax.grid(True)

    ax = axis[0][1]
    ax.hist(wrms_n[wrms_n <= 8], 40, alpha=0.75, facecolor='blue')
    ax.grid(True)
    ax.set_xlim(0, 8)
    ax.set_ylabel('# stations')
    ax.set_xlabel('WRMS misfit N [mm]')
    ax.set_title('Daily repetitivities NEU')

    ax = axis[1][1]
    ax.hist(wrms_e[wrms_e <= 8], 40, alpha=0.75, facecolor='blue')
    ax.grid(True)
    ax.set_xlim(0, 8)
    ax.set_ylabel('# stations')
    ax.set_xlabel('WRMS misfit E [mm]')

    ax = axis[2][1]
    ax.hist(wrms_u[wrms_u <= 10], 40, alpha=0.75, facecolor='blue')
    ax.grid(True)
    ax.set_xlim(0, 10)
    ax.set_ylabel('# stations')
    ax.set_xlabel('WRMS misfit U [mm]')

    f.suptitle('Daily repetitivity analysis for project %s\n'
               'Solutions with WRMS above the plotted axis limits are not shown' % project,
               fontsize=12,
               family='monospace')
    plt.savefig(project + '_dra.png')
    plt.close()

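
Finally, compute_dra and callback_handler are also left out of this example. The callback is what feeds the wrms_n/e/u globals declared at the top of main(); a hedged sketch follows, assuming dispy-style job objects (as wrapped by pyJobServer) whose result carries the per-station WRMS triplet already converted to millimetres. The exact result layout is an assumption:

def callback_handler(job):
    """Hedged sketch: collect per-station WRMS values returned by compute_dra."""
    global wrms_n, wrms_e, wrms_u

    if job.exception:
        tqdm.write(' -- Error while computing DRA:\n' + str(job.exception))
    elif job.result is not None:
        # assumed layout: (wrms north, wrms east, wrms up), all in mm,
        # matching the <= 8 / <= 10 mm cuts in the histograms above
        n, e, u = job.result
        wrms_n.append(n)
        wrms_e.append(e)
        wrms_u.append(u)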