Example #1
def purge_solution(pwd, project, date):

    cnn = dbConnection.Cnn('gnss_data.cfg')

    # delete the main solution dir (may be entire GAMIT run or combination directory)
    if os.path.isdir(os.path.join(pwd, project)):
        shutil.rmtree(os.path.join(pwd, project))

    # possible subnetworks
    for sub in glob.glob(os.path.join(pwd, project + '.*')):
        shutil.rmtree(sub)

    # now remove the database entries
    cnn.query('DELETE FROM gamit_soln_excl WHERE "Year" = %i AND "DOY" = %i '
              'AND "Project" = \'%s\'' % (date.year, date.doy, project))

    cnn.query('DELETE FROM stacks WHERE "Year" = %i AND "DOY" = %i '
              'AND "Project" = \'%s\'' % (date.year, date.doy, project))

    cnn.query('DELETE FROM gamit_soln WHERE "Year" = %i AND "DOY" = %i '
              'AND "Project" = \'%s\'' % (date.year, date.doy, project))

    cnn.query('DELETE FROM gamit_stats WHERE "Year" = %i AND "DOY" = %i '
              'AND "Project" = \'%s\'' % (date.year, date.doy, project))

    cnn.query('DELETE FROM gamit_subnets WHERE "Year" = %i AND "DOY" = %i '
              'AND "Project" = \'%s\'' % (date.year, date.doy, project))

    cnn.query('DELETE FROM gamit_ztd WHERE "Year" = %i AND "DOY" = %i  '
              'AND "Project" = \'%s\'' % (date.year, date.doy, project))

    cnn.close()
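
A side note on the block above: the six DELETE statements differ only in the
table name, so they can be driven from a list. A minimal refactoring sketch
(not part of the original source; purge_solution_db is a hypothetical helper
that assumes the same cnn.query interface used above):

def purge_solution_db(cnn, project, date):
    # iterate over the affected tables instead of repeating the DELETE
    for table in ('gamit_soln_excl', 'stacks', 'gamit_soln',
                  'gamit_stats', 'gamit_subnets', 'gamit_ztd'):
        cnn.query('DELETE FROM %s WHERE "Year" = %i AND "DOY" = %i '
                  'AND "Project" = \'%s\'' % (table, date.year, date.doy, project))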
Example #2
def main():

    parser = argparse.ArgumentParser(description='GNSS time series stacker')

    parser.add_argument(
        'project_file',
        type=str,
        nargs=1,
        metavar='{project cfg file}',
        help=
        "Project CFG file with all the stations being processed in Parallel.GAMIT"
    )

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")

    GamitConfig = GamitConfiguration(
        args.project_file[0], check_config=False)  # type: GamitConfiguration

    stations = station_list(
        cnn, GamitConfig.NetworkConfig,
        [Date(year=1999, doy=100),
         Date(year=1999, doy=128)])

    # split the stations into subnet_count subnetworks

    archive = pyArchiveStruct.RinexStruct(
        cnn)  # type: pyArchiveStruct.RinexStruct

    net_object = pyNetwork.Network(cnn, archive, GamitConfig, stations,
                                   Date(year=1999, doy=128))

    generate_kml([Date(year=1999, doy=128)], net_object.sessions, GamitConfig)
Example #3
def main():

    parser = argparse.ArgumentParser(description='GNSS time series stacker')

    parser.add_argument('project', type=str, nargs=1, metavar='{project name}',
                        help="Specify the project name used to process the GAMIT solutions in Parallel.GAMIT.")
    parser.add_argument('-np', '--noparallel', action='store_true', help="Execute command without parallelization.")

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")
    Config = pyOptions.ReadOptions("gnss_data.cfg")  # type: pyOptions.ReadOptions

    if not args.noparallel:
        JobServer = pyJobServer.JobServer(Config, run_node_test=False)  # type: pyJobServer.JobServer
    else:
        JobServer = None
        Config.run_parallel = False

    # create the execution log

    # load polyhedrons
    project = Project(cnn, args.project[0])

    # plot initial state
    tqdm.write(' -- Plotting initial ETMs (unaligned)...')

    #for etm in tqdm(project.etms, ncols=160):
    #    etm.plot(pngfile=args.project[0] + '/' + etm.NetworkCode + '.' + etm.StationCode + '_0.png', residuals=True)

    project.align_stack()

    tqdm.write(' -- Plotting intermediate step ETMs (aligned)...')
Example #4
def station_etm(station, stn_ts, stack_name, iteration=0):

    cnn = dbConnection.Cnn("gnss_data.cfg")

    vertices = None

    try:
        # save the time series
        ts = pyETM.GamitSoln(cnn, stn_ts, station['NetworkCode'],
                             station['StationCode'], stack_name)

        # create the ETM object
        etm = pyETM.GamitETM(cnn, station['NetworkCode'],
                             station['StationCode'], False, False, ts)

        if etm.A is not None:
            if iteration == 0:
                # if iteration is == 0, then the target frame has to be the PPP ETMs
                vertices = etm.get_etm_soln_list(use_ppp_model=True, cnn=cnn)
            else:
                # on next iters, the target frame is the inner geometry of the stack
                vertices = etm.get_etm_soln_list()

    except pyETM.pyETMException:

        vertices = None

    return vertices if vertices else None
Example #5
def compare_stninfo_rinex(NetworkCode, StationCode, STime, ETime,
                          rinex_serial):

    try:
        cnn = dbConnection.Cnn("gnss_data.cfg")
    except Exception:
        return traceback.format_exc() + ' could not open the database while ' \
                                        'processing %s.%s' % (NetworkCode, StationCode), None

    try:
        # get the center of the session
        date = STime + (ETime - STime) / 2
        date = pyDate.Date(datetime=date)

        stninfo = pyStationInfo.StationInfo(cnn, NetworkCode, StationCode,
                                            date)

    except pyStationInfo.pyStationInfoException as e:
        return "Station Information error: " + str(e), None

    if stninfo.currentrecord.ReceiverSerial.lower() != rinex_serial.lower():
        return None, [
            date, rinex_serial,
            stninfo.currentrecord.ReceiverSerial.lower()
        ]

    return None, None
Example #6
def main():

    parser = argparse.ArgumentParser(description='GNSS time series stacker')

    parser.add_argument('project', type=str, nargs=1, metavar='{project name}',
                        help="Specify the project name used to process the GAMIT solutions in Parallel.GAMIT.")

    parser.add_argument('sinex', type=str, nargs=1, metavar='{sinex file}',
                        help="SINEX file to update.")

    parser.add_argument('-d', '--date_filter', nargs='+', metavar='date',
                        help='Date range filter. Can be specified in yyyy/mm/dd, yyyy_doy or wwww-d format')

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")
    Config = pyOptions.ReadOptions("gnss_data.cfg")  # type: pyOptions.ReadOptions

    dates = [pyDate.Date(year=1980, doy=1), pyDate.Date(year=2100, doy=1)]
    try:
        dates = process_date(args.date_filter)
    except ValueError as e:
        parser.error(str(e))

    sinex = args.sinex[0]
    project = args.project[0]

    process_sinex(cnn, project, dates, sinex)
Example #7
def main():

    parser = argparse.ArgumentParser(description='GNSS time series stacker')

    parser.add_argument('project', type=str, nargs=1, metavar='{project name}',
                        help="Specify the project name used to process the GAMIT solutions in Parallel.GAMIT.")
    parser.add_argument('-d', '--date_filter', nargs='+', metavar='date',
                        help='Date range filter. Can be specified in yyyy/mm/dd, yyyy_doy or wwww-d format')

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")
    Config = pyOptions.ReadOptions("gnss_data.cfg")  # type: pyOptions.ReadOptions

    # create the execution log

    dates = [pyDate.Date(year=1980, doy=1),
             pyDate.Date(year=2100, doy=1)]
    try:
        dates = process_date(args.date_filter)
    except ValueError as e:
        parser.error(str(e))

    # create folder for plots

    if not os.path.isdir(args.project[0]):
        os.makedirs(args.project[0])

    ########################################
    # load polyhedrons

    project = dra(cnn, args.project[0], dates)
Example #8
def check_rinex_stn(NetworkCode, StationCode, start_date, end_date):

    # load the connection
    try:
        # try to open a connection to the database
        cnn = dbConnection.Cnn("gnss_data.cfg")
        Config = pyOptions.ReadOptions("gnss_data.cfg")
    except Exception:
        return traceback.format_exc() + ' processing: (' + NetworkCode + '.' + StationCode \
                + ') using node ' + platform.node(), None

    try:
        Archive = pyArchiveStruct.RinexStruct(cnn)

        rs = cnn.query('SELECT * FROM rinex WHERE "NetworkCode" = \'%s\' AND '
                       '"StationCode" = \'%s\' AND '
                       '"ObservationSTime" BETWEEN \'%s\' AND \'%s\' '
                       'ORDER BY "ObservationSTime"' %
                       (NetworkCode, StationCode, start_date.yyyymmdd(),
                        end_date.yyyymmdd()))

        rnxtbl = rs.dictresult()
        missing_files = []

        for rnx in rnxtbl:

            crinex_path = os.path.join(
                Config.archive_path,
                Archive.build_rinex_path(NetworkCode,
                                         StationCode,
                                         rnx['ObservationYear'],
                                         rnx['ObservationDOY'],
                                         filename=rnx['Filename']))

            if not os.path.exists(crinex_path):
                # problem with file! does not appear to be in the archive

                Archive.remove_rinex(rnx)

                event = pyEvents.Event(
                    Description=
                    'A missing RINEX file was found during RINEX integrity check: '
                    + crinex_path +
                    '. It has been removed from the database. Consider rerunning PPP for this station.',
                    NetworkCode=NetworkCode,
                    StationCode=StationCode,
                    Year=rnx['ObservationYear'],
                    DOY=rnx['ObservationDOY'])

                cnn.insert_event(event)

                missing_files += [crinex_path]

        return None, missing_files

    except Exception:
        return traceback.format_exc() + ' processing: ' + NetworkCode + '.' + \
               StationCode + ' using node ' + platform.node(), None
Example #9
def station_etm(project, station, stn_ts, exclude, iteration=0):

    msg = None
    add_exclude = []

    cnn = dbConnection.Cnn("gnss_data.cfg")

    sql_r = 'INSERT INTO stack_residuals ' \
            '("NetworkCode", "StationCode", "Project", x, y, z, sigmax, sigmay, sigmaz, "Year", "DOY") ' \
            'VALUES (%s, %s, \'' + project + '\', %f, %f, %f, %f, %f, %f, %i, %i)'

    sql_s = 'INSERT INTO stacks ' \
            '("NetworkCode", "StationCode", "Project", "X", "Y", "Z", sigmax, sigmay, sigmaz, "Year", "DOY", "FYear") ' \
            'VALUES (\'' + station.NetworkCode + '\', \'' + station.StationCode + '\', \'' \
            + project + '\', %f, %f, %f, 0, 0, 0, %i, %i, %f)'

    # make sure it is sorted by date
    stn_ts.sort(key=lambda k: (k[3], k[4]))

    try:
        # save the time series
        ts = pyETM.GamitSoln(cnn, stn_ts, station.NetworkCode,
                             station.StationCode)

        cnn.executemany(
            sql_s,
            zip(ts.x.tolist(), ts.y.tolist(), ts.z.tolist(),
                [t.year for t in ts.date], [t.doy for t in ts.date],
                [t.fyear for t in ts.date]))

        if not exclude:
            # create the ETM object
            etm = pyETM.GamitETM(cnn, station.NetworkCode, station.StationCode,
                                 False, False, ts)

            if etm.A is None:
                # no contribution to stack, remove from the station list
                add_exclude = [station.dictionary]
            else:
                # insert the residuals for the station in stack_residuals
                # these values will be used later on in helmert_stack
                if iteration == 0:
                    # if iteration is == 0, then the target frame has to be the PPP ETMs
                    cnn.executemany(
                        sql_r,
                        etm.get_residuals_dict(use_ppp_model=True, cnn=cnn))
                else:
                    # on next iters, the target frame is the inner geometry of the stack
                    cnn.executemany(sql_r, etm.get_residuals_dict())

    except Exception as e:

        add_exclude = [station.dictionary]
        msg = 'Error while producing ETM for %s.%s: ' % (
            station.NetworkCode, station.StationCode) + str(e)

    return add_exclude, msg
Example #10
def gamit_callback(job):

    result = job.result

    if result is not None:
        msg = []
        if 'error' not in result.keys():
            if result['nrms'] > 1:
                msg.append('    > NRMS > 1.0 (%.3f)' % result['nrms'])

            if result['wl'] < 60:
                msg.append('    > WL fixed < 60 (%.1f)' % result['wl'])

            if result['missing']:
                msg.append('    > Missing sites in solution: ' +
                           ', '.join(result['missing']))

            # DDG: only show sessions with problems to facilitate debugging.
            if result['success']:
                if len(msg) > 0:
                    tqdm.write(
                        ' -- %s Done processing: %s -> WARNINGS:\n%s' %
                        (print_datetime(), result['session'], '\n'.join(msg)))

                # insert information in gamit_stats
                try:
                    cnn = dbConnection.Cnn(
                        'gnss_data.cfg')  # type: dbConnection.Cnn
                    cnn.insert('gamit_stats', result)
                    cnn.close()
                except dbConnection.dbErrInsert as e:
                    tqdm.write(
                        ' -- %s Error while inserting GAMIT stat for %s: %s' %
                        (print_datetime(), result['session'], str(e)))

            else:
                tqdm.write(' -- %s Done processing: %s -> FATAL:\n'
                           '    > Failed to complete. Check monitor.log:\n%s' %
                           (print_datetime(), result['session'],
                            indent('\n'.join(result['fatals']), 4)))
                # write FATAL to file
                with open('FATAL.log', 'a') as f:
                    f.write(
                        'ON %s session %s -> FATAL: Failed to complete. Check monitor.log\n%s\n'
                        % (print_datetime(), result['session'],
                           indent('\n'.join(result['fatals']), 4)))
        else:
            tqdm.write(
                ' -- %s Error in session %s message from node follows -> \n%s'
                % (print_datetime(), result['session'], result['error']))

    else:
        tqdm.write(
            ' -- %s Fatal error on node %s message from node follows -> \n%s' %
            (print_datetime(), job.ip_addr, job.exception))
Example #11
def main():

    parser = argparse.ArgumentParser(description='GNSS time series stacker')

    parser.add_argument('project', type=str, nargs=1, metavar='{project name}',
                        help="Specify the project name used to process the GAMIT solutions in Parallel.GAMIT.")
    parser.add_argument('-d', '--date_filter', nargs='+', metavar='date',
                        help='Date range filter. Can be specified in yyyy/mm/dd, yyyy_doy or wwww-d format')

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")

    project = args.project[0]

    dates = [pyDate.Date(year=1980, doy=1), pyDate.Date(year=2100, doy=1)]
    try:
        dates = process_date(args.date_filter)
    except ValueError as e:
        parser.error(str(e))

    # create folder for plots

    if not os.path.isdir(project + '_dra'):
        os.makedirs(project + '_dra')

    ########################################
    # load polyhedrons

    dra = DRA(cnn, args.project[0], dates[1])

    dra.stack_dra()

    for stn in tqdm(dra.stations):
        NetworkCode = stn['NetworkCode']
        StationCode = stn['StationCode']

        # load from the db
        ts = dra.get_station(NetworkCode, StationCode)

        if ts.size:
            try:
                if ts.shape[0] > 2:
                    dts = np.append(np.diff(ts[:, 0:3], axis=0), ts[1:, -3:], axis=1)

                    dra_ts = pyETM.GamitSoln(cnn, dts, NetworkCode, StationCode, project)

                    etm = pyETM.DailyRep(cnn, NetworkCode, StationCode, False, False, dra_ts)

                    etm.plot(pngfile='%s/%s.%s_DRA.png' % (project + '_dra', NetworkCode, StationCode),
                             plot_missing=False)

            except Exception as e:
                tqdm.write(' -->' + str(e))

    dra.to_json(project + '_dra.json')
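
The np.diff/np.append idiom used above converts a position time series into
day-to-day differences while keeping the epoch columns of the later day of
each pair. A self-contained illustration with made-up values:

import numpy as np

# columns: X, Y, Z, Year, DOY, FYear (illustrative values only)
ts = np.array([[0.0, 0.0, 0.0, 2010, 1, 2010.0000],
               [0.5, 0.1, 0.2, 2010, 2, 2010.0027],
               [0.7, 0.4, 0.1, 2010, 3, 2010.0055]])

dts = np.append(np.diff(ts[:, 0:3], axis=0), ts[1:, -3:], axis=1)
# dts -> [[ 0.5  0.1  0.2  2010.  2.  2010.0027]
#         [ 0.2  0.3 -0.1  2010.  3.  2010.0055]]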
Example #12
def load_harpos(header, otl):

    cnn = dbConnection.Cnn("gnss_data.cfg")

    # begin removing the network code from the OTL
    NetStn = re.findall(r'S\s+(\w+\.\w+)\s+', ''.join(otl))

    NetworkCode, StationCode = NetStn[0].split('.')

    OTL = (''.join(header) + ''.join(otl)).replace(NetStn[0], StationCode + '    ') + 'HARPOS Format version of 2002.12.12'

    print(' >> updating %s.%s' % (NetworkCode, StationCode))
    cnn.query('UPDATE stations SET "Harpos_coeff_otl" = \'%s\' WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\'' % (OTL, NetworkCode, StationCode))
Example #13
def UpdateRecord(rinex, path):

    cnn = dbConnection.Cnn('gnss_data.cfg')
    Config = pyOptions.ReadOptions('gnss_data.cfg')

    try:
        rnxobj = pyRinex.ReadRinex(rinex['NetworkCode'], rinex['StationCode'],
                                   path)

        date = pyDate.Date(year=rinex['ObservationYear'],
                           doy=rinex['ObservationDOY'])

        if not verify_rinex_date_multiday(date, rnxobj, Config):
            cnn.begin_transac()
            # propagate the deletes
            cnn.query(
                'DELETE FROM gamit_soln WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' AND "Year" = %i AND "DOY" = %i'
                % (rinex['NetworkCode'], rinex['StationCode'],
                   rinex['ObservationYear'], rinex['ObservationDOY']))
            cnn.query(
                'DELETE FROM ppp_soln WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' AND "Year" = %i AND "DOY" = %i'
                % (rinex['NetworkCode'], rinex['StationCode'],
                   rinex['ObservationYear'], rinex['ObservationDOY']))
            cnn.query(
                'DELETE FROM rinex WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' AND "ObservationYear" = %i AND "ObservationDOY" = %i'
                % (rinex['NetworkCode'], rinex['StationCode'],
                   rinex['ObservationYear'], rinex['ObservationDOY']))
            cnn.commit_transac()

            return ('Multiday rinex file moved out of the archive: %s.%s %i %i '
                    'using node %s' % (rinex['NetworkCode'], rinex['StationCode'],
                                       rinex['ObservationYear'],
                                       rinex['ObservationDOY'], platform.node()))
        else:
            cnn.update('rinex', rinex, Completion=rnxobj.completion)

    except pyRinex.pyRinexExceptionBadFile:
        # empty file or problem with crinex format, move out
        archive = pyArchiveStruct.RinexStruct(cnn)
        archive.remove_rinex(
            rinex,
            os.path.join(
                Config.repository_data_reject, 'bad_rinex/%i/%03i' %
                (rinex['ObservationYear'], rinex['ObservationDOY'])))

    except Exception:
        return (traceback.format_exc() + ' processing rinex: %s.%s %i %i using node %s'
                % (rinex['NetworkCode'], rinex['StationCode'],
                   rinex['ObservationYear'], rinex['ObservationDOY'],
                   platform.node()))
Example #14
def main():

    parser = argparse.ArgumentParser(description='Program to alter the default ETM parameters for each station. '
                                                 'The command can be executed on several stations at the same time. '
                                                 'It is also possible to alter parameters for PPP and GAMIT '
                                                 'simultaneously.')

    parser.add_argument('stnlist', type=str, nargs='+', metavar='all|net.stnm',
                        help="List of networks/stations to process given in [net].[stnm] format or just [stnm] "
                             "(separated by spaces; if [stnm] is not unique in the database, all stations with that "
                             "name will be processed). Use keyword 'all' to process all stations in the database. "
                             "If [net].all is given, all stations from network [net] will be processed. "
                             "Alternatively, a file with the station list can be provided.")

    parser.add_argument('-fun', '--function_type', nargs='+', metavar=('function', 'argument'), default=[],
                        help="Specifies the type of function to work with. Can be polynomial (p), jump (j), "
                             "periodic (q) or bulk earthquake jump removal (t). Each one accepts a list of arguments. "
                             "p {terms} where terms equals the number of polynomial terms in the ETM, i.e. "
                             "terms = 2 is constant velocity and terms = 3 is velocity + acceleration, etc.\n"
                             "j {action} {type} {date} {relax} where action can be + or -. A + indicates that a jump "
                             "should be added while a - means that an existing jump should be removed; "
                             "type = 0 is a mechanic jump and 1 is a geophysical jump; "
                             "date is the date of the event in all the accepted formats "
                             "(yyyy/mm/dd yyyy_doy gpswk-wkday fyear); and relax is a list of relaxation times for the "
                             "logarithmic decays (only used when type = 1, they are ignored when type = 0).\n"
                             "q {periods} where periods is a list expressed in days (1 yr = 365.25). "
                             "t {max_magnitude} {stack_name} removes any earthquake Mw <= max_magnitude from "
                             "the specified stations' trajectory models; if GAMIT solutions are invoked, provide the "
                             "stack_name to obtain the ETMs of the stations.")

    parser.add_argument('-soln', '--solution_type', nargs='+', choices=['ppp', 'gamit'],
                        default=['ppp', 'gamit'], action=required_length(1, 2),
                        help="Specifies the type of solution that this command will affect. If left empty, the ETMs "
                             "for both PPP and GAMIT will be affected. Otherwise, specify gamit to insert or "
                             "remove the function on GAMIT ETMs only or ppp to insert or remove the function on PPP "
                             "ETMs only.")

    parser.add_argument('-print', '--print_params', action='store_true',
                        help="Print the parameters present in the database for the selected stations.")

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")
    # get the station list
    stnlist = Utils.process_stnlist(cnn, args.stnlist)

    if args.print_params:
        print_params(cnn, stnlist)
    else:
        insert_modify_param(parser, cnn, stnlist, args)
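
required_length, used in the --solution_type argument above, is not shown in
this listing. A common implementation pattern (a sketch, not necessarily the
original) is an argparse.Action factory that enforces a minimum and maximum
number of values:

import argparse

def required_length(nmin, nmax):
    # reject the argument unless it received between nmin and nmax values
    class RequiredLength(argparse.Action):
        def __call__(self, parser, namespace, values, option_string=None):
            if not nmin <= len(values) <= nmax:
                parser.error('argument %s requires between %i and %i values'
                             % (self.dest, nmin, nmax))
            setattr(namespace, self.dest, values)
    return RequiredLength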
Example #15
def create_files():

    cnn = dbConnection.Cnn("gnss_data.cfg")

    rs = cnn.query(
        'SELECT * FROM stations WHERE "NetworkCode" NOT LIKE \'?%%\' AND "Harpos_coeff_otl" LIKE \'%%HARPOS%%\' ORDER BY "NetworkCode", "StationCode"'
    )
    # rs = cnn.query(
    #    'SELECT * FROM stations WHERE "NetworkCode" NOT LIKE \'?%%\' ORDER BY "NetworkCode", "StationCode"')

    stations = rs.dictresult()
    print(' >> Number of stations to process at Chalmers: ' + str(len(stations)))
    stnlist = []
    index = 0
    for stn in stations:
        stnlist += [
            '%-24s %16.3f%16.3f%16.3f' %
            (stn['NetworkCode'] + '.' + stn['StationCode'], float(
                stn['auto_x']), float(stn['auto_y']), float(stn['auto_z']))
        ]

        if len(stnlist) == 99:

            body = '\n'.join(stnlist)

            with open('otl_%i.list' % index, 'w') as otl_list:
                otl_list.write(body)

            index += 1

            stnlist = []

    if len(stnlist) > 0:
        body = '\n'.join(stnlist)

        with open('otl_%i.list' % index, 'w') as otl_list:
            otl_list.write(body)
Example #16
def main():

    Config = pyOptions.ReadOptions('gnss_data.cfg')
    JobServer = pyJobServer.JobServer(Config)  # type: pyJobServer.JobServer

    cnn = dbConnection.Cnn('gnss_data.cfg')

    archive = pyArchiveStruct.RinexStruct(cnn)

    rinex = cnn.query_float(
        'SELECT * FROM rinex WHERE "ObservationYear" <= 1995 ORDER BY "NetworkCode", '
        '"StationCode", "ObservationYear", "ObservationDOY"',
        as_dict=True)

    pbar = tqdm(desc='%-30s' % ' >> Processing rinex files',
                total=len(rinex),
                ncols=160)

    modules = ('os', 'pyRinex')
    callback = []

    for rnx in rinex:

        filename = archive.build_rinex_path(rnx['NetworkCode'],
                                            rnx['StationCode'],
                                            rnx['ObservationYear'],
                                            rnx['ObservationDOY'],
                                            filename=rnx['Filename'])

        arguments = (rnx['NetworkCode'], rnx['StationCode'],
                     Config.archive_path, filename)

        JobServer.SubmitJob(check_rinex, arguments, (), modules, callback,
                            callback_class(pbar), 'callbackfunc')

        if JobServer.process_callback:
            # handle any output messages during this batch
            callback = output_handle(callback)
            JobServer.process_callback = False

    tqdm.write(' >> waiting for jobs to finish...')
    JobServer.job_server.wait()
    tqdm.write(' >> Done.')

    # process the errors and the new stations
    output_handle(callback)

    pbar.close()
Example #17
def main():
    parser = argparse.ArgumentParser(description='Script to synchronize AWS with OSU\'s archive database')

    parser.add_argument('date', type=str, nargs=1, help="Check the sync state for this given date. Format can be fyear or yyyy_ddd.")
    parser.add_argument('-mark', '--mark_uploaded', nargs='+', type=str, help="Pass net.stnm to mark these files as transferred to the AWS", metavar='{net.stnm}')
    parser.add_argument('-pull', '--pull_rinex', action='store_true', help="Get all the unsynchronized RINEX files in the local dir")
    parser.add_argument('-np', '--noparallel', action='store_true', help="Execute command without parallelization.")

    args = parser.parse_args()

    Config = pyOptions.ReadOptions("gnss_data.cfg")  # type: pyOptions.ReadOptions

    cnn = dbConnection.Cnn('gnss_data.cfg')

    # before attempting anything, check aliases!!
    print(' >> Checking GAMIT aliases')
    check_aliases(cnn)

    # initialize the PP job server
    if not args.noparallel:
        JobServer = pyJobServer.JobServer(Config, 1500)  # type: pyJobServer.JobServer
    else:
        JobServer = None
        Config.run_parallel = False

    dd = args.date[0]

    if '_' in dd:
        date = pyDate.Date(year=int(dd.split('_')[0]), doy=int(dd.split('_')[1]))
    elif dd == 'all':
        # run all dates (2004 to 2018)
        ts = range(pyDate.Date(year=2004, doy=20).mjd, pyDate.Date(year=2018, doy=87).mjd, 1)
        ts = [pyDate.Date(mjd=tts) for tts in ts]
        for date in ts:
            print(' >> Processing ' + str(date))
            pull_rinex(cnn, date, Config, JobServer)

        return
    else:
        date = pyDate.Date(fyear=float(dd))

    if args.pull_rinex:
        pull_rinex(cnn, date, Config, JobServer)

    if args.mark_uploaded is not None:
        print('Processing %i stations for day %s' % (len(args.mark_uploaded), date.yyyyddd()))
        # mark the list of stations as transferred to the AWS
        mark_uploaded(cnn, date, args.mark_uploaded)
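
The date handling above relies on pyDate.Date accepting several equivalent
constructors. A short round-trip sketch using only the constructors and
attributes that appear in these examples (year/doy, mjd, fyear):

import pyDate

d = pyDate.Date(year=2018, doy=87)
same = pyDate.Date(mjd=d.mjd)      # rebuild the same day from its MJD
also = pyDate.Date(fyear=d.fyear)  # or from the fractional year
print(d.yyyyddd(), same.yyyyddd(), also.yyyyddd())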
Example #18
def main():

    parser = argparse.ArgumentParser(description='GNSS time series stacker')

    parser.add_argument('project_file', type=str, nargs=1, metavar='{project cfg file}',
                        help="Project CFG file with all the stations being processed in Parallel.GAMIT")

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")

    GamitConfig = GamitConfiguration(args.project_file[0], check_config=False)  # type: GamitConfiguration

    stations = process_stnlist(cnn, GamitConfig.NetworkConfig['stn_list'].split(','))

    generate_kml(cnn, GamitConfig.NetworkConfig.network_id.lower(), stations)
Example #19
def run_globk(globk_task, project, date):

    from datetime import datetime
    polyhedron, variance = globk_task.execute()
    # open a database connection (this is on the node)
    cnn = dbConnection.Cnn('gnss_data.cfg')
    err = []

    # kill the existing polyhedron to make sure all the new vertices get in
    # this is because when adding a station, only the
    cnn.query(
        'DELETE FROM gamit_soln WHERE "Project" = \'%s\' AND "Year" = %i AND "DOY" = %i'
        % (project, date.year, date.doy))

    # insert polyhedron in gamit_soln table
    for key, value in polyhedron.items():
        if '.' in key:
            try:
                sqrt_variance = math.sqrt(variance)
                cnn.insert('gamit_soln',
                           NetworkCode=key.split('.')[0],
                           StationCode=key.split('.')[1],
                           Project=project,
                           Year=date.year,
                           DOY=date.doy,
                           FYear=date.fyear,
                           X=value.X,
                           Y=value.Y,
                           Z=value.Z,
                           sigmax=value.sigX * sqrt_variance,
                           sigmay=value.sigY * sqrt_variance,
                           sigmaz=value.sigZ * sqrt_variance,
                           sigmaxy=value.sigXY * sqrt_variance,
                           sigmaxz=value.sigXZ * sqrt_variance,
                           sigmayz=value.sigYZ * sqrt_variance,
                           VarianceFactor=variance)
            except dbConnection.dbErrInsert as e:
                # tqdm.write('    --> Error inserting ' + key + ' -> ' + str(e))
                pass
        else:
            err.append(
                ' -- %s Error while combining with GLOBK -> Invalid key found in session %s -> %s '
                'polyhedron in database may be incomplete.' %
                (datetime.now().strftime('%Y-%m-%d %H:%M:%S'), date.yyyyddd(),
                 key))
    cnn.close()
    return err
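
Note that run_globk rescales each formal GLOBK sigma by the square root of the
solution variance factor before inserting it. Numerically (illustrative values
only):

import math

variance = 2.25           # variance factor returned by globk_task.execute()
sig_x_formal = 0.003      # formal sigma in meters (made-up value)
sig_x = sig_x_formal * math.sqrt(variance)   # 0.0045 -> stored as sigmax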
Example #20
def main():

    print(' >> Loading g08d APRs...')
    mat = hdf5storage.loadmat('PRIORS_from_g08d.mat')

    # stn_index = np.where(mat['pv_stnm'] == rnx['NetworkCode'].upper() + '_' + rnx['StationCode'].upper())[0][0]
    # ydm_index = np.where((mat['pv_Epoch']['iyear'] == date.year) & (mat['pv_Epoch']['doy'] == date.doy))

    cnn = dbConnection.Cnn('gnss_data.cfg')

    for stnm in mat['pv_stnm'].tolist():
        NetworkCode = stnm[0].split('_')[0].lower()
        StationCode = stnm[0].split('_')[1].lower()

        station_id = NetworkCode + '.' + StationCode

        print(' -- inserting ' + station_id)

        if cnn.query(
                'SELECT * FROM stations WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\''
                % (NetworkCode, StationCode)).ntuples() != 0:
            # get the rows for this station
            stn_index = np.where(mat['pv_stnm'] == stnm[0])[0][0]
            xyz = mat['pv_xyz'][stn_index * 3:stn_index * 3 + 3]
            enu = mat['pv_sig_enu'][stn_index * 3:stn_index * 3 + 3]

            # loop through the epochs
            for i, fyear in enumerate(mat['pv_Epoch']['fyear'][0][0]):
                date = pyDate.Date(fyear=fyear)

                if enu[0][i] < 10:
                    # print ' -- ' + station_id  + ' ' + date.yyyyddd()
                    # actual sigma value, otherwise it's a super unconstrained (should not be inserted)
                    try:
                        cnn.query(
                            'INSERT INTO apr_coords '
                            '("NetworkCode", "StationCode", "Year", "DOY", "FYear", "x", "y", "z", "sn", "se", "su", "ReferenceFrame") VALUES '
                            '(\'%s\', \'%s\', %i, %i, %f, %f, %f, %f, %f, %f, %f, \'g08d\')'
                            % (NetworkCode, StationCode, date.year, date.doy,
                               date.fyear, xyz[0][i], xyz[1][i], xyz[2][i],
                               enu[0][i], enu[1][i], enu[2][i]))
                    except pg.IntegrityError:
                        print(' -- ' + station_id + ' ' + date.yyyyddd() +
                              ' already exists!')

        else:
            print(' -- COULD NOT FIND STATION ' + station_id)
Example #21
def load_blq(header, otl):

    cnn = dbConnection.Cnn("gnss_data.cfg")

    # begin removing the network code from the OTL
    NetStn = re.findall(r'\s{2}(\w{3}\.\w{4})', ''.join(otl))

    NetworkCode, StationCode = NetStn[0].split('.')

    OTL = (''.join(header) + ''.join(otl)).replace('  ' + NetStn[0], '  ' + StationCode)
    OTL = OTL.replace('$$ ' + NetStn[0], '$$ %-8s' % StationCode)
    OTL = OTL.replace('$$ END TABLE', '$$')
    OTL = OTL.replace("'", "")

    print(' >> updating %s.%s' % (NetworkCode, StationCode))

    cnn.query('UPDATE stations SET "Harpos_coeff_otl" = \'%s\' WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\'' % (OTL, NetworkCode, StationCode))
Example #22
def compute_dra(ts,
                NetworkCode,
                StationCode,
                pdates,
                project,
                histogram=False):
    try:
        # load from the db
        cnn = dbConnection.Cnn('gnss_data.cfg')

        # to pass the filename back to the callback_handler
        filename = project + '_dra/' + NetworkCode + '.' + StationCode
        if ts.size:

            if ts.shape[0] > 2:
                dts = numpy.append(numpy.diff(ts[:, 0:3], axis=0),
                                   ts[1:, -3:],
                                   axis=1)

                dra_ts = pyETM.GamitSoln(cnn, dts, NetworkCode, StationCode,
                                         project)

                etm = pyETM.DailyRep(cnn, NetworkCode, StationCode, False,
                                     False, dra_ts)

                figfile = ''
                hisfile = ''

                if etm.A is not None:
                    figfile = etm.plot(fileio=io.BytesIO(),
                                       plot_missing=False,
                                       t_win=pdates)
                    if histogram:
                        hisfile = etm.plot_hist(fileio=io.BytesIO())

                    # save the wrms
                    return etm.factor[0] * 1000, etm.factor[1] * 1000, etm.factor[2] * 1000, figfile, hisfile, \
                           filename, NetworkCode, StationCode
                else:
                    return None, None, None, figfile, hisfile, filename, NetworkCode, StationCode
    except Exception as e:
        raise Exception('While working on %s.%s' % (NetworkCode, StationCode) +
                        '\n') from e
Example #23
def main():

    cnn = dbConnection.Cnn("gnss_data.cfg")

    stack = Stack(cnn, 'igs-sirgas', redo=True)

    for i in tqdm(range(1, len(stack)), ncols=160):
        stack[i].align(stack[i - 1])

    net = 'igs'
    stn = 'braz'

    ts = stack.get_station(net, stn)

    dts = np.append(np.diff(ts[:, 0:3], axis=0), ts[1:, -3:], axis=1)

    ts = pyETM.GamitSoln(cnn, dts, net, stn, 'igs-sirgas')

    pyETM.GamitETM(cnn, net, stn, True, gamit_soln=ts)
Example #24
def main():

    cnn = dbConnection.Cnn('gnss_data.cfg')

    yrs = glob.glob('/data/[1-2]*')

    for year_dir in yrs:
        days = glob.glob(year_dir + '/*')

        for day in days:
            sessions = glob.glob(day + '/*')

            for sess in sessions:
                parts = os.path.split(sess)

                path = sess + '/monitor.log'
                if '.' in parts[1]:
                    # gamit session, parse monitor insert data
                    parse_monitor(cnn, path)

                elif os.path.isfile(path):
                    parse_monitor(cnn, path)
Example #25
def main():
    parser = argparse.ArgumentParser(
        description='Plot ETM for stations in the database')

    parser.add_argument(
        'stnlist',
        type=str,
        nargs='+',
        help=
        "List of networks/stations to plot given in [net].[stnm] format or just [stnm] "
        "(separated by spaces; if [stnm] is not unique in the database, all stations with that "
        "name will be plotted). Use keyword 'all' to plot all stations in all networks. "
        "If [net].all is given, all stations from network [net] will be plotted"
    )
    parser.add_argument('-nop',
                        '--no_plots',
                        action='store_true',
                        help="Do not produce plots",
                        default=False)
    parser.add_argument('-nom',
                        '--no_missing_data',
                        action='store_true',
                        help="Do not show missing days",
                        default=False)
    parser.add_argument('-nm',
                        '--no_model',
                        action='store_true',
                        help="Plot time series without fitting a model")
    parser.add_argument('-r',
                        '--residuals',
                        action='store_true',
                        help="Plot time series residuals",
                        default=False)
    parser.add_argument(
        '-dir',
        '--directory',
        type=str,
        help=
        "Directory to save the resulting PNG files. If not specified, assumed to be the "
        "production directory")
    parser.add_argument(
        '-json',
        '--json',
        type=int,
        help="Export ETM adjustment to JSON. Append '0' to just output "
        "the ETM parameters, '1' to export time series without "
        "model and '2' to export both time series and model.")
    parser.add_argument(
        '-gui',
        '--interactive',
        action='store_true',
        help="Interactive mode: allows to zoom and view the plot interactively"
    )
    parser.add_argument(
        '-win',
        '--time_window',
        nargs='+',
        metavar='interval',
        help=
        'Date range to window data. Can be specified in yyyy/mm/dd, yyyy.doy or as a single '
        'integer value (N) which shall be interpreted as last epoch-N')
    parser.add_argument(
        '-q',
        '--query',
        nargs=2,
        metavar='{type} {date}',
        type=str,
        help=
        'Dates to query the ETM. Specify "model" or "solution" to get the ETM value or the value '
        'of the daily solution (if exists). Output is in XYZ.')
    parser.add_argument(
        '-gamit',
        '--gamit',
        type=str,
        nargs=1,
        metavar='{stack}',
        help="Plot the GAMIT time series specifying which stack name to plot.")
    parser.add_argument(
        '-lang',
        '--language',
        type=str,
        help="Change the language of the plots. Default is English. "
        "Use ESP to select Spanish. To add more languages, "
        "include the ISO 639-1 code in pyETM.py",
        default='ENG')
    parser.add_argument('-hist',
                        '--histogram',
                        action='store_true',
                        help="Plot histogram of residuals")
    parser.add_argument(
        '-file',
        '--filename',
        type=str,
        help=
        "Obtain data from an external source (filename). Format should be specified with -format."
    )
    parser.add_argument(
        '-format',
        '--format',
        nargs='+',
        type=str,
        help=
        "To be used together with --filename. Specify order of the fields as found in the input "
        "file. Format strings are gpsWeek, gpsWeekDay, year, doy, fyear, month, day, mjd, "
        "x, y, z, na. Use 'na' to specify a field that should be ignored. If fields to be ignored "
        "are at the end of the line, then there is no need to specify those.")
    parser.add_argument('-outliers',
                        '--plot_outliers',
                        action='store_true',
                        help="Plot an additional panel with the outliers")
    parser.add_argument('-vel',
                        '--velocity',
                        action='store_true',
                        help="During query, output the velocity in XYZ.")
    parser.add_argument('-seasonal',
                        '--seasonal_terms',
                        action='store_true',
                        help="During query, output the seasonal terms in NEU.")
    parser.add_argument('-quiet',
                        '--suppress_messages',
                        action='store_true',
                        help="Quiet mode: suppress information messages")

    args = parser.parse_args()

    cnn = dbConnection.Cnn('gnss_data.cfg')

    stnlist = Utils.process_stnlist(cnn, args.stnlist)

    # define the language
    pyETM.LANG = args.language.lower()
    # set the logging level
    if not args.suppress_messages:
        pyETM.logger.setLevel(pyETM.INFO)
    #####################################
    # date filter

    dates = None
    if args.time_window is not None:
        if len(args.time_window) == 1:
            try:
                dates = process_date(args.time_window,
                                     missing_input=None,
                                     allow_days=False)
                dates = (dates[0].fyear, )
            except ValueError:
                # an integer value
                dates = float(args.time_window[0])
        else:
            dates = process_date(args.time_window)
            dates = (dates[0].fyear, dates[1].fyear)

    if stnlist:
        # do the thing
        if args.directory:
            if not os.path.exists(args.directory):
                os.mkdir(args.directory)
        else:
            if not os.path.exists('production'):
                os.mkdir('production')
            args.directory = 'production'

        for stn in stnlist:
            try:

                if args.gamit is None and args.filename is None:
                    etm = pyETM.PPPETM(cnn, stn['NetworkCode'],
                                       stn['StationCode'], False,
                                       args.no_model)
                elif args.filename is not None:
                    etm = from_file(args, cnn, stn)
                else:
                    polyhedrons = cnn.query_float(
                        'SELECT "X", "Y", "Z", "Year", "DOY" FROM stacks '
                        'WHERE "name" = \'%s\' AND "NetworkCode" = \'%s\' AND '
                        '"StationCode" = \'%s\' '
                        'ORDER BY "Year", "DOY", "NetworkCode", "StationCode"'
                        % (args.gamit[0], stn['NetworkCode'],
                           stn['StationCode']))

                    soln = pyETM.GamitSoln(cnn, polyhedrons,
                                           stn['NetworkCode'],
                                           stn['StationCode'], args.gamit[0])

                    etm = pyETM.GamitETM(cnn,
                                         stn['NetworkCode'],
                                         stn['StationCode'],
                                         False,
                                         args.no_model,
                                         gamit_soln=soln)

                    # print ' > %5.2f %5.2f %5.2f %i %i' % \
                    #      (etm.factor[0]*1000, etm.factor[1]*1000, etm.factor[2]*1000, etm.soln.t.shape[0],
                    #       etm.soln.t.shape[0] -
                    #       np.sum(np.logical_and(np.logical_and(etm.F[0], etm.F[1]), etm.F[2])))

                    # print two largest outliers
                    if etm.A is not None:
                        lres = np.sqrt(np.sum(np.square(etm.R), axis=0))
                        slres = lres[np.argsort(-lres)]

                        print(' >> Two largest residuals:')
                        for i in [0, 1]:
                            print(' %s %6.3f %6.3f %6.3f' %
                                  (pyDate.Date(mjd=etm.soln.mjd[
                                      lres == slres[i]]).yyyyddd(),
                                   etm.R[0, lres == slres[i]],
                                   etm.R[1, lres == slres[i]],
                                   etm.R[2, lres == slres[i]]))

                if args.interactive:
                    xfile = None
                else:
                    if args.gamit is None:
                        if args.filename is None:
                            xfile = os.path.join(
                                args.directory, '%s.%s_ppp' %
                                (etm.NetworkCode, etm.StationCode))
                        else:
                            xfile = os.path.join(
                                args.directory, '%s.%s_file' %
                                (etm.NetworkCode, etm.StationCode))
                    else:
                        xfile = os.path.join(
                            args.directory,
                            '%s.%s_gamit' % (etm.NetworkCode, etm.StationCode))

                # leave pngfile empty to enter interactive mode (GUI)
                if not args.no_plots:
                    etm.plot(xfile + '.png',
                             t_win=dates,
                             residuals=args.residuals,
                             plot_missing=not args.no_missing_data,
                             plot_outliers=args.plot_outliers)

                    if args.histogram:
                        etm.plot_hist(xfile + '_hist.png')

                if args.json is not None:
                    with open(xfile + '.json', 'w') as f:
                        if args.json == 1:
                            json.dump(etm.todictionary(time_series=True),
                                      f,
                                      indent=4,
                                      sort_keys=False)
                        elif args.json == 2:
                            json.dump(etm.todictionary(time_series=True,
                                                       model=True),
                                      f,
                                      indent=4,
                                      sort_keys=False)
                        else:
                            json.dump(etm.todictionary(False),
                                      f,
                                      indent=4,
                                      sort_keys=False)

                if args.query is not None:
                    model = args.query[0] == 'model'
                    q_date = pyDate.Date(fyear=float(args.query[1]))

                    xyz, _, _, txt = etm.get_xyz_s(q_date.year,
                                                   q_date.doy,
                                                   force_model=model)

                    strp = ''
                    # if user requests velocity too, output it
                    if args.velocity:
                        if etm.A is not None:
                            vxyz = etm.rotate_2xyz(etm.Linear.p.params[:, 1])
                            strp = '%8.5f %8.5f %8.5f ' \
                                   % (vxyz[0, 0], vxyz[1, 0], vxyz[2, 0])

                    # also output seasonal terms, if requested
                    if args.seasonal_terms:
                        if etm.Periodic.frequency_count > 0:
                            strp += ' '.join([
                                '%8.5f' % (x * 1000) for x in
                                etm.Periodic.p.params.flatten().tolist()
                            ])

                    print(' %s.%s %14.5f %14.5f %14.5f %8.3f %s -> %s'
                          % (etm.NetworkCode, etm.StationCode, xyz[0], xyz[1],
                             xyz[2], q_date.fyear, strp, txt))

            print('Successfully plotted ' + stn['NetworkCode'] + '.' +
                  stn['StationCode'])

            except pyETM.pyETMException as e:
                print(str(e))

            except Exception:
                print('Error during processing of ' + stn['NetworkCode'] +
                      '.' + stn['StationCode'])
                print(traceback.format_exc())
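
A hypothetical invocation of the plotting tool above (the script filename is
an assumption; the flags are the ones defined in this argument parser):

    python PlotETM.py igs.braz -r -hist -dir plots

This would plot the PPP ETM for station igs.braz with residuals, add a
histogram of the residuals, and write the PNG files to the plots directory.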
Example #26
def rinex_task(NetworkCode, StationCode, date, ObservationFYear, metafile):

    from pyRunWithRetry import RunCommandWithRetryExeception

    etm_err = ''

    # local directory as destination for the CRINEZ files
    pwd_rinex = '/media/leleiona/aws-files/' + date.yyyy() + '/' + date.ddd()

    stop_no_aprs = False

    Config = pyOptions.ReadOptions(
        "gnss_data.cfg")  # type: pyOptions.ReadOptions

    cnn = dbConnection.Cnn('gnss_data.cfg')

    # create Archive object

    Archive = pyArchiveStruct.RinexStruct(
        cnn)  # type: pyArchiveStruct.RinexStruct

    ArchiveFile = Archive.build_rinex_path(NetworkCode, StationCode, date.year,
                                           date.doy)
    ArchiveFile = os.path.join(Config.archive_path, ArchiveFile)

    # check for a station alias in the alias table
    alias = cnn.query(
        'SELECT * FROM stationalias WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\''
        % (NetworkCode, StationCode))

    sa = alias.dictresult()

    if len(sa) > 0:
        StationAlias = sa[0]['StationAlias']
    else:
        StationAlias = StationCode

    # create the crinez filename
    filename = StationAlias + date.ddd() + '0.' + date.yyyy()[2:4] + 'd.Z'

    try:
        # create the ETM object
        etm = pyETM.PPPETM(cnn, NetworkCode, StationCode)

        # get APRs and sigmas (only in NEU)
        Apr, sigmas, Window, source = etm.get_xyz_s(date.year, date.doy)

        del etm

    except pyETM.pyETMException as e:
        # no PPP solutions available! MUST have aprs in the last run, try that
        stop_no_aprs = True
        Window = None
        source = ''
        etm_err = str(e)

    except Exception:

        return (None, None,
                traceback.format_exc() + ' processing ' + NetworkCode + '.' +
                StationCode + ' using node ' + platform.node() + '\n',
                metafile)

    # find this station-day in the latest global run APRs
    apr_tbl = cnn.query(
        'SELECT * FROM apr_coords WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' '
        'AND "Year" = %i AND "DOY" = %i' %
        (NetworkCode, StationCode, date.year, date.doy))
    apr = apr_tbl.dictresult()

    if len(apr) > 0:
        # APRs exist for this station-day
        # replace PPP ETM with Mike's APRs
        Apr = numpy.array(
            ([float(apr[0]['x'])], [float(apr[0]['y'])], [float(apr[0]['z'])]))
        sigmas = numpy.array(([float(apr[0]['sn'])], [float(apr[0]['se'])],
                              [float(apr[0]['su'])]))
        source = apr[0]['ReferenceFrame'] + ' APRs'

    elif len(apr) == 0 and stop_no_aprs:

        return (
            None, None,
            '%s.%s has no PPP solutions and no APRs from last global run for %s! '
            'Specific error from pyETM.PPPETM (if available) was: %s' %
            (NetworkCode, StationCode, date.yyyyddd(), etm_err), metafile)

    # convert sigmas to XYZ
    stn = cnn.query(
        'SELECT * FROM stations WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\''
        % (NetworkCode, StationCode))
    stn = stn.dictresult()
    sigmas_xyz = sigmas_neu2xyz(stn[0]['lat'], stn[0]['lon'], sigmas)

    # write the station.info
    # if no station info comes back for this date, program will print a message and continue with next
    try:

        # Use the argument 'ObservationFYear' to get the exact RINEX session fyear without opening the file
        rnx_date = pyDate.Date(fyear=float(ObservationFYear))
        stninfo = pyStationInfo.StationInfo(cnn,
                                            NetworkCode,
                                            StationCode,
                                            rnx_date,
                                            h_tolerance=12)

    except pyStationInfo.pyStationInfoException:
        # if no metadata, warn user and continue
        return (
            None, None,
            '%s.%s has no metadata available for this date, but a RINEX exists!'
            % (NetworkCode, StationCode), metafile)

    # check if RINEX file needs to be synced or not.
    aws_sync = cnn.query(
        'SELECT * FROM aws_sync WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' '
        'AND "Year" = %i AND "DOY" = %i' %
        (NetworkCode, StationCode, date.year, date.doy)).dictresult()

    if len(aws_sync) == 0:

        # only copy RINEX if not synced!
        # open the RINEX file in the Archive
        try:
            with pyRinex.ReadRinex(NetworkCode, StationCode, ArchiveFile,
                                   False) as Rinex:  # type: pyRinex.ReadRinex

                Rnx = None

                if Rinex.multiday:
                    # find the rinex that corresponds to the session being processed, if multiday
                    for rinex in Rinex.multiday_rnx_list:
                        if rinex.date == date:
                            Rnx = rinex
                            break

                    if Rnx is None:
                        return (
                            None, None,
                            '%s.%s was a multiday file and date %8.3f could not be found!'
                            % (NetworkCode, StationCode, date.fyear), metafile)
                else:
                    # if Rinex is not multiday
                    Rnx = Rinex

                    Rnx.purge_comments()
                    Rnx.normalize_header(stninfo)
                    Rnx.rename(filename)

                    if Window is not None:
                        window_rinex(Rnx, Window)
                        source += ' windowed from/to ' + Window.datetime(
                        ).strftime('%Y-%m-%d %H:%M:%S')
                    # before creating local copy, decimate file
                    Rnx.decimate(30)
                    Rnx.compress_local_copyto(pwd_rinex)

        except (pyRinex.pyRinexException, RunCommandWithRetryExeception):
            # new behavior: if error occurs while generating RINEX, then copy raw file from the archive
            try:
                shutil.copy(ArchiveFile, os.path.join(pwd_rinex, filename))

            except Exception:
                return (None, None, traceback.format_exc() + ' processing ' +
                        NetworkCode + '.' + StationCode + ' using node ' +
                        platform.node() + '\n', metafile)

        except Exception:
            return (None, None, traceback.format_exc() + ' processing ' +
                    NetworkCode + '.' + StationCode + ' using node ' +
                    platform.node() + '\n', metafile)

    # everything ok, return information
    APR = '%s.%s %s %12.3f %12.3f %12.3f %5.3f %5.3f %5.3f %5.3f %5.3f %5.3f %s' % (
        NetworkCode, StationCode, StationAlias, Apr[0, 0], Apr[1, 0],
        Apr[2, 0], sigmas_xyz[0, 0], sigmas_xyz[1, 0], sigmas_xyz[2, 0],
        sigmas[1, 0], sigmas[0, 0], sigmas[2, 0], source.replace(' ', '_'))

    return APR, stninfo.return_stninfo().replace(
        StationCode.upper(), StationAlias.upper()), None, metafile
Example #27
def main():

    parser = argparse.ArgumentParser(
        description=
        'Program to perform weekly loosely-constrained solutions. Combination '
        'is performed using GLOBK. Result is output in SINEX format.')

    parser.add_argument(
        'stnlist',
        type=str,
        nargs='+',
        metavar='all|net.stnm',
        help="List of networks/stations to include in the solution.")

    parser.add_argument(
        '-s',
        '--session_config',
        type=str,
        nargs=1,
        metavar='session.cfg',
        help="Filename with the session configuration to run Parallel.GAMIT")

    parser.add_argument('-w',
                        '--gpsweek',
                        nargs=1,
                        help="GPS week to combine.")

    parser.add_argument(
        '-e',
        '--exclude',
        type=str,
        nargs='+',
        metavar='station',
        help="List of stations to exclude (e.g. -e igm1 lpgs vbca)")

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")

    # get the working dates (GPS week days run 0 = Sunday through 6 = Saturday)
    date_s = pyDate.Date(gpsWeek=int(args.gpsweek[0]), gpsWeekDay=0)
    date_e = pyDate.Date(gpsWeek=int(args.gpsweek[0]), gpsWeekDay=6)

    print(' >> Working with GPS week ' + args.gpsweek[0] +
          ' (%s to %s)' % (date_s.yyyyddd(), date_e.yyyyddd()))

    exclude = args.exclude
    if exclude is not None:
        print(' >> User selected list of stations to exclude:')
        Utils.print_columns(exclude)
        args.stnlist += ['-' + exc for exc in exclude]

    # get the station list
    stnlist = Utils.process_stnlist(cnn, args.stnlist)

    # check that the selected stations have all different station codes
    # otherwise, exit with error
    for i in range(len(stnlist) - 1):
        for j in range(i + 1, len(stnlist)):
            if stnlist[i]['StationCode'] == stnlist[j]['StationCode']:
                print('During station selection, two identical station codes '
                      'were found. Please remove one and try again.')
                exit()

    GamitConfig = pyGamitConfig.GamitConfiguration(
        args.session_config[0])  # type: pyGamitConfig.GamitConfiguration

    project = GamitConfig.NetworkConfig.network_id.lower()
    org = GamitConfig.gamitopt['org']

    # start making sure that the solutions were already incorporated into the stack
    cc = cnn.query_float(
        'SELECT count(*) FROM stacks WHERE ("Year", "DOY") BETWEEN (%i, %i) AND (%i, %i) '
        'AND "Project" = \'%s\'' %
        (date_s.year, date_s.doy, date_e.year, date_e.doy, project))

    if cc[0][0] == 0:
        # query_float returns rows as tuples: the count is the first field
        print(' >> No solutions could be found in the stack for the specified GPS week. '
              'Did you run the stacker before attempting to combine the solutions?')
        exit()
    else:
        soln_pwd = GamitConfig.gamitopt['solutions_dir']

        # create a globk directory in production
        if not os.path.exists('production/globk'):
            os.makedirs('production/globk')

        # check if week folder exists
        if os.path.exists('production/globk/' + args.gpsweek[0]):
            rmtree('production/globk/' + args.gpsweek[0])

        # create the directory
        os.makedirs('production/globk/' + args.gpsweek[0])

        globk_pwd = 'production/globk/' + args.gpsweek[0]

        glx_list = []

        # make a list of the h files that need to be combined
        for day in range(0, 7):
            date = pyDate.Date(gpsWeek=int(args.gpsweek[0]), gpsWeekDay=day)

            soln_dir = os.path.join(
                soln_pwd,
                date.yyyy() + '/' + date.ddd() + '/' + project + '/glbf')

            if os.path.exists(soln_dir):
                # prefer *.GLX.* files; fall back to plain *.glx and skip the
                # day if neither exists (avoids an IndexError on glx[0])
                glx = glob.glob(os.path.join(soln_dir, '*.GLX.*'))
                if len(glx) == 0:
                    glx = glob.glob(os.path.join(soln_dir, '*.glx'))
                if len(glx) > 0:
                    glx_list.append({'file': glx[0], 'gpsweek': date.wwwwd()})

        # create the eq_rename.txt file to remove outliers
        with open(globk_pwd + '/eq_rename.txt', 'w') as fd:
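            # GLOBK 'rename' directives are written below in the (assumed)
            # eq_file syntax:
            #   rename <old>_gps <new>_xcl <h-file> <start: yy mm dd hh mm> <end: yy mm dd hh mm>
            # renaming a site to *_xcl excludes it from the combination for
            # that time span.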
            for stn in stnlist:
                rm = cnn.query_float(
                    'SELECT * FROM gamit_soln_excl WHERE "Project" = \'%s\' AND "NetworkCode" = \'%s\''
                    ' AND "StationCode" = \'%s\' AND ("Year", "DOY") BETWEEN (%i, %i) AND (%i, %i)'
                    % (project, stn['NetworkCode'], stn['StationCode'],
                       date_s.year, date_s.doy, date_e.year, date_e.doy),
                    as_dict=True)
                for r in rm:
                    date = pyDate.Date(year=r['Year'], doy=r['DOY'])

                    fd.write(
                        ' rename %s_gps %s_xcl %-20s %s %02i %02i 0 0 %s %02i %02i 24 0\n'
                        % (stn['StationCode'], stn['StationCode'], org +
                           date.wwwwd() + '.GLX', date.yyyy()[2:], date.month,
                           date.day, date.yyyy()[2:], date.month, date.day))

                # check for renames that might not agree between days
                mv = cnn.query_float(
                    'SELECT * FROM gamit_subnets WHERE "Project" = \'%s\' AND ("Year", "DOY") '
                    'BETWEEN (%i, %i) AND (%i, %i) AND \'%s.%s\' = ANY(stations)'
                    % (project, date_s.year, date_s.doy, date_e.year,
                       date_e.doy, stn['NetworkCode'], stn['StationCode']),
                    as_dict=True)

                # Parallel.GAMIT may process a station under an alias when its
                # code collides within a subnetwork; rename aliased entries
                # back to the original code, and exclude any stations that
                # were not selected for this combination.
                for m in mv:
                    date = pyDate.Date(year=m['Year'], doy=m['DOY'])
                    # check on each day to see if alias agrees with station code
                    for i, s in enumerate(m['stations']):
                        if s.split('.')[1] != m['alias'][i] and \
                                s == stn['NetworkCode'] + '.' + stn['StationCode']:

                            print(' -- Alias for %s.%s = %s: renaming'
                                  % (stn['NetworkCode'], stn['StationCode'], m['alias'][i]))

                            # change the name of the station to the original name
                            fd.write(
                                ' rename %s_gps %s_gps %-20s %s %02i %02i 0 0 %s %02i %02i 24 0\n'
                                % (m['alias'][i], stn['StationCode'],
                                   org + date.wwwwd() + '.GLX',
                                   date.yyyy()[2:], date.month, date.day,
                                   date.yyyy()[2:], date.month, date.day))

                        elif s not in [
                                st['NetworkCode'] + '.' + st['StationCode']
                                for st in stnlist
                        ]:
                            print(' -- Removing %s: not selected' % s)
                            # just in case, remove any other occurrences of this station code
                            fd.write(
                                ' rename %s_gps %s_xcl %-20s %s %02i %02i 0 0 %s %02i %02i 24 0\n'
                                % (m['alias'][i],
                                   id_generator(), org + date.wwwwd() + '.GLX',
                                   date.yyyy()[2:], date.month, date.day,
                                   date.yyyy()[2:], date.month, date.day))

        # ready to pass list to globk object
        Globk(globk_pwd, org, glx_list, date_s.wwww(),
              ' '.join([stn['StationCode'].upper() for stn in stnlist]))

        process_sinex(cnn, project, [date_s, date_e],
                      globk_pwd + '/' + org + date_s.wwww() + '7.snx')
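
The SINEX file passed to process_sinex is named following the IGS convention orgWWWW7.snx, where WWWW is the GPS week and the trailing day-of-week digit 7 marks a weekly combination (daily solutions use 0 through 6). A hypothetical illustration with made-up values:

# hypothetical org and week values, only to show the naming convention above
org, week = 'igs', '1042'
print(org + week + '7.snx')  # -> igs10427.snx
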
Beispiel #28
0
def main():
    parser = argparse.ArgumentParser(
        description='Archive operations Main Program')

    parser.add_argument(
        'stnlist',
        type=str,
        nargs='+',
        metavar='all|net.stnm',
        help=
        "List of networks/stations to process given in [net].[stnm] format or just [stnm] "
        "(separated by spaces; if [stnm] is not unique in the database, all stations with that "
        "name will be processed). Use keyword 'all' to process all stations in the database. "
        "If [net].all is given, all stations from network [net] will be processed. "
        "Alternatevily, a file with the station list can be provided.")

    parser.add_argument(
        '-date',
        '--date_range',
        nargs='+',
        action=required_length(1, 2),
        metavar='date_start|date_end',
        help="Date range to check given as [date_start] or [date_start] "
        "and [date_end]. Allowed formats are yyyy.doy or yyyy/mm/dd..")
    parser.add_argument(
        '-win',
        '--window',
        nargs=1,
        metavar='days',
        type=int,
        help=
        "Download data from a given time window determined by today - {days}.")

    try:
        args = parser.parse_args()

        cnn = dbConnection.Cnn('gnss_data.cfg')
        Config = pyOptions.ReadOptions('gnss_data.cfg')

        stnlist = Utils.process_stnlist(cnn, args.stnlist)

        print(' >> Selected station list:')
        print_columns([
            item['NetworkCode'] + '.' + item['StationCode'] for item in stnlist
        ])

        dates = []

        try:
            if args.window:
                # today - ndays
                d = pyDate.Date(year=datetime.now().year,
                                month=datetime.now().month,
                                day=datetime.now().day)
                dates = [d - int(args.window[0]), d]
            else:
                dates = process_date(args.date_range)

        except ValueError as e:
            parser.error(str(e))

        if dates[0] < pyDate.Date(gpsWeek=650, gpsWeekDay=0):
            dates = [
                pyDate.Date(gpsWeek=650, gpsWeekDay=0),
                pyDate.Date(year=datetime.now().year,
                            month=datetime.now().month,
                            day=datetime.now().day)
            ]

        # go through the dates
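        # (MJD increases by exactly one per day, so np.arange below yields
        # every day in the range, inclusive of both endpoints)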
        drange = np.arange(dates[0].mjd, dates[1].mjd + 1, 1)

        download_data(cnn, Config, stnlist, drange)

    except argparse.ArgumentTypeError as e:
        parser.error(str(e))
Beispiel #29
0
            raise pyStationCollectionException('Requested station code ' +
                                               item.netstn +
                                               ' could not be found')

        else:
            raise pyStationException(
                'type: ' + str(type(item)) +
                ' invalid. Can only pass Station or String objects.')

    def __contains__(self, item):
        return self.ismember(item)


if __name__ == '__main__':
    import dbConnection
    cnn = dbConnection.Cnn('gnss_data.cfg')
    import pyDate
    dr = [pyDate.Date(year=2010, doy=1), pyDate.Date(year=2010, doy=2)]
    s1 = Station(cnn, 'rms', 'igm1', dr)
    s2 = Station(cnn, 'rms', 'lpgs', dr)
    s3 = Station(cnn, 'rms', 'chac', dr)
    s4 = Station(cnn, 'cap', 'chac', dr)
    c = StationCollection()
    c.append(s1)
    c.append(s2)
    c.append(s3)
    print(c)
    c.append(s4)
    print(c)
    c.replace_alias([s1, s2], ['zzz1', 'zzz1'])
    print(c)
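
Since __contains__ delegates to ismember, membership tests work directly on the collection. A minimal assumed usage following the demo above (the string form relies on ismember accepting 'net.stnm' strings, as the exception text earlier suggests):

    # assumed usage of the membership test wired up by __contains__
    print(s1 in c)           # test with a Station object
    print('rms.igm1' in c)   # test with a 'net.stnm' string
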
def main():

    parser = argparse.ArgumentParser(
        description=
        'Database integrity tools, metadata check and fixing tools program')

    parser.add_argument(
        'stnlist',
        type=str,
        nargs='+',
        metavar='all|net.stnm',
        help=
        "List of networks/stations to process given in [net].[stnm] format or just [stnm] "
        "(separated by spaces; if [stnm] is not unique in the database, all stations with that "
        "name will be processed). Use keyword 'all' to process all stations in the database. "
        "If [net].all is given, all stations from network [net] will be processed. "
        "Alternatevily, a file with the station list can be provided.")

    parser.add_argument(
        '-d',
        '--date_filter',
        nargs='+',
        metavar='date',
        help='Date range filter for all operations. '
        'Can be specified in wwww-d, yyyy_ddd, yyyy/mm/dd or fyear format')

    parser.add_argument(
        '-rinex',
        '--check_rinex',
        choices=['fix', 'report'],
        type=str,
        nargs=1,
        help=
        'Check the RINEX integrity of the archive-database by verifying that the RINEX files '
        'reported in the rinex table exist in the archive. If argument = "fix" and a RINEX file '
        'does not exist, remove the record; associated PPP and gamit_soln records are also '
        'deleted. If argument = '
        '"report" then just list the missing files.')

    parser.add_argument(
        '-rnx_count',
        '--rinex_count',
        action='store_true',
        help='Count the total number of RINEX files (unique station-days) '
        'per day for a given time interval.')

    parser.add_argument(
        '-stnr',
        '--station_info_rinex',
        action='store_true',
        help=
        'Check that the receiver serial number in the rinex headers agrees with the station info '
        'receiver serial number.')

    parser.add_argument(
        '-stns',
        '--station_info_solutions',
        action='store_true',
        help='Check that the PPP hash values match the station info hash.')

    parser.add_argument(
        '-stnp',
        '--station_info_proposed',
        metavar='ignore_days',
        const=0,
        type=int,
        nargs='?',
        help=
        'Output a proposed station.info using the RINEX metadata. Optional, specify [ignore_days] '
        'to ignore station.info records spanning <= [ignore_days] days.')

    parser.add_argument(
        '-stnc',
        '--station_info_check',
        action='store_true',
        help=
        'Check the consistency of the station information records in the database. Date range '
        'does not apply. Also, check that the RINEX files fall within a valid station information '
        'record.')

    parser.add_argument(
        '-g',
        '--data_gaps',
        metavar='ignore_days',
        const=0,
        type=int,
        nargs='?',
        help=
        'Check the RINEX files in the database and look for gaps (missing days). '
        'Optional: specify [ignore_days], the smallest gap (in days) to display.')

    parser.add_argument('-gg',
                        '--graphical_gaps',
                        action='store_true',
                        help='Visually output RINEX gaps for stations.')

    parser.add_argument(
        '-sc',
        '--spatial_coherence',
        choices=['exclude', 'delete', 'noop'],
        type=str,
        nargs=1,
        help=
        'Check that the RINEX files correspond to the stations they are linked to using their '
        'PPP coordinate. If keyword [exclude] or [delete], add the PPP solution to the excluded '
        'table or delete the PPP solution. If [noop], then only report but do not '
        'exclude or delete.')

    parser.add_argument(
        '-print',
        '--print_stninfo',
        choices=['long', 'short'],
        type=str,
        nargs=1,
        help=
        'Output the station info to stdout. [long] outputs the full line of the station info. '
        '[short] outputs a short version (better for screen visualization).')

    parser.add_argument(
        '-r',
        '--rename',
        metavar='net.stnm',
        nargs=1,
        help=
        "Takes the data from the station list and renames (merges) it to net.stnm. "
        "It also changes the rinex filenames in the archive to match those of the new destiny "
        "station. Only a single station can be given as the origin and destiny. "
        "Limit the date range using the -d option.")

    parser.add_argument(
        '-es',
        '--exclude_solutions',
        metavar=('{start_date}', '{end_date}'),
        nargs=2,
        help=
        'Exclude PPP solutions (by adding them to the excluded table) between {start_date} '
        'and {end_date}')

    parser.add_argument(
        '-del',
        '--delete_rinex',
        metavar=('{start_date}', '{end_date}', '{completion}'),
        nargs=3,
        help='Delete RINEX files (and associated solutions, PPP and GAMIT) '
        'from archive between {start_date} and {end_date} with completion <= {completion}. '
        'Completion ranges from 1.0 to 0.0. Use 1.0 to delete all data. '
        'Operation cannot be undone!')

    parser.add_argument('-np',
                        '--noparallel',
                        action='store_true',
                        help="Execute command without parallelization.")

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")  # type: dbConnection.Cnn

    # create the execution log
    cnn.insert('executions', script='pyIntegrityCheck.py')

    Config = pyOptions.ReadOptions(
        "gnss_data.cfg")  # type: pyOptions.ReadOptions

    stnlist = Utils.process_stnlist(cnn, args.stnlist)

    JobServer = pyJobServer.JobServer(
        Config,
        run_parallel=not args.noparallel)  # type: pyJobServer.JobServer

    #####################################
    # date filter

    dates = [pyDate.Date(year=1980, doy=1), pyDate.Date(year=2100, doy=1)]
    try:
        dates = process_date(args.date_filter)
    except ValueError as e:
        parser.error(str(e))

    #####################################

    if args.check_rinex:
        CheckRinexIntegrity(cnn, Config, stnlist, dates[0], dates[1],
                            args.check_rinex[0], JobServer)

    #####################################

    if args.rinex_count:
        RinexCount(cnn, stnlist, dates[0], dates[1])

    #####################################

    if args.station_info_rinex:
        StnInfoRinexIntegrity(cnn, stnlist, dates[0], dates[1], JobServer)

    #####################################

    if args.station_info_check:
        StnInfoCheck(cnn, stnlist, Config)

    #####################################

    if args.data_gaps is not None:
        GetStnGaps(cnn, stnlist, args.data_gaps, dates[0], dates[1])

    if args.graphical_gaps:
        VisualizeGaps(cnn, stnlist, dates[0], dates[1])

    #####################################

    if args.spatial_coherence is not None:
        CheckSpatialCoherence(cnn, stnlist, dates[0], dates[1])

    #####################################

    if args.exclude_solutions is not None:
        try:
            dates = process_date(args.exclude_solutions)
        except ValueError as e:
            parser.error(str(e))

        ExcludeSolutions(cnn, stnlist, dates[0], dates[1])

    #####################################

    if args.print_stninfo is not None:
        if args.print_stninfo[0] == 'short':
            PrintStationInfo(cnn, stnlist, True)
        elif args.print_stninfo[0] == 'long':
            PrintStationInfo(cnn, stnlist, False)
        else:
            parser.error(
                'Argument for print_stninfo has to be either long or short')

    #####################################

    if args.station_info_proposed is not None:
        for stn in stnlist:
            stninfo = pyStationInfo.StationInfo(cnn,
                                                stn['NetworkCode'],
                                                stn['StationCode'],
                                                allow_empty=True)
            sys.stdout.write(
                stninfo.rinex_based_stninfo(args.station_info_proposed))

    #####################################

    if args.delete_rinex is not None:
        try:
            dates = process_date(args.delete_rinex[0:2])
        except ValueError as e:
            parser.error(str(e))

        DeleteRinex(cnn, stnlist, dates[0], dates[1],
                    float(args.delete_rinex[2]))

    #####################################

    if args.rename:
        if len(stnlist) > 1:
            parser.error(
                'Only a single station should be given for the origin station')

        if '.' not in args.rename[0]:
            parser.error('Format for destination station should be net.stnm')
        else:
            DestNetworkCode = args.rename[0].split('.')[0]
            DestStationCode = args.rename[0].split('.')[1]

            RenameStation(cnn, stnlist[0]['NetworkCode'],
                          stnlist[0]['StationCode'], DestNetworkCode,
                          DestStationCode, dates[0], dates[1],
                          Config.archive_path)

    JobServer.close_cluster()
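
For context, a couple of hypothetical invocations of this integrity-check program (the script name comes from the executions log entry above; station names and dates are illustrative only):

# hypothetical command lines, shown as comments:
#   python pyIntegrityCheck.py all -d 2010_001 2010_365 -rinex report
#   python pyIntegrityCheck.py igs.pove -stnc -np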