Code example #1
File: pyPPP.py Project: demiangomez/Parallel.GAMIT
    def __exec_ppp__(self, raise_error=True):

        try:
            # DDG: handle the error found in PPP (happens every now and then)
            # Fortran runtime error: End of file
            for i in range(2):
                out, err = pyRunWithRetry.RunCommand(self.ppp, 60, self.rootdir, 'input.inp').run_shell()

                if '*END - NORMAL COMPLETION' not in out:

                    if 'Fortran runtime error: End of file' in err and i == 0:
                        # error detected, try again!
                        continue

                    msg = 'PPP ended abnormally for ' + self.rinex.rinex_path + ':\n' + err + '\n' + out
                    if raise_error:
                        raise pyRunPPPException(msg)
                    else:
                        return False, msg
                else:
                    path = os.path.join(self.rootdir, self.rinex.rinex[:-3])
                    self.out = file_readlines(path + 'sum')
                    self.pos = file_readlines(path + 'pos')
                    break

        except pyRunWithRetry.RunCommandWithRetryExeception as e:
            msg = str(e)
            if raise_error:
                raise pyRunPPPException(e)
            else:
                return False, msg
        except IOError as e:
            raise pyRunPPPException(e)

        return True, ''
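
Every example on this page feeds a file through file_readlines, a Parallel.GAMIT helper that none of the snippets define. A minimal sketch of its presumed behavior, assuming it simply wraps open() and readlines() (the real helper may add encoding or error handling):

def file_readlines(path):
    # Presumed behavior: return the file's contents as a list of lines,
    # newlines included, as the slicing and indexing in these examples expect.
    with open(path, 'r') as f:
        return f.readlines()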
Code example #2
File: OTL_FES2014b.py Project: nahuel/Parallel.GAMIT
def import_harpos(filename):
    # parse the file to see if it is HARPOS
    otl = file_readlines(filename)

    if otl[0][0:6] != 'HARPOS':
        print(' >> Input file does not appear to be in HARPOS format!')
        return

    # it's HARPOS alright
    # find the line number of the phase and frequency components
    header = []
    pattern = re.compile(r'H\s+Ssa\s+\d+\.\d+[eEdD][-+]\d+\s+\d+\.\d+[eEdD][-+]\d+\s+\d+\.\d+[eEdD][-+]\d+')
    for line in otl:
        if pattern.match(line):
            header = otl[0:otl.index(line)+1]
            break

    if header:
        pattern = re.compile(r'S\s+\w+\.\w+\s+[-]?\d+\.\d+\s+[-]?\d+\.\d+\s+[-]?\d+\.\d+\s+[-]?\d+\.\d+\s+[-]?\d+\.\d+\s+[-]?\d+\.\d+')

        for line in otl:
            if pattern.match(line):
                load_harpos(header, otl[otl.index(line) - 2:otl.index(line)+13])

    else:
        print(' >> Could not find a valid header')
Code example #3
    def parse_station_info(self, stninfo_file_list):
        """
        function used to parse a station information file
        :param stninfo_file_list: a station information file or list containing station info records
        :return: a list of StationInformationRecords
        """

        if isinstance(stninfo_file_list, list):
            # a list is coming in
            stninfo = stninfo_file_list
        else:
            # a file is coming in
            stninfo = file_readlines(stninfo_file_list)

        records = []
        for line in stninfo:

            if line[0] == ' ' and len(line) >= 77:
                record = StationInfoRecord(self.NetworkCode, self.StationCode,
                                           line)

                if record.DateStart is not None:
                    records.append(record)

        return records
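
As the docstring notes, parse_station_info accepts either a path or a list of records already in memory. A hypothetical call of each form, where stn (the object exposing the method) and the file name are stand-ins:

# 'stn' and 'station.info' are placeholders for illustration only.
records = stn.parse_station_info('station.info')                   # from a file
records = stn.parse_station_info(file_readlines('station.info'))   # from a list
for rec in records:
    print(rec.DateStart)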
Code example #4
    def execute(self):
        # loop through the folders and execute the script
        self.p = subprocess.Popen('./globk.sh', shell=False, stdout=subprocess.PIPE,
                                  stderr=subprocess.PIPE, cwd=self.pwd_comb)

        self.stdout, self.stderr = self.p.communicate()

        # check if any files were not used
        out   = file_readlines(os.path.join(self.pwd_comb, 'globk.log'))
        error = re.findall(r'.*will not be used', ''.join(out))
        if error:
            print(' >> WARNING!')
            print('\n'.join(error))
Code example #5
File: OTL_FES2014b.py Project: nahuel/Parallel.GAMIT
def import_blq(filename):
    # parse the file to see if it is BLQ
    otl = file_readlines(filename)

    if otl[0][0:2] != '$$':
        print(' >> Input file does not appear to be in BLQ format!')
        return

    # it's BLQ alright
    # find the line number of the phase and frequency components
    header  = otl[0:29]
    pattern = re.compile(r'\s{2}\w{3}\.\w{4}')

    for line in otl[29:]:
        if pattern.match(line):
            load_blq(header, otl[otl.index(line):
                                 otl.index(line) + 11])
Code example #6
File: pyParseAntex.py Project: nahuel/Parallel.GAMIT
    def __init__(self, filename):
        antex = file_readlines(filename)

        antennas = set()
        radomes = set()

        for line in antex:
            if 'TYPE / SERIAL NO' in line:
                fields = line.split()
                # entries without a serial number split into at most six fields:
                # antenna type, radome, then the 'TYPE / SERIAL NO' label itself
                if len(fields) <= 6:
                    antennas.add(fields[0])
                    radomes.add(fields[1])

        # make a unique list
        self.Antennas = list(antennas)
        self.Radomes = list(radomes)
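
A hypothetical use of the parser above; the class name ParseAntexFile and the file name are stand-ins, since the snippet shows only __init__:

# Assumed class and file names, for illustration only.
atx = ParseAntexFile('igs14.atx')
print(sorted(atx.Antennas))   # unique antenna types found in the ANTEX file
print(sorted(atx.Radomes))    # unique radome codes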
Code example #7
def load_periodic_space(periodic_file):
    """
    Load the periodic space parameters from an ITRF file
    :param periodic_file:
    :return: dictionary with the periodic terms
    """
    lines = file_readlines(periodic_file)
    periods = {}

    for l in lines:
        if l.startswith('F'):
            per = re.findall(r'Frequency\s+.\s:\s*(\d+\.\d+)', l)[0]
        else:
            # parse the NEU and convert to XYZ
            neu = re.findall(
                r'\s(\w+)\s+\w\s.{9}\s*\d*\s(\w)\s+(.{7})\s+.{7}\s+(.{7})',
                l)[0]

            stn = neu[0].lower().strip()
            com = neu[1].lower().strip()

            if stn not in periods:
                periods[stn] = {}

            if per not in periods[stn]:
                periods[stn][per] = {}

            if com not in periods[stn][per]:
                periods[stn][per][com] = []

            # neu[3] and then neu[2] to arrange it as we have it in the database (sin cos)
            # while Altamimi uses cos sin
            periods[stn][per][com].append([
                np.divide(float(neu[3]), 1000.),
                np.divide(float(neu[2]), 1000.)
            ])

    # average the values (multiple fits for a single station??)
    for stn in periods:
        for per in periods[stn]:
            for com in periods[stn][per]:
                periods[stn][per][com] = np.mean(np.array(periods[stn][per][com]),
                                                 axis=0).tolist()

    return periods
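
load_periodic_space returns a nested dictionary keyed as station -> period -> component -> [sin, cos], in meters given the division by 1000. An illustrative lookup, where the file name and all keys are made-up examples:

periods = load_periodic_space('itrf_periodic.dat')    # stand-in file name
sin_term, cos_term = periods['alrt']['365.25']['n']   # hypothetical keys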
Code example #8
File: gamit_stats.py Project: nahuel/Parallel.GAMIT
def parse_monitor(cnn, monitor):
    lines = file_readlines(monitor)
    output = ''.join(lines)

    try:
        project, subnet, year, doy = re.findall(
            r'GamitTask initialized for (\w+.*?)\.(\w+\d+): (\d+) (\d+)', output,
            re.MULTILINE)[0]
        subnet = int(subnet[3:])
        year = int(year)
        doy = int(doy)
    except:
        # maybe it is a project with no subnets
        try:
            project, year, doy = re.findall(
                r'GamitTask initialized for (\w+.*?): (\d+) (\d+)', output,
                re.MULTILINE)[0]
            subnet = 0
            year = int(year)
            doy = int(doy)
        except:
            print(' -- could not determine project! ' + monitor)
            return

    try:
        node = re.findall(r'executing on (\w+)', output, re.MULTILINE)[0]
    except:
        node = 'PUGAMIT100'

    try:
        start_time = datetime.strptime(
            re.findall(
                r'run.sh \((\d+-\d+-\d+ \d+:\d+:\d+)\): Iteration depth: 1',
                output, re.MULTILINE)[0], '%Y-%m-%d %H:%M:%S')
    except:
        print(' -- could not determine start_time! ' + monitor)
        return

    try:
        end_time = datetime.strptime(
            re.findall(
                r'finish.sh \((\d+-\d+-\d+ \d+:\d+:\d+)\): Done processing h-files and generating SINEX.',
                output, re.MULTILINE)[0], '%Y-%m-%d %H:%M:%S')
    except:
        print(' -- could not determine end_time! ' + monitor)
        return

    try:
        iterations = int(
            re.findall(
                r'run.sh \(\d+-\d+-\d+ \d+:\d+:\d+\): Iteration depth: (\d+)',
                output, re.MULTILINE)[-1])
    except:
        print(' -- could not determine iterations!')
        return

    try:
        nrms = float(
            re.findall(
                r'Prefit nrms:\s+\d+\.\d+[eEdD]\+\d+\s+Postfit nrms:\s+(\d+\.\d+[eEdD][+-]\d+)',
                output, re.MULTILINE)[-1])
    except:
        # maybe GAMIT didn't finish
        nrms = 1

    try:
        # keep all matches so the comprehension iterates over station names
        updated_apr = re.findall(r' (\w+).*?Updated from', output,
                                 re.MULTILINE)
        updated_apr = [upd.replace('_GPS', '').lower() for upd in updated_apr]
        upd_stn = []
        for stn in updated_apr:
            upd_stn += re.findall(
                r'fetching rinex for (\w+\.\w+) %s' % stn.lower(), output,
                re.MULTILINE)

        upd_stn = ','.join(upd_stn)
    except:
        # maybe GAMIT didn't finish
        upd_stn = None

    try:
        wl = float(re.findall(r'WL fixed\s+(\d+\.\d+)', output, re.MULTILINE)[0])
    except:
        # maybe GAMIT didn't finish
        wl = 0

    try:
        nl = float(re.findall(r'NL fixed\s+(\d+\.\d+)', output, re.MULTILINE)[0])
    except:
        # maybe GAMIT didn't finish
        nl = 0

    try:
        oc = re.findall(r'relaxing over constrained stations (\w+.*)', output,
                        re.MULTILINE)[0]
        oc = oc.replace('|', ',').replace('_GPS', '').lower()

        oc_stn = []
        for stn in oc.split(','):
            oc_stn += re.findall(
                r'fetching rinex for (\w+\.\w+) %s' % stn.lower(), output,
                re.MULTILINE)

        oc_stn = ','.join(oc_stn)

    except:
        # maybe GAMIT didn't finish
        oc_stn = None

    try:
        overcons = re.findall(r'GCR APTOL (\w+).{10}\s+([-]?\d+\.\d+)', output,
                              re.MULTILINE)

        if len(overcons) > 0:
            i = np.argmax(np.abs([float(o[1]) for o in overcons]))
            stn = overcons[int(i)][0]

            # get the real station code
            max_overconstrained = re.findall(
                r'fetching rinex for (\w+\.\w+) %s' % stn.lower(), output,
                re.MULTILINE)[0]
        else:
            max_overconstrained = None
    except:
        # maybe GAMIT didn't finish
        max_overconstrained = None

    try:
        cnn.insert('gamit_stats',
                   {'Project'             : project,
                    'subnet'              : subnet,
                    'Year'                : year,
                    'DOY'                 : doy,
                    'FYear'               : Date(year=year, doy=doy).fyear,
                    'wl'                  : wl,
                    'nl'                  : nl,
                    'nrms'                : nrms,
                    'relaxed_constrains'  : oc_stn,
                    'max_overconstrained' : max_overconstrained,
                    'updated_apr'         : upd_stn,
                    'iterations'          : iterations,
                    'node'                : node,
                    'execution_time'      : int((end_time - start_time).total_seconds() / 60.0),
                    'execution_date'      : start_time})
    except dbConnection.dbErrInsert:
        print(' -- record already exists ' + monitor)
Code example #9
    def parse_monitor(self, success):
        lines  = file_readlines(self.pwd + '/monitor.log')
        output = ''.join(lines)

        try:
            start_time = datetime.strptime(
                re.findall(r'run.sh \((\d+-\d+-\d+ \d+:\d+:\d+)\): Iteration depth: 1',
                           output, re.MULTILINE)[0], '%Y-%m-%d %H:%M:%S')
        except:
            start_time = datetime(2001, 1, 1, 0, 0, 0)


        try:
            if success:
                end_time = datetime.strptime(
                    re.findall(r'finish.sh \((\d+-\d+-\d+ \d+:\d+:\d+)\): Done processing h-files and generating SINEX.'
                               , output, re.MULTILINE)[0], '%Y-%m-%d %H:%M:%S')
            else:
                end_time = datetime.now()

        except:
            end_time = datetime(2001, 1, 1, 0, 0, 0)


        try:
            if not success:
                fatals = set(re.findall(r'(.*?FATAL.*)', output, re.MULTILINE))
            else:
                fatals = []
        except Exception as e:
            fatals = ['Could not retrieve FATALS: ' + str(e)]


        try:
            iterations = int(re.findall(r'run.sh \(\d+-\d+-\d+ \d+:\d+:\d+\): Iteration depth: (\d+)',
                             output, re.MULTILINE)[-1])
        except:
            iterations = 0


        try:
            nrms = float(
                re.findall(r'Prefit nrms:\s+\d+\.\d+[eEdD]\+\d+\s+Postfit nrms:\s+(\d+\.\d+[eEdD][+-]\d+)', output,
                           re.MULTILINE)[-1])
        except:
            # maybe GAMIT didn't finish
            nrms = 100


        try:
            # keep all matches so the comprehension iterates over station names
            updated_apr = re.findall(r' (\w+).*?Updated from', output, re.MULTILINE)
            updated_apr = [upd.replace('_GPS', '').lower() for upd in updated_apr]
            upd_stn = []
            for stn in updated_apr:
                for rinex in self.params['rinex']:
                    if rinex['StationAlias'].lower() == stn.lower():
                        upd_stn += [stationID(rinex)]

            upd_stn = ','.join(upd_stn)
        except:
            # maybe GAMIT didn't finish
            upd_stn = None


        try:
            wl = float(re.findall(r'WL fixed\s+(\d+\.\d+)', output, re.MULTILINE)[0])
        except:
            # maybe GAMIT didn't finish
            wl = 0


        try:
            nl = float(re.findall(r'NL fixed\s+(\d+\.\d+)', output, re.MULTILINE)[0])
        except:
            # maybe GAMIT didn't finish
            nl = 0


        try:
            oc = re.findall(r'relaxing over constrained stations (\w+.*)', output, re.MULTILINE)[0]
            oc = oc.replace('|', ',').replace('_GPS', '').lower()

            oc_stn = []
            for stn in oc.split(','):
                for rinex in self.params['rinex']:
                    if rinex['StationAlias'].lower() == stn.lower():
                        oc_stn += [stationID(rinex)]

            oc_stn = ','.join(oc_stn)

        except:
            # maybe GAMIT didn't finish
            oc_stn = None


        try:
            max_overconstrained = None
            overcons = re.findall(r'GCR APTOL (\w+).{10}\s+([-]?\d+\.\d+)', output, re.MULTILINE)

            if len(overcons) > 0:
                vals = [float(o[1]) for o in overcons]
                # take the value with the largest magnitude; max(..., key=abs)
                # preserves the sign, so vals.index() can actually find it
                i    = vals.index(max(vals, key=abs))
                stn  = overcons[i][0]

                for rinex in self.params['rinex']:
                    if rinex['StationAlias'].lower() == stn.lower():
                        # get the real station code
                        max_overconstrained = stationID(rinex)
            else:
                max_overconstrained = None

        except:
            # maybe GAMIT didn't finish
            max_overconstrained = None


        try:
            ms = re.findall(r'No data for site (\w+)',   output, re.MULTILINE)
            ds = re.findall(r'.*deleting station (\w+)', output, re.MULTILINE)
            missing_sites = []
            for stn in ms + ds:
                for rinex in self.params['rinex']:
                    if rinex['StationAlias'].lower() == stn.lower() and \
                       stationID(rinex) not in missing_sites:
                        if stn in ms:
                            missing_sites += ['(' + stationID(rinex) + ')']
                        else:
                            missing_sites += [stationID(rinex)]

        except:
            # maybe GAMIT didn't finish
            missing_sites = []


        return {'session'             : '%s %s' % (self.date.yyyyddd(), self.params['DirName']),
                'Project'             : self.params['NetName'],
                'subnet'              : self.params['subnet'],
                'Year'                : self.date.year,
                'DOY'                 : self.date.doy,
                'FYear'               : self.date.fyear,
                'wl'                  : wl,
                'nl'                  : nl,
                'nrms'                : nrms,
                'relaxed_constrains'  : oc_stn,
                'max_overconstrained' : max_overconstrained,
                'updated_apr'         : upd_stn,
                'iterations'          : iterations,
                'node'                : platform.node(),
                'execution_time'      : int((end_time - start_time).total_seconds() / 60.0),
                'execution_date'      : start_time,
                'missing'             : missing_sites,
                'success'             : success,
                'fatals'              : fatals
                }
Code example #10
File: pyParseZTD.py Project: nahuel/Parallel.GAMIT
    def execute(self):

        cnn = dbConnection.Cnn('gnss_data.cfg')
        # atmospheric zenith delay list
        atmzen = []
        # a dictionary for the station aliases lookup table
        alias = {}
        err   = []

        for GamitSession in self.sessions:
            try:
                znd = os.path.join(GamitSession.pwd_glbf, self.org + self.date.wwwwd() + '.znd')

                if os.path.isfile(znd):
                    # read the content of the file
                    output = file_readlines(znd)
                    v = re.findall(r'ATM_ZEN X (\w+) .. (\d+)\s*(\d*)\s*(\d*)\s*(\d*)\s*(\d*)\s*\d*\s*([- ]?'
                                   r'\d*\.\d+)\s*[+-]*\s*(\d*\.\d*)\s*(\d*\.\d*)', ''.join(output), re.MULTILINE)
                    # add the year doy tuple to the result
                    atmzen += [i + (GamitSession.date.year, GamitSession.date.doy) for i in v]

                    # create a lookup table for station aliases
                    for zd in v:
                        for StnIns in GamitSession.StationInstances:
                            if StnIns.StationAlias.upper() == zd[0] and zd[0] not in alias:
                                alias[zd[0]] = [StnIns.NetworkCode, StnIns.StationCode]

            except:
                err.append(' -- Error parsing zenith delays for session %s:\n%s'
                           % (GamitSession.NetName, traceback.format_exc()))
                return err


        if not len(atmzen):
            err.append(' -- %s No sessions with usable atmospheric zenith delays were found for %s'
                       % (datetime.now().strftime('%Y-%m-%d %H:%M:%S'), self.date.yyyyddd()))
        else:
            # turn atmzen into a numpy array
            atmzen = numpy.array(atmzen,
                                 dtype=[ # ('stn', 'S4'), # python2 
                                        ('stn', 'U4'),   
                                        ('y', 'i4'), ('m', 'i4'), ('d', 'i4'), ('h', 'i4'),
                                        ('mm', 'i4'), ('mo', 'float64'), ('s', 'float64'), ('z', 'float64'),
                                        ('yr', 'i4'), ('doy', 'i4')])

            atmzen.sort(order=['stn', 'y', 'm', 'd', 'h', 'mm'])

            # get the stations in the processing
            stations = [str(stn) for stn in numpy.unique(atmzen['stn'])]

            cnn.query('DELETE FROM gamit_ztd WHERE "Project" = \'%s\' AND "Year" = %i AND "DOY" = %i'
                      % (self.project.lower(), self.date.year, self.date.doy))

            ztd = []
            for stn in stations:
                zd = atmzen[(atmzen['stn'] == stn)]
                # careful, don't do anything if there is no data for this station-day
                if zd.size > 0:
                    # find the unique knots
                    knots = numpy.unique(numpy.array([zd['y'], zd['m'], zd['d'], zd['h'], zd['mm']]).transpose(),
                                         axis=0)
                    # average over the existing records
                    for d in knots:
                        rows = zd[numpy.logical_and.reduce((zd['y']  == d[0],
                                                            zd['m']  == d[1],
                                                            zd['d']  == d[2],
                                                            zd['h']  == d[3],
                                                            zd['mm'] == d[4]))]

                        try:
                            ztd.append(alias[stn] +
                                       [datetime(d[0], d[1], d[2], d[3], d[4]).strftime('%Y-%m-%d %H:%M:%S'),
                                        self.project.lower(),
                                        numpy.mean(rows['z']) - numpy.mean(rows['mo']),
                                        numpy.mean(rows['s']),
                                        numpy.mean(rows['z']),
                                        self.date.year, self.date.doy])

                        except KeyError:
                            err.append(' -- Key error: could not translate station alias %s' % stn)

            for z in ztd:
                # now do the insert
                try:
                    cnn.insert('gamit_ztd',
                               NetworkCode = z[0],
                               StationCode = z[1],
                               Date        = z[2],
                               Project     = z[3],
                               model       = numpy.round(z[4], 4),
                               sigma       = numpy.round(z[5], 4),
                               ZTD         = numpy.round(z[6], 4),
                               Year        = z[7],
                               DOY         = z[8])

                except Exception as e:
                    err.append(' -- Error inserting parsed zenith delay: %s' % str(e))

        cnn.close()
        return err
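
The method above relies on a NumPy structured array so the zenith delays can be sorted by station and epoch and then averaged per time knot. A stripped-down sketch of that pattern, with illustrative field names and data:

import numpy

# Illustrative records: (station, hour, zenith delay).
zd = numpy.array([('braz', 0, 2.31), ('braz', 0, 2.33), ('braz', 1, 2.35)],
                 dtype=[('stn', 'U4'), ('h', 'i4'), ('z', 'float64')])
zd.sort(order=['stn', 'h'])

# Average duplicate (station, hour) knots, as execute() does per station-day.
for h in numpy.unique(zd['h']):
    rows = zd[zd['h'] == h]
    print(h, numpy.mean(rows['z']))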
Code example #11
File: QueryETM.py Project: nahuel/Parallel.GAMIT
def main():
    parser = argparse.ArgumentParser(description='Query ETM for stations in the database. Default is PPP ETMs.')

    parser.add_argument('stnlist', type=str, nargs='+',
                        help="List of networks/stations to plot given in [net].[stnm] format or just [stnm] "
                             "(separated by spaces; if [stnm] is not unique in the database, all stations with that "
                             "name will be plotted). Use keyword 'all' to plot all stations in all networks. "
                             "If [net].all is given, all stations from network [net] will be plotted")

    parser.add_argument('-q', '--query', nargs=2, metavar='{type} {date}', type=str,
                        help='Dates to query the ETM. Specify "model" or "solution" to get the ETM value or the value '
                             'of the daily solution (if exists). Output is in XYZ.')

    parser.add_argument('-gamit', '--gamit', type=str, nargs=1, metavar='{stack}',
                        help="Plot the GAMIT time series specifying which stack name to plot.")

    parser.add_argument('-file', '--filename', type=str,
                        help="Obtain data from an external source (filename). Format should be specified with -format.")

    parser.add_argument('-format', '--format', nargs='+', type=str,
                        help="To be used together with --filename. Specify order of the fields as found in the input "
                             "file. Format strings are gpsWeek, gpsWeekDay, year, doy, fyear, month, day, mjd, "
                             "x, y, z, na. Use 'na' to specify a field that should be ignored. If fields to be ignored "
                             "are at the end of the line, then there is no need to specify those.")

    parser.add_argument('-quiet', '--quiet', action='store_true',
                        help="Do not print message when no solutions are available.")

    parser.add_argument('-vel', '--velocity', action='store_true',
                        help="Output the velocity in XYZ.")

    parser.add_argument('-seasonal', '--seasonal_terms', action='store_true',
                        help="Output the seasonal terms in NEU.")

    args = parser.parse_args()

    ##
    cnn = dbConnection.Cnn('gnss_data.cfg')

    if len(args.stnlist) == 1 and os.path.isfile(args.stnlist[0]):
        print(' >> Station list read from ' + args.stnlist[0])
        stnlist = [{'NetworkCode': items[0],
                    'StationCode': items[1]}
                   for items in
                   (line.strip().split('.') for line in file_readlines(args.stnlist[0]))]
    else:
        stnlist = Utils.process_stnlist(cnn, args.stnlist)


    for stn in stnlist:
        try:

            if args.gamit is None and args.filename is None:
                etm = pyETM.PPPETM(cnn, stn['NetworkCode'], stn['StationCode'], False)

            elif args.filename is not None:
                etm = from_file(args, cnn, stn)

            else:
                polyhedrons = cnn.query_float('SELECT "X", "Y", "Z", "Year", "DOY" FROM stacks '
                                              'WHERE "name" = \'%s\' AND "NetworkCode" = \'%s\' AND '
                                              '"StationCode" = \'%s\' '
                                              'ORDER BY "Year", "DOY", "NetworkCode", "StationCode"'
                                              % (args.gamit[0], stn['NetworkCode'], stn['StationCode']))

                soln = pyETM.GamitSoln(cnn, polyhedrons, stn['NetworkCode'], stn['StationCode'], args.gamit[0])

                etm  = pyETM.GamitETM(cnn, stn['NetworkCode'], stn['StationCode'], False, gamit_soln=soln)



            if args.query is not None:
                model  = (args.query[0] == 'model')
                q_date = pyDate.Date(fyear=float(args.query[1]))

                # get the coordinate
                xyz, _, _, txt = etm.get_xyz_s(q_date.year, q_date.doy, force_model=model)

                strp = ''
                # if user requests velocity too, output it
                if args.velocity and etm.A is not None:
                    vxyz = etm.rotate_2xyz(etm.Linear.p.params[:, 1])
                    strp = '%8.5f %8.5f %8.5f ' % (vxyz[0, 0],
                                                   vxyz[1, 0],
                                                   vxyz[2, 0])

                # also output seasonal terms, if requested
                if args.seasonal_terms and etm.Periodic.frequency_count > 0:
                    strp += ' '.join('%8.5f' % (x * 1000)
                                     for x in etm.Periodic.p.params.flatten())

                print(' %s.%s %14.5f %14.5f %14.5f %8.3f %s -> %s' \
                      % (etm.NetworkCode, etm.StationCode, xyz[0], xyz[1], xyz[2], q_date.fyear, strp, txt))

        except pyETM.pyETMException as e:
            if not args.quiet:
                print(str(e))

        except:
            print('Error during processing of ' + stn['NetworkCode'] + '.' + stn['StationCode'])
            print(traceback.format_exc())
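
Given the argparse definition above, hypothetical invocations might look like the following; the station list, stack name and query date are stand-ins:

python QueryETM.py igs.braz -q model 2020.500
python QueryETM.py all -gamit igs14b -quiet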
Code example #12
def execute_ppp(rinexinfo,
                args,
                stnm,
                options,
                sp3types,
                sp3altrn,
                brdc_path,
                erase,
                apply_met=True,
                decimate=True,
                fix_coordinate=None,
                solve_troposphere=105,
                copy_results=None,
                backward_substitution=False,
                elevation_mask=5):

    # put the correct APR coordinates in the header.
    # stninfo = pyStationInfo.StationInfo(None, allow_empty=True)
    brdc = pyBrdc.GetBrdcOrbits(brdc_path, rinexinfo.date, rinexinfo.rootdir)

    try:
        # inflate the chi**2 limit
        rinexinfo.purge_comments()
        rinexinfo.auto_coord(brdc=brdc, chi_limit=1000)
        stninfo = {}
        rinexinfo.normalize_header(
            stninfo)  # empty dict: only applies the coordinate change
    except pyRinex.pyRinexException as e:
        print(str(e))

    if args.load_rinex:
        rinexinfo.compress_local_copyto('./')
        print('RINEX created in current directory.')
        return

    try:
        otl_coeff = ''

        if args.ocean_loading or args.insert_sql:
            # get a first ppp coordinate
            ppp = pyPPP.RunPPP(rinexinfo,
                               '',
                               options,
                               sp3types,
                               sp3altrn,
                               0,
                               strict=False,
                               apply_met=False,
                               kinematic=False,
                               clock_interpolation=True)

            ppp.exec_ppp()

            # use it to get the OTL (when the auto_coord is very bad, PPP doesn't like the resulting OTL).
            otl = pyOTL.OceanLoading(stnm, options['grdtab'],
                                     options['otlgrid'], ppp.x, ppp.y, ppp.z)
            otl_coeff = otl.calculate_otl_coeff()
            # run again, now with OTL coeff:

        # determine if need to solve for coordinates or not
        x = y = z = 0
        if fix_coordinate is not None:
            if len(fix_coordinate) > 1:
                x = float(fix_coordinate[0])
                y = float(fix_coordinate[1])
                z = float(fix_coordinate[2])
            else:
                # read from file
                cstr = file_readlines(fix_coordinate[0])
                xyz = re.findall(
                    r'%s (-?\d+\.\d+)\s+(-?\d+\.\d+)\s+(-?\d+\.\d+)' %
                    rinexinfo.StationCode, ''.join(cstr), re.IGNORECASE)
                if len(xyz):
                    x = float(xyz[0][0])
                    y = float(xyz[0][1])
                    z = float(xyz[0][2])
                else:
                    print(
                        'WARNING: coordinate fixing invoked but could not find %s in list of coordinates -> '
                        'unfixing station coordinate in PPP' %
                        rinexinfo.StationCode)
                    fix_coordinate = False
            print('%14.4f %14.4f %14.4f' % (x, y, z))

        ppp = pyPPP.RunPPP(
            rinexinfo,
            otl_coeff,
            options,
            sp3types,
            sp3altrn,
            0,
            strict=False,
            apply_met=apply_met,
            kinematic=False,
            clock_interpolation=True,
            erase=erase,
            decimate=decimate,
            solve_coordinates=not fix_coordinate,
            solve_troposphere=solve_troposphere,
            back_substitution=backward_substitution,
            elev_mask=elevation_mask,
            x=x,
            y=y,
            z=z)

        ppp.exec_ppp()

        if not ppp.check_phase_center(ppp.proc_parameters):
            print(
                'WARNING: phase center parameters not found for declared antenna!'
            )

        if not args.insert_sql:
            print(
                '%s %10.5f %13.4f %13.4f %13.4f %14.9f %14.9f %8.3f %8.3f %8.3f %8.3f %8.3f %8.3f'
                %
                (stnm, rinexinfo.date.fyear, ppp.x, ppp.y, ppp.z, ppp.lat[0],
                 ppp.lon[0], ppp.h[0], ppp.clock_phase, ppp.clock_phase_sigma,
                 ppp.phase_drift, ppp.phase_drift_sigma, ppp.clock_rms))
        else:
            print('INSERT INTO stations ("NetworkCode", "StationCode", "auto_x", "auto_y", "auto_z", ' \
                  '"Harpos_coeff_otl", lat, lon, height) VALUES ' \
                  '(\'???\', \'%s\', %.4f, %.4f, %.4f, \'%s\', %.8f, %.8f, %.3f)' \
                  % (stnm, ppp.x, ppp.y, ppp.z, otl_coeff, ppp.lat[0], ppp.lon[0], ppp.h[0]))

        if args.find:
            cnn = dbConnection.Cnn('gnss_data.cfg')

            Result, match, closest_stn = ppp.verify_spatial_coherence(
                cnn, stnm)

            if Result:
                print('Found matching station: %s.%s' %
                      (match[0]['NetworkCode'], match[0]['StationCode']))

            elif len(match) == 1:
                print('%s matches the coordinate of %s.%s (distance = %8.3f m) but the filename indicates it is %s' \
                      % (rinexinfo.rinex,
                         match[0]['NetworkCode'],
                         match[0]['StationCode'],
                         float(match[0]['distance']),
                         stnm))

            elif len(match) > 0:
                print('Solution for RINEX (%s %s) did not match a unique station location (and station code) ' \
                      'within 10 km. Possible candidate(s): %s' \
                      % (rinexinfo.rinex,
                         rinexinfo.date.yyyyddd(),
                         ', '.join(['%s.%s: %.3f m' %
                                    (m['NetworkCode'],
                                     m['StationCode'],
                                     m['distance']) for m in match])))

            elif len(match) == 0 and len(closest_stn) > 0:
                print('No matches found. Closest station: %s.%s. (distance = %8.3f m)' \
                      % (closest_stn[0]['NetworkCode'],
                         closest_stn[0]['StationCode'],
                         closest_stn[0]['distance']))

        if copy_results:
            copy_results = copy_results[0]
            try:
                fpath = os.path.join(copy_results, rinexinfo.StationCode)
                if not os.path.exists(fpath):
                    os.makedirs(fpath)
                shutil.copyfile(
                    ppp.path_res_file,
                    os.path.join(fpath, os.path.basename(ppp.path_res_file)))
                shutil.copyfile(
                    ppp.path_pos_file,
                    os.path.join(fpath, os.path.basename(ppp.path_pos_file)))
                shutil.copyfile(
                    ppp.path_ses_file,
                    os.path.join(fpath, os.path.basename(ppp.path_ses_file)))
                shutil.copyfile(
                    ppp.path_sum_file,
                    os.path.join(fpath, os.path.basename(ppp.path_sum_file)))
                shutil.copyfile(
                    os.path.join(ppp.rootdir, 'commands.cmd'),
                    os.path.join(fpath,
                                 os.path.basename(ppp.path_sum_file) + '.cmd'))
            except Exception as e:
                print(
                    'WARNING: There was a problem copying results to %s: %s' %
                    (copy_results, str(e)))

    except pyPPP.pyRunPPPException as e:
        print('Exception in PPP: ' + str(e))

    except pyRinex.pyRinexException as e:
        print('Exception in pyRinex: ' + str(e))