Example #1
def station_list(cnn, stations, dates):

    stations = process_stnlist(cnn, stations)
    stn_obj = StationCollection()

    # use the connection to the db to get the stations
    for Stn in tqdm(sorted(stations,
                           key=lambda s: (s['NetworkCode'], s['StationCode'])),
                    ncols=80,
                    disable=None):

        NetworkCode = Stn['NetworkCode']
        StationCode = Stn['StationCode']

        rs = cnn.query(
            'SELECT * FROM rinex_proc WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' AND '
            '("ObservationYear", "ObservationDOY") BETWEEN (%s) AND (%s)' %
            (NetworkCode, StationCode, dates[0].yyyy() + ', ' + dates[0].ddd(),
             dates[1].yyyy() + ', ' + dates[1].ddd()))

        if rs.ntuples() > 0:
            tqdm.write(' -- %s -> adding...' % stationID(Stn))
            try:
                stn_obj.append(Station(cnn, NetworkCode, StationCode, dates))
            except pyETMException:
                tqdm.write(
                    '    %s -> station exists, but there was a problem initializing ETM.'
                    % stationID(Stn))
        else:
            tqdm.write(' -- %s -> no data for requested time window' %
                       stationID(Stn))

    return stn_obj
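
A hedged usage sketch of station_list (the dbConnection.Cnn and pyDate.Date calls mirror those in main() further below; the station names are hypothetical):

import dbConnection
import pyDate

cnn = dbConnection.Cnn('gnss_data.cfg')
dates = [pyDate.Date(year=2020, doy=1), pyDate.Date(year=2020, doy=180)]
stations = station_list(cnn, ['igs.braz', 'igs.lpgs'], dates)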
Example #2
    def to_dharp(self, record):
        """
        function to convert the current height code to DHARP
        :return: DHARP height
        """

        if record.HeightCode == 'DHARP':
            return record
        else:
            htc = self.cnn.query_float('SELECT * FROM gamit_htc WHERE "AntennaCode" = \'%s\' AND "HeightCode" = \'%s\''
                                       % (record.AntennaCode, record.HeightCode), as_dict=True)

            if len(htc):

                record.AntennaHeight = np.sqrt(np.square(float(record.AntennaHeight)) -
                                               np.square(float(htc[0]['h_offset']))) - float(htc[0]['v_offset'])
                if record.Comments is not None:
                    record.Comments = record.Comments + '\nChanged from %s to DHARP by pyStationInfo.\n' \
                                      % record.HeightCode
                else:
                    record.Comments = 'Changed from %s to DHARP by pyStationInfo.\n' % record.HeightCode

                record.HeightCode = 'DHARP'

                return record
            else:
                raise pyStationInfoHeightCodeNotFound('%s: %s -> Could not translate height code %s to DHARP. '
                                                      'Check the height codes table.'
                                                      % (stationID(self),
                                                         record.AntennaCode,
                                                         record.HeightCode))
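
The conversion above reduces a slanted antenna-height measurement to the vertical height of the antenna reference point. A worked sketch of the same arithmetic, with hypothetical offsets in place of the gamit_htc values:

import numpy as np

slant_height = 1.500   # metres, as recorded in the station info (hypothetical)
h_offset     = 0.1965  # horizontal offset from gamit_htc (hypothetical)
v_offset     = 0.0     # vertical offset from gamit_htc (hypothetical)

# vertical = sqrt(slant^2 - h_offset^2) - v_offset, as in to_dharp()
dharp = np.sqrt(np.square(slant_height) - np.square(h_offset)) - v_offset
print('%.4f' % dharp)  # -> 1.4871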
Example #3
    def load_stationinfo_records(self):
        # function to load the station info records from the database
        # returns True if records were found; returns False if none were found,
        # unless allow_empty == False, in which case it raises an error
        stninfo = self.cnn.query(
            'SELECT * FROM stationinfo WHERE "NetworkCode" = \'' +
            self.NetworkCode + '\' AND "StationCode" = \'' + self.StationCode +
            '\' ORDER BY "DateStart"')

        if stninfo.ntuples() == 0:
            if not self.allow_empty:
                # allow no station info if explicitly requested by the user.
                # Purpose: insert a station info for a new station!
                raise pyStationInfoException(
                    'Could not find ANY valid station info entry for ' +
                    stationID(self))
            self.record_count = 0
            return False
        else:
            for record in stninfo.dictresult():
                self.records.append(
                    StationInfoRecord(self.NetworkCode, self.StationCode,
                                      record))

            self.record_count = stninfo.ntuples()
            return True
Example #4
def plot_etm(cnn, stack, station, directory):
    try:
        ts = stack.get_station(station['NetworkCode'], station['StationCode'])

        ts = pyETM.GamitSoln(cnn, ts, station['NetworkCode'],
                             station['StationCode'], stack.project)
        etm = pyETM.GamitETM(cnn,
                             station['NetworkCode'],
                             station['StationCode'],
                             gamit_soln=ts)

        pngfile = os.path.join(directory, stationID(etm) + '_gamit.png')
        jsonfile = os.path.join(directory, stationID(etm) + '_gamit.json')

        etm.plot(pngfile, plot_missing=False)
        file_write(jsonfile,
                   json.dumps(etm.todictionary(False), indent=4, sort_keys=False))

    except pyETM.pyETMException as e:
        tqdm.write(str(e))
Example #5
    def __init__(self,
                 cnn,
                 NetworkCode=None,
                 StationCode=None,
                 date=None,
                 allow_empty=False,
                 h_tolerance=0):

        self.record_count = 0
        self.NetworkCode = NetworkCode
        self.StationCode = StationCode
        self.allow_empty = allow_empty
        self.date = None
        self.records = []
        self.currentrecord = StationInfoRecord(NetworkCode, StationCode)

        self.header = '*SITE  Station Name      Session Start      Session Stop       Ant Ht   HtCod  Ant N    ' \
                      'Ant E    Receiver Type         Vers                  SwVer  Receiver SN           ' \
                      'Antenna Type     Dome   Antenna SN          '

        # connect to the db and load the station info table
        if NetworkCode is not None and StationCode is not None:

            self.cnn = cnn

            if self.load_stationinfo_records():
                # find the record that matches the given date
                if date is not None:
                    self.date = date

                    pDate = date.datetime()

                    for record in self.records:

                        DateStart = record['DateStart'].datetime()
                        DateEnd = record['DateEnd'].datetime()

                        # make the gap-tolerant comparison
                        tolerance = datetime.timedelta(hours=h_tolerance)
                        if DateStart - tolerance <= pDate <= DateEnd + tolerance:
                            # found the record that corresponds to this date
                            self.currentrecord = record
                            break

                    if self.currentrecord.DateStart is None:
                        raise pyStationInfoException(
                            'Could not find a matching station.info record for '
                            + stationID(self) + ' ' + date.yyyymmdd() + ' (' +
                            date.yyyyddd() + ')')
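
A hedged usage sketch of the date lookup above (network, station, and date are hypothetical; the call matches the pyStationInfo.StationInfo usage in Example #7):

stninfo = pyStationInfo.StationInfo(cnn, 'igs', 'braz',
                                    date=pyDate.Date(year=2020, doy=100),
                                    h_tolerance=2)
# currentrecord now holds the record active on 2020-100, allowing session
# gaps of up to 2 hours
print(stninfo.currentrecord)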
Example #6
    def parse_sinex(self):

        for sinex in os.listdir(self.pwd_glbf):
            if sinex.endswith('.snx'):
                snx = snxParse.snxFileParser(os.path.join(self.pwd_glbf, sinex))
                snx.parse()
                self.polyhedron     = snx.stationDict
                self.VarianceFactor = snx.varianceFactor

        if self.polyhedron:
            # rename any aliases and change keys to net.stn
            for stn in self.StationInstances:
                # replace the key
                try:
                    self.polyhedron[stationID(stn)] = self.polyhedron.pop(stn.StationAlias.upper())
                except KeyError:
                    # maybe the station didn't have a solution
                    pass
        return self.polyhedron, self.VarianceFactor
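
The alias rename above uses the standard dict re-keying idiom; a minimal sketch:

# pop() removes the old key (the alias) and raises KeyError when it is
# absent, e.g. when the station had no solution in the SINEX
solutions = {'ABCD': (0.0, 0.0, 0.0)}          # hypothetical alias -> solution
solutions['net.abcd'] = solutions.pop('ABCD')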
Example #7
def generate_kml(cnn, project, stations):

    stnlist = [stationID(s) for s in stations]

    tqdm.write(
        '  >> Generating KML for this run (see production directory)...')

    kml = simplekml.Kml()

    rs = cnn.query_float(
        'SELECT * FROM stations WHERE "NetworkCode" NOT LIKE \'?%\' '
        'ORDER BY "NetworkCode", "StationCode" ',
        as_dict=True)

    tqdm.write(' >> Adding stations in database')

    folder1 = kml.newfolder(name=project)
    folder2 = kml.newfolder(name='all stations')

    ICON_CIRCLE = 'http://maps.google.com/mapfiles/kml/shapes/placemark_circle.png'
    ICON_SQUARE = 'http://maps.google.com/mapfiles/kml/shapes/placemark_square.png'

    stylec = simplekml.StyleMap()
    stylec.normalstyle.iconstyle.icon.href = ICON_CIRCLE
    stylec.normalstyle.iconstyle.scale = 1.5
    stylec.normalstyle.labelstyle.scale = 0

    stylec.highlightstyle.iconstyle.icon.href = ICON_CIRCLE
    stylec.highlightstyle.iconstyle.scale = 2
    stylec.highlightstyle.labelstyle.scale = 2

    styles_ok = simplekml.StyleMap()
    styles_ok.normalstyle.iconstyle.icon.href = ICON_SQUARE
    styles_ok.normalstyle.iconstyle.color = 'ff00ff00'
    styles_ok.normalstyle.iconstyle.scale = 1.5
    styles_ok.normalstyle.labelstyle.scale = 0

    styles_ok.highlightstyle.iconstyle.icon.href = ICON_SQUARE
    styles_ok.highlightstyle.iconstyle.color = 'ff00ff00'
    styles_ok.highlightstyle.iconstyle.scale = 2
    styles_ok.highlightstyle.labelstyle.scale = 2

    styles_nok = simplekml.StyleMap()
    styles_nok.normalstyle.iconstyle.icon.href = ICON_SQUARE
    styles_nok.normalstyle.iconstyle.color = 'ff0000ff'
    styles_nok.normalstyle.iconstyle.scale = 1.5
    styles_nok.normalstyle.labelstyle.scale = 0

    styles_nok.highlightstyle.iconstyle.icon.href = ICON_SQUARE
    styles_nok.highlightstyle.iconstyle.color = 'ff0000ff'
    styles_nok.highlightstyle.iconstyle.scale = 2
    styles_nok.highlightstyle.labelstyle.scale = 2

    for stn in tqdm(rs, ncols=80):
        stn_id = stationID(stn)

        count = cnn.query_float(
            'SELECT count(*) as cc FROM rinex_proc WHERE "NetworkCode" = \'%s\' '
            'AND "StationCode" = \'%s\'' %
            (stn['NetworkCode'], stn['StationCode']))

        ppp_s = cnn.query_float(
            'SELECT count(*) as cc FROM ppp_soln WHERE "NetworkCode" = \'%s\' '
            'AND "StationCode" = \'%s\'' %
            (stn['NetworkCode'], stn['StationCode']))

        try:
            stninfo = pyStationInfo.StationInfo(cnn,
                                                stn['NetworkCode'],
                                                stn['StationCode'],
                                                allow_empty=True)
            _ = stninfo.return_stninfo_short()
        except pyStationInfo.pyStationInfoHeightCodeNotFound as e:
            tqdm.write('Error: %s. Station will be skipped.' % str(e))
            continue

        if count[0][0]:
            completion = '%.1f' % (float(ppp_s[0][0]) / float(count[0][0]) *
                                   100)
        else:
            completion = 'NA'

        if stn['DateStart']:
            DS = '%.3f' % stn['DateStart']
            DE = '%.3f' % stn['DateEnd']
        else:
            DS = 'NA'
            DE = 'NA'

        if stn_id in stnlist:
            folder = folder1
            # mark the stations with less than 100 observations or with less than 60% completion (PPP)
            if count[0][0] >= 100 and (float(ppp_s[0][0]) /
                                       float(count[0][0]) * 100) >= 60.0:
                style = styles_ok
            else:
                style = styles_nok
        else:
            folder = folder2
            style = stylec

        plt = plot_station_info_rinex(cnn, stn['NetworkCode'],
                                      stn['StationCode'], stninfo)

        pt = folder.newpoint(name=stn_id, coords=[(stn['lon'], stn['lat'])])
        pt.stylemap = style

        pt.description = """<strong>%s -> %s</strong> RINEX count: %i PPP soln: %s%%<br><br>
<strong>Station Information:</strong><br>
<table width="880" cellpadding="0" cellspacing="0">
<tr>
<td align="left" valign="top">
<p style="font-family: monospace; font-size: 8pt;">%s<br><br>
<strong>Observation distribution:</strong><br>
</p>
<img src="data:image/png;base64, %s" alt="Observation information" />
</td>
</tr>
</table>""" % (DS, DE, count[0][0], completion,
               stninfo.return_stninfo_short().replace('\n', '<br>'), plt)

    if not os.path.exists('production'):
        os.makedirs('production')

    kml.savekmz('production/' + project + '.kmz')
Example #8
def generate_kmz(kmz,
                 stations,
                 discarded,
                 deformation_type='interseismic',
                 units='mm/yr'):

    tqdm.write(' >> Generating KML (see production directory)...')

    kml = simplekml.Kml()
    folder1 = kml.newfolder(name=deformation_type)
    folder2 = kml.newfolder(name='discarded')

    # define styles
    styles_ok = simplekml.StyleMap()
    styles_nok = simplekml.StyleMap()
    for (s, icon_color, label_scale) in (
            (styles_ok.normalstyle,     'ff00ff00', 0),
            (styles_ok.highlightstyle,  'ff00ff00', 3),
            (styles_nok.normalstyle,    'ff0000ff', 0),
            (styles_nok.highlightstyle, 'ff0000ff', 3)):
        s.iconstyle.icon.href = 'http://maps.google.com/mapfiles/kml/shapes/placemark_square.png'
        s.iconstyle.color = icon_color
        s.labelstyle.scale = label_scale

    for stn in tqdm(stations,
                    ncols=160,
                    disable=None,
                    desc=' -- Included station list'):
        stn_id = stationID(stn)
        plot = plot_station_param(stn['NetworkCode'], stn['StationCode'],
                                  deformation_type, units, stn['n'], stn['e'])

        pt = folder1.newpoint(name=stn_id, coords=[(stn['lon'], stn['lat'])])
        pt.stylemap = styles_ok

        pt.description = """<strong>NE (%s): %5.2f %5.2f [%s]</strong><br><br>
        <table width="880" cellpadding="0" cellspacing="0">
        <tr>
        <td align="center" valign="top">
        <strong>Parameters:</strong><br>
        <img src="data:image/png;base64, %s" alt="Observation information" height="300" width="300"/><br>
        <strong>Trajectory model:</strong><br>
        <img src="data:image/png;base64, %s" alt="Observation information" height="750" width="1100"/>
        </td>
        </tr>
        </table>
        """ % (deformation_type, stn['n'] * 1000, stn['e'] * 1000, units, plot,
               stn['etm'])

        ls = folder1.newlinestring(name=stn_id)

        ls.coords = [
            (stn['lon'], stn['lat']),
            (stn['lon'] + stn['e'] * 10,
             stn['lat'] + stn['n'] * 10 * np.cos(stn['lat'] * np.pi / 180))
        ]
        ls.style.linestyle.width = 3
        ls.style.linestyle.color = 'ff0000ff'

    for stn in tqdm(discarded,
                    ncols=160,
                    disable=None,
                    desc=' -- Excluded station list'):
        stn_id = stationID(stn)
        plot = plot_station_param(stn['NetworkCode'], stn['StationCode'],
                                  deformation_type, units, stn['n'], stn['e'])

        pt = folder2.newpoint(name=stn_id, coords=[(stn['lon'], stn['lat'])])
        pt.stylemap = styles_nok

        pt.description = """<strong>NE (%s): %5.2f %5.2f [%s]</strong><br><br>
        <table width="880" cellpadding="0" cellspacing="0">
        <tr>
        <td align="center" valign="top">
        <strong>Parameters:</strong><br>
        <img src="data:image/png;base64, %s" alt="Observation information" height="300" width="300"/><br>
        <strong>Trajectory model:</strong><br>
        <img src="data:image/png;base64, %s" alt="Observation information" height="750" width="1100"/>
        </td>
        </tr>
        </table>
        """ % (deformation_type, stn['n'] * 1000, stn['e'] * 1000, units, plot,
               stn['etm'])

        ls = folder2.newlinestring(name=stn_id)

        ls.coords = [
            (stn['lon'], stn['lat']),
            (stn['lon'] + stn['e'] * 10,
             stn['lat'] + stn['n'] * 10 * np.cos(stn['lat'] * np.pi / 180))
        ]
        ls.style.linestyle.width = 3
        ls.style.linestyle.color = 'ff0000ff'

    if not os.path.exists('production'):
        os.makedirs('production')

    tqdm.write(' >> Saving kmz...')
    kml.savekmz(kmz)
Example #9
def process_postseismic(cnn, stnlist, force_stnlist, stack,
                        interseismic_filename, event, prev_events, sigma_cutoff,
                        lat_lim, filename, kmz):
    tqdm.write(
        ' >> Analyzing suitability of station list to participate in postseismic model...'
    )
    tqdm.write(' -- output filename: %s' % filename)

    use_station = []
    discarded = []

    # load the interseismic model
    model = np.loadtxt(interseismic_filename)

    # model[:, 0] -= 360
    params = []

    def getpost():
        # pick column 0 of the parameters for a pure co-seismic decay,
        # column 1 for a jump + decay
        return {'NetworkCode': etm.NetworkCode,
                'StationCode': etm.StationCode,
                'lat': etm.gamit_soln.lat[0],
                'lon': etm.gamit_soln.lon[0],
                'n': eq.p.params[0, 0] if eq.p.jump_type == CO_SEISMIC_DECAY else eq.p.params[0, 1],
                'e': eq.p.params[1, 0] if eq.p.jump_type == CO_SEISMIC_DECAY else eq.p.params[1, 1],
                'etm': etm.plot(plot_missing=False, plot_outliers=True, fileio=BytesIO())}

    for stn in tqdm(stnlist, ncols=160, disable=None):
        stn_id = stationID(stn)
        tqdm.write(' -- Processing station %s' % stn_id)
        try:
            lla = cnn.query_float(
                'SELECT lat,lon FROM stations WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\''
                % (stn['NetworkCode'], stn['StationCode']),
                as_dict=True)[0]

            ve = griddata(model[:, 0:2],
                          model[:, 2] / 1000, (lla['lon'], lla['lat']),
                          method='cubic')
            vn = griddata(model[:, 0:2],
                          model[:, 3] / 1000, (lla['lon'], lla['lat']),
                          method='cubic')

            etm = pyETM.GamitETM(cnn,
                                 stn['NetworkCode'],
                                 stn['StationCode'],
                                 stack_name=stack,
                                 interseismic=[vn, ve, 0.])

            for eq in [e for e in etm.Jumps.table
                       if e.p.jump_type in (CO_SEISMIC_DECAY, CO_SEISMIC_JUMP_DECAY)
                       and e.fit and etm.A is not None]:
                if eq.date == event:
                    tqdm.write(
                        '    co-seismic decay detected for event %s (years: %.3f; data points: %i)'
                        % (str(eq.p.jump_date), eq.constrain_years,
                           eq.constrain_data_points))
                    if (eq.constrain_years >= 2.5 and eq.constrain_data_points >= eq.constrain_years * 5) \
                            or stn in force_stnlist:
                        params.append(getpost())
                        tqdm.write(
                            '    co-seismic decay added to the list for interpolation'
                        )
                    else:
                        tqdm.write(
                            '    co-seismic decay not added (conditions not met)'
                        )
                        discarded.append(getpost())
                    break

        except pyETM.pyETMException as e:
            tqdm.write(' -- %s: %s' % (stn_id, str(e)))

    outvar = np.array([(v['NetworkCode'] + '.' + v['StationCode'], v['lon'],
                        v['lat'], v['e'], v['n']) for v in params],
                      dtype=[('stn', 'U8'), ('lon', 'float64'),
                             ('lat', 'float64'), ('e', 'float64'),
                             ('n', 'float64')])

    np.savetxt(filename,
               outvar,
               fmt=("%s", "%13.8f", "%12.8f", "%12.8f", "%12.8f"))
    if kmz:
        generate_kmz(kmz, params, discarded, 'postseismic', 'mm')
Example #10
def process_interseismic(cnn, stnlist, force_stnlist, stack, sigma_cutoff,
                         vel_cutoff, lat_lim, filename, kmz):
    # start by checking that the stations in the list have a linear start (no post-seismic)
    # and more than 2 years of data until the first earthquake or non-linear behavior

    tqdm.write(
        ' >> Analyzing suitability of station list to participate in interseismic model...'
    )
    tqdm.write(' -- latitude cutoff: south %.2f, north %.2f' %
               (lat_lim[0], lat_lim[1]))
    tqdm.write(' -- velocity cutoff: %.2f mm/yr; output filename: %s' %
               (vel_cutoff, filename))

    use_station = []
    discarded = []
    velocities = []
    # min_lon     =  9999
    # max_lon     = -9999
    # min_lat     =  9999
    # max_lat     = -9999

    for stn in tqdm(stnlist, ncols=160, disable=None):
        try:
            stn_id = stationID(stn)

            etm = pyETM.GamitETM(cnn,
                                 stn['NetworkCode'],
                                 stn['StationCode'],
                                 stack_name=stack)

            use = True
            if stn in force_stnlist:
                tqdm.write(' -- %s was forced to be included in the list' %
                           stn_id)
            else:
                # only run the checks if the station is not in the force list
                # check that the station is within the latitude range
                if etm.gamit_soln.lat[0] < lat_lim[0] or \
                   etm.gamit_soln.lat[0] > lat_lim[1]:
                    tqdm.write(
                        ' -- %s excluded because it is outside of the latitude limit'
                        % stn_id)
                    use = False

                # check that station has at least 2 years of data
                if use and etm.gamit_soln.date[-1].fyear - etm.gamit_soln.date[0].fyear < 2:
                    tqdm.write(
                        ' -- %s rejected because it has less than two years of observations %s -> %s'
                        % (stn_id, etm.gamit_soln.date[0].yyyyddd(),
                           etm.gamit_soln.date[-1].yyyyddd()))
                    use = False

                # other checks
                if etm.A is not None:
                    if len(etm.Jumps.table) > 0 and use:
                        j = next((j for j in etm.Jumps.table
                                  if j.p.jump_type == pyETM.CO_SEISMIC_JUMP_DECAY and j.fit and
                                  j.magnitude >= 7 and
                                  j.date.fyear < etm.gamit_soln.date[0].fyear + 1.5), None)
                        if j:
                            tqdm.write(
                                ' -- %s has a Mw %.1f in %s and data starts in %s'
                                % (stn_id, j.magnitude, j.date.yyyyddd(),
                                   etm.gamit_soln.date[0].yyyyddd()))
                            use = False

                        else:
                            has_eq_jumps = any(
                                True for j in etm.Jumps.table
                                if j.p.jump_type == pyETM.CO_SEISMIC_DECAY
                                and j.fit)
                            if has_eq_jumps:
                                tqdm.write(
                                    ' -- %s has one or more earthquakes before data started in %s'
                                    %
                                    (stn_id, etm.gamit_soln.date[0].yyyyddd()))
                                use = False

                    if (etm.factor[0] * 1000 > sigma_cutoff
                            or etm.factor[1] * 1000 > sigma_cutoff) and use:
                        tqdm.write(
                            ' -- %s rejected due to large wrms %5.2f %5.2f %5.2f'
                            % (stn_id, etm.factor[0] * 1000,
                               etm.factor[1] * 1000, etm.factor[2] * 1000))
                        use = False

                    norm = np.sqrt(
                        np.sum(np.square(etm.Linear.p.params[0:2, 1] * 1000)))
                    if norm > vel_cutoff and use:
                        tqdm.write(
                            ' -- %s rejected due to large NEU velocity: %5.2f %5.2f %5.2f NE norm %5.2f'
                            % (stn_id, etm.Linear.p.params[0, 1] * 1000,
                               etm.Linear.p.params[1, 1] * 1000,
                               etm.Linear.p.params[2, 1] * 1000, norm))
                        use = False
                elif use:
                    tqdm.write(' -- %s too few solutions to calculate ETM' %
                               stn_id)
                    use = False

            def getvel():
                return {'NetworkCode': etm.NetworkCode,
                        'StationCode': etm.StationCode,
                        'lat': etm.gamit_soln.lat[0],
                        'lon': etm.gamit_soln.lon[0],
                        'n': etm.Linear.p.params[0, 1],
                        'e': etm.Linear.p.params[1, 1],
                        'etm': etm.plot(plot_missing=False,
                                        plot_outliers=True,
                                        fileio=BytesIO())}

            if use:
                tqdm.write(
                    ' -- %s added NEU wrms: %5.2f %5.2f %5.2f NEU vel: %5.2f %5.2f %5.2f'
                    % (stn_id, etm.factor[0] * 1000, etm.factor[1] * 1000,
                       etm.factor[2] * 1000, etm.Linear.p.params[0, 1] * 1000,
                       etm.Linear.p.params[1, 1] * 1000,
                       etm.Linear.p.params[2, 1] * 1000))
                use_station.append(stn)
                v = getvel()
                velocities.append(v)

                #min_lon = min(v['lon'], min_lon)
                #max_lon = max(v['lon'], max_lon)
                #min_lat = min(v['lat'], min_lat)
                #max_lat = max(v['lat'], max_lat)

            elif etm.A is not None:
                discarded.append(getvel())

        except pyETM.pyETMException as e:
            tqdm.write(' -- %s: %s' % (stn_id, str(e)))

    tqdm.write(' >> Total number of stations for linear model: %i' %
               len(use_station))

    outvar = np.array([(v['NetworkCode'] + '.' + v['StationCode'], v['lon'],
                        v['lat'], v['e'], v['n']) for v in velocities],
                      dtype=[('stn', 'U8'), ('lon', 'float64'),
                             ('lat', 'float64'), ('e', 'float64'),
                             ('n', 'float64')])
    np.savetxt(filename,
               outvar,
               fmt=("%s", "%13.8f", "%12.8f", "%12.8f", "%12.8f"))
    if kmz:
        generate_kmz(kmz, velocities, discarded, 'interseismic', 'mm/yr')
Example #11
    def start(self, dirname, year, doy, dry_run=False):
        monitor_open = False

        try:
            # copy the folder created by GamitSession in the solution_pwd to the remote_pwd (pwd)
            try:
                if not os.path.exists(os.path.dirname(self.pwd)):
                    os.makedirs(os.path.dirname(self.pwd))
            except OSError:
                # race condition: several processes may try to create the same folder;
                # if OSError occurs, ignore and continue
                pass

            # if the local folder exists (due to previous incomplete processing), erase it
            if os.path.exists(self.pwd):
                shutil.rmtree(self.pwd)

            # ready to copy the shared solution_dir to pwd
            shutil.copytree(self.solution_pwd, self.pwd, symlinks=True)

            with file_open(os.path.join(self.pwd, 'monitor.log'), 'a') as monitor:
                monitor_open = True

                def log(s):
                    monitor.write(now_str() + ' -> ' + s + '\n')

                log('%s %i %i executing on %s' % (dirname, year, doy, platform.node()))
                log('fetching orbits')

                try:
                    Sp3 = pySp3.GetSp3Orbits(self.orbits['sp3_path'], self.date, self.orbits['sp3types'],
                                             self.pwd_igs, True)  # type: pySp3.GetSp3Orbits

                except pySp3.pySp3Exception:
                    log('could not find principal orbits, fetching alternative')

                    # try alternative orbits
                    if self.options['sp3altrn']:
                        Sp3 = pySp3.GetSp3Orbits(self.orbits['sp3_path'], self.date, self.orbits['sp3altrn'],
                                                 self.pwd_igs, True)  # type: pySp3.GetSp3Orbits
                    else:
                        raise

                if Sp3.type != 'igs':
                    # rename file
                    shutil.copyfile(Sp3.file_path, Sp3.file_path.replace(Sp3.type, 'igs'))

                log('fetching broadcast orbits')

                pyBrdc.GetBrdcOrbits(self.orbits['brdc_path'], self.date, self.pwd_brdc,
                                     no_cleanup=True)  # type: pyBrdc.GetBrdcOrbits

                for rinex in self.params['rinex']:

                    log('fetching rinex for %s %s %s %s'
                        % (stationID(rinex), rinex['StationAlias'],
                           '{:10.6f} {:11.6f}'.format(rinex['lat'], rinex['lon']), 'tie' if rinex['is_tie'] else ''))

                    try:
                        with pyRinex.ReadRinex(rinex['NetworkCode'],
                                               rinex['StationCode'],
                                               rinex['source'], False) as Rinex:  # type: pyRinex.ReadRinex

                            # WARNING! some multiday RINEX were generating conflicts because the RINEX has a name, say,
                            # tuc12302.10o and the program wants to rename it as tuc12030.10o but because it's a
                            # multiday file, during __init__ it's already split and renamed as tuc12300.10o and
                            # additional folders are generated with the information for each file. Therefore, find
                            # the rinex that corresponds to the date being processed and use that one instead of the
                            # original file. These files are not allowed by pyArchiveService, but the "start point" of
                            # the database (i.e. the files already in the folders read by pyScanArchive) has such
                            # problems.

                            # figure out if this station has been affected by an earthquake
                            # if so, window the data
                            if rinex['jump'] is not None:
                                monitor.write(
                                    '                    -> RINEX file has been windowed: ETM detected jump on ' +
                                    rinex['jump'].datetime().strftime('%Y-%m-%d %H:%M:%S') + '\n')

                            if Rinex.multiday:
                                # find the rinex that corresponds to the session being processed
                                for Rnx in Rinex.multiday_rnx_list:
                                    if Rnx.date == self.date:
                                        Rnx.rename(rinex['destiny'])

                                        if rinex['jump'] is not None:
                                            self.window_rinex(Rnx, rinex['jump'])
                                        # before creating local copy, decimate file
                                        Rnx.decimate(30)
                                        Rnx.purge_comments()
                                        Rnx.compress_local_copyto(self.pwd_rinex)
                                        break
                            else:
                                Rinex.rename(rinex['destiny'])

                                if rinex['jump'] is not None:
                                    self.window_rinex(Rinex, rinex['jump'])
                                # before creating local copy, decimate file
                                Rinex.decimate(30)
                                Rinex.purge_comments()
                                Rinex.compress_local_copyto(self.pwd_rinex)

                    except (OSError, IOError):
                        log('An error occurred while trying to copy ' +
                            rinex['source'] + ' to ' + rinex['destiny'] + ': File skipped.')

                    except (pyRinex.pyRinexException, Exception) as e:
                        log('An error occurred while trying to copy ' +
                            rinex['source'] + ': ' + str(e))

                log('executing GAMIT')

                # create the run script
                self.create_replace_links()
                self.create_run_script()
                self.create_finish_script()

            # run the script to replace the links of the tables directory
            self.p = subprocess.Popen('find ./tables ! -name "otl.grid" -type l -exec ./replace_links.sh {} +',
                                      shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self.pwd)
            _, _ = self.p.communicate()

            # now execute the run script
            if not dry_run:
                self.p = subprocess.Popen('./run.sh', shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                          cwd=self.pwd)

                self.stdout, self.stderr = self.p.communicate()

                self.p = subprocess.Popen('./finish.sh', shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                          cwd=self.pwd)

                self.stdout, self.stderr = self.p.communicate()

                # check for any fatals
                self.p = subprocess.Popen('grep -q \'FATAL\' monitor.log', shell=True, stdout=subprocess.PIPE,
                                          stderr=subprocess.PIPE, cwd=self.pwd)

                _, _ = self.p.communicate()

                # grep -q exits 0 when a FATAL line is found, so a non-zero
                # return code means the run finished without fatals
                self.success = (self.p.returncode != 0)

            # output statistics to the parent to display
            result = self.parse_monitor(self.success)

            file_append(os.path.join(self.pwd, 'monitor.log'),
                        now_str() + ' -> return to Parallel.GAMIT\n')

            # no matter the result of the processing, move folder to final destination
            if not dry_run:
                self.finish()

            return result

        except:

            msg = traceback.format_exc() + '\nProcessing %s date %s on node %s' \
                  % (self.params['NetName'], self.date.yyyyddd(), platform.node())

            # DDG: do not attempt to write to monitor.log or do any file operations (maybe permission problem)
            # problem might occur during copytree or rmtree or some other operation before opening monitor.log
            if monitor_open:
                file_append(os.path.join(self.pwd, 'monitor.log'),
                            now_str() +
                            ' -> ERROR in pyGamitTask.start()\n%s' % msg)

                # the solution folder exists because it was created by GamitSession to start the processing.
                # erase it to upload the result
                if os.path.exists(self.solution_pwd):
                    shutil.rmtree(self.solution_pwd)

                # execute final error step: copy to self.solution_pwd
                shutil.copytree(self.pwd, self.solution_pwd, symlinks=True)
                # remove the remote pwd
                shutil.rmtree(self.pwd)

                # output statistics to the parent to display
                result = self.parse_monitor(False)
            else:
                result = {'session'             : '%s %s' % (self.date.yyyyddd(), self.params['DirName']),
                          'Project'             : self.params['NetName'],
                          'subnet'              : self.params['subnet'],
                          'Year'                : self.date.year,
                          'DOY'                 : self.date.doy,
                          'FYear'               : self.date.fyear,
                          'wl'                  : 0,
                          'nl'                  : 0,
                          'nrms'                : 0,
                          'relaxed_constrains'  : '',
                          'max_overconstrained' : '',
                          'node'                : platform.node(),
                          'execution_time'      : 0,
                          'execution_date'      : 0,
                          'missing'             : '',
                          'success'             : False,
                          'fatals'              : []
                          }

            result['error'] = msg

            # return useful information to the main node
            return result
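
The success flag in start() relies on grep's exit status: grep -q exits 0 when a FATAL line is found, so a non-zero return code means a clean run. A minimal pure-Python equivalent (a hypothetical helper, not part of the codebase):

def gamit_run_succeeded(monitor_path):
    # True when monitor.log contains no GAMIT FATAL lines
    with open(monitor_path) as f:
        return not any('FATAL' in line for line in f)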
Example #12
    def parse_monitor(self, success):
        lines  = file_readlines(self.pwd + '/monitor.log')
        output = ''.join(lines)

        try:
            start_time = datetime.strptime(
                re.findall(r'run.sh \((\d+-\d+-\d+ \d+:\d+:\d+)\): Iteration depth: 1',
                           output, re.MULTILINE)[0], '%Y-%m-%d %H:%M:%S')
        except:
            start_time = datetime(2001, 1, 1, 0, 0, 0)


        try:
            if success:
                end_time = datetime.strptime(
                    re.findall(r'finish.sh \((\d+-\d+-\d+ \d+:\d+:\d+)\): Done processing h-files and generating SINEX.'
                               , output, re.MULTILINE)[0], '%Y-%m-%d %H:%M:%S')
            else:
                end_time = datetime.now()

        except:
            end_time = datetime(2001, 1, 1, 0, 0, 0)


        try:
            if not success:
                fatals = set(re.findall(r'(.*?FATAL.*)', output, re.MULTILINE))
            else:
                fatals = []
        except Exception as e:
            fatals = ['Could not retrieve FATALS: ' + str(e)]


        try:
            iterations = int(re.findall(r'run.sh \(\d+-\d+-\d+ \d+:\d+:\d+\): Iteration depth: (\d+)',
                             output, re.MULTILINE)[-1])
        except:
            iterations = 0


        try:
            # GAMIT prints Fortran-style exponents (e.g. 0.25D+00); convert D to E
            # so that float() can parse the value
            nrms = float(
                re.findall(r'Prefit nrms:\s+\d+\.\d+[eEdD]\+\d+\s+Postfit nrms:\s+(\d+\.\d+[eEdD][+-]\d+)', output,
                           re.MULTILINE)[-1].replace('D', 'E').replace('d', 'e'))
        except:
            # maybe GAMIT didn't finish
            nrms = 100


        try:
            # findall returns the full list of updated station aliases (e.g. 'ABCD_GPS')
            updated_apr = re.findall(r' (\w+).*?Updated from', output, re.MULTILINE)
            updated_apr = [upd.replace('_GPS', '').lower() for upd in updated_apr]
            upd_stn = []
            for stn in updated_apr:
                for rinex in self.params['rinex']:
                    if rinex['StationAlias'].lower() == stn.lower():
                        upd_stn += [stationID(rinex)]

            upd_stn = ','.join(upd_stn)
        except:
            # maybe GAMIT didn't finish
            upd_stn = None


        try:
            wl = float(re.findall(r'WL fixed\s+(\d+\.\d+)', output, re.MULTILINE)[0])
        except:
            # maybe GAMIT didn't finish
            wl = 0


        try:
            nl = float(re.findall(r'NL fixed\s+(\d+\.\d+)', output, re.MULTILINE)[0])
        except:
            # maybe GAMIT didn't finish
            nl = 0


        try:
            oc = re.findall(r'relaxing over constrained stations (\w+.*)', output, re.MULTILINE)[0]
            oc = oc.replace('|', ',').replace('_GPS', '').lower()

            oc_stn = []
            for stn in oc.split(','):
                for rinex in self.params['rinex']:
                    if rinex['StationAlias'].lower() == stn.lower():
                        oc_stn += [stationID(rinex)]

            oc_stn = ','.join(oc_stn)

        except:
            # maybe GAMIT didn't finish
            oc_stn = None


        try:
            max_overconstrained = None
            overcons = re.findall(r'GCR APTOL (\w+).{10}\s+([-]?\d+\.\d+)', output, re.MULTILINE)

            if len(overcons) > 0:
                # find the station with the largest absolute over-constraint value
                vals = [float(o[1]) for o in overcons]
                i    = vals.index(max(vals, key=abs))
                stn  = overcons[i][0]

                for rinex in self.params['rinex']:
                    if rinex['StationAlias'].lower() == stn.lower():
                        # get the real station code
                        max_overconstrained = stationID(rinex)
            else:
                max_overconstrained = None

        except:
            # maybe GAMIT didn't finish
            max_overconstrained = None


        try:
            ms = re.findall(r'No data for site (\w+)',   output, re.MULTILINE)
            ds = re.findall(r'.*deleting station (\w+)', output, re.MULTILINE)
            missing_sites = []
            for stn in ms + ds:
                for rinex in self.params['rinex']:
                    if rinex['StationAlias'].lower() == stn.lower() and \
                       stationID(rinex) not in missing_sites:
                        if stn in ms:
                            missing_sites += ['(' + stationID(rinex) + ')']
                        else:
                            missing_sites += [stationID(rinex)]

        except:
            # maybe GAMIT didn't finish
            missing_sites = []


        return {'session'             : '%s %s' % (self.date.yyyyddd(), self.params['DirName']),
                'Project'             : self.params['NetName'],
                'subnet'              : self.params['subnet'],
                'Year'                : self.date.year,
                'DOY'                 : self.date.doy,
                'FYear'               : self.date.fyear,
                'wl'                  : wl,
                'nl'                  : nl,
                'nrms'                : nrms,
                'relaxed_constrains'  : oc_stn,
                'max_overconstrained' : max_overconstrained,
                'updated_apr'         : upd_stn,
                'iterations'          : iterations,
                'node'                : platform.node(),
                'execution_time'      : int((end_time - start_time).total_seconds() / 60.0),
                'execution_date'      : start_time,
                'missing'             : missing_sites,
                'success'             : success,
                'fatals'              : fatals
                }
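
The nrms parsing above has to cope with Fortran-style D exponents, which float() does not accept. A quick check of the conversion, using a hypothetical monitor.log line:

import re

line = 'Prefit nrms:   0.21D+01    Postfit nrms:   0.25D+00'  # hypothetical
val = re.findall(r'Postfit nrms:\s+(\d+\.\d+[eEdD][+-]\d+)', line)[-1]
print(float(val.replace('D', 'E').replace('d', 'e')))  # -> 0.25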
Example #13
def print_summary(stations, sessions, dates):
    # output a summary of each network
    print('')
    print(' >> Summary of stations in this project')
    print(' -- Selected stations (%i):' % (len(stations)))
    Utils.print_columns([stationID(item) for item in stations])

    min_stn = 99999
    min_date = pyDate.Date(year=1980, doy=1)
    for session in sessions:
        if min_stn > len(session.stations_dict):
            min_stn = len(session.stations_dict)
            min_date = session.date

    print('')
    print(' >> Minimum number of stations (%i) on day %s' %
          (min_stn, min_date.yyyyddd()))

    # output a summary of the missing days per station:
    print('')
    sys.stdout.write(' >> Summary of data per station (' + chr(0x258C) +
                     ' = 1 DOY)\n')

    if (dates[1] - dates[0]) / 2.0 > 120:
        cut_len = int(math.ceil((dates[1] - dates[0]) / 4.0))
    else:
        cut_len = dates[1] - dates[0]

    for stn in stations:
        # make a group per year
        for year in sorted(set(d.year for d in stn.good_rinex)):

            sys.stdout.write('\n -- %s:\n' % stationID(stn))

            missing_dates = set(m.doy for m in stn.missing_rinex
                                if m.year == year)
            p_doys = [m.doy for m in stn.good_rinex if m.year == year]

            sys.stdout.write('\n%i:\n    %03i>' % (year, p_doys[0]))

            for i, doy in enumerate(zip(p_doys[0:-1:2], p_doys[1::2])):

                if doy[0] in missing_dates:
                    if doy[1] in missing_dates:
                        c = ' '             # both DOYs missing
                    else:
                        c = chr(0x2590)     # '▐' right half block
                else:
                    if doy[1] in missing_dates:
                        c = chr(0x258C)     # '▌' left half block
                    else:
                        c = chr(0x2588)     # '█' full block

                sys.stdout.write(c)

                if i + 1 == cut_len:
                    sys.stdout.write('<%03i\n' % doy[0])
                    sys.stdout.write('    %03i>' % (doy[0] + 1))

            if len(p_doys) % 2 != 0:
                # last one missing
                if p_doys[-1] in missing_dates:
                    sys.stdout.write(' ')
                else:
                    sys.stdout.write(chr(0x258C))

                if cut_len < len(p_doys):
                    sys.stdout.write('< %03i\n' % (p_doys[-1]))
                else:
                    sys.stdout.write('<%03i\n' % (p_doys[-1]))
            else:
                sys.stdout.write('<%03i\n' % (p_doys[-1]))

    return
Example #14
def main():

    parser = argparse.ArgumentParser(description='Program to perform weekly loosely-constrained solutions. Combination '
                                                 'is performed using GLOBK. Result is output in SINEX format.')

    parser.add_argument('stnlist', type=str, nargs='+', metavar='all|net.stnm',
                        help="List of networks/stations to include in the solution.")

    parser.add_argument('-s', '--session_config', type=str, nargs=1, metavar='session.cfg',
                        help="Filename with the session configuration to run Parallel.GAMIT")

    parser.add_argument('-w', '--gpsweek', nargs=1,
                        help="GPS week to combine.")

    parser.add_argument('-e', '--exclude', type=str, nargs='+', metavar='station',
                        help="List of stations to exclude (e.g. -e igm1 lpgs vbca)")

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")

    # get the working dates
    date_s = pyDate.Date(gpsWeek=int(args.gpsweek[0]), gpsWeekDay=0)
    date_e = pyDate.Date(gpsWeek=int(args.gpsweek[0]), gpsWeekDay=6)

    print(' >> Working with GPS week ' + args.gpsweek[0] + ' (%s to %s)' % (date_s.yyyyddd(), date_e.yyyyddd()))

    exclude = args.exclude
    if exclude is not None:
        print(' >> User selected list of stations to exclude:')
        Utils.print_columns(exclude)
        args.stnlist += ['-' + exc for exc in exclude]

    # get the station list
    stnlist = Utils.process_stnlist(cnn, args.stnlist)

    # check that the selected stations have all different station codes
    # otherwise, exit with error
    for i in range(len(stnlist) - 1):
        for j in range(i + 1, len(stnlist)):
            if stnlist[i]['StationCode'] == stnlist[j]['StationCode']:
                print('During station selection, two identical station codes were found. '
                      'Please remove one and try again.')
                exit()

    GamitConfig = pyGamitConfig.GamitConfiguration(args.session_config[0])  # type: pyGamitConfig.GamitConfiguration

    project = GamitConfig.NetworkConfig.network_id.lower()
    org     = GamitConfig.gamitopt['org']

    print(' >> REMINDER: To automatically remove outliers during the weekly combination, '
          'first run DRA.py to analyze the daily repetitivities')

    soln_pwd = GamitConfig.gamitopt['solutions_dir']

    # create a globk directory in production
    if not os.path.exists('production/globk'):
        os.makedirs('production/globk')

    # check if week folder exists
    globk_pwd = 'production/globk/' + args.gpsweek[0]
    if os.path.exists(globk_pwd):
        rmtree(globk_pwd)

    # create the directory
    os.makedirs(globk_pwd)

    glx_list = []

    # make a list of the h files that need to be combined
    for day in range(0, 7):
        date = pyDate.Date(gpsWeek    = int(args.gpsweek[0]),
                           gpsWeekDay = day)

        soln_dir = os.path.join(soln_pwd, "%s/%s/%s/glbf" % (date.yyyy(), date.ddd(), project))

        if os.path.exists(soln_dir):
            glx = glob.glob(os.path.join(soln_dir, '*.GLX.*'))
            if not glx:
                glx = glob.glob(os.path.join(soln_dir, '*.glx'))

            # skip the day entirely if no GLX file was found
            if glx:
                glx_list.append({'file': glx[0], 'gpsweek': date.wwwwd()})

    # create the earthquakes.txt file to remove outliers
    with file_open(globk_pwd + '/eq_rename.txt', 'w') as fd:
        rename   = []
        remove   = []
        use_site = []
        fd.write('# LIST OF OUTLIERS DETECTED BY DRA\n')
        for stn in stnlist:
            # obtain the filtered solutions
            rm = cnn.query_float('SELECT * FROM gamit_soln_excl WHERE "Project" = \'%s\' AND "NetworkCode" = \'%s\''
                                 ' AND "StationCode" = \'%s\' AND ("Year", "DOY") BETWEEN (%i, %i) AND (%i, %i) '
                                 'ORDER BY residual' %
                                 (project, stn['NetworkCode'], stn['StationCode'], date_s.year, date_s.doy,
                                  date_e.year, date_e.doy), as_dict=True)

            # obtain the total number of solutions
            sl = cnn.query_float('SELECT * FROM gamit_soln WHERE "Project" = \'%s\' AND "NetworkCode" = \'%s\''
                                 ' AND "StationCode" = \'%s\' AND ("Year", "DOY") BETWEEN (%i, %i) AND (%i, %i) ' %
                                 (project, stn['NetworkCode'], stn['StationCode'], date_s.year, date_s.doy,
                                  date_e.year, date_e.doy), as_dict=True)
            for i, r in enumerate(rm):
                date = pyDate.Date(year=r['Year'], doy=r['DOY'])
                # if the number of rejected solutions is equal to the number of total solutions,
                # leave out the first one (i == 0) which is the one with the lowest residual (see ORDER BY in rm)
                if len(rm) < len(sl) or (len(rm) == len(sl) and i != 0):
                    fd.write(' rename %s_gps %s_xcl %-20s %s %02i %02i 0 0 %s %02i %02i 24 0\n' %
                             (stn['StationCode'], stn['StationCode'], org + date.wwwwd() + '.GLX', date.yyyy()[2:],
                              date.month, date.day, date.yyyy()[2:], date.month, date.day))

            # check for renames that might not agree between days
            mv = cnn.query_float('SELECT * FROM gamit_subnets WHERE "Project" = \'%s\' AND ("Year", "DOY") '
                                 'BETWEEN (%i, %i) AND (%i, %i) AND \'%s.%s\' = ANY(stations)' %
                                 (project, date_s.year, date_s.doy, date_e.year, date_e.doy,
                                  stn['NetworkCode'], stn['StationCode']), as_dict=True)

            for m in mv:
                date = pyDate.Date(year=m['Year'], doy=m['DOY'])
                # check on each day to see if alias agrees with station code
                for i, s in enumerate(m['stations']):
                    if s.split('.')[1] != m['alias'][i] and s == stationID(stn):

                        print(' -- %s alias for %s = %s: renaming'
                              % (date.yyyyddd(), stationID(stn), m['alias'][i]))

                        # change the name of the station to the original name
                        rename.append(' rename %s_gps %s_dup %-20s %s %02i %02i 0 0 %s %02i %02i 24 0\n' %
                                      (m['alias'][i], stn['StationCode'], org + date.wwwwd() + '.GLX', date.yyyy()[2:],
                                       date.month, date.day, date.yyyy()[2:], date.month, date.day))
                        use_site.append('%s_dup' % stn['StationCode'])

                    elif s not in [stationID(st) for st in stnlist]:
                        # print ' -- Removing %s: not selected' % s
                        # just in case, remove any other occurrences of this station code
                        remove.append(' rename %s_gps %s_xcl %-20s %s %02i %02i 0 0 %s %02i %02i 24 0\n' %
                                      (m['alias'][i], m['alias'][i], org + date.wwwwd() + '.GLX', date.yyyy()[2:],
                                       date.month, date.day, date.yyyy()[2:], date.month, date.day))
                    else:
                        use_site.append('%s_gps' % stn['StationCode'])

        fd.write('# LIST OF STATIONS TO BE REMOVED\n')
        fd.write(''.join(remove))
        fd.write('# LIST OF STATIONS TO BE RENAMED\n')
        fd.write(''.join(rename))

    print(' >> Converting to SINEX the daily solutions')

    for day, glx in enumerate(glx_list):
        date = pyDate.Date(gpsWeek    = int(args.gpsweek[0]),
                           gpsWeekDay = day)

        print(' -- Working on %s' % date.wwwwd())
        # delete the existing GLX files
        for ff in glob.glob(globk_pwd + '/*.GLX'):
            os.remove(ff)

        Globk(globk_pwd, org, [glx], date.wwww(), date.gpsWeekDay + 8, ' '.join(set(use_site)))
        # convert the file to a valid gpsweek day
        move(globk_pwd + '/' + org + date.wwww() + '%i.snx' % (date.gpsWeekDay + 8),
             globk_pwd + '/' + org + date.wwww() + '%i.snx' % date.gpsWeekDay)

        process_sinex(cnn, project, [date, date], globk_pwd + '/' + org + date.wwww() + '%i.snx' % date.gpsWeekDay)

    # delete the existing GLX files: get ready for weekly combination
    for ff in glob.glob(globk_pwd + '/*.GLX'):
        os.remove(ff)
    # ready to pass list to globk object
    Globk(globk_pwd, org, glx_list, date_s.wwww(), 7, ' '.join(set(use_site)))
    print(' >> Formatting the SINEX file')

    process_sinex(cnn, project, [date_s, date_e], globk_pwd + '/' + org + date_s.wwww() + '7.snx')
Example #15
    def __init__(self, cnn, archive, name, org, subnet, date, GamitConfig, stations, ties=(), centroid=()):
        """
        The GAMIT session object creates all the directory structure and configuration files according to the parameters
        set in GamitConfig. Two stations list are passed and merged to create the session
        :param cnn: connection to database object
        :param archive: archive object to find rinex files in archive structure
        :param name: name of the project/network
        :param org: name of the organization
        :param subnet: subnet number (may be None, in which case the directory name will not show ORGXX
        :param date: date that is being processed
        :param GamitConfig: configuration to run gamit
        :param stations: list of stations to be processed
        :param ties: tie stations as obtained by pyNetwork
        """
        self.NetName = name
        self.org     = org
        self.subnet  = subnet

        if subnet is not None:
            self.DirName = '%s.%s%02i' % (self.NetName, self.org, self.subnet)
        else:
            self.DirName = self.NetName

        self.date           = date
        self.GamitOpts      = GamitConfig.gamitopt  # type: pyGamitConfig.GamitConfiguration().gamitopt
        self.Config         = GamitConfig           # type: pyGamitConfig.GamitConfiguration
        self.frame          = None
        self.params         = None
        # to store the polyhedron read from the final SINEX
        self.polyhedron     = None
        self.VarianceFactor = None
        # gamit task will be filled with the GamitTask object
        self.GamitTask      = None

        self.solution_base = self.GamitOpts['solutions_dir'].rstrip('/')

        # tie station dictionary (to build KMLs, do not change)
        self.tie_dict = [{'name'  : stationID(stn),
                          'coords': [(stn.lon, stn.lat)]}
                         for stn in ties]

        # station dictionary (to build KMLs, do not change)
        self.stations_dict = [{'name'   : stationID(stn),
                               'coords' : [(stn.lon, stn.lat)]}
                              for stn in stations]

        # make StationInstances
        station_instances = []
        for stn in stations:
            try:
                station_instances += [StationInstance(cnn, archive, stn, date, GamitConfig)]
            except pyRinexName.RinexNameException:
                tqdm.write(' -- WARNING (station instance): station %s on day %s appears to have a badly formed RINEX '
                           'filename. Please check the archive and make sure all filenames follow the RINEX 2/3 '
                           'convention. Station has been excluded from the GAMIT session.'
                           % (stationID(stn), date.yyyyddd()))

        # do the same with ties
        for stn in ties:
            try:
                station_instances += [StationInstance(cnn, archive, stn, date, GamitConfig, is_tie=True)]
            except pyRinexName.RinexNameException:
                tqdm.write(' -- WARNING (tie instance): station %s on day %s appears to have a badly formed RINEX '
                           'filename. Please check the archive and make sure all filenames follow the RINEX 2/3 '
                           'convention. Station has been excluded from the GAMIT session.'
                           % (stationID(stn), date.yyyyddd()))

        self.StationInstances = station_instances

        # create working dirs for this session
        last_path = '/%s/%s/%s' % (date.yyyy(), date.ddd(), self.DirName)
        self.solution_pwd = self.solution_base + last_path
        # the remote pwd is the directory where the processing will be performed
        self.remote_pwd   = 'production/gamit' + last_path

        row_key = {'Year'    : date.year,
                   'DOY'     : date.doy,
                   'Project' : self.NetName,
                   'subnet'  : 0 if subnet is None else subnet}

        try:
            # attempt to retrieve the session from the database; if an error is raised,
            # the session has to be reprocessed
            cnn.get('gamit_stats', row_key.copy())
            self.ready = True
        except Exception:
            self.ready = False

            try:
                # since ready == False, try to delete any stale record in gamit_subnets
                cnn.delete('gamit_subnets', row_key.copy())
            except Exception:
                pass

        # a list to report missing data for this session
        self.missing_data = []

        if not os.path.exists(self.solution_pwd):
            # if the path does not exist, create it!
            os.makedirs(self.solution_pwd)
            # force ready = False, no matter what the database says
            self.ready = False
            try:
                cnn.delete('gamit_stats',   row_key.copy())
                cnn.delete('gamit_subnets', row_key.copy())
            except Exception:
                pass

        elif not self.ready:
            # if the solution directory exists but the session is not ready, kill the directory
            rmtree(self.solution_pwd)

        if not self.ready:
            # insert the subnet in the database
            cnn.insert('gamit_subnets', {**row_key,
                                         'stations' : '{%s}' % ','.join(stationID(s)   for s in stations + list(ties)),
                                         'alias'    : '{%s}' % ','.join(s.StationAlias for s in stations + list(ties)),
                                         'ties'     : '{%s}' % ','.join(s['name']      for s in self.tie_dict),
                                         'centroid' : '{%s}' % ','.join('%.1f' % c     for c in centroid)})

        self.pwd_igs    = os.path.join(self.solution_pwd, 'igs')
        self.pwd_brdc   = os.path.join(self.solution_pwd, 'brdc')
        self.pwd_rinex  = os.path.join(self.solution_pwd, 'rinex')
        self.pwd_tables = os.path.join(self.solution_pwd, 'tables')
        self.pwd_glbf   = os.path.join(self.solution_pwd, 'glbf')
        self.pwd_proc   = os.path.join(self.solution_pwd, date.ddd())

        if not self.ready:
            # only create folders, etc. if it was determined the solution isn't ready
            if not os.path.exists(self.pwd_igs):
                os.makedirs(self.pwd_igs)

            if not os.path.exists(self.pwd_brdc):
                os.makedirs(self.pwd_brdc)

            if os.path.exists(self.pwd_rinex):
                # delete any possible rinex files from a truncated session
                rmtree(self.pwd_rinex)
            os.makedirs(self.pwd_rinex)

            if not os.path.exists(self.pwd_tables):
                os.makedirs(self.pwd_tables)

            # check that the processing directories don't exist;
            # if they do, remove them (it has already been determined that the solution is not ready)
            if os.path.exists(self.pwd_glbf):
                rmtree(self.pwd_glbf)

            if os.path.exists(self.pwd_proc):
                rmtree(self.pwd_proc)

            self.generate_kml()
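
The session's working directory is <solutions_dir>/<yyyy>/<ddd>/<DirName>, where DirName carries the ORGXX suffix only when a subnet number is given. A minimal standalone sketch of that naming logic follows; all values are hypothetical.

import os

def session_dir(solutions_dir, name, org, subnet, yyyy, ddd):
    # mirrors the DirName logic above: append '.<org><2-digit subnet>' only for subnets
    dir_name = '%s.%s%02i' % (name, org, subnet) if subnet is not None else name
    return os.path.join(solutions_dir, yyyy, ddd, dir_name)

# hypothetical project, org and subnet for day 138 of 2020
print(session_dir('/data/solutions', 'igs-sirgas', 'osu', 3, '2020', '138'))
# -> /data/solutions/2020/138/igs-sirgas.osu03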
Example #16
def main():

    parser = argparse.ArgumentParser(description='GNSS time series stacker')

    parser.add_argument(
        'project',
        type=str,
        nargs=1,
        metavar='{project name}',
        help=
        "Specify the project name used to process the GAMIT solutions in Parallel.GAMIT."
    )

    parser.add_argument(
        'stack_name',
        type=str,
        nargs=1,
        metavar='{stack name}',
        help=
        "Specify a name for the stack: eg. itrf2014 or posgar07b. This name should be unique "
        "and cannot be repeated for any other solution project")

    parser.add_argument(
        '-max',
        '--max_iters',
        nargs=1,
        type=int,
        metavar='{max_iter}',
        help="Specify maximum number of iterations. Default is 4.")

    parser.add_argument(
        '-exclude',
        '--exclude_stations',
        nargs='+',
        type=str,
        metavar='{net.stnm}',
        help="Manually specify stations to remove from the stacking process.")

    parser.add_argument(
        '-use',
        '--use_stations',
        nargs='+',
        type=str,
        metavar='{net.stnm}',
        help="Manually specify stations to use for the stacking process.")

    parser.add_argument(
        '-dir',
        '--directory',
        type=str,
        help=
        "Directory to save the resulting PNG files. If not specified, assumed to be the "
        "production directory")

    parser.add_argument('-redo',
                        '--redo_stack',
                        action='store_true',
                        help="Delete the stack and redo it from scratch")

    parser.add_argument('-plot',
                        '--plot_stack_etms',
                        action='store_true',
                        default=False,
                        help="Plot the stack ETMs after computation is done")

    parser.add_argument(
        '-constrains',
        '--external_constrains',
        nargs='+',
        help=
        "File with external constrains parameters (position, velocity and periodic). These may be "
        "from a parent frame such as ITRF. "
        "Inheritance will occur with stations on the list whenever a parameter exists. "
        "Example: -constrains itrf14.txt "
        "Format is: net.stn x y z epoch vx vy vz sn_1y sn_6m cn_1y cn_6m se_1y se_6m ce_1y ce_6m "
        "su_1y su_6m cu_1y cu_6m ")

    parser.add_argument(
        '-d',
        '--date_end',
        nargs=1,
        metavar='date',
        help=
        'Limit the polyhedrons to the specified date. Can be in wwww-d, yyyy_ddd, yyyy/mm/dd '
        'or fyear format')

    parser.add_argument('-np',
                        '--noparallel',
                        action='store_true',
                        help="Execute command without parallelization.")

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")

    Config = pyOptions.ReadOptions(
        "gnss_data.cfg")  # type: pyOptions.ReadOptions

    JobServer = pyJobServer.JobServer(
        Config,
        run_parallel=not args.noparallel)  # type: pyJobServer.JobServer

    if args.max_iters:
        max_iters = int(args.max_iters[0])
    else:
        max_iters = 4
        print(' >> Defaulting to 4 iterations')

    exclude_stn = args.exclude_stations if args.exclude_stations else []
    use_stn = args.use_stations if args.use_stations else []

    dates = [Date(year=1980, doy=1), Date(datetime=datetime.now())]
    if args.date_end is not None:
        try:
            dates = process_date(
                [str(Date(year=1980, doy=1).fyear), args.date_end[0]])
        except ValueError as e:
            parser.error(str(e))

    # create folder for plots

    if args.directory:
        if not os.path.exists(args.directory):
            os.mkdir(args.directory)
    else:
        if not os.path.exists('production'):
            os.mkdir('production')
        args.directory = 'production'

    # load the ITRF dat file with the periodic space components
    if args.external_constrains:
        constrains = load_constrains(args.external_constrains[0])
    else:
        constrains = None

    # create the stack object
    stack = pyStack.Stack(cnn,
                          args.project[0],
                          args.stack_name[0],
                          args.redo_stack,
                          end_date=dates[1])

    # stack.align_spaces(frame_params)
    # stack.to_json('alignment.json')
    # exit()

    for i in range(max_iters):
        # create the target polyhedrons based on iteration number (i == 0: PPP)

        target = calculate_etms(cnn, stack, JobServer, i)

        qbar = tqdm(total=len(stack),
                    ncols=160,
                    desc=' >> Aligning polyhedrons (%i of %i)' %
                    (i + 1, max_iters),
                    disable=None)

        # work on each polyhedron of the stack
        for j in range(len(stack)):

            qbar.update()

            if not stack[j].aligned:
                # do not move this check up one level: to speed up the target polyhedron loading process,
                # the target is set to an empty list when the polyhedron is already aligned
                if stack[j].date != target[j].date:
                    # raise an error if dates don't agree!
                    raise Exception(
                        "Error processing %s: dates don't agree (target date %s)"
                        % (stack[j].date.yyyyddd(), target[j].date.yyyyddd()))
                else:
                    # should only attempt to align a polyhedron that is unaligned
                    # do not set the polyhedron as aligned unless we are in the max iteration step
                    stack[j].align(target[j], i == max_iters - 1)
                    # report alignment stats: WRMS plus Helmert translations (T) and rotations (R)
                    qbar.write(
                        ' -- %s (%04i) %2i it: wrms: %4.1f T %5.1f %5.1f %5.1f '
                        'R (%5.1f %5.1f %5.1f)*1e-9' %
                        (stack[j].date.yyyyddd(), stack[j].stations_used,
                         stack[j].iterations, stack[j].wrms * 1000,
                         stack[j].helmert[-3] * 1000,
                         stack[j].helmert[-2] * 1000,
                         stack[j].helmert[-1] * 1000, stack[j].helmert[-6],
                         stack[j].helmert[-5], stack[j].helmert[-4]))

        stack.transformations.append([poly.info() for poly in stack])
        qbar.close()

    if args.redo_stack:
        # before removing common modes (or inheriting periodic terms), calculate ETMs with final aligned solutions
        calculate_etms(cnn,
                       stack,
                       JobServer,
                       iterations=None,
                       create_target=False)
        # only apply common mode removal if redoing the stack
        if args.external_constrains:
            stack.remove_common_modes(constrains)
            # here, we also align the stack in velocity and coordinate space
            stack.align_spaces(constrains)
        else:
            stack.remove_common_modes()

    # calculate the ETMs again, after removing common modes or inheriting constraint parameters
    calculate_etms(cnn, stack, JobServer, iterations=None, create_target=False)

    # save the json with the information about the alignment
    stack.to_json(args.stack_name[0] + '_alignment.json')
    # save polyhedrons to the database
    stack.save()

    if args.plot_stack_etms:
        qbar = tqdm(total=len(stack.stations), ncols=160, disable=None)
        for stn in stack.stations:
            # plot the ETMs
            qbar.update()
            qbar.postfix = stationID(stn)
            plot_etm(cnn, stack, stn, args.directory)

        qbar.close()
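
For reference, here is a minimal sketch of reading the external constraints file described in the -constrains help text above. It assumes whitespace-separated columns in exactly the documented order (net.stn x y z epoch vx vy vz followed by the periodic sine/cosine terms); the actual load_constrains implementation may differ.

CONSTRAINT_FIELDS = ('x', 'y', 'z', 'epoch', 'vx', 'vy', 'vz',
                     'sn_1y', 'sn_6m', 'cn_1y', 'cn_6m',
                     'se_1y', 'se_6m', 'ce_1y', 'ce_6m',
                     'su_1y', 'su_6m', 'cu_1y', 'cu_6m')

def load_constraints_sketch(filename):
    # returns {'net.stn': {parameter: value}} for every non-comment line
    params = {}
    with open(filename) as fh:
        for line in fh:
            cols = line.split()
            if not cols or cols[0].startswith('#'):
                continue
            params[cols[0]] = dict(zip(CONSTRAINT_FIELDS,
                                       (float(c) for c in cols[1:])))
    return params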