def main():
    parser = argparse.ArgumentParser(description='Plot ETM for stations in the database')

    parser.add_argument('stnlist', type=str, nargs='+',
                        help="List of networks/stations to plot given in [net].[stnm] format or just [stnm] "
                             "(separated by spaces; if [stnm] is not unique in the database, all stations with that "
                             "name will be plotted). Use keyword 'all' to plot all stations in all networks. "
                             "If [net].all is given, all stations from network [net] will be plotted")
    parser.add_argument('-nop', '--no_plots', action='store_true', default=False,
                        help="Do not produce plots")
    parser.add_argument('-nom', '--no_missing_data', action='store_true', default=False,
                        help="Do not show missing days")
    parser.add_argument('-nm', '--no_model', action='store_true',
                        help="Plot time series without fitting a model")
    parser.add_argument('-r', '--residuals', action='store_true', default=False,
                        help="Plot time series residuals")
    parser.add_argument('-dir', '--directory', type=str,
                        help="Directory to save the resulting PNG files. If not specified, assumed to be the "
                             "production directory")
    parser.add_argument('-json', '--json', type=int,
                        help="Export ETM adjustment to JSON. Use '0' to output only the ETM parameters, "
                             "'1' to export the time series without the model, and '2' to export both "
                             "the time series and the model.")
    parser.add_argument('-gui', '--interactive', action='store_true',
                        help="Interactive mode: allows zooming and viewing the plot interactively")
    parser.add_argument('-win', '--time_window', nargs='+', metavar='interval',
                        help='Date range to window data. Can be specified in yyyy/mm/dd, yyyy.doy or as a single '
                             'integer value (N) which shall be interpreted as last epoch-N')
    parser.add_argument('-q', '--query', nargs=2, metavar='{type} {date}', type=str,
                        help='Dates to query the ETM. Specify "model" or "solution" to get the ETM value or the '
                             'value of the daily solution (if it exists). Output is in XYZ.')
    parser.add_argument('-gamit', '--gamit', type=str, nargs=1, metavar='{stack}',
                        help="Plot the GAMIT time series specifying which stack name to plot.")
    parser.add_argument('-lang', '--language', type=str, default='ENG',
                        help="Change the language of the plots. Default is English. Use ESP to select Spanish. "
                             "To add more languages, include the ISO 639-1 code in pyETM.py")
    parser.add_argument('-hist', '--histogram', action='store_true',
                        help="Plot histogram of residuals")
    parser.add_argument('-file', '--filename', type=str,
                        help="Obtain data from an external source (filename). Format should be specified with -format.")
    parser.add_argument('-format', '--format', nargs='+', type=str,
                        help="To be used together with --filename. Specify order of the fields as found in the input "
                             "file. Format strings are gpsWeek, gpsWeekDay, year, doy, fyear, month, day, mjd, "
                             "x, y, z, na. Use 'na' to specify a field that should be ignored. If fields to be "
                             "ignored are at the end of the line, then there is no need to specify those.")
    parser.add_argument('-outliers', '--plot_outliers', action='store_true',
                        help="Plot an additional panel with the outliers")
    parser.add_argument('-vel', '--velocity', action='store_true',
                        help="During query, output the velocity in XYZ.")
    parser.add_argument('-seasonal', '--seasonal_terms', action='store_true',
                        help="During query, output the seasonal terms in NEU.")
    parser.add_argument('-quiet', '--suppress_messages', action='store_true',
                        help="Quiet mode: suppress information messages")

    args = parser.parse_args()

    cnn = dbConnection.Cnn('gnss_data.cfg')

    stnlist = Utils.process_stnlist(cnn, args.stnlist)

    # define the language
    pyETM.LANG = args.language.lower()
    # set the logging level
    if not args.suppress_messages:
        pyETM.logger.setLevel(pyETM.INFO)

    #####################################
    # date filter

    dates = None
    if args.time_window is not None:
        if len(args.time_window) == 1:
            try:
                dates = process_date(args.time_window, missing_input=None, allow_days=False)
                dates = (dates[0].fyear, )
            except ValueError:
                # an integer value
                dates = float(args.time_window[0])
        else:
            dates = process_date(args.time_window)
            dates = (dates[0].fyear, dates[1].fyear)

    if stnlist:
        # do the thing
        if args.directory:
            if not os.path.exists(args.directory):
                os.mkdir(args.directory)
        else:
            if not os.path.exists('production'):
                os.mkdir('production')
            args.directory = 'production'

        for stn in stnlist:
            try:
                if args.gamit is None and args.filename is None:
                    etm = pyETM.PPPETM(cnn, stn['NetworkCode'], stn['StationCode'], False, args.no_model)
                elif args.filename is not None:
                    etm = from_file(args, cnn, stn)
                else:
                    polyhedrons = cnn.query_float('SELECT "X", "Y", "Z", "Year", "DOY" FROM stacks '
                                                  'WHERE "name" = \'%s\' AND "NetworkCode" = \'%s\' AND '
                                                  '"StationCode" = \'%s\' '
                                                  'ORDER BY "Year", "DOY", "NetworkCode", "StationCode"'
                                                  % (args.gamit[0], stn['NetworkCode'], stn['StationCode']))

                    soln = pyETM.GamitSoln(cnn, polyhedrons, stn['NetworkCode'], stn['StationCode'], args.gamit[0])

                    etm = pyETM.GamitETM(cnn, stn['NetworkCode'], stn['StationCode'], False, args.no_model,
                                         gamit_soln=soln)

                # print ' > %5.2f %5.2f %5.2f %i %i' % \
                #     (etm.factor[0]*1000, etm.factor[1]*1000, etm.factor[2]*1000, etm.soln.t.shape[0],
                #      etm.soln.t.shape[0] -
                #      np.sum(np.logical_and(np.logical_and(etm.F[0], etm.F[1]), etm.F[2])))

                # print two largest outliers
                if etm.A is not None:
                    lres = np.sqrt(np.sum(np.square(etm.R), axis=0))
                    slres = lres[np.argsort(-lres)]

                    print ' >> Two largest residuals:'
                    for i in [0, 1]:
                        print(' %s %6.3f %6.3f %6.3f'
                              % (pyDate.Date(mjd=etm.soln.mjd[lres == slres[i]]).yyyyddd(),
                                 etm.R[0, lres == slres[i]],
                                 etm.R[1, lres == slres[i]],
                                 etm.R[2, lres == slres[i]]))

                if args.interactive:
                    xfile = None
                else:
                    if args.gamit is None:
                        if args.filename is None:
                            xfile = os.path.join(args.directory, '%s.%s_ppp' % (etm.NetworkCode, etm.StationCode))
                        else:
                            xfile = os.path.join(args.directory, '%s.%s_file' % (etm.NetworkCode, etm.StationCode))
                    else:
                        xfile = os.path.join(args.directory, '%s.%s_gamit' % (etm.NetworkCode, etm.StationCode))

                # leave pngfile empty to enter interactive mode (GUI)
                if not args.no_plots:
                    etm.plot(xfile + '.png', t_win=dates, residuals=args.residuals,
                             plot_missing=not args.no_missing_data, plot_outliers=args.plot_outliers)

                if args.histogram:
                    etm.plot_hist(xfile + '_hist.png')

                if args.json is not None:
                    with open(xfile + '.json', 'w') as f:
                        if args.json == 1:
                            json.dump(etm.todictionary(time_series=True), f, indent=4, sort_keys=False)
                        elif args.json == 2:
                            json.dump(etm.todictionary(time_series=True, model=True), f, indent=4, sort_keys=False)
                        else:
                            json.dump(etm.todictionary(False), f, indent=4, sort_keys=False)

                if args.query is not None:
                    model = True if args.query[0] == 'model' else False
                    q_date = pyDate.Date(fyear=float(args.query[1]))

                    xyz, _, _, txt = etm.get_xyz_s(q_date.year, q_date.doy, force_model=model)

                    strp = ''
                    # if user requests velocity too, output it
                    if args.velocity:
                        if etm.A is not None:
                            vxyz = etm.rotate_2xyz(etm.Linear.p.params[:, 1])
                            strp = '%8.5f %8.5f %8.5f ' % (vxyz[0, 0], vxyz[1, 0], vxyz[2, 0])

                    # also output seasonal terms, if requested
                    if args.seasonal_terms:
                        if etm.Periodic.frequency_count > 0:
                            strp += ' '.join(['%8.5f' % (x * 1000)
                                              for x in etm.Periodic.p.params.flatten().tolist()])

                    print ' %s.%s %14.5f %14.5f %14.5f %8.3f %s -> %s' \
                          % (etm.NetworkCode, etm.StationCode, xyz[0], xyz[1], xyz[2], q_date.fyear, strp, txt)

                print 'Successfully plotted ' + stn['NetworkCode'] + '.' + stn['StationCode']

            except pyETM.pyETMException as e:
                print str(e)

            except Exception:
                print 'Error during processing of ' + stn['NetworkCode'] + '.' + stn['StationCode']
                print traceback.format_exc()
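# Entry-point sketch (not part of the excerpt above): how such a plotting script is typically wired up,
# with a couple of illustrative invocations. The script filename (pyPlotETM.py) and the station name used
# in the examples are assumptions for illustration only; the flags match the argparse options defined above.
#
#   python pyPlotETM.py igs.pova -win 2015/01/01 2020/12/31 -json 2 -hist
#   python pyPlotETM.py all -nop -q model 2018.500 -vel
#
if __name__ == '__main__':
    main()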
def __init__(self, cnn, NetworkCode, StationCode, dates, StationAlias=None):

    self.NetworkCode = NetworkCode
    self.StationCode = StationCode
    self.netstn = self.NetworkCode + '.' + self.StationCode

    if StationAlias is None:
        if 'public.stationalias' in cnn.get_tables():
            rs = cnn.query_float('SELECT * FROM stationalias WHERE "NetworkCode" = \'%s\' '
                                 'AND "StationCode" = \'%s\'' % (NetworkCode, StationCode), as_dict=True)
            if len(rs):
                self.StationAlias = rs[0]['StationAlias']
            else:
                # if no record, then Alias = StationCode
                self.StationAlias = StationCode
        else:
            self.StationAlias = StationCode  # upon creation, Alias = StationCode
    else:
        self.StationAlias = StationAlias

    self.record = None
    self.etm = None
    self.StationInfo = None
    self.lat = None
    self.lon = None
    self.height = None
    self.X = None
    self.Y = None
    self.Z = None
    self.otl_H = None

    rs = cnn.query_float('SELECT * FROM stations WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\''
                         % (NetworkCode, StationCode), as_dict=True)

    if len(rs) != 0:
        self.record = pyBunch.Bunch().fromDict(rs[0])

        self.otl_H = self.record.Harpos_coeff_otl
        self.lat = self.record.lat
        self.lon = self.record.lon
        self.height = self.record.height
        self.X = self.record.auto_x
        self.Y = self.record.auto_y
        self.Z = self.record.auto_z

        # get the available dates for the station (RINEX files with conditions to be processed)
        rs = cnn.query('SELECT "ObservationYear" as y, "ObservationDOY" as d FROM rinex_proc '
                       'WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' AND '
                       '("ObservationYear", "ObservationDOY") BETWEEN (%s) AND (%s) AND '
                       '"Completion" >= %.3f AND "Interval" <= %i'
                       % (NetworkCode, StationCode,
                          dates[0].yyyy() + ', ' + dates[0].ddd(),
                          dates[1].yyyy() + ', ' + dates[1].ddd(),
                          COMPLETION, INTERVAL))

        self.good_rinex = [pyDate.Date(year=r['y'], doy=r['d']) for r in rs.dictresult()]

        # create a list of the missing days
        good_rinex = [d.mjd for d in self.good_rinex]

        self.missing_rinex = [pyDate.Date(mjd=d)
                              for d in range(dates[0].mjd, dates[1].mjd + 1)
                              if d not in good_rinex]

        self.etm = pyETM.PPPETM(cnn, NetworkCode, StationCode)  # type: pyETM.PPPETM
        self.StationInfo = pyStationInfo.StationInfo(cnn, NetworkCode, StationCode)

        # DDG: report RINEX files with Completion < 0.5
        rs = cnn.query_float('SELECT "ObservationYear" as y, "ObservationDOY" as d FROM rinex_proc '
                             'WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' AND '
                             '("ObservationYear", "ObservationDOY") BETWEEN (%s) AND (%s) AND '
                             '"Completion" < %.3f AND "Interval" <= %i'
                             % (NetworkCode, StationCode,
                                dates[0].yyyy() + ', ' + dates[0].ddd(),
                                dates[1].yyyy() + ', ' + dates[1].ddd(),
                                COMPLETION, INTERVAL))

        if len(rs):
            tqdm.write(' WARNING: The requested date interval has %i days with < 50%% of observations. '
                       'These days will not be processed.' % len(rs))
    else:
        raise ValueError('Specified station %s.%s could not be found' % (NetworkCode, StationCode))
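# Minimal usage sketch (not from the original source): the class owning the __init__ above is assumed
# here to be named Station, and 'dates' is assumed to be a (start, end) pair of pyDate.Date objects,
# as implied by the dates[0]/dates[1] accesses in the constructor.
#
#   cnn = dbConnection.Cnn('gnss_data.cfg')
#   win = (pyDate.Date(year=2015, doy=1), pyDate.Date(year=2020, doy=366))
#   stn = Station(cnn, 'igs', 'pova', win)
#   print '%s: %i good days, %i missing days' % (stn.netstn, len(stn.good_rinex), len(stn.missing_rinex))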
def rinex_task(NetworkCode, StationCode, date, ObservationFYear, metafile):

    from pyRunWithRetry import RunCommandWithRetryExeception

    etm_err = ''

    # local directory as destination for the CRINEZ files
    pwd_rinex = '/media/leleiona/aws-files/' + date.yyyy() + '/' + date.ddd()

    stop_no_aprs = False

    Config = pyOptions.ReadOptions("gnss_data.cfg")  # type: pyOptions.ReadOptions
    cnn = dbConnection.Cnn('gnss_data.cfg')

    # create Archive object
    Archive = pyArchiveStruct.RinexStruct(cnn)  # type: pyArchiveStruct.RinexStruct
    ArchiveFile = Archive.build_rinex_path(NetworkCode, StationCode, date.year, date.doy)
    ArchiveFile = os.path.join(Config.archive_path, ArchiveFile)

    # check for a station alias in the alias table
    alias = cnn.query('SELECT * FROM stationalias WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\''
                      % (NetworkCode, StationCode))

    sa = alias.dictresult()
    if len(sa) > 0:
        StationAlias = sa[0]['StationAlias']
    else:
        StationAlias = StationCode

    # create the crinez filename
    filename = StationAlias + date.ddd() + '0.' + date.yyyy()[2:4] + 'd.Z'

    try:
        # create the ETM object
        etm = pyETM.PPPETM(cnn, NetworkCode, StationCode)

        # get APRs and sigmas (only in NEU)
        Apr, sigmas, Window, source = etm.get_xyz_s(date.year, date.doy)

        del etm

    except pyETM.pyETMException as e:
        # no PPP solutions available! MUST have APRs in the last run, try that
        stop_no_aprs = True
        Window = None
        source = ''
        etm_err = str(e)

    except Exception:
        return (None, None,
                traceback.format_exc() + ' processing ' + NetworkCode + '.' + StationCode +
                ' using node ' + platform.node() + '\n', metafile)

    # find this station-day in the latest global run APRs
    apr_tbl = cnn.query('SELECT * FROM apr_coords WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' '
                        'AND "Year" = %i AND "DOY" = %i'
                        % (NetworkCode, StationCode, date.year, date.doy))
    apr = apr_tbl.dictresult()

    if len(apr) > 0:
        # APRs exist for this station-day
        # replace PPP ETM with Mike's APRs
        Apr = numpy.array(([float(apr[0]['x'])], [float(apr[0]['y'])], [float(apr[0]['z'])]))
        sigmas = numpy.array(([float(apr[0]['sn'])], [float(apr[0]['se'])], [float(apr[0]['su'])]))
        source = apr[0]['ReferenceFrame'] + ' APRs'

    elif len(apr) == 0 and stop_no_aprs:
        return (None, None,
                '%s.%s has no PPP solutions and no APRs from last global run for %s! '
                'Specific error from pyETM.PPPETM (if available) was: %s'
                % (NetworkCode, StationCode, date.yyyyddd(), etm_err), metafile)

    # convert sigmas to XYZ
    stn = cnn.query('SELECT * FROM stations WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\''
                    % (NetworkCode, StationCode))
    stn = stn.dictresult()
    sigmas_xyz = sigmas_neu2xyz(stn[0]['lat'], stn[0]['lon'], sigmas)

    # write the station.info
    # if no station info comes back for this date, program will print a message and continue with next
    try:
        # use the argument 'ObservationFYear' to get the exact RINEX session fyear without opening the file
        rnx_date = pyDate.Date(fyear=float(ObservationFYear))
        stninfo = pyStationInfo.StationInfo(cnn, NetworkCode, StationCode, rnx_date, h_tolerance=12)

    except pyStationInfo.pyStationInfoException:
        # if no metadata, warn user and continue
        return (None, None,
                '%s.%s has no metadata available for this date, but a RINEX exists!'
                % (NetworkCode, StationCode), metafile)

    # check if RINEX file needs to be synced or not
    aws_sync = cnn.query('SELECT * FROM aws_sync WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' '
                         'AND "Year" = %i AND "DOY" = %i'
                         % (NetworkCode, StationCode, date.year, date.doy)).dictresult()

    if len(aws_sync) == 0:
        # only copy RINEX if not synced!
        # open the RINEX file in the Archive
        try:
            with pyRinex.ReadRinex(NetworkCode, StationCode, ArchiveFile, False) as Rinex:  # type: pyRinex.ReadRinex

                Rnx = None

                if Rinex.multiday:
                    # find the rinex that corresponds to the session being processed, if multiday
                    for rinex in Rinex.multiday_rnx_list:
                        if rinex.date == date:
                            Rnx = rinex
                            break

                    if Rnx is None:
                        return (None, None,
                                '%s.%s was a multiday file and date %8.3f could not be found!'
                                % (NetworkCode, StationCode, date.fyear), metafile)
                else:
                    # if Rinex is not multiday
                    Rnx = Rinex

                Rnx.purge_comments()
                Rnx.normalize_header(stninfo)
                Rnx.rename(filename)

                if Window is not None:
                    window_rinex(Rnx, Window)
                    source += ' windowed from/to ' + Window.datetime().strftime('%Y-%m-%d %H:%M:%S')

                # before creating local copy, decimate file
                Rnx.decimate(30)
                Rnx.compress_local_copyto(pwd_rinex)

        except (pyRinex.pyRinexException, RunCommandWithRetryExeception):
            # new behavior: if an error occurs while generating RINEX, copy the raw file from the archive
            try:
                shutil.copy(ArchiveFile, os.path.join(pwd_rinex, filename))
            except Exception:
                return (None, None,
                        traceback.format_exc() + ' processing ' + NetworkCode + '.' + StationCode +
                        ' using node ' + platform.node() + '\n', metafile)

        except Exception:
            return (None, None,
                    traceback.format_exc() + ' processing ' + NetworkCode + '.' + StationCode +
                    ' using node ' + platform.node() + '\n', metafile)

    # everything ok, return information
    APR = '%s.%s %s %12.3f %12.3f %12.3f %5.3f %5.3f %5.3f %5.3f %5.3f %5.3f %s' % (
        NetworkCode, StationCode, StationAlias, Apr[0, 0], Apr[1, 0], Apr[2, 0],
        sigmas_xyz[0, 0], sigmas_xyz[1, 0], sigmas_xyz[2, 0],
        sigmas[1, 0], sigmas[0, 0], sigmas[2, 0], source.replace(' ', '_'))

    return APR, stninfo.return_stninfo().replace(StationCode.upper(), StationAlias.upper()), None, metafile
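# sigmas_neu2xyz() is called above but not included in this excerpt. The sketch below is an assumption of
# what such a helper could look like: it propagates uncorrelated NEU sigmas into XYZ using the standard
# local (north-east-up) to ECEF rotation, assuming lat/lon are given in degrees. It is not the original
# implementation.
def sigmas_neu2xyz_sketch(lat, lon, sigmas_neu):
    import numpy

    lat = numpy.deg2rad(float(lat))
    lon = numpy.deg2rad(float(lon))

    # rows give dX, dY, dZ as linear combinations of (dN, dE, dU)
    R = numpy.array([[-numpy.sin(lat) * numpy.cos(lon), -numpy.sin(lon), numpy.cos(lat) * numpy.cos(lon)],
                     [-numpy.sin(lat) * numpy.sin(lon),  numpy.cos(lon), numpy.cos(lat) * numpy.sin(lon)],
                     [ numpy.cos(lat),                    0.0,           numpy.sin(lat)]])

    # variance propagation with a diagonal NEU covariance: var_xyz = (R ** 2) dot var_neu
    var_neu = numpy.square(numpy.array(sigmas_neu).reshape(3, 1))

    return numpy.sqrt(numpy.dot(numpy.square(R), var_neu))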
def main():
    parser = argparse.ArgumentParser(description='Plot ETM for stations in the database')

    parser.add_argument('stnlist', type=str, nargs='+',
                        help="List of networks/stations to plot given in [net].[stnm] format or just [stnm] "
                             "(separated by spaces; if [stnm] is not unique in the database, all stations with that "
                             "name will be plotted). Use keyword 'all' to plot all stations in all networks. "
                             "If [net].all is given, all stations from network [net] will be plotted")
    parser.add_argument('-nop', '--no_plots', action='store_true', default=False,
                        help="Do not produce plots")
    parser.add_argument('-nom', '--no_missing_data', action='store_true', default=False,
                        help="Do not show missing days")
    parser.add_argument('-nm', '--no_model', action='store_true',
                        help="Plot time series without fitting a model")
    parser.add_argument('-r', '--residuals', action='store_true', default=False,
                        help="Plot time series residuals")
    parser.add_argument('-dir', '--directory', type=str,
                        help="Directory to save the resulting PNG files. If not specified, assumed to be the "
                             "production directory")
    parser.add_argument('-json', '--json', type=int,
                        help="Export ETM adjustment to JSON. Use '1' to export the time series or '0' to "
                             "output only the ETM parameters.")
    parser.add_argument('-gui', '--interactive', action='store_true',
                        help="Interactive mode: allows zooming and viewing the plot interactively")
    parser.add_argument('-win', '--time_window', nargs='+', metavar='interval',
                        help='Date range to window data. Can be specified in yyyy/mm/dd, yyyy.doy or as a single '
                             'integer value (N) which shall be interpreted as last epoch-N')
    parser.add_argument('-gamit', '--gamit', type=str, nargs=2, metavar='{project} {type}',
                        help="Plot the GAMIT time series. Specify project and type = 'stack' to plot the time "
                             "series after stacking or 'gamit' to just plot the coordinates of the polyhedron")

    args = parser.parse_args()

    Config = pyOptions.ReadOptions("gnss_data.cfg")  # type: pyOptions.ReadOptions
    cnn = dbConnection.Cnn('gnss_data.cfg')

    if len(args.stnlist) == 1 and os.path.isfile(args.stnlist[0]):
        print ' >> Station list read from ' + args.stnlist[0]
        stnlist = [line.strip() for line in open(args.stnlist[0], 'r')]
        stnlist = [{'NetworkCode': item.split('.')[0],
                    'StationCode': item.split('.')[1]} for item in stnlist]
    else:
        stnlist = Utils.process_stnlist(cnn, args.stnlist)

    #####################################
    # date filter

    dates = None
    if args.time_window is not None:
        if len(args.time_window) == 1:
            try:
                dates = process_date(args.time_window, missing_input=None, allow_days=False)
                dates = (dates[0].fyear, )
            except ValueError:
                # an integer value
                dates = float(args.time_window[0])
        else:
            dates = process_date(args.time_window)
            dates = (dates[0].fyear, dates[1].fyear)

    if stnlist:
        # do the thing
        if args.directory:
            if not os.path.exists(args.directory):
                os.mkdir(args.directory)
        else:
            if not os.path.exists('production'):
                os.mkdir('production')
            args.directory = 'production'

        for stn in stnlist:
            try:
                if args.gamit is None:
                    etm = pyETM.PPPETM(cnn, stn['NetworkCode'], stn['StationCode'], False, args.no_model)
                else:
                    if args.gamit[1] == 'stack':
                        polyhedrons = cnn.query_float('SELECT "X", "Y", "Z", "Year", "DOY" FROM stacks '
                                                      'WHERE "Project" = \'%s\' AND "NetworkCode" = \'%s\' AND '
                                                      '"StationCode" = \'%s\' '
                                                      'ORDER BY "Year", "DOY", "NetworkCode", "StationCode"'
                                                      % (args.gamit[0], stn['NetworkCode'], stn['StationCode']))

                        soln = pyETM.GamitSoln(cnn, polyhedrons, stn['NetworkCode'], stn['StationCode'],
                                               args.gamit[0])

                        etm = pyETM.GamitETM(cnn, stn['NetworkCode'], stn['StationCode'], False, args.no_model,
                                             gamit_soln=soln)

                        # print ' > %5.2f %5.2f %5.2f %i %i' % \
                        #     (etm.factor[0]*1000, etm.factor[1]*1000, etm.factor[2]*1000, etm.soln.t.shape[0],
                        #      etm.soln.t.shape[0] -
                        #      np.sum(np.logical_and(np.logical_and(etm.F[0], etm.F[1]), etm.F[2])))

                        # print two largest outliers
                        if etm.A is not None:
                            lres = np.sqrt(np.sum(np.square(etm.R), axis=0))
                            slres = lres[np.argsort(-lres)]

                            print ' >> Two largest residuals:'
                            for i in [0, 1]:
                                print(' %s %6.3f %6.3f %6.3f'
                                      % (pyDate.Date(mjd=etm.soln.mjd[lres == slres[i]]).yyyyddd(),
                                         etm.R[0, lres == slres[i]],
                                         etm.R[1, lres == slres[i]],
                                         etm.R[2, lres == slres[i]]))

                    elif args.gamit[1] == 'gamit':
                        etm = pyETM.GamitETM(cnn, stn['NetworkCode'], stn['StationCode'], False, args.no_model,
                                             project=args.gamit[1])
                    else:
                        parser.error('Invalid option for -gamit switch')
                        etm = None

                if args.interactive:
                    xfile = None
                else:
                    if args.gamit is None:
                        xfile = os.path.join(args.directory, '%s.%s_ppp' % (etm.NetworkCode, etm.StationCode))
                    else:
                        xfile = os.path.join(args.directory, '%s.%s_gamit' % (etm.NetworkCode, etm.StationCode))

                # leave pngfile empty to enter interactive mode (GUI)
                if not args.no_plots:
                    etm.plot(xfile + '.png', t_win=dates, residuals=args.residuals,
                             plot_missing=not args.no_missing_data)

                if args.json is not None:
                    with open(xfile + '.json', 'w') as f:
                        if args.json != 0:
                            json.dump(etm.todictionary(True), f, indent=4, sort_keys=False)
                        else:
                            json.dump(etm.todictionary(False), f, indent=4, sort_keys=False)

                print 'Successfully plotted ' + stn['NetworkCode'] + '.' + stn['StationCode']

            except pyETM.pyETMException as e:
                print str(e)

            except Exception:
                print 'Error during processing of ' + stn['NetworkCode'] + '.' + stn['StationCode']
                print traceback.format_exc()
def __init__(self, cnn, NetworkCode, StationCode, dates):

    self.NetworkCode = NetworkCode
    self.StationCode = StationCode
    self.StationAlias = StationCode  # upon creation, Alias = StationCode
    self.record = None
    self.etm = None
    self.StationInfo = None
    self.lat = None
    self.lon = None
    self.height = None
    self.X = None
    self.Y = None
    self.Z = None
    self.otl_H = None

    rs = cnn.query_float('SELECT * FROM stations WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\''
                         % (NetworkCode, StationCode), as_dict=True)

    if len(rs) != 0:
        self.record = pyBunch.Bunch().fromDict(rs[0])

        self.otl_H = self.record.Harpos_coeff_otl
        self.lat = self.record.lat
        self.lon = self.record.lon
        self.height = self.record.height
        self.X = self.record.auto_x
        self.Y = self.record.auto_y
        self.Z = self.record.auto_z

        # get the available dates for the station (RINEX files with conditions to be processed)
        rs = cnn.query('SELECT "ObservationYear" as y, "ObservationDOY" as d FROM rinex_proc '
                       'WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' AND '
                       '"ObservationSTime" >= \'%s\' AND "ObservationETime" <= \'%s\' AND '
                       '"Completion" >= %.3f AND "Interval" <= %i'
                       % (NetworkCode, StationCode,
                          (dates[0] - 1).first_epoch(), (dates[1] + 1).last_epoch(),
                          COMPLETION, INTERVAL))

        self.good_rinex = [pyDate.Date(year=r['y'], doy=r['d']) for r in rs.dictresult()
                           if dates[0] <= pyDate.Date(year=r['y'], doy=r['d']) <= dates[1]]

        # create a list of the missing days
        good_rinex = [d.mjd for d in self.good_rinex]

        self.missing_rinex = [pyDate.Date(mjd=d)
                              for d in range(dates[0].mjd, dates[1].mjd + 1)
                              if d not in good_rinex]

        self.etm = pyETM.PPPETM(cnn, NetworkCode, StationCode)  # type: pyETM.PPPETM
        self.StationInfo = pyStationInfo.StationInfo(cnn, NetworkCode, StationCode)
    else:
        raise ValueError('Specified station %s.%s could not be found' % (NetworkCode, StationCode))
import dbConnection
import pyETM
from pyDate import Date

cnn = dbConnection.Cnn('gnss_data.cfg')

stns = cnn.query('SELECT * FROM stations WHERE "NetworkCode" NOT LIKE \'?%\'')

for stn in stns.dictresult():
    print ' >> working on %s.%s' % (stn['NetworkCode'], stn['StationCode'])

    etm = pyETM.PPPETM(cnn, stn['NetworkCode'], stn['StationCode'])
    dates = [Date(mjd=mjd) for mjd in etm.soln.mjd]
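    # Usage note (not part of the original snippet): the Date list built above can be summarized per
    # station, for example to report the span of available PPP epochs.
    if dates:
        print ' -- %i epochs between %s and %s' % (len(dates), dates[0].yyyyddd(), dates[-1].yyyyddd())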
def insert_modify_param(parser, cnn, stnlist, args):

    # determine if passed function is valid
    if len(args.function_type) < 2:
        parser.error('invalid number of arguments')
    elif args.function_type[0] not in ('p', 'j', 'q', 't'):
        parser.error('function type should be one of the following: polynomial (p), jump (j), periodic (q), or '
                     'bulk earthquake jump removal (t)')

    # create a bunch object to save all the params that will enter the database
    tpar = Bunch()
    tpar.NetworkCode = None
    tpar.StationCode = None
    tpar.soln = None
    tpar.object = None
    tpar.terms = None
    tpar.frequencies = None
    tpar.jump_type = None
    tpar.relaxation = None
    tpar.Year = None
    tpar.DOY = None
    tpar.action = None

    ftype = args.function_type[0]
    remove_eq = False

    try:
        if ftype == 'p':
            tpar.object = 'polynomial'
            tpar.terms = int(args.function_type[1])
            if tpar.terms <= 0:
                parser.error('polynomial terms should be > 0')

        elif ftype == 'j':
            tpar.object = 'jump'
            # insert the action
            tpar.action = args.function_type[1]
            if tpar.action not in ('+', '-'):
                parser.error('action for function type jump (j) should be + or -')

            # jump type
            tpar.jump_type = int(args.function_type[2])
            if tpar.jump_type not in (0, 1):
                parser.error('jump type should be either 0 or 1')

            try:
                date, _ = Utils.process_date([args.function_type[3]])
                # recover the year and doy
                tpar.Year = date.year
                tpar.DOY = date.doy
            except Exception as e:
                parser.error('while parsing jump date: ' + str(e))

            if tpar.jump_type == 1:
                tpar.relaxation = [float(f) for f in args.function_type[4:]]

                if not tpar.relaxation:
                    if tpar.action == '-':
                        tpar.relaxation = None
                    elif tpar.action == '+':
                        parser.error('jump type == 1 but no relaxation parameter, please specify relaxation')

        elif ftype == 'q':
            tpar.object = 'periodic'
            tpar.frequencies = [float(1 / float(p)) for p in args.function_type[1:]]

        elif ftype == 't':
            tpar.object = 'jump'
            remove_eq = True

    except ValueError:
        parser.error('invalid argument type for function "%s"' % ftype)

    for station in stnlist:
        for soln in args.solution_type:
            tpar.NetworkCode = station['NetworkCode']
            tpar.StationCode = station['StationCode']
            tpar.soln = soln

            station_soln = "%s.%s (%s)" % (station['NetworkCode'], station['StationCode'], soln)

            if remove_eq:
                # load the ETM parameters for this station
                print(' >> Obtaining ETM parameters for ' + station_soln)

                if soln == 'ppp':
                    etm = pyETM.PPPETM(cnn, station['NetworkCode'], station['StationCode'])
                else:
                    etm = pyETM.GamitETM(cnn, station['NetworkCode'], station['StationCode'],
                                         stack_name=args.function_type[2])

                for eq in [e for e in etm.Jumps.table
                           if e.p.jump_type in (pyETM.CO_SEISMIC_DECAY,
                                                pyETM.CO_SEISMIC_JUMP_DECAY,
                                                pyETM.CO_SEISMIC_JUMP)]:
                    if eq.magnitude <= float(args.function_type[1]):
                        # this earthquake should be removed, fill in the data
                        tpar.Year = eq.date.year
                        tpar.DOY = eq.date.doy
                        tpar.jump_type = 1
                        tpar.relaxation = None
                        tpar.action = '-'

                        apply_change(cnn, station, tpar, soln)
            else:
                apply_change(cnn, station, tpar, soln)
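# apply_change() is called above but not defined in this excerpt. The sketch below shows one way such a
# helper could store the record; the table name 'etm_params' and the use of the tpar fields as column
# names are assumptions, not the original implementation, and values are quoted naively for brevity.
def apply_change_sketch(cnn, station, tpar, soln):
    # keep only the fields that were actually set for this function type
    row = {k: v for k, v in tpar.items() if v is not None}
    cols = ', '.join('"%s"' % k for k in row.keys())
    vals = ', '.join("'%s'" % str(v) for v in row.values())

    # replace any previous definition of this object for the station/solution, then insert the new one
    cnn.query('DELETE FROM etm_params WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' '
              'AND "soln" = \'%s\' AND "object" = \'%s\''
              % (tpar.NetworkCode, tpar.StationCode, soln, tpar.object))

    cnn.query('INSERT INTO etm_params (%s) VALUES (%s)' % (cols, vals))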
def main():
    parser = argparse.ArgumentParser(description='Query ETM for stations in the database. Default is PPP ETMs.')

    parser.add_argument('stnlist', type=str, nargs='+',
                        help="List of networks/stations to plot given in [net].[stnm] format or just [stnm] "
                             "(separated by spaces; if [stnm] is not unique in the database, all stations with that "
                             "name will be plotted). Use keyword 'all' to plot all stations in all networks. "
                             "If [net].all is given, all stations from network [net] will be plotted")
    parser.add_argument('-q', '--query', nargs=2, metavar='{type} {date}', type=str,
                        help='Dates to query the ETM. Specify "model" or "solution" to get the ETM value or the '
                             'value of the daily solution (if it exists). Output is in XYZ.')
    parser.add_argument('-gamit', '--gamit', type=str, nargs=1, metavar='{stack}',
                        help="Plot the GAMIT time series specifying which stack name to plot.")
    parser.add_argument('-file', '--filename', type=str,
                        help="Obtain data from an external source (filename). Format should be specified with -format.")
    parser.add_argument('-format', '--format', nargs='+', type=str,
                        help="To be used together with --filename. Specify order of the fields as found in the input "
                             "file. Format strings are gpsWeek, gpsWeekDay, year, doy, fyear, month, day, mjd, "
                             "x, y, z, na. Use 'na' to specify a field that should be ignored. If fields to be "
                             "ignored are at the end of the line, then there is no need to specify those.")
    parser.add_argument('-quiet', '--quiet', action='store_true',
                        help="Do not print message when no solutions are available.")
    parser.add_argument('-vel', '--velocity', action='store_true',
                        help="Output the velocity in XYZ.")
    parser.add_argument('-seasonal', '--seasonal_terms', action='store_true',
                        help="Output the seasonal terms in NEU.")

    args = parser.parse_args()

    cnn = dbConnection.Cnn('gnss_data.cfg')

    if len(args.stnlist) == 1 and os.path.isfile(args.stnlist[0]):
        print(' >> Station list read from ' + args.stnlist[0])
        stnlist = [{'NetworkCode': items[0], 'StationCode': items[1]}
                   for items in (line.strip().split('.') for line in file_readlines(args.stnlist[0]))]
    else:
        stnlist = Utils.process_stnlist(cnn, args.stnlist)

    for stn in stnlist:
        try:
            if args.gamit is None and args.filename is None:
                etm = pyETM.PPPETM(cnn, stn['NetworkCode'], stn['StationCode'], False)
            elif args.filename is not None:
                etm = from_file(args, cnn, stn)
            else:
                polyhedrons = cnn.query_float('SELECT "X", "Y", "Z", "Year", "DOY" FROM stacks '
                                              'WHERE "name" = \'%s\' AND "NetworkCode" = \'%s\' AND '
                                              '"StationCode" = \'%s\' '
                                              'ORDER BY "Year", "DOY", "NetworkCode", "StationCode"'
                                              % (args.gamit[0], stn['NetworkCode'], stn['StationCode']))

                soln = pyETM.GamitSoln(cnn, polyhedrons, stn['NetworkCode'], stn['StationCode'], args.gamit[0])

                etm = pyETM.GamitETM(cnn, stn['NetworkCode'], stn['StationCode'], False, gamit_soln=soln)

            if args.query is not None:
                model = (args.query[0] == 'model')
                q_date = pyDate.Date(fyear=float(args.query[1]))

                # get the coordinate
                xyz, _, _, txt = etm.get_xyz_s(q_date.year, q_date.doy, force_model=model)

                strp = ''
                # if user requests velocity too, output it
                if args.velocity and etm.A is not None:
                    vxyz = etm.rotate_2xyz(etm.Linear.p.params[:, 1])
                    strp = '%8.5f %8.5f %8.5f ' % (vxyz[0, 0], vxyz[1, 0], vxyz[2, 0])

                # also output seasonal terms, if requested
                if args.seasonal_terms and etm.Periodic.frequency_count > 0:
                    strp += ' '.join('%8.5f' % (x * 1000) for x in etm.Periodic.p.params.flatten())

                print(' %s.%s %14.5f %14.5f %14.5f %8.3f %s -> %s'
                      % (etm.NetworkCode, etm.StationCode, xyz[0], xyz[1], xyz[2], q_date.fyear, strp, txt))

        except pyETM.pyETMException as e:
            if not args.quiet:
                print(str(e))

        except Exception:
            print('Error during processing of ' + stn['NetworkCode'] + '.' + stn['StationCode'])
            print(traceback.format_exc())
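# from_file() is referenced above but not included in this excerpt. The sketch below illustrates the kind
# of parsing that the --filename/--format options imply: each line of the external file is split into
# fields in the order given by args.format, 'na' fields are ignored, and epochs/coordinates are collected.
# Only a subset of the documented format strings is handled here, and how the parsed data is wrapped into
# an ETM object (presumably a file/list-based pyETM class) is not shown and is left as an assumption.
def from_file_sketch(args, cnn, stn):
    import numpy as np

    epochs = []
    with open(args.filename, 'r') as fileio:
        for line in fileio:
            record = dict(zip(args.format, line.split()))

            # build a date from whichever fields were provided
            if 'mjd' in record:
                d = pyDate.Date(mjd=float(record['mjd']))
            elif 'fyear' in record:
                d = pyDate.Date(fyear=float(record['fyear']))
            elif 'year' in record and 'doy' in record:
                d = pyDate.Date(year=int(record['year']), doy=int(record['doy']))
            else:
                continue

            epochs.append([d.fyear, float(record['x']), float(record['y']), float(record['z'])])

    data = np.array(epochs)
    # the real helper presumably feeds 'data' and stn into a file-based pyETM solution/ETM object;
    # that constructor is not part of this excerpt, so it is omitted here
    return data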