def station_etm(station, stn_ts, stack_name, iteration=0):
    """Fit a GAMIT ETM to one station's time series and return its solution list.

    :param station: dict with 'NetworkCode' and 'StationCode' keys
    :param stn_ts: the station time series to adjust
    :param stack_name: name of the stack the series belongs to
    :param iteration: 0 -> align against the PPP ETMs; >0 -> align against the
                      stack's inner geometry
    :return: the ETM solution list, or None when no ETM could be fit
    """
    cnn = dbConnection.Cnn("gnss_data.cfg")

    solution_list = None
    try:
        # build the time-series object for this station
        gamit_soln = pyETM.GamitSoln(cnn, stn_ts,
                                     station['NetworkCode'],
                                     station['StationCode'],
                                     stack_name)

        # adjust the extended trajectory model
        etm = pyETM.GamitETM(cnn,
                             station['NetworkCode'],
                             station['StationCode'],
                             False, False, gamit_soln)

        if etm.A is not None:
            # iteration 0 targets the PPP frame; later iterations target the
            # inner geometry of the stack itself
            if iteration == 0:
                solution_list = etm.get_etm_soln_list(use_ppp_model=True, cnn=cnn)
            else:
                solution_list = etm.get_etm_soln_list()

    except pyETM.pyETMException:
        solution_list = None

    # collapse falsy results (None or empty list) to None, as callers expect
    return solution_list or None
def station_etm(project, station, stn_ts, exclude, iteration=0):
    """Insert a station's stacked coordinates and ETM residuals into the DB.

    Saves the time series into `stacks`, and (unless excluded) fits a GamitETM
    and writes its residuals into `stack_residuals` for later use by the
    Helmert stacking step.

    :param project: project (stack) name used in the INSERT statements
    :param station: object exposing NetworkCode, StationCode and dictionary
    :param stn_ts: station time series rows [x, y, z, year, doy, ...]
    :param exclude: when truthy, skip the ETM fit entirely
    :param iteration: 0 -> residuals w.r.t. the PPP model; >0 -> w.r.t. the
                      stack's inner geometry
    :return: tuple (add_exclude, msg) where add_exclude is a (possibly empty)
             list with the station's dictionary when it contributes nothing to
             the stack, and msg is an error string or None
    """
    msg = None
    add_exclude = []

    cnn = dbConnection.Cnn("gnss_data.cfg")

    # NOTE(review): both statements are built by string interpolation; this
    # assumes project and station identifiers are trusted, internal values.
    sql_r = 'INSERT INTO stack_residuals ' \
            '("NetworkCode", "StationCode", "Project", x, y, z, sigmax, sigmay, sigmaz, "Year", "DOY") ' \
            'VALUES (%s, %s, \'' + project + '\', %f, %f, %f, %f, %f, %f, %i, %i)'

    sql_s = 'INSERT INTO stacks ' \
            '("NetworkCode", "StationCode", "Project", "X", "Y", "Z", sigmax, sigmay, sigmaz, "Year", "DOY", "FYear") ' \
            'VALUES (\'' + station.NetworkCode + '\', \'' + station.StationCode + '\', \'' \
            + project + '\', %f, %f, %f, 0, 0, 0, %i, %i, %f)'

    # make sure it is sorted by date (year, then day of year)
    stn_ts.sort(key=lambda k: (k[3], k[4]))

    try:
        # save the time series
        ts = pyETM.GamitSoln(cnn, stn_ts, station.NetworkCode, station.StationCode)

        # persist the (unweighted) coordinates into the stacks table
        cnn.executemany(
            sql_s,
            zip(ts.x.tolist(), ts.y.tolist(), ts.z.tolist(),
                [t.year for t in ts.date],
                [t.doy for t in ts.date],
                [t.fyear for t in ts.date]))

        if not exclude:
            # create the ETM object
            etm = pyETM.GamitETM(cnn, station.NetworkCode, station.StationCode,
                                 False, False, ts)

            if etm.A is None:
                # no contribution to stack, remove from the station list
                add_exclude = [station.dictionary]
            else:
                # insert the residuals for the station in stack_residuals
                # these values will be used later on in helmert_stack
                if iteration == 0:
                    # if iteration is == 0, then the target frame has to be the PPP ETMs
                    cnn.executemany(
                        sql_r,
                        etm.get_residuals_dict(use_ppp_model=True, cnn=cnn))
                else:
                    # on next iters, the target frame is the inner geometry of the stack
                    cnn.executemany(sql_r, etm.get_residuals_dict())

    except Exception as e:
        # any failure excludes the station and is reported back to the caller
        add_exclude = [station.dictionary]
        msg = 'Error while producing ETM for %s.%s: ' % (
            station.NetworkCode, station.StationCode) + str(e)

    return add_exclude, msg
def main():
    """Entry point: stack DRA polyhedrons for a project and plot each station.

    Parses the project name and an optional date filter, runs the DRA stack,
    and writes one daily-repeatability (DRA) plot per station plus a JSON
    summary of the stack.
    """
    parser = argparse.ArgumentParser(description='GNSS time series stacker')

    parser.add_argument('project', type=str, nargs=1, metavar='{project name}',
                        help="Specify the project name used to process the GAMIT solutions in Parallel.GAMIT.")
    parser.add_argument('-d', '--date_filter', nargs='+', metavar='date',
                        help='Date range filter Can be specified in yyyy/mm/dd yyyy_doy wwww-d format')

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")

    project = args.project[0]

    # default: an effectively unbounded date window
    dates = [pyDate.Date(year=1980, doy=1), pyDate.Date(year=2100, doy=1)]
    try:
        # NOTE(review): this overwrites the defaults unconditionally; assumes
        # process_date handles a None date_filter -- confirm against its API.
        dates = process_date(args.date_filter)
    except ValueError as e:
        parser.error(str(e))

    # create folder for plots
    if not os.path.isdir(project + '_dra'):
        os.makedirs(project + '_dra')

    ########################################
    # load polyhedrons

    dra = DRA(cnn, args.project[0], dates[1])

    dra.stack_dra()

    for stn in tqdm(dra.stations):
        NetworkCode = stn['NetworkCode']
        StationCode = stn['StationCode']

        # load from the db
        ts = dra.get_station(NetworkCode, StationCode)

        if ts.size:
            try:
                # need at least 3 epochs to difference the series
                if ts.shape[0] > 2:
                    # differenced coordinates joined with the epoch columns
                    dts = np.append(np.diff(ts[:, 0:3], axis=0), ts[1:, -3:], axis=1)

                    dra_ts = pyETM.GamitSoln(cnn, dts, NetworkCode, StationCode, project)

                    etm = pyETM.DailyRep(cnn, NetworkCode, StationCode, False, False, dra_ts)

                    etm.plot(pngfile='%s/%s.%s_DRA.png' % (project + '_dra', NetworkCode, StationCode),
                             plot_missing=False)

            except Exception as e:
                # best-effort plotting: report and continue with the next station
                tqdm.write(' -->' + str(e))

    dra.to_json(project + '_dra.json')
def calculate_etms(self):
    """
    Estimates the trajectory models for all stations in the stack
    :return:
    """
    # Python 2 print statement (file is py2 throughout)
    print ' >> Calculating ETMs for %s...' % self.project

    for s in tqdm(self.stations, ncols=160, desc=self.project):
        ts = self.get_station(s['NetworkCode'], s['StationCode'])
        try:
            # NOTE(review): this assigns a *class* attribute on tqdm, it does
            # not update this bar's postfix -- bar.set_postfix() is likely
            # what was intended; confirm.
            tqdm.postfix = s['NetworkCode'] + '.' + s['StationCode']

            # NOTE(review): only the GamitSoln is built here; no GamitETM is
            # instantiated despite the docstring. Confirm whether constructing
            # GamitSoln alone performs the intended DB side effects.
            ts = pyETM.GamitSoln(self.cnn, ts, s['NetworkCode'], s['StationCode'], self.project)

        except pyETM.pyETMException as e:
            # skip stations whose series cannot be processed
            tqdm.write(' -- ' + str(e))
def plot_etms(self):
    """Produce residual (RR) and fitted (FF) ETM plots for every station.

    Iterates self.stnlist, gathers each station's solutions from
    self.polyhedrons, fits a GamitETM and writes two PNGs per station into
    the directory named after the stack (self.name).
    """
    qbar = tqdm(total=len(self.stnlist), desc=' >> Plotting ETMs', ncols=160)

    for station in self.stnlist:
        qbar.set_postfix(station=str(station))
        qbar.update()

        try:
            # gather this station's rows from the polyhedron list
            stn_ts = []
            for item in self.polyhedrons:
                if (item['NetworkCode'] == station.NetworkCode
                        and item['StationCode'] == station.StationCode):
                    stn_ts.append([item['X'], item['Y'], item['Z'],
                                   item['Year'], item['DOY']])

            # chronological order: year first, then day of year
            stn_ts.sort(key=lambda row: (row[3], row[4]))

            # build the time series and fit the trajectory model
            ts = pyETM.GamitSoln(self.cnn, stn_ts,
                                 station.NetworkCode, station.StationCode)
            etm = pyETM.GamitETM(self.cnn,
                                 station.NetworkCode, station.StationCode,
                                 False, False, ts)

            # residuals plot (RR) and full-fit plot (FF)
            etm.plot(pngfile='%s/%s.%s_RR.png' % (self.name, etm.NetworkCode, etm.StationCode),
                     residuals=True, plot_missing=False)
            etm.plot(pngfile='%s/%s.%s_FF.png' % (self.name, etm.NetworkCode, etm.StationCode),
                     residuals=False, plot_missing=False)

        except pyETM.pyETMException as e:
            qbar.write(' -- %s %s' % (str(station), str(e)))

    qbar.close()
def main():
    """Align the 'igs-sirgas' stack day by day and fit a DRA ETM for igs.braz."""
    cnn = dbConnection.Cnn("gnss_data.cfg")

    stack = Stack(cnn, 'igs-sirgas', redo=True)

    # chain-align each polyhedron to the previous day's
    for idx in tqdm(range(1, len(stack)), ncols=160):
        stack[idx].align(stack[idx - 1])

    net, stn = 'igs', 'braz'

    series = stack.get_station(net, stn)

    # daily differences of the coordinates, keeping the epoch columns
    diffed = np.append(np.diff(series[:, 0:3], axis=0), series[1:, -3:], axis=1)

    soln = pyETM.GamitSoln(cnn, diffed, net, stn, 'igs-sirgas')

    # fit (and plot) the trajectory model for the differenced series
    pyETM.GamitETM(cnn, net, stn, True, gamit_soln=soln)
def compute_dra(ts, NetworkCode, StationCode, pdates, project, histogram=False):
    """Compute the daily-repeatability (DRA) ETM for one station.

    Differentiates the coordinate series, fits a DailyRep ETM, and renders the
    plot (and optional residual histogram) into in-memory buffers.

    :param ts: numpy array of solutions; columns 0-2 are X, Y, Z and the last
               three columns are epoch information (shape assumed -- confirm
               against the caller)
    :param pdates: time window passed to etm.plot(t_win=...)
    :param histogram: also produce a residual histogram when True
    :return: (wrms_x_mm, wrms_y_mm, wrms_z_mm, figfile, hisfile, filename,
              NetworkCode, StationCode); the wrms values are None when no ETM
              was fit. NOTE(review): when ts is empty the function falls off
              the end and returns None -- confirm callers handle that.
    """
    try:
        # load from the db
        cnn = dbConnection.Cnn('gnss_data.cfg')

        # to pass the filename back to the callback_handler
        filename = project + '_dra/' + NetworkCode + '.' + StationCode

        if ts.size:
            # need at least 3 epochs to form the differenced series
            if ts.shape[0] > 2:
                dts = numpy.append(numpy.diff(ts[:, 0:3], axis=0), ts[1:, -3:], axis=1)

                dra_ts = pyETM.GamitSoln(cnn, dts, NetworkCode, StationCode, project)

                etm = pyETM.DailyRep(cnn, NetworkCode, StationCode, False, False, dra_ts)

                figfile = ''
                hisfile = ''

                if etm.A is not None:
                    # render the plot to an in-memory PNG buffer
                    figfile = etm.plot(fileio=io.BytesIO(), plot_missing=False, t_win=pdates)
                    if histogram:
                        hisfile = etm.plot_hist(fileio=io.BytesIO())
                    # save the wrms (converted from m to mm)
                    return etm.factor[0] * 1000, etm.factor[1] * 1000, etm.factor[2] * 1000, figfile, hisfile, \
                           filename, NetworkCode, StationCode
                else:
                    # no model could be fit: return empty figures, no wrms
                    return None, None, None, figfile, hisfile, filename, NetworkCode, StationCode

    except Exception as e:
        # re-raise with station context attached for the dispatcher
        raise Exception('While working on %s.%s' % (NetworkCode, StationCode) + '\n') from e
def plot_etm(cnn, stack, station, directory):
    """Plot one station's GAMIT ETM and dump its parameters to JSON.

    Writes <stationID>_gamit.png and <stationID>_gamit.json into `directory`.
    Stations whose ETM cannot be fit are reported via tqdm and skipped.
    """
    try:
        series = stack.get_station(station['NetworkCode'], station['StationCode'])
        soln = pyETM.GamitSoln(cnn, series,
                               station['NetworkCode'], station['StationCode'],
                               stack.project)

        etm = pyETM.GamitETM(cnn,
                             station['NetworkCode'], station['StationCode'],
                             gamit_soln=soln)

        # both output files share the station identifier stem
        stem = stationID(etm) + '_gamit'
        pngfile = os.path.join(directory, stem + '.png')
        jsonfile = os.path.join(directory, stem + '.json')

        etm.plot(pngfile, plot_missing=False)

        file_write(
            os.path.join(jsonfile),
            json.dumps(etm.todictionary(False), indent=4, sort_keys=False))

    except pyETM.pyETMException as e:
        tqdm.write(str(e))
def plot_etm(cnn, stack, station, directory):
    """Plot one station's GAMIT ETM and save its parameters as JSON.

    Writes <net>.<stn>.png and <net>.<stn>.json into `directory`; stations
    without a fittable ETM are reported via tqdm and skipped.
    """
    try:
        series = stack.get_station(station['NetworkCode'], station['StationCode'])
        soln = pyETM.GamitSoln(cnn, series,
                               station['NetworkCode'], station['StationCode'],
                               stack.project)

        etm = pyETM.GamitETM(cnn,
                             station['NetworkCode'], station['StationCode'],
                             gamit_soln=soln)

        # file stem is "<network>.<station>"
        stem = etm.NetworkCode + '.' + etm.StationCode
        pngfile = os.path.join(directory, stem + '.png')
        jsonfile = os.path.join(directory, stem + '.json')

        etm.plot(pngfile, plot_missing=False)

        with open(os.path.join(jsonfile), 'w') as f:
            json.dump(etm.todictionary(False), f, indent=4, sort_keys=False)

    except pyETM.pyETMException as e:
        tqdm.write(str(e))
def main():
    """Entry point: plot and/or export ETMs for a list of stations.

    Supports PPP ETMs (default), ETMs from an external file (-file/-format),
    and GAMIT stack ETMs (-gamit {stack}). Optional outputs: PNG plots,
    residual histograms, JSON dumps, and point queries of the model.
    """
    parser = argparse.ArgumentParser(
        description='Plot ETM for stations in the database')
    parser.add_argument(
        'stnlist', type=str, nargs='+',
        help=
        "List of networks/stations to plot given in [net].[stnm] format or just [stnm] "
        "(separated by spaces; if [stnm] is not unique in the database, all stations with that "
        "name will be plotted). Use keyword 'all' to plot all stations in all networks. "
        "If [net].all is given, all stations from network [net] will be plotted")
    parser.add_argument('-nop', '--no_plots', action='store_true',
                        help="Do not produce plots", default=False)
    parser.add_argument('-nom', '--no_missing_data', action='store_true',
                        help="Do not show missing days", default=False)
    parser.add_argument('-nm', '--no_model', action='store_true',
                        help="Plot time series without fitting a model")
    parser.add_argument('-r', '--residuals', action='store_true',
                        help="Plot time series residuals", default=False)
    parser.add_argument(
        '-dir', '--directory', type=str,
        help=
        "Directory to save the resulting PNG files. If not specified, assumed to be the "
        "production directory")
    parser.add_argument(
        '-json', '--json', type=int,
        help="Export ETM adjustment to JSON. Append '0' to just output "
             "the ETM parameters, '1' to export time series without "
             "model and '2' to export both time series and model.")
    parser.add_argument(
        '-gui', '--interactive', action='store_true',
        help="Interactive mode: allows to zoom and view the plot interactively")
    parser.add_argument(
        '-win', '--time_window', nargs='+', metavar='interval',
        help=
        'Date range to window data. Can be specified in yyyy/mm/dd, yyyy.doy or as a single '
        'integer value (N) which shall be interpreted as last epoch-N')
    parser.add_argument(
        '-q', '--query', nargs=2, metavar='{type} {date}', type=str,
        help=
        'Dates to query the ETM. Specify "model" or "solution" to get the ETM value or the value '
        'of the daily solution (if exists). Output is in XYZ.')
    parser.add_argument(
        '-gamit', '--gamit', type=str, nargs=1, metavar='{stack}',
        help="Plot the GAMIT time series specifying which stack name to plot.")
    parser.add_argument(
        '-lang', '--language', type=str,
        help="Change the language of the plots. Default is English. "
             "Use ESP to select Spanish. To add more languages, "
             "include the ISO 639-1 code in pyETM.py", default='ENG')
    parser.add_argument('-hist', '--histogram', action='store_true',
                        help="Plot histogram of residuals")
    parser.add_argument(
        '-file', '--filename', type=str,
        help=
        "Obtain data from an external source (filename). Format should be specified with -format.")
    parser.add_argument(
        '-format', '--format', nargs='+', type=str,
        help=
        "To be used together with --filename. Specify order of the fields as found in the input "
        "file. Format strings are gpsWeek, gpsWeekDay, year, doy, fyear, month, day, mjd, "
        "x, y, z, na. Use 'na' to specify a field that should be ignored. If fields to be ignored "
        "are at the end of the line, then there is no need to specify those.")
    parser.add_argument('-outliers', '--plot_outliers', action='store_true',
                        help="Plot an additional panel with the outliers")
    parser.add_argument('-vel', '--velocity', action='store_true',
                        help="During query, output the velocity in XYZ.")
    parser.add_argument('-seasonal', '--seasonal_terms', action='store_true',
                        help="During query, output the seasonal terms in NEU.")
    parser.add_argument('-quiet', '--suppress_messages', action='store_true',
                        help="Quiet mode: suppress information messages")

    args = parser.parse_args()

    cnn = dbConnection.Cnn('gnss_data.cfg')

    stnlist = Utils.process_stnlist(cnn, args.stnlist)

    # define the language
    pyETM.LANG = args.language.lower()
    # set the logging level
    if not args.suppress_messages:
        pyETM.logger.setLevel(pyETM.INFO)

    #####################################
    # date filter

    dates = None
    if args.time_window is not None:
        if len(args.time_window) == 1:
            try:
                # single value: either a date...
                dates = process_date(args.time_window, missing_input=None, allow_days=False)
                dates = (dates[0].fyear, )
            except ValueError:
                # an integer value
                dates = float(args.time_window[0])
        else:
            # a full range: two dates
            dates = process_date(args.time_window)
            dates = (dates[0].fyear, dates[1].fyear)

    if stnlist:
        # do the thing
        if args.directory:
            if not os.path.exists(args.directory):
                os.mkdir(args.directory)
        else:
            if not os.path.exists('production'):
                os.mkdir('production')
            args.directory = 'production'

        for stn in stnlist:
            try:
                # pick the data source: PPP (default), external file, or stack
                if args.gamit is None and args.filename is None:
                    etm = pyETM.PPPETM(cnn, stn['NetworkCode'], stn['StationCode'], False,
                                       args.no_model)
                elif args.filename is not None:
                    etm = from_file(args, cnn, stn)
                else:
                    polyhedrons = cnn.query_float(
                        'SELECT "X", "Y", "Z", "Year", "DOY" FROM stacks '
                        'WHERE "name" = \'%s\' AND "NetworkCode" = \'%s\' AND '
                        '"StationCode" = \'%s\' '
                        'ORDER BY "Year", "DOY", "NetworkCode", "StationCode"'
                        % (args.gamit[0], stn['NetworkCode'], stn['StationCode']))

                    soln = pyETM.GamitSoln(cnn, polyhedrons, stn['NetworkCode'],
                                           stn['StationCode'], args.gamit[0])

                    etm = pyETM.GamitETM(cnn, stn['NetworkCode'], stn['StationCode'], False,
                                         args.no_model, gamit_soln=soln)

                    # print ' > %5.2f %5.2f %5.2f %i %i' % \
                    #     (etm.factor[0]*1000, etm.factor[1]*1000, etm.factor[2]*1000,
                    #      etm.soln.t.shape[0],
                    #      etm.soln.t.shape[0] -
                    #      np.sum(np.logical_and(np.logical_and(etm.F[0], etm.F[1]), etm.F[2])))

                    # print two largest outliers
                    if etm.A is not None:
                        lres = np.sqrt(np.sum(np.square(etm.R), axis=0))
                        slres = lres[np.argsort(-lres)]

                        print ' >> Two largest residuals:'
                        for i in [0, 1]:
                            print(' %s %6.3f %6.3f %6.3f'
                                  % (pyDate.Date(mjd=etm.soln.mjd[lres == slres[i]]).yyyyddd(),
                                     etm.R[0, lres == slres[i]],
                                     etm.R[1, lres == slres[i]],
                                     etm.R[2, lres == slres[i]]))

                if args.interactive:
                    xfile = None
                else:
                    # output file stem depends on the data source
                    if args.gamit is None:
                        if args.filename is None:
                            xfile = os.path.join(
                                args.directory,
                                '%s.%s_ppp' % (etm.NetworkCode, etm.StationCode))
                        else:
                            xfile = os.path.join(
                                args.directory,
                                '%s.%s_file' % (etm.NetworkCode, etm.StationCode))
                    else:
                        xfile = os.path.join(
                            args.directory,
                            '%s.%s_gamit' % (etm.NetworkCode, etm.StationCode))

                # leave pngfile empty to enter interactive mode (GUI)
                if not args.no_plots:
                    etm.plot(xfile + '.png', t_win=dates, residuals=args.residuals,
                             plot_missing=not args.no_missing_data,
                             plot_outliers=args.plot_outliers)
                    if args.histogram:
                        etm.plot_hist(xfile + '_hist.png')

                if args.json is not None:
                    with open(xfile + '.json', 'w') as f:
                        if args.json == 1:
                            json.dump(etm.todictionary(time_series=True), f,
                                      indent=4, sort_keys=False)
                        elif args.json == 2:
                            json.dump(etm.todictionary(time_series=True, model=True), f,
                                      indent=4, sort_keys=False)
                        else:
                            json.dump(etm.todictionary(False), f,
                                      indent=4, sort_keys=False)

                if args.query is not None:
                    model = True if args.query[0] == 'model' else False
                    q_date = pyDate.Date(fyear=float(args.query[1]))

                    xyz, _, _, txt = etm.get_xyz_s(q_date.year, q_date.doy, force_model=model)

                    strp = ''
                    # if user requests velocity too, output it
                    if args.velocity:
                        if etm.A is not None:
                            vxyz = etm.rotate_2xyz(etm.Linear.p.params[:, 1])
                            strp = '%8.5f %8.5f %8.5f ' \
                                   % (vxyz[0, 0], vxyz[1, 0], vxyz[2, 0])

                    # also output seasonal terms, if requested
                    if args.seasonal_terms:
                        if etm.Periodic.frequency_count > 0:
                            strp += ' '.join([
                                '%8.5f' % (x * 1000)
                                for x in etm.Periodic.p.params.flatten().tolist()
                            ])

                    print ' %s.%s %14.5f %14.5f %14.5f %8.3f %s -> %s' \
                          % (etm.NetworkCode, etm.StationCode,
                             xyz[0], xyz[1], xyz[2], q_date.fyear, strp, txt)

                print 'Successfully plotted ' + stn['NetworkCode'] + '.' + stn['StationCode']

            except pyETM.pyETMException as e:
                print str(e)
            except Exception:
                # keep going with the remaining stations on unexpected errors
                print 'Error during processing of ' + stn['NetworkCode'] + '.' + stn['StationCode']
                print traceback.format_exc()
                pass
def main():
    """Entry point: plot ETMs from PPP or from a GAMIT project/stack.

    Variant of the ETM plotter that takes -gamit {project} {type}, where type
    is 'stack' (post-stacking series) or 'gamit' (raw polyhedron coordinates).
    """
    parser = argparse.ArgumentParser(
        description='Plot ETM for stations in the database')
    parser.add_argument(
        'stnlist', type=str, nargs='+',
        help=
        "List of networks/stations to plot given in [net].[stnm] format or just [stnm] "
        "(separated by spaces; if [stnm] is not unique in the database, all stations with that "
        "name will be plotted). Use keyword 'all' to plot all stations in all networks. "
        "If [net].all is given, all stations from network [net] will be plotted")
    parser.add_argument('-nop', '--no_plots', action='store_true',
                        help="Do not produce plots", default=False)
    parser.add_argument('-nom', '--no_missing_data', action='store_true',
                        help="Do not show missing days", default=False)
    parser.add_argument('-nm', '--no_model', action='store_true',
                        help="Plot time series without fitting a model")
    parser.add_argument('-r', '--residuals', action='store_true',
                        help="Plot time series residuals", default=False)
    parser.add_argument(
        '-dir', '--directory', type=str,
        help=
        "Directory to save the resulting PNG files. If not specified, assumed to be the "
        "production directory")
    parser.add_argument(
        '-json', '--json', type=int,
        help="Export ETM adjustment to JSON. Append '1' to export time "
             "series or append '0' to just output the ETM parameters.")
    parser.add_argument(
        '-gui', '--interactive', action='store_true',
        help="Interactive mode: allows to zoom and view the plot interactively")
    parser.add_argument(
        '-win', '--time_window', nargs='+', metavar='interval',
        help=
        'Date range to window data. Can be specified in yyyy/mm/dd, yyyy.doy or as a single '
        'integer value (N) which shall be interpreted as last epoch-N')
    parser.add_argument(
        '-gamit', '--gamit', type=str, nargs=2, metavar='{project} {type}',
        help=
        "Plot the GAMIT time series. Specify project and type = \'stack\' to plot the time "
        "series after stacking or \'gamit\' to just plot the coordinates of the polyhedron")

    args = parser.parse_args()

    Config = pyOptions.ReadOptions(
        "gnss_data.cfg")  # type: pyOptions.ReadOptions

    cnn = dbConnection.Cnn('gnss_data.cfg')

    # a single argument that is an existing file is treated as a station list file
    if len(args.stnlist) == 1 and os.path.isfile(args.stnlist[0]):
        print ' >> Station list read from ' + args.stnlist[0]
        stnlist = [line.strip() for line in open(args.stnlist[0], 'r')]
        stnlist = [{
            'NetworkCode': item.split('.')[0],
            'StationCode': item.split('.')[1]
        } for item in stnlist]
    else:
        stnlist = Utils.process_stnlist(cnn, args.stnlist)

    #####################################
    # date filter

    dates = None
    if args.time_window is not None:
        if len(args.time_window) == 1:
            try:
                # single value: either a date...
                dates = process_date(args.time_window, missing_input=None, allow_days=False)
                dates = (dates[0].fyear, )
            except ValueError:
                # an integer value
                dates = float(args.time_window[0])
        else:
            # a full range: two dates
            dates = process_date(args.time_window)
            dates = (dates[0].fyear, dates[1].fyear)

    if stnlist:
        # do the thing
        if args.directory:
            if not os.path.exists(args.directory):
                os.mkdir(args.directory)
        else:
            if not os.path.exists('production'):
                os.mkdir('production')
            args.directory = 'production'

        for stn in stnlist:
            try:
                if args.gamit is None:
                    etm = pyETM.PPPETM(cnn, stn['NetworkCode'], stn['StationCode'], False,
                                       args.no_model)
                else:
                    if args.gamit[1] == 'stack':
                        polyhedrons = cnn.query_float(
                            'SELECT "X", "Y", "Z", "Year", "DOY" FROM stacks '
                            'WHERE "Project" = \'%s\' AND "NetworkCode" = \'%s\' AND '
                            '"StationCode" = \'%s\' '
                            'ORDER BY "Year", "DOY", "NetworkCode", "StationCode"'
                            % (args.gamit[0], stn['NetworkCode'], stn['StationCode']))

                        soln = pyETM.GamitSoln(cnn, polyhedrons, stn['NetworkCode'],
                                               stn['StationCode'], args.gamit[0])

                        etm = pyETM.GamitETM(cnn, stn['NetworkCode'], stn['StationCode'], False,
                                             args.no_model, gamit_soln=soln)

                        # print ' > %5.2f %5.2f %5.2f %i %i' % \
                        #     (etm.factor[0]*1000, etm.factor[1]*1000, etm.factor[2]*1000,
                        #      etm.soln.t.shape[0],
                        #      etm.soln.t.shape[0] - np.sum(np.logical_and(np.logical_and(etm.F[0], etm.F[1]), etm.F[2])))

                        # print two largest outliers
                        if etm.A is not None:
                            lres = np.sqrt(np.sum(np.square(etm.R), axis=0))
                            slres = lres[np.argsort(-lres)]

                            print ' >> Two largest residuals:'
                            for i in [0, 1]:
                                print(' %s %6.3f %6.3f %6.3f'
                                      % (pyDate.Date(mjd=etm.soln.mjd[lres == slres[i]]).yyyyddd(),
                                         etm.R[0, lres == slres[i]],
                                         etm.R[1, lres == slres[i]],
                                         etm.R[2, lres == slres[i]]))

                    elif args.gamit[1] == 'gamit':
                        # NOTE(review): project=args.gamit[1] passes the literal
                        # string 'gamit' as the project; args.gamit[0] (the
                        # project name) looks like the intended value -- confirm.
                        etm = pyETM.GamitETM(cnn, stn['NetworkCode'], stn['StationCode'], False,
                                             args.no_model, project=args.gamit[1])

                    else:
                        # parser.error exits the process, so etm = None is unreachable
                        parser.error('Invalid option for -gamit switch')
                        etm = None

                if args.interactive:
                    xfile = None
                else:
                    if args.gamit is None:
                        xfile = os.path.join(
                            args.directory,
                            '%s.%s_ppp' % (etm.NetworkCode, etm.StationCode))
                    else:
                        xfile = os.path.join(
                            args.directory,
                            '%s.%s_gamit' % (etm.NetworkCode, etm.StationCode))

                # leave pngfile empty to enter interactive mode (GUI)
                if not args.no_plots:
                    etm.plot(xfile + '.png', t_win=dates, residuals=args.residuals,
                             plot_missing=not args.no_missing_data)

                if args.json is not None:
                    with open(xfile + '.json', 'w') as f:
                        if args.json != 0:
                            json.dump(etm.todictionary(True), f, indent=4, sort_keys=False)
                        else:
                            json.dump(etm.todictionary(False), f, indent=4, sort_keys=False)

                print 'Successfully plotted ' + stn['NetworkCode'] + '.' + stn['StationCode']

            except pyETM.pyETMException as e:
                print str(e)
            except Exception:
                # keep going with the remaining stations on unexpected errors
                print 'Error during processing of ' + stn['NetworkCode'] + '.' + stn['StationCode']
                print traceback.format_exc()
                pass
def dra(cnn, project, dates):
    """Compute the daily repeatability analysis (DRA) for a project.

    Chains day-to-day Helmert transformations over consecutive polyhedrons,
    stores the transformed coordinates in gamit_dra, then plots the solution
    (SOL) and differenced (DRA) ETMs per station.

    :param cnn: open database connection
    :param project: GAMIT project name
    :param dates: two-element sequence of Date objects bounding the analysis
    """
    # stations with solutions for the project within the date window
    rs = cnn.query('SELECT "NetworkCode", "StationCode" FROM gamit_soln '
                   'WHERE "Project" = \'%s\' AND "FYear" BETWEEN %.4f AND %.4f GROUP BY "NetworkCode", "StationCode" '
                   'ORDER BY "NetworkCode", "StationCode"' % (project, dates[0].fyear, dates[1].fyear))

    stnlist = rs.dictresult()

    # get the epochs
    # NOTE(review): the adjacent string literals join as "...%.4fGROUP BY..."
    # with no separating space -- confirm this SQL actually parses; a trailing
    # space looks like it was dropped.
    ep = cnn.query('SELECT "Year", "DOY" FROM gamit_soln '
                   'WHERE "Project" = \'%s\' AND "FYear" BETWEEN %.4f AND %.4f'
                   'GROUP BY "Year", "DOY" ORDER BY "Year", "DOY"' % (project, dates[0].fyear, dates[1].fyear))

    ep = ep.dictresult()

    epochs = [Date(year=item['Year'], doy=item['DOY']) for item in ep]

    # delete DRA starting from the first requested epoch
    cnn.query('DELETE FROM gamit_dra WHERE "Project" = \'%s\' AND "FYear" >= %f' % (project, epochs[0].fyear))

    # query the first polyhedron in the line, which should be the last polyhedron in gamit_dra
    # NOTE(review): same glued-literal concern -- ")ORDER BY" has no space.
    poly = cnn.query_float('SELECT "X", "Y", "Z", "Year", "DOY", "NetworkCode", "StationCode" FROM gamit_dra '
                           'WHERE "Project" = \'%s\' AND "FYear" = (SELECT max("FYear") FROM gamit_dra)'
                           'ORDER BY "NetworkCode", "StationCode"' % project)

    if len(poly) == 0:
        print ' -- Using gamit_soln: no pre-existent DRA found'
        # no last entry found in gamit_dra, use gamit_soln
        # NOTE(review): "%iORDER BY" also has no separating space -- confirm.
        poly = cnn.query_float('SELECT "X", "Y", "Z", "Year", "DOY", "NetworkCode", "StationCode" FROM gamit_soln '
                               'WHERE "Project" = \'%s\' AND "Year" = %i AND "DOY" = %i'
                               'ORDER BY "NetworkCode", "StationCode"' % (project, epochs[0].year, epochs[0].doy))
    else:
        print ' -- Pre-existent DRA found. Attaching.'

    # accumulated list of all processed polyhedrons
    polyhedrons = poly

    bar = tqdm(total=len(epochs)-1, ncols=160)

    # slide a two-day window over the epochs
    for date1, date2 in zip(epochs[0:-1], epochs[1:]):
        poly1 = []

        # get the stations common stations between day i and day i+1 (in A format)
        s = cnn.query_float(sql_select_union(project, '"X", "Y", "Z", "NetworkCode", "StationCode"', date1, date2))

        # partial-derivative rows of the Helmert design matrix (rotations + translations)
        x = cnn.query_float(sql_select_union(project, '0, -"Z", "Y", 1, 0, 0', date1, date2))
        y = cnn.query_float(sql_select_union(project, '"Z", 0, -"X", 0, 1, 0', date1, date2))
        z = cnn.query_float(sql_select_union(project, '-"Y", "X", 0, 0, 0, 1', date1, date2))

        # polyhedron of the common stations
        # NOTE(review): both `np` and `numpy` aliases are used in this function;
        # both must be imported at file level.
        Xx = cnn.query_float(sql_select_union(project, '"X", "Y", "Z"', date1, date2))
        X = numpy.array(Xx).transpose().flatten()

        # for vertex in stations
        for v in s:
            poly1 += [np.array(pp[0:3], dtype=float) - np.array(v[0:3])
                      for pp in poly if pp[-2] == v[-2] and pp[-1] == v[-1]]

        # residuals for adjustment
        L = np.array(poly1)

        A = numpy.row_stack((np.array(x), np.array(y), np.array(z)))
        # scale the rotation columns to keep the normal equations well-conditioned
        A[:, 0:3] = A[:, 0:3]*1e-9

        # find helmert transformation
        c, _, _, v, _, p, it = adjust_lsq(A, L.flatten())

        # write some info to the screen
        tqdm.write(' -- %s (%3i): translation (mm mm mm) scale: (%6.1f %6.1f %6.1f) %10.2e ' %
                   (date2.yyyyddd(), it, c[-3] * 1000, c[-2] * 1000, c[-1] * 1000, c[-4]))

        # make A again with all stations
        s = cnn.query_float(sql_select(project, '"Year", "DOY", "NetworkCode", "StationCode"', date2))

        x = cnn.query_float(sql_select(project, '0, -"Z", "Y", 1, 0, 0', date2))
        y = cnn.query_float(sql_select(project, '"Z", 0, -"X", 0, 1, 0', date2))
        z = cnn.query_float(sql_select(project, '-"Y", "X", 0, 0, 0, 1', date2))

        A = numpy.row_stack((np.array(x), np.array(y), np.array(z)))
        A[:, 0:3] = A[:, 0:3] * 1e-9

        Xx = cnn.query_float(sql_select(project, '"X", "Y", "Z"', date2))
        X = numpy.array(Xx).transpose().flatten()

        # apply the estimated transformation to day i+1's full polyhedron
        X = (numpy.dot(A, c) + X).reshape(3, len(x)).transpose()

        # save current transformed polyhedron to use in the next iteration
        polyhedrons += poly
        # NOTE(review): the comprehension variables x and s shadow the outer
        # x (design rows) and s (station rows) -- intentional but fragile.
        poly = [x.tolist() + list(s) for x, s in zip(X, s)]

        # insert results in gamit_dra
        for pp in poly:
            cnn.insert('gamit_dra', NetworkCode=pp[-2], StationCode=pp[-1], Project=project,
                       X=pp[0], Y=pp[1], Z=pp[2],
                       Year=date2.year, DOY=date2.doy, FYear=date2.fyear)

        bar.update()

    bar.close()

    # plot the residuals
    for stn in tqdm(stnlist):
        NetworkCode = stn['NetworkCode']
        StationCode = stn['StationCode']

        # load from the db
        ts = cnn.query_float('SELECT "X", "Y", "Z", "Year", "DOY" FROM gamit_dra '
                             'WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' AND '
                             '"Project" = \'%s\' ORDER BY "Year", "DOY"' % (NetworkCode, StationCode, project))
        ts = np.array(ts)

        if ts.size:
            try:
                # save the time series
                gsoln = pyETM.GamitSoln(cnn, ts, NetworkCode, StationCode, project)
                # create the ETM object
                etm = pyETM.GamitETM(cnn, NetworkCode, StationCode, False, False, gsoln)

                etm.plot(pngfile='%s/%s.%s_SOL.png' % (project, NetworkCode, StationCode),
                         residuals=True, plot_missing=False)

                # need at least 3 epochs to difference the series
                if ts.shape[0] > 2:
                    dts = np.append(np.diff(ts[:,0:3], axis=0), ts[1:, -2:], axis=1)

                    dra = pyETM.GamitSoln(cnn, dts, NetworkCode, StationCode, project)

                    etm = pyETM.DailyRep(cnn, NetworkCode, StationCode, False, False, dra)

                    etm.plot(pngfile='%s/%s.%s_DRA.png' % (project, NetworkCode, StationCode),
                             residuals=True, plot_missing=False)

            except Exception as e:
                # best-effort plotting: report and continue
                tqdm.write(' -->' + str(e))
def align_spaces(self, target_dict):
    """Align the stack's coordinate and velocity spaces to a target frame.

    First estimates a Helmert transformation between the stack's ETM-derived
    coordinates and the target coordinates at a common reference epoch, and
    applies it to every polyhedron. Then estimates a second transformation in
    velocity space (stack ETM velocities vs target vx/vy/vz) and removes it.
    Residuals before/after both alignments are printed and saved on
    self.position_space and self.velocity_space.

    :param target_dict: {'net.stn': {'x','y','z','vx','vy','vz','epoch'}, ...}
    """
    # get the list of stations to use during the alignment
    use_stations = target_dict.keys()

    # reference date used to align the stack
    # epochs SHOULD all be the same. Get first item and then the epoch
    # NOTE(review): dict.values()[0] requires Python 2 list-style dict views.
    ref_date = Date(fyear=target_dict.values()[0]['epoch'])

    # convert the target dict to a list
    target_list = []
    stack_list = []

    tqdm.write(' >> Aligning coordinate space...')

    for stn in use_stations:
        # only use stations whose target coordinates are defined
        if not np.isnan(target_dict[stn]['x']):
            target_list.append((stn,
                                target_dict[stn]['x'],
                                target_dict[stn]['y'],
                                target_dict[stn]['z'],
                                ref_date.year, ref_date.doy, ref_date.fyear))
            # get the ETM coordinate for this station
            net = stn.split('.')[0]
            ssn = stn.split('.')[1]
            ts = pyETM.GamitSoln(self.cnn, self.get_station(net, ssn), net, ssn, self.name)
            etm = pyETM.GamitETM(self.cnn, net, ssn, gamit_soln=ts)
            stack_list += etm.get_etm_soln_list()

    # structured array holding the stack-side (ETM) coordinates
    c_array = np.array(stack_list, dtype=[('stn', 'S8'), ('x', 'float64'), ('y', 'float64'),
                                          ('z', 'float64'), ('yr', 'i4'), ('dd', 'i4'),
                                          ('fy', 'float64')])

    comb = Polyhedron(c_array, 'etm', ref_date)

    # build a target polyhedron from the target_list
    vertices = np.array(target_list, dtype=[('stn', 'S8'), ('x', 'float64'), ('y', 'float64'),
                                            ('z', 'float64'), ('yr', 'i4'), ('dd', 'i4'),
                                            ('fy', 'float64')])

    target = Polyhedron(vertices, 'target_frame', ref_date)

    # start aligning the coordinates
    tqdm.write(' -- Aligning polyhedron at %.3f (%s)' % (ref_date.fyear, ref_date.yyyyddd()))

    scale = False
    # align the polyhedron to the target
    r_before, r_after, a_stn = comb.align(target, scale=scale, verbose=True)
    # extract the Helmert parameters to apply to the rest of the polyhedrons
    # remove the scale factor
    helmert = comb.helmert

    tqdm.write(' -- Reporting coordinate space residuals (in mm) before and after frame alignment\n'
               ' Before After | Before After ')

    # format r_before and r_after to satisfy the required print_residuals format
    # NOTE(review): the / 3 relies on Python 2 integer division; under Python 3
    # this would need // 3.
    r_before = r_before.reshape(3, r_before.shape[0] / 3).transpose()
    r_after = r_after.reshape(3, r_after.shape[0] / 3).transpose()

    residuals = np.stack((r_before, r_after), axis=2)

    stn_lla = []
    for i, stn in enumerate(a_stn):
        n = stn.split('.')[0]
        s = stn.split('.')[1]
        # get the lat lon of the station to report back in the json
        lla = self.cnn.query_float('SELECT lat, lon FROM stations WHERE "NetworkCode" = \'%s\' '
                                   'AND "StationCode" = \'%s\'' % (n, s))[0]
        stn_lla.append([lla[0], lla[1]])
        # print residuals to screen
        print_residuals(n, s, residuals[i], lla[0], lla[1], ['X', 'Y', 'Z'])

    # save the position space residuals
    self.position_space = {'stations': {'codes': a_stn.tolist(), 'latlon': stn_lla},
                           'residuals_before_alignment': r_before.tolist(),
                           'residuals_after_alignment': r_after.tolist(),
                           'reference_date': ref_date,
                           'helmert_transformation': comb.helmert.tolist(),
                           'comments': 'No scale factor estimated.'}

    # apply the coordinate-space transformation to every other polyhedron
    for poly in tqdm(self, ncols=160, desc=' -- Applying coordinate space transformation'):
        if poly.date != ref_date:
            poly.align(helmert=helmert, scale=scale)

    tqdm.write(' >> Aligning velocity space...')

    # choose the stations that have a velocity
    use_stn = []
    for stn in use_stations:
        if not np.isnan(target_dict[stn]['vx']):
            use_stn.append(stn)

    # load the polynomial terms of the stations
    etm_objects = self.cnn.query_float('SELECT etms."NetworkCode", etms."StationCode", stations.lat, '
                                       'stations.lon, params FROM etms '
                                       'LEFT JOIN stations ON '
                                       'etms."NetworkCode" = stations."NetworkCode" AND '
                                       'etms."StationCode" = stations."StationCode" '
                                       'WHERE "object" = \'polynomial\' AND soln = \'gamit\' AND stack = \'%s\' '
                                       'AND etms."NetworkCode" || \'.\' || etms."StationCode" IN (\'%s\') '
                                       'ORDER BY etms."NetworkCode", etms."StationCode"'
                                       % (self.name, '\', \''.join(use_stn)), as_dict=True)

    # first, align the velocity space by finding a Helmert transformation that takes vx, vy, and vz of the stack at
    # each station and makes it equal to vx, vy, and vz of the ITRF structure
    dvx = np.zeros(len(etm_objects))
    dvy = np.zeros(len(etm_objects))
    dvz = np.zeros(len(etm_objects))

    for s, p in enumerate(etm_objects):
        stn_ts = self.get_station(p['NetworkCode'], p['StationCode'])

        # drop the stale ETM rows so the refit below is stored fresh
        self.cnn.query('DELETE FROM etms WHERE "soln" = \'gamit\' AND "NetworkCode" = \'%s\' AND '
                       '"StationCode" = \'%s\' AND stack = \'%s\' '
                       % (p['NetworkCode'], p['StationCode'], self.name))

        # save the time series
        ts = pyETM.GamitSoln(self.cnn, stn_ts, p['NetworkCode'], p['StationCode'], self.name)
        # create the ETM object
        pyETM.GamitETM(self.cnn, p['NetworkCode'], p['StationCode'], False, False, ts)

        q = self.cnn.query_float('SELECT params FROM etms '
                                 'WHERE "object" = \'polynomial\' AND soln = \'gamit\' '
                                 'AND "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' AND stack = \'%s\' '
                                 % (p['NetworkCode'], p['StationCode'], self.name), as_dict=True)[0]

        params = np.array(q['params'])
        # NOTE(review): Python 2 integer division again (shape[0] / 3)
        params = params.reshape((3, params.shape[0] / 3))
        # first item, i.e. params[:][0] in array is position
        # second item is velocity, which is what we are interested in
        v = np.array(lg2ct(params[0, 1], params[1, 1], params[2, 1], p['lat'], p['lon'])).flatten()

        # put the residuals in an array
        td = target_dict['%s.%s' % (p['NetworkCode'], p['StationCode'])]
        dvx[s] = v[0] - np.array(td['vx'])
        dvy[s] = v[1] - np.array(td['vy'])
        dvz[s] = v[2] - np.array(td['vz'])

    scale = False
    A = self.build_design(etm_objects, scale=scale)

    # loop through the frequencies
    L = np.row_stack((dvx.flatten(), dvy.flatten(), dvz.flatten())).flatten()

    c, _, _, _, wrms, _, it = adjust_lsq(A, L)

    tqdm.write(' -- Velocity space transformation: ' + ' '.join(['%7.4f' % cc for cc in c]) +
               ' wrms: %.3f it: %i' % (wrms * 1000, it))

    # loop through all the polyhedrons
    for poly in tqdm(self, ncols=160, desc=' -- Applying velocity space transformation'):
        # time from the reference epoch, one entry per vertex
        t = np.repeat(poly.date.fyear - ref_date.fyear, poly.Ax.shape[0])
        poly.vertices['x'] = poly.vertices['x'] - t * np.dot(poly.ax(scale=scale), c)
        poly.vertices['y'] = poly.vertices['y'] - t * np.dot(poly.ay(scale=scale), c)
        poly.vertices['z'] = poly.vertices['z'] - t * np.dot(poly.az(scale=scale), c)

    tqdm.write(' -- Reporting velocity space residuals (in mm/yr) before and after frame alignment\n'
               ' Before After | Before After ')

    dvxa = np.zeros(len(etm_objects))
    dvya = np.zeros(len(etm_objects))
    dvza = np.zeros(len(etm_objects))

    for s, p in enumerate(etm_objects):
        # redo the etm for this station
        stn_ts = self.get_station(p['NetworkCode'], p['StationCode'])

        self.cnn.query('DELETE FROM etms WHERE "soln" = \'gamit\' AND "NetworkCode" = \'%s\' AND '
                       '"StationCode" = \'%s\' AND stack = \'%s\''
                       % (p['NetworkCode'], p['StationCode'], self.name))

        # save the time series
        ts = pyETM.GamitSoln(self.cnn, stn_ts, p['NetworkCode'], p['StationCode'], self.name)
        # create the ETM object
        pyETM.GamitETM(self.cnn, p['NetworkCode'], p['StationCode'], False, False, ts)

        q = self.cnn.query_float('SELECT params FROM etms '
                                 'WHERE "object" = \'polynomial\' AND soln = \'gamit\' '
                                 'AND "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' AND stack = \'%s\''
                                 % (p['NetworkCode'], p['StationCode'], self.name), as_dict=True)[0]

        params = np.array(q['params'])
        params = params.reshape((3, params.shape[0] / 3))
        # first item, i.e. params[:][0] in array is position
        # second item is velocity, which is what we are interested in
        v = np.array(lg2ct(params[0, 1], params[1, 1], params[2, 1], p['lat'], p['lon'])).flatten()

        # put the residuals in an array
        td = target_dict['%s.%s' % (p['NetworkCode'], p['StationCode'])]
        dvxa[s] = v[0] - np.array(td['vx'])
        dvya[s] = v[1] - np.array(td['vy'])
        dvza[s] = v[2] - np.array(td['vz'])

        lla = self.cnn.query_float('SELECT lat, lon FROM stations WHERE "NetworkCode" = \'%s\' '
                                   'AND "StationCode" = \'%s\'' % (p['NetworkCode'], p['StationCode']))[0]

        print_residuals(p['NetworkCode'], p['StationCode'],
                        np.array([[dvx[s], dvxa[s]], [dvy[s], dvya[s]], [dvz[s], dvza[s]]]),
                        lla[0], lla[1], ['X', 'Y', 'Z'])

    # save the position space residuals
    self.velocity_space = {'stations': {'codes': [p['NetworkCode'] + '.' + p['StationCode'] for p in etm_objects],
                                        'latlon': [[p['lat'], p['lon']] for p in etm_objects]},
                           'residuals_before_alignment':
                               np.column_stack((dvx.flatten(), dvy.flatten(), dvz.flatten())).tolist(),
                           'residuals_after_alignment':
                               np.column_stack((dvxa.flatten(), dvya.flatten(), dvza.flatten())).tolist(),
                           'reference_date': ref_date,
                           'helmert_transformation': c.tolist(),
                           'comments': 'Velocity space transformation.'}

    tqdm.write(' -- Done!')
def remove_common_modes(self, target_periods=None):
    """Remove common-mode periodic signals from the stack, or inherit them.

    When ``target_periods`` is None, the periodic (seasonal) terms fitted to
    every station ETM in this stack are treated as common modes: a Helmert
    transformation is estimated per frequency/component (sin, cos) and
    subtracted from every polyhedron.

    When ``target_periods`` is given (a dict keyed by 'net.stn', each entry
    holding per-period 'n'/'e'/'u' terms, e.g. keys '365.250' and '182.625'),
    the *difference* between the current periodic terms and the target terms
    of the parent frame is removed instead, so the stack "inherits" the
    periodic components of the parent frame.

    Side effects: modifies ``poly.vertices`` of every polyhedron in the stack,
    deletes and recreates the 'gamit' ETMs for the involved stations, and
    stores a summary in ``self.periodic_space``.
    """
    if target_periods is None:
        tqdm.write(' >> Removing periodic common modes...')

        # load all the periodic terms
        etm_objects = self.cnn.query_float('SELECT etms."NetworkCode", etms."StationCode", stations.lat, '
                                           'stations.lon, '
                                           'frequencies as freq, params FROM etms '
                                           'LEFT JOIN stations ON '
                                           'etms."NetworkCode" = stations."NetworkCode" AND '
                                           'etms."StationCode" = stations."StationCode" '
                                           'WHERE "object" = \'periodic\' AND soln = \'gamit\' AND stack = \'%s\' '
                                           'AND frequencies <> \'{}\' '
                                           'ORDER BY etms."NetworkCode", etms."StationCode"' % self.name,
                                           as_dict=True)
    else:
        use_stations = []
        for s in target_periods.keys():
            # check that the stations have not one or both periods with NaNs
            if not np.isnan(target_periods[s]['365.250']['n'][0]) and \
               not np.isnan(target_periods[s]['182.625']['n'][0]):
                use_stations.append(s)

        tqdm.write(' >> Inheriting periodic components...')

        # load the periodic terms of the stations that will produce the inheritance
        etm_objects = self.cnn.query_float('SELECT etms."NetworkCode", etms."StationCode", stations.lat, '
                                           'stations.lon, '
                                           'frequencies as freq, params FROM etms '
                                           'LEFT JOIN stations ON '
                                           'etms."NetworkCode" = stations."NetworkCode" AND '
                                           'etms."StationCode" = stations."StationCode" '
                                           'WHERE "object" = \'periodic\' AND soln = \'gamit\' AND stack = \'%s\' '
                                           'AND frequencies <> \'{}\' AND etms."NetworkCode" || \'.\' || '
                                           'etms."StationCode" IN (\'%s\') '
                                           'ORDER BY etms."NetworkCode", etms."StationCode"'
                                           % (self.name, '\', \''.join(use_stations)), as_dict=True)

    # load the frequencies to subtract
    frequencies = self.cnn.query_float('SELECT frequencies FROM etms WHERE soln = \'gamit\' AND '
                                       'object = \'periodic\' AND frequencies <> \'{}\' AND stack = \'%s\' '
                                       'GROUP BY frequencies' % self.name, as_dict=True)

    # get the unique list of frequencies
    f_vector = []

    for freq in frequencies:
        f_vector += [f for f in freq['frequencies']]

    f_vector = np.array(list(set(f_vector)))

    # initialize the vectors: [frequency, station, sin/cos component]
    ox = np.zeros((len(f_vector), len(etm_objects), 2))
    oy = np.zeros((len(f_vector), len(etm_objects), 2))
    oz = np.zeros((len(f_vector), len(etm_objects), 2))

    # vector for residuals after alignment
    rx = np.zeros((len(f_vector), len(etm_objects), 2))
    ry = np.zeros((len(f_vector), len(etm_objects), 2))
    rz = np.zeros((len(f_vector), len(etm_objects), 2))

    tqdm.write(' -- Reporting periodic residuals (in mm) before %s'
               % ('inheritance' if target_periods else 'common mode removal'))

    for s, p in enumerate(etm_objects):
        # NOTE: if the ETMs stored in the database were made with a stack different from the one
        # being used here, they would have to be deleted and recomputed before this query
        # (delete from etms, rebuild with pyETM.GamitETM) — not needed in the normal workflow.
        q = self.cnn.query_float('SELECT frequencies as freq, * FROM etms '
                                 'WHERE "object" = \'periodic\' AND soln = \'gamit\' '
                                 'AND "NetworkCode" = \'%s\' AND '
                                 '"StationCode" = \'%s\' AND stack = \'%s\''
                                 % (p['NetworkCode'], p['StationCode'], self.name), as_dict=True)[0]

        if target_periods:
            n = []
            e = []
            u = []
            # inheritance invoked! we want to remove the difference between current periodic
            # terms and target terms from the parent frame
            for k in range(2):
                for f in q['freq']:
                    t = target_periods['%s.%s' % (p['NetworkCode'], p['StationCode'])]['%.3f' % (1 / f)]
                    n += [t['n'][k]]
                    e += [t['e'][k]]
                    u += [t['u'][k]]

            params = np.array(q['params']) - np.array([n, e, u]).flatten()
        else:
            # no inheritance: make a vector of current periodic terms to be removed as common modes
            params = np.array(q['params'])

        # reshape to NEU rows; BUG FIX: integer division (//) — true division yields a float
        # under Python 3 and np.reshape / slice steps reject non-integers
        params = params.reshape((3, params.shape[0] // 3))
        param_count = params.shape[1] // 2

        print_residuals(p['NetworkCode'], p['StationCode'], params, p['lat'], p['lon'])

        # convert from NEU to XYZ
        for j in range(params.shape[1]):
            params[:, j] = np.array(lg2ct(params[0, j], params[1, j], params[2, j],
                                          p['lat'], p['lon'])).flatten()

        # place the sin/cos pair of each frequency in its slot of the observation vectors
        for i, f in enumerate(p['freq']):
            ox[f_vector == f, s] = params[0, i:i + param_count + 1:param_count]
            oy[f_vector == f, s] = params[1, i:i + param_count + 1:param_count]
            oz[f_vector == f, s] = params[2, i:i + param_count + 1:param_count]

    # build the design matrix using the stations involved in inheritance or all stations if no inheritance
    sql_where = ','.join(["'" + stn['NetworkCode'] + '.' + stn['StationCode'] + "'" for stn in etm_objects])

    x = self.cnn.query_float('SELECT 0, -auto_z*1e-9, auto_y*1e-9, 1, 0, 0, auto_x*1e-9 FROM stations WHERE '
                             '"NetworkCode" || \'.\' || "StationCode" '
                             'IN (%s) ORDER BY "NetworkCode", "StationCode"' % sql_where)

    y = self.cnn.query_float('SELECT auto_z*1e-9, 0, -auto_x*1e-9, 0, 1, 0, auto_y*1e-9 FROM stations WHERE '
                             '"NetworkCode" || \'.\' || "StationCode" '
                             'IN (%s) ORDER BY "NetworkCode", "StationCode"' % sql_where)

    z = self.cnn.query_float('SELECT -auto_y*1e-9, auto_x*1e-9, 0, 0, 0, 1, auto_z*1e-9 FROM stations WHERE '
                             '"NetworkCode" || \'.\' || "StationCode" '
                             'IN (%s) ORDER BY "NetworkCode", "StationCode"' % sql_where)

    Ax = np.array(x)
    Ay = np.array(y)
    Az = np.array(z)

    A = np.row_stack((Ax, Ay, Az))

    solution_vector = []

    # vector to display down-weighted stations
    st = dict()
    st['stn'] = [s['NetworkCode'] + '.' + s['StationCode'] for s in etm_objects]
    xyzstn = ['X-%s' % ss for ss in st['stn']] + ['Y-%s' % ss for ss in st['stn']] + \
             ['Z-%s' % ss for ss in st['stn']]

    # loop through the frequencies
    for freq in f_vector:
        for i, cs in enumerate((np.sin, np.cos)):
            L = np.row_stack((ox[f_vector == freq, :, i].flatten(),
                              oy[f_vector == freq, :, i].flatten(),
                              oz[f_vector == freq, :, i].flatten())).flatten()

            c, _, index, _, wrms, _, it = adjust_lsq(A, L)
            # c = np.linalg.lstsq(A, L, rcond=-1)[0]

            tqdm.write(' -- Transformation for %s(2 * pi * 1/%.2f) : %s'
                       % (cs.__name__, np.divide(1., freq), ' '.join(['%7.4f' % cc for cc in c]))
                       + ' wrms: %.3f it: %i\n' % (wrms * 1000, it)
                       + '    Down-weighted station components: %s'
                       % ' '.join(['%s' % ss for ss in np.array(xyzstn)[np.logical_not(index)]]))

            # save the transformation parameters to output to json file
            solution_vector.append(['%s(2 * pi * 1/%.2f)' % (cs.__name__, np.divide(1., freq)), c.tolist()])

            # loop through all the polyhedrons
            for poly in tqdm(self, ncols=160,
                             desc=' -- Applying transformation -> %s(2 * pi * 1/%.2f)'
                                  % (cs.__name__, np.divide(1., freq))):
                # subtract the inverted common modes
                poly.vertices['x'] = poly.vertices['x'] - cs(2 * pi * freq * 365.25 * poly.date.fyear) * \
                                     np.dot(poly.ax(scale=True), c)
                poly.vertices['y'] = poly.vertices['y'] - cs(2 * pi * freq * 365.25 * poly.date.fyear) * \
                                     np.dot(poly.ay(scale=True), c)
                poly.vertices['z'] = poly.vertices['z'] - cs(2 * pi * freq * 365.25 * poly.date.fyear) * \
                                     np.dot(poly.az(scale=True), c)

    tqdm.write(' -- Reporting periodic residuals (in mm) after %s\n'
               '         365.25  182.62  365.25  182.62 \n'
               '           sin     sin     cos     cos  '
               % ('inheritance' if target_periods else 'common mode removal'))

    for s, p in enumerate(etm_objects):
        # redo the etm for this station
        # DDG: etms need to be redone because we changed the stack!
        stn_ts = self.get_station(p['NetworkCode'], p['StationCode'])

        self.cnn.query('DELETE FROM etms WHERE "soln" = \'gamit\' AND "NetworkCode" = \'%s\' AND '
                       '"StationCode" = \'%s\' AND stack = \'%s\''
                       % (p['NetworkCode'], p['StationCode'], self.name))

        # save the time series
        ts = pyETM.GamitSoln(self.cnn, stn_ts, p['NetworkCode'], p['StationCode'], self.name)
        # create the ETM object
        pyETM.GamitETM(self.cnn, p['NetworkCode'], p['StationCode'], False, False, ts)

        # obtain the updated parameters
        # they should exist for sure!
        q = self.cnn.query_float('SELECT frequencies as freq, * FROM etms '
                                 'WHERE "object" = \'periodic\' AND soln = \'gamit\' '
                                 'AND "NetworkCode" = \'%s\' AND '
                                 '"StationCode" = \'%s\' AND stack = \'%s\''
                                 % (p['NetworkCode'], p['StationCode'], self.name), as_dict=True)[0]

        if target_periods:
            n = []
            e = []
            u = []
            # inheritance invoked! we want to remove the difference between current periodic
            # terms and target terms from the parent frame
            for k in range(2):
                for f in q['freq']:
                    t = target_periods['%s.%s' % (p['NetworkCode'], p['StationCode'])]['%.3f' % (1 / f)]
                    n += [t['n'][k]]
                    e += [t['e'][k]]
                    u += [t['u'][k]]

            residuals = (np.array(q['params']) - np.array([n, e, u]).flatten())
        else:
            # residuals are the minimized frequencies
            residuals = np.array(q['params'])

        # reshape the array to NEU; BUG FIX: integer division (//) for Python 3 compatibility
        residuals = residuals.reshape((3, residuals.shape[0] // 3))
        param_count = residuals.shape[1] // 2

        print_residuals(p['NetworkCode'], p['StationCode'], residuals, p['lat'], p['lon'])

        # convert from NEU to XYZ
        for j in range(residuals.shape[1]):
            residuals[:, j] = np.array(lg2ct(residuals[0, j], residuals[1, j], residuals[2, j],
                                             p['lat'], p['lon'])).flatten()

        for i, f in enumerate(p['freq']):
            rx[f_vector == f, s] = residuals[0, i:i + param_count + 1:param_count]
            ry[f_vector == f, s] = residuals[1, i:i + param_count + 1:param_count]
            rz[f_vector == f, s] = residuals[2, i:i + param_count + 1:param_count]

    # save the position space residuals
    self.periodic_space = {'stations': {'codes': [p['NetworkCode'] + '.' + p['StationCode']
                                                  for p in etm_objects],
                                        'latlon': [[p['lat'], p['lon']] for p in etm_objects]},
                           'frequencies': f_vector.tolist(),
                           'components': ['sin', 'cos'],
                           'residuals_before_alignment': np.array([ox, oy, oz]).tolist(),
                           'residuals_after_alignment': np.array([rx, ry, rz]).tolist(),
                           'helmert_transformations': solution_vector,
                           'comments': 'Periodic space transformation. Each residual component (X, Y, Z) '
                                       'stored as X[freq, station, component]. Frequencies, stations, and '
                                       'components ordered as in respective elements.'}

    tqdm.write(' -- Done!')
def main():
    """CLI entry point: query/plot ETMs for a list of stations.

    Builds the station list from the command line (or a file), then for each
    station constructs a PPP ETM, a file-based ETM, or a GAMIT-stack ETM and,
    if requested with -q, prints the model/solution coordinate (optionally
    with velocity and seasonal terms).
    """
    parser = argparse.ArgumentParser(description='Query ETM for stations in the database. Default is PPP ETMs.')

    parser.add_argument('stnlist', type=str, nargs='+',
                        help="List of networks/stations to plot given in [net].[stnm] format or just [stnm] "
                             "(separated by spaces; if [stnm] is not unique in the database, all stations with that "
                             "name will be plotted). Use keyword 'all' to plot all stations in all networks. "
                             "If [net].all is given, all stations from network [net] will be plotted")

    parser.add_argument('-q', '--query', nargs=2, metavar='{type} {date}', type=str,
                        help='Dates to query the ETM. Specify "model" or "solution" to get the ETM value or the value '
                             'of the daily solution (if exists). Output is in XYZ.')

    parser.add_argument('-gamit', '--gamit', type=str, nargs=1, metavar='{stack}',
                        help="Plot the GAMIT time series specifying which stack name to plot.")

    parser.add_argument('-file', '--filename', type=str,
                        help="Obtain data from an external source (filename). Format should be specified with -format.")

    parser.add_argument('-format', '--format', nargs='+', type=str,
                        help="To be used together with --filename. Specify order of the fields as found in the input "
                             "file. Format strings are gpsWeek, gpsWeekDay, year, doy, fyear, month, day, mjd, "
                             "x, y, z, na. Use 'na' to specify a field that should be ignored. If fields to be ignored "
                             "are at the end of the line, then there is no need to specify those.")

    parser.add_argument('-quiet', '--quiet', action='store_true',
                        help="Do not print message when no solutions are available.")

    parser.add_argument('-vel', '--velocity', action='store_true',
                        help="Output the velocity in XYZ.")

    parser.add_argument('-seasonal', '--seasonal_terms', action='store_true',
                        help="Output the seasonal terms in NEU.")

    args = parser.parse_args()

    cnn = dbConnection.Cnn('gnss_data.cfg')

    # a single argument that is an existing file is interpreted as a station list file
    if len(args.stnlist) == 1 and os.path.isfile(args.stnlist[0]):
        print(' >> Station list read from ' + args.stnlist[0])
        stnlist = [{'NetworkCode': items[0], 'StationCode': items[1]}
                   for items in (line.strip().split('.') for line in file_readlines(args.stnlist[0]))]
    else:
        stnlist = Utils.process_stnlist(cnn, args.stnlist)

    for stn in stnlist:
        try:
            if args.gamit is None and args.filename is None:
                etm = pyETM.PPPETM(cnn, stn['NetworkCode'], stn['StationCode'], False)
            elif args.filename is not None:
                etm = from_file(args, cnn, stn)
            else:
                # GAMIT stack requested: pull the solutions from the stacks table
                polyhedrons = cnn.query_float('SELECT "X", "Y", "Z", "Year", "DOY" FROM stacks '
                                              'WHERE "name" = \'%s\' AND "NetworkCode" = \'%s\' AND '
                                              '"StationCode" = \'%s\' '
                                              'ORDER BY "Year", "DOY", "NetworkCode", "StationCode"'
                                              % (args.gamit[0], stn['NetworkCode'], stn['StationCode']))

                soln = pyETM.GamitSoln(cnn, polyhedrons, stn['NetworkCode'], stn['StationCode'], args.gamit[0])

                etm = pyETM.GamitETM(cnn, stn['NetworkCode'], stn['StationCode'], False, gamit_soln=soln)

            if args.query is not None:
                model = (args.query[0] == 'model')
                q_date = pyDate.Date(fyear=float(args.query[1]))

                # get the coordinate
                xyz, _, _, txt = etm.get_xyz_s(q_date.year, q_date.doy, force_model=model)

                strp = ''
                # if user requests velocity too, output it
                if args.velocity and etm.A is not None:
                    vxyz = etm.rotate_2xyz(etm.Linear.p.params[:, 1])
                    strp = '%8.5f %8.5f %8.5f ' % (vxyz[0, 0], vxyz[1, 0], vxyz[2, 0])

                # also output seasonal terms, if requested
                if args.seasonal_terms and etm.Periodic.frequency_count > 0:
                    strp += ' '.join('%8.5f' % (x * 1000) for x in etm.Periodic.p.params.flatten())

                print(' %s.%s %14.5f %14.5f %14.5f %8.3f %s -> %s'
                      % (etm.NetworkCode, etm.StationCode, xyz[0], xyz[1], xyz[2], q_date.fyear, strp, txt))

        except pyETM.pyETMException as e:
            # expected condition (e.g. no solutions available)
            if not args.quiet:
                print(str(e))
        # BUG FIX: was a bare "except:", which also swallowed SystemExit and
        # KeyboardInterrupt; narrow to Exception and keep the traceback report
        except Exception:
            print('Error during processing of ' + stn['NetworkCode'] + '.' + stn['StationCode'])
            print(traceback.format_exc())