def __init__(self, cnn, archive, station, date, GamitConfig):
    """Snapshot one station's metadata, APR coordinates and RINEX info for a GAMIT session."""
    # identification and coordinates, copied straight from the station record
    self.NetworkCode = station.NetworkCode
    self.StationCode = station.StationCode
    self.StationAlias = station.StationAlias
    self.lat = station.record.lat
    self.lon = station.record.lon
    self.height = station.record.height
    self.X = station.record.auto_x
    self.Y = station.record.auto_y
    self.Z = station.record.auto_z
    self.otl_H = station.otl_H

    # save the station information as text
    try:
        self.StationInfo = pyStationInfo.StationInfo(
            cnn, station.NetworkCode, station.StationCode, date).return_stninfo()
    except pyStationInfo.pyStationInfoHeightCodeNotFound as e:
        # unknown height code: warn and keep the antenna height untranslated
        tqdm.write(' -- WARNING: ' + str(e) +
                   '. Antenna height will be used as is and GAMIT may produce a fatal.')
        self.StationInfo = pyStationInfo.StationInfo(
            cnn, station.NetworkCode, station.StationCode,
            date).return_stninfo(no_dharp_translate=True)

    self.date = date  # type: pyDate.Date
    self.Archive_path = GamitConfig.archive_path

    # APR coordinates and sigmas (get_xyz_s decides which side of a jump to use, if any)
    self.Apr, self.Sigmas, self.Window, self.source = station.etm.get_xyz_s(
        self.date.year, self.date.doy,
        sigma_h=float(GamitConfig.gamitopt['sigma_floor_h']),
        sigma_v=float(GamitConfig.gamitopt['sigma_floor_v']))

    # location of the RINEX file inside the archive
    self.ArchiveFile = archive.build_rinex_path(self.NetworkCode, self.StationCode,
                                                self.date.year, self.date.doy)

    # DDG: force RINEX 2 filenames even with RINEX 3 data
    self.filename = (self.StationAlias + self.date.ddd() + '0.' +
                     self.date.yyyy()[2:4] + 'd.Z')

    # keep the PPP solution for this station-day (if any) for debugging purposes
    ppp_rows = cnn.query_float(
        'SELECT * FROM ppp_soln WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' AND '
        '"Year" = %s AND "DOY" = %s'
        % (self.NetworkCode, self.StationCode, self.date.yyyy(), self.date.ddd()),
        as_dict=True)

    self.ppp = ppp_rows[0] if ppp_rows else None
def compare_stninfo_rinex(NetworkCode, StationCode, STime, ETime, rinex_serial):
    """Compare a RINEX header receiver serial against the station info record.

    STime/ETime are the session start/end datetimes; the station info record is
    looked up at the center of the session.

    Returns a (error, mismatch) tuple:
      error    -- string describing a database or station-info failure, else None
      mismatch -- None when the serials agree (case-insensitive), otherwise
                  [date, rinex_serial, stninfo_serial_lowercase]
    """
    try:
        cnn = dbConnection.Cnn("gnss_data.cfg")
    except Exception:
        # cannot reach the database: report the traceback to the caller
        # (fixed garbled message: was "open de database when processing processing")
        return traceback.format_exc() + ' opening the database when ' \
               'processing %s.%s' % (NetworkCode, StationCode), None

    try:
        # get the center of the session
        date = STime + (ETime - STime) / 2
        date = pyDate.Date(datetime=date)

        stninfo = pyStationInfo.StationInfo(cnn, NetworkCode, StationCode, date)
    except pyStationInfo.pyStationInfoException as e:
        return "Station Information error: " + str(e), None

    # case-insensitive comparison of receiver serial numbers
    if stninfo.currentrecord.ReceiverSerial.lower() != rinex_serial.lower():
        return None, [date, rinex_serial, stninfo.currentrecord.ReceiverSerial.lower()]

    return None, None
def get_records():
    """Build menu entries, one per short-format station-info record line.

    Side effect: rebinds the module-level StnInfo to a fresh StationInfo
    (allow_empty=True) for the globally selected station `stn`.
    """
    global StnInfo

    StnInfo = pyStationInfo.StationInfo(cnn, stn['NetworkCode'],
                                        stn['StationCode'], allow_empty=True)

    if StnInfo.record_count <= 0:
        return []

    # one menu item per line of the short-format station info
    return [{'field': line, 'function': selection_main_menu}
            for line in StnInfo.return_stninfo_short().split('\n')]
def __init__(self, cnn, archive, station, date, GamitConfig):
    """Collect station metadata, APR coordinates and RINEX info for one session.

    cnn         : open database connection.
    archive     : RINEX archive structure helper (provides build_rinex_path).
    station     : station object carrying record, alias, OTL and ETM.
    date        : session date (pyDate.Date).
    GamitConfig : parsed GAMIT configuration (archive path, sigma floors).

    NOTE(review): unlike the sibling __init__ elsewhere in this file, this
    variant does not catch pyStationInfoHeightCodeNotFound — confirm whether
    the fallback is needed here too.
    """
    # identification and geodetic/cartesian coordinates from the station record
    self.NetworkCode = station.NetworkCode
    self.StationCode = station.StationCode
    self.StationAlias = station.StationAlias
    self.lat = station.record.lat
    self.lon = station.record.lon
    self.height = station.record.height
    self.X = station.record.auto_x
    self.Y = station.record.auto_y
    self.Z = station.record.auto_z
    self.otl_H = station.otl_H
    # save the station information as text
    self.StationInfo = pyStationInfo.StationInfo(cnn, station.NetworkCode,
                                                 station.StationCode,
                                                 date).return_stninfo()
    self.date = date  # type: pyDate.Date
    self.Archive_path = GamitConfig.archive_path
    # get the APR and sigmas for this date (let get_xyz_s determine which side of the jump returns, if any)
    self.Apr, self.Sigmas, \
        self.Window, self.source = station.etm.get_xyz_s(
            self.date.year, self.date.doy,
            sigma_h=float(GamitConfig.gamitopt['sigma_floor_h']),
            sigma_v=float(GamitConfig.gamitopt['sigma_floor_v']))
    # rinex file
    self.ArchiveFile = archive.build_rinex_path(self.NetworkCode,
                                                self.StationCode,
                                                self.date.year,
                                                self.date.doy)
    # RINEX-2 style compressed (Hatanaka) filename built from the station alias
    self.filename = self.StationAlias + self.date.ddd(
    ) + '0.' + self.date.yyyy()[2:4] + 'd.Z'
    # save some information for debugging purposes
    rs = cnn.query_float(
        'SELECT * FROM ppp_soln WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' AND '
        '"Year" = %s AND "DOY" = %s' %
        (self.NetworkCode, self.StationCode, self.date.yyyy(),
         self.date.ddd()),
        as_dict=True)
    # keep the first PPP solution row (if any) for inspection
    if len(rs) > 0:
        self.ppp = rs[0]
    else:
        self.ppp = None
def PrintStationInfo(cnn, stnlist, short=False):
    """Write station information for each station in stnlist to stdout.

    short=True prints the compact format; otherwise full records are preceded
    by a '# NET.STNM' header. Station-info errors are reported on stderr.
    """
    for entry in stnlist:
        net = entry['NetworkCode']
        stnm = entry['StationCode']

        try:
            stninfo = pyStationInfo.StationInfo(cnn, net, stnm)

            if short:
                text = '\n' + stninfo.return_stninfo_short() + '\n\n'
            else:
                text = ('# ' + net.upper() + '.' + stnm.upper() + '\n' +
                        stninfo.return_stninfo() + '\n')

            sys.stdout.write(text)
        except pyStationInfo.pyStationInfoException as e:
            # report the failure and keep going with the next station
            sys.stderr.write(str(e) + '\n')
def rinex_task(NetworkCode, StationCode, date, ObservationFYear, metafile):
    """Prepare one station-day CRINEZ file and its APR/station.info for AWS sync.

    Builds the APR coordinate line (XYZ + sigmas, preferring the latest global
    run APRs over the PPP ETM), normalizes/renames/decimates the RINEX file and
    drops a compressed copy in the local sync directory (unless already synced).

    Returns a 4-tuple (APR_line, stninfo_text, error_message, metafile); on any
    failure the first two elements are None and error_message explains why.
    """
    from pyRunWithRetry import RunCommandWithRetryExeception

    etm_err = ''

    # local directory as destiny for the CRINEZ files
    pwd_rinex = '/media/leleiona/aws-files/' + date.yyyy() + '/' + date.ddd()

    stop_no_aprs = False

    Config = pyOptions.ReadOptions("gnss_data.cfg")  # type: pyOptions.ReadOptions
    cnn = dbConnection.Cnn('gnss_data.cfg')

    # create Archive object
    Archive = pyArchiveStruct.RinexStruct(cnn)  # type: pyArchiveStruct.RinexStruct
    ArchiveFile = Archive.build_rinex_path(NetworkCode, StationCode, date.year, date.doy)
    ArchiveFile = os.path.join(Config.archive_path, ArchiveFile)

    # check for a station alias in the alias table
    alias = cnn.query('SELECT * FROM stationalias WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\''
                      % (NetworkCode, StationCode))

    sa = alias.dictresult()
    if len(sa) > 0:
        StationAlias = sa[0]['StationAlias']
    else:
        StationAlias = StationCode

    # create the crinez filename (RINEX-2 compressed naming convention)
    filename = StationAlias + date.ddd() + '0.' + date.yyyy()[2:4] + 'd.Z'

    try:
        # create the ETM object
        etm = pyETM.PPPETM(cnn, NetworkCode, StationCode)

        # get APRs and sigmas (only in NEU)
        Apr, sigmas, Window, source = etm.get_xyz_s(date.year, date.doy)

        del etm
    except pyETM.pyETMException as e:
        # no PPP solutions available! MUST have aprs in the last run, try that
        stop_no_aprs = True
        Window = None
        source = ''
        etm_err = str(e)
    except Exception:
        # worker boundary: return the traceback to the dispatcher
        return (None, None,
                traceback.format_exc() + ' processing ' + NetworkCode + '.' + StationCode +
                ' using node ' + platform.node() + '\n', metafile)

    # find this station-day in the latest global run APRs
    apr_tbl = cnn.query('SELECT * FROM apr_coords WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' '
                        'AND "Year" = %i AND "DOY" = %i'
                        % (NetworkCode, StationCode, date.year, date.doy))
    apr = apr_tbl.dictresult()

    if len(apr) > 0:
        # APRs exist for this station-day
        # replace PPP ETM with Mike's APRs
        Apr = numpy.array(([float(apr[0]['x'])], [float(apr[0]['y'])], [float(apr[0]['z'])]))
        sigmas = numpy.array(([float(apr[0]['sn'])], [float(apr[0]['se'])], [float(apr[0]['su'])]))
        source = apr[0]['ReferenceFrame'] + ' APRs'
    elif len(apr) == 0 and stop_no_aprs:
        # neither PPP ETM nor global-run APRs: cannot produce coordinates
        return (None, None,
                '%s.%s has no PPP solutions and no APRs from last global run for %s! '
                'Specific error from pyETM.PPPETM (if available) was: %s'
                % (NetworkCode, StationCode, date.yyyyddd(), etm_err), metafile)

    # convert sigmas to XYZ
    stn = cnn.query('SELECT * FROM stations WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\''
                    % (NetworkCode, StationCode))
    stn = stn.dictresult()
    sigmas_xyz = sigmas_neu2xyz(stn[0]['lat'], stn[0]['lon'], sigmas)

    # write the station.info
    # if no station info comes back for this date, program will print a message and continue with next
    try:
        # Use the argument 'ObservationFYear' to get the exact RINEX session fyear without opening the file
        rnx_date = pyDate.Date(fyear=float(ObservationFYear))
        stninfo = pyStationInfo.StationInfo(cnn, NetworkCode, StationCode, rnx_date, h_tolerance=12)
    except pyStationInfo.pyStationInfoException:
        # if no metadata, warn user and continue
        return (None, None,
                '%s.%s has no metadata available for this date, but a RINEX exists!'
                % (NetworkCode, StationCode), metafile)

    # check if RINEX file needs to be synced or not
    aws_sync = cnn.query('SELECT * FROM aws_sync WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' '
                         'AND "Year" = %i AND "DOY" = %i'
                         % (NetworkCode, StationCode, date.year, date.doy)).dictresult()

    if len(aws_sync) == 0:
        # only copy RINEX if not synced!
        # open the RINEX file in the Archive
        try:
            with pyRinex.ReadRinex(NetworkCode, StationCode, ArchiveFile, False) as Rinex:  # type: pyRinex.ReadRinex
                Rnx = None

                if Rinex.multiday:
                    # find the rinex that corresponds to the session being processed, if multiday
                    for rinex in Rinex.multiday_rnx_list:
                        if rinex.date == date:
                            Rnx = rinex
                            break

                    if Rnx is None:
                        return (None, None,
                                '%s.%s was a multiday file and date %8.3f could not be found!'
                                % (NetworkCode, StationCode, date.fyear), metafile)
                else:
                    # if Rinex is not multiday
                    Rnx = Rinex

                Rnx.purge_comments()
                Rnx.normalize_header(stninfo)
                Rnx.rename(filename)

                if Window is not None:
                    window_rinex(Rnx, Window)
                    # BUG FIX: was '%Y-%M-%d' — %M is minutes; %m (month) intended
                    source += ' windowed from/to ' + Window.datetime().strftime('%Y-%m-%d %H:%M:%S')

                # before creating local copy, decimate file
                Rnx.decimate(30)
                Rnx.compress_local_copyto(pwd_rinex)

        except (pyRinex.pyRinexException, RunCommandWithRetryExeception):
            # new behavior: if error occurs while generating RINEX, then copy raw file from the archive
            try:
                shutil.copy(ArchiveFile, os.path.join(pwd_rinex, filename))
            except Exception:
                return (None, None,
                        traceback.format_exc() + ' processing ' + NetworkCode + '.' + StationCode +
                        ' using node ' + platform.node() + '\n', metafile)
        except Exception:
            return (None, None,
                    traceback.format_exc() + ' processing ' + NetworkCode + '.' + StationCode +
                    ' using node ' + platform.node() + '\n', metafile)

    # everything ok, return information
    APR = '%s.%s %s %12.3f %12.3f %12.3f %5.3f %5.3f %5.3f %5.3f %5.3f %5.3f %s' \
          % (NetworkCode, StationCode, StationAlias, Apr[0, 0], Apr[1, 0], Apr[2, 0],
             sigmas_xyz[0, 0], sigmas_xyz[1, 0], sigmas_xyz[2, 0],
             sigmas[1, 0], sigmas[0, 0], sigmas[2, 0], source.replace(' ', '_'))

    return APR, stninfo.return_stninfo().replace(StationCode.upper(), StationAlias.upper()), None, metafile
def download_data(cnn, Config, stnlist, drange):
    """Download missing/incomplete RINEX for every station-day in the date range.

    cnn     : open database connection.
    Config  : options object (provides repository_data_in).
    stnlist : list of dicts with 'NetworkCode' and 'StationCode'.
    drange  : iterable of MJDs to scan.

    For each station-day with no RINEX record (or Completion < 0.5), the
    data_source table is tried in try_order until one protocol succeeds.
    """
    archive = pyArchiveStruct.RinexStruct(cnn)

    pbar = tqdm(desc='%-30s' % ' >> Downloading stations declared in data_source',
                total=len(drange) * len(stnlist), ncols=160)

    for date in [pyDate.Date(mjd=mdj) for mdj in drange]:
        for stn in stnlist:
            StationCode = stn['StationCode']
            NetworkCode = stn['NetworkCode']

            pbar.set_postfix(current='%s.%s %s' % (NetworkCode, StationCode, date.yyyyddd()))
            pbar.update()

            # skip stations with no station info at all (assumed inactive)
            try:
                _ = pyStationInfo.StationInfo(cnn, NetworkCode, StationCode, date=date)
            except pyStationInfo.pyStationInfoHeightCodeNotFound:
                # if the error is that no height code is found, then there is a record
                pass
            except pyStationInfo.pyStationInfoException:
                # no possible data here, inform and skip
                tqdm.write(' >> %s.%s skipped: no station information available -> assume station is inactive'
                           % (NetworkCode, StationCode))
                continue

            rinex = archive.get_rinex_record(NetworkCode=NetworkCode, StationCode=StationCode,
                                             ObservationYear=date.year, ObservationDOY=date.doy)
            # download when no record exists or the existing file is < 50% complete
            if not rinex:
                download = True
            elif rinex and rinex[0]['Completion'] < 0.5:
                download = True
            else:
                download = False

            if download:
                # try each declared source in ascending try_order until one works
                rs = cnn.query('SELECT * FROM data_source WHERE "NetworkCode" = \'%s\' '
                               'AND "StationCode" = \'%s\' ORDER BY try_order'
                               % (NetworkCode, StationCode))
                sources = rs.dictresult()

                for source in sources:
                    tqdm.write(' >> Need to download %s.%s %s'
                               % (NetworkCode, StationCode, date.yyyyddd()))

                    result = False
                    # expand date/station placeholders in the source path
                    folder = os.path.dirname(replace_vars(source['path'], date, StationCode))
                    destiny = os.path.join(Config.repository_data_in,
                                           source['fqdn'].replace(':', '_'))
                    filename = os.path.basename(replace_vars(source['path'], date, StationCode))

                    # dispatch on the declared transfer protocol
                    if source['protocol'].lower() == 'ftp':
                        result = download_ftp(source['fqdn'], source['username'],
                                              source['password'], folder, destiny, filename)
                    elif source['protocol'].lower() == 'sftp':
                        result = download_sftp(source['fqdn'], source['username'],
                                               source['password'], folder, destiny, filename)
                    elif source['protocol'].lower() == 'http':
                        result = download_http(source['fqdn'], folder, destiny, filename)
                    else:
                        tqdm.write(' -- Unknown protocol %s for %s.%s'
                                   % (source['protocol'].lower(), NetworkCode, StationCode))

                    if result:
                        tqdm.write(' -- Successful download of %s.%s %s'
                                   % (NetworkCode, StationCode, date.yyyyddd()))
                        # success downloading file
                        if source['format']:
                            tqdm.write(' -- File requires postprocess using scheme %s'
                                       % (source['format']))
                            process_file(os.path.join(destiny, filename), filename, destiny,
                                         source['format'], StationCode, date)
                        break
                    else:
                        tqdm.write(' -- Could not download %s.%s %s -> trying next source'
                                   % (NetworkCode, StationCode, date.yyyyddd()))
            else:
                tqdm.write(' >> File for %s.%s %s already in db'
                           % (NetworkCode, StationCode, date.yyyyddd()))

    pbar.close()
def __init__(self, cnn, NetworkCode, StationCode, dates, StationAlias=None):
    """Load a station's record, alias, ETM and RINEX availability for a run.

    cnn          : open database connection.
    NetworkCode  : network identifier.
    StationCode  : station identifier.
    dates        : two-element [start, end] sequence of pyDate.Date bounding the run.
    StationAlias : optional alias; when None it is looked up in the stationalias
                   table, falling back to StationCode.

    Raises ValueError if the station is not in the stations table.
    """
    self.NetworkCode = NetworkCode
    self.StationCode = StationCode
    self.netstn = self.NetworkCode + '.' + self.StationCode

    if StationAlias is None:
        if 'public.stationalias' in cnn.get_tables():
            rs = cnn.query_float('SELECT * FROM stationalias WHERE "NetworkCode" = \'%s\' '
                                 'AND "StationCode" = \'%s\'' % (NetworkCode, StationCode),
                                 as_dict=True)
            if len(rs):
                self.StationAlias = rs[0]['StationAlias']
            else:
                # if no record, then Alias = StationCode
                self.StationAlias = StationCode
        else:
            self.StationAlias = StationCode  # upon creation, Alias = StationCode
    else:
        self.StationAlias = StationAlias

    # defaults until the station record is loaded below
    self.record = None
    self.etm = None
    self.StationInfo = None
    self.lat = None
    self.lon = None
    self.height = None
    self.X = None
    self.Y = None
    self.Z = None
    self.otl_H = None

    rs = cnn.query_float('SELECT * FROM stations WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\''
                         % (NetworkCode, StationCode), as_dict=True)

    if len(rs) != 0:
        self.record = pyBunch.Bunch().fromDict(rs[0])
        self.otl_H = self.record.Harpos_coeff_otl
        self.lat = self.record.lat
        self.lon = self.record.lon
        self.height = self.record.height
        self.X = self.record.auto_x
        self.Y = self.record.auto_y
        self.Z = self.record.auto_z

        # get the available dates for the station (RINEX files with conditions to be processed)
        rs = cnn.query('SELECT "ObservationYear" as y, "ObservationDOY" as d FROM rinex_proc '
                       'WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' AND '
                       '("ObservationYear", "ObservationDOY") BETWEEN (%s) AND (%s) AND '
                       '"Completion" >= %.3f AND "Interval" <= %i'
                       % (NetworkCode, StationCode,
                          dates[0].yyyy() + ', ' + dates[0].ddd(),
                          dates[1].yyyy() + ', ' + dates[1].ddd(), COMPLETION, INTERVAL))

        self.good_rinex = [pyDate.Date(year=r['y'], doy=r['d']) for r in rs.dictresult()]

        # create a list of the missing days
        # PERF FIX: use a set for membership — the original list scan made this
        # loop O(days * good_days) over long date ranges
        good_rinex = {d.mjd for d in self.good_rinex}

        self.missing_rinex = [pyDate.Date(mjd=d)
                              for d in range(dates[0].mjd, dates[1].mjd + 1)
                              if d not in good_rinex]

        self.etm = pyETM.PPPETM(cnn, NetworkCode, StationCode)  # type: pyETM.PPPETM
        self.StationInfo = pyStationInfo.StationInfo(cnn, NetworkCode, StationCode)

        # DDG: report RINEX files with Completion < 0.5
        rs = cnn.query_float('SELECT "ObservationYear" as y, "ObservationDOY" as d FROM rinex_proc '
                             'WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' AND '
                             '("ObservationYear", "ObservationDOY") BETWEEN (%s) AND (%s) AND '
                             '"Completion" < %.3f AND "Interval" <= %i'
                             % (NetworkCode, StationCode,
                                dates[0].yyyy() + ', ' + dates[0].ddd(),
                                dates[1].yyyy() + ', ' + dates[1].ddd(), COMPLETION, INTERVAL))
        if len(rs):
            tqdm.write(' WARNING: The requested date interval has %i days with < 50%% of observations. '
                       'These days will not be processed.' % len(rs))
    else:
        raise ValueError('Specified station %s.%s could not be found' % (NetworkCode, StationCode))
def generate_kml(cnn, project, stations):
    """Write production/<project>.kmz with every station in the database.

    Stations belonging to this run (the `stations` list) go in a project
    folder, colored green (>=100 RINEX and >=60% PPP completion) or red;
    all remaining stations go in a second folder with a plain circle icon.
    """
    stnlist = [s['NetworkCode'] + '.' + s['StationCode'] for s in stations]

    tqdm.write(' >> Generating KML for this run (see production directory)...')

    kml = simplekml.Kml()

    # all stations except temporary/unnamed networks (NetworkCode starting with '?')
    rs = cnn.query_float('SELECT * FROM stations WHERE "NetworkCode" NOT LIKE \'?%\' '
                         'ORDER BY "NetworkCode", "StationCode" ', as_dict=True)

    tqdm.write(' >> Adding stations in database')

    folder1 = kml.newfolder(name=project)
    folder2 = kml.newfolder(name='all stations')

    # circle icon: stations not in this run
    stylec = simplekml.StyleMap()
    stylec.normalstyle.iconstyle.icon.href = 'http://maps.google.com/mapfiles/kml/shapes/placemark_circle.png'
    stylec.normalstyle.labelstyle.scale = 0
    stylec.highlightstyle.iconstyle.icon.href = 'http://maps.google.com/mapfiles/kml/shapes/placemark_circle.png'
    stylec.highlightstyle.labelstyle.scale = 2

    # green square: run stations with good data coverage
    styles_ok = simplekml.StyleMap()
    styles_ok.normalstyle.iconstyle.icon.href = 'http://maps.google.com/mapfiles/kml/shapes/placemark_square.png'
    styles_ok.normalstyle.iconstyle.color = 'ff00ff00'
    styles_ok.normalstyle.labelstyle.scale = 0
    styles_ok.highlightstyle.iconstyle.icon.href = 'http://maps.google.com/mapfiles/kml/shapes/placemark_square.png'
    styles_ok.highlightstyle.iconstyle.color = 'ff00ff00'
    styles_ok.highlightstyle.labelstyle.scale = 2

    # red square: run stations with poor data coverage
    styles_nok = simplekml.StyleMap()
    styles_nok.normalstyle.iconstyle.icon.href = 'http://maps.google.com/mapfiles/kml/shapes/placemark_square.png'
    styles_nok.normalstyle.iconstyle.color = 'ff0000ff'
    styles_nok.normalstyle.labelstyle.scale = 0
    styles_nok.highlightstyle.iconstyle.icon.href = 'http://maps.google.com/mapfiles/kml/shapes/placemark_square.png'
    styles_nok.highlightstyle.iconstyle.color = 'ff0000ff'
    styles_nok.highlightstyle.labelstyle.scale = 2

    for stn in tqdm(rs, ncols=80):
        # RINEX file count and PPP solution count for the completion figure
        count = cnn.query_float('SELECT count(*) as cc FROM rinex_proc WHERE "NetworkCode" = \'%s\' '
                                'AND "StationCode" = \'%s\''
                                % (stn['NetworkCode'], stn['StationCode']))

        ppp_s = cnn.query_float('SELECT count(*) as cc FROM ppp_soln WHERE "NetworkCode" = \'%s\' '
                                'AND "StationCode" = \'%s\''
                                % (stn['NetworkCode'], stn['StationCode']))

        stninfo = pyStationInfo.StationInfo(cnn, stn['NetworkCode'],
                                            stn['StationCode'], allow_empty=True)

        # percentage of RINEX days with a PPP solution ('NA' when no RINEX at all)
        if count[0][0]:
            completion = '%.1f' % (float(ppp_s[0][0]) / float(count[0][0]) * 100)
        else:
            completion = 'NA'

        if stn['DateStart']:
            DS = '%.3f' % stn['DateStart']
            DE = '%.3f' % stn['DateEnd']
        else:
            DS = 'NA'
            DE = 'NA'

        if stn['NetworkCode'] + '.' + stn['StationCode'] in stnlist:
            folder = folder1
            # mark the stations with less than 100 observations or with less than 60% completion (PPP)
            if count[0][0] >= 100 and (float(ppp_s[0][0]) / float(count[0][0]) * 100) >= 60.0:
                style = styles_ok
            else:
                style = styles_nok
        else:
            folder = folder2
            style = stylec

        # base64-encoded PNG of the observation distribution for the balloon
        plt = plot_station_info_rinex(cnn, stn['NetworkCode'], stn['StationCode'], stninfo)

        pt = folder.newpoint(name=stn['NetworkCode'] + '.' + stn['StationCode'],
                             coords=[(stn['lon'], stn['lat'])])
        pt.stylemap = style

        pt.description = """<strong>%s -> %s</strong> RINEX count: %i PPP soln: %s%%<br><br> <strong>Station Information:</strong><br> <table width="880" cellpadding="0" cellspacing="0"> <tr> <td align="left" valign="top"> <p style="font-family: monospace; font-size: 8pt;">%s<br><br> <strong>Observation distribution:</strong><br> </p> <img src="data:image/png;base64, %s" alt="Observation information" /> </tr> </td> </table>""" % (DS, DE, count[0][0], completion, stninfo.return_stninfo_short().replace('\n', '<br>'), plt)

    if not os.path.exists('production'):
        os.makedirs('production')

    kml.savekmz('production/' + project + '.kmz')
def main():
    """Entry point of the database integrity / metadata check tool.

    Parses the command line, opens the database and job server, then runs
    whichever check/fix operations were requested.
    """
    parser = argparse.ArgumentParser(
        description=
        'Database integrity tools, metadata check and fixing tools program')

    parser.add_argument(
        'stnlist', type=str, nargs='+', metavar='all|net.stnm',
        help=
        "List of networks/stations to process given in [net].[stnm] format or just [stnm] "
        "(separated by spaces; if [stnm] is not unique in the database, all stations with that "
        "name will be processed). Use keyword 'all' to process all stations in the database. "
        "If [net].all is given, all stations from network [net] will be processed. "
        "Alternatevily, a file with the station list can be provided.")

    parser.add_argument(
        '-d', '--date_filter', nargs='+', metavar='date',
        help='Date range filter for all operations. '
        'Can be specified in wwww-d, yyyy_ddd, yyyy/mm/dd or fyear format')

    parser.add_argument(
        '-rinex', '--check_rinex', choices=['fix', 'report'], type=str, nargs=1,
        help=
        'Check the RINEX integrity of the archive-database by verifying that the RINEX files '
        'reported in the rinex table exist in the archive. If argument = "fix" and a RINEX file '
        'does not exist, remove the record. PPP records or gamit_soln are deleted. If argument = '
        '"report" then just list the missing files.')

    parser.add_argument(
        '-rnx_count', '--rinex_count', action='store_true',
        help='Count the total number of RINEX files (unique station-days) '
        'per day for a given time interval.')

    parser.add_argument(
        '-stnr', '--station_info_rinex', action='store_true',
        help=
        'Check that the receiver serial number in the rinex headers agrees with the station info '
        'receiver serial number.')

    # NOTE(review): --station_info_solutions is declared but not acted on in
    # this function — confirm whether the handler was lost or lives elsewhere.
    parser.add_argument(
        '-stns', '--station_info_solutions', action='store_true',
        help='Check that the PPP hash values match the station info hash.')

    parser.add_argument(
        '-stnp', '--station_info_proposed', metavar='ignore_days',
        const=0, type=int, nargs='?',
        help=
        'Output a proposed station.info using the RINEX metadata. Optional, specify [ignore_days] '
        'to ignore station.info records <= days.')

    parser.add_argument(
        '-stnc', '--station_info_check', action='store_true',
        help=
        'Check the consistency of the station information records in the database. Date range '
        'does not apply. Also, check that the RINEX files fall within a valid station information '
        'record.')

    parser.add_argument(
        '-g', '--data_gaps', metavar='ignore_days', const=0, type=int, nargs='?',
        help=
        'Check the RINEX files in the database and look for gaps (missing days). '
        'Optional, [ignore_days] with the smallest gap to display.')

    parser.add_argument('-gg', '--graphical_gaps', action='store_true',
                        help='Visually output RINEX gaps for stations.')

    parser.add_argument(
        '-sc', '--spatial_coherence', choices=['exclude', 'delete', 'noop'],
        type=str, nargs=1,
        help=
        'Check that the RINEX files correspond to the stations they are linked to using their '
        'PPP coordinate. If keyword [exclude] or [delete], add the PPP solution to the excluded '
        'table or delete the PPP solution. If [noop], then only report but do not '
        'exlude or delete.')

    parser.add_argument(
        '-print', '--print_stninfo', choices=['long', 'short'], type=str, nargs=1,
        help=
        'Output the station info to stdout. [long] outputs the full line of the station info. '
        '[short] outputs a short version (better for screen visualization).')

    parser.add_argument(
        '-r', '--rename', metavar='net.stnm', nargs=1,
        help=
        "Takes the data from the station list and renames (merges) it to net.stnm. "
        "It also changes the rinex filenames in the archive to match those of the new destiny "
        "station. Only a single station can be given as the origin and destiny. "
        "Limit the date range using the -d option.")

    parser.add_argument(
        '-es', '--exclude_solutions', metavar=('{start_date}', '{end_date}'), nargs=2,
        help=
        'Exclude PPP solutions (by adding them to the excluded table) between {start_date} '
        'and {end_date}')

    parser.add_argument(
        '-del', '--delete_rinex',
        metavar=('{start_date}', '{end_date}', '{completion}'), nargs=3,
        help='Delete RINEX files (and associated solutions, PPP and GAMIT) '
        'from archive between {start_date} and {end_date} with completion <= {completion}. '
        'Completion ranges form 1.0 to 0.0. Use 1.0 to delete all data. '
        'Operation cannot be undone!')

    parser.add_argument('-np', '--noparallel', action='store_true',
                        help="Execute command without parallelization.")

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")  # type: dbConnection.Cnn

    # create the execution log
    cnn.insert('executions', script='pyIntegrityCheck.py')

    Config = pyOptions.ReadOptions("gnss_data.cfg")  # type: pyOptions.ReadOptions

    stnlist = Utils.process_stnlist(cnn, args.stnlist)

    JobServer = pyJobServer.JobServer(
        Config, run_parallel=not args.noparallel)  # type: pyJobServer.JobServer

    #####################################
    # date filter

    # default: effectively unbounded range; overridden by -d if given
    # (presumably process_date handles a None filter — confirm)
    dates = [pyDate.Date(year=1980, doy=1), pyDate.Date(year=2100, doy=1)]
    try:
        dates = process_date(args.date_filter)
    except ValueError as e:
        parser.error(str(e))

    #####################################

    if args.check_rinex:
        CheckRinexIntegrity(cnn, Config, stnlist, dates[0], dates[1],
                            args.check_rinex[0], JobServer)

    #####################################

    if args.rinex_count:
        RinexCount(cnn, stnlist, dates[0], dates[1])

    #####################################

    if args.station_info_rinex:
        StnInfoRinexIntegrity(cnn, stnlist, dates[0], dates[1], JobServer)

    #####################################

    if args.station_info_check:
        StnInfoCheck(cnn, stnlist, Config)

    #####################################

    if args.data_gaps is not None:
        GetStnGaps(cnn, stnlist, args.data_gaps, dates[0], dates[1])

    if args.graphical_gaps:
        VisualizeGaps(cnn, stnlist, dates[0], dates[1])

    #####################################

    # NOTE(review): the chosen exclude/delete/noop keyword is never passed to
    # CheckSpatialCoherence — confirm the callee's signature.
    if args.spatial_coherence is not None:
        CheckSpatialCoherence(cnn, stnlist, dates[0], dates[1])

    #####################################

    if args.exclude_solutions is not None:
        try:
            dates = process_date(args.exclude_solutions)
        except ValueError as e:
            parser.error(str(e))

        ExcludeSolutions(cnn, stnlist, dates[0], dates[1])

    #####################################

    if args.print_stninfo is not None:
        if args.print_stninfo[0] == 'short':
            PrintStationInfo(cnn, stnlist, True)
        elif args.print_stninfo[0] == 'long':
            PrintStationInfo(cnn, stnlist, False)
        else:
            parser.error(
                'Argument for print_stninfo has to be either long or short')

    #####################################

    if args.station_info_proposed is not None:
        for stn in stnlist:
            stninfo = pyStationInfo.StationInfo(cnn, stn['NetworkCode'],
                                                stn['StationCode'],
                                                allow_empty=True)
            sys.stdout.write(
                stninfo.rinex_based_stninfo(args.station_info_proposed))

    #####################################

    if args.delete_rinex is not None:
        try:
            dates = process_date(args.delete_rinex[0:2])
        except ValueError as e:
            parser.error(str(e))

        DeleteRinex(cnn, stnlist, dates[0], dates[1],
                    float(args.delete_rinex[2]))

    #####################################

    if args.rename:
        if len(stnlist) > 1:
            parser.error(
                'Only a single station should be given for the origin station')

        if '.' not in args.rename[0]:
            parser.error('Format for destiny station should be net.stnm')
        else:
            DestNetworkCode = args.rename[0].split('.')[0]
            DestStationCode = args.rename[0].split('.')[1]

            RenameStation(cnn, stnlist[0]['NetworkCode'],
                          stnlist[0]['StationCode'], DestNetworkCode,
                          DestStationCode, dates[0], dates[1],
                          Config.archive_path)

    JobServer.close_cluster()
def RenameStation(cnn, NetworkCode, StationCode, DestNetworkCode, DestStationCode,
                  start_date, end_date, archive_path):
    """Transfer RINEX files (db records + archive files) from one station to another.

    Moves every RINEX of NetworkCode.StationCode observed between start_date and
    end_date to DestNetworkCode.DestStationCode, renaming files on disk and
    updating the rinex table inside a transaction per file. Also copies the
    matching station info record to the destination when it lacks one.

    Raises (after rolling back the open transaction) on any unexpected error.
    """
    # make sure the destiny station exists
    try:
        rs = cnn.query('SELECT * FROM stations WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\''
                       % (DestNetworkCode, DestStationCode))

        if rs.ntuples() == 0:
            # BUG FIX: Python 2 print statements converted to print() calls
            # (syntax error under Python 3; print() is valid in both)
            print('The requested destiny station does not exist. Please create it and try again')
        else:
            # select the original rinex files names
            # this is the set that will effectively be transferred to the dest net and stn codes
            # I select this portion of data here and not after we rename the station to prevent picking up more data
            # (due to the time window) that is ALREADY in the dest station.
            rs = cnn.query('SELECT * FROM rinex WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' AND '
                           '"ObservationSTime" BETWEEN \'%s\' AND \'%s\''
                           % (NetworkCode, StationCode, start_date.yyyymmdd(), end_date.yyyymmdd()))

            original_rs = rs.dictresult()

            print(" >> Beginning transfer of %i rinex files from %s.%s to %s.%s"
                  % (len(original_rs), NetworkCode, StationCode, DestNetworkCode, DestStationCode))

            for src_rinex in tqdm(original_rs):
                # rename files
                Archive = pyArchiveStruct.RinexStruct(cnn)  # type: pyArchiveStruct.RinexStruct

                src_file_path = Archive.build_rinex_path(NetworkCode, StationCode,
                                                         src_rinex['ObservationYear'],
                                                         src_rinex['ObservationDOY'],
                                                         filename=src_rinex['Filename'])

                src_path = os.path.split(os.path.join(archive_path, src_file_path))[0]
                src_file = os.path.split(os.path.join(archive_path, src_file_path))[1]

                dest_file = src_file.replace(StationCode, DestStationCode)

                cnn.begin_transac()
                # update the NetworkCode and StationCode and filename information in the db
                cnn.query('UPDATE rinex SET "NetworkCode" = \'%s\', "StationCode" = \'%s\', "Filename" = \'%s\' '
                          'WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' AND "ObservationYear" = %i AND '
                          '"ObservationDOY" = %i AND "Filename" = \'%s\''
                          % (DestNetworkCode, DestStationCode, dest_file.replace('d.Z', 'o'),
                             NetworkCode, StationCode,
                             src_rinex['ObservationYear'], src_rinex['ObservationDOY'],
                             src_rinex['Filename']))

                # DO NOT USE pyArchiveStruct because we have an active transaction and the change is not visible yet
                # because we don't know anything about the archive's stucture,
                # we just try to replace the names and that should suffice
                dest_path = src_path.replace(StationCode, DestStationCode).replace(
                    NetworkCode, DestNetworkCode)

                # check that the destination path exists (it should, but...)
                if not os.path.isdir(dest_path):
                    os.makedirs(dest_path)

                shutil.move(os.path.join(src_path, src_file),
                            os.path.join(dest_path, dest_file))

                # if we are here, we are good. Commit
                cnn.commit_transac()

                date = pyDate.Date(year=src_rinex['ObservationYear'],
                                   doy=src_rinex['ObservationDOY'])

                # Station info transfer
                try:
                    stninfo_dest = pyStationInfo.StationInfo(
                        cnn, DestNetworkCode, DestStationCode, date)  # type: pyStationInfo.StationInfo
                    # no error, nothing to do.
                except pyStationInfo.pyStationInfoException:
                    # failed to get a valid station info record! we need to incorporate the station info record from
                    # the source station
                    try:
                        stninfo_dest = pyStationInfo.StationInfo(cnn, DestNetworkCode, DestStationCode)
                        stninfo_src = pyStationInfo.StationInfo(cnn, NetworkCode, StationCode, date)

                        # force the station code in record to be the same as deststationcode
                        record = stninfo_src.currentrecord
                        record['StationCode'] = DestStationCode

                        stninfo_dest.InsertStationInfo(record)
                    except pyStationInfo.pyStationInfoException as e:
                        # if there is no station info for this station either, warn the user!
                        tqdm.write(' -- Error while updating Station Information! %s' % (str(e)))

    except Exception:
        # roll back whatever transaction is open before propagating
        cnn.rollback_transac()
        raise
def StnInfoCheck(cnn, stnlist, Config):
    """Check the station information records of a list of stations for inconsistencies.

    For each station in stnlist the following checks are performed and
    reported to stdout:
      * more than one open record (Session Stop = 9999 999)
      * antennas not found in the ANTEX file of the corresponding frame
      * overlapping station information records
      * gaps between consecutive records that contain RINEX data
      * RINEX data before the first record start or after the last record end
    When any problem is found a short version of the station information is
    printed; otherwise a "No problems found" notice goes to stderr.

    :param cnn: open database connection
    :param stnlist: list of dicts with 'NetworkCode' and 'StationCode' keys
    :param Config: configuration object exposing options['frames']
    """
    # read all the available atx files, keyed by frame name
    atx = dict()
    for frame in Config.options['frames']:
        atx[frame['name']] = parse_atx_antennas(frame['atx'])

    for stn in stnlist:
        NetworkCode = stn['NetworkCode']
        StationCode = stn['StationCode']
        first_obs = False
        try:
            stninfo = pyStationInfo.StationInfo(cnn, NetworkCode, StationCode)  # type: pyStationInfo.StationInfo

            # there should not be more than one entry with 9999 999 in DateEnd
            empty_edata = [[record['DateEnd'], record['DateStart']]
                           for record in stninfo.records if not record['DateEnd']]

            if len(empty_edata) > 1:
                list_empty = [pyDate.Date(datetime=record[1]).yyyyddd() for record in empty_edata]
                list_empty = ', '.join(list_empty)
                sys.stdout.write('%s.%s: There is more than one station info entry with Session Stop = 9999 999 '
                                 'Session Start -> %s\n' % (NetworkCode, StationCode, list_empty))

            # there should not be a DateStart < DateEnd of different record
            list_problems = []
            atx_problem = False
            for record in stninfo.records:
                # check existence of ANTENNA in ATX
                # determine the reference frame using the start date
                frame, atx_file = determine_frame(Config.options['frames'], record['DateStart'])

                # check if antenna in atx, if not, produce a warning
                if record['AntennaCode'] not in atx[frame]:
                    sys.stdout.write('%s.%s: %-16s%s -> Not found in ANTEX file %s (%s) - dome not checked\n'
                                     % (NetworkCode, StationCode, record['AntennaCode'], record['RadomeCode'],
                                        os.path.basename(atx_file), frame))
                    atx_problem = True

                # record any overlap that does not share this record's start date
                overlaps = stninfo.overlaps(record)
                if overlaps:
                    for overlap in overlaps:
                        if overlap['DateStart'].datetime() != record['DateStart'].datetime():
                            list_problems.append([str(overlap['DateStart']), str(overlap['DateEnd']),
                                                  str(record['DateStart']), str(record['DateEnd'])])

            station_list_gaps = []
            if len(stninfo.records) > 1:
                # get gaps between stninfo records
                for erecord, srecord in zip(stninfo.records[0:-1], stninfo.records[1:]):
                    sdate = srecord['DateStart']
                    edate = erecord['DateEnd']

                    # if the delta between previous and current session exceeds one second, check if any rinex falls
                    # in that gap
                    if (sdate.datetime() - edate.datetime()).total_seconds() > 1:
                        count = cnn.query('SELECT count(*) as rcount FROM rinex_proc '
                                          'WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' AND '
                                          '"ObservationETime" > \'%s\' AND "ObservationSTime" < \'%s\' AND '
                                          '"Completion" >= 0.5'
                                          % (NetworkCode, StationCode, edate.strftime(),
                                             sdate.strftime())).dictresult()[0]['rcount']

                        if count != 0:
                            station_list_gaps += [[count,
                                                   [str(erecord['DateStart']), str(erecord['DateEnd'])],
                                                   [str(srecord['DateStart']), str(srecord['DateEnd'])]]]

            # there should not be RINEX data outside the station info window
            rs = cnn.query('SELECT min("ObservationSTime") as first_obs, max("ObservationSTime") as last_obs '
                           'FROM rinex_proc WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' '
                           'AND "Completion" >= 0.5' % (NetworkCode, StationCode))
            # only check RINEX files with more than 12 hours of data
            rnxtbl = rs.dictresult()

            if rnxtbl[0]['first_obs'] is not None:
                # to avoid empty stations (no rinex data)
                if rnxtbl[0]['first_obs'] < stninfo.records[0]['DateStart'].datetime():
                    d1 = pyDate.Date(datetime=rnxtbl[0]['first_obs'])
                    d2 = stninfo.records[0]['DateStart']
                    sys.stdout.write('%s.%s: There is one or more RINEX observation file(s) outside the '
                                     'Session Start -> RINEX: %s STNINFO: %s\n'
                                     % (NetworkCode, StationCode, d1.yyyyddd(), d2.yyyyddd()))
                    first_obs = True

                if rnxtbl[0]['last_obs'] > stninfo.records[-1]['DateEnd'].datetime():
                    d1 = pyDate.Date(datetime=rnxtbl[0]['last_obs'])
                    d2 = stninfo.records[-1]['DateEnd']
                    sys.stdout.write('%s.%s: There is one or more RINEX observation file(s) outside the last '
                                     'Session End -> RINEX: %s STNINFO: %s\n'
                                     % (NetworkCode, StationCode, d1.yyyyddd(), d2.yyyyddd()))
                    first_obs = True

            if len(station_list_gaps) > 0:
                for gap in station_list_gaps:
                    sys.stdout.write('%s.%s: There is a gap with %s RINEX file(s) between '
                                     'the following station information records: %s -> %s :: %s -> %s\n'
                                     % (NetworkCode, StationCode, gap[0],
                                        gap[1][0], gap[1][1], gap[2][0], gap[2][1]))

            if len(list_problems) > 0:
                list_problems = [record[0] + ' -> ' + record[1] + ' conflicts ' + record[2] + ' -> ' + record[3]
                                 for record in list_problems]
                list_problems = '\n '.join(list_problems)
                # typo fixed: "recods" -> "records"
                sys.stdout.write('%s.%s: There are conflicting records in the station information table\n %s\n'
                                 % (NetworkCode, StationCode, list_problems))

            if len(empty_edata) > 1 or len(list_problems) > 0 or first_obs or len(station_list_gaps) > 0 or \
                    atx_problem:
                # only print a partial of the station info:
                sys.stdout.write('\n' + stninfo.return_stninfo_short() + '\n\n')
            else:
                sys.stderr.write('%s.%s: No problems found\n' % (NetworkCode, StationCode))

            sys.stdout.flush()

        except pyStationInfo.pyStationInfoException as e:
            tqdm.write(str(e))
def __init__(self, cnn, NetworkCode, StationCode, dates):
    """Load station metadata, usable RINEX days and the ETM for one station.

    Queries the stations table for the given network/station pair, then
    collects the RINEX days inside the [dates[0], dates[1]] window that meet
    the completion/interval conditions, the complement (missing days), the
    PPP ETM and the station information.

    Raises ValueError when the station does not exist in the database.
    """
    self.NetworkCode = NetworkCode
    self.StationCode = StationCode
    # upon creation, Alias = StationCode
    self.StationAlias = StationCode
    self.record = None
    self.etm = None
    self.StationInfo = None
    self.lat = None
    self.lon = None
    self.height = None
    self.X = None
    self.Y = None
    self.Z = None
    self.otl_H = None

    metadata = cnn.query_float('SELECT * FROM stations WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\''
                               % (NetworkCode, StationCode), as_dict=True)

    # guard clause: nothing else makes sense without a station record
    if not metadata:
        raise ValueError('Specified station %s.%s could not be found' % (NetworkCode, StationCode))

    self.record = pyBunch.Bunch().fromDict(metadata[0])

    self.otl_H = self.record.Harpos_coeff_otl
    self.lat = self.record.lat
    self.lon = self.record.lon
    self.height = self.record.height
    self.X = self.record.auto_x
    self.Y = self.record.auto_y
    self.Z = self.record.auto_z

    # get the available dates for the station (RINEX files with conditions to be processed)
    rinex_days = cnn.query('SELECT "ObservationYear" as y, "ObservationDOY" as d FROM rinex_proc '
                           'WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' AND '
                           '"ObservationSTime" >= \'%s\' AND "ObservationETime" <= \'%s\' AND '
                           '"Completion" >= %.3f AND "Interval" <= %i'
                           % (NetworkCode, StationCode, (dates[0] - 1).first_epoch(),
                              (dates[1] + 1).last_epoch(), COMPLETION, INTERVAL)).dictresult()

    # keep only the days that fall inside the requested window
    self.good_rinex = []
    for row in rinex_days:
        day = pyDate.Date(year=row['y'], doy=row['d'])
        if dates[0] <= day <= dates[1]:
            self.good_rinex.append(day)

    # create a list of the missing days
    observed = set(day.mjd for day in self.good_rinex)
    self.missing_rinex = [pyDate.Date(mjd=mjd)
                          for mjd in range(dates[0].mjd, dates[1].mjd + 1)
                          if mjd not in observed]

    self.etm = pyETM.PPPETM(cnn, NetworkCode, StationCode)  # type: pyETM.PPPETM
    self.StationInfo = pyStationInfo.StationInfo(cnn, NetworkCode, StationCode)