def check_rinex_stn(NetworkCode, StationCode, start_date, end_date):
    """
    Integrity check of the rinex table against the archive on disk for one station.

    Queries every rinex record between start_date and end_date and verifies the
    corresponding file exists in the archive. Records whose file is missing are
    removed from the database and logged in the events table.

    :param NetworkCode: network code of the station to check
    :param StationCode: station code to check
    :param start_date: pyDate-like object (yyyymmdd()) for the window start
    :param end_date: pyDate-like object (yyyymmdd()) for the window end
    :return: tuple (error_message or None, list of missing file paths or None)
    """
    # open a database connection and load the configuration; on failure return
    # the traceback so the parent process can report which node had the problem
    try:
        cnn = dbConnection.Cnn("gnss_data.cfg")
        Config = pyOptions.ReadOptions("gnss_data.cfg")
    except Exception:
        return traceback.format_exc() + ' processing: (' + NetworkCode + '.' + StationCode \
               + ') using node ' + platform.node(), None

    try:
        Archive = pyArchiveStruct.RinexStruct(cnn)

        rs = cnn.query('SELECT * FROM rinex WHERE "NetworkCode" = \'%s\' AND '
                       '"StationCode" = \'%s\' AND '
                       '"ObservationSTime" BETWEEN \'%s\' AND \'%s\' '
                       'ORDER BY "ObservationSTime"'
                       % (NetworkCode, StationCode, start_date.yyyymmdd(), end_date.yyyymmdd()))

        missing = []

        for row in rs.dictresult():
            crinex_path = os.path.join(Config.archive_path,
                                       Archive.build_rinex_path(NetworkCode, StationCode,
                                                                row['ObservationYear'],
                                                                row['ObservationDOY'],
                                                                filename=row['Filename']))
            if os.path.exists(crinex_path):
                continue

            # record exists in the db but the file is gone from the archive:
            # purge the record and leave a trace in the events table
            Archive.remove_rinex(row)

            cnn.insert_event(pyEvents.Event(
                Description='A missing RINEX file was found during RINEX integrity check: ' + crinex_path +
                            '. It has been removed from the database. Consider rerunning PPP for this station.',
                NetworkCode=NetworkCode,
                StationCode=StationCode,
                Year=row['ObservationYear'],
                DOY=row['ObservationDOY']))

            missing.append(crinex_path)

        return None, missing

    except Exception:
        return traceback.format_exc() + ' processing: ' + NetworkCode + '.' + \
               StationCode + ' using node ' + platform.node(), None
def DeleteStationInfo(self, record):
    """
    Delete a station information record from the stationinfo table.

    The deletion is logged in the events table first (so the full record text
    is preserved), then the row is removed and the in-memory record list is
    refreshed.

    :param record: station information record to delete (must support
                   ['DateStart'] and .database())
    """
    # log before deleting so the event keeps the complete record contents
    self.cnn.insert_event(pyEvents.Event(
        Description=record['DateStart'].strftime() + ' has been deleted:\n' + str(record),
        StationCode=self.StationCode,
        NetworkCode=self.NetworkCode))

    self.cnn.delete('stationinfo', record.database())

    # resync the cached records with the database
    self.load_stationinfo_records()
def UpdateStationInfo(self, record, new_record):
    """
    Update an existing station information record with new values.

    Verifies that new_record does not overlap with any OTHER record (overlap
    with the record being updated is expected and allowed), logs the change in
    the events table, then applies the update. If DateStart itself changed, it
    is updated with a raw query first because DateStart is part of the primary
    key and cannot go through the normal update path.

    :param record: the existing record being replaced
    :param new_record: the record with the new values
    :raises pyStationInfoException: if new_record overlaps another record
    """
    # avoid problems with trying to insert records from other stations. Force this NetworkCode
    record['NetworkCode'] = self.NetworkCode
    new_record['NetworkCode'] = self.NetworkCode

    if self.NetworkCode and self.StationCode:
        # check the possible overlaps. This record will probably overlap with itself, so check that the overlap has
        # the same DateStart as the original record (that way we know it's an overlap with itself)
        overlaps = self.overlaps(new_record)

        for overlap in overlaps:
            if overlap['DateStart'].datetime() != record['DateStart'].datetime():
                # it's overlapping with another record, raise error
                raise pyStationInfoException('Record %s -> %s overlaps with existing station.info records: %s -> %s'
                                             % (str(record['DateStart']), str(record['DateEnd']),
                                                str(overlap['DateStart']), str(overlap['DateEnd'])))

        # insert event (before updating to save all information)
        event = pyEvents.Event(Description=record['DateStart'].strftime() +
                                           ' has been updated:\n' + str(new_record) +
                                           '\n+++++++++++++++++++++++++++++++++++++\n' +
                                           'Previous record:\n' + str(record) + '\n',
                               NetworkCode=self.NetworkCode,
                               StationCode=self.StationCode)
        self.cnn.insert_event(event)

        if new_record['DateStart'] != record['DateStart']:
            # DateStart is part of the primary key: update it with a raw query
            # keyed on the OLD DateStart before the regular update below
            self.cnn.query('UPDATE stationinfo SET "DateStart" = \'%s\' '
                           'WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' AND "DateStart" = \'%s\'' %
                           (new_record['DateStart'].strftime(), self.NetworkCode, self.StationCode,
                            record['DateStart'].strftime()))

        self.cnn.update('stationinfo', new_record.database(),
                        NetworkCode=self.NetworkCode,
                        StationCode=self.StationCode,
                        DateStart=new_record['DateStart'].datetime())

        # resync the cached records with the database
        self.load_stationinfo_records()
def insert_data(cnn, archive, rinexinfo):
    """
    Insert a processed RINEX file into the archive and remove the original.

    If archive.insert_rinex raises dbInserErr it propagates to the caller.
    When the file is a duplicate (same interval and completion as an existing
    file) an event is logged so the deletion is traceable.

    :param cnn: open database connection
    :param archive: pyArchiveStruct.RinexStruct instance
    :param rinexinfo: pyRinex object for the file being inserted
    """
    was_inserted = archive.insert_rinex(rinexobj=rinexinfo)

    # the original file is removed whether or not the insert happened
    os.remove(rinexinfo.origin_file)

    if not was_inserted:
        # duplicate: record the deletion in the events table so the missing
        # rinex does not look like an unexplained gap later on
        cnn.insert_event(pyEvents.Event(
            Description=rinexinfo.crinez + ' had the same interval and completion as an existing file. '
                        'CRINEZ deleted from data_in.',
            NetworkCode=rinexinfo.NetworkCode,
            StationCode=rinexinfo.StationCode,
            Year=int(rinexinfo.date.year),
            DOY=int(rinexinfo.date.doy)))
def verify_rinex_multiday(cnn, rinexinfo, Config):
    """
    Check whether rinexinfo is a multiday RINEX (more than one day of data).

    If it is, the per-day split files are compressed into
    repository/data_in_retry, an event is logged, the original crinez is
    deleted from the repository and False is returned (the caller must not
    insert the file). Single-day files return True.

    :param cnn: open database connection
    :param rinexinfo: pyRinex object to inspect
    :param Config: pyOptions.ReadOptions configuration object
    :return: True if the caller may continue with the insert, False otherwise
    """
    if not rinexinfo.multiday:
        return True

    # multiday file: move each per-day piece into the retry area of the repository
    split_names = []

    for rnx in rinexinfo.multiday_rnx_list:
        split_names.append(rnx.rinex)
        retry_folder = os.path.join(Config.repository_data_in_retry,
                                    'multidays_found/' + rnx.date.yyyy() + '/' + rnx.date.ddd())
        rnx.compress_local_copyto(retry_folder)

    cnn.insert_event(pyEvents.Event(
        Description='%s was a multi-day rinex file. The following rinex files where generated '
                    'and moved to the repository/data_in_retry: %s. The file %s did not enter '
                    'the database at this time.'
                    % (rinexinfo.origin_file, ','.join(split_names), rinexinfo.crinez),
        NetworkCode=rinexinfo.NetworkCode,
        StationCode=rinexinfo.StationCode,
        Year=int(rinexinfo.date.year),
        DOY=int(rinexinfo.date.doy)))

    # origin_file points to the repository (not the archive) in this case
    os.remove(rinexinfo.origin_file)

    return False
def __init__(self, value):
    """Store the error message and wrap it in an error-type event for logging."""
    self.event = pyEvents.Event(Description=value, EventType='error')
    self.value = value
def insert_rinex(self, record=None, rinexobj=None):
    """
    Insert a RINEX record and file into the database and archive.

    If only record is provided, only insert into db.
    If only rinexobj is provided, then RinexRecord of rinexobj is used for the insert.
    If both are given, then RinexRecord overrides the passed record.

    The db insert and archive copy run inside a transaction: on any error the
    transaction is rolled back and, if the file already made it into the
    archive, it is deleted again so db and archive stay consistent.

    :param record: a RinexRecord dictionary to make the insert to the db
    :param rinexobj: the pyRinex object containing the file being processed
    :return: True if insertion was successful. False if no insertion was done.
    :raises ValueError: if both record and rinexobj are None
    """
    if record is None and rinexobj is None:
        raise ValueError('insert_rinex exception: both record and rinexobj cannot be None.')

    if rinexobj is not None:
        record = rinexobj.record

    copy_succeeded = False
    archived_crinex = ''

    # check if record exists in the database
    if not self.get_rinex_record(NetworkCode=record['NetworkCode'],
                                 StationCode=record['StationCode'],
                                 ObservationYear=record['ObservationYear'],
                                 ObservationDOY=record['ObservationDOY'],
                                 Interval=record['Interval'],
                                 Completion=float('%.3f' % record['Completion'])):
        # no record, proceed
        # check if we need to perform any rinex operations. We might be inserting a new record, but it may just be
        # a ScanRinex op where we don't copy the file into the archive
        if rinexobj is not None:
            # is the rinex object correctly named?
            rinexobj.apply_file_naming_convention()
            # update the record to the (possible) new name
            record['Filename'] = rinexobj.rinex

        self.cnn.begin_transac()
        try:
            self.cnn.insert('rinex', record)

            if rinexobj is not None:
                # a rinexobj was passed, copy it into the archive.
                path2archive = os.path.join(self.Config.archive_path,
                                            self.build_rinex_path(record['NetworkCode'],
                                                                  record['StationCode'],
                                                                  record['ObservationYear'],
                                                                  record['ObservationDOY'],
                                                                  with_filename=False,
                                                                  rinexobj=rinexobj))

                # copy fixed version into the archive (in case another session exists for RINEX v2)
                archived_crinex = rinexobj.compress_local_copyto(path2archive)
                # remember that the file is now in the archive so the except
                # branch knows whether it must be cleaned up on rollback
                copy_succeeded = True

                # get the rinex filename to update the database
                rnx = RinexNameFormat(archived_crinex).to_rinex_format(pyRinexName.TYPE_RINEX, no_path=True)

                if rnx != rinexobj.rinex:
                    # update the table with the filename (always force with step)
                    self.cnn.query('UPDATE rinex SET "Filename" = \'%s\' '
                                   'WHERE "NetworkCode" = \'%s\' '
                                   'AND "StationCode" = \'%s\' '
                                   'AND "ObservationYear" = %i '
                                   'AND "ObservationDOY" = %i '
                                   'AND "Interval" = %i '
                                   'AND "Completion" = %.3f '
                                   'AND "Filename" = \'%s\''
                                   % (rnx, record['NetworkCode'], record['StationCode'],
                                      record['ObservationYear'], record['ObservationDOY'],
                                      record['Interval'], record['Completion'], record['Filename']))

                event = pyEvents.Event(
                    Description='A new RINEX was added to the archive: %s' % record['Filename'],
                    NetworkCode=record['NetworkCode'],
                    StationCode=record['StationCode'],
                    Year=record['ObservationYear'],
                    DOY=record['ObservationDOY'])
            else:
                event = pyEvents.Event(
                    Description='Archived CRINEX file %s added to the database.' % record['Filename'],
                    NetworkCode=record['NetworkCode'],
                    StationCode=record['StationCode'],
                    Year=record['ObservationYear'],
                    DOY=record['ObservationDOY'])

            self.cnn.insert_event(event)
        except:
            self.cnn.rollback_transac()
            if rinexobj and copy_succeeded:
                # transaction rolled back due to error. If file made into the archive, delete it.
                os.remove(archived_crinex)
            raise

        self.cnn.commit_transac()
        return True
    else:
        # record already existed
        return False
def remove_rinex(self, record, move_to_dir=None):
    """
    Remove a RINEX file and its record from the archive and database.

    Also cascades the delete: if this file was the processed/used file for the
    day (present in rinex_proc), the matching gamit_soln and ppp_soln rows are
    deleted too. Everything runs inside a transaction that is rolled back and
    re-raised on any error.

    :param record: rinex record dictionary identifying the file
    :param move_to_dir: if None the file is deleted; otherwise it is moved to
                        this directory instead of being deleted
    """
    # function to remove a file from the archive
    # should receive a rinex record
    # if move_to is None, file is deleted
    # otherwise, moves file to specified location
    try:
        self.cnn.begin_transac()
        # propagate the deletes
        # check if this rinex file is the file that was processed and used for solutions
        where_station = '"NetworkCode" = \'%s\' AND "StationCode" = \'%s\'' % (record['NetworkCode'],
                                                                               record['StationCode'])

        rs = self.cnn.query('SELECT * FROM rinex_proc WHERE %s AND "ObservationYear" = %i AND "ObservationDOY" = %i'
                            % (where_station, record['ObservationYear'], record['ObservationDOY']))

        if rs.ntuples() > 0:
            # this day's solutions were computed from this file: remove them too
            self.cnn.query('DELETE FROM gamit_soln WHERE %s AND "Year" = %i AND "DOY" = %i'
                           % (where_station, record['ObservationYear'], record['ObservationDOY']))

            self.cnn.query('DELETE FROM ppp_soln WHERE %s AND "Year" = %i AND "DOY" = %i'
                           % (where_station, record['ObservationYear'], record['ObservationDOY']))

        # get the filename
        rinex_path = self.build_rinex_path(record['NetworkCode'], record['StationCode'],
                                           record['ObservationYear'], record['ObservationDOY'],
                                           filename=record['Filename'])

        rinex_path = os.path.join(self.Config.archive_path, rinex_path)

        # delete the rinex record
        self.cnn.query('DELETE FROM rinex WHERE %s AND "ObservationYear" = %i AND "ObservationDOY" = %i AND "Filename" = \'%s\''
                       % (where_station, record['ObservationYear'], record['ObservationDOY'], record['Filename']))

        if os.path.isfile(rinex_path):
            if move_to_dir:
                filename = Utils.move(rinex_path, os.path.join(move_to_dir, os.path.basename(rinex_path)))
                description = 'RINEX %s was removed from the database and archive. ' \
                              'File moved to %s. See next events for reason.' % (record['Filename'], filename)
            else:
                os.remove(rinex_path)
                description = 'RINEX %s was removed from the database and archive. ' \
                              'File was deleted. See next events for reason.' % (record['Filename'])
        else:
            description = 'RINEX %s was removed from the database and archive. ' \
                          'File was NOT found in the archive ' \
                          'so no deletion was performed. See next events for reason.' % (record['Filename'])

        # insert an event
        event = pyEvents.Event(Description=description,
                               NetworkCode=record['NetworkCode'],
                               StationCode=record['StationCode'],
                               EventType='info',
                               Year=record['ObservationYear'],
                               DOY=record['ObservationDOY'])

        self.cnn.insert_event(event)

        self.cnn.commit_transac()
    except:
        self.cnn.rollback_transac()
        raise
def __init__(self, value):
    """Store the error message and build an error event tagged with the exception class name."""
    self.event = pyEvents.Event(Description=value,
                                EventType='error',
                                module=type(self).__name__)
    self.value = value
def InsertStationInfo(self, record):
    """
    Insert a new station information record, resolving overlaps.

    Overlap resolution:
      * record overlaps ALL existing records and starts before the first one:
        if it is equal to the first record, just extend the first record's
        DateStart backwards; otherwise close the incoming record one second
        before the first record starts and insert it.
      * record overlaps only the last (open-ended) record: close that session
        one second before the incoming DateStart and insert the new record.
      * any other overlap: raise pyStationInfoException.

    :param record: station information record to insert
    :raises pyStationInfoException: on unresolvable overlap, if the record
        already exists, or if NetworkCode/StationCode are not initialized
    """
    # avoid problems with trying to insert records from other stations. Force this NetworkCode
    record['NetworkCode'] = self.NetworkCode

    if self.NetworkCode and self.StationCode:
        # check existence of station in the db
        rs = self.cnn.query('SELECT * FROM stationinfo WHERE "NetworkCode" = \'%s\' '
                            'AND "StationCode" = \'%s\' AND "DateStart" = \'%s\'' %
                            (self.NetworkCode, self.StationCode, record['DateStart'].strftime()))

        if rs.ntuples() == 0:
            # can insert because it's not the same record
            # 1) verify the record is not between any two existing records
            overlaps = self.overlaps(record)

            if overlaps:
                # if it overlaps all records and the DateStart < self.records[0]['DateStart']
                # see if we have to extend the initial date
                if len(overlaps) == len(self.records) and \
                        record['DateStart'].datetime() < self.records[0]['DateStart'].datetime():
                    if self.records_are_equal(record, self.records[0]):
                        # just modify the start date to match the incoming record
                        # self.cnn.update('stationinfo', self.records[0], DateStart=record['DateStart'])
                        # the previous statement seems not to work because it updates a primary key!
                        self.cnn.query('UPDATE stationinfo SET "DateStart" = \'%s\' WHERE "NetworkCode" = \'%s\' '
                                       'AND "StationCode" = \'%s\' AND "DateStart" = \'%s\'' %
                                       (record['DateStart'].strftime(), self.NetworkCode, self.StationCode,
                                        self.records[0]['DateStart'].strftime()))

                        # insert event
                        event = pyEvents.Event(Description='The start date of the station information record ' +
                                                           self.records[0]['DateStart'].strftime() +
                                                           ' has been been modified to ' +
                                                           record['DateStart'].strftime(),
                                               StationCode=self.StationCode,
                                               NetworkCode=self.NetworkCode)
                        self.cnn.insert_event(event)
                    else:
                        # new and different record, stop the Session with
                        # EndDate = self.records[0]['DateStart'] - datetime.timedelta(seconds=1) and insert
                        record['DateEnd'] = pyDate.Date(datetime=self.records[0]['DateStart'].datetime() -
                                                        datetime.timedelta(seconds=1))

                        self.cnn.insert('stationinfo', record.database())

                        # insert event
                        event = pyEvents.Event(Description='A new station information record was added:\n' +
                                                           str(record),
                                               StationCode=self.StationCode,
                                               NetworkCode=self.NetworkCode)
                        self.cnn.insert_event(event)

                elif len(overlaps) == 1 and overlaps[0] == self.records[-1] and \
                        not self.records[-1]['DateEnd'].year:
                    # overlap with the last session
                    # stop the current valid session
                    self.cnn.update('stationinfo', self.records[-1].database(),
                                    DateEnd=record['DateStart'].datetime() - datetime.timedelta(seconds=1))

                    # create the incoming session
                    self.cnn.insert('stationinfo', record.database())

                    # insert event
                    event = pyEvents.Event(Description='A new station information record was added:\n' +
                                                       self.return_stninfo(record) +
                                                       '\nThe previous DateEnd value was updated to ' +
                                                       self.records[-1]['DateEnd'].strftime(),
                                           StationCode=self.StationCode,
                                           NetworkCode=self.NetworkCode)
                    self.cnn.insert_event(event)

                else:
                    # unresolvable overlap: report every conflicting span
                    stroverlap = []
                    for overlap in overlaps:
                        stroverlap.append(' -> '.join([str(overlap['DateStart']), str(overlap['DateEnd'])]))

                    raise pyStationInfoException('Record %s -> %s overlaps with existing station.info records: %s' %
                                                 (str(record['DateStart']), str(record['DateEnd']),
                                                  ' '.join(stroverlap)))
            else:
                # no overlaps, insert the record
                self.cnn.insert('stationinfo', record.database())

                # insert event
                event = pyEvents.Event(Description='A new station information record was added:\n' +
                                                   str(record),
                                       StationCode=self.StationCode,
                                       NetworkCode=self.NetworkCode)
                self.cnn.insert_event(event)

            # reload the records
            self.load_stationinfo_records()
        else:
            raise pyStationInfoException('Record %s -> %s already exists in station.info' %
                                         (str(record['DateStart']), str(record['DateEnd'])))
    else:
        raise pyStationInfoException('Cannot insert record without initializing pyStationInfo '
                                     'with NetworkCode and StationCode')
def process_crinex_file(crinez, filename, data_rejected, data_retry):
    """
    Process one CRINEZ file from the repository and try to insert it.

    Pipeline: parse the filename for station/year/doy; split multiday files;
    obtain an approximate coordinate (auto_coord via broadcast orbits, then
    PPP, with auto_coord as PPP fallback); verify spatial coherence against
    known stations; insert into the archive on a unique match. On any failure
    the file is moved to a reason-specific reject/retry folder and the event
    is logged.

    :param crinez: path of the crinez file relative to the repository data_in
    :param filename: bare filename used for parsing and for the moved copies
    :param data_rejected: base folder for rejected files
    :param data_retry: base folder for files to be retried
    :return: tuple (error_message or None, payload or None); the payload is the
             station metadata list when the station is unknown to the database
    """
    # create a uuid temporary folder in case we cannot read the year and doy from the file (and gets rejected)
    reject_folder = os.path.join(data_rejected, str(uuid.uuid4()))

    try:
        cnn = dbConnection.Cnn("gnss_data.cfg")
        Config = pyOptions.ReadOptions("gnss_data.cfg")
        archive = pyArchiveStruct.RinexStruct(cnn)
        # apply local configuration (path to repo) in the executing node
        crinez = os.path.join(Config.repository_data_in, crinez)
    except Exception:
        return traceback.format_exc() + ' while opening the database to process file ' + \
               crinez + ' node ' + platform.node(), None

    # assume a default networkcode
    NetworkCode = 'rnx'
    # get the station code year and doy from the filename
    fileparts = archive.parse_crinex_filename(filename)

    if fileparts:
        StationCode = fileparts[0].lower()
        doy = int(fileparts[1])
        year = int(Utils.get_norm_year_str(fileparts[3]))
    else:
        event = pyEvents.Event(
            Description='Could not read the station code, year or doy for file ' + crinez,
            EventType='error')
        # filename is unparseable: no db log possible for this file
        error_handle(cnn, event, crinez, reject_folder, filename, no_db_log=True)
        return event['Description'], None

    # we can now make better reject and retry folders
    # %reason% is a placeholder replaced by each exception handler below
    reject_folder = os.path.join(data_rejected,
                                 '%reason%/' + Utils.get_norm_year_str(year) + '/' + Utils.get_norm_doy_str(doy))

    retry_folder = os.path.join(data_retry,
                                '%reason%/' + Utils.get_norm_year_str(year) + '/' + Utils.get_norm_doy_str(doy))

    try:
        # main try except block
        with pyRinex.ReadRinex(NetworkCode, StationCode, crinez) as rinexinfo:  # type: pyRinex.ReadRinex
            # STOP! see if rinexinfo is a multiday rinex file
            if not verify_rinex_multiday(cnn, rinexinfo, Config):
                # was a multiday rinex. verify_rinex_date_multiday took care of it
                return None, None

            # DDG: we don't use otl coefficients because we need an approximated coordinate
            # we therefore just calculate the first coordinate without otl
            # NOTICE that we have to trust the information coming in the RINEX header (receiver type, antenna type, etc)
            # we don't have station info data! Still, good enough
            # the final PPP coordinate will be calculated by pyScanArchive on a different process

            # make sure that the file has the appropriate coordinates in the header for PPP.
            # put the correct APR coordinates in the header.
            # ppp didn't work, try using sh_rx2apr
            brdc = pyBrdc.GetBrdcOrbits(Config.brdc_path, rinexinfo.date, rinexinfo.rootdir)

            # inflate the chi**2 limit to make sure it will pass (even if we get a crappy coordinate)
            try:
                rinexinfo.auto_coord(brdc, chi_limit=1000)

                # normalize header to add the APR coordinate
                # empty dict since nothing extra to change (other than the APR coordinate)
                rinexinfo.normalize_header(dict())
            except pyRinex.pyRinexExceptionNoAutoCoord:
                # could not determine an autonomous coordinate, try PPP anyways. 50% chance it will work
                pass

            with pyPPP.RunPPP(rinexinfo, '', Config.options, Config.sp3types, Config.sp3altrn,
                              rinexinfo.antOffset, strict=False, apply_met=False,
                              clock_interpolation=True) as ppp:  # type: pyPPP.RunPPP
                try:
                    ppp.exec_ppp()
                except pyPPP.pyRunPPPException as ePPP:
                    # inflate the chi**2 limit to make sure it will pass (even if we get a crappy coordinate)
                    # if coordinate is TOO bad it will get kicked off by the unreasonable geodetic height
                    try:
                        auto_coords_xyz, auto_coords_lla = rinexinfo.auto_coord(brdc, chi_limit=1000)
                    except pyRinex.pyRinexExceptionNoAutoCoord as e:
                        # catch pyRinexExceptionNoAutoCoord and convert it into a pyRunPPPException
                        raise pyPPP.pyRunPPPException(
                            'Both PPP and sh_rx2apr failed to obtain a coordinate for %s.\n'
                            'The file has been moved into the rejection folder. '
                            'Summary PPP file and error (if exists) follows:\n%s\n\n'
                            'ERROR section:\n%s\npyRinex.auto_coord error follows:\n%s'
                            % (crinez.replace(Config.repository_data_in, ''), ppp.summary,
                               str(ePPP).strip(), str(e).strip()))

                    # DDG: this is correct - auto_coord returns a numpy array (calculated in ecef2lla),
                    # so ppp.lat = auto_coords_lla is consistent.
                    ppp.lat = auto_coords_lla[0]
                    ppp.lon = auto_coords_lla[1]
                    ppp.h = auto_coords_lla[2]
                    ppp.x = auto_coords_xyz[0]
                    ppp.y = auto_coords_xyz[1]
                    ppp.z = auto_coords_xyz[2]

                # check for unreasonable heights
                if ppp.h[0] > 9000 or ppp.h[0] < -400:
                    raise pyRinex.pyRinexException(
                        os.path.relpath(crinez, Config.repository_data_in) +
                        ' : unreasonable geodetic height (%.3f). '
                        'RINEX file will not enter the archive.' % (ppp.h[0]))

                Result, match, _ = ppp.verify_spatial_coherence(cnn, StationCode)

                if Result:
                    # insert: there is only 1 match with the same StationCode.
                    rinexinfo.rename(NetworkCode=match[0]['NetworkCode'])
                    insert_data(cnn, archive, rinexinfo)
                else:
                    if len(match) == 1:
                        # coordinate matches a station whose code differs from the filename's
                        error = "%s matches the coordinate of %s.%s (distance = %8.3f m) but the filename " \
                                "indicates it is %s. Please verify that this file belongs to %s.%s, rename it and " \
                                "try again. The file was moved to the retry folder. " \
                                "Rename script and pSQL sentence follows:\n" \
                                "BASH# mv %s %s\n" \
                                "PSQL# INSERT INTO stations (\"NetworkCode\", \"StationCode\", \"auto_x\", " \
                                "\"auto_y\", \"auto_z\", \"lat\", \"lon\", \"height\") VALUES " \
                                "('???','%s', %12.3f, %12.3f, %12.3f, " \
                                "%10.6f, %10.6f, %8.3f)\n" \
                                % (os.path.relpath(crinez, Config.repository_data_in),
                                   match[0]['NetworkCode'], match[0]['StationCode'],
                                   float(match[0]['distance']), StationCode,
                                   match[0]['NetworkCode'], match[0]['StationCode'],
                                   os.path.join(retry_folder, filename),
                                   os.path.join(retry_folder, filename.replace(StationCode, match[0]['StationCode'])),
                                   StationCode, ppp.x, ppp.y, ppp.z, ppp.lat[0], ppp.lon[0], ppp.h[0])

                        raise pyPPP.pyRunPPPExceptionCoordConflict(error)

                    elif len(match) > 1:
                        # a number of things could have happened:
                        # 1) wrong station code, and more than one matching stations
                        #    (that do not match the station code, of course)
                        #    see rms.lhcl 2007 113 -> matches rms.igm0: 34.293 m, rms.igm1: 40.604 m, rms.byns: 4.819 m
                        # 2) no entry in the database for this solution -> add a lock and populate the exit args

                        # no match, but we have some candidates
                        error = "Solution for RINEX in repository (%s %s) did not match a unique station location " \
                                "(and station code) within 5 km. Possible cantidate(s): %s. This file has been moved " \
                                "to data_in_retry. pSQL sentence follows:\n" \
                                "PSQL# INSERT INTO stations (\"NetworkCode\", \"StationCode\", \"auto_x\", " \
                                "\"auto_y\", \"auto_z\", \"lat\", \"lon\", \"height\") VALUES " \
                                "('???','%s', %12.3f, %12.3f, %12.3f, %10.6f, %10.6f, %8.3f)\n" \
                                % (os.path.relpath(crinez, Config.repository_data_in),
                                   rinexinfo.date.yyyyddd(),
                                   ', '.join(['%s.%s: %.3f m' % (m['NetworkCode'], m['StationCode'], m['distance'])
                                              for m in match]),
                                   StationCode, ppp.x, ppp.y, ppp.z, ppp.lat[0], ppp.lon[0], ppp.h[0])

                        raise pyPPP.pyRunPPPExceptionCoordConflict(error)

                    else:
                        # only found a station removing the distance limit (could be thousands of km away!)
                        # The user will have to add the metadata to the database before the file can be added,
                        # but in principle no problem was detected by the process. This file will stay in this folder
                        # so that it gets analyzed again but a "lock" will be added to the file that will have to be
                        # removed before the service analyzes again.
                        # if the user inserted the station by then, it will get moved to the appropriate place.
                        # we return all the relevant metadata to ease the insert of the station in the database

                        otl = pyOTL.OceanLoading(StationCode, Config.options['grdtab'], Config.options['otlgrid'])

                        # use the ppp coordinates to calculate the otl
                        coeff = otl.calculate_otl_coeff(x=ppp.x, y=ppp.y, z=ppp.z)

                        # add the file to the locks table so that it doesn't get processed over and over
                        # this will be removed by user so that the file gets reprocessed once all the metadata is ready
                        cnn.insert('locks', filename=os.path.relpath(crinez, Config.repository_data_in))

                        return None, [StationCode, (ppp.x, ppp.y, ppp.z), coeff,
                                      (ppp.lat[0], ppp.lon[0], ppp.h[0]), crinez]

    except (pyRinex.pyRinexExceptionBadFile, pyRinex.pyRinexExceptionSingleEpoch, pyRinex.pyRinexExceptionNoAutoCoord) \
            as e:
        reject_folder = reject_folder.replace('%reason%', 'bad_rinex')

        # add more verbose output
        e.event['Description'] = e.event['Description'] + '\n' + os.path.relpath(crinez, Config.repository_data_in) + \
                                 ': (file moved to ' + reject_folder + ')'
        e.event['StationCode'] = StationCode
        e.event['NetworkCode'] = '???'
        e.event['Year'] = year
        e.event['DOY'] = doy
        # error, move the file to rejected folder
        error_handle(cnn, e.event, crinez, reject_folder, filename)

        return None, None

    except pyRinex.pyRinexException as e:
        retry_folder = retry_folder.replace('%reason%', 'rinex_issues')

        # add more verbose output
        e.event['Description'] = e.event['Description'] + '\n' + os.path.relpath(crinez, Config.repository_data_in) + \
                                 ': (file moved to ' + retry_folder + ')'
        e.event['StationCode'] = StationCode
        e.event['NetworkCode'] = '???'
        e.event['Year'] = year
        e.event['DOY'] = doy
        # error, move the file to rejected folder
        error_handle(cnn, e.event, crinez, retry_folder, filename)

        return None, None

    except pyPPP.pyRunPPPExceptionCoordConflict as e:
        retry_folder = retry_folder.replace('%reason%', 'coord_conflicts')

        e.event['Description'] = e.event['Description'].replace('%reason%', 'coord_conflicts')
        e.event['StationCode'] = StationCode
        e.event['NetworkCode'] = '???'
        e.event['Year'] = year
        e.event['DOY'] = doy

        error_handle(cnn, e.event, crinez, retry_folder, filename)

        return None, None

    except pyPPP.pyRunPPPException as e:
        reject_folder = reject_folder.replace('%reason%', 'no_ppp_solution')

        e.event['StationCode'] = StationCode
        e.event['NetworkCode'] = '???'
        e.event['Year'] = year
        e.event['DOY'] = doy

        error_handle(cnn, e.event, crinez, reject_folder, filename)

        return None, None

    except pyStationInfo.pyStationInfoException as e:
        retry_folder = retry_folder.replace('%reason%', 'station_info_exception')

        e.event['Description'] = e.event['Description'] + '. The file will stay in the repository and will be ' \
                                                          'processed during the next cycle of pyArchiveService.'
        e.event['StationCode'] = StationCode
        e.event['NetworkCode'] = '???'
        e.event['Year'] = year
        e.event['DOY'] = doy

        error_handle(cnn, e.event, crinez, retry_folder, filename)

        return None, None

    except pyOTL.pyOTLException as e:
        retry_folder = retry_folder.replace('%reason%', 'otl_exception')

        e.event['Description'] = e.event['Description'] + ' while calculating OTL for %s. ' \
                                 'The file has been moved into the retry folder.' \
                                 % os.path.relpath(crinez, Config.repository_data_in)
        e.event['StationCode'] = StationCode
        e.event['NetworkCode'] = '???'
        e.event['Year'] = year
        e.event['DOY'] = doy

        error_handle(cnn, e.event, crinez, retry_folder, filename)

        return None, None

    except pyProducts.pyProductsExceptionUnreasonableDate as e:
        # a bad RINEX file requested an orbit for a date < 0 or > now()
        reject_folder = reject_folder.replace('%reason%', 'bad_rinex')

        e.event['Description'] = e.event['Description'] + ' during %s. The file has been moved to the rejected ' \
                                 'folder. Most likely bad RINEX header/data.' \
                                 % os.path.relpath(crinez, Config.repository_data_in)
        e.event['StationCode'] = StationCode
        e.event['NetworkCode'] = '???'
        e.event['Year'] = year
        e.event['DOY'] = doy

        error_handle(cnn, e.event, crinez, reject_folder, filename)

        return None, None

    except pyProducts.pyProductsException as e:
        # if PPP fails and ArchiveService tries to run sh_rnx2apr and it doesn't find the orbits, send to retry
        retry_folder = retry_folder.replace('%reason%', 'sp3_exception')

        e.event['Description'] = e.event['Description'] + ': %s. Check the brdc/sp3/clk files and also check that ' \
                                 'the RINEX data is not corrupt.' \
                                 % os.path.relpath(crinez, Config.repository_data_in)
        e.event['StationCode'] = StationCode
        e.event['NetworkCode'] = '???'
        e.event['Year'] = year
        e.event['DOY'] = doy

        error_handle(cnn, e.event, crinez, retry_folder, filename)

        return None, None

    except dbConnection.dbErrInsert as e:
        reject_folder = reject_folder.replace('%reason%', 'duplicate_insert')

        # insert duplicate values: two parallel processes tried to insert different filenames
        # (or the same) of the same station to the db: move it to the rejected folder.
        # The user might want to retry later. Log it in events
        # this case should be very rare
        event = pyEvents.Event(Description='Duplicate rinex insertion attempted while processing ' +
                                           os.path.relpath(crinez, Config.repository_data_in) +
                                           ' : (file moved to rejected folder)\n' + str(e),
                               EventType='warn',
                               StationCode=StationCode,
                               NetworkCode='???',
                               Year=year,
                               DOY=doy)

        error_handle(cnn, event, crinez, reject_folder, filename)

        return None, None

    except Exception:
        retry_folder = retry_folder.replace('%reason%', 'general_exception')

        # unexpected failure: log the full traceback; no db log (event may not be insertable)
        event = pyEvents.Event(Description=traceback.format_exc() + ' processing: ' +
                                           os.path.relpath(crinez, Config.repository_data_in) +
                                           ' in node ' + platform.node() +
                                           ' (file moved to retry folder)',
                               EventType='error')

        error_handle(cnn, event, crinez, retry_folder, filename, no_db_log=True)

        return event['Description'], None

    return None, None