def parse_sinex(self):

    for sinex in os.listdir(self.pwd_glbf):
        if sinex.endswith('.snx'):
            snx = snxParse.snxFileParser(os.path.join(self.pwd_glbf, sinex))
            snx.parse()

            self.polyhedron = snx.stationDict
            self.VarianceFactor = snx.varianceFactor

    if self.polyhedron:
        # rename any aliases and change keys to net.stn
        for stn in self.StationInstances:
            # replace the key
            try:
                self.polyhedron[stationID(stn)] = self.polyhedron.pop(stn.StationAlias.upper())
            except KeyError:
                # maybe the station didn't have a solution
                pass

    return self.polyhedron, self.VarianceFactor
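# A minimal consumption sketch for the method above (hypothetical: `session` stands in for
# whatever object carries pwd_glbf and StationInstances, and the assumption that the
# stationDict records expose X, Y, Z attributes is taken from the DataObj constructor below):
#
#     polyhedron, var_factor = session.parse_sinex()
#     if polyhedron:
#         # keys are 'net.stn' after the alias renaming
#         for name, stn in polyhedron.items():
#             print name, stn.X, stn.Y, stn.Z
#         print 'variance factor:', var_factor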
def __init__(self, expt, org, dataType, dataSrcLable, filePath):

    # call super class constructor for data object
    DataObj.__init__(self, expt, org, dataType, dataSrcLable, filePath)

    # init sinex parser for current sinex file
    snxParser = snxParse.snxFileParser(filePath).parse()

    # organize the coordinates for the data object
    for stn in snxParser:

        # initialize the station if we haven't seen it before
        if stn not in self.data:
            self.data[stn] = dict()

        # get the snx data from the parser
        stnData = snxParser.get(stn)

        # pack the data away
        self.data[stn]['xyz'] = [stnData.X, stnData.Y, stnData.Z]
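# For reference, the constructor above leaves self.data as a plain nested dict keyed by
# whatever station names the parser yields (a sketch of the layout, not a guaranteed API):
#
#     self.data[stn]['xyz']    # -> [X, Y, Z] Cartesian coordinates for station `stn`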
def main():

    (file, outdir) = get_input_args()

    # get the date from the sinex file
    gpsWeek = int(os.path.basename(file)[3:7])
    gpsWeekDay = int(os.path.basename(file)[7])

    # compute a date from the information
    date = pyDate.Date(gpsweek=gpsWeek, gpsweekday=gpsWeekDay)

    # check outdir: if outdir is None then put the solution file
    # in the same directory as the snx file
    if outdir is None:
        outdir = '.'

    # make full path for solution file
    solnFilePath = os.path.join(outdir, getOutFileName(file))

    # init sinex parser for current sinex file
    snxParser = snxParse.snxFileParser(file).parse()

    # construct npvs and npvs sigma from the sinex data
    npvs, npvs_sigma = npv(snxParser.stationDict)

    # create station list from dictionary keys
    stn_list = snxParser.stationDict.keys()

    # compute epoch in fractional year
    epochs = date.fyear

    # extract the variance factor
    var_factor = snxParser.varianceFactor

    # save as a mat file
    scipy.io.savemat(solnFilePath,
                     mdict={'stnm'      : stn_list,
                            'epochs'    : epochs,
                            'npvs'      : npvs,
                            'npv_sigma' : npvs_sigma,
                            'var_factor': var_factor},
                     oned_as='column')
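# The slicing in main() assumes the SINEX name starts with a 3-character org code followed
# by the GPS week and day of week; a hypothetical example of what it extracts:
fname = 'igs19104.snx'                  # made-up file name
print int(fname[3:7]), int(fname[7])    # -> 1910 4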
def process_sinex(cnn, project, dates, sinex):

    # parse the SINEX to get the station list
    snx = snxParse.snxFileParser(sinex)
    snx.parse()

    stnlist = ('\'' + '\',\''.join(snx.stationDict.keys()) + '\'').lower()

    # insert the statistical data
    zg = cnn.query_float('SELECT count("Year")*2 as ss FROM gamit_soln '
                         'WHERE "Project" = \'%s\' AND "FYear" BETWEEN %.4f AND %.4f AND "StationCode" IN (%s) '
                         'GROUP BY "Year", "DOY"'
                         % (project, dates[0].first_epoch('fyear'), dates[1].last_epoch('fyear'), stnlist))

    zg = sum([s[0] for s in zg])

    zd = cnn.query_float('SELECT count("ZTD") + %i as implicit FROM gamit_ztd '
                         'WHERE "Date" BETWEEN \'%s\' AND \'%s\' '
                         % (zg, dates[0].first_epoch(), dates[1].last_epoch()))

    zd = zd[0][0]

    print ' >> Adding NUMBER OF UNKNOWNS: %i (previous value: %i)' % (zd, snx.unknowns)

    replace_in_sinex(sinex, snx.observations, snx.unknowns, snx.unknowns + zg + zd)

    rs = cnn.query('SELECT "NetworkCode", "StationCode", dome FROM stations '
                   'WHERE "StationCode" IN (%s) '
                   'ORDER BY "NetworkCode", "StationCode"' % stnlist)

    stations = rs.dictresult()

    print ' >> Adding DOMES'
    # add domes
    add_domes(sinex, stations)
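# Hypothetical call sketch for process_sinex (all arguments are assumptions: `cnn` must
# provide the query/query_float interface used above, `dates` a (start, end) pair exposing
# first_epoch()/last_epoch(), and the project name and SINEX path are made up):
#
#     process_sinex(cnn, 'igs-global', (start_date, end_date), '/data/combinations/igs19104.snx')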
def parse_sinex(self):

    for sinex in os.listdir(self.pwd_comb):
        if sinex.endswith('.snx'):
            snx = snxParse.snxFileParser(os.path.join(self.pwd_comb, sinex))
            snx.parse()

            self.polyhedron = snx.stationDict
            self.VarianceFactor = snx.varianceFactor

    if self.polyhedron:
        # rename the dict keys to net.stn format (and replace any aliases)
        for GamitSession in self.Sessions:
            for StationInstance in GamitSession.StationInstances:
                # replace the key
                try:
                    self.polyhedron[StationInstance.NetworkCode + '.' + StationInstance.StationCode] = \
                        self.polyhedron.pop(StationInstance.StationAlias.upper())
                except KeyError:
                    # maybe the station didn't have a solution
                    pass

    return self.polyhedron, self.VarianceFactor
def mk_mat(self, snxfile):

    # minimize dependencies if not interested in mat files
    import pyDate
    import snxParse
    import snx2mat
    import scipy.io
    import gzip

    # get the date from the sinex file
    gpsWeek = int(os.path.basename(snxfile)[3:7])
    gpsWeekDay = int(os.path.basename(snxfile)[7])

    # compute a date from the information
    date = pyDate.Date(gpsweek=gpsWeek, gpsweekday=gpsWeekDay)

    # split the file path into dir and name
    file_path, file_name = os.path.split(snxfile)

    # create the output file name
    file_name = file_name.split('.')[0]

    # make full path for solution file
    solnFilePath = os.path.join(file_path, file_name)

    # init sinex parser for current sinex file
    snxParser = snxParse.snxFileParser(snxfile).parse()

    # construct npvs and npvs sigma from the sinex data
    npvs, npvs_sigma = snx2mat.npv(snxParser.stationDict)

    # create station list from dictionary keys
    stn_list = snxParser.stationDict.keys()

    # compute epoch in fractional year
    epochs = date.fyear

    # extract the variance factor
    var_factor = snxParser.varianceFactor

    # save as a mat file
    scipy.io.savemat(solnFilePath,
                     mdict={'stnm'      : stn_list,
                            'epochs'    : epochs,
                            'npvs'      : npvs,
                            'npv_sigma' : npvs_sigma,
                            'var_factor': var_factor},
                     oned_as='column')

    # if we have a mat file then gzip it
    if os.path.isfile(solnFilePath + '.mat'):

        # gzip the file using open file handles
        with open(solnFilePath + '.mat', 'rb') as orig_file:
            with gzip.open(solnFilePath + '.mat.gz', 'wb') as zipped_file:
                zipped_file.writelines(orig_file)

        # clean up
        os.remove(solnFilePath + '.mat')
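# Hypothetical usage sketch for mk_mat (the path and org prefix are made up; the SINEX name
# must follow the same org + GPS week + day-of-week pattern parsed above):
#
#     self.mk_mat('/data/solutions/osf19104.snx')
#     # writes and gzips /data/solutions/osf19104.mat.gz, removing the intermediate .mat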