Example #1
    def imposeRF(self, refFrame, year, doy, fid=None):

        # convert year/doy to fractional year
        fyear = pyDate.yeardoy2fyear(year, doy)

        # translate the 4-char aliases of the current npvs
        # into their real 8-char station IDs
        stnIdList = list()
        translate = self.dataMgr.getReverseAliasMap()
        for stn in self.stnList:
            stnId = list(translate[stn])[0]
            stnId = stnId.lower().replace('::', '_')
            stnIdList.append(stnId)

        # initialize the new reference frame for these stations
        rf = pyRF.pyRF(refFrame).initForStnList(stnIdList)

        # realize the frame at this epoch
        npvTarget = rf.npvForEpoch(fyear)

        # estimate the transformation onto the frame
        T, npvT, stats = pyStk.helmert(self.npvF, npvTarget,
                                       reweightFactor=5, limit=1.75, maxIter=100)

        # decide whether the alignment succeeded
        status = self.getStatus(T, stats)

        summary = self.__mkSummary(refFrame, npvT, status, stats, T, np.nan)

        if fid is not None:
            fid.write('\n')
            fid.write('Reference Frame:\n')
            fid.write(summary + '\n')
        else:
            print()
            print('Reference Frame:')
            print(summary)

        # keep the aligned solution only if the alignment was used
        if status == 'used':
            self.npvF = npvT
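For context, a minimal sketch of how imposeRF might be driven (the `stacker` instance and the summary-file name are assumptions; the class that owns imposeRF is not shown in this excerpt):

# Hypothetical driver: align the stacked solution onto ITRF2008
# for day-of-year 103 of 2010, writing the summary to a file.
# `stacker` is assumed to be an instance of the class above.
with open('stack_summary.txt', 'w') as fid:
    stacker.imposeRF('itrf08', 2010, 103, fid=fid)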
Example #2
import time

import numpy as np

import hfileLib
import pyStk

# npvForStnList() and combine() are assumed to be defined elsewhere in this
# module; they are not part of this excerpt.


def stk():
    tstart = time.time()

    hfiles = hfileLib.Hsrc(2010, 103)
    hfiles.addProjectSrc('glbl', 'glx',
                         '../data/osuGLOBAL/gamit/orbit/solutions')
    hfiles.addProjectSrc('glbd', 'glx',
                         '../data/osuGLOBAL/gamit/densification/solutions')
    hfiles.addProjectSrc('capp', 'glx', '../data/CAP/gamit/solutions')
    hfiles.addProjectSrc('anet', 'glx', '../data/ANET/gamit/solutions')
    hfiles.addProjectSrc('gnet', 'glx', '../data/GNET/gamit/solutions')

    # print the number of hfiles in the system
    print(hfiles.size())

    # resolve any conflicting names
    hfiles.resolveNameConflicts()

    # get the stn list for npv synchronization
    stnNameList = list(set(hfiles.getStnNamesAsList()))

    # seed the combined solution from the first hfile
    npvF = npvForStnList(hfiles.hfileObjList[0], stnNameList)

    header = 'file               status iter  RMS [mm]  wRMS [mm]  maxResid  %-out  N-out  N-ties  N-stns    Tx [mm]    Ty [mm]    Tz [mm]   Rx [nRad]   Ry [nRad]   Rz [nRad]'
    print(header)

    flist = list()

    for hfile in hfiles:
        hfileName = hfile.getUniqueHfileName()

        # get the npv for the hfile
        npv = npvForStnList(hfile, stnNameList)

        # estimate transformation
        T, npvT, stats = pyStk.helmert(npv, npvF, percentile=30, reweight=5)

        status = 'used'
        if stats['iter'] >= stats['maxIter']:
            status = 'failed'
            flist.append(hfileName + ' failed to converge at ' +
                         str(stats['iter']) + ' iterations')

        if stats['npts'] < 2:
            status = 'failed'
            flist.append(hfileName + ' failed b/c only has ' +
                         str(stats['npts']) + ' common tie stations')

        # reject wildly large transforms (> 0.5 m translation or > 1e-6 rad rotation)
        if np.any(np.abs(T['t']) > 0.5) or np.any(np.abs(T['r']) > 1e-6):
            status = 'failed'
            flist.append(hfileName + ' failed with crazy transform')

        # combine with new solution
        if status == 'used':
            npvF = combine(npvT.copy(), npvF.copy()).copy()

        # report sane transforms in mm and nanoradians; blank out crazy ones
        if not np.any(np.abs(T['t']) > 0.5) and not np.any(np.abs(T['r']) > 1e-6):
            T['t'] = T['t'] / 1e-3
            T['r'] = T['r'] / 1e-9
        else:
            T['t'] = np.ones_like(T['t']) * np.nan
            T['r'] = np.ones_like(T['r']) * np.nan

        # blank out RMS values larger than 100 mm so they don't skew the table
        if stats['RMS'] / 1e-3 > 100:
            stats['RMS'] = np.nan

        if stats['wRMS'] / 1e-3 > 100:
            stats['wRMS'] = np.nan

        id_status = '%s %6s %4d' % (hfileName, status, stats['iter'])
        rms = '%8.2f %8.2f %11.2f' % (stats['RMS'] / 1e-3,
                                      stats['wRMS'] / 1e-3,
                                      stats['dvMax'] / 1e-3)
        pout = '   %4.1f  %4d    %4d    %4d' % (
            stats['pout'], stats['nout'], stats['npts'],
            np.nonzero(~np.isnan(npv))[0].size // 3)
        transform = '  %10.4f %10.4f %10.4f %10.4f %10.4f %10.4f' % (
            T['t'][0], T['t'][1], T['t'][2], T['r'][0], T['r'][1], T['r'][2])
        print(id_status, rms, pout, transform)

    print()
    for line in flist:
        print(line)

    print('elapsed time:', time.time() - tstart)

    return hfiles, stnNameList, npvF
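npvForStnList and combine are helpers from elsewhere in this module, so only their calling conventions are visible here. Judging from the station count np.nonzero(~np.isnan(npv))[0].size // 3 above, an npv appears to be a flattened vector of XYZ coordinates with NaN for missing stations. A minimal sketch of npvForStnList under that assumption (the getCoordsAsDict accessor is hypothetical):

import numpy as np

def npvForStnList(hfile, stnNameList):
    # Hypothetical sketch: build a flattened position vector of length
    # 3 * len(stnNameList), NaN where the hfile has no solution.
    npv = np.full(3 * len(stnNameList), np.nan)
    coords = hfile.getCoordsAsDict()  # assumed accessor, not in the excerpt
    for i, stn in enumerate(stnNameList):
        if stn in coords:
            npv[3 * i:3 * i + 3] = coords[stn]  # x, y, z in meters
    return npv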
Example #3
    def transformWithScaleForEpoch(self, npv, fyear):

        # realize the reference frame at this epoch, then fit a 7-parameter
        # Helmert transformation (translation + rotation + scale)
        npvTarget = self.npvForEpoch(fyear)
        T, npvT, stats = pyStk.helmert(npv, npvTarget, withScale='yes')
        return T, npvT, stats
Example #4
    def transformForEpoch(self, npv, fyear):

        # realize the reference frame at this epoch, then fit the default
        # 6-parameter Helmert transformation (translation + rotation)
        npvTarget = self.npvForEpoch(fyear)
        T, npvT, stats = pyStk.helmert(npv, npvTarget)
        return T, npvT, stats
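The two wrappers above differ only in the withScale flag: a 6-parameter (translation + rotation) versus 7-parameter (adds scale) Helmert fit. A hedged usage sketch, assuming rf is an initialized pyRF instance and npv is a flattened position vector for the same station list:

T6, npvT6, stats6 = rf.transformForEpoch(npv, 2010.5)
T7, npvT7, stats7 = rf.transformWithScaleForEpoch(npv, 2010.5)
print('wRMS without scale: %.2f [mm]' % (stats6['wRMS'] / 1e-3))
print('wRMS with scale:    %.2f [mm]' % (stats7['wRMS'] / 1e-3))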
Example #5
    def stk(self, **kwargs):

        # get the data manager
        dataMgr = self.dataMgr

        # nothing to stack if there is only one network
        if len(dataMgr) == 1:
            return

        iterCount = 0
        residualIQR = 0.025

        # something very large
        previousResidualIQR = 1e10

        # init with the original
        previousNpvsStacked = self.npvsStacked.copy()

        # flag for the final iteration
        isFinalIter = False

        # number of coordinates = numStns * 3
        numCoords = np.sum(~np.isnan(self.npvs))

        while iterCount < self.maxIter:

            self.summary.append('iter = ' + str(iterCount))
            self.summary.append(self.header)

            # align each column (npv) onto the npvTarget
            for j in range(self.npvs.shape[1]):

                # align the j'th column onto the target
                try:
                    T, npvT, stats = pyStk.helmert(self.npvs[:, j],
                                                   self.npvTarget,
                                                   sigma=residualIQR)

                    # save the aligned npv
                    self.npvsStacked[:, j] = npvT.flatten().copy()

                    # save the weights associated with each network alignment
                    self.npvsWeights[:, j] = stats['weights'].flatten().copy()

                except Exception as e:
                    print(dataMgr[j].name(), e)
                    self.npvsStacked[:, j] = np.ones_like(self.npvsStacked[:, j]) * np.nan
                    self.npvsWeights[:, j] = np.zeros_like(self.npvsStacked[:, j])
                    T, npvT, stats = None, None, None

                # compute the residuals
                self.npvsRedisuals[:, j] = self.npvTarget - self.npvsStacked[:, j]

                # figure out if the alignment was successful
                status = 'failed' if T is None else self.getStatus(T, stats)

                # if the network failed then zero it out via the weights
                if status == 'failed':
                    self.npvsWeights[:, j] = np.zeros_like(self.npvsStacked[:, j])
                    self.npvsWeights[np.isnan(self.npvs)] = np.nan

                # record the stats and transformation parameters
                self.stats.append(stats)
                self.transforms.append(T)

                # record the summary for this network alignment
                if T is not None:
                    self.summary.append(self.__mkSummary(dataMgr[j].name(), npvT, status, stats, T, np.nan))

            # update the iteration count
            iterCount = iterCount + 1

            # compute the IQR of all the residuals
            residualIQR = pyWeights.ipr(self.npvsRedisuals.flatten(), 50)

            # compute the difference between the current and previous iteration IQRs
            diffIQR = previousResidualIQR - residualIQR

            # compute the percentage of down-weighted data:
            # pout = any down-weighting, p1 = weights in [0.5, 1), p2 = weights below 0.5
            pout = (np.sum(self.npvsWeights < 1) / (numCoords * 1.0)) * 100.0
            p1 = np.sum(self.npvsWeights[self.npvsWeights < 1] >= 0.5)
            p1 = (p1 / (numCoords * 1.0)) * 100
            p2 = (np.sum(self.npvsWeights < 0.5) / (numCoords * 1.0)) * 100

            # generate some summary stats for this iteration
            self.summary.append("pySTK: residualIQR = %6.2f [mm]" % (residualIQR / 1e-3))
            self.summary.append("pySTK: decrease in sigma = %6.2f [mm]" % (diffIQR / 1e-3))
            self.summary.append("pySTK: percentage of downweighted data = %4.1f, %4.1f, %4.1f" % (pout, p1, p2))

            # if this was flagged as the final iteration then we're done
            if isFinalIter:
                break

            # check for convergence
            if diffIQR < self.sigtol:
                isFinalIter = True

            # special case: the current iteration actually made the IQR worse;
            # if so, roll back to the previous iteration and boost the IQR sigma
            if diffIQR < 0 or pout > 35:

                # figure out how much to increase sigma
                increment = max(0.002, abs(diffIQR), (residualIQR + abs(diffIQR)) / 1.5)

                # restore the previous iteration's sigma plus a fudge factor
                residualIQR = previousResidualIQR + increment

                # restore the previous iteration's aligned networks
                self.npvsStacked = previousNpvsStacked.copy()

                # note the rollback in the summary
                self.summary.append("pySTK: rolling back residualIQR = %6.2f [mm]" % (residualIQR / 1e-3))

            # recompute the target from the stacked npvs
            self.npvTarget = nanMedianForRows(self.npvsStacked, 2)

            # save this iteration's state for the next round
            previousResidualIQR = residualIQR
            previousNpvsStacked = self.npvsStacked.copy()

            # blank line for readability
            self.summary.append("")
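The convergence logic above leans on pyWeights.ipr, whose source is not shown. From its use here (ipr(residuals, 50) returning a sigma-like scale in meters) it reads as an inter-percentile range; a minimal sketch consistent with that reading, not the actual implementation:

import numpy as np

def ipr(x, width):
    # Hypothetical sketch of pyWeights.ipr: the spread between the
    # (50 - width/2) and (50 + width/2) percentiles of the finite values
    # of x; width=50 gives the classic interquartile range.
    lo = (100.0 - width) / 2.0
    lower, upper = np.nanpercentile(x, [lo, 100.0 - lo])
    return upper - lower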
Example #6

import numpy as np

import pyRF
import pyStk

ts = pyStk.pyTS().initFromMatFile('../data/ts.mat')
rf = pyRF.pyRF('itrf08').initForStnList([s.lower() for s in ts.stn_list])

fyear = ts.epochs[4000]

npv = ts.npvForEpoch(fyear)
npvRF = rf.npvForEpoch(fyear)

# np.nanmean/np.nanmedian are used here so that the `stats` dict returned
# by pyStk.helmert below does not shadow a stats module
print("mean:  ", np.nanmean(npv - npvRF))
print("median:", np.nanmedian(npv - npvRF))

print("Aligning epoch", fyear)
T, npvT, stats = pyStk.helmert(npv, npvRF)
print()
print('iter:', stats['iter'])
print()
print('pout:', stats['pout'])
print('nout:', stats['nout'])
print('npts:', stats['npts'])
print()
print(' RMS:', stats['RMS'] / 1e-3, '[mm]')
print('wRMS:', stats['wRMS'] / 1e-3, '[mm]')
print()
print('max resid:', stats['dvMax'] / 1e-3, '[mm]')
print('max resid indx:', stats['dvMaxIndx'][0])
print('max resid stn:', ts.stn_list[int(stats['dvMaxIndx'][0]) // 3])
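Because dvMaxIndx indexes the flattened 3-values-per-station vector, integer division and modulo recover the station and component of the worst residual. A small hedged helper (the x/y/z component ordering is an assumption):

def worstResidual(stats, stn_list):
    # Map the flat index of the largest residual back to a station name
    # and a coordinate component; assumes the 3-per-station layout used above.
    idx = int(stats['dvMaxIndx'][0])
    return stn_list[idx // 3], 'xyz'[idx % 3]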