Example #1
    def create_replace_links(self):
        replace_ln_file_path = os.path.join(self.pwd, 'replace_links.sh')

        try:
            replace_ln_file = file_open(replace_ln_file_path, 'w')
        except (OSError, IOError):
            raise Exception('could not open file ' + replace_ln_file_path)

        replace_ln_file.write("""#!/bin/bash
        set -e
        for link; do
            test -h "$link" || continue

            dir=$(dirname "$link")
            reltarget=$(readlink "$link")
            case $reltarget in
                /*) abstarget=$reltarget;;
                *)  abstarget=$dir/$reltarget;;
            esac

            rm -fv "$link"
            cp -afv "$abstarget" "$link" || {
                # on failure, restore the symlink
                rm -rfv "$link"
                ln -sfv "$reltarget" "$link"
            }
        done
        """)

        replace_ln_file.close()

        chmod_exec(replace_ln_file_path)
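
This and the following examples call two helpers, file_open and chmod_exec, that are not part of the standard library and are not shown in the snippets. A minimal sketch of what they presumably do (the real helpers may add encoding or error handling):

    import os
    import stat

    def file_open(path, mode='r'):
        # assumed to be a thin wrapper around the built-in open()
        return open(path, mode)

    def chmod_exec(path):
        # add execute permission for user, group and others,
        # preserving the file's existing mode bits
        st = os.stat(path)
        os.chmod(path, st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)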
Example #2
    def link_tables(self):
        script_path = 'link_tables.sh'
        try:
            link_tables = file_open(script_path, 'w')
        except (OSError, IOError):
            raise GamitSessionException('Could not create script file link_tables.sh')

        # link the apr file as the lfile.
        contents = \
            """#!/bin/bash
            # set up links
            cd %s;
            sh_links.tables -frame J2000 -year %s -eop %s -topt none &> sh_links.out;
            # kill the earthquake rename file
            rm eq_rename
            # create an empty rename file
            echo "" > eq_rename
            cd ..;
            """ % (self.pwd_tables, self.date.yyyy(), self.GamitOpts['eop_type'])

        link_tables.write(contents)
        link_tables.close()

        chmod_exec(script_path)
        os.system('./'+script_path)
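
Example #2 launches the generated script with os.system, which ignores a non-zero exit status. If error checking were wanted, an alternative (not part of the original code) is to run it through subprocess and raise on failure:

    import subprocess

    # illustrative only: run the generated script and fail loudly if it exits non-zero
    subprocess.run(['./link_tables.sh'], check=True)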
Example #3
    def linktables(self, year, eop_type):
        script_path = os.path.join(self.pwd_comb, 'link_tables.sh')
        try:
            link_tables = file_open(script_path, 'w')
        except (OSError, IOError):
            raise GlobkException('could not open file ' + script_path)

        # link the apr file as the lfile.
        with link_tables:
            contents = \
                """#!/bin/bash
                # set up links
                sh_links.tables -frame J2000 -year %s -eop %s -topt none &> sh_links.out;
                # link the bulletin A
                ln -s ~/gg/tables/pmu.usno .
                """ % (year, eop_type)

            link_tables.write(contents)

        chmod_exec(script_path)
Example #4
    def create_combination_script(self, date, org):

        # extract the gps week and convert to string
        gpsWeek_str = date.wwww()

        # set the path and name for the run script
        run_file_path = os.path.join(self.pwd_comb, 'globk.sh')

        try:
            run_file = file_open(run_file_path, 'w')
        except (OSError, IOError):
            raise GlobkException('could not open file ' + run_file_path)

        with run_file:
            contents = \
            """#!/bin/bash

            export INSTITUTE=%s

            # data product file names
            OUT_FILE=%s%s%s;
            
            # mk solutions directory for prt files etc
            [ ! -d tables ] && mkdir tables

            cd tables
            ../link_tables.sh

            # create global directory listing for globk
            for file in $(find .. -name "*.glx" -print);do echo $file;done | grep    "\/n0\/"  > globk.gdl
            for file in $(find .. -name "*.glx" -print);do echo $file;done | grep -v "\/n0\/" >> globk.gdl

            # create the globk cmd file
            echo " app_ptid all"                                                          > globk.cmd
            echo " prt_opt GDLF MIDP CMDS"                                               >> globk.cmd
            echo " out_glb ../file.GLX"                                                  >> globk.cmd
            echo " in_pmu pmu.usno"                                                      >> globk.cmd
            echo " descript Daily combination of global and regional solutions"          >> globk.cmd
            """ % (org, org, gpsWeek_str, str(date.gpsWeekDay))

            if self.net_type == 'global':
                contents += \
                """
                echo "# activated for global network merge"                                  >> globk.cmd
                echo " apr_wob    10 10  10 10 "                                             >> globk.cmd
                echo " apr_ut1    10 10        "                                             >> globk.cmd
                echo " apr_svs all 0.05 0.05 0.05 0.005 0.005 0.005 0.01 0.01 0.00 0.01 FR"  >> globk.cmd
                """
            contents += \
            """
            echo " max_chii  1. 0.6"                                                     >> globk.cmd
            echo " apr_site  all 1 1 1 0 0 0"                                            >> globk.cmd
            echo " apr_atm   all 1 1 1"                                                  >> globk.cmd

            # create the sinex header file
            echo "+FILE/REFERENCE                               " >  head.snx
            echo " DESCRIPTION   Instituto Geografico Nacional  " >> head.snx
            echo " OUTPUT        Solucion GPS combinada         " >> head.snx
            echo " CONTACT       [email protected]              " >> head.snx
            echo " SOFTWARE      glbtosnx Version               " >> head.snx
            echo " HARDWARE      .                              " >> head.snx
            echo " INPUT         Archivos binarios Globk        " >> head.snx
            echo "-FILE/REFERENCE                               " >> head.snx

            # run globk
            globk 0 ../file.prt ../globk.log globk.gdl globk.cmd 2>&1 > ../globk.out

            # convert the GLX file into sinex
            glbtosnx . ./head.snx ../file.GLX ../${OUT_FILE}.snx 2>&1 > ../glbtosnx.out

            # restore original directory
            cd ..;

            # figure out where the parameters start in the prt file
            LINE=`grep -n "PARAMETER ESTIMATES" file.prt | cut -d ":" -f1`

            # reduce line by one to make a little cleaner
            let LINE--;

            # print prt header
            sed -n 1,${LINE}p file.prt > ${OUT_FILE}.out

            # append the log file
            cat globk.log >> ${OUT_FILE}.out

            # create the fsnx file which contains only the solution estimate
            lineNumber=`grep --binary-file=text -m 1 -n "\-SOLUTION/ESTIMATE" ${OUT_FILE}.snx | cut -d : -f 1`

            # extract the solution estimate
            head -$lineNumber ${OUT_FILE}.snx > ${OUT_FILE}.fsnx;

            # move the H file to a meaningful name
            mv file.GLX ${OUT_FILE}.GLX

            # clear out log files
            rm -rf tables
            rm -f file*
            rm -f globk*
            rm -f glb*
            rm -f *.sh

            # compress sinex file
            # gzip --force *.snx
            gzip --force *.fsnx
            gzip --force *.out
            gzip --force *.glx
            gzip --force *.GLX

            """

            run_file.write(contents)
            # all done

        # add executable permissions
        chmod_exec(run_file_path)
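
The output name built here (OUT_FILE=%s%s%s) concatenates the organization code, the 4-digit GPS week from date.wwww() and the day-of-week number date.gpsWeekDay. As an illustration of what those fields represent (the project's own date class is not shown here), the GPS week and day can be derived by counting days from the GPS epoch of 1980-01-06:

    from datetime import date

    GPS_EPOCH = date(1980, 1, 6)   # the Sunday that starts GPS week 0

    def gps_week_and_day(d):
        # illustrative sketch only; the examples use the project's own date class
        days = (d - GPS_EPOCH).days
        return days // 7, days % 7   # (gpsWeek, gpsWeekDay: 0 = Sunday ... 6 = Saturday)

    # e.g. gps_week_and_day(date(2019, 6, 30)) -> (2060, 0)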
Example #5
    def create_finish_script(self):

        year = self.date.yyyy()
        doy  = self.date.ddd()

        # extract the gps week and convert to string
        gpsWeek_str = str(self.date.gpsWeek)

        # normalize gps week string
        if self.date.gpsWeek < 1000: gpsWeek_str = '0' + gpsWeek_str

        # extract the gps week and day of week
        gps_week     = self.date.gpsWeek
        gps_week_day = self.date.gpsWeekDay

        finish_file_path = os.path.join(self.pwd, 'finish.sh')

        try:
            finish_file = file_open(finish_file_path,'w')
        except (OSError, IOError):
            raise Exception('could not open file '+finish_file_path)

        contents = """#!/bin/bash
        export INSTITUTE=%s

        echo "finish.sh (`date +"%%Y-%%m-%%d %%T"`): Finish script started" >> monitor.log

        # set the name of the outfile
        FILE=%s%s%s
        DOY=%s
        YEAR=%s

        # move to the solution path
        if [ ! -d ./glbf ]; then
            # something went wrong! no glbf dir
            mkdir glbf
        fi

        cd glbf

        # make sure an h file exists, if not exit
        if [ ! -f ../$DOY/h*.${YEAR}${DOY} ]; then
            echo "FATAL in finish.sh (`date +"%%Y-%%m-%%d %%T"`): h-files not found in $DOY folder. Exit" >> ../monitor.log
            exit;
        fi

        # get the WL and NL ambiguity resolution and the nrms double diff statistics
        echo "finish.sh (`date +"%%Y-%%m-%%d %%T"`): NRMS and WL-NL ambiguity summary follows:" >> ../monitor.log
        grep 'nrms' ../$DOY/sh_gamit_${DOY}.summary >> ../monitor.log
        grep 'WL fixed' ../$DOY/sh_gamit_${DOY}.summary >> ../monitor.log

        # link the svnav.dat file
        ln -s ../tables/svnav.dat .

        # create the binary h-file
        htoglb . tmp.svs -a ../$DOY/h*.${YEAR}${DOY}  >> ../${FILE}.out

        # grep any missing stations to report them to monitor.log
        grep 'No data for site ' ../${FILE}.out | sort | uniq >> ../monitor.log
        
        # convert the binary h-file to sinex file
        glbtosnx . "" h*.glx ${FILE}.snx >> ../${FILE}.out

        # clean up
        rm HTOGLB.* tmp.svs l*  svnav.dat

        # move back to home
        cd ..;

        """ % (self.gamitopts['org'], self.gamitopts['org'], gpsWeek_str, str(gps_week_day), doy, year[2:4])

        # dump contents to the script file
        finish_file.write(contents)

        # this section is to calculate the orbits
        if self.gamitopts['expt_type'] == 'relax':

            # create an sp3 file from the g-file
            contents = """
            # move to the solutions directory
            cd ./solutions/*

            # make temporary directory
            mkdir tmp

            # copy the gfile to temp dir
            cp gfile* tmp/

            # move to the temp dir
            cd tmp;

            # do the damn thing
            mksp3.sh %s %s %s

            # copy the sp3 file to solution dir if exists
            [ -e *.sp3 ] && mv *.sp3 ..;

            # move out of temporary directory
            cd ..;

            # clean up
            rm -rf tmp gfile*;

            # back to home directory
            cd ../..

            """ % (year,doy,self.options['org'])

            finish_file.write(contents)

            return

        contents = """
        # move to the solutions directory
        cd $DOY

        # rename o file to znd file
        if [ -f o*a.[0-9][0-9][0-9]* ]; then
            mv -f o*a.[0-9][0-9][0-9]* ../glbf/%s%s%s.znd;
        fi

        # remove a priori o file
        if [ -f o*p.[0-9][0-9][0-9]* ]; then
            rm -f o*p.[0-9][0-9][0-9]*;
        fi

        # restore home dir
        cd ..

        """ % (self.gamitopts['org'], gpsWeek_str, str(gps_week_day))

        finish_file.write(contents)

        contents = """
        # move to the solutions directory
        cd $DOY

        # clean up
        # remove the grid files, rinex files, etc
        rm -rf gfile* *.grid ????????.??o

        # compress remaining files
        for file in $(ls);do gzip --force $file; done

        # return to home directory
        cd ..

        cd rinex
        rm -rf *
        cd ..
        echo "finish.sh (`date +"%Y-%m-%d %T"`): Done processing h-files and generating SINEX." >> monitor.log

        """

        finish_file.write(contents)

        # make sure to close the file
        finish_file.close()

        # add executable permissions
        chmod_exec(finish_file_path)
Example #6
    def create_run_script(self):

        year = self.date.yyyy()
        doy  = self.date.ddd()

        # extract the gps week and convert to string
        gpsWeek_str = str(self.date.gpsWeek)

        # normalize gps week string
        if self.date.gpsWeek < 1000: gpsWeek_str = '0'+gpsWeek_str

        # set the path and name for the run script
        run_file_path = os.path.join(self.pwd,'run.sh')

        try:
            run_file = file_open(run_file_path, 'w')
        except (OSError, IOError):
            raise Exception('could not open file '+run_file_path)

        contents = """#!/bin/bash

        # just in case, create a temporary dir for fortran
        if [ ! -d ./tmp ]; then
            mkdir ./tmp
        fi
        export TMPDIR=`pwd`/tmp

        export INSTITUTE=%s
        # set max depth for recursion
        MAX_LEVEL=3;

        # parse input
        level=$1; [ $# -eq 0 ] && level=1;

        # check that level less than max depth
        if [[ $level -gt $MAX_LEVEL ]];then
            # if so then exit
            echo "run.sh (`date +"%%Y-%%m-%%d %%T"`): MAX ITERATION DEPTH REACHED ... MUST EXIT" >> monitor.log
            exit 0;
        fi

        echo "run.sh (`date +"%%Y-%%m-%%d %%T"`): Iteration depth: $level" >> monitor.log

        # set the params
        EXPT=%s;
        YEAR=%s;
        DOY=%s;
        MIN_SPAN=%s;
        EOP=%s;
        NOFTP=%s;

        # set the name of the outfile
        OUT_FILE=%s%s%s.out

        # execution flag for sh_gamit
        EXE=1;
        COUNTER=0;

        while [ $EXE -eq 1 ]; do

        if [ $COUNTER -gt 9 ]; then
            echo "run.sh (`date +"%%Y-%%m-%%d %%T"`): Maximum number of retries (10) reached. Abnormal exit in run.sh. Check processing log." >> monitor.log
            exit 1
        fi

        # set exe to 0 so that we exit exe loop if no problems found
        EXE=0;

        # save a copy of the lfile. before running sh_gamit
        iter_ext=`printf "l%%02d_i%%02d" $level $COUNTER`
        cp ./tables/lfile. ./tables/lfile.${iter_ext}

        # do the damn thing
        if [ "$NOFTP" = "no" ]; then
            sh_gamit -update_l N -topt none -c -copt null -dopt c x -expt $EXPT -d $YEAR $DOY -minspan $MIN_SPAN -remakex Y -eop $EOP &> $OUT_FILE;
        else
            sh_gamit -update_l N -topt none -c -copt null -noftp -dopt c x -expt $EXPT -d $YEAR $DOY -minspan $MIN_SPAN -remakex Y -eop $EOP &> $OUT_FILE;
        fi
        """ \
        % (self.gamitopts['org'], self.gamitopts['expt'], year, doy, '12', self.gamitopts['eop_type'],
           self.gamitopts['noftp'], self.gamitopts['org'], gpsWeek_str, str(self.date.gpsWeekDay))

        # if we're in debug mode do not pipe output to file
        # if not session.options['debug']: contents += """ &> $OUT_FILE; """;

        contents += """

        grep -q "Geodetic height unreasonable"  $OUT_FILE;
        if [ $? -eq 0 ]; then
            sstn=`grep "MODEL/open: Site" $OUT_FILE  | tail -1 | cut -d ":" -f 5 | cut -d " " -f 3 |tr '[:upper:]' '[:lower:]'`;
            echo "run.sh (`date +"%Y-%m-%d %T"`): deleting station ${sstn}: unreasonable geodetic height" >> monitor.log
            rm rinex/${sstn}* ;
            rm $DOY/${sstn}* ;
            grep "MODEL/open: Site" $OUT_FILE  | tail -1
            echo "run.sh (`date +"%Y-%m-%d %T"`): will try sh_gamit again ..." >> monitor.log
            EXE=1;
        fi

        grep "FATAL.*MAKEX/lib/rstnfo: No match for" $OUT_FILE
        if [ $? -eq 0 ];then
            sstn=`grep "FATAL.*MAKEX/lib/rstnfo: No match for" $OUT_FILE | tail -1 | cut -d ":" -f5 | awk '{print $4}' | tr '[:upper:]' '[:lower:]'`
            echo "run.sh (`date +"%Y-%m-%d %T"`): deleting station ${sstn}: no station info" >> monitor.log
            rm rinex/${sstn}* ;
            rm $DOY/${sstn}* ;
            echo "run.sh (`date +"%Y-%m-%d %T"`): will try sh_gamit again ..." >> monitor.log
            EXE=1;
        fi

        grep -q "Error extracting velocities for"  $OUT_FILE;
        if [ $? -eq 0 ]; then
            sstn=`grep "Error extracting velocities for" $OUT_FILE  | head -1 | cut -d ":" -f 5 | cut -d " " -f 6 |tr '[:upper:]' '[:lower:]'`;
            echo "run.sh (`date +"%Y-%m-%d %T"`): deleting station ${sstn}: Error extracting velocities for" >> monitor.log
            rm rinex/${sstn}* ;
            rm $DOY/${sstn}* ;
            grep "Error extracting velocities for" $OUT_FILE  | tail -1
            echo "run.sh (`date +"%Y-%m-%d %T"`): will try sh_gamit again ..." >> monitor.log
            EXE=1;
        fi

        grep -q    "Bad WAVELENGTH FACT" $OUT_FILE;
        if [ $? -eq 0 ]; then
            sstn=`grep "Bad WAVELENGTH FACT" $OUT_FILE | tail -1 | cut -d ":" -f 5 | cut -d " " -f 6 | cut -c 3-6`
            echo "run.sh (`date +"%Y-%m-%d %T"`): deleting station ${sstn}: Bad WAVELENGTH FACT in rinex header" >> monitor.log
            rm rinex/${sstn}*;
            rm $DOY/${sstn}* ;
            echo "run.sh (`date +"%Y-%m-%d %T"`): will try sh_gamit again ..." >> monitor.log
            EXE=1;
        fi

        grep -q    "Error decoding swver" $OUT_FILE;
        if [ $? -eq 0 ]; then
            grep       "Error decoding swver" $OUT_FILE;
            sstn=`grep "Error decoding swver" $OUT_FILE | tail -1 | awk '{print $8}' | tr '[:upper:]' '[:lower:]'`
            echo "run.sh (`date +"%Y-%m-%d %T"`): deleting station ${sstn}: Error decoding swver" >> monitor.log
            rm rinex/${sstn}*;
            rm $DOY/${sstn}* ;
            echo "run.sh (`date +"%Y-%m-%d %T"`): will try sh_gamit again ..." >> monitor.log
            EXE=1;
        fi

        grep -q    "FATAL.*MAKEX/lib/hisub:  Antenna code.*not in hi.dat" $OUT_FILE;
        if [ $? -eq 0 ]; then
            grep       "FATAL.*MAKEX/lib/hisub:  Antenna code.*not in hi.dat" $OUT_FILE;
            sstn=`grep "FATAL.*MAKEX/lib/hisub:  Antenna code.*not in hi.dat" $OUT_FILE | tail -1 | awk '{print $9}' | cut -c2-5 | tr '[:upper:]' '[:lower:]'`
            echo "run.sh (`date +"%Y-%m-%d %T"`): deleting station ${sstn}: Antenna code not in hi.dat" >> monitor.log
            rm rinex/${sstn}*;
            rm $DOY/${sstn}* ;
            echo "run.sh (`date +"%Y-%m-%d %T"`): will try sh_gamit again ..." >> monitor.log
            EXE=1;
        fi

        grep -q    "FATAL.*FIXDRV/dcheck: Only one or no existing X-files" $OUT_FILE;
        if [ $? -eq 0 ]; then
            grep       "FATAL.*FIXDRV/dcheck: Only one or no existing X-files" $OUT_FILE;
            echo "run.sh (`date +"%Y-%m-%d %T"`): FIXDRV/dcheck: Only one or no existing X-files" >> monitor.log
            echo "run.sh (`date +"%Y-%m-%d %T"`): will try sh_gamit again ..." >> monitor.log
            EXE=1;
        fi

        grep -q    "FATAL.*MAKEXP/makexp: No RINEX or X-files found" $OUT_FILE;
        if [ $? -eq 0 ]; then
            grep       "FATAL.*MAKEXP/makexp: No RINEX or X-files found" $OUT_FILE;
            echo "run.sh (`date +"%Y-%m-%d %T"`): MAKEXP/makexp: No RINEX or X-files found" >> monitor.log
            echo "run.sh (`date +"%Y-%m-%d %T"`): will try sh_gamit again ..." >> monitor.log
            EXE=1;
        fi

        grep -q    "FATAL.*MAKEX/get_rxfiles: Cannot find selected RINEX file" $OUT_FILE;
        if [ $? -eq 0 ]; then
            grep       "FATAL.*MAKEX/get_rxfiles: Cannot find selected RINEX file" $OUT_FILE;
            echo "run.sh (`date +"%Y-%m-%d %T"`): MAKEX/get_rxfiles: Cannot find selected RINEX file" >> monitor.log
            echo "run.sh (`date +"%Y-%m-%d %T"`): will try sh_gamit again ..." >> monitor.log
            EXE=1;
        fi

        grep -q    "FATAL.*MAKEX/openf: Error opening file:.*" $OUT_FILE;
        if [ $? -eq 0 ]; then
            grep       "FATAL.*MAKEX/openf: Error opening file:.*" $OUT_FILE;
            echo "run.sh (`date +"%Y-%m-%d %T"`): MAKEX/openf: Error opening file" >> monitor.log
            echo "run.sh (`date +"%Y-%m-%d %T"`): will try sh_gamit again ..." >> monitor.log
            EXE=1;
        fi
        
        grep -q    "SOLVE/get_widelane: Error reading first record" $OUT_FILE;
        if [ $? -eq 0 ]; then
            grep       "SOLVE/get_widelane: Error reading first record" $OUT_FILE;
            echo "run.sh (`date +"%Y-%m-%d %T"`): SOLVE/get_widelane: Error reading first record of temp file" >> monitor.log
            echo "run.sh (`date +"%Y-%m-%d %T"`): will try sh_gamit again ..." >> monitor.log
            EXE=1;
        fi

        grep -q    "Failure in sh_preproc. STATUS 1 -- sh_gamit terminated" $OUT_FILE;
        if [ $? -eq 0 ]; then
            grep       "Failure in sh_preproc. STATUS 1 -- sh_gamit terminated" $OUT_FILE;
            echo "run.sh (`date +"%Y-%m-%d %T"`): Failure in sh_preproc. STATUS 1 -- sh_gamit terminated" >> monitor.log
            echo "run.sh (`date +"%Y-%m-%d %T"`): will try sh_gamit again ..." >> monitor.log
            EXE=1;
        fi

        # problems related to ill conditioned bias matrix
        grep -q  "FATAL.*SOLVE/lcloos: Inversion error in" $OUT_FILE;
        if [ $? -eq 0 ]; then

            # strip the FATAL keyword from the message so that finish.sh does not mistake it for an execution error
            err="SOLVE/lcloos: Inversion error in LCNORM(2)"

            # determine which autocln.sum exists and has information in it
            if [ -s $DOY/autcln.post.sum ]; then
                autocln=autcln.post.sum

                # error occurred after the prefit, read the autcln file and remove the station with low obs

                echo "run.sh (`date +"%Y-%m-%d %T"`): $err (after prefit) Will remove the station with the lowest obs count in $autocln" >> monitor.log

                sstn=`sed -n -e '/Number of data by site/,/^$/ p' $DOY/$autocln | tail -n +3 | sed '$d' | awk '{print $3, $4}' | awk -v min=999999 '{if($2<min){min=$2; stn=$1}}END{print stn}' | tr '[:upper:]' '[:lower:]'`

                nobs=`sed -n -e '/Number of data by site/,/^$/ p' $DOY/$autocln | tail -n +3 | sed '$d' | awk '{print $3, $4}' | awk -v min=999999 '{if($2<min){min=$2; stn=$1}}END{print min}'`

                echo "run.sh (`date +"%Y-%m-%d %T"`): deleting station ${sstn} -> observation count: $nobs" >> monitor.log
                rm rinex/${sstn}*;
                rm $DOY/${sstn}* ;
                echo "run.sh (`date +"%Y-%m-%d %T"`): will try sh_gamit again ..." >> monitor.log
                EXE=1;

            else
                # the error occurred during the prefit; autcln may or may not have caught the problem. Use the observation count in $OUT_FILE

                echo "run.sh (`date +"%Y-%m-%d %T"`): $err. (during prefit) Will analyze the MAKEX output and remove the file with more rejected observations" >> monitor.log

                max_rejected=`grep "observations rejected" $OUT_FILE | awk -F ':' '{print $5}' | awk '{print $6}' | awk -v max=0 '{if($1>max){max=$1}}END{print max}'`

                sstn=(`sed -n -e '/'$max_rejected' observations rejected/,/End processing/ p' $OUT_FILE | grep 'End' | awk -F ':' '{print $6}' | awk '{print $1}' | uniq | tr '[:upper:]' '[:lower:]'`)

                if [ -z "$sstn" ]; then
                    echo "run.sh (`date +"%Y-%m-%d %T"`): could not determine the station with low observation count. Check $OUT_FILE" >> monitor.log
                else
                    for stn in ${sstn[*]}
                    do
                        echo "run.sh (`date +"%Y-%m-%d %T"`): deleting station ${stn} -> rejected observation count: $max_rejected" >> monitor.log
                        rm rinex/${stn}*;
                        rm $DOY/${stn}* ;
                    done
                    echo "run.sh (`date +"%Y-%m-%d %T"`): will try sh_gamit again ..." >> monitor.log
                    EXE=1;
                fi
            fi

            # different search methods, deprecated
            #sstn=(`sed -n -e '/ .... valid observations/,/stop in MODEL/ p' $OUT_FILE | grep 'Site' | awk -F ':' '{print $5}' | awk '{print $2'} | uniq | tr '[:upper:]' '[:lower:]'`)
        fi

        # this case after SOLVE/lcloos because it also triggers GAMIT sh_chksolve
        grep -q "FATAL GAMIT sh_chksolve: Solve failed to complete normally" $OUT_FILE;
        if [ $? -eq 0 ] && [ $EXE -eq 0 ]; then
            echo "run.sh (`date +"%Y-%m-%d %T"`): GAMIT sh_chksolve: Solve failed to complete normally" >> monitor.log
            echo "run.sh (`date +"%Y-%m-%d %T"`): will try sh_gamit again ..." >> monitor.log
            EXE=1;
        fi
        
        # grep over constrained sites
        grep -q "over constrained" ./$DOY/sh_gamit_${DOY}.summary;
        if [ $? -eq 0 ]; then
            # get the number of lines
            lines=`cat ./$DOY/sh_gamit_${DOY}.summary | sed -n 's/WARNING: \([0-9]*\) SITES.*$/\\1/p'`
            grep -A $lines "over constrained" ./$DOY/sh_gamit_${DOY}.summary >> monitor.log
            
            # DDG: new behavior -> remove the station with the largest over constrained coordinate
            # grep the sites and get the unique list separated by | (to do regex grep)
            # stns=`grep "GCR APTOL" monitor.log | awk '{print $4"_GPS"}' | uniq | tr '<line break>' '|'`
            # copy the sittbl. (just in case)
            # cp tables/sittbl. tables/sittbl.${iter_ext}
            # remove those from the sittbl list: this will relax station to 100 m
            # grep -v -E "${stns:0:-1}" tables/sittbl.${iter_ext} > tables/sittbl.
            
            stns=`grep "GCR APTOL" ./$DOY/sh_gamit_${DOY}.summary | awk '{print sqrt($(NF) * $(NF)), $4}' | sort -r | head -n1 | awk '{print $2}' | tr '[:upper:]' '[:lower:]'`
            echo "run.sh (`date +"%Y-%m-%d %T"`): deleting over constrained station ${stns}" >> monitor.log
            rm rinex/${stns}*;
            rm $DOY/${stns}* ;
            
            # echo "run.sh (`date +"%Y-%m-%d %T"`): relaxing over constrained stations ${stns:0:-1}" >> monitor.log
            echo "run.sh (`date +"%Y-%m-%d %T"`): replacing lfile. from this run with lfile.${iter_ext}" >> monitor.log
            rm ./tables/lfile.
            cp ./tables/lfile.${iter_ext} ./tables/lfile.
            
            echo "run.sh (`date +"%Y-%m-%d %T"`): will try sh_gamit again ..." >> monitor.log
            
            EXE=1;
        fi

        if [ $EXE -eq 1 ]; then
            # if it will retry, save the previous output using extension .l00_i00, .l00_i01, ... etc
            # where lxx is the iteration level and iyy is the iteration within that level
            mv $OUT_FILE $OUT_FILE.${iter_ext}
            COUNTER=$((COUNTER+1));
        fi

        # grep updated coordinates
        grep Updated ./tables/lfile.;
        if [ $? -eq 0 ]; then
            grep Updated ./tables/lfile. >> monitor.log
        fi

        done

        # clean up
        rm -rf ionex met teqc*

        # blab about failures
        grep "FATAL" *.out >> monitor.log

        """

        run_file.write(contents)

        contents = \
        """

        # remove extraneous solution files
        # rm ./$DOY/l*[ab].*;

        # make sure to rename the gfilea to the correct gfile[0-9].doy
        [ -e ./igs/gfilea.* ] && mv -f ./igs/gfilea* ./*/gfile[0-9]*

        # see if any of the coordinates were updated, exit if not
        # DDG: Actually, sometimes the final updated coordinate only differs by < .3 m when solve is invoked more than
        # once from within sh_gamit. Therefore, also check that the updated coordinate is > .3 m from the original APR
        # this happens because the first Updated coordinate (lxxxxa.ddd) triggers an iteration in solve (lxxxxb.ddd) 
        # with a solution that is again close to the original APR. Without this check, PG iterates 3 times unnecessarily
        
        grep Updated ./tables/lfile.;
        if [ $? -ne 0 ]; then
            echo "run.sh (`date +"%Y-%m-%d %T"`): Normal exit from run.sh" >> monitor.log
            # uncompress everything. Will be compressed later on
            gunzip ./*/*;
            exit
        else
            updated=(`grep Updated ./tables/lfile. | awk '{print $1}'`)
            
            RERUN=0
            
            for stn in ${updated[*]}
            do
                coords=`grep $stn ./tables/lfile. | awk '{print $2,$3,$4}'`
                
                # use the copy of the lfile to grep the APR coordinates
                aprs=`grep $stn ./tables/lfile.${iter_ext} | awk '{print $2,$3,$4}'`
                
                # get the distance between Updated and APR
                dist=`echo $coords $aprs | awk '{print sqrt(($1 - $4)^2 + ($2 - $5)^2 + ($3 - $6)^2)}'`
                
                if (( $(echo "$dist > 0.3" | bc -l) )); then
                    RERUN=1;
                fi
            done
            
            # if RERUN = 0, Updated coordinate was < 0.3 m
            if [ $RERUN -eq 0 ]; then
                echo "run.sh (`date +"%Y-%m-%d %T"`): Updated coordinate detected but final solution within 0.3 m of APR" >> monitor.log
                echo "run.sh (`date +"%Y-%m-%d %T"`): Normal exit from run.sh" >> monitor.log
                # uncompress everything. Will be compressed later on
                gunzip ./*/*;
                exit
            fi
        fi

        # iteration detected!
        echo "run.sh (`date +"%Y-%m-%d %T"`): Updated coordinate detected in lfile. Iterating..." >> monitor.log

        # save this level's out file for debugging
        mv $OUT_FILE $OUT_FILE.${iter_ext}

        # apr file for updated coordinates
        aprfile=${EXPT}.temp

        # recreate the apr file with updated coordinates minus the comments
        sed -e 's/Updated from l.....\.[0-9][0-9][0-9]//g' ./tables/lfile. > ./tables/${aprfile};

        # the copy of the lfile was saved BEFORE running GAMIT. Replace with the updated version
        cp ./tables/${aprfile} ./tables/lfile.

        # copy over an updated gfile if it exists
        # cp ./*/gfile* ./tables/

        # update level
        level=$((level+1));

        # remove the 'old' solution
        [ $level -le $MAX_LEVEL ] && rm -rf ./$DOY;

        # decompress the remaining solution files
        gunzip ./*/*;

        # do another iteration
        ./run.sh $level;

        """

        run_file.write(contents)
        run_file.close()

        chmod_exec(run_file_path)
Example #7
    def create_combination_script(self, org, gpsweek, gpsweekday, sites):

        # set the path and name for the run script
        run_file_path = os.path.join(self.pwd_comb, 'globk.sh')

        try:
            run_file = file_open(run_file_path, 'w')
        except (OSError, IOError):
            raise GlobkException('could not open file ' + run_file_path)

        sites = split_string(sites, 80)
        site_list_string = []
        for s in sites:
            site_list_string.append('echo " use_site %s"                                     >> globk.cmd' % s)

        site_string = '\n'.join(site_list_string)

        contents = \
        """#!/bin/bash

        export INSTITUTE=%s
        export GPSWEEK=%s
        export GPSWEEKDAY=%i
        
        # data product file names
        OUT_FILE=${INSTITUTE}${GPSWEEK}${GPSWEEKDAY};

        # create global directory listing for globk
        for file in $(find . -name "*.GLX" -print | sort);do echo "$file";done | grep    "\/n0\/"  > globk.gdl
        for file in $(find . -name "*.GLX" -print | sort);do echo "$file";done | grep -v "\/n0\/" >> globk.gdl

        # create the globk cmd file
        echo " eq_file eq_rename.txt"                            >  globk.cmd
        echo " use_site clear"                                   >> globk.cmd
        %s
        echo " prt_opt GDLF MIDP CMDS PLST "                     >> globk.cmd
        echo " out_glb $OUT_FILE.GLX"                            >> globk.cmd
        echo " in_pmu /opt/gamit_globk/tables/pmu.usno"          >> globk.cmd
        echo " descript Weekly combined solution at $INSTITUTE"  >> globk.cmd
        echo " max_chii  3. 0.6"                                 >> globk.cmd
        echo " apr_site  all 1 1 1 0 0 0"                        >> globk.cmd
        # DO NOT ACTIVATE ATM COMBINATION BECAUSE IT WILL NOT WORK!
        echo "#apr_atm   all 1 1 1"                              >> globk.cmd

        # create the sinex header file
        echo "+FILE/REFERENCE                               " >  head.snx
        echo " DESCRIPTION   Instituto Geografico Nacional  " >> head.snx
        echo " OUTPUT        Solucion GPS combinada         " >> head.snx
        echo " CONTACT       [email protected]                 " >> head.snx
        echo " SOFTWARE      glbtosnx Version               " >> head.snx
        echo " HARDWARE      .                              " >> head.snx
        echo " INPUT         Archivos binarios Globk        " >> head.snx
        echo "-FILE/REFERENCE                               " >> head.snx

        # run globk
        globk 0 file.prt globk.log globk.gdl globk.cmd 2>&1 > globk.out

        # convert the GLX file into sinex
        glbtosnx . head.snx $OUT_FILE.GLX ${OUT_FILE}.snx 2>&1 > glbtosnx.out

        # figure out where the parameters start in the prt file
        LINE=`grep -n "PARAMETER ESTIMATES" file.prt | cut -d ":" -f1`

        # reduce line by one to make a little cleaner
        let LINE--;

        # print prt header
        sed -n 1,${LINE}p file.prt > ${OUT_FILE}.out

        # append the log file
        cat globk.log >> ${OUT_FILE}.out

        # create the fsnx file which contains only the solution estimate
        lineNumber=`grep --binary-file=text -m 1 -n "\-SOLUTION/ESTIMATE" ${OUT_FILE}.snx | cut -d : -f 1`

        # extract the solution estimate
        head -$lineNumber ${OUT_FILE}.snx > ${OUT_FILE}.fsnx;

        """ % (org, gpsweek, gpsweekday, site_string)

        run_file.write(contents)

        # all done
        run_file.close()

        # add executable permissions
        chmod_exec(run_file_path)
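
The sites argument in Example #7 is a long station list that split_string (another helper not shown in the snippet) breaks into pieces of at most 80 characters, presumably so that each generated " use_site ..." line stays reasonably short for the globk command file. A minimal sketch of such a helper, assuming a whitespace-separated input string:

    def split_string(s, max_len):
        # illustrative sketch: break a space-separated list into chunks
        # no longer than max_len characters, without splitting tokens
        chunks, current = [], ''
        for token in s.split():
            candidate = (current + ' ' + token).strip()
            if len(candidate) > max_len and current:
                chunks.append(current)
                current = token
            else:
                current = candidate
        if current:
            chunks.append(current)
        return chunks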