Example No. 1
    def create_replace_links(self):
        replace_ln_file_path = os.path.join(self.pwd, 'replace_links.sh')

        try:
            replace_ln_file = file_open(replace_ln_file_path, 'w')
        except (OSError, IOError):
            raise Exception('could not open file ' + replace_ln_file_path)

        replace_ln_file.write("""#!/bin/bash
        set -e
        for link; do
            test -h "$link" || continue

            dir=$(dirname "$link")
            reltarget=$(readlink "$link")
            case $reltarget in
                /*) abstarget=$reltarget;;
                *)  abstarget=$dir/$reltarget;;
            esac

            rm -fv "$link"
            cp -afv "$abstarget" "$link" || {
                # on failure, restore the symlink
                rm -rfv "$link"
                ln -sfv "$reltarget" "$link"
            }
        done
        """)

        replace_ln_file.close()

        chmod_exec(replace_ln_file_path)
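
These examples call two helpers, file_open and chmod_exec, that are not shown on this page. A minimal sketch of what they might look like, assuming they simply wrap the builtin open() and os.chmod():

import os
import stat

def file_open(path, mode='r'):
    # hypothetical helper: thin wrapper around the builtin open()
    return open(path, mode)

def chmod_exec(path):
    # hypothetical helper: add the executable bits to an existing file,
    # i.e. the equivalent of "chmod +x path"
    st = os.stat(path)
    os.chmod(path, st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)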
Example No. 2
    def link_tables(self):
        script_path = 'link_tables.sh'
        try:
            link_tables = file_open(script_path, 'w')
        except (OSError, IOError):
            raise GamitSessionException('Could not create script file link_tables.sh')

        # link the apr file as the lfile.
        contents = \
            """#!/bin/bash
            # set up links
            cd %s;
            sh_links.tables -frame J2000 -year %s -eop %s -topt none &> sh_links.out;
            # kill the earthquake rename file
            rm eq_rename
            # create an empty rename file
            echo "" > eq_rename
            cd ..;
            """ % (self.pwd_tables, self.date.yyyy(), self.GamitOpts['eop_type'])

        link_tables.write(contents)
        link_tables.close()

        chmod_exec(script_path)
        os.system('./'+script_path)
Example No. 3
    def load_session_config(self, session_config_file, check_config):
        try:
            # parse session config file
            config = configparser.ConfigParser()
            with file_open(session_config_file) as f:
                config.read_file(f)

            # check that all required items are there and files exist
            if check_config:
                self.__check_config(config)

            # get gamit config items from session config file
            self.gamitopt.update(dict(config.items('gamit')))

            self.NetworkConfig = pyBunch.Bunch().fromDict(
                dict(config.items('network')))

            if 'type' not in self.NetworkConfig.keys():
                raise ValueError(
                    'Network "type" must be specified in config file: use "regional" or "global"'
                )

            self.gamitopt['gnss_data'] = config.get('Archive', 'gnss_data')
            self.gamitopt['max_cores'] = int(self.gamitopt['max_cores'])

            # TO-DO: check that all the required parameters are present
            if len(self.gamitopt['expt']) != 4:
                raise ValueError(
                    'The experiment name parameter must be 4 characters long.')

        except configparser.NoOptionError:
            raise
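
The options read above imply a session configuration with at least [gamit], [network] and [Archive] sections. A hypothetical minimal file, parsed here with configparser the same way load_session_config does (all values below are made up):

import configparser

sample = """
[gamit]
expt = igal
max_cores = 4
eop_type = usno

[network]
type = regional

[Archive]
gnss_data = /data/gnss
"""

config = configparser.ConfigParser()
config.read_string(sample)

assert config.get('Archive', 'gnss_data') == '/data/gnss'
assert len(config.get('gamit', 'expt')) == 4  # the experiment name must be 4 characters long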
Example No. 4
    def __init__(self, sp3archive, date, sp3types, copyto, no_cleanup=False):

        # try both compressed and non-compressed sp3 files
        # loop through the types of sp3 files to try
        self.sp3_path   = None
        self.RF         = None
        self.no_cleanup = no_cleanup

        for sp3type in sp3types:
            self.sp3_filename = sp3type + date.wwwwd() + '.sp3'

            try:
                pyProducts.OrbitalProduct.__init__(self, sp3archive, date, self.sp3_filename, copyto)
                self.sp3_path = self.file_path
                self.type     = sp3type
                break
            except pyProducts.pyProductsExceptionUnreasonableDate:
                raise
            except pyProducts.pyProductsException:
                # if the file was not found, go to next
                continue

        # if we get here and self.sp3_path is still None, then no type of sp3 file was found
        if self.sp3_path is None:
            raise pySp3Exception('Could not find a valid orbit file (types: ' +
                                 ', '.join(sp3types) + ') for '
                                 'week ' + str(date.gpsWeek) +
                                 ' day ' + str(date.gpsWeekDay) +
                                 ' using any of the provided sp3 types')

        # parse the RF of the orbit file
        with file_open(self.sp3_path) as fileio:
            line = fileio.readline()

            self.RF = line[46:51].strip()
Example No. 5
    def create_otl_list(self):

        otl_path = os.path.join(self.pwd_tables, 'otl.list')
        if os.path.isfile(otl_path):
            os.remove(otl_path)

        
        with file_open(otl_path, 'w') as otl_list:
            otl_list.write('%s   8-character GAMIT ID read by grdtab (M -> CM)\n' % (self.Config.options['otlmodel']))
            otl_list.write("""$$ Ocean loading displacement
$$
$$ Calculated on holt using olfg/olmpp of H.-G. Scherneck
$$
$$ COLUMN ORDER:  M2  S2  N2  K2  K1  O1  P1  Q1  MF  MM SSA
$$
$$ ROW ORDER:
$$ AMPLITUDES (m)
$$   RADIAL
$$   TANGENTL    EW
$$   TANGENTL    NS
$$ PHASES (degrees)
$$   RADIAL
$$   TANGENTL    EW
$$   TANGENTL    NS
$$
$$ Displacement is defined positive in upwards, South and West direction.
$$ The phase lag is relative to Greenwich and lags positive. The
$$ Gutenberg-Bullen Greens function is used. In the ocean tide model the
$$ deficit of tidal water mass has been corrected by subtracting a uniform
$$ layer of water with a certain phase lag globally.
$$
$$ Complete <model name> : No interpolation of ocean model was necessary
$$ <model name>_PP       : Ocean model has been interpolated near the station
$$                         (PP = Post-Processing)
$$
$$ CMC:  YES  (corr.tide centre of mass)
$$
$$ Ocean tide model: %s
$$
$$ END HEADER
$$\n""" % (self.Config.options['otlmodel']))

            for stn in self.StationInstances:
                otl = stn.otl_H.split('\n')
                # remove BLQ header
                otl = otl[29:]
                # need to change the station record for GAMIT to take it
                otl[0] = '  %s' % stn.StationAlias.upper()
                if stn.lon < 0:
                    lon = 360+stn.lon
                else:
                    lon = stn.lon
                otl[3] = '$$ %s                                 RADI TANG lon/lat:%10.4f%10.4f' \
                         % (stn.StationAlias.upper(), lon, stn.lat)

                otl_list.write('\n'.join(otl))

            # write BLQ format termination
            otl_list.write("$$ END TABLE\n")
Example No. 6
    def copy_sestbl_procdef_atx(self):

        self.frame, atx = determine_frame(self.Config.options['frames'], self.date)

        # copy process.defaults and sestbl.
        copyfile(self.GamitOpts['process_defaults'],
                 os.path.join(self.pwd_tables, 'process.defaults'))
        # copyfile(self.GamitOpts['atx'], os.path.join(self.pwd_tables, 'antmod.dat'))
        copyfile(atx, os.path.join(self.pwd_tables, 'antmod.dat'))

        # change the scratch directory in the sestbl. file
        with file_open(os.path.join(self.pwd_tables, 'sestbl.'), 'w') as sestbl:
            with file_open(self.GamitOpts['sestbl']) as orig_sestbl:
                for line in orig_sestbl:
                    if 'Scratch directory' in line:
                        # an empty value means use the local directory
                        sestbl.write('Scratch directory = \n')
                    else:
                        sestbl.write(line)
Example No. 7
    def create_apr_sittbl_file(self):
        lfile_path  = os.path.join(self.pwd_tables, 'lfile.')
        sittbl_path = os.path.join(self.pwd_tables, 'sittbl.')
        log_path    = os.path.join(self.pwd_tables, 'debug.log')

        for f in (lfile_path, sittbl_path):
            if os.path.isfile(f):
                os.remove(f)

        with file_open(lfile_path, 'w') as lfile:
            with file_open(sittbl_path, 'w') as sittbl:
                with file_open(log_path, 'w') as debug:

                    sittbl.write('SITE              FIX    --COORD.CONSTR.--  \n')
                    sittbl.write('      << default for regional sites >>\n')
                    sittbl.write('ALL               NNN    100.  100.   100. \n')

                    for stn in self.StationInstances:
                        lfile .write(stn.GetApr()     + '\n')
                        sittbl.write(stn.GetSittbl()  + '\n')
                        debug .write(stn.DebugCoord() + '\n')
Example No. 8
def replace_in_sinex(sinex, observations, unknowns, new_val):

    new_unknowns = \
""" NUMBER OF UNKNOWNS%22i
 NUMBER OF DEGREES OF FREEDOM%12i
 PHASE MEASUREMENTS SIGMA          0.0025
 SAMPLING INTERVAL (SECONDS)           30
""" % (new_val, observations - new_val)

    sinex_path = os.path.basename(os.path.splitext(sinex)[0]) + '_MOD.snx'
    with file_open(sinex_path, 'w') as nsnx:
        with file_open(sinex, 'r') as osnx:
            for line in osnx:
                if ' NUMBER OF UNKNOWNS%22i' % unknowns in line:
                    # replace the unknowns / degrees-of-freedom block with the new values
                    nsnx.write(new_unknowns)
                else:
                    nsnx.write(line)

    # rename file
    os.remove(sinex)
    os.renames(sinex_path, sinex)
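
A hypothetical call, rewriting the unknowns block of a SINEX file in place (the file name and counts here are made up):

replace_in_sinex('ign20801.snx', observations=150000, unknowns=12000, new_val=11500)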
Example No. 9
    def create_sitedef(self):

        sitedefFile = os.path.join(self.pwd_tables, 'sites.defaults')
        try:
            with file_open(sitedefFile, 'w') as sitedef:
                sitedef.write(' all_sites %s xstinfo\n' % (self.GamitOpts['expt']))

                for StationInstance in self.StationInstances:
                    sitedef.write(" %s_GPS  %s localrx\n" % (StationInstance.StationAlias.upper(),
                                                             self.GamitOpts['expt']))

        except Exception as e:
            raise GamitSessionException(e)
Example No. 10
    def run(self):
        retry = 0
        while True:
            cmd_stdin = None
            try:
                if self.cat_file:
                    cmd_stdin = file_open(
                        os.path.join(self.cwd or '', self.cat_file))

                self.p = subprocess.Popen(
                    self.cmd.split(),
                    shell=False,
                    stdin=cmd_stdin,
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE,
                    cwd=self.cwd,
                    close_fds=True,
                    bufsize=-1,
                    # text mode:
                    universal_newlines=True,
                    encoding='utf-8',
                    errors='ignore')

                # Block until finalization
                self.stdout, self.stderr = self.p.communicate()
                break

            except OSError as e:
                if str(e) == '[Errno 35] Resource temporarily unavailable':
                    if retry <= 2:
                        retry += 1
                        # wait a moment
                        time.sleep(0.5)
                        continue
                    else:
                        print(self.cmd)
                        raise OSError(
                            str(e) + ' after 3 retries on node: ' +
                            platform.node())
                else:
                    print(self.cmd)
                    raise

            except Exception:
                print(self.cmd)
                raise

            finally:
                if cmd_stdin:
                    cmd_stdin.close()
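
Matching the literal message '[Errno 35] Resource temporarily unavailable' is fragile, since the errno number and its wording vary across platforms. A minimal standalone sketch of the same retry idea using errno (the function name and parameters are illustrative, not part of the class above):

import errno
import subprocess
import time

def run_with_retry(cmd, cwd=None, retries=3, delay=0.5):
    # retry Popen only when the OS reports EAGAIN ("resource temporarily unavailable")
    for attempt in range(retries + 1):
        try:
            p = subprocess.Popen(cmd.split(), cwd=cwd,
                                 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                 universal_newlines=True)
            return p.communicate()
        except OSError as e:
            if e.errno == errno.EAGAIN and attempt < retries:
                time.sleep(delay)
                continue
            raise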
Example No. 11
    def create_station_info(self):

        # delete any current station.info files
        station_path = os.path.join(self.pwd_tables, 'station.info')
        if os.path.isfile(station_path):
            os.remove(station_path)

        with file_open(station_path, 'w') as stninfo_file:
            stninfo_file.write('*SITE  Station Name      Session Start      Session Stop       Ant Ht   HtCod  '
                               'Ant N    Ant E    Receiver Type         Vers                  SwVer  '
                               'Receiver SN           Antenna Type     Dome   Antenna SN          \n')

            for stn in self.StationInstances:
                # stninfo_file.write(stn.StationInfo.return_stninfo() + '\n')
                stninfo_file.write(stn.GetStationInformation() + '\n')
Example No. 12
    def linktables(self, year, eop_type):
        script_path = os.path.join(self.pwd_comb, 'link_tables.sh')
        try:
            link_tables = file_open(script_path, 'w')
        except (OSError, IOError):
            raise GlobkException('could not open file link_tables.sh')

        # link the apr file as the lfile.
        with link_tables:
            contents = \
                """#!/bin/bash
                # set up links
                sh_links.tables -frame J2000 -year %s -eop %s -topt none &> sh_links.out;
                # link the bulletin A
                ln -s ~/gg/tables/pmu.usno .
                """ % (year, eop_type)

            link_tables.write(contents)

        chmod_exec(script_path)
Example No. 13
    def create_finish_script(self):

        year = self.date.yyyy()
        doy  = self.date.ddd()

        # extract the gps week and convert to string
        gpsWeek_str = str(self.date.gpsWeek)

        # normalize gps week string
        if self.date.gpsWeek < 1000: gpsWeek_str = '0' + gpsWeek_str

        # extract the gps week and day of week
        gps_week     = self.date.gpsWeek
        gps_week_day = self.date.gpsWeekDay

        finish_file_path = os.path.join(self.pwd, 'finish.sh')

        try:
            finish_file = file_open(finish_file_path,'w')
        except (OSError, IOError):
            raise Exception('could not open file '+finish_file_path)

        contents = """#!/bin/bash
        export INSTITUTE=%s

        echo "finish.sh (`date +"%%Y-%%m-%%d %%T"`): Finish script started" >> monitor.log

        # set the name of the outfile
        FILE=%s%s%s
        DOY=%s
        YEAR=%s

        # move to the solution path
        if [ ! -d ./glbf ]; then
            # something went wrong! no glbf dir
            mkdir glbf
        fi

        cd glbf

        # make sure an h file exists, if not exit
        if [ ! -f ../$DOY/h*.${YEAR}${DOY} ]; then
            echo "FATAL in finish.sh (`date +"%%Y-%%m-%%d %%T"`): h-files not found in $DOY folder. Exit" >> ../monitor.log
            exit;
        fi

        # get the WL and NL ambiguity resolution and the nrms double diff statistics
        echo "finish.sh (`date +"%%Y-%%m-%%d %%T"`): NRMS and WL-NL ambiguity summary follows:" >> ../monitor.log
        grep 'nrms' ../$DOY/sh_gamit_${DOY}.summary >> ../monitor.log
        grep 'WL fixed' ../$DOY/sh_gamit_${DOY}.summary >> ../monitor.log

        # link the svnav.dat file
        ln -s ../tables/svnav.dat .

        # create the binary h-file
        htoglb . tmp.svs -a ../$DOY/h*.${YEAR}${DOY}  >> ../${FILE}.out

        # grep any missing stations to report them to monitor.log
        grep 'No data for site ' ../${FILE}.out | sort | uniq >> ../monitor.log
        
        # convert the binary h-file to sinex file
        glbtosnx . "" h*.glx ${FILE}.snx >> ../${FILE}.out

        # clean up
        rm HTOGLB.* tmp.svs l*  svnav.dat

        # move back to home
        cd ..;

        """ % (self.gamitopts['org'], self.gamitopts['org'], gpsWeek_str, str(gps_week_day), doy, year[2:4])

        # dump contents to the script file
        finish_file.write(contents)

        # this section is to calculate the orbits
        if self.gamitopts['expt_type'] == 'relax':

            # create an sp3 file from the g-file
            contents = """
            # move to the solutions directory
            cd ./solutions/*

            # make temporary directory
            mkdir tmp

            # copy the gfile to temp dir
            cp gfile* tmp/

            # move to the temp dir
            cd tmp;

            # do the damn thing
            mksp3.sh %s %s %s

            # copy the sp3 file to solution dir if exists
            [ -e *.sp3 ] && mv *.sp3 ..;

            # move out of temporary directory
            cd ..;

            # clean up
            rm -rf tmp gfile*;

            # back to home directory
            cd ../..

            """ % (year,doy,self.options['org'])

            finish_file.write(contents)

            return

        contents = """
        # move to the solutions directory
        cd $DOY

        # rename o file to znd file
        if [ -f o*a.[0-9][0-9][0-9]* ]; then
            mv -f o*a.[0-9][0-9][0-9]* ../glbf/%s%s%s.znd;
        fi

        # remove a priori o file
        if [ -f o*p.[0-9][0-9][0-9]* ]; then
            rm -f o*p.[0-9][0-9][0-9]*;
        fi

        # restore home dir
        cd ..

        """ % (self.gamitopts['org'], gpsWeek_str, str(gps_week_day))

        finish_file.write(contents)

        contents = """
        # move to the solutions directory
        cd $DOY

        # clean up
        # remove the grid files, rinex files, etc
        rm -rf gfile* *.grid ????????.??o

        # compress remaining files
        for file in $(ls);do gzip --force $file; done

        # return to home directory
        cd ..

        cd rinex
        rm -rf *
        cd ..
        echo "finish.sh (`date +"%Y-%m-%d %T"`): Done processing h-files and generating SINEX." >> monitor.log

        """

        finish_file.write(contents)

        # make sure to close the file
        finish_file.close()

        # add executable permissions
        chmod_exec(finish_file_path)
Example No. 14
    def create_combination_script(self, org, gpsweek, gpsweekday, sites):

        # set the path and name for the run script
        run_file_path = os.path.join(self.pwd_comb, 'globk.sh')

        try:
            run_file = file_open(run_file_path, 'w')
        except (OSError, IOError):
            raise GlobkException('could not open file '+run_file_path)

        sites = split_string(sites, 80)
        site_list_string = []
        for s in sites:
            site_list_string.append('echo " use_site %s"                                     >> globk.cmd' % s)

        site_string = '\n'.join(site_list_string)

        contents = \
        """#!/bin/bash

        export INSTITUTE=%s
        export GPSWEEK=%s
        export GPSWEEKDAY=%i
        
        # data product file names
        OUT_FILE=${INSTITUTE}${GPSWEEK}${GPSWEEKDAY};

        # create global directory listing for globk
        for file in $(find . -name "*.GLX" -print | sort);do echo "$file";done | grep    "\/n0\/"  > globk.gdl
        for file in $(find . -name "*.GLX" -print | sort);do echo "$file";done | grep -v "\/n0\/" >> globk.gdl

        # create the globk cmd file
        echo " eq_file eq_rename.txt"                            >  globk.cmd
        echo " use_site clear"                                   >> globk.cmd
        %s
        echo " prt_opt GDLF MIDP CMDS PLST "                     >> globk.cmd
        echo " out_glb $OUT_FILE.GLX"                            >> globk.cmd
        echo " in_pmu /opt/gamit_globk/tables/pmu.usno"          >> globk.cmd
        echo " descript Weekly combined solution at $INSTITUTE"  >> globk.cmd
        echo " max_chii  3. 0.6"                                 >> globk.cmd
        echo " apr_site  all 1 1 1 0 0 0"                        >> globk.cmd
        # DO NOT ACTIVATE ATM COMBINATION BECAUSE IT WILL NOT WORK!
        echo "#apr_atm   all 1 1 1"                              >> globk.cmd

        # create the sinex header file
        echo "+FILE/REFERENCE                               " >  head.snx
        echo " DESCRIPTION   Instituto Geografico Nacional  " >> head.snx
        echo " OUTPUT        Solucion GPS combinada         " >> head.snx
        echo " CONTACT       [email protected]                 " >> head.snx
        echo " SOFTWARE      glbtosnx Version               " >> head.snx
        echo " HARDWARE      .                              " >> head.snx
        echo " INPUT         Archivos binarios Globk        " >> head.snx
        echo "-FILE/REFERENCE                               " >> head.snx

        # run globk
        globk 0 file.prt globk.log globk.gdl globk.cmd 2>&1 > globk.out

        # convert the GLX file into sinex
        glbtosnx . head.snx $OUT_FILE.GLX ${OUT_FILE}.snx 2>&1 > glbtosnx.out

        # figure out where the parameters start in the prt file
        LINE=`grep -n "PARAMETER ESTIMATES" file.prt | cut -d ":" -f1`

        # reduce line by one to make a little cleaner
        let LINE--;

        # print prt header
        sed -n 1,${LINE}p file.prt > ${OUT_FILE}.out

        # append the log file
        cat globk.log >> ${OUT_FILE}.out

        # create the fsnx file which contains only the solution estimate
        lineNumber=`grep --binary-file=text -m 1 -n "\-SOLUTION/ESTIMATE" ${OUT_FILE}.snx | cut -d : -f 1`

        # extract the solution estimate
        head -$lineNumber ${OUT_FILE}.snx > ${OUT_FILE}.fsnx;

        """ % (org, gpsweek, gpsweekday, site_string)

        run_file.write(contents)

        # all done
        run_file.close()

        # add executable permissions
        chmod_exec(run_file_path)
Example No. 15
    def start(self, dirname, year, doy, dry_run=False):
        monitor_open = False

        try:
            # copy the folder created by GamitSession in the solution_pwd to the remote_pwd (pwd)
            try:
                if not os.path.exists(os.path.dirname(self.pwd)):
                    os.makedirs(os.path.dirname(self.pwd))
            except OSError:
                # race condition: several processes may try to create the same folder
                # if OSError occurs, ignore and continue
                pass

            # if the local folder exists (due to previous incomplete processing, erase it).
            if os.path.exists(self.pwd):
                shutil.rmtree(self.pwd)

            # ready to copy the shared solution_dir to pwd
            shutil.copytree(self.solution_pwd, self.pwd, symlinks=True)

            with file_open(os.path.join(self.pwd, 'monitor.log'), 'a') as monitor:
                monitor_open = True

                def log(s):
                    monitor.write(now_str() + ' -> ' + s + '\n')

                log('%s %i %i executing on %s' % (dirname, year, doy, platform.node()))
                log('fetching orbits')

                try:
                    Sp3 = pySp3.GetSp3Orbits(self.orbits['sp3_path'], self.date, self.orbits['sp3types'],
                                             self.pwd_igs, True)  # type: pySp3.GetSp3Orbits

                except pySp3.pySp3Exception:
                    log('could not find principal orbits, fetching alternative')

                    # try alternative orbits
                    if self.options['sp3altrn']:
                        Sp3 = pySp3.GetSp3Orbits(self.orbits['sp3_path'], self.date, self.orbits['sp3altrn'],
                                                 self.pwd_igs, True)  # type: pySp3.GetSp3Orbits
                    else:
                        raise

                if Sp3.type != 'igs':
                    # copy the file using the igs name
                    shutil.copyfile(Sp3.file_path, Sp3.file_path.replace(Sp3.type, 'igs'))

                log('fetching broadcast orbits')

                pyBrdc.GetBrdcOrbits(self.orbits['brdc_path'], self.date, self.pwd_brdc,
                                     no_cleanup=True)  # type: pyBrdc.GetBrdcOrbits

                for rinex in self.params['rinex']:

                    log('fetching rinex for %s %s %s %s'
                        % (stationID(rinex), rinex['StationAlias'],
                           '{:10.6f} {:11.6f}'.format(rinex['lat'], rinex['lon']), 'tie' if rinex['is_tie'] else ''))

                    try:
                        with pyRinex.ReadRinex(rinex['NetworkCode'],
                                               rinex['StationCode'],
                                               rinex['source'], False) as Rinex:  # type: pyRinex.ReadRinex

                            # WARNING! some multiday RINEX were generating conflicts because the RINEX has a name, say,
                            # tuc12302.10o and the program wants to rename it as tuc12030.10o but because it's a
                            # multiday file, during __init__ it's already split and renamed as tuc12300.10o and
                            # additional folders are generated with the information for each file. Therefore, find
                            # the rinex that corresponds to the date being processed and use that one instead of the
                            # original file. These files are not allowed by pyArchiveService, but the "start point" of
                            # the database (i.e. the files already in the folders read by pyScanArchive) has such
                            # problems.

                            # figure out if this station has been affected by an earthquake
                            # if so, window the data
                            if rinex['jump'] is not None:
                                monitor.write(
                                    '                    -> RINEX file has been windowed: ETM detected jump on ' +
                                    rinex['jump'].datetime().strftime('%Y-%m-%d %H:%M:%S') + '\n')

                            if Rinex.multiday:
                                # find the rinex that corresponds to the session being processed
                                for Rnx in Rinex.multiday_rnx_list:
                                    if Rnx.date == self.date:
                                        Rnx.rename(rinex['destiny'])

                                        if rinex['jump'] is not None:
                                            self.window_rinex(Rnx, rinex['jump'])
                                        # before creating local copy, decimate file
                                        Rnx.decimate(30)
                                        Rnx.purge_comments()
                                        Rnx.compress_local_copyto(self.pwd_rinex)
                                        break
                            else:
                                Rinex.rename(rinex['destiny'])

                                if rinex['jump'] is not None:
                                    self.window_rinex(Rinex, rinex['jump'])
                                # before creating local copy, decimate file
                                Rinex.decimate(30)
                                Rinex.purge_comments()
                                Rinex.compress_local_copyto(self.pwd_rinex)

                    except (OSError, IOError):
                        log('An error occurred while trying to copy ' +
                            rinex['source'] + ' to ' + rinex['destiny'] + ': File skipped.')

                    except (pyRinex.pyRinexException, Exception) as e:
                        log('An error occurred while trying to copy ' +
                            rinex['source'] + ': ' + str(e))

                log('executing GAMIT')

                # create the run script
                self.create_replace_links()
                self.create_run_script()
                self.create_finish_script()

            # run the script to replace the links of the tables directory
            self.p = subprocess.Popen('find ./tables ! -name "otl.grid" -type l -exec ./replace_links.sh {} +',
                                      shell=True, stdout=subprocess.PIPE,stderr=subprocess.PIPE, cwd=self.pwd)
            _, _ = self.p.communicate()

            # now execute the run script
            if not dry_run:
                self.p = subprocess.Popen('./run.sh', shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                          cwd=self.pwd)

                self.stdout, self.stderr = self.p.communicate()

                self.p = subprocess.Popen('./finish.sh', shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                          cwd=self.pwd)

                self.stdout, self.stderr = self.p.communicate()

                # check for any fatals
                self.p = subprocess.Popen('grep -q \'FATAL\' monitor.log', shell=True, stdout=subprocess.PIPE,
                                          stderr=subprocess.PIPE, cwd=self.pwd)

                _, _ = self.p.communicate()

                self.success = (self.p.returncode != 0)

            # output statistics to the parent to display
            result = self.parse_monitor(self.success)

            file_append(os.path.join(self.pwd, 'monitor.log'),
                        now_str() + ' -> return to Parallel.GAMIT\n')

            # no matter the result of the processing, move folder to final destination
            if not dry_run:
                self.finish()

            return result

        except:

            msg = traceback.format_exc() + '\nProcessing %s date %s on node %s' \
                  % (self.params['NetName'], self.date.yyyyddd(), platform.node())

            # DDG: do not attempt to write to monitor.log or do any file operations (maybe permission problem)
            # problem might occur during copytree or rmtree or some other operation before opening monitor.log
            if monitor_open:
                file_append(os.path.join(self.pwd, 'monitor.log'),
                            now_str() +
                            ' -> ERROR in pyGamitTask.start()\n%s' % msg)

                # the solution folder exists because it was created by GamitSession to start the processing.
                # erase it to upload the result
                if os.path.exists(self.solution_pwd):
                    shutil.rmtree(self.solution_pwd)

                # execute final error step: copy to self.solution_pwd
                shutil.copytree(self.pwd, self.solution_pwd, symlinks=True)
                # remove the remote pwd
                shutil.rmtree(self.pwd)

                # output statistics to the parent to display
                result = self.parse_monitor(False)
            else:
                result = {'session'             : '%s %s' % (self.date.yyyyddd(), self.params['DirName']),
                          'Project'             : self.params['NetName'],
                          'subnet'              : self.params['subnet'],
                          'Year'                : self.date.year,
                          'DOY'                 : self.date.doy,
                          'FYear'               : self.date.fyear,
                          'wl'                  : 0,
                          'nl'                  : 0,
                          'nrms'                : 0,
                          'relaxed_constrains'  : '',
                          'max_overconstrained' : '',
                          'node'                : platform.node(),
                          'execution_time'      : 0,
                          'execution_date'      : 0,
                          'missing'             : '',
                          'success'             : False,
                          'fatals'              : []
                          }

            result['error'] = msg

            # return useful information to the main node
            return result
Example No. 16
    def create_run_script(self):

        year = self.date.yyyy()
        doy  = self.date.ddd()

        # extract the gps week and convert to string
        gpsWeek_str = str(self.date.gpsWeek)

        # normalize gps week string
        if self.date.gpsWeek < 1000: gpsWeek_str = '0'+gpsWeek_str

        # set the path and name for the run script
        run_file_path = os.path.join(self.pwd,'run.sh')

        try:
            run_file = file_open(run_file_path, 'w')
        except (OSError, IOError):
            raise Exception('could not open file '+run_file_path)

        contents = """#!/bin/bash

        # just in case, create a temporary dir for fortran
        if [ ! -d ./tmp ]; then
            mkdir ./tmp
        fi
        export TMPDIR=`pwd`/tmp

        export INSTITUTE=%s
        # set max depth for recursion
        MAX_LEVEL=3;

        # parse input
        level=$1; [ $# -eq 0 ] && level=1;

        # check that level less than max depth
        if [[ $level -gt $MAX_LEVEL ]];then
            # if so then exit
            echo "run.sh (`date +"%%Y-%%m-%%d %%T"`): MAX ITERATION DEPTH REACHED ... MUST EXIT" >> monitor.log
            exit 0;
        fi

        echo "run.sh (`date +"%%Y-%%m-%%d %%T"`): Iteration depth: $level" >> monitor.log

        # set the params
        EXPT=%s;
        YEAR=%s;
        DOY=%s;
        MIN_SPAN=%s;
        EOP=%s;
        NOFTP=%s;

        # set the name of the outfile
        OUT_FILE=%s%s%s.out

        # execution flag for sh_gamit
        EXE=1;
        COUNTER=0;

        while [ $EXE -eq 1 ]; do

        if [ $COUNTER -gt 9 ]; then
            echo "run.sh (`date +"%%Y-%%m-%%d %%T"`): Maximum number of retries (10) reached. Abnormal exit in run.sh. Check processing log." >> monitor.log
            exit 1
        fi

        # set exe to 0 so that we exit exe loop if no problems found
        EXE=0;

        # save a copy of the lfile. before running sh_gamit
        iter_ext=`printf "l%%02d_i%%02d" $level $COUNTER`
        cp ./tables/lfile. ./tables/lfile.${iter_ext}

        # do the damn thing
        if [ "$NOFTP" = "no" ]; then
            sh_gamit -update_l N -topt none -c -copt null -dopt c x -expt $EXPT -d $YEAR $DOY -minspan $MIN_SPAN -remakex Y -eop $EOP &> $OUT_FILE;
        else
            sh_gamit -update_l N -topt none -c -copt null -noftp -dopt c x -expt $EXPT -d $YEAR $DOY -minspan $MIN_SPAN -remakex Y -eop $EOP &> $OUT_FILE;
        fi
        """ \
        % (self.gamitopts['org'], self.gamitopts['expt'], year, doy, '12', self.gamitopts['eop_type'],
           self.gamitopts['noftp'], self.gamitopts['org'], gpsWeek_str, str(self.date.gpsWeekDay))

        # if we're in debug mode do not pipe output to file
        # if not session.options['debug']: contents += """ &> $OUT_FILE; """;

        contents += """

        grep -q "Geodetic height unreasonable"  $OUT_FILE;
        if [ $? -eq 0 ]; then
            sstn=`grep "MODEL/open: Site" $OUT_FILE  | tail -1 | cut -d ":" -f 5 | cut -d " " -f 3 |tr '[:upper:]' '[:lower:]'`;
            echo "run.sh (`date +"%Y-%m-%d %T"`): deleting station ${sstn}: unreasonable geodetic height" >> monitor.log
            rm rinex/${sstn}* ;
            rm $DOY/${sstn}* ;
            grep "MODEL/open: Site" $OUT_FILE  | tail -1
            echo "run.sh (`date +"%Y-%m-%d %T"`): will try sh_gamit again ..." >> monitor.log
            EXE=1;
        fi

        grep "FATAL.*MAKEX/lib/rstnfo: No match for" $OUT_FILE
        if [ $? -eq 0 ];then
            sstn=`grep "FATAL.*MAKEX/lib/rstnfo: No match for" $OUT_FILE | tail -1 | cut -d ":" -f5 | awk '{print $4}' | tr '[:upper:]' '[:lower:]'`
            echo "run.sh (`date +"%Y-%m-%d %T"`): deleting station ${sstn}: no station info" >> monitor.log
            rm rinex/${sstn}* ;
            rm $DOY/${sstn}* ;
            echo "run.sh (`date +"%Y-%m-%d %T"`): will try sh_gamit again ..." >> monitor.log
            EXE=1;
        fi

        grep -q "Error extracting velocities for"  $OUT_FILE;
        if [ $? -eq 0 ]; then
            sstn=`grep "Error extracting velocities for" $OUT_FILE  | head -1 | cut -d ":" -f 5 | cut -d " " -f 6 |tr '[:upper:]' '[:lower:]'`;
            echo "run.sh (`date +"%Y-%m-%d %T"`): deleting station ${sstn}: Error extracting velocities for" >> monitor.log
            rm rinex/${sstn}* ;
            rm $DOY/${sstn}* ;
            grep "Error extracting velocities for" $OUT_FILE  | tail -1
            echo "run.sh (`date +"%Y-%m-%d %T"`): will try sh_gamit again ..." >> monitor.log
            EXE=1;
        fi

        grep -q    "Bad WAVELENGTH FACT" $OUT_FILE;
        if [ $? -eq 0 ]; then
            sstn=`grep "Bad WAVELENGTH FACT" $OUT_FILE | tail -1 | cut -d ":" -f 5 | cut -d " " -f 6 | cut -c 3-6`
            echo "run.sh (`date +"%Y-%m-%d %T"`): deleting station ${sstn}: Bad WAVELENGTH FACT in rinex header" >> monitor.log
            rm rinex/${sstn}*;
            rm $DOY/${sstn}* ;
            echo "run.sh (`date +"%Y-%m-%d %T"`): will try sh_gamit again ..." >> monitor.log
            EXE=1;
        fi

        grep -q    "Error decoding swver" $OUT_FILE;
        if [ $? -eq 0 ]; then
            grep       "Error decoding swver" $OUT_FILE;
            sstn=`grep "Error decoding swver" $OUT_FILE | tail -1 | awk '{print $8}' | tr '[:upper:]' '[:lower:]'`
            echo "run.sh (`date +"%Y-%m-%d %T"`): deleting station ${sstn}: Error decoding swver" >> monitor.log
            rm rinex/${sstn}*;
            rm $DOY/${sstn}* ;
            echo "run.sh (`date +"%Y-%m-%d %T"`): will try sh_gamit again ..." >> monitor.log
            EXE=1;
        fi

        grep -q    "FATAL.*MAKEX/lib/hisub:  Antenna code.*not in hi.dat" $OUT_FILE;
        if [ $? -eq 0 ]; then
            grep       "FATAL.*MAKEX/lib/hisub:  Antenna code.*not in hi.dat" $OUT_FILE;
            sstn=`grep "FATAL.*MAKEX/lib/hisub:  Antenna code.*not in hi.dat" $OUT_FILE | tail -1 | awk '{print $9}' | cut -c2-5 | tr '[:upper:]' '[:lower:]'`
            echo "run.sh (`date +"%Y-%m-%d %T"`): deleting station ${sstn}: Antenna code not in hi.dat" >> monitor.log
            rm rinex/${sstn}*;
            rm $DOY/${sstn}* ;
            echo "run.sh (`date +"%Y-%m-%d %T"`): will try sh_gamit again ..." >> monitor.log
            EXE=1;
        fi

        grep -q    "FATAL.*FIXDRV/dcheck: Only one or no existing X-files" $OUT_FILE;
        if [ $? -eq 0 ]; then
            grep       "FATAL.*FIXDRV/dcheck: Only one or no existing X-files" $OUT_FILE;
            echo "run.sh (`date +"%Y-%m-%d %T"`): FIXDRV/dcheck: Only one or no existing X-files" >> monitor.log
            echo "run.sh (`date +"%Y-%m-%d %T"`): will try sh_gamit again ..." >> monitor.log
            EXE=1;
        fi

        grep -q    "FATAL.*MAKEXP/makexp: No RINEX or X-files found" $OUT_FILE;
        if [ $? -eq 0 ]; then
            grep       "FATAL.*MAKEXP/makexp: No RINEX or X-files found" $OUT_FILE;
            echo "run.sh (`date +"%Y-%m-%d %T"`): MAKEXP/makexp: No RINEX or X-files found" >> monitor.log
            echo "run.sh (`date +"%Y-%m-%d %T"`): will try sh_gamit again ..." >> monitor.log
            EXE=1;
        fi

        grep -q    "FATAL.*MAKEX/get_rxfiles: Cannot find selected RINEX file" $OUT_FILE;
        if [ $? -eq 0 ]; then
            grep       "FATAL.*MAKEX/get_rxfiles: Cannot find selected RINEX file" $OUT_FILE;
            echo "run.sh (`date +"%Y-%m-%d %T"`): MAKEX/get_rxfiles: Cannot find selected RINEX file" >> monitor.log
            echo "run.sh (`date +"%Y-%m-%d %T"`): will try sh_gamit again ..." >> monitor.log
            EXE=1;
        fi

        grep -q    "FATAL.*MAKEX/openf: Error opening file:.*" $OUT_FILE;
        if [ $? -eq 0 ]; then
            grep       "FATAL.*MAKEX/openf: Error opening file:.*" $OUT_FILE;
            echo "run.sh (`date +"%Y-%m-%d %T"`): MAKEX/openf: Error opening file" >> monitor.log
            echo "run.sh (`date +"%Y-%m-%d %T"`): will try sh_gamit again ..." >> monitor.log
            EXE=1;
        fi
        
        grep -q    "SOLVE/get_widelane: Error reading first record" $OUT_FILE;
        if [ $? -eq 0 ]; then
            grep       "SOLVE/get_widelane: Error reading first record" $OUT_FILE;
            echo "run.sh (`date +"%Y-%m-%d %T"`): SOLVE/get_widelane: Error reading first record of temp file" >> monitor.log
            echo "run.sh (`date +"%Y-%m-%d %T"`): will try sh_gamit again ..." >> monitor.log
            EXE=1;
        fi

        grep -q    "Failure in sh_preproc. STATUS 1 -- sh_gamit terminated" $OUT_FILE;
        if [ $? -eq 0 ]; then
            grep       "Failure in sh_preproc. STATUS 1 -- sh_gamit terminated" $OUT_FILE;
            echo "run.sh (`date +"%Y-%m-%d %T"`): Failure in sh_preproc. STATUS 1 -- sh_gamit terminated" >> monitor.log
            echo "run.sh (`date +"%Y-%m-%d %T"`): will try sh_gamit again ..." >> monitor.log
            EXE=1;
        fi

        # problems related to ill conditioned bias matrix
        grep -q  "FATAL.*SOLVE/lcloos: Inversion error in" $OUT_FILE;
        if [ $? -eq 0 ]; then

            # remove the FATAL from the message to avoid confusing finish.sh that there was an error during execution
            err="SOLVE/lcloos: Inversion error in LCNORM(2)"

            # determine which autocln.sum exists and has information in it
            if [ -s $DOY/autcln.post.sum ]; then
                autocln=autcln.post.sum

                # error occurred after the prefit, read the autcln file and remove the station with low obs

                echo "run.sh (`date +"%Y-%m-%d %T"`): $err (after prefit) Will remove the station with the lowest obs count in $autocln" >> monitor.log

                sstn=`sed -n -e '/Number of data by site/,/^$/ p' $DOY/$autocln | tail -n +3 | sed '$d' | awk '{print $3, $4}' | awk -v min=999999 '{if($2<min){min=$2; stn=$1}}END{print stn}' | tr '[:upper:]' '[:lower:]'`

                nobs=`sed -n -e '/Number of data by site/,/^$/ p' $DOY/$autocln | tail -n +3 | sed '$d' | awk '{print $3, $4}' | awk -v min=999999 '{if($2<min){min=$2; stn=$1}}END{print min}'`

                echo "run.sh (`date +"%Y-%m-%d %T"`): deleting station ${sstn} -> observation count: $nobs" >> monitor.log
                rm rinex/${sstn}*;
                rm $DOY/${sstn}* ;
                echo "run.sh (`date +"%Y-%m-%d %T"`): will try sh_gamit again ..." >> monitor.log
                EXE=1;

            else
                # the error occurred during the prefit, autocln may or may not have gotten the problem. Use the observation count in the $OUTFILE

                echo "run.sh (`date +"%Y-%m-%d %T"`): $err. (during prefit) Will analyze the MAKEX output and remove the file with more rejected observations" >> monitor.log

                max_rejected=`grep "observations rejected" $OUT_FILE | awk -F ':' '{print $5}' | awk '{print $6}' | awk -v max=0 '{if($1>max){max=$1}}END{print max}'`

                sstn=(`sed -n -e '/'$max_rejected' observations rejected/,/End processing/ p' $OUT_FILE | grep 'End' | awk -F ':' '{print $6}' | awk '{print $1}' | uniq | tr '[:upper:]' '[:lower:]'`)

                if [ -z "$sstn" ]; then
                    echo "run.sh (`date +"%Y-%m-%d %T"`): could not determine the station with low observation count. Check $OUT_FILE" >> monitor.log
                else
                    for stn in ${sstn[*]}
                    do
                        echo "run.sh (`date +"%Y-%m-%d %T"`): deleting station ${stn} -> rejected observation count: $max_rejected" >> monitor.log
                        rm rinex/${stn}*;
                        rm $DOY/${stn}* ;
                    done
                    echo "run.sh (`date +"%Y-%m-%d %T"`): will try sh_gamit again ..." >> monitor.log
                    EXE=1;
                fi
            fi

            # different search methods, deprecated
            #sstn=(`sed -n -e '/ .... valid observations/,/stop in MODEL/ p' $OUT_FILE | grep 'Site' | awk -F ':' '{print $5}' | awk '{print $2'} | uniq | tr '[:upper:]' '[:lower:]'`)
        fi

        # this case after SOLVE/lcloos because it also triggers GAMIT sh_chksolve
        grep -q "FATAL GAMIT sh_chksolve: Solve failed to complete normally" $OUT_FILE;
        if [ $? -eq 0 ] && [ $EXE -eq 0 ]; then
            echo "run.sh (`date +"%Y-%m-%d %T"`): GAMIT sh_chksolve: Solve failed to complete normally" >> monitor.log
            echo "run.sh (`date +"%Y-%m-%d %T"`): will try sh_gamit again ..." >> monitor.log
            EXE=1;
        fi
        
        # grep over constrained sites
        grep -q "over constrained" ./$DOY/sh_gamit_${DOY}.summary;
        if [ $? -eq 0 ]; then
            # get the number of lines
            lines=`cat ./$DOY/sh_gamit_${DOY}.summary | sed -n 's/WARNING: \([0-9]*\) SITES.*$/\\1/p'`
            grep -A $lines "over constrained" ./$DOY/sh_gamit_${DOY}.summary >> monitor.log
            
            # DDG: new behavior -> remove the station with the largest over constrained coordinate
            # grep the sites and get the unique list separated by | (to do regex grep)
            # stns=`grep "GCR APTOL" monitor.log | awk '{print $4"_GPS"}' | uniq | tr '<line break>' '|'`
            # copy the sittbl. (just in case)
            # cp tables/sittbl. tables/sittbl.${iter_ext}
            # remove those from the sittbl list: this will relax station to 100 m
            # grep -v -E "${stns:0:-1}" tables/sittbl.${iter_ext} > tables/sittbl.
            
            stns=`grep "GCR APTOL" ./$DOY/sh_gamit_${DOY}.summary | awk '{print sqrt($(NF) * $(NF)), $4}' | sort -r | head -n1 | awk '{print $2}' | tr '[:upper:]' '[:lower:]'`
            echo "run.sh (`date +"%Y-%m-%d %T"`): deleting over constrained station ${stns}" >> monitor.log
            rm rinex/${stns}*;
            rm $DOY/${stns}* ;
            
            # echo "run.sh (`date +"%Y-%m-%d %T"`): relaxing over constrained stations ${stns:0:-1}" >> monitor.log
            echo "run.sh (`date +"%Y-%m-%d %T"`): replacing lfile. from this run with lfile.${iter_ext}" >> monitor.log
            rm ./tables/lfile.
            cp ./tables/lfile.${iter_ext} ./tables/lfile.
            
            echo "run.sh (`date +"%Y-%m-%d %T"`): will try sh_gamit again ..." >> monitor.log
            
            EXE=1;
        fi

        if [ $EXE -eq 1 ]; then
            # if it will retry, save the previous output using extension .l00_i00, .l00_i01, ... etc
            # where lxx is the level of iteration and iyy is the iteration in this level
            mv $OUT_FILE $OUT_FILE.${iter_ext}
            COUNTER=$((COUNTER+1));
        fi

        # grep updated coordinates
        grep Updated ./tables/lfile.;
        if [ $? -eq 0 ]; then
            grep Updated ./tables/lfile. >> monitor.log
        fi

        done

        # clean up
        rm -rf ionex met teqc*

        # blab about failures
        grep "FATAL" *.out >> monitor.log

        """

        run_file.write(contents)

        contents = \
        """

        # remove extraneous solution files
        # rm ./$DOY/l*[ab].*;

        # make sure to rename the gfilea to the correct gfile[0-9].doy
        [ -e ./igs/gfilea.* ] && mv -f ./igs/gfilea* ./*/gfile[0-9]*

        # see if any of the coordinates were updated, exit if not
        # DDG: Actually, sometimes the final updated coordinate only differs by < .3 m when solve is invoked more than
        # once from within sh_gamit. Therefore, also check that the updated coordinate is > .3 m from the original APR
        # this happens because the first Updated coordinate (lxxxxa.ddd) triggers an iteration in solve (lxxxxb.ddd) 
        # with a solution that is again close to the original APR. Without this check, PG iterates 3 times unnecessarily
        
        grep Updated ./tables/lfile.;
        if [ $? -ne 0 ]; then
            echo "run.sh (`date +"%Y-%m-%d %T"`): Normal exit from run.sh" >> monitor.log
            # uncompress everything. Will be compressed later on
            gunzip ./*/*;
            exit
        else
            updated=(`grep Updated ./tables/lfile. | awk '{print $1}'`)
            
            RERUN=0
            
            for stn in ${updated[*]}
            do
                coords=`grep $stn ./tables/lfile. | awk '{print $2,$3,$4}'`
                
                # use the copy of the lfile to grep the APR coordinates
                aprs=`grep $stn ./tables/lfile.${iter_ext} | awk '{print $2,$3,$4}'`
                
                # get the distance between Updated and APR
                dist=`echo $coords $aprs | awk '{print sqrt(($1 - $4)^2 + ($2 - $5)^2 + ($3 - $6)^2)}'`
                
                if (( $(echo "$dist > 0.3" | bc -l) )); then
                    RERUN=1;
                fi
            done
            
            # if RERUN = 0, Updated coordinate was < 0.3 m
            if [ $RERUN -eq 0 ]; then
                echo "run.sh (`date +"%Y-%m-%d %T"`): Updated coordinate detected but final solution within 0.3 m of APR" >> monitor.log
                echo "run.sh (`date +"%Y-%m-%d %T"`): Normal exit from run.sh" >> monitor.log
                # uncompress everything. Will be compressed later on
                gunzip ./*/*;
                exit
            fi
        fi

        # iteration detected!
        echo "run.sh (`date +"%Y-%m-%d %T"`): Updated coordinate detected in lfile. Iterating..." >> monitor.log

        # save this level's out file for debugging
        mv $OUT_FILE $OUT_FILE.${iter_ext}

        # apr file for updated coordinates
        aprfile=${EXPT}.temp

        # recreate the apr file with updated coordinates minus the comments
        sed -e 's/Updated from l.....\.[0-9][0-9][0-9]//g' ./tables/lfile. > ./tables/${aprfile};

        # the copy of the lfile was saved BEFORE running GAMIT. Replace with the updated version
        cp ./tables/${aprfile} ./tables/lfile.

        # copy over an updated gfile if it exists
        # cp ./*/gfile* ./tables/

        # update level
        level=$((level+1));

        # remove the 'old' solution
        [ $level -le $MAX_LEVEL ] && rm -rf ./$DOY;

        # decompress the remaining solution files
        gunzip ./*/*;

        # do another iteration
        ./run.sh $level;

        """

        run_file.write(contents)
        run_file.close()

        chmod_exec(run_file_path)
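
The coordinate-update check near the end of run.sh re-runs sh_gamit only when an "Updated" lfile. coordinate moved more than 0.3 m from the saved a-priori copy. The same test, written as a small Python sketch (the names here are illustrative):

import math

def needs_iteration(updated_xyz, apr_xyz, tol=0.3):
    # same test as the bc expression in run.sh: iterate only if the updated
    # coordinate is more than `tol` metres away from the a-priori value
    return math.dist(updated_xyz, apr_xyz) > tol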
Example No. 17
    def create_combination_script(self, date, org):

        # extract the gps week and convert to string
        gpsWeek_str = date.wwww()

        # set the path and name for the run script
        run_file_path = os.path.join(self.pwd_comb, 'globk.sh')

        try:
            run_file = file_open(run_file_path, 'w')
        except (OSError, IOError):
            raise GlobkException('could not open file '+run_file_path)

        with run_file:
            contents = \
            """#!/bin/bash

            export INSTITUTE=%s

            # data product file names
            OUT_FILE=%s%s%s;
            
            # mk solutions directory for prt files etc
            [ ! -d tables ] && mkdir tables

            cd tables
            ../link_tables.sh

            # create global directory listing for globk
            for file in $(find .. -name "*.glx" -print);do echo $file;done | grep    "\/n0\/"  > globk.gdl
            for file in $(find .. -name "*.glx" -print);do echo $file;done | grep -v "\/n0\/" >> globk.gdl

            # create the globk cmd file
            echo " app_ptid all"                                                          > globk.cmd
            echo " prt_opt GDLF MIDP CMDS"                                               >> globk.cmd
            echo " out_glb ../file.GLX"                                                  >> globk.cmd
            echo " in_pmu pmu.usno"                                                      >> globk.cmd
            echo " descript Daily combination of global and regional solutions"          >> globk.cmd
            """ % (org, org, gpsWeek_str, str(date.gpsWeekDay))

            if self.net_type == 'global':
                contents += \
                """
                echo "# activated for global network merge"                                  >> globk.cmd
                echo " apr_wob    10 10  10 10 "                                             >> globk.cmd
                echo " apr_ut1    10 10        "                                             >> globk.cmd
                echo " apr_svs all 0.05 0.05 0.05 0.005 0.005 0.005 0.01 0.01 0.00 0.01 FR"  >> globk.cmd
                """
            contents += \
            """
            echo " max_chii  1. 0.6"                                                     >> globk.cmd
            echo " apr_site  all 1 1 1 0 0 0"                                            >> globk.cmd
            echo " apr_atm   all 1 1 1"                                                  >> globk.cmd

            # create the sinex header file
            echo "+FILE/REFERENCE                               " >  head.snx
            echo " DESCRIPTION   Instituto Geografico Nacional  " >> head.snx
            echo " OUTPUT        Solucion GPS combinada         " >> head.snx
            echo " CONTACT       [email protected]              " >> head.snx
            echo " SOFTWARE      glbtosnx Version               " >> head.snx
            echo " HARDWARE      .                              " >> head.snx
            echo " INPUT         Archivos binarios Globk        " >> head.snx
            echo "-FILE/REFERENCE                               " >> head.snx

            # run globk
            globk 0 ../file.prt ../globk.log globk.gdl globk.cmd 2>&1 > ../globk.out

            # convert the GLX file into sinex
            glbtosnx . ./head.snx ../file.GLX ../${OUT_FILE}.snx 2>&1 > ../glbtosnx.out

            # restore original directory
            cd ..;

            # figure out where the parameters start in the prt file
            LINE=`grep -n "PARAMETER ESTIMATES" file.prt | cut -d ":" -f1`

            # reduce line by one to make a little cleaner
            let LINE--;

            # print prt header
            sed -n 1,${LINE}p file.prt > ${OUT_FILE}.out

            # append the log file
            cat globk.log >> ${OUT_FILE}.out

            # create the fsnx file which contains only the solution estimate
            lineNumber=`grep --binary-file=text -m 1 -n "\-SOLUTION/ESTIMATE" ${OUT_FILE}.snx | cut -d : -f 1`

            # extract the solution estimate
            head -$lineNumber ${OUT_FILE}.snx > ${OUT_FILE}.fsnx;

            # move the H file to a meaningful name
            mv file.GLX ${OUT_FILE}.GLX

            # clear out log files
            rm -rf tables
            rm -f file*
            rm -f globk*
            rm -f glb*
            rm -f *.sh

            # compress sinex file
            # gzip --force *.snx
            gzip --force *.fsnx
            gzip --force *.out
            gzip --force *.glx
            gzip --force *.GLX

            """

            run_file.write(contents)
            # all done

        # add executable permissions
        chmod_exec(run_file_path)
Example No. 18
    def parse(self):
        
        foundSolutionEstimate           = False
        inSolutionEstimateSection       = False
        inSolutionMatrixEstimateSection = False
        isHeaderLine                    = False
        
        foundSiteId     = False
        inSiteIdSection = False

        dictID = {}
        stn_ID = {}

        # if there's a file to parse
        if self.snxFilePath is not None:
            # flag to rezip at end
            wasZipped     = False
            wasCompressed = False
            
            # check for gzip
            if self.snxFilePath[-2:] == "gz":
                #file_ops.gunzip(self.snxFilePath)
                self.snxFilePath = self.snxFilePath[0:-3]
                wasZipped        = True
            
            # check for unix compression
            elif self.snxFilePath[-1:] == "Z":
                #file_ops.uncompress(self.snxFilePath)
                self.snxFilePath = self.snxFilePath[0:-2]
                wasCompressed    = True
                
            # open the file
            try:
                snxFileHandle = file_open(self.snxFilePath)
            except (OSError, IOError):
                print("snxFileParser ERROR:  Could not open file " + self.snxFilePath + " !!!") 
                raise

            with snxFileHandle:
                #make pattern to match to snx organization ...
                self.snxFileName = os.path.basename(self.snxFilePath)

                # @todo optimize by pre-compiling regexes
                orgPattern   = re.compile(r'^([a-zA-Z]+).*\.f?snx$')
                orgMatch     = orgPattern.findall(self.snxFileName)
                self.orgName = orgMatch[0].upper()

                # make pattern to look for SiteId start tag
                siteIdStartPattern = re.compile(r'^\+SITE\/ID$')

                # make pattern to look for end of siteId section
                siteIdEndPattern   = re.compile(r'^\-SITE\/ID$')

                # make pattern to parse the siteId lines
                # Example:
                #
                #     TROM  A 82397M001 P , USA                   18 56 18.0  69 39 45.9   135.4
                #
                siteIdPattern      = re.compile('^\s+(\w+)\s+\w\s+(\w+).*$')

                # variance factor pattern
                # Example:
                #
                # VARIANCE FACTOR                    0.048618461936712
                #
                #
                varianceFactorPattern = re.compile(r'^ VARIANCE FACTOR\s+([\d+]?\.\d+)$')
                observationsPattern   = re.compile(r'^ NUMBER OF OBSERVATIONS\s+(\d+)$')
                unknownsPattern       = re.compile(r'^ NUMBER OF UNKNOWNS\s+(\d+)$')

                # Make pattern to look for solution estimate start tag
                startSolutionEstimatePattern = re.compile(r'^\+SOLUTION\/ESTIMATE.*')

                # make pattern to look for solution estimate end tag
                endSolutionEstimatePattern = re.compile(r'^\-SOLUTION\/ESTIMATE.*')

                # make pattern to look for the L COVA start tag (+SOLUTION/MATRIX_ESTIMATE L COVA)
                startSolutionMatrixEstimate = re.compile(r'^\+SOLUTION\/MATRIX_ESTIMATE.*')

                # make pattern to look for the L COVA end tag (-SOLUTION/MATRIX_ESTIMATE L COVA)
                endSolutionMatrixEstimate = re.compile(r'^\-SOLUTION\/MATRIX_ESTIMATE.*')

                # make pattern to look for station coordinates
                # Example:
                #
                #   1 STAX   ALGO  A ---- 05:180:43200 m    2 .91812936331043008E+6 .2511266E-2
                #
                stationCoordinatePattern = re.compile(r'^\s+(\d+)+\s+STA(\w)\s+(\w+)\s+(\w).*\d+\s+(-?[\d+]?\.\d+[Ee][+-]?\d+)\s+(-?[\d+]?\.\d+[Ee][+-]?\d+)$')

                # make pattern to look for station velocities
                # Example:
                #
                # 916 VELX   YAKA  A    1 00:001:00000 m/y  2 -.219615010076079E-01 0.13728E-03
                #
                stationVelocityPattern = re.compile(r'^\s+\d+\s+VEL(\w)\s+(\w+)\s+\w\s+....\s+(\d\d:\d\d\d).*\d+\s+(-?[\d+]?\.\d+[Ee][+-]?\d+)\s+(-?[\d+]?\.\d+[Ee][+-]?\d+)$')

                for line in snxFileHandle:

                    if siteIdStartPattern.findall(line):
                        inSiteIdSection = True
                        continue

                    elif siteIdEndPattern.findall(line):
                        inSiteIdSection = False
                        continue

                    # check for solution estimate section
                    elif startSolutionEstimatePattern.findall(line):
                        inSolutionEstimateSection = True
                        continue

                    elif endSolutionEstimatePattern.findall(line):
                        inSolutionEstimateSection = False
                        continue

                    elif startSolutionMatrixEstimate.findall(line):
                        inSolutionMatrixEstimateSection = True
                        continue

                    elif endSolutionMatrixEstimate.findall(line):
                        inSolutionMatrixEstimateSection = False
                        break

                    m = varianceFactorPattern.findall(line)
                    if m:
                        self.varianceFactor = float(m[0])                    

                    m = unknownsPattern.findall(line)
                    if m:
                        self.unknowns = float(m[0])

                    m = observationsPattern.findall(line)
                    if m:
                        self.observations = float(m[0])


                    if inSiteIdSection:
                        # parse the siteID line
                        siteIdMatch = siteIdPattern.findall(line)

                        # blab about it
                        #print siteIdMatch

                        # if the line does not contain a match then move along
                        if not siteIdMatch:
                            continue

                        # extract the parsed info
                        (stationName, domesNumber) = siteIdMatch[0]

                        # make sure the name is upper case
                        stationName = stationName.upper()

                        # initialize station data if not seen this station before
                        if stationName not in self.stationDict:
                            self.stationDict[stationName] = StationData()

                        self.stationDict[stationName].domesNumber = domesNumber

                        #print "set domes number "+ domesNumber +" for station "+stationName


                    # if in the solution estimate section
                    if inSolutionEstimateSection:

                        # check for station coordinate match
                        stationCoordinateMatch = stationCoordinatePattern.findall(line)

                        #print line
                        #print stationCoordinateMatch

                        # if match then store result                    
                        if stationCoordinateMatch:
                            (ID,
                             coordID,
                             stationName,
                             pointCode,
                             coord,
                             sigCoord
                             ) = stationCoordinateMatch[0]

                            if pointCode != 'A':
                                os.sys.stderr.write('ignoring solution/estimate STA' + coordID +
                                                    ' for station: ' + stationName +
                                                    ', point code = ' + pointCode +
                                                    ', file = ' + self.snxFileName + '\n')
                                continue

                            # make sure station name is upper case 
                            stationName = stationName.upper()

                            # save the correspondence between the ID, the coordID and the stationName
                            dictID[ID] = coordID
                            stn_ID[ID] = stationName

                            if stationName not in self.stationDict:
                                self.stationDict[stationName] = StationData()

                            if coordID == 'X':
                                self.stationDict[stationName].X    = float(coord)
                                self.stationDict[stationName].sigX = float(sigCoord)

                            elif coordID == 'Y':
                                self.stationDict[stationName].Y    = float(coord)
                                self.stationDict[stationName].sigY = float(sigCoord)

                            else:
                                self.stationDict[stationName].Z    = float(coord)
                                self.stationDict[stationName].sigZ = float(sigCoord)


                        # check for station velocity match
                        stationVelocityMatch = stationVelocityPattern.findall(line)

                        if stationVelocityMatch:

                            (coordID,
                             stationName,
                             refEpoch,
                             vel,
                             sigVel) = stationVelocityMatch[0]

                            stationName = stationName.upper()

                            # parse refEpoch String
                            (year,doy) = refEpoch.split(':')

                            # convert from string
                            doy = float(doy)

                            # normalize the year and convert to float
                            year = float(get_norm_year_str(year))

                            #compute fractional year to match matlab round off
                            fractionalYear = year+((doy-1)/366.0)+ 0.001413

                            # init if not already in dict
                            if stationName not in self.stationDict:
                                self.stationDict[stationName] = StationData()

                            # set the reference epoch for the velocity    
                            self.stationDict[stationName].refEpoch    =  fractionalYear

                            if coordID == 'X':
                                self.stationDict[stationName].velX    = float(vel)
                                self.stationDict[stationName].sigVelX = float(sigVel)

                            elif coordID == 'Y':
                                self.stationDict[stationName].velY    = float(vel)
                                self.stationDict[stationName].sigVelY = float(sigVel)

                            else:
                                self.stationDict[stationName].velZ    = float(vel)
                                self.stationDict[stationName].sigVelZ = float(sigVel)


                    if inSolutionMatrixEstimateSection:
                        matrixLine = line.split()

                        ID1 = matrixLine[0]

                        # check that the key is actually a station variance-covariance item
                        if ID1 in stn_ID:

                            for i in range(len(matrixLine) - 2):
                                ID2 = str(int(matrixLine[1]) + i)

                                # skip parameter IDs that do not belong to a station
                                if ID2 not in stn_ID:
                                    continue

                                if stn_ID[ID1] == stn_ID[ID2] and dictID[ID1] != dictID[ID2]:
                                    # we already have the variance, we want the covariance
                                    if (dictID[ID1] == 'X' and dictID[ID2] == 'Y') or \
                                       (dictID[ID1] == 'Y' and dictID[ID2] == 'X'):
                                        self.stationDict[stn_ID[ID1]].sigXY = float(matrixLine[i + 2])

                                    elif (dictID[ID1] == 'X' and dictID[ID2] == 'Z') or \
                                         (dictID[ID1] == 'Z' and dictID[ID2] == 'X'):
                                        self.stationDict[stn_ID[ID1]].sigXZ = float(matrixLine[i + 2])

                                    elif (dictID[ID1] == 'Y' and dictID[ID2] == 'Z') or \
                                         (dictID[ID1] == 'Z' and dictID[ID2] == 'Y'):
                                        self.stationDict[stn_ID[ID1]].sigYZ = float(matrixLine[i + 2])

            # re-zip the file if it was .gz
            #if wasZipped:
                #file_ops.gzip(self.snxFilePath)

            # recompress the file if it was .Z
            #if wasCompressed:
                #file_ops.compress(self.snxFilePath)

            return self
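A hedged usage sketch for this parser follows; the class name snxFileParser and the constructor argument are assumptions inferred from the attributes used above, not confirmed by the example itself.

# hypothetical usage sketch: parse a SINEX file and list station coordinates;
# the class name and constructor signature are assumed for illustration
parser = snxFileParser('igs20P2100.snx').parse()

print('organization    :', parser.orgName)
print('variance factor :', parser.varianceFactor)

for name, data in parser.stationDict.items():
    print('%s  X=%14.4f  Y=%14.4f  Z=%14.4f' % (name, data.X, data.Y, data.Z))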
Exemplo n.º 19
0
    def __init__(self, configfile):

        self.options = {
            'path': None,
            'repository': None,
            'parallel': False,
            'cups': None,
            'node_list': None,
            'ip_address': None,
            'brdc': None,
            'sp3_type_1': None,
            'sp3_type_2': None,
            'sp3_type_3': None,
            'sp3_altr_1': None,
            'sp3_altr_2': None,
            'sp3_altr_3': None,
            'grdtab': None,
            'otlgrid': None,
            'otlmodel': 'FES2014b',
            'ppp_path': None,
            'institution': None,
            'info': None,
            'sp3': None,
            'frames': None,
            'atx': None,
            'height_codes': None,
            'ppp_exe': None,
            'ppp_remote_local': ()
        }

        config = configparser.ConfigParser()
        with file_open(configfile) as f:
            config.read_file(f)

        # get the archive config
        self.options.update(dict(config.items('archive')))

        # get the otl config
        self.options.update(dict(config.items('otl')))

        # get the ppp config
        for iconfig, val in dict(config.items('ppp')).items():
            self.options[iconfig] = os.path.expandvars(val).replace('//', '/')

        # frames and dates
        frames = [item.strip() for item in self.options['frames'].split(',')]
        atx = [item.strip() for item in self.options['atx'].split(',')]

        self.Frames = []

        for frame, atx in zip(frames, atx):
            date = process_date(self.options[frame.lower()].split(','))
            self.Frames += [{'name' : frame,
                             'atx'  : atx,
                             'dates': (Date(year=date[0].year, doy=date[0].doy,
                                            hour=0, minute=0, second=0),
                                       Date(year=date[1].year, doy=date[1].doy,
                                            hour=23, minute=59, second=59))}]

        self.options['frames'] = self.Frames

        self.archive_path = self.options['path']
        self.sp3_path = self.options['sp3']
        self.brdc_path = self.options['brdc']
        self.repository = self.options['repository']

        self.repository_data_in = os.path.join(self.repository, 'data_in')
        self.repository_data_in_retry = os.path.join(self.repository,
                                                     'data_in_retry')
        self.repository_data_reject = os.path.join(self.repository,
                                                   'data_rejected')

        self.sp3types = [
            self.options[k] for k in ('sp3_type_1', 'sp3_type_2', 'sp3_type_3')
            if self.options[k] is not None
        ]
        # alternative sp3 types
        self.sp3altrn = [
            self.options[k] for k in ('sp3_altr_1', 'sp3_altr_2', 'sp3_altr_3')
            if self.options[k] is not None
        ]

        self.run_parallel = (self.options['parallel'] == 'True')
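A minimal sketch of how this configuration object might be consumed; the class name ReadOptions is an assumption (only __init__ appears above), gnss_data.cfg is the file name used in the other examples, and yyyyddd() is assumed to be available on the Date objects as elsewhere in these snippets.

# hypothetical usage sketch: load the archive configuration and inspect the
# orbit types and reference frames it defines (class name is assumed)
Config = ReadOptions('gnss_data.cfg')

print('archive path   :', Config.archive_path)
print('sp3 types      :', Config.sp3types)
print('sp3 alternates :', Config.sp3altrn)

for frame in Config.Frames:
    print('frame %s uses %s from %s to %s'
          % (frame['name'], frame['atx'],
             frame['dates'][0].yyyyddd(), frame['dates'][1].yyyyddd()))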
Exemplo n.º 20
0
def main():

    parser = argparse.ArgumentParser(description='Program to perform weekly loosely-constrained solutions. Combination '
                                                 'is performed using GLOBK. Result is output in SINEX format.')

    parser.add_argument('stnlist', type=str, nargs='+', metavar='all|net.stnm',
                        help="List of networks/stations to include in the solution.")

    parser.add_argument('-s', '--session_config', type=str, nargs=1, metavar='session.cfg',
                        help="Filename with the session configuration to run Parallel.GAMIT")

    parser.add_argument('-w', '--gpsweek', nargs=1,
                        help="GPS week to combine.")

    parser.add_argument('-e', '--exclude', type=str, nargs='+', metavar='station',
                        help="List of stations to exclude (e.g. -e igm1 lpgs vbca)")

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")

    # get the working dates
    date_s = pyDate.Date(gpsWeek=int(args.gpsweek[0]), gpsWeekDay=0)
    date_e = pyDate.Date(gpsWeek=int(args.gpsweek[0]), gpsWeekDay=6)

    print(' >> Working with GPS week ' + args.gpsweek[0] + ' (%s to %s)' % (date_s.yyyyddd(), date_e.yyyyddd()))

    exclude = args.exclude
    if exclude is not None:
        print(' >> User selected list of stations to exclude:')
        Utils.print_columns(exclude)
        args.stnlist += ['-' + exc for exc in exclude]

    # get the station list
    stnlist = Utils.process_stnlist(cnn, args.stnlist)

    # check that the selected stations have all different station codes
    # otherwise, exit with error
    for i in range(len(stnlist) - 1):
        for j in range(i + 1, len(stnlist)):
            if stnlist[i]['StationCode'] == stnlist[j]['StationCode']:
                print('During station selection, two identical station codes were found. '
                      'Please remove one and try again.')
                exit()

    GamitConfig = pyGamitConfig.GamitConfiguration(args.session_config[0])  # type: pyGamitConfig.GamitConfiguration

    project = GamitConfig.NetworkConfig.network_id.lower()
    org     = GamitConfig.gamitopt['org']

    print(' >> REMINDER: To automatically remove outliers during the weekly combination, '
          'first run DRA.py to analyze the daily repetitivities')

    soln_pwd = GamitConfig.gamitopt['solutions_dir']

    # create a globk directory in production
    if not os.path.exists('production/globk'):
        os.makedirs('production/globk')

    # check if week folder exists
    globk_pwd = 'production/globk/' + args.gpsweek[0]
    if os.path.exists(globk_pwd):
        rmtree(globk_pwd)

    # create the directory
    os.makedirs(globk_pwd)

    glx_list = []

    # make a list of the h files that need to be combined
    for day in range(0, 7):
        date = pyDate.Date(gpsWeek    = int(args.gpsweek[0]),
                           gpsWeekDay = day)

        soln_dir = os.path.join(soln_pwd, "%s/%s/%s/glbf" % (date.yyyy(), date.ddd(), project))

        if os.path.exists(soln_dir):
            glx = glob.glob(os.path.join(soln_dir, '*.GLX.*'))
            if not glx:
                glx = glob.glob(os.path.join(soln_dir, '*.glx'))
                
            glx_list.append({'file': glx[0], 'gpsweek': date.wwwwd()})

    # create the earthquakes.txt file to remove outliers
    with file_open(globk_pwd + '/eq_rename.txt', 'w') as fd:
        rename   = []
        remove   = []
        use_site = []
        fd.write('# LIST OF OUTLIERS DETECTED BY DRA\n')
        for stn in stnlist:
            # obtain the filtered solutions
            rm = cnn.query_float('SELECT * FROM gamit_soln_excl WHERE "Project" = \'%s\' AND "NetworkCode" = \'%s\''
                                 ' AND "StationCode" = \'%s\' AND ("Year", "DOY") BETWEEN (%i, %i) AND (%i, %i) '
                                 'ORDER BY residual' %
                                 (project, stn['NetworkCode'], stn['StationCode'], date_s.year, date_s.doy,
                                  date_e.year, date_e.doy), as_dict=True)

            # obtain the total number of solutions
            sl = cnn.query_float('SELECT * FROM gamit_soln WHERE "Project" = \'%s\' AND "NetworkCode" = \'%s\''
                                 ' AND "StationCode" = \'%s\' AND ("Year", "DOY") BETWEEN (%i, %i) AND (%i, %i) ' %
                                 (project, stn['NetworkCode'], stn['StationCode'], date_s.year, date_s.doy,
                                  date_e.year, date_e.doy), as_dict=True)
            for i, r in enumerate(rm):
                date = pyDate.Date(year=r['Year'], doy=r['DOY'])
                # if the number of rejected solutions is equal to the number of total solutions,
                # leave out the first one (i == 0) which is the one with the lowest residual (see ORDER BY in rm)
                if len(rm) < len(sl) or (len(rm) == len(sl) and i != 0):
                    fd.write(' rename %s_gps %s_xcl %-20s %s %02i %02i 0 0 %s %02i %02i 24 0\n' %
                             (stn['StationCode'], stn['StationCode'], org + date.wwwwd() + '.GLX', date.yyyy()[2:],
                              date.month, date.day, date.yyyy()[2:], date.month, date.day))

            # check for renames that might not agree between days
            mv = cnn.query_float('SELECT * FROM gamit_subnets WHERE "Project" = \'%s\' AND ("Year", "DOY") '
                                 'BETWEEN (%i, %i) AND (%i, %i) AND \'%s.%s\' = ANY(stations)' %
                                 (project, date_s.year, date_s.doy, date_e.year, date_e.doy,
                                  stn['NetworkCode'], stn['StationCode']), as_dict=True)

            for m in mv:
                date = pyDate.Date(year=m['Year'], doy=m['DOY'])
                # check on each day to see if alias agrees with station code
                for i, s in enumerate(m['stations']):
                    if s.split('.')[1] != m['alias'][i] and s == stationID(stn):

                        print(' -- %s alias for %s = %s: renaming' \
                              % (date.yyyyddd(), stationID(stn), m['alias'][i]))

                        # change the name of the station to the original name
                        rename.append(' rename %s_gps %s_dup %-20s %s %02i %02i 0 0 %s %02i %02i 24 0\n' %
                                      (m['alias'][i], stn['StationCode'], org + date.wwwwd() + '.GLX', date.yyyy()[2:],
                                       date.month, date.day, date.yyyy()[2:], date.month, date.day))
                        use_site.append('%s_dup' % stn['StationCode'])

                    elif s not in [stationID(st) for st in stnlist]:
                        # print ' -- Removing %s: not selected' % s
                        # just in case, remove any other occurrences of this station code
                        remove.append(' rename %s_gps %s_xcl %-20s %s %02i %02i 0 0 %s %02i %02i 24 0\n' %
                                      (m['alias'][i], m['alias'][i], org + date.wwwwd() + '.GLX', date.yyyy()[2:],
                                       date.month, date.day, date.yyyy()[2:], date.month, date.day))
                    else:
                        use_site.append('%s_gps' % stn['StationCode'])

        fd.write('# LIST OF STATIONS TO BE REMOVED\n')
        fd.write(''.join(remove))
        fd.write('# LIST OF STATIONS TO BE RENAMED\n')
        fd.write(''.join(rename))

    print(' >> Converting to SINEX the daily solutions')

    for day, glx in enumerate(glx_list):
        date = pyDate.Date(gpsWeek    = int(args.gpsweek[0]),
                           gpsWeekDay = day)

        print(' -- Working on %s' % date.wwwwd())
        # delete the existing GLX files
        for ff in glob.glob(globk_pwd + '/*.GLX'):
            os.remove(ff)

        Globk(globk_pwd, org, [glx], date.wwww(), date.gpsWeekDay + 8, ' '.join(set(use_site)))
        # convert the file to a valid gpsweek day
        move(globk_pwd + '/' + org + date.wwww() + '%i.snx' % (date.gpsWeekDay + 8),
             globk_pwd + '/' + org + date.wwww() + '%i.snx' % date.gpsWeekDay)

        process_sinex(cnn, project, [date, date], globk_pwd + '/' + org + date.wwww() + '%i.snx' % date.gpsWeekDay)

    # delete the existing GLX files: get ready for weekly combination
    for ff in glob.glob(globk_pwd + '/*.GLX'):
        os.remove(ff)
    # ready to pass list to globk object
    Globk(globk_pwd, org, glx_list, date_s.wwww(), 7, ' '.join(set(use_site)))
    print(' >> Formatting the SINEX file')

    process_sinex(cnn, project, [date_s, date_e], globk_pwd + '/' + org + date_s.wwww() + '7.snx')
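Based on the argparse definition at the top of main(), a weekly combination might be launched as sketched below; the module file name WeeklyCombination.py is an assumption used only for illustration.

# hypothetical invocation sketch: emulate the command line
#   python WeeklyCombination.py all -s session.cfg -w 2100 -e igm1 lpgs
# by setting sys.argv before calling main(); the script name is assumed
import sys

sys.argv = ['WeeklyCombination.py', 'all',
            '-s', 'session.cfg',
            '-w', '2100',
            '-e', 'igm1', 'lpgs']
main()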
Exemplo n.º 21
0
def load_constrains(constrains_file):
    """
    Load the frame parameters
    :param constrains_file:
    :return: dictionary with the parameters for the given frame
    """
    params = dict()

    with file_open(constrains_file) as f:
        lines = f.read()

        stn = re.findall(
            r'^\s(\w+\.\w+)\s*(-?\d*\.\d+|NaN)\s*(-?\d*\.\d+|NaN)\s*(-?\d*\.\d+|NaN)\s*(-?\d*\.\d+|NaN)'
            r'\s*(-?\d*\.\d+|NaN)\s*(-?\d*\.\d+|NaN)\s*(-?\d*\.\d+|NaN)\s*(-?\d*\.\d+|NaN)'
            r'\s*(-?\d*\.\d+|NaN)\s*(-?\d*\.\d+|NaN)\s*(-?\d*\.\d+|NaN)\s*(-?\d*\.\d+|NaN)'
            r'\s*(-?\d*\.\d+|NaN)\s*(-?\d*\.\d+|NaN)\s*(-?\d*\.\d+|NaN)\s*(-?\d*\.\d+|NaN)'
            r'\s*(-?\d*\.\d+|NaN)\s*(-?\d*\.\d+|NaN)\s*(-?\d*\.\d+|NaN)',
            lines, re.MULTILINE)

        for s in stn:
            params[s[0]] = {
                'x': float(s[1]),
                'y': float(s[2]),
                'z': float(s[3]),
                'epoch': float(s[4]),
                'vx': float(s[5]),
                'vy': float(s[6]),
                'vz': float(s[7]),

                # sin and cos amplitudes arranged as [sin, cos] for each component, as stored in the database
                '365.250': {
                    'n': [
                        np.divide(float(s[8]), 1000.),
                        np.divide(float(s[10]), 1000.)
                    ],
                    'e': [
                        np.divide(float(s[12]), 1000.),
                        np.divide(float(s[14]), 1000.)
                    ],
                    'u': [
                        np.divide(float(s[16]), 1000.),
                        np.divide(float(s[18]), 1000.)
                    ]
                },
                '182.625': {
                    'n': [
                        np.divide(float(s[9]), 1000.),
                        np.divide(float(s[11]), 1000.)
                    ],
                    'e': [
                        np.divide(float(s[13]), 1000.),
                        np.divide(float(s[15]), 1000.)
                    ],
                    'u': [
                        np.divide(float(s[17]), 1000.),
                        np.divide(float(s[19]), 1000.)
                    ]
                }
            }

    return params
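To show how the returned dictionary might be consumed, the sketch below evaluates the linear part of a station position at an arbitrary epoch; the constraints file name and the station key are placeholders, and the periodic entries are only printed, since they are NEU amplitudes rather than XYZ terms.

import numpy as np

# hypothetical usage sketch: propagate a station position to epoch t using the
# linear terms only; 'itrf14.txt' and 'igs.algo' are placeholder names
params = load_constrains('itrf14.txt')

p = params['igs.algo']
t = 2020.0   # fractional year

xyz = np.array([p['x'], p['y'], p['z']]) + \
      (t - p['epoch']) * np.array([p['vx'], p['vy'], p['vz']])

print('XYZ at %.1f:' % t, xyz)

# the '365.250' and '182.625' entries hold the annual and semi-annual [sin, cos]
# amplitudes (in metres) for the north, east and up components
print('annual north terms [sin, cos]:', p['365.250']['n'])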