# Example #1 (scraped sample, vote count: 0)
    def get_orbits(self, type):
        """Fetch precise orbit, clock and EOP products for the processing day.

        Downloads SP3 orbits and clock files for the RINEX date and for the
        following day into <rootdir>/orbits, plus the EOP file for the RINEX
        date (falling back to 'dummy.eop' when none can be retrieved), and
        stores all results on the instance.

        :param type: orbit type identifier passed through to the product
                     download helpers (pySp3 / pyClk / pyEOP)
        """
        sp3_archive = self.options['sp3']
        orbit_dir = os.path.join(self.rootdir, 'orbits')
        day1 = self.rinex.date
        day2 = self.rinex.date + 1

        sp3_today = pySp3.GetSp3Orbits(sp3_archive, day1, type, orbit_dir, True)
        sp3_next = pySp3.GetSp3Orbits(sp3_archive, day2, type, orbit_dir, True)

        clk_today = pyClk.GetClkFile(sp3_archive, day1, type, orbit_dir, True)
        clk_next = pyClk.GetClkFile(sp3_archive, day2, type, orbit_dir, True)

        try:
            eop = pyEOP.GetEOP(sp3_archive, day1, type, self.rootdir).eop_filename
        except pyEOP.pyEOPException:
            # no EOP file could be retrieved; continue with a placeholder name
            eop = 'dummy.eop'

        self.orbits1 = sp3_today
        self.orbits2 = sp3_next
        self.clocks1 = clk_today
        self.clocks2 = clk_next
        self.eop_file = eop
        # record the orbit type reported by the fetched orbit object
        self.orbit_type = sp3_today.type
# Example #2 (scraped sample, vote count: 0)
def test_node(check_gamit_tables=None, software_sync=()):
    """Verify that a processing node has everything required to run jobs.

    Checks, in order: Python version, importability of all required
    stdlib/third-party/project modules, optional software synchronization,
    database connectivity and PyGreSQL version, ability to create the
    production folder, config-file paths (archive, repository), broadcast
    and SP3 orbit archive access, presence of external executables, PPP
    auxiliary files, ATX frame files and (optionally) the GAMIT tables.

    :param check_gamit_tables: None to skip GAMIT checks, otherwise a
        (date, eop) tuple identifying the tables to validate
    :param software_sync: iterable of 'source,destination' strings to be
        synchronized with dirsync before the tests run
    :return: a ' -- <node>: ...' string describing the first problem found,
        or a success message when every check passes
    """
    # test node: function that makes sure that all required packages and tools are present in the nodes
    import traceback
    import platform
    import os
    import sys

    def check_tab_file(tabfile, date):
        # verify a GAMIT table file exists and its last entry covers `date`;
        # returns an error string on failure, an empty (falsy) list on success
        if os.path.isfile(tabfile):
            # file exists, check contents
            with open(tabfile, 'rt', encoding='utf-8', errors='ignore') as f:
                lines = f.readlines()

            tabdate = pyDate.Date(mjd=lines[-1].split()[0])
            if tabdate < date:
                return ' -- %s: Last entry in %s is %s but processing %s' \
                       % (platform.node(), tabfile, tabdate.yyyyddd(), date.yyyyddd())
            return []
        else:
            return ' -- %s: Could not find file %s' % (platform.node(),
                                                       tabfile)

    # BEFORE ANYTHING! check the python version
    version = sys.version_info
    if version.major < 3:
        return ' -- %s: Incorrect Python version: %i.%i.%i. Recommended version >= 3.0.0' \
               % (platform.node(), version.major, version.minor, version.micro)

    # start importing the modules needed
    # (except Exception, not bare except: a Ctrl-C during startup must not be
    # reported as a missing module)
    try:
        import shutil
        import datetime
        import time
        import uuid
        # deps
        import numpy
        import pg
        import dirsync
        # app
        import pyRinex
        import dbConnection
        import pyStationInfo
        import pyArchiveStruct
        import pyPPP
        import pyBrdc
        import pyOptions
        import Utils
        import pyOTL
        import pySp3
        import pyETM
        import pyRunWithRetry
        import pyDate

    except Exception:
        return ' -- %s: Problem found while importing modules:\n%s' % (
            platform.node(), traceback.format_exc())

    try:
        if len(software_sync) > 0:
            # synchronize directories listed in the src and dst arguments
            from dirsync import sync

            for source_dest in software_sync:
                if isinstance(source_dest, str) and ',' in source_dest:
                    s = source_dest.split(',')[0].strip()
                    d = source_dest.split(',')[1].strip()

                    print('    -- Synchronizing %s -> %s' % (s, d))

                    updated = sync(s, d, 'sync', purge=True, create=True)

                    for f in updated:
                        print('    -- Updated %s' % f)

    except Exception:
        return ' -- %s: Problem found while synchronizing software:\n%s ' % (
            platform.node(), traceback.format_exc())

    # continue with a test SQL connection
    # make sure that the gnss_data.cfg is present
    try:
        cnn = dbConnection.Cnn('gnss_data.cfg')

        # the query result is not inspected; this only proves the connection
        # and schema are usable
        q = cnn.query('SELECT count(*) FROM networks')

        if int(pg.version[0]) < 5:
            return ' -- %s: Incorrect PyGreSQL version!: %s' % (
                platform.node(), pg.version)

    except Exception:
        return ' -- %s: Problem found while connecting to postgres:\n%s ' % (
            platform.node(), traceback.format_exc())

    # make sure we can create the production folder
    try:
        test_dir = os.path.join('production/node_test')
        if not os.path.exists(test_dir):
            os.makedirs(test_dir)
    except Exception:
        return ' -- %s: Could not create production folder:\n%s ' % (
            platform.node(), traceback.format_exc())

    # test
    try:
        Config = pyOptions.ReadOptions('gnss_data.cfg')

        # check that all paths exist and can be reached
        if not os.path.exists(Config.archive_path):
            return ' -- %s: Could not reach archive path %s' % (
                platform.node(), Config.archive_path)

        if not os.path.exists(Config.repository):
            return ' -- %s: Could not reach repository path %s' % (
                platform.node(), Config.repository)

        # pick a test date to replace any possible parameters in the config file
        date = pyDate.Date(year=2010, doy=1)

    except Exception:
        return ' -- %s: Problem while reading config file and/or testing archive access:\n%s' \
               % (platform.node(), traceback.format_exc())

    # the brdc/sp3 objects are not used further; constructing them is the test
    try:
        brdc = pyBrdc.GetBrdcOrbits(Config.brdc_path, date, test_dir)
    except Exception:
        return ' -- %s: Problem while testing the broadcast ephemeris archive (%s) access:\n%s' \
               % (platform.node(), Config.brdc_path, traceback.format_exc())

    try:
        sp3 = pySp3.GetSp3Orbits(Config.sp3_path, date, Config.sp3types,
                                 test_dir)
    except Exception:
        return ' -- %s: Problem while testing the sp3 orbits archive (%s) access:\n%s' \
               % (platform.node(), Config.sp3_path, traceback.format_exc())

    # check that all executables and GAMIT bins are in the path
    for prg in ('crz2rnx', 'crx2rnx', 'rnx2crx', 'rnx2crz', 'gfzrnx_lx',
                'svdiff', 'svpos', 'tform', 'sh_rx2apr', 'doy', 'sed',
                'compress'):
        with pyRunWithRetry.command('which ' + prg) as run:
            run.run()
            if run.stdout == '':
                return ' -- %s: Could not find path to %s' % (platform.node(),
                                                              prg)

    # check grdtab and ppp from the config file
    for opt in ('grdtab', 'otlgrid', 'ppp_exe'):
        path = Config.options[opt]
        if not os.path.isfile(path):
            return ' -- %s: Could not find %s in %s' % (platform.node(), opt,
                                                        path)

    ppp_path = Config.options['ppp_path']
    for f in ('gpsppp.stc', 'gpsppp.svb_gps_yrly', 'gpsppp.flt', 'gpsppp.stc',
              'gpsppp.met'):
        if not os.path.isfile(os.path.join(ppp_path, f)):
            return ' -- %s: Could not find %s in %s' % (platform.node(), f,
                                                        ppp_path)

    for frame in Config.options['frames']:
        if not os.path.isfile(frame['atx']):
            return ' -- %s: Could not find atx in %s' % (platform.node(),
                                                         frame['atx'])

    if check_gamit_tables is not None:
        # check the gamit tables if not none

        date = check_gamit_tables[0]
        eop = check_gamit_tables[1]

        gg = os.path.expanduser('~/gg')
        tables = os.path.expanduser('~/gg/tables')

        if not os.path.isdir(gg):
            return ' -- %s: Could not GAMIT installation dir (gg)' % (
                platform.node())

        elif not os.path.isdir(tables):
            return ' -- %s: Could not GAMIT tables dir (gg)' % (
                platform.node())

        # DDG: deprecated -> GAMIT now uses a single nbody file (binary)
        # for t_name in ('luntab.' + date.yyyy() + '.J2000',
        #               'soltab.' + date.yyyy() + '.J2000',
        #               'ut1.' + eop,
        #               # leapseconds
        #               # vmf1
        #               'pole.' + eop
        #               ):
        #    result = check_tab_file(os.path.join(tables, t_name), date)
        #    if result:
        #        return result

        # fes_cmc consistency

    return ' -- %s: Test passed!' % platform.node()
    def start(self, dirname, year, doy, dry_run=False):
        """Execute one GAMIT processing session in this task's working dir.

        Copies the prepared solution folder to the local working directory,
        fetches SP3 and broadcast orbits, stages each session RINEX file
        (renaming, optional windowing, decimating to 30 s), generates and
        runs the run/finish scripts, scans monitor.log for FATAL entries and
        returns the statistics parsed from the monitor.

        :param dirname: session directory name (used in log messages)
        :param year: processing year (used in log messages)
        :param doy: processing day-of-year (used in log messages)
        :param dry_run: when True, stage everything but do not execute
                        run.sh / finish.sh and do not move the results
        :return: dict of session statistics from self.parse_monitor; on an
                 unexpected exception, the dict additionally carries the
                 traceback under the 'error' key
        """

        # tracks whether monitor.log was ever opened so the error handler
        # knows whether file operations inside self.pwd are safe to attempt
        monitor_open = False

        try:
            # copy the folder created by GamitSession in the solution_pwd to the remote_pwd (pwd)
            try:
                if not os.path.exists(os.path.dirname(self.pwd)):
                    os.makedirs(os.path.dirname(self.pwd))
            except OSError:
                # racing condition having several processes trying to create the same folder
                # if OSError occurs, ignore and continue
                pass

            # if the local folder exists (due to previous incomplete processing, erase it).
            if os.path.exists(self.pwd):
                shutil.rmtree(self.pwd)

            # ready to copy the shared solution_dir to pwd
            shutil.copytree(self.solution_pwd, self.pwd, symlinks=True)

            with open(os.path.join(self.pwd, 'monitor.log'), 'a') as monitor:

                monitor_open = True

                monitor.write(
                    datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') +
                    ' -> %s %i %i executing on %s\n' %
                    (dirname, year, doy, platform.node()))

                monitor.write(
                    datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') +
                    ' -> fetching orbits\n')

                try:
                    Sp3 = pySp3.GetSp3Orbits(self.orbits['sp3_path'],
                                             self.date,
                                             self.orbits['sp3types'],
                                             self.pwd_igs,
                                             True)  # type: pySp3.GetSp3Orbits

                except pySp3.pySp3Exception:

                    monitor.write(
                        datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') +
                        ' -> could not find principal orbits, fetching alternative\n'
                    )

                    # try alternative orbits
                    if self.options['sp3altrn']:
                        Sp3 = pySp3.GetSp3Orbits(
                            self.orbits['sp3_path'], self.date,
                            self.orbits['sp3altrn'], self.pwd_igs,
                            True)  # type: pySp3.GetSp3Orbits
                    else:
                        # no alternative configured: propagate the original failure
                        raise

                if Sp3.type != 'igs':
                    # rename file
                    # keep a copy under the 'igs' naming scheme so downstream
                    # GAMIT steps find the file they expect
                    shutil.copyfile(Sp3.file_path,
                                    Sp3.file_path.replace(Sp3.type, 'igs'))

                monitor.write(
                    datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') +
                    ' -> fetching broadcast orbits\n')

                pyBrdc.GetBrdcOrbits(
                    self.orbits['brdc_path'],
                    self.date,
                    self.pwd_brdc,
                    no_cleanup=True)  # type: pyBrdc.GetBrdcOrbits

                for rinex in self.params['rinex']:

                    monitor.write(
                        datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') +
                        ' -> fetching rinex for %s.%s %s %s\n' %
                        (rinex['NetworkCode'], rinex['StationCode'],
                         rinex['StationAlias'], '{:10.6f} {:11.6f}'.format(
                             rinex['lat'], rinex['lon'])))

                    try:
                        with pyRinex.ReadRinex(
                                rinex['NetworkCode'], rinex['StationCode'],
                                rinex['source'],
                                False) as Rinex:  # type: pyRinex.ReadRinex

                            # WARNING! some multiday RINEX were generating conflicts because the RINEX has a name, say,
                            # tuc12302.10o and the program wants to rename it as tuc12030.10o but because it's a
                            # multiday file, during __init__ it's already split and renamed as tuc12300.10o and
                            # additional folders are generated with the information for each file. Therefore, find
                            # the rinex that corresponds to the date being processed and use that one instead of the
                            # original file. These files are not allowed by pyArchiveService, but the "start point" of
                            # the database (i.e. the files already in the folders read by pyScanArchive) has such
                            # problems.

                            # figure out if this station has been affected by an earthquake
                            # if so, window the data
                            if rinex['jump'] is not None:
                                monitor.write(
                                    '                    -> RINEX file has been windowed: ETM detected jump on '
                                    + rinex['jump'].datetime().strftime(
                                        '%Y-%m-%d %H:%M:%S') + '\n')

                            if Rinex.multiday:
                                # find the rinex that corresponds to the session being processed
                                for Rnx in Rinex.multiday_rnx_list:
                                    if Rnx.date == self.date:
                                        Rnx.rename(rinex['destiny'])

                                        if rinex['jump'] is not None:
                                            self.window_rinex(
                                                Rnx, rinex['jump'])
                                        # before creating local copy, decimate file
                                        Rnx.decimate(30)
                                        Rnx.purge_comments()
                                        Rnx.compress_local_copyto(
                                            self.pwd_rinex)
                                        break
                            else:
                                Rinex.rename(rinex['destiny'])

                                if rinex['jump'] is not None:
                                    self.window_rinex(Rinex, rinex['jump'])
                                # before creating local copy, decimate file
                                Rinex.decimate(30)
                                Rinex.purge_comments()
                                Rinex.compress_local_copyto(self.pwd_rinex)

                    except (OSError, IOError):
                        # copy failure for a single RINEX is not fatal:
                        # log it and continue with the remaining stations
                        monitor.write(
                            datetime.datetime.now().strftime(
                                '%Y-%m-%d %H:%M:%S') +
                            ' -> An error occurred while trying to copy ' +
                            rinex['source'] + ' to ' + rinex['destiny'] +
                            ': File skipped.\n')

                    except (pyRinex.pyRinexException, Exception) as e:
                        # any other problem with this file: log and skip it
                        monitor.write(
                            datetime.datetime.now().strftime(
                                '%Y-%m-%d %H:%M:%S') +
                            ' -> An error occurred while trying to copy ' +
                            rinex['source'] + ': ' + str(e) + '\n')

                monitor.write(
                    datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') +
                    ' -> executing GAMIT\n')

                # create the run script
                self.create_replace_links()
                self.create_run_script()
                self.create_finish_script()

            # run the script to replace the links of the tables directory
            self.p = subprocess.Popen(
                'find ./tables ! -name "otl.grid" -type l -exec ./replace_links.sh {} +',
                shell=True,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                cwd=self.pwd)
            _, _ = self.p.communicate()

            # now execute the run script
            if not dry_run:
                # shell=False with a string argument runs the string as the
                # program path (POSIX), so './run.sh' must be executable
                self.p = subprocess.Popen('./run.sh',
                                          shell=False,
                                          stdout=subprocess.PIPE,
                                          stderr=subprocess.PIPE,
                                          cwd=self.pwd)

                self.stdout, self.stderr = self.p.communicate()

                self.p = subprocess.Popen('./finish.sh',
                                          shell=False,
                                          stdout=subprocess.PIPE,
                                          stderr=subprocess.PIPE,
                                          cwd=self.pwd)

                self.stdout, self.stderr = self.p.communicate()

                # check for any fatals
                self.p = subprocess.Popen('grep -q \'FATAL\' monitor.log',
                                          shell=True,
                                          stdout=subprocess.PIPE,
                                          stderr=subprocess.PIPE,
                                          cwd=self.pwd)

                _, _ = self.p.communicate()

                # grep -q exits 0 when a FATAL line was found -> failure
                if self.p.returncode == 0:
                    self.success = False
                else:
                    self.success = True

            # output statistics to the parent to display
            # NOTE(review): self.success is only assigned above when
            # dry_run is False — confirm it is initialized elsewhere before
            # a dry run reaches this line
            result = self.parse_monitor(self.success)

            with open(os.path.join(self.pwd, 'monitor.log'), 'a') as monitor:
                monitor.write(
                    datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') +
                    ' -> return to Parallel.GAMIT\n')

            # no matter the result of the processing, move folder to final destination
            if not dry_run:
                self.finish()

            return result

        except Exception:

            msg = traceback.format_exc() + '\nProcessing %s date %s on node %s' \
                  % (self.params['NetName'], self.date.yyyyddd(), platform.node())

            # DDG: do not attempt to write to monitor.log or do any file operations (maybe permission problem)
            # problem might occur during copytree or rmtree or some other operation before opening monitor.log
            if monitor_open:
                with open(os.path.join(self.pwd, 'monitor.log'),
                          'a') as monitor:
                    monitor.write(
                        datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') +
                        ' -> ERROR in pyGamitTask.start()\n%s' % msg)

                # the solution folder exists because it was created by GamitSession to start the processing.
                # erase it to upload the result
                if os.path.exists(self.solution_pwd):
                    shutil.rmtree(self.solution_pwd)

                # execute final error step: copy to self.solution_pwd
                shutil.copytree(self.pwd, self.solution_pwd, symlinks=True)
                # remove the remote pwd
                shutil.rmtree(self.pwd)

                # output statistics to the parent to display
                result = self.parse_monitor(False)
            else:
                # monitor.log was never opened: build a minimal result dict
                # by hand instead of parsing a monitor that does not exist
                result = {
                    'session':
                    '%s %s' % (self.date.yyyyddd(), self.params['DirName']),
                    'Project':
                    self.params['NetName'],
                    'subnet':
                    self.params['subnet'],
                    'Year':
                    self.date.year,
                    'DOY':
                    self.date.doy,
                    'FYear':
                    self.date.fyear,
                    'wl':
                    0,
                    'nl':
                    0,
                    'nrms':
                    0,
                    'relaxed_constrains':
                    '',
                    'max_overconstrained':
                    '',
                    'node':
                    platform.node(),
                    'execution_time':
                    0,
                    'execution_date':
                    0,
                    'missing':
                    '',
                    'success':
                    False,
                    'fatals': []
                }

            result['error'] = msg

            # return useful information to the main node
            return result