Example #1
 def to_json(self, json_file):
     # print(repr(self.transformations))
     file_write(
         json_file,
         json.dumps({'transformations': self.transformations},
                    indent=4,
                    sort_keys=False,
                    default=json_converter))
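
All of these snippets rely on small text-file helpers (file_write, file_append) and, in this example, a json_converter fallback serializer, which are defined elsewhere in the project (presumably in a Utils module). A minimal sketch of what they are assumed to look like, not the project's actual implementation:

# Sketch only: the real helpers may add encoding or error handling.
def file_write(path, contents):
    # create or overwrite the file with the given text
    with open(path, 'w') as f:
        f.write(contents)

def file_append(path, contents):
    # append the given text, creating the file if it does not exist
    with open(path, 'a') as f:
        f.write(contents)

def json_converter(obj):
    # assumed fallback for objects json cannot serialize natively (e.g. dates, numpy types)
    return str(obj)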
Example #2
def create_files():

    cnn = dbConnection.Cnn("gnss_data.cfg")

    rs = cnn.query('SELECT * FROM stations WHERE "NetworkCode" NOT LIKE \'?%%\' AND "Harpos_coeff_otl" LIKE \'%%HARPOS%%\' ORDER BY "NetworkCode", "StationCode"')
    # rs = cnn.query(
    #    'SELECT * FROM stations WHERE "NetworkCode" NOT LIKE \'?%%\' ORDER BY "NetworkCode", "StationCode"')

    stations = rs.dictresult()
    print(' >> Number of stations to process at Chalmers: %d' % len(stations))
    stnlist = []
    index   = 0
    for stn in stations:
        stnlist += ['%-24s %16.3f%16.3f%16.3f' % (stn['NetworkCode'] + '.' + stn['StationCode'],
                                                  float(stn['auto_x']),
                                                  float(stn['auto_y']),
                                                  float(stn['auto_z']))]

        if len(stnlist) == 99:

            body = '\n'.join(stnlist)

            file_write('otl_%i.list' % index,
                       body)

            index += 1
            # msg = MIMEText(body)
            # msg['Subject'] = 'Subject: Ocean Loading Tides'
            # msg['From'] = '*****@*****.**'
            # msg['To'] = '*****@*****.**'
            #
            # s = smtplib.SMTP_SSL('64.233.190.108', 465)
            # s.ehlo()
            # s.login('*****@*****.**', 'demostenes0624')
            # s.sendmail('*****@*****.**', '*****@*****.**', msg.as_string())
            # s.close()

            stnlist = []

    if len(stnlist) > 0:
        body = '\n'.join(stnlist)

        file_write('otl_%i.list' % index, 
                   body)
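
For reference, a hypothetical way to run this example (it assumes a gnss_data.cfg describing the database connection in the working directory, where the otl_N.list batches are also written):

if __name__ == '__main__':
    create_files()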
Example #3
def plot_etm(cnn, stack, station, directory):
    try:
        ts = stack.get_station(station['NetworkCode'], station['StationCode'])

        ts = pyETM.GamitSoln(cnn, ts, station['NetworkCode'],
                             station['StationCode'], stack.project)
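        # fit the extended trajectory model (ETM) to the GAMIT daily solutions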
        etm = pyETM.GamitETM(cnn,
                             station['NetworkCode'],
                             station['StationCode'],
                             gamit_soln=ts)

        pngfile = os.path.join(directory, stationID(etm) + '_gamit.png')
        jsonfile = os.path.join(directory, stationID(etm) + '_gamit.json')

        etm.plot(pngfile, plot_missing=False)
        file_write(jsonfile,
                   json.dumps(etm.todictionary(False), indent=4, sort_keys=False))

    except pyETM.pyETMException as e:
        tqdm.write(str(e))
Example #4
    def __init__(self, remote_pwd, params, solution_pwd):

        self.pwd          = remote_pwd
        self.solution_pwd = solution_pwd
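        # sub-directories of the remote working directory used by the GAMIT run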
        self.pwd_igs      = os.path.join(remote_pwd, 'igs')
        self.pwd_brdc     = os.path.join(remote_pwd, 'brdc')
        self.pwd_rinex    = os.path.join(remote_pwd, 'rinex')
        self.pwd_tables   = os.path.join(remote_pwd, 'tables')

        self.params    = params
        self.options   = params['options']
        self.orbits    = params['orbits']
        self.gamitopts = params['gamitopts']
        self.date      = params['date']
        self.success   = False
        self.stdout    = ''
        self.stderr    = ''
        self.p         = None

        file_write(os.path.join(self.solution_pwd, 'monitor.log'),
                   now_str() +
                   ' -> GamitTask initialized for %s: %s\n' % (self.params['DirName'],
                                                               self.date.yyyyddd()))
Example #5
def pull_rinex(cnn, date, Config, JobServer):

    # before starting the sync, determine if there were any station code changes that will require file
    # deletions in AWS. Join aws_sync with stations: if an entry in aws_sync has no record in stations,
    # the station was renamed and needs to be deleted. It will be resent in this run.
    rs = cnn.query(
        'SELECT a."NetworkCode", a."StationCode", a."StationAlias" FROM aws_sync as a '
        'LEFT JOIN stations as s on '
        'a."NetworkCode" = s."NetworkCode" and '
        'a."StationCode" = s."StationCode" '
        'WHERE "Year" = %i AND "DOY" = %i AND s."StationCode" IS NULL' %
        (date.year, date.doy))

    deletes = rs.dictresult()

    for stn in deletes:
        # produce a single file with the deletions that need to occur in the AWS
        file_append(
            'file_ops.log',
            'rm %s/%s* # %s.%s not found in stations table with net.stn code declared in aws_sync\n'
            % (date.yyyyddd().replace(' ', '/'), stn['StationAlias'],
               stn['NetworkCode'], stn['StationCode']))

        # delete the records from aws_sync
        cnn.query(
            'DELETE FROM aws_sync WHERE "Year" = %i AND "DOY" = %i AND "NetworkCode" = \'%s\' AND '
            '"StationCode" = \'%s\'' %
            (date.year, date.doy, stn['NetworkCode'], stn['StationCode']))

    # Join aws_sync with stationalias (stationalias is FK-ed to stations).
    # If an entry in aws_sync with StationCode <> StationAlias has no record in stationalias, OR
    # the alias declared in stationalias differs from the alias in aws_sync, delete it from AWS.
    # It will be resent in this batch.
    rs = cnn.query(
        'SELECT a."NetworkCode", a."StationCode", a."StationAlias" FROM aws_sync as a '
        'LEFT JOIN stationalias as sa on '
        'a."NetworkCode" = sa."NetworkCode" and '
        'a."StationCode" = sa."StationCode" '
        'WHERE "Year" = %i AND "DOY" = %i AND '
        '(a."StationAlias" <> sa."StationAlias" OR '
        '(sa."StationAlias" IS NULL AND a."StationCode" <> a."StationAlias"))' %
        (date.year, date.doy))

    deletes = rs.dictresult()

    for stn in deletes:
        # produce a single file with the deletions that need to occur in the AWS
        file_append(
            'file_ops.log',
            'rm %s/%s* # alias declared in aws_sync for %s.%s does not match alias in stationalias table\n'
            % (date.yyyyddd().replace(' ', '/'), stn['StationAlias'],
               stn['NetworkCode'], stn['StationCode']))

        # delete the records from aws_sync
        cnn.query(
            'DELETE FROM aws_sync WHERE "Year" = %i AND "DOY" = %i AND "NetworkCode" = \'%s\' AND '
            '"StationCode" = \'%s\'' %
            (date.year, date.doy, stn['NetworkCode'], stn['StationCode']))

    # check the individual files for this day. All files reported as uploaded should have a match in the rinex_proc
    # table, otherwise this could be a station split or deletion. If that's the case, order their deletion from the AWS
    rs = cnn.query(
        'SELECT a."NetworkCode", a."StationCode", a."StationAlias" FROM aws_sync as a '
        'LEFT JOIN rinex_proc as rx on '
        'a."NetworkCode" = rx."NetworkCode" and '
        'a."StationCode" = rx."StationCode" and '
        'a."Year"        = rx."ObservationYear" and '
        'a."DOY"         = rx."ObservationDOY" '
        'WHERE "Year" = %i AND "DOY" = %i AND '
        'rx."StationCode" IS NULL ' % (date.year, date.doy))

    deletes = rs.dictresult()

    for stn in deletes:
        # produce a single file with the deletions that need to occur in the AWS
        file_append(
            'file_ops.log',
            'rm %s/%s* # rinex file for %s.%s could not be found in the rinex_proc table\n'
            % (date.yyyyddd().replace(' ', '/'), stn['StationAlias'],
               stn['NetworkCode'], stn['StationCode']))

        # delete the records from aws_sync
        cnn.query(
            'DELETE FROM aws_sync WHERE "Year" = %i AND "DOY" = %i AND "NetworkCode" = \'%s\' AND '
            '"StationCode" = \'%s\'' %
            (date.year, date.doy, stn['NetworkCode'], stn['StationCode']))

    ####################################################################################################################
    # continue with sync of files
    ####################################################################################################################

    # behavior requested by Abel: ALWAYS output the metadata but don't output a RINEX if already synced.
    rs = cnn.query(
        'SELECT rinex_proc.* FROM rinex_proc '
        'WHERE "ObservationYear" = %i AND "ObservationDOY" = %i AND "Completion" >= 0.3'
        % (date.year, date.doy))

    rinex = rs.dictresult()

    pbar = tqdm(total=len(rinex), ncols=80)

    metafile = date.yyyy() + '/' + date.ddd() + '/' + date.yyyyddd().replace(
        ' ', '-')
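    # metafile resolves to yyyy/ddd/yyyy-ddd; the .info extension is added when the header is written below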

    date_subpath = date.yyyy() + '/' + date.ddd()
    date_path = './' + date_subpath
    # following Abel's request, make a subdir for the files
    lele_path = '/media/leleiona/aws-files/' + date_subpath

    for p in (date_path, lele_path):
        if not os.path.isdir(p):
            os.makedirs(p)

    # write the header to the .info file
    file_write(
        './' + metafile + '.info',
        '*SITE  Station Name      Session Start      Session Stop       Ant Ht   HtCod  Ant N    Ant E    '
        'Receiver Type         Vers                  SwVer  Receiver SN           Antenna Type     Dome   '
        'Antenna SN          \n')

    modules = ('dbConnection', 'pyETM', 'pyDate', 'pyRinex', 'pyStationInfo',
               'pyOptions', 'pyArchiveStruct', 'os', 'numpy', 'traceback',
               'platform', 'Utils', 'shutil')

    depfuncs = (window_rinex, sigmas_neu2xyz)

    JobServer.create_cluster(rinex_task,
                             depfuncs,
                             output_handle,
                             pbar,
                             modules=modules)
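    # one job per RINEX file is submitted below; JobServer.wait() blocks until all tasks have finished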

    for rnx in rinex:
        JobServer.submit(rnx['NetworkCode'], rnx['StationCode'], date,
                         rnx['ObservationFYear'], metafile)

    JobServer.wait()

    pbar.close()

    JobServer.close_cluster()

    print('Done, bye!')
Example #6
def main():
    parser = argparse.ArgumentParser(
        description='Plot ETM for stations in the database')

    parser.add_argument(
        'stnlist',
        type=str,
        nargs='+',
        help=
        "List of networks/stations to plot given in [net].[stnm] format or just [stnm] "
        "(separated by spaces; if [stnm] is not unique in the database, all stations with that "
        "name will be plotted). Use keyword 'all' to plot all stations in all networks. "
        "If [net].all is given, all stations from network [net] will be plotted"
    )

    parser.add_argument('-nop',
                        '--no_plots',
                        action='store_true',
                        help="Do not produce plots",
                        default=False)

    parser.add_argument('-nom',
                        '--no_missing_data',
                        action='store_true',
                        help="Do not show missing days",
                        default=False)

    parser.add_argument('-nm',
                        '--no_model',
                        action='store_true',
                        help="Plot time series without fitting a model")

    parser.add_argument('-r',
                        '--residuals',
                        action='store_true',
                        help="Plot time series residuals",
                        default=False)

    parser.add_argument(
        '-dir',
        '--directory',
        type=str,
        help=
        "Directory to save the resulting PNG files. If not specified, assumed to be the "
        "production directory")

    parser.add_argument(
        '-json',
        '--json',
        type=int,
        help="Export the ETM adjustment to JSON. Use 0 to output just the "
        "ETM parameters, 1 to export the time series without the "
        "model, and 2 to export both the time series and the model.")

    parser.add_argument(
        '-gui',
        '--interactive',
        action='store_true',
        help="Interactive mode: allows zooming and viewing the plot interactively"
    )

    parser.add_argument(
        '-win',
        '--time_window',
        nargs='+',
        metavar='interval',
        help=
        'Date range to window the data. Can be specified as yyyy/mm/dd, yyyy.doy, or as a single '
        'integer value (N), which is interpreted as the last epoch minus N')

    parser.add_argument(
        '-q',
        '--query',
        nargs=2,
        metavar='{type} {date}',
        type=str,
        help=
        'Dates to query the ETM. Specify "model" or "solution" to get the ETM value or the value '
        'of the daily solution (if it exists). Output is in XYZ.')

    parser.add_argument(
        '-gamit',
        '--gamit',
        type=str,
        nargs=1,
        metavar='{stack}',
        help="Plot the GAMIT time series specifying which stack name to plot.")

    parser.add_argument(
        '-lang',
        '--language',
        type=str,
        help="Change the language of the plots. Default is English. "
        "Use ESP to select Spanish. To add more languages, "
        "include the ISO 639-1 code in pyETM.py",
        default='ENG')

    parser.add_argument('-hist',
                        '--histogram',
                        action='store_true',
                        help="Plot histogram of residuals")

    parser.add_argument('-file',
                        '--filename',
                        type=str,
                        help="Obtain data from an external source (filename). "
                        "Format should be specified with -format.")

    parser.add_argument(
        '-format',
        '--format',
        nargs='+',
        type=str,
        help=
        "To be used together with --filename. Specify order of the fields as found in the input "
        "file. Format strings are gpsWeek, gpsWeekDay, year, doy, fyear, month, day, mjd, "
        "x, y, z, na. Use 'na' to specify a field that should be ignored. If fields to be ignored "
        "are at the end of the line, then there is no need to specify those.")

    parser.add_argument('-outliers',
                        '--plot_outliers',
                        action='store_true',
                        help="Plot an additional panel with the outliers")

    parser.add_argument('-vel',
                        '--velocity',
                        action='store_true',
                        help="During query, output the velocity in XYZ.")

    parser.add_argument('-seasonal',
                        '--seasonal_terms',
                        action='store_true',
                        help="During query, output the seasonal terms in NEU.")

    parser.add_argument('-quiet',
                        '--suppress_messages',
                        action='store_true',
                        help="Quiet mode: suppress information messages")

    args = parser.parse_args()

    cnn = dbConnection.Cnn('gnss_data.cfg')

    stnlist = process_stnlist(cnn, args.stnlist)

    # define the language
    pyETM.LANG = args.language.lower()
    # set the logging level
    if not args.suppress_messages:
        pyETM.logger.setLevel(pyETM.INFO)
    #####################################
    # date filter

    dates = None
    if args.time_window is not None:
        if len(args.time_window) == 1:
            try:
                dates = process_date(args.time_window,
                                     missing_input=None,
                                     allow_days=False)
                dates = (dates[0].fyear, )
            except ValueError:
                # an integer value
                dates = float(args.time_window[0])
        else:
            dates = process_date(args.time_window)
            dates = (dates[0].fyear, dates[1].fyear)

    if stnlist:
        # do the thing
        if args.directory:
            if not os.path.exists(args.directory):
                os.mkdir(args.directory)
        else:
            if not os.path.exists('production'):
                os.mkdir('production')
            args.directory = 'production'

        for stn in stnlist:
            try:

                if args.gamit is None and args.filename is None:
                    etm = pyETM.PPPETM(cnn, stn['NetworkCode'],
                                       stn['StationCode'], False,
                                       args.no_model)
                elif args.filename is not None:
                    etm = from_file(args, cnn, stn)
                else:
                    polyhedrons = cnn.query_float(
                        'SELECT "X", "Y", "Z", "Year", "DOY" FROM stacks '
                        'WHERE "name" = \'%s\' AND "NetworkCode" = \'%s\' AND '
                        '"StationCode" = \'%s\' '
                        'ORDER BY "Year", "DOY", "NetworkCode", "StationCode"'
                        % (args.gamit[0], stn['NetworkCode'],
                           stn['StationCode']))

                    soln = pyETM.GamitSoln(cnn, polyhedrons,
                                           stn['NetworkCode'],
                                           stn['StationCode'], args.gamit[0])

                    etm = pyETM.GamitETM(cnn,
                                         stn['NetworkCode'],
                                         stn['StationCode'],
                                         False,
                                         args.no_model,
                                         gamit_soln=soln)

                    # print ' > %5.2f %5.2f %5.2f %i %i' % \
                    #      (etm.factor[0]*1000, etm.factor[1]*1000, etm.factor[2]*1000, etm.soln.t.shape[0],
                    #       etm.soln.t.shape[0] -
                    #       np.sum(np.logical_and(np.logical_and(etm.F[0], etm.F[1]), etm.F[2])))

                    # print two largest outliers
                    if etm.A is not None:
                        lres = np.sqrt(np.sum(np.square(etm.R), axis=0))
                        slres = lres[np.argsort(-lres)]

                        print(' >> Two largest residuals:')
                        for i in [0, 1]:
                            print(' %s %6.3f %6.3f %6.3f' %
                                  (pyDate.Date(mjd=etm.soln.mjd[
                                      lres == slres[i]]).yyyyddd(),
                                   etm.R[0, lres == slres[i]],
                                   etm.R[1, lres == slres[i]],
                                   etm.R[2, lres == slres[i]]))

                if args.interactive:
                    xfile = None
                else:
                    postfix = "gamit"
                    if args.gamit is None:
                        postfix = "ppp" if args.filename is None else "file"

                    xfile = os.path.join(
                        args.directory, '%s.%s_%s' %
                        (etm.NetworkCode, etm.StationCode, postfix))

                # leave pngfile empty (None) to enter interactive mode (GUI)
                if not args.no_plots:
                    etm.plot(xfile + '.png' if xfile else None,
                             t_win=dates,
                             residuals=args.residuals,
                             plot_missing=not args.no_missing_data,
                             plot_outliers=args.plot_outliers)

                    if args.histogram:
                        etm.plot_hist(xfile + '_hist.png')

                if args.json is not None:
                    if args.json == 1:
                        obj = etm.todictionary(time_series=True)
                    elif args.json == 2:
                        obj = etm.todictionary(time_series=True, model=True)
                    else:
                        obj = etm.todictionary(False)

                    file_write(xfile + '.json',
                               json.dumps(obj, indent=4, sort_keys=False))

                if args.query is not None:
                    model = (args.query[0] == 'model')
                    q_date = pyDate.Date(fyear=float(args.query[1]))

                    xyz, _, _, txt = etm.get_xyz_s(q_date.year,
                                                   q_date.doy,
                                                   force_model=model)

                    strp = ''
                    # if user requests velocity too, output it
                    if args.velocity:
                        if etm.A is not None:
                            vxyz = etm.rotate_2xyz(etm.Linear.p.params[:, 1])
                            strp = '%8.5f %8.5f %8.5f ' \
                                   % (vxyz[0, 0], vxyz[1, 0], vxyz[2, 0])

                    # also output seasonal terms, if requested
                    if args.seasonal_terms and etm.Periodic.frequency_count > 0:
                        strp += ' '.join([
                            '%8.5f' % (x * 1000)
                            for x in etm.Periodic.p.params.flatten().tolist()
                        ])

                    print(' %s.%s %14.5f %14.5f %14.5f %8.3f %s -> %s' \
                          % (etm.NetworkCode, etm.StationCode, xyz[0], xyz[1], xyz[2], q_date.fyear, strp, txt))

                print('Successfully plotted ' + stn['NetworkCode'] + '.' +
                      stn['StationCode'])

            except pyETM.pyETMException as e:
                print(str(e))

            except Exception:
                print('Error during processing of ' + stn['NetworkCode'] +
                      '.' + stn['StationCode'])
                print(traceback.format_exc())
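
As a minimal programmatic counterpart to the -q/--query path above (a sketch only: it assumes a reachable gnss_data.cfg and an existing station; the network/station codes and the date are illustrative):

import dbConnection
import pyDate
import pyETM

cnn = dbConnection.Cnn('gnss_data.cfg')
# fit the PPP ETM for one station (hypothetical net.stn codes)
etm = pyETM.PPPETM(cnn, 'igs', 'braz', False, False)
# evaluate the model at a given epoch and print the XYZ coordinates
q_date = pyDate.Date(fyear=2020.500)
xyz, _, _, txt = etm.get_xyz_s(q_date.year, q_date.doy, force_model=True)
print('%14.5f %14.5f %14.5f -> %s' % (xyz[0], xyz[1], xyz[2], txt))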
Example #7
    def config_session(self):

        options = self.options

        # create the def file
        file_write(os.path.join(self.rootdir, 'gpsppp.def'), 
                   "'LNG' 'ENGLISH'\n"
                   "'TRF' 'gpsppp.trf'\n"
                   "'SVB' 'gpsppp.svb_gnss_yrly'\n"
                   "'PCV' '%s'\n"
                   "'FLT' 'gpsppp.flt'\n"
                   "'OLC' '%s.olc'\n"
                   "'MET' 'gpsppp.met'\n"
                   "'ERP' '%s'\n"
                   "'GSD' '%s'\n"
                   "'GSD' '%s'\n"
                   % (os.path.basename(self.atx),
                      self.rinex.StationCode,
                      self.eop_file,
                      options['institution'],
                      options['info']))

        file_write(os.path.join(self.rootdir, 'commands.cmd'),
                   "' UT DAYS OBSERVED                      (1-45)'               1\n"
                   "' USER DYNAMICS         (1=STATIC,2=KINEMATIC)'               %s\n"
                   "' OBSERVATION TO PROCESS         (1=COD,2=C&P)'               2\n"
                   "' FREQUENCY TO PROCESS        (1=L1,2=L2,3=L3)'               3\n"
                   "' SATELLITE EPHEMERIS INPUT     (1=BRD ,2=SP3)'               2\n"
                   "' SATELLITE PRODUCT (1=NO,2=Prc,3=RTCA,4=RTCM)'               2\n"
                   "' SATELLITE CLOCK INTERPOLATION   (1=NO,2=YES)'               %s\n"
                   "' IONOSPHERIC GRID INPUT          (1=NO,2=YES)'               1\n"
                   "' SOLVE STATION COORDINATES       (1=NO,2=YES)'               2\n"
                   "' SOLVE TROP. (1=NO,2-5=RW MM/HR) (+100=grad) '             105\n"
                   "' BACKWARD SUBSTITUTION           (1=NO,2=YES)'               1\n"
                   "' REFERENCE SYSTEM            (1=NAD83,2=ITRF)'               2\n"
                   "' COORDINATE SYSTEM(1=ELLIPSOIDAL,2=CARTESIAN)'               2\n"
                   "' A-PRIORI PSEUDORANGE SIGMA               (m)'           2.000\n"
                   "' A-PRIORI CARRIER PHASE SIGMA             (m)'           0.015\n"
                   "' LATITUDE  (ddmmss.sss,+N) or ECEF X      (m)'          0.0000\n"
                   "' LONGITUDE (ddmmss.sss,+E) or ECEF Y      (m)'          0.0000\n"
                   "' HEIGHT (m)                or ECEF Z      (m)'          0.0000\n"
                   "' ANTENNA HEIGHT                           (m)'          %6.4f\n"
                   "' CUTOFF ELEVATION                       (deg)'          10.000\n"
                   "' GDOP CUTOFF                                 '          20.000\n"
                   % ('1' if not self.kinematic else '2', '1'
                      if not self.clock_interpolation else '2', self.antH))
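        # input.inp (written below) ties together the RINEX file, the command file
        # and the orbit/clock products consumed by the PPP run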

        file_write(os.path.join(self.rootdir, 'input.inp'), 
                   "%s\n"
                   "commands.cmd\n"
                   "0 0\n"
                   "0 0\n"
                   "orbits/%s\n"
                   "orbits/%s\n"
                   "orbits/%s\n"
                   "orbits/%s\n"
                   % (self.rinex.rinex,
                      self.orbits1.sp3_filename,
                      self.clocks1.clk_filename,
                      self.orbits2.sp3_filename,
                      self.clocks2.clk_filename))
Example #8
 def write_otl(self):
     file_write(os.path.join(self.rootdir, self.rinex.StationCode + '.olc'),
                self.otl_coeff)