Example #1
def main():

    parser = argparse.ArgumentParser(description='GNSS time series stacker')

    parser.add_argument('project', type=str, nargs=1, metavar='{project name}',
                        help="Specify the project name used to process the GAMIT solutions in Parallel.GAMIT.")

    parser.add_argument('sinex', type=str, nargs=1, metavar='{sinex file}',
                        help="SINEX file to update.")

    parser.add_argument('-d', '--date_filter', nargs='+', metavar='date',
                        help='Date range filter. Can be specified in yyyy/mm/dd, yyyy_doy, or wwww-d format')

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")
    Config = pyOptions.ReadOptions("gnss_data.cfg")  # type: pyOptions.ReadOptions

    dates = [pyDate.Date(year=1980, doy=1), pyDate.Date(year=2100, doy=1)]
    try:
        dates = process_date(args.date_filter)
    except ValueError as e:
        parser.error(str(e))

    sinex = args.sinex[0]
    project = args.project[0]

    process_sinex(cnn, project, dates, sinex)
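
All of these examples funnel date handling through pyDate.Date. As a quick reference, here is a sketch of the constructor forms used on this page; the keyword names are copied from the calls in the examples, and the values are illustrative only:

# pyDate.Date constructor forms seen in these examples (values are illustrative)
d1 = pyDate.Date(year=2010, doy=123)          # year + day of year
d2 = pyDate.Date(fyear=2010.335)              # fractional (decimal) year
d3 = pyDate.Date(mjd=55314)                   # modified Julian date
d4 = pyDate.Date(gpsWeek=1582, gpsWeekDay=3)  # GPS week + day of week
d5 = pyDate.Date(datetime=datetime.now())     # from a datetime.datetime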
Example #2
def main():

    # create start and stop dates for the run
    start_date = pyDate.Date(year=2000, doy=1)
    end_date = pyDate.Date(year=2017, doy=1)

    # init queue for dates
    dates = list()

    # init date
    dt = start_date

    # populate the date queue
    while dt <= end_date:

        # add the date to the queue
        dates.append(dt)

        # increment the date
        dt += 1

    # create a pool of worker threads
    pool = Pool(16)

    # map the action function to each date in the queue
    pool.map(action, dates)
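
Example #2 relies on pyDate.Date supporting integer increments and ordering. Example #4 below builds an equivalent day-by-day range from MJDs; as a sketch, the while loop above could be collapsed the same way:

# one Date per day, inclusive of both endpoints (same queue as the loop above)
dates = [pyDate.Date(mjd=m) for m in range(start_date.mjd, end_date.mjd + 1)]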
Example #3
def main():

    parser = argparse.ArgumentParser(description='GNSS time series stacker')

    parser.add_argument('project', type=str, nargs=1, metavar='{project name}',
                        help="Specify the project name used to process the GAMIT solutions in Parallel.GAMIT.")
    parser.add_argument('-d', '--date_filter', nargs='+', metavar='date',
                        help='Date range filter. Can be specified in yyyy/mm/dd, yyyy_doy, or wwww-d format')

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")
    Config = pyOptions.ReadOptions("gnss_data.cfg")  # type: pyOptions.ReadOptions

    # create the execution log

    dates = [pyDate.Date(year=1980, doy=1),
             pyDate.Date(year=2100, doy=1)]
    try:
        dates = process_date(args.date_filter)
    except ValueError as e:
        parser.error(str(e))

    # create folder for plots

    if not os.path.isdir(args.project[0]):
        os.makedirs(args.project[0])

    ########################################
    # load polyhedrons

    project = dra(cnn, args.project[0], dates)
Example #4
def GetGaps(cnn, NetworkCode, StationCode, start_date, end_date):

    rs = cnn.query(
        'SELECT * FROM rinex_proc WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' AND '
        '"ObservationSTime" BETWEEN \'%s\' AND \'%s\' ORDER BY "ObservationSTime"'
        %
        (NetworkCode, StationCode, start_date.yyyymmdd(), end_date.yyyymmdd()))

    # make the start date and end date the limits of the data
    rnxtbl = rs.dictresult()
    gaps = []
    possible_doys = []

    if len(rnxtbl) > 0:
        start_date = pyDate.Date(year=rnxtbl[0]['ObservationYear'],
                                 doy=rnxtbl[0]['ObservationDOY'])
        end_date = pyDate.Date(year=rnxtbl[-1]['ObservationYear'],
                               doy=rnxtbl[-1]['ObservationDOY'])

        possible_doys = [
            pyDate.Date(mjd=mjd)
            for mjd in range(start_date.mjd, end_date.mjd + 1)
        ]

        actual_doys = [
            pyDate.Date(year=rnx['ObservationYear'], doy=rnx['ObservationDOY'])
            for rnx in rnxtbl
        ]
        gaps = [doy for doy in possible_doys if doy not in actual_doys]

    return gaps, possible_doys
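
A hedged call sketch for GetGaps; cnn is an open dbConnection.Cnn as in the surrounding examples, and the network/station codes are purely illustrative:

cnn = dbConnection.Cnn('gnss_data.cfg')
gaps, possible_doys = GetGaps(cnn, 'igs', 'algo',
                              pyDate.Date(year=2010, doy=1),
                              pyDate.Date(year=2010, doy=365))
print('%i missing days out of %i' % (len(gaps), len(possible_doys)))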
Example #5
    def rinex_based_stninfo(self, ignore):
        # build a station info based on the information from the RINEX headers
        rs = self.cnn.query('SELECT * FROM rinex WHERE "NetworkCode" = \'' + self.NetworkCode +
                            '\' AND "StationCode" = \'' + self.StationCode + '\' ORDER BY "ObservationSTime"')

        rnxtbl = rs.dictresult()

        rnx = rnxtbl[0]

        RecSerial = rnx['ReceiverSerial']
        AntSerial = rnx['AntennaSerial']
        AntHeig   = rnx['AntennaOffset']
        RadCode   = rnx['AntennaDome']
        StartDate = rnx['ObservationSTime']

        stninfo = []
        count = 0
        for i, rnx in enumerate(rnxtbl):

            if RecSerial != rnx['ReceiverSerial'] or AntSerial != rnx['AntennaSerial'] or \
                    AntHeig != rnx['AntennaOffset'] or RadCode != rnx['AntennaDome']:
                # start the counter
                count += 1

                if count > ignore:
                    Vers = rnx['ReceiverFw'][:22]

                    record                  = StationInfoRecord(self.NetworkCode, self.StationCode, rnx)
                    record.DateStart        = pyDate.Date(datetime=StartDate)
                    record.DateEnd          = pyDate.Date(datetime=rnxtbl[i-count]['ObservationETime'])
                    record.HeightCode       = 'DHARP'
                    record.ReceiverVers     = Vers[:5]
                    record.ReceiverFirmware = '-----'

                    stninfo.append(str(record))

                    RecSerial = rnx['ReceiverSerial']
                    AntSerial = rnx['AntennaSerial']
                    AntHeig   = rnx['AntennaOffset']
                    RadCode   = rnx['AntennaDome']
                    StartDate = rnxtbl[i - count + 1]['ObservationSTime']
                    count = 0
            elif RecSerial == rnx['ReceiverSerial'] and AntSerial == rnx['AntennaSerial'] and \
                    AntHeig == rnx['AntennaOffset'] and RadCode == rnx['AntennaDome'] and count > 0:
                # we started counting records that were different, but we didn't make it past the ignore threshold; reset the counter
                count = 0

        # insert the last record with 9999
        record                  = StationInfoRecord(self.NetworkCode, self.StationCode, None)
        record.DateStart        = pyDate.Date(datetime=StartDate)
        record.DateEnd          = pyDate.Date(stninfo=None)
        record.HeightCode       = 'DHARP'
        record.ReceiverFirmware = '-----'

        stninfo.append(str(record))

        return '\n'.join(stninfo) + '\n'
Example #6
def main():

    parser = argparse.ArgumentParser(description='GNSS time series stacker')

    parser.add_argument('project', type=str, nargs=1, metavar='{project name}',
                        help="Specify the project name used to process the GAMIT solutions in Parallel.GAMIT.")
    parser.add_argument('-d', '--date_filter', nargs='+', metavar='date',
                        help='Date range filter. Can be specified in yyyy/mm/dd, yyyy_doy, or wwww-d format')

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")

    project = args.project[0]

    dates = [pyDate.Date(year=1980, doy=1), pyDate.Date(year=2100, doy=1)]
    try:
        dates = process_date(args.date_filter)
    except ValueError as e:
        parser.error(str(e))

    # create folder for plots

    if not os.path.isdir(project + '_dra'):
        os.makedirs(project + '_dra')

    ########################################
    # load polyhedrons

    dra = DRA(cnn, args.project[0], dates[1])

    dra.stack_dra()

    for stn in tqdm(dra.stations):
        NetworkCode = stn['NetworkCode']
        StationCode = stn['StationCode']

        # load from the db
        ts = dra.get_station(NetworkCode, StationCode)

        if ts.size:
            try:
                if ts.shape[0] > 2:
                    dts = np.append(np.diff(ts[:, 0:3], axis=0), ts[1:, -3:], axis=1)

                    dra_ts = pyETM.GamitSoln(cnn, dts, NetworkCode, StationCode, project)

                    etm = pyETM.DailyRep(cnn, NetworkCode, StationCode, False, False, dra_ts)

                    etm.plot(pngfile='%s/%s.%s_DRA.png' % (project + '_dra', NetworkCode, StationCode),
                             plot_missing=False)

            except Exception as e:
                tqdm.write(' --> ' + str(e))

    dra.to_json(project + '_dra.json')
Example #7
    def spvsWithSigma(self, stn):
        xyz   = self.xyzForStn(stn)
        sigma = self.sigmaForStn(stn)

        for i in range(xyz.shape[1]):
            if np.any(np.isnan(xyz[:, i])):
                continue
            if sigma is None:
                yield xyz[:, i], None, pyDate.Date(fyear=self.epochs[i])
            else:
                yield xyz[:, i], sigma[:, i], pyDate.Date(fyear=self.epochs[i])
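
Since spvsWithSigma is a generator, callers iterate it directly. A sketch, where the owning object and station id are hypothetical:

# obj is an instance of the class that defines spvsWithSigma (hypothetical)
for xyz, sigma, date in obj.spvsWithSigma('igs::algo'):
    print(date.fyear, xyz)    # sigma may be None when no uncertainties exist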
Example #8
def main():
    parser = argparse.ArgumentParser(description='Script to synchronize AWS with OSU\'s archive database')

    parser.add_argument('date', type=str, nargs=1, help="Check the sync state for this given date. Format can be fyear or yyyy_ddd.")
    parser.add_argument('-mark', '--mark_uploaded', nargs='+', type=str, help="Pass net.stnm to mark these files as transferred to the AWS", metavar='{net.stnm}')
    parser.add_argument('-pull', '--pull_rinex', action='store_true', help="Get all the unsynchronized RINEX files in the local dir")
    parser.add_argument('-np', '--noparallel', action='store_true', help="Execute command without parallelization.")

    args = parser.parse_args()

    Config = pyOptions.ReadOptions("gnss_data.cfg")  # type: pyOptions.ReadOptions

    cnn = dbConnection.Cnn('gnss_data.cfg')

    # before attempting anything, check aliases!!
    print(' >> Checking GAMIT aliases')
    check_aliases(cnn)

    # initialize the PP job server
    if not args.noparallel:
        JobServer = pyJobServer.JobServer(Config, 1500)  # type: pyJobServer.JobServer
    else:
        JobServer = None
        Config.run_parallel = False

    dd = args.date[0]

    if '_' in dd:
        date = pyDate.Date(year=int(dd.split('_')[0]), doy=int(dd.split('_')[1]))
    elif dd == 'all':
        # run all dates (2004 to 2018)
        ts = range(pyDate.Date(year=2004, doy=20).mjd, pyDate.Date(year=2018, doy=87).mjd, 1)
        ts = [pyDate.Date(mjd=tts) for tts in ts]
        for date in ts:
            print(' >> Processing ' + str(date))
            pull_rinex(cnn, date, Config, JobServer)

        return
    else:
        date = pyDate.Date(fyear=float(dd))

    if args.pull_rinex:
        pull_rinex(cnn, date, Config, JobServer)

    if args.mark_uploaded is not None:
        print('Processing %i stations for day %s' % (len(args.mark_uploaded), date.yyyyddd()))
        # mark the list of stations as transferred to the AWS
        mark_uploaded(cnn, date, args.mark_uploaded)
Example #9
def from_file(args, cnn, stn):
    # execute on a file with wk XYZ coordinates
    ts = np.genfromtxt(args.filename)

    # read the format options
    if args.format is None:
        raise Exception('A format should be specified using the -format switch')

    dd = []
    x = []
    y = []
    z = []
    for k in ts:
        d = dict()
        for i, f in enumerate(args.format):
            if f in ('gpsWeek', 'gpsWeekDay', 'year', 'doy', 'fyear', 'month', 'day', 'mjd'):
                d[f] = k[i]
            if f == 'x':
                x.append(k[i])
            elif f == 'y':
                y.append(k[i])
            elif f == 'z':
                z.append(k[i])
        dd.append(d)

    dd = [pyDate.Date(**d) for d in dd]

    polyhedrons = np.array((x, y, z, [d.year for d in dd], [d.doy for d in dd])).transpose()

    soln = pyETM.ListSoln(cnn, polyhedrons.tolist(), stn['NetworkCode'], stn['StationCode'])
    etm = pyETM.FileETM(cnn, soln, False, args.no_model)

    return etm
Example #10
def compare_stninfo_rinex(NetworkCode, StationCode, STime, ETime,
                          rinex_serial):

    try:
        cnn = dbConnection.Cnn("gnss_data.cfg")
    except Exception:
        return traceback.format_exc() + ' error opening the database while ' \
                                        'processing %s.%s' % (NetworkCode, StationCode), None

    try:
        # get the center of the session
        date = STime + (ETime - STime) / 2
        date = pyDate.Date(datetime=date)

        stninfo = pyStationInfo.StationInfo(cnn, NetworkCode, StationCode,
                                            date)

    except pyStationInfo.pyStationInfoException as e:
        return "Station Information error: " + str(e), None

    if stninfo.currentrecord.ReceiverSerial.lower() != rinex_serial.lower():
        return None, [
            date, rinex_serial,
            stninfo.currentrecord.ReceiverSerial.lower()
        ]

    return None, None
Example #11
    def parse_station_record(self, record):

        if isinstance(record, str):

            fieldnames = ('StationCode', 'StationName', 'DateStart', 'DateEnd',
                          'AntennaHeight', 'HeightCode', 'AntennaNorth',
                          'AntennaEast', 'ReceiverCode', 'ReceiverVers',
                          'ReceiverFirmware', 'ReceiverSerial', 'AntennaCode',
                          'RadomeCode', 'AntennaSerial')

            fieldwidths = (
                1, 6, 18, 19, 19, 9, 7, 9, 9, 22, 22, 7, 22, 17, 7, 20
            )  # negative widths represent ignored padding fields
            fmtstring = ' '.join('{}{}'.format(abs(fw), 'x' if fw < 0 else 's')
                                 for fw in fieldwidths)

            fieldstruct = struct.Struct(fmtstring)

            if record[0] == ' ' and len(record) >= 77:
                record = dict(
                    zip(
                        fieldnames,
                        map(
                            str.strip,
                            struct_unpack(fieldstruct,
                                          record.ljust(
                                              fieldstruct.size))[1:])))
            else:
                return

        for key in list(self.keys()):
            try:
                if key in ('AntennaNorth', 'AntennaEast', 'AntennaHeight'):
                    self[key] = float(record[key])
                else:
                    self[key] = record[key]
            except KeyError:
                # if key not found in the record, may be an added field (like hash)
                pass

        try:
            # if initializing with a RINEX record, some of these may not exist in the dictionary
            self.DateStart = pyDate.Date(stninfo=record['DateStart'])
            self.DateEnd = pyDate.Date(stninfo=record['DateEnd'])
            self.StationCode = record['StationCode'].lower()
        except KeyError:
            pass
Example #12
def plot_station_info_rinex(cnn, NetworkCode, StationCode, stninfo):

    import matplotlib.pyplot as plt

    stnfo = []

    if stninfo.records is not None:
        for record in stninfo.records:
            stnfo.append([record['DateStart'].fyear,
                          (record['DateEnd'].fyear if record['DateEnd'].year is not None \
                           else pyDate.Date(datetime=dt.datetime.now()).fyear)
                          ])

    rinex = np.array(
        cnn.query_float(
            'SELECT "ObservationFYear" FROM rinex_proc WHERE "NetworkCode" = \'%s\' '
            'AND "StationCode" = \'%s\'' % (NetworkCode, StationCode)))

    fig, ax = plt.subplots(figsize=(7, 3))

    ax.grid(True)
    ax.set_title('RINEX and Station Information for %s.%s' %
                 (NetworkCode, StationCode))

    for poly in stnfo:
        ax.plot(poly, [1, 1],
                'o-',
                linewidth=2,
                markersize=4,
                color='tab:orange')
        # break line to clearly show the stop of a station info
        ax.plot([poly[1], poly[1]], [-0.5, 1.5], ':', color='tab:orange')

    ax.plot(rinex,
            np.zeros(rinex.shape[0]),
            'o',
            color='tab:blue',
            markersize=3)
    ax.set_yticks([0, 1])
    ax.set_yticklabels(["rinex", "stninfo"])
    plt.ylim([-.5, 1.5])
    figfile = BytesIO()

    try:
        plt.savefig(figfile, format='png')
        # plt.show()
        figfile.seek(0)  # rewind to beginning of file

        figdata_png = base64.b64encode(figfile.getvalue()).decode()
    except Exception:
        # either no rinex or no station info
        figdata_png = ''
        tqdm.write(
            ' -- Error processing %s.%s: station appears to have no RINEX or Station Info'
            % (NetworkCode, StationCode))

    plt.close()

    return figdata_png
Example #13
def process_date(arg, missing_input='fill', allow_days=True):
    # function to handle date input from PG.
    # Input: arg = arguments from the command line
    #        missing_input = a string specifying whether the vector should be filled when an endpoint is missing
    #        allow_days = allow a single integer argument N, interpreted as now() - N days

    now = datetime.now()
    if missing_input == 'fill':
        dates = [pyDate.Date(year=1980, doy=1), pyDate.Date(datetime=now)]
    else:
        dates = [None, None]

    if arg:
        for i, arg in enumerate(arg):
            dates[i] = process_date_str(arg, allow_days)

    return tuple(dates)
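
A usage sketch for process_date; the argument strings follow the formats that process_date_str (Example #17) accepts:

# both endpoints given explicitly
start, end = process_date(['2010_001', '2012/06/15'])

# no filter given: with the default missing_input='fill', the window
# falls back to 1980/001 .. now()
start, end = process_date(None)

# any other missing_input value leaves absent endpoints as None
start, end = process_date(None, missing_input='none')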
Example #14
def GetStnGaps(cnn, stnlist, ignore_val, start_date, end_date):

    for stn in stnlist:
        NetworkCode = stn['NetworkCode']
        StationCode = stn['StationCode']

        rs = cnn.query(
            'SELECT * FROM rinex_proc WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' '
            'AND "ObservationSTime" BETWEEN \'%s\' AND \'%s\' ORDER BY "ObservationSTime"'
            % (NetworkCode, StationCode, start_date.yyyymmdd(),
               end_date.yyyymmdd()))

        rnxtbl = rs.dictresult()
        gap_begin = None
        gaps = []
        for i, rnx in enumerate(rnxtbl):

            if i > 0:
                d1 = pyDate.Date(year=rnx['ObservationYear'],
                                 doy=rnx['ObservationDOY'])
                d2 = pyDate.Date(year=rnxtbl[i - 1]['ObservationYear'],
                                 doy=rnxtbl[i - 1]['ObservationDOY'])

                if d1 != d2 + 1 and not gap_begin:
                    gap_begin = d2 + 1

                if d1 == d2 + 1 and gap_begin:
                    days = ((d2 - 1).mjd - gap_begin.mjd) + 1
                    if days > ignore_val:
                        gaps.append(
                            '%s.%s gap in data found %s -> %s (%i days)' %
                            (NetworkCode, StationCode, gap_begin.yyyyddd(),
                             (d2 - 1).yyyyddd(), days))

                    gap_begin = None

        if gaps:
            sys.stdout.write('\nData gaps in %s.%s follow:\n' %
                             (NetworkCode, StationCode))
            sys.stdout.write('\n'.join(gaps) + '\n')
        else:
            sys.stdout.write('\nNo data gaps found for %s.%s\n' %
                             (NetworkCode, StationCode))
Example #15
    def parse_archive_keys(self, path, key_filter=()):

        try:
            pathparts = path.split('/')
            filename = path.split('/')[-1]

            # check the number of levels in pathparts against the number of expected levels
            # subtract one for the filename
            if len(pathparts) - 1 != len(self.levels):
                return False, {}

            if not filename.endswith('.info'):
                fileparts = self.parse_crinex_filename(filename)
            else:
                # parsing a station info file, fill with dummy the doy and year
                fileparts = ('dddd', '1', '0', '80')

            if fileparts:
                keys = dict()

                # fill in all the possible keys using the crinex file info
                keys['station'] = fileparts[0]
                keys['doy'] = int(fileparts[1])
                keys['session'] = fileparts[2]
                keys['year'] = int(fileparts[3])
                keys['network'] = 'rnx'

                # now look in the different levels to match more data (or replace filename keys)
                for key in self.levels:

                    if len(pathparts[key['Level'] - 1]) != key['TotalChars']:
                        return False, {}

                    if key['isnumeric'] == '1':
                        keys[key['KeyCode']] = int(pathparts[key['Level'] - 1])
                    else:
                        keys[key['KeyCode']] = pathparts[key['Level'] -
                                                         1].lower()

                # check date is valid and also fill day and month keys
                date = pyDate.Date(year=keys['year'], doy=keys['doy'])
                keys['day'] = date.day
                keys['month'] = date.month

                return True, {
                    key: keys[key]
                    for key in keys.keys() if key in key_filter
                }
            else:
                return False, {}

        except Exception as e:
            return False, {}
Example #16
def get_sp3(year, doy, org, outdir=None):

    year = Utils.get_norm_year_str(year)
    doy = Utils.get_norm_doy_str(doy)

    # initialize a date object
    date = pyDate.Date(year=year, doy=doy)

    # create string version of the gps week
    gps_week_str = str(date.gpsWeek)

    # zero-pad the week number string to 4 characters
    if date.gpsWeek < 1000:
        gps_week_str = '0' + gps_week_str

    # create the file name of the sp3
    sp3_file_name_base = org + gps_week_str + str(date.gpsWeekDay) + '.sp3'

    # set outdir to current directory if not set
    if outdir is None: outdir = '.'

    # init s3 connection to the metadata bucket
    conn = S3Connection(calling_format=OrdinaryCallingFormat())
    bucket = conn.get_bucket(WL_SP3_BUCKET)
    bucketKey = Key(bucket)

    file_list = []
    for f in bucket.list(prefix=sp3_file_name_base):
        file_list.append(f.key)

    # check if the sp3 file listing was empty
    if len(file_list) == 0:
        raise ResourceException('sp3 resource: ' + sp3_file_name_base +
                                ' could not be located')

    # make sure no more than a single match occurred
    if len(file_list) > 1:
        raise ResourceException('sp3 resource: ' + sp3_file_name_base +
                                ' matches multiple files')

    # just be explicit about it
    sp3_file_name = file_list[0]

    # create the full path to file on local system
    sp3_file_path = os.path.join(outdir, sp3_file_name)

    # create the s3 object
    bucketKey.key = sp3_file_name

    # pull the file
    bucketKey.get_contents_to_filename(sp3_file_path)

    # that's all
    return sp3_file_path
Example #17
def process_date_str(arg, allow_days=False):

    rdate = pyDate.Date(datetime=datetime.now())

    try:
        if '.' in arg:
            rdate = pyDate.Date(fyear=float(arg))
        elif '_' in arg:
            rdate = pyDate.Date(year=int(arg.split('_')[0]),
                                doy=int(arg.split('_')[1]))
        elif '/' in arg:
            rdate = pyDate.Date(year=int(arg.split('/')[0]),
                                month=int(arg.split('/')[1]),
                                day=int(arg.split('/')[2]))
        elif '-' in arg:
            rdate = pyDate.Date(gpsWeek=int(arg.split('-')[0]),
                                gpsWeekDay=int(arg.split('-')[1]))
        elif len(arg) > 0:
            if allow_days:
                rdate = pyDate.Date(datetime=datetime.now()) - int(arg)
            else:
                raise ValueError('Invalid input date: allow_days was set to False.')

    except Exception as e:
        raise ValueError('Could not decode input date (valid entries: '
                         'fyear, yyyy_ddd, yyyy/mm/dd, gpswk-wkday). '
                         'Error while reading the date start/end parameters: ' + str(e))

    return rdate
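
The delimiter in the argument decides which pyDate.Date constructor is used; a sketch of each branch:

process_date_str('2010.456')              # '.' -> fyear
process_date_str('2010_123')              # '_' -> year + doy
process_date_str('2010/05/03')            # '/' -> year + month + day
process_date_str('1582-3')                # '-' -> gpsWeek + gpsWeekDay
process_date_str('7', allow_days=True)    # bare integer -> now() minus 7 days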
Example #18
def getOutFileName(snxFile):

    snxFile = os.path.basename(snxFile)

    gpsWeek = int(snxFile[3:7])
    gpsWeekDay = int(snxFile[7])

    date = pyDate.Date(gpsweek=gpsWeek, gpsweekday=gpsWeekDay)

    year = str(date.year)
    doy = str(date.doy)

    return snxFile[0:3] + year + Utils.get_norm_doy_str(doy)
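
A call sketch for getOutFileName, assuming the conventional orgWWWWD SINEX naming (3-character org code, 4-digit GPS week, weekday digit); the path is hypothetical:

# 'igs' + week 1582 + weekday 3 -> 'igs' + <year> + <zero-padded doy>
out_name = getOutFileName('/some/path/igs15823.snx')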
Example #19
    def __init__(self, archive, date, filename, copyto):

        if date.gpsWeek < 0 or date > pyDate.Date(datetime=datetime.now()):
            # do not allow negative weeks or future orbit downloads!
            raise pyProductsExceptionUnreasonableDate(
                'Orbit requested for an unreasonable date: week ' +
                str(date.gpsWeek) + ' day ' + str(date.gpsWeekDay) + ' (' +
                date.yyyyddd() + ')')

        archive = archive.replace('$year', str(date.year))
        archive = archive.replace('$doy', str(date.doy).zfill(3))
        archive = archive.replace('$gpsweek', str(date.gpsWeek).zfill(4))
        archive = archive.replace('$gpswkday', str(date.gpsWeekDay))

        self.archive = archive
        self.path = None
        self.filename = filename

        # try both zipped and unzipped n files
        archive_file_path = os.path.join(archive, self.filename)

        if os.path.isfile(archive_file_path):
            copyfile(archive_file_path,
                     os.path.join(copyto, self.filename))
            self.file_path = os.path.join(copyto, self.filename)
        else:
            ext = None
            if os.path.isfile(archive_file_path + '.Z'):
                ext = '.Z'
            elif os.path.isfile(archive_file_path + '.gz'):
                ext = '.gz'
            elif os.path.isfile(archive_file_path + '.zip'):
                ext = '.zip'

            if ext is not None:
                copyfile(archive_file_path + ext,
                         os.path.join(copyto, self.filename + ext))
                self.file_path = os.path.join(copyto, self.filename)

                cmd = pyRunWithRetry.RunCommand(
                    'gunzip -f ' + self.file_path + ext, 15)
                cmd.run_shell()
            else:
                raise pyProductsException(
                    'Could not find the archive file for ' + self.filename)
Example #20
def UpdateRecord(rinex, path):

    cnn = dbConnection.Cnn('gnss_data.cfg')
    Config = pyOptions.ReadOptions('gnss_data.cfg')

    try:
        rnxobj = pyRinex.ReadRinex(rinex['NetworkCode'], rinex['StationCode'],
                                   path)

        date = pyDate.Date(year=rinex['ObservationYear'],
                           doy=rinex['ObservationDOY'])

        if not verify_rinex_date_multiday(date, rnxobj, Config):
            cnn.begin_transac()
            # propagate the deletes
            cnn.query(
                'DELETE FROM gamit_soln WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' AND "Year" = %i AND "DOY" = %i'
                % (rinex['NetworkCode'], rinex['StationCode'],
                   rinex['ObservationYear'], rinex['ObservationDOY']))
            cnn.query(
                'DELETE FROM ppp_soln WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' AND "Year" = %i AND "DOY" = %i'
                % (rinex['NetworkCode'], rinex['StationCode'],
                   rinex['ObservationYear'], rinex['ObservationDOY']))
            cnn.query(
                'DELETE FROM rinex WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' AND "ObservationYear" = %i AND "ObservationDOY" = %i'
                % (rinex['NetworkCode'], rinex['StationCode'],
                   rinex['ObservationYear'], rinex['ObservationDOY']))
            cnn.commit_transac()

            return ('Multiday rinex file moved out of the archive: %s.%s %i %i using node %s'
                    % (rinex['NetworkCode'], rinex['StationCode'],
                       rinex['ObservationYear'], rinex['ObservationDOY'], platform.node()))
        else:
            cnn.update('rinex', rinex, Completion=rnxobj.completion)

    except pyRinex.pyRinexExceptionBadFile:
        # empty file or problem with crinex format, move out
        archive = pyArchiveStruct.RinexStruct(cnn)
        archive.remove_rinex(
            rinex,
            os.path.join(
                Config.repository_data_reject, 'bad_rinex/%i/%03i' %
                (rinex['ObservationYear'], rinex['ObservationDOY'])))

    except Exception:
        return (traceback.format_exc() + ' processing rinex: %s.%s %i %i using node %s'
                % (rinex['NetworkCode'], rinex['StationCode'],
                   rinex['ObservationYear'], rinex['ObservationDOY'], platform.node()))
Example #21
    def get_path(self):

        # initialize a date object
        date = pyDate.Date(year=self.year, doy=self.doy)

        # create string version of the gps week
        gps_week_str = str(date.gpsWeek)

        # zero-pad the week number string to 4 characters
        if date.gpsWeek < 1000:
            gps_week_str = '0' + gps_week_str

        file_name = 'g04' + gps_week_str + str(date.gpsWeekDay) + '.sp3.Z'

        return file_name
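
The manual '0' prefix pads the GPS week to four digits; Examples #19 and #30 get the same result with zfill, which this method could use as well:

# equivalent zero-padding to four digits, as in Examples #19 and #30
gps_week_str = str(date.gpsWeek).zfill(4)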
Example #22
def main():

    # stnList = ['igs::algo', 'igs::thu3', 'igs::alrt', 'igs::yell', 'igs::p213', 'igs::zimm', 'igs::palm', 'igs::vesl']
    stnList = open('/Users/abelbrown/Documents/workspace/pyNapeos/data/pyOTL/stn_list.test').readlines()
    date = pyDate.Date(year=2009, doy=207)

    # pathToGrdtab = "/media/fugu/processing/src/gamit_OSX/gamit/bin/grdtab"  # overridden by the local binary below
    pathToGrdtab = "/Users/abelbrown/Documents/workspace/pyNapeos/data/pyOTL/grdtab.bin"

    pyotl = pyOTL(work_dir="~/Documents/workspace/pyNapeos/data/pyOTL",
                  otl_output_file="~/Documents/workspace/pyNapeos/data/pyOTL/otl.blq",
                  stn_list=stnList,
                  date=date,
                  grdtab_path=pathToGrdtab)

    pyotl.computeOTL(stnList=['igs::algo', 'igs::thu3', 'igs::alrt'])
Example #23
    def export(self, path='.'):

        # import mat file capabilities
        import scipy.io
        import os

        # init
        ts = dict()

        # export with full station ids
        translate = self.dataMgr.getReverseAliasMap()

        # init the station list
        ts['stnm'] = list()

        for stnName in self.stnList:
            stnId = list(translate[stnName])[0]
            ts['stnm'].append(stnId)

        # compute the fractional year
        ts['epochs'] = pyDate.Date(year=self.dataMgr.year, doy=self.dataMgr.doy).fyear

        # the actual data
        ts['npvs'] = self.npvs

        # the pyk daily merge
        # npvsF = nanMeanForRows(self.npvsStacked, self.npvsWeights)
        # ts['npv_pyk'] = npvsF

        # generate a list of network names
        net_list = list()
        for obj in self.dataMgr:
            net_list.append(obj.name())
        ts['net_list'] = net_list

        # make sure the path given actually exists
        if not os.path.isdir(path):
            raise pyStk.pyStkException('path ' + path + ' does not exist\n')

        # generate the tsd file name
        fileName = 'tsd_' + str(self.dataMgr.year) + '_' + str(self.dataMgr.doy) + '.mat'

        # create the full export file path with file name
        filePath = os.path.join(path, fileName)

        # do it
        scipy.io.savemat(filePath, ts, oned_as='column', do_compression=True)
Example #24
def main():

    print(' >> Loading g08d APRs...')
    mat = hdf5storage.loadmat('PRIORS_from_g08d.mat')

    # stn_index = np.where(mat['pv_stnm'] == rnx['NetworkCode'].upper() + '_' + rnx['StationCode'].upper())[0][0]
    # ydm_index = np.where((mat['pv_Epoch']['iyear'] == date.year) & (mat['pv_Epoch']['doy'] == date.doy))

    cnn = dbConnection.Cnn('gnss_data.cfg')

    for stnm in mat['pv_stnm'].tolist():
        NetworkCode = stnm[0].split('_')[0].lower()
        StationCode = stnm[0].split('_')[1].lower()

        station_id = NetworkCode + '.' + StationCode

        print(' -- inserting ' + station_id)

        if cnn.query(
                'SELECT * FROM stations WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\''
                % (NetworkCode, StationCode)).ntuples() != 0:
            # get the rows for this station
            stn_index = np.where(mat['pv_stnm'] == stnm[0])[0][0]
            xyz = mat['pv_xyz'][stn_index * 3:stn_index * 3 + 3]
            enu = mat['pv_sig_enu'][stn_index * 3:stn_index * 3 + 3]

            # loop through the epochs
            for i, fyear in enumerate(mat['pv_Epoch']['fyear'][0][0]):
                date = pyDate.Date(fyear=fyear)

                if enu[0][i] < 10:
                    # print ' -- ' + station_id  + ' ' + date.yyyyddd()
                    # actual sigma value, otherwise it's a super unconstrained (should not be inserted)
                    try:
                        cnn.query(
                            'INSERT INTO apr_coords '
                            '("NetworkCode", "StationCode", "Year", "DOY", "FYear", "x", "y", "z", "sn", "se", "su", "ReferenceFrame") VALUES '
                            '(\'%s\', \'%s\', %i, %i, %f, %f, %f, %f, %f, %f, %f, \'g08d\')'
                            % (NetworkCode, StationCode, date.year, date.doy,
                               date.fyear, xyz[0][i], xyz[1][i], xyz[2][i],
                               enu[0][i], enu[1][i], enu[2][i]))
                    except pg.IntegrityError:
                        print(' -- ' + station_id + ' ' + date.yyyyddd() +
                              ' already exists!')

        else:
            print(' -- COULD NOT FIND STATION ' + station_id)
Example #25
    def initialize(self):

        # create date object
        self.date = pyDate.Date(year=self.options['year'],
                                doy=self.options['doy'])

        # check for pre-existing solution if lazy
        (solutionAlreadyExists,
         key) = Resources.soln_exists(self.date, self.options['expt'],
                                      self.options['org'],
                                      self.options['network_id'])

        if solutionAlreadyExists and self.isLazy:
            raise Processing.LazyException("file exists: " + key)

        # do all the program independent stuff
        super(Session, self).initialize()

        # get the resource bucket path
        bucket = self.get_resources_path()

        # get the apr file
        apr_file = glob.glob(os.path.join(bucket, '*.apr'))

        # yell about it if not found
        if len(apr_file) != 1:
            raise GamitException('problem identifying APR resources in ' +
                                 bucket)

        # create apr file
        wlapr2apr(os.path.join(bucket, apr_file[0]))

        # get the binaries for gamit
        self.files['bin'] = Resources.get_bin('gamit', self.work_dir_path)

        # get the tables for gamit
        self.files['tables'] = Resources.get_tables('gamit',
                                                    self.work_dir_path)

        # create custom setup shell script
        self.files['setup_script_path'] = self.__create_setup_script()

        # create custom run script
        self.files['run_script_path'] = self.__create_run_script()

        # create the custom cleanup script
        self.files['teardown_script_path'] = self.__create_teardown_script()
Example #26
    def get_path(self):

        # initialize a date object
        date = pyDate.Date(year=self.year, doy=self.doy)

        # create string version of the gps week
        gps_week_str = str(date.gpsWeek)

        # zero-pad the week number string to 4 characters
        if date.gpsWeek < 1000:
            gps_week_str = '0' + gps_week_str

        file_name = 'g05' + gps_week_str + str(date.gpsWeekDay) + '.mat.gz'

        doy = Utils.get_norm_doy_str(date.doy)

        return os.path.join(str(date.year), doy, 'boss', 'g05', 'n1', file_name)
Example #27
    def initialize(self):

        # no call to super.initialize(); just create the work dir ourselves

        year = Utils.get_norm_year_str(self.options['year'])
        doy  = Utils.get_norm_doy_str(self.options['doy'])

        # init date object
        date = pyDate.Date(year=year, doy=doy)

        # check for pre-existing solution if lazy
        (solutionAlreadyExists, key) = Resources.soln_exists(
            date,
            self.options['expt'],
            self.options['org'],
            self.options['network_id']
        )

        if solutionAlreadyExists and self.isLazy:
            raise Processing.LazyException("file exists: " + key)

        # make sure we have something specified to work with
        if len(self.src) == 0:
            raise GlobkException('no src has been specified')

        # make sure that work directory has been initialized
        if not self.is_valid():
            raise GlobkException('invalid session state')

        # make sure the temporary directory does not already exist
        if os.path.isdir(self.work_dir_path):
            raise GlobkException(
                'temporary work directory ' + self.work_dir_path + ' already exists'
            )

        # attempt to create the work directory
        try:
            # make parent dirs also
            os.makedirs(self.work_dir_path, 0o755)
            os.makedirs(self.get_resources_path(), 0o755)
        except Exception as e:
            # unsuccessful attempt
            raise GlobkException(str(e))
Example #28
def process_date(arg, missing_input='fill', allow_days=True):
    # function to handle date input from PG.
    # Input: arg = arguments from the command line
    #        missing_input = a string specifying whether the vector should be filled when an endpoint is missing
    #        allow_days = allow a single integer argument N, interpreted as now() - N days

    if missing_input == 'fill':
        dates = [
            pyDate.Date(year=1980, doy=1),
            pyDate.Date(datetime=datetime.now())
        ]
    else:
        dates = [None, None]

    if arg:
        for i, arg in enumerate(arg):
            try:
                if '.' in arg:
                    dates[i] = pyDate.Date(fyear=float(arg))
                elif '_' in arg:
                    dates[i] = pyDate.Date(year=int(arg.split('_')[0]),
                                           doy=int(arg.split('_')[1]))
                elif '/' in arg:
                    dates[i] = pyDate.Date(year=int(arg.split('/')[0]),
                                           month=int(arg.split('/')[1]),
                                           day=int(arg.split('/')[2]))
                elif '-' in arg:
                    dates[i] = pyDate.Date(gpsWeek=int(arg.split('-')[0]),
                                           gpsWeekDay=int(arg.split('-')[1]))
                elif len(arg) > 0:
                    if allow_days and i == 0:
                        dates[i] = pyDate.Date(
                            datetime=datetime.now()) - int(arg)
                    else:
                        raise ValueError(
                            'Invalid input date: allow_days was set to False.')
            except Exception as e:
                raise ValueError(
                    'Could not decode input date (valid entries: '
                    'fyear, yyyy_ddd, yyyy/mm/dd, gpswk-wkday). '
                    'Error while reading the date start/end parameters: ' +
                    str(e))

    return tuple(dates)
Example #29
def main():

    (file, outdir) = get_input_args()

    # get the date from the sinex file
    gpsWeek = int(os.path.basename(file)[3:7])
    gpsWeekDay = int(os.path.basename(file)[7])

    # compute a data from the information
    date = pyDate.Date(gpsweek=gpsWeek, gpsweekday=gpsWeekDay)

    # check outdir
    # if out dir is none then put soln file
    # in same directory as snx files
    if outdir is None: outdir = '.'

    # make full path for solution file
    solnFilePath = os.path.join(outdir, getOutFileName(file))

    # init sinex parser for current sinex file
    snxParser = snxParse.snxFileParser(file).parse()

    # construct npvs and npvs sigma from the sinex data
    npvs, npvs_sigma = npv(snxParser.stationDict)

    # create station list from dictionary keys
    stn_list = snxParser.stationDict.keys()

    # compute epoch in fractional year
    epochs = date.fyear

    #extract the variance factor
    var_factor = snxParser.varianceFactor

    # save as a mat file
    scipy.io.savemat(solnFilePath,
                     mdict={'stnm':       stn_list,
                            'epochs':     epochs,
                            'npvs':       npvs,
                            'npv_sigma':  npvs_sigma,
                            'var_factor': var_factor},
                     oned_as='column')
Example #30
    def __init__(self, archive, date, filename, copyto):

        if date.gpsWeek < 0 or date > pyDate.Date(datetime=datetime.now()):
            # do not allow negative weeks or future orbit downloads!
            raise pyProductsExceptionUnreasonableDate('Orbit requested for an unreasonable date: '
                                                      'week ' + str(date.gpsWeek) + \
                                                      ' day ' + str(date.gpsWeekDay) + \
                                                      ' (' + date.yyyyddd() + ')')

        archive = archive.replace('$year',     str(date.year)) \
                         .replace('$doy',      str(date.doy).zfill(3)) \
                         .replace('$gpsweek',  str(date.gpsWeek).zfill(4)) \
                         .replace('$gpswkday', str(date.gpsWeekDay))

        self.archive = archive
        self.path = None
        self.filename = filename

        archive_file_path = os.path.join(archive, self.filename)
        copy_path = os.path.join(copyto, self.filename)

        self.file_path = copy_path

        # try both zipped and unzipped n files
        if os.path.isfile(archive_file_path):
            copyfile(archive_file_path, copy_path)
        else:
            for ext in ('.Z', '.gz', '.zip'):
                if os.path.isfile(archive_file_path + ext):
                    copyfile(archive_file_path + ext, copy_path + ext)

                    pyRunWithRetry.RunCommand('gunzip -f ' + copy_path + ext,
                                              15).run_shell()
                    break
            else:
                raise pyProductsException(
                    'Could not find the archive file for ' + self.filename)