Example No. 1
def main():

    parser = argparse.ArgumentParser(description='GNSS time series stacker')

    parser.add_argument(
        'project_file',
        type=str,
        nargs=1,
        metavar='{project cfg file}',
        help=
        "Project CFG file with all the stations being processed in Parallel.GAMIT"
    )

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")

    GamitConfig = GamitConfiguration(
        args.project_file[0], check_config=False)  # type: GamitConfiguration

    stations = station_list(
        cnn, GamitConfig.NetworkConfig,
        [Date(year=1999, doy=100),
         Date(year=1999, doy=128)])

    # split the stations into subnet_count subnetworks

    archive = pyArchiveStruct.RinexStruct(
        cnn)  # type: pyArchiveStruct.RinexStruct

    net_object = pyNetwork.Network(cnn, archive, GamitConfig, stations,
                                   Date(year=1999, doy=128))

    generate_kml([Date(year=1999, doy=128)], net_object.sessions, GamitConfig)
Example No. 2
    def __init__(self, cnn, project, start_date, end_date, verbose=False):

        super(DRA, self).__init__()

        self.project = project
        self.cnn = cnn
        self.transformations = []
        self.verbose = verbose

        if end_date is None:
            end_date = Date(datetime=datetime.now())

        print(' >> Loading GAMIT solutions for project %s...' % project)

        gamit_vertices = self.cnn.query_float(
            'SELECT "NetworkCode" || \'.\' || "StationCode", "X", "Y", "Z", "Year", "DOY", "FYear" '
            'FROM gamit_soln WHERE "Project" = \'%s\' AND ("Year", "DOY") BETWEEN (%i, %i) AND (%i, %i) '
            'ORDER BY "NetworkCode", "StationCode"' %
            (project, start_date.year, start_date.doy, end_date.year,
             end_date.doy))

        self.gamit_vertices = np_array_vertices(gamit_vertices)

        dates = self.cnn.query_float(
            'SELECT "Year", "DOY" FROM gamit_soln WHERE "Project" = \'%s\' '
            'AND ("Year", "DOY") BETWEEN (%i, %i) AND (%i, %i) '
            'GROUP BY "Year", "DOY" ORDER BY "Year", "DOY"' %
            (project, start_date.year, start_date.doy, end_date.year,
             end_date.doy))

        self.dates = [Date(year=int(d[0]), doy=int(d[1])) for d in dates]

        self.stations = self.cnn.query_float(
            'SELECT "NetworkCode", "StationCode" FROM gamit_soln '
            'WHERE "Project" = \'%s\' AND ("Year", "DOY") '
            'BETWEEN (%i, %i) AND (%i, %i) '
            'GROUP BY "NetworkCode", "StationCode" '
            'ORDER BY "NetworkCode", "StationCode"' %
            (project, start_date.year, start_date.doy, end_date.year,
             end_date.doy),
            as_dict=True)

        i = 0
        for d in tqdm(self.dates,
                      ncols=160,
                      desc=' >> Initializing the stack polyhedrons'):
            self.append(Polyhedron(self.gamit_vertices, project, d))
            if i < len(self.dates) - 1:
                if d != self.dates[i + 1] - 1:
                    for dd in [
                            Date(mjd=md) for md in list(
                                range(d.mjd + 1, self.dates[i + 1].mjd))
                    ]:
                        tqdm.write(' -- Missing DOY detected: %s' %
                                   dd.yyyyddd())
            i += 1
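
The loop above walks consecutive solution epochs by Modified Julian Date and reports every missing day. A self-contained sketch of the same idea using plain datetime objects (standing in for the project's Date class, whose MJD arithmetic is assumed):

from datetime import date, timedelta

# stand-in for the project's Date objects: three epochs with a gap
epochs = [date(1999, 4, 10), date(1999, 4, 11), date(1999, 4, 14)]

# report every missing day between consecutive epochs, as the loop above does
for d1, d2 in zip(epochs[:-1], epochs[1:]):
    missing = (d2 - d1).days - 1
    for k in range(missing):
        dd = d1 + timedelta(days=k + 1)
        print(' -- Missing DOY detected: %s %03i' % (dd.year, dd.timetuple().tm_yday))
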
Example No. 3
    def __init__(self, cnn, NetworkCode, StationCode, project):
        self.rs = cnn.query_float('SELECT "Year", "DOY", "Date", "ZTD" FROM gamit_ztd '
                                  'WHERE "Project" = \'%s\' AND "NetworkCode" = \'%s\' AND '
                                  '"StationCode" = \'%s\' '
                                  'ORDER BY "Year", "DOY", "NetworkCode", "StationCode"'
                                  % (project, NetworkCode, StationCode), as_dict=True)

        self.date = [Date(datetime=r['Date']) for r in self.rs]
        self.t = np.array([d.fyear for d in self.date])
        ts = np.arange(np.min(self.date[0].mjd), np.max(self.date[-1].mjd) + 1, 1)
        self.ts = np.array([Date(mjd=tts).fyear for tts in ts])
        self.ztd = np.array([r['ZTD'] for r in self.rs])

        self.type = 'ztd'
        self.stack_name = None
Example No. 4
    def __init__(self, cnn, name):

        self.name = name

        # get the station list
        rs = cnn.query('SELECT "NetworkCode", "StationCode" FROM gamit_soln '
                       'WHERE "Project" = \'%s\' GROUP BY "NetworkCode", "StationCode" '
                       'ORDER BY "NetworkCode", "StationCode"' % name)

        self.stnlist = [Station(cnn, item['NetworkCode'], item['StationCode']) for item in rs.dictresult()]

        # get the epochs
        rs = cnn.query('SELECT "Year", "DOY" FROM gamit_soln '
                       'WHERE "Project" = \'%s\' GROUP BY "Year", "DOY" ORDER BY "Year", "DOY"' % name)

        rs = rs.dictresult()
        self.epochs = [Date(year=item['Year'], doy=item['DOY']) for item in rs]

        # load the polyhedrons
        self.polyhedrons = []

        print(' >> Loading polyhedrons. Please wait...')

        rs = cnn.query('SELECT * FROM gamit_soln WHERE "Project" = \'%s\' '
                       'ORDER BY "Year", "DOY", "NetworkCode", "StationCode"' % name)

        self.polyhedrons = rs.dictresult()
        self.ts = []
        self.etms = []

        self.calculate_etms(cnn)
        self.cnn = cnn
Example No. 5
    def __init__(self, polyhedrons):

        # get the mean epoch
        date = [poly.date.mjd for poly in polyhedrons]
        date = Date(mjd=np.mean(date))

        # get the set of stations
        stn = []
        for poly in polyhedrons:
            stn += poly.vertices['stn'].tolist()

        stn = np.unique(stn)

        # average the coordinates for each station
        poly = []
        for s in stn:
            v = np.array([])
            for p in polyhedrons:
                if not v.size:
                    v = p.vertices[p.vertices['stn'] == s]
                else:
                    v = np.concatenate((v, p.vertices[p.vertices['stn'] == s]))

            poly.append((s, np.mean(v['x']), np.mean(v['y']), np.mean(v['z']), date.year, date.doy, date.fyear))

        pp = np.array(poly, dtype=[('stn', 'S8'), ('x', 'float64'), ('y', 'float64'), ('z', 'float64'),
                                   ('yr', 'i4'), ('dd', 'i4'), ('fy', 'float64')])

        super(Combination, self).__init__(pp, polyhedrons[0].project, date)
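
Combination collapses several polyhedrons into one by averaging each station's coordinates and stamping the result with the mean epoch. A minimal numpy sketch of that per-station averaging over structured arrays with the same 'stn'/'x'/'y'/'z' fields (the Polyhedron class itself is assumed):

import numpy as np

dtype = [('stn', 'S8'), ('x', 'float64'), ('y', 'float64'), ('z', 'float64')]

# two daily "polyhedrons" sharing station igm1; jfng appears only once
day1 = np.array([(b'igm1', 1.0, 2.0, 3.0), (b'jfng', 7.0, 8.0, 9.0)], dtype=dtype)
day2 = np.array([(b'igm1', 1.2, 2.2, 3.2)], dtype=dtype)

allv = np.concatenate((day1, day2))

# average the coordinates of each unique station, as Combination.__init__ does
mean_poly = [(s, np.mean(allv['x'][allv['stn'] == s]),
                 np.mean(allv['y'][allv['stn'] == s]),
                 np.mean(allv['z'][allv['stn'] == s]))
             for s in np.unique(allv['stn'])]
print(mean_poly)
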
Example No. 6
    def __init__(self, filename):
        self.path = os.path.dirname(filename)
        self.filename = os.path.basename(filename)
        self.version = 0

        self.type = self.identify_rinex_type(filename)

        parts = self.split_filename(filename)
        try:
            if self.version < 3:
                self.StationCode = parts[0]
                self.monument = None
                self.receiver = None
                self.country = None
                self.doy = parts[1]
                self.session = parts[2]
                self.year = parts[3]
                self.format_compression = parts[4]
                self.start_time = None
                self.data_source = None
                self.file_period = None
                self.data_frequency = None
                self.data_type = None
                self.date = Date(year=check_year(self.year), doy=int(self.doy))
            else:
                # DDG: lowercase station code to match the default station name conventions
                self.StationCode = parts[0][0:4].lower()
                self.monument = parts[0][4:5]
                self.receiver = parts[0][5:6]
                self.country = parts[0][6:]
                self.session = None
                self.year = parts[2][0:4]
                self.doy = parts[2][4:7]
                self.format_compression = parts[6]
                self.start_time = parts[2]
                self.data_source = parts[1]
                self.file_period = parts[3]
                self.data_frequency = parts[4]
                self.data_type = parts[5]
                self.date = Date(year=int(self.year), doy=int(self.doy))

            self.month = self.date.month
            self.day = self.date.day

        except Exception as e:
            raise RinexNameException(e)
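
The constructor above distinguishes the short RINEX 2 naming convention (ssssdddf.yyt) from the long RINEX 3 one (SSSSMRCCC_S_YYYYDDDHHMM_...). A rough, self-contained sketch of that split using regular expressions; the exact field layout handled by identify_rinex_type/split_filename is only inferred from the assignments above:

import re

def classify_rinex_name(filename):
    """Very rough split between RINEX 2 short names and RINEX 3 long names."""
    # RINEX 2: e.g. algo0100.99d.Z -> station, doy, session, yy, type/compression
    m2 = re.match(r'^(\w{4})(\d{3})(\w)\.(\d{2})([doOnN].*)$', filename)
    if m2:
        stn, doy, session, yy, fmt = m2.groups()
        return {'version': 2, 'station': stn.lower(), 'doy': int(doy),
                'session': session, 'year': int(yy), 'format': fmt}
    # RINEX 3: e.g. ALGO00CAN_R_19990100000_01D_30S_MO.crx.gz
    m3 = re.match(r'^(\w{9})_(\w)_(\d{4})(\d{3})(\d{4})_(\w+)_(\w+)_(\w+)\.(.+)$', filename)
    if m3:
        name, source, year, doy, hhmm, period, freq, dtype, fmt = m3.groups()
        return {'version': 3, 'station': name[0:4].lower(), 'monument': name[4:5],
                'receiver': name[5:6], 'country': name[6:], 'year': int(year),
                'doy': int(doy), 'format': fmt}
    return None

print(classify_rinex_name('algo0100.99d.Z'))
print(classify_rinex_name('ALGO00CAN_R_19990100000_01D_30S_MO.crx.gz'))
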
Example No. 7
    def __init__(self, cnn, project, end_date):

        super(DRA, self).__init__()

        self.project = project
        self.cnn = cnn
        self.transformations = []

        if end_date is None:
            end_date = Date(datetime=datetime.now())

        print(' >> Loading GAMIT solutions for project %s...' % project)

        gamit_vertices = self.cnn.query_float(
            'SELECT "NetworkCode" || \'.\' || "StationCode", "X", "Y", "Z", "Year", "DOY", "FYear" '
            'FROM gamit_soln WHERE "Project" = \'%s\' AND ("Year", "DOY") <= (%i, %i) '
            'ORDER BY "NetworkCode", "StationCode"' %
            (project, end_date.year, end_date.doy))

        self.gamit_vertices = np.array(gamit_vertices,
                                       dtype=[('stn', 'S8'), ('x', 'float64'),
                                              ('y', 'float64'),
                                              ('z', 'float64'), ('yr', 'i4'),
                                              ('dd', 'i4'), ('fy', 'float64')])

        dates = self.cnn.query_float(
            'SELECT "Year", "DOY" FROM gamit_soln WHERE "Project" = \'%s\' '
            'AND ("Year", "DOY") <= (%i, %i) '
            'GROUP BY "Year", "DOY" ORDER BY "Year", "DOY"' %
            (project, end_date.year, end_date.doy))

        self.dates = [Date(year=int(d[0]), doy=int(d[1])) for d in dates]

        self.stations = self.cnn.query_float(
            'SELECT "NetworkCode", "StationCode" FROM gamit_soln '
            'WHERE "Project" = \'%s\' AND ("Year", "DOY") <= (%i, %i) '
            'GROUP BY "NetworkCode", "StationCode" '
            'ORDER BY "NetworkCode", "StationCode"' %
            (project, end_date.year, end_date.doy),
            as_dict=True)

        for d in tqdm(self.dates,
                      ncols=160,
                      desc=' >> Initializing the stack polyhedrons'):
            self.append(Polyhedron(self.gamit_vertices, project, d))
Example No. 8
def dra(cnn, project, dates):

    rs = cnn.query('SELECT "NetworkCode", "StationCode" FROM gamit_soln '
                   'WHERE "Project" = \'%s\' AND "FYear" BETWEEN %.4f AND %.4f GROUP BY "NetworkCode", "StationCode" '
                   'ORDER BY "NetworkCode", "StationCode"' % (project, dates[0].fyear, dates[1].fyear))

    stnlist = rs.dictresult()

    # get the epochs
    ep = cnn.query('SELECT "Year", "DOY" FROM gamit_soln '
                   'WHERE "Project" = \'%s\' AND "FYear" BETWEEN %.4f AND %.4f '
                   'GROUP BY "Year", "DOY" ORDER BY "Year", "DOY"' % (project, dates[0].fyear, dates[1].fyear))

    ep = ep.dictresult()

    epochs = [Date(year=item['Year'], doy=item['DOY'])
              for item in ep]

    A = np.array([])
    Ax = []
    Ay = []
    Az = []

    for station in stnlist:

        print('stacking %s.%s' % (station['NetworkCode'], station['StationCode']))

        try:
            etm = pyETM.GamitETM(cnn, station['NetworkCode'], station['StationCode'], project=project)
        except Exception as e:
            print(" Exception: " + str(e))
            continue

        x = etm.soln.x
        y = etm.soln.y
        z = etm.soln.z

        Ax.append(np.array([np.zeros(x.shape), -z, y, np.ones(x.shape), np.zeros(x.shape), np.zeros(x.shape)]).transpose())
        Ay.append(np.array([z, np.zeros(x.shape), -x, np.zeros(x.shape), np.ones(x.shape), np.zeros(x.shape)]).transpose())
        Az.append(np.array([-y, x, np.zeros(x.shape), np.zeros(x.shape), np.zeros(x.shape), np.ones(x.shape)]).transpose())

        x = np.column_stack((Ax, etm.A, np.zeros(etm.A.shape), np.zeros(etm.A.shape)))
        y = np.column_stack((Ay, np.zeros(etm.A.shape), etm.A, np.zeros(etm.A.shape)))
        z = np.column_stack((Az, np.zeros(etm.A.shape), np.zeros(etm.A.shape), etm.A))

        A = np.row_stack((x, y, z))
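
The Ax/Ay/Az blocks above are rows of the linearized Helmert design for small rotations plus translations: for each station (x, y, z), the X-residual row is [0, -z, y, 1, 0, 0], and so on for Y and Z. A self-contained numpy sketch that assembles that design matrix for a handful of points and recovers a known 6-parameter transformation (no scale term) by least squares:

import numpy as np

def helmert_design(xyz):
    """6-parameter (3 small rotations + 3 translations) design matrix."""
    x, y, z = xyz[:, 0], xyz[:, 1], xyz[:, 2]
    o, i = np.zeros_like(x), np.ones_like(x)
    Ax = np.column_stack((o, -z,  y, i, o, o))
    Ay = np.column_stack(( z,  o, -x, o, i, o))
    Az = np.column_stack((-y,  x,  o, o, o, i))
    return np.vstack((Ax, Ay, Az))

rng = np.random.default_rng(0)
pts = rng.normal(scale=6.4e6, size=(5, 3))                  # station positions (m)
truth = np.array([2e-9, -1e-9, 3e-9, 0.01, -0.02, 0.005])   # rx ry rz (rad), tx ty tz (m)

A = helmert_design(pts)
L = A @ truth                                               # simulated residuals
est, *_ = np.linalg.lstsq(A, L, rcond=None)
print(est)   # recovers the rotations and translations
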
Example No. 9
def main():

    parser = argparse.ArgumentParser(description='GNSS time series stacker')

    parser.add_argument(
        'project',
        type=str,
        nargs=1,
        metavar='{project name}',
        help=
        "Specify the project name used to process the GAMIT solutions in Parallel.GAMIT."
    )
    parser.add_argument(
        'stack_name',
        type=str,
        nargs=1,
        metavar='{stack name}',
        help=
        "Specify a name for the stack: eg. itrf2014 or posgar07b. This name should be unique "
        "and cannot be repeated for any other solution project")
    parser.add_argument(
        '-max',
        '--max_iters',
        nargs=1,
        type=int,
        metavar='{max_iter}',
        help="Specify maximum number of iterations. Default is 4.")
    parser.add_argument(
        '-exclude',
        '--exclude_stations',
        nargs='+',
        type=str,
        metavar='{net.stnm}',
        help="Manually specify stations to remove from the stacking process.")
    parser.add_argument(
        '-use',
        '--use_stations',
        nargs='+',
        type=str,
        metavar='{net.stnm}',
        help="Manually specify stations to use for the stacking process.")
    parser.add_argument(
        '-dir',
        '--directory',
        type=str,
        help=
        "Directory to save the resulting PNG files. If not specified, assumed to be the "
        "production directory")
    parser.add_argument('-redo',
                        '--redo_stack',
                        action='store_true',
                        help="Delete the stack and redo it from scratch")
    parser.add_argument('-plot',
                        '--plot_stack_etms',
                        action='store_true',
                        default=False,
                        help="Plot the stack ETMs after computation is done")
    parser.add_argument(
        '-constrains',
        '--external_constrains',
        nargs='+',
        help=
        "File with external constrains parameters (position, velocity and periodic). These may be "
        "from a parent frame such as ITRF. "
        "Inheritance will occur with stations on the list whenever a parameter exists. "
        "Example: -constrains itrf14.txt "
        "Format is: net.stn x y z epoch vx vy vz sn_1y sn_6m cn_1y cn_6m se_1y se_6m ce_1y ce_6m "
        "su_1y su_6m cu_1y cu_6m ")
    parser.add_argument(
        '-d',
        '--date_end',
        nargs=1,
        metavar='date',
        help=
        'Limit the polyhedrons to the specified date. Can be in wwww-d, yyyy_ddd, yyyy/mm/dd '
        'or fyear format')
    parser.add_argument('-np',
                        '--noparallel',
                        action='store_true',
                        help="Execute command without parallelization.")

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")

    Config = pyOptions.ReadOptions(
        "gnss_data.cfg")  # type: pyOptions.ReadOptions

    JobServer = pyJobServer.JobServer(
        Config,
        run_parallel=not args.noparallel)  # type: pyJobServer.JobServer

    if args.max_iters:
        max_iters = int(args.max_iters[0])
    else:
        max_iters = 4
        print(' >> Defaulting to 4 iterations')

    if args.exclude_stations:
        exclude_stn = args.exclude_stations
    else:
        exclude_stn = []

    if args.use_stations:
        use_stn = args.use_stations
    else:
        use_stn = []

    dates = [Date(year=1980, doy=1), Date(datetime=datetime.now())]
    if args.date_end is not None:
        try:
            dates = process_date(
                [str(Date(year=1980, doy=1).fyear), args.date_end[0]])
        except ValueError as e:
            parser.error(str(e))

    # create folder for plots

    if args.directory:
        if not os.path.exists(args.directory):
            os.mkdir(args.directory)
    else:
        if not os.path.exists('production'):
            os.mkdir('production')
        args.directory = 'production'

    # load the ITRF dat file with the periodic space components
    if args.external_constrains:
        constrains = load_constrains(args.external_constrains[0])
    else:
        constrains = None

    # create the stack object
    stack = pyStack.Stack(cnn,
                          args.project[0],
                          args.stack_name[0],
                          args.redo_stack,
                          end_date=dates[1])

    # stack.align_spaces(frame_params)
    # stack.to_json('alignment.json')
    # exit()

    for i in range(max_iters):
        # create the target polyhedrons based on iteration number (i == 0: PPP)

        target = calculate_etms(cnn, stack, JobServer, i)

        qbar = tqdm(total=len(stack),
                    ncols=160,
                    desc=' >> Aligning polyhedrons (%i of %i)' %
                    (i + 1, max_iters))

        # work on each polyhedron of the stack
        for j in range(len(stack)):

            qbar.update()

            if not stack[j].aligned:
                # do not move this if up one level: to speed up the target polyhedron loading process, the target is
                # set to an empty list when the polyhedron is already aligned
                if stack[j].date != target[j].date:
                    # raise an error if dates don't agree!
                    raise Exception(
                        'Error processing %s: dates don\'t agree (target date %s)'
                        % (stack[j].date.yyyyddd(), target[j].date.yyyyddd()))
                else:
                    # should only attempt to align a polyhedron that is unaligned
                    # do not set the polyhedron as aligned unless we are in the max iteration step
                    stack[j].align(target[j],
                                   True if i == max_iters - 1 else False)
                    # write info to the screen
                    qbar.write(
                        ' -- %s (%3i) %2i it: wrms: %4.1f T %5.1f %5.1f %5.1f '
                        'R (%5.1f %5.1f %5.1f)*1e-9' %
                        (stack[j].date.yyyyddd(), stack[j].stations_used,
                         stack[j].iterations, stack[j].wrms * 1000,
                         stack[j].helmert[-3] * 1000,
                         stack[j].helmert[-2] * 1000,
                         stack[j].helmert[-1] * 1000, stack[j].helmert[-6],
                         stack[j].helmert[-5], stack[j].helmert[-4]))

        stack.transformations.append([poly.info() for poly in stack])
        qbar.close()

    if args.redo_stack:
        # before removing common modes (or inheriting periodic terms), calculate ETMs with final aligned solutions
        calculate_etms(cnn,
                       stack,
                       JobServer,
                       iterations=None,
                       create_target=False)
        # only apply common mode removal if redoing the stack
        if args.external_constrains:
            stack.remove_common_modes(constrains)
        else:
            stack.remove_common_modes()

        # here, we also align the stack in velocity and coordinate space
        stack.align_spaces(constrains)

    # calculate the etms again, after removing or inheriting parameters
    calculate_etms(cnn, stack, JobServer, iterations=None, create_target=False)

    # save the json with the information about the alignment
    stack.to_json(args.stack_name[0] + '_alignment.json')
    # save polyhedrons to the database
    stack.save()

    if args.plot_stack_etms:
        qbar = tqdm(total=len(stack.stations), ncols=160)
        for stn in stack.stations:
            # plot the ETMs
            qbar.update()
            qbar.postfix = '%s.%s' % (stn['NetworkCode'], stn['StationCode'])
            plot_etm(cnn, stack, stn, args.directory)

        qbar.close()
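
The -d/--date_end option accepts several date formats (wwww-d GPS week and day, yyyy_ddd, yyyy/mm/dd or fractional year), which process_date turns into Date objects. A self-contained sketch of that kind of dispatch returning plain datetimes; this is not the project's process_date, whose exact behaviour is assumed:

import re
from datetime import datetime, timedelta

GPS_EPOCH = datetime(1980, 1, 6)   # start of GPS week 0

def parse_flex_date(value):
    """Rough re-implementation of the date formats accepted above."""
    if re.match(r'^\d{4}-\d$', value):                      # wwww-d: GPS week and day of week
        week, dow = (int(v) for v in value.split('-'))
        return GPS_EPOCH + timedelta(weeks=week, days=dow)
    if re.match(r'^\d{4}_\d{1,3}$', value):                 # yyyy_ddd: year and day of year
        year, doy = (int(v) for v in value.split('_'))
        return datetime(year, 1, 1) + timedelta(days=doy - 1)
    if re.match(r'^\d{4}/\d{1,2}/\d{1,2}$', value):         # yyyy/mm/dd
        return datetime.strptime(value, '%Y/%m/%d')
    fyear = float(value)                                    # fractional year
    year = int(fyear)
    days_in_year = (datetime(year + 1, 1, 1) - datetime(year, 1, 1)).days
    return datetime(year, 1, 1) + timedelta(days=(fyear - year) * days_in_year)

for example in ('2001-3', '1999_128', '1999/05/08', '1999.35'):
    print(example, '->', parse_flex_date(example).date())
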
Example No. 10
def parse_monitor(cnn, monitor):
    lines = file_readlines(monitor)
    output = ''.join(lines)

    try:
        project, subnet, year, doy = re.findall(
            'GamitTask initialized for (\w+.*?).(\w+\d+): (\d+) (\d+)', output,
            re.MULTILINE)[0]
        subnet = int(subnet[3:])
        year = int(year)
        doy = int(doy)
    except:
        # maybe it is a project with no subnets
        try:
            project, year, doy = re.findall(
                'GamitTask initialized for (\w+.*?): (\d+) (\d+)', output,
                re.MULTILINE)[0]
            subnet = 0
            year = int(year)
            doy = int(doy)
        except:
            print(' -- could not determine project! ' + monitor)
            return

    try:
        node = re.findall('executing on (\w+)', output, re.MULTILINE)[0]
    except:
        node = 'PUGAMIT100'

    try:
        start_time = datetime.strptime(
            re.findall(
                'run.sh \((\d+-\d+-\d+ \d+:\d+:\d+)\): Iteration depth: 1',
                output, re.MULTILINE)[0], '%Y-%m-%d %H:%M:%S')
    except:
        print(' -- could not determine start_time! ' + monitor)
        return

    try:
        end_time = datetime.strptime(
            re.findall(
                'finish.sh \((\d+-\d+-\d+ \d+:\d+:\d+)\): Done processing h-files and generating SINEX.',
                output, re.MULTILINE)[0], '%Y-%m-%d %H:%M:%S')
    except:
        print(' -- could not determine end_time! ' + monitor)
        return

    try:
        iterations = int(
            re.findall(
                'run.sh \(\d+-\d+-\d+ \d+:\d+:\d+\): Iteration depth: (\d+)',
                output, re.MULTILINE)[-1])
    except:
        print(' -- could not determine iterations!')
        return

    try:
        nrms = float(
            re.findall(
                'Prefit nrms:\s+\d+.\d+[eEdD]\+\d+\s+Postfit nrms:\s+(\d+.\d+[eEdD][+-]\d+)',
                output, re.MULTILINE)[-1])
    except:
        # maybe GAMIT didn't finish
        nrms = 1

    try:
        updated_apr = re.findall(' (\w+).*?Updated from', output,
                                 re.MULTILINE)[0]
        updated_apr = [upd.replace('_GPS', '').lower() for upd in updated_apr]
        upd_stn = []
        for stn in updated_apr:
            upd_stn += re.findall(
                'fetching rinex for (\w+.\w+) %s' % stn.lower(), output,
                re.MULTILINE)

        upd_stn = ','.join(upd_stn)
    except:
        # maybe GAMIT didn't finish
        upd_stn = None

    try:
        wl = float(re.findall('WL fixed\s+(\d+.\d+)', output, re.MULTILINE)[0])
    except:
        # maybe GAMIT didn't finish
        wl = 0

    try:
        nl = float(re.findall('NL fixed\s+(\d+.\d+)', output, re.MULTILINE)[0])
    except:
        # maybe GAMIT didn't finish
        nl = 0

    try:
        oc = re.findall('relaxing over constrained stations (\w+.*)', output,
                        re.MULTILINE)[0]
        oc = oc.replace('|', ',').replace('_GPS', '').lower()

        oc_stn = []
        for stn in oc.split(','):
            oc_stn += re.findall(
                'fetching rinex for (\w+.\w+) %s' % stn.lower(), output,
                re.MULTILINE)

        oc_stn = ','.join(oc_stn)

    except:
        # maybe GAMIT didn't finish
        oc_stn = None

    try:
        overcons = re.findall('GCR APTOL (\w+).{10}\s+([-]?\d+.\d+)', output,
                              re.MULTILINE)

        if len(overcons) > 0:
            i = np.argmax(np.abs([float(o[1]) for o in overcons]))
            stn = overcons[int(i)][0]

            # get the real station code
            max_overconstrained = re.findall(
                'fetching rinex for (\w+.\w+) %s' % stn.lower(), output,
                re.MULTILINE)[0]
        else:
            max_overconstrained = None
    except:
        # maybe GAMIT didn't finish
        max_overconstrained = None

    try:
        cnn.insert(
            'gamit_stats', {
                'Project':
                project,
                'subnet':
                subnet,
                'Year':
                year,
                'DOY':
                doy,
                'FYear':
                Date(year=year, doy=doy).fyear,
                'wl':
                wl,
                'nl':
                nl,
                'nrms':
                nrms,
                'relaxed_constrains':
                oc_stn,
                'max_overconstrained':
                max_overconstrained,
                'updated_apr':
                upd_stn,
                'iterations':
                iterations,
                'node':
                node,
                'execution_time':
                int((end_time - start_time).total_seconds() / 60.0),
                'execution_date':
                start_time
            })
    except dbConnection.dbErrInsert:
        print(' -- record already exists ' + monitor)
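
parse_monitor extracts its timestamps with re.findall plus datetime.strptime and stores the run length in minutes. A small self-contained sketch of just that part, with a made-up monitor excerpt shaped after the patterns above (the real log format is only assumed from those regexes):

import re
from datetime import datetime

output = (
    'run.sh (2020-03-01 02:10:05): Iteration depth: 1\n'
    'run.sh (2020-03-01 03:02:11): Iteration depth: 2\n'
    'finish.sh (2020-03-01 03:40:30): Done processing h-files and generating SINEX.\n'
)

fmt = '%Y-%m-%d %H:%M:%S'
start_time = datetime.strptime(
    re.findall(r'run\.sh \((\d+-\d+-\d+ \d+:\d+:\d+)\): Iteration depth: 1', output)[0], fmt)
end_time = datetime.strptime(
    re.findall(r'finish\.sh \((\d+-\d+-\d+ \d+:\d+:\d+)\): Done processing', output)[0], fmt)
iterations = int(re.findall(r'Iteration depth: (\d+)', output)[-1])

print('iterations:', iterations)
print('execution_time (min):', int((end_time - start_time).total_seconds() / 60.0))
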
Example No. 11
    def __init__(self, configfile):

        self.options = {
            'path': None,
            'repository': None,
            'parallel': False,
            'cups': None,
            'node_list': None,
            'ip_address': None,
            'brdc': None,
            'sp3_type_1': None,
            'sp3_type_2': None,
            'sp3_type_3': None,
            'sp3_altr_1': None,
            'sp3_altr_2': None,
            'sp3_altr_3': None,
            'grdtab': None,
            'otlgrid': None,
            'otlmodel': 'FES2014b',
            'ppp_path': None,
            'institution': None,
            'info': None,
            'sp3': None,
            'frames': None,
            'atx': None,
            'height_codes': None,
            'ppp_exe': None,
            'ppp_remote_local': ()
        }

        config = ConfigParser.ConfigParser()
        config.readfp(open(configfile))

        # get the archive config
        for iconfig, val in dict(config.items('archive')).items():
            self.options[iconfig] = val

        # get the otl config
        for iconfig, val in dict(config.items('otl')).items():
            self.options[iconfig] = val

        # get the ppp config
        for iconfig, val in dict(config.items('ppp')).items():
            self.options[iconfig] = os.path.expandvars(val).replace('//', '/')

        # frames and dates
        frames = [item.strip() for item in self.options['frames'].split(',')]
        atx = [item.strip() for item in self.options['atx'].split(',')]

        self.Frames = []

        for frame, atx in zip(frames, atx):
            date = process_date(self.options[frame.lower()].split(','))
            self.Frames += [{
                'name':
                frame,
                'atx':
                atx,
                'dates': (Date(year=date[0].year,
                               doy=date[0].doy,
                               hour=0,
                               minute=0,
                               second=0),
                          Date(year=date[1].year,
                               doy=date[1].doy,
                               hour=23,
                               minute=59,
                               second=59))
            }]

        self.options['frames'] = self.Frames

        self.archive_path = self.options['path']
        self.sp3_path = self.options['sp3']
        self.brdc_path = self.options['brdc']
        self.repository = self.options['repository']

        self.repository_data_in = os.path.join(self.repository, 'data_in')
        self.repository_data_in_retry = os.path.join(self.repository,
                                                     'data_in_retry')
        self.repository_data_reject = os.path.join(self.repository,
                                                   'data_rejected')

        self.sp3types = [
            self.options['sp3_type_1'], self.options['sp3_type_2'],
            self.options['sp3_type_3']
        ]

        self.sp3types = [
            sp3type for sp3type in self.sp3types if sp3type is not None
        ]

        # alternative sp3 types
        self.sp3altrn = [
            self.options['sp3_altr_1'], self.options['sp3_altr_2'],
            self.options['sp3_altr_3']
        ]

        self.sp3altrn = [
            sp3alter for sp3alter in self.sp3altrn if sp3alter is not None
        ]

        if self.options['parallel'] == 'True':
            self.run_parallel = True
        else:
            self.run_parallel = False

        return
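
ReadOptions expects a gnss_data.cfg with at least [archive], [otl] and [ppp] sections whose keys land in self.options. A minimal sketch of such a file and of reading it; only option names that appear above (path, repository, parallel, ppp_path, ppp_exe, ...) are taken from the code, everything else about the file layout is an assumption:

import configparser   # Python 3 name; the snippet above uses the Python 2 ConfigParser module
import os

# a minimal, hypothetical gnss_data.cfg consistent with the keys read above
CFG = """
[archive]
path = /data/archive
repository = /data/repository
parallel = True

[otl]
grdtab = /opt/gamit/gg/bin/grdtab
otlmodel = FES2014b

[ppp]
ppp_path = /opt/ppp
ppp_exe = $PPP_HOME/ppp34003
"""

config = configparser.ConfigParser()
config.read_string(CFG)          # ReadOptions reads gnss_data.cfg from disk instead

options = {}
for section in ('archive', 'otl', 'ppp'):
    for key, val in config.items(section):
        # the ppp section is run through expandvars in the original code
        options[key] = os.path.expandvars(val) if section == 'ppp' else val

print(options['path'], options['parallel'], options['ppp_exe'])
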
Example No. 12
def dra(cnn, project, dates):

    rs = cnn.query('SELECT "NetworkCode", "StationCode" FROM gamit_soln '
                   'WHERE "Project" = \'%s\' AND "FYear" BETWEEN %.4f AND %.4f GROUP BY "NetworkCode", "StationCode" '
                   'ORDER BY "NetworkCode", "StationCode"' % (project, dates[0].fyear, dates[1].fyear))

    stnlist = rs.dictresult()

    # get the epochs
    ep = cnn.query('SELECT "Year", "DOY" FROM gamit_soln '
                   'WHERE "Project" = \'%s\' AND "FYear" BETWEEN %.4f AND %.4f '
                   'GROUP BY "Year", "DOY" ORDER BY "Year", "DOY"' % (project, dates[0].fyear, dates[1].fyear))

    ep = ep.dictresult()

    epochs = [Date(year=item['Year'], doy=item['DOY']) for item in ep]

    # delete DRA starting from the first requested epoch
    cnn.query('DELETE FROM gamit_dra WHERE "Project" = \'%s\' AND "FYear" >= %f' % (project, epochs[0].fyear))

    # query the first polyhedron in the line, which should be the last polyhedron in gamit_dra
    poly = cnn.query_float('SELECT "X", "Y", "Z", "Year", "DOY", "NetworkCode", "StationCode" FROM gamit_dra '
                           'WHERE "Project" = \'%s\' AND "FYear" = (SELECT max("FYear") FROM gamit_dra) '
                           'ORDER BY "NetworkCode", "StationCode"' % project)

    if len(poly) == 0:
        print(' -- Using gamit_soln: no pre-existent DRA found')
        # no last entry found in gamit_dra, use gamit_soln
        poly = cnn.query_float('SELECT "X", "Y", "Z", "Year", "DOY", "NetworkCode", "StationCode" FROM gamit_soln '
                               'WHERE "Project" = \'%s\' AND "Year" = %i AND "DOY" = %i '
                               'ORDER BY "NetworkCode", "StationCode"'
                               % (project, epochs[0].year, epochs[0].doy))
    else:
        print(' -- Pre-existent DRA found. Attaching.')

    polyhedrons = poly

    bar = tqdm(total=len(epochs)-1, ncols=160)

    for date1, date2 in zip(epochs[0:-1], epochs[1:]):

        poly1 = []

        # get the stations common to day i and day i+1 (in A format)
        s = cnn.query_float(sql_select_union(project, '"X", "Y", "Z", "NetworkCode", "StationCode"', date1, date2))

        x = cnn.query_float(sql_select_union(project, '0, -"Z", "Y", 1, 0, 0', date1, date2))
        y = cnn.query_float(sql_select_union(project, '"Z", 0, -"X", 0, 1, 0', date1, date2))
        z = cnn.query_float(sql_select_union(project, '-"Y", "X", 0, 0, 0, 1', date1, date2))

        # polyhedron of the common stations
        Xx = cnn.query_float(sql_select_union(project, '"X", "Y", "Z"', date1, date2))

        X = numpy.array(Xx).transpose().flatten()

        # for vertex in stations
        for v in s:
            poly1 += [np.array(pp[0:3], dtype=float) - np.array(v[0:3]) for pp in poly if pp[-2] == v[-2] and pp[-1] == v[-1]]

        # residuals for adjustment
        L = np.array(poly1)

        A = numpy.row_stack((np.array(x), np.array(y), np.array(z)))
        A[:, 0:3] = A[:, 0:3]*1e-9
        # find helmert transformation
        c, _, _, v, _, p, it = adjust_lsq(A, L.flatten())

        # write some info to the screen
        tqdm.write(' -- %s (%3i): translation (mm mm mm) scale: (%6.1f %6.1f %6.1f) %10.2e ' %
                   (date2.yyyyddd(), it, c[-3] * 1000, c[-2] * 1000, c[-1] * 1000, c[-4]))

        # make A again with all stations
        s = cnn.query_float(sql_select(project, '"Year", "DOY", "NetworkCode", "StationCode"', date2))

        x = cnn.query_float(sql_select(project, '0, -"Z", "Y", 1, 0, 0', date2))
        y = cnn.query_float(sql_select(project, '"Z", 0, -"X", 0, 1, 0', date2))
        z = cnn.query_float(sql_select(project, '-"Y", "X", 0, 0, 0, 1', date2))

        A = numpy.row_stack((np.array(x), np.array(y), np.array(z)))
        A[:, 0:3] = A[:, 0:3] * 1e-9

        Xx = cnn.query_float(sql_select(project, '"X", "Y", "Z"', date2))
        X = numpy.array(Xx).transpose().flatten()

        X = (numpy.dot(A, c) + X).reshape(3, len(x)).transpose()

        # save current transformed polyhedron to use in the next iteration
        polyhedrons += poly
        poly = [x.tolist() + list(s) for x, s in zip(X, s)]

        # insert results in gamit_dra
        for pp in poly:
            cnn.insert('gamit_dra', NetworkCode=pp[-2], StationCode=pp[-1], Project=project, X=pp[0], Y=pp[1],
                       Z=pp[2], Year=date2.year, DOY=date2.doy, FYear=date2.fyear)

        bar.update()

    bar.close()

    # plot the residuals
    for stn in tqdm(stnlist):
        NetworkCode = stn['NetworkCode']
        StationCode = stn['StationCode']

        # load from the db
        ts = cnn.query_float('SELECT "X", "Y", "Z", "Year", "DOY" FROM gamit_dra '
                             'WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' AND '
                             '"Project" = \'%s\' ORDER BY "Year", "DOY"' % (NetworkCode, StationCode, project))

        ts = np.array(ts)

        if ts.size:
            try:
                # save the time series
                gsoln = pyETM.GamitSoln(cnn, ts, NetworkCode, StationCode, project)

                # create the ETM object
                etm = pyETM.GamitETM(cnn, NetworkCode, StationCode, False, False, gsoln)

                etm.plot(pngfile='%s/%s.%s_SOL.png' % (project, NetworkCode, StationCode), residuals=True,
                         plot_missing=False)

                if ts.shape[0] > 2:
                    dts = np.append(np.diff(ts[:,0:3], axis=0), ts[1:, -2:], axis=1)
                    dra = pyETM.GamitSoln(cnn, dts, NetworkCode, StationCode, project)

                    etm = pyETM.DailyRep(cnn, NetworkCode, StationCode, False, False, dra)

                    etm.plot(pngfile='%s/%s.%s_DRA.png' % (project, NetworkCode, StationCode), residuals=True,
                             plot_missing=False)

            except Exception as e:
                tqdm.write(' -->' + str(e))
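
Near the end, the daily repeatability series is built by differencing consecutive daily positions: dts = np.append(np.diff(ts[:, 0:3], axis=0), ts[1:, -2:], axis=1), i.e. day-to-day coordinate changes tagged with the Year/DOY of the later epoch. A self-contained numpy sketch of that step:

import numpy as np

# columns: X, Y, Z, Year, DOY (same layout as the gamit_dra query above)
ts = np.array([[100.000, 200.000, 300.000, 1999, 100],
               [100.002, 200.001, 299.999, 1999, 101],
               [100.001, 200.003, 300.002, 1999, 102]])

# day-to-day coordinate differences, keeping Year/DOY of the later epoch
dts = np.append(np.diff(ts[:, 0:3], axis=0), ts[1:, -2:], axis=1)
print(dts)
# -> day 101: (0.002, 0.001, -0.001), day 102: (-0.001, 0.002, 0.003)
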
Example No. 13
import dbConnection
import pyETM
from pyDate import Date

cnn = dbConnection.Cnn('gnss_data.cfg')

stns = cnn.query('SELECT * FROM stations WHERE "NetworkCode" NOT LIKE \'?%\'')

for stn in stns.dictresult():

    print(' >> working on %s.%s' % (stn['NetworkCode'], stn['StationCode']))
    etm = pyETM.PPPETM(cnn, stn['NetworkCode'], stn['StationCode'])

    dates = [Date(mjd=mjd) for mjd in etm.soln.mjd]
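
The list comprehension above converts each solution epoch from Modified Julian Date back into a Date object. The MJD origin is 1858-11-17, so a plain-datetime equivalent (standing in for Date(mjd=...), whose internals are assumed) looks like:

from datetime import datetime, timedelta

MJD_EPOCH = datetime(1858, 11, 17)   # MJD 0

def mjd_to_datetime(mjd):
    return MJD_EPOCH + timedelta(days=mjd)

def datetime_to_mjd(dt):
    delta = dt - MJD_EPOCH
    return delta.days + delta.seconds / 86400.0

print(mjd_to_datetime(51544))                  # 2000-01-01 00:00:00
print(datetime_to_mjd(datetime(2000, 1, 1)))   # 51544.0
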

Example No. 14
    def align_spaces(self, target_dict):

        # get the list of stations to use during the alignment
        use_stations = target_dict.keys()

        # reference date used to align the stack
        # epochs SHOULD all be the same. Get first item and then the epoch
        ref_date = Date(fyear=list(target_dict.values())[0]['epoch'])

        # convert the target dict to a list
        target_list = []
        stack_list = []

        tqdm.write(' >> Aligning coordinate space...')
        for stn in use_stations:
            if not np.isnan(target_dict[stn]['x']):
                target_list.append((stn, target_dict[stn]['x'], target_dict[stn]['y'], target_dict[stn]['z'],
                                    ref_date.year, ref_date.doy, ref_date.fyear))
                # get the ETM coordinate for this station
                net = stn.split('.')[0]
                ssn = stn.split('.')[1]

                ts = pyETM.GamitSoln(self.cnn, self.get_station(net, ssn), net, ssn, self.name)
                etm = pyETM.GamitETM(self.cnn, net, ssn, gamit_soln=ts)
                stack_list += etm.get_etm_soln_list()

        c_array = np.array(stack_list, dtype=[('stn', 'S8'), ('x', 'float64'), ('y', 'float64'),
                                              ('z', 'float64'), ('yr', 'i4'), ('dd', 'i4'),
                                              ('fy', 'float64')])
        comb = Polyhedron(c_array, 'etm', ref_date)

        # build a target polyhedron from the target_list
        vertices = np.array(target_list, dtype=[('stn', 'S8'), ('x', 'float64'), ('y', 'float64'),
                                                ('z', 'float64'), ('yr', 'i4'), ('dd', 'i4'),
                                                ('fy', 'float64')])

        target = Polyhedron(vertices, 'target_frame', ref_date)

        # start aligning the coordinates
        tqdm.write(' -- Aligning polyhedron at %.3f (%s)' % (ref_date.fyear, ref_date.yyyyddd()))

        scale = False
        # align the polyhedron to the target
        r_before, r_after, a_stn = comb.align(target, scale=scale, verbose=True)
        # extract the Helmert parameters to apply to the rest of the polyhedrons
        # remove the scale factor
        helmert = comb.helmert

        tqdm.write(' -- Reporting coordinate space residuals (in mm) before and after frame alignment\n'
                   '         Before   After |     Before   After  ')
        # format r_before and r_after to satisfy the required print_residuals format
        r_before = r_before.reshape(3, r_before.shape[0] // 3).transpose()
        r_after = r_after.reshape(3, r_after.shape[0] // 3).transpose()

        residuals = np.stack((r_before, r_after), axis=2)

        stn_lla = []
        for i, stn in enumerate(a_stn):
            n = stn.split('.')[0]
            s = stn.split('.')[1]
            # get the lat lon of the station to report back in the json
            lla = self.cnn.query_float('SELECT lat, lon FROM stations WHERE "NetworkCode" = \'%s\' '
                                       'AND "StationCode" = \'%s\'' % (n, s))[0]
            stn_lla.append([lla[0], lla[1]])
            # print residuals to screen
            print_residuals(n, s, residuals[i], lla[0], lla[1], ['X', 'Y', 'Z'])

        # save the position space residuals
        self.position_space = {'stations': {'codes': a_stn.tolist(), 'latlon': stn_lla},
                               'residuals_before_alignment': r_before.tolist(),
                               'residuals_after_alignment': r_after.tolist(),
                               'reference_date': ref_date,
                               'helmert_transformation': comb.helmert.tolist(),
                               'comments': 'No scale factor estimated.'}

        for poly in tqdm(self, ncols=160, desc=' -- Applying coordinate space transformation'):
            if poly.date != ref_date:
                poly.align(helmert=helmert, scale=scale)

        tqdm.write(' >> Aligning velocity space...')

        # choose the stations that have a velocity
        use_stn = []
        for stn in use_stations:
            if not np.isnan(target_dict[stn]['vx']):
                use_stn.append(stn)

        # load the polynomial terms of the stations
        etm_objects = self.cnn.query_float('SELECT etms."NetworkCode", etms."StationCode", stations.lat, '
                                           'stations.lon, params FROM etms '
                                           'LEFT JOIN stations ON '
                                           'etms."NetworkCode" = stations."NetworkCode" AND '
                                           'etms."StationCode" = stations."StationCode" '
                                           'WHERE "object" = \'polynomial\' AND soln = \'gamit\' AND stack = \'%s\' '
                                           'AND etms."NetworkCode" || \'.\' || etms."StationCode" IN (\'%s\') '
                                           'ORDER BY etms."NetworkCode", etms."StationCode"'
                                           % (self.name, '\', \''.join(use_stn)), as_dict=True)

        # first, align the velocity space by finding a Helmert transformation that takes vx, vy, and vz of the stack at
        # each station and makes it equal to vx, vy, and vz of the ITRF structure

        dvx = np.zeros(len(etm_objects))
        dvy = np.zeros(len(etm_objects))
        dvz = np.zeros(len(etm_objects))

        for s, p in enumerate(etm_objects):
            stn_ts = self.get_station(p['NetworkCode'], p['StationCode'])

            self.cnn.query('DELETE FROM etms WHERE "soln" = \'gamit\' AND "NetworkCode" = \'%s\' AND '
                           '"StationCode" = \'%s\' AND stack = \'%s\' '
                           % (p['NetworkCode'], p['StationCode'], self.name))
            # save the time series
            ts = pyETM.GamitSoln(self.cnn, stn_ts, p['NetworkCode'], p['StationCode'], self.name)
            # create the ETM object
            pyETM.GamitETM(self.cnn, p['NetworkCode'], p['StationCode'], False, False, ts)

            q = self.cnn.query_float('SELECT params FROM etms '
                                     'WHERE "object" = \'polynomial\' AND soln = \'gamit\' '
                                     'AND "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' AND stack = \'%s\' '
                                     % (p['NetworkCode'], p['StationCode'], self.name), as_dict=True)[0]

            params = np.array(q['params'])
            params = params.reshape((3, params.shape[0] // 3))
            # first item, i.e. params[:][0] in array is position
            # second item is velocity, which is what we are interested in
            v = np.array(lg2ct(params[0, 1], params[1, 1], params[2, 1], p['lat'], p['lon'])).flatten()
            # put the residuals in an array
            td = target_dict['%s.%s' % (p['NetworkCode'], p['StationCode'])]
            dvx[s] = v[0] - np.array(td['vx'])
            dvy[s] = v[1] - np.array(td['vy'])
            dvz[s] = v[2] - np.array(td['vz'])

        scale = False
        A = self.build_design(etm_objects, scale=scale)

        # loop through the frequencies
        L = np.row_stack((dvx.flatten(), dvy.flatten(), dvz.flatten())).flatten()

        c, _, _, _, wrms, _, it = adjust_lsq(A, L)

        tqdm.write(' -- Velocity space transformation:   ' + ' '.join(['%7.4f' % cc for cc in c]) +
                   ' wrms: %.3f it: %i' % (wrms * 1000, it))

        # loop through all the polyhedrons
        for poly in tqdm(self, ncols=160, desc=' -- Applying velocity space transformation'):
            t = np.repeat(poly.date.fyear - ref_date.fyear, poly.Ax.shape[0])

            poly.vertices['x'] = poly.vertices['x'] - t * np.dot(poly.ax(scale=scale), c)
            poly.vertices['y'] = poly.vertices['y'] - t * np.dot(poly.ay(scale=scale), c)
            poly.vertices['z'] = poly.vertices['z'] - t * np.dot(poly.az(scale=scale), c)

        tqdm.write(' -- Reporting velocity space residuals (in mm/yr) before and after frame alignment\n'
                   '         Before   After |     Before   After  ')

        dvxa = np.zeros(len(etm_objects))
        dvya = np.zeros(len(etm_objects))
        dvza = np.zeros(len(etm_objects))
        for s, p in enumerate(etm_objects):
            # redo the etm for this station
            stn_ts = self.get_station(p['NetworkCode'], p['StationCode'])

            self.cnn.query('DELETE FROM etms WHERE "soln" = \'gamit\' AND "NetworkCode" = \'%s\' AND '
                           '"StationCode" = \'%s\' AND stack = \'%s\''
                           % (p['NetworkCode'], p['StationCode'], self.name))
            # save the time series
            ts = pyETM.GamitSoln(self.cnn, stn_ts, p['NetworkCode'], p['StationCode'], self.name)
            # create the ETM object
            pyETM.GamitETM(self.cnn, p['NetworkCode'], p['StationCode'], False, False, ts)

            q = self.cnn.query_float('SELECT params FROM etms '
                                     'WHERE "object" = \'polynomial\' AND soln = \'gamit\' '
                                     'AND "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' AND stack = \'%s\''
                                     % (p['NetworkCode'], p['StationCode'], self.name), as_dict=True)[0]

            params = np.array(q['params'])
            params = params.reshape((3, params.shape[0] // 3))
            # first item, i.e. params[:][0] in array is position
            # second item is velocity, which is what we are interested in
            v = np.array(lg2ct(params[0, 1], params[1, 1], params[2, 1], p['lat'], p['lon'])).flatten()
            # put the residuals in an array
            td = target_dict['%s.%s' % (p['NetworkCode'], p['StationCode'])]
            dvxa[s] = v[0] - np.array(td['vx'])
            dvya[s] = v[1] - np.array(td['vy'])
            dvza[s] = v[2] - np.array(td['vz'])

            lla = self.cnn.query_float('SELECT lat, lon FROM stations WHERE "NetworkCode" = \'%s\' '
                                       'AND "StationCode" = \'%s\'' % (p['NetworkCode'], p['StationCode']))[0]

            print_residuals(p['NetworkCode'], p['StationCode'],
                            np.array([[dvx[s], dvxa[s]], [dvy[s], dvya[s]], [dvz[s], dvza[s]]]), lla[0], lla[1],
                            ['X', 'Y', 'Z'])

        # save the position space residuals
        self.velocity_space = {'stations': {'codes': [p['NetworkCode'] + '.' + p['StationCode'] for p in etm_objects],
                                            'latlon': [[p['lat'], p['lon']] for p in etm_objects]},
                               'residuals_before_alignment':
                                   np.column_stack((dvx.flatten(), dvy.flatten(), dvz.flatten())).tolist(),
                               'residuals_after_alignment':
                                   np.column_stack((dvxa.flatten(), dvya.flatten(), dvza.flatten())).tolist(),
                               'reference_date': ref_date,
                               'helmert_transformation': c.tolist(),
                               'comments': 'Velocity space transformation.'}

        tqdm.write(' -- Done!')
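
align_spaces compares ETM velocities against the target frame after rotating them from the local (north, east, up) system into geocentric XYZ with lg2ct. The rotation itself is the standard local-to-geocentric matrix; a self-contained numpy sketch (lg2ct's exact argument order and units are assumptions, here latitude/longitude are in degrees):

import numpy as np

def neu2xyz(n, e, u, lat, lon):
    """Rotate a local (north, east, up) vector into geocentric XYZ.

    Standard local-to-geocentric rotation; this mirrors what lg2ct is used
    for above, but its exact argument order is an assumption.
    """
    lat, lon = np.radians(lat), np.radians(lon)
    R = np.array([[-np.sin(lat) * np.cos(lon), -np.sin(lon), np.cos(lat) * np.cos(lon)],
                  [-np.sin(lat) * np.sin(lon),  np.cos(lon), np.cos(lat) * np.sin(lon)],
                  [ np.cos(lat),                0.0,         np.sin(lat)]])
    return R @ np.array([n, e, u])

# a 10 mm/yr northward velocity at the equator, prime meridian, maps to +Z only
print(neu2xyz(0.010, 0.0, 0.0, lat=0.0, lon=0.0))   # ~[0, 0, 0.01]
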
Example No. 15
    def __init__(self, in_rinex, otl_coeff, options, sp3types, sp3altrn, antenna_height, strict=True, apply_met=True,
                 kinematic=False, clock_interpolation=False, hash=0, erase=True, decimate=True, solve_coordinates=True,
                 solve_troposphere=105, back_substitution=False, elev_mask=10, x=0, y=0, z=0):

        assert isinstance(in_rinex, pyRinex.ReadRinex)

        # DDG: if RINEX 3 version, convert to RINEX 2 (no PPP support)
        if in_rinex.rinex_version >= 3:
            # DDG: make a new object and convert to RINEX 2 to leave the other one untouched
            rinexobj = pyRinex.ReadRinex(in_rinex.NetworkCode, in_rinex.StationCode, in_rinex.origin_file,
                                         no_cleanup=in_rinex.no_cleanup, allow_multiday=in_rinex.allow_multiday)
            rinexobj.ConvertRinex(2)
        else:
            # file is in RINEX 2 format, use file as is
            rinexobj = in_rinex

        PPPSpatialCheck.__init__(self)

        self.rinex     = rinexobj
        self.epoch     = rinexobj.date
        self.antH      = antenna_height
        self.ppp_path  = options['ppp_path']
        self.ppp       = options['ppp_exe']
        self.options   = options
        self.kinematic = kinematic

        self.ppp_version = None

        self.file_summary        = None
        self.proc_parameters     = None
        self.observation_session = None
        self.coordinate_estimate = None
        self.clock_estimates     = None

        # DDG: do not allow clock interpolation before May 1 2001
        self.clock_interpolation = clock_interpolation if rinexobj.date > Date(year=2001, month=5, day=1) else False

        self.frame             = None
        self.atx               = None
        # DDG: now accepts solving for a fixed coordinate PPP
        self.solve_coordinates = solve_coordinates
        self.solve_troposphere = solve_troposphere
        self.back_substitution = back_substitution
        self.elev_mask         = elev_mask
        self.x                 = x
        self.y                 = y
        self.z                 = z
        self.lat               = None
        self.lon               = None
        self.h                 = None
        self.sigmax            = None
        self.sigmay            = None
        self.sigmaz            = None
        self.sigmaxy           = None
        self.sigmaxz           = None
        self.sigmayz           = None
        self.clock_phase       = None
        self.clock_phase_sigma = None
        self.phase_drift       = None
        self.phase_drift_sigma = None
        self.clock_rms         = None
        self.clock_rms_number  = None
        self.hash              = hash

        self.processed_obs = None
        self.rejected_obs  = None

        self.orbit_type = None
        self.orbits1    = None
        self.orbits2    = None
        self.clocks1    = None
        self.clocks2    = None
        self.eop_file   = None
        self.sp3altrn   = sp3altrn
        self.sp3types   = sp3types
        self.otl_coeff  = otl_coeff
        self.strict     = strict
        self.apply_met  = apply_met
        self.erase      = erase
        self.out        = ''
        self.summary    = ''
        self.pos        = ''

        self.rootdir = os.path.join('production', 'ppp')

        fieldnames = ('NetworkCode', 'StationCode', 'X', 'Y', 'Z', 'Year', 'DOY',
                      'ReferenceFrame', 'sigmax', 'sigmay',
                      'sigmaz', 'sigmaxy', 'sigmaxz', 'sigmayz', 'hash')

        self.record = dict.fromkeys(fieldnames)

        # determine the atx to use
        self.frame, self.atx = determine_frame(self.options['frames'], self.epoch)

        if os.path.isfile(self.rinex.rinex_path):

            # generate a unique id for this instance
            self.rootdir = os.path.join(self.rootdir, str(uuid.uuid4()))

            path = os.path.join(self.rootdir, self.rinex.rinex[:-3])
            self.path_sum_file = path + 'sum'
            self.path_pos_file = path + 'pos'
            self.path_ses_file = path + 'ses'
            self.path_res_file = path + 'res'

            try:
                # create a production folder to analyze the rinex file
                if not os.path.exists(self.rootdir):
                    os.makedirs(self.rootdir)
                    os.makedirs(os.path.join(self.rootdir, 'orbits'))
            except Exception:
                # could not create production dir! FATAL
                raise

            try:
                self.get_orbits(self.sp3types)

            except (pySp3.pySp3Exception,
                    pyClk.pyClkException,
                    pyEOP.pyEOPException):

                if sp3altrn:
                    self.get_orbits(self.sp3altrn)
                else:
                    raise

            self.write_otl()
            self.copyfiles()
            self.config_session()

            # make a local copy of the rinex file
            # decimate the rinex file if the interval is < 15 sec.
            # DDG: only decimate when told by caller
            if self.rinex.interval < 15 and decimate:
                self.rinex.decimate(30)

            copyfile(self.rinex.rinex_path,
                     os.path.join(self.rootdir, self.rinex.rinex))

        else:
            raise pyRunPPPException('The file ' + self.rinex.rinex_path +
                                    ' could not be found. PPP was not executed.')
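
Each PPP run gets its own throwaway working directory under production/ppp, named with a UUID, where the orbits and the RINEX copy are staged. A small self-contained sketch of that pattern, including the cleanup the class performs elsewhere when erase=True; the directory layout is taken from the code above:

import os
import shutil
import uuid

rootdir = os.path.join('production', 'ppp', str(uuid.uuid4()))

# create the per-run working directory and its orbits subfolder
os.makedirs(os.path.join(rootdir, 'orbits'))

try:
    # ... stage orbit/clock files and the RINEX copy here, then run PPP ...
    print('working in', rootdir)
finally:
    # remove the whole run directory when done (what erase=True implies above)
    shutil.rmtree(rootdir, ignore_errors=True)
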
Example No. 16
    def __init__(self, rinexobj, otl_coeff, options, sp3types, sp3altrn, antenna_height, strict=True, apply_met=True,
                 kinematic=False, clock_interpolation=False, hash=0, erase=True, decimate=True):

        assert isinstance(rinexobj, pyRinex.ReadRinex)

        PPPSpatialCheck.__init__(self)

        self.rinex     = rinexobj
        self.epoch     = rinexobj.date
        self.antH      = antenna_height
        self.ppp_path  = options['ppp_path']
        self.ppp       = options['ppp_exe']
        self.options   = options
        self.kinematic = kinematic

        self.ppp_version = None

        self.file_summary = None
        self.proc_parameters = None
        self.observation_session = None
        self.coordinate_estimate = None
        self.clock_estimates = None

        # DDG: do not allow clock interpolation before May 1 2001
        self.clock_interpolation = clock_interpolation if rinexobj.date > Date(year=2001, month=5, day=1) else False

        self.frame     = None
        self.atx       = None
        self.x         = None
        self.y         = None
        self.z         = None
        self.lat       = None
        self.lon       = None
        self.h         = None
        self.sigmax    = None
        self.sigmay    = None
        self.sigmaz    = None
        self.sigmaxy   = None
        self.sigmaxz   = None
        self.sigmayz   = None
        self.clock_phase = None
        self.clock_phase_sigma = None
        self.phase_drift = None
        self.phase_drift_sigma = None
        self.clock_rms = None
        self.clock_rms_number = None
        self.hash      = hash

        self.processed_obs = None
        self.rejected_obs = None

        self.orbit_type = None
        self.orbits1    = None
        self.orbits2    = None
        self.clocks1    = None
        self.clocks2    = None
        self.eop_file   = None
        self.sp3altrn   = sp3altrn
        self.sp3types   = sp3types
        self.otl_coeff  = otl_coeff
        self.strict     = strict
        self.apply_met  = apply_met
        self.erase      = erase
        self.out        = ''
        self.summary    = ''
        self.pos        = ''

        self.rootdir = os.path.join('production', 'ppp')

        fieldnames = ['NetworkCode', 'StationCode', 'X', 'Y', 'Z', 'Year', 'DOY', 'ReferenceFrame', 'sigmax', 'sigmay',
                      'sigmaz', 'sigmaxy', 'sigmaxz', 'sigmayz', 'hash']

        self.record = dict.fromkeys(fieldnames)

        # determine the atx to use
        self.frame, self.atx = determine_frame(self.options['frames'], self.epoch)

        if os.path.isfile(self.rinex.rinex_path):

            # generate a unique id for this instance
            self.rootdir = os.path.join(self.rootdir, str(uuid.uuid4()))

            try:
                # create a production folder to analyze the rinex file
                if not os.path.exists(self.rootdir):
                    os.makedirs(self.rootdir)
                    os.makedirs(os.path.join(self.rootdir, 'orbits'))
            except Exception:
                # could not create production dir! FATAL
                raise

            try:
                self.get_orbits(self.sp3types)

            except (pySp3.pySp3Exception, pyClk.pyClkException, pyEOP.pyEOPException):

                if sp3altrn:
                    self.get_orbits(self.sp3altrn)
                else:
                    raise

            self.write_otl()
            self.copyfiles()
            self.config_session()

            # make a local copy of the rinex file
            # decimate the rinex file if the interval is < 15 sec.
            # DDG: only decimate when told by caller
            if self.rinex.interval < 15 and decimate:
                self.rinex.decimate(30)

            copyfile(self.rinex.rinex_path, os.path.join(self.rootdir, self.rinex.rinex))

        else:
            raise pyRunPPPException('The file ' + self.rinex.rinex_path + ' could not be found. PPP was not executed.')

        return
Example No. 17
    def __init__(self, cnn, project, redo=False):

        super(Stack, self).__init__()

        self.project = project
        self.cnn = cnn

        if redo:
            # if redoing the stack, ignore the contents of the stacks table
            print(' >> Redoing stack')

            self.cnn.query('DELETE FROM stacks WHERE "Project" = \'%s\'' % self.project)

            print(' >> Loading GAMIT solutions for project %s...' % project)

            gamit_vertices = self.cnn.query_float(
                'SELECT "NetworkCode" || \'.\' || "StationCode", "X", "Y", "Z", "Year", "DOY", "FYear" '
                'FROM gamit_soln WHERE "Project" = \'%s\' '
                'ORDER BY "NetworkCode", "StationCode"' % project)

            self.gamit_vertices = np.array(gamit_vertices, dtype=[('stn', 'S8'), ('x', 'float64'), ('y', 'float64'),
                                                                  ('z', 'float64'), ('yr', 'i4'), ('dd', 'i4'),
                                                                  ('fy', 'float64')])

            dates = self.cnn.query_float('SELECT "Year", "DOY" FROM gamit_soln WHERE "Project" = \'%s\' '
                                         'GROUP BY "Year", "DOY" ORDER BY "Year", "DOY"' % project)

            self.dates = [Date(year=int(d[0]), doy=int(d[1])) for d in dates]

            self.stations = self.cnn.query_float('SELECT "NetworkCode", "StationCode" FROM gamit_soln '
                                                 'WHERE "Project" = \'%s\' '
                                                 'GROUP BY "NetworkCode", "StationCode" '
                                                 'ORDER BY "NetworkCode", "StationCode"' % project, as_dict=True)

            for d in tqdm(self.dates, ncols=160, desc=' >> Initializing the stack polyhedrons'):
                self.append(Polyhedron(self.gamit_vertices, project, d))

        else:
            print(' >> Preserving the existing stack')
            print(' >> Determining differences between current stack and GAMIT solutions for project %s...' % project)

            # load the stack vertices for the days that show no differences wrt the GAMIT solution
            stack_vertices = self.cnn.query_float(
                'SELECT "NetworkCode" || \'.\' || "StationCode", "X", "Y", "Z", "Year", "DOY", "FYear" FROM stacks '
                'WHERE ("Year", "DOY") NOT IN ('
                ' SELECT "Year", "DOY" FROM ('
                ' SELECT "NetworkCode", "StationCode", "Year", "DOY", \'not in stack\' '
                '  AS note FROM gamit_soln WHERE "Project" = \'%s\' EXCEPT '
                ' SELECT "NetworkCode", "StationCode", "Year", "DOY", \'not in stack\' '
                '  AS note FROM stacks WHERE "Project" = \'%s\''
                ' ) AS missing_stack GROUP BY "Year", "DOY" ORDER BY "Year", "DOY") AND '
                '"Project" = \'%s\' ORDER BY "NetworkCode", "StationCode"' % (project, project, project))

            print(' >> Loading pre-existing stack for project %s' % project)

            # load the GAMIT vertices for the days that do show differences
            gamit_vertices = self.cnn.query_float(
                'SELECT "NetworkCode" || \'.\' || "StationCode", "X", "Y", "Z", "Year", "DOY", "FYear" FROM gamit_soln '
                'WHERE ("Year", "DOY") IN ('
                ' SELECT "Year", "DOY" FROM ('
                ' SELECT "NetworkCode", "StationCode", "Year", "DOY", \'not in stack\' '
                '  AS note FROM gamit_soln WHERE "Project" = \'%s\' EXCEPT '
                ' SELECT "NetworkCode", "StationCode", "Year", "DOY", \'not in stack\' '
                '  AS note FROM stacks WHERE "Project" = \'%s\''
                ' ) AS missing_stack GROUP BY "Year", "DOY" ORDER BY "Year", "DOY") AND '
                '"Project" = \'%s\' ORDER BY "NetworkCode", "StationCode"' % (project, project, project))

            self.stack_vertices = np.array(stack_vertices, dtype=[('stn', 'S8'), ('x', 'float64'), ('y', 'float64'),
                                                                  ('z', 'float64'), ('yr', 'i4'), ('dd', 'i4'),
                                                                  ('fy', 'float64')])

            self.gamit_vertices = np.array(gamit_vertices, dtype=[('stn', 'S8'), ('x', 'float64'), ('y', 'float64'),
                                                                  ('z', 'float64'), ('yr', 'i4'), ('dd', 'i4'),
                                                                  ('fy', 'float64')])

            dates = self.cnn.query_float('SELECT "Year", "DOY" FROM stacks WHERE "Project" = \'%s\' UNION '
                                         'SELECT "Year", "DOY" FROM gamit_soln WHERE "Project" = \'%s\' '
                                         'ORDER BY "Year", "DOY"' % (project, project))

            self.dates = [Date(year=int(d[0]), doy=int(d[1])) for d in dates]

            self.stations = self.cnn.query_float('SELECT "NetworkCode", "StationCode" FROM gamit_soln '
                                                 'WHERE "Project" = \'%s\' UNION '
                                                 'SELECT "NetworkCode", "StationCode" FROM stacks '
                                                 'WHERE "Project" = \'%s\' '
                                                 'ORDER BY "NetworkCode", "StationCode"'
                                                 % (project, project), as_dict=True)

            for d in tqdm(self.dates, ncols=160, desc=' >> Initializing the stack polyhedrons'):
                try:
                    # try to append the stack vertices
                    self.append(Polyhedron(self.stack_vertices, project, d, aligned=True))

                except ValueError:
                    # if a ValueError is raised, append the GAMIT vertices instead
                    tqdm.write(' -- Appending %s from GAMIT solutions' % d.yyyyddd())
                    self.append(Polyhedron(self.gamit_vertices, project, d, aligned=False))
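# A minimal usage sketch for the constructor above ('cnn' is assumed to be an open
# dbConnection.Cnn handle and 'my_project' a hypothetical project name). With
# redo=False the existing stack is preserved and only the days that differ from the
# GAMIT solutions are reloaded from gamit_soln; with redo=True the stacks table is
# cleared for the project and every daily polyhedron is rebuilt from gamit_soln.
stack = Stack(cnn, 'my_project', redo=False)

# the constructor appends one Polyhedron per processed day
print('%i days loaded for project %s' % (len(stack.dates), stack.project))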
Exemplo n.º 18
0
    def __init__(self, cnn, name, max_iters=4, exclude=(), use=()):

        self.name = name

        # incorporate the list of stations to remove from the stacking process
        self.exclude = [{
            'NetworkCode': item[0],
            'StationCode': item[1]
        } for item in [item.lower().split('.') for item in exclude]]

        self.use = [{
            'NetworkCode': item[0],
            'StationCode': item[1]
        } for item in [item.lower().split('.') for item in use]]

        self.max_iters = max_iters
        self.iter = 0
        self.ts = []
        self.cnn = cnn
        self.json = dict()

        # get the station list
        rs = cnn.query(
            'SELECT "NetworkCode", "StationCode" FROM gamit_soln '
            'WHERE "Project" = \'%s\' GROUP BY "NetworkCode", "StationCode" '
            'ORDER BY "NetworkCode", "StationCode"' % name)

        self.stnlist = [
            Station(cnn, item['NetworkCode'], item['StationCode'])
            for item in rs.dictresult()
        ]

        # if none selected, use all
        if not self.use:
            for stn in self.stnlist:
                if stn.dictionary not in self.use and stn.dictionary not in self.exclude:
                    self.use += [stn.dictionary]
        else:
            # if stations are explicitly listed in use, exclude all the others
            for stn in self.stnlist:
                if stn.dictionary not in self.use and stn.dictionary not in self.exclude:
                    self.exclude += [stn.dictionary]

        # get the epochs
        rs = cnn.query(
            'SELECT "Year", "DOY" FROM gamit_soln '
            'WHERE "Project" = \'%s\' GROUP BY "Year", "DOY" ORDER BY "Year", "DOY"'
            % name)

        rs = rs.dictresult()
        self.epochs = [Date(year=item['Year'], doy=item['DOY']) for item in rs]

        # load the polyhedrons
        self.polyhedrons = []

        print(' >> Loading polyhedrons. Please wait...')

        self.polyhedrons = cnn.query_float(
            'SELECT * FROM gamit_soln WHERE "Project" = \'%s\' '
            'ORDER BY "Year", "DOY", "NetworkCode", "StationCode"' % name,
            as_dict=True)
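# A short sketch (illustrative, not project code) of how the exclude/use arguments
# at the top of this constructor are parsed: each 'net.stn' string is lower-cased
# and split into a {'NetworkCode': ..., 'StationCode': ...} dictionary.
exclude = ('IGS.BRAZ', 'rms.igm1')   # hypothetical station identifiers
parsed = [{'NetworkCode': net, 'StationCode': stn}
          for net, stn in (item.lower().split('.') for item in exclude)]
# parsed -> [{'NetworkCode': 'igs', 'StationCode': 'braz'},
#            {'NetworkCode': 'rms', 'StationCode': 'igm1'}]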