Example 1
def station_etm(station, stn_ts, stack_name, iteration=0):

    cnn = dbConnection.Cnn("gnss_data.cfg")

    vertices = None

    try:
        # save the time series
        ts = pyETM.GamitSoln(cnn, stn_ts, station['NetworkCode'],
                             station['StationCode'], stack_name)

        # create the ETM object
        etm = pyETM.GamitETM(cnn, station['NetworkCode'],
                             station['StationCode'], False, False, ts)

        if etm.A is not None:
            if iteration == 0:
                # if iteration == 0, the target frame has to be the PPP ETMs
                vertices = etm.get_etm_soln_list(use_ppp_model=True, cnn=cnn)
            else:
                # on next iters, the target frame is the inner geometry of the stack
                vertices = etm.get_etm_soln_list()

    except pyETM.pyETMException:

        vertices = None

    return vertices if vertices else None
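
A minimal sketch of how station_etm might be driven; the station dictionary keys, the stn_ts row layout (X, Y, Z, Year, DOY) and the stack name are assumptions carried over from the other examples on this page, and the coordinates are placeholders:

# hypothetical driver (needs a reachable database described by gnss_data.cfg)
station = {'NetworkCode': 'igs', 'StationCode': 'braz'}
stn_ts = [[1000.0, 2000.0, 3000.0, 2020, doy] for doy in range(1, 4)]  # placeholder rows
vertices = station_etm(station, stn_ts, 'igs-sirgas', iteration=0)
if vertices is not None:
    print('collected %i ETM solution vertices' % len(vertices))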
Example 2
def station_etm(project, station, stn_ts, exclude, iteration=0):

    msg = None
    add_exclude = []

    cnn = dbConnection.Cnn("gnss_data.cfg")

    sql_r = 'INSERT INTO stack_residuals ' \
            '("NetworkCode", "StationCode", "Project", x, y, z, sigmax, sigmay, sigmaz, "Year", "DOY") ' \
            'VALUES (%s, %s, \'' + project + '\', %f, %f, %f, %f, %f, %f, %i, %i)'

    sql_s = 'INSERT INTO stacks ' \
            '("NetworkCode", "StationCode", "Project", "X", "Y", "Z", sigmax, sigmay, sigmaz, "Year", "DOY", "FYear") ' \
            'VALUES (\'' + station.NetworkCode + '\', \'' + station.StationCode + '\', \'' \
            + project + '\', %f, %f, %f, 0, 0, 0, %i, %i, %f)'

    # make sure it is sorted by date
    stn_ts.sort(key=lambda k: (k[3], k[4]))

    try:
        # save the time series
        ts = pyETM.GamitSoln(cnn, stn_ts, station.NetworkCode,
                             station.StationCode)

        cnn.executemany(
            sql_s,
            zip(ts.x.tolist(), ts.y.tolist(), ts.z.tolist(),
                [t.year for t in ts.date], [t.doy for t in ts.date],
                [t.fyear for t in ts.date]))

        if not exclude:
            # create the ETM object
            etm = pyETM.GamitETM(cnn, station.NetworkCode, station.StationCode,
                                 False, False, ts)

            if etm.A is None:
                # no contribution to stack, remove from the station list
                add_exclude = [station.dictionary]
            else:
                # insert the residuals for the station in stack_residuals
                # these values will be used later on in helmert_stack
                if iteration == 0:
                    # if iteration == 0, the target frame has to be the PPP ETMs
                    cnn.executemany(
                        sql_r,
                        etm.get_residuals_dict(use_ppp_model=True, cnn=cnn))
                else:
                    # on next iters, the target frame is the inner geometry of the stack
                    cnn.executemany(sql_r, etm.get_residuals_dict())

    except Exception as e:

        add_exclude = [station.dictionary]
        msg = 'Error while producing ETM for %s.%s: ' % (
            station.NetworkCode, station.StationCode) + str(e)

    return add_exclude, msg
Example 3
def process_postseismic(cnn, stnlist, force_stnlist, stack,
                        interseimic_filename, events, sigma_cutoff, lat_lim,
                        filename, kmz):
    tqdm.write(
        ' >> Analyzing suitability of station list to participate in postseismic trajectory model...'
    )
    tqdm.write(' -- output filename: %s' % filename)

    use_station = []
    discarded = []
    velocities = []
    min_lon = 9999
    max_lon = -9999
    min_lat = 9999
    max_lat = -9999

    # load the interseismic model
    model = np.loadtxt(interseimic_filename)

    model[:, 0] -= 360

    for stn in tqdm(stnlist, ncols=160, disable=None):
        try:
            lla = cnn.query_float(
                'SELECT lat,lon FROM stations WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\''
                % (stn['NetworkCode'], stn['StationCode']),
                as_dict=True)[0]

            ve = griddata(model[:, 0:2],
                          model[:, 2] / 1000, (lla['lon'], lla['lat']),
                          method='cubic')
            vn = griddata(model[:, 0:2],
                          model[:, 3] / 1000, (lla['lon'], lla['lat']),
                          method='cubic')

            etm = pyETM.GamitETM(cnn,
                                 stn['NetworkCode'],
                                 stn['StationCode'],
                                 stack_name=stack,
                                 interseismic=[vn, ve, 0.])

            etm.plot('production/%s.%s_.png' %
                     (stn['NetworkCode'], stn['StationCode']))
            # only run the checks if the station is not included in the force list
            #if stn not in force_stnlist:

        except pyETM.pyETMException as e:
            tqdm.write(' -- %s.%s: %s' %
                       (stn['NetworkCode'], stn['StationCode'], str(e)))
Example 4
def dra(cnn, project, dates):

    rs = cnn.query('SELECT "NetworkCode", "StationCode" FROM gamit_soln '
                   'WHERE "Project" = \'%s\' AND "FYear" BETWEEN %.4f AND %.4f GROUP BY "NetworkCode", "StationCode" '
                   'ORDER BY "NetworkCode", "StationCode"' % (project, dates[0].fyear, dates[1].fyear))

    stnlist = rs.dictresult()

    # get the epochs
    ep = cnn.query('SELECT "Year", "DOY" FROM gamit_soln '
                   'WHERE "Project" = \'%s\' AND "FYear" BETWEEN %.4f AND %.4f'
                   'GROUP BY "Year", "DOY" ORDER BY "Year", "DOY"' % (project, dates[0].fyear, dates[1].fyear))

    ep = ep.dictresult()

    epochs = [Date(year=item['Year'], doy=item['DOY'])
              for item in ep]

    A = np.array([])
    Ax = []
    Ay = []
    Az = []

    for station in stnlist:

        print('stacking %s.%s' % (station['NetworkCode'], station['StationCode']))

        try:
            etm = pyETM.GamitETM(cnn, station['NetworkCode'], station['StationCode'], project=project)
        except Exception as e:
            print(" Exception: " + str(e))
            continue

        x = etm.soln.x
        y = etm.soln.y
        z = etm.soln.z

        Ax.append(np.array([np.zeros(x.shape), -z, y, np.ones(x.shape), np.zeros(x.shape), np.zeros(x.shape)]).transpose())
        Ay.append(np.array([z, np.zeros(x.shape), -x, np.zeros(x.shape), np.ones(x.shape), np.zeros(x.shape)]).transpose())
        Az.append(np.array([-y, x, np.zeros(x.shape), np.zeros(x.shape), np.zeros(x.shape), np.ones(x.shape)]).transpose())

        x = np.column_stack((Ax, etm.A, np.zeros(etm.A.shape), np.zeros(etm.A.shape)))
        y = np.column_stack((Ay, np.zeros(etm.A.shape), etm.A, np.zeros(etm.A.shape)))
        z = np.column_stack((Az, np.zeros(etm.A.shape), np.zeros(etm.A.shape), etm.A))

        A = np.row_stack((x, y, z))
Example 5
    def plot_etms(self):

        qbar = tqdm(total=len(self.stnlist),
                    desc=' >> Plotting ETMs',
                    ncols=160)

        for station in self.stnlist:

            qbar.set_postfix(station=str(station))
            qbar.update()

            try:
                stn_ts = [[
                    item['X'], item['Y'], item['Z'], item['Year'], item['DOY']
                ] for item in self.polyhedrons
                          if item['NetworkCode'] == station.NetworkCode
                          and item['StationCode'] == station.StationCode]

                # make sure it is sorted by date
                stn_ts.sort(key=lambda k: (k[3], k[4]))

                # save the time series
                ts = pyETM.GamitSoln(self.cnn, stn_ts, station.NetworkCode,
                                     station.StationCode)

                # create the ETM object
                etm = pyETM.GamitETM(self.cnn, station.NetworkCode,
                                     station.StationCode, False, False, ts)

                etm.plot(pngfile='%s/%s.%s_RR.png' %
                         (self.name, etm.NetworkCode, etm.StationCode),
                         residuals=True,
                         plot_missing=False)

                etm.plot(pngfile='%s/%s.%s_FF.png' %
                         (self.name, etm.NetworkCode, etm.StationCode),
                         residuals=False,
                         plot_missing=False)

            except pyETM.pyETMException as e:

                qbar.write(' -- %s %s' % (str(station), str(e)))

        qbar.close()
Example 6
def main():

    cnn = dbConnection.Cnn("gnss_data.cfg")

    stack = Stack(cnn, 'igs-sirgas', redo=True)

    for i in tqdm(range(1, len(stack)), ncols=160):
        stack[i].align(stack[i - 1])

    net = 'igs'
    stn = 'braz'

    ts = stack.get_station(net, stn)

    dts = np.append(np.diff(ts[:, 0:3], axis=0), ts[1:, -3:], axis=1)

    ts = pyETM.GamitSoln(cnn, dts, net, stn, 'igs-sirgas')

    pyETM.GamitETM(cnn, net, stn, True, gamit_soln=ts)
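
The dts line above converts the aligned stack into day-to-day coordinate differences, keeping the epoch columns of the later day. A toy illustration of that step; the column layout (X, Y, Z, Year, DOY, FYear) is an assumption about what stack.get_station returns:

import numpy as np

ts = np.array([[100.0, 200.0, 300.0, 2020, 1, 2020.0014],
               [100.5, 200.2, 299.9, 2020, 2, 2020.0041]])
# difference consecutive XYZ rows and attach the Year/DOY/FYear of the second epoch
dts = np.append(np.diff(ts[:, 0:3], axis=0), ts[1:, -3:], axis=1)
# dts is approximately [[0.5, 0.2, -0.1, 2020.0, 2.0, 2020.0041]]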
Example 7
def plot_etm(cnn, stack, station, directory):
    try:
        ts = stack.get_station(station['NetworkCode'], station['StationCode'])

        ts = pyETM.GamitSoln(cnn, ts, station['NetworkCode'],
                             station['StationCode'], stack.project)
        etm = pyETM.GamitETM(cnn,
                             station['NetworkCode'],
                             station['StationCode'],
                             gamit_soln=ts)

        pngfile = os.path.join(directory, stationID(etm) + '_gamit.png')
        jsonfile = os.path.join(directory, stationID(etm) + '_gamit.json')

        etm.plot(pngfile, plot_missing=False)
        file_write(
            jsonfile,
            json.dumps(etm.todictionary(False), indent=4, sort_keys=False))

    except pyETM.pyETMException as e:
        tqdm.write(str(e))
Example 8
def plot_etm(cnn, stack, station, directory):
    try:
        ts = stack.get_station(station['NetworkCode'], station['StationCode'])

        ts = pyETM.GamitSoln(cnn, ts, station['NetworkCode'],
                             station['StationCode'], stack.project)

        etm = pyETM.GamitETM(cnn,
                             station['NetworkCode'],
                             station['StationCode'],
                             gamit_soln=ts)

        pngfile = os.path.join(
            directory, etm.NetworkCode + '.' + etm.StationCode + '.png')
        jsonfile = os.path.join(
            directory, etm.NetworkCode + '.' + etm.StationCode + '.json')

        etm.plot(pngfile, plot_missing=False)
        with open(jsonfile, 'w') as f:
            json.dump(etm.todictionary(False), f, indent=4, sort_keys=False)

    except pyETM.pyETMException as e:
        tqdm.write(str(e))
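
A hypothetical driver for plot_etm; the Stack constructor arguments and the station dictionary are assumptions borrowed from the other examples on this page (Example 6 builds the stack with redo=True; redo=False is assumed here to reuse an existing one):

cnn = dbConnection.Cnn('gnss_data.cfg')
stack = Stack(cnn, 'igs-sirgas', redo=False)  # assumed: reuse an existing stack
for station in ({'NetworkCode': 'igs', 'StationCode': 'braz'},):
    plot_etm(cnn, stack, station, 'production')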
Example 9
def main():
    parser = argparse.ArgumentParser(
        description='Plot ETM for stations in the database')

    parser.add_argument(
        'stnlist',
        type=str,
        nargs='+',
        help=
        "List of networks/stations to plot given in [net].[stnm] format or just [stnm] "
        "(separated by spaces; if [stnm] is not unique in the database, all stations with that "
        "name will be plotted). Use keyword 'all' to plot all stations in all networks. "
        "If [net].all is given, all stations from network [net] will be plotted"
    )
    parser.add_argument('-nop',
                        '--no_plots',
                        action='store_true',
                        help="Do not produce plots",
                        default=False)
    parser.add_argument('-nom',
                        '--no_missing_data',
                        action='store_true',
                        help="Do not show missing days",
                        default=False)
    parser.add_argument('-nm',
                        '--no_model',
                        action='store_true',
                        help="Plot time series without fitting a model")
    parser.add_argument('-r',
                        '--residuals',
                        action='store_true',
                        help="Plot time series residuals",
                        default=False)
    parser.add_argument(
        '-dir',
        '--directory',
        type=str,
        help=
        "Directory to save the resulting PNG files. If not specified, assumed to be the "
        "production directory")
    parser.add_argument(
        '-json',
        '--json',
        type=int,
        help="Export ETM adjustment to JSON. Append '0' to just output "
        "the ETM parameters, '1' to export time series without "
        "model and '2' to export both time series and model.")
    parser.add_argument(
        '-gui',
        '--interactive',
        action='store_true',
        help="Interactive mode: allows to zoom and view the plot interactively"
    )
    parser.add_argument(
        '-win',
        '--time_window',
        nargs='+',
        metavar='interval',
        help=
        'Date range to window data. Can be specified in yyyy/mm/dd, yyyy.doy or as a single '
        'integer value (N) which shall be interpreted as last epoch-N')
    parser.add_argument(
        '-q',
        '--query',
        nargs=2,
        metavar='{type} {date}',
        type=str,
        help=
        'Dates to query the ETM. Specify "model" or "solution" to get the ETM value or the value '
        'of the daily solution (if exists). Output is in XYZ.')
    parser.add_argument(
        '-gamit',
        '--gamit',
        type=str,
        nargs=1,
        metavar='{stack}',
        help="Plot the GAMIT time series specifying which stack name to plot.")
    parser.add_argument(
        '-lang',
        '--language',
        type=str,
        help="Change the language of the plots. Default is English. "
        "Use ESP to select Spanish. To add more languages, "
        "include the ISO 639-1 code in pyETM.py",
        default='ENG')
    parser.add_argument('-hist',
                        '--histogram',
                        action='store_true',
                        help="Plot histogram of residuals")
    parser.add_argument(
        '-file',
        '--filename',
        type=str,
        help=
        "Obtain data from an external source (filename). Format should be specified with -format."
    )
    parser.add_argument(
        '-format',
        '--format',
        nargs='+',
        type=str,
        help=
        "To be used together with --filename. Specify order of the fields as found in the input "
        "file. Format strings are gpsWeek, gpsWeekDay, year, doy, fyear, month, day, mjd, "
        "x, y, z, na. Use 'na' to specify a field that should be ignored. If fields to be ignored "
        "are at the end of the line, then there is no need to specify those.")
    parser.add_argument('-outliers',
                        '--plot_outliers',
                        action='store_true',
                        help="Plot an additional panel with the outliers")
    parser.add_argument('-vel',
                        '--velocity',
                        action='store_true',
                        help="During query, output the velocity in XYZ.")
    parser.add_argument('-seasonal',
                        '--seasonal_terms',
                        action='store_true',
                        help="During query, output the seasonal terms in NEU.")
    parser.add_argument('-quiet',
                        '--suppress_messages',
                        action='store_true',
                        help="Quiet mode: suppress information messages")

    args = parser.parse_args()

    cnn = dbConnection.Cnn('gnss_data.cfg')

    stnlist = Utils.process_stnlist(cnn, args.stnlist)

    # define the language
    pyETM.LANG = args.language.lower()
    # set the logging level
    if not args.suppress_messages:
        pyETM.logger.setLevel(pyETM.INFO)
    #####################################
    # date filter

    dates = None
    if args.time_window is not None:
        if len(args.time_window) == 1:
            try:
                dates = process_date(args.time_window,
                                     missing_input=None,
                                     allow_days=False)
                dates = (dates[0].fyear, )
            except ValueError:
                # an integer value
                dates = float(args.time_window[0])
        else:
            dates = process_date(args.time_window)
            dates = (dates[0].fyear, dates[1].fyear)

    if stnlist:
        # do the thing
        if args.directory:
            if not os.path.exists(args.directory):
                os.mkdir(args.directory)
        else:
            if not os.path.exists('production'):
                os.mkdir('production')
            args.directory = 'production'

        for stn in stnlist:
            try:

                if args.gamit is None and args.filename is None:
                    etm = pyETM.PPPETM(cnn, stn['NetworkCode'],
                                       stn['StationCode'], False,
                                       args.no_model)
                elif args.filename is not None:
                    etm = from_file(args, cnn, stn)
                else:
                    polyhedrons = cnn.query_float(
                        'SELECT "X", "Y", "Z", "Year", "DOY" FROM stacks '
                        'WHERE "name" = \'%s\' AND "NetworkCode" = \'%s\' AND '
                        '"StationCode" = \'%s\' '
                        'ORDER BY "Year", "DOY", "NetworkCode", "StationCode"'
                        % (args.gamit[0], stn['NetworkCode'],
                           stn['StationCode']))

                    soln = pyETM.GamitSoln(cnn, polyhedrons,
                                           stn['NetworkCode'],
                                           stn['StationCode'], args.gamit[0])

                    etm = pyETM.GamitETM(cnn,
                                         stn['NetworkCode'],
                                         stn['StationCode'],
                                         False,
                                         args.no_model,
                                         gamit_soln=soln)

                    # print ' > %5.2f %5.2f %5.2f %i %i' % \
                    #      (etm.factor[0]*1000, etm.factor[1]*1000, etm.factor[2]*1000, etm.soln.t.shape[0],
                    #       etm.soln.t.shape[0] -
                    #       np.sum(np.logical_and(np.logical_and(etm.F[0], etm.F[1]), etm.F[2])))

                    # print two largest outliers
                    if etm.A is not None:
                        lres = np.sqrt(np.sum(np.square(etm.R), axis=0))
                        slres = lres[np.argsort(-lres)]

                        print(' >> Two largest residuals:')
                        for i in [0, 1]:
                            print(' %s %6.3f %6.3f %6.3f' %
                                  (pyDate.Date(mjd=etm.soln.mjd[
                                      lres == slres[i]]).yyyyddd(),
                                   etm.R[0, lres == slres[i]],
                                   etm.R[1, lres == slres[i]],
                                   etm.R[2, lres == slres[i]]))

                if args.interactive:
                    xfile = None
                else:
                    if args.gamit is None:
                        if args.filename is None:
                            xfile = os.path.join(
                                args.directory, '%s.%s_ppp' %
                                (etm.NetworkCode, etm.StationCode))
                        else:
                            xfile = os.path.join(
                                args.directory, '%s.%s_file' %
                                (etm.NetworkCode, etm.StationCode))
                    else:
                        xfile = os.path.join(
                            args.directory,
                            '%s.%s_gamit' % (etm.NetworkCode, etm.StationCode))

                # leave pngfile empty to enter interactive mode (GUI)
                if not args.no_plots:
                    etm.plot(xfile + '.png',
                             t_win=dates,
                             residuals=args.residuals,
                             plot_missing=not args.no_missing_data,
                             plot_outliers=args.plot_outliers)

                    if args.histogram:
                        etm.plot_hist(xfile + '_hist.png')

                if args.json is not None:
                    with open(xfile + '.json', 'w') as f:
                        if args.json == 1:
                            json.dump(etm.todictionary(time_series=True),
                                      f,
                                      indent=4,
                                      sort_keys=False)
                        elif args.json == 2:
                            json.dump(etm.todictionary(time_series=True,
                                                       model=True),
                                      f,
                                      indent=4,
                                      sort_keys=False)
                        else:
                            json.dump(etm.todictionary(False),
                                      f,
                                      indent=4,
                                      sort_keys=False)

                if args.query is not None:
                    model = True if args.query[0] == 'model' else False
                    q_date = pyDate.Date(fyear=float(args.query[1]))

                    xyz, _, _, txt = etm.get_xyz_s(q_date.year,
                                                   q_date.doy,
                                                   force_model=model)

                    strp = ''
                    # if user requests velocity too, output it
                    if args.velocity:
                        if etm.A is not None:
                            vxyz = etm.rotate_2xyz(etm.Linear.p.params[:, 1])
                            strp = '%8.5f %8.5f %8.5f ' \
                                   % (vxyz[0, 0], vxyz[1, 0], vxyz[2, 0])

                    # also output seasonal terms, if requested
                    if args.seasonal_terms:
                        if etm.Periodic.frequency_count > 0:
                            strp += ' '.join([
                                '%8.5f' % (x * 1000) for x in
                                etm.Periodic.p.params.flatten().tolist()
                            ])

                    print(' %s.%s %14.5f %14.5f %14.5f %8.3f %s -> %s'
                          % (etm.NetworkCode, etm.StationCode, xyz[0], xyz[1], xyz[2], q_date.fyear, strp, txt))

                print('Successfully plotted ' + stn['NetworkCode'] + '.' + stn['StationCode'])

            except pyETM.pyETMException as e:
                print(str(e))

            except Exception:
                print('Error during processing of ' + stn['NetworkCode'] + '.' + stn['StationCode'])
                print(traceback.format_exc())
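
The "two largest residuals" block above reduces the 3 x N residual matrix etm.R to one magnitude per epoch, then sorts those magnitudes. A small self-contained illustration of the same selection with made-up numbers:

import numpy as np

R = np.array([[0.001, 0.004, 0.002],   # one row per component, one column per epoch
              [0.000, 0.003, 0.001],
              [0.001, 0.005, 0.000]])
lres = np.sqrt(np.sum(np.square(R), axis=0))  # residual magnitude per epoch
slres = lres[np.argsort(-lres)]               # sorted, largest first
for i in [0, 1]:
    print(R[:, lres == slres[i]].flatten())   # components of the i-th largest residual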
Example 10
def main():
    parser = argparse.ArgumentParser(description='Query ETM for stations in the database. Default is PPP ETMs.')

    parser.add_argument('stnlist', type=str, nargs='+',
                        help="List of networks/stations to plot given in [net].[stnm] format or just [stnm] "
                             "(separated by spaces; if [stnm] is not unique in the database, all stations with that "
                             "name will be plotted). Use keyword 'all' to plot all stations in all networks. "
                             "If [net].all is given, all stations from network [net] will be plotted")

    parser.add_argument('-q', '--query', nargs=2, metavar='{type} {date}', type=str,
                        help='Dates to query the ETM. Specify "model" or "solution" to get the ETM value or the value '
                             'of the daily solution (if exists). Output is in XYZ.')

    parser.add_argument('-gamit', '--gamit', type=str, nargs=1, metavar='{stack}',
                        help="Plot the GAMIT time series specifying which stack name to plot.")

    parser.add_argument('-file', '--filename', type=str,
                        help="Obtain data from an external source (filename). Format should be specified with -format.")

    parser.add_argument('-format', '--format', nargs='+', type=str,
                        help="To be used together with --filename. Specify order of the fields as found in the input "
                             "file. Format strings are gpsWeek, gpsWeekDay, year, doy, fyear, month, day, mjd, "
                             "x, y, z, na. Use 'na' to specify a field that should be ignored. If fields to be ignored "
                             "are at the end of the line, then there is no need to specify those.")

    parser.add_argument('-quiet', '--quiet', action='store_true',
                        help="Do not print message when no solutions are available.")

    parser.add_argument('-vel', '--velocity', action='store_true',
                        help="Output the velocity in XYZ.")

    parser.add_argument('-seasonal', '--seasonal_terms', action='store_true',
                        help="Output the seasonal terms in NEU.")

    args = parser.parse_args()

    ##
    cnn = dbConnection.Cnn('gnss_data.cfg')

    if len(args.stnlist) == 1 and os.path.isfile(args.stnlist[0]):
        print(' >> Station list read from ' + args.stnlist[0])
        stnlist = [{'NetworkCode': items[0],
                    'StationCode': items[1]}
                   for items in
                   (line.strip().split('.') for line in file_readlines(args.stnlist[0]))]
    else:
        stnlist = Utils.process_stnlist(cnn, args.stnlist)


    for stn in stnlist:
        try:

            if args.gamit is None and args.filename is None:
                etm = pyETM.PPPETM(cnn, stn['NetworkCode'], stn['StationCode'], False)

            elif args.filename is not None:
                etm = from_file(args, cnn, stn)

            else:
                polyhedrons = cnn.query_float('SELECT "X", "Y", "Z", "Year", "DOY" FROM stacks '
                                              'WHERE "name" = \'%s\' AND "NetworkCode" = \'%s\' AND '
                                              '"StationCode" = \'%s\' '
                                              'ORDER BY "Year", "DOY", "NetworkCode", "StationCode"'
                                              % (args.gamit[0], stn['NetworkCode'], stn['StationCode']))

                soln = pyETM.GamitSoln(cnn, polyhedrons, stn['NetworkCode'], stn['StationCode'], args.gamit[0])

                etm  = pyETM.GamitETM(cnn, stn['NetworkCode'], stn['StationCode'], False, gamit_soln=soln)



            if args.query is not None:
                model  = (args.query[0] == 'model')
                q_date = pyDate.Date(fyear=float(args.query[1]))

                # get the coordinate
                xyz, _, _, txt = etm.get_xyz_s(q_date.year, q_date.doy, force_model=model)

                strp = ''
                # if user requests velocity too, output it
                if args.velocity and etm.A is not None:
                    vxyz = etm.rotate_2xyz(etm.Linear.p.params[:, 1])
                    strp = '%8.5f %8.5f %8.5f ' % (vxyz[0, 0],
                                                   vxyz[1, 0],
                                                   vxyz[2, 0])

                # also output seasonal terms, if requested
                if args.seasonal_terms and etm.Periodic.frequency_count > 0:
                    strp += ' '.join('%8.5f' % (x * 1000)
                                     for x in etm.Periodic.p.params.flatten())

                print(' %s.%s %14.5f %14.5f %14.5f %8.3f %s -> %s' \
                      % (etm.NetworkCode, etm.StationCode, xyz[0], xyz[1], xyz[2], q_date.fyear, strp, txt))

        except pyETM.pyETMException as e:
            if not args.quiet:
                print(str(e))

        except Exception:
            print('Error during processing of ' + stn['NetworkCode'] + '.' + stn['StationCode'])
            print(traceback.format_exc())
Example 11
def process_interseismic(cnn, stnlist, force_stnlist, stack, sigma_cutoff,
                         vel_cutoff, lat_lim, filename, kmz):
    # start by checking that the stations in the list have a linear start (no post-seismic)
    # and more than 2 years of data until the first earthquake or non-linear behavior

    tqdm.write(
        ' >> Analyzing suitability of station list to participate in interseismic trajectory model...'
    )
    tqdm.write(' -- velocity cutoff: %.2f mm/yr; output filename: %s' %
               (vel_cutoff, filename))

    use_station = []
    discarded = []
    velocities = []
    min_lon = 9999
    max_lon = -9999
    min_lat = 9999
    max_lat = -9999

    for stn in tqdm(stnlist, ncols=160, disable=None):
        try:
            etm = pyETM.GamitETM(cnn,
                                 stn['NetworkCode'],
                                 stn['StationCode'],
                                 stack_name=stack)

            use = True
            # only run the checks if the station is not included in the force list
            if stn not in force_stnlist:
                # check that station is within latitude range
                if etm.gamit_soln.lat[0] < lat_lim[0] or etm.gamit_soln.lat[
                        0] > lat_lim[1]:
                    tqdm.write(
                        ' -- %s.%s excluded because it is outside of the latitude limit'
                        % (stn['NetworkCode'], stn['StationCode']))
                    use = False

                # check that station has at least 2 years of data
                if etm.gamit_soln.date[-1].fyear - etm.gamit_soln.date[
                        0].fyear < 2 and use:
                    tqdm.write(
                        ' -- %s.%s rejected due to having less than two years of observations %s -> %s'
                        % (stn['NetworkCode'], stn['StationCode'],
                           etm.gamit_soln.date[0].yyyyddd(),
                           etm.gamit_soln.date[-1].yyyyddd()))
                    use = False

                # other checks
                if etm.A is not None:
                    if len(etm.Jumps.table) > 0 and use:
                        eq_jumps = [
                            j for j in etm.Jumps.table
                            if j.p.jump_type == pyETM.CO_SEISMIC_JUMP_DECAY
                            and j.fit
                        ]
                        for j in eq_jumps:
                            if j.magnitude >= 7 and j.date.fyear < etm.gamit_soln.date[
                                    0].fyear + 1.5:
                                tqdm.write(
                                    ' -- %s.%s has a Mw %.1f in %s and data starts in %s'
                                    % (stn['NetworkCode'], stn['StationCode'],
                                       j.magnitude, j.date.yyyyddd(),
                                       etm.gamit_soln.date[0].yyyyddd()))
                                use = False
                                break

                        eq_jumps = [
                            j for j in etm.Jumps.table if
                            j.p.jump_type == pyETM.CO_SEISMIC_DECAY and j.fit
                        ]
                        if len(eq_jumps) > 0 and use:
                            tqdm.write(
                                ' -- %s.%s has one or more earthquakes before data started in %s'
                                % (stn['NetworkCode'], stn['StationCode'],
                                   etm.gamit_soln.date[0].yyyyddd()))
                            use = False

                    if (etm.factor[0] * 1000 > sigma_cutoff
                            or etm.factor[1] * 1000 > sigma_cutoff) and use:
                        tqdm.write(
                            ' -- %s.%s rejected due to large wrms %5.2f %5.2f %5.2f'
                            % (stn['NetworkCode'], stn['StationCode'],
                               etm.factor[0] * 1000, etm.factor[1] * 1000,
                               etm.factor[2] * 1000))
                        use = False

                    norm = np.sqrt(
                        np.sum(np.square(etm.Linear.p.params[0:2, 1] * 1000)))
                    if norm > vel_cutoff and use:
                        tqdm.write(
                            ' -- %s.%s rejected due to large NEU velocity: %5.2f %5.2f %5.2f NE norm %5.2f'
                            % (stn['NetworkCode'], stn['StationCode'],
                               etm.Linear.p.params[0, 1] * 1000,
                               etm.Linear.p.params[1, 1] * 1000,
                               etm.Linear.p.params[2, 1] * 1000, norm))
                        use = False
                elif use:
                    tqdm.write(' -- %s.%s too few solutions to calculate ETM' %
                               (stn['NetworkCode'], stn['StationCode']))
                    use = False
            else:
                tqdm.write(' -- %s.%s was forced to be included in the list' %
                           (stn['NetworkCode'], stn['StationCode']))

            if use:
                tqdm.write(
                    ' -- %s.%s added NEU wrms: %5.2f %5.2f %5.2f NEU vel: %5.2f %5.2f %5.2f'
                    % (stn['NetworkCode'], stn['StationCode'],
                       etm.factor[0] * 1000, etm.factor[1] * 1000,
                       etm.factor[2] * 1000, etm.Linear.p.params[0, 1] * 1000,
                       etm.Linear.p.params[1, 1] * 1000,
                       etm.Linear.p.params[2, 1] * 1000))
                use_station.append(stn)
                velocities.append({
                    'NetworkCode': etm.NetworkCode,
                    'StationCode': etm.StationCode,
                    'lat': etm.gamit_soln.lat[0],
                    'lon': etm.gamit_soln.lon[0],
                    'vn': etm.Linear.p.params[0, 1],
                    've': etm.Linear.p.params[1, 1],
                    'etm': etm.plot(plot_missing=False,
                                    plot_outliers=False,
                                    fileio=BytesIO())
                })
                if etm.gamit_soln.lon[0] < min_lon:
                    min_lon = etm.gamit_soln.lon[0]
                if etm.gamit_soln.lon[0] > max_lon:
                    max_lon = etm.gamit_soln.lon[0]
                if etm.gamit_soln.lat[0] < min_lat:
                    min_lat = etm.gamit_soln.lat[0]
                if etm.gamit_soln.lat[0] > max_lat:
                    max_lat = etm.gamit_soln.lat[0]
            elif not use and etm.A is not None:
                discarded.append({
                    'NetworkCode': etm.NetworkCode,
                    'StationCode': etm.StationCode,
                    'lat': etm.gamit_soln.lat[0],
                    'lon': etm.gamit_soln.lon[0],
                    'vn': etm.Linear.p.params[0, 1],
                    've': etm.Linear.p.params[1, 1],
                    'etm': etm.plot(plot_missing=False,
                                    plot_outliers=False,
                                    fileio=BytesIO())
                })

        except pyETM.pyETMException as e:
            tqdm.write(' -- %s.%s: %s' %
                       (stn['NetworkCode'], stn['StationCode'], str(e)))

    tqdm.write(' >> Total number of stations for linear model: %i' %
               len(use_station))
    map = Basemap(llcrnrlon=min_lon - 2,
                  llcrnrlat=min_lat - 2,
                  urcrnrlon=max_lon + 2,
                  urcrnrlat=max_lat + 2,
                  resolution='i',
                  projection='merc',
                  lon_0=(max_lon - min_lon) / 2 + min_lon,
                  lat_0=(max_lat - min_lat) / 2 + min_lat)

    plt.figure(figsize=(15, 10))
    map.drawcoastlines()
    map.drawcountries()
    # map.drawstates()
    # map.fillcontinents(color='#cc9966', lake_color='#99ffff')
    # draw parallels and meridians.
    # map.drawparallels(np.arange(np.floor(min_lat), np.ceil(max_lat), 2.))
    # map.drawmeridians(np.arange(np.floor(min_lon), np.ceil(max_lon), 2.))
    # map.drawmapboundary(fill_color='#99ffff')
    map.quiver([l['lon'] for l in velocities], [l['lat'] for l in velocities],
               [l['ve'] for l in velocities], [l['vn'] for l in velocities],
               scale=0.25,
               latlon=True,
               color='blue',
               zorder=3)
    plt.title("Transverse Mercator Projection")
    plt.savefig('production/test.png')
    plt.close()

    outvar = np.array([[v['lon'], v['lat'], v['ve'], v['vn']]
                       for v in velocities])
    np.savetxt(filename, outvar)
    if kmz:
        generate_kmz(kmz, velocities, discarded)
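
The velocity screening above rejects stations whose horizontal (north-east) velocity norm exceeds vel_cutoff. A minimal sketch of that test in isolation, assuming the same parameter layout as the code (etm.Linear.p.params[component, 1] holds the velocity in m/yr):

import numpy as np

def exceeds_velocity_cutoff(linear_params, vel_cutoff_mm_yr):
    # north and east velocities converted from m/yr to mm/yr
    vn, ve = linear_params[0, 1] * 1000, linear_params[1, 1] * 1000
    return np.sqrt(vn ** 2 + ve ** 2) > vel_cutoff_mm_yr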
Example 12
def dra(cnn, project, dates):

    rs = cnn.query('SELECT "NetworkCode", "StationCode" FROM gamit_soln '
                   'WHERE "Project" = \'%s\' AND "FYear" BETWEEN %.4f AND %.4f GROUP BY "NetworkCode", "StationCode" '
                   'ORDER BY "NetworkCode", "StationCode"' % (project, dates[0].fyear, dates[1].fyear))

    stnlist = rs.dictresult()

    # get the epochs
    ep = cnn.query('SELECT "Year", "DOY" FROM gamit_soln '
                   'WHERE "Project" = \'%s\' AND "FYear" BETWEEN %.4f AND %.4f'
                   'GROUP BY "Year", "DOY" ORDER BY "Year", "DOY"' % (project, dates[0].fyear, dates[1].fyear))

    ep = ep.dictresult()

    epochs = [Date(year=item['Year'], doy=item['DOY']) for item in ep]

    # delete DRA starting from the first requested epoch
    cnn.query('DELETE FROM gamit_dra WHERE "Project" = \'%s\' AND "FYear" >= %f' % (project, epochs[0].fyear))

    # query the first polyhedron in the line, which should be the last polyhedron in gamit_dra
    poly = cnn.query_float('SELECT "X", "Y", "Z", "Year", "DOY", "NetworkCode", "StationCode" FROM gamit_dra '
                           'WHERE "Project" = \'%s\' AND "FYear" = (SELECT max("FYear") FROM gamit_dra)'
                           'ORDER BY "NetworkCode", "StationCode"' % project)

    if len(poly) == 0:
        print(' -- Using gamit_soln: no pre-existent DRA found')
        # no last entry found in gamit_dra, use gamit_soln
        poly = cnn.query_float('SELECT "X", "Y", "Z", "Year", "DOY", "NetworkCode", "StationCode" FROM gamit_soln '
                               'WHERE "Project" = \'%s\' AND "Year" = %i AND "DOY" = %i'
                               'ORDER BY "NetworkCode", "StationCode"'
                               % (project, epochs[0].year, epochs[0].doy))
    else:
        print(' -- Pre-existent DRA found. Attaching.')

    polyhedrons = poly

    bar = tqdm(total=len(epochs)-1, ncols=160)

    for date1, date2 in zip(epochs[0:-1], epochs[1:]):

        poly1 = []

        # get the stations common stations between day i and day i+1 (in A format)
        s = cnn.query_float(sql_select_union(project, '"X", "Y", "Z", "NetworkCode", "StationCode"', date1, date2))

        x = cnn.query_float(sql_select_union(project, '0, -"Z", "Y", 1, 0, 0', date1, date2))
        y = cnn.query_float(sql_select_union(project, '"Z", 0, -"X", 0, 1, 0', date1, date2))
        z = cnn.query_float(sql_select_union(project, '-"Y", "X", 0, 0, 0, 1', date1, date2))

        # polyhedron of the common stations
        Xx = cnn.query_float(sql_select_union(project, '"X", "Y", "Z"', date1, date2))

        X = numpy.array(Xx).transpose().flatten()

        # for vertex in stations
        for v in s:
            poly1 += [np.array(pp[0:3], dtype=float) - np.array(v[0:3]) for pp in poly if pp[-2] == v[-2] and pp[-1] == v[-1]]

        # residuals for adjustment
        L = np.array(poly1)

        A = numpy.row_stack((np.array(x), np.array(y), np.array(z)))
        A[:, 0:3] = A[:, 0:3]*1e-9
        # find helmert transformation
        c, _, _, v, _, p, it = adjust_lsq(A, L.flatten())

        # write some info to the screen
        tqdm.write(' -- %s (%3i): translation (mm mm mm) scale: (%6.1f %6.1f %6.1f) %10.2e ' %
                   (date2.yyyyddd(), it, c[-3] * 1000, c[-2] * 1000, c[-1] * 1000, c[-4]))

        # make A again with all stations
        s = cnn.query_float(sql_select(project, '"Year", "DOY", "NetworkCode", "StationCode"', date2))

        x = cnn.query_float(sql_select(project, '0, -"Z", "Y", 1, 0, 0', date2))
        y = cnn.query_float(sql_select(project, '"Z", 0, -"X", 0, 1, 0', date2))
        z = cnn.query_float(sql_select(project, '-"Y", "X", 0, 0, 0, 1', date2))

        A = numpy.row_stack((np.array(x), np.array(y), np.array(z)))
        A[:, 0:3] = A[:, 0:3] * 1e-9

        Xx = cnn.query_float(sql_select(project, '"X", "Y", "Z"', date2))
        X = numpy.array(Xx).transpose().flatten()

        X = (numpy.dot(A, c) + X).reshape(3, len(x)).transpose()

        # save current transformed polyhedron to use in the next iteration
        polyhedrons += poly
        poly = [x.tolist() + list(s) for x, s in zip(X, s)]

        # insert results in gamit_dra
        for pp in poly:
            cnn.insert('gamit_dra', NetworkCode=pp[-2], StationCode=pp[-1], Project=project, X=pp[0], Y=pp[1],
                       Z=pp[2], Year=date2.year, DOY=date2.doy, FYear=date2.fyear)

        bar.update()

    bar.close()

    # plot the residuals
    for stn in tqdm(stnlist):
        NetworkCode = stn['NetworkCode']
        StationCode = stn['StationCode']

        # load from the db
        ts = cnn.query_float('SELECT "X", "Y", "Z", "Year", "DOY" FROM gamit_dra '
                             'WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' AND '
                             '"Project" = \'%s\' ORDER BY "Year", "DOY"' % (NetworkCode, StationCode, project))

        ts = np.array(ts)

        if ts.size:
            try:
                # save the time series
                gsoln = pyETM.GamitSoln(cnn, ts, NetworkCode, StationCode, project)

                # create the ETM object
                etm = pyETM.GamitETM(cnn, NetworkCode, StationCode, False, False, gsoln)

                etm.plot(pngfile='%s/%s.%s_SOL.png' % (project, NetworkCode, StationCode), residuals=True,
                         plot_missing=False)

                if ts.shape[0] > 2:
                    dts = np.append(np.diff(ts[:,0:3], axis=0), ts[1:, -2:], axis=1)
                    dra = pyETM.GamitSoln(cnn, dts, NetworkCode, StationCode, project)

                    etm = pyETM.DailyRep(cnn, NetworkCode, StationCode, False, False, dra)

                    etm.plot(pngfile='%s/%s.%s_DRA.png' % (project, NetworkCode, StationCode), residuals=True,
                             plot_missing=False)

            except Exception as e:
                tqdm.write(' -->' + str(e))
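
The x, y and z queries above assemble, per station, the three rows of a small-rotation Helmert design matrix (the rotation columns are later scaled by 1e-9 so the estimated parameters stay numerically comparable). A sketch of the equivalent per-station block, assuming the column order rx, ry, rz, tx, ty, tz implied by those rows:

import numpy as np

def helmert_rows(x, y, z):
    # rows for the dX, dY and dZ equations of one station at ECEF position (x, y, z)
    return np.array([[0.0,  -z,   y, 1.0, 0.0, 0.0],
                     [  z, 0.0,  -x, 0.0, 1.0, 0.0],
                     [ -y,   x, 0.0, 0.0, 0.0, 1.0]])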
Example 13
def main():

    parser = argparse.ArgumentParser(
        description='Plot ETM for stations in the database')

    parser.add_argument(
        'stnlist',
        type=str,
        nargs='+',
        help=
        "List of networks/stations to plot given in [net].[stnm] format or just [stnm] "
        "(separated by spaces; if [stnm] is not unique in the database, all stations with that "
        "name will be plotted). Use keyword 'all' to plot all stations in all networks. "
        "If [net].all is given, all stations from network [net] will be plotted"
    )
    parser.add_argument('-nop',
                        '--no_plots',
                        action='store_true',
                        help="Do not produce plots",
                        default=False)
    parser.add_argument('-nom',
                        '--no_missing_data',
                        action='store_true',
                        help="Do not show missing days",
                        default=False)
    parser.add_argument('-nm',
                        '--no_model',
                        action='store_true',
                        help="Plot time series without fitting a model")
    parser.add_argument('-r',
                        '--residuals',
                        action='store_true',
                        help="Plot time series residuals",
                        default=False)
    parser.add_argument(
        '-dir',
        '--directory',
        type=str,
        help=
        "Directory to save the resulting PNG files. If not specified, assumed to be the "
        "production directory")
    parser.add_argument(
        '-json',
        '--json',
        type=int,
        help="Export ETM adjustment to JSON. Append '1' to export time "
        "series or append '0' to just output the ETM parameters.")
    parser.add_argument(
        '-gui',
        '--interactive',
        action='store_true',
        help="Interactive mode: allows to zoom and view the plot interactively"
    )
    parser.add_argument(
        '-win',
        '--time_window',
        nargs='+',
        metavar='interval',
        help=
        'Date range to window data. Can be specified in yyyy/mm/dd, yyyy.doy or as a single '
        'integer value (N) which shall be interpreted as last epoch-N')
    parser.add_argument(
        '-gamit',
        '--gamit',
        type=str,
        nargs=2,
        metavar='{project} {type}',
        help=
        "Plot the GAMIT time series. Specify project and type = \'stack\' to plot the time "
        "series after stacking or \'gamit\' to just plot the coordinates of the polyhedron"
    )

    args = parser.parse_args()

    Config = pyOptions.ReadOptions(
        "gnss_data.cfg")  # type: pyOptions.ReadOptions

    cnn = dbConnection.Cnn('gnss_data.cfg')

    if len(args.stnlist) == 1 and os.path.isfile(args.stnlist[0]):
        print(' >> Station list read from ' + args.stnlist[0])
        stnlist = [line.strip() for line in open(args.stnlist[0], 'r')]
        stnlist = [{
            'NetworkCode': item.split('.')[0],
            'StationCode': item.split('.')[1]
        } for item in stnlist]
    else:
        stnlist = Utils.process_stnlist(cnn, args.stnlist)

    #####################################
    # date filter

    dates = None
    if args.time_window is not None:
        if len(args.time_window) == 1:
            try:
                dates = process_date(args.time_window,
                                     missing_input=None,
                                     allow_days=False)
                dates = (dates[0].fyear, )
            except ValueError:
                # an integer value
                dates = float(args.time_window[0])
        else:
            dates = process_date(args.time_window)
            dates = (dates[0].fyear, dates[1].fyear)

    if stnlist:
        # do the thing
        if args.directory:
            if not os.path.exists(args.directory):
                os.mkdir(args.directory)
        else:
            if not os.path.exists('production'):
                os.mkdir('production')
            args.directory = 'production'

        for stn in stnlist:
            try:

                if args.gamit is None:
                    etm = pyETM.PPPETM(cnn, stn['NetworkCode'],
                                       stn['StationCode'], False,
                                       args.no_model)
                else:
                    if args.gamit[1] == 'stack':
                        polyhedrons = cnn.query_float(
                            'SELECT "X", "Y", "Z", "Year", "DOY" FROM stacks '
                            'WHERE "Project" = \'%s\' AND "NetworkCode" = \'%s\' AND '
                            '"StationCode" = \'%s\' '
                            'ORDER BY "Year", "DOY", "NetworkCode", "StationCode"'
                            % (args.gamit[0], stn['NetworkCode'],
                               stn['StationCode']))

                        soln = pyETM.GamitSoln(cnn, polyhedrons,
                                               stn['NetworkCode'],
                                               stn['StationCode'],
                                               args.gamit[0])

                        etm = pyETM.GamitETM(cnn,
                                             stn['NetworkCode'],
                                             stn['StationCode'],
                                             False,
                                             args.no_model,
                                             gamit_soln=soln)

                        # print ' > %5.2f %5.2f %5.2f %i %i' % \
                        #      (etm.factor[0]*1000, etm.factor[1]*1000, etm.factor[2]*1000, etm.soln.t.shape[0],
                        #       etm.soln.t.shape[0] - np.sum(np.logical_and(np.logical_and(etm.F[0], etm.F[1]), etm.F[2])))

                        # print two largest outliers
                        if etm.A is not None:
                            lres = np.sqrt(np.sum(np.square(etm.R), axis=0))
                            slres = lres[np.argsort(-lres)]

                            print(' >> Two largest residuals:')
                            for i in [0, 1]:
                                print(' %s %6.3f %6.3f %6.3f' %
                                      (pyDate.Date(mjd=etm.soln.mjd[
                                          lres == slres[i]]).yyyyddd(),
                                       etm.R[0, lres == slres[i]],
                                       etm.R[1, lres == slres[i]],
                                       etm.R[2, lres == slres[i]]))

                    elif args.gamit[1] == 'gamit':
                        etm = pyETM.GamitETM(cnn,
                                             stn['NetworkCode'],
                                             stn['StationCode'],
                                             False,
                                             args.no_model,
                                             project=args.gamit[0])
                    else:
                        parser.error('Invalid option for -gamit switch')
                        etm = None

                if args.interactive:
                    xfile = None
                else:
                    if args.gamit is None:
                        xfile = os.path.join(
                            args.directory,
                            '%s.%s_ppp' % (etm.NetworkCode, etm.StationCode))
                    else:
                        xfile = os.path.join(
                            args.directory,
                            '%s.%s_gamit' % (etm.NetworkCode, etm.StationCode))

                # leave pngfile empty to enter interactive mode (GUI)
                if not args.no_plots:
                    etm.plot(xfile + '.png',
                             t_win=dates,
                             residuals=args.residuals,
                             plot_missing=not args.no_missing_data)

                if args.json is not None:
                    with open(xfile + '.json', 'w') as f:
                        if args.json != 0:
                            json.dump(etm.todictionary(True),
                                      f,
                                      indent=4,
                                      sort_keys=False)
                        else:
                            json.dump(etm.todictionary(False),
                                      f,
                                      indent=4,
                                      sort_keys=False)

                print('Successfully plotted ' + stn['NetworkCode'] + '.' + stn['StationCode'])

            except pyETM.pyETMException as e:
                print(str(e))

            except Exception:
                print('Error during processing of ' + stn['NetworkCode'] + '.' + stn['StationCode'])
                print(traceback.format_exc())
Example 14
def process_postseismic(cnn, stnlist, force_stnlist, stack,
                        interseimic_filename, event, prev_events, sigma_cutoff,
                        lat_lim, filename, kmz):
    tqdm.write(
        ' >> Analyzing suitability of station list to participate in postseismic model...'
    )
    tqdm.write(' -- output filename: %s' % filename)

    use_station = []
    discarded = []

    # load the interseismic model
    model = np.loadtxt(interseimic_filename)

    # model[:, 0] -= 360
    params = []

    def getpost():
        return {
            'NetworkCode': etm.NetworkCode,
            'StationCode': etm.StationCode,
            'lat': etm.gamit_soln.lat[0],
            'lon': etm.gamit_soln.lon[0],
            'n': eq.p.params[0, 0] if eq.p.jump_type is CO_SEISMIC_DECAY else eq.p.params[0, 1],
            'e': eq.p.params[1, 0] if eq.p.jump_type is CO_SEISMIC_DECAY else eq.p.params[1, 1],
            'etm': etm.plot(plot_missing=False, plot_outliers=True, fileio=BytesIO())
        }

    for stn in tqdm(stnlist, ncols=160, disable=None):
        stn_id = stationID(stn)
        tqdm.write(' -- Processing station %s' % stn_id)
        try:
            lla = cnn.query_float(
                'SELECT lat,lon FROM stations WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\''
                % (stn['NetworkCode'], stn['StationCode']),
                as_dict=True)[0]

            ve = griddata(model[:, 0:2],
                          model[:, 2] / 1000, (lla['lon'], lla['lat']),
                          method='cubic')
            vn = griddata(model[:, 0:2],
                          model[:, 3] / 1000, (lla['lon'], lla['lat']),
                          method='cubic')

            etm = pyETM.GamitETM(cnn,
                                 stn['NetworkCode'],
                                 stn['StationCode'],
                                 stack_name=stack,
                                 interseismic=[vn, ve, 0.])

            for eq in [
                    e for e in etm.Jumps.table if e.p.jump_type in (
                        CO_SEISMIC_DECAY,
                        CO_SEISMIC_JUMP_DECAY) and e.fit and etm.A is not None
            ]:
                if eq.date == event:
                    tqdm.write(
                        '    co-seismic decay detected for event %s (years: %.3f; data points: %i)'
                        % (str(eq.p.jump_date), eq.constrain_years,
                           eq.constrain_data_points))
                    if (eq.constrain_years >= 2.5 and eq.constrain_data_points >= eq.constrain_years * 5) \
                            or stn in force_stnlist:
                        params.append(getpost())
                        tqdm.write(
                            '    co-seismic decay added to the list for interpolation'
                        )
                    else:
                        tqdm.write(
                            '    co-seismic decay not added (conditions not met)'
                        )
                        discarded.append(getpost())
                    break

        except pyETM.pyETMException as e:
            tqdm.write(' -- %s: %s' % (stn_id, str(e)))

    outvar = np.array([(v['NetworkCode'] + '.' + v['StationCode'], v['lon'],
                        v['lat'], v['e'], v['n']) for v in params],
                      dtype=[('stn', 'U8'), ('lon', 'float64'),
                             ('lat', 'float64'), ('e', 'float64'),
                             ('n', 'float64')])

    np.savetxt(filename,
               outvar,
               fmt=("%s", "%13.8f", "%12.8f", "%12.8f", "%12.8f"))
    if kmz:
        generate_kmz(kmz, params, discarded, 'postseismic', 'mm')
Example 15
def process_interseismic(cnn, stnlist, force_stnlist, stack, sigma_cutoff,
                         vel_cutoff, lat_lim, filename, kmz):
    # start by checking that the stations in the list have a linear start (no post-seismic)
    # and more than 2 years of data until the first earthquake or non-linear behavior

    tqdm.write(
        ' >> Analyzing suitability of station list to participate in interseismic model...'
    )
    tqdm.write(' -- latitude cutoff: south %.2f, north %.2f' %
               (lat_lim[0], lat_lim[1]))
    tqdm.write(' -- velocity cutoff: %.2f mm/yr; output filename: %s' %
               (vel_cutoff, filename))

    use_station = []
    discarded = []
    velocities = []
    # min_lon     =  9999
    # max_lon     = -9999
    # min_lat     =  9999
    # max_lat     = -9999

    for stn in tqdm(stnlist, ncols=160, disable=None):
        try:
            stn_id = stationID(stn)

            etm = pyETM.GamitETM(cnn,
                                 stn['NetworkCode'],
                                 stn['StationCode'],
                                 stack_name=stack)

            use = True
            if stn in force_stnlist:
                tqdm.write(' -- %s was forced to be included in the list' %
                           stn_id)
            else:
                # only run the full set of checks if the station is not included in the force list
                # check that station is within latitude range
                if etm.gamit_soln.lat[0] < lat_lim[0] or \
                   etm.gamit_soln.lat[0] > lat_lim[1]:
                    tqdm.write(
                        ' -- %s excluded because it is outside of the latitude limit'
                        % stn_id)
                    use = False

                # check that station has at least 2 years of data
                if etm.gamit_soln.date[-1].fyear - etm.gamit_soln.date[
                        0].fyear < 2 and use:
                    tqdm.write(
                        ' -- %s rejected because it has less than two years of observations %s -> %s'
                        % (stn_id, etm.gamit_soln.date[0].yyyyddd(),
                           etm.gamit_soln.date[-1].yyyyddd()))
                    use = False

                # other checks
                if etm.A is not None:
                    if len(etm.Jumps.table) > 0 and use:
                        j = next((j
                                  for j in etm.Jumps.table
                                  if j.p.jump_type == pyETM.CO_SEISMIC_JUMP_DECAY and j.fit and \
                                  j.magnitude >= 7 and j.date.fyear < etm.gamit_soln.date[0].fyear + 1.5
                                  ), None)
                        if j:
                            tqdm.write(
                                ' -- %s has a Mw %.1f in %s and data starts in %s'
                                % (stn_id, j.magnitude, j.date.yyyyddd(),
                                   etm.gamit_soln.date[0].yyyyddd()))
                            use = False

                        else:
                            has_eq_jumps = any(
                                True for j in etm.Jumps.table
                                if j.p.jump_type == pyETM.CO_SEISMIC_DECAY
                                and j.fit)
                            if has_eq_jumps:
                                tqdm.write(
                                    ' -- %s has one or more earthquakes before data started in %s'
                                    %
                                    (stn_id, etm.gamit_soln.date[0].yyyyddd()))
                                use = False

                    if (etm.factor[0] * 1000 > sigma_cutoff
                            or etm.factor[1] * 1000 > sigma_cutoff) and use:
                        tqdm.write(
                            ' -- %s rejected due to large wrms %5.2f %5.2f %5.2f'
                            % (stn_id, etm.factor[0] * 1000,
                               etm.factor[1] * 1000, etm.factor[2] * 1000))
                        use = False

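                    # magnitude of the horizontal (north-east) velocity in mm/yr; the Up component is
                    # not included in the cutoff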
                    norm = np.sqrt(
                        np.sum(np.square(etm.Linear.p.params[0:2, 1] * 1000)))
                    if norm > vel_cutoff and use:
                        tqdm.write(
                            ' -- %s rejected due to large NEU velocity: %5.2f %5.2f %5.2f NE norm %5.2f'
                            % (stn_id, etm.Linear.p.params[0, 1] * 1000,
                               etm.Linear.p.params[1, 1] * 1000,
                               etm.Linear.p.params[2, 1] * 1000, norm))
                        use = False
                elif use:
                    tqdm.write(' -- %s too few solutions to calculate ETM' %
                               stn_id)
                    use = False

            def getvel():
                return {
                    'NetworkCode':
                    etm.NetworkCode,
                    'StationCode':
                    etm.StationCode,
                    'lat':
                    etm.gamit_soln.lat[0],
                    'lon':
                    etm.gamit_soln.lon[0],
                    'n':
                    etm.Linear.p.params[0, 1],
                    'e':
                    etm.Linear.p.params[1, 1],
                    'etm':
                    etm.plot(plot_missing=False,
                             plot_outliers=True,
                             fileio=BytesIO())
                }

            if use:
                tqdm.write(
                    ' -- %s added NEU wrms: %5.2f %5.2f %5.2f NEU vel: %5.2f %5.2f %5.2f'
                    % (stn_id, etm.factor[0] * 1000, etm.factor[1] * 1000,
                       etm.factor[2] * 1000, etm.Linear.p.params[0, 1] * 1000,
                       etm.Linear.p.params[1, 1] * 1000,
                       etm.Linear.p.params[2, 1] * 1000))
                use_station.append(stn)
                v = getvel()
                velocities.append(v)

                #min_lon = min(v['lon'], min_lon)
                #max_lon = max(v['lon'], max_lon)
                #min_lat = min(v['lat'], min_lat)
                #max_lat = max(v['lat'], max_lat)

            elif etm.A is not None:
                discarded.append(getvel())

        except pyETM.pyETMException as e:
            tqdm.write(' -- %s: %s' % (stn_id, str(e)))

    tqdm.write(' >> Total number of stations for linear model: %i' %
               len(use_station))

    outvar = np.array([(v['NetworkCode'] + '.' + v['StationCode'], v['lon'],
                        v['lat'], v['e'], v['n']) for v in velocities],
                      dtype=[('stn', 'U8'), ('lon', 'float64'),
                             ('lat', 'float64'), ('e', 'float64'),
                             ('n', 'float64')])
    np.savetxt(filename,
               outvar,
               fmt=("%s", "%13.8f", "%12.8f", "%12.8f", "%12.8f"))
    if kmz:
        generate_kmz(kmz, velocities, discarded, 'interseismic', 'mm/yr')
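For reference, the output step writes a NumPy structured array with one row per accepted station and one format specifier per field. A minimal sketch with hypothetical stations and invented velocities (apparently m/yr, matching the units stored in the 'velocities' dictionaries above):

import numpy as np

# two hypothetical stations (network.station, lon, lat, east vel, north vel)
rows = [('aaa.stn1', -70.66851000, -33.15028000, 0.02010000, 0.01120000),
        ('bbb.stn2', -64.46987000, -31.52830000, 0.00150000, 0.00980000)]

outvar = np.array(rows, dtype=[('stn', 'U8'), ('lon', 'float64'), ('lat', 'float64'),
                               ('e', 'float64'), ('n', 'float64')])

# same per-field format specifiers as in the calls above
np.savetxt('interseismic_sketch.txt', outvar,
           fmt=("%s", "%13.8f", "%12.8f", "%12.8f", "%12.8f"))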
Example n. 16
def insert_modify_param(parser, cnn, stnlist, args):

    # determine if passed function is valid
    if len(args.function_type) < 2:
        parser.error('invalid number of arguments')

    elif args.function_type[0] not in ('p', 'j', 'q', 't'):
        parser.error('function type should be one of the following: polynomial (p), jump (j), periodic (q), or '
                     'bulk earthquake jump removal (t)')

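    # expected layouts of args.function_type, based on the parsing below:
    #   ['p', '<terms>']                                   -> polynomial with <terms> terms
    #   ['j', '+|-', '0|1', '<date>', '<relaxations>...']  -> add (+) or remove (-) a jump; type 1 needs relaxations when adding
    #   ['q', '<period_days>', ...]                        -> periodic terms given as periods in days (converted to frequencies)
    #   ['t', '<max_magnitude>', '<stack_name>']           -> bulk removal of earthquake jumps with magnitude <= threshold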
    # create a bunch object to save all the params that will enter the database
    tpar = Bunch()
    tpar.NetworkCode = None
    tpar.StationCode = None
    tpar.soln        = None
    tpar.object      = None
    tpar.terms       = None
    tpar.frequencies = None
    tpar.jump_type   = None
    tpar.relaxation  = None
    tpar.Year        = None
    tpar.DOY         = None
    tpar.action      = None

    ftype = args.function_type[0]
    remove_eq = False

    try:
        if ftype == 'p':
            tpar.object = 'polynomial'
            tpar.terms  = int(args.function_type[1])

            if tpar.terms <= 0:
                parser.error('polynomial terms should be > 0')

        elif ftype == 'j':
            tpar.object = 'jump'
            # insert the action
            tpar.action = args.function_type[1]

            if tpar.action not in ('+', '-'):
                parser.error('action for function type jump (j) should be + or -')

            # jump type
            tpar.jump_type = int(args.function_type[2])

            if tpar.jump_type not in (0, 1):
                parser.error('jump type should be either 0 or 1')

            try:
                date, _ = Utils.process_date([args.function_type[3]])

                # recover the year and doy
                tpar.Year = date.year
                tpar.DOY  = date.doy

            except Exception as e:
                parser.error('while parsing jump date: ' + str(e))

            if tpar.jump_type == 1:
                tpar.relaxation = [float(f) for f in args.function_type[4:]]

                if not tpar.relaxation:
                    if tpar.action == '-':
                        tpar.relaxation = None
                    elif tpar.action == '+':
                        parser.error('jump type == 1 but no relaxation parameter, please specify relaxation')

        elif ftype == 'q':
            tpar.object      = 'periodic'
            tpar.frequencies = [1 / float(p) for p in args.function_type[1:]]

        elif ftype == 't':
            tpar.object = 'jump'
            remove_eq = True

    except ValueError:
        parser.error('invalid argument type for function "%s"' % ftype)

    for station in stnlist:
        for soln in args.solution_type:
            tpar.NetworkCode = station['NetworkCode']
            tpar.StationCode = station['StationCode']
            tpar.soln        = soln

            station_soln = "%s.%s (%s)" % (station['NetworkCode'], station['StationCode'], soln)

            if remove_eq:
                # load the ETM parameters for this station
                print(' >> Obtaining ETM parameters for ' + station_soln)

                if soln == 'ppp':
                    etm = pyETM.PPPETM(cnn, station['NetworkCode'], station['StationCode'])
                else:
                    etm = pyETM.GamitETM(cnn, station['NetworkCode'], station['StationCode'],
                                         stack_name=args.function_type[2])

                for eq in [e for e in etm.Jumps.table
                           if e.p.jump_type in (pyETM.CO_SEISMIC_DECAY, pyETM.CO_SEISMIC_JUMP_DECAY,
                                                pyETM.CO_SEISMIC_JUMP)]:
                    if eq.magnitude <= float(args.function_type[1]):
                        # this earthquake should be removed, fill in the data
                        tpar.Year = eq.date.year
                        tpar.DOY = eq.date.doy
                        tpar.jump_type = 1
                        tpar.relaxation = None
                        tpar.action = '-'
                        apply_change(cnn, station, tpar, soln)
            else:
                apply_change(cnn, station, tpar, soln)
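The Bunch container used to collect the parameters above is not shown in this example; assuming it is the usual attribute-access dictionary recipe, a minimal stand-in would look like this:

class Bunch(dict):
    # minimal sketch of an attribute-access dict; the real Bunch used above may differ
    def __getattr__(self, key):
        try:
            return self[key]
        except KeyError as e:
            raise AttributeError(key) from e

    def __setattr__(self, key, value):
        self[key] = value


tpar = Bunch()
tpar.object = 'periodic'
assert tpar['object'] == 'periodic'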
Example n. 17
    def remove_common_modes(self, target_periods=None):

        if target_periods is None:
            tqdm.write(' >> Removing periodic common modes...')

            # load all the periodic terms
            etm_objects = self.cnn.query_float('SELECT etms."NetworkCode", etms."StationCode", stations.lat, '
                                               'stations.lon, '
                                               'frequencies as freq, params FROM etms '
                                               'LEFT JOIN stations ON '
                                               'etms."NetworkCode" = stations."NetworkCode" AND '
                                               'etms."StationCode" = stations."StationCode" '
                                               'WHERE "object" = \'periodic\' AND soln = \'gamit\' AND stack = \'%s\' '
                                               'AND frequencies <> \'{}\' '
                                               'ORDER BY etms."NetworkCode", etms."StationCode"'
                                               % self.name, as_dict=True)
        else:
            use_stations = []
            for s in target_periods.keys():
                # check that the stations have not one or both periods with NaNs
                if not np.isnan(target_periods[s]['365.250']['n'][0]) and \
                        not np.isnan(target_periods[s]['182.625']['n'][0]):
                    use_stations.append(s)

            tqdm.write(' >> Inheriting periodic components...')

            # load the periodic terms of the stations that will produce the inheritance
            etm_objects = self.cnn.query_float('SELECT etms."NetworkCode", etms."StationCode", stations.lat, '
                                               'stations.lon, '
                                               'frequencies as freq, params FROM etms '
                                               'LEFT JOIN stations ON '
                                               'etms."NetworkCode" = stations."NetworkCode" AND '
                                               'etms."StationCode" = stations."StationCode" '
                                               'WHERE "object" = \'periodic\' AND soln = \'gamit\' AND stack = \'%s\' '
                                               'AND frequencies <> \'{}\' AND etms."NetworkCode" || \'.\' || '
                                               'etms."StationCode" IN (\'%s\') '
                                               'ORDER BY etms."NetworkCode", etms."StationCode"'
                                               % (self.name, '\', \''.join(use_stations)), as_dict=True)

        # load the frequencies to subtract
        frequencies = self.cnn.query_float('SELECT frequencies FROM etms WHERE soln = \'gamit\' AND '
                                           'object = \'periodic\' AND frequencies <> \'{}\' AND stack = \'%s\' '
                                           'GROUP BY frequencies' % self.name, as_dict=True)

        # get the unique list of frequencies
        f_vector = []

        for freq in frequencies:
            f_vector += [f for f in freq['frequencies']]

        f_vector = np.array(list(set(f_vector)))
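        # f_vector now holds the unique set of frequencies present in the database, typically the
        # annual and semi-annual terms (1/365.25 and 1/182.625 in cycles per day, matching the
        # report header printed further below)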

        # initialize the vectors
        ox = np.zeros((len(f_vector), len(etm_objects), 2))
        oy = np.zeros((len(f_vector), len(etm_objects), 2))
        oz = np.zeros((len(f_vector), len(etm_objects), 2))

        # vector for residuals after alignment
        rx = np.zeros((len(f_vector), len(etm_objects), 2))
        ry = np.zeros((len(f_vector), len(etm_objects), 2))
        rz = np.zeros((len(f_vector), len(etm_objects), 2))

        tqdm.write(' -- Reporting periodic residuals (in mm) before %s'
                   % ('inheritance' if target_periods else 'common mode removal'))

        for s, p in enumerate(etm_objects):

            # DDG: this portion of the code should only be activated if, for any reason, the ETMs stored in the
            # database were produced with a stack different from the one being used here
            # stn_ts = self.get_station(p['NetworkCode'], p['StationCode'])

            # self.cnn.query('DELETE FROM etms WHERE "soln" = \'gamit\' AND "NetworkCode" = \'%s\' AND '
            #                '"StationCode" = \'%s\'' % (p['NetworkCode'], p['StationCode']))
            # save the time series
            # ts = pyETM.GamitSoln(self.cnn, stn_ts, p['NetworkCode'], p['StationCode'], self.project)
            # create the ETM object
            # pyETM.GamitETM(self.cnn, p['NetworkCode'], p['StationCode'], False, False, ts)
            # REDUNDANT CALL, but leave anyways
            q = self.cnn.query_float('SELECT frequencies as freq, * FROM etms '
                                     'WHERE "object" = \'periodic\' AND soln = \'gamit\' '
                                     'AND "NetworkCode" = \'%s\' AND '
                                     '"StationCode" = \'%s\' AND stack = \'%s\''
                                     % (p['NetworkCode'], p['StationCode'], self.name), as_dict=True)[0]

            if target_periods:
                n = []
                e = []
                u = []
                # inheritance invoked! we want to remove the difference between current periodic terms and target
                # terms from the parent frame
                for k in range(2):
                    for f in q['freq']:
                        t = target_periods['%s.%s' % (p['NetworkCode'], p['StationCode'])]['%.3f' % (1 / f)]
                        n += [t['n'][k]]
                        e += [t['e'][k]]
                        u += [t['u'][k]]

                params = np.array(q['params']) - np.array([n, e, u]).flatten()
            else:
                # no inheritance: make a vector of current periodic terms to be removed as common modes
                params = np.array(q['params'])

            params = params.reshape((3, params.shape[0] // 3))
            param_count = params.shape[1] // 2

            print_residuals(p['NetworkCode'], p['StationCode'], params, p['lat'], p['lon'])

            # convert from NEU to XYZ
            for j in range(params.shape[1]):
                params[:, j] = np.array(lg2ct(params[0, j], params[1, j], params[2, j],
                                              p['lat'], p['lon'])).flatten()

            for i, f in enumerate(p['freq']):
                ox[f_vector == f, s] = params[0, i:i + param_count + 1:param_count]
                oy[f_vector == f, s] = params[1, i:i + param_count + 1:param_count]
                oz[f_vector == f, s] = params[2, i:i + param_count + 1:param_count]

        # build the design matrix using the stations involved in inheritance or all stations if no inheritance
        sql_where = ','.join(["'" + stn['NetworkCode'] + '.' + stn['StationCode'] + "'" for stn in etm_objects])

        x = self.cnn.query_float('SELECT 0, -auto_z*1e-9, auto_y*1e-9, 1, 0, 0, auto_x*1e-9 FROM stations WHERE '
                                 '"NetworkCode" || \'.\' || "StationCode" '
                                 'IN (%s) ORDER BY "NetworkCode", "StationCode"' % sql_where)

        y = self.cnn.query_float('SELECT auto_z*1e-9, 0, -auto_x*1e-9, 0, 1, 0, auto_y*1e-9 FROM stations WHERE '
                                 '"NetworkCode" || \'.\' || "StationCode" '
                                 'IN (%s) ORDER BY "NetworkCode", "StationCode"' % sql_where)

        z = self.cnn.query_float('SELECT -auto_y*1e-9, auto_x*1e-9, 0, 0, 0, 1, auto_z*1e-9 FROM stations WHERE '
                                 '"NetworkCode" || \'.\' || "StationCode" '
                                 'IN (%s) ORDER BY "NetworkCode", "StationCode"' % sql_where)
        Ax = np.array(x)
        Ay = np.array(y)
        Az = np.array(z)

        A = np.row_stack((Ax, Ay, Az))
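        # A stacks the X, Y and Z observation equations of a 7-parameter Helmert transformation:
        # the first three columns are the rotation terms, the next three the translations and the
        # last one the scale, built from the approximate coordinates (scaled by 1e-9, apparently
        # for numerical conditioning) selected above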

        solution_vector = []

        # vector to display down-weighted stations
        st = dict()
        st['stn'] = [s['NetworkCode'] + '.' + s['StationCode'] for s in etm_objects]
        xyzstn = ['X-%s' % ss for ss in st['stn']] + ['Y-%s' % ss for ss in st['stn']] + \
                 ['Z-%s' % ss for ss in st['stn']]

        # loop through the frequencies
        for freq in f_vector:
            for i, cs in enumerate((np.sin, np.cos)):
                L = np.row_stack((ox[f_vector == freq, :, i].flatten(),
                                  oy[f_vector == freq, :, i].flatten(),
                                  oz[f_vector == freq, :, i].flatten())).flatten()

                c, _, index, _, wrms, _, it = adjust_lsq(A, L)
                # c = np.linalg.lstsq(A, L, rcond=-1)[0]

                tqdm.write(' -- Transformation for %s(2 * pi * 1/%.2f) : %s'
                           % (cs.__name__, np.divide(1., freq), ' '.join(['%7.4f' % cc for cc in c])) +
                           ' wrms: %.3f it: %i\n' % (wrms * 1000, it) +
                           '    Down-weighted station components: %s'
                           % ' '.join(['%s' % ss for ss in np.array(xyzstn)[np.logical_not(index)]]))

                # save the transformation parameters to output to json file
                solution_vector.append(['%s(2 * pi * 1/%.2f)' % (cs.__name__, np.divide(1., freq)), c.tolist()])

                # loop through all the polyhedrons
                for poly in tqdm(self, ncols=160, desc=' -- Applying transformation -> %s(2 * pi * 1/%.2f)' %
                                                       (cs.__name__, np.divide(1., freq))):

                    # subtract the inverted common modes
                    poly.vertices['x'] = poly.vertices['x'] - cs(2 * pi * freq * 365.25 * poly.date.fyear) * \
                                         np.dot(poly.ax(scale=True), c)
                    poly.vertices['y'] = poly.vertices['y'] - cs(2 * pi * freq * 365.25 * poly.date.fyear) * \
                                         np.dot(poly.ay(scale=True), c)
                    poly.vertices['z'] = poly.vertices['z'] - cs(2 * pi * freq * 365.25 * poly.date.fyear) * \
                                         np.dot(poly.az(scale=True), c)

        tqdm.write(' -- Reporting periodic residuals (in mm) after %s\n'
                   '       365.25  182.62  365.25  182.62  \n'
                   '       sin     sin     cos     cos       '
                   % ('inheritance' if target_periods else 'common mode removal'))

        for s, p in enumerate(etm_objects):
            # redo the etm for this station
            # DDG: etms need to be redone because we changed the stack!
            stn_ts = self.get_station(p['NetworkCode'], p['StationCode'])

            self.cnn.query('DELETE FROM etms WHERE "soln" = \'gamit\' AND "NetworkCode" = \'%s\' AND '
                           '"StationCode" = \'%s\' AND stack = \'%s\''
                           % (p['NetworkCode'], p['StationCode'], self.name))
            # save the time series
            ts = pyETM.GamitSoln(self.cnn, stn_ts, p['NetworkCode'], p['StationCode'], self.name)
            # create the ETM object
            pyETM.GamitETM(self.cnn, p['NetworkCode'], p['StationCode'], False, False, ts)

            # obtain the updated parameters
            # they should exist for sure!
            q = self.cnn.query_float('SELECT frequencies as freq, * FROM etms '
                                     'WHERE "object" = \'periodic\' AND soln = \'gamit\' '
                                     'AND "NetworkCode" = \'%s\' AND '
                                     '"StationCode" = \'%s\' AND stack = \'%s\''
                                     % (p['NetworkCode'], p['StationCode'], self.name), as_dict=True)[0]

            if target_periods:
                n = []
                e = []
                u = []
                # inheritance invoked! we want to remove the difference between current periodic terms and target
                # terms from the parent frame
                for k in range(2):
                    for f in q['freq']:
                        t = target_periods['%s.%s' % (p['NetworkCode'], p['StationCode'])]['%.3f' % (1 / f)]
                        n += [t['n'][k]]
                        e += [t['e'][k]]
                        u += [t['u'][k]]

                residuals = (np.array(q['params']) - np.array([n, e, u]).flatten())
            else:
                # residuals are the minimized frequencies
                residuals = np.array(q['params'])

            # reshape the array to NEU
            residuals = residuals.reshape((3, residuals.shape[0] // 3))
            param_count = residuals.shape[1] // 2

            print_residuals(p['NetworkCode'], p['StationCode'], residuals, p['lat'], p['lon'])

            # convert from NEU to XYZ
            for j in range(residuals.shape[1]):
                residuals[:, j] = np.array(lg2ct(residuals[0, j], residuals[1, j], residuals[2, j],
                                           p['lat'], p['lon'])).flatten()

            for i, f in enumerate(p['freq']):
                rx[f_vector == f, s] = residuals[0, i:i + param_count + 1:param_count]
                ry[f_vector == f, s] = residuals[1, i:i + param_count + 1:param_count]
                rz[f_vector == f, s] = residuals[2, i:i + param_count + 1:param_count]

        # save the position space residuals
        self.periodic_space = {'stations': {'codes': [p['NetworkCode'] + '.' + p['StationCode'] for p in etm_objects],
                                            'latlon': [[p['lat'], p['lon']] for p in etm_objects]},
                               'frequencies': f_vector.tolist(),
                               'components': ['sin', 'cos'],
                               'residuals_before_alignment': np.array([ox, oy, oz]).tolist(),
                               'residuals_after_alignment': np.array([rx, ry, rz]).tolist(),
                               'helmert_transformations': solution_vector,
                               'comments': 'Periodic space transformation. Each residual component (X, Y, Z) '
                                           'stored as X[freq, station, component]. Frequencies, stations, and '
                                           'components ordered as in respective elements.'}

        tqdm.write(' -- Done!')
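The heart of remove_common_modes is a per-frequency, per-component (sin/cos) Helmert-style adjustment: the sine and cosine amplitudes of all stations are stacked into an observation vector L, fitted against the design matrix A, and the fitted common mode is then subtracted from every polyhedron. A compressed, self-contained sketch of that adjustment, using np.linalg.lstsq in place of the robust adjust_lsq used above and a translation-only design matrix (all values invented):

import numpy as np

rng = np.random.default_rng(0)
n_stn = 10

# hypothetical sine amplitudes (in metres) of the annual term at each station, X/Y/Z components,
# contaminated by a common-mode translation of (2, -1, 3) mm
common = np.array([0.002, -0.001, 0.003])
amp_xyz = 0.0005 * rng.standard_normal((n_stn, 3)) + common

# translation-only design matrix (one identity block per component), a simplified stand-in for the
# full rotation + translation + scale matrix built by the method above
A = np.vstack([np.tile(np.eye(3)[i], (n_stn, 1)) for i in range(3)])
L = amp_xyz.T.flatten()   # stack all X amplitudes, then Y, then Z, as in the method above

# plain least squares in place of the robust adjust_lsq used above
c = np.linalg.lstsq(A, L, rcond=None)[0]
print('estimated common mode (mm):', c * 1000)

# subtracting the fitted common mode leaves only the station-specific part of the amplitudes
residuals = L - A @ c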
Example n. 18
    def align_spaces(self, target_dict):

        # get the list of stations to use during the alignment
        use_stations = target_dict.keys()

        # reference date used to align the stack
        # epochs SHOULD all be the same. Get first item and then the epoch
        ref_date = Date(fyear=list(target_dict.values())[0]['epoch'])

        # convert the target dict to a list
        target_list = []
        stack_list = []

        tqdm.write(' >> Aligning coordinate space...')
        for stn in use_stations:
            if not np.isnan(target_dict[stn]['x']):
                target_list.append((stn, target_dict[stn]['x'], target_dict[stn]['y'], target_dict[stn]['z'],
                                    ref_date.year, ref_date.doy, ref_date.fyear))
                # get the ETM coordinate for this station
                net = stn.split('.')[0]
                ssn = stn.split('.')[1]

                ts = pyETM.GamitSoln(self.cnn, self.get_station(net, ssn), net, ssn, self.name)
                etm = pyETM.GamitETM(self.cnn, net, ssn, gamit_soln=ts)
                stack_list += etm.get_etm_soln_list()

        c_array = np.array(stack_list, dtype=[('stn', 'S8'), ('x', 'float64'), ('y', 'float64'),
                                              ('z', 'float64'), ('yr', 'i4'), ('dd', 'i4'),
                                              ('fy', 'float64')])
        comb = Polyhedron(c_array, 'etm', ref_date)

        # build a target polyhedron from the target_list
        vertices = np.array(target_list, dtype=[('stn', 'S8'), ('x', 'float64'), ('y', 'float64'),
                                                ('z', 'float64'), ('yr', 'i4'), ('dd', 'i4'),
                                                ('fy', 'float64')])

        target = Polyhedron(vertices, 'target_frame', ref_date)

        # start aligning the coordinates
        tqdm.write(' -- Aligning polyhedron at %.3f (%s)' % (ref_date.fyear, ref_date.yyyyddd()))

        scale = False
        # align the polyhedron to the target
        r_before, r_after, a_stn = comb.align(target, scale=scale, verbose=True)
        # extract the Helmert parameters to apply to the rest of the polyhedrons
        # remove the scale factor
        helmert = comb.helmert

        tqdm.write(' -- Reporting coordinate space residuals (in mm) before and after frame alignment\n'
                   '         Before   After |     Before   After  ')
        # format r_before and r_after to satisfy the required print_residuals format
        r_before = r_before.reshape(3, r_before.shape[0] // 3).transpose()
        r_after = r_after.reshape(3, r_after.shape[0] // 3).transpose()

        residuals = np.stack((r_before, r_after), axis=2)

        stn_lla = []
        for i, stn in enumerate(a_stn):
            n = stn.split('.')[0]
            s = stn.split('.')[1]
            # get the lat lon of the station to report back in the json
            lla = self.cnn.query_float('SELECT lat, lon FROM stations WHERE "NetworkCode" = \'%s\' '
                                       'AND "StationCode" = \'%s\'' % (n, s))[0]
            stn_lla.append([lla[0], lla[1]])
            # print residuals to screen
            print_residuals(n, s, residuals[i], lla[0], lla[1], ['X', 'Y', 'Z'])

        # save the position space residuals
        self.position_space = {'stations': {'codes': a_stn.tolist(), 'latlon': stn_lla},
                               'residuals_before_alignment': r_before.tolist(),
                               'residuals_after_alignment': r_after.tolist(),
                               'reference_date': ref_date,
                               'helmert_transformation': comb.helmert.tolist(),
                               'comments': 'No scale factor estimated.'}

        for poly in tqdm(self, ncols=160, desc=' -- Applying coordinate space transformation'):
            if poly.date != ref_date:
                poly.align(helmert=helmert, scale=scale)

        tqdm.write(' >> Aligning velocity space...')

        # choose the stations that have a velocity
        use_stn = []
        for stn in use_stations:
            if not np.isnan(target_dict[stn]['vx']):
                use_stn.append(stn)

        # load the polynomial terms of the stations
        etm_objects = self.cnn.query_float('SELECT etms."NetworkCode", etms."StationCode", stations.lat, '
                                           'stations.lon, params FROM etms '
                                           'LEFT JOIN stations ON '
                                           'etms."NetworkCode" = stations."NetworkCode" AND '
                                           'etms."StationCode" = stations."StationCode" '
                                           'WHERE "object" = \'polynomial\' AND soln = \'gamit\' AND stack = \'%s\' '
                                           'AND etms."NetworkCode" || \'.\' || etms."StationCode" IN (\'%s\') '
                                           'ORDER BY etms."NetworkCode", etms."StationCode"'
                                           % (self.name, '\', \''.join(use_stn)), as_dict=True)

        # first, align the velocity space by finding a Helmert transformation that takes vx, vy, and vz of the stack at
        # each station and makes it equal to vx, vy, and vz of the ITRF structure

        dvx = np.zeros(len(etm_objects))
        dvy = np.zeros(len(etm_objects))
        dvz = np.zeros(len(etm_objects))

        for s, p in enumerate(etm_objects):
            stn_ts = self.get_station(p['NetworkCode'], p['StationCode'])

            self.cnn.query('DELETE FROM etms WHERE "soln" = \'gamit\' AND "NetworkCode" = \'%s\' AND '
                           '"StationCode" = \'%s\' AND stack = \'%s\' '
                           % (p['NetworkCode'], p['StationCode'], self.name))
            # save the time series
            ts = pyETM.GamitSoln(self.cnn, stn_ts, p['NetworkCode'], p['StationCode'], self.name)
            # create the ETM object
            pyETM.GamitETM(self.cnn, p['NetworkCode'], p['StationCode'], False, False, ts)

            q = self.cnn.query_float('SELECT params FROM etms '
                                     'WHERE "object" = \'polynomial\' AND soln = \'gamit\' '
                                     'AND "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' AND stack = \'%s\' '
                                     % (p['NetworkCode'], p['StationCode'], self.name), as_dict=True)[0]

            params = np.array(q['params'])
            params = params.reshape((3, params.shape[0] // 3))
            # first column, i.e. params[:, 0], is the position
            # second column, params[:, 1], is the velocity, which is what we are interested in
            v = np.array(lg2ct(params[0, 1], params[1, 1], params[2, 1], p['lat'], p['lon'])).flatten()
            # put the residuals in an array
            td = target_dict['%s.%s' % (p['NetworkCode'], p['StationCode'])]
            dvx[s] = v[0] - np.array(td['vx'])
            dvy[s] = v[1] - np.array(td['vy'])
            dvz[s] = v[2] - np.array(td['vz'])

        scale = False
        A = self.build_design(etm_objects, scale=scale)

        # loop through the frequencies
        L = np.row_stack((dvx.flatten(), dvy.flatten(), dvz.flatten())).flatten()

        c, _, _, _, wrms, _, it = adjust_lsq(A, L)

        tqdm.write(' -- Velocity space transformation:   ' + ' '.join(['%7.4f' % cc for cc in c]) +
                   ' wrms: %.3f it: %i' % (wrms * 1000, it))

        # loop through all the polyhedrons
        for poly in tqdm(self, ncols=160, desc=' -- Applying velocity space transformation'):
            t = np.repeat(poly.date.fyear - ref_date.fyear, poly.Ax.shape[0])

            poly.vertices['x'] = poly.vertices['x'] - t * np.dot(poly.ax(scale=scale), c)
            poly.vertices['y'] = poly.vertices['y'] - t * np.dot(poly.ay(scale=scale), c)
            poly.vertices['z'] = poly.vertices['z'] - t * np.dot(poly.az(scale=scale), c)

        tqdm.write(' -- Reporting velocity space residuals (in mm/yr) before and after frame alignment\n'
                   '         Before   After |     Before   After  ')

        dvxa = np.zeros(len(etm_objects))
        dvya = np.zeros(len(etm_objects))
        dvza = np.zeros(len(etm_objects))
        for s, p in enumerate(etm_objects):
            # redo the etm for this station
            stn_ts = self.get_station(p['NetworkCode'], p['StationCode'])

            self.cnn.query('DELETE FROM etms WHERE "soln" = \'gamit\' AND "NetworkCode" = \'%s\' AND '
                           '"StationCode" = \'%s\' AND stack = \'%s\''
                           % (p['NetworkCode'], p['StationCode'], self.name))
            # save the time series
            ts = pyETM.GamitSoln(self.cnn, stn_ts, p['NetworkCode'], p['StationCode'], self.name)
            # create the ETM object
            pyETM.GamitETM(self.cnn, p['NetworkCode'], p['StationCode'], False, False, ts)

            q = self.cnn.query_float('SELECT params FROM etms '
                                     'WHERE "object" = \'polynomial\' AND soln = \'gamit\' '
                                     'AND "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' AND stack = \'%s\''
                                     % (p['NetworkCode'], p['StationCode'], self.name), as_dict=True)[0]

            params = np.array(q['params'])
            params = params.reshape((3, params.shape[0] // 3))
            # first column, i.e. params[:, 0], is the position
            # second column, params[:, 1], is the velocity, which is what we are interested in
            v = np.array(lg2ct(params[0, 1], params[1, 1], params[2, 1], p['lat'], p['lon'])).flatten()
            # put the residuals in an array
            td = target_dict['%s.%s' % (p['NetworkCode'], p['StationCode'])]
            dvxa[s] = v[0] - np.array(td['vx'])
            dvya[s] = v[1] - np.array(td['vy'])
            dvza[s] = v[2] - np.array(td['vz'])

            lla = self.cnn.query_float('SELECT lat, lon FROM stations WHERE "NetworkCode" = \'%s\' '
                                       'AND "StationCode" = \'%s\'' % (p['NetworkCode'], p['StationCode']))[0]

            print_residuals(p['NetworkCode'], p['StationCode'],
                            np.array([[dvx[s], dvxa[s]], [dvy[s], dvya[s]], [dvz[s], dvza[s]]]), lla[0], lla[1],
                            ['X', 'Y', 'Z'])

        # save the position space residuals
        self.velocity_space = {'stations': {'codes': [p['NetworkCode'] + '.' + p['StationCode'] for p in etm_objects],
                                            'latlon': [[p['lat'], p['lon']] for p in etm_objects]},
                               'residuals_before_alignment':
                                   np.column_stack((dvx.flatten(), dvy.flatten(), dvz.flatten())).tolist(),
                               'residuals_after_alignment':
                                   np.column_stack((dvxa.flatten(), dvya.flatten(), dvza.flatten())).tolist(),
                               'reference_date': ref_date,
                               'helmert_transformation': c.tolist(),
                               'comments': 'Velocity space transformation.'}

        tqdm.write(' -- Done!')
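Once the velocity-space parameters c have been estimated, every polyhedron is corrected by subtracting t * (A_component . c), with t the time elapsed since the reference epoch. A small sketch of that correction under the same simplifying assumption as the previous sketch (translation-only design matrix, invented values):

import numpy as np

n_stn = 4
c = np.array([0.001, -0.002, 0.0005])    # hypothetical velocity-space parameters (m/yr), translations only
ref_fyear, poly_fyear = 2015.000, 2018.500

x = np.full(n_stn, 2000000.0)            # hypothetical ECEF X coordinates of the polyhedron vertices
ax = np.tile(np.eye(3)[0], (n_stn, 1))   # X rows of the translation-only design matrix

t = np.repeat(poly_fyear - ref_fyear, n_stn)
x_corrected = x - t * np.dot(ax, c)      # mirrors poly.vertices['x'] -= t * dot(poly.ax(scale), c)
print(x_corrected - x)                   # every vertex shifts by -t * c[0] = -3.5 mm in this sketch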