Example #1
def estimate_time_func(date_list, dis_ts, model):
    """
    Deformation model estimator, using a suite of linear, periodic, and step functions.

    Gm = d

    Parameters: date_list - list of str, dates in YYYYMMDD format
                dis_ts    - 2D np.ndarray, displacement observation in size of (num_date, num_pixel)
                model     - dict of time functions, e.g.:
                    {'polynomial' : 2,            # int, degree of the polynomial: 1 (linear), 2 (quadratic), 3 (cubic), etc.
                     'periodic'   : [1.0, 0.5],   # list of float, period(s) in years. 1.0 (annual), 0.5 (semiannual), etc.
                     'step'       : ['20061014'], # list of str, date(s) in YYYYMMDD.
                     ...
                     }
    Returns:    G         - 2D np.ndarray, design matrix           in size of (num_date, num_par)
                m         - 2D np.ndarray, parameter solution      in size of (num_par, num_pixel)
                e2        - 1D np.ndarray, sum of squared residual in size of (num_pixel,)
    """

    G = timeseries.get_design_matrix4time_func(date_list, model)

    # least squares solver
    # Opt. 1: m = np.linalg.pinv(G).dot(dis_ts)
    # Opt. 2: m = scipy.linalg.lstsq(G, dis_ts, cond=1e-15)[0]
    # NumPy (Opt. 1) is not used because it cannot handle NaN values in dis_ts
    m, e2 = linalg.lstsq(G, dis_ts)[:2]

    return G, m, e2
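
A minimal usage sketch for the estimator above (not part of the original snippet): the dates and displacement values are hypothetical, and the imports mirror what the function assumes from its enclosing MintPy module (NumPy, scipy.linalg, and the timeseries class from mintpy.objects).

import numpy as np
from scipy import linalg                 # provides the linalg.lstsq used above
from mintpy.objects import timeseries    # provides get_design_matrix4time_func

# hypothetical input: 12 monthly acquisitions, 3 pixels
date_list = ['2017{:02d}01'.format(mo) for mo in range(1, 13)]
dis_ts = np.random.rand(len(date_list), 3).astype(np.float32)

# linear trend plus an annual periodic term
model = {'polynomial': 1, 'periodic': [1.0]}

G, m, e2 = estimate_time_func(date_list, dis_ts, model)
print(G.shape)    # (num_date, num_par)   = (12, 4)
print(m.shape)    # (num_par, num_pixel)  = (4, 3)
print(e2.shape)   # (num_pixel,)          = (3,)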
Example #2
def get_design_matrix4defo(inps):
    """Get the design matrix for ground surface deformation
    Parameters: inps   - namespace
    Returns:    G_defo - 2D np.ndarray in float32 in size of [num_date, num_param]
    """

    # key msg
    msg = '-' * 80
    msg += '\ncorrect topographic phase residual (DEM error) (Fattahi & Amelung, 2013, IEEE-TGRS)'
    msg += '\nordinary least squares (OLS) inversion with L2-norm minimization on: phase'
    if inps.phaseVelocity:
        msg += ' velocity'
    msg += "\ntemporal deformation model: polynomial order = {}".format(
        inps.polyOrder)
    if inps.stepFuncDate:
        msg += "\ntemporal deformation model: step functions at {}".format(
            inps.stepFuncDate)
    msg += '\n' + '-' * 80
    print(msg)

    # get design matrix for temporal deformation model
    model = dict()
    model['polynomial'] = inps.polyOrder
    model['step'] = inps.stepFuncDate
    date_list = timeseries(inps.timeseries_file).get_date_list()
    G_defo = timeseries.get_design_matrix4time_func(date_list, model)

    return G_defo
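
A sketch of how the inps namespace might be populated before calling this helper (not from the original source): the attribute names match those read above, while the file path and model settings are hypothetical and assume a MintPy time-series HDF5 file exists at that path.

from argparse import Namespace

# hypothetical namespace carrying only the attributes the function reads
inps = Namespace(
    timeseries_file='timeseries.h5',   # hypothetical MintPy time-series file
    polyOrder=2,                       # quadratic temporal deformation model
    stepFuncDate=['20170908'],         # hypothetical step date (e.g. an earthquake)
    phaseVelocity=False,
)

G_defo = get_design_matrix4defo(inps)
print(G_defo.shape)   # (num_date, num_param) = (num_date, 4): 3 polynomial terms + 1 step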
Example #3
def timeseries2velocity(date_list, defo_list):
    # date_list --> design_matrix
    A = timeseries.get_design_matrix4time_func(date_list)
    A_inv = np.linalg.pinv(A)

    # least squares inversion
    defo = np.array(defo_list, np.float32).reshape(-1, 1)
    vel = np.dot(A_inv, defo)[1, :]
    return vel
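
A usage sketch with hypothetical dates and displacements, assuming the same NumPy/MintPy imports as in Example #1. With the default (linear) time function, the design matrix columns are [constant offset, velocity], which is why row index 1 of the solution is taken as the velocity.

date_list = ['20190101', '20190701', '20200101', '20200701']
defo_list = [0.000, 0.012, 0.024, 0.037]   # hypothetical LOS displacement in meters

vel = timeseries2velocity(date_list, defo_list)
print(vel)   # roughly 0.024 m/yr for this synthetic series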
Example #4
def get_gps_los_velocity(self, geom_obj, start_date=None, end_date=None, ref_site=None, gps_comp='enu2los'):
    dates, dis = self.read_gps_los_displacement(geom_obj,
                                                start_date=start_date,
                                                end_date=end_date,
                                                ref_site=ref_site,
                                                gps_comp=gps_comp)[0:2]
    date_list = [dt.strftime(i, '%Y%m%d') for i in dates]
    if len(date_list) > 2:
        A = timeseries.get_design_matrix4time_func(date_list)
        self.velocity = np.dot(np.linalg.pinv(A), dis)[1]
    else:
        self.velocity = np.nan
    return self.velocity
Example #5
def estimate_time_func(model, date_list, dis_ts, ref_date=None):
    """
    Deformation model estimator, using a suite of linear, periodic, step, exponential, and logarithmic functions.

    Gm = d

    Parameters: date_list - list of str, dates in YYYYMMDD format
                dis_ts    - 2D np.ndarray, displacement observation in size of (num_date, num_pixel)
                model     - dict of time functions, e.g.:
                            {'polynomial' : 2,                    # int, degree of the polynomial: 1 (linear), 2 (quadratic), 3 (cubic), etc.
                             'periodic'   : [1.0, 0.5],           # list of float, period(s) in years. 1.0 (annual), 0.5 (semiannual), etc.
                             'step'       : ['20061014'],         # list of str, date(s) in YYYYMMDD.
                             'exp'        : {'20181026': [60],    # dict, key for onset time in YYYYMMDD and value for char. times in days.
                                             ...
                                            },
                             'log'        : {'20161231': [80.5],  # dict, key for onset time in YYYYMMDD and value for char. times in days.
                                             '20190125': [100, 200],
                                             ...
                                            },
                             ...
                             }
    Returns:    G         - 2D np.ndarray, design matrix           in size of (num_date, num_param)
                m         - 2D np.ndarray, parameter solution      in size of (num_param, num_pixel)
                e2        - 1D np.ndarray, sum of squared residual in size of (num_pixel,)
    """

    G = timeseries.get_design_matrix4time_func(date_list, model, refDate=ref_date)

    # least squares solver
    # Opt. 1: m = np.linalg.pinv(G).dot(dis_ts)
    # Opt. 2: m = scipy.linalg.lstsq(G, dis_ts, cond=1e-15)[0]
    # NumPy (Opt. 1) is not used because it cannot handle NaN values in dis_ts
    m, e2 = linalg.lstsq(G, dis_ts, cond=None)[:2]

    # check for an empty e2 caused by a rank-deficient (singular) G matrix
    e2 = np.array(e2)
    if e2.size == 0:
        print('\nWarning: empty e2 residual array due to a redundant or rank-deficient (singular) G matrix.')
        print('Please check: https://docs.scipy.org/doc/scipy/reference/generated/scipy.linalg.lstsq.html#scipy.linalg.lstsq')
        print('The issue may be due to:')
        print('\t1) very small characteristic time(s), tau, of the exp/log function(s)')
        print('\t2) exp/log onset time(s) far earlier than the first date of the time series.')
        print('Try different characteristic and/or onset times.')
        print('Your G matrix of the temporal model: \n', G)
        raise ValueError('G matrix is redundant/rank-deficient!')

    return G, m, e2
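
A sketch of the richer model dictionary this variant accepts; all dates, characteristic times, and data below are hypothetical, and the same NumPy/SciPy/MintPy imports as in Example #1 are assumed. As the check above warns, exp/log onset dates should fall within the observation period and characteristic times should not be tiny, otherwise G becomes rank-deficient and a ValueError is raised.

import numpy as np

# hypothetical monthly acquisitions over 2018-2020, two pixels of zero displacement
date_list = ['{}{:02d}01'.format(yr, mo) for yr in (2018, 2019, 2020) for mo in range(1, 13)]
dis_ts = np.zeros((len(date_list), 2), dtype=np.float32)

model = {
    'polynomial': 1,                 # linear trend
    'periodic': [1.0],               # annual cycle
    'step': ['20190706'],            # coseismic step (hypothetical event date)
    'log': {'20190706': [60.]},      # post-seismic relaxation, 60-day characteristic time
}

G, m, e2 = estimate_time_func(model, date_list, dis_ts, ref_date=date_list[0])
print(G.shape, m.shape, e2.shape)    # (36, 6), (6, 2), (2,)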
Example #6
def estimate_velocity(date_list, dis_ts, model):
    """
    Deformation model estimator, using a suite of linear, periodic, and step functions.

    Gm = d

    Parameters: date_list - list of str, dates in YYYYMMDD format
                dis_ts    - 2D np.ndarray, displacement observation in size of (num_date, num_pixel)
                model     - dict of time functions, e.g.:
                            {'polynomial' : 2,            # int, degree of the polynomial: 1 (linear), 2 (quadratic), 3 (cubic), etc.
                             'periodic'   : [1.0, 0.5],   # list of float, period(s) in years. 1.0 (annual), 0.5 (semiannual), etc.
                             'step'       : ['20061014'], # list of str, date(s) in YYYYMMDD.
                             ...
                             }
    Returns:    G         - 2D np.ndarray, design matrix           in size of (num_date, num_par)
                m         - 2D np.ndarray, parameter solution      in size of (num_par, num_pixel)
                e2        - 1D np.ndarray, sum of squared residual in size of (num_pixel,)
    """

    print(
        'estimate deformation model with the following assumed time functions:'
    )
    for key, value in model.items():
        print('{:<10} : {}'.format(key, value))

    if 'polynomial' not in model.keys():
        raise ValueError(
            'linear/polynomial model is NOT included! Are you sure?!')

    G = timeseries.get_design_matrix4time_func(date_list, model)

    # least squares solver
    # an equivalent alternative: m = np.dot(np.linalg.pinv(G), dis_ts)
    #                        or: m = scipy.linalg.lstsq(G, dis_ts, cond=1e-15)[0]
    # the NumPy pinv option is not used because it cannot handle NaN values in dis_ts
    m, e2 = linalg.lstsq(G, dis_ts)[:2]

    return G, m, e2
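
A short sketch exercising the behavior above with hypothetical inputs (same NumPy/SciPy/MintPy imports as in Example #1): the first call succeeds and row 1 of the solution holds the linear velocity, while omitting the 'polynomial' key triggers the ValueError.

import numpy as np

date_list = ['{}{:02d}01'.format(yr, mo) for yr in (2019, 2020) for mo in range(1, 13)]
dis_ts = np.random.rand(len(date_list), 4).astype(np.float32)   # 4 hypothetical pixels

G, m, e2 = estimate_velocity(date_list, dis_ts, model={'polynomial': 1, 'periodic': [1.0]})
vel = m[1, :]   # row 0: constant offset, row 1: linear velocity

try:
    estimate_velocity(date_list, dis_ts, model={'periodic': [1.0]})
except ValueError as err:
    print(err)   # linear/polynomial model is NOT included! Are you sure?!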
Example #7
def main(iargs=None):
    inps = cmd_line_parse(iargs)

    # --update option
    if inps.update_mode and run_or_skip(inps) == 'skip':
        return inps.outfile

    start_time = time.time()
    inps = read_geometry(inps)

    # key msg
    msg = '-'*80
    msg += '\ncorrect topographic phase residual (DEM error) (Fattahi & Amelung, 2013, IEEE-TGRS)'
    msg += '\nordinary least squares (OLS) inversion with L2-norm minimization on: phase'
    if inps.phaseVelocity:
        msg += ' velocity'
    if inps.rangeDist.size != 1:
        msg += ' (pixel-wise)'
    msg += "\ntemporal deformation model: polynomial order = {}".format(inps.polyOrder)
    if inps.stepFuncDate:
        msg += "\ntemporal deformation model: step functions at {}".format(inps.stepFuncDate)
    msg += '\n'+'-'*80
    print(msg)

    # get design matrix for temporal deformation model
    model = dict()
    model['polynomial'] = inps.polyOrder
    model['step'] = inps.stepFuncDate
    date_list = timeseries(inps.timeseries_file).get_date_list()
    G_defo = timeseries.get_design_matrix4time_func(date_list, model)

    inps = correct_dem_error(inps, G_defo)

    m, s = divmod(time.time()-start_time, 60)
    print('time used: {:02.0f} mins {:02.1f} secs.'.format(m, s))
    return inps.outfile
Example #8
def read_init_info(inps):
    # Time Series Info
    atr = readfile.read_attribute(inps.file[0])
    inps.key = atr['FILE_TYPE']
    if inps.key == 'timeseries':
        obj = timeseries(inps.file[0])
    elif inps.key == 'giantTimeseries':
        obj = giantTimeseries(inps.file[0])
    elif inps.key == 'HDFEOS':
        obj = HDFEOS(inps.file[0])
    else:
        raise ValueError('input file is {}, not timeseries.'.format(inps.key))
    obj.open(print_msg=inps.print_msg)

    if not inps.file_label:
        inps.file_label = []
        for fname in inps.file:
            fbase = os.path.splitext(os.path.basename(fname))[0]
            fbase = fbase.replace('timeseries', '')
            inps.file_label.append(fbase)

    # default mask file
    if not inps.mask_file and 'msk' not in inps.file[0]:
        dir_name = os.path.dirname(inps.file[0])
        if 'Y_FIRST' in atr.keys():
            inps.mask_file = os.path.join(dir_name, 'geo_maskTempCoh.h5')
        else:
            inps.mask_file = os.path.join(dir_name, 'maskTempCoh.h5')
        if not os.path.isfile(inps.mask_file):
            inps.mask_file = None

    ## date info
    inps.date_list = obj.dateList
    inps.num_date = len(inps.date_list)
    if inps.start_date:
        inps.date_list = [
            i for i in inps.date_list if int(i) >= int(inps.start_date)
        ]
    if inps.end_date:
        inps.date_list = [
            i for i in inps.date_list if int(i) <= int(inps.end_date)
        ]
    inps.num_date = len(inps.date_list)
    inps.dates, inps.yearList = ptime.date_list2vector(inps.date_list)

    (inps.ex_date_list, inps.ex_dates,
     inps.ex_flag) = read_exclude_date(inps.ex_date_list, inps.date_list)

    # reference date/index
    if not inps.ref_date:
        inps.ref_date = atr.get('REF_DATE', None)
    if inps.ref_date:
        inps.ref_idx = inps.date_list.index(inps.ref_date)
    else:
        inps.ref_idx = None

    # date/index of interest for initial display
    if not inps.idx:
        if (not inps.ref_idx) or (inps.ref_idx < inps.num_date / 2.):
            inps.idx = inps.num_date - 2
        else:
            inps.idx = 2

    # Display Unit
    (inps.disp_unit,
     inps.unit_fac) = pp.scale_data2disp_unit(metadata=atr,
                                              disp_unit=inps.disp_unit)[1:3]

    # Map info - coordinate unit
    inps.coord_unit = atr.get('Y_UNIT', 'degrees').lower()

    # Read Error List
    inps.ts_plot_func = plot_ts_scatter
    inps.error_ts = None
    inps.ex_error_ts = None
    if inps.error_file:
        # assign plot function
        inps.ts_plot_func = plot_ts_errorbar

        # read error file
        error_fc = np.loadtxt(inps.error_file, dtype=bytes).astype(str)
        inps.error_ts = error_fc[:, 1].astype(np.float32) * inps.unit_fac

        # update error values with the exclude dates
        if inps.ex_date_list:
            e_ts = inps.error_ts[:]
            inps.ex_error_ts = e_ts[inps.ex_flag == 0]
            inps.error_ts = e_ts[inps.ex_flag == 1]

    # Zero displacement for 1st acquisition
    if inps.zero_first:
        inps.zero_idx = min(0, np.min(np.where(inps.ex_flag)[0]))

    # default lookup table file and coordinate object
    if not inps.lookup_file:
        inps.lookup_file = ut.get_lookup_file('./inputs/geometryRadar.h5')
    inps.coord = ut.coordinate(atr, inps.lookup_file)

    ## size and lalo info
    inps.pix_box, inps.geo_box = subset.subset_input_dict2box(vars(inps), atr)
    inps.pix_box = inps.coord.check_box_within_data_coverage(inps.pix_box)
    inps.geo_box = inps.coord.box_pixel2geo(inps.pix_box)
    data_box = (0, 0, int(atr['WIDTH']), int(atr['LENGTH']))
    vprint('data   coverage in y/x: ' + str(data_box))
    vprint('subset coverage in y/x: ' + str(inps.pix_box))
    vprint('data   coverage in lat/lon: ' +
           str(inps.coord.box_pixel2geo(data_box)))
    vprint('subset coverage in lat/lon: ' + str(inps.geo_box))
    vprint(
        '------------------------------------------------------------------------'
    )

    # calculate multilook_num
    # ONLY IF:
    #   inps.multilook is True (no --nomultilook input) AND
    #   inps.multilook_num ==1 (no --multilook-num input)
    # Note: inps.multilook is used for this check ONLY
    # Note: multilooking is only applied to the 3D data cubes and their related operations:
    # e.g. spatial indexing, referencing, etc. All the other variables are in the original grid
    # so that users get the same result as the non-multilooked version.
    if inps.multilook and inps.multilook_num == 1:
        inps.multilook_num = pp.auto_multilook_num(inps.pix_box,
                                                   inps.num_date,
                                                   print_msg=inps.print_msg)

    ## reference pixel
    if not inps.ref_lalo and 'REF_LAT' in atr.keys():
        inps.ref_lalo = (float(atr['REF_LAT']), float(atr['REF_LON']))
    if inps.ref_lalo:
        # set longitude to [-180, 180)
        if inps.coord_unit.lower().startswith(
                'deg') and inps.ref_lalo[1] >= 180.:
            inps.ref_lalo[1] -= 360.
        # ref_lalo --> ref_yx if not set in cmd
        if not inps.ref_yx:
            inps.ref_yx = inps.coord.geo2radar(inps.ref_lalo[0],
                                               inps.ref_lalo[1],
                                               print_msg=False)[0:2]

    # use REF_Y/X if ref_yx not set in cmd
    if not inps.ref_yx and 'REF_Y' in atr.keys():
        inps.ref_yx = (int(atr['REF_Y']), int(atr['REF_X']))

    # ref_yx --> ref_lalo if in geo-coord
    # for plotting purpose only
    if inps.ref_yx and 'Y_FIRST' in atr.keys():
        inps.ref_lalo = inps.coord.radar2geo(inps.ref_yx[0],
                                             inps.ref_yx[1],
                                             print_msg=False)[0:2]

    # do not plot native reference point if it's out of the coverage due to subset
    if (inps.ref_yx and 'Y_FIRST' in atr.keys()
            and inps.ref_yx == (int(atr['REF_Y']), int(atr['REF_X']))
            and not (inps.pix_box[0] <= inps.ref_yx[1] < inps.pix_box[2]
                     and inps.pix_box[1] <= inps.ref_yx[0] < inps.pix_box[3])):
        inps.disp_ref_pixel = False
        print('the native REF_Y/X is out of the subset box, thus it is not displayed')

    ## initial pixel coord
    if inps.lalo:
        inps.yx = inps.coord.geo2radar(inps.lalo[0],
                                       inps.lalo[1],
                                       print_msg=False)[0:2]
    try:
        inps.lalo = inps.coord.radar2geo(inps.yx[0],
                                         inps.yx[1],
                                         print_msg=False)[0:2]
    except:
        inps.lalo = None

    ## figure settings
    # Flip up-down / left-right
    if inps.auto_flip:
        inps.flip_lr, inps.flip_ud = pp.auto_flip_direction(
            atr, print_msg=inps.print_msg)

    # Transparency - Alpha
    if not inps.transparency:
        # Auto adjust transparency value when showing shaded relief DEM
        if inps.dem_file and inps.disp_dem_shade:
            inps.transparency = 0.7
        else:
            inps.transparency = 1.0

    ## display unit and wrap
    # if wrap_step == 2*np.pi (default value), set disp_unit_img = radian;
    # otherwise set disp_unit_img = disp_unit
    inps.disp_unit_img = inps.disp_unit
    if inps.wrap:
        inps.range2phase = -4. * np.pi / float(atr['WAVELENGTH'])
        if 'cm' == inps.disp_unit.split('/')[0]:
            inps.range2phase /= 100.
        elif 'mm' == inps.disp_unit.split('/')[0]:
            inps.range2phase /= 1000.
        elif 'm' == inps.disp_unit.split('/')[0]:
            inps.range2phase /= 1.
        else:
            raise ValueError('un-recognized display unit: {}'.format(
                inps.disp_unit))

        if (inps.wrap_range[1] - inps.wrap_range[0]) == 2 * np.pi:
            inps.disp_unit_img = 'radian'
        inps.vlim = inps.wrap_range
    inps.cbar_label = 'Displacement [{}]'.format(inps.disp_unit_img)

    ## fit a suite of time func to the time series
    inps.model, inps.num_param = ts2vel.read_inps2model(
        inps, date_list=inps.date_list)

    # dense TS for plotting
    inps.date_list_fit = ptime.get_date_range(inps.date_list[0],
                                              inps.date_list[-1])
    inps.dates_fit = ptime.date_list2vector(inps.date_list_fit)[0]
    inps.G_fit = timeseries.get_design_matrix4time_func(
        date_list=inps.date_list_fit, model=inps.model)
    return inps, atr
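
A sketch of how the dense design matrix prepared at the end of read_init_info() might be used later when plotting the fitted model; the estimation step and the dis variable here are assumptions for illustration, not part of the snippet.

import numpy as np
from mintpy.objects import timeseries

# assume `inps` was returned by read_init_info() and `dis` is the 1D displacement
# time series of the selected pixel (in the same unit as inps.disp_unit)
G = timeseries.get_design_matrix4time_func(date_list=inps.date_list, model=inps.model)
m = np.linalg.pinv(G).dot(dis)      # least-squares fit on the observed epochs
dis_fit = inps.G_fit.dot(m)         # evaluate the fitted model on the dense date range

# dis_fit is aligned with inps.dates_fit, e.g. for plotting:
# ax.plot(inps.dates_fit, dis_fit, '--')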