Example #1
# assumed imports from the enclosing MintPy module (not shown in the excerpt)
import numpy as np
from mintpy.utils import utils as ut


def calc_temporal_coherence(ifgram, G, X):
    """Calculate the temporal coherence from the network inversion results

    Parameters: ifgram   - 2D np.array in size of (num_ifgram, num_pixel), phase or offset
                G        - 2D np.array in size of (num_ifgram, num_date-1), design matrix A or B
                X        - 2D np.array in size of (num_date-1, num_pixel), solution
    Returns:    temp_coh - 1D np.array in size of (num_pixel), temporal coherence
    """

    num_ifgram, num_pixel = ifgram.shape
    temp_coh = np.zeros(num_pixel, dtype=np.float32)

    # chunk size in number of pixels
    chunk_size = int(ut.round_to_1(2e5 / num_ifgram))
    if num_pixel > chunk_size:
        num_chunk = int(np.ceil(num_pixel / chunk_size))
        num_chunk_step = int(ut.round_to_1(num_chunk / 5))
        print(('calculating temporal coherence in chunks of {} pixels'
               ': {} chunks in total ...').format(chunk_size, num_chunk))

        for i in range(num_chunk):
            c0 = i * chunk_size
            c1 = min((i + 1) * chunk_size, num_pixel)

            # calc residual
            ifgram_diff = ifgram[:, c0:c1] - np.dot(G, X[:, c0:c1])

            # calc temporal coherence
            temp_coh[c0:c1] = np.abs(np.sum(np.exp(1j * ifgram_diff),
                                            axis=0)) / num_ifgram

            # print out message
            if (i + 1) % num_chunk_step == 0:
                print('chunk {} / {}'.format(i + 1, num_chunk))

    else:
        # calc residual
        ifgram_diff = ifgram - np.dot(G, X)

        # calc temporal coherence
        temp_coh = np.abs(np.sum(np.exp(1j * ifgram_diff),
                                 axis=0)) / num_ifgram

    return temp_coh
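
A minimal usage sketch with synthetic inputs (the shapes and noise level below are assumptions, not MintPy data). The residual phase is ifgram - G @ X, and the temporal coherence per pixel is |sum_i exp(j * residual_i)| / num_ifgram, so small residuals give values close to 1:

# usage sketch with synthetic inputs; shapes and noise level are assumptions
import numpy as np

num_ifgram, num_date, num_pixel = 6, 4, 200
G = np.random.choice([-1., 0., 1.], size=(num_ifgram, num_date - 1))  # toy design matrix
X = np.random.randn(num_date - 1, num_pixel).astype(np.float32)       # toy solution
ifgram = np.dot(G, X) + 0.1 * np.random.randn(num_ifgram, num_pixel)  # add small residual phase [rad]

temp_coh = calc_temporal_coherence(ifgram, G, X)
print(temp_coh.shape)   # (200,)
print(temp_coh.min())   # close to 1, since the residual phase is small
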
Example #2
# assumed imports from the enclosing MintPy module (not shown in the excerpt)
from mintpy.objects import geometry, timeseries
from mintpy.utils import utils as ut


def split2boxes(ts_file,
                geom_file=None,
                memory_size=4,
                num_step=0,
                print_msg=True):
    """Split into chunks in rows to reduce memory usage
    Parameters: ts_file     - str, path of time-series h5 file
                geom_file   - str, path of geometry h5 file (optional, to check for bperp)
                memory_size - float, max memory to use in GB
                num_step    - int, number of estimated step functions
                print_msg   - bool
    Returns:    box_list    - list of tuple of 4 int
                num_box     - int, number of boxes
    """
    ts_obj = timeseries(ts_file)
    ts_obj.open(print_msg=False)
    length, width = ts_obj.length, ts_obj.width

    # 1st dimension size: ts (obs / cor / res / step) + dem_err/inc_angle/rg_dist (+pbase)
    num_epoch = ts_obj.numDate * 3 + num_step + 3
    if geom_file:
        geom_obj = geometry(geom_file)
        geom_obj.open(print_msg=False)
        if 'bperp' in geom_obj.datasetNames:
            num_epoch += ts_obj.numDate

    # split in lines based on the input memory limit
    y_step = (memory_size * (1e3**3)) / (num_epoch * width * 4)

    # calibrate based on experience
    y_step = int(ut.round_to_1(y_step * 0.7))

    num_box = int((length - 1) / y_step) + 1
    if print_msg and num_box > 1:
        print('maximum memory size: %.1E GB' % memory_size)
        print('split %d lines into %d patches for processing' %
              (length, num_box))
        print('    with each patch up to %d lines' % y_step)

    # y_step / num_box --> box_list
    box_list = []
    for i in range(num_box):
        y0 = i * y_step
        y1 = min([length, y0 + y_step])
        box = (0, y0, width, y1)
        box_list.append(box)

    return box_list, num_box
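
A hypothetical call (the file names below are assumptions for a typical MintPy working directory). Each image row holds num_epoch * width float32 values (4 bytes each), so the memory budget translates directly into the number of rows per box:

# hypothetical usage; file names are assumptions
box_list, num_box = split2boxes('timeseries.h5',
                                geom_file='inputs/geometryGeo.h5',
                                memory_size=4)
for i, (x0, y0, x1, y1) in enumerate(box_list):
    print('box {}/{}: rows {}-{}, cols {}-{}'.format(i + 1, num_box, y0, y1, x0, x1))
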
Example #3
# assumed imports from the enclosing MintPy module (not shown in the excerpt)
from mintpy.objects import ifgramStack
from mintpy.utils import utils as ut


def split2boxes(ifgram_file, memory_size=4, print_msg=True):
    """Split into chunks in rows to reduce memory usage
    Parameters: ifgram_file   - str, path of ifgramStack h5 file
                memory_size   - float, max memory to use in GB
                print_msg     - bool
    Returns:    box_list      - list of tuple of 4 int
                num_box       - int, number of boxes
    """
    ifg_obj = ifgramStack(ifgram_file)
    ifg_obj.open(print_msg=False)

    # 1st dimension size: defo obs (phase / offset) + weight + time-series
    num_epoch = ifg_obj.numIfgram * 2 + ifg_obj.numDate + 5
    length, width = ifg_obj.length, ifg_obj.width

    # split in lines based on the input memory limit
    y_step = (memory_size * (1e3**3)) / (num_epoch * width * 4)

    # calibrate based on experience
    y_step = int(ut.round_to_1(y_step * 0.6))

    num_box = int((length - 1) / y_step) + 1
    if print_msg and num_box > 1:
        print('maximum memory size: %.1E GB' % memory_size)
        print('split %d lines into %d patches for processing' %
              (length, num_box))
        print('    with each patch up to %d lines' % y_step)

    # y_step / num_box --> box_list
    box_list = []
    for i in range(num_box):
        y0 = i * y_step
        y1 = min([length, y0 + y_step])
        box = (0, y0, width, y1)
        box_list.append(box)

    return box_list, num_box
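
The row chunking follows directly from the memory budget: each row costs num_epoch * width * 4 bytes in float32. A small arithmetic sketch with an assumed stack size (the dimensions below are made up for illustration):

# arithmetic sketch of the y_step / num_box calculation; dimensions are assumptions
num_ifgram, num_date = 100, 40
length, width = 2500, 3000
memory_size = 4                                            # GB

num_epoch = num_ifgram * 2 + num_date + 5                  # 245 datasets along the 1st dimension
y_step = (memory_size * 1e9) / (num_epoch * width * 4)     # ~1360 rows fit in the budget
y_step = int(ut.round_to_1(y_step * 0.6))                  # calibrated and rounded: 800 rows
num_box = int((length - 1) / y_step) + 1                   # 4 boxes of up to 800 rows each
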
Example #4
# assumed imports from the enclosing MintPy module (not shown in the excerpt);
# prepare_los_geometry() and get_datetime_list() are helpers defined alongside
# this function and are omitted here
import os
import warnings

import numpy as np
import pysolid

from mintpy.objects.resample import resample
from mintpy.utils import ptime, readfile, utils as ut, writefile


def calc_solid_earth_tides_timeseries(ts_file,
                                      geom_file,
                                      set_file,
                                      date_wise_acq_time=False,
                                      update_mode=True,
                                      verbose=False):
    """Calculate the time-series of solid Earth tides (SET) in LOS direction.
    Parameters: ts_file   - str, path of the time-series HDF5 file
                geom_file - str, path of the geometry HDF5 file
                set_file  - str, output SET time-series file
                date_wise_acq_time - bool, use the exact date-wise acquisition time
    Returns:    set_file  - str, output SET time-series file
    """

    if update_mode and os.path.isfile(set_file):
        print('update mode: ON')
        print('skip re-calculating and use existing file: {}'.format(set_file))
        return set_file

    # prepare LOS geometry: geocoding if in radar-coordinates
    inc_angle, head_angle, atr_geo = prepare_los_geometry(geom_file)

    # get LOS unit vector
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", category=RuntimeWarning)
        unit_vec = [
            np.sin(inc_angle) * np.cos(head_angle) * -1,
            np.sin(inc_angle) * np.sin(head_angle),
            np.cos(inc_angle),
        ]

    # prepare datetime
    dt_objs = get_datetime_list(ts_file, date_wise_acq_time=date_wise_acq_time)

    # initiate data matrix
    num_date = len(dt_objs)
    length = int(atr_geo['LENGTH'])
    width = int(atr_geo['WIDTH'])
    ts_tide = np.zeros((num_date, length, width), dtype=np.float32)
    # default step size in meter: ~30 pixels
    step_size = ut.round_to_1(abs(float(atr_geo['Y_STEP'])) * 108e3 * 30)

    # loop for calc
    print('\n' + '-' * 50)
    print(
        'calculating solid Earth tides using PySolid (Milbert, 2018; Yunjun et al., 2022) ...'
    )
    prog_bar = ptime.progressBar(maxValue=num_date, print_msg=not verbose)
    for i, dt_obj in enumerate(dt_objs):
        # calculate tide in ENU direction
        (tide_e, tide_n,
         tide_u) = pysolid.calc_solid_earth_tides_grid(dt_obj,
                                                       atr_geo,
                                                       step_size=step_size,
                                                       display=False,
                                                       verbose=verbose)

        # convert ENU to LOS direction
        # sign convention: positive for motion towards satellite
        ts_tide[i, :, :] = (tide_e * unit_vec[0] + tide_n * unit_vec[1] +
                            tide_u * unit_vec[2])

        prog_bar.update(i + 1,
                        suffix='{} ({}/{})'.format(dt_obj.isoformat(), i + 1,
                                                   num_date))
    prog_bar.close()

    # radar-coding if input in radar-coordinates
    # use ts_file to avoid potential missing CENTER_LINE_UTC attributes in geom_file from alosStack
    atr = readfile.read_attribute(ts_file)
    if 'Y_FIRST' not in atr.keys():
        print('radar-coding the LOS tides time-series ...')
        res_obj = resample(lut_file=geom_file)
        res_obj.open()
        res_obj.src_meta = atr_geo
        res_obj.prepare()

        # resample data
        box = res_obj.src_box_list[0]
        ts_tide = res_obj.run_resample(src_data=ts_tide[:, box[1]:box[3],
                                                        box[0]:box[2]])

    ## output
    # attribute
    atr['FILE_TYPE'] = 'timeseries'
    atr['UNIT'] = 'm'
    for key in ['REF_Y', 'REF_X', 'REF_DATE']:
        if key in atr.keys():
            atr.pop(key)

    # write
    ds_dict = {}
    ds_dict['timeseries'] = ts_tide
    ds_dict['sensingMid'] = np.array(
        [i.strftime('%Y%m%dT%H%M%S') for i in dt_objs], dtype=np.string_)
    writefile.write(ds_dict, out_file=set_file, metadata=atr, ref_file=ts_file)

    return set_file
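
A hypothetical call with typical MintPy file names (the paths below are assumptions); with update_mode=True a re-run returns the existing output file immediately:

# hypothetical usage; file names are assumptions
set_file = calc_solid_earth_tides_timeseries(
    ts_file='timeseries.h5',
    geom_file='inputs/geometryGeo.h5',
    set_file='inputs/SET.h5',
    date_wise_acq_time=True,
    update_mode=True,
)
print('solid Earth tides time-series written to: {}'.format(set_file))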