Code Example #1
File: tsview_dev.py  Project: frsmoreira/PySAR-1
def set_initial_map():
    global d_v, h5, k, dateList, inps, data_lim

    d_v = h5['timeseries'][inps.epoch_num][:] * inps.unit_fac
    # Initial Map
    print(str(dateList))
    d_v = readfile.read(
        inps.timeseries_file,
        datasetName=dateList[inps.epoch_num])[0] * inps.unit_fac
    #d_v = h5[k].get(dateList[inps.epoch_num])[:]*inps.unit_fac
    if inps.ref_date:
        inps.ref_d_v = readfile.read(
            inps.timeseries_file, datasetName=inps.ref_date)[0] * inps.unit_fac
        d_v -= inps.ref_d_v

    if mask is not None:
        d_v = mask_matrix(d_v, mask)

    if inps.ref_yx:
        d_v -= d_v[inps.ref_yx[0], inps.ref_yx[1]]

    data_lim = [np.nanmin(d_v), np.nanmax(d_v)]

    if not inps.ylim_mat:
        inps.ylim_mat = data_lim

    print(('Initial data range: ' + str(data_lim)))
    print(('Display data range: ' + str(inps.ylim_mat)))

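The body above boils down to three array operations on the displacement map: subtract the reference-date map, mask invalid pixels, and subtract the value at the reference pixel. A minimal NumPy sketch of just that referencing logic (the helper name and arguments below are illustrative, not part of PySAR's readfile/mask_matrix API):

import numpy as np

def reference_displacement(d_v, ref_d_v=None, mask=None, ref_yx=None):
    """Re-reference a single displacement map (illustrative stand-in only).
    d_v     : 2D displacement array of one acquisition
    ref_d_v : 2D displacement array of the reference date, or None
    mask    : array where 0/False marks invalid pixels, or None
    ref_yx  : (row, col) of the spatial reference pixel, or None
    """
    d_v = np.array(d_v, dtype=np.float32)
    if ref_d_v is not None:
        d_v -= ref_d_v                        # temporal reference: remove the reference-date map
    if mask is not None:
        d_v[mask == 0] = np.nan               # rough stand-in for mask_matrix()
    if ref_yx is not None:
        d_v -= d_v[ref_yx[0], ref_yx[1]]      # spatial reference: zero displacement at the reference pixel
    return d_v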
Code Example #2
File: tsview_dev.py  Project: hfattahi/PySAR
def set_initial_map():
    global d_v, h5, k, dateList, inps, data_lim

    d_v = h5['timeseries'][inps.epoch_num][:] * inps.unit_fac
    # Initial Map
    print(str(dateList))
    d_v = readfile.read(inps.timeseries_file, datasetName=dateList[inps.epoch_num])[0] * inps.unit_fac
    #d_v = h5[k].get(dateList[inps.epoch_num])[:]*inps.unit_fac
    if inps.ref_date:
        inps.ref_d_v = readfile.read(inps.timeseries_file, datasetName=inps.ref_date)[0]*inps.unit_fac
        d_v -= inps.ref_d_v

    if mask is not None:
        d_v = mask_matrix(d_v, mask)

    if inps.ref_yx:
        d_v -= d_v[inps.ref_yx[0], inps.ref_yx[1]]

    data_lim = [np.nanmin(d_v), np.nanmax(d_v)]

    if not inps.ylim_mat:
        inps.ylim_mat = data_lim

    print(('Initial data range: '+str(data_lim)))
    print(('Display data range: '+str(inps.ylim_mat)))

Code Example #3
File: tsview_dev.py  Project: hfattahi/PySAR
def time_slider_update(val):
    '''Update Displacement Map using Slider'''
    global tims, tslider, ax_v, d_v, inps, img, fig_v, h5, k, dateList
    timein = tslider.val
    idx_nearest = np.argmin(np.abs(np.array(tims) - timein))
    ax_v.set_title('N = %d, Time = %s' % (idx_nearest, inps.dates[idx_nearest].strftime('%Y-%m-%d')))
    d_v = h5[k][idx_nearest][:] * inps.unit_fac
    if inps.ref_date:
        d_v -= inps.ref_d_v
    if mask is not None:
        d_v = mask_matrix(d_v, mask)
    if inps.ref_yx:
        d_v -= d_v[inps.ref_yx[0], inps.ref_yx[1]]
    img.set_data(d_v)
    fig_v.canvas.draw()
Code Example #4
def time_slider_update(val):
    '''Update Displacement Map using Slider'''
    global tslider, ax_v, d_v, inps, img, fig_v, h5, k, dateList  # inps.tims is an attribute of inps, so only inps needs the global declaration
    timein = tslider.val
    idx_nearest = np.argmin(np.abs(np.array(inps.tims) - timein))
    ax_v.set_title('N = %d, Time = %s' % (idx_nearest, inps.dates[idx_nearest].strftime('%Y-%m-%d')))
    d_v = h5[k][idx_nearest][:] * inps.unit_fac
    if inps.ref_date:
        d_v -= inps.ref_d_v
    if mask is not None:
        d_v = mask_matrix(d_v, mask)
    if inps.ref_yx:
        d_v -= d_v[inps.ref_yx[0], inps.ref_yx[1]]
    img.set_data(d_v)
    fig_v.canvas.draw()
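time_slider_update() is written as the callback of a matplotlib Slider widget; the registration itself (tslider.on_changed) is not shown in the example. A minimal, self-contained wiring sketch with synthetic data, assuming a layout similar to (but not identical with) PySAR's actual figure setup:

import numpy as np
import matplotlib.pyplot as plt
from matplotlib.widgets import Slider

# synthetic stack: 10 "acquisitions" of 50x50 displacement values
data = np.random.rand(10, 50, 50)
tims = np.linspace(2015.0, 2017.5, 10)             # decimal years, one per acquisition

fig_v, ax_v = plt.subplots()
img = ax_v.imshow(data[0], interpolation='nearest')

ax_time = fig_v.add_axes([0.2, 0.02, 0.6, 0.03])   # slider axes along the bottom
tslider = Slider(ax_time, 'Time', tims[0], tims[-1], valinit=tims[0])

def time_slider_update(val):
    # pick the acquisition nearest to the slider value and redraw the image
    idx = int(np.argmin(np.abs(tims - tslider.val)))
    ax_v.set_title('N = %d' % idx)
    img.set_data(data[idx])
    fig_v.canvas.draw_idle()

tslider.on_changed(time_slider_update)
plt.show()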
Code Example #5
def estimate_phase_elevation_ratio(dem, ts_data, inps):
    """Estimate phase/elevation ratio for each acquisition of timeseries
    Parameters: dem     : 2D array in size of (          length, width)
                ts_data : 3D array in size of (num_date, length, width)
                inps    : Namespace
    Returns:    X       : 2D array in size of (poly_num+1, num_date)
    """
    num_date = ts_data.shape[0]

    # prepare phase and elevation data
    print('reading mask from file: ' + inps.mask_file)
    mask = readfile.read(inps.mask_file, datasetName='mask')[0]
    dem = mask_matrix(np.array(dem), mask)
    ts_data = mask_matrix(np.array(ts_data), mask)

    # display
    # 1. effect of multilooking --> narrow phase range --> better ratio estimation
    debug_mode = False
    if debug_mode:
        import matplotlib.pyplot as plt
        d_index = 47  # np.argmax(topo_trop_corr)
        data = ts_data[d_index, :, :]
        title = inps.date_list[d_index]
        plt.figure()
        plt.plot(dem[~np.isnan(dem)],
                 data[~np.isnan(dem)],
                 '.',
                 label='Number of Looks = 1')
        mli_dem = multilook_data(dem, 8, 8)
        mli_data = multilook_data(data, 8, 8)
        plt.plot(mli_dem[~np.isnan(mli_dem)],
                 mli_data[~np.isnan(mli_dem)],
                 '.',
                 label='Number of Looks = 8')
        plt.legend()
        plt.xlabel('Elevation (m)')
        plt.ylabel('Range Change (m)')
        plt.title(title)
        out_file = 'phase_elevation_ratio_{}.png'.format(title)
        plt.savefig(out_file, bbox_inches='tight', transparent=True, dpi=300)
        print('save to {}'.format(out_file))
        plt.show()

    print('----------------------------------------------------------')
    print(
        'Empirical tropospheric delay correction based on phase/elevation ratio (Doin et al., 2009)'
    )
    print('polynomial order: {}'.format(inps.poly_order))

    if inps.num_multilook > 1:
        print('number of multilook: {} (multilook data for estimation only)'.
              format(inps.num_multilook))
        mask = multilook_data(mask, inps.num_multilook, inps.num_multilook)
        dem = multilook_data(dem, inps.num_multilook, inps.num_multilook)
        ts_data = multilook_data(ts_data, inps.num_multilook,
                                 inps.num_multilook)

    if inps.threshold > 0.:
        print('correlation threshold: {}'.format(inps.threshold))

    mask_nan = ~np.isnan(dem)
    dem = dem[mask_nan]
    ts_data = ts_data[:, mask_nan]

    # calculate correlation coefficient
    print('----------------------------------------------------------')
    print('calculate correlation of DEM with each acquisition')
    topo_trop_corr = np.zeros(num_date, np.float32)
    for i in range(num_date):
        phase = ts_data[i, :]
        cc = 0.
        if np.count_nonzero(phase) > 0:
            comp_data = np.vstack((dem, phase))
            cc = np.corrcoef(comp_data)[0, 1]
            topo_trop_corr[i] = cc
        print('{}: {:>5.2f}'.format(inps.date_list[i], cc))
    topo_trop_corr = np.abs(topo_trop_corr)
    print('average correlation magnitude: {:>5.2f}'.format(
        np.nanmean(topo_trop_corr)))

    # estimate ratio parameter
    print('----------------------------------------------------------')
    print('estimate phase/elevation ratio')
    A = design_matrix(dem=dem, poly_order=inps.poly_order)
    X = np.dot(np.linalg.pinv(A), ts_data.T)
    X = np.array(X, dtype=np.float32)
    X[:, topo_trop_corr < inps.threshold] = 0.
    return X
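The estimation itself is the last three code lines: build a polynomial design matrix from the flattened, NaN-free DEM and solve one least-squares problem for all acquisitions at once via the pseudo-inverse. A small self-contained sketch of that step (poly_design_matrix below is an illustrative stand-in; PySAR's design_matrix() may order its columns differently):

import numpy as np

def poly_design_matrix(dem, poly_order=1):
    """Columns [dem, dem**2, ..., dem**poly_order, 1], one row per pixel (illustrative)."""
    A = np.ones((dem.size, poly_order + 1), dtype=np.float32)
    for p in range(1, poly_order + 1):
        A[:, p - 1] = dem.flatten() ** p
    return A

# synthetic test: 3 acquisitions, 1000 pixels, phase = 2e-4 * elevation + noise
dem = np.random.uniform(0., 3000., 1000).astype(np.float32)
ts_data = 2e-4 * dem[None, :] + 1e-3 * np.random.randn(3, 1000)

A = poly_design_matrix(dem, poly_order=1)     # (num_pixel, poly_order+1)
X = np.dot(np.linalg.pinv(A), ts_data.T)      # (poly_order+1, num_date)
print(X[0, :])                                # recovered ratios, close to 2e-4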
Code Example #6
def main():
    parser = build_parser()
    parseArgs = parser.parse_args()
    file_name = parseArgs.file
    output_folder = parseArgs.outputDir
    should_mask = True

    path_name_and_extension = os.path.basename(file_name).split(".")
    path_name = path_name_and_extension[0]
    # ---------------------------------------------------------------------------------------
    # start clock to track how long conversion process takes
    start_time = time.clock()

    # use h5py to open specified group(s) in the h5 file
    # then read datasets from h5 file into memory for faster reading of data
    he_obj = HDFEOS(file_name)
    he_obj.open(print_msg=False)
    displacement_3d_matrix = he_obj.read(datasetName='displacement')
    mask = he_obj.read(datasetName='mask')
    if should_mask:
        print("Masking displacement")
        displacement_3d_matrix = mask_matrix(displacement_3d_matrix, mask)
    del mask

    dates = he_obj.dateList
    attributes = dict(he_obj.metadata)

    #file = h5py.File(file_name,  "r")
    #timeseries_group = file["HDFEOS"]["GRIDS"]["timeseries"]
    #displacement_3d_matrix = timeseries_group["observation"]["displacement"]

    # get attributes (stored at root) of UNAVCO timeseries file
    #attributes = dict(file.attrs)

    # in timeseries displacement_3d_matrix, there are datasets
    # need to get datasets with dates - strings that can be converted to integers
    #dates = displacement_3d_matrix.attrs["DATE_TIMESERIES"].split(" ")

    # array that stores dates from dates that have been converted to decimal
    decimal_dates = []

    # read datasets in the group into a dictionary of 2d arrays and intialize decimal dates
    timeseries_datasets = {}
    num_date = len(dates)
    for i in range(num_date):
        timeseries_datasets[dates[i]] = np.squeeze(
            displacement_3d_matrix[i, :, :])
        d = get_date(dates[i])
        decimal = get_decimal_date(d)
        decimal_dates.append(decimal)
    del displacement_3d_matrix

    #for displacement_2d_matrix in displacement_3d_matrix:
    #    dataset = displacement_2d_matrix[:]
    #    if should_mask:
    #        print("Masking " + dates[i])
    #        mask = timeseries_group["quality"]["mask"][:]
    #        dataset = mask_matrix(dataset, mask)
    #    timeseries_datasets[dates[i]] = dataset
    #    d = get_date(dates[i])
    #    decimal = get_decimal_date(d)
    #    decimal_dates.append(decimal)
    #    i += 1

    # close h5 file
    #file.close()

    path_list = path_name.split("/")
    folder_name = path_name.split("/")[len(path_list) - 1]

    try:  # create path for output
        os.mkdir(output_folder)
    except FileExistsError:
        print(output_folder + " already exists")

    # read and convert the datasets, then write them into json files and insert into database
    convert_data(attributes, decimal_dates, timeseries_datasets, dates,
                 output_folder, folder_name)

    # run tippecanoe command to get mbtiles file
    os.chdir(os.path.abspath(output_folder))
    os.system(
        "tippecanoe *.json -l chunk_1 -x d -pf -pk -Bg -d9 -D12 -g12 -r0 -o " +
        folder_name + ".mbtiles")

    # ---------------------------------------------------------------------------------------
    # check how long it took to read h5 file data and create json files
    end_time = time.clock()
    print(("time elapsed: " + str(end_time - start_time)))
    return
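The date handling in the loop above depends on two project helpers, get_date() and get_decimal_date(), whose implementations are not shown here. For orientation, a conversion from a YYYYMMDD string to a decimal year is typically along these lines (an illustrative stand-in; the project's helpers may differ in detail):

from datetime import datetime

def yyyymmdd_to_decimal_year(date_str):
    """Convert e.g. '20160702' to a decimal year (hypothetical helper)."""
    d = datetime.strptime(date_str, '%Y%m%d')
    year_start = datetime(d.year, 1, 1)
    year_end = datetime(d.year + 1, 1, 1)
    year_length = (year_end - year_start).total_seconds()
    return d.year + (d - year_start).total_seconds() / year_length

print(yyyymmdd_to_decimal_year('20160702'))   # ~2016.5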
Code Example #7
def main():
    parser = build_parser()
    parseArgs = parser.parse_args()
    file_name = parseArgs.file
    output_folder = parseArgs.outputDir
    should_mask = True

    path_name_and_extension = os.path.basename(file_name).split(".")
    path_name = path_name_and_extension[0]
    # ---------------------------------------------------------------------------------------
    # start clock to track how long conversion process takes
    start_time = time.clock()

    # use h5py to open specified group(s) in the h5 file 
    # then read datasets from h5 file into memory for faster reading of data
    he_obj = HDFEOS(file_name)
    he_obj.open(print_msg=False)
    displacement_3d_matrix = he_obj.read(datasetName='displacement')
    mask = he_obj.read(datasetName='mask')
    if should_mask:
        print("Masking displacement")
        displacement_3d_matrix = mask_matrix(displacement_3d_matrix, mask)
    del mask

    dates = he_obj.dateList
    attributes = dict(he_obj.metadata)

    #file = h5py.File(file_name,  "r")
    #timeseries_group = file["HDFEOS"]["GRIDS"]["timeseries"]
    #displacement_3d_matrix = timeseries_group["observation"]["displacement"]

    # get attributes (stored at root) of UNAVCO timeseries file
    #attributes = dict(file.attrs)

    # in timeseries displacement_3d_matrix, there are datasets
    # need to get datasets with dates - strings that can be converted to integers
    #dates = displacement_3d_matrix.attrs["DATE_TIMESERIES"].split(" ")

    # array that stores dates from dates that have been converted to decimal
    decimal_dates = []

    # read datasets in the group into a dictionary of 2d arrays and intialize decimal dates
    timeseries_datasets = {}
    num_date = len(dates)
    for i in range(num_date):
        timeseries_datasets[dates[i]] = np.squeeze(displacement_3d_matrix[i, :, :])
        d = get_date(dates[i])
        decimal = get_decimal_date(d)
        decimal_dates.append(decimal)
    del displacement_3d_matrix

    #for displacement_2d_matrix in displacement_3d_matrix:
    #    dataset = displacement_2d_matrix[:]
    #    if should_mask:
    #        print("Masking " + dates[i])
    #        mask = timeseries_group["quality"]["mask"][:]
    #        dataset = mask_matrix(dataset, mask)
    #    timeseries_datasets[dates[i]] = dataset
    #    d = get_date(dates[i])
    #    decimal = get_decimal_date(d)
    #    decimal_dates.append(decimal)
    #    i += 1

    # close h5 file
    #file.close()

    path_list = path_name.split("/")
    folder_name = path_name.split("/")[len(path_list)-1]

    try: # create path for output
        os.mkdir(output_folder)
    except FileExistsError:
        print(output_folder + " already exists")

    # read and convert the datasets, then write them into json files and insert into database
    convert_data(attributes, decimal_dates, timeseries_datasets, dates, output_folder, folder_name)

    # run tippecanoe command to get mbtiles file
    os.chdir(os.path.abspath(output_folder))
    os.system("tippecanoe *.json -l chunk_1 -x d -pf -pk -Bg -d9 -D12 -g12 -r0 -o " + folder_name + ".mbtiles")

    # ---------------------------------------------------------------------------------------
    # check how long it took to read h5 file data and create json files
    end_time =  time.clock()
    print(("time elapsed: " + str(end_time - start_time)))
    return
Code Example #8
def estimate_phase_elevation_ratio(dem, ts_data, inps):
    """Estimate phase/elevation ratio for each acquisition of timeseries
    Parameters: dem     : 2D array in size of (          length, width)
                ts_data : 3D array in size of (num_date, length, width)
                inps    : Namespace
    Returns:    X       : 2D array in size of (poly_num+1, num_date)
    """
    num_date = ts_data.shape[0]

    # prepare phase and elevation data
    print('reading mask from file: '+inps.mask_file)
    mask = readfile.read(inps.mask_file, datasetName='mask')[0]
    dem = mask_matrix(np.array(dem), mask)
    ts_data = mask_matrix(np.array(ts_data), mask)

    # display
    # 1. effect of multilooking --> narrow phase range --> better ratio estimation
    debug_mode = False
    if debug_mode:
        import matplotlib.pyplot as plt
        #d_index = np.argmax(topo_trop_corr)
        d_index = 47
        data = ts_data[d_index, :, :]
        title = inps.date_list[d_index]
        fig = plt.figure()
        plt.plot(dem[~np.isnan(dem)],
                 data[~np.isnan(dem)],
                 '.', label='Number of Looks = 1')
        mli_dem = multilook_data(dem, 8, 8)
        mli_data = multilook_data(data, 8, 8)
        plt.plot(mli_dem[~np.isnan(mli_dem)],
                 mli_data[~np.isnan(mli_dem)],
                 '.', label='Number of Looks = 8')
        plt.legend()
        plt.xlabel('Elevation (m)')
        plt.ylabel('Range Change (m)')
        plt.title(title)
        out_file = 'phase_elevation_ratio_{}.png'.format(title)
        plt.savefig(out_file, bbox_inches='tight', transparent=True, dpi=300)
        print('save to {}'.format(out_file))
        #plt.show()

    print('----------------------------------------------------------')
    print('Empirical tropospheric delay correction based on phase/elevation ratio (Doin et al., 2009)')
    print('polynomial order: {}'.format(inps.poly_order))

    if inps.num_multilook > 1:
        print('number of multilook: {} (multilook data for estimation only)'.format(inps.num_multilook))
        mask = multilook_data(mask, inps.num_multilook, inps.num_multilook)
        dem = multilook_data(dem, inps.num_multilook, inps.num_multilook)
        ts_data = multilook_data(ts_data, inps.num_multilook, inps.num_multilook)

    if inps.threshold > 0.:
        print('correlation threshold: {}'.format(inps.threshold))

    mask_nan = ~np.isnan(dem)
    dem = dem[mask_nan]
    ts_data = ts_data[:, mask_nan]

    # calculate correlation coefficient
    print('----------------------------------------------------------')
    print('calculate correlation of DEM with each acquisition')
    topo_trop_corr = np.zeros(num_date, np.float32)
    for i in range(num_date):
        phase = ts_data[i, :]
        cc = 0.
        if np.count_nonzero(phase) > 0:
            comp_data = np.vstack((dem, phase))
            cc = np.corrcoef(comp_data)[0, 1]
            topo_trop_corr[i] = cc
        print('{}: {:>5.2f}'.format(inps.date_list[i], cc))
    topo_trop_corr = np.abs(topo_trop_corr)
    print('average correlation magnitude: {:>5.2f}'.format(np.nanmean(topo_trop_corr)))

    # estimate ratio parameter
    print('----------------------------------------------------------')
    print('estimate phase/elevation ratio')
    A = design_matrix(dem=dem, poly_order=inps.poly_order)
    X = np.dot(np.linalg.pinv(A), ts_data.T)
    X = np.array(X, dtype=np.float32)
    X[:, topo_trop_corr < inps.threshold] = 0.
    return X
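For completeness, the returned X is typically used by evaluating the same polynomial design matrix on the full DEM and subtracting the predicted, topography-correlated component from the time series. A hedged application sketch (the helper is repeated from the sketch after Code Example #5 so this block stands alone; the exact correction step in PySAR may differ):

import numpy as np

def poly_design_matrix(dem, poly_order=1):
    """Columns [dem, dem**2, ..., dem**poly_order, 1], one row per pixel (illustrative)."""
    A = np.ones((dem.size, poly_order + 1), dtype=np.float32)
    for p in range(1, poly_order + 1):
        A[:, p - 1] = dem.flatten() ** p
    return A

def remove_phase_elevation_trend(dem, ts_data, X, poly_order=1):
    """Subtract the delay predicted by X from each acquisition (illustrative)."""
    num_date, length, width = ts_data.shape
    A = poly_design_matrix(dem, poly_order)                    # (length*width, poly_order+1)
    delay = np.dot(A, X).T.reshape(num_date, length, width)   # predicted stratified delay
    return ts_data - delay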