Esempio n. 1
0
def drive_velocity_gmtsar(intf_files,
                          nsbas_min_intfs,
                          smoothing,
                          wavelength,
                          rowref,
                          colref,
                          outdir,
                          signal_spread_file,
                          baseline_file=None,
                          coh_files=None):
    """Drive the NSBAS LOS-velocity computation for GMTSAR interferograms.

    Reads the interferogram stack (and optional coherence stack), runs the
    NSBAS velocity inversion referenced to pixel (rowref, colref), and writes
    velo_nsbas.grd plus a quick-look PNG into outdir.
    """
    # The signal-spread grid lives inside the output directory.
    signal_spread_file = outdir + "/" + signal_spread_file
    intf_tuple = rmd.reader(intf_files)
    # Coherence stack is optional; None is passed through when absent.
    coh_tuple = rmd.reader(coh_files) if coh_files is not None else None
    signal_spread_data = rwr.read_grd(signal_spread_file)
    velocities = nsbas.Velocities(intf_tuple,
                                  nsbas_min_intfs,
                                  smoothing,
                                  wavelength,
                                  rowref,
                                  colref,
                                  signal_spread_data,
                                  baseline_file=baseline_file,
                                  coh_tuple=coh_tuple)
    # Persist the velocity field and render a preview plot.
    rwr.produce_output_netcdf(intf_tuple.xvalues, intf_tuple.yvalues,
                              velocities, 'mm/yr', outdir + '/velo_nsbas.grd')
    rwr.produce_output_plot(outdir + '/velo_nsbas.grd', 'LOS Velocity',
                            outdir + '/velo_nsbas.png', 'velocity (mm/yr)')
    return
Esempio n. 2
0
def drive_point_ts_gmtsar(intf_files,
                          ts_points_file,
                          smoothing,
                          wavelength,
                          rowref,
                          colref,
                          outdir,
                          baseline_file=None,
                          coh_files=None,
                          geocoded_flag=0):
    """Compute NSBAS time series at a set of individual pixels.

    Parameters: interferogram file list; a points file with
    [lon, lat, row, col, name]; smoothing weight; radar wavelength;
    reference pixel (rowref, colref); output directory; optional baseline
    table (enables DEM-error correction); optional coherence files
    (enables coherence-weighted inversion); geocoded_flag for the point
    lookup. Writes per-point outputs under outdir/ts.
    """
    import os  # local import: only needed for directory creation below

    # For general use, please provide a file with [lon, lat, row, col, name]
    lons, lats, names, rows, cols = stacking_utilities.drive_cache_ts_points(
        ts_points_file, intf_files[0], geocoded_flag)
    if lons is None:
        return
    outdir = outdir + "/ts"
    print("TS OUTPUT DIR IS: " + outdir)
    # Portable, subprocess-free replacement for `call(['mkdir', '-p', ...])`.
    os.makedirs(outdir, exist_ok=True)
    print("Computing TS for %d pixels" % len(lons))
    intf_tuple = rmd.reader(intf_files)
    coh_tuple = None
    coh_value = None
    if coh_files is not None:
        coh_tuple = rmd.reader(coh_files)
    datestrs, x_dts, x_axis_days = nsbas.get_TS_dates(
        intf_tuple.date_pairs_julian)
    # Phase history at the reference pixel; subtracted from every target pixel.
    reference_pixel_vector = intf_tuple.zvalues[:, rowref, colref]

    for i in range(len(rows)):
        pixel_value = intf_tuple.zvalues[:, rows[i], cols[i]]
        pixel_value = np.subtract(pixel_value, reference_pixel_vector)
        # with respect to the reference pixel.
        if coh_tuple is not None:
            coh_value = coh_tuple.zvalues[:, rows[i], cols[i]]
        stacking_utilities.write_testing_pixel(
            intf_tuple, pixel_value, coh_value,
            outdir + '/testing_pixel_' + str(i) + '.txt')
        m_cumulative = nsbas.do_nsbas_pixel(pixel_value,
                                            intf_tuple.date_pairs_julian,
                                            smoothing,
                                            wavelength,
                                            datestrs,
                                            coh_value=coh_value)

        # If we're using DEM error, then we pass in the baseline table.
        if baseline_file is not None:
            m_cumulative = dem_error_correction.driver(m_cumulative, datestrs,
                                                       baseline_file)

        nsbas.nsbas_ts_points_outputs(x_dts, m_cumulative, rows[i], cols[i],
                                      names[i], lons[i], lats[i], outdir)
    return
def inputs(myfiles_no_ramp,
           remove_ramp_flag,
           myfiles_phase,
           signal_spread_file,
           manual_remove,
           number_of_excluded_images,
           wls_flag=0):
    """Assemble the interferogram stack and auxiliary arrays for inversion.

    Filters `myfiles_phase` against the manual-removal list, optionally swaps
    in ramp-corrected files (remove_ramp_flag != 0), and optionally loads a
    coherence cube for weighted least squares (wls_flag == 1).

    Returns (datatuple, signal_spread_data, dates, date_pairs, coherence_cube).
    """
    signal_spread_data = rwr.read_grd(signal_spread_file)
    # FIX: close the manual-remove file deterministically (original leaked the
    # handle). Only the first `number_of_excluded_images` lines are honored.
    with open(manual_remove, 'r') as f:
        excluded = [line.strip('\n')
                    for line in f.readlines()[0:number_of_excluded_images]]
    # Characters [16:31] of each path hold the date-pair id -- TODO confirm
    # against the actual directory naming convention.
    filesmodified = [x for x in myfiles_phase if x[16:31] not in excluded]
    if remove_ramp_flag != 0:
        with open("Metadata/Ramp_need_fix.txt", 'r') as f:
            ramp_fix_list = [line.strip('\n') for line in f.readlines()]
        myfiles_new = []
        for path in filesmodified:
            # Prefer the ramp-corrected version of a file when it exists.
            test = path.replace("ref", "ref_corrected")
            if test in myfiles_no_ramp:
                myfiles_new.append(test)
            if path[16:31] not in ramp_fix_list:
                myfiles_new.append(path)
        print(len(myfiles_new))
        datatuple = rmd.reader(myfiles_new)
    if remove_ramp_flag == 0:
        print(len(filesmodified))
        datatuple = rmd.reader(filesmodified)
    if wls_flag == 1:
        # Swap each phase filename's tail for "corr.grd" to find its coherence.
        filesmodified_coherence = [
            x.replace(x[32:], "corr.grd") for x in filesmodified
        ]
        coherence_cube = rmd.reader(filesmodified_coherence).zvalues
    else:
        # Uniform weights when no coherence weighting is requested.
        coherence_cube = np.ones(np.shape(datatuple.zvalues))
    print(datatuple.dates_correct)
    dates = read_dates(myfiles_phase)
    date_pairs = datatuple.dates_correct
    print("Reading %d interferograms from %d acquisitions. " %
          (len(date_pairs), len(dates)))
    return datatuple, signal_spread_data, dates, date_pairs, coherence_cube
Esempio n. 4
0
def drive_signal_spread_calculation(corr_files, cutoff, output_dir, output_filename):
    """Build a signal-spread (percent-coherent) map from correlation files."""
    print("Making stack_corr")
    output_file = output_dir + "/" + output_filename
    corr_tuple = rmd.reader(corr_files)
    # if unwrapped files, we use Nan to show when it was unwrapped successfully.
    spread = stack_corr(corr_tuple, cutoff)
    rwr.produce_output_netcdf(corr_tuple.xvalues, corr_tuple.yvalues, spread,
                              'Percentage', output_file)
    label = 'Percentage of coherence (out of ' + str(len(corr_files)) + ' images)'
    rwr.produce_output_plot(output_file, 'Signal Spread',
                            output_dir + '/signalspread.png', label, aspect=1.2)
    return
def velocity_simple_stack(filepathslist, wavelength, manual_exclude,
                          signal_threshold):
    """This function takes in a list of files that contain arrays of phases and times. It
    will compute the velocity of each pixel using the given wavelength of the satellite.
    Finally, it will return a 2D array of velocities, ready to be plotted. For the manual exclude
    argument, enter either 0 (no images excluded), 1 (15 images excluded), or 2 (40 images excluded). The
    final argument should be a number between 0 and 100 inclusive that tells the function which pixels
    to exclude based on this signal percentage."""
    print(signal_threshold)
    if manual_exclude != 0:
        # FIX: use a context manager so the handle is always closed.
        with open('Metadata/manual_remove.txt', 'r') as f:
            if manual_exclude == 1:
                x = [line.strip('\n') for line in f.readlines()[0:15]]
            if manual_exclude == 2:
                x = f.read().split('\n')
        # BUG FIX: the original iterated the undefined name `myfiles_new`
        # (NameError on every manual_exclude != 0 call) after clobbering
        # `filepathslist` with the empty `filesmodified` list. Filter the
        # incoming file list instead. Characters [16:31] of each path hold
        # the date-pair id -- TODO confirm against the naming convention.
        filepathslist = [p for p in filepathslist if p[16:31] not in x]
    print('Number of files being stacked: ' + str(len(filepathslist)))
    signal_spread_data = rwr.read_grd("signalspread_please_test.nc")
    mytuple = rmd.reader(filepathslist)
    phases, times = [], []
    velocities = np.zeros((len(mytuple.yvalues), len(mytuple.xvalues)))
    # Walk the (nfiles, ny, nx) phase cube in Fortran order so the file index
    # f varies fastest; (i, j) track the current pixel, c counts finished pixels.
    i, j, f, c = 0, 0, 0, 0
    for z in np.nditer(mytuple.zvalues, order='F'):
        if not np.isnan(z):
            if signal_spread_data[i, j] < signal_threshold:
                # Below the coherence threshold: contribute a NaN time only,
                # which NaN-poisons this pixel's velocity.
                times.append(np.nan)
            else:
                phases.append(mytuple.zvalues[f][i][j])
                times.append(mytuple.date_deltas[f])
        else:
            times.append(np.nan)
        f += 1
        if f == len(mytuple.zvalues):
            # All files seen for this pixel: phase-sum over time-sum, scaled
            # by wavelength/(4*pi) to convert phase to LOS velocity.
            velocities[i,
                       j] = (wavelength / (4 * (np.pi))) * ((np.sum(phases)) /
                                                            (np.sum(times)))
            phases, times = [], []
            c += 1
            print('Done with ' + str(c) + ' out of ' +
                  str(len(mytuple.xvalues) * len(mytuple.yvalues)) + ' pixels')
            f = 0
            j += 1
            if j == len(mytuple.xvalues):
                j = 0
                i += 1
                if i == len(mytuple.yvalues):
                    i = 0
    return velocities, mytuple.xvalues, mytuple.yvalues
Esempio n. 6
0
def drive_coseismic_stack_gmtsar(intf_files, wavelength, rowref, colref,
                                 outdir):
    """Average coseismic LOS displacement from a stack of interferograms."""
    igram_tuple = rmd.reader(intf_files)
    # Average the stack referenced to (rowref, colref) and convert to mm.
    avg_disp = get_avg_coseismic(igram_tuple, rowref, colref, wavelength)
    grd_file = outdir + '/coseismic.grd'
    rwr.produce_output_netcdf(igram_tuple.xvalues, igram_tuple.yvalues,
                              avg_disp, 'mm', grd_file)
    rwr.produce_output_plot(grd_file, 'LOS Displacement',
                            outdir + '/coseismic.png', 'displacement (mm)')
    return
def drive_velocity_simple_stack(intfs, wavelength, rowref, colref, outdir):
    """Drive a simple-stack LOS velocity: stack, write grid, write plot."""
    spread = rwr.read_grd(outdir + "/signalspread.nc")
    igrams = rmd.reader(intfs)
    # last argument is signal threshold (< 100%).  lower signal threshold allows for more data into the stack.
    velocities, x, y = velocity_simple_stack(igrams, wavelength, rowref,
                                             colref, spread, 25)
    grd_file = outdir + '/velo_simple_stack.grd'
    rwr.produce_output_netcdf(x, y, velocities, 'mm/yr', grd_file)
    rwr.produce_output_plot(grd_file, 'LOS Velocity ',
                            outdir + '/velo_simple_stack.png',
                            'velocity (mm/yr)')
    return
Esempio n. 8
0
def drive_full_TS_gmtsar(intf_files,
                         nsbas_min_intfs,
                         sbas_smoothing,
                         wavelength,
                         rowref,
                         colref,
                         outdir,
                         signal_spread_file,
                         baseline_file=None,
                         coh_files=None):
    """Run the full NSBAS time-series inversion and write per-date grids."""
    # Pixel index window for the inversion; the huge end index effectively
    # covers the whole image.
    start_index = 0
    end_index = 7000000
    signal_spread_file = outdir + "/" + signal_spread_file

    igram_stack = rmd.reader(intf_files)
    # Coherence stack is optional; leave as None when no files are given.
    coh_stack = rmd.reader(coh_files) if coh_files is not None else None
    xdates = stacking_utilities.get_xdates_from_intf_tuple(igram_stack)
    spread = rwr.read_grd(signal_spread_file)

    # TIME SERIES
    TS = nsbas.Full_TS(igram_stack,
                       nsbas_min_intfs,
                       sbas_smoothing,
                       wavelength,
                       rowref,
                       colref,
                       spread,
                       start_index=start_index,
                       end_index=end_index,
                       baseline_file=baseline_file,
                       coh_tuple=coh_stack)
    rwr.produce_output_TS_grids(igram_stack.xvalues, igram_stack.yvalues, TS,
                                xdates, 'mm', outdir)
    return
Esempio n. 9
0
if __name__ == "__main__":
    # Ramp-removal driver: for each interferogram listed in
    # Metadata/Ramp_need_fix.txt, fit and remove a plane, re-reference the
    # phase, and write corrected grd/png outputs.
    # FIX: use a context manager so the file handle is closed (the original
    # opened it and never closed it).
    with open("Metadata/Ramp_need_fix.txt", 'r') as f:
        content = [line.strip('\n') for line in f.readlines()]
    model, content_1 = [], []
    for i in range(len(content)):
        content_1.append('intf_all_remote/' + content[i] + '/unwrap.grd')
        # Fit a plane to the unwrapped phase; m1..m3 are the plane parameters.
        m1, m2, m3 = plane_fitter(
            content_1[i], content_1[i].replace('unwrap', 'unwrap_model'))
        model.append(content_1[i].replace('unwrap', 'unwrap_model'))
        remove_plane(content_1[i], model[i],
                     model[i].replace('model', 'no_ramp'), m1, m2, m3)
        temp1 = ['intf_all_remote/' + content[i] + '/unwrap_no_ramp.grd']
        d = rmd.reader(temp1)
        # Re-reference phases to pixel (621, 32) -- hardcoded reference point.
        store = phr.phase_ref(d, 621, 32)
        temp = temp1[0].split('/')[-1]
        stem = temp1[0][0:-len(temp)]
        rwr.produce_output_netcdf(d.xvalues, d.yvalues, store[0], 'Radians',
                                  stem + 'unwrap_ref_corrected.grd')
        rwr.flip_if_necessary(stem + 'unwrap_ref_corrected.grd')
        rwr.produce_output_plot(stem + 'unwrap_ref_corrected.grd',
                                'Referenced and Corrected Unwrapped Phase',
                                stem + 'unwrap_ref_corrected.png',
                                'unwrapped phase')
        print('Done with file ' + str(i + 1))

# if __name__=="__main__":
# grdname="intf_all_remote/2018281_2018305/unwrap.grd"
# remove_trend2d(grdname,4);
Esempio n. 10
0
    # Collect the phase value at the reference pixel (yvalue, xvalue) from
    # every file in the stack.
    for f in range(len(store)):
        ref_values.append(store[f, yvalue, xvalue])

    print(len(ref_values))

    # Subtract each file's reference value from every pixel of that file.
    # Manual traversal: j sweeps columns, wrapping into i (rows); f only
    # advances after a full (i, j) sweep of one file completes.
    i,j,f = 0,0,0
    while f < len(mytuple.zvalues):
            store[f, i ,j] = store[f, i ,j] - ref_values[f]
            j+=1
            if j==len(mytuple.xvalues):
                j=0
                i+=1
                if i == len(mytuple.yvalues):
                    i=0
                    print('Referencing phases in file ' + str(f+1) + ' out of ' + str(len(mytuple.zvalues)))
                    f+=1

    # NOTE(review): `store` is mutated in place and also returned.
    return store

if __name__ == "__main__":
    # Reference every unwrapped interferogram to pixel (621, 32) and write
    # per-file referenced grids and plots next to the inputs.
    unwrap_paths = glob.glob("intf_all_remote/???????_???????/unwrap.grd")
    data = rmd.reader(unwrap_paths)
    referenced = phase_ref(data, 621, 32)
    for idx in range(len(unwrap_paths)):
        print('Dealing with file ' + str(idx + 1))
        basename = unwrap_paths[idx].split('/')[-1]
        stem = unwrap_paths[idx][0:-len(basename)]
        rwr.produce_output_netcdf(data.xvalues, data.yvalues, referenced[idx],
                                  'Radians', stem + 'unwrap_ref.grd')
        rwr.flip_if_necessary(stem + 'unwrap_ref.grd')
        rwr.produce_output_plot(stem + 'unwrap_ref.grd',
                                'Referenced Unwrapped Phase',
                                stem + 'unwrap_ref.png', 'unwrapped phase')
Esempio n. 11
0
    # Count, per pixel, how many files in the stack have coherence >= cutoff.
    a=np.zeros((len(mytuple.yvalues), len(mytuple.xvalues)))
    i,j = 0,0
    # nditer in default C order walks each image row-major; j (columns)
    # varies fastest, wrapping into i (rows). Each full image pass leaves
    # (i, j) back at (0, 0) for the next file in the stack.
    for z in np.nditer(mytuple.zvalues):
        if z >= cutoff:
            a[i,j] = a[i,j] + 1
        j+=1
        if j== len(mytuple.xvalues):
            j=0
            i+=1
            if i == len(mytuple.yvalues):
                i=0
    # Convert counts to a percentage of the number of input files.
    i,j = 0,0
    for n in np.nditer(a):
        a[i,j] = (a[i,j]/(len(mytuple.filepaths)))*100
        j+=1
        if j== len(mytuple.xvalues):
            j=0
            i+=1
            if i == len(mytuple.yvalues):
                i=0
    return a


if __name__ == "__main__":
    # Build a signal-spread map from all correlation grids and plot it.
    corr_paths = glob.glob("intf_all_remote/???????_???????/corr.grd")
    corr_tuple = rmd.reader(corr_paths)
    spread = stack_corr(corr_tuple, 0.1)
    rwr.produce_output_netcdf(corr_tuple.xvalues, corr_tuple.yvalues, spread,
                              'Percentage', 'signalspread_please_test.nc')
    rwr.flip_if_necessary('signalspread_please_test.nc')
    rwr.produce_output_plot('signalspread_please_test.nc', 'Signal Spread',
                            'signalspread_please_test.png',
                            'Percentage of coherence (out of 288 images)')