Example #1
def inputs(file_names, skip_file):
    try:
        [xdata, ydata] = netcdf_read_write.read_grd_xy(file_names[0])
        # can read either netcdf3 or netcdf4.
    except TypeError:
        [xdata, ydata, _] = netcdf_read_write.read_netcdf4_xyz(file_names[0])
    data_all = []
    date_pairs = []

    file_names = sorted(file_names)
    # To force into date-ascending order.

    for ifile in file_names:  # Read the data
        try:
            data = netcdf_read_write.read_grd(ifile)
        except TypeError:
            [_, _, data] = netcdf_read_write.read_netcdf4_xyz(ifile)
        data_all.append(data)
        pairname = ifile.split('/')[-2][0:15]
        date_pairs.append(pairname)
        # returning something like '2016292_2016316' for each intf
        print(pairname)

    skip_intfs = []
    if skip_file is not None:
        ifile = open(skip_file, 'r')
        for line in ifile:
            skip_intfs.append(line.split()[0])
        ifile.close()

    return [xdata, ydata, data_all, date_pairs, skip_intfs]
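
A minimal usage sketch for the reader above (the glob pattern and directory layout are hypothetical; adjust them to wherever the unwrapped interferograms live):

import glob

intf_files = glob.glob("intf_all/*/unwrap.grd")  # hypothetical GMTSAR-style layout: one folder per date pair
[xdata, ydata, data_all, date_pairs, skip_intfs] = inputs(intf_files, skip_file=None)
print("Read %d interferograms, skipping %d" % (len(data_all), len(skip_intfs)))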
Example #2
def inputs(file_names, start_time, end_time, run_type):

    # Read the input grd files. Support for netcdf3 and netcdf4.
    try:
        [xdata, ydata] = netcdf_read_write.read_grd_xy(file_names[0])
    except TypeError:
        [xdata, ydata, _] = netcdf_read_write.read_netcdf4_xyz(file_names[0])

    data_all = []
    date_pairs = []
    dates = []
    start_dt = dt.datetime.strptime(str(start_time), "%Y%m%d")
    end_dt = dt.datetime.strptime(str(end_time), "%Y%m%d")

    # Get the dates of the acquisitions from the file names.
    for ifile in file_names:  # this happens to be in date order on my mac
        if run_type == 'test':  # testing with Kang's format. "20171117_20171123/unwrap_new.grd"
            pairname = ifile.split('/')[-2]
            image1 = pairname.split('_')[0]
            image2 = pairname.split('_')[1]
            image1_dt = dt.datetime.strptime(image1, "%Y%m%d")
            image2_dt = dt.datetime.strptime(image2, "%Y%m%d")
        else:  #  the usual GMTSAR format
            pairname = ifile.split('/')[-1][0:15]
            image1 = pairname.split('_')[0]
            image2 = pairname.split('_')[1]
            image1_dt = dt.datetime.strptime(image1, "%Y%j")
            image2_dt = dt.datetime.strptime(image2, "%Y%j")

        if image1_dt >= start_dt and image1_dt <= end_dt:
            if image2_dt >= start_dt and image2_dt <= end_dt:
                try:
                    data = netcdf_read_write.read_grd(ifile)
                except TypeError:
                    [_, _, data] = netcdf_read_write.read_netcdf4_xyz(ifile)
                if run_type == "test":
                    data_all.append(data * -0.0555 / 4 / np.pi)
                    # mcandis preprocessing involves changing to LOS distances.
                    print(
                        "Converting phase to LOS (mm) with 55.5mm wavelength")
                else:
                    data_all.append(data)
                pairname = dt.datetime.strftime(
                    image1_dt, "%Y%j") + '_' + dt.datetime.strftime(
                        image2_dt, "%Y%j")
                date_pairs.append(pairname)
                # returning something like '2016292_2016316' for each intf
                dates.append(dt.datetime.strftime(image1_dt, "%Y%j"))
                dates.append(dt.datetime.strftime(image2_dt, "%Y%j"))

    data_all = np.array(data_all)
    # this allows easy indexing later on.
    dates = list(set(dates))
    dates = sorted(dates)
    print(date_pairs)
    print("Reading %d interferograms from %d acquisitions. " %
          (len(date_pairs), len(dates)))

    return [xdata, ydata, data_all, dates, date_pairs]
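
A usage sketch for this variant (paths and dates are illustrative; the 'test' run_type expects folder names like "20171117_20171123/unwrap_new.grd", as noted in the comment above, and the time window is given as YYYYMMDD):

import glob

intf_files = glob.glob("*/unwrap_new.grd")  # hypothetical layout for the 'test' format
[xdata, ydata, data_all, dates, date_pairs] = inputs(intf_files, 20170101, 20181231, run_type='test')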
def reader_from_ts(filepathslist, xvar="x", yvar="y", zvar="z"):
    """ 
    This function makes a tuple of grids in timesteps
    It can read in radar coords or geocoded coords, depending on the use of xvar, yvar
    """
    filepaths = [];
    zvalues = [];
    ts_dates = [];
    for i in range(len(filepathslist)):
        print(filepathslist[i])
        # Establish timing and filepath information
        filepaths.append(filepathslist[i]);
        datestr = re.findall(r"\d\d\d\d\d\d\d\d", filepathslist[i])[0];  # extract the 8-digit YYYYMMDD date from the filename
        ts_dates.append(datetime.strptime(datestr, "%Y%m%d"));
        # Read in the data, either netcdf3 or netcdf4
        [xvalues, yvalues, zdata] = rwr.read_netcdf4_xyz(filepathslist[i]);
        zvalues.append(zdata);
        if i == round(len(filepathslist) / 2):
            print('halfway done reading files...');
    mydata = data(filepaths=np.array(filepaths), date_pairs_julian=None, date_deltas=None,
                  xvalues=np.array(xvalues), yvalues=np.array(yvalues), zvalues=np.array(zvalues),
                  date_pairs_dt=None, ts_dates=np.array(ts_dates));
    return mydata;
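
Both reader_from_ts() above and reader() below fill a named tuple called data that is not defined on this page. A minimal sketch of a compatible definition, inferred from the keyword arguments actually used (the original project may define it differently):

import collections

data = collections.namedtuple('data', ['filepaths', 'date_pairs_julian', 'date_deltas',
                                       'xvalues', 'yvalues', 'zvalues',
                                       'date_pairs_dt', 'ts_dates'])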
def reader(filepathslist):
    """
    This function takes in a list of filepaths to GMTSAR grd files, effectively taking in a cuboid of data. 
    It splits and returns this data in a named tuple.
    """
    filepaths = []
    date_pairs_julian, date_deltas, date_pairs = [], [], []
    xvalues, yvalues, zvalues = [], [], []
    for i in range(len(filepathslist)):
        print(filepathslist[i])
        # Establish timing and filepath information
        filepaths.append(filepathslist[i])
        datesplit = re.findall(r"\d\d\d\d\d\d\d_\d\d\d\d\d\d\d", filepathslist[i])[0];  # example: 2010040_2014052
        # adding 1 to both dates because 000 = January 1
        date_new = datesplit.replace(datesplit[0:7], str(int(datesplit[0:7]) + 1))  # replacing first date
        date_new = date_new.replace(date_new[8:15], str(int(date_new[8:15]) + 1))  # replacing second date
        date_pairs_julian.append(date_new[0:15])  # example: 2010041_2014053
        acq1 = datetime.strptime(date_new[0:7], '%Y%j');
        acq2 = datetime.strptime(date_new[8:15], '%Y%j');
        date_pairs.append([acq1, acq2]);
        delta = abs(acq1 - acq2)  # timedelta object
        date_deltas.append(delta.days / 365.24)  # in years. 

        # Read in the data
        xdata, ydata, zdata = rwr.read_netcdf4_xyz(filepathslist[i]);  # does this work on netcdf3 as well? 
        zvalues.append(zdata)
        if i == round(len(filepathslist) / 2):
            print('halfway done reading files...')

    mydata = data(filepaths=np.array(filepaths), date_pairs_julian=np.array(date_pairs_julian),
                  date_deltas=np.array(date_deltas), xvalues=np.array(xdata), yvalues=np.array(ydata),
                  zvalues=np.array(zvalues), date_pairs_dt=np.array(date_pairs), ts_dates=None);
    return mydata
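
A small worked check of the day-of-year shift in reader(): GMTSAR names interferograms with zero-based day numbers, while Python's %j directive is one-based, so both dates are bumped by one before parsing.

from datetime import datetime

datesplit = "2010040_2014052"  # zero-based GMTSAR day numbers
date_new = "%s_%s" % (int(datesplit[0:7]) + 1, int(datesplit[8:15]) + 1)  # -> "2010041_2014053"
print(datetime.strptime(date_new[0:7], "%Y%j"))  # 2010-02-10 00:00:00, i.e. day 41 of 2010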
def get_ll_from_row_col(row, col, example_grd, trans_dat):
    [xdata, ydata, zdata] = netcdf_read_write.read_netcdf4_xyz(example_grd)
    ra = xdata[col]
    az = ydata[row]
    # check this
    [lon, lat] = get_ll_from_ra(trans_dat, ra, az)
    return [lon, lat]
def get_nearest_row_col(example_grd, ra, az):
    [xdata, ydata, zdata] = netcdf_read_write.read_netcdf4_xyz(example_grd)
    col_idx = (np.abs(xdata - ra)).argmin()  # xdata is columns
    row_idx = (np.abs(ydata - az)).argmin()  # ydata is rows
    print(ydata[row_idx])
    print(xdata[col_idx])
    return [row_idx, col_idx]
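
A usage sketch for the two coordinate helpers above (file names and radar coordinates are illustrative; get_ll_from_ra() is assumed to be provided elsewhere in the same module, along with the trans.dat lookup):

example_grd = "unwrap.grd"  # any grid sharing the stack's range/azimuth axes
row, col = get_nearest_row_col(example_grd, ra=15000, az=4000)
lon, lat = get_ll_from_row_col(row, col, example_grd, trans_dat="trans.dat")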
def inputs(gps_los_file, geocoded_insar_file):
    print("Reading files %s and %s for calculating misfit." %
          (gps_los_file, geocoded_insar_file))
    [gps_los_velfield] = los_projection_tools.input_gps_as_los(gps_los_file)
    [xarray, yarray,
     LOS_array] = netcdf_read_write.read_netcdf4_xyz(geocoded_insar_file)
    if np.nanmean(xarray) > 180:
        xarray = np.subtract(xarray, 360)
        # some files come in with 244 instead of -115.  Fixing that.
    return [gps_los_velfield, xarray, yarray, LOS_array]
def produce_min_max(filename, xyz=False):
    if not xyz:
        x, y, z = netcdf_read_write.read_netcdf4_xyz(filename)
    else:
        x, y, z = netcdf_read_write.read_grd_xyz(filename)
    print("File:", filename)
    print("Max: ", np.nanmax(z))
    print("Min: ", np.nanmin(z))
    print("Shape: ", np.shape(z))
    return
Example #9
def dummy_signal_spread(intfs, output_dir, output_filename):
    # Make a perfect signal spread for passing to other applications
    print("Making a dummy signal spread that matches interferograms' dimensions (perfect 100).");
    output_filename = output_dir + "/" + output_filename;
    [xdata, ydata, zdata] = netcdf_read_write.read_netcdf4_xyz(intfs[0]);
    a = np.add(np.zeros(np.shape(zdata)), 100);
    rwr.produce_output_netcdf(xdata, ydata, a, 'Percentage', output_filename, dtype=np.float32)
    rwr.produce_output_plot(output_filename, 'Signal Spread', output_dir + '/signalspread.png',
                            'Percentage of coherence (out of ' + str(len(intfs)) + ' images)', aspect=1.2);
    return;
Example #10
def multiply_file_by_minus1(filename, new_filename):
    print("multiplying %s by -1 " % filename)
    x, y, z = netcdf_read_write.read_netcdf4_xyz(filename)
    z = np.multiply(z, -1)
    netcdf_read_write.produce_output_netcdf(x,
                                            y,
                                            z,
                                            "mm/yr",
                                            new_filename,
                                            dtype=np.float32)
    return
def get_reference_pixel_from_geocoded_grd(ref_lon, ref_lat, ifile):
    # Find the nearest pixel to a reference point in a geocoded grid
    print("  Finding coordinate %.4f, %.4f in geocoded interferograms %s" % (ref_lon, ref_lat, ifile) );
    [xdata, ydata, _] = netcdf_read_write.read_netcdf4_xyz(ifile);
    if xdata[0] > 180:   # If numbers are in the range above 180, turn them into -180 to 180
        xdata = [i-360 for i in xdata];
    row_idx = np.argmin(np.abs(np.array(ydata) - ref_lat));
    col_idx = np.argmin(np.abs(np.array(xdata) - ref_lon));
    if row_idx == 0 or row_idx == len(ydata) - 1 or col_idx == 0 or col_idx == len(xdata) - 1:
        print("WARNING: Coordinate %f %f may be near edge of domain." % (ref_lon, ref_lat));
        row_idx, col_idx = np.nan, np.nan;
    else:
        print("  Found Coordinates at row/col: %d/%d " % (row_idx, col_idx));
    return row_idx, col_idx;
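
A usage sketch (the reference coordinate and file name are illustrative):

import numpy as np

row_ref, col_ref = get_reference_pixel_from_geocoded_grd(-115.51, 32.77, "geocoded/unwrap_ll.grd")
if not np.isnan(row_ref):
    print("Referencing interferograms to row %d, col %d" % (row_ref, col_ref))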
Example #12
def write_unwrapped_ground_range_displacements(ground_range_phase_file,
                                               output_file, x_axis, y_axis,
                                               wavelength):
    # Given a file of unwrapped phase in ground-range pixels,
    # convert phase to displacement using wavelength / (4*pi),
    # and write the result into a unw.geo file with ISCE xml metadata.
    lon_inc = x_axis[1] - x_axis[0]
    lat_inc = y_axis[1] - y_axis[0]

    [_, _, unw] = rwr.read_netcdf4_xyz(ground_range_phase_file)

    plt.figure(figsize=(11, 7), dpi=300)
    X, Y = np.meshgrid(x_axis, y_axis)
    plt.pcolormesh(X, Y, unw, cmap='jet', vmin=0, vmax=20)
    plt.colorbar()
    plt.savefig('unwrapped_geocoded_phase.png')

    # CONVERT TO MM using the wavelength of UAVSAR
    unw = np.multiply(unw, wavelength / (4 * np.pi))
    (ny, nx) = np.shape(unw)

    # ISCE UNW.GEO (IN MM)
    isce_read_write.write_isce_unw(unw,
                                   unw,
                                   nx,
                                   ny,
                                   "FLOAT",
                                   output_file,
                                   firstLat=max(y_axis),
                                   firstLon=min(x_axis),
                                   deltaLon=lon_inc,
                                   deltaLat=lat_inc,
                                   Xmin=min(x_axis),
                                   Xmax=max(x_axis))
    # 2 bands, floats
    return
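
A quick sanity check of the scaling used above: with the wavelength / (4*pi) factor, one full fringe (2*pi of unwrapped phase) corresponds to half a wavelength of line-of-sight change.

import numpy as np

wavelength = 238.4  # hypothetical value in mm; substitute the actual sensor wavelength
one_fringe = 2 * np.pi * wavelength / (4 * np.pi)
print(one_fringe)  # -> wavelength / 2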
def inputs_lkv(look_vector_files):
    print("-->Reading files ", look_vector_files);
    [xarray, yarray, lkv_e] = netcdf_read_write.read_netcdf4_xyz(look_vector_files[0]);
    [xarray, yarray, lkv_n] = netcdf_read_write.read_netcdf4_xyz(look_vector_files[1]);
    [xarray, yarray, lkv_u] = netcdf_read_write.read_netcdf4_xyz(look_vector_files[2]);
    return [xarray, yarray, lkv_e, lkv_n, lkv_u];