Example #1
def inputs(file_names, skip_file):
    try:
        filename = file_names[0]
        [xdata, ydata] = read_netcdf3(filename)[0:2]  # try netcdf3 first; fall back to netcdf4 below
    except TypeError:
        [xdata, ydata, _] = netcdf_read_write.read_netcdf4(file_names[0])
    data_all = []
    date_pairs = []

    file_names = sorted(file_names)  # force into date-ascending order

    for ifile in file_names:  # read the data
        try:
            data = read_netcdf3(ifile)[2]
        except TypeError:
            [_, _, data] = netcdf_read_write.read_netcdf4(ifile)
        data_all.append(data)
        pairname = ifile.split('/')[-2][0:15]
        date_pairs.append(pairname)  # something like '2016292_2016316' for each intf
        print(pairname)

    skip_intfs = []
    if skip_file is not None:
        with open(skip_file, 'r') as f:
            for line in f:
                skip_intfs.append(line.split()[0])

    return [xdata, ydata, data_all, date_pairs, skip_intfs]
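
A minimal usage sketch for the reader above; the directory layout and skip-file name here are hypothetical, not taken from the source.

import glob

file_names = glob.glob("intf_all/*/unwrap.grd")   # hypothetical layout: one directory per interferogram pair
skip_file = "skip_list.txt"                        # hypothetical file: one pair name per line (or pass None)
[xdata, ydata, data_all, date_pairs, skip_intfs] = inputs(file_names, skip_file)

# Drop any interferograms that were flagged in the skip file
kept = [(d, p) for d, p in zip(data_all, date_pairs) if p not in skip_intfs]
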
def inputs_lkv(look_vector_files):
    print("-->Reading files ", look_vector_files)
    [_, _, lkv_e] = netcdf_read_write.read_netcdf4(look_vector_files[0])
    [_, _, lkv_n] = netcdf_read_write.read_netcdf4(look_vector_files[1])
    [xarray, yarray,
     lkv_u] = netcdf_read_write.read_netcdf4(look_vector_files[2])
    return [xarray, yarray, lkv_e, lkv_n, lkv_u]
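
The three look-vector grids returned here are typically used to project east/north/up motion into the radar line of sight. A small sketch of that projection, with the caveat that the sign convention depends on how the look-vector files were generated:

import numpy as np

def project_enu_to_los(d_e, d_n, d_u, lkv_e, lkv_n, lkv_u):
    # Element-wise dot product of an ENU displacement field with the unit look vector.
    # Whether positive LOS means motion toward or away from the satellite depends on
    # the convention used to build the look-vector grids; verify against a known signal.
    return np.multiply(d_e, lkv_e) + np.multiply(d_n, lkv_n) + np.multiply(d_u, lkv_u)
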
Example #3
def inputs(file_names, start_time, end_time, run_type):
    # Read the input grd files. Support for netcdf3 and netcdf4.
    try:
        filename = file_names[0]
        [xdata, ydata] = read_netcdf3(filename)[0:2]
    except TypeError:
        [xdata, ydata, _] = netcdf_read_write.read_netcdf4(file_names[0])

    data_all = []
    date_pairs = []
    dates = []
    start_dt = dt.datetime.strptime(str(start_time), "%Y%m%d")
    end_dt = dt.datetime.strptime(str(end_time), "%Y%m%d")

    # Get the dates of the acquisitions from the file names.
    for ifile in file_names:  # this happens to be in date order on my mac
        if run_type == 'test':  # testing with Kang's format. "20171117_20171123/unwrap_new.grd"
            pairname = ifile.split('/')[-2]
            image1 = pairname.split('_')[0]
            image2 = pairname.split('_')[1]
            image1_dt = dt.datetime.strptime(image1, "%Y%m%d")
            image2_dt = dt.datetime.strptime(image2, "%Y%m%d")
        else:  # the usual GMTSAR format
            pairname = ifile.split('/')[-1][0:15]
            image1 = pairname.split('_')[0]
            image2 = pairname.split('_')[1]
            image1_dt = dt.datetime.strptime(image1, "%Y%j")
            image2_dt = dt.datetime.strptime(image2, "%Y%j")

        if start_dt <= image1_dt <= end_dt:
            if start_dt <= image2_dt <= end_dt:
                try:
                    data = read_netcdf3(ifile)[2]
                except TypeError:
                    [_, _, data] = netcdf_read_write.read_netcdf4(ifile)
                if run_type == "test":
                    data_all.append(data * -0.0555 / 4 / np.pi)
                    # mcandis preprocessing involves changing to LOS distances.
                    print(
                        "Converting phase to LOS (mm) with 55.5mm wavelength")
                else:
                    data_all.append(data)
                pairname = dt.datetime.strftime(
                    image1_dt, "%Y%j") + '_' + dt.datetime.strftime(
                        image2_dt, "%Y%j")
                date_pairs.append(pairname)
                # returning something like '2016292_2016316' for each intf
                dates.append(dt.datetime.strftime(image1_dt, "%Y%j"))
                dates.append(dt.datetime.strftime(image2_dt, "%Y%j"))

    data_all = np.array(data_all)
    # this allows easy indexing later on.
    dates = list(set(dates))
    dates = sorted(dates)
    print(date_pairs)
    print("Reading %d interferograms from %d acquisitions. " %
          (len(date_pairs), len(dates)))

    return [xdata, ydata, data_all, dates, date_pairs]
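
The pair names assembled above use the %Y%j (year plus day-of-year) convention rather than %Y%m%d. A quick illustration of the round trip between the two formats:

import datetime as dt

d = dt.datetime.strptime("20161018", "%Y%m%d")
julian_str = dt.datetime.strftime(d, "%Y%j")      # '2016292', as in a pair name like '2016292_2016316'
back = dt.datetime.strptime(julian_str, "%Y%j")   # parses back to the same datetime
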
def reader_from_ts(filepathslist):
    """ 
    This function makes a tuple of grids in timesteps
    It can read in radar coords or geocoded coords, depending on the use of xvar, yvar
    """
    filepaths, zvalues, ts_dates = [], [], []
    xvalues, yvalues = [], []
    for i in range(len(filepathslist)):
        print(filepathslist[i])
        # Establish timing and filepath information
        filepaths.append(filepathslist[i])
        datestr = re.findall(r"\d\d\d\d\d\d\d\d", filepathslist[i])[0]
        ts_dates.append(datetime.strptime(datestr, "%Y%m%d"))
        # Read in the data, either netcdf3 or netcdf4
        [xvalues, yvalues, zdata] = rwr.read_netcdf4(filepathslist[i])
        zvalues.append(zdata)
        if i == round(len(filepathslist) / 2):
            print('halfway done reading files...')
    mydata = data(filepaths=np.array(filepaths),
                  date_pairs_julian=None,
                  date_deltas=None,
                  xvalues=np.array(xvalues),
                  yvalues=np.array(yvalues),
                  zvalues=np.array(zvalues),
                  date_pairs_dt=None,
                  ts_dates=np.array(ts_dates))
    return mydata
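
The namedtuple called data used here is not defined in these examples. Judging from the fields populated (here and in the reader function further down), it is presumably something along these lines:

import collections

# Reconstructed from the fields used above; the real definition may differ.
data = collections.namedtuple("data", [
    "filepaths", "date_pairs_julian", "date_deltas",
    "xvalues", "yvalues", "zvalues", "date_pairs_dt", "ts_dates"])
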
def get_ll_from_row_col(row, col, example_grd, trans_dat):
    [xdata, ydata, _] = netcdf_read_write.read_netcdf4(example_grd)
    ra = xdata[col]
    az = ydata[row]
    # check this
    [lon, lat] = get_ll_from_ra(trans_dat, ra, az)
    return [lon, lat]
def get_nearest_row_col(example_grd, ra, az):
    [xdata, ydata, _] = netcdf_read_write.read_netcdf4(example_grd)
    col_idx = (np.abs(xdata - ra)).argmin()  # xdata is columns
    row_idx = (np.abs(ydata - az)).argmin()  # ydata is rows
    print(ydata[row_idx])
    print(xdata[col_idx])
    return [row_idx, col_idx]
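
A short sketch of how these two lookups might be chained together; the grid path, trans.dat path, and radar coordinates below are hypothetical.

example_grd = "intf_all/2016292_2016316/unwrap.grd"    # hypothetical example grid
trans_dat = "topo/trans.dat"                           # hypothetical translation table for get_ll_from_ra
row, col = get_nearest_row_col(example_grd, ra=15000, az=4000)     # nearest pixel to a radar coordinate
lon, lat = get_ll_from_row_col(row, col, example_grd, trans_dat)   # and back out to geographic coordinates
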
Example #7
def dummy_signal_spread(intfs, output_dir, output_filename):
    """ Make a perfect signal spread for passing to other applications """
    print("Making a dummy signal spread that matches interferograms' dimensions (perfect 100).")
    output_filename = output_dir + "/" + output_filename
    [xdata, ydata, zdata] = netcdf_read_write.read_netcdf4(intfs[0])
    a = np.add(np.zeros(np.shape(zdata)), 100)
    netcdf_read_write.produce_output_netcdf(xdata, ydata, a, 'Percentage', output_filename, dtype=np.float32)
    netcdf_plots.produce_output_plot(output_filename, 'Signal Spread', output_dir + '/signalspread.png',
                                     'Percentage of coherence (out of ' + str(len(intfs)) + ' images)', aspect=1.2)
    return
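
A typical invocation might look like the following; the paths are hypothetical and the output directory is assumed to already exist.

import glob

intf_list = glob.glob("intf_all/*/unwrap.grd")     # hypothetical interferogram list
dummy_signal_spread(intf_list, "stacking_results", "signalspread.nc")
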
def multiply_file_by_minus1(filename, new_filename):
    print("multiplying %s by -1 " % filename)
    x, y, z = netcdf_read_write.read_netcdf4(filename)
    z = np.multiply(z, -1)
    netcdf_read_write.produce_output_netcdf(x,
                                            y,
                                            z,
                                            "mm/yr",
                                            new_filename,
                                            dtype=np.float32)
    return
def reader(filepathslist):
    """
    This function takes a list of filepaths to GMTSAR grd files and reads them into a cuboid of data.
    It returns this data in a named tuple.
    """
    filepaths = []
    date_pairs_julian, date_deltas, date_pairs = [], [], []
    xdata, ydata, zvalues = [], [], []
    for i in range(len(filepathslist)):
        print(filepathslist[i])
        # Establish timing and filepath information
        filepaths.append(filepathslist[i])
        datesplit = re.findall(r"\d\d\d\d\d\d\d_\d\d\d\d\d\d\d",
                               filepathslist[i])[0]
        # example: 2010040_2014052
        # adding 1 to both dates because 000 = January 1
        date_new = datesplit.replace(datesplit[0:7],
                                     str(int(datesplit[0:7]) +
                                         1))  # replacing first date
        date_new = date_new.replace(date_new[8:15],
                                    str(int(date_new[8:15]) +
                                        1))  # replacing second date
        date_pairs_julian.append(date_new[0:15])  # example: 2015158_2018178
        acq1 = datetime.strptime(date_new[0:7], '%Y%j')
        acq2 = datetime.strptime(date_new[8:15], '%Y%j')
        date_pairs.append([acq1, acq2])
        delta = abs(acq1 - acq2)  # timedelta object
        date_deltas.append(delta.days / 365.24)  # in years.

        # Read in the data
        xdata, ydata, zdata = rwr.read_netcdf4(filepathslist[i])
        # does this work on netcdf3 as well?
        zvalues.append(zdata)
        if i == round(len(filepathslist) / 2):
            print('halfway done reading files...')

    # The sorted list of dates used in this interferogram network
    ts_dates = stacking_utilities.get_unique_dts_from_intf_dates(
        np.array(date_pairs))

    mydata = data(filepaths=np.array(filepaths),
                  date_pairs_julian=np.array(date_pairs_julian),
                  date_deltas=np.array(date_deltas),
                  xvalues=np.array(xdata),
                  yvalues=np.array(ydata),
                  zvalues=np.array(zvalues),
                  date_pairs_dt=np.array(date_pairs),
                  ts_dates=ts_dates)
    return mydata
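
reader leans on a helper, stacking_utilities.get_unique_dts_from_intf_dates, that is not shown here. Judging from the call site, it collects the unique acquisition datetimes out of the list of pairs and sorts them, roughly:

import numpy as np

def get_unique_dts_from_intf_dates(date_pairs):
    # date_pairs: iterable of [acq1, acq2] datetime pairs, one per interferogram.
    # A guess at the helper's behavior, reconstructed from how its output is used.
    unique_dates = set()
    for pair in date_pairs:
        unique_dates.add(pair[0])
        unique_dates.add(pair[1])
    return np.array(sorted(unique_dates))
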
Example #10
def read_and_reapply_mask(mask):  # re-apply the mask
    [_, _, unw_grd] = netcdf_read_write.read_netcdf4("unwrap.grd")
    unw_grd = np.multiply(unw_grd, mask)
    xdata = range(0, np.shape(unw_grd)[1])
    ydata = range(0, np.shape(unw_grd)[0])
    netcdf_read_write.produce_output_netcdf(xdata, ydata, unw_grd, 'radians',
                                            'unwrap_masked.grd')
    netcdf_plots.produce_output_plot('unwrap_masked.grd',
                                     'Unwrapped Phase',
                                     'unw_masked.png',
                                     'phase',
                                     aspect=1.0,
                                     invert_yaxis=False)
    return
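
The mask argument is expected to be a grid with the same shape as unwrap.grd, with ones where data should be kept. One way such a mask might be built, for example from a coherence grid with a hypothetical threshold:

import numpy as np

[_, _, corr] = netcdf_read_write.read_netcdf4("corr.grd")   # hypothetical coherence grid
mask = np.where(corr > 0.1, 1.0, np.nan)                    # assumed threshold; NaN (or 0) elsewhere
read_and_reapply_mask(mask)
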
def get_reference_pixel_from_geocoded_grd(ref_lon, ref_lat, ifile):
    """ Find the nearest pixel to a reference point in a geocoded grid"""
    print("  Finding coordinate %.4f, %.4f in geocoded interferograms %s" %
          (ref_lon, ref_lat, ifile))
    [xdata, ydata, _] = netcdf_read_write.read_netcdf4(ifile)
    if xdata[0] > 180:  # If numbers are in the range above 180, turn them into -180 to 180
        xdata = [i - 360 for i in xdata]
    row_idx = np.argmin(np.abs(np.array(ydata) - ref_lat))
    col_idx = np.argmin(np.abs(np.array(xdata) - ref_lon))
    if row_idx == 0 or row_idx == len(ydata) - 1 or col_idx == 0 or col_idx == len(xdata) - 1:
        print("WARNING: Coordinate %f %f may be near edge of domain." %
              (ref_lon, ref_lat))
        row_idx, col_idx = np.nan, np.nan
    else:
        print("  Found Coordinates at row/col: %d/%d " % (row_idx, col_idx))
    return row_idx, col_idx
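
Once a valid reference row/col has been found, a common next step is to re-reference each interferogram to that pixel, along these lines (the reference longitude/latitude are hypothetical, and file_names/data_all are assumed to come from a reader like those above):

ref_lon, ref_lat = -115.51, 32.92     # hypothetical reference point
row_ref, col_ref = get_reference_pixel_from_geocoded_grd(ref_lon, ref_lat, file_names[0])
if not np.isnan(row_ref):
    referenced = [z - z[row_ref, col_ref] for z in data_all]   # subtract the reference pixel from each grid
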
def write_unwrapped_ground_range_displacements(ground_range_phase_file,
                                               output_file, x_axis, y_axis,
                                               wavelength):
    """
    Given a file of ground-range pixels in unwrapped phase,
    multiply by wavelength / (4*pi) to convert phase to displacement,
    and write the result into a unw.geo file with its associated xml metadata.
    """
    lon_inc = x_axis[1] - x_axis[0]
    lat_inc = y_axis[1] - y_axis[0]

    [_, _, unw] = rwr.read_netcdf4(ground_range_phase_file)

    plt.figure(figsize=(11, 7), dpi=300)
    X, Y = np.meshgrid(x_axis, y_axis)
    plt.pcolormesh(X, Y, unw, cmap='jet', vmin=0, vmax=20)
    plt.colorbar()
    plt.savefig('unwrapped_geocoded_phase.png')

    # CONVERT TO MM using the wavelength of UAVSAR
    unw = np.multiply(unw, wavelength / (4 * np.pi))
    (ny, nx) = np.shape(unw)

    # ISCE UNW.GEO (IN MM)
    isce_read_write.write_isce_unw(unw,
                                   unw,
                                   nx,
                                   ny,
                                   "FLOAT",
                                   output_file,
                                   firstLat=max(y_axis),
                                   firstLon=min(x_axis),
                                   deltaLon=lon_inc,
                                   deltaLat=lat_inc,
                                   Xmin=min(x_axis),
                                   Xmax=max(x_axis))
    # 2 bands, floats
    return