Example #1
def combine_all_files(datestr, input_dirs, output_dir):
    print("\nCombining files for date %s" % datestr)

    filename = input_dirs[0] + "/" + datestr + ".grd"
    xdata, ydata, zdata0 = read_netcdf3(filename)
    filename1 = input_dirs[1] + "/" + datestr + ".grd"
    xdata, ydata, zdata1 = read_netcdf3(filename1)
    zdata_total = np.zeros(np.shape(zdata0))

    for j in range(len(ydata)):
        if np.mod(j, 200) == 0:
            print(j)
        for k in range(len(xdata)):
            vector = [zdata0[j][k], zdata1[j][k]]
            # extend with zdata2[j][k] ... zdata6[j][k] here if summing more than two inputs
            zdata_total[j][k] = np.sum(vector)
    output_file = output_dir + "/" + datestr + ".grd"
    output_plot = output_dir + "/" + datestr + ".png"
    produce_output_netcdf(xdata, ydata, zdata_total, "mm", output_file)
    netcdf_plots.produce_output_plot(output_file,
                                     datestr,
                                     output_plot,
                                     "mm",
                                     aspect=1.0,
                                     invert_yaxis=True,
                                     vmin=-50,
                                     vmax=100)
    return
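A minimal usage sketch, assuming two hypothetical input directories that each hold one grid per date (e.g. atm_corr/20191015.grd):

combine_all_files("20191015", ["atm_corr", "defo"], "summed")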
Example #2
def inputs(file_names, start_time, end_time, run_type):
    # Read the input grd files. Support for netcdf3 and netcdf4.
    try:
        filename = file_names[0]
        [xdata, ydata] = read_netcdf3(filename)[0:2]
    except TypeError:
        [xdata, ydata, _] = netcdf_read_write.read_netcdf4(file_names[0])

    data_all = []
    date_pairs = []
    dates = []
    start_dt = dt.datetime.strptime(str(start_time), "%Y%m%d")
    end_dt = dt.datetime.strptime(str(end_time), "%Y%m%d")

    # Get the dates of the acquisitions from the file names.
    for ifile in file_names:  # this happens to be in date order on my mac
        if run_type == 'test':  # testing with Kang's format. "20171117_20171123/unwrap_new.grd"
            pairname = ifile.split('/')[-2]
            image1 = pairname.split('_')[0]
            image2 = pairname.split('_')[1]
            image1_dt = dt.datetime.strptime(image1, "%Y%m%d")
            image2_dt = dt.datetime.strptime(image2, "%Y%m%d")
        else:  # the usual GMTSAR format
            pairname = ifile.split('/')[-1][0:15]
            image1 = pairname.split('_')[0]
            image2 = pairname.split('_')[1]
            image1_dt = dt.datetime.strptime(image1, "%Y%j")
            image2_dt = dt.datetime.strptime(image2, "%Y%j")

        if start_dt <= image1_dt <= end_dt:
            if start_dt <= image2_dt <= end_dt:
                try:
                    data = read_netcdf3(ifile)[2]
                except TypeError:
                    [_, _, data] = netcdf_read_write.read_netcdf4(ifile)
                if run_type == "test":
                    data_all.append(data * -0.0555 / 4 / np.pi)
                    # mcandis preprocessing involves changing to LOS distances.
                    print(
                        "Converting phase to LOS (mm) with 55.5mm wavelength")
                else:
                    data_all.append(data)
                pairname = dt.datetime.strftime(image1_dt, "%Y%j") + '_' + dt.datetime.strftime(image2_dt, "%Y%j")
                date_pairs.append(pairname)
                # returning something like '2016292_2016316' for each intf
                dates.append(dt.datetime.strftime(image1_dt, "%Y%j"))
                dates.append(dt.datetime.strftime(image2_dt, "%Y%j"))

    data_all = np.array(data_all)
    # this allows easy indexing later on.
    dates = list(set(dates))
    dates = sorted(dates)
    print(date_pairs)
    print("Reading %d interferograms from %d acquisitions. " %
          (len(date_pairs), len(dates)))

    return [xdata, ydata, data_all, dates, date_pairs]
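The GMTSAR branch above parses dates as a year plus a three-digit day-of-year; a quick standard-library check of the "%Y%j" format:

import datetime as dt
print(dt.datetime.strptime("2016292", "%Y%j"))  # 2016-10-18 00:00:00 (day 292 of 2016)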
Example #3
def subsample_read_dem(samplefile, demfile):
    # Take an example interferogram and subsample the topo_ra to exactly match this file size.
    # You may have to play with -T/-r options in GMT grdsample to force the same gridcell/gridline registration
    # You also may have to force the netcdf4 file into netcdf3 for later reading in python.

    if not os.path.isfile(demfile):
        print("ERROR! %s does not exist- exiting " % demfile)
        sys.exit(1)

    subsampled_file = 'topo/topo_ra_subsampled.grd'
    intervals = subprocess.check_output(['gmt', 'grdinfo', '-I', samplefile],
                                        shell=False)
    intervals = intervals.decode().split('\n')[0]  # grid increments, e.g. "-I<dx>/<dy>"; decode the bytes from check_output
    ranges = subprocess.check_output(['gmt', 'grdinfo', '-I-', samplefile],
                                     shell=False)
    ranges = ranges.decode().split('\n')[0]  # grid region, e.g. "-R<w>/<e>/<s>/<n>"
    command = 'gmt grdsample ' + demfile + ' -Gtopo/temp.grd -T ' + intervals + ' ' + ranges
    print(command)
    subprocess.call(command, shell=True)
    subprocess.call('nccopy -k classic topo/temp.grd ' + subsampled_file,
                    shell=True)
    subprocess.call(['rm', 'topo/temp.grd'], shell=False)

    [_, _, z] = read_netcdf3(subsampled_file)
    return z
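A hypothetical call, using one interferogram as the size template (both paths are placeholders):

demdata = subsample_read_dem("intf_all/2016292_2016316/unwrap.grd", "topo/topo_ra.grd")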
Example #4
def reader_simple_format(file_names):
    """
    An earlier reading function; it is fast, and useful for things like coherence statistics.
    """
    filename = file_names[0]
    [xdata, ydata] = rwr.read_netcdf3(filename)[0:2]
    data_all = []
    for ifile in file_names:  # this happens to be in date order on my mac
        data = rwr.read_netcdf3(ifile)[2]
        data_all.append(data)
    date_pairs = []
    for name in file_names:
        pairname = name.split('/')[-2][0:15]
        date_pairs.append(pairname)
        # returning something like '2016292_2016316' for each intf
        print(pairname)
    return [xdata, ydata, data_all, date_pairs]
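A sketch of the coherence-statistics use the docstring mentions; corr_files is a hypothetical list of corr.grd paths:

import numpy as np
xdata, ydata, data_all, date_pairs = reader_simple_format(corr_files)
mean_corr = np.nanmean(np.array(data_all), axis=0)  # per-pixel mean coherence over all interferograms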
Example #5
def reshape_TS_into_standard(outdir, earlyfile, cofile, latefile, outfile):
    """
    This is not particularly general: it uses hard-coded information about the time axis
    and the timing of the earthquake, and it is specific to track 26509.
    This function pastes together pre-seismic, co-seismic, and post-seismic sets of time series or jumps
    on the same xy grid.
    """
    tolerance = 300  # purposely killing all pixels above this value
    print("Reshaping UAVSAR file into single TS File")
    [_tdata1, xdata1, ydata1, zdata1] = netcdf_read_write.read_3D_netcdf(earlyfile)
    [_xdata2, _ydata2, zdata2] = netcdf_read_write.read_netcdf3(cofile)
    [_tdata3, _xdata3, _ydata3, zdata3] = netcdf_read_write.read_3D_netcdf(latefile)
    print(np.shape(zdata1), np.shape(zdata2), np.shape(zdata3))
    ynum = np.shape(zdata1)[1]
    xnum = np.shape(zdata1)[2]
    znum = np.shape(zdata1)[0] + np.shape(zdata3)[0]
    total_data = np.zeros([znum, ynum, xnum])
    print(np.shape(total_data))
    for i in range(np.shape(zdata1)[0]):
        temp = zdata1[i, :, :]
        temp[abs(temp) > tolerance] = np.nan  # killing outliers
        total_data[i, :, :] = temp  # one slice per early-epoch acquisition
        print("Early data Slice %d" % i)
    temp = zdata2
    temp[abs(temp) > tolerance] = np.nan
    total_data[7, :, :] = np.add(total_data[6, :, :], temp)  # the coseismic chunk, with outliers removed
    print("Coseismic data slice 7")
    for i in range(1, np.shape(zdata3)[0]):
        temp = zdata3[i, :, :]
        temp[abs(temp) > tolerance] = np.nan  # killing outliers
        total_data[i + np.shape(zdata1)[0], :, :] = np.add(temp, total_data[7, :, :])
        print("Postseismic data slice %d " % (i + np.shape(zdata1)[0]))
    # Hard-coded acquisition dates for track 26509.
    datestrings = ["2009-04-24", "2009-09-21", "2010-04-12", "2010-07-01", "2010-12-01", "2011-05-18",
                   "2011-11-10", "2012-09-26", "2013-05-24", "2014-06-11", "2017-11-01"]
    dtarray = [dt.datetime.strptime(d, "%Y-%m-%d") for d in datestrings]
    zunits = "mm"
    print(np.shape(total_data))
    netcdf_read_write.produce_output_timeseries(xdata1, ydata1, total_data, dtarray, zunits, outfile)
    stacking_utilities.plot_full_timeseries(outfile, dtarray, outdir + "TS_cumulative.png", vmin=-50, vmax=200,
                                            aspect=1 / 8)
    stacking_utilities.plot_incremental_timeseries(outfile, dtarray, outdir + "TS_incremental.png", vmin=-50, vmax=100,
                                                   aspect=1 / 8)
    return
Example #6
def signal_spread_to_mask(ss_file, cutoff, mask_file):
    """ Given a signal spread file, make a nice mask that we can use for plotting."""
    [xdata, ydata, zdata] = netcdf_read_write.read_netcdf3(ss_file)
    mask_response = np.zeros(np.shape(zdata))
    for i in range(len(ydata)):
        for j in range(len(xdata)):
            if zdata[i][j] >= cutoff:
                mask_response[i][j] = 1
            else:
                mask_response[i][j] = np.nan
    netcdf_read_write.produce_output_netcdf(xdata, ydata, mask_response, 'unitless', mask_file)
    return
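Since the mask is 1 where the signal spread clears the cutoff and NaN elsewhere, it can be applied by multiplication. A hedged sketch with hypothetical file names:

signal_spread_to_mask("signalspread.nc", 50, "mask.grd")
[_, _, mask] = netcdf_read_write.read_netcdf3("mask.grd")
[_, _, vel] = netcdf_read_write.read_netcdf3("velocity.grd")  # hypothetical velocity grid
masked_vel = vel * mask  # NaN outside the well-constrained region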
Example #7
def main_function(staging_directory, outdir, rowref, colref, starttime,
                  endtime):
    [filenames, demfile] = configure(staging_directory, outdir, starttime,
                                     endtime)
    demdata = subsample_read_dem(filenames[0], demfile)

    for item in filenames:
        [xdata, ydata, zdata] = read_netcdf3(item)
        [corrected_zdata, zarray, corrarray, demarray] = global_compute_item(zdata, demdata, rowref, colref)
        output_item(xdata, ydata, zdata, corrected_zdata, zarray, corrarray,
                    demarray, item, outdir)
    return
Example #8
def produce_output_contourf(netcdfname, plottitle, plotname, cblabel):
    # Read in the dataset
    [xread, yread, zread] = read_netcdf3(netcdfname)

    # Make a plot
    _fig = plt.figure(figsize=(7, 10))
    plt.contourf(xread, yread, zread)
    plt.title(plottitle)
    plt.gca().set_xlabel("Range", fontsize=16)
    plt.gca().set_ylabel("Azimuth", fontsize=16)
    cb = plt.colorbar()
    cb.set_label(cblabel, size=16)
    plt.savefig(plotname)
    plt.close()
    return
Example #9
def compute_loops(all_loops, loops_dir, loops_guide, rowref, colref):
    subprocess.call(['mkdir', '-p', loops_dir], shell=False)
    ofile = open(loops_dir + loops_guide, 'w')
    for i in range(len(all_loops)):
        ofile.write("Loop %d: %s %s %s\n" %
                    (i, all_loops[i][0], all_loops[i][1], all_loops[i][2]))
    ofile.close()

    unwrapped = 'unwrap.grd'
    wrapped = 'phasefilt.grd'
    filename = 'intf_all/' + all_loops[0][0] + '_' + all_loops[0][1] + '/' + unwrapped
    z1_sample = read_netcdf3(filename)[2]
    number_of_errors = np.zeros(np.shape(z1_sample))

    for i in range(len(all_loops)):
        edge1 = all_loops[i][0] + '_' + all_loops[i][1]
        edge2 = all_loops[i][1] + '_' + all_loops[i][2]
        edge3 = all_loops[i][0] + '_' + all_loops[i][2]
        [xdata, ydata, z1] = netcdf_read_write.read_any_grd('intf_all/' + edge1 + '/' + unwrapped)
        [_, _, z2] = netcdf_read_write.read_any_grd('intf_all/' + edge2 + '/' + unwrapped)
        [_, _, z3] = netcdf_read_write.read_any_grd('intf_all/' + edge3 + '/' + unwrapped)

        [xdata, ydata, wr_z1] = netcdf_read_write.read_any_grd('intf_all/' + edge1 + '/' + wrapped)
        [_, _, wr_z2] = netcdf_read_write.read_any_grd('intf_all/' + edge2 + '/' + wrapped)
        [_, _, wr_z3] = netcdf_read_write.read_any_grd('intf_all/' + edge3 + '/' + wrapped)

        print("Loop " + str(i) + ":")

        rowdim, coldim = np.shape(z1)

        histdata_raw = []
        histdata_fix = []
        znew_raw = np.zeros(np.shape(z1))
        znew_fix = np.zeros(np.shape(z1))
        errorcount = 0

        for j in range(rowdim):
            for k in range(coldim):

                wr1 = wr_z1[j][k] - wr_z1[rowref, colref]
                z1_adj = z1[j][k] - z1[rowref, colref]
                wr2 = wr_z2[j][k] - wr_z2[rowref, colref]
                z2_adj = z2[j][k] - z2[rowref, colref]
                wr3 = wr_z3[j][k] - wr_z3[rowref, colref]
                z3_adj = z3[j][k] - z3[rowref, colref]

                # Using equation from Heresh Fattahi's PhD thesis to isolate unwrapping errors.
                wrapped_closure_raw = wr_z1[j][k] + wr_z2[j][k] - wr_z3[j][k]
                wrapped_closure_fix = wr1 + wr2 - wr3
                offset_before_unwrapping = np.mod(wrapped_closure_fix,
                                                  2 * np.pi)
                if offset_before_unwrapping > np.pi:
                    offset_before_unwrapping = offset_before_unwrapping - 2 * np.pi
                    # send it to the -pi to pi realm.

                unwrapped_closure_raw = z1[j][k] + z2[j][k] - z3[j][k]
                unwrapped_closure_fix = z1_adj + z2_adj - z3_adj

                znew_raw[j][k] = unwrapped_closure_raw - offset_before_unwrapping
                znew_fix[j][k] = unwrapped_closure_fix - offset_before_unwrapping

                if ~np.isnan(znew_raw[j][k]):
                    histdata_raw.append(znew_raw[j][k] / np.pi)
                if ~np.isnan(znew_fix[j][k]):
                    histdata_fix.append(znew_fix[j][k] / np.pi)
                if abs(znew_fix[j][k]) > 0.5:  # if this pixel's closure misfit exceeds the threshold, count an unwrapping error
                    errorcount = errorcount + 1
                    number_of_errors[j][k] = number_of_errors[j][k] + 1

        errorpixels = round(100 * float(errorcount) / len(histdata_fix), 2)
        print("Most common raw loop sum: ")
        print(np.median(histdata_raw))
        print("Most common fix loop sum: ")
        print(np.median(histdata_fix))
        print("\n")

        make_plot(xdata, ydata, znew_fix,
                  loops_dir + 'phase_closure_' + str(i) + '.eps', errorpixels)
        make_histogram(histdata_fix,
                       loops_dir + 'histogram_' + str(i) + '.eps')

    return [xdata, ydata, number_of_errors]
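The re-wrapping step inside the pixel loop maps the wrapped-phase closure into the (-pi, pi] interval before it is compared with the unwrapped closure; pulled out here as a standalone sketch:

import numpy as np

def wrap_to_pi(phase):
    # map a phase value into (-pi, pi], mirroring the element-wise logic above
    wrapped = np.mod(phase, 2 * np.pi)
    if wrapped > np.pi:
        wrapped -= 2 * np.pi
    return wrapped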
Example #10
def inputs(inputfile, demfile):
    [_, _, topo] = read_netcdf3(demfile)
    [xdata, ydata, zdata] = read_netcdf3(inputfile)
    topo = np.flipud(topo)  # flip top-to-bottom, presumably to match the row order of the input grid
    return [topo, xdata, ydata, zdata]
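A hypothetical call; both file names are placeholders:

topo, xdata, ydata, zdata = inputs("unwrap.grd", "topo_ra.grd")
assert np.shape(topo) == np.shape(zdata)  # the DEM and the data must share a grid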