def save_unw_vs_elevation(unw_file_list, every=50):
    points = np.empty((0, 2))
    elpts = sario.load("elevation_looked.dem")
    mask = ~np.isnan(sario.load(unw_file_list[0].replace(".unw", ".unwflat")))
    # elpts = elpts[:, :400]
    elpts = elpts[mask]
    elpts = elpts.reshape((-1,))
    for f in unw_file_list[::every]:
        out = sario.load(f)
        out -= np.mean(out)
        # new_pts = np.vstack((elpts, out[:, :400][mask].reshape((-1,)))).T
        new_pts = np.vstack((elpts, out[mask].reshape((-1,)))).T
        points = np.vstack((points, new_pts))
    np.save("elevation_points.npy", points)
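# Usage sketch for save_unw_vs_elevation (kept as comments; file names are hypothetical
# and assume .unw/.unwflat interferograms plus "elevation_looked.dem" in the working
# directory):
#
#     import glob
#     unw_files = sorted(glob.glob("*.unw"))
#     save_unw_vs_elevation(unw_files, every=25)  # writes "elevation_points.npy"
#     pts = np.load("elevation_points.npy")       # col 0: elevation, col 1: demeaned phase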
def plot_avg(
    directory=None,
    prefix="avg_",
    band=1,
    cmap="seismic_wide_y",
    hide_axes=True,
    num_igrams=None,  # None => plot all matching files
):
    filenames = sorted(glob.glob(os.path.join(directory, f"{prefix}*")))[:num_igrams]
    print(f"Found {len(filenames)} average images in {directory}")
    imgs = np.stack([sario.load(f, band=band) for f in filenames], axis=0)
    vmax = np.max(np.abs(imgs))
    vmin = -vmax
    nrow, ncol = _plot_rows_cols(len(imgs))
    fig, axes = plt.subplots(nrow, ncol)
    for (img, ax, fname) in zip(imgs, axes.ravel(), filenames):
        axim = ax.imshow(img, vmax=vmax, vmin=vmin, cmap=cmap)
        title = os.path.split(fname)[1]
        title = os.path.splitext(title)[0].replace(prefix, "")
        ax.set_title(title)
        if hide_axes:
            ax.set_axis_off()
        fig.colorbar(axim, ax=ax)
    plt.tight_layout()

    slclist = sario.parse_slclist_strings([os.path.split(s)[1] for s in filenames])
    plot_variances(slclist, imgs)
    return fig, axes, imgs
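# Usage sketch for plot_avg (the directory name is hypothetical; assumes "avg_*" files
# readable by sario.load):
#
#     fig, axes, imgs = plot_avg(directory="average_ifgs", band=1)
#     fig.savefig("avg_overview.png", dpi=200)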
def subsample_dem(
    sub=5,
    stack_fname="unw_stack_20190101.h5",
    dem_fname="elevation_looked.dem",
    stack_dset="stack_flat_shifted",
    cor_fname="cor_stack_20190101.h5",
    cor_mean_dset="stack_mean",
):
    ds = xr.open_dataset(stack_fname)
    ifg_stack = ds[stack_dset]
    # Make DataArrays for the DEM and mean correlation image
    dem = xr.DataArray(
        sario.load(dem_fname),
        coords={"lat": ifg_stack.lat, "lon": ifg_stack.lon},
    )
    ds_cor = xr.open_dataset(cor_fname)
    cor = ds_cor[cor_mean_dset]

    if sub > 1:
        ifg_stack_sub = ifg_stack.coarsen(lat=sub, lon=sub, boundary="trim").mean()
        dem_sub = dem.coarsen(lat=sub, lon=sub, boundary="trim").mean()
        cor_sub = cor.coarsen(lat=sub, lon=sub, boundary="trim").mean()
        return dem_sub, ifg_stack_sub, cor_sub
    else:
        return dem, ifg_stack, cor
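# Usage sketch for subsample_dem (relies on the default HDF5 stack/correlation file
# names above, which may differ per project):
#
#     dem_sub, ifg_sub, cor_sub = subsample_dem(sub=10)
#     print(dem_sub.shape, ifg_sub.shape, cor_sub.shape)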
def align_uavsar_images(image_list):
    """Aligns stack of images to the first acquisition

    Args:
        image_list (list[str]): list of names of files from different dates
            over the same acquisition area
    """
    uav_files = [parsers.Uavsar(f) for f in image_list]
    # Align all to first acquisition date
    sorted_by_date = sorted(uav_files, key=lambda x: x.date)

    # IF WE WANT ALL POSSIBLE PAIRS:
    # Grab each pair of (earlier date, later date)
    # sorted_pairs = list(itertools.combinations(sorted_by_date, 2))

    loaded_imgs = [sario.load(u.filename) for u in sorted_by_date]
    loaded_imgs = utils.crop_to_smallest(loaded_imgs)

    first_ann = sorted_by_date[0].ann_data
    first_img = loaded_imgs[0]
    # Align all subsequent images to first
    out_images = [first_img]
    for uavsar, img in zip(sorted_by_date[1:], loaded_imgs[1:]):
        shifted_late = utils.align_image_pair((first_img, img), (first_ann, uavsar.ann_data))
        out_images.append(shifted_late)

    return out_images
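# Usage sketch for align_uavsar_images (the UAVSAR file names are hypothetical
# multi-date acquisitions over the same area):
#
#     files = ["site_L090HHHH_date1.grd", "site_L090HHHH_date2.grd"]
#     aligned = align_uavsar_images(files)  # arrays registered to the earliest date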
def ratio_images(image_list, savename=None):
    # Note: `dates`, START_ROW/END_ROW, START_COL/END_COL, hide_axes, and date_diffs
    # are module-level names defined elsewhere in this script.
    fig, axes = plt.subplots(1, len(dates) - 1)
    axes = [axes] if len(dates) <= 2 else axes
    fig.tight_layout()
    fig.subplots_adjust(right=0.8)

    ratio_list = stack.make_uavsar_time_diffs(image_list)
    # Use same under image for all 3
    under = plotting.equalize_and_mask(
        sario.load(image_list[0])[START_ROW:END_ROW, START_COL:END_COL], fill_value=0.0
    )
    for idx in range(len(ratio_list)):
        ax = axes[idx]
        over = ratio_list[idx][START_ROW:END_ROW, START_COL:END_COL]
        # cmap = plotting.make_shifted_cmap(over, cmap_name='seismic')
        stack.overlay(under, over, ax=ax, show_colorbar=False)
        hide_axes(ax)
        ax.set_title(date_diffs(dates)[idx])

    cbar_ax = fig.add_axes([0.85, 0.15, 0.05, 0.7])
    fig.colorbar(axes[-1].get_images()[-1], cax=cbar_ax)

    if savename:
        plt.savefig(savename, bbox_inches="tight", dpi=300)
    else:
        plt.show(block=True)
def looked_dem(src_dem, dest_rsc, outname):
    """Save a smaller DEM version to match the size of the dest_rsc file"""
    import apertools.sario as sario

    rsc = sario.load(dest_rsc)
    xstep, ystep = rsc["x_step"], rsc["y_step"]
    # -r nearest == Use nearest-neighbor resampling, -tr = target resolution
    cmd = f"gdal_translate -r nearest -of ROI_PAC -tr {xstep} {ystep} {src_dem} {outname}"
    _log_and_run(cmd)
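# Usage sketch for looked_dem (requires GDAL's gdal_translate on PATH; file names are
# hypothetical -- dest_rsc supplies the target grid spacing):
#
#     looked_dem("elevation.dem", "dem.rsc", "elevation_looked.dem")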
def combine_complex(img_list, verbose=True):
    """Combine multiple complex images which partially overlap

    Used for SLCs/.geos of adjacent Sentinel frames

    Args:
        img_list: list of complex images (.geo files)
            can be filenames or preloaded arrays
    Returns:
        ndarray: Same size as each, with pixels combined
    """
    if len(img_list) < 2:
        raise ValueError("Must pass more than 1 image to combine")

    # Start with each one where the other is nonzero
    img1 = img_list[0] if isinstance(img_list[0], np.ndarray) else sario.load(img_list[0])
    img_shape = img1.shape
    total = len(img_list)

    if verbose:
        print("processing image 1 of %s" % total)
    img_out = np.copy(img1)
    for (idx, next_img) in enumerate(img_list[1:]):
        if verbose:
            print("processing image %s of %s" % (idx + 2, total))
        if not isinstance(next_img, np.ndarray):
            next_img = sario.load(next_img)
        if next_img.shape != img_shape:
            raise ValueError(
                "All images must have same size. Sizes: %s, %s" % (img_shape, next_img.shape)
            )
        # Overwrite wherever the newer image has data (nonzero pixels)
        nonzero_mask = next_img != 0
        img_out[nonzero_mask] = next_img[nonzero_mask]
        # OLD WAY:
        # img_out += next_img
        # Now only on overlap, take the previous's pixels
        # overlap_idxs = (img_out != 0) & (next_img != 0)
        # img_out[overlap_idxs] = next_img[overlap_idxs]

    return img_out
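# Usage sketch for combine_complex (the .geo SLC names are hypothetical; preloaded
# complex arrays work too):
#
#     merged = combine_complex(["frame1.geo", "frame2.geo"])
#     sario.save("merged.geo", merged)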
def stack_igrams(
    event_date=MENTONE_EQ_DATE,
    num_igrams=10,
    use_cm=True,
    rate=False,
    outname=None,
    verbose=True,
    ref=(5, 5),
    window=5,
    ignore_geos=True,
    cc_thresh=None,
    avg_cc_thresh=0.0,
    sigma_filter=0.3,
):
    print(f"Event date: {event_date}")
    gi_file = "slclist_ignore.txt" if ignore_geos else None
    slclist, ifglist = sario.load_slclist_ifglist(".", slclist_ignore_file=gi_file)
    ifgs = select_cross_event(slclist, event_date, num_igrams=num_igrams)
    # stack_igrams = select_pre_event(slclist, ifglist, event_date)
    # stack_igrams = select_post_event(slclist, ifglist, event_date)
    stack_fnames = sario.ifglist_to_filenames(ifgs, ".unw")
    if verbose:
        print(f"Using the following {len(stack_fnames)} igrams in stack:")
        for f in stack_fnames:
            print(f)

    dts = [(pair[1] - pair[0]).days for pair in ifgs]
    cur_phase_sum, cc_stack = create_stack(
        stack_fnames,
        dts,
        rate=rate,
        use_cm=use_cm,
        ref=ref,
        window=window,
        cc_thresh=cc_thresh,
        avg_cc_thresh=avg_cc_thresh,
        sigma_filter=sigma_filter,
    )

    if outname:
        import h5py

        with h5py.File(outname, "w") as f:
            f["stackavg"] = cur_phase_sum
        sario.save_dem_to_h5(outname, sario.load("dem.rsc"))

    return cur_phase_sum, cc_stack
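# Usage sketch for stack_igrams (assumes the working directory holds the .unw/.cc
# interferograms, dem.rsc, and optionally slclist_ignore.txt; the event date below is
# a hypothetical example):
#
#     import datetime
#     phase_cm, avg_cc = stack_igrams(
#         event_date=datetime.date(2020, 3, 26), num_igrams=10, outname="stackavg.h5"
#     )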
def demo_zoom(hhfiles):
    ratio_list = make_uavsar_time_diffs(hhfiles)
    under = plotting.equalize_and_mask(sario.load(hhfiles[0]), fill_value=0.0)
    over = ratio_list[0]
    cmap = make_shifted_cmap(over, cmap_name='seismic')
    under_image_info = parsers.Uavsar(hhfiles[0]).ann_data
    bbox_ll = (-96.17, 30.05, 0.05, 0.07)
    zoomed_box(under, over, bbox_ll, under_image_info=under_image_info, cmap=cmap)
def stitch_topstrip(ftop, fbot, colshift=1):
    # This worked:
    # Out[14]: ((540, 32400), (2160, 0))
    # In [29]: out = np.zeros((len(gbot) + len(gtop) - 540, gtop.shape[1]), dtype=gtop.dtype)
    # In [30]: out[:len(gtop), 1:] = gtop[:, 1:]
    # In [31]: out[-len(gbot):] = gbot
    print(f"Stitching {ftop} and {fbot}")
    (toplast_in_bot, _), (botfirst_in_top, _) = find_rows(ftop, fbot)
    print(f"{toplast_in_bot = }")
    # (botfirst_in_top, _), (toplast_in_bot, _) = find_rows(ftop, fbot)
    # breakpoint()
    imgtop = sario.load(ftop, band=1)
    # print("redo shift", imgtop[:5, -5:])
    imgbot = sario.load(fbot, band=1)
    if np.iscomplexobj(imgtop):
        # Find any constant phase offset in overlap
        # np.abs(gbot[:361, -499:]), np.abs(gtop[1440:, -500:-1])
        bot_slice = imgbot[:toplast_in_bot + 1, colshift:]
        top_slice = imgtop[botfirst_in_top:, :-colshift]
        phase_diff = rewrap_to_2pi(np.angle(top_slice) - np.angle(bot_slice))
        # phase_shift = phase_diff.mean(axis=0)
        phase_shift = phase_diff.mean()
        imgtop[:, :-colshift] = imgtop[:, :-colshift] * np.exp(1j * phase_shift)

    total_rows = imgbot.shape[0] + imgtop.shape[0] - toplast_in_bot
    out = np.zeros((total_rows, imgbot.shape[1]), dtype=imgbot.dtype)
    out[-len(imgbot):] = imgbot
    # out[:len(imgtop), colshift:] = imgtop[:, colshift:]
    # out[:len(imgtop), colshift:] = imgtop[:, :-colshift]
    out[:len(imgtop), :-colshift] = imgtop[:, colshift:]
    return out
def test_load_uavsar(self):
    try:
        temp_dir = tempfile.mkdtemp()
        grd1 = "brazos_14937_17090_017_170903_L090HHHH_CX_01_ML5X5.grd"
        grdfile = os.path.join(temp_dir, grd1)
        sario.save(grdfile, np.array([[1, 2], [3, 4]]).astype(sario.FLOAT_32_LE))

        annfile = os.path.join(
            temp_dir, "brazos_14937_17090_017_170903_L090_CX_01_ML5X5.ann"
        )
        with open(annfile, "w") as f:
            f.write("grd_mag.set_rows (pixels) = 2 ; ground range data lines\n")
            f.write("grd_mag.set_cols (pixels) = 2 ; ground range data samples\n")

        emptygrd = sario.load(grdfile, verbose=True)
    finally:
        shutil.rmtree(temp_dir)
def test_load_file(self):
    geo_path = join(
        self.datapath,
        "S1A_IW_SLC__1SDV_20180420T043026_20180420T043054_021546_025211_81BE.SAFE.small.geo",
    )
    loaded_geo = sario.load_file(geo_path, verbose=True)
    expected_geo = np.array(
        [
            [-27.189274 - 60.105267j, -41.34938 + 82.05109j],
            [58.716545 + 13.9955j, 68.892 - 42.065178j],
            [41.361275 - 152.78986j, -65.905945 - 61.246834j],
        ],
        dtype="complex64",
    )
    assert_array_almost_equal(expected_geo, loaded_geo)

    loaded_dem = sario.load_file(self.dem_path, verbose=True)
    expected_dem = np.array([[1413, 1413], [1414, 1414], [1415, 1415]], dtype="<i2")
    assert_array_almost_equal(expected_dem, loaded_dem)

    # check again with buffer
    loaded_dem2 = sario.load(self.dem_path, arr=loaded_dem, verbose=True)
    assert_array_almost_equal(expected_dem, loaded_dem2)
    if title:
        axes.set_title(title)
    plt.show(block=True)


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("filename", help="Name of file to open")
    parser.add_argument(
        "-d",
        "--downsample",
        type=int,
        default=1,
        help="Factor to downsample file to display (default=1)",
    )
    parser.add_argument("--dem-rsc", help="Name of dem.rsc file to use for opening image")
    # NOTE: with default=True this flag is effectively always on
    parser.add_argument("--colorbar", action="store_true", default=True, help="Show colorbar")
    parser.add_argument("--title", help="Title for figure")
    args = parser.parse_args()

    img = sario.load(args.filename, downsample=args.downsample)
    plot_image(img, title=args.title, colorbar=args.colorbar)
def plot_img_diff(
    arrays=None,
    dset="defo_lowess",
    fnames=[],
    vm=6,
    vmax=None,
    vmin=None,
    twoway=True,
    titles=[],
    show_diff=True,
    vdiff=1,
    cmap=DEFAULT_CMAP,
    axis_off=False,
    cbar_label="",
    show=True,
    figsize=None,
    interpolation=None,
    aspect=None,
    bbox=None,
    extent=None,
    share=True,
    **kwargs,
):
    """Plot two images for comparison, (and their difference if `show_diff`)"""
    # import proplot as pplt
    if arrays is None:
        from apertools import sario

        arrays = [sario.load(f, dset=dset, **kwargs) for f in fnames]

    n = len(arrays)
    ncols = n + 1 if show_diff else n
    vmin, vmax = _get_vminmax(arrays[0], vm=vm, vmin=vmin, vmax=vmax, twoway=twoway)
    # print(f"{vmin} {vmax}")
    fig, axes = plt.subplots(
        1, ncols, sharex=share, sharey=share, figsize=figsize, squeeze=False
    )
    axes = axes.ravel()
    # fig, axes = pplt.subplots(ncols=ncols, sharex=share, sharey=share, figsize=figsize)
    for ii in range(n):
        if bbox:
            extent = [bbox[0], bbox[2], bbox[1], bbox[3]]
        ax = axes[ii]
        axim = ax.imshow(
            arrays[ii],
            cmap=cmap,
            vmax=vmax,
            vmin=vmin,
            interpolation=interpolation,
            extent=extent,
        )
        if titles:
            ax.set_title(titles[ii])
        # https://stackoverflow.com/questions/18195758/set-matplotlib-colorbar-size-to-match-graph
        cbar = fig.colorbar(axim, ax=ax, fraction=0.033, pad=0.04)
        cbar.set_label(cbar_label)
        if axis_off:
            ax.set_axis_off()
        if aspect:
            ax.set_aspect(aspect)
    # fig.colorbar(axim, ax=axes[n - 1])
    if show_diff:
        # Now plot the difference image at the end
        diff_arr = arrays[0] - arrays[1]
        # the diff is always two way, even if arrays are positive only
        vmin, vmax = _get_vminmax(diff_arr, vm=vdiff, twoway=True)
        ax = axes[-1]
        axim = ax.imshow(
            diff_arr,
            cmap=cmap,
            vmax=vmax,
            vmin=vmin,
            interpolation=interpolation,
            extent=extent,
        )
        ax.set_title("left - middle")
        if axis_off:
            ax.set_axis_off()
        if aspect:
            ax.set_aspect(aspect)
        cbar = fig.colorbar(axim, ax=ax, fraction=0.033, pad=0.04)
        cbar.set_label(cbar_label)
    # [f.close() for f in files]
    if show:
        plt.show(block=False)
    return fig, axes
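# Usage sketch for plot_img_diff (the result files and dataset name are hypothetical):
#
#     fig, axes = plot_img_diff(
#         fnames=["run1.h5", "run2.h5"], dset="defo_lowess",
#         titles=["run 1", "run 2"], cbar_label="cm",
#     )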
def average_seasonal_igrams(
    ifg_dir=".",
    gi_file="slclist_ignore.txt",
    ext=".unw",
    day_lim=30,
    min_temporal=30,
    month_range=range(1, 13),
    normalize_by="total",  # means sum(phase_i) / sum(times_i)
    # normalize_by="per_date",  # means sum(phase_i / span_i) / N
    to_cm=True,
    to_yearly_rate=True,
):
    slclist, ifglist = sario.load_slclist_ifglist(ifg_dir, slclist_ignore_file=gi_file)
    nearby_ifglist = select_nearby_igrams(
        ifglist,
        day_lim=day_lim,
        min_temporal=min_temporal,
        month_range=month_range,
    )
    print(f"Filtered {len(ifglist)} original igrams down to {len(nearby_ifglist)}")
    fnames = sario.ifglist_to_filenames(nearby_ifglist, ext=ext)

    out = np.zeros(sario.load(fnames[0]).shape)
    mask_fname = os.path.join(ifg_dir, "masks.h5")
    with h5py.File(mask_fname, "r") as f:
        mask_stack = f["igram"][:].astype(bool)
    out_mask = np.zeros_like(out).astype(bool)

    # Get masks for deramping
    mask_igram_date_list = sario.load_ifglist_from_h5(mask_fname)

    total_days = 0
    for (f, date_pair) in zip(fnames, nearby_ifglist):
        img = sario.load(f)
        baseline_days = (date_pair[1] - date_pair[0]).days
        if normalize_by == "per_date":
            img /= baseline_days
        elif normalize_by == "total":
            total_days += baseline_days
        out += img

        mask_idx = mask_igram_date_list.index(date_pair)
        out_mask |= mask_stack[mask_idx]

    if normalize_by == "per_date":
        out /= len(fnames)
    elif normalize_by == "total":
        out /= total_days

    out = remove_ramp(out, deramp_order=1, mask=out_mask)
    if to_cm:
        out *= PHASE_TO_CM
    if to_yearly_rate:
        out *= 365

    return out
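# Usage sketch for average_seasonal_igrams (assumes an igram directory laid out as the
# defaults expect: .unw files, masks.h5, and slclist_ignore.txt):
#
#     summer_avg = average_seasonal_igrams(ifg_dir=".", month_range=range(6, 9))
#     plt.imshow(summer_avg, cmap="RdBu_r"); plt.colorbar(label="cm/year"); plt.show()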
def create_stack(
    stack_fnames,
    dts,
    rate=False,
    use_cm=True,
    ref=(5, 5),
    window=5,
    cc_thresh=None,
    avg_cc_thresh=0.35,
    sigma_filter=0.3,
):
    cur_phase_sum = np.zeros(sario.load(stack_fnames[0]).shape).astype(float)
    cc_stack = np.zeros_like(cur_phase_sum)
    # for pixels that get masked sometimes, lower that count in the final stack dividing
    pixel_count = np.zeros_like(cur_phase_sum, dtype=int)
    dt_total = 0
    for f, dt in zip(stack_fnames, dts):
        deramped_phase = remove_ramp(sario.load(f), deramp_order=1, mask=np.ma.nomask)
        cur_cc = sario.load(f.replace(".unw", ".cc"))
        if cc_thresh:
            bad_pixel_mask = cur_cc < cc_thresh
        else:
            # zeros => don't mask any to nan
            bad_pixel_mask = np.zeros_like(deramped_phase, dtype=bool)

        deramped_phase[bad_pixel_mask] = np.nan
        # cur_phase_sum += deramped_phase
        cur_phase_sum = np.nansum(np.stack([cur_phase_sum, deramped_phase]), axis=0)
        pixel_count += (~bad_pixel_mask).astype(int)
        dt_total += (~bad_pixel_mask) * dt
        cc_stack += cur_cc

    # subtract the reference location:
    ref_row, ref_col = ref
    win = window // 2
    patch = cur_phase_sum[ref_row - win:ref_row + win + 1, ref_col - win:ref_col + win + 1]
    cur_phase_sum -= np.nanmean(patch)

    if rate:
        cur_phase_sum /= dt_total
    else:
        cur_phase_sum /= pixel_count

    cc_stack /= len(stack_fnames)
    if avg_cc_thresh:
        cur_phase_sum[cc_stack < avg_cc_thresh] = np.nan

    if use_cm:
        cur_phase_sum *= PHASE_TO_CM

    if sigma_filter:
        import blobsar.utils as blob_utils

        cur_phase_sum = blob_utils.gaussian_filter_nan(cur_phase_sum, sigma_filter)
    return cur_phase_sum, cc_stack
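# Usage sketch for create_stack (file names and day spans are hypothetical; each .unw
# is expected to have a matching .cc correlation file, as in stack_igrams above):
#
#     fnames = ["20200101_20200113.unw", "20200113_20200125.unw"]
#     day_spans = [12, 12]
#     avg_phase_cm, avg_cc = create_stack(fnames, day_spans, ref=(5, 5), window=5)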
if __name__ == "__main__":
    if len(sys.argv) < 2:
        print("Usage: %s (ratio | thresh) [savename]" % sys.argv[0])
        sys.exit(1)
    savename = None if len(sys.argv) < 3 else sys.argv[2]
    print("saving to %s" % savename if savename else "Not saving")

    if sys.argv[1] == "ratio":
        ratio_images(mlcpaths2, savename)
    elif sys.argv[1] == "thresh":
        mlcs2 = [sario.load(p) for p in mlcpaths2]
        print([m.shape for m in mlcs2])
        # np.stack needs a sequence, not a generator
        blocks2 = np.stack(
            [m[START_ROW:END_ROW, START_COL:END_COL] for m in mlcs2], axis=0
        )
        dates = dates[0:1] + dates[-2:]
        threshold = 0.02
        colors = [(1, 0, 0, c) for c in np.linspace(0, 1, 100)]
        cmapred = mcolors.LinearSegmentedColormap.from_list("mycmap", colors, N=5)

        fig, axes = plt.subplots(1, 3)
        fig.tight_layout()
def create_nc_stack(
    outname,
    file_list,
    band=2,
    stack_dim_name="idx",
    stack_data_name="stack",
    depth=None,
    date_list=None,
    gdal_file=None,
    dem_rsc_file=None,
    bbox=None,
    dtype="float32",
    lat_units="degrees north",
    lon_units="degrees east",
    overwrite=False,
    use_gdal=False,
    gdal_driver=None,
):
    """Create a NetCDF stack file and fill it with the layers in `file_list`

    Writes the lat/lon grid first (via `create_empty_nc_stack`), then streams each
    file into the stack variable in chunk-sized writes.

    Args:
        outname (str): name of .nc output file to save
        file_list (list[str]): if the layers come from files, the list of files
        band (int): the gdal band number to read from file_list
        stack_dim_name (str): default = "idx". Name of the 3rd dimension of the stack
            (Dimensions are (stack_dim_name, lat, lon)).
            If stack_dim_name="date", "date_list" must be passed.
        stack_data_name (str): default="stack", name of the data variable in the file
        depth (int): number of layers which will appear in output stack
            if date_list passed, will use len(date_list)
            if file_list passed, will use len(file_list)
            if None and no date_list, will create an unlimited dimension
        date_list (list[datetime.date]): if layers of stack correspond to dates,
            the list of dates to use for the coordinates
        gdal_file (str): filename with same lat/lon grid as desired new .nc file
        dem_rsc_file (str): .rsc file containing information for the desired
            output lat/lon grid
        bbox (tuple[float]): bounding box if using a subset of the lat/lons provided
        dtype: default="float32", the numpy datatype of the stack data
        lat_units (str): default = "degrees north"
        lon_units (str): default = "degrees east"
        overwrite (bool): default = False, will overwrite file if true
        use_gdal (bool): default = False, use gdal to read each file of `file_list`
        gdal_driver (str): optional, driver for opening files
    """
    file_list = sorted(file_list)
    create_empty_nc_stack(
        outname,
        stack_dim_name=stack_dim_name,
        stack_data_name=stack_data_name,
        depth=depth,
        date_list=date_list,
        file_list=file_list,
        gdal_file=gdal_file,
        dem_rsc_file=dem_rsc_file,
        bbox=bbox,
        dtype=dtype,
        lat_units=lat_units,
        lon_units=lon_units,
        overwrite=overwrite,
    )

    with nc.Dataset(outname, "a") as f:
        stack_var = f.variables[stack_data_name]
        chunk_depth, chunk_rows, chunk_cols = stack_var.chunking()
        depth, rows, cols = stack_var.shape
        buf = np.empty((chunk_depth, rows, cols), dtype=dtype)
        lastidx = 0
        cur_chunk_size = 0  # runs from 0 to chunk_depth
        for idx, in_fname in enumerate(file_list):
            if idx % 100 == 0:
                logger.info(f"Processing {in_fname} -> {idx+1} out of {len(file_list)}")

            if idx % chunk_depth == 0 and idx > 0:
                logger.info(f"Writing {lastidx}:{lastidx+chunk_depth}")
                stack_var[lastidx:lastidx + cur_chunk_size, :, :] = buf
                cur_chunk_size = 0
                lastidx = idx

            if use_gdal:
                with rio.open(in_fname, driver=gdal_driver) as fin:
                    # now store this in the buffer until emptied
                    data = fin.read(band)
            else:
                data = sario.load(in_fname)

            curidx = idx % chunk_depth
            cur_chunk_size += 1
            buf[curidx, :, :] = data

        if cur_chunk_size > 0:
            # Write the final part of the buffer:
            stack_var[lastidx:lastidx + cur_chunk_size, :, :] = buf[:cur_chunk_size]
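# Usage sketch for create_nc_stack (file names are hypothetical; assumes a dem.rsc
# describing the output lat/lon grid, with netCDF4 imported as `nc` and rasterio as `rio`):
#
#     import glob
#     unws = sorted(glob.glob("*.unw"))
#     create_nc_stack("unw_stack.nc", unws, band=2, dem_rsc_file="dem.rsc", overwrite=True)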