def plot_stress_strain_with_landsat(vx_stackfile, vy_stackfile, landsatfile, idx=0, fname='stress.svg'):
    vx_stack = ice.Stack(vx_stackfile)
    vy_stack = ice.Stack(vy_stackfile)
    vx = np.array(vx_stack._datasets["data"][idx])
    vy = np.array(vy_stack._datasets["data"][idx])
    dx = vx_stack.hdr.dx
    dy = vy_stack.hdr.dy
    print(vx_stack.hdr.extent)

    robust_opts = {'window_size': 250, 'order': 2}

    # Compute stress strain
    strain_dict, stress_dict = ice.compute_stress_strain(vx, vy, dx=dx, dy=dy,
                                                         grad_method='robust_l2',
                                                         inpaint=False, **robust_opts)

    fig, axs = plt.subplots(nrows=3, ncols=1, figsize=(6, 10))

    e_xx = axs[0].imshow(strain_dict['e_xx'], aspect='auto', extent=vx_stack.hdr.extent,
                         cmap='coolwarm', clim=[-2, 2])
    axs[0].set_title('e_xx')
    fig.colorbar(e_xx, ax=axs[0])

    e_yy = axs[1].imshow(strain_dict['e_yy'], aspect='auto', extent=vx_stack.hdr.extent,
                         cmap='coolwarm', clim=[-2, 2])
    axs[1].set_title('e_yy')
    fig.colorbar(e_yy, ax=axs[1])

    e_xy = axs[2].imshow(strain_dict['e_xy'], aspect='auto', extent=vx_stack.hdr.extent,
                         cmap='viridis', clim=[0, 4])
    axs[2].set_title('e_xy')
    fig.colorbar(e_xy, ax=axs[2])

    plt.tight_layout()
    plt.savefig('./figures/' + fname, format='svg')
    plt.show()

    vx_stack.fid.close()
    vy_stack.fid.close()
def plot_mean_with_landsat(landsatfile, stackfile, title, lb=float('-inf'), clim=None, fname=None):
    stack = ice.Stack(stackfile, mode='r')

    # Load Landsat 8 raster with same projection window
    e = stack.hdr.extent
    proj_win = [e[0], e[3], e[1], e[2]]
    landsat = ice.Raster(rasterfile=landsatfile, projWin=proj_win)

    fig, axs = plt.subplots(figsize=(9, 9))
    data = np.array(stack._datasets["data"][0])
    # data = np.nanmedian(data, axis=0)
    data[data < lb] = np.nan
    axs.imshow(landsat.data, extent=e, cmap='binary_r')
    im = axs.imshow(data, extent=e, cmap='Spectral_r', alpha=0.6, clim=clim)
    plt.title(title)
    fig.colorbar(im)
    if fname is not None:
        plt.savefig('./figures/' + fname, dpi=300)
    plt.show()

    stack.fid.close()
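# Usage sketch (not part of the original script): one way plot_mean_with_landsat
# above might be called. The file paths, lower bound, and color limits below are
# placeholder assumptions, not values from the original code.
def _example_plot_mean_with_landsat():
    landsat_fname = './data/landsat8/example_B1.TIF'            # hypothetical path
    stack_fname = './data/stacks/example_velocity_stack.hdf5'   # hypothetical path
    plot_mean_with_landsat(landsat_fname, stack_fname,
                           title='Mean Velocity',
                           lb=0.0,            # mask out negative velocities
                           clim=[0, 4000],    # illustrative color limits
                           fname='mean_velocity.png')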
def main(args):
    # Load the stack
    stack = ice.Stack(args.stackfile)

    # Convert fps to interval (ms)
    interval = 1000 / args.fps

    cmap = ice.get_cmap(args.cmap)

    fig, ax = plt.subplots(figsize=(7, 11))
    ax.set_xlabel('X (m)')
    ax.set_ylabel('Y (m)')
    fig.set_tight_layout(True)

    # Calculate frames
    print('Creating frames for animation:')
    ims = []
    for i in tqdm(range(len(stack._datasets['data']))):
        ims.append(frame(i, stack, ax, cmap, args.clim))

    # Add colorbar
    '''
    cbar = plt.colorbar(ims[0][0], ax=ax)
    cbar.set_label('Velocity (m/day)')
    '''

    # Create animation
    print('Saving animation to', args.save)
    anim = animation.ArtistAnimation(fig, ims, interval=interval, repeat=True)
    anim.save(args.save, dpi=args.dpi)
def create_stack(data_root, dtype, stackfile):
    """ Create Stacks for the different data """
    hdr = None

    # Set a lower bound for valid data
    lb = 0
    if dtype in ['vx', 'vy']:
        lb = -1e6  # m/yr

    # Get dates and data from files
    data_dict = {}
    init_tdec, init_data = [], []
    for root, _, files in os.walk(data_root):
        if len(files) == 0:
            continue

        # Get filename (not very elegant, but it works)
        rasterfile = ''
        for f in files:
            if dtype + '.tif' in f:
                rasterfile = f

        # Get date
        date = root.split('/')[-1]
        tdec = ice.datestr2tdec(yy=int(date[0:4]), mm=int(date[4:6]), dd=int(date[6:8]))
        init_tdec.append(tdec)

        # Get data
        e = [-3130000, -3090000, 642500, 680000]
        proj_win = [e[0], e[3], e[1], e[2]]
        raster = ice.Raster(rasterfile=root + '/' + rasterfile, projWin=proj_win)

        # Apply data bounds
        data = np.array(raster.data)
        data[data < lb] = np.nan

        # Store in dict so that data can be sorted by date before adding to Stack
        data_dict[tdec] = data

        # Store the first header
        if hdr is None:
            hdr = raster.hdr

    # Sort data before adding to Stack
    init_tdec.sort()
    for t in init_tdec:
        init_data.append(data_dict[t])

    # Create Stack
    stack = ice.Stack(stackfile, mode='w', init_tdec=np.array(init_tdec), init_rasterinfo=hdr)
    stack.fid.create_dataset("data", data=np.array(init_data))
    stack.fid.close()
def create_stack(data_path, data_table, stack_path, stack_fname, proj_win=None):
    """
    Build a new Stack from a set of raster files.

    Parameters
    ----------
    data_path: str
        Path to directory storing all downloaded rasters to build the Stack from
    data_table: dict
        Table of information for each Raster
    stack_path: str
        Path to directory that the Stack will be saved to
    stack_fname: str
        Stackfile name
    proj_win: list
        Projection window to load each Raster in the Stack with. If None, the
        optimal projection window will be calculated.

    Returns
    -------
    The projection window used for the Stack
    """
    Path(stack_path).mkdir(parents=True, exist_ok=True)

    # Calculate largest common extent between rasters
    if proj_win is None:
        proj_win = calc_optimal_proj_win(data_path, data_table['fname'])
    print('Using projection window', proj_win)

    # Convert date strings to tdec
    dates = 'start_date' if 'start_date' in data_table.keys() else 'date'
    init_tdec = dates_to_tdec(data_table[dates])

    # Get stack data
    print('Loading Stack data:')
    init_data = get_stack_data(data_path, data_table['fname'], proj_win)
    print('Loaded data has shape', init_data.shape)

    # Get header to init stack with
    init_rasterinfo = ice.Raster(data_path + data_table['fname'][0], projWin=proj_win).hdr

    # Initialize stack
    print('Initializing Stack and saving to', stack_path + stack_fname)
    stack = ice.Stack(stack_path + stack_fname, mode='w', init_tdec=init_tdec,
                      init_rasterinfo=init_rasterinfo)
    stack.fid.create_dataset("data", data=init_data)
    stack.fid.close()

    return proj_win
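# Usage sketch (not part of the original script): how create_stack above might be
# invoked. The data_table layout mirrors what the function expects ('fname' plus a
# 'date' or 'start_date' column); the directory, filenames, and date-string format
# are placeholder assumptions.
def _example_create_stack():
    data_path = './data/velocity_rasters/'                  # hypothetical raster directory
    data_table = {
        'fname': ['v_20150101.tif', 'v_20150213.tif'],      # hypothetical raster names
        'date': ['2015-01-01', '2015-02-13'],               # whatever format dates_to_tdec expects
    }
    # Let the function compute the largest common extent itself (proj_win=None),
    # then reuse the returned window for any companion stacks (e.g. vx/vy).
    proj_win = create_stack(data_path, data_table, './data/stacks/', 'example_v_stack.hdf5')
    return proj_win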
def plot_stream(vx_stackfile, vy_stackfile, v_stackfile, landsatfile, idx=0):
    vx_stack = ice.Stack(vx_stackfile)
    vy_stack = ice.Stack(vy_stackfile)
    v_stack = ice.Stack(v_stackfile)

    e = vx_stack.hdr.extent
    proj_win = [e[0], e[3], e[1], e[2]]
    landsat = ice.Raster(rasterfile=landsatfile, projWin=proj_win)

    v = np.array(v_stack._datasets['data'][idx])
    vx = np.array(vx_stack._datasets['data'][idx])
    vy = np.array(vy_stack._datasets['data'][idx])
    dx = vx_stack.hdr.dx
    dy = vy_stack.hdr.dy

    x0, x1, y0, y1 = vx_stack.hdr.extent
    x = np.arange(x0, x0 + len(vx[0]) * dx, dx)
    y = np.arange(y1, y1 + len(vx) * dy, dy)

    fig, axs = plt.subplots(figsize=(9, 9))
    axs.imshow(landsat.data, extent=e, cmap='binary_r')
    axs.set_ylim(bottom=y0, top=y1)
    axs.set_xlim(left=x0, right=x1)

    cmap = copy.copy(cm.get_cmap('plasma'))
    cmap.set_under(alpha=0)
    stream = axs.streamplot(x, y, vx, vy, color=v, cmap=cmap,
                            norm=Normalize(vmin=150, vmax=5000), density=5)
    fig.colorbar(stream.lines, fraction=0.046, pad=0.04)

    plt.tight_layout()
    plt.savefig('./figures/streamline.jpg', dpi=300)
    plt.show()
def main(args):
    # Load the raster/stack and reference raster/stack
    if args.raster.endswith('.h5'):
        # Input stack
        inobj = ice.Stack(args.raster)
        # Reference stack
        ref = ice.Stack(args.reference)
        # Initialize output stack
        outobj = ice.Stack(args.output, mode='w')
        outobj.initialize(inobj.tdec, ref.hdr, data=False, weights=False)
        # Loop over keys to resample
        for key in args.keys:
            print('Resampling', key)
            inobj.resample(ref.hdr, outobj, key=key)
    else:
        # Input raster
        inobj = ice.Raster(rasterfile=args.raster)
        # Reference raster
        ref = ice.Raster(rasterfile=args.reference)
        # Resample
        inobj.resample(ref.hdr)
        # Write to disk
        if args.epsg is not None:
            out_epsg = args.epsg
        else:
            out_epsg = ref.hdr.epsg
        inobj.write_gdal(args.output, epsg=out_epsg, driver=args.driver)
def main(args):
    # Load stack
    stack = ice.Stack(args.stackfile)

    # Make sure output directory exists
    if not os.path.isdir(args.outdir):
        os.mkdir(args.outdir)

    # Launch solver
    ice.tseries.inversion(stack,
                          args.user,
                          args.outdir,
                          nt_out=args.interp,
                          dkey=args.dkey,
                          solver_type=args.solver,
                          n_proc=args.n_proc,
                          regParam=args.penalty,
                          rw_iter=args.rw_iter,
                          n_min=args.n_min,
                          no_weights=args.no_weights,
                          prior_cov=args.prior_cov,
                          mask_raster=args.mask,
                          cleaned_stack=args.cleaned_stack,
                          n_iter=args.n_iter)
def main(args):
    # Load the stack
    stack = ice.Stack(args.stackfile)

    # Check if requested dataset is 2D. If so, view it directly
    if stack[args.key].ndim == 2:
        mean = stack[args.key][()]
    # Otherwise, compute mean
    else:
        mean = stack.mean(key=args.key)

    # Load reference SAR image
    if args.ref is not None:
        sar = ice.Raster(rasterfile=args.ref)
        if sar.hdr != stack.hdr:
            sar.resample(stack.hdr)
        db = 10.0 * np.log10(sar.data)
        low = np.percentile(db.ravel(), 5)
        high = np.percentile(db.ravel(), 99.9)
    else:
        db = None

    fig, ax = plt.subplots()
    vmin, vmax = args.clim
    cmap = ice.get_cmap(args.cmap)
    if db is not None:
        ref = ax.imshow(db, aspect='auto', cmap='gray', vmin=low, vmax=high,
                        extent=stack.hdr.extent)
    im = ax.imshow(mean, aspect='auto', vmin=vmin, vmax=vmax, cmap=cmap,
                   extent=stack.hdr.extent, alpha=args.alpha)
    cbar = plt.colorbar(im, ax=ax, pad=0.02)
    cbar.set_label(args.key)
    plt.show()

    if args.save is not None:
        out = ice.Raster(data=mean, hdr=stack.hdr)
        out.write_gdal(args.save, epsg=args.save_epsg)
def main():
    enderlin_data_path = './data/enderlin2018/'
    stack_path = './data/stacks/'
    stack_fname = 'enderlin_velocity_stack.hdf5'
    landsat8_raster_fname = './data/landsat8/LC08_L1TP_066017_20140224_20170306_01_T1/LC08_L1TP_066017_20140224_20170306_01_T1_B1.TIF'

    # Get all raster files from manifest
    rasterfiles = []
    with open(enderlin_data_path + 'MANIFEST.TXT', 'r') as f:
        for line in f:
            rasterfiles.append(line.split()[0])

    # plot_with_landsat(landsat8_raster_fname, enderlin_data_path + rasterfiles[0],
    #                   'Enderlin velocity on Landsat 8 Image')

    # Get dates and data from files
    init_tdec, init_data = [], []
    for rasterfile in rasterfiles:
        # Parse date from filename
        date = rasterfile.split('_')[1]
        tdec = ice.datestr2tdec(yy=int(date[0:4]), mm=int(date[4:6]), dd=int(date[6:8]))
        init_tdec.append(tdec)

        # Load file and get data
        raster = ice.Raster(rasterfile=enderlin_data_path + rasterfile)
        print(raster.hdr.extent)
        # Data is flipped - not sure why
        init_data.append(np.flip(raster.data, axis=0))

    # Create Stack
    hdr = ice.RasterInfo(rasterfile=enderlin_data_path + rasterfiles[0])
    stack = ice.Stack(stack_path + stack_fname, mode='w', init_tdec=np.array(init_tdec),
                      init_rasterinfo=hdr)
    stack.fid.create_dataset("data", data=np.array(init_data))
    stack.fid.close()

    plot_mean_with_landsat(landsat8_raster_fname, stack_path + stack_fname, "Mean Velocity")
def get_stack_mean(stack_path, stack_fname, mean_fname):
    """
    Calculate the mean for a Stack and save it to a file to save future
    computational time.

    Parameters
    ----------
    stack_path: str
        Path to directory containing the stackfile
    stack_fname: str
        Name of the stackfile
    mean_fname: str
        Name to save the derived mean as

    Returns
    -------
    mean: array_like
        The loaded Stack mean
    hdr: RasterInfo
        Header associated with the mean Raster
    """
    if os.path.exists(stack_path + mean_fname):
        # Load mean file if found
        print('Loading mean from file')
        raster = ice.Raster(rasterfile=stack_path + mean_fname)
        return raster.data, raster.hdr
    else:
        # Calculate mean from Stack and save to file
        print('Calculating mean from Stack')
        stack = ice.Stack(stack_path + stack_fname)
        data = np.array(stack._datasets['data'])
        data[data < 0] = np.nan
        mean = np.nanmean(data, axis=0)

        # Save to file for next time
        print('Saving mean to Raster for next time')
        raster = ice.Raster(data=mean, hdr=stack.hdr)
        raster.write_gdal(stack_path + mean_fname)
        stack.fid.close()

        return mean, raster.hdr
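# Usage sketch (not part of the original script): calling get_stack_mean above.
# The first call computes the mean and caches it as a raster; later calls load the
# cached file. The directory and filenames are placeholder assumptions.
def _example_get_stack_mean():
    stack_path = './data/stacks/'                                # hypothetical directory
    mean, hdr = get_stack_mean(stack_path,
                               'example_velocity_stack.hdf5',    # hypothetical stackfile
                               'example_velocity_mean.tif')      # cached mean raster
    print('Mean shape:', mean.shape, 'extent:', hdr.extent)
    return mean, hdr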
def main(args):
    # Get rasterinfo
    if args.rasterfile.endswith('.h5'):
        stack = ice.Stack(args.rasterfile)
        hdr = stack.hdr
        tdec = stack.tdec
    else:
        hdr = ice.RasterInfo(args.rasterfile, match=args.match)
        tdec = None

    print('Image shape: (%d, %d)' % (hdr.ny, hdr.nx))
    print('Geographic extent: %f %f %f %f' % tuple(hdr.extent))
    print('Geographic spacing: (dy = %f, dx = %f)' % (hdr.dy, hdr.dx))
    if tdec is not None:
        print('Time span: %f -> %f' % (tdec[0], tdec[-1]))
        print('Median time spacing: %f' % np.median(np.diff(tdec)))
    print('EPSG:', hdr.epsg)
def main(args):
    # Load the stack
    stack = ice.Stack(args.stackfile)

    # Get frame
    if args.frame == 'initial':
        mean = stack.slice(0, key=args.key)
    elif args.frame == 'final':
        mean = stack.slice(stack.Nt - 1, key=args.key)
    elif args.frame == 'mean':
        mean = stack.mean(key=args.key)
    elif args.frame == 'std':
        mean = stack.std(key=args.key)
    else:
        raise ValueError('Unsupported frame type.')

    # If model directory is given, load model stack (full fit)
    mstack = None
    if args.mfile is not None:
        mstack = ice.Stack(args.mfile)

    # Load reference SAR image
    if args.ref is not None:
        sar = ice.Raster(rasterfile=args.ref)
        if sar.hdr != stack.hdr:
            sar.resample(stack.hdr)
        db = 10.0 * np.log10(sar.data.astype(np.float32))
        low = np.percentile(db.ravel(), 5)
        high = np.percentile(db.ravel(), 99.9)
    else:
        db = None

    # Initialize image plot
    fig, ax = plt.subplots()
    vmin, vmax = args.clim
    cmap = ice.get_cmap(args.cmap)
    if db is not None:
        ref = ax.imshow(db, aspect='auto', cmap='gray', vmin=low, vmax=high,
                        extent=stack.hdr.extent)
    im = ax.imshow(mean, aspect='auto', vmin=vmin, vmax=vmax, cmap=cmap,
                   extent=stack.hdr.extent, alpha=args.alpha)
    cbar = plt.colorbar(im, ax=ax, pad=0.02)
    cbar.set_label(args.key)

    # Initialize plot for time series for a given pixel
    pts, axts = plt.subplots(figsize=(10, 6))

    # Define action for clicking on deformation map
    def printcoords(event):
        if event.inaxes != ax:
            return

        # Get cursor coordinates
        y, x = event.ydata, event.xdata

        # Print out pixel location
        i, j = stack.hdr.xy_to_imagecoord(x, y)
        print('Row: %d Col: %d' % (i, j))

        # Get time series for cursor location
        d = stack.timeseries(xy=(x, y), key=args.key)

        # Plot data and fit
        axts.clear()
        if args.sigma:
            w = stack.timeseries(xy=(x, y), key='weights')
            sigma = 1.0 / w
            axts.errorbar(stack.tdec, d, yerr=sigma, fmt='o')
        else:
            axts.plot(stack.tdec, d, 'o')
        if mstack is not None:
            fit = mstack.timeseries(xy=(x, y), key=args.mkey)
            axts.plot(mstack.tdec, fit, 'o')
        axts.set_xlabel('Year')
        axts.set_ylabel('Velocity')
        pts.canvas.draw()

    cid = fig.canvas.mpl_connect('button_press_event', printcoords)
    plt.show()
    fig.canvas.mpl_disconnect(cid)
def main(args):
    # Check if nothing is passed
    if args.projWin is None and args.srcWin is None and args.tWin is None:
        print('No cropping parameters provided.')
        return

    # Load stack
    stack = ice.Stack(args.stackfile)

    # Construct spatial slices
    if args.projWin is not None:
        # Unpack projWin parameters
        x0, y0, x1, y1 = args.projWin
        # Convert geographic coordinates to image coordinates
        i0, j0 = stack.hdr.xy_to_imagecoord(x0, y0)
        i1, j1 = stack.hdr.xy_to_imagecoord(x1, y1)
        islice = slice(i0, i1)
        jslice = slice(j0, j1)
    elif args.srcWin is not None:
        # Unpack srcWin parameters
        j0, i0, xsize, ysize = args.srcWin
        j1 = j0 + xsize
        i1 = i0 + ysize
        islice = slice(i0, i1)
        jslice = slice(j0, j1)
    else:
        islice = slice(0, stack.Ny)
        jslice = slice(0, stack.Nx)

    # Construct temporal subset if provided
    tdec = stack.tdec
    if args.tWin is not None:
        t0, tf = args.tWin
        k0 = np.argmin(np.abs(tdec - t0))
        k1 = np.argmin(np.abs(tdec - tf))
        tslice = slice(k0, k1)
        tdec = tdec[tslice]
    else:
        tslice = slice(0, stack.Nt)

    # Make meshgrid of coordinates
    X, Y = stack.hdr.meshgrid()

    # Apply slices
    if islice is not None and jslice is not None:
        X = X[islice, jslice]
        Y = Y[islice, jslice]

    # Create RasterInfo header
    hdr = ice.RasterInfo(X=X, Y=Y)

    # Create output stack
    ostack = ice.Stack(args.output, mode='w')
    ostack.initialize(tdec, hdr, data=True, weights=True,
                      chunks=(1, args.chunks[0], args.chunks[1]))

    # Manually fill in the data
    try:
        ostack['data'][:, :, :] = stack['data'][tslice, islice, jslice]
    except KeyError:
        ostack['data'][:, :, :] = stack['igram'][tslice, islice, jslice]
    ostack['weights'][:, :, :] = stack['weights'][tslice, islice, jslice]
def main(args):
    # Load Stack
    stack = ice.Stack(args.stackfile)

    # Load reference SAR image
    if args.ref is not None:
        sar = ice.Raster(rasterfile=args.ref)
        if sar.hdr != stack.hdr:
            sar.resample(stack.hdr)
        db = 10.0 * np.log10(sar.data)
        low = np.percentile(db.ravel(), 5)
        high = np.percentile(db.ravel(), 99.9)
    else:
        db = None

    # Set up animation
    fig, ax = plt.subplots(figsize=args.figsize)
    data = stack._datasets[args.key]
    cmap = ice.get_cmap(args.cmap)

    # Add ref image if using
    if db is not None:
        ax.imshow(db, aspect='auto', cmap='gray', vmin=low, vmax=high,
                  extent=stack.hdr.extent)

    # Extract reference frame if index provided
    if args.rel_index is not None:
        data_ref = data[args.rel_index]
    else:
        data_ref = 0.0

    im = ax.imshow(data[0] - data_ref, extent=stack.hdr.extent, cmap=cmap,
                   clim=args.clim, alpha=args.alpha)

    # Create title
    datestr = ice.tdec2datestr(stack.tdec[0])
    tx = ax.set_title(args.title + ' ' + datestr, fontweight='bold')
    ax.set_xlabel(args.xlabel)
    ax.set_ylabel(args.ylabel)

    # Add colorbar
    div = make_axes_locatable(ax)
    cax = div.append_axes('right', '5%', '5%')
    cb = fig.colorbar(im, cax=cax)
    cb.set_label(args.clabel)

    # Update the frame
    def animate(i):
        im.set_data(data[i] - data_ref)
        datestr = ice.tdec2datestr(stack.tdec[i])
        tx.set_text(args.title + ' ' + datestr)

    fig.set_tight_layout(True)

    print('Generating animation and saving to', args.save)
    interval = 1000 / args.fps  # Convert fps to interval in milliseconds
    anim = animation.FuncAnimation(fig, animate, interval=interval,
                                   frames=len(data), repeat=True)
    anim.save(args.save, dpi=args.dpi)

    if args.show:
        plt.show()
def main():
    stack_path = './data/stacks/'
    stack_fname = 'joughin_v_stack.hdf5'
    transect_fname = 'landsat_transect.npy'

    print('Loading data and building model ..................................')

    # Load transect coordinates and Stack
    x, y = np.load(stack_path + transect_fname)
    x, y = ice.transform_coordinates(x, y, 32606, 3413)
    stack = ice.Stack(stack_path + stack_fname)

    # Convert time vector to list of datetimes
    t = ice.tdec2datestr(stack.tdec, returndate=True)

    transect_velocities = np.array([stack.timeseries(xy=[x[i], y[i]], win_size=1)
                                    for i in range(len(x))])

    # Create model
    model = ice.tseries.build_temporal_model(t, poly=2, isplines=[])

    # Create temporally coherent (smooth) G over entire timespan of data
    stdec = ice.generateRegularTimeArray(min(stack.tdec), max(stack.tdec))
    st = ice.tdec2datestr(stdec, returndate=True)
    sG = ice.tseries.build_temporal_model(st, poly=2, isplines=[]).G

    # Create image displaying found seasonal and secular variation along transect
    print('Solving regression along transect ................................')

    # Store the timeseries fit for each point along the transect
    data = {
        'total': transect_velocities,
        'seasonal': [],
        'secular': []
    }
    fit = {
        'seasonal': [],
        'secular': []
    }
    transect_amp = []
    transect_phase = []
    mean_vel = []
    transect_sec_phase = []
    transect_sec_min = []
    for tseries in transect_velocities:
        tseries[tseries < 0] = np.nan

        solver = ice.tseries.select_solver('ridge', reg_indices=model.itransient,
                                           penalty=0.25)
        _, m, _ = solver.invert(model.G, tseries)
        # Skip points where the inversion failed (check moved before m is used)
        if m is None:
            continue

        amp, phase = compute_seasonal_amp_phase(m, model)
        transect_amp.append(amp)
        transect_phase.append(phase)

        a, b, c = m[model.isecular]  # y = a + b*x + c*x**2
        transect_sec_phase.append(-b / (2 * c))
        transect_sec_min.append(a - b**2 / (4 * c))

        seasonal_tseries = np.dot(sG[:, model.iseasonal], m[model.iseasonal])
        secular_tseries = np.dot(sG[:, model.isecular], m[model.isecular])

        mean_vel.append(np.nanmean(tseries))
        # mean_vel.append(np.nanmedian(seasonal_tseries + secular_tseries))
        fit['seasonal'].append(seasonal_tseries)
        fit['secular'].append(secular_tseries)

        seasonal_data = np.dot(model.G[:, model.iseasonal], m[model.iseasonal])
        secular_data = np.dot(model.G[:, model.isecular], m[model.isecular])
        data['seasonal'].append(tseries - secular_data)
        data['secular'].append(tseries - seasonal_data)

    # Calculate path length along the glacier in m
    path_len = ice.compute_path_length(x, y)

    # Get temporal extent of dataset
    extent = [stack.tdec[0], stack.tdec[-1], 0, path_len[-1]]

    # Generate plots
    # plot_timeseries_fit(int(0.20 * len(transect_velocities)), data, stack.tdec, fit, stdec)
    # plot_transect_signal(fit, extent)

    # Smooth amp/phase
    smooth = True
    if smooth:
        win = 11
        poly = 2
        smooth_amp = savgol_filter(transect_amp, win, poly)
        smooth_phase = savgol_filter(transect_phase, win, poly)
        smooth_vel = savgol_filter(mean_vel, win, poly)
        smooth_sec_phase = savgol_filter(transect_sec_phase, win, poly)
        smooth_sec_min = savgol_filter(transect_sec_min, win, poly)
        plot_transect_amp_phase(smooth_amp, smooth_phase, smooth_vel, path_len)
        plot_transect_secular(smooth_sec_phase, smooth_phase, smooth_sec_min, smooth_vel,
                              path_len)
    else:
        plot_transect_amp_phase(transect_amp, transect_phase, mean_vel, path_len)

    # Close fd
    stack.fid.close()
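# compute_seasonal_amp_phase is referenced in main() above but not defined in this
# excerpt. Below is a minimal sketch, under the assumption that the seasonal block
# of the temporal model consists of paired sine/cosine columns, so the pair
# (a_sin, a_cos) maps to amplitude sqrt(a_sin**2 + a_cos**2) and phase
# arctan2(a_sin, a_cos). The original helper may use more harmonics or a different
# phase convention; this is not the original implementation.
def compute_seasonal_amp_phase_sketch(m, model):
    """Hypothetical amplitude/phase estimate from paired sin/cos coefficients."""
    coeffs = np.asarray(m[model.iseasonal])
    a_sin, a_cos = coeffs[0], coeffs[1]   # first harmonic only (assumption)
    amp = np.sqrt(a_sin**2 + a_cos**2)
    phase = np.arctan2(a_sin, a_cos)      # radians; phase convention is an assumption
    return amp, phase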