def drive_velocity_gmtsar(intf_files, nsbas_min_intfs, smoothing, wavelength, rowref, colref, outdir,
                          signal_spread_file, baseline_file=None, coh_files=None):
    # GMTSAR DRIVING VELOCITIES
    signal_spread_file = outdir + "/" + signal_spread_file
    intf_tuple = rmd.reader(intf_files)
    coh_tuple = None
    if coh_files is not None:
        coh_tuple = rmd.reader(coh_files)
    signal_spread_data = rwr.read_grd(signal_spread_file)
    velocities = nsbas.Velocities(intf_tuple, nsbas_min_intfs, smoothing, wavelength, rowref, colref,
                                  signal_spread_data, baseline_file=baseline_file, coh_tuple=coh_tuple)
    rwr.produce_output_netcdf(intf_tuple.xvalues, intf_tuple.yvalues, velocities, 'mm/yr',
                              outdir + '/velo_nsbas.grd')
    rwr.produce_output_plot(outdir + '/velo_nsbas.grd', 'LOS Velocity', outdir + '/velo_nsbas.png',
                            'velocity (mm/yr)')
    return
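# A hedged usage sketch for drive_velocity_gmtsar() above. Everything in this helper is a
# placeholder assumption chosen only to illustrate the call signature: the glob pattern, the
# reference pixel, the smoothing value, and the output directory are not taken from this
# repository's configuration.
def example_run_nsbas_velocity():
    import glob
    intfs = glob.glob("intf_all_remote/???????_???????/unwrap.grd")  # hypothetical interferogram list
    drive_velocity_gmtsar(intfs, nsbas_min_intfs=20, smoothing=1.0, wavelength=56,
                          rowref=621, colref=32, outdir="Stacking/NSBAS",
                          signal_spread_file="signalspread.nc")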
def combine_all_files(datestr, input_dirs, output_dir):
    print("\nCombining files for date %s" % datestr)
    xdata, ydata, zdata0 = netcdf_read_write.read_grd_xyz(input_dirs[0] + "/" + datestr + ".grd")
    xdata, ydata, zdata1 = netcdf_read_write.read_grd_xyz(input_dirs[1] + "/" + datestr + ".grd")
    zdata_total = np.zeros(np.shape(zdata0))
    for j in range(len(ydata)):
        if np.mod(j, 200) == 0:
            print(j)
        for k in range(len(xdata)):
            vector = [zdata0[j][k], zdata1[j][k]]  # , zdata2[j][k], zdata3[j][k], zdata4[j][k], zdata5[j][k], zdata6[j][k] ];
            zdata_total[j][k] = np.sum(vector)
    output_file = output_dir + "/" + datestr + ".grd"
    output_plot = output_dir + "/" + datestr + ".png"
    netcdf_read_write.produce_output_netcdf(xdata, ydata, zdata_total, "mm", output_file)
    netcdf_read_write.produce_output_plot(output_file, datestr, output_plot, "mm", aspect=1.0,
                                          invert_yaxis=True, vmin=-50, vmax=100)
    return
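# The double loop above just sums the two grids element-wise. A minimal vectorized sketch of the
# same operation (combine_two_grids is a hypothetical helper, not a function in this repository;
# it assumes the two grids are co-registered arrays of the same shape):
import numpy as np

def combine_two_grids(zdata0, zdata1):
    # Element-wise sum; NaNs propagate just as np.sum does in the loop above.
    return np.asarray(zdata0) + np.asarray(zdata1)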
def stack_corr_for_ref_unwrapped_isce(intf_files, rowref, colref, ts_output_dir, label=""):
    # WE MAKE THE SIGNAL SPREAD FOR THE CUT IMAGES
    cor_files = [i.replace("fully_processed.unwrappedphase", "cut.cor") for i in intf_files]  # get for isce
    netcdfname = ts_output_dir + '/signalspread_cut_ref' + label + '.nc'
    cor_value = np.nan
    cor_data = readmytupledata.reader_isce(cor_files)
    a = stack_corr.stack_corr(cor_data, cor_value)
    rwr.produce_output_netcdf(cor_data.xvalues, cor_data.yvalues, a, 'Percentage', netcdfname)
    rwr.produce_output_plot(netcdfname, 'Signal Spread above cor=' + str(cor_value),
                            ts_output_dir + '/signalspread_cut_ref' + label + '.png',
                            'Percentage of coherence', aspect=1 / 4, invert_yaxis=False,
                            dot_points=[[colref], [rowref]])
    signal_spread_ref = a[rowref, colref]
    print("Signal Spread of the reference pixel = %.2f " % (signal_spread_ref))
    if signal_spread_ref < 50:
        print("WARNING: Your reference pixel has very low coherence. Consider picking a different one.")
        print("STOPPING ON PURPOSE.")
        sys.exit(0)
    return
def remove_plane(filename, planefile, outfile, m1, m2, m3):
    xvalues, yvalues, zvalues = rwr.read_grd_xyz(filename)
    i, j = 0, 0
    new_z = np.zeros((len(yvalues), len(xvalues)))
    # Subtract the planar model m1 + m2*x + m3*y from every pixel.
    for z in np.nditer(zvalues):
        new_z[i, j] = zvalues[i, j] - (m1 + m2 * xvalues[j] + m3 * yvalues[i])
        j += 1
        if j == len(xvalues):
            j = 0
            i += 1
            if i == len(yvalues):
                i = 0
    print(new_z[0, 0])
    new_z = np.flipud(new_z)
    print(new_z[0, 0])
    rwr.produce_output_netcdf(xvalues, yvalues, np.flipud(new_z), 'unwrapped phase', outfile)
    rwr.flip_if_necessary(outfile)

    # Side-by-side comparison plot of the original and de-ramped grids.
    outfile1 = outfile.replace("no_ramp.grd", "no_ramp_comparison.png")
    fr = netcdf.netcdf_file(outfile, 'r')
    xread = fr.variables['x']
    yread = fr.variables['y']
    zread = fr.variables['z']
    zread_copy = zread[:][:].copy()
    fr2 = netcdf.netcdf_file(filename, 'r')
    xread2 = fr2.variables['x']
    yread2 = fr2.variables['y']
    zread2 = fr2.variables['z']
    zread2_copy = zread2[:][:].copy()

    fig = plt.figure(figsize=(7, 10))
    ax1 = plt.subplot(121)
    old = ax1.imshow(zread2_copy, aspect=1.2)
    ax1.invert_xaxis()
    ax1.get_xaxis().set_ticks([])
    ax1.get_yaxis().set_ticks([])
    ax1.set_xlabel("Range", fontsize=16)
    ax1.set_ylabel("Azimuth", fontsize=16)
    ax2 = plt.subplot(122)
    new = ax2.imshow(zread_copy, aspect=1.2, vmin=np.nanmin(zread_copy), vmax=np.nanmax(zread2_copy))
    ax2.invert_xaxis()
    ax2.get_xaxis().set_ticks([])
    ax2.get_yaxis().set_ticks([])
    ax2.set_xlabel("Range", fontsize=16)
    cb = plt.colorbar(new)
    cb.set_label('unwrapped phase', size=16)
    plt.savefig(outfile1)
    plt.close()

    rwr.produce_output_plot(outfile, 'Ramp removed', outfile.replace('grd', 'png'), 'unwrapped phase')
    return
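# The per-pixel loop in remove_plane() evaluates the plane m1 + m2*x + m3*y at every grid node.
# A minimal vectorized sketch of the same subtraction (subtract_plane is a hypothetical helper,
# not part of this repository):
import numpy as np

def subtract_plane(xvalues, yvalues, zvalues, m1, m2, m3):
    # meshgrid gives xx, yy with shape (len(yvalues), len(xvalues)), matching zvalues.
    xx, yy = np.meshgrid(xvalues, yvalues)
    return zvalues - (m1 + m2 * xx + m3 * yy)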
def drive_signal_spread_calculation(corr_files, cutoff, output_dir, output_filename):
    print("Making stack_corr")
    output_file = output_dir + "/" + output_filename
    mytuple = rmd.reader(corr_files)
    a = stack_corr(mytuple, cutoff)  # if unwrapped files, we use Nan to show when it was unwrapped successfully.
    rwr.produce_output_netcdf(mytuple.xvalues, mytuple.yvalues, a, 'Percentage', output_file)
    rwr.produce_output_plot(output_file, 'Signal Spread', output_dir + '/signalspread.png',
                            'Percentage of coherence (out of ' + str(len(corr_files)) + ' images)',
                            aspect=1.2)
    return
def dummy_signal_spread(intfs, output_dir, output_filename):
    # Make a perfect signal spread for passing to other applications
    print("Making a dummy signal spread that matches interferograms' dimensions (perfect 100).")
    output_filename = output_dir + "/" + output_filename
    [xdata, ydata, zdata] = netcdf_read_write.read_netcdf4_xyz(intfs[0])
    a = np.add(np.zeros(np.shape(zdata)), 100)
    rwr.produce_output_netcdf(xdata, ydata, a, 'Percentage', output_filename, dtype=np.float32)
    rwr.produce_output_plot(output_filename, 'Signal Spread', output_dir + '/signalspread.png',
                            'Percentage of coherence (out of ' + str(len(intfs)) + ' images)', aspect=1.2)
    return
def drive_coseismic_stack_gmtsar(intf_files, wavelength, rowref, colref, outdir):
    intf_tuple = rmd.reader(intf_files)
    average_coseismic = get_avg_coseismic(intf_tuple, rowref, colref, wavelength)
    rwr.produce_output_netcdf(intf_tuple.xvalues, intf_tuple.yvalues, average_coseismic, 'mm',
                              outdir + '/coseismic.grd')
    rwr.produce_output_plot(outdir + '/coseismic.grd', 'LOS Displacement', outdir + '/coseismic.png',
                            'displacement (mm)')
    return
def drive_velocity_simple_stack(intfs, wavelength, rowref, colref, outdir):
    signal_spread_data = rwr.read_grd(outdir + "/signalspread.nc")
    intf_tuple = rmd.reader(intfs)
    # The last argument is the signal threshold (< 100%); a lower threshold allows more data into the stack.
    velocities, x, y = velocity_simple_stack(intf_tuple, wavelength, rowref, colref, signal_spread_data, 25)
    rwr.produce_output_netcdf(x, y, velocities, 'mm/yr', outdir + '/velo_simple_stack.grd')
    rwr.produce_output_plot(outdir + '/velo_simple_stack.grd', 'LOS Velocity', outdir + '/velo_simple_stack.png',
                            'velocity (mm/yr)')
    return
def test_read_write(filename):
    # A TEST OF READ/WRITE FUNCTIONS
    # Step 1: Read ISCE interferogram as phase
    # Step 2: Write it as ISCE format data
    # Step 3: Read ISCE interferogram again
    # Step 4: Write as .grd
    # Step 5: Make plot.
    # RESULT: PRETTY GOOD! There is an up/down flip between the ISCE and GMTSAR conventions,
    # but it might never really be a problem.

    # Step 1: read phase
    slc = isce_read_write.read_phase_data(filename)
    print("Shape of slc is ", np.shape(slc))
    isce_read_write.plot_complex_data(filename, aspect=1 / 10, outname="original.png")

    # Step 2: write ISCE data file
    ny, nx = np.shape(slc)
    # dtype = 'FLOAT';
    isce_written = "isce_written_phase.phase"
    isce_read_write.write_isce_data(slc, nx, ny, dtype="FLOAT", filename=isce_written)

    # Step 3: read phase again.
    phase = isce_read_write.read_scalar_data(isce_written + ".vrt")
    print("Shape of phase is ", np.shape(phase))
    isce_read_write.plot_scalar_data("isce_written_phase.phase", colormap='rainbow', aspect=1 / 10,
                                     outname="isce_written_phase.png")

    # Step 4: write that phase as grd.
    netcdfname = "netcdf_written_phase.grd"
    xdata = np.arange(0, nx)
    ydata = np.arange(0, ny)
    # THIS SEEMS TO BE NECESSARY TO SWITCH BETWEEN CONVENTIONS. GRD PLOTS ARE UPSIDE DOWN FROM ISCE.
    phase = np.flipud(phase)
    netcdf_read_write.produce_output_netcdf(xdata, ydata, phase, "radians", netcdfname)

    # Step 5: look at what's inside.
    netcdf_read_write.produce_output_plot(netcdfname, "phase", "grdstyle_phase.png", "radians", aspect=1 / 10)
    return
def make_vels_from_ts_grids(ts_dir, geocoded=False):
    if geocoded:
        filelist = glob.glob(ts_dir + "/publish/*_ll.grd")
        mydata = rmd.reader_from_ts(filelist, "lon", "lat", "z")  # put these if using geocoded values
    else:
        filelist = glob.glob(ts_dir + "/????????.grd")
        mydata = rmd.reader_from_ts(filelist)
    vel = nsbas.Velocities_from_TS(mydata)
    rwr.produce_output_netcdf(mydata.xvalues, mydata.yvalues, vel, 'mm/yr', ts_dir + '/velo_nsbas.grd')
    rwr.produce_output_plot(ts_dir + '/velo_nsbas.grd', 'LOS Velocity', ts_dir + '/velo_nsbas.png',
                            'velocity (mm/yr)')
    return
def drive_coseismic_stack_isce(intf_files, wavelength, rowref, colref, outdir):
    intf_tuple = rmd.reader_isce(intf_files)
    average_coseismic = get_avg_coseismic(intf_tuple, rowref, colref, wavelength)
    rwr.produce_output_netcdf(intf_tuple.xvalues, intf_tuple.yvalues, average_coseismic, 'mm',
                              outdir + '/coseismic.grd')
    rwr.produce_output_plot(outdir + '/coseismic.grd', 'LOS Displacement', outdir + '/coseismic.png',
                            'displacement (mm)', aspect=1 / 8, invert_yaxis=False, vmin=-50, vmax=200)
    return
def make_corrections_isce(config_params):
    if config_params.startstage > 1:  # if we're starting after, we don't do this.
        return
    if config_params.endstage < 1:  # if we're ending before, we don't do this.
        return
    print("Start Stage 1 - optional atm corrections")

    # For ISCE, we might want to re-make all the interferograms and unwrap them in custom fashion.
    # This operates on files in the Igram directory, no need to move directories yourself.
    if config_params.solve_unwrap_errors:
        unwrapping_isce_custom.main_function(config_params.rlks, config_params.alks, config_params.filt,
                                             config_params.xbounds, config_params.ybounds,
                                             config_params.cor_cutoff_mask)

    # WE ALSO MAKE THE SIGNAL SPREAD FOR FULL IMAGES
    cor_value = 0.5
    filepathslist = glob.glob("../Igrams/????????_????????/filt*.cor")  # *** This may change
    cor_data = readmytupledata.reader_isce(filepathslist)
    a = stack_corr.stack_corr(cor_data, cor_value)
    rwr.produce_output_netcdf(cor_data.xvalues, cor_data.yvalues, a, 'Percentage',
                              config_params.ts_output_dir + '/signalspread_full.nc')
    rwr.produce_output_plot(config_params.ts_output_dir + '/signalspread_full.nc',
                            'Signal Spread above cor=' + str(cor_value),
                            config_params.ts_output_dir + '/signalspread_full.png',
                            'Percentage of coherence', aspect=1 / 4, invert_yaxis=False)
    print("End Stage 1 - optional atm corrections\n")
    return
# NOTE: this snippet begins mid-function. The header and the initializations of 'store' and
# 'ref_values' below are assumed, reconstructed from the call phase_ref(d, 621, 32) in the
# __main__ block; only the loops and the return were present in the original snippet.
def phase_ref(mytuple, yvalue, xvalue):
    store = np.copy(mytuple.zvalues)   # assumed: 3-D stack of unwrapped-phase images
    ref_values = []                    # assumed: reference-pixel value for each interferogram
    for f in range(len(store)):
        ref_values.append(store[f, yvalue, xvalue])
    print(len(ref_values))
    i, j, f = 0, 0, 0
    while f < len(mytuple.zvalues):
        store[f, i, j] = store[f, i, j] - ref_values[f]
        j += 1
        if j == len(mytuple.xvalues):
            j = 0
            i += 1
            if i == len(mytuple.yvalues):
                i = 0
                print('Referencing phases in file ' + str(f + 1) + ' out of ' + str(len(mytuple.zvalues)))
                f += 1
    return store


if __name__ == "__main__":
    myfiles = glob.glob("intf_all_remote/???????_???????/unwrap.grd")
    d = rmd.reader(myfiles)
    store = phase_ref(d, 621, 32)
    for i in range(len(myfiles)):
        print('Dealing with file ' + str(i + 1))
        temp = myfiles[i].split('/')[-1]
        stem = myfiles[i][0:-len(temp)]
        rwr.produce_output_netcdf(d.xvalues, d.yvalues, store[i], 'Radians', stem + 'unwrap_ref.grd')
        rwr.flip_if_necessary(stem + 'unwrap_ref.grd')
        rwr.produce_output_plot(stem + 'unwrap_ref.grd', 'Referenced Unwrapped Phase',
                                stem + 'unwrap_ref.png', 'unwrapped phase')
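# A vectorized sketch of the referencing step above: subtract each interferogram's value at the
# reference pixel (yvalue, xvalue) from its whole image. phase_ref_vectorized is a hypothetical
# helper; it assumes the stack is a 3-D numpy array shaped (n_files, ny, nx), as the indexing
# store[f, i, j] above implies.
import numpy as np

def phase_ref_vectorized(zvalues, yvalue, xvalue):
    store = np.array(zvalues)
    ref_values = store[:, yvalue, xvalue]       # one reference value per interferogram
    return store - ref_values[:, None, None]    # broadcast the subtraction over each image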
if __name__ == "__main__":
    (myfiles_no_ramp, remove_ramp_flag, wls_flag, myfiles_phase, manual_remove, signal_spread_file,
     wavelength, nsbas_good_num, smoothing, outfile) = configure()
    datatuple, signal_spread_data, dates, date_pairs, coherence_cube = inputs(
        myfiles_no_ramp, remove_ramp_flag, myfiles_phase, signal_spread_file, manual_remove, 15, wls_flag)
    vel = compute(coherence_cube, datatuple, nsbas_good_num, signal_spread_data, dates, date_pairs,
                  smoothing, wavelength, outfile, wls_flag)
    rwr.produce_output_netcdf(datatuple.xvalues, datatuple.yvalues, vel, 'velocity', outfile)
    rwr.flip_if_necessary(outfile)
    rwr.produce_output_plot(outfile, 'Reasonable WNSBAS - smoothing factor: ' + str(smoothing),
                            "Stacking/NSBAS/velocity_weighted50NSBAS_reasonable_smooth1.png",
                            'velocity in mm/yr')

    # x, y, vel = rwr.read_grd_xyz(outfile)
    # signal = rwr.read_grd(signal_spread_file)
    # updated_vel = np.zeros((np.shape(vel)))
    # i, j, c = 0, 0, 0
    # for v in np.nditer(vel):
    #     print(c)
    #     if signal[i, j] < 65 and signal[i, j] > 50:
    #         updated_vel[i, j] = np.nan
    #     else:
    #         updated_vel[i, j] = vel[i, j]
    #     j += 1
    #     c += 1
    #     if j == len(x):
def outputs(xdata, ydata, number_of_datas, zdim, vel, out_dir):
    netcdf_read_write.produce_output_netcdf(xdata, ydata, number_of_datas, 'coherent_intfs',
                                            out_dir + '/number_of_datas.grd')
    netcdf_read_write.flip_if_necessary(out_dir + '/number_of_datas.grd')
    netcdf_read_write.produce_output_plot(out_dir + '/number_of_datas.grd',
                                          "Number of Coherent Intfs (Total = " + str(zdim) + ")",
                                          out_dir + '/number_of_coherent_intfs.eps', 'intfs')
    geocode(out_dir + '/number_of_datas.grd', out_dir)

    netcdf_read_write.produce_output_netcdf(xdata, ydata, vel, 'mm/yr', out_dir + '/vel.grd')
    netcdf_read_write.flip_if_necessary(out_dir + '/vel.grd')
    geocode(out_dir + '/vel.grd', out_dir)

    # Visualizing the velocity field in a few different ways.
    zdata2 = np.reshape(vel, [len(xdata) * len(ydata), 1])
    zdata2 = sentinel_utilities.remove_nans_array(zdata2)

    plt.figure()
    plt.hist(zdata2, bins=80)
    plt.gca().set_yscale('log')
    plt.title('Pixels by Velocity: mean=%.2fmm/yr, sdev=%.2fmm/yr' % (np.mean(zdata2), np.std(zdata2)))
    plt.ylabel('Number of Pixels')
    plt.xlabel('LOS velocity (mm/yr)')
    plt.grid('on')
    plt.savefig(out_dir + '/velocity_hist_log.png')
    plt.close()

    plt.figure()
    plt.gca().set_yscale('linear')
    plt.title('Pixels by Velocity: mean=%.2fmm/yr, sdev=%.2fmm/yr' % (np.mean(zdata2), np.std(zdata2)))
    plt.hist(zdata2, bins=80)
    plt.ylabel('Number of Pixels')
    plt.xlabel('LOS velocity (mm/yr)')
    plt.grid('on')
    plt.savefig(out_dir + '/velocity_hist_lin.png')
    plt.close()

    plt.figure(figsize=(8, 10))
    plt.imshow(vel, aspect=0.5, cmap='jet', vmin=-30, vmax=30)
    plt.gca().invert_yaxis()
    plt.gca().invert_xaxis()
    plt.gca().get_xaxis().set_ticks([])
    plt.gca().get_yaxis().set_ticks([])
    plt.title("Velocity")
    plt.gca().set_xlabel("Range", fontsize=16)
    plt.gca().set_ylabel("Azimuth", fontsize=16)
    cb = plt.colorbar()
    cb.set_label("mm/yr", size=16)
    plt.savefig(out_dir + "/vel_cutoff.png")
    plt.close()

    plt.figure(figsize=(8, 10))
    plt.imshow(vel, aspect=0.5, cmap='jet', vmin=-150, vmax=150)
    plt.gca().invert_yaxis()
    plt.gca().invert_xaxis()
    plt.gca().get_xaxis().set_ticks([])
    plt.gca().get_yaxis().set_ticks([])
    plt.title("Velocity")
    plt.gca().set_xlabel("Range", fontsize=16)
    plt.gca().set_ylabel("Azimuth", fontsize=16)
    cb = plt.colorbar()
    cb.set_label("mm/yr", size=16)
    plt.savefig(out_dir + "/vel.png")
    plt.close()
    return
# NOTE: this snippet begins mid-script, inside a loop over interferogram directories; the loop
# header and the definitions of 'content', 'content_1', and 'model' are not shown here.
    content_1.append('intf_all_remote/' + content[i] + '/unwrap.grd')
    m1, m2, m3 = plane_fitter(content_1[i], content_1[i].replace('unwrap', 'unwrap_model'))
    model.append(content_1[i].replace('unwrap', 'unwrap_model'))
    remove_plane(content_1[i], model[i], model[i].replace('model', 'no_ramp'), m1, m2, m3)
    temp1 = ['intf_all_remote/' + content[i] + '/unwrap_no_ramp.grd']
    d = rmd.reader(temp1)
    store = phr.phase_ref(d, 621, 32)
    temp = temp1[0].split('/')[-1]
    stem = temp1[0][0:-len(temp)]
    rwr.produce_output_netcdf(d.xvalues, d.yvalues, store[0], 'Radians', stem + 'unwrap_ref_corrected.grd')
    rwr.flip_if_necessary(stem + 'unwrap_ref_corrected.grd')
    rwr.produce_output_plot(stem + 'unwrap_ref_corrected.grd', 'Referenced and Corrected Unwrapped Phase',
                            stem + 'unwrap_ref_corrected.png', 'unwrapped phase')
    print('Done with file ' + str(i + 1))

# if __name__ == "__main__":
#     grdname = "intf_all_remote/2018281_2018305/unwrap.grd"
#     remove_trend2d(grdname, 4);
#     remove_trend2d(grdname, 3);
#     remove_trend2d(grdname, 6);
#     outfile = "classic_unwrapped_N4_good.grd"
#     rwr.produce_output_plot(outfile, 'Ramp removed', 'Ramp_remove.png', 'phase')
#
#     # BASH Version
#     # Define name and number of coefficients in planar fit.
#     grdname="unwrap.grd";
#     order="4"
if __name__ == "__main__":
    myfiles = glob.glob("intf_all_remote/???????_???????/corr.grd")
    mytuple = rmd.reader(myfiles)
    a = stack_corr(mytuple, 0.1)
    rwr.produce_output_netcdf(mytuple.xvalues, mytuple.yvalues, a, 'Percentage', 'signalspread.nc')
    rwr.produce_output_plot('signalspread.nc', 'Signal Spread', 'signalspread.png', 'Percentage of coherence')
# for x in range(len(xfinal_ratioed)):
#     pixel_box = z[(yfinal_ratioed[x]-50):(yfinal_ratioed[x]+51), (xfinal_ratioed[x]-50):(xfinal_ratioed[x]+51)]
#     print(np.shape(pixel_box))
#     for v in np.nditer(pixel_box):
#         if np.isnan(v) == False:
#             counter.append(v)
#     print(len(counter))

x1, y1, z1 = rwr.read_grd_xyz('signalspread_please_test.nc')
x2, y2, z2 = rwr.read_grd_xyz(filenames[-2][0])
thing1 = z1[1200:1424, 400:660]
thing2 = z2[1200:1424, 400:660]
rwr.produce_output_netcdf(x1[400:660], y1[1200:1424], thing1, 'signal', 'possible_fault_info.grd')
rwr.flip_if_necessary('possible_fault_info.grd')
rwr.produce_output_plot('possible_fault_info.grd', '', 'possible_fault_info.png', '%')
rwr.produce_output_netcdf(x2[400:660], y2[1200:1424], thing2, 'velocity', 'possible_fault_info_vel.grd')
rwr.flip_if_necessary('possible_fault_info_vel.grd')

[x, y] = np.meshgrid(x1[400:660], y1[1200:1424])
[x_, y_] = np.meshgrid(x2[400:660], y2[1200:1424])
fig = plt.figure(figsize=(18, 10))
ax1 = plt.subplot(121)
image1 = ax1.contourf(x, y, thing1, cmap='jet', vmin=np.nanmin(thing1), vmax=np.nanmax(thing1))
# NOTE: this snippet begins mid-function; the header below is reconstructed from the calls
# stack_corr(mytuple, cutoff) elsewhere in this file.
def stack_corr(mytuple, cutoff):
    a = np.zeros((len(mytuple.yvalues), len(mytuple.xvalues)))
    # Count, pixel by pixel, how many files meet the coherence cutoff.
    i, j = 0, 0
    for z in np.nditer(mytuple.zvalues):
        if z >= cutoff:
            a[i, j] = a[i, j] + 1
        j += 1
        if j == len(mytuple.xvalues):
            j = 0
            i += 1
            if i == len(mytuple.yvalues):
                i = 0
    # Convert the counts into a percentage of the total number of files.
    i, j = 0, 0
    for n in np.nditer(a):
        a[i, j] = (a[i, j] / (len(mytuple.filepaths))) * 100
        j += 1
        if j == len(mytuple.xvalues):
            j = 0
            i += 1
            if i == len(mytuple.yvalues):
                i = 0
    return a


if __name__ == "__main__":
    myfiles = glob.glob("intf_all_remote/???????_???????/corr.grd")
    mytuple = rmd.reader(myfiles)
    a = stack_corr(mytuple, 0.1)
    rwr.produce_output_netcdf(mytuple.xvalues, mytuple.yvalues, a, 'Percentage', 'signalspread_please_test.nc')
    rwr.flip_if_necessary('signalspread_please_test.nc')
    rwr.produce_output_plot('signalspread_please_test.nc', 'Signal Spread', 'signalspread_please_test.png',
                            'Percentage of coherence (out of 288 images)')
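# A vectorized sketch of the same signal-spread calculation: for each pixel, the percentage of
# files whose coherence meets the cutoff. stack_corr_vectorized is a hypothetical alternative,
# not part of this repository; it assumes mytuple.zvalues stacks into an array of shape
# (n_files, ny, nx). NaN coherence values fail the comparison, just as in the loop above.
import numpy as np

def stack_corr_vectorized(mytuple, cutoff):
    z = np.array(mytuple.zvalues)
    counts = np.sum(z >= cutoff, axis=0)             # coherent interferograms per pixel
    return 100.0 * counts / len(mytuple.filepaths)   # percentage of the full stack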
# NOTE: this snippet begins mid-function, inside the per-pixel loop of velocity_simple_stack();
# the 'velocities[i,' prefix on the first assignment is reconstructed from the surrounding indexing.
        velocities[i, j] = (wavelength / (4 * (np.pi))) * ((np.sum(phases)) / (np.sum(times)))
        phases, times = [], []
        c += 1
        print('Done with ' + str(c) + ' out of ' + str(len(mytuple.xvalues) * len(mytuple.yvalues)) + ' pixels')
        f = 0
        j += 1
        if j == len(mytuple.xvalues):
            j = 0
            i += 1
            if i == len(mytuple.yvalues):
                i = 0
    return velocities, mytuple.xvalues, mytuple.yvalues


if __name__ == "__main__":
    ramps, outfile_stem, myfiles, myfiles_no_ramp, remove_ramp = configure()
    myfiles_new = inputs(ramps, myfiles, myfiles_no_ramp, remove_ramp)
    velocities, x, y = velocity_simple_stack(myfiles_new, 56, 1, 50)
    rwr.produce_output_netcdf(x, y, velocities, 'mm/yr', outfile_stem + 'velo_prof_reasonable50_remastered.grd')
    rwr.flip_if_necessary(outfile_stem + 'velo_prof_reasonable50_remastered.grd')
    rwr.produce_output_plot(outfile_stem + 'velo_prof_reasonable50_remastered.grd',
                            'Velocity Profile Reasonable (15 images removed)',
                            outfile_stem + 'velo_prof_reasonable50_remastered.png', 'velocity (mm/yr)')
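# The conversion above follows the standard InSAR relation, LOS displacement = (wavelength / 4*pi) * phase
# (sign conventions vary between processors), so summing unwrapped phases over the summed time spans gives
# a rate. A small worked sketch of that arithmetic; phase_stack_to_velocity is a hypothetical helper and
# the units are assumed to be wavelength in mm, phase in radians, and time spans in years, giving mm/yr.
import numpy as np

def phase_stack_to_velocity(wavelength_mm, phases_rad, spans_yr):
    # e.g. phase_stack_to_velocity(56, [2.0, 1.5, 2.2], [0.5, 0.4, 0.6]) -> ~16.9 mm/yr
    return (wavelength_mm / (4 * np.pi)) * (np.sum(phases_rad) / np.sum(spans_yr))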