def ss_plot():
    """Plot a gridded sidescan-intensity mosaic over ESRI world imagery.

    Reads easting/northing/texture/sidescan_intensity from the
    ``mosaic_2014_09`` table, resamples the point cloud onto a regular
    0.25 m grid with a nearest-neighbour kd-tree, and displays the result
    on a Basemap mercator map with an ESRI imagery backdrop.

    Relies on module-level names: ``trans`` (pyproj projection used for
    UTM<->lon/lat) and ``cs2cs_args`` (projection spec, e.g. "epsg:26949").
    """
    # --- import data frame and get extents (pandas method) ---
    conn = psycopg2.connect("dbname='reach_4a' user='******' host='localhost' port='9000'")
    try:
        df = pd.read_sql_query('select easting, northing, texture, sidescan_intensity from mosaic_2014_09', con=conn)
    finally:
        # close the connection even when the query raises (was leaked on error)
        conn.close()
    minE = df['easting'].min()
    maxE = df['easting'].max()
    minN = df['northing'].min()
    maxN = df['northing'].max()
    print('Done Importing Data from Database')

    # --- create grid for the plot ---
    res = 0.25  # grid resolution [m]
    grid_x, grid_y = np.meshgrid(
        np.arange(np.floor(minE), np.ceil(maxE), res),
        np.arange(np.floor(minN), np.ceil(maxN), res))
    grid_lon, grid_lat = trans(grid_x, grid_y, inverse=True)

    # --- re-sampling procedure ---
    m_lon, m_lat = trans(df['easting'].values.flatten(),
                         df['northing'].values.flatten(), inverse=True)
    orig_def = geometry.SwathDefinition(lons=m_lon, lats=m_lat)
    target_def = geometry.SwathDefinition(lons=grid_lon.flatten(), lats=grid_lat.flatten())
    print('Now Resampling...')
    result = kd_tree.resample_nearest(orig_def,
                                      df['sidescan_intensity'].values.flatten(),
                                      target_def, radius_of_influence=1,
                                      fill_value=None, nprocs=cpu_count())
    print('Done Resampling!!!')

    # --- format sidescan intensities grid for plotting ---
    gridded_result = np.squeeze(np.reshape(result, np.shape(grid_lon)))
    gridded_result[np.isinf(gridded_result)] = np.nan
    gridded_result[gridded_result <= 0] = np.nan  # intensities must be positive
    grid2plot = np.ma.masked_invalid(gridded_result)
    print('Now mapping...')

    # --- create figure ---
    fig = plt.figure(frameon=True)
    ax = plt.subplot(1, 1, 1)
    map = Basemap(projection='merc', epsg=cs2cs_args.split(':')[1],
                  llcrnrlon=np.min(grid_lon) - 0.0009, llcrnrlat=np.min(grid_lat) - 0.0009,
                  urcrnrlon=np.max(grid_lon) + 0.0009, urcrnrlat=np.max(grid_lat) + 0.0009)
    gx, gy = map.projtran(grid_lon, grid_lat)
    map.arcgisimage(server='http://server.arcgisonline.com/ArcGIS',
                    service='World_Imagery', xpixels=1000, ypixels=None, dpi=1200)
    im = map.pcolormesh(gx, gy, grid2plot, cmap='gray', vmin=0.1, vmax=30)
    divider = make_axes_locatable(ax)
    cax = divider.append_axes("right", size="5%", pad=0.1)
    cbr = plt.colorbar(im, cax=cax)
    cbr.set_label('Sidescan Intensity [dBw]', size=8)
    for t in cbr.ax.get_yticklabels():
        t.set_fontsize(8)
    plt.show()
def print_contour_map(cs2cs_args, humlon, humlat, glon, glat, datm, sonpath, p, vmin, vmax):
    """Render a gridded texture-lengthscale map over ESRI imagery and save it.

    Parameters
    ----------
    cs2cs_args : str
        Projection spec such as "epsg:26949"; the EPSG code after ':' is
        handed to Basemap.
    humlon, humlat : array
        Longitude/latitude of the point cloud, used to frame the map extent.
    glon, glat : array
        Longitude/latitude of the grid nodes.
    datm : masked array
        Gridded data to draw.
    sonpath : str
        Output directory handed to ``custom_save2``.
    p : int
        Chunk index appended to the output file name ('class_map_imagery'+p).
    vmin, vmax : float
        Colour limits for the pcolormesh.
    """
    print("drawing and printing map ...")
    fig = plt.figure(frameon=False)
    map = Basemap(projection='merc',
                  epsg=cs2cs_args.split(':')[1],  # e.g. 26949
                  resolution='i',
                  llcrnrlon=np.min(humlon) - 0.001, llcrnrlat=np.min(humlat) - 0.001,
                  urcrnrlon=np.max(humlon) + 0.001, urcrnrlat=np.max(humlat) + 0.001)
    gx, gy = map.projtran(glon, glat)
    # full-bleed axes: no frame, no ticks
    ax = plt.Axes(fig, [0., 0., 1., 1.])
    ax.set_axis_off()
    fig.add_axes(ax)
    try:
        map.arcgisimage(server='http://server.arcgisonline.com/ArcGIS',
                        service='World_Imagery', xpixels=1000, ypixels=None, dpi=300)
    except Exception:
        # older ArcGIS servers expose the imagery under a different service name
        map.arcgisimage(server='http://server.arcgisonline.com/ArcGIS',
                        service='ESRI_Imagery_World_2D', xpixels=1000, ypixels=None, dpi=300)
    if datm.size > 25000000:
        # BUG FIX: the original printed this message but plotted the full-size
        # matrix, while the *small* case was decimated (with an accidental
        # 'pink' cmap).  The commented-out contourf calls in the original show
        # the intent: decimate only the large case, same colormap throughout.
        print("matrix size > 25,000,000 - decimating by factor of 5 for display")
        map.pcolormesh(gx[::5, ::5], gy[::5, ::5], datm[::5, ::5],
                       cmap='YlOrRd', vmin=vmin, vmax=vmax)
    else:
        map.pcolormesh(gx, gy, datm, cmap='YlOrRd', vmin=vmin, vmax=vmax)
    custom_save2(sonpath, 'class_map_imagery' + str(p))
    del fig
def ss_plot():
    """Plot gridded May-2012 sidescan intensity (joined against the 1 m
    multibeam grid) over ESRI world imagery and save the figure to disk.

    Relies on module-level names: ``trans`` (pyproj projection) and
    ``cs2cs_args`` (projection spec, e.g. "epsg:26949").
    """
    # --- import data frame and get extents (pandas method) ---
    db_connect = "dbname='reach_4a' user='******' host='localhost' port='9000'"
    conn = psycopg2.connect(db_connect)
    try:
        df = pd.read_sql_query('SELECT * from mb_may_2012_1m tt inner join ( SELECT s.easting, s.northing, s.texture, s.sidescan_intensity FROM ss_2012_05 s) ss on tt.easting=ss.easting and tt.northing=ss.northing;', con=conn)
    finally:
        # close the connection even when the query raises (was leaked on error)
        conn.close()
    # The join produces duplicate 'easting'/'northing' columns, so
    # df['easting'] selects a DataFrame.  Reduce over the raw values instead
    # of the fragile positional `.min()[0]` access (which breaks if the
    # selection ever yields a single Series).
    minE = np.min(df['easting'].values)
    maxE = np.max(df['easting'].values)
    minN = np.min(df['northing'].values)
    maxN = np.max(df['northing'].values)
    print('Done Importing Data from Database')

    # --- create grid for the plot ---
    res = 1  # grid resolution [m]
    grid_x, grid_y = np.meshgrid(
        np.arange(np.floor(minE), np.ceil(maxE), res),
        np.arange(np.floor(minN), np.ceil(maxN), res))
    grid_lon, grid_lat = trans(grid_x, grid_y, inverse=True)

    # --- re-sampling procedure ---
    m_lon, m_lat = trans(df['easting'].values.flatten(),
                         df['northing'].values.flatten(), inverse=True)
    orig_def = geometry.SwathDefinition(lons=m_lon, lats=m_lat)
    target_def = geometry.SwathDefinition(lons=grid_lon.flatten(), lats=grid_lat.flatten())
    print('Now Resampling...')
    result = kd_tree.resample_nearest(orig_def,
                                      df['sidescan_intensity'].values.flatten(),
                                      target_def, radius_of_influence=1,
                                      fill_value=None, nprocs=cpu_count())
    print('Done Resampling!!!')

    # --- format sidescan intensities grid for plotting ---
    gridded_result = np.squeeze(np.reshape(result, np.shape(grid_lon)))
    gridded_result[np.isinf(gridded_result)] = np.nan
    gridded_result[gridded_result <= 0] = np.nan  # intensities must be positive
    grid2plot = np.ma.masked_invalid(gridded_result)
    print('Now mapping...')

    # --- create figure ---
    fig = plt.figure(frameon=True)
    ax = plt.subplot(1, 1, 1)
    map = Basemap(projection='merc', epsg=cs2cs_args.split(':')[1],
                  llcrnrlon=np.min(grid_lon) - 0.0009, llcrnrlat=np.min(grid_lat) - 0.0009,
                  urcrnrlon=np.max(grid_lon) + 0.0009, urcrnrlat=np.max(grid_lat) + 0.0009)
    gx, gy = map.projtran(grid_lon, grid_lat)
    map.arcgisimage(server='http://server.arcgisonline.com/ArcGIS',
                    service='World_Imagery', xpixels=1000, ypixels=None, dpi=1200)
    im = map.pcolormesh(gx, gy, grid2plot, cmap='gray', vmin=0.1, vmax=30)
    divider = make_axes_locatable(ax)
    cax = divider.append_axes("right", size="5%", pad=0.1)
    cbr = plt.colorbar(im, cax=cax)
    cbr.set_label('Sidescan Intensity [dBw]', size=8)
    for t in cbr.ax.get_yticklabels():
        t.set_fontsize(8)
    plt.savefig(r'C:\workspace\Texture_Classification\output\May2012_1m_sidescan_intensity.png')
def print_contour_map(cs2cs_args, humlon, humlat, glon, glat, datm, sonpath, p, vmin, vmax):
    """Render a gridded texture-lengthscale map over ESRI imagery and save it.

    Parameters
    ----------
    cs2cs_args : str
        Projection spec such as "epsg:26949"; the EPSG code after ':' is
        handed to Basemap.
    humlon, humlat : array
        Longitude/latitude of the point cloud, used to frame the map extent.
    glon, glat : array
        Longitude/latitude of the grid nodes.
    datm : masked array
        Gridded data to draw.
    sonpath : str
        Output directory handed to ``custom_save2``.
    p : int
        Chunk index appended to the output file name ('class_map_imagery'+p).
    vmin, vmax : float
        Colour limits for the pcolormesh.
    """
    print("drawing and printing map ...")
    fig = plt.figure(frameon=False)
    map = Basemap(projection='merc',
                  epsg=cs2cs_args.split(':')[1],  # e.g. 26949
                  resolution='i',
                  llcrnrlon=np.min(humlon) - 0.001, llcrnrlat=np.min(humlat) - 0.001,
                  urcrnrlon=np.max(humlon) + 0.001, urcrnrlat=np.max(humlat) + 0.001)
    gx, gy = map.projtran(glon, glat)
    # full-bleed axes: no frame, no ticks
    ax = plt.Axes(fig, [0., 0., 1., 1.])
    ax.set_axis_off()
    fig.add_axes(ax)
    try:
        map.arcgisimage(server='http://server.arcgisonline.com/ArcGIS',
                        service='World_Imagery', xpixels=1000, ypixels=None, dpi=300)
    except Exception:
        # older ArcGIS servers expose the imagery under a different service name
        map.arcgisimage(server='http://server.arcgisonline.com/ArcGIS',
                        service='ESRI_Imagery_World_2D', xpixels=1000, ypixels=None, dpi=300)
    if datm.size > 25000000:
        # BUG FIX: the original printed this message but plotted the full-size
        # matrix, while the *small* case was decimated (with an accidental
        # 'pink' cmap).  The commented-out contourf calls in the original show
        # the intent: decimate only the large case, same colormap throughout.
        print("matrix size > 25,000,000 - decimating by factor of 5 for display")
        map.pcolormesh(gx[::5, ::5], gy[::5, ::5], datm[::5, ::5],
                       cmap='YlOrRd', vmin=vmin, vmax=vmax)
    else:
        map.pcolormesh(gx, gy, datm, cmap='YlOrRd', vmin=vmin, vmax=vmax)
    custom_save2(sonpath, 'class_map_imagery' + str(p))
    del fig
# NOTE(review): top-level plotting script for reach R02028.  It depends on
# module-level names not defined in this chunk: r28_lon/r28_lat (coordinate
# arrays), R02028 (classification grid), ss_level (contour levels) and
# stats_28 (per-polygon point counts) -- confirm against the full file.
print 'Now plotting R02028 Acoutic sediment classifications...'
#Begin the plot
cs2cs_args = "epsg:26949"
fig = plt.figure(figsize=(15,12))
# main map axes occupy 4 of the 5 rows of a 5x2 grid
ax = plt.subplot2grid((5,2),(0, 0),rowspan=4)
ax.set_title('R02028 \n May 2014 Acousic Sediment Classifications')
m = Basemap(projection='merc', epsg=cs2cs_args.split(':')[1], llcrnrlon=np.nanmin(r28_lon)-0.0009, llcrnrlat=np.nanmin(r28_lat)-0.0006, urcrnrlon=np.nanmax(r28_lon)+0.0009, urcrnrlat=np.nanmax(r28_lat)+0.0006)
#m.wmsimage(server='http://grandcanyon.usgs.gov/arcgis/services/Imagery/ColoradoRiverImageryExplorer/MapServer/WmsServer?', layers=['3'], xpixels=1000)
# project lon/lat to map coordinates and draw the classification surface
x,y = m.projtran(r28_lon, r28_lat)
im = m.contourf(x,y,R02028.T, cmap='Greys_r',levels=ss_level)
divider = make_axes_locatable(ax)
cax = divider.append_axes("right", size="5%", pad=0.1)
cbr = plt.colorbar(im, cax=cax)
# sediment-class polygons (3 m buffered shapefile, geographic coordinates);
# drawbounds=False because patches are collected and drawn manually below
m.readshapefile(r"C:\workspace\Reach_4a\Multibeam\mb_sed_class\output\shapefiles\may2014_3m_buff_geo","layer",drawbounds = False)
#sand, sand/gravel, gravel, sand/rock, rock
s_patch, sg_patch, g_patch, sr_patch, r_patch, = [],[],[],[],[]
# only keep polygons with at least half the maximum per-polygon point count
bound = max(stat['count'] for stat in stats_28)/2
for info, shape in zip(m.layer_info, m.layer):
    # sort each shapefile polygon into a patch list by substrate class
    if info['substrate'] == 'sand' and info['count_28'] > bound:
        s_patch.append(Polygon(np.asarray(shape),True))
    if info['substrate'] == 'sand/gravel' and info['count_28'] > bound:
        sg_patch.append(Polygon(np.asarray(shape),True))
def plot_gmm_crf_images(mask, y_pred_gmm, y_prob_gmm, y_pred_crf, y_prob_crf, bs, bed, cmap, prefix):
    """Plot GMM and CRF results side by side over ESRI imagery (requires basemap).

    Produces two 2-panel figures:
      * ``<prefix>_GMM_CRF_map_image.png``  -- hard classifications
      * ``<prefix>_GMM_CRF_prob_image.png`` -- posterior probabilities

    Parameters
    ----------
    mask : array
        1 where pixels are no-data; those cells are blanked in all rasters.
    y_pred_gmm, y_prob_gmm, y_pred_crf, y_prob_crf : array
        Class / probability rasters (modified in place: masked cells -> NaN).
    bs : sequence of dict
        bs[0] supplies the grid bounds ('xmin','xmax','ymin','ymax'),
        'gridres' and 'crs' used to reconstruct the grid coordinates.
    bed : dict
        Provides 'Xlon'/'Ylat' (bed observation coords, used to frame the
        map) and 'labels' (class names for the colorbar).
    cmap : sequence
        List of colours; turned into a ListedColormap for the class panels.
    prefix : str
        Output file-name prefix.
    """
    # blank out masked (no-data) pixels in all four rasters (in place)
    for arr in (y_prob_gmm, y_pred_gmm, y_prob_crf, y_pred_crf):
        arr[mask == 1] = np.nan

    base = prefix + '_'
    cmap = colors.ListedColormap(cmap)

    X = bed['Xlon']
    Y = bed['Ylat']
    n = 0.0075  # margin around the data extent [deg]
    nlab = len(bed['labels'])

    def _panel(fig, subplot, data, cm, vmin, vmax, title):
        """Draw one imagery-backed map panel; return (image handle, axes).

        Refactors the four near-identical inline panel blocks of the
        original into a single helper.
        """
        ax = fig.add_subplot(subplot)
        map = Basemap(projection='merc', epsg='4326', resolution='i',
                      llcrnrlon=np.min(X) - n, llcrnrlat=np.min(Y) - n,
                      urcrnrlon=np.max(X) + n, urcrnrlat=np.max(Y) + n)
        try:
            map.arcgisimage(server='http://server.arcgisonline.com/ArcGIS',
                            service='World_Imagery', xpixels=1000, ypixels=None, dpi=300)
        except Exception:
            # fall back to the legacy service name on older ArcGIS servers
            map.arcgisimage(server='http://server.arcgisonline.com/ArcGIS',
                            service='ESRI_Imagery_World_2D', xpixels=1000, ypixels=None, dpi=300)
        # rebuild the raster grid coordinates from the stored bounds/resolution
        x = np.arange(bs[0]['xmin'], bs[0]['xmax'], bs[0]['gridres'])
        y = np.arange(bs[0]['ymin'], bs[0]['ymax'], bs[0]['gridres'])
        trans = pyproj.Proj(init=bs[0]['crs']['init'])
        glon, glat = np.meshgrid(x, y)
        glon, glat = trans(glon, glat, inverse=True)
        mx, my = map.projtran(glon, glat)
        # rasters are stored top-down; flip so north is up on the map
        im = map.pcolormesh(mx, my, np.flipud(data), cmap=cm, vmin=vmin, vmax=vmax)
        parallels = np.arange(np.min(glat), np.max(glat), 0.005)
        map.drawparallels(parallels, labels=[1, 0, 0, 1], color='w',
                          fontsize=3, rotation=45, linewidth=0.25)
        meridians = np.arange(np.min(glon), np.max(glon), 0.005)
        map.drawmeridians(meridians, labels=[1, 0, 0, 1], color='w',
                          fontsize=3, rotation=45, linewidth=0.25)
        plt.title(title, fontsize=4)
        return im, ax

    # --- figure 1: hard classifications -------------------------------------
    fig = plt.figure(frameon=False, dpi=300)
    fig.subplots_adjust(hspace=0.4)
    for subplot, data, title in ((221, y_pred_gmm, 'GMM substrate classification'),
                                 (222, y_pred_crf, 'CRF substrate classification')):
        im, ax1 = _panel(fig, subplot, data, cmap, 0, nlab, title)
        cax = make_axes_locatable(ax1).append_axes("right", size="5%")
        cb = plt.colorbar(im, cax=cax)
        cb.set_ticks(.5 + np.arange(nlab + 1))  # tick in the middle of each class band
        cb.ax.set_yticklabels(bed['labels'])
        cb.ax.tick_params(labelsize=4)
        cb.ax.set_label('Substrate Type')
    plt.savefig(base + 'GMM_CRF_map_image.png', dpi=300, bbox_inches='tight')
    plt.close('all')

    # --- figure 2: posterior probabilities ----------------------------------
    fig = plt.figure(frameon=False, dpi=300)
    fig.subplots_adjust(hspace=0.4)
    for subplot, data, title in ((221, y_prob_gmm, 'GMM posterior probability'),
                                 (222, y_prob_crf, 'CRF posterior probability')):
        im, ax1 = _panel(fig, subplot, data, 'RdBu', 0.5, 1.0, title)
        cax = make_axes_locatable(ax1).append_axes("right", size="5%")
        cb = plt.colorbar(im, cax=cax, extend='min')  # probs below 0.5 clip low
        cb.ax.tick_params(labelsize=4)
        cb.ax.set_label('Posterior Prob.')
    plt.savefig(base + 'GMM_CRF_prob_image.png', dpi=300, bbox_inches='tight')
    plt.close('all')
    del fig
def map_texture(humfile, sonpath, cs2cs_args = "epsg:26949", dogrid = 1, res = 0.5, dowrite = 0, mode=3, nn = 64, influence = 1, numstdevs=5):
    '''
    Create plots of the texture lengthscale maps made in PyHum.texture module
    using the algorithm detailed by Buscombe et al. (forthcoming)
    This textural lengthscale is not a direct measure of grain size. Rather, it is a statistical
    representation that integrates over many attributes of bed texture, of which grain size is the most important.
    The technique is a physically based means to identify regions of texture within a sidescan echogram,
    and could provide a basis for objective, automated riverbed sediment classification.

    Syntax
    ----------
    [] = PyHum.map_texture(humfile, sonpath, cs2cs_args, dogrid, res, dowrite, mode, nn, influence, numstdevs)

    Parameters
    ----------
    humfile : str
        path to the .DAT file
    sonpath : str
        path where the *.SON files are
    cs2cs_args : int, *optional* [Default="epsg:26949"]
        arguments to create coordinates in a projected coordinate system
        this argument gets given to pyproj to turn wgs84 (lat/lon) coordinates
        into any projection supported by the proj.4 libraries
    dogrid : float, *optional* [Default=1]
        if 1, textures will be gridded with resolution 'res'.
        Otherwise, point cloud will be plotted
    res : float, *optional* [Default=0.5]
        grid resolution of output gridded texture map
    dowrite: int, *optional* [Default=0]
        if 1, point cloud data from each chunk is written to ascii file
        if 0, processing times are speeded up considerably but point clouds are not available for further analysis
    mode: int, *optional* [Default=3]
        gridding mode. 1 = nearest neighbour
                       2 = inverse weighted nearest neighbour
                       3 = Gaussian weighted nearest neighbour
    nn: int, *optional* [Default=64]
        number of nearest neighbours for gridding (used if mode > 1)
    influence: float, *optional* [Default=1]
        Radius of influence used in gridding. Cut off distance in meters
    numstdevs: int, *optional* [Default = 4]
        Threshold number of standard deviations in texture lengthscale per grid cell up to which to accept

    Returns
    -------
    sonpath+'x_y_class'+str(p)+'.asc' : text file
        contains the point cloud of easting, northing, and texture lengthscales of the pth chunk
    sonpath+'class_GroundOverlay'+str(p)+'.kml': kml file
        contains gridded (or point cloud) texture lengthscale map for importing into google earth
        of the pth chunk
    sonpath+'class_map'+str(p)+'.png' : image
        overlay associated with the kml file
    sonpath+'class_map_imagery'+str(p)+'.png' : png image file
        gridded (or point cloud) texture lengthscale map overlain onto an image pulled from esri image server

    References
    ----------
    .. [1] Buscombe, D., Grams, P.E., and Smith, S.M.C., 2015, Automated riverbed sediment
       classification using low-cost sidescan sonar. Journal of Hydraulic Engineering, accepted
    '''

    # prompt user to supply file if no input file given
    if not humfile:
        print 'An input file is required!!!!!!'
        Tk().withdraw() # we don't want a full GUI, so keep the root window from appearing
        inputfile = askopenfilename(filetypes=[("DAT files","*.DAT")])

    # prompt user to supply directory if no input sonpath is given
    if not sonpath:
        print 'A *.SON directory is required!!!!!!'
        Tk().withdraw() # we don't want a full GUI, so keep the root window from appearing
        sonpath = askdirectory()

    # print given arguments to screen and convert data type where necessary
    if humfile:
        print 'Input file is %s' % (humfile)
    if sonpath:
        print 'Sonar file path is %s' % (sonpath)
    if cs2cs_args:
        print 'cs2cs arguments are %s' % (cs2cs_args)
    if dogrid:
        dogrid = int(dogrid)
        if dogrid==1:
            print "Data will be gridded"
    if res:
        res = np.asarray(res,float)
        print 'Gridding resolution: %s' % (str(res))
    if dowrite:
        dowrite = int(dowrite)
        if dowrite==0:
            # NOTE(review): this message prints on dowrite==0, but point
            # clouds are written only when dowrite==1 -- looks inverted;
            # confirm against upstream PyHum before changing.
            print "Point cloud data will be written to ascii file"
    if mode:
        mode = int(mode)
        print 'Mode for gridding: %s' % (str(mode))
    if nn:
        nn = int(nn)
        print 'Number of nearest neighbours for gridding: %s' % (str(nn))
    if influence:
        influence = int(influence)
        print 'Radius of influence for gridding: %s (m)' % (str(influence))
    if numstdevs:
        numstdevs = int(numstdevs)
        print 'Threshold number of standard deviations in texture lengthscale per grid cell up to which to accept: %s' % (str(numstdevs))

    # projection used throughout to convert easting/northing <-> lon/lat
    trans = pyproj.Proj(init=cs2cs_args)

    # if son path name supplied has no separator at end, put one on
    if sonpath[-1]!=os.sep:
        sonpath = sonpath + os.sep

    base = humfile.split('.DAT') # get base of file name for output
    base = base[0].split(os.sep)[-1]

    # remove underscores, negatives and spaces from basename
    if base.find('_')>-1:
        base = base[:base.find('_')]
    if base.find('-')>-1:
        base = base[:base.find('-')]
    if base.find(' ')>-1:
        base = base[:base.find(' ')]

    # per-survey metadata written by earlier PyHum stages
    meta = loadmat(os.path.normpath(os.path.join(sonpath,base+'meta.mat')))

    esi = np.squeeze(meta['e'])        # eastings
    nsi = np.squeeze(meta['n'])        # northings
    pix_m = np.squeeze(meta['pix_m'])  # pixel size [m]
    dep_m = np.squeeze(meta['dep_m'])  # depth [m]
    c = np.squeeze(meta['c'])          # speed of sound
    dist_m = np.squeeze(meta['dist_m'])
    theta = np.squeeze(meta['heading'])/(180/np.pi)  # heading, degrees -> radians

    # load memory mapped scans
    shape_port = np.squeeze(meta['shape_port'])
    # NOTE(review): comparing an array to '' relies on legacy numpy
    # truthiness semantics -- confirm on the numpy version in use.
    if shape_port!='':
        #port_fp = np.memmap(sonpath+base+'_data_port_l.dat', dtype='float32', mode='r', shape=tuple(shape_port))
        # prefer the radiometrically-corrected file ('lar') if it exists
        if os.path.isfile(os.path.normpath(os.path.join(sonpath,base+'_data_port_lar.dat'))):
            with open(os.path.normpath(os.path.join(sonpath,base+'_data_port_lar.dat')), 'r') as ff:
                port_fp = np.memmap(ff, dtype='float32', mode='r', shape=tuple(shape_port))
        else:
            with open(os.path.normpath(os.path.join(sonpath,base+'_data_port_la.dat')), 'r') as ff:
                port_fp = np.memmap(ff, dtype='float32', mode='r', shape=tuple(shape_port))

    shape_star = np.squeeze(meta['shape_star'])
    if shape_star!='':
        #star_fp = np.memmap(sonpath+base+'_data_star_l.dat', dtype='float32', mode='r', shape=tuple(shape_star))
        if os.path.isfile(os.path.normpath(os.path.join(sonpath,base+'_data_star_lar.dat'))):
            with open(os.path.normpath(os.path.join(sonpath,base+'_data_star_lar.dat')), 'r') as ff:
                star_fp = np.memmap(ff, dtype='float32', mode='r', shape=tuple(shape_star))
        else:
            with open(os.path.normpath(os.path.join(sonpath,base+'_data_star_la.dat')), 'r') as ff:
                star_fp = np.memmap(ff, dtype='float32', mode='r', shape=tuple(shape_star))

    # combined port+starboard array shape
    shape = shape_port.copy()
    shape[1] = shape_port[1] + shape_star[1]

    #class_fp = np.memmap(sonpath+base+'_data_class.dat', dtype='float32', mode='r', shape=tuple(shape))
    with open(os.path.normpath(os.path.join(sonpath,base+'_data_class.dat')), 'r') as ff:
        class_fp = np.memmap(ff, dtype='float32', mode='r', shape=tuple(shape))

    # time-varying-gain correction used to estimate horizontal distance
    tvg = ((8.5*10**-5)+(3/76923)+((8.5*10**-5)/4))*c
    dist_tvg = ((np.tan(np.radians(25)))*dep_m)-(tvg)

    # process one chunk (p) of the classified scan at a time
    for p in xrange(len(class_fp)):

        # per-chunk navigation slices
        e = esi[shape_port[-1]*p:shape_port[-1]*(p+1)]
        n = nsi[shape_port[-1]*p:shape_port[-1]*(p+1)]
        t = theta[shape_port[-1]*p:shape_port[-1]*(p+1)]
        d = dist_tvg[shape_port[-1]*p:shape_port[-1]*(p+1)]
        len_n = len(n)

        merge = class_fp[p].copy()
        merge[np.isnan(merge)] = 0
        # zero out cells that are NaN in the underlying sidescan data
        merge[np.isnan(np.vstack((np.flipud(port_fp[p]),star_fp[p])))] = 0

        extent = shape_port[1]
        # flip the port half so both sides run outward from the track line
        R1 = merge[extent:,:]
        R2 = np.flipud(merge[:extent,:])
        merge = np.vstack((R2,R1))
        del R1, R2

        # get number pixels in scan line
        extent = int(np.shape(merge)[0]/2)

        yvec = np.linspace(pix_m,extent*pix_m,extent)

        # georeference every pixel of the chunk
        X, Y = getXY(e,n,yvec,d,t,extent)

        merge[merge==0] = np.nan

        if len(merge.flatten()) != len(X):
            merge = merge[:,:len_n]

        merge = merge.T.flatten()

        # successively drop NaNs from merge, X and Y, keeping them aligned
        index = np.where(np.logical_not(np.isnan(merge)))[0]

        X = X.flatten()[index]
        Y = Y.flatten()[index]
        merge = merge.flatten()[index]

        X = X[np.where(np.logical_not(np.isnan(Y)))]
        merge = merge.flatten()[np.where(np.logical_not(np.isnan(Y)))]
        Y = Y[np.where(np.logical_not(np.isnan(Y)))]

        Y = Y[np.where(np.logical_not(np.isnan(X)))]
        merge = merge.flatten()[np.where(np.logical_not(np.isnan(X)))]
        X = X[np.where(np.logical_not(np.isnan(X)))]

        X = X[np.where(np.logical_not(np.isnan(merge)))]
        Y = Y[np.where(np.logical_not(np.isnan(merge)))]
        merge = merge[np.where(np.logical_not(np.isnan(merge)))]

        if dowrite==1:
            # write raw bs to file
            #outfile = sonpath+'x_y_class'+str(p)+'.asc'
            outfile = os.path.normpath(os.path.join(sonpath,'x_y_class'+str(p)+'.asc'))
            with open(outfile, 'w') as f:
                np.savetxt(f, np.hstack((humutils.ascol(X),humutils.ascol(Y), humutils.ascol(merge))), delimiter=' ', fmt="%8.6f %8.6f %8.6f")

        humlon, humlat = trans(X, Y, inverse=True)

        if dogrid==1:
            # build the target grid in projected coords, then express it in lon/lat
            grid_x, grid_y = np.meshgrid( np.arange(np.min(X), np.max(X), res), np.arange(np.min(Y), np.max(Y), res) )
            del X, Y
            longrid, latgrid = trans(grid_x, grid_y, inverse=True)
            shape = np.shape(grid_x)
            #del grid_y, grid_x
            targ_def = pyresample.geometry.SwathDefinition(lons=longrid.flatten(), lats=latgrid.flatten())
            del longrid, latgrid
            orig_def = pyresample.geometry.SwathDefinition(lons=humlon.flatten(), lats=humlat.flatten())
            #del humlat, humlon
            #influence = 1 #m
            #numneighbours = 64

            # resample point cloud -> grid; each mode falls back to a
            # single-process call if the multiprocess call fails
            if mode==1:
                try:
                    # nearest neighbour
                    dat = pyresample.kd_tree.resample_nearest(orig_def, merge.flatten(), targ_def, radius_of_influence=influence, fill_value=None, nprocs = cpu_count())
                except:
                    # nearest neighbour
                    dat, stdev, counts = pyresample.kd_tree.resample_nearest(orig_def, merge.flatten(), targ_def, radius_of_influence=influence, fill_value=None, with_uncert = True, nprocs = 1)
            elif mode==2:
                # custom inverse distance
                wf = lambda r: 1/r**2
                try:
                    dat, stdev, counts = pyresample.kd_tree.resample_custom(orig_def, merge.flatten(),targ_def, radius_of_influence=influence, neighbours=nn, weight_funcs=wf, fill_value=None, with_uncert = True, nprocs = cpu_count())
                except:
                    dat, stdev, counts = pyresample.kd_tree.resample_custom(orig_def, merge.flatten(),targ_def, radius_of_influence=influence, neighbours=nn, weight_funcs=wf, fill_value=None, with_uncert = True, nprocs = 1)
            elif mode==3:
                sigmas = 1 #m
                eps = 2
                try:
                    dat, stdev, counts = pyresample.kd_tree.resample_gauss(orig_def, merge.flatten(), targ_def, radius_of_influence=influence, neighbours=nn, sigmas=sigmas, fill_value=None, with_uncert = np.nan, nprocs = cpu_count(), epsilon = eps)
                except:
                    dat, stdev, counts = pyresample.kd_tree.resample_gauss(orig_def, merge.flatten(), targ_def, radius_of_influence=influence, neighbours=nn, sigmas=sigmas, fill_value=None, with_uncert = np.nan, nprocs = 1, epsilon = eps)

            dat = dat.reshape(shape)
            if mode>1:
                stdev = stdev.reshape(shape)
                counts = counts.reshape(shape)

            mask = dat.mask.copy()
            dat[mask==1] = 0

            if mode>1:
                # reject high-variance and under-sampled grid cells
                # NOTE(review): threshold 3 here vs 5 in the dowrite pass
                # below, and neither uses the numstdevs parameter -- confirm.
                dat[(stdev>3) & (mask!=0)] = np.nan
                dat[(counts<nn) & (counts>0)] = np.nan

            # fill internal gaps, then re-mask the true data boundary
            dat2 = replace_nans.RN(dat.astype('float64'),1000,0.01,2,'localmean').getdata()
            dat2[dat==0] = np.nan
            # get a new mask
            mask = np.isnan(dat2)
            # morphological open/close to smooth the data-coverage boundary
            mask = ~binary_dilation(binary_erosion(~mask,structure=np.ones((15,15))), structure=np.ones((15,15)))
            #mask = binary_fill_holes(mask, structure=np.ones((15,15)))
            #mask = ~binary_fill_holes(~mask, structure=np.ones((15,15)))
            dat2[mask==1] = np.nan
            dat2[dat2<1] = np.nan
            del dat
            dat = dat2
            del dat2

        if dogrid==1:
            ## mask
            #dat[dist> 1 ] = np.nan
            #del dist, tree
            dat[dat==0] = np.nan
            dat[np.isinf(dat)] = np.nan
            datm = np.ma.masked_invalid(dat)
            glon, glat = trans(grid_x, grid_y, inverse=True)
            del grid_x, grid_y

        try:
            print "drawing and printing map ..."
            fig = plt.figure(frameon=False)
            map = Basemap(projection='merc', epsg=cs2cs_args.split(':')[1], #26949,
                          resolution = 'i', #h #f
                          llcrnrlon=np.min(humlon)-0.0001, llcrnrlat=np.min(humlat)-0.0001,
                          urcrnrlon=np.max(humlon)+0.0001, urcrnrlat=np.max(humlat)+0.0001)
            if dogrid==1:
                gx,gy = map.projtran(glon, glat)
            # full-bleed, frameless axes for the KML overlay image
            ax = plt.Axes(fig, [0., 0., 1., 1.], )
            ax.set_axis_off()
            fig.add_axes(ax)
            if dogrid==1:
                map.pcolormesh(gx, gy, datm, cmap='YlOrRd', vmin=0.25, vmax=2)
                del dat
            else:
                ## draw point cloud
                x,y = map.projtran(humlon, humlat)
                map.scatter(x.flatten(), y.flatten(), 0.5, merge.flatten(), cmap='YlOrRd', linewidth = '0')
            custom_save(sonpath,'class_map'+str(p))
            del fig
        except:
            print "error: map could not be created..."

        # wrap the saved png in a Google-Earth ground overlay
        kml = simplekml.Kml()
        ground = kml.newgroundoverlay(name='GroundOverlay')
        ground.icon.href = 'class_map'+str(p)+'.png'
        # NOTE(review): north uses min(lat)-eps and south uses max(lat)+eps,
        # which looks inverted -- confirm the bounding box is intentional.
        ground.latlonbox.north = np.min(humlat)-0.00001
        ground.latlonbox.south = np.max(humlat)+0.00001
        ground.latlonbox.east = np.max(humlon)+0.00001
        ground.latlonbox.west = np.min(humlon)-0.00001
        ground.latlonbox.rotation = 0
        #kml.save(sonpath+'class_GroundOverlay'+str(p)+'.kml')
        kml.save(os.path.normpath(os.path.join(sonpath,'class_GroundOverlay'+str(p)+'.kml')))

    if dowrite==1:
        # second pass: read back all per-chunk point clouds and map them together
        X = []; Y = []; S = [];
        for p in xrange(len(class_fp)):
            #dat = np.genfromtxt(sonpath+'x_y_class'+str(p)+'.asc', delimiter=' ')
            dat = np.genfromtxt(os.path.normpath(os.path.join(sonpath,'x_y_class'+str(p)+'.asc')), delimiter=' ')
            X.append(dat[:,0])
            Y.append(dat[:,1])
            S.append(dat[:,2])
            del dat

        # merge flatten and stack
        X = np.asarray(np.hstack(X),'float')
        X = X.flatten()

        # merge flatten and stack
        Y = np.asarray(np.hstack(Y),'float')
        Y = Y.flatten()

        # merge flatten and stack
        S = np.asarray(np.hstack(S),'float')
        S = S.flatten()

        humlon, humlat = trans(X, Y, inverse=True)

        if dogrid==1:
            grid_x, grid_y = np.meshgrid( np.arange(np.min(X), np.max(X), res), np.arange(np.min(Y), np.max(Y), res) )
            del X, Y
            longrid, latgrid = trans(grid_x, grid_y, inverse=True)
            shape = np.shape(grid_x)
            #del grid_y, grid_x
            targ_def = pyresample.geometry.SwathDefinition(lons=longrid.flatten(), lats=latgrid.flatten())
            del longrid, latgrid
            orig_def = pyresample.geometry.SwathDefinition(lons=humlon.flatten(), lats=humlat.flatten())
            #del humlat, humlon
            #influence = 1 #m
            #numneighbours = 64

            # NOTE(review): every resample call below passes merge.flatten()
            # (the last chunk's data) although this pass appears to be about
            # the stacked point cloud S -- looks like a bug; confirm against
            # upstream PyHum before changing.
            if mode==1:
                try:
                    # nearest neighbour
                    dat = pyresample.kd_tree.resample_nearest(orig_def, merge.flatten(), targ_def, radius_of_influence=influence, fill_value=None, nprocs = cpu_count())
                except:
                    # nearest neighbour
                    dat, stdev, counts = pyresample.kd_tree.resample_nearest(orig_def, merge.flatten(), targ_def, radius_of_influence=influence, fill_value=None, with_uncert = True, nprocs = 1)
            elif mode==2:
                # custom inverse distance
                wf = lambda r: 1/r**2
                try:
                    dat, stdev, counts = pyresample.kd_tree.resample_custom(orig_def, merge.flatten(),targ_def, radius_of_influence=influence, neighbours=nn, weight_funcs=wf, fill_value=None, with_uncert = True, nprocs = cpu_count())
                except:
                    dat, stdev, counts = pyresample.kd_tree.resample_custom(orig_def, merge.flatten(),targ_def, radius_of_influence=influence, neighbours=nn, weight_funcs=wf, fill_value=None, with_uncert = True, nprocs = 1)
            elif mode==3:
                sigmas = 1 #m
                eps = 2
                try:
                    dat, stdev, counts = pyresample.kd_tree.resample_gauss(orig_def, merge.flatten(), targ_def, radius_of_influence=influence, neighbours=nn, sigmas=sigmas, fill_value=None, with_uncert = np.nan, nprocs = cpu_count(), epsilon = eps)
                except:
                    dat, stdev, counts = pyresample.kd_tree.resample_gauss(orig_def, merge.flatten(), targ_def, radius_of_influence=influence, neighbours=nn, sigmas=sigmas, fill_value=None, with_uncert = np.nan, nprocs = 1, epsilon = eps)

            dat = dat.reshape(shape)
            if mode>1:
                stdev = stdev.reshape(shape)
                counts = counts.reshape(shape)

            mask = dat.mask.copy()
            dat[mask==1] = 0

            if mode>1:
                dat[(stdev>5) & (mask!=0)] = np.nan
                dat[(counts<nn) & (counts>0)] = np.nan

            dat2 = replace_nans.RN(dat.astype('float64'),1000,0.01,2,'localmean').getdata()
            dat2[dat==0] = np.nan

            # get a new mask
            mask = np.isnan(dat2)
            mask = ~binary_dilation(binary_erosion(~mask,structure=np.ones((15,15))), structure=np.ones((15,15)))
            #mask = binary_fill_holes(mask, structure=np.ones((15,15)))
            #mask = ~binary_fill_holes(~mask, structure=np.ones((15,15)))
            dat2[mask==1] = np.nan
            dat2[dat2<1] = np.nan
            del dat
            dat = dat2
            del dat2

        if dogrid==1:
            ## mask
            #dat[dist> 1 ] = np.nan
            #el dist, tree
            dat[dat==0] = np.nan
            dat[np.isinf(dat)] = np.nan
            datm = np.ma.masked_invalid(dat)
            glon, glat = trans(grid_x, grid_y, inverse=True)
            del grid_x, grid_y

        levels = [0.5,0.75,1.25,1.5,1.75,2,3]

        try:
            print "drawing and printing map ..."
            fig = plt.figure()
            map = Basemap(projection='merc', epsg=cs2cs_args.split(':')[1],
                          resolution = 'i',
                          llcrnrlon=np.min(humlon)-0.00001, llcrnrlat=np.min(humlat)-0.00001,
                          urcrnrlon=np.max(humlon)+0.00001, urcrnrlat=np.max(humlat)+0.00001)
            map.arcgisimage(server='http://server.arcgisonline.com/ArcGIS', service='World_Imagery', xpixels=1000, ypixels=None, dpi=300)
            if dogrid==1:
                gx,gy = map.projtran(glon, glat)
            if dogrid==1:
                map.contourf(gx, gy, datm, levels, cmap='YlOrRd')
            else:
                ## draw point cloud
                x,y = map.projtran(humlon, humlat)
                map.scatter(x.flatten(), y.flatten(), 0.5, S.flatten(), cmap='YlOrRd', linewidth = '0')
            custom_save2(sonpath,'class_map_imagery'+str(p))
            del fig
        except:
            print "error: map could not be created..."
# Parse the 'GeoLocation' column ("lat,lon" strings) into float (lat, lon)
# tuples, dropping missing rows and the (0, 0) null-island placeholders.
geoLocation = list(
    # Input is a list of float tuples
    filter(lambda latlon: latlon[0] != 0 or latlon[1] != 0,
           # Input is a list of lists containing 2 string elements that were separated with split
           map(lambda parsedCoordinates: (float(parsedCoordinates[0]), float(parsedCoordinates[1])),
               # Input is a list of strings
               map(lambda row: row.split(','), df['GeoLocation'].dropna()))))

m = Basemap(projection='mill')  # creates a basic map (Miller cylindrical)
m.drawcoastlines()
m.drawcountries(color='red')
m.drawstates(color="yellow")
m.drawrivers(color='blue')
m.bluemarble()  # gives the map a life-like effect

# point styles cycled at random; renamed from 'colors' to avoid shadowing
# the matplotlib.colors module used elsewhere in this file
point_styles = ['b.', 'g.', 'r.', 'c.', 'm.', 'y.']

# BUG FIX: each tuple is (lat, lon) -- see the lambda above -- but the
# original unpacked them as `for lon, lat in ...`, transposing every point.
for lat, lon in geoLocation:
    x, y = m.projtran(lon, lat)  # lon/lat -> map projection coords
    m.plot(x, y, random.choice(point_styles), markersize=2)

# displays map with plotted points
plt.show()
# ---------------------------------------------------------------------------
# make_map: grid and map a merged port/starboard sidescan mosaic.
#
# NOTE(review): the text of this block is whitespace-mangled (whole function
# collapsed onto a few physical lines, some broken mid-expression). It is
# reproduced verbatim below; only this comment header is added.
#
# What the visible code does, in order:
#   1. Normalizes port/star intensities by the ratio of their nanmeans and
#      vstacks them into 'merge'; zeros NaNs; clips to len(n) columns.
#   2. Builds a per-pixel resolution grid 'res_grid' from data_R, dx and c,
#      then TV-Chambolle denoises 'merge' (weight=.2).
#   3. Computes point positions X, Y (and D, h, t) via getXY, then filters
#      all arrays by not-NaN(Y), not-NaN(X), not-NaN(merge), not-inf(merge).
#      NOTE(review): the filtering looks buggy -- e.g. after Y is itself
#      shortened by the isnan(Y) mask, D/R/h/t are indexed with a mask
#      recomputed on the already-filtered Y (and similarly in the X and
#      merge passes), so array lengths can go out of sync. Confirm against
#      the upstream PyHum source before relying on D/R/h/t here.
#   4. Writes the raw point cloud to 'x_y_ss_raw<p>.asc' via np.savetxt.
#   5. Grids via pyresample (KDTree mask for distance-to-data). mode==1:
#      resample_nearest; mode==2: resample_custom with inverse-distance
#      weighting wf = 1/r**2 (with uncertainty); mode==3: resample_gauss.
#      Each mode retries at half resolution (resg*2) via getgrid_lm on
#      failure. NOTE(review): in the mode==2 fallback, 'del stdev_null,
#      counts_null' references names never assigned in that branch -- it
#      would raise NameError if reached; in mode==3 the same del follows a
#      call that DOES bind them. Also the bare 'except:' clauses here hide
#      the real failure cause (including MemoryError vs. anything else).
#   6. Masks gridded data by KDTree distance (dist > res*30), adds r_dat
#      to dat, applies stdev/counts masks when mode>1, thresholds (<thres),
#      and builds a masked array 'datm'.
#   7. Exports a GeoTIFF (GDAL, EPSG from cs2cs_args, nodata -99), a KMZ
#      (humutils.gearth_fig / make_kml) and a Basemap PNG (ESRI imagery,
#      pcolormesh decimated 5x when datm.size > 25,000,000), then
#      returns 'res' (the possibly-updated grid resolution).
# ---------------------------------------------------------------------------
def make_map(e, n, t, d, dat_port, dat_star, data_R, pix_m, res, cs2cs_args, sonpath, p, mode, nn, numstdevs, c, dx, use_uncorrected, scalemax): #dogrid, influence,dowrite, thres=5 trans = pyproj.Proj(init=cs2cs_args) mp = np.nanmean(dat_port) ms = np.nanmean(dat_star) if mp>ms: merge = np.vstack((dat_port,dat_star*(mp/ms))) else: merge = np.vstack((dat_port*(ms/mp),dat_star)) del dat_port, dat_star merge[np.isnan(merge)] = 0 merge = merge[:,:len(n)] ## actual along-track resolution is this: dx times dy = Af tmp = data_R * dx * (c*0.007 / 2) #dx = np.arcsin(c/(1000*meta['t']*meta['f'])) res_grid = np.sqrt(np.vstack((tmp, tmp))) del tmp res_grid = res_grid[:np.shape(merge)[0],:np.shape(merge)[1]] #if use_uncorrected != 1: # merge = merge - 10*np.log10(res_grid) res_grid = res_grid.astype('float32') merge[np.isnan(merge)] = 0 merge[merge<0] = 0 merge = merge.astype('float32') merge = denoise_tv_chambolle(merge.copy(), weight=.2, multichannel=False).astype('float32') R = np.vstack((np.flipud(data_R),data_R)) del data_R R = R[:np.shape(merge)[0],:np.shape(merge)[1]] # get number pixels in scan line extent = int(np.shape(merge)[0]/2) yvec = np.squeeze(np.linspace(np.squeeze(pix_m),extent*np.squeeze(pix_m),extent)) X, Y, D, h, t = getXY(e,n,yvec,np.squeeze(d),t,extent) X = X.astype('float32') Y = Y.astype('float32') D = D.astype('float32') h = h.astype('float32') t = t.astype('float32') X = X.astype('float32') D[np.isnan(D)] = 0 h[np.isnan(h)] = 0 t[np.isnan(t)] = 0 X = X[np.where(np.logical_not(np.isnan(Y)))] merge = merge.flatten()[np.where(np.logical_not(np.isnan(Y)))] res_grid = res_grid.flatten()[np.where(np.logical_not(np.isnan(Y)))] Y = Y[np.where(np.logical_not(np.isnan(Y)))] D = D[np.where(np.logical_not(np.isnan(Y)))] R = R.flatten()[np.where(np.logical_not(np.isnan(Y)))] h = h[np.where(np.logical_not(np.isnan(Y)))] t = t[np.where(np.logical_not(np.isnan(Y)))] Y = Y[np.where(np.logical_not(np.isnan(X)))] merge = 
merge.flatten()[np.where(np.logical_not(np.isnan(X)))] res_grid = res_grid.flatten()[np.where(np.logical_not(np.isnan(X)))] X = X[np.where(np.logical_not(np.isnan(X)))] D = D[np.where(np.logical_not(np.isnan(X)))] R = R.flatten()[np.where(np.logical_not(np.isnan(X)))] h = h[np.where(np.logical_not(np.isnan(X)))] t = t[np.where(np.logical_not(np.isnan(X)))] X = X[np.where(np.logical_not(np.isnan(merge)))] Y = Y[np.where(np.logical_not(np.isnan(merge)))] merge = merge[np.where(np.logical_not(np.isnan(merge)))] res_grid = res_grid.flatten()[np.where(np.logical_not(np.isnan(merge)))] D = D[np.where(np.logical_not(np.isnan(merge)))] R = R[np.where(np.logical_not(np.isnan(merge)))] h = h[np.where(np.logical_not(np.isnan(merge)))] t = t[np.where(np.logical_not(np.isnan(merge)))] X = X[np.where(np.logical_not(np.isinf(merge)))] Y = Y[np.where(np.logical_not(np.isinf(merge)))] merge = merge[np.where(np.logical_not(np.isinf(merge)))] res_grid = res_grid.flatten()[np.where(np.logical_not(np.isinf(merge)))] D = D[np.where(np.logical_not(np.isinf(merge)))] R = R[np.where(np.logical_not(np.isinf(merge)))] h = h[np.where(np.logical_not(np.isinf(merge)))] t = t[np.where(np.logical_not(np.isinf(merge)))] print("writing point cloud") #if dowrite==1: ## write raw bs to file outfile = os.path.normpath(os.path.join(sonpath,'x_y_ss_raw'+str(p)+'.asc')) ##write.txtwrite( outfile, np.hstack((humutils.ascol(X.flatten()),humutils.ascol(Y.flatten()), humutils.ascol(merge.flatten()), humutils.ascol(D.flatten()), humutils.ascol(R.flatten()), humutils.ascol(h.flatten()), humutils.ascol(t.flatten()) )) ) np.savetxt(outfile, np.hstack((humutils.ascol(X.flatten()),humutils.ascol(Y.flatten()), humutils.ascol(merge.flatten()), humutils.ascol(D.flatten()), humutils.ascol(R.flatten()), humutils.ascol(h.flatten()), humutils.ascol(t.flatten()) )) , fmt="%8.6f %8.6f %8.6f %8.6f %8.6f %8.6f %8.6f") del D, R, h, t sigmas = 0.1 #m eps = 2 print("gridding ...") #if dogrid==1: if 2>1: if res==99: resg = 
np.min(res_grid[res_grid>0])/2 print('Gridding at resolution of %s' % str(resg)) else: resg = res tree = KDTree(np.c_[X.flatten(),Y.flatten()]) complete=0 while complete==0: try: grid_x, grid_y, res = getmesh(np.min(X), np.max(X), np.min(Y), np.max(Y), resg) longrid, latgrid = trans(grid_x, grid_y, inverse=True) longrid = longrid.astype('float32') latgrid = latgrid.astype('float32') shape = np.shape(grid_x) ## create mask for where the data is not if pykdtree==1: dist, _ = tree.query(np.c_[grid_x.ravel(), grid_y.ravel()], k=1) else: try: dist, _ = tree.query(np.c_[grid_x.ravel(), grid_y.ravel()], k=1, n_jobs=cpu_count()) except: #print ".... update your scipy installation to use faster kd-tree queries" dist, _ = tree.query(np.c_[grid_x.ravel(), grid_y.ravel()], k=1) dist = dist.reshape(grid_x.shape) targ_def = pyresample.geometry.SwathDefinition(lons=longrid.flatten(), lats=latgrid.flatten()) del longrid, latgrid humlon, humlat = trans(X, Y, inverse=True) orig_def = pyresample.geometry.SwathDefinition(lons=humlon.flatten(), lats=humlat.flatten()) del humlon, humlat if 'orig_def' in locals(): complete=1 except: print("memory error: trying grid resolution of %s" % (str(resg*2))) resg = resg*2 if mode==1: complete=0 while complete==0: try: try: dat = pyresample.kd_tree.resample_nearest(orig_def, merge.flatten(), targ_def, radius_of_influence=res*20, fill_value=None, nprocs = cpu_count(), reduce_data=1) except: dat = pyresample.kd_tree.resample_nearest(orig_def, merge.flatten(), targ_def, radius_of_influence=res*20, fill_value=None, nprocs = 1, reduce_data=1) try: r_dat = pyresample.kd_tree.resample_nearest(orig_def, res_grid.flatten(), targ_def, radius_of_influence=res*20, fill_value=None, nprocs = cpu_count(), reduce_data=1) except: r_dat = pyresample.kd_tree.resample_nearest(orig_def, res_grid.flatten(), targ_def, radius_of_influence=res*20, fill_value=None, nprocs = 1, reduce_data=1) stdev = None counts = None if 'dat' in locals(): complete=1 except: del grid_x, 
grid_y, targ_def, orig_def wf = None humlon, humlat = trans(X, Y, inverse=True) dat, stdev, counts, resg, complete, shape = getgrid_lm(humlon, humlat, merge, res*10, min(X), max(X), min(Y), max(Y), resg*2, mode, trans, nn, wf, sigmas, eps) r_dat, stdev, counts, resg, complete, shape = getgrid_lm(humlon, humlat, res_grid, res*10, min(X), max(X), min(Y), max(Y), resg*2, mode, trans, nn, wf, sigmas, eps) del humlon, humlat elif mode==2: # custom inverse distance wf = lambda r: 1/r**2 complete=0 while complete==0: try: try: dat, stdev, counts = pyresample.kd_tree.resample_custom(orig_def, merge.flatten(),targ_def, radius_of_influence=res*20, neighbours=nn, weight_funcs=wf, fill_value=None, with_uncert = True, nprocs = cpu_count(), reduce_data=1) except: dat, stdev, counts = pyresample.kd_tree.resample_custom(orig_def, merge.flatten(),targ_def, radius_of_influence=res*20, neighbours=nn, weight_funcs=wf, fill_value=None, with_uncert = True, nprocs = 1, reduce_data=1) try: r_dat = pyresample.kd_tree.resample_custom(orig_def, res_grid.flatten(), targ_def, radius_of_influence=res*20, neighbours=nn, weight_funcs=wf, fill_value=None, with_uncert = False, nprocs = cpu_count(), reduce_data=1) except: r_dat = pyresample.kd_tree.resample_custom(orig_def, res_grid.flatten(), targ_def, radius_of_influence=res*20, neighbours=nn, weight_funcs=wf, fill_value=None, with_uncert = False, nprocs = 1, reduce_data=1) if 'dat' in locals(): complete=1 except: del grid_x, grid_y, targ_def, orig_def humlon, humlat = trans(X, Y, inverse=True) dat, stdev, counts, resg, complete, shape = getgrid_lm(humlon, humlat, merge, res*2, min(X), max(X), min(Y), max(Y), resg*2, mode, trans, nn, wf, sigmas, eps) r_dat, stdev, counts, resg, complete, shape = getgrid_lm(humlon, humlat, res_grid, res*2, min(X), max(X), min(Y), max(Y), resg*2, mode, trans, nn, wf, sigmas, eps) del humlat, humlon del stdev_null, counts_null elif mode==3: wf = None complete=0 while complete==0: try: try: dat, stdev, counts = 
pyresample.kd_tree.resample_gauss(orig_def, merge.flatten(), targ_def, radius_of_influence=res*20, neighbours=nn, sigmas=sigmas, fill_value=None, with_uncert = True, nprocs = cpu_count(), epsilon = eps, reduce_data=1) except: dat, stdev, counts = pyresample.kd_tree.resample_gauss(orig_def, merge.flatten(), targ_def, radius_of_influence=res*20, neighbours=nn, sigmas=sigmas, fill_value=None, with_uncert = True, nprocs = 1, epsilon = eps, reduce_data=1) try: r_dat = pyresample.kd_tree.resample_gauss(orig_def, res_grid.flatten(), targ_def, radius_of_influence=res*20, neighbours=nn, sigmas=sigmas, fill_value=None, with_uncert = False, nprocs = cpu_count(), epsilon = eps, reduce_data=1) except: r_dat = pyresample.kd_tree.resample_gauss(orig_def, res_grid.flatten(), targ_def, radius_of_influence=res*20, neighbours=nn, sigmas=sigmas, fill_value=None, with_uncert = False, nprocs = 1, epsilon = eps, reduce_data=1) if 'dat' in locals(): complete=1 except: del grid_x, grid_y, targ_def, orig_def humlon, humlat = trans(X, Y, inverse=True) dat, stdev, counts, resg, complete, shape = getgrid_lm(humlon, humlat, merge, res*10, min(X), max(X), min(Y), max(Y), resg*2, mode, trans, nn, wf, sigmas, eps) r_dat, stdev_null, counts_null, resg, complete, shape = getgrid_lm(humlon, humlat, res_grid, res*10, min(X), max(X), min(Y), max(Y), resg*2, mode, trans, nn, wf, sigmas, eps) del humlat, humlon del stdev_null, counts_null humlon, humlat = trans(X, Y, inverse=True) del X, Y, res_grid, merge dat = dat.reshape(shape) dat[dist>res*30] = np.nan del dist r_dat = r_dat.reshape(shape) r_dat[r_dat<1] = 1 r_dat[r_dat > 2*np.pi] = 1 r_dat[np.isnan(dat)] = np.nan dat = dat + r_dat #np.sqrt(np.cos(np.deg2rad(r_dat))) #dat*np.sqrt(r_dat) + dat del r_dat if mode>1: stdev = stdev.reshape(shape) counts = counts.reshape(shape) mask = dat.mask.copy() dat[mask==1] = np.nan #dat[mask==1] = 0 if mode>1: dat[(stdev>numstdevs) & (mask!=0)] = np.nan dat[(counts<nn) & (counts>0)] = np.nan #if dogrid==1: 
dat[dat==0] = np.nan dat[np.isinf(dat)] = np.nan dat[dat<thres] = np.nan datm = np.ma.masked_invalid(dat) glon, glat = trans(grid_x, grid_y, inverse=True) #del grid_x, grid_y try: from osgeo import gdal,ogr,osr proj = osr.SpatialReference() proj.ImportFromEPSG(int(cs2cs_args.split(':')[-1])) #26949) datout = np.squeeze(np.ma.filled(dat))#.astype('int16') datout[np.isnan(datout)] = -99 driver = gdal.GetDriverByName('GTiff') #rows,cols = np.shape(datout) cols,rows = np.shape(datout) outFile = os.path.normpath(os.path.join(sonpath,'geotiff_map'+str(p)+'.tif')) ds = driver.Create( outFile, rows, cols, 1, gdal.GDT_Float32, [ 'COMPRESS=LZW' ] ) if proj is not None: ds.SetProjection(proj.ExportToWkt()) xmin, ymin, xmax, ymax = [grid_x.min(), grid_y.min(), grid_x.max(), grid_y.max()] xres = (xmax - xmin) / float(rows) yres = (ymax - ymin) / float(cols) geotransform = (xmin, xres, 0, ymax, 0, -yres) ds.SetGeoTransform(geotransform) ss_band = ds.GetRasterBand(1) ss_band.WriteArray(np.flipud(datout)) #datout) ss_band.SetNoDataValue(-99) ss_band.FlushCache() ss_band.ComputeStatistics(False) del ds except: print("error: geotiff could not be created... 
check your gdal/ogr install") try: # ========================================================= print("creating kmz file ...") ## new way to create kml file pixels = 1024 * 10 fig, ax = humutils.gearth_fig(llcrnrlon=glon.min(), llcrnrlat=glat.min(), urcrnrlon=glon.max(), urcrnrlat=glat.max(), pixels=pixels) cs = ax.pcolormesh(glon, glat, datm, vmax=scalemax, cmap='gray') ax.set_axis_off() fig.savefig(os.path.normpath(os.path.join(sonpath,'map'+str(p)+'.png')), transparent=True, format='png') del fig, ax # ========================================================= fig = plt.figure(figsize=(1.0, 4.0), facecolor=None, frameon=False) ax = fig.add_axes([0.0, 0.05, 0.2, 0.9]) cb = fig.colorbar(cs, cax=ax) cb.set_label('Intensity [dB W]', rotation=-90, color='k', labelpad=20) fig.savefig(os.path.normpath(os.path.join(sonpath,'legend'+str(p)+'.png')), transparent=False, format='png') del fig, ax, cs, cb # ========================================================= humutils.make_kml(llcrnrlon=glon.min(), llcrnrlat=glat.min(), urcrnrlon=glon.max(), urcrnrlat=glat.max(), figs=[os.path.normpath(os.path.join(sonpath,'map'+str(p)+'.png'))], colorbar=os.path.normpath(os.path.join(sonpath,'legend'+str(p)+'.png')), kmzfile=os.path.normpath(os.path.join(sonpath,'GroundOverlay'+str(p)+'.kmz')), name='Sidescan Intensity') except: print("error: map could not be created...") #y1 = np.min(glat)-0.001 #x1 = np.min(glon)-0.001 #y2 = np.max(glat)+0.001 #x2 = np.max(glon)+0.001 print("drawing and printing map ...") fig = plt.figure(frameon=False) map = Basemap(projection='merc', epsg=cs2cs_args.split(':')[1], resolution = 'i', #h #f llcrnrlon=np.min(humlon)-0.001, llcrnrlat=np.min(glat)-0.001, urcrnrlon=np.max(humlon)+0.001, urcrnrlat=np.max(glat)+0.001) try: map.arcgisimage(server='http://server.arcgisonline.com/ArcGIS', service='World_Imagery', xpixels=1000, ypixels=None, dpi=300) except: map.arcgisimage(server='http://server.arcgisonline.com/ArcGIS', service='ESRI_Imagery_World_2D', 
xpixels=1000, ypixels=None, dpi=300) #finally: # print "error: map could not be created..." #if dogrid==1: gx,gy = map.projtran(glon, glat) ax = plt.Axes(fig, [0., 0., 1., 1.], ) ax.set_axis_off() fig.add_axes(ax) #if dogrid==1: if 2>1: if datm.size > 25000000: print("matrix size > 25,000,000 - decimating by factor of 5 for display") map.pcolormesh(gx[::5,::5], gy[::5,::5], datm[::5,::5], cmap='gray', vmin=np.nanmin(datm), vmax=scalemax) #vmax=np.nanmax(datm) else: map.pcolormesh(gx, gy, datm, cmap='gray', vmin=np.nanmin(datm), vmax=scalemax) #vmax=np.nanmax(datm) del datm, dat else: ## draw point cloud x,y = map.projtran(humlon, humlat) map.scatter(x.flatten(), y.flatten(), 0.5, merge.flatten(), cmap='gray', linewidth = '0') #map.drawmapscale(x1+0.001, y1+0.001, x1, y1, 200., units='m', barstyle='fancy', labelstyle='simple', fontcolor='k') #'#F8F8FF') #map.drawparallels(np.arange(y1-0.001, y2+0.001, 0.005),labels=[1,0,0,1], linewidth=0.0, rotation=30, fontsize=8) #map.drawmeridians(np.arange(x1, x2, 0.002),labels=[1,0,0,1], linewidth=0.0, rotation=30, fontsize=8) custom_save2(sonpath,'map_imagery'+str(p)) del fig del humlat, humlon return res #return the new resolution
# ---------------------------------------------------------------------------
# Script fragment: two-panel figure for April 2014 sidescan imagery.
# NOTE(review): whitespace-mangled text, reproduced verbatim below; only
# this comment header is added. Visible steps: build a legend handle
# (circ5), create a 2-column figure, set up a Mercator Basemap bounded by
# the april_lon/april_lat extents on the left axis, pull a USGS WMS imagery
# layer, contourf the transposed april_ss intensities in greyscale with a
# labelled colorbar, then read a substrate-classification shapefile and
# start collecting Polygon patches per substrate type. The fragment is
# truncated mid-loop (r_patch is declared but never appended in view, and
# 'sand/rock'/'rock' cases are missing here) -- the remainder presumably
# follows outside this chunk; confirm before editing.
# ---------------------------------------------------------------------------
circ5 = Line2D([0], [0], linestyle="none", marker="o", markersize=10, markerfacecolor=colors[4],alpha=a_val) #Total extents (i.e. misplaced SS and multibeam) fig, (ax,ax1) = plt.subplots(figsize=(13,8),ncols=2) #Start with april sidescan sonar ax.set_title('April 2014 \n Sidescan Sonar Imagery') m = Basemap(projection='merc', epsg=cs2cs_args.split(':')[1], llcrnrlon=np.nanmin(april_lon)-0.0001, llcrnrlat=np.nanmin(april_lat)-0.0002, urcrnrlon=np.nanmax(april_lon)+0.0001, urcrnrlat=np.nanmax(april_lat)+0.0002,ax=ax) m.wmsimage(server='http://grandcanyon.usgs.gov/arcgis/services/Imagery/ColoradoRiverImageryExplorer/MapServer/WmsServer?', layers=['3'], xpixels=1000) x,y = m.projtran(april_lon, april_lat) im = m.contourf(x,y,april_ss.T, cmap='Greys_r', levels=[0,2.5,5,7.5,10,12.5,15,17.5,20,22.5,25,27.5,30,32.5,35]) divider = make_axes_locatable(ax) cax = divider.append_axes("right", size="5%", pad=0.1) cbr = plt.colorbar(im, cax=cax) cbr.set_label('Sidescan Intensity [dBW]') m.readshapefile(r"C:\workspace\Reach_4a\Multibeam\mb_sed_class\output\shapefiles\visual_seg_2_geo","layer",drawbounds = False) #sand, sand/gravel, gravel, sand/rock, rock s_patch, g_patch, r_patch, = [],[],[] for info, shape in zip(m.layer_info, m.layer): if info['substrate'] == 'sand' : s_patch.append(Polygon(np.asarray(shape),True)) if info['substrate'] == 'gravel': g_patch.append(Polygon(np.asarray(shape),True))
# ---------------------------------------------------------------------------
# map_texture: plot texture-lengthscale maps produced by PyHum.texture.
#
# NOTE(review): Python 2 code ('print' statements, 'xrange'); the text of
# this block is whitespace-mangled (whole function collapsed, some breaks
# mid-expression). It is reproduced verbatim below; only this comment
# header is added.
#
# What the visible code does, in order:
#   1. Prompts (Tkinter file dialogs) for the .DAT file / SON directory
#      when not supplied; echoes and coerces all arguments, applying
#      defaults (cs2cs_args="epsg:26949", dogrid=1, res=0.5, cog=1, ...).
#      NOTE(review): 'if dowrite==0: print "Point cloud data will be
#      written to ascii file"' looks inverted -- the message describes
#      dowrite==1 behavior; confirm against upstream PyHum.
#   2. Loads e/n/pix_m/dep_m/c/dist_m arrays from '<base>meta.mat'.
#   3. Derives bearing either point-to-point (bearingBetweenPoints) or
#      from the recorded heading; if std(bearing)>180 it de-noises with a
#      2-cluster MiniBatchKMeans + NaN interpolation; optional running-mean
#      filter; converts to radians and, when cog==1, rotates/unwraps the
#      heading (ENU -> NED comment in-line).
#   4. Memory-maps the port/star/class scans and, per chunk p, rotates the
#      across-track pixel vector by the heading to get point positions
#      (calc_beam_pos), strips NaNs, optionally writes 'x_y_class<p>.asc'.
#   5. If dogrid==1, grids with scipy griddata(nearest) at resolution
#      'res', masks cells farther than 1 m from data via KDTree distance,
#      and draws a Basemap pcolormesh (vmin=0.5, vmax=2); else scatters
#      the point cloud. Saves 'class_map<p>', writes a simplekml
#      GroundOverlay. NOTE(review): latlonbox.north is set from
#      np.min(humlat) and .south from np.max(humlat) -- these look
#      swapped; confirm before reuse.
#   6. Finally re-reads all chunk .asc files, re-grids the combined cloud
#      and draws a contourf over ESRI imagery ('class_map_imagery<p>').
#      The bare 'except:' around each map hides the actual error.
# ---------------------------------------------------------------------------
def map_texture(humfile, sonpath, cs2cs_args, dogrid, calc_bearing, filt_bearing, res, cog, dowrite): ''' Create plots of the texture lengthscale maps made in PyHum.texture module using the algorithm detailed by Buscombe et al. (forthcoming) This textural lengthscale is not a direct measure of grain size. Rather, it is a statistical representation that integrates over many attributes of bed texture, of which grain size is the most important. The technique is a physically based means to identify regions of texture within a sidescan echogram, and could provide a basis for objective, automated riverbed sediment classification. Syntax ---------- [] = PyHum.map_texture(humfile, sonpath, cs2cs_args, dogrid, calc_bearing, filt_bearing, res, cog, dowrite) Parameters ---------- humfile : str path to the .DAT file sonpath : str path where the *.SON files are cs2cs_args : int, *optional* [Default="epsg:26949"] arguments to create coordinates in a projected coordinate system this argument gets given to pyproj to turn wgs84 (lat/lon) coordinates into any projection supported by the proj.4 libraries dogrid : float, *optional* [Default=1] if 1, textures will be gridded with resolution 'res'. 
Otherwise, point cloud will be plotted calc_bearing : float, *optional* [Default=0] if 1, bearing will be calculated from coordinates filt_bearing : float, *optional* [Default=0] if 1, bearing will be filtered res : float, *optional* [Default=0.1] grid resolution of output gridded texture map cog : int, *optional* [Default=1] if 1, heading calculated assuming GPS course-over-ground rather than using a compass dowrite: int, *optional* [Default=1] if 1, point cloud data from each chunk is written to ascii file if 0, processing times are speeded up considerably but point clouds are not available for further analysis Returns ------- sonpath+'x_y_class'+str(p)+'.asc' : text file contains the point cloud of easting, northing, and texture lengthscales of the pth chunk sonpath+'class_GroundOverlay'+str(p)+'.kml': kml file contains gridded (or point cloud) texture lengthscale map for importing into google earth of the pth chunk sonpath+'class_map'+str(p)+'.png' : image overlay associated with the kml file sonpath+'class_map_imagery'+str(p)+'.png' : png image file gridded (or point cloud) texture lengthscale map overlain onto an image pulled from esri image server References ---------- .. [1] Buscombe, D., Grams, P.E., and Smith, S.M.C., Automated riverbed sediment classification using low-cost sidescan sonar. submitted to Journal of Hydraulic Engineering ''' # prompt user to supply file if no input file given if not humfile: print 'An input file is required!!!!!!' Tk().withdraw() # we don't want a full GUI, so keep the root window from appearing inputfile = askopenfilename(filetypes=[("DAT files","*.DAT")]) # prompt user to supply directory if no input sonpath is given if not sonpath: print 'A *.SON directory is required!!!!!!' 
Tk().withdraw() # we don't want a full GUI, so keep the root window from appearing sonpath = askdirectory() # print given arguments to screen and convert data type where necessary if humfile: print 'Input file is %s' % (humfile) if sonpath: print 'Sonar file path is %s' % (sonpath) if cs2cs_args: print 'cs2cs arguments are %s' % (cs2cs_args) if dogrid: dogrid = int(dogrid) if dogrid==1: print "Data will be gridded" if calc_bearing: calc_bearing = int(calc_bearing) if calc_bearing==1: print "Bearing will be calculated from coordinates" if filt_bearing: filt_bearing = int(filt_bearing) if filt_bearing==1: print "Bearing will be filtered" if res: res = np.asarray(res,float) print 'Gridding resolution: %s' % (str(res)) if cog: cog = int(cog) if cog==1: print "Heading based on course-over-ground" if dowrite: dowrite = int(dowrite) if dowrite==0: print "Point cloud data will be written to ascii file" if not cs2cs_args: # arguments to pass to cs2cs for coordinate transforms cs2cs_args = "epsg:26949" print '[Default] cs2cs arguments are %s' % (cs2cs_args) if not dogrid: if dogrid != 0: dogrid = 1 print "[Default] Data will be gridded" if not calc_bearing: if calc_bearing != 1: calc_bearing = 0 print "[Default] Heading recorded by instrument will be used" if not filt_bearing: if filt_bearing != 1: filt_bearing = 0 print "[Default] Heading will not be filtered" if not res: res = 0.5 print '[Default] Grid resolution is %s m' % (str(res)) if not cog: if cog != 0: cog = 1 print "[Default] Heading based on course-over-ground" if not dowrite: if dowrite != 0: dowrite = 1 print "[Default] Point cloud data will be written to ascii file" trans = pyproj.Proj(init=cs2cs_args) # if son path name supplied has no separator at end, put one on if sonpath[-1]!=os.sep: sonpath = sonpath + os.sep base = humfile.split('.DAT') # get base of file name for output base = base[0].split(os.sep)[-1] # remove underscores, negatives and spaces from basename if base.find('_')>-1: base = 
base[:base.find('_')] if base.find('-')>-1: base = base[:base.find('-')] if base.find(' ')>-1: base = base[:base.find(' ')] esi = np.squeeze(loadmat(sonpath+base+'meta.mat')['e']) nsi = np.squeeze(loadmat(sonpath+base+'meta.mat')['n']) pix_m = np.squeeze(loadmat(sonpath+base+'meta.mat')['pix_m']) dep_m = np.squeeze(loadmat(sonpath+base+'meta.mat')['dep_m']) c = np.squeeze(loadmat(sonpath+base+'meta.mat')['c']) dist_m = np.squeeze(loadmat(sonpath+base+'meta.mat')['dist_m']) # over-ride measured bearing and calc from positions if calc_bearing==1: lat = np.squeeze(loadmat(sonpath+base+'meta.mat')['lat']) lon = np.squeeze(loadmat(sonpath+base+'meta.mat')['lon']) #point-to-point bearing bearing = np.zeros(len(lat)) for k in xrange(len(lat)-1): bearing[k] = bearingBetweenPoints(lat[k], lat[k+1], lon[k], lon[k+1]) del lat, lon else: # reported bearing by instrument (Kalman filtered?) bearing = np.squeeze(loadmat(sonpath+base+'meta.mat')['heading']) ## bearing can only be observed modulo 2*pi, therefore phase unwrap #bearing = np.unwrap(bearing) # if stdev in heading is large, there's probably noise that needs to be filtered out if np.std(bearing)>180: print "WARNING: large heading stdev - attempting filtering" from sklearn.cluster import MiniBatchKMeans # can have two modes data = np.column_stack([bearing, bearing]) k_means = MiniBatchKMeans(2) # fit the model k_means.fit(data) values = k_means.cluster_centers_.squeeze() labels = k_means.labels_ if np.sum(labels==0) > np.sum(labels==1): bearing[labels==1] = np.nan else: bearing[labels==0] = np.nan nans, y= humutils.nan_helper(bearing) bearing[nans]= np.interp(y(nans), y(~nans), bearing[~nans]) if filt_bearing ==1: bearing = humutils.runningMeanFast(bearing, len(bearing)/100) theta = np.asarray(bearing, 'float')/(180/np.pi) # this is standard course over ground if cog==1: #course over ground is given as a compass heading (ENU) from True north, or Magnetic north. 
#To get this into NED (North-East-Down) coordinates, you need to rotate the ENU # (East-North-Up) coordinate frame. #Subtract pi/2 from your heading theta = theta - np.pi/2 # (re-wrap to Pi to -Pi) theta = np.unwrap(-theta) # load memory mapped scans shape_port = np.squeeze(loadmat(sonpath+base+'meta.mat')['shape_port']) if shape_port!='': port_fp = np.memmap(sonpath+base+'_data_port_l.dat', dtype='float32', mode='r', shape=tuple(shape_port)) shape_star = np.squeeze(loadmat(sonpath+base+'meta.mat')['shape_star']) if shape_star!='': star_fp = np.memmap(sonpath+base+'_data_star_l.dat', dtype='float32', mode='r', shape=tuple(shape_star)) shape = shape_port.copy() shape[1] = shape_port[1] + shape_star[1] class_fp = np.memmap(sonpath+base+'_data_class.dat', dtype='float32', mode='r', shape=tuple(shape)) tvg = ((8.5*10**-5)+(3/76923)+((8.5*10**-5)/4))*c dist_tvg = ((np.tan(np.radians(25)))*dep_m)-(tvg) for p in xrange(len(class_fp)): e = esi[shape_port[-1]*p:shape_port[-1]*(p+1)] n = nsi[shape_port[-1]*p:shape_port[-1]*(p+1)] t = theta[shape_port[-1]*p:shape_port[-1]*(p+1)] d = dist_tvg[shape_port[-1]*p:shape_port[-1]*(p+1)] len_n = len(n) merge = class_fp[p].copy() merge[np.isnan(merge)] = 0 merge[np.isnan(np.vstack((np.flipud(port_fp[p]),star_fp[p])))] = 0 extent = shape_port[1] R1 = merge[extent:,:] R2 = np.flipud(merge[:extent,:]) merge = np.vstack((R2,R1)) del R1, R2 # get number pixels in scan line extent = int(np.shape(merge)[0]/2) yvec = np.linspace(pix_m,extent*pix_m,extent) print "getting point cloud ..." 
# get the points by rotating the [x,y] vector so it lines up with boat heading X=[]; Y=[]; for k in range(len(n)): x = np.concatenate((np.tile(e[k],extent) , np.tile(e[k],extent))) #y = np.concatenate((n[k]+yvec, n[k]-yvec)) rangedist = np.sqrt(np.power(yvec, 2.0) - np.power(d[k], 2.0)) y = np.concatenate((n[k]+rangedist, n[k]-rangedist)) # Rotate line around center point xx = e[k] - ((x - e[k]) * np.cos(t[k])) - ((y - n[k]) * np.sin(t[k])) yy = n[k] - ((x - e[k]) * np.sin(t[k])) + ((y - n[k]) * np.cos(t[k])) xx, yy = calc_beam_pos(d[k], t[k], xx, yy) X.append(xx) Y.append(yy) del e, n, t, x, y # merge flatten and stack X = np.asarray(X,'float') X = X.flatten() # merge flatten and stack Y = np.asarray(Y,'float') Y = Y.flatten() merge[merge==0] = np.nan if len(merge.flatten()) != len(X): merge = merge[:,:len_n] merge = merge.T.flatten() index = np.where(np.logical_not(np.isnan(merge)))[0] X = X.flatten()[index] Y = Y.flatten()[index] merge = merge.flatten()[index] X = X[np.where(np.logical_not(np.isnan(Y)))] merge = merge.flatten()[np.where(np.logical_not(np.isnan(Y)))] Y = Y[np.where(np.logical_not(np.isnan(Y)))] Y = Y[np.where(np.logical_not(np.isnan(X)))] merge = merge.flatten()[np.where(np.logical_not(np.isnan(X)))] X = X[np.where(np.logical_not(np.isnan(X)))] X = X[np.where(np.logical_not(np.isnan(merge)))] Y = Y[np.where(np.logical_not(np.isnan(merge)))] merge = merge[np.where(np.logical_not(np.isnan(merge)))] if dowrite==1: # write raw bs to file outfile = sonpath+'x_y_class'+str(p)+'.asc' with open(outfile, 'w') as f: np.savetxt(f, np.hstack((humutils.ascol(X),humutils.ascol(Y), humutils.ascol(merge))), delimiter=' ', fmt="%8.6f %8.6f %8.6f") humlon, humlat = trans(X, Y, inverse=True) if dogrid==1: grid_x, grid_y = np.meshgrid( np.arange(np.min(X), np.max(X), res), np.arange(np.min(Y), np.max(Y), res) ) dat = griddata(np.c_[X.flatten(),Y.flatten()], merge.flatten(), (grid_x, grid_y), method='nearest') ## create mask for where the data is not tree = 
KDTree(np.c_[X.flatten(),Y.flatten()]) dist, _ = tree.query(np.c_[grid_x.ravel(), grid_y.ravel()], k=1) dist = dist.reshape(grid_x.shape) del X, Y if dogrid==1: ## mask dat[dist> 1 ] = np.nan del dist, tree dat[dat==0] = np.nan dat[np.isinf(dat)] = np.nan datm = np.ma.masked_invalid(dat) glon, glat = trans(grid_x, grid_y, inverse=True) del grid_x, grid_y try: print "drawing and printing map ..." fig = plt.figure(frameon=False) map = Basemap(projection='merc', epsg=cs2cs_args.split(':')[1], #26949, resolution = 'i', #h #f llcrnrlon=np.min(humlon)-0.001, llcrnrlat=np.min(humlat)-0.001, urcrnrlon=np.max(humlon)+0.001, urcrnrlat=np.max(humlat)+0.001) if dogrid==1: gx,gy = map.projtran(glon, glat) ax = plt.Axes(fig, [0., 0., 1., 1.], ) ax.set_axis_off() fig.add_axes(ax) if dogrid==1: map.pcolormesh(gx, gy, datm, cmap='YlOrRd', vmin=0.5, vmax=2) del dat else: ## draw point cloud x,y = map.projtran(humlon, humlat) map.scatter(x.flatten(), y.flatten(), 0.5, merge.flatten(), cmap='YlOrRd', linewidth = '0') custom_save(sonpath,'class_map'+str(p)) del fig except: print "error: map could not be created..." 
kml = simplekml.Kml() ground = kml.newgroundoverlay(name='GroundOverlay') ground.icon.href = 'class_map'+str(p)+'.png' ground.latlonbox.north = np.min(humlat)-0.001 ground.latlonbox.south = np.max(humlat)+0.001 ground.latlonbox.east = np.max(humlon)+0.001 ground.latlonbox.west = np.min(humlon)-0.001 ground.latlonbox.rotation = 0 kml.save(sonpath+'class_GroundOverlay'+str(p)+'.kml') if dowrite==1: X = []; Y = []; S = []; for p in xrange(len(class_fp)): dat = np.genfromtxt(sonpath+'x_y_class'+str(p)+'.asc', delimiter=' ') X.append(dat[:,0]) Y.append(dat[:,1]) S.append(dat[:,2]) del dat # merge flatten and stack X = np.asarray(np.hstack(X),'float') X = X.flatten() # merge flatten and stack Y = np.asarray(np.hstack(Y),'float') Y = Y.flatten() # merge flatten and stack S = np.asarray(np.hstack(S),'float') S = S.flatten() humlon, humlat = trans(X, Y, inverse=True) if dogrid==1: grid_x, grid_y = np.meshgrid( np.arange(np.min(X), np.max(X), res), np.arange(np.min(Y), np.max(Y), res) ) dat = griddata(np.c_[X.flatten(),Y.flatten()], S.flatten(), (grid_x, grid_y), method='nearest') ## create mask for where the data is not tree = KDTree(np.c_[X.flatten(),Y.flatten()]) dist, _ = tree.query(np.c_[grid_x.ravel(), grid_y.ravel()], k=1) dist = dist.reshape(grid_x.shape) del X, Y if dogrid==1: ## mask dat[dist> 1 ] = np.nan del dist, tree dat[dat==0] = np.nan dat[np.isinf(dat)] = np.nan datm = np.ma.masked_invalid(dat) glon, glat = trans(grid_x, grid_y, inverse=True) del grid_x, grid_y levels = [0.5,0.75,1.25,1.5,1.75,2,3] try: print "drawing and printing map ..." 
fig = plt.figure() map = Basemap(projection='merc', epsg=cs2cs_args.split(':')[1], resolution = 'i', llcrnrlon=np.min(humlon)-0.001, llcrnrlat=np.min(humlat)-0.001, urcrnrlon=np.max(humlon)+0.001, urcrnrlat=np.max(humlat)+0.001) map.arcgisimage(server='http://server.arcgisonline.com/ArcGIS', service='World_Imagery', xpixels=1000, ypixels=None, dpi=300) if dogrid==1: gx,gy = map.projtran(glon, glat) if dogrid==1: map.contourf(gx, gy, datm, levels, cmap='YlOrRd') else: ## draw point cloud x,y = map.projtran(humlon, humlat) map.scatter(x.flatten(), y.flatten(), 0.5, S.flatten(), cmap='YlOrRd', linewidth = '0') custom_save2(sonpath,'class_map_imagery'+str(p)) del fig except: print "error: map could not be created..."
# ---------------------------------------------------------------------------
# Script fragment: overlay a Japanese prefecture shape on maps of Europe.
# NOTE(review): whitespace-mangled text, reproduced verbatim below; only
# this comment header is added. Visible steps: an orthographic Basemap
# centered on (latCtr, lonCtr) on ax[1] with coast/country lines and a
# 30-degree graticule, titled from shapeRdr0's first record; the shape is
# drawn via plotPrefecture with lat/lon offsets (dLatJ-latJpn,
# dLonJ-lonJpn). Then a second figure: a Mercator map of Europe
# (lat 30..75, lon -25..40), graticule from the map's own lat/lon limits,
# the prefecture drawn at its true latitude (latOff=0), and a scatter +
# arrow annotation at the offset Kitakyushu position computed through
# mercMapE.projtran. NOTE(review): the bare string literal
# ' Japan and Kitakyushu overlaid on Europe.' between the two figures is a
# no-op expression -- presumably a caption that lost its markup/comment
# marker; confirm intent. Both figures are shown with fig.show().
# ---------------------------------------------------------------------------
ortnMapE=Basemap(projection='ortho',lat_0=latCtr,lon_0=lonCtr,resolution='c', ax=ax[1]) ortnMapE.drawcoastlines(linewidth=0.5) ortnMapE.drawcountries(linewidth=0.25) ortnMapE.drawmeridians(numpy.arange(0,360,30)) ortnMapE.drawparallels(numpy.arange(-90,90,30)) ax[1].set_title(r'${}\ over\ Europe$'.format(shapeRdr0.records()[0][4]), fontsize=titleFontSize) plotPrefecture(shp=shape,colour='gold',lwdth=2,bMap=ortnMapE,axes=ax[1], latOff=dLatJ-latJpn,longOff=dLonJ-lonJpn) fig.show() ' Japan and Kitakyushu overlaid on Europe.' fig,ax=matplotlib.pyplot.subplots(1,1,figsize=(16,8)) mercMapE=Basemap(projection='merc',llcrnrlat=30,urcrnrlat=75,llcrnrlon=-25, urcrnrlon=40,lat_ts=10,ax=ax,resolution='l') mercMapE.drawcoastlines(linewidth=0.5) mercMapE.drawcountries(linewidth=0.25) mercMapE.drawparallels(numpy.arange(mercMapE.latmin,mercMapE.latmax,10.)) mercMapE.drawmeridians(numpy.arange(mercMapE.lonmin,mercMapE.lonmax,15.)) ax.set_title(r'$Europe,\ true\ lat.$',fontsize=titleFontSize) plotPrefecture(shp=shape,colour='gold',lwdth=2,bMap=mercMapE,axes=ax, latOff=0,longOff=dLonJ-lonJpn) # Show annotation at the true latitude. xKIT,yKIT=mercMapE.projtran(130.834730+dLonJ-lonJpn,33.8924837) xTXT,yTXT=mercMapE.projtran(110.834730+dLonJ-lonJpn,45.8924837) ax.scatter([xKIT],[yKIT],s=50,c='crimson') ax.annotate('Here', xy=(xKIT,yKIT),xytext=(xTXT,yTXT),color='crimson', arrowprops=dict(facecolor='crimson', shrink=0.05)) fig.show()
# ---------------------------------------------------------------------------
# Script fragment: side-by-side substrate-classification maps.
# NOTE(review): whitespace-mangled text, reproduced verbatim below; only
# this comment header is added. Visible steps: build three square legend
# handles from a red/yellow/green palette; create a 3-column figure; on ax
# draw a Mercator Basemap over the glon/glat extents with USGS WMS
# imagery, contourf the transposed lsq_data at levels [0,1,2,3] with an
# 'RdYlGn' colormap, add a 200 m fancy map scale and a 3-entry legend
# (sand / gravel / boulders); then repeat the same construction on ax1 for
# gmm2_data with a 2-entry legend. The third axis (ax2) is created but not
# used within this fragment -- presumably filled further on; confirm.
# NOTE(review): corner expressions take np.min(glon-0.0004) etc., i.e. the
# offset is applied inside min/max rather than to the result -- same value
# for a constant offset, but written inconsistently with other chunks.
# ---------------------------------------------------------------------------
colors = ['#EA5739','#FEFFBE','#4BB05C'] circ1 = Line2D([0], [0], linestyle="none", marker="s", markersize=10, markerfacecolor=colors[0],alpha=a_val) circ3 = Line2D([0], [0], linestyle="none", marker="s", markersize=10, markerfacecolor=colors[1],alpha=a_val) circ4 = Line2D([0], [0], linestyle="none", marker="s", markersize=10, markerfacecolor=colors[2],alpha=a_val) fig,(ax,ax1,ax2)= plt.subplots(ncols=3) m = Basemap(projection='merc', epsg=cs2cs_args.split(':')[1], llcrnrlon=np.min(glon-0.0004), llcrnrlat=np.min(glat-0.0004), urcrnrlon=np.max(glon+0.0008), urcrnrlat=np.max(glat+0.0004),ax=ax) m.wmsimage(server='http://grandcanyon.usgs.gov/arcgis/services/Imagery/ColoradoRiverImageryExplorer/MapServer/WmsServer?', layers=['3'], xpixels=1000) x,y = m.projtran(glon, glat) m.contourf(x,y,lsq_data.T, cmap='RdYlGn',levels=[0,1,2,3]) m.drawmapscale(np.min(glon)+0.001, np.min(glat)+0.0030, np.min(glon), np.min(glat), 200., units='m', barstyle='fancy', labelstyle='simple', fontcolor='black') ax.legend((circ1, circ3,circ4), ("1 = sand", "2 = gravel","3 = boulders"), numpoints=1, loc=1, borderaxespad=0., fontsize=8) m = Basemap(projection='merc', epsg=cs2cs_args.split(':')[1], llcrnrlon=np.min(glon-0.0004), llcrnrlat=np.min(glat-0.0004), urcrnrlon=np.max(glon+0.0008), urcrnrlat=np.max(glat+0.0004),ax=ax1) m.wmsimage(server='http://grandcanyon.usgs.gov/arcgis/services/Imagery/ColoradoRiverImageryExplorer/MapServer/WmsServer?', layers=['3'], xpixels=1000) x,y = m.projtran(glon, glat) m.contourf(x,y,gmm2_data.T, cmap='RdYlGn',levels=[0,1,2,3]) m.drawmapscale(np.min(glon)+0.001, np.min(glat)+0.0030, np.min(glon), np.min(glat), 200., units='m', barstyle='fancy', labelstyle='simple', fontcolor='black') ax1.legend((circ1, circ4), ("1 = sand", "3 = boulders"), numpoints=1, loc=1, borderaxespad=0., fontsize=8)
def make_map(e, n, t, d, dat_port, dat_star, data_R, pix_m, res, cs2cs_args,
             sonpath, p, dogrid, dowrite, mode, nn, influence, numstdevs):
    """Grid (or point-cloud) and map the merged sidescan record for chunk p.

    Merges the port and starboard scans, georeferences every sample,
    optionally writes the raw samples to an ascii file (dowrite==1),
    optionally resamples them onto a regular grid (dogrid==1) with one of
    three pyresample strategies (mode 1 = nearest neighbour, 2 = custom
    inverse-distance, 3 = Gaussian), draws the result over a mercator
    Basemap, saves the png via custom_save, and writes a kml GroundOverlay
    that references it.

    Parameters (passed straight through from the caller): e/n/t/d are
    per-ping easting/northing/time/depth vectors; dat_port/dat_star the two
    scan arrays; data_R the range array; pix_m the pixel size (m); res the
    grid resolution (m); cs2cs_args a proj.4/epsg string; sonpath the output
    directory; p the chunk index; nn/influence/numstdevs the resampling
    neighbour count, radius of influence (m) and std-dev filter threshold.
    Returns None; all output is files/figures (side effects only).
    """
    trans = pyproj.Proj(init=cs2cs_args)

    # merge port and starboard scans into a single swath
    merge = np.vstack((dat_port, dat_star))
    #merge = np.vstack((np.flipud(port_fp[p]),star_fp[p]))
    merge[np.isnan(merge)] = 0
    merge = merge[:, :len(n)]

    R = np.vstack((np.flipud(data_R), data_R))
    R = R[:np.shape(merge)[0], :np.shape(merge)[1]]

    # get number pixels in scan line (half the stacked height)
    extent = int(np.shape(merge)[0] / 2)
    yvec = np.linspace(pix_m, extent * pix_m, extent)

    X, Y, D, h, t = getXY(e, n, yvec, d, t, extent)
    D[np.isnan(D)] = 0
    h[np.isnan(h)] = 0
    t[np.isnan(t)] = 0

    merge = merge.flatten()
    R = R.flatten()

    # Drop samples whose position or backscatter is NaN.  The mask is built
    # once from X, Y and merge together and applied to every co-registered
    # vector so they all stay the same length.  (The previous code re-derived
    # the mask from the already-filtered arrays, which silently truncated
    # D, R, h and t instead of filtering them, misaligning the csv columns.)
    keep = np.logical_not(np.isnan(X) | np.isnan(Y) | np.isnan(merge))
    X = X[keep]
    Y = Y[keep]
    merge = merge[keep]
    D = D[keep]
    R = R[keep]
    h = h[keep]
    t = t[keep]

    if dowrite == 1:
        ## write raw bs to file: position, backscatter, depth, cos(range
        ## angle), heading and time for this chunk
        outfile = os.path.normpath(os.path.join(sonpath, 'x_y_ss_raw' + str(p) + '.asc'))
        write.txtwrite(outfile, np.hstack((humutils.ascol(X.flatten()),
                                           humutils.ascol(Y.flatten()),
                                           humutils.ascol(merge.flatten()),
                                           humutils.ascol(D.flatten()),
                                           humutils.ascol(np.cos(R.flatten())),
                                           humutils.ascol(h.flatten()),
                                           humutils.ascol(t.flatten()))))

    del D, R, h, t

    humlon, humlat = trans(X, Y, inverse=True)

    if dogrid == 1:

        def _swath_defs(res):
            # Build the regular target grid at resolution `res` (m) plus the
            # pyresample source/target swath definitions.  Factored out
            # because the memory-error fallback repeats it for every mode.
            grid_x, grid_y = np.meshgrid(np.arange(np.min(X), np.max(X), res),
                                         np.arange(np.min(Y), np.max(Y), res))
            longrid, latgrid = trans(grid_x, grid_y, inverse=True)
            targ_def = pyresample.geometry.SwathDefinition(
                lons=longrid.flatten(), lats=latgrid.flatten())
            orig_def = pyresample.geometry.SwathDefinition(
                lons=humlon.flatten(), lats=humlat.flatten())
            return grid_x, grid_y, np.shape(grid_x), targ_def, orig_def

        grid_x, grid_y, shape, targ_def, orig_def = _swath_defs(res)

        if mode == 1:
            # nearest neighbour
            try:
                dat = pyresample.kd_tree.resample_nearest(
                    orig_def, merge.flatten(), targ_def,
                    radius_of_influence=influence, fill_value=None,
                    nprocs=cpu_count())
            except Exception:
                print('Memory error: trying a grid resolution twice as big')
                res = res * 2
                grid_x, grid_y, shape, targ_def, orig_def = _swath_defs(res)
                dat = pyresample.kd_tree.resample_nearest(
                    orig_def, merge.flatten(), targ_def,
                    radius_of_influence=influence, fill_value=None,
                    nprocs=cpu_count())

        elif mode == 2:
            # custom inverse distance weighting
            wf = lambda r: 1 / r ** 2
            try:
                dat, stdev, counts = pyresample.kd_tree.resample_custom(
                    orig_def, merge.flatten(), targ_def,
                    radius_of_influence=influence, neighbours=nn,
                    weight_funcs=wf, fill_value=None, with_uncert=True,
                    nprocs=cpu_count())
            except Exception:
                print('Memory error: trying a grid resolution twice as big')
                res = res * 2
                grid_x, grid_y, shape, targ_def, orig_def = _swath_defs(res)
                dat, stdev, counts = pyresample.kd_tree.resample_custom(
                    orig_def, merge.flatten(), targ_def,
                    radius_of_influence=influence, neighbours=nn,
                    weight_funcs=wf, fill_value=None, with_uncert=True,
                    nprocs=cpu_count())

        elif mode == 3:
            # Gaussian weighting
            sigmas = 1  # m
            eps = 2
            try:
                # with_uncert is a boolean flag; previously np.nan was passed
                # here, which only behaved as True because NaN is truthy
                dat, stdev, counts = pyresample.kd_tree.resample_gauss(
                    orig_def, merge.flatten(), targ_def,
                    radius_of_influence=influence, neighbours=nn,
                    sigmas=sigmas, fill_value=None, with_uncert=True,
                    nprocs=cpu_count(), epsilon=eps)
            except Exception:
                print('Memory error: trying a grid resolution twice as big')
                res = res * 2
                grid_x, grid_y, shape, targ_def, orig_def = _swath_defs(res)
                dat, stdev, counts = pyresample.kd_tree.resample_gauss(
                    orig_def, merge.flatten(), targ_def,
                    radius_of_influence=influence, neighbours=nn,
                    sigmas=sigmas, fill_value=None, with_uncert=True,
                    nprocs=cpu_count(), epsilon=eps)

        del X, Y

        dat = dat.reshape(shape)
        if mode > 1:
            stdev = stdev.reshape(shape)
            counts = counts.reshape(shape)

        mask = dat.mask.copy()
        dat[mask == 1] = 0

        if mode > 1:
            # blank out noisy / under-sampled cells
            dat[(stdev > numstdevs) & (mask != 0)] = np.nan
            dat[(counts < nn) & (counts > 0)] = np.nan

        # fill interior holes, then rebuild the outside mask via
        # erosion/dilation so the fill does not bleed past the swath edge
        dat2 = replace_nans.RN(dat.astype('float64'), 1000, 0.01, 2,
                               'localmean').getdata()
        dat2[dat == 0] = np.nan

        # get a new mask
        mask = np.isnan(dat2)
        mask = ~binary_dilation(binary_erosion(~mask, structure=np.ones((15, 15))),
                                structure=np.ones((15, 15)))
        dat2[mask == 1] = np.nan
        dat2[dat2 < 1] = np.nan
        del dat
        dat = dat2
        del dat2

        dat[dat == 0] = np.nan
        dat[np.isinf(dat)] = np.nan
        datm = np.ma.masked_invalid(dat)

        glon, glat = trans(grid_x, grid_y, inverse=True)
        del grid_x, grid_y

    try:
        print('drawing and printing map ...')
        fig = plt.figure(frameon=False)
        map = Basemap(projection='merc', epsg=cs2cs_args.split(':')[1],  #26949,
                      resolution='i',  #h #f
                      llcrnrlon=np.min(humlon) - 0.00001,
                      llcrnrlat=np.min(humlat) - 0.00001,
                      urcrnrlon=np.max(humlon) + 0.00001,
                      urcrnrlat=np.max(humlat) + 0.00001)

        if dogrid == 1:
            gx, gy = map.projtran(glon, glat)

        # frameless, full-bleed axes so the saved png can serve as a kml overlay
        ax = plt.Axes(fig, [0., 0., 1., 1.], )
        ax.set_axis_off()
        fig.add_axes(ax)

        if dogrid == 1:
            map.pcolormesh(gx, gy, datm, cmap='gray',
                           vmin=np.nanmin(datm), vmax=np.nanmax(datm))
            del datm, dat
        else:
            ## draw point cloud
            x, y = map.projtran(humlon, humlat)
            map.scatter(x.flatten(), y.flatten(), 0.5, merge.flatten(),
                        cmap='gray', linewidth='0')

        custom_save(sonpath, 'map' + str(p))
        del fig
    except Exception:
        print('error: map could not be created...')

    # kml GroundOverlay referencing the png written above
    # NOTE(review): north/south get min/max swapped relative to the usual
    # convention -- kept as-is; confirm against the rendered overlay
    kml = simplekml.Kml()
    ground = kml.newgroundoverlay(name='GroundOverlay')
    ground.icon.href = 'map' + str(p) + '.png'
    ground.latlonbox.north = np.min(humlat) - 0.00001
    ground.latlonbox.south = np.max(humlat) + 0.00001
    ground.latlonbox.east = np.max(humlon) + 0.00001
    ground.latlonbox.west = np.min(humlon) - 0.00001
    ground.latlonbox.rotation = 0
    #kml.save(sonpath+'GroundOverlay'+str(p)+'.kml')
    kml.save(os.path.normpath(os.path.join(sonpath, 'GroundOverlay' + str(p) + '.kml')))
    del humlat, humlon
# September 2014 (R01767) sidescan map: contour the gridded intensities over
# a WMS aerial basemap, add a labelled colourbar, then read a WGS84 substrate
# shapefile and start sorting its polygons into per-substrate patch lists.
# NOTE(review): this line arrived with all formatting collapsed and its
# trailing for/if is truncated at the chunk boundary -- code kept byte-for-
# byte; only this comment header was added.
glon, glat = trans(xx, yy, inverse=True) #ortho_lon, ortho_lat = trans(ortho_x, ortho_y, inverse=True) cs2cs_args = "epsg:26949" ss_level=[0,2.5,5,7.5,10,12.5,15,17.5,20,22.5,25,27.5,30,32.5,35] fig = plt.figure(figsize=(15,12)) ax = plt.subplot2grid((5,2),(0, 0),rowspan=4) ax.set_title('September 2014 \n R01767') m = Basemap(projection='merc', epsg=cs2cs_args.split(':')[1], llcrnrlon=np.min(glon) - 0.0002, llcrnrlat=np.min(glat) - 0.0006, urcrnrlon=np.max(glon) + 0.0002, urcrnrlat=np.max(glat) + 0.0006) m.wmsimage(server='http://grandcanyon.usgs.gov/arcgis/services/Imagery/ColoradoRiverImageryExplorer/MapServer/WmsServer?', layers=['3'], xpixels=1000) x,y = m.projtran(glon, glat) im = m.contourf(x,y,data.T, cmap='Greys_r',levels=ss_level) divider = make_axes_locatable(ax) cax = divider.append_axes("right", size="5%", pad=0.05) cbr = plt.colorbar(im, cax=cax) cbr.set_label('Sidescan Intensity [dBW]', size=10) #read shapefile and create polygon collections ##NOTE: Shapefile has to be in WGS84 m.readshapefile( r"C:\workspace\Merged_SS\window_analysis\shapefiles\tex_seg_2014_09_67_geo","layer",drawbounds = False) #sand, sand/gravel, gravel/sand, ledge, gravel, gravel/boulders, boulders, boulder s_patch, sg_patch, gs_patch, g_patch, gb_patch, b_patch =[],[],[],[],[],[] for info, shape in zip(m.layer_info, m.layer): if info['substrate'] == 'sand':
# e1e2: analyses the first ('roughness', e1) and second ('hardness', e2)
# echo returns from the high-frequency downward echosounder, corrects for
# absorption of sound in water, runs MiniBatchKMeans on the e1/e2 (sv_e1/
# sv_e2) coefficients, writes a per-chunk 'rough_and_hard*.csv', and (when
# doplot==1) produces echogram/scatter/map png files plus kml overlays.
# The full parameter list is in the embedded docstring below.
# NOTE(review): this function arrived with all formatting collapsed (one
# physical line per original chunk) and several chunk boundaries split
# string literals mid-token, so the code is kept byte-for-byte; only this
# comment header was added.  The two large branches (3-D memory-mapped data
# vs 2-D) duplicate the whole processing/plotting pipeline -- candidate for
# a helper-function refactor once the file is reformatted.
def e1e2(humfile, sonpath, cs2cs_args, ph, temp, salinity, beam, transfreq, integ, numclusters, doplot): ''' Analysis of first (e1, 'roughness') and second (e2, 'hardness') echo returns from the high-frequency downward looking echosounder Generates generalised acoustic parameters for the purposes of point classification of submerged substrates/vegetation Accounts for the absorption of sound in water Does a basic k-means cluster of e1 and e2 coefficients into specified number of 'acoustic classes' based on code by Barb Fagetter ([email protected]) Syntax ---------- [] = PyHum.e1e2(humfile, sonpath, cs2cs_args, ph, temp, salinity, beam, transfreq, integ, numclusters, doplot) Parameters ---------- humfile : str path to the .DAT file sonpath : str path where the *.SON files are cs2cs_args : int, *optional* [Default="epsg:26949"] arguments to create coordinates in a projected coordinate system this argument gets given to pyproj to turn wgs84 (lat/lon) coordinates into any projection supported by the proj.4 libraries ph : float, *optional* [Default=7.0] water acidity in pH temp : float, *optional* [Default=10.0] water temperature in degrees Celsius salinity : float, *optional* [Default=0.0] salinity of water in parts per thousand beam : float, *optional* [Default=20.0] beam width in degrees transfreq : float, *optional* [Default=200.0] transducer frequency in kHz integ : int, *optional* [Default=5] number of pings over which to integrate numclusters : int, *optional* [Default=3] number of acoustic classes to classify all the data into doplot : int, *optional* [Default=1] 1 = make plots, otherwise do not Returns ------- sonpath+base+'rough_and_hard'+str(p)+'.csv' : csv file contains the following fields: 'longitude', 'latitude', 'easting', 'northing', 'depth', 'roughness', 'hardness', 'average roughness', 'average hardness','k-mean label' of the pth chunk 'average' implies average over 'integ' successive pings The following are returned if doplot==1: 
sonpath+'e1e2_scan'+str(p).png : png image file png image file showing the downward echosounder echogram overlain with the locations of the start and end of the first and second echo region envelope sonpath+'e1e2_kmeans'+str(p).png: png image file png image file showing 1) (left) volume scattering coefficient 1 versus volume scattering coefficient 2, colour-coded by k-means acoustic class, and 2) (right) e1 versus e2, colour-coded by k-means acoustic class sonpath+'rgh_hard_kmeans'+str(p).png : png image file png image file showing scatter plot of easting versus northing colour-coded by k-means acoustic class sonpath+'map_rgh'+str(p).png : png image file png image file showing scatter plot of 'roughness' (e1) overlying an aerial image pulled from an ESRI image server sonpath+'map_hard'+str(p).png : png image file png image file showing scatter plot of 'hardness' (e2) overlying an aerial image pulled from an ESRI image server sonpath,'Rough'+str(p).png : png image file png image overlay associated with the kml file, sonpath,'Hard'+str(p).kml sonpath,'Rough'+str(p).kml : kml file kml overlay for showing roughness scatter plot (sonpath,'Rough'+str(p).png) sonpath,'Hard'+str(p).png : png image file png image overlay associated with the kml file, sonpath,'Hard'+str(p).kml sonpath,'Hard'+str(p).kml : kml file kml overlay for showing harness scatter plot (sonpath,'Hard'+str(p).png) ''' # prompt user to supply file if no input file given if not humfile: print('An input file is required!!!!!!') Tk().withdraw( ) # we don't want a full GUI, so keep the root window from appearing inputfile = askopenfilename(filetypes=[("DAT files", "*.DAT")]) # prompt user to supply directory if no input sonpath is given if not sonpath: print('A *.SON directory is required!!!!!!') Tk().withdraw( ) # we don't want a full GUI, so keep the root window from appearing sonpath = askdirectory() # print given arguments to screen and convert data type where necessary if humfile: print('Input file is 
%s' % (humfile)) if sonpath: print('Sonar file path is %s' % (sonpath)) if cs2cs_args: print('cs2cs arguments are %s' % (cs2cs_args)) if beam: beam = np.asarray(beam, float) print('Beam is %s deg' % (str(beam))) if salinity: salinity = np.asarray(salinity, float) print('Salinity is %s ppt' % (str(salinity))) if ph: ph = np.asarray(ph, float) print('pH is %s' % (str(ph))) if temp: temp = np.asarray(temp, float) print('Temperature is %s' % (str(temp))) if transfreq: transfreq = np.asarray(transfreq, float) print('Dwnward sonar freq. is %s' % (str(transfreq))) if integ: integ = np.asarray(integ, int) print('number of records for integration is %s' % (str(integ))) if numclusters: numclusters = np.asarray(numclusters, int) print('number of returned acoustic clusters is %s' % (str(numclusters))) if doplot: doplot = int(doplot) if doplot == 0: print("Plots will not be made") # if son path name supplied has no separator at end, put one on if sonpath[-1] != os.sep: sonpath = sonpath + os.sep base = humfile.split('.DAT') # get base of file name for output base = base[0].split(os.sep)[-1] # remove underscores, negatives and spaces from basename base = humutils.strip_base(base) meta = loadmat(os.path.normpath(os.path.join(sonpath, base + 'meta.mat'))) beamwidth = beam * (np.sqrt(0.5)) equivbeam = (5.78 / (np.power(1.6, 2))) * (np.power((np.sin( (beamwidth * np.pi) / (2 * 180))), 2)) meta = loadmat(sonpath + base + 'meta.mat') c = np.squeeze(meta['c']) t = np.squeeze(meta['t']) f = np.squeeze(meta['f']) maxW = np.squeeze(meta['maxW']) lat = np.squeeze(meta['lat']) lon = np.squeeze(meta['lon']) es = np.squeeze(meta['e']) ns = np.squeeze(meta['n']) dep = np.squeeze(meta['dep_m']) #del meta # load memory mapped scans shape_hi = np.squeeze(meta['shape_hi']) if shape_hi != '': try: #dwnhi_fp = np.memmap(sonpath+base+'_data_dwnhi.dat', dtype='int16', mode='r', shape=tuple(shape_hi)) with open( os.path.normpath( os.path.join(sonpath, base + '_data_dwnhi.dat')), 'r') as ff: dwnhi_fp = 
np.memmap(ff, dtype='int16', mode='r', shape=tuple(shape_hi)) except: shape_lo = np.squeeze(meta['shape_low']) #dwnhi_fp = np.memmap(sonpath+base+'_data_dwnhi.dat', dtype='int16', mode='r', shape=tuple(shape_lo)) with open( os.path.normpath( os.path.join(sonpath, base + '_data_dwnhi.dat')), 'r') as ff: dwnhi_fp = np.memmap(ff, dtype='int16', mode='r', shape=tuple(shape_lo)) if 'dwnhi_fp' in locals(): theta3dB = np.arcsin(c / (t * (f * 1000))) # *(180/pi) # to see in degs ft = (np.pi / 2) * (1 / theta3dB) bed = ft * dep if len(shape_hi) > 2: i = np.linspace(1, shape_hi[0] * shape_hi[2], len(bed)) #np.shape(beam_data)[1],len(bed)) #bedi = np.interp(np.linspace(1,np.shape(beam_data)[1],np.shape(beam_data)[1]), i, bed) bedi = np.interp( np.linspace(1, shape_hi[0] * shape_hi[2], shape_hi[0] * shape_hi[2]), i, bed) ei = np.interp( np.linspace(1, shape_hi[0] * shape_hi[2], shape_hi[0] * shape_hi[2]), i, es) ni = np.interp( np.linspace(1, shape_hi[0] * shape_hi[2], shape_hi[0] * shape_hi[2]), i, ns) lati = np.interp( np.linspace(1, shape_hi[0] * shape_hi[2], shape_hi[0] * shape_hi[2]), i, lat) loni = np.interp( np.linspace(1, shape_hi[0] * shape_hi[2], shape_hi[0] * shape_hi[2]), i, lon) del i else: i = np.linspace(1, shape_hi[1], len(bed)) #np.shape(beam_data)[1],len(bed)) #bedi = np.interp(np.linspace(1,np.shape(beam_data)[1],np.shape(beam_data)[1]), i, bed) bedi = np.interp(np.linspace(1, shape_hi[1], shape_hi[1]), i, bed) ei = np.interp(np.linspace(1, shape_hi[1], shape_hi[1]), i, es) ni = np.interp(np.linspace(1, shape_hi[1], shape_hi[1]), i, ns) lati = np.interp(np.linspace(1, shape_hi[1], shape_hi[1]), i, lat) loni = np.interp(np.linspace(1, shape_hi[1], shape_hi[1]), i, lon) del i bedi = np.asarray(bedi, 'int') depi = ((1 / ft) * bedi) # near-field region nf = int(ft * (1000 * (0.105**2) * f / (4 * 1500))) #absorption = calcAb(c, ph, salinity, temp, np.asarray(depi), transfreq) absorption = water_atten(np.asarray(depi), transfreq, c, ph, temp, salinity) if 
len(shape_hi) > 2: for p in range(len(dwnhi_fp)): #make an index of every other record ind = range(0, np.shape(dwnhi_fp[p])[1]) Zdepi = depi[shape_hi[2] * p:shape_hi[2] * (p + 1)] Zabsorp = absorption[shape_hi[2] * p:shape_hi[2] * (p + 1)] Zlat = lati[shape_hi[2] * p:shape_hi[2] * (p + 1)] Zlon = loni[shape_hi[2] * p:shape_hi[2] * (p + 1)] Zes = ei[shape_hi[2] * p:shape_hi[2] * (p + 1)] Zns = ni[shape_hi[2] * p:shape_hi[2] * (p + 1)] try: #parallel processing with all available cores w = Parallel(n_jobs=-1, verbose=0)(delayed( get_rgh_hrd)(dwnhi_fp[p][:, i], Zdepi[i], Zabsorp[i], c, nf, transfreq, equivbeam, maxW, pi, ft) for i in ind) except: #fall back to serial w = Parallel(n_jobs=1, verbose=0)(delayed( get_rgh_hrd)(dwnhi_fp[p][:, i], Zdepi[i], Zabsorp[i], c, nf, transfreq, equivbeam, maxW, pi, ft) for i in ind) rough, hard, sv_e1, sv_e2, e1a, e1b, e2a, e2b = zip(*w) rough = np.array(rough, 'float') rough[rough == 0.0] = np.nan hard = np.array(hard, 'float') hard[hard == 0.0] = np.nan sv_e1 = np.array(sv_e1, 'float') sv_e1[sv_e1 == 0.0] = np.nan sv_e2 = np.array(sv_e2, 'float') sv_e2[sv_e2 == 0.0] = np.nan try: nans, y = humutils.nan_helper(rough) rough[nans] = np.interp(y(nans), y(~nans), rough[~nans]) except: pass try: nans, y = humutils.nan_helper(hard) hard[nans] = np.interp(y(nans), y(~nans), hard[~nans]) except: pass try: nans, y = humutils.nan_helper(sv_e1) sv_e1[nans] = np.interp(y(nans), y(~nans), sv_e1[~nans]) except: pass try: nans, y = humutils.nan_helper(sv_e2) sv_e2[nans] = np.interp(y(nans), y(~nans), sv_e2[~nans]) except: pass data = np.column_stack([sv_e1, sv_e2]) k_means = MiniBatchKMeans(numclusters) # fit the model k_means.fit(data) values = k_means.cluster_centers_.squeeze() labels = k_means.labels_ hardav = humutils.runningMeanFast(hard, integ) roughav = humutils.runningMeanFast(rough, integ) #f = open(sonpath+base+'rough_and_hard'+str(p)+'.csv', 'wt') f = open( os.path.normpath( os.path.join(sonpath, base + 'rough_and_hard' + str(p) + 
'.csv')), 'wt') writer = csv.writer(f) writer.writerow( ('longitude', 'latitude', 'easting', 'northing', 'depth', 'roughness', 'hardness', 'average roughness', 'average hardness', 'k-mean label')) for i in range(0, len(rough)): writer.writerow( (float(Zlon[i]), float(Zlat[i]), float(Zes[i]), float(Zns[i]), float(Zdepi[i]), float(rough[i]), float(hard[i]), float(roughav[i]), float(hardav[i]), labels[i].astype(int))) f.close() if doplot == 1: try: fig = plt.figure() plt.imshow(dwnhi_fp[p], cmap='gray') plt.plot(e1a, 'r') plt.plot(e1b, 'y') plt.plot(e2a, 'c') plt.plot(e2b, 'm') plt.axis('tight') #plt.show() custom_save(sonpath, 'e1e2_scan' + str(p)) del fig except: print("plot could not be produced") if doplot == 1: try: fig = plt.figure() fig.subplots_adjust(wspace=0.4, hspace=0.4) plt.subplot(221) plt.plot(sv_e1[labels == 0], sv_e2[labels == 0], 'ko') plt.plot(sv_e1[labels == 1], sv_e2[labels == 1], 'ro') plt.plot(sv_e1[labels == 2], sv_e2[labels == 2], 'bo') plt.xlabel('SV1') plt.ylabel('SV2') plt.xlim(0, 1) plt.ylim(0, 1) plt.subplot(222) plt.plot(rough[labels == 0], hard[labels == 0], 'ko') plt.plot(rough[labels == 1], hard[labels == 1], 'ro') plt.plot(rough[labels == 2], hard[labels == 2], 'bo') plt.xlabel('E1') plt.ylabel('E2') plt.xlim(1, 8) plt.ylim(1, 8) #plt.show() custom_save(sonpath, 'e1e2_kmeans' + str(p)) del fig except: print("plot could not be produced") if doplot == 1: try: fig = plt.figure() s = plt.scatter(Zes[labels == 0], Zns[labels == 0], marker='o', c='k', s=10, linewidth=0, vmin=0, vmax=8) s = plt.scatter(Zes[labels == 1], Zns[labels == 1], marker='o', c='r', s=10, linewidth=0, vmin=0, vmax=8) s = plt.scatter(Zes[labels == 2], Zns[labels == 2], marker='o', c='b', s=10, linewidth=0, vmin=0, vmax=8) custom_save(sonpath, 'rgh_hard_kmeans' + str(p)) del fig except: print("plot could not be produced") if doplot == 1: try: print("drawing and printing map ...") fig = plt.figure(frameon=False) #fig.subplots_adjust(wspace = 0.4, hspace=0.4) map = 
Basemap( projection='merc', epsg=cs2cs_args.split(':')[1], #epsg=26949, resolution='i', #h #f llcrnrlon=np.min(Zlon) - 0.0001, llcrnrlat=np.min(Zlat) - 0.0001, urcrnrlon=np.max(Zlon) + 0.0001, urcrnrlat=np.max(Zlat) + 0.0001) # draw point cloud x, y = map.projtran(Zlon, Zlat) cs = map.scatter(x.flatten(), y.flatten(), 1, rough.flatten(), linewidth=0, vmin=0, vmax=8) try: map.arcgisimage( server='http://server.arcgisonline.com/ArcGIS', service='ESRI_Imagery_World_2D', xpixels=1000, ypixels=None, dpi=300) except: map.arcgisimage( server='http://server.arcgisonline.com/ArcGIS', service='World_Imagery', xpixels=1000, ypixels=None, dpi=300) cbar = map.colorbar(cs, location='bottom', pad="5%") cbar.set_label('E1') cbar.set_ticks([0, 2, 4, 6, 8]) custom_save(sonpath, 'map_rgh' + str(p)) del fig except: print("plot could not be produced") if doplot == 1: try: fig = plt.figure() #fig.subplots_adjust(wspace = 0.4, hspace=0.4) map = Basemap( projection='merc', epsg=cs2cs_args.split(':')[1], resolution='i', #h #f llcrnrlon=np.min(Zlon) - 0.0001, llcrnrlat=np.min(Zlat) - 0.0001, urcrnrlon=np.max(Zlon) + 0.0001, urcrnrlat=np.max(Zlat) + 0.0001) # draw point cloud x, y = map.projtran(Zlon, Zlat) cs = map.scatter(x.flatten(), y.flatten(), 1, hard.flatten(), linewidth=0, vmin=0, vmax=8) try: map.arcgisimage( server='http://server.arcgisonline.com/ArcGIS', service='ESRI_Imagery_World_2D', xpixels=1000, ypixels=None, dpi=300) except: map.arcgisimage( server='http://server.arcgisonline.com/ArcGIS', service='World_Imagery', xpixels=1000, ypixels=None, dpi=300) cbar = map.colorbar(cs, location='bottom', pad="5%") cbar.set_label('E2') cbar.set_ticks([0, 2, 4, 6, 8]) custom_save(sonpath, 'map_hard' + str(p)) del fig except: print("plot could not be produced") if doplot == 1: try: print("drawing and printing map ...") fig = plt.figure(frameon=False) map = Basemap( projection='merc', epsg=cs2cs_args.split(':')[1], #26949, resolution='i', #h #f llcrnrlon=np.min(Zlon) - 0.001, 
llcrnrlat=np.min(Zlat) - 0.001, urcrnrlon=np.max(Zlon) + 0.001, urcrnrlat=np.max(Zlat) + 0.001) ax = plt.Axes( fig, [0., 0., 1., 1.], ) ax.set_axis_off() fig.add_axes(ax) ## draw point cloud x, y = map.projtran(Zlon, Zlat) map.scatter(x.flatten(), y.flatten(), 1, rough.flatten(), linewidth='0', vmin=0, vmax=8) custom_save(sonpath, 'Rough' + str(p)) del fig kml = simplekml.Kml() ground = kml.newgroundoverlay(name='GroundOverlay') ground.icon.href = 'Rough' + str(p) + '.png' ground.latlonbox.north = np.min(Zlat) - 0.001 ground.latlonbox.south = np.max(Zlat) + 0.001 ground.latlonbox.east = np.max(Zlon) + 0.001 ground.latlonbox.west = np.min(Zlon) - 0.001 ground.latlonbox.rotation = 0 #kml.save(sonpath+'Rough'+str(p)+'.kml') kml.save( os.path.normpath( os.path.join(sonpath, 'Rough' + str(p) + '.kml'))) except: print("plot could not be produced") if doplot == 1: try: print("drawing and printing map ...") fig = plt.figure(frameon=False) map = Basemap( projection='merc', epsg=cs2cs_args.split(':')[1], #26949, resolution='i', #h #f llcrnrlon=np.min(Zlon) - 0.001, llcrnrlat=np.min(Zlat) - 0.001, urcrnrlon=np.max(Zlon) + 0.001, urcrnrlat=np.max(Zlat) + 0.001) ax = plt.Axes( fig, [0., 0., 1., 1.], ) ax.set_axis_off() fig.add_axes(ax) ## draw point cloud x, y = map.projtran(Zlon, Zlat) map.scatter(x.flatten(), y.flatten(), 1, hard.flatten(), linewidth='0', vmin=0, vmax=8) custom_save(sonpath, 'Hard' + str(p)) del fig kml = simplekml.Kml() ground = kml.newgroundoverlay(name='GroundOverlay') ground.icon.href = 'Hard' + str(p) + '.png' ground.latlonbox.north = np.min(Zlat) - 0.001 ground.latlonbox.south = np.max(Zlat) + 0.001 ground.latlonbox.east = np.max(Zlon) + 0.001 ground.latlonbox.west = np.min(Zlon) - 0.001 ground.latlonbox.rotation = 0 #kml.save(sonpath+'Hard'+str(p)+'.kml') kml.save( os.path.normpath( os.path.join(sonpath, 'Hard' + str(p) + '.kml'))) except: print("plot could not be produced") else: if 2 > 1: # need to tiday all this up later!! 
#make an index of every other record ind = range(0, np.shape(dwnhi_fp)[1]) Zdepi = depi Zabsorp = absorption Zlat = lati Zlon = loni Zes = ei Zns = ni try: #parallel processing with all available cores w = Parallel(n_jobs=-1, verbose=0)(delayed( get_rgh_hrd)(dwnhi_fp[:, i], Zdepi[i], Zabsorp[i], c, nf, transfreq, equivbeam, maxW, pi, ft) for i in ind) except: #fall back to serial w = Parallel(n_jobs=1, verbose=0)(delayed( get_rgh_hrd)(dwnhi_fp[:, i], Zdepi[i], Zabsorp[i], c, nf, transfreq, equivbeam, maxW, pi, ft) for i in ind) rough, hard, sv_e1, sv_e2, e1a, e1b, e2a, e2b = zip(*w) rough = np.array(rough, 'float') rough[rough == 0.0] = np.nan hard = np.array(hard, 'float') hard[hard == 0.0] = np.nan sv_e1 = np.array(sv_e1, 'float') sv_e1[sv_e1 == 0.0] = np.nan sv_e2 = np.array(sv_e2, 'float') sv_e2[sv_e2 == 0.0] = np.nan try: nans, y = humutils.nan_helper(rough) rough[nans] = np.interp(y(nans), y(~nans), rough[~nans]) except: pass try: nans, y = humutils.nan_helper(hard) hard[nans] = np.interp(y(nans), y(~nans), hard[~nans]) except: pass try: nans, y = humutils.nan_helper(sv_e1) sv_e1[nans] = np.interp(y(nans), y(~nans), sv_e1[~nans]) except: pass try: nans, y = humutils.nan_helper(sv_e2) sv_e2[nans] = np.interp(y(nans), y(~nans), sv_e2[~nans]) except: pass data = np.column_stack([sv_e1, sv_e2]) k_means = MiniBatchKMeans(numclusters) # fit the model k_means.fit(data) values = k_means.cluster_centers_.squeeze() labels = k_means.labels_ hardav = humutils.runningMeanFast(hard, integ) roughav = humutils.runningMeanFast(rough, integ) #f = open(sonpath+base+'rough_and_hard'+str(p)+'.csv', 'wt') f = open( os.path.normpath( os.path.join(sonpath, base + 'rough_and_hard' + str(0) + '.csv')), 'wt') writer = csv.writer(f) writer.writerow( ('longitude', 'latitude', 'easting', 'northing', 'depth', 'roughness', 'hardness', 'average roughness', 'average hardness', 'k-mean label')) for i in range(0, len(rough)): writer.writerow( (float(Zlon[i]), float(Zlat[i]), float(Zes[i]), 
float(Zns[i]), float(Zdepi[i]), float(rough[i]), float(hard[i]), float(roughav[i]), float(hardav[i]), labels[i].astype(int))) f.close() if doplot == 1: try: fig = plt.figure() plt.imshow(dwnhi_fp, cmap='gray') plt.plot(e1a, 'r') plt.plot(e1b, 'y') plt.plot(e2a, 'c') plt.plot(e2b, 'm') plt.axis('tight') #plt.show() custom_save(sonpath, 'e1e2_scan' + str(0)) del fig except: print("plot could not be produced") if doplot == 1: try: fig = plt.figure() fig.subplots_adjust(wspace=0.4, hspace=0.4) plt.subplot(221) plt.plot(sv_e1[labels == 0], sv_e2[labels == 0], 'ko') plt.plot(sv_e1[labels == 1], sv_e2[labels == 1], 'ro') plt.plot(sv_e1[labels == 2], sv_e2[labels == 2], 'bo') plt.xlabel('SV1') plt.ylabel('SV2') plt.xlim(0, 1) plt.ylim(0, 1) plt.subplot(222) plt.plot(rough[labels == 0], hard[labels == 0], 'ko') plt.plot(rough[labels == 1], hard[labels == 1], 'ro') plt.plot(rough[labels == 2], hard[labels == 2], 'bo') plt.xlabel('E1') plt.ylabel('E2') plt.xlim(1, 8) plt.ylim(1, 8) #plt.show() custom_save(sonpath, 'e1e2_kmeans' + str(0)) del fig except: print("plot could not be produced") if doplot == 1: try: fig = plt.figure() s = plt.scatter(Zes[labels == 0], Zns[labels == 0], marker='o', c='k', s=10, linewidth=0, vmin=0, vmax=8) s = plt.scatter(Zes[labels == 1], Zns[labels == 1], marker='o', c='r', s=10, linewidth=0, vmin=0, vmax=8) s = plt.scatter(Zes[labels == 2], Zns[labels == 2], marker='o', c='b', s=10, linewidth=0, vmin=0, vmax=8) custom_save(sonpath, 'rgh_hard_kmeans' + str(0)) del fig except: print("plot could not be produced") if doplot == 1: try: print("drawing and printing map ...") fig = plt.figure(frameon=False) #fig.subplots_adjust(wspace = 0.4, hspace=0.4) map = Basemap( projection='merc', epsg=cs2cs_args.split(':')[1], #epsg=26949, resolution='i', #h #f llcrnrlon=np.min(Zlon) - 0.0001, llcrnrlat=np.min(Zlat) - 0.0001, urcrnrlon=np.max(Zlon) + 0.0001, urcrnrlat=np.max(Zlat) + 0.0001) # draw point cloud x, y = map.projtran(Zlon, Zlat) cs = 
map.scatter(x.flatten(), y.flatten(), 1, rough.flatten(), linewidth=0, vmin=0, vmax=8) try: map.arcgisimage( server='http://server.arcgisonline.com/ArcGIS', service='ESRI_Imagery_World_2D', xpixels=1000, ypixels=None, dpi=300) except: map.arcgisimage( server='http://server.arcgisonline.com/ArcGIS', service='World_Imagery', xpixels=1000, ypixels=None, dpi=300) cbar = map.colorbar(cs, location='bottom', pad="5%") cbar.set_label('E1') cbar.set_ticks([0, 2, 4, 6, 8]) custom_save(sonpath, 'map_rgh' + str(0)) del fig except: print("plot could not be produced") if doplot == 1: try: fig = plt.figure() #fig.subplots_adjust(wspace = 0.4, hspace=0.4) map = Basemap( projection='merc', epsg=cs2cs_args.split(':')[1], resolution='i', #h #f llcrnrlon=np.min(Zlon) - 0.0001, llcrnrlat=np.min(Zlat) - 0.0001, urcrnrlon=np.max(Zlon) + 0.0001, urcrnrlat=np.max(Zlat) + 0.0001) # draw point cloud x, y = map.projtran(Zlon, Zlat) cs = map.scatter(x.flatten(), y.flatten(), 1, hard.flatten(), linewidth=0, vmin=0, vmax=8) try: map.arcgisimage( server='http://server.arcgisonline.com/ArcGIS', service='ESRI_Imagery_World_2D', xpixels=1000, ypixels=None, dpi=300) except: map.arcgisimage( server='http://server.arcgisonline.com/ArcGIS', service='World_Imagery', xpixels=1000, ypixels=None, dpi=300) cbar = map.colorbar(cs, location='bottom', pad="5%") cbar.set_label('E2') cbar.set_ticks([0, 2, 4, 6, 8]) custom_save(sonpath, 'map_hard' + str(0)) del fig except: print("plot could not be produced") if doplot == 1: try: print("drawing and printing map ...") fig = plt.figure(frameon=False) map = Basemap( projection='merc', epsg=cs2cs_args.split(':')[1], #26949, resolution='i', #h #f llcrnrlon=np.min(Zlon) - 0.001, llcrnrlat=np.min(Zlat) - 0.001, urcrnrlon=np.max(Zlon) + 0.001, urcrnrlat=np.max(Zlat) + 0.001) ax = plt.Axes( fig, [0., 0., 1., 1.], ) ax.set_axis_off() fig.add_axes(ax) ## draw point cloud x, y = map.projtran(Zlon, Zlat) map.scatter(x.flatten(), y.flatten(), 1, rough.flatten(), linewidth='0', 
vmin=0, vmax=8) custom_save(sonpath, 'Rough' + str(0)) del fig kml = simplekml.Kml() ground = kml.newgroundoverlay(name='GroundOverlay') ground.icon.href = 'Rough' + str(0) + '.png' ground.latlonbox.north = np.min(Zlat) - 0.001 ground.latlonbox.south = np.max(Zlat) + 0.001 ground.latlonbox.east = np.max(Zlon) + 0.001 ground.latlonbox.west = np.min(Zlon) - 0.001 ground.latlonbox.rotation = 0 #kml.save(sonpath+'Rough'+str(p)+'.kml') kml.save( os.path.normpath( os.path.join(sonpath, 'Rough' + str(0) + '.kml'))) except: print("plot could not be produced") if doplot == 1: try: print("drawing and printing map ...") fig = plt.figure(frameon=False) map = Basemap( projection='merc', epsg=cs2cs_args.split(':')[1], #26949, resolution='i', #h #f llcrnrlon=np.min(Zlon) - 0.001, llcrnrlat=np.min(Zlat) - 0.001, urcrnrlon=np.max(Zlon) + 0.001, urcrnrlat=np.max(Zlat) + 0.001) ax = plt.Axes( fig, [0., 0., 1., 1.], ) ax.set_axis_off() fig.add_axes(ax) ## draw point cloud x, y = map.projtran(Zlon, Zlat) map.scatter(x.flatten(), y.flatten(), 1, hard.flatten(), linewidth='0', vmin=0, vmax=8) custom_save(sonpath, 'Hard' + str(0)) del fig kml = simplekml.Kml() ground = kml.newgroundoverlay(name='GroundOverlay') ground.icon.href = 'Hard' + str(0) + '.png' ground.latlonbox.north = np.min(Zlat) - 0.001 ground.latlonbox.south = np.max(Zlat) + 0.001 ground.latlonbox.east = np.max(Zlon) + 0.001 ground.latlonbox.west = np.min(Zlon) - 0.001 ground.latlonbox.rotation = 0 #kml.save(sonpath+'Hard'+str(p)+'.kml') kml.save( os.path.normpath( os.path.join(sonpath, 'Hard' + str(0) + '.kml'))) except: print("plot could not be produced") else: print("high-frequency downward echosounder data not available")
#Get grid of coordinates from raster xx_1, yy_1 = np.mgrid[xmin:xmax+xres:xres, ymax+yres:ymin:yres] trans = pyproj.Proj(init="epsg:26949") glon_1, glat_1 = trans(xx_1, yy_1, inverse=True) fig = plt.figure(figsize=(12,24)) plt.suptitle('May 2014') ax = fig.add_subplot(1,3,1) ax.set_title('R01359') m = Basemap(projection='merc', epsg=cs2cs_args.split(':')[1], llcrnrlon=np.min(glon_1 - 0.0007), llcrnrlat=np.min(glat_1 - 0.0006), urcrnrlon=np.max(glon_1 + 0.0005), urcrnrlat=np.max(glat_1 + 0.0006)) x,y = m.projtran(glon, glat) m.wmsimage(server='http://grandcanyon.usgs.gov/arcgis/services/Imagery/ColoradoRiverImageryExplorer/MapServer/WmsServer?', layers=['0'], xpixels=1000) im = m.contourf(x, y, data_59.T, cmap='Greys_r',levels=ss_level) divider = make_axes_locatable(ax) cax = divider.append_axes("right", size="5%", pad=0.1) cbr = plt.colorbar(im,cax=cax) ax_2 = fig.add_subplot(1,3,2) ax_2.set_title('R01360') m = Basemap(projection='merc', epsg=cs2cs_args.split(':')[1], llcrnrlon=np.min(glon_1 - 0.0007), llcrnrlat=np.min(glat_1 - 0.0006), urcrnrlon=np.max(glon_1 + 0.0005), urcrnrlat=np.max(glat_1 + 0.0006))
tex_levels = list(np.arange(0,135,5)) ss_level=[0,2.5,5,7.5,10,12.5,15,17.5,20,22.5,25,27.5,30,32.5,35] fig = plt.figure(figsize=(15,6)) ax = fig.add_subplot(1,5,1) ax.set_title('50 square pixel') m = Basemap(projection='merc', epsg=cs2cs_args.split(':')[1], llcrnrlon=np.min(glon-0.0009), llcrnrlat=np.min(glat-0.0006), urcrnrlon=np.max(glon+0.0009), urcrnrlat=np.max(glat+0.0009)) m.wmsimage(server='http://grandcanyon.usgs.gov/arcgis/services/Imagery/ColoradoRiverImageryExplorer/MapServer/WmsServer?', layers=['0'], xpixels=1000) x,y = m.projtran(glon, glat) im = m.contourf(x,y,ss_data_50.T, cmap='Greys_r',levels=ss_level) im2 = m.contourf(x, y, tex_data_50.T, alpha=0.4, cmap='YlOrRd', levels=tex_levels)#levels=tex_levels divider = make_axes_locatable(ax) cax = divider.append_axes("right", size="5%", pad=0.1) cax2 = divider.append_axes("right", size="5%", pad=0.3) cbr = plt.colorbar(im, cax=cax) cbr2 = plt.colorbar(im2,cax=cax2) ax1 = fig.add_subplot(1,5,2) ax1.set_title('70 square pixel') m1 = Basemap(projection='merc', epsg=cs2cs_args.split(':')[1], llcrnrlon=np.min(glon-0.0009),
def make_map(e, n, t, d, dat_port, dat_star, pix_m, res, cs2cs_args, sonpath, p, dogrid):
    """Georeference one chunk of sidescan data and write map products.

    Builds a point cloud from the port/starboard scans, writes the raw
    (easting, northing, intensity) triplets to an ascii file, optionally
    grids the point cloud, then draws a Basemap figure and writes a KML
    ground overlay.

    Parameters
    ----------
    e, n : array
        easting/northing of each ping (projected coordinates)
    t : array
        heading per ping, radians
    d : array
        depth/TVG-corrected distance per ping
    dat_port, dat_star : 2d arrays
        port and starboard scan intensities
    pix_m : float
        across-track size of 1 pixel, metres
    res : float
        grid resolution, metres (used when dogrid==1)
    cs2cs_args : str
        pyproj projection string, e.g. "epsg:26949"
    sonpath : str
        output directory (assumed to end with os.sep)
    p : int
        chunk number, used in output filenames
    dogrid : int
        1 = grid the point cloud, otherwise plot raw points
    """
    trans = pyproj.Proj(init=cs2cs_args)

    # merge port and starboard into one swath; NaNs become zeros
    merge = np.vstack((dat_port, dat_star))
    #merge = np.vstack((np.flipud(port_fp[p]),star_fp[p]))
    merge[np.isnan(merge)] = 0
    merge = merge[:, :len(n)]

    # number of pixels in a single (one-sided) scan line
    extent = int(np.shape(merge)[0] / 2)
    yvec = np.linspace(pix_m, extent * pix_m, extent)

    print("getting point cloud ...")

    # get the points by rotating the [x,y] vector so it lines up with boat
    # heading, assumed to be the same as the curvature of the [e,n] trace
    X = []
    Y = []
    for k in range(len(n)):
        x = np.concatenate((np.tile(e[k], extent), np.tile(e[k], extent)))
        #y = np.concatenate((n[k]+yvec, n[k]-yvec))
        # horizontal range from slant range and depth (flat-bottom assumption)
        rangedist = np.sqrt(np.power(yvec, 2.0) - np.power(d[k], 2.0))
        y = np.concatenate((n[k] + rangedist, n[k] - rangedist))

        # rotate line around center point
        xx = e[k] - ((x - e[k]) * np.cos(t[k])) - ((y - n[k]) * np.sin(t[k]))
        yy = n[k] - ((x - e[k]) * np.sin(t[k])) + ((y - n[k]) * np.cos(t[k]))
        xx, yy = calc_beam_pos(d[k], t[k], xx, yy)
        X.append(xx)
        Y.append(yy)

    del e, n, t, x, y  #, X, Y

    # merge flatten and stack
    X = np.asarray(X, 'float').T
    X = X.flatten()

    # merge flatten and stack
    Y = np.asarray(Y, 'float').T
    Y = Y.flatten()

    # strip NaNs from X, Y and intensity in lockstep so the three stay aligned
    X = X[np.where(np.logical_not(np.isnan(Y)))]
    merge = merge.flatten()[np.where(np.logical_not(np.isnan(Y)))]
    Y = Y[np.where(np.logical_not(np.isnan(Y)))]

    Y = Y[np.where(np.logical_not(np.isnan(X)))]
    merge = merge.flatten()[np.where(np.logical_not(np.isnan(X)))]
    X = X[np.where(np.logical_not(np.isnan(X)))]

    X = X[np.where(np.logical_not(np.isnan(merge)))]
    Y = Y[np.where(np.logical_not(np.isnan(merge)))]
    merge = merge[np.where(np.logical_not(np.isnan(merge)))]

    # write raw bs to file
    outfile = sonpath + 'x_y_ss_raw' + str(p) + '.asc'
    with open(outfile, 'w') as f:
        np.savetxt(f,
                   np.hstack((humutils.ascol(X.flatten()),
                              humutils.ascol(Y.flatten()),
                              humutils.ascol(merge.flatten()))),
                   delimiter=' ', fmt="%8.6f %8.6f %8.6f")

    humlon, humlat = trans(X, Y, inverse=True)

    if dogrid == 1:
        grid_x, grid_y = np.meshgrid(
            np.arange(np.min(X), np.max(X), res),
            np.arange(np.min(Y), np.max(Y), res))

        dat = griddata(np.c_[X.flatten(), Y.flatten()], merge.flatten(),
                       (grid_x, grid_y), method='nearest')

        ## create mask for where the data is not
        tree = KDTree(np.c_[X.flatten(), Y.flatten()])
        dist, _ = tree.query(np.c_[grid_x.ravel(), grid_y.ravel()], k=1)
        dist = dist.reshape(grid_x.shape)

    del X, Y  #, bearing #, pix_m, yvec

    if dogrid == 1:
        ## mask cells further from a data point than the resolution allows
        dat[dist > np.floor(np.sqrt(1 / res)) - 1] = np.nan  #np.floor(np.sqrt(1/res))-1 ] = np.nan
        del dist, tree

        dat[dat == 0] = np.nan
        dat[np.isinf(dat)] = np.nan
        datm = np.ma.masked_invalid(dat)

        glon, glat = trans(grid_x, grid_y, inverse=True)
        del grid_x, grid_y

    print("drawing and printing map ...")
    fig = plt.figure(frameon=False)
    map = Basemap(projection='merc',
                  epsg=cs2cs_args.split(':')[1],  #26949,
                  resolution='i',  #h #f
                  llcrnrlon=np.min(humlon) - 0.001,
                  llcrnrlat=np.min(humlat) - 0.001,
                  urcrnrlon=np.max(humlon) + 0.001,
                  urcrnrlat=np.max(humlat) + 0.001)

    if dogrid == 1:
        gx, gy = map.projtran(glon, glat)

    ax = plt.Axes(fig, [0., 0., 1., 1.], )
    ax.set_axis_off()
    fig.add_axes(ax)

    if dogrid == 1:
        map.pcolormesh(gx, gy, datm, cmap='gray',
                       vmin=np.nanmin(dat), vmax=np.nanmax(dat))
        del datm, dat
    else:
        ## draw point cloud
        x, y = map.projtran(humlon, humlat)
        # linewidth must be numeric; was the string '0' before
        map.scatter(x.flatten(), y.flatten(), 0.5, merge.flatten(),
                    cmap='gray', linewidth=0)

    custom_save(sonpath, 'map' + str(p))
    del fig

    kml = simplekml.Kml()
    ground = kml.newgroundoverlay(name='GroundOverlay')
    ground.icon.href = sonpath + 'map' + str(p) + '.png'
    # NOTE(review): north is set from the *minimum* latitude and south from
    # the *maximum* (inverted bounding box). This matches every other overlay
    # in this file, presumably to compensate for image orientation — confirm
    # against Google Earth output before "fixing".
    ground.latlonbox.north = np.min(humlat) - 0.001
    ground.latlonbox.south = np.max(humlat) + 0.001
    ground.latlonbox.east = np.max(humlon) + 0.001
    ground.latlonbox.west = np.min(humlon) - 0.001
    ground.latlonbox.rotation = 0

    kml.save(sonpath + 'GroundOverlay' + str(p) + '.kml')

    del humlat, humlon
def mosaic_texture(humfile, sonpath, cs2cs_args="epsg:26949", res=99, nn=5, weight=1):
    '''
    Create mosaics of the spatially referenced sidescan echograms

    Syntax
    ----------
    [] = PyHum.mosaic_texture(humfile, sonpath, cs2cs_args, res, nn, weight)

    Parameters
    ----------
    humfile : str
       path to the .DAT file
    sonpath : str
       path where the *.SON files are
    cs2cs_args : int, *optional* [Default="epsg:26949"]
       arguments to create coordinates in a projected coordinate system
       this argument gets given to pyproj to turn wgs84 (lat/lon) coordinates
       into any projection supported by the proj.4 libraries
    res : float, *optional* [Default=0]
       grid resolution of output gridded texture map
       if res=99, res will be determined automatically from the spatial resolution of 1 pixel
    nn: int, *optional* [Default=5]
       number of nearest neighbours for gridding
    weight: int, *optional* [Default=1]
       specifies the type of pixel weighting in the gridding process
       weight = 1, based on grazing angle and inverse distance weighting
       weight = 2, based on grazing angle only
       weight = 3, inverse distance weighting only
       weight = 4, no weighting

    Returns
    -------
    sonpath+'GroundOverlay.kml': kml file
        contains gridded (or point cloud) sidescan intensity map for importing into google earth
        of the pth chunk
    sonpath+'map.png' : image overlay associated with the kml file
    '''

    # prompt user to supply file if no input file given
    if not humfile:
        print('An input file is required!!!!!!')
        Tk().withdraw()  # we don't want a full GUI, so keep the root window from appearing
        humfile = askopenfilename(filetypes=[("DAT files", "*.DAT")])

    # prompt user to supply directory if no input sonpath is given
    if not sonpath:
        print('A *.SON directory is required!!!!!!')
        Tk().withdraw()  # we don't want a full GUI, so keep the root window from appearing
        sonpath = askdirectory()

    # print given arguments to screen and convert data type where necessary
    if humfile:
        print('Input file is %s' % (humfile))
    if sonpath:
        print('Sonar file path is %s' % (sonpath))
    if cs2cs_args:
        print('cs2cs arguments are %s' % (cs2cs_args))
    if res:
        res = np.asarray(res, float)
        print('Gridding resolution: %s' % (str(res)))
    if nn:
        nn = int(nn)
        print('Number of nearest neighbours for gridding: %s' % (str(nn)))
    if weight:
        weight = int(weight)
        print('Weighting for gridding: %s' % (str(weight)))

    ##nn = 5 #number of nearest neighbours in gridding
    # noise threshold in dB W: was referenced below but never defined
    # (previously only present as a commented-out constant) -> NameError
    noisefloor = 10

    # start timer
    if os.name == 'posix':  # true if linux/mac or cygwin on windows
        start = time.time()
    else:  # windows
        # time.clock() was removed in Python 3.8; perf_counter is the
        # documented replacement for interval timing
        start = time.perf_counter()

    trans = pyproj.Proj(init=cs2cs_args)

    # if son path name supplied has no separator at end, put one on
    if sonpath[-1] != os.sep:
        sonpath = sonpath + os.sep

    base = humfile.split('.DAT')  # get base of file name for output
    base = base[0].split(os.sep)[-1]

    # remove underscores, negatives and spaces from basename
    base = humutils.strip_base(base)

    meta = loadmat(os.path.normpath(os.path.join(sonpath, base + 'meta.mat')))

    esi = np.squeeze(meta['e'])
    nsi = np.squeeze(meta['n'])
    theta = np.squeeze(meta['heading']) / (180 / np.pi)  # degrees -> radians

    # load memory mapped scans
    shape_port = np.squeeze(meta['shape_port'])
    if shape_port != '':
        if os.path.isfile(os.path.normpath(os.path.join(sonpath, base + '_data_port_lar.dat'))):
            port_fp = io.get_mmap_data(sonpath, base, '_data_port_lar.dat', 'float32', tuple(shape_port))
        else:
            port_fp = io.get_mmap_data(sonpath, base, '_data_port_la.dat', 'float32', tuple(shape_port))

    shape_star = np.squeeze(meta['shape_star'])
    if shape_star != '':
        if os.path.isfile(os.path.normpath(os.path.join(sonpath, base + '_data_star_lar.dat'))):
            star_fp = io.get_mmap_data(sonpath, base, '_data_star_lar.dat', 'float32', tuple(shape_star))
        else:
            star_fp = io.get_mmap_data(sonpath, base, '_data_star_la.dat', 'float32', tuple(shape_star))

    # time varying gain
    tvg = ((8.5 * 10**-5) + (3 / 76923) + ((8.5 * 10**-5) / 4)) * meta['c']

    # depth correction
    dist_tvg = np.squeeze(((np.tan(np.radians(25))) * np.squeeze(meta['dep_m'])) - (tvg))

    # read in range data
    R_fp = io.get_mmap_data(sonpath, base, '_data_range.dat', 'float32', tuple(shape_star))

    dx = np.arcsin(meta['c'] / (1000 * meta['t'] * meta['f']))
    pix_m = meta['pix_m']
    c = meta['c']

    if not os.path.isfile(os.path.normpath(os.path.join(sonpath, base + "S.p"))):
        #if 2 > 1:
        inputfiles = []
        if len(shape_star) > 2:
            # process each chunk of the record separately
            for p in range(len(star_fp)):
                e = esi[shape_port[-1] * p:shape_port[-1] * (p + 1)]
                n = nsi[shape_port[-1] * p:shape_port[-1] * (p + 1)]
                t = theta[shape_port[-1] * p:shape_port[-1] * (p + 1)]
                d = dist_tvg[shape_port[-1] * p:shape_port[-1] * (p + 1)]
                dat_port = port_fp[p]
                dat_star = star_fp[p]
                data_R = R_fp[p]
                print("writing chunk %s " % (str(p)))
                write_points(e, n, t, d, dat_port, dat_star, data_R, pix_m, res, cs2cs_args, sonpath, p, c, dx)
                inputfiles.append(os.path.normpath(os.path.join(sonpath, 'x_y_class' + str(p) + '.asc')))
        else:
            p = 0
            print("writing chunk %s " % (str(p)))
            write_points(esi, nsi, theta, dist_tvg, port_fp, star_fp, R_fp, meta['pix_m'], res, cs2cs_args, sonpath, 0, c, dx)
            inputfiles.append(os.path.normpath(os.path.join(sonpath, 'x_y_class' + str(p) + '.asc')))

        #trans =  pyproj.Proj(init=cs2cs_args)

        # D, R, h, t
        print("reading points from %s files" % (str(len(inputfiles))))
        X, Y, S, D, R, h, t, i = getxys(inputfiles)

        print("%s points read from %s files" % (str(len(S)), str(len(inputfiles))))

        # remove values where sidescan intensity is zero
        ind = np.where(np.logical_not(S == 0))[0]
        X = X[ind]; Y = Y[ind]
        S = S[ind]; D = D[ind]
        R = R[ind]; h = h[ind]
        t = t[ind]; i = i[ind]
        del ind

        # save to file for temporary storage
        pickle.dump(S, open(os.path.normpath(os.path.join(sonpath, base + "S.p")), "wb")); del S
        pickle.dump(D, open(os.path.normpath(os.path.join(sonpath, base + "D.p")), "wb")); del D
        pickle.dump(t, open(os.path.normpath(os.path.join(sonpath, base + "t.p")), "wb")); del t
        pickle.dump(i, open(os.path.normpath(os.path.join(sonpath, base + "i.p")), "wb")); del i
        pickle.dump(X, open(os.path.normpath(os.path.join(sonpath, base + "X.p")), "wb")); del X
        pickle.dump(Y, open(os.path.normpath(os.path.join(sonpath, base + "Y.p")), "wb")); del Y
        pickle.dump(R, open(os.path.normpath(os.path.join(sonpath, base + "R.p")), "wb"))
        pickle.dump(h, open(os.path.normpath(os.path.join(sonpath, base + "h.p")), "wb"))

        # grazing angle
        # BUG FIX: np.arctan(R, h) treats h as numpy's `out` argument and
        # silently overwrites it; the intended quantity is arctan2(R, h)
        g = np.arctan2(R.flatten(), h.flatten())
        pickle.dump(g, open(os.path.normpath(os.path.join(sonpath, base + "g.p")), "wb")); del g, R, h

    print("creating grids ...")

    if res == 0:
        res = 99
    if res == 99:
        #### prepare grids
        R = pickle.load(open(os.path.normpath(os.path.join(sonpath, base + "R.p")), "rb"))
        ## actual along-track resolution is this: dx times dy = Af
        tmp = R * dx * (c * 0.007 / 2)
        del R
        resg = np.min(tmp[tmp > 0])
        del tmp
    else:
        resg = res

    X = pickle.load(open(os.path.normpath(os.path.join(sonpath, base + "X.p")), "rb"))
    Y = pickle.load(open(os.path.normpath(os.path.join(sonpath, base + "Y.p")), "rb"))

    humlon, humlat = trans(X, Y, inverse=True)

    grid_x, grid_y = np.meshgrid(np.arange(np.min(X), np.max(X), resg),
                                 np.arange(np.min(Y), np.max(Y), resg))

    shape = np.shape(grid_x)

    # BUG FIX: zip() is an iterator on Python 3 and is not accepted by
    # KDTree; build an (N, 2) array instead
    tree = KDTree(np.c_[X.flatten(), Y.flatten()])
    del X, Y

    print("mosaicking ...")
    # k nearest neighbour
    try:
        # n_jobs is only available in newer scipy versions
        dist, inds = tree.query(np.c_[grid_x.flatten(), grid_y.flatten()], k=nn, n_jobs=-1)
    except:
        #print ".... update your scipy installation to use faster kd-tree"
        dist, inds = tree.query(np.c_[grid_x.flatten(), grid_y.flatten()], k=nn)

    #del grid_x, grid_y

    if weight == 1:
        # grazing angle and inverse distance weighting
        g = pickle.load(open(os.path.normpath(os.path.join(sonpath, base + "g.p")), "rb"))
        w = g[inds] + 1.0 / dist**2
        del g
    elif weight == 2:
        # grazing angle only
        g = pickle.load(open(os.path.normpath(os.path.join(sonpath, base + "g.p")), "rb"))
        w = g[inds]
        del g
    elif weight == 3:
        # inverse distance weighting only
        w = 1.0 / dist**2
    elif weight == 4:
        # no weighting
        w = 1.0

    #g = pickle.load( open( os.path.normpath(os.path.join(sonpath,base+"g.p")), "rb" ) )
    #w = g[inds] + 1.0 / dist**2
    #del g

    if weight < 4:
        # clamp pathological weights (coincident points give inf/nan)
        w[np.isinf(w)] = 1
        w[np.isnan(w)] = 1
        w[w > 10000] = 10000
        w[w <= 0] = 1

    # load in sidescan intensity
    S = pickle.load(open(os.path.normpath(os.path.join(sonpath, base + "S.p")), "rb"))
    # filter out noise pixels
    S[S < noisefloor] = np.nan

    if nn == 1:
        Sdat_g = (w * S.flatten()[inds]).reshape(shape)
        del w
        dist = dist.reshape(shape)
    else:
        if weight < 4:
            # weighted mean of the nn neighbours
            Sdat_g = (np.nansum(w * S.flatten()[inds], axis=1) / np.nansum(w, axis=1)).reshape(shape)
        else:
            Sdat_g = (np.nansum(S.flatten()[inds], axis=1)).reshape(shape)
        del w
        dist = np.nanmean(dist, axis=1).reshape(shape)

    del S

    Sdat_g[dist > 1] = np.nan
    Sdat_g[Sdat_g < noisefloor] = np.nan

    # in-paint small holes, then restore the no-data mask
    dat = Sdat_g.copy()
    dat[dist > 1] = 0
    dat2 = replace_nans.RN(dat.astype('float64'), 1000, 0.01, 2, 'localmean').getdata()
    dat2[dat == 0] = np.nan
    del dat

    dat2[dat2 < noisefloor] = np.nan

    Sdat_g = dat2.copy()
    del dat2

    Sdat_g[Sdat_g == 0] = np.nan
    Sdat_g[np.isinf(Sdat_g)] = np.nan
    Sdat_gm = np.ma.masked_invalid(Sdat_g)
    del Sdat_g

    glon, glat = trans(grid_x, grid_y, inverse=True)
    del grid_x, grid_y

    # =========================================================
    print("creating kmz file ...")
    ## new way to create kml file
    pixels = 1024 * 10

    fig, ax = humutils.gearth_fig(llcrnrlon=glon.min(),
                                  llcrnrlat=glat.min(),
                                  urcrnrlon=glon.max(),
                                  urcrnrlat=glat.max(),
                                  pixels=pixels)
    cs = ax.pcolormesh(glon, glat, Sdat_gm)
    ax.set_axis_off()
    fig.savefig(os.path.normpath(os.path.join(sonpath, 'class_overlay1.png')),
                transparent=True, format='png')

    # separate legend figure for the kmz
    fig = plt.figure(figsize=(1.0, 4.0), facecolor=None, frameon=False)
    ax = fig.add_axes([0.0, 0.05, 0.2, 0.9])
    cb = fig.colorbar(cs, cax=ax)
    cb.set_label('Texture lengthscale [m]', rotation=-90, color='k', labelpad=20)
    fig.savefig(os.path.normpath(os.path.join(sonpath, 'class_legend.png')),
                transparent=False, format='png')

    humutils.make_kml(llcrnrlon=glon.min(),
                      llcrnrlat=glat.min(),
                      urcrnrlon=glon.max(),
                      urcrnrlat=glat.max(),
                      figs=[os.path.normpath(os.path.join(sonpath, 'class_overlay1.png'))],
                      colorbar=os.path.normpath(os.path.join(sonpath, 'class_legend.png')),
                      kmzfile=os.path.normpath(os.path.join(sonpath, 'class_GroundOverlay.kmz')),
                      name='Sidescan Intensity')

    # =========================================================
    print("drawing and printing map ...")
    fig = plt.figure(frameon=False)
    map = Basemap(projection='merc',
                  epsg=cs2cs_args.split(':')[1],
                  resolution='i',  #h #f
                  llcrnrlon=np.min(humlon) - 0.001,
                  llcrnrlat=np.min(humlat) - 0.001,
                  urcrnrlon=np.max(humlon) + 0.001,
                  urcrnrlat=np.max(humlat) + 0.001)

    gx, gy = map.projtran(glon, glat)

    # newer ESRI servers dropped the ESRI_Imagery_World_2D service
    try:
        map.arcgisimage(server='http://server.arcgisonline.com/ArcGIS',
                        service='ESRI_Imagery_World_2D', xpixels=1000, ypixels=None, dpi=300)
    except:
        map.arcgisimage(server='http://server.arcgisonline.com/ArcGIS',
                        service='World_Imagery', xpixels=1000, ypixels=None, dpi=300)
    #finally:
    #   print "error: map could not be created..."

    ax = plt.Axes(fig, [0., 0., 1., 1.], )
    ax.set_axis_off()
    fig.add_axes(ax)

    if Sdat_gm.size > 25000000:
        print("matrix size > 25,000,000 - decimating by factor of 5 for display")
        map.pcolormesh(gx[::5, ::5], gy[::5, ::5], Sdat_gm[::5, ::5],
                       vmin=np.nanmin(Sdat_gm), vmax=np.nanmax(Sdat_gm))
    else:
        map.pcolormesh(gx, gy, Sdat_gm,
                       vmin=np.nanmin(Sdat_gm), vmax=np.nanmax(Sdat_gm))

    custom_save2(sonpath, 'class_map_imagery')
    del fig

    if os.name == 'posix':  # true if linux/mac
        elapsed = (time.time() - start)
    else:  # windows
        elapsed = (time.perf_counter() - start)
    # added missing space before "seconds"
    print("Processing took " + str(elapsed) + " seconds to analyse")

    print("Done!")
circ5 = Line2D([0], [0], linestyle="none", marker="o", markersize=10, markerfacecolor=colors[4],alpha=a_val) print 'Now plotting R02028 Acoutic sediment classifications...' #Begin the plot cs2cs_args = "epsg:26949" fig = plt.figure(figsize=(15,12)) ax = plt.subplot2grid((5,2),(0, 0),rowspan=4) ax.set_title('August 2013 Acousic \n Sediment Classifications') m = Basemap(projection='merc', epsg=cs2cs_args.split(':')[1], llcrnrlon=np.nanmin(glon)-0.0009, llcrnrlat=np.nanmin(glat)-0.0006, urcrnrlon=np.nanmax(glon)+0.0009, urcrnrlat=np.nanmax(glat)+0.0006) m.wmsimage(server='http://grandcanyon.usgs.gov/arcgis/services/Imagery/ColoradoRiverImageryExplorer/MapServer/WmsServer?', layers=['3'], xpixels=1000) x,y = m.projtran(aug_13_lon, aug_13_lat) im = m.contourf(x,y,aug_sed_class.T, cmap='coolwarm', levels=[0,1,2,3,4,5]) divider = make_axes_locatable(ax) cax = divider.append_axes("right", size="5%", pad=0.1) cbr = plt.colorbar(im, cax=cax) ax.legend((circ1, circ2, circ3,circ4,circ5),('rock','sand/rock','Gravel','Sand/Gravel','sand'),numpoints=1, loc='best') print 'Now plotting May 2014 Acoustic Sediment Classifications...' ax = plt.subplot2grid((5,2),(0, 1),rowspan=4) ax.set_title('May 2014 Acousic \n Sediment Classifications') m = Basemap(projection='merc', epsg=cs2cs_args.split(':')[1], llcrnrlon=np.nanmin(may_lon)-0.0009, llcrnrlat=np.nanmin(may_lat)-0.0006, urcrnrlon=np.nanmax(may_lon)+0.0009, urcrnrlat=np.nanmax(may_lat)+0.0006)
def mosaic_texture(humfile, sonpath, cs2cs_args="epsg:26949", res=99, nn=5, weight=1):
    '''
    Create mosaics of the spatially referenced sidescan echograms

    Syntax
    ----------
    [] = PyHum.mosaic_texture(humfile, sonpath, cs2cs_args, res, nn, weight)

    Parameters
    ----------
    humfile : str
       path to the .DAT file
    sonpath : str
       path where the *.SON files are
    cs2cs_args : int, *optional* [Default="epsg:26949"]
       arguments to create coordinates in a projected coordinate system
       this argument gets given to pyproj to turn wgs84 (lat/lon) coordinates
       into any projection supported by the proj.4 libraries
    res : float, *optional* [Default=0]
       grid resolution of output gridded texture map
       if res=99, res will be determined automatically from the spatial resolution of 1 pixel
    nn: int, *optional* [Default=5]
       number of nearest neighbours for gridding
    weight: int, *optional* [Default=1]
       specifies the type of pixel weighting in the gridding process
       weight = 1, based on grazing angle and inverse distance weighting
       weight = 2, based on grazing angle only
       weight = 3, inverse distance weighting only
       weight = 4, no weighting

    Returns
    -------
    sonpath+'GroundOverlay.kml': kml file
        contains gridded (or point cloud) sidescan intensity map for importing into google earth
        of the pth chunk
    sonpath+'map.png' : image overlay associated with the kml file
    '''

    # prompt user to supply file if no input file given
    if not humfile:
        print('An input file is required!!!!!!')
        Tk().withdraw()  # we don't want a full GUI, so keep the root window from appearing
        humfile = askopenfilename(filetypes=[("DAT files", "*.DAT")])

    # prompt user to supply directory if no input sonpath is given
    if not sonpath:
        print('A *.SON directory is required!!!!!!')
        Tk().withdraw()  # we don't want a full GUI, so keep the root window from appearing
        sonpath = askdirectory()

    # print given arguments to screen and convert data type where necessary
    if humfile:
        print('Input file is %s' % (humfile))
    if sonpath:
        print('Sonar file path is %s' % (sonpath))
    if cs2cs_args:
        print('cs2cs arguments are %s' % (cs2cs_args))
    if res:
        res = np.asarray(res, float)
        print('Gridding resolution: %s' % (str(res)))
    if nn:
        nn = int(nn)
        print('Number of nearest neighbours for gridding: %s' % (str(nn)))
    if weight:
        weight = int(weight)
        print('Weighting for gridding: %s' % (str(weight)))

    ##nn = 5 #number of nearest neighbours in gridding
    # noise threshold in dB W: used below but previously never defined
    # (only present as a commented-out constant) -> NameError at runtime
    noisefloor = 10

    # start timer
    if os.name == 'posix':  # true if linux/mac or cygwin on windows
        start = time.time()
    else:  # windows
        # time.clock() was removed in Python 3.8; perf_counter is the
        # documented replacement for interval timing
        start = time.perf_counter()

    trans = pyproj.Proj(init=cs2cs_args)

    # if son path name supplied has no separator at end, put one on
    if sonpath[-1] != os.sep:
        sonpath = sonpath + os.sep

    base = humfile.split('.DAT')  # get base of file name for output
    base = base[0].split(os.sep)[-1]

    # remove underscores, negatives and spaces from basename
    base = humutils.strip_base(base)

    meta = loadmat(os.path.normpath(os.path.join(sonpath, base + 'meta.mat')))

    esi = np.squeeze(meta['e'])
    nsi = np.squeeze(meta['n'])
    theta = np.squeeze(meta['heading']) / (180 / np.pi)  # degrees -> radians

    # load memory mapped scans
    shape_port = np.squeeze(meta['shape_port'])
    if shape_port != '':
        if os.path.isfile(os.path.normpath(os.path.join(sonpath, base + '_data_port_lar.dat'))):
            port_fp = io.get_mmap_data(sonpath, base, '_data_port_lar.dat',
                                       'float32', tuple(shape_port))
        else:
            port_fp = io.get_mmap_data(sonpath, base, '_data_port_la.dat',
                                       'float32', tuple(shape_port))

    shape_star = np.squeeze(meta['shape_star'])
    if shape_star != '':
        if os.path.isfile(os.path.normpath(os.path.join(sonpath, base + '_data_star_lar.dat'))):
            star_fp = io.get_mmap_data(sonpath, base, '_data_star_lar.dat',
                                       'float32', tuple(shape_star))
        else:
            star_fp = io.get_mmap_data(sonpath, base, '_data_star_la.dat',
                                       'float32', tuple(shape_star))

    # time varying gain
    tvg = ((8.5 * 10**-5) + (3 / 76923) + ((8.5 * 10**-5) / 4)) * meta['c']

    # depth correction
    dist_tvg = np.squeeze(((np.tan(np.radians(25))) * np.squeeze(meta['dep_m'])) - (tvg))

    # read in range data
    R_fp = io.get_mmap_data(sonpath, base, '_data_range.dat', 'float32',
                            tuple(shape_star))

    dx = np.arcsin(meta['c'] / (1000 * meta['t'] * meta['f']))
    pix_m = meta['pix_m']
    c = meta['c']

    if not os.path.isfile(os.path.normpath(os.path.join(sonpath, base + "S.p"))):
        #if 2 > 1:
        inputfiles = []
        if len(shape_star) > 2:
            # process each chunk of the record separately
            for p in range(len(star_fp)):
                e = esi[shape_port[-1] * p:shape_port[-1] * (p + 1)]
                n = nsi[shape_port[-1] * p:shape_port[-1] * (p + 1)]
                t = theta[shape_port[-1] * p:shape_port[-1] * (p + 1)]
                d = dist_tvg[shape_port[-1] * p:shape_port[-1] * (p + 1)]
                dat_port = port_fp[p]
                dat_star = star_fp[p]
                data_R = R_fp[p]
                print("writing chunk %s " % (str(p)))
                write_points(e, n, t, d, dat_port, dat_star, data_R, pix_m,
                             res, cs2cs_args, sonpath, p, c, dx)
                inputfiles.append(
                    os.path.normpath(
                        os.path.join(sonpath, 'x_y_class' + str(p) + '.asc')))
        else:
            p = 0
            print("writing chunk %s " % (str(p)))
            write_points(esi, nsi, theta, dist_tvg, port_fp, star_fp, R_fp,
                         meta['pix_m'], res, cs2cs_args, sonpath, 0, c, dx)
            inputfiles.append(
                os.path.normpath(
                    os.path.join(sonpath, 'x_y_class' + str(p) + '.asc')))

        #trans =  pyproj.Proj(init=cs2cs_args)

        # D, R, h, t
        print("reading points from %s files" % (str(len(inputfiles))))
        X, Y, S, D, R, h, t, i = getxys(inputfiles)

        print("%s points read from %s files" % (str(len(S)), str(len(inputfiles))))

        # remove values where sidescan intensity is zero
        ind = np.where(np.logical_not(S == 0))[0]
        X = X[ind]
        Y = Y[ind]
        S = S[ind]
        D = D[ind]
        R = R[ind]
        h = h[ind]
        t = t[ind]
        i = i[ind]
        del ind

        # save to file for temporary storage
        pickle.dump(S, open(os.path.normpath(os.path.join(sonpath, base + "S.p")), "wb"))
        del S
        pickle.dump(D, open(os.path.normpath(os.path.join(sonpath, base + "D.p")), "wb"))
        del D
        pickle.dump(t, open(os.path.normpath(os.path.join(sonpath, base + "t.p")), "wb"))
        del t
        pickle.dump(i, open(os.path.normpath(os.path.join(sonpath, base + "i.p")), "wb"))
        del i
        pickle.dump(X, open(os.path.normpath(os.path.join(sonpath, base + "X.p")), "wb"))
        del X
        pickle.dump(Y, open(os.path.normpath(os.path.join(sonpath, base + "Y.p")), "wb"))
        del Y
        pickle.dump(R, open(os.path.normpath(os.path.join(sonpath, base + "R.p")), "wb"))
        pickle.dump(h, open(os.path.normpath(os.path.join(sonpath, base + "h.p")), "wb"))

        # grazing angle
        # BUG FIX: np.arctan(R, h) treats h as numpy's `out` argument and
        # silently overwrites it; the intended quantity is arctan2(R, h)
        g = np.arctan2(R.flatten(), h.flatten())
        pickle.dump(g, open(os.path.normpath(os.path.join(sonpath, base + "g.p")), "wb"))
        del g, R, h

    print("creating grids ...")

    if res == 0:
        res = 99
    if res == 99:
        #### prepare grids
        R = pickle.load(open(os.path.normpath(os.path.join(sonpath, base + "R.p")), "rb"))
        ## actual along-track resolution is this: dx times dy = Af
        tmp = R * dx * (c * 0.007 / 2)
        del R
        resg = np.min(tmp[tmp > 0])
        del tmp
    else:
        resg = res

    X = pickle.load(open(os.path.normpath(os.path.join(sonpath, base + "X.p")), "rb"))
    Y = pickle.load(open(os.path.normpath(os.path.join(sonpath, base + "Y.p")), "rb"))

    humlon, humlat = trans(X, Y, inverse=True)

    grid_x, grid_y = np.meshgrid(np.arange(np.min(X), np.max(X), resg),
                                 np.arange(np.min(Y), np.max(Y), resg))

    shape = np.shape(grid_x)

    # BUG FIX: zip() is an iterator on Python 3 and is not accepted by
    # KDTree; build an (N, 2) array instead
    tree = KDTree(np.c_[X.flatten(), Y.flatten()])
    del X, Y

    print("mosaicking ...")
    #k nearest neighbour
    try:
        # n_jobs is only available in newer scipy versions
        dist, inds = tree.query(np.c_[grid_x.flatten(), grid_y.flatten()],
                                k=nn, n_jobs=-1)
    except:
        #print ".... update your scipy installation to use faster kd-tree"
        dist, inds = tree.query(np.c_[grid_x.flatten(), grid_y.flatten()], k=nn)

    #del grid_x, grid_y

    if weight == 1:
        # grazing angle and inverse distance weighting
        g = pickle.load(open(os.path.normpath(os.path.join(sonpath, base + "g.p")), "rb"))
        w = g[inds] + 1.0 / dist**2
        del g
    elif weight == 2:
        # grazing angle only
        g = pickle.load(open(os.path.normpath(os.path.join(sonpath, base + "g.p")), "rb"))
        w = g[inds]
        del g
    elif weight == 3:
        # inverse distance weighting only
        w = 1.0 / dist**2
    elif weight == 4:
        # no weighting
        w = 1.0

    #g = pickle.load( open( os.path.normpath(os.path.join(sonpath,base+"g.p")), "rb" ) )
    #w = g[inds] + 1.0 / dist**2
    #del g

    if weight < 4:
        # clamp pathological weights (coincident points give inf/nan)
        w[np.isinf(w)] = 1
        w[np.isnan(w)] = 1
        w[w > 10000] = 10000
        w[w <= 0] = 1

    # load in sidescan intensity
    S = pickle.load(open(os.path.normpath(os.path.join(sonpath, base + "S.p")), "rb"))
    # filter out noise pixels
    S[S < noisefloor] = np.nan

    if nn == 1:
        Sdat_g = (w * S.flatten()[inds]).reshape(shape)
        del w
        dist = dist.reshape(shape)
    else:
        if weight < 4:
            # weighted mean of the nn neighbours
            Sdat_g = (np.nansum(w * S.flatten()[inds], axis=1) /
                      np.nansum(w, axis=1)).reshape(shape)
        else:
            Sdat_g = (np.nansum(S.flatten()[inds], axis=1)).reshape(shape)
        del w
        dist = np.nanmean(dist, axis=1).reshape(shape)

    del S

    Sdat_g[dist > 1] = np.nan
    Sdat_g[Sdat_g < noisefloor] = np.nan

    # in-paint small holes, then restore the no-data mask
    dat = Sdat_g.copy()
    dat[dist > 1] = 0
    dat2 = replace_nans.RN(dat.astype('float64'), 1000, 0.01, 2, 'localmean').getdata()
    dat2[dat == 0] = np.nan
    del dat

    dat2[dat2 < noisefloor] = np.nan

    Sdat_g = dat2.copy()
    del dat2

    Sdat_g[Sdat_g == 0] = np.nan
    Sdat_g[np.isinf(Sdat_g)] = np.nan
    Sdat_gm = np.ma.masked_invalid(Sdat_g)
    del Sdat_g

    glon, glat = trans(grid_x, grid_y, inverse=True)
    del grid_x, grid_y

    # =========================================================
    print("creating kmz file ...")
    ## new way to create kml file
    pixels = 1024 * 10

    fig, ax = humutils.gearth_fig(llcrnrlon=glon.min(),
                                  llcrnrlat=glat.min(),
                                  urcrnrlon=glon.max(),
                                  urcrnrlat=glat.max(),
                                  pixels=pixels)
    cs = ax.pcolormesh(glon, glat, Sdat_gm)
    ax.set_axis_off()
    fig.savefig(os.path.normpath(os.path.join(sonpath, 'class_overlay1.png')),
                transparent=True, format='png')

    # separate legend figure for the kmz
    fig = plt.figure(figsize=(1.0, 4.0), facecolor=None, frameon=False)
    ax = fig.add_axes([0.0, 0.05, 0.2, 0.9])
    cb = fig.colorbar(cs, cax=ax)
    cb.set_label('Texture lengthscale [m]', rotation=-90, color='k', labelpad=20)
    fig.savefig(os.path.normpath(os.path.join(sonpath, 'class_legend.png')),
                transparent=False, format='png')

    humutils.make_kml(
        llcrnrlon=glon.min(),
        llcrnrlat=glat.min(),
        urcrnrlon=glon.max(),
        urcrnrlat=glat.max(),
        figs=[os.path.normpath(os.path.join(sonpath, 'class_overlay1.png'))],
        colorbar=os.path.normpath(os.path.join(sonpath, 'class_legend.png')),
        kmzfile=os.path.normpath(os.path.join(sonpath, 'class_GroundOverlay.kmz')),
        name='Sidescan Intensity')

    # =========================================================
    print("drawing and printing map ...")
    fig = plt.figure(frameon=False)
    map = Basemap(
        projection='merc',
        epsg=cs2cs_args.split(':')[1],
        resolution='i',  #h #f
        llcrnrlon=np.min(humlon) - 0.001,
        llcrnrlat=np.min(humlat) - 0.001,
        urcrnrlon=np.max(humlon) + 0.001,
        urcrnrlat=np.max(humlat) + 0.001)

    gx, gy = map.projtran(glon, glat)

    # newer ESRI servers dropped the ESRI_Imagery_World_2D service
    try:
        map.arcgisimage(server='http://server.arcgisonline.com/ArcGIS',
                        service='ESRI_Imagery_World_2D',
                        xpixels=1000, ypixels=None, dpi=300)
    except:
        map.arcgisimage(server='http://server.arcgisonline.com/ArcGIS',
                        service='World_Imagery',
                        xpixels=1000, ypixels=None, dpi=300)
    #finally:
    #   print "error: map could not be created..."

    ax = plt.Axes(fig, [0., 0., 1., 1.], )
    ax.set_axis_off()
    fig.add_axes(ax)

    if Sdat_gm.size > 25000000:
        print("matrix size > 25,000,000 - decimating by factor of 5 for display")
        map.pcolormesh(gx[::5, ::5], gy[::5, ::5], Sdat_gm[::5, ::5],
                       vmin=np.nanmin(Sdat_gm), vmax=np.nanmax(Sdat_gm))
    else:
        map.pcolormesh(gx, gy, Sdat_gm,
                       vmin=np.nanmin(Sdat_gm), vmax=np.nanmax(Sdat_gm))

    custom_save2(sonpath, 'class_map_imagery')
    del fig

    if os.name == 'posix':  # true if linux/mac
        elapsed = (time.time() - start)
    else:  # windows
        elapsed = (time.perf_counter() - start)
    # added missing space before "seconds"
    print("Processing took " + str(elapsed) + " seconds to analyse")

    print("Done!")
######################################################################################################################### ######################################################################################################################## # Begin Subplot 1 ######################################################################################################################### ######################################################################################################################### print 'Now Plotting April 2014...' fig = plt.figure(figsize=(9,10)) ax = plt.subplot2grid((10,2),(0, 0),rowspan=9) ax.set_title(title_04) m = Basemap(projection='merc', epsg=cs2cs_args.split(':')[1], llcrnrlon=np.min(glon_04) - 0.0004, llcrnrlat=np.min(glat_04) - 0.0006, urcrnrlon=np.max(glon_04) + 0.0006, urcrnrlat=np.max(glat_04) + 0.0009) x,y = m.projtran(glon_04, glat_04) m.wmsimage(server=wms_url, layers=['3'], xpixels=1000) im = m.contourf(x,y,data_04.T, cmap=c_ramp) divider = make_axes_locatable(ax) cax = divider.append_axes("right", size="5%", pad=0.05) cbr = plt.colorbar(im, cax=cax) cbr.set_label(cbr_txt, size=10) #read shapefile and create polygon collections m.readshapefile(geo_shp_04,"layer",drawbounds = False) #sand, gravel, boulders s_patch, g_patch, b_patch =[],[],[] for info, shape in zip(m.layer_info, m.layer): if info['substrate'] == 'sand': s_patch.append(Polygon(np.asarray(shape),True))
def e1e2(humfile, sonpath, cs2cs_args, ph, temp, salinity, beam, transfreq, integ, numclusters, doplot): ''' Analysis of first (e1, 'roughness') and second (e2, 'hardness') echo returns from the high-frequency downward looking echosounder Generates generalised acoustic parameters for the purposes of point classification of submerged substrates/vegetation Accounts for the absorption of sound in water Does a basic k-means cluster of e1 and e2 coefficients into specified number of 'acoustic classes' based on code by Barb Fagetter ([email protected]) Syntax ---------- [] = PyHum.e1e2(humfile, sonpath, cs2cs_args, ph, temp, salinity, beam, transfreq, integ, numclusters, doplot) Parameters ---------- humfile : str path to the .DAT file sonpath : str path where the *.SON files are cs2cs_args : int, *optional* [Default="epsg:26949"] arguments to create coordinates in a projected coordinate system this argument gets given to pyproj to turn wgs84 (lat/lon) coordinates into any projection supported by the proj.4 libraries ph : float, *optional* [Default=7.0] water acidity in pH temp : float, *optional* [Default=10.0] water temperature in degrees Celsius salinity : float, *optional* [Default=0.0] salinity of water in parts per thousand beam : float, *optional* [Default=20.0] beam width in degrees transfreq : float, *optional* [Default=200.0] transducer frequency in kHz integ : int, *optional* [Default=5] number of pings over which to integrate numclusters : int, *optional* [Default=3] transducer frequency in kHz doplot : int, *optional* [Default=1] 1 = make plots, otherwise do not Returns ------- sonpath+base+'rough_and_hard'+str(p)+'.csv' : csv file contains the following fields: 'longitude', 'latitude', 'easting', 'northing', 'depth', 'roughness', 'hardness', 'average roughness', 'average hardness','k-mean label' of the pth chunk 'average' implies average over 'integ' successive pings The following are returned if doplot==1: sonpath+'e1e2_scan'+str(p).png : png image file 
png image file showing the downward echosounder echogram overlain with the locations of the start and end of the first and second echo region envelope sonpath+'e1e2_kmeans'+str(p).png: png image file png image file showing 1) (left) volume scattering coefficient 1 versus volume scattering coefficient 2, colour-coded by k-means acoustic class, and 2) (right) e1 versus e2, colour-coded by k-means acoustic class sonpath+'rgh_hard_kmeans'+str(p).png : png image file png image file showing scatter plot of easting versus northing colour-coded by k-means acoustic class sonpath+'map_rgh'+str(p).png : png image file png image file showing scatter plot of 'roughness' (e1) overlying an aerial image pulled from an ESRI image server sonpath+'map_hard'+str(p).png : png image file png image file showing scatter plot of 'hardness' (e2) overlying an aerial image pulled from an ESRI image server sonpath,'Rough'+str(p).png : png image file png image overlay associated with the kml file, sonpath,'Hard'+str(p).kml sonpath,'Rough'+str(p).kml : kml file kml overlay for showing roughness scatter plot (sonpath,'Rough'+str(p).png) sonpath,'Hard'+str(p).png : png image file png image overlay associated with the kml file, sonpath,'Hard'+str(p).kml sonpath,'Hard'+str(p).kml : kml file kml overlay for showing harness scatter plot (sonpath,'Hard'+str(p).png) ''' # prompt user to supply file if no input file given if not humfile: print 'An input file is required!!!!!!' Tk().withdraw() # we don't want a full GUI, so keep the root window from appearing inputfile = askopenfilename(filetypes=[("DAT files","*.DAT")]) # prompt user to supply directory if no input sonpath is given if not sonpath: print 'A *.SON directory is required!!!!!!' 
Tk().withdraw() # we don't want a full GUI, so keep the root window from appearing sonpath = askdirectory() # print given arguments to screen and convert data type where necessary if humfile: print 'Input file is %s' % (humfile) if sonpath: print 'Sonar file path is %s' % (sonpath) if cs2cs_args: print 'cs2cs arguments are %s' % (cs2cs_args) if beam: beam = np.asarray(beam,float) print 'Beam is %s deg' % (str(beam)) if salinity: salinity = np.asarray(salinity,float) print 'Salinity is %s ppt' % (str(salinity)) if ph: ph = np.asarray(ph,float) print 'pH is %s' % (str(ph)) if temp: temp = np.asarray(temp,float) print 'Temperature is %s' % (str(temp)) if transfreq: transfreq = np.asarray(transfreq,float) print 'Dwnward sonar freq. is %s' % (str(transfreq)) if integ: integ = np.asarray(integ,int) print 'number of records for integration is %s' % (str(integ)) if numclusters: numclusters = np.asarray(numclusters,int) print 'number of returned acoustic clusters is %s' % (str(numclusters)) if doplot: doplot = int(doplot) if doplot==0: print "Plots will not be made" if not beam: beam = 20.0 print '[Default] Beam is %s deg' % (str(beam)) if not salinity: if salinity != 0.0: salinity = 0.0 print '[Default] Salinity is %s ppt' % (str(salinity)) if not ph: ph = 7.0 print '[Default] pH is %s' % (str(ph)) if not integ: integ = 5 print '[Default] Number of records for integration is %s' % (str(ph)) if not numclusters: numclusters = 3 print '[Default] Number of acoustic clusters is %s' % (str(ph)) if not temp: temp = 10.0 print '[Default] Temperature is %s degC' % (str(temp)) if not transfreq: transfreq = 200.0 print '[Default] Dwnward freq. 
is %s kHz' % (str(transfreq)) if not cs2cs_args: # arguments to pass to cs2cs for coordinate transforms cs2cs_args = "epsg:26949" print '[Default] cs2cs arguments are %s' % (cs2cs_args) if not doplot: if doplot != 0: doplot = 1 print "[Default] Plots will be made" # if son path name supplied has no separator at end, put one on if sonpath[-1]!=os.sep: sonpath = sonpath + os.sep base = humfile.split('.DAT') # get base of file name for output base = base[0].split(os.sep)[-1] # remove underscores, negatives and spaces from basename if base.find('_')>-1: base = base[:base.find('_')] if base.find('-')>-1: base = base[:base.find('-')] if base.find(' ')>-1: base = base[:base.find(' ')] beamwidth = beam*(np.sqrt(0.5)) equivbeam = (5.78/(np.power(1.6,2)))*(np.power((np.sin((beamwidth*np.pi)/(2*180))),2)) meta = loadmat(sonpath+base+'meta.mat') c = np.squeeze(meta['c']) t = np.squeeze(meta['t']) f = np.squeeze(meta['f']) maxW = np.squeeze(meta['maxW']) lat = np.squeeze(meta['lat']) lon = np.squeeze(meta['lon']) es = np.squeeze(meta['es']) ns = np.squeeze(meta['ns']) dep = np.squeeze(meta['dep_m']) del meta # load memory mapped scans shape_hi= np.squeeze(loadmat(sonpath+base+'meta.mat')['shape_hi']) if shape_hi!='': try: dwnhi_fp = np.memmap(sonpath+base+'_data_dwnhi.dat', dtype='int16', mode='r', shape=tuple(shape_hi)) except: shape_lo= np.squeeze(loadmat(sonpath+base+'meta.mat')['shape_low']) dwnhi_fp = np.memmap(sonpath+base+'_data_dwnhi.dat', dtype='int16', mode='r', shape=tuple(shape_lo)) if 'dwnhi_fp' in locals(): theta3dB = np.arcsin(c/(t*(f*1000))) # *(180/pi) # to see in degs ft = (np.pi/2)*(1/theta3dB) bed = ft*dep i = np.linspace(1,shape_hi[0]*shape_hi[2], len(bed)) #np.shape(beam_data)[1],len(bed)) #bedi = np.interp(np.linspace(1,np.shape(beam_data)[1],np.shape(beam_data)[1]), i, bed) bedi = np.interp(np.linspace(1,shape_hi[0]*shape_hi[2],shape_hi[0]*shape_hi[2]), i, bed) ei = np.interp(np.linspace(1,shape_hi[0]*shape_hi[2],shape_hi[0]*shape_hi[2]), i, es) ni = 
np.interp(np.linspace(1,shape_hi[0]*shape_hi[2],shape_hi[0]*shape_hi[2]), i, ns) lati = np.interp(np.linspace(1,shape_hi[0]*shape_hi[2],shape_hi[0]*shape_hi[2]), i, lat) loni = np.interp(np.linspace(1,shape_hi[0]*shape_hi[2],shape_hi[0]*shape_hi[2]), i, lon) del i bedi = np.asarray(bedi,'int') depi = ((1/ft)*bedi) # near-field region nf = int(ft*(1000*(0.105**2)*f/(4*1500))) #absorption = calcAb(c, ph, salinity, temp, np.asarray(depi), transfreq) absorption = water_atten(np.asarray(depi), transfreq, c, ph, temp, salinity) for p in xrange(len(dwnhi_fp)): #make an index of every other record ind = range(0,np.shape(dwnhi_fp[p])[1]) Zdepi = depi[shape_hi[2]*p:shape_hi[2]*(p+1)] Zabsorp = absorption[shape_hi[2]*p:shape_hi[2]*(p+1)] Zlat = lati[shape_hi[2]*p:shape_hi[2]*(p+1)] Zlon = loni[shape_hi[2]*p:shape_hi[2]*(p+1)] Zes = ei[shape_hi[2]*p:shape_hi[2]*(p+1)] Zns = ni[shape_hi[2]*p:shape_hi[2]*(p+1)] try: #parallel processing with all available cores w = Parallel(n_jobs=-1, verbose=0)(delayed(get_rgh_hrd)(dwnhi_fp[p][:,i],Zdepi[i],Zabsorp[i],c,nf,transfreq,equivbeam,maxW,pi,ft) for i in ind) except: #fall back to serial w = Parallel(n_jobs=1, verbose=0)(delayed(get_rgh_hrd)(dwnhi_fp[p][:,i],Zdepi[i],Zabsorp[i],c,nf,transfreq,equivbeam,maxW,pi,ft) for i in ind) rough, hard, sv_e1, sv_e2, e1a, e1b, e2a, e2b = zip(*w) rough = np.array(rough,'float') rough[rough==0.0] = np.nan hard = np.array(hard,'float') hard[hard==0.0] = np.nan sv_e1 = np.array(sv_e1,'float') sv_e1[sv_e1==0.0] = np.nan sv_e2 = np.array(sv_e2,'float') sv_e2[sv_e2==0.0] = np.nan try: nans, y= humutils.nan_helper(rough) rough[nans]= np.interp(y(nans), y(~nans), rough[~nans]) except: continue try: nans, y= humutils.nan_helper(hard) hard[nans]= np.interp(y(nans), y(~nans), hard[~nans]) except: continue try: nans, y= humutils.nan_helper(sv_e1) sv_e1[nans]= np.interp(y(nans), y(~nans), sv_e1[~nans]) except: continue try: nans, y= humutils.nan_helper(sv_e2) sv_e2[nans]= np.interp(y(nans), y(~nans), 
sv_e2[~nans]) except: continue data = np.column_stack([sv_e1, sv_e2]) k_means = MiniBatchKMeans(numclusters) # fit the model k_means.fit(data) values = k_means.cluster_centers_.squeeze() labels = k_means.labels_ hardav = humutils.runningMeanFast(hard,integ) roughav = humutils.runningMeanFast(rough,integ) f = open(sonpath+base+'rough_and_hard'+str(p)+'.csv', 'wt') writer = csv.writer(f) writer.writerow( ('longitude', 'latitude', 'easting', 'northing', 'depth', 'roughness', 'hardness', 'average roughness', 'average hardness','k-mean label') ) for i in range(0, len(rough)): writer.writerow(( float(Zlon[i]),float(Zlat[i]),float(Zes[i]),float(Zns[i]),float(Zdepi[i]),float(rough[i]),float(hard[i]),float(roughav[i]),float(hardav[i]), labels[i].astype(int) )) f.close() if doplot==1: try: fig = plt.figure() plt.imshow(dwnhi_fp[p], cmap='gray') plt.plot(e1a,'r'); plt.plot(e1b,'y'); plt.plot(e2a,'c'); plt.plot(e2b,'m'); plt.axis('tight') #plt.show() custom_save(sonpath,'e1e2_scan'+str(p)) del fig except: print "plot could not be produced" if doplot==1: try: fig = plt.figure() fig.subplots_adjust(wspace = 0.4, hspace=0.4) plt.subplot(221) plt.plot(sv_e1[labels==0],sv_e2[labels==0],'ko'); plt.plot(sv_e1[labels==1],sv_e2[labels==1],'ro'); plt.plot(sv_e1[labels==2],sv_e2[labels==2],'bo'); plt.xlabel('SV1'); plt.ylabel('SV2') plt.xlim(0,1); plt.ylim(0,1) plt.subplot(222) plt.plot(rough[labels==0],hard[labels==0],'ko'); plt.plot(rough[labels==1],hard[labels==1],'ro'); plt.plot(rough[labels==2],hard[labels==2],'bo'); plt.xlabel('E1'); plt.ylabel('E2') plt.xlim(1,8); plt.ylim(1,8) #plt.show() custom_save(sonpath,'e1e2_kmeans'+str(p)) del fig except: print "plot could not be produced" if doplot==1: try: fig = plt.figure() s=plt.scatter(Zes[labels==0],Zns[labels==0],marker='o',c='k', s=10, linewidth=0, vmin=0, vmax=8); s=plt.scatter(Zes[labels==1],Zns[labels==1],marker='o',c='r', s=10, linewidth=0, vmin=0, vmax=8); s=plt.scatter(Zes[labels==2],Zns[labels==2],marker='o',c='b', s=10, 
linewidth=0, vmin=0, vmax=8); custom_save(sonpath,'rgh_hard_kmeans'+str(p)) del fig except: print "plot could not be produced" if doplot==1: try: print "drawing and printing map ..." fig = plt.figure(frameon=False) #fig.subplots_adjust(wspace = 0.4, hspace=0.4) map = Basemap(projection='merc', epsg=cs2cs_args.split(':')[1], #epsg=26949, resolution = 'i', #h #f llcrnrlon=np.min(Zlon)-0.0001, llcrnrlat=np.min(Zlat)-0.0001, urcrnrlon=np.max(Zlon)+0.0001, urcrnrlat=np.max(Zlat)+0.0001) # draw point cloud x,y = map.projtran(Zlon, Zlat) cs = map.scatter(x.flatten(), y.flatten(), 1, rough.flatten(), linewidth=0, vmin=0, vmax=8) map.arcgisimage(server='http://server.arcgisonline.com/ArcGIS', service='World_Imagery', xpixels=1000, ypixels=None, dpi=300) cbar = map.colorbar(cs,location='bottom',pad="5%") cbar.set_label('E1') cbar.set_ticks([0,2,4,6,8]) custom_save(sonpath,'map_rgh'+str(p)) del fig except: print "plot could not be produced" if doplot==1: try: fig = plt.figure() #fig.subplots_adjust(wspace = 0.4, hspace=0.4) map = Basemap(projection='merc', epsg=cs2cs_args.split(':')[1], resolution = 'i', #h #f llcrnrlon=np.min(Zlon)-0.0001, llcrnrlat=np.min(Zlat)-0.0001, urcrnrlon=np.max(Zlon)+0.0001, urcrnrlat=np.max(Zlat)+0.0001) # draw point cloud x,y = map.projtran(Zlon, Zlat) cs = map.scatter(x.flatten(), y.flatten(), 1, hard.flatten(), linewidth=0, vmin=0, vmax=8) map.arcgisimage(server='http://server.arcgisonline.com/ArcGIS', service='World_Imagery', xpixels=1000, ypixels=None, dpi=300) cbar = map.colorbar(cs,location='bottom',pad="5%") cbar.set_label('E2') cbar.set_ticks([0,2,4,6,8]) custom_save(sonpath,'map_hard'+str(p)) del fig except: print "plot could not be produced" if doplot==1: try: print "drawing and printing map ..." 
fig = plt.figure(frameon=False) map = Basemap(projection='merc', epsg=cs2cs_args.split(':')[1], #26949, resolution = 'i', #h #f llcrnrlon=np.min(Zlon)-0.001, llcrnrlat=np.min(Zlat)-0.001, urcrnrlon=np.max(Zlon)+0.001, urcrnrlat=np.max(Zlat)+0.001) ax = plt.Axes(fig, [0., 0., 1., 1.], ) ax.set_axis_off() fig.add_axes(ax) ## draw point cloud x,y = map.projtran(Zlon, Zlat) map.scatter(x.flatten(), y.flatten(), 1, rough.flatten(), linewidth = '0', vmin=0, vmax=8) custom_save(sonpath,'Rough'+str(p)) del fig kml = simplekml.Kml() ground = kml.newgroundoverlay(name='GroundOverlay') ground.icon.href = 'Rough'+str(p)+'.png' ground.latlonbox.north = np.min(Zlat)-0.001 ground.latlonbox.south = np.max(Zlat)+0.001 ground.latlonbox.east = np.max(Zlon)+0.001 ground.latlonbox.west = np.min(Zlon)-0.001 ground.latlonbox.rotation = 0 kml.save(sonpath+'Rough'+str(p)+'.kml') except: print "plot could not be produced" if doplot==1: try: print "drawing and printing map ..." fig = plt.figure(frameon=False) map = Basemap(projection='merc', epsg=cs2cs_args.split(':')[1], #26949, resolution = 'i', #h #f llcrnrlon=np.min(Zlon)-0.001, llcrnrlat=np.min(Zlat)-0.001, urcrnrlon=np.max(Zlon)+0.001, urcrnrlat=np.max(Zlat)+0.001) ax = plt.Axes(fig, [0., 0., 1., 1.], ) ax.set_axis_off() fig.add_axes(ax) ## draw point cloud x,y = map.projtran(Zlon, Zlat) map.scatter(x.flatten(), y.flatten(), 1, hard.flatten(), linewidth = '0', vmin=0, vmax=8) custom_save(sonpath,'Hard'+str(p)) del fig kml = simplekml.Kml() ground = kml.newgroundoverlay(name='GroundOverlay') ground.icon.href = 'Hard'+str(p)+'.png' ground.latlonbox.north = np.min(Zlat)-0.001 ground.latlonbox.south = np.max(Zlat)+0.001 ground.latlonbox.east = np.max(Zlon)+0.001 ground.latlonbox.west = np.min(Zlon)-0.001 ground.latlonbox.rotation = 0 kml.save(sonpath+'Hard'+str(p)+'.kml') except: print "plot could not be produced" else: print "high-frequency downward echosounder data not available"
# Draw a Mercator map of Europe and annotate a single point of interest.
# NOTE(review): depends on globals defined elsewhere in the file
# (titleFontSize, plotPrefecture, shape, dLonJ, lonJpn) — presumably
# dLonJ - lonJpn is a longitude offset used to shift a Japanese shapefile
# onto the European map; confirm against the code that defines them.
fig, ax = matplotlib.pyplot.subplots(1, 1, figsize=(16, 8))

# Mercator projection clipped to Europe; 'l' = low-resolution coastlines.
mercMapE = Basemap(projection='merc', llcrnrlat=30, urcrnrlat=75, llcrnrlon=-25, urcrnrlon=40, lat_ts=10, ax=ax, resolution='l')
mercMapE.drawcoastlines(linewidth=0.5)
mercMapE.drawcountries(linewidth=0.25)
# Graticule: parallels every 10 degrees, meridians every 15 degrees.
mercMapE.drawparallels(numpy.arange(mercMapE.latmin, mercMapE.latmax, 10.))
mercMapE.drawmeridians(numpy.arange(mercMapE.lonmin, mercMapE.lonmax, 15.))
ax.set_title(r'$Europe,\ true\ lat.$', fontsize=titleFontSize)

# Overlay the prefecture outline, shifted in longitude onto this map.
plotPrefecture(shp=shape, colour='gold', lwdth=2, bMap=mercMapE, axes=ax, latOff=0, longOff=dLonJ - lonJpn)

# Show annotation at the true latitude.
# Project the marker location and a separate text-anchor location.
xKIT, yKIT = mercMapE.projtran(130.834730 + dLonJ - lonJpn, 33.8924837)
xTXT, yTXT = mercMapE.projtran(110.834730 + dLonJ - lonJpn, 45.8924837)
ax.scatter([xKIT], [yKIT], s=50, c='crimson')
ax.annotate('Here', xy=(xKIT, yKIT), xytext=(xTXT, yTXT), color='crimson', arrowprops=dict(facecolor='crimson', shrink=0.05))
fig.show()