def plot_dp(storm, datafile1, datafile2=None):
    # Single-file netCDF reading
    # (assumes netCDF4, numpy as np, matplotlib.pyplot as plt, datetime,
    #  cartopy.crs as ccrs, cartopy.feature as cfeature, and matplotlib.tri's
    #  Triangulation, TriAnalyzer and LinearTriInterpolator are imported above)
    ncf = datafile1
    nco = netCDF4.Dataset(ncf)

    # Get fields to plot
    lon = nco.variables['longitude'][:]
    lat = nco.variables['latitude'][:]
    timeindays = nco.variables['time'][:]
    dp = nco.variables['dp'][:]
    triangles = nco.variables['tri'][:, :]

    reflon = np.linspace(lon.min(), lon.max(), 1000)
    reflat = np.linspace(lat.min(), lat.max(), 1000)
    #reflon=np.linspace(-80.40, -74.75, 1000)
    #reflat=np.linspace(32.50, 36.60, 1000)
    #reflon=np.linspace(-75.70, -71.05, 1000)
    #reflat=np.linspace(38.50, 41.40, 1000)
    reflon, reflat = np.meshgrid(reflon, reflat)

    plt.figure(figsize=[6.4, 3.8])

    flatness = 0.10  # flatness ranges over 0-0.5; 0.5 is an equilateral triangle
    triangles = triangles - 1  # correct indices for Python's zero-based indexing
    tri = Triangulation(lon, lat, triangles=triangles)
    mask = TriAnalyzer(tri).get_flat_tri_mask(flatness)
    tri.set_mask(mask)

    # Loop through each time step and plot results
    for ind in range(0, len(timeindays)):
        plt.clf()
        ax = plt.axes(projection=ccrs.Mercator())

        dt = datetime.datetime.combine(datetime.date(1990, 1, 1),
                                       datetime.time(0, 0)) \
             + datetime.timedelta(days=timeindays[ind])
        dstr = dt.strftime('%Y%m%d%H:%M:%S')
        dstr = dstr[0:8] + ' ' + dstr[8:17]
        print('Plotting ' + dstr)

        par = np.double(dp[ind, :])
        tli = LinearTriInterpolator(tri, par)
        par_interp = tli(reflon, reflat)

        # shading='auto': with X, Y and C all the same shape, shading='flat'
        # is rejected by Matplotlib >= 3.3
        plt.pcolormesh(reflon, reflat, par_interp, vmin=0.0, vmax=360.0,
                       shading='auto', cmap=plt.cm.jet,
                       transform=ccrs.PlateCarree())
        cb = plt.colorbar()
        cb.ax.tick_params(labelsize=8)
        coast = cfeature.GSHHSFeature(scale='high', edgecolor='black',
                                      facecolor='none', linewidth=0.25)
        ax.add_feature(coast)
        plt.xticks(fontsize=9)
        plt.yticks(fontsize=9)

        figtitle = storm.capitalize() + ': Peak Dir (deg): ' + dstr
        plt.title(figtitle)

        dtlabel = dt.strftime('%Y%m%d%H%M%S')
        dtlabel = dtlabel[0:8] + '_' + dtlabel[8:14]
        filenm = 'nsem_' + storm + '_dp_' + dtlabel + '.png'
        plt.savefig(filenm, dpi=150, bbox_inches='tight', pad_inches=0.1)

        del par
        del par_interp
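# The masking-plus-interpolation pipeline used above (and in most snippets
# below), reduced to a minimal self-contained sketch: synthetic points stand
# in for the netCDF mesh, and all variable names here are illustrative only.
import numpy as np
from matplotlib.tri import Triangulation, TriAnalyzer, LinearTriInterpolator

rng = np.random.default_rng(0)
lon = rng.uniform(-80.0, -74.0, 500)
lat = rng.uniform(32.0, 37.0, 500)
val = np.sin(lon) * np.cos(lat)  # stand-in for a model field

tri = Triangulation(lon, lat)  # Delaunay, since no connectivity is given
tri.set_mask(TriAnalyzer(tri).get_flat_tri_mask(0.10))  # hide flat border slivers

reflon, reflat = np.meshgrid(np.linspace(lon.min(), lon.max(), 200),
                             np.linspace(lat.min(), lat.max(), 200))
val_interp = LinearTriInterpolator(tri, val)(reflon, reflat)  # masked array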
def buildTriangulation(self, x, y):
    # qgis_qhull_fails is assumed to be a module-level flag set elsewhere
    if qgis_qhull_fails:
        trig = self._buildtrig_workaround(x, y)
    else:
        trig = Triangulation(x, y)
    analyzer = TriAnalyzer(trig)
    mask = analyzer.get_flat_tri_mask()
    trig.set_mask(mask)
    return trig
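# _buildtrig_workaround is defined elsewhere in this class. As a purely
# hypothetical sketch of one such workaround: matplotlib's internal qhull call
# can fail on nearly-degenerate point sets, and a common dodge is to run the
# Delaunay step through scipy with joggled input and hand the simplices to
# Triangulation explicitly.
import numpy as np
from scipy.spatial import Delaunay
from matplotlib.tri import Triangulation

def buildtrig_workaround(x, y):
    points = np.column_stack([x, y])
    # 'QJ' joggles the input so qhull copes with degenerate configurations
    simplices = Delaunay(points, qhull_options='QJ').simplices
    return Triangulation(x, y, triangles=simplices)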
def plot_magnetic_field_contour(self, ax=None):
    if ax is None:
        fig, ax = plt.subplots()
    else:
        fig = plt.gcf()
    ax.set_aspect('equal')

    from matplotlib.tri import Triangulation, TriAnalyzer, UniformTriRefiner
    import matplotlib.cm as cm

    element_to_magnetic_field = self.magnetic_field_per_element()

    x = []
    y = []
    Z = []
    for group in self.mesh.elements_groups:
        for element in group.elements:
            x_center, y_center = element.center
            x.append(x_center)
            y.append(y_center)
            Z.append(element_to_magnetic_field[element].Norm())

    tri = Triangulation(x, y)

    #-----------------------------------------------------------------------------
    # Improving the triangulation before high-res plots: removing flat triangles
    #-----------------------------------------------------------------------------
    # Masking badly shaped triangles at the border of the triangular mesh.
    # Note: -1 keeps every triangle; raise it (e.g. to 0.01) to actually mask.
    min_circle_ratio = -1
    mask = TriAnalyzer(tri).get_flat_tri_mask(min_circle_ratio)
    tri.set_mask(mask)

    # refining the data
    refiner = UniformTriRefiner(tri)
    subdiv = 3
    tri_refi, z_test_refi = refiner.refine_field(Z, subdiv=subdiv)

    levels = npy.arange(0., 1., 0.05)  # npy is this module's numpy alias
    cmap = cm.get_cmap(name='Blues', lut=None)
    ax.tricontour(tri_refi, z_test_refi, levels=levels,
                  #cmap=cmap,
                  linewidths=[2.0, 0.5, 1.0, 0.5])
    # ax.triplot(tri_refi, color='0.97')
    # ax.triplot(tri, color='0.7')
    # ax.tricontour(x, y, Z)
    return ax
def plot_samples(num):
    # Assumes BM_sampling_method, experiment_res, levels and cmap are defined
    # elsewhere in this module, and multivariate_normal comes from scipy.stats.
    samples_hundred = BM_sampling_method(num)
    tri = Triangulation(samples_hundred[0], samples_hundred[1])

    random_gen = np.random.RandomState(seed=127260)
    init_mask_frac = 0.0
    min_circle_ratio = .01
    subdiv = 3

    ntri = tri.triangles.shape[0]
    print('hi')  # debug
    mask_init = np.zeros(ntri, dtype=bool)  # np.bool is deprecated
    masked_tri = random_gen.randint(0, ntri, int(ntri * init_mask_frac))
    mask_init[masked_tri] = True
    tri.set_mask(mask_init)
    print('hey')  # debug

    z_exp = experiment_res(tri.x, tri.y)
    print(z_exp)

    mask = TriAnalyzer(tri).get_flat_tri_mask(min_circle_ratio)
    tri.set_mask(mask)

    # refining the data
    refiner = UniformTriRefiner(tri)
    tri_refi, z_test_refi = refiner.refine_field(z_exp, subdiv=subdiv)

    # analytical 'results' for comparison
    z_expected = experiment_res(tri_refi.x, tri_refi.y)

    plt.tricontour(tri_refi, z_expected, levels=levels, cmap=cmap,
                   linestyles='--')
    plt.show()

    x, y = np.mgrid[-1:1:0.001, -1:1:0.001]
    pos = np.empty(x.shape + (2,))
    pos[:, :, 0] = x
    pos[:, :, 1] = y
    rv = multivariate_normal([0, 0], [[1.0, 0.0], [0.0, 1.0]])
    plt.contour(x, y, rv.pdf(pos))
CS = ax.tricontour(tri, z, breaks, linewidths=[0.5, 0.25], colors='saddlebrown')
plt.clabel(CS, inline=True, fontsize=8)
ax.set_ylabel('X[m]')
ax.set_xlabel('Y[m]')
plt.grid()
plt.savefig('plot.png', dpi=300)
plt.show()

# Creating a hypsometric map
matplotlib.rcParams['contour.negative_linestyle'] = 'solid'
tri = Triangulation(y, x)
mask = TriAnalyzer(tri).get_flat_tri_mask(0.02)
tri.set_mask(mask)

fig, ax = plt.subplots()
fig.set_size_inches(10, 15)
ax.tick_params(labelsize=15)
ax.yaxis.set_major_formatter(FormatStrFormatter('%d'))
ax.xaxis.set_major_formatter(FormatStrFormatter('%d'))
ax.set_aspect('equal')
ax.set_title("Mapa warstwicowa")  # "Contour map"

CS = ax.tricontourf(tri, z, cmap='RdBu')
# Label the contour lines rather than the filled set, so clabel has lines to
# attach to (the original labelled the tricontourf result).
CS2 = ax.tricontour(tri, z, breaks, linewidths=[0.5, 0.25], colors='saddlebrown')
plt.clabel(CS2, inline=True, fontsize=10)
ax.set_ylabel('X[m]')
ax.set_xlabel('Y[m]')
plt.grid()
plt.savefig('plotHipso.png', dpi=300)
def spatial_autocorr_fft(tri, U, V, N_grid=512, auto=False, transform=False,
                         tree=None, interp='Lanczos'):
    """
    Estimate the autocorrelation from a single increment in cartesian
    coordinates (tau, eta).

    Input:
    -----
        tri       - Delaunay triangulation object of the unstructured grid.
        U, V      - Arrays with cartesian components of wind speed.
        N_grid    - Resolution of the squared, structured grid the FFT is
                    applied on.
        auto      - If True, also return the autocorrelation functions.
        transform - If True, rotate the field into the mean-wind frame first.
        tree, interp - Interpolation options; anything other than 'cubic'
                    currently falls back to linear interpolation.

    Output:
    ------
        r_u, r_v  - 2D arrays with the autocorrelation function rho(tau, eta)
                    for U and V, respectively (only returned when auto=True),
                    followed by the spectra Suu, Svv, Suv and wavenumbers
                    k1, k2.
    """
    if transform:
        U_mean = avetriangles(np.c_[tri.x, tri.y], U, tri)
        V_mean = avetriangles(np.c_[tri.x, tri.y], V, tri)
        # Wind direction
        gamma = np.arctan2(V_mean, U_mean)
        # Components in matrix of coefficients
        S11 = np.cos(gamma)
        S12 = np.sin(gamma)
        T = np.array([[S11, S12], [-S12, S11]])
        vel = np.array(np.c_[U, V]).T
        vel = np.dot(T, vel)
        X = np.array(np.c_[tri.x, tri.y]).T
        X = np.dot(T, X)
        U = vel[0, :]
        V = vel[1, :]
        tri = Triangulation(X[0, :], X[1, :])
        mask = TriAnalyzer(tri).get_flat_tri_mask(.05)
        tri = Triangulation(tri.x, tri.y, triangles=tri.triangles[~mask])
        U_mean = avetriangles(np.c_[tri.x, tri.y], U, tri)
        V_mean = avetriangles(np.c_[tri.x, tri.y], V, tri)
    else:
        # Demeaning
        U_mean = avetriangles(np.c_[tri.x, tri.y], U, tri)
        V_mean = avetriangles(np.c_[tri.x, tri.y], V, tri)

    grid = np.meshgrid(np.linspace(np.min(tri.x), np.max(tri.x), N_grid),
                       np.linspace(np.min(tri.y), np.max(tri.y), N_grid))
    U = U - U_mean
    V = V - V_mean

    # Interpolate the wind field onto a squared structured grid
    if interp == 'cubic':
        U_int = CubicTriInterpolator(tri, U)(grid[0].flatten(),
                                             grid[1].flatten()).data
        V_int = CubicTriInterpolator(tri, V)(grid[0].flatten(),
                                             grid[1].flatten()).data
    else:
        # U_int = lanczos_int_sq(grid, tree, U)
        # V_int = lanczos_int_sq(grid, tree, V)
        U_int = LinearTriInterpolator(tri, U)(grid[0].flatten(),
                                              grid[1].flatten()).data
        V_int = LinearTriInterpolator(tri, V)(grid[0].flatten(),
                                              grid[1].flatten()).data
    U_int = np.reshape(U_int, grid[0].shape)
    V_int = np.reshape(V_int, grid[0].shape)

    # Zero padding of NaNs outside the convex hull
    U_int[np.isnan(U_int)] = 0.0
    V_int[np.isnan(V_int)] = 0.0

    fftU = np.fft.fft2(U_int)
    fftV = np.fft.fft2(V_int)

    if auto:
        # Autocorrelation via the Wiener-Khinchin relation
        r_u = np.real(np.fft.fftshift(np.fft.ifft2(np.absolute(fftU)**2))) \
              / len(U_int.flatten())
        r_v = np.real(np.fft.fftshift(np.fft.ifft2(np.absolute(fftV)**2))) \
              / len(U_int.flatten())
        r_uv = np.real(np.fft.fftshift(np.fft.ifft2(np.real(
            fftU * np.conj(fftV))))) / len(U_int.flatten())

    dx = np.max(np.diff(grid[0].flatten()))
    dy = np.max(np.diff(grid[1].flatten()))
    n = grid[0].shape[0]
    m = grid[1].shape[0]

    # Spectra
    fftU = np.fft.fftshift(fftU)
    fftV = np.fft.fftshift(fftV)
    fftUV = fftU * np.conj(fftV)
    Suu = 2 * (np.abs(fftU)**2) * (dx * dy) / (n * m)
    Svv = 2 * (np.abs(fftV)**2) * (dx * dy) / (n * m)
    Suv = 2 * np.real(fftUV) * (dx * dy) / (n * m)

    k1 = np.fft.fftshift(np.fft.fftfreq(n, d=dx))
    k2 = np.fft.fftshift(np.fft.fftfreq(m, d=dy))

    if auto:
        return (r_u, r_v, r_uv, Suu, Svv, Suv, k1, k2)
    else:
        return (Suu, Svv, Suv, k1, k2)
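# Why the |FFT|^2 step above yields the autocorrelation (Wiener-Khinchin):
# a minimal self-contained check on a structured grid, independent of the
# triangulation machinery. The circular autocorrelation of f is
# ifft2(|fft2(f)|^2), normalized here by the sample count as in the function.
import numpy as np

f = np.random.default_rng(0).normal(size=(64, 64))
r = np.real(np.fft.fftshift(np.fft.ifft2(np.abs(np.fft.fft2(f))**2))) / f.size

# At lag (0, 0) the circular autocorrelation is just the mean square of f,
# which after fftshift sits at the center of the map.
assert np.allclose(r[32, 32], np.mean(f**2))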
mask = tri_y == 0  # t=0 are not valid datapoints
tri_y = tri_y[~mask]
tri_x = tri_x[~mask]
tri_z = tri_z[~mask]
# }}}
tri = Triangulation(tri_x, tri_y)
# {{{ refining the data -- see
# https://matplotlib.org/3.1.0/gallery/images_contours_and_fields/tricontour_smooth_delaunay.html#sphx-glr-gallery-images-contours-and-fields-tricontour-smooth-delaunay-py
# I don't see a difference in the refined (tri_refi) vs. unrefined (tri),
# but I'm quite possibly missing something, or it's more helpful in other cases
refiner = UniformTriRefiner(tri)
subdiv = 3  # Number of recursive subdivisions of the initial mesh for smooth
            # plots. Values >3 might result in a very high number of triangles
            # for the refine mesh: new triangles numbering = (4**subdiv)*ntri
tri_refi, tri_z_refi = refiner.refine_field(tri_z, subdiv=subdiv)
# get_flat_tri_mask returns True for the flat triangles to hide, so it is
# passed to set_mask uninverted; set_mask mutates in place and returns None,
# so its result must not be assigned back. The threshold must lie in [0, 0.5]
# (the original value of 10 flagged every triangle), which is why no
# difference was visible above.
mask = TriAnalyzer(tri_refi).get_flat_tri_mask(0.01)
tri_refi.set_mask(mask)
# }}}

# pylab-style names (figure, plot, triplot, ...) assumed imported;
# `presentation` and `f_axis` are defined earlier in this script.
figure(figsize=(5, 15), facecolor=(1, 1, 1, 0))
if not presentation:
    plot(tri_x, tri_y, 'o', color='k', alpha=0.3)
    triplot(tri, color='k', alpha=0.3)
tricontourf(tri, tri_z,
            levels=linspace(tri_z.min(), tri_z.max(), 100),
            #cmap=cm,
            )
colorbar()
xlabel('frequency\n($\\nu_{\\mu w}-%0.5f$ GHz)/ kHz' % (f_axis.mean()/1e9))
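# A quick standalone illustration of the set_mask semantics fixed above:
# get_flat_tri_mask marks the slivers with True, and set_mask mutates the
# triangulation rather than returning a new one.
import numpy as np
from matplotlib.tri import Triangulation, TriAnalyzer

x = np.array([0.0, 1.0, 0.0, 1.0, 50.0])  # the far point creates long slivers
y = np.array([0.0, 0.0, 1.0, 1.0, 0.5])
t = Triangulation(x, y)
flat = TriAnalyzer(t).get_flat_tri_mask(0.1)
print(flat)              # True marks the sliver triangles to hide
print(t.set_mask(flat))  # prints None: set_mask mutates, it does not return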
plt.figure(figsize=[6.4, 3.8])

# Loop through each time step and plot results
# (wlv, ucur, vcur, timeindays, lon, lat and the reflon/reflat target grid
#  are read further up in this script)
for ind in range(0, len(timeindays)):

    dt = datetime.datetime.combine(datetime.date(1990, 1, 1),
                                   datetime.time(0, 0)) \
         + datetime.timedelta(days=timeindays[ind])
    dstr = dt.strftime('%Y%m%d%H:%M:%S')
    dstr = dstr[0:8] + ' ' + dstr[8:17]
    print('Plotting ' + dstr)

    par = np.double(wlv[ind, :])
    par2 = np.double(ucur[ind, :])
    par3 = np.double(vcur[ind, :])

    # NB: the triangulation does not change between time steps, so this block
    # could be hoisted out of the loop.
    flatness = 0.10  # flatness ranges over 0-0.5; 0.5 is an equilateral triangle
    tri = Triangulation(lon, lat)
    mask = TriAnalyzer(tri).get_flat_tri_mask(flatness)
    tri.set_mask(mask)

    tli = LinearTriInterpolator(tri, par)
    par_interp = tli(reflon, reflat)
    tli2 = LinearTriInterpolator(tri, par2)
    par2_interp = tli2(reflon, reflat)
    tli3 = LinearTriInterpolator(tri, par3)
    par3_interp = tli3(reflon, reflat)

    # Set up a Mercator projection basemap
    plt.clf()
    #m=Basemap(projection='merc',llcrnrlon=reflon.min(),urcrnrlon=reflon.max(),\
    #          llcrnrlat=reflat.min(),urcrnrlat=reflat.max(),resolution='h')
    #x,y=m(reflon,reflat)
    x = reflon
def __init__(self, **kwargs):
    super(ContourMap, self).__init__(**kwargs)

    n_test = 200  # Number of test data points, tested from 3 to 5000 for subdiv=3

    subdiv = 3  # Number of recursive subdivisions of the initial mesh for smooth
                # plots. Values >3 might result in a very high number of triangles
                # for the refine mesh: new triangles numbering = (4**subdiv)*ntri

    init_mask_frac = 0.0  # Float > 0. adjusting the proportion of
                          # (invalid) initial triangles which will be masked
                          # out. Enter 0 for no mask.

    min_circle_ratio = .01  # Minimum circle ratio - border triangles with circle
                            # ratio below this will be masked if they touch a
                            # border. Suggested value 0.01; use -1 to keep
                            # all triangles.

    # Random points
    random_gen = np.random.RandomState(seed=127260)
    x_test = random_gen.uniform(-1., 1., size=n_test)
    y_test = random_gen.uniform(-1., 1., size=n_test)
    z_test = experiment_res(x_test, y_test)

    # meshing with Delaunay triangulation
    tri = Triangulation(x_test, y_test)
    ntri = tri.triangles.shape[0]

    # Some invalid data are masked out
    mask_init = np.zeros(ntri, dtype=bool)  # np.bool is deprecated
    masked_tri = random_gen.randint(0, ntri, int(ntri * init_mask_frac))
    mask_init[masked_tri] = True
    tri.set_mask(mask_init)

    #-----------------------------------------------------------------------------
    # Improving the triangulation before high-res plots: removing flat triangles
    #-----------------------------------------------------------------------------
    # masking badly shaped triangles at the border of the triangular mesh.
    mask = TriAnalyzer(tri).get_flat_tri_mask(min_circle_ratio)
    tri.set_mask(mask)

    # refining the data
    refiner = UniformTriRefiner(tri)
    tri_refi, z_test_refi = refiner.refine_field(z_test, subdiv=subdiv)

    # analytical 'results' for comparison
    z_expected = experiment_res(tri_refi.x, tri_refi.y)

    # for the demo: loading the 'flat' triangles for plot
    flat_tri = Triangulation(x_test, y_test)
    flat_tri.set_mask(~mask)

    #-----------------------------------------------------------------------------
    # Now the plots
    #-----------------------------------------------------------------------------
    # User options for plots
    plot_tri = True          # plot of base triangulation
    plot_masked_tri = True   # plot of excessively flat excluded triangles
    plot_refi_tri = False    # plot of refined triangulation
    plot_expected = False    # plot of analytical function values for comparison

    # Graphical options for tricontouring
    levels = np.arange(0., 1., 0.025)
    cmap = cm.get_cmap(name='Blues', lut=None)

    plt.figure()
    plt.gca().set_aspect('equal')
    plt.title("Filtering a Delaunay mesh\n"
              "(application to high-resolution tricontouring)")

    # 1) plot of the refined (computed) data contours:
    plt.tricontour(tri_refi, z_test_refi, levels=levels, cmap=cmap,
                   linewidths=[2.0, 0.5, 1.0, 0.5])
    # 2) plot of the expected (analytical) data contours (dashed):
    if plot_expected:
        plt.tricontour(tri_refi, z_expected, levels=levels, cmap=cmap,
                       linestyles='--')
    # 3) plot of the fine mesh on which interpolation was done:
    if plot_refi_tri:
        plt.triplot(tri_refi, color='0.97')
    # 4) plot of the initial 'coarse' mesh:
    if plot_tri:
        plt.triplot(tri, color='0.7')
    # 5) plot of the unvalidated triangles from the naive Delaunay triangulation:
    if plot_masked_tri:
        plt.triplot(flat_tri, color='red')

    plt.show()
def plot_wnd(storm, datafile1, datafile2=None):
    # Single-file netCDF reading
    ncf = datafile1
    nco = netCDF4.Dataset(ncf)

    # Get fields to plot
    lon = nco.variables['x'][:]
    lat = nco.variables['y'][:]
    timeinsec = nco.variables['time'][:]
    uwnd = nco.variables['windx'][:]
    vwnd = nco.variables['windy'][:]
    triangles = nco.variables['element'][:, :]

    reflon = np.linspace(lon.min(), lon.max(), 1000)
    reflat = np.linspace(lat.min(), lat.max(), 1000)
    #reflon=np.linspace(-80.40, -74.75, 1000)
    #reflat=np.linspace(32.50, 36.60, 1000)
    #reflon=np.linspace(-75.70, -71.05, 1000)
    #reflat=np.linspace(38.50, 41.40, 1000)
    #reflon=np.linspace(-80.40, -73.35, 1000)
    #reflat=np.linspace(32.50, 39.50, 1000)
    #reflon=np.linspace(-85.30, -78.50, 1000)
    #reflat=np.linspace(23.00, 29.70, 1000)
    reflon, reflat = np.meshgrid(reflon, reflat)

    plt.figure(figsize=[6.4, 3.8])

    flatness = 0.10  # flatness ranges over 0-0.5; 0.5 is an equilateral triangle
    triangles = triangles - 1  # correct indices for Python's zero-based indexing
    tri = Triangulation(lon, lat, triangles=triangles)
    mask = TriAnalyzer(tri).get_flat_tri_mask(flatness)
    tri.set_mask(mask)

    # Loop through each time step and plot results
    for ind in range(0, len(timeinsec)):
        # NB: the best track does not change per time step and could be read
        # once, outside the loop.
        besttrack = pd.read_csv(PARMnsem + '/storms/' + STORM + '/best_track.txt',
                                header=None, skiprows=4, delim_whitespace=True)

        plt.clf()
        ax = plt.axes(projection=ccrs.Mercator())
        ax.set_extent([-100.00, -50.00, 4.00, 48.00], crs=ccrs.PlateCarree())
        #ax.set_extent([-80.40, -74.75, 32.50, 36.60], crs=ccrs.PlateCarree())
        #ax.set_extent([-80.40, -73.35, 32.50, 39.50], crs=ccrs.PlateCarree())
        #ax.set_extent([-85.30, -78.50, 23.00, 29.70], crs=ccrs.PlateCarree())

        dt = base_info.tide_spin_start_date \
             + datetime.timedelta(seconds=timeinsec[ind])
        dstr = dt.strftime('%Y%m%d%H:%M:%S')
        dstr = dstr[0:8] + ' ' + dstr[8:17]
        print('Plotting ' + dstr)

        par = np.sqrt(np.square(np.double(uwnd[ind, :])) +
                      np.square(np.double(vwnd[ind, :])))
        tli = LinearTriInterpolator(tri, par)
        par_interp = tli(reflon, reflat)

        # shading='auto': with X, Y and C all the same shape, shading='flat'
        # is rejected by Matplotlib >= 3.3
        plt.pcolormesh(reflon, reflat, par_interp, vmax=60.0, shading='auto',
                       cmap=plt.cm.jet, transform=ccrs.PlateCarree())
        cb = plt.colorbar()
        cb.ax.tick_params(labelsize=8)
        coast = cfeature.GSHHSFeature(scale='high', edgecolor='black',
                                      facecolor='none', linewidth=0.25)
        ax.add_feature(coast)
        plt.plot(besttrack.iloc[:, 3].values, besttrack.iloc[:, 2].values,
                 'k--', linewidth=1.0, transform=ccrs.PlateCarree())

        gl = ax.gridlines(crs=ccrs.PlateCarree(), draw_labels=True,
                          linewidth=2, color='gray', alpha=0.5, linestyle='--')
        gl.xlabels_top = False
        gl.ylabels_right = False
        gl.xlines = False
        gl.ylines = False
        gl.xformatter = LONGITUDE_FORMATTER
        gl.yformatter = LATITUDE_FORMATTER
        gl.xlabel_style = {'size': 6, 'color': 'black'}
        gl.ylabel_style = {'size': 6, 'color': 'black'}

        figtitle = storm.capitalize() + ': U10 (m/s): ' + dstr
        plt.title(figtitle)

        dtlabel = dt.strftime('%Y%m%d%H%M%S')
        dtlabel = dtlabel[0:8] + '_' + dtlabel[8:14]
        filenm = 'nsem_' + storm + '_wnd_' + dtlabel + '.png'
        plt.savefig(filenm, dpi=150, bbox_inches='tight', pad_inches=0.1)

        del par
        del par_interp
def plot_wlv(storm, datafile1, datafile2=None, lonmin=None, lonmax=None,
             latmin=None, latmax=None, domain=None):
    # Single-file netCDF reading
    ncf = datafile1
    nco = netCDF4.Dataset(ncf)
    ncf2 = datafile2
    nco2 = netCDF4.Dataset(ncf2)

    # Get fields to plot
    lon = nco.variables['longitude'][:]
    lat = nco.variables['latitude'][:]
    timeindays = nco.variables['time'][:]
    wlv = nco.variables['wlv'][:]
    triangles = nco.variables['tri'][:, :]
    dpt = nco2.variables['dpt'][:]

    if lonmin is not None:
        reflon = np.linspace(lonmin, lonmax, 1000)
        reflat = np.linspace(latmin, latmax, 1000)
    else:
        reflon = np.linspace(lon.min(), lon.max(), 1000)
        reflat = np.linspace(lat.min(), lat.max(), 1000)
    reflon, reflat = np.meshgrid(reflon, reflat)

    plt.figure(figsize=[6.4, 3.8])

    flatness = 0.10  # flatness ranges over 0-0.5; 0.5 is an equilateral triangle
    triangles = triangles - 1  # correct indices for Python's zero-based indexing
    tri = Triangulation(lon, lat, triangles=triangles)
    mask = TriAnalyzer(tri).get_flat_tri_mask(flatness)
    tri.set_mask(mask)

    # Loop through each time step and plot results
    for ind in range(0, len(timeindays)):
        # NB: the best track could be read once, outside the loop.
        besttrack = pd.read_csv(PARMnsem + '/storms/' + STORM + '/best_track.txt',
                                header=None, skiprows=4, delim_whitespace=True)

        plt.clf()
        ax = plt.axes(projection=ccrs.Mercator())
        if lonmin is not None:
            ax.set_extent([lonmin, lonmax, latmin, latmax],
                          crs=ccrs.PlateCarree())

        dt = datetime.datetime.combine(datetime.date(1990, 1, 1),
                                       datetime.time(0, 0)) \
             + datetime.timedelta(days=timeindays[ind])
        dstr = dt.strftime('%Y%m%d%H:%M:%S')
        dstr = dstr[0:8] + ' ' + dstr[8:17]
        print('Plotting ' + dstr)

        par = np.double(wlv[ind, :])
        tli = LinearTriInterpolator(tri, par)
        par_interp = tli(reflon, reflat)

        # shading='auto': with X, Y and C all the same shape, shading='flat'
        # is rejected by Matplotlib >= 3.3
        plt.pcolormesh(reflon, reflat, par_interp, vmin=-2.0, vmax=2.0,
                       shading='auto', cmap=plt.cm.bwr,
                       transform=ccrs.PlateCarree())
        cb = plt.colorbar()
        cb.ax.tick_params(labelsize=8)
        coast = cfeature.GSHHSFeature(scale='high', edgecolor='black',
                                      facecolor='none', linewidth=0.25)
        ax.add_feature(coast)
        plt.plot(besttrack.iloc[:, 3].values, besttrack.iloc[:, 2].values,
                 'k--', linewidth=1.0, transform=ccrs.PlateCarree())

        gl = ax.gridlines(crs=ccrs.PlateCarree(), draw_labels=True,
                          linewidth=2, color='gray', alpha=0.5, linestyle='--')
        gl.xlabels_top = False
        gl.ylabels_right = False
        gl.xlines = False
        gl.ylines = False
        gl.xformatter = LONGITUDE_FORMATTER
        gl.yformatter = LATITUDE_FORMATTER
        gl.xlabel_style = {'size': 6, 'color': 'black'}
        gl.ylabel_style = {'size': 6, 'color': 'black'}

        figtitle = storm.capitalize() + ': WL (m MSL): ' + dstr
        plt.title(figtitle)

        dtlabel = dt.strftime('%Y%m%d%H%M%S')
        dtlabel = dtlabel[0:8] + '_' + dtlabel[8:14]
        filenm = 'nsem_' + storm + '_wlv_' + dtlabel + '_' + domain + '.png'
        plt.savefig(filenm, dpi=150, bbox_inches='tight', pad_inches=0.1)

        del par
        del par_interp
def density(model, refinement=0):
    """
    Create a Voronoi mesh and calculate the local particle density on its
    vertices.

    The local density is calculated as follows: for each vertex, compute the
    density of each neighbouring region as one over its area and assign the
    average of the neighbouring densities to the vertex.

    Parameters
    ----------
    model : simulation.builder.Model
        the Model object containing the particle positions
    refinement : int (default: 0)
        number of subdivisions for refining the mesh (0 == None)

    Returns
    -------
    tri : matplotlib.tri.Triangulation
        the triangulation mesh (refined if requested)
    vert_density : numpy.array
        the array containing the local density associated with the tri mesh

    Example
    -------
    To plot the result using matplotlib use:

    .. code-block:: python

        import matplotlib.pyplot as plt

        tri, density = data_proc.density(model)
        plt.tricontour(tri, density)   # to draw contours
        plt.tricontourf(tri, density)  # to draw filled contours
        plt.show()

    Note
    ----
    As of now, the numerical results may not be quantitatively accurate
    but should qualitatively represent the density.
    """
    vor = Voronoi(model.pos)  # from scipy.spatial, as is ConvexHull below
    n_vert = vor.vertices.shape[0]
    vert_density = np.zeros(n_vert)  # density vector
    reg_num = np.zeros(n_vert)       # nbr of regions per vertex --> averaging

    for point_index, reg in enumerate(vor.point_region):
        vertices = vor.regions[reg]
        if vertices:
            if -1 not in vertices:
                # NB: for a 2D hull, .volume is the enclosed area
                # (.area would be the perimeter)
                area = ConvexHull(vor.vertices[vertices]).volume
                vert_density[vertices] += 1 / area  # makes it a density (sort-of)
                reg_num[vertices] += 1
    vert_density /= reg_num  # averaging

    # getting rid of really ugly border points
    new_vert, vert_density = (
        vor.vertices[vor.vertices[:, 0] >= np.min(model.pos[:, 0])],
        vert_density[vor.vertices[:, 0] >= np.min(model.pos[:, 0])])
    new_vert, vert_density = (
        new_vert[new_vert[:, 0] <= np.max(model.pos[:, 0])],
        vert_density[new_vert[:, 0] <= np.max(model.pos[:, 0])])
    new_vert, vert_density = (
        new_vert[new_vert[:, 1] >= np.min(model.pos[:, 1])],
        vert_density[new_vert[:, 1] >= np.min(model.pos[:, 1])])
    new_vert, vert_density = (
        new_vert[new_vert[:, 1] <= np.max(model.pos[:, 1])],
        vert_density[new_vert[:, 1] <= np.max(model.pos[:, 1])])

    # for triangulation refinement
    tri2 = Triangulation(*new_vert.T)
    if refinement:
        tri2.set_mask(TriAnalyzer(tri2).get_flat_tri_mask(0.1))
        refiner = UniformTriRefiner(tri2)
        print(len(tri2.neighbors), vert_density.shape)  # debug output
        tri, vert_density = refiner.refine_field(vert_density,
                                                 subdiv=refinement)
    else:
        tri = tri2
    return tri, vert_density
def mesh_2D(fname, var=None, flabels=None, fformat='csv', xlabel='X axis',
            ylabel='Y axis', vmins=None, output_path=None):
    """Visualization of a specific variable on a user-provided 2D mesh.

    The provided mesh should contain two columns (x, y coordinates for each
    mesh point) and be one of :func:`batman.input_output.available_formats`.
    (x, y) must be respectively the first and second column. Any other column
    is treated as an extra variable and will be used to plot a figure. If
    :attr:`var` is not `None`, its content will be used as plotting variables.

    :param str fname: name of mesh file.
    :param array_like var: data to be plotted, shape (n_coords, n_vars).
    :param list(str) flabels: names of the variables.
    :param str fformat: format of the mesh file.
    :param str xlabel: name of the x-axis.
    :param str ylabel: name of the y-axis.
    :param lst(double) vmins: value of the minimal output for data filtering.
    :param str output_path: name of the output path.
    :returns: figure.
    :rtype: Matplotlib figure instances.
    """
    # Read the mesh file
    io = formater(fformat)
    mesh = io.read(fname)

    if var is not None:
        var = np.asarray(var)
    else:
        var = mesh[:, 2:]
    if flabels is None:
        flabels = ['y' + str(i) for i in range(var.shape[1])]

    # Input variables
    var_len = var.shape[0]
    if var_len != len(mesh):
        raise ValueError('Variable size not equal: Variable {} - Mesh {}'
                         .format(var_len, len(mesh)))
    if vmins is None:
        vmins = [None] * var.shape[1]  # one vmin per output variable,
                                       # not per mesh point as before

    # Meshing with Delaunay triangulation
    tri = Triangulation(mesh[:, 0], mesh[:, 1])

    # Masking badly shaped triangles at the border of the triangular mesh
    mask = TriAnalyzer(tri).get_flat_tri_mask(0.01)
    tri.set_mask(mask)

    # Loop over input parameters
    figs, axs = [], []
    for i, _ in enumerate(var[0]):
        fig, ax = plt.subplots()
        figs.append(fig)
        axs.append(ax)
        cmap = cm.viridis
        cmap.set_bad(alpha=0.0)
        cmap.set_under('w', alpha=0.0)
        plt.tricontourf(tri, var[:, i], antialiased=True, cmap=cmap,
                        vmin=vmins[i])
        plt.xlabel(xlabel)
        plt.ylabel(ylabel)
        plt.tick_params(axis='x')
        plt.tick_params(axis='y')
        cbar = plt.colorbar()
        cbar.set_label(flabels[i])
        cbar.ax.tick_params()

    bat.visualization.save_show(output_path, figs, extend='neither')

    return figs, axs
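# A hypothetical call of mesh_2D, assuming a CSV mesh file whose columns are
# x, y, pressure -- the file name, label, and paths here are illustrative only:
figs, axs = mesh_2D('wing_surface.csv', flabels=['pressure'],
                    xlabel='chord [m]', ylabel='span [m]',
                    output_path='./figures')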
def extract_wlv(storm, datafile1, stations, df,
                lonmin=None, lonmax=None, latmin=None, latmax=None):
    print('Reading', datafile1)

    # Single-file netCDF reading
    #ncf='ww3.field.2018_wnd.nc'
    ncf = datafile1
    nco = netCDF4.Dataset(ncf)
    #ncf2='ww3.field.2018_dp.nc'
    #ncf2=datafile2
    #nco2=netCDF4.Dataset(ncf2)

    # Get fields to extract
    lon = nco.variables['x'][:]
    lat = nco.variables['y'][:]
    pnt_lon = stations.iloc[0, :].values
    pnt_lat = stations.iloc[1, :].values
    timeinsec = nco.variables['time'][:]
    #base_date=nco.variables['time:base_date']
    #print(base_date)
    wlv = nco.variables['zeta'][:]
    triangles = nco.variables['element'][:, :]
    #timeindays2=nco2.variables['time'][:]
    #dir=nco2.variables['dp'][:]

    # NB: this regular grid is a leftover from the plotting variant of this
    # routine; the extraction below interpolates at the station points only.
    if lonmin is not None:
        reflon = np.linspace(lonmin, lonmax, 1000)
        reflat = np.linspace(latmin, latmax, 1000)
    else:
        reflon = np.linspace(lon.min(), lon.max(), 1000)
        reflat = np.linspace(lat.min(), lat.max(), 1000)
    reflon, reflat = np.meshgrid(reflon, reflat)

    flatness = 0.10  # flatness ranges over 0-0.5; 0.5 is an equilateral triangle
    triangles = triangles - 1  # correct indices for Python's zero-based indexing
    tri = Triangulation(lon, lat, triangles=triangles)
    mask = TriAnalyzer(tri).get_flat_tri_mask(flatness)
    tri.set_mask(mask)

    # Read obs point locations
    #data=np.loadtxt('erie_ndbc.loc', comments = '$')
    #buoylon=data[:,0]
    #buoylat=data[:,1]

    df_dates = pd.DataFrame(columns=['Date'])

    # Loop through each time step and extract results
    for ind in range(0, len(timeinsec)):
        #plt.clf()
        #ax = plt.axes(projection=ccrs.Mercator())

        dt = base_info.tide_spin_start_date \
             + datetime.timedelta(seconds=timeinsec[ind])
        dstr = dt.strftime('%Y%m%d%H:%M:%S')
        dstr = dstr[0:8] + ' ' + dstr[8:17]
        print('Extracting ' + dstr)

        par = np.double(wlv[ind, :])
        tli = LinearTriInterpolator(tri, par)
        par_interp = tli(pnt_lon, pnt_lat)
        #print(par_interp)

        df.loc[len(df)] = par_interp
        df_dates.loc[len(df_dates)] = pd.to_datetime(dt,
                                                     format="%Y-%m-%d %H:%M:%S")
        #line = pd.to_datetime(dt, format="%Y-%m-%d %H:%M:%S")
        #new_row = pd.DataFrame(par_interp, columns=stations.columns, index=line)
        #df = pd.concat([df, pd.DataFrame(new_row)], ignore_index=False)

        del par
        del par_interp

    df['Date'] = df_dates
    df.set_index('Date', inplace=True)
    return df
def extract_wnd(storm, datafile1, stations, df,
                lonmin=None, lonmax=None, latmin=None, latmax=None):
    # Single-file netCDF reading
    #ncf='ww3.field.2018_wnd.nc'
    ncf = datafile1
    nco = netCDF4.Dataset(ncf)
    #ncf2='ww3.field.2018_dp.nc'
    #ncf2=datafile2
    #nco2=netCDF4.Dataset(ncf2)

    # Get fields to extract
    lon = nco.variables['longitude'][:]
    lat = nco.variables['latitude'][:]
    pnt_lon = stations.iloc[0, :].values
    pnt_lat = stations.iloc[1, :].values
    timeindays = nco.variables['time'][:]
    uwnd = nco.variables['uwnd'][:]
    vwnd = nco.variables['vwnd'][:]
    #timeindays2=nco2.variables['time'][:]
    #dir=nco2.variables['dp'][:]

    # NB: this regular grid is a leftover from the plotting variant of this
    # routine; the extraction below interpolates at the station points only.
    if lonmin is not None:
        reflon = np.linspace(lonmin, lonmax, 1000)
        reflat = np.linspace(latmin, latmax, 1000)
    else:
        reflon = np.linspace(lon.min(), lon.max(), 1000)
        reflat = np.linspace(lat.min(), lat.max(), 1000)
    reflon, reflat = np.meshgrid(reflon, reflat)

    flatness = 0.10  # flatness ranges over 0-0.5; 0.5 is an equilateral triangle
    tri = Triangulation(lon, lat)
    mask = TriAnalyzer(tri).get_flat_tri_mask(flatness)
    tri.set_mask(mask)

    # Read obs point locations
    #data=np.loadtxt('erie_ndbc.loc', comments = '$')
    #buoylon=data[:,0]
    #buoylat=data[:,1]

    # Leftovers from the plotting variant -- nothing is drawn here:
    #plt.figure(figsize=[6.4, 3.8])

    df_dates = pd.DataFrame(columns=['Date'])

    # Loop through each time step and extract results
    for ind in range(0, len(timeindays)):
        #plt.clf()
        #ax = plt.axes(projection=ccrs.Mercator())

        dt = datetime.datetime.combine(datetime.date(1990, 1, 1),
                                       datetime.time(0, 0)) \
             + datetime.timedelta(days=timeindays[ind])
        dstr = dt.strftime('%Y%m%d%H:%M:%S')
        dstr = dstr[0:8] + ' ' + dstr[8:17]
        print('Extracting ' + dstr)

        par = np.sqrt(np.square(np.double(uwnd[ind, :])) +
                      np.square(np.double(vwnd[ind, :])))
        #par2=np.double(dir[ind,:])
        tli = LinearTriInterpolator(tri, par)
        par_interp = tli(pnt_lon, pnt_lat)
        #print(par_interp)

        df.loc[len(df)] = par_interp
        df_dates.loc[len(df_dates)] = pd.to_datetime(dt,
                                                     format="%Y-%m-%d %H:%M:%S")
        #line = pd.to_datetime(dt, format="%Y-%m-%d %H:%M:%S")
        #new_row = pd.DataFrame(par_interp, columns=stations.columns, index=line)
        #df = pd.concat([df, pd.DataFrame(new_row)], ignore_index=False)

        del par
        del par_interp

    df['Date'] = df_dates
    df.set_index('Date', inplace=True)
    return df
def plot_cur(storm, datafile1, datafile2=None):
    # Single-file netCDF reading
    ncf = datafile1
    nco = netCDF4.Dataset(ncf)
    #ncf2=datafile2
    #nco2=netCDF4.Dataset(ncf2)

    # Get fields to plot
    lon = nco.variables['longitude'][:]
    lat = nco.variables['latitude'][:]
    timeindays = nco.variables['time'][:]
    ucur = nco.variables['ucur'][:]
    vcur = nco.variables['vcur'][:]
    triangles = nco.variables['tri'][:, :]
    #timeindays2=nco2.variables['time'][:]
    #dir=nco2.variables['dp'][:]

    reflon = np.linspace(lon.min(), lon.max(), 1000)
    reflat = np.linspace(lat.min(), lat.max(), 1000)
    #reflon=np.linspace(-80.40, -74.75, 1000)
    #reflat=np.linspace(32.50, 36.60, 1000)
    #reflon=np.linspace(-75.70, -71.05, 1000)
    #reflat=np.linspace(38.50, 41.40, 1000)
    reflon, reflat = np.meshgrid(reflon, reflat)

    plt.figure(figsize=[6.4, 3.8])

    flatness = 0.10  # flatness ranges over 0-0.5; 0.5 is an equilateral triangle
    triangles = triangles - 1  # correct indices for Python's zero-based indexing
    tri = Triangulation(lon, lat, triangles=triangles)
    mask = TriAnalyzer(tri).get_flat_tri_mask(flatness)
    tri.set_mask(mask)

    # Loop through each time step and plot results
    for ind in range(0, len(timeindays)):
        plt.clf()
        ax = plt.axes(projection=ccrs.Mercator())

        dt = datetime.datetime.combine(datetime.date(1990, 1, 1),
                                       datetime.time(0, 0)) \
             + datetime.timedelta(days=timeindays[ind])
        dstr = dt.strftime('%Y%m%d%H:%M:%S')
        dstr = dstr[0:8] + ' ' + dstr[8:17]
        print('Plotting ' + dstr)

        par = np.sqrt(np.square(np.double(ucur[ind, :])) +
                      np.square(np.double(vcur[ind, :])))
        #par2=np.double(dir[ind,:])
        tli = LinearTriInterpolator(tri, par)
        par_interp = tli(reflon, reflat)
        #u=np.cos(np.pi/180*(270-par_interp))
        #v=np.sin(np.pi/180*(270-par_interp))

        # shading='auto': with X, Y and C all the same shape, shading='flat'
        # is rejected by Matplotlib >= 3.3
        plt.pcolormesh(reflon, reflat, par_interp, vmin=0.0, vmax=2.0,
                       shading='auto', cmap=plt.cm.jet,
                       transform=ccrs.PlateCarree())
        #plt.contourf(reflon, reflat, par_interp, vmax=60.0, cmap=plt.cm.jet, transform=ccrs.PlateCarree())
        cb = plt.colorbar()
        cb.ax.tick_params(labelsize=8)

        # rowskip/colskip are used only by the commented-out quiver below
        #rowskip=np.floor(par_interp.shape[0]/25)
        #colskip=np.floor(par_interp.shape[1]/25)
        rowskip = 50
        colskip = 50
        #plt.quiver(reflon[0::rowskip,0::colskip],reflat[0::rowskip,0::colskip],\
        #           u[0::rowskip,0::colskip],v[0::rowskip,0::colskip], \
        #           scale = 50, color='black',pivot='middle',units='xy',alpha=0.7)

        coast = cfeature.GSHHSFeature(scale='high', edgecolor='black',
                                      facecolor='none', linewidth=0.25)
        ax.add_feature(coast)
        plt.xticks(fontsize=9)
        plt.yticks(fontsize=9)

        figtitle = storm.capitalize() + ': Cur (m/s): ' + dstr
        plt.title(figtitle)

        dtlabel = dt.strftime('%Y%m%d%H%M%S')
        dtlabel = dtlabel[0:8] + '_' + dtlabel[8:14]
        filenm = 'nsem_' + storm + '_cur_' + dtlabel + '.png'
        plt.savefig(filenm, dpi=150, bbox_inches='tight', pad_inches=0.1)

        del par
        del par_interp
# meshing with Delaunay triangulation
tri = Triangulation(x_test, y_test)
ntri = tri.triangles.shape[0]

# Some invalid data are masked out
mask_init = np.zeros(ntri, dtype=bool)
masked_tri = random_gen.randint(0, ntri, int(ntri * init_mask_frac))
mask_init[masked_tri] = True
tri.set_mask(mask_init)

#-----------------------------------------------------------------------------
# Improving the triangulation before high-res plots: removing flat triangles
#-----------------------------------------------------------------------------
# masking badly shaped triangles at the border of the triangular mesh.
mask = TriAnalyzer(tri).get_flat_tri_mask(min_circle_ratio)
tri.set_mask(mask)

# refining the data
refiner = UniformTriRefiner(tri)
tri_refi, z_test_refi = refiner.refine_field(z_test, subdiv=subdiv)

# analytical 'results' for comparison
z_expected = experiment_res(tri_refi.x, tri_refi.y)

# for the demo: loading the 'flat' triangles for plot
flat_tri = Triangulation(x_test, y_test)
flat_tri.set_mask(~mask)

#-----------------------------------------------------------------------------
def plot_cur(storm, datafile1, datafile2=None):
    # Single-file netCDF reading
    ncf = datafile1
    nco = netCDF4.Dataset(ncf)
    #ncf2=datafile2
    #nco2=netCDF4.Dataset(ncf2)

    # Get fields to plot
    lon = nco.variables['longitude'][:]
    lat = nco.variables['latitude'][:]
    timeindays = nco.variables['time'][:]
    ucur = nco.variables['ucur'][:]
    vcur = nco.variables['vcur'][:]
    triangles = nco.variables['tri'][:, :]
    #timeindays2=nco2.variables['time'][:]
    #dir=nco2.variables['dp'][:]

    reflon = np.linspace(lon.min(), lon.max(), 1000)
    reflat = np.linspace(lat.min(), lat.max(), 1000)
    #reflon=np.linspace(-80.40, -73.35, 1000)
    #reflat=np.linspace(32.50, 39.50, 1000)
    reflon, reflat = np.meshgrid(reflon, reflat)

    plt.figure(figsize=[6.4, 3.8])

    flatness = 0.10  # flatness ranges over 0-0.5; 0.5 is an equilateral triangle
    triangles = triangles - 1  # correct indices for Python's zero-based indexing
    tri = Triangulation(lon, lat, triangles=triangles)
    mask = TriAnalyzer(tri).get_flat_tri_mask(flatness)
    tri.set_mask(mask)

    # Loop through each time step and plot results
    for ind in range(0, len(timeindays)):
        # NB: the best track could be read once, outside the loop.
        besttrack = pd.read_csv(PARMnsem + '/storms/' + STORM + '/best_track.txt',
                                header=None, skiprows=4, delim_whitespace=True)

        plt.clf()
        ax = plt.axes(projection=ccrs.Mercator())
        ax.set_extent([-100.00, -50.00, 4.00, 48.00], crs=ccrs.PlateCarree())
        #ax.set_extent([-80.40, -73.35, 32.50, 39.50], crs=ccrs.PlateCarree())

        dt = base_info.tide_spin_start_date \
             + datetime.timedelta(days=timeindays[ind])
        dstr = dt.strftime('%Y%m%d%H:%M:%S')
        dstr = dstr[0:8] + ' ' + dstr[8:17]
        print('Plotting ' + dstr)

        par = np.sqrt(np.square(np.double(ucur[ind, :])) +
                      np.square(np.double(vcur[ind, :])))
        #par2=np.double(dir[ind,:])
        tli = LinearTriInterpolator(tri, par)
        par_interp = tli(reflon, reflat)
        #u=np.cos(np.pi/180*(270-par_interp))
        #v=np.sin(np.pi/180*(270-par_interp))

        # shading='auto': with X, Y and C all the same shape, shading='flat'
        # is rejected by Matplotlib >= 3.3
        plt.pcolormesh(reflon, reflat, par_interp, vmin=0.0, vmax=2.0,
                       shading='auto', cmap=plt.cm.jet,
                       transform=ccrs.PlateCarree())
        #plt.contourf(reflon, reflat, par_interp, vmax=60.0, cmap=plt.cm.jet, transform=ccrs.PlateCarree())
        cb = plt.colorbar()
        cb.ax.tick_params(labelsize=8)

        # rowskip/colskip are used only by the commented-out quiver below
        #rowskip=np.floor(par_interp.shape[0]/25)
        #colskip=np.floor(par_interp.shape[1]/25)
        rowskip = 50
        colskip = 50
        #plt.quiver(reflon[0::rowskip,0::colskip],reflat[0::rowskip,0::colskip],\
        #           u[0::rowskip,0::colskip],v[0::rowskip,0::colskip], \
        #           scale = 50, color='black',pivot='middle',units='xy',alpha=0.7)

        coast = cfeature.GSHHSFeature(scale='high', edgecolor='black',
                                      facecolor='none', linewidth=0.25)
        ax.add_feature(coast)
        plt.plot(besttrack.iloc[:, 3].values, besttrack.iloc[:, 2].values,
                 'k--', linewidth=1.0, transform=ccrs.PlateCarree())

        gl = ax.gridlines(crs=ccrs.PlateCarree(), draw_labels=True,
                          linewidth=2, color='gray', alpha=0.5, linestyle='--')
        gl.xlabels_top = False
        gl.ylabels_right = False
        gl.xlines = False
        gl.ylines = False
        gl.xformatter = LONGITUDE_FORMATTER
        gl.yformatter = LATITUDE_FORMATTER
        gl.xlabel_style = {'size': 6, 'color': 'black'}
        gl.ylabel_style = {'size': 6, 'color': 'black'}

        figtitle = storm.capitalize() + ': Cur (m/s): ' + dstr
        plt.title(figtitle)

        dtlabel = dt.strftime('%Y%m%d%H%M%S')
        dtlabel = dtlabel[0:8] + '_' + dtlabel[8:14]
        filenm = 'nsem_' + storm + '_cur_' + dtlabel + '.png'
        plt.savefig(filenm, dpi=150, bbox_inches='tight', pad_inches=0.1)

        del par
        del par_interp