def plot(self):
    self.data, lonwrap = addcyclic(self.data, self.lons)

    # Sort latitudes and data
    lat_idx = np.argsort(self.lats)
    self.lats = self.lats[lat_idx]
    self.data = self.data[lat_idx]

    data_lon_min = min(lonwrap)
    data_lon_max = max(lonwrap)
    data_lat_min = min(self.lats)
    data_lat_max = max(self.lats)

    new_lons = np.arange(data_lon_min - 1.0, data_lon_max + 1.0, 1.0)
    new_lats = np.arange(data_lat_min - 1.0, data_lat_max + 1.0, 1.0)

    x, y = self.m(*np.meshgrid(new_lons[:], new_lats[:]))

    # Two-pass interpolation to deal with the mask.
    # The first pass does bilinear, the second does nearest-neighbour
    # interpolation. It's not clear this is working, and the problem is
    # likely solved by ensuring the right mask is used!
    data_bl = interp(self.data, lonwrap[:], self.lats[:], x, y, order=1)
    data_nn = interp(self.data, lonwrap[:], self.lats[:], x, y, order=0)
    data_bl[data_nn.mask == 1] = data_nn[data_nn.mask == 1]

    if 'color_levels' in self.parameters:
        self.__print_custom_color_plot(x, y, data_bl)
    else:
        self.__print_cmap_plot(x, y, data_bl)
    return self.main_render
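# The two-pass bilinear/nearest-neighbour pattern used above recurs in many
# of these snippets. A minimal, self-contained sketch of it on synthetic
# data follows; the grids and threshold here are illustrative only, and it
# assumes basemap's interp propagates an input mask through its masked-array
# arithmetic (which these snippets rely on).
import numpy as np
from mpl_toolkits.basemap import interp

src_lons = np.arange(0.0, 360.0, 2.0)                    # coarse 2-degree grid
src_lats = np.arange(-88.0, 90.0, 2.0)
src = np.ma.masked_less(np.random.rand(src_lats.size, src_lons.size), 0.1)

tgt_x, tgt_y = np.meshgrid(np.arange(0.0, 359.0, 1.0),   # finer 1-degree grid
                           np.arange(-88.0, 89.0, 1.0))

out_bl = interp(src, src_lons, src_lats, tgt_x, tgt_y, masked=True, order=1)
out_nn = interp(src, src_lons, src_lats, tgt_x, tgt_y, masked=True, order=0)
# Bilinear masks every target cell that touches a masked input; fill those
# cells from the nearest-neighbour pass where it has a value.
fill = np.ma.getmaskarray(out_bl) & ~np.ma.getmaskarray(out_nn)
out_bl[fill] = out_nn[fill]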
def interpVel(p, t):
    # p packs particle positions as [x1..xn, y1..yn]; integer division keeps
    # the slice indices ints under Python 3.
    n = len(p) // 2
    x = p[0:n]
    y = p[n:2 * n]
    u = interp(vx, X[0, :], Y[:, 0], x, y)
    v = interp(vy, X[0, :], Y[:, 0], x, y)
    return numpy.append(u, v)
def extract_transect(self, dataout_line, data=None, data_x=None, data_y=None):
    if data == 'depth':
        data = self.depth
    elif data == 'strike':
        data = self.strike
    elif data == 'dip':
        data = self.dip
    elif data is None:
        print("Error in extract transect: need to specify input for 'data'")
    if data_x is None:
        data_x = self.x
    if data_y is None:
        data_y = self.y
    # Two-pass interpolation: bilinear first, then nearest neighbour to
    # patch points the bilinear pass leaves masked.
    dataout1 = interp(data, data_x, data_y,
                      dataout_line[:, 0], dataout_line[:, 1], order=1)
    dataout2 = interp(data, data_x, data_y,
                      dataout_line[:, 0], dataout_line[:, 1], order=0)
    for i in range(0, np.size(dataout1)):
        if dataout1[i] is np.ma.masked:
            if dataout2[i] is not np.ma.masked:
                dataout1[i] = dataout2[i]
            else:
                # Average the nearest unmasked values to the right and left.
                r = i
                while dataout2[r] is np.ma.masked and r < np.size(dataout1) - 1:
                    r += 1
                try:
                    right = dataout2[r]
                except IndexError:
                    pass
                l = i
                while dataout2[l - 1] is np.ma.masked:
                    l -= 1
                try:
                    left = dataout2[l - 1]
                except IndexError:
                    pass
                dataout1[i] = np.average([right, left])
    return dataout1
def regrid(x, y, arr, inc_by=2):
    """Regrid a 2d array, increasing its resolution."""
    ny, nx = arr.shape
    xi = np.linspace(x.min(), x.max(), inc_by * len(x))
    yi = np.linspace(y.min(), y.max(), inc_by * len(y))
    xx, yy = np.meshgrid(xi, yi)
    arr = np.ma.masked_invalid(arr)
    arr1 = bm.interp(arr, x, y, xx, yy, order=0)  # nearest neighbour
    arr2 = bm.interp(arr, x, y, xx, yy, order=1)  # linear interp.
    # Patch zeros in the linear result with nearest-neighbour values.
    ind = np.where(arr2 == 0)  # <<<<< check!
    try:
        arr2[ind] = arr1[ind]
    except Exception:
        pass
    return [xi, yi, arr2]
def generic_regrid(datain, lats_in, lons_in, lats_out, lons_out, order,
                   long_second=True):
    """Takes a geo-gridded variable and regrids it to a different grid."""
    # long_second flags that the second dimension of datain is longitude.
    from mpl_toolkits import basemap
    import numpy as np
    if not long_second:
        data_in = datain.T
    else:
        data_in = datain
    # Triplicate the field in longitude so interpolation works across the
    # dateline wrap.
    lons_tri = np.concatenate([lons_in - 360, lons_in, lons_in + 360])
    data_tri = np.concatenate([data_in, data_in, data_in], axis=1)
    lats, lons = np.meshgrid(lats_out, lons_out)
    # regrided_data = basemap.interp(data_tri, lats_in, lons_tri, lats, lons, order=order)
    regrided_data = basemap.interp(data_tri, lons_tri, lats_in,
                                   lons, lats, order=order).T
    return regrided_data
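# A hypothetical call to generic_regrid above, taking a synthetic 2-degree
# field to 1 degree; the grids and data are made up for illustration.
import numpy as np

lats_in = np.arange(-89.0, 90.0, 2.0)
lons_in = np.arange(0.0, 360.0, 2.0)
field = np.random.rand(lats_in.size, lons_in.size)    # (lat, lon)

lats_out = np.arange(-89.5, 90.0, 1.0)
lons_out = np.arange(0.5, 360.0, 1.0)
field_1deg = generic_regrid(field, lats_in, lons_in, lats_out, lons_out, order=1)
print(field_1deg.shape)   # (180, 360)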
def interpolate(data, navlon, navlat, interp=None):
    """
    Perform a spatial interpolation if required; return x_reg, y_reg, data_reg.
    data : raw data
    navlon : longitude
    navlat : latitude
    interp : if None, return data with coordinates in meters; if 'basemap',
             return data interpolated using basemap from mpl_toolkits, also
             with coordinates in meters.
    """
    e1, e2 = _e1e2(navlon, navlat)
    x1d_in = e1[0, :].cumsum() - e1[0, 0]
    y1d_in = e2[:, 0].cumsum() - e2[0, 0]
    x2d_in, y2d_in = np.meshgrid(x1d_in, y1d_in)
    if interp is None:
        return x2d_in, y2d_in, data
    elif interp == 'basemap':  # only for rectangular grid...
        from mpl_toolkits import basemap
        x1d_reg = np.linspace(x1d_in[0], x1d_in[-1], len(x1d_in))
        y1d_reg = np.linspace(y1d_in[0], y1d_in[-1], len(y1d_in))
        x2d_reg, y2d_reg = np.meshgrid(x1d_reg, y1d_reg)
        data_reg = basemap.interp(data, x1d_in, y1d_in, x2d_reg, y2d_reg,
                                  checkbounds=False, order=1)
        return x2d_reg, y2d_reg, data_reg
    else:
        raise ValueError('Your choice of interp is not available in this script.')
def extened_grid(zi, x1, y1, zoom=2):
    '''
    Extend the grid region; zoom is the refinement factor.
    '''
    nx = np.size(x1)
    ny = np.size(y1)
    x2 = np.linspace(x1.min(), x1.max(), nx * zoom)
    y2 = np.linspace(y1.min(), y1.max(), ny * zoom)
    xi, yi = np.meshgrid(x2, y2)
    # Interpolation method 1: scipy's zoom
    # from scipy import ndimage
    # z2 = ndimage.interpolation.zoom(zi[:, :], zoom)
    # Interpolation method 2: basemap.interp
    from mpl_toolkits.basemap import interp
    z2 = interp(zi, x1, y1, xi, yi, checkbounds=True, masked=False, order=1)
    # Interpolation method 3: interpolate.RectBivariateSpline --
    # bivariate spline approximation over a rectangular mesh
    # from scipy import interpolate
    # sp = interpolate.RectBivariateSpline(y1, x1, zi, kx=1, ky=1, s=0)
    # z2 = sp(y2, x2)
    print('extend shapes:=', z2.shape, xi.shape, yi.shape)
    return z2, xi, yi, x2, y2, nx * zoom, ny * zoom
def interpolate(data, navlon, navlat, interp=None):
    """
    interpolate(data, navlon, navlat, interp=None)

    Perform a spatial interpolation if required; return x_reg, y_reg, data_reg.
    data : raw data
    navlon : longitude
    navlat : latitude
    interp : if None, return data with coordinates in meters; if 'basemap',
             return data interpolated using basemap from mpl_toolkits, also
             with coordinates in meters.
    """
    e1, e2 = e1e2(navlon, navlat)  # ideally we would like e1u and not e1t...
    x1d_in = e1[0, :].cumsum() - e1[0, 0]
    y1d_in = e2[:, 0].cumsum() - e2[0, 0]
    x2d_in, y2d_in = np.meshgrid(x1d_in, y1d_in)
    if interp is None or interp == '0':
        return x2d_in, y2d_in, data.copy()
    elif interp == 'basemap':  # only for rectangular grid...
        from mpl_toolkits import basemap
        x1d_reg = np.linspace(x1d_in[0], x1d_in[-1], len(x1d_in))
        y1d_reg = np.linspace(y1d_in[0], y1d_in[-1], len(y1d_in))
        x2d_reg, y2d_reg = np.meshgrid(x1d_reg, y1d_reg)
        data_reg = basemap.interp(data, x1d_in, y1d_in, x2d_reg, y2d_reg,
                                  checkbounds=False, order=3)
        return x2d_reg, y2d_reg, data_reg
def interpdata(data, lats, lons):
    '''
    Interpolate data to a 1-degree grid.
    Input data must have 3 dimensions: time, lat, lon.

    :param data: data with 3 dimensions
    :type data: numpy array
    :param lats: latitudes to interpolate to
    :type lats: 1d numpy array
    :param lons: longitudes to interpolate to
    :type lons: 1d numpy array
    '''
    # Build the 1-degree grid
    newlats = np.linspace(-90, 90, 181)
    newlons = np.linspace(-180, 179, 360)
    x, y = np.meshgrid(newlons, newlats)
    # Interpolate the data
    newdata = np.empty((int(data.shape[0]), int(len(newlats)), int(len(newlons))))
    for i in range(0, int(data.shape[0])):
        newdata[i, :, :] = interp(data[i, :, :], lons, lats, x, y, order=1)
    return newdata, newlats, newlons
def highResolutionGrid(lats, lons, grid, **options):
    from mpl_toolkits.basemap import interp
    from mpl_toolkits.basemap import maskoceans
    # interpolate data to higher resolution grid in order to better match
    # the builtin land/sea mask. Output looks less 'blocky' near coastlines.
    ## rbf = Rbf(lons[0], lats[:,0], map_val, epsilon=2) ##
    nlats = 5 * lats.shape[0]
    nlons = 5 * lats.shape[1]
    interp_lons = N.linspace(N.min(lons), N.max(lons), nlons)
    interp_lats = N.linspace(N.min(lats), N.max(lats), nlats)
    interp_lons, interp_lats = N.meshgrid(interp_lons, interp_lats)
    # interpolated high resolution data grid
    interp_grid = interp(grid, lons[0], lats[:, 0], interp_lons, interp_lats)  ## map_val to rbf
    # interpolate land/sea mask to data grid, then mask nodes in ocean
    if options.get('mask_coastlines', True):
        interp_grid = maskoceans(interp_lons, interp_lats, interp_grid,
                                 resolution=options['shape_resolution'],
                                 grid=1.25, inlands=False)
        interp_grid[interp_grid == -999] = N.nan
    return interp_lons, interp_lats, interp_grid
def regrid(lon, lat, dhdt, factor=10):
    m, n = len(lon), len(lat)
    lon2 = np.linspace(lon[0], lon[-1], m * factor)
    lat2 = np.linspace(lat[0], lat[-1], n * factor)
    xx, yy = np.meshgrid(lon2, lat2)
    dhdt2 = interp(dhdt, lon, lat, xx, yy, order=1)  # good!!!
    return lon2, lat2, dhdt2
def _refinement(self, lons, lats, v, refinementFactor=2):
    """
    Returns a refinement of the data

    Parameters
    ----------
    lons : numpy.ndarray
        Array of longitudes
    lats : numpy.ndarray
        Array of latitudes
    v : numpy.ndarray
        3D tracer concentration vector
    refinementFactor : int, default: 2
        Refinement factor

    Returns
    -------
    tuple [numpy.ndarray]
        Tuple with a
        - numpy array including the refined longitudes,
        - numpy array including the refined latitudes,
        - numpy array including the refined tracer concentration
    """
    xFine = np.linspace(lons[0], lons[-1], lons.shape[0] * refinementFactor)
    yFine = np.linspace(lats[0], lats[-1], lats.shape[0] * refinementFactor)
    lonsFine, latsFine = np.meshgrid(xFine, yFine)
    vFine = interp(v, lons, lats, lonsFine, latsFine, order=1)
    return (lonsFine, latsFine, vFine)
def make_cross_section(data3d, x, y, z, **kwargs):
    try:
        start_point, end_point = kwargs['start_point'], kwargs['end_point']
        npoints = kwargs['npoints']
        xout = np.linspace(start_point[0], end_point[0], npoints)
        yout = np.linspace(start_point[1], end_point[1], npoints)
        return3 = True
    except KeyError:
        try:
            xout = kwargs['xout']
            yout = kwargs['yout']
            npoints = len(xout)
            return3 = False
        except KeyError:
            raise ValueError('Must define either xout and yout or '
                             'start_point, end_point and npoints')
    section = np.empty((npoints, len(z)))
    for iz in range(len(z)):
        # note the flip of dimensions
        line = basemap.interp(data3d[:, :, iz].T, x, y, xout, yout)
        section[:, iz] = line
    if return3:
        return xout, yout, section
    else:
        return section
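# A hypothetical call to make_cross_section above, assuming data3d is indexed
# (x, y, z) as the transpose inside the loop implies; the values are
# synthetic, and basemap is assumed importable as in the snippet.
import numpy as np

cx = np.linspace(0.0, 100.0, 50)
cy = np.linspace(0.0, 50.0, 40)
cz = np.linspace(0.0, 10.0, 20)
field3d = np.random.rand(cx.size, cy.size, cz.size)

xs, ys, section = make_cross_section(field3d, cx, cy, cz,
                                     start_point=(10.0, 5.0),
                                     end_point=(90.0, 45.0), npoints=100)
print(section.shape)   # (100, 20): npoints along the line, one column per level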
def interpolate(data, navlon, navlat, interp=None):
    """Perform a spatial interpolation if required;
    return x_reg, y_reg, data_reg.
    """
    e1, e2 = e1e2(navlon, navlat)  # ideally we would like e1u and not e1t...
    x1d_in = e1[0, :].cumsum() - e1[0, 0]
    y1d_in = e2[:, 0].cumsum() - e2[0, 0]
    x2d_in, y2d_in = npy.meshgrid(x1d_in, y1d_in)
    if interp is None or interp == '0':
        return x2d_in, y2d_in, data.copy()
    elif interp == 'basemap':  # only for rectangular grid...
        from mpl_toolkits import basemap
        x1d_reg = npy.linspace(x1d_in[0], x1d_in[-1], len(x1d_in))
        y1d_reg = npy.linspace(y1d_in[0], y1d_in[-1], len(y1d_in))
        x2d_reg, y2d_reg = npy.meshgrid(x1d_reg, y1d_reg)
        data_reg = basemap.interp(data, x1d_in, y1d_in, x2d_reg, y2d_reg,
                                  checkbounds=False, order=1)
        return x2d_reg, y2d_reg, data_reg
    elif interp == 'scipy':  # only for rectangular grid...
        import scipy.interpolate
        x1d_reg = npy.linspace(x1d_in[0], x1d_in[-1], len(x1d_in))
        y1d_reg = npy.linspace(y1d_in[0], y1d_in[-1], len(y1d_in))
        x2d_reg, y2d_reg = npy.meshgrid(x1d_reg, y1d_reg)
        interp = scipy.interpolate.interp2d(x1d_in, y1d_in, data, kind='linear')
        a1d = interp(x2d_reg[0, :], y2d_reg[:, 0])
        data_reg = npy.reshape(a1d, y2d_reg.shape)
        # test_plot(x2d_in, y2d_in, data)
        # test_plot(x2d_reg, y2d_reg, data_reg)
        return x2d_reg, y2d_reg, data_reg
def extend_interp(datafield):
    # add masked values at southernmost end
    southernlimitmask = ma.masked_all(len(self.olon))
    olat_ext = np.append(-82.1, self.olat)
    dfield_ext = ma.concatenate([ma.column_stack(southernlimitmask), datafield], 0)
    # f = interp2d(self.olon, olat_ext, dfield_ext)
    # return f(self.pismlon, self.pismlat)
    return interp(dfield_ext, self.olon, olat_ext, self.pismlon, self.pismlat)
def interpolate(self, xout, yout, srs=None, checkbounds=False, masked=True,
                order=1):
    """
    Interpolate grid.
    Note: masked values will be replaced with NaN values

    :param xout: array, X coordinates in native SRS or :param:`srs`
    :param yout: array, Y coordinates in native SRS or :param:`srs`
    :param srs: osr SpatialReference object, SRS of output coordinates
        (default: None)
    :param checkbounds: bool, whether or not values of xout and yout are
        checked to see that they are within the range of the grid. If True,
        points falling outside the grid are masked if :param:`masked` is
        True, else they are clipped to the boundary of the grid
        (default: False)
    :param masked: bool, whether or not points outside the range of the grid
        are masked (default: True)
    :param order: int, type of interpolation, 0=nearest neighbor, 1=bilinear,
        3=cubic spline (default: 1)
    :return: array, interpolated grid values
    """
    ## Check scipy.interpolate.Rbf for additional interpolation methods
    from mpl_toolkits.basemap import interp
    from mapping.geotools.coordtrans import transform_mesh_coordinates

    ## xin, yin must be linearly increasing
    values = self.values
    if self.lon0 < self.lon1:
        xin = np.linspace(self.lon0, self.lon1, self.ncols)
    else:
        xin = np.linspace(self.lon1, self.lon0, self.ncols)
        values = values[:, ::-1]
    if self.lat0 < self.lat1:
        yin = np.linspace(self.lat0, self.lat1, self.nrows)
    else:
        yin = np.linspace(self.lat1, self.lat0, self.nrows)
        values = values[::-1, :]

    ## Transform output coordinates to lon/lat coordinates if necessary
    if srs and srs != self.srs:
        xout, yout = transform_mesh_coordinates(self.srs, WGS84, xout, yout)

    out_data = interp(values, xin, yin, xout, yout,
                      checkbounds=checkbounds, masked=masked, order=order)
    if hasattr(out_data, 'mask'):
        out_data = out_data.filled(np.nan)
    return out_data
def __call__(self, array):
    masked = ma.is_masked(array)
    if self.method == 'basemap':
        return basemap.interp(array, self.xin, self.yin, self.xout, self.yout,
                              checkbounds=False, masked=masked, order=1)
    elif self.method == 'scipy':
        import scipy.interpolate
        interp = scipy.interpolate.interp2d(self.xin, self.yin, array,
                                            kind='linear')
        a1d = interp(self.xout[0, :], self.yout[:, 0])
        return npy.reshape(a1d, self.yout.shape)
def insert_monthly(var, var_target, source_nc, target_nc):
    from mpl_toolkits import basemap
    # create variable and attributes
    if var_target in target_nc.variables:
        var_target = target_nc[var_target]
    else:
        var_target = target_nc.createVariable(var_target, "f8",
                                              ("lat", "lon", "time"))
        # set attributes as same as original
        for attr in source_nc[var].ncattrs():
            var_target.setncattr(attr, str(getattr(source_nc[var], attr)))
    # insert into projections
    print("processing ", var)
    # change resolution
    lats_source = source_nc['lat'][:]
    lons_source = source_nc['lon'][:]
    lats_fine = target_nc['lat'][:]
    lons_fine = target_nc['lon'][:]
    lons_sub, lats_sub = np.meshgrid(lons_fine, lats_fine)
    if source_nc[var].shape in [(360, 720, 1, 12), (360, 720, 14, 12)]:
        for mo_ix, month in enumerate(target_nc['time'][:]):
            var_source = source_nc[var][:, :, 0, mo_ix]
            var_fine = basemap.interp(var_source, lons_source, lats_source,
                                      lons_sub, lats_sub, order=1)
            if np.ma.is_masked(var_fine):
                var_fine = var_fine.filled(fill_value=np.nan)
            # insert into projections
            var_target[:, :, mo_ix] = var_fine
    elif source_nc[var].shape in [(12, 37, 180, 360), (12, 57, 180, 360)]:
        # need to use the len(vert)-2 layer: 36 or 56
        vert_layer = len(source_nc['vert'][:]) - 2
        print('using vertical layer ', vert_layer, ' for ', var)
        for mo_ix, month in enumerate(target_nc['time'][:]):
            var_source = source_nc[var][mo_ix, vert_layer, :, :]
            var_fine = basemap.interp(var_source, lons_source, lats_source,
                                      lons_sub, lats_sub, order=1)
            if np.ma.is_masked(var_fine):
                var_fine = var_fine.filled(fill_value=np.nan)
            # insert into projections
            var_target[:, :, mo_ix] = var_fine
    else:
        print('shape not as expected')
def downscale_time_series(data, lats, lons, resolution=1):
    '''
    Downscale timeseries to a 1x1 lat/lon resolution for each timestep.
    Use bilinear interpolation as the default, but use nearest-neighbour
    infilling for any gridcells adjacent to areas of missing data.
    '''
    if lons.max() > 200:
        data, lons = shiftgrid(180, data, lons, start=False)
    lons_fine = np.arange(-179.5, 180, resolution)
    lats_fine = np.arange(-89.5, 90, resolution)
    lons_sub, lats_sub = np.meshgrid(lons_fine, lats_fine)
    month = []
    for i in range(data.shape[0]):
        fine_bilinear = interp(data[i], lons, lats, lons_sub, lats_sub,
                               checkbounds=False, masked=False, order=1)
        fine_nn = interp(data[i], lons, lats, lons_sub, lats_sub,
                         checkbounds=False, masked=False, order=0)
        fine_bilinear = np.ma.masked_invalid(fine_bilinear)
        mask = np.ma.getmask(fine_bilinear)
        fine = np.ma.where(mask, fine_nn, fine_bilinear)
        try:
            results = np.dstack((results, fine))
        except NameError:
            # First timestep: nothing to stack onto yet.
            results = fine
        month.append(i)
    results = np.ma.masked_invalid(results)
    return {'anoms': results, 'month': month,
            'lons': lons_fine, 'lats': lats_fine}
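# A hypothetical call to downscale_time_series above on synthetic 2.5-degree
# monthly anomalies. Note the results are stacked with np.dstack, so 'anoms'
# comes back ordered (lat, lon, time) rather than (time, lat, lon).
import numpy as np

lats = np.arange(-88.75, 90.0, 2.5)
lons = np.arange(-178.75, 180.0, 2.5)
anoms = np.random.rand(12, lats.size, lons.size)   # 12 monthly fields

out = downscale_time_series(anoms, lats, lons, resolution=1)
print(out['anoms'].shape)   # (180, 360, 12)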
def insert_annual(var, var_target, source_nc, target_nc):
    from mpl_toolkits import basemap
    # create variable and attributes
    if var_target in target_nc.variables:
        var_target = target_nc[var_target]
    else:
        var_target = target_nc.createVariable(var_target, "f8", ("lat", "lon"))
        # set attributes as same as original
        for attr in source_nc[var].ncattrs():
            var_target.setncattr(attr, str(getattr(source_nc[var], attr)))
    # insert into projections
    print("processing ", var)
    # change resolution
    lats_source = source_nc['lat'][:]
    lons_source = source_nc['lon'][:]
    lats_fine = target_nc['lat'][:]
    lons_fine = target_nc['lon'][:]
    lons_sub, lats_sub = np.meshgrid(lons_fine, lats_fine)
    if source_nc[var].shape in [(1, 2160, 4320)]:
        var_source = source_nc[var][0, :, :]
        var_fine = var_source
    elif source_nc[var].shape in [(1, 102, 180, 360)]:
        print('using level 100 for ', var)
        # have to use the len(vert)-2 level to get surface data (centered around 5m)
        var_source = source_nc[var][0, 100, :, :]
        var_fine = basemap.interp(var_source, lons_source, lats_source,
                                  lons_sub, lats_sub, order=1)
    elif source_nc[var].shape in [(360, 720)]:
        var_source = source_nc[var][:, :]
        var_fine = basemap.interp(var_source, lons_source, lats_source,
                                  lons_sub, lats_sub, order=1)
    else:
        print('shape not as expected')
    if np.ma.is_masked(var_fine):
        print("filling masked")
        var_fine = var_fine.filled(fill_value=np.nan)
    # insert into projections
    var_target[:, :] = var_fine
def setPtValues(self, aPt, chgLatLon=True):
    chged = False
    for varname, geoNc in self.ncDs.items():
        if chgLatLon or varname not in self.ncData:
            row, col, gridlon, gridlat = geoNc.get_row_col(aPt[LON], aPt[LAT])
            aPt[varname] = geoNc.ds.variables[varname][row, col]
            if chgLatLon and not chged:
                aPt[LON] = gridlon
                aPt[LAT] = gridlat
                chged = True
        else:
            rval = bm.interp(self.ncData[varname].astype(np.float64),
                             self.xGrid, self.yGrid,
                             np.array(aPt[LON]), np.array(aPt[LAT]),
                             checkbounds=False, masked=True, order=1)
            if np.ma.is_masked(rval):
                rval = bm.interp(self.ncData[varname],
                                 self.xGrid, self.yGrid,
                                 np.array(aPt[LON]), np.array(aPt[LAT]),
                                 checkbounds=False, masked=True, order=0)
            if np.ma.is_masked(rval):
                rval = geoNc.ds.variables[varname].missing_value
            aPt[varname] = rval
def interp_CRU(path, fname, long_new, lat_new, zip=True, dtype=None):
    """
    Extracts from a CRU file, interpolates it to a non-grid point set.
    """
    from mpl_toolkits import basemap
    long_old, lat_old, data = CRU_extract(path, fname, zip, dtype)
    N_new = len(long_new)
    out_vals = zeros(N_new, dtype=float)
    for i in range(N_new):
        out_vals[i] = basemap.interp(data, long_old, lat_old,
                                     long_new[i], lat_new[i], order=1)
    return out_vals
def regrid2d(arr3d, x, y, inc_by=2):
    """Regrid 2d time series (3d array) increasing resolution."""
    nt, ny, nx = arr3d.shape
    out = np.empty((nt, inc_by * ny, inc_by * nx), 'f8')
    xi = np.linspace(x.min(), x.max(), inc_by * len(x))
    yi = np.linspace(y.min(), y.max(), inc_by * len(y))
    xx, yy = np.meshgrid(xi, yi)
    arr3d = np.ma.masked_invalid(arr3d)
    for k, field in enumerate(arr3d):
        field1 = bm.interp(field, x, y, xx, yy, order=0)  # nearest neighbour
        field2 = bm.interp(field, x, y, xx, yy, order=1)  # linear
        ##########################################################
        # something "weird" when the field is zero
        ind = np.where(field2 == 0)  # <<<<< check!
        try:
            field2[ind] = field1[ind]
        except Exception:
            pass
        ##########################################################
        out[k] = field2
    return [out, xi, yi]
def resample_slice(slice_, grid_lon, grid_lat, order=1):
    """
    Resample a single time slice of a larger xr.DataArray

    :param slice_: xr.DataArray single slice
    :param grid_lon: meshgrid of longitudes for the new grid
    :param grid_lat: meshgrid of latitudes for the new grid
    :param order: interpolation method, 0 - nearest neighbour,
                  1 - bilinear (default), 3 - cubic spline
    :return: xr.DataArray, resampled slice
    """
    # Forward `order` to interp; the original dropped it, so the bilinear
    # default was always used regardless of the argument.
    result = basemap.interp(slice_.values, slice_['lon'].data,
                            slice_['lat'].data, grid_lon, grid_lat,
                            order=order)
    return xr.DataArray(result)
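# A hypothetical call to resample_slice above on a synthetic slice, assuming
# the snippet's module-level imports of basemap and xarray are in scope; the
# coordinate names 'lat' and 'lon' follow the function's assumptions.
import numpy as np
import xarray as xr

lat = np.arange(-60.0, 60.5, 1.0)
lon = np.arange(0.0, 360.0, 1.0)
slice_ = xr.DataArray(np.random.rand(lat.size, lon.size),
                      coords={'lat': lat, 'lon': lon}, dims=('lat', 'lon'))
grid_lon, grid_lat = np.meshgrid(np.arange(0.0, 359.5, 0.5),
                                 np.arange(-60.0, 60.5, 0.5))
fine = resample_slice(slice_, grid_lon, grid_lat)   # bilinear by default
print(fine.shape)   # (241, 719)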
def BasemapInterp(tlat, tlon, slat, slon, sdata):
    # Flip so latitudes increase, as basemap's interp requires; points
    # outside the source grid are set to -999.
    lat_new = np.flipud(slat)
    sdata_new = np.flipud(sdata)
    rada_nom = interp(sdata_new, slon, lat_new, tlon, tlat,
                      checkbounds=False, masked=-999., order=1)
    return rada_nom
def interpolation(self, data, ip=1):
    if ip <= 1:
        return self.x, self.y, data
    else:
        nx = np.arange(self.lonmin, self.lonmax + self.res / ip, self.res / ip)
        ny = np.arange(self.latmin, self.latmax + self.res / ip, self.res / ip)
        newx, newy = np.meshgrid(nx, ny)
        if self.trans:
            nx, ny = self.m(newx, newy)
        ndata = interp(data, self.x, self.y, newx, newy, order=3)
        return nx, ny, ndata
def griddata_nearest(x, y, z, xi, yi):
    x = x.astype(np.float32)
    y = y.astype(np.float32)
    z = z.astype(np.float32)
    xi = xi.astype(np.float32)
    yi = yi.astype(np.float32)
    (nx, ny) = xi.shape
    xi, yi = xi.flatten(), yi.flatten()
    from scipy.interpolate import griddata
    # griddata returns the interpolated values directly (unlike Rbf, which
    # returns a callable), so just reshape the result.
    vals = griddata((x, y), z, (xi, yi), method='nearest')
    zi = np.reshape(vals, (nx, ny))
    zi = zi.astype(np.float32)
    return zi
def resample_xy_grid(xyslice, oldlat, oldlon, newlat, newlon, masked=False):
    # xyslice is a data grid of dimensions (oldlat, oldlon)
    # newslice is an output data grid of dimensions (newlat, newlon)
    xout, yout = np.meshgrid(newlon, newlat)
    newslice = basemap.interp(xyslice, oldlon, oldlat, xout, yout,
                              checkbounds=False, masked=masked, order=1)
    return newslice
def interpolate(tecmap):
    """Interpolate TEC Map."""
    lat2 = np.linspace(tecmap.lat[0][0], tecmap.lat[-1][0],
                       tecmap.lat.shape[0] * 10)
    lon2 = np.linspace(tecmap.lon[0][0], tecmap.lon[0][-1],
                       tecmap.lon.shape[1] * 20)
    lon_inter, lat_inter = np.meshgrid(lon2, lat2)
    tecmap_inter = interp(tecmap.value, tecmap.lon[0],
                          np.flipud(tecmap.lat[:, 0]),
                          lon_inter, np.flipud(lat_inter), order=1)
    return lon_inter, lat_inter, tecmap_inter
def griddata_linear_rbf2(x, y, z, xi, yi, function='linear'):
    x = x.astype(np.float32)
    y = y.astype(np.float32)
    z = z.astype(np.float32)
    xi = xi.astype(np.float32)
    yi = yi.astype(np.float32)
    (nx, ny) = xi.shape
    xi, yi = xi.flatten(), yi.flatten()
    from scipy.interpolate import Rbf
    # Pass `function` through to Rbf; the original ignored the parameter and
    # always used the default (multiquadric).
    interp = Rbf(x, y, z, function=function, epsilon=1)
    zi = np.reshape(interp(xi, yi), (nx, ny))
    zi = zi.astype(np.float32)
    return zi
def _interpolate_structured(self, data, src_lat, src_lon, trg_lat, trg_lon,
                            order=0):
    """
    Interpolate structured data with the basemap.interp function.
    """
    reshaped_data = data.reshape((-1, data.shape[-2], data.shape[-1]))
    remapped_data = np.zeros(
        (reshaped_data.shape[0], trg_lat.shape[-2], trg_lat.shape[-1]))
    for i in range(reshaped_data.shape[0]):
        sliced_array = reshaped_data[i, :, :]
        remapped_data[i, :, :] = interp(sliced_array.T, src_lat, src_lon,
                                        trg_lat, trg_lon, order=order)
    remapped_shape = list(data.shape[:-2]) + list(remapped_data.shape[-2:])
    remapped_data = remapped_data.reshape(remapped_shape)
    remapped_data = np.atleast_2d(remapped_data)
    return remapped_data
def maskData(self, data, lon, lat, mlon, mlat, mask=None):
    """
    Apply a mask to a data array.

    Parameters
    ----------
    data : numpy.ndarray
    mask : numpy.ndarray or None

    Returns
    -------
    MaskedArray
    """
    self.x, self.y = self.basemap(*meshgrid(mlon, mlat))
    result = interp(data, lon, lat, self.x, self.y)
    return MaskedArray(result, mask=mask)
def do_stuff(year):
    rg_field = np.nan * np.zeros((12, len(sub_gpcc_lat), len(sub_gpcc_lon)))
    dates = []
    for m in np.arange(0, 12):
        dates.append(dt.datetime(year, m + 1, 1))
        # read in tamsat file
        tam_f = (dir_tam + str(year) + '/' + mon_string(m + 1) + '/rfe' +
                 str(year) + '_' + mon_string(m + 1) + '.v3.1.nc')
        nc_fid = Dataset(tam_f, 'r')
        field = np.array(nc_fid.variables['rfe'][:]).squeeze()
        field[field < 0] = np.nan
        field_units = str(nc_fid.variables['rfe'].units)
        nc_fid.close()
        rg_field[m, :, :] = basemap.interp(np.flip(field, axis=0), tam_lon,
                                           np.flip(tam_lat), regrid[0],
                                           regrid[1], order=1)
    # save all data as netcdf
    nc_outfile = dir_out + 'TAMSATv3.1_monthly_1d_gpcc_' + str(year) + '.nc'
    dataset = Dataset(nc_outfile, 'w', format='NETCDF3_CLASSIC')
    lat = dataset.createDimension('lat', len(sub_gpcc_lat))  # create lat (dims depend on region)
    lon = dataset.createDimension('lon', len(sub_gpcc_lon))  # create lon
    time = dataset.createDimension('time', 12)  # create time
    # create variables
    var_out = dataset.createVariable('rfe', 'd', ('time', 'lat', 'lon'))
    latitudes = dataset.createVariable('latitude', 'f', ('lat',))
    longitudes = dataset.createVariable('longitude', 'f', ('lon',))
    times = dataset.createVariable('time', np.float64, ('time',))
    # Global attributes (will need modified accordingly)
    dataset.description = 'TAMSATv3.1 monthly total rainfall regridded to GPCC 1.0d grid'
    dataset.history = 'Created ' + tt.ctime(tt.time())
    dataset.source = 'Subset by M. Young'
    # Variable attributes
    latitudes.units = 'degrees_north'
    longitudes.units = 'degrees_east'
    var_out.units = field_units
    times.units = time_units
    times.calendar = 'gregorian'
    # Fill variables with data
    latitudes[:] = sub_gpcc_lat
    longitudes[:] = sub_gpcc_lon
    var_out[:] = rg_field
    times[:] = date2num(dates, units=time_units, calendar=times.calendar)
    dataset.close()
    return []
def linear_interpolate_for_regrid(lon_list_in_grid, lat_list_in_grid,
                                  lon_list_out_grid, lat_list_out_grid,
                                  input_array):
    lat_length, lon_length = input_array.shape
    lon_array, lat_array = np.meshgrid(lon_list_out_grid, lat_list_out_grid)
    output_array = np.zeros(
        (lat_list_out_grid.shape[0], lon_list_out_grid.shape[0]))
    output_array = basemap.interp(input_array, lon_list_in_grid,
                                  lat_list_in_grid, lon_array, lat_array,
                                  order=1)
    return output_array
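# A hypothetical call to the 2D linear_interpolate_for_regrid above; the
# grids and data are synthetic. Output points falling outside the input grid
# are clipped by basemap.interp's defaults (checkbounds=False, masked=False).
import numpy as np

lat_in = np.arange(-89.0, 90.0, 2.0)
lon_in = np.arange(0.0, 360.0, 2.0)
coarse = np.random.rand(lat_in.size, lon_in.size)

lat_out = np.arange(-89.5, 90.0, 1.0)
lon_out = np.arange(0.0, 359.0, 1.0)
fine = linear_interpolate_for_regrid(lon_in, lat_in, lon_out, lat_out, coarse)
print(fine.shape)   # (180, 359)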
def interpolate_scatter1(self):
    '''
    step 1: transform [lon1,lon2,...,lonN], [lat1,lat2,...,latN]
    to a spatial array
    :return:
    '''
    csv = this_root + 'data\\bio_diversity\\ellis_2012_l8_dataset_2012_01_17.dbf.csv'
    data = pd.read_csv(csv)
    x = data['X']
    y = data['Y']
    val = data['N']
    # xx = np.linspace(-180, 179.5, 720)
    xx = np.arange(-180, 179.5, 1)
    # yy = np.linspace(-90, 89.5, 360)
    yy = np.arange(-90, 89.5, 1)[::-1]
    xi, yi = np.meshgrid(xx, yy)
    function = 'linear'
    # ------------------------------------------------#
    # 'multiquadric': sqrt((r/self.epsilon)**2 + 1)   #
    # 'inverse': 1.0/sqrt((r/self.epsilon)**2 + 1)    #
    # 'gaussian': exp(-(r/self.epsilon)**2)           #
    # 'linear': r                                     #
    # 'cubic': r**3                                   #
    # 'quintic': r**5                                 #
    # 'thin_plate': r**2 * log(r)                     #
    # ------------------------------------------------#
    print('interpolating1')
    interp = Rbf(x, y, val, function=function)
    print('interpolating2')
    zi = interp(xi, yi)
    print('saving')
    np.save(self.this_class_arr + 'bio_diversity_arr_1_degree_non_clip', zi)
    plt.imshow(zi, 'jet')
    plt.colorbar()
    plt.show()
def extened_grid(self, zi, x1, y1, zoom):
    nx = np.size(x1)
    ny = np.size(y1)
    x2 = np.linspace(x1.min(), x1.max(), nx * zoom)
    y2 = np.linspace(y1.min(), y1.max(), ny * zoom)
    xi, yi = np.meshgrid(x2, y2)
    from mpl_toolkits.basemap import interp
    z2 = interp(zi, x1, y1, xi, yi, checkbounds=True, masked=False, order=1)
    return z2, xi, yi, x2, y2, nx * zoom, ny * zoom
def linear_interpolate_for_regrid(lon_list_in_grid, lat_list_in_grid,
                                  lon_list_out_grid, lat_list_out_grid,
                                  input_array):
    time_length, z_length, lat_length, lon_length = input_array.shape
    lon_array, lat_array = np.meshgrid(lon_list_out_grid, lat_list_out_grid)
    output_array = np.zeros((time_length, z_length,
                             lat_list_out_grid.shape[0],
                             lon_list_out_grid.shape[0]))
    for tim in range(time_length):
        for z in range(z_length):
            input_array_2d = np.squeeze(input_array[tim, z, ...])
            output_array[tim, z, ...] = basemap.interp(
                input_array_2d, lon_list_in_grid, lat_list_in_grid,
                lon_array, lat_array, order=1)
    return output_array
def reproject_data(location, varname, map, lonname='lon', latname='lat',
                   step=1, xsize=100, ysize=100, filter=np.nan):
    nc = Dataset(location)
    latvar = nc.variables[latname]
    lonvar = nc.variables[lonname]
    datavar = nc.variables[varname]
    lons = lonvar[::step]
    lats = latvar[::step]
    if len(datavar.dimensions) == 2:
        data = datavar[::step, ::step]
    elif len(datavar.dimensions) == 3:
        data = datavar[0, ::step, ::step]
    # Set masked (i.e. land) data to 0.
    # plot_surface ignores masks, and if we set it to NaN, it screws up the
    # colour map
    # TODO: try this again with a custom colour map...
    if filter is not None:
        data[np.where(np.ma.getmask(data) == True)] = filter
    # Now fix the longitude wrapping so that all values go from -180:180
    wrapindex = None
    for i, lon in enumerate(lons):
        if lon > 180:
            lons[i] -= 360
            if wrapindex is None:
                wrapindex = i
    if wrapindex is not None:
        lons = np.hstack((lons[wrapindex:], lons[:wrapindex]))
        data = np.hstack((data[:, wrapindex:], data[:, :wrapindex]))
    lons_proj, lats_proj = map.makegrid(xsize, ysize)
    data_proj = interp(data, lons, lats, lons_proj, lats_proj,
                       checkbounds=False, masked=False, order=1)
    XX, YY = np.meshgrid(np.arange(xsize), np.arange(ysize))
    nc.close()
    return (lons_proj, lats_proj, XX, YY, data_proj)
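# A hypothetical call to reproject_data above; the file name 'sst.nc' and
# variable name 'sst' are illustrative, not from the source, and the target
# projection is arbitrary.
from mpl_toolkits.basemap import Basemap

m = Basemap(projection='ortho', lat_0=45, lon_0=-100)
lons_p, lats_p, XX, YY, sst_p = reproject_data('sst.nc', 'sst', m,
                                               step=2, xsize=200, ysize=200)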
def read(self, n=1, squeeze=True):
    dataread = self.reader.read(n, squeeze=False)  # 5d array
    num_times = dataread.shape[4]
    num_vars = dataread.shape[3]
    num_levs = dataread.shape[2]
    shape_out = (self._shape2d[0], self._shape2d[1], num_levs, num_vars,
                 num_times)
    dataout = np.ma.MaskedArray(np.empty(shape_out, dtype=np.float32), False)
    for ind_time in range(num_times):
        for ind_var in range(num_vars):
            for ind_lev in range(num_levs):
                # transpose -- interp assumes y in 1st index, x in 2nd
                fld = dataout[:, :, ind_lev, ind_var, ind_time]
                fldmask = dataout.mask[:, :, ind_lev, ind_var, ind_time]
                # interp = basemap.interp(
                #     dataread[:, :, ind_lev, ind_var, ind_time].T,
                #     self.xin, self.yin, self.x_interp.T, self.y_interp.T,
                #     masked=True, order=1).T
                interp = basemap.interp(
                    dataread[:, :, ind_lev, ind_var, ind_time],
                    self.yin, self.xin, self.y_interp, self.x_interp,
                    masked=True, order=1)
                fld[...] = interp
                fldmask[...] = interp.mask
    if squeeze:
        dataout = dataout.squeeze()
    return dataout
def interpolate_to_NEMO_lateral(interps, dataset, NEMOlon, NEMOlat, shape):
    """Interpolates arrays in interps laterally to the NEMO grid.
    Assumes these arrays have already been interpolated vertically.
    Note that by this point interps should be a full array.

    :arg interps: dictionary of 4D numpy arrays.
                  Key represents the variable name.
    :type interps: dictionary

    :arg dataset: LiveOcean results. Used to look up lateral grid.
    :type dataset: xarray Dataset

    :arg NEMOlon: array of NEMO boundary longitudes
    :type NEMOlon: 1D numpy array

    :arg NEMOlat: array of NEMO boundary latitudes
    :type NEMOlat: 1D numpy array

    :arg shape: the lateral shape of NEMO boundary area.
    :type shape: 2-tuple

    :returns: a dictionary, like var_arrays, but with arrays replaced with
              interpolated values
    """
    # LiveOcean grid
    lonsLO = dataset.lon_rho.values[0, :]
    latsLO = dataset.lat_rho.values[:, 0]
    # interpolate each variable
    interpl = {}
    for var in interps.keys():
        var_new = np.zeros((interps[var].shape[0], shape[0], shape[1]))
        for k in range(var_new.shape[0]):
            var_grid = interps[var][k, :, :]
            var_new[k, ...] = Basemap.interp(
                var_grid, lonsLO, latsLO, NEMOlon, NEMOlat)
        interpl[var] = var_new
    return interpl
def interpolate_data(self, data):
    # Subset the raw field, flipping the latitude slice if the source grid
    # runs north-to-south.
    if self.raw_variables['lat'][0] > self.raw_variables['lat'][self.lat_size - 1]:
        data_interp = data_process = data[
            self.sub_indexes['max_lat']:self.sub_indexes['min_lat'] + 1:,
            self.sub_indexes['min_lon']:self.sub_indexes['max_lon'] + 1:]
    else:
        data_interp = data_process = data[
            self.sub_indexes['min_lat']:self.sub_indexes['max_lat'] + 1:,
            self.sub_indexes['min_lon']:self.sub_indexes['max_lon'] + 1:]
    if self.data_interpolation_factor > 1:
        coordinates_xo, coordinates_yo = np.meshgrid(
            self.data_output["lon"], self.data_output['lat'])
        data_interp = interp(data_process,
                             np.sort(self.raw_variables["lon"]),
                             np.sort(self.raw_variables['lat']),
                             coordinates_xo, coordinates_yo, masked=True)
    return data_interp
def griddata_scipy_idw(x, y, z, xi, yi, function='linear'):
    '''
    Inverse-distance-style scatter interpolation via scipy's Rbf.
    Suitability of the radial basis functions for interpolation here:
    'multiquadric': sqrt((r/self.epsilon)**2 + 1)      # unusable
    'inverse':      1.0/sqrt((r/self.epsilon)**2 + 1)  # unusable
    'gaussian':     exp(-(r/self.epsilon)**2)          # unusable for interpolation
    'linear':       r                                  # works
    'cubic':        r**3                               # works
    'quintic':      r**5                               # poor results, barely works
    'thin_plate':   r**2 * log(r)                      # usable for interpolation
    '''
    x = x.astype(np.float32)
    y = y.astype(np.float32)
    z = z.astype(np.float32)
    xi = xi.astype(np.float32)
    yi = yi.astype(np.float32)
    (nx, ny) = xi.shape
    xi, yi = xi.flatten(), yi.flatten()
    from scipy.interpolate import Rbf
    interp = Rbf(x, y, z, function=function, epsilon=2)
    zi = np.reshape(interp(xi, yi), (nx, ny))
    zi = zi.astype(np.float32)
    return zi
def interp_geodata(lon_old, lat_old, data, lon_new, lat_new, mask=None,
                   chunk=None, view='y-x+', order=1, nan_handler=None):
    """
    Takes gridded data, interpolates it to a non-grid point set.
    """
    from mpl_toolkits import basemap

    def chunker(v, i, chunk):
        return v[i * chunk:(i + 1) * chunk]

    lat_argmins = np.array([np.argmin(np.abs(ln - lat_old)) for ln in lat_new])
    lon_argmins = np.array([np.argmin(np.abs(ln - lon_old)) for ln in lon_new])

    if view[0] == 'y':
        lat_index = 0
        lon_index = 1
        lat_dir = int(view[1] + '1')
        lon_dir = int(view[3] + '1')
    else:
        lat_index = 1
        lon_index = 0
        lat_dir = int(view[3] + '1')
        lon_dir = int(view[1] + '1')

    N_new = len(lon_new)
    out_vals = zeros(N_new, dtype=float)

    if chunk is None:
        data = data[:]
        if mask is not None:
            data = ma.MaskedArray(data, mask)
        dconv = grid_convert(data, view, 'y+x+')
        for i in range(N_new):
            out_vals[i] = basemap.interp(dconv, lon_old, lat_old,
                                         lon_new[i:i + 1], lat_new[i:i + 1],
                                         order=order)
        if nan_handler is not None:
            where_nan = np.where(np.isnan(out_vals))
            out_vals[where_nan] = nan_handler(lon_old, lat_old, dconv,
                                              lon_new[where_nan],
                                              lat_new[where_nan], order)
    else:
        where_inlon = [np.where((lon_argmins >= ic * chunk[lon_index]) *
                                (lon_argmins < (ic + 1) * chunk[lon_index]))[0]
                       for ic in range(len(lon_old) // chunk[lon_index])]
        where_inlat = [np.where((lat_argmins >= jc * chunk[lat_index]) *
                                (lat_argmins < (jc + 1) * chunk[lat_index]))[0]
                       for jc in range(len(lat_old) // chunk[lat_index])]
        # Always iterate forward in longitude and latitude.
        for ic in range(data.shape[lon_index] // chunk[lon_index]):
            for jc in range(data.shape[lat_index] // chunk[lat_index]):
                # Who is in this chunk?
                where_inchunk = intersect1d(where_inlon[ic], where_inlat[jc])
                if len(where_inchunk) > 0:
                    # Which slice in latitude?
                    if lat_dir == 1:
                        lat_slice = slice(jc * chunk[lat_index],
                                          (jc + 1) * chunk[lat_index], None)
                    else:
                        lat_slice = slice(len(lat_old) - (jc + 1) * chunk[lat_index],
                                          len(lat_old) - jc * chunk[lat_index], None)
                    # Which slice in longitude?
                    if lon_dir == 1:
                        lon_slice = slice(ic * chunk[lon_index],
                                          (ic + 1) * chunk[lon_index], None)
                    else:
                        lon_slice = slice(len(lon_old) - (ic + 1) * chunk[lon_index],
                                          len(lon_old) - ic * chunk[lon_index], None)
                    # Combine longitude and latitude slices in correct order
                    dslice = [None, None]
                    dslice[lat_index] = lat_slice
                    dslice[lon_index] = lon_slice
                    dslice = tuple(dslice)
                    dchunk = data[dslice]
                    if mask is not None:
                        mchunk = mask[dslice]
                        dchunk = ma.MaskedArray(dchunk, mchunk)
                    latchunk = chunker(lat_old, jc, chunk[lat_index])
                    lonchunk = chunker(lon_old, ic, chunk[lon_index])
                    dchunk_conv = grid_convert(dchunk, view, 'y+x+')
                    out_vals[where_inchunk] = basemap.interp(
                        dchunk_conv, lonchunk, latchunk,
                        lon_new[where_inchunk], lat_new[where_inchunk],
                        order=order)
                    if nan_handler is not None:
                        where_nan = np.where(np.isnan(out_vals[where_inchunk]))
                        out_vals[where_inchunk][where_nan] = nan_handler(
                            lonchunk, latchunk, dchunk_conv,
                            lon_new[where_inchunk][where_nan],
                            lat_new[where_inchunk][where_nan], order)
    return out_vals
def createmap(data, lats, lons, make_edges=False, GC_shift=True,
              vmin=None, vmax=None, latlon=True,
              region=__GLOBALREGION__, aus=False, linear=False,
              clabel=None, colorbar=True, cbarfmt=None, cbarxtickrot=None,
              ticks=None, cbarorient='bottom', xticklabels=None,
              set_bad=None, set_under=None, set_over=None,
              pname=None, title=None, suptitle=None, smoothed=False,
              cmapname=None):
    '''
    Pass in data[lat,lon], lats[lat], lons[lon]
    arguments:
        set_bad='blue'  # should mask nans as blue
        GC_shift=True   # will shift plot half a square left and down
    Returns map, cs, cb
    '''
    # Create a basemap map with region as inputted
    if aus:
        region = __AUSREGION__
    if __VERBOSE__:
        print("createmap called over %s (S,W,N,E)" % str(region))

    # First reduce data, lats, lons to the desired region (should save plotting time)
    regionplus = np.array(region) + np.array([-5, -10, 5, 10])  # add a little padding so edges aren't lost
    lati, loni = util.lat_lon_range(lats, lons, regionplus)
    data = data[lati, :]
    data = data[:, loni]
    lats = lats[lati]
    lons = lons[loni]

    lllat = region[0]; urlat = region[2]; lllon = region[1]; urlon = region[3]
    m = Basemap(llcrnrlat=lllat, urcrnrlat=urlat, llcrnrlon=lllon,
                urcrnrlon=urlon, resolution='i', projection='merc')

    # pcolormesh arguments will be added to this dictionary
    pcmeshargs = {}
    if not linear:
        if __VERBOSE__:
            print('removing %d negative datapoints in createmap' % np.nansum(data < 0))
        # ignore warnings of NaN comparison
        with warnings.catch_warnings():
            warnings.filterwarnings("ignore", category=RuntimeWarning)
            data[data <= 0] = np.NaN
        pcmeshargs['norm'] = LogNorm()

    # Set vmin and vmax if necessary
    if vmin is None:
        vmin = 1.05 * np.nanmin(data)
    if vmax is None:
        vmax = 0.95 * np.nanmax(data)

    ## basemap pcolormesh uses data edges ##
    lats_e, lons_e = lats, lons
    lats_m, lons_m = lats, lons
    if make_edges:
        if __VERBOSE__:
            print("Making edges from lat/lon mids")
        nlat, nlon = len(lats), len(lons)
        lats_e = regularbounds(lats)
        lons_e = regularbounds(lons)
        assert nlat == len(lats_e) - 1, "regularbounds failed: %d -> %d" % (nlat, len(lats_e))
        assert nlon == len(lons_e) - 1, "regularbounds failed: %d -> %d" % (nlon, len(lons_e))
        ## midpoints, derive simply from edges
        lats_m = (lats_e[0:-1] + lats_e[1:]) / 2.0
        lons_m = (lons_e[0:-1] + lons_e[1:]) / 2.0
    elif GC_shift:
        # non edge-based grids need to be shifted left and down by half a box
        latres = lats[3] - lats[2]
        lonres = lons[3] - lons[2]
        lats = lats - latres / 2.0
        lons = lons - lonres / 2.0
        lats[lats < -89.9] = -89.9
        lats[lats > 89.9] = 89.9
        lats_e, lons_e = lats, lons
        lats_m, lons_m = lats, lons

    ## interpolate for smoothed output if desired ##
    if smoothed:
        factor = 5
        if __VERBOSE__:
            print("Smoothing data, by factor of %d" % factor)
        # 'increase' resolution
        nlats = factor * data.shape[0]
        nlons = factor * data.shape[1]
        lonsi = np.linspace(lons_m[0], lons[-1], nlons)
        latsi = np.linspace(lats_m[0], lats[-1], nlats)
        # also increase resolution of our edge lats/lons
        lats_e = regularbounds(latsi)
        lons_e = regularbounds(lonsi)
        lonsi, latsi = np.meshgrid(lonsi, latsi)
        # Smooth data to increased resolution
        data = interp(data, lons, lats, lonsi, latsi)

    # Make edges into 2D meshed grid
    mlons_e, mlats_e = np.meshgrid(lons_e, lats_e)
    errmsg = "pcolormesh likes edges for lat/lon (array: %s, lats:%s)" % (
        str(np.shape(data)), str(np.shape(mlats_e)))
    if __VERBOSE__:
        print(errmsg)

    if cmapname is None:
        cmapname = matplotlib.rcParams['image.cmap']
    cmap = plt.cm.cmap_d[cmapname]
    cmap.set_under(cmap(0.0))
    cmap.set_over(cmap(1.0))
    if set_bad is not None:
        cmap.set_bad(set_bad, alpha=0.0)

    pcmeshargs.update({'vmin': vmin, 'vmax': vmax, 'clim': (vmin, vmax),
                       'latlon': latlon, 'cmap': cmap})

    # force nan into any pixel with nan results, so colour is not plotted there...
    mdata = np.ma.masked_invalid(data)  # mask non-finite elements
    if __VERBOSE__:
        shapes = tuple([str(np.shape(a)) for a in [mlats_e, mlons_e, mdata, mdata.mask]])
        print("lats: %s, lons: %s, data: %s, mask: %s" % shapes)

    cs = m.pcolormesh(mlons_e, mlats_e, mdata, **pcmeshargs)

    # colour limits for contour mesh
    if set_over is not None:
        cs.cmap.set_over(set_over)
    if set_under is not None:
        cs.cmap.set_under(set_under)
    cs.set_clim(vmin, vmax)

    # draw coastline and equator (no latlon labels)
    m.drawcoastlines()
    m.drawparallels([0], labels=[0, 0, 0, 0])

    # add titles and cbar label
    if title is not None:
        plt.title(title)
    if suptitle is not None:
        plt.suptitle(suptitle)

    cb = None
    if colorbar:
        cbargs = {'format': cbarfmt, 'ticks': ticks, 'size': '5%',
                  'pad': '1%', 'extend': 'both'}
        cb = m.colorbar(cs, cbarorient, **cbargs)
        if xticklabels is not None:
            cb.ax.set_xticklabels(xticklabels)
        if clabel is not None:
            cb.set_label(clabel)
        if cbarxtickrot is not None:
            cb.ax.set_xticklabels(cb.ax.get_xticklabels(), rotation=cbarxtickrot)

    # if a plot name is given, save and close figure
    if pname is not None:
        plt.savefig(pname)
        print("Saved " + pname)
        plt.close()
        return

    # if no colorbar is wanted then don't return one (can be set externally)
    return m, cs, cb
obs_aux, obs_lons = shiftgrid(180., obs_aux, obs_lons_360, start=False)

print('Shapes:', obs_aux.shape)
print('lats:', obs_lats)
print('lons:', obs_lons)

print("\n === INTERPOLATION ===")

### Interpolation to 1 degree ###
# New 1-degree grid
newlats = np.linspace(-90, 90, 181)
newlons = np.linspace(-180, 179, 360)
x, y = np.meshgrid(newlons, newlats)

# Interpolate forecast
for i in range(0, int(fcst_aux.shape[0])):
    fcst = interp(fcst_aux[i, :, :], fcst_lons, fcst_lats, x, y, order=1)
    fcst = np.expand_dims(fcst, axis=0)

# Interpolate hindcast
hind = np.zeros((int(hind_aux.shape[0]), int(len(newlats)), int(len(newlons))))
for i in range(0, int(hind_aux.shape[0])):
    hind[i, :, :] = interp(hind_aux[i, :, :], hind_lons, hind_lats, x, y, order=1)

# Interpolate obs
obs = np.zeros((int(obs_aux.shape[0]), int(len(newlats)), int(len(newlons))))
for i in range(0, int(obs_aux.shape[0])):
    obs[i, :, :] = interp(obs_aux[i, :, :], obs_lons, obs_lats, x, y, order=1)

print('\n ... FCST ...', fcst.shape)
# get arthern accumulation from bedmap2
accumulation = ncbm2.variables['accum'][:]

# get others from albmap
xalb = ncalb.variables['x'][:]
yalb = ncalb.variables['y'][:]
# lat = ncalb.variables['topg'][:]
precip = ncalb.variables['precipitation'][:]
artm = ncalb.variables['air_temp'][:]
xgrid, ygrid = np.meshgrid(xalb, yalb)

# adjust bedmap2 to centered x,y, see bedmap2 readme file
xbm2 -= x0_bm2
ybm2 -= x0_bm2

thkbm2 = np.asarray(interp(thk, xbm2, ybm2, xgrid, ygrid))
topgbm2 = np.asarray(interp(topg, xbm2, ybm2, xgrid, ygrid))
maskbm2 = np.asarray(interp(mask, xbm2, ybm2, xgrid, ygrid))
usurfbm2 = np.asarray(interp(usurf, xbm2, ybm2, xgrid, ygrid))
thkbm2[thkbm2 > 10000.] = 0.
topgbm2[topgbm2 > 10000.] = -9999
velbm2 = np.asarray(interp(vel, xbm2, ybm2, xgrid, ygrid))
accumbm2 = np.asarray(interp(accumulation, xbm2, ybm2, xgrid, ygrid))

### add some difference fields to compare albmap and bedmap2 topg and thk
if compare_bm2_alb:
    ncmsk = ncalb.createVariable('mask', 'float32', ('t', 'y', 'x'))
    ncusurf = ncalb.createVariable('usurf', 'float32', ('t', 'y', 'x'))
    ncthkold = ncalb.createVariable('thk_alb', 'float32', ('t', 'y', 'x'))
    nctopgold = ncalb.createVariable('topg_alb', 'float32', ('t', 'y', 'x'))
# H*wind data from http://www.aoml.noaa.gov/hrd/data_sub/wind.html
ncfile = NetCDFFile('rita.nc')
udat = ncfile.variables['sfc_u'][0, :, :]
vdat = ncfile.variables['sfc_v'][0, :, :]
lons1 = ncfile.variables['longitude'][:]
lats1 = ncfile.variables['latitude'][:]
lat0 = lats1[len(lats1) // 2]
lon0 = lons1[len(lons1) // 2]
lons, lats = np.meshgrid(lons1, lats1)
ncfile.close()

# interpolate to a finer grid for a nicer looking plot.
nlats = 2 * udat.shape[0]
nlons = 2 * udat.shape[1]
lons = np.linspace(lons1[0], lons1[-1], nlons)
lats = np.linspace(lats1[0], lats1[-1], nlats)
lons, lats = np.meshgrid(lons, lats)
udat = interp(udat, lons1, lats1, lons, lats, order=3)
vdat = interp(vdat, lons1, lats1, lons, lats, order=3)

fig = plt.figure(figsize=(8, 8))
m = Basemap(projection='cyl', llcrnrlat=lats1[0], llcrnrlon=lons1[0],
            urcrnrlat=lats1[-1], urcrnrlon=lons1[-1], resolution='i')

# pass texture, kernel and data to LIC function from vectorplot.
kernellen = 31
texture = np.random.rand(udat.shape[0], udat.shape[1]).astype(np.float32)
kernel = np.sin(np.arange(kernellen) * np.pi / kernellen).astype(np.float32)
image = lic_internal.line_integral_convolution(udat.astype(np.float32),
                                               vdat.astype(np.float32),
                                               texture, kernel)

# plot the resulting image.
im = m.imshow(image, plt.cm.gist_stern)
m.drawcoastlines()
m.drawmeridians(np.arange(-120, -60, 2), labels=[0, 0, 0, 1])
freeboard = freeboard[::-1]  # flip y-dim
thickness = thickness[::-1]
thickness_err = thickness_err[::-1]

ind = np.where((freeboard == nodata) | (thickness == nodata))
freeboard[ind] = np.nan
thickness[ind] = np.nan
thickness_err[thickness_err == nodata] = np.nan

# grid coords in polar stereographic
xx, yy = np.meshgrid(lon, lat)
xx, yy = ap.ll2xy(xx, yy, units='m')
xx_bm, yy_bm = np.meshgrid(x_bm, y_bm)
xx_mask, yy_mask = np.meshgrid(x_mask, y_mask)

# regrid the error to match resolutions
thickness_err1 = interp(thickness_err, x_err, y_err, xx_bm, yy_bm, order=1)
thickness_err2 = interp(thickness_err, x_err, y_err, xx_bm, yy_bm, order=0)
thickness_err1[np.isnan(thickness_err1)] = thickness_err2[np.isnan(thickness_err1)]
thickness_err = thickness_err1

# mask ice shelves
mask_bm = interp(mask, x_mask, y_mask, xx_bm, yy_bm, order=0)
freeboard[mask_bm != 4] = np.nan
thickness[mask_bm != 4] = np.nan
thickness_err[mask_bm != 4] = np.nan

# account for Mean Dynamic Topography: H - MDT
# average MDT around Antarctica = -1.4 m
freeboard -= mean_dynamic_topo

# calculate density, and error
def main():
    startTime = time.time()

    """Define the start and end date you want data extracted for:"""
    startYear = 2009
    startMonth = 10
    endYear = 2012
    endMonth = 12
    maxTries = 3
    delay = 10
    firstIteration = True
    lastIteration = False
    createFigure = False
    figureNumber = 0
    USENETCDF4 = True  # if false then use NETCDF3_CLASSIC

    """Name of output file to be created"""
    outputFile = "NS8KM_obsSST_%s_to_%s.nc" % (startYear, endYear)
    if os.path.exists(outputFile):
        os.remove(outputFile)

    """Read the grid info from the grid file"""
    filename = "/Users/trondkr/Projects/is4dvar/Grid/nordsjoen_8km_grid_hmax20m_v3.nc"
    mask_rho, lon_rho, lat_rho, grid_h = getGrid(filename)

    """Calculate the x,y grid coordinates"""
    (Mp, Lp) = lon_rho.shape
    X = np.arange(0, Mp, 1)
    Y = np.arange(0, Lp, 1)
    roms_Xgrid, roms_Ygrid = np.meshgrid(Y, X)

    """CoRTAD time is days since 1980/12/31 12:00:00"""
    mytime = getCortad.getCORTADtime()
    refDate = datetime.datetime(1981, 12, 31, 12, 0, 0)

    """Have to convert the day of observation to the relative time
    used by ROMS, which is 1948/1/1:00:00:00"""
    refDateROMS = datetime.datetime(1948, 1, 1, 0, 0, 0)
    delta = refDate - refDateROMS
    daysSince1948to1980 = delta.days

    """Find the start and end indexes to extract"""
    foundStart = False
    foundEnd = False
    startIndex = -9
    endIndex = -9
    for index in range(len(mytime)):
        currentDate = refDateROMS + datetime.timedelta(
            days=float(mytime[index]) + daysSince1948to1980)
        if not foundStart:
            if currentDate.year == startYear and currentDate.month == startMonth:
                foundStart = True
                startIndex = index
                print("\n-----------------------------------------------")
                print("Start date %s at index %s" % (currentDate, startIndex))
        if not foundEnd:
            if currentDate.year == endYear and currentDate.month == endMonth:
                foundEnd = True
                endIndex = index
                print("FIXME : HARDCODING LAST INDEX !!!!!!!!!!!!!!!!!\n\n\n")
                endIndex = 1616
                currentDate = refDateROMS + datetime.timedelta(
                    days=float(mytime[endIndex]) + daysSince1948to1980)
                print("FIXME : HARDCODING LAST INDEX !!!!!!!!!!!!!!!!!\n\n\n")
                print("End date %s at index %s" % (currentDate, endIndex))

    times = [i for i in range(startIndex, endIndex, 1)]
    print("Created array of %s time-steps to iterate and extract data from" % (len(times)))
    print("-----------------------------------------------\n")

    """Get the longitude-latitudes of the combination of tiles"""
    longitude, latitude, lonSST, latSST, indexes = getCortad.extractCoRTADLongLat(
        maxTries, delay, lon_rho.min(), lon_rho.max(),
        lat_rho.min(), lat_rho.max())
    indexes = np.asarray(indexes, dtype=np.int32)
    latitude = np.flipud(latitude[indexes[3]:indexes[2]])
    longitude = longitude[indexes[0]:indexes[1]]

    """Loop over all times and store to file or make map"""
    polygon_data = getPolygon(lonSST[indexes[3]:indexes[2], indexes[0]:indexes[1]],
                              latSST[indexes[3]:indexes[2], indexes[0]:indexes[1]],
                              lon_rho, lat_rho)
    survey_time = []

    for t in range(len(times)):
        """Open the files and check that NOAA is online"""
        cdf = getCortad.openCoRTAD(maxTries, delay)
        currentDate = refDateROMS + datetime.timedelta(
            days=int(mytime[times[t]]) + daysSince1948to1980)

        """Get the data for the current time"""
        filledSST = getCortad.extractCORTADSST("North Sea", times[t], cdf, indexes)

        """Interpolate the original values to the grid.
        This is the data that will be saved to file"""
        SSTi = mp.interp(np.flipud(filledSST), longitude, latitude,
                         lon_rho, lat_rho, checkbounds=False, masked=True,
                         order=1)
        SSTi = np.where(SSTi < -0.5, -0.5, SSTi)
        SSTi = SSTi * mask_rho

        igood = np.nonzero(SSTi)
        numberOfobs = len(SSTi[igood])
        obs_lon = lon_rho[igood]
        obs_lat = lat_rho[igood]
        obs_value = SSTi[igood]
        obs_Xgrid = roms_Xgrid[igood]
        obs_Ygrid = roms_Ygrid[igood]
        Nobs = numberOfobs
        survey_time.append(int(mytime[times[t]]) + daysSince1948to1980)

        obs_time = []
        for ot in range(numberOfobs):
            obs_time.append(int(mytime[times[t]]) + daysSince1948to1980)
            if ot == 0:
                print(refDateROMS + datetime.timedelta(
                          days=int(mytime[times[t]]) + daysSince1948to1980),
                      int(mytime[times[t]]) + daysSince1948to1980)
        print("Found %s observations for %s" % (numberOfobs, currentDate))

        """Create map where the colored data shows the interpolated values
        and the grey colored data are the original data"""
        """Define the max and minimum area to create map for
        (not used to create obs file)"""
        lat_start = 43; lat_end = 71.5; lon_start = -20; lon_end = 35
        if createFigure is True:
            makeMap(figureNumber, lon_start, lon_end, lat_start, lat_end,
                    filename, SSTi, lon_rho, lat_rho, polygon_data, currentDate,
                    filledSST,
                    lonSST[indexes[3]:indexes[2], indexes[0]:indexes[1]],
                    latSST[indexes[3]:indexes[2], indexes[0]:indexes[1]])
            figureNumber += 1

        """Finished, now cleanup and make sure everything are arrays"""
        obs_time = np.asarray(obs_time)

        """Finally write the results to file"""
        """Temp variables not used until lastIteration is set to True,
        but required for function call"""
        obs_flag = 6; is3d = 1; survey = 0; Nstate = 7
        if firstIteration is True:
            print("Writing data of TYPE: %s to file (6=Temperature)" % (obs_flag))
            unos = np.ones(len(obs_value))
            obs_type = obs_flag * unos
            obs_error = unos  # error equal one, scale later
            obs_Zgrid = 0 * unos
            obs_depth = 35 * unos  # if positive, the sigma level; if negative, depth in meters
            obs_variance = np.asarray(np.ones(Nstate))
        print("Min and max of SST to file: %s - %s" % (obs_value.min(), obs_value.max()))

        writeObsfile.writeData(outputFile, obs_lat, obs_lon, obs_value, Nobs,
                               survey_time, obs_time, obs_Xgrid, obs_Ygrid,
                               firstIteration, lastIteration,
                               obs_flag, obs_type, obs_error, obs_Zgrid,
                               obs_depth, obs_variance,
                               survey, is3d, Nstate, USENETCDF4)
        firstIteration = False

        """Close the opendap files"""
        cdf.close()

    """Cleanup and write final dimensions and variables"""
    lastIteration = True
    """some extra variables"""
    obs_flag = 6  # for temperature data
    is3d = 1
    survey = len(survey_time)
    survey_time = np.asarray(survey_time)
    survey_time = survey_time.flatten()
    Nstate = 7
    writeObsfile.writeData(outputFile, obs_lat, obs_lon, obs_value, Nobs,
                           survey_time, obs_time, obs_Xgrid, obs_Ygrid,
                           firstIteration, lastIteration,
                           obs_flag, obs_type, obs_error, obs_Zgrid,
                           obs_depth, obs_variance,
                           survey, is3d, Nstate, USENETCDF4)

    endTime = time.time()
    print("\n--------------------------------------------------------------")
    print("Program ended successfully after %s seconds" % (endTime - startTime))
    print("\n--------------------------------------------------------------\n")
analysis_variable_array = ensmemanal.variables[analysis_variable][:, :, :]

# Print message to user
print("Interpolating variable %s from horizontal resolution within %s to "
      "horizontal resolution of %s ..." % (
          analysis_variable, hybridanal_file, ensmemanal_file))

# Compute local variable
analysis_variable_array = interp(
    hybrid_variable_array,
    hybrid_variable_xcoord,
    hybrid_variable_ycoord,
    ensmem_variable_xcoord,
    ensmem_variable_ycoord,
    order=3,
)

# Define local variable
analysis_variable_2d = analysis_variable_array

# Perform necessary tasks on file
analysis_variable_2d = interpanal.createVariable(
    analysis_variable,
    ensmemanal.variables[analysis_variable].dtype,
    (analysis_variable_strname_tdim, analysis_variable_strname_ydim,
     analysis_variable_strname_xdim),
)
# print(myt2max.shape, myt2m.shape, myt2min.shape)
newlats = np.linspace(-90, 90, 181)
newlons = np.linspace(-180, 179, 360)
x, y = np.meshgrid(newlons, newlats)

tmax = np.zeros((20, 3, 181, 360))
tmax[:] = np.nan
tmean = np.copy(tmax)
tmin = np.copy(tmax)

print("--- INTERPOLATING THE DATA ---")
for mem in range(20):
    for lead in range(3):
        tmax[mem, lead, :, :] = interp(myt2max[mem, lead, :, :], lons, lats,
                                       x, y, order=1)
        tmean[mem, lead, :, :] = interp(myt2m[mem, lead, :, :], lons, lats,
                                        x, y, order=1)
        tmin[mem, lead, :, :] = interp(myt2min[mem, lead, :, :], lons, lats,
                                       x, y, order=1)

tmax = np.expand_dims(tmax, axis=0)
tmean = np.expand_dims(tmean, axis=0)
tmin = np.expand_dims(tmin, axis=0)
# print(tmax.shape)
# mydatatmax = np.swapaxes(tmax, 2, 1)
# mydatatmean = np.swapaxes(tmean, 2, 1)
# mydatatmin = np.swapaxes(tmin, 2, 1)

fileout = ("io/echam46/hind8110/{1}/monthly/temp/"
           "temp_monthly_echam46_hind8110_fcst_{0}_{1}_{2}.nc").format(
               year_aux, ini_mon_eng[j].lower(), name_season[j].lower())
def main(): startTime = time.time() """Define the start and end date you want data extracted for:""" startDate=date(2013,12,2) endDate=date(2014,12,31) firstIteration=True lastIteration=False createFigure=False figureNumber=0 USENETCDF4=True # if false then use NETCDF3_CLASSIC """Name of output file to be created""" outputFile="NS8KM_AVHRR_obsSST_%s_to_%s.nc"%(startDate.year,endDate.year) if os.path.exists(outputFile): os.remove(outputFile) """Read the grid info from the grid file""" filename="/Users/trondkr/Projects/is4dvar/Grid/nordsjoen_8km_grid_hmax20m_v3.nc" mask_rho, lon_rho,lat_rho,grid_h = getGrid(filename) """Calculate the x,y grid coordinates""" (Mp,Lp)=lon_rho.shape X=np.arange(0,Mp,1) Y=np.arange(0,Lp,1) roms_Xgrid,roms_Ygrid=np.meshgrid(Y,X) """AVHRR time is days since 1978/1/1 00:00:00""" refDate=datetime.datetime(1978,1,1,0,0,0) """Have to convert the day of observation to the relative time used by ROMS which is 1948/1/1:00:00:00""" refDateROMS=datetime.datetime(1948,1,1,0,0,0) delta=refDate-refDateROMS daysSince1948to1978=delta.days """Get the longitude-latitudes of the AVHRR files""" longitude, latitude, lonSST, latSST, indexes = getAVHRR.extractAVHRRLongLat(lon_rho.min(), lon_rho.max(), lat_rho.min(), lat_rho.max(), startDate) latitude = latitude[indexes[2]:indexes[3]] longitude = longitude[indexes[0]:indexes[1]] """Loop over all times and store to file or make map""" polygon_data = getPolygon(lonSST[indexes[2]:indexes[3],indexes[0]:indexes[1]], latSST[indexes[2]:indexes[3],indexes[0]:indexes[1]], lon_rho,lat_rho) survey_time=[] for currentDate in daterange(startDate, endDate): print "\n-----\nCurrent date", currentDate """Open the files and check that NOAA is online""" currentTime, sst,longitude = getAVHRR.openAVHRR(currentDate,indexes) currentDate=refDateROMS + datetime.timedelta(days=currentTime+daysSince1948to1978) """Interpolate the original values to the grid. 
        This is the data that will be saved to file"""
        SSTi = mp.interp(sst, longitude, latitude, lon_rho, lat_rho,
                         checkbounds=False, masked=True, order=1)
        SSTi = np.where(SSTi < -0.5, -0.5, SSTi)
        print("Mean SST %s" % (np.ma.mean(SSTi)))

        SSTi = SSTi * mask_rho
        igood = np.nonzero(SSTi)
        numberOfobs = len(SSTi[igood])

        obs_lon = lon_rho[igood]
        obs_lat = lat_rho[igood]
        obs_value = SSTi[igood]
        obs_Xgrid = roms_Xgrid[igood]
        obs_Ygrid = roms_Ygrid[igood]
        Nobs = numberOfobs

        survey_time.append(currentTime + daysSince1948to1978)
        obs_time = []
        for ot in range(numberOfobs):
            obs_time.append(currentTime + daysSince1948to1978)
            if ot == 0:
                print("Date to file:",
                      refDateROMS + datetime.timedelta(days=currentTime + daysSince1948to1978),
                      currentTime + daysSince1948to1978)

        print("Found %s observations for %s" % (numberOfobs, currentDate))

        """Create a map where the colored data shows the interpolated values
        and the grey colored data are the original data"""
        """Define the max and minimum area to create the map for
        (not used to create the obs file)"""
        lat_start = 43
        lat_end = 71.5
        lon_start = -20
        lon_end = 35
        if createFigure is True:
            makeMap(figureNumber, lon_start, lon_end, lat_start, lat_end, filename,
                    SSTi, lon_rho, lat_rho, polygon_data, currentDate, sst,
                    lonSST[indexes[2]:indexes[3], indexes[0]:indexes[1]],
                    latSST[indexes[2]:indexes[3], indexes[0]:indexes[1]])
            figureNumber += 1

        """Finished, now clean up and make sure everything is an array"""
        obs_time = np.asarray(obs_time)

        """Finally write the results to file"""
        """Temp variables not used until lastIteration is set to True,
        but required for the function call"""
        obs_flag = 6
        is3d = 1
        survey = 0
        Nstate = 7

        if firstIteration is True:
            print("Writing data of TYPE: %s to file (6=Temperature)" % (obs_flag))
            unos = np.ones(len(obs_value))
            obs_type = obs_flag * unos
            obs_error = unos  # error equal to one; scale later
            obs_Zgrid = 0 * unos
            obs_depth = 35 * unos  # if positive it has to be the sigma level, if negative the depth in meters
            obs_variance = np.asarray(np.ones(Nstate))

        print("Min and max of SST to file: %s - %s" % (obs_value.min(), obs_value.max()))

        writeObsfile.writeData(outputFile, obs_lat, obs_lon, obs_value, Nobs,
                               survey_time, obs_time, obs_Xgrid, obs_Ygrid,
                               firstIteration, lastIteration,
                               obs_flag, obs_type, obs_error, obs_Zgrid, obs_depth,
                               obs_variance, survey, is3d, Nstate, USENETCDF4)
        firstIteration = False

    """Cleanup and write final dimensions and variables"""
    lastIteration = True

    """Some extra variables"""
    obs_flag = 6  # for temperature data
    is3d = 1
    survey = len(survey_time)
    survey_time = np.asarray(survey_time)
    survey_time = survey_time.flatten()
    Nstate = 7

    writeObsfile.writeData(outputFile, obs_lat, obs_lon, obs_value, Nobs,
                           survey_time, obs_time, obs_Xgrid, obs_Ygrid,
                           firstIteration, lastIteration,
                           obs_flag, obs_type, obs_error, obs_Zgrid, obs_depth,
                           obs_variance, survey, is3d, Nstate, USENETCDF4)

    endTime = time.time()
    print("\n--------------------------------------------------------------")
    print("Program ended successfully after %s seconds" % (endTime - startTime))
    print("\n--------------------------------------------------------------\n")
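# The daterange() generator that main() iterates over is not defined in this
# snippet; a minimal sketch consistent with the call site (assumed behaviour:
# yields every date from start through end, inclusive) might look like this.
import datetime

def daterange(start_date, end_date):
    """Yield each date from start_date through end_date, one day at a time."""
    for n in range((end_date - start_date).days + 1):
        yield start_date + datetime.timedelta(days=n)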
def mapdap(
    varname='hr24_prcp',
    bbox='-180,-90,180,90',
    url='http://opendap.bom.gov.au:8080/thredds/dodsC/PASAP/atmos_latest.nc',
    timeindex='Default',
    imgwidth=256,
    imgheight=256,
    request='GetMap',
    time='Default',
    save_local_img=False,
    colorrange=(-4, 4),
    palette='RdYlGn',
    colorbounds='Default',
    style='grid',
    ncolors=10,
    mask=-999,
    plot_mask=True,
    mask_varname='mask',
    mask_value=1.0
):
    """Using Basemap, create a contour plot using some DAP-available data.

    Data is assumed to have dimensions [time,lat,lon].
    TODO -- deal with other shapes
    TODO -- determine the dimension ordering using the CF convention

    varname -- name of the variable in the OPeNDAP file
    bbox -- lonmin,latmin,lonmax,latmax for the plot
    url -- OPeNDAP url
    timeindex -- time index to plot
    imgwidth,imgheight -- size of the png image to return
    request -- 'GetMap','GetLegend','GetFullFigure'
    time -- time value to plot. Assumes the format "%Y-%m-%dT%H:%M:%S"
    mask -- mask out these values
    If plot_mask is True, mask_varname and mask_value must be given.
    """
    transparent = True
    lonmin, latmin, lonmax, latmax = tuple([float(a) for a in bbox.rsplit(',')])

    # It's not clear there is any point in this. Pydap doesn't actually
    # download data until you subscript
    if url not in cache:
        dset = open_url(url)
    else:
        dset = cache[url]

    # Get the correct time.
    time_var = dset['time']
    time_units = time_var.attributes['units']
    available_times = np.array(time_var[:])

    # TODO there is a potential conflict here between time and timeindex.
    # On the one hand we want to allow using the actual time value.
    # On the other hand we want to make it easy to get a time index
    # without knowing the value.
    timestep = 0
    if timeindex == 'Default':
        timestep = 0
    else:
        timestep = int(timeindex)
    if time != 'Default':
        dtime = datetime.datetime.strptime(time, "%Y-%m-%dT%H:%M:%S")
        reftime = date2num(dtime, time_units)
        timestep = np.where(available_times >= reftime)[0].min()

    # TODO Get only the section of the field we need to plot
    # TODO Determine lat/lon box indices and only download this slice
    # TODO Set default range (the below does not work)
    # colorrange = np.min(var),np.max(var)

    lat = dset['lat'][:]
    lon = dset['lon'][:]

    # CHANGED
    var = dset[varname][timestep, :, :]
    # xcoords = lonmin,lonmax
    # xcoords,lon,var = transform_lons(xcoords,lon,var)

    # TODO
    # Needs more thought -- the idea here is to only grab a slice of the data.
    # Need to grab a slightly larger slice of data so that tiling works.
    # lat_idx = (lat > latmin) & (lat < latmax)
    # lon_idx = (lon > lonmin) & (lon < lonmax)
    # lat = dset['lat'][lat_idx]
    # lon = dset['lon'][lon_idx]
    # latdx1 = np.where(lat_idx)[0].min()
    # latdx2 = np.where(lat_idx)[0].max()
    # londx1 = np.where(lon_idx)[0].min()
    # londx2 = np.where(lon_idx)[0].max()
    # var = var[latdx1:latdx2+1,londx1:londx2+1]
    # var = dset[varname][timestep,latdx1:latdx2+1,londx1:londx2+1]

    # TODO clean up this logic
    if 'mask' in dset.keys():
        if plot_mask:
            maskvar = dset['mask'][timestep, :, :]
            # maskvar = dset['mask'][timestep,latdx1:latdx2+1,londx1:londx2+1]
            varm = np.ma.masked_array(var, mask=maskvar)
            mask = varm.mask
    else:
        varm = np.ma.masked_array(var, mask=np.isinf(var))

    xcoords = lonmin, lonmax
    # Call the transform_lons function to ensure that basemap is asked to
    # plot something sensible.
    xcoords, lon, varm = transform_lons(xcoords, lon, varm)
    lonmin, lonmax = xcoords

    varnc = dset[varname]
    try:
        var_units = varnc.attributes['units']
    except KeyError:
        var_units = ''

    # Plot the data.
    # For the basemap drawing we can't go outside the range of coordinates.
    # WMS requires us to give an empty (transparent) image for these spurious lats.
    # uc = upper corner, lc = lower corner
    bmapuclon = lonmax
    bmaplclon = lonmin
    bmapuclat = min(90, latmax)
    bmaplclat = max(-90, latmin)
    if bmaplclat == 90:
        bmaplclat = 89.0
    if bmapuclat == -90:
        bmapuclat = -89.0

    # TODO set figsize etc. here
    fig = mpl.figure.Figure()
    canvas = FigureCanvas(fig)
    ax = fig.add_axes((0, 0, 1, 1), frameon=False, axisbg='k', alpha=0, visible=False)
    m = Basemap(projection='cyl', resolution='c', urcrnrlon=bmapuclon,
                urcrnrlat=bmapuclat, llcrnrlon=bmaplclon, llcrnrlat=bmaplclat,
                suppress_ticks=True, fix_aspect=False, ax=ax)
    DPI = 100.0

    # Convert the latitude extents to Basemap coordinates
    bmaplatmin, bmaplonmin = m(latmin, lonmin)
    bmaplatmax, bmaplonmax = m(latmax, lonmax)
    lon_offset1 = abs(bmaplclon - bmaplonmin)
    lat_offset1 = abs(bmaplclat - bmaplatmin)
    lon_offset2 = abs(bmapuclon - bmaplonmax)
    lat_offset2 = abs(bmapuclat - bmaplatmax)
    lon_normstart = lon_offset1 / abs(bmaplonmax - bmaplonmin)
    lat_normstart = lat_offset1 / abs(bmaplatmax - bmaplatmin)
    ax_xfrac = abs(bmapuclon - bmaplclon) / abs(bmaplonmax - bmaplonmin)
    ax_yfrac = abs(bmapuclat - bmaplclat) / abs(bmaplatmax - bmaplatmin)

    # Set plot_coords, the plot boundaries. If this is a regular WMS request,
    # the plot must fill the figure, with whitespace for invalid regions.
    # If it's a full figure, we need to make sure there is space for the legend
    # and also for the text.
    if request == 'GetFullFigure':
        coords = lonmin, latmin, lonmax, latmax
        plot_coords = figurePlotDims(imgheight, imgwidth, coords)
    else:
        plot_coords = (lon_normstart, lat_normstart, ax_xfrac, ax_yfrac)

    m = Basemap(projection='cyl', resolution='c', urcrnrlon=bmapuclon,
                urcrnrlat=bmapuclat, llcrnrlon=bmaplclon, llcrnrlat=bmaplclat,
                suppress_ticks=True, fix_aspect=False, ax=ax)
    ax = fig.add_axes(plot_coords, frameon=False, axisbg='k')
    m.ax = ax

    varm, lonwrap = addcyclic(varm, lon)
    x, y = m(*np.meshgrid(lonwrap[:], lat[:]))

    """ To plot custom colors
    rgb_cmap = mpl.colors.ListedColormap([
            (0.0,0.0,0.0),
            (0.25,0.25,0.25),
            (0.3,0.25,0.25),
            (0.5,0.5,0.5),
            (0.6,0.5,0.5),
            (0.75,0.75,0.75),
            (0.75,0.85,0.75),
            (1.0,1.0,1.0)
            ],name='rgbcm')
    default_color_bounds = [-1,-0.75,-0.5,-0.25,0.0,0.25,0.5,0.75,1.0]
    default_norm = mpl.colors.BoundaryNorm(default_color_bounds, rgb_cmap.N)
    m.contourf(x,y,var,cmap=rgb_cmap,norm=default_norm)
    contours = m.contour(x,y,var,cmap=rgb_cmap,norm=default_norm)
    contours.clabel(colors='k')
    """

    colormap = mpl.cm.get_cmap(palette)
    # colormap = cmap_discretize(colormap,ncolors)
    # if colorbounds == 'Default':
    #     colorbounds = list(np.arange(colorrange[0],colorrange[1]+increment,increment))
    # else:
    #     colorbounds = list(np.arange(colorrange[0],colorrange[1]+increment,increment))
    #     # Do some checks on the size of the list, and fix it if we can
    #     pass

    if style == 'contour':
        # Interpolate to a finer resolution
        # TODO: make this sensitive to the chosen domain
        increment = float(colorrange[1] - colorrange[0]) / float(ncolors - 2)
        colorbounds = list(np.arange(colorrange[0], colorrange[1] + increment, increment))
        # CHANGED
        colormap = cmap_discretize(colormap, ncolors)
        colvs = [-999] + colorbounds + [999]

        lat_idx = np.argsort(lat)
        lat = lat[lat_idx]
        varm = varm[lat_idx, :]

        data_lonmin = min(lonwrap)
        data_lonmax = max(lonwrap)
        data_latmin = min(lat)
        data_latmax = max(lat)
        new_lons = np.arange(data_lonmin - 1.0, data_lonmax + 1.0, 1.0)
        new_lats = np.arange(data_latmin - 1.0, data_latmax + 1.0, 1.0)
        newx, newy = m(*np.meshgrid(new_lons[:], new_lats[:]))
        x = newx
        y = newy

        # Two-pass interpolation to deal with the mask.
        # The first pass does a bilinear interpolation, the next pass does a
        # nearest neighbour to keep the mask.
        # These steps slow down the plotting significantly.
        # It's not clear this is working, and the problem is likely solved by
        # ensuring the right mask is used!
        varm_bl = interp(varm, lonwrap[:], lat[:], newx, newy, order=1)
        varm_nn = interp(varm, lonwrap[:], lat[:], newx, newy, order=0)
        varm = varm_bl
        varm[varm_nn.mask == 1] = varm_nn[varm_nn.mask == 1]

        # contourf has an extent keyword (x0,x1,y0,y1)
        # return "mapdap\n"
        # STUCK it gets stuck here (in apache)
        main_render = m.contourf(x, y, varm[:, :], colorbounds, extend='both',
                                 cmap=colormap, ax=ax)
        contours = m.contour(x, y, varm, colorbounds, colors='k', ax=ax)
        contours.clabel(colors='k', rightside_up=True, fmt='%1.1f', inline=True)
    elif style == 'grid':
        main_render = m.pcolormesh(x, y, varm[:, :], vmin=colorrange[0],
                                   vmax=colorrange[1], cmap=colormap, ax=ax)
    elif style == 'grid_threshold':
        increment = float(colorrange[1] - colorrange[0]) / float(ncolors)
        colorbounds = list(np.arange(colorrange[0], colorrange[1] + increment, increment))
        colornorm = mpl.colors.BoundaryNorm(colorbounds, colormap.N)
        main_render = m.pcolor(x, y, varm[:, :], vmin=colorrange[0],
                               vmax=colorrange[1], cmap=colormap, ax=ax, norm=colornorm)
    else:
        main_render = m.pcolormesh(x, y, varm[:, :], vmin=colorrange[0],
                                   vmax=colorrange[1], cmap=colormap, ax=ax)

    fig.set_dpi(DPI)
    fig.set_size_inches(imgwidth / DPI, imgheight / DPI)

    title_font_size = 9
    tick_font_size = 8
    if request == 'GetFullFigure':
        # Default -- draw 5 meridians and 5 parallels
        n_merid = 5
        n_para = 5

        # The base depends on the zoom
        mint = (lonmax - lonmin) / float(n_merid)
        base = mint
        meridians = [lonmin + i * mint for i in range(n_merid)]
        meridians = [int(base * round(merid / base)) for merid in meridians]
        # Some sensible defaults for debugging
        # meridians = [45,90,135,180,-135,-90,-45]

        pint = int((latmax - latmin) / float(n_para))
        base = pint
        parallels = [latmin + i * pint for i in range(1, n_para + 1)]
        parallels = [int(base * round(para / base)) for para in parallels]
        # parallels = [-60,-40,-20,0,20,40,60]
        # parallels = [((parallel + 180.) % 360.) - 180. for parallel in parallels]

        m.drawcoastlines(ax=ax)
        m.drawmeridians(meridians, labels=[0, 1, 0, 1], fmt='%3.1f', fontsize=tick_font_size)
        m.drawparallels(parallels, labels=[1, 0, 0, 0], fmt='%3.1f', fontsize=tick_font_size)
        m.drawparallels([0], linewidth=1, dashes=[1, 0], labels=[0, 1, 1, 1], fontsize=tick_font_size)

        titlex, titley = (0.05, 0.98)
        # CHANGED
        # STUCK getting an error somewhere in this function
        # title = get_pasap_plot_title(dset,varname=varname,timestep=timestep)
        title = "We're getting errors in the get title function"
        fig.text(titlex, titley, title, va='top', fontsize=title_font_size)

    colorbar_font_size = 8
    if request == 'GetLegendGraphic':
        # Currently we make the plot, and then if the legend is asked for
        # we use the plot as the basis for the legend. This is not optimal.
        # Instead we should be making the legend manually. However we need
        # to set up more variables, and ensure there is a sensible min and max.
        # See the plot_custom_colors code above
        fig = mpl.figure.Figure(figsize=(64 / DPI, 256 / DPI))
        canvas = FigureCanvas(fig)
        # Make some axes
        cax = fig.add_axes([0, 0.1, 0.2, 0.8], axisbg='k')
        # Put a legend in the axes
        cbar = fig.colorbar(main_render, cax=cax, extend='both', format='%1.1f')
        cbar.set_label(var_units, fontsize=colorbar_font_size)
        for t in cbar.ax.get_yticklabels():
            t.set_fontsize(colorbar_font_size)
        # i.e. you don't need to plot the figure...
        # fig.colorbar(filled_contours,cax=cax,norm=colornorm,boundaries=colvs,values=colvs,
        #              ticks=colorbounds,spacing='proportional')
    elif request == 'GetFullFigure':
        # Add the legend to the figure itself.
        # Figure layout parameters:
        # plot_coords = tuple with (xi,yi,dx,dy)
        # legend_coords = tuple with (xi,yi,dx,dy) as per mpl convention
        # First change the plot coordinates so that they do not cover the whole image
        legend_coords = (0.8, 0.1, 0.02, plot_coords[3])
        cax = fig.add_axes(legend_coords, axisbg='k')
        cbar = fig.colorbar(main_render, cax=cax, extend='both')
        for t in cbar.ax.get_yticklabels():
            t.set_fontsize(colorbar_font_size)
        cbar.set_label(var_units, fontsize=colorbar_font_size)
        transparent = False
        # Experimenting here with a custom color map and ticks. Assigning everything
        # manually (e.g. ticks=[-2,-1,0,1,2]) is easy. Doing it in an automated way
        # given a range is hard...
        # fig.colorbar(filled_contours,cax=cax,boundaries=colvs,ticks=colorbounds)
        #     #,norm=colornorm,boundaries=colvs,values=colvs,extend='both')

    imgdata = io.BytesIO()  # io.BytesIO in place of Python 2's StringIO: PNG data is binary
    fig.savefig(imgdata, format='png', transparent=transparent)

    if save_local_img:
        fig.savefig('map_plot_wms_output.png', format='png')
        return

    if url not in cache:
        cache[url] = dset

    value = imgdata.getvalue()
    # imgdata.close()
    fig = None
    return value
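# A hedged usage sketch: mapdap() returns the rendered PNG as a byte string,
# so a caller (for example a WMS request handler) can write it straight to
# disk or into an HTTP response. The argument values below are assumptions
# for illustration, not values taken from the original code.
if __name__ == "__main__":
    png_bytes = mapdap(varname="hr24_prcp", bbox="-180,-90,180,90",
                       request="GetFullFigure", imgwidth=512, imgheight=512)
    with open("mapdap_test.png", "wb") as f:
        f.write(png_bytes)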
cbounds = list(np.arange(crange[0], crange[1] + increment, increment))
colvs = [-999] + cbounds + [999]

# Sort latitudes and data
lat_idx = np.argsort(lats)
lats = lats[lat_idx]
data = data[lat_idx]

data_lon_min = min(lonwrap)
data_lon_max = max(lonwrap)
data_lat_min = min(lats)
data_lat_max = max(lats)

new_lons = np.arange(data_lon_min - 1.0, data_lon_max + 1.0, 1.0)
new_lats = np.arange(data_lat_min - 1.0, data_lat_max + 1.0, 1.0)
x, y = m(*np.meshgrid(new_lons[:], new_lats[:]))

# Two-pass interpolation: bilinear first, then patch in nearest-neighbour
# values where the nearest-neighbour result is masked
data_bl = interp(data, lonwrap[:], lats[:], x, y, order=1)
data_nn = interp(data, lonwrap[:], lats[:], x, y, order=0)
data_bl[data_nn.mask == 1] = data_nn[data_nn.mask == 1]

# m.contourf(x,y,data_bl[:,:],cbounds,cmap=cmap,extend='both')
print(cbounds)
col = ('g', '#FFFF33', 'k', '#330066', '#6633FF')
m.contourf(x, y, data_bl[:, :], levels=cbounds, colors=col, extend='both')
m.contour(x, y, data_bl, cbounds, colors='k')
m.drawcoastlines()
fig.savefig("color.png", format="png")
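# The bilinear-then-nearest two-pass fill recurs throughout these snippets; a
# small helper makes the intent explicit. This is a sketch only, under the
# assumption that the input is a masked array on ascending lon/lat grids; the
# function name two_pass_interp is mine, not from the original code.
import numpy as np
from mpl_toolkits.basemap import interp

def two_pass_interp(data, lons, lats, x, y):
    """Bilinear interpolation, with nearest-neighbour values patched in
    wherever the nearest-neighbour pass reports a masked point (e.g. coasts)."""
    data_bl = interp(data, lons, lats, x, y, masked=True, order=1)
    data_nn = interp(data, lons, lats, x, y, masked=True, order=0)
    data_bl[data_nn.mask == 1] = data_nn[data_nn.mask == 1]
    return data_bl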