def test_gmap_transformed():
    dem = GeoTiff(get_demo_file('hef_srtm.tif'))
    dem.set_subset(margin=-100)

    dem = mercator_grid(center_ll=(10.76, 46.798444),
                        extent=(10000, 7000))

    i, j = dem.ij_coordinates
    g = GoogleVisibleMap(x=i, y=j, crs=dem, size_x=500, size_y=400)
    img = g.get_vardata()

    m = Map(dem, countries=False)

    with pytest.raises(ValueError):
        m.set_data(img)

    m.set_lonlat_contours(interval=0.025)
    m.set_shapefile(get_demo_file('Hintereisferner.shp'),
                    linewidths=2, edgecolor='darkred')
    m.set_rgb(img, g.grid)

    fig, ax = plt.subplots(1, 1)
    m.visualize(ax=ax, addcbar=False)
    plt.tight_layout()
    return fig
def _plot_city(self, ds):
    """Plot the results of gyms in a city.

    Parameters
    ----------
    ds : xr.Dataset
        xr.Dataset with coordinate: gyms.

    Returns
    -------
    matplotlib.pyplot.figure
        Creates the city plot.
    """
    # Create extent of map [W, E, S, N]
    extent = [ds['longitude'].values.min(), ds['longitude'].values.max(),
              ds['latitude'].values.min(), ds['latitude'].values.max()]

    # Setup colors
    colors = cm.nipy_spectral(np.linspace(0, 1, len(ds['gyms'])))

    # Get google map. Scale is for more details. maptype can be
    # 'terrain' or 'satellite'
    g = GoogleVisibleMap(x=[extent[0], extent[1]], y=[extent[2], extent[3]],
                         scale=4, maptype='terrain')
    ggl_img = g.get_vardata()

    # Plot map
    fig, ax = plt.subplots(1, 1, figsize=(20, 20))
    sm = Map(g.grid, factor=1, countries=False)
    sm.set_rgb(ggl_img)
    sm.visualize(ax=ax)

    # Plot gym points
    for i in range(0, len(ds['gyms'])):
        # Create label
        self.regcount = i
        self._rank()  # Add self.rank
        _label = self.rank + ' ' + ds['gyms'].values[i] + ': ' + \
            ds['athlete_names'].values[i] + ' (' + str(ds[self.how].values[i]) + ')'
        x, y = sm.grid.transform(ds['longitude'].values[i],
                                 ds['latitude'].values[i])
        ax.scatter(x, y, color=colors[i], s=400, label=_label)

    plt.title(self.fname + ' | ' + self.city + ' | ' + self.column + ' | ' + self.how)

    # Shrink current axis by 20% to make room for the legend
    box = ax.get_position()
    ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
    ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))

    plt.savefig(self.plotdir + self.fname + '_' + self.city + '_' + self.column + '_' +
                self.how + '.png', bbox_inches='tight')
    # plt.savefig(self.plotdir + self.fname + '_' + self.city + '_' + self.column +
    #             self.how + '.png', bbox_inches='tight', format='eps')
    plt.show()
def main(argc, argv):
    country = 'NGA'
    fig, (ax1, ax2, ax3, ax4) = plt.subplots(1, 4, figsize=(15, 10))
    cmap = plt.cm.magma

    bbox = box(3.35 - 0.1, 6.5 - 0.1, 3.35 + 0.1, 6.5 + 0.1)
    geo = gpd.GeoDataFrame({'geometry': bbox}, index=[0], crs='EPSG:4326')
    coords = geo['geometry']

    # Worldpop:
    wp_file = os.path.join(get_project_path(), "data/worldpop",
                           '%s/%s_ppp_2015.tif' % (country, country.lower()))
    with rasterio.open(wp_file) as pop:
        X = pop.read(1)
        out_img, out_transform = mask.mask(pop, coords, crop=True)
    pop_wp = np.float32(out_img[0].copy())
    pop_wp[np.where(pop_wp == -99999)] = 0
    im1 = ax1.imshow(pop_wp, cmap=cmap, norm=LogNorm())

    # Humdata:
    fb_file = os.path.join(get_project_path(), "data/humdata",
                           '%s/population_%s_2018-10-01.tif' % (country, country.lower()))
    with rasterio.open(fb_file) as pop:
        X = pop.read(1)
        out_img, out_transform = mask.mask(pop, coords, crop=True)
    pop_fb = np.float32(out_img[0].copy())
    im2 = ax2.imshow(pop_fb, cmap=cmap, norm=LogNorm())

    # GRID3:
    grid_file = os.path.join(get_project_path(), "data/grid",
                             '%s/%s_population.tif' % (country, country.lower()))
    with rasterio.open(grid_file) as pop:
        X = pop.read(1)
        mean_pop = np.mean(X)  # renamed from `sum` to avoid shadowing the builtin
        out_img, out_transform = mask.mask(pop, coords, crop=True)
    pop_grid = np.float32(out_img[0].copy())
    im3 = ax3.imshow(pop_grid, cmap=cmap, norm=LogNorm())

    # Satellite:
    g = GoogleVisibleMap(x=[3.35 - 0.1, 3.35 + 0.1],
                         y=[6.5 - 0.1, 6.5 + 0.1],
                         size_x=500, size_y=500,
                         # size_x=img_arr1.shape[0], size_y=img_arr1.shape[1],
                         scale=4,  # scale is for more details
                         maptype='satellite')  # try out also: 'terrain'
    ggl_img = g.get_vardata()
    ax4.imshow(ggl_img)

    plt.show()
    return 0
if row['Indice_Corregido'] not in indexes:
    indexes.append(row['Indice_Corregido'])
    lats.append(row['Latitud_Centro'])
    longs.append(row['Longitud_Centro'])

new_df = pd.DataFrame({'Indice': indexes, 'Latitud': lats, 'Longitud': longs})
new_df['Ubicacion'] = new_df.agg(
    'https://www.google.com/maps/search/?api=1&query={0[Latitud]},{0[Longitud]}'
    .format, axis=1)
new_df.sort_values(by=['Indice'], inplace=True)
new_df.to_csv('Indices.csv', index=False)

g = GoogleVisibleMap(
    x=[-76.533, -76.525],
    y=[3.340, 3.375],
    scale=2,  # scale is for more details
    maptype='roadmap')

f, ax = plt.subplots(1, figsize=(12, 12))
ggl_img = g.get_vardata()
sm = Map(g.grid, factor=1, countries=False)
sm.set_rgb(ggl_img)
sm.visualize(ax=ax)

n = new_df['Indice'].to_numpy()
x = new_df['Longitud'].to_numpy()
y = new_df['Latitud'].to_numpy()
tipo = 'OUTDOOR'
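# --- Hedged continuation sketch (not part of the original snippet) ---
# Shows one way the index points prepared above (n, x, y) could be drawn on
# the salem map `sm` / axis `ax`. Marker size, color and label offsets are
# illustrative assumptions; Grid.transform and ax.annotate are existing API.
xx, yy = sm.grid.transform(x, y)  # lon/lat (WGS84) -> map coordinates
ax.scatter(xx, yy, color='darkred', s=60, zorder=3)
for xi, yi, label in zip(xx, yy, n):
    ax.annotate(str(label), (xi, yi), textcoords='offset points',
                xytext=(4, 4), fontsize=9)
plt.show()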
dist = np.asarray(dist)
dist = dist.reshape((-1, 2))

a = a[:, 4:]
a = a.reshape((-1, (npart * n_levels)))

min_x = np.amin(lon_stag)
max_x = np.amax(lon_stag)
min_y = np.amin(lat_stag)
max_y = np.amax(lat_stag)

g = GoogleVisibleMap(x=[min_x, max_x], y=[min_y, max_y],
                     scale=2,  # scale is for more details
                     maptype='hybrid')  # try out also: 'terrain,hybrid'
ggl_img = g.get_vardata()

column = 0

header = "ncols %s\n" % loading2D.shape[1]
header += "nrows %s\n" % loading2D.shape[0]
header += "xllcorner " + str(lon_stag[0]) + "\n"
header += "yllcorner " + str(lat_stag[1]) + "\n"
header += "cellsize " + str(spacing_lat) + "\n"
header += "NODATA_value 0"

for i in range(npart):
""" import numpy as np import pandas as pd import salem from salem import get_demo_file, DataLevels, GoogleVisibleMap, Map import matplotlib.pyplot as plt # prepare the figure f, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 5)) # read the shapefile and use its extent to define a ideally sized map shp = salem.read_shapefile(get_demo_file('rgi_kesselwand.shp')) # I you need to do a lot of maps you might want # to use an API key and set it here with key='YOUR_API_KEY' g = GoogleVisibleMap(x=[shp.min_x, shp.max_x], y=[shp.min_y, shp.max_y], maptype='satellite') # try out also: 'terrain' # the google static image is a standard rgb image ggl_img = g.get_vardata() ax1.imshow(ggl_img) ax1.set_title('Google static map') # make a map of the same size as the image (no country borders) sm = Map(g.grid, factor=1, countries=False) sm.set_shapefile(shp) # add the glacier outlines sm.set_rgb(ggl_img) # add the background rgb image sm.visualize(ax=ax2) # plot it ax2.set_title('GPR measurements') # read the point GPR data and add them to the plot df = pd.read_csv(get_demo_file('gtd_ttt_kesselwand.csv'))
import numpy as np
import pandas as pd
import salem
from salem import get_demo_file, DataLevels, GoogleVisibleMap, Map
import matplotlib.pyplot as plt

# prepare the figure
f, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 5))

# read the shapefile and use its extent to define an ideally sized map
shp = salem.read_shapefile(get_demo_file('rgi_kesselwand.shp'))
# If you need to do a lot of maps you might want
# to use an API key and set it here with key='YOUR_API_KEY'
g = GoogleVisibleMap(x=[shp.min_x, shp.max_x], y=[shp.min_y, shp.max_y],
                     maptype='satellite')  # try out also: 'terrain'

# the google static image is a standard rgb image
ggl_img = g.get_vardata()
ax1.imshow(ggl_img)
ax1.set_title('Google static map')

# make a map of the same size as the image (no country borders)
sm = Map(g.grid, factor=1, countries=False)
sm.set_shapefile(shp)  # add the glacier outlines
sm.set_rgb(ggl_img)  # add the background rgb image
sm.set_scale_bar(location=(0.88, 0.94))  # add scale
sm.visualize(ax=ax2)  # plot it
ax2.set_title('GPR measurements')
import numpy as np
import pandas as pd
import salem
from salem import get_demo_file, DataLevels, GoogleVisibleMap, Map
import matplotlib.pyplot as plt

# prepare the figure
f, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 5))

# read the shapefile and use its extent to define an ideally sized map
shp = salem.read_shapefile(get_demo_file('rgi_kesselwand.shp'))
# If you need to do a lot of maps you might want
# to use an API key and set it here with key='YOUR_API_KEY'
g = GoogleVisibleMap(
    x=[shp.min_x, shp.max_x], y=[shp.min_y, shp.max_y],
    scale=2,  # scale is for more details
    maptype='satellite')  # try out also: 'terrain'

# the google static image is a standard rgb image
ggl_img = g.get_vardata()
ax1.imshow(ggl_img)
ax1.set_title('Google static map')

# make a map of the same size as the image (no country borders)
sm = Map(g.grid, factor=1, countries=False)
sm.set_shapefile(shp)  # add the glacier outlines
sm.set_rgb(ggl_img)  # add the background rgb image
sm.set_scale_bar(location=(0.88, 0.94))  # add scale
sm.visualize(ax=ax2)  # plot it
ax2.set_title('GPR measurements')
from pathlib import Path

import pandas as pd
import xarray as xr
import matplotlib.pyplot as plt

import salem
from salem import get_demo_file, DataLevels, GoogleVisibleMap, Map

data_dir = Path("/Volumes/Lees_Extend/data/ecmwf_sowc/")
path = data_dir / "chirps_kenya.nc"

ds = xr.open_dataset(path, decode_times=False)
times = pd.date_range("1900-01-01", "2019-12-31", freq="M")
ds["time"] = times
da = ds.precip
# sds = salem.open_xr_dataset(path, **dict(decode_times=False))

fig, ax = plt.subplots()
map_ = da.mean(dim="time").salem.quick_map(ax=ax)
map_  # display the quick map (notebook-style)

# GoogleVisibleMap needs the motionless package (e.g. pip install motionless)

# prepare the figure
f, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 5))

# read the shapefile and use its extent to define an ideally sized map
shp = salem.read_shapefile(get_demo_file("rgi_kesselwand.shp"))
g = GoogleVisibleMap(
    x=[shp.min_x, shp.max_x],
    y=[shp.min_y, shp.max_y],
    scale=2,  # scale is for more details
    maptype="satellite",
)  # try out also: 'terrain'
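# --- Hedged sketch of the usual next steps (mirrors the other snippets in this
# collection; not part of the original notebook cell) ---
ggl_img = g.get_vardata()          # the google static image is a standard rgb image
ax1.imshow(ggl_img)
ax1.set_title("Google static map")

sm = Map(g.grid, factor=1, countries=False)  # map of the same size as the image
sm.set_shapefile(shp)              # add the glacier outlines
sm.set_rgb(ggl_img)                # add the background rgb image
sm.visualize(ax=ax2)               # plot it
ax2.set_title("salem Map")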
def spatial_bin_plot(self, category, quantity, bin_step=20, color='viridis'):
    # scale dimension
    scaleDim = 5

    # Binning based on spatial coordinates
    data = self.data
    # filter the data by category
    data = data[data[self.ccategorical].isin(category)]

    # This maximum constant is what we can get from the google map static
    # image; values greater or lower than these can produce errors
    maxconst = (-86.82743293, 86.92841107, -176.1111116, 176.4292565)
    minlat = data[self.clatitude].min() if data[self.clatitude].min() > maxconst[0] else maxconst[0]
    maxlat = data[self.clatitude].max() if data[self.clatitude].max() < maxconst[1] else maxconst[1]
    minlong = data[self.clongitude].min() if data[self.clongitude].min() > maxconst[2] else maxconst[2]
    maxlong = data[self.clongitude].max() if data[self.clongitude].max() < maxconst[3] else maxconst[3]
    # print(minlat, maxlat, minlong, maxlong)

    g = GoogleVisibleMap(x=[minlong, maxlong], y=[minlat, maxlat],
                         maptype='terrain')  # try out also: 'satellite'
    # the google static image is a standard rgb image
    ggl_img = g.get_vardata()
    # ax.imshow(ggl_img)

    # make a map of the same size as the image (no country borders)
    sm = Map(g.grid, factor=1, countries=False)
    sm.set_rgb(ggl_img)  # add the background rgb image
    # print(minlat, maxlat, minlong, maxlong)

    # make ranges for latitude/longitude
    # set step
    xstep = bin_step
    ystep = bin_step
    latBin = np.linspace(data[self.clatitude].min(), data[self.clatitude].max(), xstep)
    longBin = np.linspace(data[self.clongitude].min(), data[self.clongitude].max(), ystep)
    # logger.debug(latBin)
    # print(longBin)

    quantBinArr = []
    quantmeanArr = []
    quantsumArr = []
    latStepLen = latBin[1] - latBin[0]
    longStepLen = longBin[1] - longBin[0]
    # treesWithoutVacant = trees.filter_ne('Tree Species', 'vacant site large')

    for x in range(latBin.size):
        if x < latBin.size - 1:
            latSelMin = latBin[x] if latBin[x] < latBin[x + 1] else latBin[x + 1]
            latSelMax = latBin[x] if latBin[x] > latBin[x + 1] else latBin[x + 1]
            # latData = data[(data['latitude'] > latBin[x]) & (data['latitude'] <= latBin[x+1])]
            latData = data[(data[self.clatitude] > latSelMin)
                           & (data[self.clatitude] <= latSelMax)]
            latmean = latSelMin + (latStepLen / 2)
            for y in range(longBin.size):
                if y < longBin.size - 1:
                    lonSelMin = longBin[y] if longBin[y] < longBin[y + 1] else longBin[y + 1]
                    lonSelMax = longBin[y] if longBin[y] > longBin[y + 1] else longBin[y + 1]
                    # print(lonSelMin, lonSelMax)
                    # areaData = latData[(latData['longitude'] > longBin[y]) & latData['longitude'] < longBin[y+1]]
                    areaData = latData[(latData[self.clongitude] > lonSelMin)
                                       & (latData[self.clongitude] <= lonSelMax)]
                    # print(areaData.shape)

                    # group the areaData by category to get the mean and sum per category
                    meanCat = areaData.groupby(self.ccategorical)[quantity].mean().sort_values(ascending=False)
                    sumCat = areaData.groupby(self.ccategorical)[quantity].sum().sort_values(ascending=False)
                    # alternative: get mean/sum for the whole area bin
                    # quantmean = areaData[quantity].mean()
                    # quantsum = areaData[quantity].sum()

                    if areaData.shape[0] > 0:
                        longmean = lonSelMin + (longStepLen / 2)
                        quantmeanArr.append(meanCat.max())
                        quantsumArr.append(sumCat.max())
                        # print(meanCat)
                        # print(sumCat)
                        quantBinArr.append({
                            'lat': latmean,
                            'long': longmean,
                            'mean': meanCat,
                            'sum': sumCat
                        })
                    # alternative bookkeeping for the whole-bin statistics:
                    # quantmeanArr.append(quantmean)
                    # quantsumArr.append(quantsum)
                    # longmean = (longBin[y] + longBin[y+1]) / 2
                    # quantBinArr.append({'lat': latmean, 'long': longmean,
                    #                     'quantmean': quantmean, 'quantsum': quantsum})

    dataFig = plt.figure(figsize=(15, 15))
    loc_ax = dataFig.add_subplot(1, 1, 1)
    sm.visualize(ax=loc_ax)  # plot it
    # loc_ax.set_title('Distribution of Most Common Trees across Spatial Binning: {}x{} square'.format(xstep, ystep))
    loc_ax.set_xlabel('Longitude')
    loc_ax.set_ylabel('Latitude')

    minMean = np.array(quantmeanArr).min()
    maxMean = np.array(quantmeanArr).max()
    # calculate the scale: split the mean range into scaleDim levels
    scale = (maxMean - minMean) / scaleDim

    # define a color representation for each category
    cm = plt.get_cmap(color)
    colorArr = {}
    norm = mpl.colors.Normalize(vmin=0, vmax=len(category))
    patch_array = []
    for i in range(len(category)):
        color = cm(norm(i))
        colorArr[category[i]] = color
        patch_array.append(mpl.patches.Patch(color=color, label=category[i]))

    for quantBin in quantBinArr:
        x, y = sm.grid.transform(quantBin['long'], quantBin['lat'])
        scatter = loc_ax.scatter(x, y,
                                 s=(quantBin['mean'].values[0] / scale) * (longStepLen / 2) * scaleDim,
                                 c=colorArr[quantBin['mean'].index[0]],
                                 alpha=.75,
                                 edgecolors='none')
        # tooltip = plugins.PointHTMLTooltip(scatter, ['test'])
        # plugins.connect(dataFig, tooltip)

    scale_array = []
    scale_label = []
    # Make scale legend
    for i in range(scaleDim):
        # patch_array.append(mpl.patches.Patch(color='none', label=i))
        label = '{0:.2f} < x <= {1:.2f}'.format(minMean + (scale * i),
                                                minMean + (scale * (i + 1)))
        scatter = plt.scatter([], [],
                              s=(i + 1) * (longStepLen / 2) * scaleDim,
                              marker='o', label=label, color='grey')
        # scatter = plt.plot([], [], markersize=(i+1)/scaleDim, marker='o', label=label)
        scale_array.append(scatter)
        scale_label.append(label)
        # patch_array.append(scatter.get_patches())
        # patch_array.append(mpl.lines.Line2D([], [], markersize=(i+1)/scaleDim, marker='o', label=label))

    # Legend and title
    # legend2 = mpl.pyplot.legend(handles=scale_array, loc=1)
    legend2 = mpl.pyplot.legend(scale_array, scale_label,
                                scatterpoints=1,
                                loc='upper right',
                                ncol=1,
                                bbox_to_anchor=(1, 1)
                                # , fontsize=8
                                )
    # [patch_array.append(x) for x in legend2.get_patches()]
    # legend1 = mpl.pyplot.legend(handles=patch_array, loc=4, bbox_to_anchor=(1, 0.5))
    loc_ax.legend(handles=patch_array, loc='center left', bbox_to_anchor=(1, 0.5))
    mpl.pyplot.gca().add_artist(legend2)
    loc_ax.set_title('Quantity {} across Spatial Binning'.format(quantity))
    # mpld3.enable_notebook()

    return None