def test_max():
    out = xr.DataArray(df.i64.values.reshape((2, 2, 5)).max(axis=2).astype('f8').T,
                       coords=coords, dims=dims)
    assert_eq(c.points(df, 'x', 'y', ds.max('i32')), out)
    assert_eq(c.points(df, 'x', 'y', ds.max('i64')), out)
    assert_eq(c.points(df, 'x', 'y', ds.max('f32')), out)
    assert_eq(c.points(df, 'x', 'y', ds.max('f64')), out)
def test_max():
    out = xr.DataArray(df.i64.values.reshape((2, 2, 5)).max(axis=2).astype('f8').T,
                       coords=coords, dims=dims)
    assert_eq(c.points(ddf, 'x', 'y', ds.max('i32')), out)
    assert_eq(c.points(ddf, 'x', 'y', ds.max('i64')), out)
    assert_eq(c.points(ddf, 'x', 'y', ds.max('f32')), out)
    assert_eq(c.points(ddf, 'x', 'y', ds.max('f64')), out)
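For context, a self-contained sketch of what these tests exercise: a per-pixel maximum of a value column over a small canvas. The toy data and canvas size below are illustrative only; they are not the fixtures (c, df, ddf, coords, dims) used by the tests above.

import numpy as np
import pandas as pd
import datashader as ds

# Toy data: 20 points falling into a 2x2 grid of pixels, five points per pixel.
toy = pd.DataFrame({'x': np.repeat([0.25, 0.75], 10),
                    'y': np.tile(np.repeat([0.25, 0.75], 5), 2),
                    'i64': np.arange(20, dtype='i8')})
canvas = ds.Canvas(plot_width=2, plot_height=2,
                   x_range=(0, 1), y_range=(0, 1))
agg = canvas.points(toy, 'x', 'y', ds.max('i64'))  # per-pixel maximum of i64
print(agg.values)  # 2x2 array of the largest i64 value in each pixel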
def generate(self, bounds_polygon: sg.Polygon, raster_shape: Tuple[int, int],
             from_cache: bool = False, **kwargs) \
        -> Tuple[Geometry, np.ndarray, gpd.GeoDataFrame]:
    import colorcet
    import datashader as ds
    from holoviews.operation.datashader import rasterize
    import geoviews as gv
    from copy import deepcopy

    bounds = bounds_polygon.bounds
    polys_df = self.query_osm_polygons(bounds_polygon)
    bounded_census_wards = self._census_wards.cx[bounds[1]:bounds[3], bounds[0]:bounds[2]]

    # Find landuse polygons intersecting/within census wards and merge left
    census_df = gpd.overlay(polys_df, bounded_census_wards, how='intersection')
    # Estimate the population of landuse polygons from the density of the census ward they are within.
    # EPSG:4326 is *not* an equal area projection so would give gibberish areas;
    # project geometries to an equidistant/equal area projection instead.
    census_df['population'] = census_df['density'] * census_df['geometry'].to_crs('EPSG:3395').area
    census_df['ln_density'] = np.log(census_df['density'])

    # Construct the GeoViews Polygons
    gv_polys = gv.Polygons(census_df, kdims=['Longitude', 'Latitude'],
                           vdims=['population', 'ln_density', 'density']) \
        .opts(color='ln_density', cmap=colorcet.CET_L18, alpha=0.8,
              colorbar=True, colorbar_opts={'title': 'Log Population Density [ln(people/km^2)]'},
              show_legend=False, line_color='ln_density')

    if self.buffer_dist > 0:
        buffered_df = deepcopy(census_df)
        buffered_df.geometry = buffered_df.to_crs('EPSG:27700') \
            .buffer(self.buffer_dist).to_crs('EPSG:4326')
        buffered_polys = gv.Polygons(buffered_df, kdims=['Longitude', 'Latitude'],
                                     vdims=['name', 'density'])
        raster = rasterize(buffered_polys, aggregator=ds.max('density'),
                           width=raster_shape[0], height=raster_shape[1],
                           x_range=(bounds[1], bounds[3]), y_range=(bounds[0], bounds[2]),
                           dynamic=False)
    else:
        raster = rasterize(gv_polys, aggregator=ds.max('density'),
                           width=raster_shape[0], height=raster_shape[1],
                           x_range=(bounds[1], bounds[3]), y_range=(bounds[0], bounds[2]),
                           dynamic=False)
    # np.float is removed in recent NumPy; the builtin float is equivalent here.
    raster_grid = np.copy(list(raster.data.data_vars.items())[0][1].data.astype(float))
    return gv_polys, raster_grid, gpd.GeoDataFrame(census_df)
def water_hover_gen(self, x_range, FFT_Size='256', Percent_Overlap='50',
                    Num_Avgs='1', Window='blackmanharris'):
    width = 128
    height = 128
    fft_size = int(FFT_Size)
    if fft_size < 128:
        width = fft_size
        height = fft_size
    if self.hover_count > self.update_count:
        self.gen_spec_points(x_range, FFT_Size, Percent_Overlap, Num_Avgs, Window)
        self.hover_count += 1
    opts_hover = dict(tools=['hover'], alpha=0, hover_alpha=0.2, fill_alpha=0)
    agg = aggregate(self.points, width=width, height=height, dynamic=False,
                    aggregator=ds.max('PowerdB'),
                    x_sampling=self.step_size, y_sampling=1)
    return hv.QuadMesh(agg).options(**opts_hover)
def shade_mesh(vertices, triangles, cmap=colorcet.rainbow, **kwargs):
    """Shade a triangular mesh, returning the shaded image and a per-pixel hover aggregate."""
    if "plot_width" not in kwargs or "plot_height" not in kwargs:
        raise ValueError("Please provide plot_width and plot_height for the canvas.")
    if not isinstance(vertices, pd.DataFrame):
        vertices = pd.DataFrame(vertices, columns=["x", "y", "z", "patch_id"], copy=False)
    if not isinstance(triangles, pd.DataFrame):
        triangles = pd.DataFrame(triangles, columns=["v0", "v1", "v2"], copy=False)
    cvs = ds.Canvas(**kwargs)
    img = cvs.trimesh(vertices, triangles, interpolate="nearest")
    summary = ds.summary(id_info=ds.max("patch_id"))
    summary.column = "z"
    hover_agg = cvs.trimesh(vertices, triangles, agg=summary)
    res = tf.shade(img, cmap=cmap, how="linear", span=[0, len(cmap)]), hover_agg
    return res
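A minimal, hypothetical usage sketch of shade_mesh above; the mesh (two triangles sharing an edge) and the canvas size are made up purely for illustration.

import numpy as np

# Vertex columns are x, y, z, patch_id, matching the DataFrame built inside shade_mesh.
verts = np.array([[0.0, 0.0, 1.0, 0],
                  [1.0, 0.0, 2.0, 0],
                  [0.0, 1.0, 3.0, 1],
                  [1.0, 1.0, 4.0, 1]])
# Triangle columns are v0, v1, v2: integer indices into the vertex array.
tris = np.array([[0, 1, 2],
                 [1, 3, 2]])
img, hover_agg = shade_mesh(verts, tris, plot_width=200, plot_height=200)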
def make_image(data, time_range, y_range, size, scale=None, width=0):
    "Flatten the given range of the data into a 2d image"
    time_range = (time_range[0].timestamp() * 1e6,
                  time_range[1].timestamp() * 1e6)
    cvs = datashader.Canvas(x_range=time_range, y_range=y_range,
                            plot_width=size[0], plot_height=size[1],
                            y_axis_type=scale or "linear")
    # aggregate some useful measures
    agg_line = cvs.line(source=data["data"], x="t", y="value_r")
    agg_points = cvs.points(source=data["data"], x="t", y="value_r",
                            agg=datashader.summary(
                                count=datashader.count("value_r"),
                                vmean=datashader.mean("value_r"),
                                vmin=datashader.min("value_r"),
                                vmax=datashader.max("value_r")))
    color = data["info"].get("color", "red")
    image = datashader.transfer_functions.shade(agg_line, cmap=[color])
    if width > 0:
        image = datashader.transfer_functions.spread(image, px=width)
    # image = datashader.transfer_functions.spread(
    #     image, mask=np.matrix([[False, False, False],
    #                            [False, True, True],
    #                            [False, True, True]]))
    with timer("Making hover info"):
        indices = np.where(np.nanmax(agg_points["count"].values, axis=0))[0]
        vmin = np.take(np.nanmin(agg_points["vmin"].values, axis=0), indices)
        vmax = np.take(np.nanmax(agg_points["vmax"].values, axis=0), indices)
        # vmean = np.take(np.nanmax(agg_points["vmean"].values, axis=0), indices)
        # TODO: aggregating the mean is not quite this simple...
        timestamps = np.take(agg_points["x_axis"].values, indices)
        count = np.take(np.sum(agg_points["count"].values, axis=0), indices)
        desc = {
            "total_points": data["points"],
            "indices": indices.tolist(),
            "min": np.where(np.isnan(vmin), None, vmin).tolist(),
            "max": np.where(np.isnan(vmax), None, vmax).tolist(),
            "timestamp": [float(t) for t in timestamps],
            # "mean": np.where(np.isnan(vmean), None, vmean).tolist(),
            "count": np.where(np.isnan(count), None, count).tolist()
        }
    return image, desc
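A minimal sketch (synthetic data; names are illustrative) of the structure make_image relies on: ds.summary bundles several reductions into a single xarray.Dataset, so each named reduction can be indexed like agg_points["count"], agg_points["vmin"] and so on.

import numpy as np
import pandas as pd
import datashader as ds

df = pd.DataFrame({"t": np.arange(100.0), "value_r": np.random.randn(100)})
cvs = ds.Canvas(plot_width=20, plot_height=10)
agg = cvs.points(df, "t", "value_r",
                 agg=ds.summary(count=ds.count("value_r"),
                                vmin=ds.min("value_r"),
                                vmax=ds.max("value_r")))
# Each reduction becomes a named variable on the same pixel grid.
print(agg["count"].shape, agg["vmin"].dims, float(agg["vmax"].max()))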
def tests_datashader():
    import datashader as ds
    import datashader.transfer_functions as tf
    import pandas as pd
    df = pd.read_csv(
        '/Users/iregon/C3S/dessaps/test_data/imma_converter/observations-sst-2014-6.psv',
        usecols=[6, 7, 14], sep="|", skiprows=0)
    # NOTE: `cvs` was undefined in the original snippet; a global lat/lon canvas
    # (matching the one defined later in this file) is assumed here.
    cvs = ds.Canvas(plot_width=360 * 10, plot_height=180 * 10,
                    x_range=(-180, 180), y_range=(-90, 90))
    agg_mean = cvs.points(df, 'longitude', 'latitude', ds.mean('observation_value'))
    agg_max = cvs.points(df, 'longitude', 'latitude', ds.max('observation_value'))
    agg_min = cvs.points(df, 'longitude', 'latitude', ds.min('observation_value'))
    agg_count = cvs.points(df, 'longitude', 'latitude', ds.count('observation_value'))
descriptors[descriptor] = dict()
for vari in vars_in:
    descriptors[descriptor][vari] = dict()

i = 1
for yr in range(y_ini, y_end + 1):
    for mo in range(1, 13):
        logging.info('{0}-{1}'.format(yr, mo))
        mm = '{:02d}'.format(mo)
        # Read monthly data (header and observed vars) to df indexed by report_id
        df_mo = read_monthly_data()
        # For each observed variable in df, compute stats and aggregate to its monthly composite.
        for vari in vars_in:
            if mm in descriptors['counts'][vari].keys():
                descriptors['counts'][vari][mm] = descriptors['counts'][vari][mm] + cvs.points(
                    df_mo[['latitude', 'longitude', vari]], 'longitude', 'latitude', ds.count(vari))
                descriptors['max'][vari][mm] = np.fmax(
                    descriptors['max'][vari][mm],
                    cvs.points(df_mo[['latitude', 'longitude', vari]], 'longitude', 'latitude', ds.max(vari)))
                descriptors['min'][vari][mm] = np.fmin(
                    descriptors['min'][vari][mm],
                    cvs.points(df_mo[['latitude', 'longitude', vari]], 'longitude', 'latitude', ds.min(vari)))
                # All this mess, because addition propagates nan's in xarrays!
                mean_mm = cvs.points(df_mo[['latitude', 'longitude', vari]], 'longitude', 'latitude', ds.mean(vari))
                max_frame = np.fmax(descriptors['ave'][vari][mm], mean_mm)
                min_frame = np.fmin(descriptors['ave'][vari][mm], mean_mm)
                descriptors['ave'][vari][mm] = 0.5 * max_frame + 0.5 * min_frame
            else:
                # <class 'xarray.core.dataarray.DataArray'>
                descriptors['counts'][vari][mm] = cvs.points(
                    df_mo[['latitude', 'longitude', vari]], 'longitude', 'latitude', ds.count(vari))
                descriptors['max'][vari][mm] = cvs.points(
                    df_mo[['latitude', 'longitude', vari]], 'longitude', 'latitude', ds.max(vari))
                descriptors['min'][vari][mm] = cvs.points(
                    df_mo[['latitude', 'longitude', vari]], 'longitude', 'latitude', ds.min(vari))
                mean_mm = cvs.points(df_mo[['latitude', 'longitude', vari]], 'longitude', 'latitude', ds.mean(vari))
                descriptors['ave'][vari][mm] = mean_mm
            # Also add monthly stats to the global aggregate
            if 'global' in descriptors['counts'][vari].keys():
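The "addition propagates nan's" comment above is the reason the snippet combines grids with np.fmax/np.fmin rather than arithmetic. A minimal sketch of the difference (toy arrays, not the real aggregates):

import numpy as np
import xarray as xr

a = xr.DataArray([1.0, np.nan, 3.0])
b = xr.DataArray([2.0, 5.0, np.nan])
print((a + b).values)        # [ 3. nan nan]  -> NaN propagates through arithmetic
print(np.fmax(a, b).values)  # [ 2.  5.  3.]  -> np.fmax ignores NaN where the other value is valid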
    if black:
        img = tf.set_background(img, 'black')
    return img


def tests_datashader():
    import datashader as ds
    import datashader.transfer_functions as tf
    import pandas as pd
    df = pd.read_csv('/Users/iregon/C3S/dessaps/test_data/imma_converter/observations-sst-2014-6.psv',
                     usecols=[6, 7, 14], sep="|", skiprows=0)
    # NOTE: `cvs` was undefined in the original snippet; a global lat/lon canvas
    # (matching the one constructed below) is assumed here.
    cvs = ds.Canvas(plot_width=360 * 10, plot_height=180 * 10,
                    x_range=(-180, 180), y_range=(-90, 90))
    agg_mean = cvs.points(df, 'longitude', 'latitude', ds.mean('observation_value'))
    agg_max = cvs.points(df, 'longitude', 'latitude', ds.max('observation_value'))
    agg_min = cvs.points(df, 'longitude', 'latitude', ds.min('observation_value'))
    agg_count = cvs.points(df, 'longitude', 'latitude', ds.count('observation_value'))
    # tf.shade(agg.where(agg > 0), cmap=["lightblue", "darkblue"])
    # img = tf.shade(agg.where(agg > 0), cmap=['green', 'yellow', 'red'], how='linear', span=[275, 305])
    df = pd.read_csv('/Users/iregon/C3S/dessaps/test_data/imma_converter/observations-sst-2014-6.psv',
                     usecols=[6, 7, 14], sep="|", skiprows=0)
    bounds = dict(x_range=(-180, 180), y_range=(-90, 90))
    plot_width = 360 * 10
    plot_height = 180 * 10
    canvas = ds.Canvas(plot_width=plot_width, plot_height=plot_height, **bounds)
    agg_mean = canvas.points(df, 'longitude', 'latitude', ds.max('observation_value'))
    img = tf.shade(agg_mean, cmap=['green', 'yellow', 'red'], how='linear', span=[275, 305])
    utils.export_image(img=img, filename='Oct2431doshade.png', fmt=".png", background=None)
    points = hv.Points(df['observation_value'].values)
    img = points.hist()
def test_max():
    out = df.i32.values.reshape((2, 2, 5)).max(axis=2).T
    eq(c.points(ddf, 'x', 'y', agg=ds.max('i32')).agg, out)
    eq(c.points(ddf, 'x', 'y', agg=ds.max('i64')).agg, out.astype('i8'))
    eq(c.points(ddf, 'x', 'y', agg=ds.max('f32')).agg, out.astype('f4'))
    eq(c.points(ddf, 'x', 'y', agg=ds.max('f64')).agg, out.astype('f8'))
def image(self, state):
    """Image colored by time since flash or flash density"""
    valid_time = _to_datetime(state.valid_time)

    # 15 minute/1 hour slice of data?
    window = dt.timedelta(minutes=60)  # 1 hour window
    paths = self.locator.find_period(valid_time, window)
    frame = self.loader.load(paths)
    frame = self.select_date(frame, valid_time, window)

    # Filter intra-cloud/cloud-ground rows
    if "intra-cloud" in state.variable.lower():
        frame = frame[frame["flash_type"] == "IC"]
    elif "cloud-ground" in state.variable.lower():
        frame = frame[frame["flash_type"] == "CG"]

    # EarthNetworks validity box (not needed if tiling algorithm)
    longitude_range = (26, 40)
    latitude_range = (-12, 4)
    x_range, y_range = geo.web_mercator(longitude_range, latitude_range)

    x, y = geo.web_mercator(frame["longitude"], frame["latitude"])
    frame["x"] = x
    frame["y"] = y
    pixels = 256
    canvas = datashader.Canvas(
        plot_width=pixels,
        plot_height=pixels,
        x_range=x_range,
        y_range=y_range,
    )

    if "density" in state.variable.lower():
        # N flashes per pixel
        agg = canvas.points(frame, "x", "y", datashader.count())
    else:
        frame["since_flash"] = self.since_flash(frame["date"], valid_time)
        agg = canvas.points(frame, "x", "y", datashader.max("since_flash"))

    # Note: DataArray objects are not JSON serializable, .values is the
    # same data cast as a numpy array
    x = agg.x.values.min()
    y = agg.y.values.min()
    dw = agg.x.values.max() - x
    dh = agg.y.values.max() - y
    # np.float is removed in recent NumPy; the builtin float is equivalent here.
    image = np.ma.masked_array(
        agg.values.astype(float),
        mask=np.isnan(agg.values)
    )
    if "density" in state.variable.lower():
        image[image == 0] = np.ma.masked  # Remove pixels with no data

    # Update color_mapper
    color_mapper = self.color_mappers["image"]
    if "density" in state.variable.lower():
        color_mapper.palette = bokeh.palettes.all_palettes["Spectral"][8]
        color_mapper.low = 0
        color_mapper.high = agg.values.max()
    else:
        color_mapper.palette = bokeh.palettes.all_palettes["RdGy"][8]
        color_mapper.low = 0
        color_mapper.high = 60 * 60  # 1 hour

    # Update tooltips
    for hover_tool in self.hover_tools["image"]:
        hover_tool.tooltips = self.tooltips(state.variable)
        hover_tool.formatters = self.formatters(state.variable)

    if "density" in state.variable.lower():
        units = "events"
    else:
        units = "seconds"

    data = {
        "x": [x],
        "y": [y],
        "dw": [dw],
        "dh": [dh],
        "image": [image],
    }
    meta_data = {
        "variable": [state.variable],
        "date": [valid_time],
        "units": [units],
        "window": [window.total_seconds()],
    }
    data.update(meta_data)
    self.sources["image"].data = data
def generate(self, bounds_polygon, raster_shape, from_cache: bool = False, hour: int = 8, **kwargs):
    import pandas as pd
    import numpy as np
    import geoviews as gv
    from copy import deepcopy
    from holoviews.operation.datashader import rasterize
    import colorcet
    import datashader as ds

    bounds = bounds_polygon.bounds
    # Hardcode residential tag in as this is always the first OSM query made to find the total area population
    residential_df = query_osm_polygons('landuse=residential', bounds_polygon)
    bounded_census_wards = self._census_wards.cx[bounds[1]:bounds[3], bounds[0]:bounds[2]]

    # Find landuse polygons intersecting/within census wards and merge left
    census_df = gpd.overlay(residential_df, bounded_census_wards, how='intersection')
    # Estimate the population of landuse polygons from the density of the census ward they are within.
    # EPSG:4326 is *not* an equal area projection so would give gibberish areas;
    # project geometries to an equidistant/equal area projection instead.
    census_reproj_areas = census_df['geometry'].to_crs('EPSG:3395').area * 1e-6  # km^2
    census_df['population'] = census_df['density'] * census_reproj_areas
    total_population = census_df['population'].sum()
    df = None

    # Ensure we have a large enough population for this approximation to be valid
    if total_population > 200000:
        hour_categories = self.nhaps_df.iloc[:, hour % 24]
        nhaps_category_gdfs = []
        for idx, categories in enumerate(nhaps_category_groupings):
            group_proportion = hour_categories.iloc[categories].sum()
            group_population = total_population * group_proportion
            # Residential areas are handled separately as they depend upon census data.
            # Otherwise, they would become uniform density, when we have census data providing us (unscaled) densities.
            if idx == 0:
                group_gdf = deepcopy(census_df)
                group_gdf['population'] = group_gdf['population'] * group_proportion
                group_gdf['density'] = group_gdf['population'] / census_reproj_areas
                group_gdf['ln_density'] = np.log(group_gdf['density'])
            else:
                group_gdfs = [query_osm_polygons(tag, bounds_polygon) for tag in nhaps_group_tags[idx]]
                group_gdf = gpd.GeoDataFrame(pd.concat(group_gdfs, ignore_index=True), crs='EPSG:4326')
                areas = group_gdf.to_crs(epsg=3395).geometry.area * 1e-6  # km^2
                group_density = group_population / areas.sum()
                group_gdf['density'] = group_density
                group_gdf['ln_density'] = np.log(group_gdf['density'])
                group_gdf['population'] = group_density * areas
            nhaps_category_gdfs.append(group_gdf)

        nhaps_category_gdf = gpd.GeoDataFrame(pd.concat(nhaps_category_gdfs, ignore_index=True), crs='EPSG:4326')

        # Construct the GeoViews Polygons
        gv_polys = gv.Polygons(nhaps_category_gdf, kdims=['Longitude', 'Latitude'],
                               vdims=['population', 'ln_density', 'density']) \
            .opts(color='ln_density', cmap=colorcet.CET_L18, alpha=0.6,
                  colorbar=True, colorbar_opts={'title': 'Log Population Density [ln(people/km^2)]'},
                  show_legend=False, line_color='ln_density')
        if self.buffer_dist > 0:
            buffered_df = deepcopy(nhaps_category_gdf)
            buffered_df.geometry = buffered_df.to_crs('EPSG:27700') \
                .buffer(self.buffer_dist).to_crs('EPSG:4326')
            buffered_polys = gv.Polygons(buffered_df, kdims=['Longitude', 'Latitude'],
                                         vdims=['density'])
            raster = rasterize(buffered_polys, aggregator=ds.max('density'),
                               width=raster_shape[0], height=raster_shape[1],
                               x_range=(bounds[1], bounds[3]), y_range=(bounds[0], bounds[2]),
                               dynamic=False)
        else:
            raster = rasterize(gv_polys, aggregator=ds.max('density'),
                               width=raster_shape[0], height=raster_shape[1],
                               x_range=(bounds[1], bounds[3]), y_range=(bounds[0], bounds[2]),
                               dynamic=False)
        df = nhaps_category_gdf
    else:
        census_df['ln_density'] = np.log(census_df['density'])

        # Construct the GeoViews Polygons
        gv_polys = gv.Polygons(census_df, kdims=['Longitude', 'Latitude'],
                               vdims=['population', 'ln_density', 'density']) \
            .opts(color='ln_density', cmap=colorcet.CET_L18, alpha=0.8,
                  colorbar=True, colorbar_opts={'title': 'Log Population Density [ln(people/km^2)]'},
                  show_legend=False, line_color='ln_density')
        if self.buffer_dist > 0:
            buffered_df = deepcopy(census_df)
            buffered_df.geometry = buffered_df.to_crs('EPSG:27700') \
                .buffer(self.buffer_dist).to_crs('EPSG:4326')
            buffered_polys = gv.Polygons(buffered_df, kdims=['Longitude', 'Latitude'],
                                         vdims=['name', 'density'])
            raster = rasterize(buffered_polys, aggregator=ds.max('density'),
                               width=raster_shape[0], height=raster_shape[1],
                               x_range=(bounds[1], bounds[3]), y_range=(bounds[0], bounds[2]),
                               dynamic=False)
        else:
            raster = rasterize(gv_polys, aggregator=ds.max('density'),
                               width=raster_shape[0], height=raster_shape[1],
                               x_range=(bounds[1], bounds[3]), y_range=(bounds[0], bounds[2]),
                               dynamic=False)
        df = census_df

    raster_grid = np.copy(list(raster.data.data_vars.items())[0][1].data.astype(float))
    return gv_polys, raster_grid, gpd.GeoDataFrame(df)