def post_process_3di(full_path, dst_basefilename='_step%d'):
    """
    Simple version: do not use AHN tiles to do the calculation.

    This method is quite fast, but the result has squares.

    Input: full path of the .nc netcdf file.
    Output: png + pgw files on disk (specified by dst_basefilename).
    """
    print 'post processing %s...' % full_path
    data = Data(full_path)  # NetCDF data

    for timestep in range(data.num_timesteps):
        print 'Working on timestep %d...' % timestep
        ma_3di = data.to_masked_array(data.depth, timestep)
        ds_3di = to_dataset(ma_3di, data.geotransform)

        # Light-to-dark blue colormap for water depth.
        cdict = {
            'red': ((0.0, 170./256, 170./256),
                    (0.5, 65./256, 65./256),
                    (1.0, 4./256, 4./256)),
            'green': ((0.0, 200./256, 200./256),
                      (0.5, 120./256, 120./256),
                      (1.0, 65./256, 65./256)),
            'blue': ((0.0, 255./256, 255./256),
                     (0.5, 221./256, 221./256),
                     (1.0, 176./256, 176./256)),
        }
        colormap = mpl.colors.LinearSegmentedColormap('something', cdict, N=1024)
        min_value, max_value = 0.0, 4.0
        normalize = mpl.colors.Normalize(vmin=min_value, vmax=max_value)
        rgba = colormap(normalize(ma_3di), bytes=True)

        dst_filename = dst_basefilename % timestep
        Image.fromarray(rgba).save(dst_filename + '.png', 'PNG')
        write_pgw(dst_filename + '.pgw', ds_3di)
        # Alternative output formats:
        # gdal.GetDriverByName('Gtiff').CreateCopy(dst_filename + '.tif', ds_3di)
        # gdal.GetDriverByName('AAIGrid').CreateCopy(dst_filename + '.asc', ds_3di)

    return data.num_timesteps
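
# Usage sketch (not part of the original module): the .nc path and the output
# prefix below are hypothetical examples, not values taken from this project.
#
#   num_timesteps = post_process_3di('subgrid_map.nc',
#                                    dst_basefilename='/tmp/flood_step%d')
#   # Produces /tmp/flood_step0.png + /tmp/flood_step0.pgw, and so on; the
#   # .pgw world file georeferences each .png on the 3Di result grid.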

def post_process_detailed_3di(full_path):
    """
    Make detailed images using a 0.5m height map.
    """
    print 'post processing %s...' % full_path
    data = Data(full_path)  # NetCDF data

    # TODO: Find out which AHN tiles
    for timestep in range(data.num_timesteps):
        print 'Working on timestep %d...' % timestep
        ma_3di = data.to_masked_array(data.depth, timestep)
        ds_3di = to_dataset(ma_3di, data.geotransform)

        # testing
        ahn_indices = models.AhnIndex.get_ahn_indices(ds_3di)
        print ahn_indices
        for ahn_index in ahn_indices:
            print 'reading ahn data... %s' % ahn_index
            ahn_index.get_ds()

        filename_base = '_step%d' % timestep
        cdict = {
            'red': ((0.0, 51./256, 51./256),
                    (0.5, 237./256, 237./256),
                    (1.0, 83./256, 83./256)),
            'green': ((0.0, 114./256, 114./256),
                      (0.5, 245./256, 245./256),
                      (1.0, 83./256, 83./256)),
            'blue': ((0.0, 54./256, 54./256),
                     (0.5, 170./256, 170./256),
                     (1.0, 83./256, 83./256)),
        }
        colormap = mpl.colors.LinearSegmentedColormap('something', cdict, N=1024)
        min_value, max_value = 0.0, 4.0
        normalize = mpl.colors.Normalize(vmin=min_value, vmax=max_value)
        rgba = colormap(normalize(ma_3di), bytes=True)
        Image.fromarray(rgba).save(filename_base + '.png', 'PNG')

def post_process_3di(full_path):
    """
    Simple version: do not use AHN tiles to do the calculation.

    This method is quite fast, but the result has squares.
    """
    print 'post processing %s...' % full_path
    data = Data(full_path)  # NetCDF data

    # TODO: Find out which AHN tiles
    for timestep in range(data.num_timesteps):
        print 'Working on timestep %d...' % timestep
        ma_3di = data.to_masked_array(data.depth, timestep)
        ds_3di = to_dataset(ma_3di, data.geotransform)

        filename_base = '_step%d' % timestep
        cdict = {
            'red': ((0.0, 51./256, 51./256),
                    (0.5, 237./256, 237./256),
                    (1.0, 83./256, 83./256)),
            'green': ((0.0, 114./256, 114./256),
                      (0.5, 245./256, 245./256),
                      (1.0, 83./256, 83./256)),
            'blue': ((0.0, 54./256, 54./256),
                     (0.5, 170./256, 170./256),
                     (1.0, 83./256, 83./256)),
        }
        colormap = mpl.colors.LinearSegmentedColormap('something', cdict, N=1024)
        min_value, max_value = 0.0, 4.0
        normalize = mpl.colors.Normalize(vmin=min_value, vmax=max_value)
        rgba = colormap(normalize(ma_3di), bytes=True)
        Image.fromarray(rgba).save(filename_base + '.png', 'PNG')
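
# A small sketch of how the colormap lines above behave (sample values made up
# for illustration): Normalize(vmin=0.0, vmax=4.0) maps a 2.0 value to 0.5,
# the middle anchor of cdict, and bytes=True yields a uint8 RGBA array that
# PIL's Image.fromarray accepts; masked cells receive the colormap's "bad"
# color, which is fully transparent by default.
#
#   normalize = mpl.colors.Normalize(vmin=0.0, vmax=4.0)
#   rgba = colormap(normalize(np.ma.array([0.0, 2.0, 4.0])), bytes=True)
#   # rgba.shape == (3, 4), rgba.dtype == np.uint8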

def post_process_detailed_3di(full_path, dst_basefilename='_step%d',
                              region=None, region_extent=None,
                              gridsize=None, gridsize_divider=2):
    """
    Make detailed images using a 0.5m height map.

    region_extent = None, or (x0, y0, x1, y1) in RD.
    """
    print 'post processing (detailed) %s...' % full_path
    data = Data(full_path, step_divider=gridsize_divider,
                gridsize=gridsize)  # NetCDF data

    # A place to store the AHN tiles. Let's hope 150 tiles will fit into memory.
    ahn_ma = {}

    # Determine the optional region polygon instead of the whole extent.
    region_mask = None
    if region:
        # (Over)write region_extent; beware: region.geom.extent is in WGS84.
        region_extent_lonlat = region.geom.extent
        x0, y0 = wgs84_to_rd(region_extent_lonlat[0], region_extent_lonlat[1])
        x1, y1 = wgs84_to_rd(region_extent_lonlat[2], region_extent_lonlat[3])
        region_extent = (x0, y0, x1, y1)

    region_polygon = None
    if region_extent is not None:
        region_polygon = raster.polygon_from_extent(region_extent)

    for timestep in range(data.num_timesteps):
        print 'Working on timestep %d...' % timestep
        # Water levels for this timestep from the netcdf result file.
        ma_3di = data.to_masked_array(data.level, timestep)
        ma_result = np.ma.zeros((data.NY, data.NX), fill_value=-999)
        ds_3di = to_dataset(ma_3di, data.geotransform)

        if region is not None and region_mask is None:
            # Fill region_mask once; it is the same for every timestep.
            region_geo = (RD, data.geotransform)
            region_mask = 1 - raster.get_mask(region.geom, ma_3di.shape,
                                              region_geo)

        # Find out which AHN tiles are needed.
        if region_polygon is not None:
            ahn_indices = models.AhnIndex.get_ahn_indices(
                polygon=region_polygon)
        else:
            ahn_indices = models.AhnIndex.get_ahn_indices(ds=ds_3di)

        for ahn_count, ahn_index in enumerate(ahn_indices):
            # Can be 150 tiles! -> is now 15.
            if ahn_index.bladnr not in ahn_ma:
                ahn_key = 'ahn_220::%s::%02f::%02f::::' % (
                    ahn_index.bladnr, data.XS, data.YS)
                new_ahn_ma = cache.get(ahn_key)
                if new_ahn_ma is None:
                    print 'reading ahn data...(%d) %s' % (
                        ahn_count, str(ahn_index))
                    ahn_ds = ahn_index.get_ds()
                    ahn_temp = to_masked_array(ahn_ds)
                    # *2 because every AHN step is 0.5 meters: subsample to
                    # the result grid size and flatten.
                    new_ahn_ma = ahn_temp[0::data.YS*2,
                                          0::data.XS*2].flatten()
                    cache.set(ahn_key, new_ahn_ma, 86400)
                else:
                    cache.set(ahn_key, new_ahn_ma, 86400)  # re-cache
                ahn_ma[ahn_index.bladnr] = new_ahn_ma

            # Water height minus AHN height = depth, for the cells covered
            # by this tile.
            result_index = data.to_index(
                int(ahn_index.x - 500), int(ahn_index.x + 500),
                int(ahn_index.y - 625), int(ahn_index.y + 625))
            print result_index
            try:
                ma_result[result_index] = (
                    ma_3di[result_index] - ahn_ma[ahn_index.bladnr])
            except Exception:
                print 'something went wrong with ma_result (should not happen)'
                traceback.print_exc(file=sys.stdout)

        if region_mask is not None:
            ma_result.mask = region_mask
        # Make all values <= 0 transparent.
        ma_result = np.ma.masked_where(ma_result <= 0, ma_result)

        # Light-to-dark blue colormap for water depth.
        cdict = {
            'red': ((0.0, 170./256, 170./256),
                    (0.5, 65./256, 65./256),
                    (1.0, 4./256, 4./256)),
            'green': ((0.0, 200./256, 200./256),
                      (0.5, 120./256, 120./256),
                      (1.0, 65./256, 65./256)),
            'blue': ((0.0, 255./256, 255./256),
                     (0.5, 221./256, 221./256),
                     (1.0, 176./256, 176./256)),
        }
        colormap = mpl.colors.LinearSegmentedColormap('something', cdict,
                                                      N=1024)
        min_value, max_value = 0.0, 1.0
        normalize = mpl.colors.Normalize(vmin=min_value, vmax=max_value)
        rgba = colormap(normalize(ma_result), bytes=True)

        dst_filename = dst_basefilename % timestep
        Image.fromarray(rgba).save(dst_filename + '.png', 'PNG')
        write_pgw(dst_filename + '.pgw', ds_3di)
        # Alternative output formats:
        # gdal.GetDriverByName('Gtiff').CreateCopy(dst_filename + '.tif', ds_3di)
        # gdal.GetDriverByName('AAIGrid').CreateCopy(dst_filename + '.asc', ds_3di)

    return data.num_timesteps
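
# Usage sketch for the detailed version (not part of the original module): the
# .nc path, output prefix and RD extent below are hypothetical placeholders.
#
#   post_process_detailed_3di(
#       'subgrid_map.nc',
#       dst_basefilename='/tmp/detailed_step%d',
#       region_extent=(120000.0, 480000.0, 125000.0, 486250.0))  # RD coords
#
# region_extent on its own only limits which AHN tiles are looked up; passing
# region= (an object whose .geom extent is in WGS84) also masks the output
# outside the region geometry.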