def test_rasterValuesAtPoints(self):
    # We need to make a .tif to test this function.
    self.make_me_a_tif()

    # Get the range of the tif
    tifRange = su.getRasterExtent('PointData_TestData.tif')

    # Now try to get some point values -- note they will be rounded to the
    # nearest cell
    xA = numpy.array([0., 10.3, 50.9, 100.]) + tifRange[0] + 0.5
    yA = numpy.array([0., 20.1, 75.1, 100.]) + tifRange[2] + 0.5
    z_predicted = numpy.round(xA) + numpy.round(yA) - \
        tifRange[0] - tifRange[2] - 1.0
    InDat = numpy.vstack([xA, yA]).transpose()

    z_fitted = su.rasterValuesAtPoints(
        InDat, rasterFile='PointData_TestData.tif')
    try:
        assert(numpy.allclose(z_fitted, z_predicted))
    except Exception:
        raise Exception(
            'Error could be in rasterValuesAtPoints or in Make_Geotif')

    # Try with bilinear interpolation
    z_fitted = su.rasterValuesAtPoints(
        InDat, rasterFile='PointData_TestData.tif',
        interpolation='bilinear')
    z_predicted = xA + yA - tifRange[0] - tifRange[2] - 1.0
    try:
        assert(numpy.allclose(z_fitted, z_predicted))
    except Exception:
        raise Exception(
            'Error could be in rasterValuesAtPoints or in Make_Geotif')

    return
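# For reference, a standalone usage sketch of su.rasterValuesAtPoints
# (hedged: 'my_dem.tif' and the coordinates are hypothetical placeholders;
# the call signature is taken from the test above):
#
#   xy = numpy.array([[307010.2, 6193020.7],
#                     [307055.0, 6193080.1]])
#   z_near = su.rasterValuesAtPoints(xy, rasterFile='my_dem.tif')
#   z_bilin = su.rasterValuesAtPoints(xy, rasterFile='my_dem.tif',
#                                     interpolation='bilinear')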
def test_getRasterExtent(self):
    self.make_me_a_tif()

    extentOut = su.getRasterExtent('PointData_TestData.tif')
    assert(numpy.allclose(extentOut[0], 307000. - 0.5))
    assert(numpy.allclose(extentOut[1], 307100. + 0.5))
    assert(numpy.allclose(extentOut[2], 6193000. - 0.5))
    assert(numpy.allclose(extentOut[3], 6193100. + 0.5))

    extentOut = su.getRasterExtent(
        'PointData_TestData.tif', asPolygon=True)
    assert(numpy.allclose(extentOut[0][0], 307000. - 0.5))
    assert(numpy.allclose(extentOut[3][0], 307000. - 0.5))
    assert(numpy.allclose(extentOut[1][0], 307100. + 0.5))
    assert(numpy.allclose(extentOut[2][0], 307100. + 0.5))
    assert(numpy.allclose(extentOut[0][1], 6193000. - 0.5))
    assert(numpy.allclose(extentOut[1][1], 6193000. - 0.5))
    assert(numpy.allclose(extentOut[2][1], 6193100. + 0.5))
    assert(numpy.allclose(extentOut[3][1], 6193100. + 0.5))
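# The asserts above imply the two output formats of su.getRasterExtent
# (a summary inferred from the test, not an authoritative spec):
#
#   su.getRasterExtent(f)                  -> [xmin, xmax, ymin, ymax]
#   su.getRasterExtent(f, asPolygon=True)  -> [[xmin, ymin], [xmax, ymin],
#                                              [xmax, ymax], [xmin, ymax]]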
def F(x, y):
    """This is the function returned by composite_quantity_setting_function
       It can be passed to set_quantity
    """
    isSet = numpy.zeros(len(x))  # 0/1 - record if each point has been set
    quantityVal = x * 0 + numpy.nan  # Function return value

    # Record points which evaluated to nan on their first preference
    # dataset.
    was_ever_nan = (x * 0).astype(int)

    lpf = len(poly_fun_pairs)
    if lpf <= 0:
        raise Exception('Must have at least 1 fun-poly-pair')

    # Make an array of 'transformed' spatial coordinates, for checking
    # polygon inclusion
    xll = domain.geo_reference.xllcorner
    yll = domain.geo_reference.yllcorner
    xy_array_trans = numpy.vstack([x + xll, y + yll]).transpose()

    # Check that none of the pi polygons [except perhaps the last] is 'All'
    for i in range(lpf - 1):
        if poly_fun_pairs[i][0] == 'All':
            # This is only ok if all the other poly_fun_pairs are None
            remaining_poly_fun_pairs_are_None = \
                [poly_fun_pairs[j][0] is None for j in range(i + 1, lpf)]
            if not all(remaining_poly_fun_pairs_are_None):
                raise Exception('Can only have the last polygon = All')

    # Main Loop
    # Apply the fi inside the pi
    for i in range(lpf):
        fi = poly_fun_pairs[i][1]  # The function
        pi = poly_fun_pairs[i][0]  # The polygon

        # Quick exit
        if pi is None:
            continue

        ###################################################################
        # Get indices fInds of points in polygon pi which are not already
        # set
        ###################################################################
        if pi == 'All':
            # Get all unset points
            fInside = (1 - isSet)
            fInds = (fInside == 1).nonzero()[0]
        else:
            if pi == 'Extent':
                # Here fi MUST be a gdal-compatible raster
                if not (type(fi) == str):
                    msg = 'pi = "Extent" can only be used when fi is a ' +\
                          'raster file name'
                    raise Exception(msg)

                if not os.path.exists(fi):
                    msg = 'fi ' + str(fi) + ' is supposed to be a ' +\
                          'raster filename, but it could not be found'
                    raise Exception(msg)

                # Then we get the extent from the raster itself
                pi_path = su.getRasterExtent(fi, asPolygon=True)

                if verbose:
                    print 'Extracting extent from raster: ', fi
                    print 'Extent: ', pi_path

            elif (type(pi) == str) and os.path.isfile(pi):
                # pi is a file
                pi_path = su.read_polygon(pi)

            else:
                # pi is the actual polygon data
                pi_path = pi

            # Get the indices of unset points inside pi_path
            notSet = (isSet == 0.).nonzero()[0]
            fInds = inside_polygon(xy_array_trans[notSet, :], pi_path)
            fInds = notSet[fInds]

        if len(fInds) == 0:
            # No points found, move on
            continue

        ###################################################################
        # Evaluate fi at the points inside pi
        ###################################################################

        # We use various tricks to infer whether fi is a function,
        # a constant, a file (raster or csv), or an array
        if hasattr(fi, '__call__'):
            # fi is a function
            quantityVal[fInds] = fi(x[fInds], y[fInds])

        elif isinstance(fi, (int, long, float)):
            # fi is a numerical constant
            quantityVal[fInds] = fi * 1.0

        elif type(fi) is str and os.path.exists(fi):
            # fi is a file which is assumed to be
            # a gdal-compatible raster OR an x,y,z elevation file
            if os.path.splitext(fi)[1] in ['.txt', '.csv']:
                fi_array = su.read_csv_optional_header(fi)
                # Check the results
                if fi_array.shape[1] != 3:
                    print 'Treated input file ' + fi +\
                          ' as xyz array with an optional header'
                    msg = 'Array should have 3 columns -- x,y,value'
                    raise Exception(msg)

                newfi = make_nearestNeighbour_quantity_function(
                    fi_array, domain,
                    k_nearest_neighbours=default_k_nearest_neighbours)
                quantityVal[fInds] = newfi(x[fInds], y[fInds])

            else:
                # Treat the input file as a raster
                newfi = quantityRasterFun(
                    domain, fi,
                    interpolation=default_raster_interpolation)
                quantityVal[fInds] = newfi(x[fInds], y[fInds])

        elif type(fi) is numpy.ndarray:
            if fi.shape[1] != 3:
                msg = 'Array should have 3 columns -- x,y,value'
                raise Exception(msg)

            newfi = make_nearestNeighbour_quantity_function(
                fi, domain,
                k_nearest_neighbours=default_k_nearest_neighbours)
            quantityVal[fInds] = newfi(x[fInds], y[fInds])

        else:
            print 'Error with function from'
            print fi
            msg = 'Cannot make function from type ' + str(type(fi))
            raise Exception(msg)

        ###################################################################
        # Check for nan values
        ###################################################################
        nan_flag = 1 * numpy.isnan(quantityVal[fInds])
        nan_inds = nan_flag.nonzero()[0]
        was_ever_nan[fInds[nan_inds]] = 1

        if len(nan_inds) > 0:
            if nan_treatment == 'exception':
                msg = 'nan values generated by the poly_fun_pair at ' +\
                      'index ' + str(i) +\
                      ' in composite_quantity_setting_function. ' +\
                      'To allow these values to be set by later ' +\
                      'poly_fun pairs, pass the argument ' +\
                      'nan_treatment="fall_through" ' +\
                      'to composite_quantity_setting_function'
                raise Exception(msg)

            elif nan_treatment == 'fall_through':
                msg = 'WARNING: nan values generated by the ' +\
                      'poly_fun_pair at index ' + str(i) +\
                      ' in composite_quantity_setting_function. ' +\
                      'They will be passed to later poly_fun_pairs'
                if verbose:
                    print msg
                not_nan_inds = (1 - nan_flag).nonzero()[0]

                if len(not_nan_inds) > 0:
                    fInds = fInds[not_nan_inds]
                else:
                    # All values are nan
                    msg = '( Actually all the values were nan - ' +\
                          'Are you sure they should be? Possible error?)'
                    if verbose:
                        print msg
                    continue

            else:
                msg = 'Found nan values in ' +\
                      'composite_quantity_setting_function but ' +\
                      'nan_treatment is not a recognized value'
                raise Exception(msg)

        # Record that the points have been set
        isSet[fInds] = 1

        # Enforce clip_range
        if clip_range is not None:
            lower_bound = clip_range[i][0]
            upper_bound = clip_range[i][1]
            quantityVal[fInds] = numpy.maximum(
                quantityVal[fInds], lower_bound)
            quantityVal[fInds] = numpy.minimum(
                quantityVal[fInds], upper_bound)

    # End of loop

    # Find points which were nan on their first preference dataset + are
    # inside nan_interpolation_region_polygon. Then reinterpolate their
    # values from the other x, y, quantityVal points.
    if (nan_interpolation_region_polygon is not None) and\
       (was_ever_nan.sum() > 0):
        if nan_interpolation_region_polygon == 'All':
            points_to_reinterpolate = was_ever_nan.nonzero()[0]
        else:
            # nan_interpolation_region_polygon contains information on 1 or
            # more polygons.
            # Inside those polygons, we need to re-interpolate points which
            # first evaluated to nan
            possible_points_to_reint = was_ever_nan.nonzero()[0]
            points_to_reinterpolate = numpy.array([]).astype(int)

            for i in range(len(nan_interpolation_region_polygon)):
                nan_pi = nan_interpolation_region_polygon[i]

                # Ensure nan_pi = list of x,y points making a polygon
                if type(nan_pi) == str:
                    nan_pi = su.read_polygon(nan_pi)

                points_in_nan_pi = inside_polygon(
                    xy_array_trans[possible_points_to_reint, :], nan_pi)

                if len(points_in_nan_pi) > 0:
                    points_to_reinterpolate = numpy.hstack([
                        points_to_reinterpolate,
                        possible_points_to_reint[points_in_nan_pi]])

        if verbose:
            print 'Re-interpolating ', len(points_to_reinterpolate),\
                  ' points which were nan under their',\
                  ' first-preference and are inside the',\
                  ' nan_interpolation_region_polygon'

        if len(points_to_reinterpolate) > 0:
            msg = 'WARNING: nan interpolation is being applied. This ' +\
                  'should be done in serial prior to distributing the ' +\
                  'domain, as there is no parallel communication ' +\
                  'implemented yet [so parallel results might depend on ' +\
                  'the number of processes]'
            if verbose:
                print msg

        # Find the interpolation points = points not needing
        # reinterpolation
        ip = x * 0 + 1
        ip[points_to_reinterpolate] = 0
        number_of_ip = ip.sum()
        ip = ip.nonzero()[0]

        # Check that none of the ip points has a nan value
        nan_ip = (quantityVal[ip] != quantityVal[ip]).nonzero()[0]

        if len(nan_ip) > 0:
            print 'There are ', len(nan_ip), ' points outside the ',\
                  'nan_interpolation_region_polygon which have nan values.'
            print 'The user should ensure this does not happen.'
            print 'The points have the following coordinates:'
            print xy_array_trans[ip[nan_ip], :]
            msg = "There are nan points outside of " +\
                  "nan_interpolation_region_polygon, even after all " +\
                  "fall-through's"
            raise Exception(msg)

        if number_of_ip < default_k_nearest_neighbours:
            raise Exception('Too few non-nan points to interpolate from')

        # Make function for re-interpolation. Note this requires
        # x,y,z in georeferenced coordinates, whereas x,y are ANUGA
        # coordinates
        reinterp_F = make_nearestNeighbour_quantity_function(
            numpy.vstack([xy_array_trans[ip, 0], xy_array_trans[ip, 1],
                          quantityVal[ip]]).transpose(),
            domain,
            k_nearest_neighbours=default_k_nearest_neighbours)

        # Re-interpolate
        quantityVal[points_to_reinterpolate] = reinterp_F(
            x[points_to_reinterpolate], y[points_to_reinterpolate])
        isSet[points_to_reinterpolate] = 1

    # Check there are no remaining nan values
    if min(isSet) != 1:
        print 'Some points remain as nan, which is not allowed'
        unset_inds = (isSet != 1).nonzero()[0]
        lui = min(5, len(unset_inds))
        print 'There are ', len(unset_inds), ' such points'
        print 'Here are a few:'
        for i in range(lui):
            print x[unset_inds[i]] + xll, y[unset_inds[i]] + yll
        raise Exception('It seems the input data needs to be fixed')

    return quantityVal
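# Usage sketch for the closure above (hedged: 'domain',
# 'priority_dem.tif' and 'survey_points.csv' are hypothetical
# placeholders; the keyword names follow the messages and variables used
# inside F):
#
#   F = composite_quantity_setting_function(
#       poly_fun_pairs=[['Extent', 'priority_dem.tif'],
#                       [bounding_polygon, 'survey_points.csv'],
#                       ['All', -9999.]],
#       domain=domain,
#       nan_treatment='fall_through')
#   domain.set_quantity('elevation', F)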
def make_me_some_tifs(
        sww_file,
        bounding_polygon,
        proj4string,
        my_time_step='collected_max',
        tif_output_subdir='/TIFS/',
        cell_size=5.0,
        k_nearest_neighbours=1,
        make_highres_drape_plot=False,
        elevation_raster=None,
        depth_threshold=None,
        clip_polygon=None,
        clip_polygon_layer=None,
        creation_options=['COMPRESS=DEFLATE']):
    """
    ### INPUT DATA

    - sww_file -- Full path name of sww to read outputs from

    - bounding_polygon -- ANUGA's bounding polygon, or another polygon to
      clip the rasters to [in ANUGA's polygon format]

    - proj4string -- proj4 string defining the coordinate system

    - my_time_step -- option from util.Make_Geotif:
      use 'max' to plot the maxima;
      use [0, 5, 10] to plot the first, sixth and eleventh output time
      steps;
      use 'collected_max' to read the csv outputs from
      collect_max_quantities_operator

    - tif_output_subdir -- Write outputs to this folder inside the
      sww_file's directory (MUST INCLUDE TRAILING SLASH /)

    - cell_size -- Desired raster cell size (m)

    - k_nearest_neighbours -- use inverse distance weighted interpolation
      with this many neighbours

    - make_highres_drape_plot -- True/False, make a high-res drape plot?

    - elevation_raster -- Filename of elevation raster for the
      'high-res-drape' depth plot [from subtracting stage from high res
      topography]

    - depth_threshold -- Depth threshold for the 'high-res-drape' plot (m).
      If ANUGA's triangle has depth < depth_threshold, then depth=0 in all
      high-res cells in that triangle

    - clip_polygon -- Polygon to clip the 'high-res-drape' plot. Must be
      provided if make_highres_drape_plot==True (can use the bounding
      polygon or another choice)

    - clip_polygon_layer -- Layer for the above as used by gdal (usually
      the shapefile name without .shp)

    - creation_options -- list of gdal tif creation options

    ## OUTPUT

    Nothing is returned, but tifs are made in tif_output_subdir inside the
    sww_file's directory
    """

    # Convert utm_zone to proj4string:
    # p = Proj(proj='utm', south=(utm_zone < 0.),
    #          zone=abs(utm_zone), ellps='WGS84')
    # proj4string = p.srs

    tif_output_dir = os.path.dirname(sww_file) + tif_output_subdir

    # Make the geotifs
    if my_time_step == 'collected_max':
        # Read max quantity output files
        max_qfiles = glob.glob(os.path.dirname(sww_file) + '/*_UH_MAX.csv')
        if len(max_qfiles) == 0:
            raise Exception(
                'Cannot find any files containing collected maxima')

        for i in range(len(max_qfiles)):
            if i == 0:
                max_qs = numpy.genfromtxt(max_qfiles[i], delimiter=',')
            else:
                extra_data = numpy.genfromtxt(max_qfiles[i], delimiter=',')
                max_qs = numpy.vstack([max_qs, extra_data])

        # Make the geotiffs
        for (i, quant) in enumerate(
                ['stage', 'depth', 'velocity', 'depthIntegratedVelocity']):
            # FIXME: The interpolation function is remade for every
            # quantity, since only a 3 column array can be passed to
            # Make_Geotif. Could make it fast (do it only once) by
            # changing Make_Geotif
            tmp_arr = max_qs[:, [0, 1, i + 2]]
            util.Make_Geotif(
                tmp_arr,
                output_quantities=[quant + '_MAX'],
                CellSize=cell_size,
                proj4string=proj4string,
                verbose=True,
                bounding_polygon=bounding_polygon,
                output_dir=tif_output_dir,
                creation_options=creation_options,
                k_nearest_neighbours=k_nearest_neighbours)

        # Also plot elevation + friction
        # Try to reduce memory demands by only extracting first timestep
        fid = NetCDFFile(sww_file)

        # Make xy coordinates (note -- max_quantity_collector outputs might
        # have repeated x,y at parallel ghost cells)
        x_v = fid.variables['x'][:] + fid.xllcorner
        y_v = fid.variables['y'][:] + fid.yllcorner
        vols = fid.variables['volumes'][:]
        xc = (1. / 3.) * \
            (x_v[vols[:, 0]] + x_v[vols[:, 1]] + x_v[vols[:, 2]])
        yc = (1. / 3.) * \
            (y_v[vols[:, 0]] + y_v[vols[:, 1]] + y_v[vols[:, 2]])

        for (i, quant) in enumerate(['elevation_c', 'friction_c']):
            # Get the quantity if it exists
            if quant in fid.variables:
                quant_values = fid.variables[quant]
                # If multi time-steps, only get first timestep
                if len(quant_values.shape) > 1:
                    quant_values = quant_values[0, :]
            else:
                # Set quantity to nan if it is not stored
                quant_values = xc * 0. + numpy.nan

            tmp_arr = numpy.vstack([xc, yc, quant_values]).transpose()
            util.Make_Geotif(
                tmp_arr,
                output_quantities=[quant + '_INITIAL'],
                CellSize=cell_size,
                proj4string=proj4string,
                verbose=True,
                bounding_polygon=bounding_polygon,
                output_dir=tif_output_dir,
                creation_options=creation_options,
                k_nearest_neighbours=k_nearest_neighbours)

    else:
        util.Make_Geotif(
            sww_file,
            myTimeStep=my_time_step,
            output_quantities=['depth', 'stage', 'elevation', 'velocity',
                               'depthIntegratedVelocity', 'friction'],
            CellSize=cell_size,
            proj4string=proj4string,
            verbose=True,
            bounding_polygon=bounding_polygon,
            output_dir=tif_output_dir,
            creation_options=creation_options,
            k_nearest_neighbours=k_nearest_neighbours)

    # Early finish
    if not make_highres_drape_plot:
        return

    # Get extent of geotifs
    sample_rast = glob.glob(tif_output_dir + '*.tif')[0]
    raster_extent = su.getRasterExtent(sample_rast)

    # Make the resampled elevation data
    make_resampled_elevation(
        elevation_raster,
        raster_extent,
        cell_size,
        clip_polygon,
        clip_polygon_layer,
        proj4string,
        tif_output_dir)

    elevation = glob.glob(tif_output_dir + 'LIDAR_resampled*')[0]
    mask = glob.glob(tif_output_dir + 'Mask_resampled*')[0]

    if my_time_step == 'collected_max':
        depth = glob.glob(tif_output_dir + 'PointData_depth_*')[0]
        stage = glob.glob(tif_output_dir + 'PointData_stage_*')[0]
    else:
        depth = glob.glob(tif_output_dir + '*_depth_max.tif')[0]
        stage = glob.glob(tif_output_dir + '*_stage_max.tif')[0]

    # Call gdal_calc
    gdal_calc_command(stage, depth, elevation, mask, depth_threshold)

    return
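# Usage sketch (hedged: the sww path, polygon file and projection string
# are hypothetical placeholders; the call matches the signature above):
#
#   make_me_some_tifs(
#       sww_file='MODEL_OUTPUTS/run1/model.sww',
#       bounding_polygon=su.read_polygon('bounding_polygon.csv'),
#       proj4string='+proj=utm +zone=56 +south +ellps=WGS84 +units=m',
#       my_time_step='collected_max',
#       cell_size=5.0,
#       k_nearest_neighbours=1)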