doc = 'Convert FEWS flood forecast data to hazard layers for InaSAFE'
parser = argparse.ArgumentParser(description=doc)
parser.add_argument('filename', type=str,
                    help='NetCDF filename from FEWS')
parser.add_argument('--hours', metavar='h', type=int, default=24,
                    help='Number of hours to use from forecast')
parser.add_argument('--regions', metavar='regions', type=str,
                    help=('Administrative areas to be flagged as '
                          'flooded or not'))
args = parser.parse_args()
print args
print

tif_filename = convert_netcdf2tif(args.filename, args.hours, verbose=True)

# Tag each polygon with Y if it contains at least one pixel
# exceeding a specific threshold (e.g. 0.3m).
if args.regions is not None:
    print 'Tagging %s as "affected" or not' % args.regions

    polygons = read_layer(args.regions)
    grid = read_layer(tif_filename)
    res = tag_polygons_by_grid(polygons, grid,
                               threshold=0.3, tag='affected')

    # Keep only those that are affected (speeds things up a lot,
    # but will reduce overall bounding box for buildings under
    # consideration)
    # geom = res.get_geometry()
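
# For orientation, a minimal sketch of what the tagging step does. This is
# an illustrative stand-in, not InaSAFE's actual tag_polygons_by_grid; it
# assumes only helpers used elsewhere in this codebase
# (grid.to_vector_points(), is_inside_polygon and the Vector class).
def tag_polygons_by_grid_sketch(polygons, grid, threshold=0.3, tag='affected'):
    points, values = grid.to_vector_points()
    geom = polygons.get_geometry()
    data = polygons.get_data()
    for i, polygon in enumerate(geom):
        # Tag the polygon if any grid point exceeding the threshold
        # falls inside it
        data[i][tag] = any(val > threshold and
                           is_inside_polygon(point, polygon)
                           for point, val in zip(points, values))
    return Vector(geometry=geom, data=data, projection=polygons.projection)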
def test_tag_regions_by_flood(self):
    """Regions can be tagged correctly with data from flood forecasts.
    """
    threshold = 0.3
    label = 'affected'

    tif_filename = convert_netcdf2tif(self.nc_filename, 24, verbose=False)
    region_filename = os.path.join(TESTDATA, 'rw_jakarta_singlepart.shp')

    grid = read_layer(tif_filename)
    polygons = read_layer(region_filename)

    res = tag_polygons_by_grid(polygons, grid,
                               threshold=threshold, tag=label)
    os.remove(tif_filename)

    geom = res.get_geometry()
    data = res.get_data()

    # Check correctness of affected regions
    affected_geom = []
    affected_data = []
    for i, d in enumerate(data):
        if d[label]:
            g = geom[i]
            affected_geom.append(g)
            affected_data.append(d)

    assert len(affected_geom) == 37
    assert len(affected_data) == 37

    # Check that every grid point exceeding threshold lies inside
    # one of the polygons marked as affected
    P, V = grid.to_vector_points()
    flooded_points_geom = []
    flooded_points_data = []
    for i, point in enumerate(P):
        val = V[i]
        if val > threshold:
            # A flooded point must be in one of the tagged polygons
            found = False
            for polygon in affected_geom:
                if is_inside_polygon(point, polygon):
                    found = True
                    break

            msg = ('No affected polygon was found for point [%f, %f] '
                   'with value %f' % (point[0], point[1], val))
            verify(found, msg)

            # Collect flooded points for visualisation
            flooded_points_geom.append(point)
            flooded_points_data.append({'depth': val})

    # Generate files for visual inspection. See
    # https://raw.github.com/AIFDR/inasafe/master/files/flood_tagging_test.png
    # https://github.com/AIFDR/inasafe/blob/master/files/flood_tagging_test.tgz
    tmp_filename = unique_filename(prefix='grid', suffix='.tif')
    grid.write_to_file(tmp_filename)
    #print 'Grid written to', tmp_filename

    tmp_filename = unique_filename(prefix='regions', suffix='.shp')
    res.write_to_file(tmp_filename)
    #print 'Regions written to', tmp_filename

    tmp_filename = unique_filename(prefix='flooded_points', suffix='.shp')
    v = Vector(geometry=flooded_points_geom, data=flooded_points_data)
    v.write_to_file(tmp_filename)
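
# For reference, a point-in-polygon check of the kind is_inside_polygon
# performs can be written as a standard ray-casting test. This is an
# illustrative sketch, not InaSAFE's implementation (which also handles
# boundary cases such as points lying exactly on an edge).
def point_in_polygon_sketch(point, polygon):
    """Return True if point [x, y] lies inside polygon [[x0, y0], ...]."""
    x, y = point
    inside = False
    n = len(polygon)
    for i in range(n):
        x1, y1 = polygon[i]
        x2, y2 = polygon[(i + 1) % n]
        # Count crossings of a horizontal ray extending right from the point
        if (y1 > y) != (y2 > y):
            x_cross = x1 + (y - y1) * (x2 - x1) / (y2 - y1)
            if x < x_cross:
                inside = not inside
    return inside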
def test_convert_netcdf2tif(self):
    """NetCDF flood forecasts can be converted to tif.
    """
    # First check that input file is as expected
    from Scientific.IO.NetCDF import NetCDFFile
    fid = NetCDFFile(self.nc_filename)

    x = fid.variables['x'][:]  # Longitudes
    y = fid.variables['y'][:]  # Latitudes
    inundation_depth = fid.variables['Inundation_Depth'][:]

    T = inundation_depth.shape[0]  # Number of time steps
    M = inundation_depth.shape[1]  # Steps in the y direction
    N = inundation_depth.shape[2]  # Steps in the x direction

    assert T == 71
    assert M == 162  # Latitudes
    assert N == 160  # Longitudes
    assert len(x) == N
    assert len(y) == M

    # Pick a max value in an area known to have flooding
    # (approximately picked with ncview)
    max_136_51 = max(inundation_depth[:, 136, 51])
    assert numpy.allclose(max_136_51, 1.58)
    #print max_136_51
    #print 'y[136]', y[136]  # Lat
    #print 'x[51]', x[51]  # Lon
    assert numpy.allclose(x[51], 106.7777)
    assert numpy.allclose(y[136], -6.124634)

    # Run script over all hours
    all_hours_tif = convert_netcdf2tif(self.nc_filename, T, verbose=False)
    msg = 'Expected file %s did not exist' % all_hours_tif
    assert os.path.isfile(all_hours_tif), msg

    # Read resulting layer and check
    L = read_layer(all_hours_tif)
    D = L.get_data()
    os.remove(all_hours_tif)

    # Check point taking up-down flip into account
    assert numpy.allclose(D[-136 - 1, 51], max_136_51)

    # Run script for one hour and check first band
    one_hour_tif = convert_netcdf2tif(self.nc_filename, 1, verbose=False)
    D = read_layer(one_hour_tif).get_data()
    assert numpy.allclose(max(D.flat), 0.74)  # Checked band 1 with QGIS

    # Characterisation test of location of max inundation
    assert D[28, 53] == max(D.flat)
    assert numpy.allclose(y[28], -6.3199)
    assert numpy.allclose(x[53], 106.781)
    os.remove(one_hour_tif)
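
# The up-down flip checked above comes from raster row order: the NetCDF
# grid stores row 0 at the southernmost latitude, while a GeoTIFF stores
# row 0 at the northernmost, so row j in the NetCDF becomes row M - 1 - j
# in the tif (hence D[-136 - 1, 51]). A sketch of the core transform,
# assuming the aggregation is a max over the first `hours` time steps
# (consistent with the values asserted above, but an assumption here):
import numpy

def netcdf_to_grid_sketch(inundation_depth, hours):
    # Maximum inundation depth over the requested time steps
    A = numpy.max(inundation_depth[:hours, :, :], axis=0)
    # Flip rows so row 0 is the northern edge, as GeoTIFF expects
    return numpy.flipud(A)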
def process_flood_event(netcdf_file=None, hours=24):
    """Process a netcdf file into a forecast file.

    :param netcdf_file: The netcdf file. If None, it is downloaded.

    :param hours: Positive integer determining how many bands to use.
    :type hours: int
    """
    print 'Start flood forecasting'

    if netcdf_file is None:
        # Retrieve data from the web
        netcdf_file = download_file_url(netcdf_url, forecast_directory)
    else:
        netcdf_file = download_file_url(netcdf_url, name=netcdf_file,
                                        download_directory=forecast_directory)
    print 'Do flood forecasting for %s ...' % netcdf_file

    ## Check if a forecast file has already been created
    #is_exist, polyforecast_filepath = get_result_file_name(netcdf_file,
    #                                                       hours)
    #
    #if is_exist:
    #    print 'Current flood forecast has already been created.'
    #    print 'You can find it at %s' % polyforecast_filepath
    #    return

    # Convert to tif
    #tif_file = polyforecast_filepath.replace('_regions.shp', '.tif')
    tif_filename = convert_netcdf2tif(netcdf_file, hours, verbose=False,
                                      output_dir=flood_directory)
    print 'tif_file', tif_filename

    tif_file = read_layer(tif_filename)

    # If a forecast with the same name already exists, skip the forecasting
    polyforecast_filepath = tif_filename.replace('.tif', '_regions.shp')
    zip_filename = polyforecast_filepath.replace('.shp', '.zip')
    if os.path.isfile(zip_filename):
        print ('File %s already exists, skipping the forecast'
               % zip_filename)
    else:
        polygons = read_layer(polygons_path)
        result = tag_polygons_by_grid(polygons, tif_file,
                                      threshold=0.3, tag='affected')

        new_geom = result.get_geometry()
        new_data = result.get_data()

        date = os.path.split(netcdf_file)[-1].split('_')[0]
        v = Vector(geometry=new_geom, data=new_data,
                   projection=result.projection,
                   keywords={'category': 'hazard',
                             'subcategory': 'flood',
                             'title': ('%d hour flood forecast regions '
                                       'in Jakarta at %s' % (hours, date))})
        print 'polyforecast_filepath', polyforecast_filepath
        v.write_to_file(polyforecast_filepath)
        print 'Wrote tagged polygons to %s' % polyforecast_filepath

    # Zip all files
    if os.path.isfile(zip_filename):
        print 'Has been zipped to %s' % zip_filename
    else:
        zip_shp(polyforecast_filepath, extra_ext=['.keywords'],
                remove_file=True)
        print 'Zipped to %s' % zip_filename
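
# zip_shp bundles a shapefile's sidecar files into one archive. A minimal
# sketch of what it plausibly does, assuming the standard sidecar
# extensions plus any extras passed in; the real helper lives in
# InaSAFE's utilities and may differ in detail.
import os
import zipfile

def zip_shp_sketch(shp_path, extra_ext=None, remove_file=False):
    basename, _ = os.path.splitext(shp_path)
    extensions = ['.shp', '.shx', '.dbf', '.prj'] + (extra_ext or [])
    zip_path = basename + '.zip'
    zf = zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED)
    for ext in extensions:
        path = basename + ext
        if os.path.isfile(path):
            # Store each component flat in the archive
            zf.write(path, os.path.basename(path))
            if remove_file:
                os.remove(path)
    zf.close()
    return zip_path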