def test_geodb_barefile_with_num(self):
    """Bare names in a geodb workspace get a numeric suffix and no extension."""
    with utils.WorkSpace(self.geodbworkspace):
        expected_raster = os.path.join(self.geodbworkspace, '_temp_test_7')
        result_raster = utils.create_temp_filename('test', filetype='raster', num=7)
        nt.assert_equal(result_raster, expected_raster)

        expected_shape = os.path.join(self.geodbworkspace, '_temp_test_22')
        result_shape = utils.create_temp_filename('test', filetype='shape', num=22)
        nt.assert_equal(result_shape, expected_shape)
def test_geodb_barefile(self):
    """Bare names in a geodb workspace get the temp prefix and no extension."""
    with utils.WorkSpace(self.geodbworkspace):
        # rasters and shapes produce the same extension-less name in a geodb
        for ftype in ('raster', 'shape'):
            expected = os.path.join(self.geodbworkspace, '_temp_test')
            nt.assert_equal(utils.create_temp_filename('test', filetype=ftype), expected)
def test_folderworkspace_barefile_with_num(self):
    """Numbered temp names in a folder workspace keep their file extensions."""
    with utils.WorkSpace(self.folderworkspace):
        raster_expected = os.path.join(self.folderworkspace, '_temp_test_14.tif')
        nt.assert_equal(
            utils.create_temp_filename('test', filetype='raster', num=14),
            raster_expected,
        )

        shape_expected = os.path.join(self.folderworkspace, '_temp_test_3.shp')
        nt.assert_equal(
            utils.create_temp_filename('test', filetype='shape', num=3),
            shape_expected,
        )
def test_folderworkspace_withsubfolder_with_num(self):
    """Numbered temp names should be created inside the given subfolder."""
    with utils.WorkSpace(self.folderworkspace):
        expected_raster = os.path.join(self.folderworkspace, 'subfolder', '_temp_test_1.tif')
        result_raster = utils.create_temp_filename(
            os.path.join('subfolder', 'test'), filetype='raster', num=1
        )
        nt.assert_equal(result_raster, expected_raster)

        expected_shape = os.path.join(self.folderworkspace, 'subfolder', '_temp_test_12.shp')
        result_shape = utils.create_temp_filename(
            os.path.join('subfolder', 'test'), filetype='shape', num=12
        )
        nt.assert_equal(result_shape, expected_shape)
def test_with_extension_folder_with_num(self):
    """Explicit extensions survive numbering in a folder workspace."""
    with utils.WorkSpace(self.folderworkspace):
        stem = 'test'

        expected_raster = os.path.join(self.folderworkspace, '_temp_test_4.tif')
        made_raster = utils.create_temp_filename(stem + '.tif', filetype='raster', num=4)
        nt.assert_equal(made_raster, expected_raster)

        expected_shape = os.path.join(self.folderworkspace, '_temp_test_4.shp')
        made_shape = utils.create_temp_filename(stem + '.shp', filetype='shape', num=4)
        nt.assert_equal(made_shape, expected_shape)
def test_with_extension_geodb_with_num(self):
    """
    Numbered temp names inside a geodatabase should have their file
    extensions stripped regardless of the requested filetype.
    """
    with utils.WorkSpace(self.folderworkspace):
        filename = os.path.join(self.geodbworkspace, 'test')

        # raster in a geodb: extension dropped, number appended
        known_raster = os.path.join(self.folderworkspace, self.geodbworkspace, '_temp_test_2000')
        temp_raster = utils.create_temp_filename(filename + '.tif', filetype='raster', num=2000)
        nt.assert_equal(temp_raster, known_raster)

        # shape in a geodb: extension dropped, number appended.
        # FIX: this case previously passed `filename + '.tif'` — a
        # copy-paste slip; the shape branch should feed a '.shp' name to
        # mirror the folder-workspace test. The expected value is
        # unchanged since geodb names carry no extension.
        known_shape = os.path.join(self.folderworkspace, self.geodbworkspace, '_temp_test_999')
        temp_shape = utils.create_temp_filename(filename + '.shp', filetype='shape', num=999)
        nt.assert_equal(temp_shape, known_shape)
def test_geodb_as_subfolder_with_num(self):
    """Numbered temp names inside a geodb-under-folder carry no extension."""
    with utils.WorkSpace(self.folderworkspace):
        geodb_file = os.path.join(self.geodbworkspace, 'test')

        expected_raster = os.path.join(self.folderworkspace, self.geodbworkspace, '_temp_test_5')
        nt.assert_equal(
            utils.create_temp_filename(geodb_file, filetype='raster', num=5),
            expected_raster,
        )

        expected_shape = os.path.join(self.folderworkspace, self.geodbworkspace, '_temp_test_99')
        nt.assert_equal(
            utils.create_temp_filename(geodb_file, filetype='shape', num=99),
            expected_shape,
        )
def test_with_extension_folder(self):
    """Explicit extensions are preserved in a folder workspace."""
    with utils.WorkSpace(self.folderworkspace):
        stem = 'test'

        expected_raster = os.path.join(self.folderworkspace, '_temp_test.tif')
        made_raster = utils.create_temp_filename(stem + '.tif', filetype='raster')
        nt.assert_equal(made_raster, expected_raster)

        expected_shape = os.path.join(self.folderworkspace, '_temp_test.shp')
        made_shape = utils.create_temp_filename(stem + '.shp', filetype='shape')
        nt.assert_equal(made_shape, expected_shape)
def test_geodb_as_subfolder(self):
    """Temp names inside a geodb-under-folder get no extension."""
    with utils.WorkSpace(self.folderworkspace):
        geodb_file = os.path.join(self.geodbworkspace, 'test')
        # raster and shape expectations are identical inside a geodb
        expected = os.path.join(self.folderworkspace, self.geodbworkspace, '_temp_test')

        nt.assert_equal(utils.create_temp_filename(geodb_file, filetype='raster'), expected)
        nt.assert_equal(utils.create_temp_filename(geodb_file, filetype='shape'), expected)
def test_folderworkspace_withsubfolder(self):
    """Temp names should be created inside the requested subfolder."""
    with utils.WorkSpace(self.folderworkspace):
        subfolder_file = os.path.join('subfolder', 'test')

        expected_raster = os.path.join(self.folderworkspace, 'subfolder', '_temp_test.tif')
        nt.assert_equal(
            utils.create_temp_filename(subfolder_file, filetype='raster'),
            expected_raster,
        )

        expected_shape = os.path.join(self.folderworkspace, 'subfolder', '_temp_test.shp')
        nt.assert_equal(
            utils.create_temp_filename(subfolder_file, filetype='shape'),
            expected_shape,
        )
def _prep_flooder_input(elev=None, surge=None, slr=None, num=None, flood_output=None):
    """
    Prepare the basic inputs to the :meth:`.analyze` method.

    Parameters
    ----------
    elev, slr : float, optional
        Final elevation and sea level rise associated with the
        scenario.
    surge : str, optional
        The name of the storm surge associated with the scenario
        (e.g., MHHW, 100yr).
    num : int, optional
        Scenario counter passed through to
        ``utils.create_temp_filename`` to keep temp names unique.
    flood_output : str
        Path/filename to where the final flooded areas will be saved.

    Returns
    -------
    elevation : float
        Flood elevation for this scenario.
    title : str
        The basis of the header to be displayed as an arcpy.Message.
    temp_fname : str
        Path/name of the temporary file where the intermediate output
        will be saved.

    """
    if elev is not None:
        # custom scenario: the elevation was given directly
        elevation = float(elev)
        title = "Analyzing flood elevation: {} ft".format(elevation)
    else:
        # standard scenario: elevation = sea level rise + storm surge
        elevation = float(slr + SURGES[surge])
        title = "Analyzing flood elevation: {} ft ({}, {})".format(elevation, surge, slr)

    if flood_output is None:
        raise ValueError('must provide a `flood_output`')

    # embed the elevation (dots -> underscores) in the intermediate filename
    basename, ext = os.path.splitext(flood_output)
    scratch_name = basename + str(elevation).replace('.', '_') + ext
    temp_fname = utils.create_temp_filename(scratch_name, num=num, prefix='', filetype='shape')
    return elevation, title, temp_fname
def _prep_flooder_input(elev=None, surge=None, slr=None, num=None, flood_output=None):
    """ Prepares the basic inputs to the :meth:`.analyze` method.

    Parameters
    ----------
    elev, slr : float, optional
        Final elevation and sea level rise associated with the
        scenario.
    surge : str, optional
        The name of the storm surge associated with the scenario
        (e.g., MHHW, 100yr).
    num : int, optional
        Scenario counter forwarded to ``utils.create_temp_filename``
        so each scenario gets a distinct temp file.
    flood_output : str
        Path/filename to where the final flooded areas will be saved.

    Returns
    -------
    elevation : float
        Flood elevation for this scenario.
    title : str
        The basis of the header to be displayed as an arcpy.Message.
    temp_fname : str
        Path/name of the temporary file where the intermediate output
        will be saved.

    """
    if elev is None:
        # standard scenario: elevation derives from SLR plus the surge height
        elevation = float(slr + SURGES[surge])
        title = "Analyzing flood elevation: {} ft ({}, {})".format(elevation, surge, slr)
    else:
        # custom scenario: elevation supplied directly
        elevation = float(elev)
        title = "Analyzing flood elevation: {} ft".format(elevation)

    if flood_output is None:
        raise ValueError('must provide a `flood_output`')

    # embed the elevation (dots -> underscores) in the temp filename so each
    # scenario writes to its own intermediate file
    basename, ext = os.path.splitext(flood_output)
    _temp_fname = basename + str(elevation).replace('.', '_') + ext
    temp_fname = utils.create_temp_filename(_temp_fname, num=num, prefix='', filetype='shape')

    return elevation, title, temp_fname
def finish_results(outputname, results, **kwargs):
    """
    Merge and clean up compiled output from `analyze`.

    Parameters
    ----------
    outputname : str
        Path to where the final file should be saved; when ``None``,
        no merged output is written.
    results : list of str
        Lists of all of the floods, flooded wetlands, and flooded
        buildings, respectively, that will be merged and deleted.
    sourcename : str, optional
        Path to the original source file of the results. If provided,
        its attributes will be spatially joined to the concatenated
        results.
    cleanup : bool, optional (default True)
        When True, the intermediate ``results`` files are deleted.

    Returns
    -------
    None

    """
    sourcename = kwargs.pop('sourcename', None)
    cleanup = kwargs.pop('cleanup', True)
    # NOTE(review): any remaining kwargs are not used in this body — confirm
    # whether a decorator consumes them.

    if outputname is not None:
        if sourcename is None:
            # no baseline to join against -- just concatenate directly
            utils.concat_results(outputname, *results)
        else:
            # concatenate into a scratch file, join the source attributes
            # onto it, then discard the scratch file
            scratch = utils.create_temp_filename(outputname, filetype='shape')
            utils.concat_results(scratch, *results)
            utils.join_results_to_baseline(
                outputname,
                utils.load_data(scratch, 'layer'),
                utils.load_data(sourcename, 'layer')
            )
            utils.cleanup_temp_results(scratch)

    if cleanup:
        utils.cleanup_temp_results(*results)
def main_execute(self, **params):
    """ Performs the flood-impact analysis on multiple flood elevations.

    Parameters
    ----------
    workspace : str
        The folder or geodatabase where the analysis will be executed.
    dem : str
        Filename of the digital elevation model (topography data)
        to be used in determining the inundated areas.
    zones : str
        Name of zones of influence layer.
    ID_column : str
        Name of the field in ``zones`` that uniquely identifies
        each zone of influence.
    elevation : list, optional
        List of (custom) flood elevations to be analyzed. If this is
        not provided, *all* of the standard scenarios will be
        evaluated.
    flood_output : str
        Filename where the extent of flooding and damage will be
        saved.
    wetlands, buildings : str, optional
        Names of the wetland and building footprint layers.
    wetland_output, building_output : str, optional
        Filenames where the flooded wetlands and building footprints
        will be saved.

    Returns
    -------
    None

    """
    # optional impact layers; None disables the corresponding assessment
    wetlands = params.get('wetlands', None)
    buildings = params.get('buildings', None)

    # accumulators for the per-scenario result paths
    all_floods = []
    all_wetlands = []
    all_buildings = []

    with utils.WorkSpace(params['workspace']), utils.OverwriteState(True):
        # rasterize the DEM/zones once; every scenario reuses these arrays
        topo_array, zones_array, template = tidegates.process_dem_and_zones(
            dem=params['dem'], zones=params['zones'], ID_column=params['ID_column']
        )

        for scenario in self.make_scenarios(**params):
            fldlyr, wtlndlyr, blgdlyr = self.analyze(
                topo_array=topo_array,
                zones_array=zones_array,
                template=template,
                elev=scenario['elev'],
                surge=scenario['surge_name'],
                slr=scenario['slr'],
                **params
            )
            all_floods.append(fldlyr.dataSource)
            if wetlands is not None:
                all_wetlands.append(wtlndlyr.dataSource)
            if buildings is not None:
                all_buildings.append(blgdlyr.dataSource)

        self.finish_results(
            params['flood_output'],
            all_floods,
            msg="Merging and cleaning up all flood results",
            verbose=True,
            asMessage=True,
        )

        if wetlands is not None:
            # NOTE(review): dict.get evaluates this default eagerly, so a
            # temp name is computed even when 'wetland_output' is supplied
            wtld_output = params.get(
                'wetland_output',
                utils.create_temp_filename(params['wetlands'], prefix='output_', filetype='shape')
            )
            self.finish_results(
                wtld_output,
                all_wetlands,
                sourcename=params['wetlands'],
                msg="Merging and cleaning up all wetlands results",
                verbose=True,
                asMessage=True,
            )

        if buildings is not None:
            bldg_output = params.get(
                'building_output',
                utils.create_temp_filename(params['buildings'], prefix='output_', filetype='shape')
            )
            self.finish_results(
                bldg_output,
                all_buildings,
                sourcename=params['buildings'],
                msg="Merging and cleaning up all buildings results",
                verbose=True,
                asMessage=True,
            )
def analyze(self, elev=None, surge=None, slr=None, **params): """ Tool-agnostic helper function for :meth:`.main_execute`. Parameters ---------- elev : float, optional Custom elevation to be analyzed slr : float, optional Sea level rise associated with the standard scenario. surge : str, optional The name of the storm surge associated with the scenario (e.g., MHHW, 100yr). **params : keyword arguments Keyword arguments of analysis parameters generated by `self._get_parameter_values` Returns ------- floods, flooded_wetlands, flooded_buildings : arcpy.mapping.Layers Layers (or None) of the floods and flood-impacted wetlands and buildings, respectively. """ # prep input elev, title, floods_path = self._prep_flooder_input( flood_output=params['flood_output'], elev=elev, surge=surge, slr=slr, ) # define the scenario in the message windows self._show_header(title) # run the scenario and add its info the output attribute table flooded_zones = tidegates.flood_area( dem=params['dem'], zones=params['zones'], ID_column=params['ID_column'], elevation_feet=elev, filename=floods_path, verbose=True, asMessage=True ) self._add_scenario_columns(flooded_zones.dataSource, elev=elev, surge=surge, slr=slr) # setup temporary files for impacted wetlands and buildings wl_path = utils.create_temp_filename(floods_path, prefix="_wetlands_", filetype='shape') bldg_path = utils.create_temp_filename(floods_path, prefix="_buildings_", filetype='shape') # asses impacts due to flooding fldlyr, wtlndlyr, blgdlyr = tidegates.assess_impact( floods_path=floods_path, flood_idcol=params['ID_column'], wetlands_path=params.get('wetlands', None), wetlands_output=wl_path, buildings_path=params.get('buildings', None), buildings_output=bldg_path, cleanup=False, verbose=True, asMessage=True, ) if wtlndlyr is not None: self._add_scenario_columns(wtlndlyr.dataSource, elev=elev, surge=surge, slr=slr) return fldlyr, wtlndlyr, blgdlyr
def main_execute(self, **params):
    """ Performs the flood-impact analysis on multiple flood elevations.

    Parameters
    ----------
    workspace : str
        The folder or geodatabase where the analysis will be executed.
    dem : str
        Filename of the digital elevation model (topography data)
        to be used in determining the inundated areas.
    zones : str
        Name of zones of influence layer.
    ID_column : str
        Name of the field in ``zones`` that uniquely identifies
        each zone of influence.
    elevation : list, optional
        List of (custom) flood elevations to be analyzed. If this is
        not provided, *all* of the standard scenarios will be
        evaluated.
    flood_output : str
        Filename where the extent of flooding and damage will be
        saved.
    wetlands, buildings : str, optional
        Names of the wetland and building footprint layers.
    wetland_output, building_output : str, optional
        Filenames where the flooded wetlands and building footprints
        will be saved.

    Returns
    -------
    None

    """
    # optional impact layers; None disables the corresponding assessment
    wetlands = params.get('wetlands', None)
    buildings = params.get('buildings', None)

    # accumulators for the per-scenario result paths
    all_floods = []
    all_wetlands = []
    all_buildings = []

    with utils.WorkSpace(params['workspace']), utils.OverwriteState(True):
        # rasterize the DEM/zones once; every scenario reuses these arrays
        topo_array, zones_array, template = tidegates.process_dem_and_zones(
            dem=params['dem'], zones=params['zones'], ID_column=params['ID_column']
        )

        # `num` makes each scenario's temp filenames unique
        for num, scenario in enumerate(self.make_scenarios(**params)):
            fldlyr, wtlndlyr, blgdlyr = self.analyze(
                topo_array=topo_array,
                zones_array=zones_array,
                template=template,
                elev=scenario['elev'],
                surge=scenario['surge_name'],
                slr=scenario['slr'],
                num=num,
                **params
            )
            all_floods.append(fldlyr.dataSource)
            if wetlands is not None:
                all_wetlands.append(wtlndlyr.dataSource)
            if buildings is not None:
                all_buildings.append(blgdlyr.dataSource)

        self.finish_results(
            params['flood_output'],
            all_floods,
            msg="Merging and cleaning up all flood results",
            verbose=True,
            asMessage=True,
        )

        if wetlands is not None:
            # NOTE(review): dict.get evaluates this default eagerly, so a
            # temp name is computed even when 'wetland_output' is supplied
            wtld_output = params.get(
                'wetland_output',
                utils.create_temp_filename(params['wetlands'], prefix='output_', filetype='shape')
            )
            self.finish_results(
                wtld_output,
                all_wetlands,
                sourcename=params['wetlands'],
                msg="Merging and cleaning up all wetlands results",
                verbose=True,
                asMessage=True,
            )

        if buildings is not None:
            bldg_output = params.get(
                'building_output',
                utils.create_temp_filename(params['buildings'], prefix='output_', filetype='shape')
            )
            self.finish_results(
                bldg_output,
                all_buildings,
                sourcename=params['buildings'],
                msg="Merging and cleaning up all buildings results",
                verbose=True,
                asMessage=True,
            )
def analyze(self, topo_array, zones_array, template, elev=None, surge=None, slr=None, num=0, **params): """ Tool-agnostic helper function for :meth:`.main_execute`. Parameters ---------- topo_array : numpy array Floating point array of the digital elevation model. zones_array : numpy array Categorical (integer) array of where each non-zero value delineates a tidegate's zone of influence. template : arcpy.Raster or tidegates.utils.RasterTemplate A raster or raster-like object that define the spatial extent of the analysis area. Required attributes are: - templatemeanCellWidth - templatemeanCellHeight - templateextent.lowerLeft elev : float, optional Custom elevation to be analyzed slr : float, optional Sea level rise associated with the standard scenario. surge : str, optional The name of the storm surge associated with the scenario (e.g., MHHW, 100yr). **params : keyword arguments Keyword arguments of analysis parameters generated by `self._get_parameter_values` Returns ------- floods, flooded_wetlands, flooded_buildings : arcpy.mapping.Layers Layers (or None) of the floods and flood-impacted wetlands and buildings, respectively. 
""" # prep input elev, title, floods_path = self._prep_flooder_input( flood_output=params['flood_output'], elev=elev, surge=surge, slr=slr, num=num, ) # define the scenario in the message windows self._show_header(title) # run the scenario and add its info the output attribute table flooded_zones = tidegates.flood_area( topo_array=topo_array, zones_array=zones_array, template=template, ID_column=params['ID_column'], elevation_feet=elev, filename=floods_path, num=num, verbose=True, asMessage=True ) self._add_scenario_columns(flooded_zones.dataSource, elev=elev, surge=surge, slr=slr) # setup temporary files for impacted wetlands and buildings wl_path = utils.create_temp_filename(floods_path, prefix="_wetlands_", filetype='shape', num=num) bldg_path = utils.create_temp_filename(floods_path, prefix="_buildings_", filetype='shape', num=num) # asses impacts due to flooding fldlyr, wtlndlyr, blgdlyr = tidegates.assess_impact( floods_path=floods_path, flood_idcol=params['ID_column'], wetlands_path=params.get('wetlands', None), wetlands_output=wl_path, buildings_path=params.get('buildings', None), buildings_output=bldg_path, cleanup=False, verbose=True, asMessage=True, ) if wtlndlyr is not None: self._add_scenario_columns(wtlndlyr.dataSource, elev=elev, surge=surge, slr=slr) return fldlyr, wtlndlyr, blgdlyr
def analyze(self, topo_array, zones_array, template, elev=None, surge=None, slr=None, **params): """ Tool-agnostic helper function for :meth:`.main_execute`. Parameters ---------- topo_array : numpy array Floating point array of the digital elevation model. zones_array : numpy array Categorical (integer) array of where each non-zero value delineates a tidegate's zone of influence. template : arcpy.Raster or tidegates.utils.RasterTemplate A raster or raster-like object that define the spatial extent of the analysis area. Required attributes are: - templatemeanCellWidth - templatemeanCellHeight - templateextent.lowerLeft elev : float, optional Custom elevation to be analyzed slr : float, optional Sea level rise associated with the standard scenario. surge : str, optional The name of the storm surge associated with the scenario (e.g., MHHW, 100yr). **params : keyword arguments Keyword arguments of analysis parameters generated by `self._get_parameter_values` Returns ------- floods, flooded_wetlands, flooded_buildings : arcpy.mapping.Layers Layers (or None) of the floods and flood-impacted wetlands and buildings, respectively. 
""" # prep input elev, title, floods_path = self._prep_flooder_input( flood_output=params['flood_output'], elev=elev, surge=surge, slr=slr, ) # define the scenario in the message windows self._show_header(title) # run the scenario and add its info the output attribute table flooded_zones = tidegates.flood_area( topo_array=topo_array, zones_array=zones_array, template=template, ID_column=params['ID_column'], elevation_feet=elev, filename=floods_path, verbose=True, asMessage=True ) self._add_scenario_columns(flooded_zones.dataSource, elev=elev, surge=surge, slr=slr) # setup temporary files for impacted wetlands and buildings wl_path = utils.create_temp_filename(floods_path, prefix="_wetlands_", filetype='shape') bldg_path = utils.create_temp_filename(floods_path, prefix="_buildings_", filetype='shape') # asses impacts due to flooding fldlyr, wtlndlyr, blgdlyr = tidegates.assess_impact( floods_path=floods_path, flood_idcol=params['ID_column'], wetlands_path=params.get('wetlands', None), wetlands_output=wl_path, buildings_path=params.get('buildings', None), buildings_output=bldg_path, cleanup=False, verbose=True, asMessage=True, ) if wtlndlyr is not None: self._add_scenario_columns(wtlndlyr.dataSource, elev=elev, surge=surge, slr=slr) return fldlyr, wtlndlyr, blgdlyr